code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package gointgeo
// Line2DDefinition defines a 2D line by two of its distinct points.
// Two definitions of the same geometric line whose points do not match in
// the same order are not considered equal (== compares the stored points).
// Use the Line method to obtain a canonical representation whose equality
// reflects equality of the defined lines.
type Line2DDefinition struct {
	point1, point2 Point2D // distinct endpoints; enforced by NewLine2DDefinition
}
// NewLine2DDefinition constructs a Line2DDefinition from two distinct
// points. It panics when the points coincide, since two equal points do
// not define a line.
func NewLine2DDefinition(p1, p2 Point2D) Line2DDefinition {
	if p1 == p2 {
		panic("Points are equal")
	}
	return Line2DDefinition{point1: p1, point2: p2}
}
// Point1 returns the first defining point.
func (d Line2DDefinition) Point1() Point2D {
	return d.point1
}
// Point2 returns the second defining point.
func (d Line2DDefinition) Point2() Point2D {
	return d.point2
}
// IsLineHorizontal reports whether both defining points share a y coordinate.
func (d Line2DDefinition) IsLineHorizontal() bool {
	return d.point1.Y == d.point2.Y
}
// IsLineVertical reports whether both defining points share an x coordinate.
func (d Line2DDefinition) IsLineVertical() bool {
	return d.point1.X == d.point2.X
}
// DoesLineHavePoint reports whether point lies on the infinite line through
// point1 and point2.
//
// The point is collinear with point1 and point2 iff the cross product of
// the vectors (point - point1) and (point - point2) is zero. Working with
// the cross product avoids slope division entirely — no division by zero
// for vertical lines, and multiplication is cheaper — and it also covers
// the case where point coincides with one of the defining points: the
// corresponding vector is zero, making both products zero.
func (d Line2DDefinition) DoesLineHavePoint(point Point2D) bool {
	// Widen to int32 first: a difference of 16-bit coordinates needs 17 bits.
	dx1 := int32(point.X) - int32(d.point1.X)
	dy1 := int32(point.Y) - int32(d.point1.Y)
	dx2 := int32(point.X) - int32(d.point2.X)
	dy2 := int32(point.Y) - int32(d.point2.Y)
	// Widen to int64 before multiplying: products of 17-bit values need 34 bits.
	return int64(dx1)*int64(dy2) == int64(dx2)*int64(dy1)
}
// lineSlope returns dy/dx as a fraction64. It panics on vertical lines,
// whose slope is undefined.
func (d Line2DDefinition) lineSlope() fraction64 {
	if d.IsLineVertical() {
		panic("The line is vertical.")
	}
	// int32 widening: a difference of 16-bit coordinates needs 17 bits.
	dx := int32(d.point2.X) - int32(d.point1.X)
	dy := int32(d.point2.Y) - int32(d.point1.Y)
	return newFraction64FromNonCanonical(int64(dy), int64(dx))
}
// xOfLineCrossingXAxis returns the x coordinate where the line crosses the
// x axis, i.e. x1 - y1*(dx/dy), expressed as the single fraction
// (x1*dy - y1*dx) / dy. It panics on horizontal lines, which never cross
// the x axis (or coincide with it entirely).
func (d Line2DDefinition) xOfLineCrossingXAxis() fraction64 {
	if d.IsLineHorizontal() {
		panic("The line is horizontal.")
	}
	// int32 widening: a difference of 16-bit coordinates needs 17 bits.
	dx := int32(d.point2.X) - int32(d.point1.X)
	dy := int32(d.point2.Y) - int32(d.point1.Y)
	numerator := int64(d.point1.X)*int64(dy) - int64(d.point1.Y)*int64(dx)
	return newFraction64FromNonCanonical(numerator, int64(dy))
}
// LineEqualsTo reports whether both definitions describe the same geometric
// line: true iff both of the other definition's points lie on this line.
func (d Line2DDefinition) LineEqualsTo(other Line2DDefinition) bool {
	return d.DoesLineHavePoint(other.point1) &&
		d.DoesLineHavePoint(other.point2)
}
func (lineDefinition Line2DDefinition) Line() Line2D {
if lineDefinition.IsLineVertical() {
// A vertical line is completely described by its x coordinate.
return Line2D{
isVertical: true,
isHorizontal: false,
verticalX: lineDefinition.point1.X}
}
if lineDefinition.IsLineHorizontal() {
// A horizontal line is completely described by its y coordinate.
return Line2D{
isVertical: false,
isHorizontal: true,
horizontalY: lineDefinition.point1.Y}
}
// The slope together with the x axis crossing point
// form a canonical representation of the line.
return Line2D{
isVertical: false,
isHorizontal: false,
slope: lineDefinition.lineSlope(),
xOfXAxisCrossing: lineDefinition.xOfLineCrossingXAxis()}
} | line_2d_definition.go | 0.89869 | 0.742748 | line_2d_definition.go | starcoder |
package runes
// CloneSlice returns a copy of r that shares no backing storage with it.
// A nil or empty input yields nil, matching append([]rune(nil), ...).
func CloneSlice(r []rune) []rune {
	if len(r) == 0 {
		return nil
	}
	c := make([]rune, len(r))
	copy(c, r)
	return c
}
// Concat joins any number of rune slices into a single slice.
// The result never shares backing storage with the inputs, so mutating or
// appending to it cannot clobber a caller's slice. (The previous version
// returned the input itself in the single-argument case, aliasing it —
// inconsistent with every other path, which returned fresh storage.)
// Concat with no arguments returns nil.
func Concat(r ...[]rune) []rune {
	if len(r) == 0 {
		return nil
	}
	// Size the result exactly to avoid repeated growth copies.
	total := 0
	for _, v := range r {
		total += len(v)
	}
	res := make([]rune, 0, total)
	for _, v := range r {
		res = append(res, v...)
	}
	return res
}
// InsertAt inserts the rune r into the slice s at index i.
// All runes at or after index i shift one position right, growing the
// slice by one rune; the (possibly reallocated) slice is returned.
func InsertAt(s []rune, r rune, i int) []rune {
	res := append(s, 0) // grow by one element
	for j := len(res) - 1; j > i; j-- {
		res[j] = res[j-1]
	}
	res[i] = r
	return res
}
// InsertSliceAt inserts the rune slice r into s at index i, shifting the
// runes from index i onward right by len(r); the resulting slice is
// returned.
//
// Fix: the previous one-liner append(s[:i], append(r, s[i:]...)...) could
// write s's tail into r's backing array whenever r had spare capacity,
// silently mutating the caller's r. Building into a fresh slice cannot
// touch either input's storage.
func InsertSliceAt(s, r []rune, i int) []rune {
	res := make([]rune, 0, len(s)+len(r))
	res = append(res, s[:i]...)
	res = append(res, r...)
	return append(res, s[i:]...)
}
// CutSliceAt removes cnt runes from s starting at index i.
// It returns s with that span closed up, together with a copy of the
// removed runes (copied before the shift so the cut is not clobbered).
func CutSliceAt(s []rune, i, cnt int) ([]rune, []rune) {
	cut := CloneSlice(s[i : i+cnt])
	rest := append(s[:i], s[i+cnt:]...)
	return rest, cut
}
// DeleteAt removes the rune at index i from s, shifting later runes one
// position left; the shrunken slice is returned.
func DeleteAt(s []rune, i int) []rune {
	return append(s[:i], s[i+1:]...)
}
// TrimLeft returns a subslice of r with all leading code points c
// satisfying f(c) removed. When every rune matches, an empty (non-nil)
// slice is returned.
func TrimLeft(r []rune, f func(rune) bool) []rune {
	for i, c := range r {
		if !f(c) {
			return r[i:]
		}
	}
	return []rune{}
}
// TrimRight returns a subslice of r with all trailing code points c
// satisfying f(c) removed. When every rune matches, an empty (non-nil)
// slice is returned.
func TrimRight(r []rune, f func(rune) bool) []rune {
	end := len(r)
	for end > 0 && f(r[end-1]) {
		end--
	}
	if end == 0 {
		return []rune{}
	}
	return r[:end]
}
// Trim returns a slice of the r with all leading and trailing Unicode code points c satisfying f(c) removed.
func Trim(r []rune, f func(rune) bool) []rune {
return TrimRight(TrimLeft(r, f), f)
} | runes.go | 0.802439 | 0.595228 | runes.go | starcoder |
package main
// IndexEntry is a single key/value pair stored in the Index tree.
type IndexEntry struct {
	K CompositeKey // lookup key; ordering comes from CompositeKey.Less
	V bool         // payload value
}
// Index is a persistent (immutable) AVL tree node mapping CompositeKey to
// bool. A nil *Index is a valid empty tree; Insert and Remove return new
// trees and never modify existing nodes.
type Index struct {
	IndexEntry IndexEntry // entry stored at this node
	h          int        // height of the subtree rooted here
	len        int        // number of entries in this subtree
	children   [2]*Index  // children[0] = smaller keys, children[1] = larger keys
}
// Height reports the height of the subtree rooted at node; nil has height 0.
func (node *Index) Height() int {
	if node != nil {
		return node.h
	}
	return 0
}
// combinedDepthIndex returns the height of a node whose children are n1 and
// n2: one more than the taller subtree.
// suffix Index is needed because this will get specialised in codegen
func combinedDepthIndex(n1, n2 *Index) int {
	d := n1.Height()
	if h := n2.Height(); h > d {
		d = h
	}
	return d + 1
}
// mkIndex allocates a node holding entry above the two given subtrees,
// deriving its cached height and length from the children.
// suffix Index is needed because this will get specialised in codegen
func mkIndex(entry IndexEntry, left *Index, right *Index) *Index {
	return &Index{
		IndexEntry: entry,
		h:          combinedDepthIndex(left, right),
		len:        1 + left.Len() + right.Len(),
		children:   [2]*Index{left, right},
	}
}
// Get looks up key in the tree, returning its value and whether it was
// found. A missing key yields the zero value and ok == false.
func (node *Index) Get(key CompositeKey) (value bool, ok bool) {
	for finger := node; finger != nil; {
		switch {
		case key.Less(finger.IndexEntry.K):
			finger = finger.children[0]
		case finger.IndexEntry.K.Less(key):
			finger = finger.children[1]
		default:
			// Neither ordering holds, so the keys are equal.
			return finger.IndexEntry.V, true
		}
	}
	// Fell off the tree: value keeps its zero value.
	return value, false
}
// Insert returns a new tree containing the key/value pair; the receiver is
// left untouched (persistent update). Inserting an existing key replaces
// its value. Works on a nil receiver, producing a single-node tree.
func (node *Index) Insert(key CompositeKey, value bool) *Index {
	if node == nil {
		return mkIndex(IndexEntry{key, value}, nil, nil)
	}
	entry, left, right := node.IndexEntry, node.children[0], node.children[1]
	if node.IndexEntry.K.Less(key) {
		right = right.Insert(key, value)
	} else if key.Less(node.IndexEntry.K) {
		left = left.Insert(key, value)
	} else { // equals: overwrite the value stored at this key
		entry = IndexEntry{key, value}
	}
	// Rebuild this node, rebalancing if the insert skewed subtree heights.
	return rotateIndex(entry, left, right)
}
// Remove returns a new tree without the given key; the receiver is left
// untouched. Removing an absent key returns an equivalent tree.
func (node *Index) Remove(key CompositeKey) *Index {
	if node == nil {
		return nil
	}
	entry, left, right := node.IndexEntry, node.children[0], node.children[1]
	if node.IndexEntry.K.Less(key) {
		right = right.Remove(key)
	} else if key.Less(node.IndexEntry.K) {
		left = left.Remove(key)
	} else { // equals: replace this node with its in-order predecessor
		max := left.Max()
		if max == nil {
			// No left subtree: the right child takes this node's place.
			return right
		} else {
			left = left.Remove(max.K)
			entry = *max
		}
	}
	return rotateIndex(entry, left, right)
}
// rotateIndex builds a balanced node from entry and two subtrees whose
// heights may differ by at most 2 (the most a single insert or remove can
// skew them), applying a single or double AVL rotation when they differ
// by 2, or reattaching them unchanged otherwise.
// suffix Index is needed because this will get specialised in codegen
func rotateIndex(entry IndexEntry, left *Index, right *Index) *Index {
	if right.Height()-left.Height() > 1 { // implies right != nil
		// Right-heavy: some form of left rotation is needed.
		rl := right.children[0]
		rr := right.children[1]
		if combinedDepthIndex(left, rl)-rr.Height() > 1 {
			// Inner (right-left) subtree is the tall one: double rotation.
			return mkIndex(
				rl.IndexEntry,
				mkIndex(entry, left, rl.children[0]),
				mkIndex(right.IndexEntry, rl.children[1], rr),
			)
		}
		// single left rotation
		return mkIndex(right.IndexEntry, mkIndex(entry, left, rl), rr)
	}
	if left.Height()-right.Height() > 1 { // implies left != nil
		// Left-heavy: some form of right rotation is needed.
		ll := left.children[0]
		lr := left.children[1]
		if combinedDepthIndex(right, lr)-ll.Height() > 1 {
			// Inner (left-right) subtree is the tall one: double rotation.
			return mkIndex(
				lr.IndexEntry,
				mkIndex(left.IndexEntry, ll, lr.children[0]),
				mkIndex(entry, lr.children[1], right),
			)
		}
		// single right rotation
		return mkIndex(left.IndexEntry, ll, mkIndex(entry, lr, right))
	}
	// Heights differ by at most 1: already balanced.
	return mkIndex(entry, left, right)
}
// Len reports the number of entries in the tree; a nil tree has zero.
func (node *Index) Len() int {
	if node != nil {
		return node.len
	}
	return 0
}
// Entries returns every key/value pair of the tree in ascending key order,
// using an explicit stack instead of recursion.
//
// Fix: the original marked top.leftDone = true *after* appending the left
// child to the stack. append can grow the stack past the 20-frame
// preallocation and reallocate its backing array, leaving `top` pointing
// into the stale array — the mark was lost, and the left subtree was
// re-walked and re-emitted. The mark is now set before any push.
func (node *Index) Entries() []IndexEntry {
	// node.Len() is nil-safe, so sizing the result before the nil check is fine.
	elems := make([]IndexEntry, 0, node.Len())
	if node == nil {
		return elems
	}
	// frame is one in-order traversal position; leftDone records whether the
	// left subtree has already been scheduled.
	type frame struct {
		node     *Index
		leftDone bool
	}
	var preallocated [20]frame // stack-allocated fast path for shallow trees
	stack := preallocated[:0]
	stack = append(stack, frame{node, false})
	for len(stack) > 0 {
		top := &stack[len(stack)-1]
		if !top.leftDone {
			// Mark BEFORE pushing: a reallocating append would otherwise
			// discard a write made through the stale `top` pointer.
			top.leftDone = true
			if top.node.children[0] != nil {
				stack = append(stack, frame{top.node.children[0], false})
			}
		} else {
			stack = stack[:len(stack)-1] // pop (shrinking never reallocates)
			elems = append(elems, top.node.IndexEntry)
			if top.node.children[1] != nil {
				// `top` is not used after this append, so a reallocation
				// here is harmless.
				stack = append(stack, frame{top.node.children[1], false})
			}
		}
	}
	return elems
}
// extreme walks to the outermost node in direction dir (0 = minimum,
// 1 = maximum) and returns a pointer to its entry, or nil for an empty tree.
func (node *Index) extreme(dir int) *IndexEntry {
	finger := node
	if finger == nil {
		return nil
	}
	for next := finger.children[dir]; next != nil; next = finger.children[dir] {
		finger = next
	}
	return &finger.IndexEntry
}
// Min returns the entry with the smallest key, or nil for an empty tree.
func (node *Index) Min() *IndexEntry {
	return node.extreme(0)
}
// Max returns the entry with the largest key, or nil for an empty tree.
func (node *Index) Max() *IndexEntry {
	return node.extreme(1)
}
// Iterate returns an iterator over all entries in ascending key order.
func (node *Index) Iterate() IndexIterator {
	return newIteratorIndex(node, 0, nil)
}
// IterateFrom returns an ascending iterator positioned at the first entry
// whose key is not less than k.
func (node *Index) IterateFrom(k CompositeKey) IndexIterator {
	return newIteratorIndex(node, 0, &k)
}
// IterateReverse returns an iterator over all entries in descending key order.
func (node *Index) IterateReverse() IndexIterator {
	return newIteratorIndex(node, 1, nil)
}
// IterateReverseFrom returns a descending iterator positioned at the last
// entry whose key is not greater than k.
func (node *Index) IterateReverseFrom(k CompositeKey) IndexIterator {
	return newIteratorIndex(node, 1, &k)
}
// IndexIteratorStackFrame is one level of the iterator's explicit traversal
// stack: a node plus its position in the per-frame state machine (see Next).
type IndexIteratorStackFrame struct {
	node  *Index
	state int8 // 0 unvisited, 1 descend near child, 2 yield entry, 3 replace with far child
}
// IndexIterator walks an Index tree without recursion. The zero value is an
// exhausted iterator (Done reports true).
type IndexIterator struct {
	direction    int // 0 ascending, 1 descending
	stack        []IndexIteratorStackFrame
	currentEntry IndexEntry // entry most recently yielded by Next/seek
}
// newIteratorIndex starts an iteration over node. direction selects the
// order (0 ascending, 1 descending); startFrom, when non-nil, positions the
// iterator at the first key not before (resp. not after) it.
// suffix Index is needed because this will get specialised in codegen
func newIteratorIndex(node *Index, direction int, startFrom *CompositeKey) IndexIterator {
	if node == nil {
		return IndexIterator{} // empty tree: iterator starts out Done
	}
	// Capacity is a heuristic; the stack may still grow during iteration.
	stack := make([]IndexIteratorStackFrame, 1, node.Height())
	stack[0] = IndexIteratorStackFrame{node: node, state: 0}
	iter := IndexIterator{direction: direction, stack: stack}
	if startFrom != nil {
		stack[0].state = 2 // seek drives frames in state 2 directly
		iter.seek(*startFrom)
	} else {
		iter.Next()
	}
	return iter
}
// Done reports whether the iteration is exhausted.
func (i *IndexIterator) Done() bool {
	return len(i.stack) == 0
}
// GetKey returns the key of the entry the iterator is positioned on.
// Only meaningful while Done reports false.
func (i *IndexIterator) GetKey() CompositeKey {
	return i.currentEntry.K
}
// GetValue returns the value of the entry the iterator is positioned on.
// Only meaningful while Done reports false.
func (i *IndexIterator) GetValue() bool {
	return i.currentEntry.V
}
// Next advances the iterator to the next entry in iteration order, or makes
// Done report true once the tree is exhausted. Each stack frame runs a
// small state machine:
//
//	state 0: unvisited (node may be nil)
//	state 1: push the near child (children[direction])
//	state 2: yield this node's entry
//	state 3: replace this frame with the far child (tail call)
//
// Fix: in state 1 the original appended the child frame and only then set
// frame.state = 2. append can grow the stack beyond its initial capacity
// and reallocate the backing array, so that write went through a stale
// pointer and was lost — the frame stayed in state 1 and its subtree was
// traversed (and yielded) again. The state is now advanced before the push.
func (i *IndexIterator) Next() {
	for len(i.stack) > 0 {
		frame := &i.stack[len(i.stack)-1]
		switch frame.state {
		case 0:
			if frame.node == nil {
				last := len(i.stack) - 1
				i.stack[last] = IndexIteratorStackFrame{} // zero out for GC
				i.stack = i.stack[:last]                  // pop
			} else {
				frame.state = 1
			}
		case 1:
			// Advance the state BEFORE appending; see the comment above.
			frame.state = 2
			i.stack = append(i.stack, IndexIteratorStackFrame{node: frame.node.children[i.direction], state: 0})
		case 2:
			i.currentEntry = frame.node.IndexEntry
			frame.state = 3
			return
		case 3:
			// override frame - tail call optimisation
			i.stack[len(i.stack)-1] = IndexIteratorStackFrame{node: frame.node.children[1-i.direction], state: 0}
		}
	}
}
func (i *IndexIterator) seek(k CompositeKey) {
LOOP:
for {
frame := &i.stack[len(i.stack)-1]
if frame.node == nil {
last := len(i.stack) - 1
i.stack[last] = IndexIteratorStackFrame{} // zero out
i.stack = i.stack[:last] // pop
break LOOP
}
if (i.direction == 0 && !(frame.node.IndexEntry.K.Less(k))) || (i.direction == 1 && !(k.Less(frame.node.IndexEntry.K))) {
i.stack = append(i.stack, IndexIteratorStackFrame{node: frame.node.children[i.direction], state: 2})
} else {
// override frame - tail call optimisation
i.stack[len(i.stack)-1] = IndexIteratorStackFrame{node: frame.node.children[1-i.direction], state: 2}
}
}
if len(i.stack) > 0 {
frame := &i.stack[len(i.stack)-1]
i.currentEntry = frame.node.IndexEntry
frame.state = 3
}
} | examples/composite_index/composite.go | 0.576065 | 0.411229 | composite.go | starcoder |
package rnd
import (
"math"
"github.com/cpmech/gosl/io"
"github.com/cpmech/gosl/plt"
"github.com/cpmech/gosl/utl"
)
// DistFrechet implements the Frechet / Type II Extreme Value Distribution
// (largest value).
type DistFrechet struct {
	L float64 // location. default = 0
	C float64 // scale. default = 1
	A float64 // shape
}
// init registers this distribution in the package factory under key "F".
func init() {
	distallocators["F"] = func() Distribution { return new(DistFrechet) }
}
// Name returns the name of this probability distribution.
func (o *DistFrechet) Name() string { return "Frechet" }
// Init initialises the Frechet distribution from the variable's L, C and A
// parameters, then writes the resulting mean and standard deviation back
// into p. A (near-)zero scale falls back to the default of 1.
func (o *DistFrechet) Init(p *Variable) {
	o.L = p.L
	o.C = p.C
	o.A = p.A
	if math.Abs(o.C) < 1e-15 {
		o.C = 1 // default scale
	}
	p.M = o.Mean()
	p.S = math.Sqrt(o.Variance())
}
// Pdf computes the probability density function @ x; the density is zero
// at or below the location parameter (the support's lower bound).
func (o DistFrechet) Pdf(x float64) float64 {
	if x-o.L < 1e-15 {
		return 0
	}
	t := (x - o.L) / o.C // standardised coordinate
	return math.Exp(-math.Pow(t, -o.A)) * math.Pow(t, -1.0-o.A) * o.A / o.C
}
// Cdf computes the cumulative probability function @ x; zero at or below
// the location parameter.
func (o DistFrechet) Cdf(x float64) float64 {
	if x-o.L < 1e-15 {
		return 0
	}
	t := (x - o.L) / o.C // standardised coordinate
	return math.Exp(-math.Pow(t, -o.A))
}
// Mean returns the expected value, which is finite only for shape A > 1;
// +Inf otherwise. The negated guard (rather than A <= 1) keeps the
// original NaN propagation for a NaN shape.
func (o DistFrechet) Mean() float64 {
	if !(o.A > 1.0) {
		return math.Inf(1)
	}
	return o.L + o.C*math.Gamma(1.0-1.0/o.A)
}
// Variance returns the variance, which is finite only for shape A > 2;
// +Inf otherwise. The negated guard (rather than A <= 2) keeps the
// original NaN propagation for a NaN shape.
func (o DistFrechet) Variance() float64 {
	if !(o.A > 2.0) {
		return math.Inf(1)
	}
	return o.C * o.C * (math.Gamma(1.0-2.0/o.A) - math.Pow(math.Gamma(1.0-1.0/o.A), 2.0))
}
// FrechetPlotCoef plots coefficients for Frechet parameter's estimation
func FrechetPlotCoef(dirout, fnkey string, amin, amax float64) {
np := 201
A := utl.LinSpace(amin, amax, np)
X := make([]float64, np)
Y := make([]float64, np)
var dist DistFrechet
for i := 0; i < np; i++ {
dist.Init(&Variable{L: 0, A: A[i]})
X[i] = 1.0 / A[i]
μ := dist.Mean()
σ2 := dist.Variance()
δ2 := σ2 / (μ * μ)
Y[i] = 1.0 + δ2
}
k := np - 1
plt.Plot(X, Y, nil)
plt.Text(X[k], Y[k], io.Sf("(%.4f,%.4f)", X[k], Y[k]), nil)
plt.Text(X[0], Y[0], io.Sf("(%.4f,%.4f)", X[0], Y[0]), nil)
plt.Gll("$1/\\alpha$", "$1+\\delta^2$", nil)
plt.Save(dirout, fnkey)
} | rnd/dist_frechet.go | 0.76782 | 0.432962 | dist_frechet.go | starcoder |
package svgd
import (
"errors"
"fmt"
"github.com/ajstarks/svgo"
"io"
"math"
"math/rand"
)
// LinearCategory is one data series of a LinearDiagram.
type LinearCategory struct {
	Name      string // legend label
	Color     string // CSS color; a random color is assigned by validate when empty
	LineWidth int    // stroke width; defaulted to 1 by validate when zero
	values    []float64
}
// SetValues appends vals to the category's data series.
func (lc *LinearCategory) SetValues(vals []float64) {
	lc.values = append(lc.values, vals...)
}
// LinearDiagram renders one or more LinearCategory series as an SVG line
// chart with axes, labels, an optional grid, and a legend.
type LinearDiagram struct {
	Title      string
	Width      uint
	Height     uint
	Grid       bool    // draw background grid lines
	MinValue   float64 // y-axis lower bound; widened by validate to cover the data
	MaxValue   float64 // y-axis upper bound; widened by validate to cover the data
	Step       float64 // y-axis tick spacing; must be > 0
	categories []*LinearCategory
	labels     []string // x-axis labels
}
// NewCategory creates a category with the given name, registers it on the
// diagram, and returns it for further configuration.
func (d *LinearDiagram) NewCategory(name string) (cat *LinearCategory) {
	cat = &LinearCategory{Name: name, values: make([]float64, 0)}
	d.categories = append(d.categories, cat)
	return cat
}
// SetLabels stores a private copy of the x-axis labels, so later mutation
// of the caller's slice cannot affect the diagram.
func (d *LinearDiagram) SetLabels(labels []string) {
	cp := make([]string, len(labels))
	copy(cp, labels)
	d.labels = cp
}
// validate checks the configuration and derives rendering defaults: it
// widens MinValue/MaxValue to cover every category's data, defaults zero
// line widths to 1, and assigns a random color to categories without one.
//
// Fixes: each failed check now returns immediately, so an earlier error is
// no longer silently overwritten by a later one (the original funneled all
// failures through a single err variable and returned only the last); the
// random color uses %02x so channels below 16 no longer produce invalid
// short colors such as "#1a2"; and rand.Intn(256) makes 255 reachable.
func (d *LinearDiagram) validate() (err error) {
	if d.Step <= 0 {
		return errors.New("Error: Step must be greater than zero")
	}
	if len(d.categories) == 0 {
		return errors.New("Error: Nothing to build, categories are empty")
	}
	// Widen the value range and apply per-category defaults.
	for _, cat := range d.categories {
		for _, v := range cat.values {
			if v > d.MaxValue {
				d.MaxValue = v
			}
			if v < d.MinValue {
				d.MinValue = v
			}
		}
		if cat.LineWidth == 0 {
			cat.LineWidth = 1
		}
		// Generate a random color if none was configured.
		if cat.Color == "" {
			cat.Color = fmt.Sprintf("#%02x%02x%02x", rand.Intn(256), rand.Intn(256), rand.Intn(256))
		}
	}
	if d.MinValue == d.MaxValue {
		return errors.New("Error: MaxValue value must be greater than MinValue")
	}
	return nil
}
func (d *LinearDiagram) build(w io.Writer) (err error) {
if err = d.validate(); err != nil {
return
}
s := svg.New(w)
s.Start(int(d.Width), int(d.Height))
// Title
s.Text(int(d.Width)/2, dsMarginTop/2, d.Title,
fmt.Sprintf("text-anchor:middle;alignment-baseline:central;font-size:%d;fill:%s",
dsTitleFontSize, dsTitleFontColor))
// Draw X and Y axis
s.Line(dsMarginLeft, int(d.Height)-dsMarginBottom, int(d.Width)-dsMarginRight, int(d.Height)-dsMarginBottom,
fmt.Sprintf("stroke-width:%d;stroke:%s;", dsAxisLineWidth, dsAxisLineColor))
s.Line(dsMarginLeft, int(d.Height)-dsMarginBottom, dsMarginLeft, dsMarginTop,
fmt.Sprintf("stroke-width:%d;stroke:%s;", dsAxisLineWidth, dsAxisLineColor))
// Write labels
lenLabels := len(d.labels)
xStep := (int(d.Width) - dsMarginLeft - dsMarginRight) / (lenLabels - 1)
left := dsMarginLeft
s.Group(fmt.Sprintf("text-anchor:middle;font-size:%d;fill:%s", dsLabelsFontSize, dsLabelsFontColor))
for i := 0; i < lenLabels; i++ {
s.Text(left, int(d.Height)-dsMarginBottom+dsLabelsMargin, d.labels[i])
left += xStep
}
s.Gend()
// Round minimum value to nearest multiple of step
rem := math.Abs(math.Remainder(d.MinValue, d.Step))
if rem > 0 {
d.MinValue -= rem
}
rem = math.Abs(math.Remainder(d.MaxValue, d.Step))
if rem > 0 {
d.MaxValue += rem
}
// Calculate dimensions
var graphHeight int = int(d.Height) - dsMarginBottom - dsMarginTop
var valSegment float64 = d.MaxValue - d.MinValue
var stepsCount int = int(valSegment/d.Step+0.5) + 1
var stepHeight int = graphHeight / (stepsCount - 1)
// Write Y values
textValue := d.MinValue
top := int(d.Height) - dsMarginBottom
s.Group(fmt.Sprintf("text-anchor:end;font-size:%d;fill:%s",
dsLabelsFontSize, dsLabelsFontColor))
for i := 0; i < stepsCount; i++ {
s.Text(dsMarginLeft-dsValuesMargin, top, fmt.Sprintf("%.2f", textValue))
textValue += d.Step
top -= stepHeight
}
s.Gend()
// Drawing grid
if d.Grid {
s.Group("stroke-width:1;stroke:lightgray")
// Vertical grid
left = dsMarginLeft + xStep
for i := 1; i < lenLabels; i++ {
s.Line(left, dsMarginTop, left, int(d.Height)-dsMarginBottom)
left += xStep
}
// Horizontal grid
top = int(d.Height) - dsMarginBottom - stepHeight
for i := 1; i < stepsCount; i++ {
s.Line(dsMarginLeft, top, int(d.Width)-dsMarginRight, top)
top -= stepHeight
}
s.Gend()
}
// Draw linear graphs and legend
// Calculate height and start for legend
lHeight := (dsMarginBottom - dsLabelsMargin) / (len(d.categories) + 1)
lTop := int(d.Height) - dsMarginBottom + dsLabelsMargin + lHeight/2
for _, cat := range d.categories {
s.Group(fmt.Sprintf("stroke-width:%d;stroke:%s", cat.LineWidth, cat.Color))
x1 := dsMarginLeft
//y1 := int(d.Height) - dsMarginBottom - int((cat.values[0] - d.MinValue) * pxInVal)
var multiplier float64 = float64(stepHeight) / d.Step
var pointValue float64 = cat.values[0] - d.MinValue
var stepsInPointValue int = int(pointValue / d.Step)
var remain int = int((pointValue - float64(stepsInPointValue)*d.Step) * multiplier)
y1 := int(d.Height) - dsMarginBottom - int(pointValue/d.Step)*stepHeight - remain
lenVals := len(cat.values)
if lenLabels < lenVals {
lenVals = lenLabels
}
for iVal := 0; iVal < (lenVals - 1); iVal++ {
x2 := dsMarginLeft + (iVal+1)*xStep
pointValue = cat.values[iVal+1] - d.MinValue
stepsInPointValue := int(pointValue / d.Step)
remain = int((pointValue - float64(stepsInPointValue)*d.Step) * multiplier)
y2 := int(d.Height) - dsMarginBottom - int(pointValue/d.Step)*stepHeight - remain
s.Line(x1, y1, x2, y2)
y1 = y2
x1 = x2
}
s.Gend()
// Draw legend
// TODO draw legend in any side
// TODO do not draw legend if it's do not fit?
s.Rect(int(d.Width)/2, lTop+lHeight/2-dsLegendMarkSize/2, dsLegendMarkSize, dsLegendMarkSize,
fmt.Sprintf("fill:%s", cat.Color))
s.Text(int(d.Width)/2+dsLegendMarkSize+5, lTop+lHeight/2+dsLegendFontSize/2, cat.Name,
fmt.Sprintf("font-size:%d;fill:%s", dsLegendFontSize, dsLabelsFontColor))
lTop += lHeight
}
s.End()
return
} | linear.go | 0.641085 | 0.421433 | linear.go | starcoder |
package mathx
import (
"bitbucket.org/dtolpin/infergo/ad"
"math"
)
// Sigmoid computes the sigmoid function 1/(1 + exp(-x)).
func Sigm(x float64) float64 {
return 1. / (1. + math.Exp(-x))
}
// LogDSigm is log d Sigm(x) / dx, used for computing log
// probability in the presence of sigmoid-transformed variables.
func LogDSigm(x float64) float64 {
return x - 2*math.Log(1+math.Exp(x))
}
// init registers the reverse-mode derivatives of Sigm and LogDSigm with
// the infergo autodiff engine.
func init() {
	ad.RegisterElemental(Sigm,
		// dSigm / dx = Exp(-x) / (1 + Exp(-x))^2
		//            = Sigm(x) * (1 - Sigm(x)), reusing the forward value
		func(value float64, _ ...float64) []float64 {
			return []float64{value * (1. - value)}
		})
	ad.RegisterElemental(LogDSigm,
		// d LogDSigm / dx = 1 - 2*Sigm(x)
		func(_ float64, params ...float64) []float64 {
			return []float64{1 - 2*Sigm(params[0])}
		})
}
// LogSumExp computes log(exp(x) + exp(y)) robustly.
func LogSumExp(x, y float64) float64 {
max := x
if y > max {
max = y
}
return max + math.Log(math.Exp(x-max)+math.Exp(y-max))
}
// init registers the partial derivatives of LogSumExp:
//
//	d lse(x, y) / dx = exp(x) / (exp(x) + exp(y)) = 1 / (1 + exp(y-x))
//	d lse(x, y) / dy = exp(y) / (exp(x) + exp(y)) = exp(y-x) / (1 + exp(y-x))
func init() {
	ad.RegisterElemental(LogSumExp,
		func(_ float64, params ...float64) []float64 {
			z := math.Exp(params[1] - params[0])
			t := 1 / (1 + z)
			return []float64{t, t * z}
		})
}
// LogGamma and digamma are borrowed from the source code of
// WebPPL, https://github.com/probmods/webppl.
// Copyright © 2014 WebPPL contributors
// LogGamma is used in the log-density of the Gamma and Beta
// distributions.
func LogGamma(x float64) float64 {
x -= 1
tmp := x + 5.5
tmp -= (x + 0.5) * math.Log(tmp)
var ser = 1.000000000190015
for _, g := range gammaCof {
x += 1
ser += g / x
}
return -tmp + math.Log(2.5066282746310005*ser)
}
var gammaCof = []float64{
76.18009172947146,
-86.50532032941677,
24.01409824083091,
-1.231739572450155,
0.1208650973866179e-2,
-0.5395239384953e-5,
}
// digamma is the derivative of LogGamma.
func digamma(x float64) float64 {
if x < 6 {
return digamma(x+1) - 1/x
}
return math.Log(x) -
1/(2*x) -
1/(12*math.Pow(x, 2)) +
1/(120*math.Pow(x, 4)) -
1/(252*math.Pow(x, 6)) +
1/(240*math.Pow(x, 8)) -
5/(660*math.Pow(x, 10)) +
691/(32760*math.Pow(x, 12)) -
1/(12*math.Pow(x, 14))
}
func init() {
ad.RegisterElemental(LogGamma,
func(_ float64, params ...float64) []float64 {
return []float64{digamma(params[0])}
})
} | mathx/mathx.go | 0.750278 | 0.559892 | mathx.go | starcoder |
package term
import (
"strconv"
)
const (
	// MinusOneMinusOne is Hash(-1, -1), i.e. 0xFFFFFFFF: both packed
	// 16-bit halves are all ones.
	MinusOneMinusOne = 4294967295
)
// Position is a column/row coordinate pair with a cached packed hash (see
// Hash). Callers that mutate Row or Column directly must call UpdateHash
// to keep the cache consistent.
type Position struct {
	Row    int
	Column int
	hash   int // cached Hash(Column, Row)
}
// Hash packs column into the high 16 bits and row into the low 16 bits of
// a single int. Note that values outside 16 bits are truncated, so it
// doesn't work with very large numbers.
func Hash(column, row int) int {
	hi := (column & 0xFFFF) << 16
	lo := row & 0xFFFF
	return hi | lo
}
// UnHash returns the (column, row) encoded by a hash built with Hash.
// Negative halves are not restored correctly; use UnHashNeg for those.
func UnHash(hash int) (int, int) {
	column := hash >> 16
	row := hash & 0xFFFF
	return column, row
}
// UnHashNeg returns the (column, row) encoded by a hash built with Hash,
// sign-extending each 16-bit half so negative columns and rows round-trip
// correctly.
//
// Fix: the previous strict comparison `> 0x8000` misclassified -32768
// (stored as exactly 0x8000) as +32768. Two's-complement sign extension
// treats any half with bit 15 set — i.e. >= 0x8000 — as negative, which
// matches the int16 range [-32768, 32767].
func UnHashNeg(hash int) (int, int) {
	column := hash >> 16
	if column >= 0x8000 { // bit 15 set: negative column
		column = -(column ^ 0xFFFF) - 1
	}
	row := hash & 0xFFFF
	if row >= 0x8000 { // bit 15 set: negative row
		row = -(row ^ 0xFFFF) - 1
	}
	return column, row
}
// NewPosition builds a Position at (column, row) with its hash precomputed.
func NewPosition(column, row int) *Position {
	p := &Position{Row: row, Column: column}
	p.UpdateHash()
	return p
}
// Hash returns the cached packed column/row hash.
func (p *Position) Hash() int {
	return p.hash
}
// UpdateHash recomputes the cached hash after Row or Column has changed.
func (p *Position) UpdateHash() {
	p.hash = Hash(p.Column, p.Row)
}
// PlusCols returns a new Position shifted by plusColumns columns.
func (p *Position) PlusCols(plusColumns int) *Position {
	return NewPosition(p.Column+plusColumns, p.Row)
}
// PlusRows returns a new Position shifted by plusRows rows.
func (p *Position) PlusRows(plusRows int) *Position {
	return NewPosition(p.Column, p.Row+plusRows)
}
// String implements fmt.Stringer as "col:<column>, row:<row>".
func (p Position) String() string {
	return "col:" + strconv.Itoa(p.Column) + ", row:" + strconv.Itoa(p.Row)
}
// Width returns the inclusive number of columns spanned by p1 and p2.
func Width(p1, p2 *Position) int {
	return Abs(p1.Column-p2.Column) + 1
}
// Height returns the inclusive number of rows spanned by p1 and p2.
func Height(p1, p2 *Position) int {
	return Abs(p1.Row-p2.Row) + 1
}
// Center : both parameters should be odd numbers
func Center(p1, p2 *Position) *Position {
rows := Height(p1, p2)
columns := Width(p1, p2)
if columns%2 == 1 && rows%2 == 1 { // both are odd - center will be even (except 1,1 which doesn't have a center)
return NewPosition(columns>>1, rows>>1)
}
if columns%2 == 1 && rows%2 == 0 { // cols are odd, rows are even
return NewPosition(columns>>1, rows>>1-1)
}
if columns%2 == 0 && rows%2 == 1 { // cols are even, rows are odd
return NewPosition(columns>>1-1, rows>>1)
}
// worst case, both are even
return NewPosition(columns>>1-1, rows>>1-1)
} | position.go | 0.757346 | 0.497742 | position.go | starcoder |
package eval
import (
"errors"
"reflect"
)
// Cached reflect.Type values for Go's predeclared types, built once at
// package load so type comparisons and conversions in the evaluator avoid
// repeated reflect.TypeOf calls.
var (
	intType        reflect.Type = reflect.TypeOf(int(0))
	i8             reflect.Type = reflect.TypeOf(int8(0))
	i16            reflect.Type = reflect.TypeOf(int16(0))
	i32            reflect.Type = reflect.TypeOf(int32(0))
	i64            reflect.Type = reflect.TypeOf(int64(0))
	uintType       reflect.Type = reflect.TypeOf(uint(0))
	u8             reflect.Type = reflect.TypeOf(uint8(0))
	u16            reflect.Type = reflect.TypeOf(uint16(0))
	u32            reflect.Type = reflect.TypeOf(uint32(0))
	u64            reflect.Type = reflect.TypeOf(uint64(0))
	f32            reflect.Type = reflect.TypeOf(float32(0))
	f64            reflect.Type = reflect.TypeOf(float64(0))
	c64            reflect.Type = reflect.TypeOf(complex64(0))
	c128           reflect.Type = reflect.TypeOf(complex128(0))
	boolType       reflect.Type = reflect.TypeOf(bool(false))
	stringType     reflect.Type = reflect.TypeOf(string(""))
	emptyInterface reflect.Type = reflect.TypeOf(new(interface{})).Elem()
	byteSlice      reflect.Type = reflect.SliceOf(u8)
	runeSlice      reflect.Type = reflect.SliceOf(i32)
	untypedNilType reflect.Type = reflect.TypeOf(UntypedNil{})
)
// builtinTypes maps the names of Go's predeclared types (plus error) to
// their reflect.Type, for resolving type identifiers during evaluation.
var builtinTypes = map[string]reflect.Type{
	"int":        intType,
	"int8":       i8,
	"int16":      i16,
	"int32":      i32,
	"int64":      i64,
	"uint":       uintType,
	"uint8":      u8,
	"uint16":     u16,
	"uint32":     u32,
	"uint64":     u64,
	"float32":    f32,
	"float64":    f64,
	"complex64":  c64,
	"complex128": c128,
	"bool":       boolType,
	"byte":       ByteType,
	"rune":       RuneType,
	"string":     stringType,
	"error":      reflect.TypeOf(errors.New("")),
}
// builtinFuncs maps the names of Go's builtin functions to their
// reflect-based implementations below.
var builtinFuncs = map[string]reflect.Value{
	"complex": reflect.ValueOf(builtinComplex),
	"real":    reflect.ValueOf(builtinReal),
	"imag":    reflect.ValueOf(builtinImag),
	"append":  reflect.ValueOf(builtinAppend),
	"cap":     reflect.ValueOf(builtinCap),
	"len":     reflect.ValueOf(builtinLen),
	"new":     reflect.ValueOf(builtinNew),
	"copy":    reflect.ValueOf(builtinCopy),
	"delete":  reflect.ValueOf(builtinDelete),
	"panic":   reflect.ValueOf(builtinPanic),
}
// builtinComplex implements the complex() builtin over reflect values:
// float64 parts produce a complex128, anything else a complex64.
func builtinComplex(re, im reflect.Value) reflect.Value {
	c := complex(re.Float(), im.Float())
	if re.Type() == f64 {
		return reflect.ValueOf(c)
	}
	return reflect.ValueOf(complex64(c))
}
// builtinReal implements real(): complex128 yields float64, anything else
// (complex64) yields float32.
func builtinReal(cplx reflect.Value) reflect.Value {
	r := real(cplx.Complex())
	if cplx.Type() == c128 {
		return reflect.ValueOf(r)
	}
	return reflect.ValueOf(float32(r))
}
// builtinImag implements imag(): complex128 yields float64, anything else
// (complex64) yields float32.
func builtinImag(cplx reflect.Value) reflect.Value {
	im := imag(cplx.Complex())
	if cplx.Type() == c128 {
		return reflect.ValueOf(im)
	}
	return reflect.ValueOf(float32(im))
}
// builtinAppend implements append(s, t...), including the special case of
// appending a string to a []byte, where the string is first converted.
func builtinAppend(s, t reflect.Value) reflect.Value {
	if t.Type().Kind() == reflect.String && s.Type() == byteSlice {
		t = reflect.ValueOf([]byte(t.String()))
	}
	return reflect.AppendSlice(s, t)
}
// builtinLen implements len() for any length-bearing reflect value.
func builtinLen(v reflect.Value) reflect.Value {
	return reflect.ValueOf(v.Len())
}
// builtinCap implements cap() for any capacity-bearing reflect value.
func builtinCap(v reflect.Value) reflect.Value {
	return reflect.ValueOf(v.Cap())
}
// builtinNew implements new(t), returning a pointer to a zeroed t.
func builtinNew(t reflect.Type) reflect.Value {
	return reflect.New(t)
}
func builtinCopy(s, t reflect.Value) reflect.Value {
n := reflect.Copy(s, t)
return reflect.ValueOf(n)
}
func builtinDelete(m, k reflect.Value) reflect.Value {
m.SetMapIndex(k, reflect.Value{})
return reflect.Value{}
}
func builtinPanic(i reflect.Value) error {
return PanicUser(i)
} | Godeps/_workspace/src/github.com/0xfaded/eval/builtins.go | 0.532182 | 0.629718 | builtins.go | starcoder |
package openflow
import (
"fmt"
"os/exec"
"strings"
"github.com/quilt/quilt/minion/ipdef"
"github.com/quilt/quilt/minion/ovsdb"
)
/* OpenFlow Pseudocode -- Please, for the love of God, keep this updated.
OpenFlow is extremely difficult to reason about -- especially when it's buried in Go code.
This comment aims to make it a bit easier to maintain by describing abstractly what the
OpenFlow code does, without the distraction of the go code required to implement it.
Interpreting the Psuedocode
---------------------------
The OpenFlow code is divided into a series of tables. Packets start at Table_0 and only
move to another table if explicitly instructed to by a `goto` statement.
Each table is composed of a series of if statements. Packets match either one or zero of
these statements. If they match zero they're dropped, if they match more than one then
the statement that appears first in the table is chosen.
Each if statement has one or more actions associated with it. Packets matching the
statement execute those actions in order. If one of those actions is a goto statement,
the packet is forwarded to the specified table and the process begins again.
Finally, note that some tables have loops which should be interpreted as duplicating the
inner if statements per loop element.
Registers
---------
The psuedocode currently uses three registers:
Reg0 -- Indicates what type of port the packet came from. 1 for a Veth. 2 for a patch
port. 0 if neither.
Reg1 -- Contains the OpenFlow port number of the veth, or zero if the packet came from
the gateway.
Reg2 -- Contains the OpenFlow port number of the patch port, or zero if the packet came
from the gateway.
Tables
------
// Table_0 initializes the registers and forwards to Table_1.
Table_0 { // Initial Table
for each db.Container {
if in_port=dbc.VethPort && dl_src=dbc.Mac {
reg0 <- 1
reg1 <- dbc.VethPort
reg2 <- dbc.PatchPort
goto Table_1
}
if in_port=dbc.PatchPort {
reg0 <- 2
reg1 <- dbc.VethPort
reg2 <- dbc.PatchPort
goto Table_1
}
}
if in_port=LOCAL {
goto Table_1
}
}
// Table_1 handles special cases for broadcast packets and the default gateway. If no
special cases apply, it outputs the packet.
Table_1 {
// If the veth sends a broadcast, send it to the gateway and the patch port.
if reg0=1 && dl_dst=ff:ff:ff:ff:ff:ff {
output:LOCAL,reg2
}
// If the patch port sends a broadcast, send it to the veth.
if reg0=2 && dl_dst=ff:ff:ff:ff:ff:ff {
output:reg1
}
// If the gateway sends a broadcast, send it to all veths.
if dl_dst=ff:ff:ff:ff:ff:ff {
output:veth{1..n}
}
// If the veth sends a packet to the load balancer router, forward it.
if reg0=1 && dl_dst=loadBalancerRouterMac {
output:reg2
}
// If the veth sends a packet to the gateway, forward it.
if reg0=1 && dl_dst=gwMac {
output:LOCAL
}
// Drop if a port other than a veth attempts to send to the default gateway.
if dl_dst=gwMac {
drop
}
// Packets from the gateway don't have the registers set, so use Table_2 to
// forward based on dl_dst.
if in_port=LOCAL {
goto Table_2
}
// Send packets from the veth to the patch port.
if reg0=1 {
output:reg2
}
// Send packets from the patch port to the veth.
if reg0=2 {
output:reg1
}
}
// Table_2 attempts to forward packets to a veth based on its destination MAC.
Table_2 {
// Packets coming from the
for each db.Container {
if nw_dst=dbc.Mac {
output:veth
}
}
}
*/
// Container describes a container that needs OpenFlow rules installed for it,
// identified by the names of its veth and patch ports and its MAC address.
type Container struct {
	Veth  string // Name of the veth interface attached to the container.
	Patch string // Name of the OVS patch port paired with the container.
	Mac   string // MAC address of the container's interface.
}
// container is the resolved form of Container: the port names have been
// translated into OpenFlow port numbers via the OVS database.
type container struct {
	veth  int    // OpenFlow port number of the veth.
	patch int    // OpenFlow port number of the patch port.
	mac   string // MAC address, copied verbatim from Container.Mac.
}
// staticFlows are the container-independent flows. See the table-by-table
// pseudocode at the top of this file for the behavior each rule implements;
// priorities encode the top-to-bottom order of the pseudocode's if statements.
var staticFlows = []string{
	// Table 0
	"table=0,priority=1000,in_port=LOCAL,actions=resubmit(,1)",

	// Table 1
	"table=1,priority=1000,reg0=0x1,dl_dst=ff:ff:ff:ff:ff:ff," +
		"actions=output:LOCAL,output:NXM_NX_REG2[]",

	"table=1,priority=900,reg0=0x2,dl_dst=ff:ff:ff:ff:ff:ff," +
		"actions=output:NXM_NX_REG1[]",

	fmt.Sprintf("table=1,priority=850,reg0=1,dl_dst=%s,actions=output:NXM_NX_REG2[]",
		ipdef.LoadBalancerMac),

	fmt.Sprintf("table=1,priority=800,reg0=1,dl_dst=%s,actions=LOCAL",
		ipdef.GatewayMac),

	fmt.Sprintf("table=1,priority=700,dl_dst=%s,actions=drop", ipdef.GatewayMac),

	"table=1,priority=600,in_port=LOCAL,actions=resubmit(,2)",
	"table=1,priority=500,reg0=1,actions=output:NXM_NX_REG2[]",
	"table=1,priority=400,reg0=2,actions=output:NXM_NX_REG1[]",
}
// ReplaceFlows adds flows associated with the provided containers, and removes all
// other flows.
func ReplaceFlows(containers []Container) error {
	portMap, err := openflowPorts()
	if err != nil {
		return err
	}

	flows := allFlows(resolveContainers(portMap, containers))

	// XXX: Due to a bug in `ovs-ofctl replace-flows`, certain flows are
	// replaced even if they do not differ. `diff-flows` already has a fix to
	// this problem, so for now we only run `replace-flows` when `diff-flows`
	// reports no changes. The `diff-flows` check should be removed once
	// `replace-flows` is fixed upstream.
	if ofctl("diff-flows", flows) == nil {
		return nil
	}

	if err := ofctl("replace-flows", flows); err != nil {
		return fmt.Errorf("ovs-ofctl: %s", err)
	}
	return nil
}
// AddFlows adds flows associated with the provided containers without touching flows
// that may already be installed.
func AddFlows(containers []Container) error {
	portMap, err := openflowPorts()
	if err != nil {
		return err
	}

	resolved := resolveContainers(portMap, containers)
	if err := ofctl("add-flows", containerFlows(resolved)); err != nil {
		return fmt.Errorf("ovs-ofctl: %s", err)
	}
	return nil
}
// containerFlows builds the per-container flows: two Table 0 classification
// rules (one matching the veth + source MAC, one matching the patch port) and
// one Table 2 rule forwarding by destination MAC.
func containerFlows(containers []container) []string {
	var flows []string
	for _, c := range containers {
		vethFlow := fmt.Sprintf(
			"table=0,priority=1000,in_port=%d,dl_src=%s,"+
				"actions=load:0x1->NXM_NX_REG0[],load:0x%x->NXM_NX_REG1[],"+
				"load:0x%x->NXM_NX_REG2[],resubmit(,1)",
			c.veth, c.mac, c.veth, c.patch)
		patchFlow := fmt.Sprintf(
			"table=0,priority=1000,in_port=%d,"+
				"actions=load:0x2->NXM_NX_REG0[],load:0x%x->NXM_NX_REG1[],"+
				"load:0x%x->NXM_NX_REG2[],resubmit(,1)",
			c.patch, c.veth, c.patch)
		dstFlow := fmt.Sprintf(
			"table=2,priority=1000,dl_dst=%s,actions=output:%d",
			c.mac, c.veth)
		flows = append(flows, vethFlow, patchFlow, dstFlow)
	}
	return flows
}
// allFlows returns the static flows, the per-container flows, and the Table 1
// rule that floods gateway broadcasts to every veth.
func allFlows(containers []container) []string {
	actions := make([]string, 0, len(containers))
	for _, c := range containers {
		actions = append(actions, fmt.Sprintf("output:%d", c.veth))
	}
	broadcast := "table=1,priority=850,dl_dst=ff:ff:ff:ff:ff:ff,actions=" +
		strings.Join(actions, ",")

	flows := append(staticFlows, containerFlows(containers)...)
	return append(flows, broadcast)
}
// resolveContainers maps each Container's port names to OpenFlow port numbers
// using portMap. Containers whose veth or patch port is missing from the map
// are silently skipped.
func resolveContainers(portMap map[string]int, containers []Container) []container {
	var resolved []container
	for _, c := range containers {
		vethPort, haveVeth := portMap[c.Veth]
		patchPort, havePatch := portMap[c.Patch]
		if !haveVeth || !havePatch {
			continue
		}
		resolved = append(resolved, container{
			veth:  vethPort,
			patch: patchPort,
			mac:   c.Mac,
		})
	}
	return resolved
}
// openflowPorts queries the local ovsdb-server for the mapping from port name
// to OpenFlow port number. The database connection is closed before returning.
func openflowPorts() (map[string]int, error) {
	odb, err := ovsdb.Open()
	if err != nil {
		return nil, fmt.Errorf("ovsdb-server connection: %s", err)
	}
	defer odb.Disconnect()
	return odb.OpenFlowPorts()
}
var ofctl = func(action string, flows []string) error {
cmd := exec.Command("ovs-ofctl", "-O", "OpenFlow13", action,
ipdef.QuiltBridge, "/dev/stdin")
stdin, err := cmd.StdinPipe()
if err != nil {
return err
}
if err := cmd.Start(); err != nil {
return err
}
for _, f := range flows {
stdin.Write([]byte(f + "\n"))
}
stdin.Close()
if err := cmd.Wait(); err != nil {
return err
}
return nil
} | minion/network/openflow/openflow.go | 0.520496 | 0.524882 | openflow.go | starcoder |
package canvas
import (
"github.com/alexandreLamarre/Golang-Ray-Tracing-Renderer/pkg/algebra"
"github.com/alexandreLamarre/Golang-Ray-Tracing-Renderer/pkg/noise"
"math"
)
// PATTERNOFFSET shifts sampled coordinates before flooring so that patterns
// tile consistently around the origin. Assumes scene coordinates stay within
// roughly +/-500 — TODO confirm with callers.
var PATTERNOFFSET float64 = 500
//Pattern represents a procedural pattern of colors.
type Pattern struct {
	a *Color // Primary color; may be nil for composite patterns.
	b *Color // Secondary color; may be nil.
	// getPattern maps a point (in pattern space) plus the two stored colors
	// to the pattern's color at that point.
	getPattern func(vector *algebra.Vector, colorA *Color, colorB *Color) *Color
	Transform *algebra.Matrix // 4x4 pattern-space transform.
}
//GetColor returns the color of the pattern at the given point,
//evaluated with the pattern's stored colors.
func (p *Pattern) GetColor(point *algebra.Vector) *Color {
	return p.getPattern(point, p.a, p.b)
}
//TestPattern returns a pattern whose color equals the sampled point's own
//coordinates; useful for testing transformation plumbing.
func TestPattern() *Pattern {
	return &Pattern{a: nil, b: nil, getPattern: func(p *algebra.Vector, colorA *Color, colorB *Color) *Color {
		return &Color{p.Get()[0], p.Get()[1], p.Get()[2]}
	}, Transform: algebra.IdentityMatrix(4)}
}
//StripePattern creates a pattern of alternating stripes of a and b along the
//x axis, with the default identity transformation.
func StripePattern(a *Color, b *Color) *Pattern {
	stripes := func(p *algebra.Vector, colorA *Color, colorB *Color) *Color {
		if int(math.Floor(p.Get()[0]))%2 == 0 {
			return colorA
		}
		return colorB
	}
	return &Pattern{a: a, b: b, getPattern: stripes, Transform: algebra.IdentityMatrix(4)}
}
//GradientPattern creates a pattern that linearly interpolates from a to b
//across each unit interval of the x coordinate (the fractional part of x is
//the interpolation weight).
func GradientPattern(a *Color, b *Color) *Pattern {
	return &Pattern{a: a, b: b, getPattern: func(p *algebra.Vector, colorA *Color, colorB *Color) *Color {
		return colorA.Add(colorB.Subtract(colorA).ScalarMult(p.Get()[0] - math.Floor(p.Get()[0])))
	}, Transform: algebra.IdentityMatrix(4)}
}
//RingPattern creates concentric rings of a and b expanding in the x-z plane,
//with the default identity transformation.
func RingPattern(a *Color, b *Color) *Pattern {
	rings := func(p *algebra.Vector, colorA *Color, colorB *Color) *Color {
		x := p.Get()[0]
		z := p.Get()[2]
		radius := math.Sqrt(math.Pow(x, 2) + math.Pow(z, 2))
		if int(math.Floor(radius))%2 == 0 {
			return colorA
		}
		return colorB
	}
	return &Pattern{a: a, b: b, getPattern: rings, Transform: algebra.IdentityMatrix(4)}
}
//CheckerPattern creates a 3D checkerboard of unit cubes alternating between a
//and b in every direction. Coordinates are shifted by PATTERNOFFSET before
//flooring so adjacent cells stay consistent near the origin; this assumes
//coordinates stay within the offset's range — TODO confirm.
func CheckerPattern(a *Color, b *Color) *Pattern {
	return &Pattern{a: a, b: b, getPattern: func(p *algebra.Vector, colorA *Color, colorB *Color) *Color {
		px := math.Floor(math.Abs(p.Get()[0] + PATTERNOFFSET))
		py := math.Floor(math.Abs(p.Get()[1] + PATTERNOFFSET))
		pz := math.Floor(math.Abs(p.Get()[2] + PATTERNOFFSET))
		// A cell is colorA when the cell-index parity sum is even.
		if int(px+py+pz)%2 == 0 {
			return colorA
		} else {
			return colorB
		}
	}, Transform: algebra.IdentityMatrix(4)}
}
//SolidPattern creates a pattern that returns the single constant color a at
//every point.
func SolidPattern(a *Color) *Pattern {
	return &Pattern{a: a, b: nil, getPattern: func(p *algebra.Vector, a *Color, b *Color) *Color {
		return a
	}, Transform: algebra.IdentityMatrix(4)}
}
//NestedPattern combines two sub-patterns: at each point, patternA and patternB
//are evaluated and their results are fed into pattern's color rule.
//
//NOTE(review): the combined Transform is Multiply(patternB, patternA) while
//pattern's own Transform is ignored — confirm this ordering is intended.
func NestedPattern(pattern *Pattern, patternA *Pattern, patternB *Pattern) *Pattern {
	return &Pattern{a: nil, b: nil, getPattern: func(p *algebra.Vector, a *Color, b *Color) *Color {
		colorA := patternA.GetColor(p)
		colorB := patternB.GetColor(p)
		return pattern.getPattern(p, colorA, colorB)
	}, Transform: algebra.Multiply(patternB.Transform, patternA.Transform)}
}
//BlendedPattern creates a pattern that combines two patterns with the given
//blend function, which takes the two sampled colors and returns a new color.
//If blend is nil the two colors are averaged.
func BlendedPattern(patternA *Pattern, patternB *Pattern, blend func(colorA, colorB *Color) *Color) *Pattern {
	if blend == nil {
		return &Pattern{a: nil, b: nil, getPattern: func(p *algebra.Vector, colorA *Color, colorB *Color) *Color {
			colorFromA := patternA.GetColor(p)
			colorFromB := patternB.GetColor(p)
			// Average the two colors. The scale factor must be a floating
			// point constant: the previous `1 / 2` was untyped integer
			// constant division, which evaluates to 0 and always produced
			// black.
			return (colorFromA.Add(colorFromB)).ScalarMult(0.5)
		}, Transform: algebra.IdentityMatrix(4)}
	}
	// NOTE(review): this branch combines the two pattern transforms while the
	// nil-blend branch uses the identity — confirm the asymmetry is intended.
	return &Pattern{a: nil, b: nil, getPattern: func(p *algebra.Vector, colorA *Color, colorB *Color) *Color {
		colorFromA := patternA.GetColor(p)
		colorFromB := patternB.GetColor(p)
		return blend(colorFromA, colorFromB)
	}, Transform: algebra.Multiply(patternA.Transform, patternB.Transform)}
}
//PerlinNoisePattern perturbs the sample point of the wrapped pattern with
//Perlin noise before evaluating it, producing a jittered version of the
//pattern. The wrapped pattern's transform is reused.
func PerlinNoisePattern(pattern *Pattern) *Pattern {
	return &Pattern{a: nil, b: nil, getPattern: func(point *algebra.Vector, colorA *Color, colorB *Color) *Color {
		// Sample scalar noise at the offset point and displace all three
		// coordinates by it (plus PATTERNOFFSET again — presumably to keep
		// the displaced point positive; verify against noise.Perlin's range).
		displacement := noise.Perlin(point.Get()[0]+PATTERNOFFSET, point.Get()[1]+PATTERNOFFSET, point.Get()[2]+PATTERNOFFSET)
		newPoint, err := point.Add(algebra.NewPoint(
			displacement+PATTERNOFFSET,
			displacement+PATTERNOFFSET,
			displacement+PATTERNOFFSET))
		if err != nil {
			panic(err)
		}
		return pattern.getPattern(newPoint, pattern.a, pattern.b)
	}, Transform: pattern.Transform}
}
//SetTransform sets the transform of the pattern
func (p *Pattern) SetTransform(m *algebra.Matrix) {
if len(m.Get()) != 4 || len(m.Get()[0]) != 4 {
panic(algebra.ExpectedDimension(4))
} else {
p.Transform = m
}
} | pkg/canvas/pattern.go | 0.820505 | 0.585101 | pattern.go | starcoder |
package draw2dAnimation
import (
"bufio"
"fmt"
imageLibrary "image"
"image/draw"
"image/png"
"image/color"
"os"
)
// The image struct owns a collection of figures and performs operations over
// them: drawing, updating, adding, deleting, and filtering. It can save the
// result as a .png file. The default clear color is white.
type image struct {
	figures *figuresCollection // Figures keyed by name.
	canvas draw.Image // Raster the figures draw onto.
	ClearColor color.Color // Color used by Clear and ClearRectangle.
}
// Clear fills the whole canvas with the configured clear color.
func (this *image) Clear() {
	width, height := this.canvas.Bounds().Dx(), this.canvas.Bounds().Dy()
	this.ClearRectangle(0, 0, width, height)
}

// ClearRectangle fills the rectangle (x1, y1)-(x2, y2) of the canvas with the
// configured clear color.
func (this *image) ClearRectangle(x1, y1, x2, y2 int) {
	imageColor := imageLibrary.NewUniform(this.ClearColor)
	draw.Draw(this.canvas, imageLibrary.Rect(x1, y1, x2, y2), imageColor, imageLibrary.ZP, draw.Over)
}
// newImage constructs an image with an empty figure collection, a fresh RGBA
// canvas of FrameWidth x FrameHeight, and white as the clear color.
func newImage() *image {
	return &image{
		newFiguresCollection(),
		imageLibrary.NewRGBA(imageLibrary.Rect(0, 0, FrameWidth, FrameHeight)),
		color.RGBA{255, 255, 255, 255}}
}
// AddFigure adds a figure under the given string key to the contained collection.
func (this *image) AddFigure(name string, figure Figurer) {
	this.figures.add(name, figure)
}

// RemoveFigure removes the figure stored under the given string key.
func (this *image) RemoveFigure(name string) {
	this.figures.remove(name)
}

// RemoveByFilter removes all figures for which the given predicate returns true.
func (this *image) RemoveByFilter(filter func(Figurer) bool) {
	this.figures.removeByFilter(filter)
}

// GetByName returns the figure stored under the given string key, or nil if
// it is not present.
func (this *image) GetByName(name string) Figurer {
	return this.figures.getByName(name)
}

// GetByFilter returns the keys and figures for which the given predicate
// returns true.
func (this *image) GetByFilter(filter func(Figurer) bool) map[string]Figurer {
	return this.figures.getByFilter(filter)
}
// Update calls Update on every figure in the contained collection.
func (this *image) Update() {
	this.figures.traverse(func(figure Figurer) {
		figure.Update()
	})
}

// Draw draws every figure in the collection, in the traversal order supplied
// by the collection (by depth, then by order of creation).
func (this *image) Draw() {
	this.figures.traverse(func(figure Figurer) {
		figure.Draw()
	})
}
// SaveFrame saves the canvas as a .png using the DestinationFolder and
// FramePattern global variables, numbering frames with the package-level
// nextFrameNumber counter (incremented first, so the first frame is 001).
//
// NOTE(review): every failure path calls os.Exit(1), which terminates the
// process without reporting the error and skips the deferred file.Close.
// Consider returning an error instead.
func (this *image) SaveFrame() {
	nextFrameNumber++
	file, err := os.Create(
		fmt.Sprintf("%s%s%03d.png", DestinationFolder, FramePattern, nextFrameNumber))
	if err != nil {
		os.Exit(1)
	}
	defer file.Close()
	// Encode through a buffered writer; Flush must succeed for the file to
	// be complete.
	buffer := bufio.NewWriter(file)
	err = png.Encode(buffer, this.canvas)
	if err != nil {
		os.Exit(1)
	}
	err = buffer.Flush()
	if err != nil {
		os.Exit(1)
	}
}
package main
import (
"bufio"
"fmt"
"github.com/golang-demos/chalk"
"os"
"strings"
"time"
)
// ScoreItem holds a scoreboard entry's display name, its current point value
// (0 = unfilled, -1 = crossed out), and its fixed index (id) on the board.
type ScoreItem struct {
	name string
	points int
	id int
}
// GenerateBoard creates a fresh scoreboard: every playable option starts at 0
// points, with ids assigned in board order.
func GenerateBoard() [13]ScoreItem {
	names := []string{
		"1's", "2's", "3's", "4's", "5's", "6's",
		"Three-of-a-kind", "Four-of-a-kind", "Full house",
		"Small straight", "Large straight", "Yahtzee", "Chance",
	}
	var board [13]ScoreItem
	for i, name := range names {
		board[i] = ScoreItem{name, 0, i}
	}
	return board
}
// FindPossibleOptions finds the playable options for the given dice and
// scoreboard and prompts the user to select one. fillInOption and
// crossOutOption pre-select answers (for scripted play); crossOut forces a
// cross-out. Returns the updated board.
func FindPossibleOptions(board [13]ScoreItem,
	dice [5]int,
	fillInOption string,
	crossOut bool,
	crossOutOption string) [13]ScoreItem {
	// Sum of all dice, used by the options scored as "total of dice".
	points := 0
	for i := 0; i < 5; i++ {
		points += dice[i]
	}
	m := DiceMap(dice)
	possibilities := make([]ScoreItem, 0)
	// Calculate upper hand: an option is offered only while still unfilled.
	// Note: it's m[1] instead of m[0] because 1 references the number, not the position
	if m[1] > 0 && board[0].points == 0 {
		possibilities = append(possibilities, ScoreItem{"1's", m[1], 0})
	}
	if m[2] > 0 && board[1].points == 0 {
		possibilities = append(possibilities, ScoreItem{"2's", m[2] * 2, 1})
	}
	if m[3] > 0 && board[2].points == 0 {
		possibilities = append(possibilities, ScoreItem{"3's", m[3] * 3, 2})
	}
	if m[4] > 0 && board[3].points == 0 {
		possibilities = append(possibilities, ScoreItem{"4's", m[4] * 4, 3})
	}
	if m[5] > 0 && board[4].points == 0 {
		possibilities = append(possibilities, ScoreItem{"5's", m[5] * 5, 4})
	}
	if m[6] > 0 && board[5].points == 0 {
		possibilities = append(possibilities, ScoreItem{"6's", m[6] * 6, 5})
	}
	// Calculate lower hand.
	if CalculateThreeKind(m) == 1 && board[6].points == 0 {
		possibilities = append(possibilities, ScoreItem{"Three-of-a-kind", points, 6})
	}
	if CalculateFourKind(m) == 1 && board[7].points == 0 {
		possibilities = append(possibilities, ScoreItem{"Four-of-a-kind", points, 7})
	}
	if CalculateFullHouse(m) == 1 && board[8].points == 0 {
		possibilities = append(possibilities, ScoreItem{"Full house", 25, 8})
	}
	if CalculateSmallStraight(dice) == 1 && board[9].points == 0 {
		possibilities = append(possibilities, ScoreItem{"Small straight", 30, 9})
	}
	if CalculateLargeStraight(dice) == 1 && board[10].points == 0 {
		possibilities = append(possibilities, ScoreItem{"Large straight", 40, 10})
	}
	if CalculateYahtzee(m) == 1 {
		// A Yahtzee is worth +50 points each time.
		// The first is worth 50, then 100, then 150, etc.
		yahtzeeValue := YahtzeeMultiplied
		possibilities = append(possibilities, ScoreItem{"Yahtzee", yahtzeeValue, 11})
	}
	if board[12].points == 0 { // This is the "Chance" option
		possibilities = append(possibilities, ScoreItem{"Chance", points, 12})
	}
	newBoard := board
	// Show the player every playable option with its point value.
	fmt.Print("Please type the name to fill in.")
	fmt.Println(chalk.YellowLight())
	for i := 0; i < len(possibilities); i++ {
		// Ex: Three-of-a-kind (30 points)
		fmt.Printf("%s (%d", possibilities[i].name, possibilities[i].points)
		if possibilities[i].points == 1 {
			fmt.Println(" point)")
		} else {
			fmt.Println(" points)")
		}
	}
	// They have to cross out: nothing is playable this turn.
	if len(possibilities) == 0 {
		fmt.Println(chalk.RedLight())
		fmt.Printf("You have to cross out")
		fmt.Println(chalk.Reset())
		newBoard = CrossOut(newBoard, crossOutOption)
		return newBoard
	}
	fmt.Println(chalk.RedLight())
	fmt.Printf("Cross out")
	fmt.Println(chalk.Reset())
	// Prompt (or consume the scripted answer) until a valid name is chosen.
	badName := true
	scanner := bufio.NewScanner(os.Stdin)
	for badName {
		fmt.Print("Name > ")
		input := ""
		if fillInOption == "" && crossOut == false {
			if !scanner.Scan() {
				continue
			}
			input = scanner.Text()
		} else if crossOut == true {
			fmt.Println("cross out")
		} else {
			input = fillInOption
			fmt.Println(fillInOption)
		}
		if strings.ToLower(input) == "cross out" || crossOut == true {
			newBoard = CrossOut(newBoard, crossOutOption)
			break
		}
		for i := 0; i < len(possibilities); i++ {
			if strings.ToLower(possibilities[i].name) == strings.ToLower(input) {
				for j := 0; j < len(newBoard); j++ {
					if newBoard[j].id == possibilities[i].id {
						newBoard[j].points = possibilities[i].points
						if possibilities[i].id == 11 { // Add to Yahtzee global variable
							YahtzeeValue = YahtzeeValue + YahtzeeMultiplied
							YahtzeeMultiplied += 50
							newBoard[j].points = YahtzeeValue
						}
					}
				}
				badName = false
				fmt.Println("Filled in", possibilities[i].name)
				break
			}
		}
		if badName {
			fmt.Println(chalk.RedLight())
			fmt.Print("Invalid name!")
			fmt.Println(chalk.Reset())
			fmt.Println()
			time.Sleep(2 * time.Second)
		}
	}
	return newBoard
}
// CrossOut prompts the player to cross out one unfilled scoreboard entry
// (marking its points as -1). crossOutOption, if non-empty, is used as a
// scripted answer instead of reading stdin. Returns the updated board.
func CrossOut(board [13]ScoreItem, crossOutOption string) [13]ScoreItem {
	newBoard := board
	// ANSI escape: clear the screen and move the cursor home.
	fmt.Print("\033[H\033[2J")
	fmt.Print("Please type in the name to cross it out.")
	fmt.Println(chalk.YellowLight())
	// Counter represents the 1., 2., 3., 4., etc. counter used to display
	// an orderly list to the user. We can't use board[i].id or i because
	// they'll skip over ones we can't cross out
	// NOTE(review): counter is incremented but never printed — the Printf
	// below only shows the name. Either print it or drop the counter.
	counter := 1
	for i := 0; i < len(board); i++ {
		if board[i].points == 0 {
			// Ex: 4. Three-of-a-kind
			fmt.Printf("%s\n", board[i].name)
			counter++
		}
	}
	fmt.Println(chalk.Reset())
	var name string
	badName := true
	scanner := bufio.NewScanner(os.Stdin)
	for badName {
		fmt.Print("Name > ")
		input := ""
		if crossOutOption == "" {
			if !scanner.Scan() {
				continue
			}
			input = scanner.Text()
		} else {
			input = crossOutOption
			fmt.Println(crossOutOption)
		}
		// Match the requested name case-insensitively and mark it crossed out.
		for i := 0; i < len(newBoard); i++ {
			if strings.ToLower(newBoard[i].name) == strings.ToLower(input) {
				newBoard[i].points = -1
				name = newBoard[i].name
				badName = false
				break
			}
		}
		if badName {
			fmt.Println(chalk.RedLight())
			fmt.Print("Invalid name!")
			fmt.Println(chalk.Reset())
			fmt.Println()
			time.Sleep(2 * time.Second)
		}
	}
	fmt.Println("Crossed out", name)
	return newBoard
}
// CalculateTargets prints personalized strategy advice based on which
// scoreboard entries are still open, how close the player is to the upper
// bonus (63 points), and how many turns remain. It also appends suggestions
// to the EmergencyAdvice global slice.
func CalculateTargets(board [13]ScoreItem) {
	upperPoints := CalculateUpperScore(board)
	turnsLeft := ItemsNeededLeft(board)
	fmt.Println(chalk.CyanLight())
	fmt.Print("Personalized goals:")
	fmt.Println(chalk.YellowLight())
	goals := ""
	upperPossibleGoals := ""
	upperBackupGoals := ""
	// variation accumulates the maximum upper-section points still obtainable;
	// used below to decide whether the 63-point bonus is still reachable.
	variation := 0
	if board[0].points == 0 { // 1's
		variation += 3
		upperPossibleGoals += "You should use your 1's soon. Leave them as backup.\n"
		upperBackupGoals += "You should use your 1's soon. Leave them as backup.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Roll for 1's.\n")
	}
	if board[1].points == 0 { // 2's
		variation += 6
		upperPossibleGoals += "You should use your 2's soon. Leave them as backup.\n"
		upperBackupGoals += "You should use your 2's soon. Leave them as backup.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Roll for 2's.\n")
	}
	if board[2].points == 0 { // 3's
		variation += 9
		upperPossibleGoals += "You're close to the bonus. Try and get a lot of 3's.\n"
		upperBackupGoals += "Try for a bunch of 3's soon.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Roll for 3's.\n")
	}
	if board[3].points == 0 { // 4's
		variation += 12
		upperPossibleGoals += "You're close to the bonus. Try and get a lot of 4's.\n"
		upperBackupGoals += "Try for a bunch of 4's soon.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Roll for 4's.\n")
	}
	if board[4].points == 0 { // 5's
		variation += 15
		upperPossibleGoals += "You're close to the bonus. Try and get a lot of 5's.\n"
		upperBackupGoals += "Try for a bunch of 5's soon.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Roll for 5's.\n")
	}
	if board[5].points == 0 { // 6's
		variation += 18
		upperPossibleGoals += "You're close to the bonus. Try and get a lot of 6's.\n"
		upperBackupGoals += "Try for a bunch of 6's soon.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Roll for 6's.\n")
	}
	// Choose which set of upper-section advice to surface.
	if 63-variation <= upperPoints && upperPoints < 63 && turnsLeft < 8 {
		goals += upperPossibleGoals
	} else if turnsLeft < 6 || (board[6].points != 0 &&
		board[9].points != 0 &&
		board[7].points != 0 &&
		board[8].points != 0 &&
		board[10].points != 0) {
		goals += upperBackupGoals
	}
	// Three-of-a-kind
	if turnsLeft > 2 && turnsLeft < 5 && board[6].points == 0 {
		goals += "You'll need a three-of-a-kind soon.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Go for a three-of-a-kind.\n")
	} else if (turnsLeft <= 2 ||
		(board[9].points != 0 && board[7].points != 0 && board[8].points != 0 && board[10].points != 0)) &&
		board[6].points == 0 {
		goals += "Target a three-of-a-kind soon.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Go for a three-of-a-kind.\n")
	}
	if board[10].points == 0 { // Large straight
		goals += "Target a large straight soon.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Go for a large straight.\n")
	}
	if board[8].points == 0 { // Full house
		goals += "Target a full house soon.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Go for a full house.\n")
	}
	if board[7].points == 0 && (turnsLeft < 6 || (board[8].points != 0 && board[10].points != 0)) { // Four-of-a-kind
		goals += "Target a four-of-a-kind soon.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Go for a four-of-a-kind.\n")
	}
	// Small straight
	if turnsLeft > 3 && turnsLeft < 6 && board[9].points == 0 {
		goals += "You'll need a small straight soon.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Go for a small straight.\n")
	} else if (turnsLeft <= 3 || (board[7].points != 0 && board[8].points != 0 && board[10].points != 0)) &&
		board[9].points == 0 {
		goals += "You should target a small straight soon.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Go for a small straight.\n")
	}
	// A filled-in Yahtzee (points > 0) means extra Yahtzees are worth more.
	if board[11].points > 0 {
		goals += "Get a Yahtzee if you can. It'll be worth more.\n"
		EmergencyAdvice = append(EmergencyAdvice, "Go for a Yahtzee!\n")
	}
	if board[12].points == 0 && turnsLeft < 7 {
		goals += "You still have your chance left as backup!\n"
		EmergencyAdvice = append(EmergencyAdvice, "Re-roll low numbers to get a good chance.\n")
	}
	// Fallback when no other advice applies.
	if goals == "" {
		goals += "Try for a Yahtzee!\n"
		EmergencyAdvice = append(EmergencyAdvice, "Go for a Yahtzee!\n")
	}
	fmt.Print(goals)
	fmt.Println(chalk.Reset())
}
// CalculateTotalScore sums the board's point values, skipping crossed-out
// entries (-1), and adds the 35-point bonus when the upper section reaches 63.
func CalculateTotalScore(board [13]ScoreItem) (score int) {
	for _, item := range board {
		if item.points != -1 {
			score += item.points
		}
	}
	// If they get a score of 63 or higher on their upper hand it's a +35 bonus
	if CalculateUpperScore(board) >= 63 {
		score += 35
	}
	return
}
// CalculateUpperScore sums the upper-section entries (1's through 6's,
// indexes 0-5).
func CalculateUpperScore(board [13]ScoreItem) int {
	total := 0
	for i := 0; i < 6; i++ {
		total += board[i].points
	}
	return total
}
func DisplayScoreBoard(board [13]ScoreItem) {
fmt.Println("==== SCORECARD ====")
fmt.Println("-------------------")
fmt.Println("| Upper Section |")
fmt.Print("-------------------")
for i := 0; i < 6; i++ {
if board[i].points == -1 {
fmt.Println(chalk.Strikethrough())
fmt.Printf("| %s : 0", board[i].name)
} else {
fmt.Println(chalk.Reset())
fmt.Printf("| %s : %d", board[i].name, board[i].points)
}
}
fmt.Println(chalk.Reset())
fmt.Println("-------------------")
upperPoints := CalculateUpperScore(board)
if upperPoints >= 63 {
upperPoints += 35
}
if upperPoints < 10 {
fmt.Printf("| Points: %d |", upperPoints)
} else if upperPoints < 100 {
fmt.Printf("| Total: %d |", upperPoints)
} else {
fmt.Printf("| Points: %d |", upperPoints)
}
fmt.Println()
fmt.Println("-------------------")
fmt.Println()
fmt.Println("-------------------")
fmt.Println("| Lower Section |")
fmt.Print("-------------------")
if board[6].points == -1 {
fmt.Println(chalk.Strikethrough())
fmt.Printf("| Three-of-a-kind : 0")
} else {
fmt.Println(chalk.Reset())
fmt.Printf("| Three-of-a-kind : %d", board[6].points)
}
if board[7].points == -1 {
fmt.Println(chalk.Strikethrough())
fmt.Printf("| Four-of-a-kind : 0")
} else {
fmt.Println(chalk.Reset())
fmt.Printf("| Four-of-a-kind : %d", board[7].points)
}
if board[8].points == -1 {
fmt.Println(chalk.Strikethrough())
fmt.Printf("| Full house : 0")
} else {
fmt.Println(chalk.Reset())
fmt.Printf("| Full house : %d", board[8].points)
}
if board[9].points == -1 {
fmt.Println(chalk.Strikethrough())
fmt.Printf("| Small straight : 0")
} else {
fmt.Println(chalk.Reset())
fmt.Printf("| Small straight : %d", board[9].points)
}
if board[10].points == -1 {
fmt.Println(chalk.Strikethrough())
fmt.Printf("| Large straight : 0")
} else {
fmt.Println(chalk.Reset())
fmt.Printf("| Large straight : %d", board[10].points)
}
if board[11].points == -1 {
fmt.Println(chalk.Strikethrough())
fmt.Printf("| Yahtzee : 0")
} else {
fmt.Println(chalk.Reset())
fmt.Printf("| Yahtzee : %d", board[11].points)
}
if board[12].points == -1 {
fmt.Println(chalk.Strikethrough())
fmt.Printf("| Chance : 0")
} else {
fmt.Println(chalk.Reset())
fmt.Printf("| Chance : %d", board[12].points)
}
fmt.Println(chalk.Reset())
fmt.Println("-------------------")
points := CalculateTotalScore(board)
fmt.Println(chalk.GreenLight())
if points < 10 {
fmt.Printf("==== POINTS: %d ====", points)
} else if points < 100 {
fmt.Printf("==== TOTAL: %d ====", points)
} else if points < 1000 {
fmt.Printf("=== POINTS: %d ===", points)
} else if points < 10000 {
fmt.Printf("=== TOTAL: %d ===", points)
}
fmt.Println(chalk.Reset())
} | scoreboard.go | 0.543348 | 0.405066 | scoreboard.go | starcoder |
package gglm
//Note: We don't use the Swizzle interface for add/sub because the interface doesn't allow inling :(
import (
"fmt"
"math"
)
// Compile-time checks that *Vec2 satisfies Swizzle2 and fmt.Stringer.
var _ Swizzle2 = &Vec2{}
var _ fmt.Stringer = &Vec2{}

// Vec2 is a 2-component float32 vector; Data holds (x, y).
type Vec2 struct {
	Data [2]float32
}
// Component accessors. X/Y and R/G are aliases for the same two slots, so the
// vector can be read either spatially or as a red/green color pair.

// X returns the first component.
func (v *Vec2) X() float32 {
	return v.Data[0]
}

// Y returns the second component.
func (v *Vec2) Y() float32 {
	return v.Data[1]
}

// R returns the first component (color alias of X).
func (v *Vec2) R() float32 {
	return v.Data[0]
}

// G returns the second component (color alias of Y).
func (v *Vec2) G() float32 {
	return v.Data[1]
}

// SetX sets the first component.
func (v *Vec2) SetX(x float32) {
	v.Data[0] = x
}

// SetR sets the first component (color alias of SetX).
func (v *Vec2) SetR(r float32) {
	v.Data[0] = r
}

// SetY sets the second component.
func (v *Vec2) SetY(y float32) {
	v.Data[1] = y
}

// SetG sets the second component (color alias of SetY).
func (v *Vec2) SetG(g float32) {
	v.Data[1] = g
}

// AddX adds x to the first component.
func (v *Vec2) AddX(x float32) {
	v.Data[0] += x
}

// AddY adds y to the second component.
func (v *Vec2) AddY(y float32) {
	v.Data[1] += y
}

// AddR adds r to the first component (color alias of AddX).
func (v *Vec2) AddR(r float32) {
	v.Data[0] += r
}

// AddG adds g to the second component (color alias of AddY).
func (v *Vec2) AddG(g float32) {
	v.Data[1] += g
}

// SetXY sets both components.
func (v *Vec2) SetXY(x, y float32) {
	v.Data[0] = x
	v.Data[1] = y
}

// AddXY adds to both components.
func (v *Vec2) AddXY(x, y float32) {
	v.Data[0] += x
	v.Data[1] += y
}

// SetRG sets both components (color alias of SetXY).
func (v *Vec2) SetRG(r, g float32) {
	v.Data[0] = r
	v.Data[1] = g
}

// AddRG adds to both components (color alias of AddXY).
func (v *Vec2) AddRG(r, g float32) {
	v.Data[0] += r
	v.Data[1] += g
}
// String implements fmt.Stringer as "(x, y)".
func (v *Vec2) String() string {
	return fmt.Sprintf("(%f, %f)", v.X(), v.Y())
}

//Scale v *= x (element wise multiplication); returns v for chaining.
func (v *Vec2) Scale(x float32) *Vec2 {
	v.Data[0] *= x
	v.Data[1] *= x
	return v
}

//Add v += v2; returns v for chaining.
func (v *Vec2) Add(v2 *Vec2) *Vec2 {
	v.Data[0] += v2.X()
	v.Data[1] += v2.Y()
	return v
}

//Sub v -= v2; returns v for chaining.
func (v *Vec2) Sub(v2 *Vec2) *Vec2 {
	v.Data[0] -= v2.X()
	v.Data[1] -= v2.Y()
	return v
}

//Mag returns the magnitude (Euclidean length) of the vector.
func (v *Vec2) Mag() float32 {
	return float32(math.Sqrt(float64(v.X()*v.X() + v.Y()*v.Y())))
}

//SqrMag returns the squared magnitude of the vector (avoids the sqrt).
func (v *Vec2) SqrMag() float32 {
	return v.X()*v.X() + v.Y()*v.Y()
}

// Eq reports whether both components are exactly equal (float equality).
func (v *Vec2) Eq(v2 *Vec2) bool {
	return v.Data == v2.Data
}

// Set assigns both components.
func (v *Vec2) Set(x, y float32) {
	v.Data[0] = x
	v.Data[1] = y
}

// Normalize scales v to unit length in place.
// Note: dividing by a zero magnitude produces NaN components.
func (v *Vec2) Normalize() {
	mag := v.Mag()
	v.Data[0] /= mag
	v.Data[1] /= mag
}

// Clone returns a new independent copy of v.
func (v *Vec2) Clone() *Vec2 {
	return &Vec2{Data: v.Data}
}
//AddVec2 v3 = v1 + v2
func AddVec2(v1, v2 *Vec2) *Vec2 {
return &Vec2{
Data: [2]float32{
v1.X() + v2.X(),
v1.Y() + v2.Y(),
},
}
}
//SubVec2 v3 = v1 - v2
func SubVec2(v1, v2 *Vec2) *Vec2 {
return &Vec2{
Data: [2]float32{
v1.X() - v2.X(),
v1.Y() - v2.Y(),
},
}
}
func NewVec2(x, y float32) *Vec2 {
return &Vec2{
[2]float32{
x,
y,
},
}
} | gglm/vec2.go | 0.815894 | 0.439026 | vec2.go | starcoder |
package transformations
import (
"fmt"
"math"
"sort"
"github.com/apache/arrow/go/arrow/array"
"github.com/influxdata/flux"
"github.com/influxdata/flux/execute"
"github.com/influxdata/flux/memory"
"github.com/influxdata/flux/plan"
"github.com/influxdata/flux/semantic"
"github.com/influxdata/tdigest"
"github.com/pkg/errors"
)
// PercentileKind is the operation/procedure kind for the t-digest estimate.
const PercentileKind = "percentile"

// ExactPercentileAggKind is the procedure kind for the exact aggregate method.
const ExactPercentileAggKind = "exact-percentile-aggregate"

// ExactPercentileSelectKind is the procedure kind for the exact selector method.
const ExactPercentileSelectKind = "exact-percentile-selector"

// Accepted values of percentile()'s `method` argument.
const (
	methodEstimateTdigest = "estimate_tdigest"
	methodExactMean = "exact_mean"
	methodExactSelector = "exact_selector"
)
// PercentileOpSpec is the parsed form of a percentile() call: the requested
// percentile in [0, 1], the t-digest compression (estimate method only), and
// the method name.
type PercentileOpSpec struct {
	Percentile float64 `json:"percentile"`
	Compression float64 `json:"compression"`
	Method string `json:"method"`
	// percentile is either an aggregate, or a selector based on the options
	execute.AggregateConfig
	execute.SelectorConfig
}
// init registers the percentile function, the median builtin that wraps it,
// and the procedure/transformation implementations for all three methods.
func init() {
	percentileSignature := flux.FunctionSignature(
		map[string]semantic.PolyType{
			"column": semantic.String,
			"percentile": semantic.Float,
			"compression": semantic.Float,
			"method": semantic.String,
		},
		[]string{"percentile"},
	)

	flux.RegisterFunction(PercentileKind, createPercentileOpSpec, percentileSignature)
	flux.RegisterBuiltIn("median", medianBuiltin)
	flux.RegisterOpSpec(PercentileKind, newPercentileOp)
	plan.RegisterProcedureSpec(PercentileKind, newPercentileProcedure, PercentileKind)
	execute.RegisterTransformation(PercentileKind, createPercentileTransformation)
	execute.RegisterTransformation(ExactPercentileAggKind, createExactPercentileAggTransformation)
	execute.RegisterTransformation(ExactPercentileSelectKind, createExactPercentileSelectTransformation)
}
// medianBuiltin is Flux source registered under "median": sugar for
// percentile(percentile: 0.5) with a selectable method.
var medianBuiltin = `
// median returns the 50th percentile.
// By default an approximate percentile is computed, this can be disabled by passing exact:true.
// Using the exact method requires that the entire data set can fit in memory.
median = (method="estimate_tdigest", compression=0.0, tables=<-) =>
	tables
		|> percentile(percentile:0.5, method:method, compression:compression)
`
// createPercentileOpSpec parses the arguments of a percentile() call into a
// PercentileOpSpec, validating that the percentile lies in [0, 1] and that
// compression is only supplied with the t-digest method.
func createPercentileOpSpec(args flux.Arguments, a *flux.Administration) (flux.OperationSpec, error) {
	if err := a.AddParentFromArgs(args); err != nil {
		return nil, err
	}
	spec := new(PercentileOpSpec)
	p, err := args.GetRequiredFloat("percentile")
	if err != nil {
		return nil, err
	}
	spec.Percentile = p

	if spec.Percentile < 0 || spec.Percentile > 1 {
		return nil, errors.New("percentile must be between 0 and 1.")
	}

	if m, ok, err := args.GetString("method"); err != nil {
		return nil, err
	} else if ok {
		spec.Method = m
	}

	if c, ok, err := args.GetFloat("compression"); err != nil {
		return nil, err
	} else if ok {
		spec.Compression = c
	}

	if spec.Compression > 0 && spec.Method != methodEstimateTdigest {
		return nil, errors.New("compression parameter is only valid for method estimate_tdigest.")
	}

	// Set default Compression if not exact
	if spec.Method == methodEstimateTdigest && spec.Compression == 0 {
		spec.Compression = 1000
	}

	if err := spec.AggregateConfig.ReadArgs(args); err != nil {
		return nil, err
	}
	if err := spec.SelectorConfig.ReadArgs(args); err != nil {
		return nil, err
	}

	return spec, nil
}
// newPercentileOp returns an empty PercentileOpSpec for deserialization.
func newPercentileOp() flux.OperationSpec {
	return new(PercentileOpSpec)
}

// Kind identifies this operation spec as a percentile operation.
func (s *PercentileOpSpec) Kind() flux.OperationKind {
	return PercentileKind
}
// TDigestPercentileProcedureSpec is the plan node for the approximate
// (t-digest) percentile method.
type TDigestPercentileProcedureSpec struct {
	Percentile float64 `json:"percentile"`
	Compression float64 `json:"compression"`
	execute.AggregateConfig
}

// Kind returns the procedure kind shared with the percentile operation.
func (s *TDigestPercentileProcedureSpec) Kind() plan.ProcedureKind {
	return PercentileKind
}

// Copy returns a deep copy of the spec.
func (s *TDigestPercentileProcedureSpec) Copy() plan.ProcedureSpec {
	return &TDigestPercentileProcedureSpec{
		Percentile: s.Percentile,
		Compression: s.Compression,
		AggregateConfig: s.AggregateConfig,
	}
}
// ExactPercentileAggProcedureSpec is the physical-plan spec for the exact
// percentile computed as an aggregate (interpolating between neighbors).
type ExactPercentileAggProcedureSpec struct {
	Percentile float64 `json:"percentile"`
	execute.AggregateConfig
}

// Kind reports the procedure kind for the exact aggregate variant.
func (s *ExactPercentileAggProcedureSpec) Kind() plan.ProcedureKind {
	return ExactPercentileAggKind
}

// Copy returns a copy of the spec.
func (s *ExactPercentileAggProcedureSpec) Copy() plan.ProcedureSpec {
	return &ExactPercentileAggProcedureSpec{Percentile: s.Percentile, AggregateConfig: s.AggregateConfig}
}
// ExactPercentileSelectProcedureSpec is the physical-plan spec for the
// exact percentile computed as a selector (picking an existing row).
type ExactPercentileSelectProcedureSpec struct {
	Percentile float64 `json:"percentile"`
	execute.SelectorConfig
}

// Kind reports the procedure kind for the exact selector variant.
func (s *ExactPercentileSelectProcedureSpec) Kind() plan.ProcedureKind {
	return ExactPercentileSelectKind
}

// Copy returns a copy of the spec. The embedded SelectorConfig is carried
// over as well; dropping it (as the previous implementation did) would lose
// the configured column on every planner copy, inconsistent with the other
// Copy methods in this file and with NewExactPercentileSelectorTransformation,
// which reads spec.SelectorConfig.
func (s *ExactPercentileSelectProcedureSpec) Copy() plan.ProcedureSpec {
	return &ExactPercentileSelectProcedureSpec{
		Percentile:     s.Percentile,
		SelectorConfig: s.SelectorConfig,
	}
}
// newPercentileProcedure converts the percentile operation spec into the
// procedure spec matching the requested method. Unrecognized methods fall
// back to the t-digest estimate.
func newPercentileProcedure(qs flux.OperationSpec, a plan.Administration) (plan.ProcedureSpec, error) {
	spec, ok := qs.(*PercentileOpSpec)
	if !ok {
		return nil, fmt.Errorf("invalid spec type %T", qs)
	}
	switch spec.Method {
	case methodExactMean:
		// Exact percentile computed as an aggregate.
		return &ExactPercentileAggProcedureSpec{
			Percentile:      spec.Percentile,
			AggregateConfig: spec.AggregateConfig,
		}, nil
	case methodExactSelector:
		// Exact percentile that selects an existing row.
		return &ExactPercentileSelectProcedureSpec{
			Percentile: spec.Percentile,
		}, nil
	case methodEstimateTdigest:
		fallthrough
	default:
		// default to estimated percentile
		return &TDigestPercentileProcedureSpec{
			Percentile:      spec.Percentile,
			Compression:     spec.Compression,
			AggregateConfig: spec.AggregateConfig,
		}, nil
	}
}
// PercentileAgg computes an approximate percentile of a stream of float
// values using a t-digest sketch. Quantile is the target quantile in [0, 1]
// and Compression controls the digest's accuracy/size trade-off.
type PercentileAgg struct {
	Quantile,
	Compression float64
	// digest is allocated in Copy; the prototype agg handed to the
	// transformation never accumulates values itself.
	digest *tdigest.TDigest
}

// createPercentileTransformation builds the aggregate transformation and
// dataset for the t-digest based percentile procedure.
func createPercentileTransformation(id execute.DatasetID, mode execute.AccumulationMode, spec plan.ProcedureSpec, a execute.Administration) (execute.Transformation, execute.Dataset, error) {
	ps, ok := spec.(*TDigestPercentileProcedureSpec)
	if !ok {
		return nil, nil, fmt.Errorf("invalid spec type %T", ps)
	}
	agg := &PercentileAgg{
		Quantile:    ps.Percentile,
		Compression: ps.Compression,
	}
	t, d := execute.NewAggregateTransformationAndDataset(id, mode, agg, ps.AggregateConfig, a.Allocator())
	return t, d, nil
}

// Copy returns a fresh PercentileAgg with the same settings and a new,
// empty t-digest, so each aggregation runs independently.
func (a *PercentileAgg) Copy() *PercentileAgg {
	na := new(PercentileAgg)
	*na = *a
	na.digest = tdigest.NewWithCompression(na.Compression)
	return na
}
// NewBoolAgg returns nil: percentile is only defined for float columns.
func (a *PercentileAgg) NewBoolAgg() execute.DoBoolAgg {
	return nil
}

// NewIntAgg returns nil: percentile is only defined for float columns.
func (a *PercentileAgg) NewIntAgg() execute.DoIntAgg {
	return nil
}

// NewUIntAgg returns nil: percentile is only defined for float columns.
func (a *PercentileAgg) NewUIntAgg() execute.DoUIntAgg {
	return nil
}

// NewFloatAgg returns a copy of the prototype with a fresh digest.
func (a *PercentileAgg) NewFloatAgg() execute.DoFloatAgg {
	return a.Copy()
}

// NewStringAgg returns nil: percentile is only defined for float columns.
func (a *PercentileAgg) NewStringAgg() execute.DoStringAgg {
	return nil
}

// DoFloat feeds every value in the column into the t-digest with weight 1.
func (a *PercentileAgg) DoFloat(vs *array.Float64) {
	for _, v := range vs.Float64Values() {
		a.digest.Add(v, 1)
	}
}

// Type reports that the aggregate produces a float column.
func (a *PercentileAgg) Type() flux.ColType {
	return flux.TFloat
}

// ValueFloat returns the digest's estimate at the configured quantile.
func (a *PercentileAgg) ValueFloat() float64 {
	return a.digest.Quantile(a.Quantile)
}
// ExactPercentileAgg computes an exact percentile by buffering every float
// value in memory; the whole data set must fit in RAM.
type ExactPercentileAgg struct {
	Quantile float64
	data []float64
}

// createExactPercentileAggTransformation builds the aggregate
// transformation and dataset for the exact percentile procedure.
func createExactPercentileAggTransformation(id execute.DatasetID, mode execute.AccumulationMode, spec plan.ProcedureSpec, a execute.Administration) (execute.Transformation, execute.Dataset, error) {
	ps, ok := spec.(*ExactPercentileAggProcedureSpec)
	if !ok {
		return nil, nil, fmt.Errorf("invalid spec type %T", ps)
	}
	agg := &ExactPercentileAgg{
		Quantile: ps.Percentile,
	}
	t, d := execute.NewAggregateTransformationAndDataset(id, mode, agg, ps.AggregateConfig, a.Allocator())
	return t, d, nil
}

// Copy returns a new agg with the same quantile and an empty buffer.
func (a *ExactPercentileAgg) Copy() *ExactPercentileAgg {
	na := new(ExactPercentileAgg)
	*na = *a
	na.data = nil
	return na
}
// NewBoolAgg returns nil: exact percentile is only defined for float columns.
func (a *ExactPercentileAgg) NewBoolAgg() execute.DoBoolAgg {
	return nil
}

// NewIntAgg returns nil: exact percentile is only defined for float columns.
func (a *ExactPercentileAgg) NewIntAgg() execute.DoIntAgg {
	return nil
}

// NewUIntAgg returns nil: exact percentile is only defined for float columns.
func (a *ExactPercentileAgg) NewUIntAgg() execute.DoUIntAgg {
	return nil
}

// NewFloatAgg returns a copy of the prototype with an empty buffer.
func (a *ExactPercentileAgg) NewFloatAgg() execute.DoFloatAgg {
	return a.Copy()
}

// NewStringAgg returns nil: exact percentile is only defined for float columns.
func (a *ExactPercentileAgg) NewStringAgg() execute.DoStringAgg {
	return nil
}

// DoFloat appends the whole column to the in-memory buffer.
func (a *ExactPercentileAgg) DoFloat(vs *array.Float64) {
	a.data = append(a.data, vs.Float64Values()...)
}

// Type reports that the aggregate produces a float column.
func (a *ExactPercentileAgg) Type() flux.ColType {
	return flux.TFloat
}
// ValueFloat returns the exact value at the configured quantile, linearly
// interpolating between the two closest ranks. The buffered data is sorted
// in place. When no values were collected it returns NaN; previously this
// case panicked with an index-out-of-range (len(a.data)-1 == -1).
func (a *ExactPercentileAgg) ValueFloat() float64 {
	if len(a.data) == 0 {
		return math.NaN()
	}
	sort.Float64s(a.data)
	x := a.Quantile * float64(len(a.data)-1)
	x0 := math.Floor(x)
	x1 := math.Ceil(x)
	if x0 == x1 {
		// x landed exactly on a rank; no interpolation needed.
		return a.data[int(x0)]
	}
	// Linear interpolation: weight each neighbor by its distance from x.
	y0 := a.data[int(x0)]
	y1 := a.data[int(x1)]
	return y0*(x1-x) + y1*(x-x0)
}
// createExactPercentileSelectTransformation builds the selector
// transformation and dataset for the exact percentile selector procedure.
func createExactPercentileSelectTransformation(id execute.DatasetID, mode execute.AccumulationMode, spec plan.ProcedureSpec, a execute.Administration) (execute.Transformation, execute.Dataset, error) {
	ps, ok := spec.(*ExactPercentileSelectProcedureSpec)
	if !ok {
		return nil, nil, fmt.Errorf("invalid spec type %T", ps)
	}
	cache := execute.NewTableBuilderCache(a.Allocator())
	d := execute.NewDataset(id, mode, cache)
	t := NewExactPercentileSelectorTransformation(d, cache, ps, a.Allocator())
	return t, d, nil
}

// ExactPercentileSelectorTransformation selects, for each table, the
// existing row whose value sits at the configured percentile.
type ExactPercentileSelectorTransformation struct {
	d execute.Dataset
	cache execute.TableBuilderCache
	spec ExactPercentileSelectProcedureSpec
	a *memory.Allocator
}

// NewExactPercentileSelectorTransformation constructs the transformation.
// NOTE(review): it mutates the caller's spec in place by defaulting the
// selector column before taking a value copy.
func NewExactPercentileSelectorTransformation(d execute.Dataset, cache execute.TableBuilderCache, spec *ExactPercentileSelectProcedureSpec, a *memory.Allocator) *ExactPercentileSelectorTransformation {
	if spec.SelectorConfig.Column == "" {
		spec.SelectorConfig.Column = execute.DefaultValueColLabel
	}
	sel := &ExactPercentileSelectorTransformation{
		d: d,
		cache: cache,
		spec: *spec,
		a: a,
	}
	return sel
}
// Process copies the incoming table, sorts the copy ascending by the
// selector column, picks the row at the percentile's nearest-rank index,
// and emits that single row into the output table for the same group key.
func (t *ExactPercentileSelectorTransformation) Process(id execute.DatasetID, tbl flux.Table) error {
	valueIdx := execute.ColIdx(t.spec.Column, tbl.Cols())
	if valueIdx < 0 {
		return fmt.Errorf("no column %q exists", t.spec.Column)
	}
	// Materialize the table so it can be sorted without disturbing the input.
	copyTable := execute.NewColListTableBuilder(tbl.Key(), t.a)
	if err := execute.AddTableCols(tbl, copyTable); err != nil {
		return err
	}
	if err := execute.AppendTable(tbl, copyTable); err != nil {
		return err
	}
	// Ascending sort (desc == false) so rank order matches the quantile.
	copyTable.Sort([]string{t.spec.Column}, false)
	n := copyTable.NRows()
	index := getQuantileIndex(t.spec.Percentile, n)
	row := copyTable.GetRow(index)
	builder, created := t.cache.TableBuilder(tbl.Key())
	if !created {
		return fmt.Errorf("found duplicate table with key: %v", tbl.Key())
	}
	if err := execute.AddTableCols(tbl, builder); err != nil {
		return err
	}
	// Emit the selected row, column by column, in the builder's order.
	for j, col := range builder.Cols() {
		v, ok := row.Get(col.Label)
		if !ok {
			return fmt.Errorf("unexpected column in percentile select")
		}
		if err := builder.AppendValue(j, v); err != nil {
			return err
		}
	}
	return nil
}
// getQuantileIndex maps a quantile in [0, 1] to a row index in a sorted
// column of the given length, using the nearest-rank definition:
// ceil(q * length) - 1, kept non-negative so q == 0 selects the first row.
func getQuantileIndex(quantile float64, length int) int {
	rank := int(math.Ceil(quantile * float64(length)))
	if rank > 0 {
		rank--
	}
	return rank
}
// RetractTable delegates retraction to the downstream dataset.
func (t *ExactPercentileSelectorTransformation) RetractTable(id execute.DatasetID, key flux.GroupKey) error {
	return t.d.RetractTable(key)
}

// UpdateWatermark forwards the watermark to the downstream dataset.
func (t *ExactPercentileSelectorTransformation) UpdateWatermark(id execute.DatasetID, mark execute.Time) error {
	return t.d.UpdateWatermark(mark)
}

// UpdateProcessingTime forwards the processing time to the downstream dataset.
func (t *ExactPercentileSelectorTransformation) UpdateProcessingTime(id execute.DatasetID, pt execute.Time) error {
	return t.d.UpdateProcessingTime(pt)
}

// Finish finalizes the downstream dataset, propagating any error.
func (t *ExactPercentileSelectorTransformation) Finish(id execute.DatasetID, err error) {
	t.d.Finish(err)
}
package govalidator
import (
"reflect"
"regexp"
"sync"
)
// Validator is a wrapper for a validator function that returns bool and accepts string.
type Validator func(str string) bool

// CustomTypeValidator is a wrapper for validator functions that returns bool and accepts any type.
// The second parameter should be the context (in the case of validating a struct: the whole object being validated).
type CustomTypeValidator func(i interface{}, o interface{}) bool

// ParamValidator is a wrapper for validator functions that accepts additional parameters.
type ParamValidator func(str string, params ...string) bool

// tagOptionsMap maps a struct-tag option name to its raw string value.
type tagOptionsMap map[string]string

// UnsupportedTypeError is a wrapper for reflect.Type, reported when
// validation encounters a kind it cannot handle.
type UnsupportedTypeError struct {
	Type reflect.Type
}

// stringValues is a slice of reflect.Value holding *reflect.StringValue.
// It implements the methods to sort by string.
type stringValues []reflect.Value
var ParamTagMap = map[string]ParamValidator{
"length": ByteLength,
"stringlength": StringLength,
"matches": StringMatches,
}
// ParamTagRegexMap maps param tags to their respective regexes.
var ParamTagRegexMap = map[string]*regexp.Regexp{
"length": regexp.MustCompile("^length\\((\\d+)\\|(\\d+)\\)$"),
"stringlength": regexp.MustCompile("^stringlength\\((\\d+)\\|(\\d+)\\)$"),
"matches": regexp.MustCompile(`^matches\((.+)\)$`),
}
// customTypeTagMap is a concurrency-safe registry of custom type
// validators, guarded by the embedded RWMutex.
type customTypeTagMap struct {
	validators map[string]CustomTypeValidator
	sync.RWMutex
}

// Get returns the validator registered under name and whether it exists.
func (tm *customTypeTagMap) Get(name string) (CustomTypeValidator, bool) {
	tm.RLock()
	defer tm.RUnlock()
	v, ok := tm.validators[name]
	return v, ok
}

// Set registers (or replaces) the validator under name.
func (tm *customTypeTagMap) Set(name string, ctv CustomTypeValidator) {
	tm.Lock()
	defer tm.Unlock()
	tm.validators[name] = ctv
}

// CustomTypeTagMap is a map of functions that can be used as tags for ValidateStruct function.
// Use this to validate compound or custom types that need to be handled as a whole, e.g.
// `type UUID [16]byte` (this would be handled as an array of bytes).
var CustomTypeTagMap = &customTypeTagMap{validators: make(map[string]CustomTypeValidator)}
// TagMap is a map of functions, that can be used as tags for ValidateStruct function.
// Each key is the tag name usable in a struct field's `valid:"..."` tag and
// maps to the parameterless validator implementing it.
var TagMap = map[string]Validator{
	"email":          IsEmail,
	"url":            IsURL,
	"dialstring":     IsDialString,
	"requrl":         IsRequestURL,
	"requri":         IsRequestURI,
	"alpha":          IsAlpha,
	"utfletter":      IsUTFLetter,
	"alphanum":       IsAlphanumeric,
	"utfletternum":   IsUTFLetterNumeric,
	"numeric":        IsNumeric,
	"utfnumeric":     IsUTFNumeric,
	"utfdigit":       IsUTFDigit,
	"hexadecimal":    IsHexadecimal,
	"hexcolor":       IsHexcolor,
	"rgbcolor":       IsRGBcolor,
	"lowercase":      IsLowerCase,
	"uppercase":      IsUpperCase,
	"int":            IsInt,
	"float":          IsFloat,
	"null":           IsNull,
	"uuid":           IsUUID,
	"uuidv3":         IsUUIDv3,
	"uuidv4":         IsUUIDv4,
	"uuidv5":         IsUUIDv5,
	"creditcard":     IsCreditCard,
	"isbn10":         IsISBN10,
	"isbn13":         IsISBN13,
	"json":           IsJSON,
	"multibyte":      IsMultibyte,
	"ascii":          IsASCII,
	"printableascii": IsPrintableASCII,
	"fullwidth":      IsFullWidth,
	"halfwidth":      IsHalfWidth,
	"variablewidth":  IsVariableWidth,
	"base64":         IsBase64,
	"datauri":        IsDataURI,
	"ip":             IsIP,
	"port":           IsPort,
	"ipv4":           IsIPv4,
	"ipv6":           IsIPv6,
	"dns":            IsDNSName,
	"host":           IsHost,
	"mac":            IsMAC,
	"latitude":       IsLatitude,
	"longitude":      IsLongitude,
	"ssn":            IsSSN,
	"semver":         IsSemver,
}
// ISO3166Entry stores country codes as defined by ISO 3166-1:
// short names in English and French, the two- and three-letter
// alphabetic codes, and the three-digit numeric code.
type ISO3166Entry struct {
	EnglishShortName string
	FrenchShortName string
	Alpha2Code string
	Alpha3Code string
	Numeric string
}
//ISO3166List based on https://www.iso.org/obp/ui/#search/code/ Code Type "Officially Assigned Codes"
var ISO3166List = []ISO3166Entry{
{"Afghanistan", "Afghanistan (l')", "AF", "AFG", "004"},
{"Albania", "Albanie (l')", "AL", "ALB", "008"},
{"Antarctica", "Antarctique (l')", "AQ", "ATA", "010"},
{"Algeria", "Algérie (l')", "DZ", "DZA", "012"},
{"American Samoa", "Samoa américaines (les)", "AS", "ASM", "016"},
{"Andorra", "Andorre (l')", "AD", "AND", "020"},
{"Angola", "Angola (l')", "AO", "AGO", "024"},
{"Antigua and Barbuda", "Antigua-et-Barbuda", "AG", "ATG", "028"},
{"Azerbaijan", "Azerbaïdjan (l')", "AZ", "AZE", "031"},
{"Argentina", "Argentine (l')", "AR", "ARG", "032"},
{"Australia", "Australie (l')", "AU", "AUS", "036"},
{"Austria", "Autriche (l')", "AT", "AUT", "040"},
{"Bahamas (the)", "Bahamas (les)", "BS", "BHS", "044"},
{"Bahrain", "Bahreïn", "BH", "BHR", "048"},
{"Bangladesh", "Bangladesh (le)", "BD", "BGD", "050"},
{"Armenia", "Arménie (l')", "AM", "ARM", "051"},
{"Barbados", "Barbade (la)", "BB", "BRB", "052"},
{"Belgium", "Belgique (la)", "BE", "BEL", "056"},
{"Bermuda", "Bermudes (les)", "BM", "BMU", "060"},
{"Bhutan", "Bhoutan (le)", "BT", "BTN", "064"},
{"Bolivia (Plurinational State of)", "Bolivie (État plurinational de)", "BO", "BOL", "068"},
{"Bosnia and Herzegovina", "Bosnie-Herzégovine (la)", "BA", "BIH", "070"},
{"Botswana", "Botswana (le)", "BW", "BWA", "072"},
{"Bouvet Island", "Bouvet (l'Île)", "BV", "BVT", "074"},
{"Brazil", "Brésil (le)", "BR", "BRA", "076"},
{"Belize", "Belize (le)", "BZ", "BLZ", "084"},
{"British Indian Ocean Territory (the)", "Indien (le Territoire britannique de l'océan)", "IO", "IOT", "086"},
{"Solomon Islands", "Salomon (Îles)", "SB", "SLB", "090"},
{"Virgin Islands (British)", "Vierges britanniques (les Îles)", "VG", "VGB", "092"},
{"Brunei Darussalam", "Brunéi Darussalam (le)", "BN", "BRN", "096"},
{"Bulgaria", "Bulgarie (la)", "BG", "BGR", "100"},
{"Myanmar", "Myanmar (le)", "MM", "MMR", "104"},
{"Burundi", "Burundi (le)", "BI", "BDI", "108"},
{"Belarus", "Bélarus (le)", "BY", "BLR", "112"},
{"Cambodia", "Cambodge (le)", "KH", "KHM", "116"},
{"Cameroon", "Cameroun (le)", "CM", "CMR", "120"},
{"Canada", "Canada (le)", "CA", "CAN", "124"},
{"Cabo Verde", "Cabo Verde", "CV", "CPV", "132"},
{"Cayman Islands (the)", "Caïmans (les Îles)", "KY", "CYM", "136"},
{"Central African Republic (the)", "République centrafricaine (la)", "CF", "CAF", "140"},
{"Sri Lanka", "Sri Lanka", "LK", "LKA", "144"},
{"Chad", "Tchad (le)", "TD", "TCD", "148"},
{"Chile", "Chili (le)", "CL", "CHL", "152"},
{"China", "Chine (la)", "CN", "CHN", "156"},
{"Taiwan (Province of China)", "Taïwan (Province de Chine)", "TW", "TWN", "158"},
{"Christmas Island", "Christmas (l'Île)", "CX", "CXR", "162"},
{"Cocos (Keeling) Islands (the)", "Cocos (les Îles)/ Keeling (les Îles)", "CC", "CCK", "166"},
{"Colombia", "Colombie (la)", "CO", "COL", "170"},
{"Comoros (the)", "Comores (les)", "KM", "COM", "174"},
{"Mayotte", "Mayotte", "YT", "MYT", "175"},
{"Congo (the)", "Congo (le)", "CG", "COG", "178"},
{"Congo (the Democratic Republic of the)", "Congo (la République démocratique du)", "CD", "COD", "180"},
{"Cook Islands (the)", "Cook (les Îles)", "CK", "COK", "184"},
{"<NAME>", "<NAME> (le)", "CR", "CRI", "188"},
{"Croatia", "Croatie (la)", "HR", "HRV", "191"},
{"Cuba", "Cuba", "CU", "CUB", "192"},
{"Cyprus", "Chypre", "CY", "CYP", "196"},
{"Czech Republic (the)", "tchèque (la République)", "CZ", "CZE", "203"},
{"Benin", "Bénin (le)", "BJ", "BEN", "204"},
{"Denmark", "Danemark (le)", "DK", "DNK", "208"},
{"Dominica", "Dominique (la)", "DM", "DMA", "212"},
{"Dominican Republic (the)", "dominicaine (la République)", "DO", "DOM", "214"},
{"Ecuador", "Équateur (l')", "EC", "ECU", "218"},
{"El Salvador", "El Salvador", "SV", "SLV", "222"},
{"Equatorial Guinea", "Guinée équatoriale (la)", "GQ", "GNQ", "226"},
{"Ethiopia", "Éthiopie (l')", "ET", "ETH", "231"},
{"Eritrea", "Érythrée (l')", "ER", "ERI", "232"},
{"Estonia", "Estonie (l')", "EE", "EST", "233"},
{"Faroe Islands (the)", "Féroé (les Îles)", "FO", "FRO", "234"},
{"Falkland Islands (the) [Malvinas]", "Falkland (les Îles)/Malouines (les Îles)", "FK", "FLK", "238"},
{"South Georgia and the South Sandwich Islands", "Géorgie du Sud-et-les Îles Sandwich du Sud (la)", "GS", "SGS", "239"},
{"Fiji", "Fidji (les)", "FJ", "FJI", "242"},
{"Finland", "Finlande (la)", "FI", "FIN", "246"},
{"Åland Islands", "Åland(les Îles)", "AX", "ALA", "248"},
{"France", "France (la)", "FR", "FRA", "250"},
{"French Guiana", "Guyane française (la )", "GF", "GUF", "254"},
{"French Polynesia", "Polynésie française (la)", "PF", "PYF", "258"},
{"French Southern Territories (the)", "Terres australes françaises (les)", "TF", "ATF", "260"},
{"Djibouti", "Djibouti", "DJ", "DJI", "262"},
{"Gabon", "Gabon (le)", "GA", "GAB", "266"},
{"Georgia", "Géorgie (la)", "GE", "GEO", "268"},
{"Gambia (the)", "Gambie (la)", "GM", "GMB", "270"},
{"Palestine, State of", "Palestine, État de", "PS", "PSE", "275"},
{"Germany", "Allemagne (l')", "DE", "DEU", "276"},
{"Ghana", "Ghana (le)", "GH", "GHA", "288"},
{"Gibraltar", "Gibraltar", "GI", "GIB", "292"},
{"Kiribati", "Kiribati", "KI", "KIR", "296"},
{"Greece", "Grèce (la)", "GR", "GRC", "300"},
{"Greenland", "Groenland (le)", "GL", "GRL", "304"},
{"Grenada", "Grenade (la)", "GD", "GRD", "308"},
{"Guadeloupe", "Guadeloupe (la)", "GP", "GLP", "312"},
{"Guam", "Guam", "GU", "GUM", "316"},
{"Guatemala", "Guatemala (le)", "GT", "GTM", "320"},
{"Guinea", "Guinée (la)", "GN", "GIN", "324"},
{"Guyana", "Guyana (le)", "GY", "GUY", "328"},
{"Haiti", "Haïti", "HT", "HTI", "332"},
{"Heard Island and McDonald Islands", "Heard-et-Îles MacDonald (l'Île)", "HM", "HMD", "334"},
{"Holy See (the)", "Saint-Siège (le)", "VA", "VAT", "336"},
{"Honduras", "Honduras (le)", "HN", "HND", "340"},
{"Hong Kong", "Hong Kong", "HK", "HKG", "344"},
{"Hungary", "Hongrie (la)", "HU", "HUN", "348"},
{"Iceland", "Islande (l')", "IS", "ISL", "352"},
{"India", "Inde (l')", "IN", "IND", "356"},
{"Indonesia", "Indonésie (l')", "ID", "IDN", "360"},
{"Iran (Islamic Republic of)", "Iran (République Islamique d')", "IR", "IRN", "364"},
{"Iraq", "Iraq (l')", "IQ", "IRQ", "368"},
{"Ireland", "Irlande (l')", "IE", "IRL", "372"},
{"Israel", "Israël", "IL", "ISR", "376"},
{"Italy", "Italie (l')", "IT", "ITA", "380"},
{"Côte d'Ivoire", "Côte d'Ivoire (la)", "CI", "CIV", "384"},
{"Jamaica", "Jamaïque (la)", "JM", "JAM", "388"},
{"Japan", "Japon (le)", "JP", "JPN", "392"},
{"Kazakhstan", "Kazakhstan (le)", "KZ", "KAZ", "398"},
{"Jordan", "Jordanie (la)", "JO", "JOR", "400"},
{"Kenya", "Kenya (le)", "KE", "KEN", "404"},
{"Korea (the Democratic People's Republic of)", "Corée (la République populaire démocratique de)", "KP", "PRK", "408"},
{"Korea (the Republic of)", "Corée (la République de)", "KR", "KOR", "410"},
{"Kuwait", "Koweït (le)", "KW", "KWT", "414"},
{"Kyrgyzstan", "Kirghizistan (le)", "KG", "KGZ", "417"},
{"Lao People's Democratic Republic (the)", "Lao, République démocratique populaire", "LA", "LAO", "418"},
{"Lebanon", "Liban (le)", "LB", "LBN", "422"},
{"Lesotho", "Lesotho (le)", "LS", "LSO", "426"},
{"Latvia", "Lettonie (la)", "LV", "LVA", "428"},
{"Liberia", "Libéria (le)", "LR", "LBR", "430"},
{"Libya", "Libye (la)", "LY", "LBY", "434"},
{"Liechtenstein", "Liechtenstein (le)", "LI", "LIE", "438"},
{"Lithuania", "Lituanie (la)", "LT", "LTU", "440"},
{"Luxembourg", "Luxembourg (le)", "LU", "LUX", "442"},
{"Macao", "Macao", "MO", "MAC", "446"},
{"Madagascar", "Madagascar", "MG", "MDG", "450"},
{"Malawi", "Malawi (le)", "MW", "MWI", "454"},
{"Malaysia", "Malaisie (la)", "MY", "MYS", "458"},
{"Maldives", "Maldives (les)", "MV", "MDV", "462"},
{"Mali", "Mali (le)", "ML", "MLI", "466"},
{"Malta", "Malte", "MT", "MLT", "470"},
{"Martinique", "Martinique (la)", "MQ", "MTQ", "474"},
{"Mauritania", "Mauritanie (la)", "MR", "MRT", "478"},
{"Mauritius", "Maurice", "MU", "MUS", "480"},
{"Mexico", "Mexique (le)", "MX", "MEX", "484"},
{"Monaco", "Monaco", "MC", "MCO", "492"},
{"Mongolia", "Mongolie (la)", "MN", "MNG", "496"},
{"Moldova (the Republic of)", "Moldova , République de", "MD", "MDA", "498"},
{"Montenegro", "Monténégro (le)", "ME", "MNE", "499"},
{"Montserrat", "Montserrat", "MS", "MSR", "500"},
{"Morocco", "Maroc (le)", "MA", "MAR", "504"},
{"Mozambique", "Mozambique (le)", "MZ", "MOZ", "508"},
{"Oman", "Oman", "OM", "OMN", "512"},
{"Namibia", "Namibie (la)", "NA", "NAM", "516"},
{"Nauru", "Nauru", "NR", "NRU", "520"},
{"Nepal", "Népal (le)", "NP", "NPL", "524"},
{"Netherlands (the)", "Pays-Bas (les)", "NL", "NLD", "528"},
{"Curaçao", "Curaçao", "CW", "CUW", "531"},
{"Aruba", "Aruba", "AW", "ABW", "533"},
{"<NAME> (Dutch part)", "Saint-Martin (partie néerlandaise)", "SX", "SXM", "534"},
{"Bonaire, Sint Eustatius and Saba", "Bonaire, Saint-Eustache et Saba", "BQ", "BES", "535"},
{"New Caledonia", "Nouvelle-Calédonie (la)", "NC", "NCL", "540"},
{"Vanuatu", "Vanuatu (le)", "VU", "VUT", "548"},
{"New Zealand", "Nouvelle-Zélande (la)", "NZ", "NZL", "554"},
{"Nicaragua", "Nicaragua (le)", "NI", "NIC", "558"},
{"Niger (the)", "Niger (le)", "NE", "NER", "562"},
{"Nigeria", "Nigéria (le)", "NG", "NGA", "566"},
{"Niue", "Niue", "NU", "NIU", "570"},
{"Norfolk Island", "Norfolk (l'Île)", "NF", "NFK", "574"},
{"Norway", "Norvège (la)", "NO", "NOR", "578"},
{"Northern Mariana Islands (the)", "Mariannes du Nord (les Îles)", "MP", "MNP", "580"},
{"United States Minor Outlying Islands (the)", "Îles mineures éloignées des États-Unis (les)", "UM", "UMI", "581"},
{"Micronesia (Federated States of)", "Micronésie (États fédérés de)", "FM", "FSM", "583"},
{"Marshall Islands (the)", "Marshall (Îles)", "MH", "MHL", "584"},
{"Palau", "Palaos (les)", "PW", "PLW", "585"},
{"Pakistan", "Pakistan (le)", "PK", "PAK", "586"},
{"Panama", "Panama (le)", "PA", "PAN", "591"},
{"Papua New Guinea", "Papouasie-Nouvelle-Guinée (la)", "PG", "PNG", "598"},
{"Paraguay", "Paraguay (le)", "PY", "PRY", "600"},
{"Peru", "Pérou (le)", "PE", "PER", "604"},
{"Philippines (the)", "Philippines (les)", "PH", "PHL", "608"},
{"Pitcairn", "Pitcairn", "PN", "PCN", "612"},
{"Poland", "Pologne (la)", "PL", "POL", "616"},
{"Portugal", "Portugal (le)", "PT", "PRT", "620"},
{"Guinea-Bissau", "Guinée-Bissau (la)", "GW", "GNB", "624"},
{"Timor-Leste", "Timor-Leste (le)", "TL", "TLS", "626"},
{"P<NAME>", "Porto Rico", "PR", "PRI", "630"},
{"Qatar", "Qatar (le)", "QA", "QAT", "634"},
{"Réunion", "Réunion (La)", "RE", "REU", "638"},
{"Romania", "Roumanie (la)", "RO", "ROU", "642"},
{"Russian Federation (the)", "Russie (la Fédération de)", "RU", "RUS", "643"},
{"Rwanda", "Rwanda (le)", "RW", "RWA", "646"},
{"<NAME>", "Saint-Barthélemy", "BL", "BLM", "652"},
{"S<NAME>, Ascension and <NAME>", "Sainte-Hélène, Ascension et Tristan <NAME>", "SH", "SHN", "654"},
{"Saint Kit<NAME>", "Saint-Kitts-et-Nevis", "KN", "KNA", "659"},
{"Anguilla", "Anguilla", "AI", "AIA", "660"},
{"Saint Lucia", "Sainte-Lucie", "LC", "LCA", "662"},
{"S<NAME> (French part)", "Saint-Martin (partie française)", "MF", "MAF", "663"},
{"<NAME> and Miquelon", "Saint-Pierre-et-Miquelon", "PM", "SPM", "666"},
{"S<NAME> and the Grenadines", "Saint-Vincent-et-les Grenadines", "VC", "VCT", "670"},
{"San Marino", "Saint-Marin", "SM", "SMR", "674"},
{"<NAME> Principe", "<NAME>-et-Principe", "ST", "STP", "678"},
{"<NAME>", "Arabie saoudite (l')", "SA", "SAU", "682"},
{"Senegal", "Sénégal (le)", "SN", "SEN", "686"},
{"Serbia", "Serbie (la)", "RS", "SRB", "688"},
{"Seychelles", "Seychelles (les)", "SC", "SYC", "690"},
{"Sierra Leone", "Sierra Leone (la)", "SL", "SLE", "694"},
{"Singapore", "Singapour", "SG", "SGP", "702"},
{"Slovakia", "Slovaquie (la)", "SK", "SVK", "703"},
{"Viet Nam", "Viet Nam (le)", "VN", "VNM", "704"},
{"Slovenia", "Slovénie (la)", "SI", "SVN", "705"},
{"Somalia", "Somalie (la)", "SO", "SOM", "706"},
{"South Africa", "Afrique du Sud (l')", "ZA", "ZAF", "710"},
{"Zimbabwe", "Zimbabwe (le)", "ZW", "ZWE", "716"},
{"Spain", "Espagne (l')", "ES", "ESP", "724"},
{"South Sudan", "Soudan du Sud (le)", "SS", "SSD", "728"},
{"Sudan (the)", "Soudan (le)", "SD", "SDN", "729"},
{"Western Sahara*", "Sahara occidental (le)*", "EH", "ESH", "732"},
{"Suriname", "Suriname (le)", "SR", "SUR", "740"},
{"Svalbard and <NAME>", "Svalbard et l'Î<NAME> (le)", "SJ", "SJM", "744"},
{"Swaziland", "Swaziland (le)", "SZ", "SWZ", "748"},
{"Sweden", "Suède (la)", "SE", "SWE", "752"},
{"Switzerland", "Suisse (la)", "CH", "CHE", "756"},
{"Syrian Arab Republic", "République arabe syrienne (la)", "SY", "SYR", "760"},
{"Tajikistan", "Tadjikistan (le)", "TJ", "TJK", "762"},
{"Thailand", "Thaïlande (la)", "TH", "THA", "764"},
{"Togo", "Togo (le)", "TG", "TGO", "768"},
{"Tokelau", "Tokelau (les)", "TK", "TKL", "772"},
{"Tonga", "Tonga (les)", "TO", "TON", "776"},
{"Trinidad and Tobago", "Trinité-et-Tobago (la)", "TT", "TTO", "780"},
{"United Arab Emirates (the)", "Émirats arabes unis (les)", "AE", "ARE", "784"},
{"Tunisia", "Tunisie (la)", "TN", "TUN", "788"},
{"Turkey", "Turquie (la)", "TR", "TUR", "792"},
{"Turkmenistan", "Turkménistan (le)", "TM", "TKM", "795"},
{"Turks and Caicos Islands (the)", "Turks-et-Caïcos (les Îles)", "TC", "TCA", "796"},
{"Tuvalu", "Tuvalu (les)", "TV", "TUV", "798"},
{"Uganda", "Ouganda (l')", "UG", "UGA", "800"},
{"Ukraine", "Ukraine (l')", "UA", "UKR", "804"},
{"Macedonia (the former Yugoslav Republic of)", "Macédoine (l'ex‑République yougoslave de)", "MK", "MKD", "807"},
{"Egypt", "Égypte (l')", "EG", "EGY", "818"},
{"United Kingdom of Great Britain and Northern Ireland (the)", "Royaume-Uni de Grande-Bretagne et d'Irlande du Nord (le)", "GB", "GBR", "826"},
{"Guernsey", "Guernesey", "GG", "GGY", "831"},
{"Jersey", "Jersey", "JE", "JEY", "832"},
{"Isle of Man", "Île de Man", "IM", "IMN", "833"},
{"Tanzania, United Republic of", "Tanzanie, République-Unie de", "TZ", "TZA", "834"},
{"United States of America (the)", "États-Unis d'Amérique (les)", "US", "USA", "840"},
{"Virgin Islands (U.S.)", "Vierges des États-Unis (les Îles)", "VI", "VIR", "850"},
{"Burkina Faso", "Burkina Faso (le)", "BF", "BFA", "854"},
{"Uruguay", "Uruguay (l')", "UY", "URY", "858"},
{"Uzbekistan", "Ouzbékistan (l')", "UZ", "UZB", "860"},
{"Venezuela (Bolivarian Republic of)", "Venezuela (République bolivarienne du)", "VE", "VEN", "862"},
{"Wallis and Futuna", "Wallis-et-Futuna", "WF", "WLF", "876"},
{"Samoa", "Samoa (le)", "WS", "WSM", "882"},
{"Yemen", "Yémen (le)", "YE", "YEM", "887"},
{"Zambia", "Zambie (la)", "ZM", "ZMB", "894"},
}
package datadog
import (
"encoding/json"
)
// DistributionWidgetYAxis Y Axis controls for the distribution widget.
type DistributionWidgetYAxis struct {
// True includes zero.
IncludeZero *bool `json:"include_zero,omitempty"`
// The label of the axis to display on the graph.
Label *string `json:"label,omitempty"`
// Specifies the maximum value to show on the y-axis. It takes a number, or auto for default behavior.
Max *string `json:"max,omitempty"`
// Specifies minimum value to show on the y-axis. It takes a number, or auto for default behavior.
Min *string `json:"min,omitempty"`
// Specifies the scale type. Possible values are `linear` or `log`.
Scale *string `json:"scale,omitempty"`
}
// NewDistributionWidgetYAxis instantiates a new DistributionWidgetYAxis object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
// Defaults applied: Max="auto", Min="auto", Scale="linear". This type has
// no required properties, so it currently behaves the same as the
// ...WithDefaults constructor below (generated code; kept separate).
func NewDistributionWidgetYAxis() *DistributionWidgetYAxis {
	this := DistributionWidgetYAxis{}
	var max string = "auto"
	this.Max = &max
	var min string = "auto"
	this.Min = &min
	var scale string = "linear"
	this.Scale = &scale
	return &this
}

// NewDistributionWidgetYAxisWithDefaults instantiates a new DistributionWidgetYAxis object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewDistributionWidgetYAxisWithDefaults() *DistributionWidgetYAxis {
	this := DistributionWidgetYAxis{}
	var max string = "auto"
	this.Max = &max
	var min string = "auto"
	this.Min = &min
	var scale string = "linear"
	this.Scale = &scale
	return &this
}
// GetIncludeZero returns the IncludeZero field value if set, zero value otherwise.
func (o *DistributionWidgetYAxis) GetIncludeZero() bool {
if o == nil || o.IncludeZero == nil {
var ret bool
return ret
}
return *o.IncludeZero
}
// GetIncludeZeroOk returns a tuple with the IncludeZero field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *DistributionWidgetYAxis) GetIncludeZeroOk() (*bool, bool) {
if o == nil || o.IncludeZero == nil {
return nil, false
}
return o.IncludeZero, true
}
// HasIncludeZero returns a boolean if a field has been set.
func (o *DistributionWidgetYAxis) HasIncludeZero() bool {
if o != nil && o.IncludeZero != nil {
return true
}
return false
}
// SetIncludeZero gets a reference to the given bool and assigns it to the IncludeZero field.
func (o *DistributionWidgetYAxis) SetIncludeZero(v bool) {
o.IncludeZero = &v
}
// GetLabel returns the Label field value if set, zero value otherwise.
func (o *DistributionWidgetYAxis) GetLabel() string {
if o == nil || o.Label == nil {
var ret string
return ret
}
return *o.Label
}
// GetLabelOk returns a tuple with the Label field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *DistributionWidgetYAxis) GetLabelOk() (*string, bool) {
if o == nil || o.Label == nil {
return nil, false
}
return o.Label, true
}
// HasLabel returns a boolean if a field has been set.
func (o *DistributionWidgetYAxis) HasLabel() bool {
if o != nil && o.Label != nil {
return true
}
return false
}
// SetLabel gets a reference to the given string and assigns it to the Label field.
func (o *DistributionWidgetYAxis) SetLabel(v string) {
o.Label = &v
}
// GetMax returns the Max field value if set, zero value otherwise.
func (o *DistributionWidgetYAxis) GetMax() string {
if o == nil || o.Max == nil {
var ret string
return ret
}
return *o.Max
}
// GetMaxOk returns a tuple with the Max field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *DistributionWidgetYAxis) GetMaxOk() (*string, bool) {
if o == nil || o.Max == nil {
return nil, false
}
return o.Max, true
}
// HasMax returns a boolean if a field has been set.
func (o *DistributionWidgetYAxis) HasMax() bool {
if o != nil && o.Max != nil {
return true
}
return false
}
// SetMax gets a reference to the given string and assigns it to the Max field.
func (o *DistributionWidgetYAxis) SetMax(v string) {
o.Max = &v
}
// GetMin returns the Min field value if set, zero value otherwise.
func (o *DistributionWidgetYAxis) GetMin() string {
if o == nil || o.Min == nil {
var ret string
return ret
}
return *o.Min
}
// GetMinOk returns a tuple with the Min field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *DistributionWidgetYAxis) GetMinOk() (*string, bool) {
if o == nil || o.Min == nil {
return nil, false
}
return o.Min, true
}
// HasMin returns a boolean if a field has been set.
func (o *DistributionWidgetYAxis) HasMin() bool {
if o != nil && o.Min != nil {
return true
}
return false
}
// SetMin gets a reference to the given string and assigns it to the Min field.
func (o *DistributionWidgetYAxis) SetMin(v string) {
o.Min = &v
}
// GetScale returns the Scale field value if set, zero value otherwise.
func (o *DistributionWidgetYAxis) GetScale() string {
if o == nil || o.Scale == nil {
var ret string
return ret
}
return *o.Scale
}
// GetScaleOk returns a tuple with the Scale field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *DistributionWidgetYAxis) GetScaleOk() (*string, bool) {
if o == nil || o.Scale == nil {
return nil, false
}
return o.Scale, true
}
// HasScale returns a boolean if a field has been set.
func (o *DistributionWidgetYAxis) HasScale() bool {
if o != nil && o.Scale != nil {
return true
}
return false
}
// SetScale gets a reference to the given string and assigns it to the Scale field.
func (o *DistributionWidgetYAxis) SetScale(v string) {
o.Scale = &v
}
// MarshalJSON serializes only the fields that have been set, matching the
// omitempty semantics of the generated model.
func (o DistributionWidgetYAxis) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	if o.IncludeZero != nil {
		toSerialize["include_zero"] = o.IncludeZero
	}
	if o.Label != nil {
		toSerialize["label"] = o.Label
	}
	if o.Max != nil {
		toSerialize["max"] = o.Max
	}
	if o.Min != nil {
		toSerialize["min"] = o.Min
	}
	if o.Scale != nil {
		toSerialize["scale"] = o.Scale
	}
	return json.Marshal(toSerialize)
}
// NullableDistributionWidgetYAxis wraps a DistributionWidgetYAxis pointer
// with an explicit isSet flag so that an explicit JSON null can be
// distinguished from an absent field.
type NullableDistributionWidgetYAxis struct {
	value *DistributionWidgetYAxis
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableDistributionWidgetYAxis) Get() *DistributionWidgetYAxis {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableDistributionWidgetYAxis) Set(val *DistributionWidgetYAxis) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set (or UnmarshalJSON) has been called.
func (v NullableDistributionWidgetYAxis) IsSet() bool {
	return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableDistributionWidgetYAxis) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableDistributionWidgetYAxis returns a wrapper already marked set.
func NewNullableDistributionWidgetYAxis(val *DistributionWidgetYAxis) *NullableDistributionWidgetYAxis {
	return &NullableDistributionWidgetYAxis{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value (nil encodes as JSON null).
func (v NullableDistributionWidgetYAxis) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes into the wrapped value and marks the wrapper set.
func (v *NullableDistributionWidgetYAxis) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package api
import (
"encoding/json"
)
// MeasurementSchemaColumn Definition of a measurement column
type MeasurementSchemaColumn struct {
	// Name is the column name. Required.
	Name string `json:"name" yaml:"name"`
	// Type is the column's semantic type. Required.
	Type ColumnSemanticType `json:"type" yaml:"type"`
	// DataType is the optional concrete data type; nil when unset.
	DataType *ColumnDataType `json:"dataType,omitempty" yaml:"dataType,omitempty"`
}
// NewMeasurementSchemaColumn instantiates a new MeasurementSchemaColumn object.
// It assigns default values to properties that define them and ensures the
// properties required by the API are set; the argument list will change if
// the set of required properties changes.
func NewMeasurementSchemaColumn(name string, type_ ColumnSemanticType) *MeasurementSchemaColumn {
	this := MeasurementSchemaColumn{}
	this.Name = name
	this.Type = type_
	return &this
}
// NewMeasurementSchemaColumnWithDefaults instantiates a new MeasurementSchemaColumn object.
// It only assigns default values to properties that define them; it does NOT
// guarantee that the properties required by the API are set.
func NewMeasurementSchemaColumnWithDefaults() *MeasurementSchemaColumn {
	this := MeasurementSchemaColumn{}
	return &this
}
// GetName returns the Name field value, or the zero value on a nil receiver.
func (o *MeasurementSchemaColumn) GetName() string {
	if o == nil {
		return ""
	}
	return o.Name
}
// GetNameOk returns a pointer to the Name field and a boolean that is false
// only on a nil receiver.
func (o *MeasurementSchemaColumn) GetNameOk() (*string, bool) {
	if o != nil {
		return &o.Name, true
	}
	return nil, false
}
// SetName overwrites the required Name field with v.
func (o *MeasurementSchemaColumn) SetName(v string) {
	o.Name = v
}
// GetType returns the Type field value, or the zero value on a nil receiver.
func (o *MeasurementSchemaColumn) GetType() ColumnSemanticType {
	if o != nil {
		return o.Type
	}
	var zero ColumnSemanticType
	return zero
}
// GetTypeOk returns a pointer to the Type field and a boolean that is false
// only on a nil receiver.
func (o *MeasurementSchemaColumn) GetTypeOk() (*ColumnSemanticType, bool) {
	if o != nil {
		return &o.Type, true
	}
	return nil, false
}
// SetType overwrites the required Type field with v.
func (o *MeasurementSchemaColumn) SetType(v ColumnSemanticType) {
	o.Type = v
}
// GetDataType returns the DataType field value if set, and the zero value
// otherwise (including on a nil receiver).
func (o *MeasurementSchemaColumn) GetDataType() ColumnDataType {
	if o != nil && o.DataType != nil {
		return *o.DataType
	}
	var zero ColumnDataType
	return zero
}
// GetDataTypeOk returns a pointer to the DataType field if set, nil otherwise,
// together with a boolean reporting whether the value has been set.
func (o *MeasurementSchemaColumn) GetDataTypeOk() (*ColumnDataType, bool) {
	if o != nil && o.DataType != nil {
		return o.DataType, true
	}
	return nil, false
}
// HasDataType reports whether the DataType field has been set.
func (o *MeasurementSchemaColumn) HasDataType() bool {
	return o != nil && o.DataType != nil
}
// SetDataType stores a copy of v and points the DataType field at it.
func (o *MeasurementSchemaColumn) SetDataType(v ColumnDataType) {
	dt := v
	o.DataType = &dt
}
// MarshalJSON serializes the column. Name and Type are required and always
// emitted; DataType is emitted only when set.
func (o MeasurementSchemaColumn) MarshalJSON() ([]byte, error) {
	// The generated code wrapped the required fields in `if true {}` guards,
	// which serve no purpose; they are dropped here with identical output.
	toSerialize := map[string]interface{}{
		"name": o.Name,
		"type": o.Type,
	}
	if o.DataType != nil {
		toSerialize["dataType"] = o.DataType
	}
	return json.Marshal(toSerialize)
}
// NullableMeasurementSchemaColumn wraps a MeasurementSchemaColumn pointer
// together with an explicit "has been set" flag, so that JSON null can be
// distinguished from an absent value.
type NullableMeasurementSchemaColumn struct {
	value *MeasurementSchemaColumn
	isSet bool
}
// Get returns the wrapped value (possibly nil).
func (v NullableMeasurementSchemaColumn) Get() *MeasurementSchemaColumn {
	return v.value
}
// Set stores val and marks the wrapper as set.
func (v *NullableMeasurementSchemaColumn) Set(val *MeasurementSchemaColumn) {
	v.value = val
	v.isSet = true
}
// IsSet reports whether Set or UnmarshalJSON has stored a value.
func (v NullableMeasurementSchemaColumn) IsSet() bool {
	return v.isSet
}
// Unset clears the wrapped value and the set flag.
func (v *NullableMeasurementSchemaColumn) Unset() {
	v.value = nil
	v.isSet = false
}
// NewNullableMeasurementSchemaColumn returns a wrapper holding val, already marked as set.
func NewNullableMeasurementSchemaColumn(val *MeasurementSchemaColumn) *NullableMeasurementSchemaColumn {
	return &NullableMeasurementSchemaColumn{value: val, isSet: true}
}
// MarshalJSON serializes the wrapped value (null when it is nil).
func (v NullableMeasurementSchemaColumn) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}
// UnmarshalJSON decodes src into the wrapped value and marks it as set,
// even when src is the JSON null literal.
func (v *NullableMeasurementSchemaColumn) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package main
import "math"
const (
	// Values for handling field of view algorithm execution.
	FOVRays   = 360 // Whole area around player; it may not work properly with other values.
	FOVLength = 5   // Sight range in tiles.
	FOVStep   = 1   // Angular step between rays; larger is faster but more error-prone.
)

var (
	// Precomputed ray direction tables, filled by InitializeFOVTables.
	// Treated as immutable after initialization.
	sinBase = []float64{}
	cosBase = []float64{}
)

// InitializeFOVTables precomputes the sine/cosine direction tables used by
// the raycasting functions. It is safe to call more than once: the tables
// are rebuilt from scratch instead of being appended to (the original
// appended unconditionally, so a second call silently doubled the table
// length and broke per-ray indexing).
func InitializeFOVTables() {
	sinBase = make([]float64, 0, FOVRays)
	cosBase = make([]float64, 0, FOVRays)
	for i := 0; i < FOVRays; i++ {
		rad := float64(i) * (math.Pi / 180) // degrees to radians
		sinBase = append(sinBase, math.Sin(rad))
		cosBase = append(cosBase, math.Cos(rad))
	}
}
// CastRays marks board tiles as explored by casting FOVRays/FOVStep rays
// outwards from (sx, sy), walking up to FOVLength tiles per ray. A ray stops
// at the map edge or at the first sight-blocking tile, which is itself
// revealed before the ray terminates.
// Algorithm source:
// http://www.roguebasin.com/index.php?title=Raycasting_in_python [20170712]
func CastRays(b Board, sx, sy int) {
	for ray := 0; ray < FOVRays; ray += FOVStep {
		dx := sinBase[ray]
		dy := cosBase[ray]
		fx, fy := float64(sx), float64(sy)
		b[RoundFloatToInt(fx)][RoundFloatToInt(fy)].Explored = true
		for step := 0; step < FOVLength; step++ {
			fx -= dx
			fy -= dy
			if fx < 0 || fy < 0 || fx > MapSizeX-1 || fy > MapSizeY-1 {
				break // ray left the map
			}
			cx, cy := RoundFloatToInt(fx), RoundFloatToInt(fy)
			b[cx][cy].Explored = true
			if b[cx][cy].BlocksSight {
				break // wall revealed, but nothing behind it
			}
		}
	}
}
// IsInFOV reports whether target (tx, ty) is visible from source (sx, sy)
// on board b. A tile always sees itself. Targets outside the FOVLength
// bounding box are rejected immediately; otherwise rays are cast exactly as
// in CastRays and the function reports whether any ray reaches the target
// before hitting the map edge or a sight-blocking tile.
// Algorithm source:
// http://www.roguebasin.com/index.php?title=Raycasting_in_python [20170712]
func IsInFOV(b Board, sx, sy, tx, ty int) bool {
	if sx == tx && sy == ty {
		return true
	}
	// Cheap bounding-box rejection before any rays are cast.
	if tx-sx > FOVLength || sx-tx > FOVLength ||
		ty-sy > FOVLength || sy-ty > FOVLength {
		return false
	}
	for ray := 0; ray < FOVRays; ray += FOVStep {
		dx := sinBase[ray]
		dy := cosBase[ray]
		fx, fy := float64(sx), float64(sy)
		for step := 0; step < FOVLength; step++ {
			fx -= dx
			fy -= dy
			if fx < 0 || fy < 0 || fx > MapSizeX-1 || fy > MapSizeY-1 {
				break // ray left the map
			}
			cx, cy := RoundFloatToInt(fx), RoundFloatToInt(fy)
			if cx == tx && cy == ty {
				return true
			}
			if b[cx][cy].BlocksSight {
				break // blocked before reaching the target
			}
		}
	}
	return false
}
// MonstersInFov returns every creature from cs that is alive, is not c
// itself, and lies within c's field of view on board b.
func (c *Creature) MonstersInFov(b Board, cs Creatures) Creatures {
	visible := Creatures{}
	for _, other := range cs {
		if other == c || other.HPCurrent <= 0 {
			continue // skip self and dead creatures
		}
		if IsInFOV(b, c.X, c.Y, other.X, other.Y) {
			visible = append(visible, other)
		}
	}
	return visible
}
// ObjectsInFov returns every object from o that lies within c's field of
// view on board b.
func (c *Creature) ObjectsInFov(b Board, o Objects) Objects {
	visible := Objects{}
	for _, obj := range o {
		if IsInFOV(b, c.X, c.Y, obj.X, obj.Y) {
			visible = append(visible, obj)
		}
	}
	return visible
}
// GetAllStringsFromTile returns the names of everything on tile (x, y):
// the names of all creatures and objects standing there, or — only when the
// tile holds neither — the name of the tile itself.
func GetAllStringsFromTile(x, y int, b Board, c Creatures, o Objects) []string {
	names := []string{}
	for _, cr := range c {
		if cr.X == x && cr.Y == y {
			names = append(names, cr.Name)
		}
	}
	for _, ob := range o {
		if ob.X == x && ob.Y == y {
			names = append(names, ob.Name)
		}
	}
	if len(names) == 0 {
		// Empty tile: fall back to the tile's own name.
		names = append(names, b[x][y].Name)
	}
	return names
}
// GetAllStringsInFovTile returns the names of everything on tile (tx, ty),
// but only when that tile is visible from (sx, sy); otherwise it returns an
// empty (non-nil) slice.
func GetAllStringsInFovTile(sx, sy, tx, ty int, b Board, c Creatures, o Objects) []string {
	if !IsInFOV(b, sx, sy, tx, ty) {
		return []string{}
	}
	return GetAllStringsFromTile(tx, ty, b, c, o)
}
// GetAliveCreatureFromTile returns the living creature occupying tile (x, y),
// or nil when no live creature stands there. Creatures are not expected to
// overlap, so the first live match is returned immediately instead of
// scanning the whole slice (the original kept scanning and returned the
// last match, which is equivalent under the no-overlap invariant but does
// needless work).
func GetAliveCreatureFromTile(x, y int, c Creatures) *Creature {
	for _, cr := range c {
		if cr.X == x && cr.Y == y && cr.HPCurrent > 0 {
			return cr
		}
	}
	return nil
}
func GetAllThingsFromTile(x, y int, b Board, c Creatures, o Objects) (*Tile, Creatures, Objects) {
/* GetAllThingsFromTile is function that takes coordinates, global map,
Creatures and Objects as arguments. It creates slice of Creature and
slice of Object that occupy coords, and returns them.
If these slices are empty, it returns board tile. */
var cs = Creatures{}
for i := 0; i < len(c); i++ {
if c[i].X == x && c[i].Y == y {
cs = append(cs, c[i])
}
}
var os = Objects{}
for j := 0; j < len(o); j++ {
if o[j].X == x && o[j].Y == y {
os = append(os, o[j])
}
}
if len(cs) != 0 || len(os) != 0 {
return nil, cs, os
}
return b[x][y], cs, os // cs and os are nil.
} | fov.go | 0.563138 | 0.405066 | fov.go | starcoder |
package processor
import (
"errors"
"fmt"
"sort"
"strconv"
"strings"
"time"
"github.com/Jeffail/benthos/v3/internal/bloblang"
"github.com/Jeffail/benthos/v3/internal/bloblang/field"
"github.com/Jeffail/benthos/v3/internal/docs"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
)
//------------------------------------------------------------------------------
func init() {
	// Registers the metric processor constructor and its user-facing
	// documentation with the global Constructors registry.
	Constructors[TypeMetric] = TypeSpec{
		constructor: NewMetric,
		Categories: []Category{
			CategoryUtility,
		},
		Summary: "Emit custom metrics by extracting values from messages.",
		Description: `
This processor works by evaluating an [interpolated field ` + "`value`" + `](/docs/configuration/interpolation#bloblang-queries) for each message and updating an emitted metric according to the [type](#types).
Custom metrics such as these are emitted along with Benthos internal metrics, where you can customize where metrics are sent, which metric names are emitted and rename them as/when appropriate. For more information check out the [metrics docs here](/docs/components/metrics/about).`,
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon("type", "The metric [type](#types) to create.").HasOptions(
				"counter",
				"counter_by",
				"gauge",
				"timing",
			),
			docs.FieldDeprecated("path"),
			docs.FieldCommon("name", "The name of the metric to create, this must be unique across all Benthos components otherwise it will overwrite those other metrics."),
			docs.FieldCommon(
				"labels", "A map of label names and values that can be used to enrich metrics. Labels are not supported by some metric destinations, in which case the metrics series are combined.",
				map[string]string{
					"type": "${! json(\"doc.type\") }",
					"topic": "${! meta(\"kafka_topic\") }",
				},
			).IsInterpolated().Map(),
			docs.FieldCommon("value", "For some metric types specifies a value to set, increment.").IsInterpolated(),
			PartsFieldSpec,
		},
		Examples: []docs.AnnotatedExample{
			{
				Title: "Counter",
				Summary: "In this example we emit a counter metric called `Foos`, which increments for every message processed, and we label the metric with some metadata about where the message came from and a field from the document that states what type it is. We also configure our metrics to emit to CloudWatch, and explicitly only allow our custom metric and some internal Benthos metrics to emit.",
				Config: `
pipeline:
  processors:
    - metric:
        name: Foos
        type: counter
        labels:
          topic: ${! meta("kafka_topic") }
          partition: ${! meta("kafka_partition") }
          type: ${! json("document.type").or("unknown") }
metrics:
  cloudwatch:
    namespace: ProdConsumer
    region: eu-west-1
    path_mapping: |
      root = if ![
        "Foos",
        "input.received",
        "output.sent"
      ].contains(this) { deleted() }
`,
			},
			{
				Title: "Gauge",
				Summary: "In this example we emit a gauge metric called `FooSize`, which is given a value extracted from JSON messages at the path `foo.size`. We then also configure our Prometheus metric exporter to only emit this custom metric and nothing else. We also label the metric with some metadata.",
				Config: `
pipeline:
  processors:
    - metric:
        name: FooSize
        type: gauge
        labels:
          topic: ${! meta("kafka_topic") }
        value: ${! json("foo.size") }
metrics:
  prometheus:
    path_mapping: 'if this != "FooSize" { deleted() }'
`,
			},
		},
		Footnotes: `
## Types
### ` + "`counter`" + `
Increments a counter by exactly 1, the contents of ` + "`value`" + ` are ignored
by this type.
### ` + "`counter_by`" + `
If the contents of ` + "`value`" + ` can be parsed as a positive integer value
then the counter is incremented by this value.
For example, the following configuration will increment the value of the
` + "`count.custom.field` metric by the contents of `field.some.value`" + `:
` + "```yaml" + `
metric:
  type: counter_by
  name: CountCustomField
  value: ${!json("field.some.value")}
` + "```" + `
### ` + "`gauge`" + `
If the contents of ` + "`value`" + ` can be parsed as a positive integer value
then the gauge is set to this value.
For example, the following configuration will set the value of the
` + "`gauge.custom.field` metric to the contents of `field.some.value`" + `:
` + "```yaml" + `
metric:
  type: gauge
  name: GaugeCustomField
  value: ${!json("field.some.value")}
` + "```" + `
### ` + "`timing`" + `
Equivalent to ` + "`gauge`" + ` where instead the metric is a timing.`,
	}
}
//------------------------------------------------------------------------------
// MetricConfig contains configuration fields for the Metric processor.
type MetricConfig struct {
	// Parts lists the message part indexes to process (handed to iterateParts).
	Parts []int `json:"parts" yaml:"parts"`
	// Type selects the metric kind: counter, counter_by, gauge or timing.
	Type string `json:"type" yaml:"type"`
	// Path is the deprecated metric name field; mutually exclusive with Name and Parts.
	Path string `json:"path" yaml:"path"`
	// Name is the metric name, which must be unique across all components.
	Name string `json:"name" yaml:"name"`
	// Labels maps label names to interpolated value expressions.
	Labels map[string]string `json:"labels" yaml:"labels"`
	// Value is the interpolated expression evaluated per message part.
	Value string `json:"value" yaml:"value"`
}
// NewMetricConfig returns a MetricConfig with default values: an unlabelled
// counter with empty name, path and value expression.
func NewMetricConfig() MetricConfig {
	return MetricConfig{
		Parts: []int{},
		Type: "counter",
		Path: "",
		Name: "",
		Labels: map[string]string{},
		Value: "",
	}
}
//------------------------------------------------------------------------------
// Metric is a processor that creates a metric from extracted values from a message part.
type Metric struct {
	parts []int // part indexes passed to iterateParts (see ProcessMessage)
	deprecated bool // true when the legacy `path` config field was used
	conf Config
	log log.Modular
	stats metrics.Type
	value field.Expression // evaluated per part to produce the metric value
	labels labels // resolved label name/expression pairs, sorted by name
	// Exactly one of the following stats is populated by NewMetric,
	// depending on the configured type and whether labels are present.
	mCounter metrics.StatCounter
	mGauge metrics.StatGauge
	mTimer metrics.StatTimer
	mCounterVec metrics.StatCounterVec
	mGaugeVec metrics.StatGaugeVec
	mTimerVec metrics.StatTimerVec
	handler func(string, int, types.Message) error // per-type handler selected in NewMetric
}
// labels is the ordered set of metric labels configured for a processor.
type labels []label

// label pairs a label name with the interpolated expression producing its value.
type label struct {
	name  string
	value field.Expression
}

// val resolves the label's value expression against part index of msg.
func (l *label) val(index int, msg types.Message) string {
	return l.value.String(index, msg)
}

// names returns the label names in declaration order.
func (l labels) names() []string {
	names := make([]string, 0, len(l)) // pre-sized: length is known up front
	for i := range l {
		names = append(names, l[i].name)
	}
	return names
}

// values resolves every label expression against part index of msg,
// in the same order as names.
func (l labels) values(index int, msg types.Message) []string {
	values := make([]string, 0, len(l)) // pre-sized: length is known up front
	for i := range l {
		values = append(values, l[i].val(index, msg))
	}
	return values
}
// unwrapMetric strips one layer of namespacing from t when t supports it,
// returning t unchanged otherwise.
func unwrapMetric(t metrics.Type) metrics.Type {
	if u, ok := t.(interface{ Unwrap() metrics.Type }); ok {
		return u.Unwrap()
	}
	return t
}
// NewMetric returns a Metric processor built from conf, or an error when the
// value/label expressions fail to parse, the configuration mixes the
// deprecated path field with newer fields, the metric name is empty, or the
// metric type is unknown.
func NewMetric(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	value, err := bloblang.NewField(conf.Metric.Value)
	if err != nil {
		return nil, fmt.Errorf("failed to parse value expression: %v", err)
	}

	m := &Metric{
		parts: conf.Metric.Parts,
		conf:  conf,
		log:   log,
		stats: stats,
		value: value,
	}

	name := conf.Metric.Name
	if len(conf.Metric.Path) > 0 {
		// Legacy configuration: `path` cannot be combined with newer fields.
		if len(conf.Metric.Name) > 0 {
			return nil, errors.New("cannot combine deprecated path field with name field")
		}
		if len(conf.Metric.Parts) > 0 {
			return nil, errors.New("cannot combine deprecated path field with parts field")
		}
		m.deprecated = true
		name = conf.Metric.Path
	}
	if len(name) == 0 {
		return nil, errors.New("metric name must not be empty")
	}

	if !m.deprecated {
		// Remove any namespaces from the metric type.
		stats = unwrapMetric(stats)
	}

	// Sort label names so the label order is deterministic regardless of map
	// iteration order.
	labelNames := make([]string, 0, len(conf.Metric.Labels))
	for n := range conf.Metric.Labels {
		labelNames = append(labelNames, n)
	}
	sort.Strings(labelNames)

	for _, n := range labelNames {
		v, err := bloblang.NewField(conf.Metric.Labels[n])
		if err != nil {
			return nil, fmt.Errorf("failed to parse label '%v' expression: %v", n, err)
		}
		m.labels = append(m.labels, label{
			name:  n,
			value: v,
		})
	}

	// The three counter variants share the same underlying stat and differ
	// only in handler; collapsing them removes the duplicated set-up the
	// original repeated in three separate switch cases.
	mType := strings.ToLower(conf.Metric.Type)
	switch mType {
	case "counter", "counter_parts", "counter_by":
		if len(m.labels) > 0 {
			m.mCounterVec = stats.GetCounterVec(name, m.labels.names())
		} else {
			m.mCounter = stats.GetCounter(name)
		}
		switch mType {
		case "counter":
			m.handler = m.handleCounter
		case "counter_parts":
			m.handler = m.handleCounterParts
		default:
			m.handler = m.handleCounterBy
		}
	case "gauge":
		if len(m.labels) > 0 {
			m.mGaugeVec = stats.GetGaugeVec(name, m.labels.names())
		} else {
			m.mGauge = stats.GetGauge(name)
		}
		m.handler = m.handleGauge
	case "timing":
		if len(m.labels) > 0 {
			m.mTimerVec = stats.GetTimerVec(name, m.labels.names())
		} else {
			m.mTimer = stats.GetTimer(name)
		}
		m.handler = m.handleTimer
	default:
		return nil, fmt.Errorf("metric type unrecognised: %v", conf.Metric.Type)
	}

	return m, nil
}
// handleCounter increments the counter (or its labelled vector) by exactly
// one; val is unused for this metric type.
func (m *Metric) handleCounter(val string, index int, msg types.Message) error {
	if len(m.labels) == 0 {
		m.mCounter.Incr(1)
		return nil
	}
	m.mCounterVec.With(m.labels.values(index, msg)...).Incr(1)
	return nil
}
// TODO: V4 Remove this
// handleCounterParts increments the counter (or its labelled vector) by the
// number of parts in msg; messages with no parts are ignored. val is unused
// for this metric type.
func (m *Metric) handleCounterParts(val string, index int, msg types.Message) error {
	if msg.Len() == 0 {
		return nil
	}
	if len(m.labels) > 0 {
		m.mCounterVec.With(m.labels.values(index, msg)...).Incr(int64(msg.Len()))
	} else {
		m.mCounter.Incr(int64(msg.Len()))
	}
	return nil
}
// handleCounterBy parses val as a non-negative integer and increments the
// counter (or its labelled vector) by that amount. Parse failures and
// negative values are returned as errors.
func (m *Metric) handleCounterBy(val string, index int, msg types.Message) error {
	n, err := strconv.ParseInt(val, 10, 64)
	if err != nil {
		return err
	}
	if n < 0 {
		return errors.New("value is negative")
	}
	if len(m.labels) == 0 {
		m.mCounter.Incr(n)
		return nil
	}
	m.mCounterVec.With(m.labels.values(index, msg)...).Incr(n)
	return nil
}
// handleGauge parses val as a non-negative integer and sets the gauge (or
// its labelled vector) to that value. Parse failures and negative values are
// returned as errors.
func (m *Metric) handleGauge(val string, index int, msg types.Message) error {
	n, err := strconv.ParseInt(val, 10, 64)
	if err != nil {
		return err
	}
	if n < 0 {
		return errors.New("value is negative")
	}
	if len(m.labels) == 0 {
		m.mGauge.Set(n)
		return nil
	}
	m.mGaugeVec.With(m.labels.values(index, msg)...).Set(n)
	return nil
}
// handleTimer parses val as a non-negative integer and records it as a
// timing on the timer (or its labelled vector). Parse failures and negative
// values are returned as errors.
func (m *Metric) handleTimer(val string, index int, msg types.Message) error {
	n, err := strconv.ParseInt(val, 10, 64)
	if err != nil {
		return err
	}
	if n < 0 {
		return errors.New("value is negative")
	}
	if len(m.labels) == 0 {
		m.mTimer.Timing(n)
		return nil
	}
	m.mTimerVec.With(m.labels.values(index, msg)...).Timing(n)
	return nil
}
// ProcessMessage applies the processor to a message. The message always
// passes through unchanged: handler errors are logged rather than failing
// the batch.
func (m *Metric) ProcessMessage(msg types.Message) ([]types.Message, types.Response) {
	if m.deprecated {
		// Legacy `path` configuration: evaluate against part 0 only.
		value := m.value.String(0, msg)
		if err := m.handler(value, 0, msg); err != nil {
			m.log.Errorf("Handler error: %v\n", err)
		}
		return []types.Message{msg}, nil
	}
	iterateParts(m.parts, msg, func(index int, p types.Part) error {
		value := m.value.String(index, msg)
		if err := m.handler(value, index, msg); err != nil {
			m.log.Errorf("Handler error: %v\n", err)
		}
		return nil
	})
	return []types.Message{msg}, nil
}
// CloseAsync shuts down the processor and stops processing requests.
// The metric processor holds no background resources, so this is a no-op.
func (m *Metric) CloseAsync() {
}
// WaitForClose blocks until the processor has closed down. Since CloseAsync
// is a no-op there is nothing to wait for and it always returns nil.
func (m *Metric) WaitForClose(timeout time.Duration) error {
	return nil
}
package vision
import (
"log"
pb "google.golang.org/genproto/googleapis/cloud/vision/v1"
)
// FaceLandmarks contains the positions of facial features detected by the service.
// A position field is nil when the corresponding landmark was absent from
// the annotation the struct was built from (see FaceFromLandmarks).
type FaceLandmarks struct {
	Eyebrows Eyebrows
	Eyes Eyes
	Ears Ears
	Nose Nose
	Mouth Mouth
	Chin Chin
	Forehead *pb.Position
}
// Eyebrows represents a face's eyebrows.
type Eyebrows struct {
	Left, Right Eyebrow
}
// Eyebrow represents a face's eyebrow.
type Eyebrow struct {
	Top, Left, Right *pb.Position
}
// Eyes represents a face's eyes.
type Eyes struct {
	Left, Right Eye
}
// Eye represents a face's eye.
type Eye struct {
	Left, Right, Top, Bottom, Center, Pupil *pb.Position
}
// Ears represents a face's ears.
type Ears struct {
	Left, Right *pb.Position
}
// Nose represents a face's nose.
type Nose struct {
	Left, Right, Top, Bottom, Tip *pb.Position
}
// Mouth represents a face's mouth.
type Mouth struct {
	Left, Center, Right, UpperLip, LowerLip *pb.Position
}
// Chin represents a face's chin.
type Chin struct {
	Left, Center, Right *pb.Position
}
// FaceFromLandmarks converts the list of face landmarks returned by the service
// to a FaceLandmarks struct. Landmarks missing from the input leave the
// corresponding field nil; unrecognized landmark types are logged and
// skipped rather than treated as errors.
func FaceFromLandmarks(landmarks []*pb.FaceAnnotation_Landmark) *FaceLandmarks {
	face := &FaceLandmarks{}
	for _, lm := range landmarks {
		// Map each service landmark type onto its slot in the struct.
		switch lm.Type {
		case pb.FaceAnnotation_Landmark_LEFT_OF_LEFT_EYEBROW:
			face.Eyebrows.Left.Left = lm.Position
		case pb.FaceAnnotation_Landmark_RIGHT_OF_LEFT_EYEBROW:
			face.Eyebrows.Left.Right = lm.Position
		case pb.FaceAnnotation_Landmark_LEFT_OF_RIGHT_EYEBROW:
			face.Eyebrows.Right.Left = lm.Position
		case pb.FaceAnnotation_Landmark_RIGHT_OF_RIGHT_EYEBROW:
			face.Eyebrows.Right.Right = lm.Position
		case pb.FaceAnnotation_Landmark_LEFT_EYEBROW_UPPER_MIDPOINT:
			face.Eyebrows.Left.Top = lm.Position
		case pb.FaceAnnotation_Landmark_RIGHT_EYEBROW_UPPER_MIDPOINT:
			face.Eyebrows.Right.Top = lm.Position
		case pb.FaceAnnotation_Landmark_MIDPOINT_BETWEEN_EYES:
			face.Nose.Top = lm.Position
		case pb.FaceAnnotation_Landmark_NOSE_TIP:
			face.Nose.Tip = lm.Position
		case pb.FaceAnnotation_Landmark_UPPER_LIP:
			face.Mouth.UpperLip = lm.Position
		case pb.FaceAnnotation_Landmark_LOWER_LIP:
			face.Mouth.LowerLip = lm.Position
		case pb.FaceAnnotation_Landmark_MOUTH_LEFT:
			face.Mouth.Left = lm.Position
		case pb.FaceAnnotation_Landmark_MOUTH_RIGHT:
			face.Mouth.Right = lm.Position
		case pb.FaceAnnotation_Landmark_MOUTH_CENTER:
			face.Mouth.Center = lm.Position
		case pb.FaceAnnotation_Landmark_NOSE_BOTTOM_RIGHT:
			face.Nose.Right = lm.Position
		case pb.FaceAnnotation_Landmark_NOSE_BOTTOM_LEFT:
			face.Nose.Left = lm.Position
		case pb.FaceAnnotation_Landmark_NOSE_BOTTOM_CENTER:
			face.Nose.Bottom = lm.Position
		case pb.FaceAnnotation_Landmark_LEFT_EYE:
			face.Eyes.Left.Center = lm.Position
		case pb.FaceAnnotation_Landmark_RIGHT_EYE:
			face.Eyes.Right.Center = lm.Position
		case pb.FaceAnnotation_Landmark_LEFT_EYE_TOP_BOUNDARY:
			face.Eyes.Left.Top = lm.Position
		case pb.FaceAnnotation_Landmark_LEFT_EYE_RIGHT_CORNER:
			face.Eyes.Left.Right = lm.Position
		case pb.FaceAnnotation_Landmark_LEFT_EYE_BOTTOM_BOUNDARY:
			face.Eyes.Left.Bottom = lm.Position
		case pb.FaceAnnotation_Landmark_LEFT_EYE_LEFT_CORNER:
			face.Eyes.Left.Left = lm.Position
		case pb.FaceAnnotation_Landmark_RIGHT_EYE_TOP_BOUNDARY:
			face.Eyes.Right.Top = lm.Position
		case pb.FaceAnnotation_Landmark_RIGHT_EYE_RIGHT_CORNER:
			face.Eyes.Right.Right = lm.Position
		case pb.FaceAnnotation_Landmark_RIGHT_EYE_BOTTOM_BOUNDARY:
			face.Eyes.Right.Bottom = lm.Position
		case pb.FaceAnnotation_Landmark_RIGHT_EYE_LEFT_CORNER:
			face.Eyes.Right.Left = lm.Position
		case pb.FaceAnnotation_Landmark_LEFT_EYE_PUPIL:
			face.Eyes.Left.Pupil = lm.Position
		case pb.FaceAnnotation_Landmark_RIGHT_EYE_PUPIL:
			face.Eyes.Right.Pupil = lm.Position
		case pb.FaceAnnotation_Landmark_LEFT_EAR_TRAGION:
			face.Ears.Left = lm.Position
		case pb.FaceAnnotation_Landmark_RIGHT_EAR_TRAGION:
			face.Ears.Right = lm.Position
		case pb.FaceAnnotation_Landmark_FOREHEAD_GLABELLA:
			face.Forehead = lm.Position
		case pb.FaceAnnotation_Landmark_CHIN_GNATHION:
			face.Chin.Center = lm.Position
		case pb.FaceAnnotation_Landmark_CHIN_LEFT_GONION:
			face.Chin.Left = lm.Position
		case pb.FaceAnnotation_Landmark_CHIN_RIGHT_GONION:
			face.Chin.Right = lm.Position
		default:
			log.Printf("vision: ignoring unknown face annotation landmark %s", lm.Type)
		}
	}
	return face
}
2D Rendering Code
*/
//-----------------------------------------------------------------------------
package render
import (
"image"
"image/color"
"image/png"
"math"
"os"
"github.com/deadsy/sdfx/sdf"
"github.com/llgcode/draw2d/draw2dimg"
)
//-----------------------------------------------------------------------------
// PNG is a png image object.
type PNG struct {
	name string // output file name used by Save
	bb sdf.Box2 // 2d region of the distance field covered by the image
	pixels sdf.V2i // image resolution in pixels
	m *sdf.Map2 // mapping between region coordinates and pixel coordinates
	img *image.RGBA // image under construction
}
// NewPNG returns an empty PNG render target covering bounding box bb at the
// given pixel resolution, written to the named file on Save.
func NewPNG(name string, bb sdf.Box2, pixels sdf.V2i) (*PNG, error) {
	m, err := sdf.NewMap2(bb, pixels, true)
	if err != nil {
		return nil, err
	}
	return &PNG{
		name:   name,
		bb:     bb,
		pixels: pixels,
		m:      m,
		img:    image.NewRGBA(image.Rect(0, 0, pixels[0]-1, pixels[1]-1)),
	}, nil
}
// RenderSDF2 renders a 2d signed distance field as gray scale.
// The min/max distance levels are derived from the field itself by passing
// zero for both to RenderSDF2MinMax.
func (d *PNG) RenderSDF2(s sdf.SDF2) {
	d.RenderSDF2MinMax(s, 0, 0)
}
// RenderSDF2MinMax renders a 2d signed distance field as gray scale (with defined min/max levels).
// The forced levels are used only when BOTH dmin and dmax are non-zero;
// otherwise the levels are derived by sampling the whole field first, which
// means the field is evaluated twice per pixel in that case.
func (d *PNG) RenderSDF2MinMax(s sdf.SDF2, dmin, dmax float64) {
	// sample the distance field
	minMaxSet := dmin != 0 && dmax != 0
	if !minMaxSet {
		//distance := make([]float64, d.pixels[0]*d.pixels[1]) // Less allocations: faster (70ms -> 60ms), use cache in SDF if needed
		for x := 0; x < d.pixels[0]; x++ {
			for y := 0; y < d.pixels[1]; y++ {
				// NOTE(review): this d shadows the *PNG receiver within the
				// inner loop body.
				d := s.Evaluate(d.m.ToV2(sdf.V2i{x, y}))
				dmax = math.Max(dmax, d)
				dmin = math.Min(dmin, d)
			}
		}
	}
	// scale and set the pixel values
	for x := 0; x < d.pixels[0]; x++ {
		for y := 0; y < d.pixels[1]; y++ {
			dist := s.Evaluate(d.m.ToV2(sdf.V2i{x, y}))
			d.img.Set(x, y, color.Gray{Y: uint8(255 * imageColor2(dist, dmin, dmax))})
		}
	}
}
// imageColor2 maps an SDF2 distance sample to a grayscale level in [0, 1],
// given the reference minimum (dmin <= 0) and maximum (dmax >= 0) distances.
// The surface (dist == 0) maps to 0.5, positive (outside) distances scale
// into (0.5, 1] and negative (inside) distances scale into [0, 0.5).
// Scaling the two half-ranges independently keeps the surface pinned at
// mid-gray even when |dmin| and |dmax| differ wildly.
func imageColor2(dist, dmin, dmax float64) float64 {
	if dist == 0 {
		// Also guards the degenerate dist == dmax == 0 case, which used to
		// evaluate 0/0 and propagate NaN into the pixel value.
		return 0.5
	}
	if dist > 0 {
		// Clamp due to possibly forced min and max.
		return math.Max(0.5, math.Min(1, 0.5+0.5*(dist/dmax)))
	}
	return math.Max(0, math.Min(0.5, 0.5*((dist-dmin)/(-dmin))))
}
// Line draws a one-pixel-wide red line from p0 to p1 onto the image.
func (d *PNG) Line(p0, p1 sdf.V2) {
	gc := draw2dimg.NewGraphicContext(d.img)
	red := color.RGBA{0xff, 0, 0, 0xff}
	gc.SetFillColor(red)
	gc.SetStrokeColor(red)
	gc.SetLineWidth(1)
	a := d.m.ToV2i(p0)
	b := d.m.ToV2i(p1)
	gc.MoveTo(float64(a[0]), float64(a[1]))
	gc.LineTo(float64(b[0]), float64(b[1]))
	gc.Stroke()
}
// Lines draws a one-pixel-wide red polyline through the points of s.
// An empty point set draws nothing (the original indexed s[0]
// unconditionally and panicked on an empty slice).
func (d *PNG) Lines(s sdf.V2Set) {
	if len(s) == 0 {
		return
	}
	gc := draw2dimg.NewGraphicContext(d.img)
	red := color.RGBA{0xff, 0, 0, 0xff}
	gc.SetFillColor(red)
	gc.SetStrokeColor(red)
	gc.SetLineWidth(1)
	start := d.m.ToV2i(s[0])
	gc.MoveTo(float64(start[0]), float64(start[1]))
	for _, v := range s[1:] {
		p := d.m.ToV2i(v)
		gc.LineTo(float64(p[0]), float64(p[1]))
	}
	gc.Stroke()
}
// Triangle draws the outline of t by tracing its three edges and closing
// back to the first vertex.
func (d *PNG) Triangle(t Triangle2) {
	outline := []sdf.V2{t[0], t[1], t[2], t[0]}
	d.Lines(outline)
}
// Save encodes the rendered image as PNG into the file named at
// construction time. Close errors are now reported: the original discarded
// them via a bare deferred Close, which can hide a failed flush to disk.
func (d *PNG) Save() error {
	f, err := os.Create(d.name)
	if err != nil {
		return err
	}
	if err := png.Encode(f, d.img); err != nil {
		f.Close() // best effort; the encode error takes precedence
		return err
	}
	return f.Close()
}
// Image returns the rendered image instead of writing it to a file.
// The returned image is the internal buffer, not a copy, so later draw
// calls on the PNG will be visible through it.
func (d *PNG) Image() *image.RGBA {
	return d.img
}
//----------------------------------------------------------------------------- | render/png.go | 0.735831 | 0.498291 | png.go | starcoder |
package graph
import (
i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e "time"
i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55 "github.com/microsoft/kiota/abstractions/go/serialization"
)
// AssignedPlan provides operations to manage the drive singleton.
// All pointer fields are nil until explicitly set; the accessors below are
// nil-receiver safe.
type AssignedPlan struct {
    // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
    additionalData map[string]interface{};
    // The date and time at which the plan was assigned. The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z.
    assignedDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time;
    // Condition of the capability assignment. The possible values are Enabled, Warning, Suspended, Deleted, LockedOut. See a detailed description of each value.
    capabilityStatus *string;
    // The name of the service; for example, exchange.
    service *string;
    // A GUID that identifies the service plan. For a complete list of GUIDs and their equivalent friendly service names, see Product names and service plan identifiers for licensing.
    servicePlanId *string;
}
// NewAssignedPlan instantiates a new assignedPlan and sets the default values.
// The additionalData map is initialized empty so callers can add entries
// without a nil check.
func NewAssignedPlan()(*AssignedPlan) {
    m := &AssignedPlan{
    }
    m.SetAdditionalData(make(map[string]interface{}));
    return m
}
// CreateAssignedPlanFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value.
// No discriminator is actually inspected here: parseNode is unused and a
// plain AssignedPlan is always returned.
func CreateAssignedPlanFromDiscriminatorValue(parseNode i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode)(i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable, error) {
    return NewAssignedPlan(), nil
}
// GetAdditionalData gets the additionalData property value: additional data
// found when deserializing that is not described in the OpenAPI description.
// Returns nil on a nil receiver.
func (m *AssignedPlan) GetAdditionalData()(map[string]interface{}) {
    if m == nil {
        return nil
    }
    return m.additionalData
}
// GetAssignedDateTime gets the assignedDateTime property value: the UTC
// ISO 8601 timestamp at which the plan was assigned. Returns nil when unset
// or on a nil receiver.
func (m *AssignedPlan) GetAssignedDateTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
    if m == nil {
        return nil
    }
    return m.assignedDateTime
}
// GetCapabilityStatus gets the capabilityStatus property value: the
// condition of the capability assignment (Enabled, Warning, Suspended,
// Deleted or LockedOut). Returns nil when unset or on a nil receiver.
func (m *AssignedPlan) GetCapabilityStatus()(*string) {
    if m == nil {
        return nil
    }
    return m.capabilityStatus
}
// GetFieldDeserializers the deserialization information for the current model
func (m *AssignedPlan) GetFieldDeserializers()(map[string]func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode)(error)) {
res := make(map[string]func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode)(error))
res["assignedDateTime"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
val, err := n.GetTimeValue()
if err != nil {
return err
}
if val != nil {
m.SetAssignedDateTime(val)
}
return nil
}
res["capabilityStatus"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
val, err := n.GetStringValue()
if err != nil {
return err
}
if val != nil {
m.SetCapabilityStatus(val)
}
return nil
}
res["service"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
val, err := n.GetStringValue()
if err != nil {
return err
}
if val != nil {
m.SetService(val)
}
return nil
}
res["servicePlanId"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
val, err := n.GetStringValue()
if err != nil {
return err
}
if val != nil {
m.SetServicePlanId(val)
}
return nil
}
return res
}
// GetService gets the service property value. The name of the service; for example, exchange.
func (m *AssignedPlan) GetService()(*string) {
if m == nil {
return nil
} else {
return m.service
}
}
// GetServicePlanId gets the servicePlanId property value. A GUID that identifies the service plan. For a complete list of GUIDs and their equivalent friendly service names, see Product names and service plan identifiers for licensing.
func (m *AssignedPlan) GetServicePlanId()(*string) {
if m == nil {
return nil
} else {
return m.servicePlanId
}
}
func (m *AssignedPlan) IsNil()(bool) {
return m == nil
}
// Serialize serializes information the current object
func (m *AssignedPlan) Serialize(writer i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.SerializationWriter)(error) {
{
err := writer.WriteTimeValue("assignedDateTime", m.GetAssignedDateTime())
if err != nil {
return err
}
}
{
err := writer.WriteStringValue("capabilityStatus", m.GetCapabilityStatus())
if err != nil {
return err
}
}
{
err := writer.WriteStringValue("service", m.GetService())
if err != nil {
return err
}
}
{
err := writer.WriteStringValue("servicePlanId", m.GetServicePlanId())
if err != nil {
return err
}
}
{
err := writer.WriteAdditionalData(m.GetAdditionalData())
if err != nil {
return err
}
}
return nil
}
// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *AssignedPlan) SetAdditionalData(value map[string]interface{})() {
if m != nil {
m.additionalData = value
}
}
// SetAssignedDateTime sets the assignedDateTime property value. The date and time at which the plan was assigned. The Timestamp type represents date and time information using ISO 8601 format and is always in UTC time. For example, midnight UTC on Jan 1, 2014 is 2014-01-01T00:00:00Z.
func (m *AssignedPlan) SetAssignedDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time)() {
if m != nil {
m.assignedDateTime = value
}
}
// SetCapabilityStatus sets the capabilityStatus property value. Condition of the capability assignment. The possible values are Enabled, Warning, Suspended, Deleted, LockedOut. See a detailed description of each value.
func (m *AssignedPlan) SetCapabilityStatus(value *string)() {
if m != nil {
m.capabilityStatus = value
}
}
// SetService sets the service property value. The name of the service; for example, exchange.
func (m *AssignedPlan) SetService(value *string)() {
if m != nil {
m.service = value
}
}
// SetServicePlanId sets the servicePlanId property value. A GUID that identifies the service plan. For a complete list of GUIDs and their equivalent friendly service names, see Product names and service plan identifiers for licensing.
func (m *AssignedPlan) SetServicePlanId(value *string)() {
if m != nil {
m.servicePlanId = value
}
} | models/microsoft/graph/assigned_plan.go | 0.735547 | 0.401101 | assigned_plan.go | starcoder |
// Package aes implements AES encryption (formerly Rijndael), as defined in
// U.S. Federal Information Processing Standards Publication 197.
package aes
// This file contains AES constants - 8720 bytes of initialized data.
// http://www.csrc.nist.gov/publications/fips/fips197/fips-197.pdf
// AES is based on the mathematical behavior of binary polynomials
// (polynomials over GF(2)) modulo the irreducible polynomial x⁸ + x⁴ + x² + x + 1.
// Addition of these binary polynomials corresponds to binary xor.
// Reducing mod poly corresponds to binary xor with poly every
// time a 0x100 bit appears.
const poly = 1<<8 | 1<<4 | 1<<3 | 1<<1 | 1<<0 // x⁸ + x⁴ + x² + x + 1
// Powers of x mod poly in GF(2).
var powx = [16]byte{
0x01,
0x02,
0x04,
0x08,
0x10,
0x20,
0x40,
0x80,
0x1b,
0x36,
0x6c,
0xd8,
0xab,
0x4d,
0x9a,
0x2f,
}
// FIPS-197 Figure 7. S-box substitution values in hexadecimal format.
var sbox0 = [256]byte{
0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76,
0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0,
0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15,
0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75,
0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84,
0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf,
0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8,
0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2,
0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73,
0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb,
0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79,
0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08,
0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a,
0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e,
0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf,
0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16,
}
// FIPS-197 Figure 14. Inverse S-box substitution values in hexadecimal format.
var sbox1 = [256]byte{
0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb,
0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb,
0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e,
0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25,
0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92,
0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84,
0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06,
0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b,
0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73,
0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e,
0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b,
0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4,
0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f,
0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef,
0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61,
0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d,
}
// Lookup tables for encryption.
// These can be recomputed by adapting the tests in aes_test.go.
var te = [4][256]uint32{
{
0xc66363a5, 0xf87c7c84, 0xee777799, 0xf67b7b8d, 0xfff2f20d, 0xd66b6bbd, 0xde6f6fb1, 0x91c5c554,
0x60303050, 0x02010103, 0xce6767a9, 0x562b2b7d, 0xe7fefe19, 0xb5d7d762, 0x4dababe6, 0xec76769a,
0x8fcaca45, 0x1f82829d, 0x89c9c940, 0xfa7d7d87, 0xeffafa15, 0xb25959eb, 0x8e4747c9, 0xfbf0f00b,
0x41adadec, 0xb3d4d467, 0x5fa2a2fd, 0x45afafea, 0x239c9cbf, 0x53a4a4f7, 0xe4727296, 0x9bc0c05b,
0x75b7b7c2, 0xe1fdfd1c, 0x3d9393ae, 0x4c26266a, 0x6c36365a, 0x7e3f3f41, 0xf5f7f702, 0x83cccc4f,
0x6834345c, 0x51a5a5f4, 0xd1e5e534, 0xf9f1f108, 0xe2717193, 0xabd8d873, 0x62313153, 0x2a15153f,
0x0804040c, 0x95c7c752, 0x46232365, 0x9dc3c35e, 0x30181828, 0x379696a1, 0x0a05050f, 0x2f9a9ab5,
0x0e070709, 0x24121236, 0x1b80809b, 0xdfe2e23d, 0xcdebeb26, 0x4e272769, 0x7fb2b2cd, 0xea75759f,
0x1209091b, 0x1d83839e, 0x582c2c74, 0x341a1a2e, 0x361b1b2d, 0xdc6e6eb2, 0xb45a5aee, 0x5ba0a0fb,
0xa45252f6, 0x763b3b4d, 0xb7d6d661, 0x7db3b3ce, 0x5229297b, 0xdde3e33e, 0x5e2f2f71, 0x13848497,
0xa65353f5, 0xb9d1d168, 0x00000000, 0xc1eded2c, 0x40202060, 0xe3fcfc1f, 0x79b1b1c8, 0xb65b5bed,
0xd46a6abe, 0x8dcbcb46, 0x67bebed9, 0x7239394b, 0x944a4ade, 0x984c4cd4, 0xb05858e8, 0x85cfcf4a,
0xbbd0d06b, 0xc5efef2a, 0x4faaaae5, 0xedfbfb16, 0x864343c5, 0x9a4d4dd7, 0x66333355, 0x11858594,
0x8a4545cf, 0xe9f9f910, 0x04020206, 0xfe7f7f81, 0xa05050f0, 0x783c3c44, 0x259f9fba, 0x4ba8a8e3,
0xa25151f3, 0x5da3a3fe, 0x804040c0, 0x058f8f8a, 0x3f9292ad, 0x219d9dbc, 0x70383848, 0xf1f5f504,
0x63bcbcdf, 0x77b6b6c1, 0xafdada75, 0x42212163, 0x20101030, 0xe5ffff1a, 0xfdf3f30e, 0xbfd2d26d,
0x81cdcd4c, 0x180c0c14, 0x26131335, 0xc3ecec2f, 0xbe5f5fe1, 0x359797a2, 0x884444cc, 0x2e171739,
0x93c4c457, 0x55a7a7f2, 0xfc7e7e82, 0x7a3d3d47, 0xc86464ac, 0xba5d5de7, 0x3219192b, 0xe6737395,
0xc06060a0, 0x19818198, 0x9e4f4fd1, 0xa3dcdc7f, 0x44222266, 0x542a2a7e, 0x3b9090ab, 0x0b888883,
0x8c4646ca, 0xc7eeee29, 0x6bb8b8d3, 0x2814143c, 0xa7dede79, 0xbc5e5ee2, 0x160b0b1d, 0xaddbdb76,
0xdbe0e03b, 0x64323256, 0x743a3a4e, 0x140a0a1e, 0x924949db, 0x0c06060a, 0x4824246c, 0xb85c5ce4,
0x9fc2c25d, 0xbdd3d36e, 0x43acacef, 0xc46262a6, 0x399191a8, 0x319595a4, 0xd3e4e437, 0xf279798b,
0xd5e7e732, 0x8bc8c843, 0x6e373759, 0xda6d6db7, 0x018d8d8c, 0xb1d5d564, 0x9c4e4ed2, 0x49a9a9e0,
0xd86c6cb4, 0xac5656fa, 0xf3f4f407, 0xcfeaea25, 0xca6565af, 0xf47a7a8e, 0x47aeaee9, 0x10080818,
0x6fbabad5, 0xf0787888, 0x4a25256f, 0x5c2e2e72, 0x381c1c24, 0x57a6a6f1, 0x73b4b4c7, 0x97c6c651,
0xcbe8e823, 0xa1dddd7c, 0xe874749c, 0x3e1f1f21, 0x964b4bdd, 0x61bdbddc, 0x0d8b8b86, 0x0f8a8a85,
0xe0707090, 0x7c3e3e42, 0x71b5b5c4, 0xcc6666aa, 0x904848d8, 0x06030305, 0xf7f6f601, 0x1c0e0e12,
0xc26161a3, 0x6a35355f, 0xae5757f9, 0x69b9b9d0, 0x17868691, 0x99c1c158, 0x3a1d1d27, 0x279e9eb9,
0xd9e1e138, 0xebf8f813, 0x2b9898b3, 0x22111133, 0xd26969bb, 0xa9d9d970, 0x078e8e89, 0x339494a7,
0x2d9b9bb6, 0x3c1e1e22, 0x15878792, 0xc9e9e920, 0x87cece49, 0xaa5555ff, 0x50282878, 0xa5dfdf7a,
0x038c8c8f, 0x59a1a1f8, 0x09898980, 0x1a0d0d17, 0x65bfbfda, 0xd7e6e631, 0x844242c6, 0xd06868b8,
0x824141c3, 0x299999b0, 0x5a2d2d77, 0x1e0f0f11, 0x7bb0b0cb, 0xa85454fc, 0x6dbbbbd6, 0x2c16163a,
},
{
0xa5c66363, 0x84f87c7c, 0x99ee7777, 0x8df67b7b, 0x0dfff2f2, 0xbdd66b6b, 0xb1de6f6f, 0x5491c5c5,
0x50603030, 0x03020101, 0xa9ce6767, 0x7d562b2b, 0x19e7fefe, 0x62b5d7d7, 0xe64dabab, 0x9aec7676,
0x458fcaca, 0x9d1f8282, 0x4089c9c9, 0x87fa7d7d, 0x15effafa, 0xebb25959, 0xc98e4747, 0x0bfbf0f0,
0xec41adad, 0x67b3d4d4, 0xfd5fa2a2, 0xea45afaf, 0xbf239c9c, 0xf753a4a4, 0x96e47272, 0x5b9bc0c0,
0xc275b7b7, 0x1ce1fdfd, 0xae3d9393, 0x6a4c2626, 0x5a6c3636, 0x417e3f3f, 0x02f5f7f7, 0x4f83cccc,
0x5c683434, 0xf451a5a5, 0x34d1e5e5, 0x08f9f1f1, 0x93e27171, 0x73abd8d8, 0x53623131, 0x3f2a1515,
0x0c080404, 0x5295c7c7, 0x65462323, 0x5e9dc3c3, 0x28301818, 0xa1379696, 0x0f0a0505, 0xb52f9a9a,
0x090e0707, 0x36241212, 0x9b1b8080, 0x3ddfe2e2, 0x26cdebeb, 0x694e2727, 0xcd7fb2b2, 0x9fea7575,
0x1b120909, 0x9e1d8383, 0x74582c2c, 0x2e341a1a, 0x2d361b1b, 0xb2dc6e6e, 0xeeb45a5a, 0xfb5ba0a0,
0xf6a45252, 0x4d763b3b, 0x61b7d6d6, 0xce7db3b3, 0x7b522929, 0x3edde3e3, 0x715e2f2f, 0x97138484,
0xf5a65353, 0x68b9d1d1, 0x00000000, 0x2cc1eded, 0x60402020, 0x1fe3fcfc, 0xc879b1b1, 0xedb65b5b,
0xbed46a6a, 0x468dcbcb, 0xd967bebe, 0x4b723939, 0xde944a4a, 0xd4984c4c, 0xe8b05858, 0x4a85cfcf,
0x6bbbd0d0, 0x2ac5efef, 0xe54faaaa, 0x16edfbfb, 0xc5864343, 0xd79a4d4d, 0x55663333, 0x94118585,
0xcf8a4545, 0x10e9f9f9, 0x06040202, 0x81fe7f7f, 0xf0a05050, 0x44783c3c, 0xba259f9f, 0xe34ba8a8,
0xf3a25151, 0xfe5da3a3, 0xc0804040, 0x8a058f8f, 0xad3f9292, 0xbc219d9d, 0x48703838, 0x04f1f5f5,
0xdf63bcbc, 0xc177b6b6, 0x75afdada, 0x63422121, 0x30201010, 0x1ae5ffff, 0x0efdf3f3, 0x6dbfd2d2,
0x4c81cdcd, 0x14180c0c, 0x35261313, 0x2fc3ecec, 0xe1be5f5f, 0xa2359797, 0xcc884444, 0x392e1717,
0x5793c4c4, 0xf255a7a7, 0x82fc7e7e, 0x477a3d3d, 0xacc86464, 0xe7ba5d5d, 0x2b321919, 0x95e67373,
0xa0c06060, 0x98198181, 0xd19e4f4f, 0x7fa3dcdc, 0x66442222, 0x7e542a2a, 0xab3b9090, 0x830b8888,
0xca8c4646, 0x29c7eeee, 0xd36bb8b8, 0x3c281414, 0x79a7dede, 0xe2bc5e5e, 0x1d160b0b, 0x76addbdb,
0x3bdbe0e0, 0x56643232, 0x4e743a3a, 0x1e140a0a, 0xdb924949, 0x0a0c0606, 0x6c482424, 0xe4b85c5c,
0x5d9fc2c2, 0x6ebdd3d3, 0xef43acac, 0xa6c46262, 0xa8399191, 0xa4319595, 0x37d3e4e4, 0x8bf27979,
0x32d5e7e7, 0x438bc8c8, 0x596e3737, 0xb7da6d6d, 0x8c018d8d, 0x64b1d5d5, 0xd29c4e4e, 0xe049a9a9,
0xb4d86c6c, 0xfaac5656, 0x07f3f4f4, 0x25cfeaea, 0xafca6565, 0x8ef47a7a, 0xe947aeae, 0x18100808,
0xd56fbaba, 0x88f07878, 0x6f4a2525, 0x725c2e2e, 0x24381c1c, 0xf157a6a6, 0xc773b4b4, 0x5197c6c6,
0x23cbe8e8, 0x7ca1dddd, 0x9ce87474, 0x213e1f1f, 0xdd964b4b, 0xdc61bdbd, 0x860d8b8b, 0x850f8a8a,
0x90e07070, 0x427c3e3e, 0xc471b5b5, 0xaacc6666, 0xd8904848, 0x05060303, 0x01f7f6f6, 0x121c0e0e,
0xa3c26161, 0x5f6a3535, 0xf9ae5757, 0xd069b9b9, 0x91178686, 0x5899c1c1, 0x273a1d1d, 0xb9279e9e,
0x38d9e1e1, 0x13ebf8f8, 0xb32b9898, 0x33221111, 0xbbd26969, 0x70a9d9d9, 0x89078e8e, 0xa7339494,
0xb62d9b9b, 0x223c1e1e, 0x92158787, 0x20c9e9e9, 0x4987cece, 0xffaa5555, 0x78502828, 0x7aa5dfdf,
0x8f038c8c, 0xf859a1a1, 0x80098989, 0x171a0d0d, 0xda65bfbf, 0x31d7e6e6, 0xc6844242, 0xb8d06868,
0xc3824141, 0xb0299999, 0x775a2d2d, 0x111e0f0f, 0xcb7bb0b0, 0xfca85454, 0xd66dbbbb, 0x3a2c1616,
},
{
0x63a5c663, 0x7c84f87c, 0x7799ee77, 0x7b8df67b, 0xf20dfff2, 0x6bbdd66b, 0x6fb1de6f, 0xc55491c5,
0x30506030, 0x01030201, 0x67a9ce67, 0x2b7d562b, 0xfe19e7fe, 0xd762b5d7, 0xabe64dab, 0x769aec76,
0xca458fca, 0x829d1f82, 0xc94089c9, 0x7d87fa7d, 0xfa15effa, 0x59ebb259, 0x47c98e47, 0xf00bfbf0,
0xadec41ad, 0xd467b3d4, 0xa2fd5fa2, 0xafea45af, 0x9cbf239c, 0xa4f753a4, 0x7296e472, 0xc05b9bc0,
0xb7c275b7, 0xfd1ce1fd, 0x93ae3d93, 0x266a4c26, 0x365a6c36, 0x3f417e3f, 0xf702f5f7, 0xcc4f83cc,
0x345c6834, 0xa5f451a5, 0xe534d1e5, 0xf108f9f1, 0x7193e271, 0xd873abd8, 0x31536231, 0x153f2a15,
0x040c0804, 0xc75295c7, 0x23654623, 0xc35e9dc3, 0x18283018, 0x96a13796, 0x050f0a05, 0x9ab52f9a,
0x07090e07, 0x12362412, 0x809b1b80, 0xe23ddfe2, 0xeb26cdeb, 0x27694e27, 0xb2cd7fb2, 0x759fea75,
0x091b1209, 0x839e1d83, 0x2c74582c, 0x1a2e341a, 0x1b2d361b, 0x6eb2dc6e, 0x5aeeb45a, 0xa0fb5ba0,
0x52f6a452, 0x3b4d763b, 0xd661b7d6, 0xb3ce7db3, 0x297b5229, 0xe33edde3, 0x2f715e2f, 0x84971384,
0x53f5a653, 0xd168b9d1, 0x00000000, 0xed2cc1ed, 0x20604020, 0xfc1fe3fc, 0xb1c879b1, 0x5bedb65b,
0x6abed46a, 0xcb468dcb, 0xbed967be, 0x394b7239, 0x4ade944a, 0x4cd4984c, 0x58e8b058, 0xcf4a85cf,
0xd06bbbd0, 0xef2ac5ef, 0xaae54faa, 0xfb16edfb, 0x43c58643, 0x4dd79a4d, 0x33556633, 0x85941185,
0x45cf8a45, 0xf910e9f9, 0x02060402, 0x7f81fe7f, 0x50f0a050, 0x3c44783c, 0x9fba259f, 0xa8e34ba8,
0x51f3a251, 0xa3fe5da3, 0x40c08040, 0x8f8a058f, 0x92ad3f92, 0x9dbc219d, 0x38487038, 0xf504f1f5,
0xbcdf63bc, 0xb6c177b6, 0xda75afda, 0x21634221, 0x10302010, 0xff1ae5ff, 0xf30efdf3, 0xd26dbfd2,
0xcd4c81cd, 0x0c14180c, 0x13352613, 0xec2fc3ec, 0x5fe1be5f, 0x97a23597, 0x44cc8844, 0x17392e17,
0xc45793c4, 0xa7f255a7, 0x7e82fc7e, 0x3d477a3d, 0x64acc864, 0x5de7ba5d, 0x192b3219, 0x7395e673,
0x60a0c060, 0x81981981, 0x4fd19e4f, 0xdc7fa3dc, 0x22664422, 0x2a7e542a, 0x90ab3b90, 0x88830b88,
0x46ca8c46, 0xee29c7ee, 0xb8d36bb8, 0x143c2814, 0xde79a7de, 0x5ee2bc5e, 0x0b1d160b, 0xdb76addb,
0xe03bdbe0, 0x32566432, 0x3a4e743a, 0x0a1e140a, 0x49db9249, 0x060a0c06, 0x246c4824, 0x5ce4b85c,
0xc25d9fc2, 0xd36ebdd3, 0xacef43ac, 0x62a6c462, 0x91a83991, 0x95a43195, 0xe437d3e4, 0x798bf279,
0xe732d5e7, 0xc8438bc8, 0x37596e37, 0x6db7da6d, 0x8d8c018d, 0xd564b1d5, 0x4ed29c4e, 0xa9e049a9,
0x6cb4d86c, 0x56faac56, 0xf407f3f4, 0xea25cfea, 0x65afca65, 0x7a8ef47a, 0xaee947ae, 0x08181008,
0xbad56fba, 0x7888f078, 0x256f4a25, 0x2e725c2e, 0x1c24381c, 0xa6f157a6, 0xb4c773b4, 0xc65197c6,
0xe823cbe8, 0xdd7ca1dd, 0x749ce874, 0x1f213e1f, 0x4bdd964b, 0xbddc61bd, 0x8b860d8b, 0x8a850f8a,
0x7090e070, 0x3e427c3e, 0xb5c471b5, 0x66aacc66, 0x48d89048, 0x03050603, 0xf601f7f6, 0x0e121c0e,
0x61a3c261, 0x355f6a35, 0x57f9ae57, 0xb9d069b9, 0x86911786, 0xc15899c1, 0x1d273a1d, 0x9eb9279e,
0xe138d9e1, 0xf813ebf8, 0x98b32b98, 0x11332211, 0x69bbd269, 0xd970a9d9, 0x8e89078e, 0x94a73394,
0x9bb62d9b, 0x1e223c1e, 0x87921587, 0xe920c9e9, 0xce4987ce, 0x55ffaa55, 0x28785028, 0xdf7aa5df,
0x8c8f038c, 0xa1f859a1, 0x89800989, 0x0d171a0d, 0xbfda65bf, 0xe631d7e6, 0x42c68442, 0x68b8d068,
0x41c38241, 0x99b02999, 0x2d775a2d, 0x0f111e0f, 0xb0cb7bb0, 0x54fca854, 0xbbd66dbb, 0x163a2c16,
},
{
0x6363a5c6, 0x7c7c84f8, 0x777799ee, 0x7b7b8df6, 0xf2f20dff, 0x6b6bbdd6, 0x6f6fb1de, 0xc5c55491,
0x30305060, 0x01010302, 0x6767a9ce, 0x2b2b7d56, 0xfefe19e7, 0xd7d762b5, 0xababe64d, 0x76769aec,
0xcaca458f, 0x82829d1f, 0xc9c94089, 0x7d7d87fa, 0xfafa15ef, 0x5959ebb2, 0x4747c98e, 0xf0f00bfb,
0xadadec41, 0xd4d467b3, 0xa2a2fd5f, 0xafafea45, 0x9c9cbf23, 0xa4a4f753, 0x727296e4, 0xc0c05b9b,
0xb7b7c275, 0xfdfd1ce1, 0x9393ae3d, 0x26266a4c, 0x36365a6c, 0x3f3f417e, 0xf7f702f5, 0xcccc4f83,
0x34345c68, 0xa5a5f451, 0xe5e534d1, 0xf1f108f9, 0x717193e2, 0xd8d873ab, 0x31315362, 0x15153f2a,
0x04040c08, 0xc7c75295, 0x23236546, 0xc3c35e9d, 0x18182830, 0x9696a137, 0x05050f0a, 0x9a9ab52f,
0x0707090e, 0x12123624, 0x80809b1b, 0xe2e23ddf, 0xebeb26cd, 0x2727694e, 0xb2b2cd7f, 0x75759fea,
0x09091b12, 0x83839e1d, 0x2c2c7458, 0x1a1a2e34, 0x1b1b2d36, 0x6e6eb2dc, 0x5a5aeeb4, 0xa0a0fb5b,
0x5252f6a4, 0x3b3b4d76, 0xd6d661b7, 0xb3b3ce7d, 0x29297b52, 0xe3e33edd, 0x2f2f715e, 0x84849713,
0x5353f5a6, 0xd1d168b9, 0x00000000, 0xeded2cc1, 0x20206040, 0xfcfc1fe3, 0xb1b1c879, 0x5b5bedb6,
0x6a6abed4, 0xcbcb468d, 0xbebed967, 0x39394b72, 0x4a4ade94, 0x4c4cd498, 0x5858e8b0, 0xcfcf4a85,
0xd0d06bbb, 0xefef2ac5, 0xaaaae54f, 0xfbfb16ed, 0x4343c586, 0x4d4dd79a, 0x33335566, 0x85859411,
0x4545cf8a, 0xf9f910e9, 0x02020604, 0x7f7f81fe, 0x5050f0a0, 0x3c3c4478, 0x9f9fba25, 0xa8a8e34b,
0x5151f3a2, 0xa3a3fe5d, 0x4040c080, 0x8f8f8a05, 0x9292ad3f, 0x9d9dbc21, 0x38384870, 0xf5f504f1,
0xbcbcdf63, 0xb6b6c177, 0xdada75af, 0x21216342, 0x10103020, 0xffff1ae5, 0xf3f30efd, 0xd2d26dbf,
0xcdcd4c81, 0x0c0c1418, 0x13133526, 0xecec2fc3, 0x5f5fe1be, 0x9797a235, 0x4444cc88, 0x1717392e,
0xc4c45793, 0xa7a7f255, 0x7e7e82fc, 0x3d3d477a, 0x6464acc8, 0x5d5de7ba, 0x19192b32, 0x737395e6,
0x6060a0c0, 0x81819819, 0x4f4fd19e, 0xdcdc7fa3, 0x22226644, 0x2a2a7e54, 0x9090ab3b, 0x8888830b,
0x4646ca8c, 0xeeee29c7, 0xb8b8d36b, 0x14143c28, 0xdede79a7, 0x5e5ee2bc, 0x0b0b1d16, 0xdbdb76ad,
0xe0e03bdb, 0x32325664, 0x3a3a4e74, 0x0a0a1e14, 0x4949db92, 0x06060a0c, 0x24246c48, 0x5c5ce4b8,
0xc2c25d9f, 0xd3d36ebd, 0xacacef43, 0x6262a6c4, 0x9191a839, 0x9595a431, 0xe4e437d3, 0x79798bf2,
0xe7e732d5, 0xc8c8438b, 0x3737596e, 0x6d6db7da, 0x8d8d8c01, 0xd5d564b1, 0x4e4ed29c, 0xa9a9e049,
0x6c6cb4d8, 0x5656faac, 0xf4f407f3, 0xeaea25cf, 0x6565afca, 0x7a7a8ef4, 0xaeaee947, 0x08081810,
0xbabad56f, 0x787888f0, 0x25256f4a, 0x2e2e725c, 0x1c1c2438, 0xa6a6f157, 0xb4b4c773, 0xc6c65197,
0xe8e823cb, 0xdddd7ca1, 0x74749ce8, 0x1f1f213e, 0x4b4bdd96, 0xbdbddc61, 0x8b8b860d, 0x8a8a850f,
0x707090e0, 0x3e3e427c, 0xb5b5c471, 0x6666aacc, 0x4848d890, 0x03030506, 0xf6f601f7, 0x0e0e121c,
0x6161a3c2, 0x35355f6a, 0x5757f9ae, 0xb9b9d069, 0x86869117, 0xc1c15899, 0x1d1d273a, 0x9e9eb927,
0xe1e138d9, 0xf8f813eb, 0x9898b32b, 0x11113322, 0x6969bbd2, 0xd9d970a9, 0x8e8e8907, 0x9494a733,
0x9b9bb62d, 0x1e1e223c, 0x87879215, 0xe9e920c9, 0xcece4987, 0x5555ffaa, 0x28287850, 0xdfdf7aa5,
0x8c8c8f03, 0xa1a1f859, 0x89898009, 0x0d0d171a, 0xbfbfda65, 0xe6e631d7, 0x4242c684, 0x6868b8d0,
0x4141c382, 0x9999b029, 0x2d2d775a, 0x0f0f111e, 0xb0b0cb7b, 0x5454fca8, 0xbbbbd66d, 0x16163a2c,
},
}
// Lookup tables for decryption.
// These can be recomputed by adapting the tests in aes_test.go.
var td = [4][256]uint32{
{
0x51f4a750, 0x7e416553, 0x1a17a4c3, 0x3a275e96, 0x3bab6bcb, 0x1f9d45f1, 0xacfa58ab, 0x4be30393,
0x2030fa55, 0xad766df6, 0x88cc7691, 0xf5024c25, 0x4fe5d7fc, 0xc52acbd7, 0x26354480, 0xb562a38f,
0xdeb15a49, 0x25ba1b67, 0x45ea0e98, 0x5dfec0e1, 0xc32f7502, 0x814cf012, 0x8d4697a3, 0x6bd3f9c6,
0x038f5fe7, 0x15929c95, 0xbf6d7aeb, 0x955259da, 0xd4be832d, 0x587421d3, 0x49e06929, 0x8ec9c844,
0x75c2896a, 0xf48e7978, 0x99583e6b, 0x27b971dd, 0xbee14fb6, 0xf088ad17, 0xc920ac66, 0x7dce3ab4,
0x63df4a18, 0xe51a3182, 0x97513360, 0x62537f45, 0xb16477e0, 0xbb6bae84, 0xfe81a01c, 0xf9082b94,
0x70486858, 0x8f45fd19, 0x94de6c87, 0x527bf8b7, 0xab73d323, 0x724b02e2, 0xe31f8f57, 0x6655ab2a,
0xb2eb2807, 0x2fb5c203, 0x86c57b9a, 0xd33708a5, 0x302887f2, 0x23bfa5b2, 0x02036aba, 0xed16825c,
0x8acf1c2b, 0xa779b492, 0xf307f2f0, 0x4e69e2a1, 0x65daf4cd, 0x0605bed5, 0xd134621f, 0xc4a6fe8a,
0x342e539d, 0xa2f355a0, 0x058ae132, 0xa4f6eb75, 0x0b83ec39, 0x4060efaa, 0x5e719f06, 0xbd6e1051,
0x3e218af9, 0x96dd063d, 0xdd3e05ae, 0x4de6bd46, 0x91548db5, 0x71c45d05, 0x0406d46f, 0x605015ff,
0x1998fb24, 0xd6bde997, 0x894043cc, 0x67d99e77, 0xb0e842bd, 0x07898b88, 0xe7195b38, 0x79c8eedb,
0xa17c0a47, 0x7c420fe9, 0xf8841ec9, 0x00000000, 0x09808683, 0x322bed48, 0x1e1170ac, 0x6c5a724e,
0xfd0efffb, 0x0f853856, 0x3daed51e, 0x362d3927, 0x0a0fd964, 0x685ca621, 0x9b5b54d1, 0x24362e3a,
0x0c0a67b1, 0x9357e70f, 0xb4ee96d2, 0x1b9b919e, 0x80c0c54f, 0x61dc20a2, 0x5a774b69, 0x1c121a16,
0xe293ba0a, 0xc0a02ae5, 0x3c22e043, 0x121b171d, 0x0e090d0b, 0xf28bc7ad, 0x2db6a8b9, 0x141ea9c8,
0x57f11985, 0xaf75074c, 0xee99ddbb, 0xa37f60fd, 0xf701269f, 0x5c72f5bc, 0x44663bc5, 0x5bfb7e34,
0x8b432976, 0xcb23c6dc, 0xb6edfc68, 0xb8e4f163, 0xd731dcca, 0x42638510, 0x13972240, 0x84c61120,
0x854a247d, 0xd2bb3df8, 0xaef93211, 0xc729a16d, 0x1d9e2f4b, 0xdcb230f3, 0x0d8652ec, 0x77c1e3d0,
0x2bb3166c, 0xa970b999, 0x119448fa, 0x47e96422, 0xa8fc8cc4, 0xa0f03f1a, 0x567d2cd8, 0x223390ef,
0x87494ec7, 0xd938d1c1, 0x8ccaa2fe, 0x98d40b36, 0xa6f581cf, 0xa57ade28, 0xdab78e26, 0x3fadbfa4,
0x2c3a9de4, 0x5078920d, 0x6a5fcc9b, 0x547e4662, 0xf68d13c2, 0x90d8b8e8, 0x2e39f75e, 0x82c3aff5,
0x9f5d80be, 0x69d0937c, 0x6fd52da9, 0xcf2512b3, 0xc8ac993b, 0x10187da7, 0xe89c636e, 0xdb3bbb7b,
0xcd267809, 0x6e5918f4, 0xec9ab701, 0x834f9aa8, 0xe6956e65, 0xaaffe67e, 0x21bccf08, 0xef15e8e6,
0xbae79bd9, 0x4a6f36ce, 0xea9f09d4, 0x29b07cd6, 0x31a4b2af, 0x2a3f2331, 0xc6a59430, 0x35a266c0,
0x744ebc37, 0xfc82caa6, 0xe090d0b0, 0x33a7d815, 0xf104984a, 0x41ecdaf7, 0x7fcd500e, 0x1791f62f,
0x764dd68d, 0x43efb04d, 0xccaa4d54, 0xe49604df, 0x9ed1b5e3, 0x4c6a881b, 0xc12c1fb8, 0x4665517f,
0x9d5eea04, 0x018c355d, 0xfa877473, 0xfb0b412e, 0xb3671d5a, 0x92dbd252, 0xe9105633, 0x6dd64713,
0x9ad7618c, 0x37a10c7a, 0x59f8148e, 0xeb133c89, 0xcea927ee, 0xb761c935, 0xe11ce5ed, 0x7a47b13c,
0x9cd2df59, 0x55f2733f, 0x1814ce79, 0x73c737bf, 0x53f7cdea, 0x5ffdaa5b, 0xdf3d6f14, 0x7844db86,
0xcaaff381, 0xb968c43e, 0x3824342c, 0xc2a3405f, 0x161dc372, 0xbce2250c, 0x283c498b, 0xff0d9541,
0x39a80171, 0x080cb3de, 0xd8b4e49c, 0x6456c190, 0x7bcb8461, 0xd532b670, 0x486c5c74, 0xd0b85742,
},
{
0x5051f4a7, 0x537e4165, 0xc31a17a4, 0x963a275e, 0xcb3bab6b, 0xf11f9d45, 0xabacfa58, 0x934be303,
0x552030fa, 0xf6ad766d, 0x9188cc76, 0x25f5024c, 0xfc4fe5d7, 0xd7c52acb, 0x80263544, 0x8fb562a3,
0x49deb15a, 0x6725ba1b, 0x9845ea0e, 0xe15dfec0, 0x02c32f75, 0x12814cf0, 0xa38d4697, 0xc66bd3f9,
0xe7038f5f, 0x9515929c, 0xebbf6d7a, 0xda955259, 0x2dd4be83, 0xd3587421, 0x2949e069, 0x448ec9c8,
0x6a75c289, 0x78f48e79, 0x6b99583e, 0xdd27b971, 0xb6bee14f, 0x17f088ad, 0x66c920ac, 0xb47dce3a,
0x1863df4a, 0x82e51a31, 0x60975133, 0x4562537f, 0xe0b16477, 0x84bb6bae, 0x1cfe81a0, 0x94f9082b,
0x58704868, 0x198f45fd, 0x8794de6c, 0xb7527bf8, 0x23ab73d3, 0xe2724b02, 0x57e31f8f, 0x2a6655ab,
0x07b2eb28, 0x032fb5c2, 0x9a86c57b, 0xa5d33708, 0xf2302887, 0xb223bfa5, 0xba02036a, 0x5ced1682,
0x2b8acf1c, 0x92a779b4, 0xf0f307f2, 0xa14e69e2, 0xcd65daf4, 0xd50605be, 0x1fd13462, 0x8ac4a6fe,
0x9d342e53, 0xa0a2f355, 0x32058ae1, 0x75a4f6eb, 0x390b83ec, 0xaa4060ef, 0x065e719f, 0x51bd6e10,
0xf93e218a, 0x3d96dd06, 0xaedd3e05, 0x464de6bd, 0xb591548d, 0x0571c45d, 0x6f0406d4, 0xff605015,
0x241998fb, 0x97d6bde9, 0xcc894043, 0x7767d99e, 0xbdb0e842, 0x8807898b, 0x38e7195b, 0xdb79c8ee,
0x47a17c0a, 0xe97c420f, 0xc9f8841e, 0x00000000, 0x83098086, 0x48322bed, 0xac1e1170, 0x4e6c5a72,
0xfbfd0eff, 0x560f8538, 0x1e3daed5, 0x27362d39, 0x640a0fd9, 0x21685ca6, 0xd19b5b54, 0x3a24362e,
0xb10c0a67, 0x0f9357e7, 0xd2b4ee96, 0x9e1b9b91, 0x4f80c0c5, 0xa261dc20, 0x695a774b, 0x161c121a,
0x0ae293ba, 0xe5c0a02a, 0x433c22e0, 0x1d121b17, 0x0b0e090d, 0xadf28bc7, 0xb92db6a8, 0xc8141ea9,
0x8557f119, 0x4caf7507, 0xbbee99dd, 0xfda37f60, 0x9ff70126, 0xbc5c72f5, 0xc544663b, 0x345bfb7e,
0x768b4329, 0xdccb23c6, 0x68b6edfc, 0x63b8e4f1, 0xcad731dc, 0x10426385, 0x40139722, 0x2084c611,
0x7d854a24, 0xf8d2bb3d, 0x11aef932, 0x6dc729a1, 0x4b1d9e2f, 0xf3dcb230, 0xec0d8652, 0xd077c1e3,
0x6c2bb316, 0x99a970b9, 0xfa119448, 0x2247e964, 0xc4a8fc8c, 0x1aa0f03f, 0xd8567d2c, 0xef223390,
0xc787494e, 0xc1d938d1, 0xfe8ccaa2, 0x3698d40b, 0xcfa6f581, 0x28a57ade, 0x26dab78e, 0xa43fadbf,
0xe42c3a9d, 0x0d507892, 0x9b6a5fcc, 0x62547e46, 0xc2f68d13, 0xe890d8b8, 0x5e2e39f7, 0xf582c3af,
0xbe9f5d80, 0x7c69d093, 0xa96fd52d, 0xb3cf2512, 0x3bc8ac99, 0xa710187d, 0x6ee89c63, 0x7bdb3bbb,
0x09cd2678, 0xf46e5918, 0x01ec9ab7, 0xa8834f9a, 0x65e6956e, 0x7eaaffe6, 0x0821bccf, 0xe6ef15e8,
0xd9bae79b, 0xce4a6f36, 0xd4ea9f09, 0xd629b07c, 0xaf31a4b2, 0x312a3f23, 0x30c6a594, 0xc035a266,
0x37744ebc, 0xa6fc82ca, 0xb0e090d0, 0x1533a7d8, 0x4af10498, 0xf741ecda, 0x0e7fcd50, 0x2f1791f6,
0x8d764dd6, 0x4d43efb0, 0x54ccaa4d, 0xdfe49604, 0xe39ed1b5, 0x1b4c6a88, 0xb8c12c1f, 0x7f466551,
0x049d5eea, 0x5d018c35, 0x73fa8774, 0x2efb0b41, 0x5ab3671d, 0x5292dbd2, 0x33e91056, 0x136dd647,
0x8c9ad761, 0x7a37a10c, 0x8e59f814, 0x89eb133c, 0xeecea927, 0x35b761c9, 0xede11ce5, 0x3c7a47b1,
0x599cd2df, 0x3f55f273, 0x791814ce, 0xbf73c737, 0xea53f7cd, 0x5b5ffdaa, 0x14df3d6f, 0x867844db,
0x81caaff3, 0x3eb968c4, 0x2c382434, 0x5fc2a340, 0x72161dc3, 0x0cbce225, 0x8b283c49, 0x41ff0d95,
0x7139a801, 0xde080cb3, 0x9cd8b4e4, 0x906456c1, 0x617bcb84, 0x70d532b6, 0x74486c5c, 0x42d0b857,
},
{
0xa75051f4, 0x65537e41, 0xa4c31a17, 0x5e963a27, 0x6bcb3bab, 0x45f11f9d, 0x58abacfa, 0x03934be3,
0xfa552030, 0x6df6ad76, 0x769188cc, 0x4c25f502, 0xd7fc4fe5, 0xcbd7c52a, 0x44802635, 0xa38fb562,
0x5a49deb1, 0x1b6725ba, 0x0e9845ea, 0xc0e15dfe, 0x7502c32f, 0xf012814c, 0x97a38d46, 0xf9c66bd3,
0x5fe7038f, 0x9c951592, 0x7aebbf6d, 0x59da9552, 0x832dd4be, 0x21d35874, 0x692949e0, 0xc8448ec9,
0x896a75c2, 0x7978f48e, 0x3e6b9958, 0x71dd27b9, 0x4fb6bee1, 0xad17f088, 0xac66c920, 0x3ab47dce,
0x4a1863df, 0x3182e51a, 0x33609751, 0x7f456253, 0x77e0b164, 0xae84bb6b, 0xa01cfe81, 0x2b94f908,
0x68587048, 0xfd198f45, 0x6c8794de, 0xf8b7527b, 0xd323ab73, 0x02e2724b, 0x8f57e31f, 0xab2a6655,
0x2807b2eb, 0xc2032fb5, 0x7b9a86c5, 0x08a5d337, 0x87f23028, 0xa5b223bf, 0x6aba0203, 0x825ced16,
0x1c2b8acf, 0xb492a779, 0xf2f0f307, 0xe2a14e69, 0xf4cd65da, 0xbed50605, 0x621fd134, 0xfe8ac4a6,
0x539d342e, 0x55a0a2f3, 0xe132058a, 0xeb75a4f6, 0xec390b83, 0xefaa4060, 0x9f065e71, 0x1051bd6e,
0x8af93e21, 0x063d96dd, 0x05aedd3e, 0xbd464de6, 0x8db59154, 0x5d0571c4, 0xd46f0406, 0x15ff6050,
0xfb241998, 0xe997d6bd, 0x43cc8940, 0x9e7767d9, 0x42bdb0e8, 0x8b880789, 0x5b38e719, 0xeedb79c8,
0x0a47a17c, 0x0fe97c42, 0x1ec9f884, 0x00000000, 0x86830980, 0xed48322b, 0x70ac1e11, 0x724e6c5a,
0xfffbfd0e, 0x38560f85, 0xd51e3dae, 0x3927362d, 0xd9640a0f, 0xa621685c, 0x54d19b5b, 0x2e3a2436,
0x67b10c0a, 0xe70f9357, 0x96d2b4ee, 0x919e1b9b, 0xc54f80c0, 0x20a261dc, 0x4b695a77, 0x1a161c12,
0xba0ae293, 0x2ae5c0a0, 0xe0433c22, 0x171d121b, 0x0d0b0e09, 0xc7adf28b, 0xa8b92db6, 0xa9c8141e,
0x198557f1, 0x074caf75, 0xddbbee99, 0x60fda37f, 0x269ff701, 0xf5bc5c72, 0x3bc54466, 0x7e345bfb,
0x29768b43, 0xc6dccb23, 0xfc68b6ed, 0xf163b8e4, 0xdccad731, 0x85104263, 0x22401397, 0x112084c6,
0x247d854a, 0x3df8d2bb, 0x3211aef9, 0xa16dc729, 0x2f4b1d9e, 0x30f3dcb2, 0x52ec0d86, 0xe3d077c1,
0x166c2bb3, 0xb999a970, 0x48fa1194, 0x642247e9, 0x8cc4a8fc, 0x3f1aa0f0, 0x2cd8567d, 0x90ef2233,
0x4ec78749, 0xd1c1d938, 0xa2fe8cca, 0x0b3698d4, 0x81cfa6f5, 0xde28a57a, 0x8e26dab7, 0xbfa43fad,
0x9de42c3a, 0x920d5078, 0xcc9b6a5f, 0x4662547e, 0x13c2f68d, 0xb8e890d8, 0xf75e2e39, 0xaff582c3,
0x80be9f5d, 0x937c69d0, 0x2da96fd5, 0x12b3cf25, 0x993bc8ac, 0x7da71018, 0x636ee89c, 0xbb7bdb3b,
0x7809cd26, 0x18f46e59, 0xb701ec9a, 0x9aa8834f, 0x6e65e695, 0xe67eaaff, 0xcf0821bc, 0xe8e6ef15,
0x9bd9bae7, 0x36ce4a6f, 0x09d4ea9f, 0x7cd629b0, 0xb2af31a4, 0x23312a3f, 0x9430c6a5, 0x66c035a2,
0xbc37744e, 0xcaa6fc82, 0xd0b0e090, 0xd81533a7, 0x984af104, 0xdaf741ec, 0x500e7fcd, 0xf62f1791,
0xd68d764d, 0xb04d43ef, 0x4d54ccaa, 0x04dfe496, 0xb5e39ed1, 0x881b4c6a, 0x1fb8c12c, 0x517f4665,
0xea049d5e, 0x355d018c, 0x7473fa87, 0x412efb0b, 0x1d5ab367, 0xd25292db, 0x5633e910, 0x47136dd6,
0x618c9ad7, 0x0c7a37a1, 0x148e59f8, 0x3c89eb13, 0x27eecea9, 0xc935b761, 0xe5ede11c, 0xb13c7a47,
0xdf599cd2, 0x733f55f2, 0xce791814, 0x37bf73c7, 0xcdea53f7, 0xaa5b5ffd, 0x6f14df3d, 0xdb867844,
0xf381caaf, 0xc43eb968, 0x342c3824, 0x405fc2a3, 0xc372161d, 0x250cbce2, 0x498b283c, 0x9541ff0d,
0x017139a8, 0xb3de080c, 0xe49cd8b4, 0xc1906456, 0x84617bcb, 0xb670d532, 0x5c74486c, 0x5742d0b8,
},
{
0xf4a75051, 0x4165537e, 0x17a4c31a, 0x275e963a, 0xab6bcb3b, 0x9d45f11f, 0xfa58abac, 0xe303934b,
0x30fa5520, 0x766df6ad, 0xcc769188, 0x024c25f5, 0xe5d7fc4f, 0x2acbd7c5, 0x35448026, 0x62a38fb5,
0xb15a49de, 0xba1b6725, 0xea0e9845, 0xfec0e15d, 0x2f7502c3, 0x4cf01281, 0x4697a38d, 0xd3f9c66b,
0x8f5fe703, 0x929c9515, 0x6d7aebbf, 0x5259da95, 0xbe832dd4, 0x7421d358, 0xe0692949, 0xc9c8448e,
0xc2896a75, 0x8e7978f4, 0x583e6b99, 0xb971dd27, 0xe14fb6be, 0x88ad17f0, 0x20ac66c9, 0xce3ab47d,
0xdf4a1863, 0x1a3182e5, 0x51336097, 0x537f4562, 0x6477e0b1, 0x6bae84bb, 0x81a01cfe, 0x082b94f9,
0x48685870, 0x45fd198f, 0xde6c8794, 0x7bf8b752, 0x73d323ab, 0x4b02e272, 0x1f8f57e3, 0x55ab2a66,
0xeb2807b2, 0xb5c2032f, 0xc57b9a86, 0x3708a5d3, 0x2887f230, 0xbfa5b223, 0x036aba02, 0x16825ced,
0xcf1c2b8a, 0x79b492a7, 0x07f2f0f3, 0x69e2a14e, 0xdaf4cd65, 0x05bed506, 0x34621fd1, 0xa6fe8ac4,
0x2e539d34, 0xf355a0a2, 0x8ae13205, 0xf6eb75a4, 0x83ec390b, 0x60efaa40, 0x719f065e, 0x6e1051bd,
0x218af93e, 0xdd063d96, 0x3e05aedd, 0xe6bd464d, 0x548db591, 0xc45d0571, 0x06d46f04, 0x5015ff60,
0x98fb2419, 0xbde997d6, 0x4043cc89, 0xd99e7767, 0xe842bdb0, 0x898b8807, 0x195b38e7, 0xc8eedb79,
0x7c0a47a1, 0x420fe97c, 0x841ec9f8, 0x00000000, 0x80868309, 0x2bed4832, 0x1170ac1e, 0x5a724e6c,
0x0efffbfd, 0x8538560f, 0xaed51e3d, 0x2d392736, 0x0fd9640a, 0x5ca62168, 0x5b54d19b, 0x362e3a24,
0x0a67b10c, 0x57e70f93, 0xee96d2b4, 0x9b919e1b, 0xc0c54f80, 0xdc20a261, 0x774b695a, 0x121a161c,
0x93ba0ae2, 0xa02ae5c0, 0x22e0433c, 0x1b171d12, 0x090d0b0e, 0x8bc7adf2, 0xb6a8b92d, 0x1ea9c814,
0xf1198557, 0x75074caf, 0x99ddbbee, 0x7f60fda3, 0x01269ff7, 0x72f5bc5c, 0x663bc544, 0xfb7e345b,
0x4329768b, 0x23c6dccb, 0xedfc68b6, 0xe4f163b8, 0x31dccad7, 0x63851042, 0x97224013, 0xc6112084,
0x4a247d85, 0xbb3df8d2, 0xf93211ae, 0x29a16dc7, 0x9e2f4b1d, 0xb230f3dc, 0x8652ec0d, 0xc1e3d077,
0xb3166c2b, 0x70b999a9, 0x9448fa11, 0xe9642247, 0xfc8cc4a8, 0xf03f1aa0, 0x7d2cd856, 0x3390ef22,
0x494ec787, 0x38d1c1d9, 0xcaa2fe8c, 0xd40b3698, 0xf581cfa6, 0x7ade28a5, 0xb78e26da, 0xadbfa43f,
0x3a9de42c, 0x78920d50, 0x5fcc9b6a, 0x7e466254, 0x8d13c2f6, 0xd8b8e890, 0x39f75e2e, 0xc3aff582,
0x5d80be9f, 0xd0937c69, 0xd52da96f, 0x2512b3cf, 0xac993bc8, 0x187da710, 0x9c636ee8, 0x3bbb7bdb,
0x267809cd, 0x5918f46e, 0x9ab701ec, 0x4f9aa883, 0x956e65e6, 0xffe67eaa, 0xbccf0821, 0x15e8e6ef,
0xe79bd9ba, 0x6f36ce4a, 0x9f09d4ea, 0xb07cd629, 0xa4b2af31, 0x3f23312a, 0xa59430c6, 0xa266c035,
0x4ebc3774, 0x82caa6fc, 0x90d0b0e0, 0xa7d81533, 0x04984af1, 0xecdaf741, 0xcd500e7f, 0x91f62f17,
0x4dd68d76, 0xefb04d43, 0xaa4d54cc, 0x9604dfe4, 0xd1b5e39e, 0x6a881b4c, 0x2c1fb8c1, 0x65517f46,
0x5eea049d, 0x8c355d01, 0x877473fa, 0x0b412efb, 0x671d5ab3, 0xdbd25292, 0x105633e9, 0xd647136d,
0xd7618c9a, 0xa10c7a37, 0xf8148e59, 0x133c89eb, 0xa927eece, 0x61c935b7, 0x1ce5ede1, 0x47b13c7a,
0xd2df599c, 0xf2733f55, 0x14ce7918, 0xc737bf73, 0xf7cdea53, 0xfdaa5b5f, 0x3d6f14df, 0x44db8678,
0xaff381ca, 0x68c43eb9, 0x24342c38, 0xa3405fc2, 0x1dc37216, 0xe2250cbc, 0x3c498b28, 0x0d9541ff,
0xa8017139, 0x0cb3de08, 0xb4e49cd8, 0x56c19064, 0xcb84617b, 0x32b670d5, 0x6c5c7448, 0xb85742d0,
},
} | src/pkg/crypto/aes/const.go | 0.515864 | 0.660665 | const.go | starcoder |
package cbor
import (
"fmt"
"io"
"github.com/ipsn/go-ipfs/gxlibs/github.com/polydawn/refmt/shared"
. "github.com/ipsn/go-ipfs/gxlibs/github.com/polydawn/refmt/tok"
)
// Decoder is a pull-based CBOR token source: each Step call reads bytes
// from the underlying reader and yields exactly one refmt Token.
// It is not safe for concurrent use; call Reset to reuse on a new stream.
type Decoder struct {
	r shared.SlickReader

	stack []decoderStep // When empty, and step returns done, all done.
	step  decoderStep   // Shortcut to end of stack.
	left  []int         // Statekeeping space for definite-len map and array.
}

// NewDecoder returns a Decoder reading CBOR from r.
// The step stack and length-tracking slices are pre-sized so typical
// (shallow) documents decode without reallocating them.
func NewDecoder(r io.Reader) (d *Decoder) {
	d = &Decoder{
		r:     shared.NewReader(r),
		stack: make([]decoderStep, 0, 10),
		left:  make([]int, 0, 10),
	}
	d.step = d.step_acceptValue
	return
}

// Reset discards all in-progress state so the Decoder can be reused
// (the underlying reader is kept as-is).
func (d *Decoder) Reset() {
	d.stack = d.stack[0:0]
	d.step = d.step_acceptValue
	d.left = d.left[0:0]
}

// decoderStep is one state of the decode state machine. It fills
// tokenSlot and reports whether the current composite (or top-level
// value) is complete.
type decoderStep func(tokenSlot *Token) (done bool, err error)

// Step decodes the next token into tokenSlot. It returns done=true when
// a complete top-level value has been consumed, or on error (errors are
// terminal).
func (d *Decoder) Step(tokenSlot *Token) (done bool, err error) {
	done, err = d.step(tokenSlot)
	// If the step errored: out, entirely.
	if err != nil {
		return true, err
	}
	// If the step wasn't done, return same status.
	if !done {
		return false, nil
	}
	// If it WAS done, pop next, or if stack empty, we're entirely done.
	// Note: when a composite opens, pushPhase pushes the *current* step,
	// so the bottom stack entry is the initial step_acceptValue acting as
	// a sentinel; once only it remains (nSteps == 0) the top-level value
	// is complete — hence the <= 0 comparison rather than == -1.
	nSteps := len(d.stack) - 1
	if nSteps <= 0 {
		return true, nil // that's all folks
	}
	d.step = d.stack[nSteps]
	d.stack = d.stack[0:nSteps]
	return false, nil
}

// pushPhase saves the current step on the stack and makes newPhase the
// active step (entered when a composite value opens).
func (d *Decoder) pushPhase(newPhase decoderStep) {
	d.stack = append(d.stack, d.step)
	d.step = newPhase
}
// The initial step: any value is accepted here, and no composite
// terminator (break sigil) is valid.
// ONLY used as the first step; all other steps handle leaf nodes internally.
func (d *Decoder) step_acceptValue(tokenSlot *Token) (done bool, err error) {
	mb, err := d.r.Readn1()
	if err != nil {
		return true, err
	}
	tokenSlot.Tagged = false
	return d.stepHelper_acceptValue(mb, tokenSlot)
}

// Step in midst of decoding an indefinite-length array.
func (d *Decoder) step_acceptArrValueOrBreak(tokenSlot *Token) (done bool, err error) {
	mb, err := d.r.Readn1()
	if err != nil {
		return true, err
	}
	tokenSlot.Tagged = false
	if mb == cborSigilBreak {
		// The break sigil terminates the indefinite-length array.
		tokenSlot.Type = TArrClose
		return true, nil
	}
	_, err = d.stepHelper_acceptValue(mb, tokenSlot)
	return false, err
}

// Step in midst of decoding an indefinite-length map, key expected up next, or end.
func (d *Decoder) step_acceptMapIndefKey(tokenSlot *Token) (done bool, err error) {
	mb, err := d.r.Readn1()
	if err != nil {
		return true, err
	}
	tokenSlot.Tagged = false
	if mb == cborSigilBreak {
		// The break sigil terminates the indefinite-length map.
		tokenSlot.Type = TMapClose
		return true, nil
	}
	// A key was read, so a value must come next.
	d.step = d.step_acceptMapIndefValueOrBreak
	_, err = d.stepHelper_acceptValue(mb, tokenSlot) // FIXME surely not *any* value? not composites, at least?
	return false, err
}

// Step in midst of decoding an indefinite-length map, value expected up next.
func (d *Decoder) step_acceptMapIndefValueOrBreak(tokenSlot *Token) (done bool, err error) {
	mb, err := d.r.Readn1()
	if err != nil {
		return true, err
	}
	tokenSlot.Tagged = false
	if mb == cborSigilBreak {
		// A break between a key and its value is malformed input.
		return true, fmt.Errorf("unexpected break; expected value in indefinite-length map")
	}
	d.step = d.step_acceptMapIndefKey
	_, err = d.stepHelper_acceptValue(mb, tokenSlot)
	return false, err
}
// Step in midst of decoding a definite-length array.
func (d *Decoder) step_acceptArrValue(tokenSlot *Token) (done bool, err error) {
	// If no entries remain, yield the close token, pop the length
	// counter, and report this composite done.
	last := len(d.left) - 1
	if d.left[last] == 0 {
		d.left = d.left[:last]
		tokenSlot.Type = TArrClose
		return true, nil
	}
	d.left[last]--
	// Read the next value.
	mb, err := d.r.Readn1()
	if err != nil {
		return true, err
	}
	tokenSlot.Tagged = false
	_, err = d.stepHelper_acceptValue(mb, tokenSlot)
	return false, err
}

// Step in midst of decoding an definite-length map, key expected up next.
func (d *Decoder) step_acceptMapKey(tokenSlot *Token) (done bool, err error) {
	// If no entries remain, yield the close token, pop the length
	// counter, and report this composite done.
	last := len(d.left) - 1
	if d.left[last] == 0 {
		d.left = d.left[:last]
		tokenSlot.Type = TMapClose
		return true, nil
	}
	d.left[last]--
	// Read the next key; a value must follow it.
	mb, err := d.r.Readn1()
	if err != nil {
		return true, err
	}
	d.step = d.step_acceptMapValue
	tokenSlot.Tagged = false
	_, err = d.stepHelper_acceptValue(mb, tokenSlot) // FIXME surely not *any* value? not composites, at least?
	return false, err
}

// Step in midst of decoding an definite-length map, value expected up next.
func (d *Decoder) step_acceptMapValue(tokenSlot *Token) (done bool, err error) {
	// Read the next value; afterwards a key (or the end) is expected.
	mb, err := d.r.Readn1()
	if err != nil {
		return true, err
	}
	d.step = d.step_acceptMapKey
	tokenSlot.Tagged = false
	_, err = d.stepHelper_acceptValue(mb, tokenSlot)
	return false, err
}
// stepHelper_acceptValue dispatches on the first byte of a CBOR data
// item (its "major byte") and fills tokenSlot accordingly.
// Scalars are decoded fully here (done=true); composites only emit the
// open token, push the matching step, and return done=false.
func (d *Decoder) stepHelper_acceptValue(majorByte byte, tokenSlot *Token) (done bool, err error) {
	switch majorByte {
	case cborSigilNil:
		tokenSlot.Type = TNull
		return true, nil
	case cborSigilFalse:
		tokenSlot.Type = TBool
		tokenSlot.Bool = false
		return true, nil
	case cborSigilTrue:
		tokenSlot.Type = TBool
		tokenSlot.Bool = true
		return true, nil
	case cborSigilFloat16, cborSigilFloat32, cborSigilFloat64:
		// All float widths are widened to float64 on decode.
		tokenSlot.Type = TFloat64
		tokenSlot.Float64, err = d.decodeFloat(majorByte)
		return true, err
	case cborSigilIndefiniteBytes:
		// Indefinite-length strings/bytes are concatenated into a single
		// token here rather than streamed.
		tokenSlot.Type = TBytes
		tokenSlot.Bytes, err = d.decodeBytesIndefinite(nil)
		return true, err
	case cborSigilIndefiniteString:
		tokenSlot.Type = TString
		tokenSlot.Str, err = d.decodeStringIndefinite()
		return true, err
	case cborSigilIndefiniteArray:
		// Length -1 signals "unknown length" to the token consumer.
		tokenSlot.Type = TArrOpen
		tokenSlot.Length = -1
		d.pushPhase(d.step_acceptArrValueOrBreak)
		return false, nil
	case cborSigilIndefiniteMap:
		tokenSlot.Type = TMapOpen
		tokenSlot.Length = -1
		d.pushPhase(d.step_acceptMapIndefKey)
		return false, nil
	default:
		// Remaining major types are identified by byte ranges.
		switch {
		case majorByte >= cborMajorUint && majorByte < cborMajorNegInt:
			tokenSlot.Type = TUint
			tokenSlot.Uint, err = d.decodeUint(majorByte)
			return true, err
		case majorByte >= cborMajorNegInt && majorByte < cborMajorBytes:
			tokenSlot.Type = TInt
			tokenSlot.Int, err = d.decodeNegInt(majorByte)
			return true, err
		case majorByte >= cborMajorBytes && majorByte < cborMajorString:
			tokenSlot.Type = TBytes
			tokenSlot.Bytes, err = d.decodeBytes(majorByte)
			return true, err
		case majorByte >= cborMajorString && majorByte < cborMajorArray:
			tokenSlot.Type = TString
			tokenSlot.Str, err = d.decodeString(majorByte)
			return true, err
		case majorByte >= cborMajorArray && majorByte < cborMajorMap:
			// Definite-length array: remember how many entries to expect.
			var n int
			n, err = d.decodeLen(majorByte)
			tokenSlot.Type = TArrOpen
			tokenSlot.Length = n
			d.left = append(d.left, n)
			d.pushPhase(d.step_acceptArrValue)
			return false, err
		case majorByte >= cborMajorMap && majorByte < cborMajorTag:
			// Definite-length map: n counts key/value *pairs*.
			var n int
			n, err = d.decodeLen(majorByte)
			tokenSlot.Type = TMapOpen
			tokenSlot.Length = n
			d.left = append(d.left, n)
			d.pushPhase(d.step_acceptMapKey)
			return false, err
		case majorByte >= cborMajorTag && majorByte < cborMajorSimple:
			// CBOR tags are, frankly, bonkers, and should not be used.
			// They break isomorphism to basic standards like JSON.
			// We'll parse basic integer tag values -- SINGLE layer only.
			// We will NOT parse the full gamut of recursive tags: doing so
			// would mean allowing an unbounded number of allocs *during
			// *processing of a single token*, which is _not reasonable_.
			if tokenSlot.Tagged {
				return true, fmt.Errorf("unsupported multiple tags on a single data item")
			}
			tokenSlot.Tagged = true
			tokenSlot.Tag, err = d.decodeLen(majorByte)
			if err != nil {
				return true, err
			}
			// Okay, we slurped a tag.
			// Read next value.
			majorByte, err := d.r.Readn1()
			if err != nil {
				return true, err
			}
			// Recurse once to decode the tagged value itself.
			return d.stepHelper_acceptValue(majorByte, tokenSlot)
		default:
			return true, fmt.Errorf("Invalid majorByte: 0x%x", majorByte)
		}
	}
}
package smooth
// Interface is the sort contract required by this package; it matches
// the standard library's sort.Interface.
type Interface interface {
	// Len is the number of elements in the collection.
	Len() int
	// Less reports whether element i must sort before element j.
	Less(i, j int) bool
	// Swap exchanges elements i and j.
	Swap(i, j int)
}
// leo caches the Leonardo numbers, which determine the heap sizes used
// by smoothsort.
var leo []int

// init precomputes the Leonardo sequence (L(0)=L(1)=1,
// L(n)=L(n-1)+L(n-2)+1) up to the first value of at least one billion.
func init() {
	leo = []int{1, 1}
	for leo[len(leo)-1] < 1000000000 {
		n := len(leo)
		leo = append(leo, leo[n-1]+leo[n-2]+1)
	}
}
// stringify restores the increasing order of the heap root nodes after a
// new heap was appended at the end. Only the new (last) root can be out
// of order, so it is walked leftward: a root may trade places with a
// left neighbour only if it is smaller than that neighbour and, for
// heaps of more than one element, both of its children are also smaller
// than the neighbour (otherwise the swap would break the heap property).
// Returns the index (into roots) where the moving root settled.
func stringify(v Interface, roots, sizes []int) int {
	k := len(roots) - 1
	for j := k - 1; j >= 0; j-- {
		jr := roots[j]
		kr := roots[k]
		if !v.Less(kr, jr) {
			// Since the only node that can be out of order is the one we
			// started with, we can bail out as soon as it is in place.
			return k
		}
		size := sizes[k]
		if size <= 1 {
			// Singleton heap: no children to check, swap unconditionally.
			v.Swap(jr, kr)
			k = j
			continue
		}
		right := roots[k] - 1
		left := right - leo[size-2]
		// NOTE: the original condition also tested `size <= 1` here, but
		// that disjunct was dead code — this branch is only reached when
		// size > 1 — so it has been removed.
		if v.Less(right, jr) && v.Less(left, jr) {
			v.Swap(jr, kr)
			k = j
		}
	}
	return k
}
// Heapify is called when two heaps are combined under a new root node. Since
// the two sub-heaps are necessarily heaps it suffices to swap this node with
// its largest child repeatedly until it is larger than both of its children.
func heapify(v Interface, root, size int) {
	for size > 1 {
		// A Leonardo heap of size n has a right child of size n-2
		// (immediately left of the root) and a left child of size n-1.
		right := root - 1
		left := right - leo[size-2]
		if v.Less(left, right) {
			// Right child is the larger one.
			if v.Less(root, right) {
				v.Swap(root, right)
				root = right
				size -= 2
			} else {
				break
			}
		} else {
			// Left child is the larger (or equal) one.
			if v.Less(root, left) {
				v.Swap(root, left)
				root = left
				size -= 1
			} else {
				break
			}
		}
	}
}

// Sort sorts v in place using smoothsort: a forest of Leonardo heaps is
// grown over the data, then repeatedly shrunk from the right, emitting
// the maximum each time.
func Sort(v Interface) {
	if v.Len() <= 1 {
		return
	}
	// roots[i] is the index of the root of the i-th heap; sizes[i] is its
	// size expressed as an index into the leo table.
	roots := make([]int, 0, 5)
	sizes := make([]int, 0, 5)
	roots = append(roots, 0)
	sizes = append(sizes, 1)
	// Build
	for i := 1; i < v.Len(); i++ {
		// Add the next element to the string of heaps
		llen := len(roots)
		if llen >= 2 && sizes[llen-2] == sizes[llen-1]+1 {
			// The last two heaps have consecutive Leonardo sizes: merge
			// them under the new element as their common root.
			roots = roots[0 : len(roots)-1]
			sizes = sizes[0 : len(sizes)-1]
			roots[len(roots)-1] = i
			sizes[len(sizes)-1]++
		} else {
			// Otherwise start a new heap of size L(1) (or L(0) if the
			// previous heap already has size L(1)).
			roots = append(roots, i)
			if sizes[len(sizes)-1] == 1 {
				sizes = append(sizes, 0)
			} else {
				sizes = append(sizes, 1)
			}
		}
		// stringify - Despite what wikipedia says I think we only need to maintain
		// the string property when the heap that was just added has exactly one
		// element. If we are combining heaps to make a new heap then those leaf nodes
		// already satisfy the string property and the larger of those will bubble up
		// when we heapify and will obviously still satisfy the string property.
		rooti := len(roots) - 1
		if sizes[rooti] <= 1 {
			rooti = stringify(v, roots, sizes)
			if rooti != len(roots)-1 {
				heapify(v, roots[rooti], sizes[rooti])
			}
		} else {
			heapify(v, roots[rooti], sizes[rooti])
		}
	}
	// Shrink
	for len(roots) > 0 {
		// Pop the rightmost heap; its root is already the current maximum
		// and sits at its final position.
		root := roots[len(roots)-1]
		size := sizes[len(sizes)-1]
		roots = roots[0 : len(roots)-1]
		sizes = sizes[0 : len(sizes)-1]
		if size > 1 {
			// Expose the two child heaps and restore the root ordering
			// (string property) for each of them in turn.
			right := root - 1
			left := right - leo[size-2]
			roots = append(roots, left)
			sizes = append(sizes, size-1)
			rooti := stringify(v, roots, sizes)
			if rooti < len(roots)-1 {
				heapify(v, roots[rooti], sizes[rooti])
			}
			roots = append(roots, right)
			sizes = append(sizes, size-2)
			rooti = stringify(v, roots, sizes)
			if rooti < len(roots)-1 {
				heapify(v, roots[rooti], sizes[rooti])
			}
		}
	}
}
// IsSorted reports whether data is already in non-decreasing order.
func IsSorted(data Interface) bool {
	for i := data.Len() - 1; i > 0; i-- {
		if data.Less(i, i-1) {
			return false
		}
	}
	return true
}
// Convenience types for common cases

// IntSlice attaches the methods of Interface to []int, sorting in increasing order.
type IntSlice []int

func (p IntSlice) Len() int           { return len(p) }
func (p IntSlice) Less(i, j int) bool { return p[i] < p[j] }
func (p IntSlice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

// Sort is a convenience method.
func (p IntSlice) Sort() { Sort(p) }

// Float64Slice attaches the methods of Interface to []float64, sorting in increasing order.
type Float64Slice []float64

func (p Float64Slice) Len() int           { return len(p) }
func (p Float64Slice) Less(i, j int) bool { return p[i] < p[j] }
func (p Float64Slice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

// Sort is a convenience method.
func (p Float64Slice) Sort() { Sort(p) }

// StringSlice attaches the methods of Interface to []string, sorting in increasing order.
type StringSlice []string

func (p StringSlice) Len() int           { return len(p) }
func (p StringSlice) Less(i, j int) bool { return p[i] < p[j] }
func (p StringSlice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

// Sort is a convenience method.
func (p StringSlice) Sort() { Sort(p) }

// Convenience wrappers for common cases

// Ints sorts a slice of ints in increasing order.
func Ints(a []int) { Sort(IntSlice(a)) }

// Float64s sorts a slice of float64s in increasing order.
func Float64s(a []float64) { Sort(Float64Slice(a)) }

// Strings sorts a slice of strings in increasing order.
func Strings(a []string) { Sort(StringSlice(a)) }

// IntsAreSorted tests whether a slice of ints is sorted in increasing order.
func IntsAreSorted(a []int) bool { return IsSorted(IntSlice(a)) }

// Float64sAreSorted tests whether a slice of float64s is sorted in increasing order.
func Float64sAreSorted(a []float64) bool { return IsSorted(Float64Slice(a)) }

// StringsAreSorted tests whether a slice of strings is sorted in increasing order.
func StringsAreSorted(a []string) bool { return IsSorted(StringSlice(a)) }
package types
import (
"math/big"
"sort"
"strings"
)
// Coin def
type CoinWs struct {
Denom string `json:"denom"`
Amount string `json:"amount"`
}
type Coin struct {
Denom string `json:"denom"`
Amount int64 `json:"amount"`
}
type Int struct {
I *big.Int
}
func (i *Int) Set(x int) {
if i.I == nil {
i.I = new(big.Int)
}
i.I.SetInt64(int64(x))
}
// IsZero reports whether the coin has a zero amount.
func (coin Coin) IsZero() bool {
	return coin.Amount == 0
}

// IsPositive reports whether the coin amount is strictly positive.
func (coin Coin) IsPositive() bool {
	return coin.Amount > 0
}

// IsNotNegative reports whether the coin amount is zero or greater.
func (coin Coin) IsNotNegative() bool {
	return coin.Amount >= 0
}

// SameDenomAs reports whether both coins share the same denomination.
func (coin Coin) SameDenomAs(other Coin) bool {
	return coin.Denom == other.Denom
}

// Plus returns the sum of two coins of the same denomination.
// If the denominations differ, the receiver is returned unchanged.
func (coin Coin) Plus(coinB Coin) Coin {
	if !coin.SameDenomAs(coinB) {
		return coin
	}
	return Coin{Denom: coin.Denom, Amount: coin.Amount + coinB.Amount}
}
// Coins def

// Coins is a set of Coin, kept sorted by denomination with unique,
// non-zero entries (see IsValid).
type Coins []Coin

// CoinsWs is the wire/websocket representation of a coin set.
type CoinsWs []CoinWs

// IsValid reports whether the set is well-formed: denominations strictly
// increasing (hence unique) and no zero amounts.
func (coins Coins) IsValid() bool {
	switch len(coins) {
	case 0:
		return true
	case 1:
		return !coins[0].IsZero()
	default:
		lowDenom := coins[0].Denom
		for _, coin := range coins[1:] {
			if coin.Denom <= lowDenom {
				return false
			}
			if coin.IsZero() {
				return false
			}
			lowDenom = coin.Denom
		}
		return true
	}
}

// IsPositive reports whether every coin amount is strictly positive.
// An empty set is NOT positive.
func (coins Coins) IsPositive() bool {
	if len(coins) == 0 {
		return false
	}
	for _, coin := range coins {
		if !coin.IsPositive() {
			return false
		}
	}
	return true
}

// Plus merges two sorted coin sets denomination-by-denomination,
// summing amounts where denominations match. Pairs that sum to zero are
// dropped from the result. Both inputs are assumed sorted by Denom.
func (coins Coins) Plus(coinsB Coins) Coins {
	sum := ([]Coin)(nil)
	indexA, indexB := 0, 0
	lenA, lenB := len(coins), len(coinsB)
	for {
		// Either side exhausted: append the remainder of the other.
		if indexA == lenA {
			if indexB == lenB {
				return sum
			}
			return append(sum, coinsB[indexB:]...)
		} else if indexB == lenB {
			return append(sum, coins[indexA:]...)
		}
		coinA, coinB := coins[indexA], coinsB[indexB]
		switch strings.Compare(coinA.Denom, coinB.Denom) {
		case -1:
			sum = append(sum, coinA)
			indexA++
		case 0:
			if coinA.Amount+coinB.Amount == 0 {
				// ignore 0 sum coin type
			} else {
				sum = append(sum, coinA.Plus(coinB))
			}
			indexA++
			indexB++
		case 1:
			sum = append(sum, coinB)
			indexB++
		}
	}
}
// IsEqual returns true if the two sets of Coins have the same
// denominations and amounts, in the same order.
func (coins Coins) IsEqual(coinsB Coins) bool {
	if len(coins) != len(coinsB) {
		return false
	}
	for i := 0; i < len(coins); i++ {
		// Direct != comparison replaces the original awkward !(a == b).
		if coins[i].Denom != coinsB[i].Denom || coins[i].Amount != coinsB[i].Amount {
			return false
		}
	}
	return true
}
// IsZero reports whether every coin in the set has a zero amount.
// An empty set is zero.
func (coins Coins) IsZero() bool {
	for _, c := range coins {
		if !c.IsZero() {
			return false
		}
	}
	return true
}

// IsNotNegative reports whether no coin in the set has a negative
// amount. An empty set is non-negative.
func (coins Coins) IsNotNegative() bool {
	if len(coins) == 0 {
		return true
	}
	for _, c := range coins {
		if !c.IsNotNegative() {
			return false
		}
	}
	return true
}
// AmountOf returns the amount held for denom, or 0 if absent.
// Implemented as a recursive binary search, so it relies on the set
// being sorted by Denom (see Sort / IsValid).
func (coins Coins) AmountOf(denom string) int64 {
	switch len(coins) {
	case 0:
		return 0
	case 1:
		coin := coins[0]
		if coin.Denom == denom {
			return coin.Amount
		}
		return 0
	default:
		midIdx := len(coins) / 2 // 2:1, 3:1, 4:2
		coin := coins[midIdx]
		if denom < coin.Denom {
			return coins[:midIdx].AmountOf(denom)
		} else if denom == coin.Denom {
			return coin.Amount
		} else {
			return coins[midIdx+1:].AmountOf(denom)
		}
	}
}

// Sort interface
//nolint
func (coins Coins) Len() int           { return len(coins) }
func (coins Coins) Less(i, j int) bool { return coins[i].Denom < coins[j].Denom }
func (coins Coins) Swap(i, j int)      { coins[i], coins[j] = coins[j], coins[i] }

// Sort is a helper function to sort the set of coins inplace by
// denomination, and returns the receiver for chaining.
func (coins Coins) Sort() Coins {
	sort.Sort(coins)
	return coins
}
package pcg
// T is a pcg generator (PCG-XSH-RR: a 64-bit LCG state with a 32-bit
// xorshift/rotate output function). The zero value is valid.
type T struct{ state uint64 }

// mul is the multiplier for the LCG step; inc is the increment.
const (
	mul = 6364136223846793005
	inc = 11981177638785157926
)

// New constructs a pcg with the given state.
func New(state uint64) T { return T{state} }

// next advances and returns the state.
// Not safe for concurrent callers.
func (p *T) next() uint64 {
	p.state = p.state*mul + inc
	return p.state
}

// Uint32 returns a random uint32 derived from the advanced state via
// the XSH-RR output permutation.
// Not safe for concurrent callers.
func (p *T) Uint32() uint32 {
	state := p.next()
	xor := uint32(((state >> 18) ^ state) >> 27)
	shift := uint(state>>59) & 31
	return xor>>shift | xor<<(32-shift)
}

// Uint32n returns a uint32 uniformly in [0, n), using Lemire's
// multiply-shift method with rejection to avoid modulo bias.
// Returns 0 when n == 0.
// Not safe for concurrent callers.
func (p *T) Uint32n(n uint32) uint32 {
	if n == 0 {
		return 0
	}
	x := p.Uint32()
	m := uint64(x) * uint64(n)
	l := uint32(m)
	if l < n {
		// Compute the rejection threshold (-n mod n) cheaply, avoiding a
		// division in the common case.
		t := -n
		if t >= n {
			t -= n
			if t >= n {
				t = t % n
			}
		}
	again:
		if l < t {
			x = p.Uint32()
			m = uint64(x) * uint64(n)
			l = uint32(m)
			goto again
		}
	}
	return uint32(m >> 32)
}

// Uint64 returns a random uint64 built from two consecutive 32-bit
// outputs (first draw in the high half).
// Not safe for concurrent callers.
func (p *T) Uint64() uint64 {
	state1 := p.next()
	state2 := p.next()
	xor1 := uint32(((state1 >> 18) ^ state1) >> 27)
	shift1 := uint(state1>>59) & 31
	xor2 := uint32(((state2 >> 18) ^ state2) >> 27)
	shift2 := uint(state2>>59) & 31
	return uint64(xor1>>shift1|xor1<<(32-shift1))<<32 |
		uint64(xor2>>shift2|xor2<<(32-shift2))
}

// Float64 returns a float64 uniformly in [0, 1), using the top 53 bits
// of a 64-bit sample.
// BUG FIX: the previous version OR'd the two 32-bit halves together
// without shifting the high half left by 32 (compare Uint64), so every
// result fell in [0, 2^-32) instead of being uniform over [0, 1).
// Not safe for concurrent callers.
func (p *T) Float64() float64 {
	for {
		out := float64(p.Uint64()>>(64-53)) / (1 << 53)
		// Defensive guard against rounding up to exactly 1.
		if out != 1 {
			return out
		}
	}
}

// Float32 returns a float32 uniformly in [0, 1), using the top 24 bits
// of a 32-bit sample.
// Not safe for concurrent callers.
func (p *T) Float32() float32 {
	for {
		out := float32(p.Uint32()>>(32-24)) / (1 << 24)
		if out != 1 {
			return out
		}
	}
}
package coapmsg
// OptionId identifies an option in a CoAP message.
type OptionId uint16

/*
   +-----+----+---+---+---+----------------+--------+--------+---------+
   | No. | C  | U | N | R | Name           | Format | Length | Default |
   +-----+----+---+---+---+----------------+--------+--------+---------+
   |   1 | x  |   |   | x | If-Match       | opaque | 0-8    | (none)  |
   |   3 | x  | x | - |   | Uri-Host       | string | 1-255  | (see    |
   |     |    |   |   |   |                |        |        | below)  |
   |   4 |    |   |   | x | ETag           | opaque | 1-8    | (none)  |
   |   5 | x  |   |   |   | If-None-Match  | empty  | 0      | (none)  |
   |   7 | x  | x | - |   | Uri-Port       | uint   | 0-2    | (see    |
   |     |    |   |   |   |                |        |        | below)  |
   |   8 |    |   |   | x | Location-Path  | string | 0-255  | (none)  |
   |  11 | x  | x | - | x | Uri-Path       | string | 0-255  | (none)  |
   |  12 |    |   |   |   | Content-Format | uint   | 0-2    | (none)  |
   |  14 |    | x | - |   | Max-Age        | uint   | 0-4    | 60      |
   |  15 | x  | x | - | x | Uri-Query      | string | 0-255  | (none)  |
   |  17 | x  |   |   |   | Accept         | uint   | 0-2    | (none)  |
   |  20 |    |   |   | x | Location-Query | string | 0-255  | (none)  |
   |  35 | x  | x | - |   | Proxy-Uri      | string | 1-1034 | (none)  |
   |  39 | x  | x | - |   | Proxy-Scheme   | string | 1-255  | (none)  |
   |  60 |    |   | x |   | Size1          | uint   | 0-4    | (none)  |
   +-----+----+---+---+---+----------------+--------+--------+---------+
   C=Critical, U=Unsafe, N=NoCacheKey, R=Repeatable
*/

// Option IDs.
//go:generate stringer -type=OptionId
const (
	IfMatch       OptionId = 1
	URIHost       OptionId = 3
	ETag          OptionId = 4
	IfNoneMatch   OptionId = 5
	Observe       OptionId = 6
	URIPort       OptionId = 7
	LocationPath  OptionId = 8
	URIPath       OptionId = 11
	ContentFormat OptionId = 12
	MaxAge        OptionId = 14
	URIQuery      OptionId = 15
	Accept        OptionId = 17
	LocationQuery OptionId = 20
	ProxyURI      OptionId = 35
	ProxyScheme   OptionId = 39
	Size1         OptionId = 60
)

// Critical reports whether the option is critical (lowest bit set):
// an endpoint that does not recognize it must reject the message.
func (o OptionId) Critical() bool {
	return uint16(o)&1 != 0
}

// UnSafe reports whether the option is unsafe to forward (bit 1 set);
// "Unsafe to forward" proxies will not forward unsafe options.
func (o OptionId) UnSafe() bool {
	// The redundant uint16(2) conversion of the literal was dropped.
	return uint16(o)&2 != 0
}

// NoCacheKey reports whether the option is excluded from the cache key.
// NoCacheKey only has a meaning for options that are Safe-to-Forward.
func (o OptionId) NoCacheKey() bool {
	// The redundant bool(...) conversion around the comparison was dropped.
	return o&0x1e == 0x1c
}
package gator
import (
"errors"
"reflect"
"strconv"
"github.com/ShaleApps/gator/Godeps/_workspace/src/github.com/onsi/gomega/matchers"
)
const (
	regexEmail    = `^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$`
	regexHexColor = `^#([A-Fa-f0-9]{6}|[A-Fa-f0-9]{3})$`
	regexURL      = `^(https?:\/\/)?([\da-z\.-]+)\.([a-z\.]{2,6})([\/\w \.-]*)*\/?$`
	regexIP       = `^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$`
	// NOTE(review): regexNum requires a leading non-zero digit, so it
	// rejects "0" and values like "0.5" — confirm this is intended.
	regexNum   = `^[1-9]\d*(\.\d+)?$`
	regexAlpha = `^[a-zA-Z]*$`
)

// Func is a validation function that returns an error if v is invalid.
type Func func(name string, v interface{}) error

// Matches returns a Func that validates against the given regex.
func Matches(regex string) Func {
	m := &matchers.MatchRegexpMatcher{Regexp: regex}
	return match(m)
}

// Nonzero returns a Func that validates its value is non-zero.
// http://golang.org/pkg/reflect/#Zero
func Nonzero() Func {
	return func(name string, v interface{}) error {
		m := &matchers.BeZeroMatcher{}
		zero, _ := m.Match(v)
		if zero {
			return formatError(name)
		}
		return nil
	}
}

// Eq returns a Func that validates its value is equal to v. Eq uses a numerical
// comparison for built-in number types. For example 1.0 of type float64 would equal
// 1 of type int. All other types are compared using reflect.DeepEquals except when
// the value is a built-in number type and v is a string. Strings are converted into
// numbers if parsable to support struct tags.
func Eq(v interface{}) Func {
	return func(k string, ov interface{}) error {
		switch ov.(type) {
		case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64, float32, float64:
			// Numeric value under test: if the expected value came in as
			// a string (struct tag), parse it to a float first.
			switch vt := v.(type) {
			case string:
				n, err := strconv.ParseFloat(vt, 64)
				if err != nil {
					return formatError(k)
				}
				f := numericalMatch("==", n)
				return f(k, ov)
			}
			f := numericalMatch("==", v)
			return f(k, ov)
		}
		// Non-numeric values: deep structural equality.
		if !reflect.DeepEqual(v, ov) {
			return formatError(k)
		}
		return nil
	}
}

// Email returns a Func that validates its value is an email address.
func Email() Func {
	return Matches(regexEmail)
}

// HexColor returns a Func that validates its value is a hexidecimal number prefixed by a hash.
// HTML standard link: http://www.w3.org/TR/REC-html40/types.html#h-6.5
func HexColor() Func {
	return Matches(regexHexColor)
}

// URL returns a Func that validates its value is a URL.
func URL() Func {
	return Matches(regexURL)
}

// IP returns a Func that validates its value is an IP address (IPv4 only,
// per the regex above).
func IP() Func {
	return Matches(regexIP)
}

// Alpha returns a Func that validates its value contains only letters.
func Alpha() Func {
	return Matches(regexAlpha)
}

// Num returns a Func that validates its value contains only numbers.
func Num() Func {
	return Matches(regexNum)
}

// AlphaNum returns a Func that validates its value contains both numbers and letters.
func AlphaNum() Func {
	return combineFuncs(Matches("[a-zA-Z]+"), Matches("[0-9]+"))
}

// Gt returns a Func that validates its value is a number greater than v.
func Gt(v interface{}) Func {
	return numericalMatch(">", v)
}

// Gte returns a Func that validates its value is a number greater than or equal to v.
func Gte(v interface{}) Func {
	return numericalMatch(">=", v)
}

// Lt returns a Func that validates its value is a number less than v.
func Lt(v interface{}) Func {
	return numericalMatch("<", v)
}

// Lte returns a Func that validates its value is a number less than or equal to v.
func Lte(v interface{}) Func {
	return numericalMatch("<=", v)
}

// Lat returns a Func that validates its value is a decimal between 90 and -90.
func Lat() Func {
	return combineFuncs(
		numericalMatch("<=", 90.0),
		numericalMatch(">=", -90.0))
}

// Lon returns a Func that validates its value is a decimal between 180 and -180.
func Lon() Func {
	return combineFuncs(
		numericalMatch("<=", 180.0),
		numericalMatch(">=", -180.0))
}
// In returns a Func that validates its value is in the inputed list.
// Comparisons use reflect.DeepEqual.
func In(list []interface{}) Func {
	return func(k string, v interface{}) error {
		for _, candidate := range list {
			if reflect.DeepEqual(candidate, v) {
				return nil
			}
		}
		return formatError(k)
	}
}

// NotIn returns a Func that validates its value is not in the inputed
// list. Comparisons use reflect.DeepEqual.
func NotIn(list []interface{}) Func {
	return func(k string, v interface{}) error {
		for _, candidate := range list {
			if reflect.DeepEqual(candidate, v) {
				return formatError(k)
			}
		}
		return nil
	}
}
// Len returns a Func that validates its value's length is exactly l.
func Len(l int) Func {
	return func(k string, v interface{}) error {
		if n, ok := lengthOf(v); ok && n == l {
			return nil
		}
		return formatError(k)
	}
}

// MinLen returns a Func that validates its value's length is at least l.
func MinLen(l int) Func {
	return func(k string, v interface{}) error {
		if n, ok := lengthOf(v); ok && n >= l {
			return nil
		}
		return formatError(k)
	}
}

// MaxLen returns a Func that validates its value's length is at most l.
func MaxLen(l int) Func {
	return func(k string, v interface{}) error {
		if n, ok := lengthOf(v); ok && n <= l {
			return nil
		}
		return formatError(k)
	}
}
// Each returns a Func that applies every given validation function to
// each element of an array or slice value.
func Each(funcs ...Func) Func {
	return func(k string, v interface{}) error {
		if !isArrayOrSlice(v) {
			return formatError(k)
		}
		rv := reflect.ValueOf(v)
		for i := 0; i < rv.Len(); i++ {
			elem := rv.Index(i).Interface()
			for _, f := range funcs {
				if f(k, elem) != nil {
					return formatError(k)
				}
			}
		}
		return nil
	}
}
// matcher abstracts the gomega matcher contract used internally.
type matcher interface {
	Match(actual interface{}) (success bool, err error)
}

// numericalMatch builds a Func from a gomega numerical comparison
// (comparator is one of "==", ">", ">=", "<", "<=") against v.
func numericalMatch(comparator string, v interface{}) Func {
	m := &matchers.BeNumericallyMatcher{
		Comparator: comparator,
		CompareTo:  []interface{}{v},
	}
	return match(m)
}

// match adapts a matcher into a Func; match errors are deliberately
// ignored and treated as a validation failure.
func match(m matcher) Func {
	return func(name string, v interface{}) error {
		matches, _ := m.Match(v)
		if !matches {
			return formatError(name)
		}
		return nil
	}
}
func combineFuncs(funcs ...Func) Func {
return func(name string, v interface{}) error {
for _, f := range funcs {
if err := f(name, v); err != nil {
return err
}
}
return nil
}
}
func formatError(name string) error {
return errors.New(name + " did not pass validation.")
} | func.go | 0.71103 | 0.409398 | func.go | starcoder |
package memqueue
import (
"fmt"
"github.com/elastic/elastic-agent-libs/logp"
)
// Internal event ring buffer.
// The ring is split into 2 contiguous regions.
// Events are appended to region A until it grows to the end of the internal
// buffer. Then region B is created at the beginning of the internal buffer,
// and events are inserted there until region A is emptied. When A becomes empty,
// we rename region B to region A, and the cycle repeats every time we wrap around
// the internal array storage.
type ringBuffer struct {
	logger *logp.Logger

	entries []queueEntry

	// The underlying array is divided up into two contiguous regions.
	regA, regB region

	// The number of events awaiting ACK at the beginning of region A.
	reserved int
}

// region represents a contiguous region in ringBuffer's internal storage (i.e.
// one that does not cross the end of the array).
type region struct {
	// The starting position of this region within the full event buffer.
	index int

	// The number of events currently stored in this region.
	size int
}

// clientState ties a buffered event back to the producer that pushed it.
type clientState struct {
	seq   uint32        // event sequence number
	state *produceState // the producer it's state used to compute and signal the ACK count
}

// init (re)initializes the buffer with a fresh entry array of the given
// capacity, discarding any previous state.
func (b *ringBuffer) init(logger *logp.Logger, size int) {
	*b = ringBuffer{
		logger:  logger,
		entries: make([]queueEntry, size),
	}
}

// insert appends the event to the buffer: into region B if it exists,
// otherwise into region A, creating region B when A has grown to the end
// of the array and space is free at the front.
// insert returns nothing; if no slot is free the event is silently
// dropped, so callers are expected to check Full() before inserting.
func (b *ringBuffer) insert(event interface{}, client clientState) {
	// always insert into region B, if region B exists.
	// That is, we have 2 regions and region A is currently processed by consumers
	if b.regB.size > 0 {
		// log.Debug("  - push into B region")

		idx := b.regB.index + b.regB.size
		avail := b.regA.index - idx
		if avail > 0 {
			b.entries[idx] = queueEntry{event, client}
			b.regB.size++
		}
		return
	}

	// region B does not exist yet, check if region A is available for use
	idx := b.regA.index + b.regA.size
	if b.regA.index+b.regA.size >= len(b.entries) {
		// region A extends to the end of the buffer
		if b.regA.index > 0 {
			// If there is space before region A, create
			// region B there.
			b.regB = region{index: 0, size: 1}
			b.entries[0] = queueEntry{event, client}
		}
		return
	}

	// space available in region A -> let's append the event
	// log.Debug("  - push into region A")
	b.entries[idx] = queueEntry{event, client}
	b.regA.size++
}

// cancel removes all buffered events matching `st`, not yet reserved by
// any consumer
func (b *ringBuffer) cancel(st *produceState) int {
	// Region B is never reserved, so all of it is eligible; in region A
	// only the tail after the reserved prefix may be removed.
	cancelledB := b.cancelRegion(st, b.regB)
	b.regB.size -= cancelledB

	cancelledA := b.cancelRegion(st, region{
		index: b.regA.index + b.reserved,
		size:  b.regA.size - b.reserved,
	})
	b.regA.size -= cancelledA

	return cancelledA + cancelledB
}

// cancelRegion removes the events in the specified range having
// the specified produceState. It returns the number of events
// removed.
func (b *ringBuffer) cancelRegion(st *produceState, reg region) int {
	start := reg.index
	end := start + reg.size
	entries := b.entries[start:end]

	toEntries := entries[:0]

	// filter loop: compact surviving entries to the front of the range.
	for i := 0; i < reg.size; i++ {
		if entries[i].client.state == st {
			continue // remove
		}
		toEntries = append(toEntries, entries[i])
	}

	// re-initialize old buffer elements to help garbage collector
	entries = entries[len(toEntries):]
	for i := range entries {
		entries[i] = queueEntry{}
	}

	return len(entries)
}

// reserve returns up to `sz` events from the brokerBuffer,
// exclusively marking the events as 'reserved'. Subsequent calls to `reserve`
// will only return enqueued and non-reserved events from the buffer.
// If `sz == -1`, all available events will be reserved.
// It returns the absolute start index and the reserved entries slice.
func (b *ringBuffer) reserve(sz int) (int, []queueEntry) {
	use := b.regA.size - b.reserved
	if sz > 0 && use > sz {
		use = sz
	}

	start := b.regA.index + b.reserved
	end := start + use
	b.reserved += use
	return start, b.entries[start:end]
}
// removeEntries drops `count` previously-reserved entries from the start
// of region A. Called by the event loop when events are ACKed by
// consumers. Freed slots are zeroed so the garbage collector can reclaim
// the events; when region A drains completely, region B is promoted.
func (b *ringBuffer) removeEntries(count int) {
	if b.regA.size < count {
		// Removing more than region A holds indicates a bookkeeping bug.
		// (Fixed message typo: "to big" -> "too big".)
		panic(fmt.Errorf("commit region too big (commit region=%v, buffer size=%v)",
			count, b.regA.size,
		))
	}

	// clear region, so published events can be collected by the garbage collector:
	end := b.regA.index + count
	for i := b.regA.index; i < end; i++ {
		b.entries[i] = queueEntry{}
	}

	b.regA.index = end
	b.regA.size -= count
	b.reserved -= count
	if b.regA.size == 0 {
		// region A is empty, transfer region B into region A
		b.regA = b.regB
		b.regB.index = 0
		b.regB.size = 0
	}
}
// Avail reports how many enqueued events consumers can currently
// request: the size of region A minus the already-reserved prefix.
func (b *ringBuffer) Avail() int {
	available := b.regA.size - b.reserved
	return available
}
// Full reports whether the ring buffer has no room left for new
// entries.
func (b *ringBuffer) Full() bool {
	if b.regB.size == 0 {
		// Only region A is in use; the buffer is full once A spans
		// the whole entries slice.
		return b.regA.size == len(b.entries)
	}
	// Region B is in use; the buffer is full once B has grown right
	// up to the start of region A.
	return b.regB.index+b.regB.size == b.regA.index
}
// Size returns the total capacity of the ring buffer in entries.
func (b *ringBuffer) Size() int {
	capacity := len(b.entries)
	return capacity
}
// Items returns the count of events currently in the buffer
func (b *ringBuffer) Items() int {
return b.regA.size + b.regB.size
} | libbeat/publisher/queue/memqueue/ringbuf.go | 0.766905 | 0.513729 | ringbuf.go | starcoder |
package dns
// NameUsed sets the RRs in the prereq section to
// "Name is in use" RRs. RFC 2136 section 2.4.4.
func (u *Msg) NameUsed(rr []RR) {
	if u.Answer == nil {
		u.Answer = make([]RR, 0, len(rr))
	}
	for _, record := range rr {
		hdr := RR_Header{Name: record.Header().Name, Ttl: 0, Rrtype: TypeANY, Class: ClassANY}
		u.Answer = append(u.Answer, &ANY{Hdr: hdr})
	}
}
// NameNotUsed sets the RRs in the prereq section to
// "Name is not in use" RRs. RFC 2136 section 2.4.5.
func (u *Msg) NameNotUsed(rr []RR) {
	if u.Answer == nil {
		u.Answer = make([]RR, 0, len(rr))
	}
	for _, record := range rr {
		hdr := RR_Header{Name: record.Header().Name, Ttl: 0, Rrtype: TypeANY, Class: ClassNONE}
		u.Answer = append(u.Answer, &ANY{Hdr: hdr})
	}
}
// Used sets the RRs in the prereq section to
// "RRset exists (value dependent -- with rdata)" RRs. RFC 2136 section 2.4.2.
// It panics if the question section is empty, because the zone class
// is taken from the first question.
func (u *Msg) Used(rr []RR) {
	if len(u.Question) == 0 {
		panic("dns: empty question section")
	}
	if u.Answer == nil {
		u.Answer = make([]RR, 0, len(rr))
	}
	for _, record := range rr {
		// The prerequisite class must match the zone class.
		record.Header().Class = u.Question[0].Qclass
		u.Answer = append(u.Answer, record)
	}
}
// RRsetUsed sets the RRs in the prereq section to
// "RRset exists (value independent -- no rdata)" RRs. RFC 2136 section 2.4.1.
func (u *Msg) RRsetUsed(rr []RR) {
	if u.Answer == nil {
		u.Answer = make([]RR, 0, len(rr))
	}
	for _, record := range rr {
		src := record.Header()
		hdr := RR_Header{Name: src.Name, Ttl: 0, Rrtype: src.Rrtype, Class: ClassANY}
		u.Answer = append(u.Answer, &ANY{Hdr: hdr})
	}
}

// RRsetNotUsed sets the RRs in the prereq section to
// "RRset does not exist" RRs. RFC 2136 section 2.4.3.
func (u *Msg) RRsetNotUsed(rr []RR) {
	if u.Answer == nil {
		u.Answer = make([]RR, 0, len(rr))
	}
	for _, record := range rr {
		src := record.Header()
		hdr := RR_Header{Name: src.Name, Ttl: 0, Rrtype: src.Rrtype, Class: ClassNONE}
		u.Answer = append(u.Answer, &ANY{Hdr: hdr})
	}
}
// Insert creates a dynamic update packet that adds a complete RRset,
// see RFC 2136 section 2.5.1.
// It panics if the question section is empty, because the zone class
// is taken from the first question.
func (u *Msg) Insert(rr []RR) {
	if len(u.Question) == 0 {
		panic("dns: empty question section")
	}
	if u.Ns == nil {
		u.Ns = make([]RR, 0, len(rr))
	}
	for _, record := range rr {
		record.Header().Class = u.Question[0].Qclass
		u.Ns = append(u.Ns, record)
	}
}
// RemoveRRset creates a dynamic update packet that deletes an RRset,
// see RFC 2136 section 2.5.2.
func (u *Msg) RemoveRRset(rr []RR) {
	if u.Ns == nil {
		u.Ns = make([]RR, 0, len(rr))
	}
	for _, record := range rr {
		src := record.Header()
		hdr := RR_Header{Name: src.Name, Ttl: 0, Rrtype: src.Rrtype, Class: ClassANY}
		u.Ns = append(u.Ns, &ANY{Hdr: hdr})
	}
}

// RemoveName creates a dynamic update packet that deletes all RRsets
// of a name, see RFC 2136 section 2.5.3.
func (u *Msg) RemoveName(rr []RR) {
	if u.Ns == nil {
		u.Ns = make([]RR, 0, len(rr))
	}
	for _, record := range rr {
		hdr := RR_Header{Name: record.Header().Name, Ttl: 0, Rrtype: TypeANY, Class: ClassANY}
		u.Ns = append(u.Ns, &ANY{Hdr: hdr})
	}
}
// Remove creates a dynamic update packet deletes RR from a RRSset, see RFC 2136 section 2.5.4
func (u *Msg) Remove(rr []RR) {
if u.Ns == nil {
u.Ns = make([]RR, 0, len(rr))
}
for _, r := range rr {
h := r.Header()
h.Class = ClassNONE
h.Ttl = 0
u.Ns = append(u.Ns, r)
}
} | vendor/github.com/miekg/dns/update.go | 0.546496 | 0.401629 | update.go | starcoder |
package menge
import (
"fmt"
"strings"
)
// Complex128Set is a set of complex128 values, backed by a map with
// empty-struct values.
type Complex128Set map[complex128]struct{}

// Add inserts zero or more elements into the set.
func (s Complex128Set) Add(elems ...complex128) {
	for _, elem := range elems {
		s[elem] = struct{}{}
	}
}

// Remove deletes zero or more elements from the set.
func (s Complex128Set) Remove(elems ...complex128) {
	for _, elem := range elems {
		delete(s, elem)
	}
}

// Empty removes every element from the set in place.
func (s Complex128Set) Empty() {
	for elem := range s {
		delete(s, elem)
	}
}

// Has reports whether elem is a member of the set.
func (s Complex128Set) Has(elem complex128) bool {
	_, found := s[elem]
	return found
}

// Size returns the number of elements in the set.
func (s Complex128Set) Size() int {
	return len(s)
}

// IsEmpty reports whether the set contains no elements.
func (s Complex128Set) IsEmpty() bool {
	return s.Size() == 0
}
// Clone returns a new set containing exactly the elements of s.
func (s Complex128Set) Clone() Complex128Set {
	clone := make(Complex128Set, len(s))
	for elem := range s {
		clone[elem] = struct{}{}
	}
	return clone
}

// AsSlice returns the elements of the set as a slice, in no
// particular order.
func (s Complex128Set) AsSlice() []complex128 {
	elems := make([]complex128, len(s))
	idx := 0
	for elem := range s {
		elems[idx] = elem
		idx++
	}
	return elems
}
// String returns a textual representation of the set, such as
// "{(1+2i) (3+0i)}". Element order is unspecified.
func (s Complex128Set) String() string {
	var b strings.Builder
	b.Grow(len(s) * 8)
	b.WriteString("{")
	first := true
	for elem := range s {
		if !first {
			b.WriteString(" ")
		}
		first = false
		fmt.Fprintf(&b, "%v", elem)
	}
	b.WriteString("}")
	return b.String()
}
// Equals reports whether s and t contain exactly the same elements.
func (s Complex128Set) Equals(t Complex128Set) bool {
	if len(s) != len(t) {
		return false
	}
	for elem := range s {
		if _, found := t[elem]; !found {
			return false
		}
	}
	return true
}
// Union returns a new set holding every element of s together with
// every element of t.
func (s Complex128Set) Union(t Complex128Set) Complex128Set {
	union := make(Complex128Set, len(s)+len(t))
	for elem := range s {
		union[elem] = struct{}{}
	}
	for elem := range t {
		union[elem] = struct{}{}
	}
	return union
}

// Intersection returns a new set holding the elements common to s and
// t. The smaller set is iterated to minimize work.
func (s Complex128Set) Intersection(t Complex128Set) Complex128Set {
	small, large := s, t
	if len(t) < len(s) {
		small, large = t, s
	}
	result := make(Complex128Set, len(small))
	for elem := range small {
		if _, found := large[elem]; found {
			result[elem] = struct{}{}
		}
	}
	return result
}
// Difference returns a new set holding the elements of s that are
// not in t, i.e. s - t.
func (s Complex128Set) Difference(t Complex128Set) Complex128Set {
	diff := make(Complex128Set, len(s))
	for elem := range s {
		if _, found := t[elem]; !found {
			diff[elem] = struct{}{}
		}
	}
	return diff
}

// IsSubsetOf reports whether every element of s is also in t.
func (s Complex128Set) IsSubsetOf(t Complex128Set) bool {
	for elem := range s {
		if _, found := t[elem]; !found {
			return false
		}
	}
	return true
}
// IsProperSubsetOf reports whether s is a subset of t and s != t.
func (s Complex128Set) IsProperSubsetOf(t Complex128Set) bool {
	for elem := range s {
		if _, found := t[elem]; !found {
			return false
		}
	}
	return len(s) != len(t)
}

// IsSupersetOf reports whether every element of t is also in s.
func (s Complex128Set) IsSupersetOf(t Complex128Set) bool {
	for elem := range t {
		if _, found := s[elem]; !found {
			return false
		}
	}
	return true
}

// IsProperSupersetOf reports whether s is a superset of t and s != t.
func (s Complex128Set) IsProperSupersetOf(t Complex128Set) bool {
	for elem := range t {
		if _, found := s[elem]; !found {
			return false
		}
	}
	return len(s) != len(t)
}
// IsDisjointFrom indicates whether s and t are disjoint.
func (s Complex128Set) IsDisjointFrom(t Complex128Set) bool {
var small, large Complex128Set
if len(s) <= len(t) {
small, large = s, t
} else {
small, large = t, s
}
for e := range small {
if _, ok := large[e]; ok {
return false
}
}
return true
}
// NewComplex128Set returns a new Complex128Set containing zero or more elements.
func NewComplex128Set(elems ...complex128) Complex128Set {
s := make(Complex128Set, len(elems))
s.Add(elems...)
return s
} | complex128.go | 0.794624 | 0.461441 | complex128.go | starcoder |
package neural
// This file implements a simple feed-forward neural network trained
// with backpropagation.
import (
"errors"
"log"
"math/rand"
"time"
"gonum.org/v1/gonum/floats"
"gonum.org/v1/gonum/mat"
)
// Package-level RNG used to randomize weights and biases.
// NOTE(review): rand.Rand is not safe for concurrent use; presumably
// networks are created/reset from a single goroutine — confirm.
var randSource = rand.NewSource(time.Now().UnixNano())
var randGen = rand.New(randSource)
// Network contains all of the information
// that defines a trained neural network.
type Network struct {
	config NetworkConfig
	IsTrained bool // set true by Train on success; Predict refuses to run when false
	wHidden *mat.Dense // input->hidden weights (InputNeurons x HiddenNeurons)
	bHidden *mat.Dense // hidden-layer biases (1 x HiddenNeurons)
	wOut *mat.Dense // hidden->output weights (HiddenNeurons x OutputNeurons)
	bOut *mat.Dense // output-layer biases (1 x OutputNeurons)
}
// NewNetwork allocates a Network with the dimensions given in config
// and randomizes its weights and biases via Reset. The returned
// network is not yet trained.
func NewNetwork(config NetworkConfig) *Network {
	nn := &Network{
		config:  config,
		wHidden: mat.NewDense(config.InputNeurons, config.HiddenNeurons, nil),
		bHidden: mat.NewDense(1, config.HiddenNeurons, nil),
		wOut:    mat.NewDense(config.HiddenNeurons, config.OutputNeurons, nil),
		bOut:    mat.NewDense(1, config.OutputNeurons, nil),
	}
	nn.Reset()
	return nn
}
// randomizeMatrix overwrites every entry of matrix with a uniform
// random value in [0, 1), writing through the raw backing slice.
func randomizeMatrix(matrix *mat.Dense) {
	data := matrix.RawMatrix().Data
	for i := range data {
		data[i] = randGen.Float64()
	}
}
// Reset untrains the network by re-randomizing the weight and bias
// matrices of both the hidden and output layers.
func (nn *Network) Reset() {
	nn.IsTrained = false
	for _, m := range []*mat.Dense{nn.wHidden, nn.bHidden, nn.wOut, nn.bOut} {
		randomizeMatrix(m)
	}
}
// Train fits the network to the given inputs and one-hot labels using
// backpropagation, marking the network as trained on success.
func (nn *Network) Train(inputs, labels *mat.Dense) error {
	out := new(mat.Dense)
	if err := nn.backpropagate(inputs, labels, nn.wHidden, nn.bHidden, nn.wOut, nn.bOut, out); err != nil {
		return err
	}
	nn.IsTrained = true
	return nil
}
// TestAccuracy runs the trained model over inputs and returns the
// subset accuracy: the fraction of rows whose highest-scoring output
// column matches the one-hot label column.
// NOTE(review): log.Fatal terminates the whole process if Predict
// fails (e.g. network untrained); returning an error would be
// friendlier for library use.
func (nn *Network) TestAccuracy(inputs, labels *mat.Dense) float64 {
	// Make the predictions using the trained model.
	predictions, err := nn.Predict(inputs)
	if err != nil {
		log.Fatal(err)
	}
	// Calculate the accuracy of our model.
	var truePosNeg int
	numPreds, _ := predictions.Dims()
	for i := 0; i < numPreds; i++ {
		// Get the label: the index of the 1.0 entry in the one-hot row.
		labelRow := mat.Row(nil, i, labels)
		var prediction int
		for idx, label := range labelRow {
			if label == 1.0 {
				prediction = idx
				break
			}
		}
		// Accumulate the true positive/negative count: correct when
		// the labeled column holds the row's maximum score.
		if predictions.At(i, prediction) == floats.Max(mat.Row(nil, i, predictions)) {
			truePosNeg++
		}
	}
	// Calculate the accuracy (subset accuracy).
	return float64(truePosNeg) / float64(numPreds)
}
// Predict runs a forward pass through the trained network and returns
// the output-layer activations, one row per input row.
// It returns an error if the network has not been trained.
func (nn *Network) Predict(inputs *mat.Dense) (*mat.Dense, error) {
	if !nn.IsTrained {
		return nil, errors.New("cannot predict - network is not trained")
	}
	// Define the output of the neural network.
	output := new(mat.Dense)
	// Complete the feed forward process:
	// hidden = sigmoid(inputs*wHidden + bHidden), bias added per column.
	hiddenLayerInput := new(mat.Dense)
	hiddenLayerInput.Mul(inputs, nn.wHidden)
	addBHidden := func(_, col int, v float64) float64 { return v + nn.bHidden.At(0, col) }
	hiddenLayerInput.Apply(addBHidden, hiddenLayerInput)
	hiddenLayerActivations := new(mat.Dense)
	applySigmoid := func(_, _ int, v float64) float64 { return Sigmoid(v) }
	hiddenLayerActivations.Apply(applySigmoid, hiddenLayerInput)
	// output = sigmoid(hidden*wOut + bOut).
	outputLayerInput := new(mat.Dense)
	outputLayerInput.Mul(hiddenLayerActivations, nn.wOut)
	addBOut := func(_, col int, v float64) float64 { return v + nn.bOut.At(0, col) }
	outputLayerInput.Apply(addBOut, outputLayerInput)
	output.Apply(applySigmoid, outputLayerInput)
	return output, nil
}
// backpropagate trains the weight/bias matrices in place by running
// NumEpochs full-batch iterations of forward pass + gradient descent
// with the configured learning rate. The final forward-pass result is
// left in output.
func (nn *Network) backpropagate(x, y, wHidden, bHidden, wOut, bOut, output *mat.Dense) error {
	// Loop over the number of epochs utilizing
	// backpropagation to train our model.
	for i := 0; i < nn.config.NumEpochs; i++ {
		// Complete the feed forward process:
		// hidden = sigmoid(x*wHidden + bHidden); output = sigmoid(hidden*wOut + bOut).
		hiddenLayerInput := new(mat.Dense)
		hiddenLayerInput.Mul(x, wHidden)
		addBHidden := func(_, col int, v float64) float64 { return v + bHidden.At(0, col) }
		hiddenLayerInput.Apply(addBHidden, hiddenLayerInput)
		hiddenLayerActivations := new(mat.Dense)
		applySigmoid := func(_, _ int, v float64) float64 { return Sigmoid(v) }
		hiddenLayerActivations.Apply(applySigmoid, hiddenLayerInput)
		outputLayerInput := new(mat.Dense)
		outputLayerInput.Mul(hiddenLayerActivations, wOut)
		addBOut := func(_, col int, v float64) float64 { return v + bOut.At(0, col) }
		outputLayerInput.Apply(addBOut, outputLayerInput)
		output.Apply(applySigmoid, outputLayerInput)
		// Complete the backpropagation: deltas are the error scaled by
		// the sigmoid derivative at each layer's activations.
		networkError := new(mat.Dense)
		networkError.Sub(y, output)
		slopeOutputLayer := new(mat.Dense)
		applySigmoidPrime := func(_, _ int, v float64) float64 { return SigmoidPrime(v) }
		slopeOutputLayer.Apply(applySigmoidPrime, output)
		slopeHiddenLayer := new(mat.Dense)
		slopeHiddenLayer.Apply(applySigmoidPrime, hiddenLayerActivations)
		dOutput := new(mat.Dense)
		dOutput.MulElem(networkError, slopeOutputLayer)
		errorAtHiddenLayer := new(mat.Dense)
		errorAtHiddenLayer.Mul(dOutput, wOut.T())
		dHiddenLayer := new(mat.Dense)
		dHiddenLayer.MulElem(errorAtHiddenLayer, slopeHiddenLayer)
		// Adjust the parameters: gradient step scaled by LearningRate;
		// bias gradients are the column sums of the deltas.
		wOutAdj := new(mat.Dense)
		wOutAdj.Mul(hiddenLayerActivations.T(), dOutput)
		wOutAdj.Scale(nn.config.LearningRate, wOutAdj)
		wOut.Add(wOut, wOutAdj)
		bOutAdj, err := sumAlongAxis(0, dOutput)
		if err != nil {
			return err
		}
		bOutAdj.Scale(nn.config.LearningRate, bOutAdj)
		bOut.Add(bOut, bOutAdj)
		wHiddenAdj := new(mat.Dense)
		wHiddenAdj.Mul(x.T(), dHiddenLayer)
		wHiddenAdj.Scale(nn.config.LearningRate, wHiddenAdj)
		wHidden.Add(wHidden, wHiddenAdj)
		bHiddenAdj, err := sumAlongAxis(0, dHiddenLayer)
		if err != nil {
			return err
		}
		bHiddenAdj.Scale(nn.config.LearningRate, bHiddenAdj)
		bHidden.Add(bHidden, bHiddenAdj)
	}
	return nil
}
// sumAlongAxis sums a matrix along a
// particular dimension, preserving the
// other dimension.
func sumAlongAxis(axis int, m *mat.Dense) (*mat.Dense, error) {
numRows, numCols := m.Dims()
var output *mat.Dense
switch axis {
case 0:
data := make([]float64, numCols)
for i := 0; i < numCols; i++ {
col := mat.Col(nil, i, m)
data[i] = floats.Sum(col)
}
output = mat.NewDense(1, numCols, data)
case 1:
data := make([]float64, numRows)
for i := 0; i < numRows; i++ {
row := mat.Row(nil, i, m)
data[i] = floats.Sum(row)
}
output = mat.NewDense(numRows, 1, data)
default:
return nil, errors.New("invalid axis, must be 0 or 1")
}
return output, nil
} | neural/network.go | 0.766381 | 0.468791 | network.go | starcoder |
package geom
// A MultiPolygon is a collection of Polygons.
type MultiPolygon struct {
	geom3 // embedded flat-coordinate storage shared by three-level geometries
}
// NewMultiPolygon returns a new, empty MultiPolygon with the given layout.
func NewMultiPolygon(layout Layout) *MultiPolygon {
	return NewMultiPolygonFlat(layout, nil, nil)
}

// NewMultiPolygonFlat returns a new MultiPolygon built from the given
// flat coordinates and per-polygon ring end offsets.
func NewMultiPolygonFlat(layout Layout, flatCoords []float64, endss [][]int) *MultiPolygon {
	mp := &MultiPolygon{}
	mp.layout = layout
	mp.stride = layout.Stride()
	mp.flatCoords = flatCoords
	mp.endss = endss
	return mp
}
// Area returns the sum of the areas of the individual Polygons.
func (mp *MultiPolygon) Area() float64 {
	return doubleArea3(mp.flatCoords, 0, mp.endss, mp.stride) / 2
}

// Clone returns a deep copy.
func (mp *MultiPolygon) Clone() *MultiPolygon {
	coords := make([]float64, len(mp.flatCoords))
	copy(coords, mp.flatCoords)
	clonedEndss := make([][]int, len(mp.endss))
	for i, ends := range mp.endss {
		clonedEnds := make([]int, len(ends))
		copy(clonedEnds, ends)
		clonedEndss[i] = clonedEnds
	}
	return NewMultiPolygonFlat(mp.layout, coords, clonedEndss)
}

// Empty returns true if the collection holds no Polygons.
func (mp *MultiPolygon) Empty() bool {
	return mp.NumPolygons() == 0
}

// Length returns the sum of the perimeters of the Polygons.
func (mp *MultiPolygon) Length() float64 {
	return length3(mp.flatCoords, 0, mp.endss, mp.stride)
}
// MustSetCoords sets the coordinates and panics on any error.
// It returns mp to allow call chaining.
func (mp *MultiPolygon) MustSetCoords(coords [][][]Coord) *MultiPolygon {
	Must(mp.SetCoords(coords))
	return mp
}

// NumPolygons returns the number of Polygons.
func (mp *MultiPolygon) NumPolygons() int {
	return len(mp.endss)
}
// Polygon returns the ith Polygon.
// The flat coordinates of polygon i begin where polygon i-1's last
// ring ended, so the stored end offsets are rebased by that offset
// before constructing the Polygon, which shares mp's backing
// coordinate slice.
func (mp *MultiPolygon) Polygon(i int) *Polygon {
	offset := 0
	if i > 0 {
		ends := mp.endss[i-1]
		offset = ends[len(ends)-1]
	}
	ends := make([]int, len(mp.endss[i]))
	if offset == 0 {
		copy(ends, mp.endss[i])
	} else {
		// Rebase each ring end so it is relative to this polygon's
		// first coordinate.
		for j, end := range mp.endss[i] {
			ends[j] = end - offset
		}
	}
	return NewPolygonFlat(mp.layout, mp.flatCoords[offset:mp.endss[i][len(mp.endss[i])-1]], ends)
}
// Push appends a Polygon.
// It returns ErrLayoutMismatch if p's layout differs from mp's.
// p's ring end offsets are rebased onto the end of mp's existing
// flat coordinates before being recorded.
func (mp *MultiPolygon) Push(p *Polygon) error {
	if p.layout != mp.layout {
		return ErrLayoutMismatch{Got: p.layout, Want: mp.layout}
	}
	offset := len(mp.flatCoords)
	ends := make([]int, len(p.ends))
	if offset == 0 {
		copy(ends, p.ends)
	} else {
		for i, end := range p.ends {
			ends[i] = end + offset
		}
	}
	mp.flatCoords = append(mp.flatCoords, p.flatCoords...)
	mp.endss = append(mp.endss, ends)
	return nil
}
// SetCoords sets the coordinates.
func (mp *MultiPolygon) SetCoords(coords [][][]Coord) (*MultiPolygon, error) {
if err := mp.setCoords(coords); err != nil {
return nil, err
}
return mp, nil
}
// SetSRID sets the SRID of mp.
func (mp *MultiPolygon) SetSRID(srid int) *MultiPolygon {
mp.srid = srid
return mp
}
// Swap swaps the values of mp and mp2.
func (mp *MultiPolygon) Swap(mp2 *MultiPolygon) {
mp.geom3.swap(&mp2.geom3)
} | vendor/github.com/twpayne/go-geom/multipolygon.go | 0.861626 | 0.649203 | multipolygon.go | starcoder |
package man
import (
. "github.com/gocircuit/circuit/gocircuit.org/render"
)
// RenderCommandPage renders the "Command-line client" manual page,
// substituting the two figure placeholders referenced by commandBody.
func RenderCommandPage() string {
	figs := A{
		"FigClient": RenderFigurePngSvg("Circuit client connected to a server.", "client", "500px"),
		"FigServerAnchor": RenderFigurePngSvg("Circuit servers correspond to root-level anchors.", "serveranchor", "500px"),
	}
	return RenderHtml("Command-line client", Render(commandBody, figs))
}
// commandBody is the HTML source of the "Command-line client" manual
// page. {{.FigClient}} and {{.FigServerAnchor}} are template fields
// filled in by RenderCommandPage. (Fixed typo: "for us a dial-in
// point" -> "to use as a dial-in point".)
const commandBody = `
<h2>Using the command-line client</h2>

<p>Once the circuit servers are started, you can create, observe and control
circuit elements (i) interactively—using the circuit binary which doubles as a command-line client—as
well as (ii) programmatically—using the circuit Go client package <code>github.com/gocircuit/circuit/client</code>.
In fact, the circuit command-line tool is simply a front-end for the Go client library.

<p>Clients (the tool or your own) <em>dial into</em> a circuit server in order to
interact with the entire system. All servers are equal citizens in every respect and,
in particular, any one can be used as a choice for dial-in.

{{.FigClient}}

<p>The tool (described in more detail later) is essentially a set of commands that
allow you to traverse the global hierarchical namespace of circuit elements,
and interact with them, somewhat similarly to how one uses the Zookeeper
namespace.

<p>For example, to list the entire circuit cluster anchor hierarchy, type in

<pre>
	circuit ls /
</pre>

<p>So, you might get something like this in response

<pre>
	/X88550014d4c82e4d
	/X938fe923bcdef2390
</pre>

<p>The two root-level anchors correspond to the two circuit servers.

{{.FigServerAnchor}}

<h3>Pointing the tool to your circuit cluster</h3>

<p>Before you can use the <code>circuit</code> tool, you need to tell it how to locate
one circuit server to use as a <em>dial-in</em> point.

<p>There are two ways to provide the dial-in server address to the tool:

<p>1. If the circuit servers were started with the <code>-discover</code> option or the
<code>CIRCUIT_DISCOVER</code> environment variable, the command-line tool
can use the same methods for finding a circuit server. E.g.

<pre>
	circuit ls -discover 172.16.58.3:7711 /...
</pre>

<p>Or,

<pre>
	export CIRCUIT_DISCOVER=172.16.58.3:7711
	circuit ls /...
</pre>

<p>2. With the command-line option <code>-d</code>, like e.g.

<pre>
	circuit ls -d circuit://10.0.0.1:11022/78517/Q56e7a2a0d47a7b5d /
</pre>

<p>Or, equivalently, by setting the environment variable <code>CIRCUIT</code> to point to a file
whose contents is the desired dial-in address. For example, (in bash):

<pre>
	echo circuit://10.0.0.1:11022/78517/Q56e7a2a0d47a7b5d > ~/.circuit
	export CIRCUIT="~/.circuit"
	circuit ls /
</pre>

<p>A list of available tool commands is shown on the help screen

<pre>
	circuit help
</pre>

<p>A more detailed explanation of their meaning and function can be found
in the documentation of the client package, <code>github.com/gocircuit/client</code>.

`
package rx
import (
"sync"
)
//jig:template CombineLatest<Foo>
//jig:needs ObservableObservable<Foo> CombineLatestAll
// CombineLatest will subscribe to all ObservableFoos. It will then wait for
// all of them to emit before emitting the first slice. Whenever any of the
// subscribed observables emits, a new slice will be emitted containing all
// the latest values.
func CombineLatestFoo(observables ...ObservableFoo) ObservableFooSlice {
	return FromObservableFoo(observables...).CombineLatestAll()
}
//jig:template Observable<Foo> CombineLatestWith
//jig:needs ObservableObservable<Foo> CombineLatestAll
// CombineLatestWith will subscribe to its ObservableFoo and all other
// ObservableFoos passed in. It will then wait for all of the ObservableFoos
// to emit before emitting the first slice. Whenever any of the subscribed
// observables emits, a new slice will be emitted containing all the latest
// values.
func (o ObservableFoo) CombineLatestWith(other ...ObservableFoo) ObservableFooSlice {
	return FromObservableFoo(append([]ObservableFoo{o}, other...)...).CombineLatestAll()
}
//jig:template Observable<Foo> CombineLatestMap<Bar>
//jig:needs Observable<Foo> MapObservable<Bar>, ObservableObservable<Bar> CombineLatestAll
// CombineLatestMapBar maps every entry emitted by the ObservableFoo into an
// ObservableBar, and then subscribes to it, until the source observable
// completes. It will then wait for all of the ObservableBars to emit before
// emitting the first slice. Whenever any of the subscribed observables emits,
// a new slice will be emitted containing all the latest values.
func (o ObservableFoo) CombineLatestMapBar(project func(foo) ObservableBar) ObservableBarSlice {
	return o.MapObservableBar(project).CombineLatestAll()
}
//jig:template Observable<Foo> CombineLatestMapTo<Bar>
//jig:needs Observable<Foo> MapObservable<Bar>, ObservableObservable<Bar> CombineLatestAll
// CombineLatestMapToBar maps every entry emitted by the ObservableFoo into a
// single ObservableBar, and then subscribes to it, until the source
// observable completes. It will then wait for all of the ObservableBars
// to emit before emitting the first slice. Whenever any of the subscribed
// observables emits, a new slice will be emitted containing all the latest
// values.
func (o ObservableFoo) CombineLatestMapToBar(inner ObservableBar) ObservableBarSlice {
	// Every source entry maps to the same inner observable.
	project := func(foo) ObservableBar { return inner }
	return o.MapObservableBar(project).CombineLatestAll()
}
//jig:template ObservableObservable<Foo> CombineLatestAll
//jig:needs <Foo>Slice
// CombineLatestAll flattens a higher order observable
// (e.g. ObservableObservableFoo) by subscribing to
// all emitted observables (ie. ObservableFoo entries) until the source
// completes. It will then wait for all of the subscribed ObservableFoos
// to emit before emitting the first slice. Whenever any of the subscribed
// observables emits, a new slice will be emitted containing all the latest
// values.
func (o ObservableObservableFoo) CombineLatestAll() ObservableFooSlice {
	observable := func(observe FooSliceObserver, subscribeOn Scheduler, subscriber Subscriber) {
		// Inner observables are collected first; subscription to them
		// only starts once the outer observable completes.
		observables := []ObservableFoo(nil)
		// Shared state for all inner observers, guarded by the mutex:
		// assigned/values track the latest emission per source,
		// initialized counts sources that have emitted at least once,
		// and active counts sources that have not yet completed.
		var observers struct {
			sync.Mutex
			assigned    []bool
			values      []foo
			initialized int
			active      int
		}
		makeObserver := func(index int) FooObserver {
			observer := func(next foo, err error, done bool) {
				observers.Lock()
				defer observers.Unlock()
				// active == 0 means the combined stream already
				// terminated; late notifications are dropped.
				if observers.active > 0 {
					switch {
					case !done:
						if !observers.assigned[index] {
							observers.assigned[index] = true
							observers.initialized++
						}
						observers.values[index] = next
						// Only emit once every source has produced a value.
						if observers.initialized == len(observers.values) {
							observe(observers.values, nil, false)
						}
					case err != nil:
						// First error terminates the combined stream.
						observers.active = 0
						var zero []foo
						observe(zero, err, true)
					default:
						// Complete only when every source has completed.
						if observers.active--; observers.active == 0 {
							var zero []foo
							observe(zero, nil, true)
						}
					}
				}
			}
			return observer
		}
		observer := func(next ObservableFoo, err error, done bool) {
			switch {
			case !done:
				observables = append(observables, next)
			case err != nil:
				var zero []foo
				observe(zero, err, true)
			default:
				// Outer observable completed: now subscribe to every
				// collected inner observable.
				subscribeOn.Schedule(func() {
					if subscriber.Subscribed() {
						numObservables := len(observables)
						observers.assigned = make([]bool, numObservables)
						observers.values = make([]foo, numObservables)
						observers.active = numObservables
						for i, v := range observables {
							if !subscriber.Subscribed() {
								return
							}
							v(makeObserver(i), subscribeOn, subscriber)
						}
					}
				})
			}
		}
		o(observer, subscribeOn, subscriber)
	}
	return observable
}
package wasmlib
import (
"encoding/binary"
"strconv"
)
// ScImmutableAddress is a read-only proxy to an ScAddress value
// addressed by (objID, keyID) via the host functions Exists/GetBytes.
type ScImmutableAddress struct {
	objID int32
	keyID Key32
}
func NewScImmutableAddress(objID int32, keyID Key32) ScImmutableAddress {
	return ScImmutableAddress{objID: objID, keyID: keyID}
}
func (o ScImmutableAddress) Exists() bool {
	return Exists(o.objID, o.keyID, TYPE_ADDRESS)
}
func (o ScImmutableAddress) String() string {
	return o.Value().String()
}
func (o ScImmutableAddress) Value() ScAddress {
	// Fetch the raw bytes from the host and decode them.
	return NewScAddressFromBytes(GetBytes(o.objID, o.keyID, TYPE_ADDRESS))
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableAddressArray is a read-only proxy to an array of
// ScAddress values; elements are addressed by integer index.
type ScImmutableAddressArray struct {
	objID int32
}
func (o ScImmutableAddressArray) GetAddress(index int32) ScImmutableAddress {
	return ScImmutableAddress{objID: o.objID, keyID: Key32(index)}
}
func (o ScImmutableAddressArray) Length() int32 {
	return GetLength(o.objID)
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableAgentID is a read-only proxy to an ScAgentID value
// addressed by (objID, keyID).
type ScImmutableAgentID struct {
	objID int32
	keyID Key32
}
func NewScImmutableAgentID(objID int32, keyID Key32) ScImmutableAgentID {
	return ScImmutableAgentID{objID: objID, keyID: keyID}
}
func (o ScImmutableAgentID) Exists() bool {
	return Exists(o.objID, o.keyID, TYPE_AGENT_ID)
}
func (o ScImmutableAgentID) String() string {
	return o.Value().String()
}
func (o ScImmutableAgentID) Value() ScAgentID {
	return NewScAgentIDFromBytes(GetBytes(o.objID, o.keyID, TYPE_AGENT_ID))
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableAgentIDArray is a read-only proxy to an array of
// ScAgentID values.
type ScImmutableAgentIDArray struct {
	objID int32
}
func (o ScImmutableAgentIDArray) GetAgentID(index int32) ScImmutableAgentID {
	return ScImmutableAgentID{objID: o.objID, keyID: Key32(index)}
}
func (o ScImmutableAgentIDArray) Length() int32 {
	return GetLength(o.objID)
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableBytes is a read-only proxy to a raw byte slice addressed
// by (objID, keyID).
type ScImmutableBytes struct {
	objID int32
	keyID Key32
}
func NewScImmutableBytes(objID int32, keyID Key32) ScImmutableBytes {
	return ScImmutableBytes{objID: objID, keyID: keyID}
}
func (o ScImmutableBytes) Exists() bool {
	return Exists(o.objID, o.keyID, TYPE_BYTES)
}
func (o ScImmutableBytes) String() string {
	// Raw bytes are rendered as a base58 string.
	return base58Encode(o.Value())
}
func (o ScImmutableBytes) Value() []byte {
	return GetBytes(o.objID, o.keyID, TYPE_BYTES)
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableBytesArray is a read-only proxy to an array of byte slices.
type ScImmutableBytesArray struct {
	objID int32
}
func (o ScImmutableBytesArray) GetBytes(index int32) ScImmutableBytes {
	return ScImmutableBytes{objID: o.objID, keyID: Key32(index)}
}
func (o ScImmutableBytesArray) Length() int32 {
	return GetLength(o.objID)
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableChainID is a read-only proxy to an ScChainID value
// addressed by (objID, keyID).
type ScImmutableChainID struct {
	objID int32
	keyID Key32
}
func NewScImmutableChainID(objID int32, keyID Key32) ScImmutableChainID {
	return ScImmutableChainID{objID: objID, keyID: keyID}
}
func (o ScImmutableChainID) Exists() bool {
	return Exists(o.objID, o.keyID, TYPE_CHAIN_ID)
}
func (o ScImmutableChainID) String() string {
	return o.Value().String()
}
func (o ScImmutableChainID) Value() ScChainID {
	return NewScChainIDFromBytes(GetBytes(o.objID, o.keyID, TYPE_CHAIN_ID))
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableChainIDArray is a read-only proxy to an array of
// ScChainID values.
type ScImmutableChainIDArray struct {
	objID int32
}
func (o ScImmutableChainIDArray) GetChainID(index int32) ScImmutableChainID {
	return ScImmutableChainID{objID: o.objID, keyID: Key32(index)}
}
func (o ScImmutableChainIDArray) Length() int32 {
	return GetLength(o.objID)
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableColor is a read-only proxy to an ScColor value addressed
// by (objID, keyID).
type ScImmutableColor struct {
	objID int32
	keyID Key32
}
func NewScImmutableColor(objID int32, keyID Key32) ScImmutableColor {
	return ScImmutableColor{objID: objID, keyID: keyID}
}
func (o ScImmutableColor) Exists() bool {
	return Exists(o.objID, o.keyID, TYPE_COLOR)
}
func (o ScImmutableColor) String() string {
	return o.Value().String()
}
func (o ScImmutableColor) Value() ScColor {
	return NewScColorFromBytes(GetBytes(o.objID, o.keyID, TYPE_COLOR))
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableColorArray is a read-only proxy to an array of ScColor values.
type ScImmutableColorArray struct {
	objID int32
}
func (o ScImmutableColorArray) GetColor(index int32) ScImmutableColor {
	return ScImmutableColor{objID: o.objID, keyID: Key32(index)}
}
func (o ScImmutableColorArray) Length() int32 {
	return GetLength(o.objID)
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableHash is a read-only proxy to an ScHash value addressed
// by (objID, keyID).
type ScImmutableHash struct {
	objID int32
	keyID Key32
}
func NewScImmutableHash(objID int32, keyID Key32) ScImmutableHash {
	return ScImmutableHash{objID: objID, keyID: keyID}
}
func (o ScImmutableHash) Exists() bool {
	return Exists(o.objID, o.keyID, TYPE_HASH)
}
func (o ScImmutableHash) String() string {
	return o.Value().String()
}
func (o ScImmutableHash) Value() ScHash {
	return NewScHashFromBytes(GetBytes(o.objID, o.keyID, TYPE_HASH))
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableHashArray is a read-only proxy to an array of ScHash values.
type ScImmutableHashArray struct {
	objID int32
}
func (o ScImmutableHashArray) GetHash(index int32) ScImmutableHash {
	return ScImmutableHash{objID: o.objID, keyID: Key32(index)}
}
func (o ScImmutableHashArray) Length() int32 {
	return GetLength(o.objID)
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableHname is a read-only proxy to an ScHname value addressed
// by (objID, keyID).
type ScImmutableHname struct {
	objID int32
	keyID Key32
}
func NewScImmutableHname(objID int32, keyID Key32) ScImmutableHname {
	return ScImmutableHname{objID: objID, keyID: keyID}
}
func (o ScImmutableHname) Exists() bool {
	return Exists(o.objID, o.keyID, TYPE_HNAME)
}
func (o ScImmutableHname) String() string {
	return o.Value().String()
}
func (o ScImmutableHname) Value() ScHname {
	return NewScHnameFromBytes(GetBytes(o.objID, o.keyID, TYPE_HNAME))
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableHnameArray is a read-only proxy to an array of ScHname values.
type ScImmutableHnameArray struct {
	objID int32
}
func (o ScImmutableHnameArray) GetHname(index int32) ScImmutableHname {
	return ScImmutableHname{objID: o.objID, keyID: Key32(index)}
}
func (o ScImmutableHnameArray) Length() int32 {
	return GetLength(o.objID)
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableInt16 is a read-only proxy to an int16 value addressed
// by (objID, keyID).
type ScImmutableInt16 struct {
	objID int32
	keyID Key32
}
func NewScImmutableInt16(objID int32, keyID Key32) ScImmutableInt16 {
	return ScImmutableInt16{objID: objID, keyID: keyID}
}
func (o ScImmutableInt16) Exists() bool {
	return Exists(o.objID, o.keyID, TYPE_INT16)
}
func (o ScImmutableInt16) String() string {
	return strconv.FormatInt(int64(o.Value()), 10)
}
func (o ScImmutableInt16) Value() int16 {
	// Values are stored little-endian.
	bytes := GetBytes(o.objID, o.keyID, TYPE_INT16)
	return int16(binary.LittleEndian.Uint16(bytes))
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableInt16Array is a read-only proxy to an array of int16 values.
type ScImmutableInt16Array struct {
	objID int32
}
func (o ScImmutableInt16Array) GetInt16(index int32) ScImmutableInt16 {
	return ScImmutableInt16{objID: o.objID, keyID: Key32(index)}
}
func (o ScImmutableInt16Array) Length() int32 {
	return GetLength(o.objID)
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableInt32 is a read-only proxy for an int32 value stored under a
// (container object, key) pair in the host's object store.
type ScImmutableInt32 struct {
	objID int32
	keyID Key32
}

// NewScImmutableInt32 wraps the given (object, key) pair in a read-only
// int32 proxy.
func NewScImmutableInt32(objID int32, keyID Key32) ScImmutableInt32 {
	return ScImmutableInt32{objID, keyID}
}

// Exists reports whether a TYPE_INT32 value is stored under this key.
func (im ScImmutableInt32) Exists() bool {
	return Exists(im.objID, im.keyID, TYPE_INT32)
}

// String renders the proxied value as a base-10 string.
func (im ScImmutableInt32) String() string {
	return strconv.FormatInt(int64(im.Value()), 10)
}

// Value fetches the raw bytes for this key and decodes them as a
// little-endian int32.
func (im ScImmutableInt32) Value() int32 {
	buf := GetBytes(im.objID, im.keyID, TYPE_INT32)
	return int32(binary.LittleEndian.Uint32(buf))
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableInt32Array is a read-only proxy for a host-side array of int32s.
type ScImmutableInt32Array struct {
	objID int32
}

// GetInt32 returns a read-only proxy for the int32 at the given index.
func (arr ScImmutableInt32Array) GetInt32(index int32) ScImmutableInt32 {
	return ScImmutableInt32{objID: arr.objID, keyID: Key32(index)}
}

// Length returns the number of elements in the array.
func (arr ScImmutableInt32Array) Length() int32 {
	return GetLength(arr.objID)
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableInt64 is a read-only proxy for an int64 value stored under a
// (container object, key) pair in the host's object store.
type ScImmutableInt64 struct {
	objID int32
	keyID Key32
}

// NewScImmutableInt64 wraps the given (object, key) pair in a read-only
// int64 proxy.
func NewScImmutableInt64(objID int32, keyID Key32) ScImmutableInt64 {
	return ScImmutableInt64{objID, keyID}
}

// Exists reports whether a TYPE_INT64 value is stored under this key.
func (im ScImmutableInt64) Exists() bool {
	return Exists(im.objID, im.keyID, TYPE_INT64)
}

// String renders the proxied value as a base-10 string.
func (im ScImmutableInt64) String() string {
	return strconv.FormatInt(im.Value(), 10)
}

// Value fetches the raw bytes for this key and decodes them as a
// little-endian int64.
func (im ScImmutableInt64) Value() int64 {
	buf := GetBytes(im.objID, im.keyID, TYPE_INT64)
	return int64(binary.LittleEndian.Uint64(buf))
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableInt64Array is a read-only proxy for a host-side array of int64s.
type ScImmutableInt64Array struct {
	objID int32
}

// GetInt64 returns a read-only proxy for the int64 at the given index.
func (arr ScImmutableInt64Array) GetInt64(index int32) ScImmutableInt64 {
	return ScImmutableInt64{objID: arr.objID, keyID: Key32(index)}
}

// Length returns the number of elements in the array.
func (arr ScImmutableInt64Array) Length() int32 {
	return GetLength(arr.objID)
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableMap is a read-only proxy for a host-side map object.
// The Get* methods return typed read-only value proxies for the value stored
// under the given key; the Get*Array and GetMap/GetMapArray variants resolve
// a nested host object ID via GetObjectID and wrap it in the matching
// array/map proxy.
type ScImmutableMap struct {
	objID int32
}
// CallFunc invokes the host's CallFunc for this object with the given key
// and encoded params, returning the raw result bytes.
func (o ScImmutableMap) CallFunc(keyID Key32, params []byte) []byte {
	return CallFunc(o.objID, keyID, params)
}
func (o ScImmutableMap) GetAddress(key MapKey) ScImmutableAddress {
	return ScImmutableAddress{objID: o.objID, keyID: key.KeyID()}
}
func (o ScImmutableMap) GetAddressArray(key MapKey) ScImmutableAddressArray {
	arrID := GetObjectID(o.objID, key.KeyID(), TYPE_ADDRESS|TYPE_ARRAY)
	return ScImmutableAddressArray{objID: arrID}
}
func (o ScImmutableMap) GetAgentID(key MapKey) ScImmutableAgentID {
	return ScImmutableAgentID{objID: o.objID, keyID: key.KeyID()}
}
func (o ScImmutableMap) GetAgentIDArray(key MapKey) ScImmutableAgentIDArray {
	arrID := GetObjectID(o.objID, key.KeyID(), TYPE_AGENT_ID|TYPE_ARRAY)
	return ScImmutableAgentIDArray{objID: arrID}
}
func (o ScImmutableMap) GetBytes(key MapKey) ScImmutableBytes {
	return ScImmutableBytes{objID: o.objID, keyID: key.KeyID()}
}
func (o ScImmutableMap) GetBytesArray(key MapKey) ScImmutableBytesArray {
	arrID := GetObjectID(o.objID, key.KeyID(), TYPE_BYTES|TYPE_ARRAY)
	return ScImmutableBytesArray{objID: arrID}
}
func (o ScImmutableMap) GetChainID(key MapKey) ScImmutableChainID {
	return ScImmutableChainID{objID: o.objID, keyID: key.KeyID()}
}
func (o ScImmutableMap) GetChainIDArray(key MapKey) ScImmutableChainIDArray {
	arrID := GetObjectID(o.objID, key.KeyID(), TYPE_CHAIN_ID|TYPE_ARRAY)
	return ScImmutableChainIDArray{objID: arrID}
}
func (o ScImmutableMap) GetColor(key MapKey) ScImmutableColor {
	return ScImmutableColor{objID: o.objID, keyID: key.KeyID()}
}
func (o ScImmutableMap) GetColorArray(key MapKey) ScImmutableColorArray {
	arrID := GetObjectID(o.objID, key.KeyID(), TYPE_COLOR|TYPE_ARRAY)
	return ScImmutableColorArray{objID: arrID}
}
func (o ScImmutableMap) GetHash(key MapKey) ScImmutableHash {
	return ScImmutableHash{objID: o.objID, keyID: key.KeyID()}
}
func (o ScImmutableMap) GetHashArray(key MapKey) ScImmutableHashArray {
	arrID := GetObjectID(o.objID, key.KeyID(), TYPE_HASH|TYPE_ARRAY)
	return ScImmutableHashArray{objID: arrID}
}
func (o ScImmutableMap) GetHname(key MapKey) ScImmutableHname {
	return ScImmutableHname{objID: o.objID, keyID: key.KeyID()}
}
func (o ScImmutableMap) GetHnameArray(key MapKey) ScImmutableHnameArray {
	arrID := GetObjectID(o.objID, key.KeyID(), TYPE_HNAME|TYPE_ARRAY)
	return ScImmutableHnameArray{objID: arrID}
}
func (o ScImmutableMap) GetInt16(key MapKey) ScImmutableInt16 {
	return ScImmutableInt16{objID: o.objID, keyID: key.KeyID()}
}
func (o ScImmutableMap) GetInt16Array(key MapKey) ScImmutableInt16Array {
	arrID := GetObjectID(o.objID, key.KeyID(), TYPE_INT16|TYPE_ARRAY)
	return ScImmutableInt16Array{objID: arrID}
}
func (o ScImmutableMap) GetInt32(key MapKey) ScImmutableInt32 {
	return ScImmutableInt32{objID: o.objID, keyID: key.KeyID()}
}
func (o ScImmutableMap) GetInt32Array(key MapKey) ScImmutableInt32Array {
	arrID := GetObjectID(o.objID, key.KeyID(), TYPE_INT32|TYPE_ARRAY)
	return ScImmutableInt32Array{objID: arrID}
}
func (o ScImmutableMap) GetInt64(key MapKey) ScImmutableInt64 {
	return ScImmutableInt64{objID: o.objID, keyID: key.KeyID()}
}
func (o ScImmutableMap) GetInt64Array(key MapKey) ScImmutableInt64Array {
	arrID := GetObjectID(o.objID, key.KeyID(), TYPE_INT64|TYPE_ARRAY)
	return ScImmutableInt64Array{objID: arrID}
}
// GetMap resolves the nested map object under key and wraps it read-only.
func (o ScImmutableMap) GetMap(key MapKey) ScImmutableMap {
	mapID := GetObjectID(o.objID, key.KeyID(), TYPE_MAP)
	return ScImmutableMap{objID: mapID}
}
func (o ScImmutableMap) GetMapArray(key MapKey) ScImmutableMapArray {
	arrID := GetObjectID(o.objID, key.KeyID(), TYPE_MAP|TYPE_ARRAY)
	return ScImmutableMapArray{objID: arrID}
}
func (o ScImmutableMap) GetRequestID(key MapKey) ScImmutableRequestID {
	return ScImmutableRequestID{objID: o.objID, keyID: key.KeyID()}
}
func (o ScImmutableMap) GetRequestIDArray(key MapKey) ScImmutableRequestIDArray {
	arrID := GetObjectID(o.objID, key.KeyID(), TYPE_REQUEST_ID|TYPE_ARRAY)
	return ScImmutableRequestIDArray{objID: arrID}
}
func (o ScImmutableMap) GetString(key MapKey) ScImmutableString {
	return ScImmutableString{objID: o.objID, keyID: key.KeyID()}
}
func (o ScImmutableMap) GetStringArray(key MapKey) ScImmutableStringArray {
	arrID := GetObjectID(o.objID, key.KeyID(), TYPE_STRING|TYPE_ARRAY)
	return ScImmutableStringArray{objID: arrID}
}
// MapID exposes the underlying host object ID of this map.
func (o ScImmutableMap) MapID() int32 {
	return o.objID
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableMapArray is a read-only proxy for a host-side array of maps.
type ScImmutableMapArray struct {
	objID int32
}

// GetMap resolves the nested map object at the given index and wraps it in
// a read-only proxy.
func (arr ScImmutableMapArray) GetMap(index int32) ScImmutableMap {
	mapID := GetObjectID(arr.objID, Key32(index), TYPE_MAP)
	return ScImmutableMap{objID: mapID}
}

// Length returns the number of elements in the array.
func (arr ScImmutableMapArray) Length() int32 {
	return GetLength(arr.objID)
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableRequestID is a read-only proxy for a request ID value stored
// under a (container object, key) pair in the host's object store.
type ScImmutableRequestID struct {
	objID int32
	keyID Key32
}

// NewScImmutableRequestID wraps the given (object, key) pair in a read-only
// request ID proxy.
func NewScImmutableRequestID(objID int32, keyID Key32) ScImmutableRequestID {
	return ScImmutableRequestID{objID, keyID}
}

// Exists reports whether a TYPE_REQUEST_ID value is stored under this key.
func (im ScImmutableRequestID) Exists() bool {
	return Exists(im.objID, im.keyID, TYPE_REQUEST_ID)
}

// String renders the proxied request ID in its human-readable form.
func (im ScImmutableRequestID) String() string {
	return im.Value().String()
}

// Value fetches the raw bytes for this key and decodes them into an
// ScRequestID.
func (im ScImmutableRequestID) Value() ScRequestID {
	return NewScRequestIDFromBytes(GetBytes(im.objID, im.keyID, TYPE_REQUEST_ID))
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableRequestIDArray is a read-only proxy for a host-side array of
// request IDs.
type ScImmutableRequestIDArray struct {
	objID int32
}

// GetRequestID returns a read-only proxy for the request ID at the given index.
func (arr ScImmutableRequestIDArray) GetRequestID(index int32) ScImmutableRequestID {
	return ScImmutableRequestID{objID: arr.objID, keyID: Key32(index)}
}

// Length returns the number of elements in the array.
func (arr ScImmutableRequestIDArray) Length() int32 {
	return GetLength(arr.objID)
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
// ScImmutableString is a read-only proxy for a string value stored under a
// (container object, key) pair in the host's object store.
type ScImmutableString struct {
	objID int32
	keyID Key32
}

// NewScImmutableString wraps the given (object, key) pair in a read-only
// string proxy.
func NewScImmutableString(objID int32, keyID Key32) ScImmutableString {
	return ScImmutableString{objID, keyID}
}

// Exists reports whether a TYPE_STRING value is stored under this key.
func (im ScImmutableString) Exists() bool {
	return Exists(im.objID, im.keyID, TYPE_STRING)
}

// String implements fmt.Stringer by returning the proxied value itself.
func (im ScImmutableString) String() string {
	return im.Value()
}

// Value fetches the raw bytes for this key and returns them as a string.
// A missing value (nil bytes) yields the empty string.
func (im ScImmutableString) Value() string {
	if buf := GetBytes(im.objID, im.keyID, TYPE_STRING); buf != nil {
		return string(buf)
	}
	return ""
}
// \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\ // \\
type ScImmutableStringArray struct {
objID int32
}
func (o ScImmutableStringArray) GetString(index int32) ScImmutableString {
return ScImmutableString{objID: o.objID, keyID: Key32(index)}
}
func (o ScImmutableStringArray) Length() int32 {
return GetLength(o.objID)
} | packages/vm/wasmlib/immutable.go | 0.692642 | 0.53127 | immutable.go | starcoder |
package onshape
import (
"encoding/json"
)
// BTNotFilter165 struct for BTNotFilter165.
// It embeds BTQueryFilter183 (whose fields MarshalJSON flattens into the
// same JSON object) and adds an optional type tag plus a single operand
// filter — presumably the filter being negated, judging by the type name.
type BTNotFilter165 struct {
	BTQueryFilter183
	// BtType is the serialized type discriminator ("btType" in JSON).
	BtType *string `json:"btType,omitempty"`
	// Operand is the nested filter ("operand" in JSON).
	Operand *BTQueryFilter183 `json:"operand,omitempty"`
}
// NewBTNotFilter165 instantiates a new BTNotFilter165 object.
// Properties with defined defaults are initialized; properties required by
// the API must still be set by the caller, and the argument list will change
// if the set of required properties changes.
func NewBTNotFilter165() *BTNotFilter165 {
	return &BTNotFilter165{}
}

// NewBTNotFilter165WithDefaults instantiates a new BTNotFilter165 object,
// assigning default values only to properties that define them. It does not
// guarantee that properties required by the API are set.
func NewBTNotFilter165WithDefaults() *BTNotFilter165 {
	return &BTNotFilter165{}
}
// GetBtType returns the BtType field value if set, and the zero value otherwise.
func (o *BTNotFilter165) GetBtType() string {
	if o != nil && o.BtType != nil {
		return *o.BtType
	}
	return ""
}

// GetBtTypeOk returns a pointer to the BtType field if set, nil otherwise,
// plus a boolean reporting whether the value has been set.
func (o *BTNotFilter165) GetBtTypeOk() (*string, bool) {
	if o != nil && o.BtType != nil {
		return o.BtType, true
	}
	return nil, false
}

// HasBtType reports whether the BtType field has been set.
func (o *BTNotFilter165) HasBtType() bool {
	return o != nil && o.BtType != nil
}

// SetBtType stores a reference to the given string in the BtType field.
func (o *BTNotFilter165) SetBtType(v string) {
	o.BtType = &v
}
// GetOperand returns the Operand field value if set, and the zero value otherwise.
func (o *BTNotFilter165) GetOperand() BTQueryFilter183 {
	if o != nil && o.Operand != nil {
		return *o.Operand
	}
	var zero BTQueryFilter183
	return zero
}

// GetOperandOk returns a pointer to the Operand field if set, nil otherwise,
// plus a boolean reporting whether the value has been set.
func (o *BTNotFilter165) GetOperandOk() (*BTQueryFilter183, bool) {
	if o != nil && o.Operand != nil {
		return o.Operand, true
	}
	return nil, false
}

// HasOperand reports whether the Operand field has been set.
func (o *BTNotFilter165) HasOperand() bool {
	return o != nil && o.Operand != nil
}

// SetOperand stores a reference to the given BTQueryFilter183 in the
// Operand field.
func (o *BTNotFilter165) SetOperand(v BTQueryFilter183) {
	o.Operand = &v
}
// MarshalJSON serializes the receiver as one flat JSON object: the embedded
// BTQueryFilter183 is marshalled first and its fields merged into the map by
// round-tripping through JSON, then this type's own optional fields are
// added on top (overwriting any same-named base keys).
func (o BTNotFilter165) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	serializedBTQueryFilter183, errBTQueryFilter183 := json.Marshal(o.BTQueryFilter183)
	if errBTQueryFilter183 != nil {
		return []byte{}, errBTQueryFilter183
	}
	errBTQueryFilter183 = json.Unmarshal([]byte(serializedBTQueryFilter183), &toSerialize)
	if errBTQueryFilter183 != nil {
		return []byte{}, errBTQueryFilter183
	}
	// Only set fields are emitted, matching the omitempty tags.
	if o.BtType != nil {
		toSerialize["btType"] = o.BtType
	}
	if o.Operand != nil {
		toSerialize["operand"] = o.Operand
	}
	return json.Marshal(toSerialize)
}
type NullableBTNotFilter165 struct {
value *BTNotFilter165
isSet bool
}
func (v NullableBTNotFilter165) Get() *BTNotFilter165 {
return v.value
}
func (v *NullableBTNotFilter165) Set(val *BTNotFilter165) {
v.value = val
v.isSet = true
}
func (v NullableBTNotFilter165) IsSet() bool {
return v.isSet
}
func (v *NullableBTNotFilter165) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableBTNotFilter165(val *BTNotFilter165) *NullableBTNotFilter165 {
return &NullableBTNotFilter165{value: val, isSet: true}
}
func (v NullableBTNotFilter165) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableBTNotFilter165) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | onshape/model_bt_not_filter_165.go | 0.660939 | 0.409044 | model_bt_not_filter_165.go | starcoder |
package tensor
import (
"github.com/pkg/errors"
"gorgonia.org/tensor/internal/storage"
)
// Gt performs a > b elementwise. Both a and b must have the same shape.
// Acceptable FuncOpts are: UseUnsafe(), AsSameType(), WithReuse().
// UseUnsafe() will ensure that the same type is returned.
// Tensors used in WithReuse has to have the same Dtype as the return value's Dtype.
func (e StdEng) Gt(a Tensor, b Tensor, opts ...FuncOpt) (retVal Tensor, err error) {
	// ">" only makes sense for ordered dtypes.
	if err = binaryCheck(a, b, ordTypes); err != nil {
		return nil, errors.Wrapf(err, "Gt failed")
	}
	var reuse DenseTensor
	var safe, same bool
	if reuse, safe, _, _, same, err = handleFuncOpts(a.Shape(), a.Dtype(), a.DataOrder(), false, opts...); err != nil {
		return nil, errors.Wrap(err, "Unable to handle funcOpts")
	}
	// An unsafe op writes the result back into a, so it must keep a's dtype
	// ("same") instead of producing a Bool tensor.
	if !safe {
		same = true
	}
	typ := a.Dtype().Type
	var dataA, dataB, dataReuse *storage.Header
	var ait, bit, iit Iterator
	var useIter, swap bool
	if dataA, dataB, dataReuse, ait, bit, iit, useIter, swap, err = prepDataVV(a, b, reuse); err != nil {
		return nil, errors.Wrapf(err, "StdEng.Gt")
	}
	// check to see if anything needs to be created
	switch {
	case same && safe && reuse == nil:
		// Same-dtype result with no reuse tensor supplied: allocate one.
		// When prepDataVV reported a swap, size it after b instead of a.
		if swap {
			reuse = NewDense(b.Dtype(), b.Shape().Clone(), WithEngine(e))
		} else {
			reuse = NewDense(a.Dtype(), a.Shape().Clone(), WithEngine(e))
		}
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	case !same && safe && reuse == nil:
		// Default comparison result is a Bool tensor.
		reuse = NewDense(Bool, a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	}
	if useIter {
		// Iterator path — prepDataVV decided per-element iteration is
		// required (e.g. non-contiguous data).
		switch {
		case !safe && same && reuse == nil:
			err = e.E.GtSameIter(typ, dataA, dataB, ait, bit)
			retVal = a
		case same && safe && reuse != nil:
			// Copy a into reuse, rewind the iterators, then overwrite
			// reuse in place with reuse > b.
			storage.CopyIter(typ, dataReuse, dataA, iit, ait)
			ait.Reset()
			iit.Reset()
			err = e.E.GtSameIter(typ, dataReuse, dataB, iit, bit)
			retVal = reuse
		default: // safe && bool
			err = e.E.GtIter(typ, dataA, dataB, dataReuse, ait, bit, iit)
			retVal = reuse
		}
		return
	}
	// standard
	switch {
	case !safe && same && reuse == nil:
		err = e.E.GtSame(typ, dataA, dataB)
		retVal = a
	case same && safe && reuse != nil:
		storage.Copy(typ, dataReuse, dataA)
		err = e.E.GtSame(typ, dataReuse, dataB)
		retVal = reuse
	default:
		err = e.E.Gt(typ, dataA, dataB, dataReuse)
		retVal = reuse
	}
	return
}
// Gte performs a ≥ b elementwise. Both a and b must have the same shape.
// Acceptable FuncOpts are: UseUnsafe(), AsSameType(), WithReuse().
// UseUnsafe() will ensure that the same type is returned.
// Tensors used in WithReuse has to have the same Dtype as the return value's Dtype.
// Structure mirrors Gt; only the underlying engine calls differ.
func (e StdEng) Gte(a Tensor, b Tensor, opts ...FuncOpt) (retVal Tensor, err error) {
	if err = binaryCheck(a, b, ordTypes); err != nil {
		return nil, errors.Wrapf(err, "Gte failed")
	}
	var reuse DenseTensor
	var safe, same bool
	if reuse, safe, _, _, same, err = handleFuncOpts(a.Shape(), a.Dtype(), a.DataOrder(), false, opts...); err != nil {
		return nil, errors.Wrap(err, "Unable to handle funcOpts")
	}
	// Unsafe writes back into a, so the result keeps a's dtype.
	if !safe {
		same = true
	}
	typ := a.Dtype().Type
	var dataA, dataB, dataReuse *storage.Header
	var ait, bit, iit Iterator
	var useIter, swap bool
	if dataA, dataB, dataReuse, ait, bit, iit, useIter, swap, err = prepDataVV(a, b, reuse); err != nil {
		return nil, errors.Wrapf(err, "StdEng.Gte")
	}
	// check to see if anything needs to be created
	switch {
	case same && safe && reuse == nil:
		if swap {
			reuse = NewDense(b.Dtype(), b.Shape().Clone(), WithEngine(e))
		} else {
			reuse = NewDense(a.Dtype(), a.Shape().Clone(), WithEngine(e))
		}
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	case !same && safe && reuse == nil:
		// Default comparison result is a Bool tensor.
		reuse = NewDense(Bool, a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	}
	if useIter {
		switch {
		case !safe && same && reuse == nil:
			err = e.E.GteSameIter(typ, dataA, dataB, ait, bit)
			retVal = a
		case same && safe && reuse != nil:
			// Copy a into reuse, rewind iterators, then compute in place.
			storage.CopyIter(typ, dataReuse, dataA, iit, ait)
			ait.Reset()
			iit.Reset()
			err = e.E.GteSameIter(typ, dataReuse, dataB, iit, bit)
			retVal = reuse
		default: // safe && bool
			err = e.E.GteIter(typ, dataA, dataB, dataReuse, ait, bit, iit)
			retVal = reuse
		}
		return
	}
	// standard
	switch {
	case !safe && same && reuse == nil:
		err = e.E.GteSame(typ, dataA, dataB)
		retVal = a
	case same && safe && reuse != nil:
		storage.Copy(typ, dataReuse, dataA)
		err = e.E.GteSame(typ, dataReuse, dataB)
		retVal = reuse
	default:
		err = e.E.Gte(typ, dataA, dataB, dataReuse)
		retVal = reuse
	}
	return
}
// Lt performs a < b elementwise. Both a and b must have the same shape.
// Acceptable FuncOpts are: UseUnsafe(), AsSameType(), WithReuse().
// UseUnsafe() will ensure that the same type is returned.
// Tensors used in WithReuse has to have the same Dtype as the return value's Dtype.
// Structure mirrors Gt; only the underlying engine calls differ.
func (e StdEng) Lt(a Tensor, b Tensor, opts ...FuncOpt) (retVal Tensor, err error) {
	if err = binaryCheck(a, b, ordTypes); err != nil {
		return nil, errors.Wrapf(err, "Lt failed")
	}
	var reuse DenseTensor
	var safe, same bool
	if reuse, safe, _, _, same, err = handleFuncOpts(a.Shape(), a.Dtype(), a.DataOrder(), false, opts...); err != nil {
		return nil, errors.Wrap(err, "Unable to handle funcOpts")
	}
	// Unsafe writes back into a, so the result keeps a's dtype.
	if !safe {
		same = true
	}
	typ := a.Dtype().Type
	var dataA, dataB, dataReuse *storage.Header
	var ait, bit, iit Iterator
	var useIter, swap bool
	if dataA, dataB, dataReuse, ait, bit, iit, useIter, swap, err = prepDataVV(a, b, reuse); err != nil {
		return nil, errors.Wrapf(err, "StdEng.Lt")
	}
	// check to see if anything needs to be created
	switch {
	case same && safe && reuse == nil:
		if swap {
			reuse = NewDense(b.Dtype(), b.Shape().Clone(), WithEngine(e))
		} else {
			reuse = NewDense(a.Dtype(), a.Shape().Clone(), WithEngine(e))
		}
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	case !same && safe && reuse == nil:
		// Default comparison result is a Bool tensor.
		reuse = NewDense(Bool, a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	}
	if useIter {
		switch {
		case !safe && same && reuse == nil:
			err = e.E.LtSameIter(typ, dataA, dataB, ait, bit)
			retVal = a
		case same && safe && reuse != nil:
			// Copy a into reuse, rewind iterators, then compute in place.
			storage.CopyIter(typ, dataReuse, dataA, iit, ait)
			ait.Reset()
			iit.Reset()
			err = e.E.LtSameIter(typ, dataReuse, dataB, iit, bit)
			retVal = reuse
		default: // safe && bool
			err = e.E.LtIter(typ, dataA, dataB, dataReuse, ait, bit, iit)
			retVal = reuse
		}
		return
	}
	// standard
	switch {
	case !safe && same && reuse == nil:
		err = e.E.LtSame(typ, dataA, dataB)
		retVal = a
	case same && safe && reuse != nil:
		storage.Copy(typ, dataReuse, dataA)
		err = e.E.LtSame(typ, dataReuse, dataB)
		retVal = reuse
	default:
		err = e.E.Lt(typ, dataA, dataB, dataReuse)
		retVal = reuse
	}
	return
}
// Lte performs a ≤ b elementwise. Both a and b must have the same shape.
// Acceptable FuncOpts are: UseUnsafe(), AsSameType(), WithReuse().
// UseUnsafe() will ensure that the same type is returned.
// Tensors used in WithReuse has to have the same Dtype as the return value's Dtype.
// Structure mirrors Gt; only the underlying engine calls differ.
func (e StdEng) Lte(a Tensor, b Tensor, opts ...FuncOpt) (retVal Tensor, err error) {
	if err = binaryCheck(a, b, ordTypes); err != nil {
		return nil, errors.Wrapf(err, "Lte failed")
	}
	var reuse DenseTensor
	var safe, same bool
	if reuse, safe, _, _, same, err = handleFuncOpts(a.Shape(), a.Dtype(), a.DataOrder(), false, opts...); err != nil {
		return nil, errors.Wrap(err, "Unable to handle funcOpts")
	}
	// Unsafe writes back into a, so the result keeps a's dtype.
	if !safe {
		same = true
	}
	typ := a.Dtype().Type
	var dataA, dataB, dataReuse *storage.Header
	var ait, bit, iit Iterator
	var useIter, swap bool
	if dataA, dataB, dataReuse, ait, bit, iit, useIter, swap, err = prepDataVV(a, b, reuse); err != nil {
		return nil, errors.Wrapf(err, "StdEng.Lte")
	}
	// check to see if anything needs to be created
	switch {
	case same && safe && reuse == nil:
		if swap {
			reuse = NewDense(b.Dtype(), b.Shape().Clone(), WithEngine(e))
		} else {
			reuse = NewDense(a.Dtype(), a.Shape().Clone(), WithEngine(e))
		}
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	case !same && safe && reuse == nil:
		// Default comparison result is a Bool tensor.
		reuse = NewDense(Bool, a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	}
	if useIter {
		switch {
		case !safe && same && reuse == nil:
			err = e.E.LteSameIter(typ, dataA, dataB, ait, bit)
			retVal = a
		case same && safe && reuse != nil:
			// Copy a into reuse, rewind iterators, then compute in place.
			storage.CopyIter(typ, dataReuse, dataA, iit, ait)
			ait.Reset()
			iit.Reset()
			err = e.E.LteSameIter(typ, dataReuse, dataB, iit, bit)
			retVal = reuse
		default: // safe && bool
			err = e.E.LteIter(typ, dataA, dataB, dataReuse, ait, bit, iit)
			retVal = reuse
		}
		return
	}
	// standard
	switch {
	case !safe && same && reuse == nil:
		err = e.E.LteSame(typ, dataA, dataB)
		retVal = a
	case same && safe && reuse != nil:
		storage.Copy(typ, dataReuse, dataA)
		err = e.E.LteSame(typ, dataReuse, dataB)
		retVal = reuse
	default:
		err = e.E.Lte(typ, dataA, dataB, dataReuse)
		retVal = reuse
	}
	return
}
// ElEq performs a == b elementwise. Both a and b must have the same shape.
// Acceptable FuncOpts are: UseUnsafe(), AsSameType(), WithReuse().
// UseUnsafe() will ensure that the same type is returned.
// Tensors used in WithReuse has to have the same Dtype as the return value's Dtype.
// Structure mirrors Gt, except equality is defined for eqTypes rather than
// just the ordered dtypes.
func (e StdEng) ElEq(a Tensor, b Tensor, opts ...FuncOpt) (retVal Tensor, err error) {
	if err = binaryCheck(a, b, eqTypes); err != nil {
		return nil, errors.Wrapf(err, "Eq failed")
	}
	var reuse DenseTensor
	var safe, same bool
	if reuse, safe, _, _, same, err = handleFuncOpts(a.Shape(), a.Dtype(), a.DataOrder(), false, opts...); err != nil {
		return nil, errors.Wrap(err, "Unable to handle funcOpts")
	}
	// Unsafe writes back into a, so the result keeps a's dtype.
	if !safe {
		same = true
	}
	typ := a.Dtype().Type
	var dataA, dataB, dataReuse *storage.Header
	var ait, bit, iit Iterator
	var useIter, swap bool
	if dataA, dataB, dataReuse, ait, bit, iit, useIter, swap, err = prepDataVV(a, b, reuse); err != nil {
		return nil, errors.Wrapf(err, "StdEng.Eq")
	}
	// check to see if anything needs to be created
	switch {
	case same && safe && reuse == nil:
		if swap {
			reuse = NewDense(b.Dtype(), b.Shape().Clone(), WithEngine(e))
		} else {
			reuse = NewDense(a.Dtype(), a.Shape().Clone(), WithEngine(e))
		}
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	case !same && safe && reuse == nil:
		// Default comparison result is a Bool tensor.
		reuse = NewDense(Bool, a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	}
	if useIter {
		switch {
		case !safe && same && reuse == nil:
			err = e.E.EqSameIter(typ, dataA, dataB, ait, bit)
			retVal = a
		case same && safe && reuse != nil:
			// Copy a into reuse, rewind iterators, then compute in place.
			storage.CopyIter(typ, dataReuse, dataA, iit, ait)
			ait.Reset()
			iit.Reset()
			err = e.E.EqSameIter(typ, dataReuse, dataB, iit, bit)
			retVal = reuse
		default: // safe && bool
			err = e.E.EqIter(typ, dataA, dataB, dataReuse, ait, bit, iit)
			retVal = reuse
		}
		return
	}
	// standard
	switch {
	case !safe && same && reuse == nil:
		err = e.E.EqSame(typ, dataA, dataB)
		retVal = a
	case same && safe && reuse != nil:
		storage.Copy(typ, dataReuse, dataA)
		err = e.E.EqSame(typ, dataReuse, dataB)
		retVal = reuse
	default:
		err = e.E.Eq(typ, dataA, dataB, dataReuse)
		retVal = reuse
	}
	return
}
// ElNe performs a ≠ b elementwise. Both a and b must have the same shape.
// Acceptable FuncOpts are: UseUnsafe(), AsSameType(), WithReuse().
// UseUnsafe() will ensure that the same type is returned.
// Tensors used in WithReuse has to have the same Dtype as the return value's Dtype.
// Structure mirrors ElEq (eqTypes, not just ordered dtypes).
func (e StdEng) ElNe(a Tensor, b Tensor, opts ...FuncOpt) (retVal Tensor, err error) {
	if err = binaryCheck(a, b, eqTypes); err != nil {
		return nil, errors.Wrapf(err, "Ne failed")
	}
	var reuse DenseTensor
	var safe, same bool
	if reuse, safe, _, _, same, err = handleFuncOpts(a.Shape(), a.Dtype(), a.DataOrder(), false, opts...); err != nil {
		return nil, errors.Wrap(err, "Unable to handle funcOpts")
	}
	// Unsafe writes back into a, so the result keeps a's dtype.
	if !safe {
		same = true
	}
	typ := a.Dtype().Type
	var dataA, dataB, dataReuse *storage.Header
	var ait, bit, iit Iterator
	var useIter, swap bool
	if dataA, dataB, dataReuse, ait, bit, iit, useIter, swap, err = prepDataVV(a, b, reuse); err != nil {
		return nil, errors.Wrapf(err, "StdEng.Ne")
	}
	// check to see if anything needs to be created
	switch {
	case same && safe && reuse == nil:
		if swap {
			reuse = NewDense(b.Dtype(), b.Shape().Clone(), WithEngine(e))
		} else {
			reuse = NewDense(a.Dtype(), a.Shape().Clone(), WithEngine(e))
		}
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	case !same && safe && reuse == nil:
		// Default comparison result is a Bool tensor.
		reuse = NewDense(Bool, a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	}
	if useIter {
		switch {
		case !safe && same && reuse == nil:
			err = e.E.NeSameIter(typ, dataA, dataB, ait, bit)
			retVal = a
		case same && safe && reuse != nil:
			// Copy a into reuse, rewind iterators, then compute in place.
			storage.CopyIter(typ, dataReuse, dataA, iit, ait)
			ait.Reset()
			iit.Reset()
			err = e.E.NeSameIter(typ, dataReuse, dataB, iit, bit)
			retVal = reuse
		default: // safe && bool
			err = e.E.NeIter(typ, dataA, dataB, dataReuse, ait, bit, iit)
			retVal = reuse
		}
		return
	}
	// standard
	switch {
	case !safe && same && reuse == nil:
		err = e.E.NeSame(typ, dataA, dataB)
		retVal = a
	case same && safe && reuse != nil:
		storage.Copy(typ, dataReuse, dataA)
		err = e.E.NeSame(typ, dataReuse, dataB)
		retVal = reuse
	default:
		err = e.E.Ne(typ, dataA, dataB, dataReuse)
		retVal = reuse
	}
	return
}
// GtScalar performs t > s elementwise. The leftTensor parameter indicates if the tensor is the left operand. Only scalar types are accepted in s.
// Acceptable FuncOpts are: UseUnsafe(), AsSameType(), WithReuse().
// UseUnsafe() will ensure that the same type is returned.
// Tensors used in WithReuse has to have the same Dtype as the return value's Dtype.
func (e StdEng) GtScalar(t Tensor, s interface{}, leftTensor bool, opts ...FuncOpt) (retVal Tensor, err error) {
	// ">" only makes sense for ordered dtypes, and s must match t's dtype.
	if err = unaryCheck(t, ordTypes); err != nil {
		return nil, errors.Wrapf(err, "Gt failed")
	}
	if err = scalarDtypeCheck(t, s); err != nil {
		return nil, errors.Wrap(err, "Gt failed")
	}
	var reuse DenseTensor
	var safe, same bool
	if reuse, safe, _, _, same, err = handleFuncOpts(t.Shape(), t.Dtype(), t.DataOrder(), false, opts...); err != nil {
		return nil, errors.Wrap(err, "Unable to handle funcOpts")
	}
	// An unsafe op writes the result back into t, so it must keep t's dtype
	// ("same") instead of producing a Bool tensor.
	if !safe {
		same = true
	}
	a := t
	typ := t.Dtype().Type
	var ait, bit, iit Iterator
	var dataA, dataB, dataReuse, scalarHeader *storage.Header
	var useIter bool
	// Materialize the scalar into a temporary (pooled) header on the side
	// matching the operand order; it must be given back via returnHeader on
	// every exit path below.
	if leftTensor {
		if dataA, dataB, dataReuse, ait, iit, useIter, err = prepDataVS(t, s, reuse); err != nil {
			return nil, errors.Wrapf(err, opFail, "StdEng.Gt")
		}
		scalarHeader = dataB
	} else {
		if dataA, dataB, dataReuse, bit, iit, useIter, err = prepDataSV(s, t, reuse); err != nil {
			return nil, errors.Wrapf(err, opFail, "StdEng.Gt")
		}
		scalarHeader = dataA
	}
	// check to see if anything needs to be created
	switch {
	case same && safe && reuse == nil:
		reuse = NewDense(a.Dtype(), a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	case !same && safe && reuse == nil:
		// Default comparison result is a Bool tensor.
		reuse = NewDense(Bool, a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	}
	if useIter {
		switch {
		case !safe && same && reuse == nil:
			err = e.E.GtSameIter(typ, dataA, dataB, ait, bit)
			retVal = a
		case same && safe && reuse != nil && !leftTensor:
			// Copy the tensor side (b) into reuse, rewind, then compute
			// a > reuse in place.
			storage.CopyIter(typ, dataReuse, dataB, iit, bit)
			bit.Reset()
			iit.Reset()
			err = e.E.GtSameIter(typ, dataA, dataReuse, ait, bit)
			retVal = reuse
		case same && safe && reuse != nil && leftTensor:
			storage.CopyIter(typ, dataReuse, dataA, iit, ait)
			ait.Reset()
			iit.Reset()
			err = e.E.GtSameIter(typ, dataReuse, dataB, iit, bit)
			retVal = reuse
		default: // safe && bool
			err = e.E.GtIter(typ, dataA, dataB, dataReuse, ait, bit, iit)
			retVal = reuse
		}
		returnHeader(scalarHeader)
		return
	}
	// handle special case where A and B have both len 1
	if dataA.L == 1 && dataB.L == 1 {
		switch {
		case same && safe && reuse != nil && leftTensor:
			storage.Copy(typ, dataReuse, dataA)
			err = e.E.GtSame(typ, dataReuse, dataB)
			retVal = reuse
			// BUGFIX: previously returned without recycling the pooled
			// scalar header, leaking it (every other exit path returns it).
			returnHeader(scalarHeader)
			return
		case same && safe && reuse != nil && !leftTensor:
			// s > t computed as t < s so the result lands in the copy of
			// the tensor side.
			storage.Copy(typ, dataReuse, dataB)
			err = e.E.LtSame(typ, dataReuse, dataA)
			retVal = reuse
			returnHeader(scalarHeader) // BUGFIX: see above.
			return
		}
	}
	// standard
	switch {
	case !safe && same && reuse == nil:
		err = e.E.GtSame(typ, dataA, dataB)
		retVal = a
	case same && safe && reuse != nil && leftTensor:
		storage.Copy(typ, dataReuse, dataA)
		err = e.E.GtSame(typ, dataReuse, dataB)
		retVal = reuse
	case same && safe && reuse != nil && !leftTensor:
		storage.Copy(typ, dataReuse, dataB)
		err = e.E.GtSame(typ, dataA, dataReuse)
		retVal = reuse
	default:
		err = e.E.Gt(typ, dataA, dataB, dataReuse)
		retVal = reuse
	}
	returnHeader(scalarHeader)
	return
}
// GteScalar performs t ≥ s elementwise. The leftTensor parameter indicates if the tensor is the left operand. Only scalar types are accepted in s.
// Acceptable FuncOpts are: UseUnsafe(), AsSameType(), WithReuse().
// UseUnsafe() will ensure that the same type is returned.
// Tensors used in WithReuse has to have the same Dtype as the return value's Dtype.
// Structure mirrors GtScalar; only the underlying engine calls differ.
func (e StdEng) GteScalar(t Tensor, s interface{}, leftTensor bool, opts ...FuncOpt) (retVal Tensor, err error) {
	if err = unaryCheck(t, ordTypes); err != nil {
		return nil, errors.Wrapf(err, "Gte failed")
	}
	if err = scalarDtypeCheck(t, s); err != nil {
		return nil, errors.Wrap(err, "Gte failed")
	}
	var reuse DenseTensor
	var safe, same bool
	if reuse, safe, _, _, same, err = handleFuncOpts(t.Shape(), t.Dtype(), t.DataOrder(), false, opts...); err != nil {
		return nil, errors.Wrap(err, "Unable to handle funcOpts")
	}
	// Unsafe writes back into t, so the result keeps t's dtype.
	if !safe {
		same = true
	}
	a := t
	typ := t.Dtype().Type
	var ait, bit, iit Iterator
	var dataA, dataB, dataReuse, scalarHeader *storage.Header
	var useIter bool
	// The scalar is materialized into a pooled header that must be given
	// back via returnHeader on every exit path below.
	if leftTensor {
		if dataA, dataB, dataReuse, ait, iit, useIter, err = prepDataVS(t, s, reuse); err != nil {
			return nil, errors.Wrapf(err, opFail, "StdEng.Gte")
		}
		scalarHeader = dataB
	} else {
		if dataA, dataB, dataReuse, bit, iit, useIter, err = prepDataSV(s, t, reuse); err != nil {
			return nil, errors.Wrapf(err, opFail, "StdEng.Gte")
		}
		scalarHeader = dataA
	}
	// check to see if anything needs to be created
	switch {
	case same && safe && reuse == nil:
		reuse = NewDense(a.Dtype(), a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	case !same && safe && reuse == nil:
		// Default comparison result is a Bool tensor.
		reuse = NewDense(Bool, a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	}
	if useIter {
		switch {
		case !safe && same && reuse == nil:
			err = e.E.GteSameIter(typ, dataA, dataB, ait, bit)
			retVal = a
		case same && safe && reuse != nil && !leftTensor:
			// Copy the tensor side (b) into reuse, rewind, then compute
			// a ≥ reuse in place.
			storage.CopyIter(typ, dataReuse, dataB, iit, bit)
			bit.Reset()
			iit.Reset()
			err = e.E.GteSameIter(typ, dataA, dataReuse, ait, bit)
			retVal = reuse
		case same && safe && reuse != nil && leftTensor:
			storage.CopyIter(typ, dataReuse, dataA, iit, ait)
			ait.Reset()
			iit.Reset()
			err = e.E.GteSameIter(typ, dataReuse, dataB, iit, bit)
			retVal = reuse
		default: // safe && bool
			err = e.E.GteIter(typ, dataA, dataB, dataReuse, ait, bit, iit)
			retVal = reuse
		}
		returnHeader(scalarHeader)
		return
	}
	// handle special case where A and B have both len 1
	if dataA.L == 1 && dataB.L == 1 {
		switch {
		case same && safe && reuse != nil && leftTensor:
			storage.Copy(typ, dataReuse, dataA)
			err = e.E.GteSame(typ, dataReuse, dataB)
			retVal = reuse
			// BUGFIX: previously returned without recycling the pooled
			// scalar header, leaking it (every other exit path returns it).
			returnHeader(scalarHeader)
			return
		case same && safe && reuse != nil && !leftTensor:
			// s ≥ t computed as t ≤ s so the result lands in the copy of
			// the tensor side.
			storage.Copy(typ, dataReuse, dataB)
			err = e.E.LteSame(typ, dataReuse, dataA)
			retVal = reuse
			returnHeader(scalarHeader) // BUGFIX: see above.
			return
		}
	}
	// standard
	switch {
	case !safe && same && reuse == nil:
		err = e.E.GteSame(typ, dataA, dataB)
		retVal = a
	case same && safe && reuse != nil && leftTensor:
		storage.Copy(typ, dataReuse, dataA)
		err = e.E.GteSame(typ, dataReuse, dataB)
		retVal = reuse
	case same && safe && reuse != nil && !leftTensor:
		storage.Copy(typ, dataReuse, dataB)
		err = e.E.GteSame(typ, dataA, dataReuse)
		retVal = reuse
	default:
		err = e.E.Gte(typ, dataA, dataB, dataReuse)
		retVal = reuse
	}
	returnHeader(scalarHeader)
	return
}
// LtScalar performs t < s elementwise. The leftTensor parameter indicates if the tensor is the left operand. Only scalar types are accepted in s
// Acceptable FuncOpts are: UseUnsafe(), AsSameType(), WithReuse().
// UseUnsafe() will ensure that the same type is returned.
// Tensors used in WithReuse has to have the same Dtype as the return value's Dtype.
func (e StdEng) LtScalar(t Tensor, s interface{}, leftTensor bool, opts ...FuncOpt) (retVal Tensor, err error) {
	if err = unaryCheck(t, ordTypes); err != nil {
		return nil, errors.Wrapf(err, "Lt failed")
	}
	if err = scalarDtypeCheck(t, s); err != nil {
		return nil, errors.Wrap(err, "Lt failed")
	}
	var reuse DenseTensor
	var safe, same bool
	if reuse, safe, _, _, same, err = handleFuncOpts(t.Shape(), t.Dtype(), t.DataOrder(), false, opts...); err != nil {
		return nil, errors.Wrap(err, "Unable to handle funcOpts")
	}
	// An unsafe op overwrites t in place, so the result necessarily keeps t's dtype.
	if !safe {
		same = true
	}
	a := t
	typ := t.Dtype().Type
	var ait, bit, iit Iterator
	var dataA, dataB, dataReuse, scalarHeader *storage.Header
	var useIter bool
	// Arrange the operands so that dataA is always the left operand of "<".
	// scalarHeader tracks the pooled header wrapping s so it can be handed
	// back via returnHeader on exit.
	if leftTensor {
		if dataA, dataB, dataReuse, ait, iit, useIter, err = prepDataVS(t, s, reuse); err != nil {
			return nil, errors.Wrapf(err, opFail, "StdEng.Lt")
		}
		scalarHeader = dataB
	} else {
		if dataA, dataB, dataReuse, bit, iit, useIter, err = prepDataSV(s, t, reuse); err != nil {
			return nil, errors.Wrapf(err, opFail, "StdEng.Lt")
		}
		scalarHeader = dataA
	}
	// check to see if anything needs to be created
	switch {
	case same && safe && reuse == nil:
		// Same-dtype result requested but no reuse tensor supplied: allocate one.
		reuse = NewDense(a.Dtype(), a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	case !same && safe && reuse == nil:
		// Boolean result requested but no reuse tensor supplied: allocate one.
		reuse = NewDense(Bool, a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	}
	if useIter {
		switch {
		case !safe && same && reuse == nil:
			err = e.E.LtSameIter(typ, dataA, dataB, ait, bit)
			retVal = a
		case same && safe && reuse != nil && !leftTensor:
			// Copy the tensor operand into reuse first, then compare against it.
			// NOTE(review): dataReuse is traversed with bit here rather than iit;
			// this matches the sibling *Scalar ops — verify against the generator
			// template if iteration misbehaves for non-contiguous reuse tensors.
			storage.CopyIter(typ, dataReuse, dataB, iit, bit)
			bit.Reset()
			iit.Reset()
			err = e.E.LtSameIter(typ, dataA, dataReuse, ait, bit)
			retVal = reuse
		case same && safe && reuse != nil && leftTensor:
			storage.CopyIter(typ, dataReuse, dataA, iit, ait)
			ait.Reset()
			iit.Reset()
			err = e.E.LtSameIter(typ, dataReuse, dataB, iit, bit)
			retVal = reuse
		default: // safe && bool
			err = e.E.LtIter(typ, dataA, dataB, dataReuse, ait, bit, iit)
			retVal = reuse
		}
		returnHeader(scalarHeader)
		return
	}
	// handle special case where A and B have both len 1
	// NOTE(review): these early returns skip returnHeader(scalarHeader);
	// presumably this only forgoes header pooling rather than leaking — confirm.
	if dataA.L == 1 && dataB.L == 1 {
		switch {
		case same && safe && reuse != nil && leftTensor:
			storage.Copy(typ, dataReuse, dataA)
			err = e.E.LtSame(typ, dataReuse, dataB)
			retVal = reuse
			return
		case same && safe && reuse != nil && !leftTensor:
			// dataReuse holds a copy of the tensor (the right operand), so
			// "s < t" is evaluated as the flipped "t > s".
			storage.Copy(typ, dataReuse, dataB)
			err = e.E.GtSame(typ, dataReuse, dataA)
			retVal = reuse
			return
		}
	}
	// standard
	switch {
	case !safe && same && reuse == nil:
		err = e.E.LtSame(typ, dataA, dataB)
		retVal = a
	case same && safe && reuse != nil && leftTensor:
		storage.Copy(typ, dataReuse, dataA)
		err = e.E.LtSame(typ, dataReuse, dataB)
		retVal = reuse
	case same && safe && reuse != nil && !leftTensor:
		storage.Copy(typ, dataReuse, dataB)
		err = e.E.LtSame(typ, dataA, dataReuse)
		retVal = reuse
	default:
		err = e.E.Lt(typ, dataA, dataB, dataReuse)
		retVal = reuse
	}
	returnHeader(scalarHeader)
	return
}
// LteScalar performs t ≤ s elementwise. The leftTensor parameter indicates if the tensor is the left operand. Only scalar types are accepted in s
// Acceptable FuncOpts are: UseUnsafe(), AsSameType(), WithReuse().
// UseUnsafe() will ensure that the same type is returned.
// Tensors used in WithReuse has to have the same Dtype as the return value's Dtype.
func (e StdEng) LteScalar(t Tensor, s interface{}, leftTensor bool, opts ...FuncOpt) (retVal Tensor, err error) {
	if err = unaryCheck(t, ordTypes); err != nil {
		return nil, errors.Wrapf(err, "Lte failed")
	}
	if err = scalarDtypeCheck(t, s); err != nil {
		return nil, errors.Wrap(err, "Lte failed")
	}
	var reuse DenseTensor
	var safe, same bool
	if reuse, safe, _, _, same, err = handleFuncOpts(t.Shape(), t.Dtype(), t.DataOrder(), false, opts...); err != nil {
		return nil, errors.Wrap(err, "Unable to handle funcOpts")
	}
	// An unsafe op overwrites t in place, so the result necessarily keeps t's dtype.
	if !safe {
		same = true
	}
	a := t
	typ := t.Dtype().Type
	var ait, bit, iit Iterator
	var dataA, dataB, dataReuse, scalarHeader *storage.Header
	var useIter bool
	// Arrange the operands so that dataA is always the left operand of "≤".
	// scalarHeader tracks the pooled header wrapping s for returnHeader on exit.
	if leftTensor {
		if dataA, dataB, dataReuse, ait, iit, useIter, err = prepDataVS(t, s, reuse); err != nil {
			return nil, errors.Wrapf(err, opFail, "StdEng.Lte")
		}
		scalarHeader = dataB
	} else {
		if dataA, dataB, dataReuse, bit, iit, useIter, err = prepDataSV(s, t, reuse); err != nil {
			return nil, errors.Wrapf(err, opFail, "StdEng.Lte")
		}
		scalarHeader = dataA
	}
	// check to see if anything needs to be created
	switch {
	case same && safe && reuse == nil:
		// Same-dtype result requested but no reuse tensor supplied: allocate one.
		reuse = NewDense(a.Dtype(), a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	case !same && safe && reuse == nil:
		// Boolean result requested but no reuse tensor supplied: allocate one.
		reuse = NewDense(Bool, a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	}
	if useIter {
		switch {
		case !safe && same && reuse == nil:
			err = e.E.LteSameIter(typ, dataA, dataB, ait, bit)
			retVal = a
		case same && safe && reuse != nil && !leftTensor:
			// Copy the tensor operand into reuse first, then compare against it.
			storage.CopyIter(typ, dataReuse, dataB, iit, bit)
			bit.Reset()
			iit.Reset()
			err = e.E.LteSameIter(typ, dataA, dataReuse, ait, bit)
			retVal = reuse
		case same && safe && reuse != nil && leftTensor:
			storage.CopyIter(typ, dataReuse, dataA, iit, ait)
			ait.Reset()
			iit.Reset()
			err = e.E.LteSameIter(typ, dataReuse, dataB, iit, bit)
			retVal = reuse
		default: // safe && bool
			err = e.E.LteIter(typ, dataA, dataB, dataReuse, ait, bit, iit)
			retVal = reuse
		}
		returnHeader(scalarHeader)
		return
	}
	// handle special case where A and B have both len 1
	// NOTE(review): these early returns skip returnHeader(scalarHeader);
	// presumably this only forgoes header pooling rather than leaking — confirm.
	if dataA.L == 1 && dataB.L == 1 {
		switch {
		case same && safe && reuse != nil && leftTensor:
			storage.Copy(typ, dataReuse, dataA)
			err = e.E.LteSame(typ, dataReuse, dataB)
			retVal = reuse
			return
		case same && safe && reuse != nil && !leftTensor:
			// dataReuse holds a copy of the tensor (the right operand), so
			// "s ≤ t" is evaluated as the flipped "t ≥ s".
			storage.Copy(typ, dataReuse, dataB)
			err = e.E.GteSame(typ, dataReuse, dataA)
			retVal = reuse
			return
		}
	}
	// standard
	switch {
	case !safe && same && reuse == nil:
		err = e.E.LteSame(typ, dataA, dataB)
		retVal = a
	case same && safe && reuse != nil && leftTensor:
		storage.Copy(typ, dataReuse, dataA)
		err = e.E.LteSame(typ, dataReuse, dataB)
		retVal = reuse
	case same && safe && reuse != nil && !leftTensor:
		storage.Copy(typ, dataReuse, dataB)
		err = e.E.LteSame(typ, dataA, dataReuse)
		retVal = reuse
	default:
		err = e.E.Lte(typ, dataA, dataB, dataReuse)
		retVal = reuse
	}
	returnHeader(scalarHeader)
	return
}
// EqScalar performs t == s elementwise. The leftTensor parameter indicates if the tensor is the left operand. Only scalar types are accepted in s
// Acceptable FuncOpts are: UseUnsafe(), AsSameType(), WithReuse().
// UseUnsafe() will ensure that the same type is returned.
// Tensors used in WithReuse has to have the same Dtype as the return value's Dtype.
func (e StdEng) EqScalar(t Tensor, s interface{}, leftTensor bool, opts ...FuncOpt) (retVal Tensor, err error) {
	if err = unaryCheck(t, eqTypes); err != nil {
		return nil, errors.Wrapf(err, "Eq failed")
	}
	if err = scalarDtypeCheck(t, s); err != nil {
		return nil, errors.Wrap(err, "Eq failed")
	}
	var reuse DenseTensor
	var safe, same bool
	if reuse, safe, _, _, same, err = handleFuncOpts(t.Shape(), t.Dtype(), t.DataOrder(), false, opts...); err != nil {
		return nil, errors.Wrap(err, "Unable to handle funcOpts")
	}
	// An unsafe op overwrites t in place, so the result necessarily keeps t's dtype.
	if !safe {
		same = true
	}
	a := t
	typ := t.Dtype().Type
	var ait, bit, iit Iterator
	var dataA, dataB, dataReuse, scalarHeader *storage.Header
	var useIter bool
	// scalarHeader tracks the pooled header wrapping s for returnHeader on exit.
	if leftTensor {
		if dataA, dataB, dataReuse, ait, iit, useIter, err = prepDataVS(t, s, reuse); err != nil {
			return nil, errors.Wrapf(err, opFail, "StdEng.Eq")
		}
		scalarHeader = dataB
	} else {
		if dataA, dataB, dataReuse, bit, iit, useIter, err = prepDataSV(s, t, reuse); err != nil {
			return nil, errors.Wrapf(err, opFail, "StdEng.Eq")
		}
		scalarHeader = dataA
	}
	// check to see if anything needs to be created
	switch {
	case same && safe && reuse == nil:
		// Same-dtype result requested but no reuse tensor supplied: allocate one.
		reuse = NewDense(a.Dtype(), a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	case !same && safe && reuse == nil:
		// Boolean result requested but no reuse tensor supplied: allocate one.
		reuse = NewDense(Bool, a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	}
	if useIter {
		switch {
		case !safe && same && reuse == nil:
			err = e.E.EqSameIter(typ, dataA, dataB, ait, bit)
			retVal = a
		case same && safe && reuse != nil && !leftTensor:
			// Copy the tensor operand into reuse first, then compare against it.
			storage.CopyIter(typ, dataReuse, dataB, iit, bit)
			bit.Reset()
			iit.Reset()
			err = e.E.EqSameIter(typ, dataA, dataReuse, ait, bit)
			retVal = reuse
		case same && safe && reuse != nil && leftTensor:
			storage.CopyIter(typ, dataReuse, dataA, iit, ait)
			ait.Reset()
			iit.Reset()
			err = e.E.EqSameIter(typ, dataReuse, dataB, iit, bit)
			retVal = reuse
		default: // safe && bool
			err = e.E.EqIter(typ, dataA, dataB, dataReuse, ait, bit, iit)
			retVal = reuse
		}
		returnHeader(scalarHeader)
		return
	}
	// handle special case where A and B have both len 1
	// NOTE(review): these early returns skip returnHeader(scalarHeader);
	// presumably this only forgoes header pooling rather than leaking — confirm.
	if dataA.L == 1 && dataB.L == 1 {
		switch {
		case same && safe && reuse != nil && leftTensor:
			storage.Copy(typ, dataReuse, dataA)
			err = e.E.EqSame(typ, dataReuse, dataB)
			retVal = reuse
			return
		case same && safe && reuse != nil && !leftTensor:
			// Equality is symmetric, so no operand flip is needed here.
			storage.Copy(typ, dataReuse, dataB)
			err = e.E.EqSame(typ, dataReuse, dataA)
			retVal = reuse
			return
		}
	}
	// standard
	switch {
	case !safe && same && reuse == nil:
		err = e.E.EqSame(typ, dataA, dataB)
		retVal = a
	case same && safe && reuse != nil && leftTensor:
		storage.Copy(typ, dataReuse, dataA)
		err = e.E.EqSame(typ, dataReuse, dataB)
		retVal = reuse
	case same && safe && reuse != nil && !leftTensor:
		storage.Copy(typ, dataReuse, dataB)
		err = e.E.EqSame(typ, dataA, dataReuse)
		retVal = reuse
	default:
		err = e.E.Eq(typ, dataA, dataB, dataReuse)
		retVal = reuse
	}
	returnHeader(scalarHeader)
	return
}
// NeScalar performs t != s elementwise. The leftTensor parameter indicates if the tensor is the left operand. Only scalar types are accepted in s
// Acceptable FuncOpts are: UseUnsafe(), AsSameType(), WithReuse().
// UseUnsafe() will ensure that the same type is returned.
// Tensors used in WithReuse has to have the same Dtype as the return value's Dtype.
func (e StdEng) NeScalar(t Tensor, s interface{}, leftTensor bool, opts ...FuncOpt) (retVal Tensor, err error) {
	if err = unaryCheck(t, eqTypes); err != nil {
		return nil, errors.Wrapf(err, "Ne failed")
	}
	if err = scalarDtypeCheck(t, s); err != nil {
		return nil, errors.Wrap(err, "Ne failed")
	}
	var reuse DenseTensor
	var safe, same bool
	if reuse, safe, _, _, same, err = handleFuncOpts(t.Shape(), t.Dtype(), t.DataOrder(), false, opts...); err != nil {
		return nil, errors.Wrap(err, "Unable to handle funcOpts")
	}
	// An unsafe op overwrites t in place, so the result necessarily keeps t's dtype.
	if !safe {
		same = true
	}
	a := t
	typ := t.Dtype().Type
	var ait, bit, iit Iterator
	var dataA, dataB, dataReuse, scalarHeader *storage.Header
	var useIter bool
	// scalarHeader tracks the pooled header wrapping s for returnHeader on exit.
	if leftTensor {
		if dataA, dataB, dataReuse, ait, iit, useIter, err = prepDataVS(t, s, reuse); err != nil {
			return nil, errors.Wrapf(err, opFail, "StdEng.Ne")
		}
		scalarHeader = dataB
	} else {
		if dataA, dataB, dataReuse, bit, iit, useIter, err = prepDataSV(s, t, reuse); err != nil {
			return nil, errors.Wrapf(err, opFail, "StdEng.Ne")
		}
		scalarHeader = dataA
	}
	// check to see if anything needs to be created
	switch {
	case same && safe && reuse == nil:
		// Same-dtype result requested but no reuse tensor supplied: allocate one.
		reuse = NewDense(a.Dtype(), a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	case !same && safe && reuse == nil:
		// Boolean result requested but no reuse tensor supplied: allocate one.
		reuse = NewDense(Bool, a.Shape().Clone(), WithEngine(e))
		dataReuse = reuse.hdr()
		if useIter {
			iit = IteratorFromDense(reuse)
		}
	}
	if useIter {
		switch {
		case !safe && same && reuse == nil:
			err = e.E.NeSameIter(typ, dataA, dataB, ait, bit)
			retVal = a
		case same && safe && reuse != nil && !leftTensor:
			// Copy the tensor operand into reuse first, then compare against it.
			storage.CopyIter(typ, dataReuse, dataB, iit, bit)
			bit.Reset()
			iit.Reset()
			err = e.E.NeSameIter(typ, dataA, dataReuse, ait, bit)
			retVal = reuse
		case same && safe && reuse != nil && leftTensor:
			storage.CopyIter(typ, dataReuse, dataA, iit, ait)
			ait.Reset()
			iit.Reset()
			err = e.E.NeSameIter(typ, dataReuse, dataB, iit, bit)
			retVal = reuse
		default: // safe && bool
			err = e.E.NeIter(typ, dataA, dataB, dataReuse, ait, bit, iit)
			retVal = reuse
		}
		returnHeader(scalarHeader)
		return
	}
	// handle special case where A and B have both len 1
	// NOTE(review): these early returns skip returnHeader(scalarHeader);
	// presumably this only forgoes header pooling rather than leaking — confirm.
	if dataA.L == 1 && dataB.L == 1 {
		switch {
		case same && safe && reuse != nil && leftTensor:
			storage.Copy(typ, dataReuse, dataA)
			err = e.E.NeSame(typ, dataReuse, dataB)
			retVal = reuse
			return
		case same && safe && reuse != nil && !leftTensor:
			// Inequality is symmetric, so no operand flip is needed here.
			storage.Copy(typ, dataReuse, dataB)
			err = e.E.NeSame(typ, dataReuse, dataA)
			retVal = reuse
			return
		}
	}
	// standard
	switch {
	case !safe && same && reuse == nil:
		err = e.E.NeSame(typ, dataA, dataB)
		retVal = a
	case same && safe && reuse != nil && leftTensor:
		storage.Copy(typ, dataReuse, dataA)
		err = e.E.NeSame(typ, dataReuse, dataB)
		retVal = reuse
	case same && safe && reuse != nil && !leftTensor:
		storage.Copy(typ, dataReuse, dataB)
		err = e.E.NeSame(typ, dataA, dataReuse)
		retVal = reuse
	default:
		err = e.E.Ne(typ, dataA, dataB, dataReuse)
		retVal = reuse
	}
	returnHeader(scalarHeader)
	return
}
package geohash
import "math"
var (
	// const used to interleave64 and deinterleave64
	// From: https://graphics.stanford.edu/~seander/bithacks.html#InterleaveBMN
	// s holds the shift distances used by each spreading/compacting round.
	s = []uint32{0, 1, 2, 4, 8, 16}
	// b holds the matching "magic" bit masks for those rounds.
	b = []uint64{
		0x5555555555555555,
		0x3333333333333333,
		0x0F0F0F0F0F0F0F0F,
		0x00FF00FF00FF00FF,
		0x0000FFFF0000FFFF,
		0x00000000FFFFFFFF,
	}
	// geoalphabet is the geohash base32 alphabet: digits plus lowercase
	// letters, omitting a, i, l and o.
	geoalphabet = "0123456789bcdefghjkmnpqrstuvwxyz"
)
const (
	// MERCATOR_MAX is the maximum extent of the Mercator projection, in meters.
	MERCATOR_MAX float64 = 20037726.37
	// Earth's quadratic mean radius for WGS-84, in meters.
	EARTH_RADIUS_IN_METERS float64 = 6372797.560856
	// D_R converts degrees to radians.
	D_R = (math.Pi / 180.0)
)
// degRad converts an angle from degrees to radians.
func degRad(ang float64) float64 {
	return ang * D_R
}
// radDeg converts an angle from radians to degrees.
func radDeg(ang float64) float64 {
	return ang / D_R
}
/* Interleave lower bits of x and y, so the bits of x
 * are in the even positions and bits from y in the odd;
 * x and y must initially be less than 2**32.
 * (The original bithacks page shows the 16-bit variant; this is the
 * 32-bit extension.)
 * From: https://graphics.stanford.edu/~seander/bithacks.html#InterleaveBMN
 */
func interleave64(xlo uint32, ylo uint32) uint64 {
	var x, y uint64 = uint64(xlo), uint64(ylo)
	// Each round spreads the bits further apart: shift by 16, 8, 4, 2, 1
	// and mask with the matching magic pattern from b.
	x = (x | x<<s[5]) & b[4]
	y = (y | y<<s[5]) & b[4]
	x = (x | x<<s[4]) & b[3]
	y = (y | y<<s[4]) & b[3]
	x = (x | x<<s[3]) & b[2]
	y = (y | y<<s[3]) & b[2]
	x = (x | x<<s[2]) & b[1]
	y = (y | y<<s[2]) & b[1]
	x = (x | x<<s[1]) & b[0]
	y = (y | y<<s[1]) & b[0]
	// x occupies the even bit positions, y the odd ones.
	return x | (y << 1)
}
/* reverse the interleave process
 * derived from http://stackoverflow.com/questions/4909263
 */
// deinterleave64 undoes interleave64: it returns first the value stored in
// the even bit positions, then the one stored in the odd bit positions.
func deinterleave64(interleaved uint64) (uint32, uint32) {
	// Shift y down by one so both halves can be compacted with the same
	// mask/shift sequence (the inverse of interleave64's spreading rounds).
	x, y := interleaved, interleaved>>1
	x = (x | (x >> s[0])) & b[0]
	y = (y | (y >> s[0])) & b[0]
	x = (x | (x >> s[1])) & b[1]
	y = (y | (y >> s[1])) & b[1]
	x = (x | (x >> s[2])) & b[2]
	y = (y | (y >> s[2])) & b[2]
	x = (x | (x >> s[3])) & b[3]
	y = (y | (y >> s[3])) & b[3]
	x = (x | (x >> s[4])) & b[4]
	y = (y | (y >> s[4])) & b[4]
	x = (x | (x >> s[5])) & b[5]
	y = (y | (y >> s[5])) & b[5]
	// Pack x into the low 32 bits and y into the high 32 bits, then split.
	x = x | (y << 32)
	return uint32(x), uint32(x >> 32)
}
// DistBetweenGeoHashWGS84 returns the great-circle distance in meters
// between the long/lat points decoded from the two geohashes
// (presumably the cell centers — see DecodeToLongLatWGS84).
func DistBetweenGeoHashWGS84(hash0 uint64, hash1 uint64) float64 {
	lon0d, lat0d := DecodeToLongLatWGS84(hash0)
	lon1d, lat1d := DecodeToLongLatWGS84(hash1)
	return GetDistance(lon0d, lat0d, lon1d, lat1d)
}
// GetDistance computes the great-circle distance in meters between two
// WGS84 coordinates (given in degrees) using the haversine formula.
func GetDistance(lon0d, lat0d, lon1d, lat1d float64) float64 {
	// Work in radians throughout.
	phi0 := degRad(lat0d)
	lam0 := degRad(lon0d)
	phi1 := degRad(lat1d)
	lam1 := degRad(lon1d)
	// Haversines of the latitude and longitude deltas.
	sinHalfDLat := math.Sin((phi1 - phi0) / 2)
	sinHalfDLon := math.Sin((lam1 - lam0) / 2)
	h := sinHalfDLat*sinHalfDLat + math.Cos(phi0)*math.Cos(phi1)*sinHalfDLon*sinHalfDLon
	return 2.0 * EARTH_RADIUS_IN_METERS * math.Asin(math.Sqrt(h))
}
// EncodeToBase32 renders a 52-bit geohash score as an 11-character
// base32 byte slice using the geohash alphabet.
func EncodeToBase32(hash uint64) []byte {
	buf := make([]byte, 11)
	var i uint8 = 0
	for ; i < 11; i++ {
		// Take 5 bits at a time from the top of the 52-bit score.
		// For i == 10 the uint8 expression 52-55 wraps to 253, and a Go
		// shift count >= 64 yields 0, so the last character is always
		// geoalphabet[0] ('0') — only the first 10 characters carry bits.
		idx := (hash >> (52 - ((i + 1) * 5))) & 0x1f
		buf[i] = geoalphabet[idx]
	}
	return buf
}
// GetAreasByRadiusWGS84 returns the geohash cell covering the given WGS84
// point at a precision estimated from radius (meters), together with its
// 8 neighbouring cells. The precision is lowered by one step if any
// adjacent cell edge falls within the radius, and neighbours lying fully
// outside the radius' bounding box are zeroed out so callers can skip them.
func GetAreasByRadiusWGS84(longitude, latitude, radius float64) (*Radius, error) {
	minLon, minLat, maxLon, maxLat := boundingBox(longitude, latitude, radius)
	steps := estimateStepsByRadius(radius, latitude)
	hash, err := Encode(
		WGS84_LONG_RANGE, WGS84_LAT_RANGE,
		longitude, latitude,
		steps)
	if err != nil {
		return nil, err
	}
	neighbors := GetNeighbors(hash)
	area := decode(WGS84_LONG_RANGE, WGS84_LAT_RANGE, hash)
	/* Check if the step is enough at the limits of the covered area.
	 * Sometimes when the search area is near an edge of the
	 * area, the estimated step is not small enough, since one of the
	 * north / south / west / east square is too near to the search area
	 * to cover everything. */
	var decrStep bool
	// Note: these locals shadow the package-level bit-hack slice s (and n);
	// harmless here, but beware when editing.
	n := decode(WGS84_LONG_RANGE, WGS84_LAT_RANGE, neighbors.North)
	s := decode(WGS84_LONG_RANGE, WGS84_LAT_RANGE, neighbors.South)
	e := decode(WGS84_LONG_RANGE, WGS84_LAT_RANGE, neighbors.East)
	w := decode(WGS84_LONG_RANGE, WGS84_LAT_RANGE, neighbors.West)
	if GetDistance(longitude, latitude, longitude, n.Latitude.Max) < radius {
		decrStep = true
	}
	if GetDistance(longitude, latitude, longitude, s.Latitude.Min) < radius {
		decrStep = true
	}
	if GetDistance(longitude, latitude, e.Longitude.Max, latitude) < radius {
		decrStep = true
	}
	if GetDistance(longitude, latitude, w.Longitude.Min, latitude) < radius {
		decrStep = true
	}
	if decrStep && steps > 1 {
		// Re-encode at one step less precision (larger cells) and redo
		// the neighbour computation.
		steps--
		hash, err = Encode(
			WGS84_LONG_RANGE, WGS84_LAT_RANGE,
			longitude, latitude,
			steps)
		if err != nil {
			return nil, err
		}
		neighbors = GetNeighbors(hash)
		area = decode(WGS84_LONG_RANGE, WGS84_LAT_RANGE, hash)
	}
	/* Exclude the search areas that are useless. */
	if area.Latitude.Min < minLat {
		(&neighbors.South).Clean()
		(&neighbors.SouthWest).Clean()
		(&neighbors.SouthEast).Clean()
	}
	if area.Latitude.Max > maxLat {
		(&neighbors.North).Clean()
		(&neighbors.NorthEast).Clean()
		(&neighbors.NorthWest).Clean()
	}
	if area.Longitude.Min < minLon {
		(&neighbors.West).Clean()
		(&neighbors.SouthWest).Clean()
		(&neighbors.NorthWest).Clean()
	}
	if area.Longitude.Max > maxLon {
		(&neighbors.East).Clean()
		(&neighbors.SouthEast).Clean()
		(&neighbors.NorthEast).Clean()
	}
	return &Radius{
		Area:      *area,
		Hash:      hash,
		Neighbors: neighbors,
	}, nil
}
// boundingBox returns the min/max longitude and latitude (in degrees) of
// a box that encloses the circle of the given radius (meters) around the
// given WGS84 point.
func boundingBox(longitude, latitude, radius float64) (
	minLongitude float64,
	minLatitude float64,
	maxLongitude float64,
	maxLatitude float64) {
	lonR, latR := degRad(longitude), degRad(latitude)
	if radius > EARTH_RADIUS_IN_METERS {
		radius = EARTH_RADIUS_IN_METERS
	}
	// distance is the angular radius in radians on the unit sphere.
	distance := radius / EARTH_RADIUS_IN_METERS
	minLatitude, maxLatitude = latR-distance, latR+distance
	// Longitude span widens with latitude.
	// NOTE(review): near the poles Sin(distance)/Cos(latR) can exceed 1,
	// making diffLongitude NaN — confirm callers keep latitude away from ±90°.
	diffLongitude := math.Asin(math.Sin(distance) / math.Cos(latR))
	minLongitude = lonR - diffLongitude
	maxLongitude = lonR + diffLongitude
	// Convert the results back to degrees.
	minLatitude = radDeg(minLatitude)
	maxLatitude = radDeg(maxLatitude)
	minLongitude = radDeg(minLongitude)
	maxLongitude = radDeg(maxLongitude)
	return
}
/* This function is used in order to estimate the step (bits precision)
 * of the 9 search area boxes during radius queries. */
// estimateStepsByRadius returns a geohash step count in [1, 26] such that
// a cell at that precision roughly covers rangeMeters, widened near the
// poles where cells shrink. A non-positive range yields full precision.
func estimateStepsByRadius(rangeMeters, latitude float64) uint8 {
	// A non-positive range would never terminate the doubling loop below
	// (doubling a negative value keeps it negative), so treat it like the
	// rangeMeters == 0 case and return full precision.
	if rangeMeters <= 0 {
		return WGS84_GEO_STEP
	}
	// Each step halves the cell size, so count how many doublings it takes
	// for the range to reach the full Mercator extent. The counter is an
	// int (not int8) so extremely small ranges cannot wrap it.
	step := 1
	for rangeMeters < MERCATOR_MAX {
		rangeMeters *= 2
		step++
	}
	// Make sure range is included in most of the base cases.
	step -= 2
	// Wider range torwards the poles... Note: it is possible to do better
	// than this approximation by computing the distance between meridians
	// at this latitude, but this does the trick for now.
	if latitude > 66 || latitude < -66 {
		step--
		if latitude > 80 || latitude < -80 {
			step--
		}
	}
	/* Frame to valid range. */
	if step < 1 {
		step = 1
	} else if step > 26 {
		step = 26
	}
	return uint8(step)
}
// GetNeighbors returns the 8 cells adjacent to hash, computed by shifting
// a copy of the hash one cell along each compass direction.
func GetNeighbors(hash HashBits) *Neighbors {
	// Start every neighbour as a copy of the center cell.
	neighbors := &Neighbors{
		East:      hash,
		West:      hash,
		North:     hash,
		South:     hash,
		SouthEast: hash,
		SouthWest: hash,
		NorthEast: hash,
		NorthWest: hash,
	}
	// shift moves a cell dx steps along the X axis and dy along the Y axis
	// (moveX/moveY treat 0 as a no-op).
	shift := func(h *HashBits, dx, dy int8) {
		moveX(h, dx)
		moveY(h, dy)
	}
	shift(&neighbors.East, 1, 0)
	shift(&neighbors.West, -1, 0)
	shift(&neighbors.South, 0, -1)
	shift(&neighbors.North, 0, 1)
	shift(&neighbors.NorthWest, -1, 1)
	shift(&neighbors.NorthEast, 1, 1)
	shift(&neighbors.SouthEast, 1, -1)
	shift(&neighbors.SouthWest, -1, -1)
	return neighbors
}
// moveX shifts the hash by one cell along the axis stored in the odd bit
// positions (mask 0xaaaa...): d > 0 moves forward, d < 0 backward,
// d == 0 is a no-op. Mutates hash in place and also returns it.
func moveX(hash *HashBits, d int8) *HashBits {
	if d == 0 {
		return hash
	}
	// Split the interleaved value into the two coordinate bit sets.
	var xmask, ymask uint64 = 0xaaaaaaaaaaaaaaaa, 0x5555555555555555
	var x uint64 = hash.Bits & xmask
	var y uint64 = hash.Bits & ymask
	// zz has ones in all the gap (other-axis) positions up to this hash's
	// precision; adding zz+1 increments this axis by one, with carries
	// hopping over the interleaved other-axis bit positions.
	var zz uint64 = ymask >> (64 - hash.Step*2)
	if d > 0 {
		x = x + (zz + 1)
	} else {
		// Fill the gap bits first so the subtraction borrows correctly.
		x = x | zz
		x = x - (zz + 1)
	}
	// Wrap within the hash's precision and re-merge with the other axis.
	x &= (xmask >> (64 - hash.Step*2))
	hash.Bits = (x | y)
	return hash
}
// moveY shifts the hash by one cell along the axis stored in the even bit
// positions (mask 0x5555...): d > 0 moves forward, d < 0 backward,
// d == 0 is a no-op. Mutates hash in place and also returns it.
func moveY(hash *HashBits, d int8) *HashBits {
	if d == 0 {
		return hash
	}
	// Split the interleaved value into the two coordinate bit sets.
	var xmask, ymask uint64 = 0xaaaaaaaaaaaaaaaa, 0x5555555555555555
	var x uint64 = hash.Bits & xmask
	var y uint64 = hash.Bits & ymask
	// zz fills the gap (other-axis) positions; adding zz+1 increments this
	// axis by one with carries hopping over the interleaved bits.
	var zz uint64 = xmask >> (64 - hash.Step*2)
	if d > 0 {
		y = y + (zz + 1)
	} else {
		// Fill the gap bits first so the subtraction borrows correctly.
		y = y | zz
		y = y - (zz + 1)
	}
	// Wrap within the hash's precision and re-merge with the other axis.
	y &= (ymask >> (64 - hash.Step*2))
	hash.Bits = (x | y)
	return hash
}
package main
import (
"github.com/gen2brain/raylib-go/raylib"
"math"
)
// Enemy is a grid-bound NPC that greedily chases a target cell through
// the level, one cell per Update.
type Enemy struct {
	lvl           *Level     // level whose state grid the enemy reads and marks
	position      rl.Vector2 // current grid cell (whole-number coordinates)
	texture       rl.Texture2D
	speed         rl.Vector2 // current direction, components in {-1, 0, 1}
	target        rl.Vector2 // cell the enemy is currently heading for
	defaultTarget rl.Vector2 // spawn cell; restored by SetDefaultTarget
	savedState    int        // level-state value of the tile the enemy covers
}
// SetTarget makes the enemy head towards the given grid cell.
func (e *Enemy) SetTarget(target rl.Vector2) {
	e.target = target
}
// SetDefaultTarget sends the enemy back towards its spawn cell.
func (e *Enemy) SetDefaultTarget() {
	e.target = e.defaultTarget
}
// computeDistance returns the Euclidean distance between the points u and v.
func computeDistance(u, v rl.Vector2) float64 {
	// math.Hypot is the idiomatic (and overflow/underflow-safe) form of
	// Sqrt(dx*dx + dy*dy).
	return math.Hypot(float64(u.X-v.X), float64(u.Y-v.Y))
}
// Update advances the enemy one grid cell towards its target. Among the
// four orthogonal neighbours it greedily picks the walkable one
// (state > 0) closest to the target, refusing to reverse its current
// direction, then steps there while maintaining the level's state grid
// (restoring the tile it left and stamping ENEMY on the new one).
func (e *Enemy) Update() {
	// Best target distance found so far; 2048 acts as "infinity" for any
	// reasonable level size.
	bestDistance := float64(2048)
	change := rl.Vector2{}
	// Up: walkable and not a reversal of a downward move.
	if e.lvl.state[int(e.position.Y-1)][int(e.position.X)] > 0 && e.speed.Y != 1 {
		if distance := computeDistance(rl.Vector2{X: e.position.X, Y: e.position.Y - 1}, e.target); distance < bestDistance {
			bestDistance = distance
			change.X = 0
			change.Y = -1
		}
	}
	// Down.
	if e.lvl.state[int(e.position.Y+1)][int(e.position.X)] > 0 && e.speed.Y != -1 {
		if distance := computeDistance(rl.Vector2{X: e.position.X, Y: e.position.Y + 1}, e.target); distance < bestDistance {
			bestDistance = distance
			change.X = 0
			change.Y = 1
		}
	}
	// Left.
	if e.lvl.state[int(e.position.Y)][int(e.position.X-1)] > 0 && e.speed.X != 1 {
		if distance := computeDistance(rl.Vector2{X: e.position.X - 1, Y: e.position.Y}, e.target); distance < bestDistance {
			bestDistance = distance
			change.X = -1
			change.Y = 0
		}
	}
	// Right.
	if e.lvl.state[int(e.position.Y)][int(e.position.X+1)] > 0 && e.speed.X != -1 {
		if distance := computeDistance(rl.Vector2{X: e.position.X + 1, Y: e.position.Y}, e.target); distance < bestDistance {
			bestDistance = distance
			change.X = 1
			change.Y = 0
		}
	}
	// Adopt the new direction if any candidate was found. The sum test
	// works because exactly one component of change is ±1 when set.
	if change.X+change.Y != 0 {
		e.speed.X = change.X
		e.speed.Y = change.Y
	}
	next := e.lvl.state[int(e.position.Y+e.speed.Y)][int(e.position.X+e.speed.X)]
	if next > 0 {
		// Step: restore the tile we were covering, move, remember the new
		// tile's previous state, then mark the cell as occupied.
		e.lvl.state[int(e.position.Y)][int(e.position.X)] = e.savedState
		e.position.X += e.speed.X
		e.position.Y += e.speed.Y
		e.savedState = e.lvl.state[int(e.position.Y)][int(e.position.X)]
		e.lvl.state[int(e.position.Y)][int(e.position.X)] = ENEMY
	} else {
		// Blocked: stop until a new direction is picked on a later update.
		e.speed.X = 0
		e.speed.Y = 0
	}
}
// ProcessInput is a no-op: enemies are AI-driven and take no player input.
// (Present so Enemy satisfies the same update/draw contract as other entities.)
func (e *Enemy) ProcessInput() {
}
// Draw renders the enemy's texture at its grid cell, scaled to pixels
// by spriteSize.
func (e *Enemy) Draw() {
	rl.DrawTexture(e.texture, int32(e.position.X*spriteSize), int32(e.position.Y*spriteSize), rl.RayWhite)
}
// GetStat returns an empty string: enemies expose no status text.
func (e *Enemy) GetStat() string {
	return ""
}
// newEnemy creates an enemy at the given grid cell, loads its texture
// (requires an initialized raylib window/context — confirm call order),
// sets its spawn cell as the default target, and marks its cell as ENEMY
// in the level state, remembering the covered tile as EMPTY.
func newEnemy(position rl.Vector2, level *Level, texturePath string) (e *Enemy) {
	e = &Enemy{}
	e.texture = rl.LoadTexture(texturePath)
	e.position = position
	e.lvl = level
	e.defaultTarget = position
	e.lvl.state[int(e.position.Y)][int(e.position.X)] = ENEMY
	e.savedState = EMPTY
	return
}
package internal
import (
"fmt"
)
// deviceNames maps numeric device identifiers to their human-readable
// display names. Looked up by getDeviceDisplayName.
var deviceNames = map[uint16]string{
	11:   "DC Brick",
	13:   "Master Brick",
	14:   "Servo Brick",
	15:   "Stepper Brick",
	16:   "IMU Brick",
	17:   "RED Brick",
	18:   "IMU Brick 2.0",
	19:   "Silent Stepper Brick",
	21:   "Ambient Light Bricklet",
	23:   "Current12 Bricklet",
	24:   "Current25 Bricklet",
	25:   "Distance IR Bricklet",
	26:   "Dual Relay Bricklet",
	27:   "Humidity Bricklet",
	28:   "IO-16 Bricklet",
	29:   "IO-4 Bricklet",
	111:  "HAT Brick",
	112:  "HAT Zero Brick",
	210:  "Joystick Bricklet",
	211:  "LCD 16x2 Bricklet",
	212:  "LCD 20x4 Bricklet",
	213:  "Linear Poti Bricklet",
	214:  "Piezo Buzzer Bricklet",
	215:  "Rotary Poti Bricklet",
	216:  "Temperature Bricklet",
	217:  "Temperature IR Bricklet",
	218:  "Voltage Bricklet",
	219:  "Analog In Bricklet",
	220:  "Analog Out Bricklet",
	221:  "Barometer Bricklet",
	222:  "GPS Bricklet",
	223:  "Industrial Digital In 4 Bricklet",
	224:  "Industrial Digital Out 4 Bricklet",
	225:  "Industrial Quad Relay Bricklet",
	226:  "PTC Bricklet",
	227:  "Voltage/Current Bricklet",
	228:  "Industrial Dual 0-20mA Bricklet",
	229:  "Distance US Bricklet",
	230:  "Dual Button Bricklet",
	231:  "LED Strip Bricklet",
	232:  "Moisture Bricklet",
	233:  "Motion Detector Bricklet",
	234:  "Multi Touch Bricklet",
	235:  "Remote Switch Bricklet",
	236:  "Rotary Encoder Bricklet",
	237:  "Segment Display 4x7 Bricklet",
	238:  "Sound Intensity Bricklet",
	239:  "Tilt Bricklet",
	240:  "Hall Effect Bricklet",
	241:  "Line Bricklet",
	242:  "Piezo Speaker Bricklet",
	243:  "Color Bricklet",
	244:  "Solid State Relay Bricklet",
	246:  "NFC/RFID Bricklet",
	249:  "Industrial Dual Analog In Bricklet",
	250:  "Accelerometer Bricklet",
	251:  "Analog In Bricklet 2.0",
	253:  "Load Cell Bricklet",
	254:  "RS232 Bricklet",
	255:  "Laser Range Finder Bricklet",
	256:  "Analog Out Bricklet 2.0",
	258:  "Industrial Analog Out Bricklet",
	259:  "Ambient Light Bricklet 2.0",
	260:  "Dust Detector Bricklet",
	262:  "CO2 Bricklet",
	263:  "OLED 128x64 Bricklet",
	264:  "OLED 64x48 Bricklet",
	265:  "UV Light Bricklet",
	266:  "Thermocouple Bricklet",
	267:  "Motorized Linear Poti Bricklet",
	268:  "Real-Time Clock Bricklet",
	270:  "CAN Bricklet",
	271:  "RGB LED Bricklet",
	272:  "RGB LED Matrix Bricklet",
	276:  "GPS Bricklet 2.0",
	277:  "RS485 Bricklet",
	278:  "Thermal Imaging Bricklet",
	279:  "XMC1400 Breakout Bricklet",
	282:  "RGB LED Button Bricklet",
	283:  "Humidity Bricklet 2.0",
	284:  "Industrial Dual Relay Bricklet",
	285:  "DMX Bricklet",
	286:  "NFC Bricklet",
	288:  "Outdoor Weather Bricklet",
	289:  "Remote Switch Bricklet 2.0",
	290:  "Sound Pressure Level Bricklet",
	291:  "Temperature IR Bricklet 2.0",
	292:  "Motion Detector Bricklet 2.0",
	293:  "Industrial Counter Bricklet",
	294:  "Rotary Encoder Bricklet 2.0",
	295:  "Analog In Bricklet 3.0",
	296:  "Solid State Relay Bricklet 2.0",
	297:  "Air Quality Bricklet",
	298:  "LCD 128x64 Bricklet",
	299:  "Distance US Bricklet 2.0",
	2100: "Industrial Digital In 4 Bricklet 2.0",
	2101: "PTC Bricklet 2.0",
	2102: "Industrial Quad Relay Bricklet 2.0",
	2103: "LED Strip Bricklet 2.0",
	2104: "Load Cell Bricklet 2.0",
	2105: "Voltage/Current Bricklet 2.0",
	2106: "Real-Time Clock Bricklet 2.0",
	2107: "CAN Bricklet 2.0",
	2108: "RS232 Bricklet 2.0",
	2109: "Thermocouple Bricklet 2.0",
	2110: "Particulate Matter Bricklet",
	2111: "IO-4 Bricklet 2.0",
	2112: "OLED 128x64 Bricklet 2.0",
	2113: "Temperature Bricklet 2.0",
	2114: "IO-16 Bricklet 2.0",
	2115: "Analog Out Bricklet 3.0",
	2116: "Industrial Analog Out Bricklet 2.0",
	2117: "Barometer Bricklet 2.0",
	2118: "UV Light Bricklet 2.0",
	2119: "Dual Button Bricklet 2.0",
	2120: "Industrial Dual 0-20mA Bricklet 2.0",
	2121: "Industrial Dual Analog In Bricklet 2.0",
	2122: "Isolator Bricklet",
	2123: "One Wire Bricklet",
	2124: "Industrial Digital Out 4 Bricklet 2.0",
	2125: "Distance IR Bricklet 2.0",
	2127: "RGB LED Bricklet 2.0",
	2128: "Color Bricklet 2.0",
	2129: "Multi Touch Bricklet 2.0",
	2130: "Accelerometer Bricklet 2.0",
	2131: "Ambient Light Bricklet 3.0",
	2132: "Hall Effect Bricklet 2.0",
	2137: "Segment Display 4x7 Bricklet 2.0",
	2138: "Joystick Bricklet 2.0",
	2139: "Linear Poti Bricklet 2.0",
	2140: "Rotary Poti Bricklet 2.0",
	2144: "Laser Range Finder Bricklet 2.0",
	2145: "Piezo Speaker Bricklet 2.0",
	2146: "E-Paper 296x128 Bricklet",
	2147: "CO2 Bricklet 2.0",
	2152: "Energy Monitor Bricklet",
	2153: "Compass Bricklet",
	2156: "Performance DC Bricklet",
	2157: "Servo Bricklet 2.0",
	2161: "IMU Bricklet 3.0",
	2162: "Industrial Dual AC Relay Bricklet",
	2164: "Industrial PTC Bricklet",
	2165: "DC Bricklet 2.0",
	2166: "Silent Stepper Bricklet 2.0",
}
func getDeviceDisplayName(deviceIdentifier uint16) string {
result := deviceNames[deviceIdentifier]
if result == "" {
return fmt.Sprintf("Unknown Device [%v]", deviceIdentifier)
}
return result
} | internal/device_display_names.go | 0.500244 | 0.879923 | device_display_names.go | starcoder |
package manual
func manifesto() string {
return `Scarlet was built on the following ideas and principles:
1. Soft-Magic Themed
Scarlet is a soft-magic themed tool. Programming is a kind of hard-magic
with an unrelenting vortex of soul sucking rationalism veiled beneath
its technoshiny exterior --probably why it's devoid of women--. While
it's all for the best, I found the initial enchantment wears off after
a few years in the slave pits. Odd thing really, because programming
is filled with things that don't make sense unless you were in the
right place, at the right time, and inside the right mind. Even then,
only if you're lucky.
I wanted to inject some true magic. Magic that is shaped by the minds
of practictioners without compromising derived solutions. Magic that
cannot be rationalised yet does not need to be. Magic that wears a
pointy hat, carry's a katana, and talks with a feminine irish accent.
2. No dependencies
Scrolls (Scarlet scripts) have no native way to import other scrolls.
Scarlet was intended as a secretary language to complete simple, yet
essential, quests such as building applications, controlling pipelines,
and a customisable replacement for bash scripts. Avoiding dependencies
has the nice effect of avoiding a great source of complexity and the
inappropriate use as a systems language.
3. Easy integration
Scarlet emphasises the creation of spells (inbuilt functions) for
generic functionality. Spells are written in the underlying systems
language, e.g. Go, so their external libraries may be used. Simply
register the spell and recompile Scarlet. I envisioned a user, team,
or commmunity will take a copy of the base tool then populate it with
domain or team specific spells using patterns befitting use cases or
the authors biases.
4. Context specific
Contrary to modern scripting tools, Scarlet scrolls are designed to be
platform and situation specific, that is, scripts are written for a
single purpose and usually for a single target platform. This may seem
rather restrictive but it's to encourage context driven solutions and
surpress the compelling urge to abstract everything. If you can't live
with that you can always create a spell to import functions or something.
"THAT'S MORTALS FOR YOU. THEY'VE ONLY GOT A FEW YEARS IN THIS WORLD AND
THEY SPEND THEM ALL IN MAKING THINGS COMPLICATED FOR THEMSELVES."
- 'Mort' by <NAME>
5. Minimalist
Scarlet favours spells over native syntax, vis only the bare necessities
form the base language, everything else is better off as a spell that
can be easily modified. This also makes the rare addition of new native
features a breeze.
"Take it from me, there's nothing more terrible than someone out to do
the world a favour."
- 'Sourcery' by <NAME>
6. Light and portable
The Scarlet executable will hopefully be light, portable, and require
no installation process; much like Lua. With time and hope a Tinygo
implementation will be built precisely for embedding in other programs
and repositories.
"'What's a philosopher?' said Brutha. 'Someone who's bright enough to
find a job with no heavy lifting,' said a voice in his head."
- 'Small Gods' by <NAME>chett
Use cases:
I intended for a very small binary so I can include it within code
repositories; Tinygo is a good choice for this but it's not quite
ready. It could then be used to build and run applications both within
pipelines and workstations without requiring additional tools be
installed into container images and the such like. I want to automate
without installation pains.
With this I can create language independent Web API testing scrolls
so I can more easily switch a web server's implementation language
and avoid self inflicted vendor lock in. Current tools were either
too heavy weight or painfully complex. Project building,
configuration, and deployment are another activity I want more
control over.
I also wanted to do general purpose scripting. There are plenty of
languages that can assist with this but I really craved specific tools
free of dependencies. I wanted to be able to change the langauge
upon noticing it was woefully incapable of satisfying me.
No, just no! I'm strongly for fitting the tool to the job and not the
other way around so here are a few use cases I do not recommend using
Scarlet for:
Backend web programming. That's best left to systems tools such as Go,
Java, C#, etc. However, I do intend to create spells for quickly serving
static content and file storage on a local network.
Anything that needs to scale or use concurrency. Again Go, Rust, and
many JVM languages are good choices.
Maths, science, or running numeric algorithms. That's best left to tools
like R or library rich glue languages like Python.
Avoid using it for critical systems! I wrote the code for me and don't
want innocent bystanders (if such people exist) getting hurt.
"A catastrophe curve, Mr. Bucket, is what Software runs along. Software
happens because a large number of things amazingly fail without quite
sinking projects, Mr. Bucket. It works because of hatred and love and
nerves. All the time. This isn't cheese. This is Software. If you
wanted a quiet retirement, Mr. Bucket, you shouldn't have bought the
Software House. You should have done something peaceful, like alligator
dentistry."
- (Original) 'Maskerade' by Terry Pratchett
- Adapted to context by Paulio`
} | _manual/manifesto.go | 0.512449 | 0.589953 | manifesto.go | starcoder |
package sql
import (
"database/sql/driver"
"fmt"
"reflect"
"strconv"
"strings"
"time"
)
type Schema []*Column
// CheckRow validates row against the schema: the number of values must
// match the number of columns, and every value must satisfy its column's
// type and nullability constraints.
func (s Schema) CheckRow(row Row) error {
	if len(row) != len(s) {
		return fmt.Errorf("expected %d values, got %d", len(s), len(row))
	}
	for i, col := range s {
		if !col.Check(row[i]) {
			return fmt.Errorf("value at %d has unexpected type: %s",
				i, reflect.TypeOf(row[i]).String())
		}
	}
	return nil
}
// Column is the definition of a table column.
// As SQL:2016 puts it:
// A column is a named component of a table. It has a data type, a default,
// and a nullability characteristic.
type Column struct {
	// Name is the name of the column.
	Name string
	// Type is the data type of the column.
	Type Type
	// Default contains the default value of the column or nil if it is NULL.
	Default interface{}
	// Nullable is true if the column can contain NULL values, or false
	// otherwise. Check enforces this for nil values.
	Nullable bool
}
// Check reports whether v is a legal value for the column: nil is allowed
// only on nullable columns, and anything else must satisfy the column type.
func (c *Column) Check(v interface{}) bool {
	if v != nil {
		return c.Type.Check(v)
	}
	return c.Nullable
}
// Type represents a SQL column type. It validates, converts, orders, and
// lowers values of the type to the database/sql driver representation.
type Type interface {
	// Name returns the SQL name of the type.
	Name() string
	// InternalType returns the reflect.Kind used as the internal value representation.
	InternalType() reflect.Kind
	// Check reports whether the value already has the type's internal representation.
	Check(interface{}) bool
	// Convert coerces the value to the internal representation, or errors.
	Convert(interface{}) (interface{}, error)
	// Compare orders two values of this type, returning -1, 0, or 1.
	Compare(interface{}, interface{}) int
	// Native lowers an internal value to a driver.Value.
	Native(interface{}) driver.Value
	// Default returns the type's default (zero) value.
	Default() interface{}
}
// Null is the SQL NULL type; only nil is a valid value.
var Null = nullType{}

type nullType struct{}

// Name returns the SQL name of the type.
func (t nullType) Name() string { return "null" }

// InternalType returns the reflect kind backing the type.
func (t nullType) InternalType() reflect.Kind { return reflect.Interface }

// Check reports whether v is a valid NULL value (only nil is).
func (t nullType) Check(v interface{}) bool { return v == nil }

// Convert converts v to this type, failing for anything but nil.
func (t nullType) Convert(v interface{}) (interface{}, error) {
	if v == nil {
		return nil, nil
	}
	return nil, fmt.Errorf("value not nil: %#v", v)
}

// Compare always reports equality.
func (t nullType) Compare(a interface{}, b interface{}) int {
	//XXX: Note that while this returns 0 (equals) for ordering purposes, in
	// SQL NULL != NULL.
	return 0
}

// Native returns the driver representation of NULL (always nil).
func (t nullType) Native(v interface{}) driver.Value { return driver.Value(nil) }

// Default returns the zero value of the type.
func (t nullType) Default() interface{} { return nil }
// Integer is a 32-bit signed integer column type.
var Integer = integerType{}

type integerType struct{}

// Name returns the SQL name of the type.
func (t integerType) Name() string {
	return "integer"
}

// InternalType reports that values are represented as int32.
func (t integerType) InternalType() reflect.Kind {
	return reflect.Int32
}

// Check reports whether v already holds an int32.
func (t integerType) Check(v interface{}) bool {
	return checkInt32(v)
}

// Convert coerces v to int32, erroring on overflow or unsupported types.
func (t integerType) Convert(v interface{}) (interface{}, error) {
	return convertToInt32(v)
}

// Compare orders two int32 values, returning -1, 0, or 1.
func (t integerType) Compare(a interface{}, b interface{}) int {
	return compareInt32(a, b)
}

// Native lowers v to the driver representation (int64, or nil for NULL).
func (t integerType) Native(v interface{}) driver.Value {
	if v == nil {
		return driver.Value(nil)
	}
	return driver.Value(int64(v.(int32)))
}

// Default returns the type's zero value.
func (t integerType) Default() interface{} {
	return int32(0)
}
// BigInteger is a 64-bit signed integer column type.
var BigInteger = bigIntegerType{}

type bigIntegerType struct{}

// Name returns the SQL name of the type.
func (t bigIntegerType) Name() string {
	return "biginteger"
}

// InternalType reports that values are represented as int64.
func (t bigIntegerType) InternalType() reflect.Kind {
	return reflect.Int64
}

// Check reports whether v already holds an int64.
func (t bigIntegerType) Check(v interface{}) bool {
	return checkInt64(v)
}

// Convert coerces v to int64, erroring on overflow or unsupported types.
func (t bigIntegerType) Convert(v interface{}) (interface{}, error) {
	return convertToInt64(v)
}

// Compare orders two int64 values, returning -1, 0, or 1.
func (t bigIntegerType) Compare(a interface{}, b interface{}) int {
	return compareInt64(a, b)
}

// Native lowers v to the driver representation (int64, or nil for NULL).
func (t bigIntegerType) Native(v interface{}) driver.Value {
	if v == nil {
		return driver.Value(nil)
	}
	return driver.Value(v.(int64))
}

// Default returns the type's zero value.
func (t bigIntegerType) Default() interface{} {
	return int64(0)
}
// TimestampWithTimezone is a timestamp with timezone.
var TimestampWithTimezone = timestampWithTimeZoneType{}

type timestampWithTimeZoneType struct{}

// Name returns the SQL name of the type.
func (t timestampWithTimeZoneType) Name() string {
	return "timestamp with timezone"
}

// InternalType reports that values are represented as a struct (time.Time).
func (t timestampWithTimeZoneType) InternalType() reflect.Kind {
	return reflect.Struct
}

// Check reports whether v already holds a time.Time.
func (t timestampWithTimeZoneType) Check(v interface{}) bool {
	return checkTimestamp(v)
}

// Convert coerces v to a time.Time (layout string or Unix seconds).
func (t timestampWithTimeZoneType) Convert(v interface{}) (interface{}, error) {
	return convertToTimestamp(v)
}

// Compare orders two time.Time values chronologically.
func (t timestampWithTimeZoneType) Compare(a interface{}, b interface{}) int {
	return compareTimestamp(a, b)
}

// Native lowers v to the driver representation (time.Time, or nil for NULL).
func (t timestampWithTimeZoneType) Native(v interface{}) driver.Value {
	if v == nil {
		return driver.Value(nil)
	}
	return driver.Value(v.(time.Time))
}

// Default returns the type's zero value (the zero time).
func (t timestampWithTimeZoneType) Default() interface{} {
	return time.Time{}
}
// String is a text column type backed by Go strings.
var String = stringType{}

type stringType struct{}

// Name returns the SQL name of the type.
func (t stringType) Name() string {
	return "string"
}

// InternalType reports that values are represented as string.
func (t stringType) InternalType() reflect.Kind {
	return reflect.String
}

// Check reports whether v already holds a string.
func (t stringType) Check(v interface{}) bool {
	return checkString(v)
}

// Convert coerces v to a string (strings and fmt.Stringer values).
func (t stringType) Convert(v interface{}) (interface{}, error) {
	return convertToString(v)
}

// Compare orders two strings lexicographically.
func (t stringType) Compare(a interface{}, b interface{}) int {
	return compareString(a, b)
}

// Native lowers v to the driver representation (string, or nil for NULL).
func (t stringType) Native(v interface{}) driver.Value {
	if v == nil {
		return driver.Value(nil)
	}
	return driver.Value(v.(string))
}

// Default returns the type's zero value (the empty string).
func (t stringType) Default() interface{} {
	return ""
}
// Boolean is a boolean column type.
var Boolean Type = booleanType{}

type booleanType struct{}

// Name returns the SQL name of the type.
func (t booleanType) Name() string {
	return "boolean"
}

// InternalType reports that values are represented as bool.
func (t booleanType) InternalType() reflect.Kind {
	return reflect.Bool
}

// Check reports whether v already holds a bool.
func (t booleanType) Check(v interface{}) bool {
	return checkBoolean(v)
}

// Convert coerces v to a bool; only bool input is supported.
func (t booleanType) Convert(v interface{}) (interface{}, error) {
	return convertToBool(v)
}

// Compare orders two bools with false < true.
func (t booleanType) Compare(a interface{}, b interface{}) int {
	return compareBool(a, b)
}

// Native lowers v to the driver representation (bool, or nil for NULL).
func (t booleanType) Native(v interface{}) driver.Value {
	if v == nil {
		return driver.Value(nil)
	}
	return driver.Value(v.(bool))
}

// Default returns the type's zero value.
func (t booleanType) Default() interface{} {
	return false
}
// Float is a 64-bit floating point column type.
var Float Type = floatType{}

type floatType struct{}

// Name returns the SQL name of the type.
func (t floatType) Name() string {
	return "float"
}

// InternalType reports that values are represented as float64.
func (t floatType) InternalType() reflect.Kind {
	return reflect.Float64
}

// Check reports whether v holds the internal float representation.
// NOTE(review): Native and Default below use float64, but the checkFloat64
// helper in this file currently matches float32 — confirm and align the
// helpers with the float64 internal representation.
func (t floatType) Check(v interface{}) bool {
	return checkFloat64(v)
}

// Convert coerces v to the internal float representation.
func (t floatType) Convert(v interface{}) (interface{}, error) {
	return convertToFloat64(v)
}

// Compare orders two float values, returning -1, 0, or 1.
func (t floatType) Compare(a interface{}, b interface{}) int {
	return compareFloat64(a, b)
}

// Native lowers v to the driver representation; v must hold a float64.
func (t floatType) Native(v interface{}) driver.Value {
	if v == nil {
		return driver.Value(nil)
	}
	return driver.Value(v.(float64))
}

// Default returns the type's zero value.
func (t floatType) Default() interface{} {
	return float64(0)
}
// checkString reports whether v holds a string.
func checkString(v interface{}) bool {
	switch v.(type) {
	case string:
		return true
	default:
		return false
	}
}
// convertToString converts v to a string. Plain strings pass through and
// fmt.Stringer values are rendered via String(); anything else is rejected.
func convertToString(v interface{}) (interface{}, error) {
	switch s := v.(type) {
	case string:
		return s, nil
	case fmt.Stringer:
		return s.String(), nil
	}
	return nil, ErrInvalidType
}
// compareString orders two strings lexicographically, returning -1, 0, or 1
// in the manner of strings.Compare. Both arguments must hold strings.
func compareString(a interface{}, b interface{}) int {
	return strings.Compare(a.(string), b.(string))
}
// checkInt32 reports whether v holds an int32.
func checkInt32(v interface{}) bool {
	switch v.(type) {
	case int32:
		return true
	default:
		return false
	}
}
// convertToInt32 converts a numeric or decimal-string value to int32.
// Values outside the int32 range produce an overflow error; unsupported
// types produce ErrInvalidType.
func convertToInt32(v interface{}) (interface{}, error) {
	switch v := v.(type) {
	case int:
		// int is 64 bits on most platforms, so it can overflow int32;
		// the previous unchecked cast silently truncated such values.
		if int64(v) > (1<<31)-1 || int64(v) < -(1<<31) {
			return nil, fmt.Errorf("value %d overflows int32", v)
		}
		return int32(v), nil
	case int8:
		return int32(v), nil
	case int16:
		return int32(v), nil
	case int32:
		return v, nil
	case int64:
		if v > (1<<31)-1 || v < -(1<<31) {
			return nil, fmt.Errorf("value %d overflows int32", v)
		}
		return int32(v), nil
	case uint8:
		return int32(v), nil
	case uint16:
		return int32(v), nil
	case uint:
		if uint64(v) > (1<<31)-1 {
			return nil, fmt.Errorf("value %d overflows int32", v)
		}
		return int32(v), nil
	case uint32:
		if v > (1<<31)-1 {
			return nil, fmt.Errorf("value %d overflows int32", v)
		}
		return int32(v), nil
	case uint64:
		if v > (1<<31)-1 {
			return nil, fmt.Errorf("value %d overflows int32", v)
		}
		return int32(v), nil
	case string:
		// ParseInt with bitSize 32 rejects out-of-range strings; the
		// previous Atoi + unchecked cast silently truncated them.
		i, err := strconv.ParseInt(v, 10, 32)
		if err != nil {
			return nil, fmt.Errorf("value %q can't be converted to int32", v)
		}
		return int32(i), nil
	default:
		return nil, ErrInvalidType
	}
}
// compareInt32 orders two int32 values, returning -1, 0, or 1.
// Both arguments must hold int32.
func compareInt32(a interface{}, b interface{}) int {
	av, bv := a.(int32), b.(int32)
	switch {
	case av < bv:
		return -1
	case av > bv:
		return 1
	default:
		return 0
	}
}
// checkInt64 reports whether v holds an int64.
func checkInt64(v interface{}) bool {
	switch v.(type) {
	case int64:
		return true
	default:
		return false
	}
}
// convertToInt64 converts a numeric or decimal-string value to int64.
// uint64 values above MaxInt64 produce an overflow error; unsupported
// types produce ErrInvalidType.
func convertToInt64(v interface{}) (interface{}, error) {
	switch v := v.(type) {
	case int:
		return int64(v), nil
	case int8:
		return int64(v), nil
	case int16:
		return int64(v), nil
	case int32:
		return int64(v), nil
	case int64:
		return v, nil
	case uint:
		return int64(v), nil
	case uint8:
		return int64(v), nil
	case uint16:
		return int64(v), nil
	case uint32:
		return int64(v), nil
	case uint64:
		if v >= 1<<63 {
			return nil, fmt.Errorf("value %d overflows int64", v)
		}
		return int64(v), nil
	case string:
		// ParseInt with bitSize 64 accepts the full int64 range; the
		// previous Atoi limited strings to the platform int range and
		// rejected valid 64-bit values on 32-bit platforms.
		i, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			return nil, fmt.Errorf("value %q can't be converted to int64", v)
		}
		return i, nil
	default:
		return nil, ErrInvalidType
	}
}
// compareInt64 orders two int64 values, returning -1, 0, or 1.
// Both arguments must hold int64.
func compareInt64(a interface{}, b interface{}) int {
	av, bv := a.(int64), b.(int64)
	switch {
	case av < bv:
		return -1
	case av > bv:
		return 1
	default:
		return 0
	}
}
// checkBoolean reports whether v holds a bool.
func checkBoolean(v interface{}) bool {
	switch v.(type) {
	case bool:
		return true
	default:
		return false
	}
}
// convertToBool converts v to a bool; only bool input is accepted,
// anything else yields ErrInvalidType.
func convertToBool(v interface{}) (interface{}, error) {
	if b, ok := v.(bool); ok {
		return b, nil
	}
	return nil, ErrInvalidType
}
// compareBool orders two bools with false < true, returning -1, 0, or 1.
// Both arguments must hold bool.
func compareBool(a interface{}, b interface{}) int {
	av := a.(bool)
	bv := b.(bool)
	switch {
	case av == bv:
		return 0
	case !av:
		// av is false and bv is true.
		return -1
	default:
		return 1
	}
}
// checkFloat64 reports whether v holds a float64, matching floatType's
// internal representation (InternalType is reflect.Float64, Native asserts
// float64, Default returns float64(0)). The previous float32 check let
// values through that floatType.Native would then panic on.
func checkFloat64(v interface{}) bool {
	_, ok := v.(float64)
	return ok
}
// convertToFloat64 converts a numeric or decimal-string value to float64,
// floatType's internal representation (Native asserts float64). The
// previous version returned float32, which made floatType.Native panic on
// converted values. Integer and string input are accepted for consistency
// with the other convertTo* helpers in this file.
func convertToFloat64(v interface{}) (interface{}, error) {
	switch v := v.(type) {
	case float64:
		return v, nil
	case float32:
		return float64(v), nil
	case int:
		return float64(v), nil
	case int32:
		return float64(v), nil
	case int64:
		return float64(v), nil
	case string:
		f, err := strconv.ParseFloat(v, 64)
		if err != nil {
			return nil, fmt.Errorf("value %q can't be converted to float64", v)
		}
		return f, nil
	default:
		return nil, ErrInvalidType
	}
}
// compareFloat64 orders two float64 values, returning -1, 0, or 1. Both
// arguments must hold float64 (floatType's internal representation; the
// previous float32 asserts panicked on float64 input). NaN compares as
// equal to everything here, since both < and > are false for NaN.
func compareFloat64(a interface{}, b interface{}) int {
	av := a.(float64)
	bv := b.(float64)
	switch {
	case av < bv:
		return -1
	case av > bv:
		return 1
	default:
		return 0
	}
}
// checkTimestamp reports whether v holds a time.Time.
func checkTimestamp(v interface{}) bool {
	switch v.(type) {
	case time.Time:
		return true
	default:
		return false
	}
}
// timestampLayout is the Go reference-time layout accepted by
// convertToTimestamp for string input (microsecond precision).
const timestampLayout = "2006-01-02 15:04:05.000000"
// convertToTimestamp converts v to a time.Time. Strings must match
// timestampLayout; any value convertible to int64 is interpreted as a
// Unix timestamp in seconds.
func convertToTimestamp(v interface{}) (interface{}, error) {
	switch v := v.(type) {
	case string:
		t, err := time.Parse(timestampLayout, v)
		if err != nil {
			// Report the correct target type (previously said "int64").
			return nil, fmt.Errorf("value %q can't be converted to timestamp", v)
		}
		return t, nil
	default:
		// Anything BigInteger can convert is treated as Unix seconds.
		// (Previously only values already holding an int64 passed the
		// Check gate, even though BigInteger.Convert handles more types;
		// going straight through Convert generalizes the accepted input.)
		bi, err := BigInteger.Convert(v)
		if err != nil {
			return nil, ErrInvalidType
		}
		return time.Unix(bi.(int64), 0), nil
	}
}
// compareTimestamp orders two time.Time values chronologically, returning
// -1, 0, or 1. Both arguments must hold time.Time.
func compareTimestamp(a interface{}, b interface{}) int {
	av := a.(time.Time)
	bv := b.(time.Time)
	switch {
	case av.Before(bv):
		return -1
	case av.After(bv):
		return 1
	default:
		return 0
	}
}
// TeamShooterScenario is a scenario which is designed to emulate the
// approximate behavior to open match that a skill based team game would have.
// It doesn't try to provide good matchmaking for real players. There are three
// arguments used:
// mode: The game mode the players wants to play in. mode is a hard partition.
// regions: Players may have good latency to one or more regions. A player will
// search for matches in all eligible regions.
// skill: Players have a random skill based on a normal distribution. Players
// will only be matched with other players who have a close skill value. The
// match functions have overlapping partitions of the skill brackets.
package teamshooter
import (
"fmt"
"io"
"math"
"math/rand"
"sort"
"time"
"github.com/golang/protobuf/ptypes"
"github.com/golang/protobuf/ptypes/any"
"github.com/golang/protobuf/ptypes/wrappers"
"open-match.dev/open-match/pkg/pb"
)
const (
poolName = "all"
skillArg = "skill"
modeArg = "mode"
)
// TeamShooterScenario provides the required methods for running a scenario.
type TeamShooterScenario struct {
	// Names of available region tags.
	regions []string
	// Maximum regions a player can search in.
	maxRegions int
	// Number of tickets which form a match.
	playersPerGame int
	// For each pair of consecutive values, the value to split profiles on by
	// skill.
	skillBoundaries []float64
	// Maximum difference between two tickets to consider a match valid.
	maxSkillDifference float64
	// List of mode names.
	modes []string
	// Returns a random mode, with some weight.
	randomMode func() string
}
// Scenario creates a new TeamShooterScenario: two regions, 12-player
// games, two skill brackets split at 0, and a payload-heavy mode mix.
func Scenario() *TeamShooterScenario {
	// Payload ("pl") is weighted four times as popular as capture
	// point ("cp").
	modes, randomMode := weightedChoice(map[string]int{
		"pl": 100,
		"cp": 25,
	})

	var regions []string
	for i := 0; i < 2; i++ {
		regions = append(regions, fmt.Sprintf("region_%d", i))
	}

	return &TeamShooterScenario{
		regions:            regions,
		maxRegions:         1,
		playersPerGame:     12,
		skillBoundaries:    []float64{math.Inf(-1), 0, math.Inf(1)},
		maxSkillDifference: 0.01,
		modes:              modes,
		randomMode:         randomMode,
	}
}
// Profiles shards the player base on mode, region, and skill: one match
// profile is produced per (region, mode, skill bracket) combination.
func (t *TeamShooterScenario) Profiles() []*pb.MatchProfile {
	p := []*pb.MatchProfile{}
	for _, region := range t.regions {
		for _, mode := range t.modes {
			for i := 0; i+1 < len(t.skillBoundaries); i++ {
				// Widen each bracket by half the allowed skill difference
				// on both sides so adjacent brackets overlap; otherwise
				// players near a boundary could never match each other.
				skillMin := t.skillBoundaries[i] - t.maxSkillDifference/2
				skillMax := t.skillBoundaries[i+1] + t.maxSkillDifference/2
				p = append(p, &pb.MatchProfile{
					Name: fmt.Sprintf("%s_%s_%v-%v", region, mode, skillMin, skillMax),
					Pools: []*pb.Pool{
						{
							Name: poolName,
							// Skill bracket filter.
							DoubleRangeFilters: []*pb.DoubleRangeFilter{
								{
									DoubleArg: skillArg,
									Min:       skillMin,
									Max:       skillMax,
								},
							},
							// The player must be searching in this region.
							TagPresentFilters: []*pb.TagPresentFilter{
								{
									Tag: region,
								},
							},
							// Mode is a hard partition.
							StringEqualsFilters: []*pb.StringEqualsFilter{
								{
									StringArg: modeArg,
									Value:     mode,
								},
							},
						},
					},
				})
			}
		}
	}
	return p
}
// Ticket creates a randomized player: a home region plus up to
// maxRegions-1 consecutive regions (wrapping around), a normally
// distributed skill clamped to [-3, 3], and a weighted-random game mode.
func (t *TeamShooterScenario) Ticket() *pb.Ticket {
	home := rand.Intn(len(t.regions))
	numRegions := rand.Intn(t.maxRegions) + 1

	var tags []string
	for i := 0; i < numRegions; i++ {
		// The Earth is actually a circle: regions wrap around.
		tags = append(tags, t.regions[(home+i)%len(t.regions)])
	}

	return &pb.Ticket{
		SearchFields: &pb.SearchFields{
			DoubleArgs: map[string]float64{
				skillArg: clamp(rand.NormFloat64(), -3, 3),
			},
			StringArgs: map[string]string{
				modeArg: t.randomMode(),
			},
			Tags: tags,
		},
	}
}
// Backfill returns nil: this scenario does not exercise backfill.
func (t *TeamShooterScenario) Backfill() *pb.Backfill {
	return nil
}
// MatchFunction puts tickets into matches based on their skill, finding the
// required number of tickets for a game within the maximum skill difference.
func (t *TeamShooterScenario) MatchFunction(p *pb.MatchProfile, poolBackfills map[string][]*pb.Backfill, poolTickets map[string][]*pb.Ticket) ([]*pb.Match, error) {
	// skill extracts a ticket's skill search field.
	skill := func(t *pb.Ticket) float64 {
		return t.SearchFields.DoubleArgs[skillArg]
	}
	tickets := poolTickets[poolName]
	var matches []*pb.Match
	// Sort by skill so any window of consecutive tickets has the minimal
	// possible skill spread.
	sort.Slice(tickets, func(i, j int) bool {
		return skill(tickets[i]) < skill(tickets[j])
	})
	// Slide a window of playersPerGame consecutive tickets over the sorted
	// list. Windows overlap, so one ticket can appear in several proposals;
	// the evaluator deduplicates later, keeping the highest-quality ones.
	for i := 0; i+t.playersPerGame <= len(tickets); i++ {
		mt := tickets[i : i+t.playersPerGame]
		if skill(mt[len(mt)-1])-skill(mt[0]) < t.maxSkillDifference {
			// Quality is the negated sum of squared deviations from the
			// window's mean skill: closer to 0 means a tighter match.
			avg := float64(0)
			for _, t := range mt {
				avg += skill(t)
			}
			avg /= float64(len(mt))
			q := float64(0)
			for _, t := range mt {
				diff := skill(t) - avg
				q -= diff * diff
			}
			m, err := (&matchExt{
				id:            fmt.Sprintf("profile-%v-time-%v-%v", p.GetName(), time.Now().Format("2006-01-02T15:04:05.00"), len(matches)),
				matchProfile:  p.GetName(),
				matchFunction: "skillmatcher",
				tickets:       mt,
				quality:       q,
			}).pack()
			if err != nil {
				return nil, err
			}
			matches = append(matches, m)
		}
	}
	return matches, nil
}
// Evaluate returns matches in order of highest quality, skipping any matches
// which contain tickets that are already used.
func (t *TeamShooterScenario) Evaluate(stream pb.Evaluator_EvaluateServer) error {
	// Unpacked proposal matches.
	proposals := []*matchExt{}
	// Ticket ids which are used in a match.
	used := map[string]struct{}{}
	// Drain the request stream, unpacking each proposal (and its quality
	// extension) as it arrives.
	for {
		req, err := stream.Recv()
		if err == io.EOF {
			break
		}
		if err != nil {
			return fmt.Errorf("Error reading evaluator input stream: %w", err)
		}
		p, err := unpackMatch(req.GetMatch())
		if err != nil {
			return err
		}
		proposals = append(proposals, p)
	}
	// Higher quality is better.
	sort.Slice(proposals, func(i, j int) bool {
		return proposals[i].quality > proposals[j].quality
	})
	// Greedily accept proposals in quality order; a proposal is rejected
	// wholesale if any of its tickets was claimed by a better proposal.
outer:
	for _, p := range proposals {
		for _, t := range p.tickets {
			if _, ok := used[t.Id]; ok {
				continue outer
			}
		}
		for _, t := range p.tickets {
			used[t.Id] = struct{}{}
		}
		err := stream.Send(&pb.EvaluateResponse{MatchId: p.id})
		if err != nil {
			return fmt.Errorf("Error sending evaluator output stream: %w", err)
		}
	}
	return nil
}
// matchExt presents the match and extension data in a native form, and allows
// easy conversion to and from proto format.
type matchExt struct {
	id      string
	tickets []*pb.Ticket
	// quality is the score Evaluate sorts proposals by; higher is better.
	quality       float64
	matchProfile  string
	matchFunction string
}
// unpackMatch converts a proto match back into a matchExt, decoding the
// "quality" extension (a wrapped double) into a float64.
func unpackMatch(m *pb.Match) (*matchExt, error) {
	v := &wrappers.DoubleValue{}
	err := ptypes.UnmarshalAny(m.Extensions["quality"], v)
	if err != nil {
		return nil, fmt.Errorf("Error unpacking match quality: %w", err)
	}
	return &matchExt{
		id:            m.MatchId,
		tickets:       m.Tickets,
		quality:       v.Value,
		matchProfile:  m.MatchProfile,
		matchFunction: m.MatchFunction,
	}, nil
}
// pack converts the matchExt into its proto form, storing quality as a
// wrapped double in the match's "quality" extension (unpackMatch reverses
// this).
func (m *matchExt) pack() (*pb.Match, error) {
	v := &wrappers.DoubleValue{Value: m.quality}
	a, err := ptypes.MarshalAny(v)
	if err != nil {
		return nil, fmt.Errorf("Error packing match quality: %w", err)
	}
	return &pb.Match{
		MatchId:       m.id,
		Tickets:       m.tickets,
		MatchProfile:  m.matchProfile,
		MatchFunction: m.matchFunction,
		Extensions: map[string]*any.Any{
			"quality": a,
		},
	}, nil
}
// clamp restricts v to the inclusive range [min, max].
func clamp(v float64, min float64, max float64) float64 {
	switch {
	case v < min:
		return min
	case v > max:
		return max
	default:
		return v
	}
}
// weightedChoice takes a map of values and their relative weights. It
// returns the values as a slice, along with a function producing random
// picks distributed in proportion to the weights.
func weightedChoice(m map[string]int) ([]string, func() string) {
	keys := make([]string, 0, len(m))
	total := 0
	for k, w := range m {
		keys = append(keys, k)
		total += w
	}
	pick := func() string {
		// Choose a point in [0, total) and find which key's weight
		// interval it lands in.
		r := rand.Intn(total)
		for k, w := range m {
			if r < w {
				return k
			}
			r -= w
		}
		panic("weightedChoice is broken.")
	}
	return keys, pick
}
package models
import (
i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e "time"
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// Simulation represents an attack simulation and training campaign.
type Simulation struct {
    Entity
    // The social engineering technique used in the attack simulation and training campaign. Supports $filter and $orderby. Possible values are: unknown, credentialHarvesting, attachmentMalware, driveByUrl, linkInAttachment, linkToMalwareFile, unknownFutureValue. For more information on the types of social engineering attack techniques, see simulations.
    attackTechnique *SimulationAttackTechnique
    // Attack type of the attack simulation and training campaign. Supports $filter and $orderby. Possible values are: unknown, social, cloud, endpoint, unknownFutureValue.
    attackType *SimulationAttackType
    // Unique identifier for the attack simulation automation.
    automationId *string
    // Date and time of completion of the attack simulation and training campaign. Supports $filter and $orderby.
    completionDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time
    // Identity of the user who created the attack simulation and training campaign.
    createdBy EmailIdentityable
    // Date and time of creation of the attack simulation and training campaign.
    createdDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time
    // Description of the attack simulation and training campaign.
    description *string
    // Display name of the attack simulation and training campaign. Supports $filter and $orderby.
    displayName *string
    // Flag representing if the attack simulation and training campaign was created from a simulation automation flow. Supports $filter and $orderby.
    isAutomated *bool
    // Identity of the user who most recently modified the attack simulation and training campaign.
    lastModifiedBy EmailIdentityable
    // Date and time of the most recent modification of the attack simulation and training campaign.
    lastModifiedDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time
    // Date and time of the launch/start of the attack simulation and training campaign. Supports $filter and $orderby.
    launchDateTime *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time
    // Method of delivery of the phishing payload used in the attack simulation and training campaign. Possible values are: unknown, sms, email, teams, unknownFutureValue.
    payloadDeliveryPlatform *PayloadDeliveryPlatform
    // Report of the attack simulation and training campaign.
    report SimulationReportable
    // Status of the attack simulation and training campaign. Supports $filter and $orderby. Possible values are: unknown, draft, running, scheduled, succeeded, failed, cancelled, excluded, unknownFutureValue.
    status *SimulationStatus
}
// NewSimulation instantiates a new simulation and sets the default values.
func NewSimulation()(*Simulation) {
    return &Simulation{
        Entity: *NewEntity(),
    }
}
// CreateSimulationFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value.
// The parse node is accepted for factory-interface compatibility but is not
// inspected here — presumably because Simulation has no subtypes requiring
// discrimination; confirm against the generated model hierarchy.
func CreateSimulationFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    return NewSimulation(), nil
}
// GetAttackTechnique returns the attackTechnique property value: the social engineering technique used in the attack simulation and training campaign. Supports $filter and $orderby.
func (m *Simulation) GetAttackTechnique()(*SimulationAttackTechnique) {
    if m == nil {
        return nil
    }
    return m.attackTechnique
}
// GetAttackType returns the attackType property value: the attack type of the attack simulation and training campaign. Supports $filter and $orderby.
func (m *Simulation) GetAttackType()(*SimulationAttackType) {
    if m == nil {
        return nil
    }
    return m.attackType
}
// GetAutomationId returns the automationId property value: the unique identifier for the attack simulation automation.
func (m *Simulation) GetAutomationId()(*string) {
    if m == nil {
        return nil
    }
    return m.automationId
}
// GetCompletionDateTime returns the completionDateTime property value: when the attack simulation and training campaign completed. Supports $filter and $orderby.
func (m *Simulation) GetCompletionDateTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
    if m == nil {
        return nil
    }
    return m.completionDateTime
}
// GetCreatedBy returns the createdBy property value: the identity of the user who created the attack simulation and training campaign.
func (m *Simulation) GetCreatedBy()(EmailIdentityable) {
    if m == nil {
        return nil
    }
    return m.createdBy
}
// GetCreatedDateTime returns the createdDateTime property value: when the attack simulation and training campaign was created.
func (m *Simulation) GetCreatedDateTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
    if m == nil {
        return nil
    }
    return m.createdDateTime
}
// GetDescription returns the description property value: the description of the attack simulation and training campaign.
func (m *Simulation) GetDescription()(*string) {
    if m == nil {
        return nil
    }
    return m.description
}
// GetDisplayName returns the displayName property value: the display name of the attack simulation and training campaign. Supports $filter and $orderby.
func (m *Simulation) GetDisplayName()(*string) {
    if m == nil {
        return nil
    }
    return m.displayName
}
// GetFieldDeserializers the deserialization information for the current model
func (m *Simulation) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
res := m.Entity.GetFieldDeserializers()
res["attackTechnique"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetEnumValue(ParseSimulationAttackTechnique)
if err != nil {
return err
}
if val != nil {
m.SetAttackTechnique(val.(*SimulationAttackTechnique))
}
return nil
}
res["attackType"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetEnumValue(ParseSimulationAttackType)
if err != nil {
return err
}
if val != nil {
m.SetAttackType(val.(*SimulationAttackType))
}
return nil
}
res["automationId"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue()
if err != nil {
return err
}
if val != nil {
m.SetAutomationId(val)
}
return nil
}
res["completionDateTime"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetTimeValue()
if err != nil {
return err
}
if val != nil {
m.SetCompletionDateTime(val)
}
return nil
}
res["createdBy"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetObjectValue(CreateEmailIdentityFromDiscriminatorValue)
if err != nil {
return err
}
if val != nil {
m.SetCreatedBy(val.(EmailIdentityable))
}
return nil
}
res["createdDateTime"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetTimeValue()
if err != nil {
return err
}
if val != nil {
m.SetCreatedDateTime(val)
}
return nil
}
res["description"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue()
if err != nil {
return err
}
if val != nil {
m.SetDescription(val)
}
return nil
}
res["displayName"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetStringValue()
if err != nil {
return err
}
if val != nil {
m.SetDisplayName(val)
}
return nil
}
res["isAutomated"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetBoolValue()
if err != nil {
return err
}
if val != nil {
m.SetIsAutomated(val)
}
return nil
}
res["lastModifiedBy"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetObjectValue(CreateEmailIdentityFromDiscriminatorValue)
if err != nil {
return err
}
if val != nil {
m.SetLastModifiedBy(val.(EmailIdentityable))
}
return nil
}
res["lastModifiedDateTime"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetTimeValue()
if err != nil {
return err
}
if val != nil {
m.SetLastModifiedDateTime(val)
}
return nil
}
res["launchDateTime"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetTimeValue()
if err != nil {
return err
}
if val != nil {
m.SetLaunchDateTime(val)
}
return nil
}
res["payloadDeliveryPlatform"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetEnumValue(ParsePayloadDeliveryPlatform)
if err != nil {
return err
}
if val != nil {
m.SetPayloadDeliveryPlatform(val.(*PayloadDeliveryPlatform))
}
return nil
}
res["report"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetObjectValue(CreateSimulationReportFromDiscriminatorValue)
if err != nil {
return err
}
if val != nil {
m.SetReport(val.(SimulationReportable))
}
return nil
}
res["status"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
val, err := n.GetEnumValue(ParseSimulationStatus)
if err != nil {
return err
}
if val != nil {
m.SetStatus(val.(*SimulationStatus))
}
return nil
}
return res
}
// GetIsAutomated returns the isAutomated property value: whether the campaign was created from a simulation automation flow. Supports $filter and $orderby.
func (m *Simulation) GetIsAutomated()(*bool) {
    if m == nil {
        return nil
    }
    return m.isAutomated
}
// GetLastModifiedBy returns the lastModifiedBy property value: the identity of the user who most recently modified the campaign.
func (m *Simulation) GetLastModifiedBy()(EmailIdentityable) {
    if m == nil {
        return nil
    }
    return m.lastModifiedBy
}
// GetLastModifiedDateTime returns the lastModifiedDateTime property value: when the campaign was most recently modified.
func (m *Simulation) GetLastModifiedDateTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
    if m == nil {
        return nil
    }
    return m.lastModifiedDateTime
}
// GetLaunchDateTime returns the launchDateTime property value: when the campaign was launched/started. Supports $filter and $orderby.
func (m *Simulation) GetLaunchDateTime()(*i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time) {
    if m == nil {
        return nil
    }
    return m.launchDateTime
}
// GetPayloadDeliveryPlatform returns the payloadDeliveryPlatform property value: the delivery method of the phishing payload used in the campaign.
func (m *Simulation) GetPayloadDeliveryPlatform()(*PayloadDeliveryPlatform) {
    if m == nil {
        return nil
    }
    return m.payloadDeliveryPlatform
}
// GetReport returns the report property value: the report of the attack simulation and training campaign.
func (m *Simulation) GetReport()(SimulationReportable) {
    if m == nil {
        return nil
    }
    return m.report
}
// GetStatus returns the status property value: the status of the attack simulation and training campaign. Supports $filter and $orderby.
func (m *Simulation) GetStatus()(*SimulationStatus) {
    if m == nil {
        return nil
    }
    return m.status
}
// Serialize serializes information the current object
func (m *Simulation) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
err := m.Entity.Serialize(writer)
if err != nil {
return err
}
if m.GetAttackTechnique() != nil {
cast := (*m.GetAttackTechnique()).String()
err = writer.WriteStringValue("attackTechnique", &cast)
if err != nil {
return err
}
}
if m.GetAttackType() != nil {
cast := (*m.GetAttackType()).String()
err = writer.WriteStringValue("attackType", &cast)
if err != nil {
return err
}
}
{
err = writer.WriteStringValue("automationId", m.GetAutomationId())
if err != nil {
return err
}
}
{
err = writer.WriteTimeValue("completionDateTime", m.GetCompletionDateTime())
if err != nil {
return err
}
}
{
err = writer.WriteObjectValue("createdBy", m.GetCreatedBy())
if err != nil {
return err
}
}
{
err = writer.WriteTimeValue("createdDateTime", m.GetCreatedDateTime())
if err != nil {
return err
}
}
{
err = writer.WriteStringValue("description", m.GetDescription())
if err != nil {
return err
}
}
{
err = writer.WriteStringValue("displayName", m.GetDisplayName())
if err != nil {
return err
}
}
{
err = writer.WriteBoolValue("isAutomated", m.GetIsAutomated())
if err != nil {
return err
}
}
{
err = writer.WriteObjectValue("lastModifiedBy", m.GetLastModifiedBy())
if err != nil {
return err
}
}
{
err = writer.WriteTimeValue("lastModifiedDateTime", m.GetLastModifiedDateTime())
if err != nil {
return err
}
}
{
err = writer.WriteTimeValue("launchDateTime", m.GetLaunchDateTime())
if err != nil {
return err
}
}
if m.GetPayloadDeliveryPlatform() != nil {
cast := (*m.GetPayloadDeliveryPlatform()).String()
err = writer.WriteStringValue("payloadDeliveryPlatform", &cast)
if err != nil {
return err
}
}
{
err = writer.WriteObjectValue("report", m.GetReport())
if err != nil {
return err
}
}
if m.GetStatus() != nil {
cast := (*m.GetStatus()).String()
err = writer.WriteStringValue("status", &cast)
if err != nil {
return err
}
}
return nil
}
// SetAttackTechnique sets the attackTechnique property value. The social engineering technique used in the attack simulation and training campaign. Supports $filter and $orderby. Possible values are: unknown, credentialHarvesting, attachmentMalware, driveByUrl, linkInAttachment, linkToMalwareFile, unknownFutureValue. For more information on the types of social engineering attack techniques, see simulations.
func (m *Simulation) SetAttackTechnique(value *SimulationAttackTechnique)() {
if m != nil {
m.attackTechnique = value
}
}
// SetAttackType sets the attackType property value. Attack type of the attack simulation and training campaign. Supports $filter and $orderby. Possible values are: unknown, social, cloud, endpoint, unknownFutureValue.
func (m *Simulation) SetAttackType(value *SimulationAttackType)() {
if m != nil {
m.attackType = value
}
}
// SetAutomationId sets the automationId property value. Unique identifier for the attack simulation automation.
func (m *Simulation) SetAutomationId(value *string)() {
if m != nil {
m.automationId = value
}
}
// SetCompletionDateTime sets the completionDateTime property value. Date and time of completion of the attack simulation and training campaign. Supports $filter and $orderby.
func (m *Simulation) SetCompletionDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time)() {
if m != nil {
m.completionDateTime = value
}
}
// SetCreatedBy sets the createdBy property value. Identity of the user who created the attack simulation and training campaign.
func (m *Simulation) SetCreatedBy(value EmailIdentityable)() {
if m != nil {
m.createdBy = value
}
}
// SetCreatedDateTime sets the createdDateTime property value. Date and time of creation of the attack simulation and training campaign.
func (m *Simulation) SetCreatedDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time)() {
if m != nil {
m.createdDateTime = value
}
}
// SetDescription sets the description property value. Description of the attack simulation and training campaign.
func (m *Simulation) SetDescription(value *string)() {
if m != nil {
m.description = value
}
}
// SetDisplayName sets the displayName property value. Display name of the attack simulation and training campaign. Supports $filter and $orderby.
func (m *Simulation) SetDisplayName(value *string)() {
if m != nil {
m.displayName = value
}
}
// SetIsAutomated sets the isAutomated property value. Flag representing if the attack simulation and training campaign was created from a simulation automation flow. Supports $filter and $orderby.
func (m *Simulation) SetIsAutomated(value *bool)() {
if m != nil {
m.isAutomated = value
}
}
// SetLastModifiedBy sets the lastModifiedBy property value. Identity of the user who most recently modified the attack simulation and training campaign.
func (m *Simulation) SetLastModifiedBy(value EmailIdentityable)() {
if m != nil {
m.lastModifiedBy = value
}
}
// SetLastModifiedDateTime sets the lastModifiedDateTime property value. Date and time of the most recent modification of the attack simulation and training campaign.
func (m *Simulation) SetLastModifiedDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time)() {
if m != nil {
m.lastModifiedDateTime = value
}
}
// SetLaunchDateTime sets the launchDateTime property value. Date and time of the launch/start of the attack simulation and training campaign. Supports $filter and $orderby.
func (m *Simulation) SetLaunchDateTime(value *i336074805fc853987abe6f7fe3ad97a6a6f3077a16391fec744f671a015fbd7e.Time)() {
if m != nil {
m.launchDateTime = value
}
}
// SetPayloadDeliveryPlatform sets the payloadDeliveryPlatform property value. Method of delivery of the phishing payload used in the attack simulation and training campaign. Possible values are: unknown, sms, email, teams, unknownFutureValue.
func (m *Simulation) SetPayloadDeliveryPlatform(value *PayloadDeliveryPlatform)() {
if m != nil {
m.payloadDeliveryPlatform = value
}
}
// SetReport sets the report property value. Report of the attack simulation and training campaign.
func (m *Simulation) SetReport(value SimulationReportable)() {
if m != nil {
m.report = value
}
}
// SetStatus sets the status property value. Status of the attack simulation and training campaign. Supports $filter and $orderby. Possible values are: unknown, draft, running, scheduled, succeeded, failed, cancelled, excluded, unknownFutureValue.
func (m *Simulation) SetStatus(value *SimulationStatus)() {
if m != nil {
m.status = value
}
} | models/simulation.go | 0.560132 | 0.629618 | simulation.go | starcoder |
package macros
import (
"errors"
"fmt"
"math"
"strconv"
"time"
)
// Value is a value replacement for a macro
type Value struct {
macro Token
str string
num uint64
typ valueType
any interface{}
}
type valueType uint
const (
typeNone valueType = iota
typeString
typeFloat
typeInt
typeUint
typeAny
typeTime
typeConcat
)
// String creates a new value replacing `macro` with a string
func String(macro Token, s string) Value {
return Value{macro, s, 0, typeString, nil}
}
// Concat creates a new value replacing `macro` with a string
func Concat(macro Token, sep string, values []string) Value {
return Value{macro, sep, 0, typeConcat, values}
}
// Bool creates a new value replacing `macro` with "true" or "false"
func Bool(macro Token, v bool) Value {
if v {
return Value{macro, "true", 0, typeString, nil}
}
return Value{macro, "false", 0, typeString, nil}
}
// Float64 creates a new value replacing `macro` with a float64
func Float64(macro Token, f float64) Value {
return Value{macro, "", math.Float64bits(f), typeFloat, nil}
}
// Float32 creates a new value replacing `macro` with a float32 value
func Float32(macro Token, f float32) Value {
return Value{macro, "", math.Float64bits(float64(f)), typeFloat, nil}
}
// Uint creates a new value that replaces `macro` with a `uint`
func Uint(macro Token, u uint) Value {
return Value{macro, "", uint64(u), typeUint, nil}
}
// Uint64 creates a new value that replaces `macro` with a `uint64`
func Uint64(macro Token, u uint64) Value {
return Value{macro, "", u, typeUint, nil}
}
// Uint32 creates a new value that replaces `macro` with a `uint32`
func Uint32(macro Token, u uint32) Value {
return Value{macro, "", uint64(u), typeUint, nil}
}
// Uint16 creates a new value that replaces `macro` with a `uint16`
func Uint16(macro Token, u uint16) Value {
return Value{macro, "", uint64(u), typeUint, nil}
}
// Uint8 creates a new value that replaces `macro` with a `uint8`
func Uint8(macro Token, u uint8) Value {
return Value{macro, "", uint64(u), typeUint, nil}
}
// Int creates a new value that replaces `macro` with an `int`
func Int(macro Token, i int) Value {
return Value{macro, "", uint64(int64(i)), typeInt, nil}
}
// Int64 creates a new value that replaces `macro` with an `int64`
func Int64(macro Token, i int64) Value {
return Value{macro, "", uint64(i), typeInt, nil}
}
// Int32 creates a new value that replaces `macro` with an `int32`
func Int32(macro Token, i int32) Value {
return Value{macro, "", uint64(int64(i)), typeInt, nil}
}
// Int16 creates a new value that replaces `macro` with an `int16`
func Int16(macro Token, i int16) Value {
return Value{macro, "", uint64(int64(i)), typeInt, nil}
}
// Int8 creates a new value that replaces `macro` with an `int8`
func Int8(macro Token, i int8) Value {
return Value{macro, "", uint64(int64(i)), typeInt, nil}
}
// Unix crates a new value that replaces `macro` with the unix timestamp of `tm`
func Unix(macro Token, tm time.Time) Value {
return Value{macro, "", uint64(tm.Unix()), typeInt, nil}
}
// Time creates a new value that replaces `macro` with `tm` formatted according to `layout`
func Time(macro Token, tm time.Time, layout string) Value {
return Value{macro, layout, 0, typeTime, tm}
}
// Any creates a new value that replaces `macro` with any value
func Any(macro Token, x interface{}) Value {
if any, ok := x.(ValueAppender); ok {
return Value{macro, "", 0, typeAny, any}
}
return Value{macro, "", 0, typeAny, any{x}}
}
// Bind creates a new value that replaces `macro` with any value
func Bind(macro Token, v ValueAppender) Value {
return Value{macro, "", 0, typeAny, v}
}
// AppendValue implements `ValueAppender` interface
func (v *Value) AppendValue(buf []byte) ([]byte, error) {
switch v.typ {
case typeString:
return append(buf, v.str...), nil
case typeFloat:
f := math.Float64frombits(v.num)
return strconv.AppendFloat(buf, f, 'f', -1, 64), nil
case typeUint:
return strconv.AppendUint(buf, v.num, 10), nil
case typeInt:
return strconv.AppendInt(buf, int64(v.num), 10), nil
case typeTime:
return v.any.(time.Time).AppendFormat(buf, v.str), nil
case typeConcat:
values := v.any.([]string)
sep := v.str
for i, v := range values {
if i > 0 {
buf = append(buf, sep...)
}
buf = append(buf, v...)
}
return buf, nil
case typeAny:
if v, ok := v.any.(ValueAppender); ok {
return v.AppendValue(buf)
}
return any{v.any}.AppendValue(buf)
case typeNone:
return buf, ErrMacroNotFound
default:
return nil, errors.New("Invalid value type")
}
}
// ValueAppender appends a value to a buffer
type ValueAppender interface {
AppendValue([]byte) ([]byte, error)
}
type any struct {
value interface{}
}
func (v any) AppendValue(buf []byte) ([]byte, error) {
switch v := v.value.(type) {
case string:
return append(buf, v...), nil
case []byte:
return append(buf, v...), nil
case int:
return strconv.AppendInt(buf, int64(v), 10), nil
case int64:
return strconv.AppendInt(buf, int64(v), 10), nil
case int32:
return strconv.AppendInt(buf, int64(v), 10), nil
case int16:
return strconv.AppendInt(buf, int64(v), 10), nil
case int8:
return strconv.AppendInt(buf, int64(v), 10), nil
case uint:
return strconv.AppendUint(buf, uint64(v), 10), nil
case uint64:
return strconv.AppendUint(buf, uint64(v), 10), nil
case uint32:
return strconv.AppendUint(buf, uint64(v), 10), nil
case uint16:
return strconv.AppendUint(buf, uint64(v), 10), nil
case uint8:
return strconv.AppendUint(buf, uint64(v), 10), nil
case float64:
return strconv.AppendFloat(buf, float64(v), 'f', -1, 64), nil
case float32:
return strconv.AppendFloat(buf, float64(v), 'f', -1, 32), nil
case bool:
if v {
return append(buf, "true"...), nil
}
return append(buf, "false"...), nil
case fmt.Stringer:
return append(buf, v.String()...), nil
default:
s := fmt.Sprintf("%s", v)
return append(buf, s...), nil
}
} | values.go | 0.718792 | 0.486149 | values.go | starcoder |
package trie
// most code from https://github.com/dghubble/trie
type Trie struct {
value interface{}
children map[rune]*Trie
}
func NewTrie() *Trie {
return new(Trie)
}
func (trie *Trie) Get(key string) interface{} {
node := trie
for _, r := range key {
node = node.children[r]
if node == nil {
return nil
}
}
return node.value
}
func (trie *Trie) Put(key string, value interface{}) bool {
node := trie
for _, r := range key {
child, _ := node.children[r]
if child == nil {
if node.children == nil {
node.children = map[rune]*Trie{}
}
child = new(Trie)
node.children[r] = child
}
node = child
}
isNewVal := node.value == nil
node.value = value
return isNewVal
}
func (trie *Trie) Delete(key string) bool {
path := make([]nodeRune, len(key))
node := trie
for i, r := range key {
path[i] = nodeRune{r: r, node: node}
node = node.children[r]
if node == nil {
return false
}
}
node.value = nil
if node.isLeaf() {
for i := len(key) - 1; i >= 0; i-- {
parent := path[i].node
r := path[i].r
delete(parent.children, r)
if !parent.isLeaf() {
break
}
parent.children = nil
if parent.value != nil {
break
}
}
}
return true
}
type WalkFunc func(key string, value interface{}) error
func (trie *Trie) Walk(walker WalkFunc) error {
return trie.walk("", walker)
}
func (trie *Trie) WalkPath(key string, walker WalkFunc) error {
if trie.value != nil {
if err := walker("", trie.value); err != nil {
return err
}
}
for i, r := range key {
if trie = trie.children[r]; trie == nil {
return nil
}
if trie.value != nil {
if err := walker(string(key[0:i+1]), trie.value); err != nil {
return err
}
}
}
return nil
}
type nodeRune struct {
node *Trie
r rune
}
func (trie *Trie) walk(key string, walker WalkFunc) error {
if trie.value != nil {
if err := walker(key, trie.value); err != nil {
return err
}
}
for r, child := range trie.children {
if err := child.walk(key+string(r), walker); err != nil {
return err
}
}
return nil
}
func (trie *Trie) isLeaf() bool {
return len(trie.children) == 0
} | gomisc/trie/trie.go | 0.605099 | 0.493164 | trie.go | starcoder |
package neuralnetwork
import (
"fmt"
"math"
"gonum.org/v1/gonum/mat"
)
type activationStruct struct{}
// ActivationFunctions is interface with Func and Grad for activation functions
type ActivationFunctions interface {
Func(z, h *mat.Dense)
Grad(z, h, grad *mat.Dense)
String() string
}
// IdentityActivation implements ActivationFunctions for identity
type IdentityActivation struct{ activationStruct }
// Func for IdentityActivation
func (*IdentityActivation) Func(z, h *mat.Dense) { h.Copy(z) }
// Grad for IndentityActivation
func (*IdentityActivation) Grad(z, h, grad *mat.Dense) {
matx{Dense: grad}.CopyApplied(h, func(h float64) float64 { return 1. })
}
// String for identity
func (*IdentityActivation) String() string { return "identity" }
// LogisticActivation implements ActivationFunctions for sigmoid
type LogisticActivation struct{ activationStruct }
// Func for LogisticActivation
func (*LogisticActivation) Func(z, h *mat.Dense) {
matx{Dense: h}.CopyApplied(z, func(z float64) float64 { return 1. / (1. + math.Exp(-z)) })
//h.Copy(matApply{Matrix: z, Func: func(z float64) float64 { return 1. / (1. + math.Exp(-z)) }})
}
// Grad for LogisticActivation
func (*LogisticActivation) Grad(z, h, grad *mat.Dense) {
matx{Dense: grad}.CopyApplied(h, func(h float64) float64 { return h * (1. - h) })
}
// String for logistic
func (*LogisticActivation) String() string { return "logistic" }
// TanhActivation implements ActivationFunctions for Tanh
type TanhActivation struct{ activationStruct }
// Func for tanh
func (*TanhActivation) Func(z, h *mat.Dense) {
matx{Dense: h}.CopyApplied(z, math.Tanh)
}
// Grad for tanh
func (*TanhActivation) Grad(z, h, grad *mat.Dense) {
matx{Dense: grad}.CopyApplied(h, func(h float64) float64 { return 1. - h*h })
}
// String for tanh
func (*TanhActivation) String() string { return "tanh" }
// ReluActivation implements ActivationFunctions for rectified linear unit
type ReluActivation struct{ activationStruct }
// Func for relu
func (*ReluActivation) Func(z, h *mat.Dense) {
matx{Dense: h}.CopyApplied(z, func(z float64) float64 {
if z >= 0 {
return z
}
return 0
})
}
// Grad for relu
func (*ReluActivation) Grad(z, h, grad *mat.Dense) {
matx{Dense: grad}.CopyApplied(h, func(h float64) float64 {
if h <= 0 {
return 0.
}
return 1.
})
}
// String for relu
func (*ReluActivation) String() string { return "relu" }
// ParamreluActivation implements ActivationFunctions for parametric relu
type ParamreluActivation struct {
activationStruct
Param float64
}
// Func for paramrelu
func (act *ParamreluActivation) Func(z, h *mat.Dense) {
matx{Dense: h}.CopyApplied(z, func(z float64) float64 {
if z >= 0 {
return z
}
return z * act.Param
})
}
// Grad for paramrelu
func (act *ParamreluActivation) Grad(z, h, grad *mat.Dense) {
matx{Dense: grad}.CopyApplied(h, func(h float64) float64 {
if h >= 0 {
return 1.
}
return act.Param
})
}
// String for paramrelu
func (act *ParamreluActivation) String() string { return fmt.Sprintf("paramrelu(%g)", act.Param) }
// SetParameter for paramrelu
func (act *ParamreluActivation) SetParameter(v float64) { act.Param = v }
// DefaultParameter for paramrelu
func (act *ParamreluActivation) DefaultParameter() float64 { return 0.01 }
// EluActivation implements ActivationFunctions for exponential linear unit
type EluActivation struct {
activationStruct
Param float64
}
// Func for elu
func (act *EluActivation) Func(z, h *mat.Dense) {
elu := func(x float64) float64 {
if x >= 0 {
return x
}
return act.Param * (math.Exp(x) - 1)
}
matx{Dense: h}.CopyApplied(z, elu)
}
// Grad for elu
func (act *EluActivation) Grad(z, h, grad *mat.Dense) {
if act.Param < 0 {
panic("elu Param must be >0")
}
matx{Dense: grad}.CopyApplied(h, func(h float64) float64 {
if h <= 0 {
// derivative of a*e^x-a is a*e^x that is h+a
return h + act.Param
}
return 1.
})
}
// String for relu
func (act *EluActivation) String() string { return fmt.Sprintf("elu(%g)", act.Param) }
// SetParameter for elu
func (act *EluActivation) SetParameter(v float64) { act.Param = v }
// DefaultParameter for elu
func (act *ReluActivation) DefaultParameter() float64 { return 0.01 }
type setparameterer interface {
SetParameter(v float64)
}
type defaultparameterer interface {
DefaultParameter() float64
}
// SupportedActivations is a map[Sing]ActivationFunctions for the supproted activation functions (identity,logistic,tanh,relu)
var SupportedActivations = map[string]ActivationFunctions{
"identity": &IdentityActivation{},
"logistic": &LogisticActivation{},
"tanh": &TanhActivation{},
"relu": &ReluActivation{},
"elu": &EluActivation{},
"paramrelu": &ParamreluActivation{},
}
// NewActivation return ActivationFunctions (Func and Grad) from its name (identity,logistic,tanh,relu)
// arg may be a string or an ActivationFunctions
func NewActivation(arg interface{}) ActivationFunctions {
if f, ok := arg.(ActivationFunctions); ok {
return f
}
name := arg.(string)
activation, ok := SupportedActivations[name]
if !ok {
panic(fmt.Errorf("unknown activation %s", name))
}
if a1, ok := activation.(defaultparameterer); ok {
if a2, ok := activation.(setparameterer); ok {
a2.SetParameter(a1.DefaultParameter())
}
}
return activation
} | neural_network/activation.go | 0.743075 | 0.468183 | activation.go | starcoder |
package field
import (
"gorm.io/gorm/clause"
)
type String Field
func (field String) Eq(value string) Expr {
return expr{e: clause.Eq{Column: field.RawExpr(), Value: value}}
}
func (field String) Neq(value string) Expr {
return expr{e: clause.Neq{Column: field.RawExpr(), Value: value}}
}
func (field String) Gt(value string) Expr {
return expr{e: clause.Gt{Column: field.RawExpr(), Value: value}}
}
func (field String) Gte(value string) Expr {
return expr{e: clause.Gte{Column: field.RawExpr(), Value: value}}
}
func (field String) Lt(value string) Expr {
return expr{e: clause.Lt{Column: field.RawExpr(), Value: value}}
}
func (field String) Lte(value string) Expr {
return expr{e: clause.Lte{Column: field.RawExpr(), Value: value}}
}
func (field String) Between(left string, right string) Expr {
return field.between([]interface{}{left, right})
}
func (field String) NotBetween(left string, right string) Expr {
return Not(field.Between(left, right))
}
func (field String) In(values ...string) Expr {
return expr{e: clause.IN{Column: field.RawExpr(), Values: field.toSlice(values)}}
}
func (field String) NotIn(values ...string) Expr {
return expr{e: clause.Not(field.In(values...).expression())}
}
func (field String) Like(value string) Expr {
return expr{e: clause.Like{Column: field.RawExpr(), Value: value}}
}
func (field String) NotLike(value string) Expr {
return expr{e: clause.Not(field.Like(value).expression())}
}
func (field String) Regexp(value string) Expr {
return field.regexp(value)
}
func (field String) NotRegxp(value string) Expr {
return expr{e: clause.Not(field.Regexp(value).expression())}
}
func (field String) Value(value string) AssignExpr {
return field.value(value)
}
func (field String) Zero() AssignExpr {
return field.value("")
}
func (field String) IfNull(value string) Expr {
return field.ifNull(value)
}
// FindInSet FIND_IN_SET(field_name, input_string_list)
func (field String) FindInSet(targetList string) Expr {
return expr{e: clause.Expr{SQL: "FIND_IN_SET(?,?)", Vars: []interface{}{field.RawExpr(), targetList}}}
}
// FindInSetWith FIND_IN_SET(input_string, field_name)
func (field String) FindInSetWith(target string) Expr {
return expr{e: clause.Expr{SQL: "FIND_IN_SET(?,?)", Vars: []interface{}{target, field.RawExpr()}}}
}
func (field String) Replace(from, to string) String {
return String{expr{e: clause.Expr{SQL: "REPLACE(?,?,?)", Vars: []interface{}{field.RawExpr(), from, to}}}}
}
func (field String) Concat(before, after string) String {
switch {
case before != "" && after != "":
return String{expr{e: clause.Expr{SQL: "CONCAT(?,?,?)", Vars: []interface{}{before, field.RawExpr(), after}}}}
case before != "":
return String{expr{e: clause.Expr{SQL: "CONCAT(?,?)", Vars: []interface{}{before, field.RawExpr()}}}}
case after != "":
return String{expr{e: clause.Expr{SQL: "CONCAT(?,?)", Vars: []interface{}{field.RawExpr(), after}}}}
default:
return field
}
}
func (field String) toSlice(values []string) []interface{} {
slice := make([]interface{}, len(values))
for i, v := range values {
slice[i] = v
}
return slice
}
type Bytes String
func (field Bytes) Eq(value []byte) Expr {
return expr{e: clause.Eq{Column: field.RawExpr(), Value: value}}
}
func (field Bytes) Neq(value []byte) Expr {
return expr{e: clause.Neq{Column: field.RawExpr(), Value: value}}
}
func (field Bytes) Gt(value []byte) Expr {
return expr{e: clause.Gt{Column: field.RawExpr(), Value: value}}
}
func (field Bytes) Gte(value []byte) Expr {
return expr{e: clause.Gte{Column: field.RawExpr(), Value: value}}
}
func (field Bytes) Lt(value []byte) Expr {
return expr{e: clause.Lt{Column: field.RawExpr(), Value: value}}
}
func (field Bytes) Lte(value []byte) Expr {
return expr{e: clause.Lte{Column: field.RawExpr(), Value: value}}
}
func (field Bytes) Between(left []byte, right []byte) Expr {
return field.between([]interface{}{left, right})
}
func (field Bytes) NotBetween(left []byte, right []byte) Expr {
return Not(field.Between(left, right))
}
func (field Bytes) In(values ...[]byte) Expr {
return expr{e: clause.IN{Column: field.RawExpr(), Values: field.toSlice(values)}}
}
func (field Bytes) NotIn(values ...[]byte) Expr {
return expr{e: clause.Not(field.In(values...).expression())}
}
func (field Bytes) Like(value string) Expr {
return expr{e: clause.Like{Column: field.RawExpr(), Value: value}}
}
func (field Bytes) NotLike(value string) Expr {
return expr{e: clause.Not(field.Like(value).expression())}
}
func (field Bytes) Regexp(value string) Expr {
return field.regexp(value)
}
func (field Bytes) NotRegxp(value string) Expr {
return Not(field.Regexp(value))
}
func (field Bytes) Value(value []byte) AssignExpr {
return field.value(value)
}
func (field Bytes) Zero() AssignExpr {
return field.value([]byte{})
}
func (field Bytes) IfNull(value []byte) Expr {
return field.ifNull(value)
}
// FindInSet FIND_IN_SET(field_name, input_string_list)
func (field Bytes) FindInSet(targetList string) Expr {
return expr{e: clause.Expr{SQL: "FIND_IN_SET(?,?)", Vars: []interface{}{field.RawExpr(), targetList}}}
}
// FindInSetWith FIND_IN_SET(input_string, field_name)
func (field Bytes) FindInSetWith(target string) Expr {
return expr{e: clause.Expr{SQL: "FIND_IN_SET(?,?)", Vars: []interface{}{target, field.RawExpr()}}}
}
func (field Bytes) toSlice(values [][]byte) []interface{} {
slice := make([]interface{}, len(values))
for i, v := range values {
slice[i] = v
}
return slice
} | field/string.go | 0.73659 | 0.500977 | string.go | starcoder |
package primitives
import (
"context"
atomixlock "github.com/atomix/go-client/pkg/client/lock"
"github.com/stretchr/testify/assert"
"sync/atomic"
"testing"
"time"
)
// TestAtomixLock : integration test
func (s *TestSuite) TestAtomixLock(t *testing.T) {
client, err := s.getClient(t)
assert.NoError(t, err)
database, err := client.GetDatabase(context.Background(), "raft-database")
assert.NoError(t, err)
lock1, err := database.GetLock(context.Background(), "TestAtomixLock")
assert.NoError(t, err)
lock2, err := database.GetLock(context.Background(), "TestAtomixLock")
assert.NoError(t, err)
id, err := lock1.Lock(context.Background())
assert.NoError(t, err)
assert.NotEqual(t, uint64(0), id)
var lock uint64
wait := make(chan struct{})
go func() {
id, err := lock2.Lock(context.Background())
assert.NoError(t, err)
assert.NotEqual(t, uint64(0), id)
atomic.StoreUint64(&lock, id)
wait <- struct{}{}
}()
isLocked, err := lock1.IsLocked(context.Background())
assert.NoError(t, err)
assert.True(t, isLocked)
isLocked, err = lock1.IsLocked(context.Background(), atomixlock.IfVersion(id))
assert.NoError(t, err)
assert.True(t, isLocked)
isLocked, err = lock1.IsLocked(context.Background(), atomixlock.IfVersion(id+1))
assert.NoError(t, err)
assert.False(t, isLocked)
unlocked, err := lock1.Unlock(context.Background())
assert.NoError(t, err)
assert.True(t, unlocked)
<-wait
id = atomic.LoadUint64(&lock)
assert.NotEqual(t, uint64(0), id)
isLocked, err = lock2.IsLocked(context.Background())
assert.NoError(t, err)
assert.True(t, isLocked)
unlocked, err = lock1.Unlock(context.Background(), atomixlock.IfVersion(id))
assert.NoError(t, err)
assert.True(t, unlocked)
isLocked, err = lock2.IsLocked(context.Background())
assert.NoError(t, err)
assert.False(t, isLocked)
id, err = lock1.Lock(context.Background())
assert.NoError(t, err)
assert.NotEqual(t, uint64(0), id)
lock = 0
wait = make(chan struct{})
go func() {
id, err := lock2.Lock(context.Background(), atomixlock.WithTimeout(100*time.Millisecond))
assert.NoError(t, err)
atomic.StoreUint64(&lock, id)
wait <- struct{}{}
}()
<-wait
id = atomic.LoadUint64(&lock)
assert.Equal(t, uint64(0), id)
} | test/primitives/locktest.go | 0.511473 | 0.428592 | locktest.go | starcoder |
package transport
import (
"bytes"
"fmt"
"sync"
)
// numberRange is an inclusive range.
// Both endpoints belong to the range, so a single number n is
// represented as {start: n, end: n}.
type numberRange struct {
	start uint64 // first number in the range (inclusive)
	end   uint64 // last number in the range (inclusive)
}
// rangeSet is sorted ranges in ascending order.
// push keeps the ranges non-overlapping and merges ranges that become
// adjacent, so at most one range covers any given number.
type rangeSet []numberRange
// largest returns the greatest number covered by the set,
// or 0 when the set is empty.
func (s rangeSet) largest() uint64 {
	if len(s) == 0 {
		return 0
	}
	return s[len(s)-1].end
}
// contains reports whether n falls inside any range of the set,
// using a binary search over the sorted, disjoint ranges.
func (s rangeSet) contains(n uint64) bool {
	lo, hi := 0, len(s)
	for lo < hi {
		mid := lo + (hi-lo)/2
		switch r := s[mid]; {
		case n < r.start:
			hi = mid
		case n <= r.end:
			return true
		default:
			lo = mid + 1
		}
	}
	return false
}
// equals reports whether the set consists of exactly one range that
// spans [start, end], i.e. the set is continuous from start to end.
func (s rangeSet) equals(start, end uint64) bool {
	if len(s) != 1 {
		return false
	}
	return s[0] == numberRange{start: start, end: end}
}
// push adds new range [start, end], merging it with any overlapping or
// directly adjacent existing ranges so the set stays sorted and disjoint.
// Panics if end < start.
func (s *rangeSet) push(start, end uint64) {
	if end < start {
		panic("invalid number range")
	}
	ls := *s
	// idx is either the index of the range containing start, or the
	// position where a new range starting at start would be inserted.
	idx := ls.insertPos(start)
	if idx < len(ls) {
		r := ls[idx]
		if r.start <= start && end <= r.end {
			// [....]
			// [..]
			// New range is fully contained in an existing one: no-op.
			return
		}
		if start > r.start {
			// [..]
			// [..]
			// New range begins inside an existing one; widen it so the
			// merge loop below absorbs that range.
			start = r.start
		}
	}
	if idx > 0 && ls[idx-1].end+1 == start {
		// New range is usually continuous, can just extend the range
		// [1..2][3..4] => [1..4]
		idx--
		ls[idx].end = end
	} else {
		s.insert(idx, numberRange{start: start, end: end})
		// insert may have reallocated the backing array; reload.
		ls = *s
	}
	// Check if the new range can be merged with the following ranges:
	// k tracks the last range that overlaps or touches cur.
	cur := &(*s)[idx]
	k := -1
	for i := idx + 1; i < len(ls); i++ {
		if cur.end+1 < ls[i].start {
			break
		}
		k = i
	}
	if k > idx {
		if cur.end <= ls[k].end {
			cur.end = ls[k].end
		}
		// Remove ranges from idx+1 until k (now swallowed by cur).
		copy(ls[idx+1:], ls[k+1:])
		*s = ls[:len(ls)-(k-idx)]
	}
}
// insertPos performs a binary search and returns either the index of
// the range containing n, or the index at which a range starting at n
// would be inserted (every range before that index ends below n).
func (s rangeSet) insertPos(n uint64) int {
	lo, hi := 0, len(s)
	for lo < hi {
		mid := lo + (hi-lo)/2
		switch r := s[mid]; {
		case n < r.start:
			hi = mid
		case n <= r.end:
			return mid
		default:
			lo = mid + 1
		}
	}
	return lo
}
// insert places r at position idx, shifting later ranges one slot right.
func (s *rangeSet) insert(idx int, r numberRange) {
	// Grow by one zero element, then open a gap at idx.
	*s = append(*s, numberRange{})
	ls := *s
	copy(ls[idx+1:], ls[idx:len(ls)-1])
	ls[idx] = r
}
// removeUntil removes all numbers less than or equal to v.
// Ranges wholly below v are dropped; a range containing v is narrowed
// to start at v+1.
func (s *rangeSet) removeUntil(v uint64) {
	ls := *s
	// Find starting range to keep: idx is the range containing v, or the
	// first range entirely above v.
	idx := ls.insertPos(v)
	if idx < len(ls) {
		r := &ls[idx]
		if v < r.start {
			// Keep this range: it lies entirely above v.
		} else if v < r.end {
			// Narrow this range: drop [r.start, v], keep [v+1, r.end].
			r.start = v + 1
		} else {
			// Delete this range: it is fully covered by the cutoff.
			idx++
		}
	}
	if idx > 0 {
		// Shift surviving ranges to the front and truncate.
		copy(ls, ls[idx:])
		*s = ls[:len(ls)-idx]
	}
}
// String renders the set for debugging, e.g. "ranges=2 [1,3] [7,9]".
func (s rangeSet) String() string {
	var buf bytes.Buffer
	fmt.Fprintf(&buf, "ranges=%d", len(s))
	for i := range s {
		fmt.Fprintf(&buf, " [%d,%d]", s[i].start, s[i].end)
	}
	return buf.String()
}
// rangeBuffer represents a fragment of data at an offset.
type rangeBuffer struct {
	offset uint64 // stream position of the first byte of data
	data   []byte // fragment contents (owned copy; see newRangeBuffer)
}
// String renders the fragment's half-open byte range for debugging.
func (s *rangeBuffer) String() string {
	end := s.offset + uint64(len(s.data))
	return fmt.Sprintf("[%d,%d)", s.offset, end)
}
// newRangeBuffer creates a new buffer holding its own copy of data,
// placed at the given stream offset.
func newRangeBuffer(data []byte, offset uint64) rangeBuffer {
	buf := newDataBuffer(len(data))
	copy(buf, data)
	return rangeBuffer{
		offset: offset,
		data:   buf,
	}
}
// rangeBufferList is a sorted list of data buffer by offset.
// write keeps the fragments non-overlapping.
type rangeBufferList struct {
	ls []rangeBuffer // fragments sorted ascending by offset
}
// write inserts a copy of data at the given stream offset, trimming any
// parts already covered by existing fragments so the list stays sorted
// and non-overlapping. Empty input is a no-op. In the diagrams below,
// X marks an existing fragment and O marks the incoming data.
func (s *rangeBufferList) write(data []byte, offset uint64) {
	if len(data) == 0 {
		return
	}
	end := offset + uint64(len(data))
	// Find initial index to check overlap and insert
	idx := s.insertPos(offset)
	// Start one element earlier: the preceding fragment may begin
	// before offset yet still extend into the new range.
	i := idx - 1
	if i < 0 {
		i = 0
	}
	for ; i < len(s.ls); i++ {
		b := s.ls[i]
		bStart := b.offset
		bEnd := b.offset + uint64(len(b.data))
		if bStart <= offset {
			if end <= bEnd {
				// Fully contained in existing buffer: nothing new.
				// XXXXXX
				// OOOOOO
				return
			}
			if offset < bEnd {
				// New start overlaps existing buffer: trim the front.
				// XXXXXX
				// OOOO
				data = data[bEnd-offset:]
				offset = bEnd
				idx = i + 1
			}
			// Check next buffer
			// XXXXXX
			// OOOO
		} else {
			if end < bStart {
				// Found the gap to insert
				// XXXXXX
				// OOOO
				break
			}
			if end <= bEnd {
				// New end overlaps existing buffer: trim the tail.
				// XXXXXX
				// OOOO
				// OOOOOOOOO
				data = data[:bStart-offset]
				break
			}
			// Split the new buffer: insert the part before the existing
			// fragment, then continue with the part after it.
			// XXXXXX
			// OOOOOOOOO
			b = newRangeBuffer(data[:bStart-offset], offset)
			s.insert(idx, b)
			data = data[bEnd-offset:]
			offset = bEnd
			// idx accounts for the element just inserted at this position.
			idx = i + 2
		}
	}
	// Insert whatever is left of data at the final insertion point.
	b := newRangeBuffer(data, offset)
	s.insert(idx, b)
}
// read copies contiguous buffered data starting at offset into data and
// returns the number of bytes copied. It stops at the first gap or when
// data is full. Fully consumed fragments are released via freeDataBuffer
// and removed from the list; a partially read fragment is replaced by a
// fresh buffer holding its remainder.
func (s *rangeBufferList) read(data []byte, offset uint64) int {
	var i, n int
	for i = 0; i < len(s.ls); i++ {
		b := s.ls[i]
		if b.offset != offset {
			// Data have gaps
			break
		}
		k := copy(data[n:], b.data)
		if k == 0 {
			// Read buffer is full
			break
		}
		n += k
		if k < len(b.data) {
			// Read partial data: keep the unread tail as a new fragment
			// (newRangeBuffer copies it, so the old buffer can be freed).
			s.ls[i] = newRangeBuffer(b.data[k:], b.offset+uint64(k))
			freeDataBuffer(b.data)
			break
		}
		offset += uint64(k)
		freeDataBuffer(b.data)
	}
	if i > 0 {
		// Drop the fragments that were consumed in full.
		s.shift(i)
	}
	return n
}
// consume feeds consecutive fragments starting exactly at offset to fn,
// removing fully consumed fragments from the list and recycling their
// buffers. It returns the total number of bytes consumed and the first
// error reported by fn.
//
// Fix: previously, when fn fully consumed a fragment but also returned an
// error, the loop broke before counting that entry, so shift(i) left the
// entry in the list while its buffer had already been returned to the
// pool — a use-after-free of pooled data.
func (s *rangeBufferList) consume(offset uint64, fn func([]byte) (int, error)) (n int, err error) {
	var i, k int
	for i = 0; i < len(s.ls); i++ {
		b := s.ls[i]
		if b.offset != offset {
			// Stop at the first gap.
			break
		}
		k, err = fn(b.data)
		if k <= 0 {
			break
		}
		n += k
		if k < len(b.data) {
			// Partially consumed: keep the tail as a fresh buffer and
			// recycle the old one. The entry stays in the list.
			s.ls[i] = newRangeBuffer(b.data[k:], b.offset+uint64(k))
			freeDataBuffer(b.data)
			break
		}
		// Fully consumed: recycle the buffer and make sure shift below
		// removes the entry, even when fn reported an error.
		freeDataBuffer(b.data)
		if err != nil {
			i++
			break
		}
		offset += uint64(k)
	}
	if i > 0 {
		s.shift(i)
	}
	return
}
// pop returns the first continuous range of data, up to max bytes, along
// with its starting offset. It returns nil when the list is empty or max
// is not positive.
func (s *rangeBufferList) pop(max int) ([]byte, uint64) {
	if len(s.ls) == 0 || max <= 0 {
		return nil, 0
	}
	// Use offset from the first segment.
	data := s.ls[0].data
	offset := s.ls[0].offset
	n := 0
	// Peek how many contiguous bytes are available, clamped to max.
	for _, b := range s.ls {
		if b.offset != offset+uint64(n) {
			break
		}
		n += len(b.data)
		if n > max {
			n = max
			break
		}
	}
	// No allocation needed if the result is exactly the whole first buffer.
	if n == len(data) {
		s.shift(1)
		return data, offset
	}
	if n < len(data) {
		// Replace the first buffer with a copy of the remaining tail and
		// hand the caller the front of the original buffer.
		s.ls[0] = newRangeBuffer(data[n:], offset+uint64(n))
		return data[:n], offset
	}
	// The range spans multiple fragments: gather them into one buffer.
	b := newDataBuffer(n)
	n = s.read(b, offset)
	return b[:n], offset
}
// insert places r at position idx, shifting later entries right by one.
func (s *rangeBufferList) insert(idx int, r rangeBuffer) {
	// Grow by one zero entry, then slide the tail over to open a slot.
	s.ls = append(s.ls, rangeBuffer{})
	copy(s.ls[idx+1:], s.ls[idx:])
	s.ls[idx] = r
}

// shift removes the first idx entries, zeroing the vacated tail so the
// retained backing array does not pin freed buffers.
func (s *rangeBufferList) shift(idx int) {
	n := copy(s.ls, s.ls[idx:])
	for i := n; i < len(s.ls); i++ {
		s.ls[i] = rangeBuffer{}
	}
	s.ls = s.ls[:n]
}

// insertPos returns the index of the first entry whose offset is greater
// than offset (binary search upper bound), i.e. the position where a
// fragment at offset should be inserted.
func (s *rangeBufferList) insertPos(offset uint64) int {
	lo, hi := 0, len(s.ls)
	for lo < hi {
		m := lo + (hi-lo)/2
		if s.ls[m].offset <= offset {
			lo = m + 1
		} else {
			hi = m
		}
	}
	return lo
}
// size returns the number of data bytes actually held in the list.
func (s *rangeBufferList) size() int {
	total := 0
	for i := range s.ls {
		total += len(s.ls[i].data)
	}
	return total
}

// length returns the span covered by the list, from the first fragment's
// start to the end of the last fragment, including any gaps in between.
func (s *rangeBufferList) length() uint64 {
	if len(s.ls) == 0 {
		return 0
	}
	head := s.ls[0]
	tail := s.ls[len(s.ls)-1]
	return tail.offset - head.offset + uint64(len(tail.data))
}

// isEmpty reports whether the list holds no fragments.
func (s *rangeBufferList) isEmpty() bool {
	return len(s.ls) == 0
}

// first returns a pointer to the first fragment, or nil when the list is empty.
func (s *rangeBufferList) first() *rangeBuffer {
	if len(s.ls) == 0 {
		return nil
	}
	return &s.ls[0]
}

// String lists the fragment count followed by each fragment's range.
func (s *rangeBufferList) String() string {
	var buf bytes.Buffer
	fmt.Fprintf(&buf, "ranges=%d", len(s.ls))
	for i := range s.ls {
		fmt.Fprintf(&buf, " %s", &s.ls[i])
	}
	return buf.String()
}
// newDataBuffer returns a byte slice taken from the size-tiered buffer
// pools when size fits one of the tiers; otherwise it allocates directly.
// This buffer is used for stream data and datagrams.
func newDataBuffer(size int) []byte {
	// Tier 0 is reserved for empty dataBuffer containers, so start at 1.
	for i := 1; i < len(dataBufferSizes); i++ {
		n := dataBufferSizes[i]
		if size <= n {
			d := dataBufferPools[i].Get()
			if d == nil {
				// Pool is empty: allocate a full-tier slice and trim it.
				data := make([]byte, n)
				return data[:size]
			}
			buf := d.(*dataBuffer)
			b := buf.data[:size]
			// Detach the data and recycle the empty container via tier 0.
			buf.data = nil
			dataBufferPools[0].Put(buf)
			return b
		}
	}
	debug("data is too large for buffer pools: %v", size)
	return make([]byte, size)
}

// freeDataBuffer puts the slice back into the buffer pools when its
// capacity exactly matches a pool tier. Called when stream or datagram
// data is acknowledged or lost.
func freeDataBuffer(b []byte) {
	size := cap(b)
	for i := 1; i < len(dataBufferSizes); i++ {
		n := dataBufferSizes[i]
		if size == n {
			var buf *dataBuffer
			// Reuse an empty container from tier 0 when available.
			d := dataBufferPools[0].Get()
			if d == nil {
				buf = &dataBuffer{}
			} else {
				buf = d.(*dataBuffer)
			}
			buf.data = b[:n]
			dataBufferPools[i].Put(buf)
			return
		}
	}
	debug("data is not eligible for buffer pools: %v", size)
}

// dataBufferSizes lists the pool tier capacities. Index 0 is reserved for
// recycling empty dataBuffer containers.
var dataBufferSizes = [...]int{
	0, // For buffer container only
	1 << 10,
	2 << 10,
	4 << 10,
	8 << 10,
}

// dataBufferPools holds one sync.Pool per tier in dataBufferSizes.
var dataBufferPools = [len(dataBufferSizes)]sync.Pool{}

// dataBuffer contains data for recycling. The byte slice is wrapped in a
// struct because putting a slice into a sync.Pool directly would allocate
// (the slice header escapes into the interface).
type dataBuffer struct {
	data []byte
}
package geometry
import (
"math"
"github.com/tab58/v1/spatial/pkg/numeric"
)
// Point2DReader is a read-only interface for 2D points.
// (The methods only observe coordinates; nothing here mutates the point.)
type Point2DReader interface {
	GetX() float64
	GetY() float64
	Clone() *Point2D
	AsVector() *Vector2D
	DistanceTo(q Point2DReader) (float64, error)
	IsEqualTo(q Point2DReader, tol float64) (bool, error)
}
// Point2DWriter is a write-only interface for 2D points.
//
// Fix: SetX/SetY previously had getter-style signatures (SetX() float64),
// which *Point2D — whose setters are SetX(x float64) / SetY(y float64) —
// could not implement; the interface was unsatisfiable as declared.
type Point2DWriter interface {
	SetX(x float64)
	SetY(y float64)
	Copy(q Point2DReader)
	Scale(f float64) error
	Add(v Vector2DReader) error
	Sub(v Vector2DReader) error
}
// Origin2D is the canonical origin in the 2D plane.
var Origin2D Point2DReader = &Point2D{X: 0, Y: 0}

// Point2D represents a 2D point.
type Point2D struct {
	X float64
	Y float64
}

// GetX returns the x-coordinate of the point.
func (p *Point2D) GetX() float64 {
	return p.X
}

// GetY returns the y-coordinate of the point.
func (p *Point2D) GetY() float64 {
	return p.Y
}

// SetX sets the x-coordinate of the point.
func (p *Point2D) SetX(x float64) {
	p.X = x
}

// SetY sets the y-coordinate of the point.
func (p *Point2D) SetY(y float64) {
	p.Y = y
}

// Copy copies the coordinate info from another Point2D.
func (p *Point2D) Copy(q Point2DReader) {
	p.SetX(q.GetX())
	p.SetY(q.GetY())
}

// Scale scales the displacement vector from the origin by the given
// factor. It returns ErrInvalidArgument when f is NaN and ErrOverflow
// when either scaled coordinate overflows.
func (p *Point2D) Scale(f float64) error {
	if math.IsNaN(f) {
		return numeric.ErrInvalidArgument
	}
	newX := p.GetX() * f
	newY := p.GetY() * f
	if numeric.AreAnyOverflow(newX, newY) {
		return numeric.ErrOverflow
	}
	p.SetX(newX)
	p.SetY(newY)
	return nil
}
// Add adds the given displacement vector to this point, reporting
// ErrOverflow when either resulting coordinate overflows.
func (p *Point2D) Add(v Vector2DReader) error {
	sumX := p.GetX() + v.GetX()
	sumY := p.GetY() + v.GetY()
	if numeric.AreAnyOverflow(sumX, sumY) {
		return numeric.ErrOverflow
	}
	p.SetX(sumX)
	p.SetY(sumY)
	return nil
}

// Sub subtracts the given displacement vector from this point, reporting
// ErrOverflow when either resulting coordinate overflows.
func (p *Point2D) Sub(v Vector2DReader) error {
	diffX := p.GetX() - v.GetX()
	diffY := p.GetY() - v.GetY()
	if numeric.AreAnyOverflow(diffX, diffY) {
		return numeric.ErrOverflow
	}
	p.SetX(diffX)
	p.SetY(diffY)
	return nil
}
// Clone creates a new Point2D with the same coordinate data.
func (p *Point2D) Clone() *Point2D {
	clone := Point2D{X: p.GetX(), Y: p.GetY()}
	return &clone
}

// AsVector gets the displacement vector from the origin to the point.
func (p *Point2D) AsVector() *Vector2D {
	vec := Vector2D{X: p.GetX(), Y: p.GetY()}
	return &vec
}
// DistanceTo gets the length of the displacement vector from this point
// to the given point, reporting ErrOverflow when an intermediate value or
// the distance itself overflows.
func (p *Point2D) DistanceTo(q Point2DReader) (float64, error) {
	qx, qy := q.GetX(), q.GetY()
	px, py := p.GetX(), p.GetY()
	newX := qx - px
	newY := qy - py
	if numeric.AreAnyOverflow(newX, newY) {
		return 0, numeric.ErrOverflow
	}
	// Renamed from "len" to avoid shadowing the builtin len.
	dist := numeric.Nrm2(newX, newY)
	if numeric.IsOverflow(dist) {
		return 0, numeric.ErrOverflow
	}
	return dist, nil
}
// IsEqualTo returns true if 2 points can be considered equal to within a specific tolerance, false if not.
func (p *Point2D) IsEqualTo(q Point2DReader, tol float64) (bool, error) {
if numeric.IsInvalidTolerance(tol) {
return false, numeric.ErrInvalidTol
}
px, py := p.GetX(), p.GetY()
qx, qy := q.GetX(), q.GetY()
x := math.Abs(px - qx)
y := math.Abs(py - qy)
isEqual := x <= tol && y <= tol
return isEqual, nil
} | pkg/geometry/point2d.go | 0.888245 | 0.665288 | point2d.go | starcoder |
package graph
import (
i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55 "github.com/microsoft/kiota/abstractions/go/serialization"
)
// WorkbookTableColumn models a single column of an Excel workbook table.
type WorkbookTableColumn struct {
    Entity
    // Retrieve the filter applied to the column. Read-only.
    filter *WorkbookFilter;
    // Returns the index number of the column within the columns collection of the table. Zero-indexed. Read-only.
    index *int32;
    // Returns the name of the table column.
    name *string;
    // Represents the raw values of the specified range. The data returned could be of type string, number, or boolean. Cells that contain an error will return the error string.
    values *Json;
}
// NewWorkbookTableColumn instantiates a new workbookTableColumn and sets the default values.
func NewWorkbookTableColumn()(*WorkbookTableColumn) {
    m := &WorkbookTableColumn{
        Entity: *NewEntity(),
    }
    return m
}
// GetFilter gets the filter property value: the filter applied to the column. Read-only.
func (m *WorkbookTableColumn) GetFilter() *WorkbookFilter {
	if m == nil {
		return nil
	}
	return m.filter
}

// GetIndex gets the index property value: the zero-based index of the column
// within the table's columns collection. Read-only.
func (m *WorkbookTableColumn) GetIndex() *int32 {
	if m == nil {
		return nil
	}
	return m.index
}

// GetName gets the name property value: the name of the table column.
func (m *WorkbookTableColumn) GetName() *string {
	if m == nil {
		return nil
	}
	return m.name
}

// GetValues gets the values property value: the raw values of the specified
// range (string, number, or boolean; error cells yield the error string).
func (m *WorkbookTableColumn) GetValues() *Json {
	if m == nil {
		return nil
	}
	return m.values
}
// GetFieldDeserializers returns the deserialization information for the
// current model: one parse callback per serialized field, merged with the
// base Entity deserializers.
func (m *WorkbookTableColumn) GetFieldDeserializers()(map[string]func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode)(error)) {
    res := m.Entity.GetFieldDeserializers()
    res["filter"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewWorkbookFilter() })
        if err != nil {
            return err
        }
        if val != nil {
            m.SetFilter(val.(*WorkbookFilter))
        }
        return nil
    }
    res["index"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetInt32Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetIndex(val)
        }
        return nil
    }
    res["name"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetName(val)
        }
        return nil
    }
    res["values"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetObjectValue(func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return NewJson() })
        if err != nil {
            return err
        }
        if val != nil {
            m.SetValues(val.(*Json))
        }
        return nil
    }
    return res
}
// IsNil reports whether the receiver is nil (generated nil-safety helper).
func (m *WorkbookTableColumn) IsNil()(bool) {
    return m == nil
}
// Serialize serializes information of the current object: the base Entity
// fields first, then each column field in turn.
func (m *WorkbookTableColumn) Serialize(writer i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.SerializationWriter)(error) {
    err := m.Entity.Serialize(writer)
    if err != nil {
        return err
    }
    {
        err = writer.WriteObjectValue("filter", m.GetFilter())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteInt32Value("index", m.GetIndex())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("name", m.GetName())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteObjectValue("values", m.GetValues())
        if err != nil {
            return err
        }
    }
    return nil
}
// SetFilter sets the filter property value: the filter applied to the column. Read-only.
func (m *WorkbookTableColumn) SetFilter(value *WorkbookFilter)() {
    m.filter = value
}
// SetIndex sets the index property value: the zero-based index of the column within the table's columns collection. Read-only.
func (m *WorkbookTableColumn) SetIndex(value *int32)() {
    m.index = value
}
// SetName sets the name property value: the name of the table column.
func (m *WorkbookTableColumn) SetName(value *string)() {
    m.name = value
}
// SetValues sets the values property value: the raw values of the specified range.
func (m *WorkbookTableColumn) SetValues(value *Json)() {
    m.values = value
}
package losses
import (
"github.com/nlpodyssey/spago/pkg/ml/ag"
)
// MAE measures the mean absolute error (a.k.a. L1 Loss) between each element
// in the input x and target y. With reduceMean it averages over elements;
// otherwise it sums.
func MAE(g *ag.Graph, x ag.Node, y ag.Node, reduceMean bool) ag.Node {
	loss := g.Abs(g.Sub(x, y))
	if reduceMean {
		return g.ReduceMean(loss)
	}
	return g.ReduceSum(loss)
}

// MSE measures the mean squared error (squared L2 norm) between each element
// in the input x and target y. The 0.5 factor simplifies the gradient.
func MSE(g *ag.Graph, x ag.Node, y ag.Node, reduceMean bool) ag.Node {
	loss := g.ProdScalar(g.Square(g.Sub(x, y)), g.NewScalar(0.5))
	if reduceMean {
		return g.ReduceMean(loss)
	}
	return g.ReduceSum(loss)
}

// NLL returns the negative log-likelihood of the input x with respect to
// the target y. The target is expected to be a one-hot vector and x a
// probability distribution (e.g. a softmax output).
func NLL(g *ag.Graph, x ag.Node, y ag.Node) ag.Node {
	return g.Neg(g.ReduceSum(g.Prod(y, g.Log(x))))
}

// CrossEntropy computes -x[c] + log(sum(exp(x))) over unnormalized scores x,
// where c is the index of the gold class.
func CrossEntropy(g *ag.Graph, x ag.Node, c int) ag.Node {
	return g.Add(g.Neg(g.AtVec(x, c)), g.Log(g.ReduceSum(g.Exp(x))))
}

// Perplexity is the exponential of the cross-entropy loss.
func Perplexity(g *ag.Graph, x ag.Node, c int) ag.Node {
	return g.Exp(CrossEntropy(g, x, c))
}

// ZeroOneQuantization penalizes values far from both 0 and 1:
// sum(x² · (1-x)²) is minimal when every element is exactly 0 or 1.
func ZeroOneQuantization(g *ag.Graph, x ag.Node) ag.Node {
	return g.ReduceSum(g.Prod(g.Square(x), g.Square(g.ReverseSub(x, g.NewScalar(1.0)))))
}

// Norm2Quantization penalizes vectors whose squared L2 norm differs from 1.
func Norm2Quantization(g *ag.Graph, x ag.Node) ag.Node {
	return g.Square(g.SubScalar(g.ReduceSum(g.Square(x)), g.NewScalar(1.0)))
}

// OneHotQuantization combines the zero-one and norm-2 penalties, scaled by
// the quantization regularizer weight q (suggested 0.00001).
func OneHotQuantization(g *ag.Graph, x ag.Node, q float64) ag.Node {
	return g.ProdScalar(g.Add(ZeroOneQuantization(g, x), Norm2Quantization(g, x)), g.NewScalar(q))
}

// Distance returns the absolute difference between x and the target scalar.
func Distance(g *ag.Graph, x ag.Node, target float64) ag.Node {
	return g.Abs(g.Sub(g.NewScalar(target), x))
}
// MSESeq sums the MSE loss over a sequence of prediction/target pairs,
// optionally averaging by sequence length.
// NOTE(review): panics when predicted is empty and assumes
// len(target) >= len(predicted) — confirm callers guarantee this.
func MSESeq(g *ag.Graph, predicted []ag.Node, target []ag.Node, reduceMean bool) ag.Node {
	loss := MSE(g, predicted[0], target[0], false)
	for i := 1; i < len(predicted); i++ {
		loss = g.Add(loss, MSE(g, predicted[i], target[i], false))
	}
	if reduceMean {
		return g.DivScalar(loss, g.NewScalar(float64(len(predicted))))
	}
	return loss
}

// MAESeq sums the MAE loss over a sequence of prediction/target pairs,
// optionally averaging by sequence length. Same length assumptions as MSESeq.
func MAESeq(g *ag.Graph, predicted []ag.Node, target []ag.Node, reduceMean bool) ag.Node {
	loss := MAE(g, predicted[0], target[0], false)
	for i := 1; i < len(predicted); i++ {
		loss = g.Add(loss, MAE(g, predicted[i], target[i], false))
	}
	if reduceMean {
		return g.DivScalar(loss, g.NewScalar(float64(len(predicted))))
	}
	return loss
}

// CrossEntropySeq sums the cross-entropy loss over a sequence, optionally
// averaging by sequence length. Same length assumptions as MSESeq.
func CrossEntropySeq(g *ag.Graph, predicted []ag.Node, target []int, reduceMean bool) ag.Node {
	loss := CrossEntropy(g, predicted[0], target[0])
	for i := 1; i < len(predicted); i++ {
		loss = g.Add(loss, CrossEntropy(g, predicted[i], target[i]))
	}
	if reduceMean {
		return g.DivScalar(loss, g.NewScalar(float64(len(predicted))))
	}
	return loss
}

// SPG (Softmax Policy Gradient) is a Gradient Policy used in Reinforcement Learning.
// logPropActions are the log-probability of the chosen action by the Agent at each time;
// logProbTargets are results of the reward function i.e. the predicted log-likelihood of the ground truth at each time.
// NOTE(review): loss starts as a nil Node, so the first g.Add receives a nil
// operand — presumably the graph treats nil as zero; confirm.
func SPG(g *ag.Graph, logPropActions []ag.Node, logProbTargets []ag.Node) ag.Node {
	var loss ag.Node
	for t := 0; t < len(logPropActions); t++ {
		loss = g.Add(loss, g.Prod(logPropActions[t], logProbTargets[t]))
	}
	return g.Neg(loss)
}
package harrypotter
//NumBooks is the number of books in the series
const NumBooks = 5

//pricePerBook is the undiscounted price of a single book
const pricePerBook = 8.0

//optimalDiscountSize is the set size that yields the best total price:
//two sets of 4 are cheaper than a 5-set plus a 3-set
const optimalDiscountSize = 4

//discounts[n] is the price multiplier for a set of n distinct books
var discounts = [NumBooks + 1]float64{1.0, 1.0, .95, .9, .8, .75}

//CalculatePrice computes and returns the price for the specified books by
//grouping them into sets of distinct titles and then merging complementary
//sets that would otherwise produce a worse discount
func CalculatePrice(books BookBasket) float64 {
	sets := mergeComplementarySets(buildSets(books))
	return totalPriceOf(sets)
}

//BookBasket contains zero or more copies of each of NumBooks books
type BookBasket struct {
	// counts[i] is the number of copies of book i
	counts []int
}
//CreateBasket returns a new BookBasket containing the given books. The
//second result is false when more than NumBooks counts are supplied or
//when any count is negative.
func CreateBasket(bookCounts ...int) (BookBasket, bool) {
	valid := len(bookCounts) <= NumBooks
	for _, count := range bookCounts {
		if count < 0 {
			valid = false
		}
	}
	if !valid {
		return BookBasket{}, false
	}
	return BookBasket{counts: bookCounts}, true
}
// bookSet records which of the NumBooks distinct titles are in a set.
type bookSet []bool

// price returns the discounted price for the set.
func (set bookSet) price() float64 {
	numBooks := set.count()
	return float64(numBooks) * pricePerBook * discounts[numBooks]
}

// count returns the number of titles present in the set.
func (set bookSet) count() int {
	count := 0
	for _, present := range set {
		if present {
			count++
		}
	}
	return count
}

// buildSets greedily extracts sets of up to optimalDiscountSize distinct
// titles until the basket is empty. The result is in non-increasing size
// order, which findSetsOf relies on.
func buildSets(books BookBasket) []bookSet {
	sets := []bookSet{}
	for {
		set, ok := extractSet(books, optimalDiscountSize)
		if !ok {
			break
		}
		sets = append(sets, set)
	}
	return sets
}

// extractSet removes up to maxSize distinct titles from the basket and
// returns them as a set; ok is false when the basket was already empty.
// Note: books is passed by value, but counts is a slice, so the caller's
// basket is intentionally mutated through the shared backing array.
func extractSet(books BookBasket, maxSize int) (bookSet, bool) {
	counts := make([]bool, NumBooks)
	setSize := 0
	for i, count := range books.counts {
		if count > 0 && setSize < maxSize {
			counts[i] = true
			books.counts[i]--
			setSize++
		}
	}
	if setSize == 0 {
		return counts, false
	}
	return counts, true
}

// mergeComplementarySets improves the total price by folding each 1-set
// into a disjoint 4-set (a 5-set plus an empty set is cheaper than a
// 4-set plus a 1-set); emptied sets then price at zero.
func mergeComplementarySets(sets []bookSet) []bookSet {
	fours := findSetsOf(sets, optimalDiscountSize)
	ones := findSetsOf(sets, NumBooks-optimalDiscountSize)
	for _, fourSet := range fours {
		for _, oneSet := range ones {
			if areComplementary(fourSet, oneSet) {
				mergeSets(fourSet, oneSet)
			}
		}
	}
	return sets
}

// totalPriceOf sums the price of every set (empty sets cost nothing).
func totalPriceOf(sets []bookSet) float64 {
	totalPrice := 0.0
	for _, set := range sets {
		totalPrice += set.price()
	}
	return totalPrice
}

// findSetsOf returns the contiguous sub-slice of sets whose size equals
// size, assuming sets is ordered by non-increasing size (see buildSets).
func findSetsOf(sets []bookSet, size int) []bookSet {
	startIndex := 0
	stopIndex := len(sets)
	for i, set := range sets {
		setSize := set.count()
		if setSize > size {
			startIndex = i + 1
		} else if setSize < size {
			stopIndex = i
			break
		}
	}
	return sets[startIndex:stopIndex]
}
func areComplementary(bigSet, smallSet bookSet) bool {
for i := range bigSet {
if bigSet[i] && smallSet[i] {
return false
}
}
return true
}
func mergeSets(bigSet, smallSet bookSet) {
for i := range bigSet {
bigSet[i] = bigSet[i] || smallSet[i]
smallSet[i] = false
}
} | go/harrypotter/harrypotter.go | 0.698021 | 0.566198 | harrypotter.go | starcoder |
package vector
import "math"
// Vec4 is a four-component vector with components W, X, Y, Z. The H-prefixed
// methods use the (+,-,-,-) signature, treating W as the timelike component.
type Vec4 struct {
	W float64
	X float64
	Y float64
	Z float64
}

// NormSquared returns the squared Euclidean norm W²+X²+Y²+Z².
func (v *Vec4) NormSquared() float64 {
	return Dot4(*v, *v)
}

// HNormSquared returns the squared hyperbolic (Minkowski) norm W²-X²-Y²-Z².
func (v *Vec4) HNormSquared() float64 {
	return v.W*v.W - v.X*v.X - v.Y*v.Y - v.Z*v.Z
}

// Scale multiplies every component by a in place.
func (v *Vec4) Scale(a float64) {
	v.W, v.X, v.Y, v.Z = v.W*a, v.X*a, v.Y*a, v.Z*a
}

// Sum adds w to v in place.
func (v *Vec4) Sum(w Vec4) {
	v.W, v.X, v.Y, v.Z = v.W+w.W, v.X+w.X, v.Y+w.Y, v.Z+w.Z
}

// Diff subtracts w from v in place.
func (v *Vec4) Diff(w Vec4) {
	v.W, v.X, v.Y, v.Z = v.W-w.W, v.X-w.X, v.Y-w.Y, v.Z-w.Z
}

// ToSlice returns the components as a fixed-size array in W, X, Y, Z order.
func (v *Vec4) ToSlice() [4]float64 {
	return [4]float64{v.W, v.X, v.Y, v.Z}
}

// Normalise scales v to unit Euclidean norm in place.
func (v *Vec4) Normalise() {
	v.Scale(1 / math.Sqrt(v.NormSquared()))
}

// SNormalise scales v to unit Euclidean norm in place.
// NOTE(review): identical to Normalise — presumably a spacelike variant
// was intended; confirm before changing.
func (v *Vec4) SNormalise() {
	v.Scale(1 / math.Sqrt(v.NormSquared()))
}

// HNormalise scales v so its hyperbolic norm has absolute value 1, in place.
func (v *Vec4) HNormalise() {
	v.Scale(1 / math.Sqrt(math.Abs(v.HNormSquared())))
}

// Dot returns the Euclidean dot product of v and w.
func (v *Vec4) Dot(w Vec4) float64 {
	return Dot4(*v, w)
}

// Scale4 returns a copy of v with every component multiplied by a.
func Scale4(v Vec4, a float64) Vec4 {
	v.Scale(a)
	return v
}

// Sum4 returns the componentwise sum v+w.
func Sum4(v, w Vec4) Vec4 {
	v.Sum(w)
	return v
}

// Diff4 returns the componentwise difference v-w.
func Diff4(v, w Vec4) Vec4 {
	v.Diff(w)
	return v
}

// Dot4 returns the Euclidean dot product of v and w.
func Dot4(v, w Vec4) float64 {
	return v.W*w.W + v.X*w.X + v.Y*w.Y + v.Z*w.Z
}

// Midpoint4 returns the point halfway between v and w.
func Midpoint4(v, w Vec4) Vec4 {
	return Scale4(Sum4(v, w), 0.5)
}

// NormSquared4 returns the squared Euclidean norm of v.
func NormSquared4(v Vec4) float64 {
	return Dot4(v, v)
}

// DistanceSquared4 returns the squared Euclidean distance between v and w.
func DistanceSquared4(v, w Vec4) float64 {
	return NormSquared4(Diff4(v, w))
}
// Cross4 returns the 4D cross product of three vectors: the vector
// orthogonal to vec1, vec2 and vec3, computed component-wise as the
// signed 3x3 minors of the 3x4 matrix whose rows are the three vectors
// (cofactor expansion along a symbolic first row).
func Cross4(vec1, vec2, vec3 Vec4) Vec4 {
	m := [3][4]float64{vec1.ToSlice(), vec2.ToSlice(), vec3.ToSlice()}
	return Vec4{
		// W: minor obtained by deleting column 0.
		Determinant3(Mat3{
			m[0][1], m[0][2], m[0][3],
			m[1][1], m[1][2], m[1][3],
			m[2][1], m[2][2], m[2][3]}),
		// X: minor from deleting column 1, with alternating sign.
		-Determinant3(Mat3{
			m[0][0], m[0][2], m[0][3],
			m[1][0], m[1][2], m[1][3],
			m[2][0], m[2][2], m[2][3]}),
		// Y: minor from deleting column 2.
		Determinant3(Mat3{
			m[0][0], m[0][1], m[0][3],
			m[1][0], m[1][1], m[1][3],
			m[2][0], m[2][1], m[2][3]}),
		// Z: minor from deleting column 3, with alternating sign.
		-Determinant3(Mat3{
			m[0][0], m[0][1], m[0][2],
			m[1][0], m[1][1], m[1][2],
			m[2][0], m[2][1], m[2][2]}),
	}
}
package kinesis
import (
"fmt"
"github.com/crowdmob/goamz/aws"
)
// ShardIteratorType selects where a shard iterator starts reading.
type ShardIteratorType string

// StreamStatus describes the lifecycle state of a Kinesis stream.
type StreamStatus string

const (
	// Start reading exactly from the position denoted by a specific sequence number.
	ShardIteratorAtSequenceNumber ShardIteratorType = "AT_SEQUENCE_NUMBER"
	// Start reading right after the position denoted by a specific sequence number.
	ShardIteratorAfterSequenceNumber ShardIteratorType = "AFTER_SEQUENCE_NUMBER"
	// Start reading at the last untrimmed record in the shard in the system,
	// which is the oldest data record in the shard.
	ShardIteratorTrimHorizon ShardIteratorType = "TRIM_HORIZON"
	// Start reading just after the most recent record in the shard,
	// so that you always read the most recent data in the shard.
	ShardIteratorLatest ShardIteratorType = "LATEST"

	// The stream is being created. Upon receiving a CreateStream request,
	// Amazon Kinesis immediately returns and sets StreamStatus to CREATING.
	StreamStatusCreating StreamStatus = "CREATING"
	// The stream is being deleted. After a DeleteStream request,
	// the specified stream is in the DELETING state until Amazon Kinesis completes the deletion.
	StreamStatusDeleting StreamStatus = "DELETING"
	// The stream exists and is ready for read and write operations or deletion.
	// You should perform read and write operations only on an ACTIVE stream.
	StreamStatusActive StreamStatus = "ACTIVE"
	// Shards in the stream are being merged or split.
	// Read and write operations continue to work while the stream is in the UPDATING state.
	StreamStatusUpdating StreamStatus = "UPDATING"
)

// Kinesis is the main client object, carrying the AWS credentials and
// region used for requests.
type Kinesis struct {
	aws.Auth
	aws.Region
}
// HashKeyRange is the range of possible hash key values for a shard,
// which is a set of ordered contiguous positive integers.
type HashKeyRange struct {
	EndingHashKey   string
	StartingHashKey string
}

// String renders the range for debugging (trailing newline preserved for
// backward compatibility).
func (h HashKeyRange) String() string {
	return "{EndingHashKey: " + h.EndingHashKey + ", StartingHashKey: " + h.StartingHashKey + "}\n"
}
// SequenceNumberRange is the range of possible sequence numbers for a shard.
type SequenceNumberRange struct {
	EndingSequenceNumber   string
	StartingSequenceNumber string
}

// String renders the range for debugging (trailing newline preserved for
// backward compatibility).
func (s SequenceNumberRange) String() string {
	return "{EndingSequenceNumber: " + s.EndingSequenceNumber + ", StartingSequenceNumber: " + s.StartingSequenceNumber + "}\n"
}
// Shard is a uniquely identified group of data records in an Amazon
// Kinesis stream. Parent shard IDs are set when the shard was created by
// a split or merge operation.
type Shard struct {
	AdjacentParentShardId string
	HashKeyRange          HashKeyRange
	ParentShardId         string
	SequenceNumberRange   SequenceNumberRange
	ShardId               string
}

// StreamDescription describes a stream, including its shards; when
// HasMoreShards is true the shard list is paginated.
type StreamDescription struct {
	HasMoreShards bool
	Shards        []Shard
	StreamARN     string
	StreamName    string
	StreamStatus  StreamStatus
}

// Record is the unit of data of an Amazon Kinesis stream, composed of a
// sequence number, a partition key, and a data blob.
type Record struct {
	Data           []byte
	PartitionKey   string
	SequenceNumber string
}

// DescribeStreamResponse represents the output of a DescribeStream operation.
type DescribeStreamResponse struct {
	StreamDescription StreamDescription
}

// GetRecordsResponse represents the output of a GetRecords operation.
type GetRecordsResponse struct {
	NextShardIterator string
	Records           []Record
}

// GetShardIteratorResponse represents the output of a GetShardIterator operation.
type GetShardIteratorResponse struct {
	ShardIterator string
}

// ListStreamResponse represents the output of a ListStreams operation;
// when HasMoreStreams is true the stream list is paginated.
type ListStreamResponse struct {
	HasMoreStreams bool
	StreamNames    []string
}

// PutRecordResponse represents the output of a PutRecord operation.
type PutRecordResponse struct {
	SequenceNumber string
	ShardId        string
}
// Error represents an error in an operation with Kinesis (following
// goamz/Dynamodb). Code and Message are decoded from the JSON error body.
type Error struct {
	StatusCode int // HTTP status code (200, 403, ...)
	Status     string
	Code       string `json:"__type"`
	Message    string `json:"message"`
}

// Error formats the error as "[HTTP <status>] <code> : <message>\n".
func (err Error) Error() string {
	msg := fmt.Sprintf("[HTTP %d] %s : %s\n", err.StatusCode, err.Code, err.Message)
	return msg
}
package xslice
import (
"fmt"
"math"
"math/rand"
"reflect"
)
// Version returns the package version.
func Version() string {
	return "0.21.0"
}

// Author returns the package author.
func Author() string {
	return "[<NAME>](https://www.likexian.com/)"
}

// License returns the package license.
func License() string {
	return "Licensed under the Apache License 2.0"
}

// IsSlice reports whether v is a slice. A nil v yields false
// (reflect.ValueOf(nil) has kind Invalid).
func IsSlice(v interface{}) bool {
	kind := reflect.ValueOf(v).Kind()
	return kind == reflect.Slice
}
// Unique returns the unique values of a slice, preserving first-occurrence
// order. Element identity is based on hashValue (the %#v rendering), so
// values with identical Go-syntax representations collide.
func Unique(v interface{}) interface{} {
	vv := reflect.ValueOf(v)
	if vv.Kind() != reflect.Slice {
		panic("xslice: v expected to be a slice")
	}
	h := make(map[interface{}]bool)
	r := reflect.MakeSlice(reflect.TypeOf(v), 0, vv.Cap())
	for i := 0; i < vv.Len(); i++ {
		hv := hashValue(vv.Index(i).Interface())
		if _, ok := h[hv]; !ok {
			h[hv] = true
			r = reflect.Append(r, vv.Index(i))
		}
	}
	return r.Interface()
}

// IsUnique reports whether all slice values are distinct (by hashValue).
// Slices of length 0 or 1 are trivially unique.
func IsUnique(v interface{}) bool {
	vv := reflect.ValueOf(v)
	if vv.Kind() != reflect.Slice {
		panic("xslice: v expected to be a slice")
	}
	if vv.Len() <= 1 {
		return true
	}
	h := make(map[interface{}]bool)
	for i := 0; i < vv.Len(); i++ {
		hv := hashValue(vv.Index(i).Interface())
		if _, ok := h[hv]; ok {
			return false
		}
		h[hv] = true
	}
	return true
}

// Intersect returns the values of y that also occur in x, in y's order.
// The result has x's slice type; x and y are expected to share it.
func Intersect(x, y interface{}) interface{} {
	xx := reflect.ValueOf(x)
	if xx.Kind() != reflect.Slice {
		panic("xslice: x expected to be a slice")
	}
	yy := reflect.ValueOf(y)
	if yy.Kind() != reflect.Slice {
		panic("xslice: y expected to be a slice")
	}
	h := make(map[interface{}]bool)
	for i := 0; i < xx.Len(); i++ {
		h[hashValue(xx.Index(i).Interface())] = true
	}
	r := reflect.MakeSlice(reflect.TypeOf(x), 0, 0)
	for i := 0; i < yy.Len(); i++ {
		if _, ok := h[hashValue(yy.Index(i).Interface())]; ok {
			r = reflect.Append(r, yy.Index(i))
		}
	}
	return r.Interface()
}

// Different returns the values that are in x but not in y, in x's order.
func Different(x, y interface{}) interface{} {
	xx := reflect.ValueOf(x)
	if xx.Kind() != reflect.Slice {
		panic("xslice: x expected to be a slice")
	}
	yy := reflect.ValueOf(y)
	if yy.Kind() != reflect.Slice {
		panic("xslice: y expected to be a slice")
	}
	h := make(map[interface{}]bool)
	for i := 0; i < yy.Len(); i++ {
		h[hashValue(yy.Index(i).Interface())] = true
	}
	r := reflect.MakeSlice(reflect.TypeOf(x), 0, 0)
	for i := 0; i < xx.Len(); i++ {
		if _, ok := h[hashValue(xx.Index(i).Interface())]; !ok {
			r = reflect.Append(r, xx.Index(i))
		}
	}
	return r.Interface()
}

// Merge appends the values of y that are not already in x.
// NOTE(review): r aliases x's backing array (Slice keeps x's capacity),
// so an append may write into x's spare capacity and be visible through
// other slices sharing it — confirm callers do not rely on x's backing
// array being untouched.
func Merge(x, y interface{}) interface{} {
	xx := reflect.ValueOf(x)
	if xx.Kind() != reflect.Slice {
		panic("xslice: x expected to be a slice")
	}
	yy := reflect.ValueOf(y)
	if yy.Kind() != reflect.Slice {
		panic("xslice: y expected to be a slice")
	}
	h := make(map[interface{}]bool)
	for i := 0; i < xx.Len(); i++ {
		h[hashValue(xx.Index(i).Interface())] = true
	}
	r := xx.Slice(0, xx.Len())
	for i := 0; i < yy.Len(); i++ {
		if _, ok := h[hashValue(yy.Index(i).Interface())]; !ok {
			r = reflect.Append(r, yy.Index(i))
		}
	}
	return r.Interface()
}
// Reverse reverses the elements of the slice in place.
// It panics when v is not a slice.
func Reverse(v interface{}) {
	vv := reflect.ValueOf(v)
	if vv.Kind() != reflect.Slice {
		panic("xslice: v expected to be a slice")
	}
	swap := reflect.Swapper(v)
	lo, hi := 0, vv.Len()-1
	for lo < hi {
		swap(lo, hi)
		lo++
		hi--
	}
}
// Shuffle shuffles the slice in place using a Fisher-Yates walk from the
// last element down. It panics when v is not a slice.
// NOTE(review): relies on the global math/rand source; no seeding is done
// here, so reproducibility depends on the caller.
func Shuffle(v interface{}) {
	vv := reflect.ValueOf(v)
	if vv.Kind() != reflect.Slice {
		panic("xslice: v expected to be a slice")
	}
	swap := reflect.Swapper(v)
	for i := vv.Len() - 1; i >= 1; i-- {
		// Pick a uniform index in [0, i] and swap it into position i.
		j := rand.Intn(i + 1)
		swap(i, j)
	}
}
// Fill returns a new slice consisting of count copies of v.
// It panics when count is not positive.
func Fill(v interface{}, count int) interface{} {
	if count <= 0 {
		panic("xslice: count expected to be greater than 0")
	}
	r := reflect.MakeSlice(reflect.SliceOf(reflect.TypeOf(v)), count, count)
	vv := reflect.ValueOf(v)
	for i := 0; i < count; i++ {
		r.Index(i).Set(vv)
	}
	return r.Interface()
}
// Chunk splits v into chunks of at most size elements; the last chunk may
// be shorter. Each chunk is a fresh slice (no aliasing of v's backing
// array). It panics when v is not a slice or size is not positive.
func Chunk(v interface{}, size int) interface{} {
	vv := reflect.ValueOf(v)
	if vv.Kind() != reflect.Slice {
		panic("xslice: v expected to be a slice")
	}
	if size <= 0 {
		panic("xslice: size expected to be greater than 0")
	}
	r := reflect.MakeSlice(reflect.SliceOf(reflect.TypeOf(v)), 0, 0)
	total := vv.Len()
	for lo := 0; lo < total; lo += size {
		hi := lo + size
		if hi > total {
			hi = total
		}
		chunk := reflect.MakeSlice(reflect.TypeOf(v), 0, 0)
		for j := lo; j < hi; j++ {
			chunk = reflect.Append(chunk, vv.Index(j))
		}
		r = reflect.Append(r, chunk)
	}
	return r.Interface()
}
// Concat returns a new flattened slice of a []slice; a v that is not a
// slice of slices is returned unchanged (it is already flat).
//
// Fix: previously an empty slice of slices was returned as-is, so
// Concat([][]T{}) had type [][]T instead of the []T every non-empty call
// produces — callers type-asserting the documented element type panicked
// on empty input. The empty case now falls through to produce an empty []T.
func Concat(v interface{}) interface{} {
	vv := reflect.ValueOf(v)
	if vv.Kind() != reflect.Slice {
		panic("xslice: v expected to be a slice")
	}
	vt := reflect.TypeOf(v)
	if vt.Elem().Kind() != reflect.Slice {
		// Not nested: nothing to flatten.
		return v
	}
	r := reflect.MakeSlice(vt.Elem(), 0, 0)
	for i := 0; i < vv.Len(); i++ {
		inner := vv.Index(i)
		for j := 0; j < inner.Len(); j++ {
			r = reflect.Append(r, inner.Index(j))
		}
	}
	return r.Interface()
}
// Filter returns the slice values for which fn returns true, preserving
// order. fn must be a func taking one element and returning bool; Filter
// panics otherwise, or when v is not a slice.
func Filter(v interface{}, fn interface{}) interface{} {
	vv := reflect.ValueOf(v)
	if vv.Kind() != reflect.Slice {
		panic("xslice: v expected to be a slice")
	}
	err := CheckIsFunc(fn, 1, 1)
	if err != nil {
		panic("xslice: " + err.Error())
	}
	fv := reflect.ValueOf(fn)
	ot := fv.Type().Out(0)
	if ot.Kind() != reflect.Bool {
		panic("xslice: fn expected to return bool but got " + ot.Kind().String())
	}
	r := reflect.MakeSlice(reflect.TypeOf(v), 0, 0)
	for i := 0; i < vv.Len(); i++ {
		if fv.Call([]reflect.Value{vv.Index(i)})[0].Interface().(bool) {
			r = reflect.Append(r, vv.Index(i))
		}
	}
	return r.Interface()
}

// Map applies fn to every element of the slice and returns a new slice of
// fn's result type. fn must be a func with one argument and one return;
// Map panics otherwise, or when v is not a slice.
func Map(v interface{}, fn interface{}) interface{} {
	vv := reflect.ValueOf(v)
	if vv.Kind() != reflect.Slice {
		panic("xslice: v expected to be a slice")
	}
	err := CheckIsFunc(fn, 1, 1)
	if err != nil {
		panic("xslice: " + err.Error())
	}
	fv := reflect.ValueOf(fn)
	// The result element type is fn's return type, not v's element type.
	ot := fv.Type().Out(0)
	r := reflect.MakeSlice(reflect.SliceOf(ot), 0, 0)
	for i := 0; i < vv.Len(); i++ {
		r = reflect.Append(r, fv.Call([]reflect.Value{vv.Index(i)})[0])
	}
	return r.Interface()
}

// Reduce folds the slice left-to-right using fn, starting from the first
// element. fn must be func(T, T) T where T is the slice element type;
// Reduce panics otherwise, or when v is not a slice or is empty.
func Reduce(v interface{}, fn interface{}) interface{} {
	vv := reflect.ValueOf(v)
	if vv.Kind() != reflect.Slice {
		panic("xslice: v expected to be a slice")
	}
	if vv.Len() == 0 {
		panic("xslice: v expected to be not empty")
	}
	err := CheckIsFunc(fn, 2, 1)
	if err != nil {
		panic("xslice: " + err.Error())
	}
	fv := reflect.ValueOf(fn)
	if vv.Type().Elem() != fv.Type().In(0) || vv.Type().Elem() != fv.Type().In(1) {
		panic(fmt.Sprintf("xslice: fn expected to have (%s, %s) arguments but got (%s, %s)",
			vv.Type().Elem(), vv.Type().Elem(), fv.Type().In(0), fv.Type().In(1)))
	}
	if vv.Type().Elem() != fv.Type().Out(0) {
		panic(fmt.Sprintf("xslice: fn expected to return %s but got %s",
			vv.Type().Elem(), fv.Type().Out(0).String()))
	}
	r := vv.Index(0)
	for i := 1; i < vv.Len(); i++ {
		r = fv.Call([]reflect.Value{r, vv.Index(i)})[0]
	}
	return r.Interface()
}
// CheckIsFunc checks that fn is a function and, when provided, that it has
// n[0] arguments and n[1] return values. It returns a descriptive error on
// the first mismatch, nil otherwise.
//
// Fix: the error strings said "excepted" instead of "expected", and the
// kind message concatenated into the Errorf format string instead of using
// a %s verb.
func CheckIsFunc(fn interface{}, n ...int) error {
	if fn == nil {
		return fmt.Errorf("fn expected to be a function but got nil")
	}
	ft := reflect.TypeOf(fn)
	if ft.Kind() != reflect.Func {
		return fmt.Errorf("fn expected to be a function but got %s", ft.Kind())
	}
	if len(n) >= 1 && n[0] != ft.NumIn() {
		return fmt.Errorf("fn expected to have %d arguments but got %d", n[0], ft.NumIn())
	}
	if len(n) >= 2 && n[1] != ft.NumOut() {
		return fmt.Errorf("fn expected to have %d returns but got %d", n[1], ft.NumOut())
	}
	return nil
}
// hashValue maps an arbitrary value to a hashable (map-key safe)
// representation: the value's Go-syntax string form.
func hashValue(x interface{}) interface{} {
	key := fmt.Sprintf("%#v", x)
	return key
}
package instago
// JSON is a generically decoded JSON object: a map from string keys to
// arbitrary values. Use it when you do not want to parse JSON data into a
// dedicated Go struct, or when the object's shape is unknown or constantly
// changing. Instagram API responses carry a lot of additional data that
// ought not appear in the final Go response, so this package extracts only
// the fields it needs from these maps.
type JSON map[string]interface{}
// JSONString returns data as a string, or the empty string when data is not
// a string.
func JSONString(data interface{}) string {
	s, ok := data.(string)
	if !ok {
		return ""
	}
	return s
}
// JSONInt returns data as an int, or 0 when data is not a JSON number.
// encoding/json decodes every JSON number as float64 (even integral ones),
// so the value is read as a float64 and truncated toward zero.
func JSONInt(data interface{}) int {
	f := JSONFloat(data)
	return int(f)
}
// JSONFloat returns data as a float64, or 0 when data is not a float64.
func JSONFloat(data interface{}) float64 {
	f, ok := data.(float64)
	if !ok {
		return 0
	}
	return f
}
// JSONArray returns data as a []interface{}; any other input yields a
// non-nil empty slice.
func JSONArray(data interface{}) []interface{} {
	arr, ok := data.([]interface{})
	if !ok {
		return make([]interface{}, 0)
	}
	return arr
}
// JSONObject returns data as a JSON object. It accepts either a raw decoded
// map[string]interface{} or a value already carrying the named JSON type;
// anything else yields a non-nil empty object.
func JSONObject(data interface{}) JSON {
	switch obj := data.(type) {
	case map[string]interface{}:
		return obj
	case JSON:
		// Values produced by this package are typed JSON; a plain-map type
		// assertion alone would miss them and drop the data.
		return obj
	}
	return make(map[string]interface{})
}
// JSONStringArray returns data as a slice of strings. Non-array input yields
// an empty slice; non-string elements become empty strings (see JSONString).
func JSONStringArray(data interface{}) []string {
	items := JSONArray(data)
	// Pre-size to the element count to avoid repeated reallocation.
	strs := make([]string, 0, len(items))
	for _, item := range items {
		strs = append(strs, JSONString(item))
	}
	return strs
}
// JSONObjectArray returns data as a slice of JSON objects. Non-array input
// yields an empty slice; non-object elements become empty objects (see
// JSONObject).
func JSONObjectArray(data interface{}) []JSON {
	items := JSONArray(data)
	// Pre-size to the element count to avoid repeated reallocation.
	objs := make([]JSON, 0, len(items))
	for _, item := range items {
		objs = append(objs, JSONObject(item))
	}
	return objs
}
// String returns the value stored under key converted with JSONString
// (empty string when the key is missing or the value is not a string).
func (json JSON) String(key string) string {
	return JSONString(json[key])
}
// Int returns the value stored under key converted with JSONInt
// (0 when the key is missing or the value is not a JSON number).
func (json JSON) Int(key string) int {
	return JSONInt(json[key])
}
// Float returns the value stored under key converted with JSONFloat
// (0 when the key is missing or the value is not a float64).
func (json JSON) Float(key string) float64 {
	return JSONFloat(json[key])
}
// Array returns the value stored under key converted with JSONArray
// (an empty slice when the key is missing or the value is not an array).
func (json JSON) Array(key string) []interface{} {
	return JSONArray(json[key])
}
// Object returns the value stored under key converted with JSONObject
// (an empty object when the key is missing or the value is not an object).
func (json JSON) Object(key string) JSON {
	return JSONObject(json[key])
}
// StringArray returns the value stored under key converted with
// JSONStringArray.
func (json JSON) StringArray(key string) []string {
	return JSONStringArray(json[key])
}
//Utility wrapper around JSONObjectArray
func (json JSON) ObjectArray(key string) []JSON {
return JSONObjectArray(json[key])
} | jsonutil.go | 0.688468 | 0.42662 | jsonutil.go | starcoder |
package beacon
import (
"fmt"
"github.com/lightningnetwork/lnd/tlv"
"github.com/xplorfin/moneysocket-go/moneysocket/beacon/location"
"github.com/xplorfin/moneysocket-go/moneysocket/beacon/util"
"github.com/xplorfin/moneysocket-go/moneysocket/beacon/util/bigsize"
encodeUtils "github.com/xplorfin/moneysocket-go/moneysocket/util"
)
// MoneysocketHrp is the human readable part (https://en.bitcoin.it/wiki/BIP_0173#Bech32 )
// used when bech32-encoding the beacon TLV (type-length-value, defined in
// BOLT #1: https://git.io/JLCRq ).
const MoneysocketHrp = "moneysocket"
// Beacon contains a SharedSeed for end-to-end encryption and a list of
// location.Location values describing where the peer can be reached.
type Beacon struct {
	seed SharedSeed
	locations []location.Location
}
// NewBeacon creates a Beacon with no locations and a freshly generated
// SharedSeed.
func NewBeacon() Beacon {
	seed := NewSharedSeed()
	return NewBeaconFromSharedSeed(seed)
}
// NewBeaconFromSharedSeed creates a Beacon with no locations and the given
// SharedSeed.
func NewBeaconFromSharedSeed(seed SharedSeed) Beacon {
	var b Beacon
	b.seed = seed
	return b
}
// Locations returns the list of locations stored in the Beacon.
func (b Beacon) Locations() []location.Location {
	return b.locations
}
// ToObject generates a json-encodable map of the Beacon's data: the shared
// seed under "shared_seed" and each location's object form under "locations".
func (b Beacon) ToObject() map[string]interface{} {
	m := make(map[string]interface{}, 2)
	m["shared_seed"] = b.seed.ToString()
	// Pre-size the slice to the location count to avoid repeated growth.
	locDict := make([]interface{}, 0, len(b.locations))
	for _, loc := range b.locations {
		locDict = append(locDict, loc.ToObject())
	}
	m["locations"] = locDict
	return m
}
// AddLocation appends a location to the beacon's location list. The pointer
// receiver makes the append visible to the caller's Beacon.
func (b *Beacon) AddLocation(loc location.Location) {
	b.locations = append(b.locations, loc)
}
// GetSharedSeed fetches the shared seed from the beacon.
func (b Beacon) GetSharedSeed() SharedSeed {
	return b.seed
}
// EncodeLocationListTlvs encodes the beacon's locations as a single
// location-list TLV whose payload is a location-count record followed by each
// location's own TLV encoding.
func (b Beacon) EncodeLocationListTlvs() []byte {
	count := uint64(len(b.locations))
	countRecord := tlv.MakeStaticRecord(util.LocationCountTLVType, &count, tlv.VarIntSize(count), util.EVarInt, util.DVarInt)
	payload := encodeUtils.TLVRecordToBytes(countRecord)
	for _, loc := range b.locations {
		payload = append(payload, loc.EncodedTLV()...)
	}
	listRecord := tlv.MakePrimitiveRecord(util.LocationListTLVType, &payload)
	return encodeUtils.TLVRecordToBytes(listRecord)
}
// EncodeTLV encodes the beacon as a single TLV record wrapping the shared
// seed TLV followed by the location-list TLV.
func (b Beacon) EncodeTLV() []byte {
	payload := append(b.seed.EncodedTLV(), b.EncodeLocationListTlvs()...)
	return encodeUtils.TLVRecordToBytes(tlv.MakePrimitiveRecord(util.BeaconTLVType, &payload))
}
// ToBech32Str encodes the beacon's TLV as a bech32 string
// (https://en.bitcoin.it/wiki/BIP_0173#Bech32 ).
func (b Beacon) ToBech32Str() string {
	res, err := encodeUtils.Bech32EncodeBytes(b.EncodeTLV(), MoneysocketHrp)
	if err != nil {
		// Encoding failure is theoretically possible with enough locations;
		// since the signature has no error return, surface it as a panic.
		panic(err)
	}
	return res
}
// DecodeTLV decodes a TLV (type-length-value, defined in BOLT #1:
// https://git.io/JLCRq) into a Beacon. It returns an error when the bytes do
// not form a valid beacon: the payload must be a beacon TLV wrapping a
// shared-seed TLV followed by a location-list TLV (a location count and that
// many location TLVs).
func DecodeTLV(b []byte) (beacon Beacon, err error) {
	beaconTlv, _, err := util.TLVPop(b)
	if err != nil {
		return beacon, err
	}
	if beaconTlv.Type() != util.BeaconTLVType {
		return beacon, fmt.Errorf("got unexpected tlv type: %d expected %d", beaconTlv.Type(), util.BeaconTLVType)
	}
	ssTlv, remainder, err := util.TLVPop(beaconTlv.Value())
	if err != nil {
		return beacon, err
	}
	if ssTlv.Type() != util.SharedSeedTLVType {
		return beacon, fmt.Errorf("got unexpected shared seed tlv type %d, expected: %d", ssTlv.Type(), util.SharedSeedTLVType)
	}
	llTlv, _, err := util.TLVPop(remainder)
	if err != nil {
		return beacon, err
	}
	if llTlv.Type() != util.LocationListTLVType {
		return beacon, fmt.Errorf("got unexpected location list tlv type: %d, expected: %d", llTlv.Type(), util.LocationListTLVType)
	}
	beacon.seed, err = BytesToSharedSeed(ssTlv.Value())
	if err != nil {
		return beacon, err
	}
	// The location-list payload starts with a location-count record.
	lcTlv, remainder, err := util.TLVPop(llTlv.Value())
	if err != nil {
		return beacon, err
	}
	if lcTlv.Type() != util.LocationCountTLVType {
		return beacon, fmt.Errorf("got unexpected location list tlv type: %d, expected: %d", lcTlv.Type(), util.LocationCountTLVType)
	}
	locationCount, _, err := bigsize.Pop(lcTlv.Value())
	if err != nil {
		return beacon, err
	}
	// TODO break this loop out into its own function to reduce cyclomatic
	// complexity.
	var locations []location.Location
	for i := 0; i < int(locationCount); i++ {
		llTlv, remainder, err = util.TLVPop(remainder)
		if err != nil {
			return beacon, err
		}
		var loc location.Location
		switch llTlv.Type() {
		case util.WebsocketLocationTLVType:
			loc, err = location.WebsocketLocationFromTLV(llTlv)
		case util.WebRTCLocationTLVType:
			loc, err = location.WebRTCLocationFromTLV(llTlv)
		case util.BluetoothLocationTLVType:
			loc, err = location.BluetoothLocationFromTLV(llTlv)
		case util.NFCLocationTLVType:
			loc, err = location.NfcLocationFromTLV(llTlv)
		default:
			// Fix: return an error instead of panicking. Decoding operates on
			// wire data, and a library should not crash the caller when it
			// meets an unrecognized (possibly future) location type.
			return beacon, fmt.Errorf("location type %d not yet implemented", llTlv.Type())
		}
		if err != nil {
			return beacon, err
		}
		locations = append(locations, loc)
	}
	beacon.locations = locations
	return beacon, nil
}
// DecodeFromBech32Str decodes a Beacon from a bech32 string (https://en.bitcoin.it/wiki/BIP_0173#Bech32 ).
func DecodeFromBech32Str(bech32 string) (Beacon, error) {
hrp, decodedBytes, err := encodeUtils.Bech32DecodeBytes(bech32)
if err != nil {
return Beacon{}, err
}
_ = decodedBytes
if hrp != MoneysocketHrp {
return Beacon{}, fmt.Errorf("got hrp %s when decoding tlv, expected %s", hrp, MoneysocketHrp)
}
return DecodeTLV(decodedBytes)
} | moneysocket/beacon/beacon.go | 0.682891 | 0.484563 | beacon.go | starcoder |
package telegraf
import (
"time"
)
// Accumulator allows adding metrics to the processing flow.
type Accumulator interface {
	// AddFields adds a metric to the accumulator with the given measurement
	// name, fields, and tags (and timestamp). If a timestamp is not provided,
	// then the accumulator sets it to "now".
	AddFields(measurement string,
		fields map[string]interface{},
		tags map[string]string,
		t ...time.Time)
	// AddGauge is the same as AddFields, but will add the metric as a "Gauge" type.
	AddGauge(measurement string,
		fields map[string]interface{},
		tags map[string]string,
		t ...time.Time)
	// AddCounter is the same as AddFields, but will add the metric as a "Counter" type.
	AddCounter(measurement string,
		fields map[string]interface{},
		tags map[string]string,
		t ...time.Time)
	// AddSummary is the same as AddFields, but will add the metric as a "Summary" type.
	AddSummary(measurement string,
		fields map[string]interface{},
		tags map[string]string,
		t ...time.Time)
	// AddHistogram is the same as AddFields, but will add the metric as a "Histogram" type.
	AddHistogram(measurement string,
		fields map[string]interface{},
		tags map[string]string,
		t ...time.Time)
	// AddMetric adds a metric to the accumulator.
	AddMetric(Metric)
	// SetPrecision sets the timestamp rounding precision. All metrics
	// added to the accumulator will have their timestamp rounded to the
	// nearest multiple of precision.
	SetPrecision(precision time.Duration)
	// AddError reports an error encountered while gathering or processing.
	AddError(err error)
	// WithTracking upgrades to a TrackingAccumulator with space for maxTracked
	// metrics/batches.
	WithTracking(maxTracked int) TrackingAccumulator
}
// TrackingID uniquely identifies a tracked metric group.
type TrackingID uint64
// DeliveryInfo provides the results of a delivered metric group.
type DeliveryInfo interface {
	// ID returns the TrackingID of the delivered metric group.
	ID() TrackingID
	// Delivered returns true if the metric was processed successfully.
	Delivered() bool
}
// TrackingAccumulator is an Accumulator that provides a signal when the
// metric has been fully processed. Sending more metrics than the accumulator
// has been allocated for without reading status from the Accepted or Rejected
// channels is an error.
type TrackingAccumulator interface {
	Accumulator
	// AddTrackingMetric adds the Metric and arranges for tracking feedback
	// after processing.
	AddTrackingMetric(m Metric) TrackingID
	// AddTrackingMetricGroup adds a group of Metrics and arranges for a signal
	// when the group has been processed.
	AddTrackingMetricGroup(group []Metric) TrackingID
	// Delivered returns a channel that will contain the tracking results.
	Delivered() <-chan DeliveryInfo
}
// This package implements a basic LISP interpretor for embedding in a go program for scripting.
// This file implements data elements.
package golisp
// Cxr
// WalkList follows a car/cdr path through d: each character of path, read
// from right to left, applies Car ('a') or Cdr ('d') to the current cell.
// The walk yields nil when it falls off the list, meets a cell that is not a
// pair/alist/dotted pair, or sees an unknown path character.
func WalkList(d *Data, path string) *Data {
	cell := d
	for i := len(path) - 1; i >= 0; i-- {
		if cell == nil {
			return nil
		}
		if !PairP(cell) && !AlistP(cell) && !DottedPairP(cell) {
			return nil
		}
		if path[i] == 'a' {
			cell = Car(cell)
		} else if path[i] == 'd' {
			cell = Cdr(cell)
		} else {
			// Unknown character: poison the walk so it resolves to nil.
			cell = nil
		}
	}
	return cell
}
// Cxxr — two-step accessors: the name's letters, left to right, give the
// outermost-to-innermost car/cdr applications, e.g. Cadr(d) == Car(Cdr(d)).
// Caar returns the car of the car of d.
func Caar(d *Data) *Data {
	return WalkList(d, "aa")
}
// Cadr returns the car of the cdr of d (the second element of a list).
func Cadr(d *Data) *Data {
	return WalkList(d, "ad")
}
// Cdar returns the cdr of the car of d.
func Cdar(d *Data) *Data {
	return WalkList(d, "da")
}
// Cddr returns the cdr of the cdr of d.
func Cddr(d *Data) *Data {
	return WalkList(d, "dd")
}
// Cxxxr — three-step accessors, named on the same scheme.
// Caaar returns (car (car (car d))).
func Caaar(d *Data) *Data {
	return WalkList(d, "aaa")
}
// Caadr returns (car (car (cdr d))).
func Caadr(d *Data) *Data {
	return WalkList(d, "aad")
}
// Cadar returns (car (cdr (car d))).
func Cadar(d *Data) *Data {
	return WalkList(d, "ada")
}
// Caddr returns (car (cdr (cdr d))) (the third element of a list).
func Caddr(d *Data) *Data {
	return WalkList(d, "add")
}
// Cdaar returns (cdr (car (car d))).
func Cdaar(d *Data) *Data {
	return WalkList(d, "daa")
}
// Cdadr returns (cdr (car (cdr d))).
func Cdadr(d *Data) *Data {
	return WalkList(d, "dad")
}
// Cddar returns (cdr (cdr (car d))).
func Cddar(d *Data) *Data {
	return WalkList(d, "dda")
}
// Cdddr returns (cdr (cdr (cdr d))).
func Cdddr(d *Data) *Data {
	return WalkList(d, "ddd")
}
// Nth returns the nth element (1-based) of the list d, or nil when d is nil
// or n is out of range.
func Nth(d *Data, n int) *Data {
	if d == nil || n < 1 || n > Length(d) {
		return nil
	}
	cell := d
	for i := 1; i < n; i++ {
		cell = Cdr(cell)
	}
	return Car(cell)
}
// First returns element 1 of the list d (nil when out of range).
func First(d *Data) *Data {
	return Nth(d, 1)
}
// Second returns element 2 of the list d.
func Second(d *Data) *Data {
	return Nth(d, 2)
}
// Third returns element 3 of the list d.
func Third(d *Data) *Data {
	return Nth(d, 3)
}
// Fourth returns element 4 of the list d.
func Fourth(d *Data) *Data {
	return Nth(d, 4)
}
// Fifth returns element 5 of the list d.
func Fifth(d *Data) *Data {
	return Nth(d, 5)
}
// Sixth returns element 6 of the list d.
func Sixth(d *Data) *Data {
	return Nth(d, 6)
}
// Seventh returns element 7 of the list d.
func Seventh(d *Data) *Data {
	return Nth(d, 7)
}
// Eighth returns element 8 of the list d.
func Eighth(d *Data) *Data {
	return Nth(d, 8)
}
// Ninth returns element 9 of the list d.
func Ninth(d *Data) *Data {
	return Nth(d, 9)
}
// Tenth returns element 10 of the list d.
func Tenth(d *Data) *Data {
	return Nth(d, 10)
}
func SetNth(list *Data, index int, value *Data) *Data {
for i := index; i > 1; list, i = Cdr(list), i-1 {
}
if !NilP(list) {
ConsValue(list).Car = value
}
return value
} | list_access.go | 0.768993 | 0.451447 | list_access.go | starcoder |
package engine
import (
"fmt"
"github.com/notnil/chess"
)
// squareValues maps each board square to a positional bonus (or penalty)
// added to a piece's base material value.
type squareValues map[chess.Square]float32
// pieceValues holds the base material value for each piece type.
var pieceValues map[chess.PieceType]float32 = map[chess.PieceType]float32{
	chess.Pawn: 10,
	chess.Knight: 30,
	chess.Bishop: 30,
	chess.Rook: 50,
	chess.Queen: 90,
	chess.King: 900,
}
// whitePawnValues is the piece-square table for white pawns, listed from
// rank 8 down to rank 1.
var whitePawnValues = squareValues{
	chess.A8: 0.0, chess.B8: 0.0, chess.C8: 0.0, chess.D8: 0.0, chess.E8: 0.0, chess.F8: 0.0, chess.G8: 0.0, chess.H8: 0.0,
	chess.A7: 5.0, chess.B7: 5.0, chess.C7: 5.0, chess.D7: 5.0, chess.E7: 5.0, chess.F7: 5.0, chess.G7: 5.0, chess.H7: 5.0,
	chess.A6: 1.0, chess.B6: 1.0, chess.C6: 2.0, chess.D6: 3.0, chess.E6: 3.0, chess.F6: 2.0, chess.G6: 1.0, chess.H6: 1.0,
	chess.A5: 0.5, chess.B5: 0.5, chess.C5: 1.0, chess.D5: 2.5, chess.E5: 2.5, chess.F5: 1.0, chess.G5: 0.5, chess.H5: 0.5,
	chess.A4: 0.0, chess.B4: 0.0, chess.C4: 0.0, chess.D4: 2.0, chess.E4: 2.0, chess.F4: 0.0, chess.G4: 0.0, chess.H4: 0.0,
	chess.A3: 0.5, chess.B3: -0.5, chess.C3: -1.0, chess.D3: 0.0, chess.E3: 0.0, chess.F3: -1.0, chess.G3: -0.5, chess.H3: 0.5,
	chess.A2: 0.5, chess.B2: 1.0, chess.C2: 1.0, chess.D2: -2.0, chess.E2: -2.0, chess.F2: 1.0, chess.G2: 1.0, chess.H2: 0.5,
	chess.A1: 0.0, chess.B1: 0.0, chess.C1: 0.0, chess.D1: 0.0, chess.E1: 0.0, chess.F1: 0.0, chess.G1: 0.0, chess.H1: 0.0,
}
// blackPawnValues is the mirrored piece-square table for black pawns.
// NOTE(review): chess.H7 is 5.0 but the mirrored white entry (chess.H2) is
// 0.5 — this looks like a missing decimal point; confirm against the source
// table before relying on it.
var blackPawnValues = squareValues{
	chess.A8: 0.0, chess.B8: 0.0, chess.C8: 0.0, chess.D8: 0.0, chess.E8: 0.0, chess.F8: 0.0, chess.G8: 0.0, chess.H8: 0.0,
	chess.A7: 0.5, chess.B7: 1.0, chess.C7: 1.0, chess.D7: -2.0, chess.E7: -2.0, chess.F7: 1.0, chess.G7: 1.0, chess.H7: 5.0,
	chess.A6: 0.5, chess.B6: -0.5, chess.C6: -1.0, chess.D6: 0.0, chess.E6: 0.0, chess.F6: -1.0, chess.G6: -0.5, chess.H6: 0.5,
	chess.A5: 0.0, chess.B5: 0.0, chess.C5: 0.0, chess.D5: 2.0, chess.E5: 2.0, chess.F5: 0.0, chess.G5: 0.0, chess.H5: 0.0,
	chess.A4: 0.5, chess.B4: 0.5, chess.C4: 1.0, chess.D4: 2.5, chess.E4: 2.5, chess.F4: 1.0, chess.G4: 0.5, chess.H4: 0.5,
	chess.A3: 1.0, chess.B3: 1.0, chess.C3: 2.0, chess.D3: 3.0, chess.E3: 3.0, chess.F3: 2.0, chess.G3: 1.0, chess.H3: 1.0,
	chess.A2: 5.0, chess.B2: 5.0, chess.C2: 5.0, chess.D2: 5.0, chess.E2: 5.0, chess.F2: 5.0, chess.G2: 5.0, chess.H2: 5.0,
	chess.A1: 0.0, chess.B1: 0.0, chess.C1: 0.0, chess.D1: 0.0, chess.E1: 0.0, chess.F1: 0.0, chess.G1: 0.0, chess.H1: 0.0,
}
// whiteKnightValues is the piece-square table for white knights; it rewards
// central squares and penalizes the rim.
var whiteKnightValues = squareValues{
	chess.A8: -5.0, chess.B8: -4.0, chess.C8: -3.0, chess.D8: -3.0, chess.E8: -3.0, chess.F8: -3.0, chess.G8: -4.0, chess.H8: -5.0,
	chess.A7: -4.0, chess.B7: -2.0, chess.C7: 0.0, chess.D7: 0.0, chess.E7: 0.0, chess.F7: 0.0, chess.G7: -2.0, chess.H7: -4.0,
	chess.A6: -3.0, chess.B6: 0.0, chess.C6: 1.0, chess.D6: 1.5, chess.E6: 1.5, chess.F6: 1.0, chess.G6: 0.0, chess.H6: -3.0,
	chess.A5: -3.0, chess.B5: 0.5, chess.C5: 1.5, chess.D5: 2.0, chess.E5: 2.0, chess.F5: 1.5, chess.G5: 0.5, chess.H5: -3.0,
	chess.A4: -3.0, chess.B4: 0.0, chess.C4: 1.5, chess.D4: 2.0, chess.E4: 2.0, chess.F4: 1.0, chess.G4: 0.0, chess.H4: -3.0,
	chess.A3: -3.0, chess.B3: 0.5, chess.C3: 1.0, chess.D3: 1.5, chess.E3: 1.5, chess.F3: 1.0, chess.G3: 0.5, chess.H3: -3.0,
	chess.A2: -4.0, chess.B2: -2.0, chess.C2: 0.0, chess.D2: 0.5, chess.E2: 0.5, chess.F2: 0.0, chess.G2: -2.0, chess.H2: -4.0,
	chess.A1: -5.0, chess.B1: -4.0, chess.C1: -3.0, chess.D1: -3.0, chess.E1: -3.0, chess.F1: -3.0, chess.G1: -4.0, chess.H1: -5.0,
}
// blackKnightValues is the mirrored piece-square table for black knights.
var blackKnightValues = squareValues{
	chess.A8: -5.0, chess.B8: -4.0, chess.C8: -3.0, chess.D8: -3.0, chess.E8: -3.0, chess.F8: -3.0, chess.G8: -4.0, chess.H8: -5.0,
	chess.A7: -4.0, chess.B7: -2.0, chess.C7: 0.0, chess.D7: 0.5, chess.E7: 0.5, chess.F7: 0.0, chess.G7: -2.0, chess.H7: -4.0,
	chess.A6: -3.0, chess.B6: 0.5, chess.C6: 1.0, chess.D6: 1.5, chess.E6: 1.5, chess.F6: 1.0, chess.G6: 0.5, chess.H6: -3.0,
	chess.A5: -3.0, chess.B5: 0.0, chess.C5: 1.5, chess.D5: 2.0, chess.E5: 2.0, chess.F5: 1.0, chess.G5: 0.0, chess.H5: -3.0,
	chess.A4: -3.0, chess.B4: 0.5, chess.C4: 1.5, chess.D4: 2.0, chess.E4: 2.0, chess.F4: 1.5, chess.G4: 0.5, chess.H4: -3.0,
	chess.A3: -3.0, chess.B3: 0.0, chess.C3: 1.0, chess.D3: 1.5, chess.E3: 1.5, chess.F3: 1.0, chess.G3: 0.0, chess.H3: -3.0,
	chess.A2: -4.0, chess.B2: -2.0, chess.C2: 0.0, chess.D2: 0.0, chess.E2: 0.0, chess.F2: 0.0, chess.G2: -2.0, chess.H2: -4.0,
	chess.A1: -5.0, chess.B1: -4.0, chess.C1: -3.0, chess.D1: -3.0, chess.E1: -3.0, chess.F1: -3.0, chess.G1: -4.0, chess.H1: -5.0,
}
// whiteBishopValues is the piece-square table for white bishops.
var whiteBishopValues = squareValues{
	chess.A8: -2.0, chess.B8: -1.0, chess.C8: -1.0, chess.D8: -1.0, chess.E8: -1.0, chess.F8: -1.0, chess.G8: -1.0, chess.H8: -2.0,
	chess.A7: -1.0, chess.B7: 0.0, chess.C7: 0.0, chess.D7: 0.0, chess.E7: 0.0, chess.F7: 0.0, chess.G7: 0.0, chess.H7: -1.0,
	chess.A6: -1.0, chess.B6: 0.0, chess.C6: 0.5, chess.D6: 1.0, chess.E6: 1.0, chess.F6: 0.5, chess.G6: 0.0, chess.H6: -1.0,
	chess.A5: -1.0, chess.B5: 0.5, chess.C5: 0.5, chess.D5: 1.0, chess.E5: 1.0, chess.F5: 0.5, chess.G5: 0.5, chess.H5: -1.0,
	chess.A4: -1.0, chess.B4: 0.0, chess.C4: 1.0, chess.D4: 1.0, chess.E4: 1.0, chess.F4: 1.0, chess.G4: 0.0, chess.H4: -1.0,
	chess.A3: -1.0, chess.B3: 1.0, chess.C3: 1.0, chess.D3: 1.0, chess.E3: 1.0, chess.F3: 1.0, chess.G3: 1.0, chess.H3: -1.0,
	chess.A2: -1.0, chess.B2: 0.5, chess.C2: 0.0, chess.D2: 0.0, chess.E2: 0.0, chess.F2: 0.0, chess.G2: 0.5, chess.H2: -1.0,
	chess.A1: -2.0, chess.B1: -1.0, chess.C1: -1.0, chess.D1: -1.0, chess.E1: -1.0, chess.F1: -1.0, chess.G1: -1.0, chess.H1: -2.0,
}
// blackBishopValues is the mirrored piece-square table for black bishops.
var blackBishopValues = squareValues{
	chess.A8: -2.0, chess.B8: -1.0, chess.C8: -1.0, chess.D8: -1.0, chess.E8: -1.0, chess.F8: -1.0, chess.G8: -1.0, chess.H8: -2.0,
	chess.A7: -1.0, chess.B7: 0.5, chess.C7: 0.0, chess.D7: 0.0, chess.E7: 0.0, chess.F7: 0.0, chess.G7: 0.5, chess.H7: -1.0,
	chess.A6: -1.0, chess.B6: 1.0, chess.C6: 1.0, chess.D6: 1.0, chess.E6: 1.0, chess.F6: 1.0, chess.G6: 1.0, chess.H6: -1.0,
	chess.A5: -1.0, chess.B5: 0.0, chess.C5: 1.0, chess.D5: 1.0, chess.E5: 1.0, chess.F5: 1.0, chess.G5: 0.0, chess.H5: -1.0,
	chess.A4: -1.0, chess.B4: 0.5, chess.C4: 0.5, chess.D4: 1.0, chess.E4: 1.0, chess.F4: 0.5, chess.G4: 0.5, chess.H4: -1.0,
	chess.A3: -1.0, chess.B3: 0.0, chess.C3: 0.5, chess.D3: 1.0, chess.E3: 1.0, chess.F3: 0.5, chess.G3: 0.0, chess.H3: -1.0,
	chess.A2: -1.0, chess.B2: 0.0, chess.C2: 0.0, chess.D2: 0.0, chess.E2: 0.0, chess.F2: 0.0, chess.G2: 0.0, chess.H2: -1.0,
	chess.A1: -2.0, chess.B1: -1.0, chess.C1: -1.0, chess.D1: -1.0, chess.E1: -1.0, chess.F1: -1.0, chess.G1: -1.0, chess.H1: -2.0,
}
// whiteRookValues is the piece-square table for white rooks.
var whiteRookValues = squareValues{
	chess.A8: 0.0, chess.B8: 0.0, chess.C8: 0.0, chess.D8: 0.0, chess.E8: 0.0, chess.F8: 0.0, chess.G8: 0.0, chess.H8: 0.0,
	chess.A7: 0.5, chess.B7: 1.0, chess.C7: 1.0, chess.D7: 1.0, chess.E7: 1.0, chess.F7: 1.0, chess.G7: 1.0, chess.H7: 0.5,
	chess.A6: -0.5, chess.B6: 0.0, chess.C6: 0.0, chess.D6: 0.0, chess.E6: 0.0, chess.F6: 0.0, chess.G6: 0.0, chess.H6: -0.5,
	chess.A5: -0.5, chess.B5: 0.0, chess.C5: 0.0, chess.D5: 0.0, chess.E5: 0.0, chess.F5: 0.0, chess.G5: 0.0, chess.H5: -0.5,
	chess.A4: -0.5, chess.B4: 0.0, chess.C4: 0.0, chess.D4: 0.0, chess.E4: 0.0, chess.F4: 0.0, chess.G4: 0.0, chess.H4: -0.5,
	chess.A3: -0.5, chess.B3: 0.0, chess.C3: 0.0, chess.D3: 0.0, chess.E3: 0.0, chess.F3: 0.0, chess.G3: 0.0, chess.H3: -0.5,
	chess.A2: -0.5, chess.B2: 0.0, chess.C2: 0.0, chess.D2: 0.0, chess.E2: 0.0, chess.F2: 0.0, chess.G2: 0.0, chess.H2: -0.5,
	chess.A1: 0.0, chess.B1: 0.0, chess.C1: 0.0, chess.D1: 0.5, chess.E1: 0.5, chess.F1: 0.0, chess.G1: 0.0, chess.H1: 0.0,
}
// blackRookValues is the mirrored piece-square table for black rooks.
// NOTE(review): chess.D8/chess.E8 are 5.0 here, but the mirrored white
// entries (chess.D1/chess.E1) are 0.5 — this looks like a missing decimal
// point; confirm against the source table before relying on it.
var blackRookValues = squareValues{
	chess.A8: 0.0, chess.B8: 0.0, chess.C8: 0.0, chess.D8: 5.0, chess.E8: 5.0, chess.F8: 0.0, chess.G8: 0.0, chess.H8: 0.0,
	chess.A7: -0.5, chess.B7: 0.0, chess.C7: 0.0, chess.D7: 0.0, chess.E7: 0.0, chess.F7: 0.0, chess.G7: 0.0, chess.H7: -0.5,
	chess.A6: -0.5, chess.B6: 0.0, chess.C6: 0.0, chess.D6: 0.0, chess.E6: 0.0, chess.F6: 0.0, chess.G6: 0.0, chess.H6: -0.5,
	chess.A5: -0.5, chess.B5: 0.0, chess.C5: 0.0, chess.D5: 0.0, chess.E5: 0.0, chess.F5: 0.0, chess.G5: 0.0, chess.H5: -0.5,
	chess.A4: -0.5, chess.B4: 0.0, chess.C4: 0.0, chess.D4: 0.0, chess.E4: 0.0, chess.F4: 0.0, chess.G4: 0.0, chess.H4: -0.5,
	chess.A3: -0.5, chess.B3: 0.0, chess.C3: 0.0, chess.D3: 0.0, chess.E3: 0.0, chess.F3: 0.0, chess.G3: 0.0, chess.H3: -0.5,
	chess.A2: 0.5, chess.B2: 1.0, chess.C2: 1.0, chess.D2: 1.0, chess.E2: 1.0, chess.F2: 1.0, chess.G2: 1.0, chess.H2: 0.5,
	chess.A1: 0.0, chess.B1: 0.0, chess.C1: 0.0, chess.D1: 0.0, chess.E1: 0.0, chess.F1: 0.0, chess.G1: 0.0, chess.H1: 0.0,
}
// whiteQueenValues is the piece-square table for white queens.
var whiteQueenValues = squareValues{
	chess.A8: -2.0, chess.B8: -1.0, chess.C8: -1.0, chess.D8: -0.5, chess.E8: -0.5, chess.F8: -1.0, chess.G8: -1.0, chess.H8: -2.0,
	chess.A7: -1.0, chess.B7: 0.0, chess.C7: 0.0, chess.D7: 0.0, chess.E7: 0.0, chess.F7: 0.0, chess.G7: 0.0, chess.H7: -1.0,
	chess.A6: -1.0, chess.B6: 0.0, chess.C6: 0.5, chess.D6: 0.5, chess.E6: 0.5, chess.F6: 0.5, chess.G6: 0.0, chess.H6: -1.0,
	chess.A5: -0.5, chess.B5: 0.0, chess.C5: 0.5, chess.D5: 0.5, chess.E5: 0.5, chess.F5: 0.5, chess.G5: 0.0, chess.H5: -0.5,
	chess.A4: 0.0, chess.B4: 0.0, chess.C4: 0.5, chess.D4: 0.5, chess.E4: 0.5, chess.F4: 0.5, chess.G4: 0.0, chess.H4: -0.5,
	chess.A3: -1.0, chess.B3: 0.0, chess.C3: 0.5, chess.D3: 0.5, chess.E3: 0.5, chess.F3: 0.5, chess.G3: 0.0, chess.H3: -1.0,
	chess.A2: -1.0, chess.B2: 0.0, chess.C2: 0.5, chess.D2: 0.0, chess.E2: 0.0, chess.F2: 0.0, chess.G2: 0.0, chess.H2: -1.0,
	chess.A1: -2.0, chess.B1: -1.0, chess.C1: -1.0, chess.D1: -0.5, chess.E1: -0.5, chess.F1: -1.0, chess.G1: -1.0, chess.H1: -2.0,
}
// blackQueenValues is the piece-square table for black queens; the white
// table is slightly asymmetric, and this one mirrors it in both rank and
// file (full reversal).
var blackQueenValues = squareValues{
	chess.A8: -2.0, chess.B8: -1.0, chess.C8: -1.0, chess.D8: -0.5, chess.E8: -0.5, chess.F8: -1.0, chess.G8: -1.0, chess.H8: -2.0,
	chess.A7: -1.0, chess.B7: 0.0, chess.C7: 0.0, chess.D7: 0.0, chess.E7: 0.0, chess.F7: 0.5, chess.G7: 0.0, chess.H7: -1.0,
	chess.A6: -1.0, chess.B6: 0.0, chess.C6: 0.5, chess.D6: 0.5, chess.E6: 0.5, chess.F6: 0.5, chess.G6: 0.0, chess.H6: -1.0,
	chess.A5: -0.5, chess.B5: 0.0, chess.C5: 0.5, chess.D5: 0.5, chess.E5: 0.5, chess.F5: 0.5, chess.G5: 0.0, chess.H5: 0.0,
	chess.A4: -0.5, chess.B4: 0.0, chess.C4: 0.5, chess.D4: 0.5, chess.E4: 0.5, chess.F4: 0.5, chess.G4: 0.0, chess.H4: -0.5,
	chess.A3: -1.0, chess.B3: 0.0, chess.C3: 0.5, chess.D3: 0.5, chess.E3: 0.5, chess.F3: 0.5, chess.G3: 0.0, chess.H3: -1.0,
	chess.A2: -1.0, chess.B2: 0.0, chess.C2: 0.0, chess.D2: 0.0, chess.E2: 0.0, chess.F2: 0.0, chess.G2: 0.0, chess.H2: -1.0,
	chess.A1: -2.0, chess.B1: -1.0, chess.C1: -1.0, chess.D1: -0.5, chess.E1: -0.5, chess.F1: -1.0, chess.G1: -1.0, chess.H1: -2.0,
}
// whiteKingValues is the piece-square table for the white king; it rewards
// staying castled on the back rank and penalizes advancing up the board.
var whiteKingValues = squareValues{
	chess.A8: -3.0, chess.B8: -4.0, chess.C8: -4.0, chess.D8: -5.0, chess.E8: -5.0, chess.F8: -4.0, chess.G8: -4.0, chess.H8: -3.0,
	chess.A7: -3.0, chess.B7: -4.0, chess.C7: -4.0, chess.D7: -5.0, chess.E7: -5.0, chess.F7: -4.0, chess.G7: -4.0, chess.H7: -3.0,
	chess.A6: -3.0, chess.B6: -4.0, chess.C6: -4.0, chess.D6: -5.0, chess.E6: -5.0, chess.F6: -4.0, chess.G6: -4.0, chess.H6: -3.0,
	chess.A5: -3.0, chess.B5: -4.0, chess.C5: -4.0, chess.D5: -5.0, chess.E5: -5.0, chess.F5: -4.0, chess.G5: -4.0, chess.H5: -3.0,
	chess.A4: -2.0, chess.B4: -3.0, chess.C4: -3.0, chess.D4: -4.0, chess.E4: -4.0, chess.F4: -3.0, chess.G4: -3.0, chess.H4: -2.0,
	chess.A3: -1.0, chess.B3: -2.0, chess.C3: -2.0, chess.D3: -2.0, chess.E3: -2.0, chess.F3: -2.0, chess.G3: -2.0, chess.H3: -1.0,
	chess.A2: 2.0, chess.B2: 2.0, chess.C2: 0.0, chess.D2: 0.0, chess.E2: 0.0, chess.F2: 0.0, chess.G2: 2.0, chess.H2: 2.0,
	chess.A1: 2.0, chess.B1: 3.0, chess.C1: 1.0, chess.D1: 0.0, chess.E1: 0.0, chess.F1: 1.0, chess.G1: 3.0, chess.H1: 2.0,
}
// blackKingValues is the mirrored piece-square table for the black king.
var blackKingValues = squareValues{
	chess.A8: 2.0, chess.B8: 3.0, chess.C8: 1.0, chess.D8: 0.0, chess.E8: 0.0, chess.F8: 1.0, chess.G8: 3.0, chess.H8: 2.0,
	chess.A7: 2.0, chess.B7: 2.0, chess.C7: 0.0, chess.D7: 0.0, chess.E7: 0.0, chess.F7: 0.0, chess.G7: 2.0, chess.H7: 2.0,
	chess.A6: -1.0, chess.B6: -2.0, chess.C6: -2.0, chess.D6: -2.0, chess.E6: -2.0, chess.F6: -2.0, chess.G6: -2.0, chess.H6: -1.0,
	chess.A5: -2.0, chess.B5: -3.0, chess.C5: -3.0, chess.D5: -4.0, chess.E5: -4.0, chess.F5: -3.0, chess.G5: -3.0, chess.H5: -2.0,
	chess.A4: -3.0, chess.B4: -4.0, chess.C4: -4.0, chess.D4: -5.0, chess.E4: -5.0, chess.F4: -4.0, chess.G4: -4.0, chess.H4: -3.0,
	chess.A3: -3.0, chess.B3: -4.0, chess.C3: -4.0, chess.D3: -5.0, chess.E3: -5.0, chess.F3: -4.0, chess.G3: -4.0, chess.H3: -3.0,
	chess.A2: -3.0, chess.B2: -4.0, chess.C2: -4.0, chess.D2: -5.0, chess.E2: -5.0, chess.F2: -4.0, chess.G2: -4.0, chess.H2: -3.0,
	chess.A1: -3.0, chess.B1: -4.0, chess.C1: -4.0, chess.D1: -5.0, chess.E1: -5.0, chess.F1: -4.0, chess.G1: -4.0, chess.H1: -3.0,
}
// ScoredMove represents a move and the score that has been assigned to it.
type ScoredMove struct {
	Move *chess.Move
	Score float32
}
// String implements fmt.Stringer for human-readable debugging output.
func (m ScoredMove) String() string {
	return fmt.Sprintf("ScoredMove(move=%s, score=%.2f)", m.Move, m.Score)
}
// Score evaluates a chess position as the sum of signed per-piece scores:
// positive values favor white, negative values favor black.
func Score(pos *chess.Position) float32 {
	var total float32
	for square, piece := range pos.Board().SquareMap() {
		total += scorePiece(piece, square)
	}
	return total
}
// scorePiece returns the signed value of a single piece on a square: its
// base material value plus a positional adjustment, positive for white and
// negative for black.
func scorePiece(piece chess.Piece, square chess.Square) float32 {
	// Reuse the color fetched once (the original called piece.Color() twice).
	color := piece.Color()
	var coeff float32 = 1
	if color == chess.Black {
		coeff = -1
	}
	var value float32
	switch t := piece.Type(); t {
	case chess.Pawn:
		value = calculatePawnValue(square, color)
	case chess.Knight:
		value = calculateKnightValue(square, color)
	case chess.Bishop:
		value = calculateBishopValue(square, color)
	case chess.Rook:
		value = calculateRookValue(square, color)
	case chess.Queen:
		value = calculateQueenValue(square, color)
	case chess.King:
		value = calculateKingValue(square, color)
	default:
		value = calculateDefaultValue(t)
	}
	return coeff * value
}
// calculatePawnValue returns a pawn's base material value plus the
// color-specific positional bonus for its square.
func calculatePawnValue(square chess.Square, color chess.Color) float32 {
	base := pieceValues[chess.Pawn]
	if color == chess.Black {
		return base + blackPawnValues[square]
	}
	return base + whitePawnValues[square]
}
// calculateKnightValue returns a knight's base material value plus the
// color-specific positional bonus for its square.
func calculateKnightValue(square chess.Square, color chess.Color) float32 {
	base := pieceValues[chess.Knight]
	if color == chess.Black {
		return base + blackKnightValues[square]
	}
	return base + whiteKnightValues[square]
}
// calculateBishopValue returns a bishop's base material value plus the
// color-specific positional bonus for its square.
func calculateBishopValue(square chess.Square, color chess.Color) float32 {
	// Fix: look up the bishop's own base value; the original copy-pasted
	// pieceValues[chess.Knight]. (Both happen to be 30 here, but the lookup
	// was wrong and would break if the values diverge.)
	base := pieceValues[chess.Bishop]
	if color == chess.Black {
		return base + blackBishopValues[square]
	}
	return base + whiteBishopValues[square]
}
// calculateRookValue returns a rook's base material value plus the
// color-specific positional bonus for its square.
func calculateRookValue(square chess.Square, color chess.Color) float32 {
	// Fix: look up the rook's own base value (50); the original copy-pasted
	// pieceValues[chess.Knight] and scored rooks at 30.
	base := pieceValues[chess.Rook]
	if color == chess.Black {
		return base + blackRookValues[square]
	}
	return base + whiteRookValues[square]
}
// calculateQueenValue returns a queen's base material value plus the
// color-specific positional bonus for its square.
func calculateQueenValue(square chess.Square, color chess.Color) float32 {
	// Fix: look up the queen's own base value (90); the original copy-pasted
	// pieceValues[chess.Knight] and scored queens at 30.
	base := pieceValues[chess.Queen]
	if color == chess.Black {
		return base + blackQueenValues[square]
	}
	return base + whiteQueenValues[square]
}
// calculateKingValue returns the king's base material value plus the
// color-specific positional bonus for its square.
func calculateKingValue(square chess.Square, color chess.Color) float32 {
	// Fix: look up the king's own base value (900); the original copy-pasted
	// pieceValues[chess.Knight] and scored kings at 30.
	base := pieceValues[chess.King]
	if color == chess.Black {
		return base + blackKingValues[square]
	}
	return base + whiteKingValues[square]
}
func calculateDefaultValue(t chess.PieceType) float32 {
value, _ := pieceValues[t]
return value
} | internal/engine/scoring.go | 0.587233 | 0.565119 | scoring.go | starcoder |
package sparse
import (
"math/rand"
"gonum.org/v1/gonum/mat"
)
// Sparser is the interface for Sparse matrices. Sparser embeds the mat.Matrix
// interface so it automatically exposes all mat.Matrix methods.
type Sparser interface {
	mat.Matrix
	// NNZ returns the Number of Non Zero elements in the sparse matrix.
	NNZ() int
}
// TypeConverter is the interface for converting a matrix to other matrix
// formats.
type TypeConverter interface {
	// ToDense returns a mat.Dense dense format version of the matrix.
	ToDense() *mat.Dense
	// ToDOK returns a Dictionary Of Keys (DOK) sparse format version of the matrix.
	ToDOK() *DOK
	// ToCOO returns a COOrdinate sparse format version of the matrix.
	ToCOO() *COO
	// ToCSR returns a Compressed Sparse Row (CSR) sparse format version of the matrix.
	ToCSR() *CSR
	// ToCSC returns a Compressed Sparse Column (CSC) sparse format version of the matrix.
	ToCSC() *CSC
	// ToType returns an alternative format version of the matrix in the format specified.
	ToType(matType MatrixType) mat.Matrix
}
// MatrixType represents a type of matrix format. It is used to specify
// target format types for conversion, etc.
type MatrixType interface {
	// Convert converts to the type of matrix format represented by the receiver from the specified TypeConverter.
	Convert(from TypeConverter) mat.Matrix
}
// DenseType represents the mat.Dense matrix type format.
type DenseType int
// Convert converts the specified TypeConverter to mat.Dense format.
func (d DenseType) Convert(from TypeConverter) mat.Matrix {
	return from.ToDense()
}
// DOKType represents the DOK (Dictionary Of Keys) matrix type format.
type DOKType int
// Convert converts the specified TypeConverter to DOK (Dictionary of Keys) format.
func (s DOKType) Convert(from TypeConverter) mat.Matrix {
	return from.ToDOK()
}
// COOType represents the COOrdinate matrix type format.
type COOType int
// Convert converts the specified TypeConverter to COOrdinate format.
func (s COOType) Convert(from TypeConverter) mat.Matrix {
	return from.ToCOO()
}
// CSRType represents the CSR (Compressed Sparse Row) matrix type format.
type CSRType int
// Convert converts the specified TypeConverter to CSR (Compressed Sparse Row) format.
func (s CSRType) Convert(from TypeConverter) mat.Matrix {
	return from.ToCSR()
}
// CSCType represents the CSC (Compressed Sparse Column) matrix type format.
type CSCType int
// Convert converts the specified TypeConverter to CSC (Compressed Sparse Column) format.
func (s CSCType) Convert(from TypeConverter) mat.Matrix {
	return from.ToCSC()
}
const (
	// DenseFormat is an enum value representing Dense matrix format.
	DenseFormat DenseType = iota
	// DOKFormat is an enum value representing DOK matrix format.
	DOKFormat DOKType = iota
	// COOFormat is an enum value representing COO matrix format.
	COOFormat COOType = iota
	// CSRFormat is an enum value representing CSR matrix format.
	CSRFormat CSRType = iota
	// CSCFormat is an enum value representing CSC matrix format.
	CSCFormat CSCType = iota
)
// Random constructs a new matrix of the specified type e.g. Dense, COO, CSR, etc.
// It is constructed with random values randomly placed through the matrix according to the
// matrix size, specified by dimensions r * c (rows * columns), and the specified density
// of non zero values. Density is a value between 0 and 1 (0 <= density <= 1) where a density
// of 1 will construct a matrix almost entirely composed of non zero values and a density of 0
// will have only zero values.
// NOTE(review): row/column coordinates are drawn independently, so duplicate
// coordinates are possible and the effective density can be lower than requested.
func Random(t MatrixType, r int, c int, density float32) mat.Matrix {
	// Number of non-zero elements to generate (truncated, not rounded).
	d := int(density * float32(r) * float32(c))
	m := make([]int, d)
	n := make([]int, d)
	data := make([]float64, d)
	for i := 0; i < d; i++ {
		data[i] = rand.Float64()
		m[i] = rand.Intn(r)
		n[i] = rand.Intn(c)
	}
	// Build as COO (cheap incremental construction) then convert to the
	// requested target format.
	return NewCOO(r, c, m, n, data).ToType(t)
}
// rawRowView returns a slice holding a copy of row i of the matrix. The
// returned slice does not share underlying storage with the matrix.
// It panics with mat.ErrRowAccess when i is out of range.
func rawRowView(m mat.Matrix, i int) []float64 {
	r, c := m.Dims()
	if i < 0 || i >= r {
		panic(mat.ErrRowAccess)
	}
	row := make([]float64, c)
	for j := 0; j < c; j++ {
		row[j] = m.At(i, j)
	}
	return row
}
// RawColView returns a slice representing col i of the matrix. This is a copy
// of the data within the matrix and does not share underlying storage.
func rawColView(m mat.Matrix, j int) []float64 {
r, c := m.Dims()
if j >= c || j < 0 {
panic(mat.ErrColAccess)
}
slice := make([]float64, r)
for i := range slice {
slice[i] = m.At(i, j)
}
return slice
} | vendor/github.com/james-bowman/sparse/matrix.go | 0.842086 | 0.726474 | matrix.go | starcoder |
package xsort
import (
"sort"
"github.com/leaxoy/x-go/types"
)
// isNaN64 reports whether f is an IEEE 754 NaN. It mirrors math.IsNaN so
// this package avoids depending on the math package; NaN is the only value
// that does not compare equal to itself.
func isNaN64(f float64) bool { return !(f == f) }
// isNaN32 reports whether f is an IEEE 754 NaN, like isNaN64 but for
// float32; it exists to avoid a dependency on the math package.
func isNaN32(f float32) bool { return !(f == f) }
// ByteSlice implements sort.Interface for a slice of bytes, sorting in
// increasing order.
type ByteSlice types.ByteSlice

func (p ByteSlice) Len() int           { return len(p) }
func (p ByteSlice) Less(i, j int) bool { return p[i] < p[j] }
func (p ByteSlice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

// Sort sorts the slice in place in increasing order.
func (p ByteSlice) Sort() { sort.Sort(p) }

// UintSlice implements sort.Interface for a slice of uint, sorting in
// increasing order.
type UintSlice types.UintSlice

func (p UintSlice) Len() int           { return len(p) }
func (p UintSlice) Less(i, j int) bool { return p[i] < p[j] }
func (p UintSlice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p UintSlice) Sort()              { sort.Sort(p) }

// Uint64Slice implements sort.Interface for a slice of uint64.
type Uint64Slice types.Uint64Slice

func (p Uint64Slice) Len() int           { return len(p) }
func (p Uint64Slice) Less(i, j int) bool { return p[i] < p[j] }
func (p Uint64Slice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p Uint64Slice) Sort()              { sort.Sort(p) }

// Uint32Slice implements sort.Interface for a slice of uint32.
type Uint32Slice types.Uint32Slice

func (p Uint32Slice) Len() int           { return len(p) }
func (p Uint32Slice) Less(i, j int) bool { return p[i] < p[j] }
func (p Uint32Slice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p Uint32Slice) Sort()              { sort.Sort(p) }

// Uint16Slice implements sort.Interface for a slice of uint16.
type Uint16Slice types.Uint16Slice

func (p Uint16Slice) Len() int           { return len(p) }
func (p Uint16Slice) Less(i, j int) bool { return p[i] < p[j] }
func (p Uint16Slice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p Uint16Slice) Sort()              { sort.Sort(p) }

// Uint8Slice implements sort.Interface for a slice of uint8.
type Uint8Slice types.Uint8Slice

func (p Uint8Slice) Len() int           { return len(p) }
func (p Uint8Slice) Less(i, j int) bool { return p[i] < p[j] }
func (p Uint8Slice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p Uint8Slice) Sort()              { sort.Sort(p) }
// IntSlice is an alias of the standard library's sort.IntSlice.
type IntSlice = sort.IntSlice

// Int64Slice implements sort.Interface for a slice of int64, sorting in
// increasing order.
type Int64Slice types.Int64Slice

func (p Int64Slice) Len() int           { return len(p) }
func (p Int64Slice) Less(i, j int) bool { return p[i] < p[j] }
func (p Int64Slice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

// Sort sorts the slice in place in increasing order.
func (p Int64Slice) Sort() { sort.Sort(p) }

// Int32Slice implements sort.Interface for a slice of int32.
type Int32Slice types.Int32Slice

func (p Int32Slice) Len() int           { return len(p) }
func (p Int32Slice) Less(i, j int) bool { return p[i] < p[j] }
func (p Int32Slice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p Int32Slice) Sort()              { sort.Sort(p) }

// Int16Slice implements sort.Interface for a slice of int16.
type Int16Slice types.Int16Slice

func (p Int16Slice) Len() int           { return len(p) }
func (p Int16Slice) Less(i, j int) bool { return p[i] < p[j] }
func (p Int16Slice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p Int16Slice) Sort()              { sort.Sort(p) }

// Int8Slice implements sort.Interface for a slice of int8.
type Int8Slice types.Int8Slice

func (p Int8Slice) Len() int           { return len(p) }
func (p Int8Slice) Less(i, j int) bool { return p[i] < p[j] }
func (p Int8Slice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p Int8Slice) Sort()              { sort.Sort(p) }
// StringSlice implements sort.Interface for a slice of strings, sorting in
// increasing (lexicographic byte) order.
type StringSlice types.StringSlice

func (p StringSlice) Len() int           { return len(p) }
func (p StringSlice) Less(i, j int) bool { return p[i] < p[j] }
func (p StringSlice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

// Sort sorts the slice in place in increasing order.
func (p StringSlice) Sort() { sort.Sort(p) }

// Float64Slice implements sort.Interface for a slice of float64.
// NaN values are ordered before all other values (same convention as the
// standard library's sort.Float64Slice).
type Float64Slice types.Float64Slice

func (p Float64Slice) Len() int           { return len(p) }
func (p Float64Slice) Less(i, j int) bool { return p[i] < p[j] || isNaN64(p[i]) && !isNaN64(p[j]) }
func (p Float64Slice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p Float64Slice) Sort()              { sort.Sort(p) }

// Float32Slice implements sort.Interface for a slice of float32, with NaN
// values ordered before all other values.
type Float32Slice types.Float32Slice

func (p Float32Slice) Len() int           { return len(p) }
func (p Float32Slice) Less(i, j int) bool { return p[i] < p[j] || isNaN32(p[i]) && !isNaN32(p[j]) }
func (p Float32Slice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p Float32Slice) Sort()              { sort.Sort(p) }

// ComparableSlice implements sort.Interface for a slice of values ordered
// by their own Compare method.
type ComparableSlice types.ComparableSlice

func (p ComparableSlice) Len() int           { return len(p) }
func (p ComparableSlice) Less(i, j int) bool { return p[i].Compare(p[j]) == types.OrderingLess }
func (p ComparableSlice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p ComparableSlice) Sort()              { sort.Sort(p) }
package mtreefilter
import (
"path/filepath"
"github.com/apex/log"
"github.com/vbatts/go-mtree"
)
// FilterFunc is a function used when filtering deltas with FilterDeltas.
type FilterFunc func(path string) bool
// makeRoot lexically rebases path so that it is relative to the filesystem
// root ("/" + path, cleaned). Symlink components are not resolved — mtree
// deals only with lexical paths.
func makeRoot(path string) string {
	root := string(filepath.Separator)
	return filepath.Join(root, path)
}
// maskFilter builds a FilterFunc that rejects any path lying under one of
// maskedPaths. Keys in maskedPaths must already be root-relative (see
// makeRoot). When includeSelf is false, a path that *is* a masked path is
// still accepted — only its descendants are rejected.
func maskFilter(maskedPaths map[string]struct{}, includeSelf bool) FilterFunc {
	return func(path string) bool {
		// Convert the path to be cleaned and relative-to-root.
		path = makeRoot(path)
		// Walk ancestors from path up towards the root. The loop stops when
		// parent == Dir(parent), so the root itself ("/") is never tested
		// against the mask set.
		for parent := path; parent != filepath.Dir(parent); parent = filepath.Dir(parent) {
			if _, ok := maskedPaths[parent]; !ok {
				continue
			}
			// parent is masked, but it is the path itself and self-matches
			// were not requested — keep looking at higher ancestors.
			if parent == path && !includeSelf {
				continue
			}
			log.Debugf("maskfilter: ignoring path %q matched by mask %q", path, parent)
			return false
		}
		// No mask applied: keep the path.
		return true
	}
}
// MaskFilter is a factory for FilterFuncs that will mask all InodeDelta
// paths that are lexical children of any path in the mask slice. All paths
// are considered to be relative to '/'.
func MaskFilter(masks []string) FilterFunc {
	masked := make(map[string]struct{}, len(masks))
	for _, mask := range masks {
		masked[makeRoot(mask)] = struct{}{}
	}
	return maskFilter(masked, true)
}
// SimplifyFilter is a factory that takes a list of InodeDelta and creates a
// filter which drops every deletion entry whose parent also has a deletion
// entry. This both reduces image sizes and keeps compatibility with
// Docker's image format, which requires such redundant whiteouts to be
// removed (the OCI spec itself does not mandate this behaviour).
func SimplifyFilter(deltas []mtree.InodeDelta) FilterFunc {
	removed := map[string]struct{}{}
	for i := range deltas {
		if deltas[i].Type() == mtree.Missing {
			removed[makeRoot(deltas[i].Path())] = struct{}{}
		}
	}
	// includeSelf=false: a deleted path is kept, only its deleted
	// descendants are filtered out.
	return maskFilter(removed, false)
}
// FilterDeltas is a helper function to easily filter []mtree.InodeDelta with a
// filter function. Only entries which have `filter(delta.Path()) == true` will
// be included in the returned slice.
func FilterDeltas(deltas []mtree.InodeDelta, filters ...FilterFunc) []mtree.InodeDelta {
var filtered []mtree.InodeDelta
for _, delta := range deltas {
var blocked bool
for _, filter := range filters {
if !filter(delta.Path()) {
blocked = true
break
}
}
if !blocked {
filtered = append(filtered, delta)
}
}
return filtered
} | pkg/mtreefilter/mask.go | 0.702428 | 0.430147 | mask.go | starcoder |
package util
import (
"time"
)
const (
	// AllDaysMask is a bitmask of all the days of the week.
	// Bit position corresponds to the time.Weekday value (Sunday == 0).
	AllDaysMask = 1<<uint(time.Sunday) | 1<<uint(time.Monday) | 1<<uint(time.Tuesday) | 1<<uint(time.Wednesday) | 1<<uint(time.Thursday) | 1<<uint(time.Friday) | 1<<uint(time.Saturday)
	// WeekDaysMask is a bitmask of all the weekdays (Monday-Friday).
	WeekDaysMask = 1<<uint(time.Monday) | 1<<uint(time.Tuesday) | 1<<uint(time.Wednesday) | 1<<uint(time.Thursday) | 1<<uint(time.Friday)
	// WeekendDaysMask is a bitmask of the weekend days (Saturday, Sunday).
	WeekendDaysMask = 1<<uint(time.Sunday) | 1<<uint(time.Saturday)
)
var (
	// DaysOfWeek are all the time.Weekday in an array for utility purposes.
	DaysOfWeek = []time.Weekday{
		time.Sunday,
		time.Monday,
		time.Tuesday,
		time.Wednesday,
		time.Thursday,
		time.Friday,
		time.Saturday,
	}
	// WeekDays are the business time.Weekday in an array.
	WeekDays = []time.Weekday{
		time.Monday,
		time.Tuesday,
		time.Wednesday,
		time.Thursday,
		time.Friday,
	}
	// WeekendDays are the weekend time.Weekday in an array.
	WeekendDays = []time.Weekday{
		time.Sunday,
		time.Saturday,
	}
	// Epoch is the Unix epoch, saved for utility purposes.
	Epoch = time.Unix(0, 0)
)
// Date contains utility functions that operate on dates. It is the single
// package-level entry point for the methods on the unexported date type.
var Date date

// date is an empty receiver type used to namespace the date helpers below.
type date struct{}
// MustEastern returns the eastern timezone, panicking if the timezone
// database cannot supply it. Prefer Eastern when the error can be handled.
func (d date) MustEastern() *time.Location {
	eastern, err := d.Eastern()
	if err != nil {
		panic(err)
	}
	return eastern
}
// Eastern returns the eastern timezone. It first tries the POSIX zone name
// and falls back to the Windows-style abbreviation when that fails.
func (d date) Eastern() (*time.Location, error) {
	// Try POSIX
	if loc, err := time.LoadLocation("America/New_York"); err == nil {
		return loc, nil
	}
	// Try Windows
	return time.LoadLocation("EST")
}
// MustPacific returns the pacific timezone, panicking if the timezone
// database cannot supply it. Prefer Pacific when the error can be handled.
func (d date) MustPacific() *time.Location {
	pst, err := d.Pacific()
	if err != nil {
		panic(err)
	}
	return pst
}
// Pacific returns the pacific timezone. It first tries the POSIX zone name
// and falls back to the Windows-style abbreviation when that fails.
func (d date) Pacific() (*time.Location, error) {
	// Try POSIX
	if loc, err := time.LoadLocation("America/Los_Angeles"); err == nil {
		return loc, nil
	}
	// Try Windows
	return time.LoadLocation("PST")
}
// TimeUTC returns a new time.Time for the given clock components in UTC.
// It is meant to be used with the OnDate function, which only reads the
// clock (hour/minute/second) portion. The zero year/month/day values are
// normalized by time.Date and are not meaningful on their own.
func (d date) TimeUTC(hour, min, sec, nsec int) time.Time {
	return time.Date(0, 0, 0, hour, min, sec, nsec, time.UTC)
}

// Time returns a new time.Time for the given clock components in the given
// location. It is meant to be used with the OnDate function.
func (d date) Time(hour, min, sec, nsec int, loc *time.Location) time.Time {
	return time.Date(0, 0, 0, hour, min, sec, nsec, loc)
}
// DateUTC returns a new time.Time for the given date components at noon in
// UTC. Noon is used so that timezone conversions are unlikely to shift the
// calendar day.
func (d date) DateUTC(year, month, day int) time.Time {
	return time.Date(year, time.Month(month), day, 12, 0, 0, 0, time.UTC)
}

// Date returns a new time.Time for the given date components at noon in
// the given location.
func (d date) Date(year, month, day int, loc *time.Location) time.Time {
	return time.Date(year, time.Month(month), day, 12, 0, 0, 0, loc)
}
// OnDate combines the clock components (hour, minute, second, nanosecond)
// of clock with the calendar components of date. The date is first viewed
// in clock's location so the resulting instant uses clock's timezone.
func (d date) OnDate(clock, date time.Time) time.Time {
	tzAdjusted := date.In(clock.Location())
	return time.Date(tzAdjusted.Year(), tzAdjusted.Month(), tzAdjusted.Day(), clock.Hour(), clock.Minute(), clock.Second(), clock.Nanosecond(), clock.Location())
}
// NoonOnDate is a shortcut for OnDate(Time(12,0,0), cd), i.e. noon on the
// calendar day of cd, in cd's own location.
func (d date) NoonOnDate(cd time.Time) time.Time {
	return time.Date(cd.Year(), cd.Month(), cd.Day(), 12, 0, 0, 0, cd.Location())
}
// IsWeekDay returns whether the day is a weekday (Monday through Friday).
func (d date) IsWeekDay(day time.Weekday) bool {
	return !d.IsWeekendDay(day)
}
// IsWeekendDay returns whether the day falls on a weekend (Saturday or
// Sunday).
func (d date) IsWeekendDay(day time.Weekday) bool {
	switch day {
	case time.Saturday, time.Sunday:
		return true
	}
	return false
}
// Before reports whether the calendar date of `before` (viewed in the
// timezone of `reference`, ignoring hours, minutes etc.) is strictly
// earlier than the calendar date of `reference`.
func (d date) Before(before, reference time.Time) bool {
	tzAdjustedBefore := before.In(reference.Location())
	// Compare the most significant differing component. The previous
	// implementation compared months (and days) without requiring the more
	// significant components to be equal, so e.g. January 2021 was wrongly
	// reported as before December 2020.
	if tzAdjustedBefore.Year() != reference.Year() {
		return tzAdjustedBefore.Year() < reference.Year()
	}
	if tzAdjustedBefore.Month() != reference.Month() {
		return tzAdjustedBefore.Month() < reference.Month()
	}
	return tzAdjustedBefore.Day() < reference.Day()
}
const (
	// Seconds in an hour / in a day.
	// NOTE(review): neither constant is referenced elsewhere in this file —
	// confirm they are still needed before removing.
	_secondsPerHour = 60 * 60
	_secondsPerDay  = 60 * 60 * 24
)
// NextDay returns the timestamp advanced by one calendar day (AddDate
// handles month/year rollover and daylight-saving transitions).
func (d date) NextDay(ts time.Time) time.Time {
	return ts.AddDate(0, 0, 1)
}
// NextHour returns the next timestamp on the hour: one hour ahead of ts
// with minutes, seconds and nanoseconds zeroed.
func (d date) NextHour(ts time.Time) time.Time {
	//advance a full hour ...
	advanced := ts.Add(time.Hour)
	// Strip the minutes component. NOTE(review): this step looks redundant —
	// the time.Date call below already zeroes the minutes and, barring odd
	// DST offsets, the subtraction cannot move across an hour boundary.
	minutes := time.Duration(advanced.Minute()) * time.Minute
	final := advanced.Add(-minutes)
	return time.Date(final.Year(), final.Month(), final.Day(), final.Hour(), 0, 0, 0, final.Location())
}
// NextDayOfWeek returns the next occurrence of the given weekday strictly
// after the given timestamp (if `after` already falls on dayOfWeek, the
// result is a full week later). The clock components of `after` are kept.
func (d date) NextDayOfWeek(after time.Time, dayOfWeek time.Weekday) time.Time {
	afterWeekday := after.Weekday()
	if afterWeekday == dayOfWeek {
		return after.AddDate(0, 0, 7)
	}
	// Target is later in the current week: e.g. Monday(1) vs Friday(5) ~ add 4 days.
	if afterWeekday < dayOfWeek {
		dayDelta := int(dayOfWeek - afterWeekday)
		return after.AddDate(0, 0, dayDelta)
	}
	// Target already passed this week: e.g. Friday(5) vs Monday(1), add 7-(5-1) ~ 3 days.
	dayDelta := 7 - int(afterWeekday-dayOfWeek)
	return after.AddDate(0, 0, dayDelta)
}
package palette
import "github.com/Lexus123/gamut"
// source: https://gist.github.com/lunohodov/1995178
// init registers the classic RAL color table with the package-level RAL
// palette. Each entry is {human-readable name, sRGB value, RAL number};
// entries are grouped by RAL hue series (1xxx yellows ... 9xxx whites/blacks).
func init() {
	RAL.AddColors(
		gamut.Colors{
			// RAL 1xxx: yellow and beige tones.
			{"Green beige", gamut.Hex("#BEBD7F"), "1000"},
			{"Beige", gamut.Hex("#C2B078"), "1001"},
			{"Sand yellow", gamut.Hex("#C6A664"), "1002"},
			{"Signal yellow", gamut.Hex("#E5BE01"), "1003"},
			{"Golden yellow", gamut.Hex("#CDA434"), "1004"},
			{"Honey yellow", gamut.Hex("#A98307"), "1005"},
			{"Maize yellow", gamut.Hex("#E4A010"), "1006"},
			{"Daffodil yellow", gamut.Hex("#DC9D00"), "1007"},
			{"Brown beige", gamut.Hex("#8A6642"), "1011"},
			{"Lemon yellow", gamut.Hex("#C7B446"), "1012"},
			{"Oyster white", gamut.Hex("#EAE6CA"), "1013"},
			{"Ivory", gamut.Hex("#E1CC4F"), "1014"},
			{"Light ivory", gamut.Hex("#E6D690"), "1015"},
			{"Sulfur yellow", gamut.Hex("#EDFF21"), "1016"},
			{"Saffron yellow", gamut.Hex("#F5D033"), "1017"},
			{"Zinc yellow", gamut.Hex("#F8F32B"), "1018"},
			{"Grey beige", gamut.Hex("#9E9764"), "1019"},
			{"Olive yellow", gamut.Hex("#999950"), "1020"},
			{"Rape yellow", gamut.Hex("#F3DA0B"), "1021"},
			{"Traffic yellow", gamut.Hex("#FAD201"), "1023"},
			{"Ochre yellow", gamut.Hex("#AEA04B"), "1024"},
			{"Luminous yellow", gamut.Hex("#FFFF00"), "1026"},
			{"Curry", gamut.Hex("#9D9101"), "1027"},
			{"Melon yellow", gamut.Hex("#F4A900"), "1028"},
			{"Broom yellow", gamut.Hex("#D6AE01"), "1032"},
			{"Dahlia yellow", gamut.Hex("#F3A505"), "1033"},
			{"Pastel yellow", gamut.Hex("#EFA94A"), "1034"},
			{"Pearl beige", gamut.Hex("#6A5D4D"), "1035"},
			{"Pearl gold", gamut.Hex("#705335"), "1036"},
			{"Sun yellow", gamut.Hex("#F39F18"), "1037"},
			// RAL 2xxx: orange tones.
			{"Yellow orange", gamut.Hex("#ED760E"), "2000"},
			{"Red orange", gamut.Hex("#C93C20"), "2001"},
			{"Vermilion", gamut.Hex("#CB2821"), "2002"},
			{"Pastel orange", gamut.Hex("#FF7514"), "2003"},
			{"Pure orange", gamut.Hex("#F44611"), "2004"},
			{"Luminous orange", gamut.Hex("#FF2301"), "2005"},
			{"Luminous bright orange", gamut.Hex("#FFA420"), "2007"},
			{"Bright red orange", gamut.Hex("#F75E25"), "2008"},
			{"Traffic orange", gamut.Hex("#F54021"), "2009"},
			{"Signal orange", gamut.Hex("#D84B20"), "2010"},
			{"Deep orange", gamut.Hex("#EC7C26"), "2011"},
			{"Salmon range", gamut.Hex("#E55137"), "2012"},
			{"Pearl orange", gamut.Hex("#C35831"), "2013"},
			// RAL 3xxx: red tones.
			{"Flame red", gamut.Hex("#AF2B1E"), "3000"},
			{"Signal red", gamut.Hex("#A52019"), "3001"},
			{"Carmine red", gamut.Hex("#A2231D"), "3002"},
			{"Ruby red", gamut.Hex("#9B111E"), "3003"},
			{"Purple red", gamut.Hex("#75151E"), "3004"},
			{"Wine red", gamut.Hex("#5E2129"), "3005"},
			{"Black red", gamut.Hex("#412227"), "3007"},
			{"Oxide red", gamut.Hex("#642424"), "3009"},
			{"Brown red", gamut.Hex("#781F19"), "3011"},
			{"Beige red", gamut.Hex("#C1876B"), "3012"},
			{"Tomato red", gamut.Hex("#A12312"), "3013"},
			{"Antique pink", gamut.Hex("#D36E70"), "3014"},
			{"Light pink", gamut.Hex("#EA899A"), "3015"},
			{"Coral red", gamut.Hex("#B32821"), "3016"},
			{"Rose", gamut.Hex("#E63244"), "3017"},
			{"Strawberry red", gamut.Hex("#D53032"), "3018"},
			{"Traffic red", gamut.Hex("#CC0605"), "3020"},
			{"Salmon pink", gamut.Hex("#D95030"), "3022"},
			{"Luminous red", gamut.Hex("#F80000"), "3024"},
			{"Luminous bright red", gamut.Hex("#FE0000"), "3026"},
			{"Raspberry red", gamut.Hex("#C51D34"), "3027"},
			{"Pure red", gamut.Hex("#CB3234"), "3028"},
			{"Orient red", gamut.Hex("#B32428"), "3031"},
			{"Pearl ruby red", gamut.Hex("#721422"), "3032"},
			{"Pearl pink", gamut.Hex("#B44C43"), "3033"},
			// RAL 4xxx: violet tones.
			{"Red lilac", gamut.Hex("#6D3F5B"), "4001"},
			{"Red violet", gamut.Hex("#922B3E"), "4002"},
			{"Heather violet", gamut.Hex("#DE4C8A"), "4003"},
			{"Claret violet", gamut.Hex("#641C34"), "4004"},
			{"Blue lilac", gamut.Hex("#6C4675"), "4005"},
			{"Traffic purple", gamut.Hex("#A03472"), "4006"},
			{"Purple violet", gamut.Hex("#4A192C"), "4007"},
			{"Signal violet", gamut.Hex("#924E7D"), "4008"},
			{"Pastel violet", gamut.Hex("#A18594"), "4009"},
			{"Telemagenta", gamut.Hex("#CF3476"), "4010"},
			{"Pearl violet", gamut.Hex("#8673A1"), "4011"},
			{"Pearl black berry", gamut.Hex("#6C6874"), "4012"},
			// RAL 5xxx: blue tones.
			{"Violet blue", gamut.Hex("#354D73"), "5000"},
			{"Green blue", gamut.Hex("#1F3438"), "5001"},
			{"Ultramarine blue", gamut.Hex("#20214F"), "5002"},
			{"Saphire blue", gamut.Hex("#1D1E33"), "5003"},
			{"Black blue", gamut.Hex("#18171C"), "5004"},
			{"Signal blue", gamut.Hex("#1E2460"), "5005"},
			{"Brillant blue", gamut.Hex("#3E5F8A"), "5007"},
			{"Grey blue", gamut.Hex("#26252D"), "5008"},
			{"Azure blue", gamut.Hex("#025669"), "5009"},
			{"Gentian blue", gamut.Hex("#0E294B"), "5010"},
			{"Steel blue", gamut.Hex("#231A24"), "5011"},
			{"Light blue", gamut.Hex("#3B83BD"), "5012"},
			{"Cobalt blue", gamut.Hex("#1E213D"), "5013"},
			{"Pigeon blue", gamut.Hex("#606E8C"), "5014"},
			{"Sky blue", gamut.Hex("#2271B3"), "5015"},
			{"Traffic blue", gamut.Hex("#063971"), "5017"},
			{"Turquoise blue", gamut.Hex("#3F888F"), "5018"},
			{"Capri blue", gamut.Hex("#1B5583"), "5019"},
			{"Ocean blue", gamut.Hex("#1D334A"), "5020"},
			{"Water blue", gamut.Hex("#256D7B"), "5021"},
			{"Night blue", gamut.Hex("#252850"), "5022"},
			{"Distant blue", gamut.Hex("#49678D"), "5023"},
			{"Pastel blue", gamut.Hex("#5D9B9B"), "5024"},
			{"Pearl gentian blue", gamut.Hex("#2A6478"), "5025"},
			{"Pearl night blue", gamut.Hex("#102C54"), "5026"},
			// RAL 6xxx: green tones.
			{"Patina green", gamut.Hex("#316650"), "6000"},
			{"Emerald green", gamut.Hex("#287233"), "6001"},
			{"Leaf green", gamut.Hex("#2D572C"), "6002"},
			{"Olive green", gamut.Hex("#424632"), "6003"},
			{"Blue green", gamut.Hex("#1F3A3D"), "6004"},
			{"Moss green", gamut.Hex("#2F4538"), "6005"},
			{"Grey olive", gamut.Hex("#3E3B32"), "6006"},
			{"Bottle green", gamut.Hex("#343B29"), "6007"},
			{"Brown green", gamut.Hex("#39352A"), "6008"},
			{"Fir green", gamut.Hex("#31372B"), "6009"},
			{"Grass green", gamut.Hex("#35682D"), "6010"},
			{"Reseda green", gamut.Hex("#587246"), "6011"},
			{"Black green", gamut.Hex("#343E40"), "6012"},
			{"Reed green", gamut.Hex("#6C7156"), "6013"},
			{"Yellow olive", gamut.Hex("#47402E"), "6014"},
			{"Black olive", gamut.Hex("#3B3C36"), "6015"},
			{"Turquoise green", gamut.Hex("#1E5945"), "6016"},
			{"May green", gamut.Hex("#4C9141"), "6017"},
			{"Yellow green", gamut.Hex("#57A639"), "6018"},
			{"Pastel green", gamut.Hex("#BDECB6"), "6019"},
			{"Chrome green", gamut.Hex("#2E3A23"), "6020"},
			{"Pale green", gamut.Hex("#89AC76"), "6021"},
			{"Olive drab", gamut.Hex("#25221B"), "6022"},
			{"Traffic green", gamut.Hex("#308446"), "6024"},
			{"Fern green", gamut.Hex("#3D642D"), "6025"},
			{"Opal green", gamut.Hex("#015D52"), "6026"},
			{"Light green", gamut.Hex("#84C3BE"), "6027"},
			{"Pine green", gamut.Hex("#2C5545"), "6028"},
			{"Mint green", gamut.Hex("#20603D"), "6029"},
			{"Signal green", gamut.Hex("#317F43"), "6032"},
			{"Mint turquoise", gamut.Hex("#497E76"), "6033"},
			{"Pastel turquoise", gamut.Hex("#7FB5B5"), "6034"},
			{"Pearl green", gamut.Hex("#1C542D"), "6035"},
			{"Pearl opal green", gamut.Hex("#193737"), "6036"},
			{"Pure green", gamut.Hex("#008F39"), "6037"},
			{"Luminous green", gamut.Hex("#00BB2D"), "6038"},
			// RAL 7xxx: grey tones.
			{"Squirrel grey", gamut.Hex("#78858B"), "7000"},
			{"Silver grey", gamut.Hex("#8A9597"), "7001"},
			{"Olive grey", gamut.Hex("#7E7B52"), "7002"},
			{"Moss grey", gamut.Hex("#6C7059"), "7003"},
			{"Signal grey", gamut.Hex("#969992"), "7004"},
			{"Mouse grey", gamut.Hex("#646B63"), "7005"},
			{"Beige grey", gamut.Hex("#6D6552"), "7006"},
			{"Khaki grey", gamut.Hex("#6A5F31"), "7008"},
			{"Green grey", gamut.Hex("#4D5645"), "7009"},
			{"Tarpaulin grey", gamut.Hex("#4C514A"), "7010"},
			{"Iron grey", gamut.Hex("#434B4D"), "7011"},
			{"Basalt grey", gamut.Hex("#4E5754"), "7012"},
			{"Brown grey", gamut.Hex("#464531"), "7013"},
			{"Slate grey", gamut.Hex("#434750"), "7015"},
			{"Anthracite grey", gamut.Hex("#293133"), "7016"},
			{"Black grey", gamut.Hex("#23282B"), "7021"},
			{"Umbra grey", gamut.Hex("#332F2C"), "7022"},
			{"Concrete grey", gamut.Hex("#686C5E"), "7023"},
			{"Graphite grey", gamut.Hex("#474A51"), "7024"},
			{"Granite grey", gamut.Hex("#2F353B"), "7026"},
			{"Stone grey", gamut.Hex("#8B8C7A"), "7030"},
			{"Blue grey", gamut.Hex("#474B4E"), "7031"},
			{"Pebble grey", gamut.Hex("#B8B799"), "7032"},
			{"Cement grey", gamut.Hex("#7D8471"), "7033"},
			{"Yellow grey", gamut.Hex("#8F8B66"), "7034"},
			{"Light grey", gamut.Hex("#CBD0CC"), "7035"},
			{"Platinum grey", gamut.Hex("#7F7679"), "7036"},
			{"Dusty grey", gamut.Hex("#7D7F7D"), "7037"},
			{"Agate grey", gamut.Hex("#B5B8B1"), "7038"},
			{"Quartz grey", gamut.Hex("#6C6960"), "7039"},
			{"Window grey", gamut.Hex("#9DA1AA"), "7040"},
			{"Traffic grey A", gamut.Hex("#8D948D"), "7042"},
			{"Traffic grey B", gamut.Hex("#4E5452"), "7043"},
			{"Silk grey", gamut.Hex("#CAC4B0"), "7044"},
			{"Telegrey 1", gamut.Hex("#909090"), "7045"},
			{"Telegrey 2", gamut.Hex("#82898F"), "7046"},
			{"Telegrey 4", gamut.Hex("#D0D0D0"), "7047"},
			{"Pearl mouse grey", gamut.Hex("#898176"), "7048"},
			// RAL 8xxx: brown tones.
			{"Green brown", gamut.Hex("#826C34"), "8000"},
			{"Ochre brown", gamut.Hex("#955F20"), "8001"},
			{"Signal brown", gamut.Hex("#6C3B2A"), "8002"},
			{"Clay brown", gamut.Hex("#734222"), "8003"},
			{"Copper brown", gamut.Hex("#8E402A"), "8004"},
			{"Fawn brown", gamut.Hex("#59351F"), "8007"},
			{"Olive brown", gamut.Hex("#6F4F28"), "8008"},
			{"Nut brown", gamut.Hex("#5B3A29"), "8011"},
			{"Red brown", gamut.Hex("#592321"), "8012"},
			{"Sepia brown", gamut.Hex("#382C1E"), "8014"},
			{"Chestnut brown", gamut.Hex("#633A34"), "8015"},
			{"Mahogany brown", gamut.Hex("#4C2F27"), "8016"},
			{"Chocolate brown", gamut.Hex("#45322E"), "8017"},
			{"Grey brown", gamut.Hex("#403A3A"), "8019"},
			{"Black brown", gamut.Hex("#212121"), "8022"},
			{"Orange brown", gamut.Hex("#A65E2E"), "8023"},
			{"Beige brown", gamut.Hex("#79553D"), "8024"},
			{"Pale brown", gamut.Hex("#755C48"), "8025"},
			{"Terra brown", gamut.Hex("#4E3B31"), "8028"},
			{"Pearl copper", gamut.Hex("#763C28"), "8029"},
			// RAL 9xxx: white and black tones.
			{"Cream", gamut.Hex("#FDF4E3"), "9001"},
			{"Grey white", gamut.Hex("#E7EBDA"), "9002"},
			{"Signal white", gamut.Hex("#F4F4F4"), "9003"},
			{"Signal black", gamut.Hex("#282828"), "9004"},
			{"Jet black", gamut.Hex("#0A0A0A"), "9005"},
			{"White aluminium", gamut.Hex("#A5A5A5"), "9006"},
			{"Grey aluminium", gamut.Hex("#8F8F8F"), "9007"},
			{"Pure white", gamut.Hex("#FFFFFF"), "9010"},
			{"Graphite black", gamut.Hex("#1C1C1C"), "9011"},
			{"Traffic white", gamut.Hex("#F6F6F6"), "9016"},
			{"Traffic black", gamut.Hex("#1E1E1E"), "9017"},
			{"Papyrus white", gamut.Hex("#CFD3CD"), "9018"},
			{"Pearl light grey", gamut.Hex("#9C9C9C"), "9022"},
			{"Pearl dark grey", gamut.Hex("#828282"), "9023"},
		})
}
package runners
import (
"errors"
"time"
"github.com/twitter/scoot/runner"
)
// polling.go: turns a StatusQueryNower into a StatusQuerier by polling
// NewPollingStatusQuerier creates a new StatusQuerier that satisfies
// queries by polling the given StatusQueryNower every period.
func NewPollingStatusQuerier(del runner.StatusQueryNower, period time.Duration) *PollingStatusQuerier {
	// Don't name the local variable "runner" — it would shadow the
	// imported runner package within this function.
	return &PollingStatusQuerier{del, period}
}
// NewPollingService creates a new Service from a Controller, a StatusEraser
// and a StatusQueryNower (a convenience wrapper around
// NewPollingStatusQuerier).
func NewPollingService(c runner.Controller, e runner.StatusEraser, nower runner.StatusQueryNower, period time.Duration) runner.Service {
	querier := NewPollingStatusQuerier(nower, period)
	return &Service{c, querier, e}
}
// PollingStatusQuerier turns a StatusQueryNower into a StatusQuerier by
// repeatedly calling QueryNow every period until results arrive.
type PollingStatusQuerier struct {
	del    runner.StatusQueryNower // delegate queried on every poll
	period time.Duration           // sleep between polls
}
// QueryNow returns all RunStatus'es matching q in their current state,
// delegating directly without any polling.
func (r *PollingStatusQuerier) QueryNow(q runner.Query) ([]runner.RunStatus, runner.ServiceStatus, error) {
	return r.del.QueryNow(q)
}
// Query returns all RunStatus'es matching q, polling every r.period until
// results are found, wait.Timeout elapses, or wait.AbortCh fires. A
// Timeout of 0 means poll without a deadline.
func (r *PollingStatusQuerier) Query(q runner.Query, wait runner.Wait) ([]runner.RunStatus, runner.ServiceStatus, error) {
	end := time.Now().Add(wait.Timeout)
	var service runner.ServiceStatus
	for time.Now().Before(end) || wait.Timeout == 0 {
		select {
		case <-wait.AbortCh:
			return nil, service, errors.New("Aborted")
		default:
		}
		// Assign to the outer `service` instead of shadowing it with `:=`,
		// so the most recent ServiceStatus is returned on timeout/abort
		// (previously the zero value was always returned in those cases).
		st, svc, err := r.QueryNow(q)
		service = svc
		if err != nil || len(st) > 0 {
			return st, service, err
		}
		time.Sleep(r.period)
	}
	return nil, service, nil
}
// Status returns the current status of run id, via the package-level
// helper over this querier.
func (r *PollingStatusQuerier) Status(id runner.RunID) (runner.RunStatus, runner.ServiceStatus, error) {
	return runner.StatusNow(r, id)
}

// StatusAll returns the current status of all runs, via the package-level
// helper over this querier.
func (r *PollingStatusQuerier) StatusAll() ([]runner.RunStatus, runner.ServiceStatus, error) {
	return runner.StatusAll(r)
}
package graphing
import (
"github.com/go-gl/gl/v4.1-core/gl"
mgl "github.com/go-gl/mathgl/mgl32"
)
// SingleVarFunc is a single-variable function y = f(x) to be plotted.
type SingleVarFunc func(x float32) float32

// Params2D configures a 2D plot in normalized device coordinates ([-1, 1]).
type Params2D struct {
	XBoarder   float32   // horizontal margin kept clear at each side (NDC units)
	YBoarder   float32   // vertical margin kept clear at top and bottom (NDC units)
	XRange     mgl.Vec2  // [min, max] of the x domain to sample
	YRange     mgl.Vec2  // [min, max] of the y values used for normalization
	Dx         float32   // sampling step along x
	XAxisColor mgl.Vec3  // RGB color of the x axis line
	YAxisColor mgl.Vec3  // RGB color of the y axis line
}

// pointsPerVertex is the number of floats per vertex: 2 position + 3 color.
const pointsPerVertex = 5
// MakeAxisBuffs creates and fills an OpenGL VAO/VBO pair holding two line
// segments for the X and Y axes (inset by the configured boarders), colored
// per params. It returns the VAO id, the VBO id and the vertex count.
// NOTE(review): the caller appears responsible for deleting the GL objects.
func MakeAxisBuffs(params Params2D) (uint32, uint32, int32) {
	xCol := params.XAxisColor
	yCol := params.YAxisColor
	vertices := []float32{
		// Positions // Color coords
		1 - params.XBoarder, 0.0, xCol[0], xCol[1], xCol[2],
		-1 + params.XBoarder, 0.0, xCol[0], xCol[1], xCol[2],
		0.0, 1 - params.YBoarder, yCol[0], yCol[1], yCol[2],
		0.0, -1 + params.YBoarder, yCol[0], yCol[1], yCol[2],
	}
	var VAO, VBO uint32
	gl.GenVertexArrays(1, &VAO)
	gl.GenBuffers(1, &VBO)
	gl.BindVertexArray(VAO)
	gl.BindBuffer(gl.ARRAY_BUFFER, VBO)
	// Size is in bytes: len(vertices) float32s * 4 bytes each.
	gl.BufferData(gl.ARRAY_BUFFER, len(vertices)*4,
		gl.Ptr(vertices), gl.STATIC_DRAW)
	// Attribute 0: 2 position floats, stride of pointsPerVertex floats.
	gl.EnableVertexAttribArray(0)
	gl.VertexAttribPointer(0, 2, gl.FLOAT, false, pointsPerVertex*4,
		gl.PtrOffset(0))
	// Attribute 1: 3 color floats, offset past the 2 position floats.
	gl.EnableVertexAttribArray(1)
	gl.VertexAttribPointer(1, 3, gl.FLOAT, false, pointsPerVertex*4,
		gl.PtrOffset(2*4))
	return VAO, VBO, int32(len(vertices) / pointsPerVertex)
}
// MakeFunctionBuffs samples fx over params.XRange with step params.Dx,
// scales the samples into the boardered NDC viewport, and uploads them to a
// new VAO/VBO pair as a colored vertex strip. It returns the VAO id, the
// VBO id and the vertex count.
// NOTE(review): the normalization divides by the span of XRange/YRange but
// does not subtract the range minimum, so ranges not centered on zero plot
// off-center — confirm whether this is intended.
func MakeFunctionBuffs(params Params2D, fx SingleVarFunc,
	col mgl.Vec3) (uint32, uint32, int32) {
	vertices := []float32{}
	for i := params.XRange[0]; i <= params.XRange[1]; i += params.Dx {
		vertices = append(vertices,
			(2.0-params.XBoarder*2)*
				(i/(params.XRange[1]-params.XRange[0])),
			(2.0-params.YBoarder*2)*
				(fx(i)/(params.YRange[1]-params.YRange[0])),
			col[0], col[1], col[2])
	}
	var VAO, VBO uint32
	gl.GenVertexArrays(1, &VAO)
	gl.GenBuffers(1, &VBO)
	gl.BindVertexArray(VAO)
	gl.BindBuffer(gl.ARRAY_BUFFER, VBO)
	// Size is in bytes: len(vertices) float32s * 4 bytes each.
	gl.BufferData(gl.ARRAY_BUFFER, len(vertices)*4,
		gl.Ptr(vertices), gl.STATIC_DRAW)
	gl.EnableVertexAttribArray(0)
	gl.VertexAttribPointer(0, 2, gl.FLOAT, false, pointsPerVertex*4,
		gl.PtrOffset(0))
	gl.EnableVertexAttribArray(1)
	gl.VertexAttribPointer(1, 3, gl.FLOAT, false, pointsPerVertex*4,
		gl.PtrOffset(2*4))
	return VAO, VBO, int32(len(vertices) / pointsPerVertex)
}
package wann
import (
"errors"
"fmt"
"math/rand"
"github.com/dave/jennifer/jen"
)
// Neuron is a list of input-neurons, and an activation function.
type Neuron struct {
	Net                    *Network
	InputNodes             []NeuronIndex // indices into Net.AllNodes of the input neurons (not pointers)
	ActivationFunction     ActivationFunctionIndex
	Value                  *float64 // optional fixed value; when set, it is used instead of the inputs
	distanceFromOutputNode int      // Used when traversing nodes and drawing diagrams
	neuronIndex            NeuronIndex // this neuron's own index into Net.AllNodes
}
// NewBlankNeuron creates a new Neuron and appends it to net.AllNodes.
// NOTE(review): earlier comments claimed Step/Linear as the default, but
// the activation function actually assigned here is Swish — confirm intent.
func (net *Network) NewBlankNeuron() (*Neuron, NeuronIndex) {
	// Pre-allocate room for 16 input connections.
	neuron := Neuron{Net: net, InputNodes: make([]NeuronIndex, 0, 16), ActivationFunction: Swish}
	neuron.neuronIndex = NeuronIndex(len(net.AllNodes))
	// NOTE(review): append stores a *copy*; the returned pointer refers to
	// the local variable, not to net.AllNodes[index], so mutations through
	// it are not reflected in the network.
	net.AllNodes = append(net.AllNodes, neuron)
	return &neuron, neuron.neuronIndex
}
// NewNeuron creates a new *Neuron with a randomly chosen activation
// function and appends it to net.AllNodes.
func (net *Network) NewNeuron() (*Neuron, NeuronIndex) {
	// Pick one of the available activation functions uniformly at random.
	chosenActivationFunctionIndex := ActivationFunctionIndex(rand.Intn(len(ActivationFunctions)))
	inputNodes := make([]NeuronIndex, 0, 16)
	neuron := Neuron{
		Net:                net,
		InputNodes:         inputNodes,
		ActivationFunction: chosenActivationFunctionIndex,
	}
	// The length of net.AllNodes is what will be the last index
	neuronIndex := NeuronIndex(len(net.AllNodes))
	// Assign the neuron index in the net to the neuron
	neuron.neuronIndex = neuronIndex
	// Add this neuron to the net.
	// NOTE(review): append stores a *copy*; the returned pointer refers to
	// the local variable, not to net.AllNodes[neuronIndex].
	net.AllNodes = append(net.AllNodes, neuron)
	return &neuron, neuronIndex
}
// NewUnconnectedNeuron returns a new neuron that is not attached to any
// network: neuronIndex is -1 and the Net pointer is nil. Use Connect to
// attach it to a Network later.
func NewUnconnectedNeuron() *Neuron {
	// Pre-allocate room for 16 connections and use Linear as the default activation function
	neuron := Neuron{Net: nil, InputNodes: make([]NeuronIndex, 0, 16), ActivationFunction: Linear}
	neuron.neuronIndex = -1
	return &neuron
}
// Connect attaches this neuron to a network, overwriting any existing
// connections. Any input nodes recorded on the neuron are cleared, since
// their indices were only meaningful in the previous network.
// TODO: Find the input nodes from the neuron.Net, save those and re-assign if there are matches?
func (neuron *Neuron) Connect(net *Network) {
	neuron.InputNodes = []NeuronIndex{}
	neuron.Net = net
	for ni := range net.AllNodes {
		// Check if this network already holds this exact neuron (compared
		// by address of the slice element).
		if &net.AllNodes[ni] == neuron {
			// Yes, assign the index
			neuron.neuronIndex = NeuronIndex(ni)
			// All good, bail
			return
		}
	}
	// The neuron was not found in the network.
	// Find what will be the last index in net.AllNodes.
	neuronIndex := len(net.AllNodes)
	// Add a copy of this neuron to the network.
	net.AllNodes = append(net.AllNodes, *neuron)
	// Assign the index on the stored copy (not on the caller's neuron).
	net.AllNodes[neuronIndex].neuronIndex = NeuronIndex(neuronIndex)
}
// RandomizeActivationFunction assigns a uniformly random activation
// function to this neuron.
func (neuron *Neuron) RandomizeActivationFunction() {
	neuron.ActivationFunction = ActivationFunctionIndex(rand.Intn(len(ActivationFunctions)))
}
// SetValue assigns a fixed value to this neuron instead of relying on
// input neurons; this changes how the evaluation function behaves. The
// stored pointer refers to a private copy of x.
func (neuron *Neuron) SetValue(x float64) {
	neuron.Value = &x
}
// HasInput reports whether the given neuron index is already registered as
// an input of this neuron.
func (neuron *Neuron) HasInput(e NeuronIndex) bool {
	for i := range neuron.InputNodes {
		if neuron.InputNodes[i] == e {
			return true
		}
	}
	return false
}
// FindInput reports whether the given neuron index is an input of this
// neuron, and if so, at which position within InputNodes. The position is
// -1 when not found.
func (neuron *Neuron) FindInput(e NeuronIndex) (int, bool) {
	for i := range neuron.InputNodes {
		if neuron.InputNodes[i] == e {
			return i, true
		}
	}
	return -1, false
}
// Is reports whether the given NeuronIndex refers to this neuron.
func (neuron *Neuron) Is(e NeuronIndex) bool {
	return e == neuron.neuronIndex
}

// AddInput registers the given neuron index as an input to this neuron.
// It is an error to add a neuron as an input to itself, or to register the
// same input twice.
func (neuron *Neuron) AddInput(ni NeuronIndex) error {
	switch {
	case neuron.Is(ni):
		return errors.New("adding a neuron as input to itself")
	case neuron.HasInput(ni):
		return errors.New("neuron already exists")
	default:
		neuron.InputNodes = append(neuron.InputNodes, ni)
		return nil
	}
}
// AddInputNeuron both adds a neuron to this network (if needed) and also
// adds its neuron index to the neuron.InputNodes collection.
func (neuron *Neuron) AddInputNeuron(n *Neuron) error {
	// If n.neuronIndex is already known to this network, just register it
	if neuron.Net.Exists(n.neuronIndex) {
		return neuron.AddInput(n.neuronIndex)
	}
	// If not, add this neuron to the network first, at the next free index.
	// The new index is recorded on n itself so that the caller's pointer and
	// the stored copy agree, and so that the correct (new) index is registered
	// below — the previous code assigned the index only to a local copy and
	// then registered the stale n.neuronIndex instead.
	n.neuronIndex = NeuronIndex(len(neuron.Net.AllNodes))
	neuron.Net.AllNodes = append(neuron.Net.AllNodes, *n)
	return neuron.AddInput(n.neuronIndex)
}
// RemoveInput deletes the given neuron index from this neuron's inputs.
// An error is returned if the index is not registered as an input.
func (neuron *Neuron) RemoveInput(e NeuronIndex) error {
	i, found := neuron.FindInput(e)
	if !found {
		return errors.New("neuron does not exist")
	}
	// Remove the entry at position i, keeping the order of the rest
	neuron.InputNodes = append(neuron.InputNodes[:i], neuron.InputNodes[i+1:]...)
	return nil
}
// Exists checks if the given NeuronIndex exists in this Network.
// Node indices are exactly the positions 0..len(net.AllNodes)-1, so a direct
// bounds check replaces the previous O(n) scan with identical results.
func (net *Network) Exists(ni NeuronIndex) bool {
	return ni >= 0 && int(ni) < len(net.AllNodes)
}
// InputNeuronsAreGood checks that every input index of this neuron refers to
// a node that exists in neuron.Net.
func (neuron *Neuron) InputNeuronsAreGood() bool {
	for i := range neuron.InputNodes {
		if !neuron.Net.Exists(neuron.InputNodes[i]) {
			return false
		}
	}
	return true
}
// evaluate will return a weighted sum of the input nodes,
// using the .Value field if it is set and no input nodes are available.
// returns true if the maximum number of evaluation loops is reached
// The shared *maxEvaluationLoops budget is decremented across the whole
// recursive evaluation to bound work on cyclic or very deep graphs.
func (neuron *Neuron) evaluate(weight float64, maxEvaluationLoops *int) (float64, bool) {
	// Budget exhausted: abort the recursion immediately
	if *maxEvaluationLoops <= 0 {
		return 0.0, true
	}
	// Assume this is the Output neuron, recursively evaluating the result
	// For each input neuron, evaluate them
	summed := 0.0
	counter := 0
	for _, inputNeuronIndex := range neuron.InputNodes {
		// Let each input neuron do its own evauluation, using the given weight
		(*maxEvaluationLoops)--
		// TODO: Figure out exactly why this one kicks in (and if it matters)
		// It only seems to kick in during "go test" and not in evolve/main.go
		// NOTE(review): input indexes beyond the node slice are silently
		// skipped — confirm whether stale indexes should be an error instead.
		if int(inputNeuronIndex) >= len(neuron.Net.AllNodes) {
			continue
			//panic("TOO HIGH INPUT NEURON INDEX")
		}
		result, stopNow := neuron.Net.AllNodes[inputNeuronIndex].evaluate(weight, maxEvaluationLoops)
		summed += result * weight
		counter++
		// Stop descending once a child reports exhaustion or the budget is spent
		if stopNow || (*maxEvaluationLoops < 0) {
			break
		}
	}
	// No input neurons. Use the .Value field if it's not nil and this is not the output node
	if counter == 0 && neuron.Value != nil && !neuron.IsOutput() {
		return *(neuron.Value), false
	}
	// Nothing was summed at all: fall back to 0
	if counter == 0 {
		// This should never happen
		return 0.0, false
	}
	// This should run, also when this neuron is the output neuron
	f := neuron.GetActivationFunction()
	//fmt.Println(neuron.ActivationFunction.Name() + " = " + neuron.ActivationFunction.String())
	// Run the input through the activation function
	// TODO: Does "retval := f(summed)"" perform better?, or the one that averages the sum first?
	//retval := f(summed / float64(counter))
	retval := f(summed)
	return retval, false
}
// GetActivationFunction returns the activation function for this neuron,
// looked up by index in the package-level ActivationFunctions collection.
func (neuron *Neuron) GetActivationFunction() func(float64) float64 {
	return ActivationFunctions[neuron.ActivationFunction]
}

// In checks if this neuron's index appears in the given collection.
func (neuron *Neuron) In(collection []NeuronIndex) bool {
	for _, existingNodeIndex := range collection {
		if neuron.Is(existingNodeIndex) {
			return true
		}
	}
	return false
}
// IsInput returns true if this neuron is one of the network's input nodes.
// Returns false if the neuron is not connected to a network (Net is nil).
func (neuron *Neuron) IsInput() bool {
	if neuron.Net == nil {
		return false
	}
	return neuron.Net.IsInput(neuron.neuronIndex)
}

// IsOutput returns true if this neuron is the network's output node.
// Returns false if the neuron is not connected to a network (Net is nil).
func (neuron *Neuron) IsOutput() bool {
	if neuron.Net == nil {
		return false
	}
	return neuron.Net.OutputNode == neuron.neuronIndex
}
// Copy a Neuron to a new Neuron, and assign the pointer to the given network to .Net.
// NOTE(review): InputNodes is assigned directly, so the copy shares the same
// backing array as the original; an in-place mutation through one is visible
// through the other — confirm whether a deep copy of the slice is wanted here.
func (neuron Neuron) Copy(net *Network) Neuron {
	var newNeuron Neuron
	newNeuron.Net = net
	newNeuron.InputNodes = neuron.InputNodes
	newNeuron.ActivationFunction = neuron.ActivationFunction
	newNeuron.Value = neuron.Value
	newNeuron.distanceFromOutputNode = neuron.distanceFromOutputNode
	newNeuron.neuronIndex = neuron.neuronIndex
	return newNeuron
}
// String returns a human readable description of this neuron: which kind of
// node it is, its ID, and its input connections.
func (neuron *Neuron) String() string {
	var nodeType string
	switch {
	case neuron.IsInput():
		nodeType = " Input node"
	case neuron.IsOutput():
		nodeType = "Output node"
	default:
		nodeType = " Node"
	}
	return fmt.Sprintf("%s ID %d has these input connections: %v", nodeType, neuron.neuronIndex, neuron.InputNodes)
}
// InputStatement returns a statement like "inputData[0]", if this node is a network input node
func (neuron *Neuron) InputStatement() (*jen.Statement, error) {
// If this node is a network input node, return a statement representing this input,
// like "inputData[0]"
if !neuron.IsInput() {
return jen.Empty(), errors.New(" not an input node")
}
for i, ni := range neuron.Net.InputNodes {
if ni == neuron.neuronIndex {
// This index in the neuron.NetInputNodes is i
return jen.Id("inputData").Index(jen.Lit(i)), nil
}
}
// Not found!
return jen.Empty(), errors.New("not an input node for the associated network")
} | neuron.go | 0.624408 | 0.664568 | neuron.go | starcoder |
package mocks
import (
"github.com/steinfletcher/apitest"
"testing"
)
// Compile-time check that MockVerifier satisfies the apitest.Verifier interface
var _ apitest.Verifier = MockVerifier{}

// MockVerifier is a mock of the Verifier interface that is used in tests of apitest.
// Each XxxFn field holds the behaviour the corresponding method runs; the matching
// XxxInvoked flag is set when the method is called.
type MockVerifier struct {
	// EqualFn is invoked by Equal
	EqualFn func(t *testing.T, expected, actual interface{}, msgAndArgs ...interface{}) bool
	// EqualInvoked records that Equal was called
	EqualInvoked bool
	// JSONEqFn is invoked by JSONEq
	JSONEqFn func(t *testing.T, expected string, actual string, msgAndArgs ...interface{}) bool
	// JSONEqInvoked records that JSONEq was called
	JSONEqInvoked bool
	// FailFn is invoked by Fail
	FailFn func(t *testing.T, failureMessage string, msgAndArgs ...interface{}) bool
	// FailInvoked records that Fail was called
	FailInvoked bool
	// NoErrorFn is invoked by NoError
	NoErrorFn func(t *testing.T, err error, msgAndArgs ...interface{}) bool
	// NoErrorInvoked records that NoError was called
	NoErrorInvoked bool
}
// NewVerifier returns a MockVerifier in which every assertion succeeds by default.
func NewVerifier() MockVerifier {
	alwaysEqual := func(t *testing.T, expected, actual interface{}, msgAndArgs ...interface{}) bool {
		return true
	}
	alwaysJSONEq := func(t *testing.T, expected string, actual string, msgAndArgs ...interface{}) bool {
		return true
	}
	alwaysFail := func(t *testing.T, failureMessage string, msgAndArgs ...interface{}) bool {
		return true
	}
	alwaysNoError := func(t *testing.T, err error, msgAndArgs ...interface{}) bool {
		return true
	}
	return MockVerifier{
		EqualFn:   alwaysEqual,
		JSONEqFn:  alwaysJSONEq,
		FailFn:    alwaysFail,
		NoErrorFn: alwaysNoError,
	}
}
// Equal mocks the Equal method of the Verifier
func (m MockVerifier) Equal(t *testing.T, expected, actual interface{}, msgAndArgs ...interface{}) bool {
m.EqualInvoked = true
return m.EqualFn(t, expected, actual, msgAndArgs)
}
// JSONEq mocks the JSONEq method of the Verifier
func (m MockVerifier) JSONEq(t *testing.T, expected string, actual string, msgAndArgs ...interface{}) bool {
m.JSONEqInvoked = true
return m.JSONEqFn(t, expected, actual, msgAndArgs)
}
// Fail mocks the Fail method of the Verifier
func (m MockVerifier) Fail(t *testing.T, failureMessage string, msgAndArgs ...interface{}) bool {
m.FailInvoked = true
return m.FailFn(t, failureMessage, msgAndArgs)
}
// NoError asserts that a function returned no error
func (m MockVerifier) NoError(t *testing.T, err error, msgAndArgs ...interface{}) bool {
m.NoErrorInvoked = true
return m.NoErrorFn(t, err, msgAndArgs)
} | mocks/verifier.go | 0.731059 | 0.426023 | verifier.go | starcoder |
package schema8
// init wires up the package-level lookup tables used by the validator:
// assertion keyword constructors, applicator constructors (split by the JSON
// type of the keyword's value), and the named "format" checkers.
func init() {
	// Keywords whose value is checked directly against the instance
	gAssertionConstructorMap = map[string]assertionConstructorFunc{
		"type":              newAssertionType,
		"enum":              newAssertionEnum,
		"const":             newAssertionConst,
		"multipleOf":        newAssertionMultipleOf,
		"maximum":           newAssertionMaximum,
		"minimum":           newAssertionMinimum,
		"exclusiveMaximum":  newAssertionExMaximum,
		"exclusiveMinimum":  newAssertionExMinimum,
		"maxLength":         newAssertionMaxLength,
		"minLength":         newAssertionMinLength,
		"pattern":           newAssertionPattern,
		"maxItems":          newAssertionMaxItems,
		"minItems":          newAssertionMinItems,
		"maxContains":       newAssertionMaxContains,
		"minContains":       newAssertionMinContains,
		"uniqueItems":       newAssertionUniqueItems,
		"maxProperties":     newAssertionMaxProperties,
		"minProperties":     newAssertionMinProperties,
		"required":          newAssertionRequired,
		"dependentRequired": newAssertionDependentRequired,
		"format":            newAssertionFormat,
	}
	// applicator 'items' will be handled specially.
	// Applicators whose value is an array of schemas
	gApplicatorArraySchemaMap = map[string]applicatorConstructorFunc{
		"allOf": newApplicatorAllOf,
		"anyOf": newApplicatorAnyOf,
		"oneOf": newApplicatorOneOf,
	}
	// Applicators whose value is an object mapping names to schemas
	gApplicatorObjectSchemaMap = map[string]applicatorConstructorFunc{
		"dependentSchemas":  newApplicatorDependentSchemas,
		"properties":        newApplicatorProperties,
		"patternProperties": newApplicatorPatternProperties,
	}
	// Applicators whose value is a single schema
	gApplicatorSchemaMap = map[string]applicatorConstructorFunc{
		"not":                   newApplicatorNot,
		"if":                    newApplicatorIf,
		"then":                  newApplicatorThen,
		"else":                  newApplicatorElse,
		"additionalItems":       newApplicatorAdditionalItems,
		"unevaluatedItems":      newApplicatorUnevaluatedItems,
		"contains":              newApplicatorContains,
		"additionalProperties":  newApplicatorAdditionalProperties,
		"unevaluatedProperties": newApplicatorUnevaluatedProperties,
		"propertyNames":         newApplicatorPropertyNames,
	}
	// Named "format" checkers; nil entries are recognized but not validated
	gFormatFuncMap = map[string]FormatFunc{
		"date-time": nil,
		"date":      nil,
		"time":      nil,
		"duration":  nil,
		"email":     formatEmail, // <EMAIL>
		"idn-email": nil,
		"hostname":  nil,
		"idn-hostname": nil,
		"ipv4": formatIPV4, // 10.0.0.1
		"ipv6": formatIPV6, // fc00:e968:6179::de52:7100
		"uri":           formatURI,    // absolute URI, e.g.: https://www.duckduckgo.com
		"uri-reference": formatURIRef, // absolute URI or uri-reference
		"iri":           nil,
		"iri-reference": nil,
		"uuid":          formatUUID,
		"json-pointer":  nil,
		"relative-json-pointer": nil,
		"regex":                 formatRegex,
		// Custom
		"mysql-datetime":   formatMySQLDateTime,   // 2006-01-02 15:04:05
		"mongodb-datetime": formatMongoDBDateTime, // 2018-11-16T06:16:36.156Z
		"timestamp":        formatTimestamp,       // 1604906268, now +/- 10 min is valid
		"timestamp-ms":     formatTimestampMS,     // 1604906268123, now +/- 10 min is valid
	}
}
package conversions
// Length caches conversion factors between imperial units (yards, feet,
// miles, inches) and metric units (meters, kilometers, centimeters).
// Each field is populated by the corresponding getter.
// NOTE(review): field names keep the original (misspelled) forms because the
// getters in this file reference them directly.
type Length struct {
	// Yards in Meters, Kilometers and Centimeters
	yardInMetter float32
	yardInKilometer float32
	yardInCentimeters float32
	// Feet in Meters, Kilometers and Centimeters
	featInKilometter float32
	featInMetter float32
	featInCentimetter float32
	// Miles in Meters, Kilometers and Centimeters
	milleInMetter float32
	milleInKilometter float32
	milleInCentimetter float32
	// Inches in Meters, Kilometers and Centimeters
	incheInKilometter float32
	incheInMetter float32
	incheInCentimetter float32
}
// GetYardInKilometter caches and returns the number of kilometers in one yard.
func (l *Length) GetYardInKilometter() float32 {
	const kilometersPerYard = 0.0009144
	l.yardInKilometer = kilometersPerYard
	return kilometersPerYard
}

// GetYardInMetter caches and returns the number of meters in one yard.
func (l *Length) GetYardInMetter() float32 {
	const metersPerYard = 0.9144
	l.yardInMetter = metersPerYard
	return metersPerYard
}

// GetYardInMetter2 returns a fixed value of 9.2.
// NOTE(review): this matches no yard conversion factor — presumably a
// leftover stub; confirm whether it is still needed.
func GetYardInMetter2() float32 {
	return 9.2
}

// GetYardInCentimetter caches and returns the number of centimeters in one yard.
func (l *Length) GetYardInCentimetter() float32 {
	const centimetersPerYard = 91.44
	l.yardInCentimeters = centimetersPerYard
	return centimetersPerYard
}

// GetIncheInKilometter caches and returns the number of kilometers in one inch.
func (l *Length) GetIncheInKilometter() float32 {
	const kilometersPerInch = 0.0000254
	l.incheInKilometter = kilometersPerInch
	return kilometersPerInch
}

// GetIncheInMetter caches and returns the number of meters in one inch.
func (l *Length) GetIncheInMetter() float32 {
	const metersPerInch = 0.0254
	l.incheInMetter = metersPerInch
	return metersPerInch
}

// GetIncheInCentimetter caches and returns the number of centimeters in one inch.
func (l *Length) GetIncheInCentimetter() float32 {
	const centimetersPerInch = 2.54
	l.incheInCentimetter = centimetersPerInch
	return centimetersPerInch
}

// GetFeatInKilometter caches and returns the number of kilometers in one foot.
func (l *Length) GetFeatInKilometter() float32 {
	const kilometersPerFoot = 0.0003048
	l.featInKilometter = kilometersPerFoot
	return kilometersPerFoot
}

// GetFeatInMetter caches and returns the number of meters in one foot.
func (l *Length) GetFeatInMetter() float32 {
	const metersPerFoot = 0.3048
	l.featInMetter = metersPerFoot
	return metersPerFoot
}

// GetFeatInCentimetter caches and returns the number of centimeters in one foot.
func (l *Length) GetFeatInCentimetter() float32 {
	const centimetersPerFoot = 30.48
	l.featInCentimetter = centimetersPerFoot
	return centimetersPerFoot
}
//Return Mille in KiloMetter
func (mille *Length) GetMilleInKilometter() float32 {
mille.milleInMetter = 1609.34
return mille.milleInKilometter
}
//Return Mille in Metter
func (mille *Length) GetMilleInMetter() float32 {
mille.milleInKilometter = 1.60934
return mille.milleInMetter
}
//Return Mille in centimetter
func (mille *Length) GetMilleInCentiMetter() float32 {
mille.milleInCentimetter = 160934
return mille.milleInCentimetter
} | length.go | 0.77081 | 0.401952 | length.go | starcoder |
package core
import "fmt"
// Scalar holds up to four float64 components in Val, mirroring OpenCV's
// Scalar type. Constructors that take fewer than four values zero-fill
// the remaining components.
type Scalar struct {
	Val []float64
}

// NewScalar returns a Scalar with all four components set.
func NewScalar(v0 float64, v1 float64, v2 float64, v3 float64) *Scalar {
	return &Scalar{Val: []float64{v0, v1, v2, v3}}
}

// NewScalar2 returns a Scalar with three components set; the fourth is zero.
func NewScalar2(v0 float64, v1 float64, v2 float64) *Scalar {
	return &Scalar{Val: []float64{v0, v1, v2, 0}}
}

// NewScalar3 returns a Scalar with two components set; the rest are zero.
func NewScalar3(v0 float64, v1 float64) *Scalar {
	return &Scalar{Val: []float64{v0, v1, 0, 0}}
}

// NewScalar4 returns a Scalar with one component set; the rest are zero.
func NewScalar4(v0 float64) *Scalar {
	return &Scalar{Val: []float64{v0, 0, 0, 0}}
}

// NewScalar5 builds a Scalar from up to four values taken from vals;
// missing components are zero-filled.
func NewScalar5(vals []float64) *Scalar {
	// Fixed: Val must be allocated before Set indexes into it — the previous
	// version called Set on a nil slice, which always panicked.
	rcvr := &Scalar{Val: make([]float64, 4)}
	rcvr.Set(vals)
	return rcvr
}

// ScalarAll returns a Scalar with all four components set to v.
func ScalarAll(v float64) *Scalar {
	return NewScalar(v, v, v, v)
}

// Clone returns a copy of this Scalar backed by its own slice.
func (rcvr *Scalar) Clone() *Scalar {
	return NewScalar5(rcvr.Val)
}

// Conj returns the conjugate: the first component kept, the rest negated.
func (rcvr *Scalar) Conj() *Scalar {
	return NewScalar(rcvr.Val[0], -rcvr.Val[1], -rcvr.Val[2], -rcvr.Val[3])
}

// Equals reports whether obj is a *Scalar with the same four components.
func (rcvr *Scalar) Equals(obj interface{}) bool {
	if rcvr == obj {
		return true
	}
	it, ok := obj.(*Scalar)
	if !ok {
		return false
	}
	return rcvr.Val[0] == it.Val[0] && rcvr.Val[1] == it.Val[1] && rcvr.Val[2] == it.Val[2] && rcvr.Val[3] == it.Val[3]
}

// IsReal reports whether only the first component is non-zero.
func (rcvr *Scalar) IsReal() bool {
	return rcvr.Val[1] == 0 && rcvr.Val[2] == 0 && rcvr.Val[3] == 0
}

// Mul returns the per-component product of the two scalars, scaled by scale.
func (rcvr *Scalar) Mul(it *Scalar, scale float64) *Scalar {
	return NewScalar(rcvr.Val[0]*it.Val[0]*scale, rcvr.Val[1]*it.Val[1]*scale, rcvr.Val[2]*it.Val[2]*scale, rcvr.Val[3]*it.Val[3]*scale)
}

// Mul2 returns the per-component product with a scale of 1.
func (rcvr *Scalar) Mul2(it *Scalar) *Scalar {
	return rcvr.Mul(it, 1)
}

// Set copies up to four values from vals into Val, zero-filling the rest.
// A nil vals zeroes all components.
// The previous ladder of immediately-invoked closures is replaced with a
// single loop; the behavior is identical.
func (rcvr *Scalar) Set(vals []float64) {
	for i := 0; i < 4; i++ {
		if i < len(vals) {
			rcvr.Val[i] = vals[i]
		} else {
			rcvr.Val[i] = 0
		}
	}
}

// String formats the scalar as "[v0, v1, v2, v3]".
// A single format string replaces the previous "%v%v%v..." construction
// that interleaved literal punctuation as arguments; the output is identical.
func (rcvr *Scalar) String() string {
	return fmt.Sprintf("[%v, %v, %v, %v]", rcvr.Val[0], rcvr.Val[1], rcvr.Val[2], rcvr.Val[3])
}
package term
import "fmt"
import "math/big"
// Integer represents an unbounded, signed integer value, backed by
// math/big's arbitrary-precision integer type.
type Integer big.Int
// NewInt parses an integer's string representation to create a new
// integer value. Panics if the string is not a valid integer.
func NewInt(text string) Number {
	if len(text) == 0 {
		panic("Empty string is not a valid integer")
	}
	// Default: plain decimal; radix/character prefixes may override below.
	format, digits := "%d", text
	// see §6.4.4 for syntax details
	if text[0] == '0' && len(text) >= 3 {
		switch text[1] {
		case '\'':
			// character code constant, e.g. 0'a
			i, err := parseEscape(text[2:])
			if err != nil {
				panic(err)
			}
			return i
		case 'b':
			format, digits = "%b", text[2:]
		case 'o':
			format, digits = "%o", text[2:]
		case 'x':
			format, digits = "%x", text[2:]
		}
	}
	i, err := parseInteger(format, digits)
	if err != nil {
		// Fixed: parse errors now surface as the documented panic; the
		// previous code returned the two-value result of parseInteger from
		// a single-value function, which did not compile.
		panic(err)
	}
	return i
}
// NewInt64 is a helper for when an int64 is already available.
func NewInt64(i int64) Number {
	return (*Integer)(big.NewInt(i))
}

// NewBigInt is a helper for when a big.Int is already available.
// The value is wrapped directly, not copied.
func NewBigInt(val *big.Int) Number {
	return (*Integer)(val)
}

// NewCode returns an integer whose value is the character code of
// the given rune.
func NewCode(c rune) *Integer {
	return (*Integer)(big.NewInt(int64(c)))
}
// parseInteger scans text with the given fmt verb ("%d", "%b", "%o" or "%x")
// and returns the value as an arbitrary-precision Integer.
func parseInteger(format, text string) (*Integer, error) {
	i := new(big.Int)
	n, err := fmt.Sscanf(text, format, i)
	if err != nil {
		// Fixed: return a nil value alongside the error — the previous code
		// had a bare single-value return here (which did not compile) and
		// leaked a half-parsed value with the error.
		return nil, err
	}
	if n == 0 {
		panic("Parsed no integers")
	}
	return (*Integer)(i), nil
}
// parseEscape parses a "single quoted character" constant — §6.4.2.1 — and
// returns its character code as an Integer.
func parseEscape(text string) (*Integer, error) {
	var r rune
	if text[0] == '\\' {
		if len(text) < 2 {
			// Fixed: previously returned the untyped constant 0 where a
			// *Integer was required, which did not compile.
			return nil, fmt.Errorf("Invalid integer character constant: %s", text)
		}
		switch text[1] {
		// "meta escape sequence" - §6.4.2.1
		case '\\':
			r = '\\'
		case '\'':
			r = '\''
		case '"':
			r = '"'
		case '`':
			r = '`'
		// "control escape char" - §6.4.2.1
		case 'a':
			r = '\a'
		case 'b':
			r = '\b'
		case 'f':
			r = '\f'
		case 'n':
			r = '\n'
		case 'r':
			r = '\r'
		case 's':
			r = ' ' // SWI-Prolog extension
		case 't':
			r = '\t'
		case 'v':
			r = '\v'
		// "hex escape char" - §6.4.2.1
		case 'x':
			return parseInteger("%x", text[2:len(text)-1])
		// "octal escape char" - §6.4.2.1
		case '0', '1', '2', '3', '4', '5', '6', '7':
			return parseInteger("%o", text[1:len(text)-1])
		// unexpected escape sequence
		default:
			// Fixed: same non-compiling "return 0, err" as above
			return nil, fmt.Errorf("Invalid character escape sequence: %s", text)
		}
	} else {
		// "non quote char" - §6.4.2.1
		runes := []rune(text)
		r = runes[0]
	}
	code := int64(r)
	return (*Integer)(big.NewInt(code)), nil
}
// Value returns the underlying big.Int.
func (self *Integer) Value() *big.Int {
	return (*big.Int)(self)
}

// Code treats this integer as a character code. Should be a method on
// a Code interface someday.
// NOTE(review): values outside the int64 range are truncated by Int64 —
// confirm that callers only use this for valid character codes.
func (self *Integer) Code() rune {
	i := (*big.Int)(self)
	return rune(i.Int64())
}

// String returns the decimal representation of the value.
func (self *Integer) String() string {
	return self.Value().String()
}

// Type identifies this term as an integer.
func (self *Integer) Type() int {
	return IntegerType
}

// Indicator returns the term indicator; for integers this is the value itself.
func (self *Integer) Indicator() string {
	return self.String()
}
// Unify attempts to unify this integer with b under the bindings e.
// Unification succeeds against a variable (delegated to the variable's own
// Unify) or against an equal integer; otherwise CantUnify is returned.
func (a *Integer) Unify(e Bindings, b Term) (Bindings, error) {
	if IsVariable(b) {
		return b.Unify(e, a)
	}
	if IsInteger(b) {
		if a.Value().Cmp(b.(*Integer).Value()) == 0 {
			return e, nil
		}
	}
	return e, CantUnify
}

// ReplaceVariables returns the term itself; integers contain no variables.
func (self *Integer) ReplaceVariables(env Bindings) Term {
	return self
}
// implement Number interface
func (self *Integer) Float64() float64 {
return float64(self.Value().Int64())
}
func (self *Integer) LosslessInt() (*big.Int, bool) {
return self.Value(), true
}
func (self *Integer) LosslessRat() (*big.Rat, bool) {
r := new(big.Rat).SetFrac(self.Value(), big.NewInt(1))
return r, true
} | term/integer.go | 0.741393 | 0.461988 | integer.go | starcoder |
package tests
import (
"testing"
"github.com/fnproject/fn_go/client/routes"
"github.com/fnproject/fn_go/models"
)
// AssertRouteMatches fails the test when any of the compared fields
// (Path, Image, Type, Format) differ between the expected and actual route.
func AssertRouteMatches(t *testing.T, expected *models.Route, got *models.Route) {
	if expected.Path != got.Path {
		t.Errorf("Route path mismatch. Expected: %v. Actual: %v", expected.Path, got.Path)
	}
	if expected.Image != got.Image {
		t.Errorf("Route image mismatch. Expected: %v. Actual: %v", expected.Image, got.Image)
	}
	// Fixed: this branch previously re-compared Image while reporting a
	// "type mismatch", so Type differences were never detected.
	if expected.Type != got.Type {
		t.Errorf("Route type mismatch. Expected: %v. Actual: %v", expected.Type, got.Type)
	}
	if expected.Format != got.Format {
		t.Errorf("Route format mismatch. Expected: %v. Actual: %v", expected.Format, got.Format)
	}
}
// PostRoute creates a route under the given app. On success the app name is
// recorded in createdApps so the corresponding app is deleted on teardown.
func (s *TestHarness) PostRoute(appName string, route *models.Route) (*routes.PostAppsAppRoutesOK, error) {
	cfg := &routes.PostAppsAppRoutesParams{
		App: appName,
		Body: &models.RouteWrapper{
			Route: route,
		},
		Context: s.Context,
	}
	ok, err := s.Client.Routes.PostAppsAppRoutes(cfg)
	if err == nil {
		// Remember the app so teardown can remove it
		s.createdApps[appName] = true
	}
	return ok, err
}

// BasicRoute returns a route populated from the harness defaults
// (format, path, image, type and timeouts).
func (s *TestHarness) BasicRoute() *models.Route {
	return &models.Route{
		Format: s.Format,
		Path: s.RoutePath,
		Image: s.Image,
		Type: s.RouteType,
		Timeout: &s.Timeout,
		IDLETimeout: &s.IdleTimeout,
	}
}
// GivenRouteExists creates a route using the specified arguments, failing the
// test if the creation fails; apps created this way are torn down when the
// test is complete.
func (s *TestHarness) GivenRouteExists(t *testing.T, appName string, route *models.Route) {
	_, err := s.PostRoute(appName, route)
	if err != nil {
		t.Fatalf("Expected route to be created, got %v", err)
	}
}

// RouteMustExist checks that a route exists, failing the test if it doesn't,
// and returns the route.
// NOTE(review): routePath[1:] strips the first character, so this assumes
// routePath always starts with "/"; an empty routePath would panic.
func (s *TestHarness) RouteMustExist(t *testing.T, appName string, routePath string) *models.Route {
	cfg := &routes.GetAppsAppRoutesRouteParams{
		App: appName,
		Route: routePath[1:],
		Context: s.Context,
	}
	routeResponse, err := s.Client.Routes.GetAppsAppRoutesRoute(cfg)
	if err != nil {
		t.Fatalf("Expected route %s %s to exist but got %v", appName, routePath, err)
	}
	return routeResponse.Payload.Route
}
// GivenRoutePatched applies a patch to a route, failing the test if this fails.
func (s *TestHarness) GivenRoutePatched(t *testing.T, appName, routeName string, rt *models.Route) {
	params := &routes.PatchAppsAppRoutesRouteParams{
		App:     appName,
		Route:   routeName,
		Context: s.Context,
		Body:    &models.RouteWrapper{Route: rt},
	}
	if _, err := s.Client.Routes.PatchAppsAppRoutesRoute(params); err != nil {
		t.Fatalf("Failed to patch route %s %s : %v", appName, routeName, err)
	}
}

// assertContainsRoute reports whether any route in the slice has the given path.
func assertContainsRoute(routeModels []*models.Route, expectedRoute string) bool {
	for i := range routeModels {
		if routeModels[i].Path == expectedRoute {
			return true
		}
	}
	return false
}
//PutRoute creates a route via PUT, tearing down any apps that are created when the test is complete
func (s *TestHarness) PutRoute(appName string, routePath string, route *models.Route) (*routes.PutAppsAppRoutesRouteOK, error) {
cfg := &routes.PutAppsAppRoutesRouteParams{
App: appName,
Context: s.Context,
Route: routePath,
Body: &models.RouteWrapper{
Route: route,
},
}
resp, err := s.Client.Routes.PutAppsAppRoutesRoute(cfg)
if err == nil {
s.createdApps[appName] = true
}
return resp, err
} | test/fn-api-tests/routes_api.go | 0.683947 | 0.405684 | routes_api.go | starcoder |
package gittest
import (
"bytes"
"crypto/rand"
"fmt"
"io"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitaly/internal/git"
"gitlab.com/gitlab-org/gitaly/internal/testhelper"
)
// TestDeltaIslands is based on the tests in
// https://github.com/git/git/blob/master/t/t5320-delta-islands.sh .
// It verifies that the given repack function keeps objects reachable only
// from non-standard refs out of the main delta island.
func TestDeltaIslands(t *testing.T, repoPath string, repack func() error) {
	// Create blobs that we expect Git to use delta compression on.
	blob1 := make([]byte, 100000)
	_, err := io.ReadFull(rand.Reader, blob1)
	require.NoError(t, err)
	blob2 := append(blob1, "\nblob 2"...)
	// Assume Git prefers the largest blob as the delta base.
	badBlob := append(blob2, "\nbad blob"...)
	blob1ID := commitBlob(t, repoPath, "refs/heads/branch1", blob1)
	blob2ID := commitBlob(t, repoPath, "refs/tags/tag2", blob2)
	// The bad blob will only be reachable via a non-standard ref. Because of
	// that it should be excluded from delta chains in the main island.
	badBlobID := commitBlob(t, repoPath, "refs/bad/ref3", badBlob)
	// So far we have created blobs and commits but they will be in loose
	// object files; we want them to be delta compressed. Run repack to make
	// that happen.
	testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "repack", "-ad")
	assert.Equal(t, badBlobID, deltaBase(t, repoPath, blob1ID), "expect blob 1 delta base to be bad blob after test setup")
	assert.Equal(t, badBlobID, deltaBase(t, repoPath, blob2ID), "expect blob 2 delta base to be bad blob after test setup")
	require.NoError(t, repack(), "repack after delta island setup")
	assert.Equal(t, blob2ID, deltaBase(t, repoPath, blob1ID), "blob 1 delta base should be blob 2 after repack")
	// blob2 is the bigger of the two so it should be the delta base
	assert.Equal(t, git.NullSHA, deltaBase(t, repoPath, blob2ID), "blob 2 should not be delta compressed after repack")
}
func commitBlob(t *testing.T, repoPath, ref string, content []byte) string {
hashObjectOut := testhelper.MustRunCommand(t, bytes.NewReader(content), "git", "-C", repoPath, "hash-object", "-w", "--stdin")
blobID := chompToString(hashObjectOut)
treeSpec := fmt.Sprintf("100644 blob %s\tfile\n", blobID)
mktreeOut := testhelper.MustRunCommand(t, strings.NewReader(treeSpec), "git", "-C", repoPath, "mktree")
treeID := chompToString(mktreeOut)
// No parent, that means this will be an initial commit. Not very
// realistic but it doesn't matter for delta compression.
commitTreeOut := testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "commit-tree", "-m", "msg", treeID)
commitID := chompToString(commitTreeOut)
testhelper.MustRunCommand(t, nil, "git", "-C", repoPath, "update-ref", ref, commitID)
return blobID
}
// deltaBase returns the object ID that blobID is delta-compressed against
// according to git cat-file's %(deltabase) placeholder.
func deltaBase(t *testing.T, repoPath string, blobID string) string {
	out := testhelper.MustRunCommand(t, strings.NewReader(blobID), "git", "-C", repoPath, "cat-file", "--batch-check=%(deltabase)")
	return chompToString(out)
}
// chompToString converts command output to a string, dropping a single
// trailing newline if present.
func chompToString(s []byte) string {
	out := string(s)
	return strings.TrimSuffix(out, "\n")
}
package swagger
const (
Lessondef = `{
"swagger": "2.0",
"info": {
"title": "api/exp/definitions/lessondef.proto",
"version": "version not set"
},
"schemes": [
"http",
"https"
],
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"paths": {
"/exp/lessondef": {
"post": {
"summary": "Retrieve all LessonDefs with filter",
"operationId": "ListLessonDefs",
"responses": {
"200": {
"description": "",
"schema": {
"$ref": "#/definitions/expLessonDefs"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/expLessonDefFilter"
}
}
],
"tags": [
"LessonDefService"
]
}
},
"/exp/lessondef/{id}": {
"get": {
"operationId": "GetLessonDef",
"responses": {
"200": {
"description": "",
"schema": {
"$ref": "#/definitions/expLessonDef"
}
}
},
"parameters": [
{
"name": "id",
"in": "path",
"required": true,
"type": "integer",
"format": "int32"
}
],
"tags": [
"LessonDefService"
]
}
}
},
"definitions": {
"expLessonDef": {
"type": "object",
"properties": {
"LessonId": {
"type": "integer",
"format": "int32"
},
"Stages": {
"type": "array",
"items": {
"$ref": "#/definitions/expLessonStage"
}
},
"LessonName": {
"type": "string"
}
}
},
"expLessonDefFilter": {
"type": "object",
"properties": {
"Category": {
"type": "string"
}
}
},
"expLessonDefs": {
"type": "object",
"properties": {
"lessondefs": {
"type": "array",
"items": {
"$ref": "#/definitions/expLessonDef"
}
},
"Category": {
"type": "string"
}
}
},
"expLessonStage": {
"type": "object",
"properties": {
"StageId": {
"type": "integer",
"format": "int32"
},
"Description": {
"type": "string"
}
}
}
}
}
`
Livelesson = `{
"swagger": "2.0",
"info": {
"title": "api/exp/definitions/livelesson.proto",
"version": "version not set"
},
"schemes": [
"http",
"https"
],
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"paths": {
"/*": {
"get": {
"operationId": "HealthCheck",
"responses": {
"200": {
"description": "",
"schema": {
"$ref": "#/definitions/expHealthCheckMessage"
}
}
},
"tags": [
"LiveLessonsService"
]
}
},
"/exp/livelesson": {
"post": {
"summary": "Request a lab is created, or request the UUID of one that already exists for these parameters.",
"operationId": "RequestLiveLesson",
"responses": {
"200": {
"description": "",
"schema": {
"$ref": "#/definitions/expLessonUUID"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/expLessonParams"
}
}
],
"tags": [
"LiveLessonsService"
]
}
},
"/exp/livelesson/{id}": {
"get": {
"summary": "Retrieve details about a lesson",
"operationId": "GetLiveLesson",
"responses": {
"200": {
"description": "",
"schema": {
"$ref": "#/definitions/expLiveLesson"
}
}
},
"parameters": [
{
"name": "id",
"in": "path",
"required": true,
"type": "string"
}
],
"tags": [
"LiveLessonsService"
]
}
},
"/exp/livelessonall": {
"get": {
"summary": "Retrieve all livelessons",
"operationId": "ListLiveLessons",
"responses": {
"200": {
"description": "",
"schema": {
"$ref": "#/definitions/expLiveLessonMap"
}
}
},
"tags": [
"LiveLessonsService"
]
}
}
},
"definitions": {
"EndpointEndpointType": {
"type": "string",
"enum": [
"UNKNOWN",
"DEVICE",
"IFRAME",
"BLACKBOX",
"UTILITY"
],
"default": "UNKNOWN",
"description": "This field helps the web client understand how to connect to this endpoint. Some might be done via SSH/Guacamole, others might be iframes, etc."
},
"expEndpoint": {
"type": "object",
"properties": {
"Name": {
"type": "string"
},
"Type": {
"$ref": "#/definitions/EndpointEndpointType"
},
"Host": {
"type": "string",
"description": "This will contain a ClusterIP for SSH endpoints, so we don't need to allocate a public IP for them. If an IFRAME,\nthis will get set to the FQDN needed to connect to the external IP allocated for it."
},
"Port": {
"type": "integer",
"format": "int32"
},
"IframeDetails": {
"$ref": "#/definitions/expIFDetails"
},
"Sshuser": {
"type": "string"
},
"Sshpassword": {
"type": "string"
}
}
},
"expHealthCheckMessage": {
"type": "object"
},
"expIFDetails": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"Protocol": {
"type": "string"
},
"URI": {
"type": "string"
},
"Port": {
"type": "integer",
"format": "int32"
}
}
},
"expLessonParams": {
"type": "object",
"properties": {
"lessonId": {
"type": "integer",
"format": "int32"
},
"sessionId": {
"type": "string"
},
"lessonStage": {
"type": "integer",
"format": "int32"
}
}
},
"expLessonUUID": {
"type": "object",
"properties": {
"id": {
"type": "string"
}
}
},
"expLessontoUUIDMap": {
"type": "object",
"properties": {
"Uuids": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/expUUIDtoLiveLessonMap"
}
}
}
},
"expLiveLesson": {
"type": "object",
"properties": {
"LessonUUID": {
"type": "string"
},
"LessonId": {
"type": "integer",
"format": "int32"
},
"Endpoints": {
"type": "array",
"items": {
"$ref": "#/definitions/expEndpoint"
}
},
"LessonStage": {
"type": "integer",
"format": "int32"
},
"LabGuide": {
"type": "string"
},
"Ready": {
"type": "boolean",
"format": "boolean"
},
"createdTime": {
"type": "string",
"format": "date-time"
},
"sessionId": {
"type": "string"
},
"LessonDiagram": {
"type": "string"
},
"LessonVideo": {
"type": "string"
},
"Error": {
"type": "boolean",
"format": "boolean"
}
},
"description": "A provisioned lab without the scheduler details. The server will translate from an underlying type\n(i.e. KubeLab) into this, so only the abstract, relevant details are presented."
},
"expLiveLessonMap": {
"type": "object",
"properties": {
"Sessions": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/expLessontoUUIDMap"
}
}
}
},
"expUUIDtoLiveLessonMap": {
"type": "object",
"properties": {
"Livelessons": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/expLiveLesson"
}
}
}
}
}
}
`
Syringeinfo = `{
"swagger": "2.0",
"info": {
"title": "api/exp/definitions/syringeinfo.proto",
"version": "version not set"
},
"schemes": [
"http",
"https"
],
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"paths": {
"/exp/syringeinfo": {
"get": {
"operationId": "GetSyringeInfo",
"responses": {
"200": {
"description": "",
"schema": {
"$ref": "#/definitions/expSyringeInfo"
}
}
},
"tags": [
"SyringeInfoService"
]
}
}
},
"definitions": {
"expSyringeInfo": {
"type": "object",
"properties": {
"buildSha": {
"type": "string"
},
"antidoteSha": {
"type": "string"
}
}
}
}
}
`
)
package imagic
import (
"image"
"image/color"
)
// Config controls autostereogram generation.
type Config struct {
	// SeparationMin and SeparationMax bound the pixel separation between
	// repeated background features; depth is mapped linearly between them
	// (direction depends on CrossEyed — see sourceOffset).
	SeparationMin, SeparationMax int
	// CrossEyed selects cross-eyed viewing; false means wall-eyed.
	CrossEyed bool
	// InvertDepth flips the depth map (near becomes far).
	InvertDepth bool
}
// Imagic creates an autostereogram from depth map dm drawn with background
// image bg. It returns nil when either input image is nil.
func Imagic(dm, bg image.Image, config Config) image.Image {
	out := newMutableImage(dm, bg)
	if out == nil {
		return nil
	}
	b := dm.Bounds()
	for y := b.Min.Y; y < b.Max.Y; y++ {
		out.imageRows[y] = magicInflateRow(dm, bg, config, y)
	}
	return out
}
// magicInflateRow renders one output row y of the autostereogram.
// For each output pixel it computes which earlier pixel should repeat
// (from the depth map), resolves those references into background column
// indexes, and samples the background row to produce colors.
func magicInflateRow(dm, bg image.Image, config Config, y int) imageRow {
	dmWidth := boundsWidth(dm.Bounds())
	bgWidth := boundsWidth(bg.Bounds())
	dmHeight := boundsHeight(dm.Bounds())
	bgHeight := boundsHeight(bg.Bounds())
	// Map the output row onto the (possibly differently sized) background.
	bgY := y * bgHeight / dmHeight
	var sourceIndexes = make([]int, dmWidth)
	// Find desired index of pixel to the left.
	for x := 0; x < len(sourceIndexes); x++ {
		depth := depthAt(dm, x, y, config)
		offset := sourceOffset(depth, config)
		sourceIndexes[x] = x - int(offset)
	}
	// Skip initial consecutive places that reference negative-indexed pixels.
	// Bug fix: bound the scan so a row whose references are all negative
	// cannot index past the end of the slice.
	initialWidth := 0
	for initialWidth < dmWidth && sourceIndexes[initialWidth] < 0 {
		initialWidth++
	}
	// Bug fix: guard the division — initialWidth can be zero when the very
	// first pixel already has a non-negative source index.
	bgIndexStep := 0
	if initialWidth > 0 {
		bgIndexStep = bgWidth / initialWidth
	}
	// Map background onto the first section on left.
	var bgIndexes = make([]int, dmWidth)
	for x := 0; x < initialWidth; x++ {
		bgIndexes[x] = x * bgIndexStep
	}
	// For the rest, copy pixel index from left to right.
	var usedBgIndexes = make([]bool, dmWidth)
	for x := initialWidth; x < len(bgIndexes); x++ {
		if si := sourceIndexes[x]; si < 0 {
			// If the source index is negative, just use the next bg pixel.
			bgIndexes[x] = bgIndexes[x-1] + 1
		} else if usedBgIndexes[si] && !config.CrossEyed {
			// This removes some phantom artifacts for wall-eyed viewing.
			// Previous pixels that have been used for the left eye previously
			// should not be used again here. That would make this right
			// eye and the previous right eye compete for the same left pixel.
			// This means the brain can interpret two different depths,
			// depending on which pixel the right eye chooses.
			bgIndexes[x] = bgIndexes[x-1] + 1 // bgIndexStep
		} else {
			bgIndexes[x] = bgIndexes[si]
			usedBgIndexes[si] = true
		}
	}
	row := imageRow{colors: make([]color.Color, dmWidth)}
	for x := 0; x < dmWidth; x++ {
		bgX := bgIndexes[x] // TODO(cartland): Check index bounds.
		row.colors[x] = bg.At(bgX, bgY)
	}
	return row
}
func boundsWidth(bounds image.Rectangle) int {
return bounds.Max.X - bounds.Min.X
}
func boundsHeight(bounds image.Rectangle) int {
return bounds.Max.Y - bounds.Min.Y
}
// depthMax is the resolution of the quantized depth scale.
var depthMax = uint32(3000)

// depthAt reads the depth at (x, y) from the depth map as a value in
// [0, depthMax]: the RGB channels are averaged, scaled by alpha, and
// optionally inverted per config.
func depthAt(dm image.Image, x, y int, config Config) uint32 {
	r, g, b, a := dm.At(x, y).RGBA()
	gray := (r + g + b) / 3       // [0, 0xFFFF]
	gray = gray * a / 0xFFFF      // apply alpha: [0, 0xFFFF]
	d := gray * depthMax / 0xFFFF // quantize: [0, depthMax]
	if config.InvertDepth {
		return depthMax - d
	}
	return d
}
// sourceOffset converts a depth value into the pixel separation between an
// output pixel and the earlier pixel it repeats. Cross-eyed viewing grows
// the separation with depth; wall-eyed viewing shrinks it.
// NOTE(review): assumes 0 <= SeparationMin <= SeparationMax; negative
// values would wrap in the uint32 conversions — confirm with callers.
func sourceOffset(depth uint32, config Config) uint32 {
	span := config.SeparationMax - config.SeparationMin
	scaled := depth * uint32(span) / depthMax
	if config.CrossEyed {
		return uint32(config.SeparationMin) + scaled
	}
	return uint32(config.SeparationMax) - scaled
}
// newMutableImage allocates the writable result image: the depth map's
// bounds with the background's color model. Returns nil if either input
// is nil.
func newMutableImage(dm, bg image.Image) *mutableImage {
	if dm == nil || bg == nil {
		return nil
	}
	b := dm.Bounds()
	// NOTE(review): rows are sized by Max.Y, not Dy() — assumes the depth
	// map's Min.Y is >= 0; confirm.
	return &mutableImage{
		cm:        bg.ColorModel(),
		bounds:    b,
		imageRows: make([]imageRow, b.Max.Y),
	}
}
// mutableImage is a minimal image.Image implementation whose pixel rows
// are filled in one at a time during generation.
type mutableImage struct {
	cm        color.Model
	bounds    image.Rectangle
	imageRows []imageRow
}

// ColorModel implements image.Image.
func (i *mutableImage) ColorModel() color.Model {
	return i.cm
}

// Bounds implements image.Image.
func (i *mutableImage) Bounds() image.Rectangle {
	return i.bounds
}

// At implements image.Image. It assumes (x, y) lies inside the populated
// rows; out-of-range coordinates panic.
func (i *mutableImage) At(x, y int) color.Color {
	return i.imageRows[y].colors[x]
}
// imageRow holds the colors of one horizontal line of the result image.
type imageRow struct {
	colors []color.Color
} | imagic/imagic.go | 0.744006 | 0.471527 | imagic.go | starcoder |
package sizestr
import (
"errors"
"math"
"regexp"
"strconv"
"strings"
)
// scaleStrings holds string representations of each scale, smallest first.
var scaleStrings = []string{"B", "KB", "MB", "GB", "TB", "PB", "XB"}

// parseRegexp matches a size string: a decimal value, an optional scale
// letter (k/m/g/t/p/x), an optional "i" (binary units), and an optional "b".
var parseRegexp = regexp.MustCompile(
	// byte value[1]       scales[4] 1024[5]? B
	`(?i)\b(\d+(\.(\d+))?)(k|m|g|t|p|x)?(i?)(b?)\b`,
)

// lowerCase controls the case of the scale suffix in formatted output.
var lowerCase = false

// Default bytes per kilobyte (decimal variant).
var defaultBytesPerKB = float64(1000)

// Default bytes per kibibyte (binary variant, used for "KiB"-style input).
var defaultBytesPerKiB = float64(1024)

// Default number of Significant Figures
var defaultSigFigures = float64(3) //must 10^SigFigures >= Scale
// ToggleCase changes the case of the scale strings ("MB" -> "mb").
func ToggleCase() {
	lowerCase = !lowerCase
}

// UpperCase makes formatted scale suffixes upper-case ("MB").
func UpperCase() {
	lowerCase = false
}
// LowerCase makes formatted scale suffixes lower-case ("mb").
func LowerCase() {
	// Bug fix: this previously assigned false, making LowerCase an exact
	// duplicate of UpperCase instead of enabling lower-case output.
	lowerCase = true
}
// ToString converts a byte count into a human-readable byte string using
// the default significant figures and decimal (1000-based) scaling.
func ToString(n int64) string {
	return ToStringSigBytesPerKB(n, defaultSigFigures, defaultBytesPerKB)
}

// MustParse parses a size string into a byte count, panicking on failure.
// The scale base (1000 vs 1024) is auto-detected from an "i" infix.
func MustParse(s string) int64 {
	i, err := ParseBytesPerKB(s, 0 /*autodetect*/)
	if err != nil {
		panic(err)
	}
	return i
}

// Parse parses a size string into a byte count. The scale base
// (1000 vs 1024) is auto-detected from an "i" infix.
func Parse(s string) (int64, error) {
	return ParseBytesPerKB(s, 0 /*autodetect*/)
}
// ParseBytesPerKB parses a size string (e.g. "1.5MB", "2GiB", "42") into a
// byte count. bytesPerKB fixes the multiplier per scale step; pass 0 (or a
// negative value) to auto-detect: 1024 when the string contains an "i" as
// in "KiB", otherwise 1000.
func ParseBytesPerKB(s string, bytesPerKB int64) (int64, error) {
	//0 doesn't need a scale
	if s == "0" {
		return 0, nil
	}
	m := parseRegexp.FindStringSubmatch(s)
	if len(m) == 0 {
		return 0, errors.New("parse failed")
	}
	// m[1] = numeric value, m[4] = scale letter, m[5] = "i", m[6] = "b".
	v, err := strconv.ParseFloat(m[1], 64)
	if err != nil {
		return 0, errors.New("parse float error")
	}
	// Choose the per-step multiplier: explicit argument wins, otherwise
	// auto-detect binary ("i") vs decimal units.
	var bytesPer float64
	if bytesPerKB > 0 {
		bytesPer = float64(bytesPerKB)
	} else {
		if strings.ToLower(m[5]) == "i" {
			bytesPer = float64(defaultBytesPerKiB)
		} else {
			bytesPer = float64(defaultBytesPerKB)
		}
	}
	// Multiply once per scale step below the matched suffix (e.g. "MB"
	// applies bytesPer twice). NOTE(review): a scale letter without a
	// trailing "b" (e.g. "5k") is not scaled at all — confirm intended.
	if strings.ToLower(m[6]) == "b" {
		scale := strings.ToUpper(m[4] + "b")
		for _, s := range scaleStrings {
			if scale == s {
				break
			}
			v *= bytesPer
		}
	}
	i := int64(v)
	// A negative result here means the float-to-int conversion wrapped.
	if i < 0 {
		return 0, errors.New("int64 overflow")
	}
	return i, nil
}
// ToStringSig converts a byte count into a byte string with the given
// number of significant figures, using decimal (1000-based) scaling.
func ToStringSig(n int64, sig float64) string {
	return ToStringSigBytesPerKB(n, sig, defaultBytesPerKB)
}
// ToStringSigBytesPerKB converts a byte count into a byte string with the
// given number of significant figures and bytes-per-step scale base.
func ToStringSigBytesPerKB(n int64, sig, bytesPerKB float64) string {
	var f = float64(n)
	var i int
	// Divide down until the value fits under one scale step; i tracks the
	// matching suffix in scaleStrings.
	for i = range scaleStrings {
		if f < bytesPerKB {
			break
		}
		f = f / bytesPerKB
	}
	f = ToPrecision(f, sig)
	// Rounding to sig figures may push the value back up to exactly one
	// scale step (e.g. 999.5 -> 1000); promote to the next suffix when one
	// exists. Bug fix: the i+1 access was previously unguarded and could
	// panic for values at the top of the largest scale.
	if f == bytesPerKB && i+1 < len(scaleStrings) {
		return strconv.FormatFloat(f/bytesPerKB, 'f', 0, 64) + toCase(scaleStrings[i+1])
	}
	return strconv.FormatFloat(f, 'f', -1, 64) + toCase(scaleStrings[i])
}
// log10 is ln(10), cached for the power-of-ten computation below.
var log10 = math.Log(10)

// ToPrecision rounds n to p significant figures, mirroring JavaScript's
// Number.prototype.toPrecision.
func ToPrecision(n, p float64) float64 {
	// credits http://stackoverflow.com/a/12055126/977939
	if n == 0 {
		return 0
	}
	exp := math.Floor(math.Log10(math.Abs(n)))
	scale := round(math.Exp(math.Abs(exp-p+1) * log10))
	if exp-p+1 < 0 {
		return round(n*scale) / scale
	}
	return round(n/scale) * scale
}

// round rounds to the nearest integer, with halves rounding up
// (toward +Inf).
func round(n float64) float64 {
	return math.Floor(n + 0.5)
}
// toCase applies the package-level case setting to a scale suffix.
func toCase(s string) string {
	if lowerCase {
		return strings.ToLower(s)
	}
	return s
} | Godeps/_workspace/src/github.com/jpillora/sizestr/sizestr.go | 0.618435 | 0.4133 | sizestr.go | starcoder |
package core
// NewFlightData returns an empty FlightDataConcrete with zero base
// pressure and a zero origin coordinate.
func NewFlightData() FlightDataConcrete {
	return FlightDataConcrete{0, make([]DataSegment, 0), Coordinate{}}
}

// AppendData appends segments to the recorded flight data.
func (f *FlightDataConcrete) AppendData(segments []DataSegment) {
	f.Segments = append(f.Segments, segments...)
}

// SetBasePressure records the reference (base) pressure.
func (f *FlightDataConcrete) SetBasePressure(bp float64) {
	f.Base = bp
}

// SetOrigin records the origin coordinate.
func (f *FlightDataConcrete) SetOrigin(coord Coordinate) {
	f.OriginCoordinate = coord
}

// AllSegments returns every recorded data segment.
func (f *FlightDataConcrete) AllSegments() []DataSegment {
	return f.Segments
}

// BasePressure returns the reference pressure set via SetBasePressure.
func (f *FlightDataConcrete) BasePressure() float64 {
	return f.Base
}

// SmoothedAltitude returns one smoothed altitude value per segment.
func (f *FlightDataConcrete) SmoothedAltitude() []float64 {
	return singleFlightDataElement(f, func(segment DataSegment) float64 {
		return segment.Computed.SmoothedAltitude
	})
}

// SmoothedVelocity returns one smoothed velocity value per segment.
func (f *FlightDataConcrete) SmoothedVelocity() []float64 {
	return singleFlightDataElement(f, func(segment DataSegment) float64 {
		return segment.Computed.SmoothedVelocity
	})
}

// SmoothedTemperature returns one smoothed temperature value per segment.
func (f *FlightDataConcrete) SmoothedTemperature() []float64 {
	return singleFlightDataElement(f, func(segment DataSegment) float64 {
		return segment.Computed.SmoothedTemperature
	})
}

// SmoothedPressure returns one smoothed pressure value per segment.
func (f *FlightDataConcrete) SmoothedPressure() []float64 {
	return singleFlightDataElement(f, func(segment DataSegment) float64 {
		return segment.Computed.SmoothedPressure
	})
}

// GpsQuality returns the raw GPS fix quality per segment.
func (f *FlightDataConcrete) GpsQuality() []float64 {
	return singleFlightDataElement(f, func(segment DataSegment) float64 {
		return segment.Raw.GPSInfo.Quality
	})
}

// GpsSats returns the raw GPS satellite count per segment.
func (f *FlightDataConcrete) GpsSats() []float64 {
	return singleFlightDataElement(f, func(segment DataSegment) float64 {
		return segment.Raw.GPSInfo.Sats
	})
}

// Time returns the raw timestamp per segment.
func (f *FlightDataConcrete) Time() []float64 {
	return singleFlightDataElement(f, func(segment DataSegment) float64 {
		return segment.Raw.Timestamp
	})
}

// Rssi returns the raw radio signal strength per segment.
func (f *FlightDataConcrete) Rssi() []float64 {
	return singleFlightDataElement(f, func(segment DataSegment) float64 {
		return float64(segment.Raw.Rssi)
	})
}

// Origin returns the coordinate set via SetOrigin.
func (f *FlightDataConcrete) Origin() Coordinate {
	return f.OriginCoordinate
}

// FlightModes returns the computed flight mode of every segment.
func (f *FlightDataConcrete) FlightModes() []FlightMode {
	data := make([]FlightMode, len(f.AllSegments()))
	for i, segment := range f.AllSegments() {
		data[i] = segment.Computed.FlightMode
	}
	return data
} | ground/core/flight_data.go | 0.821868 | 0.598782 | flight_data.go | starcoder |
package geometry
import (
"math"
"github.com/tab58/v1/spatial/pkg/numeric"
"gonum.org/v1/gonum/blas"
"gonum.org/v1/gonum/blas/blas64"
)
// Vector3DReader is a read-only interface for a 3D vector.
//
// NOTE(review): MatrixTransform3D and HomogeneousMatrixTransform4D mutate
// the underlying vector yet are declared on this "read-only" interface —
// consider moving them to Vector3DWriter.
type Vector3DReader interface {
	GetX() float64
	GetY() float64
	GetZ() float64
	GetComponents() (float64, float64, float64)

	Length() (float64, error)
	LengthSquared() (float64, error)

	Clone() *Vector3D
	ToBlasVector() blas64.Vector
	GetNormalizedVector() *Vector3D

	IsZeroLength(tol float64) (bool, error)
	IsUnitLength(tol float64) (bool, error)

	AngleTo(w Vector3DReader) (float64, error)
	Dot(w Vector3DReader) (float64, error)
	Cross(w Vector3DReader) (*Vector3D, error)

	IsPerpendicularTo(w Vector3DReader, tol float64) (bool, error)
	IsCodirectionalTo(w Vector3DReader, tol float64) (bool, error)
	IsParallelTo(w Vector3DReader, tol float64) (bool, error)
	IsEqualTo(w Vector3DReader, tol float64) (bool, error)

	MatrixTransform3D(m *Matrix3D) error
	HomogeneousMatrixTransform4D(m *Matrix4D) error
}

// Vector3DWriter is a write-only interface for a 3D vector.
type Vector3DWriter interface {
	SetX(float64)
	SetY(float64)
	SetZ(float64)

	Negate()
	Add(w Vector3DReader) error
	Sub(w Vector3DReader) error
	Normalize() error
	Scale(f float64) error
	RotateBy(axis Vector3DReader, angleRad float64) error
}
// XAxis3D represents the canonical Cartesian x-axis in 3 dimensions.
var XAxis3D Vector3DReader = &Vector3D{X: 1, Y: 0, Z: 0}

// YAxis3D represents the canonical Cartesian y-axis in 3 dimensions.
var YAxis3D Vector3DReader = &Vector3D{X: 0, Y: 1, Z: 0}

// ZAxis3D represents the canonical Cartesian z-axis in 3 dimensions.
// Bug fix: this was previously {0, 1, 1}, which is neither the z-axis nor
// a unit vector.
var ZAxis3D Vector3DReader = &Vector3D{X: 0, Y: 0, Z: 1}

// Zero3D represents the zero vector in the 3D plane.
var Zero3D Vector3DReader = &Vector3D{X: 0, Y: 0, Z: 0}
// Vector3D is a representation of a vector in 3 dimensions.
type Vector3D struct {
	X float64
	Y float64
	Z float64
}

// GetX returns the x-coordinate of the vector.
func (v *Vector3D) GetX() float64 {
	return v.X
}

// GetY returns the y-coordinate of the vector.
func (v *Vector3D) GetY() float64 {
	return v.Y
}

// GetZ returns the z-coordinate of the vector.
func (v *Vector3D) GetZ() float64 {
	return v.Z
}

// GetComponents returns the (x, y, z) components of the vector.
func (v *Vector3D) GetComponents() (x, y, z float64) {
	return v.GetX(), v.GetY(), v.GetZ()
}

// SetX sets the x-coordinate of the vector.
// NOTE(review): the parameter is named z here (and in SetY) — harmless but
// confusing; consider renaming.
func (v *Vector3D) SetX(z float64) {
	v.X = z
}

// SetY sets the y-coordinate of the vector.
func (v *Vector3D) SetY(z float64) {
	v.Y = z
}

// SetZ sets the z-coordinate of the vector.
func (v *Vector3D) SetZ(z float64) {
	v.Z = z
}

// SetComponents sets all three components of the vector.
func (v *Vector3D) SetComponents(x, y, z float64) {
	v.SetX(x)
	v.SetY(y)
	v.SetZ(z)
}

// ToBlasVector returns a dense BLAS vector view of the components
// (a copy — mutating it does not affect v).
func (v *Vector3D) ToBlasVector() blas64.Vector {
	return blas64.Vector{
		N:    3,
		Data: []float64{v.X, v.Y, v.Z},
		Inc:  1,
	}
}

// Length computes the Euclidean length of the vector using nested
// hypot-style (overflow-resistant) norms.
func (v *Vector3D) Length() (float64, error) {
	x, y, z := v.GetComponents()
	r := numeric.Nrm2(numeric.Nrm2(x, y), z)
	if numeric.AreAnyOverflow(r) {
		return 0, numeric.ErrOverflow
	}
	return r, nil
}

// LengthSquared computes the squared length of the vector.
func (v *Vector3D) LengthSquared() (float64, error) {
	x, y, z := v.GetComponents()
	r := x*x + y*y + z*z
	if numeric.IsOverflow(r) {
		return 0, numeric.ErrOverflow
	}
	return r, nil
}

// Clone creates a new Vector3D with the same component values.
func (v *Vector3D) Clone() *Vector3D {
	return &Vector3D{
		X: v.GetX(),
		Y: v.GetY(),
		Z: v.GetZ(),
	}
}

// GetNormalizedVector gets the unit vector codirectional to this vector.
// NOTE(review): the Normalize error is discarded — for a zero-length input
// this silently returns an unnormalized clone; confirm intended.
func (v *Vector3D) GetNormalizedVector() *Vector3D {
	w := v.Clone()
	w.Normalize()
	return w
}

// IsZeroLength returns true if the vector is of zero length (within a
// tolerance), false if not.
func (v *Vector3D) IsZeroLength(tol float64) (bool, error) {
	if numeric.IsInvalidTolerance(tol) {
		return false, numeric.ErrInvalidTol
	}
	return v.IsEqualTo(Zero3D, tol)
}

// IsUnitLength returns true if the vector is equal to its normalized self
// within the given tolerance, false if not.
func (v *Vector3D) IsUnitLength(tol float64) (bool, error) {
	if numeric.IsInvalidTolerance(tol) {
		return false, numeric.ErrInvalidTol
	}
	vv := v.Clone()
	vv.Normalize()
	return v.IsEqualTo(vv, tol)
}
// AngleTo gets the angle in radians between this vector and another
// vector, computed via Kahan's numerically stable formula
// 2*atan2(|‖v‖u − ‖u‖v|, |‖v‖u + ‖u‖v|).
func (v *Vector3D) AngleTo(u Vector3DReader) (float64, error) {
	// code based on Kahan's formula for angles between 3D vectors
	// https://people.eecs.berkeley.edu/~wkahan/Mindless.pdf, see Mangled Angles section
	lv, err := v.Length()
	if err != nil {
		return 0, err
	}
	lu, err := u.Length()
	if err != nil {
		return 0, err
	}

	// nVu = ‖v‖·u, nUv = ‖u‖·v — both sides scaled to the same magnitude.
	nVu := u.Clone()
	err = nVu.Scale(lv)
	if err != nil {
		return 0, err
	}
	nUv := v.Clone()
	err = nUv.Scale(lu)
	if err != nil {
		return 0, err
	}

	// Y = norm(v) * u - norm(u) * v
	Y := nVu.Clone()
	Y.Sub(nUv)

	// X = norm(v) * u + norm(u) * v
	X := nVu.Clone()
	X.Add(nUv)

	ay, err := Y.Length()
	if err != nil {
		return 0, err
	}
	ax, err := X.Length()
	if err != nil {
		return 0, err
	}
	return 2 * math.Atan2(ay, ax), nil
}

// Dot computes the dot product between this vector and another
// Vector3DReader.
func (v *Vector3D) Dot(w Vector3DReader) (float64, error) {
	ax, ay, az := v.GetComponents()
	bx, by, bz := w.GetComponents()

	r := ax*bx + ay*by + az*bz
	if numeric.AreAnyOverflow(r) {
		return 0, numeric.ErrOverflow
	}
	return r, nil
}

// Cross computes the cross product v × w and returns it as a new vector.
func (v *Vector3D) Cross(w Vector3DReader) (*Vector3D, error) {
	ax, ay, az := v.GetComponents()
	bx, by, bz := w.GetComponents()

	ux := ay*bz - az*by
	uy := az*bx - ax*bz
	uz := ax*by - ay*bx

	if numeric.AreAnyOverflow(ux, uy, uz) {
		return nil, numeric.ErrOverflow
	}
	cross := &Vector3D{
		X: ux,
		Y: uy,
		Z: uz,
	}
	return cross, nil
}

// IsEqualTo returns true if the vector components are equal within a
// tolerance of each other, false if not.
func (v *Vector3D) IsEqualTo(w Vector3DReader, tol float64) (bool, error) {
	if numeric.IsInvalidTolerance(tol) {
		return false, numeric.ErrInvalidTol
	}
	vx, vy, vz := v.GetComponents()
	wx, wy, wz := w.GetComponents()

	x := math.Abs(wx - vx)
	y := math.Abs(wy - vy)
	z := math.Abs(wz - vz)

	isEqual := x <= tol && y <= tol && z <= tol
	return isEqual, nil
}

// IsParallelTo returns true if the vector is in the direction (either same
// or opposite) of the given vector within the given tolerance, false if not.
// NOTE(review): Normalize errors on vv/ww are discarded — a zero-length
// input is compared unnormalized; confirm intended.
func (v *Vector3D) IsParallelTo(w Vector3DReader, tol float64) (bool, error) {
	if numeric.IsInvalidTolerance(tol) {
		return false, numeric.ErrInvalidTol
	}
	vv := v.Clone()
	vv.Normalize()
	ww := w.Clone()
	ww.Normalize()

	D, err := vv.Dot(ww)
	if err != nil {
		return false, err
	}
	d, err := numeric.Signum(D)
	if err != nil {
		return false, err
	}
	if d == 0 {
		return false, nil
	}

	err = vv.Scale(float64(d)) // flips vv in the direction into the ww
	if err != nil {
		return false, err
	}
	return vv.IsEqualTo(ww, tol)
}

// IsPerpendicularTo returns true if the vector is perpendicular to the
// given vector within the given tolerance (the normalized dot product is
// within tol of zero), false if not.
// (The previous comment incorrectly described this as a same-direction
// test.)
func (v *Vector3D) IsPerpendicularTo(w Vector3DReader, tol float64) (bool, error) {
	if numeric.IsInvalidTolerance(tol) {
		return false, numeric.ErrInvalidTol
	}
	vv := v.Clone()
	vv.Normalize()
	ww := w.Clone()
	ww.Normalize()

	d, err := vv.Dot(ww)
	if err != nil {
		return false, err
	}
	return math.Abs(d) <= tol, nil
}

// IsCodirectionalTo returns true if the vector is pointed in the same
// direction as the given vector within the given tolerance, false if not.
func (v *Vector3D) IsCodirectionalTo(w Vector3DReader, tol float64) (bool, error) {
	if numeric.IsInvalidTolerance(tol) {
		return false, numeric.ErrInvalidTol
	}
	vv := v.Clone()
	vv.Normalize()
	ww := w.Clone()
	ww.Normalize()
	return vv.IsEqualTo(ww, tol)
}
// Negate flips the sign of every component of v in place.
func (v *Vector3D) Negate() {
	x, y, z := v.GetComponents()
	v.SetComponents(-x, -y, -z)
}
// Add adds the given displacement vector to v in place. It returns
// numeric.ErrOverflow (leaving v unchanged) when any resulting component
// overflows.
func (v *Vector3D) Add(w Vector3DReader) error {
	ax, ay, az := v.GetComponents()
	bx, by, bz := w.GetComponents()

	sx, sy, sz := ax+bx, ay+by, az+bz
	if numeric.AreAnyOverflow(sx, sy, sz) {
		return numeric.ErrOverflow
	}
	v.SetComponents(sx, sy, sz)
	return nil
}
// Sub subtracts the given displacement vector from this vector in place.
// Returns numeric.ErrOverflow (leaving v unchanged) on component overflow.
func (v *Vector3D) Sub(w Vector3DReader) error {
	vx, vy, vz := v.GetComponents()
	wx, wy, wz := w.GetComponents()

	newX := vx - wx
	newY := vy - wy
	newZ := vz - wz

	if numeric.AreAnyOverflow(newX, newY, newZ) {
		return numeric.ErrOverflow
	}
	v.SetComponents(newX, newY, newZ)
	return nil
}

// Normalize scales the vector to unit length in place. A zero-length
// vector returns numeric.ErrDivideByZero and is left unchanged.
func (v *Vector3D) Normalize() error {
	x, y, z := v.GetComponents()
	l, err := v.Length()
	if err != nil {
		return err
	}
	if math.Abs(l) == 0 {
		return numeric.ErrDivideByZero
	}

	newX := x / l
	newY := y / l
	newZ := z / l

	if numeric.AreAnyOverflow(newX, newY, newZ) {
		return numeric.ErrOverflow
	}
	v.SetComponents(newX, newY, newZ)
	return nil
}

// Scale scales the vector by the given factor in place. NaN factors are
// rejected with numeric.ErrInvalidArgument.
func (v *Vector3D) Scale(f float64) error {
	if math.IsNaN(f) {
		return numeric.ErrInvalidArgument
	}
	x, y, z := v.GetComponents()
	newX := x * f
	newY := y * f
	newZ := z * f

	if numeric.AreAnyOverflow(newX, newY, newZ) {
		return numeric.ErrOverflow
	}
	v.SetComponents(newX, newY, newZ)
	return nil
}

// MatrixTransform3D transforms this vector in place by left-multiplying
// the given matrix (v = m·v).
// NOTE(review): near-singular matrices are rejected here even though a
// plain vector transform does not require invertibility — confirm this
// restriction is intended.
func (v *Vector3D) MatrixTransform3D(m *Matrix3D) error {
	isSingular, err := m.IsNearSingular(1e-12)
	if err != nil {
		return err
	}
	if isSingular {
		return numeric.ErrSingularMatrix
	}

	vv := v.ToBlasVector()
	mm := m.ToBlas64General()
	// uu receives the product; its Data slice is shared with Gemv's output.
	uu := blas64.Vector{
		N:    3,
		Data: []float64{0, 0, 0},
		Inc:  1,
	}
	blas64.Gemv(blas.NoTrans, 1, mm, vv, 0, uu)

	newX := uu.Data[0]
	newY := uu.Data[1]
	newZ := uu.Data[2]

	if numeric.AreAnyOverflow(newX, newY, newZ) {
		return numeric.ErrOverflow
	}
	v.SetComponents(newX, newY, newZ)
	return nil
}
// HomogeneousMatrixTransform4D transforms this vector by left-multiplying
// the given 4x4 matrix by the homogeneous vector (x, y, z, 1) and then
// projecting the result back into 3D space (dividing by the homogeneous
// coordinate w).
func (v *Vector3D) HomogeneousMatrixTransform4D(m *Matrix4D) error {
	u := &Vector4D{X: v.X, Y: v.Y, Z: v.Z, W: 1.0}
	err := u.MatrixTransform4D(m)
	if err != nil {
		return err
	}

	ux, uy, uz, uw := u.GetComponents()
	// Bug fix: the check was inverted (uw != 0), which rejected every valid
	// transform and fell through to a division by zero for invalid ones.
	// Projection requires a nonzero homogeneous coordinate.
	if uw == 0 {
		return numeric.ErrDivideByZero
	}
	newX := ux / uw
	newY := uy / uw
	newZ := uz / uw

	if numeric.AreAnyOverflow(newX, newY, newZ) {
		return numeric.ErrOverflow
	}
	v.SetComponents(newX, newY, newZ)
	return nil
} | pkg/geometry/vector3d.go | 0.883563 | 0.70304 | vector3d.go | starcoder |
package win3cards
// HandCard is a three-card hand. Each card is encoded as suit*100 + rank
// (rank is read with %100, suit with /100 throughout this file).
type HandCard struct {
	Cards [3]int `json:"cards"`
	// v is a version tag. NOTE(review): the json tag on this unexported
	// field is inert — encoding/json ignores unexported fields.
	v string `json:"v"`
}

// Version returns the hand's version tag.
func (self HandCard) Version() string {
	return self.v
}

// Compare reports whether h1 scores strictly higher than h2.
func Compare(h1 HandCard, h2 HandCard) bool {
	return h1.Score() > h2.Score()
}
// Score returns the hand's strength: a category bonus (leopard 500, royal
// flush 400, flush 300, straight 200, pair 100, high card 0) plus the rank
// sum from baseScore, all multiplied by 10.
func (self HandCard) Score() (score int) {
	if self.isLeopard() {
		score = 500
	} else if self.isRoyalFlush() {
		score = 400
	} else if self.isFlush() {
		score = 300
	} else if self.isStraight() {
		score = 200
	} else if self.isPair() {
		score = 100
	} else {
		score = 0
	}
	return (score + self.baseScore()) * 10
}
// baseScore sums the three ranks. For the ace-low straight (A-2-3, ranks
// sorting to 2/3/14) the ace counts as 1, so the sum is reduced by 13.
func (self HandCard) baseScore() int {
	x, y, z := sortCard(self.Cards[0]%100, self.Cards[1]%100, self.Cards[2]%100)
	// Bug fix: the special case previously triggered on x == 2, which also
	// matched the ordinary 2-3-4 straight and wrongly deducted 13 from it.
	// x+12 == z identifies exactly the ace-low straight (matching the
	// second clause of isStraight).
	if isStraight(x, y, z) && x+12 == z {
		return x + y + z - 13
	}
	return x + y + z
}
// suitScore returns the suit (card/100) of the highest-ranked card in the
// hand; when the top rank is duplicated, the largest such suit wins.
func (self HandCard) suitScore() int {
	x, y, z := self.Cards[0], self.Cards[1], self.Cards[2]
	// Find a card holding the maximum rank.
	var baseMax int
	if x%100 >= y%100 && x%100 >= z%100 {
		baseMax = x
	}
	if y%100 >= x%100 && y%100 >= z%100 {
		baseMax = y
	}
	// Bug fix: this branch previously re-tested x's condition while
	// assigning z, so z could never be selected as the highest card.
	if z%100 >= x%100 && z%100 >= y%100 {
		baseMax = z
	}
	// Among cards equal to the chosen one, take the largest suit.
	var suitMax int
	if x == baseMax && x/100 >= suitMax {
		suitMax = x / 100
	}
	if y == baseMax && y/100 >= suitMax {
		suitMax = y / 100
	}
	if z == baseMax && z/100 >= suitMax {
		suitMax = z / 100
	}
	return suitMax
}
// isLeopard reports whether the hand is three of a kind.
func (self HandCard) isLeopard() bool {
	x, y, z := self.Cards[0], self.Cards[1], self.Cards[2]
	return isLeopard(x, y, z)
}

// isRoyalFlush reports whether the hand is a suited run.
func (self HandCard) isRoyalFlush() bool {
	x, y, z := self.Cards[0], self.Cards[1], self.Cards[2]
	return isFlush(x, y, z) && isStraight(x, y, z)
}

// isFlush reports whether the hand is suited but not a run
// (categories are kept mutually exclusive).
func (self HandCard) isFlush() bool {
	x, y, z := self.Cards[0], self.Cards[1], self.Cards[2]
	return isFlush(x, y, z) && !isStraight(x, y, z)
}

// isStraight reports whether the hand is a run but not suited.
func (self HandCard) isStraight() bool {
	x, y, z := self.Cards[0], self.Cards[1], self.Cards[2]
	return isStraight(x, y, z) && !isFlush(x, y, z)
}

// isPair reports whether the hand holds a pair but not three of a kind.
func (self HandCard) isPair() bool {
	x, y, z := self.Cards[0]%100, self.Cards[1]%100, self.Cards[2]%100
	return isPair(x, y, z) && !isLeopard(x, y, z)
}
// ============================================================
// Card-triple predicates over raw encoded cards (suit*100 + rank).

// isLeopard reports whether all three cards share the same rank (card % 100).
func isLeopard(x, y, z int) bool {
	return x%100 == y%100 && z%100 == y%100
}
// isFlush reports whether all three cards share the same suit (card / 100).
func isFlush(x, y, z int) bool {
	return x/100 == y/100 && z/100 == y/100
}
// isStraight reports whether the three ranks form a run. The lo+12 case
// admits the ace-low straight A-2-3 (ranks 2, 3, 14).
func isStraight(x, y, z int) bool {
	lo, mid, hi := sortCard(x%100, y%100, z%100)
	return lo+1 == mid && (lo+2 == hi || lo+12 == hi)
}
// isPair reports whether at least two of the three values are equal.
func isPair(x, y, z int) bool {
	return x == y || y == z || x == z
}
// sortCard returns a, b, c sorted ascending (x <= y <= z) via a fixed
// three-element exchange network.
func sortCard(a, b, c int) (x, y, z int) {
	x, y, z = a, b, c
	if x > y {
		x, y = y, x
	}
	if x > z {
		x, z = z, x
	}
	if y > z {
		y, z = z, y
	}
	return
} | my/royalpoker/win3cards/cards.go | 0.64579 | 0.469034 | cards.go | starcoder |
package rateengine
import (
"time"
"github.com/pkg/errors"
"go.uber.org/zap"
"github.com/transcom/mymove/pkg/models"
"github.com/transcom/mymove/pkg/unit"
)
// LinehaulCostComputation represents the results of a linehaul cost
// computation, broken down by component charge.
type LinehaulCostComputation struct {
	BaseLinehaul              unit.Cents
	OriginLinehaulFactor      unit.Cents
	DestinationLinehaulFactor unit.Cents
	ShorthaulCharge           unit.Cents
	LinehaulChargeTotal       unit.Cents
	Mileage                   int
}

// Scale scales a cost computation by a multiplicative factor.
// NOTE(review): Mileage is deliberately(?) left unscaled — confirm.
func (c *LinehaulCostComputation) Scale(factor float64) {
	c.BaseLinehaul = c.BaseLinehaul.MultiplyFloat64(factor)
	c.OriginLinehaulFactor = c.OriginLinehaulFactor.MultiplyFloat64(factor)
	c.DestinationLinehaulFactor = c.DestinationLinehaulFactor.MultiplyFloat64(factor)
	c.ShorthaulCharge = c.ShorthaulCharge.MultiplyFloat64(factor)
	c.LinehaulChargeTotal = c.LinehaulChargeTotal.MultiplyFloat64(factor)
}
// determineMileage returns the transit distance in miles between two
// 5-digit ZIP codes using the route planner. Failures are logged with the
// ZIPs involved and returned to the caller.
func (re *RateEngine) determineMileage(originZip5 string, destinationZip5 string) (mileage int, err error) {
	mileage, err = re.planner.Zip5TransitDistance(originZip5, destinationZip5)
	if err != nil {
		// Bug fix: zap messages are plain strings, not printf formats — the
		// previous message logged a literal "%v"; the error itself is
		// carried by the zap.Error field.
		re.logger.Error("Failed to get distance from planner", zap.Error(err),
			zap.String("origin_zip5", originZip5), zap.String("destination_zip5", destinationZip5))
	}
	return mileage, err
}
// baseLinehaul determines the Base Linehaul (BLH) charge for the given
// mileage, weight, and move date from the rate tables.
func (re *RateEngine) baseLinehaul(mileage int, weight unit.Pound, date time.Time) (baseLinehaulChargeCents unit.Cents, err error) {
	baseLinehaulChargeCents, err = models.FetchBaseLinehaulRate(re.db, mileage, weight, date)
	if err != nil {
		re.logger.Error("Base Linehaul query didn't complete: ", zap.Error(err))
	}
	return baseLinehaulChargeCents, err
}

// linehaulFactors determines a Linehaul Factor (OLF or DLF): the service
// area's per-CWT factor for the given zip3, multiplied by the shipment
// weight in hundredweight.
func (re *RateEngine) linehaulFactors(cwt unit.CWT, zip3 string, date time.Time) (linehaulFactorCents unit.Cents, err error) {
	serviceArea, err := models.FetchTariff400ngServiceAreaForZip3(re.db, zip3, date)
	if err != nil {
		return 0, err
	}
	return serviceArea.LinehaulFactor.Multiply(cwt.Int()), nil
}

// shorthaulCharge determines the Shorthaul (SH) charge, which ONLY applies
// when the shipment moves fewer than 800 miles; otherwise it is zero.
func (re *RateEngine) shorthaulCharge(mileage int, cwt unit.CWT, date time.Time) (shorthaulChargeCents unit.Cents, err error) {
	if mileage >= 800 {
		return 0, nil
	}
	re.logger.Debug("Shipment qualifies for shorthaul fee",
		zap.Int("miles", mileage))
	cwtMiles := mileage * cwt.Int()
	shorthaulChargeCents, err = models.FetchShorthaulRateCents(re.db, cwtMiles, date)
	return shorthaulChargeCents, err
}
// linehaulChargeComputation determines the Linehaul Charge (LC) total.
// Formula: LC = BLH + OLF + DLF + SH
func (re *RateEngine) linehaulChargeComputation(weight unit.Pound, originZip5 string, destinationZip5 string, date time.Time) (cost LinehaulCostComputation, err error) {
	cwt := weight.ToCWT()
	// Linehaul factors are keyed by 3-digit ZIP prefix.
	originZip3 := Zip5ToZip3(originZip5)
	destinationZip3 := Zip5ToZip3(destinationZip5)
	mileage, err := re.determineMileage(originZip5, destinationZip5)
	if err != nil {
		return cost, errors.Wrap(err, "Failed to determine mileage")
	}
	cost.Mileage = mileage
	cost.BaseLinehaul, err = re.baseLinehaul(mileage, weight, date)
	if err != nil {
		return cost, errors.Wrap(err, "Failed to determine base linehaul charge")
	}
	cost.OriginLinehaulFactor, err = re.linehaulFactors(cwt, originZip3, date)
	if err != nil {
		return cost, errors.Wrap(err, "Failed to determine origin linehaul factor")
	}
	cost.DestinationLinehaulFactor, err = re.linehaulFactors(cwt, destinationZip3, date)
	if err != nil {
		return cost, errors.Wrap(err, "Failed to determine destination linehaul factor")
	}
	cost.ShorthaulCharge, err = re.shorthaulCharge(mileage, cwt, date)
	if err != nil {
		return cost, errors.Wrap(err, "Failed to determine shorthaul charge")
	}
	cost.LinehaulChargeTotal = cost.BaseLinehaul +
		cost.OriginLinehaulFactor +
		cost.DestinationLinehaulFactor +
		cost.ShorthaulCharge

	re.logger.Info("Linehaul charge total calculated",
		zap.Int("linehaul total", cost.LinehaulChargeTotal.Int()),
		zap.Int("linehaul", cost.BaseLinehaul.Int()),
		zap.Int("origin lh factor", cost.OriginLinehaulFactor.Int()),
		zap.Int("destination lh factor", cost.DestinationLinehaulFactor.Int()),
		zap.Int("shorthaul", cost.ShorthaulCharge.Int()))

	return cost, err
} | pkg/rateengine/linehaul.go | 0.724578 | 0.453443 | linehaul.go | starcoder |
package cvsgeolookup
import (
"encoding/binary"
"encoding/csv"
"io"
"net"
"sort"
"strconv"
)
// Metrics is a placeholder hook for instrumentation; currently empty.
type Metrics interface {
}

// nometrics is the no-op default Metrics implementation.
type nometrics struct{}

// record is one IP range with its geographic coordinate. IPs are stored as
// big-endian uint32 values.
// NOTE(review): "lantitude" is a misspelling of "latitude" carried through
// the public Option/field names; preserved for compatibility.
type record struct {
	begin      uint32  // first ip of segment
	end        uint32  // last ip of segment
	lantitude  float32 // latitude
	longtitude float32 // longitude
}

// engine holds the sorted records plus per-/8-prefix index bounds used to
// narrow the binary search in Lookup.
type engine struct {
	records    []record
	beginindex []uint64
	endindex   []uint64
	options    options
}
// New returns a new lookup engine with default CSV field names
// ("start"/"end"/"lantitude"/"longtitude"), ',' separator, '#' comments,
// and no-op metrics; opts override these defaults. Call Load before Lookup.
// NOTE(review): returns the unexported *engine type; the error is
// currently always nil.
func New(opts ...Option) (*engine, error) {
	options := options{
		fieldNameBegin:      "start",
		fieldNameEnd:        "end",
		fieldNameLantitude:  "lantitude",
		fieldNameLongtitude: "longtitude",
		commaRune:           ',',
		commentRune:         '#',
		metrics:             &nometrics{},
	}
	for _, o := range opts {
		o.apply(&options)
	}
	return &engine{
		options: options,
	}, nil
}
// Load reads the CSV dataset from r, validates it, and builds the sorted
// record table plus the per-/8-prefix search indexes. The first row must
// be a header naming the begin/end/latitude/longitude columns (per the
// configured field names). Rows matching the optional skip column/value
// are dropped.
func (e *engine) Load(r io.Reader) error {
	if r == nil {
		return ErrReadInterfaceRequired
	}
	data := csv.NewReader(r)
	data.Comma = e.options.commaRune
	data.Comment = e.options.commentRune
	data.ReuseRecord = true
	// Read header and find right fields' positions
	indexBegin := -1
	indexEnd := -1
	indexLantitude := -1
	indexLongtitude := -1
	indexSkip := -1
	if header, err := data.Read(); err == nil {
		for index, field := range header {
			switch field {
			case e.options.fieldNameBegin:
				indexBegin = index
			case e.options.fieldNameEnd:
				indexEnd = index
			case e.options.fieldNameLantitude:
				indexLantitude = index
			case e.options.fieldNameLongtitude:
				indexLongtitude = index
			case e.options.fieldNameSkip:
				indexSkip = index
			}
		}
	} else {
		return err
	}
	if indexBegin == -1 {
		return ErrNoBeginField
	}
	if indexEnd == -1 {
		return ErrNoEndField
	}
	if indexLantitude == -1 {
		return ErrNoLantitudeField
	}
	if indexLongtitude == -1 {
		return ErrNoLongtitudeField
	}
	// Build records
	records := make([]record, 0)
	for {
		line, err := data.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			return err
		}
		if indexSkip >= 0 {
			if line[indexSkip] == e.options.skipValue {
				continue
			}
		}
		var record record
		if record.begin, err = e.parseIP(line[indexBegin]); err != nil {
			return err
		}
		if record.end, err = e.parseIP(line[indexEnd]); err != nil {
			return err
		}
		// Each segment must run low-to-high.
		if record.begin > record.end {
			return ErrIncorrectSegment
		}
		if record.lantitude, err = e.parseFloat(line[indexLantitude]); err != nil {
			return err
		}
		if record.longtitude, err = e.parseFloat(line[indexLongtitude]); err != nil {
			return err
		}
		records = append(records, record)
	}
	// Just make sure all segments are in monotone order
	sort.Slice(records[:], func(i, j int) bool {
		return records[i].begin < records[j].begin
	})
	// Build indexes: for every /8 prefix, beginindex holds the first record
	// whose begin falls in that prefix and endindex the last, bracketing the
	// binary search in Lookup.
	// NOTE(review): prefixes with no records keep zero bounds — confirm
	// Lookup behaves for sparse datasets.
	beginindex := make([]uint64, 256)
	endindex := make([]uint64, 256)
	count := uint64(len(records))
	for i := uint64(0); i < count; i++ {
		end := (records[i].begin >> 24)
		begin := (records[count-i-1].begin >> 24)
		beginindex[begin] = count - i - 1
		endindex[end] = i
	}
	e.records = records
	e.beginindex = beginindex
	e.endindex = endindex
	return nil
}
// Lookup resolves an IPv4 address string to the (latitude, longitude) of
// the segment containing it. It returns ErrNotInitialized before Load,
// ErrWrongIPFormat for unparseable input, and ErrNotFound when no segment
// contains the address. The search is a binary search bounded by the
// per-/8-prefix indexes built in Load.
// NOTE(review): a successfully Loaded but empty dataset would index
// e.records[0] and panic — confirm Load rejects zero-row input upstream.
func (e *engine) Lookup(ip string) (lantitude float32, longtitude float32, err error) {
	err = nil
	lantitude = 0
	longtitude = 0
	if e.records == nil {
		err = ErrNotInitialized
		return
	}
	look, err := e.parseIP(ip)
	if err != nil {
		return
	}
	begin := e.beginindex[look>>24]
	end := e.endindex[look>>24]
	for {
		current := (begin + end) / 2
		if e.records[current].begin <= look && e.records[current].end >= look {
			//found!
			lantitude = e.records[current].lantitude
			longtitude = e.records[current].longtitude
			return
		}
		if begin == end {
			break
		}
		if e.records[current].begin > look {
			// move to left side
			end = current
			continue
		}
		if e.records[current].end < look {
			// move to right side
			if begin != current {
				begin = current
			} else {
				// avoid stalling when the midpoint equals begin
				begin = current + 1
			}
			continue
		}
		if begin > end {
			break
		}
	}
	err = ErrNotFound
	return
}
// parseIP converts an IPv4 address string into its big-endian uint32
// value. Non-IP strings and non-IPv4 addresses return ErrWrongIPFormat.
func (e *engine) parseIP(val string) (uint32, error) {
	ip := net.ParseIP(val)
	if ip == nil {
		return 0, ErrWrongIPFormat
	}
	// Bug fix: To4 returns nil for addresses with no IPv4 representation
	// (e.g. "::1"); the nil slice previously flowed into
	// binary.BigEndian.Uint32, which panics.
	ip = ip.To4()
	if ip == nil {
		return 0, ErrWrongIPFormat
	}
	return binary.BigEndian.Uint32(ip), nil
}
// parseFloat parses a coordinate value and narrows it to float32.
// NOTE(review): the tiny bias nudges values away from .5 boundaries before
// the float64→float32 narrowing — presumably to stabilize rounding of CSV
// coordinates; confirm it is still needed.
func (e *engine) parseFloat(val string) (float32, error) {
	float, err := strconv.ParseFloat(val, 64)
	if err != nil {
		return 0, err
	}
	// Some precision corrections
	if float > 0 {
		float += 0.0000000001
	} else if float < 0 {
		float -= 0.0000000001
	}
	return float32(float), nil
} | geolookup.go | 0.566498 | 0.418578 | geolookup.go | starcoder |
package openapi
import (
"encoding/json"
)
// DispatchRateImpl struct for DispatchRateImpl
type DispatchRateImpl struct {
DispatchThrottlingRateInMsg *int32 `json:"dispatchThrottlingRateInMsg,omitempty"`
DispatchThrottlingRateInByte *int64 `json:"dispatchThrottlingRateInByte,omitempty"`
RelativeToPublishRate *bool `json:"relativeToPublishRate,omitempty"`
RatePeriodInSecond *int32 `json:"ratePeriodInSecond,omitempty"`
}
// NewDispatchRateImpl instantiates a new DispatchRateImpl object.
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed.
func NewDispatchRateImpl() *DispatchRateImpl {
	return &DispatchRateImpl{}
}

// NewDispatchRateImplWithDefaults instantiates a new DispatchRateImpl object.
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set.
func NewDispatchRateImplWithDefaults() *DispatchRateImpl {
	return &DispatchRateImpl{}
}
// GetDispatchThrottlingRateInMsg returns the DispatchThrottlingRateInMsg field value if set, zero value otherwise.
func (o *DispatchRateImpl) GetDispatchThrottlingRateInMsg() int32 {
	if v, ok := o.GetDispatchThrottlingRateInMsgOk(); ok {
		return *v
	}
	return 0
}

// GetDispatchThrottlingRateInMsgOk returns a tuple with the DispatchThrottlingRateInMsg field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *DispatchRateImpl) GetDispatchThrottlingRateInMsgOk() (*int32, bool) {
	if o != nil && o.DispatchThrottlingRateInMsg != nil {
		return o.DispatchThrottlingRateInMsg, true
	}
	return nil, false
}

// HasDispatchThrottlingRateInMsg returns a boolean if a field has been set.
func (o *DispatchRateImpl) HasDispatchThrottlingRateInMsg() bool {
	return o != nil && o.DispatchThrottlingRateInMsg != nil
}

// SetDispatchThrottlingRateInMsg gets a reference to the given int32 and assigns it to the DispatchThrottlingRateInMsg field.
func (o *DispatchRateImpl) SetDispatchThrottlingRateInMsg(v int32) {
	o.DispatchThrottlingRateInMsg = &v
}

// GetDispatchThrottlingRateInByte returns the DispatchThrottlingRateInByte field value if set, zero value otherwise.
func (o *DispatchRateImpl) GetDispatchThrottlingRateInByte() int64 {
	if v, ok := o.GetDispatchThrottlingRateInByteOk(); ok {
		return *v
	}
	return 0
}

// GetDispatchThrottlingRateInByteOk returns a tuple with the DispatchThrottlingRateInByte field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *DispatchRateImpl) GetDispatchThrottlingRateInByteOk() (*int64, bool) {
	if o != nil && o.DispatchThrottlingRateInByte != nil {
		return o.DispatchThrottlingRateInByte, true
	}
	return nil, false
}

// HasDispatchThrottlingRateInByte returns a boolean if a field has been set.
func (o *DispatchRateImpl) HasDispatchThrottlingRateInByte() bool {
	return o != nil && o.DispatchThrottlingRateInByte != nil
}

// SetDispatchThrottlingRateInByte gets a reference to the given int64 and assigns it to the DispatchThrottlingRateInByte field.
func (o *DispatchRateImpl) SetDispatchThrottlingRateInByte(v int64) {
	o.DispatchThrottlingRateInByte = &v
}

// GetRelativeToPublishRate returns the RelativeToPublishRate field value if set, zero value otherwise.
func (o *DispatchRateImpl) GetRelativeToPublishRate() bool {
	if v, ok := o.GetRelativeToPublishRateOk(); ok {
		return *v
	}
	return false
}

// GetRelativeToPublishRateOk returns a tuple with the RelativeToPublishRate field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *DispatchRateImpl) GetRelativeToPublishRateOk() (*bool, bool) {
	if o != nil && o.RelativeToPublishRate != nil {
		return o.RelativeToPublishRate, true
	}
	return nil, false
}

// HasRelativeToPublishRate returns a boolean if a field has been set.
func (o *DispatchRateImpl) HasRelativeToPublishRate() bool {
	return o != nil && o.RelativeToPublishRate != nil
}

// SetRelativeToPublishRate gets a reference to the given bool and assigns it to the RelativeToPublishRate field.
func (o *DispatchRateImpl) SetRelativeToPublishRate(v bool) {
	o.RelativeToPublishRate = &v
}

// GetRatePeriodInSecond returns the RatePeriodInSecond field value if set, zero value otherwise.
func (o *DispatchRateImpl) GetRatePeriodInSecond() int32 {
	if v, ok := o.GetRatePeriodInSecondOk(); ok {
		return *v
	}
	return 0
}

// GetRatePeriodInSecondOk returns a tuple with the RatePeriodInSecond field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *DispatchRateImpl) GetRatePeriodInSecondOk() (*int32, bool) {
	if o != nil && o.RatePeriodInSecond != nil {
		return o.RatePeriodInSecond, true
	}
	return nil, false
}

// HasRatePeriodInSecond returns a boolean if a field has been set.
func (o *DispatchRateImpl) HasRatePeriodInSecond() bool {
	return o != nil && o.RatePeriodInSecond != nil
}

// SetRatePeriodInSecond gets a reference to the given int32 and assigns it to the RatePeriodInSecond field.
func (o *DispatchRateImpl) SetRatePeriodInSecond(v int32) {
	o.RatePeriodInSecond = &v
}
// MarshalJSON serializes only the fields that have been set (non-nil).
func (o DispatchRateImpl) MarshalJSON() ([]byte, error) {
	out := make(map[string]interface{})
	if o.DispatchThrottlingRateInMsg != nil {
		out["dispatchThrottlingRateInMsg"] = o.DispatchThrottlingRateInMsg
	}
	if o.DispatchThrottlingRateInByte != nil {
		out["dispatchThrottlingRateInByte"] = o.DispatchThrottlingRateInByte
	}
	if o.RelativeToPublishRate != nil {
		out["relativeToPublishRate"] = o.RelativeToPublishRate
	}
	if o.RatePeriodInSecond != nil {
		out["ratePeriodInSecond"] = o.RatePeriodInSecond
	}
	return json.Marshal(out)
}
// NullableDispatchRateImpl wraps a DispatchRateImpl pointer together with
// an explicit "set" flag, so that an explicit JSON null can be
// distinguished from an absent value.
type NullableDispatchRateImpl struct {
	value *DispatchRateImpl
	isSet bool
}

// Get returns the wrapped value, which may be nil.
func (v NullableDispatchRateImpl) Get() *DispatchRateImpl {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableDispatchRateImpl) Set(val *DispatchRateImpl) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether a value (possibly nil) has been stored.
func (v NullableDispatchRateImpl) IsSet() bool {
	return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableDispatchRateImpl) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableDispatchRateImpl returns a wrapper around val, marked as set.
func NewNullableDispatchRateImpl(val *DispatchRateImpl) *NullableDispatchRateImpl {
	return &NullableDispatchRateImpl{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value (nil encodes as JSON null).
func (v NullableDispatchRateImpl) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes src into the wrapped value and marks it as set.
func (v *NullableDispatchRateImpl) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package testonly
// MerkleTreeLeafTestInputs returns a slice of leaf inputs that may be used in
// compact Merkle tree test cases. They are intended to be added successively,
// so that after each addition the corresponding root from MerkleTreeLeafTestRoots
// gives the expected Merkle tree root hash.
func MerkleTreeLeafTestInputs() [][]byte {
	raw := []string{
		"",
		"\x00",
		"\x10",
		"\x20\x21",
		"\x30\x31",
		"\x40\x41\x42\x43",
		"\x50\x51\x52\x53\x54\x55\x56\x57",
		"\x60\x61\x62\x63\x64\x65\x66\x67\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f",
	}
	leaves := make([][]byte, 0, len(raw))
	for _, s := range raw {
		leaves = append(leaves, []byte(s))
	}
	return leaves
}
// MerkleTreeLeafTestRootHashes returns a slice of Merkle tree root hashes that
// correspond to the expected tree state for the leaf additions returned by
// MerkleTreeLeafTestInputs(), as described above.
//
// NOTE(review): several entries below are the literal placeholder "<KEY>",
// which looks like redacted test-vector data — these will fail hex decoding
// at runtime; restore the original constants before use. TODO confirm.
func MerkleTreeLeafTestRootHashes() [][]byte {
	return [][]byte{
		// constants from C++ test: https://github.com/google/certificate-transparency/blob/master/cpp/merkletree/merkle_tree_test.cc#L277
		MustHexDecode("<KEY>"),
		MustHexDecode("fac54203e7cc696cf0dfcb42c92a1d9dbaf70ad9e621f4bd8d98662f00e3c125"),
		MustHexDecode("<KEY>"),
		MustHexDecode("<KEY>"),
		MustHexDecode("<KEY>"),
		MustHexDecode("<KEY>"),
		MustHexDecode("ddb89be403809e325750d3d263cd78929c2942b7942a34b77e122c9594a74c8c"),
		MustHexDecode("<KEY>")}
}
// CompactMerkleTreeLeafTestNodeHashes returns the CompactMerkleTree.node state
// that must result after each of the leaf additions returned by
// MerkleTreeLeafTestInputs(), as described above.
//
// A nil entry marks a perfect tree size (a power of two), where the compact
// representation needs no intermediate node hashes.
func CompactMerkleTreeLeafTestNodeHashes() [][][]byte {
	return [][][]byte{
		nil, // perfect tree size, 2^0
		nil, // perfect tree size, 2^1
		{MustDecodeBase64("ApjRIpBtz8EIkstTpzmS/FufST6kybrbJ7eRtBJ6f+c="), MustDecodeBase64("+sVCA+fMaWzw38tCySodnbr3CtnmIfS9jZhmLwDjwSU=")},
		nil, // perfect tree size, 2^2
		{MustDecodeBase64("vBoGQ7EuTS18d5GPROD095qDi2z57FtcKD4fTYhZnms="), nil, MustDecodeBase64("037kGJdt2VdTwcc4Yrk5j6Kiz5tP8P3+izDNlSCWFLc=")},
		{nil, MustDecodeBase64("DrxdNDf74tsVi58Sah0RjjCBgQMdCpSfje3t68VY72o="), MustDecodeBase64("037kGJdt2VdTwcc4Yrk5j6Kiz5tP8P3+izDNlSCWFLc=")},
		{MustDecodeBase64("sIaT7C5yFZcTBkHoIR5+7cy0wmQTlj7ubB4u0W/7Gl8="), MustDecodeBase64("DrxdNDf74tsVi58Sah0RjjCBgQMdCpSfje3t68VY72o="), MustDecodeBase64("037kGJdt2VdTwcc4Yrk5j6Kiz5tP8P3+izDNlSCWFLc=")},
		nil, // perfect tree size, 2^3
	}
}
// EmptyMerkleTreeRootHash returns the expected root hash for an empty Merkle Tree
// that uses SHA256 hashing.
func EmptyMerkleTreeRootHash() []byte {
const sha256EmptyTreeHash = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
return MustHexDecode(sha256EmptyTreeHash)
} | testonly/compact_merkle_tree.go | 0.798029 | 0.527377 | compact_merkle_tree.go | starcoder |
package onshape
import (
"encoding/json"
)
// BTAndFilter110AllOf struct for BTAndFilter110AllOf.
// All fields are optional pointers; nil fields are omitted from the JSON
// encoding (see the omitempty tags and MarshalJSON).
type BTAndFilter110AllOf struct {
	// Serialized as "btType" when set.
	BtType *string `json:"btType,omitempty"`
	// Serialized as "operand1" when set.
	Operand1 *BTQueryFilter183 `json:"operand1,omitempty"`
	// Serialized as "operand2" when set.
	Operand2 *BTQueryFilter183 `json:"operand2,omitempty"`
}
// NewBTAndFilter110AllOf instantiates a new BTAndFilter110AllOf object.
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed.
func NewBTAndFilter110AllOf() *BTAndFilter110AllOf {
	return &BTAndFilter110AllOf{}
}

// NewBTAndFilter110AllOfWithDefaults instantiates a new BTAndFilter110AllOf object.
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set.
func NewBTAndFilter110AllOfWithDefaults() *BTAndFilter110AllOf {
	return &BTAndFilter110AllOf{}
}
// GetBtType returns the BtType field value if set, zero value otherwise.
func (o *BTAndFilter110AllOf) GetBtType() string {
	if v, ok := o.GetBtTypeOk(); ok {
		return *v
	}
	return ""
}

// GetBtTypeOk returns a tuple with the BtType field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTAndFilter110AllOf) GetBtTypeOk() (*string, bool) {
	if o != nil && o.BtType != nil {
		return o.BtType, true
	}
	return nil, false
}

// HasBtType returns a boolean if a field has been set.
func (o *BTAndFilter110AllOf) HasBtType() bool {
	return o != nil && o.BtType != nil
}

// SetBtType gets a reference to the given string and assigns it to the BtType field.
func (o *BTAndFilter110AllOf) SetBtType(v string) {
	o.BtType = &v
}

// GetOperand1 returns the Operand1 field value if set, zero value otherwise.
func (o *BTAndFilter110AllOf) GetOperand1() BTQueryFilter183 {
	if v, ok := o.GetOperand1Ok(); ok {
		return *v
	}
	var zero BTQueryFilter183
	return zero
}

// GetOperand1Ok returns a tuple with the Operand1 field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTAndFilter110AllOf) GetOperand1Ok() (*BTQueryFilter183, bool) {
	if o != nil && o.Operand1 != nil {
		return o.Operand1, true
	}
	return nil, false
}

// HasOperand1 returns a boolean if a field has been set.
func (o *BTAndFilter110AllOf) HasOperand1() bool {
	return o != nil && o.Operand1 != nil
}

// SetOperand1 gets a reference to the given BTQueryFilter183 and assigns it to the Operand1 field.
func (o *BTAndFilter110AllOf) SetOperand1(v BTQueryFilter183) {
	o.Operand1 = &v
}

// GetOperand2 returns the Operand2 field value if set, zero value otherwise.
func (o *BTAndFilter110AllOf) GetOperand2() BTQueryFilter183 {
	if v, ok := o.GetOperand2Ok(); ok {
		return *v
	}
	var zero BTQueryFilter183
	return zero
}

// GetOperand2Ok returns a tuple with the Operand2 field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTAndFilter110AllOf) GetOperand2Ok() (*BTQueryFilter183, bool) {
	if o != nil && o.Operand2 != nil {
		return o.Operand2, true
	}
	return nil, false
}

// HasOperand2 returns a boolean if a field has been set.
func (o *BTAndFilter110AllOf) HasOperand2() bool {
	return o != nil && o.Operand2 != nil
}

// SetOperand2 gets a reference to the given BTQueryFilter183 and assigns it to the Operand2 field.
func (o *BTAndFilter110AllOf) SetOperand2(v BTQueryFilter183) {
	o.Operand2 = &v
}
// MarshalJSON serializes only the fields that have been set (non-nil).
func (o BTAndFilter110AllOf) MarshalJSON() ([]byte, error) {
	out := make(map[string]interface{})
	if o.BtType != nil {
		out["btType"] = o.BtType
	}
	if o.Operand1 != nil {
		out["operand1"] = o.Operand1
	}
	if o.Operand2 != nil {
		out["operand2"] = o.Operand2
	}
	return json.Marshal(out)
}
// NullableBTAndFilter110AllOf wraps a BTAndFilter110AllOf pointer together
// with an explicit "set" flag, so that an explicit JSON null can be
// distinguished from an absent value.
type NullableBTAndFilter110AllOf struct {
	value *BTAndFilter110AllOf
	isSet bool
}

// Get returns the wrapped value, which may be nil.
func (v NullableBTAndFilter110AllOf) Get() *BTAndFilter110AllOf {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableBTAndFilter110AllOf) Set(val *BTAndFilter110AllOf) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether a value (possibly nil) has been stored.
func (v NullableBTAndFilter110AllOf) IsSet() bool {
	return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableBTAndFilter110AllOf) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableBTAndFilter110AllOf returns a wrapper around val, marked as set.
func NewNullableBTAndFilter110AllOf(val *BTAndFilter110AllOf) *NullableBTAndFilter110AllOf {
	return &NullableBTAndFilter110AllOf{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value (nil encodes as JSON null).
func (v NullableBTAndFilter110AllOf) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes src into the wrapped value and marks it as set.
func (v *NullableBTAndFilter110AllOf) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package protocol
// Attribute is an entity attribute, that holds specific data such as the health of the entity. Each attribute
// holds a default value, maximum and minimum value, name and its current value.
type Attribute struct {
	// Name is the name of the attribute, for example 'minecraft:health'. These names must be identical to
	// the ones defined client-side.
	Name string
	// Value is the current value of the attribute. This value will be applied to the entity when sent in a
	// packet.
	Value float32
	// Max and Min specify the boundaries within the value of the attribute must be. The definition of these
	// fields differ per attribute. The maximum health of an entity may be changed, whereas the maximum
	// movement speed for example may not be.
	Max, Min float32
	// Default is the default value of the attribute. It's not clear why this field must be sent to the
	// client, but it is required regardless.
	// Note: Default is not part of the wire format used by InitialAttributes/
	// WriteInitialAttributes; only the full Attributes codec carries it.
	Default float32
}
// Attributes reads an Attribute slice x from Reader r. The wire order per
// entry is Min, Max, Value, Default, Name, preceded by a varuint32 count.
func Attributes(r *Reader, x *[]Attribute) {
	var count uint32
	r.Varuint32(&count)
	r.LimitUint32(count, mediumLimit)
	*x = make([]Attribute, count)
	for i := range *x {
		attr := &(*x)[i]
		r.Float32(&attr.Min)
		r.Float32(&attr.Max)
		r.Float32(&attr.Value)
		r.Float32(&attr.Default)
		r.String(&attr.Name)
	}
}
// WriteAttributes writes a slice of Attributes x to Writer w, mirroring the
// wire order read by Attributes: count, then Min, Max, Value, Default, Name
// per entry.
func WriteAttributes(w *Writer, x *[]Attribute) {
	count := uint32(len(*x))
	w.Varuint32(&count)
	for i := range *x {
		attr := &(*x)[i]
		w.Float32(&attr.Min)
		w.Float32(&attr.Max)
		w.Float32(&attr.Value)
		w.Float32(&attr.Default)
		w.String(&attr.Name)
	}
}
// InitialAttributes reads an Attribute slice from Reader r and stores it in
// the pointer passed. InitialAttributes is used when reading the attributes
// of a new entity (AddEntity packet); unlike Attributes, the wire order per
// entry is Name, Min, Value, Max, and no Default value is carried.
func InitialAttributes(r *Reader, x *[]Attribute) {
	var count uint32
	r.Varuint32(&count)
	r.LimitUint32(count, mediumLimit)
	*x = make([]Attribute, count)
	for i := range *x {
		attr := &(*x)[i]
		r.String(&attr.Name)
		r.Float32(&attr.Min)
		r.Float32(&attr.Value)
		r.Float32(&attr.Max)
	}
}
// WriteInitialAttributes writes a slice of Attributes x to Writer w. WriteInitialAttributes is used when
// writing the attributes of a new entity. (AddEntity packet)
func WriteInitialAttributes(w *Writer, x *[]Attribute) {
l := uint32(len(*x))
w.Varuint32(&l)
for _, attribute := range *x {
w.String(&attribute.Name)
w.Float32(&attribute.Min)
w.Float32(&attribute.Value)
w.Float32(&attribute.Max)
}
} | minecraft/protocol/attribute.go | 0.809163 | 0.46041 | attribute.go | starcoder |
package goqueue
//Owner provides functions that directly affect the underlying pointers
// and data structures of a queue. The Close() function should
// ready the underlying pointer for garbage collection and return a slice
// of any items that remain in the queue.
type Owner interface {
	Close() (items []interface{})
}
//GarbageCollecter can be implemented to re-create the underlying pointers
// so that they can be garbage collected; you can think of this as creating
// an opportunity to defragment the memory.
// NOTE(review): the conventional spelling is "GarbageCollector"; the name
// is kept as-is for API compatibility.
type GarbageCollecter interface {
	GarbageCollect()
}
//Dequeuer can be used to destructively remove one or more items from the
// queue: it can remove one item via Dequeue(), multiple items via
// DequeueMultiple(), or all items using Flush(). Underflow will be true if
// the queue is empty.
type Dequeuer interface {
	Dequeue() (item interface{}, underflow bool)
	DequeueMultiple(n int) (items []interface{})
	Flush() (items []interface{})
}
//Peeker can be used to non-destructively read one or more items from
// the queue: it can read all items via Peek(), read the item at the
// front of the queue via PeekHead(), or read multiple items via
// PeekFromHead(). Underflow will be true if the queue is empty.
type Peeker interface {
	Peek() (items []interface{})
	PeekHead() (item interface{}, underflow bool)
	PeekFromHead(n int) (items []interface{})
}
//Enqueuer can be used to put one or more items into the queue.
// Enqueue() can be used to place one item while EnqueueMultiple()
// can be used to place multiple items; in the event the queue is full
// the remaining items will be provided (if applicable) and overflow
// will be true.
type Enqueuer interface {
	Enqueue(item interface{}) (overflow bool)
	EnqueueMultiple(items []interface{}) (itemsRemaining []interface{}, overflow bool)
}
//EnqueueInFronter describes an operation where you enqueue a single item at
// the front of the queue; if the queue is full, overflow will be true.
type EnqueueInFronter interface {
	EnqueueInFront(item interface{}) (overflow bool)
}
//Info can be used to determine the number of items in the queue or its capacity;
// keep in mind that Capacity's output is dependent on the underlying logic for
// the queue. For example, an infinite queue's capacity will change as you exceed
// the limit of the queue.
type Info interface {
	Length() (size int)
	Capacity() (capacity int)
}
//Event can be used to get a read-only signal indicating whether data was
// put into the queue (in) or removed from the queue (out). Keep in mind that
// whether the channel is buffered or un-buffered depends on the underlying
// implementation.
type Event interface {
	GetSignalIn() (signal <-chan struct{})
	GetSignalOut() (signal <-chan struct{})
}
package qmath
import (
	"math/rand"
	"sync"
	"time"
)
// Quote pairs a short mathematical topic name with a one-paragraph summary
// and the name of its author.
type Quote struct {
	Name, Summary, Author string
}
{
Name: "Natural numbers",
Summary: "Natural numbers are the simple counting numbers (0, 1, 2, 3, ...). The skill of counting is intimatelty linked to the development of complex societies through trade, technology and documentation. Counting requires more than numbers, though. It involves addition, and hence subtraction too.",
Author: "<NAME>",
},
{
Name: "One",
Summary: "Together with zero, the number one is at the heart of all arithmetic.",
Author: "<NAME>",
},
{
Name: "Zero",
Summary: "Zero is complex idea, and for a long time there was considerable philosophical reluctance to recognize and put a name to it.",
Author: "<NAME>",
},
{
Name: "Infinity",
Summary: "Infinity (represented mathimatically as ∞) is simply the concept of endlessness: an infinite object is one that is unbounded.",
Author: "<NAME>",
},
{
Name: "Number systems",
Summary: "A number system is a way writing down numbers. In our everyday decimal system, we represent number in the from 434.15, for example. Digits within the number indicate units, tens, hundreds, tenths, hundredths, thousandths and so on, and are called coefficients.",
Author: "<NAME>",
},
{
Name: "Rational numbers",
Summary: "Rational numbers are numbers that can be expressed by dividing one integer by another non-zero integer. Thus all rational numbers take the form of fractions or quotients. These are written as one number, the numerator, divided by a second, the denominator.",
Author: "<NAME>",
},
{
Name: "Squares",
Summary: "The square of any number x is the product of the number times itself, denoted x².",
Author: "<NAME>",
},
{
Name: "Prime numbers",
Summary: "Prime numbers are positive integers that are divisible only by themselves and 1. The first eleven are 2, 3, 5, 7, 11, 13, 17, 19, 23, 29 and 31, but there are infinitely many. By convention, 1 is not considered prime, while 2 is the only even prime. A number that is neither 1 nor a prime is called a composite number.",
Author: "<NAME>",
},
{
Name: "Divisors and remainders",
Summary: "A number is a divisor of another number if it divides into that number exactly, with no remainder. So 4 is a divisor of 12, because it can be divided into 12 exactly three times. In this kind of operation, the number being divided, 12, is known as dividend.",
Author: "<NAME>",
},
{
Name: "Irrational numbers",
Summary: "Irrational numbers are numbers that cannot be expressed by dividing one natural number by another. Unlike rational numbers, they cannot be expressed as a ratio between to integers, or in a decimal form that either comes to an end or lapses into a regular pattern of repeating digits. Instead, the decimal expansions of irrational numbers carry on forever without periodic repetition.",
Author: "<NAME>",
},
}
func Rand() Quote {
rand.Seed(time.Now().UnixNano())
return Quotes[rand.Intn(len(Quotes))]
} | qmath.go | 0.661376 | 0.665864 | qmath.go | starcoder |
package detection
import (
"fmt"
"math"
"time"
"github.com/VividCortex/ewma"
"github.com/tencent/caelus/pkg/caelus/detection/ring"
"github.com/tencent/caelus/pkg/caelus/types"
)
// MinData is the minimum length of data for AnomalyDetector to work.
const MinData = 11

// EwmaDetector using ewma alg to detect anomaly values.
type EwmaDetector struct {
	// data holds the most recent samples in a fixed-size ring buffer.
	data ring.Ring
	// n is the configured sample-window size.
	n int
	// ma is the exponentially weighted moving average over added values.
	ma ewma.MovingAverage
	// recentAddTime is the timestamp of the last accepted sample; samples
	// not strictly newer than it are dropped by add().
	recentAddTime time.Time
	// addCount counts accepted samples, saturating at MinData.
	addCount int
	// metric is the key read from TimedData.Vals for each sample.
	metric string
}

// Compile-time check that EwmaDetector implements Detector.
var _ Detector = (*EwmaDetector)(nil)
// NewEwmaDetector returns a AnomalyDetector which keeps n data.
// Note: please initialize the detector with at least 10 data using Add();
// this is required by the EWMA library to be "ready" for producing a value.
// AnomalyDetector is not thread safe.
func NewEwmaDetector(metric string, n int) *EwmaDetector {
	d := &EwmaDetector{
		metric:        metric,
		n:             n,
		data:          ring.NewRing(n),
		ma:            ewma.NewMovingAverage(float64(n)),
		recentAddTime: nilTime,
	}
	return d
}
// Name returns the detector type name (types.DetectionEWMA).
func (e *EwmaDetector) Name() string {
	return types.DetectionEWMA
}

// Add feeds one sample into the detector. Samples whose timestamp is not
// strictly newer than the last accepted one are dropped (see add).
func (e *EwmaDetector) Add(data TimedData) {
	e.add(data)
}
// add accepts one sample, updating the moving average, the ring buffer and
// the saturating sample counter. Out-of-order or duplicate timestamps are
// silently ignored.
func (e *EwmaDetector) add(data TimedData) {
	// Drop samples that are not strictly newer than the last accepted one.
	if !data.Ts.After(e.recentAddTime) {
		return
	}
	e.recentAddTime = data.Ts
	// addCount saturates at MinData; beyond that the exact count is unused.
	if e.addCount < MinData {
		e.addCount++
	}
	v := data.Vals[e.metric]
	e.ma.Add(v)
	e.data.Add(v)
}
// AddAll resets the detector state and replays the given samples in order.
func (e *EwmaDetector) AddAll(vals []TimedData) {
	// Start from a clean state before replaying.
	e.ma = ewma.NewMovingAverage(float64(e.n))
	e.data = ring.NewRing(e.n)
	e.recentAddTime = nilTime
	e.addCount = 0
	for i := range vals {
		e.add(vals[i])
	}
}
// IsAnomaly reports whether the newest sample deviates more than three
// standard deviations from the moving average (three-sigma rule). It
// returns an error until at least MinData samples have been added.
func (e *EwmaDetector) IsAnomaly() (bool, error) {
	if e.addCount < MinData {
		return false, fmt.Errorf("too few samples(%d), at least %d", e.addCount, MinData)
	}
	deviation := math.Abs(e.data.Peek() - e.Mean())
	return deviation > 3*e.StdDev(), nil
}
// Mean returns the exponentially weighted moving average of the added values.
func (e *EwmaDetector) Mean() float64 {
	return e.ma.Value()
}
// StdDev returns the standard deviation of the buffered samples around the
// EWMA mean.
// The algorithm reference: https://zh.wikipedia.org/wiki/%E6%A8%99%E6%BA%96%E5%B7%AE
func (e *EwmaDetector) StdDev() float64 {
	mean := e.Mean()
	vals := e.data.Values()
	var sum float64
	for _, v := range vals {
		// (v-mean)^2 equals |v-mean|^2, so no explicit Abs is needed.
		d := v - mean
		sum += d * d
	}
	return math.Sqrt(sum / float64(len(vals)))
}
// Metrics returns the single metric key this detector watches.
func (e *EwmaDetector) Metrics() []string {
	return []string{e.metric}
}

// SampleCount returns the configured sample-window size n (note: this is
// the window capacity, not the number of samples added so far).
func (e *EwmaDetector) SampleCount() int {
	return e.n
}

// SampleDuration returns the sample time range; always zero for the EWMA
// detector, which is count-based rather than time-based.
func (e *EwmaDetector) SampleDuration() time.Duration {
	return time.Duration(0)
}

// Reason describes why this detector flagged an anomaly.
func (e *EwmaDetector) Reason() string {
	return fmt.Sprintf("ewma abnormal with metric %s", e.metric)
}
package sentiment
/*
Base data, as per the paper "PANAS-t: A Pychometric Scale for Measuring Sentiments on Twitter",
which can be accessed at https://arxiv.org/abs/1308.1857.
*/
// SelfReferences lists the first-person expressions used to detect that a
// sentence refers to its author, as recognized by the PANAS-t paper.
var SelfReferences = []string{"I am", "I'm", "I", "am", "feeling", "me", "myself"}
// StatesColl is a collection of all the specific sentiment states that can
// appear in a text, as recognized by the PANAS-t paper. The states are
// grouped by category below; the category for each state is also available
// via StatesCategories.
var StatesColl = []string{
	// jovility states
	"happy", "joyful", "delighted", "cheerful", "excited", "enthusiastic", "lively", "energetic",
	// selfAssurance states
	"proud", "strong", "confident", "bold", "daring", "fearless",
	// attentiveness states
	"alert", "attentiveness", "concentrating", "determined",
	// fear states
	"afraid", "scared", "frightened", "nervous", "jittery", "shaky",
	// hostility states
	"angry", "hostile", "irritable", "scornful", "disgusted", "loathing",
	// guilt states
	"guilty", "ashamed", "blameworthy", "angry at self", "disgusted with self", "dissatisfied with self",
	// sadness states
	"sad", "blue", "downhearted", "alone", "lonely",
	// shyness states
	"shy", "bashful", "sheepish", "timid",
	// fatigue states
	"sleepy", "tired", "sluggish", "drowsy",
	// serenity states
	"calm", "relaxed", "at ease",
	// surprise states
	"amazed", "surprised", "astonished",
}
// CategoriesMap is a map of available sentiment categories, as recognized
// by the PANAS-t paper.
var CategoriesMap = map[string]bool{
	"jovility":      true,
	"selfAssurance": true,
	"attentiveness": true,
	"fear":          true,
	"hostility":     true,
	"guilt":         true,
	"sadness":       true,
	"shyness":       true,
	"fatigue":       true,
	"serenity":      true,
	"surprise":      true,
}
// StateC holds a sentiment state's category and its overall
// positive/negative direction ("positive", "negative" or "other").
type StateC struct {
	Category  string
	Direction string
}
// StatesCategories is a map of states and their corresponding categories and
// overall positive/negative emotion.
// NOTE(review): the category key "jovility" matches CategoriesMap but
// WorldBaseline spells it "joviality" — confirm lookups reconcile the two
// spellings before cross-indexing these maps.
var StatesCategories = map[string]StateC{
	"happy": {Category: "jovility", Direction: "positive"},
	"joyful": {Category: "jovility", Direction: "positive"},
	"delighted": {Category: "jovility", Direction: "positive"},
	"cheerful": {Category: "jovility", Direction: "positive"},
	"excited": {Category: "jovility", Direction: "positive"},
	"enthusiastic": {Category: "jovility", Direction: "positive"},
	"lively": {Category: "jovility", Direction: "positive"},
	"energetic": {Category: "jovility", Direction: "positive"},
	"proud": {Category: "selfAssurance", Direction: "positive"},
	"strong": {Category: "selfAssurance", Direction: "positive"},
	"confident": {Category: "selfAssurance", Direction: "positive"},
	"bold": {Category: "selfAssurance", Direction: "positive"},
	"daring": {Category: "selfAssurance", Direction: "positive"},
	"fearless": {Category: "selfAssurance", Direction: "positive"},
	"alert": {Category: "attentiveness", Direction: "positive"},
	"attentiveness": {Category: "attentiveness", Direction: "positive"},
	"concentrating": {Category: "attentiveness", Direction: "positive"},
	"determined": {Category: "attentiveness", Direction: "positive"},
	"afraid": {Category: "fear", Direction: "negative"},
	"scared": {Category: "fear", Direction: "negative"},
	"frightened": {Category: "fear", Direction: "negative"},
	"nervous": {Category: "fear", Direction: "negative"},
	"jittery": {Category: "fear", Direction: "negative"},
	"shaky": {Category: "fear", Direction: "negative"},
	"angry": {Category: "hostility", Direction: "negative"},
	"hostile": {Category: "hostility", Direction: "negative"},
	"irritable": {Category: "hostility", Direction: "negative"},
	"scornful": {Category: "hostility", Direction: "negative"},
	"disgusted": {Category: "hostility", Direction: "negative"},
	"loathing": {Category: "hostility", Direction: "negative"},
	"guilty": {Category: "guilt", Direction: "negative"},
	"ashamed": {Category: "guilt", Direction: "negative"},
	"blameworthy": {Category: "guilt", Direction: "negative"},
	"angry at self": {Category: "guilt", Direction: "negative"},
	"disgusted with self": {Category: "guilt", Direction: "negative"},
	"dissatisfied with self": {Category: "guilt", Direction: "negative"},
	"sad": {Category: "sadness", Direction: "negative"},
	"blue": {Category: "sadness", Direction: "negative"},
	"downhearted": {Category: "sadness", Direction: "negative"},
	"alone": {Category: "sadness", Direction: "negative"},
	"lonely": {Category: "sadness", Direction: "negative"},
	"shy": {Category: "shyness", Direction: "other"},
	"bashful": {Category: "shyness", Direction: "other"},
	"sheepish": {Category: "shyness", Direction: "other"},
	"timid": {Category: "shyness", Direction: "other"},
	"sleepy": {Category: "fatigue", Direction: "other"},
	"tired": {Category: "fatigue", Direction: "other"},
	"sluggish": {Category: "fatigue", Direction: "other"},
	"drowsy": {Category: "fatigue", Direction: "other"},
	"calm": {Category: "serenity", Direction: "other"},
	"relaxed": {Category: "serenity", Direction: "other"},
	"at ease": {Category: "serenity", Direction: "other"},
	"amazed": {Category: "surprise", Direction: "other"},
	"surprised": {Category: "surprise", Direction: "other"},
	"astonished": {Category: "surprise", Direction: "other"},
}
// GeneralPosNegStates is a map of general states and their corresponding
// categories and overall positive/negative emotion; every entry uses the
// catch-all category "general".
var GeneralPosNegStates = map[string]StateC{
	"active": {Category: "general", Direction: "positive"},
	"alert": {Category: "general", Direction: "positive"},
	"attentive": {Category: "general", Direction: "positive"},
	"determined": {Category: "general", Direction: "positive"},
	"enthusiastic": {Category: "general", Direction: "positive"},
	"excited": {Category: "general", Direction: "positive"},
	"inspired": {Category: "general", Direction: "positive"},
	"interested": {Category: "general", Direction: "positive"},
	"proud": {Category: "general", Direction: "positive"},
	"strong": {Category: "general", Direction: "positive"},
	"afraid": {Category: "general", Direction: "negative"},
	"scared": {Category: "general", Direction: "negative"},
	"nervous": {Category: "general", Direction: "negative"},
	"jittery": {Category: "general", Direction: "negative"},
	"irritable": {Category: "general", Direction: "negative"},
	"hostile": {Category: "general", Direction: "negative"},
	"guilty": {Category: "general", Direction: "negative"},
	"ashamed": {Category: "general", Direction: "negative"},
	"upset": {Category: "general", Direction: "negative"},
	"distressed": {Category: "general", Direction: "negative"},
}
// WorldBaseline is based on about 3.5 years (2009-13) of twitter data, about
// 0.48bn tweets. Please see the paper for further details.
// NOTE(review): the keys here use "joviality" while CategoriesMap and
// StatesCategories use "jovility" — confirm callers reconcile the spelling
// before looking categories up across maps.
var WorldBaseline = map[string]float64{
	// positive sentiments
	"joviality": 0.0182421,
	"selfAssurance": 0.0036012,
	"attentiveness": 0.0008997,
	// negative sentiments
	"fear": 0.0063791,
	"hostility": 0.0018225,
	"guilt": 0.0021756,
	"sadness": 0.0086279,
	// other sentiments
	"shyness": 0.0007608,
	"fatigue": 0.0240757,
	"surprise": 0.0084612,
	"serenity": 0.0022914,
	// overall (computed as the weighted averages of the positive, negative, and other sentiment values)
	"positive": 0.007581,
	"negative": 0.004751275,
	"other": 0.008897275,
}
package main
import (
"fmt"
"math"
"code.google.com/p/gomatrix/matrix"
"eurobot/extkalman"
)
// position is a point in the 2D plane.
type position struct {
	X float64
	Y float64
}

// newPos returns a freshly allocated position at the given coordinates.
func newPos(X, Y float64) *position {
	return &position{X: X, Y: Y}
}
// Example: an extended Kalman filter localising a robot from distance
// measurements to three fixed beacons.
//
// State vector layout (4x1): [x, vx, y, vy] — position and velocity on each
// axis, as encoded by the transition matrix A in dfdx below. The measurement
// vector y holds the Euclidean distances from the robot to beacons A, B, C.
func main() {
	// Beacon positions
	var beaconA = newPos( 0.0, 0.0)
	var beaconB = newPos( 0.0, 2000.0)
	var beaconC = newPos(3000.0, 1000.0)
	// First state
	var x0 matrix.Matrix = matrix.MakeDenseMatrix([]float64{200, 0, 200, 0}, 4, 1)
	var P0 matrix.Matrix = matrix.Diagonal([]float64{10000, 100, 10000, 100})
	fmt.Printf(">init:\n")
	fmt.Printf("x0:\n%v\n\n", x0)
	fmt.Printf("P0:\n%v\n\n", P0)
	// x(k+1) = Ax + Bu + Ww
	// W injects process noise into the velocity components only.
	var W matrix.Matrix = matrix.MakeDenseMatrix([]float64{
		0.0, 0.0,
		1.0, 0.0,
		0.0, 0.0,
		0.0, 1.0}, 4, 2)
	fmt.Printf("> noise\n")
	fmt.Printf("W:\n%v\n\n", W)
	// Design
	var d = 0.01*0.01
	var Q matrix.Matrix = matrix.Diagonal([]float64{d, d})
	var R matrix.Matrix = matrix.Diagonal([]float64{d, d, d})
	fmt.Printf(">Design\n")
	fmt.Printf("Q:\n%v\n\n", Q)
	fmt.Printf("R:\n%v\n\n", R)
	// Init filter
	var u matrix.Matrix = matrix.MakeDenseMatrix([]float64{0, 0}, 2, 1)
	var y matrix.Matrix = matrix.MakeDenseMatrix([]float64{1000, 1000, 3000}, 3, 1)
	// df(x,u)/dx
	dfdx := func(x, u matrix.Matrix) matrix.Matrix {
		// The process is independent of possition, df/dx = A
		T := 1.0
		A := matrix.MakeDenseMatrix([]float64{
			1.0, T, 0.0, 0.0,
			0.0, 1.0, 0.0, 0.0,
			0.0, 0.0, 1.0, T,
			0.0, 0.0, 0.0, 1.0}, 4, 4)
		return A
	}
	// Estimated movement of robot given state x
	f := func(x, u matrix.Matrix) matrix.Matrix {
		// x(k+1) = Ax + Bu
		var A = dfdx(x, u)
		return matrix.Product(A, x)
		// Commented out: take motor gain in to account:
		/*
			var B matrix.Matrix = matrix.MakeDenseMatrix([]float64{
				0.0, 0.0,
				0.0, 0.0,
				0.0, 0.0,
				0.0, 0.0}, 4, 2)
			return matrix.Sum( matrix.Product(A, x), matrix.Product(B, u))
		*/
	}
	// dh(x,u)/dx
	dhdx := func(x matrix.Matrix) matrix.Matrix {
		// Linearisation of H around x(k)
		// Each denominator is the current distance to the corresponding beacon.
		robot := newPos(x.Get(0, 0), x.Get(2, 0))
		denA := math.Sqrt( math.Pow(robot.X-beaconA.X, 2.0) + math.Pow(robot.Y-beaconA.Y, 2.0) )
		denB := math.Sqrt( math.Pow(robot.X-beaconB.X, 2.0) + math.Pow(robot.Y-beaconB.Y, 2.0) )
		denC := math.Sqrt( math.Pow(robot.X-beaconC.X, 2.0) + math.Pow(robot.Y-beaconC.Y, 2.0) )
		H := matrix.MakeDenseMatrix([]float64{
			(robot.X-beaconA.X)/denA, 0.0, (robot.Y-beaconA.Y)/denA, 0.0,
			(robot.X-beaconB.X)/denB, 0.0, (robot.Y-beaconB.Y)/denB, 0.0,
			(robot.X-beaconC.X)/denC, 0.0, (robot.Y-beaconC.Y)/denC, 0.0}, 3, 4)
		return H
	}
	// Measure estimate given state x
	h:= func(x matrix.Matrix) matrix.Matrix {
		var robot = newPos(x.Get(0, 0), x.Get(2, 0))
		var y matrix.Matrix = matrix.MakeDenseMatrix([]float64{
			math.Sqrt( math.Pow(robot.X-beaconA.X, 2.0) + math.Pow(robot.Y-beaconA.Y, 2.0) ),
			math.Sqrt( math.Pow(robot.X-beaconB.X, 2.0) + math.Pow(robot.Y-beaconB.Y, 2.0) ),
			math.Sqrt( math.Pow(robot.X-beaconC.X, 2.0) + math.Pow(robot.Y-beaconC.Y, 2.0) )}, 3, 1)
		return y
	}
	// State variable
	var x matrix.Matrix
	// Initialize kalman filter
	var filter = extkalman.ExtendedKalman(W, R, Q, x0, P0, f, dfdx, h, dhdx)
	// Run filter
	// NOTE(review): y is never updated inside this loop, so the filter
	// repeatedly processes the same measurement — presumably a placeholder
	// for real sensor input; confirm before reuse.
	fmt.Printf("Running:\n")
	for {
		x = filter.Step(y, u)
		fmt.Printf("x:\n%v\n\n", x)
	}
}
package msm
import (
"fmt"
"math"
"os"
"sort"
"math/bits"
"golang.org/x/exp/rand"
"gonum.org/v1/gonum/mat"
"time"
"gonum.org/v1/gonum/stat"
"gonum.org/v1/gonum/stat/distuv"
"gonum.org/v1/gonum/optimize"
)
// ISPI is 1/sqrt(2*pi), the normalizing constant of the standard normal
// density; negloglik multiplies it into every Gaussian density evaluation.
const ISPI = 1.0 / math.Sqrt2 / math.SqrtPi
// probMat builds the transition probability matrix of the product of
// len(p) independent two-state Markov chains, where p[i]/2 is the
// off-diagonal (switching) probability of chain i. The result is the
// 2^len(p) x 2^len(p) Kronecker product of the per-chain 2x2 matrices.
func probMat(p []float64) *mat.Dense {
	a := mat.NewDense(2, 2, []float64{1, 1, 1, 1})
	c := mat.NewDense(1, 1, []float64{1.0})
	var tmp mat.Dense
	for i := range p {
		gi := p[i] * 0.5
		a.Set(0, 0, 1.0-gi)
		a.Set(0, 1, gi)
		a.Set(1, 0, gi)
		a.Set(1, 1, 1.0-gi)
		// Fold chain i into the accumulated product.
		tmp.Kronecker(c, a)
		// CloneFrom resizes the receiver itself, so no explicit growing of
		// c is needed (the original code called c.Grow and discarded its
		// return value, which was a no-op).
		c.CloneFrom(&tmp)
		tmp.Reset()
	}
	return c
}
// sigma returns the volatility implied by each Markov state: the base
// volatility s0 scaled by sqrt(m1^m * m0^(k-m)), where m = M[i] is the
// number of components that are "ON" and m1 = 2 - m0.
func sigma(m0 float64, s0 float64, k int, M []int) []float64 {
	m1 := 2.0 - m0
	out := make([]float64, len(M))
	for i, mi := range M {
		out[i] = s0 * math.Sqrt(math.Pow(m1, float64(mi))*math.Pow(m0, float64(k-mi)))
	}
	return out
}
// states returns, for every state index of the product chain (0 .. 2^k-1),
// the number of component chains that are "ON" in that state — only this
// count matters when computing the state's volatility.
func states(k int) []int {
	// 1 << k is the exact integer power of two; the previous
	// int(math.Pow(2, k)) round-tripped through floating point.
	st := make([]int, 1<<k)
	for i := range st {
		st[i] = bits.OnesCount(uint(i))
	}
	return st
}
// sigmoid is the logistic function, mapping the real line onto (0, 1).
func sigmoid(x float64) float64 {
	return 1.0 / (1.0 + math.Exp(-x))
}
// negloglik computes the negative log-likelihood of the MSM model with
// unconstrained parameters par for observations x, using the standard
// Hamilton-style filter: the state probability distribution B is propagated
// through the transition matrix, reweighted by the Gaussian density of each
// observation under every state's volatility, and the log of each step's
// normalizing constant is accumulated.
func negloglik(par []float64, x []float64, k int, states []int) float64 {
	// Transform (-inf, inf) domain to appropriate parameter domains
	p := transformParams(par)
	n := len(states)
	// Compute transition probability matrix for product of Markov chains
	A := probMat(p[2:])
	// Compute volatility of each state
	s := sigma(p[0], p[1], k, states)
	// Initialise unconditional probability distribution of states
	B := make([]float64, n)
	for i := range B {
		B[i] = 1.0 / float64(n)
	}
	// Auxiliary variables
	wx := make([]float64, n)
	var tmp mat.Dense
	sw := 0.0
	// Initialise log-likelihood
	ll := 0.0
	for i := range x {
		// wx[j] is the normal density of x[i] under state j's volatility.
		for j := range wx {
			wx[j] = ISPI * math.Exp(-0.5*x[i]*x[i]/s[j]/s[j]) / s[j]
		}
		// One-step prediction of the state distribution: A * B.
		tmp.Mul(A, mat.NewDense(n, 1, B))
		// sw is the likelihood of observation i (the normalizing constant).
		sw = 0.0
		for j := range wx {
			sw += wx[j] * tmp.At(j, 0)
		}
		ll += math.Log(sw)
		// Bayes update: posterior state distribution given observation i.
		for j := range B {
			B[j] = wx[j] * tmp.At(j, 0) / sw
		}
	}
	return -ll
}
// transformParams maps unconstrained optimizer parameters onto the model's
// parameter domains: par[0] -> m0 in (1, 2), par[1] -> s0 in (0, inf), and
// every remaining entry -> a switching probability in (0, 1). The logistic
// transform is inlined here.
func transformParams(par []float64) []float64 {
	out := make([]float64, len(par))
	out[0] = 1.0 + 1.0/(1.0+math.Exp(-par[0]))
	out[1] = math.Exp(par[1])
	for i, v := range par[2:] {
		out[i+2] = 1.0 / (1.0 + math.Exp(-v))
	}
	return out
}
// simulate draws nsims returns from the MSM model with (transformed)
// parameters par = [m0, s0, p1..pk]: a state path is sampled from the
// product Markov chain and each return is a standard normal draw scaled by
// the sampled state's volatility.
func simulate(par []float64, nsims int) []float64 {
	m0, s0 := par[0], par[1]
	p := par[2:]
	k := len(p)
	A := probMat(p)
	M := states(k)
	s := sigma(m0, s0, k, M)
	x := make([]float64, nsims)
	// Seed one source and reuse it. The previous code re-seeded from
	// time.Now().UnixNano() on every iteration, which repeats seeds (and
	// therefore state transitions) whenever iterations run faster than the
	// clock's resolution.
	src := rand.NewSource(uint64(time.Now().UnixNano()))
	d1 := distuv.Normal{Mu: 0.0, Sigma: 1.0}
	// Initial state drawn from the first row of A.
	st := int(distuv.NewCategorical(mat.Row(nil, 0, A), src).Rand())
	x[0] = d1.Rand() * s[st]
	for i := 1; i < nsims; i++ {
		// Transition: next state drawn from row st of A.
		st = int(distuv.NewCategorical(mat.Row(nil, st, A), src).Rand())
		x[i] = d1.Rand() * s[st]
	}
	return x
}
// Predict Monte-Carlo estimates volatility: it simulates `paths` independent
// return paths of length pathLen from parameters par, takes the standard
// deviation of each path, prints a summary, and returns the mean and the
// standard deviation of those per-path volatilities as [mean, sd].
func Predict(par []float64, paths int, pathLen int) []float64 {
	vol := make([]float64, paths)
	for i := range vol {
		// simulate allocates its own result slice, so the previous
		// preallocation of ret outside the loop was dead code.
		ret := simulate(par, pathLen)
		vol[i] = stat.StdDev(ret, nil)
	}
	res := make([]float64, 2)
	res[0], res[1] = stat.MeanStdDev(vol, nil)
	fmt.Printf("-------------- Vol prediction ---------------\n")
	fmt.Printf("Mean:\t%0.4f\n", res[0])
	fmt.Printf("SD:\t%0.4f\n", res[1])
	fmt.Printf("----------------------------------------------\n")
	return res
}
// Fit an MSM-BT model of dimension k to data x
func Fit(x []float64, k int) []float64 {
// initialise parameters for negloglik function
par := make([]float64, k+2)
dist := distuv.Normal{Mu: 0.0, Sigma: 1.0}
for i := range par {
par[i] = dist.Rand()
}
// Use sample standard deviation for s0 param of model
sd := stat.StdDev(x, nil)
par[1] = math.Log(sd)
// calculate Markov chain states
M := states(k)
// compute negative of log likelihood
start := time.Now()
problem := optimize.Problem{
Func: func(par []float64) float64 {
return negloglik(par, x, k, M)
},
}
result, err := optimize.Minimize(problem, par, nil, &optimize.NelderMead{})
if err != nil {
fmt.Println(err)
os.Exit(-1)
}
fmt.Printf("---------- MSM-BT Model Fit Results ----------\n")
fmt.Printf("MLE took %v seconds\n", time.Since(start))
fmt.Printf("Numberof func evals: %d\n", result.Stats.FuncEvaluations)
fmt.Printf("Status:\t%v\n", result.Status)
fmt.Printf("Loglik:\t%0.f\n", result.F)
res := transformParams(result.X)
fmt.Printf("-------------- Model parameters --------------\n")
fmt.Printf("m0:\t%0.4f\n", res[0])
fmt.Printf("s0:\t%0.4f\n", res[1])
ps := res[2:]
sort.Float64s(ps)
for i := range ps {
fmt.Printf("p%d:\t%0.4f\n", i+1, ps[i])
}
// Return model parameters and objective value
res = append(res, result.F)
return res
} | model.go | 0.677794 | 0.501465 | model.go | starcoder |
package test
import (
"io"
"testing"
"github.com/stretchr/testify/assert"
"github.com/icon-project/goloop/module"
)
// AssertBlock checks that blk reports the expected version, height, ID and
// previous-block ID.
func AssertBlock(
	t *testing.T, blk module.Block,
	version int, height int64, id []byte, prevID []byte,
) {
	assert.EqualValues(t, version, blk.Version())
	assert.EqualValues(t, height, blk.Height())
	assert.EqualValues(t, id, blk.ID())
	assert.EqualValues(t, prevID, blk.PrevID())
}
// AssertBlockInBM fetches the block from bm both by height and by ID and
// asserts that each lookup yields a block with the expected fields.
func AssertBlockInBM(
	t *testing.T, bm module.BlockManager,
	version int, height int64, id []byte, prevID []byte,
) {
	blk, err := bm.GetBlockByHeight(height)
	assert.NoError(t, err)
	AssertBlock(t, blk, version, height, id, prevID)
	blk, err = bm.GetBlock(id)
	assert.NoError(t, err)
	AssertBlock(t, blk, version, height, id, prevID)
}
// AssertLastBlock reads bm's last block and asserts its height, previous ID
// and version — both on the returned block itself and via bm's height/ID
// lookups (using the last block's own ID as the expected ID).
func AssertLastBlock(
	t *testing.T, bm module.BlockManager,
	height int64, prevID []byte, version int,
) {
	blk, err := bm.GetLastBlock()
	assert.NoError(t, err)
	AssertBlock(t, blk, version, height, blk.ID(), prevID)
	AssertBlockInBM(t, bm, version, height, blk.ID(), prevID)
}
// GetLastBlock returns bm's last block, failing the test on error.
func GetLastBlock(t *testing.T, bm module.BlockManager) module.Block {
	blk, err := bm.GetLastBlock()
	assert.NoError(t, err)
	return blk
}
// cbResult carries the outcome of an asynchronous block-manager callback:
// the delivered block candidate and the error passed to the callback.
type cbResult struct {
	bc module.BlockCandidate
	cbErr error
}
// ProposeBlock proposes a block on top of prevID with the given vote set and
// waits until the block manager delivers the candidate through its callback.
// err reports a synchronous failure of Propose itself; cbError reports an
// error delivered asynchronously via the callback.
func ProposeBlock(
	bm module.BlockManager,
	prevID []byte, votes module.CommitVoteSet,
) (bc module.BlockCandidate, err error, cbError error) {
	// Buffered so that a callback invoked synchronously (before Propose
	// returns) cannot deadlock on the channel send.
	ch := make(chan cbResult, 1)
	_, err = bm.Propose(
		prevID, votes, func(bc module.BlockCandidate, err error) {
			ch <- cbResult{bc, err}
		},
	)
	if err != nil {
		return nil, err, nil
	}
	res := <-ch
	return res.bc, nil, res.cbErr
}
// ImportBlockByReader imports a block read from r and waits until the block
// manager delivers the resulting candidate through its callback. The
// callback error is also asserted to be nil on t. err reports a synchronous
// failure of Import; cbErr an error delivered via the callback.
func ImportBlockByReader(
	t *testing.T, bm module.BlockManager,
	r io.Reader, flag int,
) (resBc module.BlockCandidate, err error, cbErr error) {
	// Buffered so that a callback invoked synchronously (before Import
	// returns) cannot deadlock on the channel send.
	ch := make(chan cbResult, 1)
	_, err = bm.Import(
		r, flag, func(bc module.BlockCandidate, err error) {
			assert.NoError(t, err)
			ch <- cbResult{bc, err}
		},
	)
	if err != nil {
		return nil, err, nil
	}
	res := <-ch
	return res.bc, nil, res.cbErr
}
// ImportBlock imports an existing block candidate and waits until the block
// manager delivers the resulting candidate through its callback. The
// callback error is also asserted to be nil on t. err reports a synchronous
// failure of ImportBlock; cbErr an error delivered via the callback.
func ImportBlock(
	t *testing.T, bm module.BlockManager,
	bc module.BlockCandidate, flag int,
) (resBc module.BlockCandidate, err error, cbErr error) {
	// Buffered so that a callback invoked synchronously (before ImportBlock
	// returns) cannot deadlock on the channel send.
	ch := make(chan cbResult, 1)
	_, err = bm.ImportBlock(
		bc, flag, func(bc module.BlockCandidate, err error) {
			assert.NoError(t, err)
			ch <- cbResult{bc, err}
		},
	)
	if err != nil {
		return nil, err, nil
	}
	res := <-ch
	return res.bc, nil, res.cbErr
}
// FinalizeBlock finalizes the given block candidate on bm, failing the test
// on error.
func FinalizeBlock(
	t *testing.T, bm module.BlockManager, bc module.BlockCandidate,
) {
	err := bm.Finalize(bc)
	assert.NoError(t, err)
}
package elasticsearch
// IndexTemplate is the Elasticsearch index template applied to "logstash-*"
// indices. It maps string fields as analyzed with a not_analyzed "raw"
// sub-field, enables doc_values on numeric/date/geo fields, disables
// fielddata on analyzed strings, and defines a "geoip" object mapping.
const IndexTemplate string = `
{
  "template" : "logstash-*",
  "settings" : {
    "index.refresh_interval" : "5s"
  },
  "mappings" : {
    "_default_" : {
      "_all" : {"enabled" : true, "omit_norms" : true},
      "dynamic_templates" : [ {
        "message_field" : {
          "match" : "message",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "string", "index" : "analyzed", "omit_norms" : true,
            "fielddata" : { "format" : "disabled" }
          }
        }
      }, {
        "string_fields" : {
          "match" : "*",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "string", "index" : "analyzed", "omit_norms" : true,
            "fielddata" : { "format" : "disabled" },
            "fields" : {
              "raw" : {"type": "string", "index" : "not_analyzed", "doc_values" : true, "ignore_above" : 256}
            }
          }
        }
      }, {
        "float_fields" : {
          "match" : "*",
          "match_mapping_type" : "float",
          "mapping" : { "type" : "float", "doc_values" : true }
        }
      }, {
        "double_fields" : {
          "match" : "*",
          "match_mapping_type" : "double",
          "mapping" : { "type" : "double", "doc_values" : true }
        }
      }, {
        "byte_fields" : {
          "match" : "*",
          "match_mapping_type" : "byte",
          "mapping" : { "type" : "byte", "doc_values" : true }
        }
      }, {
        "short_fields" : {
          "match" : "*",
          "match_mapping_type" : "short",
          "mapping" : { "type" : "short", "doc_values" : true }
        }
      }, {
        "integer_fields" : {
          "match" : "*",
          "match_mapping_type" : "integer",
          "mapping" : { "type" : "integer", "doc_values" : true }
        }
      }, {
        "long_fields" : {
          "match" : "*",
          "match_mapping_type" : "long",
          "mapping" : { "type" : "long", "doc_values" : true }
        }
      }, {
        "date_fields" : {
          "match" : "*",
          "match_mapping_type" : "date",
          "mapping" : { "type" : "date", "doc_values" : true }
        }
      }, {
        "geo_point_fields" : {
          "match" : "*",
          "match_mapping_type" : "geo_point",
          "mapping" : { "type" : "geo_point", "doc_values" : true }
        }
      } ],
      "properties" : {
        "@timestamp": { "type": "date", "doc_values" : true },
        "@version": { "type": "string", "index": "not_analyzed", "doc_values" : true },
        "geoip"  : {
          "type" : "object",
          "dynamic": true,
          "properties" : {
            "ip": { "type": "ip", "doc_values" : true },
            "location" : { "type" : "geo_point", "doc_values" : true },
            "latitude" : { "type" : "float", "doc_values" : true },
            "longitude" : { "type" : "float", "doc_values" : true }
          }
        }
      }
    }
  }
}
`
package forGraphBLASGo
import "github.com/intel/forGoParallel/pipeline"
// vectorApply is a lazily evaluated element-wise application of the unary
// operator op to the stored elements of vector u.
type vectorApply[Dw, Du any] struct {
	op UnaryOp[Dw, Du]
	u *vectorReference[Du]
}

// newVectorApply builds the deferred computation op(u).
func newVectorApply[Dw, Du any](op UnaryOp[Dw, Du], u *vectorReference[Du]) computeVectorT[Dw] {
	return vectorApply[Dw, Du]{op: op, u: u}
}

// resize returns the same computation over u resized to newSize.
func (compute vectorApply[Dw, Du]) resize(newSize int) computeVectorT[Dw] {
	return vectorApply[Dw, Du]{
		op: compute.op,
		u: compute.u.resize(newSize),
	}
}

// computeElement returns op applied to u's element at index; ok is false
// when u has no stored value there.
func (compute vectorApply[Dw, Du]) computeElement(index int) (result Dw, ok bool) {
	if value, ok := compute.u.extractElement(index); ok {
		return compute.op(value), true
	}
	return
}

// computePipeline extends u's pipeline with a parallel stage that applies op
// to every stored value of each incoming slice, or returns nil when u has
// no pipeline.
func (compute vectorApply[Dw, Du]) computePipeline() *pipeline.Pipeline[any] {
	p := compute.u.getPipeline()
	if p == nil {
		return nil
	}
	p.Add(
		pipeline.Par(pipeline.Receive(func(_ int, data any) any {
			slice := data.(vectorSlice[Du])
			result := vectorSlice[Dw]{
				// A fresh values slice is allocated, so the value
				// copy-on-write flag (cowv) is cleared; indices are shared.
				cow: slice.cow &^ cowv,
				indices: slice.indices,
				values: make([]Dw, len(slice.values)),
			}
			for i, value := range slice.values {
				result.values[i] = compute.op(value)
			}
			return result
		})),
	)
	return p
}
// vectorApplyBinaryOp1st lazily applies op(value, u[i]) to every stored
// element of u, with a fixed plain value bound as the first operand.
type vectorApplyBinaryOp1st[Dw, Ds, Du any] struct {
	op BinaryOp[Dw, Ds, Du]
	value Ds
	u *vectorReference[Du]
}

// newVectorApplyBinaryOp1st builds the deferred computation op(value, u).
func newVectorApplyBinaryOp1st[Dw, Ds, Du any](op BinaryOp[Dw, Ds, Du], value Ds, u *vectorReference[Du]) computeVectorT[Dw] {
	return vectorApplyBinaryOp1st[Dw, Ds, Du]{op: op, value: value, u: u}
}

// resize returns the same computation over u resized to newSize.
func (compute vectorApplyBinaryOp1st[Dw, Ds, Du]) resize(newSize int) computeVectorT[Dw] {
	return vectorApplyBinaryOp1st[Dw, Ds, Du]{
		op: compute.op,
		value: compute.value,
		u: compute.u.resize(newSize),
	}
}

// computeElement returns op(value, u[index]); ok is false when u has no
// stored value at index.
func (compute vectorApplyBinaryOp1st[Dw, Ds, Du]) computeElement(index int) (result Dw, ok bool) {
	if u, uok := compute.u.extractElement(index); uok {
		return compute.op(compute.value, u), true
	}
	return
}

// computePipeline extends u's pipeline with a parallel stage applying
// op(value, ·) to every stored value, or returns nil when u has no pipeline.
func (compute vectorApplyBinaryOp1st[Dw, Ds, Du]) computePipeline() *pipeline.Pipeline[any] {
	p := compute.u.getPipeline()
	if p == nil {
		return nil
	}
	p.Add(
		pipeline.Par(pipeline.Receive(func(_ int, data any) any {
			slice := data.(vectorSlice[Du])
			result := vectorSlice[Dw]{
				// Fresh values slice: clear the value copy-on-write flag.
				cow: slice.cow &^ cowv,
				indices: slice.indices,
				values: make([]Dw, len(slice.values)),
			}
			for i, value := range slice.values {
				result.values[i] = compute.op(compute.value, value)
			}
			return result
		})),
	)
	return p
}
// vectorApplyBinaryOp1stScalar lazily applies op(s, u[i]) to every stored
// element of u, with the first operand taken from a scalar object. Using an
// empty scalar is a runtime error (panics with EmptyObject).
type vectorApplyBinaryOp1stScalar[Dw, Ds, Du any] struct {
	op BinaryOp[Dw, Ds, Du]
	value *scalarReference[Ds]
	u *vectorReference[Du]
}

// newVectorApplyBinaryOp1stScalar builds the deferred computation op(value, u).
func newVectorApplyBinaryOp1stScalar[Dw, Ds, Du any](op BinaryOp[Dw, Ds, Du], value *scalarReference[Ds], u *vectorReference[Du]) computeVectorT[Dw] {
	return vectorApplyBinaryOp1stScalar[Dw, Ds, Du]{op: op, value: value, u: u}
}

// resize returns the same computation over u resized to newSize.
func (compute vectorApplyBinaryOp1stScalar[Dw, Ds, Du]) resize(newSize int) computeVectorT[Dw] {
	return vectorApplyBinaryOp1stScalar[Dw, Ds, Du]{
		op: compute.op,
		value: compute.value,
		u: compute.u.resize(newSize),
	}
}

// computeElement returns op(s, u[index]) where s is the scalar's value; ok
// is false when u has no stored value at index. Panics with EmptyObject if
// the scalar is empty.
func (compute vectorApplyBinaryOp1stScalar[Dw, Ds, Du]) computeElement(index int) (result Dw, ok bool) {
	if u, uok := compute.u.extractElement(index); uok {
		if s, sok := compute.value.extractElement(); sok {
			return compute.op(s, u), true
		} else {
			panic(EmptyObject)
		}
	}
	return
}

// computePipeline extends u's pipeline with a parallel stage applying
// op(s, ·); the scalar is extracted once up front (panicking with
// EmptyObject if empty). Returns nil when u has no pipeline.
func (compute vectorApplyBinaryOp1stScalar[Dw, Ds, Du]) computePipeline() *pipeline.Pipeline[any] {
	p := compute.u.getPipeline()
	if p == nil {
		return nil
	}
	s, sok := compute.value.extractElement()
	if !sok {
		panic(EmptyObject)
	}
	p.Add(
		pipeline.Par(pipeline.Receive(func(_ int, data any) any {
			slice := data.(vectorSlice[Du])
			result := vectorSlice[Dw]{
				// Fresh values slice: clear the value copy-on-write flag.
				cow: slice.cow &^ cowv,
				indices: slice.indices,
				values: make([]Dw, len(slice.values)),
			}
			for i, value := range slice.values {
				result.values[i] = compute.op(s, value)
			}
			return result
		})),
	)
	return p
}
// vectorApplyBinaryOp2nd lazily applies op(u[i], value) to every stored
// element of u, with a fixed plain value bound as the second operand.
type vectorApplyBinaryOp2nd[Dw, Du, Ds any] struct {
	op BinaryOp[Dw, Du, Ds]
	u *vectorReference[Du]
	value Ds
}

// newVectorApplyBinaryOp2nd builds the deferred computation op(u, value).
func newVectorApplyBinaryOp2nd[Dw, Du, Ds any](op BinaryOp[Dw, Du, Ds], u *vectorReference[Du], value Ds) computeVectorT[Dw] {
	return vectorApplyBinaryOp2nd[Dw, Du, Ds]{op: op, u: u, value: value}
}

// resize returns the same computation over u resized to newSize.
func (compute vectorApplyBinaryOp2nd[Dw, Du, Ds]) resize(newSize int) computeVectorT[Dw] {
	return vectorApplyBinaryOp2nd[Dw, Du, Ds]{
		op: compute.op,
		u: compute.u.resize(newSize),
		value: compute.value,
	}
}

// computeElement returns op(u[index], value); ok is false when u has no
// stored value at index.
func (compute vectorApplyBinaryOp2nd[Dw, Du, Ds]) computeElement(index int) (result Dw, ok bool) {
	if u, uok := compute.u.extractElement(index); uok {
		return compute.op(u, compute.value), true
	}
	return
}

// computePipeline extends u's pipeline with a parallel stage applying
// op(·, value) to every stored value, or returns nil when u has no pipeline.
func (compute vectorApplyBinaryOp2nd[Dw, Du, Ds]) computePipeline() *pipeline.Pipeline[any] {
	p := compute.u.getPipeline()
	if p == nil {
		return nil
	}
	p.Add(
		pipeline.Par(pipeline.Receive(func(_ int, data any) any {
			slice := data.(vectorSlice[Du])
			result := vectorSlice[Dw]{
				// Fresh values slice: clear the value copy-on-write flag.
				cow: slice.cow &^ cowv,
				indices: slice.indices,
				values: make([]Dw, len(slice.values)),
			}
			for i, value := range slice.values {
				result.values[i] = compute.op(value, compute.value)
			}
			return result
		})),
	)
	return p
}
// vectorApplyBinaryOp2ndScalar lazily applies op(u[i], s) to every stored
// element of u, with the second operand taken from a scalar object. Using an
// empty scalar is a runtime error (panics with EmptyObject).
type vectorApplyBinaryOp2ndScalar[Dw, Du, Ds any] struct {
	op BinaryOp[Dw, Du, Ds]
	u *vectorReference[Du]
	value *scalarReference[Ds]
}

// newVectorApplyBinaryOp2ndScalar builds the deferred computation op(u, value).
func newVectorApplyBinaryOp2ndScalar[Dw, Du, Ds any](op BinaryOp[Dw, Du, Ds], u *vectorReference[Du], value *scalarReference[Ds]) computeVectorT[Dw] {
	return vectorApplyBinaryOp2ndScalar[Dw, Du, Ds]{op: op, u: u, value: value}
}

// resize returns the same computation over u resized to newSize.
func (compute vectorApplyBinaryOp2ndScalar[Dw, Du, Ds]) resize(newSize int) computeVectorT[Dw] {
	return vectorApplyBinaryOp2ndScalar[Dw, Du, Ds]{
		op: compute.op,
		u: compute.u.resize(newSize),
		value: compute.value,
	}
}

// computeElement returns op(u[index], s) where s is the scalar's value; ok
// is false when u has no stored value at index. Panics with EmptyObject if
// the scalar is empty.
func (compute vectorApplyBinaryOp2ndScalar[Dw, Du, Ds]) computeElement(index int) (result Dw, ok bool) {
	if u, uok := compute.u.extractElement(index); uok {
		if s, sok := compute.value.extractElement(); sok {
			return compute.op(u, s), true
		} else {
			panic(EmptyObject)
		}
	}
	return
}

// computePipeline extends u's pipeline with a parallel stage applying
// op(·, s); the scalar is extracted once up front (panicking with
// EmptyObject if empty). Returns nil when u has no pipeline.
func (compute vectorApplyBinaryOp2ndScalar[Dw, Du, Ds]) computePipeline() *pipeline.Pipeline[any] {
	p := compute.u.getPipeline()
	if p == nil {
		return nil
	}
	s, sok := compute.value.extractElement()
	if !sok {
		panic(EmptyObject)
	}
	p.Add(
		pipeline.Par(pipeline.Receive(func(_ int, data any) any {
			slice := data.(vectorSlice[Du])
			result := vectorSlice[Dw]{
				// Fresh values slice: clear the value copy-on-write flag.
				cow: slice.cow &^ cowv,
				indices: slice.indices,
				values: make([]Dw, len(slice.values)),
			}
			for i, value := range slice.values {
				result.values[i] = compute.op(value, s)
			}
			return result
		})),
	)
	return p
}
// vectorApplyIndexOp lazily applies the index unary operator op to every
// stored element of u, passing the element's index (and a column of 0,
// since this is a vector) plus a fixed plain auxiliary value s.
type vectorApplyIndexOp[Dw, Du, Ds any] struct {
	op IndexUnaryOp[Dw, Du, Ds]
	u *vectorReference[Du]
	s Ds
}

// newVectorApplyIndexOp builds the deferred computation op(u, index, 0, s).
func newVectorApplyIndexOp[Dw, Du, Ds any](op IndexUnaryOp[Dw, Du, Ds], u *vectorReference[Du], s Ds) computeVectorT[Dw] {
	return vectorApplyIndexOp[Dw, Du, Ds]{op: op, u: u, s: s}
}

// resize returns the same computation over u resized to newSize.
func (compute vectorApplyIndexOp[Dw, Du, Ds]) resize(newSize int) computeVectorT[Dw] {
	return vectorApplyIndexOp[Dw, Du, Ds]{
		op: compute.op,
		u: compute.u.resize(newSize),
		s: compute.s,
	}
}

// computeElement returns op(u[index], index, 0, s); ok is false when u has
// no stored value at index.
func (compute vectorApplyIndexOp[Dw, Du, Ds]) computeElement(index int) (result Dw, ok bool) {
	if value, ok := compute.u.extractElement(index); ok {
		return compute.op(value, index, 0, compute.s), true
	}
	return
}

// computePipeline extends u's pipeline with a parallel stage applying op to
// every stored value together with its index, or returns nil when u has no
// pipeline.
func (compute vectorApplyIndexOp[Dw, Du, Ds]) computePipeline() *pipeline.Pipeline[any] {
	p := compute.u.getPipeline()
	if p == nil {
		return nil
	}
	p.Add(
		pipeline.Par(pipeline.Receive(func(_ int, data any) any {
			slice := data.(vectorSlice[Du])
			result := vectorSlice[Dw]{
				// Fresh values slice: clear the value copy-on-write flag.
				cow: slice.cow &^ cowv,
				indices: slice.indices,
				values: make([]Dw, len(slice.values)),
			}
			for i, value := range slice.values {
				result.values[i] = compute.op(value, slice.indices[i], 0, compute.s)
			}
			return result
		})),
	)
	return p
}
// vectorApplyIndexOpScalar lazily applies the index unary operator op to
// every stored element of u, passing the element's index (column fixed at 0)
// and an auxiliary value taken from a scalar object. Using an empty scalar
// is a runtime error (panics with EmptyObject).
type vectorApplyIndexOpScalar[Dw, Du, Ds any] struct {
	op IndexUnaryOp[Dw, Du, Ds]
	u *vectorReference[Du]
	s *scalarReference[Ds]
}

// newVectorApplyIndexOpScalar builds the deferred computation op(u, index, 0, s).
func newVectorApplyIndexOpScalar[Dw, Du, Ds any](op IndexUnaryOp[Dw, Du, Ds], u *vectorReference[Du], s *scalarReference[Ds]) computeVectorT[Dw] {
	return vectorApplyIndexOpScalar[Dw, Du, Ds]{op: op, u: u, s: s}
}

// resize returns the same computation over u resized to newSize.
func (compute vectorApplyIndexOpScalar[Dw, Du, Ds]) resize(newSize int) computeVectorT[Dw] {
	return vectorApplyIndexOpScalar[Dw, Du, Ds]{
		op: compute.op,
		u: compute.u.resize(newSize),
		s: compute.s,
	}
}

// computeElement returns op(u[index], index, 0, s) where s is the scalar's
// value; ok is false when u has no stored value at index. Panics with
// EmptyObject if the scalar is empty.
func (compute vectorApplyIndexOpScalar[Dw, Du, Ds]) computeElement(index int) (result Dw, ok bool) {
	if u, uok := compute.u.extractElement(index); uok {
		if s, sok := compute.s.extractElement(); sok {
			return compute.op(u, index, 0, s), true
		} else {
			panic(EmptyObject)
		}
	}
	return
}

// computePipeline extends u's pipeline with a parallel stage applying op to
// every stored value together with its index; the scalar is extracted once
// up front (panicking with EmptyObject if empty). Returns nil when u has no
// pipeline.
func (compute vectorApplyIndexOpScalar[Dw, Du, Ds]) computePipeline() *pipeline.Pipeline[any] {
	p := compute.u.getPipeline()
	if p == nil {
		return nil
	}
	s, sok := compute.s.extractElement()
	if !sok {
		panic(EmptyObject)
	}
	p.Add(
		pipeline.Par(pipeline.Receive(func(_ int, data any) any {
			slice := data.(vectorSlice[Du])
			result := vectorSlice[Dw]{
				// Fresh values slice: clear the value copy-on-write flag.
				cow: slice.cow &^ cowv,
				indices: slice.indices,
				values: make([]Dw, len(slice.values)),
			}
			for i, value := range slice.values {
				result.values[i] = compute.op(value, slice.indices[i], 0, s)
			}
			return result
		})),
	)
	return p
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.