code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package grid
import (
"fmt"
"github.com/callum-oakley/fgwm/wmutils"
)
// getRectangle returns the grid-coordinate rectangle currently occupied by
// window wid, derived from its pixel position and size as reported by X.
// Returns a wrapped error if the window attributes cannot be fetched.
func (g *Grid) getRectangle(wid wmutils.WindowID) (Rectangle, error) {
	pPos, pSize, err := wmutils.GetAttributes(wid)
	if err != nil {
		return Rectangle{}, fmt.Errorf(
			"error getting rectangle for wid %v: %v",
			wid,
			err,
		)
	}
	// Top-left: pixel position pushed out by the padding; bottom-right:
	// position plus size plus padding and border. Each corner is snapped to
	// the nearest grid point by closestPoint.
	// NOTE(review): this is presumably the inverse of teleportWID's
	// pad/border math — confirm the two stay in sync.
	return Rectangle{
		g.closestPoint(pPos.Offset(g.pad.Scale(-1))),
		g.closestPoint(pPos.Offset(pSize.Add(
			g.pad.Add(wmutils.Size{g.border, g.border}),
		))),
	}, nil
}
// closestPoint maps a pixel position to the nearest grid cell coordinate,
// relative to the grid margins and cell dimensions.
func (g *Grid) closestPoint(p wmutils.Position) Position {
	cellX := float64(p.X-g.margins.Left) / float64(g.cell.W)
	cellY := float64(p.Y-g.margins.Top) / float64(g.cell.H)
	return Position{X: round(cellX), Y: round(cellY)}
}
// round rounds x to the nearest integer, halves away from zero.
// The previous implementation, int(x + 0.5), truncates toward zero and so
// mis-rounds negative inputs (e.g. -0.7 became 0 instead of -1); negative
// values arise in closestPoint when a window sits left of / above the
// grid margins.
func round(x float64) int {
	if x < 0 {
		return int(x - 0.5)
	}
	return int(x + 0.5)
}
// pInGrid reports whether p lies within the grid bounds, edges inclusive.
func (g *Grid) pInGrid(p Position) bool {
	if p.X < 0 || p.X > g.size.W {
		return false
	}
	return p.Y >= 0 && p.Y <= g.size.H
}
// inGrid reports whether both corners of r lie within the grid bounds.
func (g *Grid) inGrid(r Rectangle) bool {
	if !g.pInGrid(r.TopLeft) {
		return false
	}
	return g.pInGrid(r.BottomRight)
}
// pixelSize converts a size expressed in grid cells to a size in pixels by
// multiplying each dimension by the corresponding cell dimension.
func (g *Grid) pixelSize(size Size) wmutils.Size {
	return wmutils.Size{
		W: wmutils.Pixels(size.W) * g.cell.W,
		H: wmutils.Pixels(size.H) * g.cell.H,
	}
}
// pixelPosition converts a grid cell coordinate to the pixel position of
// that cell's top-left corner, offset by the grid margins.
func (g *Grid) pixelPosition(pos Position) wmutils.Position {
	return wmutils.Position{
		X: g.margins.Left + wmutils.Pixels(pos.X)*g.cell.W,
		Y: g.margins.Top + wmutils.Pixels(pos.Y)*g.cell.H,
	}
}
// index returns the position of wid within wids, or (0, error) if it is not
// present. Rewritten with an idiomatic range loop instead of manual
// index-based iteration.
func index(wids []wmutils.WindowID, wid wmutils.WindowID) (int, error) {
	for i, w := range wids {
		if w == wid {
			return i, nil
		}
	}
	return 0, fmt.Errorf("can't find %v in %v", wid, wids)
}
// centerWID moves the window wid so that it is centered in the grid,
// preserving its current grid size.
// Bug fix: the original returned nil when getRectangle failed, silently
// swallowing the error; it now propagates it to the caller.
func (g *Grid) centerWID(wid wmutils.WindowID) error {
	center := Position{g.size.W / 2, g.size.H / 2}
	r, err := g.getRectangle(wid)
	if err != nil {
		return err
	}
	size := r.Size()
	// Half-size offset so the rectangle is symmetric around the center.
	offset := Size{size.W / 2, size.H / 2}
	return g.teleportWID(wid, Rectangle{
		center.Offset(offset.Scale(-1)),
		center.Offset(offset),
	})
}
// teleportWID moves and resizes window wid to cover rectangle r (in grid
// coordinates), first clearing any fullscreen state. Rectangles that are
// invalid or fall outside the grid are silently ignored.
func (g *Grid) teleportWID(wid wmutils.WindowID, r Rectangle) error {
	g.view.Unfullscreen(wid)
	if !g.inGrid(r) || !r.Valid() {
		// Deliberately a no-op rather than an error: out-of-grid moves are
		// simply refused.
		return nil
	}
	return wmutils.Teleport(
		wid,
		// Shift inside the padding; shrink by padding and border on both
		// sides (hence the -2 scale).
		g.pixelPosition(r.TopLeft).Offset(g.pad),
		g.pixelSize(r.Size()).Add(
			g.pad.Add(wmutils.Size{g.border, g.border}).Scale(-2),
		),
	)
}
package economist
import (
"github.com/coschain/contentos-go/common/constants"
. "github.com/coschain/contentos-go/dandelion"
"github.com/coschain/contentos-go/prototype"
"github.com/stretchr/testify/assert"
"math/big"
"testing"
)
// VoteTester drives vote cash-out scenarios against a Dandelion test chain
// using five pre-registered actor accounts (acc4 acts as block producer).
type VoteTester struct {
	acc0, acc1, acc2, acc3, acc4 *DandelionAccount
}
// Test wires up the actor accounts, registers acc4 as a block producer,
// funds acc0/acc1 with vest power, and runs each cash-out sub-scenario.
func (tester *VoteTester) Test(t *testing.T, d *Dandelion) {
	tester.acc0 = d.Account("actor0")
	tester.acc1 = d.Account("actor1")
	tester.acc2 = d.Account("actor2")
	tester.acc3 = d.Account("actor3")
	tester.acc4 = d.Account("actor4")
	registerBlockProducer(tester.acc4, t)
	const VEST = 1000
	SelfTransferToVesting([]*DandelionAccount{tester.acc0, tester.acc1}, VEST, t)
	t.Run("normal", d.Test(tester.normal1))
	t.Run("normal cashout", d.Test(tester.normal2))
	t.Run("normal sequence cashout", d.Test(tester.normal3))
	t.Run("multi cashout", d.Test(tester.normal4))
}
// normal1 checks cash-out timing: the voter's vest balance is unchanged
// until the cash-out block, changes exactly on that block, and stays flat
// on the following block.
func (tester *VoteTester) normal1(t *testing.T, d *Dandelion) {
	a := assert.New(t)
	const POST = 1
	a.NoError(tester.acc0.SendTrxAndProduceBlock(Post(POST, tester.acc0.Name, "title", "content", []string{"1"}, nil)))
	a.NoError(tester.acc1.SendTrxAndProduceBlock(Vote(tester.acc1.Name, POST)))
	// Advance to the block just before the vote cash-out.
	a.NoError(d.ProduceBlocks(constants.VoteCashOutDelayBlock - 2))
	vest0 := d.Account(tester.acc1.Name).GetVest().Value
	// to cashout
	a.NoError(d.ProduceBlocks(1))
	a.NotEqual(d.Account(tester.acc1.Name).GetVest().Value, vest0)
	vest1 := d.Account(tester.acc1.Name).GetVest().Value
	// No further change one block after cash-out.
	a.NoError(d.ProduceBlocks(1))
	a.Equal(d.Account(tester.acc1.Name).GetVest().Value, vest1)
}
// normal2 verifies the exact cash-out accounting for a single vote: the
// voter's vest gain, the global weighted vote power, and the claimed/pool
// reward counters must all match the ProportionAlgorithm prediction.
// NOTE(review): the "except*" locals read as a typo for "expect*".
func (tester *VoteTester) normal2(t *testing.T, d *Dandelion) {
	a := assert.New(t)
	const POST = 2
	a.NoError(tester.acc0.SendTrxAndProduceBlock(Post(POST, tester.acc0.Name, "title", "content", []string{"1"}, nil)))
	a.NoError(tester.acc1.SendTrxAndProduceBlock(Vote(tester.acc1.Name, POST)))
	// Advance to the block just before the vote cash-out.
	a.NoError(d.ProduceBlocks(constants.VoteCashOutDelayBlock - 2))
	vest0 := d.Account(tester.acc1.Name).GetVest().Value
	postWeightedVp := StringToBigInt(d.Post(POST).GetWeightedVp())
	a.NotEqual(postWeightedVp.Int64(), int64(0))
	weightedVp := StringToBigInt(d.Vote(tester.acc1.Name, POST).GetWeightedVp())
	// Predict next block's global state: decayed vote weight plus this
	// vote's weight, and the pool topped up with one block's vote reward.
	decayedVoteWeight := bigDecay(StringToBigInt(d.GlobalProps().GetWeightedVpsVote()))
	totalVoteRewards := new(big.Int).SetUint64(d.GlobalProps().GetPoolVoteRewards().Value)
	nextBlockGlobalVoteReward := new(big.Int).Add(totalVoteRewards, new(big.Int).SetUint64(perBlockVoteReward(d)))
	nextBlockGlobalVoteWeightedVp := new(big.Int).Add(decayedVoteWeight, weightedVp)
	exceptVoteReward := ProportionAlgorithm(weightedVp, nextBlockGlobalVoteWeightedVp, nextBlockGlobalVoteReward)
	exceptGlobalClaimRewardAfterCashout := d.GlobalProps().GetClaimedVoteRewards().Add(&prototype.Vest{Value: exceptVoteReward.Uint64()})
	exceptGlobalRewardAfterCashout := &prototype.Vest{Value: new(big.Int).Sub(nextBlockGlobalVoteReward, exceptVoteReward).Uint64()}
	// to cashout
	a.NoError(d.ProduceBlocks(1))
	vest1 := d.Account(tester.acc1.Name).GetVest().Value
	a.Equal(d.GlobalProps().GetWeightedVpsVote(), nextBlockGlobalVoteWeightedVp.String())
	a.Equal(vest1-vest0, exceptVoteReward.Uint64())
	a.Equal(d.GlobalProps().GetClaimedVoteRewards(), exceptGlobalClaimRewardAfterCashout)
	a.Equal(d.GlobalProps().GetPoolVoteRewards(), exceptGlobalRewardAfterCashout)
}
// normal3 cashes out two votes on the same post on consecutive blocks
// (acc2 one block before acc1) and verifies each payout against the
// evolving global weighted vote power and reward pool.
// NOTE(review): the "except*" locals read as a typo for "expect*".
func (tester *VoteTester) normal3(t *testing.T, d *Dandelion) {
	a := assert.New(t)
	const POST = 3
	a.NoError(tester.acc0.SendTrxAndProduceBlock(Post(POST, tester.acc0.Name, "title", "content", []string{"1"}, nil)))
	// acc2's vote lands one block earlier than acc1's, staggering cash-out.
	a.NoError(tester.acc2.SendTrxAndProduceBlock(Vote(tester.acc2.Name, POST)))
	a.NoError(tester.acc1.SendTrx(Vote(tester.acc1.Name, POST)))
	a.NoError(d.ProduceBlocks(constants.VoteCashOutDelayBlock - 2))
	acc1vest0 := d.Account(tester.acc1.Name).GetVest().Value
	acc2vest0 := d.Account(tester.acc2.Name).GetVest().Value
	postWeightedVp := StringToBigInt(d.Post(POST).GetWeightedVp())
	a.NotEqual(postWeightedVp.Int64(), int64(0))
	acc2WeightedVp := StringToBigInt(d.Vote(tester.acc2.Name, POST).GetWeightedVp())
	// Predict acc2's payout on its cash-out block.
	decayedVoteWeight := bigDecay(StringToBigInt(d.GlobalProps().GetWeightedVpsVote()))
	totalVoteRewards := new(big.Int).SetUint64(d.GlobalProps().GetPoolVoteRewards().Value)
	nextBlockGlobalVoteReward := new(big.Int).Add(totalVoteRewards, new(big.Int).SetUint64(perBlockVoteReward(d)))
	nextBlockGlobalVoteWeightedVp := new(big.Int).Add(decayedVoteWeight, acc2WeightedVp)
	exceptVoteReward := ProportionAlgorithm(acc2WeightedVp, nextBlockGlobalVoteWeightedVp, nextBlockGlobalVoteReward)
	exceptGlobalClaimRewardAfterCashout := d.GlobalProps().GetClaimedVoteRewards().Add(&prototype.Vest{Value: exceptVoteReward.Uint64()})
	exceptGlobalRewardAfterCashout := &prototype.Vest{Value: new(big.Int).Sub(nextBlockGlobalVoteReward, exceptVoteReward).Uint64()}
	// to cashout
	a.NoError(d.ProduceBlocks(1))
	acc2vest1 := d.Account(tester.acc2.Name).GetVest().Value
	a.Equal(d.GlobalProps().GetWeightedVpsVote(), nextBlockGlobalVoteWeightedVp.String())
	a.Equal(acc2vest1-acc2vest0, exceptVoteReward.Uint64())
	a.Equal(d.GlobalProps().GetClaimedVoteRewards(), exceptGlobalClaimRewardAfterCashout)
	a.Equal(d.GlobalProps().GetPoolVoteRewards(), exceptGlobalRewardAfterCashout)
	// Repeat the prediction for acc1 one block later, starting from the
	// post-cash-out global state.
	acc1WeightedVp := StringToBigInt(d.Vote(tester.acc1.Name, POST).GetWeightedVp())
	decayedVoteWeight = bigDecay(nextBlockGlobalVoteWeightedVp)
	// subtract rewards which had been cashout to acc2
	nextBlockGlobalVoteReward.Sub(nextBlockGlobalVoteReward, exceptVoteReward)
	nextBlockGlobalVoteReward.Add(nextBlockGlobalVoteReward, new(big.Int).SetUint64(perBlockVoteReward(d)))
	nextBlockGlobalVoteWeightedVp = new(big.Int).Add(decayedVoteWeight, acc1WeightedVp)
	exceptVoteReward = ProportionAlgorithm(acc1WeightedVp, nextBlockGlobalVoteWeightedVp, nextBlockGlobalVoteReward)
	exceptGlobalClaimRewardAfterCashout.Add(&prototype.Vest{Value: exceptVoteReward.Uint64()})
	exceptGlobalRewardAfterCashout = &prototype.Vest{Value: new(big.Int).Sub(nextBlockGlobalVoteReward, exceptVoteReward).Uint64()}
	a.NoError(d.ProduceBlocks(1))
	acc1vest1 := d.Account(tester.acc1.Name).GetVest().Value
	a.Equal(d.GlobalProps().GetWeightedVpsVote(), nextBlockGlobalVoteWeightedVp.String())
	a.Equal(acc1vest1-acc1vest0, exceptVoteReward.Uint64())
	a.Equal(d.GlobalProps().GetClaimedVoteRewards(), exceptGlobalClaimRewardAfterCashout)
	a.Equal(d.GlobalProps().GetPoolVoteRewards(), exceptGlobalRewardAfterCashout)
}
// normal4 cashes out two votes from different accounts in the same block
// and verifies each account receives its proportional share, plus the
// combined global reward accounting.
func (tester *VoteTester) normal4(t *testing.T, d *Dandelion) {
	a := assert.New(t)
	const POST = 4
	a.NoError(tester.acc0.SendTrxAndProduceBlock(Post(POST, tester.acc0.Name, "title", "content", []string{"1"}, nil)))
	// Both votes are sent before producing a block, so they share a block
	// and therefore share a cash-out block.
	a.NoError(tester.acc1.SendTrx(Vote(tester.acc1.Name, POST)))
	a.NoError(tester.acc2.SendTrx(Vote(tester.acc2.Name, POST)))
	a.NoError(d.ProduceBlocks(1))
	a.NoError(d.ProduceBlocks(constants.VoteCashOutDelayBlock - 2))
	vest1 := d.Account(tester.acc1.Name).GetVest().Value
	vest2 := d.Account(tester.acc2.Name).GetVest().Value
	postWeightedVp := StringToBigInt(d.Post(POST).GetWeightedVp())
	a.NotEqual(postWeightedVp.Int64(), int64(0))
	weightedVp1 := StringToBigInt(d.Vote(tester.acc1.Name, POST).GetWeightedVp())
	weightedVp2 := StringToBigInt(d.Vote(tester.acc2.Name, POST).GetWeightedVp())
	weightedVp := new(big.Int).Add(weightedVp1, weightedVp2)
	decayedVoteWeight := bigDecay(StringToBigInt(d.GlobalProps().GetWeightedVpsVote()))
	currentGlobalVoteReward := new(big.Int).SetUint64(d.GlobalProps().GetPoolVoteRewards().Value)
	nextBlockGlobalVoteReward := new(big.Int).Add(currentGlobalVoteReward, new(big.Int).SetUint64(perBlockVoteReward(d)))
	nextBlockGlobalVoteWeightedVp := decayedVoteWeight.Add(decayedVoteWeight, weightedVp)
	// Each voter's share is proportional to its own vote weight.
	reward1 := ProportionAlgorithm(weightedVp1, nextBlockGlobalVoteWeightedVp, nextBlockGlobalVoteReward)
	reward2 := ProportionAlgorithm(weightedVp2, nextBlockGlobalVoteWeightedVp, nextBlockGlobalVoteReward)
	reward := new(big.Int).Add(reward1, reward2)
	exceptGlobalClaimRewardAfterCashout := d.GlobalProps().GetClaimedVoteRewards().Add(&prototype.Vest{Value: reward.Uint64()})
	exceptGlobalRewardAfterCashout := &prototype.Vest{Value: new(big.Int).Sub(nextBlockGlobalVoteReward, reward).Uint64()}
	// to cashout
	a.NoError(d.ProduceBlocks(1))
	vest1n := d.Account(tester.acc1.Name).GetVest().Value
	vest2n := d.Account(tester.acc2.Name).GetVest().Value
	a.Equal(d.GlobalProps().GetWeightedVpsVote(), nextBlockGlobalVoteWeightedVp.String())
	a.Equal(vest1n-vest1, reward1.Uint64())
	a.Equal(vest2n-vest2, reward2.Uint64())
	a.Equal(d.GlobalProps().GetClaimedVoteRewards(), exceptGlobalClaimRewardAfterCashout)
	a.Equal(d.GlobalProps().GetPoolVoteRewards(), exceptGlobalRewardAfterCashout)
}
package compositions
/*
Centered log ratio transform of a (dataset of) composition(s) and its inverse.
Arguments
data a composition or a data matrix of compositions, not necessarily closed
clr the clr-transform of a composition or a data matrix of clr-transforms of compositions, not necessarily centered (i.e. summing up to zero)
The clr-transform maps a composition in the D-part Aitchison-simplex isometrically to a D-dimensonal euclidian vector subspace: consequently, the transformation is not injective. Thus resulting covariance matrices are always singular.
The data can then be analysed in this transformation by all classical multivariate analysis tools not relying on a full rank of the covariance. See ilr and alr for alternatives. The interpretation of the results is relatively easy since the relation between each original part and a transformed variable is preserved.
The centered logratio transform is given by
clr(x) = (ln x - mean(ln x) ) The image of the clr is a vector with entries summing to 0. This hyperplane is also called the clr-plane.
<NAME>. (1986) The Statistical Analysis of Compositional Data, Monographs on Statistics and Applied Probability. Chapman & Hall Ltd., London (UK). 416p.
*/
// Clr writes the centered log ratio transform of every row composition of
// in into out: clr(x)_j = ln(x_j) - mean(ln(x)). Both matrices must share
// the same dimensions; panics otherwise.
func Clr(in, out Matrix64) {
	rows, cols := in.Dims()
	if r, c := out.Dims(); r != rows || c != cols {
		panic("bad dimensions")
	}
	for i := 0; i < rows; i++ {
		// Mean of the log-parts for this composition.
		mean := 0.0
		for j := 0; j < cols; j++ {
			mean += ln(in[i][j])
		}
		mean /= float64(cols)
		// Center each log-part around the mean.
		for j := 0; j < cols; j++ {
			out[i][j] = ln(in[i][j]) - mean
		}
	}
}
// ToDo: Inv_clr(in, out Matrix64)
/*
Additive log ratio transform of a dataset of compositions.
Arguments
data a composition, not necessarily closed
alr the alr-transform of a composition, thus a (D-1)-dimensional real vector
Details
The alr-transform maps a composition in the D-part Aitchison-simplex non-isometrically to a D-1 dimensonal euclidian vector, treating the last part as common denominator of the others. The data can then be analysed in this transformation by all classical multivariate analysis tools not relying on a distance. The interpretation of the results is relatively simple, since the relation to the original D-1 first parts is preserved. However distance is an extremely relevant concept in most types of analysis, where a clr or ilr transformation should be preferred.
The additive logratio transform is given by
alr(x)_i := ln(x_i/x_D)
References
<NAME>. (1986) The Statistical Analysis of Compositional Data Monographs on Statistics and Applied Probability. Chapman & Hall Ltd., London (UK). 416p.
*/
// Alr writes the additive log ratio transform of every row composition of
// in into out: alr(x)_j = ln(x_j / x_D), treating the LAST part as the
// common denominator. out must have one column fewer than in; panics on a
// dimension mismatch.
// Bug fix: the denominator index was `cols`, one past the end of the row
// (an out-of-range panic on any use); it must be `cols - 1`.
func Alr(in, out Matrix64) {
	// alr matrix has one column less than data matrix
	rows, cols := in.Dims()
	r, c := out.Dims()
	if !(r == rows && c == cols-1) {
		panic("bad dimensions")
	}
	lastcol := cols - 1 // index of the final part, the common denominator
	for i := 0; i < rows; i++ {
		for j := 0; j < cols-1; j++ {
			out[i][j] = ln(in[i][j] / in[i][lastcol])
		}
	}
}
// ToDo: inv_alr(Matrix *in, Matrix *out)
/*
Closure of a composition
Description
Closes compositions to sum up to one , by dividing each part by the sum.
Arguments
in matrix of compositions
out matrix of compositions closed to one
Details
The closure operation is given by
clo(x) := ( x_i / sum_j(x_i))
clo generates a composition without assigning one of the compositional classes acomp or rcomp. Note that after computing the closed-to-one version, obtaining a version closed to any other value is done by simple multiplication.
clo can be used to unclass compositions.
References
<NAME>. (1986) The Statistical Analysis of Compositional Data Monographs on Statistics and Applied Probability. Chapman & Hall Ltd., London (UK). 416p.
*/
// Clo closes each row composition of in to sum one, dividing every part by
// its row total, and writes the result into out. Dimensions must match;
// panics otherwise.
func Clo(in, out Matrix64) {
	rows, cols := in.Dims()
	if r, c := out.Dims(); r != rows || c != cols {
		panic("bad dimensions")
	}
	for i := 0; i < rows; i++ {
		total := 0.0
		for j := 0; j < cols; j++ {
			total += in[i][j]
		}
		for j := 0; j < cols; j++ {
			out[i][j] = in[i][j] / total
		}
	}
}
/*
Centered planar transform
Compute the centered planar transform of a (dataset of) compositions and its inverse.
Arguments
in a composition or a data.matrix of compositions, not necessarily closed
out the cpt-transform of a composition or a data matrix of cpt-transforms of compositions. It is checked that the z sum up to 0.
The cpt-transform maps a composition in the D-part real-simplex isometrically to a D-1 dimensional euclidian vector space, identified with a plane parallel to the simplex but passing through the origin. However the transformation is not injective and does not even reach the whole plane. Thus resulting covariance matrices are always singular.
The data can then be analysed in this transformed space by all classical multivariate analysis tools not relying on a full rank of the covariance matrix. See ipt and apt for alternatives. The interpretation of the results is relatively easy since the relation of each transformed component to the original parts is preserved.
The centered planar transform is given by
cpt(x)_i = clo(x)_i - 1/D
References
<NAME>, K.G. and <NAME> (2007) "compositions": a unified R package to analyze Compositional Data, Computers & Geosciences. (in press).
*/
// Cpt writes the centered planar transform of every row composition of in
// into out: cpt(x)_j = clo(x)_j - 1/D. Dimensions must match; panics
// otherwise.
func Cpt(in, out Matrix64) {
	rows, cols := in.Dims()
	if r, c := out.Dims(); r != rows || c != cols {
		panic("bad dimensions")
	}
	// Close to one first, then shift every part by the constant 1/D.
	Clo(in, out)
	shift := 1 / float64(cols)
	for i := 0; i < rows; i++ {
		for j := 0; j < cols; j++ {
			out[i][j] -= shift
		}
	}
}
/*
Additive planar transform
Compute the additive planar transform of a (dataset of) compositions or its inverse.
Arguments:
in a composition or a matrix of compositions, not necessarily closed
out the apt-transform of a composition or a matrix of alr-transforms of compositions
Details:
The apt-transform maps a composition in the D-part real-simplex linearly to a D-1 dimensional euclidian vector. Although the transformation does not reach the whole R^{D-1}, resulting covariance matrices are typically of full rank.
The data can then be analysed in this transformation by all classical multivariate analysis tools not relying on distances. See cpt and ipt for alternatives. The interpretation of the results is easy since the relation to the first D-1 original variables is preserved.
The additive planar transform is given by
apt(x)_i := clo(x)_i, i=1,...,D-1
*/
// Apt computes the additive planar transform of a dataset of compositions.
func Apt(in, out Matrix64) {
rows, cols := in.Dims()
r, c := out.Dims()
if !(r == rows && c == cols) {
panic("bad dimensions")
}
for i := 0; i < rows; i++ {
sum := 0.0
for j := 0; j < cols-1; j++ { // D-1
sum += in[i][j]
}
for j := 0; j < cols; j++ { //D
out[i][j] = in[i][j] / sum
}
}
} | comp.go | 0.830972 | 0.743145 | comp.go | starcoder |
package main
import (
. "go-guide/datastruct/binaryTree/treeNode"
"log"
)
/***
路径总和 II:https://leetcode-cn.com/problems/path-sum-ii/
给你二叉树的根节点 root 和一个整数目标和 targetSum ,找出所有 从根节点到叶子节点 路径总和等于给定目标和的路径。
叶子节点 是指没有子节点的节点。
*/
// main builds the sample tree and prints the root-to-leaf paths whose
// values sum to targetSum, computed by both the recursive DFS and the BFS
// implementation.
func main() {
	root := NewNormalTree()
	targetSum := 11
	log.Println("所有路径总和II-递归:", pathSum(root, targetSum))
	//log.Println("所有路径总和II-递归:", pathSum1(root, targetSum))
	log.Println("所有路径总和II-递归:", pathSum2(root, targetSum))
}
// pathSum collects every root-to-leaf path whose values sum to targetSum,
// using recursive DFS with backtracking. (Original comment: 递归法 —
// "recursive method".)
func pathSum(root *TreeNode, targetSum int) [][]int {
	var res [][]int
	var path []int
	var dfs func(*TreeNode, int)
	dfs = func(node *TreeNode, left int) {
		if node == nil {
			return
		}
		// Pop this node from the path when this call unwinds (backtracking).
		// Registered before the append, but runs only at return, after it.
		defer func() {
			path = path[:len(path)-1]
		}()
		left -= node.Val
		path = append(path, node.Val)
		if node.Left == nil && node.Right == nil && left == 0 {
			// Leaf reached with the sum exactly consumed: record a COPY of
			// path, since path's backing array keeps mutating.
			res = append(res, append([]int(nil), path...))
			return
		}
		dfs(node.Left, left)
		dfs(node.Right, left)
	}
	dfs(root, targetSum)
	return res
}
// pathSum1 迭代法BFS,前序遍历
/*func pathSum1(root *TreeNode, targetSum int) [][]int {
var res [][]int
if root == nil {
return res
}
queue := []*TreeNode{root}
pathNodes := [][]*
for len(queue) != 0 {
n := queue[0]
p := pathNodes[0]
queue = queue[1:]
pathNodes = pathNodes[1:]
// 判断:当遍历到叶子节点时,如果p等于给定的值就返回真
if n.Left == nil && n.Right == nil {
if p == targetSum {
return true
}
continue
}
if n.Left != nil {
queue = append(queue, n.Left)
pathSum = append(pathSum, n.Left.Val+p)
}
if n.Right != nil {
queue = append(queue, n.Right)
pathSum = append(pathSum, n.Right.Val+p)
}
}
return res
}*/
// pair couples a tree node with the remaining sum still required when the
// BFS traversal reaches that node.
type pair struct {
	node *TreeNode
	left int
}
// pathSum2 solves the same problem with level-order traversal (BFS),
// tracking each node's parent so complete paths can be reconstructed when a
// qualifying leaf is found. (Original comment: 层序遍历 bfs —
// "level-order traversal, BFS".)
func pathSum2(root *TreeNode, targetSum int) (ans [][]int) {
	if root == nil {
		return
	}
	// parent maps each enqueued child back to the node it was reached from.
	parent := map[*TreeNode]*TreeNode{}
	// getPath walks the parent chain from a leaf to the root, then reverses
	// it in place to obtain root-to-leaf order.
	getPath := func(node *TreeNode) (path []int) {
		for ; node != nil; node = parent[node] {
			path = append(path, node.Val)
		}
		for i, j := 0, len(path)-1; i < j; i++ {
			path[i], path[j] = path[j], path[i]
			j--
		}
		return
	}
	queue := []pair{{root, targetSum}}
	for len(queue) > 0 {
		p := queue[0]
		queue = queue[1:]
		node := p.node
		left := p.left - node.Val
		if node.Left == nil && node.Right == nil {
			// Leaf: record the path only if the sum is exactly consumed.
			if left == 0 {
				ans = append(ans, getPath(node))
			}
		} else {
			if node.Left != nil {
				parent[node.Left] = node
				queue = append(queue, pair{node.Left, left})
			}
			if node.Right != nil {
				parent[node.Right] = node
				queue = append(queue, pair{node.Right, left})
			}
		}
	}
	return
}
package managers
import (
"github.com/juan-medina/gosge/components/geometry"
"github.com/juan-medina/gosge/components/sprite"
)
// CollisionManager resolves sprite bounding rectangles and performs
// collision/containment tests, looking up sprite definitions through a
// StorageManager.
type CollisionManager struct {
	sm *StorageManager // sprite sheet / definition lookup
}
// getSpriteRectAt returns the bounding geometry.Rect for spr drawn at the
// given point, scaled by the sprite's scale and anchored by its pivot.
// NOTE(review): the GetSpriteDef error is discarded; on a missing
// definition this silently yields a zero-size rect — confirm that is the
// intended behavior.
func (cm CollisionManager) getSpriteRectAt(spr sprite.Sprite, at geometry.Point) geometry.Rect {
	def, _ := cm.sm.GetSpriteDef(spr.Sheet, spr.Name)
	size := def.Origin.Size.Scale(spr.Scale)
	return geometry.Rect{
		From: geometry.Point{
			// Shift by the pivot fraction so `at` lands on the pivot point.
			X: at.X - (size.Width * def.Pivot.X),
			Y: at.Y - (size.Height * def.Pivot.Y),
		},
		Size: size,
	}
}
// getSpriteRectAtFactor returns the bounding geometry.Rect for spr drawn at
// the given point, with its size additionally scaled per axis by factor.
func (cm CollisionManager) getSpriteRectAtFactor(spr sprite.Sprite, at geometry.Point, factor geometry.Point) geometry.Rect {
	def, _ := cm.sm.GetSpriteDef(spr.Sheet, spr.Name)
	scale := geometry.Point{X: factor.X * spr.Scale, Y: factor.Y * spr.Scale}
	size := def.Origin.Size.ScaleXYFactor(scale)
	topLeft := geometry.Point{
		X: at.X - size.Width*def.Pivot.X,
		Y: at.Y - size.Height*def.Pivot.Y,
	}
	return geometry.Rect{From: topLeft, Size: size}
}
// SpriteAtContains reports whether point falls inside the bounding
// rectangle of spr when the sprite is drawn at the given position.
func (cm CollisionManager) SpriteAtContains(spr sprite.Sprite, at geometry.Point, point geometry.Point) bool {
	rect := cm.getSpriteRectAt(spr, at)
	return rect.IsPointInRect(point)
}
// SpritesCollides reports whether the bounding rectangles of two sprites,
// drawn at their respective positions, overlap.
func (cm CollisionManager) SpritesCollides(spr1 sprite.Sprite, at1 geometry.Point, spr2 sprite.Sprite, at2 geometry.Point) bool {
	return cm.getSpriteRectAt(spr1, at1).Collides(cm.getSpriteRectAt(spr2, at2))
}
// SpritesCollidesFactor reports whether two sprites collide after each
// sprite's bounding rectangle is scaled by its own per-axis factor.
func (cm CollisionManager) SpritesCollidesFactor(spr1 sprite.Sprite, at1 geometry.Point, spr2 sprite.Sprite, at2 geometry.Point,
	factor1 geometry.Point, factor2 geometry.Point) bool {
	return cm.getSpriteRectAtFactor(spr1, at1, factor1).Collides(cm.getSpriteRectAtFactor(spr2, at2, factor2))
}
// Collisions returns a CollisionManager backed by the given StorageManager.
func Collisions(sm *StorageManager) *CollisionManager {
	return &CollisionManager{sm: sm}
}
package main
import (
"bufio"
"fmt"
"os"
)
// Indices into tile.edges identifying each side of a tile.
const (
	top    = 0
	bottom = 1
	left   = 2
	right  = 3
)
// tile is one 10x10 puzzle tile: its numeric ID, the four border strings,
// and the 8x8 interior image with the borders stripped.
type tile struct {
	id    int
	edges []string // 0: top, 1: bottom, 2: left, 3: right
	inner []string // horizontal lines of the binary image
}
// tileMatch records one possible attachment between two tiles: which edge
// of the first tile matches which edge of the other tile, and whether the
// other tile must be flipped for the edges to line up.
type tileMatch struct {
	edge       int
	otherTile  int
	otherEdge  int
	needToFlip bool
}
// main reads puzzle tiles from stdin, multiplies the corner tile IDs
// (part 1), then assembles the full image and reports the sea-monster-free
// roughness (part 2).
func main() {
	var tiles = make([]tile, 150)
	var tile, tileLine = 0, 0
	// read input: each tile is an ID line, ten 10-char rows, then a blank.
	scanner := bufio.NewScanner(os.Stdin)
	for scanner.Scan() {
		if tileLine == 0 {
			// read tile ID and create slices for edges
			fmt.Sscanf(scanner.Text(), "Tile %d:", &tiles[tile].id)
			tiles[tile].edges = make([]string, 4)
			tileLine++
		} else if tileLine < 11 {
			// read tile edges; interior rows (2..9) also contribute their
			// middle characters to inner with the side borders stripped.
			if tileLine == 1 {
				tiles[tile].edges[top] = scanner.Text()
			} else if tileLine == 10 {
				tiles[tile].edges[bottom] = scanner.Text()
			} else {
				tiles[tile].inner = append(tiles[tile].inner, scanner.Text()[1:len(scanner.Text())-1])
			}
			tiles[tile].edges[left] += scanner.Text()[0:1]
			tiles[tile].edges[right] += scanner.Text()[len(scanner.Text())-1:]
			tileLine++
		} else { // empty input line between tiles
			tile++
			tileLine = 0
		}
	}
	// find corner tiles: a corner has exactly two matchable edges
	var result = 1
	var cornerTile int
	for tileIndex, tile := range tiles {
		if tile.id == 0 {
			continue
		}
		if len(findMatchingEdges(tile, tiles)) == 2 {
			result *= tile.id
			cornerTile = tileIndex
		}
	}
	fmt.Printf("Product of corner tile IDs: %v\n", result)
	// solve jigsaw, generate final image, and find sea monsters, trying all
	// 8 orientations (4 rotations, with a flip after the fourth)
	var solvedJigsaw = solveJigsaw(tiles, cornerTile)
	var monsters = 0
	for i := 0; monsters == 0 && i < 8; i++ {
		monsters = countSeaMonsters(solvedJigsaw)
		solvedJigsaw = rotateCW(solvedJigsaw, 90)
		if i == 4 {
			flipVertically(solvedJigsaw)
		}
	}
	if monsters == 0 {
		panic("No sea monsters found!")
	}
	roughness := calculateRoughness(solvedJigsaw, monsters)
	fmt.Printf("Sea monster habitat roughness: %v\n", roughness)
}
// findMatchingEdges returns every way tile t can be attached to another
// tile: for each match it records t's edge, the other tile's index and
// edge, and whether the other tile must be flipped to line up.
func findMatchingEdges(t tile, tiles []tile) (matches []tileMatch) {
	for t0Index, t0 := range tiles {
		// Skip unused slots (id 0) and the tile itself.
		if t0.id == 0 || t0.id == t.id {
			continue
		}
		for eID, edge := range t.edges {
			for e0ID, edge0 := range t0.edges {
				if edge == edge0 || edge == reverse(edge0) {
					needToFlip := edge == reverse(edge0)
					// For some edge-pair orientations the two edge strings
					// are read in the same direction when the tiles abut,
					// which inverts the flip rule.
					if eID == e0ID ||
						(eID == bottom && e0ID == left) || (eID == left && e0ID == bottom) ||
						(eID == top && e0ID == right) || (eID == right && e0ID == top) {
						needToFlip = edge == edge0
					}
					matches = append(matches, tileMatch{eID, t0Index, e0ID, needToFlip})
				}
			}
		}
	}
	return
}
// solveJigsaw assembles the tiles into the final borderless image, starting
// from a corner tile. The startingTile has to be the index in the tiles
// slice of a corner tile. Tiles are rotated/flipped in place (mutating the
// tiles slice) as they are placed.
func solveJigsaw(tiles []tile, startingTile int) (solved []string) {
	var current = startingTile
	var row []int
	// rotate starting tile to make it the top-left corner: its two
	// matchable edges must end up on the right and bottom.
	matches := findMatchingEdges(tiles[startingTile], tiles)
	if matches[0].edge == top || matches[1].edge == top {
		if matches[0].edge == left || matches[1].edge == left {
			rotateTile(&tiles[startingTile], 180)
		} else {
			rotateTile(&tiles[startingTile], 90)
		}
	} else if matches[0].edge == left || matches[1].edge == left {
		rotateTile(&tiles[startingTile], 270)
	}
	// build first row of final image by repeatedly attaching to the right
	solved = placeTile(solved, tiles, startingTile, true)
	for x := 0; ; x++ {
		row = append(row, current)
		matches := findMatchingEdges(tiles[current], tiles)
		// Stop when no neighbor attaches to the right edge (end of row).
		if matches[0].edge != right && matches[1].edge != right && (len(matches) < 3 || matches[2].edge != right) {
			break
		}
		// find the correct match object
		match := matches[0]
		if matches[1].edge == right {
			match = matches[1]
		} else if len(matches) >= 3 && matches[2].edge == right {
			match = matches[2]
		}
		// rotate the new tile accordingly so its matching edge faces left
		if match.otherEdge == bottom {
			rotateTile(&tiles[match.otherTile], 90)
		} else if match.otherEdge == right {
			rotateTile(&tiles[match.otherTile], 180)
		} else if match.otherEdge == top {
			rotateTile(&tiles[match.otherTile], 270)
		}
		if match.needToFlip {
			flipTileVert(&tiles[match.otherTile])
		}
		// add the tile into the final image
		solved = placeTile(solved, tiles, match.otherTile, false)
		current = match.otherTile
	}
	// build remaining rows of final image: replace each tile of the
	// previous row with the tile attached below it.
	for y := 1; y < len(row); y++ {
		for x := 0; x < len(row); x++ {
			matches := findMatchingEdges(tiles[row[x]], tiles)
			for _, match := range matches {
				if match.edge == bottom {
					row[x] = match.otherTile
					// rotate the new tile so its matching edge faces up
					if match.otherEdge == left {
						rotateTile(&tiles[match.otherTile], 90)
					} else if match.otherEdge == bottom {
						rotateTile(&tiles[match.otherTile], 180)
					} else if match.otherEdge == right {
						rotateTile(&tiles[match.otherTile], 270)
					}
					if match.needToFlip {
						flipTileHori(&tiles[match.otherTile])
					}
					solved = placeTile(solved, tiles, match.otherTile, (x == 0))
					break
				}
			}
		}
	}
	return
}
// rotateTile rotates t clockwise by the given multiple of 90 degrees,
// updating both its inner pixels and its four edge strings.
func rotateTile(t *tile, degrees int) {
	(*t).inner = rotateCW((*t).inner, degrees)
	// Apply the edge permutation once per 90 degrees.
	for degLeft := degrees; degLeft > 0; degLeft -= 90 {
		oldEdges := make([]string, len((*t).edges))
		copy(oldEdges, (*t).edges)
		// 90° CW: left -> top (reversed), right -> bottom (reversed),
		// bottom -> left, top -> right.
		(*t).edges[top] = reverse(oldEdges[left])
		(*t).edges[bottom] = reverse(oldEdges[right])
		(*t).edges[left] = oldEdges[bottom]
		(*t).edges[right] = oldEdges[top]
	}
}
// flipTileVert mirrors t top-to-bottom: top and bottom edges swap, while
// the left and right edges are each reversed.
func flipTileVert(t *tile) {
	flipVertically((*t).inner)
	oldEdges := make([]string, len((*t).edges))
	copy(oldEdges, (*t).edges)
	(*t).edges[top] = oldEdges[bottom]
	(*t).edges[bottom] = oldEdges[top]
	(*t).edges[left] = reverse(oldEdges[left])
	(*t).edges[right] = reverse(oldEdges[right])
}
// flipTileHori mirrors t left-to-right: left and right edges swap, while
// the top and bottom edges are each reversed.
func flipTileHori(t *tile) {
	flipHorizontally((*t).inner)
	oldEdges := make([]string, len((*t).edges))
	copy(oldEdges, (*t).edges)
	(*t).edges[top] = reverse(oldEdges[top])
	(*t).edges[bottom] = reverse(oldEdges[bottom])
	(*t).edges[left] = oldEdges[right]
	(*t).edges[right] = oldEdges[left]
}
// placeTile merges the inner pixels of tiles[tileIndex] into the image:
// as fresh rows when newRow is true, otherwise concatenated onto the
// rightmost tile-height rows already present. Returns the updated image.
// Improvement: uses variadic append and a precomputed base offset instead
// of element-by-element appends and per-row length arithmetic.
func placeTile(image []string, tiles []tile, tileIndex int, newRow bool) []string {
	inner := tiles[tileIndex].inner
	if newRow {
		return append(image, inner...)
	}
	// Extend the last len(inner) rows of the image to the right.
	base := len(image) - len(inner)
	for i, rowPixels := range inner {
		image[base+i] += rowPixels
	}
	return image
}
// rotateCW returns image rotated clockwise by the given multiple of 90
// degrees. Mutates image internally (the 180° step flips in place), so
// callers must use the returned slice and not rely on the argument.
func rotateCW(image []string, degrees int) []string {
	if degrees >= 180 {
		// A 180° rotation is a horizontal flip followed by a vertical flip.
		flipHorizontally(image)
		flipVertically(image)
	}
	if degrees%180 == 90 {
		// Remaining 90° CW: outImage[x][k] = image[n-1-k][x], built by
		// appending columns bottom-up.
		outImage := make([]string, len(image))
		for y := len(image) - 1; y >= 0; y-- {
			for x := 0; x < len(image[0]); x++ {
				outImage[x] += string(rune(image[y][x]))
			}
		}
		return outImage
	}
	return image
}
// flipHorizontally mirrors the image left-to-right by reversing every row.
// Mutates image in place.
func flipHorizontally(image []string) {
	for i := range image {
		image[i] = reverse(image[i])
	}
}
// flipVertically mirrors the image top-to-bottom by swapping rows inward
// from both ends. Mutates image in place.
func flipVertically(image []string) {
	for i, j := 0, len(image)-1; i < j; i, j = i+1, j-1 {
		image[i], image[j] = image[j], image[i]
	}
}
// seaMonster is the pattern searched for in the assembled image: '#' cells
// must match, every other character is a wildcard.
// NOTE(review): the spacing here appears collapsed relative to the
// canonical Advent of Code pattern (rows of equal 20-char width) — verify
// against the original source; the '#' count (15) is unaffected.
var seaMonster = []string{
	" # ",
	"# ## ## ###",
	" # # # # # # ",
}
// countSeaMonsters counts sea monster pattern matches anywhere in the
// image, possibly overlapping.
// Bug fix: the original used strict `<` bounds (x < width-patternWidth),
// which skipped the final valid offset and so missed monsters flush
// against the right or bottom edge; the bounds are now inclusive.
func countSeaMonsters(image []string) (monsters int) {
	for x := 0; x+len(seaMonster[0]) <= len(image[0]); x++ {
		for y := 0; y+len(seaMonster) <= len(image); y++ {
			if matchSeaMonster(image, x, y) {
				monsters++
			}
		}
	}
	return
}
// matchSeaMonster reports whether the sea monster pattern matches the image
// with its top-left corner at (xOffset, yOffset). Only '#' cells in the
// pattern must match; all other pattern cells are wildcards.
func matchSeaMonster(image []string, xOffset, yOffset int) bool {
	for x := 0; x < len(seaMonster[0]); x++ {
		for y := 0; y < len(seaMonster); y++ {
			if seaMonster[y][x] == byte('#') && image[yOffset+y][xOffset+x] != byte('#') {
				return false
			}
		}
	}
	return true
}
func calculateRoughness(image []string, monsters int) int {
var roughness = 0
for y := 0; y < len(image); y++ {
for x := 0; x < len(image[0]); x++ {
if image[y][x] == byte('#') {
roughness++
}
}
}
return roughness - monsters*15
}
// reverse returns s with its runes in reverse order, handling multi-byte
// UTF-8 characters correctly.
func reverse(s string) string {
	runes := []rune(s)
	for i, j := 0, len(runes)-1; i < j; i, j = i+1, j-1 {
		runes[i], runes[j] = runes[j], runes[i]
	}
	return string(runes)
}
package input
import (
"github.com/benthosdev/benthos/v4/internal/component/input"
"github.com/benthosdev/benthos/v4/internal/component/metrics"
"github.com/benthosdev/benthos/v4/internal/docs"
"github.com/benthosdev/benthos/v4/internal/impl/nats/auth"
"github.com/benthosdev/benthos/v4/internal/interop"
"github.com/benthosdev/benthos/v4/internal/log"
"github.com/benthosdev/benthos/v4/internal/old/input/reader"
"github.com/benthosdev/benthos/v4/internal/tls"
)
//------------------------------------------------------------------------------
// init registers the nats_stream input constructor, documentation, and
// configuration field spec in the package-wide Constructors registry.
func init() {
	Constructors[TypeNATSStream] = TypeSpec{
		constructor: fromSimpleConstructor(NewNATSStream),
		Summary: `
Subscribe to a NATS Stream subject. Joining a queue is optional and allows
multiple clients of a subject to consume using queue semantics.`,
		Description: `
Tracking and persisting offsets through a durable name is also optional and
works with or without a queue. If a durable name is not provided then subjects
are consumed from the most recently published message.
When a consumer closes its connection it unsubscribes, when all consumers of a
durable queue do this the offsets are deleted. In order to avoid this you can
stop the consumers from unsubscribing by setting the field
` + "`unsubscribe_on_close` to `false`" + `.
### Metadata
This input adds the following metadata fields to each message:
` + "``` text" + `
- nats_stream_subject
- nats_stream_sequence
` + "```" + `
You can access these metadata fields using
[function interpolation](/docs/configuration/interpolation#metadata).
` + auth.Description(),
		// Configuration fields exposed to users of this input.
		FieldSpecs: docs.FieldSpecs{
			docs.FieldString(
				"urls",
				"A list of URLs to connect to. If an item of the list contains commas it will be expanded into multiple URLs.",
				[]string{"nats://127.0.0.1:4222"},
				[]string{"nats://username:password@127.0.0.1:4222"},
			).Array(),
			docs.FieldString("cluster_id", "The ID of the cluster to consume from."),
			docs.FieldString("client_id", "A client ID to connect as."),
			docs.FieldString("queue", "The queue to consume from."),
			docs.FieldString("subject", "A subject to consume from."),
			docs.FieldString("durable_name", "Preserve the state of your consumer under a durable name."),
			docs.FieldBool("unsubscribe_on_close", "Whether the subscription should be destroyed when this client disconnects."),
			docs.FieldBool("start_from_oldest", "If a position is not found for a queue, determines whether to consume from the oldest available message, otherwise messages are consumed from the latest.").Advanced(),
			docs.FieldInt("max_inflight", "The maximum number of unprocessed messages to fetch at a given time.").Advanced(),
			docs.FieldString("ack_wait", "An optional duration to specify at which a message that is yet to be acked will be automatically retried.").Advanced(),
			tls.FieldSpec(),
			auth.FieldSpec(),
		},
		Categories: []Category{
			CategoryServices,
		},
	}
}
//------------------------------------------------------------------------------
// NewNATSStream creates a new NATSStream input type by wrapping the
// underlying NATS Streaming reader in an async reader. The mgr
// parameter is accepted for interface compatibility but is not used
// here.
func NewNATSStream(conf Config, mgr interop.Manager, log log.Modular, stats metrics.Type) (input.Streamed, error) {
	rdr, err := reader.NewNATSStream(conf.NATSStream, log, stats)
	if err != nil {
		return nil, err
	}
	return NewAsyncReader(TypeNATSStream, true, rdr, log, stats)
}
//------------------------------------------------------------------------------ | internal/old/input/nats_stream.go | 0.810329 | 0.637482 | nats_stream.go | starcoder |
package sliceutil
import (
"reflect"
"sort"
)
// Compare reports whether two slices contain the same elements with the
// same multiplicities, regardless of order.
//
// Nil handling mirrors OrderedCompare: two nil inputs are equal, a single
// nil (or any non-slice input) is not. Element types must be comparable
// (usable as map keys); non-comparable elements will panic.
//
// Inspired by github.com/stephanbaker white board pseudo code.
func Compare(s1, s2 interface{}) bool {
	// Treat two nil inputs as equal, consistent with OrderedCompare.
	if s1 == nil && s2 == nil {
		return true
	}
	if s1 == nil || s2 == nil {
		return false
	}
	// Convert slices to []interface{}; nil means the input was not a slice.
	slice1 := convertSliceToInterface(s1)
	slice2 := convertSliceToInterface(s2)
	if slice1 == nil || slice2 == nil {
		return false
	}
	if len(slice1) != len(slice2) {
		return false
	}
	// Count each element up for slice1 and down for slice2; since the
	// lengths are equal, the slices are multiset-equal iff every count
	// ends at zero.
	counts := make(map[interface{}]int, len(slice1))
	for i := 0; i < len(slice1); i++ {
		counts[slice1[i]]++
		counts[slice2[i]]--
	}
	for _, c := range counts {
		if c != 0 {
			return false
		}
	}
	return true
}
// OrderedCompare reports whether two slices hold equal elements in the
// same order. Two nil inputs are considered equal; a single nil input,
// or any input that is not a slice, is never equal.
func OrderedCompare(s1, s2 interface{}) bool {
	switch {
	case s1 == nil && s2 == nil:
		return true
	case s1 == nil || s2 == nil:
		return false
	}
	// Convert both inputs; nil here means the input was not a slice.
	a, b := convertSliceToInterface(s1), convertSliceToInterface(s2)
	if a == nil || b == nil {
		return false
	}
	if len(a) != len(b) {
		return false
	}
	// Compare element-by-element at each index.
	for i := range a {
		if a[i] != b[i] {
			return false
		}
	}
	return true
}
// Contains reports whether the slice s contains the element e. s may be
// a slice of any element type; a non-slice input always yields false.
func Contains(s interface{}, e interface{}) bool {
	// Ranging over a nil []interface{} (non-slice input) simply does nothing.
	for _, v := range convertSliceToInterface(s) {
		if v == e {
			return true
		}
	}
	return false
}
// ContainsString reports whether the string slice s contains e.
func ContainsString(s []string, e string) bool {
	for i := range s {
		if s[i] == e {
			return true
		}
	}
	return false
}
// convertSliceToInterface converts an arbitrary slice (passed as an
// interface{}) into a []interface{} of its elements using reflection.
// It returns nil when s is not a slice (including a nil interface).
func convertSliceToInterface(s interface{}) (slice []interface{}) {
	v := reflect.ValueOf(s)
	if v.Kind() != reflect.Slice {
		return nil
	}
	slice = make([]interface{}, v.Len())
	for i := range slice {
		slice[i] = v.Index(i).Interface()
	}
	return slice
}
// Reverse returns a new slice holding the elements of s in reverse
// order; the input slice is left unmodified.
func Reverse(s []interface{}) []interface{} {
	out := make([]interface{}, len(s))
	for i, v := range s {
		out[len(s)-1-i] = v
	}
	return out
}
// FastContains offers a faster implementation of contains using binary
// search (O(log n) instead of O(n)). The input slice s MUST already be
// sorted in ascending order (a precondition of sort.SearchStrings);
// results on unsorted input are undefined.
func FastContains(s []string, searchterm string) bool {
	i := sort.SearchStrings(s, searchterm)
	return i < len(s) && s[i] == searchterm
} | sliceutil/sliceutil.go | 0.650467 | 0.406744 | sliceutil.go | starcoder
package generator
import (
"github.com/drakos74/oremi/internal/data/model"
)
// Line is a collection of points adhering to a graph evolution model in 2D space.
type Line model.Series
// NewLine creates a collection of num vectors forming a straight line
// with coefficients a and b, sampling x from 0 in increments of step.
func NewLine(num int, a, b, step float64) model.Collection {
	return NewLineGenerator(a, b, step).Num(num)
}
// NewLineGenerator creates a new generator for a 2d line: it walks a
// linear sequence of x values (from 0 in increments of step) and maps
// each to the point (x, linearFunction(a, b)(x)).
func NewLineGenerator(a, b, step float64) *Euclidean {
	return &Euclidean{
		NewLinearSequence(0, step),
		F{f: []Y{
			// x coordinate: identity on the sequence value
			func(x ...float64) float64 {
				return x[0]
			},
			// y coordinate
			linearFunction(a, b),
		}}}
}
// Polynomial is a collection of vectors following a polynomial function.
type Polynomial model.Series
// NewPolynomial creates a new collection of num vectors following a
// polynomial with constant term b and further coefficients a, sampling
// x from 0 in increments of step.
func NewPolynomial(num int, b, step float64, a ...float64) model.Collection {
	polynomial := NewPolynomialGenerator(b, step, a...)
	collection := polynomial.Num(num)
	return collection
}
// NewPolynomialGenerator creates a new generator for a 2d polynomial
// curve; b is prepended to a to form the full coefficient list passed
// to polynomialFunction.
func NewPolynomialGenerator(b, step float64, a ...float64) *Euclidean {
	// Full coefficient list: [b, a...].
	x := []float64{b}
	x = append(x, a...)
	return &Euclidean{
		NewLinearSequence(0, step),
		F{f: []Y{
			// x coordinate: identity on the sequence value
			func(x ...float64) float64 {
				return x[0]
			},
			// y coordinate
			polynomialFunction(x...),
		}}}
}
// Exponential is a collection of vectors following an exponential function.
type Exponential model.Series
// NewExponential creates a new collection of num vectors following an
// exponential function with coefficient a, sampling x from 0 in
// increments of step.
func NewExponential(num int, a, step float64) model.Collection {
	exponential := NewExponentialGenerator(a, step)
	collection := exponential.Num(num)
	return collection
}
// NewExponentialGenerator creates a new generator for a 2d exponential
// curve with coefficient a, advancing x by step.
func NewExponentialGenerator(a, step float64) *Euclidean {
	return &Euclidean{
		NewLinearSequence(0, step),
		F{f: []Y{
			// x coordinate: identity on the sequence value
			func(x ...float64) float64 {
				return x[0]
			},
			// y coordinate
			exponentialFunction(a),
		}}}
} | internal/data/source/generator/geometry.go | 0.837587 | 0.567008 | geometry.go | starcoder
package influx2otel
import (
"errors"
"fmt"
"strings"
"time"
"github.com/influxdata/influxdb-observability/common"
"go.opentelemetry.io/collector/model/pdata"
)
// LineProtocolToOtelMetrics converts InfluxDB line-protocol points into
// OpenTelemetry (pdata) metrics batches.
type LineProtocolToOtelMetrics struct {
	logger common.Logger
}
// NewLineProtocolToOtelMetrics returns a converter that reports through
// the supplied logger. The returned error is currently always nil.
func NewLineProtocolToOtelMetrics(logger common.Logger) (*LineProtocolToOtelMetrics, error) {
	converter := &LineProtocolToOtelMetrics{logger: logger}
	return converter, nil
}
// NewBatch returns an empty MetricsBatch, sharing the converter's
// logger, with all internal lookup maps pre-initialised.
func (c *LineProtocolToOtelMetrics) NewBatch() *MetricsBatch {
	return &MetricsBatch{
		rmByAttributes: make(map[string]pdata.ResourceMetrics),
		ilmByRMAttributesAndIL: make(map[string]map[string]pdata.InstrumentationLibraryMetrics),
		metricByRMIL: make(map[string]map[string]map[string]pdata.Metric),
		histogramDataPointsByMDPK: make(map[pdata.Metric]map[dataPointKey]pdata.HistogramDataPoint),
		summaryDataPointsByMDPK: make(map[pdata.Metric]map[dataPointKey]pdata.SummaryDataPoint),
		logger: c.logger,
	}
}
// MetricsBatch accumulates OTel metrics converted from line-protocol
// points. Resource metrics are keyed by their (sorted) resource
// attribute keys; instrumentation libraries by "name:version" within a
// resource; metrics by name within a library. Histogram and summary
// data points are additionally indexed per metric by dataPointKey so
// related field values can be merged into one data point.
type MetricsBatch struct {
	rmByAttributes map[string]pdata.ResourceMetrics
	ilmByRMAttributesAndIL map[string]map[string]pdata.InstrumentationLibraryMetrics
	metricByRMIL map[string]map[string]map[string]pdata.Metric
	histogramDataPointsByMDPK map[pdata.Metric]map[dataPointKey]pdata.HistogramDataPoint
	summaryDataPointsByMDPK map[pdata.Metric]map[dataPointKey]pdata.SummaryDataPoint
	logger common.Logger
}
// AddPoint adds one line-protocol point to the batch. Points in the
// Prometheus measurement are interpreted with the Telegraf/Prometheus
// v2 schema, all other measurements with the v1 schema. If the
// schema-aware conversion cannot determine the value type it signals
// errValueTypeUnknown and the point falls back to a schema-less
// conversion (each numeric field becomes a gauge).
func (b *MetricsBatch) AddPoint(measurement string, tags map[string]string, fields map[string]interface{}, ts time.Time, vType common.InfluxMetricValueType) error {
	var err error
	if measurement == common.MeasurementPrometheus {
		err = b.addPointTelegrafPrometheusV2(measurement, tags, fields, ts, vType)
	} else {
		err = b.addPointTelegrafPrometheusV1(measurement, tags, fields, ts, vType)
	}
	if errors.Is(err, errValueTypeUnknown) {
		// Schema not recognized; fall back to best-effort conversion.
		return b.addPointWithUnknownSchema(measurement, tags, fields, ts)
	}
	return err
}
// resourceAttributesToKey builds a lookup key from a resource attribute
// map by concatenating each attribute KEY followed by ':'.
//
// NOTE(review): attribute values are ignored, so two resources with the
// same attribute keys but different values map to the same key —
// confirm this collapsing is intentional.
func resourceAttributesToKey(rAttributes pdata.AttributeMap) string {
	var key strings.Builder
	rAttributes.Range(func(k string, v pdata.AttributeValue) bool {
		key.WriteString(k)
		key.WriteByte(':')
		return true
	})
	return key.String()
}
// errValueTypeUnknown signals that a point's metric value type could not
// be determined; AddPoint falls back to schema-less conversion on it.
var errValueTypeUnknown = errors.New("value type unknown")
// lookupMetric finds or creates the pdata.Metric for the given name,
// tags and value type, returning the metric together with the tag
// subset that belongs on individual data points.
//
// Tags are routed as follows: histogram-bound / summary-quantile keys
// are dropped; instrumentation-library name/version tags select the
// instrumentation library; tags matching common.ResourceNamespace
// become resource attributes; everything else becomes data point
// attributes. An existing metric is reused only when its data type is
// compatible with vType; otherwise a conflict error is returned.
func (b *MetricsBatch) lookupMetric(metricName string, tags map[string]string, vType common.InfluxMetricValueType) (pdata.Metric, pdata.AttributeMap, error) {
	var ilName, ilVersion string
	rAttributes := pdata.NewAttributeMap()
	mAttributes := pdata.NewAttributeMap()
	for k, v := range tags {
		switch {
		case k == common.MetricHistogramBoundKeyV2 || k == common.MetricSummaryQuantileKeyV2:
			continue
		case k == common.AttributeInstrumentationLibraryName:
			ilName = v
		case k == common.AttributeInstrumentationLibraryVersion:
			ilVersion = v
		case common.ResourceNamespace.MatchString(k):
			rAttributes.InsertString(k, v)
		default:
			mAttributes.InsertString(k, v)
		}
	}
	// Sort so the generated resource key is independent of tag order.
	rAttributes.Sort()
	rKey := resourceAttributesToKey(rAttributes)
	// Find or create the ResourceMetrics for these resource attributes.
	var resourceMetrics pdata.ResourceMetrics
	if rm, found := b.rmByAttributes[rKey]; found {
		resourceMetrics = rm
	} else {
		resourceMetrics = pdata.NewResourceMetrics()
		rAttributes.CopyTo(resourceMetrics.Resource().Attributes())
		b.rmByAttributes[rKey] = resourceMetrics
		b.ilmByRMAttributesAndIL[rKey] = make(map[string]pdata.InstrumentationLibraryMetrics)
		b.metricByRMIL[rKey] = make(map[string]map[string]pdata.Metric)
	}
	// Find or create the instrumentation-library scope within the resource.
	ilmKey := ilName + ":" + ilVersion
	var ilMetrics pdata.InstrumentationLibraryMetrics
	if ilm, found := b.ilmByRMAttributesAndIL[rKey][ilmKey]; found {
		ilMetrics = ilm
	} else {
		ilMetrics = resourceMetrics.InstrumentationLibraryMetrics().AppendEmpty()
		ilMetrics.InstrumentationLibrary().SetName(ilName)
		ilMetrics.InstrumentationLibrary().SetVersion(ilVersion)
		b.ilmByRMAttributesAndIL[rKey][ilmKey] = ilMetrics
		b.metricByRMIL[rKey][ilmKey] = make(map[string]pdata.Metric)
	}
	// Reuse an existing metric only if its data type is compatible with vType...
	var metric pdata.Metric
	if m, found := b.metricByRMIL[rKey][ilmKey][metricName]; found {
		switch m.DataType() {
		case pdata.MetricDataTypeGauge:
			if vType != common.InfluxMetricValueTypeGauge && vType != common.InfluxMetricValueTypeUntyped {
				return pdata.Metric{}, pdata.AttributeMap{}, fmt.Errorf("value type conflict for metric '%s'; expected '%s' or '%s', got '%s'", metricName, common.InfluxMetricValueTypeGauge, common.InfluxMetricValueTypeUntyped, vType)
			}
		case pdata.MetricDataTypeSum:
			if vType != common.InfluxMetricValueTypeSum {
				return pdata.Metric{}, pdata.AttributeMap{}, fmt.Errorf("value type conflict for metric '%s'; expected '%s', got '%s'", metricName, common.InfluxMetricValueTypeSum, vType)
			}
		case pdata.MetricDataTypeHistogram:
			if vType != common.InfluxMetricValueTypeHistogram {
				return pdata.Metric{}, pdata.AttributeMap{}, fmt.Errorf("value type conflict for metric '%s'; expected '%s', got '%s'", metricName, common.InfluxMetricValueTypeHistogram, vType)
			}
		case pdata.MetricDataTypeSummary:
			if vType != common.InfluxMetricValueTypeSummary {
				return pdata.Metric{}, pdata.AttributeMap{}, fmt.Errorf("value type conflict for metric '%s'; expected '%s', got '%s'", metricName, common.InfluxMetricValueTypeSummary, vType)
			}
		default:
			return pdata.Metric{}, pdata.AttributeMap{}, fmt.Errorf("impossible InfluxMetricValueType %d", vType)
		}
		metric = m
	} else {
		// ...otherwise create it with the data type implied by vType.
		metric = b.ilmByRMAttributesAndIL[rKey][ilmKey].Metrics().AppendEmpty()
		metric.SetName(metricName)
		switch vType {
		case common.InfluxMetricValueTypeGauge:
			metric.SetDataType(pdata.MetricDataTypeGauge)
		case common.InfluxMetricValueTypeSum:
			metric.SetDataType(pdata.MetricDataTypeSum)
			metric.Sum().SetIsMonotonic(true)
			metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
		case common.InfluxMetricValueTypeHistogram:
			metric.SetDataType(pdata.MetricDataTypeHistogram)
			metric.Histogram().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
		case common.InfluxMetricValueTypeSummary:
			metric.SetDataType(pdata.MetricDataTypeSummary)
		default:
			return pdata.Metric{}, pdata.AttributeMap{}, fmt.Errorf("unrecognized InfluxMetricValueType %d", vType)
		}
		b.metricByRMIL[rKey][ilmKey][metricName] = metric
		b.histogramDataPointsByMDPK[metric] = make(map[dataPointKey]pdata.HistogramDataPoint)
		b.summaryDataPointsByMDPK[metric] = make(map[dataPointKey]pdata.SummaryDataPoint)
	}
	return metric, mAttributes, nil
}
// GetMetrics assembles the accumulated batch into a pdata.Metrics
// value. Before copying, any histogram data point whose bucket-count
// slice has the same length as its explicit bounds gets one extra
// bucket appended (set to the data point's total Count) so that
// len(bucket counts) == len(explicit bounds) + 1.
func (b *MetricsBatch) GetMetrics() pdata.Metrics {
	metrics := pdata.NewMetrics()
	// Ensure that the extra bucket counts have been added.
	for _, resourceMetrics := range b.rmByAttributes {
		for i := 0; i < resourceMetrics.InstrumentationLibraryMetrics().Len(); i++ {
			ilMetrics := resourceMetrics.InstrumentationLibraryMetrics().At(i)
			for j := 0; j < ilMetrics.Metrics().Len(); j++ {
				metric := ilMetrics.Metrics().At(j)
				if metric.DataType() == pdata.MetricDataTypeHistogram {
					for k := 0; k < metric.Histogram().DataPoints().Len(); k++ {
						dataPoint := metric.Histogram().DataPoints().At(k)
						if len(dataPoint.BucketCounts()) == len(dataPoint.ExplicitBounds()) {
							dataPoint.SetBucketCounts(append(dataPoint.BucketCounts(), dataPoint.Count()))
						}
					}
				}
			}
		}
		resourceMetrics.CopyTo(metrics.ResourceMetrics().AppendEmpty())
	}
	return metrics
}
// addPointWithUnknownSchema converts a point whose schema could not be
// recognized: every numeric field becomes its own gauge metric named
// "<measurement>_<field>". Non-numeric fields are skipped with a debug
// log. A zero timestamp is replaced by time.Now().
func (b *MetricsBatch) addPointWithUnknownSchema(measurement string, tags map[string]string, fields map[string]interface{}, ts time.Time) error {
	if ts.IsZero() {
		ts = time.Now()
	}
	for k, v := range fields {
		var floatValue *float64
		var intValue *int64
		switch vv := v.(type) {
		case float64:
			floatValue = &vv
		case int64:
			intValue = &vv
		case uint64:
			// uint64 is narrowed to int64; values above MaxInt64 wrap.
			convertedTypedValue := int64(vv)
			intValue = &convertedTypedValue
		default:
			b.logger.Debug("field has unsupported type", "measurement", measurement, "field", k, "type", fmt.Sprintf("%T", v))
			continue
		}
		metricName := fmt.Sprintf("%s_%s", measurement, k)
		metric, attributes, err := b.lookupMetric(metricName, tags, common.InfluxMetricValueTypeGauge)
		if err != nil {
			return err
		}
		dataPoint := metric.Gauge().DataPoints().AppendEmpty()
		attributes.CopyTo(dataPoint.Attributes())
		dataPoint.SetTimestamp(pdata.NewTimestampFromTime(ts))
		if floatValue != nil {
			dataPoint.SetDoubleVal(*floatValue)
		} else if intValue != nil {
			dataPoint.SetIntVal(*intValue)
		} else {
			// Exactly one of the two pointers is always set by the switch above.
			panic("unreachable")
		}
	}
	return nil
} | influx2otel/metrics.go | 0.564219 | 0.488283 | metrics.go | starcoder
package templates
// README is the contents of the README.md file
const README = `# dslink-{{.Lang}}-{{.Name}}
## {{.Node}} DSLink
A DSLink for <complete this>
`
const LICENSE = ` Copyright <Year> <Person/Company>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
`
// DSLinkJSON is the contents of dslink.json file
const DSLinkJson = `{
"name": "dslink-{{.Lang}}-{{.Name}}",
"version": "0.0.1",
"description": "{{.Node}} DSLink",
"license": "Apache",
"author": {
"name": "<<NAME>>",
"email": "<Your email>"
},
{{ if eq "dart" .Lang }}
"main": "bin/run.dart",
"engines": {
"dart": ">1.15.0"
},
{{ else if eq "java" .Lang }}
"main": "bin/dslink-{{.Lang}}-{{.Name}}",
{{ else if eq "javascript" .Lang }}
"main": "index.js",
"engines": {
"node" : ">=0.10.0"
},
"getDependencies": [
"node install.js"
],
{{ end }}
"repository": {
"type": "git",
"url": "https://<your repo address>"
},
"bugs": {
"url": "https://<your bug tracker address>"
},
"configs": {
"name": {
"type": "string",
"default": "{{.Node}}"
},
"broker": {
"type": "url"
},
"token": {
"type": "string"
},
"nodes": {
"type": "path",
"default": "nodes.json"
},
"key": {
"type": "path",
"default": ".key"
},
"log": {
"type": "enum",
"default": "info"
}
{{ if eq .Lang "java" }}
"handler_class": {
"type": "string",
"default": "<your handler class>"
}
{{ end }}
},
"license": "Apache"
}
`
const GitIgnore = `# Logs
logs
*.log
# Runtime data
pids
*.pid
*.pids
*.seed
{{ if eq "dart" .Lang }}
# Files and directories created by pub
.buildlog
.packages
.project
.pub/
build/
packages
# Files created by dart2js
*.dart.js
*.part.js
*.js.deps
*.js.map
#.info.json
# Directory created by dartdoc
doc/api/
# Should include pubspec lock file as its an application.
#pubspec.lock
{{ else if eq "javascript" .Lang }}
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# Grunt intermediate storage
.grunt
# node-waf configuration
.lock-wscript
# Compiled binary addons
build/Release
# Dependency directory
node_modules
{{ end }}
# DSA Specific ignores
nodes.json
.key
.dslink.key
.dslink.commit
` | templates/common_templates.go | 0.778439 | 0.447762 | common_templates.go | starcoder |
package numbers
// Context is the package-wide transform manager that registered
// transformers are added to.
var Context = &TransformManager {}
// MethodTransform is a factory that produces an Operate conversion function.
type MethodTransform func() Operate
// Operator defines the conversion methods a source type must provide to
// be registered as a transformer. Each method converts value into the
// named target type; defaultValue is presumably returned when the
// conversion fails — confirm against concrete implementations.
type Operator interface {
	// Transform2String converts value to a string representation.
	Transform2String(value Value, defaultValue Value) interface{}
	// Transform2Int64 converts value to an int64 representation.
	Transform2Int64(value Value, defaultValue Value) interface{}
	// Transform2Bool converts value to a bool representation.
	Transform2Bool(value Value, defaultValue Value) interface{}
	// Transform2Float64 converts value to a float64 representation.
	Transform2Float64(value Value, defaultValue Value) interface{}
}
// TypeOperator pairs a target type with the factory producing the
// transform operation that converts values into that type.
type TypeOperator struct {
	// target is the type this operator converts values into.
	target Type
	// methodTransform lazily produces the Operate conversion function.
	methodTransform MethodTransform
}
// RegisterOperator registers a transformer for the given source type,
// wiring the operator's Transform2* methods up as the per-target
// transform operations on the package-wide Context.
func RegisterOperator(source Type, operator Operator) {
	transformer := registerTransformer(source, transform2TypeOperators(operator))
	if transformer == nil {
		// No operations were produced for this operator; nothing to register.
		return
	}
	Context.register(transformer)
}
// transform2TypeOperators builds the list of per-target TypeOperators
// backed by the given Operator's Transform2* methods, one per supported
// target type (string, int64, bool, float64).
func transform2TypeOperators(operator Operator) []*TypeOperator {
	methods := []struct {
		target string
		fn     func(value Value, defaultValue Value) interface{}
	}{
		{"string", operator.Transform2String},
		{"int64", operator.Transform2Int64},
		{"bool", operator.Transform2Bool},
		{"float64", operator.Transform2Float64},
	}
	operators := make([]*TypeOperator, 0, len(methods))
	for _, m := range methods {
		operators = appendOperator(operators, m.target, m.fn)
	}
	return operators
}
// appendOperator wraps method as an Operate closure bound to the given
// target type name and appends the resulting TypeOperator to operators.
// The closure ignores the targetType and routes value/defaultValue
// straight into method.
func appendOperator(operators []*TypeOperator, target string, method func(value Value, defaultValue Value) interface{}) []*TypeOperator {
	op := transform2TypeOperator(Type(target), func() Operate {
		return func(value interface{}, targetType Type, defaultValue interface{}) interface{} {
			return method(value, defaultValue)
		}
	})
	return append(operators, op)
}
// transform2TypeOperator bundles a target type with its transform factory.
func transform2TypeOperator(target Type, transform MethodTransform) *TypeOperator {
	op := TypeOperator{target: target, methodTransform: transform}
	return &op
}
// registerTransformer assembles a Transformer for source from the given
// per-target operators. It returns nil when operators is empty so the
// caller can skip registration.
func registerTransformer(source Type, operators []*TypeOperator) *Transformer {
	if len(operators) == 0 {
		return nil
	}
	transformer := Transformer {
		Source: source,
	}
	// Index each transform operation by its target type; the factory is
	// invoked once here to materialise the Operate function.
	operateMap := make(map[Type]Operate)
	for _, operator := range operators {
		operateMap[operator.target] = operator.methodTransform()
	}
	transformer.TargetOperateMap = operateMap
	return &transformer
} | numbers/transfromer_register.go | 0.667256 | 0.518912 | transfromer_register.go | starcoder
package box2d
// B2CollideCircles computes the collision manifold between two circles.
// PointCount stays 0 when the circles do not overlap. On contact the
// manifold is expressed in local coordinates: LocalPoint is circle A's
// center, the single contact point is circle B's center, and
// LocalNormal is explicitly zeroed (unused for the E_circles manifold
// type — confirm against manifold consumers).
func B2CollideCircles(manifold *B2Manifold, circleA *B2CircleShape, xfA B2Transform, circleB *B2CircleShape, xfB B2Transform) {
	manifold.PointCount = 0
	// Circle centers in world coordinates.
	pA := B2TransformVec2Mul(xfA, circleA.M_p)
	pB := B2TransformVec2Mul(xfB, circleB.M_p)
	d := B2Vec2Sub(pB, pA)
	distSqr := B2Vec2Dot(d, d)
	rA := circleA.M_radius
	rB := circleB.M_radius
	radius := rA + rB
	// Compare squared distances to avoid a square root.
	if distSqr > radius*radius {
		return
	}
	manifold.Type = B2Manifold_Type.E_circles
	manifold.LocalPoint = circleA.M_p
	manifold.LocalNormal.SetZero()
	manifold.PointCount = 1
	manifold.Points[0].LocalPoint = circleB.M_p
	manifold.Points[0].Id.SetKey(0)
}
// B2CollidePolygonAndCircle computes the collision manifold between a
// polygon (shape A) and a circle (shape B). The circle center is moved
// into the polygon's local frame, the polygon face with the largest
// separation is found, and the contact is then classified as deep
// (center inside the polygon), against vertex v1, against vertex v2, or
// against the face, based on barycentric coordinates along the incident
// edge. PointCount stays 0 when there is no contact.
func B2CollidePolygonAndCircle(manifold *B2Manifold, polygonA *B2PolygonShape, xfA B2Transform, circleB *B2CircleShape, xfB B2Transform) {
	manifold.PointCount = 0
	// Compute circle position in the frame of the polygon.
	c := B2TransformVec2Mul(xfB, circleB.M_p)
	cLocal := B2TransformVec2MulT(xfA, c)
	// Find the min separating edge.
	normalIndex := 0
	separation := -B2_maxFloat
	radius := polygonA.M_radius + circleB.M_radius
	vertexCount := polygonA.M_count
	vertices := polygonA.M_vertices
	normals := polygonA.M_normals
	for i := 0; i < vertexCount; i++ {
		// Signed distance of the circle center from face i's plane.
		s := B2Vec2Dot(normals[i], B2Vec2Sub(cLocal, vertices[i]))
		if s > radius {
			// Early out.
			return
		}
		if s > separation {
			separation = s
			normalIndex = i
		}
	}
	// Vertices that subtend the incident face (wrapping at vertexCount).
	vertIndex1 := normalIndex
	vertIndex2 := 0
	if vertIndex1+1 < vertexCount {
		vertIndex2 = vertIndex1 + 1
	}
	v1 := vertices[vertIndex1]
	v2 := vertices[vertIndex2]
	// If the center is inside the polygon ...
	if separation < B2_epsilon {
		manifold.PointCount = 1
		manifold.Type = B2Manifold_Type.E_faceA
		manifold.LocalNormal = normals[normalIndex]
		manifold.LocalPoint = B2Vec2MulScalar(0.5, B2Vec2Add(v1, v2))
		manifold.Points[0].LocalPoint = circleB.M_p
		manifold.Points[0].Id.SetKey(0)
		return
	}
	// Compute barycentric coordinates
	u1 := B2Vec2Dot(B2Vec2Sub(cLocal, v1), B2Vec2Sub(v2, v1))
	u2 := B2Vec2Dot(B2Vec2Sub(cLocal, v2), B2Vec2Sub(v1, v2))
	if u1 <= 0.0 {
		// Closest feature is vertex v1.
		if B2Vec2DistanceSquared(cLocal, v1) > radius*radius {
			return
		}
		manifold.PointCount = 1
		manifold.Type = B2Manifold_Type.E_faceA
		manifold.LocalNormal = B2Vec2Sub(cLocal, v1)
		manifold.LocalNormal.Normalize()
		manifold.LocalPoint = v1
		manifold.Points[0].LocalPoint = circleB.M_p
		manifold.Points[0].Id.SetKey(0)
	} else if u2 <= 0.0 {
		// Closest feature is vertex v2.
		if B2Vec2DistanceSquared(cLocal, v2) > radius*radius {
			return
		}
		manifold.PointCount = 1
		manifold.Type = B2Manifold_Type.E_faceA
		manifold.LocalNormal = B2Vec2Sub(cLocal, v2)
		manifold.LocalNormal.Normalize()
		manifold.LocalPoint = v2
		manifold.Points[0].LocalPoint = circleB.M_p
		manifold.Points[0].Id.SetKey(0)
	} else {
		// Closest feature is the face itself.
		faceCenter := B2Vec2MulScalar(0.5, B2Vec2Add(v1, v2))
		s := B2Vec2Dot(B2Vec2Sub(cLocal, faceCenter), normals[vertIndex1])
		if s > radius {
			return
		}
		manifold.PointCount = 1
		manifold.Type = B2Manifold_Type.E_faceA
		manifold.LocalNormal = normals[vertIndex1]
		manifold.LocalPoint = faceCenter
		manifold.Points[0].LocalPoint = circleB.M_p
		manifold.Points[0].Id.SetKey(0)
	}
} | CollisionB2CollideCircle.go | 0.817429 | 0.809088 | CollisionB2CollideCircle.go | starcoder
package geometry_msgs_msg
import (
"unsafe"
"github.com/tiiuae/rclgo/pkg/rclgo"
"github.com/tiiuae/rclgo/pkg/rclgo/types"
"github.com/tiiuae/rclgo/pkg/rclgo/typemap"
primitives "github.com/tiiuae/rclgo/pkg/rclgo/primitives"
)
/*
#include <rosidl_runtime_c/message_type_support_struct.h>
#include <geometry_msgs/msg/accel_with_covariance.h>
*/
import "C"
// init registers this message type with rclgo's global type map under
// its ROS interface name.
func init() {
	typemap.RegisterMessage("geometry_msgs/AccelWithCovariance", AccelWithCovarianceTypeSupport)
}
// AccelWithCovariance is the Go representation of the ROS message
// geometry_msgs/msg/AccelWithCovariance.
// Do not create instances of this type directly. Always use NewAccelWithCovariance
// function instead.
// NOTE(review): this file appears to be generated (.gen.go) — prefer
// changing the generator over hand-editing.
type AccelWithCovariance struct {
	Accel Accel `yaml:"accel"`
	Covariance [36]float64 `yaml:"covariance"` // Row-major representation of the 6x6 covariance matrix. The orientation parameters use a fixed-axis representation; in order: (x, y, z, rotation about X axis, rotation about Y axis, rotation about Z axis).
}
// NewAccelWithCovariance creates a new AccelWithCovariance with default values.
// The message is zero-initialised and then SetDefaults applies field defaults.
func NewAccelWithCovariance() *AccelWithCovariance {
	self := AccelWithCovariance{}
	self.SetDefaults()
	return &self
}
// Clone returns a deep copy of the message.
func (t *AccelWithCovariance) Clone() *AccelWithCovariance {
	c := &AccelWithCovariance{}
	c.Accel = *t.Accel.Clone()
	c.Covariance = t.Covariance // fixed-size arrays copy by value
	return c
}
// CloneMsg implements types.Message by returning a deep copy of t.
func (t *AccelWithCovariance) CloneMsg() types.Message {
	return t.Clone()
}
// SetDefaults resets all fields to their default values.
func (t *AccelWithCovariance) SetDefaults() {
	t.Accel.SetDefaults()
	t.Covariance = [36]float64{}
}
// AccelWithCovariancePublisher wraps rclgo.Publisher to provide type safe helper
// functions for publishing AccelWithCovariance messages.
type AccelWithCovariancePublisher struct {
	*rclgo.Publisher
}
// NewAccelWithCovariancePublisher creates and returns a new publisher for the
// AccelWithCovariance message type on the given topic.
func NewAccelWithCovariancePublisher(node *rclgo.Node, topic_name string, options *rclgo.PublisherOptions) (*AccelWithCovariancePublisher, error) {
	pub, err := node.NewPublisher(topic_name, AccelWithCovarianceTypeSupport, options)
	if err != nil {
		return nil, err
	}
	return &AccelWithCovariancePublisher{pub}, nil
}
// Publish sends msg on this publisher's topic.
func (p *AccelWithCovariancePublisher) Publish(msg *AccelWithCovariance) error {
	return p.Publisher.Publish(msg)
}
// AccelWithCovarianceSubscription wraps rclgo.Subscription to provide type safe helper
// functions for receiving AccelWithCovariance messages.
type AccelWithCovarianceSubscription struct {
	*rclgo.Subscription
}
// AccelWithCovarianceSubscriptionCallback type is used to provide a subscription
// handler function for a AccelWithCovarianceSubscription. err is non-nil when
// taking the message from the subscription failed.
type AccelWithCovarianceSubscriptionCallback func(msg *AccelWithCovariance, info *rclgo.RmwMessageInfo, err error)
// NewAccelWithCovarianceSubscription creates and returns a new subscription for the
// AccelWithCovariance message type; subscriptionCallback is invoked for
// every message received on topic_name.
func NewAccelWithCovarianceSubscription(node *rclgo.Node, topic_name string, subscriptionCallback AccelWithCovarianceSubscriptionCallback) (*AccelWithCovarianceSubscription, error) {
	callback := func(s *rclgo.Subscription) {
		var msg AccelWithCovariance
		info, err := s.TakeMessage(&msg)
		subscriptionCallback(&msg, info, err)
	}
	sub, err := node.NewSubscription(topic_name, AccelWithCovarianceTypeSupport, callback)
	if err != nil {
		return nil, err
	}
	return &AccelWithCovarianceSubscription{sub}, nil
}
// TakeMessage reads one message from the subscription into out and
// returns its delivery metadata.
func (s *AccelWithCovarianceSubscription) TakeMessage(out *AccelWithCovariance) (*rclgo.RmwMessageInfo, error) {
	return s.Subscription.TakeMessage(out)
}
// CloneAccelWithCovarianceSlice clones src to dst by calling Clone for each element in
// src. Panics if len(dst) < len(src).
func CloneAccelWithCovarianceSlice(dst, src []AccelWithCovariance) {
	for i := range src {
		dst[i] = *src[i].Clone()
	}
}
// AccelWithCovarianceTypeSupport is the singleton type-support adapter
// for this message. Modifying this variable is undefined behavior.
var AccelWithCovarianceTypeSupport types.MessageTypeSupport = _AccelWithCovarianceTypeSupport{}
// _AccelWithCovarianceTypeSupport implements types.MessageTypeSupport.
type _AccelWithCovarianceTypeSupport struct{}
// New returns a fresh message with defaults applied.
func (t _AccelWithCovarianceTypeSupport) New() types.Message {
	return NewAccelWithCovariance()
}
// PrepareMemory allocates a C message via the generated create function.
// The caller must release it with ReleaseMemory.
func (t _AccelWithCovarianceTypeSupport) PrepareMemory() unsafe.Pointer { //returns *C.geometry_msgs__msg__AccelWithCovariance
	return (unsafe.Pointer)(C.geometry_msgs__msg__AccelWithCovariance__create())
}
// ReleaseMemory frees a C message previously obtained from PrepareMemory.
func (t _AccelWithCovarianceTypeSupport) ReleaseMemory(pointer_to_free unsafe.Pointer) {
	C.geometry_msgs__msg__AccelWithCovariance__destroy((*C.geometry_msgs__msg__AccelWithCovariance)(pointer_to_free))
}
// AsCStruct serialises the Go message msg into the C message at dst.
func (t _AccelWithCovarianceTypeSupport) AsCStruct(dst unsafe.Pointer, msg types.Message) {
	m := msg.(*AccelWithCovariance)
	mem := (*C.geometry_msgs__msg__AccelWithCovariance)(dst)
	AccelTypeSupport.AsCStruct(unsafe.Pointer(&mem.accel), &m.Accel)
	cSlice_covariance := mem.covariance[:]
	primitives.Float64__Array_to_C(*(*[]primitives.CFloat64)(unsafe.Pointer(&cSlice_covariance)), m.Covariance[:])
}
// AsGoStruct deserialises the C message buffer into the Go message msg.
func (t _AccelWithCovarianceTypeSupport) AsGoStruct(msg types.Message, ros2_message_buffer unsafe.Pointer) {
	m := msg.(*AccelWithCovariance)
	mem := (*C.geometry_msgs__msg__AccelWithCovariance)(ros2_message_buffer)
	AccelTypeSupport.AsGoStruct(&m.Accel, unsafe.Pointer(&mem.accel))
	cSlice_covariance := mem.covariance[:]
	primitives.Float64__Array_to_Go(m.Covariance[:], *(*[]primitives.CFloat64)(unsafe.Pointer(&cSlice_covariance)))
}
// TypeSupport returns the rosidl type-support handle for this message.
func (t _AccelWithCovarianceTypeSupport) TypeSupport() unsafe.Pointer {
	return unsafe.Pointer(C.rosidl_typesupport_c__get_message_type_support_handle__geometry_msgs__msg__AccelWithCovariance())
}
// CAccelWithCovariance and CAccelWithCovariance__Sequence alias the raw
// cgo struct types for this message and its sequence type.
type CAccelWithCovariance = C.geometry_msgs__msg__AccelWithCovariance
type CAccelWithCovariance__Sequence = C.geometry_msgs__msg__AccelWithCovariance__Sequence
// AccelWithCovariance__Sequence_to_Go converts the C sequence cSlice
// into a freshly allocated Go slice stored in *goSlice. An empty C
// sequence leaves *goSlice untouched (nil).
func AccelWithCovariance__Sequence_to_Go(goSlice *[]AccelWithCovariance, cSlice CAccelWithCovariance__Sequence) {
	if cSlice.size == 0 {
		return
	}
	*goSlice = make([]AccelWithCovariance, int64(cSlice.size))
	for i := 0; i < int(cSlice.size); i++ {
		// Address element i of the C array by pointer arithmetic. The
		// original code cast this pointer to the *Sequence* struct type;
		// the element type is used here instead. Behavior is unchanged
		// (the value is only ever used as an unsafe.Pointer), but the
		// type now matches the data it points at.
		cIdx := (*C.geometry_msgs__msg__AccelWithCovariance)(unsafe.Pointer(
			uintptr(unsafe.Pointer(cSlice.data)) + (C.sizeof_struct_geometry_msgs__msg__AccelWithCovariance * uintptr(i)),
		))
		AccelWithCovarianceTypeSupport.AsGoStruct(&(*goSlice)[i], unsafe.Pointer(cIdx))
	}
}
// AccelWithCovariance__Sequence_to_C serialises goSlice into a newly
// C-malloc'd array referenced from cSlice. An empty Go slice leaves
// cSlice untouched. The C side owns (and must eventually free)
// cSlice.data.
func AccelWithCovariance__Sequence_to_C(cSlice *CAccelWithCovariance__Sequence, goSlice []AccelWithCovariance) {
	if len(goSlice) == 0 {
		return
	}
	cSlice.data = (*C.geometry_msgs__msg__AccelWithCovariance)(C.malloc((C.size_t)(C.sizeof_struct_geometry_msgs__msg__AccelWithCovariance * uintptr(len(goSlice)))))
	cSlice.capacity = C.size_t(len(goSlice))
	cSlice.size = cSlice.capacity
	for i, v := range goSlice {
		// Address element i of the C array by pointer arithmetic.
		cIdx := (*C.geometry_msgs__msg__AccelWithCovariance)(unsafe.Pointer(
			uintptr(unsafe.Pointer(cSlice.data)) + (C.sizeof_struct_geometry_msgs__msg__AccelWithCovariance * uintptr(i)),
		))
		// Passing &v (the loop-variable copy) is safe: AsCStruct only reads it.
		AccelWithCovarianceTypeSupport.AsCStruct(unsafe.Pointer(cIdx), &v)
	}
}
// AccelWithCovariance__Array_to_Go converts a fixed-size C array into
// the Go slice element-by-element; goSlice must be at least as long as
// cSlice or the index expression panics.
func AccelWithCovariance__Array_to_Go(goSlice []AccelWithCovariance, cSlice []CAccelWithCovariance) {
	for i := 0; i < len(cSlice); i++ {
		AccelWithCovarianceTypeSupport.AsGoStruct(&goSlice[i], unsafe.Pointer(&cSlice[i]))
	}
}
// AccelWithCovariance__Array_to_C converts a Go slice into the C array
// element-by-element; cSlice must be at least as long as goSlice or the
// index expression panics.
func AccelWithCovariance__Array_to_C(cSlice []CAccelWithCovariance, goSlice []AccelWithCovariance) {
	for i := 0; i < len(goSlice); i++ {
		AccelWithCovarianceTypeSupport.AsCStruct(unsafe.Pointer(&cSlice[i]), &goSlice[i])
	}
} | internal/msgs/geometry_msgs/msg/AccelWithCovariance.gen.go | 0.73914 | 0.462716 | AccelWithCovariance.gen.go | starcoder
package sparsetable
import (
"bytes"
"encoding/gob"
"fmt"
"sort"
"unicode/utf8"
"github.com/pkg/errors"
)
// State represents the state of a DFA.
// It is a simple integer that points to the active state of the DFA's
// cell table.
type State int
// Valid returns true if the state is still valid (non-negative).
func (s State) Valid() bool {
	return s >= 0
}
// DFA is a deterministic finite automaton implemented over a sparse
// cell table; initial indexes the start state's cell within table.
type DFA struct {
	table []Cell
	initial State
}
// NewDictionary builds a minimized sparse table DFA from a list of strings.
// Each string is added with the value 1. NewDictionary panics if the
// build process fails.
// Note: the variadic strs slice is sorted in place, so a caller passing
// an existing slice via strs... will observe it reordered.
func NewDictionary(strs ...string) *DFA {
	b := NewBuilder()
	sort.Strings(strs)
	for _, str := range strs {
		if err := b.Add(str, 1); err != nil {
			panic(err)
		}
	}
	return b.Build()
}
// Initial returns the initial state of the DFA.
// The state of the DFA is a simple integer that gives the position
// of the active cell in the DFA's cell table.
// Values less than 0 mark invalid states.
func (d *DFA) Initial() State {
	return d.initial
}
// Delta makes one transition from the given state s with the given byte
// c, returning the target state or -1 if no such transition exists. The
// candidate cell sits at offset c from the state cell and must be a
// transition cell labelled with c.
// NOTE(review): Delta uses a value receiver while the other methods use
// pointer receivers — harmless (DFA is small) but inconsistent.
func (d DFA) Delta(s State, c byte) State {
	if !d.valid(s, validAnyState) {
		return -1
	}
	o := State(c)
	if int(s+o) >= len(d.table) ||
		!d.table[s+o].Transition() ||
		d.table[s+o].Char() != c {
		return -1
	}
	return State(d.table[s+o].Target())
}
// Final returns (data, true) if the given state is final.
// If the given state is not final, (0, false) is returned.
func (d *DFA) Final(s State) (int32, bool) {
	if !d.valid(s, validFinalState) {
		return 0, false
	}
	return d.table[s].Final()
}
// EachTransition iterates over all transitions of the given state calling
// the callback function f for each transition cell. Invalid states are
// silently ignored.
func (d *DFA) EachTransition(s State, f func(Cell)) {
	if !d.valid(s, validAnyState) {
		return
	}
	d.forEachTransition(s, f)
}
var (
	// ulen maps the high nibble of a byte (b >> 4) to the total length
	// of the UTF-8 sequence that byte starts: 1 for ASCII lead bytes
	// (0x00-0x7F), 0 for continuation bytes (0x80-0xBF), and 2/3/4 for
	// the multi-byte lead bytes.
	ulen = [...]int{
		1, 1, 1, 1, 1, 1, 1, 1,
		0, 0, 0, 0,
		2, 2,
		3,
		4,
	}
)
// EachUTF8Transition iterates over all transitions of the given state
// calling the callback function f for each transition.
// EachUTF8Transition follows UTF8 multibyte sequences (via the
// intermediate byte states) to ensure that the callback is called with
// a complete decoded rune for each valid unicode transition.
// NOTE(review): a continuation byte appearing as a first transition
// byte (ulen entry 0) invokes f with rune 0 — confirm that is intended.
func (d *DFA) EachUTF8Transition(s State, f func(rune, State)) {
	if !d.valid(s, validAnyState) {
		return
	}
	d.forEachTransition(s, func(cell Cell) {
		// buf collects the bytes of one UTF-8 sequence, starting with
		// this transition's byte.
		buf := [utf8.UTFMax]byte{cell.Char()}
		switch ulen[cell.Char()>>4] {
		case 0:
			f(0, State(cell.Target()))
		case 1:
			f(rune(cell.Char()), State(cell.Target()))
		case 2: // two bytes
			d.forEachUTF8Transition(buf[:], 1, 1, State(cell.Target()), f)
		case 3: // three bytes
			d.forEachUTF8Transition(buf[:], 1, 2, State(cell.Target()), f)
		case 4: // four bytes
			d.forEachUTF8Transition(buf[:], 1, 3, State(cell.Target()), f)
		default: // something else
			panic(fmt.Sprintf("invalid utf8 byte %b encountered", cell.Char()))
		}
	})
}
// forEachUTF8Transition recursively follows continuation-byte
// transitions, filling buf[i] at each level; when position end is
// reached the buffered bytes are decoded into a rune and handed to f
// together with the final target state. Lead bytes encountered mid-
// sequence are skipped.
func (d DFA) forEachUTF8Transition(buf []byte, i, end int, s State, f func(rune, State)) {
	if !d.valid(s, validAnyState) {
		return
	}
	d.forEachTransition(s, func(cell Cell) {
		if !utf8.RuneStart(cell.Char()) {
			buf[i] = cell.Char()
			if i == end {
				r, _ := utf8.DecodeRune(buf)
				f(r, State(cell.Target()))
			} else {
				d.forEachUTF8Transition(buf, i+1, end, State(cell.Target()), f)
			}
		}
	})
}
// forEachTransition walks the chain of transition cells belonging to state
// s and calls f for each. The cell at s stores the offset of the first
// transition; each transition cell stores the offset of the next, and an
// offset <= 0 terminates the chain. Encountering a non-transition cell in
// the chain indicates a corrupt table and panics.
func (d DFA) forEachTransition(s State, f func(Cell)) {
	if !d.valid(s, validAnyState) {
		return
	}
	for i := State(d.table[s].Next()); i > 0; i = State(d.table[s+i].Next()) {
		cell := d.table[s+i]
		if !cell.Transition() {
			panic(fmt.Sprintf("invalid cell type in EachTransition: %s", cell))
		}
		f(cell)
	}
}
// Validity classes accepted by DFA.valid, describing what kind of cell a
// state index is expected to reference.
const (
	validTransition = iota // cell must be a transition cell
	validAnyState          // cell must be a state cell (final or not)
	validFinalState        // cell must be a final state cell
	validAny               // any in-bounds cell is acceptable
)
// valid reports whether s is an in-bounds index into the sparse table
// whose cell satisfies the requested validity class.
func (d DFA) valid(s State, typ int) bool {
	if s < 0 || int(s) >= len(d.table) {
		return false
	}
	cell := d.table[s]
	switch typ {
	case validTransition:
		return cell.Transition()
	case validAnyState:
		return cell.State()
	case validFinalState:
		_, ok := cell.Final()
		return ok
	case validAny:
		return true
	}
	return false
}
// CellAt returns the cell stored at state s, or the zero Cell when s is
// not a valid index into the table.
func (d *DFA) CellAt(s State) Cell {
	if d.valid(s, validAny) {
		return d.table[s]
	}
	return Cell{}
}
// EachCell calls the given callback function f once for every cell in the
// DFA's table, in table order.
func (d *DFA) EachCell(f func(Cell)) {
	for i := range d.table {
		f(d.table[i])
	}
}
// GobDecode restores a DFA previously serialized with GobEncode. The
// receiver is only modified once both the initial state and the sparse
// table have decoded successfully.
func (d *DFA) GobDecode(bs []byte) error {
	dec := gob.NewDecoder(bytes.NewReader(bs))
	var (
		start State
		cells []Cell
	)
	if err := dec.Decode(&start); err != nil {
		return errors.Wrapf(err, "could not GOB decode initial state")
	}
	if err := dec.Decode(&cells); err != nil {
		return errors.Wrapf(err, "could not GOB decode sparse table")
	}
	d.initial = start
	d.table = cells
	return nil
}
// GobEncode encods a DFA to gob.
func (d *DFA) GobEncode() ([]byte, error) {
buffer := new(bytes.Buffer)
encoder := gob.NewEncoder(buffer)
if err := encoder.Encode(d.initial); err != nil {
return nil, errors.Wrapf(err, "could not GOB encode initial state")
}
if err := encoder.Encode(d.table); err != nil {
return nil, errors.Wrapf(err, "could not GOB encode sparse table")
}
return buffer.Bytes(), nil
} | dfa.go | 0.764979 | 0.492005 | dfa.go | starcoder |
package main

// contributors holds a static JSON payload shaped like the GitHub
// "list repository contributors" API response (an array of user objects
// with profile URLs and contribution counts). NOTE(review): presumably
// used as offline mock data so callers need not hit the real API —
// confirm against the consumers of this constant.
const contributors = `[
{
"login": "ardan-bkennedy",
"id": 2280005,
"avatar_url": "https://avatars3.githubusercontent.com/u/2280005?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/ardan-bkennedy",
"html_url": "https://github.com/ardan-bkennedy",
"followers_url": "https://api.github.com/users/ardan-bkennedy/followers",
"following_url": "https://api.github.com/users/ardan-bkennedy/following{/other_user}",
"gists_url": "https://api.github.com/users/ardan-bkennedy/gists{/gist_id}",
"starred_url": "https://api.github.com/users/ardan-bkennedy/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ardan-bkennedy/subscriptions",
"organizations_url": "https://api.github.com/users/ardan-bkennedy/orgs",
"repos_url": "https://api.github.com/users/ardan-bkennedy/repos",
"events_url": "https://api.github.com/users/ardan-bkennedy/events{/privacy}",
"received_events_url": "https://api.github.com/users/ardan-bkennedy/received_events",
"type": "User",
"site_admin": false,
"contributions": 700
},
{
"login": "dwhitena",
"id": 4524535,
"avatar_url": "https://avatars0.githubusercontent.com/u/4524535?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/dwhitena",
"html_url": "https://github.com/dwhitena",
"followers_url": "https://api.github.com/users/dwhitena/followers",
"following_url": "https://api.github.com/users/dwhitena/following{/other_user}",
"gists_url": "https://api.github.com/users/dwhitena/gists{/gist_id}",
"starred_url": "https://api.github.com/users/dwhitena/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/dwhitena/subscriptions",
"organizations_url": "https://api.github.com/users/dwhitena/orgs",
"repos_url": "https://api.github.com/users/dwhitena/repos",
"events_url": "https://api.github.com/users/dwhitena/events{/privacy}",
"received_events_url": "https://api.github.com/users/dwhitena/received_events",
"type": "User",
"site_admin": false,
"contributions": 156
},
{
"login": "extemporalgenome",
"id": 536740,
"avatar_url": "https://avatars1.githubusercontent.com/u/536740?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/extemporalgenome",
"html_url": "https://github.com/extemporalgenome",
"followers_url": "https://api.github.com/users/extemporalgenome/followers",
"following_url": "https://api.github.com/users/extemporalgenome/following{/other_user}",
"gists_url": "https://api.github.com/users/extemporalgenome/gists{/gist_id}",
"starred_url": "https://api.github.com/users/extemporalgenome/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/extemporalgenome/subscriptions",
"organizations_url": "https://api.github.com/users/extemporalgenome/orgs",
"repos_url": "https://api.github.com/users/extemporalgenome/repos",
"events_url": "https://api.github.com/users/extemporalgenome/events{/privacy}",
"received_events_url": "https://api.github.com/users/extemporalgenome/received_events",
"type": "User",
"site_admin": false,
"contributions": 100
},
{
"login": "jcbwlkr",
"id": 2027263,
"avatar_url": "https://avatars1.githubusercontent.com/u/2027263?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/jcbwlkr",
"html_url": "https://github.com/jcbwlkr",
"followers_url": "https://api.github.com/users/jcbwlkr/followers",
"following_url": "https://api.github.com/users/jcbwlkr/following{/other_user}",
"gists_url": "https://api.github.com/users/jcbwlkr/gists{/gist_id}",
"starred_url": "https://api.github.com/users/jcbwlkr/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/jcbwlkr/subscriptions",
"organizations_url": "https://api.github.com/users/jcbwlkr/orgs",
"repos_url": "https://api.github.com/users/jcbwlkr/repos",
"events_url": "https://api.github.com/users/jcbwlkr/events{/privacy}",
"received_events_url": "https://api.github.com/users/jcbwlkr/received_events",
"type": "User",
"site_admin": false,
"contributions": 71
},
{
"login": "markbates",
"id": 3528,
"avatar_url": "https://avatars3.githubusercontent.com/u/3528?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/markbates",
"html_url": "https://github.com/markbates",
"followers_url": "https://api.github.com/users/markbates/followers",
"following_url": "https://api.github.com/users/markbates/following{/other_user}",
"gists_url": "https://api.github.com/users/markbates/gists{/gist_id}",
"starred_url": "https://api.github.com/users/markbates/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/markbates/subscriptions",
"organizations_url": "https://api.github.com/users/markbates/orgs",
"repos_url": "https://api.github.com/users/markbates/repos",
"events_url": "https://api.github.com/users/markbates/events{/privacy}",
"received_events_url": "https://api.github.com/users/markbates/received_events",
"type": "User",
"site_admin": false,
"contributions": 15
},
{
"login": "codegangsta",
"id": 178316,
"avatar_url": "https://avatars1.githubusercontent.com/u/178316?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/codegangsta",
"html_url": "https://github.com/codegangsta",
"followers_url": "https://api.github.com/users/codegangsta/followers",
"following_url": "https://api.github.com/users/codegangsta/following{/other_user}",
"gists_url": "https://api.github.com/users/codegangsta/gists{/gist_id}",
"starred_url": "https://api.github.com/users/codegangsta/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/codegangsta/subscriptions",
"organizations_url": "https://api.github.com/users/codegangsta/orgs",
"repos_url": "https://api.github.com/users/codegangsta/repos",
"events_url": "https://api.github.com/users/codegangsta/events{/privacy}",
"received_events_url": "https://api.github.com/users/codegangsta/received_events",
"type": "User",
"site_admin": false,
"contributions": 14
},
{
"login": "scottefein",
"id": 594123,
"avatar_url": "https://avatars1.githubusercontent.com/u/594123?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/scottefein",
"html_url": "https://github.com/scottefein",
"followers_url": "https://api.github.com/users/scottefein/followers",
"following_url": "https://api.github.com/users/scottefein/following{/other_user}",
"gists_url": "https://api.github.com/users/scottefein/gists{/gist_id}",
"starred_url": "https://api.github.com/users/scottefein/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/scottefein/subscriptions",
"organizations_url": "https://api.github.com/users/scottefein/orgs",
"repos_url": "https://api.github.com/users/scottefein/repos",
"events_url": "https://api.github.com/users/scottefein/events{/privacy}",
"received_events_url": "https://api.github.com/users/scottefein/received_events",
"type": "User",
"site_admin": false,
"contributions": 8
},
{
"login": "sbinet",
"id": 1070920,
"avatar_url": "https://avatars2.githubusercontent.com/u/1070920?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/sbinet",
"html_url": "https://github.com/sbinet",
"followers_url": "https://api.github.com/users/sbinet/followers",
"following_url": "https://api.github.com/users/sbinet/following{/other_user}",
"gists_url": "https://api.github.com/users/sbinet/gists{/gist_id}",
"starred_url": "https://api.github.com/users/sbinet/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/sbinet/subscriptions",
"organizations_url": "https://api.github.com/users/sbinet/orgs",
"repos_url": "https://api.github.com/users/sbinet/repos",
"events_url": "https://api.github.com/users/sbinet/events{/privacy}",
"received_events_url": "https://api.github.com/users/sbinet/received_events",
"type": "User",
"site_admin": false,
"contributions": 8
},
{
"login": "freeeve",
"id": 882560,
"avatar_url": "https://avatars0.githubusercontent.com/u/882560?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/freeeve",
"html_url": "https://github.com/freeeve",
"followers_url": "https://api.github.com/users/freeeve/followers",
"following_url": "https://api.github.com/users/freeeve/following{/other_user}",
"gists_url": "https://api.github.com/users/freeeve/gists{/gist_id}",
"starred_url": "https://api.github.com/users/freeeve/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/freeeve/subscriptions",
"organizations_url": "https://api.github.com/users/freeeve/orgs",
"repos_url": "https://api.github.com/users/freeeve/repos",
"events_url": "https://api.github.com/users/freeeve/events{/privacy}",
"received_events_url": "https://api.github.com/users/freeeve/received_events",
"type": "User",
"site_admin": false,
"contributions": 6
},
{
"login": "ryanfaerman",
"id": 218121,
"avatar_url": "https://avatars3.githubusercontent.com/u/218121?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/ryanfaerman",
"html_url": "https://github.com/ryanfaerman",
"followers_url": "https://api.github.com/users/ryanfaerman/followers",
"following_url": "https://api.github.com/users/ryanfaerman/following{/other_user}",
"gists_url": "https://api.github.com/users/ryanfaerman/gists{/gist_id}",
"starred_url": "https://api.github.com/users/ryanfaerman/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ryanfaerman/subscriptions",
"organizations_url": "https://api.github.com/users/ryanfaerman/orgs",
"repos_url": "https://api.github.com/users/ryanfaerman/repos",
"events_url": "https://api.github.com/users/ryanfaerman/events{/privacy}",
"received_events_url": "https://api.github.com/users/ryanfaerman/received_events",
"type": "User",
"site_admin": false,
"contributions": 6
},
{
"login": "bsreera",
"id": 4218828,
"avatar_url": "https://avatars3.githubusercontent.com/u/4218828?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/bsreera",
"html_url": "https://github.com/bsreera",
"followers_url": "https://api.github.com/users/bsreera/followers",
"following_url": "https://api.github.com/users/bsreera/following{/other_user}",
"gists_url": "https://api.github.com/users/bsreera/gists{/gist_id}",
"starred_url": "https://api.github.com/users/bsreera/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/bsreera/subscriptions",
"organizations_url": "https://api.github.com/users/bsreera/orgs",
"repos_url": "https://api.github.com/users/bsreera/repos",
"events_url": "https://api.github.com/users/bsreera/events{/privacy}",
"received_events_url": "https://api.github.com/users/bsreera/received_events",
"type": "User",
"site_admin": false,
"contributions": 6
},
{
"login": "Dfenniak",
"id": 4259190,
"avatar_url": "https://avatars0.githubusercontent.com/u/4259190?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/Dfenniak",
"html_url": "https://github.com/Dfenniak",
"followers_url": "https://api.github.com/users/Dfenniak/followers",
"following_url": "https://api.github.com/users/Dfenniak/following{/other_user}",
"gists_url": "https://api.github.com/users/Dfenniak/gists{/gist_id}",
"starred_url": "https://api.github.com/users/Dfenniak/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/Dfenniak/subscriptions",
"organizations_url": "https://api.github.com/users/Dfenniak/orgs",
"repos_url": "https://api.github.com/users/Dfenniak/repos",
"events_url": "https://api.github.com/users/Dfenniak/events{/privacy}",
"received_events_url": "https://api.github.com/users/Dfenniak/received_events",
"type": "User",
"site_admin": false,
"contributions": 5
},
{
"login": "egonzo",
"id": 1641719,
"avatar_url": "https://avatars0.githubusercontent.com/u/1641719?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/egonzo",
"html_url": "https://github.com/egonzo",
"followers_url": "https://api.github.com/users/egonzo/followers",
"following_url": "https://api.github.com/users/egonzo/following{/other_user}",
"gists_url": "https://api.github.com/users/egonzo/gists{/gist_id}",
"starred_url": "https://api.github.com/users/egonzo/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/egonzo/subscriptions",
"organizations_url": "https://api.github.com/users/egonzo/orgs",
"repos_url": "https://api.github.com/users/egonzo/repos",
"events_url": "https://api.github.com/users/egonzo/events{/privacy}",
"received_events_url": "https://api.github.com/users/egonzo/received_events",
"type": "User",
"site_admin": false,
"contributions": 5
},
{
"login": "corylanou",
"id": 585100,
"avatar_url": "https://avatars3.githubusercontent.com/u/585100?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/corylanou",
"html_url": "https://github.com/corylanou",
"followers_url": "https://api.github.com/users/corylanou/followers",
"following_url": "https://api.github.com/users/corylanou/following{/other_user}",
"gists_url": "https://api.github.com/users/corylanou/gists{/gist_id}",
"starred_url": "https://api.github.com/users/corylanou/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/corylanou/subscriptions",
"organizations_url": "https://api.github.com/users/corylanou/orgs",
"repos_url": "https://api.github.com/users/corylanou/repos",
"events_url": "https://api.github.com/users/corylanou/events{/privacy}",
"received_events_url": "https://api.github.com/users/corylanou/received_events",
"type": "User",
"site_admin": false,
"contributions": 4
},
{
"login": "lkysow",
"id": 1034429,
"avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/lkysow",
"html_url": "https://github.com/lkysow",
"followers_url": "https://api.github.com/users/lkysow/followers",
"following_url": "https://api.github.com/users/lkysow/following{/other_user}",
"gists_url": "https://api.github.com/users/lkysow/gists{/gist_id}",
"starred_url": "https://api.github.com/users/lkysow/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/lkysow/subscriptions",
"organizations_url": "https://api.github.com/users/lkysow/orgs",
"repos_url": "https://api.github.com/users/lkysow/repos",
"events_url": "https://api.github.com/users/lkysow/events{/privacy}",
"received_events_url": "https://api.github.com/users/lkysow/received_events",
"type": "User",
"site_admin": false,
"contributions": 3
},
{
"login": "mvanholsteijn",
"id": 576619,
"avatar_url": "https://avatars3.githubusercontent.com/u/576619?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/mvanholsteijn",
"html_url": "https://github.com/mvanholsteijn",
"followers_url": "https://api.github.com/users/mvanholsteijn/followers",
"following_url": "https://api.github.com/users/mvanholsteijn/following{/other_user}",
"gists_url": "https://api.github.com/users/mvanholsteijn/gists{/gist_id}",
"starred_url": "https://api.github.com/users/mvanholsteijn/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/mvanholsteijn/subscriptions",
"organizations_url": "https://api.github.com/users/mvanholsteijn/orgs",
"repos_url": "https://api.github.com/users/mvanholsteijn/repos",
"events_url": "https://api.github.com/users/mvanholsteijn/events{/privacy}",
"received_events_url": "https://api.github.com/users/mvanholsteijn/received_events",
"type": "User",
"site_admin": false,
"contributions": 3
},
{
"login": "dayvson",
"id": 59447,
"avatar_url": "https://avatars3.githubusercontent.com/u/59447?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/dayvson",
"html_url": "https://github.com/dayvson",
"followers_url": "https://api.github.com/users/dayvson/followers",
"following_url": "https://api.github.com/users/dayvson/following{/other_user}",
"gists_url": "https://api.github.com/users/dayvson/gists{/gist_id}",
"starred_url": "https://api.github.com/users/dayvson/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/dayvson/subscriptions",
"organizations_url": "https://api.github.com/users/dayvson/orgs",
"repos_url": "https://api.github.com/users/dayvson/repos",
"events_url": "https://api.github.com/users/dayvson/events{/privacy}",
"received_events_url": "https://api.github.com/users/dayvson/received_events",
"type": "User",
"site_admin": false,
"contributions": 3
},
{
"login": "danicat",
"id": 5531172,
"avatar_url": "https://avatars2.githubusercontent.com/u/5531172?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/danicat",
"html_url": "https://github.com/danicat",
"followers_url": "https://api.github.com/users/danicat/followers",
"following_url": "https://api.github.com/users/danicat/following{/other_user}",
"gists_url": "https://api.github.com/users/danicat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/danicat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/danicat/subscriptions",
"organizations_url": "https://api.github.com/users/danicat/orgs",
"repos_url": "https://api.github.com/users/danicat/repos",
"events_url": "https://api.github.com/users/danicat/events{/privacy}",
"received_events_url": "https://api.github.com/users/danicat/received_events",
"type": "User",
"site_admin": false,
"contributions": 3
},
{
"login": "aarongreenlee",
"id": 264508,
"avatar_url": "https://avatars0.githubusercontent.com/u/264508?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/aarongreenlee",
"html_url": "https://github.com/aarongreenlee",
"followers_url": "https://api.github.com/users/aarongreenlee/followers",
"following_url": "https://api.github.com/users/aarongreenlee/following{/other_user}",
"gists_url": "https://api.github.com/users/aarongreenlee/gists{/gist_id}",
"starred_url": "https://api.github.com/users/aarongreenlee/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/aarongreenlee/subscriptions",
"organizations_url": "https://api.github.com/users/aarongreenlee/orgs",
"repos_url": "https://api.github.com/users/aarongreenlee/repos",
"events_url": "https://api.github.com/users/aarongreenlee/events{/privacy}",
"received_events_url": "https://api.github.com/users/aarongreenlee/received_events",
"type": "User",
"site_admin": false,
"contributions": 2
},
{
"login": "AlexeyKupershtokh",
"id": 499778,
"avatar_url": "https://avatars2.githubusercontent.com/u/499778?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/AlexeyKupershtokh",
"html_url": "https://github.com/AlexeyKupershtokh",
"followers_url": "https://api.github.com/users/AlexeyKupershtokh/followers",
"following_url": "https://api.github.com/users/AlexeyKupershtokh/following{/other_user}",
"gists_url": "https://api.github.com/users/AlexeyKupershtokh/gists{/gist_id}",
"starred_url": "https://api.github.com/users/AlexeyKupershtokh/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/AlexeyKupershtokh/subscriptions",
"organizations_url": "https://api.github.com/users/AlexeyKupershtokh/orgs",
"repos_url": "https://api.github.com/users/AlexeyKupershtokh/repos",
"events_url": "https://api.github.com/users/AlexeyKupershtokh/events{/privacy}",
"received_events_url": "https://api.github.com/users/AlexeyKupershtokh/received_events",
"type": "User",
"site_admin": false,
"contributions": 2
},
{
"login": "amankapoor",
"id": 8210816,
"avatar_url": "https://avatars2.githubusercontent.com/u/8210816?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/amankapoor",
"html_url": "https://github.com/amankapoor",
"followers_url": "https://api.github.com/users/amankapoor/followers",
"following_url": "https://api.github.com/users/amankapoor/following{/other_user}",
"gists_url": "https://api.github.com/users/amankapoor/gists{/gist_id}",
"starred_url": "https://api.github.com/users/amankapoor/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/amankapoor/subscriptions",
"organizations_url": "https://api.github.com/users/amankapoor/orgs",
"repos_url": "https://api.github.com/users/amankapoor/repos",
"events_url": "https://api.github.com/users/amankapoor/events{/privacy}",
"received_events_url": "https://api.github.com/users/amankapoor/received_events",
"type": "User",
"site_admin": false,
"contributions": 2
},
{
"login": "LipGlossary",
"id": 1580086,
"avatar_url": "https://avatars2.githubusercontent.com/u/1580086?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/LipGlossary",
"html_url": "https://github.com/LipGlossary",
"followers_url": "https://api.github.com/users/LipGlossary/followers",
"following_url": "https://api.github.com/users/LipGlossary/following{/other_user}",
"gists_url": "https://api.github.com/users/LipGlossary/gists{/gist_id}",
"starred_url": "https://api.github.com/users/LipGlossary/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/LipGlossary/subscriptions",
"organizations_url": "https://api.github.com/users/LipGlossary/orgs",
"repos_url": "https://api.github.com/users/LipGlossary/repos",
"events_url": "https://api.github.com/users/LipGlossary/events{/privacy}",
"received_events_url": "https://api.github.com/users/LipGlossary/received_events",
"type": "User",
"site_admin": false,
"contributions": 2
},
{
"login": "selva-oscura",
"id": 4088970,
"avatar_url": "https://avatars3.githubusercontent.com/u/4088970?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/selva-oscura",
"html_url": "https://github.com/selva-oscura",
"followers_url": "https://api.github.com/users/selva-oscura/followers",
"following_url": "https://api.github.com/users/selva-oscura/following{/other_user}",
"gists_url": "https://api.github.com/users/selva-oscura/gists{/gist_id}",
"starred_url": "https://api.github.com/users/selva-oscura/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/selva-oscura/subscriptions",
"organizations_url": "https://api.github.com/users/selva-oscura/orgs",
"repos_url": "https://api.github.com/users/selva-oscura/repos",
"events_url": "https://api.github.com/users/selva-oscura/events{/privacy}",
"received_events_url": "https://api.github.com/users/selva-oscura/received_events",
"type": "User",
"site_admin": false,
"contributions": 2
},
{
"login": "DDRBoxman",
"id": 207897,
"avatar_url": "https://avatars0.githubusercontent.com/u/207897?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/DDRBoxman",
"html_url": "https://github.com/DDRBoxman",
"followers_url": "https://api.github.com/users/DDRBoxman/followers",
"following_url": "https://api.github.com/users/DDRBoxman/following{/other_user}",
"gists_url": "https://api.github.com/users/DDRBoxman/gists{/gist_id}",
"starred_url": "https://api.github.com/users/DDRBoxman/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/DDRBoxman/subscriptions",
"organizations_url": "https://api.github.com/users/DDRBoxman/orgs",
"repos_url": "https://api.github.com/users/DDRBoxman/repos",
"events_url": "https://api.github.com/users/DDRBoxman/events{/privacy}",
"received_events_url": "https://api.github.com/users/DDRBoxman/received_events",
"type": "User",
"site_admin": false,
"contributions": 2
},
{
"login": "diwant",
"id": 521869,
"avatar_url": "https://avatars0.githubusercontent.com/u/521869?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/diwant",
"html_url": "https://github.com/diwant",
"followers_url": "https://api.github.com/users/diwant/followers",
"following_url": "https://api.github.com/users/diwant/following{/other_user}",
"gists_url": "https://api.github.com/users/diwant/gists{/gist_id}",
"starred_url": "https://api.github.com/users/diwant/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/diwant/subscriptions",
"organizations_url": "https://api.github.com/users/diwant/orgs",
"repos_url": "https://api.github.com/users/diwant/repos",
"events_url": "https://api.github.com/users/diwant/events{/privacy}",
"received_events_url": "https://api.github.com/users/diwant/received_events",
"type": "User",
"site_admin": false,
"contributions": 2
},
{
"login": "ibjhb",
"id": 265779,
"avatar_url": "https://avatars3.githubusercontent.com/u/265779?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/ibjhb",
"html_url": "https://github.com/ibjhb",
"followers_url": "https://api.github.com/users/ibjhb/followers",
"following_url": "https://api.github.com/users/ibjhb/following{/other_user}",
"gists_url": "https://api.github.com/users/ibjhb/gists{/gist_id}",
"starred_url": "https://api.github.com/users/ibjhb/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/ibjhb/subscriptions",
"organizations_url": "https://api.github.com/users/ibjhb/orgs",
"repos_url": "https://api.github.com/users/ibjhb/repos",
"events_url": "https://api.github.com/users/ibjhb/events{/privacy}",
"received_events_url": "https://api.github.com/users/ibjhb/received_events",
"type": "User",
"site_admin": false,
"contributions": 2
},
{
"login": "nathany",
"id": 4566,
"avatar_url": "https://avatars1.githubusercontent.com/u/4566?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/nathany",
"html_url": "https://github.com/nathany",
"followers_url": "https://api.github.com/users/nathany/followers",
"following_url": "https://api.github.com/users/nathany/following{/other_user}",
"gists_url": "https://api.github.com/users/nathany/gists{/gist_id}",
"starred_url": "https://api.github.com/users/nathany/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/nathany/subscriptions",
"organizations_url": "https://api.github.com/users/nathany/orgs",
"repos_url": "https://api.github.com/users/nathany/repos",
"events_url": "https://api.github.com/users/nathany/events{/privacy}",
"received_events_url": "https://api.github.com/users/nathany/received_events",
"type": "User",
"site_admin": false,
"contributions": 2
},
{
"login": "vaskoz",
"id": 21339,
"avatar_url": "https://avatars0.githubusercontent.com/u/21339?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/vaskoz",
"html_url": "https://github.com/vaskoz",
"followers_url": "https://api.github.com/users/vaskoz/followers",
"following_url": "https://api.github.com/users/vaskoz/following{/other_user}",
"gists_url": "https://api.github.com/users/vaskoz/gists{/gist_id}",
"starred_url": "https://api.github.com/users/vaskoz/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/vaskoz/subscriptions",
"organizations_url": "https://api.github.com/users/vaskoz/orgs",
"repos_url": "https://api.github.com/users/vaskoz/repos",
"events_url": "https://api.github.com/users/vaskoz/events{/privacy}",
"received_events_url": "https://api.github.com/users/vaskoz/received_events",
"type": "User",
"site_admin": false,
"contributions": 2
},
{
"login": "williamhanisch",
"id": 13632991,
"avatar_url": "https://avatars2.githubusercontent.com/u/13632991?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/williamhanisch",
"html_url": "https://github.com/williamhanisch",
"followers_url": "https://api.github.com/users/williamhanisch/followers",
"following_url": "https://api.github.com/users/williamhanisch/following{/other_user}",
"gists_url": "https://api.github.com/users/williamhanisch/gists{/gist_id}",
"starred_url": "https://api.github.com/users/williamhanisch/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/williamhanisch/subscriptions",
"organizations_url": "https://api.github.com/users/williamhanisch/orgs",
"repos_url": "https://api.github.com/users/williamhanisch/repos",
"events_url": "https://api.github.com/users/williamhanisch/events{/privacy}",
"received_events_url": "https://api.github.com/users/williamhanisch/received_events",
"type": "User",
"site_admin": false,
"contributions": 2
},
{
"login": "forevermatt",
"id": 6233204,
"avatar_url": "https://avatars2.githubusercontent.com/u/6233204?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/forevermatt",
"html_url": "https://github.com/forevermatt",
"followers_url": "https://api.github.com/users/forevermatt/followers",
"following_url": "https://api.github.com/users/forevermatt/following{/other_user}",
"gists_url": "https://api.github.com/users/forevermatt/gists{/gist_id}",
"starred_url": "https://api.github.com/users/forevermatt/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/forevermatt/subscriptions",
"organizations_url": "https://api.github.com/users/forevermatt/orgs",
"repos_url": "https://api.github.com/users/forevermatt/repos",
"events_url": "https://api.github.com/users/forevermatt/events{/privacy}",
"received_events_url": "https://api.github.com/users/forevermatt/received_events",
"type": "User",
"site_admin": false,
"contributions": 2
}
]` | topics/go/exercises/contributors/githubmock/contributors.go | 0.508056 | 0.454412 | contributors.go | starcoder |
package httpassert
import (
	"bytes"
	"encoding/json"
	"fmt"
	"reflect"
	"sort"
	"strconv"
	"testing"
	"time"
)
// renderJSONError reports a test failure showing the expected value
// alongside the JSON payload that was actually received.
func renderJSONError(t *testing.T, expected interface{}, b []byte) {
	const format = "error: unexpected value.\nwant response:\n%s\ngot json:\n%s\n"
	t.Errorf(format, renderInterface(expected, 0), renderJSON(b))
}
// renderInterface renders i as "(type) value" at nesting depth n.
func renderInterface(i interface{}, n int) string {
	typeStr := renderType(reflect.TypeOf(i))
	valueStr := renderValue(reflect.ValueOf(i), n)
	return fmt.Sprintf("(%s) %s", typeStr, valueStr)
}
func renderType(typ reflect.Type) string {
if typ == nil {
return "interface{}"
}
var (
name string
kid = typ.Kind()
)
switch kid {
case reflect.Ptr:
return fmt.Sprintf("*%s", renderType(typ.Elem()))
case reflect.Struct:
b := new(bytes.Buffer)
if len(typ.PkgPath()) > 0 {
b.WriteString(typ.PkgPath())
b.WriteByte('.')
}
name := typ.Name()
if len(name) == 0 {
name = "anonymous"
}
b.WriteString(name)
return b.String()
case reflect.Array, reflect.Slice:
return fmt.Sprintf("[]%s", renderType(typ.Elem()))
case reflect.Map:
return fmt.Sprintf("map[%s]%s", renderType(typ.Key()), renderType(typ.Elem()))
case reflect.Chan:
return fmt.Sprintf("chan %s", renderType(typ.Elem()))
default:
name = kid.String()
}
return name
}
// renderValue renders v as a human-readable literal at nesting depth n.
// Scalars are formatted via strconv; pointers/interfaces are unwrapped
// and rendered recursively; composites delegate to the dedicated helpers
// below. Kinds with no case (e.g. Func, Chan, UnsafePointer) render as "".
func renderValue(v reflect.Value, n int) string {
	switch v.Kind() {
	case reflect.Invalid:
		// reflect.ValueOf(nil) produces an Invalid value.
		return "nil"
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return strconv.FormatInt(v.Int(), 10)
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return strconv.FormatUint(v.Uint(), 10)
	case reflect.Bool:
		return strconv.FormatBool(v.Bool())
	case reflect.Float32:
		return strconv.FormatFloat(v.Float(), 'f', -1, 32)
	case reflect.Float64:
		return strconv.FormatFloat(v.Float(), 'f', -1, 64)
	case reflect.Complex64, reflect.Complex128:
		return renderComplex(v)
	case reflect.String:
		return strconv.Quote(v.String())
	case reflect.Ptr, reflect.Interface:
		// Unwrap and render the pointee / dynamic value.
		return renderValue(v.Elem(), n)
	case reflect.Array, reflect.Slice:
		return renderArrayLikeValue(v, n)
	case reflect.Struct:
		return renderStruct(v, n)
	case reflect.Map:
		return renderMap(v, n)
	}
	return ""
}
func renderComplex(v reflect.Value) string {
k := v.Kind()
c := v.Complex()
b := 64
if k == reflect.Complex64 {
b = 32
}
r := strconv.FormatFloat(real(c), 'f', -1, b)
i := strconv.FormatFloat(imag(c), 'f', -1, b)
if i[0] != '+' && i[0] != '-' {
i = "+" + i
}
return fmt.Sprintf("(%s%si)", r, i)
}
// renderMap renders a map value as a multi-line "{ key: (type) value, }"
// literal at nesting depth n. Keys are rendered without a type
// annotation; values are rendered through renderInterface and so include
// one.
// NOTE(review): Go map iteration order is randomized, so the key order in
// the output differs between runs — acceptable for failure display, but
// the text is not stable for diffing.
func renderMap(v reflect.Value, n int) string {
	buf := new(bytes.Buffer)
	buf.WriteByte('{')
	keyIter := v.MapRange()
	for keyIter.Next() {
		key := keyIter.Key()
		val := keyIter.Value()
		buf.WriteByte('\n')
		buf.WriteString(nindent(renderValue(key, n), indentStr, n+1))
		buf.WriteByte(':')
		buf.WriteByte(' ')
		buf.WriteString(renderInterface(val.Interface(), n+1))
		buf.WriteByte(',')
	}
	buf.WriteByte('\n')
	buf.WriteString(nindent("}", indentStr, n))
	return buf.String()
}
// renderStruct renders a struct's exported fields as a multi-line
// "{ Field: (type) value, }" literal at depth n. time.Time values are
// special-cased to their String() form instead of field-by-field output.
func renderStruct(v reflect.Value, n int) string {
	buf := new(bytes.Buffer)
	typ := v.Type()
	if typ == timeType {
		buf.WriteString(v.Interface().(time.Time).String())
		return buf.String()
	}
	buf.WriteByte('{')
	for i := 0; i < typ.NumField(); i++ {
		f := typ.Field(i)
		// A non-empty PkgPath marks an unexported field; skip it —
		// calling Interface() on such a field would panic.
		if len(f.PkgPath) > 0 {
			continue
		}
		buf.WriteByte('\n')
		buf.WriteString(nindent(f.Name, indentStr, n+1))
		buf.WriteByte(':')
		buf.WriteByte(' ')
		buf.WriteString(renderInterface(v.Field(i).Interface(), n+1))
		buf.WriteByte(',')
	}
	buf.WriteByte('\n')
	buf.WriteString(nindent("}", indentStr, n))
	return buf.String()
}
// renderArrayLikeValue renders an array or slice at depth n. Composite
// elements (maps, structs, arrays, slices) are placed one per line;
// scalar elements are joined inline with ", ".
func renderArrayLikeValue(v reflect.Value, n int) string {
	buf := new(bytes.Buffer)
	// truncateN is how many trailing bytes to strip after the loop:
	// 2 when the last element was a scalar (drops its trailing ", "),
	// 0 when it was a composite (its trailing ",\n" is kept).
	truncateN := 0
	buf.WriteByte('[')
	for i := 0; i < v.Len(); i++ {
		item := v.Index(i)
		// Unwrap pointer elements down to their pointees.
		for item.Kind() == reflect.Ptr {
			item = item.Elem()
		}
		if isCompositeStructKind(item.Kind()) {
			buf.WriteByte('\n')
			buf.WriteString(nindent(renderValue(item, 0), indentStr, n+1))
			buf.WriteByte(',')
			truncateN = 0
			// The last item
			if i == v.Len()-1 {
				buf.WriteByte('\n')
			}
		} else {
			buf.WriteString(renderValue(item, 0))
			buf.WriteString(", ")
			// Remove last ",\n" string
			truncateN = 2
		}
	}
	buf.Truncate(buf.Len() - truncateN)
	buf.WriteString(nindent("]", indentStr, n))
	return buf.String()
}
// renderJSON pretty-prints the JSON body b for failure output.
//
// Fixed: the previous version panicked when b was not valid JSON, which
// replaced the entire test failure message with a panic. Since this
// function only formats the "got" side of an assertion failure, a
// malformed body now falls back to the raw bytes so the caller still
// sees what the server actually returned.
func renderJSON(b []byte) string {
	buf := new(bytes.Buffer)
	if err := json.Indent(buf, b, "", indentStr); err != nil {
		return string(b)
	}
	return buf.String()
}
func isCompositeStructKind(k reflect.Kind) bool {
return k == reflect.Map ||
k == reflect.Struct ||
k == reflect.Array ||
k == reflect.Slice
} | render_json_error.go | 0.587115 | 0.422683 | render_json_error.go | starcoder |
package color
// XTerm256 is a 256 color palette.
//
// Entries follow the standard xterm-256 layout:
//   - 0-15:    the 16 basic ANSI colors
//   - 16-231:  a 6x6x6 RGB cube over channel values {0, 95, 135, 175, 215, 255}
//   - 232-255: a 24-step grayscale ramp (8, 18, ..., 238)
//
// Labels are the conventional xterm color names; several names repeat
// because xterm reuses them for nearby cube entries.
func XTerm256() *IndexedPalette {
	p := NewIndexedPalette()
	p.add(
		// 0-15: basic ANSI colors.
		index{rgb: MakeRGB(0, 0, 0), labels: []string{"Black"}},
		index{rgb: MakeRGB(128, 0, 0), labels: []string{"Maroon"}},
		index{rgb: MakeRGB(0, 128, 0), labels: []string{"Green"}},
		index{rgb: MakeRGB(128, 128, 0), labels: []string{"Olive"}},
		index{rgb: MakeRGB(0, 0, 128), labels: []string{"Navy"}},
		index{rgb: MakeRGB(128, 0, 128), labels: []string{"Purple"}},
		index{rgb: MakeRGB(0, 128, 128), labels: []string{"Teal"}},
		index{rgb: MakeRGB(192, 192, 192), labels: []string{"Silver"}},
		index{rgb: MakeRGB(128, 128, 128), labels: []string{"Grey"}},
		index{rgb: MakeRGB(255, 0, 0), labels: []string{"Red"}},
		index{rgb: MakeRGB(0, 255, 0), labels: []string{"Lime"}},
		index{rgb: MakeRGB(255, 255, 0), labels: []string{"Yellow"}},
		index{rgb: MakeRGB(0, 0, 255), labels: []string{"Blue"}},
		index{rgb: MakeRGB(255, 0, 255), labels: []string{"Fuchsia"}},
		index{rgb: MakeRGB(0, 255, 255), labels: []string{"Aqua"}},
		index{rgb: MakeRGB(255, 255, 255), labels: []string{"White"}},
		// 16-231: 6x6x6 color cube.
		index{rgb: MakeRGB(0, 0, 0), labels: []string{"Grey0"}},
		index{rgb: MakeRGB(0, 0, 95), labels: []string{"NavyBlue"}},
		index{rgb: MakeRGB(0, 0, 135), labels: []string{"DarkBlue"}},
		index{rgb: MakeRGB(0, 0, 175), labels: []string{"Blue3"}},
		index{rgb: MakeRGB(0, 0, 215), labels: []string{"Blue3"}},
		index{rgb: MakeRGB(0, 0, 255), labels: []string{"Blue1"}},
		index{rgb: MakeRGB(0, 95, 0), labels: []string{"DarkGreen"}},
		index{rgb: MakeRGB(0, 95, 95), labels: []string{"DeepSkyBlue4"}},
		index{rgb: MakeRGB(0, 95, 135), labels: []string{"DeepSkyBlue4"}},
		index{rgb: MakeRGB(0, 95, 175), labels: []string{"DeepSkyBlue4"}},
		index{rgb: MakeRGB(0, 95, 215), labels: []string{"DodgerBlue3"}},
		index{rgb: MakeRGB(0, 95, 255), labels: []string{"DodgerBlue2"}},
		index{rgb: MakeRGB(0, 135, 0), labels: []string{"Green4"}},
		index{rgb: MakeRGB(0, 135, 95), labels: []string{"SpringGreen4"}},
		index{rgb: MakeRGB(0, 135, 135), labels: []string{"Turquoise4"}},
		index{rgb: MakeRGB(0, 135, 175), labels: []string{"DeepSkyBlue3"}},
		index{rgb: MakeRGB(0, 135, 215), labels: []string{"DeepSkyBlue3"}},
		index{rgb: MakeRGB(0, 135, 255), labels: []string{"DodgerBlue1"}},
		index{rgb: MakeRGB(0, 175, 0), labels: []string{"Green3"}},
		index{rgb: MakeRGB(0, 175, 95), labels: []string{"SpringGreen3"}},
		index{rgb: MakeRGB(0, 175, 135), labels: []string{"DarkCyan"}},
		index{rgb: MakeRGB(0, 175, 175), labels: []string{"LightSeaGreen"}},
		index{rgb: MakeRGB(0, 175, 215), labels: []string{"DeepSkyBlue2"}},
		index{rgb: MakeRGB(0, 175, 255), labels: []string{"DeepSkyBlue1"}},
		index{rgb: MakeRGB(0, 215, 0), labels: []string{"Green3"}},
		index{rgb: MakeRGB(0, 215, 95), labels: []string{"SpringGreen3"}},
		index{rgb: MakeRGB(0, 215, 135), labels: []string{"SpringGreen2"}},
		index{rgb: MakeRGB(0, 215, 175), labels: []string{"Cyan3"}},
		index{rgb: MakeRGB(0, 215, 215), labels: []string{"DarkTurquoise"}},
		index{rgb: MakeRGB(0, 215, 255), labels: []string{"Turquoise2"}},
		index{rgb: MakeRGB(0, 255, 0), labels: []string{"Green1"}},
		index{rgb: MakeRGB(0, 255, 95), labels: []string{"SpringGreen2"}},
		index{rgb: MakeRGB(0, 255, 135), labels: []string{"SpringGreen1"}},
		index{rgb: MakeRGB(0, 255, 175), labels: []string{"MediumSpringGreen"}},
		index{rgb: MakeRGB(0, 255, 215), labels: []string{"Cyan2"}},
		index{rgb: MakeRGB(0, 255, 255), labels: []string{"Cyan1"}},
		index{rgb: MakeRGB(95, 0, 0), labels: []string{"DarkRed"}},
		index{rgb: MakeRGB(95, 0, 95), labels: []string{"DeepPink4"}},
		index{rgb: MakeRGB(95, 0, 135), labels: []string{"Purple4"}},
		index{rgb: MakeRGB(95, 0, 175), labels: []string{"Purple4"}},
		index{rgb: MakeRGB(95, 0, 215), labels: []string{"Purple3"}},
		index{rgb: MakeRGB(95, 0, 255), labels: []string{"BlueViolet"}},
		index{rgb: MakeRGB(95, 95, 0), labels: []string{"Orange4"}},
		index{rgb: MakeRGB(95, 95, 95), labels: []string{"Grey37"}},
		index{rgb: MakeRGB(95, 95, 135), labels: []string{"MediumPurple4"}},
		index{rgb: MakeRGB(95, 95, 175), labels: []string{"SlateBlue3"}},
		index{rgb: MakeRGB(95, 95, 215), labels: []string{"SlateBlue3"}},
		index{rgb: MakeRGB(95, 95, 255), labels: []string{"RoyalBlue1"}},
		index{rgb: MakeRGB(95, 135, 0), labels: []string{"Chartreuse4"}},
		index{rgb: MakeRGB(95, 135, 95), labels: []string{"DarkSeaGreen4"}},
		index{rgb: MakeRGB(95, 135, 135), labels: []string{"PaleTurquoise4"}},
		index{rgb: MakeRGB(95, 135, 175), labels: []string{"SteelBlue"}},
		index{rgb: MakeRGB(95, 135, 215), labels: []string{"SteelBlue3"}},
		index{rgb: MakeRGB(95, 135, 255), labels: []string{"CornflowerBlue"}},
		index{rgb: MakeRGB(95, 175, 0), labels: []string{"Chartreuse3"}},
		index{rgb: MakeRGB(95, 175, 95), labels: []string{"DarkSeaGreen4"}},
		index{rgb: MakeRGB(95, 175, 135), labels: []string{"CadetBlue"}},
		index{rgb: MakeRGB(95, 175, 175), labels: []string{"CadetBlue"}},
		index{rgb: MakeRGB(95, 175, 215), labels: []string{"SkyBlue3"}},
		index{rgb: MakeRGB(95, 175, 255), labels: []string{"SteelBlue1"}},
		index{rgb: MakeRGB(95, 215, 0), labels: []string{"Chartreuse3"}},
		index{rgb: MakeRGB(95, 215, 95), labels: []string{"PaleGreen3"}},
		index{rgb: MakeRGB(95, 215, 135), labels: []string{"SeaGreen3"}},
		index{rgb: MakeRGB(95, 215, 175), labels: []string{"Aquamarine3"}},
		index{rgb: MakeRGB(95, 215, 215), labels: []string{"MediumTurquoise"}},
		index{rgb: MakeRGB(95, 215, 255), labels: []string{"SteelBlue1"}},
		index{rgb: MakeRGB(95, 255, 0), labels: []string{"Chartreuse2"}},
		index{rgb: MakeRGB(95, 255, 95), labels: []string{"SeaGreen2"}},
		index{rgb: MakeRGB(95, 255, 135), labels: []string{"SeaGreen1"}},
		index{rgb: MakeRGB(95, 255, 175), labels: []string{"SeaGreen1"}},
		index{rgb: MakeRGB(95, 255, 215), labels: []string{"Aquamarine1"}},
		index{rgb: MakeRGB(95, 255, 255), labels: []string{"DarkSlateGray2"}},
		index{rgb: MakeRGB(135, 0, 0), labels: []string{"DarkRed"}},
		index{rgb: MakeRGB(135, 0, 95), labels: []string{"DeepPink4"}},
		index{rgb: MakeRGB(135, 0, 135), labels: []string{"DarkMagenta"}},
		index{rgb: MakeRGB(135, 0, 175), labels: []string{"DarkMagenta"}},
		index{rgb: MakeRGB(135, 0, 215), labels: []string{"DarkViolet"}},
		index{rgb: MakeRGB(135, 0, 255), labels: []string{"Purple"}},
		index{rgb: MakeRGB(135, 95, 0), labels: []string{"Orange4"}},
		index{rgb: MakeRGB(135, 95, 95), labels: []string{"LightPink4"}},
		index{rgb: MakeRGB(135, 95, 135), labels: []string{"Plum4"}},
		index{rgb: MakeRGB(135, 95, 175), labels: []string{"MediumPurple3"}},
		index{rgb: MakeRGB(135, 95, 215), labels: []string{"MediumPurple3"}},
		index{rgb: MakeRGB(135, 95, 255), labels: []string{"SlateBlue1"}},
		index{rgb: MakeRGB(135, 135, 0), labels: []string{"Yellow4"}},
		index{rgb: MakeRGB(135, 135, 95), labels: []string{"Wheat4"}},
		index{rgb: MakeRGB(135, 135, 135), labels: []string{"Grey53"}},
		index{rgb: MakeRGB(135, 135, 175), labels: []string{"LightSlateGrey"}},
		index{rgb: MakeRGB(135, 135, 215), labels: []string{"MediumPurple"}},
		index{rgb: MakeRGB(135, 135, 255), labels: []string{"LightSlateBlue"}},
		index{rgb: MakeRGB(135, 175, 0), labels: []string{"Yellow4"}},
		index{rgb: MakeRGB(135, 175, 95), labels: []string{"DarkOliveGreen3"}},
		index{rgb: MakeRGB(135, 175, 135), labels: []string{"DarkSeaGreen"}},
		index{rgb: MakeRGB(135, 175, 175), labels: []string{"LightSkyBlue3"}},
		index{rgb: MakeRGB(135, 175, 215), labels: []string{"LightSkyBlue3"}},
		index{rgb: MakeRGB(135, 175, 255), labels: []string{"SkyBlue2"}},
		index{rgb: MakeRGB(135, 215, 0), labels: []string{"Chartreuse2"}},
		index{rgb: MakeRGB(135, 215, 95), labels: []string{"DarkOliveGreen3"}},
		index{rgb: MakeRGB(135, 215, 135), labels: []string{"PaleGreen3"}},
		index{rgb: MakeRGB(135, 215, 175), labels: []string{"DarkSeaGreen3"}},
		index{rgb: MakeRGB(135, 215, 215), labels: []string{"DarkSlateGray3"}},
		index{rgb: MakeRGB(135, 215, 255), labels: []string{"SkyBlue1"}},
		index{rgb: MakeRGB(135, 255, 0), labels: []string{"Chartreuse1"}},
		index{rgb: MakeRGB(135, 255, 95), labels: []string{"LightGreen"}},
		index{rgb: MakeRGB(135, 255, 135), labels: []string{"LightGreen"}},
		index{rgb: MakeRGB(135, 255, 175), labels: []string{"PaleGreen1"}},
		index{rgb: MakeRGB(135, 255, 215), labels: []string{"Aquamarine1"}},
		index{rgb: MakeRGB(135, 255, 255), labels: []string{"DarkSlateGray1"}},
		index{rgb: MakeRGB(175, 0, 0), labels: []string{"Red3"}},
		index{rgb: MakeRGB(175, 0, 95), labels: []string{"DeepPink4"}},
		index{rgb: MakeRGB(175, 0, 135), labels: []string{"MediumVioletRed"}},
		index{rgb: MakeRGB(175, 0, 175), labels: []string{"Magenta3"}},
		index{rgb: MakeRGB(175, 0, 215), labels: []string{"DarkViolet"}},
		index{rgb: MakeRGB(175, 0, 255), labels: []string{"Purple"}},
		index{rgb: MakeRGB(175, 95, 0), labels: []string{"DarkOrange3"}},
		index{rgb: MakeRGB(175, 95, 95), labels: []string{"IndianRed"}},
		index{rgb: MakeRGB(175, 95, 135), labels: []string{"HotPink3"}},
		index{rgb: MakeRGB(175, 95, 175), labels: []string{"MediumOrchid3"}},
		index{rgb: MakeRGB(175, 95, 215), labels: []string{"MediumOrchid"}},
		index{rgb: MakeRGB(175, 95, 255), labels: []string{"MediumPurple2"}},
		index{rgb: MakeRGB(175, 135, 0), labels: []string{"DarkGoldenrod"}},
		index{rgb: MakeRGB(175, 135, 95), labels: []string{"LightSalmon3"}},
		index{rgb: MakeRGB(175, 135, 135), labels: []string{"RosyBrown"}},
		index{rgb: MakeRGB(175, 135, 175), labels: []string{"Grey63"}},
		index{rgb: MakeRGB(175, 135, 215), labels: []string{"MediumPurple2"}},
		index{rgb: MakeRGB(175, 135, 255), labels: []string{"MediumPurple1"}},
		index{rgb: MakeRGB(175, 175, 0), labels: []string{"Gold3"}},
		index{rgb: MakeRGB(175, 175, 95), labels: []string{"DarkKhaki"}},
		index{rgb: MakeRGB(175, 175, 135), labels: []string{"NavajoWhite3"}},
		index{rgb: MakeRGB(175, 175, 175), labels: []string{"Grey69"}},
		index{rgb: MakeRGB(175, 175, 215), labels: []string{"LightSteelBlue3"}},
		index{rgb: MakeRGB(175, 175, 255), labels: []string{"LightSteelBlue"}},
		index{rgb: MakeRGB(175, 215, 0), labels: []string{"Yellow3"}},
		index{rgb: MakeRGB(175, 215, 95), labels: []string{"DarkOliveGreen3"}},
		index{rgb: MakeRGB(175, 215, 135), labels: []string{"DarkSeaGreen3"}},
		index{rgb: MakeRGB(175, 215, 175), labels: []string{"DarkSeaGreen2"}},
		index{rgb: MakeRGB(175, 215, 215), labels: []string{"LightCyan3"}},
		index{rgb: MakeRGB(175, 215, 255), labels: []string{"LightSkyBlue1"}},
		index{rgb: MakeRGB(175, 255, 0), labels: []string{"GreenYellow"}},
		index{rgb: MakeRGB(175, 255, 95), labels: []string{"DarkOliveGreen2"}},
		index{rgb: MakeRGB(175, 255, 135), labels: []string{"PaleGreen1"}},
		index{rgb: MakeRGB(175, 255, 175), labels: []string{"DarkSeaGreen2"}},
		index{rgb: MakeRGB(175, 255, 215), labels: []string{"DarkSeaGreen1"}},
		index{rgb: MakeRGB(175, 255, 255), labels: []string{"PaleTurquoise1"}},
		index{rgb: MakeRGB(215, 0, 0), labels: []string{"Red3"}},
		index{rgb: MakeRGB(215, 0, 95), labels: []string{"DeepPink3"}},
		index{rgb: MakeRGB(215, 0, 135), labels: []string{"DeepPink3"}},
		index{rgb: MakeRGB(215, 0, 175), labels: []string{"Magenta3"}},
		index{rgb: MakeRGB(215, 0, 215), labels: []string{"Magenta3"}},
		index{rgb: MakeRGB(215, 0, 255), labels: []string{"Magenta2"}},
		index{rgb: MakeRGB(215, 95, 0), labels: []string{"DarkOrange3"}},
		index{rgb: MakeRGB(215, 95, 95), labels: []string{"IndianRed"}},
		index{rgb: MakeRGB(215, 95, 135), labels: []string{"HotPink3"}},
		index{rgb: MakeRGB(215, 95, 175), labels: []string{"HotPink2"}},
		index{rgb: MakeRGB(215, 95, 215), labels: []string{"Orchid"}},
		index{rgb: MakeRGB(215, 95, 255), labels: []string{"MediumOrchid1"}},
		index{rgb: MakeRGB(215, 135, 0), labels: []string{"Orange3"}},
		index{rgb: MakeRGB(215, 135, 95), labels: []string{"LightSalmon3"}},
		index{rgb: MakeRGB(215, 135, 135), labels: []string{"LightPink3"}},
		index{rgb: MakeRGB(215, 135, 175), labels: []string{"Pink3"}},
		index{rgb: MakeRGB(215, 135, 215), labels: []string{"Plum3"}},
		index{rgb: MakeRGB(215, 135, 255), labels: []string{"Violet"}},
		index{rgb: MakeRGB(215, 175, 0), labels: []string{"Gold3"}},
		index{rgb: MakeRGB(215, 175, 95), labels: []string{"LightGoldenrod3"}},
		index{rgb: MakeRGB(215, 175, 135), labels: []string{"Tan"}},
		index{rgb: MakeRGB(215, 175, 175), labels: []string{"MistyRose3"}},
		index{rgb: MakeRGB(215, 175, 215), labels: []string{"Thistle3"}},
		index{rgb: MakeRGB(215, 175, 255), labels: []string{"Plum2"}},
		index{rgb: MakeRGB(215, 215, 0), labels: []string{"Yellow3"}},
		index{rgb: MakeRGB(215, 215, 95), labels: []string{"Khaki3"}},
		index{rgb: MakeRGB(215, 215, 135), labels: []string{"LightGoldenrod2"}},
		index{rgb: MakeRGB(215, 215, 175), labels: []string{"LightYellow3"}},
		index{rgb: MakeRGB(215, 215, 215), labels: []string{"Grey84"}},
		index{rgb: MakeRGB(215, 215, 255), labels: []string{"LightSteelBlue1"}},
		index{rgb: MakeRGB(215, 255, 0), labels: []string{"Yellow2"}},
		index{rgb: MakeRGB(215, 255, 95), labels: []string{"DarkOliveGreen1"}},
		index{rgb: MakeRGB(215, 255, 135), labels: []string{"DarkOliveGreen1"}},
		index{rgb: MakeRGB(215, 255, 175), labels: []string{"DarkSeaGreen1"}},
		index{rgb: MakeRGB(215, 255, 215), labels: []string{"Honeydew2"}},
		index{rgb: MakeRGB(215, 255, 255), labels: []string{"LightCyan1"}},
		index{rgb: MakeRGB(255, 0, 0), labels: []string{"Red1"}},
		index{rgb: MakeRGB(255, 0, 95), labels: []string{"DeepPink2"}},
		index{rgb: MakeRGB(255, 0, 135), labels: []string{"DeepPink1"}},
		index{rgb: MakeRGB(255, 0, 175), labels: []string{"DeepPink1"}},
		index{rgb: MakeRGB(255, 0, 215), labels: []string{"Magenta2"}},
		index{rgb: MakeRGB(255, 0, 255), labels: []string{"Magenta1"}},
		index{rgb: MakeRGB(255, 95, 0), labels: []string{"OrangeRed1"}},
		index{rgb: MakeRGB(255, 95, 95), labels: []string{"IndianRed1"}},
		index{rgb: MakeRGB(255, 95, 135), labels: []string{"IndianRed1"}},
		index{rgb: MakeRGB(255, 95, 175), labels: []string{"HotPink"}},
		index{rgb: MakeRGB(255, 95, 215), labels: []string{"HotPink"}},
		index{rgb: MakeRGB(255, 95, 255), labels: []string{"MediumOrchid1"}},
		index{rgb: MakeRGB(255, 135, 0), labels: []string{"DarkOrange"}},
		index{rgb: MakeRGB(255, 135, 95), labels: []string{"Salmon1"}},
		index{rgb: MakeRGB(255, 135, 135), labels: []string{"LightCoral"}},
		index{rgb: MakeRGB(255, 135, 175), labels: []string{"PaleVioletRed1"}},
		index{rgb: MakeRGB(255, 135, 215), labels: []string{"Orchid2"}},
		index{rgb: MakeRGB(255, 135, 255), labels: []string{"Orchid1"}},
		index{rgb: MakeRGB(255, 175, 0), labels: []string{"Orange1"}},
		index{rgb: MakeRGB(255, 175, 95), labels: []string{"SandyBrown"}},
		index{rgb: MakeRGB(255, 175, 135), labels: []string{"LightSalmon1"}},
		index{rgb: MakeRGB(255, 175, 175), labels: []string{"LightPink1"}},
		index{rgb: MakeRGB(255, 175, 215), labels: []string{"Pink1"}},
		index{rgb: MakeRGB(255, 175, 255), labels: []string{"Plum1"}},
		index{rgb: MakeRGB(255, 215, 0), labels: []string{"Gold1"}},
		index{rgb: MakeRGB(255, 215, 95), labels: []string{"LightGoldenrod2"}},
		index{rgb: MakeRGB(255, 215, 135), labels: []string{"LightGoldenrod2"}},
		index{rgb: MakeRGB(255, 215, 175), labels: []string{"NavajoWhite1"}},
		index{rgb: MakeRGB(255, 215, 215), labels: []string{"MistyRose1"}},
		index{rgb: MakeRGB(255, 215, 255), labels: []string{"Thistle1"}},
		index{rgb: MakeRGB(255, 255, 0), labels: []string{"Yellow1"}},
		index{rgb: MakeRGB(255, 255, 95), labels: []string{"LightGoldenrod1"}},
		index{rgb: MakeRGB(255, 255, 135), labels: []string{"Khaki1"}},
		index{rgb: MakeRGB(255, 255, 175), labels: []string{"Wheat1"}},
		index{rgb: MakeRGB(255, 255, 215), labels: []string{"Cornsilk1"}},
		index{rgb: MakeRGB(255, 255, 255), labels: []string{"Grey100"}},
		// 232-255: grayscale ramp.
		index{rgb: MakeRGB(8, 8, 8), labels: []string{"Grey3"}},
		index{rgb: MakeRGB(18, 18, 18), labels: []string{"Grey7"}},
		index{rgb: MakeRGB(28, 28, 28), labels: []string{"Grey11"}},
		index{rgb: MakeRGB(38, 38, 38), labels: []string{"Grey15"}},
		index{rgb: MakeRGB(48, 48, 48), labels: []string{"Grey19"}},
		index{rgb: MakeRGB(58, 58, 58), labels: []string{"Grey23"}},
		index{rgb: MakeRGB(68, 68, 68), labels: []string{"Grey27"}},
		index{rgb: MakeRGB(78, 78, 78), labels: []string{"Grey30"}},
		index{rgb: MakeRGB(88, 88, 88), labels: []string{"Grey35"}},
		index{rgb: MakeRGB(98, 98, 98), labels: []string{"Grey39"}},
		index{rgb: MakeRGB(108, 108, 108), labels: []string{"Grey42"}},
		index{rgb: MakeRGB(118, 118, 118), labels: []string{"Grey46"}},
		index{rgb: MakeRGB(128, 128, 128), labels: []string{"Grey50"}},
		index{rgb: MakeRGB(138, 138, 138), labels: []string{"Grey54"}},
		index{rgb: MakeRGB(148, 148, 148), labels: []string{"Grey58"}},
		index{rgb: MakeRGB(158, 158, 158), labels: []string{"Grey62"}},
		index{rgb: MakeRGB(168, 168, 168), labels: []string{"Grey66"}},
		index{rgb: MakeRGB(178, 178, 178), labels: []string{"Grey70"}},
		index{rgb: MakeRGB(188, 188, 188), labels: []string{"Grey74"}},
		index{rgb: MakeRGB(198, 198, 198), labels: []string{"Grey78"}},
		index{rgb: MakeRGB(208, 208, 208), labels: []string{"Grey82"}},
		index{rgb: MakeRGB(218, 218, 218), labels: []string{"Grey85"}},
		index{rgb: MakeRGB(228, 228, 228), labels: []string{"Grey89"}},
		index{rgb: MakeRGB(238, 238, 238), labels: []string{"Grey93"}},
	)
	return p
}
package substring
import (
"bytes"
"regexp"
"github.com/toqueteos/trie"
)
// BytesMatcher matches byte slices against a fixed pattern.
//
// Match reports whether b satisfies the matcher. MatchIndex returns an
// end offset related to the match, or -1 when there is no match; the
// exact offset semantics vary per implementation (see the concrete
// matcher types below).
type BytesMatcher interface {
	Match(b []byte) bool
	MatchIndex(b []byte) int
}
// regexpBytes adapts a compiled regular expression to BytesMatcher.
type regexpBytes struct{ re *regexp.Regexp }

// BytesRegexp compiles pat (panicking on invalid syntax, as
// regexp.MustCompile does) and returns the resulting matcher.
func BytesRegexp(pat string) *regexpBytes { return &regexpBytes{regexp.MustCompile(pat)} }

// Match reports whether the expression matches anywhere in b.
func (m *regexpBytes) Match(b []byte) bool { return m.re.Match(b) }

// MatchIndex returns the end offset of the leftmost match in b, or -1
// when the expression does not match.
func (m *regexpBytes) MatchIndex(b []byte) int {
	loc := m.re.FindIndex(b)
	if loc == nil {
		return -1
	}
	return loc[1]
}
// exactBytes matches only byte slices exactly equal to the pattern.
type exactBytes struct{ pat []byte }

// BytesExact returns a matcher for exact (whole-input) equality with pat.
func BytesExact(pat string) *exactBytes { return &exactBytes{[]byte(pat)} }

// Match reports whether b is byte-for-byte equal to the pattern.
// (Replaces the previous hand-rolled length-then-bytes loop with the
// equivalent, clearer bytes.Equal.)
func (m *exactBytes) Match(b []byte) bool {
	return bytes.Equal(m.pat, b)
}

// MatchIndex returns len(b) (the end of the match) on equality, -1
// otherwise.
func (m *exactBytes) MatchIndex(b []byte) int {
	if m.Match(b) {
		return len(b)
	}
	return -1
}
// anyBytes matches when the probe given to Match occurs inside the
// configured pattern — note the operands are reversed relative to
// hasBytes: here the pattern is the haystack.
type anyBytes struct {
	pat []byte
}

// BytesAny returns a matcher whose Match(b) is true when b is a
// substring of pat.
func BytesAny(pat string) *anyBytes { return &anyBytes{[]byte(pat)} }

// Match reports whether b occurs anywhere within the pattern.
func (m *anyBytes) Match(b []byte) bool { return bytes.Contains(m.pat, b) }

// MatchIndex returns the offset just past the first occurrence of b
// inside the pattern, or -1 when b does not occur.
func (m *anyBytes) MatchIndex(b []byte) int {
	idx := bytes.Index(m.pat, b)
	if idx < 0 {
		return -1
	}
	return idx + len(b)
}
// hasBytes matches when the pattern occurs inside the probe given to
// Match (the conventional substring search, inverse of anyBytes).
type hasBytes struct {
	pat []byte
}

// BytesHas returns a matcher whose Match(b) is true when pat occurs in b.
func BytesHas(pat string) *hasBytes { return &hasBytes{[]byte(pat)} }

// Match reports whether the pattern occurs anywhere in b.
func (m *hasBytes) Match(b []byte) bool { return bytes.Contains(b, m.pat) }

// MatchIndex returns the offset just past the first occurrence of the
// pattern in b, or -1 when it does not occur.
func (m *hasBytes) MatchIndex(b []byte) int {
	idx := bytes.Index(b, m.pat)
	if idx < 0 {
		return -1
	}
	return idx + len(m.pat)
}
// prefixBytes matches inputs that begin with the pattern.
type prefixBytes struct{ pat []byte }

// BytesPrefix returns a matcher for inputs starting with pat.
func BytesPrefix(pat string) *prefixBytes { return &prefixBytes{[]byte(pat)} }

// Match reports whether b starts with the pattern.
func (m *prefixBytes) Match(b []byte) bool { return bytes.HasPrefix(b, m.pat) }

// MatchIndex returns the end offset of the prefix (len(pat)) when b
// starts with it, or -1 otherwise.
func (m *prefixBytes) MatchIndex(b []byte) int {
	if !bytes.HasPrefix(b, m.pat) {
		return -1
	}
	return len(m.pat)
}
// prefixesBytes matches inputs that begin with any of a set of prefixes,
// backed by a trie for a single-pass lookup.
type prefixesBytes struct {
	t *trie.Trie
}

// BytesPrefixes builds a trie-backed matcher over the given prefix set.
func BytesPrefixes(pats ...string) *prefixesBytes {
	t := trie.New()
	for _, pat := range pats {
		t.Insert([]byte(pat))
	}
	return &prefixesBytes{t}
}

// Match reports whether b starts with any configured prefix.
func (m *prefixesBytes) Match(b []byte) bool { return m.t.PrefixIndex(b) >= 0 }

// MatchIndex returns the trie's prefix index for b, or -1 when no prefix
// matches.
// NOTE(review): assuming trie.PrefixIndex already returns -1 on a miss,
// the guard below is redundant but harmless — confirm against the trie
// package's contract.
func (m *prefixesBytes) MatchIndex(b []byte) int {
	if idx := m.t.PrefixIndex(b); idx >= 0 {
		return idx
	}
	return -1
}
// suffixBytes matches inputs that end with the pattern.
type suffixBytes struct{ pat []byte }

// BytesSuffix returns a matcher for inputs ending with pat.
func BytesSuffix(pat string) *suffixBytes { return &suffixBytes{[]byte(pat)} }

// Match reports whether b ends with the pattern.
func (m *suffixBytes) Match(b []byte) bool { return bytes.HasSuffix(b, m.pat) }

// MatchIndex returns the length of the suffix pattern when b ends with
// it (NOT an offset into b), or -1 otherwise — mirroring prefixBytes,
// where the matched length and the end offset coincide.
func (m *suffixBytes) MatchIndex(b []byte) int {
	if !bytes.HasSuffix(b, m.pat) {
		return -1
	}
	return len(m.pat)
}
// suffixesBytes matches inputs that end with any of a set of suffixes.
// Both the patterns and the probed input are reversed so the suffix
// lookup becomes a prefix lookup in the trie (reverse is a helper
// defined elsewhere in this package).
type suffixesBytes struct {
	t *trie.Trie
}

// BytesSuffixes builds a trie-backed matcher over the given suffix set.
func BytesSuffixes(pats ...string) *suffixesBytes {
	t := trie.New()
	for _, pat := range pats {
		t.Insert(reverse([]byte(pat)))
	}
	return &suffixesBytes{t}
}

// Match reports whether b ends with any configured suffix.
// NOTE(review): each call reverses b, which presumably allocates a copy —
// confirm against reverse's implementation if this sits on a hot path.
func (m *suffixesBytes) Match(b []byte) bool {
	return m.t.PrefixIndex(reverse(b)) >= 0
}

// MatchIndex returns the trie's prefix index for the reversed input
// (i.e. an offset measured from the END of b), or -1 when no suffix
// matches.
func (m *suffixesBytes) MatchIndex(b []byte) int {
	if idx := m.t.PrefixIndex(reverse(b)); idx >= 0 {
		return idx
	}
	return -1
}
// afterBytes applies an inner matcher to the region of the input at or
// after the first occurrence of a fixed leading byte sequence.
type afterBytes struct {
	first   []byte
	matcher BytesMatcher
}

// BytesAfter returns a matcher that locates first in the input and then
// delegates to m.
func BytesAfter(first string, m BytesMatcher) *afterBytes { return &afterBytes{[]byte(first), m} }

// Match reports whether first occurs in b and the inner matcher accepts
// the remainder after it.
func (a *afterBytes) Match(b []byte) bool {
	if idx := bytes.Index(b, a.first); idx >= 0 {
		return a.matcher.Match(b[idx+len(a.first):])
	}
	return false
}

// MatchIndex returns the inner matcher's end offset translated back into
// b's coordinates, or -1 when either first or the inner matcher fails.
//
// Fixed: the previous version added the inner result unconditionally, so
// an inner miss (-1) produced the bogus offset idx-1 instead of -1.
// NOTE(review): unlike Match, the inner matcher is applied from idx (the
// start of first), not from after it; preserved as-is since callers may
// depend on the resulting offsets — confirm intent.
func (a *afterBytes) MatchIndex(b []byte) int {
	idx := bytes.Index(b, a.first)
	if idx < 0 {
		return -1
	}
	inner := a.matcher.MatchIndex(b[idx:])
	if inner < 0 {
		return -1
	}
	return idx + inner
}
// andBytes combines matchers conjunctively: Match succeeds only when
// every child matcher succeeds.
type andBytes struct{ matchers []BytesMatcher }

// BytesAnd returns a matcher that requires all of m to match.
func BytesAnd(m ...BytesMatcher) *andBytes { return &andBytes{m} }

// Match reports whether every child matcher accepts b. An empty child
// list vacuously matches.
func (a *andBytes) Match(b []byte) bool {
	for _, m := range a.matchers {
		if !m.Match(b) {
			return false
		}
	}
	return true
}

// MatchIndex returns the largest end offset reported by the children, or
// -1 as soon as any child fails to match. An empty child list yields 0.
func (a *andBytes) MatchIndex(b []byte) int {
	longest := 0
	for _, m := range a.matchers {
		idx := m.MatchIndex(b)
		if idx < 0 {
			return -1
		}
		if idx > longest {
			longest = idx
		}
	}
	return longest
}
// orBytes combines matchers disjunctively: Match succeeds when at least
// one child matcher succeeds.
type orBytes struct{ matchers []BytesMatcher }

// BytesOr returns a matcher that requires any one of m to match.
func BytesOr(m ...BytesMatcher) *orBytes { return &orBytes{m} }

// Match reports whether at least one child matcher accepts b.
func (o *orBytes) Match(b []byte) bool {
	for i := range o.matchers {
		if o.matchers[i].Match(b) {
			return true
		}
	}
	return false
}

// MatchIndex returns the index reported by the FIRST child that matches
// (not the longest), or -1 when none do.
func (o *orBytes) MatchIndex(b []byte) int {
	for i := range o.matchers {
		if idx := o.matchers[i].MatchIndex(b); idx >= 0 {
			return idx
		}
	}
	return -1
}
type suffixGroupBytes struct {
suffix BytesMatcher
matchers []BytesMatcher
}
func BytesSuffixGroup(s string, m ...BytesMatcher) *suffixGroupBytes {
return &suffixGroupBytes{BytesSuffix(s), m}
}
func (sg *suffixGroupBytes) Match(b []byte) bool {
if sg.suffix.Match(b) {
return BytesOr(sg.matchers...).Match(b)
}
return false
}
func (sg *suffixGroupBytes) MatchIndex(b []byte) int {
if sg.suffix.MatchIndex(b) >= 0 {
return BytesOr(sg.matchers...).MatchIndex(b)
}
return -1
} | vendor/gopkg.in/toqueteos/substring.v1/bytes.go | 0.68056 | 0.403449 | bytes.go | starcoder |
package template
import (
"bytes"
"io"
"strings"
"text/template"
"github.com/golang/gddo/doc"
"github.com/posener/goreadme/internal/markdown"
)
// Execute is used to execute the README.md template
func Execute(w io.Writer, data interface{}) error {
return main.Execute(&multiNewLineEliminator{w: w}, data)
}
// base is the root template carrying the helper FuncMap shared by all
// named templates defined below.
var base = template.New("base").Funcs(
	template.FuncMap{
		// code wraps s in a fenced ```golang code block.
		"code": func(s string) string {
			return "```golang\n" + s + "\n```\n"
		},
		// inlineCode wraps s in backticks.
		"inlineCode": func(s string) string {
			return "`" + s + "`"
		},
		// fullName strips the leading "github.com/" from the import path
		// (used to build badge URLs like travis-ci.org/<owner>/<repo>).
		"fullName": func(p *doc.Package) string {
			return strings.TrimPrefix(p.ImportPath, "github.com/")
		},
		// urlOrName prefers the file's remote URL, falling back to a
		// root-relative link to the file name.
		"urlOrName": func(f *doc.File) string {
			if f.URL != "" {
				return f.URL
			}
			return "/" + f.Name
		},
		// doc converts godoc-formatted text to markdown.
		"doc": func(s string) string {
			b := bytes.NewBuffer(nil)
			markdown.ToMarkdown(b, s, nil)
			return b.String()
		},
	},
)
// main is the top-level README template: title, optional badges, the
// package doc, then the optional functions / sub-packages / examples
// sections, each gated by the corresponding Config flags.
// NOTE(review): "main" is an unusual name for a package-level template
// variable; left as-is since other files in the package may reference it.
var main = template.Must(base.Parse(`# {{.Package.Name}}
{{if .Config.Badges.TravicCI -}}
[](https://travis-ci.org/{{fullName .Package}})
{{end -}}
{{if .Config.Badges.CodeCov -}}
[](https://codecov.io/gh/{{fullName .Package}})
{{end -}}
{{if .Config.Badges.GolangCI -}}
[](https://golangci.com/r/{{.Package.ImportPath}})
{{end -}}
{{if .Config.Badges.GoDoc -}}
[](http://godoc.org/{{.Package.ImportPath}})
{{end -}}
{{if .Config.Badges.GoReportCard -}}
[](https://goreportcard.com/report/{{.Package.ImportPath}})
{{end -}}
{{if .Config.Badges.Goreadme -}}
[](https://goreadme.herokuapp.com)
{{ end }}
{{ doc .Package.Doc }}
{{ if .Config.Functions }}
{{ template "functions" .Package }}
{{ end }}
{{ if (not .Config.SkipSubPackages) }}
{{ template "subpackages" . }}
{{ end }}
{{ if (not .Config.SkipExamples) }}
{{ template "examples" .Package.Examples }}
{{end }}
`))
// functions defines the "functions" section template: each function's
// signature (linked to its source position), its doc, and its examples.
var functions = template.Must(base.Parse(`
{{ define "functions" }}
{{ if .Funcs }}
## Functions
{{ range .Funcs }}
### func [{{ .Name }}]({{ urlOrName (index $.Files .Pos.File) }}#L{{ .Pos.Line }})
{{ inlineCode .Decl.Text }}
{{ doc .Doc }}
{{ template "examples" .Examples }}
{{ end }}
{{ end }}
{{ end }}
`))
// exmaples defines the "examples" section template for a list of
// examples, preferring the playground version of the code when present.
// NOTE(review): the variable name is a typo for "examples"; left
// unchanged because renaming a package-level identifier could break
// other files in this package.
var exmaples = template.Must(base.Parse(`
{{ define "examples" }}
{{ if . }}
#### Examples
{{ range . }}
{{ if .Name }}##### {{.Name}}{{ end }}
{{ doc .Doc }}
{{ if .Play }}{{code .Play}}{{ else }}{{code .Code.Text}}{{ end }}
{{ end }}
{{ end }}
{{ end }}
`))
// subPackages defines the "subpackages" section template: a bullet list
// of sub-package links with their synopses when available.
var subPackages = template.Must(base.Parse(`
{{ define "subpackages" }}
{{ if .SubPackages }}
## Sub Packages
{{ range .SubPackages }}
* [{{.Path}}](./{{.Path}}){{if .Package.Synopsis}}: {{.Package.Synopsis}}{{end}}
{{ end }}
{{ end }}
{{ end }}
`))
package datatype
import (
"fmt"
)
// tempType identifies a temperature unit within this package.
type tempType int

// Temperature units - Celsius, Fahrenheit, Kelvin
const (
	Celsius tempType = iota
	Fahrenheit
	Kelvin
)
// TemperatureType - struct for implementing conversions of temperature
// between the supported units (Celsius, Fahrenheit, Kelvin).
type TemperatureType struct {
	// b holds the unit's basic metadata (names, display name, unit group).
	b *BaseDataType
	// tempUnit is the unit this instance represents.
	tempUnit tempType
	// convTempFuncs maps each TARGET unit to a conversion function;
	// the identity conversion (to the same unit) has no entry.
	convTempFuncs map[tempType]ConvFunc
}
// GetBase - return basic info (names, display name, unit group) of this
// datatype unit.
func (t *TemperatureType) GetBase() *BaseDataType {
	return t.b
}
// GetConvFunc - returns the conversion function from this temperature
// unit to typeTo. Bare-group sources convert via identity; incompatible
// groups yield an error.
//
// Fixed vs. the previous version:
//   - converting a unit to itself returned a nil ConvFunc with a nil
//     error (the conversion maps have no self entry), which would crash
//     the caller on invocation; it now returns the identity function.
//   - the unchecked type assertion could panic on a non-temperature
//     DataType that happened to share the group; it now returns an error.
//   - the error text said "GetConversionMultipl", a stale reference to a
//     different function name.
func (t *TemperatureType) GetConvFunc(typeTo DataType) (ConvFunc, error) {
	if t.b.Group == GroupBare {
		return func(in float64) float64 { return in }, nil
	}
	if t.b.Group != typeTo.GetBase().Group {
		return nil, fmt.Errorf("GetConvFunc: incompatible types %#v - %#v", t, typeTo)
	}
	target, ok := typeTo.(*TemperatureType)
	if !ok {
		return nil, fmt.Errorf("GetConvFunc: expected *TemperatureType, got %T", typeTo)
	}
	if target.tempUnit == t.tempUnit {
		return func(in float64) float64 { return in }, nil
	}
	return t.convTempFuncs[target.tempUnit], nil
}
// temperatureTypes enumerates the supported temperature units with their
// pairwise conversion functions. Each unit intentionally omits an entry
// for converting to itself. Formulas follow the standard definitions
// (F = 1.8*C + 32, K = C + 273.15).
var temperatureTypes = []DataType{
	&TemperatureType{
		b: &BaseDataType{
			Group: GroupTemperature,
			Names: []string{"celsius", "grad celsius", "grads celsius", "C", "°C"},
			DisplayName: "°C",
		},
		tempUnit: Celsius,
		convTempFuncs: map[tempType]ConvFunc{
			Fahrenheit: func(in float64) float64 { return 1.8*in + 32 },
			Kelvin: func(in float64) float64 { return in + 273.15 },
		},
	},
	&TemperatureType{
		b: &BaseDataType{
			Group: GroupTemperature,
			Names: []string{"fahrenheit", "grad fahrenheit", "grads fahrenheit", "F", "°F"},
			DisplayName: "°F",
		},
		tempUnit: Fahrenheit,
		convTempFuncs: map[tempType]ConvFunc{
			Celsius: func(in float64) float64 { return (in - 32) / 1.8 },
			Kelvin: func(in float64) float64 { return (in-32)/1.8 + 273.15 },
		},
	},
	&TemperatureType{
		b: &BaseDataType{
			Group: GroupTemperature,
			Names: []string{"kelvin", "kelvins", "K"},
			DisplayName: "K",
		},
		tempUnit: Kelvin,
		convTempFuncs: map[tempType]ConvFunc{
			Celsius: func(in float64) float64 { return in - 273.15 },
			Fahrenheit: func(in float64) float64 { return (in-273.15)*1.8 + 32 },
		},
	},
}
package event_service
import (
"code-test/server/model"
"code-test/server/repository"
"log"
)
// this mapping functions isolate the datamodel object construction to a single file - if the data model changes, the mapping related changes will happen in the same place
// also, status validation related to the event in database to which I am mapping to
// Map merges this screen-resize event into the session's stored Data
// object fetched from the repository, and returns that object.
//
// Already-recorded resize values are never overwritten: since only one
// resize happens per session, the stored From/To dimensions are filled
// in only while both their Height and Width are still empty strings
// (the zero value).
func (scrEvent *ScreenResizeEvent) Map() (*model.Data, error) {
	dataToReturn, err := repository.SessionsData.Get(scrEvent.SessionId, scrEvent.WebsiteUrl)
	if err != nil {
		log.Println(errorRetrievingObjectToMap, "with error", err)
		return nil, err
	}
	// "Invalid" here means "not yet recorded": both fields empty.
	dataToReturnResizeFromInvalid := dataToReturn.ResizeFrom.Height == "" && dataToReturn.ResizeFrom.Width == ""
	if dataToReturnResizeFromInvalid {
		dataToReturn.ResizeFrom = scrEvent.ResizeFrom
	}
	dataToReturnResizeToInvalid := dataToReturn.ResizeTo.Height == "" && dataToReturn.ResizeTo.Width == ""
	if dataToReturnResizeToInvalid {
		dataToReturn.ResizeTo = scrEvent.ResizeTo
	}
	return dataToReturn, nil
}
// Map records this event's elapsed time on the session's stored Data
// object fetched from the repository, and returns that object.
func (timeEvent *TimeTakenEvent) Map() (*model.Data, error) {
	sessionData, err := repository.SessionsData.Get(timeEvent.SessionId, timeEvent.WebsiteUrl)
	if err != nil {
		log.Println(errorRetrievingObjectToMap, "with error", err)
		return nil, err
	}
	sessionData.Time = timeEvent.Time
	return sessionData, nil
}
// Map records this copy/paste event in the stored session data for the
// event's session/URL pair and returns the updated data-model object.
func (cpEvent *CopyPasteEvent) Map() (*model.Data, error) {
	dataToReturn, err := repository.SessionsData.Get(cpEvent.SessionId, cpEvent.WebsiteUrl)
	if err != nil {
		log.Println(errorRetrievingObjectToMap, "with error", err)
		return nil, err
	}
	// store copy paste events
	// Given that the paste operation will only change from false to true once, I'm only adding to the dictionary
	// Drop the placeholder entry keyed by the empty string, if present
	// (presumably inserted when the session record was first created — TODO confirm).
	if _, ok := dataToReturn.CopyAndPaste[""]; ok {
		delete(dataToReturn.CopyAndPaste, "")
	}
	// NOTE(review): this write panics if CopyAndPaste is a nil map — confirm the
	// repository always initialises it before events arrive.
	dataToReturn.CopyAndPaste[cpEvent.FormId] = cpEvent.Pasted
	return dataToReturn, nil
}
Package migrations houses the orc8r data migration executables.
This doc.go describes how to write an orc8r data migration.
Functionality
Migrations are expected to conform to the following properties
- Idempotent
- Running the migration multiple times has same effect as running once
- Self-contained
- Migration itself doesn't rely on services or internal code
- Edit SQL tables directly
- Okay to use sqorc internal library as it's expected to remain stable
- Okay to use internal code for validation step
- Service-based validation
- Provide option to call to running services and check their semantic
view of the updated tables
Getting started
- Migration gets its own, codebase-unique package name
- MODULE/cloud/go/tools/migrations/m042_short_description
- MODULE should be orc8r, lte, cwf, etc
- Package name must be unique across the codebase
- Use unique migration number
- Emulate recent data migrations
- Full migration should run in a single, sql.LevelSerializable tx
- Minimize number of round-trip calls to DB
- Provide abundant documentation on migration's purpose and mechanism
During development
Run the migration locally, for faster iteration, and against a recent copy
of the prod DB, for visibility into the expected changes.
- Ask teammate for copy of prod DB, then load it into your local Postgres
- (host) $ docker cp ~/.magma/dbs/pgdump-prod-1595044229.sql orc8r_postgres_1:/var/lib/postgresql/data/
- (orc8r_postgres_1) $ createdb -U magma_dev -T template0 pgdump-prod-1595044229
- (orc8r_postgres_1) $ psql -U magma_dev pgdump-prod-1595044229 < /var/lib/postgresql/data/pgdump-prod-1595044229.sql
- Run migration locally, without needing to rebuild all containers
- Temporarily comment-out the ip.IsLoopback check in
unary/identity_decorator.go, then rebuild containers
- Point editor/shell to prod DB via the DATABASE_SOURCE env variable
- DATABASE_SOURCE='host=localhost dbname=prod-1595044229-july-17 user=magma_dev password=<PASSWORD> sslmode=disable'
- (optional) Update cloud docker-compose to expose the port of the
relevant controller service
Manual verification
Perform a final manual verification step against the prod DB, running the
newly-built migration executable from within the controller container.
- Point controller services to prod DB
- Set DATABASE_SOURCE environment variable(s) in cloud docker-compose
mirroring above
- Restart controller services
- Run the migration from the controller container
- Perform 2x to ensure idempotence
- Run migration
- Check relevant tables
- Check relevant endpoints and logs
*/
package migrations | orc8r/cloud/go/tools/migrations/doc.go | 0.53777 | 0.571527 | doc.go | starcoder |
package lexers
import . "github.com/johnsto/go-highlight"
// CSS is a Lexer specification for CSS stylesheets. It tokenises selectors,
// @media queries, declaration blocks, and single-/multi-line comments.
var CSS = Lexer{
	Name:      "css",
	MimeTypes: []string{"text/css"},
	Filenames: []string{"*.css"},
	States: StatesSpec{
		"root": {
			{Include: "whitespace"},
			{Include: "singleLineComment"},
			{Include: "multiLineComment"},
			{Include: "selector"},
			{Include: "declarationBlock"},
		},
		"selector": {
			{Regexp: `(\[)([^\]]+)(\])`,
				SubTypes: []TokenType{Punctuation, Attribute, Punctuation}},
			{Regexp: `(\.)([-a-zA-Z0-9]+)`,
				SubTypes: []TokenType{Punctuation, Attribute}},
			{Regexp: `@[-a-zA-Z0-9]+`, Type: Literal, State: "media"},
			{Regexp: `>`, Type: Punctuation},
			{Regexp: `\+`, Type: Punctuation},
			{Regexp: `:`, Type: Punctuation},
			{Regexp: `,`, Type: Punctuation},
			{Regexp: `[-a-zA-Z0-9]+`, Type: Attribute},
			{Regexp: `\*`, Type: Attribute},
		},
		"media": {
			{Regexp: ` and `, Type: Operator},
			{Regexp: `,`, Type: Punctuation},
			{Regexp: `[-a-zA-Z0-9]+`, Type: Attribute},
			{Regexp: `(\()` + `(\s*)` +
				`([-a-zA-Z0-9]+)` + `(:)` + `([^\)]+)` +
				`(\s*)` + `(\))`,
				SubTypes: []TokenType{Punctuation, Whitespace, Attribute, Assignment, Text, Whitespace, Punctuation}},
			{Include: "whitespace"},
			{Include: "singleLineComment"},
			{Include: "multiLineComment"},
			{Regexp: `{`, Type: Punctuation, State: "mediaBlock"},
		},
		"mediaBlock": {
			{Include: "whitespace"},
			{Include: "singleLineComment"},
			{Include: "multiLineComment"},
			{Include: "selector"},
			{Include: "declarationBlock"},
			{Regexp: `}`, Type: Punctuation, State: "#pop #pop"},
		},
		"ruleValue": {
			// Bug fix: these two rules previously used the string literals
			// "Punctuation" and "Text" instead of the TokenType constants
			// used by every other rule in this spec.
			{Regexp: `;`, Type: Punctuation, State: "#pop"},
			{Regexp: `.*`, Type: Text},
		},
		"declarationBlock": {
			{Regexp: `{`, Type: Punctuation, State: "declaration"},
		},
		"declaration": {
			{Include: "whitespace"},
			{Include: "singleLineComment"},
			{Include: "multiLineComment"},
			{Regexp: `([a-zA-Z0-9_-]+)(\w*)(:)`,
				SubTypes: []TokenType{Tag, Whitespace, Assignment},
				State:    "declarationValue"},
			{Regexp: `}`, Type: Punctuation, State: "#pop"},
			{Include: "selector"},
			{Include: "declarationBlock"},
		},
		"declarationValue": {
			{Regexp: `(")([^"]*)(")`,
				SubTypes: []TokenType{Punctuation, Text, Punctuation}},
			{Regexp: `(')([^']*)(')`,
				SubTypes: []TokenType{Punctuation, Text, Punctuation}},
			{Regexp: `[^;]+`, Type: Text},
			{Regexp: `,`, Type: Punctuation},
			{Regexp: `;`, Type: Punctuation, State: "#pop"},
		},
		"whitespace": {
			{Regexp: `[ \r\n\f\t]+`, Type: Whitespace},
		},
		"singleLineComment": {
			{Regexp: `\/\/.*`, Type: Comment},
		},
		"multiLineComment": {
			{Regexp: `\/\*`, Type: Comment, State: "multiLineCommentContents"},
		},
		"multiLineCommentContents": {
			{Regexp: `\*\/`, Type: Comment, State: "#pop"},
			{Regexp: `(.+?)(\*\/)`, Type: Comment, State: "#pop"},
			{Regexp: `.+`, Type: Comment},
		},
	},
}
// init registers the CSS lexer under its name so it is discoverable by the
// highlight package's lookup machinery.
func init() {
	Register(CSS.Name, CSS)
}
package typeinfo
import (
"context"
"fmt"
"github.com/dolthub/go-mysql-server/sql"
"github.com/dolthub/dolt/go/store/types"
)
// This is a dolt implementation of the MySQL type Geometry, thus most of the functionality
// within is directly reliant on the go-mysql-server implementation.
type geometryType struct {
	sqlGeometryType sql.GeometryType // References the corresponding GeometryType in GMS
	innerType       TypeInfo         // References the actual typeinfo (pointType, linestringType, polygonType)
}
// Compile-time assertion that *geometryType satisfies TypeInfo.
var _ TypeInfo = (*geometryType)(nil)
// GeometryType is the shared geometry TypeInfo instance (no inner type bound).
var GeometryType = &geometryType{sql.GeometryType{}, nil}
// ConvertTypesGeometryToSQLGeometry basically makes a deep copy of sql.Geometry
// by converting the inner noms value (point/linestring/polygon) to its GMS
// equivalent. Panics on any other inner type.
func ConvertTypesGeometryToSQLGeometry(g types.Geometry) sql.Geometry {
	switch inner := g.Inner.(type) {
	case types.Point:
		return sql.Geometry{Inner: ConvertTypesPointToSQLPoint(inner)}
	case types.Linestring:
		return sql.Geometry{Inner: ConvertTypesLinestringToSQLLinestring(inner)}
	case types.Polygon:
		return sql.Geometry{Inner: ConvertTypesPolygonToSQLPolygon(inner)}
	default:
		panic("used an invalid type types.Geometry.Inner")
	}
}
// ConvertNomsValueToValue implements TypeInfo interface.
// It maps a noms types.Geometry to a sql.Geometry; types.Null and Go nil map
// to nil; any other kind is an error.
func (ti *geometryType) ConvertNomsValueToValue(v types.Value) (interface{}, error) {
	// Expect a types.Geometry, return a sql.Geometry
	if val, ok := v.(types.Geometry); ok {
		return ConvertTypesGeometryToSQLGeometry(val), nil
	}
	// Check for null
	if _, ok := v.(types.Null); ok || v == nil {
		return nil, nil
	}
	return nil, fmt.Errorf(`"%v" cannot convert NomsKind "%v" to a value`, ti.String(), v.Kind())
}
// ReadFrom reads a go value from a noms types.CodecReader directly.
// Geometry kinds are decoded and converted via ConvertNomsValueToValue;
// null kinds yield nil; anything else is an error.
func (ti *geometryType) ReadFrom(nbf *types.NomsBinFormat, reader types.CodecReader) (interface{}, error) {
	k := reader.ReadKind()
	switch k {
	case types.GeometryKind:
		p, err := reader.ReadGeometry()
		if err != nil {
			return nil, err
		}
		return ti.ConvertNomsValueToValue(p)
	case types.NullKind:
		return nil, nil
	default:
		return nil, fmt.Errorf(`"%v" cannot convert NomsKind "%v" to a value`, ti.String(), k)
	}
}
// ConvertSQLGeometryToTypesGeometry is the inverse of
// ConvertTypesGeometryToSQLGeometry: it deep-copies a sql.Geometry into a
// noms types.Geometry. Panics on an unrecognised inner type.
func ConvertSQLGeometryToTypesGeometry(p sql.Geometry) types.Geometry {
	switch inner := p.Inner.(type) {
	case sql.Point:
		return types.Geometry{Inner: ConvertSQLPointToTypesPoint(inner)}
	case sql.Linestring:
		return types.Geometry{Inner: ConvertSQLLinestringToTypesLinestring(inner)}
	case sql.Polygon:
		return types.Geometry{Inner: ConvertSQLPolygonToTypesPolygon(inner)}
	default:
		panic("used an invalid type sql.Geometry.Inner")
	}
}
// ConvertValueToNomsValue implements TypeInfo interface.
// nil maps to types.NullValue; any other input is coerced through the GMS
// geometry type's Convert before being translated to a noms value.
func (ti *geometryType) ConvertValueToNomsValue(ctx context.Context, vrw types.ValueReadWriter, v interface{}) (types.Value, error) {
	// Check for null
	if v == nil {
		return types.NullValue, nil
	}
	// Convert accordingly
	geom, err := ti.sqlGeometryType.Convert(v)
	if err != nil {
		return nil, err
	}
	return ConvertSQLGeometryToTypesGeometry(geom.(sql.Geometry)), nil
}
// Equals implements TypeInfo interface. Two instances compare equal whenever
// the other TypeInfo is also a *geometryType (parameters are not compared).
func (ti *geometryType) Equals(other TypeInfo) bool {
	switch other.(type) {
	case *geometryType:
		return true
	default:
		return false
	}
}
// FormatValue implements TypeInfo interface.
// It renders the inner spatial value (point/linestring/polygon) via the bound
// inner TypeInfo; SQL NULL and Go nil yield a nil string pointer.
func (ti *geometryType) FormatValue(v types.Value) (*string, error) {
	// Received null value
	if _, ok := v.(types.Null); ok || v == nil {
		return nil, nil
	}
	// Expect a Geometry type
	if val, ok := v.(types.Geometry); ok {
		return ti.innerType.FormatValue(val.Inner)
	}
	// Bug fix: %T must be given the value itself; previously v.Kind() was
	// passed, so the message always printed the NomsKind type instead of
	// the offending value's dynamic type.
	return nil, fmt.Errorf(`"%v" has unexpectedly encountered a value of type "%T" from embedded type`, ti.String(), v)
}
// GetTypeIdentifier implements TypeInfo interface; returns the constant
// identifier for the geometry type.
func (ti *geometryType) GetTypeIdentifier() Identifier {
	return GeometryTypeIdentifier
}
// GetTypeParams implements TypeInfo interface; geometry carries no
// serialisable parameters, so the map is always empty.
func (ti *geometryType) GetTypeParams() map[string]string {
	return map[string]string{}
}
// IsValid implements TypeInfo interface. Geometry values and NULLs
// (including Go nil) are accepted; every other value is rejected.
func (ti *geometryType) IsValid(v types.Value) bool {
	if v == nil {
		return true
	}
	switch v.(type) {
	case types.Geometry, types.Null:
		return true
	}
	return false
}
// NomsKind implements TypeInfo interface; reports the storage kind used for
// geometry values.
func (ti *geometryType) NomsKind() types.NomsKind {
	return types.GeometryKind
}
// Promote implements TypeInfo interface; promotes both the GMS type and the
// bound inner typeinfo.
func (ti *geometryType) Promote() TypeInfo {
	return &geometryType{ti.sqlGeometryType.Promote().(sql.GeometryType), ti.innerType.Promote()}
}
// String implements TypeInfo interface; human-readable type name.
func (ti *geometryType) String() string {
	return "Geometry"
}
// ToSqlType implements TypeInfo interface; exposes the underlying GMS type.
func (ti *geometryType) ToSqlType() sql.Type {
	return ti.sqlGeometryType
}
// geometryTypeConverter is an internal function for GetTypeConverter that handles the specific type as the source TypeInfo.
// Most destinations simply route through the destination's own
// ConvertValueToNomsValue; geometry-to-geometry is the identity.
func geometryTypeConverter(ctx context.Context, src *geometryType, destTi TypeInfo) (tc TypeConverter, needsConversion bool, err error) {
	switch dest := destTi.(type) {
	case *bitType:
		// Geometry collapses to zero when converted to a bit column.
		return func(ctx context.Context, vrw types.ValueReadWriter, v types.Value) (types.Value, error) {
			return types.Uint(0), nil
		}, true, nil
	case *blobStringType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *boolType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *datetimeType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *decimalType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *enumType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *floatType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *geometryType:
		return identityTypeConverter, false, nil
	case *inlineBlobType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *intType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *jsonType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *linestringType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *pointType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *polygonType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *setType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *timeType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *uintType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *uuidType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *varBinaryType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *varStringType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	case *yearType:
		return wrapConvertValueToNomsValue(dest.ConvertValueToNomsValue)
	default:
		return nil, false, UnhandledTypeConversion.New(src.String(), destTi.String())
	}
}
package geometry
import (
"errors"
"math"
"github.com/benjamin-rood/abm-cp/calc"
)
// Vector : Any sized dimension representation of a point of vector space.
type Vector []float64

// VectorEquality is implemented by values that support an equality test
// against another VectorEquality.
type VectorEquality interface {
	Equal(VectorEquality) bool
}

// Equal reports whether v and u have the same length and agree element-wise.
// NOTE: u must have dynamic type Vector; the type assertion panics otherwise.
func (v Vector) Equal(u VectorEquality) bool {
	w := u.(Vector)
	if len(w) != len(v) {
		return false
	}
	for i, e := range v {
		if w[i] != e {
			return false
		}
	}
	return true
}
// just providing a conventional element naming system for
// 2D and 3D vectors: x=0, y=1, z=2 index into a Vector.
// this 'aliasing' is just local to this file.
const (
	x = iota
	y
	z
)
// VecAddition returns the element-wise sum v + u of two vectors,
// s.t. v + u = [v₁+u₁ , v₂+u₂, ⠂⠂⠂ , vᵢ+uᵢ]. An error is returned
// when the dimensions differ.
func VecAddition(v Vector, u Vector) (Vector, error) {
	if len(v) != len(u) {
		return nil, errors.New("vector dimensions do not coincide")
	}
	var sum Vector
	for i := range v {
		sum = append(sum, v[i]+u[i])
	}
	return sum, nil
}
// VecScalarMultiply returns scalar * v, scaling every element of v.
// An error is returned for an empty vector.
func VecScalarMultiply(v Vector, scalar float64) (Vector, error) {
	if len(v) == 0 {
		return nil, errors.New("v is an empty vector")
	}
	var scaled Vector
	for _, e := range v {
		scaled = append(scaled, e*scalar)
	}
	return scaled, nil
}
// DotProduct returns v•u, the sum of element-wise products of two
// equal-length vectors, as a scalar. An error is returned when the
// dimensions differ.
func DotProduct(v Vector, u Vector) (float64, error) {
	if len(v) != len(u) {
		return 0, errors.New("vector dimensions do not coincide")
	}
	var sum float64
	for i := range v {
		sum += v[i] * u[i]
	}
	return sum, nil
}
// CrossProduct produces a new Vector orthogonal to both v and u.
// Only supported for 3D vectors.
func CrossProduct(v Vector, u Vector) (Vector, error) {
	if len(v) != 3 || len(u) != 3 {
		return nil, errors.New("vector dimension != 3")
	}
	return Vector{
		v[y]*u[z] - v[z]*u[y],
		v[z]*u[x] - v[x]*u[z],
		v[x]*u[y] - v[y]*u[x],
	}, nil
}
// AngleFromOrigin calculates the angle of a given vector from the origin
// relative to the x-axis of 𝐄 (the model environment)
// NOTE(review): math.Atan2 takes (y, x); this passes (v[x], v[y]), so the
// result is actually measured from the y-axis, disagreeing with both the
// doc comment and RelativeAngle below (which uses Atan2(dy, dx)). Confirm
// which convention callers rely on before changing.
func AngleFromOrigin(v Vector) (float64, error) {
	if len(v) != 2 {
		return 0, errors.New("vector dimension != 2")
	}
	return calc.ToFixed(math.Atan2(v[x], v[y]), 5), nil
}
// RelativeAngle – the angle of the line from point v to point u, measured
// from the x-axis via math.Atan2(dy, dx), rounded to 5 decimal places.
// Only implemented in 2D currently, or, as a comparative rotation between two points on a single plane.
func RelativeAngle(v Vector, u Vector) (float64, error) {
	if len(v) == 0 || len(u) == 0 {
		return 0, errors.New("v or u is an empty vector")
	}
	if len(v) != len(u) {
		return 0, errors.New("vector dimensions do not coincide")
	}
	dx := u[x] - v[x]
	dy := u[y] - v[y]
	Φ := math.Atan2(dy, dx)
	return calc.ToFixed(Φ, 5), nil
}
// AngleToIntercept calculates the change in angle required from the current heading to point in direction of target.
// NOTE(review): the result is only re-wrapped when Ψ < -π; a value above +π
// is returned as-is — confirm whether callers expect the range (-π, π].
func AngleToIntercept(pos Vector, dir𝚹 float64, target Vector) (float64, error) {
	// angle between pos and target:
	Φ, err := RelativeAngle(pos, target)
	if err != nil {
		return 0, err
	}
	// angle between unit vector (dir) and Φ:
	Ψ := Φ - dir𝚹
	if Ψ < -math.Pi {
		Ψ += 2 * math.Pi
	}
	Ψ = calc.ToFixed(Ψ, 5)
	return Ψ, nil
}
// UnitAngle will map any floating-point value to its angle on a unit circle,
// i.e. into [0, 2π), by subtracting whole turns; rounded to 5 decimal places.
func UnitAngle(angle float64) float64 {
	twoPi := math.Pi * 2
	return calc.ToFixed((angle - (twoPi * math.Floor(angle/twoPi))), 5)
}
// UnitVector returns the 2D direction vector [cos θ, sin θ] for the given
// angle, after normalising the angle onto the unit circle.
func UnitVector(angle float64) Vector {
	θ := UnitAngle(angle)
	return Vector{math.Cos(θ), math.Sin(θ)}
}
// Magnitude does the classic calculation for length of a vector
// (or, distance from origin): √(Σ vᵢ²), rounded to 5 decimal places.
// An error is returned for an empty vector.
func Magnitude(v Vector) (float64, error) {
	if len(v) == 0 {
		return 0, errors.New("v is an empty vector")
	}
	var ǁvǁsq float64
	for i := 0; i < len(v); i++ {
		ǁvǁsq += v[i] * v[i]
	}
	return calc.ToFixed(math.Sqrt(ǁvǁsq), 5), nil
}
// VectorDistance calculates the Euclidean distance between two positions,
// i.e. the magnitude of v − u. An error is returned when the dimensions
// differ.
func VectorDistance(v Vector, u Vector) (float64, error) {
	if len(v) != len(u) {
		return 0, errors.New("vector dimensions do not coincide")
	}
	// Pre-size the difference vector instead of growing it element by element.
	vd := make(Vector, len(v))
	for i := range v {
		vd[i] = v[i] - u[i]
	}
	return Magnitude(vd)
}
// Normalise returns the normalised Vector of v
// – it's what you might call a direction vector,
// as opposed to a position vector: each element divided by ǁvǁ and rounded
// to 5 decimal places.
// NOTE: this is not the same as a 'norm'/'normal' which is the vector
// orthogonal to a plane or surface.
func Normalise(v Vector) (Vector, error) {
	if len(v) == 0 {
		return nil, errors.New("v is an empty vector")
	}
	// Perf fix: the magnitude is loop-invariant; previously it was
	// recomputed (and its error re-checked) once per element.
	ǁvǁ, err := Magnitude(v)
	if err != nil {
		return nil, err
	}
	var norm Vector
	for i := 0; i < len(v); i++ {
		norm = append(norm, calc.ToFixed(v[i]/ǁvǁ, 5))
	}
	return norm, nil
}
// TranslatePositionToSector2D : translates the co-ordinates of a 2D vector to sector indices location (2D Version).
// ed is presumably the half-width of the square environment [-ed, ed] and n
// the grid dimension, mapping a position to a (row, col) in an n×n grid —
// TODO confirm against callers. Note the y-axis is inverted for row order.
func TranslatePositionToSector2D(ed float64, n int, v Vector) (int, int) {
	fn := float64(n)
	col := int(((v[x] + ed) / (2 * ed)) * fn)
	row := int(((-1 * (v[y] - ed)) / (2 * ed)) * fn)
	return row, col
}
// FuzzifyVector will return a 'fuzzy', slightly randomised copy of v, with
// each element offset by a random variance in range (-ε, +ε).
// An error is returned for an empty vector.
func FuzzifyVector(v Vector, ε float64) (Vector, error) {
	if len(v) == 0 {
		return nil, errors.New("v is an empty vector")
	}
	// Bug fix: `vf := v` aliased the caller's backing array, so the caller's
	// vector was mutated in place. Copy first so only the returned vector
	// is fuzzed.
	vf := make(Vector, len(v))
	copy(vf, v)
	for i := range vf {
		vf[i] += calc.RandFloatIn(-ε, ε)
	}
	return vf, nil
}
// RandVector will give a random vector within boundaries the axes of len(bounds) dimensions:
// element i is drawn uniformly from (-bounds[i], bounds[i]).
func RandVector(bounds []float64) Vector {
	var v Vector
	for i := 0; i < len(bounds); i++ {
		d := bounds[i]
		val := calc.RandFloatIn(-d, d)
		v = append(v, val)
	}
	return v
}
package rangeproof
import (
"encoding/binary"
"fmt"
"io"
"math/big"
"github.com/pkg/errors"
ristretto "github.com/bwesterb/go-ristretto"
"github.com/dusk-network/dusk-blockchain/pkg/crypto/rangeproof/fiatshamir"
"github.com/dusk-network/dusk-blockchain/pkg/crypto/rangeproof/innerproduct"
"github.com/dusk-network/dusk-blockchain/pkg/crypto/rangeproof/pedersen"
"github.com/dusk-network/dusk-blockchain/pkg/crypto/rangeproof/vector"
)
// N is number of bits in range
// So amount will be between 0...2^(N-1)
const N = 64
// M is the number of outputs for one bulletproof.
// NOTE(review): package-level mutable state — Prove overwrites it per call.
var M = 1
// maxM is the maximum number of values allowed per rangeproof
const maxM = 16
// Proof is the constructed BulletProof
type Proof struct {
	V        []pedersen.Commitment // Pedersen commitments to the proved values
	Blinders []ristretto.Scalar
	A        ristretto.Point // Curve point 32 bytes - commitment to bit vectors aL, aR
	S        ristretto.Point // Curve point 32 bytes - commitment to blinding vectors sL, sR
	T1       ristretto.Point // Curve point 32 bytes - commitment to t1 coefficient
	T2       ristretto.Point // Curve point 32 bytes - commitment to t2 coefficient
	taux     ristretto.Scalar //scalar - blinding-factor polynomial evaluated at x
	mu       ristretto.Scalar //scalar - combined blinding factor for A and S
	t        ristretto.Scalar // inner product of l(x) and r(x)
	IPProof  *innerproduct.Proof
}
// Prove will take a set of scalars as a parameter and prove that it is [0, 2^N)
// NOTE(review): Prove mutates the package-level var M; concurrent calls would
// race — confirm single-threaded use.
func Prove(v []ristretto.Scalar, debug bool) (Proof, error) {
	if len(v) < 1 {
		return Proof{}, errors.New("length of slice v is zero")
	}
	M = len(v)
	if M > maxM {
		return Proof{}, fmt.Errorf("maximum amount of values must be less than %d", maxM)
	}
	// Pad zero values until we have power of two
	padAmount := innerproduct.DiffNextPow2(uint32(M))
	M = M + int(padAmount)
	for i := uint32(0); i < padAmount; i++ {
		var zeroScalar ristretto.Scalar
		zeroScalar.SetZero()
		v = append(v, zeroScalar)
	}
	// commitment to values v
	Vs := make([]pedersen.Commitment, 0, M)
	genData := []byte("dusk.BulletProof.vec1")
	ped := pedersen.New(genData)
	ped.BaseVector.Compute(uint32((N * M)))
	// Hash for Fiat-Shamir
	hs := fiatshamir.HashCacher{[]byte{}}
	for _, amount := range v {
		// compute commmitment to v
		V := ped.CommitToScalar(amount)
		Vs = append(Vs, V)
		// update Fiat-Shamir
		hs.Append(V.Value.Bytes())
	}
	aLs := make([]ristretto.Scalar, 0, N*M)
	aRs := make([]ristretto.Scalar, 0, N*M)
	for i := range v {
		// Compute Bitcommits aL and aR to v
		BC := BitCommit(v[i].BigInt())
		aLs = append(aLs, BC.AL...)
		aRs = append(aRs, BC.AR...)
	}
	// Compute A
	A := computeA(ped, aLs, aRs)
	// // Compute S
	S, sL, sR := computeS(ped)
	// // update Fiat-Shamir
	hs.Append(A.Value.Bytes(), S.Value.Bytes())
	// compute y and z
	y, z := computeYAndZ(hs)
	// compute polynomial
	poly, err := computePoly(aLs, aRs, sL, sR, y, z)
	if err != nil {
		return Proof{}, errors.Wrap(err, "[Prove] - poly")
	}
	// Compute T1 and T2
	T1 := ped.CommitToScalar(poly.t1)
	T2 := ped.CommitToScalar(poly.t2)
	// update Fiat-Shamir
	hs.Append(z.Bytes(), T1.Value.Bytes(), T2.Value.Bytes())
	// compute x
	x := computeX(hs)
	// compute taux which is just the polynomial for the blinding factors at a point x
	taux := computeTaux(x, z, T1.BlindingFactor, T2.BlindingFactor, Vs)
	// compute mu
	mu := computeMu(x, A.BlindingFactor, S.BlindingFactor)
	// compute l dot r
	l, err := poly.computeL(x)
	if err != nil {
		return Proof{}, errors.Wrap(err, "[Prove] - l")
	}
	r, err := poly.computeR(x)
	if err != nil {
		return Proof{}, errors.Wrap(err, "[Prove] - r")
	}
	t, err := vector.InnerProduct(l, r)
	if err != nil {
		return Proof{}, errors.Wrap(err, "[Prove] - t")
	}
	// START DEBUG
	if debug {
		err := debugProve(x, y, z, v, l, r, aLs, aRs, sL, sR)
		if err != nil {
			return Proof{}, errors.Wrap(err, "[Prove] - debugProve")
		}
		// DEBUG T0
		testT0, err := debugT0(aLs, aRs, y, z)
		if err != nil {
			return Proof{}, errors.Wrap(err, "[Prove] - testT0")
		}
		if !testT0.Equals(&poly.t0) {
			return Proof{}, errors.New("[Prove]: Test t0 value does not match the value calculated from the polynomial")
		}
		polyt0 := poly.computeT0(y, z, v, N, uint32(M))
		if !polyt0.Equals(&poly.t0) {
			return Proof{}, errors.New("[Prove]: t0 value from delta function, does not match the polynomial t0 value(Correct)")
		}
		tPoly := poly.eval(x)
		if !t.Equals(&tPoly) {
			return Proof{}, errors.New("[Prove]: The t value computed from the t-poly, does not match the t value computed from the inner product of l and r")
		}
	}
	// End DEBUG
	// check if any challenge scalars are zero
	if x.IsNonZeroI() == 0 || y.IsNonZeroI() == 0 || z.IsNonZeroI() == 0 {
		return Proof{}, errors.New("[Prove] - One of the challenge scalars, x, y, or z was equal to zero. Generate proof again")
	}
	hs.Append(x.Bytes(), taux.Bytes(), mu.Bytes(), t.Bytes())
	// calculate inner product proof
	Q := ristretto.Point{}
	w := hs.Derive()
	Q.ScalarMult(&ped.BasePoint, &w)
	var yinv ristretto.Scalar
	yinv.Inverse(&y)
	Hpf := vector.ScalarPowers(yinv, uint32(N*M))
	genData = append(genData, uint8(1))
	ped2 := pedersen.New(genData)
	ped2.BaseVector.Compute(uint32(N * M))
	H := ped2.BaseVector.Bases
	G := ped.BaseVector.Bases
	ip, err := innerproduct.Generate(G, H, l, r, Hpf, Q)
	if err != nil {
		return Proof{}, errors.Wrap(err, "[Prove] - ipproof")
	}
	return Proof{
		V:       Vs,
		A:       A.Value,
		S:       S.Value,
		T1:      T1.Value,
		T2:      T2.Value,
		t:       t,
		taux:    taux,
		mu:      mu,
		IPProof: ip,
	}, nil
}
// computeA commits to the bit vectors: A = kH + aL*G + aR*H.
func computeA(ped *pedersen.Pedersen, aLs, aRs []ristretto.Scalar) pedersen.Commitment {
	cA := ped.CommitToVectors(aLs, aRs)
	return cA
}
// computeS commits to fresh random blinding vectors: S = kH + sL*G + sR*H.
// It returns the commitment together with both generated vectors.
func computeS(ped *pedersen.Pedersen) (pedersen.Commitment, []ristretto.Scalar, []ristretto.Scalar) {
	n := N * M
	sL := make([]ristretto.Scalar, n)
	sR := make([]ristretto.Scalar, n)
	for i := 0; i < n; i++ {
		sL[i].Rand()
		sR[i].Rand()
	}
	return ped.CommitToVectors(sL, sR), sL, sR
}
// computeYAndZ derives the Fiat-Shamir challenge scalars y and z from the
// transcript hash; z is chained from y.
func computeYAndZ(hs fiatshamir.HashCacher) (ristretto.Scalar, ristretto.Scalar) {
	var y ristretto.Scalar
	y.Derive(hs.Result())
	var z ristretto.Scalar
	z.Derive(y.Bytes())
	return y, z
}
// computeX derives the Fiat-Shamir challenge scalar x from the transcript hash.
func computeX(hs fiatshamir.HashCacher) ristretto.Scalar {
	var x ristretto.Scalar
	x.Derive(hs.Result())
	return x
}
// compute polynomial for blinding factors l61
// N.B. tau1 means tau superscript 1
// taux = t1Blind * x + t2Blind * x^2 + (sum(z^n+1 * vBlind[n-1])) from n = 1 to n = m
func computeTaux(x, z, t1Blind, t2Blind ristretto.Scalar, vBlinds []pedersen.Commitment) ristretto.Scalar {
	tau1X := t1Blind.Mul(&x, &t1Blind)
	var xsq ristretto.Scalar
	xsq.Square(&x)
	tau2Xsq := t2Blind.Mul(&xsq, &t2Blind)
	// Accumulate z^(i+2) * blinding factor for each value commitment.
	var zN ristretto.Scalar
	zN.Square(&z) // start at zSq
	var zNBlindSum ristretto.Scalar
	zNBlindSum.SetZero()
	for i := range vBlinds {
		zNBlindSum.MulAdd(&zN, &vBlinds[i].BlindingFactor, &zNBlindSum)
		zN.Mul(&zN, &z)
	}
	var res ristretto.Scalar
	res.Add(tau1X, tau2Xsq)
	res.Add(&res, &zNBlindSum)
	return res
}
// alpha is the blinding factor for A
// rho is the blinding factor for S
// mu = alpha + rho * x
func computeMu(x, alpha, rho ristretto.Scalar) ristretto.Scalar {
	var mu ristretto.Scalar
	mu.MulAdd(&rho, &x, &alpha)
	return mu
}
// computeHprime will take a slice of points H, with a scalar y
// and return a slice of points Hprime, such that Hprime = y^-n * H
// NOTE(review): appears unused within this file (Verify builds the same
// powers via vector.ScalarPowers) — confirm before removing. The per-element
// big.Int exponentiation is also quadratic-ish; iterative multiplication
// would be cheaper if this is ever put on a hot path.
func computeHprime(H []ristretto.Point, y ristretto.Scalar) []ristretto.Point {
	Hprimes := make([]ristretto.Point, len(H))
	var yInv ristretto.Scalar
	yInv.Inverse(&y)
	invYInt := yInv.BigInt()
	for i, p := range H {
		// compute y^-i
		var invYPowInt big.Int
		invYPowInt.Exp(invYInt, big.NewInt(int64(i)), nil)
		var invY ristretto.Scalar
		invY.SetBigInt(&invYPowInt)
		var hprime ristretto.Point
		hprime.ScalarMult(&p, &invY)
		Hprimes[i] = hprime
	}
	return Hprimes
}
// Verify takes a bullet proof and returns true only if the proof was valid.
// It rebuilds the Fiat-Shamir transcript exactly as Prove did, then delegates
// the aggregated check to megacheckWithC.
func Verify(p Proof) (bool, error) {
	genData := []byte("dusk.BulletProof.vec1")
	ped := pedersen.New(genData)
	ped.BaseVector.Compute(uint32(N * M))
	genData = append(genData, uint8(1))
	ped2 := pedersen.New(genData)
	ped2.BaseVector.Compute(uint32(N * M))
	G := ped.BaseVector.Bases
	H := ped2.BaseVector.Bases
	// Reconstruct the challenges
	hs := fiatshamir.HashCacher{[]byte{}}
	for _, V := range p.V {
		hs.Append(V.Value.Bytes())
	}
	hs.Append(p.A.Bytes(), p.S.Bytes())
	y, z := computeYAndZ(hs)
	hs.Append(z.Bytes(), p.T1.Bytes(), p.T2.Bytes())
	x := computeX(hs)
	hs.Append(x.Bytes(), p.taux.Bytes(), p.mu.Bytes(), p.t.Bytes())
	w := hs.Derive()
	return megacheckWithC(p.IPProof, p.mu, x, y, z, p.t, p.taux, w, p.A, ped.BasePoint, ped.BlindPoint, p.S, p.T1, p.T2, G, H, p.V)
}
// megacheckWithC folds the range-proof and inner-product verification
// equations into a single multi-exponentiation check, randomised by a fresh
// scalar c so both equations must hold simultaneously. The eleven partial
// sums c1..c11 must cancel to the identity point.
func megacheckWithC(ipproof *innerproduct.Proof, mu, x, y, z, t, taux, w ristretto.Scalar, A, G, H, S, T1, T2 ristretto.Point, GVec, HVec []ristretto.Point, V []pedersen.Commitment) (bool, error) {
	var c1, c2, c3, c4, c5, c6, c7, c8, c9, c10, c11 ristretto.Point
	var c ristretto.Scalar
	c.Rand()
	uSq, uInvSq, s := ipproof.VerifScalars()
	sInv := make([]ristretto.Scalar, len(s))
	copy(sInv, s)
	// reverse s
	for i, j := 0, len(sInv)-1; i < j; i, j = i+1, j-1 {
		sInv[i], sInv[j] = sInv[j], sInv[i]
	}
	// g vector scalars : as + z points : G
	as := vector.MulScalar(s, ipproof.A)
	g := vector.AddScalar(as, z)
	g = vector.MulScalar(g, c)
	c1, err := vector.Exp(g, GVec, len(GVec), 1)
	if err != nil {
		return false, err
	}
	// h vector scalars : y Had (bsInv - zM2N) - z points : H
	bs := vector.MulScalar(sInv, ipproof.B)
	zAnd2 := sumZMTwoN(z)
	h, err := vector.Sub(bs, zAnd2)
	if err != nil {
		return false, errors.Wrap(err, "[h1]")
	}
	var yinv ristretto.Scalar
	yinv.Inverse(&y)
	Hpf := vector.ScalarPowers(yinv, uint32(N*M))
	h, err = vector.Hadamard(h, Hpf)
	if err != nil {
		return false, errors.Wrap(err, "[h2]")
	}
	h = vector.SubScalar(h, z)
	h = vector.MulScalar(h, c)
	c2, err = vector.Exp(h, HVec, len(HVec), 1)
	if err != nil {
		return false, err
	}
	// G basepoint gbp : (c * w(ab-t)) + t-D(y,z) point : G
	delta := computeDelta(y, z, N, uint32(M))
	var tMinusDelta ristretto.Scalar
	tMinusDelta.Sub(&t, &delta)
	var abMinusT ristretto.Scalar
	abMinusT.Mul(&ipproof.A, &ipproof.B)
	abMinusT.Sub(&abMinusT, &t)
	var cw ristretto.Scalar
	cw.Mul(&c, &w)
	var gBP ristretto.Scalar
	gBP.MulAdd(&cw, &abMinusT, &tMinusDelta)
	c3.ScalarMult(&G, &gBP)
	// H basepoint hbp : c * mu + taux point: H
	var cmu ristretto.Scalar
	cmu.Mul(&mu, &c)
	var hBP ristretto.Scalar
	hBP.Add(&cmu, &taux)
	c4.ScalarMult(&H, &hBP)
	// scalar :c point: A
	c5.ScalarMult(&A, &c)
	// scalar: cx point : S
	var cx ristretto.Scalar
	cx.Mul(&c, &x)
	c6.ScalarMult(&S, &cx)
	// scalar: uSq challenges points: Lj
	c7, err = vector.Exp(uSq, ipproof.L, len(ipproof.L), 1)
	if err != nil {
		return false, err
	}
	c7.PublicScalarMult(&c7, &c)
	// scalar : uInvSq challenges points: Rj
	c8, err = vector.Exp(uInvSq, ipproof.R, len(ipproof.R), 1)
	if err != nil {
		return false, err
	}
	c8.PublicScalarMult(&c8, &c)
	// scalar: z_j+2 points: Vj
	zM := vector.ScalarPowers(z, uint32(M))
	var zSq ristretto.Scalar
	zSq.Square(&z)
	zM = vector.MulScalar(zM, zSq)
	c9.SetZero()
	for i := range zM {
		var temp ristretto.Point
		temp.PublicScalarMult(&V[i].Value, &zM[i])
		c9.Add(&c9, &temp)
	}
	// scalar : x point: T1
	c10.PublicScalarMult(&T1, &x)
	// scalar : xSq point: T2
	var xSq ristretto.Scalar
	xSq.Square(&x)
	c11.PublicScalarMult(&T2, &xSq)
	// The full equation balances iff the signed sum of all parts is zero.
	var sum ristretto.Point
	sum.SetZero()
	sum.Add(&c1, &c2)
	sum.Add(&sum, &c3)
	sum.Add(&sum, &c4)
	sum.Sub(&sum, &c5)
	sum.Sub(&sum, &c6)
	sum.Sub(&sum, &c7)
	sum.Sub(&sum, &c8)
	sum.Sub(&sum, &c9)
	sum.Sub(&sum, &c10)
	sum.Sub(&sum, &c11)
	var zero ristretto.Point
	zero.SetZero()
	ok := zero.Equals(&sum)
	if !ok {
		return false, errors.New("megacheck failed")
	}
	return true, nil
}
// Encode serialises the proof to w: optionally the value commitments, then
// the points A, S, T1, T2, the scalars taux, mu, t, and finally the
// inner-product proof. Decode must mirror this order exactly.
func (p *Proof) Encode(w io.Writer, includeCommits bool) error {
	if includeCommits {
		if err := pedersen.EncodeCommitments(w, p.V); err != nil {
			return err
		}
	}
	// Fixed serialisation order; each field is a 32-byte encoding.
	fields := [][]byte{
		p.A.Bytes(), p.S.Bytes(), p.T1.Bytes(), p.T2.Bytes(),
		p.taux.Bytes(), p.mu.Bytes(), p.t.Bytes(),
	}
	for _, b := range fields {
		if err := binary.Write(w, binary.BigEndian, b); err != nil {
			return err
		}
	}
	return p.IPProof.Encode(w)
}
// Decode deserialises a proof from r in the exact order Encode wrote it:
// optional commitments, points A, S, T1, T2, scalars taux, mu, t, then the
// inner-product proof.
func (p *Proof) Decode(r io.Reader, includeCommits bool) error {
	if p == nil {
		return errors.New("struct is nil")
	}
	if includeCommits {
		comms, err := pedersen.DecodeCommitments(r)
		if err != nil {
			return err
		}
		p.V = comms
	}
	for _, pt := range []*ristretto.Point{&p.A, &p.S, &p.T1, &p.T2} {
		if err := readerToPoint(r, pt); err != nil {
			return err
		}
	}
	for _, s := range []*ristretto.Scalar{&p.taux, &p.mu, &p.t} {
		if err := readerToScalar(r, s); err != nil {
			return err
		}
	}
	p.IPProof = &innerproduct.Proof{}
	return p.IPProof.Decode(r)
}
// Equals reports whether p and other represent the same proof. The value
// commitments V are only compared when includeCommits is true.
func (p *Proof) Equals(other Proof, includeCommits bool) bool {
	if includeCommits {
		if len(p.V) != len(other.V) {
			return false
		}
		// Bug fix: this loop previously ran even when includeCommits was
		// false, indexing other.V out of range whenever the slices
		// differed in length.
		for i := range p.V {
			if !p.V[i].EqualValue(other.V[i]) {
				return false
			}
		}
	}
	if !p.A.Equals(&other.A) {
		return false
	}
	if !p.S.Equals(&other.S) {
		return false
	}
	if !p.T1.Equals(&other.T1) {
		return false
	}
	if !p.T2.Equals(&other.T2) {
		return false
	}
	if !p.taux.Equals(&other.taux) {
		return false
	}
	if !p.mu.Equals(&other.mu) {
		return false
	}
	if !p.t.Equals(&other.t) {
		return false
	}
	// Bug fix: an unconditional `return true` previously made this final
	// inner-product proof comparison unreachable dead code.
	return p.IPProof.Equals(*other.IPProof)
}
func readerToPoint(r io.Reader, p *ristretto.Point) error {
var x [32]byte
err := binary.Read(r, binary.BigEndian, &x)
if err != nil {
return err
}
ok := p.SetBytes(&x)
if !ok {
return errors.New("point not encodable")
}
return nil
}
func readerToScalar(r io.Reader, s *ristretto.Scalar) error {
var x [32]byte
err := binary.Read(r, binary.BigEndian, &x)
if err != nil {
return err
}
s.SetBytes(&x)
return nil
} | pkg/crypto/rangeproof/rangeproof.go | 0.667039 | 0.472014 | rangeproof.go | starcoder |
package gt
import (
"database/sql/driver"
"time"
)
/*
Shortcut: parses successfully or panics. Should be used only in root scope. When
error handling is relevant, use `.Parse`.
*/
func ParseNullInterval(src string) (val NullInterval) {
try(val.Parse(src))
return
}
// Simplified interval constructor without a time constituent.
func DateNullInterval(years int, months int, days int) NullInterval {
return NullInterval{Years: years, Months: months, Days: days}
}
// Simplified interval constructor without a date constituent.
func TimeNullInterval(hours, mins, secs int) NullInterval {
return NullInterval{Hours: hours, Minutes: mins, Seconds: secs}
}
// Simplified interval constructor.
func NullIntervalFrom(years int, months int, days, hours, mins, secs int) NullInterval {
return NullInterval{years, months, days, hours, mins, secs}
}
// Uses `.SetDuration` and returns the resulting interval.
func DurationNullInterval(src time.Duration) (val NullInterval) {
val.SetDuration(src)
return
}
/*
Variant of `gt.Interval` where zero value is considered empty in text, and null
in JSON and SQL.
*/
type NullInterval Interval
var (
_ = Encodable(NullInterval{})
_ = Decodable((*NullInterval)(nil))
)
// Implement `gt.Zeroable`. Equivalent to `reflect.ValueOf(self).IsZero()`.
func (self NullInterval) IsZero() bool { return Interval(self).IsZero() }
// Implement `gt.Nullable`. True if zero.
func (self NullInterval) IsNull() bool { return self.IsZero() }
/*
Implement `gt.Getter`. If zero, returns `nil`, otherwise uses `.String` to
return a string representation.
*/
func (self NullInterval) Get() interface{} {
if self.IsNull() {
return nil
}
return Interval(self).Get()
}
// Implement `gt.Setter`, using `.Scan`. Panics on error.
func (self *NullInterval) Set(src interface{}) { try(self.Scan(src)) }
// Implement `gt.Zeroer`, zeroing the receiver.
func (self *NullInterval) Zero() { (*Interval)(self).Zero() }
/*
Implement `fmt.Stringer`. If zero, returns an empty string. Otherwise returns a
text representation in the standard machine-readable ISO 8601 format.
*/
func (self NullInterval) String() string {
if self.IsNull() {
return ``
}
return Interval(self).String()
}
/*
Implement `gt.Parser`. If the input is empty, zeroes the receiver. Otherwise
requires a valid machine-readable ISO 8601 representation.
*/
func (self *NullInterval) Parse(src string) error {
if len(src) == 0 {
self.Zero()
return nil
}
return (*Interval)(self).Parse(src)
}
// Implement `gt.Appender`, using the same representation as `.String`.
func (self NullInterval) Append(buf []byte) []byte {
if self.IsNull() {
return buf
}
return Interval(self).Append(buf)
}
/*
Implement `encoding.TextMarhaler`. If zero, returns nil. Otherwise returns the
same representation as `.String`.
*/
func (self NullInterval) MarshalText() ([]byte, error) {
if self.IsNull() {
return nil, nil
}
return Interval(self).MarshalText()
}
// Implement `encoding.TextUnmarshaler`, using the same algorithm as `.Parse`.
func (self *NullInterval) UnmarshalText(src []byte) error {
return self.Parse(bytesString(src))
}
/*
Implement `json.Marshaler`. If zero, returns bytes representing `null`.
Otherwise returns bytes representing a JSON string with the same text as in
`.String`.
*/
func (self NullInterval) MarshalJSON() ([]byte, error) {
if self.IsNull() {
return bytesNull, nil
}
return Interval(self).MarshalJSON()
}
/*
Implement `json.Unmarshaler`. If the input is empty or represents JSON `null`,
zeroes the receiver. Otherwise parses a JSON string, using the same algorithm
as `.Parse`.
*/
func (self *NullInterval) UnmarshalJSON(src []byte) error {
if isJsonEmpty(src) {
self.Zero()
return nil
}
if isJsonStr(src) {
return self.UnmarshalText(cutJsonStr(src))
}
return errJsonString(src, self)
}
// Implement `driver.Valuer`, using `.Get`.
func (self NullInterval) Value() (driver.Value, error) {
return self.Get(), nil
}
/*
Implement `sql.Scanner`, converting an arbitrary input to `gt.NullInterval` and
modifying the receiver. Acceptable inputs:
* `nil` -> use `.Zero`
* `string` -> use `.Parse`
* `[]byte` -> use `.UnmarshalText`
* `time.Duration` -> use `.SetDuration`
* `*time.Duration` -> use `.Zero` or `.SetDuration`
* `gt.Interval` -> assign
* `*gt.Interval` -> use `.Zero` or assign
* `gt.NullInterval` -> assign
* `gt.Getter` -> scan underlying value
*/
func (self *NullInterval) Scan(src interface{}) error {
switch src := src.(type) {
case nil:
self.Zero()
return nil
case string:
return self.Parse(src)
case []byte:
return self.UnmarshalText(src)
case time.Duration:
self.SetDuration(src)
return nil
case *time.Duration:
if src == nil {
self.Zero()
} else {
self.SetDuration(*src)
}
return nil
case Interval:
*self = NullInterval(src)
return nil
case *Interval:
if src == nil {
self.Zero()
} else {
*self = NullInterval(*src)
}
return nil
case NullInterval:
*self = src
return nil
default:
val, ok := get(src)
if ok {
return self.Scan(val)
}
return errScanType(self, src)
}
}
// Same as `(*gt.Interval).SetDuration`.
func (self *NullInterval) SetDuration(val time.Duration) {
(*Interval)(self).SetDuration(val)
}
// Same as `gt.Interval.Date`.
func (self NullInterval) Date() (years int, months int, days int) {
return Interval(self).Date()
}
// Same as `gt.Interval.OnlyDate`.
func (self NullInterval) OnlyDate() NullInterval {
return NullInterval(Interval(self).OnlyDate())
}
// Same as `gt.Interval.OnlyTime`.
func (self NullInterval) OnlyTime() NullInterval {
return NullInterval(Interval(self).OnlyTime())
}
// Same as `gt.Interval.HasDate`.
func (self NullInterval) HasDate() bool {
return Interval(self).HasDate()
}
// Same as `gt.Interval.HasTime`.
func (self NullInterval) HasTime() bool {
return Interval(self).HasTime()
}
// Same as `gt.Interval.Duration`.
func (self NullInterval) Duration() time.Duration {
return Interval(self).Duration()
}
// Returns a version of this interval with `.Years = val`.
func (self NullInterval) WithYears(val int) NullInterval {
return NullInterval(Interval(self).WithYears(val))
}
// Returns a version of this interval with `.Months = val`.
func (self NullInterval) WithMonths(val int) NullInterval {
return NullInterval(Interval(self).WithMonths(val))
}
// Returns a version of this interval with `.Days = val`.
func (self NullInterval) WithDays(val int) NullInterval {
return NullInterval(Interval(self).WithDays(val))
}
// Returns a version of this interval with `.Hours = val`.
func (self NullInterval) WithHours(val int) NullInterval {
return NullInterval(Interval(self).WithHours(val))
}
// Returns a version of this interval with `.Minutes = val`.
func (self NullInterval) WithMinutes(val int) NullInterval {
return NullInterval(Interval(self).WithMinutes(val))
}
// Returns a version of this interval with `.Seconds = val`.
func (self NullInterval) WithSeconds(val int) NullInterval {
return NullInterval(Interval(self).WithSeconds(val))
}
// Returns a version of this interval with `.Years += val`.
func (self NullInterval) AddYears(val int) NullInterval {
return NullInterval(Interval(self).AddYears(val))
}
// Returns a version of this interval with `.Months += val`.
func (self NullInterval) AddMonths(val int) NullInterval {
return NullInterval(Interval(self).AddMonths(val))
}
// Returns a version of this interval with `.Days += val`.
func (self NullInterval) AddDays(val int) NullInterval {
return NullInterval(Interval(self).AddDays(val))
}
// Returns a version of this interval with `.Hours += val`.
func (self NullInterval) AddHours(val int) NullInterval {
return NullInterval(Interval(self).AddHours(val))
}
// Returns a version of this interval with `.Minutes += val`.
func (self NullInterval) AddMinutes(val int) NullInterval {
return NullInterval(Interval(self).AddMinutes(val))
}
// Returns a version of this interval with `.Seconds += val`.
func (self NullInterval) AddSeconds(val int) NullInterval {
return NullInterval(Interval(self).AddSeconds(val))
}
/*
Adds every field of one interval to every field of another interval, returning
the sum. Does NOT convert different time units, such as seconds to minutes or
vice versa.
*/
func (self NullInterval) Add(val NullInterval) NullInterval {
return NullInterval(Interval(self).Add(Interval(val)))
}
/*
Subtracts every field of one interval from every corresponding field of another
interval, returning the difference. Does NOT convert different time units, such
as seconds to minutes or vice versa.
*/
func (self NullInterval) Sub(val NullInterval) NullInterval {
return NullInterval(Interval(self).Sub(Interval(val)))
}
/*
Returns a version of this interval with every field inverted: positive fields
become negative, and negative fields become positive.
*/
func (self NullInterval) Neg() NullInterval {
return NullInterval(Interval(self).Neg())
} | gt_null_interval.go | 0.845656 | 0.538316 | gt_null_interval.go | starcoder |
package trie
import "github.com/zainkai/go-collections/queue"
type TrieNode struct {
Key byte
Conns map[byte]*TrieNode
IsWord bool
Word string
}
// Trie implementation
type Trie struct {
root *TrieNode
}
// New create a Trie
func New() *Trie {
root := &TrieNode{
Key: byte(0),
Conns: make(map[byte]*TrieNode),
IsWord: true,
}
return &Trie{root}
}
func toBytes(s string) []byte {
return []byte(s)
}
func (t *Trie) Insert(word string) {
wordArray := toBytes(word)
t.InsertBytes(wordArray)
}
// InsertBytes adds new word to trie
// O(k)
func (t *Trie) InsertBytes(word []byte) {
curNode := t.root
for _, char := range word {
nextNode, ok := curNode.Conns[char]
if !ok {
nextNode = &TrieNode{
Key: char,
Conns: make(map[byte]*TrieNode),
IsWord: false,
Word: string(word),
}
curNode.Conns[char] = nextNode
}
curNode = nextNode
}
curNode.IsWord = true
}
// SearchWord search if full word was inserted into trie
// O(k)
func (t *Trie) SearchWord(word string) bool {
wordArray := toBytes(word)
return t.SearchBytes(wordArray)
}
// SearchBytes search if full word was inserted into trie
// O(k)
func (t *Trie) SearchBytes(word []byte) bool {
n := t.searchTrie(word)
return n != nil && n.IsWord
}
// IsPrefix search if prefix path exists in trie
// O(k)
func (t *Trie) IsPrefix(word string) bool {
wordArray := toBytes(word)
return t.IsPrefixBytes(wordArray)
}
// IsPrefixBytes search if prefix path exists in trie
// O(k)
func (t *Trie) IsPrefixBytes(word []byte) bool {
return t.searchTrie(word) != nil
}
func (t *Trie) searchTrie(word []byte) *TrieNode {
curNode := t.root
for _, char := range word {
nextNode, ok := curNode.Conns[char]
if !ok {
return nil
}
curNode = nextNode
}
return curNode
}
// GetSuggestionsBytes from a word look for all valid connecting words in trie
// O(K + N)
func (t *Trie) GetSuggestionsBytes(word []byte) []string {
foundNode := t.searchTrie(word)
connectedWords := []string{}
if foundNode == nil {
return connectedWords
}
queue := queue.New()
queue.Enqueue(foundNode)
for queue.Length > 0 {
curNode := queue.Dequeue().(*TrieNode)
if curNode.IsWord {
connectedWords = append(connectedWords, curNode.Word)
}
for _, conn := range curNode.Conns {
queue.Enqueue(conn)
}
}
return connectedWords
}
// GetSuggestions from a word look for all valid connecting words in trie
// O(K + N)
func (t *Trie) GetSuggestions(word string) []string {
wordArray := toBytes(word)
return t.GetSuggestionsBytes(wordArray)
} | trie/trie.go | 0.709221 | 0.427755 | trie.go | starcoder |
package haar
import (
"image"
"image/color"
"math"
)
// ColourChannels is the number of channels for one color. We will be using
// three colour channels per pixel at all times.
const ColourChannels = 3
// Coef is the union of coefficients for all channels of the original image.
type Coef [ColourChannels]float64
// Add adds another coefficient in place.
func (coef *Coef) Add(offset Coef) {
for index := range coef {
coef[index] += offset[index]
}
}
// Subtract subtracts another coefficient in place.
func (coef *Coef) Subtract(offset Coef) {
for index := range coef {
coef[index] -= offset[index]
}
}
// Divide divides all elements of the coefficient by a value, in place.
func (coef *Coef) Divide(value float64) {
factor := 1.0 / value
for index := range coef {
coef[index] *= factor // Slightly faster.
}
}
// Matrix is the result of the Haar transform, a two-dimensional matrix of
// coefficients.
type Matrix struct {
// Coefs is the slice of coefficients resulting from a forward 2D Haar
// transform. The position of a coefficient (x,y) is (y * Width + x).
Coefs []Coef
// The number of columns in the matrix.
Width uint
// The number of rows in the matrix.
Height uint
}
// colorToCoef converts a native Color type into a YIQ Coef. We are using
// YIQ because we only have weights for them. (Apart from the score weights,
// the store is built to handle different sized Coef's so any length may be
// returned.)
func colorToCoef(gen color.Color) Coef {
// Convert into YIQ. (We may want to convert from YCbCr directly one day.)
r32, g32, b32, _ := gen.RGBA()
r, g, b := float64(r32>>8), float64(g32>>8), float64(b32>>8)
return Coef{
(0.299900*r + 0.587000*g + 0.114000*b) / 0x100,
(0.595716*r - 0.274453*g - 0.321263*b) / 0x100,
(0.211456*r - 0.522591*g + 0.311135*b) / 0x100}
}
// Transform performs a forward 2D Haar transform on the provided image after
// converting it to YIQ space.
func Transform(img image.Image) Matrix {
bounds := img.Bounds()
width := bounds.Max.X - bounds.Min.X
height := bounds.Max.Y - bounds.Min.Y
if width > 2 {
// We can't handle odd widths.
width = width &^ 1
}
if height > 2 {
// We can't handle odd heights.
height = height &^ 1
}
matrix := Matrix{
Coefs: make([]Coef, width*height),
Width: uint(width),
Height: uint(height)}
// Convert colours to coefficients.
for row := bounds.Min.Y; row < bounds.Min.Y+height; row++ {
for column := bounds.Min.X; column < bounds.Min.X+width; column++ {
matrix.Coefs[(row-bounds.Min.Y)*width+(column-bounds.Min.X)] = colorToCoef(img.At(column, row))
}
}
// Apply 1D Haar transform on rows.
tempRow := make([]Coef, width)
for row := 0; row < height; row++ {
for step := width / 2; step >= 1; step /= 2 {
for column := 0; column < step; column++ {
high := matrix.Coefs[row*width+2*column]
low := high
offset := matrix.Coefs[row*width+2*column+1]
high.Add(offset)
low.Subtract(offset)
high.Divide(math.Sqrt2)
low.Divide(math.Sqrt2)
tempRow[column] = high
tempRow[column+step] = low
}
for column := 0; column < width; column++ {
matrix.Coefs[row*width+column] = tempRow[column]
}
}
}
// Apply 1D Haar transform on columns.
tempColumn := make([]Coef, height)
for column := 0; column < width; column++ {
for step := height / 2; step >= 1; step /= 2 {
for row := 0; row < step; row++ {
high := matrix.Coefs[(2*row)*width+column]
low := high
offset := matrix.Coefs[(2*row+1)*width+column]
high.Add(offset)
low.Subtract(offset)
high.Divide(math.Sqrt2)
low.Divide(math.Sqrt2)
tempColumn[row] = high
tempColumn[row+step] = low
}
for row := 0; row < height; row++ {
matrix.Coefs[row*width+column] = tempColumn[row]
}
}
}
return matrix
} | duplo/haar/haar.go | 0.753829 | 0.54577 | haar.go | starcoder |
package node
import (
"github.com/densify-dev/Container-Optimization-Data-Forwarder/internal/common"
"github.com/prometheus/common/model"
)
//Gets node metrics from prometheus (and checks to see if they are valid)
func getNodeMetric(result model.Value, node model.LabelName, metric string) {
if result == nil {
return
}
//Loop through the different entities in the results.
for i := 0; i < result.(model.Matrix).Len(); i++ {
nodeValue, ok := result.(model.Matrix)[i].Metric[node]
if !ok {
continue
}
if _, ok := nodes[string(nodeValue)]; !ok {
continue
}
//validates that the value of the entity is set and if not will default to 0
var value float64
if len(result.(model.Matrix)[i].Values) != 0 {
value = float64(result.(model.Matrix)[i].Values[len(result.(model.Matrix)[i].Values)-1].Value)
}
//Check which metric this is for and update the corresponding variable for this container in the system data structure
if metric == "capacity" {
capacityType := result.(model.Matrix)[i].Metric["resource"]
switch capacityType {
case "cpu":
nodes[string(nodeValue)].cpuCapacity = int(value)
case "memory":
nodes[string(nodeValue)].memCapacity = int(value)
case "pods":
nodes[string(nodeValue)].podsCapacity = int(value)
case "ephemeral_storage":
nodes[string(nodeValue)].ephemeralStorageCapacity = int(value)
case "hugepages_2Mi":
nodes[string(nodeValue)].hugepages2MiCapacity = int(value)
}
} else if metric == "allocatable" {
capacityType := result.(model.Matrix)[i].Metric["resource"]
switch capacityType {
case "cpu":
nodes[string(nodeValue)].cpuAllocatable = int(value)
case "memory":
nodes[string(nodeValue)].memAllocatable = int(value)
case "pods":
nodes[string(nodeValue)].podsAllocatable = int(value)
case "ephemeral_storage":
nodes[string(nodeValue)].ephemeralStorageAllocatable = int(value)
case "hugepages_2Mi":
nodes[string(nodeValue)].hugepages2MiAllocatable = int(value)
}
} else {
switch metric {
case "capacity_cpu":
nodes[string(nodeValue)].cpuCapacity = int(value)
case "capacity_mem":
nodes[string(nodeValue)].memCapacity = int(value)
case "capacity_pod":
nodes[string(nodeValue)].podsCapacity = int(value)
case "allocatable_cpu":
nodes[string(nodeValue)].cpuAllocatable = int(value)
case "allocatable_mem":
nodes[string(nodeValue)].memAllocatable = int(value)
case "allocatable_pod":
nodes[string(nodeValue)].podsAllocatable = int(value)
case "netSpeedBytes":
nodes[string(nodeValue)].netSpeedBytes = int(value)
case "limits":
switch result.(model.Matrix)[i].Metric["resource"] {
case "memory":
nodes[string(nodeValue)].memLimit = int(value / 1024 / 1024)
case "cpu":
nodes[string(nodeValue)].cpuLimit = int(value * 1000)
}
case "requests":
switch result.(model.Matrix)[i].Metric["resource"] {
case "memory":
nodes[string(nodeValue)].memRequest = int(value / 1024 / 1024)
case "cpu":
nodes[string(nodeValue)].cpuRequest = int(value * 1000)
}
case "cpuLimit":
nodes[string(nodeValue)].cpuLimit = int(value)
case "cpuRequest":
nodes[string(nodeValue)].cpuRequest = int(value)
case "memLimit":
nodes[string(nodeValue)].memLimit = int(value)
case "memRequest":
nodes[string(nodeValue)].memRequest = int(value)
}
}
}
}
//getNodeMetricString is used to parse the label based results from Prometheus related to Container Entities and store them in the systems data structure.
func getNodeMetricString(result model.Value, node model.LabelName) {
//Validate there is data in the results.
if result == nil {
return
}
//Loop through the different entities in the results.
for i := 0; i < result.(model.Matrix).Len(); i++ {
nodeValue, ok := result.(model.Matrix)[i].Metric[node]
if !ok {
continue
}
if _, ok := nodes[string(nodeValue)]; !ok {
continue
}
for key, value := range result.(model.Matrix)[i].Metric {
common.AddToLabelMap(string(key), string(value), nodes[string(nodeValue)].labelMap)
}
}
} | internal/node/collection.go | 0.504883 | 0.419529 | collection.go | starcoder |
package graph
// Edge represents an edge in a graph with access to its node endpoints.
// If its in a directed graph, it also has access to the node the edge comes from
// and the node the edge goes into. It can also store a value.
type Edge interface {
// GetTo returns the node the edge goes into in a directed graph.
// In a undirected graph, this returns a "cannot use this method" error.
GetTo() (Node, error)
// GetFrom returns the node the edge comes from in a directed graph.
// In a undirected graph, this returns a "cannot use this method" error.
GetFrom() (Node, error)
// GetNodes returns the endpoint nodes of this edge.
// Nodes are sorted by id (ascending).
GetNodes() ([]Node, error)
// GetValue returns the value stored in this edge.
// If there is no value then this returns a "no value" error.
GetValue() (interface{}, error)
removeRef() Edge
}
type rawDirectedEdge struct {
From NodeID
To NodeID
RawGraphRef *rawDirectedGraph
Value wrappedValue
}
func (re rawDirectedEdge) GetTo() (Node, error) {
return re.RawGraphRef.GetNode(re.To)
}
func (re rawDirectedEdge) GetFrom() (Node, error) {
return re.RawGraphRef.GetNode(re.From)
}
func (re rawDirectedEdge) GetNodes() ([]Node, error) {
nodes := make([]Node, 0)
from, err := re.GetFrom()
if err != nil {
return nodes, err
}
to, err := re.GetTo()
if err != nil {
return nodes, err
}
nodes = append(nodes, from)
if re.From != re.To {
nodes = append(nodes, to)
}
return nodes, nil
}
func (re rawDirectedEdge) GetValue() (interface{}, error) {
if !re.Value.HasValue {
return nil, noValueFoundInEdgeError{fromID: re.From, toID: re.To}
}
return re.Value.RawValue, nil
}
func (re rawDirectedEdge) removeRef() Edge {
re.RawGraphRef = nil
return re
}
type rawUndirectedEdge struct {
Nodes [2]NodeID
RawGraphRef *rawUndirectedGraph
Value wrappedValue
}
func (re rawUndirectedEdge) GetTo() (Node, error) {
return nil, cannotUseForUndirectedGraphError{"Edge.GetTo"}
}
func (re rawUndirectedEdge) GetFrom() (Node, error) {
return nil, cannotUseForUndirectedGraphError{"Edge.GetFrom"}
}
func (re rawUndirectedEdge) GetNodes() ([]Node, error) {
nodes := make([]Node, 0)
node, err := re.RawGraphRef.GetNode(re.Nodes[0])
if err != nil {
return nodes, err
}
nodes = append(nodes, node)
node, err = re.RawGraphRef.GetNode(re.Nodes[1])
if err != nil {
return nodes, err
}
if re.Nodes[0] != re.Nodes[1] {
nodes = append(nodes, node)
}
return nodes, nil
}
func (re rawUndirectedEdge) GetValue() (interface{}, error) {
if !re.Value.HasValue {
return nil, noValueFoundInEdgeError{fromID: re.Nodes[0], toID: re.Nodes[1]}
}
return re.Value.RawValue, nil
}
func (re rawUndirectedEdge) removeRef() Edge {
re.RawGraphRef = nil
return re
} | graph/edge.go | 0.851768 | 0.627267 | edge.go | starcoder |
package main
import (
"chaincode/errors"
"strconv"
)
// ------------------------------------------
// Methods on receivers composite traintuple
// ------------------------------------------
// SetFromInput is a method of the receiver CompositeTraintuple.
// It uses the inputCompositeTraintuple to check and set the traintuple's parameters
// which don't depend on previous traintuples values :
//  - AssetType
//  - Creator & permissions
//  - Tag
//  - AlgoKey & ObjectiveKey
//  - Dataset
func (traintuple *CompositeTraintuple) SetFromInput(db *LedgerDB, inp inputCompositeTraintuple) error {
	creator, err := GetTxCreator(db.cc)
	if err != nil {
		return err
	}
	traintuple.Key = inp.Key
	traintuple.AssetType = CompositeTraintupleType
	traintuple.Creator = creator
	traintuple.ComputePlanKey = inp.ComputePlanKey
	traintuple.Metadata = inp.Metadata
	traintuple.Tag = inp.Tag
	// The transaction creator must be authorized to process the composite algo.
	algo, err := db.GetCompositeAlgo(inp.AlgoKey)
	if err != nil {
		return errors.BadRequest(err, "could not retrieve Composite algo with key %s", inp.AlgoKey)
	}
	if !algo.Permissions.CanProcess(algo.Owner, creator) {
		return errors.Forbidden("not authorized to process algo %s", inp.AlgoKey)
	}
	traintuple.AlgoKey = inp.AlgoKey
	// check if DataSampleKeys are from the same dataManager and if they are not test only dataSample
	_, trainOnly, err := checkSameDataManager(db, inp.DataManagerKey, inp.DataSampleKeys)
	if err != nil {
		return err
	}
	if !trainOnly {
		return errors.BadRequest("not possible to create a traintuple with test only data")
	}
	dataManager, err := db.GetDataManager(inp.DataManagerKey)
	if err != nil {
		return errors.BadRequest(err, "could not retrieve dataManager with key %s", inp.DataManagerKey)
	}
	if !dataManager.Permissions.CanProcess(dataManager.Owner, creator) {
		return errors.Forbidden("not authorized to process dataManager %s", inp.DataManagerKey)
	}
	// fill traintuple.Dataset from dataManager and dataSample
	traintuple.Dataset = &Dataset{
		DataManagerKey: inp.DataManagerKey,
		DataSampleKeys: inp.DataSampleKeys,
	}
	traintuple.Dataset.Worker, err = getDataManagerOwner(db, traintuple.Dataset.DataManagerKey)
	// BUGFIX: this error was previously never checked and was silently
	// overwritten by the NewPermissions call below.
	if err != nil {
		return err
	}
	// permissions (head): worker only where the data belong
	workerOnly := Permission{
		Public:        false,
		AuthorizedIDs: []string{traintuple.Dataset.Worker}}
	traintuple.OutHeadModel.Permissions = Permissions{Process: workerOnly, Download: workerOnly}
	// permissions (trunk): dictated by input
	permissions, err := NewPermissions(db, inp.OutTrunkModelPermissions)
	if err != nil {
		return err
	}
	traintuple.OutTrunkModel.Permissions = permissions
	return nil
}
// SetFromParents set the status of the traintuple depending on its "parents",
// i.e. the traintuples from which it received the outModels as inModels.
// Also it's InModelKeys are set.
// TODO: rename to SetInModels
func (traintuple *CompositeTraintuple) SetFromParents(db *LedgerDB, inp inputCompositeTraintuple) error {
	traintuple.Status = StatusTodo
	if inp.InHeadModelKey == "" || inp.InTrunkModelKey == "" {
		return nil
	}
	// [Head]
	// It can only be a composite traintuple's head out model
	traintuple.InHeadModel = inp.InHeadModelKey
	head, err := db.GetGenericTuple(inp.InHeadModelKey)
	if err != nil {
		return err
	}
	if !typeInSlice(head.AssetType, []AssetType{CompositeTraintupleType}) {
		return errors.BadRequest(
			"tuple type %s from key %s is not supported as head InModel",
			head.AssetType,
			inp.InHeadModelKey)
	}
	// Head Model is only processable on the same worker
	compositeTraintuple, err := db.GetCompositeTraintuple(inp.InHeadModelKey)
	// BUGFIX: this error was previously unchecked before dereferencing
	// compositeTraintuple.Dataset below.
	if err != nil {
		return err
	}
	if traintuple.Dataset.Worker != compositeTraintuple.Dataset.Worker {
		return errors.BadRequest(
			"Dataset worker (%s) and head InModel owner (%s) must be the same",
			traintuple.Dataset.Worker,
			compositeTraintuple.Dataset.Worker)
	}
	// [Trunk]
	// It can be either:
	// - a traintuple's out model
	// - a composite traintuple's trunk out model
	// - an aggregate tuple's out model
	traintuple.InTrunkModel = inp.InTrunkModelKey
	trunk, err := db.GetGenericTuple(inp.InTrunkModelKey)
	if err != nil {
		return err
	}
	if !typeInSlice(trunk.AssetType, []AssetType{TraintupleType, CompositeTraintupleType, AggregatetupleType}) {
		return errors.BadRequest(
			"tuple type %s from key %s is not supported as trunk InModel",
			trunk.AssetType,
			inp.InTrunkModelKey)
	}
	// The new tuple's status depends on whether both parents are done yet.
	traintuple.Status = determineStatusFromInModels([]string{head.Status, trunk.Status})
	return nil
}
// AddToComputePlan set the traintuple's parameters that determines if it's part of on ComputePlan and how.
// It uses the inputCompositeTraintuple values as follow:
//  - If neither ComputePlanKey nor rank is set it returns immediately
//  - If rank is 0 and ComputePlanKey empty, it's start a new one using this traintuple key
//  - If rank and ComputePlanKey are set, it checks if there are coherent with previous ones and set it.
// Use checkComputePlanAvailability to ensure the compute plan exists and no other tuple is registered with the same worker/rank
func (traintuple *CompositeTraintuple) AddToComputePlan(db *LedgerDB, inp inputCompositeTraintuple, traintupleKey string, checkComputePlanAvailability bool) error {
	// check ComputePlanKey and Rank and set it when required
	if inp.Rank == "" {
		if inp.ComputePlanKey != "" {
			return errors.BadRequest("invalid inputs, a ComputePlan should have a rank")
		}
		return nil
	}
	var err error
	traintuple.Rank, err = strconv.Atoi(inp.Rank)
	if err != nil {
		return err
	}
	traintuple.ComputePlanKey = inp.ComputePlanKey
	// Register this tuple into its compute plan and persist the plan update.
	computePlan, err := db.GetComputePlan(inp.ComputePlanKey)
	if err != nil {
		return err
	}
	err = computePlan.AddTuple(db, CompositeTraintupleType, traintupleKey, traintuple.Status, traintuple.Dataset.Worker)
	if err != nil {
		return err
	}
	err = computePlan.Save(db, traintuple.ComputePlanKey)
	if err != nil {
		return err
	}
	if !checkComputePlanAvailability {
		return nil
	}
	// Make sure no other tuple already occupies this worker/rank slot in the plan.
	ttKeys, err := db.GetIndexKeys("computePlan~computeplankey~worker~rank~key", []string{"computePlan", inp.ComputePlanKey, traintuple.Dataset.Worker, inp.Rank})
	if err != nil {
		return err
	}
	if len(ttKeys) > 0 {
		return errors.BadRequest("ComputePlanKey %s with worker %s rank %d already exists", inp.ComputePlanKey, traintuple.Dataset.Worker, traintuple.Rank)
	}
	return nil
}
// Save will put in the legder interface both the traintuple with its key
// and all the associated composite keys
func (traintuple *CompositeTraintuple) Save(db *LedgerDB, traintupleKey string) error {
	// store in ledger
	if err := db.Add(traintupleKey, traintuple); err != nil {
		return err
	}
	// Composite keys to create, first element of each row being the index name.
	// TODO: Do we create an index for head/trunk inModel or do we concider that
	// they are classic inModels ?
	rows := [][]string{
		{"compositeTraintuple~algo~key", "compositeTraintuple", traintuple.AlgoKey, traintupleKey},
		{"compositeTraintuple~worker~status~key", "compositeTraintuple", traintuple.Dataset.Worker, traintuple.Status, traintupleKey},
		{"tuple~inModel~key", "tuple", traintuple.InHeadModel, traintupleKey},
		{"tuple~inModel~key", "tuple", traintuple.InTrunkModel, traintupleKey},
	}
	if traintuple.ComputePlanKey != "" {
		rows = append(rows,
			[]string{"computePlan~computeplankey~worker~rank~key", "computePlan", traintuple.ComputePlanKey, traintuple.Dataset.Worker, strconv.Itoa(traintuple.Rank), traintupleKey},
			[]string{"algo~computeplankey~key", "algo", traintuple.ComputePlanKey, traintuple.AlgoKey})
	}
	if traintuple.Tag != "" {
		rows = append(rows, []string{"compositeTraintuple~tag~key", "compositeTraintuple", traintuple.Tag, traintupleKey})
	}
	for _, row := range rows {
		if err := db.CreateIndex(row[0], row[1:]); err != nil {
			return err
		}
	}
	return nil
}
// -------------------------------------------------
// Smart contracts related to composite traintuples
// -------------------------------------------------
// createCompositeTraintuple is the wrapper for the substra smartcontract createCompositeTraintuple
func createCompositeTraintuple(db *LedgerDB, args []string) (outputKey, error) {
	// Decode the JSON arguments into the expected input struct.
	var inp inputCompositeTraintuple
	if err := AssetFromJSON(args, &inp); err != nil {
		return outputKey{}, err
	}
	// Delegate the actual registration, enforcing compute-plan availability.
	key, err := createCompositeTraintupleInternal(db, inp, true)
	if err != nil {
		return outputKey{}, err
	}
	return outputKey{Key: key}, nil
}
// createCompositeTraintupleInternal adds a CompositeTraintuple in the ledger
func createCompositeTraintupleInternal(db *LedgerDB, inp inputCompositeTraintuple, checkComputePlanAvailability bool) (string, error) {
	// Populate the tuple from the raw input, then from its parent models.
	var tuple CompositeTraintuple
	if err := tuple.SetFromInput(db, inp); err != nil {
		return "", err
	}
	if err := tuple.SetFromParents(db, inp); err != nil {
		return "", err
	}
	// Reject duplicate registrations under the same key.
	exists, err := db.KeyExists(tuple.Key)
	if err != nil {
		return "", err
	}
	if exists {
		return "", errors.Conflict("composite traintuple already exists").WithKey(tuple.Key)
	}
	// Attach to a compute plan when requested, persist, and emit the event.
	if err := tuple.AddToComputePlan(db, inp, tuple.Key, checkComputePlanAvailability); err != nil {
		return "", err
	}
	if err := tuple.Save(db, tuple.Key); err != nil {
		return "", err
	}
	if err := db.AddTupleEvent(tuple.Key); err != nil {
		return "", err
	}
	return tuple.Key, nil
}
// logStartCompositeTrain modifies a traintuple by changing its status from todo to doing
func logStartCompositeTrain(db *LedgerDB, args []string) (outputCompositeTraintuple, error) {
	var out outputCompositeTraintuple
	var inp inputKey
	if err := AssetFromJSON(args, &inp); err != nil {
		return out, err
	}
	// get compositeTraintuple, check validity of the update
	tuple, err := db.GetCompositeTraintuple(inp.Key)
	if err != nil {
		return out, err
	}
	// Only the worker that owns the dataset may start the training.
	if err := validateTupleOwner(db, tuple.Dataset.Worker); err != nil {
		return out, err
	}
	if err := tuple.commitStatusUpdate(db, inp.Key, StatusDoing); err != nil {
		return out, err
	}
	err = out.Fill(db, tuple)
	return out, err
}
// logSuccessCompositeTrain modifies a traintuple by changing its status from doing to done
// reports logs and associated performances
func logSuccessCompositeTrain(db *LedgerDB, args []string) (o outputCompositeTraintuple, err error) {
	status := StatusDone
	inp := inputLogSuccessCompositeTrain{}
	err = AssetFromJSON(args, &inp)
	if err != nil {
		return
	}
	compositeTraintupleKey := inp.Key
	// get, update and commit traintuple
	compositeTraintuple, err := db.GetCompositeTraintuple(compositeTraintupleKey)
	if err != nil {
		return
	}
	// Record both output models: the head model is stored as key+checksum
	// only, while the trunk model additionally carries a storage address.
	compositeTraintuple.OutHeadModel.OutModel = &KeyChecksum{
		Key:      inp.OutHeadModel.Key,
		Checksum: inp.OutHeadModel.Checksum}
	compositeTraintuple.OutTrunkModel.OutModel = &KeyChecksumAddress{
		Key:            inp.OutTrunkModel.Key,
		Checksum:       inp.OutTrunkModel.Checksum,
		StorageAddress: inp.OutTrunkModel.StorageAddress}
	compositeTraintuple.Log += inp.Log
	// Index both models so they can be looked up by key later on.
	err = createModelIndex(db, inp.OutHeadModel.Key, compositeTraintupleKey)
	if err != nil {
		return
	}
	err = createModelIndex(db, inp.OutTrunkModel.Key, compositeTraintupleKey)
	if err != nil {
		return
	}
	// Only the worker owning the dataset may report success.
	if err = validateTupleOwner(db, compositeTraintuple.Dataset.Worker); err != nil {
		return
	}
	if err = compositeTraintuple.commitStatusUpdate(db, compositeTraintupleKey, status); err != nil {
		return
	}
	// Register both output models with the compute plan machinery.
	err = TryAddIntermediaryModel(db, compositeTraintuple.ComputePlanKey, compositeTraintuple.Dataset.Worker, compositeTraintupleKey, inp.OutHeadModel.Key)
	if err != nil {
		return
	}
	err = TryAddIntermediaryModel(db, compositeTraintuple.ComputePlanKey, compositeTraintuple.Dataset.Worker, compositeTraintupleKey, inp.OutTrunkModel.Key)
	if err != nil {
		return
	}
	// Propagate the new Done status to dependent train and test tuples.
	err = UpdateTraintupleChildren(db, compositeTraintupleKey, compositeTraintuple.Status, []string{})
	if err != nil {
		return
	}
	err = UpdateTesttupleChildren(db, compositeTraintupleKey, compositeTraintuple.Status)
	if err != nil {
		return
	}
	err = o.Fill(db, compositeTraintuple)
	return
}
// logFailCompositeTrain modifies a traintuple by changing its status to fail and reports associated logs
func logFailCompositeTrain(db *LedgerDB, args []string) (o outputCompositeTraintuple, err error) {
	status := StatusFailed
	inp := inputLogFailTrain{}
	err = AssetFromJSON(args, &inp)
	if err != nil {
		return
	}
	// get, update and commit traintuple
	compositeTraintuple, err := db.GetCompositeTraintuple(inp.Key)
	if err != nil {
		return
	}
	compositeTraintuple.Log += inp.Log
	// Only the worker owning the dataset may report the failure.
	if err = validateTupleOwner(db, compositeTraintuple.Dataset.Worker); err != nil {
		return
	}
	if err = compositeTraintuple.commitStatusUpdate(db, inp.Key, status); err != nil {
		return
	}
	err = o.Fill(db, compositeTraintuple)
	if err != nil {
		return
	}
	// Do not propagate failure if we are in a compute plan
	// (presumably the compute-plan state update inside commitStatusUpdate
	// covers the dependent tuples — TODO confirm against UpdateComputePlanState).
	if compositeTraintuple.ComputePlanKey != "" {
		return
	}
	// update depending tuples
	err = UpdateTesttupleChildren(db, inp.Key, compositeTraintuple.Status)
	if err != nil {
		return
	}
	err = UpdateTraintupleChildren(db, inp.Key, compositeTraintuple.Status, []string{})
	return
}
// queryCompositeTraintuple returns info about a composite traintuple given its key.
// It fails with NotFound when the key exists but refers to another asset type.
func queryCompositeTraintuple(db *LedgerDB, args []string) (outputTraintuple outputCompositeTraintuple, err error) {
	inp := inputKey{}
	err = AssetFromJSON(args, &inp)
	if err != nil {
		return
	}
	traintuple, err := db.GetCompositeTraintuple(inp.Key)
	if err != nil {
		return
	}
	if traintuple.AssetType != CompositeTraintupleType {
		err = errors.NotFound("no element with key %s", inp.Key)
		return
	}
	// Propagate Fill's error instead of silently dropping it (it was
	// previously ignored, which could return a partially filled output
	// as a success).
	err = outputTraintuple.Fill(db, traintuple)
	return
}
// queryCompositeTraintuples returns all composite traintuples, one page at a
// time. The optional single argument carries a pagination bookmark; the
// bookmark for the next page is returned alongside the results.
func queryCompositeTraintuples(db *LedgerDB, args []string) (outTraintuples []outputCompositeTraintuple, bookmark string, err error) {
	inp := inputBookmark{}
	outTraintuples = []outputCompositeTraintuple{}
	if len(args) > 1 {
		err = errors.BadRequest("incorrect number of arguments, expecting at most one argument")
		return
	}
	if len(args) == 1 && args[0] != "" {
		err = AssetFromJSON(args, &inp)
		if err != nil {
			return
		}
	}
	// Page through the composite traintuple index, OutputPageSize keys at a time.
	elementsKeys, bookmark, err := db.GetIndexKeysWithPagination("compositeTraintuple~algo~key", []string{"compositeTraintuple"}, OutputPageSize, inp.Bookmark)
	if err != nil {
		return
	}
	for _, key := range elementsKeys {
		// err here shadows the named result, hence the explicit return below.
		outputTraintuple, err := getOutputCompositeTraintuple(db, key)
		if err != nil {
			return outTraintuples, bookmark, err
		}
		outTraintuples = append(outTraintuples, outputTraintuple)
	}
	return
}
// ----------------------------------------------------------
// Utils for smartcontracts related to composite traintuples
// ----------------------------------------------------------

// UpdateCompositeTraintupleChild updates the status of a waiting traintuple,
// given the new parent traintuple status. Returns the child's (possibly
// updated) status.
func UpdateCompositeTraintupleChild(db *LedgerDB, parentTraintupleKey string, childTraintupleKey string, traintupleStatus string) (childStatus string, err error) {
	// get and update traintuple
	childTraintuple, err := db.GetCompositeTraintuple(childTraintupleKey)
	if err != nil {
		return
	}
	childStatus = childTraintuple.Status
	// get traintuple new status: a failed parent fails the child; a done
	// parent promotes the child to todo once all its parents are done.
	var newStatus string
	if traintupleStatus == StatusFailed {
		newStatus = StatusFailed
	} else if traintupleStatus == StatusDone {
		ready, _err := childTraintuple.isReady(db, parentTraintupleKey)
		if _err != nil {
			err = _err
			return
		}
		if ready {
			newStatus = StatusTodo
		}
	}
	// commit new status; empty newStatus means there is nothing to change
	if newStatus == "" {
		return
	}
	if err = childTraintuple.commitStatusUpdate(db, childTraintupleKey, newStatus); err != nil {
		return
	}
	// update return value after status update
	childStatus = childTraintuple.Status
	err = db.AddTupleEvent(childTraintupleKey)
	return
}
// getOutputCompositeTraintuple takes as input a traintuple key and returns the outputCompositeTraintuple.
func getOutputCompositeTraintuple(db *LedgerDB, traintupleKey string) (outTraintuple outputCompositeTraintuple, err error) {
	traintuple, err := db.GetCompositeTraintuple(traintupleKey)
	if err != nil {
		return
	}
	// Propagate Fill's error instead of silently dropping it (it was
	// previously ignored, which could return a partially filled output
	// as a success).
	err = outTraintuple.Fill(db, traintuple)
	return
}
// getOutputCompositeTraintuples takes as input a list of keys and returns a
// payload containing a list of associated retrieved elements. On error, the
// elements retrieved so far are returned together with the error.
func getOutputCompositeTraintuples(db *LedgerDB, traintupleKeys []string) (outTraintuples []outputCompositeTraintuple, err error) {
	for _, traintupleKey := range traintupleKeys {
		out, ferr := getOutputCompositeTraintuple(db, traintupleKey)
		if ferr != nil {
			err = ferr
			return
		}
		outTraintuples = append(outTraintuples, out)
	}
	return
}
// validateNewStatus verifies that the new status is consistent with the tuple current status.
func (traintuple *CompositeTraintuple) validateNewStatus(db *LedgerDB, status string) error {
	// checkUpdateTuple validates both the worker and the status transition.
	return checkUpdateTuple(db, traintuple.Dataset.Worker, traintuple.Status, status)
}
// isReady reports whether this tuple can be promoted, treating
// newDoneTraintupleKey as just finished. Presumably it is ready once both
// input models (head and trunk) are done — exact semantics are delegated to
// IsReady; confirm there.
func (traintuple *CompositeTraintuple) isReady(db *LedgerDB, newDoneTraintupleKey string) (ready bool, err error) {
	return IsReady(db, []string{traintuple.InHeadModel, traintuple.InTrunkModel}, newDoneTraintupleKey)
}
// commitStatusUpdate update the traintuple status in the ledger: it validates
// the transition, persists the tuple, moves its entry in the
// worker/status composite index and updates the owning compute plan's state.
func (traintuple *CompositeTraintuple) commitStatusUpdate(db *LedgerDB, traintupleKey string, newStatus string) error {
	// Nothing to do when the status does not change.
	if traintuple.Status == newStatus {
		return nil
	}
	// do not update if previous status is already Done, Failed, Todo, Doing:
	// only a Waiting tuple may be aborted.
	if StatusAborted == newStatus && traintuple.Status != StatusWaiting {
		return nil
	}
	if err := traintuple.validateNewStatus(db, newStatus); err != nil {
		return errors.Internal("update traintuple %s failed: %s", traintupleKey, err.Error())
	}
	oldStatus := traintuple.Status
	traintuple.Status = newStatus
	if err := db.Put(traintupleKey, traintuple); err != nil {
		return errors.Internal("failed to update traintuple %s - %s", traintupleKey, err.Error())
	}
	// update associated composite keys so status-based lookups stay in sync
	indexName := "compositeTraintuple~worker~status~key"
	oldAttributes := []string{"compositeTraintuple", traintuple.Dataset.Worker, oldStatus, traintupleKey}
	newAttributes := []string{"compositeTraintuple", traintuple.Dataset.Worker, traintuple.Status, traintupleKey}
	if err := db.UpdateIndex(indexName, oldAttributes, newAttributes); err != nil {
		return err
	}
	if err := UpdateComputePlanState(db, traintuple.ComputePlanKey, newStatus, traintupleKey, traintuple.Dataset.Worker); err != nil {
		return err
	}
	logger.Infof("compositetraintuple %s status updated: %s (from=%s)", traintupleKey, newStatus, oldStatus)
	return nil
}
package holtwinters
import "fmt"
// PredictAdditive takes in a seasonal historical series of data and produces a prediction of what the data will be in the future using triple
// exponential smoothing using the additive method. Existing data will also be smoothed alongside predictions. Returns the entire dataset with
// the predictions appended to the end.
// series - Historical seasonal data, must be at least a full season, for optimal results use at least two full seasons,
// the first value should be at the start of a season
// seasonLength - The length of the data's seasons, must be at least 2
// alpha - Exponential smoothing coefficient for level, must be between 0 and 1
// beta - Exponential smoothing coefficient for trend, must be between 0 and 1
// gamma - Exponential smoothing coefficient for seasonality, must be between 0 and 1
// predictionLength - Number of predictions to make, set to 0 to make no predictions and only smooth, can't be negative
func PredictAdditive(series []float64, seasonLength int, alpha float64, beta float64, gamma float64, predictionLength int) ([]float64, error) {
	// Parameter validation mainly to avoid out of bounds errors and division by zero
	err := validateParams(series, seasonLength, alpha, beta, gamma, predictionLength)
	if err != nil {
		return nil, err
	}
	// Assumptions at this point, after params have been validated
	// seasonLength >= 2
	// series >= seasonLength
	// alpha, beta, gamma >= 0.0 and <= 1.0
	// Initial setup: level starts at the first observation; trend and the
	// per-position seasonal offsets are estimated from the first season(s).
	result := []float64{series[0]}
	smooth := series[0]
	trend := initialTrend(series, seasonLength)
	seasonals := initialSeasonalComponentsAdditive(series, seasonLength)
	// Build prediction and smooth existing values
	for i := 1; i < len(series)+predictionLength; i++ {
		if i >= len(series) {
			// Prediction: extrapolate the level m steps along the trend and
			// add the seasonal offset for this position in the season.
			m := float64(i - len(series) + 1)
			result = append(result, (smooth+m*trend)+seasonals[i%seasonLength])
		} else {
			// Smooth existing values: additive Holt-Winters updates for
			// level, trend and seasonal component, in that order.
			val := series[i]
			lastSmooth := smooth
			smooth = alpha*(val-seasonals[i%seasonLength]) + (1-alpha)*(smooth+trend)
			trend = beta*(smooth-lastSmooth) + (1-beta)*trend
			seasonals[i%seasonLength] = gamma*(val-smooth) + (1-gamma)*seasonals[i%seasonLength]
			result = append(result, smooth+trend+seasonals[i%seasonLength])
		}
	}
	return result, nil
}
// PredictMultiplicative takes in a seasonal historical series of data and produces a prediction of what the data will be in the future using triple
// exponential smoothing using the multiplicative method. Existing data will also be smoothed alongside predictions. Returns the entire dataset with
// the predictions appended to the end.
// series - Historical seasonal data, must be at least a full season, for optimal results use at least two full seasons,
// the first value should be at the start of a season
// seasonLength - The length of the data's seasons, must be at least 2
// alpha - Exponential smoothing coefficient for level, must be between 0 and 1
// beta - Exponential smoothing coefficient for trend, must be between 0 and 1
// gamma - Exponential smoothing coefficient for seasonality, must be between 0 and 1
// predictionLength - Number of predictions to make, set to 0 to make no predictions and only smooth, can't be negative
func PredictMultiplicative(series []float64, seasonLength int, alpha float64, beta float64, gamma float64, predictionLength int) ([]float64, error) {
	// Parameter validation mainly to avoid out of bounds errors and division by zero
	err := validateParams(series, seasonLength, alpha, beta, gamma, predictionLength)
	if err != nil {
		return nil, err
	}
	// Assumptions at this point, after params have been validated
	// seasonLength >= 2
	// series >= seasonLength
	// alpha, beta, gamma >= 0.0 and <= 1.0
	// Initial setup: level starts at the first observation; the seasonal
	// components are ratios (observation / season average), so they scale
	// the level rather than being added to it.
	result := []float64{series[0]}
	smooth := series[0]
	trend := initialTrend(series, seasonLength)
	seasonals := initialSeasonalComponentsMultiplicative(series, seasonLength)
	// Build prediction and smooth existing values
	for i := 1; i < len(series)+predictionLength; i++ {
		if i >= len(series) {
			// Prediction. In the multiplicative model the forecast is
			// F(t+m) = (S + m*b) * I, i.e. the seasonal ratio multiplies the
			// extrapolated level+trend (previously it was incorrectly added).
			m := float64(i - len(series) + 1)
			result = append(result, (smooth+m*trend)*seasonals[i%seasonLength])
		} else {
			// Smooth existing values: multiplicative Holt-Winters updates for
			// level, trend and seasonal ratio, in that order.
			val := series[i]
			lastSmooth := smooth
			smooth = alpha*(val/seasonals[i%seasonLength]) + (1-alpha)*(smooth+trend)
			trend = beta*(smooth-lastSmooth) + (1-beta)*trend
			seasonals[i%seasonLength] = gamma*(val/smooth) + (1-gamma)*seasonals[i%seasonLength]
			// The seasonal ratio multiplies the whole (level + trend) term;
			// the previous form smooth+trend*seasonals[...] only scaled the
			// trend due to operator precedence.
			result = append(result, (smooth+trend)*seasonals[i%seasonLength])
		}
	}
	return result, nil
}

// initialTrend calculates the initial trend based on average trends between the first and second
// seasons, if there is not enough data for two full seasons to be compared, instead the trend is
// calculated by comparing the first and second points of the first season
func initialTrend(series []float64, seasonLength int) float64 {
	// If not enough data to compare two seasons, more rough trend calculated using first two points
	if len(series) < seasonLength*2 {
		return series[1] - series[0]
	}
	// Enough data for two seasons, compare first two and average for trend:
	// b0 = (1/L) * sum_i (y[L+i] - y[i]) / L
	sum := float64(0)
	for i := 0; i < seasonLength; i++ {
		sum += (series[i+seasonLength] - series[i]) / float64(seasonLength)
	}
	return sum / float64(seasonLength)
}

// validateParams ensures the parameters provided are valid, avoids NaN values and out of bounds errors
func validateParams(series []float64, seasonLength int, alpha float64, beta float64, gamma float64, predictionLength int) error {
	if seasonLength <= 1 {
		return fmt.Errorf("Invalid parameter for prediction; season length must be at least 2, is %d", seasonLength)
	}
	if predictionLength < 0 {
		return fmt.Errorf("Invalid parameter for prediction; prediction length must be at least 0, cannot be negative, is %d", predictionLength)
	}
	if alpha < 0.0 || alpha > 1.0 {
		return fmt.Errorf("Invalid parameter for prediction; alpha must be between 0 and 1, is %f", alpha)
	}
	if beta < 0.0 || beta > 1.0 {
		return fmt.Errorf("Invalid parameter for prediction; beta must be between 0 and 1, is %f", beta)
	}
	if gamma < 0.0 || gamma > 1.0 {
		return fmt.Errorf("Invalid parameter for prediction; gamma must be between 0 and 1, is %f", gamma)
	}
	if len(series) < seasonLength {
		return fmt.Errorf("Invalid parameter for prediction; must have at least 1 season of data to predict, season length: %d, series length: %d", seasonLength, len(series))
	}
	return nil
}

// initialSeasonalComponentsAdditive calculates the initial seasonal values for the additive method
// (per-position average deviation from the season average).
func initialSeasonalComponentsAdditive(series []float64, seasonLength int) []float64 {
	var seasonals = make([]float64, seasonLength)
	seasonAverages := []float64{}
	nSeasons := len(series) / seasonLength
	for i := 0; i < nSeasons; i++ {
		// Calculate sum of season
		sum := float64(0)
		for j := seasonLength * i; j < seasonLength*i+seasonLength; j++ {
			sum += series[j]
		}
		// Calculate average of season and add to slice
		seasonAverages = append(seasonAverages, sum/float64(seasonLength))
	}
	for i := 0; i < seasonLength; i++ {
		sumOfValuesOverAverage := float64(0)
		for j := 0; j < nSeasons; j++ {
			sumOfValuesOverAverage += series[seasonLength*j+i] - seasonAverages[j]
		}
		seasonals[i] = sumOfValuesOverAverage / float64(nSeasons)
	}
	return seasonals
}

// initialSeasonalComponentsMultiplicative calculates the initial seasonal values for the multiplicative method
// (per-position average ratio to the season average).
func initialSeasonalComponentsMultiplicative(series []float64, seasonLength int) []float64 {
	var seasonals = make([]float64, seasonLength)
	seasonAverages := []float64{}
	nSeasons := len(series) / seasonLength
	for i := 0; i < nSeasons; i++ {
		// Calculate sum of season
		sum := float64(0)
		for j := seasonLength * i; j < seasonLength*i+seasonLength; j++ {
			sum += series[j]
		}
		// Calculate average of season and add to slice
		seasonAverages = append(seasonAverages, sum/float64(seasonLength))
	}
	for i := 0; i < seasonLength; i++ {
		sumOfValuesOverAverage := float64(0)
		for j := 0; j < nSeasons; j++ {
			sumOfValuesOverAverage += series[seasonLength*j+i] / seasonAverages[j]
		}
		seasonals[i] = sumOfValuesOverAverage / float64(nSeasons)
	}
	return seasonals
}
package server
import (
"fmt"
"reflect"
"strconv"
)
// SafeStringFromMap returns the value referenced by `key` in `values` as a string.
// If the key is missing or the value is not a string, it returns an empty string
// (the previous unchecked type assertion panicked on non-string values).
func SafeStringFromMap(values map[string]interface{}, key string) string {
	s, ok := values[key].(string)
	if !ok {
		return ""
	}
	return s
}
// SafeStringFromMapFloat returns the value referenced by `key` in `values` as a
// string (after first formatting as a base-10 'E'-notation float64).
// If the key is missing or the value is not a float64, it returns an empty
// string (the previous unchecked type assertion panicked on non-float values).
func SafeStringFromMapFloat(values map[string]interface{}, key string) string {
	f, ok := values[key].(float64)
	if !ok {
		return ""
	}
	return strconv.FormatFloat(f, 'E', -1, 64)
}
// SafeBooleanFromMap returns the value referenced by `key` in `values` as a boolean.
// If the key is missing or the value is not a bool, it returns false
// (the previous unchecked type assertion panicked on non-bool values).
func SafeBooleanFromMap(values map[string]interface{}, key string) bool {
	b, ok := values[key].(bool)
	if !ok {
		return false
	}
	return b
}
// SafeSliceFromMap returns the value referenced by `key` in `values` as a
// slice of strings, formatting each element with fmt.
// If the value is not a slice (including a missing key), it returns an
// empty slice.
func SafeSliceFromMap(values map[string]interface{}, key string) []string {
	v := reflect.ValueOf(values[key])
	if v.Kind() != reflect.Slice {
		return []string{}
	}
	out := make([]string, v.Len())
	for i := range out {
		out[i] = fmt.Sprint(v.Index(i))
	}
	return out
}
// SubtractSlice returns the slice a with the elements of b removed.
// Note: a's backing array is modified in place, and for each element of b
// only its first occurrence in a is removed (one RemoveElement call each).
func SubtractSlice(a []string, b []string) []string {
	for _, unwanted := range b {
		a = RemoveElement(a, unwanted)
	}
	return a
}

// RemoveElement removes the first occurrence of str from the slice, in place.
func RemoveElement(a []string, str string) []string {
	for i, v := range a {
		if v == str {
			return append(a[:i], a[i+1:]...)
		}
	}
	return a
}
// Unique returns a slice with duplicate values removed, keeping the first
// occurrence of each value in order. The input slice's backing array is
// reused, so the caller must not rely on a's old contents afterwards.
func Unique(a []string) []string {
	seen := map[string]struct{}{}
	out := a[:0]
	for _, s := range a {
		if _, dup := seen[s]; dup {
			continue
		}
		seen[s] = struct{}{}
		out = append(out, s)
	}
	return out
}
package material
import (
"dasa.cc/simplex"
"golang.org/x/mobile/exp/f32"
)
// Box models an axis-aligned rectangle for constraint-based layout: five
// solver variables for the left, right, bottom and top edges plus a depth
// value z, and the world matrix computed from the solved values (see
// UpdateWorld).
type Box struct {
	l, r, b, t, z simplex.Var
	world f32.Mat4
}

// NewBox allocates the box's five solver variables in prg.
// NOTE(review): the literal 1 passed to prg.Var is presumably an initial
// value or weight — confirm against the simplex package.
func NewBox(prg *simplex.Program) (a Box) {
	a.l, a.r, a.b, a.t, a.z = prg.Var(1), prg.Var(1), prg.Var(1), prg.Var(1), prg.Var(1)
	return
}
// Width constrains the box to be exactly x wide (r - l == x).
func (a Box) Width(x float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, a.r}, simplex.Coef{-1, a.l}).Equal(float64(x))
}

// Height constrains the box to be exactly x tall (t - b == x).
func (a Box) Height(x float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, a.t}, simplex.Coef{-1, a.b}).Equal(float64(x))
}

// Start pins the left edge to x.
func (a Box) Start(x float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, a.l}).Equal(float64(x))
}

// End pins the right edge to x.
func (a Box) End(x float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, a.r}).Equal(float64(x))
}

// Bottom pins the bottom edge to x.
func (a Box) Bottom(x float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, a.b}).Equal(float64(x))
}

// Top pins the top edge to x.
func (a Box) Top(x float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, a.t}).Equal(float64(x))
}

// Z pins the box's depth value to z.
func (a Box) Z(z float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, a.z}).Equal(float64(z))
}

// StartIn keeps a's left edge at least `by` inside b's left edge (a.l - b.l >= by).
func (a Box) StartIn(b Box, by float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, a.l}, simplex.Coef{-1, b.l}).GreaterEq(float64(by))
}

// EndIn keeps a's right edge at least `by` inside b's right edge (b.r - a.r >= by).
func (a Box) EndIn(b Box, by float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, b.r}, simplex.Coef{-1, a.r}).GreaterEq(float64(by))
}

// BottomIn keeps a's bottom edge at least `by` inside b's bottom edge (a.b - b.b >= by).
func (a Box) BottomIn(b Box, by float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, a.b}, simplex.Coef{-1, b.b}).GreaterEq(float64(by))
}

// TopIn keeps a's top edge at least `by` inside b's top edge (b.t - a.t >= by).
func (a Box) TopIn(b Box, by float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, b.t}, simplex.Coef{-1, a.t}).GreaterEq(float64(by))
}

// CenterVerticalIn relates the vertical midpoints of a and b
// (b.b + b.t - a.b - a.t). NOTE(review): no Equal/GreaterEq is applied —
// presumably the solver treats a bare constraint as == 0, which would match
// the midpoints; confirm simplex.Constrain's default relation.
func (a Box) CenterVerticalIn(b Box) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, b.b}, simplex.Coef{1, b.t}, simplex.Coef{-1, a.b}, simplex.Coef{-1, a.t})
}

// CenterHorizontalIn likewise relates the horizontal midpoints of a and b.
func (a Box) CenterHorizontalIn(b Box) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, b.l}, simplex.Coef{1, b.r}, simplex.Coef{-1, a.l}, simplex.Coef{-1, a.r})
}

// Before keeps a entirely left of b with a gap of at least `by` (b.l - a.r >= by).
func (a Box) Before(b Box, by float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, b.l}, simplex.Coef{-1, a.r}).GreaterEq(float64(by))
}

// After keeps a entirely right of b with a gap of at least `by` (a.l - b.r >= by).
func (a Box) After(b Box, by float32) simplex.Constraint {
	// TODO this is the crux of adaptive layout model, along with a Before method.
	// Consider how box a would be after box b if room, otherwise box a is below box b.
	// Note in the latter case, box a should not be aligned after box b when below.
	return simplex.Constrain(simplex.Coef{1, a.l}, simplex.Coef{-1, b.r}).GreaterEq(float64(by))
}

// Below keeps a entirely below b with a gap of at least `by` (b.b - a.t >= by).
func (a Box) Below(b Box, by float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, b.b}, simplex.Coef{-1, a.t}).GreaterEq(float64(by))
}

// Above keeps a entirely above b with a gap of at least `by` (a.b - b.t >= by).
func (a Box) Above(b Box, by float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, a.b}, simplex.Coef{-1, b.t}).GreaterEq(float64(by))
}

// AlignBottoms constrains b's bottom edge to sit at least `by` above a's
// bottom (b.b - a.b >= by). NOTE(review): one-sided despite the "Align"
// name — confirm the intended semantics.
func (a Box) AlignBottoms(b Box, by float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, b.b}, simplex.Coef{-1, a.b}).GreaterEq(float64(by))
}

// AlignTops constrains b's top edge to sit at least `by` above a's top
// (b.t - a.t >= by). Same one-sided caveat as AlignBottoms.
func (a Box) AlignTops(b Box, by float32) simplex.Constraint {
	return simplex.Constrain(simplex.Coef{1, b.t}, simplex.Coef{-1, a.t}).GreaterEq(float64(by))
}

// Bounds returns the four constraints keeping the box inside the region
// [l, r] x [b, t].
func (a Box) Bounds(l, r, b, t float32) []simplex.Constraint {
	return []simplex.Constraint{
		simplex.Constrain(simplex.Coef{1, a.l}).GreaterEq(float64(l)),
		simplex.Constrain(simplex.Coef{1, a.r}).LessEq(float64(r)),
		simplex.Constrain(simplex.Coef{1, a.b}).GreaterEq(float64(b)),
		simplex.Constrain(simplex.Coef{1, a.t}).LessEq(float64(t)),
	}
}
// UpdateWorld solves for the box's variables and rebuilds its world matrix:
// translate to the bottom-left corner (l, b), then scale by the solved width
// and height, then store z in world[2][3].
// NOTE(review): writing [2][3] directly assumes f32.Mat4's layout makes that
// slot the z translation — confirm against the f32 package.
func (a *Box) UpdateWorld(prg *simplex.Program) {
	prg.For(&a.l, &a.r, &a.b, &a.t, &a.z)
	a.world.Identity()
	a.world.Translate(&a.world, float32(a.l.Val), float32(a.b.Val), 0)
	a.world.Scale(&a.world, float32(a.r.Val-a.l.Val), float32(a.t.Val-a.b.Val), 1)
	a.world[2][3] = float32(a.z.Val)
}
package resize
import "image"
// clampUint8 saturates a signed 32-bit value into the [0, 255] range.
func clampUint8(in int32) uint8 {
	switch {
	case in < 0:
		return 0
	case in > 255:
		return 255
	default:
		return uint8(in)
	}
}
// clampUint16 saturates a signed 64-bit value into the [0, 65535] range.
func clampUint16(in int64) uint16 {
	switch {
	case in < 0:
		return 0
	case in > 65535:
		return 65535
	default:
		return uint16(in)
	}
}
// resizeGeneric resamples an arbitrary image.Image into out (16-bit
// non-premultiplied RGBA) along one axis using precomputed filter weights.
// coeffs holds filterLength int32 weights per output coordinate y and
// offset[y] is the first input coordinate the filter taps; out-of-range taps
// are clamped to the image edge. Accumulated channels are normalized by the
// running weight sum.
// NOTE(review): the input is addressed with swapped coordinates
// (in.At(xi+MinX, x+MinY)) — presumably this is one pass of a two-pass
// transposed resize; confirm against the caller. The scale parameter is
// unused in the body.
func resizeGeneric(in image.Image, out *image.NRGBA64, scale float64, coeffs []int32, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]int64
			var sum int64
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp the tap position to the valid input range.
					xi := start + i
					switch {
					case xi < 0:
						xi = 0
					case xi >= maxX:
						xi = maxX
					}
					r, g, b, a := in.At(xi+in.Bounds().Min.X, x+in.Bounds().Min.Y).RGBA()
					// reverse alpha-premultiplication.
					if a != 0 {
						r *= 0xffff
						r /= a
						g *= 0xffff
						g /= a
						b *= 0xffff
						b /= a
					}
					rgba[0] += int64(coeff) * int64(r)
					rgba[1] += int64(coeff) * int64(g)
					rgba[2] += int64(coeff) * int64(b)
					rgba[3] += int64(coeff) * int64(a)
					sum += int64(coeff)
				}
			}
			// Write the normalized 16-bit channels big-endian into out.Pix.
			offset := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8
			value := clampUint16(rgba[0] / sum)
			out.Pix[offset+0] = uint8(value >> 8)
			out.Pix[offset+1] = uint8(value)
			value = clampUint16(rgba[1] / sum)
			out.Pix[offset+2] = uint8(value >> 8)
			out.Pix[offset+3] = uint8(value)
			value = clampUint16(rgba[2] / sum)
			out.Pix[offset+4] = uint8(value >> 8)
			out.Pix[offset+5] = uint8(value)
			value = clampUint16(rgba[3] / sum)
			out.Pix[offset+6] = uint8(value >> 8)
			out.Pix[offset+7] = uint8(value)
		}
	}
}
// resizeRGBA resamples a premultiplied *image.RGBA into out (8-bit
// non-premultiplied NRGBA) along one axis using precomputed int16 weights.
// The outer x loop walks input rows via in.Stride; coeffs/offset have the
// same meaning as in resizeGeneric. scale is unused in the body.
func resizeRGBA(in *image.RGBA, out *image.NRGBA, scale float64, coeffs []int16, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]int32
			var sum int32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					xi := start + i
					// Clamp to [0, maxX] and convert to a 4-byte pixel offset;
					// the uint cast makes negative xi fall through to default.
					switch {
					case uint(xi) < uint(maxX):
						xi *= 4
					case xi >= maxX:
						xi = 4 * maxX
					default:
						xi = 0
					}
					r := uint32(row[xi+0])
					g := uint32(row[xi+1])
					b := uint32(row[xi+2])
					a := uint32(row[xi+3])
					// reverse alpha-premultiplication.
					if a != 0 {
						r *= 0xff
						r /= a
						g *= 0xff
						g /= a
						b *= 0xff
						b /= a
					}
					rgba[0] += int32(coeff) * int32(r)
					rgba[1] += int32(coeff) * int32(g)
					rgba[2] += int32(coeff) * int32(b)
					rgba[3] += int32(coeff) * int32(a)
					sum += int32(coeff)
				}
			}
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*4
			out.Pix[xo+0] = clampUint8(rgba[0] / sum)
			out.Pix[xo+1] = clampUint8(rgba[1] / sum)
			out.Pix[xo+2] = clampUint8(rgba[2] / sum)
			out.Pix[xo+3] = clampUint8(rgba[3] / sum)
		}
	}
}
// resizeNRGBA resamples a non-premultiplied *image.NRGBA into out along one
// axis. Identical to resizeRGBA except no alpha-premultiplication needs to be
// reversed. scale is unused in the body.
func resizeNRGBA(in *image.NRGBA, out *image.NRGBA, scale float64, coeffs []int16, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]int32
			var sum int32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp to [0, maxX] and convert to a 4-byte pixel offset.
					xi := start + i
					switch {
					case uint(xi) < uint(maxX):
						xi *= 4
					case xi >= maxX:
						xi = 4 * maxX
					default:
						xi = 0
					}
					rgba[0] += int32(coeff) * int32(row[xi+0])
					rgba[1] += int32(coeff) * int32(row[xi+1])
					rgba[2] += int32(coeff) * int32(row[xi+2])
					rgba[3] += int32(coeff) * int32(row[xi+3])
					sum += int32(coeff)
				}
			}
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*4
			out.Pix[xo+0] = clampUint8(rgba[0] / sum)
			out.Pix[xo+1] = clampUint8(rgba[1] / sum)
			out.Pix[xo+2] = clampUint8(rgba[2] / sum)
			out.Pix[xo+3] = clampUint8(rgba[3] / sum)
		}
	}
}
// resizeRGBA64 resamples a premultiplied 16-bit *image.RGBA64 into out
// (non-premultiplied NRGBA64) along one axis. Channels are stored big-endian,
// two bytes each (8 bytes per pixel). scale is unused in the body.
func resizeRGBA64(in *image.RGBA64, out *image.NRGBA64, scale float64, coeffs []int32, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]int64
			var sum int64
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp to [0, maxX] and convert to an 8-byte pixel offset.
					xi := start + i
					switch {
					case uint(xi) < uint(maxX):
						xi *= 8
					case xi >= maxX:
						xi = 8 * maxX
					default:
						xi = 0
					}
					// Assemble the big-endian 16-bit channels.
					r := uint32(uint16(row[xi+0])<<8 | uint16(row[xi+1]))
					g := uint32(uint16(row[xi+2])<<8 | uint16(row[xi+3]))
					b := uint32(uint16(row[xi+4])<<8 | uint16(row[xi+5]))
					a := uint32(uint16(row[xi+6])<<8 | uint16(row[xi+7]))
					// reverse alpha-premultiplication.
					if a != 0 {
						r *= 0xffff
						r /= a
						g *= 0xffff
						g /= a
						b *= 0xffff
						b /= a
					}
					rgba[0] += int64(coeff) * int64(r)
					rgba[1] += int64(coeff) * int64(g)
					rgba[2] += int64(coeff) * int64(b)
					rgba[3] += int64(coeff) * int64(a)
					sum += int64(coeff)
				}
			}
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8
			value := clampUint16(rgba[0] / sum)
			out.Pix[xo+0] = uint8(value >> 8)
			out.Pix[xo+1] = uint8(value)
			value = clampUint16(rgba[1] / sum)
			out.Pix[xo+2] = uint8(value >> 8)
			out.Pix[xo+3] = uint8(value)
			value = clampUint16(rgba[2] / sum)
			out.Pix[xo+4] = uint8(value >> 8)
			out.Pix[xo+5] = uint8(value)
			value = clampUint16(rgba[3] / sum)
			out.Pix[xo+6] = uint8(value >> 8)
			out.Pix[xo+7] = uint8(value)
		}
	}
}
// resizeNRGBA64 resamples a non-premultiplied 16-bit *image.NRGBA64 into out
// along one axis. Identical to resizeRGBA64 except no alpha-premultiplication
// needs to be reversed. scale is unused in the body.
func resizeNRGBA64(in *image.NRGBA64, out *image.NRGBA64, scale float64, coeffs []int32, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var rgba [4]int64
			var sum int64
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp to [0, maxX] and convert to an 8-byte pixel offset.
					xi := start + i
					switch {
					case uint(xi) < uint(maxX):
						xi *= 8
					case xi >= maxX:
						xi = 8 * maxX
					default:
						xi = 0
					}
					// Accumulate the big-endian 16-bit channels.
					rgba[0] += int64(coeff) * int64(uint16(row[xi+0])<<8|uint16(row[xi+1]))
					rgba[1] += int64(coeff) * int64(uint16(row[xi+2])<<8|uint16(row[xi+3]))
					rgba[2] += int64(coeff) * int64(uint16(row[xi+4])<<8|uint16(row[xi+5]))
					rgba[3] += int64(coeff) * int64(uint16(row[xi+6])<<8|uint16(row[xi+7]))
					sum += int64(coeff)
				}
			}
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*8
			value := clampUint16(rgba[0] / sum)
			out.Pix[xo+0] = uint8(value >> 8)
			out.Pix[xo+1] = uint8(value)
			value = clampUint16(rgba[1] / sum)
			out.Pix[xo+2] = uint8(value >> 8)
			out.Pix[xo+3] = uint8(value)
			value = clampUint16(rgba[2] / sum)
			out.Pix[xo+4] = uint8(value >> 8)
			out.Pix[xo+5] = uint8(value)
			value = clampUint16(rgba[3] / sum)
			out.Pix[xo+6] = uint8(value >> 8)
			out.Pix[xo+7] = uint8(value)
		}
	}
}
// resizeGray resamples an 8-bit grayscale image into out along one axis.
// NOTE(review): the row offset here subtracts newBounds.Min.X, unlike the
// sibling functions which use x*in.Stride directly — presumably a fix for
// inputs with non-zero bounds; confirm whether the siblings need the same
// treatment. scale is unused in the body.
func resizeGray(in *image.Gray, out *image.Gray, scale float64, coeffs []int16, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[(x-newBounds.Min.X)*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var gray int32
			var sum int32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp the tap position to the valid input range.
					xi := start + i
					switch {
					case xi < 0:
						xi = 0
					case xi >= maxX:
						xi = maxX
					}
					gray += int32(coeff) * int32(row[xi])
					sum += int32(coeff)
				}
			}
			offset := (y-newBounds.Min.Y)*out.Stride + (x - newBounds.Min.X)
			out.Pix[offset] = clampUint8(gray / sum)
		}
	}
}
// resizeGray16 resamples a 16-bit grayscale image into out along one axis.
// Pixels are two bytes each, stored big-endian. scale is unused in the body.
func resizeGray16(in *image.Gray16, out *image.Gray16, scale float64, coeffs []int32, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var gray int64
			var sum int64
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp to [0, maxX] and convert to a 2-byte pixel offset.
					xi := start + i
					switch {
					case uint(xi) < uint(maxX):
						xi *= 2
					case xi >= maxX:
						xi = 2 * maxX
					default:
						xi = 0
					}
					gray += int64(coeff) * int64(uint16(row[xi+0])<<8|uint16(row[xi+1]))
					sum += int64(coeff)
				}
			}
			offset := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*2
			value := clampUint16(gray / sum)
			out.Pix[offset+0] = uint8(value >> 8)
			out.Pix[offset+1] = uint8(value)
		}
	}
}
// resizeYCbCr resamples an interleaved 3-byte-per-pixel ycc image (a
// package-local YCbCr representation) into out along one axis, filtering the
// three components independently. scale is unused in the body.
func resizeYCbCr(in *ycc, out *ycc, scale float64, coeffs []int16, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var p [3]int32
			var sum int32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				coeff := coeffs[ci+i]
				if coeff != 0 {
					// Clamp to [0, maxX] and convert to a 3-byte pixel offset.
					xi := start + i
					switch {
					case uint(xi) < uint(maxX):
						xi *= 3
					case xi >= maxX:
						xi = 3 * maxX
					default:
						xi = 0
					}
					p[0] += int32(coeff) * int32(row[xi+0])
					p[1] += int32(coeff) * int32(row[xi+1])
					p[2] += int32(coeff) * int32(row[xi+2])
					sum += int32(coeff)
				}
			}
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*3
			out.Pix[xo+0] = clampUint8(p[0] / sum)
			out.Pix[xo+1] = clampUint8(p[1] / sum)
			out.Pix[xo+2] = clampUint8(p[2] / sum)
		}
	}
}
// nearestYCbCr resamples one axis of a ycc plane using a boolean tap mask:
// every source sample whose coeffs entry is true contributes with equal
// weight, and the result is the plain average of the contributing samples.
// NOTE(review): despite the "nearest" name this averages all enabled taps;
// it only behaves as nearest-neighbour when exactly one tap is set per row.
func nearestYCbCr(in *ycc, out *ycc, scale float64, coeffs []bool, offset []int, filterLength int) {
	newBounds := out.Bounds()
	maxX := in.Bounds().Dx() - 1
	for x := newBounds.Min.X; x < newBounds.Max.X; x++ {
		row := in.Pix[x*in.Stride:]
		for y := newBounds.Min.Y; y < newBounds.Max.Y; y++ {
			var p [3]float32
			var sum float32
			start := offset[y]
			ci := y * filterLength
			for i := 0; i < filterLength; i++ {
				if coeffs[ci+i] {
					xi := start + i
					// Clamp xi to [0, maxX] (uint cast rejects negatives),
					// then scale to a byte offset (3 channels per pixel).
					switch {
					case uint(xi) < uint(maxX):
						xi *= 3
					case xi >= maxX:
						xi = 3 * maxX
					default:
						xi = 0
					}
					p[0] += float32(row[xi+0])
					p[1] += float32(row[xi+1])
					p[2] += float32(row[xi+2])
					sum++
				}
			}
			xo := (y-newBounds.Min.Y)*out.Stride + (x-newBounds.Min.X)*3
			out.Pix[xo+0] = floatToUint8(p[0] / sum)
			out.Pix[xo+1] = floatToUint8(p[1] / sum)
			out.Pix[xo+2] = floatToUint8(p[2] / sum)
		}
	}
} | converter.go | 0.672117 | 0.428592 | converter.go | starcoder
package actrf
import (
"github.com/emer/etable/etensor"
"github.com/emer/etable/norm"
)
// RF is used for computing an activation-based receptive field.
// It simply computes the activation weighted average of other
// *source* patterns of activation -- i.e., sum(act * src) / sum(src)
// which then shows you the patterns of source activity for which
// a given unit was active.
// You must call Init to initialize everything, Reset to restart the accumulation of the data,
// and Avg to compute the resulting averages based on the accumulated data.
// Avg does not erase the accumulated data so it can continue beyond that point.
// All tensors are shaped [ActY, ActX, SrcY, SrcX].
type RF struct {
	Name    string          `desc:"name of this RF -- used for management of multiple in RFs"`
	RF      etensor.Float32 `view:"no-inline" desc:"computed receptive field, as SumProd / SumSrc -- only after Avg has been called"`
	NormRF  etensor.Float32 `view:"no-inline" desc:"unit normalized version of RF per source (inner 2D dimensions) -- good for display"`
	SumProd etensor.Float32 `view:"no-inline" desc:"sum of the products of act * src"`
	SumSrc  etensor.Float32 `view:"no-inline" desc:"sum of the sources (denomenator)"`
}
// Init initializes this RF based on name and shapes of given
// tensors representing the activations and source values.
// All four accumulator tensors are shaped [ActY, ActX, SrcY, SrcX]
// and the sums are zeroed via Reset.
func (af *RF) Init(name string, act, src etensor.Tensor) {
	af.Name = name
	actY, actX, _, _ := etensor.Prjn2DShape(act.ShapeObj(), false)
	srcY, srcX, _, _ := etensor.Prjn2DShape(src.ShapeObj(), false)
	shape := []int{actY, actX, srcY, srcX}
	names := []string{"ActY", "ActX", "SrcY", "SrcX"}
	for _, tsr := range []*etensor.Float32{&af.RF, &af.NormRF, &af.SumProd, &af.SumSrc} {
		tsr.SetShape(shape, nil, names)
	}
	af.Reset()
}
// Reset zeroes both Sum accumulators so a fresh accumulation can begin.
// Init must have been called first to shape the tensors.
func (af *RF) Reset() {
	af.SumSrc.SetZeros()
	af.SumProd.SetZeros()
}
// Add adds one sample based on activation and source tensor values.
// these must be of the same shape as used when Init was called.
// thr is a threshold value on sources below which values are not added (prevents
// numerical issues with very small numbers)
func (af *RF) Add(act, src etensor.Tensor, thr float32) {
	aNy, aNx, _, _ := etensor.Prjn2DShape(act.ShapeObj(), false)
	sNy, sNx, _, _ := etensor.Prjn2DShape(src.ShapeObj(), false)
	for sy := 0; sy < sNy; sy++ {
		for sx := 0; sx < sNx; sx++ {
			tv := float32(etensor.Prjn2DVal(src, false, sy, sx))
			// skip sub-threshold source values entirely -- they contribute
			// to neither numerator nor denominator
			if tv < thr {
				continue
			}
			// accumulate the outer product act x src into SumProd, and the
			// source value alone into SumSrc (the denominator for Avg)
			for ay := 0; ay < aNy; ay++ {
				for ax := 0; ax < aNx; ax++ {
					av := float32(etensor.Prjn2DVal(act, false, ay, ax))
					oi := []int{ay, ax, sy, sx}
					oo := af.SumProd.Offset(oi)
					af.SumProd.Values[oo] += av * tv
					af.SumSrc.Values[oo] += tv
				}
			}
		}
	}
}
// Avg computes RF as SumProd / SumSrc. Does not Reset sums.
// Cells whose accumulated source is zero are left untouched in RF.
func (af *RF) Avg() {
	dims := [4]int{af.SumProd.Dim(0), af.SumProd.Dim(1), af.SumProd.Dim(2), af.SumProd.Dim(3)}
	idx := make([]int, 4)
	for idx[0] = 0; idx[0] < dims[0]; idx[0]++ {
		for idx[1] = 0; idx[1] < dims[1]; idx[1]++ {
			for idx[2] = 0; idx[2] < dims[2]; idx[2]++ {
				for idx[3] = 0; idx[3] < dims[3]; idx[3]++ {
					off := af.SumProd.Offset(idx)
					if denom := af.SumSrc.Values[off]; denom > 0 {
						af.RF.Values[off] = af.SumProd.Values[off] / denom
					}
				}
			}
		}
	}
}
// Norm computes unit norm of RF values into NormRF, leaving RF itself
// unchanged. Avg should have been called first to populate RF.
func (af *RF) Norm() {
	af.NormRF.CopyFrom(&af.RF)
	norm.TensorUnit32(&af.NormRF, 2) // 2 = norm within outer 2 dims = norm each src within
} | actrf/actrf.go | 0.704364 | 0.54698 | actrf.go | starcoder
package util
import (
"fmt"
"reflect"
structpb "github.com/golang/protobuf/ptypes/struct"
)
// StructProto converts a plain Go map into a protobuf Struct by converting
// each value with ValueProto. A nil or empty map yields a nil Struct.
func StructProto(v map[string]interface{}) *structpb.Struct {
	if len(v) == 0 {
		return nil
	}
	fields := make(map[string]*structpb.Value, len(v))
	for key, val := range v {
		fields[key] = ValueProto(val)
	}
	return &structpb.Struct{Fields: fields}
}
// ValueProto converts an interface{} to a ptypes.Value.
// Common scalar types take a direct fast path; everything else falls back
// to reflection via valueProto. A nil input yields a nil Value.
//
// NOTE: int16 and uint16 previously fell through to the reflection fallback;
// they now share the fast path with the other integer widths (same result).
func ValueProto(v interface{}) *structpb.Value {
	switch v := v.(type) {
	case nil:
		return nil
	case bool:
		return &structpb.Value{
			Kind: &structpb.Value_BoolValue{
				BoolValue: v,
			},
		}
	case int:
		return numberValue(float64(v))
	case int8:
		return numberValue(float64(v))
	case int16:
		return numberValue(float64(v))
	case int32:
		return numberValue(float64(v))
	case int64:
		return numberValue(float64(v))
	case uint:
		return numberValue(float64(v))
	case uint8:
		return numberValue(float64(v))
	case uint16:
		return numberValue(float64(v))
	case uint32:
		return numberValue(float64(v))
	case uint64:
		return numberValue(float64(v))
	case float32:
		return numberValue(float64(v))
	case float64:
		return numberValue(v)
	case string:
		return &structpb.Value{
			Kind: &structpb.Value_StringValue{
				StringValue: v,
			},
		}
	case error:
		return &structpb.Value{
			Kind: &structpb.Value_StringValue{
				StringValue: v.Error(),
			},
		}
	default:
		// Fallback to reflection for other types
		return valueProto(reflect.ValueOf(v))
	}
}

// numberValue wraps a float64 in a protobuf NumberValue.
func numberValue(f float64) *structpb.Value {
	return &structpb.Value{
		Kind: &structpb.Value_NumberValue{
			NumberValue: f,
		},
	}
}
// valueProto converts an arbitrary reflect.Value into a protobuf Value.
// It is the reflection fallback used by ValueProto. Empty composites
// (slices, arrays, structs, maps) and nil pointers decode to nil.
func valueProto(v reflect.Value) *structpb.Value {
	switch v.Kind() {
	case reflect.Bool:
		return &structpb.Value{
			Kind: &structpb.Value_BoolValue{
				BoolValue: v.Bool(),
			},
		}
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return &structpb.Value{
			Kind: &structpb.Value_NumberValue{
				NumberValue: float64(v.Int()),
			},
		}
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return &structpb.Value{
			Kind: &structpb.Value_NumberValue{
				NumberValue: float64(v.Uint()),
			},
		}
	case reflect.Float32, reflect.Float64:
		return &structpb.Value{
			Kind: &structpb.Value_NumberValue{
				NumberValue: v.Float(),
			},
		}
	case reflect.Ptr:
		if v.IsNil() {
			return nil
		}
		// dereference and convert the pointee
		return valueProto(reflect.Indirect(v))
	case reflect.Array, reflect.Slice:
		size := v.Len()
		if size == 0 {
			return nil
		}
		values := make([]*structpb.Value, size)
		for i := 0; i < size; i++ {
			values[i] = valueProto(v.Index(i))
		}
		return &structpb.Value{
			Kind: &structpb.Value_ListValue{
				ListValue: &structpb.ListValue{
					Values: values,
				},
			},
		}
	case reflect.Struct:
		t := v.Type()
		size := v.NumField()
		if size == 0 {
			return nil
		}
		fields := make(map[string]*structpb.Value, size)
		for i := 0; i < size; i++ {
			name := t.Field(i).Name
			// Only exported fields (ASCII upper-case first letter) are kept.
			// Better way?
			if len(name) > 0 && 'A' <= name[0] && name[0] <= 'Z' {
				fields[name] = valueProto(v.Field(i))
			}
		}
		if len(fields) == 0 {
			return nil
		}
		return &structpb.Value{
			Kind: &structpb.Value_StructValue{
				StructValue: &structpb.Struct{
					Fields: fields,
				},
			},
		}
	case reflect.Map:
		keys := v.MapKeys()
		if len(keys) == 0 {
			return nil
		}
		fields := make(map[string]*structpb.Value, len(keys))
		for _, k := range keys {
			// non-string keys are silently dropped
			if k.Kind() == reflect.String {
				fields[k.String()] = valueProto(v.MapIndex(k))
			}
		}
		if len(fields) == 0 {
			return nil
		}
		return &structpb.Value{
			Kind: &structpb.Value_StructValue{
				StructValue: &structpb.Struct{
					Fields: fields,
				},
			},
		}
	default:
		// Last resort
		return &structpb.Value{
			Kind: &structpb.Value_StringValue{
				StringValue: fmt.Sprint(v),
			},
		}
	}
}
// StructToMap converts a pb.Struct to a map from strings to Go types,
// decoding each field with decodeValue. A nil Struct yields a nil map.
func StructToMap(s *structpb.Struct) map[string]interface{} {
	if s == nil {
		return nil
	}
	m := map[string]interface{}{}
	for k, v := range s.Fields {
		m[k] = decodeValue(v)
	}
	return m
}
// StructToMapWithPrefix converts a pb.Struct to a map from strings to Go
// types. A nil Struct yields a nil map.
//
// NOTE(review): despite its name this function takes no prefix argument and
// produces exactly the same keys as StructToMap. The original implementation
// passed each key through fmt.Sprintf("%s", k), which is an identity
// conversion for strings (staticcheck S1025); the pointless formatting has
// been removed -- output is unchanged.
func StructToMapWithPrefix(s *structpb.Struct) map[string]interface{} {
	if s == nil {
		return nil
	}
	m := map[string]interface{}{}
	for k, v := range s.Fields {
		m[k] = decodeValue(v)
	}
	return m
}
// decodeValue converts a single protobuf Value into the corresponding Go
// type: nil, float64, string, bool, map (via StructToMap) or []interface{}.
// Unknown kinds decode to nil.
func decodeValue(v *structpb.Value) interface{} {
	switch k := v.Kind.(type) {
	case *structpb.Value_NullValue:
		return nil
	case *structpb.Value_NumberValue:
		return k.NumberValue
	case *structpb.Value_StringValue:
		return k.StringValue
	case *structpb.Value_BoolValue:
		return k.BoolValue
	case *structpb.Value_StructValue:
		return StructToMap(k.StructValue)
	case *structpb.Value_ListValue:
		s := make([]interface{}, len(k.ListValue.Values))
		for i, e := range k.ListValue.Values {
			s[i] = decodeValue(e)
		}
		return s
	default:
		return nil
	}
} | core/util/proto.go | 0.647575 | 0.436682 | proto.go | starcoder
package main
import (
"math"
"math/rand"
"time"
)
// Boid is a single simulated agent with a position and velocity in screen
// coordinates; id is its index into the global boids slice and boidMap.
type Boid struct {
	position Vector2D
	velocity Vector2D
	id       int
}
// createBoid registers a new boid with a random on-screen position and a
// random velocity in [-1, 1) per axis, records it in the global boid map,
// and launches its update loop on a fresh goroutine.
func createBoid(bid int) {
	boid := Boid{
		position: Vector2D{x: rand.Float64() * screenWidth, y: rand.Float64() * screenHeight},
		velocity: Vector2D{x: (rand.Float64() * 2) - 1.0, y: (rand.Float64() * 2) - 1.0},
		id:       bid,
	}
	boids[bid] = &boid
	boid.updatePositionInMap()
	go boid.start()
}
// start runs the boid's simulation loop, advancing one step every 5ms.
// NOTE(review): the loop has no stop condition -- the goroutine runs for the
// lifetime of the process; consider a done channel/context if shutdown is needed.
func (b *Boid) start() {
	for {
		b.moveOne()
		time.Sleep(5 * time.Millisecond)
	}
}
// moveOne advances the boid one simulation step: apply acceleration, clamp
// velocity to [-1, 1] per axis, move, and reflect velocity off screen edges
// when the *next* step would leave the screen. Map bookkeeping (clearing the
// old cell, writing the new one) happens under the global write lock.
func (b *Boid) moveOne() {
	acceleration := b.calcAcceleration()
	rwLock.Lock()
	b.velocity = b.velocity.Add(acceleration).limit(-1, 1)
	// vacate the old grid cell before moving
	boidMap[int(b.position.x)][int(b.position.y)] = -1
	b.position = b.position.Add(b.velocity)
	b.updatePositionInMap()
	// bounce off walls based on where the next step would land
	next := b.position.Add(b.velocity)
	if next.x > screenWidth || next.x < 0 {
		b.velocity = Vector2D{x: -b.velocity.x, y: b.velocity.y}
	}
	if next.y > screenHeight || next.y < 0 {
		b.velocity = Vector2D{x: b.velocity.x, y: -b.velocity.y}
	}
	rwLock.Unlock()
}
// updatePositionInMap writes this boid's id into the grid cell for its
// current position. Caller must hold the write lock (or be single-threaded,
// as during creation).
func (b *Boid) updatePositionInMap() {
	boidMap[int(b.position.x)][int(b.position.y)] = b.id
}
// calcAcceleration computes the classic three boid steering forces --
// alignment (match neighbours' average velocity), cohesion (steer toward
// neighbours' average position) and separation (steer away from close
// neighbours, weighted by 1/distance) -- plus a border-repulsion term.
// Neighbours are found by scanning the grid cells within viewRadius under
// the read lock, then filtered by true Euclidean distance.
func (b *Boid) calcAcceleration() Vector2D {
	upper, lower := b.position.AddV(viewRadius), b.position.AddV(-viewRadius)
	avgPosition := Vector2D{0, 0}
	avgVelocity := Vector2D{0, 0}
	separation := Vector2D{0, 0}
	count := 0.0
	rwLock.RLock()
	for i := math.Max(lower.x, 0); i <= math.Min(upper.x, screenWidth); i++ {
		for j := math.Max(lower.y, 0); j <= math.Min(upper.y, screenHeight); j++ {
			if otherBoidId := boidMap[int(i)][int(j)]; otherBoidId != -1 && otherBoidId != b.id {
				if dist := boids[otherBoidId].position.Distance(b.position); dist < viewRadius {
					count++
					avgVelocity = avgVelocity.Add(boids[otherBoidId].velocity)
					avgPosition = avgPosition.Add(boids[otherBoidId].position)
					separation = separation.Add(b.position.Subtract(boids[otherBoidId].position).DivisionV(dist))
				}
			}
		}
	}
	rwLock.RUnlock()
	// border repulsion is applied regardless of neighbour count
	accel := Vector2D{b.borderBounce(b.position.x, screenWidth), b.borderBounce(b.position.y, screenHeight)}
	if count > 0 {
		avgPosition = avgPosition.DivisionV(count)
		avgVelocity = avgVelocity.DivisionV(count)
		accelAlignment := avgVelocity.Subtract(b.velocity).MultiplyV(adjRate)
		accelCohesion := avgPosition.Subtract(b.position).MultiplyV(adjRate)
		accelSeparation := separation.MultiplyV(adjRate)
		accel = accel.Add(accelAlignment).Add(accelCohesion).Add(accelSeparation)
	}
	return accel
}
// borderBounce returns a repulsion term that grows as the boid nears either
// edge of the axis: positive (1/pos) near the low edge, negative near the
// high edge, and zero in the interior.
// NOTE(review): at pos == 0 the result is +Inf (float division by zero in
// Go does not panic) -- confirm callers never place a boid exactly on 0.
func (b *Boid) borderBounce(pos, maxBorderPos float64) float64 {
	if pos < viewRadius {
		return 1 / pos
	} else if pos > maxBorderPos-viewRadius {
		return 1 / (pos - maxBorderPos)
	}
	return 0
} | boids/boid.go | 0.636579 | 0.520374 | boid.go | starcoder
package solve_equation
import "fmt"
/*
* @lc app=leetcode id=640 lang=golang
*
* [640] Solve the Equation
*
* https://leetcode.com/problems/solve-the-equation/description/
*
* algorithms
* Medium (40.95%)
* Total Accepted: 20.6K
* Total Submissions: 50.2K
* Testcase Example: '"x+5-3+x=6+x-2"'
*
*
* Solve a given equation and return the value of x in the form of string
* "x=#value". The equation contains only '+', '-' operation, the variable x
* and its coefficient.
*
*
*
* If there is no solution for the equation, return "No solution".
*
*
* If there are infinite solutions for the equation, return "Infinite
* solutions".
*
*
* If there is exactly one solution for the equation, we ensure that the value
* of x is an integer.
*
*
* Example 1:
*
* Input: "x+5-3+x=6+x-2"
* Output: "x=2"
*
*
*
* Example 2:
*
* Input: "x=x"
* Output: "Infinite solutions"
*
*
*
* Example 3:
*
* Input: "2x=x"
* Output: "x=0"
*
*
*
* Example 4:
*
* Input: "2x+3x-6x=x+2"
* Output: "x=-1"
*
*
*
* Example 5:
*
* Input: "x=x+2"
* Output: "No solution"
*
*
*/
// solveEquation solves a linear equation in x containing only '+', '-',
// integer literals and x terms, returning "x=#value", "No solution" or
// "Infinite solutions".
//
// It runs a single left-to-right scan, keeping:
//   number   - the integer literal currently being parsed
//   positive - sign (+1/-1) to apply to the pending term
//   left     - whether we are left of the '=' sign
// Constants accumulate into leftNumber/rightNumber and x coefficients into
// leftCoefficient/rightCoefficient; '+', '-' and '=' flush the pending term.
func solveEquation(equation string) string {
	var number, leftNumber, leftCoefficient, rightNumber, rightCoefficient int
	positive, left := 1, true
	for pos, c := range []byte(equation) {
		switch c {
		case '=':
			// flush the last left-hand term and switch sides
			leftNumber += positive * number
			left, number, positive = false, 0, 1
		case '+':
			if left {
				leftNumber += positive * number
			} else {
				rightNumber += positive * number
			}
			positive, number = 1, 0
		case '-':
			if left {
				leftNumber += positive * number
			} else {
				rightNumber += positive * number
			}
			positive, number = -1, 0
		case 'x':
			// a bare "x" means coefficient 1, but "0x" (previous char '0'
			// with accumulated number 0) really is coefficient 0
			if number == 0 {
				if pos >= 1 && equation[pos-1] == '0' {
					number = 0
				} else {
					number = 1
				}
			}
			if left {
				leftCoefficient += positive * number
			} else {
				rightCoefficient += positive * number
			}
			number = 0
		default:
			// accumulate a decimal digit
			number = number*10 + int(c-'0')
		}
	}
	// flush the trailing right-hand term (number is 0 if the equation ended in x)
	rightNumber += positive * number
	// move everything to one side: coefficient*x = number
	coefficient := leftCoefficient - rightCoefficient
	number = rightNumber - leftNumber
	if coefficient == 0 && number == 0 {
		return "Infinite solutions"
	} else if coefficient == 0 && number != 0 {
		return "No solution"
	} else {
		return fmt.Sprintf("x=%d", number/coefficient)
	}
} | 640-solve-the-equation/640.solve-the-equation.go | 0.882933 | 0.402187 | 640.solve-the-equation.go | starcoder
package distuv
import (
"math"
"golang.org/x/exp/rand"
)
// Triangle represents a triangle distribution (https://en.wikipedia.org/wiki/Triangular_distribution).
// a is the lower limit, b the upper limit and c the mode; Src is an optional
// random number source used by Rand (nil means the global source).
type Triangle struct {
	a, b, c float64
	Src     *rand.Rand
}

// NewTriangle constructs a new triangle distribution with lower limit a, upper limit b, and mode c.
// Constraints are a < b and a ≤ c ≤ b.
// This distribution is uncommon in nature, but may be useful for simulation.
// Panics (via checkTriangleParameters) if the constraints are violated.
func NewTriangle(a, b, c float64) Triangle {
	checkTriangleParameters(a, b, c)
	return Triangle{a, b, c, nil}
}
// checkTriangleParameters panics unless the triangle constraints
// a < b and a <= c <= b hold.
func checkTriangleParameters(a, b, c float64) {
	switch {
	case a >= b:
		panic("triangle: constraint of a < b violated")
	case a > c:
		panic("triangle: constraint of a <= c violated")
	case c > b:
		panic("triangle: constraint of c <= b violated")
	}
}
// CDF computes the value of the cumulative density function at x.
// The CDF is piecewise quadratic: 0 below a, 1 above b.
func (t Triangle) CDF(x float64) float64 {
	switch {
	case x <= t.a:
		return 0
	case x <= t.c:
		d := x - t.a
		return (d * d) / ((t.b - t.a) * (t.c - t.a))
	case x < t.b:
		d := t.b - x
		return 1 - (d*d)/((t.b-t.a)*(t.b-t.c))
	default:
		return 1
	}
}

// Entropy returns the entropy of the distribution: 1/2 + ln((b-a)/2).
func (t Triangle) Entropy() float64 {
	return 0.5 + math.Log(t.b-t.a) - math.Ln2
}

// ExKurtosis returns the excess kurtosis of the distribution (constant -3/5).
func (Triangle) ExKurtosis() float64 {
	return -3.0 / 5.0
}

// Fit is not appropriate for Triangle, because the distribution is generally used when there is little data.

// LogProb computes the natural logarithm of the value of the probability density function at x.
// Returns -Inf outside [a, b].
func (t Triangle) LogProb(x float64) float64 {
	return math.Log(t.Prob(x))
}

// Mean returns the mean of the probability distribution: (a+b+c)/3.
func (t Triangle) Mean() float64 {
	return (t.a + t.b + t.c) / 3
}

// Median returns the median of the probability distribution.
// The branch depends on whether the mode lies above or below the midpoint.
func (t Triangle) Median() float64 {
	if t.c >= (t.a+t.b)/2 {
		return t.a + math.Sqrt((t.b-t.a)*(t.c-t.a)/2)
	}
	return t.b - math.Sqrt((t.b-t.a)*(t.b-t.c)/2)
}

// Mode returns the mode of the probability distribution (the c parameter).
func (t Triangle) Mode() float64 {
	return t.c
}

// NumParameters returns the number of parameters in the distribution (a, b, c).
func (Triangle) NumParameters() int {
	return 3
}

// Prob computes the value of the probability density function at x.
// The density rises linearly from a to the peak 2/(b-a) at c, then falls
// linearly to b; it is 0 outside [a, b].
func (t Triangle) Prob(x float64) float64 {
	switch {
	case x < t.a:
		return 0
	case x < t.c:
		return 2 * (x - t.a) / ((t.b - t.a) * (t.c - t.a))
	case x == t.c:
		return 2 / (t.b - t.a)
	case x <= t.b:
		return 2 * (t.b - x) / ((t.b - t.a) * (t.b - t.c))
	default:
		return 0
	}
}

// Quantile returns the inverse of the cumulative probability distribution.
// Panics if p is outside [0, 1].
func (t Triangle) Quantile(p float64) float64 {
	if p < 0 || p > 1 {
		panic(badPercentile)
	}
	// f is the CDF value at the mode; it picks the quadratic piece to invert
	f := (t.c - t.a) / (t.b - t.a)
	if p < f {
		return t.a + math.Sqrt(p*(t.b-t.a)*(t.c-t.a))
	}
	return t.b - math.Sqrt((1-p)*(t.b-t.a)*(t.b-t.c))
}

// Rand returns a random sample drawn from the distribution,
// by inverse-transform sampling through Quantile.
func (t Triangle) Rand() float64 {
	var rnd float64
	if t.Src == nil {
		rnd = rand.Float64()
	} else {
		rnd = t.Src.Float64()
	}
	return t.Quantile(rnd)
}

// Skewness returns the skewness of the distribution.
func (t Triangle) Skewness() float64 {
	n := math.Sqrt2 * (t.a + t.b - 2*t.c) * (2*t.a - t.b - t.c) * (t.a - 2*t.b + t.c)
	d := 5 * math.Pow(t.a*t.a+t.b*t.b+t.c*t.c-t.a*t.b-t.a*t.c-t.b*t.c, 3.0/2.0)
	return n / d
}

// StdDev returns the standard deviation of the probability distribution.
func (t Triangle) StdDev() float64 {
	return math.Sqrt(t.Variance())
}

// Survival returns the survival function (complementary CDF) at x.
func (t Triangle) Survival(x float64) float64 {
	return 1 - t.CDF(x)
}

// MarshalParameters implements the ParameterMarshaler interface.
// Panics if len(p) != NumParameters().
func (t Triangle) MarshalParameters(p []Parameter) {
	if len(p) != t.NumParameters() {
		panic("triangle: improper parameter length")
	}
	p[0].Name = "A"
	p[0].Value = t.a
	p[1].Name = "B"
	p[1].Value = t.b
	p[2].Name = "C"
	p[2].Value = t.c
}

// UnmarshalParameters implements the ParameterMarshaler interface.
// Panics on wrong length, wrong parameter names, or invalid constraints.
func (t *Triangle) UnmarshalParameters(p []Parameter) {
	if len(p) != t.NumParameters() {
		panic("triangle: incorrect number of parameters to set")
	}
	if p[0].Name != "A" {
		panic("triangle: " + panicNameMismatch)
	}
	if p[1].Name != "B" {
		panic("triangle: " + panicNameMismatch)
	}
	if p[2].Name != "C" {
		panic("triangle: " + panicNameMismatch)
	}
	checkTriangleParameters(p[0].Value, p[1].Value, p[2].Value)
	t.a = p[0].Value
	t.b = p[1].Value
	t.c = p[2].Value
}

// Variance returns the variance of the probability distribution.
func (t Triangle) Variance() float64 {
	return (t.a*t.a + t.b*t.b + t.c*t.c - t.a*t.b - t.a*t.c - t.b*t.c) / 18
} | vendor/gonum.org/v1/gonum/stat/distuv/triangle.go | 0.890948 | 0.690872 | triangle.go | starcoder
package giu
import (
"image"
"image/color"
"github.com/AllenDang/giu/imgui"
)
// Canvas is a thin wrapper around the current window's imgui draw list,
// exposing drawing primitives with image.Point / color.RGBA arguments.
type Canvas struct {
	drawlist imgui.DrawList
}

// GetCanvas returns a Canvas bound to the current window's draw list.
func GetCanvas() *Canvas {
	return &Canvas{
		drawlist: imgui.GetWindowDrawList(),
	}
}

// AddLine draws a line from p1 to p2 with the given color and thickness.
func (c *Canvas) AddLine(p1, p2 image.Point, color color.RGBA, thickness float32) {
	c.drawlist.AddLine(ToVec2(p1), ToVec2(p2), ToVec4Color(color), thickness)
}
// DrawFlags mirrors imgui's ImDrawFlags bit flags for shape and corner-rounding options.
type DrawFlags int

const (
	DrawFlags_None DrawFlags = 0
	DrawFlags_Closed DrawFlags = 1 << 0 // PathStroke(), AddPolyline(): specify that shape should be closed (important: this is always == 1 for legacy reason)
	DrawFlags_RoundCornersTopLeft DrawFlags = 1 << 4 // AddRect(), AddRectFilled(), PathRect(): enable rounding top-left corner only (when rounding > 0.0f, we default to all corners). Was 0x01.
	DrawFlags_RoundCornersTopRight DrawFlags = 1 << 5 // AddRect(), AddRectFilled(), PathRect(): enable rounding top-right corner only (when rounding > 0.0f, we default to all corners). Was 0x02.
	DrawFlags_RoundCornersBottomLeft DrawFlags = 1 << 6 // AddRect(), AddRectFilled(), PathRect(): enable rounding bottom-left corner only (when rounding > 0.0f, we default to all corners). Was 0x04.
	DrawFlags_RoundCornersBottomRight DrawFlags = 1 << 7 // AddRect(), AddRectFilled(), PathRect(): enable rounding bottom-right corner only (when rounding > 0.0f, we default to all corners). Was 0x08.
	DrawFlags_RoundCornersNone DrawFlags = 1 << 8 // AddRect(), AddRectFilled(), PathRect(): disable rounding on all corners (when rounding > 0.0f). This is NOT zero, NOT an implicit flag!
	DrawFlags_RoundCornersTop DrawFlags = DrawFlags_RoundCornersTopLeft | DrawFlags_RoundCornersTopRight
	DrawFlags_RoundCornersBottom DrawFlags = DrawFlags_RoundCornersBottomLeft | DrawFlags_RoundCornersBottomRight
	DrawFlags_RoundCornersLeft DrawFlags = DrawFlags_RoundCornersBottomLeft | DrawFlags_RoundCornersTopLeft
	DrawFlags_RoundCornersRight DrawFlags = DrawFlags_RoundCornersBottomRight | DrawFlags_RoundCornersTopRight
	DrawFlags_RoundCornersAll DrawFlags = DrawFlags_RoundCornersTopLeft | DrawFlags_RoundCornersTopRight | DrawFlags_RoundCornersBottomLeft | DrawFlags_RoundCornersBottomRight
	DrawFlags_RoundCornersDefault_ DrawFlags = DrawFlags_RoundCornersAll // Default to ALL corners if none of the _RoundCornersXX flags are specified.
	DrawFlags_RoundCornersMask_ DrawFlags = DrawFlags_RoundCornersAll | DrawFlags_RoundCornersNone
)
// The methods below are direct pass-throughs to the underlying imgui draw
// list; points and colors are converted with ToVec2 / ToVec4Color.

// AddRect draws a rectangle outline; rounding_corners selects which corners to round.
func (c *Canvas) AddRect(pMin, pMax image.Point, color color.RGBA, rounding float32, rounding_corners DrawFlags, thickness float32) {
	c.drawlist.AddRect(ToVec2(pMin), ToVec2(pMax), ToVec4Color(color), rounding, int(rounding_corners), thickness)
}

// AddRectFilled draws a filled rectangle.
func (c *Canvas) AddRectFilled(pMin, pMax image.Point, color color.RGBA, rounding float32, rounding_corners DrawFlags) {
	c.drawlist.AddRectFilled(ToVec2(pMin), ToVec2(pMax), ToVec4Color(color), rounding, int(rounding_corners))
}

// AddText draws text at pos.
func (c *Canvas) AddText(pos image.Point, color color.RGBA, text string) {
	c.drawlist.AddText(ToVec2(pos), ToVec4Color(color), text)
}

// AddBezierCubic draws a cubic Bezier curve from pos0 to pos1 with control points cp0, cp1.
func (c *Canvas) AddBezierCubic(pos0, cp0, cp1, pos1 image.Point, color color.RGBA, thickness float32, num_segments int) {
	c.drawlist.AddBezierCubic(ToVec2(pos0), ToVec2(cp0), ToVec2(cp1), ToVec2(pos1), ToVec4Color(color), thickness, num_segments)
}

// AddTriangle draws a triangle outline.
func (c *Canvas) AddTriangle(p1, p2, p3 image.Point, color color.RGBA, thickness float32) {
	c.drawlist.AddTriangle(ToVec2(p1), ToVec2(p2), ToVec2(p3), ToVec4Color(color), thickness)
}

// AddTriangleFilled draws a filled triangle.
func (c *Canvas) AddTriangleFilled(p1, p2, p3 image.Point, color color.RGBA) {
	c.drawlist.AddTriangleFilled(ToVec2(p1), ToVec2(p2), ToVec2(p3), ToVec4Color(color))
}

// AddCircle draws a circle outline.
func (c *Canvas) AddCircle(center image.Point, radius float32, color color.RGBA, thickness float32) {
	c.drawlist.AddCircle(ToVec2(center), radius, ToVec4Color(color), thickness)
}

// AddCircleFilled draws a filled circle.
func (c *Canvas) AddCircleFilled(center image.Point, radius float32, color color.RGBA) {
	c.drawlist.AddCircleFilled(ToVec2(center), radius, ToVec4Color(color))
}

// AddQuad draws a quadrilateral outline.
func (c *Canvas) AddQuad(p1, p2, p3, p4 image.Point, color color.RGBA, thickness float32) {
	c.drawlist.AddQuad(ToVec2(p1), ToVec2(p2), ToVec2(p3), ToVec2(p4), ToVec4Color(color), thickness)
}

// AddQuadFilled draws a filled quadrilateral.
func (c *Canvas) AddQuadFilled(p1, p2, p3, p4 image.Point, color color.RGBA) {
	c.drawlist.AddQuadFilled(ToVec2(p1), ToVec2(p2), ToVec2(p3), ToVec2(p4), ToVec4Color(color))
}

// Stateful path API, add points then finish with PathFillConvex() or PathStroke()
func (c *Canvas) PathClear() {
	c.drawlist.PathClear()
}

func (c *Canvas) PathLineTo(pos image.Point) {
	c.drawlist.PathLineTo(ToVec2(pos))
}

func (c *Canvas) PathLineToMergeDuplicate(pos image.Point) {
	c.drawlist.PathLineToMergeDuplicate(ToVec2(pos))
}

func (c *Canvas) PathFillConvex(color color.RGBA) {
	c.drawlist.PathFillConvex(ToVec4Color(color))
}

func (c *Canvas) PathStroke(color color.RGBA, closed bool, thickness float32) {
	c.drawlist.PathStroke(ToVec4Color(color), closed, thickness)
}

func (c *Canvas) PathArcTo(center image.Point, radius, a_min, a_max float32, num_segments int) {
	c.drawlist.PathArcTo(ToVec2(center), radius, a_min, a_max, num_segments)
}

func (c *Canvas) PathArcToFast(center image.Point, radius float32, a_min_of_12, a_max_of_12 int) {
	c.drawlist.PathArcToFast(ToVec2(center), radius, a_min_of_12, a_max_of_12)
}

func (c *Canvas) PathBezierCubicCurveTo(p1, p2, p3 image.Point, num_segments int) {
	c.drawlist.PathBezierCubicCurveTo(ToVec2(p1), ToVec2(p2), ToVec2(p3), num_segments)
}

// AddImage draws texture stretched over the rectangle pMin..pMax.
func (c *Canvas) AddImage(texture *Texture, pMin, pMax image.Point) {
	c.drawlist.AddImage(texture.id, ToVec2(pMin), ToVec2(pMax))
}

// AddImageV draws texture over pMin..pMax with explicit UV coordinates and a tint color.
func (c *Canvas) AddImageV(texture *Texture, pMin, pMax image.Point, uvMin, uvMax image.Point, color color.RGBA) {
	c.drawlist.AddImageV(texture.id, ToVec2(pMin), ToVec2(pMax), ToVec2(uvMin), ToVec2(uvMax), ToVec4Color(color))
} | Canvas.go | 0.75101 | 0.57821 | Canvas.go | starcoder
package sort
import (
"fmt"
"math"
)
// IndexMaxHeap is a max-heap over indices into a backing data slice.
// Elements are addressed by their 0-based insertion index, so a value can be
// updated in place with Modify while keeping the heap ordered.
//
// All three slices use a 1-based layout with a math.MinInt32 sentinel at
// position 0:
//   datas[d]  - the value inserted d-th (d = insertion index + 1)
//   indexs[k] - heap position k holds data slot indexs[k]
//   revs[d]   - current heap position of data slot d, or 0 once extracted
type IndexMaxHeap struct {
	/**real data**/
	datas []int
	/**
	binary heap of index for data
	**/
	indexs []int
	revs   []int
}

// NewIndexMaxHeap returns an empty heap with capacity for cap elements.
func NewIndexMaxHeap(cap int) *IndexMaxHeap {
	heap := &IndexMaxHeap{make([]int, 0, cap+1), make([]int, 0, cap+1),
		make([]int, 0, cap+1)}
	heap.datas = append(heap.datas, math.MinInt32)
	heap.indexs = append(heap.indexs, math.MinInt32)
	heap.revs = append(heap.revs, math.MinInt32)
	return heap
}

// Size returns the number of elements currently on the heap.
func (heap *IndexMaxHeap) Size() int {
	return len(heap.indexs) - 1
}

// Insert adds v to the heap; its insertion index is the number of prior Inserts.
func (heap *IndexMaxHeap) Insert(v int) {
	heap.datas = append(heap.datas, v)
	heap.indexs = append(heap.indexs, len(heap.datas)-1)
	heap.revs = append(heap.revs, len(heap.indexs)-1)
	heap.shiftUp(heap.Size())
}

// Extract removes and returns the maximum value. Panics on an empty heap.
func (heap *IndexMaxHeap) Extract() int {
	v := heap.datas[heap.indexs[1]]
	heap.indexs[1], heap.indexs[heap.Size()] =
		heap.indexs[heap.Size()], heap.indexs[1]
	heap.revs[heap.indexs[1]] = 1
	// revs == 0 marks the slot as no longer on the heap (see Contain)
	heap.revs[heap.indexs[heap.Size()]] = 0
	heap.indexs = heap.indexs[:heap.Size()]
	heap.shiftDown(1)
	return v
}

// shiftUp restores the heap property by bubbling position k toward the root.
func (heap *IndexMaxHeap) shiftUp(k int) {
	for k > 1 && heap.datas[heap.indexs[k/2]] < heap.datas[heap.indexs[k]] {
		heap.indexs[k], heap.indexs[k/2] = heap.indexs[k/2], heap.indexs[k]
		heap.revs[heap.indexs[k]] = k
		heap.revs[heap.indexs[k/2]] = k / 2
		k /= 2
	}
}

// shiftDown restores the heap property by sinking position k toward the leaves.
func (heap *IndexMaxHeap) shiftDown(k int) {
	for 2*k < len(heap.indexs) {
		j := 2 * k // left child; pick the larger child if a right one exists
		if j+1 < len(heap.indexs) &&
			heap.datas[heap.indexs[j]] < heap.datas[heap.indexs[j+1]] {
			j++
		}
		if heap.datas[heap.indexs[j]] <= heap.datas[heap.indexs[k]] {
			break
		}
		heap.indexs[k], heap.indexs[j] = heap.indexs[j], heap.indexs[k]
		heap.revs[heap.indexs[k]] = k
		heap.revs[heap.indexs[j]] = j
		k = j
	}
}

// Empty reports whether the heap has no elements.
func (heap *IndexMaxHeap) Empty() bool {
	return len(heap.indexs) == 1
}

// Contain reports whether insertion index i is still on the heap.
// Fix: the original only checked the upper bound, so Contain(-1) mapped to
// the sentinel slot 0 (revs[0] == math.MinInt32 != 0) and wrongly returned
// true; negative indices now return false.
func (heap *IndexMaxHeap) Contain(i int) bool {
	i++
	if i < 1 || i >= len(heap.datas) {
		return false
	}
	return heap.revs[i] != 0
}

// Modify sets the value at insertion index i to v and re-heapifies.
// The caller should ensure Contain(i) first; out-of-range i panics.
func (heap *IndexMaxHeap) Modify(i int, v int) {
	i++
	heap.datas[i] = v
	heap.shiftUp(heap.revs[i])
	heap.shiftDown(heap.revs[i])
}
// TestPrintln dumps the heap's internal slices to stdout for debugging.
func (heap *IndexMaxHeap) TestPrintln() {
	fmt.Printf("datas:%v\n", heap.datas)
	fmt.Printf("indexs:%v\n", heap.indexs)
	fmt.Printf("revs:%v\n", heap.revs)
} | indexMaxHeap.go | 0.605682 | 0.449332 | indexMaxHeap.go | starcoder
package merkleblock
import (
"github.com/gcash/bchd/blockchain"
"github.com/gcash/bchd/chaincfg/chainhash"
"github.com/gcash/bchd/wire"
"github.com/gcash/bchutil"
"github.com/gcash/bchutil/bloom"
)
// MerkleBlock is used to house intermediate information needed to generate a
// wire.MsgMerkleBlock. matchedBits holds one byte (0x00/0x01) per transaction;
// finalHashes and bits are filled in by traverseAndBuild.
type MerkleBlock struct {
	numTx       uint32
	allHashes   []*chainhash.Hash
	finalHashes []*chainhash.Hash
	matchedBits []byte
	bits        []byte
}
// calcTreeWidth calculates and returns the number of nodes (width) of a
// merkle tree at the given depth-first height.
func (m *MerkleBlock) calcTreeWidth(height uint32) uint32 {
	return (m.numTx + (1 << height) - 1) >> height
}

// calcHash returns the hash for a sub-tree given a depth-first height and
// node position. When a node has no right child, the left child's hash is
// duplicated, per the bitcoin merkle tree rules.
func (m *MerkleBlock) calcHash(height, pos uint32) *chainhash.Hash {
	if height == 0 {
		return m.allHashes[pos]
	}

	var right *chainhash.Hash
	left := m.calcHash(height-1, pos*2)
	if pos*2+1 < m.calcTreeWidth(height-1) {
		right = m.calcHash(height-1, pos*2+1)
	} else {
		right = left
	}
	return blockchain.HashMerkleBranches(left, right)
}
// traverseAndBuild builds a partial merkle tree using a recursive depth-first
// approach.  As it calculates the hashes, it also saves whether or not each
// node is a parent node and a list of final hashes to be included in the
// merkle block.
func (m *MerkleBlock) traverseAndBuild(height, pos uint32) {
	// Determine whether this node is a parent of a matched node.
	var isParent byte
	for i := pos << height; i < (pos+1)<<height && i < m.numTx; i++ {
		isParent |= m.matchedBits[i]
	}
	m.bits = append(m.bits, isParent)

	// When the node is a leaf node or not a parent of a matched node,
	// append the hash to the list that will be part of the final merkle
	// block.
	if height == 0 || isParent == 0x00 {
		m.finalHashes = append(m.finalHashes, m.calcHash(height, pos))
		return
	}

	// At this point, the node is an internal node and it is the parent of
	// of an included leaf node.

	// Descend into the left child and process its sub-tree.
	m.traverseAndBuild(height-1, pos*2)

	// Descend into the right child and process its sub-tree if
	// there is one.
	if pos*2+1 < m.calcTreeWidth(height-1) {
		m.traverseAndBuild(height-1, pos*2+1)
	}
}
// TxInSet reports whether the given transaction hash appears in the given
// list of transaction hashes (compared by value, not by pointer).
func TxInSet(tx *chainhash.Hash, set []*chainhash.Hash) bool {
	for _, candidate := range set {
		if *candidate == *tx {
			return true
		}
	}
	return false
}
// NewMerkleBlockWithFilter returns a new *wire.MsgMerkleBlock and an array of the matched
// transaction index numbers based on the passed block and bloom filter.
func NewMerkleBlockWithFilter(block *bchutil.Block, filter *bloom.Filter) (*wire.MsgMerkleBlock, []uint32) {
	numTx := uint32(len(block.Transactions()))
	mBlock := MerkleBlock{
		numTx:       numTx,
		allHashes:   make([]*chainhash.Hash, 0, numTx),
		matchedBits: make([]byte, 0, numTx),
	}

	// Mark each transaction matched (0x01) or not (0x00) per the filter,
	// collecting the indices of matches as we go.
	matchedMap := bloom.GetMatchedIndices(block, filter)
	var matchedIndices []uint32
	for txIndex, tx := range block.Transactions() {
		if matchedMap[txIndex] {
			mBlock.matchedBits = append(mBlock.matchedBits, 0x01)
			matchedIndices = append(matchedIndices, uint32(txIndex))
		} else {
			mBlock.matchedBits = append(mBlock.matchedBits, 0x00)
		}
		mBlock.allHashes = append(mBlock.allHashes, tx.Hash())
	}
	return mBlock.calcBlock(block), matchedIndices
}

// NewMerkleBlockWithTxnSet returns a new *wire.MsgMerkleBlock containing a
// partial merkle tree built using the list of transactions provided
func NewMerkleBlockWithTxnSet(block *bchutil.Block, txnSet []*chainhash.Hash) (*wire.MsgMerkleBlock, []uint32) {
	numTx := uint32(len(block.Transactions()))
	mBlock := MerkleBlock{
		numTx:       numTx,
		allHashes:   make([]*chainhash.Hash, 0, numTx),
		matchedBits: make([]byte, 0, numTx),
	}

	// add all block transactions to merkle block and set bits for matching
	// transactions
	var matchedIndices []uint32
	for txIndex, tx := range block.Transactions() {
		if TxInSet(tx.Hash(), txnSet) {
			mBlock.matchedBits = append(mBlock.matchedBits, 0x01)
			matchedIndices = append(matchedIndices, uint32(txIndex))
		} else {
			mBlock.matchedBits = append(mBlock.matchedBits, 0x00)
		}
		mBlock.allHashes = append(mBlock.allHashes, tx.Hash())
	}
	return mBlock.calcBlock(block), matchedIndices
}
// calcBlock calculates the merkleBlock when created from either a TxnSet or
// by a bloom.Filter: it builds the partial merkle tree and packs the
// traversal bits into the message's Flags bitfield (LSB-first per byte).
func (m *MerkleBlock) calcBlock(block *bchutil.Block) *wire.MsgMerkleBlock {
	// Calculate the number of merkle branches (height) in the tree.
	height := uint32(0)
	for m.calcTreeWidth(height) > 1 {
		height++
	}

	// Build the depth-first partial merkle tree.
	m.traverseAndBuild(height, 0)

	// Create and return the merkle block.
	msgMerkleBlock := wire.MsgMerkleBlock{
		Header:       block.MsgBlock().Header,
		Transactions: m.numTx,
		Hashes:       make([]*chainhash.Hash, 0, len(m.finalHashes)),
		Flags:        make([]byte, (len(m.bits)+7)/8),
	}
	for _, hash := range m.finalHashes {
		msgMerkleBlock.AddTxHash(hash)
	}
	for i := uint32(0); i < uint32(len(m.bits)); i++ {
		msgMerkleBlock.Flags[i/8] |= m.bits[i] << (i % 8)
	}
	return &msgMerkleBlock
} | merkleblock/encode.go | 0.751739 | 0.406332 | encode.go | starcoder
package square
// A payment represents a paid transaction between a Square merchant and a customer. Payment details are usually available from Connect API endpoints within a few minutes after the transaction completes. Each Payment object includes several fields that end in `_money`. These fields describe the various amounts of money that contribute to the payment total: <ul> <li> Monetary values are <b>positive</b> if they represent an <em>increase</em> in the amount of money the merchant receives (e.g., <code>tax_money</code>, <code>tip_money</code>). </li> <li> Monetary values are <b>negative</b> if they represent an <em>decrease</em> in the amount of money the merchant receives (e.g., <code>discount_money</code>, <code>refunded_money</code>). </li> </ul>
type V1Payment struct {
// The payment's unique identifier.
Id string `json:"id,omitempty"`
// The unique identifier of the merchant that took the payment.
MerchantId string `json:"merchant_id,omitempty"`
// The time when the payment was created, in ISO 8601 format. Reflects the time of the first payment if the object represents an incomplete partial payment, and the time of the last or complete payment otherwise.
CreatedAt string `json:"created_at,omitempty"`
// The unique identifier of the Square account that took the payment.
CreatorId string `json:"creator_id,omitempty"`
Device *Device `json:"device,omitempty"`
// The URL of the payment's detail page in the merchant dashboard. The merchant must be signed in to the merchant dashboard to view this page.
PaymentUrl string `json:"payment_url,omitempty"`
// The URL of the receipt for the payment. Note that for split tender payments, this URL corresponds to the receipt for the first tender listed in the payment's tender field. Each Tender object has its own receipt_url field you can use to get the other receipts associated with a split tender payment.
ReceiptUrl string `json:"receipt_url,omitempty"`
InclusiveTaxMoney *V1Money `json:"inclusive_tax_money,omitempty"`
AdditiveTaxMoney *V1Money `json:"additive_tax_money,omitempty"`
TaxMoney *V1Money `json:"tax_money,omitempty"`
TipMoney *V1Money `json:"tip_money,omitempty"`
DiscountMoney *V1Money `json:"discount_money,omitempty"`
TotalCollectedMoney *V1Money `json:"total_collected_money,omitempty"`
ProcessingFeeMoney *V1Money `json:"processing_fee_money,omitempty"`
NetTotalMoney *V1Money `json:"net_total_money,omitempty"`
RefundedMoney *V1Money `json:"refunded_money,omitempty"`
SwedishRoundingMoney *V1Money `json:"swedish_rounding_money,omitempty"`
GrossSalesMoney *V1Money `json:"gross_sales_money,omitempty"`
NetSalesMoney *V1Money `json:"net_sales_money,omitempty"`
// All of the inclusive taxes associated with the payment.
InclusiveTax []V1PaymentTax `json:"inclusive_tax,omitempty"`
// All of the additive taxes associated with the payment.
AdditiveTax []V1PaymentTax `json:"additive_tax,omitempty"`
// All of the tenders associated with the payment.
Tender []V1Tender `json:"tender,omitempty"`
// All of the refunds applied to the payment. Note that the value of all refunds on a payment can exceed the value of all tenders if a merchant chooses to refund money to a tender after previously accepting returned goods as part of an exchange.
Refunds []V1Refund `json:"refunds,omitempty"`
// The items purchased in the payment.
Itemizations []V1PaymentItemization `json:"itemizations,omitempty"`
SurchargeMoney *V1Money `json:"surcharge_money,omitempty"`
// A list of all surcharges associated with the payment.
Surcharges []V1PaymentSurcharge `json:"surcharges,omitempty"`
// Indicates whether or not the payment is only partially paid for. If true, this payment will have the tenders collected so far, but the itemizations will be empty until the payment is completed.
IsPartial bool `json:"is_partial,omitempty"`
} | square/model_v1_payment.go | 0.824179 | 0.671969 | model_v1_payment.go | starcoder |
package gofinancial
import (
"time"
"github.com/shopspring/decimal"
"github.com/razorpay/go-financial/enums/paymentperiod"
"github.com/razorpay/go-financial/enums/interesttype"
"github.com/razorpay/go-financial/enums/frequency"
)
// Config is used to store details used in generation of amortization table.
type Config struct {
	StartDate              time.Time          // Starting day of the amortization schedule (inclusive)
	EndDate                time.Time          // Ending day of the amortization schedule (inclusive)
	Frequency              frequency.Type     // Frequency enum with DAILY, WEEKLY, MONTHLY or ANNUALLY
	AmountBorrowed         decimal.Decimal    // Amount Borrowed
	InterestType           interesttype.Type  // InterestType enum with FLAT or REDUCING value.
	Interest               decimal.Decimal    // Interest in basis points
	PaymentPeriod          paymentperiod.Type // Payment period enum to know whether payment made at the BEGINNING or ENDING of a period
	EnableRounding         bool               // If enabled, the final values in amortization schedule are rounded
	RoundingPlaces         int32              // If specified, the final values in amortization schedule are rounded to these many places
	RoundingErrorTolerance decimal.Decimal    // Any difference in [payment-(principal+interest)] will be adjusted in interest component, upto the RoundingErrorTolerance value specified
	// The fields below are derived by setPeriodsAndDates and are not set by callers.
	periods    int64       // derived: number of periods spanned by [StartDate, EndDate]
	startDates []time.Time // derived: start date of each period, indexed by period
	endDates   []time.Time // derived: end date (23:59:59) of each period, indexed by period
}
// setPeriodsAndDates derives c.periods and materialises the per-period
// start and end dates from c.StartDate, c.EndDate and c.Frequency.
// It returns an error when the range is not an exact number of periods
// or when the frequency is unknown.
func (c *Config) setPeriodsAndDates() error {
	// Truncate both bounds to midnight, preserving each one's location.
	sy, sm, sd := c.StartDate.Date()
	startDate := time.Date(sy, sm, sd, 0, 0, 0, 0, c.StartDate.Location())
	ey, em, ed := c.EndDate.Date()
	endDate := time.Date(ey, em, ed, 0, 0, 0, 0, c.EndDate.Location())
	period, err := GetPeriodDifference(startDate, endDate, c.Frequency)
	if err != nil {
		return err
	}
	c.periods = int64(period)
	for i := 0; i < period; i++ {
		date, err := getStartDate(startDate, c.Frequency, i)
		if err != nil {
			return err
		}
		// The first period keeps the caller-supplied StartDate (including its
		// original time of day); later periods use the normalised midnight date.
		if i == 0 {
			c.startDates = append(c.startDates, c.StartDate)
		} else {
			c.startDates = append(c.startDates, date)
		}
		if endDate, err := getEndDates(date, c.Frequency); err != nil {
			return err
		} else {
			c.endDates = append(c.endDates, endDate)
		}
	}
	return nil
}
// GetPeriodDifference returns the number of whole periods of the given
// frequency spanned by the inclusive range [from, to]. It returns
// ErrUnevenEndDate when the range does not contain an exact number of
// periods, and ErrInvalidFrequency for an unrecognised frequency.
func GetPeriodDifference(from time.Time, to time.Time, freq frequency.Type) (int, error) {
	switch freq {
	case frequency.DAILY:
		return int(to.Sub(from).Hours()/24) + 1, nil
	case frequency.WEEKLY:
		days := int(to.Sub(from).Hours()/24) + 1
		if days%7 != 0 {
			return -1, ErrUnevenEndDate
		}
		return days / 7, nil
	case frequency.MONTHLY:
		months, err := getMonthsBetweenDates(from, to)
		if err != nil {
			return -1, err
		}
		return *months, nil
	case frequency.ANNUALLY:
		years, err := getYearsBetweenDates(from, to)
		if err != nil {
			return -1, err
		}
		return *years, nil
	default:
		return -1, ErrInvalidFrequency
	}
}
// getStartDate returns the start date of the period at the given zero-based
// index, counted from date at the supplied frequency.
func getStartDate(date time.Time, freq frequency.Type, index int) (time.Time, error) {
	switch freq {
	case frequency.DAILY:
		return date.AddDate(0, 0, index), nil
	case frequency.WEEKLY:
		return date.AddDate(0, 0, 7*index), nil
	case frequency.MONTHLY:
		return date.AddDate(0, index, 0), nil
	case frequency.ANNUALLY:
		return date.AddDate(index, 0, 0), nil
	default:
		return time.Time{}, ErrInvalidFrequency
	}
}
// getMonthsBetweenDates counts the whole months in the inclusive range
// [start, end]; it returns ErrUnevenEndDate when end does not fall exactly
// on the last day of a month-aligned range.
func getMonthsBetweenDates(start time.Time, end time.Time) (*int, error) {
	months := 0
	cursor := start
	for cursor.Before(end) {
		cursor = cursor.AddDate(0, 1, 0)
		months++
	}
	if !cursor.AddDate(0, 0, -1).Equal(end) {
		return nil, ErrUnevenEndDate
	}
	return &months, nil
}
// getYearsBetweenDates counts the whole years in the inclusive range
// [start, end]; it returns ErrUnevenEndDate when end does not fall exactly
// on the last day of a year-aligned range.
func getYearsBetweenDates(start time.Time, end time.Time) (*int, error) {
	years := 0
	cursor := start
	for cursor.Before(end) {
		cursor = cursor.AddDate(1, 0, 0)
		years++
	}
	if !cursor.AddDate(0, 0, -1).Equal(end) {
		return nil, ErrUnevenEndDate
	}
	return &years, nil
}
// getEndDates returns the last instant (23:59:59) of the period that starts
// at date for the given frequency.
func getEndDates(date time.Time, freq frequency.Type) (time.Time, error) {
	switch freq {
	case frequency.DAILY:
		// date itself is the last day of the period.
	case frequency.WEEKLY:
		date = date.AddDate(0, 0, 6)
	case frequency.MONTHLY:
		// Two-step AddDate is kept deliberately: it normalises the +1 month
		// first, then steps back a day.
		date = date.AddDate(0, 1, 0).AddDate(0, 0, -1)
	case frequency.ANNUALLY:
		date = date.AddDate(1, 0, 0).AddDate(0, 0, -1)
	default:
		return time.Time{}, ErrInvalidFrequency
	}
	return time.Date(date.Year(), date.Month(), date.Day(), 23, 59, 59, 0, date.Location()), nil
}
func (c *Config) getInterestRatePerPeriodInDecimal() decimal.Decimal {
hundred := decimal.NewFromInt(100)
freq := decimal.NewFromInt(int64(c.Frequency.Value()))
interestInPercent := c.Interest.Div(hundred)
InterestInDecimal := interestInPercent.Div(hundred)
InterestPerPeriod := InterestInDecimal.Div(freq)
return InterestPerPeriod
} | config.go | 0.68763 | 0.426322 | config.go | starcoder |
package main
import "fmt"
// maxHeap is a binary max-heap backed by a slice: the largest key is at
// index 0 and each parent is >= its children.
type maxHeap struct {
	array []int
}
// insert adds an element to the heap, then bubbles it up to restore the
// max-heap property.
func (h *maxHeap) insert(key int) {
	h.array = append(h.array, key)
	h.maxHeapifyUp(len(h.array) - 1)
}
// extract returns the largest key and removes it from the heap.
// It returns -1 (after printing a message) when the heap is empty.
//
// Fix: the original read h.array[0] before the emptiness check, so an empty
// heap panicked with an index-out-of-range before the guard could run.
// The guard now comes first.
func (h *maxHeap) extract() int {
	// Guard before touching h.array[0]: indexing an empty slice panics.
	if len(h.array) == 0 {
		fmt.Println("cannot extract because array length is 0")
		return -1
	}
	extracted := h.array[0]
	l := len(h.array) - 1
	// Move the last element to the root, shrink, and sift down to restore
	// the heap property.
	h.array[0] = h.array[l]
	h.array = h.array[:l]
	h.maxHeapifyDown(0)
	return extracted
}
// maxHeapifyUp restores the heap property bottom-up: the element at index
// is swapped with its parent while it is strictly larger than the parent.
func (h *maxHeap) maxHeapifyUp(index int) {
	for {
		p := parent(index)
		if h.array[p] >= h.array[index] {
			return
		}
		h.swap(p, index)
		index = p
	}
}
// maxHeapifyDown restores the heap property top-down: the element at index
// sinks by swapping with its larger child until both children are smaller
// (or it has no children).
//
// Fix: in the original, the child-selection/swap logic sat OUTSIDE the
// `for l <= lastIndex` loop, whose body only assigned childToCompare —
// so any node with a child caused an infinite loop, and the swap ran at
// most once. The whole step now lives inside the loop.
func (h *maxHeap) maxHeapifyDown(index int) {
	lastIndex := len(h.array) - 1
	l, r := left(index), right(index)
	// Loop while index has at least one child.
	for l <= lastIndex {
		var childToCompare int
		if l == lastIndex {
			// Left child is the only child.
			childToCompare = l
		} else if h.array[l] > h.array[r] {
			// Left child is larger.
			childToCompare = l
		} else {
			// Right child is larger (or equal).
			childToCompare = r
		}
		if h.array[index] >= h.array[childToCompare] {
			return
		}
		h.swap(index, childToCompare)
		index = childToCompare
		l, r = left(index), right(index)
	}
}
// parent returns the index of node i's parent. For the root (i == 0) the
// integer division (0-1)/2 truncates toward zero, yielding 0 again.
func parent(i int) int {
	return (i - 1) / 2
}

// left returns the index of node i's left child.
func left(i int) int {
	return i*2 + 1
}

// right returns the index of node i's right child.
func right(i int) int {
	return i*2 + 2
}
// swap exchanges the keys stored at indices i1 and i2.
func (h *maxHeap) swap(i1, i2 int) {
	tmp := h.array[i1]
	h.array[i1] = h.array[i2]
	h.array[i2] = tmp
}
func main() {
max := &maxHeap{}
fmt.Println(max)
buildHeap := []int{10, 25, 35, 67, 45, 3, 23, 13, 24, 75, 62, 18, 14, 12}
for _, i := range buildHeap {
max.insert(i)
fmt.Println(max)
}
for _, v := range buildHeap {
max.extract()
fmt.Println(v, max)
}
} | heap/main.go | 0.654674 | 0.40698 | main.go | starcoder |
package yamltags
import (
"errors"
"fmt"
"reflect"
"strconv"
"strings"
)
// ProcessStruct validates and processes the provided pointer to a struct.
// It applies any `yamltags` struct tags to each field and recurses into
// fields that are themselves structs. NOTE(review): it recurses only into
// plain struct fields, not pointers/slices/maps of structs — confirm that
// is intentional for the callers.
func ProcessStruct(s interface{}) error {
	parentStruct := reflect.ValueOf(s).Elem()
	t := parentStruct.Type()
	// Loop through the fields on the struct, looking for tags.
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		val := parentStruct.Field(i)
		field := parentStruct.Type().Field(i)
		if tags, ok := f.Tag.Lookup("yamltags"); ok {
			if err := ProcessTags(tags, val, parentStruct, field); err != nil {
				return err
			}
		}
		// Recurse down the struct
		if val.Kind() == reflect.Struct {
			if err := ProcessStruct(val.Addr().Interface()); err != nil {
				return err
			}
		}
	}
	return nil
}
// ProcessTags parses the comma-separated yamltags string (e.g.
// "required,default=foo") and applies each recognised tag to val.
//
// Fix: an unrecognised tag previously left yt nil, so the yt.Load call
// below panicked with a nil-pointer dereference; it now returns a
// descriptive error instead.
func ProcessTags(yamltags string, val reflect.Value, parentStruct reflect.Value, field reflect.StructField) error {
	tags := strings.Split(yamltags, ",")
	for _, tag := range tags {
		tagParts := strings.Split(tag, "=")
		var yt YamlTag
		switch tagParts[0] {
		case "required":
			yt = &RequiredTag{}
		case "default":
			yt = &DefaultTag{}
		case "oneOf":
			yt = &OneOfTag{
				Field:  field,
				Parent: parentStruct,
			}
		default:
			return fmt.Errorf("unknown yaml tag %q in %q", tagParts[0], yamltags)
		}
		if err := yt.Load(tagParts); err != nil {
			return err
		}
		if err := yt.Process(val); err != nil {
			return err
		}
	}
	return nil
}
// YamlTag is one parsed `yamltags` directive. Load receives the tag split
// on "=" (name, then optional argument); Process applies the directive to
// a field value.
type YamlTag interface {
	Load([]string) error
	Process(reflect.Value) error
}
// RequiredTag rejects fields that are still set to their type's zero value.
type RequiredTag struct {
}

// Load is a no-op: the required tag takes no argument.
func (rt *RequiredTag) Load(s []string) error {
	return nil
}

// Process returns an error when val holds its type's zero value.
func (rt *RequiredTag) Process(val reflect.Value) error {
	if !isZeroValue(val) {
		return nil
	}
	return errors.New("required value not set")
}
// DefaultTag fills a field with a default value when the field is unset.
type DefaultTag struct {
	// dv is the raw default value, taken from the text after "=" in the tag.
	dv string
}

// Load stores the default value from the tag parts ("default", value).
func (dt *DefaultTag) Load(s []string) error {
	if len(s) != 2 {
		return fmt.Errorf("invalid default tag: %v, expected key=value", s)
	}
	dt.dv = s[1]
	return nil
}

// Process assigns the stored default to val when val is still the zero value.
// NOTE(review): only signed-integer and string kinds are handled here —
// defaults on uint/float/bool fields are silently ignored; confirm intended.
func (dt *DefaultTag) Process(val reflect.Value) error {
	// A field that already has a value keeps it.
	if !isZeroValue(val) {
		return nil
	}
	switch val.Kind() {
	case reflect.Int, reflect.Int16, reflect.Int32, reflect.Int64:
		// base 0 lets the default be written as decimal, hex (0x..) or octal.
		i, err := strconv.ParseInt(dt.dv, 0, 0)
		if err != nil {
			return err
		}
		val.SetInt(i)
	case reflect.String:
		val.SetString(dt.dv)
	}
	return nil
}
// A program can have many structs, that each have many oneOfSets
// each oneOfSet is a map of a set name to the list of fields that belong to that set
// only one field in that list can have a non-zero value.
var allOneOfs map[string]map[string][]string

// getOneOfSetsForStruct returns the oneOf sets registered for the named
// struct, lazily creating an empty entry on first use.
func getOneOfSetsForStruct(structName string) map[string][]string {
	if sets, ok := allOneOfs[structName]; ok {
		return sets
	}
	sets := map[string][]string{}
	allOneOfs[structName] = sets
	return sets
}
// OneOfTag enforces that at most one field within a named set is non-zero.
type OneOfTag struct {
	Field  reflect.StructField // the field this tag instance is attached to
	Parent reflect.Value       // the struct value containing the field
	// oneOfSets maps set name -> field names registered for that set,
	// shared per enclosing struct via the package-level registry.
	oneOfSets map[string][]string
	// setName is the set this field belongs to (the text after "=").
	setName string
}

// Load registers this field under its set name in the per-struct registry.
func (oot *OneOfTag) Load(s []string) error {
	if len(s) != 2 {
		return fmt.Errorf("invalid default struct tag: %v, expected key=value", s)
	}
	oot.setName = s[1]

	// Fetch the right oneOfSet for the struct.
	structName := oot.Parent.Type().Name()
	oot.oneOfSets = getOneOfSetsForStruct(structName)

	// Add this field to the oneOfSet
	oot.oneOfSets[oot.setName] = append(oot.oneOfSets[oot.setName], oot.Field.Name)
	return nil
}

// Process fails when another field of the same set is also non-zero.
func (oot *OneOfTag) Process(val reflect.Value) error {
	// A zero-valued field can never conflict with its siblings.
	if isZeroValue(val) {
		return nil
	}
	// This must exist because process is always called after Load.
	oneOfSet := oot.oneOfSets[oot.setName]
	for _, otherField := range oneOfSet {
		if otherField == oot.Field.Name {
			continue
		}
		field := oot.Parent.FieldByName(otherField)
		if !isZeroValue(field) {
			return fmt.Errorf("only one element in set %s can be set. got %s and %s", oot.setName, otherField, oot.Field.Name)
		}
	}
	return nil
}
func isZeroValue(val reflect.Value) bool {
zv := reflect.Zero(val.Type()).Interface()
return reflect.DeepEqual(zv, val.Interface())
}
func init() {
allOneOfs = make(map[string]map[string][]string)
} | pkg/skaffold/yamltags/tags.go | 0.589244 | 0.403773 | tags.go | starcoder |
package storj
// RedundancyScheme specifies the parameters and the algorithm for redundancy
type RedundancyScheme struct {
	// Algorithm determines the algorithm to be used for redundancy.
	Algorithm RedundancyAlgorithm

	// ShareSize is the size in bytes for each erasure shares.
	ShareSize int32

	// RequiredShares is the minimum number of shares required to recover a
	// stripe, reed-solomon k.
	RequiredShares int16
	// RepairShares is the minimum number of safe shares that can remain
	// before a repair is triggered.
	RepairShares int16
	// OptimalShares is the desired total number of shares for a segment.
	OptimalShares int16
	// TotalShares is the number of shares to encode. If it is larger than
	// OptimalShares, slower uploads of the excess shares will be aborted in
	// order to improve performance.
	TotalShares int16
}
// IsZero returns true if no field in the struct is set to non-zero value.
func (scheme RedundancyScheme) IsZero() bool {
	var zero RedundancyScheme
	return scheme == zero
}
// StripeSize is the number of bytes for a stripe.
// Stripes are erasure encoded and split into n shares, where we need k to
// reconstruct the stripe. Therefore a stripe size is the erasure share size
// times the required shares, k.
func (scheme RedundancyScheme) StripeSize() int32 {
	return int32(scheme.RequiredShares) * scheme.ShareSize
}
// DownloadNodes calculates the number of nodes needed to download in the
// presence of node failure based on t = k + (n-o)k/o.
func (scheme RedundancyScheme) DownloadNodes() int32 {
	k := int32(scheme.RequiredShares)
	n := int32(scheme.TotalShares)
	o := int32(scheme.OptimalShares)

	extra := int32(1)
	if o > 0 {
		extra = (n - o) * k / o
		if extra == 0 {
			// ensure there is at least one extra node, so we can have
			// error detection/correction
			extra = 1
		}
	}

	needed := k + extra
	if needed > n {
		needed = n
	}
	return needed
}
// RedundancyAlgorithm is the algorithm used for redundancy
// (Fix: stripped non-Go dataset residue that trailed the closing parenthesis.)
type RedundancyAlgorithm byte

// List of supported redundancy algorithms
const (
	InvalidRedundancyAlgorithm = RedundancyAlgorithm(iota)
	ReedSolomon
)
package iso20022
// Specifies the details relative to the submission of the certificate data set.
type RequiredSubmission4 struct {
	// Specifies with party(ies) is authorised to submit the data set as part of the transaction.
	Submitter []*BICIdentification1 `xml:"Submitr"`
	// Specifies the type of the certificate.
	CertificateType *TradeCertificateType1Code `xml:"CertTp"`
	// Specifies if the issuer must be matched as part of the validation of the data set.
	MatchIssuer *PartyIdentification27 `xml:"MtchIssr,omitempty"`
	// Specifies if the issue date must be matched as part of the validation of the data set.
	MatchIssueDate *YesNoIndicator `xml:"MtchIsseDt"`
	// Specifies if the inspection date must be matched as part of the validation of the data set.
	MatchInspectionDate *YesNoIndicator `xml:"MtchInspctnDt"`
	// Specifies if the indication of an authorised inspector must be present as part of the validation of the data set.
	AuthorisedInspectorIndicator *YesNoIndicator `xml:"AuthrsdInspctrInd"`
	// Specifies if the consignee must be matched as part of the validation of the data set.
	MatchConsignee *YesNoIndicator `xml:"MtchConsgn"`
	// Specifies if the manufacturer must be matched as part of the validation of the data set.
	MatchManufacturer *PartyIdentification27 `xml:"MtchManfctr,omitempty"`
	// Specifies if the certificate data set is required in relation to specific line items, and which line items.
	LineItemIdentification []*Max70Text `xml:"LineItmId,omitempty"`
}
func (r *RequiredSubmission4) AddSubmitter() *BICIdentification1 {
newValue := new(BICIdentification1)
r.Submitter = append(r.Submitter, newValue)
return newValue
}
func (r *RequiredSubmission4) SetCertificateType(value string) {
r.CertificateType = (*TradeCertificateType1Code)(&value)
}
func (r *RequiredSubmission4) AddMatchIssuer() *PartyIdentification27 {
r.MatchIssuer = new(PartyIdentification27)
return r.MatchIssuer
}
func (r *RequiredSubmission4) SetMatchIssueDate(value string) {
r.MatchIssueDate = (*YesNoIndicator)(&value)
}
func (r *RequiredSubmission4) SetMatchInspectionDate(value string) {
r.MatchInspectionDate = (*YesNoIndicator)(&value)
}
func (r *RequiredSubmission4) SetAuthorisedInspectorIndicator(value string) {
r.AuthorisedInspectorIndicator = (*YesNoIndicator)(&value)
}
func (r *RequiredSubmission4) SetMatchConsignee(value string) {
r.MatchConsignee = (*YesNoIndicator)(&value)
}
func (r *RequiredSubmission4) AddMatchManufacturer() *PartyIdentification27 {
r.MatchManufacturer = new(PartyIdentification27)
return r.MatchManufacturer
}
func (r *RequiredSubmission4) AddLineItemIdentification(value string) {
r.LineItemIdentification = append(r.LineItemIdentification, (*Max70Text)(&value))
} | RequiredSubmission4.go | 0.802285 | 0.407687 | RequiredSubmission4.go | starcoder |
package cpu
// Good reference with some info Z80 heaven doesn't contain:
// http://www.devrs.com/gb/files/GBCPU_Instr.html
// RLCr -> e.g. RLC B
// Performs 8-bit rotation to the left
// Rotated bit is copied to carry
// Flags: Z00C
func (gbcpu *GBCPU) RLCr(reg *byte) {
	// Bit 7 becomes both the new bit 0 and the carry flag.
	carry := *reg >> 7
	*reg = (*reg<<1 | carry)

	if *reg == 0 {
		gbcpu.Regs.setZero()
	} else {
		gbcpu.Regs.clearZero()
	}

	if carry != 0 {
		gbcpu.Regs.setCarry()
	} else {
		gbcpu.Regs.clearCarry()
	}

	gbcpu.Regs.clearSubtract()
	gbcpu.Regs.clearHalfCarry()
}

// RLCHL -> e.g. RLC (HL)
// Performs 8-bit rotation to the left of value at address (HL)
// Rotated bit is copied to carry
// Flags: Z00C
func (gbcpu *GBCPU) RLCHL() {
	// Operates directly on the memory byte addressed by register pair HL.
	gbcpu.RLCr(&GbMMU.Memory[gbcpu.Regs.JoinRegs(&gbcpu.Regs.h, &gbcpu.Regs.l)])
}
// RRCr -> e.g. RRC B
// Performs 8-bit rotation to the right
// Rotated bit is copied to carry
// Flags: Z00C
func (gbcpu *GBCPU) RRCr(reg *byte) {
	// Bit 0, pre-shifted into bit-7 position; nonzero iff bit 0 was set.
	carry := *reg << 7
	*reg = (*reg>>1 | carry)

	if *reg == 0 {
		gbcpu.Regs.setZero()
	} else {
		gbcpu.Regs.clearZero()
	}

	if carry != 0 {
		gbcpu.Regs.setCarry()
	} else {
		gbcpu.Regs.clearCarry()
	}

	gbcpu.Regs.clearSubtract()
	gbcpu.Regs.clearHalfCarry()
}

// RRCHL -> e.g. RRC (HL)
// Performs 8-bit rotation to the right of value at address (HL)
// Rotated bit is copied to carry
// Flags: Z00C
func (gbcpu *GBCPU) RRCHL() {
	// Operates directly on the memory byte addressed by register pair HL.
	gbcpu.RRCr(&GbMMU.Memory[gbcpu.Regs.JoinRegs(&gbcpu.Regs.h, &gbcpu.Regs.l)])
}
// RLr -> e.g. RL B
// Rotates register left through CF
// Store old 0th bit into carry
// Old carry becomes new 7th bit
// Flags: Z00C
func (gbcpu *GBCPU) RLr(reg *byte) {
	// Old bit 7 goes to carry; the previous carry flows into bit 0.
	carry := (*reg >> 7)
	oldCarry := gbcpu.Regs.getCarry()
	*reg = ((*reg << 1) | oldCarry)

	if *reg == 0 {
		gbcpu.Regs.setZero()
	} else {
		gbcpu.Regs.clearZero()
	}

	if carry != 0 {
		gbcpu.Regs.setCarry()
	} else {
		gbcpu.Regs.clearCarry()
	}

	gbcpu.Regs.clearSubtract()
	gbcpu.Regs.clearHalfCarry()
}

// RLHL -> e.g. RL (HL)
// Rotates value at address (HL) left through CF
// Store old 0th bit into carry
// Old carry becomes new 7th bit
// Flags: Z00C
func (gbcpu *GBCPU) RLHL() {
	// Operates directly on the memory byte addressed by register pair HL.
	gbcpu.RLr(&GbMMU.Memory[gbcpu.Regs.JoinRegs(&gbcpu.Regs.h, &gbcpu.Regs.l)])
}
// RRr -> e.g. RR B
// Rotates register right through CF
// Flags: Z00C
func (gbcpu *GBCPU) RRr(reg *byte) {
	// Previous carry flows into bit 7; old bit 0 becomes the new carry.
	carry := gbcpu.Regs.getCarry()

	if *reg&0x01 != 0 {
		gbcpu.Regs.setCarry()
	} else {
		gbcpu.Regs.clearCarry()
	}

	*reg = (*reg>>1 | carry<<7)

	if *reg == 0 {
		gbcpu.Regs.setZero()
	} else {
		gbcpu.Regs.clearZero()
	}

	gbcpu.Regs.clearSubtract()
	gbcpu.Regs.clearHalfCarry()
}

// RRHL -> e.g. RR (HL)
// Rotates value at address (HL) right through CF
// Store old 0th bit into carry
// Old carry becomes new 7th bit
// Flags: Z00C
func (gbcpu *GBCPU) RRHL() {
	// Operates directly on the memory byte addressed by register pair HL.
	gbcpu.RRr(&GbMMU.Memory[gbcpu.Regs.JoinRegs(&gbcpu.Regs.h, &gbcpu.Regs.l)])
}
// SLAr -> e.g. SLA B
// Shift reg left into carry
// Least significant bit of reg set to 0
// Flags: Z00C
func (gbcpu *GBCPU) SLAr(reg *byte) {
	// Old bit 7 is shifted out into the carry flag.
	if *reg&0x80 != 0 {
		gbcpu.Regs.setCarry()
	} else {
		gbcpu.Regs.clearCarry()
	}

	*reg = *reg << 1

	if *reg == 0 {
		gbcpu.Regs.setZero()
	} else {
		gbcpu.Regs.clearZero()
	}

	gbcpu.Regs.clearSubtract()
	gbcpu.Regs.clearHalfCarry()
}

// SLAHL -> e.g. SLA (HL)
// Shift value at addr (HL) left into carry
// Least significant bit of reg set to 0
// Flags: Z00C
func (gbcpu *GBCPU) SLAHL() {
	// Operates directly on the memory byte addressed by register pair HL.
	gbcpu.SLAr(&GbMMU.Memory[gbcpu.Regs.JoinRegs(&gbcpu.Regs.h, &gbcpu.Regs.l)])
}
// SRAr -> e.g. SRA B
// Arithmetic shift right: bit 0 goes into carry, most significant bit
// of reg is unaffected (sign-preserving).
// Flags: Z00C
// NOTE(review): the original header claimed "Flags: Z000", but the code
// below clearly sets/clears carry from bit 0 — corrected to Z00C.
func (gbcpu *GBCPU) SRAr(reg *byte) {
	if *reg&0x01 != 0 {
		gbcpu.Regs.setCarry()
	} else {
		gbcpu.Regs.clearCarry()
	}

	// Shift right while re-asserting the old bit 7 (sign bit).
	*reg = (*reg>>1 | *reg&0x80)

	if *reg == 0 {
		gbcpu.Regs.setZero()
	} else {
		gbcpu.Regs.clearZero()
	}

	gbcpu.Regs.clearSubtract()
	gbcpu.Regs.clearHalfCarry()
}

// SRAHL -> e.g. SRA (HL)
// Arithmetic shift right of the value at addr (HL); bit 0 goes into carry,
// most significant bit is unaffected.
// Flags: Z00C (see note on SRAr)
func (gbcpu *GBCPU) SRAHL() {
	// Operates directly on the memory byte addressed by register pair HL.
	gbcpu.SRAr(&GbMMU.Memory[gbcpu.Regs.JoinRegs(&gbcpu.Regs.h, &gbcpu.Regs.l)])
}
// SWAPr -> e.g. SWAP B
// Swap nibbles of reg
// Flags: Z000
func (gbcpu *GBCPU) SWAPr(reg *byte) {
	// Exchange high and low 4-bit halves.
	*reg = (*reg << 4) | (*reg >> 4)

	if *reg == 0 {
		gbcpu.Regs.setZero()
	} else {
		gbcpu.Regs.clearZero()
	}

	gbcpu.Regs.clearSubtract()
	gbcpu.Regs.clearHalfCarry()
	gbcpu.Regs.clearCarry()
}

// SWAPHL -> e.g. SWAP (HL)
// Swap nibbles of value at addr (HL)
// Flags: Z000
func (gbcpu *GBCPU) SWAPHL() {
	// Operates directly on the memory byte addressed by register pair HL.
	gbcpu.SWAPr(&GbMMU.Memory[gbcpu.Regs.JoinRegs(&gbcpu.Regs.h, &gbcpu.Regs.l)])
}
// SRLr -> e.g. SRL B
// Shift reg right into carry
// Most significant bit of reg is set to 0
// Flags: Z00C
func (gbcpu *GBCPU) SRLr(reg *byte) {
	// Old bit 0 is shifted out into the carry flag.
	if *reg&0x01 != 0 {
		gbcpu.Regs.setCarry()
	} else {
		gbcpu.Regs.clearCarry()
	}

	*reg = *reg >> 1

	if *reg == 0 {
		gbcpu.Regs.setZero()
	} else {
		gbcpu.Regs.clearZero()
	}

	gbcpu.Regs.clearSubtract()
	gbcpu.Regs.clearHalfCarry()
}

// SRLHL -> e.g. SRL (HL)
// Shift value at addr (HL) right into carry
// Most significant bit of reg is set to 0
// Flags: Z00C
func (gbcpu *GBCPU) SRLHL() {
	// Operates directly on the memory byte addressed by register pair HL.
	gbcpu.SRLr(&GbMMU.Memory[gbcpu.Regs.JoinRegs(&gbcpu.Regs.h, &gbcpu.Regs.l)])
}
// BITnr -> e.g. BIT 0,B
// Test bit at position in reg
// Flags: Z01-
func (gbcpu *GBCPU) BITnr(pos uint8, reg *byte) {
	// Zero flag reflects whether the tested bit is clear.
	bitVal := *reg & (1 << pos)

	if bitVal == 0 {
		gbcpu.Regs.setZero()
	} else {
		gbcpu.Regs.clearZero()
	}

	gbcpu.Regs.clearSubtract()
	gbcpu.Regs.setHalfCarry()
}

// BITHL -> e.g. BIT 0,(HL)
// Test bit at position in value at addr (HL)
// Flags: Z01-
func (gbcpu *GBCPU) BITHL(pos uint8) {
	// Operates directly on the memory byte addressed by register pair HL.
	gbcpu.BITnr(pos, &GbMMU.Memory[gbcpu.Regs.JoinRegs(&gbcpu.Regs.h, &gbcpu.Regs.l)])
}
// RESnr -> e.g. RES 0,B
// Reset bit in register
// Flags: ----
func (gbcpu *GBCPU) RESnr(pos uint8, reg *byte) {
	// AND NOT clears exactly the bit at pos.
	*reg &^= (1 << pos)
}

// RESHL -> e.g. RES 0,(HL)
// Reset bit in value at addr (HL)
// Flags: ----
func (gbcpu *GBCPU) RESHL(pos uint8) {
	// Operates directly on the memory byte addressed by register pair HL.
	gbcpu.RESnr(pos, &GbMMU.Memory[gbcpu.Regs.JoinRegs(&gbcpu.Regs.h, &gbcpu.Regs.l)])
}
// SETnr -> e.g. SET 0,B
// Set bit in register
// Flags: ----
func (gbcpu *GBCPU) SETnr(pos uint8, reg *byte) {
*reg |= (1 << pos)
}
// SETHL -> e.g. SET 0,(HL)
// Set bit in value at addr (HL)
// Flags: ----
func (gbcpu *GBCPU) SETHL(pos uint8) {
gbcpu.SETnr(pos, &GbMMU.Memory[gbcpu.Regs.JoinRegs(&gbcpu.Regs.h, &gbcpu.Regs.l)])
} | cpu/executors-cb.go | 0.606382 | 0.42937 | executors-cb.go | starcoder |
package rbtree
// This file contains all RB tree modification method implementations
// Insert inserts new node into Red-Black tree. Creates Root if tree is empty.
// A nil key is silently ignored.
func (tree *rbTree) Insert(z Comparable) {
	if z == nil {
		return
	}
	n := newNode(z)
	tree.insert(n)
}
// ReplaceOrInsert inserts new node into Red-Black tree. Creates Root if tree is empty.
// If an equal key already exists it is removed first and its key returned;
// otherwise nil is returned. A nil argument is a no-op returning nil.
func (tree *rbTree) ReplaceOrInsert(z Comparable) Comparable {
	if z == nil {
		return nil
	}
	var r Comparable
	n, ok := tree.SearchNode(z)
	if ok {
		// Remove the existing node so the new key takes its place.
		tree.delete(n)
		r = n.key
	}
	tree.insert(newNode(z))
	return r
}

// newNode wraps a key in a fresh, unlinked tree node.
func newNode(z Comparable) *Node {
	return &Node{key: z}
}
// insert links node z into the tree as in an ordinary BST, maintaining the
// per-node subtree size along the descent, then restores the red-black
// invariants via rbInsertFixup.
func (tree *rbTree) insert(z *Node) {
	// Empty tree: z becomes the (black) root.
	if tree.root.isNil() {
		tree.root = z
		tree.root.color = black
		tree.root.parent = tree.tnil
		tree.root.left = tree.tnil
		tree.root.right = tree.tnil
		tree.root.size = 1
		return
	}
	y := tree.tnil
	x := tree.root
	z.size = 1
	// Standard BST descent; every node on the path gains one descendant.
	for x != tree.tnil {
		y = x
		y.size++
		if z.key.Less(x.key) {
			x = x.left
		} else {
			x = x.right
		}
	}
	z.parent = y
	if z.key.Less(y.key) {
		y.left = z
	} else {
		y.right = z
	}
	// New nodes start red; fixup restores the red-black properties.
	z.left = tree.tnil
	z.right = tree.tnil
	z.color = red
	rbInsertFixup(tree, z)
}
// rbInsertFixup restores the red-black properties after inserting the red
// node z (classic CLRS cases: recolor when the uncle is red, otherwise
// rotate once or twice).
func rbInsertFixup(tree *rbTree, z *Node) {
	for z.parent.color == red {
		if z.parent == z.parent.parent.left {
			// Parent is a left child; y is z's uncle.
			y := z.parent.parent.right
			if y.color == red {
				// Case 1: red uncle — recolor and move the violation up.
				z.parent.color = black
				y.color = black
				z.parent.parent.color = red
				z = z.parent.parent
			} else {
				if z == z.parent.right {
					// Case 2: transform into case 3 with a left rotation.
					z = z.parent
					leftRotate(tree, z)
				}
				// Case 3: recolor and rotate the grandparent right.
				z.parent.color = black
				z.parent.parent.color = red
				rightRotate(tree, z.parent.parent)
			}
		} else {
			// Mirror image: parent is a right child.
			y := z.parent.parent.left
			if y.color == red {
				z.parent.color = black
				y.color = black
				z.parent.parent.color = red
				z = z.parent.parent
			} else {
				if z == z.parent.left {
					z = z.parent
					rightRotate(tree, z)
				}
				z.parent.color = black
				z.parent.parent.color = red
				leftRotate(tree, z.parent.parent)
			}
		}
	}
	tree.root.color = black
}
// Delete searches and deletes first found node with key value specified from Red-black tree
// It returns true if node was successfully deleted otherwise false
func (tree *rbTree) Delete(c Comparable) bool {
	node, found := tree.root.search(c)
	if !found {
		return false
	}
	tree.delete(node)
	return true
}

// DeleteAll searches and deletes all found nodes with key value specified from Red-black tree
// It returns true if nodes was successfully deleted otherwise false
func (tree *rbTree) DeleteAll(c Comparable) bool {
	deleted := false
	for tree.Delete(c) {
		deleted = true
	}
	return deleted
}
// delete unlinks node z from the tree (CLRS RB-DELETE), decrementing subtree
// sizes on the path to the root, and rebalances with rbDeleteFixup when a
// black node was removed. A nil or unlinked z is a no-op.
func (tree *rbTree) delete(z *Node) {
	if z == nil || z.parent == nil {
		return
	}
	y := z
	// Every ancestor loses one descendant.
	p := z.parent
	for p != tree.tnil {
		p.size--
		p = p.parent
	}
	var x *Node
	yOriginalColor := y.color
	if z.left == tree.tnil {
		// At most one (right) child: splice z out directly.
		x = z.right
		rbTransplant(tree, z, z.right)
	} else if z.right == tree.tnil {
		x = z.left
		rbTransplant(tree, z, z.left)
	} else {
		// Two children: replace z with its in-order successor
		// (this inner y shadows the outer one deliberately).
		y := z.right.minimum()
		yOriginalColor = y.color
		x = y.right
		if y.parent == z {
			x.parent = y
		} else {
			rbTransplant(tree, y, y.right)
			y.right = z.right
			y.right.parent = y
		}
		rbTransplant(tree, z, y)
		y.left = z.left
		y.left.parent = y
		y.color = z.color
	}
	// Removing a black node may violate the black-height invariant.
	if yOriginalColor == black {
		rbDeleteFixup(tree, x)
	}
}
// rbDeleteFixup restores the red-black properties after a black node was
// removed; x carries an "extra black" that is pushed up or resolved via the
// four CLRS cases (mirrored for left/right).
func rbDeleteFixup(tree *rbTree, x *Node) {
	for x != tree.root && x.color == black {
		if x == x.parent.left {
			// w is x's sibling.
			w := x.parent.right
			if w.color == red {
				// Case 1: red sibling — rotate to get a black sibling.
				w.color = black
				x.parent.color = red
				leftRotate(tree, x.parent)
				w = x.parent.right
			}
			if w.left.color == black && w.right.color == black {
				// Case 2: both of w's children black — recolor, move up.
				w.color = red
				x = x.parent
			} else {
				if w.right.color == black {
					// Case 3: transform into case 4.
					w.left.color = black
					w.color = red
					rightRotate(tree, w)
					w = x.parent.right
				}
				// Case 4: terminal — recolor and rotate, done.
				w.color = x.parent.color
				x.parent.color = black
				w.right.color = black
				leftRotate(tree, x.parent)
				x = tree.root
			}
		} else {
			// Mirror image: x is a right child.
			w := x.parent.left
			if w.color == red {
				w.color = black
				x.parent.color = red
				rightRotate(tree, x.parent)
				w = x.parent.left
			}
			if w.right.color == black && w.left.color == black {
				w.color = red
				x = x.parent
			} else {
				if w.left.color == black {
					w.right.color = black
					w.color = red
					leftRotate(tree, w)
					w = x.parent.left
				}
				w.color = x.parent.color
				x.parent.color = black
				w.left.color = black
				rightRotate(tree, x.parent)
				x = tree.root
			}
		}
	}
	x.color = black
}
// rbTransplant replaces the subtree rooted at u with the subtree rooted at
// v in u's parent; when u was the root, v becomes the new root and inherits
// u's size minus the removed node.
func rbTransplant(tree *rbTree, u *Node, v *Node) {
	if u.parent == tree.tnil {
		tree.root = v
		tree.root.size = u.size - 1
	} else if u == u.parent.left {
		u.parent.left = v
	} else {
		u.parent.right = v
	}
	v.parent = u.parent
}
// leftRotate rotates the subtree rooted at x to the left: x's right child y
// takes x's place, x becomes y's left child, and the subtree sizes of both
// are recomputed.
func leftRotate(tree *rbTree, x *Node) {
	y := x.right
	x.right = y.left

	if y.left != tree.tnil {
		y.left.parent = x
	}

	y.parent = x.parent

	if x.parent == tree.tnil {
		tree.root = y
	} else if x == x.parent.left {
		x.parent.left = y
	} else {
		x.parent.right = y
	}

	y.left = x
	x.parent = y

	// y inherits x's old size; x is recomputed from its new children.
	y.size = x.size
	x.size = x.left.size + x.right.size + 1
}
func rightRotate(tree *rbTree, x *Node) {
y := x.left
x.left = y.right
if y.right != tree.tnil {
y.right.parent = x
}
y.parent = x.parent
if x.parent == tree.tnil {
tree.root = y
} else if x == x.parent.right {
x.parent.right = y
} else {
x.parent.left = y
}
y.right = x
x.parent = y
y.size = x.size
x.size = x.left.size + x.right.size + 1
} | rbtree/modification.go | 0.788949 | 0.463262 | modification.go | starcoder |
package doltcore
import (
"fmt"
"strconv"
"github.com/liquidata-inc/dolt/go/store/types"
)
// ConversionError records a failed conversion between two noms kinds,
// wrapping the underlying cause.
type ConversionError struct {
	fromKind types.NomsKind // kind of the source value
	toKind   types.NomsKind // kind the value was being converted to
	err      error          // underlying failure
}
// Error implements the error interface, describing the source kind, target
// kind and underlying cause.
//
// Fix: the original used fmt.Sprint, which inserts spaces between operands
// only when NEITHER adjacent operand is a string — here every operand is a
// string, so the message came out mashed together
// ("error convertingIntto..."). Sprintf with explicit spacing restores the
// intended message.
func (ce ConversionError) Error() string {
	toKindStr := types.KindToString[ce.toKind]
	fromKindStr := types.KindToString[ce.fromKind]
	return fmt.Sprintf("error converting %s to %s: %s", fromKindStr, toKindStr, ce.err.Error())
}
// IsConversionError reports whether err is a ConversionError.
func IsConversionError(err error) bool {
	switch err.(type) {
	case ConversionError:
		return true
	default:
		return false
	}
}

// GetFromAndToKinds extracts the source and destination kinds from a
// ConversionError; it panics when err is any other error type.
func GetFromAndToKinds(err error) (from, to types.NomsKind) {
	if ce, ok := err.(ConversionError); ok {
		return ce.fromKind, ce.toKind
	}
	panic("Check that this is a conversion error before using this.")
}

// GetUnderlyingError returns the wrapped cause of a ConversionError; it
// panics when err is any other error type.
func GetUnderlyingError(err error) error {
	if ce, ok := err.(ConversionError); ok {
		return ce.err
	}
	panic("Check that this is a conversion error before using this.")
}
// ConvFunc is a function that converts one noms value to another of a different type.
type ConvFunc func(types.Value) (types.Value, error)

// convFuncMap maps source kind -> destination kind -> converter.
// A nil entry means the conversion is not supported; every kind converts
// to NullKind, and every kind converts to itself via identityConvFunc.
var convFuncMap = map[types.NomsKind]map[types.NomsKind]ConvFunc{
	types.StringKind: {
		types.StringKind: identityConvFunc,
		types.UUIDKind:   convStringToUUID,
		types.UintKind:   convStringToUint,
		types.IntKind:    convStringToInt,
		types.FloatKind:  convStringToFloat,
		types.BoolKind:   convStringToBool,
		types.NullKind:   convToNullFunc},
	types.UUIDKind: {
		types.StringKind: convUUIDToString,
		types.UUIDKind:   identityConvFunc,
		types.UintKind:   nil,
		types.IntKind:    nil,
		types.FloatKind:  nil,
		types.BoolKind:   nil,
		types.NullKind:   convToNullFunc},
	types.UintKind: {
		types.StringKind: convUintToString,
		types.UUIDKind:   nil,
		types.UintKind:   identityConvFunc,
		types.IntKind:    convUintToInt,
		types.FloatKind:  convUintToFloat,
		types.BoolKind:   convUintToBool,
		types.NullKind:   convToNullFunc},
	types.IntKind: {
		types.StringKind: convIntToString,
		types.UUIDKind:   nil,
		types.UintKind:   convIntToUint,
		types.IntKind:    identityConvFunc,
		types.FloatKind:  convIntToFloat,
		types.BoolKind:   convIntToBool,
		types.NullKind:   convToNullFunc},
	types.FloatKind: {
		types.StringKind: convFloatToString,
		types.UUIDKind:   nil,
		types.UintKind:   convFloatToUint,
		types.IntKind:    convFloatToInt,
		types.FloatKind:  identityConvFunc,
		types.BoolKind:   convFloatToBool,
		types.NullKind:   convToNullFunc},
	types.BoolKind: {
		types.StringKind: convBoolToString,
		types.UUIDKind:   nil,
		types.UintKind:   convBoolToUint,
		types.IntKind:    convBoolToInt,
		types.FloatKind:  convBoolToFloat,
		types.BoolKind:   identityConvFunc,
		types.NullKind:   convToNullFunc},
	types.NullKind: {
		types.StringKind: convToNullFunc,
		types.UUIDKind:   convToNullFunc,
		types.UintKind:   convToNullFunc,
		types.IntKind:    convToNullFunc,
		types.FloatKind:  convToNullFunc,
		types.BoolKind:   convToNullFunc,
		types.NullKind:   convToNullFunc},
}
// GetConvFunc takes in a source kind and a destination kind and returns a ConvFunc which can convert values of the
// source kind to values of the destination kind. It returns nil when no such
// conversion is supported.
func GetConvFunc(srcKind, destKind types.NomsKind) ConvFunc {
	destKindMap, found := convFuncMap[srcKind]
	if !found {
		return nil
	}
	return destKindMap[destKind]
}
// identityConvFunc returns the value unchanged; used for same-kind conversions.
var identityConvFunc = func(value types.Value) (types.Value, error) {
	return value, nil
}

// convToNullFunc converts any value to the noms null value.
var convToNullFunc = func(types.Value) (types.Value, error) {
	return types.NullValue, nil
}
// The convStringTo* functions below unwrap a types.String and delegate to the
// corresponding stringTo* parsing helper. Each maps a nil input to a nil
// output so missing values survive conversion unchanged.
func convStringToFloat(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	return stringToFloat(string(val.(types.String)))
}
func convStringToBool(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	return stringToBool(string(val.(types.String)))
}
func convStringToInt(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	return stringToInt(string(val.(types.String)))
}
func convStringToUint(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	return stringToUint(string(val.(types.String)))
}
func convStringToUUID(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	return stringToUUID(string(val.(types.String)))
}

// convUUIDToString renders a types.UUID using its String method.
func convUUIDToString(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	return types.String(val.(types.UUID).String()), nil
}
// convUintToString converts a types.Uint to its base-10 string representation.
// A nil input yields a nil output.
func convUintToString(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	str := strconv.FormatUint(uint64(val.(types.Uint)), 10)
	return types.String(str), nil
}
// convUintToInt reinterprets a types.Uint as a signed 64-bit types.Int.
// Values above math.MaxInt64 wrap to negative. Nil input yields nil output.
func convUintToInt(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	n := uint64(val.(types.Uint))
	return types.Int(int64(n)), nil
}

// convUintToFloat converts a types.Uint to a types.Float.
func convUintToFloat(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	n := uint64(val.(types.Uint))
	return types.Float(float64(n)), nil
}

// convUintToBool maps zero to false and any non-zero value to true.
func convUintToBool(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	n := uint64(val.(types.Uint))
	return types.Bool(n != 0), nil
}
// convIntToString converts a types.Int to its base-10 string representation.
// A nil input yields a nil output.
func convIntToString(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	str := strconv.FormatInt(int64(val.(types.Int)), 10)
	return types.String(str), nil
}
// convIntToUint reinterprets a types.Int as an unsigned 64-bit types.Uint.
// Negative values wrap to large positives. Nil input yields nil output.
func convIntToUint(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	n := int64(val.(types.Int))
	return types.Uint(uint64(n)), nil
}

// convIntToFloat converts a types.Int to a types.Float.
func convIntToFloat(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	n := int64(val.(types.Int))
	return types.Float(float64(n)), nil
}

// convIntToBool maps zero to false and any non-zero value to true.
func convIntToBool(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	n := int64(val.(types.Int))
	return types.Bool(n != 0), nil
}
// convFloatToString renders a types.Float with the shortest decimal
// representation that round-trips ('f' format, -1 precision). A nil input
// yields a nil output.
func convFloatToString(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	str := strconv.FormatFloat(float64(val.(types.Float)), 'f', -1, 64)
	return types.String(str), nil
}
// convFloatToUint truncates a types.Float toward zero into a types.Uint.
// Negative or out-of-range inputs are undefined per Go's float-to-uint
// conversion rules. Nil input yields nil output.
func convFloatToUint(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	fl := float64(val.(types.Float))
	return types.Uint(uint64(fl)), nil
}
// convFloatToInt truncates a types.Float toward zero into a types.Int.
// A nil input yields a nil output.
func convFloatToInt(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	fl := float64(val.(types.Float))
	// Truncate via int64, not the platform-width int, so the result is the
	// same on 32- and 64-bit builds and matches the sibling conversions,
	// which all go through 64-bit integers.
	return types.Int(int64(fl)), nil
}
// convFloatToBool maps 0.0 to false and any other value to true.
// Nil input yields nil output.
func convFloatToBool(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	fl := float64(val.(types.Float))
	return types.Bool(fl != 0), nil
}
// Cached result values shared by the bool conversions below, so repeated
// conversions do not re-allocate the same constants.
var trueValStr = types.String("true")
var falseValStr = types.String("false")

// convBoolToString maps true/false to the strings "true"/"false".
// Nil input yields nil output.
func convBoolToString(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	b := val.(types.Bool)
	if b {
		return trueValStr, nil
	}
	return falseValStr, nil
}

var zeroUintVal = types.Uint(0)
var oneUintVal = types.Uint(1)

// convBoolToUint maps true to 1 and false to 0.
func convBoolToUint(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	b := val.(types.Bool)
	if b {
		return oneUintVal, nil
	}
	return zeroUintVal, nil
}

var zeroIntVal = types.Int(0)
var oneIntVal = types.Int(1)

// convBoolToInt maps true to 1 and false to 0.
func convBoolToInt(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	b := val.(types.Bool)
	if b {
		return oneIntVal, nil
	}
	return zeroIntVal, nil
}

var zeroFloatVal = types.Float(0)
var oneFloatVal = types.Float(1)

// convBoolToFloat maps true to 1.0 and false to 0.0.
func convBoolToFloat(val types.Value) (types.Value, error) {
	if val == nil {
		return nil, nil
	}
	b := val.(types.Bool)
	if b {
		return oneFloatVal, nil
	}
	return zeroFloatVal, nil
}
package types
import (
"fmt"
"regexp"
"strings"
)
// DEBUG_LABEL_FILTER_PARSING enables verbose tracing of tokenization and
// parse-tree construction in ParseLabelFilter.
var DEBUG_LABEL_FILTER_PARSING = false

// LabelFilter reports whether a set of labels satisfies a filter expression.
type LabelFilter func([]string) bool
// matchLabelAction returns a LabelFilter that reports whether any of the
// supplied labels equals label, compared case-insensitively.
func matchLabelAction(label string) LabelFilter {
	expected := strings.ToLower(label)
	return func(labels []string) bool {
		for _, candidate := range labels {
			if strings.ToLower(candidate) == expected {
				return true
			}
		}
		return false
	}
}
// matchLabelRegexAction returns a LabelFilter that reports whether any of the
// supplied labels matches the given regular expression.
func matchLabelRegexAction(regex *regexp.Regexp) LabelFilter {
	return func(labels []string) bool {
		for _, candidate := range labels {
			if regex.MatchString(candidate) {
				return true
			}
		}
		return false
	}
}
// notAction inverts the result of the given filter.
func notAction(filter LabelFilter) LabelFilter {
	return func(labels []string) bool { return !filter(labels) }
}

// andAction matches only when both filters match.
func andAction(a, b LabelFilter) LabelFilter {
	return func(labels []string) bool { return a(labels) && b(labels) }
}

// orAction matches when either filter matches.
func orAction(a, b LabelFilter) LabelFilter {
	return func(labels []string) bool { return a(labels) || b(labels) }
}
// lfToken identifies the kind of a lexed token / parse-tree node in a label
// filter expression.
type lfToken uint

const (
	lfTokenInvalid lfToken = iota
	lfTokenRoot       // synthetic root of the parse tree
	lfTokenOpenGroup  // '('
	lfTokenCloseGroup // ')'
	lfTokenNot        // '!'
	lfTokenAnd        // '&&'
	lfTokenOr         // '||' or ','
	lfTokenRegexp     // '/.../' pattern
	lfTokenLabel      // bare label text
	lfTokenEOF        // end of input
)
// Precedence returns the binding strength used while building the parse tree:
// root and '(' bind loosest (0), then '||' (1), '&&' (2), '!' tightest (3).
// Tokens that never act as operators return -1.
func (l lfToken) Precedence() int {
	switch l {
	case lfTokenRoot, lfTokenOpenGroup:
		return 0
	case lfTokenOr:
		return 1
	case lfTokenAnd:
		return 2
	case lfTokenNot:
		return 3
	}
	return -1
}
// String renders the token for error messages and debug traces.
func (l lfToken) String() string {
	switch l {
	case lfTokenRoot:
		return "ROOT"
	case lfTokenOpenGroup:
		return "("
	case lfTokenCloseGroup:
		return ")"
	case lfTokenNot:
		return "!"
	case lfTokenAnd:
		return "&&"
	case lfTokenOr:
		return "||"
	case lfTokenRegexp:
		return "/regexp/"
	case lfTokenLabel:
		return "label"
	case lfTokenEOF:
		return "EOF"
	}
	return "INVALID"
}
// treeNode is a node in the label-filter parse tree. Leaf nodes carry label
// or regexp text in value; operator nodes link left/right operands.
type treeNode struct {
	token    lfToken
	location int    // rune offset in the original input, for error reporting
	value    string // label or regexp text for leaf tokens
	parent    *treeNode
	leftNode  *treeNode
	rightNode *treeNode
}

// setRightNode attaches node as the right child and fixes its parent pointer.
func (tn *treeNode) setRightNode(node *treeNode) {
	tn.rightNode = node
	node.parent = tn
}

// setLeftNode attaches node as the left child and fixes its parent pointer.
func (tn *treeNode) setLeftNode(node *treeNode) {
	tn.leftNode = node
	node.parent = tn
}

// firstAncestorWithPrecedenceLEQ walks up the tree to the first node whose
// token precedence is <= precedence. The root always qualifies (precedence 0),
// so the walk terminates.
func (tn *treeNode) firstAncestorWithPrecedenceLEQ(precedence int) *treeNode {
	if tn.token.Precedence() <= precedence {
		return tn
	}
	return tn.parent.firstAncestorWithPrecedenceLEQ(precedence)
}

// firstUnmatchedOpenNode walks up the tree looking for a '(' that has not yet
// been closed; returns nil when none exists.
func (tn *treeNode) firstUnmatchedOpenNode() *treeNode {
	if tn.token == lfTokenOpenGroup {
		return tn
	}
	if tn.parent == nil {
		return nil
	}
	return tn.parent.firstUnmatchedOpenNode()
}
// constructLabelFilter recursively converts the parse tree rooted at tn into
// an executable LabelFilter, reporting syntax errors against the original
// input string.
func (tn *treeNode) constructLabelFilter(input string) (LabelFilter, error) {
	// Leaf nodes and still-open groups are handled first.
	switch tn.token {
	case lfTokenOpenGroup:
		return nil, GinkgoErrors.SyntaxErrorParsingLabelFilter(input, tn.location, "Mismatched '(' - could not find matching ')'.")
	case lfTokenLabel:
		return matchLabelAction(tn.value), nil
	case lfTokenRegexp:
		// Regexps are compiled lazily here, at filter-construction time.
		re, err := regexp.Compile(tn.value)
		if err != nil {
			return nil, GinkgoErrors.SyntaxErrorParsingLabelFilter(input, tn.location, fmt.Sprintf("RegExp compilation error: %s", err))
		}
		return matchLabelRegexAction(re), nil
	}

	// Every remaining token needs at least a right operand.
	if tn.rightNode == nil {
		return nil, GinkgoErrors.SyntaxErrorParsingLabelFilter(input, -1, "Unexpected EOF.")
	}
	rightLF, err := tn.rightNode.constructLabelFilter(input)
	if err != nil {
		return nil, err
	}

	// Unary tokens: root and closed groups pass through; '!' negates.
	switch tn.token {
	case lfTokenRoot, lfTokenCloseGroup:
		return rightLF, nil
	case lfTokenNot:
		return notAction(rightLF), nil
	}

	// Binary operators additionally need a left operand.
	if tn.leftNode == nil {
		return nil, GinkgoErrors.SyntaxErrorParsingLabelFilter(input, tn.location, fmt.Sprintf("Malformed tree - '%s' is missing left operand.", tn.token))
	}
	leftLF, err := tn.leftNode.constructLabelFilter(input)
	if err != nil {
		return nil, err
	}

	switch tn.token {
	case lfTokenAnd:
		return andAction(leftLF, rightLF), nil
	case lfTokenOr:
		return orAction(leftLF, rightLF), nil
	}
	return nil, GinkgoErrors.SyntaxErrorParsingLabelFilter(input, tn.location, fmt.Sprintf("Invalid token '%s'.", tn.token))
}
// tokenString renders the node's token (and its value, when present) for
// debug traces, e.g. "<label | fast>".
func (tn *treeNode) tokenString() string {
	if tn.value != "" {
		return fmt.Sprintf("<%s | %s>", tn.token, tn.value)
	}
	return fmt.Sprintf("<%s>", tn.token)
}
// toString renders the subtree rooted at tn as an indented multi-line debug
// dump, labelling left and right children.
func (tn *treeNode) toString(indent int) string {
	out := tn.tokenString() + "\n"
	if tn.leftNode != nil {
		out += fmt.Sprintf("%s |_(L)_%s", strings.Repeat(" ", indent), tn.leftNode.toString(indent+1))
	}
	if tn.rightNode != nil {
		out += fmt.Sprintf("%s |_(R)_%s", strings.Repeat(" ", indent), tn.rightNode.toString(indent+1))
	}
	return out
}
// tokenize returns a closure that yields one token per call, advancing
// through input. The closure captures the rune slice and cursor i, so calls
// are stateful; it returns an lfTokenEOF node once the input is exhausted.
func tokenize(input string) func() (*treeNode, error) {
	runes, i := []rune(input), 0

	// peekIs reports whether the rune after the cursor equals r.
	peekIs := func(r rune) bool {
		if i+1 < len(runes) {
			return runes[i+1] == r
		}
		return false
	}

	// consumeUntil returns the run of runes from the cursor up to (not
	// including) the first rune in cutset, plus the number consumed.
	consumeUntil := func(cutset string) (string, int) {
		j := i
		for ; j < len(runes); j++ {
			if strings.IndexRune(cutset, runes[j]) >= 0 {
				break
			}
		}
		return string(runes[i:j]), j - i
	}

	return func() (*treeNode, error) {
		// Skip leading spaces before each token.
		for i < len(runes) && runes[i] == ' ' {
			i += 1
		}
		if i >= len(runes) {
			return &treeNode{token: lfTokenEOF}, nil
		}
		node := &treeNode{location: i}
		switch runes[i] {
		case '&':
			if !peekIs('&') {
				return &treeNode{}, GinkgoErrors.SyntaxErrorParsingLabelFilter(input, i, "Invalid token '&'. Did you mean '&&'?")
			}
			i += 2
			node.token = lfTokenAnd
		case '|':
			if !peekIs('|') {
				return &treeNode{}, GinkgoErrors.SyntaxErrorParsingLabelFilter(input, i, "Invalid token '|'. Did you mean '||'?")
			}
			i += 2
			node.token = lfTokenOr
		case '!':
			i += 1
			node.token = lfTokenNot
		case ',':
			// ',' is an alias for '||'.
			i += 1
			node.token = lfTokenOr
		case '(':
			i += 1
			node.token = lfTokenOpenGroup
		case ')':
			i += 1
			node.token = lfTokenCloseGroup
		case '/':
			// Regexp literal: consume up to the closing '/', skipping both
			// delimiters.
			i += 1
			value, n := consumeUntil("/")
			i += n + 1
			node.token, node.value = lfTokenRegexp, value
		default:
			// Bare label: everything up to the next operator/delimiter.
			value, n := consumeUntil("&|!,()/")
			i += n
			node.token, node.value = lfTokenLabel, strings.TrimSpace(value)
		}
		return node, nil
	}
}
// ParseLabelFilter parses a label-filter expression (labels combined with
// '!', '&&', '||'/',', '(...)', and '/regexp/' literals) into an executable
// LabelFilter. It builds an operator-precedence parse tree token by token,
// then compiles the tree with constructLabelFilter.
func ParseLabelFilter(input string) (LabelFilter, error) {
	if DEBUG_LABEL_FILTER_PARSING {
		fmt.Println("\n==============")
		fmt.Println("Input: ", input)
		fmt.Print("Tokens: ")
	}
	nextToken := tokenize(input)

	root := &treeNode{token: lfTokenRoot}
	current := root
LOOP:
	for {
		node, err := nextToken()
		if err != nil {
			return nil, err
		}
		if DEBUG_LABEL_FILTER_PARSING {
			fmt.Print(node.tokenString() + " ")
		}
		switch node.token {
		case lfTokenEOF:
			break LOOP
		case lfTokenLabel, lfTokenRegexp:
			// Operands attach as the right child of the current node.
			if current.rightNode != nil {
				return nil, GinkgoErrors.SyntaxErrorParsingLabelFilter(input, node.location, "Found two adjacent labels. You need an operator between them.")
			}
			current.setRightNode(node)
		case lfTokenNot, lfTokenOpenGroup:
			// Prefix tokens attach and become the new attachment point.
			if current.rightNode != nil {
				return nil, GinkgoErrors.SyntaxErrorParsingLabelFilter(input, node.location, fmt.Sprintf("Invalid token '%s'.", node.token))
			}
			current.setRightNode(node)
			current = node
		case lfTokenAnd, lfTokenOr:
			// Binary operators steal the right subtree of the nearest
			// ancestor with lower-or-equal precedence, making it their left
			// operand — standard precedence-climbing insertion.
			if current.rightNode == nil {
				return nil, GinkgoErrors.SyntaxErrorParsingLabelFilter(input, node.location, fmt.Sprintf("Operator '%s' missing left hand operand.", node.token))
			}
			nodeToStealFrom := current.firstAncestorWithPrecedenceLEQ(node.token.Precedence())
			node.setLeftNode(nodeToStealFrom.rightNode)
			nodeToStealFrom.setRightNode(node)
			current = node
		case lfTokenCloseGroup:
			firstUnmatchedOpenNode := current.firstUnmatchedOpenNode()
			if firstUnmatchedOpenNode == nil {
				return nil, GinkgoErrors.SyntaxErrorParsingLabelFilter(input, node.location, "Mismatched ')' - could not find matching '('.")
			}
			if firstUnmatchedOpenNode == current && current.rightNode == nil {
				return nil, GinkgoErrors.SyntaxErrorParsingLabelFilter(input, node.location, "Found empty '()' group.")
			}
			firstUnmatchedOpenNode.token = lfTokenCloseGroup //signify the group is now closed
			current = firstUnmatchedOpenNode.parent
		default:
			return nil, GinkgoErrors.SyntaxErrorParsingLabelFilter(input, node.location, fmt.Sprintf("Unknown token '%s'.", node.token))
		}
	}
	if DEBUG_LABEL_FILTER_PARSING {
		fmt.Printf("\n Tree:\n%s", root.toString(0))
	}
	return root.constructLabelFilter(input)
}
func ValidateAndCleanupLabel(label string, cl CodeLocation) (string, error) {
out := strings.TrimSpace(label)
if out == "" {
return "", GinkgoErrors.InvalidEmptyLabel(cl)
}
if strings.ContainsAny(out, "&|!,()/") {
return "", GinkgoErrors.InvalidLabel(label, cl)
}
return out, nil
} | vendor/github.com/onsi/ginkgo/v2/types/label_filter.go | 0.504639 | 0.413951 | label_filter.go | starcoder |
package serialization
import (
"time"
"github.com/google/uuid"
)
// ParseNode Interface for a deserialization node in a parse tree. This interface provides an abstraction layer over serialization formats, libraries and implementations.
type ParseNode interface {
	// GetChildNode returns a new parse node for the given identifier.
	GetChildNode(index string) (ParseNode, error)
	// GetCollectionOfObjectValues returns the collection of Parsable values from the node.
	GetCollectionOfObjectValues(ctor ParsableFactory) ([]Parsable, error)
	// GetCollectionOfPrimitiveValues returns the collection of primitive values from the node.
	GetCollectionOfPrimitiveValues(targetType string) ([]interface{}, error)
	// GetCollectionOfEnumValues returns the collection of Enum values from the node.
	GetCollectionOfEnumValues(parser func(string) (interface{}, error)) ([]interface{}, error)
	// GetObjectValue returns the Parsable value from the node.
	GetObjectValue(ctor ParsableFactory) (Parsable, error)
	// GetStringValue returns a String value from the node.
	GetStringValue() (*string, error)
	// GetBoolValue returns a Bool value from the node.
	GetBoolValue() (*bool, error)
	// GetInt8Value returns an int8 value from the node.
	GetInt8Value() (*int8, error)
	// GetByteValue returns a Byte value from the node.
	GetByteValue() (*byte, error)
	// GetFloat32Value returns a Float32 value from the node.
	GetFloat32Value() (*float32, error)
	// GetFloat64Value returns a Float64 value from the node.
	GetFloat64Value() (*float64, error)
	// GetInt32Value returns an Int32 value from the node.
	GetInt32Value() (*int32, error)
	// GetInt64Value returns an Int64 value from the node.
	GetInt64Value() (*int64, error)
	// GetTimeValue returns a Time value from the node.
	GetTimeValue() (*time.Time, error)
	// GetISODurationValue returns an ISODuration value from the node.
	GetISODurationValue() (*ISODuration, error)
	// GetTimeOnlyValue returns a TimeOnly value from the node.
	GetTimeOnlyValue() (*TimeOnly, error)
	// GetDateOnlyValue returns a DateOnly value from the node.
	GetDateOnlyValue() (*DateOnly, error)
	// GetUUIDValue returns a UUID value from the node.
	GetUUIDValue() (*uuid.UUID, error)
	// GetEnumValue returns an Enum value from the node.
	GetEnumValue(parser func(string) (interface{}, error)) (interface{}, error)
	// GetByteArrayValue returns a ByteArray value from the node.
	GetByteArrayValue() ([]byte, error)
}
package gorgonia
import (
"fmt"
"hash"
"github.com/chewxy/hm"
"github.com/pkg/errors"
"gorgonia.org/tensor"
)
// SoftMax applies the softmax operation to x. An optional axis may be given;
// when omitted the axis is resolved at execution time (the last dimension, or
// 0 for column/plain vectors).
func SoftMax(x *Node, axes ...int) (*Node, error) {
	op := newSoftmaxOp(x.Shape(), axes...)
	return ApplyOp(op, x)
}
// softmaxOp implements (log-)softmax as a gorgonia Op.
type softmaxOp struct {
	shape tensor.Shape // input shape the op was constructed for
	axis  int // axis to normalize over; -1 means resolve at run time
	isLog bool // when true, compute log-softmax instead of softmax
}
// newSoftmaxOp builds a softmaxOp for the given input shape. When no axis is
// supplied the op stores -1, meaning the axis is inferred at execution time.
func newSoftmaxOp(inputShape tensor.Shape, axes ...int) *softmaxOp {
	op := &softmaxOp{
		shape: inputShape,
		axis:  -1,
	}
	if len(axes) > 0 {
		op.axis = axes[0]
	}
	return op
}
// Boilerplate Op-interface methods: one input, no pointer return, no external
// calls, shape-preserving, and a monomorphic a -> a type.
func (op *softmaxOp) Arity() int { return 1 }
func (op *softmaxOp) ReturnsPtr() bool { return false }
func (op *softmaxOp) CallsExtern() bool { return false }
func (op *softmaxOp) WriteHash(h hash.Hash) { fmt.Fprintf(h, "Softmax{%v}()", op.axis) }
func (op *softmaxOp) Hashcode() uint32 { return simpleHash(op) }
func (op *softmaxOp) String() string { return fmt.Sprintf("Softmax{%d, %v}()", op.axis, op.isLog) }
// InferShape reports that softmax preserves the input shape.
func (op *softmaxOp) InferShape(inputs ...DimSizer) (tensor.Shape, error) {
	return inputs[0].(tensor.Shape), nil
}
func (op *softmaxOp) Type() hm.Type {
	a := hm.TypeVariable('a')
	return a.NewFnType(a, a) // f(float64) float64
}
func (op *softmaxOp) OverwritesInput() int { return -1 }

// checkInput validates arity and unwraps the single input to a tensor.Tensor.
func (op *softmaxOp) checkInput(inputs ...Value) (tensor.Tensor, error) {
	if err := checkArity(op, len(inputs)); err != nil {
		return nil, err
	}
	var (
		in tensor.Tensor
		ok bool
	)
	if in, ok = inputs[0].(tensor.Tensor); !ok {
		return nil, errors.Errorf("Expected input to be a tensor")
	}
	return in, nil
}

// Do computes (log-)softmax into a freshly allocated tensor of the input's
// shape and dtype.
func (op *softmaxOp) Do(inputs ...Value) (retVal Value, err error) {
	inputTensor, err := op.checkInput(inputs...)
	if err != nil {
		return nil, fmt.Errorf("Can't check Softmax input: %w", err)
	}
	aShape := inputTensor.Shape()
	ret := tensor.New(tensor.WithShape(aShape.Clone()...), tensor.Of(inputTensor.Dtype()))
	return op.UsePreallocDo(ret, inputTensor)
}
// UsePreallocDo computes (log-)softmax over the resolved axis, writing the
// result into prealloc. The axis defaults to the last dimension — or 0 for
// column/plain vectors — unless an explicit axis was configured on the op.
func (op *softmaxOp) UsePreallocDo(prealloc Value, inputs ...Value) (Value, error) {
	inputTensor, err := op.checkInput(inputs...)
	if err != nil {
		return nil, fmt.Errorf("Can't check Softmax input: %w", err)
	}
	aShape := inputTensor.Shape()
	axis := aShape.Dims() - 1 // default: last dim
	if aShape.IsColVec() || (aShape.IsVector() && !aShape.IsRowVec()) {
		axis = 0
	}
	if op.axis != -1 {
		axis = op.axis
	}
	if !op.isLog {
		_, err = tensor.SoftMax(inputTensor, axis, tensor.WithReuse(prealloc.(tensor.Tensor)), tensor.UseUnsafe())
		if err != nil {
			return nil, err
		}
	} else {
		_, err = tensor.LogSoftMax(inputTensor, axis, tensor.WithReuse(prealloc.(tensor.Tensor)), tensor.UseUnsafe())
		if err != nil {
			return nil, err
		}
	}
	return prealloc, nil
}
// DoDiff calculates the diff and sets its value to the output node. Implementation for ADOp interface.
// It runs the diff op over (input, output, output-grad) and accumulates the
// result into the input's stored derivative.
func (op *softmaxOp) DoDiff(ctx ExecutionContext, inputs Nodes, output *Node) error {
	if len(inputs) != 1 {
		return fmt.Errorf("SoftmaxOp.DoDiff needs 1 arguments")
	}
	odv := output.boundTo.(*dualValue)
	idv := inputs[0].boundTo.(*dualValue)
	idvd := idv.d.(*tensor.Dense)
	diffOp := &softmaxDiffOp{op}
	result, err := diffOp.Do(idv.Value, odv.Value, odv.d)
	if err != nil {
		return err
	}
	// Accumulate (in place) rather than overwrite the existing gradient.
	sum, err := idvd.Add(result.(*tensor.Dense), tensor.UseUnsafe())
	if err != nil {
		return err
	}
	odv.d = sum
	return nil
}

// SymDiff applies the diff op. Implementation for SDOp interface.
func (op *softmaxOp) SymDiff(inputs Nodes, output, grad *Node) (Nodes, error) {
	err := checkArity(op, len(inputs))
	if err != nil {
		return nil, err
	}
	diffOp := &softmaxDiffOp{op}
	nodes := make(Nodes, 1)
	nodes[0], err = ApplyOp(diffOp, inputs[0], output, grad)
	return nodes, err
}

// DiffWRT is an implementation for the SDOp interface
func (op *softmaxOp) DiffWRT(inputs int) []bool {
	if inputs != 1 {
		panic(fmt.Sprintf("softmax operator only supports one input, got %d instead", inputs))
	}
	return []bool{true}
}
// softmaxDiffOp is the backward op for softmaxOp; it embeds the forward op to
// reuse its axis/isLog configuration.
type softmaxDiffOp struct {
	*softmaxOp
}

// Boilerplate Op-interface methods for the diff op: three inputs
// (input, output, grad), shape-preserving, monomorphic type.
func (op *softmaxDiffOp) Arity() int { return 3 }
func (op *softmaxDiffOp) ReturnsPtr() bool { return false }
func (op *softmaxDiffOp) CallsExtern() bool { return false }
func (op *softmaxDiffOp) WriteHash(h hash.Hash) {
	fmt.Fprintf(h, "SoftmaxDiff{%d, %v}()", op.axis, op.isLog)
}
func (op *softmaxDiffOp) Hashcode() uint32 { return simpleHash(op) }
func (op *softmaxDiffOp) String() string {
	return fmt.Sprintf("SoftmaxDiff{%d, %v}()", op.axis, op.isLog)
}
func (op *softmaxDiffOp) InferShape(inputs ...DimSizer) (tensor.Shape, error) {
	s := inputs[0].(tensor.Shape).Clone()
	return s, nil
}
func (op *softmaxDiffOp) Type() hm.Type {
	a := hm.TypeVariable('a')
	return hm.NewFnType(a, a, a, a) // f(float64) float64
}
func (op *softmaxDiffOp) OverwritesInput() int { return -1 }
// checkInput validates the (input, output, grad) triple passed to the diff op
// and unwraps each one to a tensor.Tensor. Each argument may be either a
// *dualValue or a bare tensor.Tensor.
func (op *softmaxDiffOp) checkInput(inputs ...Value) (tensor.Tensor, tensor.Tensor, tensor.Tensor, error) {
	if err := checkArity(op, len(inputs)); err != nil {
		return nil, nil, nil, err
	}

	var (
		in   tensor.Tensor
		out  tensor.Tensor
		grad tensor.Tensor
		ok   bool
	)

	switch t := inputs[0].(type) {
	case *dualValue:
		if in, ok = t.Value.(tensor.Tensor); !ok {
			return nil, nil, nil, errors.Errorf("input should be a tensor, got %T", inputs[0])
		}
	case tensor.Tensor:
		in = t
	default:
		return nil, nil, nil, errors.Errorf("input type is not supported, got %T", inputs[0])
	}

	switch t := inputs[1].(type) {
	case *dualValue:
		if out, ok = t.Value.(tensor.Tensor); !ok {
			return nil, nil, nil, errors.Errorf("output should be a tensor, got %T", inputs[1])
		}
	case tensor.Tensor:
		out = t
	default:
		return nil, nil, nil, errors.Errorf("output type is not supported, got %T", inputs[1])
	}

	// BUG FIX: the grad error messages previously printed inputs[1] (the
	// output) instead of inputs[2] (the actual offending grad argument).
	switch t := inputs[2].(type) {
	case *dualValue:
		if grad, ok = t.Value.(tensor.Tensor); !ok {
			return nil, nil, nil, errors.Errorf("grad should be a tensor, got %T", inputs[2])
		}
	case tensor.Tensor:
		grad = t
	default:
		return nil, nil, nil, errors.Errorf("grad type is not supported, got %T", inputs[2])
	}

	return in, out, grad, nil
}
// Do allocates an output tensor shaped like the forward output and delegates
// to UsePreallocDo.
func (op *softmaxDiffOp) Do(inputs ...Value) (Value, error) {
	x, y, grad, err := op.checkInput(inputs...)
	if err != nil {
		return nil, fmt.Errorf("Can't check SoftmaxDiff input: %w", err)
	}
	ret := tensor.New(tensor.WithShape(y.Shape().Clone()...), tensor.Of(y.Dtype()))
	return op.UsePreallocDo(ret, x, y, grad)
}

// UsePreallocDo computes the (log-)softmax backward pass from the forward
// output y and incoming gradient, writing into prealloc. Note the forward
// input x is not needed by the backward formulas.
func (op *softmaxDiffOp) UsePreallocDo(prealloc Value, inputs ...Value) (Value, error) {
	_, y, grad, err := op.checkInput(inputs...)
	if err != nil {
		return nil, fmt.Errorf("Can't check SoftmaxDiff input: %w", err)
	}
	if op.isLog {
		return tensor.LogSoftMaxB(y, grad, op.axis, tensor.WithReuse(prealloc.(tensor.Tensor)), tensor.UseUnsafe())
	}
	return tensor.SoftMaxB(y, grad, op.axis, tensor.WithReuse(prealloc.(tensor.Tensor)), tensor.UseUnsafe())
}
// ensure it complies with the Op interface
// (compile-time assertions; a build failure here means an interface method
// was dropped or its signature changed)
var (
	_ Op = &softmaxOp{}
	_ ADOp = &softmaxOp{}
	_ SDOp = &softmaxOp{}
	_ Op = &softmaxDiffOp{}
)
package entities
import (
"fmt"
tl "github.com/JoelOtter/termloop"
"github.com/zladovan/gorched/gmath"
"github.com/zladovan/gorched/physics"
)
// Label is text entity with one row of text.
type Label struct {
	// position is the reference position of this label; the drawn entity may
	// be shifted from it depending on the alignment
	position gmath.Vector2i
	// format defines text formatting
	format Formatting
	// entity is used to draw text to the screen
	entity *tl.Text
	// text is text which will be drawn
	text string
}

// Alignment defines types how the text of the label will be shifted from Label's position
type Alignment = uint8

const (
	// Center alignment will draw text with Label position as the center
	Center Alignment = iota
	// Right alignment will draw text starting from Label position
	Right
	// Left alignment will draw text ending in Label position
	Left
)

// Formatting defines text formatting options for Label
type Formatting struct {
	// Color is foreground color
	Color tl.Attr
	// Background is background color
	Background tl.Attr
	// Align is text alignment
	Align Alignment
}
// NewLabel creates a new label with the given text and formatting. The given
// position is a reference point: the rendered entity may be shifted from it
// according to the format's alignment.
func NewLabel(position gmath.Vector2i, text string, format Formatting) *Label {
	label := &Label{
		position: position,
		format:   format,
		text:     text,
	}
	label.refresh()
	return label
}
// Draw will draw this label to the screen
func (l *Label) Draw(s *tl.Screen) {
	l.entity.Draw(s)
}

// Tick does nothing now
func (l *Label) Tick(e tl.Event) {}

// SetText will change text of this label and rebuild the drawn entity.
func (l *Label) SetText(text string) {
	l.text = text
	l.refresh()
}

// SetPosition will change position of this label.
// Given position is reference position and final entity could be moved from this position according to the used format alignment.
func (l *Label) SetPosition(p gmath.Vector2i) {
	l.position = p
	l.refresh()
}

// Position returns reference position of this label
func (l *Label) Position() gmath.Vector2i {
	return l.position
}
// refresh recreates the wrapped termloop.Text entity, shifting the x
// coordinate left according to the configured alignment.
func (l *Label) refresh() {
	// Measure in runes so multi-byte characters count as one cell. The
	// original named this local "len", shadowing the builtin.
	width := len([]rune(l.text))
	x := l.position.X
	y := l.position.Y
	switch l.format.Align {
	case Left:
		x -= width
	case Center:
		x -= width / 2
	}
	l.entity = tl.NewText(x, y, l.text, l.format.Color|tl.AttrBold, l.format.Background)
}
// ZIndex return z-index of the label
// It should be higher than z-index of tank and trees but lower z-index of explosion.
func (l *Label) ZIndex() int {
	return 2001
}

// TempLabel is Label which is hidden after TTL seconds if it's not updated with one of Show methods.
// If you want to make it visible right after the creation set RemainingTTL to non zero value.
// Otherwise it will be shown after first call of one of Show methods.
type TempLabel struct {
	// it extends from Label
	*Label
	// TTL is how many seconds will be label visible when shown
	TTL float64
	// RemainingTTL is how many seconds remains to be hidden
	RemainingTTL float64
	// Remove if is true label will be removed from world after TTL seconds
	Remove bool
}
// Show makes label again visible for TTL seconds.
func (l *TempLabel) Show() {
	l.RemainingTTL = l.TTL
}

// ShowText sets some text to the label and show it for TTL seconds.
func (l *TempLabel) ShowText(s string) {
	l.SetText(s)
	l.Show()
}

// ShowNumber sets some number as text of the label and shows it for TTL seconds.
// See ShowText().
func (l *TempLabel) ShowNumber(i int) {
	l.ShowText(fmt.Sprintf("%d", i))
}

// Draw draws label if it is not out of ttl.
// The remaining TTL is decremented by the frame's time delta; once expired,
// the label optionally removes itself from the level.
func (l *TempLabel) Draw(s *tl.Screen) {
	if l.IsVisible() {
		l.Label.Draw(s)
		l.RemainingTTL -= s.TimeDelta()
	} else if l.Remove {
		s.Level().RemoveEntity(l)
	}
}

// IsVisible returns true if label is not yet out of time to be drawn
func (l *TempLabel) IsVisible() bool {
	return l.RemainingTTL > 0
}
// FlyingLabel is text entity which will fly up for two seconds and then it removes itself from world.
type FlyingLabel struct {
	*TempLabel
	// body drives the upward motion via the physics engine
	body *physics.Body
}

// NewFlyingLabel creates FlyingLabel on given position with given text and formatting.
// To set some text use one of the Show methods.
func NewFlyingLabel(position gmath.Vector2i, text string, format Formatting) *FlyingLabel {
	return &FlyingLabel{
		TempLabel: &TempLabel{
			Label: NewLabel(position, text, format),
			TTL: 2,
			RemainingTTL: 2,
			Remove: true,
		},
		body: &physics.Body{
			Position: *position.As2F(),
			Mass: 0.5,
			// initial upward velocity; gravity pulls it back down
			Velocity: gmath.Vector2f{X: 0, Y: -8},
		},
	}
}

// Draw draws label if it is not out of ttl, syncing the label position with
// the physical body first.
func (l *FlyingLabel) Draw(s *tl.Screen) {
	// update label y coordinate based on physical body
	l.TempLabel.SetPosition(*l.body.Position.As2I())
	// draw original label
	l.TempLabel.Draw(s)
}

// Body returns physical body of this label
func (l *FlyingLabel) Body() *physics.Body {
	return l.body
}

// ZIndex return z-index of the flying label
// It should be higher than z-index of standard label.
func (l *FlyingLabel) ZIndex() int {
	return l.TempLabel.ZIndex() + 1
}
package ga
import "github.com/rannoch/cldr"
// calendar holds the CLDR date/time formatting data for Irish (ga):
// date/time pattern strings plus localized month, weekday, and day-period
// names at each width (abbreviated / narrow / short / wide).
var calendar = cldr.Calendar{
	Formats: cldr.CalendarFormats{
		Date: cldr.CalendarDateFormat{Full: "EEEE d MMMM y", Long: "d MMMM y", Medium: "d MMM y", Short: "dd/MM/y"},
		Time: cldr.CalendarDateFormat{Full: "HH:mm:ss zzzz", Long: "HH:mm:ss z", Medium: "HH:mm:ss", Short: "HH:mm"},
		DateTime: cldr.CalendarDateFormat{Full: "{1} {0}", Long: "{1} {0}", Medium: "{1} {0}", Short: "{1} {0}"},
	},
	FormatNames: cldr.CalendarFormatNames{
		Months: cldr.CalendarMonthFormatNames{
			Abbreviated: cldr.CalendarMonthFormatNameValue{Jan: "Ean", Feb: "Feabh", Mar: "Márta", Apr: "Aib", May: "Beal", Jun: "Meith", Jul: "Iúil", Aug: "Lún", Sep: "MFómh", Oct: "DFómh", Nov: "Samh", Dec: "Noll"},
			Narrow: cldr.CalendarMonthFormatNameValue{Jan: "E", Feb: "F", Mar: "M", Apr: "A", May: "B", Jun: "M", Jul: "I", Aug: "L", Sep: "M", Oct: "D", Nov: "S", Dec: "N"},
			Short: cldr.CalendarMonthFormatNameValue{},
			Wide: cldr.CalendarMonthFormatNameValue{Jan: "Eanáir", Feb: "Feabhra", Mar: "Márta", Apr: "Aibreán", May: "Bealtaine", Jun: "Meitheamh", Jul: "Iúil", Aug: "Lúnasa", Sep: "Meán Fómhair", Oct: "Deireadh Fómhair", Nov: "Samhain", Dec: "Nollaig"},
		},
		Days: cldr.CalendarDayFormatNames{
			Abbreviated: cldr.CalendarDayFormatNameValue{Sun: "Domh", Mon: "Luan", Tue: "Máirt", Wed: "Céad", Thu: "Déar", Fri: "Aoine", Sat: "Sath"},
			Narrow: cldr.CalendarDayFormatNameValue{Sun: "D", Mon: "L", Tue: "M", Wed: "C", Thu: "D", Fri: "A", Sat: "S"},
			Short: cldr.CalendarDayFormatNameValue{Sun: "Do", Mon: "Lu", Tue: "Má", Wed: "Cé", Thu: "Dé", Fri: "Ao", Sat: "Sa"},
			Wide: cldr.CalendarDayFormatNameValue{Sun: "Dé Domhnaigh", Mon: "Dé Luain", Tue: "Dé Máirt", Wed: "Dé Céadaoin", Thu: "Déardaoin", Fri: "Dé hAoine", Sat: "Dé Sathairn"},
		},
		Periods: cldr.CalendarPeriodFormatNames{
			Abbreviated: cldr.CalendarPeriodFormatNameValue{},
			Narrow: cldr.CalendarPeriodFormatNameValue{AM: "a", PM: "p"},
			Short: cldr.CalendarPeriodFormatNameValue{},
			Wide: cldr.CalendarPeriodFormatNameValue{AM: "a.m.", PM: "p.m."},
		},
	},
}
package timeseries
import (
"math"
"sort"
"time"
)
// Align aligns point time stamps to frames of the provided interval, inserting
// null-valued points for frames that have no data. A non-positive interval or
// a series shorter than 2 points is returned unchanged.
func (ts TimeSeries) Align(interval time.Duration) TimeSeries {
	if interval <= 0 || ts.Len() < 2 {
		return ts
	}

	alignedTs := NewTimeSeries()
	var frameTs = ts[0].GetTimeFrame(interval)
	var pointFrameTs time.Time
	var point TimePoint

	for i := 0; i < ts.Len(); i++ {
		point = ts[i]
		pointFrameTs = point.GetTimeFrame(interval)
		// Pad every empty frame between the previous point and this one.
		if pointFrameTs.After(frameTs) {
			for frameTs.Before(pointFrameTs) {
				alignedTs = append(alignedTs, TimePoint{Time: frameTs, Value: nil})
				frameTs = frameTs.Add(interval)
			}
		}

		alignedTs = append(alignedTs, TimePoint{Time: pointFrameTs, Value: point.Value})
		frameTs = frameTs.Add(interval)
	}

	return alignedTs
}
// FillTrendWithNulls fills missing points in a trend series with null values.
// The frame interval is fixed at one hour. Unlike Align, existing points keep
// their original time stamps rather than being snapped to frame boundaries.
func (ts TimeSeries) FillTrendWithNulls() TimeSeries {
	if ts.Len() < 2 {
		return ts
	}

	interval := time.Hour
	alignedTs := NewTimeSeries()
	var frameTs = ts[0].GetTimeFrame(interval)
	var pointFrameTs time.Time
	var point TimePoint

	for i := 0; i < ts.Len(); i++ {
		point = ts[i]
		pointFrameTs = point.GetTimeFrame(interval)
		// Pad every empty hour-frame up to this point's frame with nulls.
		if pointFrameTs.After(frameTs) {
			for frameTs.Before(pointFrameTs) {
				alignedTs = append(alignedTs, TimePoint{Time: frameTs, Value: nil})
				frameTs = frameTs.Add(interval)
			}
		}

		alignedTs = append(alignedTs, point)
		frameTs = frameTs.Add(interval)
	}

	return alignedTs
}
// DetectInterval estimates the interval between data points as the median of
// the deltas between consecutive points. Series shorter than 2 points yield 0.
func (ts TimeSeries) DetectInterval() time.Duration {
	if ts.Len() < 2 {
		return 0
	}

	deltas := make([]int, 0)
	for i := 1; i < ts.Len(); i++ {
		delta := ts[i].Time.Sub(ts[i-1].Time)
		deltas = append(deltas, int(delta.Milliseconds()))
	}
	sort.Ints(deltas)
	// Median (upper-middle element for even-length slices).
	midIndex := int(math.Floor(float64(len(deltas)) * 0.5))
	return time.Duration(deltas[midIndex]) * time.Millisecond
}
// PrepareForStack performs series interpolation to make series consist of the points with same time stamps
func PrepareForStack(series []*TimeSeriesData) []*TimeSeriesData {
if len(series) == 0 {
return series
}
// Build unique set of time stamps from all series
interpolatedTimeStampsMap := make(map[time.Time]time.Time)
for _, s := range series {
for _, p := range s.TS {
interpolatedTimeStampsMap[p.Time] = p.Time
}
}
// Convert to slice and sort
interpolatedTimeStamps := make([]time.Time, 0)
for _, ts := range interpolatedTimeStampsMap {
interpolatedTimeStamps = append(interpolatedTimeStamps, ts)
}
sort.Slice(interpolatedTimeStamps, func(i, j int) bool {
return interpolatedTimeStamps[i].Before(interpolatedTimeStamps[j])
})
for _, s := range series {
if s.Len() < 2 {
continue
}
p := s.TS[0]
pNext := s.TS[1]
interpolatedSeries := make([]TimePoint, 0)
interpolatedTS := interpolatedTimeStamps[0]
interpolatedTSIdx := 0
// Insert nulls before the first point
for i := 0; i < len(interpolatedTimeStamps); i++ {
interpolatedTS = interpolatedTimeStamps[i]
if interpolatedTS.Before(p.Time) {
interpolatedSeries = append(interpolatedSeries, TimePoint{Time: interpolatedTS, Value: nil})
} else {
interpolatedTSIdx = i
break
}
}
for i := 0; i < s.Len()-1; i++ {
p = s.TS[i]
pNext = s.TS[i+1]
interpolatedSeries = append(interpolatedSeries, p)
// Insert interpolated points between existing
for interpolatedTimeStamps[interpolatedTSIdx].Before(pNext.Time) && interpolatedTSIdx < len(interpolatedTimeStamps) {
if interpolatedTimeStamps[interpolatedTSIdx].Equal(p.Time) {
interpolatedTSIdx++
continue
}
frameTs := interpolatedTimeStamps[interpolatedTSIdx]
if p.Value != nil && pNext.Value != nil {
pointValue := linearInterpolation(frameTs, p, pNext)
interpolatedSeries = append(interpolatedSeries, TimePoint{Time: frameTs, Value: &pointValue})
} else {
// Next or current point is null means we're currently in a gap between 2 points,
// so put nulls instead of interpolating values.
interpolatedSeries = append(interpolatedSeries, TimePoint{Time: frameTs, Value: nil})
}
interpolatedTSIdx++
}
}
interpolatedSeries = append(interpolatedSeries, pNext)
s.TS = interpolatedSeries
}
return series
} | pkg/timeseries/align.go | 0.809728 | 0.605857 | align.go | starcoder |
package lcwidgets
import (
"fmt"
"image"
ui "github.com/gizak/termui/v3"
)
// Table renders rows and each row has columns which can be strings or Blocks
type Table struct {
	ui.Block
	// ColumnNames holds the header label for each column.
	ColumnNames []string
	// ColumnWidths holds the content width of each column, in terminal cells.
	ColumnWidths []int
	// Rows holds the table data; each cell is either a string or a ui.Drawable.
	Rows [][]interface{}
	// firstRow is the index of the first visible row (the scroll offset).
	firstRow int
}
// NewTable creates a new scrollable table that can render other Blocks within cells
func NewTable() *Table {
	t := Table{Block: *ui.NewBlock()}
	return &t
}
// ScrollUp moves the viewable area upwards one row
func (t *Table) ScrollUp() {
	if t.firstRow <= 0 {
		return
	}
	t.firstRow--
}
// PageUp moves the viewable area upwards one page
func (t *Table) PageUp() {
	page := t.Inner.Dy() - 3
	if t.firstRow <= page {
		t.firstRow = 0
	} else {
		t.firstRow -= page
	}
}
// ScrollDown moves the viewable area downwards one row
func (t *Table) ScrollDown() {
	limit := len(t.Rows) - (t.Inner.Dy() - 3)
	if t.firstRow < limit {
		t.firstRow++
	}
}
// PageDown moves the viewable area downwards one page
func (t *Table) PageDown() {
	page := t.Inner.Dy() - 3
	if t.firstRow < len(t.Rows)-(t.Inner.Dy()*2-3) {
		t.firstRow += page
	} else {
		t.firstRow = len(t.Rows) - page
	}
}
// SetRect implements the Drawable interface.
// After resizing the block it clamps firstRow so the visible window does not
// scroll past the end of the rows.
// NOTE(review): the scroll limit here uses len(Rows)-Inner.Dy()-2, while the
// scroll methods use len(Rows)-(Inner.Dy()-3) — confirm which is intended.
func (t *Table) SetRect(x1, y1, x2, y2 int) {
	t.Block.SetRect(x1, y1, x2, y2)
	if len(t.Rows)-t.Inner.Dy()-2 < 0 {
		t.firstRow = 0
	} else if t.firstRow > len(t.Rows)-t.Inner.Dy()-2 {
		t.firstRow = len(t.Rows) - t.Inner.Dy() - 2
	}
}
// Draw implements the Drawable interface
func (t *Table) Draw(buf *ui.Buffer) {
if t.Rows == nil || len(t.Rows) == 0 {
return
}
drawRow := func(row []interface{}, x int, y int) {
for idx, columnWidth := range t.ColumnWidths {
if drawable, ok := row[idx].(ui.Drawable); ok {
drawable.SetRect(x-1, y, x+columnWidth+2, y+1)
drawable.Draw(buf)
x += columnWidth + 2
} else {
x++
cells := ui.TrimCells(ui.ParseStyles(row[idx].(string), ui.Theme.Paragraph.Text), columnWidth)
for _, cell := range cells {
buf.SetCell(cell, image.Point{x, y})
x++
}
x += columnWidth - len(cells) + 1
}
if idx < len(t.ColumnWidths)-1 {
buf.SetCell(ui.NewCell(ui.VERTICAL_LINE, t.Block.BorderStyle), image.Point{x, y})
x++
}
}
}
hasScrollbar := len(t.Rows) > t.Inner.Dy()-2
x := t.Inner.Min.X
y := t.Inner.Min.Y
// If we have a border on our block, change some top cells to link to the column separator cleanly
if t.Border {
y = t.Min.Y
for _, columnWidth := range t.ColumnWidths[:len(t.ColumnWidths)-1] {
x += columnWidth + 2
buf.SetCell(ui.NewCell(ui.HORIZONTAL_DOWN, t.Block.BorderStyle), image.Point{x, y})
x++
}
x = t.Inner.Min.X
y++
}
columnData := make([]interface{}, len(t.ColumnNames))
for idx, column := range t.ColumnNames {
columnData[idx] = fmt.Sprintf("[%s](mod:bold)", column)
}
drawRow(columnData, x, y)
y++
// Line under column names
hvCross := ui.NewCell(HV_CROSS, t.Block.BorderStyle)
buf.Fill(ui.NewCell(ui.HORIZONTAL_LINE, t.Block.BorderStyle), image.Rect(x, y, x+t.Inner.Dx(), y+1))
buf.SetCell(ui.NewCell(ui.VERTICAL_RIGHT, t.Block.BorderStyle), image.Point{t.Min.X, y})
buf.SetCell(ui.NewCell(ui.VERTICAL_LEFT, t.Block.BorderStyle), image.Point{t.Max.X - 1, y})
for _, columnWidth := range t.ColumnWidths[:len(t.ColumnWidths)-1] {
x += columnWidth + 2
buf.SetCell(hvCross, image.Point{x, y})
x++
}
y++
x = t.Inner.Min.X
// Rows
lastRow := t.firstRow + t.Inner.Dy() - 2
if hasScrollbar {
lastRow--
}
if lastRow > len(t.Rows) {
lastRow = len(t.Rows)
}
for _, row := range t.Rows[t.firstRow:lastRow] {
drawRow(row, x, y)
y++
}
// Draw the column separator into the remaining empty rows
for ; y < t.Inner.Max.Y; y++ {
for _, columnWidth := range t.ColumnWidths[:len(t.ColumnWidths)-1] {
x += columnWidth + 2
buf.SetCell(ui.NewCell(ui.VERTICAL_LINE, t.Block.BorderStyle), image.Point{x, y})
x++
}
x = t.Inner.Min.X
}
// If we have a border on our block, change some bottom cells to link to the column separator cleanly
if t.Border {
y = t.Max.Y - 1
for _, columnWidth := range t.ColumnWidths[:len(t.ColumnWidths)-1] {
x += columnWidth + 2
buf.SetCell(ui.NewCell(ui.HORIZONTAL_UP, t.Block.BorderStyle), image.Point{x, y})
x++
}
}
// Draw scrollbar
if len(t.Rows) > t.Inner.Dy()-2 {
for idx := 2; idx < t.Inner.Max.Y-t.Inner.Min.Y; idx++ {
buf.SetCell(ui.NewCell(HEAVY_VERTICAL_LINE, t.Block.BorderStyle), image.Point{t.Max.X - 1, t.Inner.Min.Y + idx})
}
instructions := ui.ParseStyles(fmt.Sprintf("[Down: [d/PgDn/%c] Up: [u/PgUp/%c]](fg:black,bg:white)", ui.DOWN_ARROW, ui.UP_ARROW), ui.Theme.Paragraph.Text)
for idx, cell := range instructions {
buf.SetCell(cell, image.Point{t.Inner.Min.X + 1 + idx, t.Inner.Max.Y - 1})
}
}
if t.firstRow != 0 {
buf.SetCell(ui.NewCell(ui.UP_ARROW, t.Block.BorderStyle), image.Point{t.Max.X - 1, t.Inner.Min.Y + 2})
}
if len(t.Rows)-t.firstRow > t.Inner.Dy()-2 {
buf.SetCell(ui.NewCell(ui.DOWN_ARROW, t.Block.BorderStyle), image.Point{t.Max.X - 1, t.Inner.Max.Y - 1})
}
} | lc-admin/lcwidgets/table.go | 0.596786 | 0.506897 | table.go | starcoder |
package schedule
import (
"fmt"
"time"
)
// ScheduleSpec describes a weekly recurring window with a start and an end
// boundary, e.g. Monday 8:00 through Friday 17:00.
type ScheduleSpec struct {
	StartTime WeekdayTime // Monday 8am
	EndTime WeekdayTime // Friday 5pm
}

// WeekdayTime is a point within the week: a weekday plus a time of day.
type WeekdayTime struct {
	Weekday time.Weekday
	TimeOfDay TimeOfDay
}

// TimeOfDay is a clock time in hours and minutes (24-hour clock).
type TimeOfDay struct {
	Hour int
	Minute int
}

// Schedule evaluates whether instants fall inside a ScheduleSpec window.
type Schedule struct {
	spec *ScheduleSpec
}
// New returns a Schedule backed by the given spec.
func New(schedSpec *ScheduleSpec) *Schedule {
	return &Schedule{spec: schedSpec}
}
// Contains reports whether t falls inside the weekly window described by the
// schedule's spec, from StartTime through EndTime inclusive.
func (s *Schedule) Contains(t time.Time) bool {
	startDay := int(s.spec.StartTime.Weekday)
	endDay := int(s.spec.EndTime.Weekday)
	inputDay := int(t.Weekday())
	if s.spec.StartTime.Weekday > s.spec.EndTime.Weekday {
		// The window wraps past the end of the week: normalize weekdays so
		// that the start day becomes 0.
		startDay = 0
		endDay = 7 + (int(s.spec.EndTime.Weekday) - int(s.spec.StartTime.Weekday))
		inputDay = (7 + (int(t.Weekday()) - int(s.spec.StartTime.Weekday))) % 7
	}
	// Outside the day range: definitely not in the schedule.
	if inputDay < startDay || inputDay > endDay {
		return false
	}
	// On the boundary days the time of day decides membership.
	// BUG FIX: the previous comparisons combined hour and minute with && in a
	// way that let out-of-window times through (e.g. 7:30 passed an 8:00
	// start because 0 < 30). Compare hours first, minutes only on a tie.
	if t.Weekday() == s.spec.StartTime.Weekday {
		start := s.spec.StartTime.TimeOfDay
		if t.Hour() < start.Hour || (t.Hour() == start.Hour && t.Minute() < start.Minute) {
			return false
		}
	}
	if t.Weekday() == s.spec.EndTime.Weekday {
		end := s.spec.EndTime.TimeOfDay
		if t.Hour() > end.Hour || (t.Hour() == end.Hour && t.Minute() > end.Minute) {
			return false
		}
	}
	return true
}
// ConvertWeekday parses a weekday name ("Monday"/"monday", ...) into a
// time.Weekday. It returns an error for any unrecognized name.
// Uses a switch on the stdlib time.Weekday constants instead of rebuilding a
// lookup map on every call, and follows Go error-string conventions
// (lowercase, no trailing period, includes the offending value).
func ConvertWeekday(day string) (time.Weekday, error) {
	switch day {
	case "Monday", "monday":
		return time.Monday, nil
	case "Tuesday", "tuesday":
		return time.Tuesday, nil
	case "Wednesday", "wednesday":
		return time.Wednesday, nil
	case "Thursday", "thursday":
		return time.Thursday, nil
	case "Friday", "friday":
		return time.Friday, nil
	case "Saturday", "saturday":
		return time.Saturday, nil
	case "Sunday", "sunday":
		return time.Sunday, nil
	}
	return 0, fmt.Errorf("unknown weekday %q", day)
}
package datadog
import (
"encoding/json"
"fmt"
)
// SLOHistoryMetricsSeries A representation of `metric` based SLO time series for the provided queries. This is the same response type from `batch_query` endpoint.
// NOTE(review): this model follows the openapi-generator layout and appears
// to be generated — prefer regenerating over hand edits.
type SLOHistoryMetricsSeries struct {
	// Count of submitted metrics.
	Count int64 `json:"count"`
	// Metadata describing the metrics series (unit, aggregation, etc. — see SLOHistoryMetricsSeriesMetadata).
	Metadata SLOHistoryMetricsSeriesMetadata `json:"metadata"`
	// Total sum of the query.
	Sum float64 `json:"sum"`
	// The query values for each metric.
	Values []float64 `json:"values"`
}
// NewSLOHistoryMetricsSeries instantiates a new SLOHistoryMetricsSeries
// object with all required fields populated. The argument list will change
// whenever the set of required properties changes.
func NewSLOHistoryMetricsSeries(count int64, metadata SLOHistoryMetricsSeriesMetadata, sum float64, values []float64) *SLOHistoryMetricsSeries {
	return &SLOHistoryMetricsSeries{
		Count:    count,
		Metadata: metadata,
		Sum:      sum,
		Values:   values,
	}
}

// NewSLOHistoryMetricsSeriesWithDefaults instantiates a new
// SLOHistoryMetricsSeries object with only defaulted properties set; it does
// not guarantee that required properties are populated.
func NewSLOHistoryMetricsSeriesWithDefaults() *SLOHistoryMetricsSeries {
	return &SLOHistoryMetricsSeries{}
}
// NOTE(review): the accessors below follow the openapi-generator pattern;
// the `o == nil` guards make every getter safe to call on a nil receiver.

// GetCount returns the Count field value
func (o *SLOHistoryMetricsSeries) GetCount() int64 {
	if o == nil {
		var ret int64
		return ret
	}
	return o.Count
}

// GetCountOk returns a tuple with the Count field value
// and a boolean to check if the value has been set.
func (o *SLOHistoryMetricsSeries) GetCountOk() (*int64, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Count, true
}

// SetCount sets field value
func (o *SLOHistoryMetricsSeries) SetCount(v int64) {
	o.Count = v
}

// GetMetadata returns the Metadata field value
func (o *SLOHistoryMetricsSeries) GetMetadata() SLOHistoryMetricsSeriesMetadata {
	if o == nil {
		var ret SLOHistoryMetricsSeriesMetadata
		return ret
	}
	return o.Metadata
}

// GetMetadataOk returns a tuple with the Metadata field value
// and a boolean to check if the value has been set.
func (o *SLOHistoryMetricsSeries) GetMetadataOk() (*SLOHistoryMetricsSeriesMetadata, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Metadata, true
}

// SetMetadata sets field value
func (o *SLOHistoryMetricsSeries) SetMetadata(v SLOHistoryMetricsSeriesMetadata) {
	o.Metadata = v
}

// GetSum returns the Sum field value
func (o *SLOHistoryMetricsSeries) GetSum() float64 {
	if o == nil {
		var ret float64
		return ret
	}
	return o.Sum
}

// GetSumOk returns a tuple with the Sum field value
// and a boolean to check if the value has been set.
func (o *SLOHistoryMetricsSeries) GetSumOk() (*float64, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Sum, true
}

// SetSum sets field value
func (o *SLOHistoryMetricsSeries) SetSum(v float64) {
	o.Sum = v
}

// GetValues returns the Values field value
func (o *SLOHistoryMetricsSeries) GetValues() []float64 {
	if o == nil {
		var ret []float64
		return ret
	}
	return o.Values
}

// GetValuesOk returns a tuple with the Values field value
// and a boolean to check if the value has been set.
func (o *SLOHistoryMetricsSeries) GetValuesOk() (*[]float64, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Values, true
}

// SetValues sets field value
func (o *SLOHistoryMetricsSeries) SetValues(v []float64) {
	o.Values = v
}
// MarshalJSON serializes the model. All four fields are required, so each is
// always emitted (the generated version wrapped every assignment in a dead
// `if true` conditional; those have been removed with identical output).
func (o SLOHistoryMetricsSeries) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"count":    o.Count,
		"metadata": o.Metadata,
		"sum":      o.Sum,
		"values":   o.Values,
	}
	return json.Marshal(toSerialize)
}
// UnmarshalJSON deserializes the payload, returning an error when any
// required field is absent. It decodes twice: once into pointer-typed fields
// (to distinguish "missing" from a zero value) and once into value-typed
// fields that are then copied onto the receiver.
func (o *SLOHistoryMetricsSeries) UnmarshalJSON(bytes []byte) (err error) {
	// First pass: pointer fields stay nil when the key is absent.
	required := struct {
		Count *int64 `json:"count"`
		Metadata *SLOHistoryMetricsSeriesMetadata `json:"metadata"`
		Sum *float64 `json:"sum"`
		Values *[]float64 `json:"values"`
	}{}
	// Second pass target: value fields copied onto the receiver on success.
	all := struct {
		Count int64 `json:"count"`
		Metadata SLOHistoryMetricsSeriesMetadata `json:"metadata"`
		Sum float64 `json:"sum"`
		Values []float64 `json:"values"`
	}{}
	err = json.Unmarshal(bytes, &required)
	if err != nil {
		return err
	}
	if required.Count == nil {
		return fmt.Errorf("Required field count missing")
	}
	if required.Metadata == nil {
		return fmt.Errorf("Required field metadata missing")
	}
	if required.Sum == nil {
		return fmt.Errorf("Required field sum missing")
	}
	if required.Values == nil {
		return fmt.Errorf("Required field values missing")
	}
	err = json.Unmarshal(bytes, &all)
	if err != nil {
		return err
	}
	o.Count = all.Count
	o.Metadata = all.Metadata
	o.Sum = all.Sum
	o.Values = all.Values
	return nil
}
// NullableSLOHistoryMetricsSeries wraps the model with an explicit "is set"
// flag so that JSON null and an absent value can be distinguished.
type NullableSLOHistoryMetricsSeries struct {
	value *SLOHistoryMetricsSeries
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableSLOHistoryMetricsSeries) Get() *SLOHistoryMetricsSeries {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableSLOHistoryMetricsSeries) Set(val *SLOHistoryMetricsSeries) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set (or unmarshaling) has assigned a value.
func (v NullableSLOHistoryMetricsSeries) IsSet() bool {
	return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableSLOHistoryMetricsSeries) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableSLOHistoryMetricsSeries returns a wrapper already marked as set.
func NewNullableSLOHistoryMetricsSeries(val *SLOHistoryMetricsSeries) *NullableSLOHistoryMetricsSeries {
	return &NullableSLOHistoryMetricsSeries{value: val, isSet: true}
}

// MarshalJSON serializes the wrapped value (nil marshals to JSON null).
func (v NullableSLOHistoryMetricsSeries) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON deserializes into the wrapped value and marks it as set.
func (v *NullableSLOHistoryMetricsSeries) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package network
import (
"bytes"
"fmt"
"github.com/yaricom/goNEAT/v2/neat"
"github.com/yaricom/goNEAT/v2/neat/math"
)
// NNode is either a NEURON or a SENSOR.
// - If it's a sensor, it can be loaded with a value for output
// - If it's a neuron, it has a list of its incoming input signals ([]*Link is used)
// Use an activation count to avoid flushing
type NNode struct {
	// The ID of the node
	Id int
	// The type of node activation function (SIGMOID, ...)
	ActivationType math.NodeActivationType
	// The neuron type for this node (HIDDEN, INPUT, OUTPUT, BIAS)
	NeuronType NodeNeuronType
	// The node's activation value
	Activation float64
	// The number of activations for current node
	ActivationsCount int32
	// The activation sum
	ActivationSum float64
	// The list of all incoming connections
	Incoming []*Link
	// The list of all outgoing connections
	Outgoing []*Link
	// The trait linked to the node
	Trait *neat.Trait
	// Used for Gene decoding by referencing analogue to this node in organism phenotype
	PhenotypeAnalogue *NNode
	// the flag to use for loop detection (see Depth; reset by Flushback)
	visited bool
	/* ************ LEARNING PARAMETERS *********** */
	// The following parameters are for use in neurons that learn through habituation,
	// sensitization, or Hebbian-type processes */
	Params []float64
	// Activation value of node at time t-1; Holds the previous step's activation for recurrency
	lastActivation float64
	// Activation value of node at time t-2 Holds the activation before the previous step's
	// This is necessary for a special recurrent case when the innode of a recurrent link is one time step ahead of the outnode.
	// The innode then needs to send from TWO time steps ago
	lastActivation2 float64
	// If true the node is active - used during node activation
	isActive bool
}
// NewNNode creates a new node with the specified ID and the neuron type
// associated (INPUT, HIDDEN, OUTPUT, BIAS).
func NewNNode(nodeId int, neuronType NodeNeuronType) *NNode {
	node := NewNetworkNode()
	node.Id = nodeId
	node.NeuronType = neuronType
	return node
}

// NewNNodeCopy constructs an NNode off another NNode with the given trait,
// for genome purposes.
func NewNNodeCopy(n *NNode, t *neat.Trait) *NNode {
	c := NewNetworkNode()
	c.Id = n.Id
	c.NeuronType = n.NeuronType
	c.ActivationType = n.ActivationType
	c.Trait = t
	return c
}

// NewNetworkNode is the default constructor: a HIDDEN neuron with the
// steepened-sigmoid activation and empty link lists.
func NewNetworkNode() *NNode {
	node := &NNode{
		NeuronType:     HiddenNeuron,
		ActivationType: math.SigmoidSteepenedActivation,
	}
	node.Incoming = make([]*Link, 0)
	node.Outgoing = make([]*Link, 0)
	return node
}
// setActivation stores a new activation value on this node, pushing the
// previous values into the time-delay history and bumping the counter.
func (n *NNode) setActivation(input float64) {
	// Keep a memory of activations for potential time delayed connections
	n.saveActivations()
	// Set new activation value
	n.Activation = input
	// Increment the activation_count
	n.ActivationsCount++
}

// saveActivations shifts the activation history: t-1 becomes t-2 and the
// current activation becomes t-1 (used by recurrent/time-delayed links).
func (n *NNode) saveActivations() {
	n.lastActivation2 = n.lastActivation
	n.lastActivation = n.Activation
}
// GetActiveOut returns the activation for the current step, or 0 if the node
// has not been activated yet.
func (n *NNode) GetActiveOut() float64 {
	if n.ActivationsCount > 0 {
		return n.Activation
	}
	return 0.0
}

// GetActiveOutTd returns the activation from the PREVIOUS time step, or 0 if
// fewer than two activations have occurred.
func (n *NNode) GetActiveOutTd() float64 {
	if n.ActivationsCount > 1 {
		return n.lastActivation
	}
	return 0.0
}

// IsSensor reports whether this node is a SENSOR (input or bias neuron).
func (n *NNode) IsSensor() bool {
	switch n.NeuronType {
	case InputNeuron, BiasNeuron:
		return true
	}
	return false
}

// IsNeuron reports whether this node is a NEURON (hidden or output neuron).
func (n *NNode) IsNeuron() bool {
	switch n.NeuronType {
	case HiddenNeuron, OutputNeuron:
		return true
	}
	return false
}
// SensorLoad loads the value into the node if it is a SENSOR and returns
// true; non-sensor nodes are left untouched and false is returned.
func (n *NNode) SensorLoad(load float64) bool {
	if !n.IsSensor() {
		return false
	}
	// Keep a memory of activations for potential time-delayed connections,
	// then put the sensor into the next time step.
	n.saveActivations()
	n.ActivationsCount++
	n.Activation = load
	return true
}
// addOutgoing appends a non-recurrent outgoing link from this node to out.
func (n *NNode) addOutgoing(out *NNode, weight float64) *Link {
	link := NewLink(weight, n, out, false)
	n.Outgoing = append(n.Outgoing, link)
	return link
}

// addIncoming appends a non-recurrent incoming link from in to this node.
func (n *NNode) addIncoming(in *NNode, weight float64) *Link {
	link := NewLink(weight, in, n, false)
	n.Incoming = append(n.Incoming, link)
	return link
}

// connectFrom creates a link between the two nodes and registers it on both
// endpoints: incoming on this node, outgoing on the in node.
func (n *NNode) connectFrom(in *NNode, weight float64) *Link {
	link := NewLink(weight, in, n, false)
	n.Incoming = append(n.Incoming, link)
	in.Outgoing = append(in.Outgoing, link)
	return link
}
// Flushback Recursively deactivate backwards through the network
// (resets the activation state, history and bookkeeping flags of this node;
// despite the name, only this node is touched here).
func (n *NNode) Flushback() {
	n.ActivationsCount = 0
	n.Activation = 0
	n.lastActivation = 0
	n.lastActivation2 = 0
	n.isActive = false
	n.visited = false
}

// FlushbackCheck is to verify flushing for debugging: it returns an error
// describing the first activation field found to be non-zero.
func (n *NNode) FlushbackCheck() error {
	if n.ActivationsCount > 0 {
		return fmt.Errorf("NNODE: %s has activation count %d", n, n.ActivationsCount)
	}
	if n.Activation > 0 {
		return fmt.Errorf("NNODE: %s has activation %f", n, n.Activation)
	}
	if n.lastActivation > 0 {
		return fmt.Errorf("NNODE: %s has last_activation %f", n, n.lastActivation)
	}
	if n.lastActivation2 > 0 {
		return fmt.Errorf("NNODE: %s has last_activation2 %f", n, n.lastActivation2)
	}
	return nil
}
// Depth Find the greatest depth starting from this neuron at depth d
// by walking incoming links back toward the sensors. Already-visited nodes
// are skipped to break cycles within one traversal.
func (n *NNode) Depth(d int) (int, error) {
	if d > 1000 {
		// to avoid infinite recursion
		// NOTE(review): 10 looks like an arbitrary fallback depth returned
		// alongside the loop-detection error — confirm callers rely on it.
		return 10, NetErrDepthCalculationFailedLoopDetected
	}
	n.visited = true
	// Base Case
	if n.IsSensor() {
		return d, nil
	} else {
		// recursion: the depth is the maximum over all incoming paths
		max := d // The max depth
		for _, l := range n.Incoming {
			if l.InNode.visited {
				// was already visited (loop detected) - skipping
				continue
			}
			curDepth, err := l.InNode.Depth(d + 1)
			if err != nil {
				return curDepth, err
			}
			if curDepth > max {
				max = curDepth
			}
		}
		return max, nil
	}
}
// NodeType Convenient method to check network's node type (SENSOR, NEURON)
func (n *NNode) NodeType() NodeType {
	if n.IsSensor() {
		return SensorNode
	}
	return NeuronNode
}

// String renders a compact one-line summary of the node: type, id, neuron
// type, activation function, active flag and current activation state.
func (n *NNode) String() string {
	activation, _ := math.NodeActivators.ActivationNameFromType(n.ActivationType)
	active := "active"
	if !n.isActive {
		active = "inactive"
	}
	return fmt.Sprintf("(%s id:%03d, %s, %s,\t%s -> step: %d = %.3f %.3f)",
		NodeTypeName(n.NodeType()), n.Id, NeuronTypeName(n.NeuronType), activation, active,
		n.ActivationsCount, n.Activation, n.Params)
}

// PrintDebug is to print all fields of the node to the string
// (a verbose, multi-line dump intended for debugging only).
func (n *NNode) PrintDebug() string {
	str := "NNode fields\n"
	b := bytes.NewBufferString(str)
	_, _ = fmt.Fprintf(b, "\tId: %d\n", n.Id)
	_, _ = fmt.Fprintf(b, "\tIsActive: %t\n", n.isActive)
	_, _ = fmt.Fprintf(b, "\tActivation: %f\n", n.Activation)
	activation, _ := math.NodeActivators.ActivationNameFromType(n.ActivationType)
	_, _ = fmt.Fprintf(b, "\tActivation Type: %s\n", activation)
	_, _ = fmt.Fprintf(b, "\tNeuronType: %d\n", n.NeuronType)
	_, _ = fmt.Fprintf(b, "\tActivationsCount: %d\n", n.ActivationsCount)
	_, _ = fmt.Fprintf(b, "\tActivationSum: %f\n", n.ActivationSum)
	_, _ = fmt.Fprintf(b, "\tIncoming: %s\n", n.Incoming)
	_, _ = fmt.Fprintf(b, "\tOutgoing: %s\n", n.Outgoing)
	_, _ = fmt.Fprintf(b, "\tTrait: %s\n", n.Trait)
	_, _ = fmt.Fprintf(b, "\tPhenotypeAnalogue: %s\n", n.PhenotypeAnalogue)
	_, _ = fmt.Fprintf(b, "\tParams: %f\n", n.Params)
	_, _ = fmt.Fprintf(b, "\tlastActivation: %f\n", n.lastActivation)
	_, _ = fmt.Fprintf(b, "\tlastActivation2: %f\n", n.lastActivation2)
	return b.String()
}
package kiwi
import "strconv"
// Term is a variable scaled by a coefficient: Coefficient * Variable.
type Term struct {
	Variable *Variable
	Coefficient float64
}

// Compile-time check that Term satisfies the Constrainer interface.
var _ Constrainer = Term{}

// GetValue returns the term's current value: Coefficient * Variable.Value.
func (t Term) GetValue() float64 {
	return t.Coefficient * t.Variable.Value
}
// Multiply returns a copy of the term with its coefficient scaled.
func (t Term) Multiply(coefficient float64) Term {
	t.Coefficient *= coefficient
	return t
}

// Divide returns a copy of the term with its coefficient divided.
func (t Term) Divide(denominator float64) Term {
	t.Coefficient /= denominator
	return t
}

// Negate returns a copy of the term with its coefficient sign-flipped.
func (t Term) Negate() Term {
	t.Coefficient = -t.Coefficient
	return t
}
// AddConstant returns the expression `t + constant`.
func (t Term) AddConstant(constant float64) Expression {
	return Expression{Constant: constant, Terms: []Term{t}}
}

// AddVariable returns the expression `t + variable` (coefficient 1).
func (t Term) AddVariable(variable *Variable) Expression {
	return t.AddTerm(Term{Variable: variable, Coefficient: 1.0})
}

// AddTerm returns the expression `t + term` with a zero constant.
func (t Term) AddTerm(term Term) Expression {
	return Expression{Terms: []Term{t, term}}
}

// AddExpression returns an expression with t prepended to the expression's
// terms, keeping the expression's constant.
func (t Term) AddExpression(expression Expression) Expression {
	terms := append([]Term{t}, expression.Terms...)
	return Expression{Terms: terms, Constant: expression.Constant}
}
// EqualsConstant returns the constraint `t == constant`.
func (t Term) EqualsConstant(constant float64) *Constraint {
	return NewConstraint(t.AddConstant(-constant), EQ)
}

// EqualsVariable returns the constraint `t == variable`.
func (t Term) EqualsVariable(variable *Variable) *Constraint {
	return NewConstraint(t.AddTerm(variable.Negate()), EQ)
}

// EqualsTerm returns the constraint `t == term`.
func (t Term) EqualsTerm(term Term) *Constraint {
	return NewConstraint(t.AddTerm(term.Negate()), EQ)
}

// EqualsExpression returns the constraint `t == expression`.
func (t Term) EqualsExpression(expression Expression) *Constraint {
	return expression.EqualsTerm(t)
}

// LessThanOrEqualsConstant returns the constraint `t <= constant`.
func (t Term) LessThanOrEqualsConstant(constant float64) *Constraint {
	return NewConstraint(t.AddConstant(-constant), LE)
}

// LessThanOrEqualsVariable returns the constraint `t <= variable`.
func (t Term) LessThanOrEqualsVariable(variable *Variable) *Constraint {
	return NewConstraint(t.AddTerm(variable.Negate()), LE)
}

// LessThanOrEqualsTerm returns the constraint `t <= term`.
func (t Term) LessThanOrEqualsTerm(term Term) *Constraint {
	return NewConstraint(t.AddTerm(term.Negate()), LE)
}

// LessThanOrEqualsExpression returns the constraint `t <= expression`.
func (t Term) LessThanOrEqualsExpression(expression Expression) *Constraint {
	return NewConstraint(t.AddExpression(expression.Negate()), LE)
}

// GreaterThanOrEqualsConstant returns the constraint `t >= constant`.
func (t Term) GreaterThanOrEqualsConstant(constant float64) *Constraint {
	return NewConstraint(t.AddConstant(-constant), GE)
}

// GreaterThanOrEqualsVariable returns the constraint `t >= variable`.
func (t Term) GreaterThanOrEqualsVariable(variable *Variable) *Constraint {
	return NewConstraint(t.AddTerm(variable.Negate()), GE)
}

// GreaterThanOrEqualsTerm returns the constraint `t >= term`.
func (t Term) GreaterThanOrEqualsTerm(term Term) *Constraint {
	return NewConstraint(t.AddTerm(term.Negate()), GE)
}

// GreaterThanOrEqualsExpression returns the constraint `t >= expression`.
func (t Term) GreaterThanOrEqualsExpression(expression Expression) *Constraint {
	return NewConstraint(t.AddExpression(expression.Negate()), GE)
}
func (t Term) String() string {
if t.Coefficient == 1.0 {
return t.Variable.String()
}
return strconv.FormatFloat(t.Coefficient, 'f', -1, 64) + " * " + t.Variable.String()
} | term.go | 0.815233 | 0.469034 | term.go | starcoder |
package gi3d
import (
"math"
"github.com/goki/ki/kit"
"github.com/goki/mat32"
)
// Torus is a torus mesh, defined by the radius of the solid tube and the
// larger radius of the ring.
type Torus struct {
	MeshBase
	Radius float32 `desc:"larger radius of the torus ring"`
	TubeRadius float32 `desc:"radius of the solid tube"`
	RadialSegs int `min:"1" desc:"number of segments around the radius of the torus (32 is reasonable default for full circle)"`
	TubeSegs int `min:"1" desc:"number of segments for the tube itself (32 is reasonable default for full height)"`
	AngStart float32 `min:"0" max:"360" step:"5" desc:"starting radial angle in degrees relative to 1,0,0 starting point"`
	AngLen float32 `min:"0" max:"360" step:"5" desc:"total radial angle to generate in degrees (max = 360)"`
}

// KiT_Torus registers the Torus type in the ki type registry (kit.Types).
var KiT_Torus = kit.Types.AddType(&Torus{}, nil)
// AddNewTorus creates a torus mesh with the specified outer ring radius,
// solid tube radius, and number of segments (resolution), adds it to the
// scene, and returns it. The sector defaults to a full 360-degree ring.
// (Previous doc said "sphere mesh" — copy-paste error.)
func AddNewTorus(sc *Scene, name string, radius, tubeRadius float32, segs int) *Torus {
	sp := &Torus{}
	sp.Nm = name
	sp.Radius = radius
	sp.TubeRadius = tubeRadius
	sp.RadialSegs = segs
	sp.TubeSegs = segs
	sp.AngStart = 0
	sp.AngLen = 360
	sc.AddMesh(sp)
	return sp
}
// Make rebuilds the mesh vertex data from the current Torus parameters and
// refreshes the bounding box.
func (sp *Torus) Make(sc *Scene) {
	sp.Reset()
	sp.AddTorusSector(sp.Radius, sp.TubeRadius, sp.RadialSegs, sp.TubeSegs, sp.AngStart, sp.AngLen, mat32.Vec3{})
	sp.BBox.UpdateFmBBox()
}
// AddTorusSector adds torus geometry to the mesh with the specified
// revolution radius, tube radius, number of radial segments, number of
// tubular segments, and radial sector start angle and length in degrees
// (0 - 360), offset by the given vector.
// (Previous doc named the wrong function, "NewTorus".)
func (ms *MeshBase) AddTorusSector(radius, tubeRadius float32, radialSegs, tubeSegs int, angStart, angLen float32, offset mat32.Vec3) {
	angStRad := mat32.DegToRad(angStart)
	angLenRad := mat32.DegToRad(angLen)
	pos := mat32.NewArrayF32(0, 0)
	norms := mat32.NewArrayF32(0, 0)
	uvs := mat32.NewArrayF32(0, 0)
	idxs := mat32.NewArrayU32(0, 0)
	// Index of the first vertex this call appends (Vtx holds 3 floats per vertex).
	stidx := uint32(ms.Vtx.Len() / 3)
	bb := mat32.Box3{}
	bb.SetEmpty()
	var center mat32.Vec3
	// Generate vertices: u (from i) sweeps the ring arc over the requested
	// sector; v (from j) sweeps the full circle of the tube cross-section.
	for j := 0; j <= radialSegs; j++ {
		for i := 0; i <= tubeSegs; i++ {
			u := angStRad + float32(i)/float32(tubeSegs)*angLenRad
			v := float32(j) / float32(radialSegs) * math.Pi * 2
			center.X = radius * mat32.Cos(u)
			center.Y = radius * mat32.Sin(u)
			var pt mat32.Vec3
			pt.X = (radius + tubeRadius*mat32.Cos(v)) * mat32.Cos(u)
			pt.Y = (radius + tubeRadius*mat32.Cos(v)) * mat32.Sin(u)
			pt.Z = tubeRadius * mat32.Sin(v)
			pt.SetAdd(offset)
			pos.AppendVec3(pt)
			bb.ExpandByPoint(pt)
			uvs.Append(float32(i)/float32(tubeSegs), float32(j)/float32(radialSegs))
			// Normal points from the ring centerline out through the vertex.
			norms.AppendVec3(pt.Sub(center).Normal())
		}
	}
	// Emit two triangles per quad of the (radial x tube) vertex grid.
	for j := 1; j <= radialSegs; j++ {
		for i := 1; i <= tubeSegs; i++ {
			a := (tubeSegs+1)*j + i - 1
			b := (tubeSegs+1)*(j-1) + i - 1
			c := (tubeSegs+1)*(j-1) + i
			d := (tubeSegs+1)*j + i
			idxs.Append(stidx+uint32(a), stidx+uint32(b), stidx+uint32(d), stidx+uint32(b), stidx+uint32(c), stidx+uint32(d))
		}
	}
	ms.Vtx = append(ms.Vtx, pos...)
	ms.Idx = append(ms.Idx, idxs...)
	ms.Norm = append(ms.Norm, norms...)
	ms.Tex = append(ms.Tex, uvs...)
	ms.BBox.BBox.ExpandByBox(bb)
}
package object
// initialize built-in objects like Int, Arr, Str...
// NOTE: Props are inserted in package eval not to make
// package object and package BuiltIn circular reference
// BuiltInIntObj is an object of Int (proto of each int).
var BuiltInIntObj = NewPanObj(&map[SymHash]Pair{}, BuiltInNumObj, WithZero(BuiltInZeroInt))
// BuiltInFloatObj is an object of Float (proto of each float).
var BuiltInFloatObj = NewPanObj(&map[SymHash]Pair{}, BuiltInNumObj, WithZero(zeroFloat))
// BuiltInNumObj is an object of Num (common proto of Int and Float).
var BuiltInNumObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj)
// BuiltInNilObj is an object of Nil (proto of nil).
var BuiltInNilObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj, WithZero(BuiltInNil))
// BuiltInStrObj is an object of Str (proto of each str).
var BuiltInStrObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj, WithZero(zeroStr))
// BuiltInArrObj is an object of Arr (proto of each arr).
var BuiltInArrObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj, WithZero(zeroArr))
// BuiltInRangeObj is an object of Range (proto of each range).
var BuiltInRangeObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj, WithZero(zeroRange))
// BuiltInFuncObj is an object of Func (proto of each func).
var BuiltInFuncObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj)
// BuiltInIterObj is an object of Iter (proto of each iter).
var BuiltInIterObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj)
// BuiltInMatchObj is an object of Match (proto of each match).
var BuiltInMatchObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj)
// BuiltInObjObj is an object of Obj (proto of each obj).
var BuiltInObjObj = NewPanObj(&map[SymHash]Pair{}, BuiltInBaseObj)
// BuiltInBaseObj is an object of BaseObj (ancestor of all objects; its proto is nil).
var BuiltInBaseObj = NewPanObj(&map[SymHash]Pair{}, nil)
// BuiltInMapObj is an object of Map (proto of each map).
var BuiltInMapObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj, WithZero(zeroMap))
// BuiltInIOObj is an object of IO (proto of each io).
var BuiltInIOObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj)
// BuiltInDiamondObj is an object of Diamond.
var BuiltInDiamondObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj)
// BuiltInKernelObj is an object of Kernel, whose props can be used in top-level.
var BuiltInKernelObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj)
// BuiltInJSONObj is an object of JSON, whose props can be used in top-level.
var BuiltInJSONObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj)
// BuiltInIterableObj is an object of Iterable, which is mixed-in iterable objects.
var BuiltInIterableObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj)
// BuiltInComparableObj is an object of Comparable, which is mixed-in comparable objects.
var BuiltInComparableObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj)
// BuiltInWrappableObj is an object of Wrappable, which is mixed-in wrappable objects.
var BuiltInWrappableObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj)
// BuiltInEitherObj is an object of Either (proto of EitherVal and EitherErr).
var BuiltInEitherObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj)
// BuiltInEitherValObj is an object of EitherVal.
var BuiltInEitherValObj = NewPanObj(&map[SymHash]Pair{}, BuiltInEitherObj)
// BuiltInEitherErrObj is an object of EitherErr.
var BuiltInEitherErrObj = NewPanObj(&map[SymHash]Pair{}, BuiltInEitherObj)
// BuiltInOneInt is an int object `1`.
var BuiltInOneInt = &PanInt{1}
// BuiltInZeroInt is an int object `0`.
var BuiltInZeroInt = &PanInt{0}
// BuiltInTrue is a bool object `true`.
var BuiltInTrue = &PanBool{true}
// BuiltInFalse is a bool object `false`.
var BuiltInFalse = &PanBool{false}
// BuiltInNil is a nil object `nil`.
var BuiltInNil = &PanNil{}
// BuiltInErrObj is an object of Err (proto of all specific err types).
var BuiltInErrObj = NewPanObj(&map[SymHash]Pair{}, BuiltInObjObj)
// BuiltInAssertionErr is an object of AssertionErr (proto of each assertionErr).
var BuiltInAssertionErr = NewPanObj(&map[SymHash]Pair{}, BuiltInErrObj)
// BuiltInNameErr is an object of NameErr (proto of each nameErr).
var BuiltInNameErr = NewPanObj(&map[SymHash]Pair{}, BuiltInErrObj)
// BuiltInNoPropErr is an object of NoPropErr (proto of each noPropErr).
var BuiltInNoPropErr = NewPanObj(&map[SymHash]Pair{}, BuiltInErrObj)
// BuiltInNotImplementedErr is an object of NotImplemented (proto of each notImplementdErr).
var BuiltInNotImplementedErr = NewPanObj(&map[SymHash]Pair{}, BuiltInErrObj)
// BuiltInStopIterErr is an object of StopIterErr (proto of each stopIterErr).
var BuiltInStopIterErr = NewPanObj(&map[SymHash]Pair{}, BuiltInErrObj)
// BuiltInSyntaxErr is an object of SyntaxErr (proto of each syntaxErr).
var BuiltInSyntaxErr = NewPanObj(&map[SymHash]Pair{}, BuiltInErrObj)
// BuiltInTypeErr is an object of TypeErr (proto of each typeErr).
var BuiltInTypeErr = NewPanObj(&map[SymHash]Pair{}, BuiltInErrObj)
// BuiltInValueErr is an object of ValueErr (proto of each valueErr).
var BuiltInValueErr = NewPanObj(&map[SymHash]Pair{}, BuiltInErrObj)
// BuiltInZeroDivisionErr is an object of ZeroDivisionErr (proto of each zeroDivisionErr).
var BuiltInZeroDivisionErr = NewPanObj(&map[SymHash]Pair{}, BuiltInErrObj)
// BuiltInNotImplemented is a pre-built NotImplementedErr object
// (message "Not implemented"); the original comment was truncated.
var BuiltInNotImplemented = NewNotImplementedErr("Not implemented")
package eval
import (
"fmt"
"reflect"
"strings"
"github.com/alecthomas/participle/lexer"
)
// ErrInvalidPattern is returned for an invalid regular expression
type ErrInvalidPattern struct {
	Pattern string // the pattern that failed to compile
}

// Error implements the error interface.
func (e ErrInvalidPattern) Error() string {
	return "invalid pattern `" + e.Pattern + "`"
}
// ErrAstToEval describes an error that occurred during the conversion from the AST to an evaluator
type ErrAstToEval struct {
	Pos  lexer.Position // where in the expression the conversion failed
	Text string         // human-readable description of the failure
}

// Error implements the error interface, rendering the message followed by the position.
func (r *ErrAstToEval) Error() string {
	return fmt.Sprintf("%s: %s", r.Text, r.Pos)
}
// NewError returns a new ErrAstToEval error with the given position and message
func NewError(pos lexer.Position, text string) *ErrAstToEval {
	return &ErrAstToEval{Pos: pos, Text: text}
}

// NewTypeError returns a new ErrAstToEval error when an invalid type was used;
// the message names the kind that was expected
func NewTypeError(pos lexer.Position, kind reflect.Kind) *ErrAstToEval {
	return NewError(pos, fmt.Sprintf("%s expected", kind))
}

// NewOpUnknownError returns a new ErrAstToEval error when an unknown operator was used
func NewOpUnknownError(pos lexer.Position, op string) *ErrAstToEval {
	return NewError(pos, fmt.Sprintf("operator `%s` unknown", op))
}

// NewOpError returns a new ErrAstToEval error when an operator was used in an invalid manner,
// wrapping the underlying error message
func NewOpError(pos lexer.Position, op string, err error) *ErrAstToEval {
	return NewError(pos, fmt.Sprintf("operator `%s` error: %s", op, err))
}
// NewRegisterMultipleFields returns a new ErrAstToEval error reporting a problem
// with the given register (per the name: the register referring to multiple fields),
// wrapping the underlying error
func NewRegisterMultipleFields(pos lexer.Position, regID RegisterID, err error) *ErrAstToEval {
	return NewError(pos, fmt.Sprintf("register `%s` error: %s", regID, err))
}
// ErrRuleParse describes a parsing error and its position in the expression
type ErrRuleParse struct {
	pos  lexer.Position
	expr string
}

// Error renders the expression followed by a caret (`^`) on the next line
// pointing at the column where parsing failed.
func (e *ErrRuleParse) Error() string {
	col := e.pos.Column
	if col > 0 {
		col-- // Column is 1-based; convert to a 0-based indent width
	}
	var sb strings.Builder
	sb.WriteString(e.expr)
	sb.WriteByte('\n')
	for i := 0; i < col; i++ {
		sb.WriteByte(' ')
	}
	sb.WriteByte('^')
	return sb.String()
}
// ErrFieldNotFound error when a field is not present in the model
type ErrFieldNotFound struct {
	Field string // name of the missing field
}

// Error implements the error interface.
func (e ErrFieldNotFound) Error() string {
	return "field `" + e.Field + "` not found"
}
// ErrIteratorNotSupported error when a field doesn't support iteration
type ErrIteratorNotSupported struct {
	Field string // name of the non-iterable field
}

// Error implements the error interface.
func (e ErrIteratorNotSupported) Error() string {
	return "field `" + e.Field + "` doesn't support iteration"
}
// ErrNotSupported returned when something is not supported on a field
type ErrNotSupported struct {
	Field string // name of the field rejecting the operation
}

// Error implements the error interface.
func (e ErrNotSupported) Error() string {
	return "not supported by field `" + e.Field + "`"
}
// ErrValueTypeMismatch error when the given value is not having the correct type
type ErrValueTypeMismatch struct {
	Field string // name of the field whose value had the wrong type
}

// Error implements the error interface.
func (e ErrValueTypeMismatch) Error() string {
	return "incorrect value type for `" + e.Field + "`"
}
package klog
import (
"errors"
"fmt"
"regexp"
"strconv"
)
// Duration represents a time span.
type Duration interface {
	// InMinutes returns the total length of the span in minutes.
	InMinutes() int

	// Plus adds up two durations and returns a new duration.
	// It doesn’t alter the original duration object.
	Plus(Duration) Duration

	// Minus subtracts the second from the first duration.
	// It doesn’t alter the original duration object.
	Minus(Duration) Duration

	// ToString serialises the duration. If the duration is negative,
	// the value is preceded by a `-`. E.g. `45m` or `-2h15m`.
	ToString() string

	// ToStringWithSign serialises the duration. In contrast to `ToString`
	// it also precedes positive values with a `+`. If the duration is `0`,
	// no sign will be added. E.g. `-45m` or `0` or `+6h`.
	ToStringWithSign() string
}

// NewDuration creates a Duration from hours and minutes; either amount may
// be negative to represent a negative time span.
func NewDuration(amountHours int, amountMinutes int) Duration {
	return duration(amountHours*60 + amountMinutes)
}

// duration is the canonical Duration implementation: a total minute count.
type duration int

// abs returns the absolute value of x.
func abs(x int) int {
	if x >= 0 {
		return x
	}
	return -x
}

func (d duration) InMinutes() int {
	return int(d)
}

func (d duration) Plus(additional Duration) Duration {
	return duration(d.InMinutes() + additional.InMinutes())
}

func (d duration) Minus(deductible Duration) Duration {
	return duration(d.InMinutes() - deductible.InMinutes())
}

func (d duration) ToString() string {
	if d == 0 {
		return "0m"
	}
	total := d.InMinutes()
	sign := ""
	if total < 0 {
		sign = "-"
	}
	hours, minutes := abs(total/60), abs(total%60)
	switch {
	case hours > 0 && minutes > 0:
		return fmt.Sprintf("%s%dh%dm", sign, hours, minutes)
	case hours > 0:
		return fmt.Sprintf("%s%dh", sign, hours)
	default:
		return fmt.Sprintf("%s%dm", sign, minutes)
	}
}

func (d duration) ToStringWithSign() string {
	if d <= 0 {
		return d.ToString()
	}
	return "+" + d.ToString()
}
var durationPattern = regexp.MustCompile(`^(-|\+)?((\d+)h)?((\d+)m)?$`)
func NewDurationFromString(hhmm string) (Duration, error) {
match := durationPattern.FindStringSubmatch(hhmm)
if match == nil {
return nil, errors.New("MALFORMED_DURATION")
}
sign := 1
if match[1] == "-" {
sign = -1
}
if match[3] == "" && match[5] == "" {
return nil, errors.New("MALFORMED_DURATION")
}
amountOfHours, _ := strconv.Atoi(match[3])
amountOfMinutes, _ := strconv.Atoi(match[5])
if amountOfHours != 0 && amountOfMinutes >= 60 {
return nil, errors.New("UNREPRESENTABLE_DURATION")
}
return NewDuration(sign*amountOfHours, sign*amountOfMinutes), nil
} | src/duration.go | 0.814717 | 0.440168 | duration.go | starcoder |
package plot
import "math"
// SetScreenLog1P sets axis to logarithm space. A compress value of 0 clears
// any transform; a negative value applies the transform in the opposite
// direction (forward and inverse functions are swapped).
func (axis *Axis) SetScreenLog1P(compress float64) {
	if compress == 0 {
		axis.Transform = nil
		return
	}

	flipped := compress < 0
	if flipped {
		compress = -compress
	}

	scale := 1 / math.Log1p(compress)
	invCompress := 1 / compress

	forward := func(v float64) float64 {
		return math.Log1p(v*compress) * scale
	}
	backward := func(v float64) float64 {
		return (math.Pow(compress+1, v) - 1) * invCompress
	}
	if flipped {
		forward, backward = backward, forward
	}

	axis.Transform = &ScreenSpaceTransform{
		Transform: forward,
		Inverse:   backward,
	}
}
// Log1pTransform implements logarithmic transform.
type Log1pTransform struct {
invert bool
base float64
mulbase float64 // 1 / Log1p(base)
cache struct {
low, high float64
loglow, loghigh float64
}
}
// NewLog1pTransform implements a logarithmic axis transform.
func NewLog1pTransform(base float64) *Log1pTransform {
invert := base < 0
if invert {
base = -base
}
return &Log1pTransform{
invert: invert,
base: base,
mulbase: 1 / math.Log1p(base),
}
}
// ilog implements mirrored log.
func (tx *Log1pTransform) log(v float64) float64 {
if v == 0 {
return 0
} else if v < 0 {
return -math.Log1p(-v) * tx.mulbase
} else {
return math.Log1p(v) * tx.mulbase
}
}
func (tx *Log1pTransform) ilog(v float64) float64 {
if v == 0 {
return 0
} else if v < 0 {
return math.Pow(tx.base, v) - 1
} else {
return -math.Pow(tx.base, -v) + 1
}
}
// logspace converts log-space to normalized space.
func (tx *Log1pTransform) transform(v float64) float64 {
if tx.invert {
return tx.ilog(v)
}
return tx.log(v)
}
// inverse converts normalized value to log-space.
func (tx *Log1pTransform) inverse(v float64) float64 {
if tx.invert {
return tx.log(v)
}
return tx.ilog(v)
}
// lowhigh calculates the transformed low and high limits of the axis,
// memoizing the result for repeated calls with unchanged limits.
//
// Fix: the cache inputs (cache.low / cache.high) were never stored, so the
// memoization could never hit on real limits — and could spuriously return
// stale values when the axis limits happened to equal the zero values.
func (tx *Log1pTransform) lowhigh(axis *Axis) (float64, float64) {
	low, high := axis.lowhigh()
	if tx.cache.low == low && tx.cache.high == high {
		return tx.cache.loglow, tx.cache.loghigh
	}
	// Remember the inputs so the next call with the same limits hits the cache.
	tx.cache.low, tx.cache.high = low, high
	tx.cache.loglow = tx.transform(low)
	tx.cache.loghigh = tx.transform(high)
	return tx.cache.loglow, tx.cache.loghigh
}
// ToCanvas converts a data value to canvas space by normalizing its
// transformed value against the transformed axis limits.
func (tx *Log1pTransform) ToCanvas(axis *Axis, v float64, screenMin, screenMax Length) Length {
	low, high := tx.lowhigh(axis)
	frac := (tx.transform(v) - low) / (high - low)
	return screenMin + frac*(screenMax-screenMin)
}
// FromCanvas converts canvas point to value point.
func (tx *Log1pTransform) FromCanvas(axis *Axis, s Length, screenMin, screenMax Length) float64 {
low, high := tx.lowhigh(axis)
n := (s - screenMin) / (screenMax - screenMin)
v := low + n*(high-low)
return tx.inverse(v)
} | axis_log.go | 0.883739 | 0.40987 | axis_log.go | starcoder |
package utils
import (
"fmt"
"strconv"
"strings"
"regexp"
"time"
)
// Date represents a date in a search query. FHIR search params may define
// dates to varying levels of precision, and the amount of precision affects
// the behavior of the query. Date's value should only be interpreted in the
// context of the Precision supplied.
type Date struct {
	Value     time.Time     // the parsed instant
	Precision DatePrecision // how much of Value is significant
}
// String returns a string representation of the date, honoring the supplied
// precision. A trailing "+00:00" UTC offset is rendered as "Z".
func (d *Date) String() string {
	formatted := d.Value.Format(d.Precision.layout())
	if strings.HasSuffix(formatted, "+00:00") {
		formatted = strings.Replace(formatted, "+00:00", "Z", 1)
	}
	return formatted
}
// RangeLowIncl represents the low end of a date range to match against. As
// the name suggests, the low end of the range is inclusive.
func (d *Date) RangeLowIncl() time.Time {
	return d.Value
}
// RangeHighExcl represents the high end of a date range to match against. As
// the name suggests, the high end of the range is exclusive: it is the value
// advanced by exactly one unit of the date's precision.
func (d *Date) RangeHighExcl() time.Time {
	// Calendar-based precisions must use AddDate so month/year lengths are
	// handled correctly.
	switch d.Precision {
	case Year:
		return d.Value.AddDate(1, 0, 0)
	case Month:
		return d.Value.AddDate(0, 1, 0)
	case Day:
		return d.Value.AddDate(0, 0, 1)
	}
	// Clock-based precisions advance by a fixed duration; unknown precisions
	// fall back to millisecond granularity.
	var step time.Duration
	switch d.Precision {
	case Minute:
		step = time.Minute
	case Second:
		step = time.Second
	default: // Millisecond and anything unrecognized
		step = time.Millisecond
	}
	return d.Value.Add(step)
}
// MustParseDate parses a FHIR date string like ParseDate, but panics instead
// of returning an error.
func MustParseDate(dateStr string) (out *Date) {
	d, err := ParseDate(dateStr)
	if err != nil {
		panic(err)
	}
	return d
}
// dateTimeRegexp matches FHIR date/dateTime strings (roughly ISO 8601):
// YYYY[-MM[-DD[Thh:mm[:ss[.sss]][Z|±hh:mm]]]]. It is compiled once at package
// initialization instead of on every ParseDate call.
var dateTimeRegexp = regexp.MustCompile("([0-9]{4})(-(0[1-9]|1[0-2])(-(0[0-9]|[1-2][0-9]|3[0-1])(T([01][0-9]|2[0-3]):([0-5][0-9])(:([0-5][0-9])(\\.([0-9]+))?)?((Z)|(\\+|-)((0[0-9]|1[0-3]):([0-5][0-9])|(14):(00)))?)?)?)?")

// ParseDate parses a FHIR date string (roughly ISO 8601) into a Date object,
// maintaining the value and the precision supplied.
func ParseDate(dateStr string) (*Date, error) {
	dt := &Date{}
	dateStr = strings.TrimSpace(dateStr)
	m := dateTimeRegexp.FindStringSubmatch(dateStr)
	if m == nil {
		return nil, fmt.Errorf("could not parse date/time: %s", dateStr)
	}
	y, mo, d, h, mi, s, ms, tzZu, tzOp, tzh, tzm := m[1], m[3], m[5], m[7], m[8], m[10], m[12], m[14], m[15], m[17], m[18]

	// Derive precision from the most specific component present.
	switch {
	case ms != "":
		dt.Precision = Millisecond
		// Normalize milliseconds to exactly three digits
		// (.9 -> .900, .99 -> .990, .999999 -> .999).
		switch len(ms) {
		case 1:
			ms += "00"
		case 2:
			ms += "0"
		case 3:
			// already three digits
		default:
			ms = ms[:3]
		}
	case s != "":
		dt.Precision = Second
	case mi != "":
		dt.Precision = Minute
	// NOTE: Skip hour precision since FHIR specification disallows it
	case d != "":
		dt.Precision = Day
	case mo != "":
		dt.Precision = Month
	case y != "":
		dt.Precision = Year
	default:
		dt.Precision = Millisecond
	}

	// Get the location (if no time components or no location, use local)
	loc := time.Local
	if h != "" {
		if tzZu == "Z" {
			loc, _ = time.LoadLocation("UTC")
		} else if tzOp != "" && tzh != "" && tzm != "" {
			tzhi, _ := strconv.Atoi(tzh)
			tzmi, _ := strconv.Atoi(tzm)
			offset := tzhi*60*60 + tzmi*60
			if tzOp == "-" {
				offset *= -1
			}
			loc = time.FixedZone(tzOp+tzh+tzm, offset)
		}
	}

	// Convert to a time.Time; a missing month or day defaults to 1, missing
	// time components default to 0.
	yInt, _ := strconv.Atoi(y)
	moInt, err := strconv.Atoi(mo)
	if err != nil {
		moInt = 1
	}
	dInt, err := strconv.Atoi(d)
	if err != nil {
		dInt = 1
	}
	hInt, _ := strconv.Atoi(h)
	miInt, _ := strconv.Atoi(mi)
	sInt, _ := strconv.Atoi(s)
	msInt, _ := strconv.Atoi(ms)
	dt.Value = time.Date(yInt, time.Month(moInt), dInt, hInt, miInt, sInt, msInt*1000*1000, loc)
	return dt, nil
}
// DatePrecision is an enum representing the precision of a date.
type DatePrecision int

// Constant values for the DatePrecision enum. Note that there is no hour
// precision (FHIR disallows it), so Minute follows Day directly.
const (
	Year DatePrecision = iota
	Month
	Day
	Minute
	Second
	Millisecond
)

// layout returns the time.Format reference layout matching the precision.
// Unrecognized precisions fall back to the millisecond layout.
func (p DatePrecision) layout() string {
	switch p {
	case Year:
		return "2006"
	case Month:
		return "2006-01"
	case Day:
		return "2006-01-02"
	case Minute:
		return "2006-01-02T15:04-07:00"
	case Second:
		return "2006-01-02T15:04:05-07:00"
	default: // Millisecond and anything unrecognized
		return "2006-01-02T15:04:05.000-07:00"
	}
}
package miner
import (
abi "github.com/filecoin-project/specs-actors/actors/abi"
big "github.com/filecoin-project/specs-actors/actors/abi/big"
power "github.com/filecoin-project/specs-actors/actors/builtin/power"
)
// An approximation to chain state finality (should include message propagation time as well).
const ChainFinalityish = abi.ChainEpoch(500) // PARAM_FINISH
// Maximum duration to allow for the sealing process for seal algorithms.
// Dependent on algorithm and sector size
var MaxSealDuration = map[abi.RegisteredProof]abi.ChainEpoch{
abi.RegisteredProof_StackedDRG32GiBSeal: abi.ChainEpoch(10000), // PARAM_FINISH
abi.RegisteredProof_StackedDRG2KiBSeal: abi.ChainEpoch(10000),
abi.RegisteredProof_StackedDRG8MiBSeal: abi.ChainEpoch(10000),
abi.RegisteredProof_StackedDRG512MiBSeal: abi.ChainEpoch(10000),
}
// Number of epochs between publishing the precommit and when the challenge for interactive PoRep is drawn
// used to ensure it is not predictable by miner.
const PreCommitChallengeDelay = abi.ChainEpoch(10)
// Lookback from the current epoch from which to obtain a PoSt challenge.
// A lookback of 1 means consulting the immediate parent tipset/state.
const PoStLookback = abi.ChainEpoch(1) // PARAM_FINISH
// Lookback from the current epoch for state view for elections; for Election PoSt, same as the PoSt lookback.
const ElectionLookback = PoStLookback // PARAM_FINISH
// Number of sectors to be sampled as part of windowed PoSt
const NumWindowedPoStSectors = 1 // PARAM_FINISH
// Delay between declaration of a temporary sector fault and effectiveness of reducing the active proving set for PoSts.
const DeclaredFaultEffectiveDelay = abi.ChainEpoch(20) // PARAM_FINISH
// Staging period for a miner worker key change.
const WorkerKeyChangeDelay = 2 * ElectionLookback // PARAM_FINISH
// Deposit per sector required at pre-commitment, refunded after the commitment is proven (else burned).
func precommitDeposit(sectorSize abi.SectorSize, duration abi.ChainEpoch) abi.TokenAmount {
depositPerByte := abi.NewTokenAmount(0) // PARAM_FINISH
return big.Mul(depositPerByte, big.NewIntUnsigned(uint64(sectorSize)))
}
func temporaryFaultFee(weights []*power.SectorStorageWeightDesc, duration abi.ChainEpoch) abi.TokenAmount {
return big.Zero() // PARAM_FINISH
}
// MaxFaultsCount is the maximum number of faults that can be declared
const MaxFaultsCount = 32 << 20
// ProvingPeriod defines the frequency of PoSt challenges that a miner will have to respond to
const ProvingPeriod = 300
// WindowedPoStChallengeCount defines the number of windowed PoSt challenges
const WindowedPoStChallengeCount = 2000 | actors/builtin/miner/policy.go | 0.675551 | 0.452717 | policy.go | starcoder |
package cmd
import (
"fmt"
"strconv"
"strings"
"github.com/jaredbancroft/aoc2020/pkg/docking"
"github.com/jaredbancroft/aoc2020/pkg/helpers"
"github.com/spf13/cobra"
)
// day14Cmd represents the day14 command. It solves Advent of Code 2020 day 14
// by running the initialization program read from the input file.
var day14Cmd = &cobra.Command{
	Use:   "day14",
	Short: "Advent of Code 2020 - Day14: Docking Data",
	Long: `
Advent of Code 2020
--- Day 14: Docking Data ---
As your ferry approaches the sea port, the captain asks for your help again. The computer
system that runs this port isn't compatible with the docking program on the ferry, so the
docking parameters aren't being correctly initialized in the docking program's memory.
After a brief inspection, you discover that the sea port's computer system uses a strange
bitmask system in its initialization program. Although you don't have the correct decoder
chip handy, you can emulate it in software!
The initialization program (your puzzle input) can either update the bitmask or write a value
to memory. Values and memory addresses are both 36-bit unsigned integers. For example, ignoring
bitmasks for a moment, a line like mem[8] = 11 would write the value 11 to memory address 8.
The bitmask is always given as a string of 36 bits, written with the most significant bit
(representing 2^35) on the left and the least significant bit (2^0, that is, the 1s bit) on
the right. The current bitmask is applied to values immediately before they are written to
memory: a 0 or 1 overwrites the corresponding bit in the value, while an X leaves the bit in
the value unchanged.
For example, consider the following program:
mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X
mem[8] = 11
mem[7] = 101
mem[8] = 0
This program starts by specifying a bitmask (mask = ....). The mask it specifies will overwrite
two bits in every written value: the 2s bit is overwritten with 0, and the 64s bit is overwritten with 1.
The program then attempts to write the value 11 to memory address 8. By expanding everything out
to individual bits, the mask is applied as follows:
value:  000000000000000000000000000000001011  (decimal 11)
mask:   XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X
result: 000000000000000000000000000001001001  (decimal 73)
So, because of the mask, the value 73 is written to memory address 8 instead. Then, the program tries
to write 101 to address 7:
value:  000000000000000000000000000001100101  (decimal 101)
mask:   XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X
result: 000000000000000000000000000001100101  (decimal 101)
This time, the mask has no effect, as the bits it overwrote were already the values the mask tried to
set. Finally, the program tries to write 0 to address 8:
value:  000000000000000000000000000000000000  (decimal 0)
mask:   XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X
result: 000000000000000000000000000001000000  (decimal 64)
64 is written to address 8 instead, overwriting the value that was there previously.
To initialize your ferry's docking program, you need the sum of all values left in memory after the
initialization program completes. (The entire 36-bit address space begins initialized to the value
0 at every address.) In the above example, only two values in memory are not zero - 101 (at address 7)
and 64 (at address 8) - producing a sum of 165.
Execute the initialization program. What is the sum of all values left in memory after it completes?
`,
	// RunE reads the initialization program from the input file and prints
	// the answers for both puzzle parts.
	RunE: func(cmd *cobra.Command, args []string) error {
		initProgram, err := helpers.ReadStringFile(input)
		if err != nil {
			return err
		}
		part1, part2 := runInitProgram(initProgram)
		fmt.Println("Part 1 is", part1)
		fmt.Println("Part 2 is", part2)
		return nil
	},
}
// runInitProgram executes the docking initialization program and returns the
// part 1 and part 2 answers: the sums of all values left in memory using the
// value-mask interpretation (v1) and the address-decoder interpretation (v2).
func runInitProgram(initProgram []string) (int, int) {
	var mask docking.Mask
	memV1 := make(map[int]int)
	memV2 := make(map[int]int)
	for _, line := range initProgram {
		parts := strings.Split(line, " = ")
		if parts[0] == "mask" {
			mask = docking.NewMask(parts[1])
			continue
		}
		// "mem[<addr>] = <value>"
		addr, _ := strconv.Atoi(strings.TrimLeft(strings.TrimRight(parts[0], "]"), "mem["))
		value, _ := strconv.Atoi(parts[1])
		raw := value // part 2 masks the address, not the value
		for pos, bit := range mask.Bits {
			switch {
			case bit == 0:
				value = mask.ClearBit(value, pos)
			case bit == 1:
				value = mask.SetBit(value, pos)
			}
		}
		memV1[addr] = value
		for mem, val := range docking.NewMemMask(addr, mask, raw).MemAddrs {
			memV2[mem] = val
		}
	}
	sum1, sum2 := 0, 0
	for _, v := range memV1 {
		sum1 += v
	}
	for _, v := range memV2 {
		sum2 += v
	}
	return sum1, sum2
}
// init registers the day14 subcommand with the root command.
func init() {
	rootCmd.AddCommand(day14Cmd)
}
package influxql
import (
"sort"
"github.com/gogo/protobuf/proto"
"github.com/influxdata/influxdb/influxql/internal"
)
// ZeroTime is the Unix nanosecond timestamp for time.Time{}.
const ZeroTime = int64(-6795364578871345152)

// Point represents a value in a series that occurred at a given time.
type Point interface {
	// Name and tags uniquely identify the series the value belongs to.
	name() string
	tags() Tags

	// The time that the value occurred at.
	time() int64

	// The value at the given time.
	value() interface{}

	// Auxillary values passed along with the value.
	aux() []interface{}
}

// Points represents a list of points.
type Points []Point
// Tags represent a map of keys and values.
// It memoizes its key so it can be used efficiently during query execution.
type Tags struct {
	id string            // encoded form of m (see encodeTags), used as identity
	m  map[string]string // the underlying key/value pairs
}
// NewTags returns a new instance of Tags. An empty or nil map yields the
// zero Tags value.
func NewTags(m map[string]string) Tags {
	if len(m) == 0 {
		return Tags{}
	}
	t := Tags{m: m}
	t.id = string(encodeTags(m))
	return t
}

// newTagsID returns a new instance of Tags parsed from a tag id.
func newTagsID(id string) Tags {
	m := decodeTags([]byte(id))
	if len(m) == 0 {
		return Tags{}
	}
	return Tags{id: id, m: m}
}
// ID returns the string identifier for the tags.
func (t Tags) ID() string {
	return t.id
}

// KeyValues returns the underlying map for the tags.
func (t Tags) KeyValues() map[string]string {
	return t.m
}

// Keys returns a sorted list of all keys on the tag; it returns nil for a
// nil receiver or an empty tag set.
func (t *Tags) Keys() []string {
	if t == nil || len(t.m) == 0 {
		return nil
	}
	keys := make([]string, 0, len(t.m))
	for key := range t.m {
		keys = append(keys, key)
	}
	sort.Strings(keys)
	return keys
}

// Value returns the value for a given key, or "" for a nil receiver or a
// missing key.
func (t *Tags) Value(key string) string {
	if t == nil {
		return ""
	}
	return t.m[key]
}
// Subset returns a new tags object restricted to the given keys.
func (t *Tags) Subset(keys []string) Tags {
	if t.m == nil || len(keys) == 0 {
		return Tags{}
	}

	// If the requested keys are exactly this tag set's keys, reuse it.
	if keysMatch(t.m, keys) {
		return *t
	}

	// Otherwise build a new tag set from the requested keys.
	subset := make(map[string]string, len(keys))
	for _, key := range keys {
		subset[key] = t.m[key]
	}
	return NewTags(subset)
}
// Equals returns true if t equals other: both nil, or both non-nil with the
// same identifier.
func (t *Tags) Equals(other *Tags) bool {
	if t == nil || other == nil {
		return t == other
	}
	return t.id == other.id
}
// keysMatch reports whether m has exactly the same keys as listed in keys.
func keysMatch(m map[string]string, keys []string) bool {
	if len(m) != len(keys) {
		return false
	}
	for _, key := range keys {
		if _, present := m[key]; !present {
			return false
		}
	}
	return true
}
// encodeTags converts a map of strings to an identifier: all keys in sorted
// order, then the matching values in the same order, every segment separated
// by a NUL byte. An empty map encodes to nil.
func encodeTags(m map[string]string) []byte {
	if len(m) == 0 {
		return nil
	}

	// Collect the keys and compute the final size up front.
	keys := make([]string, 0, len(m))
	total := 2*len(m) - 1 // number of NUL separators
	for k, v := range m {
		keys = append(keys, k)
		total += len(k) + len(v)
	}
	sort.Strings(keys)

	out := make([]byte, 0, total)
	for _, k := range keys {
		out = append(out, k...)
		out = append(out, '\x00')
	}
	for i, k := range keys {
		out = append(out, m[k]...)
		if i < len(keys)-1 {
			out = append(out, '\x00')
		}
	}
	return out
}
// decodeTags parses an identifier (as produced by encodeTags: sorted keys,
// then values, all separated by '\x00') back into a map of tags. It returns
// nil for an empty identifier.
//
// Fix: this was previously an unimplemented stub that panicked with
// "FIXME: implement", so newTagsID crashed on any non-empty id.
func decodeTags(id []byte) map[string]string {
	if len(id) == 0 {
		return nil
	}

	// Split the identifier on NUL separators.
	var parts [][]byte
	start := 0
	for i := 0; i <= len(id); i++ {
		if i == len(id) || id[i] == '\x00' {
			parts = append(parts, id[start:i])
			start = i + 1
		}
	}

	// The encoding writes n keys followed by n values, so the segment count
	// must be even; drop a trailing odd segment from malformed input.
	if len(parts)%2 == 1 {
		parts = parts[:len(parts)-1]
	}
	mid := len(parts) / 2

	m := make(map[string]string, mid)
	for i := 0; i < mid; i++ {
		m[string(parts[i])] = string(parts[mid+i])
	}
	return m
}
// encodeAux converts a slice of auxiliary values to their protobuf
// representation. Concrete values carry both a data type and a value; typed
// nil pointers (*float64 etc.) encode as the data type alone, which decodeAux
// turns back into typed nils. Any other type maps to the Unknown data type.
func encodeAux(aux []interface{}) []*internal.Aux {
	pb := make([]*internal.Aux, len(aux))
	for i := range aux {
		switch v := aux[i].(type) {
		case float64:
			pb[i] = &internal.Aux{DataType: proto.Int32(Float), FloatValue: proto.Float64(v)}
		case *float64:
			pb[i] = &internal.Aux{DataType: proto.Int32(Float)}
		case int64:
			pb[i] = &internal.Aux{DataType: proto.Int32(Integer), IntegerValue: proto.Int64(v)}
		case *int64:
			pb[i] = &internal.Aux{DataType: proto.Int32(Integer)}
		case string:
			pb[i] = &internal.Aux{DataType: proto.Int32(String), StringValue: proto.String(v)}
		case *string:
			pb[i] = &internal.Aux{DataType: proto.Int32(String)}
		case bool:
			pb[i] = &internal.Aux{DataType: proto.Int32(Boolean), BooleanValue: proto.Bool(v)}
		case *bool:
			pb[i] = &internal.Aux{DataType: proto.Int32(Boolean)}
		default:
			pb[i] = &internal.Aux{DataType: proto.Int32(int32(Unknown))}
		}
	}
	return pb
}
// decodeAux converts protobuf auxiliary values back to Go values, the inverse
// of encodeAux. Entries carrying a concrete value decode to float64 / int64 /
// string / bool; entries with only a data type decode to the corresponding
// typed nil pointer; unknown data types decode to nil.
func decodeAux(pb []*internal.Aux) []interface{} {
	aux := make([]interface{}, len(pb))
	for i := range pb {
		switch pb[i].GetDataType() {
		case Float:
			if pb[i].FloatValue != nil {
				aux[i] = *pb[i].FloatValue
			} else {
				aux[i] = (*float64)(nil)
			}
		case Integer:
			if pb[i].IntegerValue != nil {
				aux[i] = *pb[i].IntegerValue
			} else {
				aux[i] = (*int64)(nil)
			}
		case String:
			if pb[i].StringValue != nil {
				aux[i] = *pb[i].StringValue
			} else {
				aux[i] = (*string)(nil)
			}
		case Boolean:
			if pb[i].BooleanValue != nil {
				aux[i] = *pb[i].BooleanValue
			} else {
				aux[i] = (*bool)(nil)
			}
		default:
			aux[i] = nil
		}
	}
	return aux
}
package it
// Volume defines a volume value in the range 0..64.
type Volume uint8

// Value returns the value of the volume as a floating point value between 0
// and 1, inclusively. It panics when the raw value exceeds 64.
func (p Volume) Value() float32 {
	if p > 64 {
		panic("unexpected value")
	}
	return float32(p) / 64
}
// FineVolume defines a volume value with double precision (range 0..128).
type FineVolume uint8

// Value returns the value of the fine volume as a floating point value
// between 0 and 1, inclusively. It panics when the raw value exceeds 128.
func (p FineVolume) Value() float32 {
	if p > 128 {
		panic("unexpected value")
	}
	return float32(p) / 128
}
const (
	// DefaultVolume is the default volume for many things in IT files
	DefaultVolume = Volume(64)
	// DefaultFineVolume is the default volume for fine volumes in IT files.
	// NOTE(review): this is typed as Volume but holds 128, which is above
	// Volume's valid range (0..64) — Volume.Value() on this constant would
	// panic. It likely should be FineVolume(128); confirm against callers
	// before changing the type.
	DefaultFineVolume = Volume(128)
)
// PanSeparation is the panning separation value (range 0..128).
type PanSeparation uint8

// Value returns the value of the panning separation as a floating point
// value between 0 and 1, inclusively. It panics when the raw value
// exceeds 128.
func (p PanSeparation) Value() float32 {
	if p > 128 {
		panic("unexpected value")
	}
	return float32(p) / 128
}
// PanValue describes a panning value in the IT format. Bit 7 marks the
// channel as disabled; the low 7 bits hold the position (0..64) or the
// surround marker (100).
type PanValue uint8

// IsSurround returns true if the panning is in surround-sound mode
func (p PanValue) IsSurround() bool {
	return p&^128 == 100
}

// IsDisabled returns true if the channel this panning value is attached to
// is muted (bit 7 set). Effects in muted channels are still processed.
func (p PanValue) IsDisabled() bool {
	return p&128 != 0
}

// Value returns the value of the panning as a floating point value between
// 0 and 1, inclusively: 0 = absolute left, 0.5 = center, 1 = absolute right.
// Surround (100) reads as center; other out-of-range values panic.
func (p PanValue) Value() float32 {
	switch v := p &^ 128; {
	case v <= 64:
		return float32(v) / 64
	case v == 100:
		return 0.5
	default:
		panic("unexpected value")
	}
}
// SamplePanValue describes a panning value in the IT format's sample header.
// Note that the enable bit has the opposite sense of PanValue: here a CLEAR
// bit 7 means the pan setting is disabled.
type SamplePanValue uint8

// IsSurround returns true if the panning is in surround-sound mode
func (p SamplePanValue) IsSurround() bool {
	return p&^128 == 100
}

// IsDisabled returns true if the sample pan setting is inactive (bit 7
// clear — inverted relative to PanValue.IsDisabled).
func (p SamplePanValue) IsDisabled() bool {
	return p&128 == 0
}

// Value returns the value of the panning as a floating point value between
// 0 and 1, inclusively: 0 = absolute left, 0.5 = center, 1 = absolute right.
// Surround (100) reads as center; other out-of-range values panic.
func (p SamplePanValue) Value() float32 {
	switch v := p &^ 128; {
	case v <= 64:
		return float32(v) / 64
	case v == 100:
		return 0.5
	default:
		panic("unexpected value")
	}
}
// NewNoteAction is what to do when a new note occurs
type NewNoteAction uint8

const (
	// NewNoteActionCut means to cut the previous playback when a new note occurs
	NewNoteActionCut = NewNoteAction(0)
	// NewNoteActionContinue means to continue the previous playback when a new note occurs
	NewNoteActionContinue = NewNoteAction(1)
	// NewNoteActionOff means to note-off the previous playback when a new note occurs
	NewNoteActionOff = NewNoteAction(2)
	// NewNoteActionFade means to fade the previous playback when a new note occurs
	NewNoteActionFade = NewNoteAction(3)
)
// Percentage8 is a percentage stored as a uint8 (0..100 for 0%..100%).
type Percentage8 uint8

// Value returns the percentage as a fraction (e.g. 50 -> 0.5).
func (p Percentage8) Value() float32 {
	const full = 100
	return float32(p) / full
}
// DuplicateCheckType is the duplicate check type
type DuplicateCheckType uint8

const (
	// DuplicateCheckTypeOff is for when the duplicate check type is disabled
	DuplicateCheckTypeOff = DuplicateCheckType(0)
	// DuplicateCheckTypeNote is for when the duplicate check type is set to note mode
	DuplicateCheckTypeNote = DuplicateCheckType(1)
	// DuplicateCheckTypeSample is for when the duplicate check type is set to sample mode
	DuplicateCheckTypeSample = DuplicateCheckType(2)
	// DuplicateCheckTypeInstrument is for when the duplicate check type is set to instrument mode
	DuplicateCheckTypeInstrument = DuplicateCheckType(3)
)

// DuplicateCheckAction is the action to perform when a duplicate is detected
type DuplicateCheckAction uint8

const (
	// DuplicateCheckActionCut cuts the playback when a duplicate is detected
	DuplicateCheckActionCut = DuplicateCheckAction(0)
	// DuplicateCheckActionOff performs a note-off on the playback when a duplicate is detected
	DuplicateCheckActionOff = DuplicateCheckAction(1)
	// DuplicateCheckActionFade performs a fade-out on the playback when a duplicate is detected
	DuplicateCheckActionFade = DuplicateCheckAction(2)
)
// NodePoint16 is a node point in the old instrument format
type NodePoint16 struct {
	Tick      uint8 // envelope position, in ticks
	Magnitude uint8 // envelope value at that position
}

// NodePoint24 is a node point in the new instrument format
type NodePoint24 struct {
	Y    int8   // envelope value (signed)
	Tick uint16 // envelope position, in ticks
}

// Envelope is an envelope structure
type Envelope struct {
	Flags            EnvelopeFlags
	Count            uint8 // presumably the number of valid NodePoints entries — confirm against the loader
	LoopBegin        uint8
	LoopEnd          uint8
	SustainLoopBegin uint8
	SustainLoopEnd   uint8
	NodePoints       [25]NodePoint24
	Reserved51       uint8 // reserved byte at offset 0x51 in the on-disk layout — TODO confirm
}
// DuplicateNoteCheck activates or deactivates the duplicate note checking
type DuplicateNoteCheck uint8

const (
	// DuplicateNoteCheckOff disables the duplicate note checking
	DuplicateNoteCheckOff = DuplicateNoteCheck(0)
	// DuplicateNoteCheckOn activates the duplicate note checking
	DuplicateNoteCheckOn = DuplicateNoteCheck(1)
)
// Note is a note field value. Values below 120 are regular notes; 120..253
// request a note fade, 254 is note-cut and 255 is note-off.
type Note uint8

// IsNoteOff returns true if the note is a note-off command
func (n Note) IsNoteOff() bool {
	return n == 255
}

// IsNoteCut returns true if the note is a note-cut command
func (n Note) IsNoteCut() bool {
	return n == 254
}

// IsNoteFade returns true if the note is a note-fade command
func (n Note) IsNoteFade() bool {
	return n.IsSpecial() && n < 254
}

// IsSpecial returns true if the note is actually a special value (see above)
func (n Note) IsSpecial() bool {
	return n >= 120
}
// NoteSample is a note-sample keyboard mapping entry
type NoteSample struct {
	Note   Note  // note to play
	Sample uint8 // sample index to use for that note
}
package shell
import (
"encoding/binary"
"errors"
"math"
"strconv"
"strings"
"fmt"
"encoding/hex"
)
type Token struct {
Buf string
DataType uint
}
func (t *Token) ToMustValue() interface{} {
v, err := t.ToValue()
if err != nil {
panic(err)
}
return v
}
func (t *Token) ToValue() (interface{}, error) {
if t.DataType == DataTypeHexBytes {
return nil, fmt.Errorf("unsuported type to convert value: %v", t.Buf)
} else if t.DataType == DataTypeInt {
v, err := strconv.ParseInt(t.Buf, 0, 64)
if err != nil {
return nil, err
}
return v, nil
} else if t.DataType == DataTypeFloat {
v, err := strconv.ParseFloat(t.Buf, 64)
if err != nil {
return nil, err
}
return v, nil
} else if t.DataType == DataTypeString {
return t.Buf, nil
} else if t.DataType == DataTypeTerm {
if t.Buf == "true" {
return true, nil
} else if t.Buf == "false" {
return false, nil
} else {
return nil, fmt.Errorf("unsuported type to convert value: %v", t.Buf)
}
} else {
return nil, fmt.Errorf("unknown data type '%s'", t.Buf)
}
}
// ToBytes converts the token into a byte-slice representation. Hex
// tokens decode the digits after the "0x" prefix; integers and floats
// become 8 big-endian bytes (floats via their IEEE-754 bit pattern);
// strings are returned as raw UTF-8 bytes. Terms have no byte form.
func (t *Token) ToBytes() ([]byte, error) {
	switch t.DataType {
	case DataTypeHexBytes:
		// Skip the leading "0x" before decoding.
		b, err := hex.DecodeString(t.Buf[2:])
		if err != nil {
			return nil, err
		}
		return b, nil
	case DataTypeInt:
		v, err := strconv.ParseInt(t.Buf, 0, 64)
		if err != nil {
			return nil, err
		}
		b := make([]byte, 8)
		binary.BigEndian.PutUint64(b, uint64(v))
		return b, nil
	case DataTypeFloat:
		v, err := strconv.ParseFloat(t.Buf, 64)
		if err != nil {
			return nil, err
		}
		b := make([]byte, 8)
		binary.BigEndian.PutUint64(b, math.Float64bits(v))
		return b, nil
	case DataTypeString:
		return []byte(t.Buf), nil
	case DataTypeTerm:
		return nil, fmt.Errorf("term '%s' can not be converted to the bytes slice", t.Buf)
	default:
		// Report the numeric type; the previous message printed the token
		// text as if it were the data type.
		return nil, fmt.Errorf("unknown data type %d for token %q", t.DataType, t.Buf)
	}
}
// ToMustBytes converts the token to its byte-slice representation,
// panicking if the conversion fails.
func (t *Token) ToMustBytes() []byte {
	b, err := t.ToBytes()
	if err == nil {
		return b
	}
	panic(err)
}
// Data types inferred for tokens by Tokenize.
const (
	DataTypeHexBytes = 0 // "0x"-prefixed hex byte string
	DataTypeInt = 1 // parses as an integer (base prefix allowed)
	DataTypeFloat = 2 // parses as a floating point number
	DataTypeString = 3 // quoted text
	DataTypeTerm = 4 // bare word (e.g. "true", "false", a command name)
)
// tokenize logic was inspired by https://github.com/mattn/go-shellwords

// classifyToken assigns a DataType to a completed token buffer: quoted
// buffers are always strings; otherwise the buffer is tried as hex
// bytes ("0x" prefix), integer, then float, and finally falls back to
// a bare term. The original inlined this logic twice.
func classifyToken(buf string, quoted bool) uint {
	if quoted {
		return DataTypeString
	}
	if strings.HasPrefix(buf, "0x") {
		return DataTypeHexBytes
	}
	if _, err := strconv.ParseInt(buf, 0, 64); err == nil {
		return DataTypeInt
	}
	if _, err := strconv.ParseFloat(buf, 64); err == nil {
		return DataTypeFloat
	}
	return DataTypeTerm
}

// Tokenize splits line into typed tokens. A backslash escapes the next
// rune (except inside single quotes), and single/double quotes group
// words containing whitespace. An unterminated quote or a trailing
// escape yields an error.
func Tokenize(line string) ([]*Token, error) {
	line = strings.TrimSpace(line)
	tokens := []*Token{}
	buf := ""
	var escaped, doubleQuoted, singleQuoted bool
	var usingQuoted bool
	for _, r := range line {
		if escaped {
			buf += string(r)
			escaped = false
			continue
		}
		if r == '\\' {
			if singleQuoted {
				// Inside single quotes a backslash is literal.
				buf += string(r)
			} else {
				escaped = true
			}
			continue
		}
		if isSpace(r) {
			if singleQuoted || doubleQuoted {
				buf += string(r)
			} else if buf != "" {
				// End of a token: classify and flush the buffer.
				tokens = append(tokens, &Token{Buf: buf, DataType: classifyToken(buf, usingQuoted)})
				buf = ""
				usingQuoted = false
			}
			continue
		}
		switch r {
		case '"':
			if !singleQuoted {
				doubleQuoted = !doubleQuoted
				usingQuoted = true
				continue
			}
		case '\'':
			if !doubleQuoted {
				singleQuoted = !singleQuoted
				usingQuoted = true
				continue
			}
		}
		buf += string(r)
	}
	if buf != "" {
		tokens = append(tokens, &Token{Buf: buf, DataType: classifyToken(buf, usingQuoted)})
	}
	if escaped || singleQuoted || doubleQuoted {
		return nil, errors.New("invalid line string")
	}
	return tokens, nil
}
// isSpace reports whether r is a whitespace rune that separates tokens
// (space, tab, carriage return or newline).
func isSpace(r rune) bool {
	return r == ' ' || r == '\t' || r == '\r' || r == '\n'
}
package storetestcases
import (
"context"
"testing"
"github.com/stratumn/go-chainscript"
"github.com/stratumn/go-chainscript/chainscripttest"
"github.com/stratumn/go-core/store"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestEvidenceStore runs all tests for the store.EvidenceStore interface.
//
// NOTE: the two subtests are order-dependent — the second expects the 5
// evidences added by the first plus its own (6 total) on the same link.
func (f Factory) TestEvidenceStore(t *testing.T) {
	a := f.initAdapter(t)
	defer f.freeAdapter(a)
	// All subtests attach evidence to this single random link.
	// NOTE(review): the CreateLink error is ignored; a failure here would
	// surface later as confusing assertion errors — consider require.NoError.
	l := chainscripttest.RandomLink(t)
	linkHash, _ := a.CreateLink(context.Background(), l)
	s := store.EvidenceStore(a)
	t.Run("Adding evidences to a segment should work", func(t *testing.T) {
		ctx := context.Background()
		// One evidence per distinct backend so FindEvidences can locate each.
		e1, _ := chainscript.NewEvidence("1.0.0", "TMPop", "1", []byte{1})
		e2, _ := chainscript.NewEvidence("1.0.0", "dummy", "2", []byte{2})
		e3, _ := chainscript.NewEvidence("1.0.0", "batch", "3", []byte{3})
		e4, _ := chainscript.NewEvidence("1.0.0", "bcbatch", "4", []byte{4})
		e5, _ := chainscript.NewEvidence("1.0.0", "generic", "5", []byte{5})
		evidences := []*chainscript.Evidence{e1, e2, e3, e4, e5}
		for _, evidence := range evidences {
			err := s.AddEvidence(ctx, linkHash, evidence)
			assert.NoError(t, err, "s.AddEvidence(ctx, )")
		}
		storedEvidences, err := s.GetEvidences(ctx, linkHash)
		assert.NoError(t, err, "s.GetEvidences()")
		assert.Equal(t, 5, len(storedEvidences), "Invalid number of evidences")
		for _, evidence := range evidences {
			foundEvidence := storedEvidences.FindEvidences(evidence.Backend)
			assert.Equal(t, 1, len(foundEvidence), "Evidence not found: %v", evidence)
		}
	})
	t.Run("Duplicate evidences should be discarded", func(t *testing.T) {
		ctx := context.Background()
		e, _ := chainscript.NewEvidence("1.0.0", "TMPop", "42", []byte{42})
		err := s.AddEvidence(ctx, linkHash, e)
		require.NoError(t, err, "s.AddEvidence()")
		// Add duplicate - some stores return an error, others silently ignore
		_ = s.AddEvidence(ctx, linkHash, e)
		storedEvidences, err := s.GetEvidences(ctx, linkHash)
		assert.NoError(t, err, "s.GetEvidences()")
		// 5 from the previous subtest + 1 added here; the duplicate must
		// not have been stored.
		assert.Equal(t, 6, len(storedEvidences), "Invalid number of evidences")
		stored := storedEvidences.GetEvidence("TMPop", "42")
		assert.NotNil(t, stored)
		assert.EqualValues(t, e.Backend, stored.Backend, "Invalid evidence backend")
	})
}
package spatial
import (
"encoding/json"
"math"
"reflect"
)
// coordToSlice converts a list of ordinate values into a slice of
// pointers, mapping NaN inputs to nil so absent ordinates can be
// distinguished from zero values.
func coordToSlice(fs ...float64) []*float64 {
	out := make([]*float64, len(fs))
	for i := range fs {
		if math.IsNaN(fs[i]) {
			continue
		}
		out[i] = &fs[i]
	}
	return out
}
// Per-variant adapters expanding each coordinate type into the ordered
// ordinate slice used for JSON output. CoordM has no Z ordinate, so a
// zero placeholder keeps M in the fourth position.
func (c Coord) coordToSlice() []*float64 { return coordToSlice(c.X, c.Y) }
func (c CoordZ) coordToSlice() []*float64 { return coordToSlice(c.X, c.Y, c.Z) }
func (c CoordM) coordToSlice() []*float64 { return coordToSlice(c.X, c.Y, 0, c.M) }
func (c CoordZM) coordToSlice() []*float64 { return coordToSlice(c.X, c.Y, c.Z, c.M) }
// jsonCoord converts a reflect.Value holding one of the coordinate
// variants (Coord, CoordZ, CoordM, CoordZM, or a type convertible to
// one of them) into its ordinate-slice form. It panics when the value
// is none of the known coordinate types.
func jsonCoord(v reflect.Value) []*float64 {
	switch {
	case v.Type().ConvertibleTo(coordType):
		return v.Convert(coordType).Interface().(Coord).coordToSlice()
	case v.Type().ConvertibleTo(coordZType):
		return v.Convert(coordZType).Interface().(CoordZ).coordToSlice()
	case v.Type().ConvertibleTo(coordMType):
		return v.Convert(coordMType).Interface().(CoordM).coordToSlice()
	case v.Type().ConvertibleTo(coordZMType):
		return v.Convert(coordZMType).Interface().(CoordZM).coordToSlice()
	default:
		panic("invalid coordinate type")
	}
}
// jsonConvert recursively converts nested slices of coordinates into
// the nested []interface{} structure json.Marshal expects, unwrapping
// interface values along the way. Leaves are single coordinates.
func jsonConvert(rv reflect.Value) interface{} {
	if rv.Kind() == reflect.Interface {
		return jsonConvert(rv.Elem())
	}
	if rv.Kind() != reflect.Slice {
		return jsonCoord(rv)
	}
	out := make([]interface{}, rv.Len())
	for i := range out {
		out[i] = jsonConvert(rv.Index(i))
	}
	return out
}
// jsonConvertGeometries renders each member of a geometry collection
// as its own {type, coordinates} envelope.
func jsonConvertGeometries(rv reflect.Value) interface{} {
	size := rv.Len()
	s := make([]interface{}, size)
	for i := 0; i < size; i++ {
		iv := rv.Index(i)
		s[i] = jsonType{Type: geoTypeName(iv.Interface().(Geometry)), Coordinates: jsonConvert(iv)}
	}
	return s
}
// jsonType is the GeoJSON envelope for a single geometry.
type jsonType struct {
	Type string `json:"type"`
	Coordinates interface{} `json:"coordinates"`
}

// jsonTypeGeometries is the GeoJSON envelope for a geometry collection.
type jsonTypeGeometries struct {
	Type string `json:"type"`
	Geometries interface{} `json:"geometries"`
}
// EncodeGeoJSON encodes a geometry to the GeoJSON format. Geometry
// collections nest their members under "geometries"; every other type
// is emitted as a single {type, coordinates} object.
func EncodeGeoJSON(g Geometry) ([]byte, error) {
	switch geoType(g) {
	case geoGeometryCollection:
		return json.Marshal(jsonTypeGeometries{Type: geoTypeName(g), Geometries: jsonConvertGeometries(reflect.ValueOf(g))})
	default:
		return json.Marshal(jsonType{Type: geoTypeName(g), Coordinates: jsonConvert(reflect.ValueOf(g))})
	}
}
package blog_posts
import (
"encoding/json"
)
// BackgroundImage struct for BackgroundImage
type BackgroundImage struct {
ImageUrl string `json:"imageUrl"`
BackgroundSize string `json:"backgroundSize"`
BackgroundPosition string `json:"backgroundPosition"`
}
// NewBackgroundImage instantiates a new BackgroundImage with all three
// required properties set.
func NewBackgroundImage(imageUrl string, backgroundSize string, backgroundPosition string) *BackgroundImage {
	return &BackgroundImage{
		ImageUrl:           imageUrl,
		BackgroundSize:     backgroundSize,
		BackgroundPosition: backgroundPosition,
	}
}
// NewBackgroundImageWithDefaults instantiates a new BackgroundImage object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewBackgroundImageWithDefaults() *BackgroundImage {
this := BackgroundImage{}
return &this
}
// GetImageUrl returns the ImageUrl field value
func (o *BackgroundImage) GetImageUrl() string {
if o == nil {
var ret string
return ret
}
return o.ImageUrl
}
// GetImageUrlOk returns a tuple with the ImageUrl field value
// and a boolean to check if the value has been set.
func (o *BackgroundImage) GetImageUrlOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.ImageUrl, true
}
// SetImageUrl sets field value
func (o *BackgroundImage) SetImageUrl(v string) {
o.ImageUrl = v
}
// GetBackgroundSize returns the BackgroundSize field value
func (o *BackgroundImage) GetBackgroundSize() string {
if o == nil {
var ret string
return ret
}
return o.BackgroundSize
}
// GetBackgroundSizeOk returns a tuple with the BackgroundSize field value
// and a boolean to check if the value has been set.
func (o *BackgroundImage) GetBackgroundSizeOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.BackgroundSize, true
}
// SetBackgroundSize sets field value
func (o *BackgroundImage) SetBackgroundSize(v string) {
o.BackgroundSize = v
}
// GetBackgroundPosition returns the BackgroundPosition field value
func (o *BackgroundImage) GetBackgroundPosition() string {
if o == nil {
var ret string
return ret
}
return o.BackgroundPosition
}
// GetBackgroundPositionOk returns a tuple with the BackgroundPosition field value
// and a boolean to check if the value has been set.
func (o *BackgroundImage) GetBackgroundPositionOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.BackgroundPosition, true
}
// SetBackgroundPosition sets field value
func (o *BackgroundImage) SetBackgroundPosition(v string) {
o.BackgroundPosition = v
}
// MarshalJSON serializes the BackgroundImage into its JSON object
// form. All three fields are required by the API, so each is always
// emitted (the generated `if true` guards were dead code).
func (o BackgroundImage) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"imageUrl":           o.ImageUrl,
		"backgroundSize":     o.BackgroundSize,
		"backgroundPosition": o.BackgroundPosition,
	}
	return json.Marshal(toSerialize)
}
type NullableBackgroundImage struct {
value *BackgroundImage
isSet bool
}
func (v NullableBackgroundImage) Get() *BackgroundImage {
return v.value
}
func (v *NullableBackgroundImage) Set(val *BackgroundImage) {
v.value = val
v.isSet = true
}
func (v NullableBackgroundImage) IsSet() bool {
return v.isSet
}
func (v *NullableBackgroundImage) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableBackgroundImage(val *BackgroundImage) *NullableBackgroundImage {
return &NullableBackgroundImage{value: val, isSet: true}
}
func (v NullableBackgroundImage) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableBackgroundImage) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | generated/blog_posts/model_background_image.go | 0.818845 | 0.425486 | model_background_image.go | starcoder |
package binarytree
// BinaryTree represents a corresponding abstract data structure
type BinaryTree struct {
Root *Node
}
// NewBinaryTree builds a tree containing the given values, inserted in
// the order supplied. Calling it without arguments yields an empty tree.
func NewBinaryTree(nodes ...int) BinaryTree {
	tree := BinaryTree{}
	for _, v := range nodes {
		tree.Insert(v)
	}
	return tree
}
// Insert adds value to the tree, preserving the binary-search ordering.
// Values already present are ignored by the underlying insertNode.
func (t *BinaryTree) Insert(value int) {
	n := NewNode(value, nil, nil)
	if t.Root != nil {
		t.insertNode(t.Root, n)
		return
	}
	t.Root = n
}
// Contains reports whether value exists anywhere in the tree.
func (t *BinaryTree) Contains(value int) bool {
	return t.Find(value) != nil
}
// Find returns the node holding value, or nil when the value is absent
// or the tree is empty.
func (t *BinaryTree) Find(value int) *Node {
	if t.Root == nil {
		return nil
	}
	return t.search(t.Root, value)
}
// FindParent returns the parent of the node holding value. It returns
// nil when the tree is empty, when the value sits at the root (which
// has no parent), or when the value is not present.
func (t *BinaryTree) FindParent(value int) *Node {
	if t.Root == nil || t.Root.Value == value {
		return nil
	}
	// Descend with iterate: keep walking while the child on the ordered
	// side exists and does not hold the value; stop with the current node
	// once its direct child matches, or with nil on a dead end.
	return t.iterate(t.Root, func (current *Node) (*Node, bool) {
		if value < current.Value && current.Left != nil {
			if current.Left.Value == value {
				return current, false
			}
			return current.Left, true
		}
		if value > current.Value && current.Right != nil {
			if current.Right.Value == value {
				return current, false
			}
			return current.Right, true
		}
		return nil, false
	})
}
// Min returns the node holding the smallest value, or nil for an empty
// tree. The minimum is the leftmost node.
func (t *BinaryTree) Min() *Node {
	if t.Root == nil {
		return nil
	}
	return t.iterate(t.Root, func (n *Node) (*Node, bool) {
		if n.Left != nil {
			return n.Left, true
		}
		return n, false
	})
}
// Max returns the node holding the largest value, or nil for an empty
// tree. The maximum is the rightmost node.
func (t *BinaryTree) Max() *Node {
	if t.Root == nil {
		return nil
	}
	return t.iterate(t.Root, func (n *Node) (*Node, bool) {
		if n.Right != nil {
			return n.Right, true
		}
		return n, false
	})
}
// Delete removes value from the tree and reports whether a node was
// removed. By design of this tree the entire subtree rooted at the
// deleted node is discarded as well (deleting the root empties the
// whole tree).
func (t *BinaryTree) Delete(value int) bool {
	if t.Root == nil {
		return false
	}
	if t.Root.Value == value {
		t.Root = nil
		return true
	}
	parent := t.FindParent(value)
	if parent == nil {
		return false
	}
	// Detach only the child that holds the target value. The previous
	// implementation lacked the else branch and always cleared
	// parent.Right, silently dropping an unrelated subtree whenever a
	// left child was deleted.
	if parent.Value > value {
		parent.Left = nil
	} else {
		parent.Right = nil
	}
	return true
}
// iterate repeatedly applies fn starting from current. fn returns the
// next node to visit and whether to keep walking; the traversal stops
// when fn signals a halt or yields nil, and the last node fn returned
// becomes the result.
func (t BinaryTree) iterate(current *Node, fn func (node *Node) (*Node, bool)) *Node {
	next, keep := fn(current)
	if next != nil && keep == true {
		return t.iterate(next, fn)
	}
	return next
}
// search walks down from current following the ordering invariant and
// returns the node holding value, or nil when it is absent.
func (t BinaryTree) search(current *Node, value int) *Node {
	for current != nil {
		switch {
		case value > current.Value:
			current = current.Right
		case value < current.Value:
			current = current.Left
		default:
			return current
		}
	}
	return nil
}
// insertNode walks down from current and attaches node at the first
// free slot that preserves the ordering invariant. A value equal to an
// existing node matches neither branch and is silently discarded, so
// no duplicates are ever stored.
func (t BinaryTree) insertNode(current *Node, node *Node) {
	if node.Value < current.Value {
		if current.Left == nil {
			current.Left = node
		} else {
			t.insertNode(current.Left, node)
		}
	} else if node.Value > current.Value {
		if current.Right == nil {
			current.Right = node
		} else {
			t.insertNode(current.Right, node)
		}
	}
}
package pure
import (
"errors"
"fmt"
"strconv"
"github.com/benthosdev/benthos/v4/internal/batch/policy"
"github.com/benthosdev/benthos/v4/internal/bundle"
"github.com/benthosdev/benthos/v4/internal/component/input"
iprocessor "github.com/benthosdev/benthos/v4/internal/component/processor"
"github.com/benthosdev/benthos/v4/internal/docs"
oinput "github.com/benthosdev/benthos/v4/internal/old/input"
)
var (
// ErrBrokerNoInputs is returned when creating a broker with zero inputs.
ErrBrokerNoInputs = errors.New("attempting to create broker input type with no inputs")
)
func init() {
err := bundle.AllInputs.Add(newBrokerInput, docs.ComponentSpec{
Name: "broker",
Summary: `
Allows you to combine multiple inputs into a single stream of data, where each input will be read in parallel.`,
Description: `
A broker type is configured with its own list of input configurations and a field to specify how many copies of the list of inputs should be created.
Adding more input types allows you to combine streams from multiple sources into one. For example, reading from both RabbitMQ and Kafka:
` + "```yaml" + `
input:
broker:
copies: 1
inputs:
- amqp_0_9:
urls:
- amqp://guest:guest@localhost:5672/
consumer_tag: benthos-consumer
queue: benthos-queue
# Optional list of input specific processing steps
processors:
- bloblang: |
root.message = this
root.meta.link_count = this.links.length()
root.user.age = this.user.age.number()
- kafka:
addresses:
- localhost:9092
client_id: benthos_kafka_input
consumer_group: benthos_consumer_group
topics: [ benthos_stream:0 ]
` + "```" + `
If the number of copies is greater than zero the list will be copied that number
of times. For example, if your inputs were of type foo and bar, with 'copies'
set to '2', you would end up with two 'foo' inputs and two 'bar' inputs.
### Batching
It's possible to configure a [batch policy](/docs/configuration/batching#batch-policy)
with a broker using the ` + "`batching`" + ` fields. When doing this the feeds
from all child inputs are combined. Some inputs do not support broker based
batching and specify this in their documentation.
### Processors
It is possible to configure [processors](/docs/components/processors/about) at
the broker level, where they will be applied to _all_ child inputs, as well as
on the individual child inputs. If you have processors at both the broker level
_and_ on child inputs then the broker processors will be applied _after_ the
child nodes processors.`,
Categories: []string{
"Utility",
},
Config: docs.FieldComponent().WithChildren(
docs.FieldInt("copies", "Whatever is specified within `inputs` will be created this many times.").Advanced().HasDefault(1),
docs.FieldInput("inputs", "A list of inputs to create.").Array().HasDefault([]interface{}{}),
policy.FieldSpec(),
),
})
if err != nil {
panic(err)
}
}
func newBrokerInput(conf oinput.Config, mgr bundle.NewManagement, pipelines ...iprocessor.PipelineConstructorFunc) (input.Streamed, error) {
pipelines = oinput.AppendProcessorsFromConfig(conf, mgr, pipelines...)
lInputs := len(conf.Broker.Inputs) * conf.Broker.Copies
if lInputs <= 0 {
return nil, ErrBrokerNoInputs
}
var err error
var b input.Streamed
if lInputs == 1 {
if b, err = mgr.NewInput(conf.Broker.Inputs[0], pipelines...); err != nil {
return nil, err
}
} else {
inputs := make([]input.Streamed, lInputs)
for j := 0; j < conf.Broker.Copies; j++ {
for i, iConf := range conf.Broker.Inputs {
iMgr := mgr.IntoPath("broker", "inputs", strconv.Itoa(i)).(bundle.NewManagement)
inputs[len(conf.Broker.Inputs)*j+i], err = iMgr.NewInput(iConf, pipelines...)
if err != nil {
return nil, err
}
}
}
if b, err = newFanInInputBroker(inputs); err != nil {
return nil, err
}
}
if conf.Broker.Batching.IsNoop() {
return b, nil
}
bMgr := mgr.IntoPath("broker", "batching")
policy, err := policy.New(conf.Broker.Batching, bMgr)
if err != nil {
return nil, fmt.Errorf("failed to construct batch policy: %v", err)
}
return oinput.NewBatcher(policy, b, mgr.Logger(), mgr.Metrics()), nil
} | internal/impl/pure/input_broker.go | 0.694199 | 0.61173 | input_broker.go | starcoder |
package test_decl
func assert(want int, act int, code string)
func println(format string)
func strcmp(s1 string, s2 string) int
var g1, g2, g3 bool
var g4, g5, g6 = 2.0, 8, "foo"
var (
g7 int
g8, g9, g10 = 2.0, 3.0, "bar"
)
func main() {
var x1 byte
assert(1, Sizeof(x1), "var x1 byte; Sizeof(x1)")
var x2 int16
assert(2, Sizeof(x2), "var x2 int16; Sizeof(x2)")
var x3 int
assert(4, Sizeof(x3), "var x3 int; Sizeof(x3)")
var x4 int64
assert(8, Sizeof(x4), "var x4 int64; Sizeof(x4)")
var x5 bool = 0
assert(0, x5, "var x5 bool=0; x5")
var x6 bool = 1
assert(1, x6, "var x6 bool=1; x6")
var x7 bool = 2
assert(1, x7, "var x7 bool=2; x7")
assert(1, bool(1), "bool(1)")
assert(1, bool(2), "bool(2)")
assert(0, bool(byte(256)), "bool(byte(256))")
var x8, x9 int
assert(0, x8, "x8")
assert(4, Sizeof(x8), "Sizeof(x8)")
assert(0, x9, "x9")
var x10, x11, x12, x13 int = 1, 2, 3, 4
assert(1, x10, "x10")
assert(4, Sizeof(x10), "Sizeof(x10)")
assert(2, x11, "x11")
assert(3, x12, "x12")
assert(4, x13, "x13")
var x14, x15, x16, x17 string = "1", "2", "3", "4"
assert(8, Sizeof(x14), "Sizeof(x14)")
assert(0, strcmp(x14, "1"), "strcmp(x14, \"1\")")
assert(0, strcmp(x15, "2"), "strcmp(x15, \"2\")")
assert(0, strcmp(x16, "3"), "strcmp(x16, \"3\")")
assert(0, strcmp(x17, "4"), "strcmp(x17, \"4\")")
x18, x19, x20 := 1, 2, 3
assert(1, x18, "x18")
assert(2, x19, "x19")
assert(3, x20, "x20")
x21, x22, x23, x24 := "1", "2", "3", "4"
assert(8, Sizeof(x21), "Sizeof(x21)")
assert(0, strcmp(x21, "1"), "strcmp(x21, \"1\")")
assert(0, strcmp(x22, "2"), "strcmp(x22, \"2\")")
assert(0, strcmp(x23, "3"), "strcmp(x23, \"3\")")
assert(0, strcmp(x24, "4"), "strcmp(x24, \"4\")")
var (
i25 int
u25, v25, s25 = 2.0, 3.0, "bar"
)
assert(0, i25, "i25")
assert(2.0, u25, "u25")
assert(3.0, v25, "v25")
assert(0, strcmp(s25, "bar"), "strcmp(s25, \"bar\")")
assert(0, g1, "g1")
assert(0, g2, "g2")
assert(0, g3, "g3")
assert(2.0, g4, "g4")
assert(8, g5, "g5")
assert(0, strcmp(g6, "foo"), "strcmp(g6, \"foo\")")
assert(0, g7, "g7")
assert(2.0, g8, "g8")
assert(3.0, g9, "g9")
assert(0, strcmp(g10, "bar"), "strcmp(g10, \"bar\")")
println("OK")
} | testdata/esc/decl.go | 0.561936 | 0.465813 | decl.go | starcoder |
package bild
import (
"image"
"image/color"
"math"
)
// Invert returns a negated version of the image: each color channel
// value v becomes 255-v while alpha is preserved.
func Invert(src image.Image) *image.RGBA {
	return apply(src, func(c color.RGBA) color.RGBA {
		return color.RGBA{R: 255 - c.R, G: 255 - c.G, B: 255 - c.B, A: c.A}
	})
}
// Grayscale returns a copy of the image in grayscale, weighting the
// channels as 0.3R + 0.6G + 0.1B (a rough perceived-luminance
// heuristic) and rounding the result up. Alpha is preserved.
func Grayscale(src image.Image) *image.RGBA {
	fn := func(c color.RGBA) color.RGBA {
		v := 0.3*float64(c.R) + 0.6*float64(c.G) + 0.1*float64(c.B)
		// Ceil then clamp keeps the value a valid 8-bit channel.
		result := uint8(clampFloat64(math.Ceil(v), 0, 255))
		return color.RGBA{
			result,
			result,
			result,
			c.A}
	}
	img := apply(src, fn)
	return img
}
// EdgeDetection returns a copy of the image with its edges highlighted.
// It convolves with a (2*radius+1)^2 kernel whose center weight is
// n*n-1 and whose other weights are all -1 — a high-pass filter where
// flat regions cancel to zero and discontinuities are amplified.
// A radius <= 0 returns an unmodified copy.
func EdgeDetection(src image.Image, radius float64) *image.RGBA {
	if radius <= 0 {
		return CloneAsRGBA(src)
	}
	length := int(math.Ceil(2*radius + 1))
	k := NewKernel(length)
	for x := 0; x < length; x++ {
		for y := 0; y < length; y++ {
			v := -1.0
			if x == length/2 && y == length/2 {
				// Center weight balances the surrounding -1s so the
				// kernel sums to zero.
				v = float64(length*length) - 1
			}
			k.Matrix[y*length+x] = v
		}
	}
	return Convolve(src, k, &ConvolutionOptions{Bias: 0, Wrap: false, CarryAlpha: true})
}
// Emboss returns a copy of the image in which each pixel has been
// replaced either by a highlight or a shadow representation, using a
// 3x3 diagonal-gradient kernel. The bias of 128 shifts the zero
// response of flat areas to mid-grey.
func Emboss(src image.Image) *image.RGBA {
	k := Kernel{[]float64{
		-1, -1, 0,
		-1, 0, 1,
		0, 1, 1,
	}, 3}
	return Convolve(src, &k, &ConvolutionOptions{Bias: 128, Wrap: false, CarryAlpha: true})
}
// Sharpen returns a sharpened copy of the image using the classic 3x3
// sharpening kernel (center 5, cross -1): equivalent to the original
// image plus its Laplacian edge response.
func Sharpen(src image.Image) *image.RGBA {
	k := Kernel{[]float64{
		0, -1, 0,
		-1, 5, -1,
		0, -1, 0,
	}, 3}
	return Convolve(src, &k, &ConvolutionOptions{Bias: 0, Wrap: false, CarryAlpha: true})
}
// Sobel returns an image emphasising edges using an approximation to
// the Sobel-Feldman operator: horizontal and vertical gradient kernels
// are applied separately and combined. Note the combination is the sum
// of squared responses (Gx^2 + Gy^2), not the usual square root of it.
func Sobel(src image.Image) *image.RGBA {
	hk := Kernel{[]float64{
		1, 2, 1,
		0, 0, 0,
		-1, -2, -1,
	}, 3}
	vk := Kernel{[]float64{
		-1, 0, 1,
		-2, 0, 2,
		-1, 0, 1,
	}, 3}
	vSobel := Convolve(src, &vk, &ConvolutionOptions{Bias: 0, Wrap: false, CarryAlpha: true})
	hSobel := Convolve(src, &hk, &ConvolutionOptions{Bias: 0, Wrap: false, CarryAlpha: true})
	return Add(Multiply(vSobel, vSobel), Multiply(hSobel, hSobel))
}
// Median returns a new image in which each pixel is the median of it's neighbors.
// Size sets the amount of neighbors to be searched.
func Median(img image.Image, size int) *image.RGBA {
bounds := img.Bounds()
src := CloneAsRGBA(img)
if size <= 0 {
return src
}
dst := image.NewRGBA(bounds)
w, h := bounds.Max.X, bounds.Max.Y
neighborsCount := size * size
parallelize(h, func(start, end int) {
for x := 0; x < w; x++ {
for y := start; y < end; y++ {
neighbors := make([]color.RGBA, neighborsCount)
i := 0
for kx := 0; kx < size; kx++ {
for ky := 0; ky < size; ky++ {
ix := (x - size/2 + kx + w) % (w)
iy := (y - size/2 + ky + h) % h
ipos := iy*dst.Stride + ix*4
neighbors[i] = color.RGBA{
R: src.Pix[ipos+0],
G: src.Pix[ipos+1],
B: src.Pix[ipos+2],
A: src.Pix[ipos+3],
}
i++
}
}
quicksortRGBA(neighbors, 0, neighborsCount-1)
median := neighbors[neighborsCount/2]
pos := y*dst.Stride + x*4
dst.Pix[pos+0] = median.R
dst.Pix[pos+1] = median.G
dst.Pix[pos+2] = median.B
dst.Pix[pos+3] = median.A
}
}
})
return dst
} | effects.go | 0.900139 | 0.624007 | effects.go | starcoder |
package table
import (
"fmt"
"github.com/nboughton/rollt"
)
// OneRoll represents the oft used one-roll systems spread throughout SWN
type OneRoll struct {
D4 rollt.Able
D6 rollt.Able
D8 rollt.Able
D10 rollt.Able
D12 rollt.Able
D20 rollt.Able
}
// Roll performs all rolls for a OneRoll and returns the label/result
// pairs in ascending die order (d4 through d20).
func (o OneRoll) Roll() [][]string {
	dice := []rollt.Able{o.D4, o.D6, o.D8, o.D10, o.D12, o.D20}
	results := make([][]string, 0, len(dice))
	for _, d := range dice {
		results = append(results, []string{d.Label(), d.Roll()})
	}
	return results
}
// String renders every table of the OneRoll, one per line, in
// ascending die order.
func (o OneRoll) String() string {
	return fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n%s\n", o.D4, o.D6, o.D8, o.D10, o.D12, o.D20)
}
// Attributes table to roll on
var Attributes = OneRoll{
D4: rollt.List{
Name: "Rule of Law",
Items: []string{
"Perilous",
"Mostly Unsafe",
"Mostly Safe",
"Secure",
},
},
D6: rollt.List{
Name: "Population",
Items: []string{
"Established monoculture",
"Cosmopolitan ",
"Criminal/Pirates/Rebels",
"Tribal/Regressive/Feral",
"Outpost - Military/ Research/ Commercial",
"Colonists",
},
},
D8: rollt.List{
Name: "Major Terrain Types",
Items: []string{
"Forest/Jungle",
"Mountain/Hills",
"Swamp",
"Garden World",
"Desert/Frozen/Toxic - Wasteland",
"Underground Caves/Mines",
"Water World",
"Gas Giant",
},
},
D10: rollt.List{
Name: "Assets",
Items: []string{
"High-grade resource",
"Tourism - unique culture or nature",
"Unique life-forms draw researchers",
"Alien Ruins and Tech",
"A delicacy is harvested here",
"An renowned place of learning",
"A life-giving atmosphere/ spring/ resource that can’t be exported",
"Dangerous fauna for the hunt",
"A strategic position",
"Massive manufactories",
},
},
D12: rollt.List{
Name: "Cultures",
Items: []string{
"Xenophobic",
"Mercantile",
"Hedonistic",
"Spartan",
"Inexplicably alien",
"Friendly and Welcoming",
"Non-Organic",
"Religious Zealots",
"Hivemind",
"Aesthetes",
"Extreme Pride",
"Cold/calculating/no emotion",
},
},
D20: rollt.List{
Name: "Adventure Hooks",
Items: []string{
"Quarantine",
"Refugees",
"Civil War",
"Invasion",
"Tyranny/ Exploitation",
"Scarcity",
"Gold rush",
"Impending disaster",
"A wild/exotic festival",
"Abandoned",
"Momentus discovery",
"Freak weather",
"Battleground",
"Pilgrimage",
"Hostile flora/fauna",
"Terrorism",
"Corporate Takeover ",
"Corruption",
"A new religion",
"It’s so nice here… why leave? Stay.",
},
},
}
// Names table to roll on
var Names = rollt.List{
Name: "Name",
Items: []string{
"<NAME> ",
"<NAME>ellowstone ",
"<NAME> ",
"<NAME> ",
"Astarte ",
"I'Tedai ",
"Chi-You ",
"Phoebe ",
"Ch'Deni ",
"Kazi",
"Hezitis ",
"Giveria ",
"Cholion ",
"Nulrade ",
"Duwei ",
"Leanus ",
"Dorscind's World ",
"Goiturn ",
"Bryke ",
"1A<NAME>",
"Dyton",
"Sihnon",
"<NAME>",
"Ariel",
"Londinium",
"<NAME>",
"Santo",
"Triumph",
"<NAME>",
"Hera",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>",
"<NAME>II",
"<NAME>",
"Hrane",
"Siono",
"Kote",
"Gerte",
"Yedin",
"Palmary",
"Zathru",
"Axus",
"Calfuu",
"Kidia",
"Lungor",
"Munei",
"Ekak",
"Otaw",
"Olok",
"Anein",
"Lonei",
"Tsunei",
"Eytaw",
"Malu",
"Sihnon",
"New Melbourne",
"Bernadette",
"New Canaan",
"Lazarus",
"Parth",
"Paquin",
"St. Albans",
"<NAME>",
"Maenali VI",
"Cassiopeiae IV",
"<NAME>",
"Zorgi III",
"Regulus Prime",
"Pegasi III",
"Pleione IV",
"<NAME>",
"Sagittae VI",
"Gana",
"Nara",
"Beyscrim",
"Bora",
"Anosh",
"Aros",
"Myto",
"Parmea",
"Ablis",
"Tala",
"Chostrastea",
"Sevozuno",
"Kallilia",
"Imiq",
"Roabos",
"Euwei",
"Lluxetis",
"Vaipra",
"Comia UT5",
"Thonoe 142",
"<NAME>",
"Thalidae",
},
} | table/table.go | 0.530601 | 0.41561 | table.go | starcoder |
package geoindex
var (
minLon = -180.0
minLat = -90.0
latDegreeLength = Km(111.0)
lonDegreeLength = Km(85.0)
)
// Meters expresses a distance in meters.
type Meters float64

// Km converts a distance given in kilometers into Meters.
func Km(km float64) Meters {
	return Meters(1000 * km)
}

// Meter wraps a raw meter count in the Meters type.
func Meter(meters float64) Meters {
	return Meters(meters)
}
// cell is an integer grid coordinate: x is derived from latitude and y
// from longitude (see cellOf).
type cell struct {
	x int
	y int
}
// cellOf maps a geographic point to its grid cell at the given
// resolution. Coordinates are first shifted to be non-negative
// (relative to -90 lat / -180 lon) and scaled by the approximate
// metric length of one degree. NOTE(review): lonDegreeLength is a
// fixed 85km — presumably a mid-latitude average, so cell widths are
// only approximate away from that latitude.
func cellOf(point Point, resolution Meters) cell {
	x := int((-minLat + point.Lat()) * float64(latDegreeLength) / float64(resolution))
	y := int((-minLon + point.Lon()) * float64(lonDegreeLength) / float64(resolution))
	return cell{x, y}
}
// geoIndex buckets points into square cells of a fixed resolution and
// stores one opaque entry (produced by newEntry) per occupied cell.
type geoIndex struct {
	resolution Meters
	index map[cell]interface{}
	newEntry func() interface{}
}

// Creates new geo index with resolution a function that returns a new entry that is stored in each cell.
func newGeoIndex(resolution Meters, newEntry func() interface{}) *geoIndex {
	return &geoIndex{resolution, make(map[cell]interface{}), newEntry}
}
// Clone returns a deep copy of the index: the cell map is rebuilt and
// every per-cell set is cloned, while resolution and the entry factory
// are shared. It panics if a stored entry is not a set.
func (i *geoIndex) Clone() *geoIndex {
	clone := &geoIndex{
		resolution: i.resolution,
		index: make(map[cell]interface{}, len(i.index)),
		newEntry: i.newEntry,
	}
	for k, v := range i.index {
		set, ok := v.(set)
		if !ok {
			panic("Cannot cast value to set")
		}
		clone.index[k] = set.Clone()
	}
	return clone
}
// AddEntryAt returns the entry stored at the cell covering point,
// first creating and storing a fresh entry if the cell was empty.
func (geoIndex *geoIndex) AddEntryAt(point Point) interface{} {
	c := cellOf(point, geoIndex.resolution)
	entry, ok := geoIndex.index[c]
	if !ok {
		entry = geoIndex.newEntry()
		geoIndex.index[c] = entry
	}
	return entry
}
// GetEntryAt returns the entry at the cell covering point. When the
// cell is empty a fresh, unstored entry is returned and the index is
// left unchanged.
func (geoIndex *geoIndex) GetEntryAt(point Point) interface{} {
	c := cellOf(point, geoIndex.resolution)
	if entries, ok := geoIndex.index[c]; ok {
		return entries
	}
	return geoIndex.newEntry()
}
// Range returns the entries of every populated cell in the rectangle
// spanned by topLeft and bottomRight. Cell x grows with latitude, so
// the minimum x comes from bottomRight (lower latitude) and the
// maximum x from topLeft.
func (geoIndex *geoIndex) Range(topLeft Point, bottomRight Point) []interface{} {
	topLeftIndex := cellOf(topLeft, geoIndex.resolution)
	bottomRightIndex := cellOf(bottomRight, geoIndex.resolution)
	return geoIndex.get(bottomRightIndex.x, topLeftIndex.x, topLeftIndex.y, bottomRightIndex.y)
}
// get collects the entries of every populated cell in the inclusive
// ranges [minx, maxx] x [miny, maxy]. Empty cells are skipped; the
// result is never nil.
func (geoIndex *geoIndex) get(minx int, maxx int, miny int, maxy int) []interface{} {
	entries := []interface{}{}
	for x := minx; x <= maxx; x++ {
		for y := miny; y <= maxy; y++ {
			entry, ok := geoIndex.index[cell{x, y}]
			if !ok {
				continue
			}
			entries = append(entries, entry)
		}
	}
	return entries
}
// getCells enumerates every cell coordinate in the inclusive ranges
// [minx, maxx] x [miny, maxy], whether populated or not, in row-major
// (x-outer) order.
func (g *geoIndex) getCells(minx int, maxx int, miny int, maxy int) []cell {
	indices := make([]cell, 0)
	for x := minx; x <= maxx; x++ {
		for y := miny; y <= maxy; y++ {
			indices = append(indices, cell{x, y})
		}
	}
	return indices
}
package mon
// SLOFreshnessTiles freshness SLO dashboard JSON template
const SLOFreshnessTiles = `
[
{
"height": 2,
"width": 4,
"widget": {
"title": "<scope> <flow> <sloText> < <thresholdText>",
"text": {
"content": "**Freshness**: <sloText> of <scope> configurations from <flow> flow over the last 28 days should be analyzed in less than <thresholdText>.",
"format": "MARKDOWN"
}
}
},
{
"height": 2,
"width": 3,
"xPos": 4,
"widget": {
"title": "SLI vs SLO",
"scorecard": {
"gaugeView": {
"lowerBound": <lowerBound>,
"upperBound": 1.0
},
"thresholds": [
{
"color": "RED",
"direction": "BELOW",
"value": <slo>
}
],
"timeSeriesQuery": {
"timeSeriesQueryLanguage": "fetch cloud_function::logging.googleapis.com/user/ram_latency_e2e\n| filter metric.microservice_name == 'stream2bq'\n| filter metric.origin == '<origin>'\n| align delta(28d)\n| every 28d\n| within 28d\n| group_by [metric.microservice_name]\n| fraction_less_than_from <thresholdSeconds>"
}
}
}
},
{
"height": 2,
"width": 3,
"xPos": 7,
"widget": {
"title": "Remaining ERROR BUDGET",
"scorecard": {
"thresholds": [
{
"color": "YELLOW",
"direction": "BELOW",
"value": 0.1
}
],
"timeSeriesQuery": {
"timeSeriesQueryLanguage": "fetch cloud_function::logging.googleapis.com/user/ram_latency_e2e\n| filter metric.microservice_name == 'stream2bq'\n| filter metric.origin == '<origin>'\n| align delta(28d)\n| every 28d\n| within 28d\n| group_by [metric.microservice_name]\n| fraction_less_than_from <thresholdSeconds>\n| neg\n| add 1\n| div 0.01\n| neg\n| add 1"
}
}
}
},
{
"height": 2,
"width": 2,
"xPos": 10,
"widget": {
"title": "Configurations analyzed in 28 days",
"scorecard": {
"sparkChartView": {
"sparkChartType": "SPARK_LINE"
},
"timeSeriesQuery": {
"timeSeriesQueryLanguage": "fetch cloud_function::logging.googleapis.com/user/ram_latency_e2e\n| filter metric.microservice_name == 'stream2bq'\n| filter metric.origin == '<origin>'\n| align delta(28d)\n| every 28d\n| within 28d\n| group_by [metric.microservice_name]\n| count_from"
}
}
}
},
{
"height": 9,
"width": 3,
"xPos": 9,
"yPos": 2,
"widget": {
"title": "Last 28days heatmap",
"xyChart": {
"chartOptions": {
"mode": "COLOR"
},
"dataSets": [
{
"plotType": "HEATMAP",
"timeSeriesQuery": {
"timeSeriesQueryLanguage": "fetch cloud_function::logging.googleapis.com/user/ram_latency_e2e\n| filter (metric.microservice_name == 'stream2bq')\n| filter metric.origin == '<origin>'\n| align delta(28d)\n| every 28d\n| within 28d\n| group_by [metric.microservice_name]\n| graph_period 28d"
}
}
],
"timeshiftDuration": "0s",
"yAxis": {
"label": "y1Axis",
"scale": "LINEAR"
}
}
}
},
{
"height": 3,
"width": 9,
"yPos": 2,
"widget": {
"title": "Error budget burnrate on 7d sliding windows - Email when > 1.5",
"xyChart": {
"chartOptions": {
"mode": "COLOR"
},
"dataSets": [
{
"plotType": "LINE",
"timeSeriesQuery": {
"timeSeriesQueryLanguage": "fetch cloud_function::logging.googleapis.com/user/ram_latency_e2e\n|filter metric.microservice_name == 'stream2bq'\n| filter metric.origin == '<origin>'\n| align delta(1m)\n| every 1m\n| group_by [metric.microservice_name], sliding(7d)\n| fraction_less_than_from <thresholdSeconds>\n| neg\n| add 1\n| div 0.01\n| cast_units \"1\""
}
}
],
"thresholds": [
{
"value": 1.5
}
],
"timeshiftDuration": "0s",
"yAxis": {
"label": "y1Axis",
"scale": "LINEAR"
}
}
}
},
{
"height": 3,
"width": 9,
"yPos": 5,
"widget": {
"title": "Error budget burnrate on 12h sliding windows - Alert when > 3",
"xyChart": {
"chartOptions": {
"mode": "COLOR"
},
"dataSets": [
{
"plotType": "LINE",
"timeSeriesQuery": {
"timeSeriesQueryLanguage": "fetch cloud_function::logging.googleapis.com/user/ram_latency_e2e\n|filter metric.microservice_name == 'stream2bq'\n| filter metric.origin == '<origin>'\n| align delta(1m)\n| every 1m\n| group_by [metric.microservice_name], sliding(12h)\n| fraction_less_than_from <thresholdSeconds>\n| neg\n| add 1\n| div 0.01\n| cast_units \"1\""
}
}
],
"thresholds": [
{
"value": 3.0
}
],
"timeshiftDuration": "0s",
"yAxis": {
"label": "y1Axis",
"scale": "LINEAR"
}
}
}
},
{
"height": 3,
"width": 9,
"yPos": 8,
"widget": {
"title": "Error budget burnrate on 1h sliding windows - Alert when > 9",
"xyChart": {
"chartOptions": {
"mode": "COLOR"
},
"dataSets": [
{
"plotType": "LINE",
"timeSeriesQuery": {
"timeSeriesQueryLanguage": "fetch cloud_function::logging.googleapis.com/user/ram_latency_e2e\n| filter (metric.microservice_name == 'stream2bq')\n| filter metric.origin == '<origin>'\n| align delta(28d)\n| every 28d\n| within 28d\n| group_by [metric.microservice_name]\n| graph_period 28d"
}
}
],
"thresholds": [
{
"value": 9.0
}
],
"timeshiftDuration": "0s",
"yAxis": {
"label": "y1Axis",
"scale": "LINEAR"
}
}
}
}
]
` | utilities/mon/const_dashboardslofreshness.go | 0.617513 | 0.466663 | const_dashboardslofreshness.go | starcoder |
package ldraw
import (
"fmt"
"log"
"math"
"strconv"
)
// TransMatrix is a 4x4 homogeneous transformation matrix stored as a flat
// array of 16 float64s, laid out as shown in the InitMatrix diagram below.
type TransMatrix [16]float64

/* InitMatrix is the identity transformation (no rotation, no translation).
   Storage layout (left) vs. the conventional row-major matrix (right):
   / a d g 0 \          / a b c x \
   | b e h 0 |          | d e f y |
   | c f i 0 |          | g h i z |
   \ x y z 1 /          \ 0 0 0 1 /
*/
var InitMatrix = &TransMatrix{
	1.0, 0.0, 0.0, 0.0,
	0.0, 1.0, 0.0, 0.0,
	0.0, 0.0, 1.0, 0.0,
	0.0, 0.0, 0.0, 1.0,
}

// DefaultXMatrix is the default LDraw (.ldr) rotation matrix — the 3x3
// identity — used for a part in its standard orientation.
const DefaultXMatrix = "1 0 0 0 1 0 0 0 1"
// str2F64 converts s to a float64, yielding 0 when s does not parse.
// Parse errors are deliberately ignored to keep call sites inline.
func str2F64(s string) float64 {
	f, _ := strconv.ParseFloat(s, 64)
	return f
}
// NewTransMatrixFromStrs builds a TransMatrix from the 12 numeric fields of
// an LDraw type-1 line (x y z a b c d e f g h i). The process is aborted via
// log.Fatalf when the field count is wrong.
func NewTransMatrixFromStrs(d []string) *TransMatrix {
	if len(d) != 12 {
		log.Fatalf("string not match:%v", d)
	}
	x, y, z := str2F64(d[0]), str2F64(d[1]), str2F64(d[2])
	return &TransMatrix{
		str2F64(d[3]), str2F64(d[6]), str2F64(d[9]), 0,
		str2F64(d[4]), str2F64(d[7]), str2F64(d[10]), 0,
		str2F64(d[5]), str2F64(d[8]), str2F64(d[11]), 0,
		x, y, z, 1,
	}
}
// NewVectorsFromLine parses vCount vectors (three fields each) from the line
// fields d; it returns nil when d holds too few fields.
func NewVectorsFromLine(d []string, vCount int) []*TransVector {
	need := 3 * vCount
	if len(d) < need {
		return nil
	}
	vectors := make([]*TransVector, 0, vCount)
	for i := 0; i < need; i += 3 {
		v := &TransVector{str2F64(d[i]), str2F64(d[i+1]), str2F64(d[i+2])}
		vectors = append(vectors, v)
	}
	return vectors
}
// MultipleMatrix returns the 4x4 matrix product l*r in the package's flat
// column layout. See
// https://www.migenius.com/articles/3d-transformations-part1-matrices and
// https://en.wikipedia.org/wiki/Matrix_multiplication
func MultipleMatrix(l, r *TransMatrix) *TransMatrix {
	result := &TransMatrix{}
	for col := 0; col < 4; col++ {
		for row := 0; row < 4; row++ {
			// Accumulate left-to-right in the same order as the fully
			// unrolled form so the floating-point result is bit-identical.
			sum := l[row] * r[col*4]
			for k := 1; k < 4; k++ {
				sum += l[k*4+row] * r[col*4+k]
			}
			result[col*4+row] = sum
		}
	}
	return result
}
// TransVector is a 3D point/vector as [x, y, z] float64 components.
type TransVector [3]float64
// MultipleVector applies the transformation m to each vector in vs and
// returns the transformed vectors. See https://www.ldraw.org/article/218.html
func MultipleVector(m *TransMatrix, vs ...*TransVector) []*TransVector {
	results := make([]*TransVector, 0, len(vs))
	for _, v := range vs {
		// u' = a*u + b*v + c*w + x (and similarly for v' and w').
		results = append(results, &TransVector{
			m[0]*v[0] + m[4]*v[1] + m[8]*v[2] + m[12],
			m[1]*v[0] + m[5]*v[1] + m[9]*v[2] + m[13],
			m[2]*v[0] + m[6]*v[1] + m[10]*v[2] + m[14],
		})
	}
	return results
}
// String renders the vector as "[   x.xx,   y.yy,   z.zz]" with two decimal
// places per component.
func (tv *TransVector) String() string {
	x, y, z := tv[0], tv[1], tv[2]
	return fmt.Sprintf("[%7.2f,%7.2f,%7.2f]", x, y, z)
}
// BoundingBox is an axis-aligned box tracked as its minimum and maximum
// corner vectors.
type BoundingBox struct {
	Min, Max *TransVector
}

// NewBoundingBox returns an "empty" box whose corners are set to the +/-Inf
// sentinels, so the first MergeMinMaxVector call adopts the merged point.
func NewBoundingBox() *BoundingBox {
	return &BoundingBox{
		Min: &TransVector{math.Inf(1), math.Inf(1), math.Inf(1)},
		Max: &TransVector{math.Inf(-1), math.Inf(-1), math.Inf(-1)},
	}
}
// TransEmpty normalizes a never-merged box: if the bounds still hold the
// +/-Inf sentinels from NewBoundingBox it returns an all-zero box, otherwise
// it returns the receiver unchanged.
func (bb *BoundingBox) TransEmpty() *BoundingBox {
	// Fix: the original compared bb.Max[0] three times, so a box whose Max
	// Y/Z components were never updated could be misclassified.
	if bb.Min[0] == math.Inf(1) && bb.Min[1] == math.Inf(1) && bb.Min[2] == math.Inf(1) &&
		bb.Max[0] == math.Inf(-1) && bb.Max[1] == math.Inf(-1) && bb.Max[2] == math.Inf(-1) {
		return &BoundingBox{Min: &TransVector{0, 0, 0}, Max: &TransVector{0, 0, 0}}
	}
	return bb
}
// ToGob flattens the box into a plain [min, max] pair of coordinate triples,
// suitable for gob/value serialization without pointers.
func (bb *BoundingBox) ToGob() [2][3]float64 {
	return [2][3]float64{{bb.Min[0], bb.Min[1], bb.Min[2]}, {bb.Max[0], bb.Max[1], bb.Max[2]}}
}
// MergeMinMaxVector grows the box to include every given point, updating the
// Min and Max corners component-wise.
func (bb *BoundingBox) MergeMinMaxVector(news ...*TransVector) {
	for _, p := range news {
		// The loop variable was renamed from `new`, which shadowed the
		// builtin; logic is otherwise unchanged.
		for i := 0; i < 3; i++ {
			if p[i] < bb.Min[i] {
				bb.Min[i] = p[i]
			}
			if p[i] > bb.Max[i] {
				bb.Max[i] = p[i]
			}
		}
	}
}
// CalcSize returns the box extents (Max - Min) per axis, in LDU.
func (bb *BoundingBox) CalcSize() [3]float64 {
	return [3]float64{
		bb.Max[0] - bb.Min[0],
		bb.Max[1] - bb.Min[1],
		bb.Max[2] - bb.Min[2],
	}
}
// CalcBrickSize returns the box extents in brick units by dividing the LDU
// size by 20 — presumably 20 LDU per stud width (LDraw convention); confirm
// before reusing elsewhere.
func (bb *BoundingBox) CalcBrickSize() [3]float64 {
	lduSize := bb.CalcSize()
	return [3]float64{lduSize[0] / 20, lduSize[1] / 20, lduSize[2] / 20}
}
// GetBoxWHTByX returns a box's width (x extent), height (z extent) and tall
// (y extent) given its [min, max] corner triples.
func GetBoxWHTByX(b [2][3]float64) (float64, float64, float64) {
	min, max := b[0], b[1]
	width := max[0] - min[0]  // x
	height := max[2] - min[2] // z
	tall := max[1] - min[1]   // y
	return width, height, tall
}
// The Carrier Package provides a framework for communicating with
// Carrier Infinity and Bryant Evolution Furnaces.
package Carrier
import (
"bytes"
"encoding/binary"
"github.com/npat-efault/crc16"
)
// Frame provides a structure to capture the Carrier framing.
// Byte offsets refer to the on-wire layout; see Docs/framing.md for details.
type Frame struct {
	Dst devAddr `json:"dest"` // Bytes 0:1 Destination Address
	Src devAddr `json:"srce"` // Bytes 2:3 Source Address
	Len uint8   // Byte 4 Data Length (payload only; the wire byte stores Len+3)
	Opr frameOpr // Byte 7 Operation
	Tab uint8   `json:"table"` // Byte 9 Table
	Row uint8   `json:"row"`   // Byte 10 Row
	Dat []byte  `json:"data"`  // Bytes 11:-2 Row contents (CRC occupies the final 2 bytes)
}
// Encode serializes the frame into the on-wire layout:
// dst(2) src(2) len(1) ?(2) op(1) ?(1) table(1) row(1) data(n) crc(2).
// The length byte carries len(Dat)+3, so the payload may hold at most 252
// bytes; larger payloads yield ERRBIGFRAME.
func (f *Frame) Encode() (a []byte, e error) {
	// Fix: the previous guard allowed up to 255 payload bytes, but the wire
	// length byte stores len(Dat)+3 and would wrap for 253-255.
	if len(f.Dat) > 252 {
		return a, ERRBIGFRAME
	}
	var b bytes.Buffer
	binary.Write(&b, binary.BigEndian, f.Dst.Encode()) // bytes 0:1 destination address
	binary.Write(&b, binary.BigEndian, f.Src.Encode()) // bytes 2:3 source address
	b.WriteByte(byte(len(f.Dat) + 3))                  // byte 4 data length (payload + op/table/row)
	b.WriteByte(0)                                     // byte 5 unknown
	b.WriteByte(0)                                     // byte 6 unknown
	b.WriteByte(f.Opr.Encode())                        // byte 7 operation
	b.WriteByte(0)                                     // byte 8 is usually 0x00
	b.WriteByte(f.Tab)                                 // byte 9 table
	b.WriteByte(f.Row)                                 // byte 10 row
	b.Write(f.Dat)                                     // row data
	b.Write(crc(b.Bytes()))                            // last 2 bytes: crc checksum
	return b.Bytes(), nil
}
// NewFrame parses a raw byte slice into a Frame after validating its CRC.
// Buffers shorter than the 13-byte minimum (11 header/row bytes + 2 CRC
// bytes) are rejected up front so the fixed-offset reads below cannot panic.
func NewFrame(buf []byte) (*Frame, error) {
	// A well-formed frame is at least header (bytes 0-10) plus 2 CRC bytes.
	if len(buf) < 13 {
		return &Frame{}, ERRBADCRC
	}
	// Validate CRC over everything except the trailing checksum.
	if !bytes.Equal(crc(buf[:len(buf)-2]), buf[len(buf)-2:]) {
		return &Frame{}, ERRBADCRC
	}
	f := &Frame{}
	// Bytes 0:1 - Destination Device Address
	f.Dst = devAddrFromUint16(binary.BigEndian.Uint16(buf[0:2]))
	// Bytes 2:3 - Source Device Address
	f.Src = devAddrFromUint16(binary.BigEndian.Uint16(buf[2:4]))
	// Byte 4 - Data Length (wire value includes the op/table/row bytes)
	f.Len = buf[4] - 0x03
	// Bytes 5:6 - Unknown
	// Byte 7 - Operation Code
	f.Opr = newFrameOpr(buf[7])
	// TODO: Add logic for pulling out register information
	// Byte 8 - Unknown
	// Byte 9 - Table Address
	f.Tab = buf[9]
	// Byte 10 - Row Address
	f.Row = buf[10]
	// Bytes 11:-2 - Row Data (absent for a minimum-length frame)
	if len(buf) > 13 {
		f.Dat = buf[11 : len(buf)-2]
	}
	// Bytes -2: - CRC Checksum (already validated above)
	return f, nil
}
func crc(b []byte) []byte {
s := crc16.New(&crc16.Conf{
Poly: 0x8005,
BitRev: true,
IniVal: 0x0,
FinVal: 0x0,
BigEnd: false,
})
s.Write(b)
return s.Sum(nil)
} | Carrier/Frame.go | 0.669313 | 0.408808 | Frame.go | starcoder |
package geojson
import "github.com/tidwall/geojson/geometry"
// Spatial is the set of geometric predicates and distance queries every
// geometry object supports: containment (Within*), intersection
// (Intersects*) and distance (Distance*) against rects, points, lines and
// polygons.
type Spatial interface {
	WithinRect(rect geometry.Rect) bool
	WithinPoint(point geometry.Point) bool
	WithinLine(line *geometry.Line) bool
	WithinPoly(poly *geometry.Poly) bool
	IntersectsRect(rect geometry.Rect) bool
	IntersectsPoint(point geometry.Point) bool
	IntersectsLine(line *geometry.Line) bool
	IntersectsPoly(poly *geometry.Poly) bool
	DistanceRect(rect geometry.Rect) float64
	DistancePoint(point geometry.Point) float64
	DistanceLine(line *geometry.Line) float64
	DistancePoly(poly *geometry.Poly) float64
}
// Compile-time assertion that every concrete geometry type in the package
// (and EmptySpatial) satisfies the Spatial interface.
var _ = []Spatial{
	&Point{}, &LineString{}, &Polygon{}, &Feature{},
	&MultiPoint{}, &MultiLineString{}, &MultiPolygon{},
	&GeometryCollection{}, &FeatureCollection{}, &Rect{},
	EmptySpatial{},
}
// EmptySpatial is a Spatial implementation representing no geometry: every
// containment and intersection predicate reports false, and every distance
// query returns 0.
type EmptySpatial struct{}

// WithinRect implements Spatial; an empty geometry is never within a rect.
func (s EmptySpatial) WithinRect(rect geometry.Rect) bool {
	return false
}

// WithinPoint implements Spatial; an empty geometry is never within a point.
func (s EmptySpatial) WithinPoint(point geometry.Point) bool {
	return false
}

// WithinLine implements Spatial; an empty geometry is never within a line.
func (s EmptySpatial) WithinLine(line *geometry.Line) bool {
	return false
}

// WithinPoly implements Spatial; an empty geometry is never within a polygon.
func (s EmptySpatial) WithinPoly(poly *geometry.Poly) bool {
	return false
}

// IntersectsRect implements Spatial; an empty geometry intersects nothing.
func (s EmptySpatial) IntersectsRect(rect geometry.Rect) bool {
	return false
}

// IntersectsPoint implements Spatial; an empty geometry intersects nothing.
func (s EmptySpatial) IntersectsPoint(point geometry.Point) bool {
	return false
}

// IntersectsLine implements Spatial; an empty geometry intersects nothing.
func (s EmptySpatial) IntersectsLine(line *geometry.Line) bool {
	return false
}

// IntersectsPoly implements Spatial; an empty geometry intersects nothing.
func (s EmptySpatial) IntersectsPoly(poly *geometry.Poly) bool {
	return false
}

// DistanceRect implements Spatial; distance from an empty geometry is 0.
func (s EmptySpatial) DistanceRect(rect geometry.Rect) float64 {
	return 0
}

// DistancePoint implements Spatial; distance from an empty geometry is 0.
func (s EmptySpatial) DistancePoint(point geometry.Point) float64 {
	return 0
}

// DistanceLine implements Spatial; distance from an empty geometry is 0.
func (s EmptySpatial) DistanceLine(line *geometry.Line) float64 {
	return 0
}

// DistancePoly implements Spatial; distance from an empty geometry is 0.
func (s EmptySpatial) DistancePoly(poly *geometry.Poly) float64 {
	return 0
}
package taxi
import (
"github.com/golang/geo/s2"
)
// IService applies business logic to the trip dataset and shapes the output.
// GetTotalTripsByStartEndDate returns the TotalTripsByDay data from the repository.
// GetAverageSpeedByDate returns the AverageSpeedByDay data from the repository.
// GetAverageFarePickUpByLocation uses the geo s2 library to transform pickup
// locations into per-cell S2idFare averages.
type IService interface {
	GetTotalTripsByStartEndDate(string, string, int) ([]TotalTripsByDay, error)
	GetAverageSpeedByDate(string, int) ([]AverageSpeedByDay, error)
	GetAverageFarePickUpByLocation(string, int, int) ([]S2idFare, error)
}
// Service is the IService implementation; it delegates data access to Repo
// and applies any transformation logic on top.
type Service struct {
	Repo Repository // data-access layer the service reads from
}
// GetTotalTripsByStartEndDate returns the TotalTripsByDay data from the
// repository for the given date range and year. No business logic is applied.
func (s Service) GetTotalTripsByStartEndDate(startDate string, endDate string, year int) ([]TotalTripsByDay, error) {
	// Propagate the repository error instead of discarding it and always
	// returning nil.
	return s.Repo.GetTotalTripsByStartEndDate(startDate, endDate, year)
}
// GetAverageSpeedByDate returns the AverageSpeedByDay data from the
// repository for the given date and year. No business logic is applied.
func (s Service) GetAverageSpeedByDate(date string, year int) ([]AverageSpeedByDay, error) {
	// Propagate the repository error instead of discarding it and always
	// returning nil.
	return s.Repo.GetAverageSpeedByDate(date, year)
}
// GetAverageFarePickUpByLocation: Uses the geo s2 library to transform data to the S2idFare result.
// Uses s2 library and region coverer to get s2id at level 16 for each location
// Aggregates the location and returns the average fare
func (s Service) GetAverageFarePickUpByLocation(date string, year int, level int) ([]S2idFare, error) {
var data []FarePickupByLocation
var fareByLocation []S2idFare
data, err := s.Repo.GetFareLocationByDate(date, year)
if data != nil && err == nil {
for i := 0; i < len(data); i++ {
// Get latlng and convert to point
latlng := s2.LatLngFromDegrees(data[i].Lat, data[i].Lng).Normalized()
point := s2.PointFromLatLng(latlng)
// Create cap from point
// Region from cap
cap := s2.CapFromPoint(point)
region := s2.Region(cap)
// Using region coverer set the level
// Use covering to get list of cellids
rc := &s2.RegionCoverer{MaxLevel: level, MinLevel: level}
cellUnion := rc.Covering(region)
// Return list of s2id and fare
for j := 0; j < len(cellUnion); j++ {
fareByLocation = append(fareByLocation, S2idFare{S2id: cellUnion[j].ToToken(), Fare: data[i].Fare})
}
}
}
// Create fares map to sum the fare for a s2id location.
fares := make(map[string]float64)
// Creates count map to count the number of s2id location.
count := make(map[string]int)
// Sum fare amount and count by the s2id location.
for i := 0; i < len(fareByLocation); i++ {
s2id := fareByLocation[i].S2id
fare := fareByLocation[i].Fare
fares[s2id] += fare
count[s2id] += 1
}
var result []S2idFare
// To get average get sum/count by s2id location
for k, v := range fares {
result = append(result, S2idFare{S2id: k, Fare: v / float64(count[k])})
}
return result, nil
} | taxi/service.go | 0.768299 | 0.478529 | service.go | starcoder |
package main
import (
"bufio"
"flag"
"fmt"
"os"
"regexp"
"strconv"
)
var (
	methodP *string // value of the -method flag, set by parseFlags
)

// parseFlags registers and parses the command-line flags.
func parseFlags() {
	methodP = flag.String("method", "all", "The method/part that should be run, valid are p1,p2 and test")
	flag.Parse()
}
// main dispatches to the requested puzzle part(s) based on the -method flag.
func main() {
	parseFlags()
	// Go switch cases do not fall through, so the explicit breaks were
	// removed; unknown values (including the advertised but unimplemented
	// "test") now produce a message instead of silently doing nothing.
	switch *methodP {
	case "all":
		fmt.Println("Silver:" + PartOne("input"))
		fmt.Println("Gold:" + PartTwo("input"))
	case "p1":
		fmt.Println("Silver:" + PartOne("input"))
	case "p2":
		fmt.Println("Gold:" + PartTwo("input"))
	default:
		fmt.Printf("unknown method %q, valid values are all, p1 and p2\n", *methodP)
	}
}
// PartOne finds the highest apex reachable by any probe launch that still
// hits the target area, scanning initial velocities in [0,300)x[0,300).
func PartOne(filename string) string {
	input := readInput(filename)
	targetZone, lowestPoint := createTargetZone(input[0])
	best := 0
	for vx := 0; vx < 300; vx++ {
		for vy := 0; vy < 300; vy++ {
			apex, hit := fireProbe(vx, vy, targetZone, lowestPoint)
			if hit && apex > best {
				best = apex
			}
		}
	}
	return strconv.Itoa(best)
}
// PartTwo counts every initial velocity in [0,300)x[-300,300) that lands the
// probe in the target area.
func PartTwo(filename string) string {
	input := readInput(filename)
	targetZone, lowestPoint := createTargetZone(input[0])
	hits := 0
	for vx := 0; vx < 300; vx++ {
		for vy := -300; vy < 300; vy++ {
			if _, hit := fireProbe(vx, vy, targetZone, lowestPoint); hit {
				hits++
			}
		}
	}
	return strconv.Itoa(hits)
}
// Point is an integer grid coordinate used as the target-zone map key.
type Point struct {
	X int
	Y int
}
// fireProbe simulates a probe launched with the given initial velocity and
// reports the highest y reached plus whether it ever landed in targetGrid.
// Simulation rules: position advances by velocity each step, drag pulls the
// x velocity one unit toward zero, and gravity decreases the y velocity by
// one. The probe is abandoned once it falls below the target's lowest point.
func fireProbe(xVelo, yVelo int, targetGrid map[Point]bool, lowestPointInTarget int) (int, bool) {
	x, y := 0, 0
	peak := 0
	for {
		// Advance one simulation step.
		x += xVelo
		y += yVelo
		switch {
		case xVelo > 0:
			xVelo-- // drag
		case xVelo < 0:
			xVelo++ // drag
		}
		yVelo-- // gravity
		if y > peak {
			peak = y
		}
		if targetGrid[Point{x, y}] {
			return peak, true
		}
		if y < lowestPointInTarget {
			// Below the target: it can never be hit now.
			return peak, false
		}
	}
}
// Read data from input.txt
// Return the string, so that we can deal with it however
func readInput(filename string) []string {
var input []string
f, _ := os.Open(filename + ".txt")
scanner := bufio.NewScanner(f)
for scanner.Scan() {
input = append(input, scanner.Text())
}
return input
}
func createTargetZone(input string) (map[Point]bool, int) {
pointsInGrid := make(map[Point]bool)
// target area: x=185..221, y=-122..-74
gridRegex := regexp.MustCompile("target area: x=([0-9]+)..([0-9]+), y=(-[0-9]+)..(-[0-9]+)")
gridString := gridRegex.FindAllStringSubmatch(input, 1)
xMin, _ := strconv.Atoi(gridString[0][1])
xMax, _ := strconv.Atoi(gridString[0][2])
yMin, _ := strconv.Atoi(gridString[0][3])
yMax, _ := strconv.Atoi(gridString[0][4])
for x := xMin; x < xMax+1; x++ {
for y := yMin; y < yMax+1; y++ {
pointsInGrid[Point{x, y}] = true
}
}
return pointsInGrid, yMin
} | 2021/17-TrickShot/main.go | 0.554953 | 0.41182 | main.go | starcoder |
package ast
import "errors"
// Variant represents a value at runtime: a tagged union whose Type selects
// which of the payload fields (Int, String, Bool, VectorData, NamedData) is
// meaningful.
type Variant struct {
	Type TypeKind // discriminates which payload field is in use
	Int  int64
	String string
	Bool bool
	IsReturn bool // set when this value is being returned from a function
	VariableReferenceFailed bool // set when a variable lookup failed to resolve
	VectorData []*Variant // payload for array values
	NamedData map[string]*Variant // payload for struct values, keyed by field name
}
// MakeVariant wraps a value as a *Variant. It accepts a TypeKind (producing
// an empty value of that type), an existing *Variant (producing a shallow
// copy with the IsReturn / VariableReferenceFailed flags cleared), or a Go
// primitive (int, int64, bool, string). Anything else yields an undefined
// variant.
func MakeVariant(in interface{}) *Variant {
	switch val := in.(type) {
	case TypeKind:
		return &Variant{Type: val}
	case *Variant:
		copied := *val
		copied.IsReturn = false
		copied.VariableReferenceFailed = false
		return &copied
	case int:
		return &Variant{Type: PrimitiveTypeInt, Int: int64(val)}
	case int64:
		return &Variant{Type: PrimitiveTypeInt, Int: val}
	case bool:
		return &Variant{Type: PrimitiveTypeBool, Bool: val}
	case string:
		return &Variant{Type: PrimitiveTypeString, String: val}
	default:
		return &Variant{Type: PrimitiveTypeUndefined}
	}
}
// DefaultVariantValue returns a valid *Variant set up with the given type and
// the appropriate default values. Primitive types rely on Go zero values;
// arrays require the declared length to be statically evaluable, and structs
// are filled recursively field by field.
func DefaultVariantValue(t TypeKind) (*Variant, error) {
	ret := &Variant{
		Type: t,
	}
	switch t.Kind() {
	//default values are fine
	case PrimitiveTypeInt:
	case PrimitiveTypeString:
	case PrimitiveTypeUndefined:
	case PrimitiveTypeBool:
	case ComplexTypeArray:
		// Evaluate the array's declared length in a fresh context; it must
		// resolve statically (no variable references) to an integer.
		context := &ExecContext{}
		arrayLen := 0
		lenEval := t.(ArrayType).Len.Exec(context)
		if len(context.Errors) == 0 && lenEval.Type == PrimitiveTypeInt {
			arrayLen = int(lenEval.Int)
			// Populate every slot with the element type's default value.
			ret.VectorData = make([]*Variant, arrayLen)
			for i := 0; i < arrayLen; i++ {
				v, e := DefaultVariantValue(t.BaseType())
				if e != nil {
					return ret, errors.New("Array basetype error: " + e.Error())
				}
				ret.VectorData[i] = v
			}
		} else if len(context.Errors) > 0 || lenEval.VariableReferenceFailed {
			return ret, errors.New("Could not statically resolve the length of the given array")
		} else {
			return ret, errors.New("Resolved length of array was not an integer")
		}
	case ComplexTypeStruct:
		// Build each field's default recursively, keyed by field name.
		ret.NamedData = map[string]*Variant{}
		for _, field := range t.(StructType).Fields {
			fv, err := DefaultVariantValue(field.BaseType())
			if err != nil {
				return ret, err
			}
			ret.NamedData[field.Name()] = fv
		}
	default:
		return ret, errors.New("Internal Error: cannot do anything about: " + t.Kind().String())
	}
	return ret, nil
}
package onshape
import (
"encoding/json"
)
// BTExportTessellatedFacesBody1321AllOf struct for BTExportTessellatedFacesBody1321AllOf.
// All fields are optional pointers so "unset" is distinguishable from the
// zero value; json tags use ",omitempty" accordingly.
// NOTE(review): this follows the openapi-generator Go model layout —
// presumably generated code; prefer regenerating over hand-editing.
type BTExportTessellatedFacesBody1321AllOf struct {
	Appearance *BTGraphicsAppearance1152 `json:"appearance,omitempty"`
	BodyType *string `json:"bodyType,omitempty"`
	BtType *string `json:"btType,omitempty"`
	Faces *[]BTExportTessellatedFacesFace1192 `json:"faces,omitempty"`
	FacetPoints *[]BTVector3d389 `json:"facetPoints,omitempty"`
}
// NewBTExportTessellatedFacesBody1321AllOf instantiates a new BTExportTessellatedFacesBody1321AllOf object.
// This constructor will assign default values to properties that have them
// defined and makes sure properties required by the API are set; the argument
// list will change if the set of required properties changes. (Since every
// field here is optional, it currently takes no arguments.)
func NewBTExportTessellatedFacesBody1321AllOf() *BTExportTessellatedFacesBody1321AllOf {
	this := BTExportTessellatedFacesBody1321AllOf{}
	return &this
}

// NewBTExportTessellatedFacesBody1321AllOfWithDefaults instantiates a new
// BTExportTessellatedFacesBody1321AllOf object, assigning default values only
// to properties that define them; it does not guarantee required properties
// are set.
func NewBTExportTessellatedFacesBody1321AllOfWithDefaults() *BTExportTessellatedFacesBody1321AllOf {
	this := BTExportTessellatedFacesBody1321AllOf{}
	return &this
}
// GetAppearance returns the Appearance field value if set, or the zero value
// otherwise (nil-safe on a nil receiver).
func (o *BTExportTessellatedFacesBody1321AllOf) GetAppearance() BTGraphicsAppearance1152 {
	if o == nil || o.Appearance == nil {
		var ret BTGraphicsAppearance1152
		return ret
	}
	return *o.Appearance
}

// GetAppearanceOk returns a tuple with the Appearance field value if set, nil
// otherwise, and a boolean reporting whether the value has been set.
func (o *BTExportTessellatedFacesBody1321AllOf) GetAppearanceOk() (*BTGraphicsAppearance1152, bool) {
	if o == nil || o.Appearance == nil {
		return nil, false
	}
	return o.Appearance, true
}

// HasAppearance returns whether the Appearance field has been set.
func (o *BTExportTessellatedFacesBody1321AllOf) HasAppearance() bool {
	if o != nil && o.Appearance != nil {
		return true
	}
	return false
}

// SetAppearance stores a reference to the given BTGraphicsAppearance1152 in
// the Appearance field.
func (o *BTExportTessellatedFacesBody1321AllOf) SetAppearance(v BTGraphicsAppearance1152) {
	o.Appearance = &v
}

// GetBodyType returns the BodyType field value if set, or the zero value
// otherwise (nil-safe on a nil receiver).
func (o *BTExportTessellatedFacesBody1321AllOf) GetBodyType() string {
	if o == nil || o.BodyType == nil {
		var ret string
		return ret
	}
	return *o.BodyType
}

// GetBodyTypeOk returns a tuple with the BodyType field value if set, nil
// otherwise, and a boolean reporting whether the value has been set.
func (o *BTExportTessellatedFacesBody1321AllOf) GetBodyTypeOk() (*string, bool) {
	if o == nil || o.BodyType == nil {
		return nil, false
	}
	return o.BodyType, true
}

// HasBodyType returns whether the BodyType field has been set.
func (o *BTExportTessellatedFacesBody1321AllOf) HasBodyType() bool {
	if o != nil && o.BodyType != nil {
		return true
	}
	return false
}

// SetBodyType stores a reference to the given string in the BodyType field.
func (o *BTExportTessellatedFacesBody1321AllOf) SetBodyType(v string) {
	o.BodyType = &v
}

// GetBtType returns the BtType field value if set, or the zero value
// otherwise (nil-safe on a nil receiver).
func (o *BTExportTessellatedFacesBody1321AllOf) GetBtType() string {
	if o == nil || o.BtType == nil {
		var ret string
		return ret
	}
	return *o.BtType
}

// GetBtTypeOk returns a tuple with the BtType field value if set, nil
// otherwise, and a boolean reporting whether the value has been set.
func (o *BTExportTessellatedFacesBody1321AllOf) GetBtTypeOk() (*string, bool) {
	if o == nil || o.BtType == nil {
		return nil, false
	}
	return o.BtType, true
}

// HasBtType returns whether the BtType field has been set.
func (o *BTExportTessellatedFacesBody1321AllOf) HasBtType() bool {
	if o != nil && o.BtType != nil {
		return true
	}
	return false
}

// SetBtType stores a reference to the given string in the BtType field.
func (o *BTExportTessellatedFacesBody1321AllOf) SetBtType(v string) {
	o.BtType = &v
}

// GetFaces returns the Faces field value if set, or the zero value otherwise
// (nil-safe on a nil receiver).
func (o *BTExportTessellatedFacesBody1321AllOf) GetFaces() []BTExportTessellatedFacesFace1192 {
	if o == nil || o.Faces == nil {
		var ret []BTExportTessellatedFacesFace1192
		return ret
	}
	return *o.Faces
}

// GetFacesOk returns a tuple with the Faces field value if set, nil
// otherwise, and a boolean reporting whether the value has been set.
func (o *BTExportTessellatedFacesBody1321AllOf) GetFacesOk() (*[]BTExportTessellatedFacesFace1192, bool) {
	if o == nil || o.Faces == nil {
		return nil, false
	}
	return o.Faces, true
}

// HasFaces returns whether the Faces field has been set.
func (o *BTExportTessellatedFacesBody1321AllOf) HasFaces() bool {
	if o != nil && o.Faces != nil {
		return true
	}
	return false
}

// SetFaces stores a reference to the given []BTExportTessellatedFacesFace1192
// in the Faces field.
func (o *BTExportTessellatedFacesBody1321AllOf) SetFaces(v []BTExportTessellatedFacesFace1192) {
	o.Faces = &v
}

// GetFacetPoints returns the FacetPoints field value if set, or the zero
// value otherwise (nil-safe on a nil receiver).
func (o *BTExportTessellatedFacesBody1321AllOf) GetFacetPoints() []BTVector3d389 {
	if o == nil || o.FacetPoints == nil {
		var ret []BTVector3d389
		return ret
	}
	return *o.FacetPoints
}

// GetFacetPointsOk returns a tuple with the FacetPoints field value if set,
// nil otherwise, and a boolean reporting whether the value has been set.
func (o *BTExportTessellatedFacesBody1321AllOf) GetFacetPointsOk() (*[]BTVector3d389, bool) {
	if o == nil || o.FacetPoints == nil {
		return nil, false
	}
	return o.FacetPoints, true
}

// HasFacetPoints returns whether the FacetPoints field has been set.
func (o *BTExportTessellatedFacesBody1321AllOf) HasFacetPoints() bool {
	if o != nil && o.FacetPoints != nil {
		return true
	}
	return false
}

// SetFacetPoints stores a reference to the given []BTVector3d389 in the
// FacetPoints field.
func (o *BTExportTessellatedFacesBody1321AllOf) SetFacetPoints(v []BTVector3d389) {
	o.FacetPoints = &v
}
// MarshalJSON serializes only the fields that are set, mirroring the
// ",omitempty" struct tags.
func (o BTExportTessellatedFacesBody1321AllOf) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	if o.Appearance != nil {
		toSerialize["appearance"] = o.Appearance
	}
	if o.BodyType != nil {
		toSerialize["bodyType"] = o.BodyType
	}
	if o.BtType != nil {
		toSerialize["btType"] = o.BtType
	}
	if o.Faces != nil {
		toSerialize["faces"] = o.Faces
	}
	if o.FacetPoints != nil {
		toSerialize["facetPoints"] = o.FacetPoints
	}
	return json.Marshal(toSerialize)
}
// NullableBTExportTessellatedFacesBody1321AllOf wraps the model so that an
// explicit JSON null can be distinguished from an absent value.
type NullableBTExportTessellatedFacesBody1321AllOf struct {
	value *BTExportTessellatedFacesBody1321AllOf
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableBTExportTessellatedFacesBody1321AllOf) Get() *BTExportTessellatedFacesBody1321AllOf {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableBTExportTessellatedFacesBody1321AllOf) Set(val *BTExportTessellatedFacesBody1321AllOf) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set (or UnmarshalJSON) has been called.
func (v NullableBTExportTessellatedFacesBody1321AllOf) IsSet() bool {
	return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableBTExportTessellatedFacesBody1321AllOf) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableBTExportTessellatedFacesBody1321AllOf returns a wrapper already
// marked as set to val.
func NewNullableBTExportTessellatedFacesBody1321AllOf(val *BTExportTessellatedFacesBody1321AllOf) *NullableBTExportTessellatedFacesBody1321AllOf {
	return &NullableBTExportTessellatedFacesBody1321AllOf{value: val, isSet: true}
}

// MarshalJSON emits the wrapped value (null when the value is nil).
func (v NullableBTExportTessellatedFacesBody1321AllOf) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes into the wrapped value and marks the wrapper as set.
func (v *NullableBTExportTessellatedFacesBody1321AllOf) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package prop
import (
"fmt"
"math"
"strings"
)
// FloatConstraint is an interface representing a float value constraint.
// Compare returns a fitness distance for a candidate value (0 = perfect
// match) plus whether the value is acceptable at all; Value returns the
// single exact/ideal value when the constraint defines one.
type FloatConstraint interface {
	Compare(float32) (float64, bool)
	Value() (float32, bool)
}
// Float specifies an ideal float value.
// Any value may be selected, but the closest value takes priority.
type Float float32

// Compare implements FloatConstraint: it returns the distance between a and
// the ideal value, normalized by the larger magnitude of the two.
func (f Float) Compare(a float32) (float64, bool) {
	if a == float32(f) {
		// Exact match. This also fixes the previous 0/0 => NaN result when
		// both the candidate and the ideal value were zero.
		return 0.0, true
	}
	return math.Abs(float64(a-float32(f))) / math.Max(math.Abs(float64(a)), math.Abs(float64(f))), true
}

// Value implements FloatConstraint.
func (f Float) Value() (float32, bool) { return float32(f), true }

// String implements Stringify.
func (f Float) String() string {
	return fmt.Sprintf("%.2f (ideal)", f)
}
// FloatExact specifies an exact float value.
type FloatExact float32

// Compare implements FloatConstraint: distance 0 on an exact match,
// otherwise 1 and "unacceptable".
func (f FloatExact) Compare(a float32) (float64, bool) {
	if a != float32(f) {
		return 1.0, false
	}
	return 0.0, true
}

// Value implements FloatConstraint.
func (f FloatExact) Value() (float32, bool) { return float32(f), true }

// String implements Stringify.
func (f FloatExact) String() string {
	return fmt.Sprintf("%.2f (exact)", f)
}
// FloatOneOf specifies list of expected float values.
type FloatOneOf []float32

// Compare implements FloatConstraint. A candidate matches only when it is
// present in the list.
func (f FloatOneOf) Compare(a float32) (float64, bool) {
	for _, want := range f {
		if a == want {
			return 0.0, true
		}
	}
	return 1.0, false
}

// Value implements FloatConstraint.
func (FloatOneOf) Value() (float32, bool) { return 0, false }

// String implements Stringify
func (f FloatOneOf) String() string {
	opts := make([]string, 0, len(f))
	for _, want := range f {
		opts = append(opts, fmt.Sprintf("%.2f", want))
	}
	return fmt.Sprintf("%s (one of values)", strings.Join(opts, ","))
}
// FloatRanged specifies range of expected float value.
// If Ideal is non-zero, closest value to Ideal takes priority.
// A zero Min, Max or Ideal means "not specified".
type FloatRanged struct {
	Min   float32
	Max   float32
	Ideal float32
}

// Compare implements FloatConstraint. Values outside [Min, Max] are
// rejected; inside the range the result is the distance from Ideal,
// normalized to the distance between Ideal and the relevant bound.
func (f FloatRanged) Compare(a float32) (float64, bool) {
	if f.Min != 0 && f.Min > a {
		// Out of range
		return 1.0, false
	}
	if f.Max != 0 && f.Max < a {
		// Out of range
		return 1.0, false
	}
	if f.Ideal == 0 {
		// If the value is in the range and Ideal is not specified,
		// any value is evenly acceptable.
		return 0.0, true
	}
	switch {
	case a == f.Ideal:
		return 0.0, true
	case a < f.Ideal:
		if f.Min == 0 {
			// If Min is not specified, smaller values than Ideal are even.
			return 0.0, true
		}
		return float64(f.Ideal-a) / float64(f.Ideal-f.Min), true
	default:
		if f.Max == 0 {
			// If Max is not specified, larger values than Ideal are even.
			return 0.0, true
		}
		return float64(a-f.Ideal) / float64(f.Max-f.Ideal), true
	}
}

// Value implements FloatConstraint.
func (FloatRanged) Value() (float32, bool) { return 0, false }

// String implements Stringify
func (f FloatRanged) String() string {
	return fmt.Sprintf("%.2f - %.2f (range), %.2f (ideal)", f.Min, f.Max, f.Ideal)
}
package leveldb
/*
level
Copyright (c) 2019 beito
This software is released under the MIT License.
http://opensource.org/licenses/mit-license.php
*/
import "fmt"
import "math"
// GetStorageTypeFromSize returns the smallest StorageType whose palette can
// hold size entries: size is rounded up to the next power of two and the
// resulting exponent (bits per block) selects the type.
func GetStorageTypeFromSize(size uint) StorageType {
	if size == 0 {
		// Guard against unsigned underflow in the rounding below
		// (previously size-- wrapped to the maximum uint).
		size = 1
	}
	size--
	size |= (size >> 1)
	size |= (size >> 2)
	size |= (size >> 4)
	size |= (size >> 8)
	size |= (size >> 16)
	size++
	return StorageType(math.Log2(float64(size)))
}
// StorageType is a type of BlockStorage. Its integer value is the number of
// bits used to encode one block index.
type StorageType int

// BitsPerBlock returns bits per a block for BlockStorage
func (t StorageType) BitsPerBlock() int {
	return int(t)
}

// PaletteSize returns a size of palette for StorageType (2^bits entries).
func (t StorageType) PaletteSize() int {
	return 1 << uint(t)
}

// Supported storage types; TypePaletteN stores each block index in N bits.
const (
	TypePalette1 StorageType = 1
	TypePalette2 StorageType = 2
	TypePalette3 StorageType = 3
	TypePalette4 StorageType = 4
	TypePalette5 StorageType = 5
	TypePalette6 StorageType = 6
	TypePalette8 StorageType = 8
	TypePalette16 StorageType = 16
)
// BlockStorageSize is a size of BlockStorage (16x16x16 blocks).
const BlockStorageSize = 16 * 16 * 16

// NewBlockStorage returns new BlockStorage with an initialized block array.
func NewBlockStorage() *BlockStorage {
	return &BlockStorage{
		Blocks: make([]uint16, BlockStorageSize),
	}
}

// BlockStorage is a storage contains blocks of a subchunk
type BlockStorage struct {
	Palettes []*RawBlockState // distinct block states referenced by Blocks
	Blocks   []uint16         // per-block index into Palettes
}

// At returns a index for Blocks at blockstorage coordinates
func (BlockStorage) At(x, y, z int) int {
	return x<<8 | z<<4 | y
}

// Vaild validates blockstorage coordinates, returning an error unless
// 0 <= x,y,z <= 15. (The misspelled exported name is kept for backward
// compatibility.)
func (BlockStorage) Vaild(x, y, z int) error {
	if x < 0 || x > 15 || y < 0 || y > 15 || z < 0 || z > 15 {
		return fmt.Errorf("level.leveldb: invalid block storage coordinate")
	}
	return nil
}

// GetBlock returns the BlockState at blockstorage coordinates
func (storage *BlockStorage) GetBlock(x, y, z int) (*RawBlockState, error) {
	err := storage.Vaild(x, y, z)
	if err != nil {
		return nil, err
	}
	index := storage.At(x, y, z)
	if index >= len(storage.Blocks) {
		return nil, fmt.Errorf("level.leveldb: uninitialized BlockStorage")
	}
	id := storage.Blocks[index]
	if int(id) >= len(storage.Palettes) {
		return nil, fmt.Errorf("level.leveldb: couldn't find a palette for the block")
	}
	return storage.Palettes[id], nil
}

// SetBlock sets the BlockState at blockstorage coordinates
func (storage *BlockStorage) SetBlock(x, y, z int, bs *RawBlockState) error {
	// Fix: validate coordinates up front, mirroring GetBlock; previously bad
	// input could index out of range.
	if err := storage.Vaild(x, y, z); err != nil {
		return err
	}
	index := storage.At(x, y, z)
	if index >= len(storage.Blocks) {
		return fmt.Errorf("level.leveldb: uninitialized BlockStorage")
	}
	// Reuse an existing palette entry when the state is already known.
	for i, v := range storage.Palettes {
		if v.Equal(bs) {
			storage.Blocks[index] = uint16(i)
			return nil
		}
	}
	// Fix: check the cap before appending (the old ">" check allowed the
	// palette to grow one entry past the largest supported size).
	if len(storage.Palettes) >= TypePalette16.PaletteSize() {
		return fmt.Errorf("level.leveldb: unsupported palette size > %d", TypePalette16.PaletteSize())
	}
	storage.Palettes = append(storage.Palettes, bs)
	storage.Blocks[index] = uint16(len(storage.Palettes) - 1)
	return nil
}
// NewSubChunk returns new SubChunk at vertical index y.
func NewSubChunk(y byte) *SubChunk {
	return &SubChunk{
		Y: y,
	}
}

// SubChunk is a 16x16x16 blocks segment for a chunk
type SubChunk struct {
	Y        byte            // vertical index of this segment inside the chunk
	Storages []*BlockStorage // block storage layers
}

// GetBlockStorage returns BlockStorage which subchunk contained with index
func (sub *SubChunk) GetBlockStorage(index int) (*BlockStorage, bool) {
	if index >= len(sub.Storages) || index < 0 {
		return nil, false
	}
	return sub.Storages[index], true
}

// GetBlock returns BlockState at the subchunk coordinates
func (sub *SubChunk) GetBlock(x, y, z, index int) (*RawBlockState, error) {
	storage, ok := sub.GetBlockStorage(index)
	if !ok {
		// Fix: corrected "invaild" typo in the error message.
		return nil, fmt.Errorf("level.leveldb: invalid storage index")
	}
	return storage.GetBlock(x, y, z)
}

// SetBlock sets the BlockState at the subchunk coordinates
func (sub *SubChunk) SetBlock(x, y, z, index int, bs *RawBlockState) error {
	storage, ok := sub.GetBlockStorage(index)
	if !ok {
		return fmt.Errorf("level.leveldb: invalid storage index")
	}
	return storage.SetBlock(x, y, z, bs)
}
// SubChunkFormat is a formatter for subchunk
type SubChunkFormat interface {
Read(y byte, b []byte) (*SubChunk, error)
} | leveldb/subchunk.go | 0.737725 | 0.454835 | subchunk.go | starcoder |
package gofa
/*
Tpxes Project celestial to tangent plane, spherical
In the tangent plane projection, given celestial spherical
coordinates for a star and the tangent point, solve for the star's
rectangular coordinates in the tangent plane.
Given:
a,b float64 star's spherical coordinates
a0,b0 float64 tangent point's spherical coordinates
Returned:
xi,eta float64 rectangular coordinates of star image (Note 2)
Returned (function value):
int status: 0 = OK
1 = star too far from axis
2 = antistar on tangent plane
3 = antistar too far from axis
Notes:
1) The tangent plane projection is also called the "gnomonic
projection" and the "central projection".
2) The eta axis points due north in the adopted coordinate system.
If the spherical coordinates are observed (RA,Dec), the tangent
plane coordinates (xi,eta) are conventionally called the
"standard coordinates". For right-handed spherical coordinates,
(xi,eta) are also right-handed. The units of (xi,eta) are,
effectively, radians at the tangent point.
3) All angular arguments are in radians.
4) This function is a member of the following set:
spherical vector solve for
> iauTpxes < iauTpxev xi,eta
iauTpsts iauTpstv star
iauTpors iauTporv origin
References:
<NAME>. & <NAME>., 2002, "Representations of
celestial coordinates in FITS", Astron.Astrophys. 395, 1077
<NAME>., "Spherical Astronomy", Cambridge University Press,
1987, Chapter 13.
*/
func Tpxes(a, b, a0, b0 float64, xi, eta *float64) int {
	/* Tolerance below which the star is treated as (nearly) opposite or
	   perpendicular to the tangent-point axis. */
	const TINY = 1e-6
	var j int
	var sb0, sb, cb0, cb, da, sda, cda, d float64

	/* Functions of the spherical coordinates. */
	sb0 = sin(b0)
	sb = sin(b)
	cb0 = cos(b0)
	cb = cos(b)
	da = a - a0
	sda = sin(da)
	cda = cos(da)

	/* Reciprocal of star vector length to tangent plane. */
	d = sb*sb0 + cb*cb0*cda

	/* Check for error cases. */
	if d > TINY {
		j = 0
	} else if d >= 0.0 {
		/* Star too far from axis: clamp d to keep the result finite. */
		j = 1
		d = TINY
	} else if d > -TINY {
		/* Antistar on tangent plane. */
		j = 2
		d = -TINY
	} else {
		/* Antistar too far from axis. */
		j = 3
	}

	/* Return the tangent plane coordinates (even in dubious cases). */
	*xi = cb * sda / d
	*eta = (sb*cb0 - cb*sb0*cda) / d

	/* Return the status. */
	return j
}
/*
Tpxev Project celestial to tangent plane, vector
In the tangent plane projection, given celestial direction cosines
for a star and the tangent point, solve for the star's rectangular
coordinates in the tangent plane.
Given:
v [3]float64 direction cosines of star (Note 4)
v0 [3]float64 direction cosines of tangent point (Note 4)
Returned:
xi,eta float64 tangent plane coordinates of star
Returned (function value):
int status: 0 = OK
1 = star too far from axis
2 = antistar on tangent plane
3 = antistar too far from axis
Notes:
1) The tangent plane projection is also called the "gnomonic
projection" and the "central projection".
2) The eta axis points due north in the adopted coordinate system.
If the direction cosines represent observed (RA,Dec), the tangent
plane coordinates (xi,eta) are conventionally called the
"standard coordinates". If the direction cosines are with
respect to a right-handed triad, (xi,eta) are also right-handed.
The units of (xi,eta) are, effectively, radians at the tangent
point.
3) The method used is to extend the star vector to the tangent
plane and then rotate the triad so that (x,y) becomes (xi,eta).
Writing (a,b) for the celestial spherical coordinates of the
star, the sequence of rotations is (a+pi/2) around the z-axis
followed by (pi/2-b) around the x-axis.
4) If vector v0 is not of unit length, or if vector v is of zero
length, the results will be wrong.
5) If v0 points at a pole, the returned (xi,eta) will be based on
the arbitrary assumption that the longitude coordinate of the
tangent point is zero.
6) This function is a member of the following set:
spherical vector solve for
iauTpxes > iauTpxev < xi,eta
iauTpsts iauTpstv star
iauTpors iauTporv origin
References:
<NAME>. & <NAME>., 2002, "Representations of
celestial coordinates in FITS", Astron.Astrophys. 395, 1077
<NAME>., "Spherical Astronomy", Cambridge University Press,
1987, Chapter 13.
*/
func Tpxev(v, v0 [3]float64, xi, eta *float64) int {
	/* Tolerance below which the star is treated as (nearly) opposite or
	   perpendicular to the tangent-point axis. */
	const TINY = 1e-6
	var j int
	var x, y, z, x0, y0, z0, r2, r, w, d float64

	/* Star and tangent point. */
	x = v[0]
	y = v[1]
	z = v[2]
	x0 = v0[0]
	y0 = v0[1]
	z0 = v0[2]

	/* Deal with polar case. */
	r2 = x0*x0 + y0*y0
	r = sqrt(r2)
	if r == 0.0 {
		/* Tangent point at a pole: adopt zero longitude (Note 5). */
		r = 1e-20
		x0 = r
	}

	/* Reciprocal of star vector length to tangent plane. */
	w = x*x0 + y*y0
	d = w + z*z0

	/* Check for error cases. */
	if d > TINY {
		j = 0
	} else if d >= 0.0 {
		/* Star too far from axis: clamp d to keep the result finite. */
		j = 1
		d = TINY
	} else if d > -TINY {
		/* Antistar on tangent plane. */
		j = 2
		d = -TINY
	} else {
		/* Antistar too far from axis. */
		j = 3
	}

	/* Return the tangent plane coordinates (even in dubious cases). */
	d *= r
	*xi = (y*x0 - x*y0) / d
	*eta = (z*r2 - z0*w) / d

	/* Return the status. */
	return j
}
/*
Tpsts Project tangent plane to celestial, spherical
In the tangent plane projection, given the star's rectangular
coordinates and the spherical coordinates of the tangent point,
solve for the spherical coordinates of the star.
Given:
xi,eta float64 rectangular coordinates of star image (Note 2)
a0,b0 float64 tangent point's spherical coordinates
Returned:
a,b float64 star's spherical coordinates
Notes:
1) The tangent plane projection is also called the "gnomonic
projection" and the "central projection".
2) The eta axis points due north in the adopted coordinate system.
If the spherical coordinates are observed (RA,Dec), the tangent
plane coordinates (xi,eta) are conventionally called the
"standard coordinates". If the spherical coordinates are with
respect to a right-handed triad, (xi,eta) are also right-handed.
The units of (xi,eta) are, effectively, radians at the tangent
point.
3) All angular arguments are in radians.
4) This function is a member of the following set:
spherical vector solve for
iauTpxes iauTpxev xi,eta
> iauTpsts < iauTpstv star
iauTpors iauTporv origin
Called:
Anp normalize angle into range 0 to 2pi
References:
<NAME>. & <NAME>., 2002, "Representations of
celestial coordinates in FITS", Astron.Astrophys. 395, 1077
<NAME>., "Spherical Astronomy", Cambridge University Press,
1987, Chapter 13.
*/
func Tpsts(xi, eta, a0, b0 float64, a, b *float64) {
	var sb0, cb0, d float64

	/* Functions of the tangent point's second (latitude-like) coordinate. */
	sb0 = sin(b0)
	cb0 = cos(b0)

	/* Invert the gnomonic projection about (a0,b0); Anp keeps a in 0..2pi. */
	d = cb0 - eta*sb0
	*a = Anp(atan2(xi, d) + a0)
	*b = atan2(sb0+eta*cb0, sqrt(xi*xi+d*d))
}
/*
Tpstv Project tangent plane to celestial, vector
In the tangent plane projection, given the star's rectangular
coordinates and the direction cosines of the tangent point, solve
for the direction cosines of the star.
Given:
xi,eta float64 rectangular coordinates of star image (Note 2)
v0 [3]float64 tangent point's direction cosines
Returned:
v [3]float64 star's direction cosines
Notes:
1) The tangent plane projection is also called the "gnomonic
projection" and the "central projection".
2) The eta axis points due north in the adopted coordinate system.
If the direction cosines represent observed (RA,Dec), the tangent
plane coordinates (xi,eta) are conventionally called the
"standard coordinates". If the direction cosines are with
respect to a right-handed triad, (xi,eta) are also right-handed.
The units of (xi,eta) are, effectively, radians at the tangent
point.
3) The method used is to complete the star vector in the (xi,eta)
based triad and normalize it, then rotate the triad to put the
tangent point at the pole with the x-axis aligned to zero
longitude. Writing (a0,b0) for the celestial spherical
coordinates of the tangent point, the sequence of rotations is
(b-pi/2) around the x-axis followed by (-a-pi/2) around the
z-axis.
4) If vector v0 is not of unit length, the returned vector v will
be wrong.
5) If vector v0 points at a pole, the returned vector v will be
based on the arbitrary assumption that the longitude coordinate
of the tangent point is zero.
6) This function is a member of the following set:
spherical vector solve for
iauTpxes iauTpxev xi,eta
iauTpsts > iauTpstv < star
iauTpors iauTporv origin
References:
<NAME>. & <NAME>., 2002, "Representations of
celestial coordinates in FITS", Astron.Astrophys. 395, 1077
<NAME>., "Spherical Astronomy", Cambridge University Press,
1987, Chapter 13.
*/
func Tpstv(xi, eta float64, v0 [3]float64, v *[3]float64) {
	var x, y, z, f, r float64

	/* Tangent point. */
	x = v0[0]
	y = v0[1]
	z = v0[2]

	/* Deal with polar case. */
	r = sqrt(x*x + y*y)
	if r == 0.0 {
		/* Tangent point at a pole: adopt zero longitude (Note 5). */
		r = 1e-20
		x = r
	}

	/* Star vector length to tangent plane. */
	f = sqrt(1.0 + xi*xi + eta*eta)

	/* Apply the transformation and normalize. */
	v[0] = (x - (xi*y+eta*x*z)/r) / f
	v[1] = (y + (xi*x-eta*y*z)/r) / f
	v[2] = (z + eta*r) / f
}
/*
Tpors Solve for tangent point, spherical
In the tangent plane projection, given the rectangular coordinates
of a star and its spherical coordinates, determine the spherical
coordinates of the tangent point.
Given:
xi,eta float64 rectangular coordinates of star image (Note 2)
a,b float64 star's spherical coordinates (Note 3)
Returned:
a01,b01 float64 tangent point's spherical coordinates, Soln. 1
a02,b02 float64 tangent point's spherical coordinates, Soln. 2
Returned (function value):
int number of solutions:
0 = no solutions returned (Note 5)
1 = only the first solution is useful (Note 6)
2 = both solutions are useful (Note 6)
Notes:
1) The tangent plane projection is also called the "gnomonic
projection" and the "central projection".
2) The eta axis points due north in the adopted coordinate system.
If the spherical coordinates are observed (RA,Dec), the tangent
plane coordinates (xi,eta) are conventionally called the
"standard coordinates". If the spherical coordinates are with
respect to a right-handed triad, (xi,eta) are also right-handed.
The units of (xi,eta) are, effectively, radians at the tangent
point.
3) All angular arguments are in radians.
4) The angles a01 and a02 are returned in the range 0-2pi. The
angles b01 and b02 are returned in the range +/-pi, but in the
usual, non-pole-crossing, case, the range is +/-pi/2.
5) Cases where there is no solution can arise only near the poles.
For example, it is clearly impossible for a star at the pole
itself to have a non-zero xi value, and hence it is meaningless
to ask where the tangent point would have to be to bring about
this combination of xi and dec.
6) Also near the poles, cases can arise where there are two useful
solutions. The return value indicates whether the second of the
two solutions returned is useful; 1 indicates only one useful
solution, the usual case.
7) The basis of the algorithm is to solve the spherical triangle PSC,
where P is the north celestial pole, S is the star and C is the
tangent point. The spherical coordinates of the tangent point are
[a0,b0]; writing rho^2 = (xi^2+eta^2) and r^2 = (1+rho^2), side c
is then (pi/2-b), side p is sqrt(xi^2+eta^2) and side s (to be
found) is (pi/2-b0). Angle C is given by sin(C) = xi/rho and
cos(C) = eta/rho. Angle P (to be found) is the longitude
difference between star and tangent point (a-a0).
8) This function is a member of the following set:
spherical vector solve for
iauTpxes iauTpxev xi,eta
iauTpsts iauTpstv star
> iauTpors < iauTporv origin
Called:
Anp normalize angle into range 0 to 2pi
References:
<NAME>. & <NAME>., 2002, "Representations of
celestial coordinates in FITS", Astron.Astrophys. 395, 1077
<NAME>., "Spherical Astronomy", Cambridge University Press,
1987, Chapter 13.
*/
func Tpors(xi, eta, a, b float64, a01, b01, a02, b02 *float64) int {
	var xi2, r, sb, cb, rsb, rcb, w2, w, s, c float64

	xi2 = xi * xi
	r = sqrt(1.0 + xi2 + eta*eta)
	sb = sin(b)
	cb = cos(b)
	rsb = r * sb
	rcb = r * cb
	w2 = rcb*rcb - xi2
	if w2 >= 0.0 {
		/* First solution. */
		w = sqrt(w2)
		s = rsb - eta*w
		c = rsb*eta + w
		if xi == 0.0 && w == 0.0 {
			/* Degenerate case: pick w = 1 to avoid atan2(0,0). */
			w = 1.0
		}
		*a01 = Anp(a - atan2(xi, w))
		*b01 = atan2(s, c)

		/* Second solution: mirror of w. */
		w = -w
		s = rsb - eta*w
		c = rsb*eta + w
		*a02 = Anp(a - atan2(xi, w))
		*b02 = atan2(s, c)

		/* One or two useful solutions (see Note 6 above). */
		if fabs(rsb) < 1.0 {
			return 1
		} else {
			return 2
		}
	} else {
		/* No solution (see Note 5 above). */
		return 0
	}
}
/*
Tporv Solve for tangent point, vector
In the tangent plane projection, given the rectangular coordinates
of a star and its direction cosines, determine the direction
cosines of the tangent point.
Given:
xi,eta float64 rectangular coordinates of star image (Note 2)
v [3]float64 star's direction cosines (Note 3)
Returned:
v01 [3]float64 tangent point's direction cosines, Solution 1
v02 [3]float64 tangent point's direction cosines, Solution 2
Returned (function value):
int number of solutions:
0 = no solutions returned (Note 4)
1 = only the first solution is useful (Note 5)
2 = both solutions are useful (Note 5)
Notes:
1) The tangent plane projection is also called the "gnomonic
projection" and the "central projection".
2) The eta axis points due north in the adopted coordinate system.
If the direction cosines represent observed (RA,Dec), the tangent
plane coordinates (xi,eta) are conventionally called the
"standard coordinates". If the direction cosines are with
respect to a right-handed triad, (xi,eta) are also right-handed.
The units of (xi,eta) are, effectively, radians at the tangent
point.
3) The vector v must be of unit length or the result will be wrong.
4) Cases where there is no solution can arise only near the poles.
For example, it is clearly impossible for a star at the pole
itself to have a non-zero xi value, and hence it is meaningless
to ask where the tangent point would have to be.
5) Also near the poles, cases can arise where there are two useful
solutions. The return value indicates whether the second of the
two solutions returned is useful; 1 indicates only one useful
solution, the usual case.
6) The basis of the algorithm is to solve the spherical triangle
PSC, where P is the north celestial pole, S is the star and C is
the tangent point. Calling the celestial spherical coordinates
of the star and tangent point (a,b) and (a0,b0) respectively, and
writing rho^2 = (xi^2+eta^2) and r^2 = (1+rho^2), and
transforming the vector v into (a,b) in the normal way, side c is
then (pi/2-b), side p is sqrt(xi^2+eta^2) and side s (to be
found) is (pi/2-b0), while angle C is given by sin(C) = xi/rho
and cos(C) = eta/rho; angle P (to be found) is (a-a0). After
solving the spherical triangle, the result (a0,b0) can be
expressed in vector form as v0.
7) This function is a member of the following set:
spherical vector solve for
iauTpxes iauTpxev xi,eta
iauTpsts iauTpstv star
iauTpors > iauTporv < origin
References:
<NAME>. & <NAME>., 2002, "Representations of
celestial coordinates in FITS", Astron.Astrophys. 395, 1077
<NAME>., "Spherical Astronomy", Cambridge University Press,
1987, Chapter 13.
*/
func Tporv(xi, eta float64, v [3]float64, v01, v02 *[3]float64) int {
var x, y, z, rxy2, xi2, eta2p1, r, rsb, rcb, w2, w, c float64
x = v[0]
y = v[1]
z = v[2]
rxy2 = x*x + y*y
xi2 = xi * xi
eta2p1 = eta*eta + 1.0
r = sqrt(xi2 + eta2p1)
rsb = r * z
rcb = r * sqrt(x*x+y*y)
w2 = rcb*rcb - xi2
if w2 > 0.0 {
w = sqrt(w2)
c = (rsb*eta + w) / (eta2p1 * sqrt(rxy2*(w2+xi2)))
v01[0] = c * (x*w + y*xi)
v01[1] = c * (y*w - x*xi)
v01[2] = (rsb - eta*w) / eta2p1
w = -w
c = (rsb*eta + w) / (eta2p1 * sqrt(rxy2*(w2+xi2)))
v02[0] = c * (x*w + y*xi)
v02[1] = c * (y*w - x*xi)
v02[2] = (rsb - eta*w) / eta2p1
if fabs(rsb) < 1.0 {
return 1
} else {
return 2
}
} else {
return 0
}
} | projection.go | 0.928161 | 0.848031 | projection.go | starcoder |
package it
import (
"github.com/m4gshm/gollections/c"
"github.com/m4gshm/gollections/check"
"github.com/m4gshm/gollections/it/impl/it"
"github.com/m4gshm/gollections/op"
)
//Of creates the Iterator of predefined elements.
func Of[T any](elements ...T) c.Iterator[T] {
iter := it.NewHead(elements)
return &iter
}
//Wrap creates the Iterator using sclie as the elements source.
func Wrap[T any, TS ~[]T](elements TS) c.Iterator[T] {
iter := it.NewHead(elements)
return &iter
}
//Map creates the Iterator that converts elements with a converter and returns them.
func Map[From, To any, IT c.Iterator[From]](elements IT, by c.Converter[From, To]) c.Iterator[To] {
return it.Map(elements, by)
}
//MapFit additionally filters 'From' elements.
func MapFit[From, To any, IT c.Iterator[From]](elements IT, fit c.Predicate[From], by c.Converter[From, To]) c.Iterator[To] {
return it.MapFit(elements, fit, by)
}
//Flatt creates the Iterator that extracts slices of 'To' by a Flatter from elements of 'From' and flattens as one iterable collection of 'To' elements.
func Flatt[From, To any, IT c.Iterator[From]](elements IT, by c.Flatter[From, To]) c.Iterator[To] {
iter := it.Flatt(elements, by)
return &iter
}
//FlattFit additionally filters 'From' elements.
func FlattFit[From, To any, IT c.Iterator[From]](elements IT, fit c.Predicate[From], flatt c.Flatter[From, To]) c.Iterator[To] {
iter := it.FlattFit(elements, fit, flatt)
return &iter
}
//Filter creates the Iterator that checks elements by a filter and returns successful ones.
func Filter[T any, IT c.Iterator[T]](elements IT, filter c.Predicate[T]) c.Iterator[T] {
return it.Filter(elements, filter)
}
//NotNil creates the Iterator that filters nullable elements.
func NotNil[T any, IT c.Iterator[*T]](elements IT) c.Iterator[*T] {
return Filter(elements, check.NotNil[T])
}
//Reduce reduces elements to an one.
func Reduce[T any, IT c.Iterator[T]](elements IT, by op.Binary[T]) T {
return it.Reduce(elements, by)
}
//ReduceKV reduces key/value elements to an one.
func ReduceKV[K, V any, IT c.KVIterator[K, V]](elements IT, by op.Quaternary[K, V]) (K, V) {
return it.ReduceKV(elements, by)
}
//Slice converts an Iterator to a slice.
func Slice[T any](elements c.Iterator[T]) []T {
return it.Slice[T](elements)
}
//Group transforms iterable elements to the MapPipe based on applying key extractor to the elements
func Group[T any, K comparable](elements c.Iterator[T], by c.Converter[T, K]) c.MapPipe[K, T, map[K][]T] {
return it.Group(elements, by)
}
//ForEach applies a walker to elements of an Iterator.
func ForEach[T any, IT c.Iterator[T]](elements IT, walker func(T)) {
it.ForEach(elements, walker)
}
//ForEachFit applies a walker to elements that satisfy a predicate condition.
func ForEachFit[T any](elements c.Iterator[T], walker func(T), fit c.Predicate[T]) {
it.ForEachFit(elements, walker, fit)
} | it/api.go | 0.840848 | 0.496643 | api.go | starcoder |
package physics
import (
"github.com/g3n/engine/experimental/collision/shape"
"github.com/g3n/engine/experimental/physics"
"github.com/g3n/engine/experimental/physics/object"
"github.com/g3n/engine/geometry"
"github.com/g3n/engine/gls"
"github.com/g3n/engine/graphic"
"github.com/g3n/engine/light"
"github.com/g3n/engine/material"
"github.com/g3n/engine/math32"
"github.com/g3n/engine/texture"
"github.com/g3n/engine/util/helper"
"github.com/g3n/engine/window"
"github.com/g3n/g3nd/app"
"time"
)
func init() {
	// Register this demo under its menu key.
	app.DemoMap["physics-experimental.spheres"] = &PhysicsSpheres{}
}

// PhysicsSpheres is a demo that steps a physics simulation of spheres,
// letting the user throw new spheres and switch between a gravity field and
// an attractor force field.
type PhysicsSpheres struct {
	sim         *physics.Simulation // simulation stepped every frame in Update
	app         *app.App
	sphereGeom  *geometry.Geometry // shared sphere geometry
	matSphere   *material.Standard // shared textured sphere material
	anim        *texture.Animator  // sprite-sheet animator (smoke texture)
	sprite      *graphic.Sprite    // marker shown while the attractor is active
	attractorOn bool               // true while the attractor replaces gravity
	gravity     *physics.ConstantForceField
	attractor   *physics.AttractorForceField
}
// Start is called once at the start of the demo. It builds the scene
// (lights, floor, initial spheres), creates the physics simulation and the
// gravity/attractor force fields, and subscribes to keyboard input.
func (t *PhysicsSpheres) Start(a *app.App) {
	t.app = a

	// Subscribe to key events
	a.Subscribe(window.OnKeyRepeat, t.onKey)
	a.Subscribe(window.OnKeyDown, t.onKey)

	// Create axes helper
	axes := helper.NewAxes(1)
	a.Scene().Add(axes)

	pl := light.NewPoint(math32.NewColor("white"), 1.0)
	pl.SetPosition(0, 1, 0)
	a.Scene().Add(pl)

	// Add directional light from top
	l2 := light.NewDirectional(&math32.Color{1, 1, 1}, 0.5)
	l2.SetPosition(0, 0.1, 0)
	a.Scene().Add(l2)

	// Create simulation and force fields (gravity is active initially)
	t.sim = physics.NewSimulation(a.Scene())
	t.gravity = physics.NewConstantForceField(&math32.Vector3{0, -0.98, 0})
	t.attractor = physics.NewAttractorForceField(&math32.Vector3{0, 1, 0}, 1)
	t.sim.AddForceField(t.gravity)

	// Create sprite texture and animator
	tex2, err := texture.NewTexture2DFromImage(a.DirData() + "/images/smoke30.png")
	if err != nil {
		a.Log().Fatal("Error loading texture: %s", err)
	}
	t.anim = texture.NewAnimator(tex2, 6, 5)
	t.anim.SetDispTime(2 * 16666 * time.Microsecond)
	mat2 := material.NewStandard(&math32.Color{1, 1, 1})
	mat2.AddTexture(tex2)
	mat2.SetOpacity(0.5)
	mat2.SetTransparent(true)
	t.sprite = graphic.NewSprite(2, 2, mat2)
	t.sprite.SetPosition(0, 1, 0)
	t.sprite.SetVisible(false)
	a.Scene().Add(t.sprite)

	// Create sphere geometry
	t.sphereGeom = geometry.NewSphere(0.1, 16, 8)

	// Create ground texture.
	// Fix: check the load error before using the texture; previously
	// SetRepeat/SetWrapS/SetWrapT were called on a possibly nil texture,
	// panicking instead of reporting the load error.
	texfileG := a.DirData() + "/images/ground2.jpg"
	texG, err := texture.NewTexture2DFromImage(texfileG)
	if err != nil {
		a.Log().Fatal("Error loading texture: %s", err)
	}
	texG.SetRepeat(100, 100)
	texG.SetWrapS(gls.REPEAT)
	texG.SetWrapT(gls.REPEAT)

	mat := material.NewStandard(&math32.Color{1, 1, 1})
	mat.SetTransparent(true)
	mat.SetOpacity(0.5)
	mat.AddTexture(texG)
	floorGeom := geometry.NewPlane(100, 100)
	floor := graphic.NewMesh(floorGeom, mat)
	floor.SetPosition(0, 0, 0)
	floor.SetRotation(-math32.Pi/2, 0, 0)
	a.Scene().Add(floor)
	floorBody := object.NewBody(floor)
	floorBody.SetShape(shape.NewPlane())
	floorBody.SetBodyType(object.Static)
	t.sim.AddBody(floorBody, "Floor")

	// Create sphere texture
	texfile := a.DirData() + "/images/uvgrid.jpg"
	tex3, err := texture.NewTexture2DFromImage(texfile)
	if err != nil {
		a.Log().Fatal("Error loading texture: %s", err)
	}

	// Create sphere material
	t.matSphere = material.NewStandard(&math32.Color{1, 1, 1})
	t.matSphere.AddTexture(tex3)

	// Three initial spheres, slightly offset so they collide and scatter
	sphere2 := graphic.NewMesh(t.sphereGeom, t.matSphere)
	sphere2.SetPosition(0, 1, -0.02)
	a.Scene().Add(sphere2)
	rb2 := object.NewBody(sphere2)
	rb2.SetShape(shape.NewSphere(0.1))
	t.sim.AddBody(rb2, "Sphere2")

	sphere3 := graphic.NewMesh(t.sphereGeom, t.matSphere)
	sphere3.SetPosition(0.05, 1.2, 0.05)
	a.Scene().Add(sphere3)
	rb3 := object.NewBody(sphere3)
	rb3.SetShape(shape.NewSphere(0.1))
	t.sim.AddBody(rb3, "Sphere3")

	sphere4 := graphic.NewMesh(t.sphereGeom, t.matSphere)
	sphere4.SetPosition(-0.05, 1.4, 0)
	a.Scene().Add(sphere4)
	rb4 := object.NewBody(sphere4)
	rb4.SetShape(shape.NewSphere(0.1))
	t.sim.AddBody(rb4, "Sphere4")
}
func (t *PhysicsSpheres) ThrowBall() {
// Obtain throw direction from camera position and target
camPos := t.app.Camera().Position()
camTarget := t.app.Orbit().Target()
throwDir := math32.NewVec3().SubVectors(&camTarget, &camPos).SetLength(3)
// Create sphere rigid body
sphere := graphic.NewMesh(t.sphereGeom, t.matSphere)
sphere.SetPositionVec(&camPos)
t.app.Scene().Add(sphere)
rb := object.NewBody(sphere)
rb.SetShape(shape.NewSphere(0.1))
rb.SetVelocity(throwDir)
t.sim.AddBody(rb, "Sphere")
}
func (t *PhysicsSpheres) onKey(evname string, ev interface{}) {
kev := ev.(*window.KeyEvent)
switch kev.Key {
case window.KeyP:
t.sim.SetPaused(!t.sim.Paused())
case window.KeyO:
t.sim.SetPaused(false)
t.sim.Step(0.016)
t.sim.SetPaused(true)
case window.KeySpace:
t.ThrowBall()
case window.KeyA:
if t.attractorOn {
t.sim.AddForceField(t.gravity)
t.sim.RemoveForceField(t.attractor)
t.sprite.SetVisible(false)
t.attractorOn = false
} else {
t.sim.RemoveForceField(t.gravity)
t.sim.AddForceField(t.attractor)
t.sprite.SetVisible(true)
t.attractorOn = true
}
case window.Key2:
// TODO
}
}
// Update is called every frame.
func (t *PhysicsSpheres) Update(a *app.App, deltaTime time.Duration) {
t.sim.Step(float32(deltaTime.Seconds()))
t.anim.Update(time.Now())
}
// Cleanup is called once at the end of the demo.
func (t *PhysicsSpheres) Cleanup(a *app.App) {} | demos/experimental/physics/spheres.go | 0.596668 | 0.468791 | spheres.go | starcoder |
package geo
import (
"errors"
"math"
"github.com/haiyiyun/mongodb/geometry"
)
// WGS-84 ellipsoid parameters used by VincentyDistance:
// a = semi-major axis (meters), b = semi-minor axis (meters), f = flattening.
const a = 6378137
const b = 6356752.3142
const f = 1 / 298.257223563 // WGS-84 ellipsoid
/*
VincentyDistance computes the distances between two georgaphic coordinates
Args:
p1: the 'starting' point, given in [0]longitude, [1]latitude as a PointCoordinates struct
p2: the 'ending' point
Returns:
A 2 element tuple: distance between the 2 points given in (1) meters
The second element will return true upon a successful computation or
false if the algorithm fails to converge. -1, false is returned upon failure
*/
func VincentyDistance(p1, p2 geometry.PointCoordinates) (float64, error) {
// convert from degrees to radians
var la1, lo1, la2, lo2 float64
piRad := math.Pi / 180
lo1 = p1[0] * piRad
la1 = p1[1] * piRad
lo2 = p2[0] * piRad
la2 = p2[1] * piRad
L := lo2 - lo1
U1 := math.Atan((1 - f) * math.Tan(la1))
U2 := math.Atan((1 - f) * math.Tan(la2))
sinU1 := math.Sin(U1)
cosU1 := math.Cos(U1)
sinU2 := math.Sin(U2)
cosU2 := math.Cos(U2)
lambda := L
lambdaP := 2 * math.Pi
iterLimit := 20
var sinLambda, cosLambda, sinSigma float64
var cosSigma, sigma, sinAlpha, cosSqAlpha, cos2SigmaM, C float64
for {
if math.Abs(lambda-lambdaP) > 1e-12 && (iterLimit > 0) {
iterLimit -= 1
} else {
break
}
sinLambda = math.Sin(lambda)
cosLambda = math.Cos(lambda)
sinSigma = math.Sqrt((cosU2*sinLambda)*(cosU2*sinLambda) + (cosU1*sinU2-sinU1*cosU2*cosLambda)*(cosU1*sinU2-sinU1*cosU2*cosLambda))
if sinSigma == 0 {
return 0, nil // co-incident points
}
cosSigma = sinU1*sinU2 + cosU1*cosU2*cosLambda
sigma = math.Atan2(sinSigma, cosSigma)
sinAlpha = cosU1 * cosU2 * sinLambda / sinSigma
cosSqAlpha = 1 - sinAlpha*sinAlpha
cos2SigmaM = cosSigma - 2*sinU1*sinU2/cosSqAlpha
if math.IsNaN(cos2SigmaM) {
cos2SigmaM = 0 // equatorial line: cosSqAlpha=0
}
C = f / 16 * cosSqAlpha * (4 + f*(4-3*cosSqAlpha))
lambdaP = lambda
lambda = L + (1-C)*f*sinAlpha*(sigma+C*sinSigma*(cos2SigmaM+C*cosSigma*(-1+2*cos2SigmaM*cos2SigmaM)))
}
if iterLimit == 0 {
return -1, errors.New("vincenty algorithm failed to converge") // formula failed to converge
}
uSq := cosSqAlpha * (a*a - b*b) / (b * b)
A := 1 + uSq/16384*(4096+uSq*(-768+uSq*(320-175*uSq)))
B := uSq / 1024 * (256 + uSq*(-128+uSq*(74-47*uSq)))
deltaSigma := B * sinSigma * (cos2SigmaM + B/4*(cosSigma*(-1+2*cos2SigmaM*cos2SigmaM)-B/6*cos2SigmaM*(-3+4*sinSigma*sinSigma)*(-3+4*cos2SigmaM*cos2SigmaM)))
meters := b * A * (sigma - deltaSigma)
return meters, nil
} | geo/vincenty.go | 0.824638 | 0.596257 | vincenty.go | starcoder |
package matrix
import (
"math"
"github.com/kieron-pivotal/rays/tuple"
)
// Translation returns a 4x4 matrix representing a translation by
// (x, y, z): the identity with the offsets placed in the last column.
func Translation(x, y, z float64) Matrix {
	t := Identity(4, 4)
	t.Set(0, 3, x)
	t.Set(1, 3, y)
	t.Set(2, 3, z)
	return t
}
// Translate left-multiplies m by a translation matrix for (x, y, z),
// enabling fluent chaining of transformations.
func (m Matrix) Translate(x, y, z float64) Matrix {
	return Translation(x, y, z).Multiply(m)
}
// Scaling returns a 4x4 matrix whose diagonal scales the x, y and z
// components by the given factors.
func Scaling(x, y, z float64) Matrix {
	s := Identity(4, 4)
	s.Set(0, 0, x)
	s.Set(1, 1, y)
	s.Set(2, 2, z)
	return s
}
// Scale left-multiplies m by a scaling matrix for (x, y, z),
// enabling fluent chaining of transformations.
func (m Matrix) Scale(x, y, z float64) Matrix {
	return Scaling(x, y, z).Multiply(m)
}
// RotationX returns a 4x4 matrix for a rotation of a radians about the
// x axis.
func RotationX(a float64) Matrix {
	// Hoist the trig calls so each is evaluated once, not twice.
	sin, cos := math.Sin(a), math.Cos(a)
	m := Identity(4, 4)
	m.Set(1, 1, cos)
	m.Set(1, 2, -sin)
	m.Set(2, 1, sin)
	m.Set(2, 2, cos)
	return m
}
// RotateX left-multiplies m by an x-axis rotation of a radians,
// enabling fluent chaining of transformations.
func (m Matrix) RotateX(a float64) Matrix {
	return RotationX(a).Multiply(m)
}
// RotationY returns a 4x4 matrix for a rotation of a radians about the
// y axis.
func RotationY(a float64) Matrix {
	// Hoist the trig calls so each is evaluated once, not twice.
	sin, cos := math.Sin(a), math.Cos(a)
	m := Identity(4, 4)
	m.Set(0, 0, cos)
	m.Set(0, 2, sin)
	m.Set(2, 0, -sin)
	m.Set(2, 2, cos)
	return m
}
// RotateY left-multiplies m by a y-axis rotation of a radians,
// enabling fluent chaining of transformations.
func (m Matrix) RotateY(a float64) Matrix {
	return RotationY(a).Multiply(m)
}
// RotationZ returns a 4x4 matrix for a rotation of a radians about the
// z axis.
func RotationZ(a float64) Matrix {
	// Hoist the trig calls so each is evaluated once, not twice.
	sin, cos := math.Sin(a), math.Cos(a)
	m := Identity(4, 4)
	m.Set(0, 0, cos)
	m.Set(0, 1, -sin)
	m.Set(1, 0, sin)
	m.Set(1, 1, cos)
	return m
}
// RotateZ left-multiplies m by a z-axis rotation of a radians,
// enabling fluent chaining of transformations.
func (m Matrix) RotateZ(a float64) Matrix {
	return RotationZ(a).Multiply(m)
}
// Shearing returns a 4x4 shear matrix. Each parameter names the
// component that changes in proportion to another: xy shifts x in
// proportion to y, zx shifts z in proportion to x, and so on.
func Shearing(xy, xz, yx, yz, zx, zy float64) Matrix {
	sh := Identity(4, 4)
	sh.Set(0, 1, xy)
	sh.Set(0, 2, xz)
	sh.Set(1, 0, yx)
	sh.Set(1, 2, yz)
	sh.Set(2, 0, zx)
	sh.Set(2, 1, zy)
	return sh
}
// Shear left-multiplies m by a shearing matrix built from the six
// proportionality factors, enabling fluent chaining.
func (m Matrix) Shear(xy, xz, yx, yz, zx, zy float64) Matrix {
	return Shearing(xy, xz, yx, yz, zx, zy).Multiply(m)
}
func ViewTransformation(from, to, up tuple.Tuple) Matrix {
forwardNormal := to.Subtract(from).Normalize()
upNormal := up.Normalize()
left := forwardNormal.Cross(upNormal)
trueUp := left.Cross(forwardNormal)
orientation := New(4, 4,
left.X, left.Y, left.Z, 0,
trueUp.X, trueUp.Y, trueUp.Z, 0,
-forwardNormal.X, -forwardNormal.Y, -forwardNormal.Z, 0,
0, 0, 0, 1,
)
return orientation.Multiply(Translation(-from.X, -from.Y, -from.Z))
} | matrix/transformation.go | 0.858778 | 0.684432 | transformation.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.