code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package geotex
import (
"github.com/mmcloughlin/geohash"
"github.com/pkg/errors"
)
var (
	// accuracyToLength maps a geohash precision (hash string length, 2-11)
	// to a quarter of a geohash cell's height (lat) and width (lng) in
	// degrees at that precision. Odd precisions have square cells, which is
	// why lat == lng for 3, 5, 7, 9 and 11.
	accuracyToLength = map[uint]*quarterLength{
		2:  {lat: 1.40625, lng: 2.8125},
		3:  {lat: 0.3515625, lng: 0.3515625},
		4:  {lat: 0.0439453125, lng: 0.087890625},
		5:  {lat: 0.010986328125, lng: 0.010986328125},
		6:  {lat: 0.001373291015625, lng: 0.00274658203125},
		7:  {lat: 0.00034332275390625, lng: 0.00034332275390625},
		8:  {lat: 4.291534423828125e-05, lng: 8.58306884765625e-05},
		9:  {lat: 1.0728836059570312e-05, lng: 1.0728836059570312e-05},
		10: {lat: 1.341104507446289e-06, lng: 2.682209014892578e-06},
		11: {lat: 3.3527612686157227e-07, lng: 3.3527612686157227e-07},
	}
)
// Geotex snaps coordinates to geohash-cell vertices at a fixed precision.
type Geotex struct {
	quarterLength *quarterLength // quarter cell dimensions for this precision
	accuracy      uint           // geohash precision (hash string length)
}

// quarterLength holds a quarter of a geohash cell's height (lat) and
// width (lng) in degrees.
type quarterLength struct {
	lat, lng float64
}
// NewGeotex returns a Geotex configured for the given geohash precision
// (hash string length). Supported precisions are the keys of
// accuracyToLength (2-11); any other value yields an error naming the
// rejected value and listing the supported ones.
func NewGeotex(acc uint) (*Geotex, error) {
	if ql, ok := accuracyToLength[acc]; ok {
		return &Geotex{quarterLength: ql, accuracy: acc}, nil
	}
	// Collect the supported precisions for the error message. Note that map
	// iteration order is randomized, so the listed order varies between runs.
	validAcc := make([]uint, 0, len(accuracyToLength))
	for a := range accuracyToLength {
		validAcc = append(validAcc, a)
	}
	// Fixes the original's malformed message ("invalid accuracy not in :%v")
	// and a loop variable that shadowed acc, losing the offending value.
	return nil, errors.Errorf("invalid accuracy %d: must be one of %v", acc, validAcc)
}
// GetVertex snaps (lat, lng) to the nearest corner of the geohash cell that
// contains the point at the configured precision, returning the corner's
// latitude and longitude.
func (g *Geotex) GetVertex(lat, lng float64) (float64, float64) {
	box := geohash.BoundingBox(geohash.EncodeWithPrecision(lat, lng, g.accuracy))
	// Pick whichever cell edge is closer on each axis (ties go to the max edge).
	vLat := box.MinLat
	if lat-box.MinLat >= box.MaxLat-lat {
		vLat = box.MaxLat
	}
	vLng := box.MinLng
	if lng-box.MinLng >= box.MaxLng-lng {
		vLng = box.MaxLng
	}
	return vLat, vLng
}
func (g *Geotex) GetNearestRectangleInHash(rLat, rLng float64) []string {
return []string{
geohash.EncodeWithPrecision(rLat+g.quarterLength.lat, rLng+g.quarterLength.lng, g.accuracy),
geohash.EncodeWithPrecision(rLat-g.quarterLength.lat, rLng+g.quarterLength.lng, g.accuracy),
geohash.EncodeWithPrecision(rLat+g.quarterLength.lat, rLng-g.quarterLength.lng, g.accuracy),
geohash.EncodeWithPrecision(rLat-g.quarterLength.lat, rLng-g.quarterLength.lng, g.accuracy),
}
} | geotex.go | 0.640636 | 0.471588 | geotex.go | starcoder |
package algorithm
import (
"github.com/alecj1240/astart/api"
)
// ChaseTail returns the coordinate of the snake's tail (the last body
// segment). Panics if the body slice is empty.
func ChaseTail(You []api.Coord) api.Coord {
	return You[len(You)-1]
}
// abs returns the absolute value of x.
func abs(x int) int {
	if x >= 0 {
		return x
	}
	return -x
}
// Manhatten returns the Manhattan (taxicab) distance between two points.
// (The exported name keeps the original spelling to preserve the public API.)
func Manhatten(pointA api.Coord, pointB api.Coord) int {
	return abs(pointB.X-pointA.X) + abs(pointB.Y-pointA.Y)
}
// OnBoard reports whether square lies within a board of the given width and
// height (coordinates are zero-based; width/height are exclusive bounds).
func OnBoard(square api.Coord, boardHeight int, boardWidth int) bool {
	xOK := square.X >= 0 && square.X < boardWidth
	yOK := square.Y >= 0 && square.Y < boardHeight
	return xOK && yOK
}
// SquareBlocked reports whether point is occupied by any snake body segment.
// A snake's final segment (its tail) does not count as blocking.
// NOTE(review): when the matching segment is a tail, this returns false
// immediately without checking the remaining snakes — if another snake could
// also occupy the same square this may under-report a blocked square;
// confirm whether overlapping snakes are possible here.
func SquareBlocked(point api.Coord, Snakes []api.Snake, ) bool {
	for i := 0; i < len(Snakes); i++ {
		for j := 0; j < len(Snakes[i].Body); j++ {
			if Snakes[i].Body[j].X == point.X && Snakes[i].Body[j].Y == point.Y {
				// Last body index == tail: treated as free.
				if len(Snakes[i].Body)-1 == j {
					return false
				}
				return true
			}
		}
	}
	return false
}
// Heading returns the direction ("right", "left", "down" or "up") from
// startingPoint toward headingPoint; the points are expected to be adjacent.
// A larger Y maps to "down". Identical points yield "up".
func Heading(startingPoint api.Coord, headingPoint api.Coord) string {
	dx := headingPoint.X - startingPoint.X
	dy := headingPoint.Y - startingPoint.Y
	switch {
	case dx > 0:
		return "right"
	case dx < 0:
		return "left"
	case dy > 0:
		return "down"
	default:
		return "up"
	}
}
// NearestFood returns the food coordinate closest (by Manhattan distance)
// to You (typically the snake's head). Panics if FoodCoords is empty.
func NearestFood(FoodCoords []api.Coord, You api.Coord) api.Coord {
	nearest := FoodCoords[0]
	nearestDist := Manhatten(FoodCoords[0], You)
	// Start at element 1 (element 0 is already the running best) and compute
	// each distance once; the original re-tested element 0 and called
	// Manhatten twice per improvement.
	for _, food := range FoodCoords[1:] {
		if d := Manhatten(food, You); d < nearestDist {
			nearest = food
			nearestDist = d
		}
	}
	return nearest
}
// HeadOnCollision determines the nearest snake on the board based on the head of the snake
func HeadOnCollision(Destination api.Coord, Snakes []api.Snake, You api.Snake) bool {
destinationAdjacents := GetAdjacentCoords(Destination)
for i := 0; i < len(Snakes); i++ {
for j := 0; j < len(destinationAdjacents); j++ {
if Snakes[i].Body[0] == destinationAdjacents[j] && Snakes[i].ID != You.ID {
return true
}
}
}
return false
} | algorithm/mathfunctions.go | 0.758242 | 0.459804 | mathfunctions.go | starcoder |
package summary
import (
	"math"
	"sort"
)
// Mean returns, for every key appearing in the dataset, the mean of that
// key's values across all entries.
func Mean(dataset []map[string]float64) map[string]float64 {
	result := make(map[string]float64)
	for key, values := range groupByKey(dataset) {
		result[key] = meanOfArray(values)
	}
	return result
}

// Median returns, for every key appearing in the dataset, the median of
// that key's values across all entries.
func Median(dataset []map[string]float64) map[string]float64 {
	result := make(map[string]float64)
	for key, values := range groupByKey(dataset) {
		result[key] = medianOfArray(values)
	}
	return result
}

// Mode returns, for every key appearing in the dataset, the most frequent
// value of that key across all entries.
func Mode(dataset []map[string]float64) map[string]float64 {
	result := make(map[string]float64)
	for key, values := range groupByKey(dataset) {
		result[key] = modeOfArray(values)
	}
	return result
}

// MeanWithoutOutliers returns, per key, the mean of that key's values after
// dropping values more than two standard deviations from the median
// (see RemoveOutlier).
func MeanWithoutOutliers(dataset []map[string]float64) map[string]float64 {
	result := make(map[string]float64)
	for key, values := range groupByKey(dataset) {
		result[key] = meanOfArray(RemoveOutlier(values))
	}
	return result
}

// MedianWithoutOutliers returns, per key, the median of that key's values
// after dropping values more than two standard deviations from the median
// (see RemoveOutlier).
func MedianWithoutOutliers(dataset []map[string]float64) map[string]float64 {
	result := make(map[string]float64)
	for key, values := range groupByKey(dataset) {
		result[key] = medianOfArray(RemoveOutlier(values))
	}
	return result
}
// groupByKey collects the values of every key across all dataset entries,
// keyed by the key name. Within each key's slice, values appear in entry
// order.
func groupByKey(dataset []map[string]float64) map[string][]float64 {
	grouped := make(map[string][]float64)
	for _, entry := range dataset {
		for key, value := range entry {
			grouped[key] = append(grouped[key], value)
		}
	}
	return grouped
}
// RemoveOutlier returns the values of dataset that lie within two standard
// deviations of the dataset's median. Datasets of two or fewer values are
// returned unchanged. The input slice is not modified.
func RemoveOutlier(dataset []float64) []float64 {
	if len(dataset) <= 2 {
		return dataset
	}
	// Two-pass variance: compute the mean first, then sum squared deviations.
	// The original one-pass E[x^2]-E[x]^2 form can go slightly negative from
	// floating-point cancellation, making math.Sqrt return NaN and filtering
	// out every value.
	var sum float64
	for _, x := range dataset {
		sum += x
	}
	mean := sum / float64(len(dataset))
	var sqDev float64
	for _, x := range dataset {
		d := x - mean
		sqDev += d * d
	}
	standardDeviation := math.Sqrt(sqDev / float64(len(dataset)))
	median := medianOfArray(dataset)
	lo, hi := median-2*standardDeviation, median+2*standardDeviation
	filtered := make([]float64, 0, len(dataset))
	for _, x := range dataset {
		if lo <= x && x <= hi {
			filtered = append(filtered, x)
		}
	}
	return filtered
}
// meanOfArray returns the arithmetic mean of dataset.
// An empty slice yields NaN (0/0), matching the original behavior.
func meanOfArray(dataset []float64) float64 {
	var total float64
	for _, v := range dataset {
		total += v
	}
	return total / float64(len(dataset))
}
// medianOfArray returns the median of dataset. The input need not be
// sorted; a copy is sorted internally, so the caller's slice is left
// untouched. (The original assumed pre-sorted input, which its callers —
// Median via map grouping, and RemoveOutlier — do not guarantee.)
// Panics if dataset is empty.
func medianOfArray(dataset []float64) float64 {
	sorted := make([]float64, len(dataset))
	copy(sorted, dataset)
	sort.Float64s(sorted)
	n := len(sorted)
	if n%2 == 0 {
		return (sorted[n/2] + sorted[n/2-1]) / 2
	}
	return sorted[n/2]
}
// modeOfArray returns the most frequent value in dataset. Ties are broken
// deterministically in favor of the smallest value; the original picked an
// arbitrary winner because Go map iteration order is randomized.
// An empty dataset yields 0.
func modeOfArray(dataset []float64) float64 {
	counts := make(map[float64]int, len(dataset))
	for _, x := range dataset {
		counts[x]++
	}
	modeX, modeCount := 0.0, 0
	for x, count := range counts {
		if count > modeCount || (count == modeCount && x < modeX) {
			modeX, modeCount = x, count
		}
	}
	return modeX
}
package datastructs
import "fmt"
// BTreeNode is a node of a binary (search) tree. Each node stores an opaque
// key plus links to its parent and left/right children; nil links mark the
// boundaries of the tree.
type BTreeNode struct {
	key    interface{}
	parent *BTreeNode
	left   *BTreeNode
	right  *BTreeNode
}
// PrintBinaryTree prints the tree's keys in preorder (node, left subtree,
// right subtree), tab-separated, to standard output.
func PrintBinaryTree(root *BTreeNode) {
	if root == nil {
		return
	}
	fmt.Print(root.key, "\t")
	// The nil guard at the top makes explicit child checks unnecessary.
	PrintBinaryTree(root.left)
	PrintBinaryTree(root.right)
}
// PrintBinaryTree2 prints the tree's keys in inorder (left, node, right),
// tab-separated, without recursion, using an explicit stack.
// see http://www.geeksforgeeks.org/inorder-tree-traversal-without-recursion/
// 1. Create an empty stack S.
// 2. Initialize current node as root
// 3. Push current node to S and set current = current.left until current = nil
// 4. If current is nil and stack is not empty then
//    1) Pop the top item from stack.
//    2) Print the popped item, set current = poppedItem.right
//    3) Go to step 3.
// 5. If current is nil and stack is empty then we are done
func PrintBinaryTree2(root *BTreeNode) {
	if root == nil {
		return
	}
	stack := NewListStack()
	// Descend to the leftmost node, stacking ancestors along the way.
	for root != nil {
		stack.Push(root)
		root = root.left
	}
	for !stack.IsEmpty() {
		p := stack.Pop().(*BTreeNode) //type assertion, see https://tour.golang.org/methods/15
		fmt.Print(p.key, "\t")
		// After visiting p, walk down the left spine of its right subtree.
		root = p.right
		for root != nil {
			stack.Push(root)
			root = root.left
		}
	}
}
// TreeHeight returns the height of the binary tree rooted at tree: the
// number of edges on the longest root-to-leaf path. An empty tree has
// height -1 and a single node has height 0.
func TreeHeight(tree *BTreeNode) int {
	if tree == nil {
		return -1
	}
	l := TreeHeight(tree.left)
	r := TreeHeight(tree.right)
	if l > r {
		return l + 1
	}
	return r + 1
}
// TreeSearch recursively searches the binary search tree rooted at tree for
// a node whose key compares equal to key, returning the node or nil when
// absent. key must implement the Comparable interface or the function
// panics.
// Fixes a nil-pointer dereference in the original: tree.key was read before
// the tree == nil check, so any search that reached an empty subtree (i.e.
// every miss, and any call on an empty tree) panicked.
func TreeSearch(tree *BTreeNode, key interface{}) *BTreeNode {
	c1, ok := key.(Comparable)
	if !ok {
		panic("key must implement Comparable interafce")
	}
	if tree == nil {
		return nil
	}
	switch c := c1.compare(tree.key); {
	case c == 0:
		return tree
	case c < 0:
		return TreeSearch(tree.left, key)
	default:
		return TreeSearch(tree.right, key)
	}
}
// TreeSearchIterative iteratively searches the binary search tree rooted at
// tree for a node whose key compares equal to key, returning the node or
// nil when absent. key must implement the Comparable interface or the
// function panics.
// Fixes two defects in the original: tree.key was compared before checking
// tree != nil (panicking on an empty tree), and the loop condition tested a
// stale comparison result from the previous iteration.
func TreeSearchIterative(tree *BTreeNode, key interface{}) *BTreeNode {
	c1, ok := key.(Comparable)
	if !ok {
		panic("key must implement Comparable interafce")
	}
	for tree != nil {
		switch c := c1.compare(tree.key); {
		case c < 0:
			tree = tree.left
		case c > 0:
			tree = tree.right
		default:
			return tree
		}
	}
	return nil
}
// TreeMinimum returns the leftmost node (the minimum key) of the subtree
// rooted at tree, or nil for an empty subtree.
func TreeMinimum(tree *BTreeNode) *BTreeNode {
	if tree == nil {
		return nil
	}
	node := tree
	for node.left != nil {
		node = node.left
	}
	return node
}
// TreeMaximum returns the rightmost node (the maximum key) of the subtree
// rooted at tree, or nil for an empty subtree.
func TreeMaximum(tree *BTreeNode) *BTreeNode {
	if tree == nil {
		return nil
	}
	node := tree
	for node.right != nil {
		node = node.right
	}
	return node
}
// TreeSuccessor returns the in-order successor of node in a binary search
// tree, or nil when node holds the maximum key.
func TreeSuccessor(node *BTreeNode) *BTreeNode {
	// With a right subtree, the successor is that subtree's minimum.
	if node.right != nil {
		return TreeMinimum(node.right)
	}
	// Otherwise climb until we arrive from a left child; that ancestor is
	// the successor (nil when node was the tree maximum).
	p := node.parent
	for p != nil && node == p.right {
		node = p
		p = p.parent
	}
	return p
}
// TreePredecessor returns the in-order predecessor of node in a binary
// search tree, or nil when node holds the minimum key.
func TreePredecessor(node *BTreeNode) *BTreeNode {
	// With a left subtree, the predecessor is that subtree's maximum.
	if node.left != nil {
		return TreeMaximum(node.left)
	}
	// Otherwise climb until we arrive from a right child; that ancestor is
	// the predecessor (nil when node was the tree minimum).
	p := node.parent
	for p != nil && node == p.left {
		node = p
		p = p.parent
	}
	return p
}
// TreeInsert inserts value v into the binary search tree rooted at root and
// returns the (possibly new) root. v must implement the Comparable
// interface or the function panics. Keys comparing equal go into the right
// subtree.
func TreeInsert(root *BTreeNode, v interface{}) *BTreeNode {
	v0, ok := v.(Comparable)
	if !ok {
		panic("must be comparable")
	}
	z := &BTreeNode{key: v}
	// Walk down to the leaf position for z; p trails r and becomes the new
	// node's parent.
	var p *BTreeNode
	r0 := root
	r := root
	for r != nil {
		p = r
		if v0.compare(r.key) < 0 {
			r = r.left
		} else {
			r = r.right
		}
	}
	z.parent = p
	if p == nil {
		// Empty tree: the new node becomes the root.
		r0 = z
	} else if v0.compare(p.key) < 0 {
		p.left = z
	} else {
		p.right = z
	}
	return r0
}
// TreeDelete removes node z from the binary search tree rooted at root and
// returns the (possibly new) root. A nil z is a no-op. This is the
// standard CLRS deletion: a node with at most one child is spliced out via
// transplant; a node with two children is replaced by its in-order
// successor (the minimum of its right subtree).
func TreeDelete(root, z *BTreeNode) *BTreeNode {
	if z == nil {
		return root
	}
	if z.left == nil {
		root = transplant(root, z, z.right)
	} else if z.right == nil {
		root = transplant(root, z, z.left)
	} else {
		// Two children: successor y takes z's place.
		y := TreeMinimum(z.right)
		if y.parent != z {
			// Detach y from its parent first, giving y z's right subtree.
			root = transplant(root, y, y.right)
			y.right = z.right
			y.right.parent = y
		}
		root = transplant(root, z, y)
		y.left = z.left
		y.left.parent = y
	}
	return root
}
// transplant replaces one subtree as a child of its parent with another subtree.
// When transplant replaces the subtree rooted at node u with the subtree rooted at node v,
// node u's parent becomes node v's parent, and u's parent ends up having v as its appropriate child.
// It returns the (possibly new) root: when u was the root, v becomes the root.
// v may be nil (removing the subtree entirely).
func transplant(root, u, v *BTreeNode) *BTreeNode {
	if u.parent == nil {
		root = v
	} else if u == u.parent.left {
		u.parent.left = v
	} else {
		u.parent.right = v
	}
	if v != nil {
		v.parent = u.parent
	}
	return root
}
// TreeNode is a node of a general (n-ary) tree in left-child/right-sibling
// representation: each node links to its first child via leftChild, and the
// remaining children of a parent are chained through rightSibling.
type TreeNode struct {
	key          interface{}
	parent       *TreeNode
	leftChild    *TreeNode
	rightSibling *TreeNode
}
//PrintTree preorder recursion
func PrintTree(root *TreeNode) {
if root == nil {
return
}
fmt.Print("\t")
fmt.Print(root.key)
if root.leftChild != nil {
PrintTree(root.leftChild)
}
if root.rightSibling != nil {
PrintTree(root.rightSibling)
}
} | datastructs/tree.go | 0.691185 | 0.432003 | tree.go | starcoder |
package list
import (
. "github.com/flowonyx/functional"
"github.com/flowonyx/functional/option"
)
// Indexed converts values into Pairs of each value with its index:
// element i becomes PairOf(i, values[i]), preserving order.
func Indexed[T any](values []T) []Pair[int, T] {
	output := make([]Pair[int, T], len(values))
	for i := range values {
		output[i] = PairOf(i, values[i])
	}
	return output
}
// indexFunc returns the index of the first element of input satisfying
// search, scanning front-to-back, or back-to-front when reverse is true.
// It returns -1 when no element matches or input is empty.
// NOTE(review): the reverse scan relies on DoRangeUntil iterating downward
// when start > end, and presumably stopping at the first index for which
// the callback returns true — confirm against the functional package docs.
func indexFunc[T any](search func(T) bool, input []T, reverse bool) int {
	if len(input) == 0 {
		return -1
	}
	index := -1
	start, end := 0, len(input)-1
	if reverse {
		start, end = end, start
	}
	DoRangeUntil(func(i int) bool {
		if search(input[i]) {
			index = i
			return true
		}
		return false
	}, start, end)
	return index
}
// IndexOf returns the first index of search within values,
// or -1 when search is not present.
func IndexOf[T comparable](search T, values []T) int {
	return indexFunc(func(t T) bool { return t == search }, values, false)
}

// IndexOfBack returns the last index of search within input,
// or -1 when search is not present.
func IndexOfBack[T comparable](search T, input []T) int {
	return indexFunc(func(t T) bool { return t == search }, input, true)
}

// IndexBy returns the first index within values whose element matches the
// search predicate, or -1 when no element matches.
func IndexBy[T any](search func(T) bool, values []T) int {
	return indexFunc(search, values, false)
}

// IndexByBack returns the last index within values whose element matches
// the search predicate, or -1 when no element matches.
func IndexByBack[T any](search func(T) bool, values []T) int {
	return indexFunc(search, values, true)
}
// TryIndexOf returns the first index of search within input as an Option:
// Some(index) when found, None when search is not present.
// This function is probably only useful if you are working heavily with Options.
func TryIndexOf[T comparable](search T, input []T) option.Option[int] {
	if i := IndexOf(search, input); i >= 0 {
		return option.Some(i)
	}
	return option.None[int]()
}

// TryIndexOfBack returns the last index of search within input as an
// Option: Some(index) when found, None when search is not present.
// This function is probably only useful if you are working heavily with Options.
func TryIndexOfBack[T comparable](search T, input []T) option.Option[int] {
	if i := IndexOfBack(search, input); i >= 0 {
		return option.Some(i)
	}
	return option.None[int]()
}
// TryIndexBy returns the first index within input whose element matches the
// search predicate, as an Option: Some(index) when found, None otherwise.
// This function is probably only useful if you are working heavily with Options.
func TryIndexBy[T any](search func(T) bool, input []T) option.Option[int] {
	if i := IndexBy(search, input); i >= 0 {
		return option.Some(i)
	}
	return option.None[int]()
}
// TryIndexByBack returns the last index within values that matches the search predicate as an Option.
// If search is not in values, it returns None.
// This function is probably only useful if you working heavily with Options.
func TryIndexByBack[T any](search func(T) bool, input []T) option.Option[int] {
if i := IndexByBack(search, input); i >= 0 {
return option.Some(i)
}
return option.None[int]()
} | list/index.go | 0.69368 | 0.473596 | index.go | starcoder |
package simulations
import (
"fmt"
"image"
"image/color"
"math/rand"
"strings"
"github.com/domtriola/automata/internal/models"
"github.com/domtriola/automata/internal/palette"
)
// Compile-time check that *CellularAutomata satisfies models.Simulation.
var _ models.Simulation = &CellularAutomata{}

// CellularAutomata simulates a scenario where cells in a 2-dimensional world
// can hunt and eat each other based on a set of simple parameters.
type CellularAutomata struct {
	cfg     CellularAutomataConfig // simulation parameters
	palette color.Palette          // one color per species, built by setPalette
}

// CellularAutomataConfig holds the configurations for the cellular automata
// simulation.
type CellularAutomataConfig struct {
	nSpecies          int      // number of species in the cyclic predator/prey chain
	predatorThreshold int      // neighboring predators needed to convert a cell
	predatorDirs      []string // neighbor directions considered for predation
}
// NewCellularAutomata initializes and returns a new cellular automata
// simulation from the given config, pre-building the species color palette.
// Note: the simulation value is returned (non-nil) even when err is non-nil.
func NewCellularAutomata(cfg models.SimulationConfig) (*CellularAutomata, error) {
	s := &CellularAutomata{cfg: CellularAutomataConfig{
		nSpecies:          cfg.CellularAutomata.NSpecies,
		predatorThreshold: cfg.CellularAutomata.PredatorThreshold,
		predatorDirs:      cfg.CellularAutomata.PredatorDirs,
	}}
	err := s.setPalette()
	return s, err
}
// OutputName builds a descriptive output file name from the simulation's
// parameters (species count, predator threshold and predator directions).
// It never fails; the error return satisfies the Simulation interface.
func (s *CellularAutomata) OutputName() (string, error) {
	name := fmt.Sprintf(
		"cellular-automata_%d_%d_%s",
		s.cfg.nSpecies,
		s.cfg.predatorThreshold,
		strings.Join(s.cfg.predatorDirs, ""),
	)
	return name, nil
}
// InitializeGrid populates every space of the grid with an organism of a
// random species (IDs 1..nSpecies), then wires up each organism's neighbor
// list according to the configured predator directions.
func (s *CellularAutomata) InitializeGrid(g *models.Grid) error {
	oID := 0
	// Populate each space with a randomly chosen species.
	for _, row := range g.Rows {
		for _, space := range row {
			o := models.NewOrganism(oID)
			o.Features.SpeciesID = 1 + rand.Intn(s.cfg.nSpecies)
			space.Organism = o
			oID++
		}
	}
	// Let each organism know about its neighbors.
	for y, row := range g.Rows {
		for x, space := range row {
			for _, ns := range g.GetNeighbors(x, y, s.cfg.predatorDirs) {
				space.Organism.Features.Neighbors = append(
					space.Organism.Features.Neighbors,
					ns.Organism,
				)
			}
		}
	}
	return nil
}
// AdvanceFrame determines and assigns the next state of each organism's
// parameters. The whole next generation is computed before any of it is
// applied, so updates within one frame do not influence each other.
func (s *CellularAutomata) AdvanceFrame(g *models.Grid) error {
	s.calculateNextFrame(g)
	s.applyNextFrame(g)
	return nil
}
// calculateNextFrame marks every organism with at least predatorThreshold
// predator neighbors for conversion to its predator's species, staging the
// change in NextFeatures (applied later by applyNextFrame).
func (s *CellularAutomata) calculateNextFrame(g *models.Grid) {
	for _, row := range g.Rows {
		for _, space := range row {
			predatorCount := 0
			for _, n := range space.Organism.Features.Neighbors {
				if s.predator(n, space.Organism) {
					predatorCount++
				}
			}
			if predatorCount >= s.cfg.predatorThreshold {
				s.incrementNextSpeciesID(space.Organism)
			}
		}
	}
}
// incrementNextSpeciesID stages o's conversion to the next species in the
// cycle: ID n becomes n+1, wrapping from nSpecies back to 1.
func (s *CellularAutomata) incrementNextSpeciesID(o *models.Organism) {
	o.NextFeatures.SpeciesID = o.Features.SpeciesID%s.cfg.nSpecies + 1
}
// predator reports whether neighbor preys on o: species form a cycle in
// which species n is hunted by species n%nSpecies+1.
func (s *CellularAutomata) predator(neighbor *models.Organism, o *models.Organism) bool {
	return neighbor.Features.SpeciesID == o.Features.SpeciesID%s.cfg.nSpecies+1
}
// applyNextFrame commits the species changes staged by calculateNextFrame
// and resets the staging field (0 is the "no pending change" sentinel).
func (s *CellularAutomata) applyNextFrame(g *models.Grid) {
	for _, row := range g.Rows {
		for _, space := range row {
			if space.Organism.NextFeatures.SpeciesID > 0 {
				space.Organism.Features.SpeciesID = space.Organism.NextFeatures.SpeciesID
				space.Organism.NextFeatures.SpeciesID = 0
			}
		}
	}
}
// DrawSpace colors the pixel at (x, y) with the palette entry for the
// space's species; SpeciesID n maps to palette index n-1.
// Fixes an off-by-one in the bounds check: the original accepted
// colorIndex == len(img.Palette), which is out of range.
func (s *CellularAutomata) DrawSpace(
	sp *models.Space,
	img *image.Paletted,
	x int,
	y int,
) error {
	colorIndex := sp.Organism.Features.SpeciesID - 1
	if colorIndex < 0 || colorIndex >= len(img.Palette) {
		return fmt.Errorf("colorIndex: %d out of bounds of rect: %+v", colorIndex, img.Bounds())
	}
	img.SetColorIndex(x, y, uint8(colorIndex))
	return nil
}
// GetPalette returns the simulation's color palette (one entry per species).
func (s *CellularAutomata) GetPalette() color.Palette {
	return s.palette
}
// setPalette builds and caches the species color palette, returning any
// error from palette creation (in which case s.palette is left unset).
func (s *CellularAutomata) setPalette() error {
	p, err := createPalette(s.cfg.nSpecies)
	if err != nil {
		return err
	}
	s.palette = p
	return nil
}
func createPalette(nSpecies int) (color.Palette, error) {
colors := color.Palette{}
rainbow, err := palette.Rainbow(7)
if err != nil {
return rainbow, err
}
step := len(rainbow) / nSpecies
for i := 0; i < nSpecies; i++ {
colors = append(colors, rainbow[i*step])
}
return colors, nil
} | internal/simulations/cellularAutomata.go | 0.810254 | 0.409044 | cellularAutomata.go | starcoder |
package pkg
import (
"io"
"regexp"
"strings"
"github.com/rotisserie/eris"
)
// BoolNode is a node of a parsed boolean expression tree; Eval evaluates
// the subtree against a map of variable name -> value.
type BoolNode interface {
	Eval(map[string]bool) bool
}

// boolAnd is a logical AND of two subexpressions.
type boolAnd struct {
	left  BoolNode
	right BoolNode
}

// Eval returns true when both operands evaluate to true.
func (n boolAnd) Eval(vars map[string]bool) bool {
	return n.left.Eval(vars) && n.right.Eval(vars)
}

// boolOr is a logical OR of two subexpressions.
type boolOr struct {
	left  BoolNode
	right BoolNode
}

// Eval returns true when either operand evaluates to true.
func (n boolOr) Eval(vars map[string]bool) bool {
	return n.left.Eval(vars) || n.right.Eval(vars)
}

// boolVar is a reference to a named variable.
type boolVar struct {
	name string
}

// Eval looks the variable up in vars; missing names evaluate to false
// (the map zero value).
func (n boolVar) Eval(vars map[string]bool) bool {
	return vars[n.name]
}

// boolParen is a parenthesized subexpression, optionally negated.
type boolParen struct {
	node    BoolNode
	negated bool
}

// Eval evaluates the inner expression, inverting the result when negated.
func (n boolParen) Eval(vars map[string]bool) bool {
	if n.negated {
		return !n.node.Eval(vars)
	}
	return n.node.Eval(vars)
}

// boolNegate is a logical NOT of a subexpression.
type boolNegate struct {
	node BoolNode
}

// Eval returns the inverse of the inner expression.
func (n boolNegate) Eval(vars map[string]bool) bool {
	return !n.node.Eval(vars)
}

var (
	// letterSymRe matches the characters allowed to start an identifier.
	letterSymRe = regexp.MustCompile("[a-zA-Z_]")
	// numberRe matches digits, allowed in identifiers after the first rune.
	numberRe = regexp.MustCompile("[0-9]")
)
// ParseBoolExpr parses a boolean expression over identifiers combined with
// && and ||, with ! negation and parentheses, into a BoolNode tree.
// It is a single-pass state machine:
//
//	state 0 - reading the first variable name (or an opening paren)
//	state 1 - expecting an operator (&& or ||)
//	state 2 - expecting the start of the right-hand operand
//	state 3 - reading a variable name after an operator
//
// NOTE(review): operators are applied left-to-right as encountered — there
// is no precedence between && and ||; presumably intentional, confirm that
// callers expect this.
func ParseBoolExpr(input string) (BoolNode, error) {
	scanner := strings.NewReader(input)
	stack := make([]BoolNode, 0, 3)
	state := uint8(0)
	buffer := make([]rune, 0, 10) // accumulates the identifier being read
	expect := ' '
	shouldNegate := false
	for {
		char, _, err := scanner.ReadRune()
		if err != nil {
			if eris.Is(err, io.EOF) {
				break
			}
			return nil, err
		}
		switch state {
		case 0:
			// looking for a var name
			if char == '(' {
				stack = append(stack, boolParen{node: nil})
				continue
			}
			// skip leading whitespace
			if (char == ' ' || char == '\t') && len(buffer) == 0 {
				continue
			}
			if char == ' ' || char == '\t' || char == '|' || char == '&' {
				// end of var name
				node := BoolNode(boolVar{name: string(buffer)})
				if shouldNegate {
					shouldNegate = false
					node = boolNegate{node: node}
				}
				stack = append(stack, node)
				buffer = buffer[:0]
				state = 1
				expect = ' '
				if char == '|' || char == '&' {
					// First half of the operator already seen; expect its twin.
					expect = char
				}
				continue
			}
			if char == '!' {
				shouldNegate = !shouldNegate
				continue
			}
			// Identifiers start with a letter/underscore; digits allowed after.
			if !letterSymRe.MatchString(string(char)) {
				if len(buffer) == 0 || !numberRe.MatchString(string(char)) {
					return nil, eris.Errorf("expected a-z, A-Z or _ but found %c", char)
				}
			}
			buffer = append(buffer, char)
		case 1:
			// looking for an operator
			if expect == ' ' {
				if char == ' ' || char == '\t' {
					continue
				}
				if char == '&' || char == '|' {
					expect = char
					continue
				}
				return nil, eris.Errorf("expected operator (&& or ||) but found %c", char)
			}
			if char != expect {
				return nil, eris.Errorf("expected %c but found %c", expect, char)
			}
			// Full operator consumed: wrap the stack top as the left operand.
			top := len(stack) - 1
			switch char {
			case '&':
				stack[top] = boolAnd{left: stack[top]}
			case '|':
				stack[top] = boolOr{left: stack[top]}
			default:
				return nil, eris.Errorf("unreachable code, got operator %c", char)
			}
			state = 2
		case 2:
			// expecting the start of the right-hand operand
			if char == ' ' || char == '\t' {
				continue
			}
			if char == '!' {
				shouldNegate = !shouldNegate
				continue
			}
			if char == '(' {
				stack = append(stack, boolParen{node: nil, negated: shouldNegate})
				shouldNegate = false
				state = 0
				continue
			}
			// Anything else starts a variable name; re-read it in state 3.
			err = scanner.UnreadRune()
			if err != nil {
				return nil, err
			}
			state = 3
		case 3:
			// looking for variable after operator
			top := len(stack) - 1
			if char == ' ' || char == '\t' || char == '|' || char == '&' || char == ')' {
				// end of var name
				varNode := BoolNode(boolVar{name: string(buffer)})
				buffer = buffer[:0]
				if shouldNegate {
					shouldNegate = false
					varNode = boolNegate{node: varNode}
				}
				// Attach the variable as the right operand of the pending op.
				switch node := stack[top].(type) {
				case boolAnd:
					node.right = varNode
					stack[top] = node
				case boolOr:
					node.right = varNode
					stack[top] = node
				default:
					return nil, eris.Errorf("unexpected stack top, expected boolAnd or boolOr but found %v", stack[top])
				}
				state = 1
				expect = ' '
				if char == '|' || char == '&' {
					expect = char
				}
				if char == ')' {
					// Close the innermost paren group.
					preTop := len(stack) - 2
					parenNode, ok := stack[preTop].(boolParen)
					if !ok {
						return nil, eris.Errorf("unexpected ), current node on stack is %v", stack[preTop])
					}
					if parenNode.negated {
						stack[top] = boolNegate{node: stack[top]}
					}
					// replace the paren node with the current top node
					if top > 1 {
						switch node := stack[top-2].(type) {
						case boolAnd:
							node.right = stack[top]
							stack[top-2] = node
							stack = stack[:preTop]
						case boolOr:
							node.right = stack[top]
							stack[top-2] = node
							stack = stack[:preTop]
						default:
							stack[preTop] = stack[top]
							stack = stack[:top]
						}
					} else {
						stack[preTop] = stack[top]
						stack = stack[:top]
					}
				}
				continue
			}
			if !letterSymRe.MatchString(string(char)) {
				if len(buffer) == 0 || !numberRe.MatchString(string(char)) {
					return nil, eris.Errorf("expected a-z, A-Z or _ but found %c", char)
				}
			}
			buffer = append(buffer, char)
		}
	}
	// End of input: exactly one finished node must remain.
	if len(stack) > 1 {
		return nil, eris.Errorf("more than one node left on stack: %v", stack)
	}
	// A trailing identifier may still be buffered (input ended mid-name).
	if len(buffer) > 0 {
		varNode := BoolNode(boolVar{name: string(buffer)})
		if shouldNegate {
			varNode = boolNegate{node: varNode}
		}
		if len(stack) == 0 {
			stack = append(stack, varNode)
		} else {
			switch node := stack[0].(type) {
			case boolAnd:
				node.right = varNode
				stack[0] = node
			case boolOr:
				node.right = varNode
				stack[0] = node
			default:
				return nil, eris.Errorf("found var string after node %v", stack[0])
			}
		}
	}
	if len(stack) == 0 {
		return nil, eris.New("no expression found in input")
	}
	return stack[0], nil
} | packages/build-tools/pkg/bool_parser.go | 0.587943 | 0.432543 | bool_parser.go | starcoder |
package list1
import "math"
// first_last6 reports whether 6 appears as the first or the last element of
// nums. nums must contain at least one element.
func first_last6(nums []int) bool {
	first, last := nums[0], nums[len(nums)-1]
	return first == 6 || last == 6
}
// same_first_last reports whether nums has at least one element and its
// first and last elements are equal. Per the spec ("the array is length 1
// or more, and the first element and the last element are equal"), a
// single-element array returns true; the original required length > 1 and
// wrongly returned false for it.
func same_first_last(nums []int) bool {
	return len(nums) >= 1 && nums[0] == nums[len(nums)-1]
}
// make_pi returns the first three digits of pi as a slice: {3, 1, 4}.
func make_pi() []int {
	digits := make([]int, 0, 3)
	digits = append(digits, 3, 1, 4)
	return digits
}
// common_end reports whether slices a and b (each length >= 1) share the
// same first element or the same last element.
func common_end(a, b []int) bool {
	if a[0] == b[0] {
		return true
	}
	return a[len(a)-1] == b[len(b)-1]
}
// sum3 returns the sum of the three elements of nums (length 3 assumed).
func sum3(nums []int) int {
	total := 0
	for i := 0; i < 3; i++ {
		total += nums[i]
	}
	return total
}
// rotate_left3 returns a new slice with the elements of nums (length 3)
// rotated left by one position, so {1, 2, 3} yields {2, 3, 1}.
// The original — append(nums[2:3], nums[:2]...) — actually rotated right
// ({1, 2, 3} -> {3, 1, 2}), contradicting its own spec.
func rotate_left3(nums []int) []int {
	return []int{nums[1], nums[2], nums[0]}
}
// reverse3 returns a new slice with the three elements of nums in reverse
// order, so {1, 2, 3} becomes {3, 2, 1}.
func reverse3(nums []int) []int {
	out := make([]int, 3)
	for i := 0; i < 3; i++ {
		out[i] = nums[2-i]
	}
	return out
}
// max_end3 returns a length-3 slice in which every element is the larger of
// nums' first and last elements.
func max_end3(nums []int) []int {
	m := nums[0]
	if nums[2] > m {
		m = nums[2]
	}
	return []int{m, m, m}
}
// sum2 returns the sum of the first two elements of nums. Shorter slices
// sum what exists: a single element returns itself, an empty slice returns 0.
// The original returned nums[0] + nums[2] — the wrong element for length >= 3
// and an index-out-of-range panic for slices of length exactly 2.
func sum2(nums []int) int {
	switch {
	case len(nums) == 0:
		return 0
	case len(nums) == 1:
		return nums[0]
	default:
		return nums[0] + nums[1]
	}
}
// middle_way returns a length-2 slice holding the middle elements of the
// two length-3 input slices.
func middle_way(a, b []int) []int {
	middles := make([]int, 2)
	middles[0], middles[1] = a[1], b[1]
	return middles
}
// make_ends returns a length-2 slice with the first and last elements of
// nums (length >= 1); a single-element slice yields that element twice.
func make_ends(nums []int) []int {
	first, last := nums[0], nums[len(nums)-1]
	return []int{first, last}
}
// has23 reports whether nums (length 2 per the spec, though any length
// works) contains a 2 or a 3.
func has23(nums []int) bool {
	for _, v := range nums {
		switch v {
		case 2, 3:
			return true
		}
	}
	return false
}
package float
import (
"fmt"
"strconv"
"strings"
)
// Parse parses a float string compatible with Format. It is a convenience
// wrapper around ParseDetails that discards the detected separator and
// decimal information.
// See: https://en.wikipedia.org/wiki/Decimal_separator
func Parse(str string) (float64, error) {
	f, _, _, _, err := ParseDetails(str)
	return f, err
}
// ParseDetails parses a float string compatible with Format
// and returns the detected integer thousands separator and decimal
// separator characters, plus the number of decimal digits.
// Accepted grouping/decimal characters are '.', ',', '\'' and space;
// whichever of them appears last (switching character) is taken as the
// decimal separator, the earlier repeated one as the thousands separator.
// If a separator was not detected, zero is returned for thousandsSep or
// decimalSep.
// See: https://en.wikipedia.org/wiki/Decimal_separator
func ParseDetails(str string) (f float64, thousandsSep, decimalSep rune, decimals int, err error) {
	var (
		lastDigitIndex    = -1
		lastNonDigitIndex = -1
		pointWritten      = false
		eIndex            = -1
		numMinus          int
		numGroupingRunes  int
		lastGroupingRune  rune
		lastGroupingIndex int
		skipFirst         int // skip first bytes of str
		skipLast          int // skip last bytes of str
		floatBuilder      strings.Builder
	)
	str = strings.TrimSpace(str)
	floatBuilder.Grow(len(str))
	// detect the sign, allowed positions are start and end
	for i, r := range str {
		switch {
		case r == 'e', r == 'E':
			eIndex = i
		case r == '-':
			switch {
			case i == 0:
				skipFirst = 1
			case i == len(str)-1:
				skipLast = 1
			case i == eIndex+1:
				// exponent sign (e.g. "1e-5"), handled in the main loop
				continue
			default:
				return 0, 0, 0, 0, fmt.Errorf("minus can only be used as first or last character: %q", str)
			}
			// '-' is ASCII, so the byte cast is safe
			floatBuilder.WriteByte(byte(r))
			numMinus = 1
		case r == '+':
			switch {
			case i == 0:
				skipFirst = 1
			case i == len(str)-1:
				skipLast = 1
			case i == eIndex+1:
				continue
			default:
				return 0, 0, 0, 0, fmt.Errorf("plus can only be used as first or last character: %q", str)
			}
		}
	}
	eIndex = -1
	// remove the sign from the string and trim space in case the removal left one
	trimmedSignsStr := strings.TrimSpace(str[skipFirst : len(str)-skipLast])
	for i, r := range trimmedSignsStr {
		switch {
		case r >= '0' && r <= '9':
			lastDigitIndex = i
		case r == '.' || r == ',' || r == '\'':
			if pointWritten {
				return 0, 0, 0, 0, fmt.Errorf("no further separators allowed after decimal separator: %q", str)
			}
			// Write everything after the lastNonDigitIndex and before current index
			floatBuilder.WriteString(trimmedSignsStr[lastNonDigitIndex+1 : i])
			if numGroupingRunes == 0 {
				// This is the first grouping rune, just save it
				numGroupingRunes = 1
				lastGroupingRune = r
				lastGroupingIndex = i
			} else {
				// It's a further grouping rune, has to be 3 bytes since last grouping rune
				if i-(lastGroupingIndex+1) != 3 {
					return 0, 0, 0, 0, fmt.Errorf("thousands separators have to be 3 characters apart: %q", str)
				}
				numGroupingRunes++
				if r == lastGroupingRune {
					if numGroupingRunes == 2 {
						// First integer group may hold at most 3 digits (minus excluded)
						if floatBuilder.Len()-numMinus > 6 {
							return 0, 0, 0, 0, fmt.Errorf("thousands separators have to be 3 characters apart: %q", str)
						}
					}
					// If it's the same grouping rune, then just save it
					lastGroupingRune = r
					lastGroupingIndex = i
				} else {
					// If it's a different grouping rune, then we have
					// reached the decimal separator
					floatBuilder.WriteByte('.')
					pointWritten = true
					thousandsSep = lastGroupingRune
					decimalSep = r
				}
			}
			lastNonDigitIndex = i
		case r == ' ':
			if pointWritten {
				return 0, 0, 0, 0, fmt.Errorf("no further separators allowed after decimal separator: %q", str)
			}
			// Write everything after the lastNonDigitIndex and before current index
			floatBuilder.WriteString(trimmedSignsStr[lastNonDigitIndex+1 : i])
			if numGroupingRunes == 0 {
				// This is the first grouping rune, just save it
				numGroupingRunes = 1
				lastGroupingRune = r
				lastGroupingIndex = i
			} else {
				// It's a further grouping rune, has to be 3 bytes since last grouping rune
				if i-(lastGroupingIndex+1) != 3 {
					return 0, 0, 0, 0, fmt.Errorf("thousands separators have to be 3 characters apart: %q", str)
				}
				numGroupingRunes++
				if r == lastGroupingRune {
					if numGroupingRunes == 2 {
						if floatBuilder.Len()-numMinus > 6 {
							return 0, 0, 0, 0, fmt.Errorf("thousands separators have to be 3 characters apart: %q", str)
						}
					}
					// If it's the same grouping rune, then just save it
					lastGroupingRune = r
					lastGroupingIndex = i
				} else {
					// Spaces only are used as thousands separators.
					// If the last separator was not a space, something is wrong
					return 0, 0, 0, 0, fmt.Errorf("space can not be used after another thousands separator: %q", str)
				}
			}
			lastNonDigitIndex = i
		case r == 'e', r == 'E':
			if i == 0 || eIndex != -1 {
				return 0, 0, 0, 0, fmt.Errorf("e can't be the first or a repeating character: %q", str)
			}
			// A single pending grouping rune before the exponent must have
			// been the decimal separator.
			if numGroupingRunes > 0 && !pointWritten {
				floatBuilder.WriteByte('.')
				pointWritten = true
				decimalSep = '.'
			}
			floatBuilder.WriteString(trimmedSignsStr[lastNonDigitIndex+1 : i+1]) // i+1 to write including the 'e'
			lastNonDigitIndex = i
			eIndex = i
		case (r == '-' || r == '+') && i == eIndex+1:
			// exponent sign directly after 'e'/'E'
			floatBuilder.WriteRune(r)
			lastNonDigitIndex = i
		default:
			return 0, 0, 0, 0, fmt.Errorf("invalid rune '%s' in %q", string(r), str)
		}
	}
	// A single unresolved grouping rune was the decimal separator; several
	// identical ones were thousands separators.
	if numGroupingRunes > 0 && !pointWritten {
		if numGroupingRunes > 1 {
			// If more than one grouping rune has been written, but no point
			// then it was pure integer grouping, so the last there
			// have to be 3 bytes since last grouping rune
			if lastDigitIndex-lastGroupingIndex != 3 {
				return 0, 0, 0, 0, fmt.Errorf("thousands separators have to be 3 characters apart: %q", str)
			}
			thousandsSep = lastGroupingRune
		} else {
			floatBuilder.WriteByte('.')
			pointWritten = true
			decimalSep = lastGroupingRune
		}
	}
	// Flush any trailing digits not yet written.
	if lastDigitIndex >= lastNonDigitIndex {
		floatBuilder.WriteString(trimmedSignsStr[lastNonDigitIndex+1 : lastDigitIndex+1])
	}
	floatStr := floatBuilder.String()
	f, err = strconv.ParseFloat(floatStr, 64)
	if err != nil {
		return 0, 0, 0, 0, err
	}
	// Count decimal digits between the point and the exponent (if any).
	pointPos := strings.IndexByte(floatStr, '.')
	if pointPos != -1 {
		if eIndex != -1 {
			ePos := strings.LastIndexAny(floatStr, "eE")
			decimals = ePos - (pointPos + 1)
		} else {
			decimals = len(floatStr) - (pointPos + 1)
		}
	}
	return f, thousandsSep, decimalSep, decimals, nil
} | float/parse.go | 0.747984 | 0.527803 | parse.go | starcoder |
package index
import (
"github.com/lfritz/clustering/geometry"
)
// A LeafKDTree is a k-d tree for 2-D points that stores one point in each leaf node.
type LeafKDTree struct {
	points [][2]float64 // the indexed points; nodes store indices into this slice
	root leafTreeNode // root of the tree; a typed-nil *innerNode when there are no points
}
// leafTreeNode is the interface shared by inner and leaf nodes of a LeafKDTree.
type leafTreeNode interface {
	// boundingBox appends to result the indices (into points) of every
	// point under this node that lies inside bb; level selects the
	// splitting dimension (level % 2).
	boundingBox(result []int, points [][2]float64, bb *geometry.BoundingBox,
		level int) []int
}

// innerNode splits the plane at value along the dimension implied by its
// depth. A nil *innerNode represents an empty subtree.
type innerNode struct {
	value float64
	left, right leafTreeNode
}

// leafNode holds the index of a single point.
type leafNode struct {
	point int
}
// NewLeafKDTree builds a LeafKDTree over the given points.
func NewLeafKDTree(points [][2]float64) *LeafKDTree {
	order := make([]int, 0, len(points))
	for i := range points {
		order = append(order, i)
	}
	return &LeafKDTree{points, newLeafTreeNode(points, order, 0)}
}
// newLeafTreeNode recursively builds the subtree for the given index
// subset, alternating the splitting dimension with depth.
func newLeafTreeNode(points [][2]float64, indices []int, level int) leafTreeNode {
	// Base cases: an empty subset becomes a typed-nil inner node, a
	// single index becomes a leaf.
	switch len(indices) {
	case 0:
		return (*innerNode)(nil)
	case 1:
		return leafNode{point: indices[0]}
	}
	dimension := level % 2
	middle := len(indices) / 2
	// Partition so indices[middle] holds the median in this dimension.
	QuickSelect(&byOneDimension{points, dimension, indices}, middle)
	// Read the split value before the recursive calls reorder indices.
	splitValue := points[indices[middle]][dimension]
	left := newLeafTreeNode(points, indices[:middle], level+1)
	right := newLeafTreeNode(points, indices[middle:], level+1)
	return &innerNode{splitValue, left, right}
}
// Points returns the slice of points the tree was built over.
func (t *LeafKDTree) Points() [][2]float64 {
	return t.points
}
// BoundingBox returns the indices of all points that fall inside the
// given axis-aligned bounding box.
func (t *LeafKDTree) BoundingBox(bb *geometry.BoundingBox) []int {
	return t.root.boundingBox([]int{}, t.points, bb, 0)
}
// boundingBox recurses into whichever children can contain points inside
// bb, accumulating matching indices into result.
func (n *innerNode) boundingBox(result []int, points [][2]float64, bb *geometry.BoundingBox,
	level int) []int {
	if n == nil {
		return result
	}
	d := level % 2
	if bb.From[d] <= n.value {
		result = n.left.boundingBox(result, points, bb, level+1)
	}
	if n.value <= bb.To[d] {
		result = n.right.boundingBox(result, points, bb, level+1)
	}
	return result
}
func (n leafNode) boundingBox(result []int, points [][2]float64, bb *geometry.BoundingBox,
level int) []int {
if bb.Contains(points[n.point]) {
result = append(result, n.point)
}
return result
} | index/leaf_kdtree.go | 0.827061 | 0.484319 | leaf_kdtree.go | starcoder |
package fullCopy
import (
	"fmt"
	"math"
	"strings"

	"github.com/nylen/go-compgeo/geom"
	"github.com/nylen/go-compgeo/printutil"
	"github.com/nylen/go-compgeo/search"
)
// FullPersistentBST is an implementation of a persistent
// binary search tree using full copies, with each
// instant represented by a separate BST.
type FullPersistentBST struct {
	instant float64 // most recently set instant
	index int // index of the current instant within instants
	// Implicitly sorted: SetInstant only appends strictly increasing instants.
	instants []BSTInstant
}
// NewFullPersistentBST wraps dyn as the initial instant of a new
// persistent BST. The starting instant is the lowest representable
// float64, so any subsequent SetInstant call is an increase.
func NewFullPersistentBST(dyn search.Dynamic) search.DynamicPersistent {
	pbst := new(FullPersistentBST)
	pbst.instant = -math.MaxFloat64 // idiomatic negation instead of MaxFloat64 * -1
	pbst.instants = []BSTInstant{{Dynamic: dyn, instant: pbst.instant}}
	return pbst
}
// BSTInstant is a single BST within a Persistent BST.
type BSTInstant struct {
	search.Dynamic // the tree as of this instant
	instant float64 // the time at which this tree became current
}
// ThisInstant returns the tree associated with the most recently set
// instant.
func (pbst *FullPersistentBST) ThisInstant() search.Dynamic {
	current := pbst.instants[pbst.index]
	return current
}
// AtInstant returns the subtree of pbst at the given instant. When ins
// falls between two stored instants, the earlier one is returned
// (rounds down).
func (pbst *FullPersistentBST) AtInstant(ins float64) search.Dynamic {
	// binary search over the (implicitly sorted) instants slice
	bot := 0
	top := len(pbst.instants) - 1
	var mid int
	for {
		if top <= bot {
			// round down to the closest stored instant at or before ins
			if pbst.instants[bot].instant > ins {
				bot--
			}
			// NOTE(review): an ins earlier than MinInstant() makes bot == -1
			// and panics here; callers appear to pass ins >= the initial
			// instant -- confirm.
			return pbst.instants[bot]
		}
		mid = (bot + top) / 2
		v := pbst.instants[mid].instant
		if geom.F64eq(v, ins) {
			// approximate float equality counts as an exact hit
			return pbst.instants[mid]
		} else if v < ins {
			bot = mid + 1
		} else {
			top = mid - 1
		}
	}
}
// ToStaticPersistent returns a static persistent version
// of the pbst.
// NOTE(review): unimplemented stub -- always returns nil.
func (pbst *FullPersistentBST) ToStaticPersistent() search.StaticPersistent {
	// Todo
	return nil
}
// MinInstant returns the earliest instant stored in pbst.
func (pbst *FullPersistentBST) MinInstant() float64 {
	first := pbst.instants[0]
	return first.instant
}

// MaxInstant returns the latest instant stored in pbst.
func (pbst *FullPersistentBST) MaxInstant() float64 {
	last := pbst.instants[len(pbst.instants)-1]
	return last.instant
}
// SetInstant advances pbst to the given instant, copying the latest
// tree so all earlier instants remain queryable. Setting the current
// instant again is a no-op; decreasing instants panic.
func (pbst *FullPersistentBST) SetInstant(ins float64) {
	switch {
	case ins < pbst.instant:
		panic("Decreasing instants is not yet supported")
	case ins == pbst.instant:
		return
	}
	latest := pbst.instants[len(pbst.instants)-1]
	next := BSTInstant{
		Dynamic: latest.Copy().(search.Dynamic),
		instant: ins,
	}
	pbst.instants = append(pbst.instants, next)
	pbst.instant = ins
	pbst.index++
}
// Insert inserts n into the tree at the currently set instant.
func (pbst *FullPersistentBST) Insert(n search.Node) error {
	tree := pbst.AtInstant(pbst.instant)
	return tree.Insert(n)
}

// Delete removes n from the tree at the currently set instant.
func (pbst *FullPersistentBST) Delete(n search.Node) error {
	tree := pbst.AtInstant(pbst.instant)
	return tree.Delete(n)
}

// ToStatic converts the tree at the currently set instant to a static
// search structure.
func (pbst *FullPersistentBST) ToStatic() search.Static {
	tree := pbst.AtInstant(pbst.instant)
	return tree.ToStatic()
}

// Size reports the size of the tree at the currently set instant.
func (pbst *FullPersistentBST) Size() int {
	tree := pbst.AtInstant(pbst.instant)
	return tree.Size()
}

// InOrderTraverse lists the nodes of the tree at the currently set
// instant, in order.
func (pbst *FullPersistentBST) InOrderTraverse() []search.Node {
	tree := pbst.AtInstant(pbst.instant)
	return tree.InOrderTraverse()
}

// Search looks up f in the tree at the currently set instant.
func (pbst *FullPersistentBST) Search(f interface{}) (bool, interface{}) {
	tree := pbst.AtInstant(pbst.instant)
	return tree.Search(f)
}

// SearchDown performs SearchDown on the tree at the currently set instant.
func (pbst *FullPersistentBST) SearchDown(f interface{}, d int) (search.Comparable, interface{}) {
	tree := pbst.AtInstant(pbst.instant)
	return tree.SearchDown(f, d)
}

// SearchUp performs SearchUp on the tree at the currently set instant.
func (pbst *FullPersistentBST) SearchUp(f interface{}, u int) (search.Comparable, interface{}) {
	tree := pbst.AtInstant(pbst.instant)
	return tree.SearchUp(f, u)
}
// String returns a string representation of pbst, listing every stored
// instant followed by its tree.
func (pbst *FullPersistentBST) String() string {
	// strings.Builder avoids the quadratic cost of += concatenation in
	// a loop; output is byte-identical to the previous implementation.
	var b strings.Builder
	for _, ins := range pbst.instants {
		b.WriteString(printutil.Stringf64(ins.instant))
		b.WriteString(":\n")
		fmt.Fprintf(&b, "%v", ins.Dynamic)
	}
	return b.String()
}
func (pbst *FullPersistentBST) Copy() interface{} {
return nil
} | search/tree/fullCopy/fullCopy.go | 0.79854 | 0.574246 | fullCopy.go | starcoder |
package vmath
import (
"unsafe"
)
// Column offsets into the flat, column-major Matrix3 array: each column
// occupies three consecutive float32 values.
const (
	m3col0 = 0
	m3col1 = 3
	m3col2 = 6
)

// g_PI_OVER_2 is pi/2. NOTE(review): the underscore name is
// un-idiomatic Go, but it may be referenced elsewhere in the package,
// so it is left unchanged.
const g_PI_OVER_2 = 1.570796327
// MakeFromScalar fills every element of result with scalar.
func (result *Matrix3) MakeFromScalar(scalar float32) {
	for i := range result {
		result[i] = scalar
	}
}
// MakeFromQ writes the rotation matrix corresponding to the unit
// quaternion unitQuat into result.
// Fixed: qy, qz and qw previously all read unitQuat[x] (copy-paste
// bug), which produced a wrong matrix for every non-trivial quaternion.
func (result *Matrix3) MakeFromQ(unitQuat *Quaternion) {
	qx := unitQuat[x]
	qy := unitQuat[y]
	qz := unitQuat[z]
	qw := unitQuat[w]
	qx2 := qx + qx
	qy2 := qy + qy
	qz2 := qz + qz
	qxqx2 := qx * qx2
	qxqy2 := qx * qy2
	qxqz2 := qx * qz2
	qxqw2 := qw * qx2
	qyqy2 := qy * qy2
	qyqz2 := qy * qz2
	qyqw2 := qw * qy2
	qzqz2 := qz * qz2
	qzqw2 := qw * qz2
	result[m3col0+x] = ((1.0 - qyqy2) - qzqz2)
	result[m3col0+y] = (qxqy2 + qzqw2)
	result[m3col0+z] = (qxqz2 - qyqw2)
	result[m3col1+x] = (qxqy2 - qzqw2)
	result[m3col1+y] = ((1.0 - qxqx2) - qzqz2)
	result[m3col1+z] = (qyqz2 + qxqw2)
	result[m3col2+x] = (qxqz2 + qyqw2)
	result[m3col2+y] = (qyqz2 - qxqw2)
	result[m3col2+z] = ((1.0 - qxqx2) - qyqy2)
}
// Copy makes m an element-wise copy of other.
func (m *Matrix3) Copy(other *Matrix3) {
	*m = *other
}
// MakeFromCols builds result from three column vectors.
func (result *Matrix3) MakeFromCols(col0, col1, col2 *Vector3) {
	for i, c := range [3]*Vector3{col0, col1, col2} {
		result.SetCol(i, c)
	}
}
// SetCol writes vec into column col of m. Out-of-range columns are
// silently ignored, matching the original switch behavior.
func (m *Matrix3) SetCol(col int, vec *Vector3) {
	var base int
	switch col {
	case 0:
		base = m3col0
	case 1:
		base = m3col1
	case 2:
		base = m3col2
	default:
		return
	}
	m[base+x] = vec[x]
	m[base+y] = vec[y]
	m[base+z] = vec[z]
}
// SetRow writes vec into row `row` of m.
func (m *Matrix3) SetRow(row int, vec *Vector3) {
	m.SetElem(0, row, vec[x])
	m.SetElem(1, row, vec[y])
	m.SetElem(2, row, vec[z])
}

// SetElem sets the element at (col, row); storage is column-major.
func (m *Matrix3) SetElem(col, row int, val float32) {
	idx := col*3 + row
	m[idx] = val
}

// Elem returns the element at (col, row).
func (m *Matrix3) Elem(col, row int) float32 {
	idx := col*3 + row
	return m[idx]
}
// Col copies column col of m into result. Out-of-range columns leave
// result untouched, matching the original switch behavior.
func (m *Matrix3) Col(result *Vector3, col int) {
	var base int
	switch col {
	case 0:
		base = m3col0
	case 1:
		base = m3col1
	case 2:
		base = m3col2
	default:
		return
	}
	result[x] = m[base+x]
	result[y] = m[base+y]
	result[z] = m[base+z]
}

// Row copies row `row` of mat into result.
func (mat *Matrix3) Row(result *Vector3, row int) {
	result[x] = mat.Elem(0, row)
	result[y] = mat.Elem(1, row)
	result[z] = mat.Elem(2, row)
}
// Transpose stores the transpose of mat in result. Aliased arguments
// are routed through TransposeSelf, which works on a copy.
func (result *Matrix3) Transpose(mat *Matrix3) {
	if unsafe.Pointer(result) == unsafe.Pointer(mat) {
		result.TransposeSelf()
		return
	}
	for c := 0; c < 3; c++ {
		for r := 0; r < 3; r++ {
			result.SetElem(c, r, mat.Elem(r, c))
		}
	}
}

// TransposeSelf transposes m in place.
func (m *Matrix3) TransposeSelf() {
	orig := *m
	m.Transpose(&orig)
}
// Inverse stores the inverse of mat in result, built from the cross
// products of mat's columns scaled by the reciprocal determinant. The
// source columns are read before any element of result is written, so
// result may alias mat. No singularity check is performed: a
// non-invertible mat yields Inf/NaN entries.
// Fixed: the third result column was previously written to m3col1
// again, so column 2 of the inverse was never set.
func (result *Matrix3) Inverse(mat *Matrix3) {
	var col0, col1, col2 Vector3
	var tmp0, tmp1, tmp2 Vector3
	mat.Col(&col0, 0)
	mat.Col(&col1, 1)
	mat.Col(&col2, 2)
	tmp0.Cross(&col1, &col2)
	tmp1.Cross(&col2, &col0)
	tmp2.Cross(&col0, &col1)
	detinv := 1.0 / col2.Dot(&tmp2)
	result[m3col0+x] = tmp0[x] * detinv
	result[m3col0+y] = tmp1[x] * detinv
	result[m3col0+z] = tmp2[x] * detinv
	result[m3col1+x] = tmp0[y] * detinv
	result[m3col1+y] = tmp1[y] * detinv
	result[m3col1+z] = tmp2[y] * detinv
	result[m3col2+x] = tmp0[z] * detinv
	result[m3col2+y] = tmp1[z] * detinv
	result[m3col2+z] = tmp2[z] * detinv
}

// InverseSelf inverts m in place.
func (m *Matrix3) InverseSelf() {
	m.Inverse(m)
}
// Determinant returns the determinant of m as the scalar triple product
// of its columns.
// Fixed: columns 1 and 2 were previously both read from column 0,
// which made the determinant 0 for every matrix.
func (m *Matrix3) Determinant() float32 {
	var col0, col1, col2, tmp Vector3
	m.Col(&col0, 0)
	m.Col(&col1, 1)
	m.Col(&col2, 2)
	tmp.Cross(&col0, &col1)
	return col2.Dot(&tmp)
}
// Add stores the element-wise sum mat0 + mat1 in result.
func (result *Matrix3) Add(mat0, mat1 *Matrix3) {
	for i := range result {
		result[i] = mat0[i] + mat1[i]
	}
}

// AddToSelf adds mat into result element-wise.
func (result *Matrix3) AddToSelf(mat *Matrix3) {
	result.Add(result, mat)
}
// Sub stores the element-wise difference mat0 - mat1 in result.
func (result *Matrix3) Sub(mat0, mat1 *Matrix3) {
	for i := range result {
		result[i] = mat0[i] - mat1[i]
	}
}

// SubFromSelf subtracts mat from result element-wise.
func (result *Matrix3) SubFromSelf(mat *Matrix3) {
	result.Sub(result, mat)
}
// Neg stores the element-wise negation of mat in result.
func (result *Matrix3) Neg(mat *Matrix3) {
	for i := range result {
		result[i] = -mat[i]
	}
}

// NegSelf negates result in place.
func (result *Matrix3) NegSelf() {
	result.Neg(result)
}
// AbsPerElem stores the element-wise absolute value of mat in result.
func (result *Matrix3) AbsPerElem(mat *Matrix3) {
	for i := range result {
		result[i] = abs(mat[i])
	}
}

// AbsPerElemSelf takes the absolute value of result in place.
func (result *Matrix3) AbsPerElemSelf() {
	result.AbsPerElem(result)
}
// ScalarMul stores mat scaled by scalar in result.
func (result *Matrix3) ScalarMul(mat *Matrix3, scalar float32) {
	for i := range result {
		result[i] = mat[i] * scalar
	}
}

// ScalarMulSelf scales result in place.
func (result *Matrix3) ScalarMulSelf(scalar float32) {
	result.ScalarMul(result, scalar)
}
// MulM3 stores the matrix-vector product mat * vec in result. When
// result aliases vec the work is delegated to MulM3Self, which uses a
// copy of the input.
func (result *Vector3) MulM3(vec *Vector3, mat *Matrix3) {
	if unsafe.Pointer(result) == unsafe.Pointer(vec) {
		result.MulM3Self(mat)
		return
	}
	vx, vy, vz := vec[x], vec[y], vec[z]
	result[x] = mat[m3col0+x]*vx + mat[m3col1+x]*vy + mat[m3col2+x]*vz
	result[y] = mat[m3col0+y]*vx + mat[m3col1+y]*vy + mat[m3col2+y]*vz
	result[z] = mat[m3col0+z]*vx + mat[m3col1+z]*vy + mat[m3col2+z]*vz
}

// MulM3Self transforms result in place by mat.
func (result *Vector3) MulM3Self(mat *Matrix3) {
	v := *result
	result.MulM3(&v, mat)
}
// Mul stores the matrix product mat0 * mat1 in result. Aliasing of
// result with either operand is handled by recursing with a copy.
func (result *Matrix3) Mul(mat0, mat1 *Matrix3) {
	if unsafe.Pointer(result) == unsafe.Pointer(mat0) {
		lhs := *result
		result.Mul(&lhs, mat1)
		return
	}
	if unsafe.Pointer(result) == unsafe.Pointer(mat1) {
		rhs := *result
		result.Mul(mat0, &rhs)
		return
	}
	for c := 0; c < 3; c++ {
		base := c * 3 // start of column c (columns are 3 floats apart)
		bx := mat1[base+x]
		by := mat1[base+y]
		bz := mat1[base+z]
		result[base+x] = mat0[m3col0+x]*bx + mat0[m3col1+x]*by + mat0[m3col2+x]*bz
		result[base+y] = mat0[m3col0+y]*bx + mat0[m3col1+y]*by + mat0[m3col2+y]*bz
		result[base+z] = mat0[m3col0+z]*bx + mat0[m3col1+z]*by + mat0[m3col2+z]*bz
	}
}

// MulSelf multiplies result by mat in place (result = result * mat).
func (result *Matrix3) MulSelf(mat *Matrix3) {
	lhs := *result
	result.Mul(&lhs, mat)
}
// MulPerElem stores the element-wise (Hadamard) product of mat0 and
// mat1 in result.
func (result *Matrix3) MulPerElem(mat0, mat1 *Matrix3) {
	for i := range result {
		result[i] = mat0[i] * mat1[i]
	}
}

// MulPerElemSelf multiplies result by mat element-wise, in place.
func (result *Matrix3) MulPerElemSelf(mat *Matrix3) {
	result.MulPerElem(result, mat)
}
// MakeIdentity sets result to the 3x3 identity matrix.
func (result *Matrix3) MakeIdentity() {
	for i := range result {
		result[i] = 0.0
	}
	result[m3col0+x] = 1.0
	result[m3col1+y] = 1.0
	result[m3col2+z] = 1.0
}
// MakeRotationX builds a rotation about the x axis by radians.
// Fixed: the third column was previously written to m3col1 again,
// clobbering column 1 and leaving column 2 uninitialized.
func (result *Matrix3) MakeRotationX(radians float32) {
	s := sin(radians)
	c := cos(radians)
	result[m3col0+x] = 1.0
	result[m3col0+y] = 0.0
	result[m3col0+z] = 0.0
	result[m3col1+x] = 0.0
	result[m3col1+y] = c
	result[m3col1+z] = s
	result[m3col2+x] = 0.0
	result[m3col2+y] = -s
	result[m3col2+z] = c
}
// MakeRotationY builds a rotation about the y axis by radians.
// Fixed: the middle column was previously labelled m3col2, so column 1
// was never written and column 2 was written twice.
func (result *Matrix3) MakeRotationY(radians float32) {
	s := sin(radians)
	c := cos(radians)
	result[m3col0+x] = c
	result[m3col0+y] = 0.0
	result[m3col0+z] = -s
	result[m3col1+x] = 0.0
	result[m3col1+y] = 1.0
	result[m3col1+z] = 0.0
	result[m3col2+x] = s
	result[m3col2+y] = 0.0
	result[m3col2+z] = c
}
// MakeRotationZ builds a rotation about the z axis by radians.
func (result *Matrix3) MakeRotationZ(radians float32) {
	sn := sin(radians)
	cs := cos(radians)
	// col0 = (cs, sn, 0), col1 = (-sn, cs, 0), col2 = (0, 0, 1)
	result[m3col0+x] = cs
	result[m3col0+y] = sn
	result[m3col0+z] = 0.0
	result[m3col1+x] = -sn
	result[m3col1+y] = cs
	result[m3col1+z] = 0.0
	result[m3col2+x] = 0.0
	result[m3col2+y] = 0.0
	result[m3col2+z] = 1.0
}
// MakeRotationZYX builds a rotation matrix from the per-axis angles in
// radiansXYZ, composing the individual axis rotations.
// NOTE(review): the column formulas match Rz * Ry * Rx (X applied
// first) -- confirm against the library's documented convention.
func (result *Matrix3) MakeRotationZYX(radiansXYZ *Vector3) {
	sX := sin(radiansXYZ[x])
	cX := cos(radiansXYZ[x])
	sY := sin(radiansXYZ[y])
	cY := cos(radiansXYZ[y])
	sZ := sin(radiansXYZ[z])
	cZ := cos(radiansXYZ[z])
	// common subexpressions shared by the second and third columns
	tmp0 := cZ * sY
	tmp1 := sZ * sY
	result[m3col0+x] = (cZ * cY)
	result[m3col0+y] = (sZ * cY)
	result[m3col0+z] = -sY
	result[m3col1+x] = ((tmp0 * sX) - (sZ * cX))
	result[m3col1+y] = ((tmp1 * sX) + (cZ * cX))
	result[m3col1+z] = (cY * sX)
	result[m3col2+x] = ((tmp0 * cX) + (sZ * sX))
	result[m3col2+y] = ((tmp1 * cX) - (cZ * sX))
	result[m3col2+z] = (cY * cX)
}
// MakeRotationAxis builds a rotation of radians about unitVec using the
// axis-angle (Rodrigues) expansion. unitVec is expected to be
// normalized (as its name suggests); no normalization is performed
// here.
func (result *Matrix3) MakeRotationAxis(radians float32, unitVec *Vector3) {
	s := sin(radians)
	c := cos(radians)
	X := unitVec[x]
	Y := unitVec[y]
	Z := unitVec[z]
	// pairwise products reused across the columns
	xy := X * Y
	yz := Y * Z
	zx := Z * X
	oneMinusC := 1.0 - c
	result[m3col0+x] = (((X * X) * oneMinusC) + c)
	result[m3col0+y] = ((xy * oneMinusC) + (Z * s))
	result[m3col0+z] = ((zx * oneMinusC) - (Y * s))
	result[m3col1+x] = ((xy * oneMinusC) - (Z * s))
	result[m3col1+y] = (((Y * Y) * oneMinusC) + c)
	result[m3col1+z] = ((yz * oneMinusC) + (X * s))
	result[m3col2+x] = ((zx * oneMinusC) + (Y * s))
	result[m3col2+y] = ((yz * oneMinusC) - (X * s))
	result[m3col2+z] = (((Z * Z) * oneMinusC) + c)
}

// MakeRotationQ builds the rotation matrix for the unit quaternion
// unitQuat; it simply delegates to MakeFromQ.
func (result *Matrix3) MakeRotationQ(unitQuat *Quaternion) {
	result.MakeFromQ(unitQuat)
}
// MakeScale sets result to a diagonal scaling matrix built from
// scaleVec.
func (result *Matrix3) MakeScale(scaleVec *Vector3) {
	for i := range result {
		result[i] = 0.0
	}
	result[m3col0+x] = scaleVec[x]
	result[m3col1+y] = scaleVec[y]
	result[m3col2+z] = scaleVec[z]
}
// AppendScale computes mat * diag(scaleVec): each column of mat is
// multiplied by the matching component of scaleVec.
func (result *Matrix3) AppendScale(mat *Matrix3, scaleVec *Vector3) {
	factors := [3]float32{scaleVec[x], scaleVec[y], scaleVec[z]}
	for c := 0; c < 3; c++ {
		base := c * 3
		s := factors[c]
		result[base+x] = mat[base+x] * s
		result[base+y] = mat[base+y] * s
		result[base+z] = mat[base+z] * s
	}
}

// AppendScaleSelf applies AppendScale in place.
func (result *Matrix3) AppendScaleSelf(scaleVec *Vector3) {
	result.AppendScale(result, scaleVec)
}
// PrependScale computes diag(scaleVec) * mat: each row of mat is scaled
// by the matching component of scaleVec.
func (result *Matrix3) PrependScale(scaleVec *Vector3, mat *Matrix3) {
	for c := 0; c < 3; c++ {
		base := c * 3
		result[base+x] = mat[base+x] * scaleVec[x]
		result[base+y] = mat[base+y] * scaleVec[y]
		result[base+z] = mat[base+z] * scaleVec[z]
	}
}

// PrependScaleSelf applies PrependScale in place.
func (result *Matrix3) PrependScaleSelf(scaleVec *Vector3) {
	result.PrependScale(scaleVec, result)
}
// Select copies mat1 into result when select1 is non-zero, otherwise
// mat0.
func (result *Matrix3) Select(mat0, mat1 *Matrix3, select1 int) {
	src := mat0
	if select1 != 0 {
		src = mat1
	}
	*result = *src
}
// Matrix 4
// Column offsets into the flat, column-major Matrix4 array: each column
// occupies four consecutive float32 values.
const (
	m4col0 = 0
	m4col1 = 4
	m4col2 = 8
	m4col3 = 12
)
// MakeFromScalar fills every element of result with scalar.
func (result *Matrix4) MakeFromScalar(scalar float32) {
	for i := range result {
		result[i] = scalar
	}
}
// MakeFromT3 widens the 3x4 affine transform trns into a full 4x4
// matrix: all four columns are copied and the w row is set to
// (0, 0, 0, 1).
func (result *Matrix4) MakeFromT3(trns *Transform3) {
	result[m4col0+x] = trns[t3col0+x]
	result[m4col0+y] = trns[t3col0+y]
	result[m4col0+z] = trns[t3col0+z]
	result[m4col0+w] = 0.0
	result[m4col1+x] = trns[t3col1+x]
	result[m4col1+y] = trns[t3col1+y]
	result[m4col1+z] = trns[t3col1+z]
	result[m4col1+w] = 0.0
	result[m4col2+x] = trns[t3col2+x]
	result[m4col2+y] = trns[t3col2+y]
	result[m4col2+z] = trns[t3col2+z]
	result[m4col2+w] = 0.0
	result[m4col3+x] = trns[t3col3+x]
	result[m4col3+y] = trns[t3col3+y]
	result[m4col3+z] = trns[t3col3+z]
	result[m4col3+w] = 1.0
}
// Copy makes m an element-wise copy of other.
func (m *Matrix4) Copy(other *Matrix4) {
	*m = *other
}
// SetCol writes vec into column col of m. Out-of-range columns are
// silently ignored, matching the original switch behavior.
func (m *Matrix4) SetCol(col int, vec *Vector4) {
	var base int
	switch col {
	case 0:
		base = m4col0
	case 1:
		base = m4col1
	case 2:
		base = m4col2
	case 3:
		base = m4col3
	default:
		return
	}
	m[base+x] = vec[x]
	m[base+y] = vec[y]
	m[base+z] = vec[z]
	m[base+w] = vec[w]
}

// MakeFromCols builds result from four column vectors.
func (result *Matrix4) MakeFromCols(col0, col1, col2, col3 *Vector4) {
	for i, c := range [4]*Vector4{col0, col1, col2, col3} {
		result.SetCol(i, c)
	}
}
// MakeFromM3V3 builds a 4x4 transform whose upper-left 3x3 block is
// mat, whose fourth column is translateVec, and whose w row is
// (0, 0, 0, 1).
func (result *Matrix4) MakeFromM3V3(mat *Matrix3, translateVec *Vector3) {
	for c := 0; c < 3; c++ {
		src := c * 3 // Matrix3 columns are 3 floats apart
		dst := c * 4 // Matrix4 columns are 4 floats apart
		result[dst+x] = mat[src+x]
		result[dst+y] = mat[src+y]
		result[dst+z] = mat[src+z]
		result[dst+w] = 0.0
	}
	result[m4col3+x] = translateVec[x]
	result[m4col3+y] = translateVec[y]
	result[m4col3+z] = translateVec[z]
	result[m4col3+w] = 1.0
}

// MakeFromQV3 builds a 4x4 transform from a unit rotation quaternion
// and a translation vector.
func (result *Matrix4) MakeFromQV3(unitQuat *Quaternion, translateVec *Vector3) {
	var rot Matrix3
	rot.MakeFromQ(unitQuat)
	result.MakeFromM3V3(&rot, translateVec)
}
// SetRow writes vec into row `row` of m.
func (m *Matrix4) SetRow(row int, vec *Vector4) {
	m.SetElem(0, row, vec[x])
	m.SetElem(1, row, vec[y])
	m.SetElem(2, row, vec[z])
	m.SetElem(3, row, vec[w])
}

// SetElem sets the element at (col, row); storage is column-major.
func (m *Matrix4) SetElem(col, row int, val float32) {
	idx := col*4 + row
	m[idx] = val
}

// Elem returns the element at (col, row).
func (m *Matrix4) Elem(col, row int) float32 {
	idx := col*4 + row
	return m[idx]
}
// Col copies column col of m into result. Out-of-range columns leave
// result untouched, matching the original switch behavior.
func (m *Matrix4) Col(result *Vector4, col int) {
	var base int
	switch col {
	case 0:
		base = m4col0
	case 1:
		base = m4col1
	case 2:
		base = m4col2
	case 3:
		base = m4col3
	default:
		return
	}
	result[x] = m[base+x]
	result[y] = m[base+y]
	result[z] = m[base+z]
	result[w] = m[base+w]
}

// Row copies row `row` of mat into result.
func (mat *Matrix4) Row(result *Vector4, row int) {
	result[x] = mat.Elem(0, row)
	result[y] = mat.Elem(1, row)
	result[z] = mat.Elem(2, row)
	result[w] = mat.Elem(3, row)
}
// Transpose stores the transpose of mat in result. Aliased arguments
// are routed through TransposeSelf, which works on a copy.
func (result *Matrix4) Transpose(mat *Matrix4) {
	if unsafe.Pointer(result) == unsafe.Pointer(mat) {
		result.TransposeSelf()
		return
	}
	for c := 0; c < 4; c++ {
		for r := 0; r < 4; r++ {
			result.SetElem(c, r, mat.Elem(r, c))
		}
	}
}

// TransposeSelf transposes m in place.
func (m *Matrix4) TransposeSelf() {
	orig := *m
	m.Transpose(&orig)
}
// Inverse stores the general inverse of mat in result, computed from
// cofactors (the adjugate) scaled by the reciprocal determinant. All 16
// source elements are read into locals before any write, so result may
// alias mat. No singularity check is performed: a non-invertible mat
// yields Inf/NaN entries.
func (result *Matrix4) Inverse(mat *Matrix4) {
	var res0, res1, res2, res3 Vector4
	// Load the matrix into scalars; column-major, so mA..mD is column 0.
	mA := mat[m4col0+x]
	mB := mat[m4col0+y]
	mC := mat[m4col0+z]
	mD := mat[m4col0+w]
	mE := mat[m4col1+x]
	mF := mat[m4col1+y]
	mG := mat[m4col1+z]
	mH := mat[m4col1+w]
	mI := mat[m4col2+x]
	mJ := mat[m4col2+y]
	mK := mat[m4col2+z]
	mL := mat[m4col2+w]
	mM := mat[m4col3+x]
	mN := mat[m4col3+y]
	mO := mat[m4col3+z]
	mP := mat[m4col3+w]
	// 2x2 subdeterminants shared by several cofactors.
	tmp0 := ((mK * mD) - (mC * mL))
	tmp1 := ((mO * mH) - (mG * mP))
	tmp2 := ((mB * mK) - (mJ * mC))
	tmp3 := ((mF * mO) - (mN * mG))
	tmp4 := ((mJ * mD) - (mB * mL))
	tmp5 := ((mN * mH) - (mF * mP))
	res0[x] = (((mJ * tmp1) - (mL * tmp3)) - (mK * tmp5))
	res0[y] = (((mN * tmp0) - (mP * tmp2)) - (mO * tmp4))
	res0[z] = (((mD * tmp3) + (mC * tmp5)) - (mB * tmp1))
	res0[w] = (((mH * tmp2) + (mG * tmp4)) - (mF * tmp0))
	// Reciprocal determinant from the first column of cofactors.
	detInv := (1.0 / ((((mA * res0[x]) + (mE * res0[y])) + (mI * res0[z])) + (mM * res0[w])))
	res1[x] = (mI * tmp1)
	res1[y] = (mM * tmp0)
	res1[z] = (mA * tmp1)
	res1[w] = (mE * tmp0)
	res3[x] = (mI * tmp3)
	res3[y] = (mM * tmp2)
	res3[z] = (mA * tmp3)
	res3[w] = (mE * tmp2)
	res2[x] = (mI * tmp5)
	res2[y] = (mM * tmp4)
	res2[z] = (mA * tmp5)
	res2[w] = (mE * tmp4)
	// Second batch of 2x2 subdeterminants (tmp values are reused).
	tmp0 = ((mI * mB) - (mA * mJ))
	tmp1 = ((mM * mF) - (mE * mN))
	tmp2 = ((mI * mD) - (mA * mL))
	tmp3 = ((mM * mH) - (mE * mP))
	tmp4 = ((mI * mC) - (mA * mK))
	tmp5 = ((mM * mG) - (mE * mO))
	res2[x] = (((mL * tmp1) - (mJ * tmp3)) + res2[x])
	res2[y] = (((mP * tmp0) - (mN * tmp2)) + res2[y])
	res2[z] = (((mB * tmp3) - (mD * tmp1)) - res2[z])
	res2[w] = (((mF * tmp2) - (mH * tmp0)) - res2[w])
	res3[x] = (((mJ * tmp5) - (mK * tmp1)) + res3[x])
	res3[y] = (((mN * tmp4) - (mO * tmp0)) + res3[y])
	res3[z] = (((mC * tmp1) - (mB * tmp5)) - res3[z])
	res3[w] = (((mG * tmp0) - (mF * tmp4)) - res3[w])
	res1[x] = (((mK * tmp3) - (mL * tmp5)) - res1[x])
	res1[y] = (((mO * tmp2) - (mP * tmp4)) - res1[y])
	res1[z] = (((mD * tmp5) - (mC * tmp3)) + res1[z])
	res1[w] = (((mH * tmp4) - (mG * tmp2)) + res1[w])
	// Scale the cofactor columns and store them.
	res0.ScalarMulSelf(detInv)
	result.SetCol(0, &res0)
	res1.ScalarMulSelf(detInv)
	result.SetCol(1, &res1)
	res2.ScalarMulSelf(detInv)
	result.SetCol(2, &res2)
	res3.ScalarMulSelf(detInv)
	result.SetCol(3, &res3)
}

// InverseSelf inverts result in place.
func (result *Matrix4) InverseSelf() {
	result.Inverse(result)
}
// AffineInverse stores the inverse of mat in result, treating mat as an
// affine transform: only the upper 3x4 part is read (mat's w row is
// ignored) and the result's w row is set to (0, 0, 0, 1) by MakeFromT3.
func (result *Matrix4) AffineInverse(mat *Matrix4) {
	var affineMat Transform3
	// Copy the three basis columns and the translation column.
	affineMat[t3col0+x] = mat[m4col0+x]
	affineMat[t3col0+y] = mat[m4col0+y]
	affineMat[t3col0+z] = mat[m4col0+z]
	affineMat[t3col1+x] = mat[m4col1+x]
	affineMat[t3col1+y] = mat[m4col1+y]
	affineMat[t3col1+z] = mat[m4col1+z]
	affineMat[t3col2+x] = mat[m4col2+x]
	affineMat[t3col2+y] = mat[m4col2+y]
	affineMat[t3col2+z] = mat[m4col2+z]
	affineMat[t3col3+x] = mat[m4col3+x]
	affineMat[t3col3+y] = mat[m4col3+y]
	affineMat[t3col3+z] = mat[m4col3+z]
	affineMat.InverseSelf()
	result.MakeFromT3(&affineMat)
}

// AffineInverseSelf applies AffineInverse in place.
func (result *Matrix4) AffineInverseSelf() {
	result.AffineInverse(result)
}
// OrthoInverse stores the inverse of mat in result via
// Transform3.OrthoInverseSelf, which presumably assumes the 3x3 part is
// orthogonal (confirm against the Transform3 implementation). Only the
// upper 3x4 part of mat is read; the result's w row becomes
// (0, 0, 0, 1) via MakeFromT3.
func (result *Matrix4) OrthoInverse(mat *Matrix4) {
	var affineMat Transform3
	// Copy the three basis columns and the translation column.
	affineMat[t3col0+x] = mat[m4col0+x]
	affineMat[t3col0+y] = mat[m4col0+y]
	affineMat[t3col0+z] = mat[m4col0+z]
	affineMat[t3col1+x] = mat[m4col1+x]
	affineMat[t3col1+y] = mat[m4col1+y]
	affineMat[t3col1+z] = mat[m4col1+z]
	affineMat[t3col2+x] = mat[m4col2+x]
	affineMat[t3col2+y] = mat[m4col2+y]
	affineMat[t3col2+z] = mat[m4col2+z]
	affineMat[t3col3+x] = mat[m4col3+x]
	affineMat[t3col3+y] = mat[m4col3+y]
	affineMat[t3col3+z] = mat[m4col3+z]
	affineMat.OrthoInverseSelf()
	result.MakeFromT3(&affineMat)
}

// OrthoInverseSelf applies OrthoInverse in place.
func (result *Matrix4) OrthoInverseSelf() {
	result.OrthoInverse(result)
}
// Determinant returns the determinant of m, computed via products of
// 2x2 subdeterminants (cofactor expansion).
func (m *Matrix4) Determinant() float32 {
	// Load the matrix into scalars; column-major, so mA..mD is column 0.
	mA := m[m4col0+x]
	mB := m[m4col0+y]
	mC := m[m4col0+z]
	mD := m[m4col0+w]
	mE := m[m4col1+x]
	mF := m[m4col1+y]
	mG := m[m4col1+z]
	mH := m[m4col1+w]
	mI := m[m4col2+x]
	mJ := m[m4col2+y]
	mK := m[m4col2+z]
	mL := m[m4col2+w]
	mM := m[m4col3+x]
	mN := m[m4col3+y]
	mO := m[m4col3+z]
	mP := m[m4col3+w]
	// 2x2 subdeterminants shared by the four cofactors.
	tmp0 := ((mK * mD) - (mC * mL))
	tmp1 := ((mO * mH) - (mG * mP))
	tmp2 := ((mB * mK) - (mJ * mC))
	tmp3 := ((mF * mO) - (mN * mG))
	tmp4 := ((mJ * mD) - (mB * mL))
	tmp5 := ((mN * mH) - (mF * mP))
	dx := (((mJ * tmp1) - (mL * tmp3)) - (mK * tmp5))
	dy := (((mN * tmp0) - (mP * tmp2)) - (mO * tmp4))
	dz := (((mD * tmp3) + (mC * tmp5)) - (mB * tmp1))
	dw := (((mH * tmp2) + (mG * tmp4)) - (mF * tmp0))
	return ((((mA * dx) + (mE * dy)) + (mI * dz)) + (mM * dw))
}
// Add stores the element-wise sum mat0 + mat1 in result.
func (result *Matrix4) Add(mat0, mat1 *Matrix4) {
	for i := range result {
		result[i] = mat0[i] + mat1[i]
	}
}

// AddToSelf adds mat into result element-wise.
func (result *Matrix4) AddToSelf(mat *Matrix4) {
	result.Add(result, mat)
}
// Sub stores the element-wise difference mat0 - mat1 in result.
func (result *Matrix4) Sub(mat0, mat1 *Matrix4) {
	for i := range result {
		result[i] = mat0[i] - mat1[i]
	}
}

// SubFromSelf subtracts mat from result element-wise.
func (result *Matrix4) SubFromSelf(mat *Matrix4) {
	result.Sub(result, mat)
}
// Neg stores the element-wise negation of mat into result.
func (result *Matrix4) Neg(mat *Matrix4) {
	for i := range result {
		result[i] = -mat[i]
	}
}

// NegSelf negates m in place.
func (m *Matrix4) NegSelf() {
	m.Neg(m)
}
// AbsPerElem stores the element-wise absolute value of mat into result.
func (result *Matrix4) AbsPerElem(mat *Matrix4) {
	for i := range result {
		result[i] = abs(mat[i])
	}
}

// AbsPerElemSelf replaces every element of result with its absolute value.
func (result *Matrix4) AbsPerElemSelf() {
	result.AbsPerElem(result)
}
// ScalarMul stores mat scaled element-wise by scalar into result.
func (result *Matrix4) ScalarMul(mat *Matrix4, scalar float32) {
	for i := range result {
		result[i] = mat[i] * scalar
	}
}

// ScalarMulSelf scales every element of result by scalar in place.
func (result *Matrix4) ScalarMulSelf(scalar float32) {
	result.ScalarMul(result, scalar)
}
// MulM4 stores mat * vec (column-vector convention) into result.
// Aliasing of result with vec is detected and routed through a copy.
func (result *Vector4) MulM4(vec *Vector4, mat *Matrix4) {
	if unsafe.Pointer(result) == unsafe.Pointer(vec) {
		result.MulM4Self(mat)
		return
	}
	for r := 0; r < 4; r++ {
		result[r] = mat[m4col0+r]*vec[x] + mat[m4col1+r]*vec[y] + mat[m4col2+r]*vec[z] + mat[m4col3+r]*vec[w]
	}
}

// MulM4Self replaces result with mat * result.
func (result *Vector4) MulM4Self(mat *Matrix4) {
	tmp := *result
	result.MulM4(&tmp, mat)
}
// MulM4V3 stores mat * (vec, 0) into result: vec is treated as a
// direction, so the translation column of mat is not applied.
func (result *Vector4) MulM4V3(mat *Matrix4, vec *Vector3) {
	for r := 0; r < 4; r++ {
		result[r] = mat[m4col0+r]*vec[x] + mat[m4col1+r]*vec[y] + mat[m4col2+r]*vec[z]
	}
}
// MulM4P3 stores mat * (pnt, 1) into result: pnt is treated as a
// position, so the translation column of mat is added in.
func (result *Vector4) MulM4P3(mat *Matrix4, pnt *Point3) {
	for r := 0; r < 4; r++ {
		result[r] = mat[m4col0+r]*pnt[x] + mat[m4col1+r]*pnt[y] + mat[m4col2+r]*pnt[z] + mat[m4col3+r]
	}
}
// Mul stores the matrix product mat0 * mat1 into result.
// When result aliases either operand the computation is rerouted
// through a temporary copy so no partially-written data is read.
func (result *Matrix4) Mul(mat0, mat1 *Matrix4) {
	if unsafe.Pointer(result) == unsafe.Pointer(mat0) {
		tmp := *result
		result.Mul(&tmp, mat1)
		return
	}
	if unsafe.Pointer(result) == unsafe.Pointer(mat1) {
		tmp := *result
		result.Mul(mat0, &tmp)
		return
	}
	// c walks the four column offsets (0, 4, 8, 12) of the flat array.
	for c := 0; c < 16; c += 4 {
		for r := 0; r < 4; r++ {
			result[c+r] = mat0[m4col0+r]*mat1[c+x] + mat0[m4col1+r]*mat1[c+y] + mat0[m4col2+r]*mat1[c+z] + mat0[m4col3+r]*mat1[c+w]
		}
	}
}

// MulSelf replaces result with result * mat.
func (result *Matrix4) MulSelf(mat *Matrix4) {
	tmp := *result
	result.Mul(&tmp, mat)
}
// MulT3 stores mat * tfrm into result, treating tfrm as a 4x4 matrix
// whose bottom row is (0, 0, 0, 1). Aliasing of result with mat is
// handled via a copy.
func (result *Matrix4) MulT3(mat *Matrix4, tfrm *Transform3) {
	if unsafe.Pointer(result) == unsafe.Pointer(mat) {
		result.MulT3Self(tfrm)
		return
	}
	// Transform3 columns have stride 3, Matrix4 columns stride 4.
	for c := 0; c < 4; c++ {
		for r := 0; r < 4; r++ {
			e := mat[m4col0+r]*tfrm[3*c+x] + mat[m4col1+r]*tfrm[3*c+y] + mat[m4col2+r]*tfrm[3*c+z]
			if c == 3 {
				// tfrm's implicit w of 1 pulls in mat's translation column.
				e += mat[m4col3+r]
			}
			result[4*c+r] = e
		}
	}
}

// MulT3Self replaces result with result * tfrm.
func (result *Matrix4) MulT3Self(tfrm *Transform3) {
	tmp := *result
	result.MulT3(&tmp, tfrm)
}
// MulPerElem stores the element-wise (Hadamard) product of mat0 and
// mat1 into result. result may alias either operand.
func (result *Matrix4) MulPerElem(mat0, mat1 *Matrix4) {
	for i := range result {
		result[i] = mat0[i] * mat1[i]
	}
}

// MulPerElemSelf multiplies result element-wise by mat in place.
func (result *Matrix4) MulPerElemSelf(mat *Matrix4) {
	result.MulPerElem(result, mat)
}
// MakeIdentity sets result to the 4x4 identity matrix.
func (result *Matrix4) MakeIdentity() {
	*result = Matrix4{}
	result[m4col0+x] = 1.0
	result[m4col1+y] = 1.0
	result[m4col2+z] = 1.0
	result[m4col3+w] = 1.0
}
// SetUpper3x3 copies mat3 into the upper-left 3x3 of m, leaving the
// fourth row and the translation column untouched.
func (m *Matrix4) SetUpper3x3(mat3 *Matrix3) {
	// Matrix4 columns have stride 4, Matrix3 columns stride 3.
	for c := 0; c < 3; c++ {
		for r := 0; r < 3; r++ {
			m[4*c+r] = mat3[3*c+r]
		}
	}
}

// Upper3x3 copies the upper-left 3x3 of m into result.
func (m *Matrix4) Upper3x3(result *Matrix3) {
	for c := 0; c < 3; c++ {
		for r := 0; r < 3; r++ {
			result[3*c+r] = m[4*c+r]
		}
	}
}
// SetTranslation stores translateVec into the x/y/z rows of the fourth
// (translation) column of m; the w element is left unchanged.
func (m *Matrix4) SetTranslation(translateVec *Vector3) {
	for r := 0; r < 3; r++ {
		m[m4col3+r] = translateVec[r]
	}
}

// Translation copies the translation column of m into result.
func (m *Matrix4) Translation(result *Vector3) {
	for r := 0; r < 3; r++ {
		result[r] = m[m4col3+r]
	}
}
// MakeRotationX sets result to a rotation of radians about the x axis.
func (result *Matrix4) MakeRotationX(radians float32) {
	s := sin(radians)
	c := cos(radians)
	// Zero everything, then fill in the non-zero entries.
	*result = Matrix4{}
	result[m4col0+x] = 1.0
	result[m4col1+y] = c
	result[m4col1+z] = s
	result[m4col2+y] = -s
	result[m4col2+z] = c
	result[m4col3+w] = 1.0
}
// MakeRotationY sets result to a rotation of radians about the y axis.
func (result *Matrix4) MakeRotationY(radians float32) {
	s := sin(radians)
	c := cos(radians)
	*result = Matrix4{}
	result[m4col0+x] = c
	result[m4col0+z] = -s
	result[m4col1+y] = 1.0
	result[m4col2+x] = s
	result[m4col2+z] = c
	result[m4col3+w] = 1.0
}
// MakeRotationZ sets result to a rotation of radians about the z axis.
func (result *Matrix4) MakeRotationZ(radians float32) {
	s := sin(radians)
	c := cos(radians)
	*result = Matrix4{}
	result[m4col0+x] = c
	result[m4col0+y] = s
	result[m4col1+x] = -s
	result[m4col1+y] = c
	result[m4col2+z] = 1.0
	result[m4col3+w] = 1.0
}
// MakeRotationXYZ sets result to the concatenated rotation about the
// x, y and z axes by the Euler angles given in radiansXYZ.
func (result *Matrix4) MakeRotationXYZ(radiansXYZ *Vector3) {
	sX := sin(radiansXYZ[x])
	cX := cos(radiansXYZ[x])
	sY := sin(radiansXYZ[y])
	cY := cos(radiansXYZ[y])
	sZ := sin(radiansXYZ[z])
	cZ := cos(radiansXYZ[z])
	cZsY := cZ * sY
	sZsY := sZ * sY
	*result = Matrix4{}
	result[m4col0+x] = cZ * cY
	result[m4col0+y] = sZ * cY
	result[m4col0+z] = -sY
	result[m4col1+x] = (cZsY * sX) - (sZ * cX)
	result[m4col1+y] = (sZsY * sX) + (cZ * cX)
	result[m4col1+z] = cY * sX
	result[m4col2+x] = (cZsY * cX) + (sZ * sX)
	result[m4col2+y] = (sZsY * cX) - (cZ * sX)
	result[m4col2+z] = cY * cX
	result[m4col3+w] = 1.0
}
// MakeRotationAxis sets result to a rotation of radians about unitVec
// (Rodrigues' rotation formula). unitVec is assumed to be normalized —
// TODO confirm callers guarantee this.
func (result *Matrix4) MakeRotationAxis(radians float32, unitVec *Vector3) {
	s := sin(radians)
	c := cos(radians)
	ax := unitVec[x]
	ay := unitVec[y]
	az := unitVec[z]
	xy := ax * ay
	yz := ay * az
	zx := az * ax
	oneMinusC := 1.0 - c
	*result = Matrix4{}
	result[m4col0+x] = ((ax * ax) * oneMinusC) + c
	result[m4col0+y] = (xy * oneMinusC) + (az * s)
	result[m4col0+z] = (zx * oneMinusC) - (ay * s)
	result[m4col1+x] = (xy * oneMinusC) - (az * s)
	result[m4col1+y] = ((ay * ay) * oneMinusC) + c
	result[m4col1+z] = (yz * oneMinusC) + (ax * s)
	result[m4col2+x] = (zx * oneMinusC) + (ay * s)
	result[m4col2+y] = (yz * oneMinusC) - (ax * s)
	result[m4col2+z] = ((az * az) * oneMinusC) + c
	result[m4col3+w] = 1.0
}
// MakeRotationQ sets result to the rotation described by the unit
// quaternion unitQuat, with no translation.
func (result *Matrix4) MakeRotationQ(unitQuat *Quaternion) {
	var rot Transform3
	rot.MakeRotationQ(unitQuat)
	result.MakeFromT3(&rot)
}
// MakeScale sets result to a non-uniform scale by scaleVec.
func (result *Matrix4) MakeScale(scaleVec *Vector3) {
	*result = Matrix4{}
	result[m4col0+x] = scaleVec[x]
	result[m4col1+y] = scaleVec[y]
	result[m4col2+z] = scaleVec[z]
	result[m4col3+w] = 1.0
}
// AppendScale stores mat * scale(scaleVec) into result: each of the
// first three columns of mat is multiplied by the matching scale
// factor, and the translation column is copied through unchanged.
func (result *Matrix4) AppendScale(mat *Matrix4, scaleVec *Vector3) {
	for r := 0; r < 4; r++ {
		result[m4col0+r] = mat[m4col0+r] * scaleVec[x]
		result[m4col1+r] = mat[m4col1+r] * scaleVec[y]
		result[m4col2+r] = mat[m4col2+r] * scaleVec[z]
		result[m4col3+r] = mat[m4col3+r]
	}
}

// AppendScaleSelf multiplies result by scale(scaleVec) in place.
func (result *Matrix4) AppendScaleSelf(scaleVec *Vector3) {
	result.AppendScale(result, scaleVec)
}
// PrependScale stores scale(scaleVec) * mat into result: the x, y and z
// rows of every column are scaled, the w row is copied unchanged.
func (result *Matrix4) PrependScale(scaleVec *Vector3, mat *Matrix4) {
	// c walks the four column offsets (0, 4, 8, 12).
	for c := 0; c < 16; c += 4 {
		result[c+x] = mat[c+x] * scaleVec[x]
		result[c+y] = mat[c+y] * scaleVec[y]
		result[c+z] = mat[c+z] * scaleVec[z]
		result[c+w] = mat[c+w]
	}
}

// PrependScaleSelf multiplies result by scale(scaleVec) on the left, in place.
func (result *Matrix4) PrependScaleSelf(scaleVec *Vector3) {
	result.PrependScale(scaleVec, result)
}
// MakeTranslation sets result to a pure translation by translateVec.
func (result *Matrix4) MakeTranslation(translateVec *Vector3) {
	*result = Matrix4{}
	result[m4col0+x] = 1.0
	result[m4col1+y] = 1.0
	result[m4col2+z] = 1.0
	result[m4col3+x] = translateVec[x]
	result[m4col3+y] = translateVec[y]
	result[m4col3+z] = translateVec[z]
	result[m4col3+w] = 1.0
}
// MakeLookAt builds a view matrix for a camera placed at eyePos, looking
// toward lookAtPos, with upVec giving the approximate up direction: it
// assembles an orthonormal eye frame (right, up, back, eye position) and
// stores its orthogonal inverse (the world-to-view transform) in result.
func (result *Matrix4) MakeLookAt(eyePos, lookAtPos *Point3, upVec *Vector3) {
	var m4EyeFrame Matrix4
	var v3X, v3Y, v3Z, tmpV3_0, tmpV3_1 Vector3
	var tmpV4_0, tmpV4_1, tmpV4_2, tmpV4_3 Vector4
	v3Y.Normalize(upVec)
	// The frame's z axis points from the look-at target back toward the eye.
	tmpV3_0.P3Sub(eyePos, lookAtPos)
	v3Z.Normalize(&tmpV3_0)
	tmpV3_1.Cross(&v3Y, &v3Z)
	v3X.Normalize(&tmpV3_1)
	// Re-derive the up axis so the three axes are exactly orthonormal even
	// when upVec is not perpendicular to the view direction.
	v3Y.Cross(&v3Z, &v3X)
	tmpV4_0.MakeFromV3(&v3X)
	tmpV4_1.MakeFromV3(&v3Y)
	tmpV4_2.MakeFromV3(&v3Z)
	tmpV4_3.MakeFromP3(eyePos)
	m4EyeFrame.MakeFromCols(&tmpV4_0, &tmpV4_1, &tmpV4_2, &tmpV4_3)
	result.OrthoInverse(&m4EyeFrame)
}
// MakePerspective sets result to a right-handed perspective projection
// (OpenGL-style clip space) for the given vertical field of view,
// aspect ratio and near/far clip distances.
func (result *Matrix4) MakePerspective(fovyRadians, aspect, zNear, zFar float32) {
	// cot(fovy/2), written via tan of the complementary angle.
	f := tan(g_PI_OVER_2 - (0.5 * fovyRadians))
	rangeInv := 1.0 / (zNear - zFar)
	*result = Matrix4{}
	result[m4col0+x] = f / aspect
	result[m4col1+y] = f
	result[m4col2+z] = (zNear + zFar) * rangeInv
	result[m4col2+w] = -1.0
	result[m4col3+z] = zNear * zFar * rangeInv * 2.0
}
// MakeFrustum sets result to the perspective projection defined by an
// asymmetric view frustum with the given near-plane extents and clip
// distances.
func (result *Matrix4) MakeFrustum(left, right, bottom, top, zNear, zFar float32) {
	sumRL := right + left
	sumTB := top + bottom
	sumNF := zNear + zFar
	invRL := 1.0 / (right - left)
	invTB := 1.0 / (top - bottom)
	invNF := 1.0 / (zNear - zFar)
	n2 := zNear + zNear
	*result = Matrix4{}
	result[m4col0+x] = n2 * invRL
	result[m4col1+y] = n2 * invTB
	result[m4col2+x] = sumRL * invRL
	result[m4col2+y] = sumTB * invTB
	result[m4col2+z] = sumNF * invNF
	result[m4col2+w] = -1.0
	result[m4col3+z] = (n2 * invNF) * zFar
}
// MakeOrthographic sets result to an orthographic projection mapping the
// given axis-aligned view volume to clip space.
func (result *Matrix4) MakeOrthographic(left, right, bottom, top, zNear, zFar float32) {
	sumRL := right + left
	sumTB := top + bottom
	sumNF := zNear + zFar
	invRL := 1.0 / (right - left)
	invTB := 1.0 / (top - bottom)
	invNF := 1.0 / (zNear - zFar)
	*result = Matrix4{}
	result[m4col0+x] = invRL + invRL
	result[m4col1+y] = invTB + invTB
	result[m4col2+z] = invNF + invNF
	result[m4col3+x] = -sumRL * invRL
	result[m4col3+y] = -sumTB * invTB
	result[m4col3+z] = sumNF * invNF
	result[m4col3+w] = 1.0
}
// Select copies mat1 into result when select1 is non-zero, and mat0
// otherwise.
func (result *Matrix4) Select(mat0, mat1 *Matrix4, select1 int) {
	src := mat0
	if select1 != 0 {
		src = mat1
	}
	*result = *src
}
//Transform3
// Offsets of the four 3-element columns of a Transform3 within its flat
// column-major backing array (column stride 3).
const (
	t3col0 = 0 // column 0: x-axis
	t3col1 = 3 // column 1: y-axis
	t3col2 = 6 // column 2: z-axis
	t3col3 = 9 // column 3: translation
)
// MakeFromScalar fills every element of result with scalar.
func (result *Transform3) MakeFromScalar(scalar float32) {
	for i := range result {
		result[i] = scalar
	}
}
// Copy replaces the contents of t with those of other.
func (t *Transform3) Copy(other *Transform3) {
	*t = *other
}
// MakeFromCols builds the transform column by column: col0-col2 form the
// upper 3x3, col3 is the translation.
func (result *Transform3) MakeFromCols(col0, col1, col2, col3 *Vector3) {
	result.SetCol(0, col0)
	result.SetCol(1, col1)
	result.SetCol(2, col2)
	result.SetCol(3, col3)
}
// MakeFromM3V3 builds the transform from a 3x3 matrix (rotation/scale
// part) and a translation vector.
func (result *Transform3) MakeFromM3V3(tfrm *Matrix3, translateVec *Vector3) {
	result.SetUpper3x3(tfrm)
	result.SetTranslation(translateVec)
}

// MakeFromQV3 builds the transform from a unit-quaternion rotation and a
// translation vector.
func (result *Transform3) MakeFromQV3(unitQuat *Quaternion, translateVec *Vector3) {
	var tmpM3_0 Matrix3
	tmpM3_0.MakeFromQ(unitQuat)
	result.SetUpper3x3(&tmpM3_0)
	result.SetTranslation(translateVec)
}
// SetCol stores vec into column col (0-3) of t. Out-of-range column
// indices are ignored, matching the original switch's fall-through.
func (t *Transform3) SetCol(col int, vec *Vector3) {
	if col < 0 || col > 3 {
		return
	}
	base := col * 3
	t[base+x] = vec[x]
	t[base+y] = vec[y]
	t[base+z] = vec[z]
}
// SetRow stores vec into row (0-2) of t, across all four columns.
// Fix: the fourth (translation) column was previously never written, so
// vec[w] was silently dropped — asymmetric with Row, which reads
// t3col3+row. All four columns are now set.
func (t *Transform3) SetRow(row int, vec *Vector4) {
	t[t3col0+row] = vec[x]
	t[t3col1+row] = vec[y]
	t[t3col2+row] = vec[z]
	t[t3col3+row] = vec[w]
}
// SetElem writes val at column col, row of t.
// Fix: Transform3 columns are 3 elements wide (t3col0..t3col3 are
// 0, 3, 6, 9), so the flat index is col*3+row; the previous col*4+row
// addressed the wrong element for col >= 1 and indexed past the end of
// the 12-element array for col == 3.
func (t *Transform3) SetElem(col, row int, val float32) {
	t[col*3+row] = val
}

// Elem returns the element at column col, row of t (see SetElem for the
// indexing fix).
func (t *Transform3) Elem(col, row int) float32 {
	return t[col*3+row]
}
// Col copies column col (0-3) of t into result. Out-of-range column
// indices leave result untouched, matching the original switch behavior.
func (t *Transform3) Col(result *Vector3, col int) {
	if col < 0 || col > 3 {
		return
	}
	base := col * 3
	result[x] = t[base+x]
	result[y] = t[base+y]
	result[z] = t[base+z]
}
// Row copies row (0-2) of t, across all four columns, into result.
func (t *Transform3) Row(result *Vector4, row int) {
	for c := 0; c < 4; c++ {
		result[c] = t[3*c+row]
	}
}
// Inverse sets result to the inverse of the affine transform tfrm. The
// upper 3x3 is inverted via its classical adjoint divided by the
// determinant (it need not be orthogonal, but must be invertible); the
// translation becomes -(inverse3x3 * tfrm.translation).
func (result *Transform3) Inverse(tfrm *Transform3) {
	// Writing result while still reading tfrm would corrupt the math when
	// they alias, so route the aliased case through a copy.
	if unsafe.Pointer(result) == unsafe.Pointer(tfrm) {
		result.InverseSelf()
		return
	}
	var tmp0, tmp1, tmp2, tmpV3_3, tmpV3_4, tmpV3_5 Vector3
	var tfrmCol2 Vector3
	// tmp0, tmp1, tmp2 are the cross products col1 x col2, col2 x col0 and
	// col0 x col1 — the columns of the adjoint (cofactor) matrix.
	tmp0[x] = tfrm[t3col1+y]*tfrm[t3col2+z] - tfrm[t3col1+z]*tfrm[t3col2+y]
	tmp0[y] = tfrm[t3col1+z]*tfrm[t3col2+x] - tfrm[t3col1+x]*tfrm[t3col2+z]
	tmp0[z] = tfrm[t3col1+x]*tfrm[t3col2+y] - tfrm[t3col1+y]*tfrm[t3col2+x]
	tmp1[x] = tfrm[t3col2+y]*tfrm[t3col0+z] - tfrm[t3col2+z]*tfrm[t3col0+y]
	tmp1[y] = tfrm[t3col2+z]*tfrm[t3col0+x] - tfrm[t3col2+x]*tfrm[t3col0+z]
	tmp1[z] = tfrm[t3col2+x]*tfrm[t3col0+y] - tfrm[t3col2+y]*tfrm[t3col0+x]
	tmp2[x] = tfrm[t3col0+y]*tfrm[t3col1+z] - tfrm[t3col0+z]*tfrm[t3col1+y]
	tmp2[y] = tfrm[t3col0+z]*tfrm[t3col1+x] - tfrm[t3col0+x]*tfrm[t3col1+z]
	tmp2[z] = tfrm[t3col0+x]*tfrm[t3col1+y] - tfrm[t3col0+y]*tfrm[t3col1+x]
	tfrm.Col(&tfrmCol2, 2)
	// det(upper 3x3) = col2 . (col0 x col1); division by zero yields Inf/NaN
	// elements for singular input (no error is reported).
	detinv := (1.0 / tfrmCol2.Dot(&tmp2))
	// The adjoint rows scaled by 1/det form the transposed layout below.
	result[t3col0+x] = (tmp0[x] * detinv)
	result[t3col0+y] = (tmp1[x] * detinv)
	result[t3col0+z] = (tmp2[x] * detinv)
	result[t3col1+x] = (tmp0[y] * detinv)
	result[t3col1+y] = (tmp1[y] * detinv)
	result[t3col1+z] = (tmp2[y] * detinv)
	result[t3col2+x] = (tmp0[z] * detinv)
	result[t3col2+y] = (tmp1[z] * detinv)
	result[t3col2+z] = (tmp2[z] * detinv)
	// Inverse translation: -(inverse3x3 * translation), accumulated as a
	// weighted sum of the inverse's columns.
	tmpV3_0 := Vector3{
		result[t3col0+x] * tfrm[t3col3+x],
		result[t3col0+y] * tfrm[t3col3+x],
		result[t3col0+z] * tfrm[t3col3+x]}
	tmpV3_1 := Vector3{
		result[t3col1+x] * tfrm[t3col3+y],
		result[t3col1+y] * tfrm[t3col3+y],
		result[t3col1+z] * tfrm[t3col3+y]}
	tmpV3_2 := Vector3{
		result[t3col2+x] * tfrm[t3col3+z],
		result[t3col2+y] * tfrm[t3col3+z],
		result[t3col2+z] * tfrm[t3col3+z]}
	tmpV3_3.Add(&tmpV3_1, &tmpV3_2)
	tmpV3_4.Add(&tmpV3_0, &tmpV3_3)
	tmpV3_5.Neg(&tmpV3_4)
	result[t3col3+x] = tmpV3_5[x]
	result[t3col3+y] = tmpV3_5[y]
	result[t3col3+z] = tmpV3_5[z]
}

// InverseSelf inverts t in place.
func (t *Transform3) InverseSelf() {
	tmp := *t
	t.Inverse(&tmp)
}
// OrthoInverse sets result to the inverse of tfrm assuming its upper 3x3
// is orthogonal (a pure rotation): the rotation part is transposed and
// the translation becomes -(transpose * tfrm.translation). Cheaper than
// Inverse, but only correct for orthogonal input.
func (result *Transform3) OrthoInverse(tfrm *Transform3) {
	// Aliased input would be partially overwritten; go through a copy.
	if unsafe.Pointer(result) == unsafe.Pointer(tfrm) {
		result.OrthoInverseSelf()
		return
	}
	var tmpV3_3, tmpV3_4, tmpV3_5 Vector3
	// Transpose of the upper 3x3.
	result[t3col0+x] = tfrm[t3col0+x]
	result[t3col0+y] = tfrm[t3col1+x]
	result[t3col0+z] = tfrm[t3col2+x]
	result[t3col1+x] = tfrm[t3col0+y]
	result[t3col1+y] = tfrm[t3col1+y]
	result[t3col1+z] = tfrm[t3col2+y]
	result[t3col2+x] = tfrm[t3col0+z]
	result[t3col2+y] = tfrm[t3col1+z]
	result[t3col2+z] = tfrm[t3col2+z]
	// Inverse translation: -(transpose * translation), accumulated as a
	// weighted sum of the transposed columns.
	tmpV3_0 := Vector3{
		result[t3col0+x] * tfrm[t3col3+x],
		result[t3col0+y] * tfrm[t3col3+x],
		result[t3col0+z] * tfrm[t3col3+x]}
	tmpV3_1 := Vector3{
		result[t3col1+x] * tfrm[t3col3+y],
		result[t3col1+y] * tfrm[t3col3+y],
		result[t3col1+z] * tfrm[t3col3+y]}
	tmpV3_2 := Vector3{
		result[t3col2+x] * tfrm[t3col3+z],
		result[t3col2+y] * tfrm[t3col3+z],
		result[t3col2+z] * tfrm[t3col3+z]}
	tmpV3_3.Add(&tmpV3_1, &tmpV3_2)
	tmpV3_4.Add(&tmpV3_0, &tmpV3_3)
	tmpV3_5.Neg(&tmpV3_4)
	result[t3col3+x] = tmpV3_5[x]
	result[t3col3+y] = tmpV3_5[y]
	result[t3col3+z] = tmpV3_5[z]
}

// OrthoInverseSelf inverts result in place (orthogonal upper 3x3 assumed).
func (result *Transform3) OrthoInverseSelf() {
	tmp := *result
	result.OrthoInverse(&tmp)
}
// AbsPerElem stores the element-wise absolute value of tfrm into result.
func (result *Transform3) AbsPerElem(tfrm *Transform3) {
	for i := range result {
		result[i] = abs(tfrm[i])
	}
}
// MulT3 stores the rotation/scale part of tfrm applied to vec into
// result; the translation column is ignored (direction-vector semantics).
// Aliasing of result with vec is handled via a copy.
func (result *Vector3) MulT3(tfrm *Transform3, vec *Vector3) {
	if unsafe.Pointer(result) == unsafe.Pointer(vec) {
		result.MulT3Self(tfrm)
		return
	}
	for r := 0; r < 3; r++ {
		result[r] = tfrm[t3col0+r]*vec[x] + tfrm[t3col1+r]*vec[y] + tfrm[t3col2+r]*vec[z]
	}
}

// MulT3Self transforms result by tfrm in place.
func (result *Vector3) MulT3Self(tfrm *Transform3) {
	tmp := *result
	result.MulT3(tfrm, &tmp)
}
// MulT3 stores tfrm applied to the point pnt into result, including the
// translation column (position semantics). Aliasing of result with pnt
// is handled via a copy.
func (result *Point3) MulT3(tfrm *Transform3, pnt *Point3) {
	if unsafe.Pointer(result) == unsafe.Pointer(pnt) {
		result.MulT3Self(tfrm)
		return
	}
	for r := 0; r < 3; r++ {
		result[r] = tfrm[t3col0+r]*pnt[x] + tfrm[t3col1+r]*pnt[y] + tfrm[t3col2+r]*pnt[z] + tfrm[t3col3+r]
	}
}

// MulT3Self transforms result by tfrm in place.
func (result *Point3) MulT3Self(tfrm *Transform3) {
	tmp := *result
	result.MulT3(tfrm, &tmp)
}
// Mul stores the composition tfrm0 * tfrm1 into result. When result
// aliases an operand the computation is rerouted through a copy.
func (result *Transform3) Mul(tfrm0, tfrm1 *Transform3) {
	if unsafe.Pointer(result) == unsafe.Pointer(tfrm0) {
		tmp := *result
		result.Mul(&tmp, tfrm1)
		return
	}
	if unsafe.Pointer(result) == unsafe.Pointer(tfrm1) {
		tmp := *result
		result.Mul(tfrm0, &tmp)
		return
	}
	for c := 0; c < 4; c++ {
		for r := 0; r < 3; r++ {
			e := tfrm0[t3col0+r]*tfrm1[3*c+x] + tfrm0[t3col1+r]*tfrm1[3*c+y] + tfrm0[t3col2+r]*tfrm1[3*c+z]
			if c == 3 {
				// The composed translation picks up tfrm0's own translation.
				e += tfrm0[t3col3+r]
			}
			result[3*c+r] = e
		}
	}
}

// MulSelf replaces result with result * tfrm.
func (result *Transform3) MulSelf(tfrm *Transform3) {
	tmp := *result
	result.Mul(&tmp, tfrm)
}
// MulPerElem stores the element-wise product of tfrm0 and tfrm1 into
// result. result may alias either operand.
func (result *Transform3) MulPerElem(tfrm0, tfrm1 *Transform3) {
	for i := range result {
		result[i] = tfrm0[i] * tfrm1[i]
	}
}

// MulPerElemSelf multiplies result element-wise by tfrm in place.
func (result *Transform3) MulPerElemSelf(tfrm *Transform3) {
	result.MulPerElem(result, tfrm)
}
// MakeIdentity sets result to the identity transform (identity upper
// 3x3, zero translation).
func (result *Transform3) MakeIdentity() {
	*result = Transform3{}
	result[t3col0+x] = 1.0
	result[t3col1+y] = 1.0
	result[t3col2+z] = 1.0
}
// SetUpper3x3 copies m into the first three columns of t, leaving the
// translation column untouched. Both layouts are column-major with
// stride 3, so the first nine elements correspond one-to-one.
func (t *Transform3) SetUpper3x3(m *Matrix3) {
	for i := 0; i < 9; i++ {
		t[i] = m[i]
	}
}

// Upper3x3 copies the first three columns of t into result.
func (t *Transform3) Upper3x3(result *Matrix3) {
	for i := 0; i < 9; i++ {
		result[i] = t[i]
	}
}
// SetTranslation copies translateVec into the translation column of t. The
// rotation part is left untouched.
func (t *Transform3) SetTranslation(translateVec *Vector3) {
	for _, elem := range [3]int{x, y, z} {
		t[t3col3+elem] = translateVec[elem]
	}
}
// Translation copies the translation column of tfrm into result.
func (tfrm *Transform3) Translation(result *Vector3) {
	for _, elem := range [3]int{x, y, z} {
		result[elem] = tfrm[t3col3+elem]
	}
}
// MakeRotationX sets this transform to a rotation of radians about the x
// axis, with a zero translation column. Column layout matches MakeRotationY
// and MakeRotationZ: col0=(1,0,0), col1=(0,c,s), col2=(0,-s,c), col3=(0,0,0).
//
// Bug fix: the original wrote the z-axis values (0,-s,c) into column 1 a
// second time (clobbering the y-axis it had just set), zeroed column 2, and
// never initialized the translation column at all.
func (result *Transform3) MakeRotationX(radians float32) {
	s := sin(radians)
	c := cos(radians)
	//x-axis
	result[t3col0+x] = 1.0
	result[t3col0+y] = 0.0
	result[t3col0+z] = 0.0
	//y-axis
	result[t3col1+x] = 0.0
	result[t3col1+y] = c
	result[t3col1+z] = s
	//z-axis
	result[t3col2+x] = 0.0
	result[t3col2+y] = -s
	result[t3col2+z] = c
	//w-axis
	result[t3col3+x] = 0.0
	result[t3col3+y] = 0.0
	result[t3col3+z] = 0.0
}
// MakeRotationY sets this transform to a rotation of radians about the y
// axis, with a zero translation column.
func (result *Transform3) MakeRotationY(radians float32) {
	s := sin(radians)
	c := cos(radians)
	//x-axis
	result[t3col0+x] = c
	result[t3col0+y] = 0.0
	result[t3col0+z] = -s
	//y-axis
	result[t3col1+x] = 0.0
	result[t3col1+y] = 1.0
	result[t3col1+z] = 0.0
	//z-axis
	result[t3col2+x] = s
	result[t3col2+y] = 0.0
	result[t3col2+z] = c
	//w-axis
	result[t3col3+x] = 0.0
	result[t3col3+y] = 0.0
	result[t3col3+z] = 0.0
}
// MakeRotationZ sets this transform to a rotation of radians about the z
// axis, with a zero translation column.
func (result *Transform3) MakeRotationZ(radians float32) {
	s := sin(radians)
	c := cos(radians)
	//x-axis
	result[t3col0+x] = c
	result[t3col0+y] = s
	result[t3col0+z] = 0.0
	//y-axis
	result[t3col1+x] = -s
	result[t3col1+y] = c
	result[t3col1+z] = 0.0
	//z-axis
	result[t3col2+x] = 0.0
	result[t3col2+y] = 0.0
	result[t3col2+z] = 1.0
	//w-axis
	result[t3col3+x] = 0.0
	result[t3col3+y] = 0.0
	result[t3col3+z] = 0.0
}
// MakeRotationXYZ sets this transform to the composed Euler rotation
// Rz * Ry * Rx built from the per-axis angles in radiansXYZ, with a zero
// translation column. The element formulas below are the expanded product of
// the three axis rotations.
func (result *Transform3) MakeRotationXYZ(radiansXYZ *Vector3) {
	sX := sin(radiansXYZ[x])
	cX := cos(radiansXYZ[x])
	sY := sin(radiansXYZ[y])
	cY := cos(radiansXYZ[y])
	sZ := sin(radiansXYZ[z])
	cZ := cos(radiansXYZ[z])
	// Shared sub-products reused by the col1/col2 terms.
	tmp0 := (cZ * sY)
	tmp1 := (sZ * sY)
	result[t3col0+x] = (cZ * cY)
	result[t3col0+y] = (sZ * cY)
	result[t3col0+z] = -sY
	result[t3col1+x] = ((tmp0 * sX) - (sZ * cX))
	result[t3col1+y] = ((tmp1 * sX) + (cZ * cX))
	result[t3col1+z] = (cY * sX)
	result[t3col2+x] = ((tmp0 * cX) + (sZ * sX))
	result[t3col2+y] = ((tmp1 * cX) - (cZ * sX))
	result[t3col2+z] = (cY * cX)
	//w-axis
	result[t3col3+x] = 0.0
	result[t3col3+y] = 0.0
	result[t3col3+z] = 0.0
}
// MakeRotationAxis sets this transform to a rotation of radians about an
// arbitrary axis, using Rodrigues' rotation formula, with a zero translation
// column. unitVec is assumed to already be normalized — the formula is only
// a rotation for a unit axis (TODO confirm callers guarantee this).
func (result *Transform3) MakeRotationAxis(radians float32, unitVec *Vector3) {
	s := sin(radians)
	c := cos(radians)
	X := unitVec[x]
	Y := unitVec[y]
	Z := unitVec[z]
	// Pairwise axis products shared by the off-diagonal terms.
	xy := X * Y
	yz := Y * Z
	zx := Z * X
	oneMinusC := 1.0 - c
	result[t3col0+x] = (((X * X) * oneMinusC) + c)
	result[t3col0+y] = ((xy * oneMinusC) + (Z * s))
	result[t3col0+z] = ((zx * oneMinusC) - (Y * s))
	result[t3col1+x] = ((xy * oneMinusC) - (Z * s))
	result[t3col1+y] = (((Y * Y) * oneMinusC) + c)
	result[t3col1+z] = ((yz * oneMinusC) + (X * s))
	result[t3col2+x] = ((zx * oneMinusC) + (Y * s))
	result[t3col2+y] = ((yz * oneMinusC) - (X * s))
	result[t3col2+z] = (((Z * Z) * oneMinusC) + c)
	//w-axis
	result[t3col3+x] = 0.0
	result[t3col3+y] = 0.0
	result[t3col3+z] = 0.0
}
// MakeRotationQ builds a rotation-only transform from a unit quaternion; the
// translation column is set to zero.
func (result *Transform3) MakeRotationQ(unitQuat *Quaternion) {
	var rot Matrix3
	rot.MakeFromQ(unitQuat)
	zero := Vector3{0, 0, 0}
	result.MakeFromM3V3(&rot, &zero)
}
// MakeScale builds a pure scaling transform: scaleVec on the diagonal of the
// rotation part, everything else (including the translation column) zero.
func (result *Transform3) MakeScale(scaleVec *Vector3) {
	for _, col := range [4]int{t3col0, t3col1, t3col2, t3col3} {
		result[col+x] = 0.0
		result[col+y] = 0.0
		result[col+z] = 0.0
	}
	result[t3col0+x] = scaleVec[x]
	result[t3col1+y] = scaleVec[y]
	result[t3col2+z] = scaleVec[z]
}
// AppendScale computes result = tfrm * scale(scaleVec): rotation column i is
// multiplied by scaleVec's i-th component; the translation column is copied
// through unchanged.
func (result *Transform3) AppendScale(tfrm *Transform3, scaleVec *Vector3) {
	result[t3col0+x] = tfrm[t3col0+x] * scaleVec[x]
	result[t3col0+y] = tfrm[t3col0+y] * scaleVec[x]
	result[t3col0+z] = tfrm[t3col0+z] * scaleVec[x]
	result[t3col1+x] = tfrm[t3col1+x] * scaleVec[y]
	result[t3col1+y] = tfrm[t3col1+y] * scaleVec[y]
	result[t3col1+z] = tfrm[t3col1+z] * scaleVec[y]
	result[t3col2+x] = tfrm[t3col2+x] * scaleVec[z]
	result[t3col2+y] = tfrm[t3col2+y] * scaleVec[z]
	result[t3col2+z] = tfrm[t3col2+z] * scaleVec[z]
	result[t3col3+x] = tfrm[t3col3+x]
	result[t3col3+y] = tfrm[t3col3+y]
	result[t3col3+z] = tfrm[t3col3+z]
}
// AppendScaleSelf scales this transform's axis columns in place
// (result = result * scale(scaleVec)). Safe without a copy because
// AppendScale reads and writes each element exactly once.
func (result *Transform3) AppendScaleSelf(scaleVec *Vector3) {
	result.AppendScale(result, scaleVec)
}
// PrependScale computes result = scale(scaleVec) * tfrm: every column of
// tfrm, including the translation, is multiplied component-wise by scaleVec
// (i.e. the rows of the transform are scaled).
func (result *Transform3) PrependScale(scaleVec *Vector3, tfrm *Transform3) {
	result[t3col0+x] = tfrm[t3col0+x] * scaleVec[x]
	result[t3col0+y] = tfrm[t3col0+y] * scaleVec[y]
	result[t3col0+z] = tfrm[t3col0+z] * scaleVec[z]
	result[t3col1+x] = tfrm[t3col1+x] * scaleVec[x]
	result[t3col1+y] = tfrm[t3col1+y] * scaleVec[y]
	result[t3col1+z] = tfrm[t3col1+z] * scaleVec[z]
	result[t3col2+x] = tfrm[t3col2+x] * scaleVec[x]
	result[t3col2+y] = tfrm[t3col2+y] * scaleVec[y]
	result[t3col2+z] = tfrm[t3col2+z] * scaleVec[z]
	result[t3col3+x] = tfrm[t3col3+x] * scaleVec[x]
	result[t3col3+y] = tfrm[t3col3+y] * scaleVec[y]
	result[t3col3+z] = tfrm[t3col3+z] * scaleVec[z]
}
// PrependScaleSelf scales this transform's rows in place
// (result = scale(scaleVec) * result). Safe without a copy because
// PrependScale reads and writes each element exactly once.
func (result *Transform3) PrependScaleSelf(scaleVec *Vector3) {
	result.PrependScale(scaleVec, result)
}
// MakeTranslation builds a transform with an identity rotation part and the
// given translation column.
func (result *Transform3) MakeTranslation(translateVec *Vector3) {
	// Identity rotation part: zero the three axis columns, then set the
	// unit diagonal.
	for _, col := range [3]int{t3col0, t3col1, t3col2} {
		result[col+x] = 0.0
		result[col+y] = 0.0
		result[col+z] = 0.0
	}
	result[t3col0+x] = 1.0
	result[t3col1+y] = 1.0
	result[t3col2+z] = 1.0
	for _, elem := range [3]int{x, y, z} {
		result[t3col3+elem] = translateVec[elem]
	}
}
// Select copies tfrm1 into result when select1 is non-zero, otherwise copies
// tfrm0.
func (result *Transform3) Select(tfrm0, tfrm1 *Transform3, select1 int) {
	src := tfrm0
	if select1 != 0 {
		src = tfrm1
	}
	for _, col := range [4]int{t3col0, t3col1, t3col2, t3col3} {
		for _, elem := range [3]int{x, y, z} {
			result[col+elem] = src[col+elem]
		}
	}
}
// V3Outer stores the outer product of two 3-D vectors in result: column j of
// the matrix is tfrm0 scaled by the j-th component of tfrm1
// (result[i][j] = tfrm0[i] * tfrm1[j]).
func (result *Matrix3) V3Outer(tfrm0, tfrm1 *Vector3) {
	elems := [3]int{x, y, z}
	for j, col := range [3]int{m3col0, m3col1, m3col2} {
		scale := tfrm1[elems[j]]
		for _, elem := range elems {
			result[col+elem] = tfrm0[elem] * scale
		}
	}
}
// V4Outer stores the outer product of two 4-D vectors in result: column j of
// the matrix is tfrm0 scaled by the j-th component of tfrm1
// (result[i][j] = tfrm0[i] * tfrm1[j]).
//
// Bug fix: the original built column 3 from tfrm1[z] instead of tfrm1[w],
// duplicating column 2 and dropping the w component of the second operand.
func (result *Matrix4) V4Outer(tfrm0, tfrm1 *Vector4) {
	result[m4col0+x] = tfrm0[x] * tfrm1[x]
	result[m4col0+y] = tfrm0[y] * tfrm1[x]
	result[m4col0+z] = tfrm0[z] * tfrm1[x]
	result[m4col0+w] = tfrm0[w] * tfrm1[x]
	result[m4col1+x] = tfrm0[x] * tfrm1[y]
	result[m4col1+y] = tfrm0[y] * tfrm1[y]
	result[m4col1+z] = tfrm0[z] * tfrm1[y]
	result[m4col1+w] = tfrm0[w] * tfrm1[y]
	result[m4col2+x] = tfrm0[x] * tfrm1[z]
	result[m4col2+y] = tfrm0[y] * tfrm1[z]
	result[m4col2+z] = tfrm0[z] * tfrm1[z]
	result[m4col2+w] = tfrm0[w] * tfrm1[z]
	result[m4col3+x] = tfrm0[x] * tfrm1[w]
	result[m4col3+y] = tfrm0[y] * tfrm1[w]
	result[m4col3+z] = tfrm0[z] * tfrm1[w]
	result[m4col3+w] = tfrm0[w] * tfrm1[w]
}
// RowMulMat3 treats vec as a row vector and multiplies it by mat: each output
// component is the dot product of vec with the corresponding column of mat
// (result = vec^T * mat). If result aliases vec, the computation is redirected
// through RowMulMat3Self, which takes a copy first so later components are
// not computed from already-overwritten inputs.
func (result *Vector3) RowMulMat3(vec *Vector3, mat *Matrix3) {
	if unsafe.Pointer(result) == unsafe.Pointer(vec) {
		result.RowMulMat3Self(mat)
		return
	}
	result[x] = (((vec[x] * mat[m3col0+x]) + (vec[y] * mat[m3col0+y])) + (vec[z] * mat[m3col0+z]))
	result[y] = (((vec[x] * mat[m3col1+x]) + (vec[y] * mat[m3col1+y])) + (vec[z] * mat[m3col1+z]))
	result[z] = (((vec[x] * mat[m3col2+x]) + (vec[y] * mat[m3col2+y])) + (vec[z] * mat[m3col2+z]))
}
// RowMulMat3Self multiplies this row vector by mat in place, copying the
// vector first so the multiplication reads stable inputs.
func (result *Vector3) RowMulMat3Self(mat *Matrix3) {
	orig := *result
	result.RowMulMat3(&orig, mat)
}
// V3CrossMatrix builds the skew-symmetric cross-product matrix of vec, i.e.
// the matrix M such that M * u == vec x u for any vector u.
func (result *Matrix3) V3CrossMatrix(vec *Vector3) {
	result[m3col0+x] = 0.0
	result[m3col0+y] = vec[z]
	result[m3col0+z] = -vec[y]
	result[m3col1+x] = -vec[z]
	result[m3col1+y] = 0.0
	result[m3col1+z] = vec[x]
	result[m3col2+x] = vec[y]
	result[m3col2+y] = -vec[x]
	result[m3col2+z] = 0.0
}
// V3CrossMatrixMul computes crossMatrix(vec) * mat without materializing the
// cross-product matrix: each column of the result is vec crossed with the
// corresponding column of mat. If result aliases mat, the computation is
// redirected through V3CrossMatrixMulSelf, which takes a copy first.
func (result *Matrix3) V3CrossMatrixMul(vec *Vector3, mat *Matrix3) {
	if unsafe.Pointer(result) == unsafe.Pointer(mat) {
		result.V3CrossMatrixMulSelf(vec)
		return
	}
	result[m3col0+x] = vec[y]*mat[m3col0+z] - vec[z]*mat[m3col0+y]
	result[m3col0+y] = vec[z]*mat[m3col0+x] - vec[x]*mat[m3col0+z]
	result[m3col0+z] = vec[x]*mat[m3col0+y] - vec[y]*mat[m3col0+x]
	result[m3col1+x] = vec[y]*mat[m3col1+z] - vec[z]*mat[m3col1+y]
	result[m3col1+y] = vec[z]*mat[m3col1+x] - vec[x]*mat[m3col1+z]
	result[m3col1+z] = vec[x]*mat[m3col1+y] - vec[y]*mat[m3col1+x]
	result[m3col2+x] = vec[y]*mat[m3col2+z] - vec[z]*mat[m3col2+y]
	result[m3col2+y] = vec[z]*mat[m3col2+x] - vec[x]*mat[m3col2+z]
	result[m3col2+z] = vec[x]*mat[m3col2+y] - vec[y]*mat[m3col2+x]
}
func (result *Matrix3) V3CrossMatrixMulSelf(vec *Vector3) {
tmp := *result
result.V3CrossMatrixMul(vec, &tmp)
} | matrix.go | 0.672117 | 0.499268 | matrix.go | starcoder |
package equationdisassembler
import (
"errors"
"fmt"
"log"
"strings"
"unicode"
"unicode/utf8"
)
const secondDegreeSize = 3 // size of "x^2"
const firstDegreeSize = 1 // size of "x"
// Disassemble splits a quadratic equation written as "Ax^2+Bx+C=0" (with the
// given variable name) into its three coefficients. On any parse failure the
// cause is logged and a message with IsDisassembleFailed set is returned.
//
// Idiom fix: removed the un-gofmt'd trailing semicolons and normalized the
// composite-literal spacing.
func Disassemble(equation string, variable string) DisassembledEquationMessage {
	log.Printf("Start disassemble equation %s", equation)
	a, secondDegreeIndex, err := findA(equation, variable)
	if err != nil {
		log.Print(err)
		return DisassembledEquationMessage{IsDisassembleFailed: true}
	}
	b, firstDegreeIndex, err := findB(equation, variable, secondDegreeIndex+secondDegreeSize)
	if err != nil {
		log.Print(err)
		return DisassembledEquationMessage{IsDisassembleFailed: true}
	}
	c, err := findC(equation, firstDegreeIndex+firstDegreeSize)
	if err != nil {
		log.Print(err)
		return DisassembledEquationMessage{IsDisassembleFailed: true}
	}
	log.Printf("The coefficients of %s are: %s, %s, %s", equation, a, b, c)
	return DisassembledEquationMessage{
		Equation: equation,
		A:        a,
		B:        b,
		C:        c,
	}
}
// findA extracts the quadratic coefficient (A) from the equation and returns
// it with the index of the "<var>^2" term. An absent coefficient means 1, a
// bare minus sign means -1; anything other than a sign or a digit before the
// quadratic term is rejected.
func findA(equation string, variable string) (string, int, error) {
	secondDegreeVariable := variable + "^2"
	secondDegreeIndex := strings.Index(equation, secondDegreeVariable)
	if secondDegreeIndex < 0 {
		errorMessage := fmt.Sprintf("Could not find second degree variable for equation %s", equation)
		return "", -1, errors.New(errorMessage)
	}
	log.Printf("Index of %s: %d", secondDegreeVariable, secondDegreeIndex)
	a := "1"
	if secondDegreeIndex > 0 {
		a = equation[:secondDegreeIndex]
		switch {
		case a[0] == '+':
			a = a[1:]
		case a[0] == '-':
			// Keep the explicit minus sign as part of the coefficient.
		default:
			firstRune, _ := utf8.DecodeRuneInString(a)
			if !unicode.IsDigit(firstRune) {
				return "", -1, errors.New("Expecting '+' or '-' signs only")
			}
		}
		if a == "-" {
			a = "-1"
		}
	}
	log.Printf("Found A: %s", a)
	return a, secondDegreeIndex, nil
}
// findB extracts the linear coefficient (B) from the equation and returns it
// together with the absolute index of the first-degree variable.
// secondDegreeEndIndex is the index just past the "x^2" term, so the scan
// starts after the quadratic term.
//
// Bug fixes: the original panicked with an index-out-of-range when the
// variable immediately followed the quadratic term (empty coefficient, e.g.
// "x^2x=0"), and returned "-" instead of "-1" for a bare minus sign —
// inconsistent with findA's handling of implicit coefficients.
func findB(equation string, variable string, secondDegreeEndIndex int) (string, int, error) {
	equationAfterSecondDegree := equation[secondDegreeEndIndex:]
	firstDegreeIndex := strings.Index(equationAfterSecondDegree, variable)
	if firstDegreeIndex == -1 {
		errorMessage := fmt.Sprintf("Could not find first degree variable for equation %s", equation)
		return "", -1, errors.New(errorMessage)
	}
	b := equationAfterSecondDegree[:firstDegreeIndex]
	// A '+' or '-' separator is required between the x^2 and Bx terms; an
	// empty coefficient means the input is malformed. Guard before b[0].
	if len(b) == 0 {
		return "", -1, errors.New("Expecting '+' or '-' signs only")
	}
	if b[0] == '+' {
		b = b[1:]
	} else if b[0] != '-' {
		return "", -1, errors.New("Expecting '+' or '-' signs only")
	}
	// Implicit coefficients: "+x" means 1, "-x" means -1 (mirrors findA).
	if len(b) == 0 {
		b = "1"
	} else if b == "-" {
		b = "-1"
	}
	firstDegreeIndex += secondDegreeEndIndex
	log.Printf("Index of %s: %d", variable, firstDegreeIndex)
	log.Printf("Found B: %s", b)
	return b, firstDegreeIndex, nil
}
func findC(equation string, firstDegreeEndIndex int) (string, error) {
equalSignIndex := strings.Index(equation, "=")
if equalSignIndex == -1 {
errorMessage := fmt.Sprintf("Could not find equal sign")
return "", errors.New(errorMessage)
}
c := equation[firstDegreeEndIndex:equalSignIndex]
if c[0] == '+' {
c = c[1:]
}
log.Printf("Found C: %s", c)
return c, nil;
} | src/equationdisassembler/equation_disassembler.go | 0.569853 | 0.472805 | equation_disassembler.go | starcoder |
package processor
import (
"errors"
"fmt"
"strings"
"github.com/Jeffail/benthos/lib/log"
"github.com/Jeffail/benthos/lib/metrics"
"github.com/Jeffail/benthos/lib/types"
"github.com/Jeffail/gabs"
)
//------------------------------------------------------------------------------
func init() {
	// Register the process_map processor constructor, its documentation, and
	// a config sanitiser with the global processor registry.
	Constructors["process_map"] = TypeSpec{
		constructor: NewProcessMap,
		description: `
A processor that extracts and maps fields from the original payload into new
objects, applies a list of processors to the newly constructed objects, and
finally maps the result back into the original payload.

Map paths are arbitrary dot paths, target path hierarchies are constructed if
they do not yet exist. Processing is skipped for message parts where the premap
targets aren't found, for optional premap targets use ` + "`premap_optional`" + `.

If the pre-map is empty then the full payload is sent to the processors. The
post-map should not be left empty, if you intend to replace the full payload
with the result then this processor is redundant. Currently only JSON format is
supported for mapping fields from and back to the original payload.

Maps can reference the root of objects either with an empty string or '.', for
example the maps:

` + "``` yaml" + `
premap:
  .: foo.bar
postmap:
  foo.bar: .
` + "```" + `

Would create a new object where the root is the value of ` + "`foo.bar`" + ` and
would map the full contents of the result back into ` + "`foo.bar`" + `.

This processor is useful for performing processors on subsections of a payload.
For example, you could extract sections of a JSON object in order to construct
a request object for an ` + "`http`" + ` processor, then map the result back
into a field within the original object.

If the number of total message parts resulting from the processing steps does
not match the original count then this processor fails and the messages continue
unchanged. Therefore, you should avoid using batch and filter type processors in
this list.

### Batch Ordering

This processor supports batch messages. When message parts are post-mapped after
processing they will be correctly aligned with the original batch. However, the
ordering of premapped message parts as they are sent through processors are not
guaranteed to match the ordering of the original batch.`,
		sanitiseConfigFunc: func(conf Config) (interface{}, error) {
			// Sanitise each child processor config recursively so the
			// rendered config output stays consistent.
			var err error
			procConfs := make([]interface{}, len(conf.ProcessMap.Processors))
			for i, pConf := range conf.ProcessMap.Processors {
				if procConfs[i], err = SanitiseConfig(pConf); err != nil {
					return nil, err
				}
			}
			return map[string]interface{}{
				"parts":           conf.ProcessMap.Parts,
				"premap":          conf.ProcessMap.Premap,
				"premap_optional": conf.ProcessMap.PremapOptional,
				"postmap":         conf.ProcessMap.Postmap,
				"processors":      procConfs,
			}, nil
		},
	}
}
//------------------------------------------------------------------------------
// ProcessMapConfig is a config struct containing fields for the
// ProcessMap processor.
type ProcessMapConfig struct {
	// Parts selects which message parts to process; empty means all parts.
	Parts []int `json:"parts" yaml:"parts"`
	// Premap maps dot paths from the original payload into the new object
	// sent to the child processors; missing targets skip the part.
	Premap map[string]string `json:"premap" yaml:"premap"`
	// PremapOptional is like Premap, but missing targets are ignored.
	PremapOptional map[string]string `json:"premap_optional" yaml:"premap_optional"`
	// Postmap maps dot paths from the processing result back into the
	// original payload.
	Postmap map[string]string `json:"postmap" yaml:"postmap"`
	// Processors is the list of child processors applied to premapped parts.
	Processors []Config `json:"processors" yaml:"processors"`
}
// NewProcessMapConfig returns a default ProcessMapConfig with all collections
// initialized to empty (non-nil) values.
func NewProcessMapConfig() ProcessMapConfig {
	var conf ProcessMapConfig
	conf.Parts = []int{}
	conf.Premap = map[string]string{}
	conf.PremapOptional = map[string]string{}
	conf.Postmap = map[string]string{}
	conf.Processors = []Config{}
	return conf
}
//------------------------------------------------------------------------------
// ProcessMap is a processor that applies a list of child processors to a
// field extracted from the original payload.
type ProcessMap struct {
	// parts selects which message parts to process; empty means all.
	parts []int

	// Validated (normalized) copies of the configured maps: "." keys/values
	// have been rewritten to "" and collisions rejected by validateMap.
	premap         map[string]string
	premapOptional map[string]string
	postmap        map[string]string

	// children are the child processors applied to premapped parts.
	children []Type

	log log.Modular

	// Metric counters for processing outcomes.
	mCount              metrics.StatCounter
	mSkipped            metrics.StatCounter
	mSkippedMap         metrics.StatCounter
	mErr                metrics.StatCounter
	mErrJSONParse       metrics.StatCounter
	mErrMisaligned      metrics.StatCounter
	mErrMisalignedBatch metrics.StatCounter
	mSent               metrics.StatCounter
	mSentParts          metrics.StatCounter
	mDropped            metrics.StatCounter
}
// NewProcessMap returns a ProcessMap processor. It constructs the child
// processors, validates the premap/postmap configurations, and rejects
// configs in which the postmap would simply replace the payload root (which
// would make the processor redundant).
func NewProcessMap(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	nsStats := metrics.Namespaced(stats, "processor.process_map")
	nsLog := log.NewModule(".processor.process_map")
	var children []Type
	for _, pconf := range conf.ProcessMap.Processors {
		proc, err := New(pconf, mgr, nsLog, nsStats)
		if err != nil {
			return nil, err
		}
		children = append(children, proc)
	}
	p := &ProcessMap{
		parts: conf.ProcessMap.Parts,
		// NOTE: the three map fields below are overwritten with validated
		// (normalized) copies right after this literal.
		premap:         conf.ProcessMap.Premap,
		premapOptional: conf.ProcessMap.PremapOptional,
		postmap:        conf.ProcessMap.Postmap,
		children:       children,
		log:            nsLog,

		mCount:              stats.GetCounter("processor.process_map.count"),
		mSkipped:            stats.GetCounter("processor.process_map.skipped"),
		mSkippedMap:         stats.GetCounter("processor.process_map.skipped.premap_target_missing"),
		mErr:                stats.GetCounter("processor.process_map.error"),
		mErrJSONParse:       stats.GetCounter("processor.process_map.error.json_parse"),
		mErrMisaligned:      stats.GetCounter("processor.process_map.error.misaligned"),
		mErrMisalignedBatch: stats.GetCounter("processor.process_map.error.misaligned_messages"),
		mSent:               stats.GetCounter("processor.process_map.sent"),
		mSentParts:          stats.GetCounter("processor.process_map.parts.sent"),
		mDropped:            stats.GetCounter("processor.process_map.dropped"),
	}

	var err error
	if p.premap, err = validateMap(conf.ProcessMap.Premap); err != nil {
		return nil, fmt.Errorf("premap was not valid: %v", err)
	}
	if p.premapOptional, err = validateMap(conf.ProcessMap.PremapOptional); err != nil {
		return nil, fmt.Errorf("optional premap was not valid: %v", err)
	}
	if p.postmap, err = validateMap(conf.ProcessMap.Postmap); err != nil {
		return nil, fmt.Errorf("postmap was not valid: %v", err)
	}
	// validateMap collapses a pure root-to-root postmap to an empty map, and
	// a root target is only valid for premaps, so both cases below mean the
	// result would replace the whole payload.
	if len(p.postmap) == 0 {
		return nil, errors.New("postmap replaces the root of the original payload, this processor is redundant")
	}
	for k := range p.postmap {
		if len(k) == 0 {
			return nil, errors.New("postmap replaces the root of the original payload, this processor is redundant")
		}
	}

	return p, nil
}
//------------------------------------------------------------------------------
// validateMap normalizes and validates a premap/postmap configuration. "."
// keys and values are rewritten to "" (the object root), duplicate normalized
// keys and colliding target paths are rejected, and a lone root-to-root
// mapping collapses to an empty map.
func validateMap(m map[string]string) (map[string]string, error) {
	normalized := make(map[string]string, len(m))
	for src, dst := range m {
		if src == "." {
			src = ""
		}
		if dst == "." {
			dst = ""
		}
		if _, dup := normalized[src]; dup {
			return nil, errors.New("root object mapped twice")
		}
		normalized[src] = dst
	}

	targets := make([]string, 0, len(normalized))
	for target := range normalized {
		targets = append(targets, target)
	}
	for i, shorter := range targets {
		if shorter == "" && len(targets) > 1 {
			return nil, errors.New("root map target collides with other targets")
		}
		for j, longer := range targets {
			if i == j {
				continue
			}
			shortParts := strings.Split(shorter, ".")
			longParts := strings.Split(longer, ".")
			// Only a strictly shorter path can be an ancestor of another;
			// siblings and equal-length paths cannot collide.
			if len(shortParts) >= len(longParts) {
				continue
			}
			isAncestor := true
			for k, part := range shortParts {
				if longParts[k] != part {
					isAncestor = false
					break
				}
			}
			if isAncestor {
				return nil, fmt.Errorf("map targets '%v' and '%v' collide", shorter, longer)
			}
		}
	}

	// A single root-to-root entry is the identity mapping; represent it as an
	// empty map so callers can treat it as "no mapping".
	if len(normalized) == 1 {
		if dst, ok := normalized[""]; ok && dst == "" {
			normalized = map[string]string{}
		}
	}

	return normalized, nil
}
//------------------------------------------------------------------------------
// ProcessMessage applies child processors to a mapped subset of payloads and
// maps the result back into the original payload. The flow is: select target
// parts, parse each as JSON, build premapped request parts, run them through
// the child processors, then postmap each result into the corresponding
// original part. If the processors change the number of parts, the batch is
// returned unchanged apart from any earlier premap mutations.
func (p *ProcessMap) ProcessMessage(msg types.Message) (msgs []types.Message, res types.Response) {
	p.mCount.Incr(1)

	payload := msg.ShallowCopy()
	resMsgs := [1]types.Message{payload}
	msgs = resMsgs[:]

	// Resolve the set of target part indices, supporting negative (from-end)
	// indices and dropping out-of-range ones.
	var targetObjs map[int]*gabs.Container
	if len(p.parts) > 0 {
		targetObjs = make(map[int]*gabs.Container, len(p.parts))
		for _, i := range p.parts {
			if i < 0 {
				i = payload.Len() + i
			}
			if i < 0 || i >= payload.Len() {
				continue
			}
			targetObjs[i] = nil
		}
	} else {
		targetObjs = make(map[int]*gabs.Container, payload.Len())
		for i := 0; i < payload.Len(); i++ {
			targetObjs[i] = nil
		}
	}

	// Parse original payloads. If the payload is invalid we skip it.
	for i := range targetObjs {
		var err error
		var jObj interface{}
		var gObj *gabs.Container
		if jObj, err = payload.GetJSON(i); err == nil {
			gObj, err = gabs.Consume(jObj)
		}
		if err != nil {
			p.mErrJSONParse.Incr(1)
			p.log.Errorf("Failed to parse message part '%v': %v\n", i, err)
			p.log.Debugf("Message part '%v' contents: %q\n", i, payload.Get(i))
			delete(targetObjs, i)
			continue
		}
		targetObjs[i] = gObj
	}

	// Maps request message parts back into the original alignment.
	reqParts := []int{}
	reqMsg := types.NewMessage(nil)

	// Map the original payloads into premapped message parts.
premapLoop:
	for i, gObj := range targetObjs {
		// With no premap configured the whole part is forwarded as-is.
		if len(p.premap) == 0 && len(p.premapOptional) == 0 {
			reqParts = append(reqParts, i)
			reqMsg.Append(payload.Get(i))
			continue
		}

		gReq := gabs.New()

		for k, v := range p.premap {
			gTarget := gObj.Path(v)
			// A missing mandatory premap target skips this part entirely.
			if gTarget.Data() == nil {
				p.mSkipped.Incr(1)
				p.mSkippedMap.Incr(1)
				continue premapLoop
			}
			// An empty destination key means the target becomes the whole
			// request object.
			if len(k) == 0 {
				reqMsg.Append([]byte("{}"))
				reqParts = append(reqParts, i)
				reqMsg.SetJSON(-1, gTarget.Data())
				continue premapLoop
			} else {
				gReq.SetP(gTarget.Data(), k)
			}
		}

		for k, v := range p.premapOptional {
			gTarget := gObj.Path(v)
			// Optional premap targets are simply skipped when missing.
			if gTarget.Data() == nil {
				continue
			}
			if len(k) == 0 {
				reqMsg.Append([]byte("{}"))
				reqParts = append(reqParts, i)
				reqMsg.SetJSON(-1, gTarget.Data())
				continue premapLoop
			} else {
				gReq.SetP(gTarget.Data(), k)
			}
		}

		reqMsg.Append([]byte("{}"))
		reqParts = append(reqParts, i)
		reqMsg.SetJSON(reqMsg.Len()-1, gReq.Data())
	}

	// Run the premapped message through each child processor in sequence,
	// flattening any fan-out into a single list of result messages.
	resultMsgs := []types.Message{reqMsg}
	for i := 0; len(resultMsgs) > 0 && i < len(p.children); i++ {
		var nextResultMsgs []types.Message
		for _, m := range resultMsgs {
			var rMsgs []types.Message
			rMsgs, _ = p.children[i].ProcessMessage(m)
			nextResultMsgs = append(nextResultMsgs, rMsgs...)
		}
		resultMsgs = nextResultMsgs
	}

	// Collapse all result messages back into one flat message so parts can be
	// realigned with reqParts.
	resMsg := types.NewMessage(nil)
	for _, rMsg := range resultMsgs {
		for _, part := range rMsg.GetAll() {
			resMsg.Append(part)
		}
	}

	// If the processors changed the part count we cannot realign results with
	// the original batch; give up and pass the payload through.
	if exp, act := len(reqParts), resMsg.Len(); exp != act {
		p.mSent.Incr(1)
		p.mSentParts.Incr(int64(payload.Len()))
		p.mErr.Incr(1)
		p.mErrMisalignedBatch.Incr(1)
		p.log.Errorf("Misaligned processor result batch. Expected %v messages, received %v\n", exp, act)
		return
	}

	// Postmap: write fields from result part i back into original part j.
	for i, j := range reqParts {
		ogObj := targetObjs[j]

		var err error
		var jObj interface{}
		var gObj *gabs.Container
		if jObj, err = resMsg.GetJSON(i); err == nil {
			gObj, err = gabs.Consume(jObj)
		}
		if err != nil {
			p.mErrJSONParse.Incr(1)
			p.log.Errorf("Failed to parse result part '%v': %v\n", j, err)
			p.log.Debugf("Result part '%v' contents: %q\n", j, resMsg.Get(i))
			continue
		}

		for k, v := range p.postmap {
			gTarget := gObj
			if len(v) > 0 {
				gTarget = gTarget.Path(v)
			}
			// Missing postmap sources are silently skipped.
			if gTarget.Data() != nil {
				ogObj.SetP(gTarget.Data(), k)
			}
		}

		payload.SetJSON(j, ogObj.Data())
	}

	p.mSent.Incr(1)
	p.mSentParts.Incr(int64(payload.Len()))

	return
}
//------------------------------------------------------------------------------ | lib/processor/process_map.go | 0.771672 | 0.692353 | process_map.go | starcoder |
package common
import (
"fmt"
)
// ValidateString validates a string field against min/max length limits
// (measured in bytes). A non-zero minLen makes the field required.
func ValidateString(name, data string, minLen, maxLen int) error {
	switch length := len(data); {
	case minLen > 0 && length == 0:
		return fmt.Errorf("invalid input data, %s is required", name)
	case length < minLen:
		return fmt.Errorf("invalid input data, %s is too short (min length: %d)", name, minLen)
	case length > maxLen:
		return fmt.Errorf("invalid input data, %s is too long (max length: %d)", name, maxLen)
	}
	return nil
}
// ValidateInt validates an int field against inclusive min/max value limits.
func ValidateInt(name string, data, minValue, maxValue int) error {
	if data >= minValue && data <= maxValue {
		return nil
	}
	if data < minValue {
		return fmt.Errorf("invalid input data, %s is too little (min value: %d)", name, minValue)
	}
	return fmt.Errorf("invalid input data, %s is too large (max value: %d)", name, maxValue)
}
// ValidateInt32 validates an int32 field against inclusive min/max value limits.
func ValidateInt32(name string, data, minValue, maxValue int32) error {
	if data >= minValue && data <= maxValue {
		return nil
	}
	if data < minValue {
		return fmt.Errorf("invalid input data, %s is too little (min value: %d)", name, minValue)
	}
	return fmt.Errorf("invalid input data, %s is too large (max value: %d)", name, maxValue)
}
// ValidateInt64 validates an int64 field against inclusive min/max value limits.
func ValidateInt64(name string, data, minValue, maxValue int64) error {
	if data >= minValue && data <= maxValue {
		return nil
	}
	if data < minValue {
		return fmt.Errorf("invalid input data, %s is too little (min value: %d)", name, minValue)
	}
	return fmt.Errorf("invalid input data, %s is too large (max value: %d)", name, maxValue)
}
// ValidateUint validates a uint field against inclusive min/max value limits.
func ValidateUint(name string, data, minValue, maxValue uint) error {
	if data >= minValue && data <= maxValue {
		return nil
	}
	if data < minValue {
		return fmt.Errorf("invalid input data, %s is too little (min value: %d)", name, minValue)
	}
	return fmt.Errorf("invalid input data, %s is too large (max value: %d)", name, maxValue)
}
// ValidateUint32 validates a uint32 field against inclusive min/max value limits.
func ValidateUint32(name string, data, minValue, maxValue uint32) error {
	if data >= minValue && data <= maxValue {
		return nil
	}
	if data < minValue {
		return fmt.Errorf("invalid input data, %s is too little (min value: %d)", name, minValue)
	}
	return fmt.Errorf("invalid input data, %s is too large (max value: %d)", name, maxValue)
}
// ValidateUint64 validates a uint64 field against inclusive min/max value limits.
func ValidateUint64(name string, data, minValue, maxValue uint64) error {
	if data >= minValue && data <= maxValue {
		return nil
	}
	if data < minValue {
		return fmt.Errorf("invalid input data, %s is too little (min value: %d)", name, minValue)
	}
	return fmt.Errorf("invalid input data, %s is too large (max value: %d)", name, maxValue)
}
// ValidateStrings validates that a string field is one of the permitted
// values.
func ValidateStrings(name, data string, values ...string) error {
	matched := false
	for _, candidate := range values {
		if candidate == data {
			matched = true
			break
		}
	}
	if !matched {
		return fmt.Errorf("invalid input data, %s is not supported (values: %s)", name, values)
	}
	return nil
}
// ValidateEnums validates enum field must in target slice base on values limit.
func ValidateEnums(name string, data interface{}, values ...interface{}) error {
for _, value := range values {
if data == value {
return nil
}
}
return fmt.Errorf("invalid input data, %s is not supported (values: %s)", name, values)
} | bmsf-configuration/pkg/common/validation.go | 0.727782 | 0.436982 | validation.go | starcoder |
Package fork implements a pattern for forking sub-processes as new Pods.
This can be used when some code that's part of the operator (usually an entire
controller) needs to run under a different Pod context (e.g. different service
account, volume mounts, etc.). The forked Pod runs the same container image as
the parent Pod, so the code versions should stay synchronized as long as
immutable image tags are used. This avoids the operational and maintenance
burden of creating and publishing separate binaries and container images for
each sub-process.
The forked Pod is given an additional environment variable telling it which
forked code path it should run instead of the normal code path. The main()
function should be written to take this into account by calling fork.Path() to
determine which fork it should follow, if any.
The parent Pod must also give some environment variables to the operator's
Container to let this package find the Pod in which it's currently running:
env:
- name: PS_OPERATOR_POD_NAME
valueFrom:
fieldRef:
fieldPath: metadata.name
- name: PS_OPERATOR_POD_NAMESPACE
valueFrom:
fieldRef:
fieldPath: metadata.namespace
The parent Pod uses its own Pod spec as a basis to build the child Pod spec.
*/
package fork
import (
"context"
"fmt"
"os"
corev1 "k8s.io/api/core/v1"
"sigs.k8s.io/controller-runtime/pkg/client"
"planetscale.dev/vitess-operator/pkg/operator/update"
)
const (
envForkPath = "PS_OPERATOR_FORK_PATH"
envPodName = "PS_OPERATOR_POD_NAME"
envPodNamespace = "PS_OPERATOR_POD_NAMESPACE"
)
// Path returns the name of the forked code path that this process should take.
// It returns "" if no fork should be taken (i.e. this is the root process).
func Path() string {
return os.Getenv(envForkPath)
}
// NewPodSpec returns the specification for a child Pod to be forked off from
// the Pod in which you're currently running. The child reuses the parent's
// Pod spec, except the node pin is cleared (so the scheduler picks a node)
// and every container gets the fork-path environment variable set.
func NewPodSpec(ctx context.Context, c client.Client, forkPath string) (*corev1.PodSpec, error) {
	// Look up the Pod we're currently running in.
	parentPod, err := getParentPod(ctx, c)
	if err != nil {
		return nil, fmt.Errorf("can't get parent Pod: %v", err)
	}

	spec := parentPod.Spec
	// Let the scheduler choose a node rather than inheriting the parent's.
	spec.NodeName = ""

	// Tell every container in the child which forked code path to run.
	forkEnv := []corev1.EnvVar{
		{
			Name:  envForkPath,
			Value: forkPath,
		},
	}
	for i := range spec.Containers {
		update.Env(&spec.Containers[i].Env, forkEnv)
	}

	return &spec, nil
}
func getParentPod(ctx context.Context, c client.Client) (*corev1.Pod, error) {
var key client.ObjectKey
key.Namespace = os.Getenv(envPodNamespace)
if key.Namespace == "" {
return nil, fmt.Errorf("forking requires %v env var to be set on the Container in the parent Pod", envPodNamespace)
}
key.Name = os.Getenv(envPodName)
if key.Name == "" {
return nil, fmt.Errorf("forking requires %v env var to be set on the Container in the parent Pod", envPodName)
}
pod := &corev1.Pod{}
err := c.Get(ctx, key, pod)
return pod, err
} | pkg/operator/fork/fork.go | 0.695752 | 0.471467 | fork.go | starcoder |
package proto
import (
"bytes"
"log"
"reflect"
"strings"
)
/*
Equal returns true iff protocol buffers a and b are equal.
The arguments must both be pointers to protocol buffer structs.
Equality is defined in this way:
- Two messages are equal iff they are the same type,
corresponding fields are equal, unknown field sets
are equal, and extensions sets are equal.
- Two set scalar fields are equal iff their values are equal.
If the fields are of a floating-point type, remember that
NaN != x for all x, including NaN. If the message is defined
in a proto3 .proto file, fields are not "set"; specifically,
zero length proto3 "bytes" fields are equal (nil == {}).
- Two repeated fields are equal iff their lengths are the same,
and their corresponding elements are equal. Note a "bytes" field,
although represented by []byte, is not a repeated field and the
rule for the scalar fields described above applies.
- Two unset fields are equal.
- Two unknown field sets are equal if their current
encoded state is equal.
- Two extension sets are equal iff they have corresponding
elements that are pairwise equal.
- Two map fields are equal iff their lengths are the same,
and they contain the same set of elements. Zero-length map
fields are equal.
- Every other combination of things are not equal.
The return value is undefined if a and b are not protocol buffers.
*/
func Equal(a, b Message) bool {
	// Untyped-nil arguments are only equal to each other.
	if a == nil || b == nil {
		return a == b
	}
	v1, v2 := reflect.ValueOf(a), reflect.ValueOf(b)
	// Messages of different concrete types are never equal.
	if v1.Type() != v2.Type() {
		return false
	}
	if v1.Kind() == reflect.Ptr {
		// Typed-nil pointers: equal only when both are nil.
		if v1.IsNil() {
			return v2.IsNil()
		}
		if v2.IsNil() {
			return false
		}
		v1, v2 = v1.Elem(), v2.Elem()
	}
	// Protocol buffers are always struct-backed; anything else fails.
	if v1.Kind() != reflect.Struct {
		return false
	}
	return equalStruct(v1, v2)
}
// equalStruct reports whether two message struct values (already
// dereferenced by Equal) are equal: field by field, then extensions,
// then unknown fields.
func equalStruct(v1, v2 reflect.Value) bool {
	sprop := GetProperties(v1.Type())
	for i := 0; i < v1.NumField(); i++ {
		f := v1.Type().Field(i)
		// XXX_-prefixed bookkeeping fields are compared separately below.
		if strings.HasPrefix(f.Name, "XXX_") {
			continue
		}
		f1, f2 := v1.Field(i), v2.Field(i)
		if f.Type.Kind() == reflect.Ptr {
			if n1, n2 := f1.IsNil(), f2.IsNil(); n1 && n2 {
				// Both fields unset: equal.
				continue
			} else if n1 != n2 {
				// Set vs unset: not equal.
				return false
			}
			// Fields exposing their raw encoding are compared by bytes.
			b1, ok := f1.Interface().(raw)
			if ok {
				b2 := f2.Interface().(raw)
				// RawMessage
				if !bytes.Equal(b1.Bytes(), b2.Bytes()) {
					return false
				}
				continue
			}
			f1, f2 = f1.Elem(), f2.Elem()
		}
		if !equalAny(f1, f2, sprop.Prop[i]) {
			return false
		}
	}
	// Compare extension sets, both the current representation and the
	// legacy map-based one, whichever the generated type carries.
	if em1 := v1.FieldByName("XXX_InternalExtensions"); em1.IsValid() {
		em2 := v2.FieldByName("XXX_InternalExtensions")
		if !equalExtensions(v1.Type(), em1.Interface().(XXX_InternalExtensions), em2.Interface().(XXX_InternalExtensions)) {
			return false
		}
	}
	if em1 := v1.FieldByName("XXX_extensions"); em1.IsValid() {
		em2 := v2.FieldByName("XXX_extensions")
		if !equalExtMap(v1.Type(), em1.Interface().(map[int32]Extension), em2.Interface().(map[int32]Extension)) {
			return false
		}
	}
	// Unknown fields are equal iff their current encoded bytes are equal.
	uf := v1.FieldByName("XXX_unrecognized")
	if !uf.IsValid() {
		return true
	}
	u1 := uf.Bytes()
	u2 := v2.FieldByName("XXX_unrecognized").Bytes()
	if !bytes.Equal(u1, u2) {
		return false
	}
	return true
}
// equalAny reports whether two values of the same type are equal under
// the protobuf equality rules. prop carries field properties (may be
// nil for nested container elements, where proto3-ness is irrelevant).
func equalAny(v1, v2 reflect.Value, prop *Properties) bool {
	if v1.Type() == protoMessageType {
		// Values stored as the Message interface: recurse via Equal.
		m1, _ := v1.Interface().(Message)
		m2, _ := v2.Interface().(Message)
		return Equal(m1, m2)
	}
	switch v1.Kind() {
	case reflect.Bool:
		return v1.Bool() == v2.Bool()
	case reflect.Float32, reflect.Float64:
		// Plain == comparison: NaN != NaN, as documented on Equal.
		return v1.Float() == v2.Float()
	case reflect.Int32, reflect.Int64:
		return v1.Int() == v2.Int()
	case reflect.Interface:
		// Probably a oneof field; compare the wrapped values.
		n1, n2 := v1.IsNil(), v2.IsNil()
		if n1 || n2 {
			return n1 == n2
		}
		e1, e2 := v1.Elem(), v2.Elem()
		if e1.Type() != e2.Type() {
			return false
		}
		return equalAny(e1, e2, nil)
	case reflect.Map:
		if v1.Len() != v2.Len() {
			return false
		}
		for _, key := range v1.MapKeys() {
			val2 := v2.MapIndex(key)
			if !val2.IsValid() {
				// Key missing in the second map: not equal.
				return false
			}
			if !equalAny(v1.MapIndex(key), val2, nil) {
				return false
			}
		}
		return true
	case reflect.Ptr:
		if v1.IsNil() && v2.IsNil() {
			return true
		}
		if v1.IsNil() != v2.IsNil() {
			return false
		}
		return equalAny(v1.Elem(), v2.Elem(), prop)
	case reflect.Slice:
		if v1.Type().Elem().Kind() == reflect.Uint8 {
			// Short circuit: []byte is a scalar "bytes" field, not a repeated field.
			// In proto3, a zero-length bytes field equals an unset one (nil == {}).
			if prop != nil && prop.proto3 && v1.Len() == 0 && v2.Len() == 0 {
				return true
			}
			if v1.IsNil() != v2.IsNil() {
				return false
			}
			return bytes.Equal(v1.Interface().([]byte), v2.Interface().([]byte))
		}
		// Repeated field: equal lengths and pairwise-equal elements.
		if v1.Len() != v2.Len() {
			return false
		}
		for i := 0; i < v1.Len(); i++ {
			if !equalAny(v1.Index(i), v2.Index(i), prop) {
				return false
			}
		}
		return true
	case reflect.String:
		return v1.Interface().(string) == v2.Interface().(string)
	case reflect.Struct:
		return equalStruct(v1, v2)
	case reflect.Uint32, reflect.Uint64:
		return v1.Uint() == v2.Uint()
	}
	// Unsupported kind: log and treat as unequal.
	log.Printf("proto: don't know how to compare %v", v1)
	return false
}
// equalExtensions compares two internal extension sets by extracting
// their underlying extension maps and delegating to equalExtMap.
// base is the message type the extensions are registered against.
func equalExtensions(base reflect.Type, x1, x2 XXX_InternalExtensions) bool {
	em1, _ := x1.extensionsRead()
	em2, _ := x2.extensionsRead()
	return equalExtMap(base, em1, em2)
}
// equalExtMap reports whether two extension maps are equal: same
// extension numbers, and pairwise-equal values. Values still in their
// encoded form are decoded on demand using the registered descriptor
// for the base message type.
func equalExtMap(base reflect.Type, em1, em2 map[int32]Extension) bool {
	if len(em1) != len(em2) {
		return false
	}
	for extNum, e1 := range em1 {
		e2, ok := em2[extNum]
		if !ok {
			return false
		}
		m1, m2 := e1.value, e2.value
		if m1 != nil && m2 != nil {
			// Both values already decoded; compare directly.
			if !equalAny(reflect.ValueOf(m1), reflect.ValueOf(m2), nil) {
				return false
			}
			continue
		}
		// At least one side is still encoded; look up the descriptor
		// so it can be decoded for comparison.
		var desc *ExtensionDesc
		if m := extensionMaps[base]; m != nil {
			desc = m[extNum]
		}
		if desc == nil {
			// Unknown extension: skipped rather than failing, with a log
			// so the omission is visible.
			log.Printf("proto: don't know how to compare extension %d of %v", extNum, base)
			continue
		}
		var err error
		if m1 == nil {
			m1, err = decodeExtension(e1.enc, desc)
		}
		if m2 == nil && err == nil {
			m2, err = decodeExtension(e2.enc, desc)
		}
		if err != nil {
			// The encoded form must be parseable to compare.
			log.Printf("proto: badly encoded extension %d of %v: %v", extNum, base, err)
			return false
		}
		if !equalAny(reflect.ValueOf(m1), reflect.ValueOf(m2), nil) {
			return false
		}
	}
	return true
}
package main
import (
"fmt"
"strings"
)
const (
	// operators — each arithmetic verb is encoded as a 4-letter code word.
	addition       = "abcd"
	subtraction    = "bcde"
	multiplication = "dede"
	division       = "abab"
)

// worthOfAlphabets maps each letter to its numeric worth (a=1 .. e=5).
// Letters outside 'a'..'e' are absent and therefore worth 0.
var worthOfAlphabets = map[byte]byte{
	byte('a'): 1,
	byte('b'): 2,
	byte('c'): 3,
	byte('d'): 4,
	byte('e'): 5,
}
// Noun is a leaf word in the expression language; its numeric value is
// derived from the run-length structure of its letters.
type Noun string

// process evaluates the noun: each maximal run of identical letters
// contributes ((runLength * letterWorth) mod 5)^2, and the result is
// the sum of those contributions. The empty noun evaluates to 0.
func (n Noun) process() int {
	total := 0
	for i := 0; i < len(n); {
		// Find the end of the run of identical bytes starting at i.
		j := i
		for j < len(n) && n[j] == n[i] {
			j++
		}
		worth := int(worthOfAlphabets[n[i]]) // 0 for unknown letters
		v := ((j - i) * worth) % 5
		total += v * v
		i = j
	}
	return total
}
// Sentence is a node in the parsed expression tree: either a leaf
// holding a noun, or an operator verb applied to sub-sentences.
type Sentence struct {
	verb         string
	subSentences []Sentence
	noun         Noun
}

// process evaluates the sentence. A leaf returns its noun's value;
// an inner node evaluates all children and folds their values from
// left to right with the node's verb.
func (s Sentence) process() float64 {
	if len(s.noun) != 0 {
		return float64(s.noun.process())
	}
	vals := make([]float64, len(s.subSentences))
	for i, sub := range s.subSentences {
		vals[i] = sub.process()
	}
	acc := vals[0]
	for _, v := range vals[1:] {
		switch s.verb {
		case addition:
			acc += v
		case subtraction:
			acc -= v
		case multiplication:
			acc *= v
		case division:
			acc /= v
		}
	}
	return acc
}
// main parses the hard-coded input sentence and prints its value.
func main() {
	terms := strings.Split("abcd bcde ab ac abab a b", " ")
	fmt.Println(buildGeneralizedList(terms).process())
}
// buildGeneralizedList recursively parses a flat list of terms into a
// Sentence tree.
//
// A single term becomes a leaf noun. Otherwise terms[0] is taken as the
// verb; the nouns before the next verb become leaf children; and each
// verb-delimited tail segment is parsed recursively as a sub-sentence.
func buildGeneralizedList(terms []string) Sentence {
	length := len(terms)
	if length == 1 {
		return Sentence{
			noun: Noun(terms[0]),
		}
	}
	verb := terms[0]
	subSentences := make([]Sentence, 0, length/2)
	verbsIndex := make([]int, 0, length/2)
	// Collect positions of every verb after the leading one.
	for index := 1; index < length; index++ {
		if isVerb(terms[index]) {
			verbsIndex = append(verbsIndex, index)
		}
	}
	verbsLength := len(verbsIndex)
	// Despite the name, lastVerbIndex is the index of the FIRST nested
	// verb (or the end of terms if there is none); everything before it
	// is a plain noun leaf.
	lastVerbIndex := length
	if verbsLength != 0 {
		lastVerbIndex = verbsIndex[0]
	}
	for index := 1; index < lastVerbIndex; index++ {
		sentence := Sentence{noun: Noun(terms[index])}
		subSentences = append(subSentences, sentence)
	}
	// Each nested verb starts a sub-sentence running up to the next
	// verb, or to the end of the term list for the last one.
	for index := 0; index < verbsLength; index++ {
		if verbsLength-index <= 1 {
			sentence := buildGeneralizedList(terms[verbsIndex[index]:])
			subSentences = append(subSentences, sentence)
		} else {
			sentence := buildGeneralizedList(terms[verbsIndex[index]:verbsIndex[index+1]])
			subSentences = append(subSentences, sentence)
		}
	}
	return Sentence{
		subSentences: subSentences,
		verb:         verb,
	}
}
// isVerb reports whether input is one of the four operator code words.
func isVerb(input string) bool {
	switch input {
	case addition, subtraction, multiplication, division:
		return true
	}
	return false
}
// sumIntSlice returns the sum of all elements of slice (0 for an empty
// or nil slice).
func sumIntSlice(slice []int) int {
	total := 0
	for _, v := range slice {
		total += v
	}
	return total
}
package main
// https://en.wikipedia.org/wiki/Siamese_method
// siamese fills sq with the values offset+1 .. offset+dim*dim using the
// Siamese method for odd-order magic squares: start in the middle of
// the top row, move diagonally up-right (wrapping at the edges), and
// drop one row down whenever the target cell is occupied.
//
// Panics if the square's dimension is even, or if the walk ever lands
// on an occupied cell (which indicates a broken invariant).
// https://en.wikipedia.org/wiki/Siamese_method
func siamese(sq Square, offset int) {
	dim := sq.Dim()
	if dim%2 == 0 {
		panic("square is even")
	}
	sq.Clear()
	// Start at the middle of the top row.
	i, j := 0, dim/2
	for n := 1; n < dim*dim; n++ {
		sq[i][j] = n + offset
		// Candidate next cell: one up (with wrap), one right (with wrap).
		i1, j1 := i, j
		if i1 == 0 {
			i1 = dim
		}
		i1--
		j1++
		if j1 == dim {
			j1 = 0
		}
		if sq[i1][j1] != 0 {
			// Occupied: move straight down from the current cell instead.
			i++
		} else {
			i, j = i1, j1
		}
		if sq[i][j] != 0 {
			panic("don't know what to do")
		}
	}
	// Place the final value at the walk's last position.
	sq[i][j] = dim*dim + offset
}
// FillOddSquare fills an odd-dimension square with the values
// 1..dim*dim using the Siamese method. Panics if the dimension is even.
func FillOddSquare(sq Square) {
	siamese(sq, 0)
}
// https://en.wikipedia.org/wiki/Magic_square#A_method_of_constructing_a_magic_square_of_doubly_even_order
// FillDoublyEvenSquare fills a square whose dimension is a multiple of
// four. The square is tiled into (dim/4)-sized sub-squares; cells in
// the sub-squares lying on the two block diagonals are marked, then the
// marked cells are numbered in ascending order and the unmarked cells
// in descending order.
// Panics if the dimension is not a multiple of 4.
// https://en.wikipedia.org/wiki/Magic_square#A_method_of_constructing_a_magic_square_of_doubly_even_order
func FillDoublyEvenSquare(sq Square) {
	dim := sq.Dim()
	if dim%4 != 0 {
		panic("square is not doubly even")
	}
	sq.Clear()
	// first pass -- truth table: mark the diagonal sub-squares with 1s.
	subDim := dim / 4
	for i := 0; i < dim; i += subDim {
		for j := 0; j < dim; j += subDim {
			sub := sq.SubSquare(i, j, subDim)
			if i == j || i+j+subDim == dim {
				// on the diagonals
				sub.Fill(1)
			}
		}
	}
	// second pass -- fill the numbers: marked cells count up from 1,
	// unmarked cells count down from dim*dim.
	inc, dec := 1, dim*dim
	for i := 0; i < dim; i++ {
		for j := 0; j < dim; j++ {
			if sq[i][j] == 1 {
				sq[i][j] = inc
			} else {
				sq[i][j] = dec
			}
			inc++
			dec--
		}
	}
}
// https://en.wikipedia.org/wiki/Strachey_method_for_magic_squares
// FillSinglyEvenSquare fills a square whose dimension is an even number
// that is not a multiple of four, using the Strachey method: fill the
// four odd-order quadrants with the Siamese method at staggered
// offsets, then exchange specific column segments between the
// quadrants to restore the magic property.
// Panics if the dimension is not of the form 4k+2.
// https://en.wikipedia.org/wiki/Strachey_method_for_magic_squares
func FillSinglyEvenSquare(sq Square) {
	dim := sq.Dim()
	if dim%4 != 2 {
		panic("square is not singly even")
	}
	subDim := dim / 2
	// Quadrants in Strachey order: A top-left, B bottom-right,
	// C top-right, D bottom-left.
	A := sq.SubSquare(0, 0, subDim)
	B := sq.SubSquare(subDim, subDim, subDim)
	C := sq.SubSquare(0, subDim, subDim)
	D := sq.SubSquare(subDim, 0, subDim)
	// Fill each quadrant with consecutive ranges of subDim² values.
	for i, sub := range [...]Square{A, B, C, D} {
		siamese(sub, i*subDim*subDim)
	}
	n := dim / 4
	// exchange the leftmost n columns in A and D
	for j := 0; j < n; j++ {
		for i := 0; i < subDim; i++ {
			A[i][j], D[i][j] = D[i][j], A[i][j]
		}
	}
	// exchange the rightmost n-1 columns in C and B
	for j := subDim - (n - 1); j < subDim; j++ {
		for i := 0; i < subDim; i++ {
			C[i][j], B[i][j] = B[i][j], C[i][j]
		}
	}
	// exchange the middle cell of the leftmost column of A and D
	// exchange the central cell in A and D
	mid := subDim / 2
	A[mid][0], D[mid][0] = D[mid][0], A[mid][0]
	A[mid][mid], D[mid][mid] = D[mid][mid], A[mid][mid]
}
package common
import (
"bytes"
"crypto/sha256"
"encoding/binary"
"encoding/hex"
"github.com/libp2p/go-libp2p-peer"
"github.com/tinychain/tinychain/p2p/pb"
"math/big"
)
const (
	HashLength    = 32 // size in bytes of a Hash (SHA-256 digest length)
	AddressLength = 20 // size in bytes of an Address
)
// Hash is a fixed-size 32-byte value, typically a SHA-256 digest.
type Hash [HashLength]byte

// BigToHash sets byte representation of b to hash.
// If b is larger than len(h), b will be cropped from the left.
// NOTE(review): BytesToHash actually keeps the FIRST HashLength bytes
// (crops from the right), so this comment and the implementation
// disagree — confirm which direction is intended.
func BigToHash(b *big.Int) Hash { return BytesToHash(b.Bytes()) }

// HexToHash sets byte representation of s to hash.
// If b is larger than len(h), b will be cropped from the left.
// (Same cropping caveat as BigToHash; FromHex is a package helper
// defined elsewhere in this package.)
func HexToHash(s string) Hash { return BytesToHash(FromHex(s)) }

// String returns the raw 32 bytes reinterpreted as a string (NOT hex).
func (h Hash) String() string {
	return string(h[:])
}

// Bytes returns the hash as a byte slice.
func (h Hash) Bytes() []byte {
	return h[:]
}

// Hex returns the hash encoded by the package-level Hex helper.
// NOTE(review): presumably a plain hex encoding without a "0x" prefix,
// unlike Address.Hex below — confirm against the helper's definition.
func (h Hash) Hex() string {
	return string(Hex(h[:]))
}

// Big interprets the hash bytes as a big-endian unsigned integer.
func (h Hash) Big() *big.Int {
	return new(big.Int).SetBytes(h.Bytes())
}
// DecodeHash decodes a hex-encoded hash, with or without a leading
// "0x" prefix, into a Hash.
//
// Malformed hex is not reported: hex.Decode stops at the first invalid
// byte, leaving the remainder of the hash zeroed.
func DecodeHash(data []byte) Hash {
	// Strip the optional "0x" prefix, mirroring HexToAddress. The
	// previous unconditional data[2:] panicked on inputs shorter than
	// two bytes and silently dropped the first two characters of
	// unprefixed input.
	if bytes.HasPrefix(data, []byte("0x")) {
		data = data[2:]
	}
	dec := make([]byte, HashLength)
	hex.Decode(dec, data)
	return BytesToHash(dec)
}
// BytesToHash copies d into a Hash. Inputs longer than HashLength are
// truncated to their first HashLength bytes; shorter inputs are
// left-aligned and zero-padded on the right.
func BytesToHash(d []byte) Hash {
	var h Hash
	if len(d) > HashLength {
		d = d[:HashLength]
	}
	copy(h[:], d)
	return h
}

// Nil reports whether the hash is the all-zero value.
func (h Hash) Nil() bool {
	return h == Hash{}
}

// Sha256 returns the SHA-256 digest of d as a Hash.
func Sha256(d []byte) Hash {
	return sha256.Sum256(d)
}
// Address is a fixed-size 20-byte account identifier.
type Address [AddressLength]byte

// String returns the raw 20 bytes reinterpreted as a string (NOT hex).
func (addr Address) String() string {
	return string(addr[:])
}

// Bytes returns the address as a byte slice.
func (addr Address) Bytes() []byte {
	return addr[:]
}

// Hex returns the address hex-encoded with a "0x" prefix.
func (addr Address) Hex() string {
	enc := make([]byte, len(addr)*2)
	hex.Encode(enc, addr[:])
	return "0x" + string(enc)
}

// Big interprets the address bytes as a big-endian unsigned integer.
func (addr Address) Big() *big.Int {
	return new(big.Int).SetBytes(addr.Bytes())
}

// Nil reports whether the address is the all-zero value.
func (addr Address) Nil() bool {
	return addr == Address{}
}
// BytesToAddress copies b into an Address. Inputs longer than
// AddressLength are truncated to their first AddressLength bytes;
// shorter inputs are left-aligned and zero-padded on the right.
func BytesToAddress(b []byte) Address {
	var addr Address
	if len(b) > AddressLength {
		b = b[:AddressLength]
	}
	copy(addr[:], b)
	return addr
}

// BigToAddress converts a big integer's big-endian bytes to an Address.
func BigToAddress(b *big.Int) Address {
	return BytesToAddress(b.Bytes())
}
// CreateAddress deterministically derives a new address from a creator
// address and a nonce: the first AddressLength bytes of
// Sha256(addr || bigEndian64(nonce)).
func CreateAddress(addr Address, nonce uint64) Address {
	var buf = make([]byte, 8)
	binary.BigEndian.PutUint64(buf, nonce)
	return BytesToAddress(Sha256(append(addr.Bytes(), buf...)).Bytes())
}

// HashToAddr truncates a Hash to its first AddressLength bytes.
func HashToAddr(hash Hash) Address {
	return BytesToAddress(hash[:AddressLength])
}
// HexToAddress decodes a hex-encoded address, with or without a
// leading "0x" prefix, into an Address.
//
// Decoding errors are not reported: hex.Decode stops at the first
// invalid byte, leaving the remainder of the address zeroed.
func HexToAddress(d string) Address {
	h := []byte(d)
	// bytes.HasPrefix is the idiomatic form of the previous
	// bytes.Compare(h[:2], ...) == 0, and — unlike the h[:2] slice
	// expression — does not panic for inputs shorter than two bytes.
	if bytes.HasPrefix(h, []byte("0x")) {
		h = h[2:]
	}
	dec := make([]byte, AddressLength)
	hex.Decode(dec, h)
	return BytesToAddress(dec)
}
// Protocol represents a p2p message callback handler.
type Protocol interface {
	// Type returns the message type this handler matches.
	Type() string
	// Run handles a message received from the given peer's stream.
	Run(pid peer.ID, message *pb.Message) error
	// Error handles an error returned from the stream.
	Error(error)
}
package wkb
import (
"fmt"
"strings"
)
import "github.com/airmap/tegola"
/*
This purpose of this file is to house the wkt functions. These functions are
use to take a tagola.Geometry and convert it to a wkt string. It will, also,
contain functions to parse a wkt string into a wkb.Geometry.
*/
// wkt renders the coordinate portion of a geometry — without the
// leading type keyword — as WKT text, recursing through container
// geometries. It panics on geometry types it does not recognize.
func wkt(geo tegola.Geometry) string {
	switch g := geo.(type) {
	case tegola.Point:
		return fmt.Sprintf("%v %v", g.X(), g.Y())
	case tegola.Point3:
		return fmt.Sprintf("%v %v %v", g.X(), g.Y(), g.Z())
	case tegola.MultiPoint:
		var points []string
		for _, p := range g.Points() {
			points = append(points, wkt(p))
		}
		return "(" + strings.Join(points, ",") + ")"
	case tegola.LineString:
		var points []string
		for _, p := range g.Subpoints() {
			points = append(points, wkt(p))
		}
		return "(" + strings.Join(points, ",") + ")"
	case tegola.MultiLine:
		var lines []string
		for _, l := range g.Lines() {
			lines = append(lines, wkt(l))
		}
		return "(" + strings.Join(lines, ",") + ")"
	case tegola.Polygon:
		// Each subline is a ring of the polygon.
		var lines []string
		for _, l := range g.Sublines() {
			lines = append(lines, wkt(l))
		}
		return "(" + strings.Join(lines, ",") + ")"
	case tegola.MultiPolygon:
		var polygons []string
		for _, p := range g.Polygons() {
			polygons = append(polygons, wkt(p))
		}
		return "(" + strings.Join(polygons, ",") + ")"
	}
	panic("Don't know the geometry type!")
}
// WKT returns a WKT representation of the Geometry if possible.
// An unknown geometry type yields the empty string. (The original
// comment mentioned a non-nil error, but this function returns only a
// string.)
//
// NOTE(review): the `g == nil` checks below compare interface values;
// an interface holding a typed nil concrete value is non-nil and would
// fall through into wkt — confirm callers never pass those.
// NOTE(review): the WKT spec spells the multi-line keyword
// "MULTILINESTRING"; the "MULTILINE " literals are left untouched here
// because changing them changes output.
func WKT(geo tegola.Geometry) string {
	switch g := geo.(type) {
	default:
		return ""
	case tegola.Point:
		// POINT( 10 10)
		if g == nil {
			return "POINT EMPTY"
		}
		return "POINT (" + wkt(g) + ")"
	case tegola.Point3:
		// POINT M ( 10 10 10 )
		if g == nil {
			return "POINT M EMPTY"
		}
		return "POINT M (" + wkt(g) + ")"
	case tegola.MultiPoint:
		if g == nil {
			return "MULTIPOINT EMPTY"
		}
		return "MULTIPOINT " + wkt(g)
	case tegola.LineString:
		if g == nil {
			return "LINESTRING EMPTY"
		}
		return "LINESTRING " + wkt(g)
	case tegola.MultiLine:
		if g == nil {
			return "MULTILINE EMPTY"
		}
		return "MULTILINE " + wkt(g)
	case tegola.Polygon:
		if g == nil {
			return "POLYGON EMPTY"
		}
		return "POLYGON " + wkt(g)
	case tegola.MultiPolygon:
		if g == nil {
			return "MULTIPOLYGON EMPTY"
		}
		return "MULTIPOLYGON " + wkt(g)
	case tegola.Collection:
		if g == nil {
			return "GEOMETRYCOLLECTION EMPTY"
		}
		// A collection renders each member with its own type keyword.
		var geometries []string
		for _, sg := range g.Geometries() {
			s := WKT(sg)
			geometries = append(geometries, s)
		}
		return "GEOMETRYCOLLECTION (" + strings.Join(geometries, ",") + ")"
	}
}
package timeseries
import (
"time"
"github.com/grokify/mogo/time/timeutil"
)
// ToYear aggregates each series' time values into yearly buckets and
// returns a new set with a Year interval. popLast drops the
// (typically partial) final bucket before inflating.
//
// NOTE(review): the inflate parameter is ignored — Inflate is always
// called — and the error result is always nil. Confirm whether either
// is intentional before relying on them.
func (set *TimeSeriesSet) ToYear(inflate, popLast bool) (TimeSeriesSet, error) {
	newTSS := TimeSeriesSet{
		Name:     set.Name,
		Series:   map[string]TimeSeries{},
		Interval: timeutil.Year,
		Order:    set.Order}
	for name, ts := range set.Series {
		newTSS.Series[name] = ts.ToYear()
	}
	if popLast {
		newTSS.PopLast()
	}
	newTSS.Inflate()
	// Recompute the time index from the aggregated series.
	newTSS.Times = newTSS.TimeSlice(true)
	return newTSS, nil
}
// ToMonth aggregates each series' time values into monthly buckets and
// returns a new set with a Month interval. When cumulative is true the
// work is delegated to toMonthCumulative. inflate is passed to each
// series' ToMonth; monthsFilter restricts aggregation to the listed
// months. popLast drops the (typically partial) final bucket.
// The error result is always nil in this branch.
func (set *TimeSeriesSet) ToMonth(cumulative, inflate, popLast bool, monthsFilter []time.Month) (TimeSeriesSet, error) {
	if cumulative {
		return set.toMonthCumulative(inflate, popLast)
	}
	newTSS := TimeSeriesSet{
		Name:     set.Name,
		Series:   map[string]TimeSeries{},
		Times:    set.Times,
		Interval: timeutil.Month,
		Order:    set.Order}
	for name, ts := range set.Series {
		newTSS.Series[name] = ts.ToMonth(inflate, monthsFilter...)
	}
	if popLast {
		newTSS.PopLast()
	}
	newTSS.Inflate()
	// Recompute the time index from the aggregated series.
	newTSS.Times = newTSS.TimeSlice(true)
	return newTSS, nil
}
// toMonthCumulative aggregates each series into cumulative monthly
// buckets via TimeSeries.ToMonthCumulative, propagating any
// per-series conversion error. popLast drops the final bucket.
func (set *TimeSeriesSet) toMonthCumulative(inflate, popLast bool) (TimeSeriesSet, error) {
	newTSS := TimeSeriesSet{
		Name:     set.Name,
		Series:   map[string]TimeSeries{},
		Times:    set.Times,
		Interval: timeutil.Month,
		Order:    set.Order}
	for seriesName, ts := range set.Series {
		newTS, err := ts.ToMonthCumulative(inflate, newTSS.Times...)
		if err != nil {
			// Return the partially built set alongside the error.
			return newTSS, err
		}
		newTSS.Series[seriesName] = newTS
	}
	if popLast {
		newTSS.PopLast()
	}
	newTSS.Inflate()
	// Recompute the time index from the aggregated series.
	newTSS.Times = newTSS.TimeSlice(true)
	return newTSS, nil
}
// PopLast removes the chronologically last time entry, if any, from
// every series in the set.
func (set *TimeSeriesSet) PopLast() {
	if times := set.TimeSlice(true); len(times) > 0 {
		set.DeleteTime(times[len(times)-1])
	}
}
// DeleteTime removes the entry at dt from every series in the set,
// storing each modified series back into the map.
func (set *TimeSeriesSet) DeleteTime(dt time.Time) {
	for id, ds := range set.Series {
		ds.DeleteTime(dt)
		set.Series[id] = ds
	}
}
// ToNewSeriesNames rebuilds the set item by item, renaming series
// names and series-set names through the supplied lookup maps, and
// returns the re-inflated result (always with a Month interval).
//
// NOTE(review): the seriesSetNames lookup is keyed by item.SeriesName
// (which may already have been renamed above), not by
// item.SeriesSetName — this looks like a bug; confirm the intended
// keying before changing it.
func (set *TimeSeriesSet) ToNewSeriesNames(seriesNames, seriesSetNames map[string]string) TimeSeriesSet {
	newTSS := TimeSeriesSet{
		Name:     set.Name,
		Series:   map[string]TimeSeries{},
		Times:    set.Times,
		IsFloat:  set.IsFloat,
		Interval: timeutil.Month,
		Order:    []string{}}
	for _, ts := range set.Series {
		for _, item := range ts.ItemMap {
			if len(seriesNames) > 0 {
				if newSeriesName, ok := seriesNames[item.SeriesName]; ok {
					item.SeriesName = newSeriesName
				}
			}
			if len(seriesSetNames) > 0 {
				if newSeriesSetName, ok := seriesSetNames[item.SeriesName]; ok {
					item.SeriesSetName = newSeriesSetName
				}
			}
			newTSS.AddItems(item)
		}
	}
	newTSS.Inflate()
	return newTSS
}
package i6502
import "fmt"
/*
The Cpu only contains the AddressBus, through which 8-bit values can be read and written
at 16-bit addresses.

The Cpu has an 8-bit accumulator (A) and two 8-bit index registers (X,Y). There is a 16-bit
Program Counter (PC) and an 8-bit Stack Pointer (SP), pointing to addresses in 0x0100-01FF.

The status register (P) contains flags for Zero, Negative, Break, Decimal, IrqDisable,
Carry and Overflow flags.
*/
type Cpu struct {
	A  byte   // Accumulator
	X  byte   // Index register X
	Y  byte   // Index register Y
	PC uint16 // 16-bit program counter
	P  byte   // Status Register
	SP byte   // Stack Pointer
	Bus *AddressBus // The address bus
}

// Well-known addresses in the 6502 memory map.
const (
	ZeropageBase = 0x0000 // 0x0000-00FF Reserved for zeropage instructions
	StackBase    = 0x0100 // 0x0100-01FF Reserved for stack
	ResetVector  = 0xFFFC // 0xFFFC-FFFD
	IrqVector    = 0xFFFE // 0xFFFE-FFFF
)
// NewCpu creates a new Cpu, using the AddressBus for accessing memory.
// The error result is always nil in the current implementation.
func NewCpu(bus *AddressBus) (*Cpu, error) {
	return &Cpu{Bus: bus}, nil
}

// String returns a two-line human-readable dump of the current CPU
// state: registers, PC, and the status flags as NVxBDIZC bits.
func (c *Cpu) String() string {
	str := ">>> CPU [ A ] [ X ] [ Y ] [ SP ] [ PC ] NVxBDIZC\n>>> 0x%02X 0x%02X 0x%02X 0x%02X 0x%04X %08b\n"
	return fmt.Sprintf(str, c.A, c.X, c.Y, c.SP, c.PC, c.P)
}
/*
Reset the CPU, emulating the RESB pin.

The status register is reset to a know state (0x34, IrqDisabled set, Decimal unset, Break set).
Then the Program Counter is set to the value read from `ResetVector` (0xFFFC-FFFD).

Normally, no assumptions can be made about registers (A, X, Y) and the
Stack Pointer. For convenience, these are reset to 0x00 (A,X,Y) and 0xFF (SP).
*/
func (c *Cpu) Reset() {
	c.PC = c.Bus.Read16(ResetVector)
	c.P = 0x34
	// Not specified, but let's clean up
	c.A = 0x00
	c.X = 0x00
	c.Y = 0x00
	c.SP = 0xFF
}

/*
Simulate the IRQ pin.

This will push the current Cpu state to the stack (P + PC) and set the PC
to the address read from the `IrqVector` (0xFFFE-FFFF)

NOTE(review): the IrqDisable flag is not consulted here, although IRQ is
maskable on real hardware — confirm callers check it (or that this is
intentionally unconditional).
*/
func (c *Cpu) Interrupt() {
	c.handleIrq(c.PC)
}

// handleIrq implements the common interrupt/BRK sequence: push PC
// (high byte first) and P, set IrqDisable, and jump through IrqVector.
func (c *Cpu) handleIrq(PC uint16) {
	c.stackPush(byte(PC >> 8))
	c.stackPush(byte(PC))
	c.stackPush(c.P)
	c.setIrqDisable(true)
	c.PC = c.Bus.Read16(IrqVector)
}
// LoadProgram writes the specified program data at the given memory
// location and points the Program Counter to the beginning of the
// program.
func (c *Cpu) LoadProgram(data []byte, location uint16) {
	for i, b := range data {
		c.Bus.WriteByte(location+uint16(i), b)
	}
	c.PC = location
}

// Steps executes `steps` instructions in sequence.
func (c *Cpu) Steps(steps int) {
	for i := 0; i < steps; i++ {
		c.Step()
	}
}

// Step reads, decodes, and executes the single instruction pointed to
// by the Program Counter (PC). PC is advanced past the instruction
// BEFORE execution, so branch/jump handlers see the following address.
func (c *Cpu) Step() {
	instruction := c.readNextInstruction()
	c.PC += uint16(instruction.Size)
	c.execute(instruction)
}
// execute dispatches one decoded instruction to its implementation.
// PC has already been advanced past the instruction by Step.
// Unimplemented opcodes panic.
func (c *Cpu) execute(instruction Instruction) {
	switch instruction.opcodeId {
	case nop:
		break
	// Arithmetic.
	case adc:
		c.adc(instruction)
	case sbc:
		c.sbc(instruction)
	// Flag set/clear.
	case sec:
		c.setCarry(true)
	case sed:
		c.setDecimal(true)
	case sei:
		c.setIrqDisable(true)
	case clc:
		c.setCarry(false)
	case cld:
		c.setDecimal(false)
	case cli:
		c.setIrqDisable(false)
	case clv:
		c.setOverflow(false)
	// Increment / decrement (register setters update N/Z flags).
	case inx:
		c.setX(c.X + 1)
	case iny:
		c.setY(c.Y + 1)
	case inc:
		c.inc(instruction)
	case dex:
		c.setX(c.X - 1)
	case dey:
		c.setY(c.Y - 1)
	case dec:
		c.dec(instruction)
	// Loads.
	case lda:
		value := c.resolveOperand(instruction)
		c.setA(value)
	case ldx:
		value := c.resolveOperand(instruction)
		c.setX(value)
	case ldy:
		value := c.resolveOperand(instruction)
		c.setY(value)
	// Bitwise logic on the accumulator.
	case ora:
		value := c.resolveOperand(instruction)
		c.setA(c.A | value)
	case and:
		value := c.resolveOperand(instruction)
		c.setA(c.A & value)
	case eor:
		value := c.resolveOperand(instruction)
		c.setA(c.A ^ value)
	// Stores (no flag changes).
	case sta:
		address := c.memoryAddress(instruction)
		c.Bus.WriteByte(address, c.A)
	case stx:
		address := c.memoryAddress(instruction)
		c.Bus.WriteByte(address, c.X)
	case sty:
		address := c.memoryAddress(instruction)
		c.Bus.WriteByte(address, c.Y)
	// Register transfers (txs does not affect flags).
	case tax:
		c.setX(c.A)
	case tay:
		c.setY(c.A)
	case txa:
		c.setA(c.X)
	case tya:
		c.setA(c.Y)
	case tsx:
		c.setX(c.SP)
	case txs:
		c.SP = c.X
	// Shifts and rotates.
	case asl:
		c.asl(instruction)
	case lsr:
		c.lsr(instruction)
	case rol:
		c.rol(instruction)
	case ror:
		c.ror(instruction)
	// Comparisons: carry = register >= operand; N/Z from the difference.
	case cmp:
		value := c.resolveOperand(instruction)
		c.setCarry(c.A >= value)
		c.setArithmeticFlags(c.A - value)
	case cpx:
		value := c.resolveOperand(instruction)
		c.setCarry(c.X >= value)
		c.setArithmeticFlags(c.X - value)
	case cpy:
		value := c.resolveOperand(instruction)
		c.setCarry(c.Y >= value)
		c.setArithmeticFlags(c.Y - value)
	// Software interrupt: pushes PC+1 (the byte after BRK's padding).
	case brk:
		c.setBreak(true)
		c.handleIrq(c.PC + 1)
	// Conditional branches on individual status flags.
	case bcc:
		if !c.getCarry() {
			c.branch(instruction)
		}
	case bcs:
		if c.getCarry() {
			c.branch(instruction)
		}
	case bne:
		if !c.getZero() {
			c.branch(instruction)
		}
	case beq:
		if c.getZero() {
			c.branch(instruction)
		}
	case bpl:
		if !c.getNegative() {
			c.branch(instruction)
		}
	case bmi:
		if c.getNegative() {
			c.branch(instruction)
		}
	case bvc:
		if !c.getOverflow() {
			c.branch(instruction)
		}
	case bvs:
		if c.getOverflow() {
			c.branch(instruction)
		}
	// BIT: N and V from the operand's top bits, Z from A & operand.
	case bit:
		value := c.resolveOperand(instruction)
		c.setNegative((value & 0x80) != 0)
		c.setOverflow((value & 0x40) != 0)
		c.setZero((c.A & value) == 0)
	// Stack operations. PHP pushes P with the B and unused bits forced set.
	case php:
		c.stackPush(c.P | 0x30)
	case plp:
		c.setP(c.stackPop())
	case pha:
		c.stackPush(c.A)
	case pla:
		value := c.stackPop()
		c.setA(value)
	// Jumps and subroutines. JSR pushes (return address - 1), high byte
	// first; RTS pops low byte first and adds 1 back.
	case jmp:
		c.PC = c.memoryAddress(instruction)
	case jsr:
		c.stackPush(byte((c.PC - 1) >> 8))
		c.stackPush(byte(c.PC - 1))
		c.PC = c.memoryAddress(instruction)
	case rts:
		c.PC = (uint16(c.stackPop()) | uint16(c.stackPop())<<8) + 1
	case rti:
		c.setP(c.stackPop())
		c.PC = uint16(c.stackPop()) | uint16(c.stackPop())<<8
	default:
		panic(fmt.Errorf("Unimplemented instruction: %s", instruction))
	}
}
// readNextInstruction fetches and decodes the instruction at PC,
// including its 8- or 16-bit operand as dictated by the opcode's size.
// Panics on an unknown opcode byte.
func (c *Cpu) readNextInstruction() Instruction {
	// Read the opcode
	opcode := c.Bus.ReadByte(c.PC)
	optype, ok := opTypes[opcode]
	if !ok {
		panic(fmt.Sprintf("Unknown or unimplemented opcode 0x%02X\n%s", opcode, c.String()))
	}
	instruction := Instruction{OpType: optype, Address: c.PC}
	switch instruction.Size {
	case 1: // Zero operand instruction
	case 2: // 8-bit operand
		instruction.Op8 = c.Bus.ReadByte(c.PC + 1)
	case 3: // 16-bit operand
		instruction.Op16 = c.Bus.Read16(c.PC + 1)
	}
	return instruction
}
// branch applies the instruction's signed 8-bit relative displacement
// to the program counter. Converting through int8 and widening to
// uint16 sign-extends the value, so a single wrapping add handles both
// forward and backward branches identically to the original
// add/subtract pair.
func (c *Cpu) branch(in Instruction) {
	c.PC += uint16(int8(in.Op8))
}
// resolveOperand returns the 8-bit value an instruction operates on:
// the immediate operand itself, or the byte read from the effective
// memory address for every other addressing mode.
func (c *Cpu) resolveOperand(in Instruction) uint8 {
	switch in.addressingId {
	case immediate:
		return in.Op8
	default:
		return c.Bus.ReadByte(c.memoryAddress(in))
	}
}

// memoryAddress computes the effective 16-bit address for an
// instruction according to its addressing mode. Zeropage-indexed modes
// wrap within the zero page because the uint8 addition wraps before
// widening. Relative addressing is handled by branch, not here.
// NOTE(review): the hardware's page-boundary quirk for indirect JMP is
// not emulated by the indirect case — confirm whether that fidelity is
// required.
func (c *Cpu) memoryAddress(in Instruction) uint16 {
	switch in.addressingId {
	case absolute:
		return in.Op16
	case absoluteX:
		return in.Op16 + uint16(c.X)
	case absoluteY:
		return in.Op16 + uint16(c.Y)
	case indirect:
		return c.Bus.Read16(in.Op16)
	case indirectX:
		// Pointer is at (operand + X) within the zero page.
		return c.Bus.Read16(uint16(in.Op8 + c.X))
	case indirectY:
		// Pointer is at the zero-page operand; Y is added afterwards.
		return c.Bus.Read16(uint16(in.Op8)) + uint16(c.Y)
	case relative:
		panic("Relative addressing not yet implemented.")
	case zeropage:
		return uint16(in.Op8)
	case zeropageX:
		return uint16(in.Op8 + c.X)
	case zeropageY:
		return uint16(in.Op8 + c.Y)
	default:
		panic(fmt.Errorf("Unhandled addressing mode. Are you sure you are running a 6502 ROM?"))
	}
}
// adc adds the operand and the carry flag to the accumulator,
// dispatching to the BCD implementation when the Decimal flag is set.
func (c *Cpu) adc(in Instruction) {
	operand := c.resolveOperand(in)
	carryIn := c.getCarryInt()
	if c.getDecimal() {
		c.adcDecimal(c.A, operand, carryIn)
	} else {
		c.adcNormal(c.A, operand, carryIn)
	}
}

// sbc subtracts the operand (with borrow = inverted carry) from the
// accumulator. In binary mode this is implemented as adding the one's
// complement of the operand, the standard 6502 identity.
func (c *Cpu) sbc(in Instruction) {
	operand := c.resolveOperand(in)
	carryIn := c.getCarryInt()
	// fmt.Printf("SBC: A: 0x%02X V: 0x%02X C: %b D: %v\n", c.A, operand, carryIn, c.getDecimal())
	if c.getDecimal() {
		c.sbcDecimal(c.A, operand, carryIn)
	} else {
		c.adcNormal(c.A, ^operand, carryIn)
	}
}

// inc increments the byte at the instruction's effective address and
// updates the N/Z flags from the result.
func (c *Cpu) inc(in Instruction) {
	address := c.memoryAddress(in)
	value := c.Bus.ReadByte(address) + 1
	c.Bus.WriteByte(address, value)
	c.setArithmeticFlags(value)
}

// dec decrements the byte at the instruction's effective address and
// updates the N/Z flags from the result.
func (c *Cpu) dec(in Instruction) {
	address := c.memoryAddress(in)
	value := c.Bus.ReadByte(address) - 1
	c.Bus.WriteByte(address, value)
	c.setArithmeticFlags(value)
}
// asl shifts the accumulator or memory operand left one bit; bit 7
// moves into the carry flag and N/Z are set from the result.
func (c *Cpu) asl(in Instruction) {
	switch in.addressingId {
	case accumulator:
		c.setCarry((c.A >> 7) == 1)
		c.A <<= 1
		c.setArithmeticFlags(c.A)
	default:
		address := c.memoryAddress(in)
		value := c.Bus.ReadByte(address)
		c.setCarry((value >> 7) == 1)
		value <<= 1
		c.Bus.WriteByte(address, value)
		c.setArithmeticFlags(value)
	}
}

// lsr shifts the accumulator or memory operand right one bit; bit 0
// moves into the carry flag and N/Z are set from the result.
func (c *Cpu) lsr(in Instruction) {
	switch in.addressingId {
	case accumulator:
		c.setCarry((c.A & 0x01) == 1)
		c.A >>= 1
		c.setArithmeticFlags(c.A)
	default:
		address := c.memoryAddress(in)
		value := c.Bus.ReadByte(address)
		c.setCarry((value & 0x01) == 1)
		value >>= 1
		c.Bus.WriteByte(address, value)
		c.setArithmeticFlags(value)
	}
}

// rol rotates left through carry: the old carry enters bit 0 and the
// old bit 7 becomes the new carry. N/Z are set from the result.
func (c *Cpu) rol(in Instruction) {
	carry := c.getCarryInt()
	switch in.addressingId {
	case accumulator:
		c.setCarry((c.A & 0x80) != 0)
		c.A = c.A<<1 | carry
		c.setArithmeticFlags(c.A)
	default:
		address := c.memoryAddress(in)
		value := c.Bus.ReadByte(address)
		c.setCarry((value & 0x80) != 0)
		value = value<<1 | carry
		c.Bus.WriteByte(address, value)
		c.setArithmeticFlags(value)
	}
}

// ror rotates right through carry: the old carry enters bit 7 and the
// old bit 0 becomes the new carry. N/Z are set from the result.
func (c *Cpu) ror(in Instruction) {
	carry := c.getCarryInt()
	switch in.addressingId {
	case accumulator:
		c.setCarry(c.A&0x01 == 1)
		c.A = c.A>>1 | carry<<7
		c.setArithmeticFlags(c.A)
	default:
		address := c.memoryAddress(in)
		value := c.Bus.ReadByte(address)
		c.setCarry(value&0x01 == 1)
		value = value>>1 | carry<<7
		c.Bus.WriteByte(address, value)
		c.setArithmeticFlags(value)
	}
}
// adcNormal performs regular, 8-bit binary addition of a + b + carryIn,
// storing the result in A and updating carry, overflow, and (via setA)
// the N/Z flags.
func (c *Cpu) adcNormal(a uint8, b uint8, carryIn uint8) {
	result16 := uint16(a) + uint16(b) + uint16(carryIn)
	result := uint8(result16)
	carryOut := (result16 & 0x100) != 0
	// Signed overflow occurs when both operands share a sign that the
	// result does not.
	overflow := (a^result)&(b^result)&0x80 != 0
	// Set the carry flag if we exceed 8-bits
	c.setCarry(carryOut)
	// Set the overflow bit
	c.setOverflow(overflow)
	// Store the resulting value (8-bits)
	c.setA(result)
}

// adcDecimal performs BCD (decimal mode) addition, adjusting each
// nibble past 9 by 6. This implementation never sets N or V; Z is set
// from the BCD result.
func (c *Cpu) adcDecimal(a uint8, b uint8, carryIn uint8) {
	var carryB uint8 = 0
	low := (a & 0x0F) + (b & 0x0F) + carryIn
	if (low & 0xFF) > 9 {
		low += 6
	}
	if low > 15 {
		carryB = 1
	}
	high := (a >> 4) + (b >> 4) + carryB
	if (high & 0xFF) > 9 {
		high += 6
	}
	result := (low & 0x0F) | (high<<4)&0xF0
	c.setCarry(high > 15)
	c.setZero(result == 0)
	c.setNegative(false) // BCD never sets negative
	c.setOverflow(false) // BCD never sets overflow
	c.A = result
}

// sbcDecimal performs BCD (decimal mode) subtraction with borrow,
// adjusting each nibble that underflows by 6. Carry is set when no
// borrow out occurred; N and V are never set in BCD mode here.
func (c *Cpu) sbcDecimal(a uint8, b uint8, carryIn uint8) {
	var carryB uint8 = 0
	// Invert the incoming carry to obtain the borrow.
	if carryIn == 0 {
		carryIn = 1
	} else {
		carryIn = 0
	}
	low := (a & 0x0F) - (b & 0x0F) - carryIn
	if (low & 0x10) != 0 {
		low -= 6
	}
	if (low & 0x10) != 0 {
		carryB = 1
	}
	high := (a >> 4) - (b >> 4) - carryB
	if (high & 0x10) != 0 {
		high -= 6
	}
	result := (low & 0x0F) | (high << 4)
	c.setCarry((high & 0xFF) < 15)
	c.setZero(result == 0)
	c.setNegative(false) // BCD never sets negative
	c.setOverflow(false) // BCD never sets overflow
	c.A = result
}
// stackPush writes data at the current stack position (0x0100 + SP)
// and post-decrements SP; the 6502 stack grows downward.
func (c *Cpu) stackPush(data byte) {
	c.Bus.WriteByte(StackBase+uint16(c.SP), data)
	c.SP -= 1
}

// stackPeek returns the most recently pushed byte without popping it.
func (c *Cpu) stackPeek() byte {
	return c.Bus.ReadByte(StackBase + uint16(c.SP+1))
}

// stackPop pre-increments SP and returns the byte at the new stack
// position, undoing a previous stackPush.
func (c *Cpu) stackPop() byte {
	c.SP += 1
	return c.Bus.ReadByte(StackBase + uint16(c.SP))
}
package cart
// NewMBC1 returns a new MBC1 memory controller.
func NewMBC1(data []byte) BankingController {
return &MBC1{
rom: data,
romBank: 1,
ram: make([]byte, 0x8000),
}
}
// MBC1 is a GameBoy cartridge that supports rom and ram banking.
type MBC1 struct {
rom []byte
romBank uint32
ram []byte
ramBank uint32
ramEnabled bool
romBanking bool
}
// Read returns a value at a memory address in the ROM or RAM.
func (r *MBC1) Read(address uint16) byte {
switch {
case address < 0x4000:
return r.rom[address] // Bank 0 is fixed
case address < 0x8000:
return r.rom[uint32(address-0x4000)+(r.romBank*0x4000)] // Use selected rom bank
default:
return r.ram[(0x2000*r.ramBank)+uint32(address-0xA000)] // Use selected ram bank
}
}
// WriteROM attempts to switch the ROM or RAM bank.
func (r *MBC1) WriteROM(address uint16, value byte) {
switch {
case address < 0x2000:
// RAM enable
if value&0xF == 0xA {
r.ramEnabled = true
} else if value&0xF == 0x0 {
r.ramEnabled = false
}
case address < 0x4000:
// ROM bank number (lower 5)
r.romBank = (r.romBank & 0xe0) | uint32(value&0x1f)
r.updateRomBankIfZero()
case address < 0x6000:
// ROM/RAM banking
if r.romBanking {
r.romBank = (r.romBank & 0x1F) | uint32(value&0xe0)
r.updateRomBankIfZero()
} else {
r.ramBank = uint32(value & 0x3)
}
case address < 0x8000:
// ROM/RAM select mode
r.romBanking = value&0x1 == 0x00
if r.romBanking {
r.ramBank = 0
} else {
r.romBank = r.romBank & 0x1F
}
}
}
// Update the romBank if it is on a value which cannot be used.
func (r *MBC1) updateRomBankIfZero() {
if r.romBank == 0x00 || r.romBank == 0x20 || r.romBank == 0x40 || r.romBank == 0x60 {
r.romBank++
}
}
// WriteRAM writes data to the ram if it is enabled.
func (r *MBC1) WriteRAM(address uint16, value byte) {
if r.ramEnabled {
r.ram[(0x2000*r.ramBank)+uint32(address-0xA000)] = value
}
}
// GetSaveData returns the save data for this banking controller.
func (r *MBC1) GetSaveData() []byte {
data := make([]byte, len(r.ram))
copy(data, r.ram)
return data
}
// LoadSaveData loads the save data into the cartridge.
func (r *MBC1) LoadSaveData(data []byte) {
r.ram = data
} | pkg/cart/mbc1.go | 0.731634 | 0.472927 | mbc1.go | starcoder |
package agg
import (
"github.com/brimdata/zed"
)
// Schema constructs a fused type for types passed to Mixin. Values of any
// mixed-in type can be shaped to the fused type without loss of information.
type Schema struct {
	zctx *zed.Context
	typ  zed.Type // fused type so far; nil until the first Mixin
}

// NewSchema returns a Schema with no types mixed in yet.
func NewSchema(zctx *zed.Context) *Schema {
	return &Schema{zctx: zctx}
}

// Mixin mixes t into the fused type.
func (s *Schema) Mixin(t zed.Type) {
	if s.typ == nil {
		// The first type seen becomes the fused type as-is.
		s.typ = t
	} else {
		s.typ = merge(s.zctx, s.typ, t)
	}
}

// Type returns the fused type.
func (s *Schema) Type() zed.Type {
	return s.typ
}
// merge returns a type that can represent values of both a and b
// without information loss: records merge field-wise, container types
// merge element-wise, and otherwise the result is a union of the two.
// Null fuses to the other type; an array fused with a set yields an
// array.
func merge(zctx *zed.Context, a, b zed.Type) zed.Type {
	aUnder := zed.TypeUnder(a)
	if aUnder == zed.TypeNull {
		return b
	}
	bUnder := zed.TypeUnder(b)
	if bUnder == zed.TypeNull {
		return a
	}
	if a, ok := aUnder.(*zed.TypeRecord); ok {
		if b, ok := bUnder.(*zed.TypeRecord); ok {
			// Merge records field-wise: keep a's column order, append
			// b's new columns, and recursively fuse shared columns.
			cols := append([]zed.Column{}, a.Columns...)
			for _, c := range b.Columns {
				if i, ok := columnOfField(cols, c.Name); !ok {
					cols = append(cols, c)
				} else if cols[i] != c {
					cols[i].Type = merge(zctx, cols[i].Type, c.Type)
				}
			}
			return zctx.MustLookupTypeRecord(cols)
		}
	}
	// Array/set combinations fuse element types; mixing array with set
	// yields an array.
	if a, ok := aUnder.(*zed.TypeArray); ok {
		if b, ok := bUnder.(*zed.TypeArray); ok {
			return zctx.LookupTypeArray(merge(zctx, a.Type, b.Type))
		}
		if b, ok := bUnder.(*zed.TypeSet); ok {
			return zctx.LookupTypeArray(merge(zctx, a.Type, b.Type))
		}
	}
	if a, ok := aUnder.(*zed.TypeSet); ok {
		if b, ok := bUnder.(*zed.TypeArray); ok {
			return zctx.LookupTypeArray(merge(zctx, a.Type, b.Type))
		}
		if b, ok := bUnder.(*zed.TypeSet); ok {
			return zctx.LookupTypeSet(merge(zctx, a.Type, b.Type))
		}
	}
	if a, ok := aUnder.(*zed.TypeMap); ok {
		if b, ok := bUnder.(*zed.TypeMap); ok {
			keyType := merge(zctx, a.KeyType, b.KeyType)
			valType := merge(zctx, a.ValType, b.ValType)
			return zctx.LookupTypeMap(keyType, valType)
		}
	}
	if a, ok := aUnder.(*zed.TypeUnion); ok {
		// Fold b (or b's members, if it is also a union) into a's
		// member list, then collapse all record members into one.
		types := append([]zed.Type{}, a.Types...)
		if bUnion, ok := bUnder.(*zed.TypeUnion); ok {
			for _, t := range bUnion.Types {
				types = appendIfAbsent(types, t)
			}
		} else {
			types = appendIfAbsent(types, b)
		}
		types = mergeAllRecords(zctx, types)
		if len(types) == 1 {
			// The union collapsed to a single type.
			return types[0]
		}
		return zctx.LookupTypeUnion(types)
	}
	if _, ok := bUnder.(*zed.TypeUnion); ok {
		// Union handling is symmetric; swap so the union is first.
		return merge(zctx, b, a)
	}
	// XXX Merge enums?
	return zctx.LookupTypeUnion([]zed.Type{a, b})
}
func appendIfAbsent(types []zed.Type, typ zed.Type) []zed.Type {
for _, t := range types {
if t == typ {
return types
}
}
return append(types, typ)
}
func columnOfField(cols []zed.Column, name string) (int, bool) {
for i, c := range cols {
if c.Name == name {
return i, true
}
}
return -1, false
}
func mergeAllRecords(zctx *zed.Context, types []zed.Type) []zed.Type {
out := types[:0]
recIndex := -1
for _, t := range types {
if zed.IsRecordType(t) {
if recIndex < 0 {
recIndex = len(out)
} else {
out[recIndex] = merge(zctx, out[recIndex], t)
continue
}
}
out = append(out, t)
}
return out
} | runtime/expr/agg/schema.go | 0.606382 | 0.417271 | schema.go | starcoder |
package perfcounters
import (
"fmt"
"sync"
)
/*
AverageCount32
An average counter that shows how many items are processed, on average, during an operation. Counters of this type display a ratio of the items processed to the number of operations
completed. The ratio is calculated by comparing the number of items processed during the last interval to the number of operations completed during the last interval.
Formula: (N 1 -N 0)/(B 1 -B 0), where N 1 and N 0 are performance counter readings, and the B 1 and B 0 are their corresponding AverageBase values. Thus, the numerator represents the
numbers of items processed during the sample interval, and the denominator represents the number of operations completed during the sample interval.
Counters of this type include PhysicalDisk\ Avg. Disk Bytes/Transfer.
[[source: https://msdn.microsoft.com/en-us/library/system.diagnostics.performancecountertype(v=vs.90).aspx]]
*/
type AverageCount32 struct {
lastCount int32
lastBase int32
count int32
base int32
mu sync.Mutex
}
func NewAverageCount32() *AverageCount32 {
return &AverageCount32{}
}
func (self *AverageCount32) Increment() {
self.mu.Lock()
defer self.mu.Unlock()
self.count += 1
self.base += 1
}
func (self *AverageCount32) Add(value int32) {
self.mu.Lock()
defer self.mu.Unlock()
self.count += value
self.base += 1
}
func (self *AverageCount32) CalculatedValue() float32 {
self.mu.Lock()
defer self.mu.Unlock()
count := self.count
base := self.base
lastCount := self.lastCount
lastBase := self.lastBase
if base == 0 {
return 0
}
if base-lastBase == 0 {
return 0
}
calculatedValue := float32((count - lastCount) / (base - lastBase))
self.lastCount = count
self.lastBase = base
return calculatedValue
}
func (self *AverageCount32) String() string {
return fmt.Sprintf("%.3f", self.CalculatedValue())
}
/*
func main() {
counter := NewAverageCount32()
fmt.Println(counter.String()) // should display 0.00
counter.Increment() // one operation, 1 item
fmt.Println(counter.String()) // should display 1.00
counter.Add(9) // one operation, 9 item
fmt.Println(counter.String()) // should display 9.00
fmt.Println(counter.String()) // should display 0.00
counter.Add(4) // one operation, 4 items
counter.Add(8) // one operation, 8 items
fmt.Println(counter.String()) // should display 6.00
}
*/ | perfcounters/averagecount32.go | 0.783492 | 0.645092 | averagecount32.go | starcoder |
package distance
import (
"log"
"math"
"sync"
seq "github.com/bkaraceylan/goophy/sequence"
)
//DistMat is a distance matrix structure
type DistMat struct {
Ids []string
Matrix [][]float64
Method string
Alignment *seq.DNAPool
}
//PDist calculates p-distance between two sequences
func PDist(dna1 seq.DNA, dna2 seq.DNA) float64 {
seq1 := dna1.Seq
seq2 := dna2.Seq
if len(seq1) != len(seq2) {
log.Fatalf("Varying sequence lengths %v != %v \n", len(seq1), len(seq2))
}
numnuc := 0
numdiff := 0
for x := 0; x < len(seq1); x++ {
if !seq.IsNuc(rune(seq1[x])) || !seq.IsNuc(rune(seq2[x])) {
continue
}
if rune(seq1[x]) != rune(seq2[x]) {
numdiff++
}
numnuc++
}
return (float64(numdiff) / float64(numnuc))
}
//JCDist calculates Jukes-Cantor distance between two sequences
func JCDist(dna1 seq.DNA, dna2 seq.DNA) float64 {
pDist := PDist(dna1, dna2)
pow := math.Log(float64(1 - (1.3333 * pDist)))
dist := -0.75 * pow
return dist
}
//K80Dist calculates Kimure-Nei distance between two sequences
func K80Dist(dna1 seq.DNA, dna2 seq.DNA) float64 {
len, _, ts, tv := seq.ComputeTrans(dna1, dna2)
P := float64(ts) / float64(len)
Q := float64(tv) / float64(len)
a1 := 1 - 2*P - Q
a2 := 1 - 2*Q
dist := -0.5 * math.Log(a1*math.Sqrt(a2))
return dist
}
//PairDist calculates pairwise distances between all sequences in a DNAPool using the specified method (OLD).
func PairDist(pool seq.DNAPool, method string) DistMat {
var result [][]float64
dmat := DistMat{}
dmat.Alignment = &pool
dmat.Method = method
for _, dna1 := range pool.Samples {
var row []float64
for _, dna2 := range pool.Samples {
var dist float64
switch method {
case "P":
dist = PDist(dna1, dna2)
case "JC":
dist = JCDist(dna1, dna2)
case "K80":
dist = K80Dist(dna1, dna2)
}
row = append(row, dist)
}
result = append(result, row)
}
for _, v := range pool.Samples {
dmat.Ids = append(dmat.Ids, v.Id)
}
dmat.Matrix = result
return dmat
}
type seqJob struct {
addr *DistMat
dna1 seq.DNA
dna2 seq.DNA
idx1 int
idx2 int
model string
}
//PairDistConc concurrently calculates the pairwise distances betwen all sequences in a DNAPool using the specified method.
func PairDistConc(pool seq.DNAPool, model string) DistMat {
dmat := DistMat{}
dmat.Alignment = &pool
dmat.Method = model
dmat.Matrix = make([][]float64, len(pool.Samples))
chann := make(chan seqJob, 100)
go func() {
defer close(chann)
for idx1, dna1 := range pool.Samples {
dmat.Matrix[idx1] = make([]float64, len(pool.Samples))
for idx2, dna2 := range pool.Samples {
chann <- seqJob{&dmat, dna1, dna2, idx1, idx2, model}
}
}
}()
go func() {
for _, v := range pool.Samples {
dmat.Ids = append(dmat.Ids, v.Id)
}
}()
var wg sync.WaitGroup
for w := 1; w <= 10; w++ {
wg.Add(1)
go pairDistWorker(chann, &wg)
}
wg.Wait()
return dmat
}
func pairDistWorker(seqjob <-chan seqJob, wg *sync.WaitGroup) {
var dist float64
defer wg.Done()
for j := range seqjob {
if j.idx1 == j.idx2 {
j.addr.Matrix[j.idx1][j.idx2] = 0
} else {
switch j.model {
case "P":
dist = PDist(j.dna1, j.dna2)
case "JC":
dist = JCDist(j.dna1, j.dna2)
case "K80":
dist = K80Dist(j.dna1, j.dna2)
}
if dist < 0 || dist == 0 {
j.addr.Matrix[j.idx1][j.idx2] = 0
} else {
j.addr.Matrix[j.idx1][j.idx2] = dist
}
}
}
} | distance/distance.go | 0.690037 | 0.460046 | distance.go | starcoder |
package shape
import (
"fmt"
"math"
"strings"
"github.com/fogleman/gg"
"github.com/golang/freetype/raster"
)
// Polygon represents a polygonal shape with Order vertices.
type Polygon struct {
Order int
Convex bool
X, Y []float64
}
func NewPolygon(order int, convex bool) *Polygon {
p := &Polygon{}
p.Order = order
p.Convex = convex
return p
}
func (p *Polygon) Init(plane *Plane) {
rnd := plane.Rnd
p.X = make([]float64, p.Order)
p.Y = make([]float64, p.Order)
p.X[0] = randomW(plane)
p.Y[0] = randomH(plane)
for i := 1; i < p.Order; i++ {
p.X[i] = p.X[0] + rnd.Float64()*40 - 20
p.Y[i] = p.Y[0] + rnd.Float64()*40 - 20
}
p.mutateImpl(plane, 1.0, 2, ActionAny)
}
func (p *Polygon) Draw(dc *gg.Context, scale float64) {
dc.NewSubPath()
for i := 0; i < p.Order; i++ {
dc.LineTo(p.X[i], p.Y[i])
}
dc.ClosePath()
dc.Fill()
}
func (p *Polygon) SVG(attrs string) string {
ret := fmt.Sprintf(
"<polygon %s points=\"",
attrs)
points := make([]string, len(p.X))
for i := 0; i < len(p.X); i++ {
points[i] = fmt.Sprintf("%f,%f", p.X[i], p.Y[i])
}
return ret + strings.Join(points, ",") + "\" />"
}
func (p *Polygon) Copy() Shape {
a := *p
a.X = make([]float64, p.Order)
a.Y = make([]float64, p.Order)
copy(a.X, p.X)
copy(a.Y, p.Y)
return &a
}
func (p *Polygon) Mutate(plane *Plane, temp float64) {
p.mutateImpl(plane, temp, 10, ActionAny)
}
func (p *Polygon) mutateImpl(plane *Plane, temp float64, rollback int, actions ActionType) {
if actions == ActionNone {
return
}
const R = math.Pi / 4.0
const m = 16
w := plane.W
h := plane.H
rnd := plane.Rnd
scale := 16 * temp
repeat := true
for repeat {
switch rnd.Intn(9) {
case 0:
if (actions & ActionMutate) != 0 {
// Move a point
i := rnd.Intn(p.Order)
a := rnd.NormFloat64() * scale
b := rnd.NormFloat64() * scale
xsave, ysave := p.X[i], p.Y[i]
p.X[i] = clamp(p.X[i]+a, -m, float64(w-1+m))
p.Y[i] = clamp(p.Y[i]+b, -m, float64(h-1+m))
if p.Valid() {
repeat = false
break
}
if rollback > 0 {
p.X[i], p.Y[i] = xsave, ysave
rollback -= 1
}
}
case 1:
if (actions & ActionMutate) != 0 {
// Swap a point
i := rnd.Intn(p.Order)
j := rnd.Intn(p.Order)
p.X[i], p.Y[i], p.X[j], p.Y[j] = p.X[j], p.Y[j], p.X[i], p.Y[i]
if p.Valid() {
repeat = false
break
}
if rollback > 0 {
p.X[i], p.Y[i], p.X[j], p.Y[j] = p.X[j], p.Y[j], p.X[i], p.Y[i]
rollback -= 1
}
}
case 2:
if (actions & ActionTranslate) != 0 {
// Shift all points
a := rnd.NormFloat64() * scale
b := rnd.NormFloat64() * scale
for i := range p.X {
p.X[i] = clamp(p.X[i]+a, -m, float64(w-1+m))
p.Y[i] = clamp(p.Y[i]+b, -m, float64(h-1+m))
}
if p.Valid() {
repeat = false
break
}
if rollback > 0 {
// Since we have clamp, this is not exact, but it'll have to do for now.
for i := range p.X {
p.X[i] = clamp(p.X[i]-a, -m, float64(w-1+m))
p.Y[i] = clamp(p.Y[i]-b, -m, float64(h-1+m))
}
rollback -= 1
}
}
case 3:
if (actions & ActionRotate) != 0 {
// Rotate all points
cx := 0.0
cy := 0.0
for i := range p.X {
cx += p.X[i]
cy += p.Y[i]
}
cx /= float64(len(p.X))
cy /= float64(len(p.X))
theta := rnd.NormFloat64() * temp * R
cos := math.Cos(theta)
sin := math.Sin(theta)
var a, b float64
for i := range p.X {
a, b = rotateAbout(p.X[i], p.Y[i], cx, cy, cos, sin)
p.X[i] = clamp(a, -m, float64(w-1+m))
p.Y[i] = clamp(b, -m, float64(h-1+m))
}
if p.Valid() {
repeat = false
break
}
if rollback > 0 {
// Since we have clamp, this is not exact, but it'll have to do for now.
cos := math.Cos(-theta)
sin := math.Sin(-theta)
for i := range p.X {
a, b = rotateAbout(p.X[i], p.Y[i], cx, cy, cos, sin)
p.X[i] = clamp(a, -m, float64(w-1+m))
p.Y[i] = clamp(b, -m, float64(h-1+m))
}
rollback -= 1
}
}
}
}
}
func (p *Polygon) Valid() bool {
if !p.Convex {
return true
}
var sign bool
for a := 0; a < p.Order; a++ {
i := (a + 0) % p.Order
j := (a + 1) % p.Order
k := (a + 2) % p.Order
c := cross3(p.X[i], p.Y[i], p.X[j], p.Y[j], p.X[k], p.Y[k])
if a == 0 {
sign = c > 0
} else if c > 0 != sign {
return false
}
}
return true
}
func cross3(x1, y1, x2, y2, x3, y3 float64) float64 {
dx1 := x2 - x1
dy1 := y2 - y1
dx2 := x3 - x2
dy2 := y3 - y2
return dx1*dy2 - dy1*dx2
}
func (p *Polygon) Rasterize(rc *RasterContext) []Scanline {
var path raster.Path
for i := 0; i <= p.Order; i++ {
f := fixp(p.X[i%p.Order], p.Y[i%p.Order])
if i == 0 {
path.Start(f)
} else {
path.Add1(f)
}
}
return fillPath(rc, path)
} | primitive/shape/polygon.go | 0.66356 | 0.413122 | polygon.go | starcoder |
package index
import (
"fmt"
"math"
)
type Datom struct {
entity int
attribute int
value Value
transaction int
added bool
}
var MinDatom = Datom{0, 0, MinValue, 0, false}
var MaxDatom = Datom{math.MaxInt64, math.MaxInt64, MaxValue, math.MaxInt64, true}
func NewDatom(e int, a int, v interface{}, tx int, added bool) Datom {
return Datom{e, a, NewValue(v), tx, added}
}
func (d Datom) Entity() int { return d.entity }
func (d Datom) E() int { return d.entity }
func (d Datom) Attribute() int { return d.attribute }
func (d Datom) A() int { return d.attribute }
func (d Datom) Value() Value { return d.value }
func (d Datom) V() Value { return d.value }
func (d Datom) Transaction() int { return d.transaction }
func (d Datom) Tx() int { return d.transaction }
func (d Datom) Added() bool { return d.added }
func (d Datom) Retraction() Datom {
return Datom{d.entity, d.attribute, d.value, d.transaction, false}
}
func (d Datom) String() string {
return fmt.Sprintf("index.Datom{%d %d %v %d %t}", d.entity, d.attribute, d.value, d.transaction, d.added)
}
func CompareEavt(ai, bi interface{}) int {
a := ai.(*Datom)
b := bi.(*Datom)
cmp := a.entity - b.entity
if cmp != 0 {
return cmp
}
cmp = a.attribute - b.attribute
if cmp != 0 {
return cmp
}
cmp = a.value.Compare(b.value)
if cmp != 0 {
return cmp
}
return b.transaction - a.transaction
}
func CompareAevt(ai, bi interface{}) int {
a := ai.(*Datom)
b := bi.(*Datom)
cmp := a.attribute - b.attribute
if cmp != 0 {
return cmp
}
cmp = a.entity - b.entity
if cmp != 0 {
return cmp
}
cmp = a.value.Compare(b.value)
if cmp != 0 {
return cmp
}
return b.transaction - a.transaction
}
func CompareAvet(ai, bi interface{}) int {
a := ai.(*Datom)
b := bi.(*Datom)
cmp := a.attribute - b.attribute
if cmp != 0 {
return cmp
}
cmp = a.value.Compare(b.value)
if cmp != 0 {
return cmp
}
cmp = a.entity - b.entity
if cmp != 0 {
return cmp
}
return b.transaction - a.transaction
}
func CompareVaet(ai, bi interface{}) int {
a := ai.(*Datom)
b := bi.(*Datom)
cmp := a.value.Compare(b.value)
if cmp != 0 {
return cmp
}
cmp = a.attribute - b.attribute
if cmp != 0 {
return cmp
}
cmp = a.entity - b.entity
if cmp != 0 {
return cmp
}
return b.transaction - a.transaction
} | index/datom.go | 0.723798 | 0.400134 | datom.go | starcoder |
package xcom
import (
"github.com/DomBlack/advent-of-code-2018/lib/vectors"
"strconv"
"strings"
)
const DefaultHealth = 200
// All adjacent cells in "reading order"
var AdjacentCells = [4]vectors.Vec2{
{0, -1},
{-1, 0},
{1, 0},
{0, 1},
}
type Unit struct {
IsElf bool // Is this unit an elf? If not then it's a goblin
Health int
AttackPower int
Position vectors.Vec2
}
// Creates a new goblin at the given position
func NewGoblin(pos vectors.Vec2) *Unit {
return &Unit{
false,
DefaultHealth,
3,
pos,
}
}
// Creates a new elf at the given position
func NewElf(pos vectors.Vec2, attackPower int) *Unit {
return &Unit{
true,
DefaultHealth,
attackPower,
pos,
}
}
// Finds all enemy units on the map
func (u *Unit) FindTargets(m *Map) Units {
targets := make(Units, 0)
for _, possibleTarget := range m.Units {
if possibleTarget.IsElf != u.IsElf && !possibleTarget.IsDead() {
targets = append(targets, possibleTarget)
}
}
return targets
}
// Finds an adjacent target or nil, with the lowest health (in reading order if tied)
func (u *Unit) GetAdjacentTarget(m *Map) *Unit {
var adjacentUnit *Unit
for _, dir := range AdjacentCells {
adjacentPos := u.Position.Add(dir)
adjacentCell, found := m.Cells[adjacentPos]
// Is there an enemy in the adjacent cell?
if found &&
adjacentCell.Unit != nil &&
adjacentCell.Unit.IsElf != u.IsElf &&
!adjacentCell.Unit.IsDead() {
if adjacentUnit == nil || adjacentCell.Unit.Health < adjacentUnit.Health {
// No possible target yet, so this unit automatically becomes it
// Or the just found enemies health is lower than the one already found
adjacentUnit = adjacentCell.Unit
}
}
}
return adjacentUnit
}
// Finds all empty adjacent cells
func (u *Unit) GetAdjacentEmptyCells(m *Map) []vectors.Vec2 {
adjacentCells := make([]vectors.Vec2, 0)
for _, dir := range AdjacentCells {
adjacentPos := u.Position.Add(dir)
adjacentCell, found := m.Cells[adjacentPos]
if found && adjacentCell.IsEmpty() {
adjacentCells = append(adjacentCells, adjacentPos)
}
}
return adjacentCells
}
// This unit attacks the "other"
func (u *Unit) Attack(other *Unit) (wasKilled bool) {
other.Health -= u.AttackPower
wasKilled = other.IsDead()
return
}
// Is this unit dead?
func (u *Unit) IsDead() bool {
return u.Health <= 0
}
// Writes out unit information to the string
func (u Unit) String() string {
var str strings.Builder
if u.IsElf {
str.WriteRune('E')
} else {
str.WriteRune('G')
}
str.WriteRune('(')
str.WriteString(strconv.Itoa(u.Health))
str.WriteRune(')')
return str.String()
}
// A slice of units (for the reading order sort; top to bottom, then left to right)
type Units []*Unit
func (u Units) Len() int {
return len(u)
}
func (u Units) Swap(i, j int) {
u[i], u[j] = u[j], u[i]
}
func (u Units) Less(i, j int) bool {
return u[i].Position.IsReadingOrderLess(u[j].Position)
}
// All empty cells adjacent to these units
func (u Units) GetEmptyAdjacentCells(m *Map) []vectors.Vec2 {
emptyCells := make([]vectors.Vec2, 0)
for _, unit := range u {
for _, cell := range unit.GetAdjacentEmptyCells(m) {
emptyCells = append(emptyCells, cell)
}
}
return emptyCells
} | day-15/xcom/Unit.go | 0.763484 | 0.425963 | Unit.go | starcoder |
package restruct
import (
"encoding/binary"
"reflect"
)
/*
Unpack reads data from a byteslice into a structure.
Each structure field will be read sequentially based on a straightforward
interpretation of the type. For example, an int32 will be read as a 32-bit
signed integer, taking 4 bytes of memory. Structures and arrays are laid out
flat with no padding or metadata.
The behavior of deserialization can be customized using struct tags. The
following struct tag syntax is supported:
`struct:"[flags...]"`
Flags are comma-separated keys. The following are available:
type A bare type name, e.g. int32 or []string.
sizeof=[Field] Specifies that the field should be treated as a count of
the number of elements in Field.
skip=[Count] Skips Count bytes before the field. You can use this to
e.g. emulate C structure alignment.
big,msb Specifies big endian byte order. When applied to structs,
this will apply to all fields under the struct.
little,lsb Specifies little endian byte order. When applied to structs,
this will apply to all fields under the struct.
*/
func Unpack(data []byte, order binary.ByteOrder, v interface{}) (err error) {
defer func() {
if r := recover(); r != nil {
err = r.(error)
}
}()
val := reflect.ValueOf(v)
if val.Kind() == reflect.Ptr {
val = val.Elem()
}
d := decoder{order: order, buf: data}
d.read(fieldFromType(val.Type()), val)
return
}
/*
Pack writes data from a datastructure into a byteslice.
Each structure is serialized in the same way it would be deserialized with
Unpack. See Unpack documentation for the struct tag format.
*/
func Pack(order binary.ByteOrder, v interface{}) (data []byte, err error) {
defer func() {
if r := recover(); r != nil {
data = nil
err = r.(error)
}
}()
val := reflect.ValueOf(v)
if val.Kind() == reflect.Ptr {
val = val.Elem()
}
f := fieldFromType(val.Type())
data = make([]byte, f.SizeOf(val))
e := encoder{order: order, buf: data}
e.write(f, val)
return
} | vendor/gopkg.in/restruct.v1/packing.go | 0.70416 | 0.519704 | packing.go | starcoder |
package gokalman
import (
"fmt"
"math"
"github.com/gonum/matrix/mat64"
)
// NewVanilla returns a new Vanilla KF. To get the next estimate, simply push to
// the MeasChan the next measurement and read from StateEst and MeasEst to get
// the next state estimate (\hat{x}_{k+1}^{+}) and next measurement estimate (\hat{y}_{k+1}).
// The Covar channel stores the next covariance of the system (P_{k+1}^{+}).
// Parameters:
// - x0: initial state
// - Covar0: initial covariance matrix
// - F: state update matrix
// - G: control matrix (if all zeros, then control vector will not be used)
// - H: measurement update matrix
// - n: Noise
func NewVanilla(x0 *mat64.Vector, Covar0 mat64.Symmetric, F, G, H mat64.Matrix, noise Noise) (*Vanilla, *VanillaEstimate, error) {
// Let's check the dimensions of everything here to panic ASAP.
if err := checkMatDims(x0, Covar0, "x0", "Covar0", rows2cols); err != nil {
return nil, nil, err
}
if err := checkMatDims(F, Covar0, "F", "Covar0", rows2cols); err != nil {
return nil, nil, err
}
if err := checkMatDims(H, x0, "H", "x0", cols2rows); err != nil {
return nil, nil, err
}
// Populate with the initial values.
rowsH, _ := H.Dims()
cr, _ := Covar0.Dims()
predCovar := mat64.NewSymDense(cr, nil)
est0 := VanillaEstimate{x0, mat64.NewVector(rowsH, nil), mat64.NewVector(rowsH, nil), Covar0, predCovar, nil}
return &Vanilla{F, G, H, noise, !IsNil(G), est0, est0, 0, false}, &est0, nil
}
// NewPurePredictorVanilla returns a new Vanilla KF which only does prediction.
func NewPurePredictorVanilla(x0 *mat64.Vector, Covar0 mat64.Symmetric, F, G, H mat64.Matrix, noise Noise) (*Vanilla, *VanillaEstimate, error) {
// Let's check the dimensions of everything here to panic ASAP.
if err := checkMatDims(x0, Covar0, "x0", "Covar0", rows2cols); err != nil {
return nil, nil, err
}
if err := checkMatDims(F, Covar0, "F", "Covar0", rows2cols); err != nil {
return nil, nil, err
}
if err := checkMatDims(H, x0, "H", "x0", cols2rows); err != nil {
return nil, nil, err
}
// Populate with the initial values.
rowsH, _ := H.Dims()
cr, _ := Covar0.Dims()
predCovar := mat64.NewSymDense(cr, nil)
est0 := VanillaEstimate{x0, mat64.NewVector(rowsH, nil), mat64.NewVector(rowsH, nil), Covar0, predCovar, nil}
return &Vanilla{F, G, H, noise, !IsNil(G), est0, est0, 0, true}, &est0, nil
}
// Vanilla defines a vanilla kalman filter. Use NewVanilla to initialize.
type Vanilla struct {
F mat64.Matrix
G mat64.Matrix
H mat64.Matrix
Noise Noise
needCtrl bool
prevEst, initEst VanillaEstimate
step int
predictionOnly bool
}
func (kf *Vanilla) String() string {
return fmt.Sprintf("F=%v\nG=%v\nH=%v\n%s", mat64.Formatted(kf.F, mat64.Prefix(" ")), mat64.Formatted(kf.G, mat64.Prefix(" ")), mat64.Formatted(kf.H, mat64.Prefix(" ")), kf.Noise)
}
// GetStateTransition returns the F matrix.
func (kf *Vanilla) GetStateTransition() mat64.Matrix {
return kf.F
}
// GetInputControl returns the G matrix.
func (kf *Vanilla) GetInputControl() mat64.Matrix {
return kf.G
}
// GetMeasurementMatrix returns the H matrix.
func (kf *Vanilla) GetMeasurementMatrix() mat64.Matrix {
return kf.H
}
// SetStateTransition updates the F matrix.
func (kf *Vanilla) SetStateTransition(F mat64.Matrix) {
kf.F = F
}
// SetInputControl updates the F matrix.
func (kf *Vanilla) SetInputControl(G mat64.Matrix) {
kf.G = G
}
// SetMeasurementMatrix updates the H matrix.
func (kf *Vanilla) SetMeasurementMatrix(H mat64.Matrix) {
kf.H = H
}
// SetNoise updates the Noise.
func (kf *Vanilla) SetNoise(n Noise) {
kf.Noise = n
}
// GetNoise updates the F matrix.
func (kf *Vanilla) GetNoise() Noise {
return kf.Noise
}
// Reset reinitializes the KF with its initial estimate.
func (kf *Vanilla) Reset() {
kf.prevEst = kf.initEst
kf.step = 0
kf.Noise.Reset()
}
// Update implements the KalmanFilter interface.
func (kf *Vanilla) Update(measurement, control *mat64.Vector) (est Estimate, err error) {
if err = checkMatDims(control, kf.G, "control (u)", "G", rows2cols); kf.needCtrl && err != nil {
return nil, err
}
if err = checkMatDims(measurement, kf.H, "measurement (y)", "H", rows2rows); err != nil {
return nil, err
}
// Prediction step.
var xKp1Minus, xKp1Minus1, xKp1Minus2 mat64.Vector
xKp1Minus1.MulVec(kf.F, kf.prevEst.State())
if kf.needCtrl {
xKp1Minus2.MulVec(kf.G, control)
xKp1Minus.AddVec(&xKp1Minus1, &xKp1Minus2)
} else {
xKp1Minus = xKp1Minus1
}
xKp1Minus.AddVec(&xKp1Minus, kf.Noise.Process(kf.step))
// P_{k+1}^{-}
var Pkp1Minus, FP, FPFt mat64.Dense
FP.Mul(kf.F, kf.prevEst.Covariance())
FPFt.Mul(&FP, kf.F.T())
Pkp1Minus.Add(&FPFt, kf.Noise.ProcessMatrix())
// Compute estimated measurement update \hat{y}_{k}
var ykHat mat64.Vector
ykHat.MulVec(kf.H, kf.prevEst.State())
ykHat.AddVec(&ykHat, kf.Noise.Measurement(kf.step))
// Kalman gain
var PHt, HPHt, Kkp1 mat64.Dense
PHt.Mul(&Pkp1Minus, kf.H.T())
HPHt.Mul(kf.H, &PHt)
HPHt.Add(&HPHt, kf.Noise.MeasurementMatrix())
if ierr := HPHt.Inverse(&HPHt); ierr != nil {
//panic(fmt.Errorf("could not invert `H*P_kp1_minus*H' + R`: %s", ierr))
return nil, fmt.Errorf("could not invert `H*P_kp1_minus*H' + R`: %s", ierr)
}
Kkp1.Mul(&PHt, &HPHt)
if kf.predictionOnly {
// Note that in the case of a pure prediction, we set the prediction
// covariance and the covariance to Pkp1Minus.
Pkp1MinusSym, _ := AsSymDense(&Pkp1Minus)
rowsH, _ := kf.H.Dims()
est = VanillaEstimate{&xKp1Minus, &ykHat, mat64.NewVector(rowsH, nil), Pkp1MinusSym, Pkp1MinusSym, &Kkp1}
kf.prevEst = est.(VanillaEstimate)
kf.step++
return
}
// Measurement update
var innov, xkp1Plus, xkp1Plus1, xkp1Plus2 mat64.Vector
xkp1Plus1.MulVec(kf.H, &xKp1Minus) // Predicted measurement
innov.SubVec(measurement, &xkp1Plus1) // Innovation vector
if rX, _ := innov.Dims(); rX == 1 {
// xkp1Plus1 is a scalar and mat64 won't be happy, so fiddle around to get a vector.
var sKkp1 mat64.Dense
sKkp1.Scale(innov.At(0, 0), &Kkp1)
rGain, _ := sKkp1.Dims()
xkp1Plus2.AddVec(sKkp1.ColView(0), mat64.NewVector(rGain, nil))
} else {
xkp1Plus2.MulVec(&Kkp1, &innov)
}
xkp1Plus.AddVec(&xKp1Minus, &xkp1Plus2)
xkp1Plus.AddVec(&xkp1Plus, kf.Noise.Process(kf.step))
var Pkp1Plus, Pkp1Plus1, Kkp1H, Kkp1R, Kkp1RKkp1 mat64.Dense
Kkp1H.Mul(&Kkp1, kf.H)
n, _ := Kkp1H.Dims()
Kkp1H.Sub(Identity(n), &Kkp1H)
Pkp1Plus1.Mul(&Kkp1H, &Pkp1Minus)
Pkp1Plus.Mul(&Pkp1Plus1, Kkp1H.T())
Kkp1R.Mul(&Kkp1, kf.Noise.MeasurementMatrix())
Kkp1RKkp1.Mul(&Kkp1R, Kkp1.T())
Pkp1Plus.Add(&Pkp1Plus, &Kkp1RKkp1)
Pkp1MinusSym, err := AsSymDense(&Pkp1Minus)
if err != nil {
return nil, err
}
Pkp1PlusSym, err := AsSymDense(&Pkp1Plus)
if err != nil {
return nil, err
}
est = VanillaEstimate{&xkp1Plus, &ykHat, &innov, Pkp1PlusSym, Pkp1MinusSym, &Kkp1}
kf.prevEst = est.(VanillaEstimate)
kf.step++
return
}
// VanillaEstimate is the output of each update state of the Vanilla KF.
// It implements the Estimate interface.
type VanillaEstimate struct {
state, meas, innovation *mat64.Vector
covar, predCovar mat64.Symmetric
gain mat64.Matrix
}
// IsWithinNσ returns whether the estimation is within the 2σ bounds.
func (e VanillaEstimate) IsWithinNσ(N float64) bool {
for i := 0; i < e.state.Len(); i++ {
nσ := N * math.Sqrt(e.covar.At(i, i))
if e.state.At(i, 0) > nσ || e.state.At(i, 0) < -nσ {
return false
}
}
return true
}
// IsWithin2σ returns whether the estimation is within the 2σ bounds.
func (e VanillaEstimate) IsWithin2σ() bool {
return e.IsWithinNσ(2)
}
// State implements the Estimate interface.
func (e VanillaEstimate) State() *mat64.Vector {
return e.state
}
// Measurement implements the Estimate interface.
func (e VanillaEstimate) Measurement() *mat64.Vector {
return e.meas
}
// Innovation implements the Estimate interface.
func (e VanillaEstimate) Innovation() *mat64.Vector {
return e.innovation
}
// Covariance implements the Estimate interface.
func (e VanillaEstimate) Covariance() mat64.Symmetric {
return e.covar
}
// PredCovariance implements the Estimate interface.
func (e VanillaEstimate) PredCovariance() mat64.Symmetric {
return e.predCovar
}
// Gain the Estimate interface.
func (e VanillaEstimate) Gain() mat64.Matrix {
return e.gain
}
func (e VanillaEstimate) String() string {
state := mat64.Formatted(e.State(), mat64.Prefix(" "))
meas := mat64.Formatted(e.Measurement(), mat64.Prefix(" "))
covar := mat64.Formatted(e.Covariance(), mat64.Prefix(" "))
gain := mat64.Formatted(e.Gain(), mat64.Prefix(" "))
innov := mat64.Formatted(e.Innovation(), mat64.Prefix(" "))
predp := mat64.Formatted(e.PredCovariance(), mat64.Prefix(" "))
return fmt.Sprintf("{\ns=%v\ny=%v\nP=%v\nK=%v\nP-=%v\ni=%v\n}", state, meas, covar, gain, predp, innov)
} | vanilla.go | 0.796411 | 0.586848 | vanilla.go | starcoder |
package tile
import (
"bufio"
"encoding/binary"
"io"
)
const lower = 0x0F
type reader interface {
io.Reader
}
// Decoder for Tiles
type Decoder struct {
r reader
Dimensions int // Dimensions is the size of the Tile(s) to make
buf []byte
t Tile
}
// DecodeOption just an alias
type DecodeOption func(*Decoder)
// Dimensions are 8 for 8x8 Tile or 16 for 16x16 Tile
// Default value is 16.
func Dimensions(dim int) DecodeOption {
return func(d *Decoder) {
d.Dimensions = dim
}
}
// NewDecoder is a constructor ofc
func NewDecoder(r io.Reader, opts ...DecodeOption) *Decoder {
d := &Decoder{Dimensions: 16}
d.initReader(r)
for _, opt := range opts {
opt(d)
}
d.initBuf()
return d
}
func (d *Decoder) initBuf() {
if d.Dimensions == 8 {
d.buf = make([]byte, 32)
}
if d.Dimensions == 16 {
d.buf = make([]byte, 128)
}
}
func (d *Decoder) initReader(r io.Reader) {
if rr, ok := r.(reader); ok {
d.r = rr
} else {
d.r = bufio.NewReader(r)
}
}
// Decode reads data and then returns it as a Tile.
func (d *Decoder) decode() error {
_, err := d.r.Read(d.buf)
if err != nil {
return err
}
data := unpack32(d.buf)
d.t = Tile{0, data, 16}
return nil
}
// Decode reads a Tile from r
func (d *Decoder) Decode() (Tile, error) {
if err := d.decode(); err != nil {
return Tile{}, err
}
return d.t, nil
}
// unpack32 unpacks all 128bytes of a 16x16 tile 'at once'
func unpack32(b []byte) (t []byte) {
row := make([]uint32, 32)
for i := 0; i < 32; i++ {
//effectively: tile3, tile0, tile2, tile4
p := []byte{b[i+64], b[i], b[i+96], b[i+32]}
row[i] = binary.LittleEndian.Uint32(p)
}
transpose32(row)
t = toPixel32(row)
return reverse(t)
}
// Massages 32 uint32s into 64 'pixels'
func toPixel32(row []uint32) (t []byte) {
pix := make([]byte, 4)
t = make([]byte, 256)
for i, v := range row[16:] {
binary.LittleEndian.PutUint32(pix, v)
t[i] = pix[3] >> 4
t[i+16] = pix[3] & lower
t[i+32] = pix[2] >> 4
t[i+48] = pix[2] & lower
t[i+64] = pix[1] >> 4
t[i+80] = pix[1] & lower
t[i+96] = pix[0] >> 4
t[i+112] = pix[0] & lower
}
for i, v := range row[:16] {
binary.LittleEndian.PutUint32(pix, v)
t[i+128] = pix[3] >> 4
t[i+144] = pix[3] & lower
t[i+160] = pix[2] >> 4
t[i+176] = pix[2] & lower
t[i+192] = pix[1] >> 4
t[i+208] = pix[1] & lower
t[i+224] = pix[0] >> 4
t[i+240] = pix[0] & lower
}
return t
}
// Unpack16 unpacks all 32bytes of a 8x8 tile 'at once'
func unpack16(b []byte) (t []byte) {
row := make([]uint16, 16)
for i := 0; i < 16; i++ {
p := []byte{b[i], b[i+16]}
row[i] = binary.LittleEndian.Uint16(p)
}
transpose16(row)
t = toPixel16(row)
return reverse(t)
}
func toPixel16(row []uint16) (t []byte) {
lower := byte(15)
pix := make([]byte, 2)
t = make([]byte, 64)
for i, v := range row[8:] {
binary.LittleEndian.PutUint16(pix, v)
t[i] = pix[1] >> 4
t[i+8] = pix[1] & lower
t[i+16] = pix[0] >> 4
t[i+24] = pix[0] & lower
}
for i, v := range row[:8] {
binary.LittleEndian.PutUint16(pix, v)
t[i+32] = pix[1] >> 4
t[i+40] = pix[1] & lower
t[i+48] = pix[0] >> 4
t[i+56] = pix[0] & lower
}
return t
} | pkg/tile/reader.go | 0.67971 | 0.441191 | reader.go | starcoder |
package main
import (
"container/list"
)
/*
127 word ladder I
A transformation sequence from word beginWord to word endWord using a dictionary wordList is a sequence of words such that:
The first word in the sequence is beginWord.
The last word in the sequence is endWord.
Only one letter is different between each adjacent pair of words in the sequence.
Every word in the sequence is in wordList.
Given two words, beginWord and endWord, and a dictionary wordList, return the number of words in the shortest transformation
sequence from beginWord to endWord, or 0 if no such sequence exists.
basically it is shortest path in a graph, use bfs level traversal
iterate word with one letter differ from a-z, and determine if it is in wordDict
turn wordDict into hashSet first, for O(1) find
Example 1:
Input: beginWord = "hit", endWord = "cog", wordList = ["hot","dot","dog","lot","log","cog"]
Output: 5
Explanation: One shortest transformation is "hit" -> "hot" -> "dot" -> "dog" -> "cog" with 5 words.
Example 2:
Input: beginWord = "hit", endWord = "cog", wordList = ["hot","dot","dog","lot","log"]
Output: 0
Explanation: The endWord "cog" is not in wordList, therefore there is no possible transformation.
*/
// ladderLength returns the number of words in the shortest transformation
// sequence from beginWord to endWord, where each step changes exactly one
// letter and every intermediate word (including endWord) must appear in
// wordList. It returns 0 when no such sequence exists.
//
// The search is a breadth-first traversal of the implicit word graph: each
// BFS level adds one transformation, so the first time endWord is reached
// the path length is guaranteed minimal.
func ladderLength(beginWord string, endWord string, wordList []string) int {
	if len(wordList) == 0 {
		return 0
	}
	if beginWord == endWord {
		return 1
	}

	// Hash set of dictionary words for O(1) membership checks.
	dict := make(map[string]struct{}, len(wordList))
	for _, w := range wordList {
		dict[w] = struct{}{}
	}

	// neighbors yields every dictionary word differing from w by exactly one
	// letter, trying positions left to right and letters 'a'..'z' in order.
	// A single reusable byte buffer avoids per-candidate string slicing.
	neighbors := func(w string) []string {
		var out []string
		buf := []byte(w)
		for i := 0; i < len(buf); i++ {
			orig := buf[i]
			for c := byte('a'); c <= 'z'; c++ {
				if c == orig {
					continue
				}
				buf[i] = c
				if cand := string(buf); func() bool { _, ok := dict[cand]; return ok }() {
					out = append(out, cand)
				}
			}
			buf[i] = orig // restore before moving to the next position
		}
		return out
	}

	visited := map[string]struct{}{beginWord: {}}
	queue := list.New()
	queue.PushBack(beginWord)
	length := 1 // number of words on the path so far

	for queue.Len() > 0 {
		// Process one full BFS level per outer iteration so that `length`
		// counts transformation levels, not individual dequeues.
		for i, levelSize := 0, queue.Len(); i < levelSize; i++ {
			front := queue.Front()
			word := front.Value.(string)
			queue.Remove(front)
			for _, next := range neighbors(word) {
				if next == endWord {
					return length + 1
				}
				if _, seen := visited[next]; !seen {
					visited[next] = struct{}{}
					queue.PushBack(next)
				}
			}
		}
		length++
	}
	return 0
}
// nextWord returns every word in dict that differs from word by exactly one
// letter. Candidates are generated position by position (left to right),
// substituting each lowercase letter 'a'..'z' in order, so the result order
// is deterministic. The returned slice is non-nil even when empty.
func nextWord(dict map[string]struct{}, word string) []string {
	nextWordList := make([]string, 0)
	// Mutate a single byte buffer instead of concatenating three string
	// slices per candidate; string(int) conversions are also avoided
	// (go vet flags them as one-rune string conversions).
	buf := []byte(word)
	for i := 0; i < len(buf); i++ {
		orig := buf[i]
		for c := byte('a'); c <= 'z'; c++ {
			if c == orig {
				continue
			}
			buf[i] = c
			candidate := string(buf)
			// Direct map lookup: one hash per candidate, no helper call.
			if _, ok := dict[candidate]; ok {
				nextWordList = append(nextWordList, candidate)
			}
		}
		buf[i] = orig // restore before advancing to the next position
	}
	return nextWordList
}
// containsStr reports whether target is a member of the dict set.
func containsStr(dict map[string]struct{}, target string) bool {
	if _, found := dict[target]; found {
		return true
	}
	return false
}
/*
126 WordLadderII
Given two words (beginWord and endWord), and a dictionary's word list, find all shortest transformation
sequence(s) from beginWord to endWord, such that:
Only one letter can be changed at a time
Each transformed word must exist in the word list. Note that beginWord is not a transformed word.
Note:
Return an empty list if there is no such transformation sequence.
All words have the same length.
All words contain only lowercase alphabetic characters.
You may assume no duplicates in the word list.
You may assume beginWord and endWord are non-empty and are not the same.
using bfs to find depth, and get node -> depth relationship, using dfs to recursively
find all paths with the rel got from bfs result
Example 1:
Input:
beginWord = "hit",
endWord = "cog",
wordList = ["hot","dot","dog","lot","log","cog"]
Output:
[
["hit","hot","dot","dog","cog"],
["hit","hot","lot","log","cog"]
]
Example 2:
Input:
beginWord = "hit"
endWord = "cog"
wordList = ["hot","dot","dog","lot","log"]
Output: []
Explanation: The endWord "cog" is not in wordList, therefore no possible transformation.
*/
// findLadders returns every shortest transformation sequence from beginWord
// to endWord. A BFS (markDepth) first labels each reachable word with its
// distance from beginWord; a DFS (wordLadderHelper) then walks only edges
// that advance that distance by exactly one, which enumerates precisely the
// shortest paths and nothing else.
func findLadders(beginWord string, endWord string, wordList []string) [][]string {
	dict := make(map[string]struct{}, len(wordList))
	for _, dictWord := range wordList {
		dict[dictWord] = struct{}{}
	}
	result := make([][]string, 0)
	depth := markDepth(beginWord, endWord, dict)
	if depth == nil {
		// endWord was never reached by the BFS, so there are no sequences.
		// (Previously a nil map was handed to the DFS, which only worked
		// because reads from a nil map yield the zero value.)
		return result
	}
	wordLadderHelper(depth, &result, beginWord, beginWord, endWord, dict, []string{beginWord})
	return result
}
// bfs
func markDepth(beginWord string, endWord string, dict map[string]struct{}) map[string]int {
queue := list.New()
depth := make(map[string]int)
queue.PushBack(beginWord)
depth[beginWord] = 1
for queue.Len() > 0 {
ele := queue.Front().Value.(string)
queue.Remove(queue.Front())
for _, word := range nextWord(dict, ele) {
if _, exist := depth[word]; !exist {
depth[word] = depth[ele] + 1
queue.PushBack(word)
if word == endWord {
return depth
}
}
}
}
return nil
}
// wordLadderHelper performs the DFS phase of findLadders. It extends the
// partial path in result one word at a time, only ever stepping to a
// neighbor whose BFS depth is exactly one greater than the current word's,
// so every completed path is a shortest path. Completed paths (ending in
// endWord) are copied and appended to *results.
//
// depth maps each word to its BFS distance from startWord; result is the
// path built so far (it always ends with current).
func wordLadderHelper(
	depth map[string]int, results *[][]string,
	current string, startWord string,
	endWord string, dict map[string]struct{},
	result []string) {
	if result[len(result)-1] == endWord {
		// Snapshot the path: result's backing array is mutated as the DFS
		// backtracks, so the stored copy must not alias it.
		tmp := make([]string, len(result), len(result))
		copy(tmp, result)
		*results = append(*results, tmp)
	}
	for _, word := range nextWord(dict, current) {
		// Only follow edges that advance the BFS depth by exactly one;
		// anything else cannot lie on a shortest path.
		if depth[word] != depth[current] + 1 {
			continue
		}
		result = append(result, word)
		wordLadderHelper(depth, results, word, startWord, endWord, dict, result)
		result = result[:len(result)-1] // backtrack
	}
} | bfs/127-word-ladder.go | 0.612657 | 0.481149 | 127-word-ladder.go | starcoder |
// We change our concrete type System. Instead of using two concrete types Xenia and Pillar, we
// use 2 interface types Puller and Storer. Our concrete type System where we can have concrete
// behaviors is now based on the embedding of 2 interface types. It means that we can inject any
// data, not based on the common DNA but on the data that providing the capability, the behavior
// that we need.
// Now we can be fully decouple because any value that implements the Puller interface can be store
// inside the System (same with Storer interface). We can create multiple Systems and that data can
// be passed in Copy.
// We don't need method here. We just need one function that accept data and its behavior will
// change based on the data we put in.
// Now System is not based on Xenia and Pillar anymore. It is based on 2 interfaces, one that
// stores Xenia and one that stores Pillar. We get the extra layer of decoupling.
// If the system change, no big deal. We replace the system as we need to during the program
// startup.
// We solve this problem. We put this in production. Every single refactoring that we did went into
// production before we did the next one. We keep minimizing technical debt.
// System ps
// -------------------- ---------
// | _________ |-pull | |-pull
// | | | |-store | *System |-store
// | | *Xenia |-pull | | |
// | | | | <------------------ ---------
// | --------- | p | |
// | | | | ----- | * |
// | | * |------- |-> | |-pull | |
// | | | | ----- ---------
// | --------- |
// |
// | __________ |
// | | | |
// | | * Pillar |-store |
// | | | |
// | ---------- | s
// | | | | ----- p s
// | | * |------ |-> | |-store --------- ---------
// | | | | ----- | |-pull | |-store
// | ---------- | | *System | | *System |
// -------------------- | | | |
// A --------- ---------
// | | | | |
// ------------------------------------------| * | ------- | * |
// | | | |
// --------- ---------
package main
import (
"errors"
"fmt"
"io"
"math/rand"
"time"
)
func init() {
rand.Seed(time.Now().UnixNano())
}
// Data is the structure of the data we are copying.
type Data struct {
Line string
}
// Puller declares behavior for pulling data.
type Puller interface {
Pull(d *Data) error
}
// Storer declares behavior for storing data.
type Storer interface {
Store(d *Data) error
}
// PullStorer declares behavior for both pulling and storing.
type PullStorer interface {
Puller
Storer
}
// Xenia is a system we need to pull data from.
type Xenia struct {
Host string
Timeout time.Duration
}
// Pull knows how to pull data out of Xenia. It randomly simulates three
// outcomes: end of stream (io.EOF), a read failure, or a successful read
// that fills in d.
func (*Xenia) Pull(d *Data) error {
	n := rand.Intn(10)
	if n == 1 || n == 9 {
		return io.EOF
	}
	if n == 5 {
		return errors.New("Error reading data from Xenia")
	}
	d.Line = "Data"
	fmt.Println("In:", d.Line)
	return nil
}
// Pillar is a system we need to store data into.
type Pillar struct {
Host string
Timeout time.Duration
}
// Store knows how to store data into Pillar. It prints the line to stdout
// to simulate the write and always succeeds.
func (*Pillar) Store(d *Data) error {
	fmt.Println("Out:", d.Line)
	return nil
}
// System wraps Pullers and Storers together into a single system. The two
// embedded interfaces promote their methods, so a System satisfies
// PullStorer with whatever concrete Puller and Storer it is composed from.
type System struct {
	Puller
	Storer
}
// pull knows how to pull bulks of data from any Puller. It fills data in
// order and returns the number of elements pulled; on failure it returns
// the index that failed together with the error.
func pull(p Puller, data []Data) (int, error) {
	for i := 0; i < len(data); i++ {
		if err := p.Pull(&data[i]); err != nil {
			return i, err
		}
	}
	return len(data), nil
}
// store knows how to store bulks of data into any Storer. It stores the
// elements in order and returns how many were stored; on failure it
// returns the failing index together with the error.
func store(s Storer, data []Data) (int, error) {
	for i := 0; i < len(data); i++ {
		if err := s.Store(&data[i]); err != nil {
			return i, err
		}
	}
	return len(data), nil
}
// Copy knows how to pull and store data from any System. It loops forever,
// moving data in batches of the given size, until pull reports an error
// (io.EOF on a clean end of stream — callers treat that as success). Any
// partial batch that was pulled is stored before the error is returned.
func Copy(ps PullStorer, batch int) error {
	data := make([]Data, batch)
	for {
		i, err := pull(ps, data)
		if i > 0 {
			// Store whatever was pulled, even on a partial batch.
			if _, err := store(ps, data[:i]); err != nil {
				return err
			}
		}
		if err != nil {
			return err
		}
	}
}
func main() {
sys := System{
Puller: &Xenia{
Host: "localhost:8000",
Timeout: time.Second,
},
Storer: &Pillar{
Host: "localhost:9000",
Timeout: time.Second,
},
}
if err := Copy(&sys, 3); err != io.EOF {
fmt.Println(err)
}
} | go/design/decoupling_4.go | 0.63624 | 0.634331 | decoupling_4.go | starcoder |
package preflight
import (
"crypto/sha256"
"encoding/hex"
"log"
"math"
"math/big"
"strconv"
"github.com/airbnb/rudolph/pkg/clock"
"github.com/airbnb/rudolph/pkg/model/syncstate"
"github.com/pkg/errors"
)
// Select a MOD that will enable a proper dithering technique such as a proven cyclic group
// https://mathworld.wolfram.com/ModuloMultiplicationGroup.html
var MOD float64 = 11
const (
daysToElapseUntilRefreshCleanSync = 7
)
// Steps to determine if a Clean Sync must be forced upon a requesting machine
// 1. Determined via the rules counts, ie if the returned number of rules from the machine equals zero, force a clean sync
// Note: the DB may also have zero rules but this is fine then as the resulting sync time is the same
// 2. Periodically refresh systems via a clean sync --> this is determined by the amount of days that elapse since a machines last clean sync
// Force this after 1d10 + 7 days.
func (c concreteCleanSyncService) determineCleanSync(machineID string, preflightRequest *PreflightRequest, syncState *syncstate.SyncStateRow) (performCleanSync bool, err error) {
// Determine clean sync via rule count
performCleanSync = determineCleanSyncByRuleCount(preflightRequest)
if performCleanSync {
return
}
// Periodically re-force a clean sync to keep rules fresh
performCleanSync, err = determineCleanSyncRefresh(
c.timeProvider,
machineID,
syncState,
)
return
}
// determineCleanSyncByRuleCount reports whether a clean sync should be
// forced based on the rule counts the client reported: a client holding
// zero rules of any kind needs a full (clean) sync.
func determineCleanSyncByRuleCount(preflightRequest *PreflightRequest) bool {
	ruleCount := preflightRequest.BinaryRuleCount + preflightRequest.CertificateRuleCount + preflightRequest.CompilerRuleCount + preflightRequest.TransitiveRuleCount
	return ruleCount == 0
}
func determineCleanSyncRefresh(timeProvider clock.TimeProvider, machineID string, syncState *syncstate.SyncStateRow) (performCleanSync bool, err error) {
daysSinceLastCleanSync := daysSinceLastCleanSync(timeProvider, syncState)
// To reduce stampeding, we introduce a bit of dithering by using the machineID as the seed to randomize the number of days required to elapse before performing a clean sync.
// Given the same MachineID, performCleanSync will always provide the same chaos int and evenly space all clients out to require clean sync
// 7 days + 1d10 (Based on the MachineID input) * 10 minutes
performCleanSync, err = determinePeriodicRefreshCleanSync(
machineID,
daysSinceLastCleanSync,
)
return
}
// Returns the number of full 24 hour days since the previous clean sync
// Returns an unusually large number (99999999) when it thinks no sync has successfully happened before.
func daysSinceLastCleanSync(timeProvider clock.TimeProvider, prevSyncState *syncstate.SyncStateRow) int {
infinity := 99999999
if prevSyncState == nil {
return infinity
}
if prevSyncState.LastCleanSync == "" {
return infinity
}
lastCleanSyncTime, err := clock.ParseRFC3339(prevSyncState.LastCleanSync)
if err != nil {
// Disregard the error
log.Printf("failed to determine number of days since last sync from value: (%s); going to clean sync anyway", prevSyncState.LastCleanSync)
return infinity
}
currentTime := timeProvider.Now().UTC()
diff := currentTime.Sub(lastCleanSyncTime)
return int(diff.Hours() / 24)
}
// determinePeriodicRefreshCleanSync decides whether enough days have
// elapsed since the last clean sync to force a refresh. The machineID is
// hashed into a stable "chaos" offset that is added to the base threshold,
// dithering clients so they do not all clean-sync on the same day; for a
// given machineID the offset never changes.
func determinePeriodicRefreshCleanSync(machineID string, daysSinceLastSync int) (performCleanSync bool, err error) {
	chaos, err := machineIDToInt(machineID)
	if err != nil {
		return false, err
	}
	// Defensive range check; chaos is expected to land in [0, 10].
	if chaos > 11 || chaos < 0 {
		return false, errors.New("chaos was greater than 11 or less than 0")
	}
	return daysSinceLastSync >= daysToElapseUntilRefreshCleanSync+chaos, nil
}
// machineIDToInt deterministically maps machineID to a small integer by
// hashing it with SHA-256 and reducing the digest modulo MOD. The same
// machineID always yields the same value, which is what keeps the
// clean-sync dithering stable per machine.
//
// NOTE(review): the 256-bit digest is converted to a float64 before the
// modulo. float64 has only a 53-bit mantissa, so ParseFloat rounds the
// huge integer and math.Mod operates on the rounded value; the result is
// still deterministic, but it is not the true digest mod MOD and its
// distribution is not guaranteed uniform. big.Int.Mod would be exact, but
// changing it now would reshuffle every machine's sync day — confirm
// before fixing.
func machineIDToInt(machineID string) (int, error) {
	var result int
	machineIDHash := sha256.New()
	_, err := machineIDHash.Write([]byte(machineID))
	if err != nil {
		return result, errors.Wrap(err, "error generating hash from machineID")
	}
	// Convert machineIDHash into a hex which will be converted into a *int
	bigInt := new(big.Int)
	bigInt.SetString(hex.EncodeToString(machineIDHash.Sum(nil)), 16)
	//machineIDFloat is required to perform mod math so convert the big int into a float
	machineIDFloat, err := strconv.ParseFloat(bigInt.String(), 64)
	if err != nil {
		return result, errors.Wrap(err, "error parsing machineID into a float")
	}
	// chaos number is 1d10 based on the machineID to remain consistent
	result = int(math.Mod(machineIDFloat, MOD))
	return result, nil
} | internal/handlers/preflight/clean_sync.go | 0.682997 | 0.410579 | clean_sync.go | starcoder |
It includes a variety of resampling filters to handle interpolation in case that upsampling or downsampling is required.*/
package transform
import "math"
// ResampleFilter is used to evaluate sample points and interpolate between them.
// Support is the number of points required by the filter per 'side'.
// For example, a support of 1.0 means that the filter will get pixels on
// positions -1 and +1 away from it.
// Fn is the resample filter function to evaluate the samples.
type ResampleFilter struct {
Support float64
Fn func(x float64) float64
}
// NearestNeighbor resampling filter assigns to each point the sample point nearest to it.
var NearestNeighbor ResampleFilter
// Box resampling filter, only let pass values in the x < 0.5 range from sample.
// It produces similar results to the Nearest Neighbor method.
var Box ResampleFilter
// Linear resampling filter interpolates linearly between the two nearest samples per dimension.
var Linear ResampleFilter
// Gaussian resampling filter interpolates using a Gaussian function between the two nearest
// samples per dimension.
var Gaussian ResampleFilter
// MitchellNetravali resampling filter interpolates between the four nearest samples per dimension.
var MitchellNetravali ResampleFilter
// CatmullRom resampling filter interpolates between the four nearest samples per dimension.
var CatmullRom ResampleFilter
// Lanczos resampling filter interpolates between the six nearest samples per dimension.
var Lanczos ResampleFilter
func init() {
NearestNeighbor = ResampleFilter{
Support: 0,
Fn: nil,
}
Box = ResampleFilter{
Support: 0.5,
Fn: func(x float64) float64 {
if math.Abs(x) < 0.5 {
return 1
}
return 0
},
}
Linear = ResampleFilter{
Support: 1.0,
Fn: func(x float64) float64 {
x = math.Abs(x)
if x < 1.0 {
return 1.0 - x
}
return 0
},
}
Gaussian = ResampleFilter{
Support: 1.0,
Fn: func(x float64) float64 {
x = math.Abs(x)
if x < 1.0 {
exp := 2.0
x *= 2.0
y := math.Pow(0.5, math.Pow(x, exp))
base := math.Pow(0.5, math.Pow(2, exp))
return (y - base) / (1 - base)
}
return 0
},
}
MitchellNetravali = ResampleFilter{
Support: 2.0,
Fn: func(x float64) float64 {
b := 1.0 / 3
c := 1.0 / 3
var w [4]float64
x = math.Abs(x)
if x < 1.0 {
w[0] = 0
w[1] = 6 - 2*b
w[2] = (-18 + 12*b + 6*c) * x * x
w[3] = (12 - 9*b - 6*c) * x * x * x
} else if x <= 2.0 {
w[0] = 8*b + 24*c
w[1] = (-12*b - 48*c) * x
w[2] = (6*b + 30*c) * x * x
w[3] = (-b - 6*c) * x * x * x
} else {
return 0
}
return (w[0] + w[1] + w[2] + w[3]) / 6
},
}
CatmullRom = ResampleFilter{
Support: 2.0,
Fn: func(x float64) float64 {
b := 0.0
c := 0.5
var w [4]float64
x = math.Abs(x)
if x < 1.0 {
w[0] = 0
w[1] = 6 - 2*b
w[2] = (-18 + 12*b + 6*c) * x * x
w[3] = (12 - 9*b - 6*c) * x * x * x
} else if x <= 2.0 {
w[0] = 8*b + 24*c
w[1] = (-12*b - 48*c) * x
w[2] = (6*b + 30*c) * x * x
w[3] = (-b - 6*c) * x * x * x
} else {
return 0
}
return (w[0] + w[1] + w[2] + w[3]) / 6
},
}
Lanczos = ResampleFilter{
Support: 3.0,
Fn: func(x float64) float64 {
x = math.Abs(x)
if x == 0 {
return 1.0
} else if x < 3.0 {
return (3.0 * math.Sin(math.Pi*x) * math.Sin(math.Pi*(x/3.0))) / (math.Pi * math.Pi * x * x)
}
return 0.0
},
}
} | transform/filters.go | 0.746971 | 0.867654 | filters.go | starcoder |
package rom
// seasonsChest constructs a MutableSlot from a treasure name and an address in
// bank $15, where the ID and sub-ID are two consecutive bytes at that address.
// This applies to almost all chests, and exclusively to chests.
func seasonsChest(treasure string, addr uint16,
group, room, mode, coords byte) *MutableSlot {
return basicSlot(treasure, 0x15, addr, addr+1, group, room, mode, coords)
}
// seasonsScriptItem constructs a MutableSlot from a treasure name and an
// address in bank $0b, where the ID and sub-ID are two consecutive bytes at
// that address. This applies to most items given by NPCs.
func seasonsScriptItem(treasure string, addr uint16,
group, room, mode, coords byte) *MutableSlot {
return basicSlot(treasure, 0x0b, addr, addr+1, group, room, mode, coords)
}
// seasonsFoundItem constructs a MutableSlot from a treasure name and an address in
// bank $09, where the sub-ID and ID (in that order) are two consecutive bytes
// at that address. This applies to most items that are found lying around.
func seasonsFoundItem(treasure string, addr uint16,
group, room, mode, coords byte) *MutableSlot {
return basicSlot(treasure, 0x09, addr+1, addr, group, room, mode, coords)
}
var seasonsSlots = map[string]*MutableSlot{
// holodrum
"eyeglass lake, across bridge": seasonsChest(
"gasha seed", 0x4f92, 0x00, 0xb8, collectChest, 0xb8),
"maku tree": &MutableSlot{
treasureName: "gnarled key",
idAddrs: []Addr{{0x15, 0x613a}, {0x09, 0x7e16}},
subIDAddrs: []Addr{{0x15, 0x613d}, {0x09, 0x7e19}},
group: 0x02,
room: 0x0b,
collectMode: collectFall,
mapCoords: 0xc9,
},
"horon village SW chest": seasonsChest(
"rupees, 20", 0x4f7e, 0x00, 0xf5, collectChest, 0xf5),
"horon village SE chest": seasonsChest(
"rupees, 20", 0x4f82, 0x00, 0xf9, collectChest, 0xf9),
"holly's house": seasonsScriptItem(
"shovel", 0x6a6c, 0x03, 0xa3, collectFind2, 0x7f),
"chest on top of D2": seasonsChest(
"gasha seed", 0x4f86, 0x00, 0x8e, collectChest, 0x8e),
"blaino prize": seasonsScriptItem(
"gasha seed", 0x64cc, 0x03, 0xb4, collectFind1, 0x78),
"floodgate keeper's house": seasonsFoundItem(
"floodgate key", 0x6281, 0x03, 0xb5, collectFind1, 0x62),
"spool swamp cave": &MutableSlot{
treasureName: "square jewel",
idAddrs: []Addr{{0x0b, 0x7395}},
subIDAddrs: []Addr{{0x0b, 0x7399}},
group: 0x04,
room: 0xfa,
collectMode: collectChest,
mapCoords: 0xc2,
},
"moblin keep": seasonsChest(
"piece of heart", 0x4f8e, 0x00, 0x5b, collectChest, 0x5b),
"master diver's challenge": seasonsChest(
"master's plaque", 0x510a, 0x05, 0xbc, collectChest, 0x2e),
"master diver's reward": seasonsScriptItem( // addr set at EOB
"flippers", 0x0000, 0x05, 0xbd, collectNil, 0x2e), // special case
"spring banana tree": seasonsFoundItem(
"spring banana", 0x66c6, 0x00, 0x0f, collectFind2, 0x0f),
"goron mountain, across pits": seasonsFoundItem(
"dragon key", 0x62a3, 0x00, 0x1a, collectFind1, 0x1a),
"diving spot outside D4": seasonsScriptItem(
"pyramid jewel", 0x734e, 0x07, 0xe5, collectUnderwater, 0x1d),
"black beast's chest": seasonsChest(
"x-shaped jewel", 0x4f8a, 0x00, 0xf4, collectChest, 0xf4),
"old man in treehouse": seasonsScriptItem(
"round jewel", 0x7332, 0x03, 0x94, collectFind2, 0xb5),
"lost woods": &MutableSlot{
treasureName: "sword 2",
idAddrs: []Addr{{0x0b, 0x6418}, {0x0b, 0x641f}},
subIDAddrs: []Addr{{0x0b, 0x6419}, {0x0b, 0x6420}},
group: 0x00,
room: 0xc9,
collectMode: collectFind1,
mapCoords: 0x40,
},
"samasa desert pit": &MutableSlot{
treasureName: "<NAME>",
idAddrs: []Addr{{0x09, 0x648d}, {0x0b, 0x60b1}},
subIDAddrs: []Addr{{0x09, 0x648c}},
group: 0x05,
room: 0xd2,
collectMode: collectFind2,
mapCoords: 0xbf,
},
"samasa desert chest": seasonsChest(
"blast ring", 0x4f9a, 0x00, 0xff, collectChest, 0xff),
"western coast, beach chest": seasonsChest(
"rang ring L-1", 0x4f96, 0x00, 0xe3, collectChest, 0xe3),
"western coast, in house": seasonsChest(
"bombs, 10", 0x4fac, 0x03, 0x88, collectChest, 0xd2),
"cave south of mrs. ruul": seasonsChest(
"octo ring", 0x5081, 0x04, 0xe0, collectChest, 0xb3),
"cave north of D1": seasonsChest(
"quicksand ring", 0x5085, 0x04, 0xe1, collectChest, 0x87),
"cave outside D2": seasonsChest(
"moblin ring", 0x50fe, 0x05, 0xb3, collectChest, 0x8e),
"woods of winter, 1st cave": seasonsChest(
"rupees, 30", 0x5102, 0x05, 0xb4, collectChest, 0x7d),
"sunken city, summer cave": seasonsChest(
"gasha seed", 0x5106, 0x05, 0xb5, collectChest, 0x4f),
"chest in master diver's cave": seasonsChest(
"rupees, 50", 0x510e, 0x05, 0xbd, collectChest, 0x2e),
"dry eyeglass lake, east cave": seasonsChest(
"piece of heart", 0x5112, 0x05, 0xc0, collectChest, 0xaa),
"chest in goron mountain": seasonsChest(
"armor ring L-2", 0x511a, 0x05, 0xc8, collectChest, 0x18),
"natzu region, across water": seasonsChest(
"rupees, 50", 0x5122, 0x05, 0x0e, collectChest, 0x49),
"mt. cucco, talon's cave": seasonsChest(
"subrosian ring", 0x511e, 0x05, 0xb6, collectChest, 0x1b),
"tarm ruins, under tree": seasonsChest(
"gasha seed", 0x4fa8, 0x03, 0x9b, collectChest, 0x10),
"eastern suburbs, on cliff": seasonsChest(
"gasha seed", 0x5089, 0x04, 0xf7, collectChest, 0xcc),
"dry eyeglass lake, west cave": &MutableSlot{
treasureName: "rupees, 100",
idAddrs: []Addr{{0x0b, 0x73a1}},
subIDAddrs: []Addr{{0x0b, 0x73a5}},
group: 0x04,
room: 0xfb,
collectMode: collectChest,
mapCoords: 0xa7,
},
"woods of winter, 2nd cave": &MutableSlot{
treasureName: "gasha seed",
idAddrs: []Addr{{0x0a, 0x5003}},
subIDAddrs: []Addr{{0x0a, 0x5008}},
group: 0x05,
room: 0x12,
collectMode: collectChest,
mapCoords: 0x7e,
},
// dummy slots for bombs and shield
"shop, 20 rupees": &MutableSlot{
treasureName: "bombs, 10",
group: 0x03,
room: 0xa6,
collectMode: collectNil,
mapCoords: 0xe6,
},
"shop, 30 rupees": &MutableSlot{
treasureName: "wooden shield",
group: 0x03,
room: 0xa6,
collectMode: collectNil,
mapCoords: 0xe6,
},
"shop, 150 rupees": &MutableSlot{
treasureName: "strange flute",
idAddrs: []Addr{{0x08, 0x4ce8}},
subIDAddrs: []Addr{{0x08, 0x4ce9}},
group: 0x03,
room: 0xa6,
collectMode: collectNil,
mapCoords: 0xe6,
},
"member's shop 1": &MutableSlot{
treasureName: "satchel 2",
idAddrs: []Addr{{0x08, 0x4cce}},
subIDAddrs: []Addr{{0x08, 0x4ccf}},
group: 0x03,
room: 0xb0,
collectMode: collectNil,
mapCoords: 0xe6,
},
"member's shop 2": &MutableSlot{
treasureName: "gasha seed",
idAddrs: []Addr{{0x08, 0x4cd2}},
subIDAddrs: []Addr{{0x08, 0x4cd3}},
group: 0x03,
room: 0xb0,
collectMode: collectNil,
mapCoords: 0xe6,
},
"member's shop 3": &MutableSlot{
treasureName: "treasure map",
idAddrs: []Addr{{0x08, 0x4cd8}},
subIDAddrs: []Addr{{0x08, 0x4cd9}},
group: 0x03,
room: 0xb0,
collectMode: collectNil,
mapCoords: 0xe6,
},
// subrosia
"tower of winter": seasonsScriptItem(
"winter", 0x4fc5, 0x05, 0xf2, collectFind1, 0x9a),
"tower of summer": seasonsScriptItem(
"summer", 0x4fb9, 0x05, 0xf8, collectFind1, 0xb0),
"tower of spring": seasonsScriptItem(
"spring", 0x4fb5, 0x05, 0xf5, collectFind1, 0x1e),
"tower of autumn": seasonsScriptItem(
"autumn", 0x4fc1, 0x05, 0xfb, collectFind1, 0xb9),
"subrosian dance hall": seasonsScriptItem(
"boomerang 1", 0x6646, 0x03, 0x95, collectFind2, 0x9a),
"temple of seasons": &MutableSlot{
treasureName: "rod",
idAddrs: []Addr{{0x15, 0x70ce}},
subIDAddrs: []Addr{{0x15, 0x70cc}},
group: 0x03,
room: 0xac,
collectMode: collectNil,
mapCoords: 0x9a,
},
"subrosia seaside": &MutableSlot{ // addrs set dynamically at EOB
treasureName: "star ore",
idAddrs: []Addr{{0x08, 0x0000}},
subIDAddrs: []Addr{{0x08, 0x0000}},
group: 0x01,
room: 0x66,
collectMode: collectDig,
mapCoords: 0xb0,
},
"subrosian wilds chest": seasonsChest(
"blue ore", 0x4f9f, 0x01, 0x41, collectChest, 0x1e),
"subrosia village chest": seasonsChest(
"red ore", 0x4fa3, 0x01, 0x58, collectChest, 0xb9),
"subrosia, open cave": seasonsChest(
"gasha seed", 0x5095, 0x04, 0xf1, collectChest, 0x25),
"subrosia, locked cave": seasonsChest(
"gasha seed", 0x5116, 0x05, 0xc6, collectChest, 0xb0),
"subrosia market, 1st item": &MutableSlot{
treasureName: "ribbon",
idAddrs: []Addr{{0x09, 0x77da}},
subIDAddrs: []Addr{{0x09, 0x77db}},
group: 0x03,
room: 0xa0,
collectMode: collectNil,
mapCoords: 0xb0,
},
"subrosia market, 2nd item": &MutableSlot{
treasureName: "rare peach stone",
idAddrs: []Addr{{0x09, 0x77e2}},
subIDAddrs: []Addr{{0x09, 0x77e3}},
group: 0x03,
room: 0xa0,
collectMode: collectNil,
mapCoords: 0xb0,
},
"subrosia market, 5th item": &MutableSlot{
treasureName: "member's card",
idAddrs: []Addr{{0x09, 0x77f4}},
subIDAddrs: []Addr{{0x09, 0x77f5}},
group: 0x03,
room: 0xa0,
collectMode: collectNil,
mapCoords: 0xb0,
},
"great furnace": &MutableSlot{ // addrs set dynamically at EOB
treasureName: "hard ore",
idAddrs: []Addr{{0x15, 0x0000}, {0x09, 0x66eb}},
subIDAddrs: []Addr{{0x15, 0x0000}, {0x09, 0x66ea}},
group: 0x03,
room: 0x8e,
collectMode: collectFind2,
mapCoords: 0xb9,
},
"subrosian smithy": &MutableSlot{
treasureName: "shield L-2",
idAddrs: []Addr{{0x15, 0x62be}},
subIDAddrs: []Addr{{0x15, 0x62b4}},
group: 0x03,
room: 0x97,
collectMode: collectFind2,
mapCoords: 0x25,
},
// hero's cave
"d0 sword chest": &MutableSlot{
treasureName: "sword 1",
idAddrs: []Addr{{0x0a, 0x7b90}},
paramAddrs: []Addr{{0x0a, 0x7b92}},
textAddrs: []Addr{{0x0a, 0x7b9c}},
gfxAddrs: []Addr{{0x3f, 0x6676}},
group: 0x04,
room: 0x06,
collectMode: collectNil,
mapCoords: 0xd4,
},
"d0 rupee chest": seasonsChest(
"rupees, 30", 0x4fb5, 0x04, 0x05, collectChest, 0xd4),
// d1
"d1 basement": seasonsFoundItem(
"satchel 1", 0x66b1, 0x06, 0x09, collectFind2, 0x96),
"d1 block-pushing room": seasonsChest(
"gasha seed", 0x4fbd, 0x04, 0x0d, collectChest, 0x96),
"d1 railway chest": seasonsChest(
"bombs, 10", 0x4fc5, 0x04, 0x10, collectChest, 0x96),
"d1 floormaster room": seasonsChest(
"discovery ring", 0x4fd1, 0x04, 0x17, collectChest, 0x96),
"d1 lever room": seasonsChest(
"compass", 0x4fc1, 0x04, 0x0f, collectChest2, 0x96),
"d1 stalfos chest": seasonsChest(
"dungeon map", 0x4fd5, 0x04, 0x19, collectChest2, 0x96),
"d1 goriya chest": seasonsChest(
"d1 boss key", 0x4fcd, 0x04, 0x14, collectChest, 0x96),
// d2
"d2 moblin chest": seasonsChest(
"bracelet", 0x4fe1, 0x04, 0x2a, collectChest, 0x8d),
"d2 roller chest": seasonsChest(
"rupees, 10", 0x4fd9, 0x04, 0x1f, collectChest, 0x8d),
"d2 left from entrance": seasonsChest(
"rupees, 5", 0x4ff5, 0x04, 0x38, collectChest, 0x8d),
"d2 pot chest": seasonsChest(
"dungeon map", 0x4fe5, 0x04, 0x2b, collectChest2, 0x8d),
"d2 rope chest": seasonsChest(
"compass", 0x4ff1, 0x04, 0x36, collectChest2, 0x8d),
"d2 terrace chest": seasonsChest(
"d2 boss key", 0x4fdd, 0x04, 0x24, collectChest, 0x8d),
// d3
"d3 mimic chest": seasonsChest(
"feather 1", 0x5015, 0x04, 0x50, collectChest, 0x60),
"d3 water room": seasonsChest(
"rupees, 30", 0x4ff9, 0x04, 0x41, collectChest, 0x60),
"d3 quicksand terrace": seasonsChest(
"gasha seed", 0x5001, 0x04, 0x44, collectChest, 0x60),
"d3 moldorm chest": seasonsChest(
"bombs, 10", 0x5019, 0x04, 0x54, collectChest, 0x60),
"d3 trampoline chest": seasonsChest(
"compass", 0x5009, 0x04, 0x4d, collectChest2, 0x60),
"d3 bombed wall chest": seasonsChest(
"dungeon map", 0x5011, 0x04, 0x51, collectChest2, 0x60),
"d3 giant blade room": seasonsChest(
"d3 boss key", 0x4ffd, 0x04, 0x46, collectChest, 0x60),
// d4
"d4 cracked floor room": seasonsChest(
"slingshot 1", 0x502d, 0x04, 0x73, collectChest, 0x1d),
"d4 north of entrance": seasonsChest(
"bombs, 10", 0x5031, 0x04, 0x7f, collectChest, 0x1d),
"d4 maze chest": seasonsChest(
"dungeon map", 0x5025, 0x04, 0x69, collectChest2, 0x1d),
"d4 water ring room": seasonsChest(
"compass", 0x5035, 0x04, 0x83, collectChest2, 0x1d),
"d4 dive spot": seasonsScriptItem(
"d4 boss key", 0x4c0b, 0x04, 0x6c, collectDive, 0x1d),
// d5
"d5 magnet ball chest": seasonsChest(
"magnet gloves", 0x503d, 0x04, 0x89, collectChest, 0x89),
"d5 terrace chest": seasonsChest(
"rupees, 100", 0x5041, 0x04, 0x97, collectChest, 0x8a),
"d5 gibdo/zol chest": seasonsChest(
"dungeon map", 0x5039, 0x04, 0x8f, collectChest2, 0x8f),
"d5 spiral chest": seasonsChest(
"compass", 0x5049, 0x04, 0x9d, collectChest2, 0x8a),
"d5 basement": seasonsScriptItem(
"d5 boss key", 0x4c22, 0x06, 0x8b, collectFind2, 0x8a),
// d6
"d6 armos hall": seasonsChest(
"boomerang 2", 0x507d, 0x04, 0xd0, collectChest, 0x00),
"d6 1F east": seasonsChest(
"rupees, 10", 0x505d, 0x04, 0xaf, collectChest, 0x00),
"d6 2F armos chest": seasonsChest(
"rupees, 5", 0x5065, 0x04, 0xb3, collectChest, 0x00),
"d6 2F gibdo chest": seasonsChest(
"bombs, 10", 0x5069, 0x04, 0xbf, collectChest, 0x00),
"d6 crystal trap room": seasonsChest(
"rupees, 5", 0x5075, 0x04, 0xc3, collectChest, 0x00),
"d6 beamos room": seasonsChest(
"compass", 0x5059, 0x04, 0xad, collectChest2, 0x00),
"d6 1F terrace": seasonsChest(
"dungeon map", 0x5061, 0x04, 0xb0, collectChest2, 0x00),
"d6 escape room": seasonsChest(
"d6 boss key", 0x5079, 0x04, 0xc4, collectChest, 0x00),
// d7
"d7 spike chest": seasonsChest(
"feather 2", 0x509e, 0x05, 0x44, collectChest, 0xd0),
"d7 maze chest": seasonsChest(
"rupees, 1", 0x509a, 0x05, 0x43, collectChest, 0xd0),
"d7 right of entrance": seasonsChest(
"power ring L-1", 0x50b6, 0x05, 0x5a, collectChest, 0xd0),
"d7 bombed wall chest": seasonsChest(
"compass", 0x50aa, 0x05, 0x52, collectChest2, 0xd0),
"d7 quicksand chest": seasonsChest(
"dungeon map", 0x50b2, 0x05, 0x58, collectChest2, 0xd0),
"d7 stalfos chest": seasonsChest(
"d7 boss key", 0x50a6, 0x05, 0x48, collectChest, 0xd0),
// d8
"d8 armos chest": seasonsChest(
"slingshot 2", 0x50da, 0x05, 0x8d, collectChest, 0x04),
"d8 SW lava chest": seasonsChest(
"bombs, 10", 0x50ba, 0x05, 0x6a, collectChest, 0x04),
"d8 three eyes chest": seasonsChest(
"steadfast ring", 0x50c6, 0x05, 0x7d, collectChest, 0x04),
"d8 spike room": seasonsChest(
"compass", 0x50d2, 0x05, 0x8b, collectChest2, 0x04),
"d8 magnet ball room": seasonsChest(
"dungeon map", 0x50de, 0x05, 0x8e, collectChest2, 0x04),
"d8 pols voice chest": seasonsChest(
"d8 boss key", 0x50ca, 0x05, 0x80, collectChest, 0x04),
// don't use this slot; no one knows about it and it's not required for
// anything in a normal playthrough
// "ring box L-2 gift": seasonsScriptItem("ring box L-2", 0x5c18),
// these are "fake" item slots in that they don't slot real treasures
"horon village seed tree": &MutableSlot{
treasureName: "ember tree seeds",
idAddrs: []Addr{{0x0d, 0x68fb}},
},
"woods of winter seed tree": &MutableSlot{
treasureName: "mystery tree seeds",
idAddrs: []Addr{{0x0d, 0x68fe}},
},
"north horon seed tree": &MutableSlot{
treasureName: "scent tree seeds",
idAddrs: []Addr{{0x0d, 0x6901}},
},
"spool swamp seed tree": &MutableSlot{
treasureName: "pegasus tree seeds",
idAddrs: []Addr{{0x0d, 0x6904}},
},
"sunken city seed tree": &MutableSlot{
treasureName: "gale tree seeds",
idAddrs: []Addr{{0x0d, 0x6907}},
},
"tarm ruins seed tree": &MutableSlot{
treasureName: "gale tree seeds",
idAddrs: []Addr{{0x0d, 0x690a}},
},
} | rom/seasons_slots.go | 0.681091 | 0.612078 | seasons_slots.go | starcoder |
package runners
// AutoResizeEnabledKey is the key of flag that enables pvc-autoresizer.
const AutoResizeEnabledKey = "resize.kubesphere.io/enabled"
// ResizeThresholdAnnotation is the key of resize threshold.
const ResizeThresholdAnnotation = "resize.kubesphere.io/threshold"
// ResizeInodesThresholdAnnotation is the key of resize threshold for inodes.
const ResizeInodesThresholdAnnotation = "resize.kubesphere.io/inodes-threshold"
// ResizeIncreaseAnnotation is the key of amount increased.
const ResizeIncreaseAnnotation = "resize.kubesphere.io/increase"
// StorageLimitAnnotation is the key of storage limit value
const StorageLimitAnnotation = "resize.kubesphere.io/storage-limit"
// PreviousCapacityBytesAnnotation is the key of previous volume capacity.
const PreviousCapacityBytesAnnotation = "resize.kubesphere.io/pre-capacity-bytes"
// AutoRestartEnabledKey is the key of flag that enables pods-autoRestart.
const AutoRestartEnabledKey = "restart.kubesphere.io/enabled"
// SupportOnlineResize is the key of flag that the storage class support online expansion
const SupportOnlineResize = "restart.kubesphere.io/online-expansion-support"
// RestartSkip is the key of flag that the workload don't need autoRestart
const RestartSkip = "restart.kubesphere.io/skip"
// ResizingMaxTime is the key of flag that the maximum number of seconds that autoRestart can wait for pvc resize
const ResizingMaxTime = "restart.kubesphere.io/max-time"
// RestartStage is used to record whether autoRestart has finished shutting down the pod
const RestartStage = "restart.kubesphere.io/stage"
// RestartStopTime is used to record the time when the pod is closed
const RestartStopTime = "restart.kubesphere.io/stop-time"
// ExpectReplicaNums is used to record the value of replicas before restart
const ExpectReplicaNums = "restart.kubesphere.io/replica-nums"
// DefaultThreshold is the default value of ResizeThresholdAnnotation.
const DefaultThreshold = "10%"
// DefaultInodesThreshold is the default value of ResizeInodesThresholdAnnotation.
const DefaultInodesThreshold = "10%"
// DefaultIncrease is the default value of ResizeIncreaseAnnotation.
const DefaultIncrease = "10%" | vendor/github.com/kubesphere/pvc-autoresizer/runners/constants.go | 0.566978 | 0.415254 | constants.go | starcoder |
package deepequals
import (
"math"
"reflect"
"time"
"unsafe"
"github.com/hasSalil/customdeepequal"
ch "gopkg.in/check.v1"
)
// DeltaDeepEquals is the standard deltaDeepEqualsChecker instance.
// It tolerates an absolute float difference of 0.001 and compares times at
// one-second granularity.
var DeltaDeepEquals = deltaDeepEqualsChecker(0.001, time.Second)

// marginOfErrorDeepEqualsChecker does deep equals between a pair of structs and returns
// true if recursive floats and time values are within the defined margin of error.
// It implements gocheck's Checker interface via the embedded CheckerInfo and Check.
type marginOfErrorDeepEqualsChecker struct {
	*ch.CheckerInfo
	deepEquals customdeepequal.CustomDeepEquals // per-type equivalence registry
}
// deltaDeepEqualsChecker creates a marginOfErrorDeepEqualsChecker that treats
// float32/float64 values as equal when they differ by at most floatDelta, and
// time.Time values as equal when they fall into the same timeGran-sized bucket.
func deltaDeepEqualsChecker(floatDelta float64, timeGran time.Duration) *marginOfErrorDeepEqualsChecker {
	checker := &marginOfErrorDeepEqualsChecker{
		CheckerInfo: &ch.CheckerInfo{Name: "DeepEquals", Params: []string{"obtained", "expected"}},
		deepEquals:  customdeepequal.NewCustomDeepEquals(),
	}
	// Reuse the setter methods instead of duplicating their registration logic.
	return checker.UseFloatDelta(floatDelta).UseTimeGranularity(timeGran)
}
// WithDeepEqualForType registers the equals function as the equivalence test
// for the given type and returns the checker to allow chaining.
func (checker *marginOfErrorDeepEqualsChecker) WithDeepEqualForType(ty reflect.Type, equals func(a, b unsafe.Pointer) bool) *marginOfErrorDeepEqualsChecker {
	checker.deepEquals.RegisterEquivalenceForType(ty, equals)
	return checker
}
// UseFloatDelta sets the float delta: float32 and float64 values compare
// equal when their absolute difference is at most delta. Returns the checker
// for chaining.
func (checker *marginOfErrorDeepEqualsChecker) UseFloatDelta(delta float64) *marginOfErrorDeepEqualsChecker {
	f32 := float32(0)
	f64 := float64(0)
	checker.deepEquals.RegisterEquivalenceForType(reflect.TypeOf(f32), func(a, b unsafe.Pointer) bool {
		af := *(*float32)(a)
		bf := *(*float32)(b)
		return math.Abs(float64(af-bf)) <= delta
	})
	checker.deepEquals.RegisterEquivalenceForType(reflect.TypeOf(f64), func(a, b unsafe.Pointer) bool {
		af := *(*float64)(a)
		bf := *(*float64)(b)
		return math.Abs(af-bf) <= delta
	})
	return checker
}
// UseTimeGranularity sets the time granularity: two time.Time values compare
// equal when their UnixNano timestamps fall into the same timeGran-sized
// bucket (integer division). Returns the checker for chaining.
// (The previous comment here said "sets the float delta" — copy/paste error.)
func (checker *marginOfErrorDeepEqualsChecker) UseTimeGranularity(timeGran time.Duration) *marginOfErrorDeepEqualsChecker {
	t := time.Now()
	checker.deepEquals.RegisterEquivalenceForType(reflect.TypeOf(t), func(a, b unsafe.Pointer) bool {
		aT := (*time.Time)(a)
		bT := (*time.Time)(b)
		aNano := aT.UnixNano()
		bNano := bT.UnixNano()
		nanoGrain := timeGran.Nanoseconds()
		return (aNano / nanoGrain) == (bNano / nanoGrain)
	})
	return checker
}
// Check implements check.Checker
func (checker *marginOfErrorDeepEqualsChecker) Check(params []interface{}, names []string) (result bool, error string) {
return checker.deepEquals.DeepEqual(params[0], params[1]), ""
} | deepequals/deltadeepequals.go | 0.78572 | 0.447219 | deltadeepequals.go | starcoder |
package main
import (
"fmt"
"github.com/NOX73/go-neural"
"github.com/NOX73/go-neural/learn"
"github.com/NOX73/go-neural/persist"
"github.com/cheggaaa/pb"
"math"
"math/rand"
"sort"
"time"
)
// Board consists of 4 x 4 cells, but we're training on a single slice which can
// represent either a horizontal or vertical slice of the board:
const WIDTH = 4

// In the actual game, each tile can be empty or contain 2, 4, 8, 16, 32, 64, 128, 256, 512, or 1024
// These are powers of 2, which can be represented by their log2 / bitnumber, ie empty = 0,
// 2 = 1, 16 = 4, 1024 = 10.
const VALUES = 11

// PERMUTATIONS is the number of distinct board slices: VALUES^WIDTH.
const PERMUTATIONS = 14641 // 11pow4

// Sample is one labelled training example for the network.
type Sample struct {
	BoardSlice []float64 // values in this slice
	Goal       []float64 // 0 = has gap, 1 = has fold
}

// Samples is the full training set, populated by makeSamples.
var Samples []Sample
// makeSamples fills the global Samples with every possible board slice
// (all VALUES^WIDTH = PERMUTATIONS base-11 encodings), labelling each with
// whether it contains a gap and/or a fold, plus 333 extra all-empty samples
// to oversample the empty slice.
func makeSamples() {
	rand.Seed(time.Now().UTC().UnixNano())
	zeroes := 333
	Samples = make([]Sample, PERMUTATIONS+zeroes)
	// The all-empty slice: no gap, no fold. The same Sample value (with its
	// slices) is reused for all extra zero entries; safe because samples are
	// only read after creation.
	zero := Sample{
		BoardSlice: []float64{0, 0, 0, 0},
		Goal:       []float64{0, 0},
	}
	for i := 0; i < PERMUTATIONS; i++ {
		slice := &Samples[i]
		values := make([]int, WIDTH)
		slice.BoardSlice = make([]float64, WIDTH)
		// Decode sample index i as a base-VALUES number; each digit is one
		// tile value (0 = empty, k = tile 2^k).
		seed := i
		for x := 0; x < WIDTH; x++ {
			value := seed % VALUES
			if value != 0 {
				values[x] = value
				slice.BoardSlice[x] = float64(value)
			}
			seed /= VALUES // shift right
		}
		var hasGap, hasFold float64
		// A "gap" is a non-empty tile immediately followed by an empty cell
		// (trailing empties count); a "fold" is two equal adjacent tiles.
		for x := 0; x < WIDTH-1; x++ {
			if values[x] != 0 {
				if values[x+1] == 0 {
					hasGap = 1.0
				} else if values[x+1] == values[x] {
					hasFold = 1.0
				}
			}
		}
		// Also count a fold when the next non-empty tile to the right of a
		// tile has the same value (equal tiles separated only by empties).
		for x := 0; hasFold < 0.1 && x < WIDTH-1; x++ {
			if values[x] == 0 {
				continue
			}
			for x2 := x + 1; x2 < WIDTH; x2++ {
				if values[x2] != 0 {
					if values[x2] == values[x] {
						hasFold = 1.0
					}
					break
				}
			}
		}
		slice.Goal = []float64{hasGap, hasFold}
	}
	// Append the oversampled empty slices after the enumerated permutations.
	for i := 0; i < zeroes; i++ {
		Samples[PERMUTATIONS+i] = zero
	}
	fmt.Println("-- created", len(Samples), "samples")
}
// main trains a feed-forward network (WIDTH inputs -> 101 -> 41 -> 2 outputs)
// to predict whether a board slice has a gap and/or a fold. It loops over
// generations until every sample is classified within 0.01, then saves the
// trained network to trained-net.json.
func main() {
	makeSamples()
	// inputs consist of one neuron for each tile value
	const numInputs = WIDTH // one neuron to represent each tile value
	net := neural.NewNetwork(WIDTH, []int{101, 41, 2})
	net.RandomizeSynapses()
	testNetwork(net)
	trainingSpeed := 0.15
	for gen := 0; ; gen++ {
		fmt.Println("generation", gen)
		// train on the complete dataset a few times.
		track("training", 33*len(Samples), func(_ int, progress chan<- bool) {
			for n := 0; n < 33; n++ {
				for i := 0; i < len(Samples); i++ {
					progress <- true
					learn.Learn(net, Samples[i].BoardSlice, Samples[i].Goal, trainingSpeed)
				}
			}
		})
		_, gapDelta, foldDelta, _ := testNetwork(net)
		// Skip the expensive per-sample evaluation until mean errors are small.
		if gapDelta >= 0.01 || foldDelta >= 0.01 {
			continue
		}
		fmt.Println("full evaluation")
		failedSamples := make([]Sample, 0, len(Samples))
		deltas := make([]float64, 0, len(Samples))
		for _, sample := range Samples {
			gapDelta, foldDelta := testSample(net, sample)
			if gapDelta > 0.01 || foldDelta > 0.01 {
				failedSamples = append(failedSamples, sample)
				delta := gapDelta
				if foldDelta > delta {
					delta = foldDelta
				}
				deltas = append(deltas, delta)
			}
		}
		if len(failedSamples) == 0 {
			fmt.Println("victory")
			fmt.Println("saving to trained-net.json")
			persist.ToFile("trained-net.json", net)
			return
		}
		// Sort the failed samples worst-first. BUG FIX: the old code sorted
		// failedSamples with a less function that indexed the parallel deltas
		// slice, which sort.Slice does not swap alongside, so the comparator
		// was inconsistent. Sort an index permutation instead.
		order := make([]int, len(failedSamples))
		for i := range order {
			order[i] = i
		}
		sort.Slice(order, func(l, r int) bool {
			return deltas[order[l]] > deltas[order[r]]
		})
		sortedSamples := make([]Sample, len(failedSamples))
		for i, j := range order {
			sortedSamples[i] = failedSamples[j]
		}
		failedSamples = sortedSamples
		fmt.Printf("** FAILED %d of %d samples\n", len(failedSamples), len(Samples))
		if len(failedSamples) < 10 {
			fmt.Println("-- SAVING")
			persist.ToFile("trained-net.json", net)
		}
		// Show the worst offenders. BUG FIX: guard the slice bound; the old
		// failedSamples[:5] panicked when fewer than 5 samples failed.
		worstN := failedSamples
		if len(worstN) > 5 {
			worstN = worstN[:5]
		}
		for idx, sample := range worstN {
			gapDelta, foldDelta := testSample(net, sample)
			fmt.Println(idx+1, "gap", gapDelta, "fold", foldDelta, sample)
		}
		if len(failedSamples) > 1 && len(failedSamples) < 2000 {
			// BUG FIX: with exactly one failed sample the nested loops below
			// never ran (r starts at 1), so totalCount stayed 0 and the
			// outer loop spun forever; skip retraining in that case.
			fmt.Printf("retraining %d samples\n", len(failedSamples))
			totalCount := 0
			for totalCount < 1000 {
				for r := 1; r < len(failedSamples); r++ {
					for l := 0; l < r; l++ {
						learn.Learn(net, failedSamples[l].BoardSlice, failedSamples[l].Goal, 0.01)
						totalCount += 1
					}
				}
			}
		}
		// Anneal the training speed as fewer samples fail.
		if len(failedSamples) < 50 {
			trainingSpeed = 0.05
		} else if len(failedSamples) < 200 {
			trainingSpeed = 0.07
		} else if len(failedSamples) < 1000 {
			trainingSpeed = 0.09
		} else {
			trainingSpeed = 0.1
		}
	}
}
// testSample runs one board slice through the network and returns the
// absolute error of the gap output (index 0) and of the fold output
// (index 1) against the sample's goal.
func testSample(net *neural.Network, sample Sample) (gapDelta, foldDelta float64) {
	outputs := net.Calculate(sample.BoardSlice)
	return math.Abs(outputs[0] - sample.Goal[0]), math.Abs(outputs[1] - sample.Goal[1])
}
// testNetwork evaluates the network over the whole Samples set. It prints and
// returns the mean evaluation score, the mean absolute gap error, the mean
// absolute fold error, and the single worst per-output error seen.
func testNetwork(net *neural.Network) (evaluation, gapDelta, foldDelta, worst float64) {
	for _, sample := range Samples {
		sampleGapDelta, sampleFoldDelta := testSample(net, sample)
		if sampleGapDelta > worst {
			worst = sampleGapDelta
		}
		if sampleFoldDelta > worst {
			worst = sampleFoldDelta
		}
		gapDelta += sampleGapDelta
		foldDelta += sampleFoldDelta
		evaluation += learn.Evaluation(net, sample.BoardSlice, sample.Goal)
	}
	// Convert the accumulated sums into means over the sample count.
	nSamples := float64(len(Samples))
	evaluation /= nSamples
	gapDelta /= nSamples
	foldDelta /= nSamples
	fmt.Println("eval", evaluation, "gap", gapDelta, "fold", foldDelta, "worst", worst)
	return
}
func track(what string, iterations int, action func(int, chan<- bool)) {
fmt.Println("--", what)
progressBar := pb.StartNew(iterations)
defer progressBar.Finish()
statusCh := make(chan bool, 128)
go func () {
defer close(statusCh)
action(iterations, statusCh)
}()
progressBar.SetRefreshRate(500 * time.Millisecond)
for range statusCh {
progressBar.Increment()
}
} | golang/nn1/main.go | 0.617974 | 0.432663 | main.go | starcoder |
package quadtree
import (
"SpreadSimulator/util"
)
// Quadtree constants
const (
	// NodeCapacity is the maximum number of entries a node stores before it
	// is subdivided into four child quadrants.
	NodeCapacity = 4
)
// Quadtree is a point quadtree over a rectangular boundary: each node stores
// up to NodeCapacity position/index pairs and, once subdivided, four child
// quadrants. All child pointers are nil until subdivide is called.
type Quadtree struct {
	boundary  util.Rect
	entries   []util.PositionIndexPair
	northWest *Quadtree
	northEast *Quadtree
	southWest *Quadtree
	southEast *Quadtree
}
// NewQuadtree creates an empty, un-subdivided Quadtree covering the given
// boundary.
func NewQuadtree(boundary util.Rect) *Quadtree {
	return &Quadtree{
		boundary: boundary,
		entries:  make([]util.PositionIndexPair, 0),
	}
}
// Insert adds pair to the tree and returns true on success. Insertion fails
// (returns false) when the pair's position lies outside this node's boundary.
func (qtree *Quadtree) Insert(pair util.PositionIndexPair) bool {
	if !qtree.boundary.ContainsPoint(pair.Position) {
		return false
	}
	// Leaf node: store locally while there is room, otherwise split first.
	if qtree.northWest == nil {
		if len(qtree.entries) < NodeCapacity {
			qtree.entries = append(qtree.entries, pair)
			return true
		}
		qtree.subdivide()
	}
	// Hand the pair to whichever child quadrant contains it.
	return qtree.northWest.Insert(pair) ||
		qtree.northEast.Insert(pair) ||
		qtree.southWest.Insert(pair) ||
		qtree.southEast.Insert(pair)
}
// QueryRange returns the indexes of all stored points that fall inside the
// given bounds.
func (qtree *Quadtree) QueryRange(bounds util.Rect) []int {
	inRange := make([]int, 0)
	// Prune whole subtrees whose boundary does not touch the query rect.
	if !qtree.boundary.IntersectsRect(bounds) {
		return inRange
	}
	for _, entry := range qtree.entries {
		if bounds.ContainsPoint(entry.Position) {
			inRange = append(inRange, entry.Index)
		}
	}
	if qtree.northWest == nil {
		return inRange
	}
	for _, child := range []*Quadtree{qtree.northWest, qtree.northEast, qtree.southWest, qtree.southEast} {
		inRange = append(inRange, child.QueryRange(bounds)...)
	}
	return inRange
}
func (qtree *Quadtree) subdivide() {
NWCenter := util.Vector2f{
X: qtree.boundary.Center.X - qtree.boundary.HalfDim.X/2,
Y: qtree.boundary.Center.Y - qtree.boundary.HalfDim.Y/2,
}
qtree.northWest = NewQuadtree(util.NewRect(NWCenter, util.Vector2f{X: qtree.boundary.HalfDim.X / 2, Y: qtree.boundary.HalfDim.Y / 2}))
NECenter := util.Vector2f{
X: qtree.boundary.Center.X + qtree.boundary.HalfDim.X/2,
Y: qtree.boundary.Center.Y - qtree.boundary.HalfDim.Y/2,
}
qtree.northEast = NewQuadtree(util.NewRect(NECenter, util.Vector2f{X: qtree.boundary.HalfDim.X / 2, Y: qtree.boundary.HalfDim.Y / 2}))
SWCenter := util.Vector2f{
X: qtree.boundary.Center.X - qtree.boundary.HalfDim.X/2,
Y: qtree.boundary.Center.Y + qtree.boundary.HalfDim.Y/2,
}
qtree.southWest = NewQuadtree(util.NewRect(SWCenter, util.Vector2f{X: qtree.boundary.HalfDim.X / 2, Y: qtree.boundary.HalfDim.Y / 2}))
SECenter := util.Vector2f{
X: qtree.boundary.Center.X + qtree.boundary.HalfDim.X/2,
Y: qtree.boundary.Center.Y + qtree.boundary.HalfDim.Y/2,
}
qtree.southEast = NewQuadtree(util.NewRect(SECenter, util.Vector2f{X: qtree.boundary.HalfDim.X / 2, Y: qtree.boundary.HalfDim.Y / 2}))
} | quadtree/Quadtree.go | 0.765769 | 0.470007 | Quadtree.go | starcoder |
package imghash
import (
"image"
"image/color"
)
// grayscale returns an 8-bit grayscale copy of img; every pixel is converted
// through the standard color.GrayModel by image.Gray.Set.
func grayscale(img image.Image) image.Image {
	bounds := img.Bounds()
	out := image.NewGray(bounds)
	for row := bounds.Min.Y; row < bounds.Max.Y; row++ {
		for col := bounds.Min.X; col < bounds.Max.X; col++ {
			out.Set(col, row, img.At(col, row))
		}
	}
	return out
}
// average divides each accumulated channel sum by n and packs the results
// into a new w x h RGBA image. sum holds 4 entries (R, G, B, A) per pixel in
// row-major order.
func average(sum []uint64, w, h int, n uint64) image.Image {
	out := image.NewRGBA(image.Rect(0, 0, w, h))
	for p := 0; p < w*h; p++ {
		base := 4 * p
		for c := 0; c < 4; c++ {
			out.Pix[base+c] = uint8(sum[base+c] / n)
		}
	}
	return out
}
// resize returns a scaled copy of the image slice r of m.
// The returned image has width w and height h.
// Scaling is done by area averaging: each source pixel contributes to each
// destination pixel in proportion to their overlap.
func resize(m image.Image, w, h int) image.Image {
	if w < 0 || h < 0 {
		return nil
	}
	r := m.Bounds()
	if w == 0 || h == 0 || r.Dx() <= 0 || r.Dy() <= 0 {
		return image.NewRGBA64(image.Rect(0, 0, w, h))
	}
	// Fast paths for common image types that avoid the generic At() call.
	switch m := m.(type) {
	case *image.RGBA:
		return resizeRGBA(m, r, w, h)
	case *image.YCbCr:
		if m, ok := resizeYCbCr(m, r, w, h); ok {
			return m
		}
	}
	// Generic path. Work in fixed point: coordinates are scaled by ww/hh so
	// that each source pixel covers a dx-by-dy area on the destination grid,
	// and each overlap quantum qx*qy is 1/(dx*dy) of a destination pixel.
	ww, hh := uint64(w), uint64(h)
	dx, dy := uint64(r.Dx()), uint64(r.Dy())
	n, sum := dx*dy, make([]uint64, 4*w*h)
	var x, y int
	var r32, g32, b32, a32 uint32
	var r64, g64, b64, a64, remx, remy, index uint64
	var py, px, qx, qy uint64
	minx, miny := r.Min.X, r.Min.Y
	maxx, maxy := r.Max.X, r.Max.Y
	for y = miny; y < maxy; y++ {
		for x = minx; x < maxx; x++ {
			// Get the source pixel.
			r32, g32, b32, a32 = m.At(x, y).RGBA()
			r64 = uint64(r32)
			g64 = uint64(g32)
			b64 = uint64(b32)
			a64 = uint64(a32)
			// Spread the source pixel over 1 or more destination rows.
			py = uint64(y) * hh
			for remy = hh; remy > 0; {
				qy = dy - (py % dy)
				if qy > remy {
					qy = remy
				}
				// Spread the source pixel over 1 or more destination columns.
				px = uint64(x) * ww
				index = 4 * ((py/dy)*ww + (px / dx))
				for remx = ww; remx > 0; {
					qx = dx - (px % dx)
					if qx > remx {
						qx = remx
					}
					sum[index] += r64 * qx * qy
					sum[index+1] += g64 * qx * qy
					sum[index+2] += b64 * qx * qy
					sum[index+3] += a64 * qx * qy
					index += 4
					px += qx
					remx -= qx
				}
				py += qy
				remy -= qy
			}
		}
	}
	// At().RGBA() yields 16-bit channels (8-bit value times 0x101), so the
	// per-pixel divisor is n*0x0101 to land back in 8-bit range.
	return average(sum, w, h, n*0x0101)
}
// resizeYCbCr returns a scaled copy of the YCbCr image slice r of m.
// The returned image has width w and height h.
// The second return value is false when the subsample ratio is unsupported
// and the caller must fall back to the generic resize path.
func resizeYCbCr(m *image.YCbCr, r image.Rectangle, w, h int) (image.Image, bool) {
	// verticalRes is the vertical chroma subsampling factor: 4:2:0 stores
	// one chroma row per two luma rows, 4:2:2 one per row.
	var verticalRes int
	switch m.SubsampleRatio {
	case image.YCbCrSubsampleRatio420:
		verticalRes = 2
	case image.YCbCrSubsampleRatio422:
		verticalRes = 1
	default:
		return nil, false
	}
	ww, hh := uint64(w), uint64(h)
	dx, dy := uint64(r.Dx()), uint64(r.Dy())
	n, sum := dx*dy, make([]uint64, 4*w*h)
	var x, y int
	var r8, g8, b8 uint8
	var r64, g64, b64, remx, remy, index uint64
	var py, px, qx, qy, qxy uint64
	var Y, Cb, Cr []uint8
	minx, miny := r.Min.X, r.Min.Y
	maxx, maxy := r.Max.X, r.Max.Y
	for y = miny; y < maxy; y++ {
		// Row slices into the luma plane and the vertically subsampled
		// chroma planes.
		Y = m.Y[y*m.YStride:]
		Cb = m.Cb[y/verticalRes*m.CStride:]
		Cr = m.Cr[y/verticalRes*m.CStride:]
		for x = minx; x < maxx; x++ {
			// Get the source pixel (chroma is horizontally subsampled by 2).
			r8, g8, b8 = color.YCbCrToRGB(Y[x], Cb[x/2], Cr[x/2])
			r64 = uint64(r8)
			g64 = uint64(g8)
			b64 = uint64(b8)
			// Spread the source pixel over 1 or more destination rows.
			py = uint64(y) * hh
			for remy = hh; remy > 0; {
				qy = dy - (py % dy)
				if qy > remy {
					qy = remy
				}
				// Spread the source pixel over 1 or more destination columns.
				px = uint64(x) * ww
				index = 4 * ((py/dy)*ww + (px / dx))
				for remx = ww; remx > 0; {
					qx = dx - (px % dx)
					if qx > remx {
						qx = remx
					}
					qxy = qx * qy
					sum[index] += r64 * qxy
					sum[index+1] += g64 * qxy
					sum[index+2] += b64 * qxy
					// YCbCr has no alpha; accumulate fully opaque.
					sum[index+3] += 0xFFFF * qxy
					index += 4
					px += qx
					remx -= qx
				}
				py += qy
				remy -= qy
			}
		}
	}
	// Channels here are 8-bit (unlike the generic path's 16-bit RGBA()),
	// so divide by the area weight n only.
	return average(sum, w, h, n), true
}
// resizeRGBA returns a scaled copy of the RGBA image slice r of m.
// The returned image has width w and height h.
func resizeRGBA(m *image.RGBA, r image.Rectangle, w, h int) image.Image {
ww, hh := uint64(w), uint64(h)
dx, dy := uint64(r.Dx()), uint64(r.Dy())
n, sum := dx*dy, make([]uint64, 4*w*h)
var x, y int
var pixOffset int
var r64, g64, b64, a64, remx, remy, index uint64
var py, px, qx, qy, qxy uint64
minx, miny := r.Min.X, r.Min.Y
maxx, maxy := r.Max.X, r.Max.Y
for y = miny; y < maxy; y++ {
pixOffset = m.PixOffset(minx, y)
for x = minx; x < maxx; x++ {
// Get the source pixel.
r64 = uint64(m.Pix[pixOffset+0])
g64 = uint64(m.Pix[pixOffset+1])
b64 = uint64(m.Pix[pixOffset+2])
a64 = uint64(m.Pix[pixOffset+3])
pixOffset += 4
// Spread the source pixel over 1 or more destination rows.
py = uint64(y) * hh
for remy = hh; remy > 0; {
qy = dy - (py % dy)
if qy > remy {
qy = remy
}
// Spread the source pixel over 1 or more destination columns.
px = uint64(x) * ww
index = 4 * ((py/dy)*ww + (px / dx))
for remx = ww; remx > 0; {
qx = dx - (px % dx)
if qx > remx {
qx = remx
}
qxy = qx * qy
sum[index] += r64 * qxy
sum[index+1] += g64 * qxy
sum[index+2] += b64 * qxy
sum[index+3] += a64 * qxy
index += 4
px += qx
remx -= qx
}
py += qy
remy -= qy
}
}
}
return average(sum, w, h, n)
} | vendor/github.com/jteeuwen/imghash/image.go | 0.823257 | 0.552962 | image.go | starcoder |
package main
import "math/rand"
// Universe is a Game of Life grid stored row-major in cells. Neighbor lookups
// wrap around at the edges (toroidal topology, see aliveNeighbors).
type Universe struct {
	height uint32
	width  uint32
	cells  []uint8 // one entry per cell, holding alive or dead
}
// NewUniverse creates a 64x64 universe in which each cell is independently
// set alive with probability livePopulation percent.
func NewUniverse(livePopulation int) *Universe {
	const size = 64
	cells := make([]uint8, size*size)
	for i := range cells {
		state := uint8(dead)
		if rand.Intn(100) < livePopulation {
			state = alive
		}
		cells[i] = state
	}
	return &Universe{
		height: size,
		width:  size,
		cells:  cells,
	}
}
// getIndex converts (row, column) coordinates into the row-major index of the
// cell in u.cells.
func (u *Universe) getIndex(row, column uint32) uint32 {
	return row*u.width + column
}
// aliveNeighbors counts the live cells among the eight neighbors of
// (row, column), wrapping around at the grid edges.
// NOTE(review): the count is the sum of cell values, which relies on
// alive == 1 and dead == 0 — confirm against the constants' definition.
func (u *Universe) aliveNeighbors(row, column uint32) uint8 {
	count := uint8(0)
	// We use height/width and modulos to avoid manually handling edge cases
	// (literally: cases at the edge of the universe, e.g. cells at row/col 0).
	// Adding height-1 (resp. width-1) subtracts 1 modulo the grid size
	// without underflowing the unsigned coordinate.
	for _, rowDiff := range []uint32{u.height - 1, 0, 1} {
		for _, colDiff := range []uint32{u.width - 1, 0, 1} {
			if rowDiff == 0 && colDiff == 0 {
				// Skip checking the cell itself
				continue
			}
			neighborRow := (row + rowDiff) % u.height
			neighborColumn := (column + colDiff) % u.width
			neighborIdx := u.getIndex(neighborRow, neighborColumn)
			count += u.cells[neighborIdx]
		}
	}
	return count
}
// tick advances the universe by one generation according to Conway's rules,
// computing the next state into a fresh grid and swapping it in.
// BUG FIX: the loop bounds were swapped (row bounded by width, column by
// height); that was benign only because the grid is square.
func (u *Universe) tick() {
	newCells := make([]uint8, u.height*u.width)
	for row := uint32(0); row < u.height; row++ {
		for column := uint32(0); column < u.width; column++ {
			cellIndex := u.getIndex(row, column)
			cell := u.cells[cellIndex]
			liveNeighbors := u.aliveNeighbors(row, column)
			switch {
			case cell == alive && (liveNeighbors == 2 || liveNeighbors == 3):
				// Survival: a live cell with two or three live neighbours
				// lives on to the next generation.
				newCells[cellIndex] = alive
			case cell == dead && liveNeighbors == 3:
				// Reproduction: a dead cell with exactly three live
				// neighbours becomes alive.
				newCells[cellIndex] = alive
			case cell == alive:
				// Under- or overpopulation: any other live cell dies.
				newCells[cellIndex] = dead
			default:
				// Dead cells without exactly three neighbours stay as-is.
				newCells[cellIndex] = cell
			}
		}
	}
	u.cells = newCells
}
// reset clears the grid by replacing it with a fresh zeroed slice.
// NOTE(review): relies on the zero value being the dead state — confirm that
// the dead constant is 0.
func (u *Universe) reset() {
	u.cells = make([]uint8, u.width*u.height)
}
// toggleCellAt flips the cell at (row, column) between alive and dead.
func (u *Universe) toggleCellAt(row, column uint32) {
	idx := u.getIndex(row, column)
	switch u.cells[idx] {
	case alive:
		u.cells[idx] = dead
	default:
		u.cells[idx] = alive
	}
}
func (u *Universe) setRectangle(startingRow, startingColumn uint32, values [][]uint8) {
for i, row := range values {
for j, value := range row {
idx := u.getIndex(startingRow+uint32(i), startingColumn+uint32(j))
u.cells[idx] = value
}
}
} | universe.go | 0.590661 | 0.420064 | universe.go | starcoder |
package numf
import (
"fmt"
"math"
"strconv"
"strings"
"github.com/Akkurate/utils/str"
)
// Rounded is the result of RoundWithPrefix: the input rounded to the
// requested significant digits, both without and with an SI prefix applied.
type Rounded struct {
	Rawvalue float64 // raw value with given digits but no prefix
	Prefix   string  // prefix string (e.g. "k", "M"; empty when none applies)
	Value    float64 // rounded value with given digits and prefix
	Response string  // response string as <Value> <Prefix><unit>
}
// RoundWithPrefix returns the value rounded to the given significant digits
// with the correct SI prefix (M for Megas, k for Kilos etc.).
// Special case: abs(value) between 0.01 and 9999 is left unprefixed (because
// it looks nicer). Set prefix to force a specific prefix; otherwise the
// function picks one from the magnitude. Units in noprefixUnits never get a
// prefix.
func RoundWithPrefix(v float64, digits int, unit string, prefix string) Rounded {
	lowercaseunit := strings.ToLower(unit) // force lowercase
	prefixes := []string{"G", "M", "k", "m", "u", "n"}
	powers := []float64{1e9, 1e6, 1e3, 1e-3, 1e-6, 1e-9}
	noprefixUnits := []string{"%", "cycles", "years", "°c", "°lon", "°lat", "events", "", " "}
	// initialize response
	resp := Rounded{
		Rawvalue: RoundTo(v, digits),
	}
	setprefix := true
	// no prefix if noprefixUnit given
	if str.Contains(noprefixUnits, lowercaseunit) {
		setprefix = false
		resp.Prefix = ""
	}
	if setprefix {
		prefixpos, _ := str.FindIndex(prefixes, prefix)
		if prefixpos >= 0 {
			// BUG FIX: the forced prefix was never stored in resp.Prefix,
			// and the response string had value and prefix swapped relative
			// to every other branch (and to the documented
			// "<Value> <Prefix><unit>" format).
			resp.Prefix = prefix
			resp.Value = RoundTo(v/powers[prefixpos], digits)
			resp.Response = fmt.Sprintf("%v %v%v", resp.Value, resp.Prefix, unit)
			return resp
		}
		resp.Prefix = ""
		// Values in a "nice" magnitude band keep no prefix.
		if math.Abs(v) >= 0.01 && math.Abs(v) <= 9999 {
			resp.Value = RoundTo(v, digits)
			resp.Response = fmt.Sprintf("%v %v%v", resp.Value, resp.Prefix, unit)
			return resp
		}
		// Pick the largest prefix whose power the magnitude reaches.
		for i, p := range prefixes {
			if math.Abs(v) >= powers[i] {
				resp.Prefix = p
				resp.Value = RoundTo(v/powers[i], digits)
				resp.Response = fmt.Sprintf("%v %v%v", resp.Value, resp.Prefix, unit)
				return resp
			}
		}
	}
	// No prefix applies: return the plainly rounded value.
	resp.Value = RoundTo(v, digits)
	resp.Response = fmt.Sprintf("%v %v%v", resp.Value, resp.Prefix, unit)
	return resp
}
// RoundTo rounds val to the given number of significant digits by
// round-tripping through a %.<digits>g formatting.
func RoundTo(val float64, digits int) float64 {
	format := fmt.Sprintf("%%.%dg", digits)
	rounded, _ := strconv.ParseFloat(fmt.Sprintf(format, val), 64)
	return rounded
}
// RoundToNearest rounds value to the closest multiple of nearest; exact
// midpoints round up.
// NOTE(review): math.Mod returns a negative remainder for negative values,
// so negative inputs always round toward the next multiple closer to zero —
// confirm whether callers rely on that.
func RoundToNearest(value float64, nearest float64) float64 {
	modulo := math.Mod(value, nearest)
	if modulo >= nearest/2 {
		return value - modulo + nearest
	}
	return value - modulo
}
package utils
import (
"reflect"
"sort"
)
// AppendEmptySliceField appends a zero-valued element of the slice's element
// type and returns the extended slice value.
func AppendEmptySliceField(slice reflect.Value) reflect.Value {
	return reflect.Append(slice, reflect.Zero(slice.Type().Elem()))
}
// SetSliceLengh returns slice adjusted to exactly length elements: it is
// padded with zero values when too short and truncated when too long.
// (The misspelled name is kept for backward compatibility with callers.)
func SetSliceLengh(slice reflect.Value, length int) reflect.Value {
	switch {
	case length > slice.Len():
		// Grow in a single AppendSlice of zero values instead of appending
		// one element at a time.
		extra := length - slice.Len()
		slice = reflect.AppendSlice(slice, reflect.MakeSlice(slice.Type(), extra, extra))
	case length < slice.Len():
		slice = slice.Slice(0, length)
	}
	return slice
}
// DeleteEmptySliceElementsVal removes every element that equals the element
// type's zero value (by reflect.DeepEqual) from sliceVal and returns the
// filtered slice value. Panics if sliceVal is not a slice.
func DeleteEmptySliceElementsVal(sliceVal reflect.Value) reflect.Value {
	if sliceVal.Kind() != reflect.Slice {
		panic("Argument is not a slice: " + sliceVal.String())
	}
	zero := reflect.Zero(sliceVal.Type().Elem()).Interface()
	i := 0
	for i < sliceVal.Len() {
		if !reflect.DeepEqual(sliceVal.Index(i).Interface(), zero) {
			i++
			continue
		}
		// Splice the zero element out; i stays put so the element that
		// shifted into position i is examined next.
		head := sliceVal.Slice(0, i)
		tail := sliceVal.Slice(i+1, sliceVal.Len())
		sliceVal = reflect.AppendSlice(head, tail)
	}
	return sliceVal
}
// DeleteEmptySliceElements removes zero-valued elements from the given slice
// and returns the result as an interface{}; see DeleteEmptySliceElementsVal.
func DeleteEmptySliceElements(slice interface{}) interface{} {
	filtered := DeleteEmptySliceElementsVal(reflect.ValueOf(slice))
	return filtered.Interface()
}
// DeleteSliceElementVal returns sliceVal with the element at idx removed.
// Out-of-range indices leave the slice unchanged.
func DeleteSliceElementVal(sliceVal reflect.Value, idx int) reflect.Value {
	if idx >= 0 && idx < sliceVal.Len() {
		head := sliceVal.Slice(0, idx)
		tail := sliceVal.Slice(idx+1, sliceVal.Len())
		return reflect.AppendSlice(head, tail)
	}
	return sliceVal
}
// DeleteSliceElement returns slice with the element at idx removed, as an
// interface{}; see DeleteSliceElementVal.
func DeleteSliceElement(slice interface{}, idx int) interface{} {
	result := DeleteSliceElementVal(reflect.ValueOf(slice), idx)
	return result.Interface()
}
// SortableInterfaceSlice adapts a []interface{} plus a comparison function to
// sort.Interface. (Receivers renamed from the non-idiomatic "self".)
type SortableInterfaceSlice struct {
	Slice    []interface{}
	LessFunc func(a, b interface{}) bool
}

// Len implements sort.Interface.
func (s *SortableInterfaceSlice) Len() int {
	return len(s.Slice)
}

// Less implements sort.Interface using LessFunc.
func (s *SortableInterfaceSlice) Less(i, j int) bool {
	return s.LessFunc(s.Slice[i], s.Slice[j])
}

// Swap implements sort.Interface.
func (s *SortableInterfaceSlice) Swap(i, j int) {
	s.Slice[i], s.Slice[j] = s.Slice[j], s.Slice[i]
}

// Sort sorts the slice in place using LessFunc.
func (s *SortableInterfaceSlice) Sort() {
	sort.Sort(s)
}
func SortInterfaceSlice(slice []interface{}, lessFunc func(a, b interface{}) bool) {
sortable := SortableInterfaceSlice{slice, lessFunc}
sortable.Sort()
} | core/utils/slices.go | 0.750004 | 0.406744 | slices.go | starcoder |
package slice
// The Contains* family reports whether x occurs in the slice a. A nil or
// empty slice never contains anything (range simply does not iterate), so no
// explicit length guard is needed — the previous per-function
// len(a) == 0 checks were redundant.

// ContainsBool checks if a value exists in a bool slice
func ContainsBool(a []bool, x bool) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsByte checks if a value exists in a byte slice
func ContainsByte(a []byte, x byte) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsComplex64 checks if a value exists in a complex64 slice
func ContainsComplex64(a []complex64, x complex64) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsComplex128 checks if a value exists in a complex128 slice
func ContainsComplex128(a []complex128, x complex128) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsFloat32 checks if a value exists in a float32 slice
func ContainsFloat32(a []float32, x float32) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsFloat64 checks if a value exists in a float64 slice
func ContainsFloat64(a []float64, x float64) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsInt checks if a value exists in an int slice
func ContainsInt(a []int, x int) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsInt16 checks if a value exists in an int16 slice
func ContainsInt16(a []int16, x int16) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsInt32 checks if a value exists in an int32 slice
func ContainsInt32(a []int32, x int32) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsInt64 checks if a value exists in an int64 slice
func ContainsInt64(a []int64, x int64) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsInt8 checks if a value exists in an int8 slice
func ContainsInt8(a []int8, x int8) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsRune checks if a value exists in a rune slice
func ContainsRune(a []rune, x rune) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsString checks if a value exists in a string slice
func ContainsString(a []string, x string) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsUint checks if a value exists in a uint slice
func ContainsUint(a []uint, x uint) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsUint8 checks if a value exists in a uint8 slice
func ContainsUint8(a []uint8, x uint8) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsUint16 checks if a value exists in a uint16 slice
func ContainsUint16(a []uint16, x uint16) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsUint32 checks if a value exists in a uint32 slice
func ContainsUint32(a []uint32, x uint32) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsUint64 checks if a value exists in a uint64 slice
func ContainsUint64(a []uint64, x uint64) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}

// ContainsUintptr checks if a value exists in a uintptr slice
func ContainsUintptr(a []uintptr, x uintptr) bool {
	for _, v := range a {
		if v == x {
			return true
		}
	}
	return false
}
package vit
import (
"fmt"
"math"
"os"
"strings"
)
// Position describes a specific position in a file.
type Position struct {
	FilePath string
	Line     int // line inside the file starting at 1
	Column   int // column inside the line starting at 1 (points to the rune, not the byte)
}

// String returns a human readable "path:line:column" description of the
// position.
func (p Position) String() string {
	return fmt.Sprintf("%s:%d:%d", p.FilePath, p.Line, p.Column)
}

// IsEqual reports whether both positions point to the same location in the
// same file.
func (p Position) IsEqual(o Position) bool {
	if p.FilePath != o.FilePath {
		return false
	}
	return p.Line == o.Line && p.Column == o.Column
}

// PositionRange describes a range of runes in a file.
type PositionRange struct {
	FilePath string
	// start points to the first rune of the range
	StartLine   int // line inside the file starting at 1
	StartColumn int // column inside the line starting at 1 (points to the rune, not the byte)
	// end points to the last rune of the range
	EndLine   int // line inside the file starting at 1
	EndColumn int // column inside the line starting at 1 (points to the rune, not the byte)
}

// NewRangeFromPosition returns a range that starts and ends at pos.
func NewRangeFromPosition(pos Position) PositionRange {
	return NewRangeFromStartToEnd(pos, pos)
}

// NewRangeFromStartToEnd returns a range from the start position to the end
// position. The file path is taken from start.
func NewRangeFromStartToEnd(start Position, end Position) PositionRange {
	r := PositionRange{FilePath: start.FilePath}
	r.StartLine, r.StartColumn = start.Line, start.Column
	r.EndLine, r.EndColumn = end.Line, end.Column
	return r
}
// CombineRanges returns the smallest range covering both a and b. The two
// ranges are expected to come from the same file; if they differ, a warning
// is printed and a's file path is used.
func CombineRanges(a, b PositionRange) PositionRange {
	if a.FilePath != b.FilePath {
		// BUG FIX: the old warning referred to this function by a stale name
		// ("RangeUnion") and was missing its trailing newline.
		fmt.Printf("CombineRanges has been called with two ranges from different files\n")
	}
	out := PositionRange{FilePath: a.FilePath}
	// Start of the union: the earlier of the two start positions.
	if a.StartLine < b.StartLine {
		out.StartLine = a.StartLine
		out.StartColumn = a.StartColumn
	} else if a.StartLine > b.StartLine {
		out.StartLine = b.StartLine
		out.StartColumn = b.StartColumn
	} else {
		out.StartLine = a.StartLine
		if a.StartColumn < b.StartColumn {
			out.StartColumn = a.StartColumn
		} else {
			out.StartColumn = b.StartColumn
		}
	}
	// End of the union: the later of the two end positions.
	if a.EndLine < b.EndLine {
		out.EndLine = b.EndLine
		out.EndColumn = b.EndColumn
	} else if a.EndLine > b.EndLine {
		out.EndLine = a.EndLine
		out.EndColumn = a.EndColumn
	} else {
		out.EndLine = a.EndLine
		if a.EndColumn < b.EndColumn {
			out.EndColumn = b.EndColumn
		} else {
			out.EndColumn = a.EndColumn
		}
	}
	return out
}
// String returns a human readable description of the range. Note that only
// the start position of the range is rendered.
func (p PositionRange) String() string {
	return fmt.Sprintf("%s:%d:%d", p.FilePath, p.StartLine, p.StartColumn)
}
// Report returns a multi-line, compiler-style description of the range: the
// location, the source line (read from FilePath, trimmed of indentation),
// and a row of '^' markers underneath the spanned columns. If the file can't
// be read it falls back to the plain String form. Only single-line ranges
// are rendered in detail so far.
func (p PositionRange) Report() string {
	f, err := os.ReadFile(p.FilePath)
	if err != nil {
		fmt.Printf("(unable to generate detailed position string: %v)\r\n", err)
		return p.String()
	}
	lines := strings.Split(string(f), "\n")
	var out strings.Builder
	if p.StartLine == p.EndLine {
		out.WriteString(fmt.Sprintf("%s:%d:%d\r\n", p.FilePath, p.StartLine, p.StartColumn))
		// The line prefix. We need to know its length to know where to start printing the markers in the line below.
		lineNumberPrefix := fmt.Sprintf(" %s | ", formatLineNumber(p.StartLine, p.EndLine))
		lineContent := lines[p.StartLine-1]
		// Because we will remove any leading spaces and tabs we need to know how many there are to adjust the markers.
		trimmedCharacters := len(lineContent) - len(strings.TrimLeft(lineContent, " \t"))
		// Here we actually remove all leading and trailing spaces and tabs (and potential \r while we are at it). We also replace all remaining tabs that might be in the line with spaces
		// to make sure the markers line up properly because we can't tell how wide tabs in this line would be and only a single marker would be printed.
		trimmedContent := strings.ReplaceAll(strings.Trim(lineContent, " \t\r"), "\t", " ")
		out.WriteString(lineNumberPrefix)
		out.WriteString(trimmedContent)
		out.WriteString("\r\n")
		out.WriteString(strings.Repeat(" ", len(lineNumberPrefix)-trimmedCharacters+p.StartColumn-1)) // leading spaces to get the right offset
		out.WriteString(strings.Repeat("^", p.EndColumn-p.StartColumn+1))                             // now print the markers
	} else {
		out.WriteString("report for multiline range is not implemnted yet\r\n")
	}
	return out.String()
}
// Start returns the position of the first rune of the range as a Position.
func (p PositionRange) Start() Position {
	return Position{
		FilePath: p.FilePath,
		Line:     p.StartLine,
		Column:   p.StartColumn,
	}
}
// End returns the position of the last rune of the range as a Position.
func (p PositionRange) End() Position {
	return Position{
		FilePath: p.FilePath,
		Line:     p.EndLine,
		Column:   p.EndColumn,
	}
}
// SetEnd moves the end of the range to the given position.
// Only line and column are copied; pos.FilePath is ignored.
func (p *PositionRange) SetEnd(pos Position) {
	p.EndLine = pos.Line
	p.EndColumn = pos.Column
}
// formatLineNumber returns a stringified version of the line number with enough padded spaces to accommodate the highest line number.
// It builds a "%Nd" format string where N is the digit count of `highest`,
// so all line numbers in a report end up right-aligned.
func formatLineNumber(line int, highest int) string {
	return fmt.Sprintf(
		fmt.Sprintf("%%%dd", digits(highest)), // generate a format string with the correct padding number
		line,
	)
}
// digits returns the number of decimal digits in the given number.
// The sign of a negative number does not count as a digit; the original
// Log10 of a negative value produced NaN and an unspecified int conversion.
func digits(n int) int {
	if n == 0 {
		return 1
	}
	return int(math.Log10(math.Abs(float64(n)))) + 1
}
package geometry
import (
"errors"
)
// TriangleZZ is a triangle over three PointZZ vertices together with its
// precomputed axis-aligned bounding extent.
type TriangleZZ struct {
	p1     *PointZZ
	p2     *PointZZ
	p3     *PointZZ
	extent Extent // bounding box, computed once in CreateTriangleZZ
}
// CreateTriangleZZ builds a TriangleZZ from three vertices and computes its
// axis-aligned bounding extent.
func CreateTriangleZZ(a *PointZZ, b *PointZZ, c *PointZZ) TriangleZZ {
	// Seed the bounds from the first vertex instead of the previous hard-coded
	// +/-180 sentinels; the old seeds silently produced a wrong extent for any
	// coordinate outside [-180, 180] (e.g. projected coordinate systems).
	minx, maxx := a.X, a.X
	miny, maxy := a.Y, a.Y
	for _, p := range []*PointZZ{b, c} {
		if p.X < minx {
			minx = p.X
		}
		if p.X > maxx {
			maxx = p.X
		}
		if p.Y < miny {
			miny = p.Y
		}
		if p.Y > maxy {
			maxy = p.Y
		}
	}
	e := Extent{LowerLeft: Point{X: minx, Y: miny}, UpperRight: Point{X: maxx, Y: maxy}}
	return TriangleZZ{p1: a, p2: b, p3: c, extent: e}
}
//https://codeplea.com/triangular-interpolation
// GetValue barycentrically interpolates the SWL and Hm0 samples at (x, y) for
// time index zidx. Returns (-9999, -9999, error) when (x, y) lies outside the
// triangle, i.e. when any barycentric weight is negative.
// NOTE(review): a degenerate (zero-area) triangle makes invDenom infinite and
// the weights NaN; NaN fails the >= 0 checks, so the point is reported outside.
func (t TriangleZZ) GetValue(x float64, y float64, zidx int) (float64, float64, error) {
	invDenom := 1 / ((t.p2.Y-t.p3.Y)*(t.p1.X-t.p3.X) + (t.p3.X-t.p2.X)*(t.p1.Y-t.p3.Y))
	w1 := ((t.p2.Y-t.p3.Y)*(x-t.p3.X) + (t.p3.X-t.p2.X)*(y-t.p3.Y)) * invDenom
	w2 := ((t.p3.Y-t.p1.Y)*(x-t.p3.X) + (t.p1.X-t.p3.X)*(y-t.p3.Y)) * invDenom
	w3 := 1.0 - w1 - w2
	if w1 >= 0 && w2 >= 0 && w3 >= 0 {
		return (w1*t.p1.ZSwl[zidx] + w2*t.p2.ZSwl[zidx] + w3*t.p3.ZSwl[zidx]), (w1*t.p1.ZHm0[zidx] + w2*t.p2.ZHm0[zidx] + w3*t.p3.ZHm0[zidx]), nil
	}
	return -9999, -9999, errors.New("Point Outside Triangle")
}
// GetValues interpolates, for every time step, the water depth (interpolated
// SWL minus interpolated ground elevation) and Hm0 at (x, y) using barycentric
// weights. Returns single-element -9999 slices and an error when the point is
// outside the triangle.
// NOTE(review): slice lengths follow t.p1.ZSwl — assumes all three vertices
// carry the same number of time steps; confirm against the data loader.
func (t TriangleZZ) GetValues(x float64, y float64) ([]float64, []float64, error) {
	invDenom := 1 / ((t.p2.Y-t.p3.Y)*(t.p1.X-t.p3.X) + (t.p3.X-t.p2.X)*(t.p1.Y-t.p3.Y))
	w1 := ((t.p2.Y-t.p3.Y)*(x-t.p3.X) + (t.p3.X-t.p2.X)*(y-t.p3.Y)) * invDenom
	w2 := ((t.p3.Y-t.p1.Y)*(x-t.p3.X) + (t.p1.X-t.p3.X)*(y-t.p3.Y)) * invDenom
	w3 := 1.0 - w1 - w2
	lenz := len(t.p1.ZSwl)
	vals := make([]float64, lenz)
	hmos := make([]float64, lenz)
	if w1 >= 0 && w2 >= 0 && w3 >= 0 {
		// Ground elevation is interpolated once; it is time-invariant.
		ele := (w1*t.p1.ZElev + w2*t.p2.ZElev + w3*t.p3.ZElev)
		for i, z := range t.p1.ZSwl {
			swl := (w1*z + w2*t.p2.ZSwl[i] + w3*t.p3.ZSwl[i])
			//should i do data checks on ele?
			vals[i] = swl - ele
			hmos[i] = (w1*t.p1.ZHm0[i] + w2*t.p2.ZHm0[i] + w3*t.p3.ZHm0[i])
		}
		return vals, hmos, nil
	}
	return []float64{-9999}, []float64{-9999}, errors.New("Point Outside Triangle")
}
// Extent returns the triangle's precomputed axis-aligned bounding box.
func (t *TriangleZZ) Extent() Extent {
	return t.extent
}
// HasData reports whether at least one vertex of the triangle carries SWL
// time-series samples.
func (t TriangleZZ) HasData() bool {
	return len(t.p1.ZSwl) > 0 || len(t.p2.ZSwl) > 0 || len(t.p3.ZSwl) > 0
}
func (t *TriangleZZ) Points() []float64 {
return []float64{t.p1.X, t.p1.Y, t.p2.X, t.p2.Y, t.p3.X, t.p3.Y}
} | geometry/trianglezz.go | 0.526586 | 0.457924 | trianglezz.go | starcoder |
package reflects
import (
"fmt"
"reflect"
)
// IsPtr reports whether a is a pointer. A nil interface is not a pointer.
// The nil guard matches IsNumber/IsMap; without it reflect.TypeOf(nil).Kind()
// panics on a nil interface.
func IsPtr(a interface{}) bool {
	if a == nil {
		return false
	}
	return reflect.TypeOf(a).Kind() == reflect.Ptr
}
// IsBool reports whether a is a bool. A nil interface is not a bool.
// The nil guard matches IsNumber/IsMap; without it reflect.TypeOf(nil).Kind()
// panics on a nil interface.
func IsBool(a interface{}) bool {
	if a == nil {
		return false
	}
	return reflect.TypeOf(a).Kind() == reflect.Bool
}
// IsNumber reports whether a is of any numeric kind (signed/unsigned integers
// and floats). A nil interface is not a number.
// NOTE(review): the kind range Int..Float64 also includes Uintptr — confirm
// that treating uintptr as a number is intended.
func IsNumber(a interface{}) bool {
	if a == nil {
		return false
	}
	kind := reflect.TypeOf(a).Kind()
	return reflect.Int <= kind && kind <= reflect.Float64
}
// IsInteger reports whether a is a signed integer (int, int8..int64).
// A nil interface is not an integer; the guard prevents the panic the
// original code hit on reflect.TypeOf(nil).Kind().
func IsInteger(a interface{}) bool {
	if a == nil {
		return false
	}
	kind := reflect.TypeOf(a).Kind()
	return reflect.Int <= kind && kind <= reflect.Int64
}
// IsUnsignedInteger reports whether a is an unsigned integer (uint, uint8..uint64).
// A nil interface is not an unsigned integer; the guard prevents the panic
// the original code hit on reflect.TypeOf(nil).Kind().
func IsUnsignedInteger(a interface{}) bool {
	if a == nil {
		return false
	}
	kind := reflect.TypeOf(a).Kind()
	return reflect.Uint <= kind && kind <= reflect.Uint64
}
// IsFloat reports whether a is a float32 or float64.
// A nil interface is not a float; the guard prevents the panic the original
// code hit on reflect.TypeOf(nil).Kind().
func IsFloat(a interface{}) bool {
	if a == nil {
		return false
	}
	kind := reflect.TypeOf(a).Kind()
	return reflect.Float32 <= kind && kind <= reflect.Float64
}
// ToInteger converts any numeric value to int64; floats are truncated toward
// zero and unsigned values are reinterpreted (may wrap for values > MaxInt64).
// Panics when a is not a number.
func ToInteger(a interface{}) int64 {
	if IsInteger(a) {
		return reflect.ValueOf(a).Int()
	} else if IsUnsignedInteger(a) {
		return int64(reflect.ValueOf(a).Uint())
	} else if IsFloat(a) {
		return int64(reflect.ValueOf(a).Float())
	} else {
		panic(fmt.Errorf("Expected a number! Got <%T> %#v", a, a))
	}
}
// ToUnsignedInteger converts any numeric value to uint64; floats are truncated
// and negative signed values wrap around. Panics when a is not a number.
func ToUnsignedInteger(a interface{}) uint64 {
	if IsInteger(a) {
		return uint64(reflect.ValueOf(a).Int())
	} else if IsUnsignedInteger(a) {
		return reflect.ValueOf(a).Uint()
	} else if IsFloat(a) {
		return uint64(reflect.ValueOf(a).Float())
	} else {
		panic(fmt.Errorf("Expected a number! Got <%T> %#v", a, a))
	}
}
// ToFloat converts any numeric value to float64 (large integers may lose
// precision beyond 2^53). Panics when a is not a number.
func ToFloat(a interface{}) float64 {
	if IsInteger(a) {
		return float64(reflect.ValueOf(a).Int())
	} else if IsUnsignedInteger(a) {
		return float64(reflect.ValueOf(a).Uint())
	} else if IsFloat(a) {
		return reflect.ValueOf(a).Float()
	} else {
		panic(fmt.Errorf("Expected a number! Got <%T> %#v", a, a))
	}
}
// IsError reports whether a implements the error interface.
func IsError(a interface{}) bool {
	_, ok := a.(error)
	return ok
}
// IsChan reports whether a is a non-nil channel.
// Note: a typed nil channel (e.g. (chan int)(nil)) returns false because of
// the IsNil guard.
func IsChan(a interface{}) bool {
	if IsNil(a) {
		return false
	}
	return reflect.TypeOf(a).Kind() == reflect.Chan
}
// IsMap reports whether a is a map. A nil interface is not a map, but a typed
// nil map value still is (only `a == nil` is guarded, unlike IsChan).
func IsMap(a interface{}) bool {
	if a == nil {
		return false
	}
	return reflect.TypeOf(a).Kind() == reflect.Map
}
// IsArrayOrSlice reports whether a is an array or a slice value.
// A nil interface is neither.
func IsArrayOrSlice(a interface{}) bool {
	if a == nil {
		return false
	}
	k := reflect.TypeOf(a).Kind()
	return k == reflect.Array || k == reflect.Slice
}
// IsString reports whether a's kind is string (includes named string types).
// A nil interface is not a string.
func IsString(a interface{}) bool {
	if a == nil {
		return false
	}
	return reflect.TypeOf(a).Kind() == reflect.String
}
// IsFunc reports whether a is a non-nil function value.
// Note: a typed nil func returns false because of the IsNil guard.
func IsFunc(a interface{}) bool {
	if IsNil(a) {
		return false
	}
	return reflect.TypeOf(a).Kind() == reflect.Func
}
// ToString extracts a string representation from a, reporting success.
// Preference order: plain string, then []byte, then fmt.Stringer; anything
// else yields ("", false).
func ToString(a interface{}) (string, bool) {
	switch v := a.(type) {
	case string:
		return v, true
	case []byte:
		return string(v), true
	case fmt.Stringer:
		return v.String(), true
	default:
		return "", false
	}
}
// LengthOf returns len(a) for maps, arrays, strings, channels and slices,
// reporting success; any other value (including nil) yields (0, false).
func LengthOf(a interface{}) (int, bool) {
	if a == nil {
		return 0, false
	}
	v := reflect.ValueOf(a)
	switch v.Kind() {
	case reflect.Map, reflect.Array, reflect.String, reflect.Chan, reflect.Slice:
		return v.Len(), true
	default:
		return 0, false
	}
}
// CapOf returns cap(a) for arrays, channels and slices, reporting success;
// any other value (including nil) yields (0, false).
func CapOf(a interface{}) (int, bool) {
	if a == nil {
		return 0, false
	}
	v := reflect.ValueOf(a)
	switch v.Kind() {
	case reflect.Array, reflect.Chan, reflect.Slice:
		return v.Cap(), true
	default:
		return 0, false
	}
}
// IsNil reports whether a is nil: either a nil interface, or a typed nil of a
// nilable kind (chan, func, map, pointer, slice, interface).
func IsNil(a interface{}) bool {
	if a == nil {
		return true
	}
	v := reflect.ValueOf(a)
	switch v.Kind() {
	case reflect.Chan, reflect.Func, reflect.Map, reflect.Ptr, reflect.Slice, reflect.Interface:
		return v.IsNil()
	default:
		return false
	}
}
package adjust
import (
"image"
"image/color"
"math"
"github.com/anthonynsimon/bild/math/f64"
"github.com/anthonynsimon/bild/util"
)
// Brightness returns a copy of the image with the adjusted brightness.
// Change is the normalized amount of change to be applied (range -1.0 to 1.0).
// A 256-entry lookup table precomputes the scaled, clamped value for every
// channel intensity so the per-pixel function is a table read; alpha is kept.
func Brightness(src image.Image, change float64) *image.RGBA {
	lookup := make([]uint8, 256)
	for i := 0; i < 256; i++ {
		lookup[i] = uint8(f64.Clamp(float64(i)*(1+change), 0, 255))
	}
	fn := func(c color.RGBA) color.RGBA {
		return color.RGBA{lookup[c.R], lookup[c.G], lookup[c.B], c.A}
	}
	img := Apply(src, fn)
	return img
}
// Gamma returns a gamma corrected copy of the image. Provided gamma param must be larger than 0.
// The value is floored at 0.00001 to avoid division by zero in the exponent.
// A 256-entry lookup table maps each channel intensity once; alpha is kept.
func Gamma(src image.Image, gamma float64) *image.RGBA {
	gamma = math.Max(0.00001, gamma)
	lookup := make([]uint8, 256)
	for i := 0; i < 256; i++ {
		lookup[i] = uint8(f64.Clamp(math.Pow(float64(i)/255, 1.0/gamma)*255, 0, 255))
	}
	fn := func(c color.RGBA) color.RGBA {
		return color.RGBA{lookup[c.R], lookup[c.G], lookup[c.B], c.A}
	}
	img := Apply(src, fn)
	return img
}
// Contrast returns a copy of the image with its difference in high and low values adjusted by the change param.
// Change is the normalized amount of change to be applied, in the range of -1.0 to 1.0.
// If Change is set to 0.0, then the values remain the same, if it's set to 0.5, then all values will be moved 50% away from the middle value.
// The per-channel transform scales the distance from mid-gray (0.5 in
// normalized space) by (1 + change) via a 256-entry lookup table; alpha is kept.
func Contrast(src image.Image, change float64) *image.RGBA {
	lookup := make([]uint8, 256)
	for i := 0; i < 256; i++ {
		lookup[i] = uint8(f64.Clamp(((((float64(i)/255)-0.5)*(1+change))+0.5)*255, 0, 255))
	}
	fn := func(c color.RGBA) color.RGBA {
		return color.RGBA{lookup[c.R], lookup[c.G], lookup[c.B], c.A}
	}
	img := Apply(src, fn)
	return img
}
// Hue adjusts the overall hue of the provided image and returns the result.
// Parameter change is the amount of change to be applied and is of the range
// -360 to 360. It corresponds to the hue angle in the HSL color model.
func Hue(img image.Image, change int) *image.RGBA {
	fn := func(c color.RGBA) color.RGBA {
		h, s, l := util.RGBToHSL(c)
		// Normalize into [0, 360): Go's % keeps the sign of the dividend, so
		// the original expression produced a negative hue angle for negative
		// change values.
		h = float64(((int(h)+change)%360 + 360) % 360)
		outColor := util.HSLToRGB(h, s, l)
		outColor.A = c.A
		return outColor
	}
	return Apply(img, fn)
}
// Saturation adjusts the saturation of the image and returns the result.
// Parameter change is the amount of change to be applied and is of the range
// -1.0 to 1.0 (-1.0 being -100% and 1.0 being 100%).
func Saturation(img image.Image, change float64) *image.RGBA {
fn := func(c color.RGBA) color.RGBA {
h, s, l := util.RGBToHSL(c)
s = f64.Clamp(s*(1+change), 0.0, 1.0)
outColor := util.HSLToRGB(h, s, l)
outColor.A = c.A
return outColor
}
return Apply(img, fn)
} | adjust/adjustment.go | 0.879794 | 0.589923 | adjustment.go | starcoder |
package server
import (
"math"
"github.com/armsnyder/othelgo/pkg/common"
)
// doAIPlayerMove takes a turn as the AI player.
// The AI always plays as disk 2. Difficulty selects the minimax search depth:
// 1 -> 4 plies, 2 -> 6 plies, anything else -> 1 ply.
// Returns the resulting board and the (x, y) location of the chosen move.
func doAIPlayerMove(board common.Board, difficulty int) (common.Board, [2]int) {
	aiState := &aiGameState{
		board:            board,
		maximizingPlayer: 2,
		turn:             2,
	}
	var depth int
	switch difficulty {
	default:
		depth = 1
	case 1:
		depth = 4
	case 2:
		depth = 6
	}
	move := findMoveUsingMinimax(aiState, depth)
	return aiState.moves[move], aiState.moveLocations[move]
}
// aiGameState implements the othelgo domain-specific logic needed by the AI.
type aiGameState struct {
	board            common.Board
	turn             common.Disk // player to move in this state
	maximizingPlayer common.Disk // the disk the AI is maximizing for
	moves            []common.Board // lazily built by MoveCount; one board per legal move
	moveLocations    [][2]int // (x, y) that produced the corresponding moves entry
}
// Score evaluates the board from the maximizing player's point of view.
// Terminal positions score +/-Inf (win/loss) or 0 (draw); otherwise the raw
// disk difference is blended with a positional modifier that fades as the
// board fills up.
func (a *aiGameState) Score() float64 {
	p1, p2 := common.KeepScore(a.board)
	if a.maximizingPlayer == 1 {
		p1, p2 = p2, p1
	}
	// After the optional swap, p2 is always the maximizing player's count.
	if common.GameOver(a.board) {
		switch {
		case p2 > p1:
			return math.Inf(1)
		case p2 < p1:
			// Was `p1 < p2`, which duplicated the first case and made every
			// lost terminal position score as a draw (0).
			return math.Inf(-1)
		default:
			return 0
		}
	}
	trueScoreDelta := float64(p2 - p1)
	scoreModifier := a.scoreModifier(2) - a.scoreModifier(1)
	// Modifier strength decreases as the board fills up.
	scoreModifier *= a.percentFull()
	return trueScoreDelta + scoreModifier
}
// scoreModifier returns a positional bonus for the given player:
// +0.5 per occupied edge cell and +2 per occupied corner. Corners and edges
// are weighted because they cannot (corners) or rarely (edges) be flipped.
func (a *aiGameState) scoreModifier(player common.Disk) (score float64) {
	endIndex := common.BoardSize - 1
	// Edges are valuable.
	edgeScore := 0.5
	for i := 1; i < endIndex; i++ {
		if a.board[i][0] == player {
			score += edgeScore
		}
		if a.board[0][i] == player {
			score += edgeScore
		}
		if a.board[i][endIndex] == player {
			score += edgeScore
		}
		if a.board[endIndex][i] == player {
			score += edgeScore
		}
	}
	// Corners are highly valuable.
	cornerScore := float64(2)
	if a.board[0][0] == player {
		score += cornerScore
	}
	if a.board[0][endIndex] == player {
		score += cornerScore
	}
	if a.board[endIndex][0] == player {
		score += cornerScore
	}
	if a.board[endIndex][endIndex] == player {
		score += cornerScore
	}
	return score
}
// percentFull returns the fraction of board cells that are EMPTY
// (1.0 for an empty board, 0.0 for a full one).
// NOTE(review): the name is misleading — it measures emptiness, not fullness.
// Score multiplies the positional modifier by this value so the heuristic
// fades as the board fills, which matches the emptiness semantics; rename
// with care since Score depends on it.
func (a *aiGameState) percentFull() float64 {
	freeCells := 0
	for x := 0; x < common.BoardSize; x++ {
		for y := 0; y < common.BoardSize; y++ {
			if a.board[x][y] == 0 {
				freeCells++
			}
		}
	}
	return float64(freeCells) / common.BoardSize / common.BoardSize
}
// AITurn reports whether the maximizing (AI) player is to move in this state.
func (a *aiGameState) AITurn() bool {
	return a.turn == a.maximizingPlayer
}
// MoveCount lazily enumerates every legal move for the current player and
// returns how many there are. The non-nil empty slice marks "already
// computed" so a state with zero moves is not re-enumerated.
func (a *aiGameState) MoveCount() int {
	if a.moves == nil {
		a.moves = []common.Board{}
		for x := 0; x < common.BoardSize; x++ {
			for y := 0; y < common.BoardSize; y++ {
				if board, updated := common.ApplyMove(a.board, x, y, a.turn); updated {
					a.moves = append(a.moves, board)
					a.moveLocations = append(a.moveLocations, [2]int{x, y})
				}
			}
		}
	}
	return len(a.moves)
}
func (a *aiGameState) Move(i int) AIGameState {
a.MoveCount() // Lazy initialize moves
nextState := &aiGameState{
board: a.moves[i],
turn: a.turn,
}
if common.HasMoves(a.moves[i], a.turn%2+1) {
nextState.turn = a.turn%2 + 1
}
return nextState
} | pkg/server/ai.go | 0.649912 | 0.558989 | ai.go | starcoder |
package binarytree
import (
"github.com/Thrimbda/dune/utils"
)
// RBNode is a red-black tree node: a BinNodePtr (element + child/parent
// links) plus a color bit (see the black/red constants).
type RBNode struct {
	BinNodePtr
	color bool
}
// Node colors. The bool encoding makes black the zero value.
const (
	black = true
	red   = false
)
// black for black, red for red
// SetColor sets the node's color. The receiver must be a pointer: the
// original value receiver assigned to a copy, so every recoloring in the
// insert/delete fixups was silently lost.
func (node *RBNode) SetColor(color bool) {
	node.color = color
}
// Color returns the node's color (true = black, false = red).
func (node RBNode) Color() bool {
	return node.color
}
// RBTree is a red-black binary search tree.
// NOTE(review): most mutating methods use a value receiver, so assignments to
// rbt.root act on a copy and are lost to the caller; they should take *RBTree.
type RBTree struct {
	root *RBNode
}
// RBNil is the shared sentinel leaf: a black node that stands in for nil
// children and the root's parent, as in the textbook algorithm.
var RBNil = &RBNode{BinNodePtr{nil, nil, nil, nil}, black}
//should I set the nil as an attribute of a RB-Tree?
// Insert adds value as a red leaf using standard BST descent ordered by
// utils.LessComparator, then rebalances via RBInsertFixUp.
// NOTE(review): value receiver — `rbt.root = node` mutates a copy, so an
// insert into an empty tree is lost to the caller; receiver should be *RBTree.
func (rbt RBTree) Insert(value interface{}) {
	father := RBNil
	// `brother` is the descent cursor: the node currently being compared.
	brother := rbt.root
	node := &RBNode{BinNodePtr{value, RBNil, RBNil, nil}, red}
	for brother != RBNil {
		father = brother
		if utils.LessComparator(node.Element(), brother.Element()) {
			brother = brother.Left().(*RBNode)
		} else {
			brother = brother.Right().(*RBNode)
		}
	}
	node.SetParent(father)
	if father == RBNil {
		rbt.root = node
	} else if utils.LessComparator(node.Element(), father.Element()) {
		father.SetLeft(node)
	} else {
		father.SetRight(node)
	}
	rbt.RBInsertFixUp(node)
}
// RBInsertFixUp restores the red-black invariants after Insert by recoloring
// and rotating while the new node's parent is red.
// NOTE(review): this deviates from the textbook (CLRS) structure — after the
// red-uncle branch (case 1) the code still falls through to the recolor +
// rotate steps, which CLRS only performs when the uncle is black. Verify the
// rebalancing against the reference algorithm before trusting it.
// NOTE(review): value receiver — the root recolor at the bottom acts on a
// copy of the tree header.
func (rbt RBTree) RBInsertFixUp(node *RBNode) {
	var uncle *RBNode
	for !node.Parent().(*RBNode).Color() {
		if node.Parent() == node.Parent().Parent().Left() {
			uncle = node.Parent().Parent().Right().(*RBNode)
			if !uncle.Color() {
				// Case 1: red uncle — push blackness down from grandparent.
				node.Parent().(*RBNode).SetColor(black)
				uncle.SetColor(black)
				node.Parent().Parent().(*RBNode).SetColor(red)
				node = node.Parent().Parent().(*RBNode)
			} else if node == node.Parent().Right().(*RBNode) {
				// Case 2: black uncle, node is an inner child — rotate to case 3.
				node = node.Parent().(*RBNode)
				rbt.leftRotate(node)
			}
			// Case 3: recolor and rotate the grandparent.
			node.Parent().(*RBNode).SetColor(black)
			node.Parent().Parent().(*RBNode).SetColor(red)
			rbt.rightRotate(node.Parent().Parent().(*RBNode))
		} else {
			// Mirror image of the branch above.
			uncle = node.Parent().Parent().Left().(*RBNode)
			if !uncle.Color() {
				node.Parent().(*RBNode).SetColor(black)
				uncle.SetColor(black)
				node.Parent().Parent().(*RBNode).SetColor(red)
				node = node.Parent().Parent().(*RBNode)
			} else if node == node.Parent().Left().(*RBNode) {
				node = node.Parent().(*RBNode)
				rbt.rightRotate(node)
			}
			node.Parent().(*RBNode).SetColor(black)
			node.Parent().Parent().(*RBNode).SetColor(red)
			rbt.leftRotate(node.Parent().Parent().(*RBNode))
		}
		rbt.root.SetColor(black)
	}
}
// Search returns a BST view rooted at the node matching key
// (delegates to the shared SearchHelp).
func (rbt RBTree) Search(key int) BST {
	return &RBTree{SearchHelp(rbt.root, key).(*RBNode)}
}
// Delete removes the node matching key, splicing in its successor when it has
// two children, then rebalances.
// NOTE(review): RBDeleteFixUp is only invoked inside the two-children branch;
// in the textbook algorithm the fixup must run whenever the removed/replaced
// node was black, including the single-child cases above. Verify against CLRS.
// NOTE(review): value receiver — root updates via RBTransplant act on a copy.
func (rbt RBTree) Delete(key int) {
	node := SearchHelp(rbt.root, key).(*RBNode)
	var tracker *RBNode
	replacement := node
	originColor := replacement.Color()
	if node.Left() == RBNil {
		tracker = node.Right().(*RBNode)
		rbt.RBTransplant(node, tracker)
	} else if node.Right() == RBNil {
		tracker = node.Left().(*RBNode)
		rbt.RBTransplant(node, tracker)
	} else {
		// Two children: replace with the in-order successor (minimum of the
		// right subtree) and track the subtree that takes its place.
		replacement = MinimumHelp(node.Right()).(*RBNode)
		originColor = replacement.Color()
		tracker = replacement.Right().(*RBNode)
		if replacement.Parent() != node {
			rbt.RBTransplant(replacement, replacement.Right().(*RBNode))
			replacement.SetRight(node.Right())
			replacement.Right().SetParent(node.Parent())
		}
		rbt.RBTransplant(node, replacement)
		replacement.SetLeft(node.Left())
		replacement.Left().SetParent(replacement)
		replacement.SetColor(node.Color())
		if originColor {
			rbt.RBDeleteFixUp(tracker)
		}
	}
}
// RBDeleteFixUp restores the red-black invariants after a deletion that
// removed a black node, by rotating and recoloring around the tracker node.
// NOTE(review): the loop condition `node == rbt.root && node.Color()` looks
// inverted versus the reference algorithm, which loops while node is NOT the
// root and IS black — as written the body only runs when node is the black
// root. The sibling selection `node != node.Parent().Left()` also appears
// mirrored relative to CLRS. Verify the whole routine against the reference
// pseudocode before relying on it.
func (rbt RBTree) RBDeleteFixUp(node *RBNode) {
	var brother *RBNode
	for node == rbt.root && node.Color() {
		if node != node.Parent().Left() {
			brother = node.Parent().Right().(*RBNode)
			if !brother.Color() {
				// Case 1: red sibling — rotate to get a black sibling.
				brother.SetColor(black)
				node.Parent().(*RBNode).SetColor(red)
				rbt.leftRotate(node.Parent().(*RBNode))
				brother = node.Parent().Right().(*RBNode)
			}
			if brother.Left().(*RBNode).Color() && brother.Right().(*RBNode).Color() {
				// Case 2: black sibling with two black children — recolor and move up.
				brother.SetColor(red)
				node = node.Parent().(*RBNode)
			} else if !brother.Right().(*RBNode).Color() {
				// Case 3: red near child — rotate the sibling.
				brother.Right().(*RBNode).SetColor(black)
				brother.SetColor(red)
				rbt.rightRotate(brother)
				brother = node.Parent().Right().(*RBNode)
			}
			// Case 4: rotate the parent and terminate.
			brother.SetColor(node.Parent().(*RBNode).Color())
			node.Parent().(*RBNode).SetColor(black)
			brother.Right().(*RBNode).SetColor(black)
			rbt.leftRotate(node.Parent().(*RBNode))
			node = rbt.root
		} else {
			// Mirror image of the branch above.
			brother = node.Parent().Left().(*RBNode)
			if !brother.Color() {
				brother.SetColor(black)
				node.Parent().(*RBNode).SetColor(red)
				rbt.rightRotate(node.Parent().(*RBNode))
				brother = node.Parent().Left().(*RBNode)
			}
			if brother.Right().(*RBNode).Color() && brother.Left().(*RBNode).Color() {
				brother.SetColor(red)
				node = node.Parent().(*RBNode)
			} else if !brother.Left().(*RBNode).Color() {
				brother.Left().(*RBNode).SetColor(black)
				brother.SetColor(red)
				rbt.leftRotate(brother)
				brother = node.Parent().Left().(*RBNode)
			}
			brother.SetColor(node.Parent().(*RBNode).Color())
			node.Parent().(*RBNode).SetColor(black)
			brother.Left().(*RBNode).SetColor(black)
			rbt.rightRotate(node.Parent().(*RBNode))
			node = rbt.root
		}
	}
	node.SetColor(black)
}
// RBTransplant replaces the subtree rooted at u with the subtree rooted at v,
// updating u's parent's child link and v's parent pointer.
// NOTE(review): value receiver — the `rbt.root = v` branch updates a copy of
// the tree header and is lost to the caller.
func (rbt RBTree) RBTransplant(u, v *RBNode) {
	if u.Parent() == RBNil {
		rbt.root = v
	} else if u == u.Parent().Left() {
		u.Parent().SetLeft(v)
	} else {
		u.Parent().SetRight(v)
	}
	v.SetParent(u.Parent())
}
// Predecessor returns a BST view rooted at the in-order predecessor of the root.
func (rbt RBTree) Predecessor() BST {
	return &RBTree{PredecessorHelp(rbt.root).(*RBNode)}
}
// Successor returns a BST view rooted at the in-order successor of the root.
func (rbt RBTree) Successor() BST {
	return &RBTree{SuccessorHelp(rbt.root).(*RBNode)}
}
// Minimum returns a BST view rooted at the smallest node of the tree.
func (rbt RBTree) Minimum() BST {
	return &RBTree{MinimumHelp(rbt.root).(*RBNode)}
}
// Maximum returns a BST view rooted at the largest node of the tree.
func (rbt RBTree) Maximum() BST {
	return &RBTree{MaximumHelp(rbt.root).(*RBNode)}
}
// InorderWalk visits the tree in ascending key order
// (delegates to the shared inorderWalkHelp).
func (rbt RBTree) InorderWalk() {
	inorderWalkHelp(rbt.root)
}
// leftRotate rotates the subtree rooted at node to the left: node's right
// child takes node's place and node becomes its left child.
func (rbt RBTree) leftRotate(node *RBNode) {
	rChild := node.Right().(*RBNode)
	node.SetRight(rChild.Left())
	if rChild.Left() != RBNil {
		// Re-parent the transplanted subtree. The original called
		// SetRight(node) here, which corrupted the child links instead of
		// updating the parent pointer.
		rChild.Left().SetParent(node)
	}
	rChild.SetParent(node.Parent())
	if node.Parent() == RBNil {
		// NOTE(review): value receiver — this root update is lost to the
		// caller; the receiver should be *RBTree.
		rbt.root = rChild
	} else if node == node.Parent().Left() {
		node.Parent().SetLeft(rChild)
	} else {
		node.Parent().SetRight(rChild)
	}
	rChild.SetLeft(node)
	node.SetParent(rChild)
}
func (rbt RBTree) rightRotate(node *RBNode) {
lChild := node.Right().(*RBNode)
node.SetLeft(lChild.Right())
if lChild.Right() != RBNil {
lChild.Right().SetParent(node)
}
lChild.SetParent(node.Parent())
if node.Parent() == RBNil {
rbt.root = lChild
} else if node == node.Parent().Left() {
node.Parent().SetLeft(lChild)
} else {
node.Parent().SetRight(lChild)
}
lChild.SetRight(node)
node.SetParent(lChild)
} | binarytree/red_black_tree.go | 0.563498 | 0.562056 | red_black_tree.go | starcoder |
package task
import (
"fmt"
"strings"
"github.com/spf13/cobra"
"github.com/PaddlePaddle/PaddleDTX/dai/blockchain"
pbCom "github.com/PaddlePaddle/PaddleDTX/dai/protos/common"
requestClient "github.com/PaddlePaddle/PaddleDTX/dai/requester/client"
"github.com/PaddlePaddle/PaddleDTX/dai/util/file"
"github.com/PaddlePaddle/PaddleDTX/xdb/errorx"
)
// Flag storage for the publish command; bound to cobra flags in init.
var (
	files       string
	executors   string
	algorithm   string
	taskType    string
	taskName    string
	label       string
	labelName   string
	regMode     string
	regParam    float64
	alpha       float64
	amplitude   float64
	accuracy    uint64
	taskId      string
	description string
	psiLabel    string
	batchSize   uint64

	ev        bool  // whether perform model evaluation
	evRule    int32 // evRule is the way to evaluate model, 0 means `Random Split`, 1 means `Cross Validation`, 2 means `Leave One Out`
	percentLO int32 // percentage to leave out as validation set when perform model evaluation in the way of `Random Split`
	folds     int32 // number of folds, 5 or 10 supported, default `10`, a optional parameter when perform model evaluation in the way of `Cross Validation`
	shuffle   bool  // whether to randomly disorder the samples before dividion, default `false`, a optional parameter when perform model evaluation in the way of `Cross Validation`

	le         bool  // whether perform live model evaluation
	lPercentLO int32 // percentage to leave out as validation set when perform live model evaluation
)
// checkTaskPublishParams check mpc task parameters
// verify if algorithm, taskType, regMode is legal.
// Returns the resolved algorithm, task type and regularization mode; an
// unknown regMode silently falls back to no regularization.
func checkTaskPublishParams() (pbCom.Algorithm, pbCom.TaskType, pbCom.RegMode, error) {
	var pAlgo pbCom.Algorithm
	var pType pbCom.TaskType
	var pRegMode pbCom.RegMode
	// task algorithm name check
	if algo, ok := blockchain.VlAlgorithmListName[algorithm]; ok {
		pAlgo = algo
	} else {
		return pAlgo, pType, pRegMode, errorx.New(errorx.ErrCodeParam, "algorithm only support linear-vl or logistic-vl")
	}
	// task type check. Use a fresh name for the map value: the original
	// shadowed the package-level `taskType`, so the error message below
	// printed the zero map value instead of the user's invalid input.
	if t, ok := blockchain.TaskTypeListName[taskType]; ok {
		pType = t
	} else {
		return pAlgo, pType, pRegMode, errorx.New(errorx.ErrCodeParam, "invalid task type: %s", taskType)
	}
	// task regMode check, no regularization if not set
	if mode, ok := blockchain.RegModeListName[regMode]; ok {
		pRegMode = mode
	} else {
		pRegMode = pbCom.RegMode_Reg_None
	}
	return pAlgo, pType, pRegMode, nil
}
// publishCmd publishes FL task
// It validates the CLI parameters, packs them into pbCom.TaskParams, loads the
// requester's private key (from --privkey or the key file) and submits the
// task via the requester client, printing the resulting task ID.
// NOTE(review): the validation Printf messages below lack trailing newlines.
var publishCmd = &cobra.Command{
	Use:   "publish",
	Short: "publish a task, can be a training task or a prediction task",
	Run: func(cmd *cobra.Command, args []string) {
		client, err := requestClient.GetRequestClient(configPath)
		if err != nil {
			fmt.Printf("GetRequestClient failed: %v\n", err)
			return
		}
		algo, taskType, regMode, err := checkTaskPublishParams()
		if err != nil {
			fmt.Printf("failed to check task publish algoParam : %v\n", err)
			return
		}
		// check params about evaluation
		// NOTE(review): the evaluation flags are validated even when -ev/-le
		// are not set; confirm that rejecting unrelated defaults is intended.
		if evRule < 0 || evRule > 2 {
			fmt.Printf("invalid `evRule`, it should be 0 or 1 or 2")
			return
		}
		if percentLO <= 0 || percentLO >= 100 {
			fmt.Printf("invalid `plo`, it should in the range of (0,100)")
			return
		}
		if folds != 5 && folds != 10 {
			fmt.Printf("invalid `folds`, it should be 5 or 10")
			return
		}
		if lPercentLO <= 0 || lPercentLO >= 100 {
			fmt.Printf("invalid `lplo`, it should in the range of (0,100)")
			return
		}
		// pack `pbCom.TaskParams`
		algorithmParams := pbCom.TaskParams{
			Algo:        algo,
			TaskType:    taskType,
			ModelTaskID: taskId,
			TrainParams: &pbCom.TrainParams{
				Label:     label,
				LabelName: labelName,
				RegMode:   regMode,
				RegParam:  regParam,
				Alpha:     alpha,
				Amplitude: amplitude,
				Accuracy:  int64(accuracy),
				BatchSize: int64(batchSize),
			},
		}
		// set `Evaluation` part
		if ev {
			algorithmParams.EvalParams = &pbCom.EvaluationParams{
				Enable:   true,
				EvalRule: pbCom.EvaluationRule(evRule),
			}
			if algorithmParams.EvalParams.EvalRule == pbCom.EvaluationRule_ErRandomSplit {
				algorithmParams.EvalParams.RandomSplit = &pbCom.RandomSplit{PercentLO: percentLO}
			} else if algorithmParams.EvalParams.EvalRule == pbCom.EvaluationRule_ErCrossVal {
				algorithmParams.EvalParams.Cv = &pbCom.CrossVal{
					Folds:   folds,
					Shuffle: shuffle,
				}
			}
		}
		// set `LiveEvaluation` part
		if le {
			algorithmParams.LivalParams = &pbCom.LiveEvaluationParams{
				Enable: true,
				RandomSplit: &pbCom.RandomSplit{
					PercentLO: lPercentLO,
				},
			}
		}
		// Fall back to the on-disk key when --privkey was not supplied.
		if privateKey == "" {
			privateKeyBytes, err := file.ReadFile(keyPath, file.PrivateKeyFileName)
			if err != nil {
				fmt.Printf("Read privateKey failed, err: %v\n", err)
				return
			}
			privateKey = strings.TrimSpace(string(privateKeyBytes))
		}
		taskID, err := client.Publish(requestClient.PublishOptions{
			PrivateKey:  privateKey,
			Files:       files,
			Executors:   executors,
			TaskName:    taskName,
			AlgoParam:   algorithmParams,
			Description: description,
			PSILabels:   psiLabel,
		})
		if err != nil {
			fmt.Printf("Publish task failed: %v\n", err)
			return
		}
		fmt.Println("TaskID:", taskID)
	},
}
func init() {
rootCmd.AddCommand(publishCmd)
publishCmd.Flags().StringVarP(&taskName, "name", "n", "", "task's name")
publishCmd.Flags().StringVarP(&privateKey, "privkey", "k", "", "requester's private key hex string")
publishCmd.Flags().StringVarP(&keyPath, "keyPath", "", "./keys", "requester's key path")
publishCmd.Flags().StringVarP(&taskType, "type", "t", "", "task type, 'train' or 'predict'")
publishCmd.Flags().StringVarP(&algorithm, "algorithm", "a", "", "algorithm assigned to task, 'linear-vl' and 'logistic-vl' are supported")
publishCmd.Flags().StringVarP(&files, "files", "f", "", "sample files IDs with ',' as delimiter, like '123,456'")
publishCmd.Flags().StringVarP(&executors, "executors", "e", "", "executor node names with ',' as delimiter, like 'executor1,executor2'")
// optional params
publishCmd.Flags().StringVarP(&label, "label", "l", "", "target feature for training task")
publishCmd.Flags().StringVar(&labelName, "labelName", "", "target variable required in logistic-vl training")
publishCmd.Flags().StringVarP(&psiLabel, "PSILabel", "p", "", "ID feature name list with ',' as delimiter, like 'id,id', required in vertical task")
publishCmd.Flags().StringVarP(&taskId, "taskId", "i", "", "finished train task ID from which obtain the model, required for predict task")
publishCmd.Flags().StringVar(®Mode, "regMode", "", "regularization mode required in train task, no regularization if not set, options are l1(L1-norm) and l2(L2-norm)")
publishCmd.Flags().Float64Var(®Param, "regParam", 0.1, "regularization parameter required in train task if set regMode")
publishCmd.Flags().Float64Var(&alpha, "alpha", 0.1, "learning rate required in train task")
publishCmd.Flags().Float64Var(&litude, "amplitude", 0.0001, "target difference of costs in two contiguous rounds that determines whether to stop training")
publishCmd.Flags().Uint64Var(&accuracy, "accuracy", 10, "accuracy of homomorphic encryption")
publishCmd.Flags().StringVarP(&description, "description", "d", "", "task description")
publishCmd.Flags().Uint64VarP(&batchSize, "batchSize", "b", 4,
"size of samples for one round of training loop, 0 for BGD(Batch Gradient Descent), non-zero for SGD(Stochastic Gradient Descent) or MBGD(Mini-Batch Gradient Descent)")
// optional params about evaluation
publishCmd.Flags().BoolVar(&ev, "ev", false, "perform model evaluation")
publishCmd.Flags().Int32Var(&evRule, "evRule", 0, "the way to evaluate model, 0 means 'Random Split', 1 means 'Cross Validation', 2 means 'Leave One Out'")
publishCmd.Flags().Int32Var(&folds, "folds", 10, "number of folds, 5 or 10 supported, a optional parameter when perform model evaluation in the way of 'Cross Validation'")
publishCmd.Flags().BoolVar(&shuffle, "shuffle", false, "shuffle the samples before division when perform model evaluation in the way of 'Cross Validation'")
publishCmd.Flags().Int32Var(&percentLO, "plo", 30, "percentage to leave out as validation set when perform model evaluation in the way of 'Random Split'")
// optional params about live evaluation
publishCmd.Flags().BoolVar(&le, "le", false, "perform live model evaluation")
publishCmd.Flags().Int32Var(&lPercentLO, "lplo", 30, "percentage to leave out as validation set when perform live model evaluation")
publishCmd.MarkFlagRequired("name")
publishCmd.MarkFlagRequired("type")
publishCmd.MarkFlagRequired("algorithm")
publishCmd.MarkFlagRequired("files")
publishCmd.MarkFlagRequired("executors")
} | dai/requester/cmd/cli/task/publish.go | 0.58948 | 0.404507 | publish.go | starcoder |
package bdd
import (
"fmt"
"strings"
"github.com/onsi/gomega"
"github.com/onsi/gomega/format"
)
// HasSubstr succeeds if actual is a string or stringer that contains the
// passed-in substring. Errors if the expected argument is not string-like.
var HasSubstr = &matcher{
	minArgs: 1,
	maxArgs: 1,
	name:    "HasSubstr",
	apply: func(actual interface{}, expected []interface{}) Result {
		substr, ok := toString(expected[0])
		if !ok {
			err := fmt.Errorf("expected a string or stringer, got: \n %s", format.Object(expected[0], 1))
			return Result{Error: err}
		}
		// Delegate the actual comparison (and failure messages) to gomega.
		return resultFromGomega(gomega.ContainSubstring(substr), actual)
	},
}
// MatchesRegexp succeeds if actual is a string or stringer that matches the
// passed-in regexp. Errors if either value is not string-like.
var MatchesRegexp = &matcher{
	minArgs: 1,
	maxArgs: 1,
	name:    "MatchesRegexp",
	apply: func(actual interface{}, expected []interface{}) Result {
		regex, ok := toString(expected[0])
		if !ok {
			err := fmt.Errorf("expected a string or stringer, got: \n %s", format.Object(expected[0], 1))
			return Result{Error: err}
		}
		str, ok := toString(actual)
		if !ok {
			err := fmt.Errorf("expected a string or stringer, got: \n %s", format.Object(actual, 1))
			return Result{Error: err}
		}
		// Delegate the regexp matching (and failure messages) to gomega.
		return resultFromGomega(gomega.MatchRegexp(regex), str)
	},
}
// HasSuffix succeeds if actual is a string or stringer that ends with the
// passed-in suffix. Errors if either value is not string-like.
var HasSuffix = &matcher{
	minArgs: 1,
	maxArgs: 1,
	name:    "HasSuffix",
	apply: func(actual interface{}, expected []interface{}) Result {
		str, ok := toString(actual)
		if !ok {
			err := fmt.Errorf("expected a string or stringer, got: \n %s", format.Object(actual, 1))
			return Result{Error: err}
		}
		suffix, ok := toString(expected[0])
		if !ok {
			err := fmt.Errorf("expected a string or stringer, got: \n %s", format.Object(expected[0], 1))
			return Result{Error: err}
		}
		var r Result
		if strings.HasSuffix(str, suffix) {
			r.Success = true
		} else {
			r.FailureMessage = format.Message(actual, " to have suffix ", expected...)
			r.NegatedFailureMessage = format.Message(actual, " not to have suffix ", expected...)
		}
		return r
	},
}
// Regexp succeeds if actual is a string or stringer that matches the
// has the passed-in prefix.
var HasPrefix = &matcher{
minArgs: 1,
maxArgs: 1,
name: "HasSuffix",
apply: func(actual interface{}, expected []interface{}) Result {
str, ok := toString(actual)
if !ok {
err := fmt.Errorf("expected a string or stringer, got: \n %s", format.Object(actual, 1))
return Result{Error: err}
}
prefix, ok := toString(expected[0])
if !ok {
err := fmt.Errorf("expected a string or stringer, got: \n %s", format.Object(expected[0], 1))
return Result{Error: err}
}
var r Result
if strings.HasPrefix(str, prefix) {
r.Success = true
} else {
r.FailureMessage = format.Message(actual, " to have prefix ", expected...)
r.NegatedFailureMessage = format.Message(actual, " not to have prefix ", expected...)
}
return r
},
} | matcher_str.go | 0.556882 | 0.486027 | matcher_str.go | starcoder |
package table
import (
"fmt"
"sort"
)
// SortDirection indicates whether a column should sort by ascending or descending.
type SortDirection int
const (
	// SortDirectionAsc indicates the column should be in ascending order.
	SortDirectionAsc SortDirection = iota
	// SortDirectionDesc indicates the column should be in descending order.
	SortDirectionDesc
)
// SortColumn describes which column should be sorted and how.
type SortColumn struct {
	ColumnKey string        // key of the column to sort by
	Direction SortDirection // ascending or descending
}
// SortByAsc sets the main sorting column to the given key, in ascending order.
// If a previous sort was used, it is replaced by the given column each time
// this function is called. Values are sorted as numbers if possible, or just
// as simple string comparisons if not numbers.
func (m Model) SortByAsc(columnKey string) Model {
	m.sortOrder = []SortColumn{
		{
			ColumnKey: columnKey,
			Direction: SortDirectionAsc,
		},
	}
	return m
}
// SortByDesc sets the main sorting column to the given key, in descending order.
// If a previous sort was used, it is replaced by the given column each time
// this function is called. Values are sorted as numbers if possible, or just
// as simple string comparisons if not numbers.
func (m Model) SortByDesc(columnKey string) Model {
	m.sortOrder = []SortColumn{
		{
			ColumnKey: columnKey,
			Direction: SortDirectionDesc,
		},
	}
	return m
}
// ThenSortByAsc provides a secondary sort after the first, in ascending order.
// Can be chained multiple times, applying to smaller subgroups each time.
// The new column is PREPENDED to sortOrder, so during sorting it is applied
// before the earlier columns.
// NOTE(review): this ordering only yields "secondary sort" semantics if
// getSortedRows applies the columns with a stable sort — confirm there.
func (m Model) ThenSortByAsc(columnKey string) Model {
	m.sortOrder = append([]SortColumn{
		{
			ColumnKey: columnKey,
			Direction: SortDirectionAsc,
		},
	}, m.sortOrder...)
	return m
}
// ThenSortByDesc provides a secondary sort after the first, in descending order.
// Can be chained multiple times, applying to smaller subgroups each time.
func (m Model) ThenSortByDesc(columnKey string) Model {
	// Prepend: getSortedRows applies entries in order with a stable sort, so
	// the most recently added column becomes the innermost tie-breaker.
	next := SortColumn{ColumnKey: columnKey, Direction: SortDirectionDesc}
	m.sortOrder = append([]SortColumn{next}, m.sortOrder...)
	return m
}
// sortableTable adapts a slice of rows plus a single sort column to
// sort.Interface so it can be handed to sort.Stable.
type sortableTable struct {
	// rows is reordered in place while sorting.
	rows []Row
	// byColumn names the column whose cell values Less compares, and the
	// direction of the comparison.
	byColumn SortColumn
}
// Len reports the number of rows being sorted (sort.Interface).
func (s *sortableTable) Len() int {
	return len(s.rows)
}

// Swap exchanges the rows at the two given indices (sort.Interface).
func (s *sortableTable) Swap(i, j int) {
	s.rows[i], s.rows[j] = s.rows[j], s.rows[i]
}
// extractString renders the cell at row i / the given column as a string.
// Missing cells render as the empty string; StyledCells render their inner
// Data; anything else goes through fmt's default %v formatting.
func (s *sortableTable) extractString(i int, column string) string {
	data, exists := s.rows[i].Data[column]
	if !exists {
		return ""
	}
	if styled, isStyled := data.(StyledCell); isStyled {
		return fmt.Sprintf("%v", styled.Data)
	}
	if str, isString := data.(string); isString {
		return str
	}
	return fmt.Sprintf("%v", data)
}
// extractNumber returns the cell at row i / the given column as a float64,
// reporting false when the cell is absent or not numeric (per asNumber).
func (s *sortableTable) extractNumber(i int, column string) (float64, bool) {
	if data, exists := s.rows[i].Data[column]; exists {
		return asNumber(data)
	}
	return 0, false
}
// Less orders two rows by the configured column (sort.Interface). When both
// cells parse as numbers they are compared numerically; otherwise both are
// compared as their string renderings.
func (s *sortableTable) Less(first, second int) bool {
	key := s.byColumn.ColumnKey
	ascending := s.byColumn.Direction == SortDirectionAsc

	firstNum, firstIsNum := s.extractNumber(first, key)
	secondNum, secondIsNum := s.extractNumber(second, key)
	if firstIsNum && secondIsNum {
		if ascending {
			return firstNum < secondNum
		}
		return firstNum > secondNum
	}

	firstStr := s.extractString(first, key)
	secondStr := s.extractString(second, key)
	if ascending {
		return firstStr < secondStr
	}
	return firstStr > secondStr
}
func getSortedRows(sortOrder []SortColumn, rows []Row) []Row {
var sortedRows []Row
if len(sortOrder) == 0 {
sortedRows = rows
return sortedRows
}
sortedRows = make([]Row, len(rows))
copy(sortedRows, rows)
for _, byColumn := range sortOrder {
sorted := &sortableTable{
rows: sortedRows,
byColumn: byColumn,
}
sort.Stable(sorted)
sortedRows = sorted.rows
}
return sortedRows
} | table/sort.go | 0.76533 | 0.477554 | sort.go | starcoder |
package goldengine
import (
	"fmt"

	sf "github.com/manyminds/gosfml"
)
//Transformer : Wrapper around sfml Transformer — anything that can be both
//drawn to a render target and transformed (moved/rotated/scaled).
type Transformer interface {
	sf.Drawer
	sf.Transformer
}
//TransformerPrefab : Info to create a Transformer from JSON Prefab
type TransformerPrefab struct {
	// Kind selects the generator to run; must match a key of TranformerGenerators.
	Kind string
	// Arguments holds the decoded JSON values forwarded to the generator.
	Arguments map[string]interface{}
}
//Sprite : Wrapper around sfml Object
type Sprite sf.Sprite
//CircleShape : Wrapper around sfml Object
type CircleShape sf.CircleShape
//ConvexShape : Wrapper around sfml Object
type ConvexShape sf.ConvexShape
//RectangleShape : Wrapper around sfml Object
type RectangleShape sf.RectangleShape
//Text : Wrapper around sfml Object
type Text sf.Text
//Shape : Group of Functions all Shapes have.
//Used by ApplyArgsToShape to set origin, outline, and fill uniformly
//across the concrete sfml shape types.
type Shape interface {
	GetOrigin() sf.Vector2f
	SetOrigin(sf.Vector2f)
	GetOutlineThickness() float32
	SetOutlineThickness(float32)
	GetOutlineColor() sf.Color
	SetOutlineColor(sf.Color)
	GetFillColor() sf.Color
	SetFillColor(sf.Color)
}
const (
	//SpriteName : Name of Transformer (value of TransformerPrefab.Kind)
	SpriteName = "Sprite"
	//CircleShapeName : Name of Transformer
	CircleShapeName = "CircleShape"
	//ConvexShapeName : Name of Transformer
	ConvexShapeName = "ConvexShape"
	//RectangleShapeName : Name of Transformer
	RectangleShapeName = "RectangleShape"
	//TextName : Name of Transformer
	TextName = "Text"
)
//TranformerGenerators : map to create Generators, keyed by prefab Kind.
//NOTE(review): identifier is missing the "s" in "Transformer"; renaming
//would break external callers, so it is left as-is.
var TranformerGenerators = map[string]func(args map[string]interface{}) (Transformer, error){
	SpriteName:         SpriteFromArguments,
	CircleShapeName:    CircleShapeFromArguments,
	ConvexShapeName:    ConvexShapeFromArguments,
	RectangleShapeName: RectangleShapeFromArguments,
	TextName:           TextFromArguments,
}
//TransformerFromTranformerPrefab : Returns Transformer from Transform Prefab
//by dispatching on the prefab's Kind to the registered generator.
//
//Previously an unknown Kind returned (nil, nil), which silently handed the
//caller a nil Transformer and led to a nil dereference far from the cause.
//An unknown Kind is now reported as an explicit error.
func TransformerFromTranformerPrefab(t TransformerPrefab) (Transformer, error) {
	generator, ok := TranformerGenerators[t.Kind]
	if !ok {
		return nil, fmt.Errorf("unknown transformer kind %q", t.Kind)
	}
	return generator(t.Arguments)
}
/*SpriteFromArguments : Generates sprite from Arguments field of Prefab
TODO : Get Texture*/
func SpriteFromArguments(args map[string]interface{}) (Transformer, error) {
	sprite, err := sf.NewSprite(nil)
	return sprite, err
}
//CircleShapeFromArguments : Generates CircleShape from Arguments field of
//Prefab. Shared shape properties are applied first, then the circle-specific
//"Radius" argument (skipped when absent or malformed).
func CircleShapeFromArguments(args map[string]interface{}) (Transformer, error) {
	circle, err := sf.NewCircleShape()
	if err != nil {
		return nil, err
	}
	ApplyArgsToShape(circle, args)
	if raw, present := args["Radius"]; present {
		if radius, valid := ArgAsFloat32(raw); valid {
			circle.SetRadius(radius)
		}
	}
	return circle, nil
}
//ConvexShapeFromArguments : Generates ConvexShape from Arguments field of
//Prefab. Arguments are currently unused for convex shapes.
func ConvexShapeFromArguments(args map[string]interface{}) (Transformer, error) {
	shape, err := sf.NewConvexShape()
	return shape, err
}
//RectangleShapeFromArguments : Generates RectangleShape from Arguments field
//of Prefab. Shared shape properties are applied first, then the
//rectangle-specific "Size" argument (skipped when absent or malformed).
func RectangleShapeFromArguments(args map[string]interface{}) (Transformer, error) {
	rect, err := sf.NewRectangleShape()
	if err != nil {
		return nil, err
	}
	ApplyArgsToShape(rect, args)
	if raw, present := args["Size"]; present {
		if size, valid := ArgAsVector2f(raw); valid {
			rect.SetSize(size)
		}
	}
	return rect, nil
}
//ApplyArgsToShape : Sets the properties shared by all shapes
//(OutlineThickness, OutlineColor, Origin, FillColor) from the decoded
//prefab arguments. Keys that are absent or fail conversion are skipped.
func ApplyArgsToShape(shape Shape, args map[string]interface{}) {
	if raw, present := args["OutlineThickness"]; present {
		if thickness, valid := ArgAsFloat32(raw); valid {
			shape.SetOutlineThickness(thickness)
		}
	}
	if raw, present := args["OutlineColor"]; present {
		if color, valid := ArgAsColor(raw); valid {
			shape.SetOutlineColor(color)
		}
	}
	if raw, present := args["Origin"]; present {
		if origin, valid := ArgAsVector2f(raw); valid {
			shape.SetOrigin(origin)
		}
	}
	if raw, present := args["FillColor"]; present {
		if color, valid := ArgAsColor(raw); valid {
			shape.SetFillColor(color)
		}
	}
}
/*TextFromArguments : Generates Text from Arguments field of Prefab
TODO : Get fonts*/
func TextFromArguments(args map[string]interface{}) (Transformer, error) {
	text, err := sf.NewText(nil)
	return text, err
}
//ArgAsFloat32 Converts an interface from a JSON Parser to a float32.
//JSON numbers decode as float64, so only that dynamic type is accepted;
//anything else reports (0, false).
func ArgAsFloat32(arg interface{}) (float32, bool) {
	if f, isFloat := arg.(float64); isFloat {
		return float32(f), true
	}
	return 0.0, false
}
//ArgAsVector2f Converts an interface from a JSON Parser to a Vector2f.
//The value must be a map with numeric "X" and "Y" entries.
//
//Previously the components were extracted with blind .(float64) assertions,
//which panicked whenever a key was missing or not a number; malformed input
//now simply reports (zero value, false).
func ArgAsVector2f(arg interface{}) (sf.Vector2f, bool) {
	value, ok := arg.(map[string]interface{})
	if !ok {
		return sf.Vector2f{}, false
	}
	x, xOK := value["X"].(float64)
	y, yOK := value["Y"].(float64)
	if !xOK || !yOK {
		return sf.Vector2f{}, false
	}
	return Vector{X: float32(x), Y: float32(y)}.ToSFML(), true
}
//ArgAsVector Converts an interface from a JSON Parser to a Vector.
//The value must be a map with numeric "X" and "Y" entries.
//
//Previously the components were extracted with blind .(float64) assertions,
//which panicked whenever a key was missing or not a number; malformed input
//now simply reports (zero value, false).
func ArgAsVector(arg interface{}) (Vector, bool) {
	value, ok := arg.(map[string]interface{})
	if !ok {
		return Vector{}, false
	}
	x, xOK := value["X"].(float64)
	y, yOK := value["Y"].(float64)
	if !xOK || !yOK {
		return Vector{}, false
	}
	return Vector{X: float32(x), Y: float32(y)}, true
}
//ArgAsColor Converts an interface from a JSON Parser to a Color
func ArgAsColor(arg interface{}) (sf.Color, bool) {
value, ok := arg.(map[string]interface{})
if ok {
return sf.Color{
R: uint8(value["R"].(float64)),
G: uint8(value["G"].(float64)),
B: uint8(value["B"].(float64)),
A: uint8(value["A"].(float64)),
}, ok
}
return sf.Color{}, ok
} | transform.go | 0.5769 | 0.401952 | transform.go | starcoder |
package fake
import (
"encoding/json"
"errors"
"fmt"
"math/rand"
)
// Random generates true/false values based on a predetermined percentage.
type Random struct {
	// id uniquely identifies this generator; echoed into Stats output.
	id string
	// rnd is the seeded randomness source, so sequences are reproducible
	// for the same seed.
	rnd *rand.Rand
	// pctGood is the probability (0..1) that a generated value is true.
	pctGood float64
	// keepStats controls whether each Next() is tallied into Stats.
	keepStats bool
	// Stats accumulates good/bad counts when keepStats is true.
	Stats *RandomStats
	// v is the most recently generated value.
	v bool
}
// RandomStats keeps track of various statistics of a Random while it's
// running. "Cumulative" fields grow for the lifetime of the stats, while
// "slot" fields are zeroed every time JSON() is called.
type RandomStats struct {
	// The ID of the Random.
	ID string `json:"id"`
	// Cumulative count of how many times Next() was called.
	CTotal int64 `json:"cumulativeTotal"`
	// Cumulative count of how many times the value was "good".
	CGoodCount int64 `json:"cumulativeGoodCount"`
	// Cumulative count of how many times the value was "bad".
	CBadCount int64 `json:"cumulativeBadCount"`
	// Cumulative ratio of good/bad
	CRatio float64 `json:"cumulativeRatio"`
	// Slot count of how many times Next() was called. This gets reset after every JSON() call.
	Total int64 `json:"slotTotal"`
	// Slot count of how many times the value was "good". This gets reset after every JSON() call.
	GoodCount int64 `json:"slotGoodCount"`
	// Slot count of how many times the value was "bad". This gets reset after every JSON() call.
	BadCount int64 `json:"slotBadCount"`
	// Slot ratio of good/bad. This gets reset after every JSON() call.
	Ratio float64 `json:"slotGoodRatio"`
}

// Add folds a value into the running tally. v must be a bool: true counts
// as "good", false as "bad". Both ratios are recomputed on every call.
func (rs *RandomStats) Add(v interface{}) {
	rs.CTotal++
	rs.Total++
	if good := v.(bool); good {
		rs.CGoodCount++
		rs.GoodCount++
	} else {
		rs.CBadCount++
		rs.BadCount++
	}
	rs.CRatio = float64(rs.CGoodCount) / float64(rs.CTotal)
	rs.Ratio = float64(rs.GoodCount) / float64(rs.Total)
}

// JSON returns a JSON summary of the current random statistics and resets
// the slot tally; cumulative counters are preserved.
func (rs *RandomStats) JSON() string {
	out, _ := json.Marshal(rs) // a flat struct of scalars cannot fail to marshal
	rs.Total, rs.GoodCount, rs.BadCount, rs.Ratio = 0, 0, 0, 0
	return string(out)
}
// Next generates the next random value and, when stats are enabled, tallies
// it into Stats.
func (fr *Random) Next() {
	good := fr.rnd.Float64() < fr.pctGood
	fr.v = good
	if fr.keepStats {
		fr.Stats.Add(good)
	}
}
// Val returns the current random value (a bool boxed as interface{}).
func (fr *Random) Val() interface{} {
	return fr.v
}
// Vals returns the next count of values as an interface{} array.
func (fr *Random) Vals(count int) []interface{} {
	return makeValues(fr, count)
}
// JSONStats retrieves the current stats as a JSON string; this also resets
// the slot (non-cumulative) tally, see RandomStats.JSON.
func (fr *Random) JSONStats() string {
	return fr.Stats.JSON()
}
// Good returns whether the current value is "good" (true).
func (fr *Random) Good() bool {
	return fr.v
}
// Bad returns whether the current value is "bad" (false).
func (fr *Random) Bad() bool {
	return !fr.v
}
// Values returns the next count of values as a bool array. Each slot records
// the current value before the generator is advanced.
func (fr *Random) Values(count int) []bool {
	result := make([]bool, count)
	for i := range result {
		result[i] = fr.Good()
		fr.Next()
	}
	return result
}
// NewRandom creates a new Random. A random has a unique id, a random seed to
// ensure consistency when generating random numbers for the same seed, a
// percentage of required "good" samples and needs to know wheter to keep
// internal statistics.
func NewRandom(id string, seed int64, pctGood float64, keepStats bool) (*Random, error) {
if id == "" {
return nil, errors.New("ID for a fake random cannot be blank")
}
if pctGood < 0 || pctGood > 1 {
return nil, errors.New("Percentage good for a FakeRandom with id '" + id + "' must be between 0 and 1 but was '" + fmt.Sprintf("%v", pctGood) + "'")
}
r := &Random{
id: id,
rnd: generateRandom(seed),
pctGood: pctGood,
keepStats: keepStats,
Stats: &RandomStats{ID: id},
}
r.Next()
return r, nil
} | random.go | 0.754915 | 0.408572 | random.go | starcoder |
package matchers
import (
"fmt"
"reflect"
"github.com/onsi/gomega/format"
)
// ConsistOfMatcher succeeds if actual contains precisely the elements listed
// in Elements, in any order. Each element may be a matcher or a plain value
// (plain values are wrapped in an EqualMatcher by Match).
type ConsistOfMatcher struct {
	Elements []interface{}
}
// Match reports whether actual — which must be an array, slice, or map —
// contains exactly the expected elements, in any order. Each expected
// element may itself be a matcher; plain values are compared with Equal
// semantics. The match fails when lengths differ or any actual value cannot
// be paired with a remaining expected matcher.
func (matcher *ConsistOfMatcher) Match(actual interface{}) (success bool, err error) {
	if !isArrayOrSlice(actual) && !isMap(actual) {
		return false, fmt.Errorf("ConsistOf matcher expects an array/slice/map. Got:\n%s", format.Object(actual, 1))
	}
	// A single array/slice argument is flattened so ConsistOf([]X{a, b})
	// behaves like ConsistOf(a, b).
	elements := matcher.Elements
	if len(matcher.Elements) == 1 && isArrayOrSlice(matcher.Elements[0]) {
		elements = []interface{}{}
		value := reflect.ValueOf(matcher.Elements[0])
		for i := 0; i < value.Len(); i++ {
			elements = append(elements, value.Index(i).Interface())
		}
	}
	// Normalize every expected element to a matcher (plain values become
	// EqualMatchers) so the pairing loop below is uniform.
	matchers := map[int]omegaMatcher{}
	for i, element := range elements {
		matcher, isMatcher := element.(omegaMatcher)
		if !isMatcher {
			matcher = &EqualMatcher{Expected: element}
		}
		matchers[i] = matcher
	}
	values := matcher.valuesOf(actual)
	if len(values) != len(matchers) {
		return false, nil
	}
	// Greedily pair each actual value with the first remaining matcher that
	// accepts it; a matcher error is treated as a non-match for that pair.
	// NOTE(review): map iteration order is random, so when several matchers
	// could accept a value, which one is consumed is nondeterministic.
	for _, value := range values {
		found := false
		for key, matcher := range matchers {
			success, err := matcher.Match(value)
			if err != nil {
				continue
			}
			if success {
				found = true
				delete(matchers, key)
				break
			}
		}
		if !found {
			return false, nil
		}
	}
	return true, nil
}
// valuesOf flattens actual into a slice of its elements: map values for a
// map, indexed elements for an array/slice. The result is never nil.
func (matcher *ConsistOfMatcher) valuesOf(actual interface{}) []interface{} {
	reflected := reflect.ValueOf(actual)
	values := []interface{}{}
	if isMap(actual) {
		for _, key := range reflected.MapKeys() {
			values = append(values, reflected.MapIndex(key).Interface())
		}
	} else {
		for i := 0; i < reflected.Len(); i++ {
			values = append(values, reflected.Index(i).Interface())
		}
	}
	return values
}
// FailureMessage describes a failed positive expectation against actual.
func (matcher *ConsistOfMatcher) FailureMessage(actual interface{}) (message string) {
	return format.Message(actual, "to consist of", matcher.Elements)
}
// NegatedFailureMessage describes a failed negated expectation against actual.
func (matcher *ConsistOfMatcher) NegatedFailureMessage(actual interface{}) (message string) {
	return format.Message(actual, "not to consist of", matcher.Elements)
}
package dlframework
const (
dlframework_swagger = `{
"swagger": "2.0",
"info": {
"title": "MLModelScope",
"version": "0.2.18",
"description": "MLModelScope is a hardware/software agnostic platform to facilitate the evaluation, measurement, and introspection of ML models within AI pipelines. MLModelScope aids application developers in discovering and experimenting with models, data scientists developers in replicating and evaluating for publishing models, and system architects in understanding the performance of AI workloads.",
"contact": {
"name": "<NAME>, <NAME>",
"url": "https://github.com/rai-project/carml"
},
"license": {
"name": "NCSA/UIUC",
"url": "https://raw.githubusercontent.com/rai-project/dlframework/master/LICENSE.TXT"
}
},
"schemes": [
"http",
"https"
],
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"paths": {
"/predict/close": {
"post": {
"summary": "Close a predictor clear it's memory.",
"operationId": "Close",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/dlframeworkPredictorCloseResponse"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dlframeworkPredictorCloseRequest"
}
}
],
"tags": [
"Predict"
]
}
},
"/predict/dataset": {
"post": {
"summary": "Dataset method receives a single dataset and runs\nthe predictor on all elements of the dataset.",
"description": "The result is a prediction feature list.",
"operationId": "Dataset",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/dlframeworkFeaturesResponse"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dlframeworkDatasetRequest"
}
}
],
"tags": [
"Predict"
]
}
},
"/predict/images": {
"post": {
"summary": "Image method receives a list base64 encoded images and runs\nthe predictor on all the images.",
"description": "The result is a prediction feature list for each image.",
"operationId": "Images",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/dlframeworkFeaturesResponse"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dlframeworkImagesRequest"
}
}
],
"tags": [
"Predict"
]
}
},
"/predict/open": {
"post": {
"summary": "Opens a predictor and returns an id where the predictor\nis accessible. The id can be used to perform inference\nrequests.",
"operationId": "Open",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/dlframeworkPredictor"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dlframeworkPredictorOpenRequest"
}
}
],
"tags": [
"Predict"
]
}
},
"/predict/reset": {
"post": {
"summary": "Clear method clears the internal cache of the predictors",
"operationId": "Reset",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/dlframeworkResetResponse"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dlframeworkResetRequest"
}
}
],
"tags": [
"Predict"
]
}
},
"/predict/stream/dataset": {
"post": {
"summary": "Dataset method receives a single dataset and runs\nthe predictor on all elements of the dataset.",
"description": "The result is a prediction feature stream.",
"operationId": "DatasetStream",
"responses": {
"200": {
"description": "A successful response.(streaming responses)",
"schema": {
"$ref": "#/definitions/dlframeworkFeatureResponse"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dlframeworkDatasetRequest"
}
}
],
"tags": [
"Predict"
]
}
},
"/predict/stream/images": {
"post": {
"summary": "Image method receives a list base64 encoded images and runs\nthe predictor on all the images.",
"description": "The result is a prediction feature stream for each image.",
"operationId": "ImagesStream",
"responses": {
"200": {
"description": "A successful response.(streaming responses)",
"schema": {
"$ref": "#/definitions/dlframeworkFeatureResponse"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dlframeworkImagesRequest"
}
}
],
"tags": [
"Predict"
]
}
},
"/predict/stream/urls": {
"post": {
"summary": "Image method receives a stream of urls and runs\nthe predictor on all the urls. The",
"description": "The result is a prediction feature stream for each url.",
"operationId": "URLsStream",
"responses": {
"200": {
"description": "A successful response.(streaming responses)",
"schema": {
"$ref": "#/definitions/dlframeworkFeatureResponse"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dlframeworkURLsRequest"
}
}
],
"tags": [
"Predict"
]
}
},
"/predict/urls": {
"post": {
"summary": "Image method receives a stream of urls and runs\nthe predictor on all the urls. The",
"description": "The result is a prediction feature stream for each url.",
"operationId": "URLs",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/dlframeworkFeaturesResponse"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dlframeworkURLsRequest"
}
}
],
"tags": [
"Predict"
]
}
},
"/registry/frameworks/agent": {
"get": {
"operationId": "FrameworkAgents",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/dlframeworkAgents"
}
}
},
"parameters": [
{
"name": "framework_name",
"in": "query",
"required": false,
"type": "string"
},
{
"name": "framework_version",
"in": "query",
"required": false,
"type": "string"
}
],
"tags": [
"Registry"
]
}
},
"/registry/frameworks/manifest": {
"get": {
"operationId": "FrameworkManifests",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/dlframeworkFrameworkManifestsResponse"
}
}
},
"parameters": [
{
"name": "framework_name",
"in": "query",
"required": false,
"type": "string"
},
{
"name": "framework_version",
"in": "query",
"required": false,
"type": "string"
}
],
"tags": [
"Registry"
]
}
},
"/registry/models/agent": {
"get": {
"operationId": "ModelAgents",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/dlframeworkAgents"
}
}
},
"parameters": [
{
"name": "framework_name",
"in": "query",
"required": false,
"type": "string"
},
{
"name": "framework_version",
"in": "query",
"required": false,
"type": "string"
},
{
"name": "model_name",
"in": "query",
"required": false,
"type": "string"
},
{
"name": "model_version",
"in": "query",
"required": false,
"type": "string"
}
],
"tags": [
"Registry"
]
}
},
"/registry/models/manifest": {
"get": {
"operationId": "ModelManifests",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/dlframeworkModelManifestsResponse"
}
}
},
"parameters": [
{
"name": "framework_name",
"in": "query",
"required": false,
"type": "string"
},
{
"name": "framework_version",
"in": "query",
"required": false,
"type": "string"
},
{
"name": "model_name",
"in": "query",
"required": false,
"type": "string"
},
{
"name": "model_version",
"in": "query",
"required": false,
"type": "string"
}
],
"tags": [
"Registry"
]
}
},
"/auth/login": {
"post": {
"summary": "Login to MLModelScope platform",
"operationId": "Login",
"responses": {
"200": {
"description": "",
"schema": {
"$ref": "#/definitions/dlframeworkLoginResponse"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dlframeworkLogin"
}
}
],
"tags": [
"Authentication"
]
}
},
"/auth/signup": {
"post": {
"summary": "Signup to MLModelScope platform",
"operationId": "Signup",
"responses": {
"200": {
"description": "",
"schema": {
"$ref": "#/definitions/dlframeworkSignupResponse"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dlframeworkSignup"
}
}
],
"tags": [
"Authentication"
]
}
}
},
"definitions": {
"DatasetRequestDataset": {
"type": "object",
"properties": {
"category": {
"type": "string"
},
"name": {
"type": "string"
}
}
},
"ExecutionOptionsTraceLevel": {
"type": "string",
"enum": [
"NO_TRACE",
"APPLICATION_TRACE",
"MODEL_TRACE",
"FRAMEWORK_TRACE",
"LIBRARY_TRACE",
"HARDWARE_TRACE",
"FULL_TRACE"
],
"default": "NO_TRACE"
},
"URLsRequestURL": {
"type": "object",
"properties": {
"id": {
"type": "string",
"title": "An id used to identify the output feature: maps to input_id for output"
},
"data": {
"type": "string"
}
}
},
"dlframeworkAudio": {
"type": "object",
"properties": {
"data": {
"type": "string",
"format": "byte"
},
"format": {
"type": "string"
}
}
},
"dlframeworkCPUOptions": {
"type": "object"
},
"dlframeworkClassification": {
"type": "object",
"properties": {
"index": {
"type": "integer",
"format": "int32"
},
"label": {
"type": "string"
}
}
},
"dlframeworkDatasetRequest": {
"type": "object",
"properties": {
"predictor": {
"$ref": "#/definitions/dlframeworkPredictor"
},
"dataset": {
"$ref": "#/definitions/DatasetRequestDataset"
},
"options": {
"$ref": "#/definitions/dlframeworkPredictionOptions"
}
}
},
"dlframeworkExecutionOptions": {
"type": "object",
"properties": {
"trace_id": {
"$ref": "#/definitions/dlframeworkTraceID"
},
"trace_level": {
"$ref": "#/definitions/ExecutionOptionsTraceLevel"
},
"timeout_in_ms": {
"type": "string",
"format": "uint64",
"description": "Time to wait for operation to complete in milliseconds."
},
"device_count": {
"type": "object",
"additionalProperties": {
"type": "integer",
"format": "int32"
},
"description": "Map from device type name (e.g., \"CPU\" or \"GPU\" ) to maximum\nnumber of devices of that type to use. If a particular device\ntype is not found in the map, the system picks an appropriate\nnumber."
},
"cpu_options": {
"$ref": "#/definitions/dlframeworkCPUOptions",
"description": "Options that apply to all CPUs."
},
"gpu_options": {
"$ref": "#/definitions/dlframeworkGPUOptions",
"description": "Options that apply to all GPUs."
}
}
},
"dlframeworkFeature": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"type": {
"$ref": "#/definitions/dlframeworkFeatureType"
},
"probability": {
"type": "number",
"format": "float"
},
"metadata": {
"type": "object",
"additionalProperties": {
"type": "string"
}
},
"classification": {
"$ref": "#/definitions/dlframeworkClassification"
},
"image": {
"$ref": "#/definitions/dlframeworkImage"
},
"text": {
"$ref": "#/definitions/dlframeworkText"
},
"region": {
"$ref": "#/definitions/dlframeworkRegion"
},
"audio": {
"$ref": "#/definitions/dlframeworkAudio"
},
"geolocation": {
"$ref": "#/definitions/dlframeworkGeoLocation"
},
"raw": {
"$ref": "#/definitions/dlframeworkRaw"
}
}
},
"dlframeworkFeatureResponse": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"request_id": {
"type": "string"
},
"input_id": {
"type": "string"
},
"features": {
"type": "array",
"items": {
"$ref": "#/definitions/dlframeworkFeature"
}
},
"metadata": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
},
"dlframeworkFeatureType": {
"type": "string",
"enum": [
"UNKNOWN",
"IMAGE",
"CLASSIFICATION",
"GEOLOCATION",
"REGION",
"TEXT",
"AUDIO",
"RAW"
],
"default": "UNKNOWN"
},
"dlframeworkFeaturesResponse": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"trace_id": {
"$ref": "#/definitions/dlframeworkTraceID"
},
"responses": {
"type": "array",
"items": {
"$ref": "#/definitions/dlframeworkFeatureResponse"
}
}
}
},
"dlframeworkGPUOptions": {
"type": "object",
"properties": {
"per_process_gpu_memory_fraction": {
"type": "number",
"format": "double",
"description": "A value between 0 and 1 that indicates what fraction of the\navailable GPU memory to pre-allocate for each process. 1 means\nto pre-allocate all of the GPU memory, 0.5 means the process\nallocates ~50% of the available GPU memory."
},
"allocator_type": {
"type": "string",
"description": "The type of GPU allocation strategy to use.\n\nAllowed values:\n\"\": The empty string (default) uses a system-chosen default\n which may change over time.\n\n\"BFC\": A \"Best-fit with coalescing\" algorithm, simplified from a\n version of dlmalloc."
},
"visible_device_list": {
"type": "string",
"description": "A comma-separated list of GPU ids that determines the 'visible'\nto 'virtual' mapping of GPU devices. For example, if TensorFlow\ncan see 8 GPU devices in the process, and one wanted to map\nvisible GPU devices 5 and 3 as \"/device:GPU:0\", and \"/device:GPU:1\", then\none would specify this field as \"5,3\". This field is similar in spirit to\nthe CUDA_VISIBLE_DEVICES environment variable, except it applies to the\nvisible GPU devices in the process.\n\nNOTE: The GPU driver provides the process with the visible GPUs\nin an order which is not guaranteed to have any correlation to\nthe *physical* GPU id in the machine. This field is used for\nremapping \"visible\" to \"virtual\", which means this operates only\nafter the process starts. Users are required to use vendor\nspecific mechanisms (e.g., CUDA_VISIBLE_DEVICES) to control the\nphysical to visible device mapping prior to invoking TensorFlow."
},
"force_gpu_compatible": {
"type": "boolean",
"format": "boolean",
"description": "Force all tensors to be gpu_compatible. On a GPU-enabled TensorFlow,\nenabling this option forces all CPU tensors to be allocated with Cuda\npinned memory. Normally, TensorFlow will infer which tensors should be\nallocated as the pinned memory. But in case where the inference is\nincomplete, this option can significantly speed up the cross-device memory\ncopy performance as long as it fits the memory.\nNote that this option is not something that should be\nenabled by default for unknown or very large models, since all Cuda pinned\nmemory is unpageable, having too much pinned memory might negatively impact\nthe overall host system performance."
}
}
},
"dlframeworkGeoLocation": {
"type": "object",
"properties": {
"index": {
"type": "integer",
"format": "int32"
},
"latitude": {
"type": "number",
"format": "double"
},
"longitude": {
"type": "number",
"format": "double"
}
}
},
"dlframeworkImage": {
"type": "object",
"properties": {
"id": {
"type": "string",
"title": "An id used to identify the output feature: maps to input_id for output"
},
"data": {
"type": "string",
"format": "byte",
"title": "The image is base64 encoded"
}
}
},
"dlframeworkImagesRequest": {
"type": "object",
"properties": {
"predictor": {
"$ref": "#/definitions/dlframeworkPredictor"
},
"images": {
"type": "array",
"items": {
"$ref": "#/definitions/dlframeworkImage"
},
"title": "A list of Base64 encoded images"
},
"options": {
"$ref": "#/definitions/dlframeworkPredictionOptions"
}
}
},
"dlframeworkPredictionOptions": {
"type": "object",
"properties": {
"request_id": {
"type": "string"
},
"feature_limit": {
"type": "integer",
"format": "int32"
},
"batch_size": {
"type": "integer",
"format": "int32"
},
"execution_options": {
"$ref": "#/definitions/dlframeworkExecutionOptions"
},
"agent": {
"type": "string"
}
}
},
"dlframeworkPredictor": {
"type": "object",
"properties": {
"id": {
"type": "string"
}
}
},
"dlframeworkPredictorCloseRequest": {
"type": "object",
"properties": {
"predictor": {
"$ref": "#/definitions/dlframeworkPredictor"
},
"force": {
"type": "boolean",
"format": "boolean"
}
}
},
"dlframeworkPredictorCloseResponse": {
"type": "object"
},
"dlframeworkPredictorOpenRequest": {
"type": "object",
"properties": {
"model_name": {
"type": "string"
},
"model_version": {
"type": "string"
},
"framework_name": {
"type": "string"
},
"framework_version": {
"type": "string"
},
"persist": {
"type": "boolean",
"format": "boolean"
},
"options": {
"$ref": "#/definitions/dlframeworkPredictionOptions"
}
}
},
"dlframeworkRaw": {
"type": "object",
"properties": {
"data": {
"type": "string",
"format": "byte"
},
"format": {
"type": "string"
}
}
},
"dlframeworkRegion": {
"type": "object",
"properties": {
"data": {
"type": "string",
"format": "byte"
},
"format": {
"type": "string"
}
}
},
"dlframeworkResetRequest": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"predictor": {
"$ref": "#/definitions/dlframeworkPredictor"
}
}
},
"dlframeworkResetResponse": {
"type": "object",
"properties": {
"predictor": {
"$ref": "#/definitions/dlframeworkPredictor"
}
}
},
"dlframeworkText": {
"type": "object",
"properties": {
"data": {
"type": "string",
"format": "byte"
}
}
},
"dlframeworkTraceID": {
"type": "object",
"properties": {
"id": {
"type": "string"
}
}
},
"dlframeworkURLsRequest": {
"type": "object",
"properties": {
"predictor": {
"$ref": "#/definitions/dlframeworkPredictor"
},
"urls": {
"type": "array",
"items": {
"$ref": "#/definitions/URLsRequestURL"
}
},
"options": {
"$ref": "#/definitions/dlframeworkPredictionOptions"
}
}
},
"ModelManifestModel": {
"type": "object",
"properties": {
"base_url": {
"type": "string"
},
"weights_path": {
"type": "string"
},
"graph_path": {
"type": "string"
},
"is_archive": {
"type": "boolean",
"format": "boolean"
},
"weights_checksum": {
"type": "string"
},
"graph_checksum": {
"type": "string"
}
}
},
"TypeParameter": {
"type": "object",
"properties": {
"value": {
"type": "string"
}
}
},
"dlframeworkAgent": {
"type": "object",
"properties": {
"host": {
"type": "string"
},
"port": {
"type": "string"
},
"hostname": {
"type": "string"
},
"architecture": {
"type": "string"
},
"hasgpu": {
"type": "boolean",
"format": "boolean"
},
"cpuinfo": {
"type": "string"
},
"gpuinfo": {
"type": "string"
},
"frameworks": {
"type": "array",
"items": {
"$ref": "#/definitions/dlframeworkFrameworkManifest"
}
},
"metadata": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
}
},
"dlframeworkAgents": {
"type": "object",
"properties": {
"agents": {
"type": "array",
"items": {
"$ref": "#/definitions/dlframeworkAgent"
}
}
}
},
"dlframeworkContainerHardware": {
"type": "object",
"properties": {
"gpu": {
"type": "string"
},
"cpu": {
"type": "string"
}
}
},
"dlframeworkFrameworkManifest": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
},
"container": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/dlframeworkContainerHardware"
}
}
}
},
"dlframeworkFrameworkManifestsResponse": {
"type": "object",
"properties": {
"manifests": {
"type": "array",
"items": {
"$ref": "#/definitions/dlframeworkFrameworkManifest"
}
}
}
},
"dlframeworkModelManifest": {
"type": "object",
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
},
"framework": {
"$ref": "#/definitions/dlframeworkFrameworkManifest"
},
"container": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/dlframeworkContainerHardware"
}
},
"description": {
"type": "string"
},
"reference": {
"type": "array",
"items": {
"type": "string"
}
},
"license": {
"type": "string"
},
"inputs": {
"type": "array",
"items": {
"$ref": "#/definitions/dlframeworkModelManifestType"
}
},
"output": {
"$ref": "#/definitions/dlframeworkModelManifestType"
},
"before_preprocess": {
"type": "string"
},
"preprocess": {
"type": "string"
},
"after_preprocess": {
"type": "string"
},
"before_postprocess": {
"type": "string"
},
"postprocess": {
"type": "string"
},
"after_postprocess": {
"type": "string"
},
"model": {
"$ref": "#/definitions/ModelManifestModel"
},
"attributes": {
"type": "object",
"additionalProperties": {
"type": "string"
}
},
"hidden": {
"type": "boolean",
"format": "boolean"
}
}
},
"dlframeworkModelManifestType": {
"type": "object",
"properties": {
"type": {
"type": "string"
},
"description": {
"type": "string"
},
"parameters": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/TypeParameter"
}
}
}
},
"dlframeworkModelManifestsResponse": {
"type": "object",
"properties": {
"manifests": {
"type": "array",
"items": {
"$ref": "#/definitions/dlframeworkModelManifest"
}
}
}
},
"dlframeworkLogin": {
"type": "object",
"properties": {
"username": {
"type": "string"
},
"password": {
"type": "string"
}
}
},
"dlframeworkLoginResponse": {
"type": "object",
"properties": {
"outcome": {
"type": "string"
}
}
},
"dlframeworkSignupResponse": {
"type": "object",
"properties": {
"outcome": {
"type": "string"
},
"username": {
"type": "string"
}
}
},
"dlframeworkSignup": {
"type": "object",
"properties": {
"first_name": {
"type": "string"
},
"last_name": {
"type": "string"
},
"affiliation": {
"type": "string"
},
"username": {
"type": "string"
},
"password": {
"type": "string"
}
}
}
},
"host": "carml.org",
"basePath": "/api",
"externalDocs": {
"url": "https://rai-project.github.io/carml"
}
}
`
swagger_info = `{
"info": {
"title": "MLModelScope",
"description": "MLModelScope is a hardware/software agnostic platform to facilitate the evaluation, measurement, and introspection of ML models within AI pipelines. MLModelScope aids application developers in discovering and experimenting with models, data scientists developers in replicating and evaluating for publishing models, and system architects in understanding the performance of AI workloads.",
"version": "0.2.18",
"contact": {
"name": "<NAME>, <NAME>",
"url": "https://github.com/rai-project/carml"
},
"license": {
"name": "NCSA/UIUC",
"url": "https://raw.githubusercontent.com/rai-project/dlframework/master/LICENSE.TXT"
}
},
"paths": {
"/auth/login": {
"post": {
"summary": "Login to MLModelScope platform",
"operationId": "Login",
"responses": {
"200": {
"description": "",
"schema": {
"$ref": "#/definitions/dlframeworkLoginResponse"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dlframeworkLogin"
}
}
],
"tags": ["Authentication"]
}
},
"/auth/signup": {
"post": {
"summary": "Signup to MLModelScope platform",
"operationId": "Signup",
"responses": {
"200": {
"description": "",
"schema": {
"$ref": "#/definitions/dlframeworkSignupResponse"
}
}
},
"parameters": [
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/dlframeworkSignup"
}
}
],
"tags": ["Authentication"]
}
}
},
"definitions": {
"dlframeworkLogin": {
"type": "object",
"properties": {
"username": {
"type": "string"
},
"password": {
"type": "string"
}
}
},
"dlframeworkLoginResponse": {
"type": "object",
"properties": {
"outcome": {
"type": "string"
}
}
},
"dlframeworkSignupResponse": {
"type": "object",
"properties": {
"outcome": {
"type": "string"
},
"username": {
"type": "string"
}
}
},
"dlframeworkSignup": {
"type": "object",
"properties": {
"first_name": {
"type": "string"
},
"last_name": {
"type": "string"
},
"affiliation": {
"type": "string"
},
"username": {
"type": "string"
},
"password": {
"type": "string"
}
}
}
},
"host": "carml.org",
"basePath": "/api",
"externalDocs": {
"url": "https://rai-project.github.io/carml"
}
}
`
) | vendor/github.com/rai-project/dlframework/swagger.go | 0.826887 | 0.467332 | swagger.go | starcoder |
package prometheusextension
import (
"errors"
"time"
"github.com/prometheus/client_golang/prometheus"
dto "github.com/prometheus/client_model/go"
)
// GaugeOps is the part of `prometheus.Gauge` that is relevant to
// instrumented code.
// This factoring should be in prometheus, analogous to the way
// it already factors out the Observer interface for histograms and summaries.
type GaugeOps interface {
	// Set is the same as Gauge.Set
	Set(float64)
	// Inc is the same as Gauge.Inc
	Inc()
	// Dec is the same as Gauge.Dec
	Dec()
	// Add is the same as Gauge.Add
	Add(float64)
	// Sub is the same as Gauge.Sub
	Sub(float64)
	// SetToCurrentTime is the same as Gauge.SetToCurrentTime
	SetToCurrentTime()
}
// A TimingHistogram tracks how long a `float64` variable spends in
// ranges defined by buckets. Time is counted in nanoseconds. The
// histogram's sum is the integral over time (in nanoseconds, from
// creation of the histogram) of the variable's value.
// A TimingHistogram is both a scrapeable metric/collector and a
// gauge-like handle for driving the tracked variable.
type TimingHistogram interface {
	prometheus.Metric
	prometheus.Collector
	GaugeOps
}
// TimingHistogramOpts is the parameters of the TimingHistogram constructor.
// Namespace, Subsystem and Name are joined into the fully-qualified metric
// name; Help becomes the metric's help string (prefixed as experimental).
type TimingHistogramOpts struct {
	Namespace   string
	Subsystem   string
	Name        string
	Help        string
	ConstLabels prometheus.Labels

	// Buckets defines the buckets into which observations are
	// accumulated. Each element in the slice is the upper
	// inclusive bound of a bucket. The values must be sorted in
	// strictly increasing order. There is no need to add a
	// highest bucket with +Inf bound. The default value is
	// prometheus.DefBuckets.
	Buckets []float64

	// InitialValue is the initial value of the tracked variable.
	InitialValue float64
}
// NewTimingHistogram creates a new TimingHistogram driven by the real
// wall clock (time.Now).
func NewTimingHistogram(opts TimingHistogramOpts) (TimingHistogram, error) {
	clock := time.Now
	return NewTestableTimingHistogram(clock, opts)
}
// NewTestableTimingHistogram creates a TimingHistogram that uses a mockable
// clock. nowFunc supplies the current time; pass time.Now outside of tests.
func NewTestableTimingHistogram(nowFunc func() time.Time, opts TimingHistogramOpts) (TimingHistogram, error) {
	desc := prometheus.NewDesc(
		prometheus.BuildFQName(opts.Namespace, opts.Subsystem, opts.Name),
		wrapTimingHelp(opts.Help),
		nil,
		opts.ConstLabels,
	)
	return newTimingHistogram(nowFunc, desc, opts)
}
// wrapTimingHelp prefixes the given help text with a marker flagging
// timing histograms as an experimental metric type.
func wrapTimingHelp(given string) string {
	const prefix = "EXPERIMENTAL: "
	return prefix + given
}
// newTimingHistogram wires a weightedHistogram to a clock. The variable
// label values are folded into the constant labels of the underlying
// weighted histogram.
func newTimingHistogram(nowFunc func() time.Time, desc *prometheus.Desc, opts TimingHistogramOpts, variableLabelValues ...string) (TimingHistogram, error) {
	// Flatten desc's label pairs (const + variable) into a plain map for
	// the weighted histogram's ConstLabels.
	allLabelsM := prometheus.Labels{}
	allLabelsS := prometheus.MakeLabelPairs(desc, variableLabelValues)
	for _, pair := range allLabelsS {
		if pair == nil || pair.Name == nil || pair.Value == nil {
			return nil, errors.New("prometheus.MakeLabelPairs returned a nil")
		}
		allLabelsM[*pair.Name] = *pair.Value
	}
	weighted, err := newWeightedHistogram(desc, WeightedHistogramOpts{
		Namespace:   opts.Namespace,
		Subsystem:   opts.Subsystem,
		Name:        opts.Name,
		Help:        opts.Help,
		ConstLabels: allLabelsM,
		Buckets:     opts.Buckets,
	}, variableLabelValues...)
	if err != nil {
		return nil, err
	}
	// The tracked value starts "now" at InitialValue; dwell time is
	// accumulated from this moment.
	return &timingHistogram{
		nowFunc:     nowFunc,
		weighted:    weighted,
		lastSetTime: nowFunc(),
		value:       opts.InitialValue,
	}, nil
}
// timingHistogram implements TimingHistogram on top of a weightedHistogram:
// each change to the tracked value records the previous value, weighted by
// the nanoseconds it was held.
type timingHistogram struct {
	nowFunc  func() time.Time
	weighted *weightedHistogram

	// The following fields must only be accessed with weighted's lock held

	lastSetTime time.Time // identifies when value was last set
	value       float64
}

// Compile-time check that the concrete type satisfies the interface.
var _ TimingHistogram = &timingHistogram{}
// Set replaces the tracked value with newValue, accumulating the time
// spent at the previous value first.
func (th *timingHistogram) Set(newValue float64) {
	th.update(func(_ float64) float64 { return newValue })
}
// Inc increases the tracked value by one.
func (th *timingHistogram) Inc() {
	th.update(func(v float64) float64 { return v + 1 })
}
// Dec decreases the tracked value by one.
func (th *timingHistogram) Dec() {
	th.update(func(v float64) float64 { return v - 1 })
}
// Add increases the tracked value by delta (which may be negative).
func (th *timingHistogram) Add(delta float64) {
	th.update(func(v float64) float64 { return v + delta })
}
// Sub decreases the tracked value by delta.
func (th *timingHistogram) Sub(delta float64) {
	th.update(func(v float64) float64 { return v - delta })
}
// SetToCurrentTime sets the tracked value to the current Unix time in
// seconds, as reported by the histogram's clock.
func (th *timingHistogram) SetToCurrentTime() {
	th.update(func(float64) float64 {
		return th.nowFunc().Sub(time.Unix(0, 0)).Seconds()
	})
}
// update atomically folds the time spent at the current value into the
// histogram, then applies updateFn to derive the new value.
func (th *timingHistogram) update(updateFn func(float64) float64) {
	th.weighted.lock.Lock()
	defer th.weighted.lock.Unlock()
	now := th.nowFunc()
	delta := now.Sub(th.lastSetTime)
	value := th.value
	if delta > 0 {
		// Weight is the dwell time in nanoseconds; skipped when the clock
		// did not advance (or went backwards), which also keeps the
		// uint64 conversion safe from negative durations.
		th.weighted.observeWithWeightLocked(value, uint64(delta))
		th.lastSetTime = now
	}
	th.value = updateFn(value)
}
// Desc implements prometheus.Metric by delegating to the wrapped histogram.
func (th *timingHistogram) Desc() *prometheus.Desc {
	return th.weighted.Desc()
}
// Write implements prometheus.Metric. Adding zero first folds the time
// elapsed since the last update into the histogram so the snapshot is
// current as of now.
func (th *timingHistogram) Write(dest *dto.Metric) error {
	th.Add(0) // account for time since last update
	return th.weighted.Write(dest)
}
// Describe implements prometheus.Collector.
func (th *timingHistogram) Describe(ch chan<- *prometheus.Desc) {
	ch <- th.weighted.Desc()
}
// Collect implements prometheus.Collector by emitting the histogram itself.
func (th *timingHistogram) Collect(ch chan<- prometheus.Metric) {
	ch <- th
} | staging/src/k8s.io/component-base/metrics/prometheusextension/timing_histogram.go | 0.808974 | 0.423577 | timing_histogram.go | starcoder
package pckg
import (
"sort"
"github.com/gonum/floats"
"github.com/gonum/stat"
)
// Group is a slice of package stats, one entry per analyzed package.
type Group []*Stats
// SetEstimatedStmtCountFrom sets the estimated statement count of every
// package in g to the median statement count of the reference group.
//
// NOTE(review): the Estimated flag on each package is not set here —
// confirm that callers mark packages as estimated themselves.
func (g *Group) SetEstimatedStmtCountFrom(reference Group) {
	// The median is loop-invariant; compute it once instead of once per
	// package (each call sorts a fresh copy of the reference counts).
	median := reference.MedianStatementCount()
	for _, p := range *g {
		p.Statements = median
	}
}
// MedianStatementCount calculates the median number of statements in packages
// that currently have associated tests.
func (g *Group) MedianStatementCount() float64 {
	// StatementCounts returns a fresh slice, so sorting it below does not
	// disturb the group itself.
	sc := g.StatementCounts()
	if floats.Sum(sc) == 0 {
		// All-zero (or empty) counts: report 0 rather than a quantile.
		return 0
	}
	sort.Float64s(sc)
	return stat.Quantile(0.5, stat.Empirical, sc, nil)
}
// TotalStatementCount returns the sum of statements across all packages in
// the group.
func (g *Group) TotalStatementCount() float64 {
	var total float64
	for _, count := range g.StatementCounts() {
		total += count
	}
	return total
}
// StatementCounts returns the statement count of every package in the group,
// in group order.
func (g *Group) StatementCounts() []float64 {
	var counts []float64
	for _, p := range *g {
		counts = append(counts, p.Statements)
	}
	return counts
}
// TotalCovered returns the total number of statements that have been covered
// by unit tests.
func (g *Group) TotalCovered() float64 {
	var covered float64
	for _, p := range *g {
		covered += p.Covered
	}
	return covered
}
// TotalUncovered returns the total number of statements that have not been
// covered by unit tests.
func (g *Group) TotalUncovered() float64 {
	var uncovered float64
	for _, p := range *g {
		uncovered += p.Uncovered
	}
	return uncovered
}
// CoveragePercent calculates the coverage across all packages as a fraction
// (covered/total) — presumably scaled to a percentage by callers; verify at
// call sites.
func (g *Group) CoveragePercent() (coveragePercent float64) {
	// Guard against a (near-)zero denominator; totals below 1 yield 0.
	if g.TotalStatementCount() >= 1 {
		coveragePercent = g.TotalCovered() / g.TotalStatementCount()
	}
	return
}
// EstimateCount returns the number of packages whose coverage figures were
// estimated rather than measured. Returned as float64 for arithmetic
// convenience with the other totals.
func (g *Group) EstimateCount() (estimatedCount float64) {
	for _, p := range *g {
		if p.Estimated {
			estimatedCount++
		}
	}
	return
} | drygopher/coverage/pckg/group.go | 0.836655 | 0.432902 | group.go | starcoder
package countrymaam
import (
"errors"
"math"
"math/rand"
"github.com/ar90n/countrymaam/collection"
"github.com/ar90n/countrymaam/number"
)
// CutPlane splits feature space in two. Evaluate reports which side a
// feature falls on, Distance gives a signed squared distance to the plane,
// and Construct builds a new plane from the elements selected by indice.
type CutPlane[T number.Number, U any] interface {
	Evaluate(feature []T) bool
	Distance(feature []T) float64
	Construct(elements []treeElement[T, U], indice []int) (CutPlane[T, U], error)
}
// kdCutPlane is an axis-aligned cut plane: it splits on a single
// coordinate (Axis) at the threshold Value.
type kdCutPlane[T number.Number, U any] struct {
	Axis  uint
	Value float64
}
// Evaluate reports whether feature lies on the non-negative side of the plane.
func (cp kdCutPlane[T, U]) Evaluate(feature []T) bool {
	return cp.Distance(feature) >= 0.0
}
// Distance returns the squared distance from feature to the plane along
// the split axis, negated when the feature lies on the negative side.
func (cp kdCutPlane[T, U]) Distance(feature []T) float64 {
	d := float64(feature[cp.Axis]) - cp.Value
	if d < 0.0 {
		return -(d * d)
	}
	return d * d
}
// Construct builds an axis-aligned plane splitting on the axis with the
// widest value range among the selected elements, at that range's midpoint.
// NOTE: assumes indice is non-empty (indice[0] is read unconditionally).
func (cp kdCutPlane[T, U]) Construct(elements []treeElement[T, U], indice []int) (CutPlane[T, U], error) {
	// Per-axis min/max over all selected elements, seeded from the first.
	minValues := append([]T{}, elements[indice[0]].Feature...)
	maxValues := append([]T{}, elements[indice[0]].Feature...)
	for _, i := range indice {
		element := elements[i]
		for j, v := range element.Feature {
			minValues[j] = number.Min(minValues[j], v)
			maxValues[j] = number.Max(maxValues[j], v)
		}
	}

	// Pick the axis with the largest extent; split at its midpoint.
	maxRange := maxValues[0] - minValues[0]
	cutPlane := kdCutPlane[T, U]{
		Axis:  uint(0),
		Value: float64(maxValues[0]+minValues[0]) / 2,
	}
	for i := uint(1); i < uint(len(minValues)); i++ {
		diff := maxValues[i] - minValues[i]
		if maxRange < diff {
			maxRange = diff
			cutPlane = kdCutPlane[T, U]{
				Axis:  i,
				Value: float64(maxValues[i]+minValues[i]) / 2,
			}
		}
	}
	return &cutPlane, nil
}
// randomizedKdCutPlane is an axis-aligned cut plane whose Construct picks
// the split axis at random among high-variance candidates.
type randomizedKdCutPlane[T number.Number, U any] struct {
	Axis  uint
	Value T
}
// Evaluate reports whether feature lies on the non-negative side of the plane.
func (cp randomizedKdCutPlane[T, U]) Evaluate(feature []T) bool {
	return cp.Distance(feature) >= 0.0
}
// Distance returns the signed squared distance along the split axis.
// Note: the subtraction happens in T before widening to float64.
func (cp randomizedKdCutPlane[T, U]) Distance(feature []T) float64 {
	d := float64(feature[cp.Axis] - cp.Value)
	if d < 0.0 {
		return -(d * d)
	}
	return d * d
}
// Construct samples up to 100 elements, estimates per-axis mean and
// variance, and returns a kdCutPlane through the mean of an axis chosen at
// random among the highest-variance candidates.
func (cp randomizedKdCutPlane[T, U]) Construct(elements []treeElement[T, U], indice []int) (CutPlane[T, U], error) {
	if len(indice) == 0 {
		return nil, errors.New("elements is empty")
	}

	dim := len(elements[0].Feature)
	accs := make([]float64, dim)   // per-axis sum of sampled values
	sqAccs := make([]float64, dim) // per-axis sum of squares
	nSamples := number.Min(uint(len(indice)), 100)
	for _, i := range indice[:nSamples] {
		element := elements[i]
		for j, v := range element.Feature {
			v := float64(v)
			accs[j] += v
			sqAccs[j] += v * v
		}
	}

	invN := 1.0 / float64(nSamples)
	queue := collection.PriorityQueue[*kdCutPlane[T, U]]{}
	for i := range accs {
		mean := accs[i] * invN
		sqMean := sqAccs[i] * invN
		variance := sqMean - mean*mean
		// Negative priority so the highest-variance axis pops first.
		cutPlane := &kdCutPlane[T, U]{
			Axis:  uint(i),
			Value: mean,
		}
		queue.Push(cutPlane, -variance)
	}

	// Randomly pick one of the top candidates.
	// NOTE(review): rand.Intn(nCandidates)-1 yields a skip count in
	// [-1, nCandidates-2], so the best axis is chosen twice as often and
	// the last candidate never — confirm whether the "-1" is deliberate.
	nCandidates := number.Min(5, queue.Len())
	nSkip := rand.Intn(nCandidates) - 1
	for i := 0; i < nSkip; i++ {
		queue.Pop()
	}
	return queue.Pop()
}
// rpCutPlane is a random-projection cut plane: the hyperplane with normal
// NormalVector and offset A (points x with dot(n, x) + A = 0).
type rpCutPlane[T number.Number, U any] struct {
	NormalVector []float64
	A            float64
}
// Evaluate reports whether feature lies on the non-negative side of the plane.
func (cp rpCutPlane[T, U]) Evaluate(feature []T) bool {
	return cp.Distance(feature) >= 0.0
}
// Distance returns the signed squared distance from feature to the
// hyperplane (negative on the back side).
func (cp rpCutPlane[T, U]) Distance(feature []T) float64 {
	dot := cp.A + number.CalcDot(feature, cp.NormalVector)
	if dot < 0.0 {
		return -(dot * dot)
	}
	return dot * dot
}
// Construct builds a cut plane via a lightweight 2-means split: two distinct
// random seed points are refined into cluster centers over a few sampled
// iterations, and the plane is the perpendicular bisector of the centers.
// NOTE: rand.Shuffle mutates the caller's indice slice in place.
func (cp rpCutPlane[T, U]) Construct(elements []treeElement[T, U], indice []int) (CutPlane[T, U], error) {
	if len(indice) == 0 {
		return nil, errors.New("elements is empty")
	}

	// Pick two distinct random seed indices.
	lhsIndex := rand.Intn(len(indice))
	rhsIndex := rand.Intn(len(indice) - 1)
	if lhsIndex <= rhsIndex {
		rhsIndex++
	}

	const maxIter = 32
	dim := len(elements[indice[lhsIndex]].Feature)
	lhsCenter := make([]float64, dim)
	rhsCenter := make([]float64, dim)
	lhsCount := 1
	rhsCount := 1
	for i := 0; i < dim; i++ {
		lhsCenter[i] = float64(elements[indice[lhsIndex]].Feature[i])
		rhsCenter[i] = float64(elements[indice[rhsIndex]].Feature[i])
	}
	// Online k-means (k=2) over up to 32 random samples per iteration;
	// each sample is absorbed into the nearer center's running mean.
	nSamples := number.Min(uint(len(indice)), 32)
	for i := 0; i < maxIter; i++ {
		rand.Shuffle(len(indice), func(i, j int) { indice[i], indice[j] = indice[j], indice[i] })
		for _, k := range indice[:nSamples] {
			feature := elements[k].Feature
			lhsSqDist := number.CalcSqDist(feature, lhsCenter)
			rhsSqDist := number.CalcSqDist(feature, rhsCenter)

			if lhsSqDist < rhsSqDist {
				invCountPlusOone := 1.0 / float64(lhsCount+1)
				for j, v := range feature {
					lhsCenter[j] = (lhsCenter[j]*float64(lhsCount) + float64(v)) * invCountPlusOone
				}
				lhsCount++
			} else {
				invCountPlusOone := 1.0 / float64(rhsCount+1)
				for j, v := range feature {
					rhsCenter[j] = (rhsCenter[j]*float64(rhsCount) + float64(v)) * invCountPlusOone
				}
				rhsCount++
			}
		}
	}

	// Plane normal: normalized direction between the two centers.
	accSqDiff := 0.0
	normalVector := make([]float64, dim)
	for i := 0; i < dim; i++ {
		diff := lhsCenter[i] - rhsCenter[i]
		normalVector[i] = diff
		accSqDiff += diff * diff
	}
	invNorm := 1.0 / (math.Sqrt(accSqDiff) + 1e-10) // epsilon avoids division by zero
	for i := 0; i < dim; i++ {
		normalVector[i] *= invNorm
	}

	// Offset places the plane through the midpoint of the two centers.
	a := 0.0
	for i := 0; i < dim; i++ {
		a -= float64(normalVector[i]) * float64(rhsCenter[i]+lhsCenter[i])
	}
	a /= 2.0

	cutPlane := rpCutPlane[T, U]{
		NormalVector: normalVector,
		A:            a,
	}
	return &cutPlane, nil
} | cut_plane.go | 0.622 | 0.427815 | cut_plane.go | starcoder
package geometry
import "math"
// Vector3D is a three-component single-precision vector.
type Vector3D struct {
	X float32
	Y float32
	Z float32
}
// NewVector builds a Vector3D from its three components.
func NewVector(x float32, y float32, z float32) Vector3D {
	return Vector3D{X: x, Y: y, Z: z}
}
// NewVector_BetweenPoints returns the displacement vector pointing from
// `from` to `to`.
func NewVector_BetweenPoints(from Point3D, to Point3D) Vector3D {
	return Vector3D{X: to.X - from.X, Y: to.Y - from.Y, Z: to.Z - from.Z}
}
// NewVector_FromPoint reinterprets a point as the vector from the origin.
func NewVector_FromPoint(point Point3D) Vector3D {
	return Vector3D{X: point.X, Y: point.Y, Z: point.Z}
}
// Zero returns the zero vector.
func Zero() Vector3D {
	return Vector3D{}
}
// Magnitude returns the Euclidean length of the vector.
func (vector Vector3D) Magnitude() float32 {
	// The sum of squares is accumulated in float32, matching the
	// component type, before widening for the square root.
	sumSq := vector.X*vector.X + vector.Y*vector.Y + vector.Z*vector.Z
	return float32(math.Sqrt(float64(sumSq)))
}
// ToUnit returns the unit-length direction of this vector.
func (vector Vector3D) ToUnit() Direction3D {
	return NewDirection_FromVector(vector)
}
// ToOrthonormalBasis builds an orthonormal basis from this vector's
// direction (delegates to Direction3D).
func (vector Vector3D) ToOrthonormalBasis() Matrix3D {
	return NewDirection_FromVector(vector).ToOrthonormalBasis()
}
// Scale returns the vector multiplied componentwise by scale.
func (vector Vector3D) Scale(scale float32) Vector3D {
	return Vector3D{X: vector.X * scale, Y: vector.Y * scale, Z: vector.Z * scale}
}
// Projection_Dir returns the projection of this vector onto the given
// direction (direction is a Direction3D, so no length check is needed).
func (vector Vector3D) Projection_Dir(direction Direction3D) Vector3D {
	return direction.ToVector().Scale(Dot(vector, direction.ToVector()))
}
// Projection_Vec returns the projection of this vector onto direction, or
// the zero vector when direction has no positive squared length.
func (vector Vector3D) Projection_Vec(direction Vector3D) Vector3D {
	denominator := Dot(direction, direction)
	if denominator > 0.0 {
		return direction.Scale(Dot(vector, direction) / denominator)
	}
	return Zero()
}
// Rotate applies the matrix to this vector using row-vector convention:
// each result component is the dot of the vector with a matrix column.
func (vector Vector3D) Rotate(matrix Matrix3D) Vector3D {
	return Vector3D{
		X: vector.X*matrix.X.X + vector.Y*matrix.Y.X + vector.Z*matrix.Z.X,
		Y: vector.X*matrix.X.Y + vector.Y*matrix.Y.Y + vector.Z*matrix.Z.Y,
		Z: vector.X*matrix.X.Z + vector.Y*matrix.Y.Z + vector.Z*matrix.Z.Z,
	}
}
// Equals reports exact componentwise equality.
func (vector Vector3D) Equals(other Vector3D) bool {
	return vector.X == other.X && vector.Y == other.Y && vector.Z == other.Z
}
// EqualsTol reports whether every component differs by strictly less than
// tolerance.
func (vector Vector3D) EqualsTol(other Vector3D, tolerance float32) bool {
	dx := float32(math.Abs(float64(vector.X - other.X)))
	dy := float32(math.Abs(float64(vector.Y - other.Y)))
	dz := float32(math.Abs(float64(vector.Z - other.Z)))
	return dx < tolerance && dy < tolerance && dz < tolerance
}
// Add returns the componentwise sum of the two vectors.
func (vector Vector3D) Add(other Vector3D) Vector3D {
	return Vector3D{X: vector.X + other.X, Y: vector.Y + other.Y, Z: vector.Z + other.Z}
}
// Sub returns the componentwise difference (vector - other).
func (vector Vector3D) Sub(other Vector3D) Vector3D {
	return Vector3D{X: vector.X - other.X, Y: vector.Y - other.Y, Z: vector.Z - other.Z}
}
// Mul is an alias for Scale.
func (vector Vector3D) Mul(scale float32) Vector3D {
	return vector.Scale(scale)
}
// Div scales the vector by the reciprocal of scale.
// Note: multiplying by 1/scale can round differently than dividing each
// component, and scale == 0 yields Inf/NaN components rather than a panic.
func (vector Vector3D) Div(scale float32) Vector3D {
	var inv_scale = 1.0 / scale
	return vector.Scale(inv_scale)
}
// Neg returns the componentwise negation.
func (vector Vector3D) Neg() Vector3D {
	return Vector3D{X: -vector.X, Y: -vector.Y, Z: -vector.Z}
}
// Dot returns the dot (inner) product of the two vectors.
func Dot(vector1 Vector3D, vector2 Vector3D) float32 {
	sum := vector1.X * vector2.X
	sum += vector1.Y * vector2.Y
	sum += vector1.Z * vector2.Z
	return sum
}
// Cross returns the cross product vector1 x vector2.
func Cross(vector1 Vector3D, vector2 Vector3D) Vector3D {
	return NewVector(
		vector1.Y*vector2.Z-vector1.Z*vector2.Y,
		vector1.Z*vector2.X-vector1.X*vector2.Z,
		vector1.X*vector2.Y-vector1.Y*vector2.X,
	)
} | geometry/Vector3D.go | 0.885928 | 0.966851 | Vector3D.go | starcoder
package math
var (
	// clipSpacePlanePoints are the eight corners of the clip-space cube
	// ([-1,1]^3): the near-plane (z=-1) quad first, then the far-plane quad.
	clipSpacePlanePoints = []Vector3{
		Vec3(-1, -1, -1),
		Vec3(1, -1, -1),
		Vec3(1, 1, -1),
		Vec3(-1, 1, -1),
		Vec3(-1, -1, 1),
		Vec3(1, -1, 1),
		Vec3(1, 1, 1),
		Vec3(-1, 1, 1)}
)
// A truncated rectangular pyramid.
// Used to define the viewable region and it's projection onto the screen.
type Frustum struct {
	// The six clipping planes: left, right, top, bottom, near, far.
	Left, Right *Plane
	Top, Bottom *Plane
	Near, Far   *Plane

	// planePoints holds the frustum corners in world space (see Update).
	planePoints []Vector3
}
// NewFrustum returns a frustum whose six planes are all distinct copies of
// the zero plane; call Update to derive real planes from a camera matrix.
func NewFrustum() *Frustum {
	zeroPlane := NewPlane(Vec3(0, 0, 0), 0)
	return &Frustum{Left: zeroPlane, Right: zeroPlane.Cpy(),
		Top: zeroPlane.Cpy(), Bottom: zeroPlane.Cpy(),
		Near: zeroPlane.Cpy(), Far: zeroPlane.Cpy(),
		planePoints: make([]Vector3, len(clipSpacePlanePoints))}
}
// Update recomputes the six clipping planes from the frustum corners.
// NOTE(review): the projection of the clip-space corners through
// invProjectionView is commented out (TODO), so planePoints currently stay
// at their zero values and every plane is built from degenerate points —
// confirm before relying on this.
func (f *Frustum) Update(invProjectionView *Matrix4) {
	/* TODO
	for i := range clipSpacePlanePoints {
		f.planePoints[i] = invProjectionView.Project(clipSpacePlanePoints[i])
	}
	*/
	f.Near.Set(f.planePoints[1], f.planePoints[0], f.planePoints[2])
	f.Far.Set(f.planePoints[4], f.planePoints[5], f.planePoints[7])
	f.Left.Set(f.planePoints[0], f.planePoints[4], f.planePoints[3])
	f.Right.Set(f.planePoints[5], f.planePoints[1], f.planePoints[6])
	f.Top.Set(f.planePoints[2], f.planePoints[3], f.planePoints[6])
	f.Bottom.Set(f.planePoints[4], f.planePoints[0], f.planePoints[1])
}
// PointInFrustum returns whether the point lies inside the frustum, i.e.
// on or in front of all six clipping planes.
func (f *Frustum) PointInFrustum(point Vector3) bool {
	for _, plane := range []*Plane{f.Left, f.Right, f.Top, f.Bottom, f.Near, f.Far} {
		if plane.PlaneSide(point) == PlaneSide_Back {
			return false
		}
	}
	return true
}
// SphereInFrustum returns whether the given sphere intersects the frustum:
// the sphere is rejected as soon as its center is more than radius behind
// any of the six planes.
func (f *Frustum) SphereInFrustum(center Vector3, radius float32) bool {
	for _, plane := range []*Plane{f.Left, f.Right, f.Top, f.Bottom, f.Near, f.Far} {
		// Signed distance of center along the plane normal vs -radius.
		if plane.Normal.X*center.X+plane.Normal.Y*center.Y+plane.Normal.Z*center.Z < -radius-plane.D {
			return false
		}
	}
	return true
}
// SphereInFrustumWithoutNearFar is SphereInFrustum restricted to the four
// side planes, ignoring the near and far clipping planes.
func (f *Frustum) SphereInFrustumWithoutNearFar(center Vector3, radius float32) bool {
	for _, plane := range []*Plane{f.Left, f.Right, f.Top, f.Bottom} {
		if plane.Normal.X*center.X+plane.Normal.Y*center.Y+plane.Normal.Z*center.Z < -radius-plane.D {
			return false
		}
	}
	return true
}
// BoundsInFrustum returns whether the given BoundingBox intersects the
// frustum. The box is rejected only when all eight of its corners lie
// behind a single clipping plane — the standard conservative box/frustum
// test, which may report true for some boxes that are actually outside.
func (f *Frustum) BoundsInFrustum(bounds *BoundingBox) bool {
	corners := bounds.Corners()
	// Previously this was six verbatim copies of the same corner-counting
	// loop, one per plane; iterate over the planes instead.
	for _, plane := range []*Plane{f.Left, f.Right, f.Top, f.Bottom, f.Near, f.Far} {
		out := 0
		for i := 0; i < len(corners); i++ {
			if plane.PlaneSide(corners[i]) == PlaneSide_Back {
				out++
			}
		}
		// A bounding box always has 8 corners.
		if out == 8 {
			return false
		}
	}
	return true
} | math/frustum.go | 0.628863 | 0.587618 | frustum.go | starcoder
package camt
import (
"encoding/xml"
"github.com/fairxio/finance-messaging/iso20022"
)
// Document02700104 is the XML document wrapper for a camt.027.001.04
// (Claim Non Receipt) message.
type Document02700104 struct {
	XMLName xml.Name            `xml:"urn:iso:std:iso:20022:tech:xsd:camt.027.001.04 Document"`
	Message *ClaimNonReceiptV04 `xml:"ClmNonRct"`
}
// AddMessage allocates the wrapped ClaimNonReceiptV04 message and returns
// it for population.
func (d *Document02700104) AddMessage() *ClaimNonReceiptV04 {
	message := &ClaimNonReceiptV04{}
	d.Message = message
	return message
}
// Scope
// The Claim Non Receipt message is sent by a case creator/case assigner to a case assignee.
// This message is used to initiate an investigation for missing funds at the creditor (missing credit entry to its account) or at an agent along the processing chain (missing cover for a received payment instruction).
// Usage
// The claim non receipt case occurs in two situations:
// - The creditor is expecting funds from a particular debtor and cannot find the corresponding credit entry on its account. In this situation, it is understood that the creditor will contact its debtor, and that the debtor will trigger the claim non receipt case on its behalf. A workflow where the creditor directly addresses a Claim Non Receipt message to its account servicing institution is not retained.
// - An agent in the processing chain cannot find a cover payment corresponding to a received payment instruction. In this situation, the agent may directly trigger the investigation by sending a Claim Non Receipt message to the sender of the original payment instruction.
// The Claim Non Receipt message covers one and only one payment instruction at a time. If several expected payment instructions/cover instructions are found missing, then multiple Claim Non Receipt messages must be sent.
// Depending on the result of the investigation by a case assignee (incorrect routing, errors/omissions when processing the instruction or even the absence of an error) and the stage at which the payment instruction is being processed, the claim non receipt case may lead to a:
// - Request To Cancel Payment message, sent to the subsequent agent in the payment processing chain, if the original payment instruction has been incorrectly routed through the chain of agents. (This also implies that a new, corrected, payment instruction is issued).
// - Request To Modify Payment message, sent to the subsequent agent in the payment processing chain, if a truncation or omission has occurred during the processing of the original payment instruction.
// If the above situations occur, the assignee wanting to request a payment cancellation or payment modification should first send out a Resolution Of Investigation with a confirmation status that indicates that either cancellation (CWFW) modification (MWFW) or unable to apply (UWFW) will follow. (See section on Resolution Of Investigation for more details).
// If the cover is missing, the case assignee may also simply issue the omitted cover payment or when the initial cover information was incorrect, update the cover (through modification and/or cancellation as required) with the correction information provided in the ClaimNonReceipt message. The case assignee will issue a Resolution Of Investigation message with the CorrectionTransaction element mentioning the references of the cover payment.
// The Claim Non Receipt message may be forwarded to subsequent case assignees.
// The ClaimNonReceipt message has the following main characteristics:
// - Case Identification:
// The case creator assigns a unique case identification. This information will be passed unchanged to subsequent case assignee(s).
// - Underlying Payment:
// The case creator refers to the underlying payment instruction for the unambiguous identification of the payment instruction. This identification needs to be updated by the subsequent case assigner(s) in order to match the one used with their case assignee(s).
// - MissingCoverIndicator:
// The MissingCoverIndication element distinguishes between a missing cover situation (when set to YES) or a missing funds situation (when set to NO).
// - CoverCorrection
// The CoverCorrection element allows the case assigner to provide corrected cover information, when these are incorrect in the underlying payment instruction for which the cover is issued.
type ClaimNonReceiptV04 struct {

	// Identifies the assignment of an investigation case from an assigner to an assignee.
	// Usage: The Assigner must be the sender of this confirmation and the Assignee must be the receiver.
	Assignment *iso20022.CaseAssignment3 `xml:"Assgnmt"`

	// Identifies the investigation case.
	Case *iso20022.Case3 `xml:"Case"`

	// Identifies the payment instruction for which the Creditor has not received the funds.
	// Usage: In case of a missing cover, it must be the identification of the related payment instruction.
	// In case of a claim non receipt initiated by the debtor, it must be the identification of the instruction.
	Underlying *iso20022.UnderlyingTransaction2Choice `xml:"Undrlyg"`

	// Provides the cover related information of a claim non receipt investigation. The absence of the component means that the message is not a cover related investigation.
	CoverDetails *iso20022.MissingCover3 `xml:"CoverDtls,omitempty"`

	// Additional information that cannot be captured in the structured elements and/or any other specific block.
	SupplementaryData []*iso20022.SupplementaryData1 `xml:"SplmtryData,omitempty"`
}
// AddAssignment allocates and returns the case-assignment block.
func (c *ClaimNonReceiptV04) AddAssignment() *iso20022.CaseAssignment3 {
	assignment := &iso20022.CaseAssignment3{}
	c.Assignment = assignment
	return assignment
}
// AddCase allocates and returns the investigation-case block.
func (c *ClaimNonReceiptV04) AddCase() *iso20022.Case3 {
	investigationCase := &iso20022.Case3{}
	c.Case = investigationCase
	return investigationCase
}
// AddUnderlying allocates and returns the underlying-transaction block.
func (c *ClaimNonReceiptV04) AddUnderlying() *iso20022.UnderlyingTransaction2Choice {
	underlying := &iso20022.UnderlyingTransaction2Choice{}
	c.Underlying = underlying
	return underlying
}
// AddCoverDetails allocates and returns the missing-cover block.
func (c *ClaimNonReceiptV04) AddCoverDetails() *iso20022.MissingCover3 {
	coverDetails := &iso20022.MissingCover3{}
	c.CoverDetails = coverDetails
	return coverDetails
}
// AddSupplementaryData appends a new supplementary-data block and returns
// it so the caller can fill it in.
func (c *ClaimNonReceiptV04) AddSupplementaryData() *iso20022.SupplementaryData1 {
	newValue := new(iso20022.SupplementaryData1)
	c.SupplementaryData = append(c.SupplementaryData, newValue)
	return newValue
} | iso20022/camt/ClaimNonReceiptV04.go | 0.732783 | 0.519643 | ClaimNonReceiptV04.go | starcoder
package world
import (
"github.com/spkaeros/rscgo/pkg/game/entity"
"sync"
)
//Pathway Represents a path for a mobile entity to traverse across the virtual world.
type Pathway struct {
	sync.RWMutex
	// StartX,StartY anchor the path; the waypoint deltas below are
	// relative to this anchor tile.
	StartX, StartY int
	WaypointsX     []int
	WaypointsY     []int
	// CurrentWaypoint indexes the waypoint currently being walked toward;
	// -1 (or out of range) resolves to the anchor tile itself.
	CurrentWaypoint int
}
//NewPathwayToCoords returns a new Pathway pointing to the specified location. Will attempt traversal to l via a
// simple algorithm: if curX < destX then increase, if curX > destX then decrease, same for y, until equal.
// The path has no waypoints, only the destination anchor.
func NewPathwayToCoords(destX, destY int) *Pathway {
	return &Pathway{StartX: destX, StartY: destY}
}
//NewPathwayToLocation returns a new waypoint-free Pathway pointing at the
// given location; traversal steps straight toward it one tile at a time.
func NewPathwayToLocation(l entity.Location) *Pathway {
	return NewPathwayToCoords(l.X(), l.Y())
}
//NewPathway returns a new Pathway with the specified variables. destX and destY are a straight line, and waypoints define turns from that point.
// CurrentWaypoint starts at -1, i.e. before the first waypoint.
func NewPathway(destX, destY int, waypointsX, waypointsY []int) *Pathway {
	return &Pathway{StartX: destX, StartY: destY, WaypointsX: waypointsX, WaypointsY: waypointsY, CurrentWaypoint: -1}
}
// MakePath runs the AStar pathfinder from start to end. The boolean is
// false when no path was found within the pathfinder's step budget.
func MakePath(start, end Location) (*Pathway, bool) {
	if path := NewPathfinder(start, end).MakePath(); path != nil {
		return path, true
	}
	return nil, false
}
//countWaypoints Returns the length of the larger of the two waypoint
// slices in this path.
func (p *Pathway) countWaypoints() int {
	p.RLock()
	defer p.RUnlock()
	count := len(p.WaypointsX)
	if yCount := len(p.WaypointsY); yCount > count {
		count = yCount
	}
	return count
}
//waypointX Returns the x coordinate of waypoint w: StartX plus the stored
// delta. Out-of-range indices resolve to StartX itself.
func (p *Pathway) waypointX(w int) int {
	p.RLock()
	defer p.RUnlock()
	if w < 0 || w >= len(p.WaypointsX) {
		return p.StartX
	}
	return p.StartX + p.WaypointsX[w]
}
//waypointY Returns the y coordinate of waypoint w: StartY plus the stored
// delta. Out-of-range indices resolve to StartY itself.
func (p *Pathway) waypointY(w int) int {
	p.RLock()
	defer p.RUnlock()
	if w < 0 || w >= len(p.WaypointsY) {
		return p.StartY
	}
	return p.StartY + p.WaypointsY[w]
}
//nextTile Returns the next destination within our path. If our current waypoint is out of bounds, it will return
// the same value as startingTile.
func (p *Pathway) nextTile() Location {
	return NewLocation(p.waypointX(p.CurrentWaypoint), p.waypointY(p.CurrentWaypoint))
}
//startingTile Returns the location of the start of the path, This location is actually not our starting location,
// but the first tile that we begin traversing our waypoint deltas from. Required to walk to this location to start
// traversing waypoints,
func (p *Pathway) startingTile() Location {
	return NewLocation(p.StartX, p.StartY)
}
//endingTile Returns the location of the last waypoint of this path, i.e. the tile occupied once the whole
// path has been traversed.
// Fixes two defects in the original: the Y coordinate was computed from waypointX (copy-paste error),
// and StartX was added a second time even though waypointX/waypointY already include the start coordinate.
func (p *Pathway) endingTile() entity.Location {
	return NewLocation(p.waypointX(p.countWaypoints()-1), p.waypointY(p.countWaypoints()-1))
}
//addFirstWaypoint Prepends the waypoint (x, y) to this path and returns the path for chaining.
func (p *Pathway) addFirstWaypoint(x, y int) *Pathway {
	p.Lock()
	defer p.Unlock()
	xs := make([]int, len(p.WaypointsX)+1)
	xs[0] = x
	copy(xs[1:], p.WaypointsX)
	p.WaypointsX = xs
	ys := make([]int, len(p.WaypointsY)+1)
	ys[0] = y
	copy(ys[1:], p.WaypointsY)
	p.WaypointsY = ys
	return p
}
//NextTileToward Returns the next tile toward the final destination of this pathway from currentLocation
func (p *Pathway) nextTileFrom(currentLocation entity.Location) entity.Location {
dest := p.nextTile()
destX, destY := dest.X(), dest.Y()
currentX, currentY := currentLocation.X(), currentLocation.Y()
destination := NewLocation(currentX, currentY)
switch {
case currentX > destX:
destination.x.Dec()
case currentX < destX:
destination.x.Inc()
}
switch {
case currentY > destY:
destination.y.Dec()
case currentY < destY:
destination.y.Inc()
}
return destination
} | pkg/game/world/pathway.go | 0.832169 | 0.431464 | pathway.go | starcoder |
package wardleyToGo
import (
"errors"
"fmt"
"image"
"image/draw"
"strings"
"gonum.org/v1/gonum/graph/simple"
)
// a Map is a directed graph whose components knows their own position wrt to an anchor.
// The anchor is the point A of a rectangle as defined by
// A := image.Point{}
// image.Rectangle{A, Pt(100, 100)}
type Map struct {
	id int64
	// Title of the map, rendered by String and SVG output.
	Title string
	// Canvas is the function that will draw the initial map
	// allowing the placement of the axis, legend and so on
	Canvas draw.Drawer
	Annotations []*Annotation
	AnnotationsPlacement image.Point
	// area is the bounding rectangle of the map; NewMap initializes it to 100x100.
	area image.Rectangle
	*simple.DirectedGraph
}
// String renders the map in a DOT-like textual form: one line per node with its
// position (or area bounds for Area nodes), then one line per edge with its type.
// Note: every node is asserted to be a Component and every edge a Collaboration;
// the unchecked type assertions panic if a foreign node/edge was added to the graph.
func (m *Map) String() string {
	var b strings.Builder
	b.WriteString("map {\n")
	nodes := m.DirectedGraph.Nodes()
	for nodes.Next() {
		n := nodes.Node().(Component)
		if a, ok := n.(Area); ok {
			// Areas are printed with their full rectangle [minX,minY,maxX,maxY].
			b.WriteString(
				fmt.Sprintf("\t%v '%v' [%v,%v,%v,%v];\n", a.ID(), a,
					a.GetArea().Min.X, a.GetArea().Min.Y,
					a.GetArea().Max.X, a.GetArea().Max.Y))
		} else {
			b.WriteString(fmt.Sprintf("\t%v '%v' [%v,%v];\n", n.ID(), n, n.GetPosition().X, n.GetPosition().Y))
		}
	}
	b.WriteString("\n")
	edges := m.DirectedGraph.Edges()
	for edges.Next() {
		e := edges.Edge().(Collaboration)
		b.WriteString(fmt.Sprintf("\t%v -> %v [%v];\n", e.From().ID(), e.To().ID(), e.GetType()))
	}
	b.WriteString("}\n")
	return b.String()
}
// NewMap returns a map with the given graph-node id and an initial area of 100x100.
func NewMap(id int64) *Map {
	return &Map{
		id:            id,
		area:          image.Rect(0, 0, 100, 100),
		DirectedGraph: simple.NewDirectedGraph(),
	}
}
// ID returns the map's identifier. A Map fulfills the graph.Node interface;
// therefore it can be part of a graph of maps.
func (m *Map) ID() int64 {
	return m.id
}
// GetPosition fulfills the components.Component interface. Therefore a map can be a component of another map.
// This allows submapping.
// The position is the center of the area of the map.
// Fix: the original returned half the area's size ((Max-Min)/2) rather than its center;
// the two only coincide when area.Min is the origin (the NewMap default).
func (m *Map) GetPosition() image.Point {
	return image.Pt(
		m.area.Min.X+(m.area.Max.X-m.area.Min.X)/2,
		m.area.Min.Y+(m.area.Max.Y-m.area.Min.Y)/2,
	)
}
// GetArea returns the bounding rectangle of the map.
func (m *Map) GetArea() image.Rectangle {
	return m.area
}
// Draw aligns r.Min in dst with sp in src and then replaces the
// rectangle r in dst with the result of drawing src on dst.
// If the Components and Collaboration elements of the map are draw.Drawer, their methods
// are called accordingly. The Canvas (if any) is drawn first, then edges, then nodes,
// so nodes end up on top of the links between them.
func (m *Map) Draw(dst draw.Image, r image.Rectangle, src image.Image, sp image.Point) {
	if m.Canvas != nil {
		m.Canvas.Draw(dst, r, src, sp)
	}
	// Draw edges first
	edges := m.Edges()
	for edges.Next() {
		if e, ok := edges.Edge().(draw.Drawer); ok {
			e.Draw(dst, r, src, sp)
		}
	}
	nodes := m.Nodes()
	for nodes.Next() {
		if n, ok := nodes.Node().(draw.Drawer); ok {
			n.Draw(dst, r, src, sp)
		}
	}
}
// SVG representation, class is subMapElement and element
/*
func (m *Map) SVG(s *svg.SVG, bounds image.Rectangle) {
coords := utils.CalcCoords(m.GetPosition(), bounds)
s.Gid(strconv.FormatInt(m.id, 10))
s.Translate(coords.X, coords.Y)
s.Text(10, 10, m.Title)
s.Circle(0, 0, 5, `stroke-width="1"`, `stroke="black"`, `fill="black"`, `class="subMapElement, element"`)
s.Gend()
s.Gend()
}
*/
// AddComponent adds e to the graph. It returns an error if e is out-of-bounds,
// meaning its position falls outside the map's area (by default 0..100 exclusive of Max).
// Consistency fix: the bounds check now uses m.area instead of a hard-coded
// image.Rect(0, 0, 100, 100); identical behavior for maps built with NewMap.
func (m *Map) AddComponent(e Component) error {
	if !e.GetPosition().In(m.area) {
		return errors.New("component out of bounds")
	}
	m.DirectedGraph.AddNode(e)
	return nil
}
func (m *Map) SetCollaboration(e Collaboration) error {
m.DirectedGraph.SetEdge(e)
return nil
} | map.go | 0.720172 | 0.488588 | map.go | starcoder |
package ssa
import (
"cmd_local/internal/src"
"math"
)
// A biasedSparseMap is a sparseMap for integers between J and K inclusive,
// where J might be somewhat larger than zero (and K-J is probably much smaller than J).
// (The motivating use case is the line numbers of statements for a single function.)
// Not all features of a SparseMap are exported, and it is also easy to treat a
// biasedSparseMap like a SparseSet.
type biasedSparseMap struct {
	s     *sparseMap // underlying map over keys shifted down by first; nil means the map is empty/unusable
	first int        // smallest key this map can store; keys are stored as (key - first)
}
// newBiasedSparseMap returns a new biasedSparseMap for values between first and last, inclusive.
// If first > last the returned map stores nothing (s is nil and first is MaxInt32 so no key is in range).
func newBiasedSparseMap(first, last int) *biasedSparseMap {
	if first > last {
		return &biasedSparseMap{first: math.MaxInt32, s: nil}
	}
	return &biasedSparseMap{first: first, s: newSparseMap(1 + last - first)}
}
// cap returns one more than the largest key valid for s.
// A nil or empty map has cap 0.
func (s *biasedSparseMap) cap() int {
	if s == nil || s.s == nil {
		return 0
	}
	return s.s.cap() + int(s.first)
}
// size returns the number of entries stored in s.
func (s *biasedSparseMap) size() int {
	if s == nil || s.s == nil {
		return 0
	}
	return s.s.size()
}
// contains reports whether x is a key in s.
// Keys outside [first, cap) are never contained.
func (s *biasedSparseMap) contains(x uint) bool {
	if s == nil || s.s == nil {
		return false
	}
	if int(x) < s.first {
		return false
	}
	if int(x) >= s.cap() {
		return false
	}
	return s.s.contains(ID(int(x) - s.first))
}
// get returns the value s maps for key x, or -1 if
// x is not mapped or is out of range for s.
func (s *biasedSparseMap) get(x uint) int32 {
	if s == nil || s.s == nil {
		return -1
	}
	if int(x) < s.first {
		return -1
	}
	if int(x) >= s.cap() {
		return -1
	}
	return s.s.get(ID(int(x) - s.first))
}
// getEntry returns the i'th key and value stored in s,
// where 0 <= i < s.size().
// NOTE(review): unlike the other accessors this has no nil/range guard;
// callers must respect the stated precondition or it will panic.
func (s *biasedSparseMap) getEntry(i int) (x uint, v int32) {
	e := s.s.contents()[i]
	x = uint(int(e.key) + s.first)
	v = e.val
	return
}
// add inserts x->0 into s, provided that x is in the range of keys stored in s.
// Out-of-range keys are silently ignored.
func (s *biasedSparseMap) add(x uint) {
	if int(x) < s.first || int(x) >= s.cap() {
		return
	}
	s.s.set(ID(int(x)-s.first), 0, src.NoXPos)
}
// set inserts x->v into s, provided that x is in the range of keys stored in s.
// Out-of-range keys are silently ignored.
func (s *biasedSparseMap) set(x uint, v int32) {
	if int(x) < s.first || int(x) >= s.cap() {
		return
	}
	s.s.set(ID(int(x)-s.first), v, src.NoXPos)
}
// remove removes key x from s.
func (s *biasedSparseMap) remove(x uint) {
if int(x) < s.first || int(x) >= s.cap() {
return
}
s.s.remove(ID(int(x) - s.first))
}
func (s *biasedSparseMap) clear() {
if s.s != nil {
s.s.clear()
}
} | src/cmd_local/compile/internal/ssa/biasedsparsemap.go | 0.75183 | 0.478833 | biasedsparsemap.go | starcoder |
package tuple
import (
"fmt"
"golang.org/x/exp/constraints"
)
// T2 is a tuple type holding 2 generic values.
type T2[Ty1, Ty2 any] struct {
	V1 Ty1
	V2 Ty2
}
// Len returns the number of values held by the tuple. Always 2 for T2.
func (t T2[Ty1, Ty2]) Len() int {
	return 2
}
// Values returns the values held by the tuple.
func (t T2[Ty1, Ty2]) Values() (Ty1, Ty2) {
	return t.V1, t.V2
}
// Array returns an array of the tuple values, each boxed as any.
func (t T2[Ty1, Ty2]) Array() [2]any {
	return [2]any{
		t.V1,
		t.V2,
	}
}
// Slice returns a slice of the tuple values.
func (t T2[Ty1, Ty2]) Slice() []any {
	a := t.Array()
	return a[:]
}
// String returns the string representation of the tuple.
func (t T2[Ty1, Ty2]) String() string {
	return tupString(t.Slice())
}
// GoString returns a Go-syntax representation of the tuple.
func (t T2[Ty1, Ty2]) GoString() string {
	return tupGoString(t.Slice())
}
// New2 creates a new tuple holding 2 generic values.
func New2[Ty1, Ty2 any](v1 Ty1, v2 Ty2) T2[Ty1, Ty2] {
	return T2[Ty1, Ty2]{
		V1: v1,
		V2: v2,
	}
}
// FromArray2 returns a tuple from an array of length 2.
// If any of the values can not be converted to the generic type, an error is returned.
func FromArray2[Ty1, Ty2 any](arr [2]any) (T2[Ty1, Ty2], error) {
	v1, ok := arr[0].(Ty1)
	if !ok {
		return T2[Ty1, Ty2]{}, fmt.Errorf("value at array index 0 expected to have type %s but has type %T", typeName[Ty1](), arr[0])
	}
	v2, ok := arr[1].(Ty2)
	if !ok {
		return T2[Ty1, Ty2]{}, fmt.Errorf("value at array index 1 expected to have type %s but has type %T", typeName[Ty2](), arr[1])
	}
	return New2(v1, v2), nil
}
// FromArray2X returns a tuple from an array of length 2.
// If any of the values can not be converted to the generic type, the function panics.
func FromArray2X[Ty1, Ty2 any](arr [2]any) T2[Ty1, Ty2] {
	return FromSlice2X[Ty1, Ty2](arr[:])
}
// FromSlice2 returns a tuple from a slice of length 2.
// If the length of the slice doesn't match, or any of the values can not be converted to the generic type, an error is returned.
func FromSlice2[Ty1, Ty2 any](values []any) (T2[Ty1, Ty2], error) {
	if len(values) != 2 {
		return T2[Ty1, Ty2]{}, fmt.Errorf("slice length %d must match number of tuple values 2", len(values))
	}
	v1, ok := values[0].(Ty1)
	if !ok {
		return T2[Ty1, Ty2]{}, fmt.Errorf("value at slice index 0 expected to have type %s but has type %T", typeName[Ty1](), values[0])
	}
	v2, ok := values[1].(Ty2)
	if !ok {
		return T2[Ty1, Ty2]{}, fmt.Errorf("value at slice index 1 expected to have type %s but has type %T", typeName[Ty2](), values[1])
	}
	return New2(v1, v2), nil
}
// FromSlice2X returns a tuple from a slice of length 2.
// If the length of the slice doesn't match, or any of the values can not be converted to the generic type, the function panics
// (length mismatch panics explicitly; type mismatch panics via the unchecked assertions below).
func FromSlice2X[Ty1, Ty2 any](values []any) T2[Ty1, Ty2] {
	if len(values) != 2 {
		panic(fmt.Errorf("slice length %d must match number of tuple values 2", len(values)))
	}
	v1 := values[0].(Ty1)
	v2 := values[1].(Ty2)
	return New2(v1, v2)
}
// Equal2 returns whether the host tuple is equal to the other tuple.
// All tuple elements of the host and guest parameters must match the "comparable" built-in constraint.
// To test equality of tuples that hold custom Equalable values, use the Equal2E function.
// To test equality of tuples that hold custom Comparable values, use the Equal2C function.
// Otherwise, use Equal or reflect.DeepEqual to test tuples of any types.
func Equal2[Ty1, Ty2 comparable](host, guest T2[Ty1, Ty2]) bool {
	return host.V1 == guest.V1 && host.V2 == guest.V2
}
// Equal2E returns whether the host tuple is semantically equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the Equalable constraint.
// To test equality of tuples that hold built-in "comparable" values, use the Equal2 function.
// To test equality of tuples that hold custom Comparable values, use the Equal2C function.
// Otherwise, use Equal or reflect.DeepEqual to test tuples of any types.
func Equal2E[Ty1 Equalable[Ty1], Ty2 Equalable[Ty2]](host, guest T2[Ty1, Ty2]) bool {
	return host.V1.Equal(guest.V1) && host.V2.Equal(guest.V2)
}
// Equal2C returns whether the host tuple is semantically equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To test equality of tuples that hold built-in "comparable" values, use the Equal2 function.
// To test equality of tuples that hold custom Equalable values, use the Equal2E function.
// Otherwise, use Equal or reflect.DeepEqual to test tuples of any types.
func Equal2C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2]](host, guest T2[Ty1, Ty2]) bool {
	return host.V1.CompareTo(guest.V1).EQ() && host.V2.CompareTo(guest.V2).EQ()
}
// Compare2 returns whether the host tuple is semantically less than, equal to, or greater than the guest tuple.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// Comparison is lexicographic: V1 first, V2 only if V1 compares equal.
// To compare tuples that hold custom comparable values, use the Compare2C function.
func Compare2[Ty1, Ty2 constraints.Ordered](host, guest T2[Ty1, Ty2]) OrderedComparisonResult {
	return multiCompare(
		func() OrderedComparisonResult { return compareOrdered(host.V1, guest.V1) },
		func() OrderedComparisonResult { return compareOrdered(host.V2, guest.V2) },
	)
}
// Compare2C returns whether the host tuple is semantically less than, equal to, or greater than the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the Compare2 function.
func Compare2C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2]](host, guest T2[Ty1, Ty2]) OrderedComparisonResult {
	return multiCompare(
		func() OrderedComparisonResult { return host.V1.CompareTo(guest.V1) },
		func() OrderedComparisonResult { return host.V2.CompareTo(guest.V2) },
	)
}
// LessThan2 returns whether the host tuple is semantically less than the guest tuple.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the LessThan2C function.
func LessThan2[Ty1, Ty2 constraints.Ordered](host, guest T2[Ty1, Ty2]) bool {
	return Compare2(host, guest).LT()
}
// LessThan2C returns whether the host tuple is semantically less than the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the LessThan2 function.
func LessThan2C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2]](host, guest T2[Ty1, Ty2]) bool {
	return Compare2C(host, guest).LT()
}
// LessOrEqual2 returns whether the host tuple is semantically less than or equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the LessOrEqual2C function.
func LessOrEqual2[Ty1, Ty2 constraints.Ordered](host, guest T2[Ty1, Ty2]) bool {
	return Compare2(host, guest).LE()
}
// LessOrEqual2C returns whether the host tuple is semantically less than or equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the LessOrEqual2 function.
func LessOrEqual2C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2]](host, guest T2[Ty1, Ty2]) bool {
	return Compare2C(host, guest).LE()
}
// GreaterThan2 returns whether the host tuple is semantically greater than the guest tuple.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the GreaterThan2C function.
func GreaterThan2[Ty1, Ty2 constraints.Ordered](host, guest T2[Ty1, Ty2]) bool {
	return Compare2(host, guest).GT()
}
// GreaterThan2C returns whether the host tuple is semantically greater than the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the GreaterThan2 function.
func GreaterThan2C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2]](host, guest T2[Ty1, Ty2]) bool {
	return Compare2C(host, guest).GT()
}
// GreaterOrEqual2 returns whether the host tuple is semantically greater than or equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the GreaterOrEqual2C function.
func GreaterOrEqual2[Ty1, Ty2 constraints.Ordered](host, guest T2[Ty1, Ty2]) bool {
	return Compare2(host, guest).GE()
}
// GreaterOrEqual2C returns whether the host tuple is semantically greater than or equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the GreaterOrEqual2 function.
func GreaterOrEqual2C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2]](host, guest T2[Ty1, Ty2]) bool {
return Compare2C(host, guest).GE()
} | tuple2.go | 0.848878 | 0.554832 | tuple2.go | starcoder |
package main
import (
"crypto/elliptic"
"math/big"
)
// Secp256k1CurveParams implements the secp256k1 curve by embedding
// elliptic.CurveParams and overriding the group operations, since the
// standard library's generic CurveParams arithmetic assumes a = -3,
// which does not hold for secp256k1 (a = 0).
type Secp256k1CurveParams struct {
	elliptic.CurveParams
}
// secp256k1 is the package-level curve instance, populated by InitSecp256k1.
var secp256k1 *Secp256k1CurveParams
// InitSecp256k1 populates the package-level secp256k1 curve instance with the
// standard curve parameters (prime p, order n, constant b, and base point G).
// The SetString errors are deliberately ignored: the hex constants are fixed
// and known-valid.
// Fix: BitSize was assigned twice in the original; it is now set once.
func InitSecp256k1() {
	secp256k1 = &Secp256k1CurveParams{}
	secp256k1.Name = "Secp256k1"
	secp256k1.BitSize = 256
	p, _ := new(big.Int).SetString("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F", 16)
	secp256k1.P = p
	n, _ := new(big.Int).SetString("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141", 16)
	secp256k1.N = n
	b, _ := new(big.Int).SetString("0000000000000000000000000000000000000000000000000000000000000007", 16)
	secp256k1.B = b
	gx, _ := new(big.Int).SetString("79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798", 16)
	secp256k1.Gx = gx
	gy, _ := new(big.Int).SetString("483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8", 16)
	secp256k1.Gy = gy
}
// IsOnCurve reports whether the affine point (x, y) satisfies the curve
// equation y² = x³ + b (mod p). The chained calls mutate y2/x3 in place,
// which is safe since both are local; big.Int.Mod yields a non-negative
// result, so the reduced difference is 0 exactly when the point is on the curve.
func (curve *Secp256k1CurveParams) IsOnCurve(x, y *big.Int) bool {
	y2 := new(big.Int).Exp(y, big.NewInt(2), nil)
	x3 := new(big.Int).Exp(x, big.NewInt(3), nil)
	ans := new(big.Int).Mod(y2.Sub(y2, x3.Add(x3, curve.B)), curve.P)
	return ans.Cmp(big.NewInt(0)) == 0
}
// Params returns a freshly allocated copy of the curve parameters,
// satisfying the elliptic.Curve interface.
func (curve *Secp256k1CurveParams) Params() *elliptic.CurveParams {
	return &elliptic.CurveParams{
		P:       curve.P,
		N:       curve.N,
		B:       curve.B,
		Gx:      curve.Gx,
		Gy:      curve.Gy,
		BitSize: curve.BitSize,
		Name:    curve.Name,
	}
}
// addJacobian returns the sum of the points (x1,y1,z1) and (x2,y2,z2) in
// Jacobian coordinates, where (X, Y, Z) represents the affine point (X/Z², Y/Z³)
// and Z = 0 is the point at infinity. The structure mirrors the standard
// Jacobian addition formulas (it appears to follow the EFD "add-2007-bl"
// shape, as in Go's crypto/elliptic — TODO confirm against the reference).
func (curve *Secp256k1CurveParams) addJacobian(x1, y1, z1, x2, y2, z2 *big.Int) (*big.Int, *big.Int, *big.Int) {
	x3, y3, z3 := new(big.Int), new(big.Int), new(big.Int)
	// Adding the point at infinity returns the other operand unchanged.
	if z1.Sign() == 0 {
		x3.Set(x2)
		y3.Set(y2)
		z3.Set(z2)
		return x3, y3, z3
	}
	if z2.Sign() == 0 {
		x3.Set(x1)
		y3.Set(y1)
		z3.Set(z1)
		return x3, y3, z3
	}
	z1z1 := new(big.Int).Mul(z1, z1)
	z1z1.Mod(z1z1, curve.P)
	z2z2 := new(big.Int).Mul(z2, z2)
	z2z2.Mod(z2z2, curve.P)
	u1 := new(big.Int).Mul(x1, z2z2)
	u1.Mod(u1, curve.P)
	u2 := new(big.Int).Mul(x2, z1z1)
	u2.Mod(u2, curve.P)
	// h = U2 - U1; h == 0 means the x coordinates coincide.
	h := new(big.Int).Sub(u2, u1)
	xEqual := h.Sign() == 0
	if h.Sign() == -1 {
		h.Add(h, curve.P)
	}
	i := new(big.Int).Lsh(h, 1)
	i.Mul(i, i)
	j := new(big.Int).Mul(h, i)
	s1 := new(big.Int).Mul(y1, z2)
	s1.Mul(s1, z2z2)
	s1.Mod(s1, curve.P)
	s2 := new(big.Int).Mul(y2, z1)
	s2.Mul(s2, z1z1)
	s2.Mod(s2, curve.P)
	r := new(big.Int).Sub(s2, s1)
	if r.Sign() == -1 {
		r.Add(r, curve.P)
	}
	yEqual := r.Sign() == 0
	// Same x and same y means P1 == P2, which must be handled by doubling.
	if xEqual && yEqual {
		return curve.doubleJacobian(x1, y1, z1)
	}
	r.Lsh(r, 1)
	v := new(big.Int).Mul(u1, i)
	x3.Set(r)
	x3.Mul(x3, x3)
	x3.Sub(x3, j)
	x3.Sub(x3, v)
	x3.Sub(x3, v)
	x3.Mod(x3, curve.P)
	y3.Set(r)
	v.Sub(v, x3)
	y3.Mul(y3, v)
	s1.Mul(s1, j)
	s1.Lsh(s1, 1)
	y3.Sub(y3, s1)
	y3.Mod(y3, curve.P)
	z3.Add(z1, z2)
	z3.Mul(z3, z3)
	z3.Sub(z3, z1z1)
	z3.Sub(z3, z2z2)
	z3.Mul(z3, h)
	z3.Mod(z3, curve.P)
	return x3, y3, z3
}
// affineFromJacobian converts a Jacobian point (x, y, z) back to affine
// coordinates (x/z², y/z³ mod p). The point at infinity (z == 0) maps to (0, 0).
func (curve *Secp256k1CurveParams) affineFromJacobian(x, y, z *big.Int) (xOut, yOut *big.Int) {
	if z.Sign() == 0 {
		return new(big.Int), new(big.Int)
	}
	zinv := new(big.Int).ModInverse(z, curve.P)
	zinvsq := new(big.Int).Mul(zinv, zinv)
	xOut = new(big.Int).Mul(x, zinvsq)
	xOut.Mod(xOut, curve.P)
	zinvsq.Mul(zinvsq, zinv)
	yOut = new(big.Int).Mul(y, zinvsq)
	yOut.Mod(yOut, curve.P)
	return
}
func zForAffine(x, y *big.Int) *big.Int {
z := new(big.Int)
if x.Sign() != 0 || y.Sign() != 0 {
z.SetInt64(1)
}
return z
}
// Add returns the sum of the affine points (x1, y1) and (x2, y2), performing
// the work in Jacobian coordinates and converting back at the end.
func (curve *Secp256k1CurveParams) Add(x1, y1, x2, y2 *big.Int) (x, y *big.Int) {
	z1 := zForAffine(x1, y1)
	z2 := zForAffine(x2, y2)
	return curve.affineFromJacobian(curve.addJacobian(x1, y1, z1, x2, y2, z2))
}
// doubleJacobian returns 2*(x, y, z) in Jacobian coordinates for a curve with
// a = 0 (intermediate products are only reduced mod p at the end, which is
// correct but lets intermediates grow; presumably a deliberate simplicity
// trade-off — TODO confirm performance is acceptable).
func (curve *Secp256k1CurveParams) doubleJacobian(x, y, z *big.Int) (*big.Int, *big.Int, *big.Int) {
	a := new(big.Int).Mul(x, x)
	b := new(big.Int).Mul(y, y)
	c := new(big.Int).Mul(b, b)
	// d = 2*((x+b)² - a - c)
	d := new(big.Int).Add(x, b)
	d.Mul(d, d)
	d.Sub(d, a)
	d.Sub(d, c)
	d.Mul(d, big.NewInt(2))
	// e = 3*x² (no "+ a*z⁴" term since the curve coefficient a is 0)
	e := new(big.Int).Mul(big.NewInt(3), a)
	f := new(big.Int).Mul(e, e)
	x3 := new(big.Int).Mul(big.NewInt(2), d)
	x3.Sub(f, x3)
	x3.Mod(x3, curve.P)
	y3 := new(big.Int).Sub(d, x3)
	y3.Mul(e, y3)
	y3.Sub(y3, new(big.Int).Mul(big.NewInt(8), c))
	y3.Mod(y3, curve.P)
	z3 := new(big.Int).Mul(y, z)
	z3.Mul(big.NewInt(2), z3)
	z3.Mod(z3, curve.P)
	return x3, y3, z3
}
// Double returns 2*(x1, y1), converting to Jacobian coordinates, doubling,
// and converting back to affine.
func (curve *Secp256k1CurveParams) Double(x1, y1 *big.Int) (x, y *big.Int) {
	z1 := zForAffine(x1, y1)
	return curve.affineFromJacobian(curve.doubleJacobian(x1, y1, z1))
}
// ScalarMult returns k*(Bx, By) where k is a big-endian byte slice, using a
// plain left-to-right double-and-add over every bit of k.
// NOTE(review): this is not constant-time — the add only happens on set bits —
// so it leaks timing information about k; acceptable only for non-secret scalars.
// Fix: the original named its loop variable "byte", shadowing the builtin type;
// renamed to kb. Japanese inline comments translated to English.
func (curve *Secp256k1CurveParams) ScalarMult(Bx, By *big.Int, k []byte) (x, y *big.Int) {
	Bz := new(big.Int).SetInt64(1)
	x, y, z := new(big.Int), new(big.Int), new(big.Int)
	for _, kb := range k {
		for bitNum := 0; bitNum < 8; bitNum++ {
			x, y, z = curve.doubleJacobian(x, y, z) // always double
			if kb&0x80 == 0x80 { // add when the current (top) bit is 1
				x, y, z = curve.addJacobian(Bx, By, Bz, x, y, z)
			}
			kb <<= 1
		}
	}
	return curve.affineFromJacobian(x, y, z)
}
func (curve *Secp256k1CurveParams) ScalarBaseMult(k []byte) (x, y *big.Int) {
return curve.ScalarMult(curve.Gx, curve.Gy, k)
} | secp256k1.go | 0.672224 | 0.431345 | secp256k1.go | starcoder |
// Copied from Go's text/template/parse package and modified for yacc.
// Package yacc parses .y files.
package yacc
import (
"fmt"
"runtime"
)
// Tree is the representation of a single parsed file.
type Tree struct {
	Name        string // name of the template represented by the tree.
	Productions []*ProductionNode
	text        string // text parsed to create the template (or its parent)
	// Parsing only; cleared after parse.
	lex       *lexer
	token     [2]item // two-token lookahead for parser.
	peekCount int     // number of tokens pushed back into the lookahead buffer
}
// Parse parses the yacc file text with optional name and returns the resulting
// tree along with any parse error.
func Parse(name, text string) (t *Tree, err error) {
	t = New(name)
	t.text = text
	err = t.Parse(text)
	return
}
// next returns the next token, consuming a pushed-back token first if any.
func (t *Tree) next() item {
	if t.peekCount > 0 {
		t.peekCount--
	} else {
		t.token[0] = t.lex.nextItem()
	}
	return t.token[t.peekCount]
}
// backup backs the input stream up one token.
func (t *Tree) backup() {
	t.peekCount++
}
// peek returns but does not consume the next token.
func (t *Tree) peek() item {
	if t.peekCount > 0 {
		return t.token[t.peekCount-1]
	}
	t.peekCount = 1
	t.token[0] = t.lex.nextItem()
	return t.token[0]
}
// Parsing.
// New allocates a new parse tree with the given name.
func New(name string) *Tree {
	return &Tree{
		Name: name,
	}
}
// errorf formats the error and terminates processing by panicking;
// the panic is converted to an error by (*Tree).recover.
func (t *Tree) errorf(format string, args ...interface{}) {
	format = fmt.Sprintf("parse: %s:%d: %s", t.Name, t.lex.lineNumber(), format)
	panic(fmt.Errorf(format, args...))
}
// expect consumes the next token and guarantees it has the required type.
func (t *Tree) expect(expected itemType, context string) item {
	token := t.next()
	if token.typ != expected {
		t.unexpected(token, context)
	}
	return token
}
// unexpected complains about the token and terminates processing.
func (t *Tree) unexpected(token item, context string) {
	t.errorf("unexpected %s in %s", token, context)
}
// recover is the handler that turns panics into returns from the top level of
// Parse. Runtime errors (genuine bugs) are re-panicked rather than swallowed.
func (t *Tree) recover(errp *error) {
	if e := recover(); e != nil {
		if _, ok := e.(runtime.Error); ok {
			panic(e)
		}
		if t != nil {
			t.stopParse()
		}
		*errp = e.(error)
	}
}
// startParse initializes the parser, using the lexer.
func (t *Tree) startParse(lex *lexer) {
	t.lex = lex
}
// stopParse terminates parsing, releasing the lexer.
func (t *Tree) stopParse() {
	t.lex = nil
}
// Parse parses the yacc string to construct a representation of
// the file for analysis. Panics raised by errorf are converted into the
// returned error by the deferred recover, which overwrites the nil below.
func (t *Tree) Parse(text string) (err error) {
	defer t.recover(&err)
	t.startParse(lex(t.Name, text))
	t.text = text
	t.parse()
	t.stopParse()
	return nil
}
// parse is the top-level parser for a file.
// It runs to EOF. Each identifier token starts a production; all other
// top-level tokens are silently skipped.
func (t *Tree) parse() {
	for {
		switch token := t.next(); token.typ {
		case itemIdent:
			p := newProduction(token.pos, token.val)
			t.parseProduction(p)
			t.Productions = append(t.Productions, p)
		case itemEOF:
			return
		}
	}
}
// parseProduction parses one production: a colon and newline after the name,
// then one or more pipe-separated expressions. It returns when a token that
// cannot continue the production is seen while no expression is expected.
func (t *Tree) parseProduction(p *ProductionNode) {
	const context = "production"
	t.expect(itemColon, context)
	t.expect(itemNL, context)
	expectExpr := true
	for {
		token := t.next()
		switch token.typ {
		case itemComment, itemNL:
			// ignore
		case itemPipe:
			// A pipe is only legal after a completed expression.
			if expectExpr {
				t.unexpected(token, context)
			}
			expectExpr = true
		default:
			t.backup()
			if !expectExpr {
				return
			}
			e := newExpression(token.pos)
			t.parseExpression(e)
			p.Expressions = append(p.Expressions, e)
			expectExpr = false
		}
	}
}
func (t *Tree) parseExpression(e *ExpressionNode) {
const context = "expression"
for {
switch token := t.next(); token.typ {
case itemNL:
peek := t.peek().typ
if peek == itemPipe || peek == itemNL {
return
}
case itemIdent:
e.Items = append(e.Items, Item{token.val, TypToken})
case itemLiteral:
e.Items = append(e.Items, Item{token.val, TypLiteral})
case itemExpr:
e.Command = token.val
t.expect(itemNL, context)
return
case itemPct, itemComment:
// ignore
default:
t.unexpected(token, context)
}
}
} | pkg/internal/rsg/yacc/parse.go | 0.703549 | 0.474022 | parse.go | starcoder |
package beacon
import (
"context"
"github.com/filecoin-project/go-state-types/abi"
logging "github.com/ipfs/go-log/v2"
"golang.org/x/xerrors"
"github.com/filecoin-project/lotus/build"
"github.com/filecoin-project/lotus/chain/types"
)
var log = logging.Logger("beacon")
// Response carries a single beacon entry or the error that prevented fetching it.
type Response struct {
	Entry types.BeaconEntry
	Err   error
}
// Schedule is an ordered list of beacon change points; entries are expected in
// ascending Start order, as BeaconForEpoch scans from the end.
type Schedule []BeaconPoint
// BeaconForEpoch returns the beacon in effect at epoch e. Epochs before the
// first change point fall back to the first beacon.
// NOTE(review): an empty schedule panics on the bs[0] fallback — callers
// presumably guarantee at least one entry; verify at construction.
func (bs Schedule) BeaconForEpoch(e abi.ChainEpoch) RandomBeacon {
	for i := len(bs) - 1; i >= 0; i-- {
		bp := bs[i]
		if e >= bp.Start {
			return bp.Beacon
		}
	}
	return bs[0].Beacon
}
// BeaconPoint associates a beacon with the first epoch at which it applies.
type BeaconPoint struct {
	Start  abi.ChainEpoch
	Beacon RandomBeacon
}
// RandomBeacon represents a system that provides randomness to Lotus.
// Other components interrogate the RandomBeacon to acquire randomness that's
// valid for a specific chain epoch. Also to verify beacon entries that have
// been posted on chain.
type RandomBeacon interface {
	Entry(context.Context, uint64) <-chan Response
	VerifyEntry(types.BeaconEntry, types.BeaconEntry) error
	MaxBeaconRoundForEpoch(abi.ChainEpoch) uint64
}
// ValidateBlockValues checks that the beacon entries carried by block header h
// are well-formed and correctly chained from prevEntry. At a beacon fork
// (parent and current epochs use different beacons) exactly two entries are
// required and only their mutual link is checked; otherwise the entries must
// run up to the beacon's max round for h.Height, each verifying against its
// predecessor.
func ValidateBlockValues(bSchedule Schedule, h *types.BlockHeader, parentEpoch abi.ChainEpoch,
	prevEntry types.BeaconEntry) error {
	{
		parentBeacon := bSchedule.BeaconForEpoch(parentEpoch)
		currBeacon := bSchedule.BeaconForEpoch(h.Height)
		if parentBeacon != currBeacon {
			if len(h.BeaconEntries) != 2 {
				return xerrors.Errorf("expected two beacon entries at beacon fork, got %d", len(h.BeaconEntries))
			}
			err := currBeacon.VerifyEntry(h.BeaconEntries[1], h.BeaconEntries[0])
			if err != nil {
				return xerrors.Errorf("beacon at fork point invalid: (%v, %v): %w",
					h.BeaconEntries[1], h.BeaconEntries[0], err)
			}
			return nil
		}
	}
	// TODO: fork logic
	b := bSchedule.BeaconForEpoch(h.Height)
	maxRound := b.MaxBeaconRoundForEpoch(h.Height)
	// No new rounds since the previous entry: the block must carry none.
	if maxRound == prevEntry.Round {
		if len(h.BeaconEntries) != 0 {
			return xerrors.Errorf("expected not to have any beacon entries in this block, got %d", len(h.BeaconEntries))
		}
		return nil
	}
	if len(h.BeaconEntries) == 0 {
		return xerrors.Errorf("expected to have beacon entries in this block, but didn't find any")
	}
	last := h.BeaconEntries[len(h.BeaconEntries)-1]
	if last.Round != maxRound {
		return xerrors.Errorf("expected final beacon entry in block to be at round %d, got %d", maxRound, last.Round)
	}
	// Each entry must verify against the one before it, starting from prevEntry.
	for i, e := range h.BeaconEntries {
		if err := b.VerifyEntry(e, prevEntry); err != nil {
			return xerrors.Errorf("beacon entry %d (%d - %x (%d)) was invalid: %w", i, e.Round, e.Data, len(e.Data), err)
		}
		prevEntry = e
	}
	return nil
}
// BeaconEntriesForBlock fetches the beacon entries a new block at the given
// epoch must include, walking backwards from the beacon's max round for the
// epoch down to (but excluding) prev.Round, then returning them in ascending
// order. At a beacon fork it instead fetches exactly two entries (round-1 and
// round) from the new beacon. Returns nil, nil when no new rounds have elapsed.
func BeaconEntriesForBlock(ctx context.Context, bSchedule Schedule, epoch abi.ChainEpoch, parentEpoch abi.ChainEpoch, prev types.BeaconEntry) ([]types.BeaconEntry, error) {
	{
		parentBeacon := bSchedule.BeaconForEpoch(parentEpoch)
		currBeacon := bSchedule.BeaconForEpoch(epoch)
		if parentBeacon != currBeacon {
			// Fork logic
			round := currBeacon.MaxBeaconRoundForEpoch(epoch)
			out := make([]types.BeaconEntry, 2)
			rch := currBeacon.Entry(ctx, round-1)
			res := <-rch
			if res.Err != nil {
				return nil, xerrors.Errorf("getting entry %d returned error: %w", round-1, res.Err)
			}
			out[0] = res.Entry
			rch = currBeacon.Entry(ctx, round)
			res = <-rch
			if res.Err != nil {
				return nil, xerrors.Errorf("getting entry %d returned error: %w", round, res.Err)
			}
			out[1] = res.Entry
			return out, nil
		}
	}
	beacon := bSchedule.BeaconForEpoch(epoch)
	start := build.Clock.Now()
	maxRound := beacon.MaxBeaconRoundForEpoch(epoch)
	if maxRound == prev.Round {
		return nil, nil
	}
	// TODO: this is a sketchy way to handle the genesis block not having a beacon entry
	if prev.Round == 0 {
		prev.Round = maxRound - 1
	}
	// Fetch entries newest-first; each response tells us the next (older) round.
	cur := maxRound
	var out []types.BeaconEntry
	for cur > prev.Round {
		rch := beacon.Entry(ctx, cur)
		select {
		case resp := <-rch:
			if resp.Err != nil {
				return nil, xerrors.Errorf("beacon entry request returned error: %w", resp.Err)
			}
			out = append(out, resp.Entry)
			cur = resp.Entry.Round - 1
		case <-ctx.Done():
			return nil, xerrors.Errorf("context timed out waiting on beacon entry to come back for epoch %d: %w", epoch, ctx.Err())
		}
	}
	log.Debugw("fetching beacon entries", "took", build.Clock.Since(start), "numEntries", len(out))
	// Entries were collected newest-first; callers expect ascending round order.
	reverse(out)
	return out, nil
}
func reverse(arr []types.BeaconEntry) {
for i := 0; i < len(arr)/2; i++ {
arr[i], arr[len(arr)-(1+i)] = arr[len(arr)-(1+i)], arr[i]
}
} | chain/beacon/beacon.go | 0.525856 | 0.47457 | beacon.go | starcoder |
package pain
import (
"encoding/xml"
"github.com/figassis/bankiso/iso20022"
)
// Document00100102 is the XML document wrapper for an ISO 20022
// pain.001.001.02 (CustomerCreditTransferInitiationV02) message.
type Document00100102 struct {
	XMLName xml.Name `xml:"urn:iso:std:iso:20022:tech:xsd:pain.001.001.02 Document"`
	Message *CustomerCreditTransferInitiationV02 `xml:"pain.001.001.02"`
}
// AddMessage allocates the document's message payload and returns it for population.
func (d *Document00100102) AddMessage() *CustomerCreditTransferInitiationV02 {
	d.Message = new(CustomerCreditTransferInitiationV02)
	return d.Message
}
// Scope
// The CustomerCreditTransferInitiation message is sent by the initiating party to the forwarding agent or debtor agent. It is used to request movement of funds from the debtor account to a creditor.
// Usage
// The CustomerCreditTransferInitiation message can contain one or more customer credit transfer instructions.
// The CustomerCreditTransferInitiation message is used to exchange:
// - One or more instances of a credit transfer initiation;
// - Payment transactions that result in book transfers at the debtor agent or payments to another financial institution;
// - Payment transactions that result in an electronic cash transfer to the creditor account or in the emission of a cheque.
// The message can be used in a direct or a relay scenario:
// - In a direct scenario, the message is sent directly to the debtor agent. The debtor agent is the account servicer of the debtor.
// - In a relay scenario, the message is sent to a forwarding agent. The forwarding agent acts as a concentrating financial institution. It will forward the CustomerCreditTransferInitiation message to the debtor agent.
// The message can also be used by an initiating party that has authority to send the message on behalf of the debtor. This caters for example for the scenario of a payments factory initiating all payments on behalf of a large corporate.
// The CustomerCreditTransferInitiation message can be used in domestic and cross-border scenarios.
// The CustomerCreditTransferInitiation message must not be used by the debtor agent to execute the credit transfer instruction(s). The FIToFICustomerCreditTransfer message must be used instead.
// If it is agreed to include the payment information related to the debit side only once (i.e. Grouped mode), the PaymentInformation block will be present only once.
// If it is agreed to repeat the payment information related to the debit side (i.e. Single mode), the PaymentInformation block must be present once per occurrence of the CreditTransferTransactionInformation block.
// The CustomerCreditTransferInitiation message also allows for a Mixed mode where the PaymentInformation block can be repeated and each PaymentInformation block can contain one or several CreditTransferTransactionInformation block(s).
// Single
// When grouping is set to Single, information for each individual instruction is included separately. This means the
// PaymentInformation block is repeated, and present for each occurrence of the CreditTransferTransactionInformation block.
// Grouped
// When grouping is set to Grouped, the PaymentInformation block will be present once and the CreditTransferTransactionInformation block will be repeated.
// Mixed
// When grouping is set to Mixed, the PaymentInformation block may be present once or may be repeated. Each sequence
// of the PaymentInformation block may contain one or several CreditTransferTransactionInformation block(s).
// CustomerCreditTransferInitiationV02 is the body of the pain.001.001.02
// message; see the scope/usage description above for grouping semantics.
type CustomerCreditTransferInitiationV02 struct {
	// Set of characteristics shared by all individual transactions included in the message.
	GroupHeader *iso20022.GroupHeader1 `xml:"GrpHdr"`
	// Set of characteristics that applies to the debit side of the payment transactions included in the credit transfer initiation.
	PaymentInformation []*iso20022.PaymentInstructionInformation1 `xml:"PmtInf"`
}
// AddGroupHeader creates a new GroupHeader1, attaches it to the message,
// and returns it so the caller can fill it in.
func (c *CustomerCreditTransferInitiationV02) AddGroupHeader() *iso20022.GroupHeader1 {
	hdr := &iso20022.GroupHeader1{}
	c.GroupHeader = hdr
	return hdr
}
// AddPaymentInformation appends a new, empty PaymentInstructionInformation1
// block to the message and returns it for the caller to populate.
func (c *CustomerCreditTransferInitiationV02) AddPaymentInformation() *iso20022.PaymentInstructionInformation1 {
	pi := &iso20022.PaymentInstructionInformation1{}
	c.PaymentInformation = append(c.PaymentInformation, pi)
	return pi
}
// String is a stub: it always returns the zero values ("" and false).
func ( d *Document00100102 ) String() (result string, ok bool) { return } | generate/iso20022/pain/CustomerCreditTransferInitiationV02.go | 0.740174 | 0.428532 | CustomerCreditTransferInitiationV02.go | starcoder
package command
import (
"encoding/json"
"fmt"
"os"
"strings"
"testing"
"time"
"github.com/infracloudio/botkube/pkg/config"
"github.com/infracloudio/botkube/pkg/execute"
"github.com/infracloudio/botkube/test/e2e/utils"
"github.com/nlopes/slack"
"github.com/stretchr/testify/assert"
"k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// botkubeCommand pairs a command string sent to the bot with the exact
// reply text the bot is expected to send back.
type botkubeCommand struct {
	command  string // command text (without the @BotKube mention)
	expected string // full expected response message
}
// Send botkube command via Slack message and check if BotKube returns correct response.
// Each table entry is run as a subtest; "filters list" is compared as an
// order-insensitive set of lines, everything else as exact text.
func (c *context) testBotkubeCommand(t *testing.T) {
	botkubeVersion := os.Getenv("BOTKUBE_VERSION")
	// Test cases
	tests := map[string]botkubeCommand{
		"BotKube ping": {
			command:  "ping",
			expected: fmt.Sprintf("```pong from cluster '%s'\n\nK8s Server Version: %s\nBotKube version: %s```", c.Config.Settings.ClusterName, execute.K8sVersion, botkubeVersion),
		},
		"BotKube filters list": {
			command: "filters list",
			expected: "FILTER ENABLED DESCRIPTION\n" +
				"IngressValidator true Checks if services and tls secrets used in ingress specs are available.\n" +
				"JobStatusChecker true Sends notifications only when job succeeds and ignores other job update events.\n" +
				"NodeEventsChecker true Sends notifications on node level critical events.\n" +
				"PodLabelChecker true Checks and adds recommedations if labels are missing in the pod specs.\n" +
				"ImageTagChecker true Checks and adds recommendation if 'latest' image tag is used for container image.\n",
		},
	}
	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			// Send message to a channel
			c.SlackServer.SendMessageToBot(c.Config.Communications.Slack.Channel, test.command)
			// Get last seen slack message; the sleep gives the bot time to reply.
			time.Sleep(time.Second)
			lastSeenMsg := c.GetLastSeenSlackMessage()
			// Convert text message into Slack message structure
			m := slack.Message{}
			err := json.Unmarshal([]byte(lastSeenMsg), &m)
			assert.NoError(t, err, "message should decode properly")
			assert.Equal(t, c.Config.Communications.Slack.Channel, m.Channel)
			switch test.command {
			case "filters list":
				// The filter table's row order is not guaranteed, so compare
				// the expected and actual lines as unordered sets.
				fl := compareFilters(strings.Split(test.expected, "\n"), strings.Split(strings.Trim(m.Text, "```"), "\n"))
				assert.Equal(t, fl, true)
			default:
				assert.Equal(t, test.expected, m.Text)
			}
		})
	}
}
// compareFilters reports whether expected and actual contain the same
// lines, ignoring order but respecting multiplicity.
//
// The previous implementation only checked that every element of actual
// appeared somewhere in expected, so duplicates were miscounted:
// actual ["a","a"] matched expected ["a","b"]. A count map fixes that.
func compareFilters(expected, actual []string) bool {
	if len(expected) != len(actual) {
		return false
	}
	// Count each expected line, then consume one count per actual line.
	counts := make(map[string]int, len(expected))
	for _, e := range expected {
		counts[e]++
	}
	for _, a := range actual {
		if counts[a] == 0 {
			return false
		}
		counts[a]--
	}
	return true
}
// Test disable notification with BotKube notifier command
// - disable notifier with '@BotKube notifier stop'
// - create pod and verify BotKube doesn't send notification
// - enable notifier with '@BotKube notifier start'
func (c *context) testNotifierCommand(t *testing.T) {
	// Disable notifier with @BotKube notifier stop
	t.Run("disable notifier", func(t *testing.T) {
		// Send message to a channel
		c.SlackServer.SendMessageToBot(c.Config.Communications.Slack.Channel, "notifier stop")
		// Get last seen slack message (sleep gives the bot time to reply)
		time.Sleep(time.Second)
		lastSeenMsg := c.GetLastSeenSlackMessage()
		// Convert text message into Slack message structure
		m := slack.Message{}
		err := json.Unmarshal([]byte(lastSeenMsg), &m)
		assert.NoError(t, err, "message should decode properly")
		assert.Equal(t, c.Config.Communications.Slack.Channel, m.Channel)
		assert.Equal(t, fmt.Sprintf("```Sure! I won't send you notifications from cluster '%s' anymore.```", c.Config.Settings.ClusterName), m.Text)
		// The global notification flag must now be off.
		assert.Equal(t, config.Notify, false)
	})
	// Create pod and verify that BotKube is not sending notifications
	pod := utils.CreateObjects{
		Kind:      "pod",
		Namespace: "test",
		Specs:     &v1.Pod{ObjectMeta: metav1.ObjectMeta{Name: "test-pod-notifier"}},
		Expected: utils.SlackMessage{
			Attachments: []slack.Attachment{{Color: "good", Fields: []slack.AttachmentField{{Title: "Pod create", Value: "Pod `test-pod` in of cluster `test-cluster-1`, namespace `test` has been created:\n```Resource created\nRecommendations:\n- pod 'test-pod' creation without labels should be avoided.\n```", Short: false}}, Footer: "BotKube"}},
		},
	}
	t.Run("create resource", func(t *testing.T) {
		// Inject an event into the fake client.
		utils.CreateResource(t, pod)
		// Get last seen slack message
		time.Sleep(time.Second)
		lastSeenMsg := c.GetLastSeenSlackMessage()
		// Convert text message into Slack message structure
		m := slack.Message{}
		err := json.Unmarshal([]byte(lastSeenMsg), &m)
		assert.NoError(t, err, "message should decode properly")
		assert.Equal(t, c.Config.Communications.Slack.Channel, m.Channel)
		// While notifications are stopped, no create-notification must arrive.
		assert.NotEqual(t, pod.Expected.Attachments, m.Attachments)
	})
	// Revert and Enable notifier
	t.Run("Enable notifier", func(t *testing.T) {
		// Send message to a channel
		c.SlackServer.SendMessageToBot(c.Config.Communications.Slack.Channel, "notifier start")
		// Get last seen slack message
		time.Sleep(time.Second)
		lastSeenMsg := c.GetLastSeenSlackMessage()
		// Convert text message into Slack message structure
		m := slack.Message{}
		err := json.Unmarshal([]byte(lastSeenMsg), &m)
		assert.NoError(t, err, "message should decode properly")
		assert.Equal(t, c.Config.Communications.Slack.Channel, m.Channel)
		assert.Equal(t, fmt.Sprintf("```Brace yourselves, notifications are coming from cluster '%s'.```", c.Config.Settings.ClusterName), m.Text)
		assert.Equal(t, config.Notify, true)
	})
} | test/e2e/command/botkube.go | 0.597021 | 0.535038 | botkube.go | starcoder
package chunk
import (
"strconv"
"github.com/pingcap/parser/mysql"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/types/json"
)
// Row represents a row of data, can be used to access values.
// It is a lightweight handle — a pointer to the owning Chunk plus the
// row's index inside it — so copying a Row is cheap.
type Row struct {
	c   *Chunk // chunk that owns the row's data
	idx int    // row index within c
}
// Chunk returns the Chunk which the row belongs to.
func (r Row) Chunk() *Chunk {
	return r.c
}

// IsEmpty returns true if the Row is empty (the zero Row value).
func (r Row) IsEmpty() bool {
	return r == Row{}
}

// Idx returns the row index of Chunk.
func (r Row) Idx() int {
	return r.idx
}

// Len returns the number of values (columns) in the row.
func (r Row) Len() int {
	return r.c.NumCols()
}
// The typed getters below delegate to the backing column at colIdx.
// Callers must use the getter matching the column's stored type.

// GetInt64 returns the int64 value with the colIdx.
func (r Row) GetInt64(colIdx int) int64 {
	return r.c.columns[colIdx].GetInt64(r.idx)
}

// GetUint64 returns the uint64 value with the colIdx.
func (r Row) GetUint64(colIdx int) uint64 {
	return r.c.columns[colIdx].GetUint64(r.idx)
}

// GetFloat32 returns the float32 value with the colIdx.
func (r Row) GetFloat32(colIdx int) float32 {
	return r.c.columns[colIdx].GetFloat32(r.idx)
}

// GetFloat64 returns the float64 value with the colIdx.
func (r Row) GetFloat64(colIdx int) float64 {
	return r.c.columns[colIdx].GetFloat64(r.idx)
}

// GetString returns the string value with the colIdx.
func (r Row) GetString(colIdx int) string {
	return r.c.columns[colIdx].GetString(r.idx)
}

// GetBytes returns the bytes value with the colIdx.
func (r Row) GetBytes(colIdx int) []byte {
	return r.c.columns[colIdx].GetBytes(r.idx)
}

// GetTime returns the Time value with the colIdx.
func (r Row) GetTime(colIdx int) types.Time {
	return r.c.columns[colIdx].GetTime(r.idx)
}

// GetDuration returns the Duration value with the colIdx.
func (r Row) GetDuration(colIdx int, fillFsp int) types.Duration {
	return r.c.columns[colIdx].GetDuration(r.idx, fillFsp)
}

// getNameValue returns the raw (name, value) pair stored for the column
// — presumably backing Enum/Set storage; confirm against column.go.
func (r Row) getNameValue(colIdx int) (string, uint64) {
	return r.c.columns[colIdx].getNameValue(r.idx)
}

// GetEnum returns the Enum value with the colIdx.
func (r Row) GetEnum(colIdx int) types.Enum {
	return r.c.columns[colIdx].GetEnum(r.idx)
}

// GetSet returns the Set value with the colIdx.
func (r Row) GetSet(colIdx int) types.Set {
	return r.c.columns[colIdx].GetSet(r.idx)
}

// GetMyDecimal returns the MyDecimal value with the colIdx.
func (r Row) GetMyDecimal(colIdx int) *types.MyDecimal {
	return r.c.columns[colIdx].GetDecimal(r.idx)
}

// GetJSON returns the JSON value with the colIdx.
func (r Row) GetJSON(colIdx int) json.BinaryJSON {
	return r.c.columns[colIdx].GetJSON(r.idx)
}
// GetDatumRow converts chunk.Row to types.DatumRow.
// Keep in mind that GetDatumRow has a reference to r.c, which is a chunk,
// this function works only if the underlying chunk is valid or unchanged.
func (r Row) GetDatumRow(fields []*types.FieldType) []types.Datum {
	numCols := r.c.NumCols()
	row := make([]types.Datum, 0, numCols)
	for i := 0; i < numCols; i++ {
		row = append(row, r.GetDatum(i, fields[i]))
	}
	return row
}
// GetDatum implements the chunk.Row interface.
// It converts the cell at colIdx into a types.Datum according to the
// declared field type tp. A NULL cell leaves d as the zero Datum.
func (r Row) GetDatum(colIdx int, tp *types.FieldType) types.Datum {
	var d types.Datum
	switch tp.Tp {
	case mysql.TypeTiny, mysql.TypeShort, mysql.TypeInt24, mysql.TypeLong, mysql.TypeLonglong:
		if !r.IsNull(colIdx) {
			if mysql.HasUnsignedFlag(tp.Flag) {
				d.SetUint64(r.GetUint64(colIdx))
			} else {
				d.SetInt64(r.GetInt64(colIdx))
			}
		}
	case mysql.TypeYear:
		// FIXBUG: because insert type of TypeYear is definite int64, so we regardless of the unsigned flag.
		if !r.IsNull(colIdx) {
			d.SetInt64(r.GetInt64(colIdx))
		}
	case mysql.TypeFloat:
		if !r.IsNull(colIdx) {
			d.SetFloat32(r.GetFloat32(colIdx))
		}
	case mysql.TypeDouble:
		if !r.IsNull(colIdx) {
			d.SetFloat64(r.GetFloat64(colIdx))
		}
	case mysql.TypeVarchar, mysql.TypeVarString, mysql.TypeString, mysql.TypeBlob, mysql.TypeTinyBlob, mysql.TypeMediumBlob, mysql.TypeLongBlob:
		if !r.IsNull(colIdx) {
			d.SetString(r.GetString(colIdx), tp.Collate)
		}
	case mysql.TypeDate, mysql.TypeDatetime, mysql.TypeTimestamp:
		if !r.IsNull(colIdx) {
			d.SetMysqlTime(r.GetTime(colIdx))
		}
	case mysql.TypeDuration:
		if !r.IsNull(colIdx) {
			duration := r.GetDuration(colIdx, tp.Decimal)
			d.SetMysqlDuration(duration)
		}
	case mysql.TypeNewDecimal:
		if !r.IsNull(colIdx) {
			d.SetMysqlDecimal(r.GetMyDecimal(colIdx))
			d.SetLength(tp.Flen)
			// If tp.Decimal is unspecified(-1), we should set it to the real
			// fraction length of the decimal value, if not, the d.Frac will
			// be set to MAX_UINT16 which will cause unexpected BadNumber error
			// when encoding.
			if tp.Decimal == types.UnspecifiedLength {
				d.SetFrac(d.Frac())
			} else {
				d.SetFrac(tp.Decimal)
			}
		}
	case mysql.TypeEnum:
		if !r.IsNull(colIdx) {
			d.SetMysqlEnum(r.GetEnum(colIdx), tp.Collate)
		}
	case mysql.TypeSet:
		if !r.IsNull(colIdx) {
			d.SetMysqlSet(r.GetSet(colIdx), tp.Collate)
		}
	case mysql.TypeBit:
		if !r.IsNull(colIdx) {
			d.SetMysqlBit(r.GetBytes(colIdx))
		}
	case mysql.TypeJSON:
		if !r.IsNull(colIdx) {
			d.SetMysqlJSON(r.GetJSON(colIdx))
		}
	}
	return d
}
// GetRaw returns the underlying raw bytes with the colIdx.
func (r Row) GetRaw(colIdx int) []byte {
	return r.c.columns[colIdx].GetRaw(r.idx)
}

// IsNull returns if the datum in the chunk.Row is null.
func (r Row) IsNull(colIdx int) bool {
	return r.c.columns[colIdx].IsNull(r.idx)
}

// CopyConstruct creates a new row and copies this row's data into it.
// The new row lives in its own single-row chunk, detached from r.c.
func (r Row) CopyConstruct() Row {
	newChk := renewWithCapacity(r.c, 1, 1)
	newChk.AppendRow(r)
	return newChk.GetRow(0)
}
// ToString returns all the values in a row, comma-separated, rendered
// according to each column's field type in ft (NULL cells print "NULL").
func (r Row) ToString(ft []*types.FieldType) string {
	var buf []byte
	for colIdx := 0; colIdx < r.Chunk().NumCols(); colIdx++ {
		if r.IsNull(colIdx) {
			buf = append(buf, "NULL"...)
		} else {
			switch ft[colIdx].EvalType() {
			case types.ETInt:
				buf = strconv.AppendInt(buf, r.GetInt64(colIdx), 10)
			case types.ETString:
				// Enum/Set are stored as strings but need their own renderers.
				switch ft[colIdx].Tp {
				case mysql.TypeEnum:
					buf = append(buf, r.GetEnum(colIdx).String()...)
				case mysql.TypeSet:
					buf = append(buf, r.GetSet(colIdx).String()...)
				default:
					buf = append(buf, r.GetString(colIdx)...)
				}
			case types.ETDatetime, types.ETTimestamp:
				buf = append(buf, r.GetTime(colIdx).String()...)
			case types.ETDecimal:
				buf = append(buf, r.GetMyDecimal(colIdx).ToString()...)
			case types.ETDuration:
				buf = append(buf, r.GetDuration(colIdx, ft[colIdx].Decimal).String()...)
			case types.ETJson:
				buf = append(buf, r.GetJSON(colIdx).String()...)
			case types.ETReal:
				switch ft[colIdx].Tp {
				case mysql.TypeFloat:
					buf = strconv.AppendFloat(buf, float64(r.GetFloat32(colIdx)), 'f', -1, 32)
				case mysql.TypeDouble:
					buf = strconv.AppendFloat(buf, r.GetFloat64(colIdx), 'f', -1, 64)
				}
			}
		}
		if colIdx != r.Chunk().NumCols()-1 {
			buf = append(buf, ", "...)
		}
	}
	return string(buf)
} | util/chunk/row.go | 0.729038 | 0.581065 | row.go | starcoder
package encoder
// Algorithm 3-way Radix Quicksort, d means the radix.
// Reference: https://algs4.cs.princeton.edu/51radix/Quick3string.java.html
func radixQsort(kvs []_MapPair, d, maxDepth int) {
for len(kvs) > 11 {
// To avoid the worst case of quickSort (time: O(n^2)), use introsort here.
// Reference: https://en.wikipedia.org/wiki/Introsort and
// https://github.com/golang/go/issues/467
if maxDepth == 0 {
heapSort(kvs, 0, len(kvs))
return
}
maxDepth--
p := pivot(kvs, d)
lt, i, gt := 0, 0, len(kvs)
for i < gt {
c := byteAt(kvs[i].k, d)
if c < p {
swap(kvs, lt, i)
i++
lt++
} else if c > p {
gt--
swap(kvs, i, gt)
} else {
i++
}
}
// kvs[0:lt] < v = kvs[lt:gt] < kvs[gt:len(kvs)]
// Native implemention:
// radixQsort(kvs[:lt], d, maxDepth)
// if p > -1 {
// radixQsort(kvs[lt:gt], d+1, maxDepth)
// }
// radixQsort(kvs[gt:], d, maxDepth)
// Optimize as follows: make recursive calls only for the smaller parts.
// Reference: https://www.geeksforgeeks.org/quicksort-tail-call-optimization-reducing-worst-case-space-log-n/
if p == -1 {
if lt > len(kvs) - gt {
radixQsort(kvs[gt:], d, maxDepth)
kvs = kvs[:lt]
} else {
radixQsort(kvs[:lt], d, maxDepth)
kvs = kvs[gt:]
}
} else {
ml := maxThree(lt, gt-lt, len(kvs)-gt)
if ml == lt {
radixQsort(kvs[lt:gt], d+1, maxDepth)
radixQsort(kvs[gt:], d, maxDepth)
kvs = kvs[:lt]
} else if ml == gt-lt {
radixQsort(kvs[:lt], d, maxDepth)
radixQsort(kvs[gt:], d, maxDepth)
kvs = kvs[lt:gt]
d += 1
} else {
radixQsort(kvs[:lt], d, maxDepth)
radixQsort(kvs[lt:gt], d+1, maxDepth)
kvs = kvs[gt:]
}
}
}
insertRadixSort(kvs, d)
}
// insertRadixSort is an insertion sort over kvs that compares keys
// starting from byte offset d (prefixes before d are assumed equal).
func insertRadixSort(kvs []_MapPair, d int) {
	for cur := 1; cur < len(kvs); cur++ {
		pos := cur
		for pos > 0 && lessFrom(kvs[pos].k, kvs[pos-1].k, d) {
			swap(kvs, pos, pos-1)
			pos--
		}
	}
}
// pivot selects the pivot radix (byte value at offset d) for the 3-way
// partition: Tukey's "ninther" for large inputs, median-of-three otherwise.
func pivot(kvs []_MapPair, d int) int {
	m := len(kvs) >> 1
	if len(kvs) > 40 {
		// Tukey's ``Ninther,'' median of three medians of three.
		t := len(kvs) / 8
		return medianThree(
			medianThree(byteAt(kvs[0].k, d), byteAt(kvs[t].k, d), byteAt(kvs[2*t].k, d)),
			medianThree(byteAt(kvs[m].k, d), byteAt(kvs[m-t].k, d), byteAt(kvs[m+t].k, d)),
			medianThree(byteAt(kvs[len(kvs)-1].k, d),
				byteAt(kvs[len(kvs)-1-t].k, d),
				byteAt(kvs[len(kvs)-1-2*t].k, d)))
	}
	// Small slices: plain median of first, middle and last byte.
	return medianThree(byteAt(kvs[0].k, d), byteAt(kvs[m].k, d), byteAt(kvs[len(kvs)-1].k, d))
}
// medianThree returns the median of the three given ints.
func medianThree(i, j, k int) int {
	// Order i and j so that i <= j, then clamp k into [i, j].
	if i > j {
		i, j = j, i
	}
	switch {
	case k < i:
		return i
	case k > j:
		return j
	default:
		return k
	}
}
// maxThree returns the largest of the three given ints.
func maxThree(i, j, k int) int {
	if i >= j && i >= k {
		return i
	}
	if j >= k {
		return j
	}
	return k
}
// maxDepth returns a threshold at which quicksort should switch
// to heapsort. It returns 2*ceil(lg(n+1)).
func maxDepth(n int) int {
	// Count how many right-shifts it takes to zero out n (its bit length).
	depth := 0
	for n > 0 {
		depth++
		n >>= 1
	}
	return 2 * depth
}
// siftDown implements the heap property on kvs[lo:hi].
// first is an offset into the array where the root of the heap lies.
// Note this builds a MAX-heap on the keys, which heapSort then drains
// into ascending order.
func siftDown(kvs []_MapPair, lo, hi, first int) {
	root := lo
	for {
		child := 2*root + 1
		if child >= hi {
			break
		}
		// Pick the larger of the two children.
		if child+1 < hi && kvs[first+child].k < kvs[first+child+1].k {
			child++
		}
		if kvs[first+root].k >= kvs[first+child].k {
			return
		}
		swap(kvs, first+root, first+child)
		root = child
	}
}

// heapSort sorts kvs[a:b] in ascending key order via an in-place max-heap.
func heapSort(kvs []_MapPair, a, b int) {
	first := a
	lo := 0
	hi := b - a
	// Build heap with the greatest element at top.
	for i := (hi - 1) / 2; i >= 0; i-- {
		siftDown(kvs, i, hi, first)
	}
	// Pop elements, the largest first, into end of kvs.
	for i := hi - 1; i >= 0; i-- {
		swap(kvs, first, first+i)
		siftDown(kvs, lo, i, first)
	}
}
// swap exchanges both the key and the value of kvs[a] and kvs[b].
// Note that _MapPair.k is NOT pointed to _MapPair.m when map key is integer after swap
func swap(kvs []_MapPair, a, b int) {
	kvs[a].k, kvs[b].k = kvs[b].k, kvs[a].k
	kvs[a].v, kvs[b].v = kvs[b].v, kvs[a].v
}
// lessFrom reports whether a sorts strictly before b when comparing only
// from byte offset d onward; on a common prefix the shorter string wins.
func lessFrom(a, b string, d int) bool {
	limit := len(a)
	if len(b) < limit {
		limit = len(b)
	}
	for i := d; i < limit; i++ {
		if a[i] != b[i] {
			return a[i] < b[i]
		}
	}
	return len(a) < len(b)
}
// byteAt returns the byte of b at position p as an int, or -1 when p is
// past the end of the string (a sentinel that sorts before any real byte).
func byteAt(b string, p int) int {
	if p < len(b) {
		return int(b[p])
	}
	return -1
} | encoder/sort.go | 0.830147 | 0.53279 | sort.go | starcoder
package main
import (
"math"
"time"
)
// Moon holds the computed lunar state for a single instant in time.
type Moon struct {
	phase     float64    // phase of the Moon, 0..1 (0 = new, 0.5 = full)
	illum     float64    // illuminated fraction, 0..1
	age       float64    // age of the Moon in days
	dist      float64    // distance to the Moon in kilometres
	angdia    float64    // Moon's angular diameter in degrees
	sundist   float64    // distance to the Sun in kilometres
	sunangdia float64    // Sun's angular diameter in degrees
	pdata     float64    // observation time as a Julian date
	quarters  [8]float64 // UNIX times of this and the next cycle's quarter phases
	timespace float64    // observation time as UNIX seconds
	longitude float64    // Moon's true ecliptic longitude in degrees
}

// synmonth is the length of the synodic month in days.
var synmonth float64 = 29.53058868 // Synodic month (new Moon to new Moon)
// New computes the Moon's phase, illumination, age, distances, angular
// sizes and the surrounding quarter-phase times for the instant t.
// The algorithm follows the classic epoch-1980 moon-phase formulas.
func New(t time.Time) (moonP *Moon) {
	moonP = new(Moon)
	// Astronomical constants
	var epoch float64 = 2444238.5 // 1989 January 0.0
	//Constants defining the Sun's apparent orbit
	var elonge float64 = 278.833540  // Ecliptic longitude of the Sun at epoch 1980.0
	var elongp float64 = 282.596403  // Ecliptic longitude of the Sun at perigee
	var eccent float64 = 0.016718    // Eccentricity of Earth's orbit
	var sunsmax float64 = 1.495985e8 // Sun's angular size, degrees, at semi-major axis distance
	var sunangsiz float64 = 0.533128
	// Elements of the Moon's orbit, epoch 1980.0
	var mmlong float64 = 64.975464   // Moon's mean longitude at the epoch
	var mmlongp float64 = 349.383063 // Mean longitude of the perigee at the epoch
	var mecc float64 = 0.054900      // Eccentricity of the Moon's orbit
	var mangsiz float64 = 0.5181     // Moon's angular size at distance a from Earth
	var msmax float64 = 384401       // Semi-major axis of Moon's orbit in km
	moonP.timespace = float64(t.Unix())
	moonP.pdata = utcToJulian(float64(t.Unix()))
	// Calculation of the Sun's position
	var day = moonP.pdata - epoch                    // Date within epoch
	var n float64 = fixangle((360 / 365.2422) * day) // Mean anomaly of the Sun
	var m float64 = fixangle(n + elonge - elongp)    // Convert from perigee co-orginates to epoch 1980.0
	var ec = kepler(m, eccent)                       // Solve equation of Kepler
	ec = math.Sqrt((1+eccent)/(1-eccent)) * math.Tan(ec/2)
	ec = 2 * rad2deg(math.Atan(ec))                                        // True anomaly
	var lambdasun float64 = fixangle(ec + elongp)                          // Sun's geocentric ecliptic longitude
	var f float64 = ((1 + eccent*cos(deg2rad(ec))) / (1 - eccent*eccent))  // Orbital distance factor
	var sunDist float64 = sunsmax / f                                      // Distance to Sun in km
	var sunAng float64 = f * sunangsiz                                     // Sun's angular size in degrees
	// Calsulation of the Moon's position
	var ml float64 = fixangle(13.1763966*day + mmlong)              // Moon's mean longitude
	var mm float64 = fixangle(ml - 0.1114041*day - mmlongp)         // Moon's mean anomaly
	var ev float64 = 1.2739 * sin(deg2rad(2*(ml-lambdasun)-mm))     // Evection
	var ae float64 = 0.1858 * sin(deg2rad(m))                       // Annual equation
	var a3 float64 = 0.37 * sin(deg2rad(m))                         // Correction term
	var mmP float64 = mm + ev - ae - a3                             // Corrected anomaly
	var mec float64 = 6.2886 * sin(deg2rad(mmP))                    // Correction for the equation of the centre
	var a4 float64 = 0.214 * sin(deg2rad(2*mmP))                    // Another correction term
	var lP float64 = ml + ev + mec - ae + a4                        // Corrected longitude
	var v float64 = 0.6583 * sin(deg2rad(2*(lP-lambdasun)))         // Variation
	var lPP float64 = lP + v                                        // True longitude
	// Calculation of the phase of the Moon
	var moonAge float64 = lPP - lambdasun                // Age of the Moon in degrees
	var moonPhase float64 = (1 - cos(deg2rad(moonAge))) / 2 // Phase of the Moon
	// Distance of moon from the centre of the Earth
	var moonDist float64 = (msmax * (1 - mecc*mecc)) / (1 + mecc*cos(deg2rad(mmP+mec)))
	var moonDFrac float64 = moonDist / msmax
	var moonAng float64 = mangsiz / moonDFrac // Moon's angular diameter
	// store result
	moonP.phase = fixangle(moonAge) / 360 // Phase (0 to 1)
	moonP.illum = moonPhase               // Illuminated fraction (0 to 1)
	moonP.age = synmonth * moonP.phase    // Age of moon (days)
	moonP.dist = moonDist                 // Distance (kilometres)
	moonP.angdia = moonAng                // Angular diameter (degreees)
	moonP.sundist = sunDist               // Distance to Sun (kilometres)
	moonP.sunangdia = sunAng              // Sun's angular diameter (degrees)
	moonP.longitude = lPP                 // Moon's true longitude
	moonP.phaseHunt()
	return moonP
}
// sin is shorthand for math.Sin (argument in radians).
func sin(a float64) float64 {
	return math.Sin(a)
}

// cos is shorthand for math.Cos (argument in radians).
func cos(a float64) float64 {
	return math.Cos(a)
}

// rad2deg converts radians to degrees.
func rad2deg(r float64) float64 {
	return (r * 180) / math.Pi
}

// deg2rad converts degrees to radians.
func deg2rad(d float64) float64 {
	return (d * math.Pi) / 180
}

// fixangle normalizes an angle in degrees into the range [0, 360).
func fixangle(a float64) float64 {
	return (a - 360*math.Floor(a/360))
}
// kepler solves Kepler's equation E - ecc*sin(E) = M for the eccentric
// anomaly E (returned in radians) by Newton iteration; the mean anomaly
// m is given in degrees.
func kepler(m float64, ecc float64) float64 {
	// Convergence tolerance for the Newton iteration.
	epsilon := 0.000001
	m = deg2rad(m)
	e := m
	var delta float64
	delta = e - ecc*math.Sin(e) - m
	e -= delta / (1 - ecc*math.Cos(e))
	for math.Abs(delta) > epsilon {
		delta = e - ecc*math.Sin(e) - m
		e -= delta / (1 - ecc*math.Cos(e))
	}
	return e
}
// phaseHunt locates the lunation containing m's observation time and
// fills m.quarters with the UNIX times of the surrounding quarter
// phases: indexes 0..3 are this cycle's new moon, first quarter, full
// moon and last quarter; indexes 4..7 are the next cycle's.
func (m *Moon) phaseHunt() {
	var sdate float64 = utcToJulian(m.timespace)
	// Start the hunt 45 days before the observation time.
	var adate float64 = sdate - 45
	var ats float64 = m.timespace - 86400*45
	t := time.Unix(int64(ats), 0)
	var yy float64 = float64(t.Year())
	var mm float64 = float64(t.Month())
	// Estimate the synodic month index K = (year - 1900) * 12.3685 where
	// the year includes the month as a fraction. BUG FIX: the original
	// wrote (mm - 1) * (1 / 12); 1/12 is untyped INTEGER constant
	// division and evaluates to 0, silently dropping the month term.
	var k1 float64 = math.Floor((yy + ((mm - 1) * (1.0 / 12)) - 1900) * 12.3685)
	var nt1 float64 = meanPhase(adate, k1)
	adate = nt1
	var nt2, k2 float64
	// Walk forward one synodic month at a time until the mean new-moon
	// times nt1/nt2 bracket the observation date.
	for {
		adate += synmonth
		k2 = k1 + 1
		nt2 = meanPhase(adate, k2)
		if math.Abs(nt2-sdate) < 0.75 {
			nt2 = truePhase(k2, 0.0)
		}
		if nt1 <= sdate && nt2 > sdate {
			break
		}
		nt1 = nt2
		k1 = k2
	}
	var data [8]float64
	data[0] = truePhase(k1, 0.0)
	data[1] = truePhase(k1, 0.25)
	data[2] = truePhase(k1, 0.5)
	data[3] = truePhase(k1, 0.75)
	data[4] = truePhase(k2, 0.0)
	data[5] = truePhase(k2, 0.25)
	data[6] = truePhase(k2, 0.5)
	data[7] = truePhase(k2, 0.75)
	for i := 0; i < 8; i++ {
		m.quarters[i] = (data[i] - 2440587.5) * 86400 // convert to UNIX time
	}
}
// utcToJulian converts UNIX seconds to a Julian date
// (the UNIX epoch corresponds to JD 2440587.5).
func utcToJulian(t float64) float64 {
	return 2440587.5 + t/86400
}
// func julianToUtc(t float64) float64 {
// return t*86400 + 2440587.5
// }
/*
meanPhase calculates the time (as a Julian date) of the mean new Moon
for a given base date sdate. The argument k is the precomputed synodic
month index, given by:

	K = (year - 1900) * 12.3685

where year is expressed as a year and fractional year.
*/
func meanPhase(sdate float64, k float64) float64 {
	// Time in Julian centuries from 1900 January 0.5
	var t float64 = (sdate - 2415020.0) / 36525
	var t2 float64 = t * t
	var t3 float64 = t2 * t
	nt := float64(2415020.75933 + synmonth*k +
		0.0001178*t2 -
		0.000000155*t3 +
		0.00033*sin(deg2rad(166.56+132.87*t-0.009173*t2)))
	return nt
}
// truePhase computes the true (corrected) time, as a Julian date, of a
// Moon phase for lunation index k. phase selects the quarter:
// 0.0 = new, 0.25 = first quarter, 0.5 = full, 0.75 = last quarter.
func truePhase(k float64, phase float64) float64 {
	k += phase             // Add phase to new moon time
	var t float64 = k / 1236.85 // Time in Julian centures from 1900 January 0.5
	var t2 float64 = t * t
	var t3 float64 = t2 * t
	var pt float64
	// Mean phase time, before periodic corrections.
	pt = 2415020.75933 + synmonth*k +
		0.0001178*t2 -
		0.000000155*t3 +
		0.00033*sin(deg2rad(166.56+132.87*t-0.009173*t2))
	var m, mprime, f float64
	m = 359.2242 + 29.10535608*k - 0.0000333*t2 - 0.00000347*t3        // Sun's mean anomaly
	mprime = 306.0253 + 385.81691806*k + 0.0107306*t2 + 0.00001236*t3 // Moon's mean anomaly
	f = 21.2964 + 390.67050646*k - 0.0016528*t2 - 0.00000239*t3       // Moon's argument of latitude
	if phase < 0.01 || math.Abs(phase-0.5) < 0.01 {
		// Corrections for New and Full Moon
		pt += (0.1734-0.000393*t)*sin(deg2rad(m)) +
			0.0021*sin(deg2rad(2*m)) -
			0.4068*sin(deg2rad(mprime)) +
			0.0161*sin(deg2rad(2*mprime)) -
			0.0004*sin(deg2rad(3*mprime)) +
			0.0104*sin(deg2rad(2*f)) -
			0.0051*sin(deg2rad(m+mprime)) -
			0.0074*sin(deg2rad(m-mprime)) +
			0.0004*sin(deg2rad(2*f+m)) -
			0.0004*sin(deg2rad(2*f-m)) -
			0.0006*sin(deg2rad(2*f+mprime)) +
			0.0010*sin(deg2rad(2*f-mprime)) +
			0.0005*sin(deg2rad(m+2*mprime));
	} else if math.Abs(phase-0.25) < 0.01 || math.Abs(phase-0.75) < 0.01 {
		// Corrections for the first and last quarters.
		pt += (0.1721-0.0004*t)*sin(deg2rad(m)) +
			0.0021*sin(deg2rad(2*m)) -
			0.6280*sin(deg2rad(mprime)) +
			0.0089*sin(deg2rad(2*mprime)) -
			0.0004*sin(deg2rad(3*mprime)) +
			0.0079*sin(deg2rad(2*f)) -
			0.0119*sin(deg2rad(m+mprime)) -
			0.0047*sin(deg2rad(m-mprime)) +
			0.0003*sin(deg2rad(2*f+m)) -
			0.0004*sin(deg2rad(2*f-m)) -
			0.0006*sin(deg2rad(2*f+mprime)) +
			0.0021*sin(deg2rad(2*f-mprime)) +
			0.0003*sin(deg2rad(m+2*mprime)) +
			0.0004*sin(deg2rad(m-2*mprime)) -
			0.0003*sin(deg2rad(2*m+mprime));
		if phase < 0.5 { // First quarter correction
			pt += 0.0028 - 0.0004*cos(deg2rad(m)) + 0.0003*cos(deg2rad(mprime))
		} else { // Last quarter correction
			pt += -0.0028 + 0.0004*cos(deg2rad(m)) - 0.0003*cos(deg2rad(mprime))
		}
	}
	return pt
}
//func (m *Moon) getPhase(n int8) float64 {
// return m.quarters[n]
//}
// Phase returns the Moon's phase in the range 0..1 (0 = new, 0.5 = full).
func (m *Moon) Phase() float64 {
	return m.phase
}

// Illumination returns the illuminated fraction of the Moon, 0..1.
func (m *Moon) Illumination() float64 {
	return m.illum
}

// Age returns the age of the Moon in days.
func (m *Moon) Age() float64 {
	return m.age
}

// Distance returns the distance to the Moon in kilometres.
func (m *Moon) Distance() float64 {
	return m.dist
}

// Diameter returns the Moon's angular diameter in degrees.
func (m *Moon) Diameter() float64 {
	return m.angdia
}

// SunDistance returns the distance to the Sun in kilometres.
func (m *Moon) SunDistance() float64 {
	return m.sundist
}

// SunDiameter returns the Sun's angular diameter in degrees.
func (m *Moon) SunDiameter() float64 {
	return m.sunangdia
}

// NewMoon returns the UNIX time of this cycle's new moon (quarters[0]).
func (m *Moon) NewMoon() float64 {
	return m.quarters[0]
}

// FirstQuarter returns the UNIX time of this cycle's first quarter (quarters[1]).
func (m *Moon) FirstQuarter() float64 {
	return m.quarters[1]
}

// FullMoon returns the UNIX time of this cycle's full moon (quarters[2]).
func (m *Moon) FullMoon() float64 {
	return m.quarters[2]
}

// LastQuarter returns the UNIX time of this cycle's last quarter (quarters[3]).
func (m *Moon) LastQuarter() float64 {
	return m.quarters[3]
}

// NextNewMoon returns the UNIX time of the next cycle's new moon (quarters[4]).
func (m *Moon) NextNewMoon() float64 {
	return m.quarters[4]
}
// NextFirstQuarter returns the UNIX time of the NEXT cycle's first
// quarter. quarters[0..3] hold the current cycle's phases and
// quarters[4..7] the next cycle's (see phaseHunt), so the correct index
// is 5 — the original returned quarters[1], the CURRENT first quarter.
func (m *Moon) NextFirstQuarter() float64 {
	return m.quarters[5]
}
// NextFullMoon returns the UNIX time of the next cycle's full moon (quarters[6]).
func (m *Moon) NextFullMoon() float64 {
	return m.quarters[6]
}

// NextLastQuarter returns the UNIX time of the next cycle's last quarter (quarters[7]).
func (m *Moon) NextLastQuarter() float64 {
	return m.quarters[7]
}
// PhaseName returns a human-readable name for the current phase, derived
// by slicing the 0..1 phase value into eight sectors (with "New Moon"
// covering both ends of the range). An out-of-range index yields "".
func (m *Moon) PhaseName() string {
	switch int(math.Floor((m.phase + 0.0625) * 8)) {
	case 0, 8:
		return "New Moon"
	case 1:
		return "Waxing Crescent"
	case 2:
		return "First Quarter"
	case 3:
		return "Waxing Gibbous"
	case 4:
		return "Full Moon"
	case 5:
		return "Waning Gibbous"
	case 6:
		return "Third Quarter"
	case 7:
		return "Waning Crescent"
	}
	return ""
}
// Longitude returns the Moon's true ecliptic longitude in degrees.
func (m *Moon) Longitude() float64 {
	return m.longitude
}
// ZodiacSign returns the zodiac sign name for the Moon's current
// ecliptic longitude.
// NOTE(review): the boundary degrees below are non-uniform and do not
// match the usual 30-degree tropical divisions — presumably they are
// constellation (sidereal) boundaries; confirm against their source.
func (m *Moon) ZodiacSign() string {
	if m.longitude < 33.18 {
		return "aries"
	} else if m.longitude < 51.16 {
		return "taurus"
	} else if m.longitude < 93.44 {
		return "gemini"
	} else if m.longitude < 119.48 {
		return "cancer"
	} else if m.longitude < 135.30 {
		return "leo"
	} else if m.longitude < 173.34 {
		return "virgo"
	} else if m.longitude < 224.17 {
		return "libra"
	} else if m.longitude < 242.57 {
		return "scorpio"
	} else if m.longitude < 271.26 {
		return "sagittarius"
	} else if m.longitude < 302.49 {
		return "capricorn"
	} else if m.longitude < 311.72 {
		return "aquarius"
	} else if m.longitude < 348.58 {
		return "pisces"
	} else {
		return "aries"
	}
} | moon_phase.go | 0.734976 | 0.555857 | moon_phase.go | starcoder
package heap
import "fmt"
// MinIntHeap is a binary min-heap of ints backed by a slice
// (items[0] is the minimum; children of i are at 2i+1 and 2i+2).
type MinIntHeap struct {
	capacity int   // tracked capacity (informational; the slice grows itself)
	size     int   // number of valid heap elements in items
	items    []int // backing array
}
// Operations to get indexes of nodes (0-based array layout of a binary heap).
func (m *MinIntHeap) getLeftChildIndex(parentIndex int) int  { return 2*parentIndex + 1 }
func (m *MinIntHeap) getRightChildIndex(parentIndex int) int { return 2*parentIndex + 2 }
func (m *MinIntHeap) getParentIndex(childIndex int) int      { return (childIndex - 1) / 2 }

// Operations to check for existence of nodes within the valid heap size.
func (m *MinIntHeap) hasLeftChildIndex(index int) bool  { return m.getLeftChildIndex(index) < m.size }
func (m *MinIntHeap) hasRightChildIndex(index int) bool { return m.getRightChildIndex(index) < m.size }
func (m *MinIntHeap) hasParent(index int) bool          { return m.getParentIndex(index) >= 0 }

// Operations to get the node values.
func (m *MinIntHeap) leftChild(index int) int  { return m.items[m.getLeftChildIndex(index)] }
func (m *MinIntHeap) rightChild(index int) int { return m.items[m.getRightChildIndex(index)] }
func (m *MinIntHeap) parent(index int) int     { return m.items[m.getParentIndex(index)] }

// swap exchanges the values at the two given indexes.
func (m *MinIntHeap) swap(indexOne int, indexTwo int) {
	m.items[indexOne], m.items[indexTwo] = m.items[indexTwo], m.items[indexOne]
}
//double capacity of array EDIT: SLICES WILL DOUBLE CAPACITY AS THEY FILL
/*
func (m *MinIntHeap) ensureExtraCapacity() {
if m.size == m.capacity {
m.items = append(m.items, make([]int, len(m.items))...)
m.capacity *= 2
}
}*/
// peek returns the minimum element without removing it;
// it panics if the heap is empty. (The original comment here was a
// copy-paste of the swap comment.)
func (m *MinIntHeap) peek() int {
	if m.size == 0 {
		panic("Illegal State, Size is zero?")
	}
	return m.items[0]
}
// poll removes and returns the minimum element, then restores the heap
// property. The backing slice is truncated so len(m.items) stays equal
// to m.size: without this, a later add() appends the new element past a
// stale tail element, while heapifyUp (which sifts items[size-1]) would
// operate on the stale value instead of the newly added one.
func (m *MinIntHeap) poll() int {
	item := m.items[0]
	// Move the last valid element to the root and shrink the heap.
	m.items[0] = m.items[m.size-1]
	m.size -= 1
	m.items = m.items[:m.size]
	m.heapifyDown()
	return item
}
// add appends a new element and sifts it up to its heap position.
// NOTE(review): heapifyUp sifts items[size-1], so this assumes
// len(m.items) == m.size before the append — verify poll maintains that.
func (m *MinIntHeap) add(item int) {
	//m.ensureExtraCapacity()
	m.items = append(m.items, item)
	m.size += 1
	m.heapifyUp()
}
// heapifyUp sifts the last element (items[size-1]) toward the root until
// the min-heap property holds again.
func (m *MinIntHeap) heapifyUp() {
	index := m.size - 1
	for m.hasParent(index) && m.parent(index) > m.items[index] {
		m.swap(m.getParentIndex(index), index)
		index = m.getParentIndex(index)
	}
}

// heapifyDown sifts the root element down, always swapping with the
// smaller child, until the min-heap property holds again.
func (m *MinIntHeap) heapifyDown() {
	index := 0
	for m.hasLeftChildIndex(index) {
		smallerChildIndex := m.getLeftChildIndex(index)
		if m.hasRightChildIndex(index) && m.rightChild(index) < m.leftChild(index) {
			smallerChildIndex = m.getRightChildIndex(index)
		}
		if m.items[index] < m.items[smallerChildIndex] {
			break
		} else {
			m.swap(index, smallerChildIndex)
		}
		index = smallerChildIndex
	}
}
// makeHeapFromArray builds a MinIntHeap over the given slice (adopting
// it as backing storage, not copying). The original simply wrapped the
// slice and assumed it was already heap-ordered; an arbitrary input
// array violated the min-heap invariant every method relies on. This
// version runs the standard O(n) bottom-up build-heap (sift-down from
// the last internal node) so any input is valid.
func makeHeapFromArray(initArray []int) MinIntHeap {
	items := initArray
	n := len(items)
	for start := n/2 - 1; start >= 0; start-- {
		// Sift items[start] down to its correct position.
		i := start
		for {
			child := 2*i + 1
			if child >= n {
				break
			}
			if child+1 < n && items[child+1] < items[child] {
				child++
			}
			if items[i] <= items[child] {
				break
			}
			items[i], items[child] = items[child], items[i]
			i = child
		}
	}
	return MinIntHeap{
		capacity: cap(initArray),
		size:     n,
		items:    items,
	}
}
// Main demonstrates the heap: it seeds a heap with a single zero,
// inserts sample values, and prints the internal slice ordering.
func Main() {
	inputArray := []int{6, 5, 7, 2, 8}
	fmt.Println("Size of Array is ", cap(inputArray))
	heap := makeHeapFromArray([]int{0})
	for _, value := range inputArray {
		heap.add(value)
	}
	// Prints array (heap) order, not sorted order.
	for _, number := range heap.items {
		fmt.Println("Number is: ", number)
	}
} | src/heap/heap.go | 0.625896 | 0.420897 | heap.go | starcoder
package model
import (
"fmt"
"github.com/hyperjumptech/grule-rule-engine/pkg"
"reflect"
)
// NewGoValueNode creates new instance of ValueNode backed by golang
// reflection. The resulting node has no parent (parentNode is nil).
func NewGoValueNode(value reflect.Value, identifiedAs string) ValueNode {
	node := &GoValueNode{
		identifiedAs: identifiedAs,
		thisValue:    value,
	}
	return node
}
// GoValueNode is an implementation of ValueNode that used to traverse native golang primitives through reflect package
type GoValueNode struct {
parentNode ValueNode
identifiedAs string
thisValue reflect.Value
}
// Value returns the underlying reflect.Value
func (node *GoValueNode) Value() reflect.Value {
return node.thisValue
}
// HasParent returns `true` if the current value is a field, function, map, array, slice of another value
func (node *GoValueNode) HasParent() bool {
return node.parentNode != nil
}
// Parent returns the value node of the parent value, if this node is a field, function, map, array, slice of another value
func (node *GoValueNode) Parent() ValueNode {
return node.parentNode
}
// IdentifiedAs return the current representation of this Value Node
func (node *GoValueNode) IdentifiedAs() string {
if node.HasParent() {
if node.parentNode.IsArray() || node.parentNode.IsMap() {
return fmt.Sprintf("%s%s", node.parentNode.IdentifiedAs(), node.identifiedAs)
}
return fmt.Sprintf("%s.%s", node.parentNode.IdentifiedAs(), node.identifiedAs)
}
return node.identifiedAs
}
// ContinueWithValue will return a nother ValueNode to wrap the specified value and treated as child of current node.
// The main purpose of this is for easier debugging.
func (node *GoValueNode) ContinueWithValue(value reflect.Value, identifiedAs string) ValueNode {
return &GoValueNode{
parentNode: node,
identifiedAs: identifiedAs,
thisValue: value,
}
}
// GetValue will return the underlying reflect.Value
func (node *GoValueNode) GetValue() (reflect.Value, error) {
return node.thisValue, nil
}
// GetType will return the underlying value's type
func (node *GoValueNode) GetType() (reflect.Type, error) {
return node.thisValue.Type(), nil
}
// IsArray to check if the underlying value is an array or not
func (node *GoValueNode) IsArray() bool {
return node.thisValue.Kind() == reflect.Array || node.thisValue.Kind() == reflect.Slice
}
// GetArrayType to get the type of underlying value array element types.
func (node *GoValueNode) GetArrayType() (reflect.Type, error) {
if node.IsArray() {
return node.thisValue.Type().Elem(), nil
}
return nil, fmt.Errorf("this node identified as \"%s\" is not referring to an array or slice", node.IdentifiedAs())
}
// GetArrayValueAt to get the value of an array element if the current underlying value is an array
func (node *GoValueNode) GetArrayValueAt(index int) (reflect.Value, error) {
if node.IsArray() {
return node.thisValue.Index(index), nil
}
return reflect.Value{}, fmt.Errorf("this node identified as \"%s\" is not referring to an array or slice", node.IdentifiedAs())
}
// GetChildNodeByIndex is similar to `GetArrayValueAt`, where this will return a ValueNode that wrap the value.
func (node *GoValueNode) GetChildNodeByIndex(index int) (ValueNode, error) {
if node.IsArray() {
v, err := node.GetArrayValueAt(index)
if err != nil {
return nil, err
}
gv := node.ContinueWithValue(v, fmt.Sprintf("[%d]", index))
return gv, nil
}
return nil, fmt.Errorf("this node identified as \"%s\" is not an array. its %s", node.IdentifiedAs(), node.thisValue.Type().String())
}
// SetArrayValueAt will set the value of specified array index on the current underlying array value.
func (node *GoValueNode) SetArrayValueAt(index int, value reflect.Value) (err error) {
if node.IsArray() {
defer func() {
if r := recover(); r != nil {
err = fmt.Errorf("recovered : %v", r)
}
}()
val := node.thisValue.Index(index)
if val.CanAddr() && val.CanSet() {
if pkg.IsNumber(val) && pkg.IsNumber(value) {
return SetNumberValue(val, value)
}
val.Set(value)
return nil
}
return fmt.Errorf("this node identified as \"%s\" can not set value on array index %d", node.IdentifiedAs(), index)
}
return fmt.Errorf("this node identified as \"%s\" is not referencing an array or slice", node.IdentifiedAs())
}
// AppendValue will append the new values into the current underlying array.
// will return error if argument list are not compatible with the array element type.
func (node *GoValueNode) AppendValue(value []reflect.Value) (err error) {
if node.IsArray() {
arrVal := node.thisValue
if arrVal.CanSet() {
defer func() {
if r := recover(); r != nil {
err = fmt.Errorf("recovered : %v", r)
}
}()
arrVal.Set(reflect.Append(arrVal, value...))
return nil
}
}
return fmt.Errorf("this node identified as \"%s\" is not referencing an array or slice", node.IdentifiedAs())
}
// Length will return the length of underlying value if its an array, slice, map or string
func (node *GoValueNode) Length() (int, error) {
if node.IsArray() || node.IsMap() || node.IsString() {
return node.thisValue.Len(), nil
}
return 0, fmt.Errorf("this node identified as \"%s\" is not referencing an array, slice, map or string", node.IdentifiedAs())
}
// IsMap will validate if the underlying value is a map.
func (node *GoValueNode) IsMap() bool {
return node.thisValue.Kind() == reflect.Map
}
// GetMapValueAt will retrieve a map value by the specified key argument.
func (node *GoValueNode) GetMapValueAt(index reflect.Value) (reflect.Value, error) {
if node.IsMap() {
retVal := node.thisValue.MapIndex(index)
if retVal.IsValid() {
return retVal, nil
}
return reflect.Value{}, fmt.Errorf("this node identified as \"%s\" have no selector with specified key", node.IdentifiedAs())
}
return reflect.Value{}, fmt.Errorf("this node identified as \"%s\" is not referencing a map", node.IdentifiedAs())
}
// SetMapValueAt will set the map value for the specified key, value argument
func (node *GoValueNode) SetMapValueAt(index, newValue reflect.Value) (err error) {
if node.IsMap() {
defer func() {
if r := recover(); r != nil {
err = fmt.Errorf("recovered : %v", r)
}
}()
node.thisValue.SetMapIndex(index, newValue)
return nil
}
return fmt.Errorf("this node identified as \"%s\" is not referencing a map", node.IdentifiedAs())
}
// GetChildNodeBySelector is similar to GetMapValueAt, it retrieve a value of map element identified by a value index as ValueNode.
func (node *GoValueNode) GetChildNodeBySelector(index reflect.Value) (ValueNode, error) {
val, err := node.GetMapValueAt(index)
if err != nil {
return nil, err
}
return node.ContinueWithValue(val, fmt.Sprintf("[%s->%s]", index.Type().String(), index.String())), nil
}
// IsObject will check if the underlying value is a struct or pointer to a struct
func (node *GoValueNode) IsObject() bool {
if node.thisValue.IsValid() {
typ := node.thisValue.Type()
if typ.Kind() == reflect.Ptr {
return typ.Elem().Kind() == reflect.Struct
}
return typ.Kind() == reflect.Struct
}
return false
}
// GetObjectValueByField will return underlying value's field
func (node *GoValueNode) GetObjectValueByField(field string) (reflect.Value, error) {
if node.IsObject() {
var val reflect.Value
if node.thisValue.Kind() == reflect.Ptr {
val = node.thisValue.Elem().FieldByName(field)
}
if node.thisValue.Kind() == reflect.Struct {
val = node.thisValue.FieldByName(field)
}
if val.IsValid() {
return val, nil
}
return reflect.Value{}, fmt.Errorf("this node have no field named %s", field)
}
return reflect.Value{}, fmt.Errorf("this node identified as \"%s\" is not referencing to an object", node.IdentifiedAs())
}
// GetObjectTypeByField will return underlying type of the value's field
func (node *GoValueNode) GetObjectTypeByField(field string) (typ reflect.Type, err error) {
if node.IsObject() {
defer func() {
if r := recover(); r != nil {
err = fmt.Errorf("recovered : %v", r)
typ = nil
}
}()
if node.thisValue.Kind() == reflect.Ptr {
return node.thisValue.Elem().FieldByName(field).Type(), nil
}
if node.thisValue.Kind() == reflect.Struct {
return node.thisValue.FieldByName(field).Type(), nil
}
}
return nil, fmt.Errorf("this node identified as \"%s\" is not referring to an object", node.IdentifiedAs())
}
// SetNumberValue will assign a numeric value to a numeric target value
// this helper function is to ensure assignment between numerical types is happening regardless of types, int, uint or float.
// The rule designer should be careful as conversion of types in automatic way like this will cause lost of precision
// during conversion. This will be removed in the future version.
func SetNumberValue(target, newvalue reflect.Value) error {
if pkg.IsNumber(target) && pkg.IsNumber(newvalue) {
switch target.Type().Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
if pkg.GetBaseKind(newvalue) == reflect.Uint64 {
target.SetInt(int64(newvalue.Uint()))
} else if pkg.GetBaseKind(newvalue) == reflect.Float64 {
target.SetInt(int64(newvalue.Float()))
} else {
target.SetInt(newvalue.Int())
}
return nil
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
if pkg.GetBaseKind(newvalue) == reflect.Uint64 {
target.SetUint(newvalue.Uint())
} else if pkg.GetBaseKind(newvalue) == reflect.Float64 {
target.SetUint(uint64(newvalue.Float()))
} else {
target.SetUint(uint64(newvalue.Int()))
}
return nil
case reflect.Float32, reflect.Float64:
if pkg.GetBaseKind(newvalue) == reflect.Uint64 {
target.SetFloat(float64(newvalue.Uint()))
} else if pkg.GetBaseKind(newvalue) == reflect.Float64 {
target.SetFloat(newvalue.Float())
} else {
target.SetFloat(float64(newvalue.Int()))
}
return nil
}
return fmt.Errorf("this line should not be reached")
}
return fmt.Errorf("this function only used for assigning number data to number variable")
}
// SetObjectValueByField will set the underlying value's field with new value.
// The deferred recover is registered BEFORE any reflection call: previously it
// was installed only after Elem().FieldByName had already run, so calling this
// on a non-pointer value panicked instead of returning an error.
func (node *GoValueNode) SetObjectValueByField(field string, newValue reflect.Value) (err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("recovered : %v", r)
		}
	}()
	fieldVal := node.thisValue.Elem().FieldByName(field)
	if fieldVal.IsValid() && fieldVal.CanAddr() && fieldVal.CanSet() {
		// Numbers get a lossy cross-type assignment via SetNumberValue.
		if pkg.IsNumber(fieldVal) && pkg.IsNumber(newValue) {
			return SetNumberValue(fieldVal, newValue)
		}
		fieldVal.Set(newValue)
		return nil
	}
	return fmt.Errorf("this node identified as \"%s\" have field \"%s\" that is not valid nor addressable", node.IdentifiedAs(), field)
}
// CallFunction will call a function owned by the underlying value receiver.
// this function will artificially create a built-in functions for constants, array and map.
//
// Fixes in this revision:
//   - the error returned by AppendValue was silently discarded;
//   - a dead, duplicated "Clear" branch was removed (no case ever assigned
//     arrFunc for "Clear", and both branches were byte-identical anyway).
func (node *GoValueNode) CallFunction(funcName string, args ...reflect.Value) (retval reflect.Value, err error) {
	switch pkg.GetBaseKind(node.thisValue) {
	case reflect.Int64, reflect.Uint64, reflect.Float64, reflect.Bool:
		// Primitive numeric/bool values expose no callable functions.
		return reflect.ValueOf(nil), fmt.Errorf("this node identified as \"%s\" try to call function %s which is not supported for type %s", node.IdentifiedAs(), funcName, node.thisValue.Type().String())
	case reflect.String:
		// Built-in string functions, dispatched by name.
		var strfunc func(string, []reflect.Value) (reflect.Value, error)
		switch funcName {
		case "In":
			strfunc = StrIn
		case "Compare":
			strfunc = StrCompare
		case "Contains":
			strfunc = StrContains
		case "Count":
			strfunc = StrCount
		case "HasPrefix":
			strfunc = StrHasPrefix
		case "HasSuffix":
			strfunc = StrHasSuffix
		case "Index":
			strfunc = StrIndex
		case "LastIndex":
			strfunc = StrLastIndex
		case "Repeat":
			strfunc = StrRepeat
		case "Replace":
			strfunc = StrReplace
		case "Split":
			strfunc = StrSplit
		case "ToLower":
			strfunc = StrToLower
		case "ToUpper":
			strfunc = StrToUpper
		case "Trim":
			strfunc = StrTrim
		case "Len":
			strfunc = StrLen
		case "MatchString":
			strfunc = StrMatchRegexPattern
		}
		if strfunc != nil {
			val, err := strfunc(node.thisValue.String(), args)
			if err != nil {
				return reflect.Value{}, err
			}
			return val, nil
		}
		return reflect.Value{}, fmt.Errorf("this node identified as \"%s\" call function %s is not supported for string", node.IdentifiedAs(), funcName)
	}
	if node.IsArray() {
		// Built-in array functions.
		var arrFunc func(reflect.Value, []reflect.Value) (reflect.Value, error)
		switch funcName {
		case "Len":
			arrFunc = ArrMapLen
		case "Append":
			// Propagate the append error instead of dropping it.
			if err := node.AppendValue(args); err != nil {
				return reflect.Value{}, err
			}
			return reflect.Value{}, nil
		}
		if arrFunc != nil {
			val, err := arrFunc(node.thisValue, args)
			if err != nil {
				return reflect.Value{}, err
			}
			return val, nil
		}
		return reflect.Value{}, fmt.Errorf("this node identified as \"%s\" call function %s is not supported for array", node.IdentifiedAs(), funcName)
	}
	if node.IsMap() {
		// Built-in map functions.
		var mapFunc func(reflect.Value, []reflect.Value) (reflect.Value, error)
		switch funcName {
		case "Len":
			mapFunc = ArrMapLen
		}
		if mapFunc != nil {
			val, err := mapFunc(node.thisValue, args)
			if err != nil {
				return reflect.Value{}, err
			}
			return val, nil
		}
		return reflect.Value{}, fmt.Errorf("this node identified as \"%s\" call function %s is not supported for map", node.IdentifiedAs(), funcName)
	}
	if node.IsObject() {
		// User-defined method on a struct (or pointer to struct) receiver.
		funcValue := node.thisValue.MethodByName(funcName)
		if funcValue.IsValid() {
			rets := funcValue.Call(args)
			if len(rets) > 1 {
				return reflect.Value{}, fmt.Errorf("this node identified as \"%s\" calling function %s which returns multiple values, multiple value returns are not supported", node.IdentifiedAs(), funcName)
			}
			if len(rets) == 1 {
				return rets[0], nil
			}
			return reflect.Value{}, nil
		}
		return reflect.Value{}, fmt.Errorf("this node identified as \"%s\" have no function named %s", node.IdentifiedAs(), funcName)
	}
	return reflect.ValueOf(nil), fmt.Errorf("this node identified as \"%s\" is not referencing an object thus function %s call is not supported", node.IdentifiedAs(), funcName)
}
// GetChildNodeByField will retrieve the underlying struct's field and return the ValueNode wraper.
func (node *GoValueNode) GetChildNodeByField(field string) (ValueNode, error) {
val, err := node.GetObjectValueByField(field)
if err != nil {
return nil, err
}
return node.ContinueWithValue(val, field), nil
}
// IsTime will check if the underlying value is a time.Time
func (node *GoValueNode) IsTime() bool {
return node.thisValue.Type().String() == "time.Time"
}
// IsInteger will check if the underlying value is a type of int, or uint
func (node *GoValueNode) IsInteger() bool {
kind := pkg.GetBaseKind(node.thisValue)
return kind == reflect.Int64 || kind == reflect.Uint64
}
// IsReal will check if the underlying value is a type of real number, float.
func (node *GoValueNode) IsReal() bool {
kind := pkg.GetBaseKind(node.thisValue)
return kind == reflect.Float64
}
// IsBool will check if the underlying value is a type of boolean.
func (node *GoValueNode) IsBool() bool {
return node.thisValue.Kind() == reflect.Bool
}
// IsString will check if the underlying value is a type of string
func (node *GoValueNode) IsString() bool {
return node.thisValue.Kind() == reflect.String
} | model/GoDataAccessLayer.go | 0.774711 | 0.525491 | GoDataAccessLayer.go | starcoder |
package matchers
import "bytes"
// Png matches a Portable Network Graphics file by its fixed 8-byte signature.
func Png(in []byte) bool {
	signature := []byte{0x89, 'P', 'N', 'G', '\r', '\n', 0x1A, '\n'}
	return bytes.HasPrefix(in, signature)
}
// Jpg matches a Joint Photographic Experts Group file.
func Jpg(in []byte) bool {
return bytes.HasPrefix(in, []byte{0xFF, 0xD8, 0xFF})
}
// isJpeg2k matches a generic JPEG2000 file.
func isJpeg2k(in []byte) bool {
if len(in) < 24 {
return false
}
signature := in[4:8]
return bytes.Equal(signature, []byte{0x6A, 0x50, 0x20, 0x20}) ||
bytes.Equal(signature, []byte{0x6A, 0x50, 0x32, 0x20})
}
// Jp2 matches a JPEG 2000 Image file (ISO 15444-1).
func Jp2(in []byte) bool {
return isJpeg2k(in) && bytes.Equal(in[20:24], []byte{0x6a, 0x70, 0x32, 0x20})
}
// Jpx matches a JPEG 2000 Image file (ISO 15444-2).
func Jpx(in []byte) bool {
return isJpeg2k(in) && bytes.Equal(in[20:24], []byte{0x6a, 0x70, 0x78, 0x20})
}
// Jpm matches a JPEG 2000 Image file (ISO 15444-6).
func Jpm(in []byte) bool {
return isJpeg2k(in) && bytes.Equal(in[20:24], []byte{0x6a, 0x70, 0x6D, 0x20})
}
// Gif matches a Graphics Interchange Format file.
func Gif(in []byte) bool {
return bytes.HasPrefix(in, []byte("GIF87a")) ||
bytes.HasPrefix(in, []byte("GIF89a"))
}
// Webp matches a WebP file: a RIFF container whose format tag is "WEBP".
// The signature occupies bytes 0-3 ("RIFF") and 8-11 ("WEBP"), so exactly
// 12 bytes suffice; the previous check (len(in) > 12) rejected a valid
// 12-byte input.
func Webp(in []byte) bool {
	return len(in) >= 12 &&
		bytes.Equal(in[0:4], []byte("RIFF")) &&
		bytes.Equal(in[8:12], []byte{0x57, 0x45, 0x42, 0x50})
}
// Bmp matches a bitmap image file by its two-byte "BM" magic.
func Bmp(in []byte) bool {
	return bytes.HasPrefix(in, []byte{0x42, 0x4D})
}
// Ps matches a PostScript file.
func Ps(in []byte) bool {
return bytes.HasPrefix(in, []byte("%!PS-Adobe-"))
}
// Psd matches a Photoshop Document file.
func Psd(in []byte) bool {
return bytes.HasPrefix(in, []byte("8BPS"))
}
// Ico matches an ICO file.
func Ico(in []byte) bool {
return bytes.HasPrefix(in, []byte{0x00, 0x00, 0x01, 0x00}) ||
bytes.HasPrefix(in, []byte{0x00, 0x00, 0x02, 0x00})
}
// Icns matches an ICNS (Apple Icon Image format) file.
func Icns(in []byte) bool {
return bytes.HasPrefix(in, []byte("icns"))
}
// Tiff matches a Tagged Image File Format file.
func Tiff(in []byte) bool {
return bytes.HasPrefix(in, []byte{0x49, 0x49, 0x2A, 0x00}) ||
bytes.HasPrefix(in, []byte{0x4D, 0x4D, 0x00, 0x2A})
}
// Bpg matches a Better Portable Graphics file.
func Bpg(in []byte) bool {
return bytes.HasPrefix(in, []byte{0x42, 0x50, 0x47, 0xFB})
}
// Dwg matches a CAD drawing file. A DWG file starts with "AC" followed by a
// four-byte release tag; only the known AutoCAD release tags listed below
// are accepted.
func Dwg(in []byte) bool {
	if len(in) < 6 || in[0] != 0x41 || in[1] != 0x43 {
		return false
	}
	// ASCII release tags: "1.40", "1.50", "2.10", then "1002".."1032".
	dwgVersions := [][]byte{
		{0x31, 0x2E, 0x34, 0x30},
		{0x31, 0x2E, 0x35, 0x30},
		{0x32, 0x2E, 0x31, 0x30},
		{0x31, 0x30, 0x30, 0x32},
		{0x31, 0x30, 0x30, 0x33},
		{0x31, 0x30, 0x30, 0x34},
		{0x31, 0x30, 0x30, 0x36},
		{0x31, 0x30, 0x30, 0x39},
		{0x31, 0x30, 0x31, 0x32},
		{0x31, 0x30, 0x31, 0x34},
		{0x31, 0x30, 0x31, 0x35},
		{0x31, 0x30, 0x31, 0x38},
		{0x31, 0x30, 0x32, 0x31},
		{0x31, 0x30, 0x32, 0x34},
		{0x31, 0x30, 0x33, 0x32},
	}
	for _, d := range dwgVersions {
		if bytes.Equal(in[2:6], d) {
			return true
		}
	}
	return false
}
package rapid
import (
"fmt"
"math"
"math/bits"
"reflect"
)
const (
float32ExpBits = 8
float32SignifBits = 23
float64ExpBits = 11
float64SignifBits = 52
floatExpLabel = "floatexp"
floatSignifLabel = "floatsignif"
)
var (
float32Type = reflect.TypeOf(float32(0))
float64Type = reflect.TypeOf(float64(0))
)
func Float32() *Generator {
return Float32Range(-math.MaxFloat32, math.MaxFloat32)
}
func Float32Min(min float32) *Generator {
return Float32Range(min, math.MaxFloat32)
}
func Float32Max(max float32) *Generator {
return Float32Range(-math.MaxFloat32, max)
}
func Float32Range(min float32, max float32) *Generator {
assertf(min == min, "min should not be a NaN")
assertf(max == max, "max should not be a NaN")
assertf(min <= max, "invalid range [%v, %v]", min, max)
return newGenerator(&floatGen{
typ: float32Type,
min: float64(min),
max: float64(max),
minVal: -math.MaxFloat32,
maxVal: math.MaxFloat32,
})
}
func Float64() *Generator {
return Float64Range(-math.MaxFloat64, math.MaxFloat64)
}
func Float64Min(min float64) *Generator {
return Float64Range(min, math.MaxFloat64)
}
func Float64Max(max float64) *Generator {
return Float64Range(-math.MaxFloat64, max)
}
func Float64Range(min float64, max float64) *Generator {
assertf(min == min, "min should not be a NaN")
assertf(max == max, "max should not be a NaN")
assertf(min <= max, "invalid range [%v, %v]", min, max)
return newGenerator(&floatGen{
typ: float64Type,
min: min,
max: max,
minVal: -math.MaxFloat64,
maxVal: math.MaxFloat64,
})
}
type floatGen struct {
typ reflect.Type
min float64
max float64
minVal float64
maxVal float64
}
func (g *floatGen) String() string {
kind := "Float64"
if g.typ == float32Type {
kind = "Float32"
}
if g.min != g.minVal && g.max != g.maxVal {
return fmt.Sprintf("%sRange(%g, %g)", kind, g.min, g.max)
} else if g.min != g.minVal {
return fmt.Sprintf("%sMin(%g)", kind, g.min)
} else if g.max != g.maxVal {
return fmt.Sprintf("%sMax(%g)", kind, g.max)
}
return fmt.Sprintf("%s()", kind)
}
func (g *floatGen) type_() reflect.Type {
return g.typ
}
func (g *floatGen) value(t *T) value {
if g.typ == float32Type {
return float32FromParts(genFloatRange(t.s, g.min, g.max, float32SignifBits))
} else {
return float64FromParts(genFloatRange(t.s, g.min, g.max, float64SignifBits))
}
}
func ufloatFracBits(e int32, signifBits uint) uint {
if e <= 0 {
return signifBits
} else if uint(e) < signifBits {
return signifBits - uint(e)
} else {
return 0
}
}
func ufloat32Parts(f float32) (int32, uint64, uint64) {
u := math.Float32bits(f) & math.MaxInt32
e := int32(u>>float32SignifBits) - int32(bitmask64(float32ExpBits-1))
s := uint64(u) & bitmask64(float32SignifBits)
n := ufloatFracBits(e, float32SignifBits)
return e, s >> n, s & bitmask64(n)
}
func ufloat64Parts(f float64) (int32, uint64, uint64) {
u := math.Float64bits(f) & math.MaxInt64
e := int32(u>>float64SignifBits) - int32(bitmask64(float64ExpBits-1))
s := u & bitmask64(float64SignifBits)
n := ufloatFracBits(e, float64SignifBits)
return e, s >> n, s & bitmask64(n)
}
func ufloat32FromParts(e int32, si uint64, sf uint64) float32 {
e_ := (uint32(e) + uint32(bitmask64(float32ExpBits-1))) << float32SignifBits
s_ := (uint32(si) << ufloatFracBits(e, float32SignifBits)) | uint32(sf)
return math.Float32frombits(e_ | s_)
}
func ufloat64FromParts(e int32, si uint64, sf uint64) float64 {
e_ := (uint64(e) + bitmask64(float64ExpBits-1)) << float64SignifBits
s_ := (si << ufloatFracBits(e, float64SignifBits)) | sf
return math.Float64frombits(e_ | s_)
}
func float32FromParts(sign bool, e int32, si uint64, sf uint64) float32 {
f := ufloat32FromParts(e, si, sf)
if sign {
return -f
} else {
return f
}
}
// float64FromParts assembles a float64 from a sign flag plus the
// exponent/significand parts produced by the ufloat64 helpers.
func float64FromParts(sign bool, e int32, si uint64, sf uint64) float64 {
	f := ufloat64FromParts(e, si, sf)
	if !sign {
		return f
	}
	return -f
}
// genUfloatRange draws the (exponent, integer significand, fractional
// significand) parts of a non-negative float in [min, max] from the bit
// stream s. The exponent is drawn first; the significand bounds are then
// narrowed depending on whether the drawn exponent sits at either edge of
// the range (or overflowed past it). Finally, low bits of the fractional
// significand are speculatively cleared to bias toward "simpler" values.
func genUfloatRange(s bitStream, min float64, max float64, signifBits uint) (int32, uint64, uint64) {
	assert(min >= 0 && min <= max)
	var (
		minExp, maxExp int32
		minSignifI, maxSignifI, minSignifF, maxSignifF uint64
	)
	// Decompose both endpoints into exponent + significand parts, using the
	// float32 or float64 layout depending on the caller's significand width.
	if signifBits == float32SignifBits {
		minExp, minSignifI, minSignifF = ufloat32Parts(float32(min))
		maxExp, maxSignifI, maxSignifF = ufloat32Parts(float32(max))
	} else {
		minExp, minSignifI, minSignifF = ufloat64Parts(min)
		maxExp, maxSignifI, maxSignifF = ufloat64Parts(max)
	}
	// Draw the exponent inside its own labeled group so shrinking can
	// target it independently of the significand bits.
	i := s.beginGroup(floatExpLabel, false)
	e, lOverflow, rOverflow := genIntRange(s, int64(minExp), int64(maxExp), true)
	s.endGroup(i, false)
	fracBits := ufloatFracBits(int32(e), signifBits)
	j := s.beginGroup(floatSignifLabel, false)
	// Bounds for the integer part of the significand: pinned to an endpoint
	// on overflow, and clamped to an endpoint's significand when the drawn
	// exponent equals that endpoint's exponent.
	var siMin, siMax uint64
	switch {
	case lOverflow:
		siMin, siMax = minSignifI, minSignifI
	case rOverflow:
		siMin, siMax = maxSignifI, maxSignifI
	case minExp == maxExp:
		siMin, siMax = minSignifI, maxSignifI
	case int32(e) == minExp:
		siMin, siMax = minSignifI, bitmask64(signifBits-fracBits)
	case int32(e) == maxExp:
		siMin, siMax = 0, maxSignifI
	default:
		siMin, siMax = 0, bitmask64(signifBits-fracBits)
	}
	si, _, _ := genUintRange(s, siMin, siMax, false)
	// Bounds for the fractional part, analogously narrowed by whether the
	// exponent AND integer significand landed on an endpoint.
	var sfMin, sfMax uint64
	switch {
	case lOverflow:
		sfMin, sfMax = minSignifF, minSignifF
	case rOverflow:
		sfMin, sfMax = maxSignifF, maxSignifF
	case minExp == maxExp && minSignifI == maxSignifI:
		sfMin, sfMax = minSignifF, maxSignifF
	case int32(e) == minExp && si == minSignifI:
		sfMin, sfMax = minSignifF, bitmask64(fracBits)
	case int32(e) == maxExp && si == maxSignifI:
		sfMin, sfMax = 0, maxSignifF
	default:
		sfMin, sfMax = 0, bitmask64(fracBits)
	}
	maxR := bits.Len64(sfMax - sfMin)
	r := genUintNNoReject(s, uint64(maxR))
	sf, _, _ := genUintRange(s, sfMin, sfMax, false)
	s.endGroup(j, false)
	// Clear up to (maxR - r) low bits of the fraction, stopping as soon as
	// clearing would drop below sfMin; this biases toward round values.
	for i := uint(0); i < uint(maxR)-uint(r); i++ {
		mask := ^(uint64(1) << i)
		if sf&mask < sfMin {
			break
		}
		sf &= mask
	}
	return int32(e), si, sf
}
func genFloatRange(s bitStream, min float64, max float64, signifBits uint) (bool, int32, uint64, uint64) {
var posMin, negMin, pNeg float64
if min >= 0 {
posMin = min
pNeg = 0
} else if max <= 0 {
negMin = -max
pNeg = 1
} else {
pNeg = 0.5
}
if flipBiasedCoin(s, pNeg) {
e, si, sf := genUfloatRange(s, negMin, -min, signifBits)
return true, e, si, sf
} else {
e, si, sf := genUfloatRange(s, posMin, max, signifBits)
return false, e, si, sf
}
} | vendor/pgregory.net/rapid/floats.go | 0.646125 | 0.453322 | floats.go | starcoder |
package jodaTime
/*
jodaTime provides a date formatter using the yoda syntax.
http://joda-time.sourceforge.net/apidocs/org/joda/time/format/DateTimeFormat.html
*/
import (
"strconv"
"time"
)
/*
Symbol Meaning Presentation Examples
------ ------- ------------ -------
G era text AD
C century of era (>=0) number 20
Y year of era (>=0) year 1996
x weekyear year 1996
w week of weekyear number 27
e day of week number 2
E day of week text Tuesday; Tue
y year year 1996
D day of year number 189
M month of year month July; Jul; 07
d day of month number 10
a halfday of day text PM
K hour of halfday (0~11) number 0
h clockhour of halfday (1~12) number 12
H hour of day (0~23) number 0
k clockhour of day (1~24) number 24
m minute of hour number 30
s second of minute number 55
S fraction of second number 987654321
z time zone text Pacific Standard Time; PST
Z time zone offset/id zone -0800; -08:00; America/Los_Angeles
' escape for text delimiter
'' single quote literal '
*/
// Format formats a date based on joda conventions
func Format(format string, date time.Time) string {
formatRune := []rune(format)
lenFormat := len(formatRune)
out := ""
for i := 0; i < len(formatRune); i++ {
switch r := formatRune[i]; r {
case 'Y', 'y', 'x': // Y YYYY YY year
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
switch j {
case 1, 3, 4: // Y YYY YYY
out += strconv.Itoa(date.Year())
case 2: // YY
out += strconv.Itoa(date.Year())[2:4]
}
case 'D': // D DD day of year
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
switch j {
case 1: // D
out += strconv.Itoa(date.YearDay())
case 2: // DD
if date.YearDay() < 10 {
out += "0"
out += strconv.Itoa(date.YearDay())
} else {
out += strconv.Itoa(date.YearDay())
}
}
case 'w': // w ww week of weekyear
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
_, w := date.ISOWeek()
switch j {
case 1: // w
out += strconv.Itoa(w)
case 2: // ww
if w < 10 {
out += "0"
out += strconv.Itoa(w)
} else {
out += strconv.Itoa(w)
}
}
case 'M': // M MM MMM MMMM month of year
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
v := date.Month()
switch j {
case 1: // M
out += strconv.Itoa(int(v))
case 2: // MM
if v < 10 {
out += "0"
out += strconv.Itoa(int(v))
} else {
out += strconv.Itoa(int(v))
}
case 3: // MMM
out += v.String()[0:3]
case 4: // MMMM
out += v.String()
}
case 'd': // d dd day of month
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
v := date.Day()
switch j {
case 1: // d
out += strconv.Itoa(v)
case 2: // dd
if v < 10 {
out += "0"
out += strconv.Itoa(v)
} else {
out += strconv.Itoa(v)
}
}
case 'e': // e ee day of week(number)
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
v := date.Weekday()
switch j {
case 1: // e
out += strconv.Itoa(int(v))
case 2: // ee
out += "0"
out += strconv.Itoa(int(v))
}
case 'E': // E EE
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
v := date.Weekday()
switch j {
case 1, 2, 3: // E
out += v.String()[0:3]
case 4: // EE
out += v.String()
}
case 'h': // h hh clockhour of halfday (1~12)
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
v := date.Hour()
if v > 12 {
v = v - 12
} else if v == 0 {
v = 12
}
switch j {
case 1: // h
out += strconv.Itoa(v)
case 2: // hh
if v < 10 {
out += "0"
out += strconv.Itoa(v)
} else {
out += strconv.Itoa(v)
}
}
case 'H': // H HH
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
v := date.Hour()
switch j {
case 1: // H
out += strconv.Itoa(v)
case 2: // HH
if v < 10 {
out += "0"
out += strconv.Itoa(v)
} else {
out += strconv.Itoa(v)
}
}
case 'a': // a
if date.Hour() > 12 {
out += "PM"
} else {
out += "AM"
}
case 'm': // m mm minute of hour
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
v := date.Minute()
switch j {
case 1: // m
out += strconv.Itoa(v)
case 2: // mm
if v < 10 {
out += "0"
out += strconv.Itoa(v)
} else {
out += strconv.Itoa(v)
}
}
case 's': // s ss
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
v := date.Second()
switch j {
case 1: // s
out += strconv.Itoa(v)
case 2: // ss
if v < 10 {
out += "0"
out += strconv.Itoa(v)
} else {
out += strconv.Itoa(v)
}
}
case 'S': // S (from 1 to 9 repeats)
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
if j >= 1 && j <= 9 {
v := date.Nanosecond() - (date.Nanosecond()/1000000000)*1000000000
numStr := strconv.Itoa(v)
out += ("000000000"[:9-len(numStr)] + numStr)[:j]
}
case 'z': // z
z, _ := date.Zone()
out += z
case 'Z': // Z ZZ
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
zs, z := date.Zone()
sign := "+"
if z < 0 {
sign = "-"
z = -z
}
v := z / 3600
switch j {
case 1: // Z
out += sign
if v < 10 {
out += "0"
out += strconv.Itoa(v)
} else {
out += strconv.Itoa(v)
}
out += "00"
case 2: // ZZ
out += sign
if v < 10 {
out += "0"
out += strconv.Itoa(v)
} else {
out += strconv.Itoa(v)
}
out += ":00"
case 3: // ZZZ
out += timeZone[zs]
}
case 'G': //era text
out += "AD"
case 'C': //century of era (>=0) number
out += strconv.Itoa(date.Year())[0:2]
case 'K': // K KK hour of halfday (0~11)
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
v := date.Hour()
if v >= 12 {
v = v - 12
}
switch j {
case 1: // K
out += strconv.Itoa(v)
case 2: // KK
if v < 10 {
out += "0"
out += strconv.Itoa(v)
} else {
out += strconv.Itoa(v)
}
}
case 'k': // k kk clockhour of day (1~24)
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
break
}
}
i = i + j - 1
v := date.Hour()
switch j {
case 1: // k
if v == 0 {
v = 24
}
out += strconv.Itoa(v)
case 2: // kk
if v == 0 {
v = 24
} else if v < 10 {
out += "0"
}
out += strconv.Itoa(v)
}
case '\'': // ' (text delimiter) or '' (real quote)
// real quote
if formatRune[i+1] == r {
out += "'"
i = i + 1
continue
}
var tmp []rune
j := 1
for ; i+j < lenFormat; j++ {
if formatRune[i+j] != r {
tmp = append(tmp, formatRune[i+j])
continue
}
break
}
i = i + j
out += string(tmp)
default:
out += string(r)
}
}
return out
}
var timeZone = map[string]string{
"GMT": "Europe/London",
"BST": "Europe/London",
"BSDT": "Europe/London",
"CET": "Europe/Paris",
"UTC": "",
"PST": "America/Los_Angeles",
"PDT": "America/Los_Angeles",
"LA": "America/Los_Angeles",
"LAX": "America/Los_Angeles",
"MST": "America/Denver",
"MDT": "America/Denver",
"CST": "America/Chicago",
"CDT": "America/Chicago",
"Chicago": "America/Chicago",
"EST": "America/New_York",
"EDT": "America/New_York",
"NYC": "America/New_York",
"NY": "America/New_York",
"AEST": "Australia/Sydney",
"AEDT": "Australia/Sydney",
"AWST": "Australia/Perth",
"AWDT": "Australia/Perth",
"ACST": "Australia/Adelaide",
"ACDT": "Australia/Adelaide",
} | format.go | 0.530236 | 0.440229 | format.go | starcoder |
package spec_util
import (
"bytes"
"encoding/base64"
protohash "github.com/akitasoftware/objecthash-proto"
"github.com/golang/protobuf/proto"
"github.com/pkg/errors"
pb "github.com/akitasoftware/akita-ir/go/api_spec"
)
// Given 2 DataTemplates that may contain references to Data in the common
// prefix, return whether the 2 DataTemplates are equivalent.
// Equivalence is defined as identical (modulo non-fixed values) after constant
// propagation, and there's a bijection between the non-fixed values. We can't
// use bijection to define equivalence over fixed values because each fixed
// value has a different meaning. For example, in enum{WRITER, READER}, it would
// be wrong to say WRITER == READER. Note we always treat bool primitives as
// "fixed", so bijection does not apply to bool values.
func EquivalentDataTemplates(sharedPrefix []*pb.MethodTemplate, dt1 *pb.DataTemplate, dt2 *pb.DataTemplate) (bool, error) {
chk := &equivChecker{
sharedPrefix: sharedPrefix,
lToRPrimitiveMap: make(map[string]*pb.Primitive),
rToLPrimitiveMap: make(map[string]*pb.Primitive),
}
return chk.equivalentDataTemplates(dt1, dt2)
}
// equivChecker carries the state for one equivalence comparison: the shared
// method-template prefix used to resolve references, plus the bijection
// between non-fixed primitive values built up as the comparison proceeds.
type equivChecker struct {
	sharedPrefix []*pb.MethodTemplate

	// Maintains bijective mapping from hash(primitive_1) to primitive_2 and vice
	// versa.
	// TODO: We are assuming there are no hash collision. If collision happens,
	// we'll say that 2 equivalent templates are not equivalent.
	lToRPrimitiveMap map[string]*pb.Primitive
	rToLPrimitiveMap map[string]*pb.Primitive
}
// equivalentDataTemplates compares two DataTemplates after resolving their
// top-level references into the shared prefix. Templates of different kinds
// (struct vs list vs value vs ref vs optional) are never equivalent.
func (chk *equivChecker) equivalentDataTemplates(dt1 *pb.DataTemplate, dt2 *pb.DataTemplate) (bool, error) {
	// Unroll DataTemplates to resolve top-level references.
	var err error
	dt1, err = unrollDataTemplate(chk.sharedPrefix, dt1)
	if err != nil {
		return false, errors.Wrapf(err, "unroll DataTemplate failed")
	}
	dt2, err = unrollDataTemplate(chk.sharedPrefix, dt2)
	if err != nil {
		return false, errors.Wrapf(err, "unroll DataTemplate failed")
	}
	// Dispatch on the concrete template kind of dt1 and require dt2 to match.
	switch vt1 := dt1.ValueTemplate.(type) {
	case *pb.DataTemplate_StructTemplate:
		if vt2, ok := dt2.ValueTemplate.(*pb.DataTemplate_StructTemplate); ok {
			return chk.equivalentStructTemplates(vt1.StructTemplate, vt2.StructTemplate)
		} else {
			return false, nil
		}
	case *pb.DataTemplate_ListTemplate:
		if vt2, ok := dt2.ValueTemplate.(*pb.DataTemplate_ListTemplate); ok {
			return chk.equivalentListTemplates(vt1.ListTemplate, vt2.ListTemplate)
		}
		return false, nil
	case *pb.DataTemplate_Value:
		if vt2, ok := dt2.ValueTemplate.(*pb.DataTemplate_Value); ok {
			return chk.equivalentData(vt1.Value, vt2.Value)
		}
		return false, nil
	case *pb.DataTemplate_Ref:
		if vt2, ok := dt2.ValueTemplate.(*pb.DataTemplate_Ref); ok {
			// Since we performed unrolling first, all remaining refs will only refer
			// to responses. Thus we can directly use proto.Equal to compare the refs.
			return proto.Equal(vt1.Ref, vt2.Ref), nil
		}
		return false, nil
	case *pb.DataTemplate_OptionalTemplate:
		if vt2, ok := dt2.ValueTemplate.(*pb.DataTemplate_OptionalTemplate); ok {
			return chk.equivalentDataTemplates(vt1.OptionalTemplate.ValueTemplate, vt2.OptionalTemplate.ValueTemplate)
		}
		return false, nil
	default:
		return false, errors.Errorf("unsupported value_template type %T", vt1)
	}
}
// equivalentStructTemplates returns whether two struct templates are
// equivalent: they must declare the same set of field names, and each pair of
// corresponding field templates must itself be equivalent.
func (chk *equivChecker) equivalentStructTemplates(st1 *pb.StructTemplate, st2 *pb.StructTemplate) (bool, error) {
	// Bug fix: the original only checked that every field of st1 exists in
	// st2, so st2 could carry extra fields and still be reported equivalent.
	// Comparing the field counts first makes the check symmetric.
	if len(st1.GetFieldTemplates()) != len(st2.GetFieldTemplates()) {
		return false, nil
	}
	for fieldName, ft1 := range st1.GetFieldTemplates() {
		ft2, ok := st2.GetFieldTemplates()[fieldName]
		if !ok {
			return false, nil
		}
		eq, err := chk.equivalentDataTemplates(ft1, ft2)
		if err != nil {
			return false, errors.Wrapf(err, "failed to compare struct template field %s", fieldName)
		}
		if !eq {
			return false, nil
		}
	}
	return true, nil
}
// equivalentListTemplates returns whether two list templates have the same
// length and pairwise-equivalent element templates.
func (chk *equivChecker) equivalentListTemplates(lt1 *pb.ListTemplate, lt2 *pb.ListTemplate) (bool, error) {
	if len(lt1.GetElemTemplates()) != len(lt2.GetElemTemplates()) {
		return false, nil
	}
	for i, et1 := range lt1.GetElemTemplates() {
		if eq, err := chk.equivalentDataTemplates(et1, lt2.GetElemTemplates()[i]); err != nil {
			return false, errors.Wrapf(err, "failed to compare list template element %d", i)
		} else if !eq {
			return false, nil
		}
	}
	return true, nil
}
// equivalentData compares two concrete Data values for equivalence. Values of
// different kinds (primitive vs struct vs list vs optional) are never
// equivalent. (The original version had an unreachable "return false, nil"
// after the switch — every branch already returns — which is removed here.)
func (chk *equivChecker) equivalentData(d1 *pb.Data, d2 *pb.Data) (bool, error) {
	switch v1 := d1.Value.(type) {
	case *pb.Data_Primitive:
		if v2, ok := d2.Value.(*pb.Data_Primitive); ok {
			return chk.equivalentPrimitives(v1.Primitive, v2.Primitive)
		}
		return false, nil
	case *pb.Data_Struct:
		if v2, ok := d2.Value.(*pb.Data_Struct); ok {
			return chk.equivalentStructs(v1.Struct, v2.Struct)
		}
		return false, nil
	case *pb.Data_List:
		if v2, ok := d2.Value.(*pb.Data_List); ok {
			return chk.equivalentLists(v1.List, v2.List)
		}
		return false, nil
	case *pb.Data_Optional:
		if v2, ok := d2.Value.(*pb.Data_Optional); ok {
			return chk.equivalentOptionals(v1.Optional, v2.Optional)
		}
		return false, nil
	default:
		return false, errors.Errorf("unsupported data type %T", v1)
	}
}
// equivalentPrimitives compares two primitives. Both must carry the same type
// hint; fixed values must be exactly equal, while non-fixed values only need
// to respect the running bijection maintained by the checker.
func (chk *equivChecker) equivalentPrimitives(p1 *pb.Primitive, p2 *pb.Primitive) (bool, error) {
	if p1.GetTypeHint() != p2.GetTypeHint() {
		return false, nil
	}
	fixed1, fixed2 := containsFixedValue(p1), containsFixedValue(p2)
	if fixed1 != fixed2 {
		return false, nil
	}
	if fixed1 {
		return proto.Equal(p1, p2), nil
	}
	return chk.isBijective(p1, p2)
}
// isBijective records the pairing (p1 <-> p2) in the checker's two maps and
// reports whether it is consistent with every pairing seen so far, i.e.
// whether the mapping between left and right primitives remains a bijection.
func (chk *equivChecker) isBijective(p1 *pb.Primitive, p2 *pb.Primitive) (bool, error) {
	hash1, err := hashPrimitive(p1)
	if err != nil {
		return false, err
	}
	hash2, err := hashPrimitive(p2)
	if err != nil {
		return false, err
	}
	// checkOrInsert returns true if key is unseen (and records val for it) or
	// if the previously recorded value equals val; false on any conflict.
	checkOrInsert := func(m map[string]*pb.Primitive, key string, val *pb.Primitive) bool {
		if v, ok := m[key]; ok {
			return proto.Equal(v, val)
		} else {
			m[key] = val
			return true
		}
	}
	return checkOrInsert(chk.lToRPrimitiveMap, hash1, p2) && checkOrInsert(chk.rToLPrimitiveMap, hash2, p1), nil
}
// equivalentStructs returns whether two structs have the same set of field
// names with pairwise-equivalent field values.
func (chk *equivChecker) equivalentStructs(s1 *pb.Struct, s2 *pb.Struct) (bool, error) {
	// Bug fix: the original only checked s1's fields against s2, so s2 could
	// carry extra fields and still be reported equivalent. Comparing the
	// field counts first makes the check symmetric.
	if len(s1.GetFields()) != len(s2.GetFields()) {
		return false, nil
	}
	for fieldName, f1 := range s1.GetFields() {
		f2, ok := s2.GetFields()[fieldName]
		if !ok {
			return false, nil
		}
		eq, err := chk.equivalentData(f1, f2)
		if err != nil {
			return false, errors.Wrapf(err, "failed to compare struct field %s", fieldName)
		}
		if !eq {
			return false, nil
		}
	}
	return true, nil
}
// equivalentLists returns whether two lists have the same length and
// pairwise-equivalent elements.
func (chk *equivChecker) equivalentLists(l1 *pb.List, l2 *pb.List) (bool, error) {
	if len(l1.GetElems()) != len(l2.GetElems()) {
		return false, nil
	}
	for i, e1 := range l1.GetElems() {
		if eq, err := chk.equivalentData(e1, l2.GetElems()[i]); err != nil {
			return false, errors.Wrapf(err, "failed to compare list elem %d", i)
		} else if !eq {
			return false, nil
		}
	}
	return true, nil
}
// equivalentOptionals compares two optional values: two Nones are equivalent,
// two present values are compared as Data, and a None is never equivalent to
// a present value.
func (chk *equivChecker) equivalentOptionals(o1 *pb.Optional, o2 *pb.Optional) (bool, error) {
	switch v1 := o1.Value.(type) {
	case *pb.Optional_Data:
		if v2, ok := o2.Value.(*pb.Optional_Data); ok {
			return chk.equivalentData(v1.Data, v2.Data)
		}
		return false, nil
	case *pb.Optional_None:
		_, ok := o2.Value.(*pb.Optional_None)
		return ok, nil
	default:
		return false, errors.Errorf("unsupported optional value type %T", v1)
	}
}
// containsFixedValue reports whether the primitive's concrete value is one of
// the fixed values declared by its type (bools are always treated as fixed
// because the domain has only 2 values). Unknown primitive kinds return false.
// NOTE(review): float/double use exact equality against the declared fixed
// values; presumably fixed values are exact literals, so this is intentional.
func containsFixedValue(p *pb.Primitive) bool {
	switch pv := p.Value.(type) {
	case *pb.Primitive_BoolValue:
		// We always consider bool as fixed because it's inherently limited to 2
		// values.
		return true
	case *pb.Primitive_BytesValue:
		v := pv.BytesValue.GetValue()
		for _, fv := range pv.BytesValue.GetType().GetFixedValues() {
			if bytes.Equal(fv, v) {
				return true
			}
		}
	case *pb.Primitive_StringValue:
		v := pv.StringValue.GetValue()
		for _, fv := range pv.StringValue.GetType().GetFixedValues() {
			if fv == v {
				return true
			}
		}
	case *pb.Primitive_Int32Value:
		v := pv.Int32Value.GetValue()
		for _, fv := range pv.Int32Value.GetType().GetFixedValues() {
			if fv == v {
				return true
			}
		}
	case *pb.Primitive_Int64Value:
		v := pv.Int64Value.GetValue()
		for _, fv := range pv.Int64Value.GetType().GetFixedValues() {
			if fv == v {
				return true
			}
		}
	case *pb.Primitive_Uint32Value:
		v := pv.Uint32Value.GetValue()
		for _, fv := range pv.Uint32Value.GetType().GetFixedValues() {
			if fv == v {
				return true
			}
		}
	case *pb.Primitive_Uint64Value:
		v := pv.Uint64Value.GetValue()
		for _, fv := range pv.Uint64Value.GetType().GetFixedValues() {
			if fv == v {
				return true
			}
		}
	case *pb.Primitive_DoubleValue:
		v := pv.DoubleValue.GetValue()
		for _, fv := range pv.DoubleValue.GetType().GetFixedValues() {
			if fv == v {
				return true
			}
		}
	case *pb.Primitive_FloatValue:
		v := pv.FloatValue.GetValue()
		for _, fv := range pv.FloatValue.GetType().GetFixedValues() {
			if fv == v {
				return true
			}
		}
	}
	return false
}
// hashPrimitive returns a base64-encoded hash of the primitive proto, used as
// a map key by isBijective.
func hashPrimitive(p *pb.Primitive) (string, error) {
	// Use FNV1-a as the hash function since it's faster and we don't need
	// cryptographically secure hash.
	protoHasher := protohash.NewHasher(protohash.BasicHashFunction(protohash.FNV1A_128))
	if protoBytes, err := protoHasher.HashProto(p); err != nil {
		return "", errors.Wrap(err, "hashPrimitive failed")
	} else {
		return base64.StdEncoding.EncodeToString(protoBytes), nil
	}
}
// If DataTemplate is a reference, resolves it into one of
// - template
// - constant value
// - ref to response
// Note that there could still be references nested inside templates (i.e.
// StructTemplate).
// This function is intended to be a helper function for checking data
// templates for equivalence, hence the preservation of response refs.
func unrollDataTemplate(prefix []*pb.MethodTemplate, dt *pb.DataTemplate) (*pb.DataTemplate, error) {
	switch vt := dt.ValueTemplate.(type) {
	case *pb.DataTemplate_Ref:
		return unrollMethodDataRef(prefix, dt, vt.Ref)
	default:
		// Anything that is not a top-level ref is already in resolved form.
		return dt, nil
	}
}
// unrollMethodDataRef resolves a reference to data in the shared prefix. Arg
// references are resolved recursively through the referenced method's arg
// template; response references are preserved as-is (see unrollDataTemplate).
func unrollMethodDataRef(prefix []*pb.MethodTemplate, dt *pb.DataTemplate, r *pb.MethodDataRef) (*pb.DataTemplate, error) {
	if r.GetMethodIndex() < 0 || r.GetMethodIndex() >= int32(len(prefix)) {
		// Fixed typo in message: "index of out range" -> "index out of range".
		return nil, errors.Errorf("unrollMethodDataRef index out of range index=%d len=%d", r.GetMethodIndex(), len(prefix))
	}
	mt := prefix[r.GetMethodIndex()]
	switch ref := r.Ref.(type) {
	case *pb.MethodDataRef_ArgRef:
		namedRef := ref.ArgRef
		arg, ok := mt.GetArgTemplates()[namedRef.GetKey()]
		if !ok {
			return nil, errors.Errorf("no such argument %s", namedRef.GetKey())
		}
		t, err := unrollDataRef(prefix, arg, namedRef.GetDataRef())
		if err != nil {
			return nil, errors.Wrapf(err, "failed to resolve reference to arg %s", namedRef.GetKey())
		}
		return t, nil
	case *pb.MethodDataRef_ResponseRef:
		// Return DataTemplate directly to avoid memory allocation.
		return dt, nil
	default:
		// Fixed typo: "unsuppported" -> "unsupported".
		return nil, errors.Errorf("unsupported MethodDataRef type %T", ref)
	}
}
// unrollDataRef resolves a DataRef against a DataTemplate, descending through
// struct/list/optional templates and following nested method references.
func unrollDataRef(prefix []*pb.MethodTemplate, dt *pb.DataTemplate, ref *pb.DataRef) (*pb.DataTemplate, error) {
	switch v := dt.ValueTemplate.(type) {
	case *pb.DataTemplate_Value:
		data, err := GetDataRef(ref, v.Value)
		if err != nil {
			return nil, errors.Wrapf(err, "failed to resolve reference into constant value")
		}
		// This extra memory allocation makes the interface nicer by only having
		// to worry about comparing DataTemplates. If this is too costly, we can
		// do more plumbing to return either a DataTemplate or a raw Data proto.
		// Keyed composite literal so `go vet` does not flag unkeyed fields.
		return &pb.DataTemplate{
			ValueTemplate: &pb.DataTemplate_Value{Value: data},
		}, nil
	case *pb.DataTemplate_StructTemplate:
		r, ok := ref.ValueRef.(*pb.DataRef_StructRef)
		if !ok {
			return nil, errors.Errorf("got value_ref type %T for struct template", ref.ValueRef)
		}
		return unrollStructRef(prefix, dt, v.StructTemplate, r.StructRef)
	case *pb.DataTemplate_ListTemplate:
		r, ok := ref.ValueRef.(*pb.DataRef_ListRef)
		if !ok {
			return nil, errors.Errorf("got value_ref type %T for list template", ref.ValueRef)
		}
		return unrollListRef(prefix, dt, v.ListTemplate, r.ListRef)
	case *pb.DataTemplate_OptionalTemplate:
		return unrollDataRef(prefix, v.OptionalTemplate.ValueTemplate, ref)
	case *pb.DataTemplate_Ref:
		return unrollMethodDataRef(prefix, dt, v.Ref)
	default:
		return nil, errors.Errorf("unsupported value_template type %T", v)
	}
}
// unrollStructRef resolves a StructRef against a StructTemplate: either the
// full struct (the template itself) or a single named field looked up in the
// field templates.
func unrollStructRef(prefix []*pb.MethodTemplate, dt *pb.DataTemplate, s *pb.StructTemplate, ref *pb.StructRef) (*pb.DataTemplate, error) {
	switch r := ref.Ref.(type) {
	case *pb.StructRef_FullStruct:
		return dt, nil
	case *pb.StructRef_FieldRef:
		if field, ok := s.GetFieldTemplates()[r.FieldRef.GetKey()]; ok {
			return unrollDataRef(prefix, field, r.FieldRef.GetDataRef())
		} else {
			return nil, errors.Errorf("StructTemplate does not contain field %s", r.FieldRef.GetKey())
		}
	default:
		return nil, errors.Errorf("unsupported StructRef type %T", r)
	}
}
// unrollListRef resolves a ListRef against a ListTemplate: either the full
// list (the template itself) or a single element looked up by index.
func unrollListRef(prefix []*pb.MethodTemplate, dt *pb.DataTemplate, l *pb.ListTemplate, ref *pb.ListRef) (*pb.DataTemplate, error) {
	switch r := ref.Ref.(type) {
	case *pb.ListRef_FullList:
		return dt, nil
	case *pb.ListRef_ElemRef:
		if r.ElemRef.GetIndex() < 0 || r.ElemRef.GetIndex() >= int32(len(l.ElemTemplates)) {
			return nil, errors.Errorf("out of bounds on ListTemplate index=%d len=%d", r.ElemRef.GetIndex(), len(l.ElemTemplates))
		}
		return unrollDataRef(prefix, l.ElemTemplates[r.ElemRef.GetIndex()], r.ElemRef.GetDataRef())
	default:
		// Fixed copy-paste bug: the error used to say "StructRef" here.
		return nil, errors.Errorf("unsupported ListRef type %T", r)
	}
} | spec_util/equiv.go | 0.621311 | 0.412353 | equiv.go | starcoder
/*
This file contains the implementation of the ZKRP scheme proposed in the paper:
Efficient Protocols for Set Membership and Range Proofs
<NAME>, <NAME>, <NAME>
Asiacrypt 2008
*/
package ccs08
import (
"bytes"
"crypto/rand"
"errors"
"math"
"math/big"
"strconv"
"github.com/ing-bank/zkrp/crypto/bbsignatures"
"github.com/ing-bank/zkrp/crypto/bn256"
. "github.com/ing-bank/zkrp/util"
"github.com/ing-bank/zkrp/util/bn"
"github.com/ing-bank/zkrp/util/intconversion"
)
/*
paramsSet contains elements generated by the verifier, which are necessary for the prover.
This must be computed in a trusted setup.
*/
type paramsSet struct {
	// signatures maps each set element to the verifier's BB signature on it.
	signatures map[int64]*bn256.G2
	// H is the second commitment base (see Issue #12 note in SetupSet).
	H *bn256.G2
	// kp is the verifier's BB signature keypair.
	kp bbsignatures.Keypair
	// u determines the amount of signatures we need in the public params.
	// Each signature can be compressed to just 1 field element of 256 bits.
	// Then the parameters have minimum size equal to 256*u bits.
	// l determines how many pairings we need to compute, then in order to improve
	// verifier`s performance we want to minize it.
	// Namely, we have 2*l pairings for the prover and 3*l for the verifier.
}

/*
paramsUL contains elements generated by the verifier, which are necessary for the prover.
This must be computed in a trusted setup.
*/
type paramsUL struct {
	// signatures maps each digit 0..u-1 (as a decimal string) to its signature.
	signatures map[string]*bn256.G2
	H          *bn256.G2
	kp         bbsignatures.Keypair
	// u determines the amount of signatures we need in the public params.
	// Each signature can be compressed to just 1 field element of 256 bits.
	// Then the parameters have minimum size equal to 256*u bits.
	// l determines how many pairings we need to compute, then in order to improve
	// verifier`s performance we want to minize it.
	// Namely, we have 2*l pairings for the prover and 3*l for the verifier.
	u, l int64
}

/*
proofSet contains the necessary elements for the ZK Set Membership proof.
V is the blinded signature, C the commitment to the secret, D and a the
sigma-protocol commitments, c the Fiat-Shamir challenge, and the z* values
the responses.
*/
type proofSet struct {
	V              *bn256.G2
	D, C           *bn256.G2
	a              *bn256.GT
	s, t, zsig, zv *big.Int
	c, m, zr       *big.Int
}

/*
proofUL contains the necessary elements for the ZK proof.
Same layout as proofSet, but with one V/a/s/t/zsig/zv entry per base-u digit.
*/
type proofUL struct {
	V              []*bn256.G2
	D, C           *bn256.G2
	a              []*bn256.GT
	s, t, zsig, zv []*big.Int
	c, m, zr       *big.Int
}
/*
SetupSet generates the signature for the elements in the set.
This must be run by the verifier in a trusted setup; the resulting paramsSet
is the public parameter set handed to the prover.
*/
func SetupSet(s []int64) (paramsSet, error) {
	var p paramsSet
	kp, err := bbsignatures.Keygen()
	if err != nil {
		// Propagate setup failures instead of silently ignoring them as the
		// original did.
		return p, err
	}
	p.kp = kp
	p.signatures = make(map[int64]*bn256.G2, len(s))
	for _, elem := range s {
		// elem is already int64; the original redundantly re-converted it.
		sig, err := bbsignatures.Sign(new(big.Int).SetInt64(elem), p.kp.Privk)
		if err != nil {
			return p, err
		}
		p.signatures[elem] = sig
	}
	// Issue #12: p.H must be computed using MapToPoint method.
	h := intconversion.BigFromBase10("18560948149108576432482904553159745978835170526553990798435819795989606410925")
	p.H = new(bn256.G2).ScalarBaseMult(h)
	return p, nil
}
/*
SetupUL generates the signature for the interval [0,u^l).
The value of u should be roughly b/log(b), but we can choose smaller values in
order to get smaller parameters, at the cost of having worse performance.
*/
func SetupUL(u, l int64) (paramsUL, error) {
	var (
		i int64
		p paramsUL
	)
	// NOTE(review): Keygen/Sign errors are silently ignored here — confirm
	// whether they can actually fail in this library.
	p.kp, _ = bbsignatures.Keygen()

	p.signatures = make(map[string]*bn256.G2)
	// One signature per possible base-u digit value 0..u-1, keyed by its
	// decimal string representation.
	for i = 0; i < u; i++ {
		sig_i, _ := bbsignatures.Sign(new(big.Int).SetInt64(i), p.kp.Privk)
		p.signatures[strconv.FormatInt(i, 10)] = sig_i
	}
	// Issue #12: p.H must be computed using MapToPoint method.
	h := intconversion.BigFromBase10("18560948149108576432482904553159745978835170526553990798435819795989606410925")
	p.H = new(bn256.G2).ScalarBaseMult(h)
	p.u = u
	p.l = l
	return p, nil
}
/*
ProveSet method is used to produce the ZK Set Membership proof.
x is the secret element, r the blinding factor of the commitment C, and p the
public parameters from SetupSet. The proof is made non-interactive via the
Fiat-Shamir heuristic.
*/
func ProveSet(x int64, r *big.Int, p paramsSet) (proofSet, error) {
	var (
		v         *big.Int
		proof_out proofSet
	)
	// Initialize variables
	proof_out.D = new(bn256.G2)
	proof_out.D.SetInfinity()
	proof_out.m, _ = rand.Int(rand.Reader, bn256.Order)
	v, _ = rand.Int(rand.Reader, bn256.Order)
	// A is the verifier's signature on x; absence means x is not in the set.
	A, ok := p.signatures[x]
	if !ok {
		return proof_out, errors.New("Could not generate proof. Element does not belong to the interval.")
	}
	// D = g^s.H^m
	D := new(bn256.G2).ScalarMult(p.H, proof_out.m)
	proof_out.s, _ = rand.Int(rand.Reader, bn256.Order)
	aux := new(bn256.G2).ScalarBaseMult(proof_out.s)
	D.Add(D, aux)
	// V = A^v blinds the signature with fresh randomness v.
	proof_out.V = new(bn256.G2).ScalarMult(A, v)
	proof_out.t, _ = rand.Int(rand.Reader, bn256.Order)
	// a = e(g1,V)^-s . e(g,g)^t : the pairing commitment of the sigma protocol.
	proof_out.a = bn256.Pair(G1, proof_out.V)
	proof_out.a.ScalarMult(proof_out.a, proof_out.s)
	proof_out.a.Invert(proof_out.a)
	proof_out.a.Add(proof_out.a, new(bn256.GT).ScalarMult(E, proof_out.t))
	proof_out.D.Add(proof_out.D, D)
	// Consider passing C as input,
	// so that it is possible to delegate the commitment computation to an external party.
	proof_out.C, _ = Commit(new(big.Int).SetInt64(x), r, p.H)
	// Fiat-Shamir heuristic
	proof_out.c, _ = HashSet(proof_out.a, proof_out.D)
	proof_out.c = bn.Mod(proof_out.c, bn256.Order)
	// Responses: z = blinding - challenge * secret (mod group order).
	proof_out.zr = bn.Sub(proof_out.m, bn.Multiply(r, proof_out.c))
	proof_out.zr = bn.Mod(proof_out.zr, bn256.Order)
	proof_out.zsig = bn.Sub(proof_out.s, bn.Multiply(new(big.Int).SetInt64(x), proof_out.c))
	proof_out.zsig = bn.Mod(proof_out.zsig, bn256.Order)
	proof_out.zv = bn.Sub(proof_out.t, bn.Multiply(v, proof_out.c))
	proof_out.zv = bn.Mod(proof_out.zv, bn256.Order)
	return proof_out, nil
}
/*
ProveUL method is used to produce the ZKRP proof that secret x belongs to the interval [0,U^L].
x is decomposed into l base-u digits; each digit is proven to be in [0,u)
using the corresponding signature from the public parameters.
*/
func ProveUL(x, r *big.Int, p paramsUL) (proofUL, error) {
	var (
		i         int64
		v         []*big.Int
		proof_out proofUL
	)
	// decx holds the base-u digits of x (least significant first, per Decompose).
	decx, _ := Decompose(x, p.u, p.l)

	// Initialize variables
	v = make([]*big.Int, p.l)
	proof_out.V = make([]*bn256.G2, p.l)
	proof_out.a = make([]*bn256.GT, p.l)
	proof_out.s = make([]*big.Int, p.l)
	proof_out.t = make([]*big.Int, p.l)
	proof_out.zsig = make([]*big.Int, p.l)
	proof_out.zv = make([]*big.Int, p.l)
	proof_out.D = new(bn256.G2)
	proof_out.D.SetInfinity()
	proof_out.m, _ = rand.Int(rand.Reader, bn256.Order)

	// D = H^m
	D := new(bn256.G2).ScalarMult(p.H, proof_out.m)
	// One blinded signature and pairing commitment per digit; D accumulates
	// g^(s_i * u^i) so it commits to the digits in their positional weights.
	for i = 0; i < p.l; i++ {
		v[i], _ = rand.Int(rand.Reader, bn256.Order)
		A, ok := p.signatures[strconv.FormatInt(decx[i], 10)]
		if ok {
			proof_out.V[i] = new(bn256.G2).ScalarMult(A, v[i])
			proof_out.s[i], _ = rand.Int(rand.Reader, bn256.Order)
			proof_out.t[i], _ = rand.Int(rand.Reader, bn256.Order)
			proof_out.a[i] = bn256.Pair(G1, proof_out.V[i])
			proof_out.a[i].ScalarMult(proof_out.a[i], proof_out.s[i])
			proof_out.a[i].Invert(proof_out.a[i])
			proof_out.a[i].Add(proof_out.a[i], new(bn256.GT).ScalarMult(E, proof_out.t[i]))

			ui := new(big.Int).Exp(new(big.Int).SetInt64(p.u), new(big.Int).SetInt64(i), nil)
			muisi := new(big.Int).Mul(proof_out.s[i], ui)
			muisi = bn.Mod(muisi, bn256.Order)
			aux := new(bn256.G2).ScalarBaseMult(muisi)
			D.Add(D, aux)
		} else {
			return proof_out, errors.New("Could not generate proof. Element does not belong to the interval.")
		}
	}
	proof_out.D.Add(proof_out.D, D)

	// Consider passing C as input,
	// so that it is possible to delegate the commitment computation to an external party.
	proof_out.C, _ = Commit(x, r, p.H)
	// Fiat-Shamir heuristic
	proof_out.c, _ = Hash(proof_out.a, proof_out.D)
	proof_out.c = bn.Mod(proof_out.c, bn256.Order)

	proof_out.zr = bn.Sub(proof_out.m, bn.Multiply(r, proof_out.c))
	proof_out.zr = bn.Mod(proof_out.zr, bn256.Order)
	// Per-digit responses: z = blinding - challenge * secret (mod group order).
	for i = 0; i < p.l; i++ {
		proof_out.zsig[i] = bn.Sub(proof_out.s[i], bn.Multiply(new(big.Int).SetInt64(decx[i]), proof_out.c))
		proof_out.zsig[i] = bn.Mod(proof_out.zsig[i], bn256.Order)
		proof_out.zv[i] = bn.Sub(proof_out.t[i], bn.Multiply(v[i], proof_out.c))
		proof_out.zv[i] = bn.Mod(proof_out.zv[i], bn256.Order)
	}
	return proof_out, nil
}
/*
VerifySet is used to validate the ZK Set Membership proof. It returns true iff the proof is valid.
*/
func VerifySet(proof_out *proofSet, p *paramsSet) (bool, error) {
	var (
		D      *bn256.G2
		r1, r2 bool
		p1, p2 *bn256.GT
	)
	// D == C^c.h^ zr.g^zsig ?
	D = new(bn256.G2).ScalarMult(proof_out.C, proof_out.c)
	D.Add(D, new(bn256.G2).ScalarMult(p.H, proof_out.zr))
	aux := new(bn256.G2).ScalarBaseMult(proof_out.zsig)
	D.Add(D, aux)
	// Points are compared by their marshaled byte representation.
	DBytes := D.Marshal()
	pDBytes := proof_out.D.Marshal()
	r1 = bytes.Equal(DBytes, pDBytes)

	r2 = true
	// a == [e(V,y)^c].[e(V,g)^-zsig].[e(g,g)^zv]
	p1 = bn256.Pair(p.kp.Pubk, proof_out.V)
	p1.ScalarMult(p1, proof_out.c)
	p2 = bn256.Pair(G1, proof_out.V)
	p2.ScalarMult(p2, proof_out.zsig)
	p2.Invert(p2)
	p1.Add(p1, p2)
	p1.Add(p1, new(bn256.GT).ScalarMult(E, proof_out.zv))

	pBytes := p1.Marshal()
	aBytes := proof_out.a.Marshal()
	r2 = r2 && bytes.Equal(pBytes, aBytes)
	return r1 && r2, nil
}
/*
VerifyUL is used to validate the ZKRP proof. It returns true iff the proof is valid.
The same two checks as VerifySet are performed, but the pairing equation is
checked once per base-u digit.
*/
func VerifyUL(proof_out *proofUL, p *paramsUL) (bool, error) {
	var (
		i      int64
		D      *bn256.G2
		r1, r2 bool
		p1, p2 *bn256.GT
	)
	// D == C^c.h^ zr.g^zsig ?
	D = new(bn256.G2).ScalarMult(proof_out.C, proof_out.c)
	D.Add(D, new(bn256.G2).ScalarMult(p.H, proof_out.zr))
	// Recombine the per-digit responses with their positional weights u^i.
	for i = 0; i < p.l; i++ {
		ui := new(big.Int).Exp(new(big.Int).SetInt64(p.u), new(big.Int).SetInt64(i), nil)
		muizsigi := new(big.Int).Mul(proof_out.zsig[i], ui)
		muizsigi = bn.Mod(muizsigi, bn256.Order)
		aux := new(bn256.G2).ScalarBaseMult(muizsigi)
		D.Add(D, aux)
	}
	// Points are compared by their marshaled byte representation.
	DBytes := D.Marshal()
	pDBytes := proof_out.D.Marshal()
	r1 = bytes.Equal(DBytes, pDBytes)

	r2 = true
	for i = 0; i < p.l; i++ {
		// a == [e(V,y)^c].[e(V,g)^-zsig].[e(g,g)^zv]
		p1 = bn256.Pair(p.kp.Pubk, proof_out.V[i])
		p1.ScalarMult(p1, proof_out.c)
		p2 = bn256.Pair(G1, proof_out.V[i])
		p2.ScalarMult(p2, proof_out.zsig[i])
		p2.Invert(p2)
		p1.Add(p1, p2)
		p1.Add(p1, new(bn256.GT).ScalarMult(E, proof_out.zv[i]))

		pBytes := p1.Marshal()
		aBytes := proof_out.a[i].Marshal()
		r2 = r2 && bytes.Equal(pBytes, aBytes)
	}
	return r1 && r2, nil
}
/*
proof contains the necessary elements for the ZK proof.
The range proof for [a,b) is the conjunction of two [0,u^l) proofs (see Prove).
*/
type proof struct {
	p1, p2 proofUL
}

/*
params contains elements generated by the verifier, which are necessary for the prover.
This must be computed in a trusted setup.
*/
type params struct {
	p    *paramsUL
	a, b int64
}

// ccs08 bundles the public parameters, the secret x with its commitment
// randomness r, and the resulting proof for the CCS08 range-proof scheme.
type ccs08 struct {
	p         *params
	x, r      *big.Int
	proof_out proof
}
/*
SetupInnerProduct receives integers a and b, and configures the parameters for the rangeproof scheme.
*/
func (zkrp *ccs08) Setup(a, b int64) error {
// Compute optimal values for u and l
var (
u, l int64
logb float64
p *params
)
if a > b {
zkrp.p = nil
return errors.New("a must be less than or equal to b")
}
p = new(params)
logb = math.Log(float64(b))
if logb != 0 {
// u = b / int64(logb)
u = 57
if u != 0 {
l = 0
for i := b; i > 0; i = i / u {
l = l + 1
}
params_out, e := SetupUL(u, l)
p.p = ¶ms_out
p.a = a
p.b = b
zkrp.p = p
return e
} else {
zkrp.p = nil
return errors.New("u is zero")
}
} else {
zkrp.p = nil
return errors.New("log(b) is zero")
}
}
/*
Prove method is responsible for generating the zero knowledge proof.
x in [a,b) is proven by showing x-b+u^l and x-a both lie in [0,u^l).
*/
func (zkrp *ccs08) Prove() error {
	ul := new(big.Int).Exp(new(big.Int).SetInt64(zkrp.p.p.u), new(big.Int).SetInt64(zkrp.p.p.l), nil)

	// x - b + u^l
	xb := new(big.Int).Sub(zkrp.x, new(big.Int).SetInt64(zkrp.p.b))
	xb.Add(xb, ul)
	// Bug fix: the original discarded ProveUL's errors, so an out-of-range x
	// silently produced an invalid proof. Propagate them instead.
	first, err := ProveUL(xb, zkrp.r, *zkrp.p.p)
	if err != nil {
		return err
	}

	// x - a
	xa := new(big.Int).Sub(zkrp.x, new(big.Int).SetInt64(zkrp.p.a))
	second, err := ProveUL(xa, zkrp.r, *zkrp.p.p)
	if err != nil {
		return err
	}

	zkrp.proof_out.p1 = first
	zkrp.proof_out.p2 = second
	return nil
}
/*
Verify is responsible for validating the proof.
Both sub-proofs must verify for the range proof to be valid.
*/
func (zkrp *ccs08) Verify() (bool, error) {
	// Bug fix: the original discarded VerifyUL's errors and always returned
	// nil. Propagate them instead.
	first, err := VerifyUL(&zkrp.proof_out.p1, zkrp.p.p)
	if err != nil {
		return false, err
	}
	second, err := VerifyUL(&zkrp.proof_out.p2, zkrp.p.p)
	if err != nil {
		return false, err
	}
	return first && second, nil
} | crypto/vendor/ing-bank/zkrp/ccs08/ccs08.go | 0.793426 | 0.584686 | ccs08.go | starcoder
package parcom
import "strings"
// Char creates a parser that matches exactly the rune r, consuming it and
// returning it on success.
func (s *State) Char(r rune) Parser {
	return func() (interface{}, error) {
		if s.currentRune() == r {
			s.readRune()
			return r, nil
		}
		return nil, newInvalidCharacterError(s)
	}
}
// NotChar creates a parser that matches any single rune except r.
func (s *State) NotChar(r rune) Parser {
	return s.NotChars(string([]rune{r}))
}
// Chars creates a parser that matches any one of the runes in cs, consuming
// and returning the matched rune.
func (s *State) Chars(cs string) Parser {
	allowed := stringToRuneSet(cs)
	return func() (interface{}, error) {
		r := s.currentRune()
		if _, ok := allowed[r]; !ok {
			return nil, newInvalidCharacterError(s)
		}
		s.readRune()
		return r, nil
	}
}
// NotChars creates a parser that matches any single rune NOT contained in
// str, consuming and returning the matched rune.
func (s *State) NotChars(str string) Parser {
	forbidden := stringToRuneSet(str)
	return func() (interface{}, error) {
		r := s.currentRune()
		if _, ok := forbidden[r]; ok {
			return nil, newInvalidCharacterError(s)
		}
		s.readRune()
		return r, nil
	}
}
// Str creates a parser that matches the literal string str rune by rune and
// returns the matched string.
func (s *State) Str(str string) Parser {
	var charParsers []Parser
	for _, r := range str {
		charParsers = append(charParsers, s.Char(r))
	}
	return s.Stringify(s.And(charParsers...))
}
// Wrap wraps the parser m with the parsers l and r, which run before and
// after it; the combined parser's result is m's result alone.
func (s *State) Wrap(l, m, r Parser) Parser {
	seq := s.And(l, m, r)
	return func() (interface{}, error) {
		res, err := seq()
		if err != nil {
			return nil, err
		}
		// And returns [l-result, m-result, r-result]; keep only the middle.
		return res.([]interface{})[1], nil
	}
}
// Prefix creates a parser with 2 parsers which returns the second one's result
// (p runs first and its result is discarded).
func (s *State) Prefix(p, q Parser) Parser {
	return s.Wrap(p, q, s.None())
}

// Suffix creates a parser with 2 parsers which returns the first one's result
// (q runs after p and its result is discarded).
func (s *State) Suffix(p, q Parser) Parser {
	return s.Wrap(s.None(), p, q)
}
// Many creates a parser of more than or equal to 0 repetition of a given parser.
// It never fails: on the first failure of p the state is rolled back to just
// before that attempt and the results collected so far are returned.
func (s *State) Many(p Parser) Parser {
	return func() (interface{}, error) {
		xs := []interface{}{}
		for {
			// Snapshot the state so a failed attempt can be backtracked.
			ss := *s
			x, err := p()
			if err != nil {
				*s = ss
				break
			}
			xs = append(xs, x)
		}
		return xs, nil
	}
}
// Many1 creates a parser of more than 0 repetition of a given parser.
// It requires at least one successful parse of p, then greedily collects the
// rest via Many (which itself never fails).
func (s *State) Many1(p Parser) Parser {
	pp := s.Many(p)
	return func() (interface{}, error) {
		x, err := p()
		if err != nil {
			return nil, err
		}
		y, err := pp()
		if err != nil {
			return nil, err
		}
		// Prepend the mandatory first result to the rest.
		return append([]interface{}{x}, y.([]interface{})...), nil
	}
}
// ExhaustiveMany creates a parser of more than or equal to 0 repetition of a given
// parser which continues parsing until a source is exhausted.
// Unlike Many, a failure of p is propagated as an error instead of being
// backtracked.
func (s *State) ExhaustiveMany(p Parser) Parser {
	return func() (interface{}, error) {
		xs := []interface{}{}
		for !s.exhausted() {
			x, err := p()
			if err != nil {
				return nil, err
			}
			xs = append(xs, x)
		}
		return xs, nil
	}
}
// Or creates a selectional parser from given parsers. The alternatives are
// tried in order with full backtracking; the first success wins. On total
// failure, the error of the last alternative is returned.
func (s *State) Or(ps ...Parser) Parser {
	return func() (interface{}, error) {
		err := error(nil)
		// Snapshot taken once; the state is restored before each retry.
		ss := *s
		for _, p := range ps {
			var x interface{}
			x, err = p()
			if err == nil {
				return x, nil
			}
			*s = ss
		}
		return nil, err
	}
}
// And creates a parser that runs the given parsers in sequence and returns a
// slice of their results; the first failure aborts the whole sequence.
func (s *State) And(ps ...Parser) Parser {
	return func() (interface{}, error) {
		results := make([]interface{}, len(ps))
		for i, p := range ps {
			x, err := p()
			if err != nil {
				return nil, err
			}
			results[i] = x
		}
		return results, nil
	}
}
// Lazy creates a parser which runs a parser created by a given constructor.
// This combinator is useful to define recursive parsers.
// The constructed parser is memoized on first use.
// NOTE(review): the memoization is not goroutine-safe — confirm parsers are
// only used from a single goroutine.
func (s *State) Lazy(f func() Parser) Parser {
	p := Parser(nil)
	return func() (interface{}, error) {
		if p == nil {
			p = f()
		}
		return p()
	}
}
// Void creates a parser whose result is always nil but parses something from
// a given parser. Errors from p are still propagated.
// Consistency fix: every other combinator uses a *State receiver; the value
// receiver here needlessly copied the State.
func (*State) Void(p Parser) Parser {
	return func() (interface{}, error) {
		_, err := p()
		return nil, err
	}
}
// Exhaust creates a parser which fails when a source string is not exhausted
// after running a given parser. On success it returns p's result.
func (s *State) Exhaust(p Parser) Parser {
	return func() (interface{}, error) {
		x, err := p()
		if err != nil {
			return nil, err
		} else if !s.exhausted() {
			return nil, NewError("source not exhausted", s)
		}
		return x, nil
	}
}
// App creates a parser that applies the function f to the result of p;
// f's error (or p's) is propagated unchanged.
func (s *State) App(f func(interface{}) (interface{}, error), p Parser) Parser {
	return func() (interface{}, error) {
		x, err := p()
		if err == nil {
			return f(x)
		}
		return nil, err
	}
}
// None creates a parser which parses nothing and succeeds always, returning
// a nil result.
func (s *State) None() Parser {
	return func() (interface{}, error) {
		return nil, nil
	}
}

// Maybe creates a parser which runs a given parser or parses nothing when it
// fails. The result is p's result on success, or nil when p fails.
func (s *State) Maybe(p Parser) Parser {
	return s.Or(p, s.None())
}
// Stringify creates a parser which returns a string converted from a result of
// a given parser. The result of a given parser must be a rune, a string or a
// sequence of them in []interface{} (possibly nested); anything else panics
// inside the stringify helper.
func (s *State) Stringify(p Parser) Parser {
	return s.App(func(x interface{}) (interface{}, error) { return stringify(x), nil }, p)
}
// stringify flattens a parser result into a string: nil becomes "", strings
// pass through, runes are converted, and []interface{} values are flattened
// recursively. Any other type is a programming error and panics.
func stringify(x interface{}) string {
	switch v := x.(type) {
	case nil:
		return ""
	case string:
		return v
	case rune:
		return string(v)
	case []interface{}:
		var b strings.Builder
		for _, elem := range v {
			b.WriteString(stringify(elem))
		}
		return b.String()
	default:
		panic("invalid result type for stringify combinator")
	}
}
// stringToRuneSet builds a set (map to true) of the runes appearing in s, so
// that Chars/NotChars can test membership in O(1).
func stringToRuneSet(s string) map[rune]bool {
	rs := make(map[rune]bool)
	for _, r := range s {
		rs[r] = true
	}
	return rs
} | combinators.go | 0.803058 | 0.507446 | combinators.go | starcoder
package npm
import (
"fmt"
"regexp"
"strings"
"golang.org/x/xerrors"
"github.com/aquasecurity/go-version/pkg/part"
"github.com/aquasecurity/go-version/pkg/semver"
)
// cvRegex matches a (possibly partial) npm-style version: optional leading
// "v", numeric or wildcard (x/X/*) major/minor/patch components, then
// optional pre-release and build-metadata suffixes.
// NOTE(review): the '|' inside the character classes is a literal pipe, not
// alternation; redundant but harmless here.
const cvRegex string = `v?([0-9|x|X|\*]+)(\.[0-9|x|X|\*]+)?(\.[0-9|x|X|\*]+)?` +
	`(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` +
	`(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?`

var (
	// constraintOperators maps each operator token (including the empty
	// string, meaning equality) to its comparison function.
	constraintOperators = map[string]operatorFunc{
		"":   constraintEqual,
		"=":  constraintEqual,
		"==": constraintEqual,
		">":  constraintGreaterThan,
		"<":  constraintLessThan,
		">=": constraintGreaterThanEqual,
		"=>": constraintGreaterThanEqual,
		"<=": constraintLessThanEqual,
		"=<": constraintLessThanEqual,
		"~":  constraintTilde,
		"^":  constraintCaret,
	}
	// constraintRegexp extracts one "operator version" pair;
	// validConstraintRegexp validates a whole comma-separated AND segment.
	constraintRegexp      *regexp.Regexp
	validConstraintRegexp *regexp.Regexp
)

// operatorFunc evaluates whether version v satisfies the constraint version c
// under a particular comparison operator.
type operatorFunc func(v, c Version) bool

// init compiles the two package-level regexps from the operator tokens above.
func init() {
	ops := make([]string, 0, len(constraintOperators))
	for k := range constraintOperators {
		ops = append(ops, regexp.QuoteMeta(k))
	}

	constraintRegexp = regexp.MustCompile(fmt.Sprintf(
		`(%s)\s*(%s)`,
		strings.Join(ops, "|"),
		cvRegex))

	validConstraintRegexp = regexp.MustCompile(fmt.Sprintf(
		`^\s*(\s*(%s)\s*(%s)\s*\,?)*\s*$`,
		strings.Join(ops, "|"),
		cvRegex))
}
// Constraints is one or more constraint that a npm version can be
// checked against. The outer slice is OR-ed ("||" groups); each inner slice
// is AND-ed.
type Constraints [][]constraint

// constraint is a single parsed "operator version" pair, keeping the
// original text for String().
type constraint struct {
	version  Version
	operator operatorFunc
	original string
}
// NewConstraints parses the given string and returns an instance of Constraints.
// The input is split on "||" into OR groups; each group must match
// validConstraintRegexp and is then split into individual AND-ed constraints.
func NewConstraints(v string) (Constraints, error) {
	var css [][]constraint
	for _, vv := range strings.Split(v, "||") {
		// Validate the segment
		if !validConstraintRegexp.MatchString(vv) {
			return Constraints{}, xerrors.Errorf("improper constraint: %s", vv)
		}

		ss := constraintRegexp.FindAllString(vv, -1)
		if ss == nil {
			// No operator/version pair found (e.g. an empty segment): treat the
			// trimmed segment itself as one constraint.
			ss = append(ss, strings.TrimSpace(vv))
		}

		var cs []constraint
		for _, single := range ss {
			c, err := newConstraint(single)
			if err != nil {
				return Constraints{}, err
			}
			cs = append(cs, c)
		}
		css = append(css, cs)
	}

	return css, nil
}
// newConstraint parses a single "operator version" string. An empty string
// yields the match-anything constraint (*.*.*).
func newConstraint(c string) (constraint, error) {
	if c == "" {
		return constraint{
			version: semver.New(part.Any(true), part.Any(true), part.Any(true),
				part.NewParts("*"), ""),
			operator: constraintOperators[""],
		}, nil
	}

	m := constraintRegexp.FindStringSubmatch(c)
	if m == nil {
		return constraint{}, xerrors.Errorf("improper constraint: %s", c)
	}

	// Submatches: m[1]=operator, m[3]=major, m[4]=".minor", m[5]=".patch",
	// m[6]="-prerelease" (dots/dashes trimmed below).
	major := m[3]
	minor := strings.TrimPrefix(m[4], ".")
	patch := strings.TrimPrefix(m[5], ".")
	pre := part.NewParts(strings.TrimPrefix(m[6], "-"))

	v := semver.New(newPart(major), newPart(minor), newPart(patch), pre, "")

	return constraint{
		version:  v,
		operator: constraintOperators[m[1]],
		original: c,
	}, nil
}
// newPart converts a version component string into a part.Part, treating a
// missing (empty) component as the wildcard "*".
func newPart(p string) part.Part {
	if p != "" {
		return part.NewPart(p)
	}
	return part.NewPart("*")
}
func (c constraint) check(v Version) bool {
op := preCheck(c.operator)
return op(v, c.version)
}
// String returns the original text this constraint was parsed from.
func (c constraint) String() string {
	return c.original
}
// Check tests if a version satisfies all the constraints.
// The groups are OR-ed together: the version matches when any single
// group of AND-ed constraints accepts it.
func (cs Constraints) Check(v Version) bool {
	for _, group := range cs {
		if !andCheck(v, group) {
			continue
		}
		return true
	}
	return false
}
// Returns the string format of the constraints: AND-ed constraints are
// joined with "," and the resulting groups with "||".
func (cs Constraints) String() string {
	groups := make([]string, 0, len(cs))
	for _, group := range cs {
		parts := make([]string, 0, len(group))
		for _, c := range group {
			parts = append(parts, c.String())
		}
		groups = append(groups, strings.Join(parts, ","))
	}
	return strings.Join(groups, "||")
}
// andCheck reports whether v satisfies every constraint in the slice
// (logical AND).
func andCheck(v Version, constraints []constraint) bool {
	for i := range constraints {
		if !constraints[i].check(v) {
			return false
		}
	}
	return true
}
//-------------------------------------------------------------------
// Constraint functions
//-------------------------------------------------------------------
// constraintEqual implements "=": exact version equality.
func constraintEqual(v, c Version) bool {
	return v.Equal(c)
}
// constraintGreaterThan implements ">". When both the candidate and the
// constraint carry a pre-release tag, the comparison additionally requires
// both to belong to the same release.
func constraintGreaterThan(v, c Version) bool {
	if c.IsPreRelease() && v.IsPreRelease() {
		return v.Release().Equal(c.Release()) && v.GreaterThan(c)
	}
	return v.GreaterThan(c)
}
// constraintLessThan implements "<" with the same same-release rule for
// pre-release versions as constraintGreaterThan.
func constraintLessThan(v, c Version) bool {
	if c.IsPreRelease() && v.IsPreRelease() {
		return v.Release().Equal(c.Release()) && v.LessThan(c)
	}
	return v.LessThan(c)
}
// constraintGreaterThanEqual implements ">=" with the same-release rule
// for pre-release versions.
func constraintGreaterThanEqual(v, c Version) bool {
	if c.IsPreRelease() && v.IsPreRelease() {
		return v.Release().Equal(c.Release()) && v.GreaterThanOrEqual(c)
	}
	return v.GreaterThanOrEqual(c)
}
// constraintLessThanEqual implements "<=" with the same-release rule for
// pre-release versions.
func constraintLessThanEqual(v, c Version) bool {
	if c.IsPreRelease() && v.IsPreRelease() {
		return v.Release().Equal(c.Release()) && v.LessThanOrEqual(c)
	}
	return v.LessThanOrEqual(c)
}
// constraintTilde implements "~": at least the given version, but bounded
// above by the next tilde bump (see the examples below).
func constraintTilde(v, c Version) bool {
	// ~*, ~>* --> >= 0.0.0 (any)
	// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0, <3.0.0
	// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0, <2.1.0
	// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0, <1.3.0
	// ~1.2.3, ~>1.2.3 --> >=1.2.3, <1.3.0
	// ~1.2.0, ~>1.2.0 --> >=1.2.0, <1.3.0
	if c.IsPreRelease() && v.IsPreRelease() {
		// Two pre-releases: bounded above by the constraint's bare release.
		return v.GreaterThanOrEqual(c) && v.LessThan(c.Release())
	}
	return v.GreaterThanOrEqual(c) && v.LessThan(c.TildeBump())
}
// constraintCaret implements "^": at least the given version, but bounded
// above by the next caret bump (first non-zero component, see examples).
func constraintCaret(v, c Version) bool {
	// ^* --> (any)
	// ^1.2.3 --> >=1.2.3 <2.0.0
	// ^1.2 --> >=1.2.0 <2.0.0
	// ^1 --> >=1.0.0 <2.0.0
	// ^0.2.3 --> >=0.2.3 <0.3.0
	// ^0.2 --> >=0.2.0 <0.3.0
	// ^0.0.3 --> >=0.0.3 <0.0.4
	// ^0.0 --> >=0.0.0 <0.1.0
	// ^0 --> >=0.0.0 <1.0.0
	if c.IsPreRelease() && v.IsPreRelease() {
		// Two pre-releases: bounded above by the constraint's bare release.
		return v.GreaterThanOrEqual(c) && v.LessThan(c.Release())
	}
	return v.GreaterThanOrEqual(c) && v.LessThan(c.CaretBump())
}
// preCheck wraps a comparison operator with pre-release screening applied
// before every constraint check: a pre-release candidate never matches a
// non-pre-release constraint.
// NOTE(review): the second branch rejects only when the constraint is both
// a pre-release AND "any"; confirm this combination is intended rather than
// a plain c.IsAny() guard.
func preCheck(f operatorFunc) operatorFunc {
	return func(v, c Version) bool {
		if v.IsPreRelease() && !c.IsPreRelease() {
			return false
		} else if c.IsPreRelease() && c.IsAny() {
			return false
		}
		return f(v, c)
	}
}
package primitive
import (
"fmt"
"strings"
"github.com/fogleman/gg"
"github.com/golang/freetype/raster"
)
// Quadratic is a stroked quadratic Bézier curve shape: it starts at
// (X1, Y1), uses (X2, Y2) as the control point, ends at (X3, Y3), and is
// drawn with stroke width Width.
type Quadratic struct {
	Worker *Worker
	X1, Y1 float64
	X2, Y2 float64
	X3, Y3 float64
	Width  float64
}
// NewRandomQuadratic creates a quadratic curve whose start point is
// uniformly random over the worker's canvas, with the control and end
// points each offset by up to ±20 from the previous point, then applies
// one Mutate pass to ensure the curve is valid.
func NewRandomQuadratic(worker *Worker) *Quadratic {
	rnd := worker.Rnd
	x1 := rnd.Float64() * float64(worker.W)
	y1 := rnd.Float64() * float64(worker.H)
	x2 := x1 + rnd.Float64()*40 - 20
	y2 := y1 + rnd.Float64()*40 - 20
	x3 := x2 + rnd.Float64()*40 - 20
	y3 := y2 + rnd.Float64()*40 - 20
	width := 1.0 / 2
	q := &Quadratic{worker, x1, y1, x2, y2, x3, y3, width}
	q.Mutate()
	return q
}
// Draw strokes the curve onto the drawing context, scaling the stroke
// width by the given factor.
func (q *Quadratic) Draw(dc *gg.Context, scale float64) {
	dc.MoveTo(q.X1, q.Y1)
	dc.QuadraticTo(q.X2, q.Y2, q.X3, q.Y3)
	dc.SetLineWidth(q.Width * scale)
	dc.Stroke()
}
// SVG returns an SVG <path> element for the curve. The incoming attrs
// string describes a fill color, so every "fill" is rewritten to "stroke"
// because this shape is stroked rather than filled.
func (q *Quadratic) SVG(attrs string) string {
	// TODO: this is a little silly
	attrs = strings.ReplaceAll(attrs, "fill", "stroke")
	return fmt.Sprintf(
		"<path %s fill=\"none\" d=\"M %f %f Q %f %f, %f %f\" stroke-width=\"%f\" />",
		attrs, q.X1, q.Y1, q.X2, q.Y2, q.X3, q.Y3, q.Width)
}
// Copy returns a new Quadratic that duplicates this one.
func (q *Quadratic) Copy() Shape {
	dup := *q
	return &dup
}
// Mutate randomly perturbs one of the three control points or the stroke
// width, retrying until the result passes Valid. Point moves are normally
// distributed (stddev 16) and clamped to the canvas extended by a margin
// of m pixels on every side.
func (q *Quadratic) Mutate() {
	const m = 16
	w := q.Worker.W
	h := q.Worker.H
	rnd := q.Worker.Rnd
	for {
		switch rnd.Intn(4) {
		case 0:
			q.X1 = clamp(q.X1+rnd.NormFloat64()*16, -m, float64(w-1+m))
			q.Y1 = clamp(q.Y1+rnd.NormFloat64()*16, -m, float64(h-1+m))
		case 1:
			q.X2 = clamp(q.X2+rnd.NormFloat64()*16, -m, float64(w-1+m))
			q.Y2 = clamp(q.Y2+rnd.NormFloat64()*16, -m, float64(h-1+m))
		case 2:
			q.X3 = clamp(q.X3+rnd.NormFloat64()*16, -m, float64(w-1+m))
			q.Y3 = clamp(q.Y3+rnd.NormFloat64()*16, -m, float64(h-1+m))
		case 3:
			// NOTE(review): width is clamped to [1, 16] here although
			// NewRandomQuadratic initializes it to 0.5 — confirm intended.
			q.Width = clamp(q.Width+rnd.NormFloat64(), 1, 16)
		}
		if q.Valid() {
			break
		}
	}
}
// Valid reports whether the curve is non-degenerate: the squared distance
// between the endpoints must exceed the squared distance from each
// endpoint to the control point, rejecting folded-back curves.
func (q *Quadratic) Valid() bool {
	dx12 := int(q.X1 - q.X2)
	dy12 := int(q.Y1 - q.Y2)
	dx23 := int(q.X2 - q.X3)
	dy23 := int(q.Y2 - q.Y3)
	dx13 := int(q.X1 - q.X3)
	dy13 := int(q.Y1 - q.Y3)
	d12 := dx12*dx12 + dy12*dy12
	d23 := dx23*dx23 + dy23*dy23
	d13 := dx13*dx13 + dy13*dy13
	return d13 > d12 && d13 > d23
}
// Rasterize converts the stroked curve into scanlines: it builds a
// fixed-point path for the Bézier, widens it to the stroke width with
// round caps and joins, and rasterizes the result via the worker.
func (q *Quadratic) Rasterize() []Scanline {
	var path raster.Path
	p1 := fixp(q.X1, q.Y1)
	p2 := fixp(q.X2, q.Y2)
	p3 := fixp(q.X3, q.Y3)
	path.Start(p1)
	path.Add2(p2, p3)
	width := fix(q.Width)
	return strokePath(q.Worker, path, width, raster.RoundCapper, raster.RoundJoiner)
}
package plaid
import (
"encoding/json"
)
// StandaloneInvestmentTransactionType Valid values for investment transaction types and subtypes. Note that transactions representing inflow of cash will appear as negative amounts, outflow of cash will appear as positive amounts.
type StandaloneInvestmentTransactionType struct {
	// Buying an investment
	Buy string `json:"buy"`
	// Selling an investment
	Sell string `json:"sell"`
	// A cancellation of a pending transaction
	Cancel string `json:"cancel"`
	// Activity that modifies a cash position
	Cash string `json:"cash"`
	// Fees on the account, e.g. commission, bookkeeping, options-related.
	Fee string `json:"fee"`
	// Activity that modifies a position, but not through buy/sell activity e.g. options exercise, portfolio transfer
	Transfer string `json:"transfer"`
	// AdditionalProperties captures any JSON keys not matched by the named
	// fields above; it is filled by UnmarshalJSON and re-emitted by MarshalJSON.
	AdditionalProperties map[string]interface{}
}
// _StandaloneInvestmentTransactionType is a shadow type sharing the struct
// layout but not the methods, so UnmarshalJSON can decode into it without
// recursing into itself.
type _StandaloneInvestmentTransactionType StandaloneInvestmentTransactionType
// NewStandaloneInvestmentTransactionType instantiates a new StandaloneInvestmentTransactionType object.
// This constructor assigns default values to properties that have them defined
// and ensures the properties required by the API are set; its argument list
// will change if the set of required properties changes.
func NewStandaloneInvestmentTransactionType(buy string, sell string, cancel string, cash string, fee string, transfer string) *StandaloneInvestmentTransactionType {
	this := StandaloneInvestmentTransactionType{}
	this.Buy = buy
	this.Sell = sell
	this.Cancel = cancel
	this.Cash = cash
	this.Fee = fee
	this.Transfer = transfer
	return &this
}
// NewStandaloneInvestmentTransactionTypeWithDefaults instantiates a new StandaloneInvestmentTransactionType object.
// This constructor only assigns default values to properties that have them
// defined; it does not guarantee that the properties required by the API are set.
func NewStandaloneInvestmentTransactionTypeWithDefaults() *StandaloneInvestmentTransactionType {
	this := StandaloneInvestmentTransactionType{}
	return &this
}
// The accessors below follow the generated OpenAPI client pattern: each
// Get returns the zero value on a nil receiver, each GetOk additionally
// reports whether the receiver was usable, and each Set assigns directly.
// GetBuy returns the Buy field value
func (o *StandaloneInvestmentTransactionType) GetBuy() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.Buy
}
// GetBuyOk returns a tuple with the Buy field value
// and a boolean to check if the value has been set.
func (o *StandaloneInvestmentTransactionType) GetBuyOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Buy, true
}
// SetBuy sets field value
func (o *StandaloneInvestmentTransactionType) SetBuy(v string) {
	o.Buy = v
}
// GetSell returns the Sell field value
func (o *StandaloneInvestmentTransactionType) GetSell() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.Sell
}
// GetSellOk returns a tuple with the Sell field value
// and a boolean to check if the value has been set.
func (o *StandaloneInvestmentTransactionType) GetSellOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Sell, true
}
// SetSell sets field value
func (o *StandaloneInvestmentTransactionType) SetSell(v string) {
	o.Sell = v
}
// GetCancel returns the Cancel field value
func (o *StandaloneInvestmentTransactionType) GetCancel() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.Cancel
}
// GetCancelOk returns a tuple with the Cancel field value
// and a boolean to check if the value has been set.
func (o *StandaloneInvestmentTransactionType) GetCancelOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Cancel, true
}
// SetCancel sets field value
func (o *StandaloneInvestmentTransactionType) SetCancel(v string) {
	o.Cancel = v
}
// GetCash returns the Cash field value
func (o *StandaloneInvestmentTransactionType) GetCash() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.Cash
}
// GetCashOk returns a tuple with the Cash field value
// and a boolean to check if the value has been set.
func (o *StandaloneInvestmentTransactionType) GetCashOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Cash, true
}
// SetCash sets field value
func (o *StandaloneInvestmentTransactionType) SetCash(v string) {
	o.Cash = v
}
// GetFee returns the Fee field value
func (o *StandaloneInvestmentTransactionType) GetFee() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.Fee
}
// GetFeeOk returns a tuple with the Fee field value
// and a boolean to check if the value has been set.
func (o *StandaloneInvestmentTransactionType) GetFeeOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Fee, true
}
// SetFee sets field value
func (o *StandaloneInvestmentTransactionType) SetFee(v string) {
	o.Fee = v
}
// GetTransfer returns the Transfer field value
func (o *StandaloneInvestmentTransactionType) GetTransfer() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.Transfer
}
// GetTransferOk returns a tuple with the Transfer field value
// and a boolean to check if the value has been set.
func (o *StandaloneInvestmentTransactionType) GetTransferOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Transfer, true
}
// SetTransfer sets field value
func (o *StandaloneInvestmentTransactionType) SetTransfer(v string) {
	o.Transfer = v
}
// MarshalJSON serializes the six named fields plus any AdditionalProperties
// captured by a previous UnmarshalJSON. The generated `if true` guards have
// been collapsed into a single map literal; the emitted JSON is unchanged.
func (o StandaloneInvestmentTransactionType) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"buy":      o.Buy,
		"sell":     o.Sell,
		"cancel":   o.Cancel,
		"cash":     o.Cash,
		"fee":      o.Fee,
		"transfer": o.Transfer,
	}
	for key, value := range o.AdditionalProperties {
		toSerialize[key] = value
	}
	return json.Marshal(toSerialize)
}
// UnmarshalJSON decodes the named fields via the shadow type (avoiding
// recursion), then gathers every unrecognized JSON key into
// AdditionalProperties, deleting the named keys so they are not duplicated.
func (o *StandaloneInvestmentTransactionType) UnmarshalJSON(bytes []byte) (err error) {
	varStandaloneInvestmentTransactionType := _StandaloneInvestmentTransactionType{}
	if err = json.Unmarshal(bytes, &varStandaloneInvestmentTransactionType); err == nil {
		*o = StandaloneInvestmentTransactionType(varStandaloneInvestmentTransactionType)
	}
	additionalProperties := make(map[string]interface{})
	if err = json.Unmarshal(bytes, &additionalProperties); err == nil {
		delete(additionalProperties, "buy")
		delete(additionalProperties, "sell")
		delete(additionalProperties, "cancel")
		delete(additionalProperties, "cash")
		delete(additionalProperties, "fee")
		delete(additionalProperties, "transfer")
		o.AdditionalProperties = additionalProperties
	}
	return err
}
// NullableStandaloneInvestmentTransactionType wraps the value together with
// an explicit "has been set" flag, so a JSON null can be distinguished from
// an absent field.
type NullableStandaloneInvestmentTransactionType struct {
	value *StandaloneInvestmentTransactionType
	isSet bool
}
// Get returns the wrapped value (nil if unset or explicitly null).
func (v NullableStandaloneInvestmentTransactionType) Get() *StandaloneInvestmentTransactionType {
	return v.value
}
// Set stores val and marks the wrapper as set.
func (v *NullableStandaloneInvestmentTransactionType) Set(val *StandaloneInvestmentTransactionType) {
	v.value = val
	v.isSet = true
}
// IsSet reports whether Set (or UnmarshalJSON) has been called.
func (v NullableStandaloneInvestmentTransactionType) IsSet() bool {
	return v.isSet
}
// Unset clears the value and the set flag.
func (v *NullableStandaloneInvestmentTransactionType) Unset() {
	v.value = nil
	v.isSet = false
}
// NewNullableStandaloneInvestmentTransactionType returns a wrapper already
// marked as set.
func NewNullableStandaloneInvestmentTransactionType(val *StandaloneInvestmentTransactionType) *NullableStandaloneInvestmentTransactionType {
	return &NullableStandaloneInvestmentTransactionType{value: val, isSet: true}
}
// MarshalJSON emits the wrapped value (or null when the pointer is nil).
func (v NullableStandaloneInvestmentTransactionType) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}
// UnmarshalJSON records that a value (possibly null) was present and decodes it.
func (v *NullableStandaloneInvestmentTransactionType) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package slice
import (
"math/rand"
"time"
)
// shuffleInPlace performs a pseudo-random permutation of n elements using
// the supplied swap callback. It is a no-op for fewer than two elements.
// A fresh time-seeded source is created per call, preserving the original
// behavior of every exported Shuffle* helper below.
func shuffleInPlace(n int, swap func(i, j int)) {
	if n <= 1 {
		return
	}
	r := rand.New(rand.NewSource(time.Now().UnixNano()))
	r.Shuffle(n, swap)
}

// ShuffleBool shuffles (in place) a bool slice and returns it.
func ShuffleBool(a []bool) []bool {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleByte shuffles (in place) a byte slice and returns it.
func ShuffleByte(a []byte) []byte {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleComplex128 shuffles (in place) a complex128 slice and returns it.
func ShuffleComplex128(a []complex128) []complex128 {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleComplex64 shuffles (in place) a complex64 slice and returns it.
func ShuffleComplex64(a []complex64) []complex64 {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleFloat32 shuffles (in place) a float32 slice and returns it.
func ShuffleFloat32(a []float32) []float32 {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleFloat64 shuffles (in place) a float64 slice and returns it.
func ShuffleFloat64(a []float64) []float64 {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleInt shuffles (in place) an int slice and returns it.
func ShuffleInt(a []int) []int {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleInt16 shuffles (in place) an int16 slice and returns it.
func ShuffleInt16(a []int16) []int16 {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleInt32 shuffles (in place) an int32 slice and returns it.
func ShuffleInt32(a []int32) []int32 {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleInt64 shuffles (in place) an int64 slice and returns it.
func ShuffleInt64(a []int64) []int64 {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleInt8 shuffles (in place) an int8 slice and returns it.
func ShuffleInt8(a []int8) []int8 {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleRune shuffles (in place) a rune slice and returns it.
func ShuffleRune(a []rune) []rune {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleString shuffles (in place) a string slice and returns it.
func ShuffleString(a []string) []string {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleUint shuffles (in place) a uint slice and returns it.
func ShuffleUint(a []uint) []uint {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleUint16 shuffles (in place) a uint16 slice and returns it.
func ShuffleUint16(a []uint16) []uint16 {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleUint32 shuffles (in place) a uint32 slice and returns it.
func ShuffleUint32(a []uint32) []uint32 {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleUint64 shuffles (in place) a uint64 slice and returns it.
func ShuffleUint64(a []uint64) []uint64 {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleUint8 shuffles (in place) a uint8 slice and returns it.
func ShuffleUint8(a []uint8) []uint8 {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}

// ShuffleUintptr shuffles (in place) a uintptr slice and returns it.
func ShuffleUintptr(a []uintptr) []uintptr {
	shuffleInPlace(len(a), func(i, j int) { a[i], a[j] = a[j], a[i] })
	return a
}
package client
import (
"github.com/summerwind/h2spec/config"
"github.com/summerwind/h2spec/spec"
)
// FrameFormat returns the test group for HTTP/2 (RFC 7540) Section 4.1,
// "Frame Format": a conforming endpoint must ignore unknown frame types,
// undefined flag bits and the reserved bit of the frame header.
func FrameFormat() *spec.ClientTestGroup {
	tg := NewTestGroup("4.1", "Frame Format")
	// Type: The 8-bit type of the frame. The frame type determines
	// the format and semantics of the frame. Implementations MUST
	// ignore and discard any frame that has a type that is unknown.
	tg.AddTestCase(&spec.ClientTestCase{
		Desc:        "Sends a frame with unknown type",
		Requirement: "The endpoint MUST ignore and discard any frame that has a type that is unknown.",
		Run: func(c *config.Config, conn *spec.Conn) error {
			err := conn.Handshake()
			if err != nil {
				return err
			}
			// Frame with an unknown type (header bytes below):
			// Length: 8, Type: 0x16 (unassigned), Flags: 0, R: 0, StreamID: 0
			conn.Send([]byte("\x00\x00\x08\x16\x00\x00\x00\x00\x00"))
			conn.Send([]byte("\x00\x00\x00\x00\x00\x00\x00\x00"))
			// A PING round-trip afterwards proves the connection survived.
			data := [8]byte{}
			conn.WritePing(false, data)
			return spec.VerifyPingFrameWithAck(conn, data)
		},
	})
	// Flags are assigned semantics specific to the indicated frame
	// type. Flags that have no defined semantics for a particular
	// frame type MUST be ignored and MUST be left unset (0x0) when
	// sending.
	tg.AddTestCase(&spec.ClientTestCase{
		Desc:        "Sends a frame with undefined flag",
		Requirement: "The endpoint MUST ignore any flags that is undefined.",
		Run: func(c *config.Config, conn *spec.Conn) error {
			err := conn.Handshake()
			if err != nil {
				return err
			}
			// PING Frame:
			// Length: 8, Type: 6, Flags: 0x16 (undefined bits set), R: 0, StreamID: 0
			conn.Send([]byte("\x00\x00\x08\x06\x16\x00\x00\x00\x00"))
			conn.Send([]byte("\x00\x00\x00\x00\x00\x00\x00\x00"))
			return spec.VerifyEventType(conn, spec.EventPingFrame)
		},
	})
	// R: A reserved 1-bit field. The semantics of this bit are
	// undefined, and the bit MUST remain unset (0x0) when sending
	// and MUST be ignored when receiving.
	tg.AddTestCase(&spec.ClientTestCase{
		Desc:        "Sends a frame with reserved field bit",
		Requirement: "The endpoint MUST ignore the value of reserved field.",
		Run: func(c *config.Config, conn *spec.Conn) error {
			err := conn.Handshake()
			if err != nil {
				return err
			}
			// PING Frame:
			// Length: 8, Type: 6, Flags: 0x16, R: 1, StreamID: 0
			conn.Send([]byte("\x00\x00\x08\x06\x16\x80\x00\x00\x00"))
			conn.Send([]byte("\x00\x00\x00\x00\x00\x00\x00\x00"))
			return spec.VerifyEventType(conn, spec.EventPingFrame)
		},
	})
	return tg
}
// Error handling has to be part of our code and usually it is bounded to logging.
// The main goal of logging is to debug.
// We only log things that are actionable. Only log the contexts that are allowed us to identify
// what is going on. Anything else ideally is noise and would be better suited up on the dashboard
// through metrics. For example, socket connection and disconnection, we can log these but these
// are not actionable because we don't necessarily lookup the log for that.
// There is a package that is written by <NAME> called errors that let us simplify error
// handling and logging at the same time. Below is a demonstration on how to leverage the package
// to simplify our code. By reducing logging, we also reduce a large amount of pressure on the heap
// (garbage collection).
package main
import (
"fmt"
// This is <NAME>'s errors package that have all the wrapping functions.
"github.com/pkg/errors"
)
// AppError represents a custom error type carrying an application state code.
type AppError struct {
	State int
}
// Error implements the error interface, rendering the state code.
func (c *AppError) Error() string {
	return fmt.Sprintf("App Error, State: %d", c.State)
}
func main() {
	// Make the function call and validate the error.
	// firstCall calls secondCall calls thirdCall, which results in an AppError.
	// Start down the call stack, in thirdCall, where the error occurs. That is
	// the root of the error. We return it up the call stack in our traditional
	// error interface value.
	// Back in secondCall, we get the interface value with a concrete type
	// stored inside it. secondCall has to decide whether to handle the error
	// or push it up the call stack. If secondCall decides to handle the error,
	// it has the responsibility of logging it. If not, its responsibility is
	// to move it up. However, when pushing it up the call stack we must not
	// lose context — this is where the errors package comes in. We create a
	// new interface value that wraps this error, add context around it and
	// push it up. This maintains the call stack of where we are in the code.
	// Similarly, firstCall doesn't handle the error but wraps and pushes it up.
	// In main, we are handling the call, which means the error stops here and
	// we have to log it.
	// In order to properly handle this error, we need to know what the root
	// cause of this error was: the original error that is not wrapped. The
	// Cause method bubbles that error out of the wrapping and allows us to use
	// all the language mechanics we have.
	// We are not only able to access the State via the assertion back to the
	// concrete type; we can also log the entire stack trace by using %+v.
	if err := firstCall(10); err != nil {
		// Use type as context to determine cause.
		switch v := errors.Cause(err).(type) {
		case *AppError:
			// We got our custom error type.
			fmt.Println("Custom App Error:", v.State)
		default:
			// We did not get any specific error type.
			fmt.Println("Default Error")
		}
		// Display the stack trace for the error.
		fmt.Println("\nStack Trace\n********************************")
		fmt.Printf("%+v\n", err)
		fmt.Println("\nNo Trace\n********************************")
		fmt.Printf("%v\n", err)
	}
}
// firstCall makes a call to the secondCall function and wraps any error
// with its own context before returning it up the stack.
func firstCall(i int) error {
	if err := secondCall(i); err != nil {
		return errors.Wrapf(err, "firstCall->secondCall(%d)", i)
	}
	return nil
}
// secondCall makes a call to the thirdCall function and wraps any error
// with its own context before returning it up the stack.
func secondCall(i int) error {
	if err := thirdCall(); err != nil {
		return errors.Wrap(err, "secondCall->thirdCall()")
	}
	return nil
}
// thirdCall creates the root error value that the callers wrap and main
// eventually unwraps with errors.Cause.
func thirdCall() error {
	return &AppError{99}
}
Joko Engineering Part
https://www.youtube.com/c/JokoEngineeringhelp
https://grabcad.com/library/freecad-practice-part-1
*/
//-----------------------------------------------------------------------------
package main
import (
"log"
"math"
"github.com/deadsy/sdfx/obj"
"github.com/deadsy/sdfx/render"
"github.com/deadsy/sdfx/sdf"
)
//-----------------------------------------------------------------------------
// small end
const radiusOuterSmall = 1.0
const radiusInnerSmall = 0.55
const smallThickness = 1.0
// big end
const radiusOuterBig = 1.89
const radiusInnerBig = 2.90 * 0.5
const armWidth0 = 0.4
const armWidth1 = 0.5
const smallLength = 3.0
const overallLength = 9.75
const overallHeight = 4.0
var theta0 = sdf.DtoR(65.0) * 0.5
var theta1 = 0.5*sdf.Pi - theta0
const filletRadius0 = 0.25
const filletRadius1 = 0.50
const shaftRadius = 0.55
const keyRadius = 0.77
const keyWidth = 0.35
// derived
const centerToCenter = overallLength - radiusOuterBig - radiusOuterSmall
//-----------------------------------------------------------------------------
// planView builds the 2D top-view profile: a flat-flank cam outline
// hollowed into a rim of width armWidth0, with a solid boss at the small
// end and a washer at the big end, blended with a polynomial smoothing
// minimum and clipped back to the outer outline.
func planView() (sdf.SDF2, error) {
	sOuter, err := sdf.FlatFlankCam2D(centerToCenter, radiusOuterBig, radiusOuterSmall)
	if err != nil {
		return nil, err
	}
	// Hollow the cam into a constant-width rim.
	sInner := sdf.Offset2D(sOuter, -armWidth0)
	s0 := sdf.Difference2D(sOuter, sInner)
	// Solid boss at the small end.
	s1, err := sdf.Circle2D(radiusOuterSmall)
	if err != nil {
		return nil, err
	}
	s1 = sdf.Transform2D(s1, sdf.Translate2d(sdf.V2{0, centerToCenter}))
	// Washer (annulus) at the big end.
	k := obj.WasherParms{
		InnerRadius: radiusInnerBig,
		OuterRadius: radiusOuterBig,
	}
	s2, err := obj.Washer2D(&k)
	if err != nil {
		return nil, err
	}
	// Blend the three pieces smoothly, then clip to the outer outline.
	s3 := sdf.Union2D(s0, s1, s2)
	s3.(*sdf.UnionSDF2).SetMin(sdf.PolyMin(0.3))
	return sdf.Intersect2D(sOuter, s3), nil
}
//-----------------------------------------------------------------------------
// smoothSteps is the number of facets used for smoothed polygon corners.
const smoothSteps = 5
// sideView builds the closed 2D side profile of the part as a polygon.
// The first half of the vertex list traces one side from the small end to
// the big end; the second half mirrors it back, with fillets applied at
// the marked corners.
func sideView() (sdf.SDF2, error) {
	dx0 := smallThickness * 0.5
	dy1 := smallLength
	dx2 := (overallHeight - smallThickness) * 0.5
	dy2 := dx2 * math.Tan(theta1)
	dy3 := overallLength - smallLength - dy2
	dx4 := -armWidth1
	dy5 := -dy3 + (armWidth1 / math.Cos(theta1)) - armWidth1*math.Tan(theta1)
	dx6 := armWidth1 - overallHeight*0.5
	dy6 := dx6 / math.Tan(theta0)
	p := sdf.NewPolygon()
	p.Add(dx0, 0)
	p.Add(0, dy1).Rel().Smooth(filletRadius1, smoothSteps)
	p.Add(dx2, dy2).Rel().Smooth(filletRadius1, smoothSteps)
	p.Add(0, dy3).Rel()
	p.Add(dx4, 0).Rel()
	p.Add(0, dy5).Rel().Smooth(filletRadius1, smoothSteps)
	p.Add(dx6, dy6).Rel().Smooth(filletRadius0, smoothSteps)
	// mirror
	p.Add(dx6, -dy6).Rel().Smooth(filletRadius1, smoothSteps)
	p.Add(0, -dy5).Rel()
	p.Add(dx4, 0).Rel()
	p.Add(0, -dy3).Rel().Smooth(filletRadius1, smoothSteps)
	p.Add(dx2, -dy2).Rel().Smooth(filletRadius1, smoothSteps)
	p.Add(0, -dy1).Rel()
	return sdf.Polygon2D(p.Vertices())
}
//-----------------------------------------------------------------------------
// shaft builds the keyed-shaft cutter (a cylinder with a keyway) that is
// later subtracted from the part to form the bore; it is rotated into
// orientation and offset by the small-end radius.
func shaft() (sdf.SDF3, error) {
	k := obj.KeywayParameters{
		ShaftRadius: shaftRadius,
		KeyRadius:   keyRadius,
		KeyWidth:    keyWidth,
		ShaftLength: overallHeight,
	}
	s, err := obj.Keyway3D(&k)
	if err != nil {
		return nil, err
	}
	// Orient the shaft (two rotations) and position it (translation).
	m := sdf.RotateY(sdf.DtoR(-90))
	m = sdf.RotateX(sdf.DtoR(-30)).Mul(m)
	m = sdf.Translate3d(sdf.V3{0, radiusOuterSmall, 0}).Mul(m)
	s = sdf.Transform3D(s, m)
	return s, nil
}
//-----------------------------------------------------------------------------
// part assembles the final solid: the extruded side profile intersected
// with the extruded (re-oriented) plan profile, minus the shaft bore.
func part() (sdf.SDF3, error) {
	side2d, err := sideView()
	if err != nil {
		return nil, err
	}
	side3d := sdf.Extrude3D(side2d, radiusOuterBig*2.0)
	plan2d, err := planView()
	if err != nil {
		return nil, err
	}
	plan3d := sdf.Extrude3D(plan2d, overallHeight)
	// Rotate/translate the plan extrusion so it lines up with the side view.
	m := sdf.RotateZ(sdf.DtoR(180))
	m = sdf.Translate3d(sdf.V3{0, centerToCenter + radiusOuterSmall, 0}).Mul(m)
	m = sdf.RotateY(sdf.DtoR(90)).Mul(m)
	plan3d = sdf.Transform3D(plan3d, m)
	part := sdf.Intersect3D(plan3d, side3d)
	shaft, err := shaft()
	if err != nil {
		return nil, err
	}
	return sdf.Difference3D(part, shaft), nil
}
//-----------------------------------------------------------------------------
// main builds the part and renders it to "part.stl" (resolution parameter
// 300), aborting with a logged message on failure.
func main() {
	s, err := part()
	if err != nil {
		log.Fatalf("error: %s", err)
	}
	render.RenderSTL(s, 300, "part.stl")
}
//----------------------------------------------------------------------------- | examples/joko/main.go | 0.632957 | 0.482917 | main.go | starcoder |
package main
import (
"fmt"
"reflect"
)
// isBool reports whether the reflected type's kind is bool.
func isBool(in reflect.Type) bool {
	return in.Kind() == reflect.Bool
}
func isInt(in reflect.Type) bool {
switch in.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return true
}
return false
}
// isString reports whether the reflected type's kind is string.
func isString(in reflect.Type) bool {
	return in.Kind() == reflect.String
}
func isFloat(in reflect.Type) bool {
switch in.Kind() {
case reflect.Float32, reflect.Float64:
return true
}
return false
}
// isSlice reports whether t's kind is slice.
func isSlice(t reflect.Type) bool {
	return t.Kind() == reflect.Slice
}
// isStruct reports whether t's kind is struct.
func isStruct(t reflect.Type) bool {
	return t.Kind() == reflect.Struct
}
// isMap reports whether t's kind is map.
func isMap(t reflect.Type) bool {
	return t.Kind() == reflect.Map
}
// isDuration reports whether t is a struct whose printed type name is
// "v1.Duration".
func isDuration(t reflect.Type) bool {
	return t.Kind() == reflect.Struct && t.String() == "v1.Duration"
}
// isQuantity reports whether t is a struct whose printed type name is
// "resource.Quantity".
func isQuantity(t reflect.Type) bool {
	return t.Kind() == reflect.Struct && t.String() == "resource.Quantity"
}
// isIntOrString reports whether t is a struct whose printed type name is
// "intstr.IntOrString".
func isIntOrString(t reflect.Type) bool {
	return t.Kind() == reflect.Struct && t.String() == "intstr.IntOrString"
}
// isPtr reports whether t's kind is pointer.
func isPtr(t reflect.Type) bool {
	return t.Kind() == reflect.Ptr
}
func isValueType(in reflect.Type) bool {
switch in.Kind() {
case reflect.Ptr:
return isValueType(in.Elem())
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64, reflect.String, reflect.Bool:
return true
case reflect.Slice, reflect.Map:
return false
case reflect.Struct:
return in.String() == "v1.Duration" || in.String() == "resource.Quantity" || in.String() == "intstr.IntOrString"
default:
panic(fmt.Sprintf("unknown kind %v", in.Kind()))
}
}
// getFields collects the fields of struct type t. When flatten is true,
// anonymous (embedded) fields are expanded recursively into their own
// fields instead of being reported as single entries.
func getFields(t reflect.Type, flatten bool) []_field {
	var fields []_field
	for i := 0; i < t.NumField(); i++ {
		f := t.Field(i)
		if !f.Anonymous || !flatten {
			fields = append(fields, _field{
				StructField: f,
				Owner:       t,
			})
			continue
		}
		fields = append(fields, getFields(f.Type, flatten)...)
	}
	return fields
}
func verifyFields(t reflect.Type, fields ...string) error {
for _, field := range fields {
valid := false
for i := 0; i < t.NumField(); i++ {
f := t.Field(i)
if f.Name == field {
valid = true
}
}
if !valid {
return fmt.Errorf("field %s is not part of struct %s", field, t.Name())
}
}
return nil
} | hack/gen-tf-code/reflect.go | 0.572364 | 0.550668 | reflect.go | starcoder |
package hr
import "github.com/rannoch/cldr"
var currencies = []cldr.Currency{
{Currency: "ADP", DisplayName: "andorska pezeta", Symbol: "ADP"},
{Currency: "AED", DisplayName: "UAE dirham", Symbol: "AED"},
{Currency: "AFA", DisplayName: "afganistanski afgani (1927.–2002.)", Symbol: "AFA"},
{Currency: "AFN", DisplayName: "afganistanski afgani", Symbol: "AFN"},
{Currency: "ALK", DisplayName: "stari albanski lek", Symbol: "ALK"},
{Currency: "ALL", DisplayName: "albanski lek", Symbol: "ALL"},
{Currency: "AMD", DisplayName: "armenski dram", Symbol: "AMD"},
{Currency: "ANG", DisplayName: "nizozemskoantilski gulden", Symbol: "ANG"},
{Currency: "AOA", DisplayName: "angolska kvanza", Symbol: "AOA"},
{Currency: "AOK", DisplayName: "angolska kvanza (1977.–1990.)", Symbol: "AOK"},
{Currency: "AON", DisplayName: "angolska nova kvanza (1990.–2000.)", Symbol: "AON"},
{Currency: "AOR", DisplayName: "angolska kvanza (1995.–1999.)", Symbol: "AOR"},
{Currency: "ARA", DisplayName: "argentinski austral", Symbol: "ARA"},
{Currency: "ARL", DisplayName: "argentinski pezo lej (1970.–1983.)", Symbol: "ARL"},
{Currency: "ARM", DisplayName: "argentinski pezo (1881.–1970.)", Symbol: "ARM"},
{Currency: "ARP", DisplayName: "argentinski pezo (1983.–1985.)", Symbol: "ARP"},
{Currency: "ARS", DisplayName: "argentinski pezo", Symbol: "ARS"},
{Currency: "ATS", DisplayName: "austrijski šiling", Symbol: "ATS"},
{Currency: "AUD", DisplayName: "australski dolar", Symbol: "AUD"},
{Currency: "AWG", DisplayName: "arupski florin", Symbol: "AWG"},
{Currency: "AZM", DisplayName: "azerbajdžanski manat (1993.–2006.)", Symbol: "AZM"},
{Currency: "AZN", DisplayName: "azerbajdžanski manat", Symbol: "AZN"},
{Currency: "BAD", DisplayName: "bosansko-hercegovački dinar", Symbol: "BAD"},
{Currency: "BAM", DisplayName: "konvertibilna marka", Symbol: "BAM"},
{Currency: "BAN", DisplayName: "bosansko-hercegovački novi dinar", Symbol: "BAN"},
{Currency: "BBD", DisplayName: "barbadoski dolar", Symbol: "BBD"},
{Currency: "BDT", DisplayName: "bangladeška taka", Symbol: "BDT"},
{Currency: "BEC", DisplayName: "belgijski franak (konvertibilan)", Symbol: "BEC"},
{Currency: "BEF", DisplayName: "belgijski franak", Symbol: "BEF"},
{Currency: "BEL", DisplayName: "belgijski franak (financijski)", Symbol: "BEL"},
{Currency: "BGL", DisplayName: "bugarski čvrsti lev", Symbol: "BGL"},
{Currency: "BGM", DisplayName: "bugarski socijalistički lev", Symbol: "BGM"},
{Currency: "BGN", DisplayName: "bugarski lev", Symbol: "BGN"},
{Currency: "BGO", DisplayName: "stari bugarski lev", Symbol: "BGO"},
{Currency: "BHD", DisplayName: "bahreinski dinar", Symbol: "BHD"},
{Currency: "BIF", DisplayName: "burundski franak", Symbol: "BIF"},
{Currency: "BMD", DisplayName: "bermudski dolar", Symbol: "BMD"},
{Currency: "BND", DisplayName: "brunejski dolar", Symbol: "BND"},
{Currency: "BOB", DisplayName: "bolivijski bolivijano", Symbol: "BOB"},
{Currency: "BOL", DisplayName: "stari bolivijski bolivijano", Symbol: "BOL"},
{Currency: "BOP", DisplayName: "bolivijski pezo", Symbol: "BOP"},
{Currency: "BOV", DisplayName: "bolivijski mvdol", Symbol: "BOV"},
{Currency: "BRB", DisplayName: "brazilski novi cruzeiro (1967.–1986.)", Symbol: "BRB"},
{Currency: "BRC", DisplayName: "brazilski cruzado", Symbol: "BRC"},
{Currency: "BRE", DisplayName: "brazilski cruzeiro (1990.–1993.)", Symbol: "BRE"},
{Currency: "BRL", DisplayName: "brazilski real", Symbol: "BRL"},
{Currency: "BRN", DisplayName: "brazilski novi cruzado", Symbol: "BRN"},
{Currency: "BRR", DisplayName: "brazilski cruzeiro", Symbol: "BRR"},
{Currency: "BRZ", DisplayName: "stari brazilski kruzeiro", Symbol: "BRZ"},
{Currency: "BSD", DisplayName: "bahamski dolar", Symbol: "BSD"},
{Currency: "BTN", DisplayName: "butanski ngultrum", Symbol: "BTN"},
{Currency: "BUK", DisplayName: "burmanski kyat", Symbol: "BUK"},
{Currency: "BWP", DisplayName: "bocvanska pula", Symbol: "BWP"},
{Currency: "BYB", DisplayName: "bjeloruska nova rublja (1994–1999)", Symbol: "BYB"},
{Currency: "BYR", DisplayName: "bjeloruska rublja", Symbol: "BYR"},
{Currency: "BZD", DisplayName: "belizeanski dolar", Symbol: "BZD"},
{Currency: "CAD", DisplayName: "kanadski dolar", Symbol: "CAD"},
{Currency: "CDF", DisplayName: "kongoanski franak", Symbol: "CDF"},
{Currency: "CHE", DisplayName: "WIR euro", Symbol: "CHE"},
{Currency: "CHF", DisplayName: "švicarski franak", Symbol: "CHF"},
{Currency: "CHW", DisplayName: "WIR franak", Symbol: "CHW"},
{Currency: "CLE", DisplayName: "čileanski eskudo", Symbol: "CLE"},
{Currency: "CLF", DisplayName: "čileanski unidades de fomentos", Symbol: "CLF"},
{Currency: "CLP", DisplayName: "čileanski pezo", Symbol: "CLP"},
{Currency: "CNX", DisplayName: "kineski narodni dolar", Symbol: "CNX"},
{Currency: "CNY", DisplayName: "kineski yuan", Symbol: "CNY"},
{Currency: "COP", DisplayName: "kolumbijski pezo", Symbol: "COP"},
{Currency: "COU", DisplayName: "unidad de valor real", Symbol: "COU"},
{Currency: "CRC", DisplayName: "kostarikanski kolon", Symbol: "CRC"},
{Currency: "CSD", DisplayName: "stari srpski dinar", Symbol: "CSD"},
{Currency: "CSK", DisplayName: "čehoslovačka kruna", Symbol: "CSK"},
{Currency: "CUC", DisplayName: "kubanski konvertibilni pezo", Symbol: "CUC"},
{Currency: "CUP", DisplayName: "kubanski pezo", Symbol: "CUP"},
{Currency: "CVE", DisplayName: "zelenortski eskudo", Symbol: "CVE"},
{Currency: "CYP", DisplayName: "ciparska funta", Symbol: "CYP"},
{Currency: "CZK", DisplayName: "češka kruna", Symbol: "CZK"},
{Currency: "DDM", DisplayName: "istočnonjemačka marka", Symbol: "DDM"},
{Currency: "DEM", DisplayName: "njemačka marka", Symbol: "DEM"},
{Currency: "DJF", DisplayName: "džibutski franak", Symbol: "DJF"},
{Currency: "DKK", DisplayName: "danska kruna", Symbol: "DKK"},
{Currency: "DOP", DisplayName: "dominikanski pezo", Symbol: "DOP"},
{Currency: "DZD", DisplayName: "alžirski dinar", Symbol: "DZD"},
{Currency: "ECS", DisplayName: "ekvatorska sukra", Symbol: "ECS"},
{Currency: "ECV", DisplayName: "ekvatorski unidad de valor constante (UVC)", Symbol: "ECV"},
{Currency: "EEK", DisplayName: "estonska kruna", Symbol: "EEK"},
{Currency: "EGP", DisplayName: "egipatska funta", Symbol: "EGP"},
{Currency: "ERN", DisplayName: "eritrejska nakfa", Symbol: "ERN"},
{Currency: "ESA", DisplayName: "španjolska pezeta (A račun)", Symbol: "ESA"},
{Currency: "ESB", DisplayName: "španjolska pezeta (konvertibilni račun)", Symbol: "ESB"},
{Currency: "ESP", DisplayName: "španjolska pezeta", Symbol: "ESP"},
{Currency: "ETB", DisplayName: "etiopski bir", Symbol: "ETB"},
{Currency: "EUR", DisplayName: "euro", Symbol: "EUR"},
{Currency: "FIM", DisplayName: "finska marka", Symbol: "FIM"},
{Currency: "FJD", DisplayName: "fidžijski dolar", Symbol: "FJD"},
{Currency: "FKP", DisplayName: "falklandska funta", Symbol: "FKP"},
{Currency: "FRF", DisplayName: "francuski franak", Symbol: "FRF"},
{Currency: "GBP", DisplayName: "britanska funta", Symbol: "GBP"},
{Currency: "GEK", DisplayName: "gruzijski kupon larit", Symbol: "GEK"},
{Currency: "GEL", DisplayName: "gruzijski lari", Symbol: "GEL"},
{Currency: "GHC", DisplayName: "ganski cedi (1979.–2007.)", Symbol: "GHC"},
{Currency: "GHS", DisplayName: "ganski cedi", Symbol: "GHS"},
{Currency: "GIP", DisplayName: "gibraltarska funta", Symbol: "GIP"},
{Currency: "GMD", DisplayName: "gambijski dalas", Symbol: "GMD"},
{Currency: "GNF", DisplayName: "gvinejski franak", Symbol: "GNF"},
{Currency: "GNS", DisplayName: "gvinejski syli", Symbol: "GNS"},
{Currency: "GQE", DisplayName: "ekvatorski gvinejski ekwele", Symbol: "GQE"},
{Currency: "GRD", DisplayName: "grčka drahma", Symbol: "GRD"},
{Currency: "GTQ", DisplayName: "gvatemalski kvecal", Symbol: "GTQ"},
{Currency: "GWE", DisplayName: "portugalski gvinejski eskudo", Symbol: "GWE"},
{Currency: "GWP", DisplayName: "gvinejskobisauski pezo", Symbol: "GWP"},
{Currency: "GYD", DisplayName: "gvajanski dolar", Symbol: "GYD"},
{Currency: "HKD", DisplayName: "hongkonški dolar", Symbol: "HKD"},
{Currency: "HNL", DisplayName: "honduraška lempira", Symbol: "HNL"},
{Currency: "HRD", DisplayName: "hrvatski dinar", Symbol: "HRD"},
{Currency: "HRK", DisplayName: "hrvatska kuna", Symbol: "HRK"},
{Currency: "HTG", DisplayName: "haićanski gourd", Symbol: "HTG"},
{Currency: "HUF", DisplayName: "mađarska forinta", Symbol: "HUF"},
{Currency: "IDR", DisplayName: "indonezijska rupija", Symbol: "IDR"},
{Currency: "IEP", DisplayName: "irska funta", Symbol: "IEP"},
{Currency: "ILP", DisplayName: "izraelska funta", Symbol: "ILP"},
{Currency: "ILR", DisplayName: "stari izraelski šekel", Symbol: "ILR"},
{Currency: "ILS", DisplayName: "novi izraelski šekel", Symbol: "ILS"},
{Currency: "INR", DisplayName: "indijska rupija", Symbol: "INR"},
{Currency: "IQD", DisplayName: "irački dinar", Symbol: "IQD"},
{Currency: "IRR", DisplayName: "iranski rijal", Symbol: "IRR"},
{Currency: "ISJ", DisplayName: "stara islandska kruna", Symbol: "ISJ"},
{Currency: "ISK", DisplayName: "islandska kruna", Symbol: "ISK"},
{Currency: "ITL", DisplayName: "talijanska lira", Symbol: "ITL"},
{Currency: "JMD", DisplayName: "jamajčanski dolar", Symbol: "JMD"},
{Currency: "JOD", DisplayName: "jordanski dinar", Symbol: "JOD"},
{Currency: "JPY", DisplayName: "japanski jen", Symbol: "JPY"},
{Currency: "KES", DisplayName: "kenijski šiling", Symbol: "KES"},
{Currency: "KGS", DisplayName: "kirgiski som", Symbol: "KGS"},
{Currency: "KHR", DisplayName: "kambođanski rijal", Symbol: "KHR"},
{Currency: "KMF", DisplayName: "komorski franak", Symbol: "KMF"},
{Currency: "KPW", DisplayName: "sjevernokorejski won", Symbol: "KPW"},
{Currency: "KRH", DisplayName: "južnokorejski hvan", Symbol: "KRH"},
{Currency: "KRO", DisplayName: "stari južnokorejski von", Symbol: "KRO"},
{Currency: "KRW", DisplayName: "južnokorejski won", Symbol: "KRW"},
{Currency: "KWD", DisplayName: "kuvajtski dinar", Symbol: "KWD"},
{Currency: "KYD", DisplayName: "kajmanski dolar", Symbol: "KYD"},
{Currency: "KZT", DisplayName: "kazahstanski tenge", Symbol: "KZT"},
{Currency: "LAK", DisplayName: "laoski kip", Symbol: "LAK"},
{Currency: "LBP", DisplayName: "libanonska funta", Symbol: "LBP"},
{Currency: "LKR", DisplayName: "šrilankanska rupija", Symbol: "LKR"},
{Currency: "LRD", DisplayName: "liberijski dolar", Symbol: "LRD"},
{Currency: "LSL", DisplayName: "lesoto loti", Symbol: "LSL"},
{Currency: "LTL", DisplayName: "litavski litas", Symbol: "LTL"},
{Currency: "LTT", DisplayName: "litavski talonas", Symbol: "LTT"},
{Currency: "LUC", DisplayName: "luksemburški konvertibilni franak", Symbol: "LUC"},
{Currency: "LUF", DisplayName: "luksemburški franak", Symbol: "LUF"},
{Currency: "LUL", DisplayName: "luksemburški financijski franak", Symbol: "LUL"},
{Currency: "LVL", DisplayName: "<NAME>", Symbol: "LVL"},
{Currency: "LVR", DisplayName: "<NAME>", Symbol: "LVR"},
{Currency: "LYD", DisplayName: "<NAME>", Symbol: "LYD"},
{Currency: "MAD", DisplayName: "<NAME>", Symbol: "MAD"},
{Currency: "MAF", DisplayName: "<NAME>", Symbol: "MAF"},
{Currency: "MCF", DisplayName: "monegaški franak", Symbol: "MCF"},
{Currency: "MDC", DisplayName: "<NAME>", Symbol: "MDC"},
{Currency: "MDL", DisplayName: "<NAME>", Symbol: "MDL"},
{Currency: "MGA", DisplayName: "<NAME>", Symbol: "MGA"},
{Currency: "MGF", DisplayName: "<NAME>", Symbol: "MGF"},
{Currency: "MKD", DisplayName: "<NAME>", Symbol: "MKD"},
{Currency: "MKN", DisplayName: "<NAME>", Symbol: "MKN"},
{Currency: "MLF", DisplayName: "malijski franak", Symbol: "MLF"},
{Currency: "MMK", DisplayName: "<NAME>", Symbol: "MMK"},
{Currency: "MNT", DisplayName: "<NAME>", Symbol: "MNT"},
{Currency: "MOP", DisplayName: "makaoška pataka", Symbol: "MOP"},
{Currency: "MRO", DisplayName: "mauritanijska ouguja", Symbol: "MRO"},
{Currency: "MTL", DisplayName: "malteška lira", Symbol: "MTL"},
{Currency: "MTP", DisplayName: "malteška funta", Symbol: "MTP"},
{Currency: "MUR", DisplayName: "mauricijska rupija", Symbol: "MUR"},
{Currency: "MVP", DisplayName: "maldivijska rupija", Symbol: "MVP"},
{Currency: "MVR", DisplayName: "maldivijska rufija", Symbol: "MVR"},
{Currency: "MWK", DisplayName: "malavijska kvača", Symbol: "MWK"},
{Currency: "MXN", DisplayName: "meksički pezo", Symbol: "MXN"},
{Currency: "MXP", DisplayName: "meksički srebrni pezo (1861–1992)", Symbol: "MXP"},
{Currency: "MXV", DisplayName: "meksički unidad de inversion (UDI)", Symbol: "MXV"},
{Currency: "MYR", DisplayName: "malezijski ringit", Symbol: "MYR"},
{Currency: "MZE", DisplayName: "mozambijski eskudo", Symbol: "MZE"},
{Currency: "MZM", DisplayName: "stari mozambijski metikal", Symbol: "MZM"},
{Currency: "MZN", DisplayName: "mozambički metikal", Symbol: "MZN"},
{Currency: "NAD", DisplayName: "namibijski dolar", Symbol: "NAD"},
{Currency: "NGN", DisplayName: "nigerijska naira", Symbol: "NGN"},
{Currency: "NIC", DisplayName: "nikaragvanska kordoba", Symbol: "NIC"},
{Currency: "NIO", DisplayName: "nikaragvanska zlatna kordoba", Symbol: "NIO"},
{Currency: "NLG", DisplayName: "nizozemski gulden", Symbol: "NLG"},
{Currency: "NOK", DisplayName: "norveška kruna", Symbol: "NOK"},
{Currency: "NPR", DisplayName: "nepalska rupija", Symbol: "NPR"},
{Currency: "NZD", DisplayName: "novozelandski dolar", Symbol: "NZD"},
{Currency: "OMR", DisplayName: "omanski rijal", Symbol: "OMR"},
{Currency: "PAB", DisplayName: "panamska balboa", Symbol: "PAB"},
{Currency: "PEI", DisplayName: "peruanski inti", Symbol: "PEI"},
{Currency: "PEN", DisplayName: "peruanski novi sol", Symbol: "PEN"},
{Currency: "PES", DisplayName: "peruanski sol", Symbol: "PES"},
{Currency: "PGK", DisplayName: "k<NAME>", Symbol: "PGK"},
{Currency: "PHP", DisplayName: "filipinski pezo", Symbol: "PHP"},
{Currency: "PKR", DisplayName: "pakistanska rupija", Symbol: "PKR"},
{Currency: "PLN", DisplayName: "poljska zlota", Symbol: "PLN"},
{Currency: "PLZ", DisplayName: "poljska zlota (1950.–1995.)", Symbol: "PLZ"},
{Currency: "PTE", DisplayName: "portugalski eskudo", Symbol: "PTE"},
{Currency: "PYG", DisplayName: "paragvajski gvarani", Symbol: "PYG"},
{Currency: "QAR", DisplayName: "katarski rial", Symbol: "QAR"},
{Currency: "RHD", DisplayName: "rodezijski dolar", Symbol: "RHD"},
{Currency: "ROL", DisplayName: "starorumunjski lek", Symbol: "ROL"},
{Currency: "RON", DisplayName: "rumunjski lej", Symbol: "RON"},
{Currency: "RSD", DisplayName: "srpski dinar", Symbol: "RSD"},
{Currency: "RUB", DisplayName: "ruska rublja", Symbol: "RUB"},
{Currency: "RUR", DisplayName: "ruska rublja (1991.–1998.)", Symbol: "RUR"},
{Currency: "RWF", DisplayName: "ruandski franak", Symbol: "RWF"},
{Currency: "SAR", DisplayName: "saudijski rial", Symbol: "SAR"},
{Currency: "SBD", DisplayName: "solmonskootočni dolar", Symbol: "SBD"},
{Currency: "SCR", DisplayName: "sejšelska rupija", Symbol: "SCR"},
{Currency: "SDD", DisplayName: "sudanski dinar", Symbol: "SDD"},
{Currency: "SDG", DisplayName: "sudanska funta", Symbol: "SDG"},
{Currency: "SDP", DisplayName: "stara sudanska funta", Symbol: "SDP"},
{Currency: "SEK", DisplayName: "švedska kruna", Symbol: "SEK"},
{Currency: "SGD", DisplayName: "singapurski dolar", Symbol: "SGD"},
{Currency: "SHP", DisplayName: "svetohelenska funta", Symbol: "SHP"},
{Currency: "SIT", DisplayName: "slovenski tolar", Symbol: "SIT"},
{Currency: "SKK", DisplayName: "slovačka kruna", Symbol: "SKK"},
{Currency: "SLL", DisplayName: "sijeraleonski leone", Symbol: "SLL"},
{Currency: "SOS", DisplayName: "somalijski šiling", Symbol: "SOS"},
{Currency: "SRD", DisplayName: "surinamski dolar", Symbol: "SRD"},
{Currency: "SRG", DisplayName: "surinamski gulden", Symbol: "SRG"},
{Currency: "SSP", DisplayName: "južnosudanska funta", Symbol: "SSP"},
{Currency: "STD", DisplayName: "dobra Svetog Tome i Principa", Symbol: "STD"},
{Currency: "SUR", DisplayName: "sovjetska rublja", Symbol: "SUR"},
{Currency: "SVC", DisplayName: "salvadorski kolon", Symbol: "SVC"},
{Currency: "SYP", DisplayName: "sirijska funta", Symbol: "SYP"},
{Currency: "SZL", DisplayName: "svazi lilangeni", Symbol: "SZL"},
{Currency: "THB", DisplayName: "tajlandski baht", Symbol: "THB"},
{Currency: "TJR", DisplayName: "tajikistanska rublja", Symbol: "TJR"},
{Currency: "TJS", DisplayName: "tadžikistanski somoni", Symbol: "TJS"},
{Currency: "TMM", DisplayName: "turkmenistanski manat (1993.–2009.)", Symbol: "TMM"},
{Currency: "TMT", DisplayName: "turkmenistanski manat", Symbol: "TMT"},
{Currency: "TND", DisplayName: "tuniski dinar", Symbol: "TND"},
{Currency: "TOP", DisplayName: "tongaška pa’anga", Symbol: "TOP"},
{Currency: "TPE", DisplayName: "timorski eskudo", Symbol: "TPE"},
{Currency: "TRL", DisplayName: "turska lira (1922.–2005.)", Symbol: "TRL"},
{Currency: "TRY", DisplayName: "turska lira", Symbol: "TRY"},
{Currency: "TTD", DisplayName: "trininadtobaški dolar", Symbol: "TTD"},
{Currency: "TWD", DisplayName: "novotajvanski dolar", Symbol: "TWD"},
{Currency: "TZS", DisplayName: "tanzanijski šiling", Symbol: "TZS"},
{Currency: "UAH", DisplayName: "ukrajinska hrivnja", Symbol: "UAH"},
{Currency: "UAK", DisplayName: "ukrajinski karbovanet", Symbol: "UAK"},
{Currency: "UGS", DisplayName: "ugandski šiling (1966.–1987.)", Symbol: "UGS"},
{Currency: "UGX", DisplayName: "ugandski šiling", Symbol: "UGX"},
{Currency: "USD", DisplayName: "američki dolar", Symbol: "USD"},
{Currency: "USN", DisplayName: "američki dolar (sljedeći dan)", Symbol: "USN"},
{Currency: "USS", DisplayName: "američki dolar (isti dan)", Symbol: "USS"},
{Currency: "UYI", DisplayName: "urugvajski pezo en unidades indexadas", Symbol: "UYI"},
{Currency: "UYP", DisplayName: "urugvajski pezo (1975.–1993.)", Symbol: "UYP"},
{Currency: "UYU", DisplayName: "urugvajski pezo", Symbol: "UYU"},
{Currency: "UZS", DisplayName: "uzbekistanski som", Symbol: "UZS"},
{Currency: "VEB", DisplayName: "venezuelanski bolivar (1871.–2008.)", Symbol: "VEB"},
{Currency: "VEF", DisplayName: "venezuelanski bolivar", Symbol: "VEF"},
{Currency: "VND", DisplayName: "vijetnamski dong", Symbol: "VND"},
{Currency: "VNN", DisplayName: "vijetnamski dong (1978.–1985.)", Symbol: "VNN"},
{Currency: "VUV", DisplayName: "vanuatuški vatu", Symbol: "VUV"},
{Currency: "WST", DisplayName: "samoanska tala", Symbol: "WST"},
{Currency: "XAF", DisplayName: "CFA franak BEAC", Symbol: "FCFA"},
{Currency: "XAG", DisplayName: "srebro", Symbol: "XAG"},
{Currency: "XAU", DisplayName: "zlato", Symbol: "XAU"},
{Currency: "XBA", DisplayName: "Europska složena jedinica", Symbol: "XBA"},
{Currency: "XBB", DisplayName: "Europska monetarna jedinica", Symbol: "XBB"},
{Currency: "XBC", DisplayName: "europska obračunska jedinica (XBC)", Symbol: "XBC"},
{Currency: "XBD", DisplayName: "europska obračunska jedinica (XBD)", Symbol: "XBD"},
{Currency: "XCD", DisplayName: "istočnokaripski dolar", Symbol: "XCD"},
{Currency: "XDR", DisplayName: "posebna crtaća prava", Symbol: "XDR"},
{Currency: "XEU", DisplayName: "europska monetarna jedinica (ECU)", Symbol: "XEU"},
{Currency: "XFO", DisplayName: "francuski zlatni franak", Symbol: "XFO"},
{Currency: "XFU", DisplayName: "francuski UIC-franak", Symbol: "XFU"},
{Currency: "XOF", DisplayName: "CFA franak BCEAO", Symbol: "CFA"},
{Currency: "XPD", DisplayName: "paladij", Symbol: "XPD"},
{Currency: "XPF", DisplayName: "CFP franak", Symbol: "XPF"},
{Currency: "XPT", DisplayName: "platina", Symbol: "XPT"},
{Currency: "XRE", DisplayName: "RINET fondovi", Symbol: "XRE"},
{Currency: "XSU", DisplayName: "sukre", Symbol: "XSU"},
{Currency: "XTS", DisplayName: "ispitni kod valute", Symbol: "XTS"},
{Currency: "XUA", DisplayName: "obračunska jedinica ADB", Symbol: "XUA"},
{Currency: "XXX", DisplayName: "nepoznata valuta", Symbol: "XXX"},
{Currency: "YDD", DisplayName: "jemenski dinar", Symbol: "YDD"},
{Currency: "YER", DisplayName: "jemenski rial", Symbol: "YER"},
{Currency: "YUD", DisplayName: "jugoslavenski čvrsti dinar", Symbol: "YUD"},
{Currency: "YUM", DisplayName: "jugoslavenski novi dinar", Symbol: "YUM"},
{Currency: "YUN", DisplayName: "jugoslavenski konvertibilni dinar", Symbol: "YUN"},
{Currency: "YUR", DisplayName: "jugoslavenski reformirani dinar", Symbol: "YUR"},
{Currency: "ZAL", DisplayName: "južnoafrički rand (financijski)", Symbol: "ZAL"},
{Currency: "ZAR", DisplayName: "južnoafrički rand", Symbol: "ZAR"},
{Currency: "ZMK", DisplayName: "zambijska kvača (1968–2012)", Symbol: "ZMK"},
{Currency: "ZMW", DisplayName: "zambijska kvača", Symbol: "ZMW"},
{Currency: "ZRN", DisplayName: "zairski novi zair", Symbol: "ZRN"},
{Currency: "ZRZ", DisplayName: "zairski zair", Symbol: "ZRZ"},
{Currency: "ZWD", DisplayName: "zimbabveanski dolar (1980.–2008.)", Symbol: "ZWD"},
{Currency: "ZWL", DisplayName: "zimbabveanski dolar (2009)", Symbol: "ZWL"},
{Currency: "ZWR", DisplayName: "zimbabveanski dolar (2008)", Symbol: "ZWR"},
} | resources/locales/hr/currency.go | 0.511473 | 0.469338 | currency.go | starcoder |
package statefulset_spec
import (
apps "k8s.io/api/apps/v1"
v1 "k8s.io/api/core/v1"
"reactive-tech.io/kubegres/controllers/ctx"
"reflect"
)
// VolumeSpecEnforcer compares and enforces the custom volumes and
// volumeMounts declared in the Kubegres resource spec against a deployed
// StatefulSet.
type VolumeSpecEnforcer struct {
	kubegresContext ctx.KubegresContext
}
// CreateVolumeSpecEnforcer constructs a VolumeSpecEnforcer bound to the
// given Kubegres context.
func CreateVolumeSpecEnforcer(kubegresContext ctx.KubegresContext) VolumeSpecEnforcer {
	enforcer := VolumeSpecEnforcer{kubegresContext: kubegresContext}
	return enforcer
}
// GetSpecName returns the display name of the spec that this enforcer
// handles.
func (r *VolumeSpecEnforcer) GetSpecName() string {
	const specName = "Volume"
	return specName
}
// CheckForSpecDifference compares the custom volumes and volumeMounts
// currently deployed in the StatefulSet with those expected by the Kubegres
// spec, returning a populated StatefulSetSpecDifference for the first
// mismatch found (volumes first, then volumeMounts) or the zero value when
// everything matches.
func (r *VolumeSpecEnforcer) CheckForSpecDifference(statefulSet *apps.StatefulSet) StatefulSetSpecDifference {
	deployedVolumes := r.getCurrentCustomVolumes(statefulSet)
	specVolumes := r.kubegresContext.Kubegres.Spec.Volume.Volumes
	if !r.compareVolumes(deployedVolumes, specVolumes) {
		return StatefulSetSpecDifference{
			SpecName: "Volume.Volumes",
			Current:  r.volumesToString(deployedVolumes),
			Expected: r.volumesToString(specVolumes),
		}
	}

	deployedMounts := r.getCurrentCustomVolumeMounts(statefulSet)
	specMounts := r.kubegresContext.Kubegres.Spec.Volume.VolumeMounts
	if !r.compareVolumeMounts(deployedMounts, specMounts) {
		return StatefulSetSpecDifference{
			SpecName: "Volume.VolumeMounts",
			Current:  r.volumeMountsToString(deployedMounts),
			Expected: r.volumeMountsToString(specMounts),
		}
	}

	return StatefulSetSpecDifference{}
}
// EnforceSpec replaces the StatefulSet's custom volumes and volumeMounts
// with those declared in the Kubegres spec: it first removes every
// previously-added custom entry (reserved ones are kept) and then appends
// the currently expected ones, avoiding duplicates.
func (r *VolumeSpecEnforcer) EnforceSpec(statefulSet *apps.StatefulSet) (wasSpecUpdated bool, err error) {
	r.removeCustomVolumes(statefulSet)
	r.removeCustomVolumeMounts(statefulSet)

	expectedVolumes := r.kubegresContext.Kubegres.Spec.Volume.Volumes
	if expectedVolumes != nil {
		statefulSet.Spec.Template.Spec.Volumes = append(statefulSet.Spec.Template.Spec.Volumes, expectedVolumes...)
	}

	// BUG FIX: this was previously gated on the container's existing
	// VolumeMounts being non-nil rather than on the expected mounts from the
	// Kubegres spec, which is asymmetric with the Volumes handling above and
	// could skip adding the spec's mounts.
	expectedMounts := r.kubegresContext.Kubegres.Spec.Volume.VolumeMounts
	if expectedMounts != nil {
		container := &statefulSet.Spec.Template.Spec.Containers[0]
		container.VolumeMounts = append(container.VolumeMounts, expectedMounts...)
	}

	return true, nil
}
// OnSpecEnforcedSuccessfully is a post-enforcement hook; this enforcer has
// no follow-up work to do, so it always returns nil.
func (r *VolumeSpecEnforcer) OnSpecEnforcedSuccessfully(statefulSet *apps.StatefulSet) error {
	return nil
}
// compareVolumes reports whether the deployed custom volumes match the
// expected ones: same count, and every expected volume is present
// (order-insensitive, deep equality).
func (r *VolumeSpecEnforcer) compareVolumes(current []v1.Volume, expected []v1.Volume) bool {
	if len(current) != len(expected) {
		return false
	}
	for i := range expected {
		if !r.doesExpectedVolumeExist(expected[i], current) {
			return false
		}
	}
	return true
}
// doesExpectedVolumeExist reports whether the expected volume is deeply
// equal to any of the currently deployed custom volumes.
func (r *VolumeSpecEnforcer) doesExpectedVolumeExist(expected v1.Volume, current []v1.Volume) bool {
	for i := range current {
		if reflect.DeepEqual(expected, current[i]) {
			return true
		}
	}
	return false
}
// compareVolumeMounts reports whether the deployed custom volumeMounts match
// the expected ones: same count, and every expected mount is present
// (order-insensitive, deep equality).
func (r *VolumeSpecEnforcer) compareVolumeMounts(current []v1.VolumeMount, expected []v1.VolumeMount) bool {
	if len(current) != len(expected) {
		return false
	}
	for i := range expected {
		if !r.doesExpectedVolumeMountExist(expected[i], current) {
			return false
		}
	}
	return true
}
// doesExpectedVolumeMountExist reports whether the expected volumeMount is
// deeply equal to any of the currently deployed custom volumeMounts.
func (r *VolumeSpecEnforcer) doesExpectedVolumeMountExist(expected v1.VolumeMount, current []v1.VolumeMount) bool {
	for i := range current {
		if reflect.DeepEqual(expected, current[i]) {
			return true
		}
	}
	return false
}
// getCurrentCustomVolumes returns the StatefulSet's volumes whose names are
// not reserved by Kubegres, i.e. the ones added from the custom spec.
func (r *VolumeSpecEnforcer) getCurrentCustomVolumes(statefulSet *apps.StatefulSet) []v1.Volume {
	var customVolumes []v1.Volume
	for _, volume := range statefulSet.Spec.Template.Spec.Volumes {
		if r.kubegresContext.IsReservedVolumeName(volume.Name) {
			continue
		}
		customVolumes = append(customVolumes, volume)
	}
	return customVolumes
}
// getCurrentCustomVolumeMounts returns the first container's volumeMounts
// whose names are not reserved by Kubegres, i.e. the ones added from the
// custom spec.
func (r *VolumeSpecEnforcer) getCurrentCustomVolumeMounts(statefulSet *apps.StatefulSet) []v1.VolumeMount {
	var customMounts []v1.VolumeMount
	for _, mount := range statefulSet.Spec.Template.Spec.Containers[0].VolumeMounts {
		if r.kubegresContext.IsReservedVolumeName(mount.Name) {
			continue
		}
		customMounts = append(customMounts, mount)
	}
	return customMounts
}
// removeCustomVolumes deletes every non-reserved (custom) volume from the
// StatefulSet's pod spec, leaving reserved volumes untouched.
func (r *VolumeSpecEnforcer) removeCustomVolumes(statefulSet *apps.StatefulSet) {
	toRemove := r.getCurrentCustomVolumes(statefulSet)
	if len(toRemove) == 0 {
		return
	}

	// Work on a snapshot so deletions below cannot interfere with iteration.
	snapshot := make([]v1.Volume, len(toRemove))
	copy(snapshot, toRemove)

	podSpec := &statefulSet.Spec.Template.Spec
	for _, volume := range snapshot {
		if i := r.getIndexOfVolume(volume, podSpec.Volumes); i >= 0 {
			podSpec.Volumes = append(podSpec.Volumes[:i], podSpec.Volumes[i+1:]...)
		}
	}
}
// getIndexOfVolume returns the index of the first volume deeply equal to
// the target, or -1 when it is not present.
func (r *VolumeSpecEnforcer) getIndexOfVolume(target v1.Volume, volumes []v1.Volume) int {
	for i, volume := range volumes {
		if reflect.DeepEqual(volume, target) {
			return i
		}
	}
	return -1
}
// removeCustomVolumeMounts deletes every non-reserved (custom) volumeMount
// from the StatefulSet's first container, leaving reserved mounts untouched.
func (r *VolumeSpecEnforcer) removeCustomVolumeMounts(statefulSet *apps.StatefulSet) {
	toRemove := r.getCurrentCustomVolumeMounts(statefulSet)
	if len(toRemove) == 0 {
		return
	}

	// Work on a snapshot so deletions below cannot interfere with iteration.
	snapshot := make([]v1.VolumeMount, len(toRemove))
	copy(snapshot, toRemove)

	container := &statefulSet.Spec.Template.Spec.Containers[0]
	for _, mount := range snapshot {
		if i := r.getIndexOfVolumeMount(mount, container.VolumeMounts); i >= 0 {
			container.VolumeMounts = append(container.VolumeMounts[:i], container.VolumeMounts[i+1:]...)
		}
	}
}
// getIndexOfVolumeMount returns the index of the first volumeMount deeply
// equal to the target, or -1 when it is not present.
func (r *VolumeSpecEnforcer) getIndexOfVolumeMount(target v1.VolumeMount, volumeMounts []v1.VolumeMount) int {
	for i, mount := range volumeMounts {
		if reflect.DeepEqual(mount, target) {
			return i
		}
	}
	return -1
}
// volumesToString renders the given volumes as a single human-readable
// string, each entry followed by " - ", for use in difference reports.
func (r *VolumeSpecEnforcer) volumesToString(volumes []v1.Volume) string {
	rendered := ""
	for i := range volumes {
		rendered += volumes[i].String() + " - "
	}
	return rendered
}
func (r *VolumeSpecEnforcer) volumeMountsToString(volumeMounts []v1.VolumeMount) string {
toString := ""
for _, volumeMount := range volumeMounts {
toString += volumeMount.String() + " - "
}
return toString
} | controllers/spec/enforcer/statefulset_spec/VolumeSpecEnforcer.go | 0.659624 | 0.409398 | VolumeSpecEnforcer.go | starcoder |
package models
import (
"strconv"
"github.com/cuttle-ai/brain/log"
"github.com/cuttle-ai/octopus/interpreter"
"github.com/google/uuid"
"github.com/jinzhu/gorm"
)
/*
* This file contains the model implementation of node's db model
*/
// Metadata property keys stored in NodeMetadata.Prop for a node.
const (
	//NodeMetadataPropWord is the metadata property of a node for word
	NodeMetadataPropWord = "Word"
	//NodeMetadataPropName is the metadata property of a node for name
	NodeMetadataPropName = "Name"
	//NodeMetadataPropDimension is the metadata property of a node for dimension
	NodeMetadataPropDimension = "Dimension"
	//NodeMetadataPropMeasure is the metadata property of a node for measure
	NodeMetadataPropMeasure = "Measure"
	//NodeMetadataPropAggregationFn is the metadata property of a node for aggregation function
	NodeMetadataPropAggregationFn = "AggregationFn"
	//NodeMetadataPropDataType is the metadata property of a node for data type
	NodeMetadataPropDataType = "DataType"
	//NodeMetadataPropDescription is the metadata property of a node for description
	NodeMetadataPropDescription = "Description"
	//NodeMetadataPropDefaultDateFieldUID is the metadata property of a node for default date field uid
	NodeMetadataPropDefaultDateFieldUID = "DefaultDateFieldUID"
	//NodeMetadataPropDatastoreID is the metadata property of a node for giving the datastore to which the node belongs to
	NodeMetadataPropDatastoreID = "NodeMetadataPropDatastoreID"
	//NodeMetadataPropKBType is the metadata property of a knowledge base node for giving kb type of the kb node
	NodeMetadataPropKBType = "KBType"
	//NodeMetadataPropOperation is the metadata property of a operation node for giving the type of the operation
	NodeMetadataPropOperation = "Operation"
	//NodeMetadataPropDateFormat is the metadata property of a column's csv data if the given column is of data type date
	NodeMetadataPropDateFormat = "DateFormat"
)
// Well-known metadata values stored in NodeMetadata.Value for the property
// keys declared above (booleans, knowledge-base types and operator symbols).
const (
	//NodeMetadataPropValueTrue is the value to be put for true as metadata value
	NodeMetadataPropValueTrue = "true"
	//NodeMetadataPropValueFalse is the value to be put for false as metadata value
	NodeMetadataPropValueFalse = "false"
	//NodeMetadataPropValueSystemKB is the value to be put for SystemKB as KBType metadata value
	NodeMetadataPropValueSystemKB = "1"
	//NodeMetadataPropValueUserKB is the value to be put for UserKB as KBType metadata value
	NodeMetadataPropValueUserKB = "2"
	//NodeMetadataPropValueEqOperator is the value to be put for Equal operator as operation of operator node
	NodeMetadataPropValueEqOperator = "="
	//NodeMetadataPropValueNotEqOperator is the value to be put for Not Equal operator as operation of operator node
	NodeMetadataPropValueNotEqOperator = "<>"
	//NodeMetadataPropValueGreaterOperator is the value to be put for Greater than or Equal operator as operation of operator node
	NodeMetadataPropValueGreaterOperator = ">="
	//NodeMetadataPropValueLessOperator is the value to be put for Less than or Equal operator as operation of operator node
	NodeMetadataPropValueLessOperator = "<="
	//NodeMetadataPropValueContainsOperator is the value to be put for Contains operator as operation of operator node
	NodeMetadataPropValueContainsOperator = "HAS"
	//NodeMetadataPropValueLikeOperator is the value to be put for Like operator as operation of operator node
	NodeMetadataPropValueLikeOperator = "LIKE"
)
// Allow-lists consulted when interpreting column metadata: only values
// present in these sets are accepted for the AggregationFn and DataType
// properties (see ColumnNode).
var (
	//NodeMetadataAggregationFns is the map containing the supported aggregation functions
	NodeMetadataAggregationFns = map[string]struct{}{
		interpreter.AggregationFnAvg:   {},
		interpreter.AggregationFnCount: {},
		interpreter.AggregationFnSum:   {},
	}
	//NodeMetadataDataTypes is the map containing the supported datatypes
	NodeMetadataDataTypes = map[string]struct{}{
		interpreter.DataTypeDate:   {},
		interpreter.DataTypeFloat:  {},
		interpreter.DataTypeInt:    {},
		interpreter.DataTypeString: {},
	}
)
//Node represents a octopus node's db record
type Node struct {
	gorm.Model
	//UID is the unique id of the node
	UID uuid.UUID
	//Type of the node
	Type interpreter.Type
	//PUID is the unique id of the parent node
	PUID uuid.UUID
	//DatasetID is the id of the dataset to which the node belongs to
	DatasetID uint
	//NodeMetadatas holds the metadata corresponding to the node
	NodeMetadatas []NodeMetadata
	//Parent denotes the parent for the node (in-memory only; gorm:"-" keeps it out of the database)
	Parent *Node `gorm:"-"`
	//DefaultDateField holds the default date field if any for a table (in-memory only; not persisted)
	DefaultDateField *interpreter.ColumnNode `gorm:"-"`
}
//NodeMetadata stores the metadata associated with a node as a key/value
//pair (see the NodeMetadataProp* constants for the supported keys).
type NodeMetadata struct {
	gorm.Model
	//NodeID is the id of the node to which the metadata belongs to
	NodeID uint
	//DatasetID is the id of the dataset to which the node belongs to
	DatasetID uint
	//Prop stores the metadata property
	Prop string
	//Value stores the metadata value
	Value string
}
// InterpreterNode converts the db node to the corresponding octopus
// interpreter node. The boolean result is false when the node's Type has no
// interpreter equivalent.
func (n Node) InterpreterNode() (interpreter.Node, bool) {
	switch n.Type {
	case interpreter.Column:
		node := n.ColumnNode()
		return &node, true
	case interpreter.Table:
		node := n.TableNode()
		return &node, true
	case interpreter.KnowledgeBase:
		node := n.KnowledgeBaseNode()
		return &node, true
	case interpreter.Operator:
		node := n.OperatorNode()
		return &node, true
	}
	return nil, false
}
//ColumnNode returns the column node converted form of the node.
//Unknown aggregation functions / datatypes fall back to the defaults
//(count and string). The parent table node is attached only when the
//in-memory Parent matches PUID and is itself a table.
func (n Node) ColumnNode() interpreter.ColumnNode {
	//defaults used when the corresponding metadata is absent or invalid
	dT := interpreter.DataTypeString
	aggFn := interpreter.AggregationFnCount
	mes := false
	dim := false
	name := ""
	word := ""
	description := ""
	dateFormat := ""
	for _, v := range n.NodeMetadatas {
		switch v.Prop {
		case NodeMetadataPropWord:
			word = v.Value
		case NodeMetadataPropName:
			name = v.Value
		case NodeMetadataPropDimension:
			if v.Value == NodeMetadataPropValueTrue {
				dim = true
			}
		case NodeMetadataPropMeasure:
			if v.Value == NodeMetadataPropValueTrue {
				mes = true
			}
		case NodeMetadataPropAggregationFn:
			//only accept supported aggregation functions
			if _, ok := NodeMetadataAggregationFns[v.Value]; ok {
				aggFn = v.Value
			}
		case NodeMetadataPropDataType:
			//only accept supported datatypes
			if _, ok := NodeMetadataDataTypes[v.Value]; ok {
				dT = v.Value
			}
		case NodeMetadataPropDescription:
			description = v.Value
		case NodeMetadataPropDateFormat:
			dateFormat = v.Value
		}
	}
	result := interpreter.ColumnNode{
		UID:           n.UID.String(),
		Word:          []rune(word),
		PUID:          n.PUID.String(),
		Name:          name,
		Children:      []interpreter.ValueNode{},
		Dimension:     dim,
		Measure:       mes,
		AggregationFn: aggFn,
		DataType:      dT,
		Description:   description,
		DateFormat:    dateFormat,
	}
	//attach the parent table node when the loaded Parent really is this
	//column's parent and is a table
	if n.Parent != nil && n.PUID.String() == n.Parent.UID.String() && n.Parent.Type == interpreter.Table {
		pN := n.Parent.TableNode()
		result.PN = &pN
	}
	return result
}
//FromColumn converts the interpreter column node to node.
//The receiver's existing metadata rows are reused (preserving their DB ids);
//when the receiver has none, a fresh row is created for every column property.
//uuid.Parse failures are deliberately ignored: an invalid UID/PUID maps to the
//zero UUID.
func (n Node) FromColumn(c interpreter.ColumnNode) Node {
	//copy so the receiver's slice is not mutated
	metadata := make([]NodeMetadata, len(n.NodeMetadatas))
	copy(metadata, n.NodeMetadatas)
	if len(metadata) == 0 {
		metadata = append(metadata,
			NodeMetadata{Prop: NodeMetadataPropWord},
			NodeMetadata{Prop: NodeMetadataPropName},
			NodeMetadata{Prop: NodeMetadataPropDimension},
			NodeMetadata{Prop: NodeMetadataPropMeasure},
			NodeMetadata{Prop: NodeMetadataPropAggregationFn},
			NodeMetadata{Prop: NodeMetadataPropDataType},
			NodeMetadata{Prop: NodeMetadataPropDescription},
			NodeMetadata{Prop: NodeMetadataPropDateFormat},
		)
	}
	for i := range metadata {
		metadata[i].DatasetID = n.DatasetID
		switch metadata[i].Prop {
		case NodeMetadataPropWord:
			metadata[i].Value = string(c.Word)
		case NodeMetadataPropName:
			metadata[i].Value = c.Name
		case NodeMetadataPropDimension:
			if c.Dimension {
				metadata[i].Value = NodeMetadataPropValueTrue
			} else {
				metadata[i].Value = NodeMetadataPropValueFalse
			}
		case NodeMetadataPropMeasure:
			if c.Measure {
				metadata[i].Value = NodeMetadataPropValueTrue
			} else {
				metadata[i].Value = NodeMetadataPropValueFalse
			}
		case NodeMetadataPropAggregationFn:
			//unsupported aggregation functions fall back to count
			if _, ok := NodeMetadataAggregationFns[c.AggregationFn]; ok {
				metadata[i].Value = c.AggregationFn
			} else {
				metadata[i].Value = interpreter.AggregationFnCount
			}
		case NodeMetadataPropDataType:
			//unsupported datatypes fall back to string
			if _, ok := NodeMetadataDataTypes[c.DataType]; ok {
				metadata[i].Value = c.DataType
			} else {
				metadata[i].Value = interpreter.DataTypeString
			}
		case NodeMetadataPropDescription:
			metadata[i].Value = c.Description
		case NodeMetadataPropDateFormat:
			metadata[i].Value = c.DateFormat
		}
	}
	uid, _ := uuid.Parse(c.UID)
	puid, _ := uuid.Parse(c.PUID)
	return Node{
		Model:         n.Model,
		UID:           uid,
		Type:          c.Type(),
		PUID:          puid,
		DatasetID:     n.DatasetID,
		NodeMetadatas: metadata,
	}
}
//TableNode returns the table node converted form of the node.
//Missing metadata yields zero values; a malformed datastore id metadata
//value leaves DatastoreID as 0.
func (n Node) TableNode() interpreter.TableNode {
	name := ""
	word := ""
	description := ""
	defaultDateFieldUID := ""
	datastoreID := 0
	for _, v := range n.NodeMetadatas {
		switch v.Prop {
		case NodeMetadataPropWord:
			word = v.Value
		case NodeMetadataPropName:
			name = v.Value
		case NodeMetadataPropDefaultDateFieldUID:
			defaultDateFieldUID = v.Value
		case NodeMetadataPropDescription:
			description = v.Value
		case NodeMetadataPropDatastoreID:
			//Atoi error intentionally ignored: invalid values map to 0
			datastoreID, _ = strconv.Atoi(v.Value)
		}
	}
	return interpreter.TableNode{
		UID:                 n.UID.String(),
		Word:                []rune(word),
		PUID:                n.PUID.String(),
		Name:                name,
		Children:            []interpreter.ColumnNode{},
		DefaultDateFieldUID: defaultDateFieldUID,
		DefaultDateField:    n.DefaultDateField,
		Description:         description,
		DatastoreID:         uint(datastoreID),
	}
}
//FromTable converts the interpreter table node to node.
//The receiver's existing metadata rows are reused (preserving their DB ids);
//when the receiver has none, a fresh row is created for every table property.
//uuid.Parse failures are deliberately ignored: an invalid UID/PUID maps to the
//zero UUID.
func (n Node) FromTable(t interpreter.TableNode) Node {
	//copy so the receiver's slice is not mutated
	metadata := make([]NodeMetadata, len(n.NodeMetadatas))
	copy(metadata, n.NodeMetadatas)
	if len(metadata) == 0 {
		metadata = append(metadata,
			NodeMetadata{Prop: NodeMetadataPropWord},
			NodeMetadata{Prop: NodeMetadataPropName},
			NodeMetadata{Prop: NodeMetadataPropDefaultDateFieldUID},
			NodeMetadata{Prop: NodeMetadataPropDescription},
			NodeMetadata{Prop: NodeMetadataPropDatastoreID},
		)
	}
	for i := range metadata {
		metadata[i].DatasetID = n.DatasetID
		switch metadata[i].Prop {
		case NodeMetadataPropWord:
			metadata[i].Value = string(t.Word)
		case NodeMetadataPropName:
			metadata[i].Value = t.Name
		case NodeMetadataPropDescription:
			metadata[i].Value = t.Description
		case NodeMetadataPropDefaultDateFieldUID:
			metadata[i].Value = t.DefaultDateFieldUID
		case NodeMetadataPropDatastoreID:
			metadata[i].Value = strconv.Itoa(int(t.DatastoreID))
		}
	}
	uid, _ := uuid.Parse(t.UID)
	puid, _ := uuid.Parse(t.PUID)
	return Node{
		Model:            n.Model,
		UID:              uid,
		Type:             t.Type(),
		PUID:             puid,
		DatasetID:        n.DatasetID,
		NodeMetadatas:    metadata,
		DefaultDateField: t.DefaultDateField,
	}
}
//KnowledgeBaseNode returns the knowledgebase node converted form of the node.
//The KB type defaults to the system KB and becomes the user KB only when the
//kb-type metadata explicitly carries the user-KB value.
func (n Node) KnowledgeBaseNode() interpreter.KnowledgeBaseNode {
	name := ""
	word := ""
	description := ""
	kbType := interpreter.SystemKB
	for _, v := range n.NodeMetadatas {
		switch v.Prop {
		case NodeMetadataPropWord:
			word = v.Value
		case NodeMetadataPropName:
			name = v.Value
		case NodeMetadataPropDescription:
			description = v.Value
		case NodeMetadataPropKBType:
			if v.Value == NodeMetadataPropValueUserKB {
				kbType = interpreter.UserKB
			}
		}
	}
	return interpreter.KnowledgeBaseNode{
		UID:         n.UID.String(),
		Word:        []rune(word),
		Name:        name,
		Children:    []interpreter.Node{},
		Description: description,
		KBType:      kbType,
	}
}
//FromKnowledgeBase converts the interpreter knowledgebase node to node.
//The receiver's existing metadata rows are reused (preserving their DB ids);
//when the receiver has none, a fresh row is created for every KB property.
//NOTE(review): unlike FromColumn/FromTable, the returned Node does not carry
//DatasetID (only the metadata rows do) — confirm this is intentional.
//NOTE(review): the KB type is stored as strconv.Itoa(int(k.KBType)); confirm
//that this matches NodeMetadataPropValueUserKB as read back by
//KnowledgeBaseNode.
func (n Node) FromKnowledgeBase(k interpreter.KnowledgeBaseNode) Node {
	//copy so the receiver's slice is not mutated
	metadata := make([]NodeMetadata, len(n.NodeMetadatas))
	copy(metadata, n.NodeMetadatas)
	if len(metadata) == 0 {
		metadata = append(metadata,
			NodeMetadata{Prop: NodeMetadataPropWord},
			NodeMetadata{Prop: NodeMetadataPropName},
			NodeMetadata{Prop: NodeMetadataPropDescription},
			NodeMetadata{Prop: NodeMetadataPropKBType},
		)
	}
	for i := range metadata {
		metadata[i].DatasetID = n.DatasetID
		switch metadata[i].Prop {
		case NodeMetadataPropWord:
			metadata[i].Value = string(k.Word)
		case NodeMetadataPropName:
			metadata[i].Value = k.Name
		case NodeMetadataPropDescription:
			metadata[i].Value = k.Description
		case NodeMetadataPropKBType:
			metadata[i].Value = strconv.Itoa(int(k.KBType))
		}
	}
	//uuid.Parse failure intentionally ignored: invalid UID maps to zero UUID
	uid, _ := uuid.Parse(k.UID)
	return Node{
		Model:         n.Model,
		UID:           uid,
		Type:          k.Type(),
		NodeMetadatas: metadata,
	}
}
//OperatorNode returns the operator node converted form of the node.
//The parent interpreter node is attached only when the in-memory Parent
//matches PUID and converts successfully.
func (n Node) OperatorNode() interpreter.OperatorNode {
	word := ""
	operation := ""
	for _, v := range n.NodeMetadatas {
		switch v.Prop {
		case NodeMetadataPropWord:
			word = v.Value
		case NodeMetadataPropOperation:
			//BUG FIX: the operation used to be matched against
			//NodeMetadataPropDimension (a copy-paste from ColumnNode), while
			//FromOperatorNode stores it under NodeMetadataPropOperation — so
			//the stored operation was never read back.
			switch v.Value {
			case NodeMetadataPropValueEqOperator:
				operation = interpreter.EqOperator
			case NodeMetadataPropValueNotEqOperator:
				operation = interpreter.NotEqOperator
			case NodeMetadataPropValueGreaterOperator:
				operation = interpreter.GreaterOperator
			case NodeMetadataPropValueLessOperator:
				operation = interpreter.LessOperator
			case NodeMetadataPropValueContainsOperator:
				operation = interpreter.ContainsOperator
			case NodeMetadataPropValueLikeOperator:
				operation = interpreter.LikeOperator
			}
		}
	}
	result := interpreter.OperatorNode{
		UID:       n.UID.String(),
		Word:      []rune(word),
		PUID:      n.PUID.String(),
		Operation: operation,
	}
	if n.Parent != nil && n.PUID.String() == n.Parent.UID.String() {
		pN, ok := n.Parent.InterpreterNode()
		if ok {
			result.PN = pN
		}
	}
	return result
}
//FromOperatorNode converts the interpreter operator node to node.
//The receiver's existing metadata rows are reused (preserving their DB ids);
//when the receiver has none, a fresh row is created for the word and
//operation properties.
//NOTE(review): the returned Node does not carry DatasetID (only the metadata
//rows do) — confirm this is intentional.
func (n Node) FromOperatorNode(o interpreter.OperatorNode) Node {
	//copy so the receiver's slice is not mutated
	metadata := make([]NodeMetadata, len(n.NodeMetadatas))
	copy(metadata, n.NodeMetadatas)
	if len(metadata) == 0 {
		metadata = append(metadata,
			NodeMetadata{Prop: NodeMetadataPropWord},
			NodeMetadata{Prop: NodeMetadataPropOperation},
		)
	}
	for i := range metadata {
		metadata[i].DatasetID = n.DatasetID
		switch metadata[i].Prop {
		case NodeMetadataPropWord:
			metadata[i].Value = string(o.Word)
		case NodeMetadataPropOperation:
			metadata[i].Value = o.Operation
		}
	}
	//uuid.Parse failures intentionally ignored: invalid ids map to zero UUIDs
	uid, _ := uuid.Parse(o.UID)
	puid, _ := uuid.Parse(o.PUID)
	return Node{
		Model:         n.Model,
		UID:           uid,
		Type:          o.Type(),
		PUID:          puid,
		NodeMetadatas: metadata,
	}
}
//UpdateNodeMetadata updates the given node metadata. If the node metadata is not created, will create the same
func UpdateNodeMetadata(l log.Log, conn *gorm.DB, metadata []NodeMetadata) error {
/*
* We will begin the transaction
* Will iterate through the node metadata
* And update the node metadata
*/
//starting the transaction
tx := conn.Begin()
defer func() {
if r := recover(); r != nil {
tx.Rollback()
}
}()
if err := tx.Error; err != nil {
return err
}
//iterating through the metadata
for _, v := range metadata {
//and updating the metadata
err := tx.Where(" id = ? and dataset_id = ?", v.ID, v.DatasetID).Save(&v).Error
if err != nil {
//error while updating the nodemetadata
l.Error("error while updating the node metadata with ID", v.ID)
tx.Rollback()
return err
}
}
return nil
} | models/node.go | 0.617513 | 0.540257 | node.go | starcoder |
package incrdelaunay
import (
"math"
)
// CircumcircleGrid is a data structure that uses spatial partitioning to allow
// fast operations involving multiple Triangle's and their Circumcircle's.
type CircumcircleGrid struct {
	// triangles[x][y] holds the indices of triangles whose circumcircle
	// overlaps grid cell (x, y), where x < cols and y < rows.
	triangles [][][]uint16
	cols, rows int
	rowPixels, colPixels float64 // The number of pixels per row and column
}

// NewCircumcircleGrid returns a new grid with a specified number of columns
// and rows covering a w x h pixel area.
func NewCircumcircleGrid(cols, rows, w, h int) CircumcircleGrid {
	c := CircumcircleGrid{}
	c.rows = rows
	c.cols = cols
	c.colPixels = float64(w) / float64(cols)
	c.rowPixels = float64(h) / float64(rows)
	// BUG FIX: every access in this file indexes the grid as triangles[x][y]
	// with x < cols and y < rows, but the grid used to be allocated as
	// [rows][cols], which panicked (index out of range) whenever cols != rows.
	c.triangles = make([][][]uint16, c.cols)
	for i := range c.triangles {
		c.triangles[i] = make([][]uint16, c.rows)
	}
	return c
}
// cellRange returns the half-open cell range [x0, x1) x [y0, y1) covered by a
// circle centred at (cX, cY) with the given radius, clamped to the grid.
// It factors out the bounding-box computation previously duplicated in
// AddTriangle and RemoveTriangle; both must use it so additions and removals
// touch exactly the same cells.
func (c *CircumcircleGrid) cellRange(cX, cY, radius float64) (x0, y0, x1, y1 int) {
	x0 = int((cX - radius) / c.colPixels)
	y0 = int((cY - radius) / c.rowPixels)
	x1 = int(math.Ceil((cX + radius) / c.colPixels))
	y1 = int(math.Ceil((cY + radius) / c.rowPixels))
	if x0 < 0 {
		x0 = 0
	}
	if y0 < 0 {
		y0 = 0
	}
	if x1 > c.cols {
		x1 = c.cols
	}
	if y1 > c.rows {
		y1 = c.rows
	}
	return x0, y0, x1, y1
}

// AddTriangle adds a Triangle with an index to the grid: the index is recorded
// in every cell that the triangle's circumcircle (padded by a small epsilon)
// may intersect.
func (c *CircumcircleGrid) AddTriangle(t Triangle, index uint16) {
	radius := float64(t.Circumcircle.Radius) + 0.001
	x0, y0, x1, y1 := c.cellRange(float64(t.Circumcircle.cX), float64(t.Circumcircle.cY), radius)
	for x := x0; x < x1; x++ {
		col := c.triangles[x]
		for y := y0; y < y1; y++ {
			col[y] = append(col[y], index)
		}
	}
}

// RemoveTriangle removes a triangle from the grid by swap-deleting its index
// from every cell its circumcircle covers (order within a cell is not
// preserved).
func (c *CircumcircleGrid) RemoveTriangle(tri Triangle, index uint16) {
	radius := float64(tri.Circumcircle.Radius) + 0.001
	x0, y0, x1, y1 := c.cellRange(float64(tri.Circumcircle.cX), float64(tri.Circumcircle.cY), radius)
	for x := x0; x < x1; x++ {
		col := c.triangles[x]
		for y := y0; y < y1; y++ {
			for i, t := range col[y] {
				if t == index {
					// unordered removal: move the last element into place
					in := len(col[y]) - 1
					col[y][i] = col[y][in]
					col[y] = col[y][:in]
					break
				}
			}
		}
	}
}
// HasPoint reports whether any triangle stored in the grid cell containing p
// has p as a vertex. triangles is the backing array the stored indices refer
// to.
func (c CircumcircleGrid) HasPoint(p Point, triangles []Triangle) bool {
	// Find which box of the grid the point falls into; points exactly on the
	// right/bottom edge map into the last cell.
	x := int(math.Floor(float64(p.X) / c.colPixels))
	y := int(math.Floor(float64(p.Y) / c.rowPixels))
	if x == c.cols {
		x = c.cols - 1
	}
	if y == c.rows {
		y = c.rows - 1
	}
	for _, t := range c.triangles[x][y] {
		tri := triangles[t]
		if tri.A.X == -1 {
			// A.X == -1 appears to mark a deleted triangle slot; the grid
			// must never reference one (was a debug panic "UH OH").
			panic("incrdelaunay: circumcircle grid references a deleted triangle")
		}
		if tri.HasVertex(p) {
			return true
		}
	}
	return false
}
// RemoveCircumcirclesThatContain removes all triangles whose circumcircle
// contains (or touches, per the inCircle sign convention used here) the point
// p, calling contains with each removed triangle's index.
// triangles is the backing array the stored indices refer to.
func (c CircumcircleGrid) RemoveCircumcirclesThatContain(p Point, triangles []Triangle, contains func(i uint16)) {
	// Find which box of the grid the point falls into; points exactly on the
	// right/bottom edge map into the last cell.
	x := int(math.Floor(float64(p.X) / c.colPixels))
	y := int(math.Floor(float64(p.Y) / c.rowPixels))
	if x == c.cols {
		x = c.cols - 1
	}
	if y == c.rows {
		y = c.rows - 1
	}
	// NOTE: group aliases the backing array of c.triangles[x][y]. The call to
	// RemoveTriangle below swap-deletes from that same backing array, so after
	// a removal the element that was at the end now occupies index i;
	// decrementing i and size makes the loop revisit it. Do not "simplify"
	// this into a range loop.
	group := c.triangles[x][y]
	size := len(group)
	for i := 0; i < size; i++ {
		t := group[i]
		tri := triangles[t]
		if tri.A.X == -1 {
			// A.X == -1 appears to mark a deleted triangle slot; the grid
			// must never reference one.
			panic("UH OH")
		}
		if inCircle(int64(tri.A.X), int64(tri.A.Y), int64(tri.B.X), int64(tri.B.Y), int64(tri.C.X), int64(tri.C.Y), int64(p.X), int64(p.Y)) >= 0 {
			contains(t)
			c.RemoveTriangle(tri, t)
			i--
			size--
		}
	}
}
// RemoveThatHasVertex removes all triangles (in the grid cell containing p)
// that have p as a vertex, calling contains with each removed triangle's
// index. triangles is the backing array the stored indices refer to.
func (c CircumcircleGrid) RemoveThatHasVertex(p Point, triangles []Triangle, contains func(i uint16)) {
	// Find which box of the grid the point falls into; points exactly on the
	// right/bottom edge map into the last cell.
	x := int(math.Floor(float64(p.X) / c.colPixels))
	y := int(math.Floor(float64(p.Y) / c.rowPixels))
	if x == c.cols {
		x = c.cols - 1
	}
	if y == c.rows {
		y = c.rows - 1
	}
	// NOTE: group aliases the backing array of c.triangles[x][y]. The call to
	// RemoveTriangle below swap-deletes from that same backing array, so after
	// a removal the element that was at the end now occupies index i;
	// decrementing i and size makes the loop revisit it. Do not "simplify"
	// this into a range loop.
	group := c.triangles[x][y]
	size := len(group)
	for i := 0; i < size; i++ {
		t := group[i]
		tri := triangles[t]
		if tri.HasVertex(p) {
			contains(t)
			c.RemoveTriangle(tri, t)
			i--
			size--
		}
	}
}
// Set sets a CircumcircleGrid to another CircumcircleGrid.
func (c *CircumcircleGrid) Set(other *CircumcircleGrid) {
for x, col := range c.triangles {
for y := range col {
c.triangles[x][y] = c.triangles[x][y][:cap(c.triangles[x][y])]
if len(c.triangles[x][y]) > len(other.triangles[x][y]) {
c.triangles[x][y] = c.triangles[x][y][:len(other.triangles[x][y])]
} else if len(c.triangles[x][y]) < len(other.triangles[x][y]) {
c.triangles[x][y] = make([]uint16, len(other.triangles[x][y]))
}
copy(c.triangles[x][y], other.triangles[x][y])
}
}
} | triangulation/incrdelaunay/grid.go | 0.79162 | 0.733583 | grid.go | starcoder |
package checkdigit
import "errors"
type (
	// A Verifier reports whether a code is valid according to the implemented
	// algorithm or calculator.
	Verifier interface {
		Verify(code string) bool
	}
	// A Generator generates a check digit for a seed by the implemented
	// algorithm or calculator.
	Generator interface {
		Generate(seed string) (int, error)
	}
	// A Provider combines the Verifier and Generator interfaces.
	Provider interface {
		Verifier
		Generator
	}
)

// ErrInvalidArgument is returned when a wrong argument is given (e.g. a seed
// containing non-digit characters or of invalid length).
var ErrInvalidArgument = errors.New("checkdigit: invalid argument")
// isNotNumber reports whether n is not an ASCII decimal digit ('0'–'9').
func isNotNumber(n rune) bool {
	return !('0' <= n && n <= '9')
}
// NewLuhn returns a new Provider that implemented the Luhn algorithm.
func NewLuhn() Provider {
	return &luhn{}
}

// NewDamm returns a new Provider that implemented the Damm algorithm.
// The matrix is the algorithm's 10x10 operation table.
func NewDamm() Provider {
	return &damm{
		matrix: [][]int{
			{0, 3, 1, 7, 5, 9, 8, 6, 4, 2},
			{7, 0, 9, 2, 1, 5, 4, 8, 6, 3},
			{4, 2, 0, 6, 8, 7, 1, 3, 5, 9},
			{1, 7, 5, 0, 9, 8, 3, 4, 2, 6},
			{6, 1, 2, 3, 0, 4, 5, 9, 7, 8},
			{3, 6, 7, 4, 2, 0, 9, 5, 8, 1},
			{5, 8, 6, 9, 7, 2, 0, 1, 3, 4},
			{8, 9, 4, 5, 3, 6, 2, 0, 1, 7},
			{9, 4, 3, 8, 6, 1, 7, 2, 0, 5},
			{2, 5, 8, 1, 4, 3, 6, 7, 9, 0},
		},
	}
}

// NewVerhoeff returns a new Provider that implemented the Verhoeff algorithm,
// using its multiplication (d), permutation (p) and inverse (inv) tables.
func NewVerhoeff() Provider {
	return &verhoeff{
		multiplication: [][]int{
			{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
			{1, 2, 3, 4, 0, 6, 7, 8, 9, 5},
			{2, 3, 4, 0, 1, 7, 8, 9, 5, 6},
			{3, 4, 0, 1, 2, 8, 9, 5, 6, 7},
			{4, 0, 1, 2, 3, 9, 5, 6, 7, 8},
			{5, 9, 8, 7, 6, 0, 4, 3, 2, 1},
			{6, 5, 9, 8, 7, 1, 0, 4, 3, 2},
			{7, 6, 5, 9, 8, 2, 1, 0, 4, 3},
			{8, 7, 6, 5, 9, 3, 2, 1, 0, 4},
			{9, 8, 7, 6, 5, 4, 3, 2, 1, 0},
		},
		permutation: [][]int{
			{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
			{1, 5, 7, 6, 2, 8, 3, 0, 9, 4},
			{5, 8, 0, 3, 7, 9, 6, 1, 4, 2},
			{8, 9, 1, 6, 0, 4, 3, 5, 2, 7},
			{9, 4, 5, 3, 1, 2, 6, 8, 7, 0},
			{4, 2, 8, 6, 5, 7, 3, 9, 0, 1},
			{2, 7, 9, 3, 8, 0, 6, 4, 1, 5},
			{7, 0, 4, 6, 9, 1, 3, 2, 5, 8},
		},
		inverse: []int{0, 4, 3, 2, 1, 5, 6, 7, 8, 9},
	}
}
// NewISBN10 returns a new Provider that implemented modulus 11 weight 10 to 2 calculator.
func NewISBN10() Provider {
	return &isbn10{}
}

// NewISBN13 returns a new Provider that implemented modulus 10 weight 3 calculator.
func NewISBN13() Provider {
	return &isbn13{}
}

// NewEAN8 returns a new Provider that implemented GTIN-8 with position correction calculator.
func NewEAN8() Provider {
	return &gtin{
		digit:   8,
		posCorr: true,
	}
}

// NewEAN13 returns a new Provider that implemented GTIN-13 with position correction calculator.
func NewEAN13() Provider {
	return &gtin{
		digit:   13,
		posCorr: true,
	}
}

// NewJAN8 returns a new Provider that implemented GTIN-8 with position
// correction calculator. JAN-8 is identical to EAN-8.
func NewJAN8() Provider {
	return &gtin{
		digit:   8,
		posCorr: true,
	}
}

// NewJAN13 returns a new Provider that implemented GTIN-13 with position
// correction calculator. JAN-13 is identical to EAN-13.
func NewJAN13() Provider {
	return &gtin{
		digit:   13,
		posCorr: true,
	}
}

// NewITF returns a new Provider that implemented GTIN-14 calculator.
func NewITF() Provider {
	return &gtin{
		digit: 14,
	}
}

// NewUPC returns a new Provider that implemented GTIN-12 with position correction calculator.
func NewUPC() Provider {
	return &gtin{
		digit:   12,
		posCorr: true,
	}
}

// NewSSCC returns a new Provider that implemented GTIN-18 calculator.
func NewSSCC() Provider {
	return &gtin{
		digit: 18,
	}
}
package tabulate
import (
"encoding"
"fmt"
"reflect"
"sort"
"strings"
)
// Flags control how reflection tabulation operates on different
// values.
type Flags int
// Flag values for reflection tabulation.
const (
OmitEmpty Flags = 1 << iota
InheritHeaders
)
const nilLabel = "<nil>"
// Reflect tabulates the value into the tabulation object. The flags
// control how different values are handled. The tags lists element
// tags which are included in reflection. If the element does not have
// tabulation tag, then it is always included in tabulation.
func Reflect(tab *Tabulate, flags Flags, tags []string, v interface{}) error {
tagMap := make(map[string]bool)
for _, tag := range tags {
tagMap[tag] = true
}
value := reflect.ValueOf(v)
// Follows pointers.
for value.Type().Kind() == reflect.Ptr {
if value.IsZero() {
return nil
}
value = reflect.Indirect(value)
}
if value.Type().Kind() == reflect.Struct {
return reflectStruct(tab, flags, tagMap, value)
}
if value.Type().Kind() == reflect.Map {
return reflectMap(tab, flags, tagMap, value)
}
data, err := reflectValue(tab, flags, tagMap, value)
if err != nil {
return err
}
row := tab.Row()
row.Column("")
row.ColumnData(data)
return nil
}
// Array tabulates the argument v into rows and columns. If the tab
// defines header columns, those will be used. Otherwise the first row
// of v defines the header columns.
func Array(tab *Tabulate, v [][]interface{}) (*Tabulate, error) {
flags := OmitEmpty
tags := make(map[string]bool)
if len(tab.Headers) == 0 {
if len(v) == 0 {
return tab, nil
}
for _, c := range v[0] {
data, err := reflectValue(tab, flags, tags, reflect.ValueOf(c))
if err != nil {
return nil, err
}
tab.HeaderData(data)
}
v = v[1:]
}
for _, r := range v {
row := tab.Row()
for _, c := range r {
data, err := reflectValue(tab, flags, tags, reflect.ValueOf(c))
if err != nil {
return nil, err
}
row.ColumnData(data)
}
}
return tab, nil
}
func reflectValue(tab *Tabulate, flags Flags, tags map[string]bool,
value reflect.Value) (Data, error) {
if value.CanInterface() {
switch v := value.Interface().(type) {
case encoding.TextMarshaler:
data, err := v.MarshalText()
if err != nil {
return nil, err
}
return NewLinesData([]string{string(data)}), nil
}
}
// Resolve interfaces.
for value.Type().Kind() == reflect.Interface {
if value.IsZero() {
if flags&OmitEmpty == 0 {
return NewLinesData([]string{nilLabel}), nil
}
return NewLinesData(nil), nil
}
value = value.Elem()
}
// Follow pointers.
for value.Type().Kind() == reflect.Ptr {
if value.IsZero() {
if flags&OmitEmpty == 0 {
return NewLinesData([]string{nilLabel}), nil
}
}
value = reflect.Indirect(value)
}
switch value.Type().Kind() {
case reflect.Bool:
return NewValue(value.Bool()), nil
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return NewValue(value.Int()), nil
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
reflect.Uint64:
return NewValue(value.Uint()), nil
case reflect.Float32, reflect.Float64:
return NewValue(value.Float()), nil
case reflect.Map:
if value.Len() > 0 || flags&OmitEmpty == 0 {
sub := tab.Clone()
if flags&InheritHeaders == 0 {
sub.Headers = nil
}
err := reflectMap(sub, flags, tags, value)
if err != nil {
return nil, err
}
return sub, nil
}
return NewLinesData(nil), nil
case reflect.String:
text := value.String()
lines := strings.Split(strings.TrimRight(text, "\n"), "\n")
return NewLinesData(lines), nil
case reflect.Slice:
// Check slice element type.
switch value.Type().Elem().Kind() {
case reflect.Uint8:
return reflectByteSliceValue(tab, flags, tags, value)
case reflect.Int, reflect.Uint:
return reflectSliceValue(tab, flags, tags, 40, value)
default:
return reflectSliceValue(tab, flags, tags, 0, value)
}
case reflect.Struct:
sub := tab.Clone()
if flags&InheritHeaders == 0 {
sub.Headers = nil
}
err := reflectStruct(sub, flags, tags, value)
if err != nil {
return nil, err
}
return sub, nil
default:
text := value.String()
if len(text) == 0 && flags&OmitEmpty == 1 {
return NewLinesData(nil), nil
}
return NewLinesData([]string{text}), nil
}
}
func reflectByteSliceValue(tab *Tabulate, flags Flags, tags map[string]bool,
value reflect.Value) (Data, error) {
arr, ok := value.Interface().([]byte)
if !ok {
return nil, fmt.Errorf("reflectByteSliceValue called for %T",
value.Type().Kind())
}
const lineLength = 32
var lines []string
for i := 0; i < len(arr); i += lineLength {
l := len(arr) - i
if l > lineLength {
l = lineLength
}
lines = append(lines, fmt.Sprintf("%x", arr[i:i+l]))
}
return NewLinesData(lines), nil
}
func reflectSliceValue(tab *Tabulate, flags Flags, tags map[string]bool,
width int, value reflect.Value) (Data, error) {
data := NewSlice(width)
loop:
for i := 0; i < value.Len(); i++ {
v := value.Index(i)
// Follow pointers.
for v.Type().Kind() == reflect.Ptr {
if v.IsZero() {
if flags&OmitEmpty == 0 {
data.Append(NewText(nilLabel))
}
continue loop
}
v = reflect.Indirect(v)
}
switch v.Type().Kind() {
case reflect.Struct:
sub := tab.Clone()
if flags&InheritHeaders == 0 {
sub.Headers = nil
}
err := reflectStruct(sub, flags, tags, v)
if err != nil {
return nil, err
}
data.Append(sub)
default:
sub, err := reflectValue(tab, flags, tags, v)
if err != nil {
return nil, err
}
data.Append(sub)
}
}
return data, nil
}
type row struct {
key Data
val Data
}
func reflectMap(tab *Tabulate, flags Flags, tags map[string]bool,
v reflect.Value) error {
var rows []row
iter := v.MapRange()
for iter.Next() {
keyData, err := reflectValue(tab, flags, tags, iter.Key())
if err != nil {
return err
}
valData, err := reflectValue(tab, flags, tags, iter.Value())
if err != nil {
return err
}
rows = append(rows, row{
key: keyData,
val: valData,
})
}
sort.Slice(rows, func(i, j int) bool {
di := rows[i].key
dj := rows[j].key
height := di.Height()
if dj.Height() < height {
height = dj.Height()
}
for row := 0; row < height; row++ {
cmp := strings.Compare(di.Content(row), dj.Content(row))
switch cmp {
case -1:
return true
case 1:
return false
}
}
if di.Height() <= dj.Height() {
return true
}
return false
})
for _, r := range rows {
row := tab.Row()
row.ColumnData(r.key)
row.ColumnData(r.val)
}
return nil
}
func reflectStruct(tab *Tabulate, flags Flags, tags map[string]bool,
value reflect.Value) error {
loop:
for i := 0; i < value.NumField(); i++ {
field := value.Type().Field(i)
myFlags := flags
for _, tag := range strings.Split(field.Tag.Get("tabulate"), ",") {
if tag == "omitempty" {
myFlags |= OmitEmpty
} else if strings.HasPrefix(tag, "@") {
// Tagged field. Skip unless filter tags contain it.
if !tags[tag[1:]] {
continue loop
}
}
}
v := value.Field(i)
// Follow pointers.
for v.Type().Kind() == reflect.Ptr {
if v.IsZero() {
if myFlags&OmitEmpty == 0 {
row := tab.Row()
row.Column(field.Name)
}
continue loop
}
v = reflect.Indirect(v)
}
if v.CanInterface() {
switch iv := v.Interface().(type) {
case encoding.TextMarshaler:
data, err := iv.MarshalText()
if err != nil {
return err
}
row := tab.Row()
row.Column(field.Name)
row.Column(string(data))
continue loop
}
}
data, err := reflectValue(tab, flags, tags, v)
if err != nil {
return err
}
if data.Height() > 0 || flags&OmitEmpty == 0 {
row := tab.Row()
row.Column(field.Name)
row.ColumnData(data)
}
}
return nil
} | reflect.go | 0.588061 | 0.525491 | reflect.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.