code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package ast

// TypeKind is implemented by all Types which are represented in the AST.
// BaseType returns the underlying array/slice type if applicable, otherwise it returns the same value as Kind().
// Kind returns a value which represents the kind of value it is: ie int/string/slice/array.
type TypeKind interface {
	BaseType() TypeKind
	Kind() TypeKindDescription
	String() string
}

// TypeKindDescription represents at the top level the kind of primitive handled.
type TypeKindDescription int

// Kind returns the kind of value the type is (the receiver itself).
func (t TypeKindDescription) Kind() TypeKindDescription {
	return t
}

// BaseType returns the same value as Kind() for TypeKindDescription.
// NOTE(review): returning t as a TypeKind requires TypeKindDescription to
// have a String method; it is not visible in this file — presumably defined
// elsewhere in the package.
func (t TypeKindDescription) BaseType() TypeKind {
	return t
}

// Represents the valid kinds in the type system.
const (
	PrimitiveTypeInt TypeKindDescription = iota
	PrimitiveTypeString
	PrimitiveTypeBool
	ComplexTypeArray
	ComplexTypeStruct
	ComplexTypeFunction
	PrimitiveTypeUndefined
	UnknownType // Used internally to signify the type could be valid but is currently unknown
)
// NamedType is a kind of named primitive variable, used mainly to represent named parameters.
type NamedType struct {
	Type  TypeKind
	Ident string
}

// BaseType returns the underlying type of the value.
func (p NamedType) BaseType() TypeKind {
	return p.Type
}

// Kind returns the kind of value the type is.
func (p NamedType) Kind() TypeKindDescription {
	return p.Type.Kind()
}

// Name returns the name of the value.
func (p NamedType) Name() string {
	return p.Ident
}

// SetName sets the name of the value.
//
// The receiver must be a pointer: the previous value receiver assigned to a
// copy of the struct, so the new name was silently discarded for every caller.
func (p *NamedType) SetName(n string) {
	p.Ident = n
}
// ArrayType represents an array built on primitive of type SubType, with an array length represented by the execution of Len.
type ArrayType struct {
	SubType TypeKind
	Len     Node
}

// String renders the array type as "[]" followed by the element type.
func (a ArrayType) String() string {
	return "[]" + a.BaseType().String()
}

// Kind returns ComplexTypeArray.
func (a ArrayType) Kind() TypeKindDescription {
	return ComplexTypeArray
}

// BaseType returns the type of the underlying element primitive.
func (a ArrayType) BaseType() TypeKind {
	return a.SubType
}
// StructType represents a named set of fields contained within one structure.
type StructType struct {
	Fields []NamedType
}

// String renders the struct as "struct{field1, field2, ...}".
// NOTE(review): this calls NamedType.String, which is not visible in this
// file — presumably defined elsewhere in the package.
func (a StructType) String() string {
	out := "struct{"
	for i, f := range a.Fields {
		out += f.String()
		if i+1 < len(a.Fields) {
			out += ", "
		}
	}
	return out + "}"
}

// Kind returns ComplexTypeStruct.
func (a StructType) Kind() TypeKindDescription {
	return ComplexTypeStruct
}

// BaseType returns ComplexTypeStruct as there is no real base type.
func (a StructType) BaseType() TypeKind {
	return ComplexTypeStruct // no real base type
}
// FunctionType represents the parameters, return type and code node of a function.
type FunctionType struct {
	Parameters []TypeKind
	ReturnType TypeKind
	Code       Node
}

// Kind returns ComplexTypeFunction.
func (a FunctionType) Kind() TypeKindDescription {
	return ComplexTypeFunction
}

// BaseType returns ComplexTypeFunction as there is no real base type.
func (a FunctionType) BaseType() TypeKind {
	return ComplexTypeFunction // no real base type
}
package main

// IntIntMapEntry is a single key/value pair stored in the tree.
type IntIntMapEntry struct {
	K int
	V int
}

// IntIntMap is a persistent (immutable) height-balanced binary search tree
// mapping int keys to int values. Mutating operations return a new tree and
// never modify the receiver, so values can be shared freely. A nil *IntIntMap
// is the valid empty map.
type IntIntMap struct {
	IntIntMapEntry IntIntMapEntry // the entry held at this node
	h              int            // cached height of the subtree rooted here
	len            int            // cached number of entries in this subtree
	children       [2]*IntIntMap  // [0] = left (smaller keys), [1] = right (larger keys)
}

// Height returns the height of the subtree; a nil node has height 0.
func (node *IntIntMap) Height() int {
	if node == nil {
		return 0
	}
	return node.h
}
// combinedDepthIntIntMap returns the height of a node whose children would be
// n1 and n2: one more than the taller child. Either child may be nil.
// The suffix IntIntMap is needed because this will get specialised in codegen.
func combinedDepthIntIntMap(n1, n2 *IntIntMap) int {
	depth := n1.Height()
	if other := n2.Height(); other > depth {
		depth = other
	}
	return depth + 1
}
// mkIntIntMap builds a node holding entry with the given (possibly nil)
// subtrees, computing the cached height and entry count.
// The suffix IntIntMap is needed because this will get specialised in codegen.
func mkIntIntMap(entry IntIntMapEntry, left *IntIntMap, right *IntIntMap) *IntIntMap {
	size := 1
	for _, child := range [2]*IntIntMap{left, right} {
		if child != nil {
			size += child.len
		}
	}
	return &IntIntMap{
		IntIntMapEntry: entry,
		h:              combinedDepthIntIntMap(left, right),
		len:            size,
		children:       [2]*IntIntMap{left, right},
	}
}
// Get looks up key in the tree, returning its value and whether it was found.
// Missing keys yield the zero value and ok == false. Safe on a nil receiver.
func (node *IntIntMap) Get(key int) (value int, ok bool) {
	for cursor := node; cursor != nil; {
		switch {
		case key < cursor.IntIntMapEntry.K:
			cursor = cursor.children[0]
		case cursor.IntIntMapEntry.K < key:
			cursor = cursor.children[1]
		default:
			// keys are equal
			return cursor.IntIntMapEntry.V, true
		}
	}
	return 0, false
}
// Insert returns a new tree containing the key/value pair; an existing entry
// with the same key is replaced. The receiver (which may be nil) is left
// unchanged — this is a persistent insert.
func (node *IntIntMap) Insert(key int, value int) *IntIntMap {
	if node == nil {
		return mkIntIntMap(IntIntMapEntry{key, value}, nil, nil)
	}
	entry, left, right := node.IntIntMapEntry, node.children[0], node.children[1]
	if node.IntIntMapEntry.K < (key) {
		right = right.Insert(key, value)
	} else if key < (node.IntIntMapEntry.K) {
		left = left.Insert(key, value)
	} else { // equals
		entry = IntIntMapEntry{key, value}
	}
	// rebuild this node, rebalancing if the insert tilted a subtree
	return rotateIntIntMap(entry, left, right)
}
// Remove returns a new tree without the entry for key; the receiver is left
// unchanged. Removing a missing key returns an equivalent tree. When the
// matched node has a left subtree, it is replaced by that subtree's maximum
// entry (the in-order predecessor), keeping the BST invariant.
func (node *IntIntMap) Remove(key int) *IntIntMap {
	if node == nil {
		return nil
	}
	entry, left, right := node.IntIntMapEntry, node.children[0], node.children[1]
	if node.IntIntMapEntry.K < (key) {
		right = right.Remove(key)
	} else if key < (node.IntIntMapEntry.K) {
		left = left.Remove(key)
	} else { // equals
		max := left.Max()
		if max == nil {
			// no left subtree: the right subtree (possibly nil) takes this node's place
			return right
		} else {
			// hoist the in-order predecessor into this position
			left = left.Remove(max.K)
			entry = *max
		}
	}
	return rotateIntIntMap(entry, left, right)
}

// rotateIntIntMap rebuilds a node from entry/left/right, applying a single or
// double AVL rotation when the subtree heights differ by more than one.
// suffix IntIntMap is needed because this will get specialised in codegen
func rotateIntIntMap(entry IntIntMapEntry, left *IntIntMap, right *IntIntMap) *IntIntMap {
	if right.Height()-left.Height() > 1 { // implies right != nil
		// single left
		rl := right.children[0]
		rr := right.children[1]
		if combinedDepthIntIntMap(left, rl)-rr.Height() > 1 {
			// double rotation (right-left case)
			return mkIntIntMap(
				rl.IntIntMapEntry,
				mkIntIntMap(entry, left, rl.children[0]),
				mkIntIntMap(right.IntIntMapEntry, rl.children[1], rr),
			)
		}
		return mkIntIntMap(right.IntIntMapEntry, mkIntIntMap(entry, left, rl), rr)
	}
	if left.Height()-right.Height() > 1 { // implies left != nil
		// single right
		ll := left.children[0]
		lr := left.children[1]
		if combinedDepthIntIntMap(right, lr)-ll.Height() > 1 {
			// double rotation (left-right case)
			return mkIntIntMap(
				lr.IntIntMapEntry,
				mkIntIntMap(left.IntIntMapEntry, ll, lr.children[0]),
				mkIntIntMap(entry, lr.children[1], right),
			)
		}
		return mkIntIntMap(left.IntIntMapEntry, ll, mkIntIntMap(entry, lr, right))
	}
	// already balanced
	return mkIntIntMap(entry, left, right)
}
// Len returns the number of entries in the map; a nil map has length 0.
func (node *IntIntMap) Len() int {
	if node == nil {
		return 0
	}
	return node.len
}

// Entries returns all entries in ascending key order, using an explicit
// stack for an iterative in-order traversal (avoids recursion overhead).
func (node *IntIntMap) Entries() []IntIntMapEntry {
	elems := make([]IntIntMapEntry, 0, node.Len()) // Len is nil-safe
	if node == nil {
		return elems
	}
	type frame struct {
		node     *IntIntMap
		leftDone bool // true once the left subtree has been pushed
	}
	var preallocated [20]frame // preallocate on stack for common case
	stack := preallocated[:0]
	stack = append(stack, frame{node, false})
	for len(stack) > 0 {
		top := &stack[len(stack)-1]
		if !top.leftDone {
			// first visit: descend left before emitting this node
			if top.node.children[0] != nil {
				stack = append(stack, frame{top.node.children[0], false})
			}
			top.leftDone = true
		} else {
			// second visit: emit this node, then descend right
			stack = stack[:len(stack)-1] // pop
			elems = append(elems, top.node.IntIntMapEntry)
			if top.node.children[1] != nil {
				stack = append(stack, frame{top.node.children[1], false})
			}
		}
	}
	return elems
}
// extreme walks to the end of the tree in the given direction
// (0 = leftmost/minimum, 1 = rightmost/maximum); nil for an empty map.
func (node *IntIntMap) extreme(dir int) *IntIntMapEntry {
	if node == nil {
		return nil
	}
	finger := node
	for finger.children[dir] != nil {
		finger = finger.children[dir]
	}
	return &finger.IntIntMapEntry
}

// Min returns the entry with the smallest key, or nil for an empty map.
func (node *IntIntMap) Min() *IntIntMapEntry {
	return node.extreme(0)
}

// Max returns the entry with the largest key, or nil for an empty map.
func (node *IntIntMap) Max() *IntIntMapEntry {
	return node.extreme(1)
}

// Iterate iterates over all entries in ascending key order.
func (node *IntIntMap) Iterate() IntIntMapIterator {
	return newIteratorIntIntMap(node, 0, nil)
}

// IterateFrom iterates in ascending key order, starting at the first key >= k.
func (node *IntIntMap) IterateFrom(k int) IntIntMapIterator {
	return newIteratorIntIntMap(node, 0, &k)
}

// IterateReverse iterates over all entries in descending key order.
func (node *IntIntMap) IterateReverse() IntIntMapIterator {
	return newIteratorIntIntMap(node, 1, nil)
}

// IterateReverseFrom iterates in descending key order, starting at the first key <= k.
func (node *IntIntMap) IterateReverseFrom(k int) IntIntMapIterator {
	return newIteratorIntIntMap(node, 1, &k)
}
// IntIntMapIteratorStackFrame is one level of the iterator's explicit
// traversal stack: a node plus the resume point within its visit
// (see the state machine in Next).
type IntIntMapIteratorStackFrame struct {
	node  *IntIntMap
	state int8
}

// IntIntMapIterator walks the tree in key order without recursion.
// The zero value is an exhausted iterator.
type IntIntMapIterator struct {
	direction    int // 0 = ascending, 1 = descending
	stack        []IntIntMapIteratorStackFrame
	currentEntry IntIntMapEntry // entry most recently produced by Next/seek
}

// newIteratorIntIntMap builds an iterator over node in the given direction,
// optionally positioned at the first key not passed startFrom.
// suffix IntIntMap is needed because this will get specialised in codegen
func newIteratorIntIntMap(node *IntIntMap, direction int, startFrom *int) IntIntMapIterator {
	if node == nil {
		return IntIntMapIterator{}
	}
	// the stack can never grow deeper than the tree height
	stack := make([]IntIntMapIteratorStackFrame, 1, node.Height())
	stack[0] = IntIntMapIteratorStackFrame{node: node, state: 0}
	iter := IntIntMapIterator{direction: direction, stack: stack}
	if startFrom != nil {
		stack[0].state = 2 // seek interprets state 2 as "examine this node"
		iter.seek(*startFrom)
	} else {
		iter.Next() // position on the first entry
	}
	return iter
}

// Done reports whether the iterator is exhausted.
func (i *IntIntMapIterator) Done() bool {
	return len(i.stack) == 0
}

// GetKey returns the key of the current entry.
func (i *IntIntMapIterator) GetKey() int {
	return i.currentEntry.K
}

// GetValue returns the value of the current entry.
func (i *IntIntMapIterator) GetValue() int {
	return i.currentEntry.V
}
// Next advances to the next entry in iteration order, or exhausts the
// iterator. Each frame runs a small state machine:
//
//	0: node not yet examined (pop immediately if nil)
//	1: push the child on the iteration side, then wait for it to finish
//	2: produce this node's entry
//	3: replace this frame with the child on the far side (tail call)
func (i *IntIntMapIterator) Next() {
	for len(i.stack) > 0 {
		frame := &i.stack[len(i.stack)-1]
		switch frame.state {
		case 0:
			if frame.node == nil {
				last := len(i.stack) - 1
				i.stack[last] = IntIntMapIteratorStackFrame{} // zero out so the GC can reclaim the node
				i.stack = i.stack[:last]                      // pop
			} else {
				frame.state = 1
			}
		case 1:
			i.stack = append(i.stack, IntIntMapIteratorStackFrame{node: frame.node.children[i.direction], state: 0})
			frame.state = 2
		case 2:
			i.currentEntry = frame.node.IntIntMapEntry
			frame.state = 3
			return
		case 3:
			// override frame - tail call optimisation
			i.stack[len(i.stack)-1] = IntIntMapIteratorStackFrame{node: frame.node.children[1-i.direction], state: 0}
		}
	}
}

// seek descends to the first entry not passed k in iteration order (>= k when
// ascending, <= k when descending), leaving the stack positioned so that Next
// resumes correctly. Frames are pushed pre-set to state 2 ("examine").
func (i *IntIntMapIterator) seek(k int) {
LOOP:
	for {
		frame := &i.stack[len(i.stack)-1]
		if frame.node == nil {
			last := len(i.stack) - 1
			i.stack[last] = IntIntMapIteratorStackFrame{} // zero out
			i.stack = i.stack[:last]                      // pop
			break LOOP
		}
		// does this node's key satisfy the bound for the current direction?
		if (i.direction == 0 && !(frame.node.IntIntMapEntry.K < (k))) || (i.direction == 1 && !(k < (frame.node.IntIntMapEntry.K))) {
			// candidate: keep it on the stack and look for a closer one below
			i.stack = append(i.stack, IntIntMapIteratorStackFrame{node: frame.node.children[i.direction], state: 2})
		} else {
			// override frame - tail call optimisation
			i.stack[len(i.stack)-1] = IntIntMapIteratorStackFrame{node: frame.node.children[1-i.direction], state: 2}
		}
	}
	if len(i.stack) > 0 {
		frame := &i.stack[len(i.stack)-1]
		i.currentEntry = frame.node.IntIntMapEntry
		frame.state = 3
	}
}
// Package bezier renders sequences of integer points as smoothed SVG path data.
package bezier

import (
	"bytes"
	"fmt"
	"math"

	"github.com/toukii/goutils"
)

// IPoint is any value that exposes integer x/y coordinates.
type IPoint interface {
	GetX() int
	GetY() int
}

// Point is a 2D point with integer coordinates.
// Z is not used by any logic in this file — presumably caller-side
// metadata (e.g. pressure or ordering); TODO confirm.
type Point struct {
	X, Y int
	Z    int64
}

var (
	// ShortenTh globally scales how far control points extend from a curve point.
	ShortenTh = 0.75
)
// NewPoint returns a Point at (x, y); Z is left at its zero value.
func NewPoint(x, y int) *Point {
	return &Point{X: x, Y: y}
}

// ParsePoint converts any IPoint implementation into a *Point.
func ParsePoint(p IPoint) *Point {
	return NewPoint(p.GetX(), p.GetY())
}
// MahatMetric returns the Manhattan (L1) distance between p1 and p2.
// A nil p2 is treated as the origin (0, 0).
func (p1 *Point) MahatMetric(p2 *Point) float64 {
	var x, y int
	if p2 != nil {
		x, y = p2.X, p2.Y
	}
	return math.Abs(float64(p1.X-x)) + math.Abs(float64(p1.Y-y))
}

// EucMetric returns |x1²-x2²| + |y1²-y2²|, with a nil p2 treated as the origin.
// NOTE(review): despite the name, this is NOT the (squared) Euclidean
// distance, which would be (x1-x2)² + (y1-y2)². Confirm whether this formula
// is intentional before relying on it; it is not called elsewhere in this file.
func (p1 *Point) EucMetric(p2 *Point) float64 {
	var x, y int
	if p2 != nil {
		x, y = p2.X, p2.Y
	}
	return math.Abs(float64(p1.X*p1.X-x*x)) + math.Abs(float64(p1.Y*p1.Y-y*y))
}
// Key returns a "x-y" string usable as a map key for this point.
func (p *Point) Key() string {
	return fmt.Sprintf("%d-%d", p.X, p.Y)
}

// PathFmt renders the point as "x y" for use inside SVG path data.
func (p *Point) PathFmt() string {
	return fmt.Sprintf("%d %d", p.X, p.Y)
}

// Center returns the midpoint of p1 and p2.
// Uses >>1, so odd sums round toward negative infinity.
func (p1 *Point) Center(p2 *Point) *Point {
	return NewPoint((p1.X+p2.X)>>1, (p1.Y+p2.Y)>>1)
}

// Dlt returns HALF the component-wise difference p1-p2 (note the >>1).
func (p1 *Point) Dlt(p2 *Point) *Point {
	return NewPoint((p1.X-p2.X)>>1, (p1.Y-p2.Y)>>1)
}

// Shorten scales both coordinates in place by th.
func (p *Point) Shorten(th float64) {
	p.X, p.Y = shorten(p.X, th), shorten(p.Y, th)
}

// shorten scales i by th, truncating toward zero.
func shorten(i int, th float64) int {
	return int(float64(i) * th)
}

// CtlPoints reflects dlt around xy to produce the two bézier control points
// for the curve through xy. dltTh (in [0,1]) splits the total ShortenTh
// budget between the forward and backward control point.
func (xy *Point) CtlPoints(dlt *Point, dltTh float64) [2]*Point {
	return [2]*Point{
		NewPoint(xy.X+shorten(dlt.X, dltTh*ShortenTh), xy.Y+shorten(dlt.Y, dltTh*ShortenTh)),
		NewPoint(xy.X-shorten(dlt.X, (1-dltTh)*ShortenTh), xy.Y-shorten(dlt.Y, (1-dltTh)*ShortenTh)),
	}
}

// Spilt reports whether the point is the (-1, -1) sentinel used to mark a
// break between strokes. NOTE(review): the name looks like a typo for
// "Split", but it is exported so renaming would break callers.
func (p *Point) Spilt() bool {
	return p.X == -1 && p.Y == -1
}
// Trhs renders a whole polyline as SVG path data by sliding a 3-point window
// over ps and concatenating the per-window output of Trh. The first and last
// windows are flagged so the path starts/ends exactly on the end points.
func Trhs(ctlSize int, ps ...*Point) []byte {
	size := len(ps)
	buf := bytes.NewBuffer(make([]byte, 0, 2048))
	for i := 3; i <= size; i++ {
		trh := Trh(ctlSize, ps[i-3:i], i == 3, i == size)
		buf.Write(trh)
	}
	return buf.Bytes()
}
// Trh renders one 3-point window [p0, p1, p2] as an SVG curve segment through
// p1. The segment runs between the midpoints of (p0,p1) and (p1,p2) — or from
// the true end point when start/end is set. ctlSize == 1 yields a smooth
// quadratic-style segment (S); otherwise two cubic control points are derived
// from the window's geometry (C). Inputs of other sizes degrade gracefully:
// >3 recurses through Trhs, 2 draws a straight line, <2 yields nil.
func Trh(ctlSize int, ps []*Point, start, end bool) []byte {
	size := len(ps)
	if size > 3 {
		return Trhs(ctlSize, ps...)
	} else if size == 2 {
		return PathTuple(ps...)
	} else if size <= 1 {
		return nil
	}
	p1 := ps[0].Center(ps[1]) // p1
	p2 := ps[1].Center(ps[2]) // p2
	// start or end point
	var startP, endP *Point
	if start {
		startP = ps[0]
	} else {
		startP = p1
	}
	if end {
		endP = ps[2]
	} else {
		endP = p2
	}
	if ctlSize == 1 {
		return PathTuple(startP, ps[1], endP)
	}
	dlt := p1.Dlt(p2) // dlt
	c12 := p1.Center(p2) // center point of p1 and p2
	// how sharply the polyline bends at ps[1], normalised by segment size
	th_ := c12.MahatMetric(ps[1]) / dlt.MahatMetric(nil) // metric threshold
	th := th_
	if th > 0.8 {
		th = 1.0/math.Pow(math.E, th_) + 0.2 // shorten sharp corners exponentially
	}
	p1p := ps[1].MahatMetric(p1)
	p2p := ps[1].MahatMetric(p2)
	dltTh := p1p / (p1p + p2p) // split of control-point reach, biased toward the nearer midpoint
	// fmt.Printf("dltTh:%+v, th:%+v -> %+v\n", dltTh, th_, th)
	if p1p+p2p <= 0.01 {
		// degenerate window: all three points effectively coincide
		return nil
	}
	dlt.Shorten(th) // shorten the dlt
	ctl := ps[1].CtlPoints(dlt, dltTh) // reflect the 2 control points
	return PathTuple(startP, ctl[0], ctl[1], endP)
}
// TrhCtls returns just the cubic control points Trh would compute for each
// 3-point window, without rendering path data (useful for debugging/visualising).
// NOTE(review): the sharp-corner formula here is 1/e^(th+0.2), while Trh uses
// 1/e^th + 0.2 — one of the two is presumably a transcription slip; confirm
// which is intended before unifying.
func TrhCtls(ps ...*Point) []*Point {
	size := len(ps)
	if size > 3 {
		ret := make([]*Point, 0, 4)
		for i := 3; i <= size; i++ {
			ret = append(ret, TrhCtls(ps[i-3:i]...)...)
		}
		return ret
	} else if size <= 2 {
		return nil
	}
	p1 := ps[0].Center(ps[1]) // p1
	p2 := ps[1].Center(ps[2]) // p2
	dlt := p1.Dlt(p2) // dlt
	c12 := p1.Center(p2) // center point of p1 and p2
	th_ := c12.MahatMetric(ps[1]) / dlt.MahatMetric(nil) // metric threshold
	th := th_
	if th > 0.8 {
		th = 1.0 / math.Pow(math.E, th_+0.2) // shorten
	}
	p1p := ps[1].MahatMetric(p1)
	p2p := ps[1].MahatMetric(p2)
	dltTh := p1p / (p1p + p2p)
	// fmt.Printf("dltTh:%+v, th:%+v -> %+v\n", dltTh, th_, th)
	if p1p+p2p <= 0.01 {
		// degenerate window: all three points effectively coincide
		return nil
	}
	dlt.Shorten(th) // shorten the dlt
	ctl := ps[1].CtlPoints(dlt, dltTh) // reflect the 2 control points
	return []*Point{ctl[0], ctl[1]}
}
func PathTuple(points ...*Point) []byte {
size := len(points)
if size == 2 {
return goutils.ToByte(fmt.Sprintf("M%sL%s", points[0].PathFmt(), points[1].PathFmt()))
}
if size == 3 {
return goutils.ToByte(fmt.Sprintf("M%s S%s, %s", points[0].PathFmt(), points[1].PathFmt(), points[2].PathFmt()))
}
if size >= 4 {
return goutils.ToByte(fmt.Sprintf("M%s C%s, %s, %s", points[0].PathFmt(), points[1].PathFmt(), points[2].PathFmt(), points[3].PathFmt()))
}
return nil
} | bezier.go | 0.661814 | 0.424889 | bezier.go | starcoder |
package alchemist

import (
	"errors"
	"math"

	"github.com/TemirkhanN/alchemist/pkg/alchemy/ingredient"
)

// Slot identifies one of the alchemist's ingredient slots (see the
// EmptySlot..FourthSlot values at the bottom of this file).
type Slot struct {
	value uint8
}

// Alchemist brews potions from up to four ingredients. Skill and luck levels
// determine how many ingredient effects are visible and how potent the
// resulting potion is.
type Alchemist struct {
	luckLevel                int
	alchemyLevel             int
	mortar                   *Mortar
	currentlyUsedIngredients []*ingredient.Ingredient
}

// NewAlchemist creates an alchemist with the given skill/luck levels and mortar.
func NewAlchemist(level int, luckLevel int, mortar *Mortar) *Alchemist {
	return &Alchemist{
		alchemyLevel:             level,
		luckLevel:                luckLevel,
		mortar:                   mortar,
		currentlyUsedIngredients: []*ingredient.Ingredient{},
	}
}
// CanUseIngredient reports whether ingredient can be added to the current
// batch: there must be a free slot (max 4), no duplicate by name, and at
// least one identifiable effect shared with an ingredient already in use.
// The first ingredient is always accepted.
func (a *Alchemist) CanUseIngredient(ingredient *ingredient.Ingredient) bool {
	if len(a.currentlyUsedIngredients) == 0 {
		return true
	}
	// todo this will produce logic error on attempt to switch one ingredient for another
	if len(a.currentlyUsedIngredients) == 4 {
		return false
	}
	canUse := false
	for _, usedIngredient := range a.currentlyUsedIngredients {
		if usedIngredient.Name() == ingredient.Name() {
			return false
		}
		// keep scanning even after a match so the duplicate-name check covers every slot
		if !canUse && a.CanCombineIngredients(usedIngredient, ingredient) {
			canUse = true
		}
	}
	return canUse
}
// UseIngredient adds newIngredient to the batch being brewed. It returns an
// error when the ingredient cannot be combined with those already chosen.
func (a *Alchemist) UseIngredient(newIngredient *ingredient.Ingredient) error {
	// CanUseIngredient already returns true when nothing is in use yet, so the
	// previous explicit len == 0 check was redundant.
	if a.CanUseIngredient(newIngredient) {
		a.currentlyUsedIngredients = append(a.currentlyUsedIngredients, newIngredient)

		return nil
	}

	return errors.New("ingredients must have similar effects to be combined")
}
// UsedIngredients returns the ingredients currently placed for brewing.
func (a *Alchemist) UsedIngredients() []*ingredient.Ingredient {
	return a.currentlyUsedIngredients
}

// CanCombineIngredients reports whether the two ingredients share at least
// one effect that is identifiable at the alchemist's current skill level.
// Effects hidden by low skill (IsUnknown) never count.
func (a *Alchemist) CanCombineIngredients(ingredient1 *ingredient.Ingredient, ingredient2 *ingredient.Ingredient) bool {
	for _, effect1 := range a.DetermineEffects(ingredient1) {
		if effect1.IsUnknown() {
			continue
		}
		for _, effect2 := range a.DetermineEffects(ingredient2) {
			if effect2.IsUnknown() {
				continue
			}
			if effect1.Name() == effect2.Name() {
				return true
			}
		}
	}
	return false
}

// DetermineEffects returns up to four effects of the ingredient, hiding the
// details of any effect beyond what the alchemist's skill can identify.
func (a *Alchemist) DetermineEffects(ofIngredient *ingredient.Ingredient) []ingredient.Effect {
	identifiableAmountOfEffects := a.IdentifiableAmountOfEffects()
	ingredientEffectsAmount := len(ofIngredient.Effects())
	var effects []ingredient.Effect
	for i := 0; i < 4 && ingredientEffectsAmount > i; i++ {
		effect := ofIngredient.Effects()[i]
		if i+1 > identifiableAmountOfEffects {
			// skill too low: the effect is present but its details are masked
			effect = effect.HideEffectDetails()
		}
		effects = append(effects, effect)
	}
	return effects
}
// DiscardIngredients empties the current batch without brewing.
func (a *Alchemist) DiscardIngredients() {
	a.currentlyUsedIngredients = nil
}

// CanStartBrewing reports whether a potion can be brewed right now: at least
// one ingredient, and a single ingredient is only enough for a master.
func (a *Alchemist) CanStartBrewing() bool {
	usedIngredientsAmount := len(a.UsedIngredients())
	if usedIngredientsAmount == 0 {
		return false
	}
	if usedIngredientsAmount == 1 && !a.IsMaster() {
		return false
	}
	return true
}

// BrewPotion brews the current batch into a named potion and consumes the
// ingredients. The batch is left untouched on error.
func (a *Alchemist) BrewPotion(potionName string) (Potion, error) {
	if !a.CanStartBrewing() {
		return Potion{}, errors.New("there are not enough ingredients to create a potion")
	}
	potion, err := a.PredictPotion()
	if err != nil {
		return potion, err
	}
	a.DiscardIngredients()
	potion.name = potionName
	return potion, nil
}
// PredictPotion computes the potion the current batch would yield, without
// consuming the ingredients. An effect makes it into the potion only when it
// is identifiable and appears on at least two ingredients (masters may brew
// a single-ingredient potion from its first effect).
// NOTE(review): value receiver here while every other method uses *Alchemist —
// harmless since nothing is mutated, but inconsistent.
func (a Alchemist) PredictPotion() (Potion, error) {
	if !a.CanStartBrewing() {
		return Potion{}, errors.New("there are not enough ingredients to create a potion")
	}
	usedIngredientsAmount := len(a.UsedIngredients())
	if usedIngredientsAmount == 1 && a.IsMaster() {
		theOnlyEffect := a.currentlyUsedIngredients[0].Effects()[0]
		return Potion{
			name:    "",
			effects: []PotionEffect{a.Refine(theOnlyEffect)},
		}, nil
	}
	// allEffects records the first sighting of each effect; a second sighting
	// promotes it into potionEffects.
	potionEffects := make(map[string]PotionEffect)
	allEffects := make(map[string]ingredient.Effect)
	for _, usedIngredient := range a.currentlyUsedIngredients {
		for _, effect := range a.DetermineEffects(usedIngredient) {
			if effect.IsUnknown() {
				continue
			}
			_, effectExists := allEffects[effect.Name()]
			if !effectExists {
				allEffects[effect.Name()] = effect
				continue
			}
			potionEffects[effect.Name()] = a.Refine(effect)
		}
	}
	effects := make([]PotionEffect, 0)
	for _, effect := range potionEffects {
		effects = append(effects, effect)
	}
	return Potion{
		name:    "",
		effects: effects,
	}, nil
}
// IsNovice reports an alchemy level in [0, 25).
func (a *Alchemist) IsNovice() bool {
	return a.alchemyLevel < 25
}

// IsApprentice reports an alchemy level in [25, 50).
func (a *Alchemist) IsApprentice() bool {
	return 25 <= a.alchemyLevel && a.alchemyLevel < 50
}

// IsJourneyMan reports an alchemy level in [50, 75).
func (a *Alchemist) IsJourneyMan() bool {
	return 50 <= a.alchemyLevel && a.alchemyLevel < 75
}

// IsExpert reports an alchemy level in [75, 100).
func (a *Alchemist) IsExpert() bool {
	return 75 <= a.alchemyLevel && a.alchemyLevel < 100
}

// IsMaster reports the maximum alchemy level, 100.
func (a *Alchemist) IsMaster() bool {
	return a.alchemyLevel == 100
}
// IdentifiableAmountOfEffects returns how many effects per ingredient the
// alchemist can see, from 1 (novice) up to 4 (expert and master).
func (a *Alchemist) IdentifiableAmountOfEffects() int {
	switch {
	case a.IsNovice():
		return 1
	case a.IsApprentice():
		return 2
	case a.IsJourneyMan():
		return 3
	case a.IsExpert():
		return 4
	case a.IsMaster():
		return 4
	default:
		// unreachable for levels in [0, 100]
		panic("alchemist has unknown mastery level. Probably wrong level set somehow")
	}
}

// Refine turns a raw ingredient effect into a potion effect, computing its
// magnitude and duration from skill/mortar and clamping both to at least 1.
func (a *Alchemist) Refine(effect ingredient.Effect) PotionEffect {
	magnitude := math.Round(a.calculateMagnitude(effect))
	if magnitude < 1 {
		magnitude = 1
	}
	duration := math.Round(a.calculateDuration(effect))
	if duration < 1 {
		duration = 1
	}
	return PotionEffect{
		magnitude: magnitude,
		duration:  duration,
		effect:    effect,
	}
}
// effectiveAlchemyLevel is the alchemy level adjusted for luck and clamped to
// [0, 100]. Luck above 50 raises the effective level; below 50 lowers it.
func (a *Alchemist) effectiveAlchemyLevel() float64 {
	effective := float64(a.alchemyLevel) + 0.4*float64(a.luckLevel-50)
	switch {
	case effective < 0:
		return 0
	case effective > 100:
		return 100
	default:
		return effective
	}
}
// calculateMagnitude derives an effect's strength from skill plus mortar
// strength, scaled down by the effect's base cost. Duration-only effects have
// a fixed magnitude of 1; magnitude-only effects skip the 4x cost penalty.
func (a *Alchemist) calculateMagnitude(effect ingredient.Effect) float64 {
	if effect.IsDurationOnly() {
		return 1.0
	}
	delta := 4.0
	if effect.IsMagnitudeOnly() {
		delta = 1.0
	}
	// the 2.28th root flattens growth at high skill levels
	return math.Pow((a.effectiveAlchemyLevel()+a.mortar.Strength())/(effect.BaseCost()/10*delta), 1/2.28)
}

// calculateDuration derives an effect's duration: 1 for magnitude-only
// effects, cost-scaled skill for duration-only effects, and otherwise four
// times the magnitude.
func (a *Alchemist) calculateDuration(effect ingredient.Effect) float64 {
	if effect.IsMagnitudeOnly() {
		return 1.0
	}
	if effect.IsDurationOnly() {
		return (a.effectiveAlchemyLevel() + a.mortar.Strength()) / (effect.BaseCost() / 10)
	}
	return 4 * a.calculateMagnitude(effect)
}

// The five ingredient slots; EmptySlot marks an unoccupied slot.
var (
	EmptySlot  = Slot{value: 0}
	FirstSlot  = Slot{value: 1}
	SecondSlot = Slot{value: 2}
	ThirdSlot  = Slot{value: 3}
	FourthSlot = Slot{value: 4}
)
package prefixtree

import (
	"strings"
	"bytes"
)

// PrefixTree represents a prefix tree for a set of strings. The first level of
// the tree represents all characters that appear at index 0 in the set of
// strings, the second level all characters at index 1, and so on down the tree.
type PrefixTree struct {
	Root *prefixNode
}

// prefixNode is an element in a prefix tree which holds a prefix and a set of
// child prefixes representing runes that could follow the current rune.
type prefixNode struct {
	data     rune
	children map[rune]*prefixNode
}
// childPrefixes returns the runes that can follow this node's rune.
// Order is unspecified (map iteration order).
func (p *prefixNode) childPrefixes() []rune {
	keys := make([]rune, 0, len(p.children)) // pre-size: final length is known
	for k := range p.children {              // `for k, _ := range` is non-idiomatic (gofmt -s / vet)
		keys = append(keys, k)
	}
	return keys
}

// childNodes returns this node's children in unspecified order.
func (p *prefixNode) childNodes() []*prefixNode {
	vals := make([]*prefixNode, 0, len(p.children))
	for _, v := range p.children {
		vals = append(vals, v)
	}
	return vals
}
// newPrefixNode creates a node holding c with no children.
func newPrefixNode(c rune) *prefixNode {
	return &prefixNode{data: c, children: make(map[rune]*prefixNode)}
}

// NewPrefixTree returns an empty prefix tree with a sentinel '\\' root.
func NewPrefixTree() *PrefixTree {
	return &PrefixTree{Root: newPrefixNode('\\')}
}
// Add adds the given string to the prefix tree. Every nth character in the
// provided string will occur in the nth level of the tree. Existing prefixes
// are reused, so shared prefixes are stored only once.
func (p *PrefixTree) Add(s string) {
	next := p.Root
	for _, c := range s {
		n, ok := next.children[c]
		if !ok {
			prefixNode := newPrefixNode(c)
			next.children[c] = prefixNode
			next = prefixNode
		} else {
			next = n
		}
	}
}

// Contains returns true if there is a traversal from the root of the tree to a
// node in the tree whose prefixes form the given string, false otherwise.
// Note this is a prefix test: adding "abc" makes Contains("ab") true too,
// and Contains("") is always true.
func (p *PrefixTree) Contains(s string) bool {
	next := p.Root
	for _, c := range s {
		n, ok := next.children[c]
		if !ok {
			return false
		}
		next = n
	}
	return true
}
// String prints a BFS of the prefix tree as comma-terminated runes. The only
// ordering guaranteed is that a rune at level n will be printed before a rune
// at level n+1 (order within a level follows map iteration and is random).
func (p *PrefixTree) String() string {
	next := p.Root
	q := next.childNodes()
	str := strings.Builder{}
	for len(q) > 0 {
		for _, c := range next.childPrefixes() {
			str.WriteRune(c)
			str.WriteRune(',')
		}
		// FIFO pop. The previous code popped from the tail of q, which made
		// this a depth-first (LIFO) walk and violated the documented
		// level-before-next-level ordering.
		next, q = q[0], q[1:]
		q = append(q, next.childNodes()...)
	}
	return str.String()
}
// Words returns all words present in the prefix tree: one string per
// root-to-leaf path. Order is unspecified.
func (p *PrefixTree) Words() []string {
	words := []string{}
	for _, n := range p.Root.childNodes() {
		words = append(words, p.wordsHelper(n, &bytes.Buffer{})...)
	}
	return words
}

// wordsHelper appends n's rune to the accumulated prefix and recurses; a leaf
// yields the accumulated bytes as one complete word.
func (p *PrefixTree) wordsHelper(n *prefixNode, word *bytes.Buffer) []string {
	// bytes.Buffer.WriteRune never returns a non-nil error, so it is ignored.
	word.WriteRune(n.data)
	if len(n.children) == 0 {
		return []string{word.String()}
	}
	words := []string{}
	for _, c := range n.childNodes() {
		// each branch gets its own buffer over the prefix accumulated so far;
		// results are materialised as strings before the next sibling writes,
		// so the shared backing bytes are never observed after mutation
		words = append(words, p.wordsHelper(c, bytes.NewBuffer(word.Bytes()))...)
	}
	return words
}
// the docs say not to do this
func copyStringBuilder(b strings.Builder) strings.Builder {
var newBuilder strings.Builder
for _, c := range b.String() {
newBuilder.WriteRune(c)
}
return b
} | prefixtree/prefix_tree.go | 0.832169 | 0.452415 | prefix_tree.go | starcoder |
// Package period parses ISO-8601 style period strings such as
// "P3Y6M4DT12H30M5S" into Period32 values.
package period

import (
	"fmt"
	"math"
	"strconv"
	"strings"
)
// Parse parses an ISO-8601 period string (e.g. "P1Y2M3DT4H5M6S", optional
// leading sign, "P0" for zero) into the receiver, which is reset first.
// Each designator may appear at most once, date designators must precede the
// 'T' marker and time designators must follow it, and only the last field may
// carry a decimal fraction.
func (p32 *Period32) Parse(isoPeriod string) error {
	if isoPeriod == "" {
		return fmt.Errorf(`cannot parse a blank string as a period`)
	}
	*p32 = Period32{}
	if isoPeriod == "P0" {
		return nil // special case
	}
	remaining := isoPeriod
	if remaining[0] == '-' {
		p32.neg = true
		remaining = remaining[1:]
	} else if remaining[0] == '+' {
		remaining = remaining[1:]
	}
	if remaining == "" {
		return fmt.Errorf(`cannot parse a blank string as a period`)
	} else if remaining[0] != 'P' {
		return fmt.Errorf("%s: expected 'P' period mark at the start", isoPeriod)
	}
	remaining = remaining[1:]
	var integer, fraction, prevFraction int32
	// per-designator states gate which designators are currently legal
	var years, months, weeks, days, hours, minutes, seconds itemState
	var des, previous designator
	var err error
	nComponents := 0
	// before 'T' only the date designators are armed
	years, months, weeks, days = Armed, Armed, Armed, Armed
	isHMS := false
	for len(remaining) > 0 {
		if remaining[0] == 'T' {
			if isHMS {
				return fmt.Errorf("%s: 'T' designator cannot occur more than once", isoPeriod)
			}
			isHMS = true
			// switch sections: disarm date designators, arm time designators
			years, months, weeks, days = Unready, Unready, Unready, Unready
			hours, minutes, seconds = Armed, Armed, Armed
			remaining = remaining[1:]
		} else {
			integer, fraction, des, remaining, err = parseNextField(remaining, isoPeriod, isHMS)
			if err != nil {
				return err
			}
			// a fraction terminates the period; any later non-zero field is an error
			if prevFraction != 0 && (integer != 0 || fraction != 0) {
				return fmt.Errorf("%s: '%c' & '%c' only the last field can have a fraction", isoPeriod, previous.Byte(), des.Byte())
			}
			switch des {
			case Year:
				years, err = years.testAndSet(integer, fraction, Year, p32, &p32.years, isoPeriod)
			case Month:
				months, err = months.testAndSet(integer, fraction, Month, p32, &p32.months, isoPeriod)
			case Week:
				weeks, err = weeks.testAndSet(integer, fraction, Week, p32, &p32.weeks, isoPeriod)
			case Day:
				days, err = days.testAndSet(integer, fraction, Day, p32, &p32.days, isoPeriod)
			case Hour:
				hours, err = hours.testAndSet(integer, fraction, Hour, p32, &p32.hours, isoPeriod)
			case Minute:
				minutes, err = minutes.testAndSet(integer, fraction, Minute, p32, &p32.minutes, isoPeriod)
			case Second:
				seconds, err = seconds.testAndSet(integer, fraction, Second, p32, &p32.seconds, isoPeriod)
			default:
				return fmt.Errorf("%s: expected a number not '%c'", isoPeriod, des.Byte())
			}
			nComponents++
			if err != nil {
				return err
			}
			prevFraction = fraction
			previous = des
		}
	}
	if nComponents == 0 {
		return fmt.Errorf("%s: expected 'Y', 'M', 'W', 'D', 'H', 'M', or 'S' designator", isoPeriod)
	}
	return nil
}
//-------------------------------------------------------------------------------------------------

// itemState tracks, per designator, whether it may legally appear at the
// current parse position and whether it has already been consumed.
type itemState int

const (
	Unready itemState = iota // designator not allowed in the current (date/time) section
	Armed                    // designator allowed but not yet seen
	Set                      // designator consumed; a repeat is an error
)

// testAndSet stores integer into value unless the designator is out of place
// or repeated. A non-zero value also records the fraction and which field
// carries it (the period has a single fraction slot, for the last field).
func (i itemState) testAndSet(integer, fraction int32, des designator, result *Period32, value *int32, original string) (itemState, error) {
	switch i {
	case Unready:
		return i, fmt.Errorf("%s: '%c' designator cannot occur here", original, des.Byte())
	case Set:
		return i, fmt.Errorf("%s: '%c' designator cannot occur more than once", original, des.Byte())
	}
	*value = integer
	if integer != 0 || fraction != 0 {
		result.fraction = fraction
		result.lastField = des
	}
	return Set, nil
}
//-------------------------------------------------------------------------------------------------

// parseNextField consumes one "<number><designator>" field from the front of
// str, returning the integer part, the fraction (scaled to 9 digits), the
// designator, and the unconsumed remainder.
func parseNextField(str, original string, isHMS bool) (int32, int32, designator, string, error) {
	i := scanDigits(str)
	switch i {
	case noDigitsFound:
		return 0, 0, 0, "", fmt.Errorf("%s: expected a number but found '%c'", original, str[0])
	case stringIsAllDigits:
		return 0, 0, 0, "", fmt.Errorf("%s: missing designator at the end", original)
	}
	des, err := asDesignator(str[i], isHMS)
	if err != nil {
		return 0, 0, 0, "", fmt.Errorf("%s: %w", original, err)
	}
	integer, fraction, err := parseDecimalNumber(str[:i], original, des)
	if integer > math.MaxInt32 {
		return 0, 0, 0, "", fmt.Errorf("%s: integer overflow occurred in %s", original, des.field())
	}
	// fraction is at most 999_999_999, so the int32 conversion cannot overflow
	return int32(integer), int32(fraction), des, str[i+1:], err
}

const (
	maxFractionDigits = 9
	trailingZeros     = "000000000" // nine zeros
)

// parseDecimalNumber splits a decimal literal (either '.' or ',' separator)
// into its integer part and a fraction normalised to exactly nine digits
// (i.e. the fraction expressed in billionths).
func parseDecimalNumber(number, original string, des designator) (integer, fraction int64, err error) {
	dec := strings.IndexByte(number, '.')
	if dec < 0 {
		dec = strings.IndexByte(number, ',')
	}
	if dec >= 0 {
		integer, err = strconv.ParseInt(number[:dec], 10, 64)
		if err == nil {
			number = number[dec+1:]
			if len(number) > 0 {
				// right-pad then truncate so "5" -> 500000000 and long tails are cut
				number = (number + trailingZeros)[:maxFractionDigits]
				fraction, err = strconv.ParseInt(number, 10, 64)
				//fraction *= pow10(maxFractionDigits - 1 - countZeros(number))
			}
		}
	} else {
		integer, err = strconv.ParseInt(number, 10, 64)
	}
	if err != nil {
		return 0, 0, fmt.Errorf("%s: expected a number but found '%c'", original, des)
	}
	return integer, fraction, err
}
// scanDigits finds the index of the first non-digit character after some
// digits. It returns noDigitsFound when s does not begin with a digit, and
// stringIsAllDigits when digits run to the end of s (including empty s).
func scanDigits(s string) int {
	for i, c := range s {
		if !isDigit(c) {
			if i == 0 {
				return noDigitsFound // s starts with a non-digit
			}
			return i // index of the next non-digit character
		}
	}
	return stringIsAllDigits
}

const (
	noDigitsFound     = -1
	stringIsAllDigits = -2
)

// isDigit reports whether c is an ASCII digit or a decimal separator
// ('.' and ',' are both accepted).
func isDigit(c rune) bool {
	switch {
	case '0' <= c && c <= '9':
		return true
	case c == '.', c == ',':
		return true
	}
	return false
}
package humanizex

// CommonUnits holds ready-made Unit values for frequently humanized
// quantities. Each Unit carries a Unicode and an ASCII rendering (identical
// for all of these).
var CommonUnits = struct {
	None          Unit
	Second        Unit
	Meter         Unit
	Byte          Unit
	Bit           Unit
	BitsPerSecond Unit
}{
	None:          Unit{"", ""},
	Second:        Unit{"s", "s"},
	Meter:         Unit{"m", "m"},
	Byte:          Unit{"B", "B"},
	Bit:           Unit{"b", "b"},
	BitsPerSecond: Unit{"bps", "bps"},
}
// CommonFactors holds ready-made factor tables for humanizing values in the
// most common measurement systems.
var CommonFactors = struct {
	// Time is time units in seconds, minutes, hours, days and years as min, h,
	// d, and y. These are non-SI units but generally accepted in context.
	// For times smaller than a second (e.g. nanoseconds), use SI instead.
	// The expected unit is a second (Unit{"s", "s"} or CommonUnits.Second)
	Time Factors
	// Distance are SI units that stop at kilo (because nobody uses
	// megametres or gigametres!) but includes centi. The expected unit is the
	// SI unit for distance, the metre (Unit{"m", "m"} or CommonUnits.Meter)
	Distance Factors
	// IEC are the "ibi" unit prefixes for bytes e.g. Ki, Mi, Gi with a
	// factor of 1024.
	IEC Factors
	// JEDEC are the old unit prefixes for bytes: K, M, G (only) with a factor
	// of 1024.
	JEDEC Factors
	// SIBytes are the SI unit prefixes for bytes e.g. k, M, G with a
	// factor of 1000. Unlike the normal SI Factors, it is assumed based on
	// context that when a "K" is input this is intended to mean the "k" SI
	// unit prefix instead of Kelvin - I've never heard of a Kelvin-Byte!
	SIBytes Factors
	// SIUncommon are the SI unit prefixes including deci, deca, and hecto
	SIUncommon Factors
	// SI are the SI unit prefixes except centi, deci, deca, and hecto
	SI Factors
}{
	Time: Factors{
		Factors: []Factor{
			{1, Unit{"s", "s"}, FactorModeReplace},
			{60, Unit{"min", "min"}, FactorModeReplace},
			{60 * 60, Unit{"h", "h"}, FactorModeReplace},
			{24 * 60 * 60, Unit{"d", "d"}, FactorModeReplace},
			{365.2422 * 24 * 60 * 60, Unit{"y", "y"}, FactorModeReplace}, // mean tropical year
		},
		Components: 2, // e.g. "1 h 30 min" rather than "1.5 h"
	},
	Distance: Factors{
		Factors: []Factor{
			{1E-9, Unit{"n", "n"}, FactorModeUnitPrefix}, // nano
			{1E-6, Unit{"μ", "u"}, FactorModeUnitPrefix}, // micro
			{1E-3, Unit{"m", "m"}, FactorModeUnitPrefix}, // milli
			{1E-2, Unit{"c", "c"}, FactorModeUnitPrefix}, // centi
			{1, Unit{"", ""}, FactorModeIdentity},
			{1000, Unit{"k", "k"}, FactorModeUnitPrefix}, // kilo
		},
	},
	IEC: Factors{
		Factors: []Factor{
			{1, Unit{"", ""}, FactorModeUnitPrefix},
			{1024, Unit{"Ki", "Ki"}, FactorModeUnitPrefix},
			{1024 * 1024, Unit{"Mi", "Mi"}, FactorModeUnitPrefix},
			{1024 * 1024 * 1024, Unit{"Gi", "Gi"}, FactorModeUnitPrefix},
			{1024 * 1024 * 1024 * 1024, Unit{"Ti", "Ti"}, FactorModeUnitPrefix},
		},
	},
	JEDEC: Factors{
		Factors: []Factor{
			{1, Unit{"", ""}, FactorModeIdentity},
			{1024, Unit{"K", "K"}, FactorModeUnitPrefix},
			{1024 * 1024, Unit{"M", "M"}, FactorModeUnitPrefix},
			{1024 * 1024 * 1024, Unit{"G", "G"}, FactorModeUnitPrefix},
		},
	},
	SIBytes: Factors{
		Factors: []Factor{
			{1, Unit{"", ""}, FactorModeIdentity},
			{1E3, Unit{"k", "k"}, FactorModeUnitPrefix},
			{1E3, Unit{"K", "K"}, FactorModeUnitPrefix | FactorModeInputCompat}, // Kelvin-Bytes(!)
			{1E6, Unit{"M", "M"}, FactorModeUnitPrefix},
			{1E9, Unit{"G", "G"}, FactorModeUnitPrefix},
			{1E12, Unit{"T", "T"}, FactorModeUnitPrefix},
		},
	},
	SIUncommon: Factors{
		Factors: []Factor{
			{1E-9, Unit{"n", "n"}, FactorModeUnitPrefix}, // nano
			{1E-6, Unit{"μ", "u"}, FactorModeUnitPrefix}, // micro
			{1E-3, Unit{"m", "m"}, FactorModeUnitPrefix}, // milli
			{1E-2, Unit{"c", "c"}, FactorModeUnitPrefix}, // centi
			{1E-1, Unit{"d", "d"}, FactorModeUnitPrefix}, // deci
			{1, Unit{"", ""}, FactorModeIdentity},
			{1E1, Unit{"da", "da"}, FactorModeUnitPrefix}, // deca
			{1E2, Unit{"h", "h"}, FactorModeUnitPrefix},   // hecto
			{1E3, Unit{"k", "k"}, FactorModeUnitPrefix},   // kilo
			{1E6, Unit{"M", "M"}, FactorModeUnitPrefix},
			{1E9, Unit{"G", "G"}, FactorModeUnitPrefix},
			{1E12, Unit{"T", "T"}, FactorModeUnitPrefix},
		},
	},
	SI: Factors{
		Factors: []Factor{
			{1E-9, Unit{"n", "n"}, FactorModeUnitPrefix}, // nano
			{1E-6, Unit{"μ", "u"}, FactorModeUnitPrefix}, // micro
			{1E-3, Unit{"m", "m"}, FactorModeUnitPrefix}, // milli
			{1, Unit{"", ""}, FactorModeIdentity},
			{1E3, Unit{"k", "k"}, FactorModeUnitPrefix}, // kilo
			{1E6, Unit{"M", "M"}, FactorModeUnitPrefix},
			{1E9, Unit{"G", "G"}, FactorModeUnitPrefix},
			{1E12, Unit{"T", "T"}, FactorModeUnitPrefix},
		},
	},
}
package gosmparse
import "github.com/thomersch/gosmparse/OSMPBF"
// Node is an OSM data element with a position and tags (key/value pairs).
type Node struct {
	ID int64
	Lat float64
	Lon float64
	Tags map[string]string
}

// Way is an OSM data element that consists of Nodes and tags (key/value pairs).
// Ways can describe line strings or areas.
type Way struct {
	ID int64
	NodeIDs []int64 // ordered references to the member Node IDs
	Tags map[string]string
}

// Relation is an OSM data element that contains multiple elements (RelationMember)
// and has tags (key/value pairs).
type Relation struct {
	ID int64
	Members []RelationMember
	Tags map[string]string
}

// MemberType describes the type of a relation member (node/way/relation).
type MemberType int

// The possible relation member types.
const (
	NodeType MemberType = iota
	WayType
	RelationType
)

// RelationMember refers to an element in a relation. It contains the ID of the element
// (node/way/relation) and the role.
type RelationMember struct {
	ID int64
	Type MemberType
	Role string
}
// denseNode decodes a DenseNodes block and emits one Node per entry via
// o.ReadNode. IDs and coordinates are delta-encoded in the PBF, so running
// totals are accumulated across the loop.
func denseNode(o OSMReader, pb *OSMPBF.PrimitiveBlock, dn *OSMPBF.DenseNodes) error {
	var (
		gran      = int64(pb.GetGranularity())
		latOffset = pb.GetLatOffset()
		lonOffset = pb.GetLonOffset()
		node      Node
		curID     int64
		curLat    int64
		curLon    int64
		tagPos    int // cursor into the packed KeysVals slice
	)
	for i := range dn.Id {
		curID += dn.Id[i]
		curLat += dn.Lat[i]
		curLon += dn.Lon[i]
		node.ID = curID
		node.Lat = 1e-9 * float64(latOffset+(gran*curLat))
		node.Lon = 1e-9 * float64(lonOffset+(gran*curLon))
		tagPos, node.Tags = unpackTags(pb.Stringtable.GetS(), tagPos, dn.KeysVals)
		o.ReadNode(node)
	}
	return nil
}
// way decodes a slice of PBF ways and emits each one via o.ReadWay.
// Node references are delta-encoded within each way.
func way(o OSMReader, pb *OSMPBF.PrimitiveBlock, ways []*OSMPBF.Way) error {
	table := pb.Stringtable.GetS()
	var out Way
	for _, pbWay := range ways {
		out.ID = pbWay.GetId()
		out.Tags = make(map[string]string)
		for i, keyIdx := range pbWay.Keys {
			out.Tags[string(table[int(keyIdx)])] = string(table[pbWay.Vals[i]])
		}
		out.NodeIDs = make([]int64, len(pbWay.Refs))
		var ref int64
		for i, delta := range pbWay.Refs {
			ref += delta
			out.NodeIDs[i] = ref
		}
		o.ReadWay(out)
	}
	return nil
}
func relation(o OSMReader, pb *OSMPBF.PrimitiveBlock, relations []*OSMPBF.Relation) error {
st := pb.Stringtable.GetS()
// dateGran := pb.GetDateGranularity()
var r Relation
for _, rel := range relations {
r.ID = *rel.Id
r.Members = make([]RelationMember, len(rel.Memids))
var (
relMember RelationMember
memID int64
)
r.Tags = make(map[string]string)
for pos, key := range rel.Keys {
keyString := string(st[int(key)])
r.Tags[keyString] = string(st[rel.Vals[pos]])
}
for memIndex := range rel.Memids {
memID = rel.Memids[memIndex] + memID
relMember.ID = memID
switch rel.Types[memIndex] {
case OSMPBF.Relation_NODE:
relMember.Type = NodeType
case OSMPBF.Relation_WAY:
relMember.Type = WayType
case OSMPBF.Relation_RELATION:
relMember.Type = RelationType
}
relMember.Role = string(st[rel.RolesSid[memIndex]])
r.Members[memIndex] = relMember
}
o.ReadRelation(r)
}
return nil
} | cmd/spatialize/vendor/github.com/thomersch/gosmparse/elements.go | 0.513668 | 0.548976 | elements.go | starcoder |
package op
import "fmt"
// Decode decodes the 16 bit representation of an instruction and returns it.
func Decode(buf uint16) (inst interface{}, err error) {
code := Code(buf & 0xF000 >> 12)
switch code {
case CodeNop:
// operand: 000
// padding.
pad := buf & 0x0FFF
if pad != 0 {
return nil, fmt.Errorf("op.Decode: invalid padding (%X) in %04X (%s)", pad, buf, code)
}
inst = &Nop{
Code: code,
}
case CodeLoadMem:
// operand: RXY
// R refers to the dst register.
// XY refers to the src memory address.
// dst register.
dst := Reg(buf & 0x0F00 >> 8)
if dst >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid dst register (%d) in %04X (%s)", dst, buf, code)
}
// src memory address.
src := Addr(buf & 0x0FF)
inst = &LoadMem{
Code: code,
Src: src,
Dst: dst,
}
case CodeLoadVal:
// operand: RXY
// R refers to the dst register.
// XY refers to the src immediate value.
// dst register.
dst := Reg(buf & 0x0F00 >> 8)
if dst >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid dst register (%d) in %04X (%s)", dst, buf, code)
}
// src immediate value.
src := Val(buf & 0x0FF)
inst = &LoadVal{
Code: code,
Src: src,
Dst: dst,
}
case CodeStore:
// operand: RXY
// R refers to the src register.
// XY refers to the dst memory address.
// src register.
src := Reg(buf & 0x0F00 >> 8)
if src >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid src register (%d) in %04X (%s)", src, buf, code)
}
// dst memory address.
dst := Addr(buf & 0x0FF)
inst = &Store{
Code: code,
Src: src,
Dst: dst,
}
case CodeMove:
// operand: 0RS
// R refers to the src register.
// S refers to the dst register.
// padding.
pad := buf & 0x0F00 >> 8
if pad != 0 {
return nil, fmt.Errorf("op.Decode: invalid padding (%X) in %04X (%s)", pad, buf, code)
}
// src register.
src := Reg(buf & 0x00F0 >> 4)
if src >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid src register (%d) in %04X (%s)", src, buf, code)
}
// dst register.
dst := Reg(buf & 0x000F)
if dst >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid dst register (%d) in %04X (%s)", dst, buf, code)
}
inst = &Move{
Code: code,
Src: src,
Dst: dst,
}
case CodeAdd:
// operand: RST
// R refers to the dst register.
// S refers to the src1 register.
// T refers to the src2 register.
// dst register.
dst := Reg(buf & 0x0F00 >> 8)
if dst >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid dst register (%d) in %04X (%s)", dst, buf, code)
}
// src1 register.
src1 := Reg(buf & 0x00F0 >> 4)
if src1 >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid src1 register (%d) in %04X (%s)", src1, buf, code)
}
// src2 register.
src2 := Reg(buf & 0x000F)
if src2 >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid src2 register (%d) in %04X (%s)", src2, buf, code)
}
inst = &Add{
Code: code,
Src1: src1,
Src2: src2,
Dst: dst,
}
case CodeAddFloat:
// operand: RST
// R refers to the dst register.
// S refers to the src1 register.
// T refers to the src2 register.
// dst register.
dst := Reg(buf & 0x0F00 >> 8)
if dst >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid dst register (%d) in %04X (%s)", dst, buf, code)
}
// src1 register.
src1 := Reg(buf & 0x00F0 >> 4)
if src1 >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid src1 register (%d) in %04X (%s)", src1, buf, code)
}
// src2 register.
src2 := Reg(buf & 0x000F)
if src2 >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid src2 register (%d) in %04X (%s)", src2, buf, code)
}
inst = &AddFloat{
Code: code,
Src1: src1,
Src2: src2,
Dst: dst,
}
case CodeOr:
// operand: RST
// R refers to the dst register.
// S refers to the src1 register.
// T refers to the src2 register.
// dst register.
dst := Reg(buf & 0x0F00 >> 8)
if dst >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid dst register (%d) in %04X (%s)", dst, buf, code)
}
// src1 register.
src1 := Reg(buf & 0x00F0 >> 4)
if src1 >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid src1 register (%d) in %04X (%s)", src1, buf, code)
}
// src2 register.
src2 := Reg(buf & 0x000F)
if src2 >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid src2 register (%d) in %04X (%s)", src2, buf, code)
}
inst = &Or{
Code: code,
Src1: src1,
Src2: src2,
Dst: dst,
}
case CodeAnd:
// operand: RST
// R refers to the dst register.
// S refers to the src1 register.
// T refers to the src2 register.
// dst register.
dst := Reg(buf & 0x0F00 >> 8)
if dst >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid dst register (%d) in %04X (%s)", dst, buf, code)
}
// src1 register.
src1 := Reg(buf & 0x00F0 >> 4)
if src1 >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid src1 register (%d) in %04X (%s)", src1, buf, code)
}
// src2 register.
src2 := Reg(buf & 0x000F)
if src2 >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid src2 register (%d) in %04X (%s)", src2, buf, code)
}
inst = &And{
Code: code,
Src1: src1,
Src2: src2,
Dst: dst,
}
case CodeXor:
// operand: RST
// R refers to the dst register.
// S refers to the src1 register.
// T refers to the src2 register.
// dst register.
dst := Reg(buf & 0x0F00 >> 8)
if dst >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid dst register (%d) in %04X (%s)", dst, buf, code)
}
// src1 register.
src1 := Reg(buf & 0x00F0 >> 4)
if src1 >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid src1 register (%d) in %04X (%s)", src1, buf, code)
}
// src2 register.
src2 := Reg(buf & 0x000F)
if src2 >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid src2 register (%d) in %04X (%s)", src2, buf, code)
}
inst = &Xor{
Code: code,
Src1: src1,
Src2: src2,
Dst: dst,
}
case CodeRor:
// operand: R0X
// R refers to the register.
// X refers to the immediate value x.
// register.
reg := Reg(buf & 0x0F00 >> 8)
if reg >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid register (%d) in %04X (%s)", reg, buf, code)
}
// padding.
pad := buf & 0x00F0 >> 4
if pad != 0 {
return nil, fmt.Errorf("op.Decode: invalid padding (%X) in %04X (%s)", pad, buf, code)
}
// immediate value x.
x := Val(buf & 0x000F)
if x >= RegSize {
return nil, fmt.Errorf("op.Decode: invalid x (%d) in %04X (%s); above %d", x, buf, code, RegSize-1)
}
inst = &Ror{
Code: code,
Reg: reg,
X: x,
}
case CodeCmpBranch:
// operand: RXY
// R refers to the cmp register.
// XY refers to the memory address addr.
// cmp register.
cmp := Reg(buf & 0x0F00 >> 8)
if cmp >= RegCount {
return nil, fmt.Errorf("op.Decode: invalid cmp register (%d) in %04X (%s)", cmp, buf, code)
}
// memory address addr.
addr := Addr(buf & 0x00FF)
inst = &CmpBranch{
Code: code,
Addr: addr,
Cmp: cmp,
}
case CodeHalt:
// operand: 000
// padding.
pad := buf & 0x0FFF
if pad != 0 {
return nil, fmt.Errorf("op.Decode: invalid padding (%X) in %04X (%s)", pad, buf, code)
}
inst = &Halt{
Code: code,
}
default:
return nil, fmt.Errorf("op.Decode: invalid code (%d) in %04X", code, buf)
}
return inst, nil
} | archive/cs/risc/op/decode.go | 0.66072 | 0.586108 | decode.go | starcoder |
package migration
// TinyInteger registers a TINYINT column named field on the table being built.
func (m *MigrationTable) TinyInteger(field string) *MigrationAttributes {
	attr := &MigrationAttribute{field: field, fieldType: "TINYINT"}
	m.table.result = append(m.table.result, attr)
	return m.table
}
// SmallInteger registers a SMALLINT column named field on the table being built.
func (m *MigrationTable) SmallInteger(field string) *MigrationAttributes {
	attr := &MigrationAttribute{field: field, fieldType: "SMALLINT"}
	m.table.result = append(m.table.result, attr)
	return m.table
}
// MediumInteger registers a MEDIUMINT column named field on the table being built.
func (m *MigrationTable) MediumInteger(field string) *MigrationAttributes {
	attr := &MigrationAttribute{field: field, fieldType: "MEDIUMINT"}
	m.table.result = append(m.table.result, attr)
	return m.table
}
// Integer registers an INTEGER column named field on the table being built.
func (m *MigrationTable) Integer(field string) *MigrationAttributes {
	attr := &MigrationAttribute{field: field, fieldType: "INTEGER"}
	m.table.result = append(m.table.result, attr)
	return m.table
}
// BigInteger registers a BIGINT column named field on the table being built.
func (m *MigrationTable) BigInteger(field string) *MigrationAttributes {
	attr := &MigrationAttribute{field: field, fieldType: "BIGINT"}
	m.table.result = append(m.table.result, attr)
	return m.table
}
// Boolean registers a boolean column named field, stored as a TINYINT.
func (m *MigrationTable) Boolean(field string) *MigrationAttributes {
	// Upper-cased for consistency: every other method in this file emits its
	// SQL type name in uppercase; the original used lowercase "tinyint" here.
	attr := &MigrationAttribute{field: field, fieldType: "TINYINT"}
	m.table.result = append(m.table.result, attr)
	return m.table
}
// Decimal registers a DECIMAL(length, precision) column named field.
func (m *MigrationTable) Decimal(field string, length int, precision int) *MigrationAttributes {
	attr := &MigrationAttribute{field: field, fieldType: "DECIMAL", length: length, precision: precision}
	m.table.result = append(m.table.result, attr)
	return m.table
}
// Double registers a DOUBLE(length, precision) column named field.
func (m *MigrationTable) Double(field string, length int, precision int) *MigrationAttributes {
	attr := &MigrationAttribute{field: field, fieldType: "DOUBLE", length: length, precision: precision}
	m.table.result = append(m.table.result, attr)
	return m.table
}
// Float registers a FLOAT(length, precision) column named field.
func (m *MigrationTable) Float(field string, length int, precision int) *MigrationAttributes {
	attr := &MigrationAttribute{field: field, fieldType: "FLOAT", length: length, precision: precision}
	m.table.result = append(m.table.result, attr)
	return m.table
}
// UnsignedBigInteger registers an unsigned BIGINT column named field.
func (m *MigrationTable) UnsignedBigInteger(field string) *MigrationAttributes {
	attr := &MigrationAttribute{field: field, fieldType: "BIGINT", unsigned: true}
	m.table.result = append(m.table.result, attr)
	return m.table
}
// UnsignedDecimal registers an unsigned DECIMAL(length, precision) column named field.
func (m *MigrationTable) UnsignedDecimal(field string, length, precision int) *MigrationAttributes {
	attr := &MigrationAttribute{field: field, fieldType: "DECIMAL", length: length, precision: precision, unsigned: true}
	m.table.result = append(m.table.result, attr)
	return m.table
}
// UnsignedInteger registers an unsigned INTEGER column named field.
func (m *MigrationTable) UnsignedInteger(field string) *MigrationAttributes {
	attr := &MigrationAttribute{field: field, fieldType: "INTEGER", unsigned: true}
	m.table.result = append(m.table.result, attr)
	return m.table
}
// UnsignedMediumInteger registers an unsigned MEDIUMINT column named field.
func (m *MigrationTable) UnsignedMediumInteger(field string) *MigrationAttributes {
	attr := &MigrationAttribute{field: field, fieldType: "MEDIUMINT", unsigned: true}
	m.table.result = append(m.table.result, attr)
	return m.table
}
// UnsignedSmallInteger registers an unsigned SMALLINT column named field.
func (m *MigrationTable) UnsignedSmallInteger(field string) *MigrationAttributes {
	attr := &MigrationAttribute{field: field, fieldType: "SMALLINT", unsigned: true}
	m.table.result = append(m.table.result, attr)
	return m.table
}
func (m *MigrationTable) UnsignedTinyInteger(field string) *MigrationAttributes {
m.table.result = append(m.table.result,&MigrationAttribute{
field: field,
fieldType: "TINYINT",
unsigned: true,
})
return m.table
} | migration/migration_field_digital.go | 0.676406 | 0.4856 | migration_field_digital.go | starcoder |
package math3d
import (
"fmt"
"math"
"unsafe"
)
// Matrix is a 4x4 float32 matrix stored column-major (OpenGL-style layout,
// as established by the indexes table below).
type Matrix struct {
	values [16]float32
}

// indexes maps [row][column] positions onto the column-major values array:
// row 0 lives at values[0], values[4], values[8], values[12], and so on.
var indexes = [4][4]int {
	[4]int{ 0, 4, 8, 12 },
	[4]int{ 1, 5, 9, 13 },
	[4]int{ 2, 6, 10, 14 },
	[4]int{ 3, 7, 11, 15 },
}
// NewMatrix returns a matrix initialised with the given 16 values
// (column-major order).
func NewMatrix(values [16]float32) *Matrix {
	return &Matrix{values: values}
}
// Identity returns a new 4x4 identity matrix.
func Identity() *Matrix {
	m := new(Matrix)
	// A fresh Matrix is all zeros; only the diagonal needs setting.
	for i := 0; i < 4; i++ {
		m.values[indexes[i][i]] = 1.0
	}
	return m
}
// Perspective builds a right-handed perspective projection matrix.
//
// fovy is the vertical field of view in degrees, aspect the width/height
// ratio, and zNear/zFar the clip plane distances.
func Perspective(fovy float32, aspect float32, zNear float32, zFar float32) *Matrix {
	// Half the vertical field of view, in radians.
	r := DegreesToRadians64(float64(fovy) * 0.5)
	scale := float32(1.0 / math.Tan(r))
	result := new(Matrix)
	result.values[indexes[0][0]] = scale / aspect
	result.values[indexes[1][1]] = scale
	result.values[indexes[2][3]] = -1.0
	result.values[indexes[2][2]] = (zFar + zNear) / (zNear - zFar)
	result.values[indexes[3][2]] = (2.0 * zFar * zNear) / (zNear - zFar)
	// The original called result.Print() here — leftover debug output that
	// wrote the matrix to stdout on every construction. Removed.
	return result
}
// Translation returns a matrix that translates by v.
func Translation(v *Vector) *Matrix {
	m := Identity()
	// The translation column occupies values[12..14] in column-major layout.
	m.values[12], m.values[13], m.values[14] = v.values[0], v.values[1], v.values[2]
	return m
}
// LookAt builds a view matrix for a camera positioned at eye, looking toward
// center, with the given up direction.
func LookAt(eye *Vector, center *Vector, up *Vector) *Matrix {
	forward := center.Subtract(eye).Normalize()
	side := forward.Cross(up.Normalize()).Normalize()
	newUp := side.Cross(forward).Normalize()

	// Rotation part: rows are the camera basis vectors (forward negated).
	view := Matrix{}
	view.values[indexes[0][0]] = side.values[0]
	view.values[indexes[0][1]] = side.values[1]
	view.values[indexes[0][2]] = side.values[2]
	view.values[indexes[1][0]] = newUp.values[0]
	view.values[indexes[1][1]] = newUp.values[1]
	view.values[indexes[1][2]] = newUp.values[2]
	view.values[indexes[2][0]] = -forward.values[0]
	view.values[indexes[2][1]] = -forward.values[1]
	view.values[indexes[2][2]] = -forward.values[2]

	// Shift the world so the eye sits at the origin, then rotate.
	shift := Translation(NewVector([3]float32{-eye.values[0], -eye.values[1], -eye.values[2]}))
	return view.Multiply(shift)
}
// Print writes the matrix to stdout in bracketed, row-major form.
func (m *Matrix) Print() {
	fmt.Print("[")
	for row := 0; row < 4; row++ {
		fmt.Print("\n\t")
		for col := 0; col < 4; col++ {
			if col > 0 {
				fmt.Print(", ")
			}
			fmt.Printf("%4.3f", m.values[indexes[row][col]])
		}
	}
	fmt.Print("\n]\n")
}
// SetValues replaces the matrix contents with the given 16 values
// (column-major, matching the internal layout).
func (m *Matrix) SetValues(values [16]float32) {
	m.values = values
}
// MultiplyMatrices multiplies the given matrices left to right and returns
// the product. At least one matrix must be supplied.
func MultiplyMatrices(n ...*Matrix) *Matrix {
	result := n[0]
	for _, next := range n[1:] {
		result = result.Multiply(next)
	}
	return result
}
// Pointer returns an unsafe pointer to the first matrix element, e.g. for
// passing to graphics APIs that expect a raw float array.
func (m *Matrix) Pointer() unsafe.Pointer {
	return unsafe.Pointer(&m.values[0])
}
// NaiveMultiply stores the product m*n into p using the textbook triple-loop
// algorithm and returns p. Kept alongside unrolledMultiply for reference.
func NaiveMultiply(m, n, p *Matrix) *Matrix {
	for row := 0; row < 4; row++ {
		for col := 0; col < 4; col++ {
			var sum float32
			for k := 0; k < 4; k++ {
				sum += m.values[indexes[row][k]] * n.values[indexes[k][col]]
			}
			p.values[indexes[row][col]] = sum
		}
	}
	return p
}
// unrolledMultiply stores the product m1*m2 into mat and returns mat.
// The sixteen dot products are fully unrolled over the column-major values
// array; it computes the same result as NaiveMultiply.
func unrolledMultiply(m1, m2, mat *Matrix) *Matrix {
	mat.values[0] = m1.values[0] * m2.values[0] + m1.values[4] * m2.values[1] + m1.values[8] * m2.values[2] + m1.values[12] * m2.values[3]
	mat.values[1] = m1.values[1] * m2.values[0] + m1.values[5] * m2.values[1] + m1.values[9] * m2.values[2] + m1.values[13] * m2.values[3];
	mat.values[2] = m1.values[2] * m2.values[0] + m1.values[6] * m2.values[1] + m1.values[10] * m2.values[2] + m1.values[14] * m2.values[3];
	mat.values[3] = m1.values[3] * m2.values[0] + m1.values[7] * m2.values[1] + m1.values[11] * m2.values[2] + m1.values[15] * m2.values[3];
	mat.values[4] = m1.values[0] * m2.values[4] + m1.values[4] * m2.values[5] + m1.values[8] * m2.values[6] + m1.values[12] * m2.values[7];
	mat.values[5] = m1.values[1] * m2.values[4] + m1.values[5] * m2.values[5] + m1.values[9] * m2.values[6] + m1.values[13] * m2.values[7];
	mat.values[6] = m1.values[2] * m2.values[4] + m1.values[6] * m2.values[5] + m1.values[10] * m2.values[6] + m1.values[14] * m2.values[7];
	mat.values[7] = m1.values[3] * m2.values[4] + m1.values[7] * m2.values[5] + m1.values[11] * m2.values[6] + m1.values[15] * m2.values[7];
	mat.values[8] = m1.values[0] * m2.values[8] + m1.values[4] * m2.values[9] + m1.values[8] * m2.values[10] + m1.values[12] * m2.values[11];
	mat.values[9] = m1.values[1] * m2.values[8] + m1.values[5] * m2.values[9] + m1.values[9] * m2.values[10] + m1.values[13] * m2.values[11];
	mat.values[10] = m1.values[2] * m2.values[8] + m1.values[6] * m2.values[9] + m1.values[10] * m2.values[10] + m1.values[14] * m2.values[11];
	mat.values[11] = m1.values[3] * m2.values[8] + m1.values[7] * m2.values[9] + m1.values[11] * m2.values[10] + m1.values[15] * m2.values[11];
	mat.values[12] = m1.values[0] * m2.values[12] + m1.values[4] * m2.values[13] + m1.values[8] * m2.values[14] + m1.values[12] * m2.values[15];
	mat.values[13] = m1.values[1] * m2.values[12] + m1.values[5] * m2.values[13] + m1.values[9] * m2.values[14] + m1.values[13] * m2.values[15];
	mat.values[14] = m1.values[2] * m2.values[12] + m1.values[6] * m2.values[13] + m1.values[10] * m2.values[14] + m1.values[14] * m2.values[15];
	mat.values[15] = m1.values[3] * m2.values[12] + m1.values[7] * m2.values[13] + m1.values[11] * m2.values[14] + m1.values[15] * m2.values[15];
	return mat
}
// Multiply returns the product of m and n as a new matrix.
// A nil n is treated as identity: m itself is returned.
func (m *Matrix) Multiply(n *Matrix) *Matrix {
	if n != nil {
		return m.MultiplyP(n, new(Matrix))
	}
	return m
}
func (m *Matrix) MultiplyP(n *Matrix, p *Matrix) *Matrix {
if n != nil {
unrolledMultiply(m, n, p)
//NaiveMultiply(m, n, p)
}
return p
} | matrix.go | 0.564579 | 0.734941 | matrix.go | starcoder |
package objects
import (
"log"
"math"
)
// TimingPoint describes one timing section of a map: its start time, the bpm
// in effect, and the sample settings.
type TimingPoint struct {
	Time int64
	BaseBpm, Bpm float64 // BaseBpm is the last uninherited bpm; Bpm the effective one
	SampleSet int
	SampleIndex int
	SampleVolume float64
}

// GetRatio returns the effective bpm relative to the base (uninherited) bpm.
func (t TimingPoint) GetRatio() float64 {
	return t.Bpm / t.BaseBpm
}
// Timings tracks all timing points of a map and which point is currently in
// effect during playback.
type Timings struct {
	Points []TimingPoint // all points, in order of addition
	queue []TimingPoint // points not yet reached during playback
	SliderMult float64
	Current TimingPoint
	fullBPM, partBPM float64 // last uninherited bpm / bpm of the current section
	BaseSet int
	LastSet int
	TickRate float64
}

// NewTimings returns an empty Timings with default sample sets.
func NewTimings() *Timings {
	return &Timings{BaseSet: 1, LastSet: 1}
}
// AddPoint appends a timing point. A positive bpm starts a new uninherited
// section; a non-positive bpm is interpreted as a multiplier relative to the
// last uninherited bpm (the -100/x encoding).
func (tim *Timings) AddPoint(time int64, bpm float64, sampleset, sampleindex int, samplevolume float64) {
	point := TimingPoint{
		Time:         time,
		Bpm:          bpm,
		SampleSet:    sampleset,
		SampleIndex:  sampleindex,
		SampleVolume: samplevolume,
	}
	if point.Bpm <= 0 {
		// Inherited point: derive the effective bpm from the base bpm,
		// clamping the multiplier at 0.1.
		point.Bpm = tim.fullBPM / math.Max(0.1, -100.0/point.Bpm)
	} else {
		tim.fullBPM = point.Bpm
	}
	point.BaseBpm = tim.fullBPM
	tim.Points = append(tim.Points, point)
	tim.queue = append(tim.queue, point)
}
// Update pops the next queued timing point (at most one per call) once its
// start time has been reached, making it the current point.
func (tim *Timings) Update(time int64) {
	if len(tim.queue) == 0 {
		return
	}
	next := tim.queue[0]
	if next.Time > time {
		return
	}
	tim.queue = tim.queue[1:]
	tim.partBPM = next.Bpm
	tim.Current = next
}
// clamp restricts a to the inclusive range [min, max].
func clamp(a int, min int, max int) int {
	switch {
	case a > max:
		return max
	case a < min:
		return min
	default:
		return a
	}
}
// clampF restricts a to the inclusive range [min, max].
func clampF(a float64, min float64, max float64) float64 {
	switch {
	case a > max:
		return max
	case a < min:
		return min
	default:
		return a
	}
}
// GetPoint returns the timing point active at the given time. Times before
// the first point map to the first point; times after the last map to the
// last. Assumes Points is non-empty — TODO confirm callers guarantee this.
func (tim *Timings) GetPoint(time int64) TimingPoint {
	for i := range tim.Points {
		if time < tim.Points[i].Time {
			return tim.Points[clamp(i-1, 0, len(tim.Points)-1)]
		}
	}
	return tim.Points[len(tim.Points)-1]
}
// GetSliderTimeS returns the duration of a slider of the given pixel length,
// using the timing point active at the given time.
func (tim Timings) GetSliderTimeS(time int64, pixelLength float64) int64 {
	return int64(tim.GetPoint(time).Bpm * pixelLength / (100.0 * tim.SliderMult))
}

// GetSliderTime returns the slider duration using the current section's bpm.
func (tim Timings) GetSliderTime(pixelLength float64) int64 {
	return int64(tim.partBPM * pixelLength / (100.0 * tim.SliderMult))
}

// GetSliderTimeP returns the slider duration using the given timing point.
func (tim Timings) GetSliderTimeP(point TimingPoint, pixelLength float64) int64 {
	return int64((point.Bpm * pixelLength / (100.0 * tim.SliderMult)))
}
// Reset rewinds playback to the beginning by rebuilding the pending queue
// from all known timing points.
func (tim *Timings) Reset() {
	tim.queue = make([]TimingPoint, len(tim.Points))
	copy(tim.queue, tim.Points)
	// Guard: the original indexed queue[0] unconditionally and panicked when
	// no timing points had been added yet.
	if len(tim.queue) > 0 {
		tim.Current = tim.queue[0]
	}
}
// Log prints the number of known timing points, for debugging.
func (tim *Timings) Log() {
	log.Println(len(tim.Points))
}
package matrix
import (
"errors"
"fmt"
)
// ScanDirection selects the major order used when scanning the matrix.
type ScanDirection uint

const (
	// ROW for row-major iteration (rows outer, columns inner)
	ROW ScanDirection = 1
	// COLUMN for column-major iteration (columns outer, rows inner)
	COLUMN ScanDirection = 2
)

// State is the value stored in each block of the matrix.
type State uint16

// The valid block states.
const (
	// StateInit represents the initial block state of the matrix
	StateInit State = iota
	// ZERO represents the initial state.
	// Deprecated: use StateInit instead
	ZERO = StateInit
	// StateFalse represents the block has been set to false
	StateFalse State = 0x1
	// StateTrue represents the block has been set to TRUE
	StateTrue State = 0x2
	// StateVersion indicates the version block of matrix
	StateVersion State = 0x3
	// StateFormat indicates the format block of matrix
	StateFormat State = 0x4
	// StateFinder indicates the finder block of matrix
	StateFinder State = 0x5
)
// String renders the state as an uppercase hexadecimal literal, e.g. "0x2".
func (s State) String() string {
	return fmt.Sprintf("0x%X", uint16(s))
}
var (
	// ErrorOutRangeOfW is returned when an x coordinate lies outside [0, width).
	ErrorOutRangeOfW = errors.New("out of range of width")
	// ErrorOutRangeOfH is returned when a y coordinate lies outside [0, height).
	ErrorOutRangeOfH = errors.New("out of range of height")
)
// StateSliceMatched reports whether two state slices are element-wise equal.
// Deprecated: since rule3_backup removed
func StateSliceMatched(ss1, ss2 []State) bool {
	if len(ss1) != len(ss2) {
		return false
	}
	for i, s := range ss1 {
		if s != ss2[i] {
			return false
		}
	}
	return true
}
// New allocates a width x height matrix with every block set to StateInit.
func New(width, height int) *Matrix {
	m := &Matrix{
		mat:    make([][]State, width),
		width:  width,
		height: height,
	}
	for w := range m.mat {
		m.mat[w] = make([]State, height)
	}
	m.init()
	return m
}
// Matrix is a matrix data type holding block states.
// mat is indexed as mat[x][y]: width columns of height cells each
// (width:3 height:4 corresponds to [3][4]).
type Matrix struct {
	mat [][]State
	width int
	height int
}
// init resets every block of the matrix to StateInit.
func (m *Matrix) init() {
	for _, column := range m.mat {
		for h := range column {
			column[h] = StateInit
		}
	}
}
// print writes the matrix to stdout, one row per line.
func (m *Matrix) print() {
	m.Iterate(ROW, func(x, y int, s State) {
		fmt.Printf("%2d ", s)
		// End the line after the last column of each row.
		if x == m.width-1 {
			fmt.Println()
		}
	})
}
// Print writes the matrix to stdout, one row per line.
func (m *Matrix) Print() {
	m.print()
}
// Copy returns a deep copy of the matrix.
func (m *Matrix) Copy() *Matrix {
	dup := &Matrix{
		width:  m.width,
		height: m.height,
		mat:    make([][]State, m.width),
	}
	for w, column := range m.mat {
		dup.mat[w] = append([]State(nil), column...)
	}
	return dup
}
// Width returns the number of columns in the matrix.
func (m *Matrix) Width() int {
	return m.width
}

// Height returns the number of rows in the matrix.
func (m *Matrix) Height() int {
	return m.height
}
// Set stores state c at position (w, h). It returns an error when the
// position lies outside the matrix bounds.
// (The old comment claimed it sets the block "as true"; it stores c.)
func (m *Matrix) Set(w, h int, c State) error {
	if w >= m.width || w < 0 {
		return ErrorOutRangeOfW
	}
	if h >= m.height || h < 0 {
		return ErrorOutRangeOfH
	}
	m.mat[w][h] = c
	return nil
}
// Get returns the state stored at position (w, h), or StateInit plus an
// error when the position lies outside the matrix bounds.
func (m *Matrix) Get(w, h int) (State, error) {
	switch {
	case w < 0 || w >= m.width:
		return StateInit, ErrorOutRangeOfW
	case h < 0 || h >= m.height:
		return StateInit, ErrorOutRangeOfH
	}
	return m.mat[w][h], nil
}
// IterateFunc receives the position (x, y) and state of each visited block.
type IterateFunc func(int, int, State)
// Iterate walks every block and calls f with its position and state.
// ROW visits row by row, COLUMN visits column by column (recommended);
// any other direction is a no-op.
func (m *Matrix) Iterate(dir ScanDirection, f IterateFunc) {
	switch dir {
	case ROW:
		for h := 0; h < m.height; h++ {
			for w := 0; w < m.width; w++ {
				f(w, h, m.mat[w][h])
			}
		}
	case COLUMN:
		for w := 0; w < m.width; w++ {
			for h := 0; h < m.height; h++ {
				f(w, h, m.mat[w][h])
			}
		}
	}
}
// XOR returns StateTrue when the two states differ and StateFalse otherwise.
func XOR(s1, s2 State) State {
	if s1 == s2 {
		return StateFalse
	}
	return StateTrue
}
// Row returns a copy of row cur (cur indexes the y dimension), or nil when
// cur is out of range. The returned slice does not alias matrix storage.
func (m *Matrix) Row(cur int) []State {
	if cur >= m.height || cur < 0 {
		return nil
	}
	// A row has one element per column, so its length is the width.
	// (The original sized it by m.height, which panics on the write loop
	// whenever width > height and returns a wrong-length slice otherwise.)
	row := make([]State, m.width)
	for w := 0; w < m.width; w++ {
		row[w] = m.mat[w][cur]
	}
	return row
}
// Col returns column cur (cur indexes the x dimension), or nil when cur is
// out of range. The returned slice aliases the matrix storage: mutating it
// mutates the matrix.
func (m *Matrix) Col(cur int) []State {
	if cur >= m.width || cur < 0 {
		return nil
	}
	return m.mat[cur]
}
package math
// Box2 is an axis-aligned 2D bounding box defined by its two corners.
type Box2 struct {
	min Vector2 // corner with the smallest X and Y
	max Vector2 // corner with the largest X and Y
}
// NewDefaultBox2 returns an empty box (min at +Inf, max at -Inf) so that the
// first expansion defines its extent. Equivalent to makeEmpty.
func NewDefaultBox2() *Box2 {
	emptyMin := NewVector2Inf(1)
	emptyMax := NewVector2Inf(-1)
	return NewBox2(emptyMin, emptyMax)
}
// NewBox2 returns a box spanning the given min and max corners.
// The corner values are copied, not aliased.
func NewBox2(min *Vector2, max *Vector2) *Box2 {
	b := new(Box2)
	b.min = Vector2{X: min.X, Y: min.Y}
	b.max = Vector2{X: max.X, Y: max.Y}
	return b
}
// NewBox2FromComponents returns a box spanning the given corner coordinates.
func NewBox2FromComponents(minX, minY, maxX, maxY float32) *Box2 {
	var b Box2
	b.min.X, b.min.Y = minX, minY
	b.max.X, b.max.Y = maxX, maxY
	return &b
}
// NewBox2FromCenterAndSize returns a box of the given size centred on center.
func NewBox2FromCenterAndSize(center *Vector2, size *Vector2) *Box2 {
	half := size.Clone()
	half.MultiplyScalar(0.5)
	b := &Box2{min: *center.Clone(), max: *center.Clone()}
	b.min.Sub(half)
	b.max.Add(half)
	return b
}
// Clone returns a deep copy of the box.
func (box *Box2) Clone() *Box2 {
	dup := new(Box2)
	dup.min.Copy(&box.min)
	dup.max.Copy(&box.max)
	return dup
}
// Copy sets box to the same extent as source.
func (box *Box2) Copy(source *Box2) {
	box.min.Copy(&source.min)
	box.max.Copy(&source.max)
}

// IsEmpty reports whether the box contains no points (max < min on some
// axis — also the state produced by NewDefaultBox2).
func (box *Box2) IsEmpty() bool {
	return box.max.X < box.min.X || box.max.Y < box.min.Y
}
// GetCenter returns the box centre, or the zero vector for an empty box.
func (box *Box2) GetCenter() *Vector2 {
	if box.IsEmpty() {
		return NewVector2(0, 0)
	}
	center := NewVector2(0, 0)
	center.SetAddVectors(&box.min, &box.max)
	center.MultiplyScalar(0.5)
	return center
}
// GetSize returns the box extent (max - min) per axis, or the zero vector
// for an empty box.
func (box *Box2) GetSize() *Vector2 {
	if box.IsEmpty() {
		return NewVector2(0, 0)
	}
	size := NewVector2(0, 0)
	// max - min. The original passed (min, max), which — given the a-b
	// argument order SetAddVectors/Subtract use elsewhere in this package —
	// produced a negated size.
	size.SetSubVectors(&box.max, &box.min)
	return size
}
// ExpandByPoint grows the box just enough to contain point.
func (box *Box2) ExpandByPoint(point *Vector2) {
	box.min.Min(point)
	box.max.Max(point)
}

// ExpandByVector grows the box by vector on each side.
func (box *Box2) ExpandByVector(vector *Vector2) {
	box.min.Sub(vector)
	box.max.Add(vector)
}

// ExpandByScalar grows the box by scalar on each side.
func (box *Box2) ExpandByScalar(scalar float32) {
	box.min.SubScalar(scalar)
	box.max.AddScalar(scalar)
}
// ContainsPoint reports whether point lies inside the box, borders included.
func (box *Box2) ContainsPoint(point *Vector2) bool {
	return point.X >= box.min.X && point.X <= box.max.X &&
		point.Y >= box.min.Y && point.Y <= box.max.Y
}
// ContainsBox reports whether b lies entirely inside the box, borders included.
func (box *Box2) ContainsBox(b *Box2) bool {
	if b.min.X < box.min.X || b.max.X > box.max.X {
		return false
	}
	return b.min.Y >= box.min.Y && b.max.Y <= box.max.Y
}
// IntersectsBox reports whether b and the box overlap; touching edges count.
func (box *Box2) IntersectsBox(b *Box2) bool {
	return b.max.X >= box.min.X && b.min.X <= box.max.X &&
		b.max.Y >= box.min.Y && b.min.Y <= box.max.Y
}
// ClampPoint returns a copy of point clamped to lie inside the box.
func (box *Box2) ClampPoint(point *Vector2) *Vector2 {
	clamped := point.Clone()
	clamped.Clamp(&box.min, &box.max)
	return clamped
}
// DistanceToPoint returns the distance from point to the nearest point of
// the box; zero when the point lies inside.
func (box *Box2) DistanceToPoint(point *Vector2) float32 {
	nearest := point.Clone()
	nearest.Clamp(&box.min, &box.max)
	nearest.Sub(point)
	return nearest.GetLength()
}
// Intersect shrinks the box to its overlap with b.
// NOTE(review): disjoint boxes produce an inverted (empty) box; callers
// should check IsEmpty afterwards.
func (box *Box2) Intersect(b *Box2) {
	box.min.Max(&b.min)
	box.max.Min(&b.max)
}

// Union grows the box to cover both itself and b.
func (box *Box2) Union(b *Box2) {
	box.min.Min(&b.min)
	box.max.Max(&b.max)
}

// Translate moves the box by offset.
func (box *Box2) Translate(offset *Vector2) {
	box.min.Add(offset)
	box.max.Add(offset)
}
// Equals reports whether both boxes have identical corners.
func (box *Box2) Equals(b *Box2) bool {
	return box.min.Equals(&b.min) && box.max.Equals(&b.max)
}
package epoch
import (
"errors"
"fmt"
"os"
"strconv"
"strings"
"time"
)
// TimeUnit represents a time unit (the resolution of an epoch timestamp).
type TimeUnit byte

const (
	// UnitSeconds represents seconds.
	UnitSeconds TimeUnit = iota
	// UnitMilliseconds represents milliseconds.
	UnitMilliseconds
	// UnitMicroseconds represents microseconds.
	UnitMicroseconds
	// UnitNanoseconds represents nanoseconds.
	UnitNanoseconds
)
// ParseUnit maps a unit abbreviation ("s"/"sec", "ms"/"milli", "us"/"micro",
// "ns"/"nano") to its TimeUnit. Unknown input yields UnitSeconds plus an error.
func ParseUnit(input string) (TimeUnit, error) {
	units := map[string]TimeUnit{
		"s": UnitSeconds, "sec": UnitSeconds,
		"ms": UnitMilliseconds, "milli": UnitMilliseconds,
		"us": UnitMicroseconds, "micro": UnitMicroseconds,
		"ns": UnitNanoseconds, "nano": UnitNanoseconds,
	}
	if u, ok := units[input]; ok {
		return u, nil
	}
	return UnitSeconds, fmt.Errorf("failed to parse input '%v' to unit", input)
}
// ToTimestamp converts t into an epoch timestamp of the given unit.
// The returned value is meaningless when the error is non-nil.
func ToTimestamp(t time.Time, unit TimeUnit) (int64, error) {
	switch unit {
	case UnitSeconds:
		return t.Unix(), nil
	case UnitMilliseconds:
		return t.UnixNano() / (1000 * 1000), nil
	case UnitMicroseconds:
		return t.UnixNano() / 1000, nil
	case UnitNanoseconds:
		return t.UnixNano(), nil
	default:
		// Zero value instead of the original arbitrary sentinel 255; callers
		// must not use the value when err != nil either way.
		return 0, fmt.Errorf("unknown unit '%v'", unit)
	}
}
// abs returns the absolute value of i.
func abs(i int) int {
	if i >= 0 {
		return i
	}
	return -i
}
// ParseTimestamp converts an epoch timestamp of the given unit into a
// time.Time. An unknown unit yields the zero time plus an error.
func ParseTimestamp(timestamp int64, unit TimeUnit) (time.Time, error) {
	switch unit {
	case UnitSeconds:
		return time.Unix(timestamp, 0), nil
	case UnitMilliseconds:
		// Scale up to nanosecond accuracy.
		return time.Unix(0, timestamp*1000*1000), nil
	case UnitMicroseconds:
		// Scale up to nanosecond accuracy.
		return time.Unix(0, timestamp*1000), nil
	case UnitNanoseconds:
		return time.Unix(0, timestamp), nil
	}
	return time.Time{}, fmt.Errorf("unknown unit '%v'", unit)
}
// GuessUnit guesses whether timestamp is in seconds, milliseconds,
// microseconds or nanoseconds by comparing its digit count against the digit
// counts of ref expressed in each unit. Ties favour the coarser unit
// (seconds first, nanoseconds last), matching the original <= chain.
func GuessUnit(timestamp int64, ref time.Time) TimeUnit {
	// Digit count of the timestamp under inspection; strconv is used for
	// consistency with the reference conversions below (the original mixed
	// fmt.Sprintf and strconv.FormatInt).
	lenIn := len(strconv.FormatInt(timestamp, 10))

	candidates := []struct {
		unit TimeUnit
		ref  int64
	}{
		{UnitSeconds, ref.Unix()},
		{UnitMilliseconds, ref.UnixNano() / (1000 * 1000)},
		{UnitMicroseconds, ref.UnixNano() / 1000},
		{UnitNanoseconds, ref.UnixNano()},
	}

	best := UnitNanoseconds
	bestDiff := int(^uint(0) >> 1) // max int
	for _, c := range candidates {
		diff := abs(len(strconv.FormatInt(c.ref, 10)) - lenIn)
		// Strict < keeps the earlier (coarser) unit on ties.
		if diff < bestDiff {
			best, bestDiff = c.unit, diff
		}
	}
	return best
}
// ErrParseFormatted is used when parsing the formatted string failed.
var ErrParseFormatted = errors.New("failed to convert string to time")

const (
	// FormatGo handles Go's default time.Now() format (e.g. 2019-01-26 09:43:57.377055 +0100 CET m=+0.644739467)
	FormatGo = "2006-01-02 15:04:05.999999999 -0700 MST"
	// FormatSimple handles a plain date-time such as "2019-01-25 21:51:38"
	FormatSimple = "2006-01-02 15:04:05.999999999"
	// FormatHTTP matches http.TimeFormat without importing net/http, which
	// would increase the binary size significantly.
	FormatHTTP = "Mon, 02 Jan 2006 15:04:05 GMT"
)
// ParseFormatted takes a human readable time string and returns Go's default
// time type and the layout it recognized.
// Example input: "Mon, 02 Jan 2006 15:04:05 MST".
//
// Layouts are tried in a fixed order and the first successful parse wins;
// the order is preserved from the original implementation because some
// layouts are supersets of others (e.g. time.Parse with RFC3339 also accepts
// fractional seconds, shadowing RFC3339Nano).
func ParseFormatted(input string) (time.Time, string, error) {
	layouts := []string{
		time.RFC1123,     // "Mon, 02 Jan 2006 15:04:05 MST"
		time.RFC1123Z,    // "Mon, 02 Jan 2006 15:04:05 -0700"
		time.RFC3339,     // "2006-01-02T15:04:05Z07:00"
		time.RFC3339Nano, // "2006-01-02T15:04:05.999999999Z07:00"
		time.RFC822,      // "02 Jan 06 15:04 MST"
		time.RFC822Z,     // "02 Jan 06 15:04 -0700"
		time.RFC850,      // "Monday, 02-Jan-06 15:04:05 MST"
		time.ANSIC,       // "Mon Jan _2 15:04:05 2006"
		time.UnixDate,    // "Mon Jan _2 15:04:05 MST 2006"
		time.RubyDate,    // "Mon Jan 02 15:04:05 -0700 2006"
		time.Kitchen,     // "3:04PM"
		time.Stamp,       // "Jan _2 15:04:05"
		time.StampMilli,  // "Jan _2 15:04:05.000"
		time.StampMicro,  // "Jan _2 15:04:05.000000"
		time.StampNano,   // "Jan _2 15:04:05.000000000"
		FormatHTTP,       // "Mon, 02 Jan 2006 15:04:05 GMT"
	}
	for _, layout := range layouts {
		if t, err := time.Parse(layout, input); err == nil {
			return t, layout, nil
		}
	}
	// Go's time.Now().String() output may carry a monotonic clock suffix
	// ("m=+…") that time.Parse cannot handle, so strip it first.
	if t, err := time.Parse(FormatGo, strings.Split(input, " m=")[0]); err == nil {
		return t, FormatGo, nil
	}
	// "2019-01-25 21:51:38"
	if t, err := time.Parse(FormatSimple, input); err == nil {
		return t, FormatSimple, nil
	}
	return time.Time{}, "", ErrParseFormatted
}
// Operator for arithmetic operations applied by Calculate.
type Operator uint

const (
	// Undefined operator (the zero value; returned on parse failure).
	Undefined Operator = iota
	// Add operation.
	Add
	// Sub operation.
	Sub
)
// ErrUnkownOperator is returned when no matching operator was found.
//
// NOTE: the identifier keeps its historical misspelling ("Unkown") because it
// is exported API and renaming it would break callers; the message itself is
// now spelled correctly.
var ErrUnkownOperator = errors.New("unknown operator")
// ToOperator returns the operator matching the given string.
func ToOperator(s string) (Operator, error) {
	// Symbol-to-operator mapping; only "+" and "-" are recognized.
	ops := map[string]Operator{
		"+": Add,
		"-": Sub,
	}
	if op, ok := ops[s]; ok {
		return op, nil
	}
	return Undefined, fmt.Errorf("%w: '%v'", ErrUnkownOperator, s)
}
// Calculate does basic add/sub calculations on the given input.
//
// Units "ns", "us", "ms", "s", "m" and "h" are applied as a time.Duration,
// while "D" (day), "W" (week), "M" (month) and "Y" (year) go through AddDate
// so calendar arithmetic (month lengths, leap years) is respected.
// An unrecognized unit yields the zero time.Time.
func Calculate(input time.Time, op Operator, amount int, unit string) time.Time {
	// Sub is implemented as adding a negated amount; any other operator
	// (including Undefined) adds, matching the original behavior.
	if op == Sub {
		amount = -amount
	}
	switch unit {
	case "ns":
		return input.Add(time.Duration(amount) * time.Nanosecond)
	case "us":
		return input.Add(time.Duration(amount) * time.Microsecond)
	case "ms":
		return input.Add(time.Duration(amount) * time.Millisecond)
	case "s":
		return input.Add(time.Duration(amount) * time.Second)
	case "m":
		return input.Add(time.Duration(amount) * time.Minute)
	case "h":
		return input.Add(time.Duration(amount) * time.Hour)
	case "D":
		return input.AddDate(0, 0, amount)
	case "W":
		return input.AddDate(0, 0, amount*7)
	case "M":
		return input.AddDate(0, amount, 0)
	case "Y":
		return input.AddDate(amount, 0, 0)
	}
	return time.Time{}
}
// FormattedString returns the given time in the given format (e.g. 'unix' or 'rfc3339').
// Format names are matched case-insensitively; an empty name returns Go's
// default string representation, and an unknown name warns on stderr and
// falls back to the same default.
func FormattedString(t time.Time, format string) string {
	// Normalize so callers may pass e.g. "RFC3339" or "rfc3339".
	format = strings.ToLower(format)
	switch format {
	case "":
		return t.String()
	case "unix":
		return t.Format(time.UnixDate)
	case "ruby":
		return t.Format(time.RubyDate)
	case "ansic":
		return t.Format(time.ANSIC)
	case "rfc822":
		return t.Format(time.RFC822)
	case "rfc822z":
		return t.Format(time.RFC822Z)
	case "rfc850":
		return t.Format(time.RFC850)
	case "rfc1123":
		return t.Format(time.RFC1123)
	case "rfc1123z":
		return t.Format(time.RFC1123Z)
	case "rfc3339":
		return t.Format(time.RFC3339)
	case "rfc3339nano":
		return t.Format(time.RFC3339Nano)
	case "kitchen":
		return t.Format(time.Kitchen)
	case "stamp":
		return t.Format(time.Stamp)
	case "stampms":
		return t.Format(time.StampMilli)
	case "stampus":
		return t.Format(time.StampMicro)
	case "stampns":
		return t.Format(time.StampNano)
	case "http":
		return t.Format(FormatHTTP)
	default:
		// Unknown format names are non-fatal: warn and use the default.
		fmt.Fprintf(os.Stderr, "failed to parse format '%v'\n", format)
		return t.String()
	}
} | epoch.go | 0.786336 | 0.510435 | epoch.go | starcoder
package density
import (
"fmt"
"math/big"
"sort"
)
// ValueRing represents a ring of values: identifiers placed on a circular
// number range [0, 2^(8*wordSize)).
type ValueRing struct {
	ids      *SortedArrayBigInts // sorted identifiers currently on the ring
	max      *big.Int            // The maximum value of any value in the ring (2^(8*wordSize)).
	wordSize int64               // identifier size in bytes
	dhtSpan  int64               // multiplier used when sizing scan windows in FindBestLocationForValue — presumably the DHT replication span; confirm with callers
}
// NewValueRing is used to create a new instance.
//
// wordSize is the identifier width in bytes and fixes the ring's number
// range to 2^(8*wordSize); dhtSpan is retained for later density scans.
// The caller-provided sorted identifier array is used as-is (not copied).
func NewValueRing(wordSize, dhtSpan int64, initialArrayOfIdentifiers *SortedArrayBigInts) *ValueRing {
	return &ValueRing{
		ids:      initialArrayOfIdentifiers,
		wordSize: wordSize,
		dhtSpan:  dhtSpan,
		// max = 2^(8*wordSize), the exclusive upper bound of the ring.
		max: new(big.Int).Exp(big.NewInt(2), big.NewInt(8*wordSize), nil),
	}
}
// GetIds returns the underlying id object.
//
// (The Get prefix is non-idiomatic Go, but the method is exported API and is
// kept for compatibility.)
func (v *ValueRing) GetIds() *SortedArrayBigInts {
	return v.ids
}
// GetNumberRange returns the size of the number range (2^(8*wordSize)).
// The returned *big.Int is the ring's own value; callers must not mutate it.
func (v *ValueRing) GetNumberRange() *big.Int {
	return v.max
}
// NumIdsInRange returns the number of ids in the range offset to
// offset + windowSize, mod the number range.
//
// The window is treated as (offset, offset+windowSize]: ids strictly greater
// than offset and not greater than the end are counted, wrapping around the
// top of the number range when necessary.
func (v *ValueRing) NumIdsInRange(offset, windowSize *big.Int) int {
	// Index of the first id strictly greater than offset.
	startIndex := sort.Search(v.ids.Len(), func(i int) bool {
		id := v.ids.Get(i)
		return id.Cmp(offset) > 0
	})
	endOffset := new(big.Int).Add(offset, windowSize)
	// Index of the first id strictly greater than the window end.
	endIndex := sort.Search(v.ids.Len(), func(i int) bool {
		id := v.ids.Get(i)
		return id.Cmp(endOffset) > 0
	})
	// If the window extends past the top of the number range, wrap it and
	// also count the ids between zero and the wrapped end offset.
	extraGivenWrapAround := 0
	if v.max.Cmp(endOffset) < 0 {
		endOffset.Sub(endOffset, v.max)
		extraGivenWrapAround = sort.Search(v.ids.Len(), func(i int) bool {
			id := v.ids.Get(i)
			return id.Cmp(endOffset) > 0
		})
	}
	return endIndex - startIndex + extraGivenWrapAround
}
// FindNextHighest finds the value that is the next highest in the ring,
// returning both its index and the id itself.
func (v *ValueRing) FindNextHighest(offset *big.Int) (int, big.Int) {
	idx := sort.Search(v.ids.Len(), func(i int) bool {
		candidate := v.ids.Get(i)
		return candidate.Cmp(offset) > 0
	})
	// Nothing above offset means we wrap around to the smallest id.
	if idx == v.ids.Len() {
		idx = 0
	}
	return idx, v.ids.Get(idx)
}
// GetRelativeIndex returns the index that is "jump" away from index "from",
// wrapping around the end of the id array.
func (v *ValueRing) GetRelativeIndex(from, jump int) int {
	return (from + jump) % v.ids.Len()
}
// FindBestLocationForValue finds the ideal location for a new value in the ring.
// Ideal is defined as the middle of the largest gap between two identifiers in
// the largest region of low density.
//
// The algorithm has three phases: (1) slide a window around the ring counting
// how many identifiers fall in each position; (2) find the longest contiguous
// run of minimum-count window positions (the largest low-density region);
// (3) within that region, pick the midpoint of the largest gap between two
// adjacent identifiers. Progress is printed to stdout throughout.
func (v *ValueRing) FindBestLocationForValue() (big.Int) {
	// Phase 1: scan the ring with a sliding window of ~dhtSpan/numNodes of
	// the number range, stepping half a node-spacing at a time.
	fmt.Println("Scan the number range to see how many identifiers cover each region.")
	fmt.Println("Determine the indices that correspond to regions that have the lowest number identifiers.")
	numRange := v.GetNumberRange()
	fmt.Printf("    Number Range: %#x\n", numRange)
	numNodes := v.ids.Len()
	var windowSize = new(big.Int)
	windowSize.Mul(numRange, big.NewInt(v.dhtSpan)).Div(windowSize, big.NewInt(int64(numNodes)))
	fmt.Printf("    Window Size:  %#x\n", windowSize)
	numSteps := numNodes * 2
	var stepSize = new(big.Int)
	stepSize.Div(numRange, big.NewInt(int64(numSteps)))
	fmt.Printf("    Step Size:    %#x\n", stepSize)
	fmt.Println("    Determine the weightings of each window")
	var indicesWithLowestNumIdsInRange []int = nil
	lowestNumIdsInRange := numNodes
	var offset = *big.NewInt(0)
	index := 0
	fmt.Println("    index, num identifiers in range")
	for offset.Cmp(numRange) == -1 {
		numIdsInRange := v.NumIdsInRange(&offset, windowSize)
		// Print out comma separated values of index of the scan and the number of ids in the range.
		fmt.Printf("    %5d, %d\n", index, numIdsInRange)
		// Track every window position tied for the minimum count.
		if (numIdsInRange == int(lowestNumIdsInRange)) {
			indicesWithLowestNumIdsInRange = append(indicesWithLowestNumIdsInRange, index)
		}
		if (numIdsInRange < int(lowestNumIdsInRange)) {
			lowestNumIdsInRange = numIdsInRange
			indicesWithLowestNumIdsInRange = nil
			indicesWithLowestNumIdsInRange = append(indicesWithLowestNumIdsInRange, index)
		}
		index++
		offset.Add(&offset, stepSize)
	}
	fmt.Printf("List of windows/regions start offsets that have the lowest number of idenfiers (%d)\n", lowestNumIdsInRange)
	for _, s := range indicesWithLowestNumIdsInRange {
		fmt.Println(s)
	}
	fmt.Println()
	// Phase 2: find the longest contiguous run of minimum-count indices.
	// The array is traversed twice so runs that wrap past index 0 are found.
	fmt.Println("Find the largest low density region (longest contiguous set of indices)")
	startOfLongestContiguousRun := 0
	lenOfLongestContiguousRun := 1
	lenOfContiguousRun := 1
	inContiguousRun := true
	numIndicesWithLowestNumIdsInRange := len(indicesWithLowestNumIdsInRange)
	// Go through the array twice to ensure we handle the wrap-around
	for i := 1; i < numIndicesWithLowestNumIdsInRange * 2; i++ {
		idx := i % numIndicesWithLowestNumIdsInRange
		idxLessOne := (i - 1) % numIndicesWithLowestNumIdsInRange
		// Adjacent scan indices, or the last scan index followed by 0,
		// both count as contiguous on the ring.
		if (indicesWithLowestNumIdsInRange[idx] - indicesWithLowestNumIdsInRange[idxLessOne] == 1) ||
			(indicesWithLowestNumIdsInRange[idxLessOne] == int(numSteps)-1 && indicesWithLowestNumIdsInRange[idx] == 0) {
			// Is contiguous
			if (inContiguousRun) {
				lenOfContiguousRun++
			} else {
				lenOfContiguousRun = 1
				inContiguousRun = true
			}
			if (lenOfLongestContiguousRun < lenOfContiguousRun) {
				lenOfLongestContiguousRun = lenOfContiguousRun
				startOfLongestContiguousRun = (idx + 1 - lenOfContiguousRun)
				if startOfLongestContiguousRun < 0 {
					startOfLongestContiguousRun += numIndicesWithLowestNumIdsInRange
				}
			}
		} else {
			inContiguousRun = false
		}
	}
	// If the number of indices in range is the same for all offsets then the detected length will be wrong
	if lenOfLongestContiguousRun == numIndicesWithLowestNumIdsInRange*2 {
		lenOfLongestContiguousRun = numIndicesWithLowestNumIdsInRange
	}
	fmt.Printf("    Start of Longest Contiguous Run: %d\n", startOfLongestContiguousRun)
	fmt.Printf("    Length of Longest Contiguous Run: %d\n", lenOfLongestContiguousRun)
	fmt.Println()
	// Phase 3: within the low-density region, find the largest gap between
	// two adjacent identifiers and return its midpoint.
	fmt.Println("Find the largest gap in the largest contiguous piece of low density")
	var startOffsetOfLowDensityRange = new(big.Int)
	startOffsetOfLowDensityRange.Mul(stepSize, big.NewInt(int64(indicesWithLowestNumIdsInRange[startOfLongestContiguousRun])))
	fmt.Printf("    Start Offset of lowest density range: %#x\n", startOffsetOfLowDensityRange)
	var rangeOfLowDensityRange = new(big.Int)
	rangeOfLowDensityRange.Mul(stepSize, big.NewInt(int64(lenOfLongestContiguousRun - 1)))
	rangeOfLowDensityRange.Add(rangeOfLowDensityRange, windowSize)
	fmt.Printf("    Size of of lowest density range:      %#x\n", rangeOfLowDensityRange)
	foundIndex, foundValue := v.FindNextHighest(startOffsetOfLowDensityRange)
	fmt.Printf("    First identifier in range: Index: %d, Value: %#x\n", foundIndex, &foundValue)
	numIdsInRange := v.NumIdsInRange(startOffsetOfLowDensityRange, rangeOfLowDensityRange)
	fmt.Printf("    Number of identifiers in range: %d\n", numIdsInRange)
	var largestGap big.Int
	largestGapIndex := foundIndex
	for i := foundIndex; i < foundIndex + numIdsInRange; i++ {
		thisIndex := i % numNodes
		low := v.ids.Get(thisIndex)
		nextIndex := (i + 1) % numNodes
		// fmt.Printf("i %d, next %d\n", i, next)
		high := v.ids.Get(nextIndex)
		var diff big.Int
		// Wrapping from the last id back to the first: lift "high" by the
		// ring size so the subtraction yields the true gap.
		if nextIndex == 0 {
			high.Add(&high, v.max)
		}
		diff.Sub(&high, &low)
		// fmt.Printf("Low:  %#x\n", &low)
		// fmt.Printf("High: %#x\n", &high)
		// fmt.Printf("Gap %d, %d: %#x\n", this, next, &diff)
		if largestGap.Cmp(&diff) == -1 {
			largestGap = diff
			largestGapIndex = thisIndex
		}
	}
	// fmt.Printf("Location of largest gap is %d\n", largestGapIndex)
	// fmt.Printf("Largest Gap size: %#x\n", &largestGap)
	var halfOfLargestGap big.Int
	halfOfLargestGap.Div(&largestGap, big.NewInt(2))
	valStartOfLargestGapIndex := v.ids.Get(largestGapIndex)
	var ideal big.Int
	ideal.Add(&valStartOfLargestGapIndex, &halfOfLargestGap)
	return ideal
} | internal/density/value_ring.go | 0.627951 | 0.531757 | value_ring.go | starcoder
package primitives
import (
"math"
"math/cmplx"
)
// InfinitePoint is a sentinel representing a non-valid (missed) intersection.
var InfinitePoint = Point{1e20, 1e20, 1e20}

// lambda is a small bias subtracted from intersection distances to prevent
// self-intersection of secondary rays ("shadow acne").
//
// NOTE(review): the original value was 1e6, which — subtracted from a hit
// distance t — would move every hit point far behind the ray origin on any
// ordinary scene scale; a self-intersection epsilon is conventionally tiny.
// Changed to 1e-6; confirm against the scene units used by callers.
const lambda = 1e-6
// solveQuadratic solves a*x^2 + b*x + c = 0 for the coefficients packed in
// floats as [a, b, c]. It returns both roots (x1 with -sqrt, x2 with +sqrt)
// and ret == true only when both roots are purely real.
func solveQuadratic(floats [3]float64) (x1, x2 float64, ret bool) {
	a, b, c := complex(floats[0], 0.0), complex(floats[1], 0.0), complex(floats[2], 0.0)
	// BUG FIX: b*b instead of cmplx.Pow(b, 2). Pow computes Exp(2*Log(b)),
	// and for negative real b the Log carries an iπ term whose rounding
	// leaves a tiny imaginary residue — making perfectly real roots look
	// complex and ret come back false. Plain multiplication is exact.
	root := cmplx.Sqrt(b*b - 4*a*c)
	x1Complex := (-b - root) / (2 * a)
	x2Complex := (-b + root) / (2 * a)
	ret = imag(x1Complex) == 0.0 && imag(x2Complex) == 0.0
	x1 = real(x1Complex)
	x2 = real(x2Complex)
	return
}
// Intersectable are objects that can intercept rays.
type Intersectable interface {
	// Intersection returns the point where r first hits the object, or
	// InfinitePoint on a miss.
	Intersection(r Ray) Point
	// Normal returns the unit surface normal at point p.
	Normal(p Point) Point
}
// Point in 3D space; doubles as a 3-component vector for the arithmetic
// helpers below.
type Point struct {
	X, Y, Z float64
}

// Add returns the component-wise sum of the two points.
func (p Point) Add(q Point) Point {
	return Point{p.X + q.X, p.Y + q.Y, p.Z + q.Z}
}

// Subtract returns the component-wise difference p - q.
func (p Point) Subtract(q Point) Point {
	return Point{p.X - q.X, p.Y - q.Y, p.Z - q.Z}
}

// Multiply is the scalar multiplication of the vector by s.
func (p Point) Multiply(s float64) Point {
	return Point{p.X * s, p.Y * s, p.Z * s}
}

// CrossProduct calculates the cross product p × q.
func (p Point) CrossProduct(q Point) Point {
	return Point{p.Y*q.Z - p.Z*q.Y, p.Z*q.X - p.X*q.Z, p.X*q.Y - p.Y*q.X}
}

// DotProduct calculates the dot product p · q.
func (p Point) DotProduct(q Point) float64 {
	return p.X*q.X + p.Y*q.Y + p.Z*q.Z
}

// Length returns the Euclidean norm of the point/vector.
func (p Point) Length() float64 {
	return math.Sqrt(p.X*p.X + p.Y*p.Y + p.Z*p.Z)
}

// Normalize scales the referenced point/vector to unit length in place.
func (p *Point) Normalize() {
	length := p.Length()
	p.X = p.X / length
	p.Y = p.Y / length
	p.Z = p.Z / length
}
// Reflect returns the ray reflected at point p about the given surface
// normal: v' = v - 2*(v·n)*n. The formula assumes normal is unit length.
func (r Ray) Reflect(normal, p Point) Ray {
	// Redundant double parentheses from the original removed; the
	// computation is unchanged.
	v := r.V.Subtract(normal.Multiply(2).Multiply(r.V.DotProduct(normal)))
	return Ray{p, v}
}

// Ray representation from the camera: origin P and direction V.
type Ray struct {
	P, V Point
}
// Triangle one of the objects available.
type Triangle struct {
	V1, V2, V3 Point // vertices
	N          Point // stored normal; note Normal() recomputes from the vertices instead of reading this field — confirm whether N is used elsewhere
}
// Sphere one of the objects available, defined by center and radius.
type Sphere struct {
	Center Point
	Radius float64
}
// Intersection between a Ray and a Triangle using the Möller–Trumbore
// algorithm. It returns the hit point, or InfinitePoint on a miss.
func (tri *Triangle) Intersection(r Ray) Point {
	side1 := tri.V2.Subtract(tri.V1) // edge 1
	side2 := tri.V3.Subtract(tri.V1) // edge 2
	side3 := r.P.Subtract(tri.V1)    // vector from V1 to the ray origin (tvec)
	// det = (dir × edge2) · edge1; its reciprocal scales the barycentrics.
	det := r.V.CrossProduct(side2).DotProduct(side1)
	invDet := 1.0 / det
	// BUG FIX: the barycentric u coordinate must dot (dir × edge2) with the
	// origin offset side3, not with side1 — the original computed
	// det * invDet, i.e. u ≈ 1 for every ray, rejecting nearly all hits.
	u := (r.V.CrossProduct(side2).DotProduct(side3)) * invDet
	if u < 0 || u > 1 {
		return InfinitePoint
	}
	v := (side3.CrossProduct(side1).DotProduct(r.V)) * invDet
	if v < 0 || v > 1 {
		return InfinitePoint
	}
	// The point must lie inside the triangle: u + v < 1.
	if u+v >= 1 {
		return InfinitePoint
	}
	t := (side3.CrossProduct(side1).DotProduct(side2)) * invDet
	if t < 0 {
		return InfinitePoint
	}
	// Back the hit distance off by lambda to avoid self-intersection of
	// secondary rays (see the lambda constant).
	t = t - lambda
	return r.P.Add(r.V.Multiply(t))
}
// Intersection calculation between a Ray and a Sphere. It returns the
// nearest hit point in front of the ray origin, or InfinitePoint on a miss.
func (s *Sphere) Intersection(r Ray) Point {
	// Substitute P + t*V into |X - C|^2 = R^2, giving a quadratic in t.
	var coeff [3]float64
	coeff[0] = r.V.DotProduct(r.V)
	coeff[1] = 2.0 * (r.V.DotProduct(r.P.Subtract(s.Center)))
	coeff[2] = (r.P.Subtract(s.Center)).DotProduct((r.P.Subtract(s.Center))) - s.Radius*s.Radius
	// Renamed from "real", which shadowed the built-in real() function.
	x1, x2, isReal := solveQuadratic(coeff)
	if !isReal {
		return InfinitePoint
	}
	// Order the roots so x1 is the nearer intersection.
	if x1 > x2 {
		x1, x2 = x2, x1
	}
	// A negative root lies behind the ray origin; fall back to the far root.
	if x1 < 0 {
		x1 = x2
		if x1 < 0 {
			return InfinitePoint
		}
	}
	// Back off by lambda to avoid self-intersection of secondary rays.
	x1 = x1 - lambda
	return r.P.Add(r.V.Multiply(x1))
}
// Normal of the triangle; the point argument is inconsequential because a
// flat triangle has the same normal everywhere.
func (tri *Triangle) Normal(p Point) Point {
	edge1 := tri.V2.Subtract(tri.V1)
	edge2 := tri.V3.Subtract(tri.V1)
	n := edge1.CrossProduct(edge2)
	n.Normalize()
	return n
}
// Normal of a sphere at the point: the unit vector from the center to p.
// p is assumed to lie on the sphere's surface.
func (s *Sphere) Normal(p Point) Point {
	ret := p.Subtract(s.Center)
	ret.Normalize()
	return ret
} | pkg/primitives/Objects.go | 0.849285 | 0.694827 | Objects.go | starcoder
package timeago
import (
"fmt"
"reflect"
"time"
)
// Precision defines the minimum unit of time to be considered when
// rendering a relative time string.
type Precision uint

const (
	// SecondPrecision is the second precision (the zero value / default).
	SecondPrecision Precision = iota
	// MinutePrecision is the minute precision.
	MinutePrecision
	// HourPrecision is the hour precision.
	HourPrecision
	// DayPrecision is the day precision.
	DayPrecision
	// MonthPrecision is the month precision.
	MonthPrecision
	// YearPrecision is the year precision.
	YearPrecision
)
// Options define the options of the library.
type Options struct {
	Precision Precision // minimum unit to consider; the zero value falls back to SecondPrecision
	Format    Format    // output strings; the zero value falls back to DefaultFormat
}
// Of returns the string representation of the given time with the given options.
//
// The time is rendered at the requested precision; when the elapsed time is
// too large for that precision the function escalates to the next coarser
// precision (seconds -> minutes -> hours -> days -> months -> years).
func Of(t time.Time, options ...Options) string {
	opt := Options{
		Precision: SecondPrecision,
		Format:    DefaultFormat,
	}
	// Merge caller options over the defaults. Note a zero Precision
	// (SecondPrecision) or zero Format cannot be set explicitly here — they
	// are indistinguishable from "not provided" — which is harmless because
	// they are the defaults anyway.
	for _, o := range options {
		if o.Precision != 0 {
			opt.Precision = o.Precision
		}
		if !reflect.DeepEqual(o.Format, Format{}) {
			opt.Format = o.Format
		}
	}
	switch opt.Precision {
	case SecondPrecision:
		seconds := time.Since(t).Round(time.Second).Seconds()
		if seconds == 0 {
			return opt.Format.ThisSecond
		}
		if seconds == 1 {
			return opt.Format.LastSecond
		}
		if seconds < 60 {
			return fmt.Sprintf(opt.Format.SecondsAgo, int(seconds))
		}
		// BUG FIX: propagate the custom Format when escalating precision.
		// The original recursed with only Precision set, silently reverting
		// to DefaultFormat for any duration beyond the requested precision.
		return Of(t, Options{
			Precision: MinutePrecision,
			Format:    opt.Format,
		})
	case MinutePrecision:
		minutes := time.Since(t).Round(time.Minute).Minutes()
		if minutes == 0 {
			return opt.Format.ThisMinute
		}
		if minutes == 1 {
			return opt.Format.LastMinute
		}
		if minutes < 60 {
			return fmt.Sprintf(opt.Format.MinutesAgo, int(minutes))
		}
		return Of(t, Options{
			Precision: HourPrecision,
			Format:    opt.Format,
		})
	case HourPrecision:
		hours := time.Since(t).Round(time.Hour).Hours()
		if hours == 0 {
			return opt.Format.ThisHour
		}
		if hours == 1 {
			return opt.Format.LastHour
		}
		if hours < 24 {
			return fmt.Sprintf(opt.Format.HoursAgo, int(hours))
		}
		return Of(t, Options{
			Precision: DayPrecision,
			Format:    opt.Format,
		})
	case DayPrecision:
		days := time.Since(t).Round(time.Hour*24).Hours() / 24
		if days == 0 {
			return opt.Format.Today
		}
		if days == 1 {
			return opt.Format.Yesterday
		}
		if days < 30 {
			return fmt.Sprintf(opt.Format.DaysAgo, int(days))
		}
		return Of(t, Options{
			Precision: MonthPrecision,
			Format:    opt.Format,
		})
	case MonthPrecision:
		// A month is approximated as 30 days.
		months := time.Since(t).Round(time.Hour*24*30).Hours() / (24 * 30)
		if months == 0 {
			return opt.Format.ThisMonth
		}
		if months == 1 {
			return opt.Format.LastMonth
		}
		if months < 12 {
			return fmt.Sprintf(opt.Format.MonthsAgo, int(months))
		}
		return Of(t, Options{
			Precision: YearPrecision,
			Format:    opt.Format,
		})
	default:
		// Year precision; a year is approximated as 365 days.
		years := time.Since(t).Round(time.Hour*24*365).Hours() / (24 * 365)
		if years == 0 {
			return opt.Format.ThisYear
		}
		if years == 1 {
			return opt.Format.LastYear
		}
		return fmt.Sprintf(opt.Format.YearsAgo, int(years))
	}
}
// Format is the format of the string returned by the library. The *Ago
// fields are fmt format strings that receive a single integer count; the
// remaining fields are returned verbatim.
type Format struct {
	ThisSecond string
	LastSecond string
	SecondsAgo string
	ThisMinute string
	LastMinute string
	MinutesAgo string
	ThisHour   string
	LastHour   string
	HoursAgo   string
	Today      string
	Yesterday  string
	DaysAgo    string
	ThisMonth  string
	LastMonth  string
	MonthsAgo  string
	ThisYear   string
	LastYear   string
	YearsAgo   string
}
// DefaultFormat is the default (English) format of the string returned by
// the library; it is used whenever Options.Format is left zero.
var DefaultFormat = Format{
	ThisSecond: "now",
	LastSecond: "1 second ago",
	SecondsAgo: "%d seconds ago",
	ThisMinute: "now",
	LastMinute: "1 minute ago",
	MinutesAgo: "%d minutes ago",
	ThisHour:   "this hour",
	LastHour:   "last hour",
	HoursAgo:   "%d hours ago",
	Today:      "today",
	Yesterday:  "yesterday",
	DaysAgo:    "%d days ago",
	ThisMonth:  "this month",
	LastMonth:  "last month",
	MonthsAgo:  "%d months ago",
	ThisYear:   "this year",
	LastYear:   "last year",
	YearsAgo:   "%d years ago",
} | timea.go | 0.645679 | 0.464234 | timea.go | starcoder
package pure
import (
"context"
"errors"
"fmt"
"time"
"github.com/benthosdev/benthos/v4/internal/bloblang/field"
"github.com/benthosdev/benthos/v4/internal/bundle"
"github.com/benthosdev/benthos/v4/internal/component"
"github.com/benthosdev/benthos/v4/internal/component/cache"
"github.com/benthosdev/benthos/v4/internal/component/processor"
"github.com/benthosdev/benthos/v4/internal/docs"
"github.com/benthosdev/benthos/v4/internal/message"
"github.com/benthosdev/benthos/v4/internal/tracing"
)
// init registers the "cache" processor with the global processor bundle,
// including its constructor, full config spec, documentation and examples.
// Registration failure here is a programming error, hence the panic.
func init() {
	err := bundle.AllProcessors.Add(func(conf processor.Config, mgr bundle.NewManagement) (processor.V1, error) {
		p, err := newCache(conf.Cache, mgr)
		if err != nil {
			return nil, err
		}
		return processor.NewV2BatchedToV1Processor("cache", p, mgr), nil
	}, docs.ComponentSpec{
		Name: "cache",
		Categories: []string{
			"Integration",
		},
		Summary: `
Performs operations against a [cache resource](/docs/components/caches/about) for each message, allowing you to store or retrieve data within message payloads.`,
		Description: `
For use cases where you wish to cache the result of processors consider using the ` + "[`cached` processor](/docs/components/processors/cached)" + ` instead.
This processor will interpolate functions within the ` + "`key` and `value`" + ` fields individually for each message. This allows you to specify dynamic keys and values based on the contents of the message payloads and metadata. You can find a list of functions [here](/docs/configuration/interpolation#bloblang-queries).`,
		Config: docs.FieldComponent().WithChildren(
			docs.FieldString("resource", "The [`cache` resource](/docs/components/caches/about) to target with this processor."),
			docs.FieldString("operator", "The [operation](#operators) to perform with the cache.").HasOptions("set", "add", "get", "delete"),
			docs.FieldString("key", "A key to use with the cache.").IsInterpolated(),
			docs.FieldString("value", "A value to use with the cache (when applicable).").IsInterpolated(),
			docs.FieldString(
				"ttl", "The TTL of each individual item as a duration string. After this period an item will be eligible for removal during the next compaction. Not all caches support per-key TTLs, those that do will have a configuration field `default_ttl`, and those that do not will fall back to their generally configured TTL setting.",
				"60s", "5m", "36h",
			).IsInterpolated().AtVersion("3.33.0").Advanced(),
		).ChildDefaultAndTypesFromStruct(processor.NewCacheConfig()),
		Examples: []docs.AnnotatedExample{
			{
				Title: "Deduplication",
				Summary: `
Deduplication can be done using the add operator with a key extracted from the
message payload, since it fails when a key already exists we can remove the
duplicates using a
[` + "`bloblang` processor" + `](/docs/components/processors/bloblang):`,
				Config: `
pipeline:
  processors:
    - cache:
        resource: foocache
        operator: add
        key: '${! json("message.id") }'
        value: "storeme"
    - bloblang: root = if errored() { deleted() }
cache_resources:
  - label: foocache
    redis:
      url: tcp://TODO:6379
`,
			},
			{
				Title: "Deduplication Batch-Wide",
				Summary: `
Sometimes it's necessary to deduplicate a batch of messages (AKA a window) by a single identifying value. This can be done by introducing a ` + "[`branch` processor](/docs/components/processors/branch)" + `, which executes the cache only once on behalf of the batch, in this case with a value make from a field extracted from the first and last messages of the batch:`,
				Config: `
pipeline:
  processors:
    # Try and add one message to a cache that identifies the whole batch
    - branch:
        request_map: |
          root = if batch_index() == 0 {
            json("id").from(0) + json("meta.tail_id").from(-1)
          } else { deleted() }
        processors:
          - cache:
              operator: add
              key: ${! content() }
              value: t
    # Delete all messages if we failed
    - bloblang: |
        root = if errored().from(0) {
          deleted()
        }
`,
			},
			{
				Title: "Hydration",
				Summary: `
It's possible to enrich payloads with content previously stored in a cache by
using the [` + "`branch`" + `](/docs/components/processors/branch) processor:`,
				Config: `
pipeline:
  processors:
    - branch:
        processors:
          - cache:
              resource: foocache
              operator: get
              key: '${! json("message.document_id") }'
        result_map: 'root.message.document = this'
        # NOTE: If the data stored in the cache is not valid JSON then use
        # something like this instead:
        # result_map: 'root.message.document = content().string()'
cache_resources:
  - label: foocache
    memcached:
      addresses: [ "TODO:11211" ]
`,
			},
		},
		Footnotes: `
## Operators
### ` + "`set`" + `
Set a key in the cache to a value. If the key already exists the contents are
overridden.
### ` + "`add`" + `
Set a key in the cache to a value. If the key already exists the action fails
with a 'key already exists' error, which can be detected with
[processor error handling](/docs/configuration/error_handling).
### ` + "`get`" + `
Retrieve the contents of a cached key and replace the original message payload
with the result. If the key does not exist the action fails with an error, which
can be detected with [processor error handling](/docs/configuration/error_handling).
### ` + "`delete`" + `
Delete a key and its contents from the cache. If the key does not exist the
action is a no-op and will not fail with an error.`,
	})
	if err != nil {
		panic(err)
	}
}
//------------------------------------------------------------------------------
// cacheProc applies a single cache operation per message, with the key,
// value and TTL interpolated against each message of the batch.
type cacheProc struct {
	key   *field.Expression // interpolated cache key
	value *field.Expression // interpolated value (used by set/add)
	ttl   *field.Expression // interpolated per-key TTL duration string; empty means no TTL

	mgr       bundle.NewManagement
	cacheName string        // name of the cache resource to access
	operator  cacheOperator // the configured set/add/get/delete operation
}
// newCache validates the config, compiles the interpolated expressions and
// verifies the target cache resource exists, returning a ready cacheProc.
func newCache(conf processor.CacheConfig, mgr bundle.NewManagement) (*cacheProc, error) {
	name := conf.Resource
	if name == "" {
		return nil, errors.New("cache name must be specified")
	}
	operator, err := cacheOperatorFromString(conf.Operator)
	if err != nil {
		return nil, err
	}
	env := mgr.BloblEnvironment()
	keyExpr, err := env.NewField(conf.Key)
	if err != nil {
		return nil, fmt.Errorf("failed to parse key expression: %v", err)
	}
	valueExpr, err := env.NewField(conf.Value)
	if err != nil {
		return nil, fmt.Errorf("failed to parse value expression: %v", err)
	}
	ttlExpr, err := env.NewField(conf.TTL)
	if err != nil {
		return nil, fmt.Errorf("failed to parse ttl expression: %v", err)
	}
	if !mgr.ProbeCache(name) {
		return nil, fmt.Errorf("cache resource '%v' was not found", name)
	}
	return &cacheProc{
		key:       keyExpr,
		value:     valueExpr,
		ttl:       ttlExpr,
		mgr:       mgr,
		cacheName: name,
		operator:  operator,
	}, nil
}
//------------------------------------------------------------------------------
// cacheOperator is the signature shared by all cache operations: it applies
// the operation and reports the resulting payload (if any), whether that
// payload should replace the message contents, and any error.
type cacheOperator func(ctx context.Context, cache cache.V1, key string, value []byte, ttl *time.Duration) ([]byte, bool, error)
// newCacheSetOperator returns an operator that writes the key/value pair,
// overwriting any existing entry. It never replaces message contents.
func newCacheSetOperator() cacheOperator {
	return func(ctx context.Context, cache cache.V1, key string, value []byte, ttl *time.Duration) ([]byte, bool, error) {
		return nil, false, cache.Set(ctx, key, value, ttl)
	}
}
// newCacheAddOperator returns an operator that writes the key/value pair
// only if the key does not already exist; otherwise the cache returns an
// error. It never replaces message contents.
func newCacheAddOperator() cacheOperator {
	return func(ctx context.Context, cache cache.V1, key string, value []byte, ttl *time.Duration) ([]byte, bool, error) {
		return nil, false, cache.Add(ctx, key, value, ttl)
	}
}
// newCacheGetOperator returns an operator that fetches the cached contents
// of the key; the result replaces the message contents on success.
func newCacheGetOperator() cacheOperator {
	return func(ctx context.Context, cache cache.V1, key string, _ []byte, _ *time.Duration) ([]byte, bool, error) {
		payload, err := cache.Get(ctx, key)
		return payload, true, err
	}
}
// newCacheDeleteOperator returns an operator that removes the key from the
// cache. It never replaces message contents.
func newCacheDeleteOperator() cacheOperator {
	return func(ctx context.Context, cache cache.V1, key string, _ []byte, _ *time.Duration) ([]byte, bool, error) {
		return nil, false, cache.Delete(ctx, key)
	}
}
// cacheOperatorFromString resolves an operator name from the config into its
// implementation, erroring on unrecognized names.
func cacheOperatorFromString(operator string) (cacheOperator, error) {
	constructors := map[string]func() cacheOperator{
		"set":    newCacheSetOperator,
		"add":    newCacheAddOperator,
		"get":    newCacheGetOperator,
		"delete": newCacheDeleteOperator,
	}
	if ctor, ok := constructors[operator]; ok {
		return ctor(), nil
	}
	return nil, fmt.Errorf("operator not recognised: %v", operator)
}
//------------------------------------------------------------------------------
// ProcessBatch applies the configured cache operation to every message of
// the batch. Per-message failures flag that message as errored rather than
// failing the batch; for "get" the cached payload replaces the message
// contents. Always returns a single output batch and a nil error.
func (c *cacheProc) ProcessBatch(ctx context.Context, spans []*tracing.Span, msg *message.Batch) ([]*message.Batch, error) {
	resMsg := msg.Copy()
	_ = resMsg.Iter(func(index int, part *message.Part) error {
		// Interpolations are resolved against the original (uncopied) batch.
		key := c.key.String(index, msg)
		value := c.value.Bytes(index, msg)
		var ttl *time.Duration
		// An empty TTL expression result means "no per-key TTL".
		if ttls := c.ttl.String(index, msg); ttls != "" {
			td, err := time.ParseDuration(ttls)
			if err != nil {
				c.mgr.Logger().Debugf("TTL must be a duration: %v\n", err)
				processor.MarkErr(part, spans[index], err)
				return nil
			}
			ttl = &td
		}
		var result []byte
		var useResult bool
		var err error
		// NOTE(review): the cache access uses context.Background() rather
		// than the passed ctx — confirm whether cancellation should
		// propagate to the cache operation.
		if cerr := c.mgr.AccessCache(context.Background(), c.cacheName, func(cache cache.V1) {
			result, useResult, err = c.operator(context.Background(), cache, key, value, ttl)
		}); cerr != nil {
			err = cerr
		}
		if err != nil {
			// "key already exists" is an expected outcome for the add
			// operator (e.g. deduplication), so it is logged differently.
			if err != component.ErrKeyAlreadyExists {
				c.mgr.Logger().Debugf("Operator failed for key '%s': %v\n", key, err)
			} else {
				c.mgr.Logger().Debugf("Key already exists: %v\n", key)
			}
			processor.MarkErr(part, spans[index], err)
			return nil
		}
		if useResult {
			part.Set(result)
		}
		return nil
	})
	return []*message.Batch{resMsg}, nil
}
// Close is a no-op: this processor holds no background resources of its own
// (the cache resource is managed externally).
func (c *cacheProc) Close(ctx context.Context) error {
	return nil
} | internal/impl/pure/processor_cache.go | 0.683736 | 0.440108 | processor_cache.go | starcoder
Package controller provides libraries for building Controllers. Controllers implement Kubernetes APIs
and are central to building Operators, Workload APIs, Configuration APIs, Autoscalers, and more.
Controllers
Controllers are work queues that enqueue work in response to source.Source events (e.g. Pod Create, Update, Delete)
and trigger reconcile.Reconcile functions when the work is dequeued.
Unlike http handlers, Controllers DO NOT perform work directly in response to events, but instead enqueue
reconcile.Requests so the work is performed eventually.
* Controllers run reconcile.Reconcile functions against objects (provided as name / Namespace).
* Controllers enqueue reconcile.Requests in response events provided by source.Sources.
reconcile
reconcile.Reconcile is a function that may be called at anytime with the name / Namespace of an
object. When called, it will ensure that the state of the system matches what is specified in the object at the
time reconcile is called.
Example: reconcile is run against a ReplicationController object. The ReplicationController specifies 5 replicas.
3 Pods exist in the system. reconcile creates 2 more Pods and sets their OwnerReference to point at the
ReplicationController.
* reconcile works on a single object type. - e.g. it will only reconcile ReplicaSets.
* reconcile is triggered by a reconcile.Request containing the name / Namespace of an object to reconcile.
* reconcile does not care about the event contents or event type triggering the reconcile.Request.
- e.g. it doesn't matter whether a ReplicaSet was created or updated, reconcile will check that the correct
Pods exist either way.
* Users MUST implement reconcile themselves.
Source
source.Source provides a stream of events. Events may be internal events from watching Kubernetes
APIs (e.g. Pod Create, Update, Delete), or may be synthetic Generic events triggered by cron or WebHooks
(e.g. through a Slackbot or GitHub callback).
Example 1: source.KindSource uses the Kubernetes API Watch endpoint for a GroupVersionKind to provide
Create, Update, Delete events.
Example 2: source.ChannelSource reads Generic events from a channel fed by a WebHook called from a Slackbot.
* Source provides a stream of events for EventHandlers to handle.
* Source may provide either events from Watches (e.g. object Create, Update, Delete) or Generic triggered
from another source (e.g. WebHook callback).
* Users SHOULD use the provided Source implementations instead of implementing their own for nearly all cases.
EventHandler
eventhandler.EventHandler transforms and enqueues events from a source.Source into reconcile.Requests.
Example: a Pod Create event from a Source is provided to the eventhandler.EnqueueHandler, which enqueues a
reconcile.Request containing the name / Namespace of the Pod.
* EventHandler takes an event.Event and enqueues reconcile.Requests
* EventHandlers MAY map an event for an object of one type to a reconcile.Request for an object of another type.
* EventHandlers MAY map an event for an object to multiple reconcile.Requests for different objects.
* Users SHOULD use the provided EventHandler implementations instead of implementing their own for almost all cases.
Predicate
predicate.Predicate allows events to be filtered before they are given to EventHandlers. This allows common
filters to be reused and composed together with EventHandlers.
* Predicate takes an event.Event and returns a bool (true to enqueue)
* Predicates are optional
* Users SHOULD use the provided Predicate implementations, but MAY implement their own Predicates as needed.
PodController Diagram
Source provides event:
* &source.KindSource{"core", "v1", "Pod"} -> (Pod foo/bar Create Event)
EventHandler enqueues Request:
* &eventhandler.Enqueue{} -> (reconcile.Request{"foo", "bar"})
Reconcile is called with the Request:
* Reconcile(reconcile.Request{"foo", "bar"})
controllerManager
controllerManager registers and starts Controllers. It initializes shared dependencies - such as clients, caches,
stop channels, etc and provides these to the Controllers that it manages. controllerManager should be used
anytime multiple Controllers exist within the same program.
Usage
The following example shows creating a new Controller program which Reconciles ReplicaSet objects in response
to Pod or ReplicaSet events. The Reconcile function simply adds a label to the ReplicaSet.
See the example/main.go for a usage example.
Controller Example
1. Watch ReplicaSet and Pods Sources
1.1 ReplicaSet -> eventhandler.EnqueueHandler - enqueue the ReplicaSet Namespace and Name.
1.2 Pod (created by ReplicaSet) -> eventhandler.EnqueueOwnerHandler - enqueue the Owning ReplicaSet key.
2. reconcile ReplicaSet
2.1 ReplicaSet object created -> Read ReplicaSet, try to read Pods -> if is missing create Pods.
2.2 reconcile triggered by creation of Pods -> Read ReplicaSet and Pods, do nothing.
2.3 reconcile triggered by deletion of Pods -> Read ReplicaSet and Pods, create replacement Pods.
Watching and EventHandling
Controllers may Watch multiple Kinds of objects (e.g. Pods, ReplicaSets and Deployments), but they should
enqueue keys for only a single Type. When one Type of object must be updated in response to changes
in another Type of object, an EnqueueMappedHandler may be used to reconcile the Type that is being
updated and watch the other Type for Events. e.g. Respond to a cluster resize
Event (add / delete Node) by re-reconciling all instances of another Type that cares about the cluster size.
For example, a Deployment controller might use an EnqueueHandler and EnqueueOwnerHandler to:
* Watch for Deployment Events - enqueue the key of the Deployment.
* Watch for ReplicaSet Events - enqueue the key of the Deployment that created the ReplicaSet (e.g the Owner)
Note: reconcile.Requests are deduplicated when they are enqueued. Many Pod Events for the same ReplicaSet
may trigger only 1 reconcile invocation as each Event results in the Handler trying to enqueue
the same reconcile.Request for the ReplicaSet.
Controller Writing Tips
Reconcile Runtime Complexity:
* It is better to write Controllers to perform an O(1) reconcile N times (e.g. on N different objects) instead of
performing an O(N) reconcile 1 time (e.g. on a single object which manages N other objects).
* Example: If you need to update all Services in response to a Node being added - reconcile Services but Watch
Node events (transformed to Service object name / Namespaces) instead of Reconciling the Node and updating all
Services from a single reconcile.
Event Multiplexing:
* reconcile.Requests for the same name / Namespace are deduplicated when they are enqueued. This allows
for Controllers to gracefully handle event storms for a single object. Multiplexing multiple event Sources to
a single object type takes advantage of this.
* Example: Pod events for a ReplicaSet are transformed to a ReplicaSet name / Namespace, so the ReplicaSet
will be Reconciled only 1 time for multiple Pods.
*/
package controller | pkg/controller/doc.go | 0.856092 | 0.573858 | doc.go | starcoder |
package tree
import (
"fmt"
)
// Btree represents an AVL tree
type Btree struct {
root *Node
values []Val
len int
}
// Val interface to define the compare method used to insert and find values
type Val interface {
Comp(val Val) int8
}
// Node represents a node in the tree with a value, left and right children, and a height/balance of the node.
type Node struct {
Value Val
left, right *Node
height int8
}
// New returns a new btree
func New() *Btree { return new(Btree).Init() }
// Init initializes all values/clears the tree and returns the tree pointer
func (t *Btree) Init() *Btree {
t.root = nil
t.values = nil
t.len = 0
return t
}
// String returns a string representation of the tree values
func (t *Btree) String() string {
return fmt.Sprint(t.Values())
}
// Empty returns true if the tree is empty
func (t *Btree) Empty() bool {
return t.root == nil
}
// NotEmpty returns true if the tree is not empty
func (t *Btree) NotEmpty() bool {
return t.root != nil
}
func (t *Btree) balance() int8 {
if t.root != nil {
return balance(t.root)
}
return 0
}
// Insert inserts a new value into the tree and returns the tree pointer
func (t *Btree) Insert(value Val) *Btree {
added := false
t.root = insert(t.root, value, &added)
if added {
t.len++
}
t.values = nil
return t
}
// insert adds value into the subtree rooted at n and returns the (possibly
// new) subtree root after AVL rebalancing.
// *added reports whether a brand-new node was created; it is set to false
// when an equal value already existed and was overwritten in place.
func insert(n *Node, value Val, added *bool) *Node {
if n == nil {
*added = true
return (&Node{Value: value}).Init()
}
c := value.Comp(n.Value)
if c > 0 {
n.right = insert(n.right, value, added)
} else if c < 0 {
n.left = insert(n.left, value, added)
} else {
// Equal value: replace in place; tree shape and heights are unchanged,
// so no rebalancing is needed.
n.Value = value
*added = false
return n
}
n.height = n.maxHeight() + 1
c = balance(n)
if c > 1 {
// Left-heavy: single right rotation, or left-right double rotation,
// chosen by which side of the left child the value went to.
c = value.Comp(n.left.Value)
if c < 0 {
return n.rotateRight()
} else if c > 0 {
n.left = n.left.rotateLeft()
return n.rotateRight()
}
} else if c < -1 {
// Right-heavy: single left rotation, or right-left double rotation.
c = value.Comp(n.right.Value)
if c > 0 {
return n.rotateLeft()
} else if c < 0 {
n.right = n.right.rotateRight()
return n.rotateLeft()
}
}
return n
}
// InsertAll inserts all the values into the tree and returns the tree pointer
func (t *Btree) InsertAll(values []Val) *Btree {
for _, v := range values {
t.Insert(v)
}
return t
}
// Contains returns true if the tree contains the specified value
func (t *Btree) Contains(value Val) bool {
return t.Get(value) != nil
}
// ContainsAny returns true if the tree contains any of the values
func (t *Btree) ContainsAny(values []Val) bool {
for _, v := range values {
if t.Contains(v) {
return true
}
}
return false
}
// ContainsAll returns true if the tree contains all of the values
func (t *Btree) ContainsAll(values []Val) bool {
for _, v := range values {
if !t.Contains(v) {
return false
}
}
return true
}
// Get returns the node value associated with the search value
func (t *Btree) Get(value Val) Val {
var node *Node
if t.root != nil {
node = t.root.get(value)
}
if node != nil {
return node.Value
}
return nil
}
// Len return the number of nodes in the tree
func (t *Btree) Len() int {
return t.len
}
// Head returns the first (smallest) value in the tree, or nil if the tree
// is empty.
//
// The minimum of a binary search tree is the left-most node. The previous
// implementation carried an unreachable branch that walked right after a
// nil check that could never be true (the cursor starts at the non-nil
// root), and that branch would have dereferenced nil had it ever run.
func (t *Btree) Head() Val {
	if t.root == nil {
		return nil
	}
	node := t.root
	for node.left != nil {
		node = node.left
	}
	return node.Value
}
// Tail returns the last (largest) value in the tree, or nil if the tree
// is empty.
//
// The maximum of a binary search tree is the right-most node. The previous
// implementation carried an unreachable branch that walked left after a
// nil check that could never be true (the cursor starts at the non-nil
// root), and that branch would have dereferenced nil had it ever run.
func (t *Btree) Tail() Val {
	if t.root == nil {
		return nil
	}
	node := t.root
	for node.right != nil {
		node = node.right
	}
	return node.Value
}
// Values returns a slice of all the values in tree in order
func (t *Btree) Values() []Val {
if t.values == nil {
t.values = make([]Val, t.len)
t.Ascend(func(n *Node, i int) bool {
t.values[i] = n.Value
return true
})
}
return t.values
}
// Delete deletes the node from the tree associated with the search value
func (t *Btree) Delete(value Val) *Btree {
deleted := false
t.root = deleteNode(t.root, value, &deleted)
if deleted {
t.len--
}
t.values = nil
return t
}
// DeleteAll deletes the nodes from the tree associated with the search values
func (t *Btree) DeleteAll(values []Val) *Btree {
for _, v := range values {
t.Delete(v)
}
return t
}
// deleteNode removes the node matching value from the subtree rooted at n,
// rebalances on the way back up, and returns the new subtree root.
// *deleted is set to true only when a matching node was actually removed,
// so the caller (Btree.Delete) can keep its length counter accurate.
//
// Bug fixed: the original only set *deleted in the two-children case, so
// deleting a leaf or single-child node left Btree.len too large. The old
// nil re-check before rebalancing was unreachable (n is non-nil here) and
// has been dropped.
func deleteNode(n *Node, value Val, deleted *bool) *Node {
	if n == nil {
		return n
	}
	c := value.Comp(n.Value)
	if c < 0 {
		n.left = deleteNode(n.left, value, deleted)
	} else if c > 0 {
		n.right = deleteNode(n.right, value, deleted)
	} else {
		*deleted = true
		if n.left == nil {
			t := n.right
			n.Init() // clear the removed node's links
			return t
		} else if n.right == nil {
			t := n.left
			n.Init()
			return t
		}
		// Two children: copy the in-order successor's value here, then
		// delete the successor from the right subtree.
		t := n.right.min()
		n.Value = t.Value
		n.right = deleteNode(n.right, t.Value, deleted)
	}
	// Re-balance this node after the removal below it.
	n.height = n.maxHeight() + 1
	bal := balance(n)
	if bal > 1 {
		if balance(n.left) >= 0 {
			return n.rotateRight()
		}
		n.left = n.left.rotateLeft()
		return n.rotateRight()
	} else if bal < -1 {
		if balance(n.right) <= 0 {
			return n.rotateLeft()
		}
		n.right = n.right.rotateRight()
		return n.rotateLeft()
	}
	return n
}
// Pop deletes the last node from the tree and returns its value
func (t *Btree) Pop() Val {
value := t.Tail()
if value != nil {
t.Delete(value)
}
return value
}
// Pull deletes the first node from the tree and returns its value
func (t *Btree) Pull() Val {
value := t.Head()
if value != nil {
t.Delete(value)
}
return value
}
// NodeIterator expresses the iterator function used for traversals
type NodeIterator func(n *Node, i int) bool
// Ascend performs an ascending order traversal of the tree calling the iterator function on each node
// the iterator will continue as long as the NodeIterator returns true
func (t *Btree) Ascend(iterator NodeIterator) {
var i int
if t.root != nil {
t.root.iterate(iterator, &i, true)
}
}
// Descend performs a descending order traversal of the tree using the iterator
// the iterator will continue as long as the NodeIterator returns true
func (t *Btree) Descend(iterator NodeIterator) {
var i int
if t.root != nil {
t.root.rIterate(iterator, &i, true)
}
}
// Debug prints out useful debug information about the tree for debugging purposes
func (t *Btree) Debug() {
fmt.Println("----------------------------------------------------------------------------------------------")
if t.Empty() {
fmt.Println("tree is empty")
} else {
fmt.Println(t.Len(), "elements")
}
t.Ascend(func(n *Node, i int) bool {
if t.root.Value == n.Value {
fmt.Print("ROOT ** ")
}
n.Debug()
return true
})
fmt.Println("----------------------------------------------------------------------------------------------")
}
// Init initializes the values of the node or clears the node and returns the node pointer
func (n *Node) Init() *Node {
n.height = 1
n.left = nil
n.right = nil
return n
}
// String returns a string representing the node
func (n *Node) String() string {
return fmt.Sprint(n.Value)
}
// Debug prints out useful debug information about the tree node for debugging purposes
func (n *Node) Debug() {
var children string
if n.left == nil && n.right == nil {
children = "no children |"
} else if n.left != nil && n.right != nil {
children = fmt.Sprint("left child:", n.left.String(), " right child:", n.right.String())
} else if n.right != nil {
children = fmt.Sprint("right child:", n.right.String())
} else {
children = fmt.Sprint("left child:", n.left.String())
}
fmt.Println(n.String(), "|", "height", n.height, "|", "balance", balance(n), "|", children)
}
func height(n *Node) int8 {
if n != nil {
return n.height
}
return 0
}
func balance(n *Node) int8 {
if n == nil {
return 0
}
return height(n.left) - height(n.right)
}
func (n *Node) get(val Val) *Node {
var node *Node
c := val.Comp(n.Value)
if c < 0 {
if n.left != nil {
node = n.left.get(val)
}
} else if c > 0 {
if n.right != nil {
node = n.right.get(val)
}
} else {
node = n
}
return node
}
func (n *Node) rotateRight() *Node {
l := n.left
// Rotation
l.right, n.left = n, l.right
// update heights
n.height = n.maxHeight() + 1
l.height = l.maxHeight() + 1
return l
}
func (n *Node) rotateLeft() *Node {
r := n.right
// Rotation
r.left, n.right = n, r.left
// update heights
n.height = n.maxHeight() + 1
r.height = r.maxHeight() + 1
return r
}
// iterate performs an in-order (ascending) walk of the subtree rooted at n,
// calling iterator with each node and its running visit index *i.
// NOTE(review): cont is passed by value, so when an iterator returns false
// the walk stops within the current subtree, but ancestor frames still
// visit their own node and right subtree — confirm whether full early
// termination was intended (Values() always returns true, so it is
// unaffected).
func (n *Node) iterate(iterator NodeIterator, i *int, cont bool) {
if n != nil && cont {
n.left.iterate(iterator, i, cont)
cont = iterator(n, *i)
*i++
n.right.iterate(iterator, i, cont)
}
}
// rIterate performs a reverse in-order (descending) walk of the subtree
// rooted at n, calling iterator with each node and its running visit
// index *i.
//
// Bug fixed: the recursive calls previously invoked iterate (the ascending
// walk) on the children, so Descend produced an interleaved, non-descending
// sequence. NOTE(review): as in iterate, cont is passed by value, so an
// iterator returning false stops only the current subtree, not ancestors.
func (n *Node) rIterate(iterator NodeIterator, i *int, cont bool) {
	if n != nil && cont {
		n.right.rIterate(iterator, i, cont)
		cont = iterator(n, *i)
		*i++
		n.left.rIterate(iterator, i, cont)
	}
}
func (n *Node) min() *Node {
current := n
for current.left != nil {
current = current.left
}
return current
}
// maxHeight reports the height of the taller of n's two subtrees
// (0 for a missing child).
func (n *Node) maxHeight() int8 {
	left := height(n.left)
	right := height(n.right)
	if left >= right {
		return left
	}
	return right
}
// IntVal represents an integer tree val
type IntVal int
// Comp returns 1 if i > val, -1 if i < val and 0 if i equal to val
func (i IntVal) Comp(val Val) int8 {
v := val.(IntVal)
if i > v {
return 1
} else if i < v {
return -1
} else {
return 0
}
}
// StringVal represents an string tree val
type StringVal string
// Comp returns 1 if i > val, -1 if i < val and 0 if i equal to val
func (i StringVal) Comp(val Val) int8 {
v := val.(StringVal)
if i > v {
return 1
} else if i < v {
return -1
} else {
return 0
}
}
// UintVal represents an uint tree val
type UintVal uint
// Comp returns 1 if i > val, -1 if i < val and 0 if i equal to val
func (i UintVal) Comp(val Val) int8 {
v := val.(UintVal)
if i > v {
return 1
} else if i < v {
return -1
} else {
return 0
}
}
// Float32Val represents an float32 tree val
type Float32Val float32
// Comp returns 1 if i > val, -1 if i < val and 0 if i equal to val
func (i Float32Val) Comp(val Val) int8 {
v := val.(Float32Val)
if i > v {
return 1
} else if i < v {
return -1
} else {
return 0
}
}
// Float64Val represents an float64 tree val
type Float64Val float64
// Comp returns 1 if i > val, -1 if i < val and 0 if i equal to val
func (i Float64Val) Comp(val Val) int8 {
v := val.(Float64Val)
if i > v {
return 1
} else if i < v {
return -1
} else {
return 0
}
}
// UintptrVal represents a uintptr tree val
type UintptrVal uintptr
// Comp returns 1 if i > val, -1 if i < val and 0 if i equal to val
func (i UintptrVal) Comp(val Val) int8 {
v := val.(UintptrVal)
if i > v {
return 1
} else if i < v {
return -1
} else {
return 0
}
}
// RuneVal represents a rune tree val
type RuneVal rune
// Comp returns 1 if i > val, -1 if i < val and 0 if i equal to val
func (i RuneVal) Comp(val Val) int8 {
v := val.(RuneVal)
if i > v {
return 1
} else if i < v {
return -1
} else {
return 0
}
}
// ByteVal represents a byte tree val
type ByteVal byte
// Comp returns 1 if i > val, -1 if i < val and 0 if i equal to val
func (i ByteVal) Comp(val Val) int8 {
v := val.(ByteVal)
if i > v {
return 1
} else if i < v {
return -1
} else {
return 0
}
} | btree.go | 0.768907 | 0.604983 | btree.go | starcoder |
package delaunay
import (
"fmt"
"math"
"sort"
)
const eps = 1e-6
type Point struct {
X float64
Y float64
}
func NewPoint(x, y float64) Point {
return Point{X: x, Y: y}
}
func (p Point) String() string {
return fmt.Sprintf("(%3.1f, %3.1f)", p.X, p.Y)
}
// CompareTo orders points primarily by X and secondarily by Y (X values
// within eps are treated as equal). It returns 1 when p sorts after other,
// -1 when it sorts before, and 0 when the points are identical.
//
// Bug fixed: the final branch previously returned 0, so the comparator
// reported "equal" for any point sorting before p, breaking antisymmetry.
// Existing callers (NewEdge) only test for > 0, so returning -1 here is
// backward-compatible.
func (p Point) CompareTo(other Point) int {
	if p == other {
		return 0
	}
	if p.X > other.X || (math.Abs(p.X-other.X) < eps && p.Y > other.Y) {
		return 1
	}
	return -1
}
func (p Point) Distance(other Point) float64 {
dx := other.X - p.X
dy := other.Y - p.Y
return math.Sqrt(dx*dx + dy*dy)
}
func (p Point) AngleTo(other Point) int {
return int(360.0*math.Atan2(other.X-p.X, other.Y-p.Y)/math.Pi+360) % 360
}
type Edge struct {
P1 Point
P2 Point
}
func NewEdge(p1, p2 Point) Edge {
if p2.CompareTo(p1) > 0 {
return Edge{P1: p2, P2: p1}
} else {
return Edge{P1: p1, P2: p2}
}
}
func (e Edge) String() string {
return fmt.Sprintf("%v - %v", e.P1, e.P2)
}
type Triangle struct {
P1 Point
P2 Point
P3 Point
Circumcenter Point
Circumradius float64
}
func NewTriangle(p1, p2, p3 Point) Triangle {
center := NewPoint((p1.X+p2.X+p3.X)/3, (p1.Y+p2.Y+p3.Y)/3)
vertices := []Point{p1, p2, p3}
sort.Slice(vertices, func(i, j int) bool {
return center.AngleTo(vertices[i]) > center.AngleTo(vertices[j])
})
t := Triangle{P1: vertices[0], P2: vertices[1], P3: vertices[2]}
t.Circumcenter, t.Circumradius = t.circumcenter()
return t
}
// circumcenter computes the center and radius of the triangle's
// circumscribed circle by intersecting the perpendicular bisectors of two
// of its edges. It returns the zero point and radius 0 for a degenerate
// triangle whose vertices all lie (within eps) on one horizontal line.
func (t Triangle) circumcenter() (Point, float64) {
// Degenerate: all three vertices share (approximately) the same Y.
if math.Abs(t.P1.Y-t.P2.Y) < eps && math.Abs(t.P2.Y-t.P3.Y) < eps {
return NewPoint(0, 0), 0
}
e1 := t.Edge(0)
e2 := t.Edge(1)
// A horizontal edge has a vertical bisector whose slope below would be
// a division by ~zero, so swap in the third edge instead. The degenerate
// check above guarantees at most one edge is horizontal.
if e1.IsHorizontal() {
e1 = t.Edge(2)
} else if e2.IsHorizontal() {
e2 = t.Edge(2)
}
c1 := e1.Center()
c2 := e2.Center()
// Perpendicular-bisector slopes: negative reciprocal of each edge slope.
m1 := -1 / ((e1.P2.Y - e1.P1.Y) / (e1.P2.X - e1.P1.X))
m2 := -1 / ((e2.P2.Y - e2.P1.Y) / (e2.P2.X - e2.P1.X))
// y-intercepts of the two bisector lines through the edge midpoints.
b1 := c1.Y - m1*c1.X
b2 := c2.Y - m2*c2.X
// The bisectors intersect at the circumcenter.
cx := (b2 - b1) / (m1 - m2)
cy := m1*cx + b1
center := NewPoint(cx, cy)
// Radius is the distance from the center to any vertex.
r := center.Distance(e1.P1)
return center, r
}
func (t Triangle) String() string {
return fmt.Sprintf("[%v - %v - %v]", t.P1, t.P2, t.P3)
}
func (e Edge) Center() Point {
return NewPoint(
e.P1.X+(e.P2.X-e.P1.X)/2,
e.P1.Y+(e.P2.Y-e.P1.Y)/2,
)
}
func (e Edge) IsHorizontal() bool {
return math.Abs(e.P1.Y-e.P2.Y) < eps
}
func (t Triangle) Edge(index int) Edge {
switch index % 3 {
case 0:
return NewEdge(t.P1, t.P2)
case 1:
return NewEdge(t.P2, t.P3)
case 2:
return NewEdge(t.P3, t.P1)
}
panic(fmt.Errorf("invalid index: %d", index))
}
func (t Triangle) Edges() []Edge {
return []Edge{t.Edge(0), t.Edge(1), t.Edge(2)}
}
func (t Triangle) Vertices() []Point {
return []Point{t.P1, t.P2, t.P3}
}
func (t Triangle) UsesAnyOf(vertices []Point) bool {
verts := make(map[Point]bool, len(vertices))
for _, v := range vertices {
verts[v] = true
}
for _, tv := range t.Vertices() {
if _, ok := verts[tv]; ok {
return true
}
}
return false
}
func (t Triangle) UsesAnyOfVertices(vertices map[Point]bool) bool {
for _, tv := range t.Vertices() {
if _, ok := vertices[tv]; ok {
return true
}
}
return false
}
func reduce(points []Point, initial float64, reducer func(float64, Point) float64) float64 {
result := initial
for _, pt := range points {
result = reducer(result, pt)
}
return result
}
// reduceMinX is a reducer that keeps the smaller of the accumulator and
// the point's X coordinate.
func reduceMinX(prev float64, pt Point) float64 {
	return math.Min(prev, pt.X)
}
func reduceMinY(prev float64, pt Point) float64 {
if pt.Y < prev {
return pt.Y
} else {
return prev
}
}
func reduceMaxX(prev float64, pt Point) float64 {
if pt.X > prev {
return pt.X
} else {
return prev
}
}
func reduceMaxY(prev float64, pt Point) float64 {
if pt.Y > prev {
return pt.Y
} else {
return prev
}
}
const offset = 2
func boundingTriangle(points []Point) Triangle {
minX := reduce(points, math.MaxInt64, reduceMinX)
maxX := reduce(points, math.MinInt64, reduceMaxX)
minY := reduce(points, math.MaxInt64, reduceMinY)
maxY := reduce(points, math.MinInt64, reduceMaxY)
bottomLeft := NewPoint(minX-offset, maxY+offset)
topRight := NewPoint(maxX+offset, minY-offset)
m := -1 / ((topRight.Y - bottomLeft.Y) / (topRight.X - bottomLeft.X - offset))
b := topRight.Y - m*topRight.X
topLeft := NewPoint(bottomLeft.X, m*bottomLeft.X+b)
bottomRight := NewPoint((bottomLeft.Y-b)/m, bottomLeft.Y)
return NewTriangle(topLeft, bottomRight, NewPoint(minX-offset*5, maxY+offset*5))
}
type Delaunay struct {
triangles []Triangle
supertriangle Triangle
points []Point
superTriangleVertices map[Point]bool
}
func (d *Delaunay) Finalize() {
result := make([]Triangle, 0)
for _, t := range d.triangles {
if !t.UsesAnyOfVertices(d.superTriangleVertices) {
result = append(result, t)
}
}
d.triangles = result
}
func (d *Delaunay) Point(index int) Point {
return d.points[index]
}
func (d *Delaunay) PointCount() int {
return len(d.points)
}
func (d *Delaunay) Points() []Point {
return d.points
}
func (d *Delaunay) Triangles() []Triangle {
return d.triangles
}
func (d *Delaunay) SupertriangleVertices() map[Point]bool {
return d.superTriangleVertices
}
// TriangulationStep performs one Bowyer-Watson insertion step for point v:
// it removes every triangle whose circumcircle contains v, collects the
// boundary edges of the resulting cavity, and re-triangulates the cavity
// by connecting v to each boundary edge.
func (d *Delaunay) TriangulationStep(v Point) {
tidx := 0
// edges counts how many removed triangles shared each edge; edges seen
// exactly once lie on the cavity boundary.
edges := make(map[Edge]int)
for tidx < len(d.triangles) {
t := d.triangles[tidx]
if v.X-t.Circumcenter.X <= t.Circumradius { // points are processed in ascending X (see InitDelaunay); once v.X exceeds center.X + radius this and all later points are outside the circumcircle, so the triangle can be kept as-is
if t.Circumradius > 0 && v.Distance(t.Circumcenter) <= t.Circumradius { // v is inside the circumcircle: the triangle violates the Delaunay condition and joins the cavity
for _, e := range t.Edges() {
edges[e] += 1
}
// Remove the triangle in place; do not advance tidx because the
// next triangle has shifted into this slot.
d.triangles = append(d.triangles[:tidx], d.triangles[tidx+1:]...)
} else {
tidx++
}
} else {
tidx++
}
}
// Re-triangulate the cavity: one new triangle per boundary edge.
trid := make([]Triangle, 0, len(edges))
for e, count := range edges {
if count == 1 { // edges shared by two removed triangles are interior to the cavity and are dropped
t := NewTriangle(v, e.P1, e.P2)
trid = append(trid, t)
}
}
d.triangles = append(d.triangles, trid...)
}
// InitDelaunay Initializes data structures for Delaunay triangulation of the set of points
// Call Triangulate() on the returned struct to perform triangulation
func InitDelaunay(points []Point) *Delaunay {
triangles := make([]Triangle, 0)
supertriangle := boundingTriangle(points)
points = append(points, supertriangle.Vertices()...)
triangles = append(triangles, supertriangle)
verts := make(map[Point]bool, len(supertriangle.Vertices()))
for _, v := range supertriangle.Vertices() {
verts[v] = true
}
sort.Slice(points, func(i, j int) bool {
return points[i].X < points[j].X
})
return &Delaunay{
triangles: triangles,
supertriangle: supertriangle,
points: points,
superTriangleVertices: verts,
}
}
// Triangulate performs Delaunay triangulation using Bowyer–Watson algorithm as described by Paul Bourke
// See http://paulbourke.net/papers/triangulate/
func (d *Delaunay) Triangulate() []Triangle {
for _, v := range d.points {
d.TriangulationStep(v)
}
d.Finalize()
return d.triangles
} | delaunay/delaunay.go | 0.793186 | 0.548794 | delaunay.go | starcoder |
package optimizer
import (
"encoding/json"
. "github.com/antonmedv/expr/ast"
"math"
"reflect"
)
type inArray struct{}
type fold struct {
applied bool
}
type inRange struct{}
type constRange struct{}
func Optimize(node *Node) {
Walk(node, &inArray{})
limit := 1000
for {
fold := &fold{}
Walk(node, fold)
limit--
if !fold.applied || limit == 0 {
break
}
}
Walk(node, &inRange{})
Walk(node, &constRange{})
}
// Map is a set of ints used as a constant-time membership container for
// folded "in" expressions.
type Map map[int]struct{}
// MarshalJSON encodes the set as a JSON array of its keys.
// NOTE(review): Go map iteration order is randomized, so the array order
// is not deterministic between marshals — confirm callers do not rely on
// a stable order.
func (m Map) MarshalJSON() ([]byte, error) {
array := make([]int, 0, len(m))
for key := range m {
array = append(array, key)
}
return json.Marshal(array)
}
func (*inArray) Enter(node *Node) {}
func (*inArray) Exit(node *Node) {
switch n := (*node).(type) {
case *BinaryNode:
if n.Operator == "in" || n.Operator == "not in" {
t := n.Left.GetType()
if t == nil || n.Left.GetType().Kind() != reflect.Int {
// This optimization can be only performed if left side is int type,
// as runtime.in func uses reflect.Map.MapIndex and keys of map must,
// be same as checked value type.
return
}
if array, ok := n.Right.(*ArrayNode); ok {
if len(array.Nodes) > 0 {
for _, a := range array.Nodes {
if _, ok := a.(*IntegerNode); !ok {
goto string
}
}
{
value := make(Map)
for _, a := range array.Nodes {
value[a.(*IntegerNode).Value] = struct{}{}
}
patch(node, &BinaryNode{
Operator: n.Operator,
Left: n.Left,
Right: &ConstantNode{Value: value},
})
}
string:
for _, a := range array.Nodes {
if _, ok := a.(*StringNode); !ok {
return
}
}
{
value := make(map[string]struct{})
for _, a := range array.Nodes {
value[a.(*StringNode).Value] = struct{}{}
}
patch(node, &BinaryNode{
Operator: n.Operator,
Left: n.Left,
Right: &ConstantNode{Value: value},
})
}
}
}
}
}
}
func (*fold) Enter(node *Node) {}
func (fold *fold) Exit(node *Node) {
patch := func(newNode Node) {
fold.applied = true
patch(node, newNode)
}
switch n := (*node).(type) {
case *UnaryNode:
switch n.Operator {
case "-":
if i, ok := n.Node.(*IntegerNode); ok {
patch(&IntegerNode{Value: -i.Value})
}
case "+":
if i, ok := n.Node.(*IntegerNode); ok {
patch(&IntegerNode{Value: i.Value})
}
}
case *BinaryNode:
switch n.Operator {
case "+":
if a, ok := n.Left.(*IntegerNode); ok {
if b, ok := n.Right.(*IntegerNode); ok {
patch(&IntegerNode{Value: a.Value + b.Value})
}
}
if a, ok := n.Left.(*StringNode); ok {
if b, ok := n.Right.(*StringNode); ok {
patch(&StringNode{Value: a.Value + b.Value})
}
}
case "-":
if a, ok := n.Left.(*IntegerNode); ok {
if b, ok := n.Right.(*IntegerNode); ok {
patch(&IntegerNode{Value: a.Value - b.Value})
}
}
case "*":
if a, ok := n.Left.(*IntegerNode); ok {
if b, ok := n.Right.(*IntegerNode); ok {
patch(&IntegerNode{Value: a.Value * b.Value})
}
}
case "/":
if a, ok := n.Left.(*IntegerNode); ok {
if b, ok := n.Right.(*IntegerNode); ok {
patch(&IntegerNode{Value: a.Value / b.Value})
}
}
case "%":
if a, ok := n.Left.(*IntegerNode); ok {
if b, ok := n.Right.(*IntegerNode); ok {
patch(&IntegerNode{Value: a.Value % b.Value})
}
}
case "**":
if a, ok := n.Left.(*IntegerNode); ok {
if b, ok := n.Right.(*IntegerNode); ok {
patch(&FloatNode{Value: math.Pow(float64(a.Value), float64(b.Value))})
}
}
}
case *ArrayNode:
if len(n.Nodes) > 0 {
for _, a := range n.Nodes {
if _, ok := a.(*IntegerNode); !ok {
goto string
}
}
{
value := make([]int, len(n.Nodes))
for i, a := range n.Nodes {
value[i] = a.(*IntegerNode).Value
}
patch(&ConstantNode{Value: value})
}
string:
for _, a := range n.Nodes {
if _, ok := a.(*StringNode); !ok {
return
}
}
{
value := make([]string, len(n.Nodes))
for i, a := range n.Nodes {
value[i] = a.(*StringNode).Value
}
patch(&ConstantNode{Value: value})
}
}
}
}
func (*inRange) Enter(node *Node) {}
func (*inRange) Exit(node *Node) {
switch n := (*node).(type) {
case *BinaryNode:
if n.Operator == "in" || n.Operator == "not in" {
if rng, ok := n.Right.(*BinaryNode); ok && rng.Operator == ".." {
if from, ok := rng.Left.(*IntegerNode); ok {
if to, ok := rng.Right.(*IntegerNode); ok {
patch(node, &BinaryNode{
Operator: "and",
Left: &BinaryNode{
Operator: ">=",
Left: n.Left,
Right: from,
},
Right: &BinaryNode{
Operator: "<=",
Left: n.Left,
Right: to,
},
})
if n.Operator == "not in" {
patch(node, &UnaryNode{
Operator: "not",
Node: *node,
})
}
}
}
}
}
}
}
func (*constRange) Enter(node *Node) {}
// Exit replaces a constant integer range expression (min..max) with a
// precomputed []int constant so the slice is not rebuilt at run time.
//
// Bug fixed: an inverted literal range (min > max) previously produced a
// negative length and make([]int, size) panicked during optimization.
// Such ranges are now left unfolded for the runtime to handle.
func (*constRange) Exit(node *Node) {
	switch n := (*node).(type) {
	case *BinaryNode:
		if n.Operator == ".." {
			if min, ok := n.Left.(*IntegerNode); ok {
				if max, ok := n.Right.(*IntegerNode); ok {
					size := max.Value - min.Value + 1
					if size <= 0 {
						return
					}
					value := make([]int, size)
					for i := range value {
						value[i] = min.Value + i
					}
					patch(node, &ConstantNode{
						Value: value,
					})
				}
			}
		}
	}
}
func patch(node *Node, newNode Node) {
newNode.SetType((*node).GetType())
newNode.SetLocation((*node).GetLocation())
*node = newNode
} | optimizer/optimizer.go | 0.519765 | 0.407127 | optimizer.go | starcoder |
package compliance
import (
"fmt"
"github.com/golang/glog"
"github.com/turbonomic/kubeturbo/pkg/discovery/repository"
"github.com/turbonomic/turbo-go-sdk/pkg/builder/group"
"github.com/turbonomic/turbo-go-sdk/pkg/proto"
)
// In Turbo, Schedulable nodes are marked with an access commodity 'schedulable'. Pods are required to buy
// this commodity so they can be moved to schedulable nodes
// Unchedulable nodes do not sell this commodity, hence pods will not be moved to these nodes.
// We want to address the issue that pods that are already running on the unschedulable nodes are not moved out
// due to compliance issues. This is done by not requiring the pods already on the unschedulable nodes at the time
// of discovery to buy the 'schedulable' commodity. Without the schedulable commodity the pod continues to stay
// on the current node unless there are resource constraints or unless moved by the kubernetes controller.
// In addition, we need pods which do not buy the schedulable commodity to not move to other unschedulable nodes.
// This is achieved by creating an anti-affinity policy for these pods against other unschedulable nodes.
type UnschedulableNodesAntiAffinityGroupDTOBuilder struct {
cluster *repository.ClusterSummary
targetId string
// Manager for tracking schedulable nodes
nodesManager *NodeSchedulabilityManager
}
func NewUnschedulableNodesAntiAffinityGroupDTOBuilder(cluster *repository.ClusterSummary,
targetId string, nodesManager *NodeSchedulabilityManager) *UnschedulableNodesAntiAffinityGroupDTOBuilder {
return &UnschedulableNodesAntiAffinityGroupDTOBuilder{
cluster: cluster,
targetId: targetId,
nodesManager: nodesManager,
}
}
// Creates DTOs for anti-affinity policy between the pods running on a unschedulable nodes to other unschedulable nodes.
// This will prevent the pods running on a unschedulable nodes to be moved to other other unschedulable nodes.
func (builder *UnschedulableNodesAntiAffinityGroupDTOBuilder) Build() []*proto.GroupDTO {
var policyDTOs []*proto.GroupDTO
if len(builder.nodesManager.unSchedulableNodes) <= 1 {
glog.V(3).Info("Zero or one unschedulable node in the cluster, anti-affinity policies will not be created")
return policyDTOs
}
// Map of unschedulable node name to list of UIDs of pods running on these unschedulable nodes
nodeToPodsMap := builder.nodesManager.buildNodeNameToPodUIDsMap()
// List of UIDs of all unschedulable nodes
var nodeUIDs []string // UIDs of all unschedulable nodes
var nodeUIDtoNameMap = make(map[string]string)
for _, nodeName := range builder.nodesManager.unSchedulableNodes {
nodeUID := builder.cluster.NodeNameUIDMap[nodeName]
nodeUIDs = append(nodeUIDs, nodeUID)
nodeUIDtoNameMap[nodeUID] = nodeName
}
// Iterate over all the unschedulable nodes
for idx, nodeUID := range nodeUIDs {
// policy is being created for the pods on this unschedulable node
nodeName := nodeUIDtoNameMap[nodeUID]
// pod members on the current unschedulable node for the policy
podMembers := nodeToPodsMap[nodeName]
if len(podMembers) == 0 {
glog.V(3).Infof("No pods on unschedulable node %s, skipping anti-affinity policy", nodeName)
continue
}
// node members comprising of other unschedulable nodes for the policy
var otherNodeUIDs []string
otherNodeUIDs = append(otherNodeUIDs, nodeUIDs[:idx]...)
otherNodeUIDs = append(otherNodeUIDs, nodeUIDs[idx+1:]...)
nodeMembers := otherNodeUIDs
// policy id and display name
groupID := fmt.Sprintf("UnschedulableNodesAntiAffinity-%s-%s", nodeName, builder.targetId)
displayName := fmt.Sprintf("UnschedulableNodesAntiAffinity::%s [%s]", nodeName, builder.targetId)
// dto for the policy
groupDTOs, err := group.DoNotPlace(groupID).
WithDisplayName(displayName).
OnSellers(group.StaticSellers(nodeMembers).OfType(proto.EntityDTO_VIRTUAL_MACHINE)).
WithBuyers(group.StaticBuyers(podMembers).OfType(proto.EntityDTO_CONTAINER_POD)).
Build()
if err != nil {
glog.Errorf("Failed to build anti affinity policy DTO for pods on unschedulable node %s: %v", nodeName, err)
continue
}
glog.V(3).Infof("Created anti-affinity policy for pods on the Unschedulable node:%s", nodeName)
glog.V(4).Infof("Anti-affinity policy for pods on the Unschedulable node: %+v", groupDTOs)
policyDTOs = append(policyDTOs, groupDTOs...)
}
return policyDTOs
} | pkg/discovery/worker/compliance/unschedulable_node_anti_affinity_group_dto_builder.go | 0.588534 | 0.435601 | unschedulable_node_anti_affinity_group_dto_builder.go | starcoder |
package bayes
// Simulation from Bayesian normal sampling model.
// Ref.: Albert (2009)
import (
"code.google.com/p/probab/dst"
)
func rigamma(shape, rate float64) float64 {
return (1 / dst.GammaNext(shape, 1/rate))
}
// NormPostSim returns a simulated sample from the joint posterior distribution of the mean and variance for a normal
// sampling model with an informative prior. The prior assumes mu and sigma2 are
// independent with mu assigned a normal prior with mean mu0 and variance tau2, and sigma2 is
// assigned an inverse gamma prior with parameters a and b.
//
// Arguments:
//	data	vector of observations
//	a, b	inverse-gamma prior parameters for sigma2
//	mu0	prior mean of mu
//	tau2	prior variance of mu
//	m	number of simulations desired
// Returns:
//	postMu	vector of simulated draws of the normal mean
//	postS2	vector of simulated draws of the normal variance
func NormPostSim(data []float64, a, b, mu0, tau2 float64, m int) (postMu, postS2 []float64) {
	xbar := mean(data)
	n := len(data)
	diff2 := make([]float64, n)
	for i, val := range data {
		diff2[i] = (val - xbar) * (val - xbar)
	}
	s := sum(diff2)
	postS2 = make([]float64, m)
	postMu = make([]float64, m)
	// Start the Gibbs sampler at the MLE of the variance.
	sigma2 := s / float64(n)
	for j := 0; j < m; j++ {
		// Draw mu | sigma2, data.
		prec := float64(n)/sigma2 + 1/tau2
		mu1 := (xbar*float64(n)/sigma2 + mu0/tau2) / prec
		v1 := 1 / prec
		mu := dst.NormalNext(mu1, sqrt(v1))
		// Draw sigma2 | mu, data.
		a1 := a + float64(n)/2
		d2 := make([]float64, n)
		for i, val := range data {
			d2[i] = (val - mu) * (val - mu)
		}
		b1 := b + sum(d2)/2
		// BUG FIX: plain assignment (not ":=") so the freshly drawn variance
		// conditions the next iteration's mu draw. The original shadowed
		// sigma2 inside the loop, so every mu was drawn conditioned on the
		// initial MLE value instead of cycling the Gibbs sampler.
		sigma2 = rigamma(a1, b1)
		postS2[j] = sigma2
		postMu[j] = mu
	}
	return
}
// NormPostSimNoPrior returns a simulated sample from the joint posterior
// distribution of the mean and variance for a normal sampling model with a
// noninformative prior.
//
// Arguments:
//	data	vector of observations
//	m	number of simulations desired
// Returns:
//	postMu	vector of simulated draws of the normal mean
//	postS2	vector of simulated draws of the normal variance
func NormPostSimNoPrior(data []float64, m int) (postMu, postS2 []float64) {
	xbar := mean(data)
	n := len(data)
	diff2 := make([]float64, n)
	for i, val := range data {
		diff2[i] = (val - xbar) * (val - xbar)
	}
	s := sum(diff2)
	postS2 = make([]float64, m)
	postMu = make([]float64, m)
	// Idiom fix: "for i := range" replaces the redundant "for i, _ := range".
	for i := range postMu {
		// sigma2 | data ~ S / chi-square(n-1), S = sum of squared deviations.
		postS2[i] = s / dst.ChiSquareNext(int64(n)-1)
		sd := sqrt(postS2[i]) / sqrt(float64(n))
		// mu | sigma2, data ~ N(xbar, sigma/sqrt(n)).
		postMu[i] = dst.NormalNext(xbar, sd)
	}
	return
}
// NormPostNoPriorNext returns a single draw from the joint posterior of the
// mean and variance under a normal sampling model with a noninformative prior.
//
// Arguments:
//	data	vector of observations
// Returns:
//	postMu	simulated draw of the normal mean
//	postS2	simulated draw of the normal variance
func NormPostNoPriorNext(data []float64) (postMu, postS2 float64) {
	n := len(data)
	xbar := mean(data)
	// Squared deviations about the sample mean.
	sqDev := make([]float64, n)
	for i, v := range data {
		d := v - xbar
		sqDev[i] = d * d
	}
	// sigma2 | data ~ S / chi-square(n-1).
	postS2 = sum(sqDev) / dst.ChiSquareNext(int64(n)-1)
	// mu | sigma2, data ~ N(xbar, sigma/sqrt(n)).
	postMu = dst.NormalNext(xbar, sqrt(postS2)/sqrt(float64(n)))
	return
}
// NormPostInfPriorNext returns a simulated tuple from the joint posterior distribution of the mean and variance for a normal
// sampling prior with a noninformative or informative prior. The prior assumes mu and sigma2 are
// independent with mu assigned a normal prior with mean mu0 and variance tau2, and sigma2 is
// assigned a inverse gamma prior with parameters a and b.
func NormPostInfPriorNext(data []float64, a, b, mu0, tau2 float64) (postMu, postS2 float64) {
// Arguments:
// data - vector of observations
// prior params:
// a
// b
// mu0
// tau2
// Returns:
// postMu - simulated draw of normal mean
// postS2 - simulated draw of normal variance
xbar := mean(data)
n := len(data)
diff2 := make([]float64, n)
for i, val := range data {
diff2[i] = (val - xbar) * (val - xbar)
}
s := sum(diff2)
postS2 = s / float64(n)
prec := float64(n)/postS2 + 1/tau2
mu1 := (xbar*float64(n)/postS2 + mu0/tau2) / prec
v1 := 1 / prec
postMu = dst.NormalNext(mu1, sqrt(v1))
a1 := a + float64(n)/2
d2 := make([]float64, n)
for i, val := range data {
d2[i] = (val - postMu) * (val - postMu)
}
b1 := b + sum(d2)/2
postS2 = rigamma(a1, b1)
return
} | bayes/normpostsim.go | 0.912728 | 0.775605 | normpostsim.go | starcoder |
package qrss
import (
"crypto/rand"
"io"
//"log"
"math"
"math/big"
)
// Volt is one audio sample amplitude, nominally in the range [-1.0, +1.0].
type Volt float64

// ToneGen holds the parameters used to synthesize tones as PCM samples.
type ToneGen struct {
	SampleRate float64 // Samples per second (Hz)
	ToneLen float64 // Length in seconds
	RampLen float64 // ramp-up, ramp-down in seconds
	BaseHz float64 // frequency for tone value 0: hz = BaseHz + tone*StepHz
	StepHz float64 // frequency increment per tone step (Hz)
}

// How many ticks (at the SampleRate) are in a whole tone (time length ToneLen).
func (tg ToneGen) WholeTicks() float64 {
	return tg.SampleRate * tg.ToneLen
}

// How many ticks (at the SampleRate) are in the raised-cosine-RampUp/RampDown time (time length RampLen).
func (tg ToneGen) RampTicks() float64 {
	return tg.SampleRate * tg.RampLen
}
// PlayTones renders each tone in the sequence as voltage samples on vv.
// Tone 0 is special-cased by Boop to produce one tone length of silence.
func (tg ToneGen) PlayTones(tones []Tone, vv chan Volt) {
	for i := range tones {
		tg.Boop(tones[i], tones[i], Both, vv)
	}
}

// PlayTonePairs renders each pair as a slide from its first tone to its
// second, fading the ends as directed by the pair's Fade setting.
func (tg ToneGen) PlayTonePairs(tonePairs []TonePair, vv chan Volt) {
	for i := range tonePairs {
		pair := tonePairs[i]
		tg.Boop(pair.A, pair.B, pair.Fade, vv)
	}
}
// Boop writes voltages in range [-1.0, +1.0] to the channel vv, for tones sliding from tone1 to tone2, which might be the same tone.
// Notice Boop(0, _, _, _) produces silence.
// The fe argument selects which end(s) of the tone receive the raised-cosine
// fade; the other end runs at full amplitude.
func (tg ToneGen) Boop(tone1, tone2 Tone, fe FadeEnd, vv chan Volt) {
	// Start and end frequencies of the (possibly sliding) tone.
	hz1 := tg.BaseHz + float64(tone1)*tg.StepHz
	hz2 := tg.BaseHz + float64(tone2)*tg.StepHz
	wholeTicks := int(tg.WholeTicks())
	for t := 0; t < wholeTicks; t++ {
		if tone1 == 0 {
			// Tone 0: emit silence for the whole tone length.
			vv <- Volt(0.0)
			continue
		}
		// Portion ranges 0.0 to almost 1.0.
		portion := float64(t) / float64(wholeTicks)
		// Interpolate part of the way between hz1 and hz2.
		hz := hz1 + portion*(hz2-hz1)
		// log.Printf("%06d: %8.0f hz (%5.1f, %5.1f)", t, hz, tone1, tone2)
		// Apply a raised-cosine envelope to the first and last RampTicks ticks.
		var envelopeGain float64
		switch {
		case fe != Right && t < int(tg.RampTicks()): // First RampTicks, gain goes from 0.0 to 1.0
			{
				x := (float64(t) / tg.RampTicks()) * math.Pi
				y := math.Cos(x)
				envelopeGain = 0.5 - y/2.0
			}
		case fe != Left && int(tg.WholeTicks())-t < int(tg.RampTicks()): // Last RampTicks, gain goes from 1.0 to 0.0.
			{
				x := ((tg.WholeTicks() - float64(t)) / tg.RampTicks()) * math.Pi
				y := math.Cos(x)
				envelopeGain = 0.5 - y/2.0
			}
		default: // Middle of the Boop has full envelopeGain 1.0.
			{
				envelopeGain = 1.0
			}
		}
		// The angle theta depends on the ticks and the frequency hz.
		// NOTE(review): phase is computed from the absolute tick and the
		// instantaneous frequency rather than accumulated, so a slide is not
		// a true constant-phase chirp — confirm this is intended.
		theta := float64(t) * hz * (2.0 * math.Pi) / tg.SampleRate
		// Take the sin of the angle, and multiply by the envelopeGain.
		v := envelopeGain * math.Sin(theta)
		vv <- Volt(v)
	}
}
// MaxShort is the largest positive value of a signed 16-bit PCM sample.
const MaxShort = 0x7FFF

// EmitVolts consumes the volts from the channel vv, which use range [-1.0, +1.0].
// It multiplies by an overall gain, converts to signed int16, and writes to the writer in big-endian format.
// When the input volts channel has no more, we write true to the done channel,
// so the main program can exit.
func EmitVolts(vv chan Volt, gain float64, w io.Writer, done chan bool) {
	// Reuse one 2-byte scratch buffer instead of allocating a fresh slice
	// for every sample (this loop runs once per audio tick).
	buf := make([]byte, 2)
	// Range over the channel until the producer closes it.
	for volt := range vv {
		y := gain * float64(volt)
		// Clip at +/- 1 unit.
		if y > 1.0 {
			y = 1.0
		} else if y < -1.0 {
			y = -1.0
		}
		yShort := int(MaxShort * y)
		// Big-endian signed 16-bit sample.
		buf[0] = byte(255 & (yShort >> 8))
		buf[1] = byte(255 & yShort)
		// NOTE(review): the Write error is ignored, as in the original; a
		// failing writer silently drops samples.
		w.Write(buf)
	}
	done <- true
}
// Random returns a uniformly distributed int in [0, n), drawn from the
// cryptographically secure source. It panics if the source fails
// (or if n <= 0, per rand.Int's contract).
func Random(n int) int {
	limit := big.NewInt(int64(n))
	r, err := rand.Int(rand.Reader, limit)
	if err != nil {
		panic(err)
	}
	return int(r.Int64())
}
// A completely customizable Random Number Generator
package main;
// setMinusOne returns a copy of the first size elements of array with the
// value at the given index replaced by -1. The input slice is not modified.
func setMinusOne(array []int, size, index int) []int {
	// Allocate the result in one step instead of growing it with append.
	ret := make([]int, size)
	copy(ret, array[:size])
	ret[index] = -1
	return ret
}
// getMax returns twice the sum of all non-negative edge weights in the graph,
// a value strictly larger than any possible path length; used as a
// pseudo-infinite distance by the dijkstra algorithm.
func getMax(graph [][]int, nodes int) int {
	total := 0
	for i := 0; i < nodes; i++ {
		for j := 0; j < nodes; j++ {
			if w := graph[i][j]; w >= 0 {
				total += w
			}
		}
	}
	return total * 2
}

// dijkstra returns the shortest distance from source to every vertex using
// the O(V^2) variant of Dijkstra's algorithm.
//
// The graph must be a square matrix of non-negative edge weights, symmetric
// across the main diagonal, with -1 marking a missing edge. nodes is the
// vertex count. Unreachable vertices get -1 in the returned slice.
//
// Improvements over the original: idiomatic Go (no trailing semicolons), and
// visited vertices are marked with a boolean slice in place instead of
// rebuilding the vertex list with an O(n) copy on every extraction.
func dijkstra(graph [][]int, nodes, source int) []int {
	// max acts as infinity: larger than any real path length.
	max := getMax(graph, nodes)
	distances := make([]int, nodes)
	visited := make([]bool, nodes)
	for i := range distances {
		distances[i] = max
	}
	distances[source] = 0
	for remaining := nodes; remaining > 0; remaining-- {
		// Pick the unvisited vertex with the smallest tentative distance
		// (ties break toward the lowest index, as in the original).
		closest := -1
		for i := 0; i < nodes; i++ {
			if visited[i] {
				continue
			}
			if closest < 0 || distances[i] < distances[closest] {
				closest = i
			}
		}
		visited[closest] = true
		// Relax the edges out of the chosen vertex.
		for i := 0; i < nodes; i++ {
			if visited[i] || graph[i][closest] < 0 {
				continue
			}
			if d := distances[closest] + graph[i][closest]; d < distances[i] {
				distances[i] = d
			}
		}
	}
	// Vertices still at "infinity" are unreachable.
	for i := range distances {
		if distances[i] == max {
			distances[i] = -1
		}
	}
	return distances
}
// addEdge records an undirected edge of the given length between vertex1 and
// vertex2, mutating graph in place; the same slice is returned for chaining.
func addEdge(vertex1, vertex2, length int, graph [][]int) [][]int {
	graph[vertex1][vertex2], graph[vertex2][vertex1] = length, length
	return graph
}
// exampleTest runs Dijkstra's algorithm from every vertex of a fixed sample
// graph and prints the resulting shortest distances.
func exampleTest() {
	const nodes = 16
	myGraph := make([][]int, nodes)
	for i := range myGraph {
		row := make([]int, nodes)
		for j := range row {
			row[j] = -1
		}
		myGraph[i] = row
	}
	// Each triple is {vertex1, vertex2, edge length}; order matters because
	// later entries overwrite earlier ones for the same vertex pair.
	edges := [][3]int{
		{0, 1, 8}, {0, 7, 6}, {1, 2, 5}, {1, 8, 7}, {2, 3, 6}, {2, 9, 5},
		{3, 4, 5}, {4, 5, 8}, {4, 9, 4}, {5, 6, 4}, {5, 7, 6}, {5, 9, 3},
		{6, 7, 4}, {7, 8, 4}, {8, 9, 3}, {8, 10, 10}, {8, 11, 9}, {9, 10, 11},
		{9, 13, 3}, {9, 14, 7}, {10, 3, 20}, {10, 2, 15}, {10, 5, 99},
		{10, 9, 11}, {10, 7, 14}, {11, 10, 21}, {11, 8, 99}, {11, 3, 12},
		{11, 9, 17}, {11, 2, 33}, {12, 5, 21}, {12, 11, 69}, {13, 11, 20},
		{14, 12, 18}, {15, 14, 7}, {15, 8, 19}, {15, 9, 5},
	}
	for _, e := range edges {
		addEdge(e[0], e[1], e[2], myGraph)
	}
	for i := 0; i < nodes; i++ {
		distances := dijkstra(myGraph, nodes, i)
		for j := 0; j < nodes; j++ {
			println("The shortest distance from", i, "to", j,
				"is", distances[j])
		}
	}
}
// main runs the Dijkstra example, printing all-pairs shortest distances.
func main() {
	exampleTest();
} | programs/benchmark/dijkstra.go | 0.792103 | 0.509398 | dijkstra.go | starcoder |
package lib
import (
"math/rand"
)
// A Map represents a level in the game.
type Map struct {
	// Depth determines the level of this map in the game.
	Depth int
	// Tiles stores the tiles in a 2d matrix, indexed as Tiles[y][x].
	Tiles [][]Tile
}
// Width returns the number of tile columns in the map.
func (m *Map) Width() int {
	firstRow := m.Tiles[0]
	return len(firstRow)
}

// Height returns the number of tile rows in the map.
func (m *Map) Height() int {
	return len(m.Tiles)
}

// At returns the tile stored at column x, row y.
func (m *Map) At(x, y int) Tile {
	row := m.Tiles[y]
	return row[x]
}

// Set stores tile t at column x, row y.
func (m *Map) Set(x, y int, t Tile) {
	row := m.Tiles[y]
	row[x] = t
}
// Postprocess processes a Map, adding in interesting tiles such as boxes,
// more defined walls, etc...
//
// Pass 1: outside tiles touching floor/boxes become walls; floor tiles may
// randomly become boxes (when hugging solid tiles) or closed chests (when in
// fully open floor), per the Conf probabilities.
// Pass 2: place one trapdoor, then Conf.NumMerchants merchants, each on a
// random floor tile whose eight neighbours are all floor.
//
// NOTE(review): the placement loops retry random positions forever; a map
// with no fully-open floor tile would never terminate — confirm generated
// maps always provide at least one.
func (m *Map) Postprocess() {
	for y := 0; y < m.Height(); y++ {
		for x := 0; x < m.Width(); x++ {
			t := m.At(x, y)
			switch t.Type() {
			case TileOutside:
				// Outside tiles adjacent to walkable space become walls.
				if m.neighbours(x, y, TileFloor, TileBox) > 0 {
					m.Set(x, y, &WallTile{})
				}
			case TileFloor:
				// One random draw decides between the box and chest rolls.
				r := rand.Float64()
				if m.neighbours(x, y, TileOutside, TileWall, TileBox) > 1 && r < Conf.BoxChance {
					m.Set(x, y, &BoxTile{})
				} else if m.neighbours(x, y, TileOutside, TileWall) == 0 && r < Conf.ChestChance {
					m.Set(x, y, &ChestTile{
						Open: false,
					})
				}
			}
		}
	}
	// Drop a single trapdoor on a random fully-open floor tile.
	for {
		tx, ty := rand.Intn(m.Width()), rand.Intn(m.Height())
		if m.At(tx, ty).Type() == TileFloor && m.neighbours(tx, ty, TileFloor) == 8 {
			m.Set(tx, ty, &TrapdoorTile{})
			break
		}
	}
	// Scatter the merchants the same way.
	for i := 0; i < Conf.NumMerchants; i++ {
		for {
			tx, ty := rand.Intn(m.Width()), rand.Intn(m.Height())
			if m.At(tx, ty).Type() == TileFloor && m.neighbours(tx, ty, TileFloor) == 8 {
				m.Set(tx, ty, &MerchantTile{})
				break
			}
		}
	}
}
// neighbours counts how many of the eight cells adjacent to (x, y) hold a
// tile whose type is one of the given types. Positions off the map edge are
// ignored; a repeated entry in types counts a matching neighbour again.
func (m *Map) neighbours(x, y int, types ...int) int {
	count := 0
	for dy := -1; dy <= 1; dy++ {
		for dx := -1; dx <= 1; dx++ {
			if dx == 0 && dy == 0 {
				continue
			}
			cx, cy := x+dx, y+dy
			if cx < 0 || cy < 0 || cx >= m.Width() || cy >= m.Height() {
				continue
			}
			for _, tile := range types {
				if m.At(cx, cy).Type() == tile {
					count++
				}
			}
		}
	}
	return count
}
// Render renders a Map instance to the terminal at the given
// coordinates
func (m *Map) Render(x, y int) {
for i, row := range m.Tiles {
for j, tile := range row {
tile.Render(x+j*2, y+i)
}
}
} | lib/map.go | 0.820037 | 0.622431 | map.go | starcoder |
package spine
import (
"math"
)
// Attachment is any named item that can be attached to a skeleton slot.
type Attachment interface {
	Name() string
}

// RegionAttachment is a textured rectangular region attached to a bone.
// X, Y, Rotation and Scale* position the quad in bone-local space; the
// Region* fields describe the source texture area.
type RegionAttachment struct {
	name string
	X float32
	Y float32
	Rotation float32
	ScaleX float32
	ScaleY float32
	Width float32
	Height float32
	RendererObject interface{}
	RegionOffsetX float32
	RegionOffsetY float32
	RegionWidth float32
	RegionHeight float32
	RegionOriginalWidth float32
	RegionOriginalHeight float32
	// Uvs holds the texture coordinates of the four corners as (u,v) pairs.
	Uvs [8]float32
	// offset caches the untransformed corner positions; see updateOffset.
	offset [8]float32
}
// Name returns the attachment's name.
// Receiver renamed to r for consistency with this type's other methods.
func (r RegionAttachment) Name() string {
	return r.name
}
// NewRegionAttachment builds a RegionAttachment with the given name and
// unit scale on both axes.
func NewRegionAttachment(name string) *RegionAttachment {
	a := &RegionAttachment{name: name}
	a.ScaleX = 1
	a.ScaleY = 1
	return a
}
// SetUVs stores the texture coordinates of the region's four corners.
// When rotate is true the (u,v) assignment is shifted by one corner so the
// texture appears rotated 90 degrees within the region.
func (r *RegionAttachment) SetUVs(u float32, v float32, u2 float32, v2 float32, rotate bool) {
	if rotate {
		r.Uvs = [8]float32{u2, v2, u, v2, u, v, u2, v}
	} else {
		r.Uvs = [8]float32{u, v2, u, v, u2, v, u2, v2}
	}
}
// updateOffset recomputes the cached bone-local corner positions of the
// region quad from its size, scale, rotation and texture-region metrics.
// The corners are stored in offset as four (x, y) pairs.
func (r *RegionAttachment) updateOffset() {
	width := r.Width
	height := r.Height
	scaleX := r.ScaleX
	scaleY := r.ScaleY
	// Scale factors from texture-region units to scaled local units.
	regionScaleX := width / r.RegionOriginalWidth * scaleX
	regionScaleY := height / r.RegionOriginalHeight * scaleY
	// Lower-left corner of the (possibly trimmed) region, centered on the bone.
	localX := -width/2*scaleX + r.RegionOffsetX*regionScaleX
	localY := -height/2*scaleY + r.RegionOffsetY*regionScaleY
	localX2 := localX + r.RegionWidth*regionScaleX
	localY2 := localY + r.RegionHeight*regionScaleY
	rotation := r.Rotation
	rads := float64(rotation) * math.Pi / 180
	cos := float32(math.Cos(rads))
	sin := float32(math.Sin(rads))
	x := r.X
	y := r.Y
	// Pre-rotate each corner coordinate once; the four corners below are
	// combinations of these shared terms.
	localXCos := localX*cos + x
	localXSin := localX * sin
	localYCos := localY*cos + y
	localYSin := localY * sin
	localX2Cos := localX2*cos + x
	localX2Sin := localX2 * sin
	localY2Cos := localY2*cos + y
	localY2Sin := localY2 * sin
	r.offset[0] = localXCos - localYSin
	r.offset[1] = localYCos + localXSin
	r.offset[2] = localXCos - localY2Sin
	r.offset[3] = localY2Cos + localXSin
	r.offset[4] = localX2Cos - localY2Sin
	r.offset[5] = localY2Cos + localX2Sin
	r.offset[6] = localX2Cos - localYSin
	r.offset[7] = localYCos + localX2Sin
}
// Update transforms the cached corner offsets by the slot's bone world
// matrix plus the skeleton position, returning the quad's world-space
// vertices as four (x, y) pairs.
func (r *RegionAttachment) Update(slot *Slot) (verts [8]float32) {
	bone := slot.Bone
	s := slot.Skeleton()
	// World translation = skeleton origin + bone world position.
	x := s.X + bone.WorldX
	y := s.Y + bone.WorldY
	// 2x2 bone world rotation/scale matrix.
	m00 := bone.M00
	m01 := bone.M01
	m10 := bone.M10
	m11 := bone.M11
	verts[0] = r.offset[0]*m00 + r.offset[1]*m01 + x
	verts[1] = r.offset[0]*m10 + r.offset[1]*m11 + y
	verts[2] = r.offset[2]*m00 + r.offset[3]*m01 + x
	verts[3] = r.offset[2]*m10 + r.offset[3]*m11 + y
	verts[4] = r.offset[4]*m00 + r.offset[5]*m01 + x
	verts[5] = r.offset[4]*m10 + r.offset[5]*m11 + y
	verts[6] = r.offset[6]*m00 + r.offset[7]*m01 + x
	verts[7] = r.offset[6]*m10 + r.offset[7]*m11 + y
	return
} | anim/spine/attachment.go | 0.638385 | 0.411347 | attachment.go | starcoder |
package primitives
import (
"github.com/alexandreLamarre/Golang-Ray-Tracing-Renderer/pkg/algebra"
)
//Intersections data type keeps track of t values of the intersections of rays with a sphere
type Intersections struct {
hits *MinHeap // hits on contours of objects
ref *MinHeap // used in ray reflections/refractions
}
//Intersection keeps track of a ray's position and the object it intersects
type Intersection struct {
Object Shape
T float64
U, V float64
}
//SetUV sets the u, v fields for intersections, for normal interpolation on smooth triangles
func (i *Intersection) SetUV(u, v float64) {
i.U = u
i.V = v
}
//NewIntersection creates a new intersection of an object with a ray's position value
func NewIntersection(s Shape, t float64) *Intersection {
return &Intersection{Object: s, T: t}
}
//NewIntersections creates a new intersection data type
func NewIntersections() *Intersections {
hits := NewMinHeap()
ref := NewMinHeap()
return &Intersections{hits: hits, ref: ref}
}
//GetHits Getter for hits minheap exported for test purposes.
func (i *Intersections) GetHits() *MinHeap {
return i.hits
}
//GetRef Getter for ref minheap exported for test purposes
func (i *Intersections) GetRef() *MinHeap {
return i.ref
}
//Intersect Updates intersections of a Sphere with the given algebra.Ray
func (intersections *Intersections) Intersect(s Shape, r *algebra.Ray) error {
m := s.GetTransform()
r2 := r.Transform(m.Inverse())
ts, intersected := s.LocalIntersect(r2)
if !intersected {
return nil
}
for i := 0; i < len(ts); i++ {
is := ts[i]
if ts[i].T >= 0 {
intersections.hits.Push(is)
} else {
intersections.ref.Push(is)
}
}
return nil
}
//Count returns the number of intersections of a given algebra.Ray with the Sphere
func (intersections *Intersections) Count() int {
numHits := len(intersections.hits.Get())
numRef := len(intersections.ref.Get())
return numHits + numRef
}
//GetIntersections returns the slice of values that intersect with the Sphere for the give algebra.Ray
func (intersections *Intersections) GetIntersections() []*Intersection {
hitHeap := intersections.hits.Get()
refHeap := intersections.ref.Get()
return append(refHeap, hitHeap...)
}
//Hit returns the minimum positive value of a ray intersecting the given object
func (intersections *Intersections) Hit() *Intersection {
if len(intersections.hits.Get()) == 0 {
return nil
} else {
return intersections.hits.GetMin()
}
} | pkg/geometry/primitives/intersections.go | 0.819063 | 0.57075 | intersections.go | starcoder |
package main
import (
"fmt"
"math/rand"
"sync/atomic"
"time"
)
// In the previous example we used explicit locking with Mutexes to synchronize access to shared state across
// multiple Goroutines. Another option is to use the built-in synchronization features of Goroutines and channels
// to achieve the same result. This channel-based approach aligns with Go's ideas of sharing memory by
// communicating and having each piece of data owned by exactly one Goroutine
// In this example our state will be owned by a single Goroutine. This will guarantee that the data is never
// corrupted with concurrent access. In order to read or write that state, other Goroutines will send message
// to the owning Goroutine and receive corresponding replies. These ReadOp and WriteOp structs encapsulate
// those requests and a way for the owning Goroutine to respond.
type ReadOp struct {
key int
resp chan int
}
type WriteOp struct {
key int
val int
resp chan bool
}
func main() {
// Similar to the Mutexes example, count how many operations we perform.
var readOps uint64
var writeOps uint64
// The reads and writes channels will be used by other Goroutines to issue
// read and write requests, respectively.
reads := make(chan ReadOp)
writes := make(chan WriteOp)
// Here is the Goroutine that owns the state, which is a map as in the Mutex example, but now private to the
// stateful Goroutine. This Goroutine repeatedly selects on the reads and writes channels, responding to
// requests as they arrive. A response is executed by first performing the requested operation and then
// sending a value on the response channel 'resp' to indicate success (and the desired value in the case
// of reads)
go func() {
var state = make(map[int]int)
for {
select {
case read := <-reads:
read.resp <- state[read.key]
case write := <-writes:
state[write.key] = write.val
write.resp <- true
}
}
}()
// This starts 100 Goroutines to issue reads to the state-owning Goroutine via the 'reads' channel. Each
// read requires constructing a ReadOp struct, sending it over the 'reads' channel, and the receiving
// result over the provided 'resp' channel.
for r := 0; r < 100; r++ {
go func() {
for {
read := ReadOp{
key: rand.Intn(5),
resp: make(chan int)}
reads <- read
<-read.resp
atomic.AddUint64(&readOps, 1)
time.Sleep(time.Millisecond)
}
}()
}
// Start 10 write Goroutines as well, using a similar approach.
for w := 0; w < 10; w++ {
go func() {
for {
write := WriteOp{
key: rand.Intn(5),
val: rand.Intn(100),
resp: make(chan bool)}
writes <- write
<-write.resp
atomic.AddUint64(&writeOps, 1)
time.Sleep(time.Millisecond)
}
}()
}
// Let the Goroutines work for a second
time.Sleep(time.Second)
// Capture and report the operation counts
readOpsFinal := atomic.LoadUint64(&readOps)
fmt.Println("readOps:", readOpsFinal)
writeOpsFinal := atomic.LoadUint64(&writeOps)
fmt.Println("writeOps", writeOpsFinal)
// This Goroutine-based approach was a bit more involved than the Mutex one. It might be useful in certain
// cases, though. For example, where you have other channels involved or when managing multiple such
// mutexes would be error-prone. You should use whichever approach feels the most natural, especially with
// respect to understanding the correctness of your program.
} | cmd/concurrency/stateful-goroutines/stateful-goroutines.go | 0.644896 | 0.478773 | stateful-goroutines.go | starcoder |
package rbytree
import (
"bytes"
)
// Tree holds red-black tree.
// It is not goroutine-safe, make sure that
// the access to the instance of the tree is always synchronized.
type Tree struct {
root *node
size int
}
type color byte
const (
red color = iota
black
)
// node represents the node in the tree.
type node struct {
key []byte
value []byte
parent *node
left *node
right *node
color color
}
// New creates new empty instance of Red-black tree.
func New() *Tree {
return &Tree{}
}
// Put inserts the key with the associated value into the tree.
// If the key is already in the map, it overrides the value and
// returns the previous value.
// Since the value might be null, it also returns a boolean flag
// to distinguish between existent keys and not.
func (t *Tree) Put(key []byte, value []byte) ([]byte, bool) {
// too guarantee that the invariants are not violated
key = copyBytes(key)
newNode := &node{key, value, nil, nil, nil, red}
if t.root == nil {
newNode.color = black
t.root = newNode
t.size = 1
return nil, false
}
current := t.root
var parent *node
var cmp int
for current != nil {
parent = current
cmp = bytes.Compare(key, current.key)
if cmp == 0 {
prev := current.value
current.value = value
return prev, true
}
if cmp < 0 {
current = current.left
} else {
current = current.right
}
}
if cmp < 0 {
parent.left = newNode
} else {
parent.right = newNode
}
newNode.parent = parent
t.fixAfterInsertion(newNode)
t.size++
return nil, false
}
// Get searches the key and returns the associated value and true if found,
// otherwise nil and false.
func (t *Tree) Get(key []byte) ([]byte, bool) {
if t.root == nil {
return nil, false
}
current := t.root
for current != nil {
cmp := bytes.Compare(key, current.key)
if cmp < 0 {
current = current.left
} else if cmp > 0 {
current = current.right
} else {
return current.value, true
}
}
return nil, false
}
// ForEach traverses tree in ascending key order.
func (t *Tree) ForEach(action func(key []byte, value []byte)) {
for it := t.Iterator(); it.HasNext(); {
key, value := it.Next()
action(key, value)
}
}
// fixAfterInsertion fixes the tree to satisfy the red-black tree
// properties of the tree.
func (t *Tree) fixAfterInsertion(newNode *node) {
current := newNode
for current != t.root && current.parent.color == red {
if current.parent.parent.left == current.parent {
uncle := current.parent.parent.right
if uncle != nil && uncle.color == red {
current.parent.color = black
uncle.color = black
current.parent.parent.color = red
current = current.parent.parent
} else {
if current == current.parent.right {
current = current.parent
t.rotateLeft(current)
}
current.parent.color = black
current.parent.parent.color = red
t.rotateRight(current.parent.parent)
}
} else if current.parent.parent.right == current.parent {
uncle := current.parent.parent.left
if uncle != nil && uncle.color == red {
current.parent.color = black
uncle.color = black
current.parent.parent.color = red
current = current.parent.parent
} else {
if current == current.parent.left {
current = current.parent
t.rotateRight(current)
}
current.parent.color = black
current.parent.parent.color = red
t.rotateLeft(current.parent.parent)
}
}
}
t.root.color = black
}
func (t *Tree) rotateLeft(node *node) {
nodeRight := node.right
node.right = nodeRight.left
if nodeRight.left != nil {
nodeRight.left.parent = node
}
nodeRight.parent = node.parent
if node.parent == nil {
t.root = nodeRight
} else if node == node.parent.left {
node.parent.left = nodeRight
} else if node == node.parent.right {
node.parent.right = nodeRight
}
nodeRight.left = node
node.parent = nodeRight
}
func (t *Tree) rotateRight(node *node) {
nodeLeft := node.left
node.left = nodeLeft.right
if nodeLeft.right != nil {
nodeLeft.right.parent = node
}
nodeLeft.parent = node.parent
if node.parent == nil {
t.root = nodeLeft
} else if node == node.parent.left {
node.parent.left = nodeLeft
} else if node == node.parent.right {
node.parent.right = nodeLeft
}
nodeLeft.right = node
node.parent = nodeLeft
}
// Size returns tree size.
func (t *Tree) Size() int {
return t.size
}
func copyBytes(s []byte) []byte {
c := make([]byte, len(s))
copy(c, s)
return c
} | tree.go | 0.787646 | 0.419945 | tree.go | starcoder |
package lcs
// Basic returns the length of the longest common contiguous run shared by
// a and b (i.e. the longest common substring — note this is the substring
// DP, which resets on a mismatch, not the classic subsequence recurrence).
func Basic(a, b []int64) int {
	if len(a) == 0 || len(b) == 0 {
		return 0
	}
	// Two-row dynamic program: row[bi] is the length of the common run
	// ending at a[ai] and b[bi]; any mismatch resets that cell to zero.
	row := make([]int, len(b))
	prevRow := make([]int, len(b))
	best := 0
	for ai := range a {
		for bi := range b {
			if a[ai] != b[bi] {
				row[bi] = 0
				continue
			}
			if ai > 0 && bi > 0 {
				row[bi] = prevRow[bi-1] + 1
			} else {
				row[bi] = 1
			}
			if row[bi] > best {
				best = row[bi]
			}
		}
		row, prevRow = prevRow, row
	}
	return best
}
// Lift returns the length of the longest common contiguous run (longest
// common substring) of a and b, equivalent to Basic but with the first row
// of the dynamic program hoisted out of the main loop.
//
// Bug fixes relative to the original:
//   - the first row was seeded by comparing a[i] with b[i] (the diagonal),
//     which gave wrong answers and indexed past the end of b whenever
//     len(a) > len(b);
//   - length-1 matches in the first row/column never updated the maximum
//     (e.g. Lift([5], [5]) returned 0).
func Lift(a, b []int64) int {
	if len(a) == 0 || len(b) == 0 {
		return 0
	}
	curr := make([]int, len(b))
	prev := make([]int, len(b))
	max := 0
	// Seed the row for a[0]: a run of length 1 wherever b matches a[0].
	for bi, bx := range b {
		if bx == a[0] {
			prev[bi] = 1
			max = 1
		}
	}
	for ai := 1; ai < len(a); ai++ {
		ax := a[ai]
		// First column: a match starts a new run of length 1.
		if ax == b[0] {
			curr[0] = 1
			if max < 1 {
				max = 1
			}
		} else {
			curr[0] = 0
		}
		for bi := 1; bi < len(b); bi++ {
			if ax != b[bi] {
				curr[bi] = 0
			} else {
				curr[bi] = 1 + prev[bi-1]
				if max < curr[bi] {
					max = curr[bi]
				}
			}
		}
		curr, prev = prev, curr
	}
	return max
}
// Wave returns the length of the longest common contiguous run (longest
// common substring) of a and b, equivalent to Basic.
//
// It walks each diagonal of the comparison matrix — each "wave" of cells
// sharing one alignment shift between a and b — tracking the longest run of
// consecutive matches, in O(len(a)*len(b)) time and O(1) extra space.
//
// The original three-buffer wavefront implementation indexed a with bounds
// taken from b (panicking whenever len(a) != len(b)), seeded its waves
// incorrectly, and missed matches in the first row/column; it has been
// replaced by this equivalent diagonal scan.
func Wave(a, b []int64) int {
	if len(a) == 0 || len(b) == 0 {
		return 0
	}
	max := 0
	// shift = bi - ai is constant along one diagonal.
	for shift := -(len(a) - 1); shift < len(b); shift++ {
		run := 0
		for ai := 0; ai < len(a); ai++ {
			bi := ai + shift
			if bi < 0 || bi >= len(b) {
				run = 0
				continue
			}
			if a[ai] == b[bi] {
				run++
				if run > max {
					max = run
				}
			} else {
				run = 0
			}
		}
	}
	return max
}
package rbtree
// color is the red/black marker stored on each node.
type color uint8

const (
	// kRed and kBlack are the two node colors of a red-black tree.
	kRed color = 0
	kBlack color = 1
)

// Node is a single tree element, linked to its parent and children.
type Node struct {
	left *Node
	right *Node
	parent *Node
	color color
	// Item is embedded so the node exposes the stored value's Less method.
	Item
}

// Item is the value stored in the tree; Less defines the strict ordering.
// Two items are considered equal when neither is Less than the other.
type Item interface {
	Less(than Item) bool
}

// Rbtree is a red-black tree ordered by Item.Less.
// It performs no internal synchronization.
type Rbtree struct {
	root *Node
	count uint
}
// New returns an empty red-black tree ready for use.
func New() *Rbtree {
	t := new(Rbtree)
	return t
}
// Len returns the number of items currently stored in the tree.
func (t *Rbtree) Len() uint {
	return t.count
}
// Min returns the smallest item in the tree, or nil if the tree is empty.
//
// Bug fix: the original condition was inverted — it returned x.Item when
// x == nil (a guaranteed nil-pointer dereference on an empty tree) and nil
// otherwise. The check now mirrors the correct Max implementation.
func (t *Rbtree) Min() Item {
	if x := t.min(t.root); x != nil {
		return x.Item
	}
	return nil
}
// min returns the leftmost node of the subtree rooted at x, or nil.
func (t *Rbtree) min(x *Node) *Node {
	if x == nil {
		return nil
	}
	n := x
	for n.left != nil {
		n = n.left
	}
	return n
}
// Max returns the largest item in the tree, or nil if the tree is empty.
func (t *Rbtree) Max() Item {
	x := t.max(t.root)
	if x == nil {
		return nil
	}
	return x.Item
}

// max returns the rightmost node of the subtree rooted at x, or nil.
func (t *Rbtree) max(x *Node) *Node {
	if x == nil {
		return nil
	}
	n := x
	for n.right != nil {
		n = n.right
	}
	return n
}
// Get returns the stored item equal to the given one (neither Less than
// the other), or nil when item is nil or no match exists.
func (t *Rbtree) Get(item Item) Item {
	if item == nil {
		return nil
	}
	n := t.search(item)
	if n == nil {
		return nil
	}
	return n.Item
}

// search walks from the root toward item and returns the matching node,
// or nil when the item is absent.
func (t *Rbtree) search(item Item) *Node {
	for n := t.root; n != nil; {
		switch {
		case n.Item.Less(item):
			n = n.right
		case item.Less(n.Item):
			n = n.left
		default:
			return n
		}
	}
	return nil
}
// Insert adds item to the tree. A nil item, or one equal to an item
// already stored, is ignored (duplicates neither insert nor replace).
func (t *Rbtree) Insert(item Item) {
	if item == nil {
		return
	}
	// Descend to the leaf position; bail out if an equal item exists.
	var parent *Node
	cur := t.root
	for cur != nil {
		parent = cur
		switch {
		case item.Less(cur.Item):
			cur = cur.left
		case cur.Item.Less(item):
			cur = cur.right
		default:
			return
		}
	}
	// New nodes start red; insertFixup restores the red-black invariants.
	n := &Node{parent: parent, color: kRed, Item: item}
	switch {
	case parent == nil:
		t.root = n
	case item.Less(parent.Item):
		parent.left = n
	default:
		parent.right = n
	}
	t.count++
	t.insertFixup(n)
}
// insertFixup restores the red-black invariants after inserting the red
// node x, following the classic CLRS cases (recolor on a red uncle,
// otherwise one or two rotations), mirrored for a left- vs right-side
// parent.
func (t *Rbtree) insertFixup(x *Node) {
	for x != t.root && x.parent.color == kRed {
		if x.parent == x.parent.parent.left {
			// y is x's uncle.
			y := x.parent.parent.right
			if y != nil && y.color == kRed {
				// Case 1: red uncle — recolor, continue from the grandparent.
				x.parent.color = kBlack
				y.color = kBlack
				x.parent.parent.color = kRed
				x = x.parent.parent
			} else {
				if x == x.parent.right {
					// Case 2: inner child — rotate into the outer position.
					x = x.parent
					t.leftRotate(x)
				}
				// Case 3: outer child — recolor and rotate the grandparent.
				x.parent.color = kBlack
				x.parent.parent.color = kRed
				t.rightRotate(x.parent.parent)
			}
		} else {
			// Mirror image of the cases above.
			y := x.parent.parent.left
			if y != nil && y.color == kRed {
				x.parent.color = kBlack
				y.color = kBlack
				x.parent.parent.color = kRed
				x = x.parent.parent
			} else {
				if x == x.parent.left {
					x = x.parent
					t.rightRotate(x)
				}
				x.parent.color = kBlack
				x.parent.parent.color = kRed
				t.leftRotate(x.parent.parent)
			}
		}
	}
	// The root is always black.
	t.root.color = kBlack
}
// Delete removes the stored item equal to the given one, if present.
// Standard BST deletion: y is the node actually spliced out (the target
// itself, or its in-order successor when it has two children), x is the
// child that takes y's place; removing a black node triggers deleteFixup.
func (t *Rbtree) Delete(item Item) {
	if item == nil {
		return
	}
	z := t.search(item)
	if z == nil {
		return
	}
	var x, y *Node
	// Choose the node to splice out.
	if z.left == nil || z.right == nil {
		y = z
	} else {
		y = t.successor(z)
	}
	// x is y's only child (possibly nil).
	if y.left != nil {
		x = y.left
	} else {
		x = y.right
	}
	// Splice y out, linking x into its place.
	if x != nil {
		x.parent = y.parent
	}
	if y.parent == nil {
		t.root = x
	} else if y == y.parent.left {
		y.parent.left = x
	} else {
		y.parent.right = x
	}
	// When the successor was spliced, move its value into the target node.
	if y != z {
		z.Item = y.Item
	}
	// Removing a black node can break the black-height invariant.
	if y.color == kBlack {
		t.deleteFixup(x, y.parent)
	}
	t.count--
}
// deleteFixup restores the red-black invariants after a black node was
// removed; x is the replacement child (possibly nil) and parent its parent.
// It follows the CLRS cases on the sibling w, mirrored for both sides.
//
// NOTE(review): the left branch reads w.color without the "w != nil" guard
// that the right branch has. In a valid tree the sibling of a doubly-black
// node always exists, but the asymmetry is worth confirming.
func (t *Rbtree) deleteFixup(x, parent *Node) {
	for x != t.root && (x == nil || x.color == kBlack) {
		if x == parent.left {
			w := parent.right
			if w.color == kRed {
				// Case 1: red sibling — rotate to get a black sibling.
				w.color = kBlack
				parent.color = kRed
				t.leftRotate(parent)
				w = parent.right
			}
			if (w.left == nil || w.left.color == kBlack) && (w.right == nil || w.right.color == kBlack) {
				// Case 2: both of w's children black — recolor, move up.
				w.color = kRed
				x = parent
				parent = x.parent
			} else {
				if w.right == nil || w.right.color == kBlack {
					// Case 3: near child red — rotate w toward the far side.
					w.left.color = kBlack
					w.color = kRed
					t.rightRotate(w)
					w = parent.right
				}
				// Case 4: far child red — final rotation, loop terminates.
				w.color = parent.color
				parent.color = kBlack
				w.right.color = kBlack
				t.leftRotate(parent)
				x = t.root
			}
		} else {
			// Mirror image of the cases above.
			w := parent.left
			if w != nil && w.color == kRed {
				w.color = kBlack
				parent.color = kRed
				t.rightRotate(parent)
				w = parent.left
			}
			if (w.left == nil || w.left.color == kBlack) && (w.right == nil || w.right.color == kBlack) {
				w.color = kRed
				x = parent
				parent = x.parent
			} else {
				if w.left == nil || w.left.color == kBlack {
					w.right.color = kBlack
					w.color = kRed
					t.leftRotate(w)
					w = parent.left
				}
				w.color = parent.color
				parent.color = kBlack
				w.left.color = kBlack
				t.rightRotate(parent)
				x = t.root
			}
		}
	}
	if x != nil {
		x.color = kBlack
	}
}
// successor returns the in-order successor of x, or nil when x is nil or
// is the maximum node of the tree.
func (t *Rbtree) successor(x *Node) *Node {
	if x == nil {
		return nil
	}
	// With a right subtree, the successor is that subtree's minimum.
	if x.right != nil {
		return t.min(x.right)
	}
	// Otherwise climb until we arrive at a parent from its left side.
	cur, up := x, x.parent
	for up != nil && cur == up.right {
		cur, up = up, up.parent
	}
	return up
}
// leftRotate rotates the subtree rooted at x to the left, making x's right
// child the new subtree root. No-op when x has no right child.
func (t *Rbtree) leftRotate(x *Node) {
	pivot := x.right
	if pivot == nil {
		return
	}
	// Move pivot's left subtree under x.
	x.right = pivot.left
	if pivot.left != nil {
		pivot.left.parent = x
	}
	// Splice pivot into x's former position.
	pivot.parent = x.parent
	switch {
	case x.parent == nil:
		t.root = pivot
	case x == x.parent.left:
		x.parent.left = pivot
	default:
		x.parent.right = pivot
	}
	// Finally hang x off pivot's left side.
	pivot.left = x
	x.parent = pivot
}
func (t *Rbtree) rightRotate(x *Node) {
if x.left == nil {
return
}
y := x.left
x.left = y.right
if y.right != nil {
y.right.parent = x
}
y.parent = x.parent
if x.parent == nil {
t.root = y
} else if x == x.parent.left {
x.parent.left = y
} else {
x.parent.right = y
}
y.right = x
x.parent = y
} | rbtree/rbtree.go | 0.676727 | 0.490663 | rbtree.go | starcoder |
package retry
import (
"math"
"time"
)
// Exponential performs retries and waits according to an exponential equation: f(x) = a*B^x + y where B is the base, x is the number of retries (starting at 0) and y is the offset time (cannot be less than 0)
type Exponential struct {
	// Times is the maximum number of times to execute the back-off
	Times uint
	// Base is the base of the exponent
	Base time.Duration
	// YOffset is the y-component of the exponential equation; it is added after scaling
	YOffset time.Duration
	// Scaling is multiplication factor, this controls the units of the exponentiation
	Scaling time.Duration
	// MaxAttemptWaitTime The maximum amount of time to wait for a particular attempt (does not account for total time), regardless of the exponential equation (leave as 0 to ignore)
	MaxAttemptWaitTime time.Duration
}

// New builds the retry Service configured by this Exponential.
func (l Exponential) New() Service {
	return &maxExponentialService{
		config: l,
	}
}

// ExpBase2 configures the Exponential but with base 2 instead of an arbitrary base.
type ExpBase2 struct {
	// Times is the maximum number of times to execute the back-off
	Times uint
	// YOffset is the y-component of the exponential equation
	YOffset time.Duration
	// Scaling is multiplication factor, this controls the units of the exponentiation
	Scaling time.Duration
	// MaxAttemptWaitTime The maximum amount of time to wait for a particular attempt (does not account for total time), regardless of the exponential equation (leave as 0 to ignore)
	MaxAttemptWaitTime time.Duration
}

// New builds the retry Service: an Exponential service with Base fixed to 2.
func (l ExpBase2) New() Service {
	return &maxExponentialService{
		config: Exponential{
			Times:              l.Times,
			YOffset:            l.YOffset,
			Scaling:            l.Scaling,
			MaxAttemptWaitTime: l.MaxAttemptWaitTime,
			Base:               2,
		},
	}
}

// maxExponentialService tracks retry progress against an Exponential config.
type maxExponentialService struct {
	config     Exponential
	triesSoFar uint
}

// ShouldTry reports whether any of the allotted attempts remain.
func (c *maxExponentialService) ShouldTry() bool {
	return c.triesSoFar < c.config.Times
}

// Yield sleeps for the wait duration computed for the current attempt.
func (c *maxExponentialService) Yield() {
	time.Sleep(c.waitDuration())
}
// waitDuration computes the wait for the current attempt using the configured
// equation a*B^x + y, where the exponent x is max(0, triesSoFar-1), then
// clamps the result to MaxAttemptWaitTime when that is set.
func (c maxExponentialService) waitDuration() time.Duration {
	// triesSoFar is unsigned, so `triesSoFar - 1` wraps to a huge value when
	// no retry has happened yet. The original code relied on
	// math.Max(0, float64(triesSoFar-1)), but the wrap happens before the
	// float conversion, so the guard never fired (and the base-2 shift used
	// an enormous shift count). Clamp the exponent explicitly instead.
	var exponent uint
	if c.triesSoFar > 0 {
		exponent = c.triesSoFar - 1
	}
	var waitFor time.Duration
	switch c.config.Base {
	case 0:
		// do nothing, no exponent, constant function
	case 1:
		// 1^x == 1: constant with scaling
		waitFor = 1
	case 2:
		// cheat for base 2 with a shift
		waitFor = 1 << exponent
	default:
		// Only perform the (slower) Pow for other bases
		waitFor = time.Duration(math.Pow(float64(c.config.Base), float64(exponent)))
	}
	// scaling factor
	waitFor = waitFor * c.config.Scaling
	// add the YOffset
	waitFor = waitFor + c.config.YOffset
	if c.config.MaxAttemptWaitTime != 0 && waitFor > c.config.MaxAttemptWaitTime {
		// Constrain the wait time for this single attempt
		waitFor = c.config.MaxAttemptWaitTime
	}
	return waitFor
}
// Controller returns the service itself as its own ServiceController so the
// caller can abort it or record retries.
func (c *maxExponentialService) Controller() ServiceController {
	return c
}

// Abort stops further attempts by marking all allotted tries as used, so
// ShouldTry returns false from now on.
func (c *maxExponentialService) Abort() {
	c.triesSoFar = c.config.Times
}

// NotifyRetry records that another attempt has been made, advancing the
// exponential back-off.
func (c *maxExponentialService) NotifyRetry() {
	c.triesSoFar++
}

// NewErrorList creates the default error list
func (c *maxExponentialService) NewErrorList() ErrorAppender {
	return newErrorList()
}
package gmi
import (
"fmt"
"strings"
)
// Compile-time checks that every concrete line type satisfies Line.
var (
	_ Line = (*TextLine)(nil)
	_ Line = (*LinkLine)(nil)
	_ Line = (*PreformatToggleLine)(nil)
	_ Line = (*PreformatLine)(nil)
	_ Line = (*HeadingLine)(nil)
	_ Line = (*UnorderedListLine)(nil)
	_ Line = (*QuoteLine)(nil)
)

// Line represents a Line in text/gemini in a logical way
type Line interface {
	CoreType() LineType // CoreType returns the core type of the line (returns Text Type for advanced line types)
	Type() LineType     // Type returns the true type of the line
	Level() int         // Level returns the level of the heading, if it's a HeadingType, or 0 otherwise
	Data() string       // Data returns the content data of the line (for link lines: the URL)
	Meta() string       // Meta returns the secondary data of the line (for link lines: the friendly name), if any
	Prefix() string     // Prefix returns the documented prefix to the line, without whitespace
	String() string     // String (Stringer) implements a post-formatted representation of the original line, including prefix
}
// ---- types
// TextLine represents a plain text/gemini line; the string is the line itself.
type TextLine string

// LinkLine represents a link text/gemini line.
type LinkLine struct {
	link string // target URL (returned by Data)
	name string // optional human-friendly name (returned by Meta)
}

// PreformatToggleLine represents a "```" toggle in text/gemini; the string
// value presumably holds the text after the backticks (set by the parser,
// not shown here — confirm at the parse site).
type PreformatToggleLine string

// PreformatLine represents a preformatted text/gemini line.
type PreformatLine string

// HeadingLine represents a heading text/gemini line.
type HeadingLine struct {
	contents string // heading text without the leading '#' prefix
	level    int    // heading depth: number of '#' characters in the prefix
}

// UnorderedListLine represents an unordered list entry in text/gemini.
type UnorderedListLine string

// QuoteLine represents a quoted text/gemini line.
type QuoteLine string
// revive:disable:exported
// implementations are already documented in the interface
// ---- text line
// TextLine carries no prefix: Data and String are both the raw line text.
func (r TextLine) CoreType() LineType { return TextType }
func (r TextLine) Type() LineType { return TextType }
func (r TextLine) Level() int { return 0 }
func (r TextLine) Data() string { return string(r) }
func (r TextLine) Meta() string { return "" }
func (r TextLine) Prefix() string { return "" }
func (r TextLine) String() string { return string(r) }
// ---- link line
// LinkLine: Data is the URL, Meta the optional friendly name.
func (r LinkLine) CoreType() LineType { return LinkType }
func (r LinkLine) Type() LineType { return LinkType }
func (r LinkLine) Level() int { return 0 }
func (r LinkLine) Data() string { return r.link }
func (r LinkLine) Meta() string { return r.name }
func (r LinkLine) Prefix() string { return "=>" }
// String omits the name component when no name is set.
func (r LinkLine) String() string {
	if len(r.name) > 0 {
		return fmt.Sprintf("%s %s %s", r.Prefix(), r.link, r.name)
	}
	return fmt.Sprintf("%s %s", r.Prefix(), r.link)
}
// ---- preformat toggle line
func (r PreformatToggleLine) CoreType() LineType { return PreformatToggleType }
func (r PreformatToggleLine) Type() LineType     { return PreformatToggleType }
func (r PreformatToggleLine) Level() int         { return 0 }
func (r PreformatToggleLine) Data() string       { return string(r) }
func (r PreformatToggleLine) Meta() string       { return "" }
func (r PreformatToggleLine) Prefix() string     { return "```" }

// String renders the toggle with its stored text (Data). The previous
// implementation appended Meta(), which is hard-coded to "", so the text
// after the backticks was silently dropped on round-trip.
func (r PreformatToggleLine) String() string { return fmt.Sprintf("%s%s", r.Prefix(), r.Data()) }
// ---- preformat line
// PreformatLine passes the raw text through unchanged (no prefix).
func (r PreformatLine) CoreType() LineType { return PreformatType }
func (r PreformatLine) Type() LineType { return PreformatType }
func (r PreformatLine) Level() int { return 0 }
func (r PreformatLine) Data() string { return string(r) }
func (r PreformatLine) Meta() string { return "" }
func (r PreformatLine) Prefix() string { return "" }
func (r PreformatLine) String() string { return string(r) }
// ---- heading line
// HeadingLine renders as level-many '#' characters, a space, then the text.
func (r HeadingLine) CoreType() LineType { return TextType }
func (r HeadingLine) Type() LineType { return HeadingType }
func (r HeadingLine) Level() int { return r.level }
func (r HeadingLine) Data() string { return r.contents }
func (r HeadingLine) Meta() string { return "" }
func (r HeadingLine) Prefix() string { return strings.Repeat("#", r.level) }
func (r HeadingLine) String() string { return fmt.Sprintf("%s %s", r.Prefix(), r.Data()) }
// ---- unordered list line
func (r UnorderedListLine) CoreType() LineType { return TextType }
func (r UnorderedListLine) Type() LineType { return UnorderedListType }
func (r UnorderedListLine) Level() int { return 0 }
func (r UnorderedListLine) Data() string { return string(r) }
func (r UnorderedListLine) Meta() string { return "" }
func (r UnorderedListLine) Prefix() string { return "*" } // mostly present for CoreType consumers, use bullet points instead
func (r UnorderedListLine) String() string { return fmt.Sprintf("%s %s", r.Prefix(), r.Data()) }
// ---- quote line
func (r QuoteLine) CoreType() LineType { return TextType }
func (r QuoteLine) Type() LineType { return QuoteType }
func (r QuoteLine) Level() int { return 0 }
func (r QuoteLine) Data() string { return string(r) }
func (r QuoteLine) Meta() string { return "" }
func (r QuoteLine) Prefix() string { return ">" }
func (r QuoteLine) String() string { return fmt.Sprintf("%s %s", r.Prefix(), r.Data()) }
package iso20022
// Set of elements used to identify the underlying transaction.
// All fields are optional pointers; the setters below allocate the value.
type TransactionReferences2 struct {
	// Point to point reference, as assigned by the instructing party of the underlying message.
	MessageIdentification *Max35Text `xml:"MsgId,omitempty"`
	// Unique reference, as assigned by the account servicing institution, to unambiguously identify the instruction.
	AccountServicerReference *Max35Text `xml:"AcctSvcrRef,omitempty"`
	// Unique identification, as assigned by a sending party, to unambiguously identify the payment information group within the message.
	PaymentInformationIdentification *Max35Text `xml:"PmtInfId,omitempty"`
	// Unique identification, as assigned by an instructing party for an instructed party, to unambiguously identify the instruction.
	//
	// Usage: The instruction identification is a point to point reference that can be used between the instructing party and the instructed party to refer to the individual instruction. It can be included in several messages related to the instruction.
	InstructionIdentification *Max35Text `xml:"InstrId,omitempty"`
	// Unique identification, as assigned by the initiating party, to unambiguously identify the transaction. This identification is passed on, unchanged, throughout the entire end-to-end chain.
	//
	// Usage: The end-to-end identification can be used for reconciliation or to link tasks relating to the transaction. It can be included in several messages related to the transaction.
	//
	// Usage: In case there are technical limitations to pass on multiple references, the end-to-end identification must be passed on throughout the entire end-to-end chain.
	EndToEndIdentification *Max35Text `xml:"EndToEndId,omitempty"`
	// Unique identification, as assigned by the first instructing agent, to unambiguously identify the transaction that is passed on, unchanged, throughout the entire interbank chain.
	// Usage: The transaction identification can be used for reconciliation, tracking or to link tasks relating to the transaction on the interbank level.
	// Usage: The instructing agent has to make sure that the transaction identification is unique for a pre-agreed period.
	TransactionIdentification *Max35Text `xml:"TxId,omitempty"`
	// Unique identification, as assigned by the creditor, to unambiguously identify the mandate.
	MandateIdentification *Max35Text `xml:"MndtId,omitempty"`
	// Unique and unambiguous identifier for a cheque as assigned by the agent.
	ChequeNumber *Max35Text `xml:"ChqNb,omitempty"`
	// Unique reference, as assigned by a clearing system, to unambiguously identify the instruction.
	// Usage: In case there are technical limitations to pass on multiple references, the end-to-end identification must be passed on throughout the entire end-to-end chain.
	ClearingSystemReference *Max35Text `xml:"ClrSysRef,omitempty"`
	// Proprietary reference related to the underlying transaction.
	Proprietary *ProprietaryReference1 `xml:"Prtry,omitempty"`
}

// SetMessageIdentification sets the point to point message reference.
func (t *TransactionReferences2) SetMessageIdentification(value string) {
	t.MessageIdentification = (*Max35Text)(&value)
}

// SetAccountServicerReference sets the account servicer's reference.
func (t *TransactionReferences2) SetAccountServicerReference(value string) {
	t.AccountServicerReference = (*Max35Text)(&value)
}

// SetPaymentInformationIdentification sets the payment information group identification.
func (t *TransactionReferences2) SetPaymentInformationIdentification(value string) {
	t.PaymentInformationIdentification = (*Max35Text)(&value)
}

// SetInstructionIdentification sets the point to point instruction identification.
func (t *TransactionReferences2) SetInstructionIdentification(value string) {
	t.InstructionIdentification = (*Max35Text)(&value)
}

// SetEndToEndIdentification sets the end-to-end transaction identification.
func (t *TransactionReferences2) SetEndToEndIdentification(value string) {
	t.EndToEndIdentification = (*Max35Text)(&value)
}

// SetTransactionIdentification sets the interbank transaction identification.
func (t *TransactionReferences2) SetTransactionIdentification(value string) {
	t.TransactionIdentification = (*Max35Text)(&value)
}

// SetMandateIdentification sets the creditor-assigned mandate identification.
func (t *TransactionReferences2) SetMandateIdentification(value string) {
	t.MandateIdentification = (*Max35Text)(&value)
}

// SetChequeNumber sets the cheque identifier.
func (t *TransactionReferences2) SetChequeNumber(value string) {
	t.ChequeNumber = (*Max35Text)(&value)
}

// SetClearingSystemReference sets the clearing system's reference.
func (t *TransactionReferences2) SetClearingSystemReference(value string) {
	t.ClearingSystemReference = (*Max35Text)(&value)
}

// AddProprietary allocates and returns the proprietary reference.
func (t *TransactionReferences2) AddProprietary() *ProprietaryReference1 {
	t.Proprietary = new(ProprietaryReference1)
	return t.Proprietary
}
package mf2
import (
"fmt"
"reflect"
)
// Flatten takes a Microformats map and flattens all arrays with
// a single value to one element. The input is not modified; a new
// structure is returned.
func Flatten(data map[string][]interface{}) map[string]interface{} {
	return flatten(data).(map[string]interface{})
}
// flatten recursively collapses every single-element slice to its lone
// element. Maps are rebuilt with flattened values (keys stringified via
// fmt.Sprint); any other value passes through unchanged.
func flatten(data interface{}) interface{} {
	v := reflect.ValueOf(data)
	switch v.Kind() {
	case reflect.Slice:
		if v.Len() == 1 {
			// A one-element slice becomes its (flattened) element.
			return flatten(v.Index(0).Interface())
		}
		out := make([]interface{}, v.Len())
		for i := range out {
			out[i] = flatten(v.Index(i).Interface())
		}
		return out
	case reflect.Map:
		out := map[string]interface{}{}
		for _, key := range v.MapKeys() {
			out[fmt.Sprint(key.Interface())] = flatten(v.MapIndex(key).Interface())
		}
		return out
	default:
		return data
	}
}
// Deflatten takes a flattened map and deflattens all single values to
// arrays (the counterpart to Flatten).
func Deflatten(data map[string]interface{}) map[string]interface{} {
	return deflatten(data).(map[string]interface{})
}
// deflattenProperties rebuilds a microformats "properties" object: every
// value is recursively deflattened and scalars are wrapped in a
// single-element slice.
func deflattenProperties(data interface{}) map[string][]interface{} {
	value := reflect.ValueOf(data)
	parsed := map[string][]interface{}{}
	for _, k := range value.MapKeys() {
		key := fmt.Sprint(k.Interface())
		inner := value.MapIndex(k).Interface()
		// reflect.TypeOf(nil) is nil, so calling Kind() on it panics.
		// Guard explicit nulls and wrap them like any other scalar.
		if inner == nil {
			parsed[key] = []interface{}{nil}
			continue
		}
		vk := reflect.TypeOf(inner).Kind()
		if vk == reflect.Slice || vk == reflect.Array {
			parsed[key] = deflatten(inner).([]interface{})
		} else {
			parsed[key] = []interface{}{deflatten(inner)}
		}
	}
	return parsed
}

// deflatten is the inverse of flatten: inside maps, scalars become
// single-element slices; "properties" gets microformats-specific handling
// and "value"/"html" keep their scalar form; nil becomes [nil].
func deflatten(data interface{}) interface{} {
	if data == nil {
		return []interface{}{nil}
	}
	value := reflect.ValueOf(data)
	kind := value.Kind()
	if kind == reflect.Slice {
		parsed := make([]interface{}, value.Len())
		for i := 0; i < value.Len(); i++ {
			parsed[i] = deflatten(value.Index(i).Interface())
		}
		return parsed
	}
	if kind == reflect.Map {
		parsed := map[string]interface{}{}
		for _, k := range value.MapKeys() {
			key := fmt.Sprint(k.Interface())
			inner := value.MapIndex(k).Interface()
			// Guard nil values: reflect.TypeOf(nil).Kind() would panic.
			if inner == nil {
				parsed[key] = []interface{}{nil}
				continue
			}
			vk := reflect.TypeOf(inner).Kind()
			if key == "properties" {
				parsed[key] = deflattenProperties(inner)
			} else if key == "value" || key == "html" {
				parsed[key] = deflatten(inner)
			} else if vk == reflect.Slice || vk == reflect.Array {
				parsed[key] = deflatten(inner).([]interface{})
			} else {
				parsed[key] = []interface{}{deflatten(inner)}
			}
		}
		return parsed
	}
	return data
}
package salsa
import (
"time"
"github.com/asimpleidea/salsa/sauces"
)
// result is one raw API result: a header common to all indexes plus an
// index-specific, schemaless data payload.
type result struct {
	Header *resultHeader `json:"header"`
	Data resultData `json:"data"`
}

// Each converter below copies one index-specific payload into its typed
// sauce struct; the string keys mirror the provider's JSON field names.

// deviantArt maps the raw result onto a sauces.DeviantArt.
func (r *result) deviantArt() *sauces.DeviantArt {
	return &sauces.DeviantArt{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		Title: r.Data.toString("title"),
		ID: r.Data.toIntegerID("da_id"),
		AuthorName: r.Data.toString("author_name"),
		AuthorURL: r.Data.toString("author_url"),
	}
}

// eHentai maps the raw result onto a sauces.EHentai.
func (r *result) eHentai() *sauces.EHentai {
	return &sauces.EHentai{
		SauceHeader: r.Header.toSauceHeader(),
		Source: r.Data.toString("source"),
		Creators: r.Data.toStringSlice("creator"),
		EnglishName: r.Data.toString("eng_name"),
		JapaneseName: r.Data.toString("jp_name"),
	}
}

// artStation maps the raw result onto a sauces.ArtStation.
func (r *result) artStation() *sauces.ArtStation {
	return &sauces.ArtStation{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		Title: r.Data.toString("title"),
		Project: r.Data.toString("as_project"),
		AuthorName: r.Data.toString("author_name"),
		AuthorURL: r.Data.toString("author_url"),
	}
}

// pixiv maps the raw result onto a sauces.Pixiv.
func (r *result) pixiv() *sauces.Pixiv {
	return &sauces.Pixiv{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		Title: r.Data.toString("title"),
		ID: r.Data.toIntegerID("pixiv_id"),
		MemberName: r.Data.toString("member_name"),
		MemberID: r.Data.toIntegerID("member_id"),
	}
}

// aniDB maps the raw result onto a sauces.AniDB.
func (r *result) aniDB() *sauces.AniDB {
	return &sauces.AniDB{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		Source: r.Data.toString("source"),
		ID: r.Data.toIntegerID("anidb_aid"),
		Part: r.Data.toString("part"),
		Year: r.Data.toString("year"),
		EstimatedTime: r.Data.toString("est_time"),
	}
}

// pawoo maps the raw result onto a sauces.Pawoo; created_at is RFC 3339.
func (r *result) pawoo() *sauces.Pawoo {
	return &sauces.Pawoo{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		CreatedAt: r.Data.toTime("created_at", time.RFC3339),
		ID: r.Data.toIntegerID("pawoo_id"),
		UserAccount: r.Data.toString("pawoo_user_acct"),
		Username: r.Data.toString("pawoo_user_username"),
		UserDisplayName: r.Data.toString("pawoo_user_display_name"),
	}
}

// gelbooru maps the raw result onto a sauces.Gelbooru.
func (r *result) gelbooru() *sauces.Gelbooru {
	return &sauces.Gelbooru{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		ID: r.Data.toIntegerID("gelbooru_id"),
		Creator: r.Data.toString("creator"),
		Material: r.Data.toString("material"),
		Characters: r.Data.toString("characters"),
		Source: r.Data.toString("source"),
	}
}

// danbooru maps the raw result onto a sauces.Danbooru.
func (r *result) danbooru() *sauces.Danbooru {
	return &sauces.Danbooru{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		ID: r.Data.toIntegerID("danbooru_id"),
		Creator: r.Data.toString("creator"),
		Material: r.Data.toString("material"),
		Characters: r.Data.toString("characters"),
		Source: r.Data.toString("source"),
	}
}

// e621 maps the raw result onto a sauces.E621.
func (r *result) e621() *sauces.E621 {
	return &sauces.E621{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		ID: r.Data.toIntegerID("e621_id"),
		Creator: r.Data.toString("creator"),
		Material: r.Data.toString("material"),
		Characters: r.Data.toString("characters"),
		Source: r.Data.toString("source"),
	}
}

// portalGraphics maps the raw result onto a sauces.PortalGraphics.
func (r *result) portalGraphics() *sauces.PortalGraphics {
	return &sauces.PortalGraphics{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		ID: r.Data.toIntegerID("pg_id"),
		Title: r.Data.toString("title"),
		MemberName: r.Data.toString("member_name"),
		MemberID: r.Data.toIntegerID("member_id"),
	}
}

// sankaku maps the raw result onto a sauces.Sankaku.
func (r *result) sankaku() *sauces.Sankaku {
	return &sauces.Sankaku{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		ID: r.Data.toIntegerID("sankaku_id"),
		Creator: r.Data.toString("creator"),
		Material: r.Data.toString("material"),
		Characters: r.Data.toString("characters"),
		Source: r.Data.toString("source"),
	}
}

// furAffinity maps the raw result onto a sauces.FurAffinity.
func (r *result) furAffinity() *sauces.FurAffinity {
	return &sauces.FurAffinity{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		ID: r.Data.toIntegerID("fa_id"),
		AuthorName: r.Data.toString("author_name"),
		AuthorURL: r.Data.toString("author_url"),
	}
}

// seigaIllustration maps the raw result onto a sauces.SeigaIllustration.
func (r *result) seigaIllustration() *sauces.SeigaIllustration {
	return &sauces.SeigaIllustration{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		ID: r.Data.toIntegerID("seiga_id"),
		MemberName: r.Data.toString("member_name"),
		MemberID: r.Data.toIntegerID("member_id"),
	}
}

// hMags maps the raw result onto a sauces.HMags.
func (r *result) hMags() *sauces.HMags {
	return &sauces.HMags{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		Title: r.Data.toString("title"),
		Part: r.Data.toString("part"),
		Date: r.Data.toString("date"),
	}
}

// iMDb maps the raw result onto a sauces.IMDb.
// NOTE(review): Part reads the "creator" key while the sibling aniDB
// converter reads "part" — possibly a copy-paste slip; confirm against the
// provider's IMDb index payload before changing.
func (r *result) iMDb() *sauces.IMDb {
	return &sauces.IMDb{
		SauceHeader: r.Header.toSauceHeader(),
		ExternalURLs: r.Data.toStringSlice("ext_urls"),
		ID: r.Data.toString("imdb_id"),
		Part: r.Data.toString("creator"),
		Year: r.Data.toString("year"),
		EstimatedTime: r.Data.toString("est_time"),
	}
}
// resultData is the schemaless per-index payload of one API result; the
// to* helpers read loosely typed fields out of it defensively.
type resultData map[string]interface{}

// toString returns the string stored under key, or "" when the key is
// missing or holds a non-string value.
func (r resultData) toString(key string) string {
	if s, ok := r[key].(string); ok {
		return s
	}
	return ""
}

// toStringSlice returns the strings stored under key, accepting either a
// single string or a slice of values. Non-string elements are skipped:
// the previous unchecked val.(string) assertions panicked on any
// unexpected payload shape.
func (r resultData) toStringSlice(key string) (values []string) {
	val, exists := r[key]
	if !exists {
		return []string{}
	}
	switch val := val.(type) {
	case []interface{}:
		for _, item := range val {
			if s, ok := item.(string); ok {
				values = append(values, s)
			}
		}
	case string:
		values = append(values, val)
	default:
		// nil or a non-string scalar: nothing usable.
		values = []string{}
	}
	return
}
// toIntegerID returns the integer stored under key, or -1 when the key is
// missing or holds an unsupported type. Accepts string, int and float64
// encodings (numbers decoded from JSON typically arrive as float64).
func (r resultData) toIntegerID(key string) int {
	val, exists := r[key]
	if !exists {
		return -1
	}
	switch val := val.(type) {
	case string:
		// atoi is a package-local helper defined elsewhere; presumably it
		// returns a sentinel on parse failure — confirm at its definition.
		return atoi(&val)
	case int:
		return val
	case float64:
		// We're talking IDs here: pretty sure the compiler set them
		// as float64 to be as sure as possible, but we know better in
		// this case, and that is actually an int.
		return int(val)
	default:
		return -1
	}
}
func (r resultData) toTime(key, format string) time.Time {
defaultTime := time.Date(1970, time.January, 1, 0, 0, 0, 0, time.UTC)
val, exists := r[key]
if !exists {
return defaultTime
}
switch val := val.(type) {
case string:
t, err := time.Parse(format, val)
if err != nil {
return defaultTime
}
return t
// TODO: format timestamp
default:
return defaultTime
}
} | result_data.go | 0.539711 | 0.415195 | result_data.go | starcoder |
package axon
import (
"fmt"
"reflect"
"unsafe"
)
// axon.Synapse holds state for the synaptic connection between neurons
type Synapse struct {
Wt float32 `desc:"effective synaptic weight value, determining how much conductance one spike drives on the receiving neuron. Wt = SWt * WtSig(LWt), where WtSig produces values between 0-2 based on LWt, centered on 1"`
SWt float32 `desc:"slowly adapting structural weight value, which acts as a multiplicative scaling factor on synaptic efficacy: biologically represents the physical size and efficacy of the dendritic spine, while the LWt reflects the AMPA receptor efficacy and number. SWt values adapt in an outer loop along with synaptic scaling, with constraints to prevent runaway positive feedback loops and maintain variance and further capacity to learn. Initial variance is all in SWt, with LWt set to .5, and scaling absorbs some of LWt into SWt."`
LWt float32 `desc:"rapidly learning, linear weight value -- learns according to the lrate specified in the connection spec. Initially all LWt are .5, which gives 1 from WtSig function, "`
DWt float32 `desc:"change in synaptic weight, from learning"`
DSWt float32 `desc:"change in SWt slow synaptic weight -- accumulates DWt"`
}
func (sy *Synapse) VarNames() []string {
return SynapseVars
}
var SynapseVars = []string{"Wt", "SWt", "LWt", "DWt", "DSWt"}
var SynapseVarProps = map[string]string{
"DWt": `auto-scale:"+"`,
"DSWt": `auto-scale:"+"`,
}
var SynapseVarsMap map[string]int
func init() {
SynapseVarsMap = make(map[string]int, len(SynapseVars))
typ := reflect.TypeOf((*Synapse)(nil)).Elem()
for i, v := range SynapseVars {
SynapseVarsMap[v] = i
pstr := SynapseVarProps[v]
if fld, has := typ.FieldByName(v); has {
if desc, ok := fld.Tag.Lookup("desc"); ok {
pstr += ` desc:"` + desc + `"`
SynapseVarProps[v] = pstr
}
}
}
}
// SynapseVarByName returns the index of the variable in the Synapse, or error
func SynapseVarByName(varNm string) (int, error) {
i, ok := SynapseVarsMap[varNm]
if !ok {
return -1, fmt.Errorf("Synapse VarByName: variable name: %v not valid", varNm)
}
return i, nil
}
// VarByIndex returns variable using index (0 = first variable in SynapseVars list)
func (sy *Synapse) VarByIndex(idx int) float32 {
fv := (*float32)(unsafe.Pointer(uintptr(unsafe.Pointer(sy)) + uintptr(4*idx)))
return *fv
}
// VarByName returns variable by name, or error
func (sy *Synapse) VarByName(varNm string) (float32, error) {
i, err := SynapseVarByName(varNm)
if err != nil {
return 0, err
}
return sy.VarByIndex(i), nil
}
func (sy *Synapse) SetVarByIndex(idx int, val float32) {
fv := (*float32)(unsafe.Pointer(uintptr(unsafe.Pointer(sy)) + uintptr(4*idx)))
*fv = val
}
// SetVarByName sets synapse variable to given value
func (sy *Synapse) SetVarByName(varNm string, val float32) error {
i, err := SynapseVarByName(varNm)
if err != nil {
return err
}
sy.SetVarByIndex(i, val)
return nil
} | axon/synapse.go | 0.706899 | 0.563918 | synapse.go | starcoder |
package cron
import (
"strings"
"fmt"
"regexp"
"strconv"
"github.com/pengzj/swift/bitmap"
"time"
)
// CronSchedule is a parsed 6-field cron expression. Each field is a bitmap
// in which bit i set means value i matches (minute 0-59, hour 0-23,
// day 1-31, month 1-12, week 0-6, year up to 9999). Deadline is the last
// instant the spec describes; see HasFinished.
type CronSchedule struct {
	Minute *bitmap.Bitmap
	Hour *bitmap.Bitmap
	Day *bitmap.Bitmap
	Month *bitmap.Bitmap
	Week *bitmap.Bitmap
	Year *bitmap.Bitmap
	Deadline time.Time
}
// CanTrigger reports whether the instant t matches every field bitmap of
// the schedule (minute, hour, day of month, month, weekday and year).
func (schedule *CronSchedule) CanTrigger(t time.Time) bool {
	year, month, day := t.Date()
	// All fields must match; && short-circuits in the same order as the
	// original cascade of early returns.
	return schedule.Minute.Get(t.Minute()) != 0 &&
		schedule.Hour.Get(t.Hour()) != 0 &&
		schedule.Day.Get(day) != 0 &&
		schedule.Month.Get(int(month)) != 0 &&
		schedule.Week.Get(int(t.Weekday())) != 0 &&
		schedule.Year.Get(year) != 0
}
// HasFinished reports whether the current time is past the schedule's
// Deadline, i.e. the schedule can never trigger again.
func (schedule *CronSchedule) HasFinished() bool {
	return time.Now().After(schedule.Deadline)
}
// Parse converts a 6-field cron spec ("minute hour dayofmonth month
// dayofweek year") into a CronSchedule. Each field may be "*", a single
// number, a comma list with inclusive ranges ("1,3-5"), or a repeat
// expression ("*/n"). Field domains: minute 0-59, hour 0-23, day 1-31,
// month 1-12, week 0-7 (0 and 7 both weekend), year current-9999.
func Parse(spec string) (*CronSchedule, error) {
	// NOTE(review): inside the character class " -/" is a range from space
	// to '/', which accidentally admits characters like !"#$%&'()+.
	// Such junk later fails every per-field parser (leaving that bitmap
	// empty), but the class could be tightened.
	reg := regexp.MustCompile("^[0-9 * -/,]+$")
	if !reg.MatchString(spec) {
		return nil, fmt.Errorf("unexpect string, only support digit, *, -, /")
	}
	fields := strings.Fields(spec)
	count := len(fields)
	if count != 6 {
		return nil, fmt.Errorf("expect 6 fields, found: %d fields", count)
	}
	minute := fields[0]
	hour := fields[1]
	dayofmonth := fields[2]
	month := fields[3]
	dayofweek := fields[4]
	year := fields[5]
	schedule := &CronSchedule{
		Minute: bitmap.NewBitmap(),
		Hour:   bitmap.NewBitmap(),
		Day:    bitmap.NewBitmap(),
		Month:  bitmap.NewBitmap(),
		Week:   bitmap.NewBitmap(),
		Year:   bitmap.NewBitmap(),
	}
	// fixed* track the largest matching value of each field; they are
	// combined at the end into the schedule's Deadline.
	var fixedYear, fixedDate, fixedHour, fixedMinute int
	var fixedMonth time.Month
	if Str2Any(minute) {
		for i := 0; i < 60; i++ {
			schedule.Minute.Set(i)
		}
		fixedMinute = 59
	} else if values, err := Str2Values(minute); err == nil {
		for _, value := range values {
			schedule.Minute.Set(value)
			if value > fixedMinute {
				fixedMinute = value
			}
		}
	} else if value, err := Str2Int(minute); err == nil {
		schedule.Minute.Set(value)
		fixedMinute = value
	} else if repeat, err := Str2Repeat(minute); err == nil {
		times := 59 / repeat
		for i := 0; i <= times; i++ {
			schedule.Minute.Set(i * repeat)
		}
		fixedMinute = 59
	}
	if Str2Any(hour) {
		for i := 0; i < 24; i++ {
			schedule.Hour.Set(i)
		}
		fixedHour = 23
	} else if values, err := Str2Values(hour); err == nil {
		for _, value := range values {
			schedule.Hour.Set(value)
			if value > fixedHour {
				fixedHour = value
			}
		}
	} else if value, err := Str2Int(hour); err == nil {
		schedule.Hour.Set(value)
		fixedHour = value
	} else if repeat, err := Str2Repeat(hour); err == nil {
		times := 23 / repeat
		for i := 0; i <= times; i++ {
			schedule.Hour.Set(i * repeat)
		}
		fixedHour = 23
	}
	if Str2Any(dayofmonth) {
		// BUG FIX: days run 1-31 inclusive; the old `i < 31` bound never
		// set day 31, so "*" silently skipped the 31st of a month.
		for i := 1; i <= 31; i++ {
			schedule.Day.Set(i)
		}
		fixedDate = 31
	} else if values, err := Str2Values(dayofmonth); err == nil {
		for _, value := range values {
			schedule.Day.Set(value)
			if value > fixedDate {
				fixedDate = value
			}
		}
	} else if value, err := Str2Int(dayofmonth); err == nil {
		schedule.Day.Set(value)
		fixedDate = value
	} else if repeat, err := Str2Repeat(dayofmonth); err == nil {
		times := 31 / repeat
		for i := 0; i <= times; i++ {
			schedule.Day.Set(i * repeat)
		}
		fixedDate = 31
	}
	if Str2Any(month) {
		for i := 1; i <= 12; i++ {
			schedule.Month.Set(i)
		}
		fixedMonth = time.December
	} else if values, err := Str2Values(month); err == nil {
		for _, value := range values {
			schedule.Month.Set(value)
			if value > int(fixedMonth) {
				fixedMonth = time.Month(value)
			}
		}
	} else if value, err := Str2Int(month); err == nil {
		schedule.Month.Set(value)
		fixedMonth = time.Month(value)
	} else if repeat, err := Str2Repeat(month); err == nil {
		// BUG FIX: mirror the other repeat branches. The old
		// `times := 11 / repeat` with a strict `<` bound dropped the
		// upper multiples (e.g. "*/3" never set months 9 and 12).
		times := 12 / repeat
		for i := 0; i <= times; i++ {
			schedule.Month.Set(i * repeat)
		}
		fixedMonth = time.December
	}
	if Str2Any(dayofweek) {
		for i := 0; i < 7; i++ {
			schedule.Week.Set(i)
		}
	} else if values, err := Str2Values(dayofweek); err == nil {
		for _, value := range values {
			schedule.Week.Set(value)
		}
	} else if value, err := Str2Int(dayofweek); err == nil {
		schedule.Week.Set(value)
	} else if repeat, err := Str2Repeat(dayofweek); err == nil {
		times := 6 / repeat
		for i := 0; i <= times; i++ {
			schedule.Week.Set(i * repeat)
		}
	}
	y, _, _ := time.Now().Date()
	if Str2Any(year) {
		for i := y; i < 10000; i++ {
			schedule.Year.Set(i)
		}
		fixedYear = 9999
	} else if values, err := Str2Values(year); err == nil {
		for _, value := range values {
			schedule.Year.Set(value)
			if value > fixedYear {
				fixedYear = value
			}
		}
	} else if value, err := Str2Int(year); err == nil {
		schedule.Year.Set(value)
		fixedYear = value
	} else if repeat, err := Str2Repeat(year); err == nil {
		// BUG FIX: the old bound `(9999 - y) / repeat` cut the loop short
		// (and usually skipped it entirely, since y exceeds 9999-y).
		// Iterate the multiples of repeat from the current year up to 9999.
		start := y / repeat
		times := 9999 / repeat
		for i := start; i <= times; i++ {
			schedule.Year.Set(i * repeat)
		}
		fixedYear = 9999
	}
	schedule.Deadline = time.Date(fixedYear, fixedMonth, fixedDate, fixedHour, fixedMinute, 0, 0, time.Now().Location())
	return schedule, nil
}
// Str2Int parses a cron field as a single decimal integer
// (a thin wrapper around strconv.Atoi).
func Str2Int(str string) (int, error) {
	return strconv.Atoi(str)
}
// Str2Any reports whether the cron field is the wildcard "*".
func Str2Any(str string) bool {
	// Plain == is the idiomatic equality check; strings.Compare is
	// documented as only for three-way comparison.
	return str == "*"
}
// rangeExpr matches a "min-max" range such as "1-5". Compiled once at
// package scope instead of on every call.
var rangeExpr = regexp.MustCompile("^([0-9]{1,2})-([0-9]{1,2})$")

// Str2Values expands a comma-separated cron field ("1,3-5") into the list
// of integers it denotes. Ranges are inclusive; a reversed range such as
// "5-3" is an error, as is any non-numeric element.
func Str2Values(str string) ([]int, error) {
	var values []int
	for _, val := range strings.Split(str, ",") {
		matches := rangeExpr.FindAllStringSubmatch(val, -1)
		if len(matches) == 0 {
			// Plain integer element.
			s, err := strconv.Atoi(val)
			if err != nil {
				return nil, err
			}
			values = append(values, s)
			continue
		}
		if len(matches) > 1 {
			return nil, fmt.Errorf("unexpect format, only support digit-digit one time")
		}
		min, err := strconv.Atoi(matches[0][1])
		if err != nil {
			return nil, err
		}
		if min < 0 {
			return nil, fmt.Errorf("%s unexcept left %d must larger than 0", val, min)
		}
		max, err := strconv.Atoi(matches[0][2])
		if err != nil {
			return nil, err
		}
		if min > max {
			return nil, fmt.Errorf("%s unexcept left %d must smaller than right %d", val, min, max)
		}
		for i := min; i <= max; i++ {
			values = append(values, i)
		}
	}
	return values, nil
}
// repeatExpr matches a "*/n" repeat expression with a 1-2 digit n.
// Compiled once at package scope instead of on every call.
var repeatExpr = regexp.MustCompile("^[*]{1}/([0-9]{1,2})$")

// Str2Repeat parses a "*/n" cron field and returns n. A zero step or any
// other shape of input is an error.
func Str2Repeat(str string) (int, error) {
	vals := repeatExpr.FindAllStringSubmatch(str, -1)
	if len(vals) == 0 {
		return 0, fmt.Errorf("invalid format %s", str)
	}
	repeat, err := strconv.Atoi(vals[0][1])
	if err != nil {
		return 0, err
	}
	if repeat == 0 {
		return 0, fmt.Errorf("can't repeat 0")
	}
	return repeat, nil
}
package stringset
import (
"fmt"
"reflect"
"strings"
)
// testVersion identifies the version of the external test interface this
// implementation targets (referenced by the accompanying test suite).
const testVersion = 3

// Set represents a set of unique strings.
type Set map[string]bool

// New creates an empty Set.
func New() Set {
	return Set{}
}

// NewFromSlice creates a Set from the contents of the slice.
// A string occurring multiple times in the slice appears once in the Set.
func NewFromSlice(slice []string) Set {
	set := New()
	for _, element := range slice {
		set[element] = true
	}
	return set
}

// Add adds the element to the Set.
func (s Set) Add(element string) {
	s[element] = true
}

// Delete deletes the element from the Set.
func (s Set) Delete(element string) {
	delete(s, element)
}

// Has checks if the element is in the Set.
func (s Set) Has(element string) bool {
	return s[element]
}

// IsEmpty checks if the Set is empty.
func (s Set) IsEmpty() bool {
	return len(s) == 0
}

// Len returns the number of elements in the Set.
func (s Set) Len() int {
	return len(s)
}

// Slice converts the Set to a slice; element order is unspecified.
func (s Set) Slice() []string {
	elements := make([]string, 0, len(s))
	for element := range s {
		elements = append(elements, element)
	}
	return elements
}

// String renders the Set as {"a", "b"} with each element quoted;
// element order is unspecified.
func (s Set) String() string {
	quoted := make([]string, 0, len(s))
	for element := range s {
		quoted = append(quoted, fmt.Sprintf("%q", element))
	}
	return "{" + strings.Join(quoted, ", ") + "}"
}

// Equal checks if the Sets contain the same elements. Order does not matter.
func Equal(s1, s2 Set) bool {
	return reflect.DeepEqual(s1, s2)
}

// Subset checks if all elements of subset are contained by set.
func Subset(subset, set Set) bool {
	for element := range subset {
		if !set.Has(element) {
			return false
		}
	}
	return true
}

// Disjoint checks if no element is contained by both Sets.
func Disjoint(s1, s2 Set) bool {
	for element := range s1 {
		if s2.Has(element) {
			return false
		}
	}
	return true
}

// Intersection returns a Set of the elements present in both given Sets.
func Intersection(s1, s2 Set) Set {
	result := New()
	for element := range s1 {
		if s2.Has(element) {
			result[element] = true
		}
	}
	return result
}

// Union returns a Set of the elements present in either given Set.
func Union(s1, s2 Set) Set {
	union := New()
	for element := range s1 {
		union[element] = true
	}
	for element := range s2 {
		union[element] = true
	}
	return union
}
// Difference returns a Set representing the difference of s2 from s1.
func Difference(s1, s2 Set) Set {
difference := New()
for element := range s1 {
if !s2[element] {
difference.Add(element)
}
}
return difference
}
// SymmetricDifference returns a Set representing the difference of the given Sets.
func SymmetricDifference(s1, s2 Set) Set {
return Union(Difference(s1, s2), Difference(s2, s1))
} | solutions/go/custom-set/custom_set.go | 0.872822 | 0.422147 | custom_set.go | starcoder |
package merge
import (
"fmt"
"github.com/attic-labs/noms/go/d"
"github.com/attic-labs/noms/go/types"
)
type applyFunc func(candidate, types.ValueChanged, types.Value) candidate
// threeWayOrderedSequenceMerge merges candidates a and b against their common
// ancestor parent. Each candidate is diffed against parent in its own
// goroutine, and the two key-ordered change streams are walked in lockstep:
// changes unique to one side are applied directly, while changes both sides
// made at the same key are reconciled by mergeChanges.
func (m *merger) threeWayOrderedSequenceMerge(a, b, parent candidate, apply applyFunc, path types.Path) (types.Value, error) {
	aChangeChan, bChangeChan := make(chan types.ValueChanged), make(chan types.ValueChanged)
	aStopChan, bStopChan := make(chan struct{}, 1), make(chan struct{}, 1)
	// Each diff goroutine closes its change channel when exhausted, which is
	// how the loop below detects completion.
	go func() {
		a.diff(parent, aChangeChan, aStopChan)
		close(aChangeChan)
	}()
	go func() {
		b.diff(parent, bChangeChan, bStopChan)
		close(bChangeChan)
	}()
	// Ensure the diff goroutines can always finish, even on early return.
	defer stopAndDrain(aStopChan, aChangeChan)
	defer stopAndDrain(bStopChan, bChangeChan)
	merged := parent
	aChange, bChange := types.ValueChanged{}, types.ValueChanged{}
	for {
		// Get the next change from both a and b. If either diff(a, parent) or diff(b, parent) is complete, aChange or bChange will get an empty types.ValueChanged containing a nil Value. Generally, though, this allows us to proceed through both diffs in (key) order, considering the "current" change from both diffs at the same time.
		if aChange.V == nil {
			aChange = <-aChangeChan
		}
		if bChange.V == nil {
			bChange = <-bChangeChan
		}
		// Both channels are producing zero values, so we're done.
		if aChange.V == nil && bChange.V == nil {
			break
		}
		// Since diff generates changes in key-order, and we never skip over a change without processing it, we can simply compare the keys at which aChange and bChange occurred to determine if either is safe to apply to the merge result without further processing. This is because if, e.g. aChange.V.Less(bChange.V), we know that the diff of b will never generate a change at that key. If it was going to, it would have done so on an earlier iteration of this loop and been processed at that time.
		// It's also obviously OK to apply a change if only one diff is generating any changes, e.g. aChange.V is non-nil and bChange.V is nil.
		if aChange.V != nil && (bChange.V == nil || aChange.V.Less(bChange.V)) {
			merged = apply(merged, aChange, a.get(aChange.V))
			aChange = types.ValueChanged{}
			continue
		} else if bChange.V != nil && (aChange.V == nil || bChange.V.Less(aChange.V)) {
			merged = apply(merged, bChange, b.get(bChange.V))
			bChange = types.ValueChanged{}
			continue
		}
		d.PanicIfTrue(!aChange.V.Equals(bChange.V), "Diffs have skewed!") // Sanity check.
		change, mergedVal, err := m.mergeChanges(aChange, bChange, a, b, parent, apply, path)
		if err != nil {
			return parent.getValue(), err
		}
		merged = apply(merged, change, mergedVal)
		// Both sides' changes at this key are now consumed; fetch fresh ones.
		aChange, bChange = types.ValueChanged{}, types.ValueChanged{}
	}
	return merged.getValue(), nil
}
// mergeChanges reconciles a pair of changes that a and b both made at the
// same key. Same-kind removes or identical new values are applied as-is;
// mergeable compound values are merged recursively; everything else is
// delegated to the resolver and, failing that, reported as a conflict.
func (m *merger) mergeChanges(aChange, bChange types.ValueChanged, a, b, p candidate, apply applyFunc, path types.Path) (change types.ValueChanged, mergedVal types.Value, err error) {
	path = a.pathConcat(aChange, path)
	aValue, bValue := a.get(aChange.V), b.get(bChange.V)
	// If the two diffs generate different kinds of changes at the same key, conflict.
	if aChange.ChangeType != bChange.ChangeType {
		if change, mergedVal, ok := m.resolve(aChange.ChangeType, bChange.ChangeType, aValue, bValue, path); ok {
			return types.ValueChanged{change, aChange.V}, mergedVal, nil
		}
		return change, nil, newMergeConflict("Conflict:\n%s\nvs\n%s\n", describeChange(aChange), describeChange(bChange))
	}
	if aChange.ChangeType == types.DiffChangeRemoved || aValue.Equals(bValue) {
		// If both diffs generated a remove, or if the new value is the same in both, merge is fine.
		return aChange, aValue, nil
	}
	// There's one case that might still be OK even if aValue and bValue differ: different, but mergeable, compound values of the same type being added/modified at the same key, e.g. a Map being added to both a and b. If either is a primitive, or Values of different Kinds were added, though, we're in conflict.
	if !unmergeable(aValue, bValue) {
		// TODO: Add concurrency.
		var err error
		if mergedVal, err = m.threeWay(aValue, bValue, p.get(aChange.V), path); err == nil {
			return aChange, mergedVal, nil
		}
		return change, nil, err
	}
	if change, mergedVal, ok := m.resolve(aChange.ChangeType, bChange.ChangeType, aValue, bValue, path); ok {
		return types.ValueChanged{change, aChange.V}, mergedVal, nil
	}
	return change, nil, newMergeConflict("Conflict:\n%s = %s\nvs\n%s = %s", describeChange(aChange), types.EncodedValue(aValue), describeChange(bChange), types.EncodedValue(bValue))
}
// stopAndDrain signals a diff goroutine to stop by closing its stop channel,
// then drains its change channel so the goroutine can finish any pending
// send and exit (the sender closes the channel, ending the range).
func stopAndDrain(stop chan<- struct{}, drain <-chan types.ValueChanged) {
	close(stop)
	for range drain {
	}
}
func describeChange(change types.ValueChanged) string {
op := ""
switch change.ChangeType {
case types.DiffChangeAdded:
op = "added"
case types.DiffChangeModified:
op = "modded"
case types.DiffChangeRemoved:
op = "removed"
}
return fmt.Sprintf("%s %s", op, types.EncodedValue(change.V))
} | go/merge/three_way_ordered_sequence.go | 0.533641 | 0.55652 | three_way_ordered_sequence.go | starcoder |
package infer
// Gradient ascent algorithms.
import (
"bitbucket.org/dtolpin/infergo/model"
"math"
)
// Grad is the interface of gradient-based
// optimizers. Step makes a single step over
// parameters in the gradient direction.
type Grad interface {
	// Step observes the model at x, updates x in place, and returns the
	// log-likelihood and gradient computed at the pre-update point.
	Step(m model.Model, x []float64) (ll float64, grad []float64)
}
// Gradient ascent with momentum
// (https://www.nature.com/articles/323533a0). If the momentum
// factor is not set, and thus 0, reduces to vanilla gradient
// ascent.
type Momentum struct {
	Rate float64 // learning rate
	Decay float64 // rate decay
	Gamma float64 // gradient momentum factor
	u []float64 // last update, lazily allocated on the first Step
}
// Step implements the Grad interface: it observes the model at x, then
// moves x along the gradient, blending in the previous update scaled by
// Gamma, and finally decays the learning rate.
func (opt *Momentum) Step(
	m model.Model,
	x []float64,
) (
	ll float64,
	grad []float64,
) {
	ll = m.Observe(x)
	grad = model.Gradient(m)
	if opt.u == nil {
		// Lazily allocate the per-parameter momentum, initialized to zeros.
		opt.u = make([]float64, len(x))
	}
	for i := range x {
		update := opt.Rate*grad[i] + opt.Gamma*opt.u[i]
		x[i] += update
		opt.u[i] = update
	}
	opt.Rate *= opt.Decay
	return ll, grad
}
// Adam (https://arxiv.org/abs/1412.6980).
type Adam struct {
	Rate float64 // learning rate
	Beta1 float64 // first momentum factor
	Beta2 float64 // second momentum factor
	Eps float64 // stabilizer
	u []float64 // first momentum
	v []float64 // second momentum
	b1t float64 // Beta1^t
	b2t float64 // Beta2^t
}
// Step implements the Grad interface: it observes the model at x and
// applies one Adam update, maintaining bias-corrected first and second
// gradient moments per parameter.
func (opt *Adam) Step(
	m model.Model,
	x []float64,
) (
	ll float64,
	grad []float64,
) {
	ll, grad = m.Observe(x), model.Gradient(m)
	if opt.u == nil {
		opt.setDefaults()
		// The momenta are initialized to zeros, and the running
		// Beta^t products start at Beta (t = 1).
		opt.u = make([]float64, len(x))
		opt.v = make([]float64, len(x))
		opt.b1t = opt.Beta1
		opt.b2t = opt.Beta2
	}
	for i := range x {
		// Compute the new momenta.
		u := opt.Beta1*opt.u[i] + (1-opt.Beta1)*grad[i]
		v := opt.Beta2*opt.v[i] + (1-opt.Beta2)*grad[i]*grad[i]
		opt.u[i] = u
		opt.v[i] = v
		// Correct the bias (stored momenta stay uncorrected; only the
		// local copies used for the update are rescaled).
		u /= (1 - opt.b1t)
		v /= (1 - opt.b2t)
		// Update the parameters.
		x[i] += opt.Rate / (math.Sqrt(v) + opt.Eps) * u
	}
	// Update momentum factors for the next step.
	opt.b1t *= opt.Beta1
	opt.b2t *= opt.Beta2
	return ll, grad
}
// setDefaults fills in the conventional Adam hyperparameters (from the
// original paper) for any field still at its zero value.
func (opt *Adam) setDefaults() {
	const (
		defaultBeta1 = 0.9
		defaultBeta2 = 0.999
		defaultEps   = 1e-8
	)
	if opt.Beta1 == 0 {
		opt.Beta1 = defaultBeta1
	}
	if opt.Beta2 == 0 {
		opt.Beta2 = defaultBeta2
	}
	if opt.Eps == 0 {
		opt.Eps = defaultEps
	}
}
// Optimize wraps a gradient-based optimizer into
// an optimization loop with early stopping if a
// plateau is reached.
// It runs at most niter steps and stops early once the log-likelihood has
// improved by at most eps for nplateau consecutive steps. It returns the
// number of steps taken, the initial log-likelihood, and the best
// log-likelihood seen; x is left at the best parameters found.
// NOTE(review): the loop condition is iter != niter, so a negative niter
// never terminates by count — confirm callers always pass niter >= 0.
func Optimize(
	opt Grad,
	m model.Model, x []float64,
	niter, nplateau int,
	eps float64,
) (
	iter int,
	ll0, ll float64,
) {
	ll0, _ = opt.Step(m, x)
	iter, ll = 0, ll0
	plateau, llprev := 0, ll0
	// Evolve x_, keep x_ with the highest log-likelihood in x
	x_ := make([]float64, len(x))
	copy(x_, x)
	for iter != niter {
		iter++
		ll_, _ := opt.Step(m, x_)
		// Store x_ in x if log-likelihood increased
		if ll_ > ll {
			copy(x, x_)
			ll = ll_
		}
		// Stop early if the optimization is stuck
		if ll_-llprev <= eps {
			plateau++
			if plateau == nplateau {
				break
			}
		} else {
			plateau = 0
		}
		llprev = ll_
	}
	return iter, ll0, ll
}
package ast
import (
"bytes"
"fmt"
"io"
"strconv"
"strings"
)
const (
	// EPSILON is the tolerance used for approximate equality of reals and
	// of the real/imaginary parts of complex numbers.
	EPSILON RealNum = 0.00000001
)
// Number is the arithmetic interface shared by all members of the numeric
// tower (IntNum, RealNum, RatNum, CompNum, NAN). Each binary operation
// promotes its operands to the wider of the two representations.
type Number interface {
	Add(Number) Number
	Sub(Number) Number
	Mul(Number) Number
	Div(Number) Number
	Expr
}
// IntNum is an exact integer.
type IntNum int
// RealNum is an inexact floating-point number.
type RealNum float64
// RatNum is an exact rational. Numerator and Denominator are expected to
// hold IntNum values (see MakeRatnum, which normalizes and reduces them).
type RatNum struct {
	Numerator Number
	Denominator Number
}
// CompNum is an inexact complex number.
type CompNum complex128
// NAN is the not-a-number sentinel type; all arithmetic on it yields NanValue.
type NAN struct{}
// NanValue is the shared NAN instance returned by failed operations.
var NanValue = NAN{}
// Print writes the literal text "NAN".
func (x NAN) Print(output io.Writer) {
	output.Write([]byte("NAN"))
}
// Add on NAN propagates NanValue regardless of the other operand.
func (x NAN) Add(y Number) Number {
	return NanValue
}
// Sub on NAN propagates NanValue regardless of the other operand.
func (x NAN) Sub(y Number) Number {
	return NanValue
}
// Mul on NAN propagates NanValue regardless of the other operand.
func (x NAN) Mul(y Number) Number {
	return NanValue
}
// Div on NAN propagates NanValue regardless of the other operand.
func (x NAN) Div(y Number) Number {
	return NanValue
}
/*
type BigNum struct {
	Sign bool
	Size int
	Values []int
}
*/
//IntNum

// Print writes the integer in decimal form.
func (x IntNum) Print(output io.Writer) {
	output.Write([]byte(fmt.Sprintf("%d", x)))
}

// intToComplex widens an integer to a complex number with zero imaginary part.
func intToComplex(a IntNum) CompNum {
	return CompNum(complex(float64(a), 0))
}

// Add returns x+y, promoting the result to the wider of the two operand
// types. Unrecognized operands (e.g. NAN) propagate NanValue.
func (x IntNum) Add(y Number) Number {
	switch y := y.(type) {
	case IntNum:
		return x + y
	case RatNum:
		return MakeRatnum(x, IntNum(1)).Add(y)
	case RealNum:
		return RealNum(x) + y
	case CompNum:
		return intToComplex(x) + y
	}
	return NanValue
}

// Sub returns x-y, implemented as x + (-y).
func (x IntNum) Sub(y Number) Number {
	return x.Add(Negate(y))
}

// Mul returns x*y with the same promotion rules as Add. The default case
// previously returned IntNum(1), which silently turned x*NAN into 1; it
// now propagates NanValue, consistent with Add.
func (x IntNum) Mul(y Number) Number {
	switch y := y.(type) {
	case IntNum:
		return x * y
	case RatNum:
		return MakeRatnum(x, IntNum(1)).Mul(y)
	case RealNum:
		return RealNum(x) * y
	case CompNum:
		return intToComplex(x) * y
	default:
		return NanValue
	}
}

// Div returns x/y, implemented as x * (1/y).
func (x IntNum) Div(y Number) Number {
	return x.Mul(inverse(y))
}
//RealNum

// Print writes the real number in fixed-point decimal form.
func (x RealNum) Print(output io.Writer) {
	output.Write([]byte(fmt.Sprintf("%f", x)))
}

// realToComplex widens a real number to a complex number with zero
// imaginary part.
func realToComplex(a RealNum) CompNum {
	return CompNum(complex(a, 0))
}

// Add returns x+y, promoting to complex when y is complex. Unrecognized
// operands (e.g. NAN) now propagate NanValue; the default case previously
// returned IntNum(0), silently turning x+NAN into 0.
func (x RealNum) Add(y Number) Number {
	switch y := y.(type) {
	case IntNum:
		return x + RealNum(y)
	case RatNum:
		return x + y.ToReal()
	case RealNum:
		return x + y
	case CompNum:
		return realToComplex(x) + y
	default:
		return NanValue
	}
}

// Sub returns x-y, implemented as x + (-y).
func (x RealNum) Sub(y Number) Number {
	return x.Add(Negate(y))
}

// Mul returns x*y with the same promotion rules as Add. The default case
// previously returned IntNum(1), silently turning x*NAN into 1; it now
// propagates NanValue.
func (x RealNum) Mul(y Number) Number {
	switch y := y.(type) {
	case IntNum:
		return x * RealNum(y)
	case RatNum:
		return x * y.ToReal()
	case RealNum:
		return x * y
	case CompNum:
		return realToComplex(x) * y
	default:
		return NanValue
	}
}

// Div returns x/y, implemented as x * (1/y).
func (x RealNum) Div(y Number) Number {
	return x.Mul(inverse(y))
}
//RatNum

// Print writes the rational as "numerator/denominator".
func (x RatNum) Print(output io.Writer) {
	output.Write([]byte(fmt.Sprintf("%d/%d", x.Numerator, x.Denominator)))
}

// ratnumAdd adds two rationals by cross-multiplying and re-reducing.
func ratnumAdd(a RatNum, b RatNum) Number {
	return MakeRatnum(a.Numerator.Mul(b.Denominator).Add(b.Numerator.Mul(a.Denominator)), a.Denominator.Mul(b.Denominator))
}

// ratnumMul multiplies two rationals component-wise and re-reduces.
func ratnumMul(a RatNum, b RatNum) Number {
	return MakeRatnum(a.Numerator.Mul(b.Numerator), a.Denominator.Mul(b.Denominator))
}

// ToReal converts the rational to its floating-point approximation.
func (x RatNum) ToReal() RealNum {
	return RealNum(float64(x.Numerator.(IntNum)) / float64(x.Denominator.(IntNum)))
}

// Add returns x+y. Integers are widened to rationals; real and complex
// operands force conversion via ToReal. Unrecognized operands (e.g. NAN)
// now propagate NanValue instead of the former 0/0 rational sentinel.
func (x RatNum) Add(y Number) Number {
	switch y := y.(type) {
	case IntNum:
		return ratnumAdd(x, MakeRatnum(y, IntNum(1)))
	case RealNum:
		return x.ToReal().Add(y)
	case RatNum:
		return ratnumAdd(x, y)
	case CompNum:
		return y.Add(x.ToReal())
	}
	return NanValue
}

// Sub returns x-y, implemented as x + (-y).
func (x RatNum) Sub(y Number) Number {
	return x.Add(Negate(y))
}

// Mul returns x*y with the same promotion rules as Add; unrecognized
// operands now propagate NanValue instead of the 0/0 rational.
func (x RatNum) Mul(y Number) Number {
	switch y := y.(type) {
	case IntNum:
		return ratnumMul(x, MakeRatnum(y, IntNum(1)))
	case RealNum:
		return x.ToReal().Mul(y)
	case RatNum:
		return ratnumMul(x, y)
	case CompNum:
		return y.Mul(x.ToReal())
	}
	return NanValue
}

// Div returns x/y, implemented as x * (1/y).
func (x RatNum) Div(y Number) Number {
	return x.Mul(inverse(y))
}
// gcd returns the greatest common divisor of |a| and |b| using the
// Euclidean algorithm with modulo, which runs in O(log min(|a|,|b|))
// iterations instead of the previous subtraction form's O(max/min).
// gcd(0, b) = |b| and gcd(a, 0) = |a|; the result is always non-negative.
// (The only in-file caller, MakeRatnum, passes a positive denominator, so
// this matches the old behavior for every reachable input.)
func gcd(a int64, b int64) int64 {
	if a < 0 {
		a = -a
	}
	if b < 0 {
		b = -b
	}
	for b != 0 {
		a, b = b, a%b
	}
	return a
}
// Negate returns -number, implemented as number * -1 so that type
// promotion is handled by the operand's own Mul.
func Negate(number Number) Number {
	return number.Mul(IntNum(-1))
}
// inverse returns 1/a for each numeric kind; unknown kinds (e.g. NAN)
// yield NanValue.
// NOTE(review): inverse(IntNum(0)) produces the 0/0 rational sentinel via
// MakeRatnum's zero-denominator guard, and inverse(CompNum(0)) divides by
// zero — confirm callers guard against zero divisors.
func inverse(a Number) Number {
	switch a.(type) {
	case IntNum:
		return MakeRatnum(IntNum(1), a.(IntNum))
	case RealNum:
		return RealNum(1.0) / a.(RealNum)
	case RatNum:
		ra := a.(RatNum)
		return MakeRatnum(ra.Denominator, ra.Numerator)
	case CompNum:
		return CompNum(1) / a.(CompNum)
	}
	return NanValue
}
// MakeRatnum builds the rational numer/denom in lowest terms. The
// denominator is normalized to be positive (the sign moves to the
// numerator) and both parts are divided by their gcd. A zero denominator
// yields the 0/0 sentinel, matching the previous behavior.
// TODO: when the reduced denominator is 1, an IntNum should be returned.
// TODO: a zero denominator should return NAN.
func MakeRatnum(numer Number, denom Number) RatNum {
	n := numer.(IntNum)
	d := denom.(IntNum)
	if d == 0 {
		return RatNum{IntNum(0), IntNum(0)}
	}
	// Keep the denominator positive so the sign lives on the numerator.
	if d < 0 {
		n, d = -n, -d
	}
	// d > 0 here, so gcd >= 1 and the divisions are safe. Reducing by a
	// gcd of 1 is a no-op, which removes the old three-way branching on
	// g == 1 / negated without changing any produced value.
	g := IntNum(gcd(int64(n), int64(d)))
	return RatNum{n / g, d / g}
}
//CompNum

// Print writes the complex number using Go's default %v formatting.
func (x CompNum) Print(output io.Writer) {
	output.Write([]byte(fmt.Sprintf("%v", x)))
}

// Add returns x+y, widening the other operand to complex; unknown operand
// kinds (e.g. NAN) yield NanValue.
func (x CompNum) Add(y Number) Number {
	switch y := y.(type) {
	case IntNum:
		return x + intToComplex(y)
	case RatNum:
		return x + realToComplex(y.ToReal())
	case RealNum:
		return x + realToComplex(y)
	case CompNum:
		return x + y
	}
	return NanValue
}

// Sub returns x-y, implemented as x + (-y).
func (x CompNum) Sub(y Number) Number {
	return x.Add(Negate(y))
}

// Mul returns x*y, widening the other operand to complex; unknown operand
// kinds yield NanValue.
func (x CompNum) Mul(y Number) Number {
	switch y := y.(type) {
	case IntNum:
		return x * intToComplex(y)
	case RatNum:
		return x * realToComplex(y.ToReal())
	case RealNum:
		return x * realToComplex(y)
	case CompNum:
		return x * y
	}
	return NanValue
}

// Div returns x/y, implemented as x * (1/y).
func (x CompNum) Div(y Number) Number {
	return x.Mul(inverse(y))
}
/*
//BigNum
func (x BigNum) Print(output io.Writer) {
if !x.Sign {
output.Write([]byte("-"))
}
for i := len(x.Values) - 1; i >= 0; i-- {
output.Write([]byte(fmt.Sprintf("%08lx", x.Values[i])))
}
}
func (x BigNum) Add(args []Number) Number {
}
func (x BigNum) Sub(args []Number) Number {
}
func (x BigNum) Mul(args []Number) Number {
}
func (x BigNum) Mul(args []Number) Number {
}
*/
// StringToNumber parses s into the appropriate Number kind (integer,
// rational, or real). Radix and exactness prefixes are not yet parsed;
// base 10 and exact arithmetic are assumed.
func StringToNumber(s string) Number {
	//TODO: check radix
	radix := 10
	//TODO: check exact
	exact := true
	return stringToNumberImpl(s, radix, exact)
}
// NumberToString renders any expression to a StringExpr by capturing its
// Print output in a buffer.
func NumberToString(e Expr) StringExpr {
	var buf bytes.Buffer
	e.Print(&buf)
	return StringExpr(buf.String())
}
// stringToNumberImpl dispatches on the shape of s: "a/b" parses as a
// rational (or its real approximation when exact is false), a string
// containing '.' parses as a real, and anything else as an integer in the
// given radix. Malformed input yields NanValue.
// NOTE(review): the real branch ignores radix (ParseFloat is always base
// 10), and the complex forms sketched in the comments below are not yet
// implemented — confirm before relying on either.
func stringToNumberImpl(s string, radix int, exact bool) Number {
	// -> complex (~i or @)
	//case strings.ContainsRune(s, '@'):
	//case strings.HasSuffix(s, "i"):
	if strings.ContainsRune(s, '/') { //ratnum
		nums := strings.Split(s, "/")
		if len(nums) != 2 {
			return NanValue
		}
		n1 := stringToNumberImpl(nums[0], radix, exact).(IntNum)
		n2 := stringToNumberImpl(nums[1], radix, exact).(IntNum)
		r := MakeRatnum(n1, n2)
		if exact {
			return r
		} else {
			return r.ToReal()
		}
	} else if strings.ContainsRune(s, '.') {
		//realnum
		f, err := strconv.ParseFloat(s, 64)
		if err != nil {
			return NanValue
		}
		return RealNum(f)
	} else {
		//integer
		i, err := strconv.ParseInt(s, radix, 64)
		if err != nil {
			return NanValue
		}
		return IntNum(i)
	}
}
// eqInt reports exact equality of two integers.
func eqInt(a IntNum, b IntNum) bool {
	return a == b
}

// eqReal reports approximate equality of two reals, within EPSILON in
// either direction.
func eqReal(a RealNum, b RealNum) bool {
	diff := a - b
	return diff < EPSILON && -diff < EPSILON
}

// eqRatnum reports equality of two rationals by component-wise comparison;
// both are assumed to be in lowest terms (as produced by MakeRatnum).
func eqRatnum(a RatNum, b RatNum) bool {
	return a.Numerator == b.Numerator && a.Denominator == b.Denominator
}

// eqComp reports approximate equality of two complex numbers, comparing
// the real and imaginary parts within EPSILON.
func eqComp(a CompNum, b CompNum) bool {
	ca, cb := complex128(a), complex128(b)
	return eqReal(RealNum(real(ca)), RealNum(real(cb))) &&
		eqReal(RealNum(imag(ca)), RealNum(imag(cb)))
}
// EqNum reports whether a and b are numerically equal, promoting mixed
// operands to the wider representation. Real and complex comparisons are
// approximate (within EPSILON). Comparisons involving NAN or any other
// unknown kind report false; previously an unknown second operand fell
// through to an unconditional b.(CompNum) assertion and panicked.
func EqNum(a Number, b Number) bool {
	switch a := a.(type) {
	case IntNum:
		switch b := b.(type) {
		case IntNum:
			return eqInt(a, b)
		case RealNum:
			return eqReal(RealNum(a), b)
		case RatNum:
			return eqReal(RealNum(a), b.ToReal())
		case CompNum:
			return eqComp(intToComplex(a), b)
		}
	case RealNum:
		switch b := b.(type) {
		case IntNum:
			return eqReal(a, RealNum(b))
		case RealNum:
			return eqReal(a, b)
		case RatNum:
			return eqReal(a, b.ToReal())
		case CompNum:
			return eqComp(realToComplex(a), b)
		}
	case RatNum:
		switch b := b.(type) {
		case IntNum:
			return eqRatnum(a, MakeRatnum(b, IntNum(1)))
		case RealNum:
			return eqReal(a.ToReal(), b)
		case RatNum:
			return eqRatnum(a, b)
		case CompNum:
			return eqComp(realToComplex(a.ToReal()), b)
		}
	case CompNum:
		switch b := b.(type) {
		case IntNum:
			return eqComp(a, intToComplex(b))
		case RealNum:
			return eqComp(a, realToComplex(b))
		case RatNum:
			return eqComp(a, realToComplex(b.ToReal()))
		case CompNum:
			return eqComp(a, b)
		}
	}
	return false
}
// GTNum reports whether a > b. Ordering is defined only for the real-valued
// kinds (IntNum, RealNum, RatNum); any comparison involving a complex
// number, NAN, or another unknown kind reports false. Previously a RatNum
// compared against a CompNum fell through to an unconditional b.(RatNum)
// type assertion and panicked.
func GTNum(a Number, b Number) bool {
	switch a := a.(type) {
	case IntNum:
		switch b := b.(type) {
		case IntNum:
			return a > b
		case RealNum:
			return RealNum(a) > b
		case RatNum:
			return RealNum(a) > b.ToReal()
		}
	case RealNum:
		switch b := b.(type) {
		case IntNum:
			return a > RealNum(b)
		case RealNum:
			return a > b
		case RatNum:
			return a > b.ToReal()
		}
	case RatNum:
		switch b := b.(type) {
		case IntNum:
			return a.ToReal() > RealNum(b)
		case RealNum:
			return a.ToReal() > b
		case RatNum:
			return a.ToReal() > b.ToReal()
		}
	}
	return false
}
// GTENum reports whether a >= b under the same ordering rules as GTNum.
func GTENum(a Number, b Number) bool {
	if GTNum(a, b) {
		return true
	}
	return EqNum(a, b)
}
// LTNum reports whether a < b. Ordering is defined only for the real-valued
// kinds (IntNum, RealNum, RatNum); any comparison involving a complex
// number, NAN, or another unknown kind reports false. Previously a RatNum
// compared against a CompNum fell through to an unconditional b.(RatNum)
// type assertion and panicked (mirroring the GTNum bug).
func LTNum(a Number, b Number) bool {
	switch a := a.(type) {
	case IntNum:
		switch b := b.(type) {
		case IntNum:
			return a < b
		case RealNum:
			return RealNum(a) < b
		case RatNum:
			return RealNum(a) < b.ToReal()
		}
	case RealNum:
		switch b := b.(type) {
		case IntNum:
			return a < RealNum(b)
		case RealNum:
			return a < b
		case RatNum:
			return a < b.ToReal()
		}
	case RatNum:
		switch b := b.(type) {
		case IntNum:
			return a.ToReal() < RealNum(b)
		case RealNum:
			return a.ToReal() < b
		case RatNum:
			return a.ToReal() < b.ToReal()
		}
	}
	return false
}
func LTENum(a Number, b Number) bool {
return LTNum(a, b) || EqNum(a, b)
} | ast/number.go | 0.511229 | 0.418935 | number.go | starcoder |
package newstorage
import (
"errors"
"testing"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
"github.com/Universal-Health-Chain/aries-framework-go/pkg/newstorage"
)
// TestAll tests common storage functionality.
// These tests demonstrate behaviour that is expected to be consistent across store implementations.
func TestAll(t *testing.T, provider newstorage.Provider) {
	t.Run("Provider: GetOpenStores", func(t *testing.T) {
		TestProviderGetOpenStores(t, provider)
	}) // Run this first so the store count is the same every time.
	t.Run("Provider: open store and set/get config", func(t *testing.T) {
		TestProviderOpenStoreSetGetConfig(t, provider)
	})
	t.Run("Store", func(t *testing.T) {
		t.Run("Put and Get", func(t *testing.T) {
			TestPutGet(t, provider)
		})
		t.Run("GetTags", func(t *testing.T) {
			TestStoreGetTags(t, provider)
		})
		t.Run("GetBulk", func(t *testing.T) {
			TestStoreGetBulk(t, provider)
		})
		t.Run("Delete", func(t *testing.T) {
			TestStoreDelete(t, provider)
		})
		t.Run("Query", func(t *testing.T) {
			TestStoreQuery(t, provider)
		})
		t.Run("Batch", func(t *testing.T) {
			TestStoreBatch(t, provider)
		})
		t.Run("Flush", func(t *testing.T) {
			TestStoreFlush(t, provider)
		})
		t.Run("Close", func(t *testing.T) {
			TestStoreClose(t, provider)
		})
	})
	t.Run("Provider: close", func(t *testing.T) { // Run this last since it'll end up destroying the provider.
		TestProviderClose(t, provider)
	})
}
// TestProviderOpenStoreSetGetConfig tests common Provider OpenStore, SetStoreConfig, and GetStoreConfig functionality.
// It verifies config round-trips for an open store and that both Set and
// Get fail with ErrStoreNotFound for a store that was never opened.
func TestProviderOpenStoreSetGetConfig(t *testing.T, provider newstorage.Provider) {
	config := newstorage.StoreConfiguration{TagNames: []string{"tagName1", "tagName2", "tagName3"}}
	testStoreName := randomStoreName()
	t.Run("Successfully open store, set store config and then get store config", func(t *testing.T) {
		store, err := provider.OpenStore(testStoreName)
		require.NoError(t, err)
		require.NotNil(t, store)
		err = provider.SetStoreConfig(testStoreName, config)
		require.NoError(t, err)
		retrievedConfig, err := provider.GetStoreConfig(testStoreName)
		require.NoError(t, err)
		// Tag-name order is implementation-defined, so compare as sets.
		require.True(t, equalTagNamesAnyOrder(config.TagNames, retrievedConfig.TagNames),
			"Unexpected tag names")
	})
	t.Run("Attempt to set config without opening store first", func(t *testing.T) {
		err := provider.SetStoreConfig("NonExistentStore", config)
		require.True(t, errors.Is(err, newstorage.ErrStoreNotFound), "Got unexpected error or no error")
	})
	t.Run("Attempt to get config without opening store first", func(t *testing.T) {
		config, err := provider.GetStoreConfig("NonExistentStore")
		require.True(t, errors.Is(err, newstorage.ErrStoreNotFound), "Got unexpected error or no error")
		require.Empty(t, config)
	})
}
// TestProviderGetOpenStores tests common Provider GetOpenStores functionality.
// This test assumes that the provider passed in has never had stores created under it before.
func TestProviderGetOpenStores(t *testing.T, provider newstorage.Provider) {
	// No stores have been created yet, so the slice should be empty or nil.
	openStores := provider.GetOpenStores()
	require.Len(t, openStores, 0)
	_, err := provider.OpenStore("testStore1")
	require.NoError(t, err)
	openStores = provider.GetOpenStores()
	require.Len(t, openStores, 1)
	_, err = provider.OpenStore("testStore2")
	require.NoError(t, err)
	openStores = provider.GetOpenStores()
	require.Len(t, openStores, 2)
	// Now we will attempt to open a previously opened store. Since it was opened previously, we expect that the
	// number of open stores returned by GetOpenStores() to not change.
	_, err = provider.OpenStore("testStore2")
	require.NoError(t, err)
	openStores = provider.GetOpenStores()
	require.Len(t, openStores, 2)
}
// TestProviderClose tests common Provider Close functionality: closing the
// provider must succeed without error.
func TestProviderClose(t *testing.T, provider newstorage.Provider) {
	t.Run("Success", func(t *testing.T) {
		err := provider.Close()
		require.NoError(t, err)
	})
}
// TestPutGet tests common Store Put and Get functionality.
// It verifies that stores with different names are isolated, that stores
// opened under the same name share data, and that Put/Get reject empty
// keys and nil values.
func TestPutGet(t *testing.T, provider newstorage.Provider) { //nolint: funlen // Test file
	store1Name := randomStoreName()
	store1, errOpen := provider.OpenStore(store1Name)
	require.NoError(t, errOpen)
	key := "did:example:1"
	data := []byte("value1")
	t.Run("Put and get value with multiple stores", func(t *testing.T) {
		store2Name := randomStoreName()
		store2, err := provider.OpenStore(store2Name)
		require.NoError(t, err)
		// Put in store 1.
		err = store1.Put(key, data)
		require.NoError(t, err)
		// Try getting from store 1 - should be found.
		doc, err := store1.Get(key)
		require.NoError(t, err)
		require.NotEmpty(t, doc)
		require.Equal(t, data, doc)
		// Try getting from store 2 - should not be found
		doc, err = store2.Get(key)
		require.True(t, errors.Is(err, newstorage.ErrDataNotFound), "Got unexpected error or no error")
		require.Nil(t, doc)
		// Put in store 2.
		err = store2.Put(key, data)
		require.NoError(t, err)
		// Now we should be able to get that value from store 2.
		doc, err = store2.Get(key)
		require.NoError(t, err)
		require.NotEmpty(t, doc)
		require.Equal(t, data, doc)
		// Create store 3 with the same name as store 1.
		store3, err := provider.OpenStore(store1Name)
		require.NoError(t, err)
		require.NotNil(t, store3)
		// Since store 3 points to the same underlying database as store 1, the data should be found.
		doc, err = store3.Get(key)
		require.NoError(t, err)
		require.NotEmpty(t, doc)
		require.Equal(t, data, doc)
	})
	t.Run("Get using empty key", func(t *testing.T) {
		_, err := store1.Get("")
		require.Error(t, err)
	})
	t.Run("Put with empty key", func(t *testing.T) {
		err := store1.Put("", data)
		require.Error(t, err)
	})
	// NOTE(review): "vil" in the subtest name below is a typo for "nil";
	// fix in a separate change since test names may be referenced by CI.
	t.Run("Put with vil value", func(t *testing.T) {
		err := store1.Put(key, nil)
		require.Error(t, err)
	})
}
// TestStoreGetTags tests common Store GetTags functionality: tags stored
// with Put round-trip through GetTags, a missing key yields
// ErrDataNotFound, and an empty key is rejected.
func TestStoreGetTags(t *testing.T, provider newstorage.Provider) {
	storeName := randomStoreName()
	store, err := provider.OpenStore(storeName)
	require.NoError(t, err)
	t.Run("Successfully retrieve tags", func(t *testing.T) {
		tags := []newstorage.Tag{{Name: "tagName1", Value: "tagValue1"}, {Name: "tagName2", Value: "tagValue2"}}
		key := "did:example:1"
		err = store.Put(key, []byte("value1"), tags...)
		require.NoError(t, err)
		receivedTags, err := store.GetTags(key)
		require.NoError(t, err)
		// Tag order is implementation-defined, so compare as sets.
		require.True(t, equalTags(tags, receivedTags), "Got unexpected tags")
	})
	t.Run("Data not found", func(t *testing.T) {
		tags, err := store.GetTags("NonExistentKey")
		require.True(t, errors.Is(err, newstorage.ErrDataNotFound), "Got unexpected error or no error")
		require.Empty(t, tags)
	})
	t.Run("Empty key", func(t *testing.T) {
		tags, err := store.GetTags("")
		require.Error(t, err)
		require.Empty(t, tags)
	})
}
// TestStoreGetBulk tests common Store GetBulk functionality.
// The returned slice must be positionally aligned with the requested keys,
// with a nil entry for each key that has no value; nil/empty/blank key
// inputs must be rejected.
func TestStoreGetBulk(t *testing.T, provider newstorage.Provider) { //nolint: funlen // Test file
	t.Run("All values found", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		err = store.Put("key1", []byte("value1"),
			[]newstorage.Tag{
				{Name: "tagName1", Value: "tagValue1"},
				{Name: "tagName2", Value: "tagValue2"},
			}...)
		require.NoError(t, err)
		err = store.Put("key2", []byte("value2"),
			[]newstorage.Tag{
				{Name: "tagName1", Value: "tagValue1"},
				{Name: "tagName2", Value: "tagValue2"},
			}...)
		require.NoError(t, err)
		values, err := store.GetBulk("key1", "key2")
		require.NoError(t, err)
		require.Len(t, values, 2)
		require.Equal(t, "value1", string(values[0]))
		require.Equal(t, "value2", string(values[1]))
	})
	t.Run("One value found, one not", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		err = store.Put("key1", []byte("value1"),
			[]newstorage.Tag{
				{Name: "tagName1", Value: "tagValue1"},
				{Name: "tagName2", Value: "tagValue2"},
			}...)
		require.NoError(t, err)
		// The missing key must yield a nil entry, not an error.
		values, err := store.GetBulk("key1", "key2")
		require.NoError(t, err)
		require.Len(t, values, 2)
		require.Equal(t, "value1", string(values[0]))
		require.Nil(t, values[1])
	})
	t.Run("One value found, one not because it was deleted", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		err = store.Put("key1", []byte("value1"),
			[]newstorage.Tag{
				{Name: "tagName1", Value: "tagValue1"},
				{Name: "tagName2", Value: "tagValue2"},
			}...)
		require.NoError(t, err)
		err = store.Put("key2", []byte("value2"),
			[]newstorage.Tag{
				{Name: "tagName1", Value: "tagValue1"},
				{Name: "tagName2", Value: "tagValue2"},
			}...)
		require.NoError(t, err)
		err = store.Delete("key2")
		require.NoError(t, err)
		// A deleted key behaves the same as one that never existed.
		values, err := store.GetBulk("key1", "key2")
		require.NoError(t, err)
		require.Len(t, values, 2)
		require.Equal(t, "value1", string(values[0]))
		require.Nil(t, values[1])
	})
	t.Run("No values found", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		err = store.Put("key1", []byte("value1"),
			[]newstorage.Tag{
				{Name: "tagName1", Value: "tagValue1"},
				{Name: "tagName2", Value: "tagValue2"},
			}...)
		require.NoError(t, err)
		values, err := store.GetBulk("key3", "key4")
		require.NoError(t, err)
		require.Len(t, values, 2)
		require.Nil(t, values[0])
		require.Nil(t, values[1])
	})
	t.Run("Nil keys slice", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		values, err := store.GetBulk(nil...)
		require.Error(t, err)
		require.Nil(t, values)
	})
	t.Run("Empty keys slice", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		values, err := store.GetBulk(make([]string, 0)...)
		require.Error(t, err)
		require.Nil(t, values)
	})
	t.Run("Blank key", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		values, err := store.GetBulk("key1", "key2", "")
		require.Error(t, err)
		require.Nil(t, values)
	})
}
// TestStoreDelete tests common Store Delete functionality: deleting an
// existing key succeeds and makes subsequent Gets fail with
// ErrDataNotFound, deleting a never-stored key is a no-op, and an empty
// key is rejected.
func TestStoreDelete(t *testing.T, provider newstorage.Provider) {
	const commonKey = "did:example:1234"
	data := []byte("value1")
	store, err := provider.OpenStore(randomStoreName())
	require.NoError(t, err)
	// Put in store 1
	err = store.Put(commonKey, data)
	require.NoError(t, err)
	// Try getting from store 1 - should be found.
	doc, err := store.Get(commonKey)
	require.NoError(t, err)
	require.NotEmpty(t, doc)
	require.Equal(t, data, doc)
	// Delete an existing key - should succeed.
	err = store.Delete(commonKey)
	require.NoError(t, err)
	// Delete a key which never existed. Should not throw any error.
	err = store.Delete("k1")
	require.NoError(t, err)
	// Try to get the value stored under the deleted key - should fail.
	doc, err = store.Get(commonKey)
	require.True(t, errors.Is(err, newstorage.ErrDataNotFound), "got unexpected error or no error")
	require.Empty(t, doc)
	// Try Delete with an empty key - should fail.
	err = store.Delete("")
	require.Error(t, err)
}
// TestStoreQuery tests common Store Query functionality.
// Each scenario is exercised with the store's default page setting and with
// explicit page sizes of 1, 2 and 100 via the runQueryTest helper.
func TestStoreQuery(t *testing.T, provider newstorage.Provider) { // nolint: funlen // Test file
	t.Run("Tag name only query - 2 values found", func(t *testing.T) {
		keysToPut := []string{"key1", "key2", "key3"}
		valuesToPut := [][]byte{[]byte("value1"), []byte("value2"), []byte("value3")}
		tagsToPut := [][]newstorage.Tag{
			{{Name: "tagName1", Value: "tagValue1"}, {Name: "tagName2", Value: "tagValue2"}},
			{{Name: "tagName3", Value: "tagValue"}, {Name: "tagName4"}},
			{{Name: "tagName3", Value: "tagValue2"}},
		}
		// Only the last two entries carry tagName3, so only they should match.
		expectedKeys := []string{keysToPut[1], keysToPut[2]}
		expectedValues := [][]byte{valuesToPut[1], valuesToPut[2]}
		expectedTags := [][]newstorage.Tag{tagsToPut[1], tagsToPut[2]}
		queryExpression := "tagName3"
		t.Run("Default page setting", func(t *testing.T) {
			runQueryTest(t, provider, keysToPut, valuesToPut, tagsToPut, queryExpression, 0,
				expectedKeys, expectedValues, expectedTags)
		})
		t.Run("Page size 2", func(t *testing.T) {
			runQueryTest(t, provider, keysToPut, valuesToPut, tagsToPut, queryExpression, 2, //nolint:gomnd // Test file
				expectedKeys, expectedValues, expectedTags)
		})
		t.Run("Page size 1", func(t *testing.T) {
			runQueryTest(t, provider, keysToPut, valuesToPut, tagsToPut, queryExpression, 1,
				expectedKeys, expectedValues, expectedTags)
		})
		t.Run("Page size 100", func(t *testing.T) {
			runQueryTest(t, provider, keysToPut, valuesToPut, tagsToPut, queryExpression, 100, //nolint:gomnd // Test file
				expectedKeys, expectedValues, expectedTags)
		})
	})
	t.Run("Tag name only query - 0 values found", func(t *testing.T) {
		keysToPut := []string{"key1", "key2", "key3"}
		valuesToPut := [][]byte{[]byte("value1"), []byte("value2"), []byte("value3")}
		tagsToPut := [][]newstorage.Tag{
			{{Name: "tagName1", Value: "tagValue1"}, {Name: "tagName2", Value: "tagValue2"}},
			{{Name: "tagName3", Value: "tagValue"}, {Name: "tagName4"}},
			{{Name: "tagName3", Value: "tagValue2"}},
		}
		queryExpression := "nonExistentTagName"
		t.Run("Default page setting", func(t *testing.T) {
			// NOTE(review): the original test queried " " here rather than
			// queryExpression. That behavior is preserved - confirm whether
			// "nonExistentTagName" was intended instead.
			runQueryTest(t, provider, keysToPut, valuesToPut, tagsToPut, " ", 0, nil, nil, nil)
		})
		t.Run("Page size 2", func(t *testing.T) {
			runQueryTest(t, provider, keysToPut, valuesToPut, tagsToPut, queryExpression, 2, nil, nil, nil) //nolint:gomnd // Test file
		})
		t.Run("Page size 1", func(t *testing.T) {
			runQueryTest(t, provider, keysToPut, valuesToPut, tagsToPut, queryExpression, 1, nil, nil, nil)
		})
		t.Run("Page size 100", func(t *testing.T) {
			runQueryTest(t, provider, keysToPut, valuesToPut, tagsToPut, queryExpression, 100, nil, nil, nil) //nolint:gomnd // Test file
		})
	})
	t.Run("Tag name and value query - 2 values found", func(t *testing.T) {
		keysToPut := []string{"key1", "key2", "key3", "key4"}
		valuesToPut := [][]byte{[]byte("value1"), []byte("value2"), []byte("value3"), []byte("value4")}
		tagsToPut := [][]newstorage.Tag{
			{{Name: "tagName1", Value: "tagValue1"}, {Name: "tagName2", Value: "tagValue2"}},
			{{Name: "tagName3", Value: "tagValue1"}, {Name: "tagName4"}},
			{{Name: "tagName3", Value: "tagValue2"}},
			{{Name: "tagName3", Value: "tagValue1"}},
		}
		// Only entries with the exact tagName3:tagValue1 pair should match.
		expectedKeys := []string{keysToPut[1], keysToPut[3]}
		expectedValues := [][]byte{valuesToPut[1], valuesToPut[3]}
		expectedTags := [][]newstorage.Tag{tagsToPut[1], tagsToPut[3]}
		queryExpression := "tagName3:tagValue1"
		t.Run("Default page setting", func(t *testing.T) {
			runQueryTest(t, provider, keysToPut, valuesToPut, tagsToPut, queryExpression, 0,
				expectedKeys, expectedValues, expectedTags)
		})
		t.Run("Page size 2", func(t *testing.T) {
			runQueryTest(t, provider, keysToPut, valuesToPut, tagsToPut, queryExpression, 2, //nolint:gomnd // Test file
				expectedKeys, expectedValues, expectedTags)
		})
		t.Run("Page size 1", func(t *testing.T) {
			runQueryTest(t, provider, keysToPut, valuesToPut, tagsToPut, queryExpression, 1,
				expectedKeys, expectedValues, expectedTags)
		})
		t.Run("Page size 100", func(t *testing.T) {
			runQueryTest(t, provider, keysToPut, valuesToPut, tagsToPut, queryExpression, 100, //nolint:gomnd // Test file
				expectedKeys, expectedValues, expectedTags)
		})
	})
	t.Run("Invalid expression formats", func(t *testing.T) {
		storeName := randomStoreName()
		store, err := provider.OpenStore(storeName)
		require.NoError(t, err)
		require.NotNil(t, store)
		t.Run("Empty expression", func(t *testing.T) {
			iterator, err := store.Query("")
			require.Error(t, err)
			require.Empty(t, iterator)
		})
		t.Run("Too many colon-separated parts", func(t *testing.T) {
			iterator, err := store.Query("name:value:somethingElse")
			require.Error(t, err)
			require.Empty(t, iterator)
		})
	})
}

// runQueryTest opens a fresh store configured with the standard test tag names,
// stores the given data in it, runs the given query expression and verifies that
// the returned iterator yields exactly the expected keys, values and tags.
// A pageSize < 1 means "use the store's default page setting".
func runQueryTest(t *testing.T, provider newstorage.Provider,
	keys []string, values [][]byte, tags [][]newstorage.Tag,
	expression string, pageSize int,
	expectedKeys []string, expectedValues [][]byte, expectedTags [][]newstorage.Tag) {
	t.Helper()
	storeName := randomStoreName()
	store, err := provider.OpenStore(storeName)
	require.NoError(t, err)
	require.NotNil(t, store)
	err = provider.SetStoreConfig(storeName,
		newstorage.StoreConfiguration{TagNames: []string{"tagName1", "tagName2", "tagName3", "tagName4"}})
	require.NoError(t, err)
	putData(t, store, keys, values, tags)
	var iterator newstorage.Iterator
	if pageSize > 0 {
		iterator, err = store.Query(expression, newstorage.WithPageSize(pageSize))
	} else {
		iterator, err = store.Query(expression)
	}
	require.NoError(t, err)
	verifyExpectedIterator(t, iterator, expectedKeys, expectedValues, expectedTags)
}
// TestStoreBatch tests common Store Batch functionality.
func TestStoreBatch(t *testing.T, provider newstorage.Provider) { // nolint:funlen // Test file
	t.Run("Success: put three new values", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		operations := []newstorage.Operation{
			{Key: "key1", Value: []byte("value1"), Tags: []newstorage.Tag{{Name: "tagName1"}}},
			{Key: "key2", Value: []byte("value2"), Tags: []newstorage.Tag{{Name: "tagName2"}}},
			{Key: "key3", Value: []byte("value3"), Tags: []newstorage.Tag{{Name: "tagName3"}}},
		}
		err = store.Batch(operations)
		require.NoError(t, err)
		// Check and make sure all values and tags were stored.
		requireStoredValue(t, store, "key1", "value1")
		requireStoredValue(t, store, "key2", "value2")
		requireStoredValue(t, store, "key3", "value3")
		// NOTE(review): the tags were stored without values, so an empty tag
		// value is expected here - confirm against implementations under test.
		requireSingleTag(t, store, "key1", "tagName1", "")
		requireSingleTag(t, store, "key2", "tagName2", "")
		requireSingleTag(t, store, "key3", "tagName3", "")
	})
	t.Run("Success: update three different previously-stored values via Batch", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		err = store.Put("key1", []byte("value1"), newstorage.Tag{Name: "tagName1", Value: "tagValue1"})
		require.NoError(t, err)
		err = store.Put("key2", []byte("value2"), newstorage.Tag{Name: "tagName2", Value: "tagValue2"})
		require.NoError(t, err)
		err = store.Put("key3", []byte("value3"), newstorage.Tag{Name: "tagName3", Value: "tagValue3"})
		require.NoError(t, err)
		operations := []newstorage.Operation{
			{Key: "key1", Value: []byte("value1_new"), Tags: []newstorage.Tag{{Name: "tagName1"}}},
			{Key: "key2", Value: []byte("value2_new"), Tags: []newstorage.Tag{{Name: "tagName2_new", Value: "tagValue2"}}},
			{Key: "key3", Value: []byte("value3_new"), Tags: []newstorage.Tag{{Name: "tagName3_new", Value: "tagValue3_new"}}},
		}
		err = store.Batch(operations)
		require.NoError(t, err)
		// The Batch call must have replaced both the values and the tags.
		requireStoredValue(t, store, "key1", "value1_new")
		requireStoredValue(t, store, "key2", "value2_new")
		requireStoredValue(t, store, "key3", "value3_new")
		requireSingleTag(t, store, "key1", "tagName1", "")
		requireSingleTag(t, store, "key2", "tagName2_new", "tagValue2")
		requireSingleTag(t, store, "key3", "tagName3_new", "tagValue3_new")
	})
	t.Run("Success: Delete three different previously-stored values via Batch", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		err = store.Put("key1", []byte("value1"), newstorage.Tag{Name: "tagName1", Value: "tagValue1"})
		require.NoError(t, err)
		err = store.Put("key2", []byte("value2"), newstorage.Tag{Name: "tagName2", Value: "tagValue2"})
		require.NoError(t, err)
		err = store.Put("key3", []byte("value3"), newstorage.Tag{Name: "tagName3", Value: "tagValue3"})
		require.NoError(t, err)
		// An operation with a nil value acts as a delete.
		operations := []newstorage.Operation{
			{Key: "key1", Value: nil, Tags: nil},
			{Key: "key2", Value: nil, Tags: nil},
			{Key: "key3", Value: nil, Tags: nil},
		}
		err = store.Batch(operations)
		require.NoError(t, err)
		// Check and make sure values and tags can't be found now.
		requireKeyAbsent(t, store, "key1")
		requireKeyAbsent(t, store, "key2")
		requireKeyAbsent(t, store, "key3")
	})
	t.Run("Success: Put value and then delete it in the same Batch call", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		operations := []newstorage.Operation{
			{Key: "key1", Value: []byte("value1"), Tags: []newstorage.Tag{{Name: "tagName1", Value: "tagValue1"}}},
			{Key: "key1", Value: nil, Tags: nil},
		}
		err = store.Batch(operations)
		require.NoError(t, err)
		// The delete must effectively "override" the put in the same Batch call.
		requireKeyAbsent(t, store, "key1")
	})
	t.Run("Success: Put value and update it in the same Batch call", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		operations := []newstorage.Operation{
			{Key: "key1", Value: []byte("value1"), Tags: []newstorage.Tag{{Name: "tagName1", Value: "tagValue1"}}},
			{Key: "key1", Value: []byte("value2"), Tags: []newstorage.Tag{{Name: "tagName2", Value: "tagValue2"}}},
		}
		err = store.Batch(operations)
		require.NoError(t, err)
		// The second put must effectively "override" the first put in the same Batch call.
		requireStoredValue(t, store, "key1", "value2")
		requireSingleTag(t, store, "key1", "tagName2", "tagValue2")
	})
	t.Run("Failure: Operation has an empty key", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		operations := []newstorage.Operation{
			{Key: "key1", Value: []byte("value1"), Tags: []newstorage.Tag{{Name: "tagName1", Value: "tagValue1"}}},
			{Key: "", Value: []byte("value2"), Tags: []newstorage.Tag{{Name: "tagName2", Value: "tagValue2"}}},
		}
		err = store.Batch(operations)
		require.Error(t, err)
	})
}

// requireStoredValue asserts that store.Get(key) succeeds and returns expectedValue.
func requireStoredValue(t *testing.T, store newstorage.Store, key, expectedValue string) {
	t.Helper()
	value, err := store.Get(key)
	require.NoError(t, err)
	require.Equal(t, expectedValue, string(value))
}

// requireSingleTag asserts that exactly one tag is stored under key and that it
// has the given name and value.
func requireSingleTag(t *testing.T, store newstorage.Store, key, expectedName, expectedValue string) {
	t.Helper()
	tags, err := store.GetTags(key)
	require.NoError(t, err)
	require.Len(t, tags, 1)
	require.Equal(t, expectedName, tags[0].Name)
	require.Equal(t, expectedValue, tags[0].Value)
}

// requireKeyAbsent asserts that neither a value nor tags can be retrieved for
// key, and that the reported error is newstorage.ErrDataNotFound.
func requireKeyAbsent(t *testing.T, store newstorage.Store, key string) {
	t.Helper()
	value, err := store.Get(key)
	require.True(t, errors.Is(err, newstorage.ErrDataNotFound), "got unexpected error or no error")
	require.Nil(t, value)
	tags, err := store.GetTags(key)
	require.True(t, errors.Is(err, newstorage.ErrDataNotFound), "got unexpected error or no error")
	require.Empty(t, tags)
}
// TestStoreFlush tests common Store Flush functionality: data put before a
// Flush must be retrievable afterwards.
func TestStoreFlush(t *testing.T, provider newstorage.Provider) {
	t.Run("Success", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		keys := []string{"key1", "key2"}
		storedValues := []string{"value1", "value2"}
		for i, key := range keys {
			require.NoError(t, store.Put(key, []byte(storedValues[i])))
		}
		require.NoError(t, store.Flush())
		// Everything written before the flush must now be readable.
		retrieved, err := store.GetBulk(keys...)
		require.NoError(t, err)
		require.Len(t, retrieved, 2)
		require.Equal(t, storedValues[0], string(retrieved[0]))
		require.Equal(t, storedValues[1], string(retrieved[1]))
	})
}
// TestStoreClose tests common Store Close functionality: closing a freshly
// opened store must not return an error.
func TestStoreClose(t *testing.T, provider newstorage.Provider) {
	t.Run("Successfully close store", func(t *testing.T) {
		store, err := provider.OpenStore(randomStoreName())
		require.NoError(t, err)
		require.NotNil(t, store)
		err = store.Close()
		require.NoError(t, err)
	})
}
// randomStoreName returns a store name that is unique for all practical
// purposes, so each test case operates on a fresh store.
func randomStoreName() string {
	return "store-" + uuid.New().String()
}
// putData stores each keys[i] with values[i] and tags[i] in the given store,
// failing the test on the first error.
func putData(t *testing.T, store newstorage.Store, keys []string, values [][]byte, tags [][]newstorage.Tag) {
	t.Helper()
	// Guard against malformed fixtures: previously a length mismatch caused an
	// index-out-of-range panic instead of a clean test failure.
	require.Equal(t, len(keys), len(values), "keys and values slices must be the same length")
	require.Equal(t, len(keys), len(tags), "keys and tags slices must be the same length")
	for i, key := range keys {
		require.NoError(t, store.Put(key, values[i], tags[i]...))
	}
}
// verifyExpectedIterator drains the given iterator and asserts that it returns
// exactly the expected keys, values and tags (in any order), failing the test
// if it yields more results than expected or if any expected entry is missing.
func verifyExpectedIterator(t *testing.T, // nolint:gocyclo,funlen // Test file
	actualResultsItr newstorage.Iterator,
	expectedKeys []string, expectedValues [][]byte, expectedTags [][]newstorage.Tag) {
	if len(expectedValues) != len(expectedKeys) || len(expectedTags) != len(expectedKeys) {
		require.FailNow(t,
			"Invalid test case. Expected keys, values and tags slices must be the same length.")
	}
	// dataChecklist tracks which of the expected entries have been seen so far;
	// received[i] flips to true once entry i is matched by an iterator result.
	var dataChecklist struct {
		keys []string
		values [][]byte
		tags [][]newstorage.Tag
		received []bool
	}
	dataChecklist.keys = expectedKeys
	dataChecklist.values = expectedValues
	dataChecklist.tags = expectedTags
	dataChecklist.received = make([]bool, len(expectedKeys))
	moreResultsToCheck, err := actualResultsItr.Next()
	require.NoError(t, err)
	for moreResultsToCheck {
		// If every expected entry is already accounted for, any further
		// result from the iterator is surplus.
		dataReceivedCount := 0
		for _, received := range dataChecklist.received {
			if received {
				dataReceivedCount++
			}
		}
		if dataReceivedCount == len(dataChecklist.received) {
			require.FailNow(t, "query returned more results than expected")
		}
		var itrErr error
		receivedKey, itrErr := actualResultsItr.Key()
		require.NoError(t, itrErr)
		receivedValue, itrErr := actualResultsItr.Value()
		require.NoError(t, itrErr)
		receivedTags, itrErr := actualResultsItr.Tags()
		require.NoError(t, itrErr)
		// Tick off the first expected entry that matches this result's
		// key, value and tags.
		for i := 0; i < len(dataChecklist.keys); i++ {
			if receivedKey == dataChecklist.keys[i] {
				if string(receivedValue) == string(dataChecklist.values[i]) {
					if equalTags(receivedTags, dataChecklist.tags[i]) {
						dataChecklist.received[i] = true
						break
					}
				}
			}
		}
		moreResultsToCheck, err = actualResultsItr.Next()
		require.NoError(t, err)
	}
	err = actualResultsItr.Close()
	require.NoError(t, err)
	// Any expected entry that was never matched means the query results were wrong.
	for _, received := range dataChecklist.received {
		if !received {
			require.FailNow(t, "received unexpected query results")
		}
	}
}
// equalTags reports whether tags1 and tags2 contain the same tags in any order.
// Duplicates are matched one-to-one: each tag in tags1 may claim at most one
// as-yet-unclaimed tag in tags2.
//
// The previous implementation ended with two extra loops re-checking both
// matched arrays; they were dead code, because the per-iteration check below
// already guarantees every tag1 matched, and with equal lengths each match
// claims a distinct tag2, so all of tags2 is matched too.
func equalTags(tags1, tags2 []newstorage.Tag) bool {
	if len(tags1) != len(tags2) {
		return false
	}
	// matched[j] records whether tags2[j] has been claimed by an earlier tag from tags1.
	matched := make([]bool, len(tags2))
	for _, tag1 := range tags1 {
		foundMatch := false
		for j, tag2 := range tags2 {
			if matched[j] {
				continue // Already claimed. Tags can only have one match!
			}
			if tag1.Name == tag2.Name && tag1.Value == tag2.Value {
				matched[j] = true
				foundMatch = true
				break
			}
		}
		if !foundMatch {
			return false
		}
	}
	return true
}
// equalTagNamesAnyOrder reports whether tagNames1 and tagNames2 contain the
// same tag names in any order. Duplicates are matched one-to-one: each name in
// tagNames1 may claim at most one as-yet-unclaimed name in tagNames2.
//
// Fixes: the original closing brace was fused with stray non-Go text, and the
// two trailing verification loops were dead code (the per-iteration check plus
// the equal-length precondition already guarantee a full bijective match).
func equalTagNamesAnyOrder(tagNames1, tagNames2 []string) bool {
	if len(tagNames1) != len(tagNames2) {
		return false
	}
	// matched[j] records whether tagNames2[j] has been claimed already.
	matched := make([]bool, len(tagNames2))
	for _, name1 := range tagNames1 {
		foundMatch := false
		for j, name2 := range tagNames2 {
			if matched[j] {
				continue // Already claimed. Tag names can only have one match!
			}
			if name1 == name2 {
				matched[j] = true
				foundMatch = true
				break
			}
		}
		if !foundMatch {
			return false
		}
	}
	return true
}
package btree
import "github.com/jmgilman/kv"
// node represents a node in a Tree
type node struct {
	pair kv.KVPair // key/value pair held by this node
	left *node // subtree with keys ordered before pair.Key
	right *node // subtree with keys ordered after pair.Key
}
// get searches for the given key in the tree node and returns its associated
// KVPair or ErrorNoSuchKey if the key was not found. Entries marked with a
// tombstone behave as if they did not exist.
func (n *node) get(key string) (*kv.KVPair, error) {
	if n == nil {
		return &kv.KVPair{}, kv.ErrorNoSuchKey
	}
	switch {
	case key == n.pair.Key:
		// A tombstone marks a deleted entry, which must look like a missing key.
		if n.pair.Tombstone {
			return &kv.KVPair{}, kv.ErrorNoSuchKey
		}
		return &n.pair, nil
	case key < n.pair.Key:
		return n.left.get(key)
	default:
		return n.right.get(key)
	}
}
// getClosestLeft attempts to find the closest left-side node of the given key.
// It is assumed that the key being passed falls within the range of the tree
// and is not the lowest key; the result relies on that precondition.
func (n *node) getClosestLeft(key string) *node {
	if key <= n.pair.Key {
		// This node is at or above the key, so a closer left neighbour can
		// only live in the left subtree (if one exists).
		if n.left == nil {
			return n
		} else {
			return n.left.getClosestLeft(key)
		}
	} else {
		if n.right == nil {
			return n
		} else if key <= n.right.pair.Key && n.right.left == nil {
			// The right child is at or above the key and has no left subtree,
			// so this node is the closest key strictly to the left.
			return n
		} else {
			return n.right.getClosestLeft(key)
		}
	}
}
// getClosestRight attempts to find the closest right-side node of the given
// key. It is assumed that the key being passed falls within the range of the
// tree and is not the highest key; the result relies on that precondition.
func (n *node) getClosestRight(key string) *node {
	if key < n.pair.Key {
		if n.left == nil {
			return n
		} else if key >= n.left.pair.Key && n.left.right == nil {
			// The left child is at or below the key and has no right subtree,
			// so this node is the closest key to the right.
			return n
		} else {
			return n.left.getClosestRight(key)
		}
	} else {
		// This node is at or below the key, so a closer right neighbour can
		// only live in the right subtree (if one exists).
		if n.right == nil {
			return n
		} else {
			return n.right.getClosestRight(key)
		}
	}
}
// pairs returns the contents of the tree node as an ordered slice of
// KVPair's, produced by an in-order traversal (left subtree, this node,
// right subtree).
func (n *node) pairs() []*kv.KVPair {
	if n == nil {
		return nil
	}
	ordered := append(n.left.pairs(), &n.pair)
	return append(ordered, n.right.pairs()...)
}
// put adds a new KVPair into the tree node or updates the value of the
// associated KVPair if the key already exists. It reports whether a new node
// was inserted (false means an existing key was overwritten).
//
// The parameter was renamed from "kv" to "pair": it shadowed the imported kv
// package inside the function body, which is an easy trap for future edits.
func (n *node) put(pair kv.KVPair) bool {
	switch {
	case pair.Key < n.pair.Key:
		if n.left != nil {
			return n.left.put(pair)
		}
		n.left = &node{pair: pair}
		return true
	case pair.Key > n.pair.Key:
		if n.right != nil {
			return n.right.put(pair)
		}
		n.right = &node{pair: pair}
		return true
	default:
		// Key already present: replace the stored pair in place.
		n.pair = pair
		return false
	}
}
// newNode returns a new node created from a slice of ordered KVPair's.
func newNode(pairs []kv.KVPair) *node {
size := len(pairs)
if size == 0 {
return nil
}
node := &node{
pair: pairs[size/2],
left: newNode(pairs[0 : size/2]),
}
if i := size/2 + 1; i < size {
node.right = newNode(pairs[i:size])
}
return node
} | btree/node.go | 0.833223 | 0.462352 | node.go | starcoder |
package iso20022
// Information needed to process a currency exchange or conversion.
// Optional elements (buy/sell amounts, quotation date, quoting institution)
// carry the XML ",omitempty" option, so they are left out of the serialized
// form when unset.
type ForeignExchangeTerms4 struct {
	// Currency and amount bought in a foreign exchange trade. The buy amount is received by the buyer.
	BuyAmount *ActiveCurrencyAnd13DecimalAmount `xml:"BuyAmt,omitempty"`
	// Currency and amount sold in a foreign exchange trade. The sold amount is delivered by the buyer.
	SellAmount *ActiveCurrencyAndAmount `xml:"SellAmt,omitempty"`
	// Currency in which the rate of exchange is expressed in a currency exchange. In the example 1GBP = xxxCUR, the unit currency is GBP.
	UnitCurrency *CurrencyCode `xml:"UnitCcy"`
	// Currency into which the base currency is converted, in a currency exchange.
	QuotedCurrency *CurrencyCode `xml:"QtdCcy"`
	// The value of one currency expressed in relation to another currency. ExchangeRate expresses the ratio between UnitCurrency and QuotedCurrency (ExchangeRate = UnitCurrency/QuotedCurrency).
	ExchangeRate *BaseOneRate `xml:"XchgRate"`
	// Date and time at which an exchange rate is quoted.
	QuotationDate *ISODateTime `xml:"QtnDt,omitempty"`
	// Party that proposes a foreign exchange rate.
	QuotingInstitution *PartyIdentification2Choice `xml:"QtgInstn,omitempty"`
}
func (f *ForeignExchangeTerms4) SetBuyAmount(value, currency string) {
f.BuyAmount = NewActiveCurrencyAnd13DecimalAmount(value, currency)
}
func (f *ForeignExchangeTerms4) SetSellAmount(value, currency string) {
f.SellAmount = NewActiveCurrencyAndAmount(value, currency)
}
func (f *ForeignExchangeTerms4) SetUnitCurrency(value string) {
f.UnitCurrency = (*CurrencyCode)(&value)
}
func (f *ForeignExchangeTerms4) SetQuotedCurrency(value string) {
f.QuotedCurrency = (*CurrencyCode)(&value)
}
func (f *ForeignExchangeTerms4) SetExchangeRate(value string) {
f.ExchangeRate = (*BaseOneRate)(&value)
}
func (f *ForeignExchangeTerms4) SetQuotationDate(value string) {
f.QuotationDate = (*ISODateTime)(&value)
}
func (f *ForeignExchangeTerms4) AddQuotingInstitution() *PartyIdentification2Choice {
f.QuotingInstitution = new(PartyIdentification2Choice)
return f.QuotingInstitution
} | ForeignExchangeTerms4.go | 0.821689 | 0.52683 | ForeignExchangeTerms4.go | starcoder |
/*
Package aggregation groups events and computes aggregate values over them.

Two main functions are provided:
 - GetLatestBy to get the latest event for each group. A group is defined by one or more columns.
 - GroupBy to group events by one or more columns and perform an aggregation for each group, like count(), sum() or max().

It is possible to use a single aggregation when calling GroupBy or to use multiple ones thanks to aggregation.Set.

Please see the examples to see how to use the aggregators.
*/
package aggregation
import (
"strconv"
"github.com/sendinblue/bigtable-access-layer/data"
)
// GetLatestBy groups lines by the given columns keeping the most recent one,
// thus without performing any aggregation. The group key is the concatenation
// of the event's values for the requested columns; missing columns simply
// contribute nothing to the key.
func GetLatestBy(events []*data.Event, columns ...string) map[string]*data.Event {
	latest := make(map[string]*data.Event)
	for _, event := range events {
		var key string
		for _, column := range columns {
			if value, found := event.Cells[column]; found {
				key += value
			}
		}
		// Keep the event with the most recent Date; on equal dates the
		// earlier event in the slice wins.
		current, seen := latest[key]
		if !seen || event.Date.After(current.Date) {
			latest[key] = event
		}
	}
	return latest
}
// GroupBy groups lines by the given columns, performing the given aggregation function.
// For each event, agg is invoked with the event and the slice of events previously
// seen in the same group; the current event is appended to the group only afterwards,
// so aggregators must account for the current event themselves (e.g. Count uses
// len(events)+1). The returned map holds, per group key, the event produced by the
// final agg call for that group.
func GroupBy(events []*data.Event, agg func(line *data.Event, lines []*data.Event) *data.Event, columns ...string) map[string]*data.Event {
	result := make(map[string]*data.Event)
	group := make(map[string][]*data.Event)
	for _, event := range events {
		// The group key is the concatenation of the event's values for the
		// requested columns; missing columns simply contribute nothing.
		key := ""
		for _, column := range columns {
			if d, ok := event.Cells[column]; ok {
				key += d
			}
		}
		if _, ok := group[key]; !ok {
			group[key] = make([]*data.Event, 0)
		}
		result[key] = agg(event, group[key])
		group[key] = append(group[key], event)
	}
	return result
}
// aggregate is the core struct that contains the properties of the aggregation.
type aggregate struct {
	column string // name of the input cell the aggregation reads
	projection string // name of the output cell the result is written to
}
// Count returns the number of lines in the given group.
type Count struct {
	projection string
}

// NewCount creates a Count aggregation writing its result to the given cell.
func NewCount(column string) *Count {
	return &Count{projection: column}
}

// Compute writes the size of the group into e's projection cell. The events
// slice holds the previously seen members of the group, so e itself accounts
// for the +1.
func (c *Count) Compute(e *data.Event, events []*data.Event) *data.Event {
	groupSize := len(events) + 1
	e.Cells[c.projection] = strconv.Itoa(groupSize)
	return e
}
// Max returns the maximum value of the given column in the given group.
type Max struct {
	aggregate
}

func NewMax(column string, projection string) *Max {
	return &Max{
		aggregate: aggregate{
			column: column,
			projection: projection,
		},
	}
}

// Compute writes the maximum value of the column across the group (previous
// events plus e) into e's projection cell. Cells that are missing or fail to
// parse as float64 are skipped; if no value parses at all, "0" is written
// (matching the previous behavior for empty groups).
//
// Fix: the previous implementation seeded the comparison with 0, so groups
// containing only negative values wrongly reported a maximum of 0. An explicit
// "found" flag now makes the first parseable value the initial candidate.
func (m *Max) Compute(e *data.Event, events []*data.Event) *data.Event {
	largest := 0.0
	found := false
	consider := func(line *data.Event) {
		if d, ok := line.Cells[m.column]; ok {
			if v, err := strconv.ParseFloat(d, 64); err == nil {
				if !found || v > largest {
					largest = v
					found = true
				}
			}
		}
	}
	for _, line := range events {
		consider(line)
	}
	consider(e)
	e.Cells[m.projection] = strconv.FormatFloat(largest, 'f', -1, 64)
	return e
}
// Min returns the minimum value of the given column in the given group.
type Min struct {
	aggregate
}

func NewMin(column string, projection string) *Min {
	return &Min{
		aggregate: aggregate{
			column: column,
			projection: projection,
		},
	}
}

// Compute writes the minimum value of the column across the group (previous
// events plus e) into e's projection cell. Cells that are missing or fail to
// parse as float64 are skipped; if no value parses at all, "0" is written
// (matching the previous behavior for empty groups).
//
// Fix: the previous implementation used the predicate `s == 0 || c < s`,
// treating a running minimum of exactly 0 as "unset", so a legitimate minimum
// of 0 could later be overwritten by a larger value (e.g. min(0, 3) == 3). An
// explicit "found" flag now makes the first parseable value the initial
// candidate.
func (m *Min) Compute(e *data.Event, events []*data.Event) *data.Event {
	smallest := 0.0
	found := false
	consider := func(line *data.Event) {
		if d, ok := line.Cells[m.column]; ok {
			if v, err := strconv.ParseFloat(d, 64); err == nil {
				if !found || v < smallest {
					smallest = v
					found = true
				}
			}
		}
	}
	for _, line := range events {
		consider(line)
	}
	consider(e)
	e.Cells[m.projection] = strconv.FormatFloat(smallest, 'f', -1, 64)
	return e
}
// selectOne returns the value of column col that wins the comparison f across
// events followed by e, formatted with the minimal number of digits. The first
// parseable value is always taken as the initial candidate so that f never
// compares against an artificial zero seed; if no value parses, "0" is returned.
//
// Fixes: (1) the previous zero seed biased results (e.g. an all-negative group's
// maximum came out as 0); (2) `events = append(events, e)` could write e into
// spare capacity of the caller's slice backing array — an aliasing hazard — so
// the slice is now left untouched and e is visited separately, preserving the
// original visit order (events first, then e).
func selectOne(e *data.Event, events []*data.Event, col string, f func(c, s float64) bool) string {
	var selected float64
	found := false
	pick := func(line *data.Event) {
		d, ok := line.Cells[col]
		if !ok {
			return
		}
		v, err := strconv.ParseFloat(d, 64)
		if err != nil {
			return
		}
		if !found || f(v, selected) {
			selected = v
			found = true
		}
	}
	for _, line := range events {
		pick(line)
	}
	pick(e)
	return strconv.FormatFloat(selected, 'f', -1, 64)
}
// Average returns the average value of the given column in the given group.
type Average struct {
	aggregate
}

func NewAverage(column string, projection string) *Average {
	return &Average{
		aggregate: aggregate{
			column: column,
			projection: projection,
		},
	}
}

// Compute writes the mean of the column over the group (previous events plus e)
// into e's projection cell. Note that the divisor is the total number of events
// in the group, so cells that are missing or unparseable still count in the
// denominator while contributing 0 to the numerator (see sum).
func (m *Average) Compute(e *data.Event, events []*data.Event) *data.Event {
	total := sum(m.column, e, events)
	e.Cells[m.projection] = strconv.FormatFloat(total/float64(len(events)+1), 'f', -1, 64)
	return e
}
// Sum returns the sum of the given column in the given group.
type Sum struct {
	aggregate
}

// NewSum creates a Sum aggregation reading the given column and writing its
// result to the projection cell.
func NewSum(column string, projection string) *Sum {
	return &Sum{aggregate: aggregate{column: column, projection: projection}}
}

// Compute writes the sum of the column across the group into e's projection cell.
func (s *Sum) Compute(e *data.Event, events []*data.Event) *data.Event {
	total := sum(s.column, e, events)
	e.Cells[s.projection] = strconv.FormatFloat(total, 'f', -1, 64)
	return e
}
// sum adds up every parseable value of the given column across events and e.
// Cells that are missing or fail to parse as float64 are skipped.
//
// Fix: the previous `events = append(events, e)` could write e into spare
// capacity of the caller's slice backing array — an aliasing hazard — so the
// slice is now left untouched and e is added separately, preserving the
// original accumulation order (events first, then e).
func sum(column string, e *data.Event, events []*data.Event) float64 {
	total := 0.0
	add := func(line *data.Event) {
		if d, ok := line.Cells[column]; ok {
			if v, err := strconv.ParseFloat(d, 64); err == nil {
				total += v
			}
		}
	}
	for _, line := range events {
		add(line)
	}
	add(e)
	return total
}
// Set is a set of aggregations. It is designed to apply several aggregations to the same line.
type Set struct {
aggs []func(e *data.Event, events []*data.Event) *data.Event
}
func NewAggregationSet() *Set {
return &Set{}
}
func (s *Set) Add(agg func(e *data.Event, events []*data.Event) *data.Event) {
s.aggs = append(s.aggs, agg)
}
func (s *Set) Compute(e *data.Event, events []*data.Event) *data.Event {
for _, agg := range s.aggs {
e = agg(e, events)
}
return e
} | aggregation/aggregate.go | 0.816004 | 0.559651 | aggregate.go | starcoder |
package phonetics
import (
"math/rand"
)
// RandomSound repeatedly generates random sound configurations until a valid
// one comes up, then standardises and returns it.
func RandomSound() Sound {
	var sound Sound
	for {
		sound.randomiseSound()
		if sound.IsValid() {
			break
		}
	}
	sound.Standardise()
	return sound
}
// randomiseSound assigns a uniformly random value to every articulatory
// property of the sound. The result is not guaranteed to be valid; callers
// are expected to filter with IsValid.
func (sound *Sound) randomiseSound() {
	sound.Point = ArticulationPoint(rand.Intn(int(ArticulationPointCount)))
	sound.Manner = ArticulationManner(rand.Intn(int(ArticulationMannerCount)))
	sound.Shape = TongueShape(rand.Intn(int(TongueShapeCount)))
	sound.Voice = Voice(rand.Intn(int(VoiceCount)))
	sound.Rounded = rand.Intn(2) == 0 // 50% chance
	sound.Nasal = rand.Intn(2) == 0 // 50% chance
}
// Sound describes a single speech sound as a bundle of articulatory features.
type Sound struct {
	Point ArticulationPoint
	Manner ArticulationManner
	Shape TongueShape
	Rounded bool
	Nasal bool
	Voice Voice
}

// ArticulationPoint identifies the place of articulation of a sound.
type ArticulationPoint int

const (
	LabialLabial ArticulationPoint = iota
	LabialDental
	CoronalLabial
	CoronalDental
	CoronalAlveolar
	CoronalPostAlveolar
	CoronalRetroflex
	DorsalPostAlveolar
	DorsalPalatal
	DorsalPalVel
	DorsalVelar
	DorsalVelUlu
	DorsalUvular
	RadicalPharyngeal
	RadicalEpiglottal
	Glottal
	// ArticulationPointCount is the number of articulation points; it is used
	// as the exclusive upper bound when generating random sounds.
	ArticulationPointCount
)

// ArticulationManner identifies the manner of articulation of a sound.
type ArticulationManner int

const (
	Closed ArticulationManner = iota
	Stop
	Flap
	Trill
	Fricative
	Approximant
	Close
	NearClose
	CloseMid
	Mid
	OpenMid
	NearOpen
	Open
	// ArticulationMannerCount is the number of manners; exclusive upper bound
	// for random generation.
	ArticulationMannerCount
)

// TongueShape identifies the shape of the tongue during articulation.
type TongueShape int

const (
	Central TongueShape = iota
	Lateral
	Sibilant
	// TongueShapeCount is the number of tongue shapes; exclusive upper bound
	// for random generation.
	TongueShapeCount
)

// Voice identifies the voicing / phonation of a sound.
type Voice int

const (
	Aspirated Voice = iota
	Voiceless
	Breathy
	Modal
	Creaky
	// VoiceCount is the number of voicings; exclusive upper bound for random
	// generation.
	VoiceCount
)
// IsValid reports whether this combination of articulatory features is
// accepted by the generator. The per-point manner tables are identical to
// the original implementation; duplicate switch cases have been merged.
func (sound *Sound) IsValid() bool {
	// Rounding requires free lips, so labial articulations are excluded.
	if sound.Rounded {
		switch sound.Point {
		case LabialLabial, LabialDental, CoronalLabial:
			return false
		}
	}
	if sound.Voice == Voiceless {
		// This isn't really invalid, just very unusual (voiceless vowels).
		switch sound.Manner {
		case Close, NearClose, CloseMid, Mid, OpenMid, NearOpen, Open:
			return false
		}
	}
	// Aspiration is only modelled on stops.
	if sound.Voice == Aspirated && sound.Manner != Stop {
		return false
	}
	// A complete closure with no nasal airflow produces no sound.
	if sound.Manner == Closed && !sound.Nasal {
		return false
	}
	// Each place of articulation supports only certain manners.
	switch sound.Point {
	case LabialLabial, LabialDental, CoronalLabial, CoronalDental,
		CoronalAlveolar, CoronalPostAlveolar, CoronalRetroflex:
		switch sound.Manner {
		case Closed, Stop, Flap, Trill, Fricative, Approximant:
			return true
		}
		return false
	case DorsalPostAlveolar:
		switch sound.Manner {
		case Closed, Stop, Fricative, Approximant:
			return true
		}
		return false
	case DorsalPalatal, DorsalPalVel, DorsalVelar, DorsalVelUlu:
		switch sound.Manner {
		case Closed, Stop, Fricative, Approximant, Close, NearClose, CloseMid, Mid, OpenMid, NearOpen, Open:
			return true
		}
		return false
	case DorsalUvular:
		switch sound.Manner {
		case Closed, Stop, Flap, Trill, Fricative, Approximant, Close, NearClose, CloseMid, Mid, OpenMid, NearOpen, Open:
			return true
		}
		return false
	case RadicalPharyngeal, RadicalEpiglottal:
		switch sound.Manner {
		case Stop, Flap, Trill, Fricative, Approximant:
			return true
		}
		return false
	case Glottal:
		switch sound.Manner {
		case Stop, Fricative:
			return true
		}
		return false
	default:
		panic("Weird")
	}
}
// Standardise folds feature combinations that this package treats as
// equivalent onto a canonical representative. It is applied after random
// generation (see RandomSound).
func (sound *Sound) Standardise() {
	// Sibilance is kept only for coronal fricatives/approximants; all other
	// manner/point combinations fall back to a central tongue shape.
	if sound.Shape == Sibilant {
		switch sound.Manner {
		case Closed, Stop, Flap, Trill, Close, NearClose, CloseMid, Mid, OpenMid, NearOpen, Open:
			sound.Shape = Central
		}
		switch sound.Point {
		case LabialLabial, LabialDental, CoronalLabial, DorsalPalatal, DorsalPalVel, DorsalVelar, DorsalVelUlu, DorsalUvular, RadicalPharyngeal, RadicalEpiglottal, Glottal:
			sound.Shape = Central
		}
	}
	// Laterals are likewise restricted; outside the allowed manners/points
	// they collapse to central.
	if sound.Shape == Lateral {
		switch sound.Manner {
		case Closed, Stop, Trill, Close, NearClose, CloseMid, Mid, OpenMid, NearOpen, Open:
			sound.Shape = Central
		}
		switch sound.Point {
		case LabialLabial, LabialDental, RadicalPharyngeal, RadicalEpiglottal, Glottal:
			sound.Shape = Central
		}
	}
	// The in-between dorsal points are kept only for vowel-like manners;
	// consonantal manners collapse onto the neighbouring canonical point.
	if sound.Point == DorsalPalVel {
		switch sound.Manner {
		case Closed, Stop, Flap, Trill, Fricative, Approximant:
			sound.Point = DorsalPalatal
		}
	}
	if sound.Point == DorsalVelUlu {
		switch sound.Manner {
		case Closed, Stop, Flap, Trill, Fricative, Approximant:
			sound.Point = DorsalUvular
		}
	}
}
// Distance returns a rough dissimilarity score between two sounds: manner
// differences count double, a nasality mismatch adds 6 to the manner term
// before it is folded to a magnitude, and a rounding mismatch adds 1.
func Distance(soundA Sound, soundB Sound) int {
	abs := func(v int) int {
		if v < 0 {
			return -v
		}
		return v
	}
	// Note: the nasal penalty is added before taking the absolute value,
	// matching the original arithmetic exactly.
	manner := int(soundA.Manner-soundB.Manner) * 2
	if soundA.Nasal != soundB.Nasal {
		manner += 6
	}
	total := abs(manner) +
		abs(int(soundA.Point-soundB.Point)) +
		abs(int(soundA.Voice-soundB.Voice)) +
		abs(int(soundA.Shape-soundB.Shape))
	if soundA.Rounded != soundB.Rounded {
		total++
	}
	return total
}
// Encode packs every feature of the sound into a single int using a
// mixed-radix number: point (base 16), manner (13), shape (3), voice (5),
// rounded (2) and nasal (2), least significant first. Decode reverses it.
func (sound *Sound) Encode() int {
	rounded, nasal := 0, 0
	if sound.Rounded {
		rounded = 1
	}
	if sound.Nasal {
		nasal = 1
	}
	// Horner evaluation, most significant digit first.
	enc := nasal
	enc = enc*2 + rounded
	enc = enc*5 + int(sound.Voice)
	enc = enc*3 + int(sound.Shape)
	enc = enc*13 + int(sound.Manner)
	enc = enc*16 + int(sound.Point)
	return enc
}
func Decode(enc int) Sound {
sound := new(Sound)
res := enc / (16 * 13 * 3 * 5 * 2)
sound.Nasal = res == 1
enc -= res * (16 * 13 * 3 * 5 * 2)
res = enc / (16 * 13 * 3 * 5)
sound.Rounded = res == 1
enc -= res * (16 * 13 * 3 * 5)
res = enc / (16 * 13 * 3)
sound.Voice = Voice(res)
enc -= res * (16 * 13 * 3)
res = enc / (16 * 13)
sound.Shape = TongueShape(res)
enc -= res * (16 * 13)
res = enc / 16
sound.Manner = ArticulationManner(res)
enc -= res * 16
sound.Point = ArticulationPoint(enc)
return *sound
} | phonetics/Sound.go | 0.505371 | 0.424173 | Sound.go | starcoder |
package advent2021
import (
"fmt"
"log"
"strconv"
"strings"
)
// VentMap is the 2d int array that represents the location of the heat vents.
// Each cell counts how many vent lines pass through that position.
type VentMap [1000][1000]int

// Note: Generally the above should be dynamic and have more error checking on exceeding the limits

// coordinates represents the start/end row-column endpoints of one vent
// line on the VentMap.
type coordinates struct {
	startRow, startCol, endRow, endCol int
}
// getVentLineCoordinates parses one input line of the form "x1,y1 -> x2,y2"
// into coordinates; any malformed number aborts the program via log.Fatal.
func getVentLineCoordinates(ventLine string) coordinates {
	atoi := func(s string) int {
		n, err := strconv.Atoi(s)
		if err != nil {
			log.Fatal(err)
		}
		return n
	}
	endpoints := strings.Split(ventLine, " -> ")
	from := strings.Split(endpoints[0], ",")
	to := strings.Split(endpoints[1], ",")
	// Fields are evaluated in source order, preserving the original parse
	// (and therefore error-reporting) order.
	return coordinates{
		startCol: atoi(from[0]),
		startRow: atoi(from[1]),
		endRow:   atoi(to[1]),
		endCol:   atoi(to[0]),
	}
}
// printCoordinates dumps the coordinates to stdout for debugging.
// Note the field order: startRow, endRow, startCol, endCol.
func (c *coordinates) printCoordinates() {
	fmt.Printf("%d %d %d %d\n", c.startRow, c.endRow, c.startCol, c.endCol)
}
// drawVerticalLine increments every cell covered by a vertical vent line
// (constant column, spanning rows).
func (vm *VentMap) drawVerticalLine(coords coordinates) {
	// Normalise so the loop always walks top-to-bottom; coords is a copy,
	// so mutating it does not affect the caller.
	if coords.startRow > coords.endRow {
		coords.startRow, coords.endRow = coords.endRow, coords.startRow
	}
	for row := coords.startRow; row <= coords.endRow; row++ {
		vm[row][coords.endCol]++
	}
}
// drawHorizontalLine increments every cell covered by a horizontal vent
// line (constant row, spanning columns).
func (vm *VentMap) drawHorizontalLine(coords coordinates) {
	// Normalise so the loop always walks left-to-right; coords is a copy,
	// so mutating it does not affect the caller.
	if coords.startCol > coords.endCol {
		coords.startCol, coords.endCol = coords.endCol, coords.startCol
	}
	for col := coords.startCol; col <= coords.endCol; col++ {
		vm[coords.startRow][col]++
	}
}
// drawDiagonalLine increments every cell covered by a 45-degree diagonal
// vent line. A single loop with signed row/column steps replaces the three
// directional branches of the original; for true 45-degree lines (which the
// puzzle guarantees) it visits exactly the same cells.
func (vm *VentMap) drawDiagonalLine(coords coordinates) {
	rowStep, colStep := 1, 1
	if coords.endRow < coords.startRow {
		rowStep = -1
	}
	if coords.endCol < coords.startCol {
		colStep = -1
	}
	// (pos-end)*step <= 0 is "not yet past the endpoint" regardless of the
	// direction of travel, so one condition covers all four directions.
	for row, col := coords.startRow, coords.startCol; (row-coords.endRow)*rowStep <= 0 && (col-coords.endCol)*colStep <= 0; row, col = row+rowStep, col+colStep {
		vm[row][col]++
	}
}
// drawLine draws the vent line described by c onto the map. Diagonal lines
// (both row and column differ) are drawn only when useDiagonal is true, as
// required for part 2 of the puzzle; otherwise they are skipped.
func (vm *VentMap) drawLine(c coordinates, useDiagonal bool) {
	vertical := c.startRow != c.endRow
	horizontal := c.startCol != c.endCol
	switch {
	case vertical && horizontal:
		if useDiagonal {
			vm.drawDiagonalLine(c)
		}
	case vertical:
		vm.drawVerticalLine(c)
	case horizontal:
		vm.drawHorizontalLine(c)
	}
}
// printMap writes the whole vent map to stdout, one row per line, for
// debugging.
func (vm *VentMap) printMap() {
	for i := range vm {
		for j := range vm[i] {
			fmt.Printf(" %d", vm[i][j])
		}
		fmt.Println()
	}
}
// countMoreThanTwoVents walks the vent map and returns the number of
// positions covered by two or more vent lines.
func (vm *VentMap) countMoreThanTwoVents() (count int) {
	for i := range vm {
		for j := range vm[i] {
			if vm[i][j] >= 2 {
				count++
			}
		}
	}
	return
}
// Day5Part1 takes the input of vent lines (horizontal and vertical) and
// returns the count of locations with 2 or more vents. Diagonal lines in
// the input are skipped (useDiagonal=false).
func Day5Part1(ventLines []string) int {
	var ventMap VentMap
	for _, ventLine := range ventLines {
		coords := getVentLineCoordinates(ventLine)
		ventMap.drawLine(coords, false)
	}
	return ventMap.countMoreThanTwoVents()
}
// Day5Part2 takes the input of vent lines (horizontal, vertical, and
// diagonal) and returns the count of locations with 2 or more vents.
// Unlike Day5Part1, diagonal lines are drawn too (useDiagonal=true).
func Day5Part2(ventLines []string) int {
	var ventMap VentMap
	for _, ventLine := range ventLines {
		coords := getVentLineCoordinates(ventLine)
		ventMap.drawLine(coords, true)
	}
	return ventMap.countMoreThanTwoVents()
}
package asstime
import (
"fmt"
"math"
"regexp"
"github.com/Alquimista/eyecandy/utils"
)
// Standard video frame rates, all expressed as float64 frames per second so
// they can be passed directly to MStoFrames/FramesToMS.
const (
	// FpsNtscFilm Frame per second rate NTSC film standard (23.976)
	FpsNtscFilm float64 = float64(24000) / float64(1001)
	// FpsNtsc Frame per second rate NTSC standard (30)
	FpsNtsc float64 = float64(30000) / float64(1001)
	// FpsNtscDouble Frame per second rate NTSC Double standard (60)
	FpsNtscDouble float64 = float64(60000) / float64(1001)
	// FpsNtscQuad Frame per second rate NTSC Quad standard (120)
	FpsNtscQuad float64 = float64(120000) / float64(1001)
	// FpsFilm Frame per second rate Film standard
	FpsFilm float64 = 24.0
	// FpsPal Frame per second rate PAL standard
	FpsPal float64 = 25.0
	// FpsPalDouble Frame per second rate PAL Double standard (50)
	FpsPalDouble float64 = 50.0
	// FpsPalQuad Frame per second rate PAL Quad standard (100).
	// Fix: this was declared "int = 100.0", inconsistent with every other
	// rate above and unusable where a float64 framerate is expected.
	FpsPalQuad float64 = 100.0
)
// Time unit multipliers, all expressed in milliseconds.
const (
	// Millisecond Base time multiplier
	Millisecond = 1
	// Centisecond time multiplier
	Centisecond = 10
	// Second time multiplier
	Second = 1000
	// Minute time multiplier
	Minute = 60 * Second
	// Hour time multiplier
	Hour = 60 * Minute
)

// reSSAfmt matches an SSA/ASS timestamp H:MM:SS.CC, capturing the hour,
// minute, second and centisecond groups. Note the separator dot is
// unescaped, so it matches any character — TODO confirm this is intended.
var reSSAfmt = regexp.MustCompile(`(\d):(\d+):(\d+).(\d+)`)
// MStoFrames converts a duration in milliseconds to the number of frames at
// the given framerate, rounding up to the next whole frame.
func MStoFrames(ms int, framerate float64) int {
	// Divide in floating point: the previous expression float64(ms/Second)
	// performed integer division first, truncating to whole seconds and
	// discarding all sub-second precision.
	return int(math.Ceil(framerate * (float64(ms) / Second)))
}
// FramesToMS converts a frame count at the given framerate into a duration
// in milliseconds (truncated to a whole number).
func FramesToMS(frames int, framerate float64) int {
	seconds := float64(frames) / framerate
	return int(seconds * Second)
}
// MStoSSA Convert Milliseconds to SSA timestamp
func MStoSSA(milli int) string {
sec, ms := utils.DivMod(milli, 1000)
min, s := utils.DivMod(sec, 60)
h, m := utils.DivMod(min, 60)
cs, _ := utils.DivMod(ms, 10)
return fmt.Sprintf("%01d:%02d:%02d.%02d", h, m, s, cs)
}
// SSAtoMS converts an SSA timestamp (H:MM:SS.CC) to milliseconds.
func SSAtoMS(t string) int {
	h, m, s, cs := ssatoSplit(t)
	return (h*Hour + m*Minute + s*Second + cs*Centisecond)
}
// ssatoSplit splits an SSA timestamp into its numeric components.
// H:MM:SS.CC (H=Hour, M=Minute, S=Second, C=centisecond)
// NOTE(review): the regexp match result is not checked; a malformed
// timestamp would panic when the submatches are indexed — confirm callers
// always pass well-formed timestamps.
func ssatoSplit(t string) (h, m, s, cs int) {
	tm := reSSAfmt.FindStringSubmatch(t)
	return utils.Str2int(tm[1]), utils.Str2int(tm[2]),
		utils.Str2int(tm[3]), utils.Str2int(tm[4])
}
package main
import (
"fmt"
"math"
)
// Describe2Der describes 2D shapes: anything that can report an area and a
// perimeter.
type Describe2Der interface {
	area() float64
	perim() float64
}

// Describe3Der describes 3D shapes: anything that can report a volume and a
// surface area.
type Describe3Der interface {
	volume() float64
	surface() float64
}
// Circle description: a circle with the given radius.
type Circle struct {
	radius float64
}

// Rectangle description: an axis-aligned rectangle.
type Rectangle struct {
	width  float64
	height float64
}

// Triangle description: a triangle given by its three side lengths.
type Triangle struct {
	a float64
	b float64
	c float64
}

// Cylinder description: a right circular cylinder.
type Cylinder struct {
	radius float64
	height float64
}
// Circle area: pi * r^2.
func (c Circle) area() float64 {
	return math.Pi * math.Pow(c.radius, 2)
}

// Circle circumference (will call it perim for this example): 2 * pi * r.
func (c Circle) perim() float64 {
	return 2 * math.Pi * c.radius
}

// Rectangle area: width * height.
func (r Rectangle) area() float64 {
	return r.width * r.height
}

// Rectangle perimeter: twice the sum of the two sides.
func (r Rectangle) perim() float64 {
	return (r.width + r.height) * 2
}
// Triangle area via Heron's formula: area = sqrt(s(s-a)(s-b)(s-c)) where s
// is the semi-perimeter.
func (t Triangle) area() float64 {
	s := (t.a + t.b + t.c) / 2
	// Fix: the original multiplied (s - t.a) three times instead of using
	// all three sides, giving wrong areas for non-equilateral triangles.
	return math.Sqrt(s * (s - t.a) * (s - t.b) * (s - t.c))
}

// Triangle perimeter: the sum of the three sides.
func (t Triangle) perim() float64 {
	return t.a + t.b + t.c
}
// Cylinder volume: pi * r^2 * h.
func (c Cylinder) volume() float64 {
	return math.Pi * math.Pow(c.radius, 2) * c.height
}

// Cylinder surface area: the lateral surface plus the two circular end caps.
func (c Cylinder) surface() float64 {
	return (2 * math.Pi * c.radius * c.height) + (2 * math.Pi * math.Pow(c.radius, 2))
}
// describe2D prints the area and perimeter of any 2D shape.
func describe2D(d Describe2Der) {
	fmt.Printf("2D - area is %10.3f, circumference is %10.3f\n",
		d.area(), d.perim())
}

// describe3D prints the volume and surface area of any 3D shape.
func describe3D(d Describe3Der) {
	fmt.Printf("3D - vol is %10.3f, surface area is %10.3f\n",
		d.volume(), d.surface())
}
// main demonstrates interface satisfaction: each shape value is passed to
// the describe function matching the interface it implements.
func main() {
	// Declare and assign to struct instance
	circle1 := Circle{5}
	rectangle1 := Rectangle{5, 3}
	triangle1 := Triangle{4, 5, 6}
	cylinder1 := Cylinder{5, 3}
	// The `Circle`, `Rectangle` and `Triangle` struct types all implement
	// the `Describe2Der` interface so we can use instances of these structs
	// as arguments to `describe2D`; `Cylinder` implements `Describe3Der`.
	describe2D(circle1)
	describe2D(rectangle1)
	describe2D(triangle1)
	describe3D(cylinder1)
}
package collect
import (
"errors"
"fmt"
"reflect"
"strconv"
)
func AnyGet[V, K any](item any, key K) (zero V, _ error) {
var result any
ref := reflect.ValueOf(item)
switch ref.Kind() {
case reflect.Map:
if r := ref.MapIndex(reflect.ValueOf(key)); r.IsValid() {
result = r.Interface()
} else {
return zero, errors.New("invalid map index")
}
case reflect.Array, reflect.Slice:
if index, err := strconv.Atoi(fmt.Sprintf("%d", key)); err != nil {
return zero, err
} else {
if index < 0 || index >= ref.Len() {
return zero, errors.New("index overflow")
}
result = ref.Index(index).Interface()
}
case reflect.Struct:
if r := ref.FieldByName(fmt.Sprintf("%s", key)); r.IsValid() {
result = r.Interface()
} else {
return zero, errors.New("invalid struct field")
}
case reflect.Pointer:
return AnyGet[V, K](ref.Elem().Interface(), key)
default:
return zero, errors.New("failed to get")
}
switch result.(type) {
case V:
return result.(V), nil
default:
return zero, errors.New("type mismatch")
}
}
// Pluck extracts the value under key from every item, producing one V per
// input element; items where the key is missing or mistyped contribute V's
// zero value.
func Pluck[V, K, I comparable](items []I, key K) []V {
	out := make([]V, len(items), cap(items))
	for i := range items {
		// On AnyGet failure the slot keeps the zero value make() put there.
		if v, err := AnyGet[V](items[i], key); err == nil {
			out[i] = v
		}
	}
	return out
}
// MapPluck extracts the value under key from every map in items; maps that
// lack the key contribute V's zero value.
func MapPluck[K, V comparable](items []map[K]V, key K) []V {
	out := make([]V, len(items), cap(items))
	for i, m := range items {
		// A missing key yields the zero value, matching the original's
		// explicit comma-ok branch.
		out[i] = m[key]
	}
	return out
}
// KeyBy indexes items by the value found under key. Items whose key cannot
// be read are dropped; on collision, later items overwrite earlier ones.
func KeyBy[V, K, I comparable](items []I, key K) map[V]I {
	out := make(map[V]I)
	for _, it := range items {
		v, err := AnyGet[V](it, key)
		if err != nil {
			continue
		}
		out[v] = it
	}
	return out
}
// MapKeyBy indexes the maps in items by the value each stores under key;
// on collision, later maps overwrite earlier ones.
func MapKeyBy[K, V comparable](items []map[K]V, key K) map[V]map[K]V {
	out := make(map[V]map[K]V, len(items))
	for _, m := range items {
		out[m[key]] = m
	}
	return out
}
// GroupBy buckets items by the value found under key, preserving input
// order within each bucket. Items whose key cannot be read are dropped.
func GroupBy[V, K, I comparable](items []I, key K) map[V][]I {
	out := make(map[V][]I)
	for _, it := range items {
		v, err := AnyGet[V](it, key)
		if err != nil {
			continue
		}
		out[v] = append(out[v], it)
	}
	return out
}
// MapGroupBy buckets the maps in items by the value each stores under key,
// preserving input order within each bucket.
func MapGroupBy[K, V comparable](items []map[K]V, key K) map[V][]map[K]V {
	out := make(map[V][]map[K]V)
	for _, m := range items {
		g := m[key]
		out[g] = append(out[g], m)
	}
	return out
}
package bsonkit
import (
"math"
"github.com/shopspring/decimal"
"go.mongodb.org/mongo-driver/bson/primitive"
)
// d128ToDec converts a BSON Decimal128 into a shopspring decimal by
// extracting its big-integer coefficient and exponent.
func d128ToDec(d primitive.Decimal128) decimal.Decimal {
	big, exp, _ := d.BigInt()
	return decimal.NewFromBigInt(big, int32(exp))
}

// decTod128 converts a shopspring decimal back into a BSON Decimal128.
// NOTE(review): the parse error is silently dropped — presumably a
// round-tripped value always fits, but confirm for extreme exponents.
func decTod128(d decimal.Decimal) primitive.Decimal128 {
	dd, _ := primitive.ParseDecimal128FromBigInt(d.Coefficient(), int(d.Exponent()))
	return dd
}
// Add will add together two numerical values. It accepts int32, int64,
// float64 and decimal128 in any combination; the result type is the wider
// of the two operands (int32 < int64 < float64 < decimal128). Two int32
// operands stay int32 (and wrap on overflow, like Go's + operator). Any
// unsupported operand type yields Missing.
func Add(num, inc interface{}) interface{} {
	switch num := num.(type) {
	case int32:
		switch inc := inc.(type) {
		case int32:
			return num + inc
		case int64:
			return int64(num) + inc
		case float64:
			return float64(num) + inc
		case primitive.Decimal128:
			return decTod128(decimal.NewFromInt(int64(num)).Add(d128ToDec(inc)))
		default:
			return Missing
		}
	case int64:
		switch inc := inc.(type) {
		case int32:
			return num + int64(inc)
		case int64:
			return num + inc
		case float64:
			return float64(num) + inc
		case primitive.Decimal128:
			return decTod128(decimal.NewFromInt(num).Add(d128ToDec(inc)))
		default:
			return Missing
		}
	case float64:
		switch inc := inc.(type) {
		case int32:
			return num + float64(inc)
		case int64:
			return num + float64(inc)
		case float64:
			return num + inc
		case primitive.Decimal128:
			return decTod128(decimal.NewFromFloat(num).Add(d128ToDec(inc)))
		default:
			return Missing
		}
	case primitive.Decimal128:
		switch inc := inc.(type) {
		case int32:
			return decTod128(d128ToDec(num).Add(decimal.NewFromInt(int64(inc))))
		case int64:
			return decTod128(d128ToDec(num).Add(decimal.NewFromInt(inc)))
		case float64:
			return decTod128(d128ToDec(num).Add(decimal.NewFromFloat(inc)))
		case primitive.Decimal128:
			return decTod128(d128ToDec(num).Add(d128ToDec(inc)))
		default:
			return Missing
		}
	default:
		return Missing
	}
}
// Mul will multiply the two numerical values. It accepts int32, int64,
// float64 and decimal128 in any combination and promotes the result exactly
// like Add: the wider operand type wins, two int32 operands stay int32.
// Any unsupported operand type yields Missing.
func Mul(num, mul interface{}) interface{} {
	switch num := num.(type) {
	case int32:
		switch mul := mul.(type) {
		case int32:
			return num * mul
		case int64:
			return int64(num) * mul
		case float64:
			return float64(num) * mul
		case primitive.Decimal128:
			return decTod128(decimal.NewFromInt(int64(num)).Mul(d128ToDec(mul)))
		default:
			return Missing
		}
	case int64:
		switch mul := mul.(type) {
		case int32:
			return num * int64(mul)
		case int64:
			return num * mul
		case float64:
			return float64(num) * mul
		case primitive.Decimal128:
			return decTod128(decimal.NewFromInt(num).Mul(d128ToDec(mul)))
		default:
			return Missing
		}
	case float64:
		switch mul := mul.(type) {
		case int32:
			return num * float64(mul)
		case int64:
			return num * float64(mul)
		case float64:
			return num * mul
		case primitive.Decimal128:
			return decTod128(decimal.NewFromFloat(num).Mul(d128ToDec(mul)))
		default:
			return Missing
		}
	case primitive.Decimal128:
		switch mul := mul.(type) {
		case int32:
			return decTod128(d128ToDec(num).Mul(decimal.NewFromInt(int64(mul))))
		case int64:
			return decTod128(d128ToDec(num).Mul(decimal.NewFromInt(mul)))
		case float64:
			return decTod128(d128ToDec(num).Mul(decimal.NewFromFloat(mul)))
		case primitive.Decimal128:
			return decTod128(d128ToDec(num).Mul(d128ToDec(mul)))
		default:
			return Missing
		}
	default:
		return Missing
	}
}
// Mod will compute the modulo of the two values. It accepts int32, int64,
// float64 and decimal128 in any combination, promoting like Add/Mul.
// Floating point operands use math.Mod; integer operands use Go's %
// operator. Any unsupported operand type yields Missing.
func Mod(num, div interface{}) interface{} {
	switch num := num.(type) {
	case int32:
		switch div := div.(type) {
		case int32:
			return num % div
		case int64:
			return int64(num) % div
		case float64:
			return math.Mod(float64(num), div)
		case primitive.Decimal128:
			return decTod128(decimal.NewFromInt(int64(num)).Mod(d128ToDec(div)))
		default:
			return Missing
		}
	case int64:
		switch div := div.(type) {
		case int32:
			return num % int64(div)
		case int64:
			return num % div
		case float64:
			return math.Mod(float64(num), div)
		case primitive.Decimal128:
			return decTod128(decimal.NewFromInt(num).Mod(d128ToDec(div)))
		default:
			return Missing
		}
	case float64:
		switch div := div.(type) {
		case int32:
			return math.Mod(num, float64(div))
		case int64:
			return math.Mod(num, float64(div))
		case float64:
			return math.Mod(num, div)
		case primitive.Decimal128:
			return decTod128(decimal.NewFromFloat(num).Mod(d128ToDec(div)))
		default:
			return Missing
		}
	case primitive.Decimal128:
		switch div := div.(type) {
		case int32:
			return decTod128(d128ToDec(num).Mod(decimal.NewFromInt(int64(div))))
		case int64:
			return decTod128(d128ToDec(num).Mod(decimal.NewFromInt(div)))
		case float64:
			return decTod128(d128ToDec(num).Mod(decimal.NewFromFloat(div)))
		case primitive.Decimal128:
			return decTod128(d128ToDec(num).Mod(d128ToDec(div)))
		default:
			return Missing
		}
	default:
		return Missing
	}
}
package wid
import (
"image/color"
"gioui.org/unit"
)
// Some default colors, all fully opaque.
var (
	Red    = RGB(0xFF0000)
	Yellow = RGB(0xFFFF00)
	Green  = RGB(0x00FF00)
	Blue   = RGB(0x0000FF)
	White  = RGB(0xFFFFFF)
	Black  = RGB(0x000000)
)

// Zv is a zero unit.Value. Just saving a few keystrokes.
var Zv = unit.Value{}
// DeEmphasis will change a color to a less prominent color by scaling its
// alpha. In light mode, colors will be lighter; in dark mode, colors will
// be darker. The amount of darkening is greater than the amount of
// lightening.
func DeEmphasis(c color.NRGBA, amount uint8) color.NRGBA {
	// Dark colors (low luminance) keep 255-amount of their alpha; light
	// colors keep amount.
	factor := amount
	if Luminance(c) < 128 {
		factor = 255 - amount
	}
	return MulAlpha(c, factor)
}
// Pxr maps the value v to pixels, returning a float32.
// (c.Metric.Px returns an int; the conversion just widens it.)
func Pxr(c C, v unit.Value) float32 {
	return float32(c.Metric.Px(v))
}
// Disabled blends color towards the luminance and multiplies alpha.
// Blending towards luminance will desaturate the color.
// Multiplying alpha blends the color together more with the background.
func Disabled(c color.NRGBA) (d color.NRGBA) {
	const r = 80 // blend ratio: r/256 of the original channel is kept
	lum := Luminance(c)
	return color.NRGBA{
		R: byte((int(c.R)*r + int(lum)*(256-r)) / 256),
		G: byte((int(c.G)*r + int(lum)*(256-r)) / 256),
		B: byte((int(c.B)*r + int(lum)*(256-r)) / 256),
		// Alpha is scaled to (128+32)/256 = 5/8 of its original value.
		A: byte(int(c.A) * (128 + 32) / 256),
	}
}
// ColDisabled returns c unchanged, or its disabled (desaturated, faded)
// rendition when the disabled flag is set.
func ColDisabled(c color.NRGBA, disabled bool) color.NRGBA {
	if !disabled {
		return c
	}
	return Disabled(c)
}
// Hovered blends color towards a brighter color.
func Hovered(c color.NRGBA) (d color.NRGBA) {
const r = 0x40 // lighten ratio
return color.NRGBA{
R: byte(255 - int(255-c.R)*(255-r)/256),
G: byte(255 - int(255-c.G)*(255-r)/256),
B: byte(255 - int(255-c.B)*(255-r)/256),
A: c.A,
}
}
// Interpolate returns a color in between given colors a and b, depending on
// progress: 0 yields a, 1 yields b. Each channel is interpolated
// independently; the float product is truncated back to int16 before the
// subtraction. Progress is assumed to be in [0, 1] — values outside that
// range can overflow the uint8 conversion.
func Interpolate(a, b color.NRGBA, progress float32) color.NRGBA {
	var out color.NRGBA
	out.R = uint8(int16(a.R) - int16(float32(int16(a.R)-int16(b.R))*progress))
	out.G = uint8(int16(a.G) - int16(float32(int16(a.G)-int16(b.G))*progress))
	out.B = uint8(int16(a.B) - int16(float32(int16(a.B)-int16(b.B))*progress))
	out.A = uint8(int16(a.A) - int16(float32(int16(a.A)-int16(b.A))*progress))
	return out
}
// Gray returns a NRGBA gray with the same luminance as c, preserving alpha.
func Gray(c color.NRGBA) color.NRGBA {
	v := Luminance(c)
	return color.NRGBA{R: v, G: v, B: v, A: c.A}
}
// RGB creates a fully-opaque NRGBA color from a 24-bit 0xRRGGBB hex code.
func RGB(c uint32) color.NRGBA {
	return ARGB(c | 0xff000000)
}
// ARGB creates a NRGBA color from a 32-bit 0xAARRGGBB hex code.
func ARGB(c uint32) color.NRGBA {
	var out color.NRGBA
	out.A = uint8(c >> 24)
	out.R = uint8(c >> 16)
	out.G = uint8(c >> 8)
	out.B = uint8(c)
	return out
}
// WithAlpha returns the input color with the new alpha value.
func WithAlpha(c color.NRGBA, alpha uint8) color.NRGBA {
c.A = alpha
return c
}
// MulAlpha applies the alpha to the color.
func MulAlpha(c color.NRGBA, alpha uint8) color.NRGBA {
c.A = uint8(uint32(c.A) * uint32(alpha) / 0xFF)
return c
}
// Luminance is a fast approximate version of RGBA.Luminance: the weights
// are fixed-point coefficients scaled by 256*256 so the whole computation
// stays in integer arithmetic.
func Luminance(c color.NRGBA) byte {
	const (
		r = 13933 // 0.2126 * 256 * 256
		g = 46871 // 0.7152 * 256 * 256
		b = 4732  // 0.0722 * 256 * 256
		t = r + g + b
	)
	return byte((r*int(c.R) + g*int(c.G) + b*int(c.B)) / t)
}
package fflogs
import (
"context"
)
// ReportTablesOptions carries the path and query parameters shared by all
// report-table endpoints (see the ReportTables* methods below). Nil pointer
// fields are omitted from the request.
type ReportTablesOptions struct {
	//view string `path:"view"` // The type of data requested. Supported values are 'summary', 'damage-done', 'damage-taken', 'healing', 'casts', 'summons', 'buffs', 'debuffs', 'deaths', 'survivability', 'resources' and 'resources-gains'.
	Code string `path:"code"` // The specific report to collect table entries for.
	Start *int `query:"start"` // A start time. This is a time from the start of the report in milliseconds. If omitted, 0 is assumed.
	End *int `query:"end"` // An end time. This is a time from the start of the report in milliseconds. If omitted, 0 is assumed.
	Hostility *int `query:"hostility"` // An optional hostility value of 0 or 1. The default is 0. A value of 0 means to collect data for Friendlies. A value of 1 means to collect data for Enemies.
	By *string `query:"by"` // An optional parameter indicating how to group entries. They can be grouped by 'source', by 'target', or by 'ability'. This value matches WCL's default behavior if omitted. For buffs and debuffs, a value of 'source' means auras gained by the source, and a value of 'target' means auras cast by the source. This value is not used in the 'deaths', 'survivability', 'resources' and 'resources-gains' views.
	Sourceid *int `query:"sourceid"` // An optional actor ID to filter to. If set, only events where the ID matches the source (or target for damage-taken) of the event will be returned. The actor's pets will also be included (unless the options field overrides).
	Sourceinstance *int `query:"sourceinstance"` // An optional actor instance ID to filter to. If set, only events where the instance ID matches the source (or target for damage-taken) of the event will be returned. This is useful to look for all events involving NPC N, where N is the actor instance ID.
	Sourceclass *string `query:"sourceclass"` // An optional actor class to filter to. If set, only events where the source (or target for damage-taken) involves that class (e.g., Mage) will be returned.
	Targetid *int `query:"targetid"` // An optional actor ID to filter to. If set, only events where the ID matches the target (or source for damage-taken) of the event will be returned. This value is not used in the 'deaths', 'survivability', 'resources' and 'resources-gains' views.
	Targetinstance *int `query:"targetinstance"` // An optional actor instance ID to filter to. If set, only events where the instance ID matches the target (or source for damage-taken) of the event will be returned. This is useful to look for all events involving NPC N, where N is the actor instance ID. This value is not used in the 'deaths', 'survivability', 'resources' and 'resources-gains' views.
	Targetclass *string `query:"targetclass"` // An optional actor class to filter to. If set, only events where the target (or source for damage-taken) involves that class (e.g., Mage) will be returned. This value is not used in the 'deaths', 'survivability', 'resources' and 'resources-gains' views.
	SourceAurasAbsent *string `query:"sourceAurasAbsent"` // A comma-separated string of aura game IDs. Only matches if the aura is absent on the source.
	TargetAurasPresent *string `query:"targetAurasPresent"` // A comma-separated string of aura game IDs. Only matches if the aura is present on the target.
	TargetAurasAbsent *string `query:"targetAurasAbsent"` // A comma-separated string of aura game IDs. Only matches if the aura is absent on the target.
	Abilityid *int `query:"abilityid"` // An optional ability ID to filter to. If set, only events where the ability matches will be returned. Consolidated abilities (WCL only) are represented using a negative number that matches the ability ID that everything is consolidated under. For the 'deaths' view, this represents a specific killing blow. For the resources views, the abilityid is not an ability but a resource type. Valid resource types can be viewed at https://www.fflogs.com/reports/resource_types/
	Options *int `query:"options"` // A set of options for what to include/exclude. These correspond to options like Include Overkill in the Damage Done pane. Complete list will be forthcoming. If omitted, appropriate defaults that match WCL's default behavior will be chosen. This value is not used in the 'deaths', 'survivability', 'resources' and 'resources-gains' views.
	Cutoff *int `query:"cutoff"` // An optional death cutoff. If set, events after that number of deaths have occurred will not be examined.
	Encounter *int `query:"encounter"` // An optional encounter filter. If set to a specific encounter ID, only fights involving a specific encounter will be considered. The encounter IDs match those used in rankings/statistics.
	Wipes *int `query:"wipes"` // An optional wipes filter. If set to 1, only wipes will be considered.
	Difficulty *int `query:"difficulty"` // An optional difficulty filter.
	Filter *string `query:"filter"` // An optional filter written in WCL's expression language. Events must match the filter to be included.
	Translate *bool `query:"translate"` // An optional flag indicating that the results should be translated into the language of the host (e.g., cn.warcraftlogs.com would get Chinese results).
}
// Gets a table of entries, either by actor or ability, of damage, healing and cast totals for each entry. This API exactly follows what is returned for the Tables panes on the site. It can and will change as the needs of those panes do, and as such should never be considered a frozen API. Use at your own risk.
func (c *Raw) ReportTablesSummary(context context.Context, opt *ReportTablesOptions, resp interface{}) error {
return c.call(
context,
"/report/tables/summary/{code}",
opt,
resp,
)
}
// Gets a table of entries, either by actor or ability, of damage, healing and cast totals for each entry. This API exactly follows what is returned for the Tables panes on the site. It can and will change as the needs of those panes do, and as such should never be considered a frozen API. Use at your own risk.
func (c *Raw) ReportTablesDamageDone(context context.Context, opt *ReportTablesOptions, resp interface{}) error {
return c.call(
context,
"/report/tables/damage-done/{code}",
opt,
resp,
)
}
// ReportTablesDamageTaken gets a table of entries, either by actor or ability, of damage, healing and cast totals for each entry. This API exactly follows what is returned for the Tables panes on the site. It can and will change as the needs of those panes do, and as such should never be considered a frozen API. Use at your own risk.
func (c *Raw) ReportTablesDamageTaken(ctx context.Context, opt *ReportTablesOptions, resp interface{}) error {
	// resp is decoded from the raw API response; pass a pointer to the desired shape.
	return c.call(
		ctx,
		"/report/tables/damage-taken/{code}",
		opt,
		resp,
	)
}
// ReportTablesHealing gets a table of entries, either by actor or ability, of damage, healing and cast totals for each entry. This API exactly follows what is returned for the Tables panes on the site. It can and will change as the needs of those panes do, and as such should never be considered a frozen API. Use at your own risk.
func (c *Raw) ReportTablesHealing(ctx context.Context, opt *ReportTablesOptions, resp interface{}) error {
	// resp is decoded from the raw API response; pass a pointer to the desired shape.
	return c.call(
		ctx,
		"/report/tables/healing/{code}",
		opt,
		resp,
	)
}
// ReportTablesCasts gets a table of entries, either by actor or ability, of damage, healing and cast totals for each entry. This API exactly follows what is returned for the Tables panes on the site. It can and will change as the needs of those panes do, and as such should never be considered a frozen API. Use at your own risk.
func (c *Raw) ReportTablesCasts(ctx context.Context, opt *ReportTablesOptions, resp interface{}) error {
	// resp is decoded from the raw API response; pass a pointer to the desired shape.
	return c.call(
		ctx,
		"/report/tables/casts/{code}",
		opt,
		resp,
	)
}
// ReportTablesSummons gets a table of entries, either by actor or ability, of damage, healing and cast totals for each entry. This API exactly follows what is returned for the Tables panes on the site. It can and will change as the needs of those panes do, and as such should never be considered a frozen API. Use at your own risk.
func (c *Raw) ReportTablesSummons(ctx context.Context, opt *ReportTablesOptions, resp interface{}) error {
	// resp is decoded from the raw API response; pass a pointer to the desired shape.
	return c.call(
		ctx,
		"/report/tables/summons/{code}",
		opt,
		resp,
	)
}
// ReportTablesBuffs gets a table of entries, either by actor or ability, of damage, healing and cast totals for each entry. This API exactly follows what is returned for the Tables panes on the site. It can and will change as the needs of those panes do, and as such should never be considered a frozen API. Use at your own risk.
func (c *Raw) ReportTablesBuffs(ctx context.Context, opt *ReportTablesOptions, resp interface{}) error {
	// resp is decoded from the raw API response; pass a pointer to the desired shape.
	return c.call(
		ctx,
		"/report/tables/buffs/{code}",
		opt,
		resp,
	)
}
// ReportTablesDebuffs gets a table of entries, either by actor or ability, of damage, healing and cast totals for each entry. This API exactly follows what is returned for the Tables panes on the site. It can and will change as the needs of those panes do, and as such should never be considered a frozen API. Use at your own risk.
func (c *Raw) ReportTablesDebuffs(ctx context.Context, opt *ReportTablesOptions, resp interface{}) error {
	// resp is decoded from the raw API response; pass a pointer to the desired shape.
	return c.call(
		ctx,
		"/report/tables/debuffs/{code}",
		opt,
		resp,
	)
}
// ReportTablesDeaths gets a table of entries, either by actor or ability, of damage, healing and cast totals for each entry. This API exactly follows what is returned for the Tables panes on the site. It can and will change as the needs of those panes do, and as such should never be considered a frozen API. Use at your own risk.
func (c *Raw) ReportTablesDeaths(ctx context.Context, opt *ReportTablesOptions, resp interface{}) error {
	// resp is decoded from the raw API response; pass a pointer to the desired shape.
	return c.call(
		ctx,
		"/report/tables/deaths/{code}",
		opt,
		resp,
	)
}
// ReportTablesSurvivability gets a table of entries, either by actor or ability, of damage, healing and cast totals for each entry. This API exactly follows what is returned for the Tables panes on the site. It can and will change as the needs of those panes do, and as such should never be considered a frozen API. Use at your own risk.
func (c *Raw) ReportTablesSurvivability(ctx context.Context, opt *ReportTablesOptions, resp interface{}) error {
	// resp is decoded from the raw API response; pass a pointer to the desired shape.
	return c.call(
		ctx,
		"/report/tables/survivability/{code}",
		opt,
		resp,
	)
}
// ReportTablesResources gets a table of entries, either by actor or ability, of damage, healing and cast totals for each entry. This API exactly follows what is returned for the Tables panes on the site. It can and will change as the needs of those panes do, and as such should never be considered a frozen API. Use at your own risk.
func (c *Raw) ReportTablesResources(ctx context.Context, opt *ReportTablesOptions, resp interface{}) error {
	// resp is decoded from the raw API response; pass a pointer to the desired shape.
	return c.call(
		ctx,
		"/report/tables/resources/{code}",
		opt,
		resp,
	)
}
// Gets a table of entries, either by actor or ability, of damage, healing and cast totals for each entry. This API exactly follows what is returned for the Tables panes on the site. It can and will change as the needs of those panes do, and as such should never be considered a frozen API. Use at your own risk.
func (c *Raw) ReportTablesResourcesGains(context context.Context, opt *ReportTablesOptions, resp interface{}) error {
return c.call(
context,
"/report/tables/resources-gains/{code}",
opt,
resp,
)
} | api_Report_Tables_raw.go | 0.834272 | 0.479077 | api_Report_Tables_raw.go | starcoder |
package ansicsi
import (
"bytes"
"io"
"strconv"
)
// Command represents a parsed ANSI control function.
type Command interface {
	// Encode writes the ANSI CSI and control sequence for the command to the given Writer.
	// It returns the number of bytes written and any write error.
	Encode(w io.Writer) (int, error)
	// decodeParameters applies the decoded numeric parameter list to the
	// command (a value of -1 marks an empty/defaulted parameter field) and
	// reports whether the list was acceptable for this control function.
	decodeParameters(params []int) bool
}
// ControlSequence represents a single ANSI control sequence.
type ControlSequence struct {
Parameters []byte
Intermediate []byte
Final byte
}
func (cs *ControlSequence) Encode(w io.Writer) (int, error) {
bytes := make([]byte, 0, 2+len(cs.Parameters)+len(cs.Intermediate)+1)
bytes = append(bytes, []byte("\x1b[")...)
bytes = append(bytes, cs.Parameters...)
bytes = append(bytes, cs.Intermediate...)
bytes = append(bytes, cs.Final)
return w.Write(bytes)
}
func (*ControlSequence) decodeParameters(params []int) bool {
return false
}
// Decode decodes the ANSI control function beginning at the first byte of b and
// returns the function and its encoded size in bytes. A nil Command with size 0
// is returned when b does not begin with a complete, well-formed CSI sequence.
// If the sequence is well-formed but the control function is not recognized,
// the raw sequence is returned as a *ControlSequence.
func Decode(b []byte) (Command, int) {
	// Every CSI sequence starts with ESC '['.
	if len(b) < 2 || b[0] != 0x1b || b[1] != '[' {
		return nil, 0
	}
	body := b[2:]

	// Scan the run of parameter bytes (0x30-0x3f).
	p := 0
	for p < len(body) && body[p] >= 0x30 && body[p] < 0x40 {
		p++
	}

	// Scan the run of intermediate bytes (0x20-0x2f) that follows.
	q := p
	for q < len(body) && body[q] >= 0x20 && body[q] < 0x30 {
		q++
	}

	// A single final byte in 0x40-0x7e must terminate the sequence.
	if q >= len(body) || body[q] < 0x40 || body[q] > 0x7e {
		return nil, 0
	}

	params, intermediate, final := body[:p], body[p:q], body[q]
	size := 2 + q + 1

	cmd, ok := decodeCommand(params, intermediate, final)
	if !ok {
		// Well-formed but unrecognized: hand back the raw sequence.
		cmd = &ControlSequence{
			Parameters:   params,
			Intermediate: intermediate,
			Final:        final,
		}
	}
	return cmd, size
}
// getCommand returns the Command implementation, if any, for the control
// function identified by the given intermediate byte(s) and final byte.
// The case labels enumerate the standard control functions (the "8.3.x"
// comments appear to follow ECMA-48 section numbering — confirm against the
// spec). Entries returning (nil, false) are recognized placeholders that are
// not yet implemented; the caller then falls back to a raw *ControlSequence.
// Currently only SGR (SELECT GRAPHIC RENDITION) is implemented.
func getCommand(intermediate []byte, final byte) (Command, bool) {
	switch {
	case len(intermediate) == 0:
		switch final {
		case 0x40: // 8.3.64 ICH - INSERT CHARACTER (Pn)
			return nil, false
		case 0x41: // 8.3.22 CUU - CURSOR UP (Pn)
			return nil, false
		case 0x42: // 8.3.19 CUD - CURSOR DOWN (Pn)
			return nil, false
		case 0x43: // 8.3.20 CUF - CURSOR RIGHT (Pn)
			return nil, false
		case 0x44: // 8.3.18 CUB - CURSOR LEFT (Pn)
			return nil, false
		case 0x45: // 8.3.12 CNL - CURSOR NEXT LINE (Pn)
			return nil, false
		case 0x46: // 8.3.13 CPL - CURSOR PRECEDING LINE (Pn)
			return nil, false
		case 0x47: // 8.3.9 CHA - CURSOR CHARACTER ABSOLUTE (Pn)
			return nil, false
		case 0x48: // 8.3.21 CUP - CURSOR POSITION (Pn1;Pn2)
			return nil, false
		case 0x49: // 8.3.10 CHT - CURSOR FORWARD TABULATION (Pn)
			return nil, false
		case 0x4a: // 8.3.39 ED - ERASE IN PAGE (Ps)
			return nil, false
		case 0x4b: // 8.3.41 EL - ERASE IN LINE (Ps)
			return nil, false
		case 0x4c: // 8.3.67 IL - INSERT LINE (Pn)
			return nil, false
		case 0x4d: // 8.3.32 DL - DELETE LINE (Pn)
			return nil, false
		case 0x4e: // 8.3.40 EF - ERASE IN FIELD (Ps)
			return nil, false
		case 0x4f: // 8.3.37 EA - ERASE IN AREA (Ps)
			return nil, false
		case 0x50: // 8.3.26 DCH - DELETE CHARACTER (Pn)
			return nil, false
		case 0x51: // 8.3.115 SEE - SELECT EDITING EXTENT (Ps)
			return nil, false
		case 0x52: // 8.3.14 CPR - ACTIVE POSITION REPORT (Pn1;Pn2)
			return nil, false
		case 0x53: // 8.3.147 SU - SCROLL UP (Pn)
			return nil, false
		case 0x54: // 8.3.113 SD - SCROLL DOWN (Pn)
			return nil, false
		case 0x55: // 8.3.87 NP - NEXT PAGE (Pn)
			return nil, false
		case 0x56: // 8.3.95 PP - PRECEDING PAGE (Pn)
			return nil, false
		case 0x57: // 8.3.17 CTC - CURSOR TABULATION CONTROL (Ps...)
			return nil, false
		case 0x58: // 8.3.38 ECH - ERASE CHARACTER (Pn)
			return nil, false
		case 0x59: // 8.3.23 CVT - CURSOR LINE TABULATION (Pn)
			return nil, false
		case 0x5a: // 8.3.7 CBT - CURSOR BACKWARD TABULATION (Pn)
			return nil, false
		case 0x5b: // 8.3.137 SRS - START REVERSED STRING (Ps)
			return nil, false
		case 0x5c: // 8.3.99 PTX - PARALLEL TEXTS (Ps)
			return nil, false
		case 0x5d: // 8.3.114 SDS - START DIRECTED STRING (Ps)
			return nil, false
		case 0x5e: // 8.3.120 SIMD - SELECT IMPLICIT MOVEMENT DIRECTION (Ps)
			return nil, false
		case 0x60: // 8.3.57 HPA - CHARACTER POSITION ABSOLUTE (Pn)
			return nil, false
		case 0x61: // 8.3.59 HPR - CHARACTER POSITION FORWARD (Pn)
			return nil, false
		case 0x62: // 8.3.103 REP - REPEAT (Pn)
			return nil, false
		case 0x63: // 8.3.24 DA - DEVICE ATTRIBUTES (Ps)
			return nil, false
		case 0x64: // 8.3.158 VPA - LINE POSITION ABSOLUTE (Pn)
			return nil, false
		case 0x65: // 8.3.160 VPR - LINE POSITION FORWARD (Pn)
			return nil, false
		case 0x66: // 8.3.63 HVP - CHARACTER AND LINE POSITION (Pn1;Pn2)
			return nil, false
		case 0x67: // 8.3.154 TBC - TABULATION CLEAR (Ps)
			return nil, false
		case 0x68: // 8.3.125 SM - SET MODE (Ps...)
			return nil, false
		case 0x69: // 8.3.82 MC - MEDIA COPY (Ps)
			return nil, false
		case 0x6a: // 8.3.58 HPB - CHARACTER POSITION BACKWARD (Pn)
			return nil, false
		case 0x6b: // 8.3.159 VPB - LINE POSITION BACKWARD (Pn)
			return nil, false
		case 0x6c: // 8.3.106 RM - RESET MODE (Ps...)
			return nil, false
		case 0x6d: // 8.3.117 SGR - SELECT GRAPHIC RENDITION (Ps...)
			return &SetGraphicsRendition{}, true
		case 0x6e: // 8.3.35 DSR - DEVICE STATUS REPORT (Ps)
			return nil, false
		case 0x6f: // 8.3.25 DAQ - DEFINE AREA QUALIFICATION (Ps...)
			return nil, false
		}
	case len(intermediate) == 1 && intermediate[0] == 0x20:
		switch final {
		case 0x40: // 8.3.121 SL - SCROLL LEFT (Pn)
			return nil, false
		case 0x41: // 8.3.135 SR - SCROLL RIGHT (Pn)
			return nil, false
		case 0x42: // 8.3.55 GSM - GRAPHIC SIZE MODIFICATION (Pn1;Pn2)
			return nil, false
		case 0x43: // 8.3.56 GSS - GRAPHIC SIZE SELECTION (Pn)
			return nil, false
		case 0x44: // 8.3.53 FNT - FONT SELECTION (Ps1;Ps2)
			return nil, false
		case 0x45: // 8.3.157 TSS - THIN SPACE SPECIFICATION (Pn)
			return nil, false
		case 0x46: // 8.3.73 JFY - JUSTIFY (Ps...)
			return nil, false
		case 0x47: // 8.3.132 SPI - SPACING INCREMENT (Pn1;Pn2)
			return nil, false
		case 0x48: // 8.3.102 QUAD - QUAD (Ps...)
			return nil, false
		case 0x49: // 8.3.139 SSU - SELECT SIZE UNIT (Ps)
			return nil, false
		case 0x4a: // 8.3.91 PFS - PAGE FORMAT SELECTION (Ps)
			return nil, false
		case 0x4b: // 8.3.118 SHS - SELECT CHARACTER SPACING (Ps)
			return nil, false
		case 0x4c: // 8.3.149 SVS - SELECT LINE SPACING (Ps)
			return nil, false
		case 0x4d: // 8.3.66 IGS - IDENTIFY GRAPHIC SUBREPERTOIRE (Ps)
			return nil, false
		case 0x4f: // 8.3.65 IDCS - IDENTIFY DEVICE CONTROL STRING (Ps)
			return nil, false
		case 0x50: // 8.3.96 PPA - PAGE POSITION ABSOLUTE (Pn)
			return nil, false
		case 0x51: // 8.3.98 PPR - PAGE POSITION FORWARD (Pn)
			return nil, false
		case 0x52: // 8.3.97 PPB - PAGE POSITION BACKWARD (Pn)
			return nil, false
		case 0x53: // 8.3.130 SPD - SELECT PRESENTATION DIRECTIONS (Ps1;Ps2)
			return nil, false
		case 0x54: // 8.3.36 DTA - DIMENSION TEXT AREA (Pn1;Pn2)
			return nil, false
		case 0x55: // 8.3.122 SLH - SET LINE HOME (Pn)
			return nil, false
		case 0x56: // 8.3.123 SLL - SET LINE LIMIT (Pn)
			return nil, false
		case 0x57: // 8.3.52 FNK - FUNCTION KEY (Pn)
			return nil, false
		case 0x58: // 8.3.134 SPQR - SELECT PRINT QUALITY AND RAPIDITY (Ps)
			return nil, false
		case 0x59: // 8.3.116 SEF - SHEET EJECT AND FEED (Ps1;Ps2)
			return nil, false
		case 0x5a: // 8.3.90 PEC - PRESENTATION EXPAND OR CONTRACT (Ps)
			return nil, false
		case 0x5b: // 8.3.140 SSW - SET SPACE WIDTH (Pn)
			return nil, false
		case 0x5c: // 8.3.107 SACS - SET ADDITIONAL CHARACTER SEPARATION (Pn)
			return nil, false
		case 0x5d: // 8.3.108 SAPV - SELECT ALTERNATIVE PRESENTATION VARIANTS (Ps...)
			return nil, false
		case 0x5e: // 8.3.144 STAB - SELECTIVE TABULATION (Ps)
			return nil, false
		case 0x5f: // 8.3.54 GCC - GRAPHIC CHARACTER COMBINATION (Ps)
			return nil, false
		case 0x60: // 8.3.153 TATE - TABULATION ALIGNED TRAILING EDGE (Pn)
			return nil, false
		case 0x61: // 8.3.152 TALE - TABULATION ALIGNED LEADING EDGE (Pn)
			return nil, false
		case 0x62: // 8.3.151 TAC - TABULATION ALIGNED CENTRED (Pn)
			return nil, false
		case 0x63: // 8.3.155 TCC - TABULATION CENTRED ON CHARACTER (Pn1;Pn2)
			return nil, false
		case 0x64: // 8.3.156 TSR - TABULATION STOP REMOVE (Pn)
			return nil, false
		case 0x65: // 8.3.110 SCO - SELECT CHARACTER ORIENTATION (Ps)
			return nil, false
		case 0x66: // 8.3.136 SRCS - SET REDUCED CHARACTER SEPARATION (Pn)
			return nil, false
		case 0x67: // 8.3.112 SCS - SET CHARACTER SPACING (Pn)
			return nil, false
		case 0x68: // 8.3.124 SLS - SET LINE SPACING (Pn)
			return nil, false
		case 0x69: // 8.3.131 SPH - SET PAGE HOME (Pn)
			return nil, false
		case 0x6a: // 8.3.133 SPL - SET PAGE LIMIT (Pn)
			return nil, false
		case 0x6b: // 8.3.111 SCP - SELECT CHARACTER PATH (Ps1;Ps2)
			return nil, false
		}
	}
	return nil, false
}
// decodeCommand parses the raw parameter bytes into a numeric parameter list
// and applies it to the Command registered for the given intermediate/final
// bytes. Empty parameter fields (e.g. the middle field of "1;;3", or either
// side of a lone ";") are represented as -1, letting commands substitute
// their defaults. It returns (nil, false) when the control function is
// unrecognized, when a parameter field is not a valid unsigned decimal
// number, or when the command rejects the parameter list.
func decodeCommand(parameters, intermediate []byte, final byte) (Command, bool) {
	cmd, ok := getCommand(intermediate, final)
	if !ok {
		return nil, false
	}
	// Decode the parameter list. Splitting on ';' yields one field per
	// parameter, with an empty field denoting a defaulted parameter (-1).
	// This replaces the previous hand-rolled scanner, which duplicated the
	// ParseUint handling for the final field and needed a separate fixup
	// for a trailing ';'.
	var params []int
	if len(parameters) > 0 {
		for _, field := range bytes.Split(parameters, []byte{';'}) {
			if len(field) == 0 {
				params = append(params, -1)
				continue
			}
			i, err := strconv.ParseUint(string(field), 10, 0)
			if err != nil {
				return nil, false
			}
			params = append(params, int(i))
		}
	}
	return cmd, cmd.decodeParameters(params)
}
func encodeCommand(w io.Writer, parameters []int, intermediate []byte, final byte) (int, error) {
// Encode the parameter list.
var params bytes.Buffer
for i, p := range parameters {
if i > 0 {
params.WriteByte(';')
}
if p >= 0 {
params.WriteString(strconv.FormatUint(uint64(p), 10))
}
}
// Write the CSI + control sequence.
cs := ControlSequence{
Parameters: params.Bytes(),
Intermediate: intermediate,
Final: final,
}
return cs.Encode(w)
} | csi.go | 0.619241 | 0.452717 | csi.go | starcoder |
package main
type Scanner struct {
inputStream string // Keep it simple
}
func (scanner Scanner) Scan() Token {
return Token{}
}
type Token struct {
}
type Parser struct {
}
func (parser Parser) parse(scanner Scanner, ProgramNodeBuilder ProgramNodeBuilder) {
}
type ProgramNodeBuilder struct {
node ProgramNode
}
func (programNodeBuilder ProgramNodeBuilder) newVariable(variableName string) ProgramNode {
return ExpressionNode{}
}
func (programNodeBuilder ProgramNodeBuilder) newAssignment(variable ProgramNode, expression ProgramNode) ProgramNode {
return ExpressionNode{}
}
func (programNodeBuilder ProgramNodeBuilder) newReturnStatement(value ProgramNode) ProgramNode {
return ExpressionNode{}
}
func (programNodeBuilder ProgramNodeBuilder) newCondition(condition ProgramNode, truePart ProgramNode, falsePart ProgramNode) ProgramNode {
return ExpressionNode{}
}
type ProgramNode interface {
getSourcePosition(line int, index int)
add(ProgramNode)
remove(ProgramNode)
traverse(CodeGenerator)
}
type ExpressionNode struct {
children []ProgramNode
}
func (node ExpressionNode) getSourcePosition(line int, index int) {
}
func (node ExpressionNode) add(programNode ProgramNode) {
}
func (node ExpressionNode) remove(programNode ProgramNode) {
}
func (node ExpressionNode) traverse(codeGenerator CodeGenerator) {
codeGenerator.visit(node)
for i := 0; i < len(node.children); i++ {
node.children[i].traverse(codeGenerator)
}
}
type CodeGenerator struct {
output string
}
func (codeGenerator CodeGenerator) visit(programNode ProgramNode) {
}
type Compiler struct {
}
func (compiler Compiler) compile(input string, output string) (string) {
scanner := new(Scanner)
programNodeBuilder := new(ProgramNodeBuilder)
parser := new(Parser)
parser.parse(*scanner, *programNodeBuilder)
codeGenerator := CodeGenerator {
output: output,
}
programNodeBuilder.node = ExpressionNode{}
parseTree := programNodeBuilder.node
parseTree.traverse(codeGenerator)
return "Testing this component requires a lot of not implemented integrations"
} | structural/facade/facade.go | 0.621656 | 0.403861 | facade.go | starcoder |
package trie
import (
"github.com/howz97/algorithm/basic/queue"
"github.com/howz97/algorithm/strings/alphabet"
)
func NewTrie[T any](alp alphabet.IAlp) *Trie[T] {
return &Trie[T]{
alp: alp,
root: newNode[T](alp.R()),
}
}
type Trie[T any] struct {
alp alphabet.IAlp
root *node[T]
}
func (t *Trie[T]) Find(key string) *T {
node := t.root.locate(t.alp.ToIndices(key))
if node == nil {
return nil
}
return node.val
}
func (t *Trie[T]) Upsert(key string, v T) {
t.root.upsert(t.alp.ToIndices(key), v, t.alp.R())
}
func (t *Trie[T]) Delete(key string) {
node := t.root.locate(t.alp.ToIndices(key))
if node != nil {
node.val = nil
}
}
func (t *Trie[T]) LongestPrefixOf(s string) string {
l := t.root.longestPrefixOf(t.alp.ToIndices(s), 0, 0)
return string([]rune(s)[:l])
}
func (t *Trie[T]) KeysWithPrefix(prefix string) []string {
node := t.root.locate(t.alp.ToIndices(prefix))
if node == nil {
return nil
}
q := queue.NewSliceQ[string](0)
node.collect(t.alp, prefix, q)
return q.Drain()
}
func (t *Trie[T]) KeysMatch(p string) []string {
q := queue.NewSliceQ[string](0)
t.root.keysMatch(t.alp, []rune(p), "", q)
return q.Drain()
}
type node[T any] struct {
val *T
next []*node[T]
}
func newNode[T any](size int) *node[T] {
return &node[T]{next: make([]*node[T], size)}
}
func (t *node[T]) locate(k []rune) *node[T] {
if len(k) == 0 {
return t
}
next := t.next[k[0]]
if next == nil {
return nil
}
return next.locate(k[1:])
}
func (t *node[T]) upsert(k []rune, v T, r int) {
if len(k) == 0 {
t.val = &v
return
}
next := t.next[k[0]]
if next == nil {
next = newNode[T](r)
t.next[k[0]] = next
}
next.upsert(k[1:], v, r)
}
func (t *node[T]) longestPrefixOf(s []rune, d int, l int) int {
if t.val != nil {
l = d
}
if len(s) == d {
return l
}
next := t.next[s[d]]
if next == nil {
return l
}
return next.longestPrefixOf(s, d+1, l)
}
func (t *node[T]) collect(a alphabet.IAlp, prefix string, keys *queue.SliceQ[string]) {
if t.val != nil {
keys.PushBack(prefix)
}
for i, next := range t.next {
if next == nil {
continue
}
next.collect(a, prefix+string(a.ToRune(rune(i))), keys)
}
}
func (t *node[T]) keysMatch(a alphabet.IAlp, pattern []rune, prefix string, keys *queue.SliceQ[string]) {
if len(pattern) == 0 {
if t.val != nil {
keys.PushBack(prefix)
}
return
}
if pattern[0] == '.' {
for i, next := range t.next {
if next == nil {
continue
}
next.keysMatch(a, pattern[1:], prefix+string(a.ToRune(rune(i))), keys)
}
} else {
next := t.next[a.ToIndex(pattern[0])]
if next != nil {
prefix = prefix + string(pattern[0])
next.keysMatch(a, pattern[1:], prefix, keys)
}
}
} | strings/trie/trie.go | 0.512693 | 0.453322 | trie.go | starcoder |
package gobang
/**
* |B| | | | | | | | | |
* | |B| | | | | | | | |
* | | |B| | | | | | | |
* | | | |B| | | | | | |
* | | | | |B| | | | | |
* | | | |B| | | | | | |
* | | | | |B| | | | | |
* | | | | | |B| | | | |
* | | | | | | |B| | | |
* | | | | | | | |B| | |
*/
func NewTopLeftDiagonalCellMatcher(stone Stone, count int) *TopLeftDiagonalCellMatcher {
return &TopLeftDiagonalCellMatcher{count: count, stone: stone}
}
type TopLeftDiagonalCellMatcher struct {
count int
stone Stone
}
func (s *TopLeftDiagonalCellMatcher) Matches(board *Board) *MatchedResult {
result := &MatchedResult{}
groups := s.scanAllCellGroup(board)
reachedSelector := ReachedSelector{
stone: s.stone,
count: s.count,
board: board,
neighbor: NewTopLeftNeighborDistance(),
}
for _, group := range groups {
results := group.SelectReached(reachedSelector)
if len(results) <= 0 {
continue
}
result.results = append(result.results, results...)
}
return result
}
func (s *TopLeftDiagonalCellMatcher) scanAllCellGroup(board *Board) []*CellGroup {
groups := make([]*CellGroup, 0)
groups = append(groups, s.scanXAxisCellGroup(board)...)
groups = append(groups, s.scanYAxisCellGroup(board)...)
return groups
}
/**
* |B|B|B|B|B|B| | | | |
* | |B|B|B|B|B|B| | | |
* | | |B|B|B|B|B|B| | |
* | | | |B|B|B|B|B|B| |
* | | | | |B|B|B|B|B|B|
* | | | | | |B|B|B|B|B|
* | | | | | | |B|B|B|B|
* | | | | | | | |B|B|B|
* | | | | | | | | |B|B|
* | | | | | | | | | |B|
*/
func (s *TopLeftDiagonalCellMatcher) scanXAxisCellGroup(board *Board) []*CellGroup {
point := DefaultPoint()
maxX := board.Width() - 1
endX := board.Width() - s.count
groups := make([]*CellGroup, 0)
for startX := 0; startX <= endX; startX++ {
y := 0
endY := (board.Height() - startX)
group := &CellGroup{}
for x := startX; x <= maxX; x++ {
if y > endY {
break
}
cell := board.SelectCell(point.SetTo(x, y))
group.cells = append(group.cells, cell)
y++
}
groups = append(groups, group)
group = &CellGroup{}
}
return groups
}
/**
* | | | | | | | | | | |
* |B| | | | | | | | | |
* |B|B| | | | | | | | |
* |B|B|B| | | | | | | |
* |B|B|B|B| | | | | | |
* |B|B|B|B|B| | | | | |
* | |B|B|B|B|B| | | | |
* | | |B|B|B|B|B| | | |
* | | | |B|B|B|B|B| | |
* | | | | |B|B|B|B|B| |
*/
func (s *TopLeftDiagonalCellMatcher) scanYAxisCellGroup(board *Board) []*CellGroup {
point := DefaultPoint()
maxY := board.Height() - 1
endY := board.Height() - s.count
groups := make([]*CellGroup, 0)
for startY := 1; startY <= endY; startY++ {
x := 0
endX := (board.Width() - startY)
group := &CellGroup{}
for y := startY; y <= maxY; y++ {
if x > endX {
break
}
cell := board.SelectCell(point.SetTo(x, y))
group.cells = append(group.cells, cell)
x++
}
groups = append(groups, group)
group = &CellGroup{}
}
return groups
} | gobang/top_left_diagonal_cell_matcher.go | 0.811825 | 0.492981 | top_left_diagonal_cell_matcher.go | starcoder |
package main
import (
"fmt"
"math"
"math/rand"
"time"
"github.com/hajimehoshi/ebiten/v2"
)
type Player struct {
x float64 // Player position x
y float64 // Player position y
cellX int
cellY int
angle float64 // Facing angle in radians
health int
mana int
playingFootsteps bool
// These are effectively constants, but we hold them in the player
fov float64 // Field of view
size float64 // Used for collision detection with walls
// These handle movement and turning
moveStartTime int64
moveFunc func(int64) float64
turnStartTime int64
turnFunc func(int64) float64
holding map[string]int
justFired bool
}
func newPlayer(cellX, cellY int) Player {
return Player{
x: cellSize*float64(cellX) + cellSize/2,
y: cellSize*float64(cellY) + cellSize/2,
angle: 0,
moveStartTime: 0.0,
turnStartTime: 0.0,
fov: fov,
size: cellSize / 16.0,
playingFootsteps: false,
health: 100,
mana: 100,
cellX: cellX,
cellY: cellY,
holding: map[string]int{},
moveFunc: func(t int64) float64 {
min := float64(cellSize) / 50.0
max := float64(cellSize) / 14.0
return math.Min(min+math.Pow(float64(t)/250000, 2), max)
},
turnFunc: func(t int64) float64 {
min := math.Pi / 300.0
max := math.Pi / 70.0
return math.Min(min+math.Pow(float64(t)/800_000, 3), max)
},
}
}
func (p *Player) turn(t int64, direction float64) {
p.angle = p.angle + p.turnFunc(t)*direction
}
func (p *Player) move(t int64, direction float64, strafe int) {
// Invoke the move function
speed := p.moveFunc(t)
if ebiten.IsKeyPressed(ebiten.KeyDown) || ebiten.IsKeyPressed(ebiten.KeyS) {
speed = -speed
}
angle := p.angle
if strafe == 1 {
angle += math.Pi / 2
}
if strafe == -1 {
angle -= math.Pi / 2
}
newX := p.x + math.Cos(angle)*speed
newY := p.y + math.Sin(angle)*speed
// Check if we're going to collide with a wall
if wall, cx, cy := p.checkWallCollision(newX, newY); wall != nil {
// Hit a wall, work out if we stop or slide
wx, wy := wall.getCenter()
// TODO: This really needs improving
// All this bullshit is to make the player slide along the wall when hitting at an acute angle
if math.Abs(wx-p.x) > 16 && math.Abs(cy-p.y) > 1.6 {
if wx-p.x < 0 {
newX = p.x + 1.7
} else {
newX = p.x - 1.7
}
} else if math.Abs(wy-p.y) > 16 && math.Abs(cx-p.x) > 1.6 {
if wy-p.y < 0 {
newY = p.y + 1.7
} else {
newY = p.y - 1.7
}
} else {
// OTHERWISE! we really did hit a wall!
return
}
}
// Update player position
p.x = newX
p.y = newY
p.cellX = int(math.Floor(p.x / cellSize))
p.cellY = int(math.Floor(p.y / cellSize))
// Check items near the player we're in and pick them up
for _, item := range game.items {
if item.cellX != p.cellX || item.cellY != p.cellY {
continue
}
item.pickUpFunc(p)
game.removeItem(item)
}
// Footstep sound
if !p.playingFootsteps {
playSound(fmt.Sprintf("footstep_%d", rand.Intn(4)), 0.5, true)
p.playingFootsteps = true
time.AfterFunc(300*time.Millisecond, func() {
p.playingFootsteps = false
})
}
}
func (p *Player) moveToCell(cellX, cellY int) {
p.cellX = cellX
p.cellY = cellY
p.x = cellSize*float64(cellX) + cellSize/2
p.y = cellSize*float64(cellY) + cellSize/2
}
func (p *Player) setFacing(facing int) {
// facing: 0 = up, 1 = right, 2 = down, 3 = left
p.angle = math.Pi / 2 * float64(facing-1)
}
func (p *Player) checkWallCollision(x, y float64) (*Wall, float64, float64) {
if wall := game.getWallAt(x+p.size, y); wall != nil {
return wall, x + p.size, y
}
if wall := game.getWallAt(x-p.size, y); wall != nil {
return wall, x - p.size, y
}
if wall := game.getWallAt(x, y+p.size); wall != nil {
return wall, x, y + p.size
}
if wall := game.getWallAt(x, y-p.size); wall != nil {
return wall, x, y - p.size
}
return nil, 0, 0
}
func (p Player) use() {
if wall, _ := fireRayAngle(p.x, p.y, p.angle, cellSize); wall != nil {
wall.actionFunc(game)
}
}
func (p *Player) attack() {
if p.mana <= 0 {
return
}
// Show the attack animation
p.justFired = true
forceHudUpdate = true
playSound("zap", 0.3, false)
p.mana -= 5
if p.mana < 0 {
p.mana = 0.0
}
sx := p.x + ((cellSize / 3) * math.Cos(p.angle))
sy := p.y + ((cellSize / 3) * math.Sin(p.angle))
game.addProjectile("magic", sx, sy, p.angle, (float64(cellSize) / 5.0), 40, 0.6)
}
// damage the player
func (p *Player) damage(amount int) {
screenFlashRed(10)
p.health -= amount
if p.health <= 0 {
p.health = 0
playSound("scream", 1, false)
game.gameOver()
}
playSound("pain", 1, false)
} | src/player.go | 0.604516 | 0.424114 | player.go | starcoder |
package util
import "github.com/samber/lo"
type Element[T any] struct {
Index int
Element T
}
// Enumerate returns a new slice with each element and its index.
func Enumerate[T any](collection []T) []Element[T] {
if collection == nil {
return nil
}
return lo.Map(collection, func(e T, i int) Element[T] {
return Element[T]{
Index: i,
Element: e,
}
})
}
// Map is similar to lo.Map, but accepts an iteratee without the index argument.
func Map[T any, V any](collection []T, iteratee func(v T) V) []V {
if collection == nil {
return nil
}
return lo.Map(collection, func(v T, _ int) V {
return iteratee(v)
})
}
// TryMap is similar to Map, but when an error occurs in the iteratee, it terminates the iteration and returns an error.
func TryMap[T any, V any](collection []T, iteratee func(v T) (V, error)) ([]V, error) {
if collection == nil {
return nil, nil
}
m := make([]V, 0, len(collection))
for _, e := range collection {
j, err := iteratee(e)
if err != nil {
return nil, err
}
m = append(m, j)
}
return m, nil
}
// FilterMap is similar to Map, but if the iteratee returns nil, that element will be omitted from the new slice.
func FilterMap[T any, V any](collection []T, iteratee func(v T) *V) []V {
if collection == nil {
return nil
}
m := make([]V, 0, len(collection))
for _, e := range collection {
if j := iteratee(e); j != nil {
m = append(m, *j)
}
}
return m
}
// FilterMapOk is similar to FilterMap, but the iteratee can return a boolean as the second return value,
// and it is false, that element will be omitted from the new slice.
func FilterMapOk[T any, V any](collection []T, iteratee func(v T) (V, bool)) []V {
if collection == nil {
return nil
}
m := make([]V, 0, len(collection))
for _, e := range collection {
if j, ok := iteratee(e); ok {
m = append(m, j)
}
}
return m
}
// FilterMapR is similar to FilterMap, but if the return value of the iteratee is not nil,
// it is not dereferenced and is used as the value of the new element.
func FilterMapR[T any, V any](collection []T, iteratee func(v T) *V) []*V {
if collection == nil {
return nil
}
m := make([]*V, 0, len(collection))
for _, e := range collection {
if j := iteratee(e); j != nil {
m = append(m, j)
}
}
return m
}
// https://github.com/samber/lo/issues/54
func All[T any](collection []T, predicate func(T) bool) bool {
for _, e := range collection {
if !predicate(e) {
return false
}
}
return true
}
// https://github.com/samber/lo/issues/54
func Any[T any](collection []T, predicate func(T) bool) bool {
for _, e := range collection {
if predicate(e) {
return true
}
}
return false
}
// Filter is similar to lo.Filter, but accepts an iteratee without the index argument.
func Filter[T any](collection []T, iteratee func(v T) bool) []T {
if collection == nil {
return nil
}
return lo.Filter(collection, func(v T, _ int) bool {
return iteratee(v)
})
}
// DerefSlice drops nil elements in the slice and return a new slice with dereferenced elements.
func DerefSlice[T any](collection []*T) []T {
return FilterMap(collection, func(e *T) *T {
return e
})
} | server/pkg/util/slice.go | 0.770378 | 0.461866 | slice.go | starcoder |
package types
import (
DaoPrediction "github.com/containers-ai/alameda/datahub/pkg/dao/prediction"
Metric "github.com/containers-ai/alameda/datahub/pkg/metric"
DatahubV1alpha1 "github.com/containers-ai/api/alameda_api/v1alpha1/datahub"
)
type PodPredictionExtended struct {
*DaoPrediction.PodPrediction
}
func (p *PodPredictionExtended) ProducePredictions() *DatahubV1alpha1.PodPrediction {
var (
datahubPodPrediction DatahubV1alpha1.PodPrediction
)
datahubPodPrediction = DatahubV1alpha1.PodPrediction{
NamespacedName: &DatahubV1alpha1.NamespacedName{
Namespace: string(p.Namespace),
Name: string(p.PodName),
},
}
for _, ptrContainerPrediction := range *p.ContainersPredictionMap {
containerPredictionExtended := ContainerPredictionExtended{ptrContainerPrediction}
datahubContainerPrediction := containerPredictionExtended.ProducePredictions()
datahubPodPrediction.ContainerPredictions = append(datahubPodPrediction.ContainerPredictions, datahubContainerPrediction)
}
return &datahubPodPrediction
}
type ContainerPredictionExtended struct {
*DaoPrediction.ContainerPrediction
}
func (c *ContainerPredictionExtended) ProducePredictions() *DatahubV1alpha1.ContainerPrediction {
var (
metricDataChan = make(chan DatahubV1alpha1.MetricData)
numOfGoroutine = 0
datahubContainerPrediction DatahubV1alpha1.ContainerPrediction
)
datahubContainerPrediction = DatahubV1alpha1.ContainerPrediction{
Name: string(c.ContainerName),
}
for metricType, samples := range c.PredictionsRaw {
if datahubMetricType, exist := Metric.TypeToDatahubMetricType[metricType]; exist {
numOfGoroutine++
go produceDatahubMetricDataFromSamples(datahubMetricType, samples, metricDataChan)
}
}
for i := 0; i < numOfGoroutine; i++ {
receivedPredictionData := <-metricDataChan
datahubContainerPrediction.PredictedRawData = append(datahubContainerPrediction.PredictedRawData, &receivedPredictionData)
}
return &datahubContainerPrediction
}
// NodePredictionExtended wraps the DAO node prediction so it can be converted
// into the datahub API representation.
type NodePredictionExtended struct {
	*DaoPrediction.NodePrediction
}

// ProducePredictions converts the wrapped DAO node prediction into a
// DatahubV1alpha1.NodePrediction. Samples for each recognised metric type are
// converted concurrently and gathered from an unbuffered channel, so the order
// of PredictedRawData is not deterministic.
func (d *NodePredictionExtended) ProducePredictions() *DatahubV1alpha1.NodePrediction {
	metricDataChan := make(chan DatahubV1alpha1.MetricData)
	datahubNodePrediction := DatahubV1alpha1.NodePrediction{
		Name:        string(d.NodeName),
		IsScheduled: d.IsScheduled,
	}
	// Fan out one goroutine per metric type that has a datahub mapping.
	pending := 0
	for metricType, samples := range d.Predictions {
		if datahubMetricType, exist := Metric.TypeToDatahubMetricType[metricType]; exist {
			pending++
			go produceDatahubMetricDataFromSamples(datahubMetricType, samples, metricDataChan)
		}
	}
	// Collect exactly one result per goroutine started above.
	for ; pending > 0; pending-- {
		metricData := <-metricDataChan
		datahubNodePrediction.PredictedRawData = append(datahubNodePrediction.PredictedRawData, &metricData)
	}
	return &datahubNodePrediction
}
type NodesPredictionMapExtended struct {
*DaoPrediction.NodesPredictionMap
}
func (d *NodesPredictionMapExtended) ProducePredictions() []*DatahubV1alpha1.NodePrediction {
var (
datahubNodePredictions = make([]*DatahubV1alpha1.NodePrediction, 0)
)
for _, ptrIsScheduledNodePredictionMap := range *d.NodesPredictionMap {
if ptrScheduledNodePrediction, exist := (*ptrIsScheduledNodePredictionMap)[true]; exist {
scheduledNodePredictionExtended := NodePredictionExtended{ptrScheduledNodePrediction}
sechduledDatahubNodePrediction := scheduledNodePredictionExtended.ProducePredictions()
datahubNodePredictions = append(datahubNodePredictions, sechduledDatahubNodePrediction)
}
if noneScheduledNodePrediction, exist := (*ptrIsScheduledNodePredictionMap)[false]; exist {
noneScheduledNodePredictionExtended := NodePredictionExtended{noneScheduledNodePrediction}
noneSechduledDatahubNodePrediction := noneScheduledNodePredictionExtended.ProducePredictions()
datahubNodePredictions = append(datahubNodePredictions, noneSechduledDatahubNodePrediction)
}
}
return datahubNodePredictions
} | datahub/pkg/formatextension/types/predictions.go | 0.664649 | 0.4133 | predictions.go | starcoder |
package apksign
import (
"encoding/binary"
)
/* This idiom is very common in the v2 Android signing scheme:
val := binary.LittleEndian.Uint32(buf) // parse 4 bytes into a uint32
buf = buf[4:] // advance the buffer past the "consumed" bytes
...and same for uint64 values.
It's not a lot of code but when it appears many times in succession it detracts from readability
and is prone to typos and copy/paste bugs. So we wrap this in a few convenience functions to
improve this. The compiler generally seems to inline calls to these.
*/
// pop32 decodes the first 4 bytes of in as a little-endian uint32 and returns
// it together with the remainder of the slice (sharing the same backing
// array). Panics if in is shorter than 4 bytes.
func pop32(in []byte) (uint32, []byte) {
	v := binary.LittleEndian.Uint32(in)
	return v, in[4:]
}

// pop64 decodes the first 8 bytes of in as a little-endian uint64 and returns
// it together with the remainder of the slice (sharing the same backing
// array). Panics if in is shorter than 8 bytes.
func pop64(in []byte) (uint64, []byte) {
	v := binary.LittleEndian.Uint64(in)
	return v, in[8:]
}

// popN splits in after the first count bytes, returning the head and the
// tail; both share the original backing array.
func popN(in []byte, count int) ([]byte, []byte) {
	head, tail := in[:count], in[count:]
	return head, tail
}
// push32 returns a freshly allocated slice consisting of the little-endian
// uint32 length of in followed by a copy of in's bytes.
func push32(in []byte) []byte {
	out := make([]byte, len(in)+4)
	binary.LittleEndian.PutUint32(out, uint32(len(in)))
	copy(out[4:], in)
	return out
}

// push64 returns a freshly allocated slice consisting of the little-endian
// uint64 length of in followed by a copy of in's bytes.
func push64(in []byte) []byte {
	out := make([]byte, len(in)+8)
	binary.LittleEndian.PutUint64(out, uint64(len(in)))
	copy(out[8:], in)
	return out
}
// concat returns a freshly allocated slice holding the bytes of every input
// block back to back; its length is the sum of the input lengths.
func concat(blocks ...[]byte) []byte {
	total := 0
	for _, b := range blocks {
		total += len(b)
	}
	out := make([]byte, total)
	pos := 0
	for _, b := range blocks {
		pos += copy(out[pos:], b)
	}
	return out
}
package keeper
import (
sdk "github.com/cosmos/cosmos-sdk/types"
cdptypes "github.com/lcnem/eurx/x/cdp/types"
"github.com/lcnem/eurx/x/estmdist/types"
)
// MintPeriodInflation mints new tokens according to the inflation schedule
// specified in the module parameters. It is intended to be called once per
// block: it walks every configured period, mints inflation for the time that
// elapsed inside each period since the previously recorded block time, and
// finally records the current block time for the next invocation.
//
// When the module is inactive it only emits a status event. On the very first
// call (no previous block time stored) it just records the current block time
// and mints nothing.
func (k Keeper) MintPeriodInflation(ctx sdk.Context) error {
	params := k.GetParams(ctx)
	if !params.Active {
		ctx.EventManager().EmitEvent(
			sdk.NewEvent(
				types.EventTypeEstmdist,
				sdk.NewAttribute(types.AttributeKeyStatus, types.AttributeValueInactive),
			),
		)
		return nil
	}
	previousBlockTime, found := k.GetPreviousBlockTime(ctx)
	if !found {
		// First block after activation: establish the baseline time only.
		previousBlockTime = ctx.BlockTime()
		k.SetPreviousBlockTime(ctx, previousBlockTime)
		return nil
	}
	var err error
	for _, period := range params.Periods {
		switch {
		// Case 1 - period is fully expired
		case period.End.Before(previousBlockTime):
			continue
		// Case 2 - period has ended since the previous block time
		case period.End.After(previousBlockTime) && period.End.Before(ctx.BlockTime()):
			// calculate time elapsed relative to the periods end time
			timeElapsed := sdk.NewInt(period.End.Unix() - previousBlockTime.Unix())
			err = k.mintInflationaryCoins(ctx, period.Inflation, timeElapsed, types.GovDenom)
			// update the value of previousBlockTime so that the next period starts from the end of the last
			// period and not the original value of previousBlockTime
			previousBlockTime = period.End
		// Case 3 - period is ongoing
		case (period.Start.Before(previousBlockTime) || period.Start.Equal(previousBlockTime)) && period.End.After(ctx.BlockTime()):
			// calculate time elapsed relative to the current block time
			timeElapsed := sdk.NewInt(ctx.BlockTime().Unix() - previousBlockTime.Unix())
			err = k.mintInflationaryCoins(ctx, period.Inflation, timeElapsed, types.GovDenom)
		// Case 4 - period hasn't started
		case period.Start.After(ctx.BlockTime()) || period.Start.Equal(ctx.BlockTime()):
			continue
		// NOTE(review): some boundary combinations match no case and silently
		// mint nothing for this block — e.g. period.End exactly equal to
		// ctx.BlockTime(), or a period whose Start falls strictly between
		// previousBlockTime and ctx.BlockTime(). Confirm this is intended.
		}
		if err != nil {
			return err
		}
	}
	k.SetPreviousBlockTime(ctx, ctx.BlockTime())
	return nil
}
// mintInflationaryCoins mints newly inflated coins of the given denom into the
// module account and emits an inflation event.
//
// inflationRate is compounded over timePeriods elapsed time units using
// fixed-point arithmetic scaled by 10^18 (presumably a per-second growth
// factor >= 1 — confirm against the params definition). The amount minted is
// totalSupply * inflationRate^timePeriods - totalSupply, truncated; when that
// rounds to zero, nothing is minted and no event is emitted.
func (k Keeper) mintInflationaryCoins(ctx sdk.Context, inflationRate sdk.Dec, timePeriods sdk.Int, denom string) error {
	totalSupply := k.bankKeeper.GetSupply(ctx).GetTotal().AmountOf(denom)
	// used to scale accumulator calculations by 10^18
	scalar := sdk.NewInt(1000000000000000000)
	// convert inflation rate to integer
	inflationInt := inflationRate.Mul(sdk.NewDecFromInt(scalar)).TruncateInt()
	// calculate the multiplier (amount to multiply the total supply by to achieve the desired inflation)
	// multiply the result by 10^-18 because RelativePow returns the result scaled by 10^18
	accumulator := sdk.NewDecFromInt(cdptypes.RelativePow(inflationInt, timePeriods, scalar)).Mul(sdk.SmallestDec())
	// calculate the number of coins to mint
	amountToMint := (sdk.NewDecFromInt(totalSupply).Mul(accumulator)).Sub(sdk.NewDecFromInt(totalSupply)).TruncateInt()
	if amountToMint.IsZero() {
		return nil
	}
	err := k.bankKeeper.MintCoins(ctx, types.EstmdistMacc, sdk.NewCoins(sdk.NewCoin(denom, amountToMint)))
	if err != nil {
		return err
	}
	ctx.EventManager().EmitEvent(
		sdk.NewEvent(
			types.EventTypeEstmdist,
			sdk.NewAttribute(types.AttributeKeyInflation, sdk.NewCoin(denom, amountToMint).String()),
		),
	)
	return nil
}
package dull
import (
"bytes"
"fmt"
"github.com/stretchr/testify/assert"
"image"
"image/png"
"os"
"path"
"testing"
)
// normaliseImageIfRequired rewrites an *image.RGBA in place so that captures
// compare deterministically: each colour channel is scaled by the pixel's
// alpha and the alpha channel is forced to fully opaque. nil images and
// *image.NRGBA images are left untouched; any other implementation panics.
func normaliseImageIfRequired(img image.Image) {
	if img == nil {
		return
	}
	switch typed := img.(type) {
	case *image.NRGBA:
		// Nothing to do for NRGBA captures.
	case *image.RGBA:
		pix := typed.Pix
		for i := 0; i < len(pix); i += 4 {
			alpha := int(pix[i+3])
			pix[i+0] = uint8(int(pix[i+0]) * alpha / 0xff)
			pix[i+1] = uint8(int(pix[i+1]) * alpha / 0xff)
			pix[i+2] = uint8(int(pix[i+2]) * alpha / 0xff)
			pix[i+3] = 0xff
		}
	default:
		fmt.Println(img)
		panic("Unsupported Image implementation")
	}
}
// testCaptureAndCompareImage opens a window of the given size, lets
// setupWindow populate it, renders it, and then compares a capture of the
// rendered output against the committed reference image for name (via
// asserttTestImage). The window is destroyed once the comparison has run.
func testCaptureAndCompareImage(
	t *testing.T,
	name string,
	width int,
	height int,
	scale float64,
	setupWindow func(*Window),
) {
	Run(func(app *Application, err error) {
		if err != nil {
			t.Fatal(err)
		}
		w, err := app.NewWindow(&WindowOptions{
			Width: width,
			Height: height,
			Bg: &White,
			Fg: &Black,
		})
		if err != nil {
			t.Fatal(err)
		}
		// Use a fixed scale, to ensure reproducibility on all systems.
		w.scale = scale
		w.setFontSize(0)
		// allow the test to prepare the window contents
		setupWindow(w)
		w.drawAll()
		// Capture and compare on the window's own goroutine, then tear the
		// window down so Run can return.
		go w.Do(func() {
			asserttTestImage(t, name, w)
			w.Destroy()
		})
	})
}
// asserttTestImage captures the window contents, normalises them, writes the
// capture to disk for inspection, and asserts that the raw pixel data matches
// the committed reference image. If no reference image exists yet, the
// freshly generated capture is adopted as the reference.
func asserttTestImage(t *testing.T, name string, w *Window) {
	// capture
	generatedImage := w.Capture()
	normaliseImageIfRequired(generatedImage)

	// write generated image; will not be committed
	writeTestImageFile(name, "generated", generatedImage)

	referenceImage, err := readTestImageFile(name, "reference")
	// First run: adopt the generated image as the reference.
	if os.IsNotExist(err) {
		writeTestImageFile(name, "reference", generatedImage)
		return
	}

	// Extract the raw pixel bytes regardless of reference pixel format.
	var referencePix []uint8
	switch ref := referenceImage.(type) {
	case *image.RGBA:
		referencePix = ref.Pix
	case *image.NRGBA:
		referencePix = ref.Pix
	}
	generatedPix := generatedImage.(*image.RGBA).Pix
	// BUG FIX (idiom): use bytes.Equal rather than bytes.Compare(...) == 0
	// for a pure equality check (staticcheck S1004).
	imagesIdentical := bytes.Equal(generatedPix, referencePix)
	assert.True(t, imagesIdentical, "image differs from reference image")
}
// writeTestImageFile encodes img as PNG to "test-images/<name>--<suffix>.png",
// panicking on any I/O or encoding failure.
func writeTestImageFile(name, suffix string, img image.Image) {
	f, err := os.Create(testImageFilepath(name + "--" + suffix))
	if err != nil {
		panic(err)
	}
	if err := png.Encode(f, img); err != nil {
		panic(err)
	}
	if err := f.Close(); err != nil {
		panic(err)
	}
}

// readTestImageFile decodes "test-images/<name>--<suffix>.png". A missing
// file is reported through the returned error so callers can treat it as
// "no reference yet"; any other failure panics.
func readTestImageFile(name, suffix string) (image.Image, error) {
	f, err := os.Open(testImageFilepath(name + "--" + suffix))
	if err != nil {
		if os.IsNotExist(err) {
			return nil, err
		}
		panic(err)
	}
	defer f.Close()
	img, err := png.Decode(f)
	if err != nil {
		panic(err)
	}
	return img, nil
}
// testImageFilepath returns the slash-separated path of the test image with
// the given base name inside the test-images directory.
func testImageFilepath(name string) string {
	return path.Join("test-images", name+".png")
}
package game
// Room holds the directions (doors) and entities (items) within one room of
// the game map.
type Room struct {
	Name        string // short display name
	Description string // text shown to the player on entry / LOOK
	Doors       []Door // exits leading to other rooms
	Items       []Item // interactable objects in the room
}

// Door holds the metadata required to journey to a different room.
type Door struct {
	RoomID    uint   // destination room id in Game.Rooms
	Direction string // command word used to take this exit (e.g. "north")
	IsLocked  bool   // locked doors cannot be used until unlocked (see Item.UnlocksDoor)
	// IsVisible bool
	// GoDesc string
	// LookDesc string
}
// GenerateRooms populates g.Rooms with the static game map.
//
// BUG FIXES relative to the previous data:
//   - Room 2 advertised an exit to the SOUTH but defined no doors at all,
//     trapping the player; the south door back to room 1 has been added.
//   - Room 3's description offers WEST and DOWN, but its doors were "south"
//     and a "down" door pointing back at room 3 itself, which also left room
//     4 (Underground Lake) unreachable. The directions now match the
//     description and DOWN leads to room 4.
func (g *Game) GenerateRooms() {
	g.Rooms = map[uint]Room{
		0: {
			Name:        "test room",
			Description: "A bland, featureless room. There is a DOOR labelled 'door that connects to this very room.'",
			Doors:       []Door{{RoomID: 0, Direction: "door"}},
		},
		1: {
			Name:        "Glistening Mine",
			Description: "You are in a glistening mine. There is a passage to the NORTH and door to the EAST.",
			Doors: []Door{
				{RoomID: 2, Direction: "north"},
				// The east door is opened with the KEY found in room 2.
				{RoomID: 3, Direction: "east", IsLocked: true},
			},
		},
		2: {
			Name:        "Dusty Break Room",
			Description: "You are in a dusty break room containing an ancient card TABLE. There is an exit to the SOUTH.",
			Doors:       []Door{{RoomID: 1, Direction: "south"}},
			Items: []Item{
				{
					Name:        "TABLE",
					Description: "The TABLE looks old and dusty. There is a KEY resting on the surface.",
					CanGet:      false,
				},
				{
					Name:        "KEY",
					Description: "The KEY has a faint layer of rust on it.",
					CanGet:      true,
					UnlocksDoor: struct {
						RoomID    uint
						Direction string
					}{
						RoomID:    uint(1),
						Direction: "east",
					},
				},
			},
		},
		3: {
			Name:        "Mineshaft",
			Description: "You come to a deep mineshaft. You can go back WEST or go DOWN.",
			Doors: []Door{
				{RoomID: 1, Direction: "west"},
				{RoomID: 4, Direction: "down"},
			},
		},
		4: {
			Name:        "Underground Lake",
			Description: "You are next to an underground lake. Looks like you're stuck here!",
		},
	}
}
// GetRoomDescription returns the description of the room with the given id
// (the zero string if the id is unknown).
func (g *Game) GetRoomDescription(id uint) string {
	return g.Rooms[id].Description
}
// GoDirection will check if the direction is valid given the room, change the game's CurrentRoomID, and return the new room's description.
func (g *Game) GoDirection(dir string) string {
room := g.Rooms[g.CurrentRoomID]
for _, door := range room.Doors {
if dir == door.Direction {
g.CurrentRoomID = door.RoomID
return g.GetRoomDescription(g.CurrentRoomID)
}
}
return ""
} | internal/game/room.go | 0.5144 | 0.425963 | room.go | starcoder |
package equitocube
import (
"errors"
"image/color"
"math"
)
//Cubemap describes the layout of a cubemap image: the tile grid ratio, the
//pixel size of one tile, which face sits at each grid cell, and per real
//face its grid coordinates plus mirroring factors.
type Cubemap struct {
	Ratio Vector2 // tile grid dimensions (columns x rows)
	TileSize Vector2 // pixel size of a single tile
	TileMap [2][3]string // face name per (x, y) cell; NOTE: hard-coded for a 2x3 layout
	FaceMap map[string]VectorArray3 // real face name -> grid position and mirror factors
	SquareTileSize int // NOTE(review): never assigned by NewCubemap — confirm intent
}

//Vector2 is a pair of integer components.
type Vector2 struct {
	X int
	Y int
}

//LatLong is a cartesian coordinate pair (also reused for pixel positions).
type LatLong struct {
	X float64
	Y float64
}

//Vector3 is a 3D float vector (world-space direction).
type Vector3 struct {
	X float64
	Y float64
	Z float64
}

//VectorArray3 pairs a tile's grid position (X, Y) with its mirror factors Z
//(see Cubemap.GetFaceMatrix).
type VectorArray3 struct {
	X int
	Y int
	Z Vector2
}
//DegreesToRadians converts an angle from degrees to radians.
func DegreesToRadians(degrees float64) float64 {
	radians := degrees * math.Pi / 180
	return radians
}

//RadiansToDegrees converts an angle from radians to degrees.
func RadiansToDegrees(radians float64) float64 {
	degrees := radians * 180.0 / math.Pi
	return degrees
}
//NewCubemap creates a new Cubemap using the fixed 2x3 face layout
//U/L/F/R/B/D. It derives the grid ratio from the layout's 2x3 shape, scales
//the ratio up if the layout has more tiles than the ratio provides, and then
//records each face's grid position and mirror factors in TileMap/FaceMap.
func NewCubemap() (*Cubemap, error) {
	var layout = [6]string{"U", "L", "F", "R", "B", "D"}
	with := 2
	height := 3
	c := new(Cubemap)
	c.FaceMap = make(map[string]VectorArray3)
	ratio, squareTilesize, err := c.GetRatio(with, height)
	if err != nil {
		return c, err
	}
	c.Ratio = ratio
	c.TileSize = Vector2{squareTilesize, squareTilesize}
	// see if layout fits
	ratioTileCount := c.Ratio.X * c.Ratio.Y
	layoutTileCount := len(layout)
	if ratioTileCount != layoutTileCount {
		// scale if posss
		remainder := ratioTileCount % layoutTileCount
		if remainder != 0 {
			return c, errors.New("layout: Layout doesn't fit in ratio")
		}
		// ratio can only scale upwards...
		if ratioTileCount > layoutTileCount {
			return c, errors.New("layout: Layout doesn't have enough tiles to fit in ratio")
		}
		scale := layoutTileCount / ratioTileCount
		c.Ratio.X *= scale
		c.Ratio.Y *= scale
		c.TileSize.X /= scale
		c.TileSize.Y /= scale
	}
	// make up 2d map
	for x := 0; x < c.GetTileWidth(); x++ {
		for y := 0; y < c.GetTileHeight(); y++ {
			// Layout is stored row-major: index = y * width + x.
			i := y*c.GetTileWidth() + x
			face := layout[i]
			realFace := c.GetRealFace(face)
			// gr: turn this whole thing into a matrix!
			matrix := c.GetFaceMatrix(face)
			c.TileMap[x][y] = face
			c.FaceMap[realFace] = VectorArray3{x, y, matrix}
		}
	}
	return c, nil
}
//Resize divides the given image dimensions by the cubemap's tile ratio,
//yielding the pixel size of a single tile.
func (c Cubemap) Resize(width, height int) (int, int) {
	return width / c.Ratio.X, height / c.Ratio.Y
}
//GetRatio reduces width:height to its lowest terms and returns the reduced
//ratio together with the common tile size (the greatest common divisor of
//width and height). Non-positive dimensions yield an error.
//
//BUG FIX: the previous implementation derived the ratio from the remainder
//of width/height, which is wrong for non-divisible dimensions (e.g. 16:9
//produced ratio 2:1 with tile size 7, which does not tile back to 16x9).
//Using the GCD gives the correct reduced ratio for every input while
//returning identical results for the square and evenly divisible cases the
//package actually exercises.
func (c Cubemap) GetRatio(width int, height int) (Vector2, int, error) {
	if width <= 0 || height <= 0 {
		return Vector2{}, 0, errors.New("ratio: width and height must be positive")
	}
	// Greatest common divisor via Euclid's algorithm.
	a, b := width, height
	for b != 0 {
		a, b = b, a%b
	}
	return Vector2{width / a, height / a}, a, nil
}
//GetFaceMatrix returns the mirroring factors for a layout face: the pseudo
//face "Z" is flipped on both axes; every other face is unmirrored.
func (c Cubemap) GetFaceMatrix(face string) Vector2 {
	if face == "Z" {
		return Vector2{-1, -1}
	}
	return Vector2{1, 1}
}

//GetFlipFace maps a real face to its flipped pseudo face ("B" -> "Z");
//all other faces have no flipped variant and yield "".
func (c Cubemap) GetFlipFace(face string) string {
	switch face {
	case "B":
		return "Z"
	default:
		return ""
	}
}

//GetRealFace maps the pseudo face "Z" back to the real face "B"; any other
//face is returned unchanged.
func (c Cubemap) GetRealFace(face string) string {
	switch face {
	case "Z":
		return "B"
	default:
		return face
	}
}
/*
func (c Cubemap) IsValid() bool {
return if(math.IsNaN(c.Ratio.X))
}*/
//GetTileWidth returns the number of tile columns in the layout.
func (c Cubemap) GetTileWidth() int {
	return c.Ratio.X
}

//GetTileHeight returns the number of tile rows in the layout.
func (c Cubemap) GetTileHeight() int {
	return c.Ratio.Y
}

//GetImageWidth returns the full image width in pixels (columns * tile width).
func (c Cubemap) GetImageWidth() int {
	return c.Ratio.X * c.TileSize.X
}

//GetImageHeight returns the full image height in pixels (rows * tile height).
func (c Cubemap) GetImageHeight() int {
	return c.Ratio.Y * c.TileSize.Y
}

//getSquareTileSize returns the stored SquareTileSize.
//NOTE(review): SquareTileSize is never assigned anywhere in this file, so
//this currently always returns 0 — confirm whether NewCubemap should set it.
func (c Cubemap) getSquareTileSize() int {
	return c.SquareTileSize
}
//ScreenToWorld maps a screen position in [0,1]x[0,1] on the named cube face
//to a world-space direction vector. Unknown faces yield the zero vector and
//an error.
func (c Cubemap) ScreenToWorld(face string, screenPosX float64, screenPosY float64) (Vector3, error) {
	// Remap both coordinates from 0..1 to -1..1.
	x := screenPosX*2.0 - 1.0
	y := screenPosY*2.0 - 1.0
	switch face {
	case "L":
		return Vector3{-1, -y, x}, nil
	case "R":
		return Vector3{1, -y, -x}, nil
	case "U":
		return Vector3{-x, 1, -y}, nil
	case "D":
		return Vector3{-x, -1, y}, nil
	case "F":
		return Vector3{x, -y, 1}, nil
	case "B":
		return Vector3{-x, -y, -1}, nil
	}
	return Vector3{0, 0, 0}, errors.New("vector: not exist")
}
//GetFaceColor returns a distinct debug colour for each cube face, and white
//for unknown faces.
//
//BUG FIX (idiom): removed the unreachable break statements that followed the
//assignments and collapsed the mutate-after-init pattern into composite
//literals; the returned values are unchanged.
//
//NOTE(review): channel values of 255 are nearly black in color.RGBA64, whose
//channels range over 0..65535. If full-intensity colours were intended the
//values should be 0xffff (or the type should be 8-bit color.RGBA). Left
//as-is to preserve existing output — confirm the intent.
func (c Cubemap) GetFaceColor(face string) color.RGBA64 {
	switch face {
	case "U":
		return color.RGBA64{R: 255, G: 0, B: 0, A: 255}
	case "L":
		return color.RGBA64{R: 0, G: 255, B: 0, A: 255}
	case "F":
		return color.RGBA64{R: 0, G: 0, B: 255, A: 255}
	case "R":
		return color.RGBA64{R: 255, G: 255, B: 0, A: 255}
	case "B":
		return color.RGBA64{R: 0, G: 255, B: 255, A: 255}
	case "D":
		return color.RGBA64{R: 255, G: 0, B: 255, A: 255}
	default:
		return color.RGBA64{R: 255, G: 255, B: 255, A: 255}
	}
}
//viewToLatLon converts a world-space direction vector into latitude and
//longitude (radians), with the longitude doubled to stretch it over the full
//-Pi..Pi range expected by getScreenFromLatLong.
//
//Idiom fix: dropped the redundant float64() conversions — the Vector3 fields
//are already float64.
func viewToLatLon(view3 Vector3) LatLong {
	x, y, z := view3.X, view3.Y, view3.Z
	lat := math.Atan2(x, z)
	// Normalise y against the full vector length to obtain sin(longitude).
	xz := math.Sqrt((x * x) + (z * z))
	normy := y / math.Sqrt((y*y)+(xz*xz))
	lon := math.Asin(normy)
	// stretch longitude...
	lon *= 2.0
	return LatLong{X: lat, Y: lon}
}
//GetScreenFromLatLong return the cartesian position of a math.Pixel in the original image
func getScreenFromLatLong(lat float64, lon float64, width float64, height float64) LatLong {
var screenPos = LatLong{0, 0}
// -math.Pi...math.Pi -> -1...1
lat /= math.Pi
lon /= math.Pi
// -1..1 -> 0..2
lat += 1.0
lon += 1.0
// 0..2 -> 0..1
lat /= 2.0
lon /= 2.0
lon = 1.0 - lon
lat *= width
lon *= height
screenPos.X = lat
screenPos.Y = lon
//fmt.Printf("lat: ", screenPos.X)
return screenPos
} | cubemap.go | 0.792304 | 0.688449 | cubemap.go | starcoder |
package printer
/*
© 2021 B1 Digital
User : ICI
Name : <NAME>
Date : 28.05.2021 15:34
Notes :
.
*/
import (
"fmt"
"image"
)
// closestNDivisibleBy8 rounds n down (toward zero) to the nearest multiple
// of 8.
func closestNDivisibleBy8(n int) int {
	return (n / 8) * 8
}
// printImage converts img into a 1-bit raster and returns the dimension
// header bytes plus the raster data. xL/xH encode the number of data bytes
// per row (width/8) and yL/yH the number of rows — this matches the ESC/POS
// "GS v 0" raster convention, presumably; confirm against the caller.
func printImage(img image.Image) (xL byte, xH byte, yL byte, yH byte, data []byte) {
	width, height, pixels := getPixels(img)
	removeTransparency(&pixels)
	makeGrayscale(&pixels)
	// rasterize requires dimensions that are multiples of 8, so round down.
	printWidth := closestNDivisibleBy8(width)
	printHeight := closestNDivisibleBy8(height)
	bytes, _ := rasterize(printWidth, printHeight, &pixels)
	// BUG FIX: the header previously advertised the original image height even
	// though the raster only contains printHeight rows (and used the unrounded
	// width, though width>>3 happens to equal printWidth>>3). Describe the
	// data that was actually rasterized so header and payload agree.
	xBytes := printWidth >> 3
	return byte(xBytes & 0xff), byte((xBytes >> 8) & 0xff), byte(printHeight & 0xff), byte((printHeight >> 8) & 0xff), bytes
}
// makeGrayscale thresholds every pixel in place to pure black or white using
// Rec. 601 luma weights: luminance below 128 becomes 0, everything else 255.
// Alpha is left untouched.
func makeGrayscale(pixels *[][]pixel) {
	// BUG FIX: guard against an empty grid; the previous code read row 0
	// unconditionally and panicked on zero-height input.
	if len(*pixels) == 0 {
		return
	}
	for y := range *pixels {
		row := (*pixels)[y]
		for x := range row {
			px := row[x]
			luminance := (float64(px.R) * 0.299) + (float64(px.G) * 0.587) + (float64(px.B) * 0.114)
			value := 255
			if luminance < 128 {
				value = 0
			}
			px.R = value
			px.G = value
			px.B = value
			row[x] = px
		}
	}
}

// removeTransparency composites every pixel over a white background in place
// and forces the alpha channel to fully opaque.
func removeTransparency(pixels *[][]pixel) {
	// BUG FIX: guard against an empty grid (see makeGrayscale).
	if len(*pixels) == 0 {
		return
	}
	for y := range *pixels {
		row := (*pixels)[y]
		for x := range row {
			px := row[x]
			alpha := px.A
			invAlpha := 255 - alpha
			// Blend each channel with white in proportion to the missing alpha.
			px.R = (alpha*px.R + invAlpha*255) / 255
			px.G = (alpha*px.G + invAlpha*255) / 255
			px.B = (alpha*px.B + invAlpha*255) / 255
			px.A = 255
			row[x] = px
		}
	}
}
// rasterize packs the (already thresholded) pixel grid into a 1-bit-per-pixel
// bitmap: 8 horizontal pixels per byte, most significant bit leftmost, rows
// stored top to bottom. Both dimensions must be multiples of 8.
// NOTE(review): only the width actually needs to be a multiple of 8 for the
// byte packing; the height restriction looks stricter than necessary.
func rasterize(printWidth int, printHeight int, pixels *[][]pixel) ([]byte, error) {
	if printWidth%8 != 0 {
		return nil, fmt.Errorf("printWidth must be a multiple of 8")
	}
	if printHeight%8 != 0 {
		return nil, fmt.Errorf("printHeight must be a multiple of 8")
	}
	// One byte per 8 horizontal pixels.
	bytes := make([]byte, (printWidth*printHeight)>>3)
	for y := 0; y < printHeight; y++ {
		for x := 0; x < printWidth; x = x + 8 {
			// Byte index: row offset (in bytes) plus column byte.
			i := y*(printWidth>>3) + (x >> 3)
			bytes[i] =
				byte((getPixelValue(x+0, y, pixels) << 7) |
					(getPixelValue(x+1, y, pixels) << 6) |
					(getPixelValue(x+2, y, pixels) << 5) |
					(getPixelValue(x+3, y, pixels) << 4) |
					(getPixelValue(x+4, y, pixels) << 3) |
					(getPixelValue(x+5, y, pixels) << 2) |
					(getPixelValue(x+6, y, pixels) << 1) |
					getPixelValue(x+7, y, pixels))
		}
	}
	return bytes, nil
}
// getPixelValue returns the 1-bit print value of pixel (x, y): 1 for black
// (R == 0) and 0 for anything lighter. It assumes the grid has already been
// thresholded by makeGrayscale.
func getPixelValue(x int, y int, pixels *[][]pixel) int {
	if (*pixels)[y][x].R > 0 {
		return 0
	}
	return 1
}

// rgbaToPixel narrows the 16-bit-per-channel values produced by
// image.Image.At(...).RGBA() down to 8 bits per channel.
func rgbaToPixel(r uint32, g uint32, b uint32, a uint32) pixel {
	return pixel{R: int(r >> 8), G: int(g >> 8), B: int(b >> 8), A: int(a >> 8)}
}

// pixel is an 8-bit-per-channel RGBA sample.
type pixel struct {
	R int
	G int
	B int
	A int
}
func getPixels(img image.Image) (int, int, [][]pixel) {
bounds := img.Bounds()
width, height := bounds.Max.X, bounds.Max.Y
var pixels [][]pixel
for y := 0; y < height; y++ {
var row []pixel
for x := 0; x < width; x++ {
row = append(row, rgbaToPixel(img.At(x, y).RGBA()))
}
pixels = append(pixels, row)
}
return width, height, pixels
} | bitimage.go | 0.734024 | 0.422862 | bitimage.go | starcoder |
package transforms
import (
"math"
"github.com/calbim/ray-tracer/src/matrix"
"github.com/calbim/ray-tracer/src/tuple"
)
//Translation returns the 4x4 homogeneous (row-major) matrix that translates
//a point by (x, y, z).
func Translation(x, y, z float64) *matrix.Matrix {
	return matrix.New([]float64{1, 0, 0, x, 0, 1, 0, y, 0, 0, 1, z, 0, 0, 0, 1})
}

// Scaling returns the 4x4 matrix that scales each axis by (x, y, z).
func Scaling(x, y, z float64) *matrix.Matrix {
	return matrix.New([]float64{x, 0, 0, 0, 0, y, 0, 0, 0, 0, z, 0, 0, 0, 0, 1})
}

// RotationX returns a matrix that represents a rotation by r radians around the X axis.
func RotationX(r float64) *matrix.Matrix {
	return matrix.New([]float64{1, 0, 0, 0, 0, math.Cos(r), -math.Sin(r), 0, 0, math.Sin(r), math.Cos(r), 0, 0, 0, 0, 1})
}

// RotationY returns a matrix that represents a rotation by r radians around the Y axis.
func RotationY(r float64) *matrix.Matrix {
	return matrix.New([]float64{math.Cos(r), 0, math.Sin(r), 0, 0, 1, 0, 0, -math.Sin(r), 0, math.Cos(r), 0, 0, 0, 0, 1})
}

// RotationZ returns a matrix that represents a rotation by r radians around the Z axis.
func RotationZ(r float64) *matrix.Matrix {
	return matrix.New([]float64{math.Cos(r), -math.Sin(r), 0, 0, math.Sin(r), math.Cos(r), 0, 0, 0, 0, 1, 0, 0, 0, 0, 1})
}

//Shearing returns a matrix for a shear (skew) operation; each parameter Ab
//is the amount axis A moves in proportion to axis b (e.g. Xy shears X
//relative to Y).
func Shearing(Xy, Xz, Yx, Yz, Zx, Zy float64) *matrix.Matrix {
	return matrix.New([]float64{1, Xy, Xz, 0, Yx, 1, Yz, 0, Zx, Zy, 1, 0, 0, 0, 0, 1})
}
//Chain combines several transformations into one matrix applied first-to-
//last: Chain(a, b, c) yields c * b * a, so a is applied to a point first.
func Chain(transforms ...*matrix.Matrix) *matrix.Matrix {
	result := matrix.Identity
	// Multiply from the last transform down to the first, keeping the same
	// multiplication order as the accumulating product.
	for i := len(transforms); i > 0; i-- {
		result = result.Multiply(transforms[i-1])
	}
	return result
}
//ViewTransform returns the world-to-camera matrix for an eye at from looking
//at to, with up giving the approximate up direction (it need not be exactly
//perpendicular to the view direction — a true up vector is recomputed).
func ViewTransform(from tuple.Tuple, to tuple.Tuple, up tuple.Tuple) *matrix.Matrix {
	forward := to.Subtract(from)
	forward = forward.Normalize()
	upNormalize := up.Normalize()
	// Build an orthonormal camera basis: left = forward x up,
	// trueUp = left x forward.
	left := forward.CrossProduct(upNormalize)
	trueUp := left.CrossProduct(forward)
	// Orientation rows are the camera basis vectors; the camera looks down
	// the negative forward axis.
	orientation := matrix.New([]float64{
		left.X, left.Y, left.Z, 0,
		trueUp.X, trueUp.Y, trueUp.Z, 0,
		-forward.X, -forward.Y, -forward.Z, 0,
		0, 0, 0, 1})
	// Move the world so the eye sits at the origin, then orient it.
	return orientation.Multiply(Translation(-from.X, -from.Y, -from.Z))
}
package ml
import (
"errors"
"fmt"
"strings"
)
// Common erros.
var (
ErrBadDim = errors.New("dimension of the two matrix differs")
ErrInconsistentData = errors.New("matrix has different y dimension per x")
ErrUninitialized = errors.New("matrix not initialized")
ErrNotAVector = errors.New("the current vector has multi dimension")
)
// Matrix .
type Matrix [][]float64
// NewMatrix instantiates a new matrix of (n,m) dimension.
func NewMatrix(n, m int) Matrix {
ret := make(Matrix, n)
for i := 0; i < n; i++ {
ret[i] = make([]float64, m)
}
return ret
}
// Dim returns the dimension of the matrix.
func (m Matrix) Dim() (int, int) {
if len(m) == 0 {
return 0, 0
}
return len(m), len(m[0])
}
// Add adds Kthe given matrix to the current one and return the result.
// Does not change current matrix state.
func (m Matrix) Add(m2 Matrix) Matrix {
if !m.DimMatch(m2) {
panic(ErrBadDim)
}
ret := NewMatrix(m.Dim())
for i, line := range m {
for j := range line {
ret[i][j] = m[i][j] + m2[i][j]
}
}
return ret
}
// Sub substracts the given matrix to the current one and return the result.
// Does not change current matrix state.
func (m Matrix) Sub(m2 Matrix) Matrix {
if !m.DimMatch(m2) {
panic(ErrBadDim)
}
ret := NewMatrix(m.Dim())
for i, line := range m {
for j := range line {
ret[i][j] = m[i][j] - m2[i][j]
}
}
return ret
}
// Equal compares the given matrix to the current one.
func (m Matrix) Equal(m2 Matrix) bool {
// If dim mismatch, mot equal.
if !m.DimMatch(m2) {
return false
}
// Check each element of both matrix.
x, y := m.Dim()
for i := 0; i < x; i++ {
for j := 0; j < y; j++ {
if m[i][j] != m2[i][j] {
return false
}
}
}
return true
}
// Validate checks if the current matrix is valid.
// NOTE: When instantiating a matrix outside ml.NewMatrix, Validate should be called.
func (m Matrix) Validate() error {
// Empty matrix is valid, but nil one is not.
if m == nil {
return ErrUninitialized
}
if len(m) == 0 {
return nil
}
y := len(m[0])
for _, line := range m {
if len(line) != y {
return ErrInconsistentData
}
}
return nil
}
// DimMatch checks if the given matrice has the same dimension as the current one.
func (m Matrix) DimMatch(m2 Matrix) bool {
x, y := m.Dim()
x1, y1 := m2.Dim()
return x == x1 && y == y1
}
// String pretty prints the matrix.
func (m Matrix) String() string {
if m == nil {
return "<nil>"
}
if len(m) == 0 {
return "||"
}
ret := ""
for _, line := range m {
ret += fmt.Sprintf("%4v\n", line)
}
return strings.TrimSpace(ret)
}
// Vector is a matrix with 1 column.
type Vector Matrix
// NewVector instantiate a new vector of dimensin n.
func NewVector(n int) Vector {
return Vector(NewMatrix(n, 1))
}
// Validate checks if the current matrix is valid.
// NOTE: When instantiating a matrix outside ml.NewVector, Validate should be called.
func (v Vector) Validate() error {
if err := Matrix(v).Validate(); err != nil {
return err
}
if len(v) > 0 && len(v[0]) != 1 {
return ErrNotAVector
}
return nil
} | week1/matrix.go | 0.734501 | 0.456834 | matrix.go | starcoder |
package context
import (
"github.com/pkg/errors"
"github.com/zoncoen/scenarigo/assert"
)
// assertions maps the assertion-function names usable in scenario YAML to
// their implementations. Entries wrapped in leftArrowFunc /
// listArgsLeftArrowFunc additionally support the left-arrow (multiline
// argument) syntax.
var assertions = map[string]interface{}{
	"and":                listArgsLeftArrowFunc(listArgsAssertion(assert.And)),
	"or":                 listArgsLeftArrowFunc(listArgsAssertion(assert.Or)),
	"notZero":            assert.NotZero(),
	"contains":           leftArrowFunc(assert.Contains),
	"notContains":        leftArrowFunc(assert.NotContains),
	"regexp":             assert.Regexp,
	"greaterThan":        assert.Greater,
	"greaterThanOrEqual": assert.GreaterOrEqual,
	"lessThan":           assert.Less,
	"lessThanOrEqual":    assert.LessOrEqual,
	"length":             assert.Length,
}
// leftArrowFunc adapts a one-assertion combinator so it can be invoked with
// the left-arrow function syntax: the YAML argument is first built into an
// assertion by UnmarshalArg and then passed to the combinator by Exec.
type leftArrowFunc func(assertion assert.Assertion) assert.Assertion

// Exec applies the combinator to the assertion produced by UnmarshalArg.
func (f leftArrowFunc) Exec(arg interface{}) (interface{}, error) {
	assertion, ok := arg.(assert.Assertion)
	if !ok {
		// BUG FIX: corrected the grammar of the error message ("a" -> "an").
		return nil, errors.New("argument must be an assert.Assertion")
	}
	return f(assertion), nil
}

// UnmarshalArg decodes the YAML argument and builds an assertion from it.
func (leftArrowFunc) UnmarshalArg(unmarshal func(interface{}) error) (interface{}, error) {
	var i interface{}
	if err := unmarshal(&i); err != nil {
		return nil, err
	}
	return assert.Build(i), nil
}
// listArgsAssertion wraps a variadic assertion combinator (e.g. assert.And)
// so it can accept arbitrary values: any argument that is not already an
// assert.Assertion is first turned into one with assert.Build.
func listArgsAssertion(base func(...assert.Assertion) assert.Assertion) func(...interface{}) assert.Assertion {
	return func(args ...interface{}) assert.Assertion {
		var assertions []assert.Assertion
		for _, arg := range args {
			// NOTE(review): this shadowing loop-capture guard is unnecessary
			// here — arg is consumed immediately, not captured by a closure.
			arg := arg
			assertion, ok := arg.(assert.Assertion)
			if !ok {
				assertion = assert.Build(arg)
			}
			assertions = append(assertions, assertion)
		}
		return base(assertions...)
	}
}

// listArgsLeftArrowFunc adapts a multi-assertion combinator to the
// left-arrow function syntax: UnmarshalArg decodes the YAML argument into a
// slice, and Exec passes the slice elements to the combinator.
type listArgsLeftArrowFunc func(args ...interface{}) assert.Assertion

// Exec applies the combinator to the slice produced by UnmarshalArg.
func (f listArgsLeftArrowFunc) Exec(arg interface{}) (interface{}, error) {
	assertions, ok := arg.([]interface{})
	if !ok {
		return nil, errors.New("argument must be a slice of interface{}")
	}
	return f(assertions...), nil
}

// UnmarshalArg decodes the YAML argument into a slice of raw values.
func (listArgsLeftArrowFunc) UnmarshalArg(unmarshal func(interface{}) error) (interface{}, error) {
	var args []interface{}
	if err := unmarshal(&args); err != nil {
		return nil, err
	}
	return args, nil
}
// Compare the proportions of certain attribute in two populations. The true proportions are pi1 and pi2, unknown.
// We take a random sample from each of the populations and observe y1, y2 ... number of instances having the attribute.
// The distribution y1|pi1 is binomial(n1, pi1), similarly for y2|pi2, and they are independent.
// Let there be independent priors for pi1 ... beta(a1, b1), and similarly for pi2.
// Posterior for pi1 is beta(a1post, b1post), where a1post = a1 +y1, and b1post = b1 + n1 -y1, similarly for pi2.
// Approximate each posterior distribution with normal distribution having the same mean and variance as the beta.
// The posterior of pid = pi1 - pi2 is approximately normal(mdpost, vardpost), where:
// mdpost = a1post/(a1post+b1post) - a2post/(a2post+b2post), and
// vardpost = a1post*b1post/((a1post+b1post)^2*(a1post+b1post+1)) + a2post*b2post/((a2post+b2post)^2*(a2post+b2post+1))
package bayes
import (
. "github.com/tokenme/probab/dst"
"math"
)
// BinomPiDiffMeanNApprox returns the mean of the normal approximation to the
// posterior of pi1 - pi2: the difference of the two Beta posterior means.
// Bolstad 2007 (2e): 248.
// untested ...
func BinomPiDiffMeanNApprox(a1, b1, a2, b2 float64, n1, n2, y1, y2 int64) float64 {
	// Posterior Beta parameters for each sample.
	a1post := a1 + float64(y1)
	b1post := b1 + float64(n1-y1)
	a2post := a2 + float64(y2)
	b2post := b2 + float64(n2-y2)
	// Difference of the Beta means a/(a+b).
	return a1post/(a1post+b1post) - a2post/(a2post+b2post)
}
// BinomPiDiffVarNApprox returns the variance of the normal approximation to
// the posterior of pi1 - pi2: the sum of the two independent Beta posterior
// variances. Bolstad 2007 (2e): 248.
//
// BUG FIX: the Beta(a, b) variance is a*b/((a+b)^2*(a+b+1)); the previous
// code computed a*b/sqrt(a+b)*(a+b+1), which is not a variance at all (it
// exceeds 1 for any realistic sample, while a proportion difference can
// never have variance that large).
func BinomPiDiffVarNApprox(a1, b1, a2, b2 float64, n1, n2, y1, y2 int64) float64 {
	a1post := a1 + float64(y1)
	b1post := b1 + float64(n1-y1)
	a2post := a2 + float64(y2)
	b2post := b2 + float64(n2-y2)
	s1 := a1post + b1post
	s2 := a2post + b2post
	v1 := a1post * b1post / (math.Pow(s1, 2) * (s1 + 1))
	v2 := a2post * b2post / (math.Pow(s2, 2) * (s2 + 1))
	return v1 + v2
}
// Credible interval for difference between binomial proportions, approximated by Normal distribution
// Bolstad 2007 (2e): 248, eq. 13.13
// postdiffmu = binomdiffpropnormapproxmu()
// postdiffsigma = sqrt(binomdiffpropnormapproxvar())
// untested ...
//
// BinomPiDiffCrI returns the (1-alpha) credible interval (low, high) for
// pid = pi1 - pi2 under the normal approximation to the posterior.
//
// NOTE(review): this assumes ZQtlFor(alpha/2) returns an upper-tail
// standard-normal quantile (positive z for small alpha). If it returns the
// conventional lower quantile (negative for alpha/2 < 0.5), low and high
// come out swapped — confirm ZQtlFor's convention in the dst package.
func BinomPiDiffCrI(postdiffmu, postdiffsigma, alpha float64) (float64, float64) {
	// postdiffmu posterior mean for difference of normal means
	// postdiffsigma posterior standard deviation for difference of normal means
	// alpha posterior probability that the true mean lies outside the credible interval
	z := ZQtlFor(alpha / 2)
	low := postdiffmu - z*postdiffsigma
	high := postdiffmu + z*postdiffsigma
	return low, high
}
// One-sided test for difference between binomial proportions, approximated by Normal distribution
// Bolstad 2007 (2e): 248-249, eq. 13.14
// H0: mud <= 0 vs H1: mud > 0
// Note: The alternative is in the direction we wish to detect, and is what we want to detect.
//
// BinomPiDiffOneSidedP returns the posterior probability of the null
// hypothesis, P(mud <= 0), under the normal approximation.
func BinomPiDiffOneSidedP(postdiffmu, postdiffsigma float64) float64 {
	standardized := -postdiffmu / postdiffsigma
	return ZCDFAt(standardized)
}
/*
Two-sided test for difference between binomial proportions, approximated by Normal distribution ///// check it vs the book!!!
Bolstad 2007 (2e): 249
H0: mu1 - mu2 == 0 vs H1: mu1 - mu2 != 0
func BinomDiffPropTwoSidedProb(postdiffmu, postdiffsigma, alpha){
low, high = normdiffmeansknowCrI(postdiffmu, postdiffsigma, alpha)
if 0 < low || 0 > high return(REJECT) else return(ACCEPT)
}
*/ | bayes/binom_p_diff.go | 0.789923 | 0.862352 | binom_p_diff.go | starcoder |
// Package termutil provides structures and helper functions to work with
// terminal (state, sizes). Taken from docker-ce source code.
package termutil
import (
"errors"
"fmt"
"os"
"os/signal"
"golang.org/x/sys/unix"
)
var (
	// ErrInvalidState is returned if the state of the terminal is invalid.
	// NOTE(review): the message is capitalized, contrary to Go error-string
	// convention (staticcheck ST1005); kept unchanged because callers may
	// match on the exact text.
	ErrInvalidState = errors.New("Invalid terminal state")
)
// State represents the state of the terminal.
type State struct {
	termios Termios // saved termios settings; written by tcget, re-applied by tcset
}
// isTerminal reports whether the given file descriptor refers to a terminal
// (i.e. whether querying its termios settings succeeds).
func isTerminal(fd uintptr) bool {
	var probe Termios
	rc := tcget(fd, &probe)
	return rc == 0
}
// restoreTerminal applies a previously saved state back to the terminal
// connected to the given file descriptor. It returns ErrInvalidState when
// state is nil.
func restoreTerminal(fd uintptr, state *State) error {
	if state == nil {
		return ErrInvalidState
	}
	rc := tcset(fd, &state.termios)
	if rc != 0 {
		return rc
	}
	return nil
}
// saveState captures the current termios settings of the terminal connected
// to the given file descriptor, so they can later be re-applied with
// restoreTerminal.
func saveState(fd uintptr) (*State, error) {
	saved := new(State)
	if rc := tcget(fd, &saved.termios); rc != 0 {
		return nil, rc
	}
	return saved, nil
}
// disableEcho applies the supplied saved state to the terminal connected to
// the file descriptor, with input echo switched off, and installs an
// interrupt handler that restores the original state on Ctrl-C.
func disableEcho(fd uintptr, state *State) error {
	noEcho := state.termios
	noEcho.Lflag &^= unix.ECHO
	rc := tcset(fd, &noEcho)
	if rc != 0 {
		return rc
	}
	handleInterrupt(fd, state)
	return nil
}
// setRawTerminalInput puts the terminal connected to the given file descriptor
// into raw mode and returns the previous state. On UNIX, this puts both the
// input and output into raw mode. On Windows, it only puts the input into raw
// mode.
func setRawTerminalInput(fd uintptr) (*State, error) {
	previous, err := makeRaw(fd)
	if err != nil {
		return nil, err
	}
	// handleInterrupt(fd, previous) // deliberately left disabled, as before
	return previous, nil
}
// setRawTerminalOutput puts the output of terminal connected to the given file
// descriptor into raw mode. On UNIX, this does nothing and returns nil for the
// state. On Windows, it disables LF -> CRLF translation.
func setRawTerminalOutput(fd uintptr) (*State, error) {
	var noState *State // nothing to change on UNIX
	return noState, nil
}
// handleInterrupt restores the saved terminal state when the process receives
// an interrupt signal (Ctrl-C), so the shell is left in a usable condition.
// The watcher goroutine exits once the channel is closed.
func handleInterrupt(fd uintptr, state *State) {
	sigchan := make(chan os.Signal, 1)
	signal.Notify(sigchan, os.Interrupt)
	go func() {
		for range sigchan {
			// quit cleanly and the new terminal item is on a new line
			fmt.Println()
			// Stop must precede close: it unregisters sigchan so the runtime
			// cannot attempt a send on the channel after it is closed.
			signal.Stop(sigchan)
			close(sigchan)
			// Restore error is ignored: we are on a signal path with no
			// caller to report to.
			restoreTerminal(fd, state)
			// os.Exit(1)
		}
	}()
}
package iso20022
// Provides information about the rates related to securities movement.
// All fields are optional (xml "omitempty"); use the Add*/Set* helper methods
// on RateDetails23 to allocate and populate them.
type RateDetails23 struct {
	// Rate used for additional tax that cannot be categorised.
	AdditionalTax *RateAndAmountFormat39Choice `xml:"AddtlTax,omitempty"`
	// Rate used to calculate the amount of the charges/fees that cannot be categorised.
	ChargesFees *RateAndAmountFormat39Choice `xml:"ChrgsFees,omitempty"`
	// Percentage of fiscal tax to apply.
	FiscalStamp *PercentageRate `xml:"FsclStmp,omitempty"`
	// Rate resulting from a fully franked dividend paid by a company; rate includes tax credit for companies that have made sufficient tax payments during fiscal period.
	FullyFrankedRate *RateAndAmountFormat39Choice `xml:"FullyFrnkdRate,omitempty"`
	// Cash dividend amount per equity before deductions or allowances have been made.
	GrossDividendRate []*GrossDividendRateFormat22Choice `xml:"GrssDvddRate,omitempty"`
	// Cash rate made available, as an incentive, in addition to the solicitation fee, in order to encourage early participation in an offer.
	EarlySolicitationFeeRate *SolicitationFeeRateFormat8Choice `xml:"EarlySlctnFeeRate,omitempty"`
	// Cash rate made available in an event in order to encourage participation in the offer. As information, Payment is made to a third party who has solicited an entity to take part in the offer.
	ThirdPartyIncentiveRate *RateAndAmountFormat39Choice `xml:"ThrdPtyIncntivRate,omitempty"`
	// Actual interest rate used for the payment of the interest for the specified interest period.
	InterestRateUsedForPayment []*InterestRateUsedForPaymentFormat7Choice `xml:"IntrstRateUsdForPmt,omitempty"`
	// Cash dividend amount per equity after deductions or allowances have been made.
	NetDividendRate []*NetDividendRateFormat24Choice `xml:"NetDvddRate,omitempty"`
	// Rate per share to which a non-resident is entitled.
	NonResidentRate *RateAndAmountFormat39Choice `xml:"NonResdtRate,omitempty"`
	// Rate applicable to the event announced, for example, redemption rate for a redemption event.
	ApplicableRate *PercentageRate `xml:"AplblRate,omitempty"`
	// Rate of the cash premium made available if the securities holder consents or participates to an event, for example consent fees or solicitation fee.
	SolicitationFeeRate *SolicitationFeeRateFormat8Choice `xml:"SlctnFeeRate,omitempty"`
	// Amount of money per equity allocated as the result of a tax credit.
	TaxCreditRate []*TaxCreditRateFormat7Choice `xml:"TaxCdtRate,omitempty"`
	// Percentage of a cash distribution that will be withheld by the tax authorities of the jurisdiction of the issuer, for which a relief at source and/or reclaim may be possible.
	WithholdingTaxRate []*RateAndAmountFormat40Choice `xml:"WhldgTaxRate,omitempty"`
	// Rate at which the income will be withheld by a jurisdiction other than the jurisdiction of the issuer’s country of tax incorporation, for which a relief at source and/or reclaim may be possible. It is levied in complement or offset of the withholding tax rate (TAXR) levied by the jurisdiction of the issuer’s tax domicile.
	SecondLevelTax []*RateAndAmountFormat40Choice `xml:"ScndLvlTax,omitempty"`
	// Taxation applied on an amount clearly identified as an income.
	TaxOnIncome *RateAndAmountFormat39Choice `xml:"TaxOnIncm,omitempty"`
	// Taxation applied on an amount clearly identified as capital profits, capital gains.
	TaxOnProfits *PercentageRate `xml:"TaxOnPrfts,omitempty"`
	// Percentage of cash that was paid in excess of actual tax obligation and was reclaimed.
	TaxReclaimRate *PercentageRate `xml:"TaxRclmRate,omitempty"`
	// Portion of the fund distribution which represents the average accrued income included in the purchase price for units bought during the account period.
	EqualisationRate *ActiveCurrencyAnd13DecimalAmount `xml:"EqulstnRate,omitempty"`
}
// AddAdditionalTax allocates a fresh AdditionalTax choice (replacing any
// previous value) and returns it for population.
func (r *RateDetails23) AddAdditionalTax() *RateAndAmountFormat39Choice {
	r.AdditionalTax = new(RateAndAmountFormat39Choice)
	return r.AdditionalTax
}

// AddChargesFees allocates a fresh ChargesFees choice and returns it.
func (r *RateDetails23) AddChargesFees() *RateAndAmountFormat39Choice {
	r.ChargesFees = new(RateAndAmountFormat39Choice)
	return r.ChargesFees
}

// SetFiscalStamp stores value as the fiscal stamp percentage.
func (r *RateDetails23) SetFiscalStamp(value string) {
	r.FiscalStamp = (*PercentageRate)(&value)
}

// AddFullyFrankedRate allocates a fresh FullyFrankedRate choice and returns it.
func (r *RateDetails23) AddFullyFrankedRate() *RateAndAmountFormat39Choice {
	r.FullyFrankedRate = new(RateAndAmountFormat39Choice)
	return r.FullyFrankedRate
}

// AddGrossDividendRate appends a new entry to the GrossDividendRate list and
// returns it for population.
func (r *RateDetails23) AddGrossDividendRate() *GrossDividendRateFormat22Choice {
	newValue := new(GrossDividendRateFormat22Choice)
	r.GrossDividendRate = append(r.GrossDividendRate, newValue)
	return newValue
}

// AddEarlySolicitationFeeRate allocates a fresh EarlySolicitationFeeRate
// choice and returns it.
func (r *RateDetails23) AddEarlySolicitationFeeRate() *SolicitationFeeRateFormat8Choice {
	r.EarlySolicitationFeeRate = new(SolicitationFeeRateFormat8Choice)
	return r.EarlySolicitationFeeRate
}

// AddThirdPartyIncentiveRate allocates a fresh ThirdPartyIncentiveRate choice
// and returns it.
func (r *RateDetails23) AddThirdPartyIncentiveRate() *RateAndAmountFormat39Choice {
	r.ThirdPartyIncentiveRate = new(RateAndAmountFormat39Choice)
	return r.ThirdPartyIncentiveRate
}

// AddInterestRateUsedForPayment appends a new entry to the
// InterestRateUsedForPayment list and returns it.
func (r *RateDetails23) AddInterestRateUsedForPayment() *InterestRateUsedForPaymentFormat7Choice {
	newValue := new(InterestRateUsedForPaymentFormat7Choice)
	r.InterestRateUsedForPayment = append(r.InterestRateUsedForPayment, newValue)
	return newValue
}

// AddNetDividendRate appends a new entry to the NetDividendRate list and
// returns it.
func (r *RateDetails23) AddNetDividendRate() *NetDividendRateFormat24Choice {
	newValue := new(NetDividendRateFormat24Choice)
	r.NetDividendRate = append(r.NetDividendRate, newValue)
	return newValue
}

// AddNonResidentRate allocates a fresh NonResidentRate choice and returns it.
func (r *RateDetails23) AddNonResidentRate() *RateAndAmountFormat39Choice {
	r.NonResidentRate = new(RateAndAmountFormat39Choice)
	return r.NonResidentRate
}

// SetApplicableRate stores value as the applicable percentage rate.
func (r *RateDetails23) SetApplicableRate(value string) {
	r.ApplicableRate = (*PercentageRate)(&value)
}

// AddSolicitationFeeRate allocates a fresh SolicitationFeeRate choice and
// returns it.
func (r *RateDetails23) AddSolicitationFeeRate() *SolicitationFeeRateFormat8Choice {
	r.SolicitationFeeRate = new(SolicitationFeeRateFormat8Choice)
	return r.SolicitationFeeRate
}

// AddTaxCreditRate appends a new entry to the TaxCreditRate list and returns it.
func (r *RateDetails23) AddTaxCreditRate() *TaxCreditRateFormat7Choice {
	newValue := new(TaxCreditRateFormat7Choice)
	r.TaxCreditRate = append(r.TaxCreditRate, newValue)
	return newValue
}

// AddWithholdingTaxRate appends a new entry to the WithholdingTaxRate list
// and returns it.
func (r *RateDetails23) AddWithholdingTaxRate() *RateAndAmountFormat40Choice {
	newValue := new(RateAndAmountFormat40Choice)
	r.WithholdingTaxRate = append(r.WithholdingTaxRate, newValue)
	return newValue
}

// AddSecondLevelTax appends a new entry to the SecondLevelTax list and
// returns it.
func (r *RateDetails23) AddSecondLevelTax() *RateAndAmountFormat40Choice {
	newValue := new(RateAndAmountFormat40Choice)
	r.SecondLevelTax = append(r.SecondLevelTax, newValue)
	return newValue
}

// AddTaxOnIncome allocates a fresh TaxOnIncome choice and returns it.
func (r *RateDetails23) AddTaxOnIncome() *RateAndAmountFormat39Choice {
	r.TaxOnIncome = new(RateAndAmountFormat39Choice)
	return r.TaxOnIncome
}

// SetTaxOnProfits stores value as the tax-on-profits percentage.
func (r *RateDetails23) SetTaxOnProfits(value string) {
	r.TaxOnProfits = (*PercentageRate)(&value)
}

// SetTaxReclaimRate stores value as the tax reclaim percentage.
func (r *RateDetails23) SetTaxReclaimRate(value string) {
	r.TaxReclaimRate = (*PercentageRate)(&value)
}

// SetEqualisationRate builds the equalisation amount from value and currency.
func (r *RateDetails23) SetEqualisationRate(value, currency string) {
	r.EqualisationRate = NewActiveCurrencyAnd13DecimalAmount(value, currency)
}
package models
import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// IdentityUserFlowAttribute models a user-flow attribute resource. All
// accessors below are nil-receiver safe, following the generated
// getter/setter style of this package.
type IdentityUserFlowAttribute struct {
    Entity
    // The data type of the user flow attribute. This cannot be modified after the custom user flow attribute is created. The supported values for dataType are: string , boolean , int64 , stringCollection , dateTime.
    dataType *IdentityUserFlowAttributeDataType
    // The description of the user flow attribute that's shown to the user at the time of sign-up.
    description *string
    // The display name of the user flow attribute.
    displayName *string
    // The type of the user flow attribute. This is a read-only attribute that is automatically set. Depending on the type of attribute, the values for this property will be builtIn, custom, or required.
    userFlowAttributeType *IdentityUserFlowAttributeType
}
// NewIdentityUserFlowAttribute instantiates a new identityUserFlowAttribute and sets the default values.
func NewIdentityUserFlowAttribute()(*IdentityUserFlowAttribute) {
    m := &IdentityUserFlowAttribute{
        Entity: *NewEntity(),
    }
    return m
}
// CreateIdentityUserFlowAttributeFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value.
// The discriminator is not inspected here: this type has no registered
// subtypes, so the base constructor is always used.
func CreateIdentityUserFlowAttributeFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    return NewIdentityUserFlowAttribute(), nil
}
// GetDataType gets the dataType property value. The data type of the user flow attribute. This cannot be modified after the custom user flow attribute is created. The supported values for dataType are: string , boolean , int64 , stringCollection , dateTime.
// Safe to call on a nil receiver.
func (m *IdentityUserFlowAttribute) GetDataType()(*IdentityUserFlowAttributeDataType) {
    if m == nil {
        return nil
    }
    return m.dataType
}
// GetDescription gets the description property value. The description of the user flow attribute that's shown to the user at the time of sign-up.
// Safe to call on a nil receiver.
func (m *IdentityUserFlowAttribute) GetDescription()(*string) {
    if m == nil {
        return nil
    }
    return m.description
}
// GetDisplayName gets the displayName property value. The display name of the user flow attribute.
// Safe to call on a nil receiver.
func (m *IdentityUserFlowAttribute) GetDisplayName()(*string) {
    if m == nil {
        return nil
    }
    return m.displayName
}
// GetFieldDeserializers the deserialization information for the current model.
// Each entry maps a JSON property name to a closure that reads the property
// from the parse node and stores it on the model; entries inherited from the
// embedded Entity are included first and may be overridden.
func (m *IdentityUserFlowAttribute) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
    res := m.Entity.GetFieldDeserializers()
    res["dataType"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetEnumValue(ParseIdentityUserFlowAttributeDataType)
        if err != nil {
            return err
        }
        // A nil value (property absent or JSON null) leaves the field untouched.
        if val != nil {
            m.SetDataType(val.(*IdentityUserFlowAttributeDataType))
        }
        return nil
    }
    res["description"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetDescription(val)
        }
        return nil
    }
    res["displayName"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetStringValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetDisplayName(val)
        }
        return nil
    }
    res["userFlowAttributeType"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetEnumValue(ParseIdentityUserFlowAttributeType)
        if err != nil {
            return err
        }
        if val != nil {
            m.SetUserFlowAttributeType(val.(*IdentityUserFlowAttributeType))
        }
        return nil
    }
    return res
}
// GetUserFlowAttributeType gets the userFlowAttributeType property value. The type of the user flow attribute. This is a read-only attribute that is automatically set. Depending on the type of attribute, the values for this property will be builtIn, custom, or required.
// Safe to call on a nil receiver.
func (m *IdentityUserFlowAttribute) GetUserFlowAttributeType()(*IdentityUserFlowAttributeType) {
    if m == nil {
        return nil
    }
    return m.userFlowAttributeType
}
// Serialize serializes information the current object.
// Base Entity fields are written first; enum fields are written through their
// String() form, and the two string properties are always emitted (a nil
// pointer is handled by the writer).
func (m *IdentityUserFlowAttribute) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
    err := m.Entity.Serialize(writer)
    if err != nil {
        return err
    }
    if m.GetDataType() != nil {
        cast := (*m.GetDataType()).String()
        err = writer.WriteStringValue("dataType", &cast)
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("description", m.GetDescription())
        if err != nil {
            return err
        }
    }
    {
        err = writer.WriteStringValue("displayName", m.GetDisplayName())
        if err != nil {
            return err
        }
    }
    if m.GetUserFlowAttributeType() != nil {
        cast := (*m.GetUserFlowAttributeType()).String()
        err = writer.WriteStringValue("userFlowAttributeType", &cast)
        if err != nil {
            return err
        }
    }
    return nil
}
// SetDataType sets the dataType property value. The data type of the user flow attribute. This cannot be modified after the custom user flow attribute is created. The supported values for dataType are: string , boolean , int64 , stringCollection , dateTime.
func (m *IdentityUserFlowAttribute) SetDataType(value *IdentityUserFlowAttributeDataType)() {
if m != nil {
m.dataType = value
}
}
// SetDescription sets the description property value. The description of the user flow attribute that's shown to the user at the time of sign-up.
func (m *IdentityUserFlowAttribute) SetDescription(value *string)() {
if m != nil {
m.description = value
}
}
// SetDisplayName sets the displayName property value. The display name of the user flow attribute.
func (m *IdentityUserFlowAttribute) SetDisplayName(value *string)() {
if m != nil {
m.displayName = value
}
}
// SetUserFlowAttributeType sets the userFlowAttributeType property value. The type of the user flow attribute. This is a read-only attribute that is automatically set. Depending on the type of attribute, the values for this property will be builtIn, custom, or required.
func (m *IdentityUserFlowAttribute) SetUserFlowAttributeType(value *IdentityUserFlowAttributeType)() {
if m != nil {
m.userFlowAttributeType = value
}
} | models/identity_user_flow_attribute.go | 0.679604 | 0.403626 | identity_user_flow_attribute.go | starcoder |
package unityai
import "sort"
// Vertex2Array is a slice of 2D points that implements sort.Interface,
// ordering points lexicographically: ascending x first, ties broken by
// ascending y.
type Vertex2Array []Vector2f

// Len returns the number of points (sort.Interface).
func (this Vertex2Array) Len() int {
	return len(this)
}

// Less orders points by ascending x, then ascending y (sort.Interface).
func (this Vertex2Array) Less(i, j int) bool {
	a := this[i]
	b := this[j]
	return a.x < b.x || (a.x == b.x && a.y < b.y)
}

// Swap exchanges two points in place (sort.Interface).
func (this Vertex2Array) Swap(i, j int) {
	this[i], this[j] = this[j], this[i]
}
// resize_uninitialized resizes the slice to exactly size elements without
// guaranteeing the contents of newly exposed elements:
//   - if the backing array already has capacity, the slice is re-sliced in
//     place and any elements between the old and new length keep whatever
//     stale values they held (hence "uninitialized");
//   - otherwise the slice is grown with append, which does zero-fill the new
//     elements and may reallocate the backing array.
func (this *Vertex2Array) resize_uninitialized(size int) {
	if cap(*this) >= size {
		*this = (*this)[:size]
	} else {
		*this = append(*this, make([]Vector2f, size-len(*this))...)
	}
}
// empty reports whether the array holds no points.
func (this *Vertex2Array) empty() bool {
	return this.Len() == 0
}
// pop_back removes the last point, keeping the backing capacity.
func (this *Vertex2Array) pop_back() {
	newLen := this.Len() - 1
	*this = (*this)[:newLen]
}
// push_back appends point to the end of the array, growing it if needed.
func (this *Vertex2Array) push_back(point Vector2f) {
	grown := append(*this, point)
	*this = grown
}
// erase removes the element at index:
//   - last element: truncate in place;
//   - first element: advance the slice start (keeps the backing array);
//   - middle element: copy the remainder into a freshly allocated slice.
//
// NOTE(review): the middle case allocates, detaching this slice from any
// other aliases of the old backing array; the cheaper in-place
// append((*this)[:index], (*this)[index+1:]...) would keep sharing it.
// Confirm whether the detachment is intentional before changing this.
func (this *Vertex2Array) erase(index int) {
	if index == len(*this)-1 {
		*this = (*this)[:len(*this)-1]
	} else if index == 0 {
		*this = (*this)[1:]
	} else {
		tmp := make([]Vector2f, len(*this)-1)
		copy(tmp[:index], (*this)[:index])
		copy(tmp[index:], (*this)[index+1:])
		*this = tmp
	}
}
// CalculatePointSide returns the signed area term
// (l1.y-l0.y)*(point.x-l0.x) - (l1.x-l0.x)*(point.y-l0.y); its sign tells
// which side of the directed line l0->l1 the point lies on (zero: collinear).
func CalculatePointSide(l0, l1, point Vector2f) float32 {
	dx := l1.x - l0.x
	dy := l1.y - l0.y
	px := point.x - l0.x
	py := point.y - l0.y
	return dy*px - dx*py
}
// CalculateConvexHull computes the convex hull of points using Andrew's
// monotone chain algorithm and writes the result into hull. points is sorted
// in place (lexicographically by x, then y) as a side effect. Popping while
// CalculatePointSide(...) <= 0 also discards collinear points, so the hull
// keeps only strict corners. The final pop_back drops the closing vertex,
// which duplicates the first one.
//
// NOTE(review): for a single input point the final pop_back leaves hull
// empty — confirm degenerate inputs are handled (or filtered) upstream.
func CalculateConvexHull(hull *Vertex2Array, points *Vertex2Array) {
	// TODO : prune (near) duplicate points before calculating hull
	hull.resize_uninitialized(0)
	if points.empty() {
		return
	}
	sort.Sort(points)
	// Andrews monotone chain
	// First chain: scan points in ascending order.
	for i := 0; i < points.Len(); i++ {
		for hull.Len() >= 2 && CalculatePointSide((*hull)[hull.Len()-2], (*hull)[hull.Len()-1], (*points)[i]) <= 0 {
			hull.pop_back()
		}
		hull.push_back((*points)[i])
	}
	// Second chain: scan back in descending order; j prevents the first
	// chain from being popped.
	for i, j := points.Len()-2, hull.Len()+1; i >= 0; i-- {
		for hull.Len() >= j && CalculatePointSide((*hull)[hull.Len()-2], (*hull)[hull.Len()-1], (*points)[i]) <= 0 {
			hull.pop_back()
		}
		hull.push_back((*points)[i])
	}
	hull.pop_back()
}
// FitCapsuleToExtents derives a capsule radius and cylinder height from a
// box half-extent: the radius is the larger horizontal extent (x or z), and
// the height is whatever vertical extent remains after subtracting the
// radius, clamped to be non-negative.
func FitCapsuleToExtents(radius, height *float32, capsuleExtents Vector3f) {
	rad := FloatMax(capsuleExtents.x, capsuleExtents.z)
	*radius = rad
	remaining := capsuleExtents.y - rad
	*height = FloatMax(0.0, remaining)
}
// CalcCapsuleWorldExtents computes world-space AABB half-extents for a
// capsule aligned with yAxis. xAxis and zAxis are accepted for signature
// symmetry with CalcBoxWorldExtents but are unused here.
func CalcCapsuleWorldExtents(worldExtents *Vector3f, localExtents, xAxis, yAxis, zAxis Vector3f) {
	var radius, height float32
	FitCapsuleToExtents(&radius, &height, localExtents)
	alongAxis := AbsVector3f(yAxis).Mulf(height)
	*worldExtents = alongAxis.Add(NewVector3f(radius, radius, radius))
}
func CalcBoxWorldExtents(worldExtents *Vector3f, localExtents, xAxis, yAxis, zAxis Vector3f) {
*worldExtents = AbsVector3f(xAxis).Mulf(localExtents.x).Add(AbsVector3f(yAxis).Mulf(localExtents.y)).Add(AbsVector3f(zAxis).Mulf(localExtents.z))
} | hull_avoidance.go | 0.519521 | 0.571527 | hull_avoidance.go | starcoder |
package geojson
import (
"encoding/json"
"go.mongodb.org/mongo-driver/bson"
)
// A Feature corresponds to GeoJSON feature object
type Feature struct {
	ID          interface{}            `json:"id,omitempty" bson:",omitempty"`
	Type        string                 `json:"type"` // always "Feature" for values produced by this package
	BoundingBox []float64              `json:"bbox,omitempty" bson:",omitempty"`
	Geometry    *Geometry              `json:"geometry"`
	Properties  map[string]interface{} `json:"properties"` // no omitempty: a nil map marshals as JSON null
	CRS         map[string]interface{} `json:"crs,omitempty" bson:",omitempty"` // Coordinate Reference System Objects are not currently supported
}
// NewFeature creates and initializes a GeoJSON feature given the required
// attributes. Properties starts as an empty (non-nil) map.
func NewFeature(geometry *Geometry) *Feature {
	f := Feature{
		Type:       "Feature",
		Geometry:   geometry,
		Properties: map[string]interface{}{},
	}
	return &f
}
// NewPointFeature creates and initializes a GeoJSON feature with a point geometry using the given coordinate.
func NewPointFeature(coordinate []float64) *Feature {
	g := NewPointGeometry(coordinate)
	return NewFeature(g)
}

// NewMultiPointFeature creates and initializes a GeoJSON feature with a multi-point geometry using the given coordinates.
func NewMultiPointFeature(coordinates ...[]float64) *Feature {
	g := NewMultiPointGeometry(coordinates...)
	return NewFeature(g)
}

// NewLineStringFeature creates and initializes a GeoJSON feature with a line string geometry using the given coordinates.
func NewLineStringFeature(coordinates [][]float64) *Feature {
	g := NewLineStringGeometry(coordinates)
	return NewFeature(g)
}

// NewMultiLineStringFeature creates and initializes a GeoJSON feature with a multi-line string geometry using the given lines.
func NewMultiLineStringFeature(lines ...[][]float64) *Feature {
	g := NewMultiLineStringGeometry(lines...)
	return NewFeature(g)
}

// NewPolygonFeature creates and initializes a GeoJSON feature with a polygon geometry using the given polygon.
func NewPolygonFeature(polygon [][][]float64) *Feature {
	g := NewPolygonGeometry(polygon)
	return NewFeature(g)
}

// NewMultiPolygonFeature creates and initializes a GeoJSON feature with a multi-polygon geometry using the given polygons.
func NewMultiPolygonFeature(polygons ...[][][]float64) *Feature {
	g := NewMultiPolygonGeometry(polygons...)
	return NewFeature(g)
}

// NewCollectionFeature creates and initializes a GeoJSON feature with a geometry collection geometry using the given geometries.
func NewCollectionFeature(geometries ...*Geometry) *Feature {
	g := NewCollectionGeometry(geometries...)
	return NewFeature(g)
}
// MarshalJSON converts the feature object into the proper JSON.
// It will handle the encoding of all the child geometries.
// Alternately one can call json.Marshal(f) directly for the same result.
// Empty BoundingBox/Properties/CRS are left unset; since the Properties tag
// has no omitempty, an unset map is emitted as JSON null.
func (f Feature) MarshalJSON() ([]byte, error) {
	// The local alias drops Feature's MarshalJSON method so the json.Marshal
	// call below does not recurse into this function.
	type feature Feature
	out := &feature{
		ID:       f.ID,
		Type:     "Feature",
		Geometry: f.Geometry,
	}
	if len(f.BoundingBox) > 0 {
		out.BoundingBox = f.BoundingBox
	}
	if len(f.Properties) > 0 {
		out.Properties = f.Properties
	}
	if len(f.CRS) > 0 {
		out.CRS = f.CRS
	}
	return json.Marshal(out)
}
// MarshalBSON converts the feature object into the proper BSON.
// It will handle the encoding of all the child geometries.
// Mirrors MarshalJSON: empty BoundingBox/Properties/CRS are left unset.
func (f Feature) MarshalBSON() ([]byte, error) {
	// The local alias drops Feature's marshal methods so bson.Marshal below
	// does not recurse into this function.
	type feature Feature
	out := &feature{
		ID:       f.ID,
		Type:     "Feature",
		Geometry: f.Geometry,
	}
	if len(f.BoundingBox) > 0 {
		out.BoundingBox = f.BoundingBox
	}
	if len(f.Properties) > 0 {
		out.Properties = f.Properties
	}
	if len(f.CRS) > 0 {
		out.CRS = f.CRS
	}
	return bson.Marshal(out)
}
// UnmarshalFeature decodes the data into a GeoJSON feature.
// Alternately one can call json.Unmarshal(f) directly for the same result.
func UnmarshalFeature(data []byte) (*Feature, error) {
f := &Feature{}
err := json.Unmarshal(data, f)
if err != nil {
return nil, err
}
return f, nil
} | feature.go | 0.826747 | 0.520314 | feature.go | starcoder |
package onshape
import (
"encoding/json"
)
// BTPStatementLoopForIn279 struct for BTPStatementLoopForIn279.
// Extends BTPStatementLoop277 with the fields specific to a "for ... in"
// loop node; all fields are optional pointers so unset values can be
// distinguished from zero values.
type BTPStatementLoopForIn279 struct {
	BTPStatementLoop277
	// Type discriminator written into the "btType" JSON property.
	BtType *string `json:"btType,omitempty"`
	// Presumably the expression producing the iterated container — verify against the Onshape schema.
	Container *BTPExpression9 `json:"container,omitempty"`
	IsVarDeclaredHere *bool `json:"isVarDeclaredHere,omitempty"`
	Name *BTPIdentifier8 `json:"name,omitempty"`
	SpaceBeforeVar *BTPSpace10 `json:"spaceBeforeVar,omitempty"`
	StandardType *string `json:"standardType,omitempty"`
	TypeName *string `json:"typeName,omitempty"`
	// Presumably the loop variable identifier — verify against the Onshape schema.
	Var *BTPIdentifier8 `json:"var,omitempty"`
}
// NewBTPStatementLoopForIn279 instantiates a new BTPStatementLoopForIn279 object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewBTPStatementLoopForIn279() *BTPStatementLoopForIn279 {
	obj := BTPStatementLoopForIn279{}
	return &obj
}

// NewBTPStatementLoopForIn279WithDefaults instantiates a new BTPStatementLoopForIn279 object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewBTPStatementLoopForIn279WithDefaults() *BTPStatementLoopForIn279 {
	obj := BTPStatementLoopForIn279{}
	return &obj
}
// GetBtType returns the BtType field value if set, zero value otherwise.
func (o *BTPStatementLoopForIn279) GetBtType() string {
	if v, ok := o.GetBtTypeOk(); ok {
		return *v
	}
	var zero string
	return zero
}

// GetBtTypeOk returns a tuple with the BtType field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementLoopForIn279) GetBtTypeOk() (*string, bool) {
	if o == nil || o.BtType == nil {
		return nil, false
	}
	return o.BtType, true
}

// HasBtType returns a boolean if a field has been set.
func (o *BTPStatementLoopForIn279) HasBtType() bool {
	_, ok := o.GetBtTypeOk()
	return ok
}

// SetBtType gets a reference to the given string and assigns it to the BtType field.
func (o *BTPStatementLoopForIn279) SetBtType(v string) {
	o.BtType = &v
}

// GetContainer returns the Container field value if set, zero value otherwise.
func (o *BTPStatementLoopForIn279) GetContainer() BTPExpression9 {
	if v, ok := o.GetContainerOk(); ok {
		return *v
	}
	var zero BTPExpression9
	return zero
}

// GetContainerOk returns a tuple with the Container field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementLoopForIn279) GetContainerOk() (*BTPExpression9, bool) {
	if o == nil || o.Container == nil {
		return nil, false
	}
	return o.Container, true
}

// HasContainer returns a boolean if a field has been set.
func (o *BTPStatementLoopForIn279) HasContainer() bool {
	_, ok := o.GetContainerOk()
	return ok
}

// SetContainer gets a reference to the given BTPExpression9 and assigns it to the Container field.
func (o *BTPStatementLoopForIn279) SetContainer(v BTPExpression9) {
	o.Container = &v
}

// GetIsVarDeclaredHere returns the IsVarDeclaredHere field value if set, zero value otherwise.
func (o *BTPStatementLoopForIn279) GetIsVarDeclaredHere() bool {
	if v, ok := o.GetIsVarDeclaredHereOk(); ok {
		return *v
	}
	var zero bool
	return zero
}

// GetIsVarDeclaredHereOk returns a tuple with the IsVarDeclaredHere field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementLoopForIn279) GetIsVarDeclaredHereOk() (*bool, bool) {
	if o == nil || o.IsVarDeclaredHere == nil {
		return nil, false
	}
	return o.IsVarDeclaredHere, true
}

// HasIsVarDeclaredHere returns a boolean if a field has been set.
func (o *BTPStatementLoopForIn279) HasIsVarDeclaredHere() bool {
	_, ok := o.GetIsVarDeclaredHereOk()
	return ok
}

// SetIsVarDeclaredHere gets a reference to the given bool and assigns it to the IsVarDeclaredHere field.
func (o *BTPStatementLoopForIn279) SetIsVarDeclaredHere(v bool) {
	o.IsVarDeclaredHere = &v
}

// GetName returns the Name field value if set, zero value otherwise.
func (o *BTPStatementLoopForIn279) GetName() BTPIdentifier8 {
	if v, ok := o.GetNameOk(); ok {
		return *v
	}
	var zero BTPIdentifier8
	return zero
}

// GetNameOk returns a tuple with the Name field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementLoopForIn279) GetNameOk() (*BTPIdentifier8, bool) {
	if o == nil || o.Name == nil {
		return nil, false
	}
	return o.Name, true
}

// HasName returns a boolean if a field has been set.
func (o *BTPStatementLoopForIn279) HasName() bool {
	_, ok := o.GetNameOk()
	return ok
}

// SetName gets a reference to the given BTPIdentifier8 and assigns it to the Name field.
func (o *BTPStatementLoopForIn279) SetName(v BTPIdentifier8) {
	o.Name = &v
}

// GetSpaceBeforeVar returns the SpaceBeforeVar field value if set, zero value otherwise.
func (o *BTPStatementLoopForIn279) GetSpaceBeforeVar() BTPSpace10 {
	if v, ok := o.GetSpaceBeforeVarOk(); ok {
		return *v
	}
	var zero BTPSpace10
	return zero
}

// GetSpaceBeforeVarOk returns a tuple with the SpaceBeforeVar field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementLoopForIn279) GetSpaceBeforeVarOk() (*BTPSpace10, bool) {
	if o == nil || o.SpaceBeforeVar == nil {
		return nil, false
	}
	return o.SpaceBeforeVar, true
}

// HasSpaceBeforeVar returns a boolean if a field has been set.
func (o *BTPStatementLoopForIn279) HasSpaceBeforeVar() bool {
	_, ok := o.GetSpaceBeforeVarOk()
	return ok
}

// SetSpaceBeforeVar gets a reference to the given BTPSpace10 and assigns it to the SpaceBeforeVar field.
func (o *BTPStatementLoopForIn279) SetSpaceBeforeVar(v BTPSpace10) {
	o.SpaceBeforeVar = &v
}

// GetStandardType returns the StandardType field value if set, zero value otherwise.
func (o *BTPStatementLoopForIn279) GetStandardType() string {
	if v, ok := o.GetStandardTypeOk(); ok {
		return *v
	}
	var zero string
	return zero
}

// GetStandardTypeOk returns a tuple with the StandardType field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementLoopForIn279) GetStandardTypeOk() (*string, bool) {
	if o == nil || o.StandardType == nil {
		return nil, false
	}
	return o.StandardType, true
}

// HasStandardType returns a boolean if a field has been set.
func (o *BTPStatementLoopForIn279) HasStandardType() bool {
	_, ok := o.GetStandardTypeOk()
	return ok
}

// SetStandardType gets a reference to the given string and assigns it to the StandardType field.
func (o *BTPStatementLoopForIn279) SetStandardType(v string) {
	o.StandardType = &v
}

// GetTypeName returns the TypeName field value if set, zero value otherwise.
func (o *BTPStatementLoopForIn279) GetTypeName() string {
	if v, ok := o.GetTypeNameOk(); ok {
		return *v
	}
	var zero string
	return zero
}

// GetTypeNameOk returns a tuple with the TypeName field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementLoopForIn279) GetTypeNameOk() (*string, bool) {
	if o == nil || o.TypeName == nil {
		return nil, false
	}
	return o.TypeName, true
}

// HasTypeName returns a boolean if a field has been set.
func (o *BTPStatementLoopForIn279) HasTypeName() bool {
	_, ok := o.GetTypeNameOk()
	return ok
}

// SetTypeName gets a reference to the given string and assigns it to the TypeName field.
func (o *BTPStatementLoopForIn279) SetTypeName(v string) {
	o.TypeName = &v
}

// GetVar returns the Var field value if set, zero value otherwise.
func (o *BTPStatementLoopForIn279) GetVar() BTPIdentifier8 {
	if v, ok := o.GetVarOk(); ok {
		return *v
	}
	var zero BTPIdentifier8
	return zero
}

// GetVarOk returns a tuple with the Var field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementLoopForIn279) GetVarOk() (*BTPIdentifier8, bool) {
	if o == nil || o.Var == nil {
		return nil, false
	}
	return o.Var, true
}

// HasVar returns a boolean if a field has been set.
func (o *BTPStatementLoopForIn279) HasVar() bool {
	_, ok := o.GetVarOk()
	return ok
}

// SetVar gets a reference to the given BTPIdentifier8 and assigns it to the Var field.
func (o *BTPStatementLoopForIn279) SetVar(v BTPIdentifier8) {
	o.Var = &v
}
// MarshalJSON serializes the statement, flattening the embedded
// BTPStatementLoop277 fields into the same top-level JSON object as this
// type's own optional fields. Only fields with non-nil pointers are emitted.
func (o BTPStatementLoopForIn279) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	// Marshal the embedded parent struct and unmarshal it back into the map
	// so its keys appear at the top level rather than nested.
	serializedBTPStatementLoop277, errBTPStatementLoop277 := json.Marshal(o.BTPStatementLoop277)
	if errBTPStatementLoop277 != nil {
		return []byte{}, errBTPStatementLoop277
	}
	errBTPStatementLoop277 = json.Unmarshal([]byte(serializedBTPStatementLoop277), &toSerialize)
	if errBTPStatementLoop277 != nil {
		return []byte{}, errBTPStatementLoop277
	}
	// Add this type's own optional fields; unset (nil) fields are omitted.
	if o.BtType != nil {
		toSerialize["btType"] = o.BtType
	}
	if o.Container != nil {
		toSerialize["container"] = o.Container
	}
	if o.IsVarDeclaredHere != nil {
		toSerialize["isVarDeclaredHere"] = o.IsVarDeclaredHere
	}
	if o.Name != nil {
		toSerialize["name"] = o.Name
	}
	if o.SpaceBeforeVar != nil {
		toSerialize["spaceBeforeVar"] = o.SpaceBeforeVar
	}
	if o.StandardType != nil {
		toSerialize["standardType"] = o.StandardType
	}
	if o.TypeName != nil {
		toSerialize["typeName"] = o.TypeName
	}
	if o.Var != nil {
		toSerialize["var"] = o.Var
	}
	return json.Marshal(toSerialize)
}
// NullableBTPStatementLoopForIn279 wraps a BTPStatementLoopForIn279 pointer
// and tracks whether a value (including an explicit null) has been set.
type NullableBTPStatementLoopForIn279 struct {
	value *BTPStatementLoopForIn279
	isSet bool
}

// Get returns the wrapped value, which may be nil.
func (v NullableBTPStatementLoopForIn279) Get() *BTPStatementLoopForIn279 {
	return v.value
}

// Set stores the given value and marks the wrapper as set.
func (v *NullableBTPStatementLoopForIn279) Set(val *BTPStatementLoopForIn279) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether a value (possibly nil) has been stored.
func (v NullableBTPStatementLoopForIn279) IsSet() bool {
	return v.isSet
}

// Unset clears the wrapped value and marks the wrapper as unset.
func (v *NullableBTPStatementLoopForIn279) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableBTPStatementLoopForIn279 returns a wrapper already marked as set.
func NewNullableBTPStatementLoopForIn279(val *BTPStatementLoopForIn279) *NullableBTPStatementLoopForIn279 {
	return &NullableBTPStatementLoopForIn279{value: val, isSet: true}
}

// MarshalJSON serializes the wrapped value; a nil value encodes as JSON null.
func (v NullableBTPStatementLoopForIn279) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes into the wrapped value and marks the wrapper as set,
// even when the payload is JSON null.
func (v *NullableBTPStatementLoopForIn279) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package indicators
import (
"container/list"
"errors"
"github.com/thetruetrade/gotrade"
"math"
)
// A Lowest Low Value Bars Indicator (LlvBars), no storage, for use in other indicators.
// It reports, for each bar once the lookback period is filled, how many bars
// ago the lowest low within the rolling window occurred.
type LlvBarsWithoutStorage struct {
	*baseIndicatorWithIntBounds

	// private variables
	periodHistory   *list.List // rolling window of the last timePeriod values
	currentLow      float64    // lowest value currently inside the window
	currentLowIndex int64      // bars ago that currentLow occurred (0 = current bar)
	timePeriod      int        // size of the rolling window
}
// NewLlvBarsWithoutStorage creates a Lowest Low Value Bars Indicator Indicator (LlvBars) without storage.
// valueAvailableAction is invoked for every computed value and must be non-nil,
// since no internal storage exists to hold results.
// timePeriod must be in [1, MaximumLookbackPeriod].
func NewLlvBarsWithoutStorage(timePeriod int, valueAvailableAction ValueAvailableActionInt) (indicator *LlvBarsWithoutStorage, err error) {
	// an indicator without storage MUST have a value available action
	if valueAvailableAction == nil {
		return nil, ErrValueAvailableActionIsNil
	}
	// the minimum timeperiod for this indicator is 1
	if timePeriod < 1 {
		return nil, errors.New("timePeriod is less than the minimum (1)")
	}
	// check the maximum timeperiod
	if timePeriod > MaximumLookbackPeriod {
		return nil, errors.New("timePeriod is greater than the maximum (100000)")
	}

	// lookback is the number of bars needed before the first value is produced
	lookback := timePeriod - 1
	ind := LlvBarsWithoutStorage{
		baseIndicatorWithIntBounds: newBaseIndicatorWithIntBounds(lookback, valueAvailableAction),
		// currentLow starts at MaxFloat64 so the first tick always becomes the low
		currentLow:      math.MaxFloat64,
		currentLowIndex: 0,
		periodHistory:   list.New(),
		timePeriod:      timePeriod,
	}
	return &ind, nil
}
// A Lowest Low Value Bars Indicator (LlvBars).
// Wraps LlvBarsWithoutStorage and accumulates every computed value in Data.
type LlvBars struct {
	*LlvBarsWithoutStorage
	selectData gotrade.DOHLCVDataSelectionFunc // selects which price field of a DOHLCV tick to use

	// public variables
	Data []int64 // one entry per bar after the lookback period is filled
}
// NewLlvBars creates a Lowest Low Value Bars Indicator (LlvBars) for online usage.
// selectData must be non-nil; it chooses the price component fed to the indicator.
func NewLlvBars(timePeriod int, selectData gotrade.DOHLCVDataSelectionFunc) (indicator *LlvBars, err error) {
	if selectData == nil {
		return nil, ErrDOHLCVDataSelectFuncIsNil
	}

	ind := LlvBars{
		selectData: selectData,
	}
	// The storage-less core calls back into this closure for each value,
	// appending it to ind.Data. Note the closure captures ind by reference.
	ind.LlvBarsWithoutStorage, err = NewLlvBarsWithoutStorage(timePeriod, func(dataItem int64, streamBarIndex int) {
		ind.Data = append(ind.Data, dataItem)
	})

	return &ind, err
}
// NewDefaultLlvBars creates a Lowest Low Value Indicator (LlvBars) for online usage with default parameters
// - timePeriod: 25
// - data selection: close price
func NewDefaultLlvBars() (indicator *LlvBars, err error) {
	const defaultTimePeriod = 25
	return NewLlvBars(defaultTimePeriod, gotrade.UseClosePrice)
}
// NewLlvBarsWithSrcLen creates a Lowest Low Value Indicator (LlvBars) for offline usage,
// pre-allocating result storage sized to the expected source length.
func NewLlvBarsWithSrcLen(sourceLength uint, timePeriod int, selectData gotrade.DOHLCVDataSelectionFunc) (indicator *LlvBars, err error) {
	ind, err := NewLlvBars(timePeriod, selectData)
	// bail out before touching ind: on error the indicator may be unusable
	if err != nil {
		return nil, err
	}

	// only initialise the storage if there is enough source data to require it;
	// comparing before subtracting avoids unsigned underflow when
	// sourceLength < lookback, which previously produced a huge capacity.
	lookback := uint(ind.GetLookbackPeriod())
	if sourceLength > lookback+1 {
		ind.Data = make([]int64, 0, sourceLength-lookback)
	}

	return ind, nil
}
// NewDefaultLlvBarsWithSrcLen creates a Lowest Low Value Indicator (LlvBars) for offline usage
// with default parameters, pre-allocating result storage sized to the expected source length.
func NewDefaultLlvBarsWithSrcLen(sourceLength uint) (indicator *LlvBars, err error) {
	ind, err := NewDefaultLlvBars()
	// bail out before touching ind: on error the indicator may be unusable
	if err != nil {
		return nil, err
	}

	// only initialise the storage if there is enough source data to require it;
	// comparing before subtracting avoids unsigned underflow when
	// sourceLength < lookback, which previously produced a huge capacity.
	lookback := uint(ind.GetLookbackPeriod())
	if sourceLength > lookback+1 {
		ind.Data = make([]int64, 0, sourceLength-lookback)
	}

	return ind, nil
}
// NewLlvBarsForStream creates a Lowest Low Value Indicator (LlvBars) for online usage
// and subscribes it to the given source data stream.
func NewLlvBarsForStream(priceStream gotrade.DOHLCVStreamSubscriber, timePeriod int, selectData gotrade.DOHLCVDataSelectionFunc) (indicator *LlvBars, err error) {
	ind, err := NewLlvBars(timePeriod, selectData)
	// do not subscribe a partially constructed indicator to the stream
	if err != nil {
		return nil, err
	}
	priceStream.AddTickSubscription(ind)
	return ind, nil
}
// NewDefaultLlvBarsForStream creates a Lowest Low Value Indicator (LlvBars) for online usage
// with default parameters and subscribes it to the given source data stream.
func NewDefaultLlvBarsForStream(priceStream gotrade.DOHLCVStreamSubscriber) (indicator *LlvBars, err error) {
	ind, err := NewDefaultLlvBars()
	// do not subscribe a partially constructed indicator to the stream
	if err != nil {
		return nil, err
	}
	priceStream.AddTickSubscription(ind)
	return ind, nil
}
// NewLlvBarsForStreamWithSrcLen creates a Lowest Low Value Indicator (LlvBars) for offline usage
// with pre-allocated storage and subscribes it to the given source data stream.
func NewLlvBarsForStreamWithSrcLen(sourceLength uint, priceStream gotrade.DOHLCVStreamSubscriber, timePeriod int, selectData gotrade.DOHLCVDataSelectionFunc) (indicator *LlvBars, err error) {
	ind, err := NewLlvBarsWithSrcLen(sourceLength, timePeriod, selectData)
	// do not subscribe a partially constructed indicator to the stream
	if err != nil {
		return nil, err
	}
	priceStream.AddTickSubscription(ind)
	return ind, nil
}
// NewDefaultLlvBarsForStreamWithSrcLen creates a Lowest Low Value Indicator (LlvBars) for offline usage
// with default parameters and pre-allocated storage, and subscribes it to the given stream.
func NewDefaultLlvBarsForStreamWithSrcLen(sourceLength uint, priceStream gotrade.DOHLCVStreamSubscriber) (indicator *LlvBars, err error) {
	ind, err := NewDefaultLlvBarsWithSrcLen(sourceLength)
	// do not subscribe a partially constructed indicator to the stream
	if err != nil {
		return nil, err
	}
	priceStream.AddTickSubscription(ind)
	return ind, nil
}
// ReceiveDOHLCVTick consumes a source data DOHLCV price tick, extracting the
// configured price component and feeding it to the indicator core.
func (ind *LlvBars) ReceiveDOHLCVTick(tickData gotrade.DOHLCV, streamBarIndex int) {
	ind.ReceiveTick(ind.selectData(tickData), streamBarIndex)
}
// ReceiveTick consumes one price value, maintaining a rolling window of the
// last timePeriod values and emitting how many bars ago the lowest value in
// the window occurred (0 = the current bar). Values are only emitted once the
// window is full.
func (ind *LlvBarsWithoutStorage) ReceiveTick(tickData float64, streamBarIndex int) {
	ind.periodHistory.PushBack(tickData)

	// resize the history
	if ind.periodHistory.Len() > ind.timePeriod {
		first := ind.periodHistory.Front()
		ind.periodHistory.Remove(first)

		// make sure we haven't just removed the current low
		if ind.currentLowIndex == int64(ind.timePeriod-1) {
			ind.currentLow = math.MaxFloat64
			// we have we need to find the new low in the history
			// i counts down so the oldest element gets the largest "bars ago" index
			var i int = ind.timePeriod - 1
			for e := ind.periodHistory.Front(); e != nil; e = e.Next() {
				value := e.Value.(float64)
				if value < ind.currentLow {
					ind.currentLow = value
					ind.currentLowIndex = int64(i)
				}
				i -= 1
			}
		} else {
			// the previous low is still inside the window: either the new tick
			// becomes the low, or the low simply ages by one bar
			if tickData < ind.currentLow {
				ind.currentLow = tickData
				ind.currentLowIndex = 0
			} else {
				ind.currentLowIndex += 1
			}
		}

		var result = ind.currentLowIndex

		ind.UpdateIndicatorWithNewValue(result, streamBarIndex)
	} else {
		// window not yet full: track the running low but only emit a value
		// once exactly timePeriod ticks have been received
		if tickData < ind.currentLow {
			ind.currentLow = tickData
			ind.currentLowIndex = 0
		} else {
			ind.currentLowIndex += 1
		}

		if ind.periodHistory.Len() == ind.timePeriod {
			var result = ind.currentLowIndex

			ind.UpdateIndicatorWithNewValue(result, streamBarIndex)
		}
	}
}
package hector
import(
"strconv"
"math/rand"
"math"
"fmt"
)
// NeuralNetworkParams holds the training hyper-parameters for NeuralNetwork.
type NeuralNetworkParams struct {
	LearningRate         float64 // SGD step size
	LearningRateDiscount float64 // multiplicative decay applied after each epoch
	Regularization       float64 // L2 weight-decay coefficient
	Hidden               int64   // number of hidden units (excluding the bias)
	Steps                int     // number of training epochs
	Verbose              int     // > 0 enables per-epoch progress and evaluation output
}
// TwoLayerWeights holds the weight matrices of a two-layer network:
// L1 maps input features to hidden units, L2 maps hidden units to outputs.
type TwoLayerWeights struct {
	L1 *Matrix
	L2 *Matrix
}
/*
Please refer to this chapter to know algorithm details :
http://www4.rgu.ac.uk/files/chapter3%20-%20bp.pdf
*/
// NeuralNetwork is a single-hidden-layer feed-forward classifier trained by
// back-propagation with a softmax output layer.
type NeuralNetwork struct {
	Model    TwoLayerWeights     // learned weights
	MaxLabel int64               // largest class label seen during training
	Params   NeuralNetworkParams // training hyper-parameters
}
// RandomInitVector returns a vector with entries 0..dim-1 drawn uniformly
// from [-0.5, 0.5) and scaled by 1/sqrt(dim), a common weight-initialization
// scheme that keeps initial activations small.
func RandomInitVector(dim int64) *Vector {
	v := NewVector()
	var i int64
	for i = 0; i < dim; i++ {
		v.data[i] = (rand.Float64() - 0.5) / math.Sqrt(float64(dim))
	}
	return v
}
// SaveModel persists the model to the given path.
// TODO: not implemented — currently a no-op.
func (self *NeuralNetwork) SaveModel(path string) {

}

// LoadModel restores the model from the given path.
// TODO: not implemented — currently a no-op.
func (self *NeuralNetwork) LoadModel(path string) {

}
// Init configures the training hyper-parameters from a string map.
// Recognised keys: "learning-rate", "learning-rate-discount", "regularization",
// "steps", "hidden", "verbose".
// NOTE(review): parse errors are deliberately ignored, so a missing or
// malformed key leaves the corresponding parameter at zero — confirm callers
// always supply valid values.
func (algo *NeuralNetwork) Init(params map[string]string) {
	algo.Params.LearningRate, _ = strconv.ParseFloat(params["learning-rate"], 64)
	algo.Params.LearningRateDiscount, _ = strconv.ParseFloat(params["learning-rate-discount"], 64)
	algo.Params.Regularization, _ = strconv.ParseFloat(params["regularization"], 64)
	steps, _ := strconv.ParseInt(params["steps"], 10, 32)
	hidden, _ := strconv.ParseInt(params["hidden"], 10, 64)
	verbose, _ := strconv.ParseInt(params["verbose"], 10, 32)

	algo.Params.Steps = int(steps)
	algo.Params.Hidden = int64(hidden)
	algo.Params.Verbose = int(verbose)
}
// Train learns the two-layer network from the dataset using per-sample
// stochastic gradient descent with a sigmoid hidden layer and softmax output.
// The learning rate is decayed by LearningRateDiscount after every epoch.
func (algo *NeuralNetwork) Train(dataset *DataSet) {
	algo.Model = TwoLayerWeights{}

	algo.Model.L1 = NewMatrix()
	algo.Model.L2 = NewMatrix()

	for i := int64(0); i < algo.Params.Hidden; i++ {
		algo.Model.L1.data[i] = NewVector()
	}

	// Randomly initialise L1 weights once per distinct feature id, and track
	// the largest class label to size the output layer.
	initalized := make(map[int64]int)
	max_label := 0
	for _, sample := range dataset.Samples {
		if max_label < sample.Label{
			max_label = sample.Label
		}
		for _, f := range sample.Features{
			_, ok := initalized[f.Id]
			if !ok{
				for i := int64(0); i < algo.Params.Hidden; i++ {
					algo.Model.L1.SetValue(i, f.Id, (rand.Float64() - 0.5) / math.Sqrt(float64(algo.Params.Hidden)))
				}
				initalized[f.Id] = 1
			}
		}
	}
	algo.MaxLabel = int64(max_label)

	// Initialise L2 with normally distributed weights scaled by the fan-in.
	for i := int64(0); i <= algo.Params.Hidden; i++ {
		for j := int64(0); j <= algo.MaxLabel; j++ {
			algo.Model.L2.SetValue(i, j, (rand.NormFloat64() / math.Sqrt(float64(algo.MaxLabel) + 1.0)))
		}
	}

	for step := 0; step < algo.Params.Steps; step++{
		if algo.Params.Verbose <= 0 {
			fmt.Printf(".")
		}
		total := len(dataset.Samples)
		counter := 0
		for _, sample := range dataset.Samples {
			y := NewVector()
			z := NewVector()
			e := NewVector()
			delta_hidden := NewVector()

			// Forward pass: hidden activations y_i = sigmoid(w_i . x).
			for i := int64(0); i < algo.Params.Hidden; i++ {
				sum := float64(0)
				wi := algo.Model.L1.data[i]
				for _, f := range sample.Features {
					sum += f.Value * wi.GetValue(f.Id)
				}
				y.data[i] = Sigmoid(sum)
			}
			y.data[algo.Params.Hidden] = 1.0 // bias unit for the output layer

			// Output scores, then softmax-normalise to probabilities.
			for i := int64(0); i <= algo.MaxLabel; i++ {
				sum := float64(0)
				for j := int64(0); j <= algo.Params.Hidden; j++ {
					sum += y.GetValue(j)*algo.Model.L2.GetValue(j, i)
				}
				z.SetValue(i, sum)
			}
			z = z.SoftMaxNorm()

			// Output error: one-hot(label) - z.
			e.SetValue(int64(sample.Label), 1.0)
			e.AddVector(z, -1.0)

			// Backward pass: update L2 in place while accumulating the deltas
			// that are back-propagated to the hidden layer.
			for i := int64(0); i <= algo.Params.Hidden; i++ {
				delta := float64(0)
				for j := int64(0); j <= algo.MaxLabel; j++ {
					wij := algo.Model.L2.GetValue(i, j)
					sig_ij := e.GetValue(j) * (1-z.GetValue(j)) * z.GetValue(j)
					delta += sig_ij * wij
					wij += algo.Params.LearningRate * (y.GetValue(i) * sig_ij - algo.Params.Regularization * wij)
					algo.Model.L2.SetValue(i, j, wij)
				}
				delta_hidden.SetValue(i, delta)
			}

			// Update L1 weights, only for features present in this sample.
			for i := int64(0); i < algo.Params.Hidden; i++ {
				wi := algo.Model.L1.data[i]
				for _, f := range sample.Features {
					wji := wi.GetValue(f.Id)
					wji += algo.Params.LearningRate * (delta_hidden.GetValue(i) * f.Value * y.GetValue(i) * (1-y.GetValue(i)) - algo.Params.Regularization * wji)
					wi.SetValue(f.Id, wji)
				}
			}
			counter++
			if algo.Params.Verbose > 0 && counter % 2000 == 0 {
				fmt.Printf("Epoch %d %f%%\n", step+1, float64(counter)/float64(total)*100)
			}
		}
		if algo.Params.Verbose > 0 {
			algo.Evaluate(dataset)
		}
		algo.Params.LearningRate *= algo.Params.LearningRateDiscount
	}
	fmt.Println()
}
// PredictMultiClass runs a forward pass for the sample and returns the
// softmax-normalised class probability vector (indices 0..MaxLabel).
func (algo *NeuralNetwork) PredictMultiClass(sample * Sample) * ArrayVector {
	// Hidden layer: y_i = sigmoid(w_i . x).
	y := NewVector()
	z := NewArrayVector()
	for i := int64(0); i < algo.Params.Hidden; i++ {
		sum := float64(0)
		for _, f := range sample.Features {
			sum += f.Value * algo.Model.L1.data[i].GetValue(f.Id)
		}
		y.data[i] = Sigmoid(sum)
	}
	y.data[algo.Params.Hidden] = 1 // bias unit

	// Output layer scores, then softmax normalisation.
	for i := 0; i <= int(algo.MaxLabel); i++ {
		sum := float64(0)
		for j := int64(0); j <= algo.Params.Hidden; j++ {
			sum += y.GetValue(j) * algo.Model.L2.GetValue(j, int64(i))
		}
		z.SetValue(i, sum)
	}
	z = z.SoftMaxNorm()
	return z
}
// Predict returns the predicted probability of class 1 for the given sample
// (the binary-classification convenience wrapper around PredictMultiClass).
func (algo *NeuralNetwork) Predict(sample *Sample) float64 {
	return algo.PredictMultiClass(sample).GetValue(1)
}
func (algo *NeuralNetwork) Evaluate(dataset *DataSet) {
accuracy := 0.0
total := 0.0
for _, sample := range dataset.Samples {
prediction := algo.PredictMultiClass(sample)
label, _ := prediction.KeyWithMaxValue()
if int(label) == sample.Label {
accuracy += 1.0
}
total += 1.0
}
fmt.Printf("accuracy %f%%\n", accuracy/total*100)
} | neural_network.go | 0.610337 | 0.484197 | neural_network.go | starcoder |
package kdtree
import (
"geo"
"graph"
)
// Location is a compact reference to a point on the graph, packed into EC as
// vertex | edge offset | step offset bit fields.
type Location struct {
	Graph   *graph.GraphFile
	EC      uint64 // packed (vertex, edge offset, step offset)
	Cluster int
}

// Vertex extracts the vertex component from the packed EC field.
func (l Location) Vertex() graph.Vertex {
	return graph.Vertex(l.EC >> (EdgeOffsetBits + StepOffsetBits))
}

// EdgeOffset extracts the edge-offset component from the packed EC field.
func (l Location) EdgeOffset() uint32 {
	return uint32((l.EC >> StepOffsetBits) & MaxEdgeOffset)
}

// StepOffset extracts the step-offset component from the packed EC field.
func (l Location) StepOffset() int {
	return int(l.EC & MaxStepOffset)
}

// IsVertex reports whether this location is exactly a vertex; both offsets
// set to their sentinel maxima encode "no edge/step".
func (l Location) IsVertex() bool {
	return l.EdgeOffset() == MaxEdgeOffset && l.StepOffset() == MaxStepOffset
}

// Edge returns the referenced edge, or graph.Edge(-1) when the location is a
// plain vertex (both offsets at their sentinel values).
func (l Location) Edge() graph.Edge {
	vertex := l.Vertex()
	edgeOffset := l.EdgeOffset()
	stepOffset := l.StepOffset()
	if edgeOffset == MaxEdgeOffset && stepOffset == MaxStepOffset {
		return graph.Edge(-1)
	}
	return graph.Edge(l.Graph.FirstOut[vertex] + edgeOffset)
}
// Decode expands this compact location into one or more graph.Ways that
// connect it to the edge's endpoint vertices. A location that is exactly a
// vertex yields a single zero-length way; a location on an edge yields a way
// to each endpoint (bidirectional edge) or to the single reachable endpoint
// (oneway edge). steps is a reusable scratch buffer for the edge geometry.
func (l Location) Decode(forward bool, transport graph.Transport, steps *[]geo.Coordinate) []graph.Way {
	g := l.Graph
	vertex := l.Vertex()
	edge := l.Edge()
	offset := l.StepOffset()

	if int(edge) == -1 {
		// The easy case, where we hit some vertex exactly.
		target := g.VertexCoordinate(vertex)
		way := graph.Way{Length: 0, Vertex: vertex, Steps: nil, Target: target}
		return []graph.Way{way}
	}

	oneway := g.EdgeOneway(edge, transport)
	t1 := vertex // start vertex
	t2 := g.EdgeOpposite(edge, vertex) // end vertex

	// now we can allocate the way corresponding to (edge,offset),
	// but there are three cases to consider:
	// - if the way is bidirectional we have to compute both directions,
	//   if forward == true the from the offset two both endpoints,
	//   and the reverse otherwise
	// - if the way is unidirectional then we have to compute the way
	//   from the StartPoint to offset if forward == false
	// - otherwise we have to compute the way from offset to the EndPoint
	(*steps) = g.EdgeSteps(edge, vertex, *steps)
	s := *steps

	// Split the edge geometry at offset: b1 leads back to t1, b2 on to t2.
	b1 := make([]geo.Coordinate, len(s[:offset]))
	b2 := make([]geo.Coordinate, len(s[offset+1:]))
	copy(b1, s[:offset])
	copy(b2, s[offset+1:])
	l1 := geo.StepLength(s[:offset+1])
	l2 := geo.StepLength(s[offset:])
	t1Coord := g.VertexCoordinate(t1)
	t2Coord := g.VertexCoordinate(t2)
	// NOTE(review): e is a package-level geodesic/distance helper defined
	// elsewhere in this package; these add the gap between each endpoint
	// vertex and the first/last geometry step.
	d1, _ := e.To(t1Coord.Lat, t1Coord.Lng, s[0].Lat, s[0].Lng)
	d2, _ := e.To(t2Coord.Lat, t2Coord.Lng, s[len(s)-1].Lat, s[len(s)-1].Lng)
	l1 += d1
	l2 += d2
	target := s[offset]

	// Orient the step sequences to match the requested travel direction.
	if !forward {
		reverse(b2)
	} else {
		reverse(b1)
	}

	var w []graph.Way
	if !oneway {
		w = make([]graph.Way, 2) // bidirectional
		w[0] = graph.Way{Length: l1, Vertex: t1, Steps: b1, Forward: forward, Target: target}
		w[1] = graph.Way{Length: l2, Vertex: t2, Steps: b2, Forward: forward, Target: target}
	} else {
		w = make([]graph.Way, 1) // one way
		if forward {
			w[0] = graph.Way{Length: l2, Vertex: t2, Steps: b2, Forward: forward, Target: target}
		} else {
			w[0] = graph.Way{Length: l1, Vertex: t1, Steps: b1, Forward: forward, Target: target}
		}
	}
	return w
}
func reverse(steps []geo.Coordinate) {
for i, j := 0, len(steps)-1; i < j; i, j = i+1, j-1 {
steps[i], steps[j] = steps[j], steps[i]
}
} | src/kdtree/location.go | 0.670716 | 0.480052 | location.go | starcoder |
package symbol
import (
"strings"
"github.com/gojek/merlin/pkg/transformer/symbol/function"
)
// Geohash calculates geohash of latitude and longitude with the given character precision
// latitude and longitude can be:
// - Json path string
// - Slice / gota.Series
// - float64 value
func (sr Registry) Geohash(latitude interface{}, longitude interface{}, precision uint) interface{} {
	latVal, err := sr.evalArg(latitude)
	if err != nil {
		panic(err)
	}
	lonVal, err := sr.evalArg(longitude)
	if err != nil {
		panic(err)
	}
	hash, err := function.Geohash(latVal, lonVal, precision)
	if err != nil {
		panic(err)
	}
	return hash
}
// S2ID calculates S2 ID of latitude and longitude of the given level
// latitude and longitude can be:
// - Json path string
// - Slice / gota.Series
// - float64 value
func (sr Registry) S2ID(latitude interface{}, longitude interface{}, level int) interface{} {
	latVal, err := sr.evalArg(latitude)
	if err != nil {
		panic(err)
	}
	lonVal, err := sr.evalArg(longitude)
	if err != nil {
		panic(err)
	}
	id, err := function.S2ID(latVal, lonVal, level)
	if err != nil {
		panic(err)
	}
	return id
}
// HaversineDistance of two points (latitude, longitude) in KM
// latitude and longitude can be:
// - Json path string
// - Slice / gota.Series
// - float64 value
func (sr Registry) HaversineDistance(latitude1 interface{}, longitude1 interface{}, latitude2 interface{}, longitude2 interface{}) interface{} {
	// Delegate to the unit-aware variant, defaulting to kilometres.
	return sr.HaversineDistanceWithUnit(
		latitude1, longitude1,
		latitude2, longitude2,
		function.KMDistanceUnit,
	)
}
// HaversineDistanceWithUnit of two points (latitude, longitude) and user need to specify distance unit e.g 'km' or 'm'
// latitude and longitude can be:
// - Json path string
// - Slice / gota.Series
// - float64 value
// The unit string is lower-cased before being passed on, so 'KM' and 'km'
// are equivalent. Panics on any evaluation or computation error.
func (sr Registry) HaversineDistanceWithUnit(latitude1 interface{}, longitude1 interface{}, latitude2 interface{}, longitude2 interface{}, distanceUnit string) interface{} {
	lat1, err := sr.evalArg(latitude1)
	if err != nil {
		panic(err)
	}
	lon1, err := sr.evalArg(longitude1)
	if err != nil {
		panic(err)
	}
	lat2, err := sr.evalArg(latitude2)
	if err != nil {
		panic(err)
	}
	lon2, err := sr.evalArg(longitude2)
	if err != nil {
		panic(err)
	}

	// normalise the unit so the underlying function sees a canonical value
	normalizedDistanceUnit := strings.ToLower(distanceUnit)
	result, err := function.HaversineDistance(lat1, lon1, lat2, lon2, normalizedDistanceUnit)
	if err != nil {
		panic(err)
	}
	return result
}
// GeohashDistance will calculate haversine distance of two geohash
// Those two geohashes will be converted back to latitude and longitude that represent center of geohash point
// 'firstGeohash' and 'secondGeohash' can be:
// - Json path string
// - Slice / gota.Series
// - string
// NOTE(review): unlike HaversineDistanceWithUnit, distanceUnit is passed
// through without strings.ToLower here — confirm whether the underlying
// function normalises it, otherwise 'KM' vs 'km' behave differently.
func (sr Registry) GeohashDistance(firstGeohash interface{}, secondGeohash interface{}, distanceUnit string) interface{} {
	geohash1, err := sr.evalArg(firstGeohash)
	if err != nil {
		panic(err)
	}
	geohash2, err := sr.evalArg(secondGeohash)
	if err != nil {
		panic(err)
	}
	result, err := function.GeohashDistance(geohash1, geohash2, distanceUnit)
	if err != nil {
		panic(err)
	}
	return result
}
// GeohashAllNeighbors get all neighbors of a geohash from all directions
func (sr Registry) GeohashAllNeighbors(targetGeohash interface{}) interface{} {
	gh, err := sr.evalArg(targetGeohash)
	if err != nil {
		panic(err)
	}
	neighbors, err := function.GeohashAllNeighbors(gh)
	if err != nil {
		panic(err)
	}
	return neighbors
}
// GeohashNeighborForDirection get all neighbor of a geohash from one direction
func (sr Registry) GeohashNeighborForDirection(targetGeohash interface{}, direction string) interface{} {
	gh, err := sr.evalArg(targetGeohash)
	if err != nil {
		panic(err)
	}
	neighbor, err := function.GeohashNeighborForDirection(gh, direction)
	if err != nil {
		panic(err)
	}
	return neighbor
}
// PolarAngle calculate polar angle of two locations given latitude1, longitude1, latitude1, latitude2
// latitude and longitude can be:
// - Json path string
// - Slice / gota.Series
// - float64 value
// Panics on any evaluation or computation error, matching the other
// geospatial registry functions.
func (sr Registry) PolarAngle(latitude1 interface{}, longitude1 interface{}, latitude2 interface{}, longitude2 interface{}) interface{} {
	lat1, err := sr.evalArg(latitude1)
	if err != nil {
		panic(err)
	}
	lon1, err := sr.evalArg(longitude1)
	if err != nil {
		panic(err)
	}
	lat2, err := sr.evalArg(latitude2)
	if err != nil {
		panic(err)
	}
	lon2, err := sr.evalArg(longitude2)
	if err != nil {
		panic(err)
	}

	result, err := function.PolarAngle(lat1, lon1, lat2, lon2)
	if err != nil {
		panic(err)
	}
	return result
}
package nb2
import (
"errors"
"math"
"sort"
"github.com/schollz/find4/server/main/src/database"
"github.com/schollz/find4/server/main/src/models"
)
// Algorithm defines the basic structure of the NB2 naive-Bayes classifier.
type Algorithm struct {
	// Data maps location -> "sensorType-sensorName" -> fraction of that
	// location's fingerprints in which the sensor was seen.
	Data     map[string]map[string]float64
	isLoaded bool // whether Data has been loaded from the database
}
// New returns a new, empty, not-yet-loaded algorithm instance.
func New() *Algorithm {
	return &Algorithm{
		Data:     make(map[string]map[string]float64),
		isLoaded: false,
	}
}
// Fit will take the data and learn it: for every location it computes the
// fraction of fingerprints in which each sensor appears, then persists the
// learned table to the database under the key "NB2".
func (a *Algorithm) Fit(db *database.Database, datas []models.SensorData) (err error) {
	if len(datas) == 0 {
		err = errors.New("no data")
		return
	}
	a.Data = make(map[string]map[string]float64)

	// Count, per location, how many fingerprints contain each sensor.
	locationTotals := make(map[string]float64)
	for _, data := range datas {
		if _, ok := a.Data[data.Location]; !ok {
			a.Data[data.Location] = make(map[string]float64)
			locationTotals[data.Location] = float64(0)
		}
		locationTotals[data.Location]++
		for sensorType := range data.Sensors {
			for sensor := range data.Sensors[sensorType] {
				// sensors are keyed as "sensorType-sensorName"
				mac := sensorType + "-" + sensor
				if _, ok := a.Data[data.Location][mac]; !ok {
					a.Data[data.Location][mac] = float64(0)
				}
				a.Data[data.Location][mac]++
			}
		}
	}

	// normalize each location
	for loc := range a.Data {
		for mac := range a.Data[loc] {
			a.Data[loc][mac] = a.Data[loc][mac] / locationTotals[loc]
		}
	}

	err = db.Set("NB2", a.Data)
	// err = db.SetLearning("NB2", a.Data)
	return
}
// Classify will classify the specified data, returning each known location
// paired with its normalised posterior probability, sorted best-first.
// Probabilities are accumulated in log space to avoid underflow.
func (a *Algorithm) Classify(db *database.Database, data models.SensorData) (pl PairList, err error) {
	// load data if not already
	if !a.isLoaded {
		err = db.Get("NB2", &a.Data)
		if err != nil {
			return
		}
		a.isLoaded = true
	}
	if len(a.Data) == 0 {
		err = errors.New("need to fit first")
		return
	}

	// uniform prior over locations
	numLocations := float64(len(a.Data))
	NA := 1 / numLocations
	NnotA := 1 - NA
	Ps := make(map[string][]float64)
	for location := range a.Data {
		Ps[location] = []float64{}
	}
	// For each observed sensor, apply Bayes' rule per location and collect
	// the log-probability contribution.
	for sensorType := range data.Sensors {
		for name := range data.Sensors[sensorType] {
			mac := sensorType + "-" + name
			val := int(data.Sensors[sensorType][name].(float64))
			for location := range Ps {
				PA := a.probMacGivenLocation(mac, val, location, true)
				PnotA := a.probMacGivenLocation(mac, val, location, false)
				P := PA * NA / (PA*NA + PnotA*NnotA)
				Ps[location] = append(Ps[location], math.Log(P))
			}
		}
	}

	// Convert log sums back to probabilities and normalise across locations.
	PsumTotal := float64(0)
	Psum := make(map[string]float64)
	for location := range Ps {
		Psum[location] = float64(0)
		for _, v := range Ps[location] {
			Psum[location] += v
		}
		Psum[location] = math.Exp(Psum[location])
		PsumTotal += Psum[location]
	}
	for location := range Psum {
		Psum[location] = Psum[location] / PsumTotal
	}

	// sort locations by descending probability
	pl = make(PairList, len(Psum))
	i := 0
	for k, v := range Psum {
		pl[i] = Pair{k, v}
		i++
	}
	sort.Sort(sort.Reverse(pl))
	return
}
// Pair couples a location name with its classification probability.
type Pair struct {
	Key   string
	Value float64
}

// PairList implements sort.Interface over Pairs, ordered by ascending Value
// (wrap with sort.Reverse for best-first ordering).
type PairList []Pair

func (p PairList) Len() int { return len(p) }
func (p PairList) Less(i, j int) bool { return p[i].Value < p[j].Value }
func (p PairList) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
// probMacGivenLocation estimates P(mac | loc) when positive is true, or
// P(mac | not loc) when false, from the normalised counts in a.Data.
// Falls back to a small floor probability (0.005) when no counts exist.
// NOTE(review): the val parameter is currently unused — the signal strength
// does not influence the estimate; confirm whether that is intentional.
func (a *Algorithm) probMacGivenLocation(mac string, val int, loc string, positive bool) (P float64) {
	P = 0.005
	numerator := float64(0)
	if positive {
		// positive: find count of mac in loc
		if v, ok := a.Data[loc][mac]; ok {
			numerator = float64(v)
		}
	} else {
		// NOT positive: find count of mac NOT in loc
		for locX := range a.Data {
			if locX != loc {
				if v, ok := a.Data[locX][mac]; ok {
					numerator += float64(v)
				}
			}
		}
	}
	// find total count of mac
	denominator := float64(0)
	for locX := range a.Data {
		if v, ok := a.Data[locX][mac]; ok {
			denominator += float64(v)
		}
	}
	if denominator > 0 && numerator > 0 {
		P = numerator / denominator
	}
	// TODO: cache it
	return
}
package dtstruct
// InstancesSorterByExec sorts instances by executed binlog coordinates,
// with tie-breakers that prefer the best replica to promote; dataCenter
// biases ties toward local instances.
type InstancesSorterByExec struct {
	instances  [](*MysqlInstance)
	dataCenter string
}
// NewInstancesSorterByExec builds a sorter over the given instances,
// preferring the provided data center on ties.
func NewInstancesSorterByExec(instances [](*MysqlInstance), dataCenter string) *InstancesSorterByExec {
	sorter := &InstancesSorterByExec{}
	sorter.instances = instances
	sorter.dataCenter = dataCenter
	return sorter
}
// Len implements sort.Interface.
func (this *InstancesSorterByExec) Len() int { return len(this.instances) }

// Swap implements sort.Interface.
func (this *InstancesSorterByExec) Swap(i, j int) {
	this.instances[i], this.instances[j] = this.instances[j], this.instances[i]
}
// Less implements sort.Interface, ordering primarily by executed binlog
// coordinates and breaking exact ties by promotion desirability.
func (this *InstancesSorterByExec) Less(i, j int) bool {
	// Returning "true" in this function means [i] is "smaller" than [j],
	// which will lead to [j] be a better candidate for promotion

	// Sh*t happens. We just might get nil while attempting to discover/recover
	if this.instances[i] == nil {
		return false
	}
	if this.instances[j] == nil {
		return true
	}
	if this.instances[i].ExecBinlogCoordinates.Equals(&this.instances[j].ExecBinlogCoordinates) {
		// Secondary sorting: "smaller" if not logging replica updates
		if this.instances[j].LogReplicationUpdatesEnabled && !this.instances[i].LogReplicationUpdatesEnabled {
			return true
		}
		// Next sorting: "smaller" if of higher version (this will be reversed eventually)
		// Idea is that given 5.6 a& 5.7 both of the exact position, we will want to promote
		// the 5.6 on top of 5.7, as the other way around is invalid
		if this.instances[j].Instance.IsSmallerMajorVersion(this.instances[i].Instance) {
			return true
		}
		// Next sorting: "smaller" if of larger binlog-format (this will be reversed eventually)
		// Idea is that given ROW & STATEMENT both of the exact position, we will want to promote
		// the STATEMENT on top of ROW, as the other way around is invalid
		if IsSmallerBinlogFormat(this.instances[j].Binlog_format, this.instances[i].Binlog_format) {
			return true
		}
		// Prefer local datacenter:
		if this.instances[j].DataCenter == this.dataCenter && this.instances[i].DataCenter != this.dataCenter {
			return true
		}
		// Prefer if not having errant GTID
		if this.instances[j].GtidErrant == "" && this.instances[i].GtidErrant != "" {
			return true
		}
		// Prefer candidates:
		if this.instances[j].PromotionRule.BetterThan(this.instances[i].PromotionRule) {
			return true
		}
	}
	return this.instances[i].ExecBinlogCoordinates.SmallerThan(&this.instances[j].ExecBinlogCoordinates)
}
package validation
import (
"context"
"errors"
"reflect"
"strconv"
)
// Each returns a validation rule that loops through an iterable (map, slice or array)
// and validates each value inside with the provided rules.
// An empty iterable is considered valid. Use the Required rule to make sure the iterable is not empty.
func Each(rules ...Rule) EachRule {
	return EachRule{
		rules: rules,
	}
}
// EachRule is a validation rule that validates elements in a map/slice/array using the specified list of rules.
type EachRule struct {
	rules []Rule // rules applied to every element of the iterable
}
// Validate loops through the given iterable and calls the Ozzo Validate() method for each value.
// It delegates with a nil context, which ValidateWithContext treats as
// "use the context-free validation path".
func (r EachRule) Validate(value interface{}) error {
	return r.ValidateWithContext(nil, value)
}
// ValidateWithContext loops through the given iterable and calls the Ozzo ValidateWithContext() method for each value.
// Map elements are reported under their (string) key, slice/array elements
// under their index. A non-iterable value yields a single error.
func (r EachRule) ValidateWithContext(ctx context.Context, value interface{}) error {
	// applyRules validates a single element, using the context-aware entry
	// point only when a context was supplied. Centralising this here removes
	// the previously duplicated ctx-dispatch in both loop arms.
	applyRules := func(val interface{}) error {
		if ctx == nil {
			return Validate(val, r.rules...)
		}
		return ValidateWithContext(ctx, val, r.rules...)
	}

	errs := Errors{}

	v := reflect.ValueOf(value)
	switch v.Kind() {
	case reflect.Map:
		for _, k := range v.MapKeys() {
			if err := applyRules(r.getInterface(v.MapIndex(k))); err != nil {
				errs[r.getString(k)] = err
			}
		}
	case reflect.Slice, reflect.Array:
		for i := 0; i < v.Len(); i++ {
			if err := applyRules(r.getInterface(v.Index(i))); err != nil {
				errs[strconv.Itoa(i)] = err
			}
		}
	default:
		return errors.New("must be an iterable (map, slice or array)")
	}

	if len(errs) > 0 {
		return errs
	}
	return nil
}
// getInterface unwraps a reflect.Value into its interface form, mapping a
// nil pointer/interface to an untyped nil.
func (r EachRule) getInterface(value reflect.Value) interface{} {
	kind := value.Kind()
	if (kind == reflect.Ptr || kind == reflect.Interface) && value.IsNil() {
		return nil
	}
	return value.Interface()
}
func (r EachRule) getString(value reflect.Value) string {
switch value.Kind() {
case reflect.Ptr, reflect.Interface:
if value.IsNil() {
return ""
}
return value.Elem().String()
default:
return value.String()
}
} | each.go | 0.788054 | 0.458955 | each.go | starcoder |
package kubernetes
import (
"fmt"
"time"
)
// resolver looks up schemas by key and reports the schema version in use.
type resolver interface {
	Resolve(string) (*Schema, error)
	Version() string
}
// Validator knows enough to be able to validate a YAML document.
type Validator struct {
	resolver resolver // resolves schema references encountered during validation
}
// NewValidator returns an instantiated validator backed by the given resolver.
func NewValidator(resolver resolver) *Validator {
	v := &Validator{}
	v.resolver = resolver
	return v
}
// Resolve wraps the internal resolver's resolve method.
func (v *Validator) Resolve(schemaKey string) (*Schema, error) {
	return v.resolver.Resolve(schemaKey)
}
// Version wraps the internal resolver's version method.
func (v *Validator) Version() string {
	return v.resolver.Version()
}
// Validate is the meat of this code. It sees incoming data and validates it against the known schemas.
// This is recursive so it does a depth first search of all key/values.
// It starts the recursive walk with an empty key path.
// TODO(chuckha) turn this into a stack-based dfs search.
func (v *Validator) Validate(incoming map[interface{}]interface{}, schema *Schema) []error {
	return v.validate(incoming, schema, []string{})
}
// validate is the meat and potatoes of this entire application.
// incoming is a map of key/value pairs from a YAML document.
// schema is the schema we expect incoming to validate against.
// path is the list of keys traversed to reach this object (as this object
// could be anywhere in a YAML document).
//
// For every key it:
//  1. checks that the key is a string,
//  2. checks that the key is an expected key,
//  3. checks the value is the expected type,
//  4. recurses when an object (or array of objects) is encountered.
func (v *Validator) validate(incoming map[interface{}]interface{}, schema *Schema, path []string) []error {
    errors := make([]error, 0)
    // Validate each key one at a time descending as deep and as wide as the key goes.
    for k, value := range incoming {
        // Copy the path so sibling keys never share (and clobber) the same
        // backing array when appending below.
        tlp := make([]string, len(path))
        copy(tlp, path)
        key, ok := k.(string)
        if !ok {
            errors = append(errors, NewYamlPathError(tlp, "", NewKeyNotStringError(k)))
            continue
        }
        // the key is a string so we can now act on it.
        tlp = append(tlp, key)
        property, ok := schema.Properties[key]
        if !ok {
            errors = append(errors, NewYamlPathError(tlp, "", NewUnknownKeyError(key)))
            continue
        }
        switch property.Type {
        case "string":
            // TODO: formats?
            if _, ok := value.(string); !ok {
                errors = append(errors, NewYamlPathError(tlp, value, NewWrongTypeError(key, "string", value)))
            }
        case "integer":
            // ignore property.Format until it causes a bug
            if _, ok := value.(int); !ok {
                errors = append(errors, NewYamlPathError(tlp, value, NewWrongTypeError(key, "integer", value)))
            }
        case "boolean":
            if _, ok := value.(bool); !ok {
                errors = append(errors, NewYamlPathError(tlp, value, NewWrongTypeError(key, "boolean", value)))
            }
        case "object":
            // this is for things like labels; map[interface{}]interface{} looks weird but that's how our yaml parser works.
            if _, ok := value.(map[interface{}]interface{}); !ok {
                errors = append(errors, NewYamlPathError(tlp, value, NewWrongTypeError(key, "map[interface{}]interface{}", value)))
            }
        case "array":
            items, ok := value.([]interface{})
            if !ok {
                errors = append(errors, NewYamlPathError(tlp, value, NewWrongTypeError(key, "[]interface{}", value)))
                continue
            }
            switch property.Items.Type {
            case "string":
                for _, item := range items {
                    if _, ok := item.(string); !ok {
                        // FIX: wrap in NewYamlPathError for consistency with
                        // every other error reported by this function (this
                        // branch previously appended a bare WrongTypeError).
                        errors = append(errors, NewYamlPathError(tlp, item, NewWrongTypeError(key, "string", item)))
                    }
                }
            // assume it's an array of objects
            default:
                // TODO: check that items is not nil
                schema, err := v.resolver.Resolve(property.Items.Reference)
                if err != nil {
                    // NOTE(review): debug leftover? The sibling branch below
                    // keeps its equivalent commented out — confirm whether
                    // this can be removed (fmt may be used elsewhere in the file).
                    fmt.Println(key, property)
                    errors = append(errors, NewYamlPathError(tlp, schema, err))
                    continue
                }
                for _, item := range items {
                    if errs := v.handleObject(key, item, tlp, schema); len(errs) > 0 {
                        errors = append(errors, errs...)
                    }
                }
            }
        // default is some k8s object
        default:
            schema, err := v.resolver.Resolve(property.Reference)
            if err != nil {
                // DEBUG LINE good to use if there is a weird error
                // fmt.Println(key, property)
                errors = append(errors, NewYamlPathError(tlp, property.Reference, err))
                continue
            }
            // Bail if the object reference is a type rename
            if schema.Type == "string" {
                // format must be set if type is string
                switch schema.Format {
                case "int-or-string":
                    if _, ok := value.(string); !ok {
                        if _, ok2 := value.(int); !ok2 {
                            errors = append(errors, NewYamlPathError(tlp, value, NewWrongTypeError(key, "int-or-string", value)))
                        }
                    }
                case "date-time":
                    // nil is valid object reference
                    if value == nil {
                        continue
                    }
                    date, ok := value.(string)
                    if !ok {
                        errors = append(errors, NewYamlPathError(tlp, value, NewWrongTypeError(key, "string", value)))
                        continue
                    }
                    if _, err := time.Parse("2006-01-02T15:04:05Z", date); err != nil {
                        errors = append(errors, NewYamlPathError(tlp, value, NewWrongTypeError(key, "time.Time", value)))
                    }
                default:
                    errors = append(errors, NewYamlPathError(tlp, value, NewUnknownFormatError(schema.Format)))
                }
                continue
            }
            if errs := v.handleObject(key, value, tlp, schema); len(errs) > 0 {
                errors = append(errors, errs...)
            }
        }
    }
    return errors
}
// handleObject takes a key that has a value that will be of type map[interface{}]interface{}
// handleObject takes the current path to the key that is being validated and the schema of the object hidden under the value interface.
func (v *Validator) handleObject(key string, value interface{}, path []string, schema *Schema) []error {
object, ok := value.(map[interface{}]interface{})
if !ok {
return []error{NewYamlPathError(path, value, NewWrongTypeError(key, "map[interface{}]interface{}", value))}
}
return v.validate(object, schema, path)
} | internal/kubernetes/validations.go | 0.525125 | 0.401043 | validations.go | starcoder |
package types
import (
    "bytes"

    "github.com/pkg/errors"
)
// Tip is what expected consensus needs from a Block. For now it *is* a
// Block.
type Tip = Block

// TipSet is a set of Tips: blocks at the same height with the same parent set,
// keyed by the string form of their Cid.
type TipSet map[string]*Tip

var (
    // ErrEmptyTipSet is returned when a method requiring a non-empty tipset is called on an empty tipset.
    ErrEmptyTipSet = errors.New("empty tipset calling unallowed method")
)
// NewTipSet returns a TipSet wrapping the input blocks.
// PRECONDITION: all blocks share the same height and parent set.
func NewTipSet(blks ...*Block) (TipSet, error) {
    if len(blks) == 0 {
        return nil, errors.New("Cannot create tipset: no blocks")
    }
    tipset := make(TipSet)
    for _, blk := range blks {
        if err := tipset.AddBlock(blk); err != nil {
            return nil, errors.Wrapf(err, "Cannot create tipset")
        }
    }
    return tipset, nil
}
// AddBlock adds the provided block to this tipset.
// The first block establishes the tipset's height, parents and parent
// weight; subsequent blocks must match all three or an error is returned.
// PRECONDITION: this block has the same height and parent set as other members of ts.
func (ts TipSet) AddBlock(b *Block) error {
    if len(ts) == 0 {
        id := b.Cid()
        ts[id.String()] = b
        return nil
    }

    h, err := ts.Height()
    if err != nil {
        return err
    }
    p, err := ts.Parents()
    if err != nil {
        return err
    }
    weight, err := ts.ParentWeight()
    if err != nil {
        return err
    }

    if uint64(b.Height) != h {
        return errors.Errorf("block height %d doesn't match existing tipset height %d", uint64(b.Height), h)
    }
    if !b.Parents.Equals(p) {
        return errors.Errorf("block parents %s don't match tipset parents %s", b.Parents.String(), p.String())
    }
    if uint64(b.ParentWeight) != weight {
        // FIX: message previously read "bBlock parent weight".
        return errors.Errorf("block parent weight: %d doesn't match existing tipset parent weight: %d", uint64(b.ParentWeight), weight)
    }
    id := b.Cid()
    ts[id.String()] = b
    return nil
}
// Clone returns a shallow copy of the TipSet: the map is duplicated but the
// *Tip values are shared with the receiver.
func (ts TipSet) Clone() TipSet {
    out := make(TipSet, len(ts))
    for key, blk := range ts {
        out[key] = blk
    }
    return out
}
// String returns a formatted string of the TipSet:
// { <cid1> <cid2> <cid3> }
func (ts TipSet) String() string {
    cids := ts.ToSortedCidSet()
    return cids.String()
}
// Equals reports whether both tipsets contain the same block cids. Blocks
// are compared by cid only, never deeply, so two tipsets whose blocks live
// at different addresses but share cids are equal.
func (ts TipSet) Equals(ts2 TipSet) bool {
    lhs := ts.ToSortedCidSet()
    rhs := ts2.ToSortedCidSet()
    return lhs.Equals(rhs)
}
// ToSortedCidSet returns a SortedCidSet holding the cid of every block in
// the tipset.
func (ts TipSet) ToSortedCidSet() SortedCidSet {
    var cids SortedCidSet
    for _, blk := range ts {
        cids.Add(blk.Cid())
    }
    return cids
}
// ToSlice returns the tipset's blocks as a slice, ordered by sorted cid.
func (ts TipSet) ToSlice() []*Block {
    out := make([]*Block, 0, len(ts))
    for it := ts.ToSortedCidSet().Iter(); !it.Complete(); it.Next() {
        out = append(out, ts[it.Value().String()])
    }
    return out
}
// MinTicket returns the smallest ticket (by bytes.Compare) of all blocks in
// the tipset, or ErrEmptyTipSet when the tipset has no blocks.
func (ts TipSet) MinTicket() (Signature, error) {
    if len(ts) == 0 {
        return nil, ErrEmptyTipSet
    }
    blks := ts.ToSlice()
    min := blks[0].Ticket
    // FIX: the original ranged over the redundant blks[0:] and re-indexed;
    // start from the second element since min already holds the first.
    for _, blk := range blks[1:] {
        if bytes.Compare(blk.Ticket, min) < 0 {
            min = blk.Ticket
        }
    }
    return min, nil
}
// Height returns the height shared by every block in the tipset, or
// ErrEmptyTipSet when the tipset has no blocks.
func (ts TipSet) Height() (uint64, error) {
    if len(ts) == 0 {
        return 0, ErrEmptyTipSet
    }
    blks := ts.ToSlice()
    return uint64(blks[0].Height), nil
}
// Parents returns the parent cid set shared by every block in the tipset,
// or ErrEmptyTipSet when the tipset has no blocks.
func (ts TipSet) Parents() (SortedCidSet, error) {
    if len(ts) == 0 {
        return SortedCidSet{}, ErrEmptyTipSet
    }
    blks := ts.ToSlice()
    return blks[0].Parents, nil
}
// ParentWeight returns the tipset's ParentWeight in fixed point form.
func (ts TipSet) ParentWeight() (uint64, error) {
if len(ts) == 0 {
return uint64(0), ErrEmptyTipSet
}
return uint64(ts.ToSlice()[0].ParentWeight), nil
} | types/tipset.go | 0.659734 | 0.429609 | tipset.go | starcoder |
package interpreter
import (
"github.com/google/cel-go/common/types/ref"
)
// EvalState tracks the values associated with expression ids during execution.
type EvalState interface {
    // GetRuntimeExpressionID returns the runtime id corresponding to the
    // expression id from the AST.
    GetRuntimeExpressionID(exprID int64) int64

    // OnlyValue returns the value in the eval state, if exactly one exists.
    OnlyValue() (ref.Value, bool)

    // Value returns the value of the given expression id, false if not found.
    Value(int64) (ref.Value, bool)
}
// MutableEvalState permits the mutation of evaluation state for a given
// expression id.
type MutableEvalState interface {
    EvalState

    // SetRuntimeExpressionID sets the runtime id for the given expr id.
    SetRuntimeExpressionID(exprID int64, runtimeID int64)

    // SetValue associates an expression id with a value.
    SetValue(int64, ref.Value)
}
// NewEvalState returns a MutableEvalState sized for instructionCount
// expressions; expression ids are expected to fall in [0, instructionCount).
func NewEvalState(instructionCount int64) MutableEvalState {
    return &defaultEvalState{
        exprCount: instructionCount,
        // FIX: dropped the redundant explicit capacity (make(s, n, n) == make(s, n)).
        exprValues: make([]ref.Value, instructionCount),
        exprIDMap:  make(map[int64]int64),
    }
}
// defaultEvalState is the default MutableEvalState implementation, backed by
// a dense slice indexed by expression id.
type defaultEvalState struct {
    exprCount  int64           // number of expressions; valid ids are [0, exprCount)
    exprValues []ref.Value     // value per expression id; nil means unset
    exprIDMap  map[int64]int64 // AST expression id -> runtime expression id
}
// GetRuntimeExpressionID returns the runtime id mapped to exprID, or exprID
// itself when no mapping has been registered.
func (s *defaultEvalState) GetRuntimeExpressionID(exprID int64) int64 {
    if runtimeID, found := s.exprIDMap[exprID]; found {
        return runtimeID
    }
    return exprID
}
// OnlyValue returns the single non-nil value held in the state, reporting
// false when zero or more than one value is set.
func (s *defaultEvalState) OnlyValue() (ref.Value, bool) {
    var only ref.Value
    count := 0
    for _, v := range s.exprValues {
        if v == nil {
            continue
        }
        only = v
        count++
    }
    if count != 1 {
        return nil, false
    }
    return only, true
}
// SetRuntimeExpressionID maps the parse-time expression id to the id used
// at evaluation time.
func (s *defaultEvalState) SetRuntimeExpressionID(exprID int64, runtimeID int64) {
    s.exprIDMap[exprID] = runtimeID
}
// SetValue associates exprID with value. exprID must lie in [0, exprCount):
// unlike Value, this method does not range-check, so an out-of-range id
// panics on the slice index.
func (s *defaultEvalState) SetValue(exprID int64, value ref.Value) {
    s.exprValues[exprID] = value
}
// Value returns the value stored for exprID and whether the id is in range.
// Note that an in-range id with no stored value yields (nil, true).
func (s *defaultEvalState) Value(exprID int64) (ref.Value, bool) {
    // TODO: The eval state assumes a dense program expression id space. While
    // this is true of how the cel-go parser generates identifiers, it may not
    // be true for all implementations or for the long term. Replace the use of
    // parse-time generated expression ids with a dense runtime identifier.
    if exprID >= 0 && exprID < s.exprCount {
        return s.exprValues[exprID], true
    }
    return nil, false
}
package circbuf
import (
"fmt"
)
// Buffer implements a circular buffer. It is a fixed size,
// and new writes overwrite older data, such that for a buffer
// of size N, for any amount of writes, only the last N bytes
// are retained.
type Buffer struct {
    data        []byte // ring storage
    out         []byte // scratch used by Bytes to linearize wrapped data
    size        int64  // capacity of the ring
    writeCursor int64  // index in data where the next byte lands
    written     int64  // total bytes ever written (may exceed size)
}

// NewBuffer creates a new buffer of a given size. The size
// must be greater than 0.
func NewBuffer(size int64) (*Buffer, error) {
    if size <= 0 {
        // FIX: error strings are lowercase per Go convention (staticcheck ST1005).
        return nil, fmt.Errorf("size must be positive")
    }

    b := &Buffer{
        size: size,
        data: make([]byte, size),
        out:  make([]byte, size),
    }
    return b, nil
}

// Write writes up to len(buf) bytes to the internal ring,
// overriding older data if necessary. It always reports len(buf) written
// and never fails.
func (b *Buffer) Write(buf []byte) (int, error) {
    // Account for total bytes written
    n := len(buf)
    b.written += int64(n)

    // If the buffer is larger than ours, then we only care
    // about the last size bytes anyways
    if int64(n) > b.size {
        buf = buf[int64(n)-b.size:]
    }

    // Copy in place, wrapping around the end of the ring if needed.
    remain := b.size - b.writeCursor
    copy(b.data[b.writeCursor:], buf)
    if int64(len(buf)) > remain {
        copy(b.data, buf[remain:])
    }

    // Update location of the cursor
    b.writeCursor = ((b.writeCursor + int64(len(buf))) % b.size)
    return n, nil
}

// WriteByte writes a single byte into the buffer. It never fails.
func (b *Buffer) WriteByte(c byte) error {
    b.data[b.writeCursor] = c
    b.writeCursor = ((b.writeCursor + 1) % b.size)
    b.written++
    return nil
}

// Size returns the size of the buffer
func (b *Buffer) Size() int64 {
    return b.size
}

// TotalWritten provides the total number of bytes written
func (b *Buffer) TotalWritten() int64 {
    return b.written
}

// Bytes provides a slice of the bytes written. This
// slice should not be written to. The underlying array
// may point to data that will be overwritten by a subsequent
// call to Bytes. It does no allocation.
func (b *Buffer) Bytes() []byte {
    switch {
    case b.written >= b.size && b.writeCursor == 0:
        // Exactly aligned: the ring already holds the bytes in order.
        return b.data
    case b.written > b.size:
        // Wrapped: linearize into the scratch slice, oldest bytes first.
        copy(b.out, b.data[b.writeCursor:])
        copy(b.out[b.size-b.writeCursor:], b.data[:b.writeCursor])
        return b.out
    default:
        // Never wrapped: the prefix up to the cursor is everything written.
        return b.data[:b.writeCursor]
    }
}

// Get returns a single byte out of the buffer, at the given position
// relative to the oldest retained byte.
func (b *Buffer) Get(i int64) (byte, error) {
    switch {
    case i >= b.written || i >= b.size:
        // FIX: error strings are lowercase per Go convention (staticcheck ST1005).
        return 0, fmt.Errorf("index out of bounds: %v", i)
    case b.written > b.size:
        return b.data[(b.writeCursor+i)%b.size], nil
    default:
        return b.data[i], nil
    }
}

// Reset resets the buffer so it has no content. The backing storage is
// retained and reused.
func (b *Buffer) Reset() {
    b.writeCursor = 0
    b.written = 0
}

// String returns the contents of the buffer as a string
func (b *Buffer) String() string {
    return string(b.Bytes())
}
package parser
import (
"github.com/magic003/liza/ast"
"github.com/magic003/liza/lexer"
"github.com/magic003/liza/token"
)
// New returns a new instance of parser for the named source. It wires scan
// errors into the parser's error list and primes the first token so parsing
// can begin immediately.
func New(filename string, src []byte) *Parser {
    parser := &Parser{}
    // FIX: local renamed from "lexer", which shadowed the lexer package.
    lx := lexer.New(filename, src, parser.handleErr, lexer.ScanComments)
    parser.lexer = lx
    parser.next()
    return parser
}
// Parser holds the internal state of a parser.
type Parser struct {
    lexer *lexer.Lexer

    tok *token.Token // current token, always valid after New

    errors []Error // errors accumulated while scanning/parsing
}
// ---------------------------------------------------------------------------
// Parsing utilities
// next advances until the current token is not a comment.
func (p *Parser) next() {
    for p.next0(); p.tok.Type == token.COMMENT; p.next0() {
    }
}
// next0 unconditionally advances to the next token, including comments.
func (p *Parser) next0() {
    p.tok = p.lexer.NextToken()
}
// handleErr records a scan or parse error at pos with the given message.
func (p *Parser) handleErr(pos token.Position, msg string) {
    p.errors = append(p.errors, Error{Pos: pos, Msg: msg})
}
// expect consumes and returns the current token. If its type is not tt, an
// error is recorded — but the parser still advances so progress is made.
func (p *Parser) expect(tt token.Type) *token.Token {
    tok := p.tok
    if tok.Type != tt {
        p.errorExpected(tok.Position, "<"+tt.String()+">")
    }
    p.next() // always advance, even on mismatch
    return tok
}
// errorExpected records a parse error at pos for a missing `expected`
// construct. When pos is the current token's position, the message also
// names the token actually found, making the diagnostic more specific.
func (p *Parser) errorExpected(pos token.Position, expected string) {
    msg := "expected " + expected
    if pos == p.tok.Position {
        // error happens at the current position. make it more specific
        msg += ", found <" + p.tok.Type.String() + "> " + p.tok.Content
    }
    p.handleErr(pos, msg)
}
// syncTopLevelDecl skips tokens until the start of the next plausible
// top-level declaration (or EOF). Used for error recovery.
func (p *Parser) syncTopLevelDecl() {
    for {
        switch p.tok.Type {
        case token.PUBLIC, token.CONST, token.CLASS, token.INTERFACE, token.EOF:
            return
        }
        p.next()
    }
}
// ---------------------------------------------------------------------------
// Declarations
// parsePackageDecl parses `package <ident>` followed by a newline.
func (p *Parser) parsePackageDecl() *ast.PackageDecl {
    decl := &ast.PackageDecl{}
    decl.Package = p.expect(token.PACKAGE).Position
    decl.Name = p.expect(token.IDENT)
    p.expect(token.NEWLINE)
    return decl
}
// parseImportDecl parses `import <path> [as <alias>]` followed by a newline.
func (p *Parser) parseImportDecl() *ast.ImportDecl {
    decl := &ast.ImportDecl{
        Import: p.expect(token.IMPORT).Position,
    }
    decl.Path = p.parseImportPath()
    if p.tok.Type == token.AS {
        asPos := p.expect(token.AS).Position
        decl.As = &asPos
        decl.Alias = p.expect(token.IDENT)
    }
    p.expect(token.NEWLINE)
    return decl
}
// parseImportPath parses `[library::]ident{/ident}`; the library prefix and
// any further path segments are optional.
func (p *Parser) parseImportPath() *ast.ImportPath {
    node := &ast.ImportPath{}
    first := p.expect(token.IDENT)
    if p.tok.Type == token.DOUBLECOLON {
        node.LibraryName = first
        p.expect(token.DOUBLECOLON)
        first = p.expect(token.IDENT)
    }
    node.Path = append(node.Path, first)
    for p.tok.Type == token.DIV {
        p.expect(token.DIV)
        node.Path = append(node.Path, p.expect(token.IDENT))
    }
    return node
}
// parseTopLevelDecl parses one top-level declaration: an optionally public
// const, class, or interface. On an unexpected token it records an error,
// skips to the next plausible declaration start, and returns a BadDecl
// spanning the skipped tokens.
func (p *Parser) parseTopLevelDecl() ast.Decl {
    var visibility *token.Token
    if p.tok.Type == token.PUBLIC {
        visibility = p.expect(token.PUBLIC)
    }
    switch p.tok.Type {
    case token.CONST:
        return p.parseConstDecl(visibility, true)
    case token.CLASS:
        return p.parseClassDecl(visibility)
    case token.INTERFACE:
        return p.parseInterfaceDecl(visibility)
    default:
        pos := p.tok.Position
        p.errorExpected(p.tok.Position, "declaration")
        p.syncTopLevelDecl()
        return &ast.BadDecl{
            From: pos,
            To:   p.tok.Position,
        }
    }
}
// parseConstDecl parses `const name [Type] := expr`. expectNewline is false
// when the declaration appears inside a for-loop header, where no trailing
// newline follows. visibility is the already-consumed `public` token, or nil.
func (p *Parser) parseConstDecl(visibility *token.Token, expectNewline bool) *ast.ConstDecl {
    constPos := p.expect(token.CONST).Position
    ident := p.expect(token.IDENT)
    // The type annotation is optional; its absence is detected by seeing `:=`.
    var tp ast.Type
    if p.tok.Type != token.DEFINE {
        tp = p.parseType()
    }
    p.expect(token.DEFINE)
    value := p.parseExpr()
    if expectNewline {
        p.expect(token.NEWLINE)
    }
    return &ast.ConstDecl{
        Visibility: visibility,
        Const:      constPos,
        Ident:      ident,
        Type:       tp,
        Value:      value,
    }
}
// parseClassDecl parses `class Name [implements a.B, ...] { consts vars methods }`
// followed by a newline. visibility is the already-consumed `public` token, or nil.
func (p *Parser) parseClassDecl(visibility *token.Token) *ast.ClassDecl {
    class := p.expect(token.CLASS).Position
    name := p.expect(token.IDENT)
    var implements []*ast.SelectorType
    if p.tok.Type == token.IMPLEMENTS {
        p.expect(token.IMPLEMENTS)
        for p.tok.Type != token.LBRACE && p.tok.Type != token.EOF {
            tp := p.parseBasicOrSelectorType()
            if selectorType, ok := tp.(*ast.SelectorType); ok {
                implements = append(implements, selectorType)
            } else {
                // FIX: previously a nil *ast.SelectorType was appended when the
                // assertion failed (the TODO branch was empty), leaving a nil
                // element in Implements. Record the error and drop the entry.
                p.errorExpected(p.tok.Position, "selector type")
            }
            if p.tok.Type != token.LBRACE {
                p.expect(token.COMMA)
            }
        }
    }
    lbrace := p.expect(token.LBRACE).Position
    var (
        consts  []*ast.ConstDecl
        vars    []*ast.VarDecl
        methods []*ast.FuncDecl
    )
    // Class body sections appear in fixed order: consts, then vars, then methods.
    for p.tok.Type == token.CONST {
        consts = append(consts, p.parseConstDecl(nil, true))
    }
    for p.tok.Type == token.VAR {
        vars = append(vars, p.parseVarDecl(true))
    }
    for p.tok.Type == token.PUBLIC || p.tok.Type == token.FUN {
        methods = append(methods, p.parseFuncDecl())
    }
    rbrace := p.expect(token.RBRACE).Position
    p.expect(token.NEWLINE)
    return &ast.ClassDecl{
        Visibility: visibility,
        Class:      class,
        Name:       name,
        Implements: implements,
        Lbrace:     lbrace,
        Consts:     consts,
        Vars:       vars,
        Methods:    methods,
        Rbrace:     rbrace,
    }
}
// parseInterfaceDecl parses `interface Name { funcdefs }` followed by a
// newline. visibility is the already-consumed `public` token, or nil.
func (p *Parser) parseInterfaceDecl(visibility *token.Token) *ast.InterfaceDecl {
    node := &ast.InterfaceDecl{Visibility: visibility}
    node.Interface = p.expect(token.INTERFACE).Position
    node.Name = p.expect(token.IDENT)
    node.Lbrace = p.expect(token.LBRACE).Position
    for p.tok.Type == token.FUN {
        node.Methods = append(node.Methods, p.parseFuncDef())
    }
    node.Rbrace = p.expect(token.RBRACE).Position
    p.expect(token.NEWLINE)
    return node
}
// parseFuncDef parses a bodyless function signature followed by a newline,
// as found in interface bodies.
func (p *Parser) parseFuncDef() *ast.FuncDef {
    def := p.parseFuncSignature()
    p.expect(token.NEWLINE)
    return def
}
// parseFuncDecl parses an optionally public function signature with a block
// body, followed by a newline.
func (p *Parser) parseFuncDecl() *ast.FuncDecl {
    var visibility *token.Token
    if p.tok.Type == token.PUBLIC {
        visibility = p.expect(token.PUBLIC)
    }
    sig := p.parseFuncSignature()
    body := p.parseBlockStmt()
    p.expect(token.NEWLINE)
    return &ast.FuncDecl{
        Visibility: visibility,
        Fun:        sig.Fun,
        Name:       sig.Name,
        Params:     sig.Params,
        ReturnType: sig.ReturnType,
        Body:       body,
    }
}
// parseFuncSignature parses `fun name(param, ...) [: ReturnType]` without a
// body or trailing newline; callers add whichever follows.
func (p *Parser) parseFuncSignature() *ast.FuncDef {
    fun := p.expect(token.FUN).Position
    name := p.expect(token.IDENT)
    lparen := p.expect(token.LPAREN).Position
    var params []*ast.ParameterDef
    for p.tok.Type != token.RPAREN && p.tok.Type != token.EOF {
        params = append(params, p.parseParameterDef())
        if p.tok.Type != token.RPAREN {
            p.expect(token.COMMA)
        }
    }
    rparen := p.expect(token.RPAREN).Position
    // The return type is optional and introduced by a colon.
    var tp ast.Type
    if p.tok.Type == token.COLON {
        p.expect(token.COLON)
        tp = p.parseType()
    }
    return &ast.FuncDef{
        Fun:        fun,
        Name:       name,
        Lparen:     lparen,
        Params:     params,
        Rparen:     rparen,
        ReturnType: tp,
    }
}
// parseParameterDef parses a single `name Type` parameter.
func (p *Parser) parseParameterDef() *ast.ParameterDef {
    return &ast.ParameterDef{
        Name: p.expect(token.IDENT),
        Type: p.parseType(),
    }
}
// parseVarDecl parses `var name [Type] := expr`. expectNewline is false when
// the declaration appears inside a for-loop header.
func (p *Parser) parseVarDecl(expectNewline bool) *ast.VarDecl {
    varPos := p.expect(token.VAR).Position
    ident := p.expect(token.IDENT)
    // The type annotation is optional; its absence is detected by seeing `:=`.
    var tp ast.Type
    if p.tok.Type != token.DEFINE {
        tp = p.parseType()
    }
    p.expect(token.DEFINE)
    value := p.parseExpr()
    if expectNewline {
        p.expect(token.NEWLINE)
    }
    return &ast.VarDecl{
        Var:   varPos,
        Ident: ident,
        Type:  tp,
        Value: value,
    }
}
// ---------------------------------------------------------------------------
// Expression
// parseExpr parses a full expression, starting the binary-operator
// precedence climb from the lowest precedence level.
func (p *Parser) parseExpr() ast.Expr {
    return p.parseBinaryExpr(lowestPrec)
}
// parseBinaryExpr parses a binary expression via precedence climbing: it
// keeps consuming operators whose precedence is strictly higher than prec.
// The `currPrec <= prec` cutoff makes equal-precedence operators
// left-associative.
func (p *Parser) parseBinaryExpr(prec int) ast.Expr {
    x := p.parseUnaryExpr()
    for {
        currPrec := precedence(p.tok.Type)
        if currPrec <= prec {
            return x
        }
        op := p.expect(p.tok.Type)
        y := p.parseBinaryExpr(currPrec)
        x = &ast.BinaryExpr{X: x, Op: op, Y: y}
    }
}
// parseUnaryExpr parses an optional prefix operator (-, ^, !) applied to a
// unary expression, otherwise falls through to a primary expression.
func (p *Parser) parseUnaryExpr() ast.Expr {
    switch p.tok.Type {
    case token.SUB, token.XOR, token.NOT:
        op := p.expect(p.tok.Type)
        return &ast.UnaryExpr{Op: op, X: p.parseUnaryExpr()}
    }
    return p.parsePrimaryExpr()
}
// parsePrimaryExpr parses an operand followed by any number of postfix
// forms: selector (.x), index ([i]) and call ((args)), left to right.
func (p *Parser) parsePrimaryExpr() ast.Expr {
    x := p.parseOperand()
    for {
        switch p.tok.Type {
        case token.PERIOD:
            x = p.parseSelectorExpr(x)
        case token.LBRACK:
            x = p.parseIndexExpr(x)
        case token.LPAREN:
            x = p.parseCallExpr(x)
        default:
            return x
        }
    }
}
// parseOperand parses the leaf forms of an expression: an identifier, a
// basic literal, or an array/map/tuple literal.
func (p *Parser) parseOperand() ast.Expr {
    switch p.tok.Type {
    case token.IDENT:
        return p.parseIdent()
    case token.INT, token.FLOAT, token.STRING:
        tok := p.expect(p.tok.Type)
        return &ast.BasicLit{Token: tok}
    case token.LBRACK:
        return p.parseArrayLit()
    case token.LBRACE:
        return p.parseMapLit()
    case token.LPAREN:
        // It could not tell it is a TupleLit or ParenExpr, so it always treats it as a TupleLit.
        // The actual check will happen in the semantic analysis phase.
        return p.parseTupleLit()
    }
    // TODO record error, sync and return BadExpr
    return nil
}
// parseArrayLit parses `[ expr, expr, ... ]`; elements may be empty.
func (p *Parser) parseArrayLit() *ast.ArrayLit {
    lit := &ast.ArrayLit{Lbrack: p.expect(token.LBRACK).Position}
    for p.tok.Type != token.RBRACK && p.tok.Type != token.EOF {
        lit.Elts = append(lit.Elts, p.parseExpr())
        if p.tok.Type != token.RBRACK {
            p.expect(token.COMMA)
        }
    }
    lit.Rbrack = p.expect(token.RBRACK).Position
    return lit
}
// parseMapLit parses `{ key: value, ... }`; entries may be empty.
func (p *Parser) parseMapLit() *ast.MapLit {
    lit := &ast.MapLit{Lbrace: p.expect(token.LBRACE).Position}
    for p.tok.Type != token.RBRACE && p.tok.Type != token.EOF {
        lit.Elts = append(lit.Elts, p.parseKeyValueExpr())
        if p.tok.Type != token.RBRACE {
            p.expect(token.COMMA)
        }
    }
    lit.Rbrace = p.expect(token.RBRACE).Position
    return lit
}
// parseKeyValueExpr parses a single `key : value` entry of a map literal.
func (p *Parser) parseKeyValueExpr() *ast.KeyValueExpr {
    kv := &ast.KeyValueExpr{}
    kv.Key = p.parseExpr()
    kv.Colon = p.expect(token.COLON).Position
    kv.Value = p.parseExpr()
    return kv
}
// parseTupleLit parses `( expr, expr, ... )`; elements may be empty.
func (p *Parser) parseTupleLit() *ast.TupleLit {
    lit := &ast.TupleLit{Lparen: p.expect(token.LPAREN).Position}
    for p.tok.Type != token.RPAREN && p.tok.Type != token.EOF {
        lit.Elts = append(lit.Elts, p.parseExpr())
        if p.tok.Type != token.RPAREN {
            p.expect(token.COMMA)
        }
    }
    lit.Rparen = p.expect(token.RPAREN).Position
    return lit
}
// parseSelectorExpr parses `.ident` applied to the already-parsed x.
func (p *Parser) parseSelectorExpr(x ast.Expr) *ast.SelectorExpr {
    p.expect(token.PERIOD)
    return &ast.SelectorExpr{X: x, Sel: p.parseIdent()}
}
// parseIdent parses a single identifier expression.
func (p *Parser) parseIdent() *ast.Ident {
    ident := p.expect(token.IDENT)
    return &ast.Ident{Token: ident}
}
// parseIndexExpr parses `[ index ]` applied to the already-parsed x.
func (p *Parser) parseIndexExpr(x ast.Expr) *ast.IndexExpr {
    node := &ast.IndexExpr{X: x}
    node.Lbrack = p.expect(token.LBRACK).Position
    node.Index = p.parseExpr()
    node.Rbrack = p.expect(token.RBRACK).Position
    return node
}
// parseCallExpr parses `( arg, arg, ... )` applied to the already-parsed
// callee x; the argument list may be empty.
func (p *Parser) parseCallExpr(x ast.Expr) *ast.CallExpr {
    call := &ast.CallExpr{Fun: x}
    call.Lparen = p.expect(token.LPAREN).Position
    for p.tok.Type != token.RPAREN && p.tok.Type != token.EOF {
        call.Args = append(call.Args, p.parseExpr())
        if p.tok.Type != token.RPAREN {
            p.expect(token.COMMA)
        }
    }
    call.Rparen = p.expect(token.RPAREN).Position
    return call
}
// ---------------------------------------------------------------------------
// Type
// parseType parses any type form, dispatching on the first token:
// `[` array, `{` map, `(` tuple, identifier for basic/selector types.
func (p *Parser) parseType() ast.Type {
    switch p.tok.Type {
    case token.LBRACK:
        return p.parseArrayType()
    case token.LBRACE:
        return p.parseMapType()
    case token.LPAREN:
        return p.parseTupleType()
    case token.IDENT:
        return p.parseBasicOrSelectorType()
    default:
        // TODO record error, sync and return bad type node.
        return nil
    }
}
// parseArrayType parses `[]ElementType`.
func (p *Parser) parseArrayType() *ast.ArrayType {
    node := &ast.ArrayType{}
    node.Lbrack = p.expect(token.LBRACK).Position
    node.Rbrack = p.expect(token.RBRACK).Position
    node.Elt = p.parseType()
    return node
}
// parseMapType parses `{ KeyType : ValueType }`.
func (p *Parser) parseMapType() *ast.MapType {
    node := &ast.MapType{Lbrace: p.expect(token.LBRACE).Position}
    node.Key = p.parseType()
    p.expect(token.COLON)
    node.Value = p.parseType()
    node.Rbrace = p.expect(token.RBRACE).Position
    return node
}
// parseTupleType parses `( Type, Type, ... )`; elements may be empty.
func (p *Parser) parseTupleType() *ast.TupleType {
    node := &ast.TupleType{Lparen: p.expect(token.LPAREN).Position}
    for p.tok.Type != token.RPAREN && p.tok.Type != token.EOF {
        node.Elts = append(node.Elts, p.parseType())
        if p.tok.Type != token.RPAREN {
            p.expect(token.COMMA)
        }
    }
    node.Rparen = p.expect(token.RPAREN).Position
    return node
}
// parseBasicOrSelectorType parses `ident` (BasicType) or `pkg.ident`
// (SelectorType).
func (p *Parser) parseBasicOrSelectorType() ast.Type {
    first := p.expect(token.IDENT)
    if p.tok.Type != token.PERIOD {
        return &ast.BasicType{Ident: first}
    }
    p.expect(token.PERIOD)
    return &ast.SelectorType{
        Package: first,
        Sel:     p.expect(token.IDENT),
    }
}
// ---------------------------------------------------------------------------
// Statement
// parseStmt parses a single statement, dispatching on the leading token.
// Anything not starting with a statement keyword is treated as a simple
// statement (expression, inc/dec, or assignment).
func (p *Parser) parseStmt() ast.Stmt {
    switch p.tok.Type {
    case token.CONST:
        return &ast.DeclStmt{Decl: p.parseConstDecl(nil, true)}
    case token.VAR:
        return &ast.DeclStmt{Decl: p.parseVarDecl(true)}
    case token.RETURN:
        return p.parseReturnStmt()
    case token.BREAK, token.CONTINUE:
        return p.parseBranchStmt()
    case token.IF:
        return p.parseIfStmt(true)
    case token.MATCH:
        return p.parseMatchStmt()
    case token.FOR:
        return p.parseForStmt()
    default:
        return p.parseSimpleStmt(true)
    }
}
// parseSimpleStmt parses an expression optionally followed by ++/-- or an
// assignment operator with a right-hand side. expectNewline is false when
// the statement is the post clause of a for-loop header.
func (p *Parser) parseSimpleStmt(expectNewline bool) ast.Stmt {
    expr := p.parseExpr()
    var ret ast.Stmt
    switch p.tok.Type {
    case token.INC, token.DEC:
        op := p.expect(p.tok.Type)
        ret = &ast.IncDecStmt{
            Expr: expr,
            Op:   op,
        }
    case token.ASSIGN, token.ADDASSIGN, token.SUBASSIGN, token.MULASSIGN, token.DIVASSIGN, token.REMASSIGN,
        token.ANDASSIGN, token.ORASSIGN, token.XORASSIGN, token.SHLASSIGN, token.SHRASSIGN:
        assign := p.expect(p.tok.Type)
        rhs := p.parseExpr()
        ret = &ast.AssignStmt{
            LHS:    expr,
            Assign: assign,
            RHS:    rhs,
        }
    default:
        // Bare expression statement.
        ret = &ast.ExprStmt{Expr: expr}
    }
    if expectNewline {
        p.expect(token.NEWLINE)
    }
    return ret
}
// parseReturnStmt parses `return expr` followed by a newline.
func (p *Parser) parseReturnStmt() *ast.ReturnStmt {
    node := &ast.ReturnStmt{Return: p.expect(token.RETURN)}
    node.Value = p.parseExpr()
    p.expect(token.NEWLINE)
    return node
}
// parseBranchStmt parses `break` or `continue` followed by a newline.
func (p *Parser) parseBranchStmt() *ast.BranchStmt {
    var tok *token.Token
    switch p.tok.Type {
    case token.BREAK:
        tok = p.expect(token.BREAK)
    default:
        tok = p.expect(token.CONTINUE)
    }
    p.expect(token.NEWLINE)
    return &ast.BranchStmt{Tok: tok}
}
// parseIfStmt parses `if cond { ... } [else ...]`. expectNewline is false
// when this if is the tail of an `else if` chain, in which case the
// outermost if consumes the single trailing newline.
func (p *Parser) parseIfStmt(expectNewline bool) *ast.IfStmt {
    ifPos := p.expect(token.IF).Position
    cond := p.parseExpr()
    body := p.parseBlockStmt()
    var els *ast.ElseStmt
    if p.tok.Type == token.ELSE {
        els = p.parseElseStmt()
    }
    if expectNewline {
        p.expect(token.NEWLINE)
    }
    return &ast.IfStmt{
        If:   ifPos,
        Cond: cond,
        Body: body,
        Else: els,
    }
}
// parseBlockStmt parses `{ stmt ... }`; the statement list may be empty.
func (p *Parser) parseBlockStmt() *ast.BlockStmt {
    block := &ast.BlockStmt{Lbrace: p.expect(token.LBRACE).Position}
    for p.tok.Type != token.RBRACE && p.tok.Type != token.EOF {
        block.Stmts = append(block.Stmts, p.parseStmt())
    }
    block.Rbrace = p.expect(token.RBRACE).Position
    return block
}
// parseElseStmt parses `else { ... }` or the `else if ...` continuation of
// a chain; exactly one of If/Body is populated.
func (p *Parser) parseElseStmt() *ast.ElseStmt {
    node := &ast.ElseStmt{Else: p.expect(token.ELSE).Position}
    if p.tok.Type == token.IF {
        node.If = p.parseIfStmt(false)
    } else {
        node.Body = p.parseBlockStmt()
    }
    return node
}
// parseMatchStmt parses `match expr { case-clauses }` followed by a newline.
func (p *Parser) parseMatchStmt() *ast.MatchStmt {
    node := &ast.MatchStmt{Match: p.expect(token.MATCH).Position}
    node.Expr = p.parseExpr()
    node.Lbrace = p.expect(token.LBRACE).Position
    for p.tok.Type != token.RBRACE && p.tok.Type != token.EOF {
        node.Cases = append(node.Cases, p.parseCaseClause())
    }
    node.Rbrace = p.expect(token.RBRACE).Position
    p.expect(token.NEWLINE)
    return node
}
// parseCaseClause parses `case pattern:` or `default:` followed by a
// statement body, which extends until the next case/default clause or the
// match's closing brace. Pattern is nil for a default clause.
func (p *Parser) parseCaseClause() *ast.CaseClause {
    var casePos token.Position
    var pattern ast.Expr
    if p.tok.Type == token.CASE {
        casePos = p.expect(token.CASE).Position
        pattern = p.parseExpr()
    } else {
        casePos = p.expect(token.DEFAULT).Position
    }
    colon := p.expect(token.COLON).Position
    var body []ast.Stmt
    for p.tok.Type != token.CASE && p.tok.Type != token.DEFAULT &&
        p.tok.Type != token.RBRACE && p.tok.Type != token.EOF {
        body = append(body, p.parseStmt())
    }
    return &ast.CaseClause{
        Case:    casePos,
        Pattern: pattern,
        Colon:   colon,
        Body:    body,
    }
}
// parseForStmt parses the three for-loop forms, each followed by a newline:
//
//	for { ... }                     (infinite)
//	for cond { ... }                (while-style)
//	for decls; cond; post { ... }   (full; each part may be empty)
//
// Header declarations are comma-separated const/var declarations.
func (p *Parser) parseForStmt() *ast.ForStmt {
    forPos := p.expect(token.FOR).Position
    var (
        decls []ast.Decl
        cond  ast.Expr
        post  ast.Stmt
    )
    if p.tok.Type != token.LBRACE {
        // A header not starting with const/var/`;` is the while-style form.
        if p.tok.Type != token.CONST && p.tok.Type != token.VAR && p.tok.Type != token.SEMICOLON {
            cond = p.parseExpr()
        } else {
            // Full form: declarations up to the first semicolon.
            for p.tok.Type != token.SEMICOLON && p.tok.Type != token.EOF {
                if p.tok.Type == token.CONST {
                    decls = append(decls, p.parseConstDecl(nil, false))
                } else if p.tok.Type == token.VAR {
                    decls = append(decls, p.parseVarDecl(false))
                } else {
                    // TODO record error, sync to ',', ';', or '{' and return BadDecl
                }
                // expect always advances, so this loop cannot stall on a bad token.
                if p.tok.Type != token.SEMICOLON {
                    p.expect(token.COMMA)
                }
            }
            p.expect(token.SEMICOLON)
            if p.tok.Type != token.SEMICOLON {
                cond = p.parseExpr()
            }
            p.expect(token.SEMICOLON)
            if p.tok.Type != token.LBRACE {
                post = p.parseSimpleStmt(false)
            }
        }
    }
    body := p.parseBlockStmt()
    p.expect(token.NEWLINE)
    return &ast.ForStmt{
        For:   forPos,
        Decls: decls,
        Cond:  cond,
        Post:  post,
        Body:  body,
    }
}
// ---------------------------------------------------------------------------
// Parse file
// parseFile parses a complete source file: package declaration, imports,
// then top-level declarations until EOF. It bails out (returning nil) if
// scanning the first token or parsing the package clause produced errors.
func (p *Parser) parseFile() *ast.File {
    // Don't parse it if we had errors scanning the first token.
    if len(p.errors) != 0 {
        return nil
    }
    packageDecl := p.parsePackageDecl()
    // Don't parse it if we had errors parsing the package declaration.
    if len(p.errors) != 0 {
        return nil
    }
    var imports []*ast.ImportDecl
    for p.tok.Type == token.IMPORT {
        imports = append(imports, p.parseImportDecl())
    }
    var decls []ast.Decl
    for p.tok.Type != token.EOF {
        decls = append(decls, p.parseTopLevelDecl())
    }
    return &ast.File{
        Package: packageDecl,
        Imports: imports,
        Decls:   decls,
    }
}
package p384
import (
"crypto/subtle"
"math/big"
"github.com/cloudflare/circl/math"
)
// curve implements the P-384 group operations; the type is stateless.
type curve struct{}

// P384 returns a Curve which implements P-384 (see FIPS 186-3, section D.2.4).
func P384() Curve { return curve{} }
// IsOnCurve reports whether the given (x,y) lies on the curve, i.e. whether
// y^2 == x^3 - 3x + b in the P-384 field. Operands are converted to the
// internal fp384 Montgomery representation before both sides are evaluated.
func (c curve) IsOnCurve(x, y *big.Int) bool {
    x1, y1 := &fp384{}, &fp384{}
    x1.SetBigInt(x)
    y1.SetBigInt(y)
    montEncode(x1, x1)
    montEncode(y1, y1)

    // Left side: y^2.
    y2, x3 := &fp384{}, &fp384{}
    fp384Sqr(y2, y1)
    // Right side: x^3 ...
    fp384Sqr(x3, x1)
    fp384Mul(x3, x3, x1)

    // ... minus 3x ...
    threeX := &fp384{}
    fp384Add(threeX, x1, x1)
    fp384Add(threeX, threeX, x1)
    fp384Sub(x3, x3, threeX)
    // ... plus the curve constant (bb).
    fp384Add(x3, x3, &bb)

    return *y2 == *x3
}
// Add returns the sum of (x1,y1) and (x2,y2).
func (c curve) Add(x1, y1, x2, y2 *big.Int) (x, y *big.Int) {
    sum := newAffinePoint(x1, y1).toJacobian()
    sum.mixadd(sum, newAffinePoint(x2, y2))
    return sum.toAffine().toInt()
}
// Double returns 2*(x,y).
func (c curve) Double(x1, y1 *big.Int) (x, y *big.Int) {
    dbl := newAffinePoint(x1, y1).toJacobian()
    dbl.double()
    return dbl.toAffine().toInt()
}
// reduceScalar returns k mod N (the curve order) as a fixed-width,
// big-endian byte slice of sizeFp bytes.
func (c curve) reduceScalar(k []byte) []byte {
    reduced := new(big.Int).SetBytes(k)
    reduced.Mod(reduced, c.Params().N)
    return reduced.FillBytes(make([]byte, sizeFp))
}
// toOdd returns k unchanged if it is odd, or -k mod N if it is even,
// together with a flag (1 = was even) so callers can undo the negation.
// Selection between the two candidates uses a constant-time copy so the
// scalar's parity is not leaked through branching.
func (c curve) toOdd(k []byte) ([]byte, int) {
    var X, Y big.Int
    X.SetBytes(k)
    Y.Neg(&X).Mod(&Y, c.Params().N)
    isEven := 1 - int(X.Bit(0))
    x := X.Bytes()
    y := Y.Bytes()
    // Pad the shorter encoding so both candidates are the same length
    // before the constant-time select.
    if len(x) < len(y) {
        x = append(make([]byte, len(y)-len(x)), x...)
    } else if len(x) > len(y) {
        y = append(make([]byte, len(x)-len(y)), y...)
    }
    subtle.ConstantTimeCopy(isEven, x, y)
    return x, isEven
}
// ScalarMult returns (Qx,Qy)=k*(Px,Py) where k is a number in big-endian form.
func (c curve) ScalarMult(x1, y1 *big.Int, k []byte) (x, y *big.Int) {
    // Window width 5 trades precomputation table size against the number
    // of additions performed in the main loop.
    return c.scalarMultOmega(x1, y1, k, 5)
}
// scalarMultOmega computes k*(x1,y1) with a width-omega signed-digit
// recoding. The scalar is first reduced mod N and forced odd (recording
// whether it was negated); the final conditional negation undoes that.
// Each table lookup scans every entry with a conditional move so the
// accessed index is not leaked through memory access patterns.
func (c curve) scalarMultOmega(x1, y1 *big.Int, k []byte, omega uint) (x, y *big.Int) {
    k = c.reduceScalar(k)
    oddK, isEvenK := c.toOdd(k)

    var scalar big.Int
    scalar.SetBytes(oddK)
    if scalar.Sign() == 0 {
        return new(big.Int), new(big.Int)
    }
    const bitsN = uint(384)
    L := math.SignedDigit(&scalar, omega, bitsN)

    var R jacobianPoint
    Q := zeroPoint().toJacobian()
    // Table of odd multiples of the input point: P, 3P, 5P, ...
    TabP := newAffinePoint(x1, y1).oddMultiples(omega)
    for i := len(L) - 1; i > 0; i-- {
        for j := uint(0); j < omega-1; j++ {
            Q.double()
        }
        idx := absolute(L[i]) >> 1
        // Constant-time table lookup via full-scan cmov.
        for j := range TabP {
            R.cmov(&TabP[j], subtle.ConstantTimeEq(int32(j), idx))
        }
        // Conditionally negate R when the signed digit is negative.
        R.cneg(int(L[i]>>31) & 1)
        Q.add(Q, &R)
    }

    // Calculate the last iteration using complete addition formula.
    for j := uint(0); j < omega-1; j++ {
        Q.double()
    }
    idx := absolute(L[0]) >> 1
    for j := range TabP {
        R.cmov(&TabP[j], subtle.ConstantTimeEq(int32(j), idx))
    }
    R.cneg(int(L[0]>>31) & 1)
    QQ := Q.toProjective()
    QQ.completeAdd(QQ, R.toProjective())
    // Undo the toOdd negation if the original scalar was even.
    QQ.cneg(isEvenK)
    return QQ.toAffine().toInt()
}
// ScalarBaseMult returns k*G, where G is the base point of the group
// and k is an integer in big-endian form.
func (c curve) ScalarBaseMult(k []byte) (x, y *big.Int) {
	p := c.Params()
	return c.ScalarMult(p.Gx, p.Gy, k)
}
// CombinedMult calculates P=mG+nQ, where G is the generator and Q=(x,y,z).
// The scalars m and n are integers in big-endian form. Non-constant time.
//
// Both scalars are recoded in omega-NAF form and processed jointly with a
// single shared doubling per iteration. Multiples of G come from the
// precomputed baseOddMultiples table; odd multiples of Q are built here.
func (c curve) CombinedMult(xQ, yQ *big.Int, m, n []byte) (xP, yP *big.Int) {
	const nOmega = uint(5)
	var k big.Int
	k.SetBytes(m)
	nafM := math.OmegaNAF(&k, baseOmega)
	k.SetBytes(n)
	nafN := math.OmegaNAF(&k, nOmega)
	// Zero-pad the shorter recoding so both are scanned in lockstep.
	if len(nafM) > len(nafN) {
		nafN = append(nafN, make([]int32, len(nafM)-len(nafN))...)
	} else if len(nafM) < len(nafN) {
		nafM = append(nafM, make([]int32, len(nafN)-len(nafM))...)
	}
	TabQ := newAffinePoint(xQ, yQ).oddMultiples(nOmega)
	var jR jacobianPoint
	var aR affinePoint
	P := zeroPoint().toJacobian()
	for i := len(nafN) - 1; i >= 0; i-- {
		P.double()
		// Generator point
		if nafM[i] != 0 {
			idxM := absolute(nafM[i]) >> 1
			aR = baseOddMultiples[idxM]
			if nafM[i] < 0 {
				aR.neg()
			}
			P.mixadd(P, &aR)
		}
		// Input point
		if nafN[i] != 0 {
			idxN := absolute(nafN[i]) >> 1
			jR = TabQ[idxN]
			if nafN[i] < 0 {
				jR.neg()
			}
			P.add(P, &jR)
		}
	}
	return P.toAffine().toInt()
}
// absolute returns always a positive value.
// Branchless two's-complement absolute value: XOR with the sign mask and
// subtract it (equivalent to the add-then-XOR form).
func absolute(x int32) int32 {
	sign := x >> 31
	return (x ^ sign) - sign
}
If a certain value cannot be directly converted to another, the zero value
of the destination type is returned instead.
*/
package to
import (
"fmt"
"reflect"
"regexp"
"strconv"
"time"
)
// Reflect sentinels for the time-based types handled by this package.
// NOTE(review): not referenced in this portion of the file — presumably
// used elsewhere; confirm before removing.
var (
	durationType = reflect.TypeOf(time.Duration(0))
	timeType     = reflect.TypeOf(time.Time{})
)
const (
	// digits is the lookup table for manual integer-to-ASCII conversion.
	digits = "0123456789"
	// uintbuflen is the maximum number of decimal digits in a uint64.
	uintbuflen = 20
)
const (
	// KindTime is reserved for Time kind.
	// The large offset keeps these custom kinds well clear of the values
	// reflect.Kind itself defines.
	KindTime reflect.Kind = iota + 1000000000
	// KindDuration is reserved for Duration kind.
	KindDuration
)
// strToTimeFormats lists the layouts tried, in order, by Time when parsing
// a date string; more specific layouts come first so they win over shorter
// ones.
var strToTimeFormats = []string{
	"2006-01-02 15:04:05 Z0700 MST",
	"2006-01-02 15:04:05 Z07:00 MST",
	"2006-01-02 15:04:05 Z0700 -0700",
	"Mon Jan _2 15:04:05 -0700 MST 2006",
	time.RFC822Z, // "02 Jan 06 15:04 -0700"
	time.RFC3339, // "2006-01-02T15:04:05Z07:00", RFC3339Nano
	"2006-01-02 15:04:05 -0700",
	"2006-01-02 15:04:05 Z07:00",
	time.RubyDate, // "Mon Jan 02 15:04:05 -0700 2006"
	time.RFC1123Z, // "Mon, 02 Jan 2006 15:04:05 -0700"
	time.RFC822,   // "02 Jan 06 15:04 MST",
	"2006-01-02 15:04:05 MST",
	time.UnixDate, // "Mon Jan _2 15:04:05 MST 2006",
	time.RFC1123,  // "Mon, 02 Jan 2006 15:04:05 MST",
	time.RFC850,   // "Monday, 02-Jan-06 15:04:05 MST",
	time.Kitchen,  // "3:04PM"
	"01/02/06",
	"2006-01-02",
	"2006/01/02",
	"01/02/2006",
	"Jan _2, 2006",
	"01/02/06 15:04",
	time.Stamp, // "Jan _2 15:04:05", time.StampMilli, time.StampMicro, time.StampNano,
	time.ANSIC, // "Mon Jan _2 15:04:05 2006"
	"2006-01-02 15:04",
	"2006-01-02T15:04",
	"01/02/2006 15:04",
	"01/02/06 15:04:05",
	"01/02/2006 15:04:05",
	"2006-01-02 15:04:05",
	"2006-01-02T15:04:05",
	"_2/Jan/2006 15:04:05",
}
// strToDurationMatches maps clock-style duration patterns ("H:MM",
// "H:MM:SS", "H:MM:SS.fff") to parsers producing the equivalent
// time.Duration. The sign of the whole value comes from the textual hour
// field, so "-0:30" is parsed as minus thirty minutes.
//
// Fixes over the previous version: the fractional-seconds pattern now
// escapes the dot (it used to match any character), the sign is taken from
// the hour text rather than its numeric value (which silently dropped the
// sign of "-0:MM"), and the three near-identical closures share one helper.
var strToDurationMatches = map[*regexp.Regexp]func([][][]byte) (time.Duration, error){
	regexp.MustCompile(`^(\-?\d+):(\d+)$`): func(m [][][]byte) (time.Duration, error) {
		return clockToDuration(m[0][1], m[0][2], nil, nil)
	},
	regexp.MustCompile(`^(\-?\d+):(\d+):(\d+)$`): func(m [][][]byte) (time.Duration, error) {
		return clockToDuration(m[0][1], m[0][2], m[0][3], nil)
	},
	regexp.MustCompile(`^(\-?\d+):(\d+):(\d+)\.(\d+)$`): func(m [][][]byte) (time.Duration, error) {
		return clockToDuration(m[0][1], m[0][2], m[0][3], m[0][4])
	},
}

// clockToDuration assembles a Duration from hour, minute, optional second
// and optional fractional-second fields. A leading '-' on the hour field
// negates the whole result.
func clockToDuration(hrsB, minB, secB, fracB []byte) (time.Duration, error) {
	sign := time.Duration(1)
	if len(hrsB) > 0 && hrsB[0] == '-' {
		sign = -1
		hrsB = hrsB[1:]
	}
	i64, err := Int64(hrsB)
	if err != nil {
		return time.Duration(0), err
	}
	total := time.Hour * time.Duration(i64)
	i64, err = Int64(minB)
	if err != nil {
		return time.Duration(0), err
	}
	total += time.Minute * time.Duration(i64)
	if secB != nil {
		i64, err = Int64(secB)
		if err != nil {
			return time.Duration(0), err
		}
		total += time.Second * time.Duration(i64)
	}
	if fracB != nil {
		// Right-pad the fractional digits to nanosecond precision; copy
		// first so we never append into the regexp's backing array.
		frac := append([]byte{}, fracB...)
		for len(frac) < 9 {
			frac = append(frac, '0')
		}
		i64, err = Int64(frac[:9])
		if err != nil {
			return time.Duration(0), err
		}
		total += time.Duration(i64)
	}
	return sign * total, nil
}
// strToDuration parses a duration string, first with time.ParseDuration
// and then against the clock-style patterns in strToDurationMatches.
func strToDuration(v string) (time.Duration, error) {
	if d, err := time.ParseDuration(v); err == nil {
		return d, nil
	}
	raw := []byte(v)
	for pattern, parse := range strToDurationMatches {
		if m := pattern.FindAllSubmatch(raw, -1); m != nil {
			return parse(m)
		}
	}
	return time.Duration(0), fmt.Errorf("Could not convert %q to Duration", v)
}
// uint64ToBytes returns the decimal ASCII representation of v.
// The previous hand-rolled digit loop duplicated the standard library;
// strconv.AppendUint produces identical output.
func uint64ToBytes(v uint64) []byte {
	return strconv.AppendUint(nil, v, 10)
}
// int64ToBytes returns the decimal ASCII representation of v, including a
// leading '-' for negative values. strconv.AppendInt replaces the previous
// manual negate-and-prefix logic and handles math.MinInt64 correctly.
func int64ToBytes(v int64) []byte {
	return strconv.AppendInt(nil, v, 10)
}
// float32ToBytes renders v using the shortest 'g' representation that
// round-trips at 32-bit precision.
func float32ToBytes(v float32) []byte {
	return strconv.AppendFloat(make([]byte, 0, 16), float64(v), 'g', -1, 32)
}
// float64ToBytes renders v using the shortest 'g' representation that
// round-trips at 64-bit precision.
func float64ToBytes(v float64) []byte {
	return strconv.AppendFloat(make([]byte, 0, 24), v, 'g', -1, 64)
}
// complex128ToBytes renders v as "(a+bi)"; the '+' is only inserted when
// the imaginary part is non-negative (negative values carry their own '-').
func complex128ToBytes(v complex128) []byte {
	out := strconv.AppendFloat([]byte{'('}, real(v), 'g', -1, 64)
	if imag(v) >= 0 {
		out = append(out, '+')
	}
	out = strconv.AppendFloat(out, imag(v), 'g', -1, 64)
	return append(out, 'i', ')')
}
// Time converts a date string into a time.Time value, several date formats are tried.
func Time(val interface{}) (time.Time, error) {
	str := String(val)
	for _, layout := range strToTimeFormats {
		if parsed, err := time.ParseInLocation(layout, str, time.Local); err == nil {
			return parsed, nil
		}
	}
	return time.Time{}, fmt.Errorf("Could not convert %q to Time", val)
}
// Duration tries to convert the argument into a time.Duration value. Returns
// time.Duration(0) if any error occurs.
// Integer inputs are interpreted as a raw nanosecond count; anything else
// is parsed textually.
func Duration(val interface{}) (time.Duration, error) {
	switch n := val.(type) {
	case int:
		return time.Duration(int64(n)), nil
	case int8:
		return time.Duration(int64(n)), nil
	case int16:
		return time.Duration(int64(n)), nil
	case int32:
		return time.Duration(int64(n)), nil
	case int64:
		return time.Duration(n), nil
	case uint:
		return time.Duration(int64(n)), nil
	case uint8:
		return time.Duration(int64(n)), nil
	case uint16:
		return time.Duration(int64(n)), nil
	case uint32:
		return time.Duration(int64(n)), nil
	case uint64:
		return time.Duration(int64(n)), nil
	}
	return strToDuration(String(val))
}
// Bytes tries to convert the argument into a []byte array. Returns []byte{} if any
// error occurs.
//
// Integers are rendered in decimal, floats with the shortest 'g' form,
// complex numbers as "(a+bi)", booleans as "true"/"false"; anything
// unrecognized falls back to fmt.Sprintf("%v", ...).
// Cleanups: `t == true` replaced with `t`, and the redundant int64/uint64
// self-conversions removed.
func Bytes(val interface{}) []byte {
	if val == nil {
		return []byte{}
	}
	switch t := val.(type) {
	case int:
		return int64ToBytes(int64(t))
	case int8:
		return int64ToBytes(int64(t))
	case int16:
		return int64ToBytes(int64(t))
	case int32:
		return int64ToBytes(int64(t))
	case int64:
		return int64ToBytes(t)
	case uint:
		return uint64ToBytes(uint64(t))
	case uint8:
		return uint64ToBytes(uint64(t))
	case uint16:
		return uint64ToBytes(uint64(t))
	case uint32:
		return uint64ToBytes(uint64(t))
	case uint64:
		return uint64ToBytes(t)
	case float32:
		return float32ToBytes(t)
	case float64:
		return float64ToBytes(t)
	case complex128:
		return complex128ToBytes(t)
	case complex64:
		return complex128ToBytes(complex128(t))
	case bool:
		if t {
			return []byte("true")
		}
		return []byte("false")
	case string:
		return []byte(t)
	case []byte:
		return t
	}
	return []byte(fmt.Sprintf("%v", val))
}
// String tries to convert the argument into a string. Returns "" if any error occurs.
//
// Integers are rendered in decimal, floats with the shortest 'g' form,
// complex numbers as "(a+bi)", booleans as "true"/"false"; anything
// unrecognized falls back to fmt.Sprintf("%v", ...).
func String(val interface{}) string {
	if val == nil {
		return ""
	}
	switch t := val.(type) {
	case int:
		return strconv.Itoa(t)
	case int8:
		return strconv.FormatInt(int64(t), 10)
	case int16:
		return strconv.FormatInt(int64(t), 10)
	case int32:
		return strconv.FormatInt(int64(t), 10)
	case int64:
		return strconv.FormatInt(t, 10)
	case uint:
		return strconv.FormatUint(uint64(t), 10)
	case uint8:
		return strconv.FormatUint(uint64(t), 10)
	case uint16:
		return strconv.FormatUint(uint64(t), 10)
	case uint32:
		return strconv.FormatUint(uint64(t), 10)
	case uint64:
		return strconv.FormatUint(t, 10)
	case float32:
		return strconv.FormatFloat(float64(t), 'g', -1, 32)
	case float64:
		return strconv.FormatFloat(t, 'g', -1, 64)
	case complex128:
		return string(complex128ToBytes(t))
	case complex64:
		return string(complex128ToBytes(complex128(t)))
	case bool:
		if t {
			return "true"
		}
		return "false"
	case string:
		return t
	case []byte:
		return string(t)
	}
	return fmt.Sprintf("%v", val)
}
// Slice converts the argument into a []interface{}. A []interface{} is
// returned as-is, nil yields an empty slice, any other slice has its
// elements boxed one by one, and non-slice values produce an error.
// Cleanups: the shadowed inner `list` and the reflect.Value Set round-trip
// are replaced with a direct per-element Interface() copy.
func Slice(val interface{}) ([]interface{}, error) {
	if si, ok := val.([]interface{}); ok {
		return si, nil
	}
	if val == nil {
		return []interface{}{}, nil
	}
	vval := reflect.ValueOf(val)
	if vval.Kind() != reflect.Slice {
		return nil, fmt.Errorf("Could not convert %q to Slice", val)
	}
	out := make([]interface{}, vval.Len())
	for i := range out {
		out[i] = vval.Index(i).Interface()
	}
	return out, nil
}
// Map converts the argument into a map[string]interface{}. A value already
// of that type is returned as-is, nil yields an empty map, any other map
// has its keys stringified via String, and non-map values produce an
// error. Cleanups: direct map assignment replaces the reflect SetMapIndex
// round-trip.
func Map(val interface{}) (map[string]interface{}, error) {
	if msi, ok := val.(map[string]interface{}); ok {
		return msi, nil
	}
	m := map[string]interface{}{}
	if val == nil {
		return m, nil
	}
	vval := reflect.ValueOf(val)	
	if vval.Kind() != reflect.Map {
		return nil, fmt.Errorf("Could not convert %q to Map", val)
	}
	for _, vkey := range vval.MapKeys() {
		m[String(vkey.Interface())] = vval.MapIndex(vkey).Interface()
	}
	return m, nil
}
// Int64 tries to convert the argument into an int64. Returns int64(0) if any error
// occurs.
//
// Floats are truncated toward zero, booleans map to 1/0, and strings or
// []byte are parsed as base-10 integers.
// Cleanups: `t == true` replaced with `t`, redundant int64(t) on the int64
// case removed.
func Int64(val interface{}) (int64, error) {
	switch t := val.(type) {
	case int:
		return int64(t), nil
	case int8:
		return int64(t), nil
	case int16:
		return int64(t), nil
	case int32:
		return int64(t), nil
	case int64:
		return t, nil
	case uint:
		return int64(t), nil
	case uint8:
		return int64(t), nil
	case uint16:
		return int64(t), nil
	case uint32:
		return int64(t), nil
	case uint64:
		return int64(t), nil
	case bool:
		if t {
			return 1, nil
		}
		return 0, nil
	case float32:
		return int64(t), nil
	case float64:
		return int64(t), nil
	case string:
		return strconv.ParseInt(t, 10, 64)
	case []byte:
		return strconv.ParseInt(string(t), 10, 64)
	}
	return 0, fmt.Errorf("Could not convert %q to int64 %T", val, val)
}
// Uint64 tries to convert the argument into an uint64. Returns uint64(0) if any error
// occurs.
//
// Booleans map to 1/0 and strings or []byte are parsed as base-10
// integers. The []byte case is new, for consistency with Int64 (it
// previously fell through to the error path).
func Uint64(val interface{}) (uint64, error) {
	switch t := val.(type) {
	case int:
		return uint64(t), nil
	case int8:
		return uint64(t), nil
	case int16:
		return uint64(t), nil
	case int32:
		return uint64(t), nil
	case int64:
		return uint64(t), nil
	case uint:
		return uint64(t), nil
	case uint8:
		return uint64(t), nil
	case uint16:
		return uint64(t), nil
	case uint32:
		return uint64(t), nil
	case uint64:
		return t, nil
	case float32:
		return uint64(t), nil
	case float64:
		return uint64(t), nil
	case bool:
		if t {
			return 1, nil
		}
		return 0, nil
	case string:
		return strconv.ParseUint(t, 10, 64)
	case []byte:
		return strconv.ParseUint(string(t), 10, 64)
	}
	return 0, fmt.Errorf("Could not convert %q to uint64", val)
}
// Float64 tries to convert the argument into a float64. Returns float64(0.0) if any
// error occurs.
//
// Booleans map to 1/0; strings are parsed with strconv.ParseFloat.
// Cleanups: the string case now uses the switch variable `t` instead of a
// redundant val.(string) re-assertion, and `t == true` is simply `t`.
func Float64(val interface{}) (float64, error) {
	switch t := val.(type) {
	case int:
		return float64(t), nil
	case int8:
		return float64(t), nil
	case int16:
		return float64(t), nil
	case int32:
		return float64(t), nil
	case int64:
		return float64(t), nil
	case uint:
		return float64(t), nil
	case uint8:
		return float64(t), nil
	case uint16:
		return float64(t), nil
	case uint32:
		return float64(t), nil
	case uint64:
		return float64(t), nil
	case float32:
		return float64(t), nil
	case float64:
		return t, nil
	case bool:
		if t {
			return 1, nil
		}
		return 0, nil
	case string:
		return strconv.ParseFloat(t, 64)
	default:
		return 0, fmt.Errorf("Inconvertible float type %T", t)
	}
}
// Bool tries to convert the argument into a bool. Returns false if any error occurs.
// The value is first stringified, then handed to strconv.ParseBool.
func Bool(value interface{}) (bool, error) {
	return strconv.ParseBool(String(value))
}
// Convert tries to convert the argument into a reflect.Kind element.
//
// Slice/string conversions are special-cased first ([]byte <-> string);
// everything else dispatches to the typed helpers in this package.
// NOTE(review): value must be non-nil — reflect.TypeOf(nil) yields a nil
// Type and calling Kind() on it panics; confirm callers never pass nil.
func Convert(value interface{}, t reflect.Kind) (interface{}, error) {
	switch reflect.TypeOf(value).Kind() {
	case reflect.Slice:
		switch t {
		case reflect.String:
			// []byte converts directly; other slices go through String.
			if reflect.TypeOf(value).Elem().Kind() == reflect.Uint8 {
				return string(value.([]byte)), nil
			}
			return String(value), nil
		case reflect.Slice:
			// Slice to slice: fall through to the generic dispatch below.
		default:
			return nil, fmt.Errorf("Could not convert slice into non-slice.")
		}
	case reflect.String:
		switch t {
		case reflect.Slice:
			return Bytes(value), nil
		}
	}
	// Generic dispatch: narrower integer kinds funnel through the 64-bit
	// helpers and are truncated to the requested width.
	switch t {
	case reflect.String:
		return String(value), nil
	case reflect.Uint64:
		return Uint64(value)
	case reflect.Uint32:
		u, err := Uint64(value)
		if err != nil {
			return 0, err
		}
		return uint32(u), nil
	case reflect.Uint16:
		u, err := Uint64(value)
		if err != nil {
			return 0, err
		}
		return uint16(u), nil
	case reflect.Uint8:
		u, err := Uint64(value)
		if err != nil {
			return 0, err
		}
		return uint8(u), nil
	case reflect.Uint:
		u, err := Uint64(value)
		if err != nil {
			return 0, err
		}
		return uint(u), nil
	case reflect.Int64:
		return Int64(value)
	case reflect.Int32:
		u, err := Int64(value)
		if err != nil {
			return 0, err
		}
		return int32(u), nil
	case reflect.Int16:
		u, err := Int64(value)
		if err != nil {
			return 0, err
		}
		return int16(u), nil
	case reflect.Int8:
		u, err := Int64(value)
		if err != nil {
			return 0, err
		}
		return int8(u), nil
	case reflect.Int:
		u, err := Int64(value)
		if err != nil {
			return 0, err
		}
		return int(u), nil
	case reflect.Float64:
		return Float64(value)
	case reflect.Float32:
		f, err := Float64(value)
		if err != nil {
			return 0, err
		}
		return float32(f), nil
	case reflect.Bool:
		return Bool(value)
	case reflect.Interface:
		return value, nil
	case KindTime:
		return Time(value)
	case KindDuration:
		return Duration(value)
	}
	return nil, fmt.Errorf("Could not convert %s into %s.", reflect.TypeOf(value).Kind(), t)
}
package thnow
import (
"strconv"
"strings"
"time"
)
// ToString converts the wrapped time into a formatted date string.
//
// Optional arguments, in order:
//  1. chronology: "TH" (Thai Buddhist era, the default) or "EN"
//  2. format: "M" (medium) or "F" (full); any other value here is treated
//     as a custom Go time layout
//  3. pattern: an explicit Go time layout, used verbatim with Format
//
// Years are shifted by +/-543 between the Buddhist and common eras; a
// value above 2300 is assumed to already be a Buddhist-era year.
func (date DateNow) ToString(optional ...string) string {
	var defaultFormat = "02 Jan 2006 15:04:05"
	var result = time.Now().Format(defaultFormat)
	day := date.Day()
	weekday := int(date.Weekday())
	month := int(date.Month())
	year := date.Year()
	chronology, format, pattern := "TH", "M", ""
	if len(optional) > 0 && len(optional) <= 1 {
		// One argument: chronology only, validated against the two options.
		if optional[0] == "EN" || optional[0] == "TH" {
			chronology = optional[0]
		}
	} else if len(optional) > 1 && len(optional) <= 2 {
		chronology = optional[0]
		// Second argument is either a format flag or a custom pattern.
		if optional[1] == "M" || optional[1] == "F" {
			format = optional[1]
		} else {
			pattern = optional[1]
		}
	} else if len(optional) > 2 && len(optional) <= 3 {
		chronology = optional[0]
		format = optional[1]
		pattern = optional[2]
	}
	// Normalize the year to the requested era.
	if chronology == "TH" {
		if year < 2300 {
			year = year + 543
		}
	} else {
		if year > 2300 {
			year = year - 543
		}
	}
	if format == "M" && pattern == "" {
		// Medium format: "<day> <abbrev-month> <year>".
		if chronology == "TH" {
			monthArr := [12]string{"ม.ค.", "ก.พ.", "มี.ค.", "เม.ย.", "พ.ค.", "มิ.ย.", "ก.ค.", "ส.ค.", "ก.ย.", "ต.ค.", "พ.ย.", "ธ.ค."}
			result = strconv.Itoa(day) + " " + monthArr[month-1] + " " + strconv.Itoa(year)
		} else {
			monthArr := [12]string{"Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"}
			result = strconv.Itoa(day) + " " + monthArr[month-1] + " " + strconv.Itoa(year)
		}
	} else if format == "F" && pattern == "" {
		// Full format: includes the weekday name.
		if chronology == "TH" {
			dayArr := [7]string{"อาทิตย์", "จันทร์", "อังคาร", "พุธ", "พฤหัสบดี", "ศุกร์", "เสาร์"}
			monthArr := [12]string{
				"มกราคม", "กุมภาพันธ์", "มีนาคม", "เมษายน", "พฤษภาคม", "มิถุนายน",
				"กรกฎาคม", "สิงหาคม", "กันยายน", "ตุลาคม", "พฤศจิกายน", "ธันวาคม"}
			result = "วัน" + dayArr[weekday] + "ที่ " + strconv.Itoa(day) + " " + monthArr[month-1] + " พ.ศ. " + strconv.Itoa(year)
		} else {
			dayArr := [7]string{"Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"}
			monthArr := [12]string{
				"January", "February", "March", "April", "May", "June",
				"July", "August", "September", "October", "November", "December"}
			result = strconv.Itoa(day) + " " + monthArr[month-1] + " " + strconv.Itoa(year) + " " + dayArr[weekday]
		}
	} else {
		// A custom pattern wins; otherwise fall back to the default layout.
		if pattern != "" {
			result = date.Format(pattern)
		} else {
			result = date.Format(defaultFormat)
		}
	}
	return result
}
// ToDate converts a date string into a time.Time value.
//
// The string is split on "/", " " or "-" into day, month and year fields;
// the month may also appear as a Thai or English month abbreviation.
// Buddhist-era years (> 2300) are converted to the common era. Optional
// ints supply hour, minute, second and millisecond, in that order.
//
// NOTE(review): a string that does not split into exactly three fields
// panics on the index accesses below — confirm callers pre-validate.
// NOTE(review): the fourth optional value is passed to time.Date in the
// nanoseconds position despite being named millisecond — verify intent.
func (dateStr StringNow) ToDate(optional ...int) time.Time {
	var result = time.Now()
	var day, month, year int
	var dateArr []string
	hour, minute, second, millisecond := 0, 0, 0, 0
	if len(dateStr.string) == 0 {
		dateStr.string = time.Now().Format("02/01/2006")
	}
	// Try the separators in order: "/", then " ", then "-".
	if dateArr = strings.Split(dateStr.string, "/"); len(dateArr) != 3 {
		if dateArr = strings.Split(dateStr.string, " "); len(dateArr) != 3 {
			dateArr = strings.Split(dateStr.string, "-")
		}
	}
	day, _ = strconv.Atoi(dateArr[0])
	month, _ = strconv.Atoi(dateArr[1])
	year, _ = strconv.Atoi(dateArr[2])
	if len(dateArr) == 3 {
		// Month names: entries 0-11 are Thai abbreviations, 12-23 English.
		thaiMonthArr := [24]string{
			"ม.ค.", "ก.พ.", "มี.ค.", "เม.ย.", "พ.ค.", "มิ.ย.", "ก.ค.", "ส.ค.", "ก.ย.", "ต.ค.", "พ.ย.", "ธ.ค.",
			"Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"}
		for index, m := range thaiMonthArr {
			if dateArr[1] == m {
				month = index + 1
				// English names sit at offsets 12-23; map back to 1-12.
				if month > 12 {
					month = month - 12
				}
				break
			}
		}
	}
	// Convert a Buddhist-era year to the common era.
	if year > 2300 {
		year = year - 543
	}
	if len(optional) > 0 && len(optional) <= 1 {
		hour = optional[0]
	} else if len(optional) > 1 && len(optional) <= 2 {
		hour = optional[0]
		minute = optional[1]
	} else if len(optional) > 2 && len(optional) <= 3 {
		hour = optional[0]
		minute = optional[1]
		second = optional[2]
	} else if len(optional) > 3 && len(optional) <= 4 {
		hour = optional[0]
		minute = optional[1]
		second = optional[2]
		millisecond = optional[3]
	}
	result = time.Date(year, time.Month(month), day, hour, minute, second, millisecond, time.UTC)
	return result
}
// Minute returns the minute component of the wrapped time.
func (date DateNow) Minute() int {
	return date.Time.Minute()
}
// Hour returns the hour component of the wrapped time.
func (date DateNow) Hour() int {
	return date.Time.Hour()
}
// Day returns the day-of-month of the wrapped time.
func (date DateNow) Day() int {
	return date.Time.Day()
}
// Month returns the month of the wrapped time as an int (1-12).
func (date DateNow) Month() int {
	return int(date.Time.Month())
}
// Year get year
func (date DateNow) Year() int {
return int(date.Time.Year())
} | thnow.go | 0.503418 | 0.414958 | thnow.go | starcoder |
package minheighttree
/*
* @lc app=leetcode id=310 lang=golang
*
* [310] Minimum Height Trees
*
* https://leetcode.com/problems/minimum-height-trees/description/
*
* algorithms
* Medium (29.94%)
* Total Accepted: 64.1K
* Total Submissions: 213K
* Testcase Example: '4\n[[1,0],[1,2],[1,3]]'
*
* For an undirected graph with tree characteristics, we can choose any node as
* the root. The result graph is then a rooted tree. Among all possible rooted
* trees, those with minimum height are called minimum height trees (MHTs).
* Given such a graph, write a function to find all the MHTs and return a list
* of their root labels.
*
* Format
* The graph contains n nodes which are labeled from 0 to n - 1. You will be
* given the number n and a list of undirected edges (each edge is a pair of
* labels).
*
* You can assume that no duplicate edges will appear in edges. Since all edges
* are undirected, [0, 1] is the same as [1, 0] and thus will not appear
* together in edges.
*
* Example 1 :
*
*
* Input: n = 4, edges = [[1, 0], [1, 2], [1, 3]]
*
* 0
* |
* 1
* / \
* 2 3
*
* Output: [1]
*
*
* Example 2 :
*
*
* Input: n = 6, edges = [[0, 3], [1, 3], [2, 3], [4, 3], [5, 4]]
*
* 0 1 2
* \ | /
* 3
* |
* 4
* |
* 5
*
* Output: [3, 4]
*
* Note:
*
*
* According to the definition of tree on Wikipedia: “a tree is an undirected
* graph in which any two vertices are connected by exactly one path. In other
* words, any connected graph without simple cycles is a tree.”
* The height of a rooted tree is the number of edges on the longest downward
* path between the root and a leaf.
*
*
*/
// findMinHeightTrees returns the labels of all roots that minimize the
// height of the rooted tree (there are at most two such centroids).
//
// Algorithm: repeatedly peel away the current layer of leaves until at
// most two nodes remain; those survivors are the centroids. This replaces
// the previous implementation, which rescanned the whole edge list on
// every round (O(V*E)) and used non-gofmt `++` spacing, with an
// adjacency-list version that runs in O(V+E).
func findMinHeightTrees(n int, edges [][]int) []int {
	if n == 1 {
		return []int{0}
	}
	adj := make([][]int, n)
	degree := make([]int, n)
	for _, e := range edges {
		adj[e[0]] = append(adj[e[0]], e[1])
		adj[e[1]] = append(adj[e[1]], e[0])
		degree[e[0]]++
		degree[e[1]]++
	}
	// Seed with the initial leaves (degree 1).
	var leaves []int
	for v, d := range degree {
		if d == 1 {
			leaves = append(leaves, v)
		}
	}
	remaining := n
	for remaining > 2 {
		remaining -= len(leaves)
		var next []int
		for _, leaf := range leaves {
			for _, nb := range adj[leaf] {
				degree[nb]--
				if degree[nb] == 1 {
					next = append(next, nb)
				}
			}
		}
		leaves = next
	}
	return leaves
}
/*time limit
func findMinHeightTrees(n int, edges [][]int) []int {
minHeight := math.MaxInt32
var minHeightNode []int
for i := 0; i < n; i++ {
height := heightTree(n, i, edges)
if height == minHeight {
minHeightNode = append(minHeightNode, i)
} else if height < minHeight {
minHeightNode, minHeight = []int{i}, height
}
}
return minHeightNode
}
func heightTree(total, root int, edges [][]int) (height int) {
neighbors, visited := []int{root}, make([]bool, total, total)
visited[root] = true
var newNeighbors []int
for len(neighbors) > 0 {
for _, neighbor := range neighbors {
for _, edge := range edges {
if edge[0] == neighbor && !visited[edge[1]] {
newNeighbors = append(newNeighbors, edge[1])
visited[edge[1]] = true
} else if edge[1] == neighbor && !visited[edge[0]] {
newNeighbors = append(newNeighbors, edge[0])
visited[edge[0]] = true
}
}
}
height++
neighbors, newNeighbors = newNeighbors, neighbors
newNeighbors = newNeighbors[0:0]
}
return
}
*/ | 310-min-height-tree/310.minimum-height-trees.go | 0.895408 | 0.65426 | 310.minimum-height-trees.go | starcoder |
package function
import (
"fmt"
"strconv"
"strings"
"github.com/dolthub/go-mysql-server/sql"
"github.com/dolthub/go-mysql-server/sql/expression"
)
// AsWKT is a function that converts a spatial type into WKT format (alias for AsText)
type AsWKT struct {
	expression.UnaryExpression
}

// Compile-time check that AsWKT satisfies sql.FunctionExpression.
var _ sql.FunctionExpression = (*AsWKT)(nil)
// NewAsWKT creates a new ST_AsWKT function expression wrapping e.
// (The previous comment said "point expression", which was a copy-paste
// leftover.)
func NewAsWKT(e sql.Expression) sql.Expression {
	return &AsWKT{expression.UnaryExpression{Child: e}}
}
// FunctionName implements sql.FunctionExpression
func (p *AsWKT) FunctionName() string {
	// Fixed: previously returned "st_aswkb" (copy-paste from the WKB
	// variant). This expression is ST_AsWKT, as String() also reports.
	return "st_aswkt"
}
// Description implements sql.FunctionExpression
func (p *AsWKT) Description() string {
	// Fixed: WKT is a textual format; the previous message ("binary
	// representation") described ST_AsWKB instead.
	return "returns well-known text representation of given spatial type."
}
// IsNullable implements the sql.Expression interface.
// The result is nullable exactly when the child expression is.
func (p *AsWKT) IsNullable() bool {
	return p.Child.IsNullable()
}
// Type implements the sql.Expression interface.
// WKT output is text, so the result type is LongText.
func (p *AsWKT) Type() sql.Type {
	return sql.LongText
}
// String renders the expression in its SQL call form.
func (p *AsWKT) String() string {
	return "ST_ASWKT(" + p.Child.String() + ")"
}
// WithChildren implements the Expression interface.
// Exactly one child (the geometry expression) is expected.
func (p *AsWKT) WithChildren(children ...sql.Expression) (sql.Expression, error) {
	if len(children) != 1 {
		return nil, sql.ErrInvalidChildrenNumber.New(p, len(children), 1)
	}
	return NewAsWKT(children[0]), nil
}
// PointToWKT converts a sql.Point to a string
// When order is true the two coordinates are emitted swapped (Y before X).
func PointToWKT(p sql.Point, order bool) string {
	first := strconv.FormatFloat(p.X, 'g', -1, 64)
	second := strconv.FormatFloat(p.Y, 'g', -1, 64)
	if order {
		first, second = second, first
	}
	return first + " " + second
}
// LineToWKT converts a sql.LineString to a string
// Points are rendered via PointToWKT and joined with commas.
func LineToWKT(l sql.LineString, order bool) string {
	coords := make([]string, 0, len(l.Points))
	for _, pt := range l.Points {
		coords = append(coords, PointToWKT(pt, order))
	}
	return strings.Join(coords, ",")
}
// PolygonToWKT converts a sql.Polygon to a string
// Each ring is rendered via LineToWKT, parenthesized, and comma-joined.
func PolygonToWKT(p sql.Polygon, order bool) string {
	rings := make([]string, 0, len(p.Lines))
	for _, ring := range p.Lines {
		rings = append(rings, "("+LineToWKT(ring, order)+")")
	}
	return strings.Join(rings, ",")
}
// Eval implements the sql.Expression interface.
// It converts the child geometry value into its WKT string form. A NULL
// child yields NULL; a non-geometry value produces ErrInvalidGISData.
// Coordinate axis order is swapped when the value's SRID is the
// geographic one (see the `order` parameter of PointToWKT).
func (p *AsWKT) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	// Evaluate child
	val, err := p.Child.Eval(ctx, row)
	if err != nil {
		return nil, err
	}
	if val == nil {
		return nil, nil
	}
	var geomType string
	var data string
	// Expect one of the geometry types
	switch v := val.(type) {
	case sql.Point:
		// Mark as point type
		geomType = "POINT"
		data = PointToWKT(v, v.SRID == sql.GeoSpatialSRID)
	case sql.LineString:
		// Mark as linestring type
		geomType = "LINESTRING"
		data = LineToWKT(v, v.SRID == sql.GeoSpatialSRID)
	case sql.Polygon:
		// Mark as Polygon type
		geomType = "POLYGON"
		data = PolygonToWKT(v, v.SRID == sql.GeoSpatialSRID)
	default:
		return nil, sql.ErrInvalidGISData.New("ST_AsWKT")
	}
	return fmt.Sprintf("%s(%s)", geomType, data), nil
}
// GeomFromText is a function that returns a geometry value (point,
// linestring, or polygon — see Eval) parsed from a WKT string.
type GeomFromText struct {
	expression.NaryExpression
}

// Compile-time check that GeomFromText satisfies sql.FunctionExpression.
var _ sql.FunctionExpression = (*GeomFromText)(nil)
// NewGeomFromWKT creates a new ST_GeomFromWKT function expression.
// It accepts 1 to 3 arguments: the WKT string, an optional SRID, and an
// optional axis-order option string.
func NewGeomFromWKT(args ...sql.Expression) (sql.Expression, error) {
	if len(args) < 1 || len(args) > 3 {
		return nil, sql.ErrInvalidArgumentNumber.New("ST_GEOMFROMWKT", "1, 2, or 3", len(args))
	}
	return &GeomFromText{expression.NaryExpression{ChildExpressions: args}}, nil
}
// FunctionName implements sql.FunctionExpression
func (g *GeomFromText) FunctionName() string {
	return "st_geomfromwkt"
}
// Description implements sql.FunctionExpression
func (g *GeomFromText) Description() string {
	// Fixed: Eval parses points, linestrings and polygons, so "point" in
	// the previous message was inaccurate.
	return "returns a new geometry from a WKT string."
}
// Type implements the sql.Expression interface.
// The concrete geometry subtype is only known after evaluation, so the
// generic geometry type is reported here.
func (g *GeomFromText) Type() sql.Type {
	// TODO: return type is determined after Eval, use Geometry for now?
	return sql.GeometryType{}
}
// String renders the expression in its SQL call form.
func (g *GeomFromText) String() string {
	args := make([]string, 0, len(g.ChildExpressions))
	for _, child := range g.ChildExpressions {
		args = append(args, child.String())
	}
	return fmt.Sprintf("ST_GEOMFROMWKT(%s)", strings.Join(args, ","))
}
// WithChildren implements the Expression interface.
// The arity check (1 to 3 children) is delegated to NewGeomFromWKT.
func (g *GeomFromText) WithChildren(children ...sql.Expression) (sql.Expression, error) {
	return NewGeomFromWKT(children...)
}
// ParseWKTHeader should extract the type from the geometry string
// It returns the lowercased geometry type that precedes the first "(" and
// the trimmed text between the outermost parentheses.
func ParseWKTHeader(s string) (string, string, error) {
	open := strings.Index(s, "(")
	// No parenthesis at all means the string is not valid WKT.
	if open == -1 {
		return "", "", sql.ErrInvalidGISData.New("ST_GeomFromText")
	}
	geomType := strings.ToLower(strings.TrimSpace(s[:open]))
	body := strings.TrimSpace(s[open:])
	// The payload must be wrapped in parentheses.
	if body[0] != '(' || body[len(body)-1] != ')' {
		return "", "", sql.ErrInvalidGISData.New("ST_GeomFromText")
	}
	data := strings.TrimSpace(body[1 : len(body)-1])
	return geomType, data, nil
}
// WKTToPoint expects a string like this "1.2 3.4"
// The two whitespace-separated fields are parsed as float64 coordinates;
// when order is true they are stored swapped (second field becomes X).
func WKTToPoint(s string, srid uint32, order bool) (sql.Point, error) {
	fields := strings.Fields(s)
	// Exactly two coordinates are required (an empty string has zero).
	if len(s) == 0 || len(fields) != 2 {
		return sql.Point{}, sql.ErrInvalidGISData.New("ST_PointFromText")
	}
	x, err := strconv.ParseFloat(fields[0], 64)
	if err != nil {
		return sql.Point{}, sql.ErrInvalidGISData.New("ST_PointFromText")
	}
	y, err := strconv.ParseFloat(fields[1], 64)
	if err != nil {
		return sql.Point{}, sql.ErrInvalidGISData.New("ST_PointFromText")
	}
	if order {
		x, y = y, x
	}
	return sql.Point{SRID: srid, X: x, Y: y}, nil
}
// WKTToLine expects a string like "1.2 3.4, 5.6 7.8, ..."
// Each comma-separated segment is parsed as one point via WKTToPoint.
func WKTToLine(s string, srid uint32, order bool) (sql.LineString, error) {
	if len(s) == 0 {
		return sql.LineString{}, sql.ErrInvalidGISData.New("ST_LineFromText")
	}
	parts := strings.Split(s, ",")
	points := make([]sql.Point, len(parts))
	for i, part := range parts {
		pt, err := WKTToPoint(strings.TrimSpace(part), srid, order)
		if err != nil {
			return sql.LineString{}, sql.ErrInvalidGISData.New("ST_LineFromText")
		}
		points[i] = pt
	}
	return sql.LineString{SRID: srid, Points: points}, nil
}
// WKTToPoly Expects a string like "(1 2, 3 4), (5 6, 7 8), ..."
// Each parenthesized group is parsed as a linestring and must form a
// closed linear ring (see isLinearRing) for the polygon to be valid.
func WKTToPoly(s string, srid uint32, order bool) (sql.Polygon, error) {
	var lines []sql.LineString
	for {
		// Look for closing parentheses
		end := strings.Index(s, ")")
		if end == -1 {
			return sql.Polygon{}, sql.ErrInvalidGISData.New("ST_PolyFromText")
		}
		// Extract linestring string; does not include ")"
		lineStr := s[:end]
		// Must start with open parenthesis
		if len(lineStr) == 0 || lineStr[0] != '(' {
			return sql.Polygon{}, sql.ErrInvalidGISData.New("ST_PolyFromText")
		}
		// Remove leading "("
		lineStr = lineStr[1:]
		// Remove leading and trailing whitespace
		lineStr = strings.TrimSpace(lineStr)
		// Parse line
		if line, err := WKTToLine(lineStr, srid, order); err == nil {
			// Check if line is linearring
			if isLinearRing(line) {
				lines = append(lines, line)
			} else {
				return sql.Polygon{}, sql.ErrInvalidGISData.New("ST_PolyFromText")
			}
		} else {
			return sql.Polygon{}, sql.ErrInvalidGISData.New("ST_PolyFromText")
		}
		// Advance the cursor past the consumed ring.
		s = s[end+1:]
		s = strings.TrimSpace(s)
		// Reached end
		if len(s) == 0 {
			break
		}
		// LineStrings must be comma-separated
		if s[0] != ',' {
			return sql.Polygon{}, sql.ErrInvalidGISData.New("ST_PolyFromText")
		}
		// Drop leading comma
		s = s[1:]
		// Trim leading spaces
		s = strings.TrimSpace(s)
	}
	// Create Polygon object
	return sql.Polygon{SRID: srid, Lines: lines}, nil
}
// Eval implements the sql.Expression interface.
//
// Arguments: (wkt [, srid [, axis-order]]). The WKT string is dispatched
// on its header ("point", "linestring" or "polygon"); the optional SRID is
// validated with ValidateSRID, and the optional third argument overrides
// the axis order. A NULL in any argument yields NULL.
func (g *GeomFromText) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	// Evaluate child
	val, err := g.ChildExpressions[0].Eval(ctx, row)
	if err != nil {
		return nil, err
	}
	if val == nil {
		return nil, nil
	}
	// Expect a string, throw error otherwise
	s, ok := val.(string)
	if !ok {
		return nil, sql.ErrInvalidGISData.New("ST_GeomFromText")
	}
	// Determine type, and get data
	geomType, data, err := ParseWKTHeader(s)
	if err != nil {
		return nil, err
	}
	// Determine SRID
	srid := uint32(0)
	if len(g.ChildExpressions) >= 2 {
		s, err := g.ChildExpressions[1].Eval(ctx, row)
		if err != nil {
			return nil, err
		}
		if s == nil {
			return nil, nil
		}
		s, err = sql.Uint32.Convert(s)
		if err != nil {
			return nil, err
		}
		srid = s.(uint32)
	}
	if err = ValidateSRID(srid); err != nil {
		return nil, err
	}
	// Determine xy order; the geographic SRID defaults to swapped axes.
	order := srid == sql.GeoSpatialSRID
	if len(g.ChildExpressions) == 3 {
		o, err := g.ChildExpressions[2].Eval(ctx, row)
		if err != nil {
			return nil, err
		}
		if o == nil {
			return nil, nil
		}
		order, err = ParseAxisOrder(o.(string))
		if err != nil {
			return nil, sql.ErrInvalidArgument.New(g.FunctionName())
		}
	}
	// Parse accordingly
	// TODO: define consts instead of string comparison?
	switch geomType {
	case "point":
		return WKTToPoint(data, srid, order)
	case "linestring":
		return WKTToLine(data, srid, order)
	case "polygon":
		return WKTToPoly(data, srid, order)
	default:
		return nil, sql.ErrInvalidGISData.New("ST_GeomFromText")
	}
}
// PointFromWKT is a function that returns a Point value from a WKT string.
type PointFromWKT struct {
	expression.NaryExpression
}
// compile-time check that PointFromWKT satisfies sql.FunctionExpression
var _ sql.FunctionExpression = (*PointFromWKT)(nil)
// NewPointFromWKT builds an ST_POINTFROMWKT expression over the given
// arguments; between one and three arguments are accepted.
func NewPointFromWKT(args ...sql.Expression) (sql.Expression, error) {
	switch n := len(args); {
	case n >= 1 && n <= 3:
		return &PointFromWKT{expression.NaryExpression{ChildExpressions: args}}, nil
	default:
		return nil, sql.ErrInvalidArgumentNumber.New("ST_POINTFROMWKT", "1, 2, or 3", n)
	}
}
// FunctionName implements sql.FunctionExpression.
func (p *PointFromWKT) FunctionName() string {
	return "st_pointfromwkt"
}

// Description implements sql.FunctionExpression.
func (p *PointFromWKT) Description() string {
	return "returns a new point from a WKT string."
}

// Type implements the sql.Expression interface.
func (p *PointFromWKT) Type() sql.Type {
	return sql.PointType{}
}

// String renders the call in SQL form, e.g. ST_POINTFROMWKT(a,b).
func (p *PointFromWKT) String() string {
	parts := make([]string, 0, len(p.ChildExpressions))
	for _, child := range p.ChildExpressions {
		parts = append(parts, child.String())
	}
	return fmt.Sprintf("ST_POINTFROMWKT(%s)", strings.Join(parts, ","))
}

// WithChildren implements the Expression interface.
func (p *PointFromWKT) WithChildren(children ...sql.Expression) (sql.Expression, error) {
	return NewPointFromWKT(children...)
}
// Eval implements the sql.Expression interface.
// It parses the first child as a WKT string and returns the corresponding
// Point value, honoring an optional SRID (second child) and axis-order
// option (third child). Any NULL argument yields a NULL result.
func (p *PointFromWKT) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	// Evaluate child
	val, err := p.ChildExpressions[0].Eval(ctx, row)
	if err != nil {
		return nil, err
	}
	if val == nil {
		return nil, nil
	}
	// Must be of type string.
	// NOTE(review): these errors are named "ST_PointFromText" while the
	// function is registered as st_pointfromwkt — confirm the intended name.
	s, ok := val.(string)
	if !ok {
		return nil, sql.ErrInvalidGISData.New("ST_PointFromText")
	}
	// Parse Header
	geomType, data, err := ParseWKTHeader(s)
	if err != nil {
		return nil, sql.ErrInvalidGISData.New("ST_PointFromText")
	}
	// Not a point, throw error.
	// Fixed: the condition was inverted (== instead of !=), which rejected
	// every valid point and accepted every other geometry type; the sibling
	// Line/Poly functions use !=.
	if geomType != "point" {
		return nil, sql.ErrInvalidGISData.New("ST_PointFromText")
	}
	// Determine SRID; defaults to 0 when no second argument is given.
	srid := uint32(0)
	if len(p.ChildExpressions) >= 2 {
		s, err := p.ChildExpressions[1].Eval(ctx, row)
		if err != nil {
			return nil, err
		}
		if s == nil {
			return nil, nil
		}
		s, err = sql.Uint32.Convert(s)
		if err != nil {
			return nil, err
		}
		srid = s.(uint32)
	}
	if err = ValidateSRID(srid); err != nil {
		return nil, err
	}
	// Determine xy order
	order := false
	if len(p.ChildExpressions) == 3 {
		o, err := p.ChildExpressions[2].Eval(ctx, row)
		if err != nil {
			return nil, err
		}
		if o == nil {
			return nil, nil
		}
		order, err = ParseAxisOrder(o.(string))
		if err != nil {
			return nil, sql.ErrInvalidArgument.New(p.FunctionName())
		}
	}
	return WKTToPoint(data, srid, order)
}
// LineFromWKT is a function that returns a LineString value from a WKT string.
// (The previous comment said "point type", which was a copy-paste slip.)
type LineFromWKT struct {
	expression.NaryExpression
}
// compile-time check that LineFromWKT satisfies sql.FunctionExpression
var _ sql.FunctionExpression = (*LineFromWKT)(nil)
// NewLineFromWKT creates a new line expression.
// Between one and three arguments are accepted (WKT string, optional SRID,
// optional axis-order option).
func NewLineFromWKT(args ...sql.Expression) (sql.Expression, error) {
	if len(args) < 1 || len(args) > 3 {
		// Fixed: the message said "1 or 2" while the check accepts 1, 2, or 3,
		// matching the sibling ST_POINTFROMWKT/ST_POLYFROMWKT constructors.
		return nil, sql.ErrInvalidArgumentNumber.New("ST_LINEFROMWKT", "1, 2, or 3", len(args))
	}
	return &LineFromWKT{expression.NaryExpression{ChildExpressions: args}}, nil
}
// FunctionName implements sql.FunctionExpression.
func (l *LineFromWKT) FunctionName() string {
	return "st_linefromwkt"
}

// Description implements sql.FunctionExpression.
func (l *LineFromWKT) Description() string {
	return "returns a new line from a WKT string."
}

// Type implements the sql.Expression interface.
func (l *LineFromWKT) Type() sql.Type {
	return sql.LineStringType{}
}

// String renders the call in SQL form, e.g. ST_LINEFROMWKT(a,b).
func (l *LineFromWKT) String() string {
	parts := make([]string, 0, len(l.ChildExpressions))
	for _, child := range l.ChildExpressions {
		parts = append(parts, child.String())
	}
	return fmt.Sprintf("ST_LINEFROMWKT(%s)", strings.Join(parts, ","))
}

// WithChildren implements the Expression interface.
func (l *LineFromWKT) WithChildren(children ...sql.Expression) (sql.Expression, error) {
	return NewLineFromWKT(children...)
}
// Eval implements the sql.Expression interface.
// It parses the first child as a WKT string and returns the corresponding
// LineString value, honoring an optional SRID (second child) and axis-order
// option (third child). Any NULL argument yields a NULL result.
func (l *LineFromWKT) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	// Evaluate child
	val, err := l.ChildExpressions[0].Eval(ctx, row)
	if err != nil {
		return nil, err
	}
	if val == nil {
		return nil, nil
	}
	// Expect a string, throw error otherwise
	s, ok := val.(string)
	if !ok {
		return nil, sql.ErrInvalidGISData.New("ST_LineFromText")
	}
	// Parse Header
	geomType, data, err := ParseWKTHeader(s)
	if err != nil {
		return nil, sql.ErrInvalidGISData.New("ST_LineFromText")
	}
	// Not a line, throw error
	if geomType != "linestring" {
		return nil, sql.ErrInvalidGISData.New("ST_LineFromText")
	}
	// Evaluate second argument as the SRID; defaults to 0 when absent.
	srid := uint32(0)
	if len(l.ChildExpressions) >= 2 {
		s, err := l.ChildExpressions[1].Eval(ctx, row)
		if err != nil {
			return nil, err
		}
		if s == nil {
			return nil, nil
		}
		s, err = sql.Uint32.Convert(s)
		if err != nil {
			return nil, err
		}
		srid = s.(uint32)
	}
	if err = ValidateSRID(srid); err != nil {
		return nil, err
	}
	// Determine xy order (was "xt order" — typo)
	order := false
	if len(l.ChildExpressions) == 3 {
		o, err := l.ChildExpressions[2].Eval(ctx, row)
		if err != nil {
			return nil, err
		}
		if o == nil {
			return nil, nil
		}
		order, err = ParseAxisOrder(o.(string))
		if err != nil {
			// Fixed for consistency: GeomFromText, PointFromWKT, and PolyFromWKT
			// all report a bad axis-order option as an invalid argument, not as
			// invalid GIS data.
			return nil, sql.ErrInvalidArgument.New(l.FunctionName())
		}
	}
	return WKTToLine(data, srid, order)
}
// PolyFromWKT is a function that returns a Polygon value from a WKT string.
type PolyFromWKT struct {
	expression.NaryExpression
}
// compile-time check that PolyFromWKT satisfies sql.FunctionExpression
var _ sql.FunctionExpression = (*PolyFromWKT)(nil)
// NewPolyFromWKT builds an ST_POLYFROMWKT expression over the given
// arguments; between one and three arguments are accepted.
func NewPolyFromWKT(args ...sql.Expression) (sql.Expression, error) {
	switch n := len(args); {
	case n >= 1 && n <= 3:
		return &PolyFromWKT{expression.NaryExpression{ChildExpressions: args}}, nil
	default:
		return nil, sql.ErrInvalidArgumentNumber.New("ST_POLYFROMWKT", "1, 2, or 3", n)
	}
}
// FunctionName implements sql.FunctionExpression.
func (p *PolyFromWKT) FunctionName() string {
	return "st_polyfromwkt"
}

// Description implements sql.FunctionExpression.
func (p *PolyFromWKT) Description() string {
	return "returns a new polygon from a WKT string."
}

// Type implements the sql.Expression interface.
func (p *PolyFromWKT) Type() sql.Type {
	return sql.PolygonType{}
}

// String renders the call in SQL form, e.g. ST_POLYFROMWKT(a,b).
func (p *PolyFromWKT) String() string {
	parts := make([]string, 0, len(p.ChildExpressions))
	for _, child := range p.ChildExpressions {
		parts = append(parts, child.String())
	}
	return fmt.Sprintf("ST_POLYFROMWKT(%s)", strings.Join(parts, ","))
}

// WithChildren implements the Expression interface.
func (p *PolyFromWKT) WithChildren(children ...sql.Expression) (sql.Expression, error) {
	return NewPolyFromWKT(children...)
}
// Eval implements the sql.Expression interface.
func (p *PolyFromWKT) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
// Evaluate child
val, err := p.ChildExpressions[0].Eval(ctx, row)
if err != nil {
return nil, err
}
if val == nil {
return nil, nil
}
// Expect a string, throw error otherwise
s, ok := val.(string)
if !ok {
return nil, sql.ErrInvalidGISData.New("ST_PolyFromWKT")
}
// Parse Header
geomType, data, err := ParseWKTHeader(s)
if err != nil {
return nil, sql.ErrInvalidGISData.New("ST_PolyFromWKT")
}
// Not a polygon, throw error
if geomType != "polygon" {
return nil, sql.ErrInvalidGISData.New("ST_PolyFromText")
}
// Determine SRID
srid := uint32(0)
if len(p.ChildExpressions) > 2 {
s, err := p.ChildExpressions[1].Eval(ctx, row)
if err != nil {
return nil, err
}
if s == nil {
return nil, nil
}
s, err = sql.Uint32.Convert(s)
if err != nil {
return nil, err
}
srid = s.(uint32)
}
if err = ValidateSRID(srid); err != nil {
return nil, err
}
// Determine xy order
order := false
if len(p.ChildExpressions) == 3 {
o, err := p.ChildExpressions[2].Eval(ctx, row)
if err != nil {
return nil, err
}
if o == nil {
return nil, nil
}
order, err = ParseAxisOrder(o.(string))
if err != nil {
return nil, sql.ErrInvalidArgument.New(p.FunctionName())
}
}
return WKTToPoly(data, srid, order)
} | sql/expression/function/wkt.go | 0.670932 | 0.529081 | wkt.go | starcoder |
package ns
// Nsassignment is the configuration for the NITRO assignment resource.
type Nsassignment struct {
	// Name is the name of the assignment. Must begin with a letter, number,
	// or underscore, and may contain letters, numbers, and the characters
	// - . # (space) @ = : _. Can be changed after the assignment is added.
	// CLI only: quote the name if it contains spaces.
	Name string `json:"name,omitempty"`
	// Variable is the left-hand side of the assignment: $variable-name for a
	// singleton variable, or $variable-name[key-expression], where
	// key-expression evaluates to a text string selecting a map entry.
	Variable string `json:"variable,omitempty"`
	// Set is an expression evaluated and assigned to the left-hand variable.
	Set string `json:"set,omitempty"`
	// Add is an expression evaluated and added to the left-hand variable.
	// NOTE(review): the JSON tag is capitalized ("Add") unlike every other
	// field here — confirm against the NITRO API schema before normalizing.
	Add string `json:"Add,omitempty"`
	// Sub is an expression evaluated and subtracted from the left-hand variable.
	Sub string `json:"sub,omitempty"`
	// Append is an expression evaluated and appended to the left-hand variable.
	Append string `json:"append,omitempty"`
	// Clear clears the variable value: deallocates a text value and, for a
	// map, the text key.
	Clear bool `json:"clear,omitempty"`
	// Comment can be used to preserve information about this rewrite action.
	Comment string `json:"comment,omitempty"`
	// Newname is the new name for the assignment; same naming rules as Name.
	Newname string `json:"newname,omitempty"`
	//------- Read only Parameter ---------;
	Hits string `json:"hits,omitempty"`
	Undefhits string `json:"undefhits,omitempty"`
	Referencecount string `json:"referencecount,omitempty"`
}
package rbtree
import "github.com/fanyang01/tree/common"
// BLACK and RED are the node colors of the red-black tree.
const (
	BLACK = false
	RED = true
)
// Node is the node in a tree.
type Node struct {
	left, right, p *Node // children and parent pointers
	color bool // RED or BLACK
	v interface{} // payload
}
// Tree is a red-black tree.
type Tree struct {
	size int // number of nodes currently stored
	root *Node
	compare common.CompareFunc // ordering over payloads
}
// Left returns the left child of n.
func (n *Node) Left() *Node { return n.left }
// Right returns the right child of n.
func (n *Node) Right() *Node { return n.right }
// Parent returns the parent of n.
func (n *Node) Parent() *Node { return n.p }
// Value returns the payload contained in n.
func (n *Node) Value() interface{} { return n.v }
// New creates an initialized, empty tree ordered by f.
func New(f common.CompareFunc) *Tree {
	// size and root take their zero values (0 and nil).
	return &Tree{compare: f}
}
// Root returns the root of t.
func (t *Tree) Root() *Node {
	return t.root
}

// IsEmpty reports whether t contains no elements.
func (t *Tree) IsEmpty() bool {
	return t.Len() == 0
}

// Len returns the number of elements stored in t.
func (t *Tree) Len() int {
	return t.size
}

// Clean resets a tree structure to its initial state and returns it.
func (t *Tree) Clean() *Tree {
	t.root = nil
	t.size = 0
	return t
}

// Has reports whether v is already present in t.
func (t *Tree) Has(v interface{}) bool {
	return t.search(t.root, v) != nil
}
// Replace swaps the payload of n for v and returns the previous payload.
// v must compare equal to the existing payload; otherwise the node is left
// untouched and false is returned.
func (t *Tree) Replace(n *Node, v interface{}) (interface{}, bool) {
	old := n.v
	if t.compare(old, v) != 0 {
		return old, false
	}
	n.v = v
	return old, true
}
// Search tries to find the node containing payload v.
// It returns the node on success and nil when no node matches.
func (t *Tree) Search(v interface{}) *Node {
	return t.search(t.root, v)
}

// search walks the subtree rooted at r looking for v.
func (t *Tree) search(r *Node, v interface{}) *Node {
	for x := r; x != nil; {
		switch cmp := t.compare(v, x.v); {
		case cmp < 0:
			x = x.left
		case cmp > 0:
			x = x.right
		default:
			return x
		}
	}
	return nil
}
// Insert inserts v into correct place and returns a handle.
// It will refuse to insert v when v is already in t, and then returns the
// existing node; the boolean result reports whether a new node was created.
func (t *Tree) Insert(v interface{}) (*Node, bool) {
	var cmp int
	var p *Node
	x := t.root
	// Walk down to the attachment point, tracking the would-be parent p.
	for x != nil {
		p = x
		if cmp = t.compare(v, x.v); cmp < 0 {
			x = x.left
		} else if cmp > 0 {
			x = x.right
		} else {
			// Disable duplicate v
			return x, false
		}
	}
	n := t.newNode(v)
	n.p = p
	if p == nil {
		// Empty tree: the new node becomes the root.
		t.root = n
	} else if cmp = t.compare(v, p.v); cmp < 0 {
		p.left = n
	} else {
		p.right = n
	}
	// Restore red-black invariants after the insertion.
	t.insertFix(n)
	t.size++
	return n, true
}
// DeleteValue removes the node whose payload compares equal to v.
// The boolean result indicates whether such a node was found; on success the
// removed payload is returned.
func (t *Tree) DeleteValue(v interface{}) (interface{}, bool) {
	x := t.Search(v)
	if x == nil {
		return nil, false
	}
	return t.Delete(x), true
}
// Delete removes x from t and returns its payload.
// x must be a node owned by t (e.g. obtained from Search or Insert).
func (t *Tree) Delete(x *Node) interface{} {
	// z is the node that is MOVED to a new place,
	// and color is the color of the node previously in this place.
	var z, p *Node
	color := x.color
	if x.left == nil {
		// At most one child: splice x out directly.
		z, p = x.right, x.p
		t.transplant(x, x.right)
	} else if x.right == nil {
		z, p = x.left, x.p
		t.transplant(x, x.left)
	} else {
		// y will replace x.
		// NOTE(review): the original comment called y "the maximum node on
		// x's right subtree", but the loop follows left links, so y is the
		// MINIMUM of the right subtree (x's in-order successor) — the
		// standard choice; the old comment appears stale.
		y := func(n *Node) *Node {
			for n.left != nil {
				n = n.left
			}
			return n
		}(x.right)
		color = y.color
		// NOTE: it's important to update p to point to parent of y.right
		z = y.right
		// Avoid y.p to point to y itself
		if x.right == y {
			p = y
		} else {
			t.transplant(y, y.right)
			p = y.p
			y.right = x.right
			x.right.p = y
		}
		y.left = x.left
		x.left.p = y
		t.transplant(x, y)
		y.color = x.color
	}
	if color == BLACK {
		// Removing a black node may break the black-height invariant.
		t.deleteFix(p, z)
	}
	t.size--
	return x.v
}
package analysis
import (
"time"
"infra/appengine/luci-migration/storage"
)
const (
	// lowSpeed is the lower speed threshold. If speed drops below this,
	// the builder is not WAI.
	lowSpeed = 0.8
	// highSpeed is the target speed. If speed is at least highSpeed, the
	// builder is WAI.
	highSpeed = 0.9
	// targetHealth is the desired fraction of groups that are NOT
	// LUCI-only-infra-failing (out of total groups). Missing the target is
	// still WAI if the builder is correct and fast enough, but not if the
	// builder is correct yet below highSpeed.
	targetHealth = 0.8
)
// This file is the heart of this package.
// We anticipate this code to get smarter as we discover new patterns of flakes.
// diff is a result of comparison of LUCI and Buildbot tryjobs.
// It is produced by compare and consumed by tmplDetails.
type diff struct {
	// MinBuildAge is the minimum build age considered.
	// NOTE(review): not read or written in this chunk — confirm its producer.
	MinBuildAge time.Duration
	storage.BuilderMigration
	// StatusReason is a human-readable explanation of the chosen Status.
	StatusReason string
	// TotalGroups is the total number of patchset groups analyzed.
	TotalGroups int
	// ConsistentGroups: LUCI and Buildbot agree on success/failure.
	ConsistentGroups []*group
	// FalseFailures: LUCI failed while Buildbot succeeded.
	FalseFailures []*group
	// FalseSuccesses: LUCI succeeded while Buildbot failed.
	FalseSuccesses []*group
	// MatchingInfraFailures: infra failures observed on both sides.
	MatchingInfraFailures []*group
	// LUCIOnlyInfraFailures: infra failures observed only on LUCI.
	LUCIOnlyInfraFailures []*group
	// BuildbotOnlyInfraFailures: infra failures observed only on Buildbot.
	BuildbotOnlyInfraFailures []*group
	// AvgTimeDeltaGroups is the number of groups contributing to AvgTimeDelta.
	AvgTimeDeltaGroups int
	AvgTimeDelta time.Duration // Average overhead of LUCI across patchsets.
}
// CorrectnessGroups returns the number of groups usable for the correctness
// estimate: consistent groups plus false failures plus false successes.
func (d *diff) CorrectnessGroups() int {
	usable := len(d.ConsistentGroups)
	usable += len(d.FalseFailures)
	usable += len(d.FalseSuccesses)
	return usable
}

// RejectedCorrectnessGroups returns the number of groups excluded from the
// correctness estimate.
func (d *diff) RejectedCorrectnessGroups() int {
	return d.TotalGroups - d.CorrectnessGroups()
}
// compare compares Buildbot and LUCI builds within groups and summarizes the
// result as a diff, including the migration Status decision.
// minCorrectnessGroups is the minimum number of comparable groups required
// for a confident verdict; currentStatus lets a borderline speed keep an
// existing WAI status (hysteresis).
func compare(groups []*group, minCorrectnessGroups int, currentStatus storage.MigrationStatus) *diff {
	comp := &diff{
		BuilderMigration: storage.BuilderMigration{AnalysisTime: time.Now().UTC()},
		TotalGroups: len(groups),
	}
	buildbotBuilds := 0
	avgBuildbotTimeSecs := 0.0
	for _, g := range groups {
		// Only trustworthy groups count toward the correctness estimate.
		if g.trustworthy() {
			switch luciSuccess := g.LUCI.success(); {
			case luciSuccess == g.Buildbot.success():
				comp.ConsistentGroups = append(comp.ConsistentGroups, g)
			case luciSuccess:
				comp.FalseSuccesses = append(comp.FalseSuccesses, g)
			default:
				comp.FalseFailures = append(comp.FalseFailures, g)
			}
		}
		// Check for Infra Failures. Dedup all of a group's failures into one instance of failing group.
		luciInfraFailed := g.LUCI.countInfraFailures() > 0
		bbInfraFailed := g.Buildbot.countInfraFailures() > 0
		switch {
		case luciInfraFailed && bbInfraFailed:
			comp.MatchingInfraFailures = append(comp.MatchingInfraFailures, g)
		case luciInfraFailed:
			comp.LUCIOnlyInfraFailures = append(comp.LUCIOnlyInfraFailures, g)
		case bbInfraFailed:
			comp.BuildbotOnlyInfraFailures = append(comp.BuildbotOnlyInfraFailures, g)
		}
		// Accumulate per-group LUCI-vs-Buildbot duration overhead, skipping
		// groups where either side has no measured duration.
		if ld, bd := g.LUCI.avgRunDuration(), g.Buildbot.avgRunDuration(); ld > 0 && bd > 0 {
			comp.AvgTimeDelta += ld - bd
			comp.AvgTimeDeltaGroups++
		}
		for _, b := range g.Buildbot {
			avgBuildbotTimeSecs += b.RunDuration.Seconds()
			buildbotBuilds++
		}
	}
	correctnessGroups := comp.CorrectnessGroups()
	// Bail out early when there is nothing meaningful to compare.
	switch {
	case avgBuildbotTimeSecs == 0.0:
		comp.Status = storage.StatusNoData
		comp.StatusReason = "Buildbot avg duration is 0"
		return comp
	case comp.TotalGroups == 0:
		comp.Status = storage.StatusNoData
		comp.StatusReason = "No LUCI builds found for comparison"
		return comp
	}
	if correctnessGroups > 0 {
		badGroups := len(comp.FalseSuccesses) + len(comp.FalseFailures)
		comp.Correctness = 1.0 - float64(badGroups)/float64(correctnessGroups)
	}
	if comp.AvgTimeDeltaGroups > 0 {
		// Speed is the ratio of LUCI throughput to Buildbot throughput; 1.0
		// means LUCI is as fast as Buildbot.
		avgBuildbotTimeSecs /= float64(buildbotBuilds)
		comp.AvgTimeDelta /= time.Duration(comp.AvgTimeDeltaGroups)
		buildbotSpeed := 1.0 / avgBuildbotTimeSecs
		luciSpeed := 1.0 / (avgBuildbotTimeSecs + comp.AvgTimeDelta.Seconds())
		comp.Speed = luciSpeed / buildbotSpeed
	}
	comp.InfraHealth = 1.0 - float64(len(comp.LUCIOnlyInfraFailures))/float64(comp.TotalGroups)
	// Decide the migration status; cases are evaluated in priority order.
	switch {
	case correctnessGroups < minCorrectnessGroups:
		// Collect available data but indicate low confidence.
		comp.Status = storage.StatusLowConfidence
		comp.StatusReason = ("Insufficient LUCI and Buildbot builds that " +
			"share same patchsets and can be used for correctness estimation")
	case comp.Correctness < 1.0:
		comp.Status = storage.StatusLUCINotWAI
		comp.StatusReason = "Incorrect"
	case comp.Speed < lowSpeed:
		comp.Status = storage.StatusLUCINotWAI
		comp.StatusReason = "Too slow; want at least 90% speed"
	case comp.InfraHealth < targetHealth:
		comp.Status = storage.StatusLUCINotWAI
		comp.StatusReason = "Too many new infra failures"
	case comp.Speed >= highSpeed:
		comp.Status = storage.StatusLUCIWAI
		comp.StatusReason = "Correct and fast enough"
	// the speed is between low and high
	case currentStatus == storage.StatusLUCIWAI:
		// leave as WAI. It is not too bad.
		comp.Status = storage.StatusLUCIWAI
		comp.StatusReason = "Correct and fast enough; speed is fluctuating"
	default:
		// same as case comp.Speed < lowSpeed,
		// separated for simplicity of switch statement.
		comp.Status = storage.StatusLUCINotWAI
		comp.StatusReason = "Too slow; want at least 90% speed"
	}
	return comp
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.