code
stringlengths
114
1.05M
path
stringlengths
3
312
quality_prob
float64
0.5
0.99
learning_prob
float64
0.2
1
filename
stringlengths
3
168
kind
stringclasses
1 value
// Package tntengine - define TntEngine type and it's methods package tntengine import ( "bytes" "fmt" "math/big" ) // Rotor - the type of the TNT2 rotor type Rotor struct { Size int Start int Step int Current int Rotor []byte } // New - creates a new Rotor with the given size, start, step and rotor data. func NewRotor(size, start, step int, rotor []byte) *Rotor { var r Rotor r.Start, r.Current = start, start r.Size = size r.Step = step r.Rotor = rotor r.sliceRotor() return &r } // Update - updates the given Rotor with a new size, start and step. func (r *Rotor) Update(size, start, step int) { r.Start, r.Current = start, start r.Size = size r.Step = step r.sliceRotor() } // sliceRotor - appends the first 256 bits of the rotor to the end of the rotor. func (r *Rotor) sliceRotor() { var i, j uint j = uint(r.Size) for i = 0; i < 256; i++ { if GetBit(r.Rotor, i) { SetBit(r.Rotor, j) } else { ClrBit(r.Rotor, j) } j++ } } // SetIndex - set the current rotor position based on the given index func (r *Rotor) SetIndex(idx *big.Int) { // Special case if idx == 0 if idx.Sign() == 0 { r.Current = r.Start } else { p := new(big.Int) q := new(big.Int) rem := new(big.Int) p = p.Mul(idx, new(big.Int).SetInt64(int64(r.Step))) p = p.Add(p, new(big.Int).SetInt64(int64(r.Start))) _, rem = q.DivMod(p, new(big.Int).SetInt64(int64(r.Size)), rem) r.Current = int(rem.Int64()) } } // Index - Rotor does no track the index. func (r *Rotor) Index() *big.Int { return nil } // ApplyF - encrypts the given block of data. 
func (r *Rotor) ApplyF(blk *[CypherBlockBytes]byte) *[CypherBlockBytes]byte { var res [CypherBlockBytes]byte ress := res[:] rotor := r.Rotor idx := r.Current for cnt := 0; cnt < CypherBlockSize; cnt++ { if GetBit(rotor, uint(idx)) { SetBit(ress, uint(cnt)) } idx++ } r.Current = (r.Current + r.Step) % r.Size return AddBlock(blk, &res) } // ApplyG - decrypts the given block of data func (r *Rotor) ApplyG(blk *[CypherBlockBytes]byte) *[CypherBlockBytes]byte { var res [CypherBlockBytes]byte ress := res[:] rotor := r.Rotor[:] idx := r.Current for cnt := 0; cnt < CypherBlockSize; cnt++ { if GetBit(rotor, uint(idx)) { SetBit(ress, uint(cnt)) } idx++ } r.Current = (r.Current + r.Step) % r.Size return SubBlock(blk, &res) } // String - converts a Rotor to a string representation of the Rotor. func (r *Rotor) String() string { var output bytes.Buffer rotorLen := len(r.Rotor) output.WriteString(fmt.Sprintf("rotor.New(%d, %d, %d, []byte{\n", r.Size, r.Start, r.Step)) for i := 0; i < rotorLen; i += 16 { output.WriteString("\t") if i+16 < rotorLen { for _, k := range r.Rotor[i : i+16] { output.WriteString(fmt.Sprintf("%d, ", k)) } } else { l := len(r.Rotor[i:]) for _, k := range r.Rotor[i : i+l-1] { output.WriteString(fmt.Sprintf("%d, ", k)) } output.WriteString(fmt.Sprintf("%d})", r.Rotor[i+l-1])) } output.WriteString("\n") } return output.String() }
rotor.go
0.662032
0.409988
rotor.go
starcoder
package main import ( "k8s.io/klog" "math/rand" "math" "github.com/randysimpson/go-matrix/matrix" ) func main() { klog.Infoln("Initializing ml tutorial application"); sampleSize := 100 var X []float64 var T []float64 //build the X and T array of sample data for i := 0; i < sampleSize; i++ { //get random value from 0 - 10 value := rand.Float64() * 10.0 //add this value to the X array X = append(X, value) //get a fraction to add some error to the equation to simulate an error on real data. noise := rand.Float64() / 10.0 //add the corresponding T after using the function tValue := 2.0 - 0.1 * value + 0.05 * math.Pow(value - 7.0, 2) + noise T = append(T, tValue) } klog.Infof("X: =%v\n", X) klog.Infof("T: =%v\n", T) //add 1's to our X slice, this is required so that we can apply matrix functions. //and place them into slice of slieces. var X1 [][]float64 for i := 0; i < sampleSize; i++ { var arrayFloat []float64 arrayFloat = append(arrayFloat, 1.0) arrayFloat = append(arrayFloat, X[i]) X1 = append(X1, arrayFloat) } //create slice of slieces for T's var T1 [][]float64 for i := 0; i < sampleSize; i++ { var arrayFloat []float64 arrayFloat = append(arrayFloat, T[i]) T1 = append(T1, arrayFloat) } learning_rate := 0.01 epoch := 10 //setup weight matrix as initially all zeros. var w [][]float64 for i := 0; i < len(X1[0]); i++ { //length of row in X var arrayFloat []float64 for j := 0; j < len(T1[0]); j++ { //length of row in T arrayFloat = append(arrayFloat, 0.0) } w = append(w, arrayFloat) } klog.Infof("learning_rate: =%v\n", learning_rate) klog.Infof("epoch: =%v\n", epoch) klog.Infof("Initial w: =%v\n", w) for i := 0; i < epoch; i++ { for j := 0; j < sampleSize; j++ { //get the x values as a slice of slices, currently it's just a single slice. 
var xMatrix [][]float64 xMatrix = append(xMatrix, X1[j]) //multiply it by weight matrix to get predicted value for y using model y := matrix.Multiply(xMatrix, w) //put the target values into slice of slices, currently this is single slice as well. var tMatrix [][]float64 tMatrix = append(tMatrix, T1[j]) //find the error err := matrix.Subtract(tMatrix, y) //Find the amount of change to apply to the weight matrix xT := matrix.Transpose(xMatrix) diff := matrix.Multiply(xT, err) change := matrix.MultiplyScalar(diff, learning_rate) //add the change to the weight matrix w = matrix.Add(w, change) } } klog.Infof("Final w = %v\n", w) //run the model against the X values predicted := matrix.Multiply(X1, w) klog.Infof("predicted y's= %v\n", predicted) }
01_linear_regression_sgd/main.go
0.555194
0.440409
main.go
starcoder
package audio import ( "time" "github.com/kasworld/h4o/_examples/app" "github.com/kasworld/h4o/audio" "github.com/kasworld/h4o/eventtype" "github.com/kasworld/h4o/math32" "github.com/kasworld/h4o/util/helper" ) func init() { app.DemoMap["audio.doppler"] = &AudioDoppler{} } type AudioDoppler struct { ps1 *PlayerSphere ps2 *PlayerSphere ps3 *PlayerSphere ps4 *PlayerSphere ps5 *PlayerSphere ps6 *PlayerSphere } // Start is called once at the start of the demo. func (t *AudioDoppler) Start(a *app.App) { // Create axes helper axes := helper.NewAxes(1) a.Scene().Add(axes) // Show grid helper grid := helper.NewGrid(100, 1, &math32.Color{0.4, 0.4, 0.4}) a.Scene().Add(grid) // Sets camera position a.Camera().SetPosition(0, 4, 12) pos := a.Camera().Position() a.Camera().UpdateSize(pos.Length()) a.Camera().LookAt(&math32.Vector3{0, 0, 0}, &math32.Vector3{0, 1, 0}) // Creates listener and adds it to the current camera listener := audio.NewListener() a.Camera().Add(listener) // Creates player sphere t.ps1 = NewPlayerSphere(a, "engine.ogg", &math32.Color{1, 0, 0}) t.ps1.SetPosition(-3, 0, 50) t.ps1.speed = -20.00 t.ps1.player.SetRolloffFactor(1) a.Scene().Add(t.ps1) t.ps2 = NewPlayerSphere(a, "tone_1khz.wav", &math32.Color{0, 1, 0}) t.ps2.SetPosition(-2, 0, 50) t.ps2.speed = -25.00 a.Scene().Add(t.ps2) t.ps3 = NewPlayerSphere(a, "tone_2khz.wav", &math32.Color{0, 0, 1}) t.ps3.SetPosition(-1, 0, 50) t.ps3.speed = -30.00 a.Scene().Add(t.ps3) t.ps4 = NewPlayerSphere(a, "engine.ogg", &math32.Color{0, 1, 1}) t.ps4.SetPosition(1, 0, -50) t.ps4.speed = 20.00 a.Scene().Add(t.ps4) t.ps5 = NewPlayerSphere(a, "tone_1khz.wav", &math32.Color{1, 0, 1}) t.ps5.SetPosition(2, 0, -50) t.ps5.speed = 25.00 a.Scene().Add(t.ps5) t.ps6 = NewPlayerSphere(a, "tone_2khz.wav", &math32.Color{1, 1, 1}) t.ps6.SetPosition(2, 0, -50) t.ps6.speed = 30.00 a.Scene().Add(t.ps6) // Add controls if a.ControlFolder() == nil { return } g := a.ControlFolder().AddGroup("Play sources") cb1 := g.AddCheckBox("engine 
-Z").SetValue(true) cb1.Subscribe(eventtype.OnChange, func(evname eventtype.EventType, ev interface{}) { t.ps1.Toggle() }) cb2 := g.AddCheckBox("tone_1khz -Z").SetValue(true) cb2.Subscribe(eventtype.OnChange, func(evname eventtype.EventType, ev interface{}) { t.ps2.Toggle() }) cb3 := g.AddCheckBox("tone_2khz -Z").SetValue(true) cb3.Subscribe(eventtype.OnChange, func(evname eventtype.EventType, ev interface{}) { t.ps3.Toggle() }) cb4 := g.AddCheckBox("engine +Z").SetValue(true) cb4.Subscribe(eventtype.OnChange, func(evname eventtype.EventType, ev interface{}) { t.ps4.Toggle() }) cb5 := g.AddCheckBox("tone_1khz +Z").SetValue(true) cb5.Subscribe(eventtype.OnChange, func(evname eventtype.EventType, ev interface{}) { t.ps5.Toggle() }) cb6 := g.AddCheckBox("tone_2khz +Z").SetValue(true) cb6.Subscribe(eventtype.OnChange, func(evname eventtype.EventType, ev interface{}) { t.ps6.Toggle() }) } // Update is called every frame. func (t *AudioDoppler) Update(a *app.App, deltaTime time.Duration) { t.ps1.UpdateVel(a, deltaTime) t.ps2.UpdateVel(a, deltaTime) t.ps3.UpdateVel(a, deltaTime) t.ps4.UpdateVel(a, deltaTime) t.ps5.UpdateVel(a, deltaTime) t.ps6.UpdateVel(a, deltaTime) } // Cleanup is called once at the end of the demo. func (t *AudioDoppler) Cleanup(a *app.App) {} func (ps *PlayerSphere) UpdateVel(a *app.App, deltaTime time.Duration) { delta := float32(deltaTime.Seconds()) * ps.speed pos := ps.Position() pos.Z += delta if pos.Z >= 100 { pos.Z = -50 } if pos.Z <= -100 { pos.Z = 50 } ps.player.SetVelocity(0, 0, ps.speed) ps.SetPositionVec(&pos) }
_examples/demos/audio/doppler.go
0.621196
0.405096
doppler.go
starcoder
package vmath import ( "fmt" "github.com/go-gl/mathgl/mgl32" ) type AABB struct { Min mgl32.Vec3 Max mgl32.Vec3 } func NewAABB(x1, y1, z1, x2, y2, z2 float32) AABB { return AABB{ Min: mgl32.Vec3{x1, y1, z1}, Max: mgl32.Vec3{x2, y2, z2}, } } func (a AABB) RotateX(an, ox, oy, oz float32) AABB { mat := mgl32.Rotate3DX(an) o := mgl32.Vec3{ox, oy, oz} a.Max = mat.Mul3x1(a.Max.Sub(o)).Add(o) a.Min = mat.Mul3x1(a.Min.Sub(o)).Add(o) a.fixBounds() return a } func (a AABB) RotateY(an, ox, oy, oz float32) AABB { mat := mgl32.Rotate3DY(an) o := mgl32.Vec3{ox, oy, oz} a.Max = mat.Mul3x1(a.Max.Sub(o)).Add(o) a.Min = mat.Mul3x1(a.Min.Sub(o)).Add(o) a.fixBounds() return a } func (a *AABB) fixBounds() { for i := range a.Min { if a.Max[i] < a.Min[i] || a.Min[i] > a.Max[i] { a.Max[i], a.Min[i] = a.Min[i], a.Max[i] } } } func (a AABB) Intersects(o AABB) bool { return !(o.Min.X() >= a.Max.X() || o.Max.X() <= a.Min.X() || o.Min.Y() >= a.Max.Y() || o.Max.Y() <= a.Min.Y() || o.Min.Z() >= a.Max.Z() || o.Max.Z() <= a.Min.Z()) } func (a AABB) IntersectsLine(origin, dir mgl32.Vec3) (mgl32.Vec3, bool) { const right, left, middle = 0, 1, 2 var ( quadrant [3]int candidatePlane [3]float32 maxT = [3]float32{-1, -1, -1} ) inside := true for i := range origin { if origin[i] < a.Min[i] { quadrant[i] = left candidatePlane[i] = a.Min[i] inside = false } else if origin[i] > a.Max[i] { quadrant[i] = right candidatePlane[i] = a.Max[i] inside = false } else { quadrant[i] = middle } } if inside { return origin, true } for i := range dir { if quadrant[i] != middle && dir[i] != 0 { maxT[i] = (candidatePlane[i] - origin[i]) / dir[i] } } whichPlane := 0 for i := 1; i < 3; i++ { if maxT[whichPlane] < maxT[i] { whichPlane = i } } if maxT[whichPlane] < 0 { return origin, false } var coord mgl32.Vec3 for i := range origin { if whichPlane != i { coord[i] = origin[i] + maxT[whichPlane]*dir[i] if coord[i] < a.Min[i] || coord[i] > a.Max[i] { return origin, false } } else { coord[i] = candidatePlane[i] } } return coord, 
true } func (a AABB) Shift(x, y, z float32) AABB { a.Min[0] += x a.Max[0] += x a.Min[1] += y a.Max[1] += y a.Min[2] += z a.Max[2] += z return a } func (a AABB) Grow(x, y, z float32) AABB { a.Min[0] -= x a.Max[0] += x a.Min[1] -= y a.Max[1] += y a.Min[2] -= z a.Max[2] += z return a } func (a AABB) MoveOutOf(o AABB, dir mgl32.Vec3) AABB { if dir.X() != 0 { if dir.X() > 0 { ox := a.Max.X() a.Max[0] = o.Min.X() - 0.0001 a.Min[0] += a.Max.X() - ox } else { ox := a.Min.X() a.Min[0] = o.Max.X() + 0.0001 a.Max[0] += a.Min.X() - ox } } if dir.Y() != 0 { if dir.Y() > 0 { oy := a.Max.Y() a.Max[1] = o.Min.Y() - 0.0001 a.Min[1] += a.Max.Y() - oy } else { oy := a.Min.Y() a.Min[1] = o.Max.Y() + 0.0001 a.Max[1] += a.Min.Y() - oy } } if dir.Z() != 0 { if dir.Z() > 0 { oz := a.Max.Z() a.Max[2] = o.Min.Z() - 0.0001 a.Min[2] += a.Max.Z() - oz } else { oz := a.Min.Z() a.Min[2] = o.Max.Z() + 0.0001 a.Max[2] += a.Min.Z() - oz } } return a } func (a AABB) String() string { return fmt.Sprintf("[%v->%v]", a.Min, a.Max) }
type/vmath/aabb.go
0.619011
0.534248
aabb.go
starcoder
package svg import "image/color" var cssColors = map[string]*color.RGBA{ "aliceblue": &color.RGBA{R: 240, G: 248, B: 255, A: 255}, "antiquewhite": &color.RGBA{R: 250, G: 235, B: 215, A: 255}, "aqua": &color.RGBA{R: 0, G: 255, B: 255, A: 255}, "aquamarine": &color.RGBA{R: 127, G: 255, B: 212, A: 255}, "azure": &color.RGBA{R: 240, G: 255, B: 255, A: 255}, "beige": &color.RGBA{R: 245, G: 245, B: 220, A: 255}, "bisque": &color.RGBA{R: 255, G: 228, B: 196, A: 255}, "black": &color.RGBA{R: 0, G: 0, B: 0, A: 255}, "blanchedalmond": &color.RGBA{R: 255, G: 235, B: 205, A: 255}, "blue": &color.RGBA{R: 0, G: 0, B: 255, A: 255}, "blueviolet": &color.RGBA{R: 138, G: 43, B: 226, A: 255}, "brown": &color.RGBA{R: 165, G: 42, B: 42, A: 255}, "burlywood": &color.RGBA{R: 222, G: 184, B: 135, A: 255}, "cadetblue": &color.RGBA{R: 95, G: 158, B: 160, A: 255}, "chartreuse": &color.RGBA{R: 127, G: 255, B: 0, A: 255}, "chocolate": &color.RGBA{R: 210, G: 105, B: 30, A: 255}, "coral": &color.RGBA{R: 255, G: 127, B: 80, A: 255}, "cornflowerblue": &color.RGBA{R: 100, G: 149, B: 237, A: 255}, "cornsilk": &color.RGBA{R: 255, G: 248, B: 220, A: 255}, "crimson": &color.RGBA{R: 220, G: 20, B: 60, A: 255}, "cyan": &color.RGBA{R: 0, G: 255, B: 255, A: 255}, "darkblue": &color.RGBA{R: 0, G: 0, B: 139, A: 255}, "darkcyan": &color.RGBA{R: 0, G: 139, B: 139, A: 255}, "darkgoldenrod": &color.RGBA{R: 184, G: 134, B: 11, A: 255}, "darkgray": &color.RGBA{R: 169, G: 169, B: 169, A: 255}, "darkgreen": &color.RGBA{R: 0, G: 100, B: 0, A: 255}, "darkgrey": &color.RGBA{R: 169, G: 169, B: 169, A: 255}, "darkkhaki": &color.RGBA{R: 189, G: 183, B: 107, A: 255}, "darkmagenta": &color.RGBA{R: 139, G: 0, B: 139, A: 255}, "darkolivegreen": &color.RGBA{R: 85, G: 107, B: 47, A: 255}, "darkorange": &color.RGBA{R: 255, G: 140, B: 0, A: 255}, "darkorchid": &color.RGBA{R: 153, G: 50, B: 204, A: 255}, "darkred": &color.RGBA{R: 139, G: 0, B: 0, A: 255}, "darksalmon": &color.RGBA{R: 233, G: 150, B: 122, A: 255}, "darkseagreen": 
&color.RGBA{R: 143, G: 188, B: 143, A: 255}, "darkslateblue": &color.RGBA{R: 72, G: 61, B: 139, A: 255}, "darkslategray": &color.RGBA{R: 47, G: 79, B: 79, A: 255}, "darkslategrey": &color.RGBA{R: 47, G: 79, B: 79, A: 255}, "darkturquoise": &color.RGBA{R: 0, G: 206, B: 209, A: 255}, "darkviolet": &color.RGBA{R: 148, G: 0, B: 211, A: 255}, "deeppink": &color.RGBA{R: 255, G: 20, B: 147, A: 255}, "deepskyblue": &color.RGBA{R: 0, G: 191, B: 255, A: 255}, "dimgray": &color.RGBA{R: 105, G: 105, B: 105, A: 255}, "dimgrey": &color.RGBA{R: 105, G: 105, B: 105, A: 255}, "dodgerblue": &color.RGBA{R: 30, G: 144, B: 255, A: 255}, "firebrick": &color.RGBA{R: 178, G: 34, B: 34, A: 255}, "floralwhite": &color.RGBA{R: 255, G: 250, B: 240, A: 255}, "forestgreen": &color.RGBA{R: 34, G: 139, B: 34, A: 255}, "fuchsia": &color.RGBA{R: 255, G: 0, B: 255, A: 255}, "gainsboro": &color.RGBA{R: 220, G: 220, B: 220, A: 255}, "ghostwhite": &color.RGBA{R: 248, G: 248, B: 255, A: 255}, "gold": &color.RGBA{R: 255, G: 215, B: 0, A: 255}, "goldenrod": &color.RGBA{R: 218, G: 165, B: 32, A: 255}, "gray": &color.RGBA{R: 128, G: 128, B: 128, A: 255}, "green": &color.RGBA{R: 0, G: 128, B: 0, A: 255}, "greenyellow": &color.RGBA{R: 173, G: 255, B: 47, A: 255}, "grey": &color.RGBA{R: 128, G: 128, B: 128, A: 255}, "honeydew": &color.RGBA{R: 240, G: 255, B: 240, A: 255}, "hotpink": &color.RGBA{R: 255, G: 105, B: 180, A: 255}, "indianred": &color.RGBA{R: 205, G: 92, B: 92, A: 255}, "indigo": &color.RGBA{R: 75, G: 0, B: 130, A: 255}, "ivory": &color.RGBA{R: 255, G: 255, B: 240, A: 255}, "khaki": &color.RGBA{R: 240, G: 230, B: 140, A: 255}, "lavender": &color.RGBA{R: 230, G: 230, B: 250, A: 255}, "lavenderblush": &color.RGBA{R: 255, G: 240, B: 245, A: 255}, "lawngreen": &color.RGBA{R: 124, G: 252, B: 0, A: 255}, "lemonchiffon": &color.RGBA{R: 255, G: 250, B: 205, A: 255}, "lightblue": &color.RGBA{R: 173, G: 216, B: 230, A: 255}, "lightcoral": &color.RGBA{R: 240, G: 128, B: 128, A: 255}, "lightcyan": 
&color.RGBA{R: 224, G: 255, B: 255, A: 255}, "lightgoldenrodyellow": &color.RGBA{R: 250, G: 250, B: 210, A: 255}, "lightgray": &color.RGBA{R: 211, G: 211, B: 211, A: 255}, "lightgreen": &color.RGBA{R: 144, G: 238, B: 144, A: 255}, "lightgrey": &color.RGBA{R: 211, G: 211, B: 211, A: 255}, "lightpink": &color.RGBA{R: 255, G: 182, B: 193, A: 255}, "lightsalmon": &color.RGBA{R: 255, G: 160, B: 122, A: 255}, "lightseagreen": &color.RGBA{R: 32, G: 178, B: 170, A: 255}, "lightskyblue": &color.RGBA{R: 135, G: 206, B: 250, A: 255}, "lightslategray": &color.RGBA{R: 119, G: 136, B: 153, A: 255}, "lightslategrey": &color.RGBA{R: 119, G: 136, B: 153, A: 255}, "lightsteelblue": &color.RGBA{R: 176, G: 196, B: 222, A: 255}, "lightyellow": &color.RGBA{R: 255, G: 255, B: 224, A: 255}, "lime": &color.RGBA{R: 0, G: 255, B: 0, A: 255}, "limegreen": &color.RGBA{R: 50, G: 205, B: 50, A: 255}, "linen": &color.RGBA{R: 250, G: 240, B: 230, A: 255}, "magenta": &color.RGBA{R: 255, G: 0, B: 255, A: 255}, "maroon": &color.RGBA{R: 128, G: 0, B: 0, A: 255}, "mediumaquamarine": &color.RGBA{R: 102, G: 205, B: 170, A: 255}, "mediumblue": &color.RGBA{R: 0, G: 0, B: 205, A: 255}, "mediumorchid": &color.RGBA{R: 186, G: 85, B: 211, A: 255}, "mediumpurple": &color.RGBA{R: 147, G: 112, B: 219, A: 255}, "mediumseagreen": &color.RGBA{R: 60, G: 179, B: 113, A: 255}, "mediumslateblue": &color.RGBA{R: 123, G: 104, B: 238, A: 255}, "mediumspringgreen": &color.RGBA{R: 0, G: 250, B: 154, A: 255}, "mediumturquoise": &color.RGBA{R: 72, G: 209, B: 204, A: 255}, "mediumvioletred": &color.RGBA{R: 199, G: 21, B: 133, A: 255}, "midnightblue": &color.RGBA{R: 25, G: 25, B: 112, A: 255}, "mintcream": &color.RGBA{R: 245, G: 255, B: 250, A: 255}, "mistyrose": &color.RGBA{R: 255, G: 228, B: 225, A: 255}, "moccasin": &color.RGBA{R: 255, G: 228, B: 181, A: 255}, "navajowhite": &color.RGBA{R: 255, G: 222, B: 173, A: 255}, "navy": &color.RGBA{R: 0, G: 0, B: 128, A: 255}, "oldlace": &color.RGBA{R: 253, G: 245, B: 230, A: 255}, 
"olive": &color.RGBA{R: 128, G: 128, B: 0, A: 255}, "olivedrab": &color.RGBA{R: 107, G: 142, B: 35, A: 255}, "orange": &color.RGBA{R: 255, G: 165, B: 0, A: 255}, "orangered": &color.RGBA{R: 255, G: 69, B: 0, A: 255}, "orchid": &color.RGBA{R: 218, G: 112, B: 214, A: 255}, "palegoldenrod": &color.RGBA{R: 238, G: 232, B: 170, A: 255}, "palegreen": &color.RGBA{R: 152, G: 251, B: 152, A: 255}, "paleturquoise": &color.RGBA{R: 175, G: 238, B: 238, A: 255}, "palevioletred": &color.RGBA{R: 219, G: 112, B: 147, A: 255}, "papayawhip": &color.RGBA{R: 255, G: 239, B: 213, A: 255}, "peachpuff": &color.RGBA{R: 255, G: 218, B: 185, A: 255}, "peru": &color.RGBA{R: 205, G: 133, B: 63, A: 255}, "pink": &color.RGBA{R: 255, G: 192, B: 203, A: 255}, "plum": &color.RGBA{R: 221, G: 160, B: 221, A: 255}, "powderblue": &color.RGBA{R: 176, G: 224, B: 230, A: 255}, "purple": &color.RGBA{R: 128, G: 0, B: 128, A: 255}, "red": &color.RGBA{R: 255, G: 0, B: 0, A: 255}, "rosybrown": &color.RGBA{R: 188, G: 143, B: 143, A: 255}, "royalblue": &color.RGBA{R: 65, G: 105, B: 225, A: 255}, "saddlebrown": &color.RGBA{R: 139, G: 69, B: 19, A: 255}, "salmon": &color.RGBA{R: 250, G: 128, B: 114, A: 255}, "sandybrown": &color.RGBA{R: 244, G: 164, B: 96, A: 255}, "seagreen": &color.RGBA{R: 46, G: 139, B: 87, A: 255}, "seashell": &color.RGBA{R: 255, G: 245, B: 238, A: 255}, "sienna": &color.RGBA{R: 160, G: 82, B: 45, A: 255}, "silver": &color.RGBA{R: 192, G: 192, B: 192, A: 255}, "skyblue": &color.RGBA{R: 135, G: 206, B: 235, A: 255}, "slateblue": &color.RGBA{R: 106, G: 90, B: 205, A: 255}, "slategray": &color.RGBA{R: 112, G: 128, B: 144, A: 255}, "slategrey": &color.RGBA{R: 112, G: 128, B: 144, A: 255}, "snow": &color.RGBA{R: 255, G: 250, B: 250, A: 255}, "springgreen": &color.RGBA{R: 0, G: 255, B: 127, A: 255}, "steelblue": &color.RGBA{R: 70, G: 130, B: 180, A: 255}, "tan": &color.RGBA{R: 210, G: 180, B: 140, A: 255}, "teal": &color.RGBA{R: 0, G: 128, B: 128, A: 255}, "thistle": &color.RGBA{R: 216, G: 191, B: 
216, A: 255}, "tomato": &color.RGBA{R: 255, G: 99, B: 71, A: 255}, "turquoise": &color.RGBA{R: 64, G: 224, B: 208, A: 255}, "violet": &color.RGBA{R: 238, G: 130, B: 238, A: 255}, "wheat": &color.RGBA{R: 245, G: 222, B: 179, A: 255}, "white": &color.RGBA{R: 255, G: 255, B: 255, A: 255}, "whitesmoke": &color.RGBA{R: 245, G: 245, B: 245, A: 255}, "yellow": &color.RGBA{R: 255, G: 255, B: 0, A: 255}, "yellowgreen": &color.RGBA{R: 154, G: 205, B: 50, A: 255}, }
colors.go
0.603815
0.509764
colors.go
starcoder
package revocation import ( "crypto/rand" "github.com/go-errors/errors" "github.com/privacybydesign/gabi/big" "github.com/privacybydesign/gabi/keyproof" "github.com/privacybydesign/gabi/pkg/common" ) /* This implements the zero knowledge proof of the RSA-B accumulator for revocation, introduced in "Dynamic Accumulators and Application to Efficient Revocation of Anonymous Credentials", <NAME> and <NAME>, CRYPTO 2002, DOI https://doi.org/10.1007/3-540-45708-9_5, http://static.cs.brown.edu/people/alysyans/papers/camlys02.pdf. This accumulator is only updated when revoking and does not change when adding new revocation handles to the accumulator. The user proves knowledge of two numbers u and e, called the witness, which are such that the relation u^e = 𝛎 mod n holds, where 𝛎 (greek letter "nu") is the accumulator (the issuer's current "non-revocation publickey"). Both u and e are kept secret to the user. Elsewhere the number e is included as an attribute in an IRMA credential, and this zero-knowledge proof convinces the verifier that the containing credential is not revoked. This is an implementation of the zero-knowledge proof at page 8 and 15 of the pdf linked to above, with the following differences. 1. In the zero knowledge proof conjunction on page 8 of the pdf, we skip the first, second and third items in the conjunction: these only serve to prove that the secret e is committed to in an element of a known prime order group. We don't need to do this as we have no such group: in our case everything happens within QR_n. 2. The fifth relation C_e = g^e * h^r1 is replaced by the Idemix relation Z = A^epsilon * S^v * Ri^mi * Re^e which is already proved elsewhere by the calling code. 3. The interval [A, B] from which the witness e is chosen does not satisfy the relation B*2^(k'+k''+1) < A^2 - 1, which is unnecessary: as long as A > 2, witnesses are unforgeable, by a simple extension of the unforgeability proof of Theorem 3. See below. 
In the following we follow the lead of the other zero knowledge proofs implemented elsehwere in gabi. 4. Secrets and randomizers within the zero-knowledge proofs are taken positive, instead of from symmetric intervals [-A,A]. 5. We use addition in the zero-knowledge proof responses: response = randomizer + challenge*secret. 6. We use the Fiat-Shamir heuristic. 7. We include the challenge c in the proof, and then verify by hashing the Schnorr commitments reconstructed from the proof, obtaining c' which must then equal c. We claim, prove, and implement the following: Let [A, B] be the interval from which the number e from the witness (u,e) is chosen, as in the paper. Then witnesses are unforgeable as in theorem 3, if A > 2 and B < 2^(l_n-1) where l_n is the bitsize of the modulus n. In particular, it is not necesary to require A^2 > B like theorem 3 does. Proof: let (u')^(x') = u^x where x = x_1*...*x_n, and set d = gcd(x, x'), as in the proof. Suppose that d is not relatively prime to phi(n) = 4*p'*q'. Since d is the product of a subset of the primes x_1, ..., x_n and since x_i > 2 for all of these primes, by the unique factorization theorem there must be a j such that x_j = p' or x_j = q'. Thus since p = 2p'+1 and q = 2q'+1, the algorithm that for each i checks if 2x_i+1 divides n = pq will succeed in factoring n. The remainder of the proof which handles the other case, where d is relatively prime to phi(n), works as is. The claim "d = gcd(x,x') => (d = 1 or d = x_j)" in the middle of the proof, which requires A^2 > B for its proof, is thus not necessary to use in the proof of theorem 3. Thus for unforgeability the size of e is not relevant. However, e should be chosen from a set so large that it is overhelmingly unlikely that any one prime e is chosen twice. 
Combining the prime counting function with the birthday paradox and simplifying, one finds the following: if N witnesses are chosen from the set of primes smaller than B, then the collision chance P approximately equals P = 1 - e^(-N^2 ln(B)/B). At n = 10^9 we have P = 1/2^128 if B = 2^195. */ type ( // Proof is a proof that a Witness is valid against the Accumulator from the specified // SignedAccumulator. Proof struct { Cr *big.Int `json:"C_r"` // Cr = g^r2 * h^r3 = g^epsilon * h^zeta Cu *big.Int `json:"C_u"` // Cu = u * h^r2 Nu *big.Int `json:"-"` // nu = Cu^e * h^(-e*r2) = Cu^alpha * h^-beta Challenge *big.Int `json:"-"` Responses map[string]*big.Int `json:"responses"` SignedAccumulator *SignedAccumulator `json:"sacc"` acc *Accumulator // Extracted from SignedAccumulator during verification } // ProofCommit contains the commitment state of a nonrevocation Proof. ProofCommit struct { cu, cr, nu *big.Int secrets map[string]*big.Int randomizers map[string]*big.Int g *qrGroup sacc *SignedAccumulator } proofStructure struct { cr qrRepresentationProofStructure nu qrRepresentationProofStructure one qrRepresentationProofStructure } // We implement the keyproof interfaces, containing exported methods, without exposing those // methods outside the package by implementing them on unexported structs - at the cost of // having to cast back and forth between these equivalent types when crossing the API boundary proof Proof proofCommit ProofCommit accumulator Accumulator witness Witness qrGroup QrGroup ) var ( ErrorRevoked = errors.New("revoked") parameters = struct { attributeSize uint // maximum size in bits for prime e challengeLength uint // k' = len(SHA256) = 256 zkStat uint // k'' = 128 twoZk, bTwoZk, b *big.Int // 2^(k'+k''), B*2^(k'+k''+1), 2^attributeSize }{ attributeSize: 195, challengeLength: 256, zkStat: 128, } bigOne = big.NewInt(1) secretNames = []string{"alpha", "beta", "delta", "epsilon", "zeta"} proofstructure = proofStructure{ cr: 
qrRepresentationProofStructure{ Lhs: []keyproof.LhsContribution{{Base: "cr", Power: bigOne}}, Rhs: []keyproof.RhsContribution{ {Base: "g", Secret: "epsilon", Power: 1}, // r2 {Base: "h", Secret: "zeta", Power: 1}, // r3 }, }, nu: qrRepresentationProofStructure{ Lhs: []keyproof.LhsContribution{{Base: "nu", Power: bigOne}}, Rhs: []keyproof.RhsContribution{ {Base: "cu", Secret: "alpha", Power: 1}, // e {Base: "h", Secret: "beta", Power: -1}, // e r2 }, }, one: qrRepresentationProofStructure{ Lhs: []keyproof.LhsContribution{{Base: "one", Power: bigOne}}, Rhs: []keyproof.RhsContribution{ {Base: "cr", Secret: "alpha", Power: 1}, // e {Base: "g", Secret: "beta", Power: -1}, // e r2 {Base: "h", Secret: "delta", Power: -1}, // e r3 }, }, } ) func init() { // Compute derivative parameters parameters.b = new(big.Int).Lsh(bigOne, parameters.attributeSize) parameters.twoZk = new(big.Int).Lsh(bigOne, parameters.challengeLength+parameters.zkStat) parameters.bTwoZk = new(big.Int).Mul(parameters.b, new(big.Int).Mul(parameters.twoZk, big.NewInt(2))) } // API // NewProofRandomizer returns a bigint suitable for use as the randomizer in a nonrevocation // zero knowledge proof. func NewProofRandomizer() *big.Int { return common.FastRandomBigInt(new(big.Int).Mul(parameters.b, parameters.twoZk)) } // RandomWitness returns a new random Witness valid against the specified Accumulator. func RandomWitness(sk *PrivateKey, acc *Accumulator) (*Witness, error) { e, err := common.RandomPrimeInRange(rand.Reader, 3, parameters.attributeSize) if err != nil { return nil, err } return newWitness(sk, acc, e) } // NewProofCommit performs the first move in the Schnorr zero-knowledge protocol: committing to randomizers. 
func NewProofCommit(grp *QrGroup, witn *Witness, randomizer *big.Int) ([]*big.Int, *ProofCommit, error) { Logger.Tracef("revocation.NewProofCommit()") defer Logger.Tracef("revocation.NewProofCommit() done") witn.randomizer = randomizer if randomizer == nil { witn.randomizer = NewProofRandomizer() } if !proofstructure.isTrue((*witness)(witn), witn.SignedAccumulator.Accumulator.Nu, grp.N) { return nil, nil, errors.New("non-revocation relation does not hold") } bases := keyproof.NewBaseMerge((*qrGroup)(grp), &accumulator{Nu: witn.SignedAccumulator.Accumulator.Nu}) list, commit := proofstructure.commitmentsFromSecrets((*qrGroup)(grp), []*big.Int{}, &bases, (*witness)(witn)) commit.sacc = witn.SignedAccumulator return list, (*ProofCommit)(&commit), nil } // SetExpected sets certain values of the proof to expected values, inferred from the containing proofs, // before verification. func (p *Proof) SetExpected(pk *PublicKey, challenge, response *big.Int) error { acc, err := p.SignedAccumulator.UnmarshalVerify(pk) if err != nil { return err } p.Nu = acc.Nu p.Challenge = challenge p.Responses["alpha"] = response return nil } func (p *Proof) ChallengeContributions(grp *QrGroup) []*big.Int { return proofstructure.commitmentsFromProof((*qrGroup)(grp), []*big.Int{}, p.Challenge, (*qrGroup)(grp), (*proof)(p), (*proof)(p)) } func (p *Proof) VerifyWithChallenge(pk *PublicKey, reconstructedChallenge *big.Int) bool { if !proofstructure.verifyProofStructure((*proof)(p)) { return false } if (*proof)(p).ProofResult("alpha").Cmp(parameters.bTwoZk) > 0 { return false } acc, err := p.SignedAccumulator.UnmarshalVerify(pk) if err != nil { return false } p.acc = acc if p.Nu.Cmp(p.acc.Nu) != 0 { return false } return p.Challenge.Cmp(reconstructedChallenge) == 0 } func (c *ProofCommit) BuildProof(challenge *big.Int) *Proof { Logger.Tracef("revocation.ProofCommit.BuildProof()") defer Logger.Tracef("revocation.ProofCommit.BuildProof() done") responses := make(map[string]*big.Int, 5) for _, name 
:= range secretNames { responses[name] = new(big.Int).Add( (*proofCommit)(c).Randomizer(name), new(big.Int).Mul( challenge, (*proofCommit)(c).Secret(name)), ) } return &Proof{ Cr: c.cr, Cu: c.cu, Nu: c.nu, Challenge: challenge, Responses: responses, SignedAccumulator: c.sacc, } } func (c *ProofCommit) Update(commitments []*big.Int, witness *Witness) { Logger.Tracef("revocation.ProofCommit.Update()") defer Logger.Tracef("revocation.ProofCommit.Update() done") c.cu = new(big.Int).Exp(c.g.H, c.secrets["epsilon"], c.g.N) c.cu.Mul(c.cu, witness.U) c.nu = witness.SignedAccumulator.Accumulator.Nu c.sacc = witness.SignedAccumulator commit := (*proofCommit)(c) b := keyproof.NewBaseMerge(c.g, commit) l := proofstructure.nu.commitmentsFromSecrets(c.g, []*big.Int{}, &b, commit) commitments[1] = c.cu commitments[2] = witness.SignedAccumulator.Accumulator.Nu commitments[4] = l[0] } // Update updates the witness using the specified update data from the issuer, // after which the witness can be used to prove nonrevocation against the latest Accumulator // (contained in the update message). 
func (w *Witness) Update(pk *PublicKey, update *Update) error { Logger.Tracef("revocation.Witness.Update()") defer Logger.Tracef("revocation.Witness.Update() done") acc, err := update.Verify(pk) if err != nil { return err } if acc.Index == w.SignedAccumulator.Accumulator.Index { *w.SignedAccumulator = *update.SignedAccumulator w.Updated = acc.Time return nil } if len(update.Events) == 0 { return nil } startIndex, endIndex := update.Events[0].Index, acc.Index if endIndex <= w.SignedAccumulator.Accumulator.Index { return nil } if startIndex > w.SignedAccumulator.Accumulator.Index+1 { return errors.New("update too new") } var a, b big.Int if new(big.Int).GCD(&a, &b, w.E, update.Product(w.SignedAccumulator.Accumulator.Index+1)).Cmp(bigOne) != 0 { return ErrorRevoked } // u' = u^b * newNu^a mod n newU := new(big.Int) newU.Mul( new(big.Int).Exp(w.U, &b, pk.Group.N), new(big.Int).Exp(acc.Nu, &a, pk.Group.N), ).Mod(newU, pk.Group.N) if !verify(newU, w.E, acc, pk.Group) { return errors.New("nonrevocation witness invalidated by update") } // Update witness state only now after all possible errors have not occurred w.U = newU w.SignedAccumulator = update.SignedAccumulator w.Updated = acc.Time return nil } // Verify the witness against its SignedAccumulator. 
func (w *Witness) Verify(pk *PublicKey) error {
	// Check the issuer signature on the accumulator first; a witness against an
	// unauthenticated accumulator proves nothing.
	_, err := w.SignedAccumulator.UnmarshalVerify(pk)
	if err != nil {
		return err
	}
	if !verify(w.U, w.E, w.SignedAccumulator.Accumulator, pk.Group) {
		return errors.New("invalid witness")
	}
	return nil
}

// Zero-knowledge proof methods

// Exp implements keyproof.BaseLookup: ret = base(name)^exp mod n.
// Always reports success; Base(name) may be nil for unknown names (TODO confirm
// callers only pass registered names).
func (c *proofCommit) Exp(ret *big.Int, name string, exp, n *big.Int) bool {
	ret.Exp(c.Base(name), exp, n)
	return true
}

// Base returns the named commitment value, or nil for unknown names.
func (c *proofCommit) Base(name string) *big.Int {
	switch name {
	case "cu":
		return c.cu
	case "cr":
		return c.cr
	case "nu":
		return c.nu
	case "one":
		return big.NewInt(1)
	default:
		return nil
	}
}

// Names lists the bases this commitment can resolve.
func (c *proofCommit) Names() []string {
	return []string{"cu", "cr", "nu", "one"}
}

// Secret implements keyproof.SecretLookup (nil for unknown names).
func (c *proofCommit) Secret(name string) *big.Int {
	return c.secrets[name]
}

// Randomizer implements keyproof.SecretLookup (nil for unknown names).
func (c *proofCommit) Randomizer(name string) *big.Int {
	return c.randomizers[name]
}

// ProofResult implements keyproof.ProofLookup over the proof's responses.
func (p *proof) ProofResult(name string) *big.Int {
	return p.Responses[name]
}

// verify recomputes the commitments from the proof and checks that hashing them
// reproduces the challenge (Fiat-Shamir).
func (p *proof) verify(pk *PublicKey) bool {
	grp := (*qrGroup)(pk.Group)
	commitments := proofstructure.commitmentsFromProof(grp, []*big.Int{}, p.Challenge, grp, p, p)
	return (*Proof)(p).VerifyWithChallenge(pk, common.HashCommit(commitments, false))
}

// commitmentsFromSecrets builds the prover-side commitments for the
// nonrevocation proof and returns both the commitment list and the proofCommit
// holding the secrets/randomizers needed later to compute responses.
func (s *proofStructure) commitmentsFromSecrets(g *qrGroup, list []*big.Int, bases keyproof.BaseLookup, secretdata keyproof.SecretLookup) ([]*big.Int, proofCommit) {
	commit := proofCommit{
		g:           g,
		secrets:     make(map[string]*big.Int, 5),
		randomizers: make(map[string]*big.Int, 5),
		cu:          new(big.Int),
		cr:          new(big.Int),
		nu:          bases.Base("nu"),
	}

	// r2, r3: blinding exponents for the two Pedersen-style commitments below.
	r2 := common.FastRandomBigInt(g.nDiv4)
	r3 := common.FastRandomBigInt(g.nDiv4)

	// Derived secrets: beta = alpha*r2 and delta = alpha*r3 link the committed
	// values across the sub-proofs (alpha is the witness exponent e).
	alpha := secretdata.Secret("alpha")
	commit.secrets["alpha"] = alpha
	commit.secrets["beta"] = new(big.Int).Mul(alpha, r2)
	commit.secrets["delta"] = new(big.Int).Mul(alpha, r3)
	commit.secrets["epsilon"] = r2
	commit.secrets["zeta"] = r3

	commit.randomizers["alpha"] = secretdata.Randomizer("alpha")
	commit.randomizers["beta"] = common.FastRandomBigInt(g.nbDiv4twoZk)
	commit.randomizers["delta"] = common.FastRandomBigInt(g.nbDiv4twoZk)
	commit.randomizers["epsilon"] = common.FastRandomBigInt(g.nDiv4twoZk)
	commit.randomizers["zeta"] = common.FastRandomBigInt(g.nDiv4twoZk)

	var tmp big.Int

	// Set C_r = g^r2 * h^r3
	bases.Exp(commit.cr, "g", r2, g.N)
	bases.Exp(&tmp, "h", r3, g.N)
	commit.cr.Mul(commit.cr, &tmp).Mod(commit.cr, g.N)

	// Set C_u = u * h^r2
	bases.Exp(&tmp, "h", r2, g.N)
	commit.cu.Mul(secretdata.Secret("u"), &tmp).Mod(commit.cu, g.N)

	list = append(list, commit.cr, commit.cu, commit.nu)

	// Delegate the three sub-proof commitment lists; order must match
	// commitmentsFromProof below for the Fiat-Shamir hash to agree.
	b := keyproof.NewBaseMerge(bases, &commit)
	list = s.cr.commitmentsFromSecrets(g, list, &b, &commit)
	list = s.nu.commitmentsFromSecrets(g, list, &b, &commit)
	list = s.one.commitmentsFromSecrets(g, list, &b, &commit)

	return list, commit
}

// commitmentsFromProof reconstructs the verifier-side commitment list from a
// proof and the challenge; must mirror commitmentsFromSecrets exactly.
func (s *proofStructure) commitmentsFromProof(g *qrGroup, list []*big.Int, challenge *big.Int, bases keyproof.BaseLookup, proofdata keyproof.ProofLookup, proof *proof) []*big.Int {
	proofs := keyproof.NewProofMerge(proof, proofdata)

	b := keyproof.NewBaseMerge(g, &proofCommit{cr: proof.Cr, cu: proof.Cu, nu: proof.Nu})

	list = append(list, proof.Cr, proof.Cu, proof.Nu)
	list = s.cr.commitmentsFromProof(g, list, challenge, &b, &proofs)
	list = s.nu.commitmentsFromProof(g, list, challenge, &b, &proofs)
	list = s.one.commitmentsFromProof(g, list, challenge, &b, &proofs)

	return list
}

// verifyProofStructure checks that all required fields of a proof are present
// (structure only — no cryptographic validation).
func (s *proofStructure) verifyProofStructure(p *proof) bool {
	for _, name := range secretNames {
		if p.Responses[name] == nil {
			return false
		}
	}
	return p.Cr != nil && p.Cu != nil && p.Nu != nil && p.Challenge != nil
}

// isTrue checks the underlying statement u^alpha == nu (mod n).
func (s *proofStructure) isTrue(secretdata keyproof.SecretLookup, nu, n *big.Int) bool {
	return new(big.Int).
		Exp(secretdata.Secret("u"), secretdata.Secret("alpha"), n).
		Cmp(nu) == 0
}

// Base implements keyproof.BaseLookup for an accumulator: only "nu" resolves.
func (b accumulator) Base(name string) *big.Int {
	if name == "nu" {
		return b.Nu
	}
	return nil
}

// Exp implements keyproof.BaseLookup; false for any base other than "nu".
func (b accumulator) Exp(ret *big.Int, name string, exp, n *big.Int) bool {
	if name == "nu" {
		ret.Exp(b.Nu, exp, n)
		return true
	}
	return false
}

// Names lists the single base an accumulator provides.
func (b accumulator) Names() []string {
	return []string{"nu"}
}

// Secret implements keyproof.SecretLookup for a witness:
// "alpha" is the witness exponent e, "u" the witness value.
func (w *witness) Secret(name string) *big.Int {
	switch name {
	case "alpha":
		return w.E
	case "u":
		return w.U
	}
	return nil
}

// Randomizer implements keyproof.SecretLookup; only "alpha" has a randomizer.
func (w *witness) Randomizer(name string) *big.Int {
	if name == "alpha" {
		return w.randomizer
	}
	return nil
}

// Helpers

// verify reports whether u^e == nu (mod n), i.e. whether (u, e) is a valid
// nonrevocation witness against acc.
func verify(u, e *big.Int, acc *Accumulator, grp *QrGroup) bool {
	return new(big.Int).Exp(u, e, grp.N).Cmp(acc.Nu) == 0
}

// newWitness constructs a witness for prime e against acc using the secret key:
// u = nu^(e^-1 mod P*Q) mod N. (P and Q here are presumably the subgroup-order
// primes, not the RSA factors — TODO confirm against the key type's docs.)
func newWitness(sk *PrivateKey, acc *Accumulator, e *big.Int) (*Witness, error) {
	order := new(big.Int).Mul(sk.P, sk.Q)
	eInverse, ok := common.ModInverse(e, order)
	if !ok {
		return nil, errors.New("failed to compute modular inverse")
	}
	u := new(big.Int).Exp(acc.Nu, eInverse, sk.N)
	return &Witness{U: u, E: e, Updated: acc.Time}, nil
}
revocation/proof.go
0.71889
0.673125
proof.go
starcoder
// Package plantest provides helpers for constructing flux query-plan specs in tests.
package plantest

import (
	"fmt"
	"time"

	"github.com/wolffcm/flux"
	"github.com/wolffcm/flux/plan"
)

// PlanSpec is a set of nodes and edges of a logical query plan.
type PlanSpec struct {
	Nodes []plan.Node

	// Edges is a list of predecessor-to-successor edges.
	// [1, 3] => Nodes[1] is a predecessor of Nodes[3].
	// Predecessor ordering must be encoded in this list.
	Edges     [][2]int
	Resources flux.ResourceManagement
	Now       time.Time
}

// CreatePlanSpec creates a logical plan from a set of nodes and edges.
func CreatePlanSpec(spec *PlanSpec) *plan.Spec {
	return createPlanSpec(spec.Nodes, spec.Edges, spec.Resources, spec.Now)
}

// Copy makes a deep copy of the nodes and edge list of a PlanSpec;
// Resources and Now are copied by value.
func (ps *PlanSpec) Copy() *PlanSpec {
	cps := new(PlanSpec)
	cps.Nodes = make([]plan.Node, len(ps.Nodes))
	for i := range ps.Nodes {
		cps.Nodes[i] = copyNode(ps.Nodes[i])
	}
	cps.Edges = make([][2]int, len(ps.Edges))
	copy(cps.Edges, ps.Edges)
	cps.Resources = ps.Resources
	cps.Now = ps.Now
	return cps
}

// copyNode duplicates a logical or physical plan node (ID plus a copy of its
// procedure spec). NOTE(review): any other plan.Node implementation falls
// through and yields a nil node — confirm callers only pass these two kinds.
func copyNode(n plan.Node) plan.Node {
	var cn plan.Node
	switch n := n.(type) {
	case *plan.LogicalNode:
		cn = plan.CreateLogicalNode(n.ID(), n.ProcedureSpec().Copy())
	case *plan.PhysicalPlanNode:
		cn = plan.CreatePhysicalNode(n.ID(), n.ProcedureSpec().Copy().(plan.PhysicalProcedureSpec))
	}
	return cn
}

// createPlanSpec wires the given nodes together according to edges and returns
// a plan.Spec rooted at the nodes with no successors. It panics on a
// disconnected node (in a multi-node spec) so that malformed test fixtures
// fail loudly.
func createPlanSpec(nodes []plan.Node, edges [][2]int, resources flux.ResourceManagement, now time.Time) *plan.Spec {
	predecessors := make(map[plan.Node][]plan.Node)
	successors := make(map[plan.Node][]plan.Node)

	// Compute predecessors and successors of each node
	for _, edge := range edges {
		parent := nodes[edge[0]]
		child := nodes[edge[1]]
		successors[parent] = append(successors[parent], child)
		predecessors[child] = append(predecessors[child], parent)
	}

	roots := make([]plan.Node, 0)

	// Construct query plan
	for _, node := range nodes {
		if len(successors[node]) == 0 {
			roots = append(roots, node)
		}
		if len(nodes) > 1 && len(predecessors[node]) == 0 && len(successors[node]) == 0 {
			panic(fmt.Errorf("found disconnected node: %v", node.ID()))
		}

		node.AddPredecessors(predecessors[node]...)
		node.AddSuccessors(successors[node]...)
	}

	// Renamed from `plan` to avoid shadowing the imported `plan` package.
	spec := plan.NewPlanSpec()
	for _, root := range roots {
		spec.Roots[root] = struct{}{}
	}
	spec.Resources = resources
	spec.Now = now
	return spec
}
plan/plantest/plan.go
0.688783
0.487917
plan.go
starcoder
// Package propcheck implements a small property-based testing library.
package propcheck

import (
	"fmt"
	"testing"

	"github.com/hashicorp/go-multierror"
)

type TestCases = int // The number of test cases to run.
type PropName = string
type Run = func(RunParms) Result

// RunParms bundles everything a property run needs: how many cases to generate
// and the RNG state to generate them from.
type RunParms struct {
	TestCases TestCases
	Rng       SimpleRNG
}

// Result is the outcome of running a property: Falsified or Passed.
type Result interface {
	IsFalsified() bool
}

// Prop is a named, runnable property.
type Prop struct {
	Run  Run
	Name PropName
}

func (w Prop) String() string {
	return fmt.Sprintf("Prop{Run: %T, Name: %v}", w.Run, w.Name)
}

// Falsified records the first failing case of a property run, along with the
// number of successes seen before it and the last value that succeeded.
type Falsified[A any] struct {
	Name            string
	FailedCase      A
	Successes       int
	LastSuccessCase A
	Errors          error
}

func (w Falsified[A]) String() string {
	return fmt.Sprintf("\u001B[31m Falsified{Name: %v, FailedCase: %v, Successes: %v, LastSuccessCase: %v, Errors: %v \u001B[30m}", w.Name, w.FailedCase, w.Successes, w.LastSuccessCase, w.Errors)
}

// Passed indicates every generated case satisfied the property.
type Passed[A any] struct{}

func (w Passed[A]) String() string {
	// Constant string; fmt.Sprintf was unnecessary here (staticcheck S1039).
	return "Passed{}"
}

func (f Falsified[A]) IsFalsified() bool {
	return true
}

func (f Passed[A]) IsFalsified() bool {
	return false
}

// This is a lazily evaluated And that combines two properties.
// The type parameter A is not inferable and must be supplied explicitly by
// callers; it is kept for backward compatibility with existing call sites.
func And[A any](p1, p2 Prop) Prop {
	run := func(n RunParms) Result {
		r := p1.Run(n)
		if !r.IsFalsified() {
			return p2.Run(n)
		}
		return r
	}
	return Prop{run, p1.Name}
}

// This is a lazily evaluated Or that combines two properties.
func Or[A any](p1, p2 Prop) Prop {
	run := func(n RunParms) Result {
		r := p1.Run(n)
		if !r.IsFalsified() {
			return r
		}
		return p2.Run(n)
	}
	return Prop{run, p1.Name}
}

/**
Given a Generator(ge), a generated-value transformation function(f), and a variadic list of predicate functions(assertions),
ForAll produces a function(of type Prop) that will run a set number of test cases with a given generator.

Parameters:

	ge - a generator of type "func(SimpleRNG) (A, SimpleRNG)"
	name - a name to assign the Prop
	f - a function of type "f func(A) B" that takes the generated type A and returns another type B and then passes it
	    along to the list of assertion functions.
	assertions - a variadic list of assertion functions of type "func(B) (bool, error)", each returning a pair consisting
	    of a boolean success and a possible list of errors.

Returns:

	Prop - a data structure consisting of a descriptive name for the property and a function of type
	    func(n RunParms) Result. Result is a sum type that can be either Falsified or Passed. The FailedCase and
	    LastSuccessCase attributes of the Falsified type(type parameter A) contain the value that caused the test
	    failure and the last successful value for the test.
*/
func ForAll[A, B any](ge func(SimpleRNG) (A, SimpleRNG), name string, f func(A) B, assertions ...func(B) (bool, error)) Prop {
	run := func(n RunParms) Result {
		var rng = n.Rng
		var failedCases []Falsified[A]
		var lastSuccessCase A
		var testData A
		for x := 0; x < n.TestCases; x++ {
			testData, rng = ge(rng)
			b := f(testData)
			var errors error
			for _, s := range assertions {
				success, err := s(b)
				if !success {
					if err != nil {
						errors = multierror.Append(errors, err)
					}
					break
				}
			}
			// NOTE(review): an assertion that fails while returning a nil error
			// leaves errors == nil and is therefore counted as a success —
			// confirm this best-effort behavior is intended.
			if errors == nil {
				lastSuccessCase = testData
			} else {
				// Renamed from `f` to avoid shadowing the transform parameter.
				fail := Falsified[A]{
					Name:            name,
					FailedCase:      testData,
					Successes:       x,
					LastSuccessCase: lastSuccessCase,
					Errors:          errors,
				}
				failedCases = append(failedCases, fail)
			}
			// Advance the RNG so the next case draws fresh randomness.
			_, rng = NextInt(rng)
		}
		if len(failedCases) > 0 {
			return failedCases[0]
		}
		return Passed[A]{}
	}
	return Prop{run, name}
}

// ExpectSuccess fails the test if the result is Falsified; panics if the result
// was produced with a different generator type than A.
func ExpectSuccess[A any](t *testing.T, result Result) {
	switch v := result.(type) {
	case Falsified[A]:
		t.Errorf("\033[31m Test Falsified with: %v \u001B[30m \n", v)
	case Passed[A]:
	default:
		panic(fmt.Sprintf("Expected type of Result to be:%T which is the type of the generator.", v))
	}
}

// ExpectFailure fails the test if the result is Passed; panics if the result
// was produced with a different generator type than A.
func ExpectFailure[A any](t *testing.T, result Result) {
	switch v := result.(type) {
	case Passed[A]:
		t.Errorf("\u001B[31m Expected test to be Falsified but it was: %v \u001B[30m \n", v)
	case Falsified[A]:
	default:
		panic(fmt.Sprintf("Expected type of Result to be:%T which is the type of the generator.", v))
	}
}

// Combines a list of assertion functions of type "func(A) (bool, error)" into a single new function that returns their logical OR.
// Note that like Prop.Or above it evaluates lazily. As soon as a true function is encountered it returns.
// Otherwise it returns all the accumulated errors.
func AssertionOr[A any](assertions ...func(A) (bool, error)) func(A) (bool, error) {
	return func(b A) (bool, error) {
		var errors error
		for _, s := range assertions {
			success, err := s(b)
			if success {
				return true, nil
			}
			if err != nil {
				errors = multierror.Append(errors, err)
			}
		}
		return false, errors
	}
}

// Combines a list of assertion functions of type "func(A) (bool, error)" into a new function that returns their logical AND.
// Note that like Prop.And above it evaluates lazily. Branches are evaluated only until one fails.
func AssertionAnd[A any](assertions ...func(A) (bool, error)) func(A) (bool, error) {
	return func(b A) (bool, error) {
		var errors error
		for _, s := range assertions {
			success, err := s(b)
			if !success {
				if err != nil {
					errors = multierror.Append(errors, err)
				}
				return false, errors
			}
		}
		return true, nil
	}
}
propcheck/prop.go
0.678647
0.486697
prop.go
starcoder
// Package day03 solves Advent of Code 2015, day 3.
package day03

import (
	"fmt"
	"io"
	"os"
)

// point is a house location on the infinite 2D grid.
type point struct {
	x int
	y int
}

/*
Solve1 solves the following AOC 2015 problem:

Santa is delivering presents to an infinite two-dimensional grid of houses.

He begins by delivering a present to the house at his starting location, and then an elf at the North Pole calls him
via radio and tells him where to move next. Moves are always exactly one house to the north (^), south (v), east (>),
or west (<). After each move, he delivers another present to the house at his new location.

However, the elf back at the north pole has had a little too much eggnog, and so his directions are a little off,
and Santa ends up visiting some houses more than once. How many houses receive at least one present?

For example:

> delivers presents to 2 houses: one at the starting location, and one to the east.
^>v< delivers presents to 4 houses in a square, including twice to the house at his starting/ending location.
^v^v^v^v^v delivers a bunch of presents to some very lucky children at only 2 houses.
*/
func Solve1(input io.Reader) (uniqueHouses int, err error) {
	var currentLocation point
	// Present count per house; the map's key set doubles as the set of visited houses.
	houses := make(map[point]int)
	houses[currentLocation] = 1
	for {
		// %c reads exactly one rune and does not skip whitespace, so a trailing
		// newline in the input will hit the default case below and error out.
		var r rune
		_, se := fmt.Fscanf(input, "%c", &r)
		if se == io.EOF {
			break
		} else if se != nil {
			err = se
			return
		}
		switch r {
		case '>':
			currentLocation.x++
		case '<':
			currentLocation.x--
		case '^':
			currentLocation.y++
		case 'v', 'V':
			currentLocation.y--
		default:
			err = fmt.Errorf("Unexpected character %c", r)
			return
		}
		houses[currentLocation]++
	}
	uniqueHouses = len(houses)
	return
}

/*
Solve2 solves the following AOC 2015 problem:

The next year, to speed up the process, Santa creates a robot version of himself, Robo-Santa, to deliver presents
with him.

Santa and Robo-Santa start at the same location (delivering two presents to the same starting house), then take
turns moving based on instructions from the elf, who is eggnoggedly reading from the same script as the previous
year.

This year, how many houses receive at least one present?

For example:

^v delivers presents to 3 houses, because Santa goes north, and then Robo-Santa goes south.
^>v< now delivers presents to 3 houses, and Santa and Robo-Santa end up back where they started.
^v^v^v^v^v now delivers presents to 11 houses, with Santa going one direction and Robo-Santa going the other.
*/
func Solve2(input io.Reader) (uniqueHouses int, err error) {
	var santaLoc, roboSantaLoc point
	var i int
	houses := make(map[point]int)
	houses[point{0, 0}] = 2
	for {
		var r rune
		_, se := fmt.Fscanf(input, "%c", &r)
		if se == io.EOF {
			break
		} else if se != nil {
			err = se
			return
		}
		// Even moves are Santa's, odd moves are Robo-Santa's.
		var currentLocation *point
		if (i % 2) == 0 {
			currentLocation = &santaLoc
		} else {
			currentLocation = &roboSantaLoc
		}
		switch r {
		case '>':
			currentLocation.x++
		case '<':
			currentLocation.x--
		case '^':
			currentLocation.y++
		case 'v', 'V':
			currentLocation.y--
		default:
			err = fmt.Errorf("Unexpected character %c", r)
			return
		}
		houses[*currentLocation]++
		i++
	}
	uniqueHouses = len(houses)
	return
}

/*
Solver holds the File reference for this day's input and implements the SolutionPrinter interface.
*/
type Solver struct {
	Input *os.File
}

/*
PrintSolutions prints the solutions for this day's problems.
*/
func (s Solver) PrintSolutions() {
	defer s.Input.Close()
	res, err := Solve1(s.Input)
	if err != nil {
		fmt.Println("ERROR:", err)
	} else {
		fmt.Printf("Day 3, Part 1: %d houses receive at least one present\n", res)
	}
	// Fix: the Seek error was previously ignored; a failed rewind would make
	// Solve2 silently read nothing and report a wrong answer.
	if _, err := s.Input.Seek(0, io.SeekStart); err != nil {
		fmt.Println("ERROR:", err)
		return
	}
	res, err = Solve2(s.Input)
	if err != nil {
		fmt.Println("ERROR:", err)
	} else {
		fmt.Printf("Day 3, Part 2: %d houses receive at least one present.\n", res)
	}
}
day03/day03.go
0.507568
0.597667
day03.go
starcoder
// Package sampler generates sample points for the ray tracer.
package sampler

import (
	"errors"
	"fmt"
	"jensmcatanho/raytracer-go/math/geometry"
	"math/rand"
	"time"
)

// Sampler produces Samples sample points per pixel, drawn from Sets
// pre-generated sample sets to decorrelate neighboring pixels.
type Sampler struct {
	Samples int
	Sets    int

	method          func(int, int, *[]geometry.Vector)
	samples         []geometry.Vector
	shuffledIndices []int
	count           int
	jump            int
}

// NewSampler creates a Sampler from up to three optional arguments:
// number of samples (int), number of sets (int), and a sampling method
// (func(int, int, *[]geometry.Vector)). Defaults are 1 sample, 1 set, nil method.
// Sample must be called before SampleUnitSquare to populate the sample points.
func NewSampler(args ...interface{}) (*Sampler, error) {
	samples, sets, method, err := samplerParams(args)

	// Fix: shuffledIndices must hold one permutation of [0, samples) per set,
	// concatenated (length samples*sets). The previous rand.Perm(samples) was
	// too short and made SampleUnitSquare index out of range whenever sets > 1.
	shuffled := make([]int, 0, samples*sets)
	for i := 0; i < sets; i++ {
		shuffled = append(shuffled, rand.Perm(samples)...)
	}

	sampler := &Sampler{
		Samples:         samples,
		Sets:            sets,
		method:          method,
		shuffledIndices: shuffled,
		count:           0,
		jump:            0,
	}

	return sampler, err
}

// samplerParams validates and unpacks the variadic NewSampler arguments,
// falling back to 1 sample / 1 set when absent.
func samplerParams(args []interface{}) (samples, sets int, method func(int, int, *[]geometry.Vector), err error) {
	samples = 1
	sets = 1

	if len(args) > 3 {
		err = fmt.Errorf("Invalid number of arguments: %d arguments received", len(args))
		return
	}

	for i, paramInterface := range args {
		switch i {
		case 0:
			param, ok := paramInterface.(int)
			if !ok {
				err = errors.New("1st parameter is not of type int")
				return
			}
			samples = param

		case 1:
			param, ok := paramInterface.(int)
			if !ok {
				err = errors.New("2nd parameter is not of type int")
				return
			}
			sets = param

		case 2:
			param, ok := paramInterface.(func(int, int, *[]geometry.Vector))
			if !ok {
				// Fix: this message was copy-pasted from the 2nd-parameter case.
				err = errors.New("3rd parameter is not of type func(int, int, *[]geometry.Vector)")
				return
			}
			method = param
		}
	}

	return
}

// Sample generates the sample points using the configured method.
// It must be called before SampleUnitSquare.
func (s *Sampler) Sample() {
	s.method(s.Samples, s.Sets, &s.samples)
}

// SampleUnitSquare returns the next sample point on the unit square, jumping to
// a random set every Samples draws.
func (s *Sampler) SampleUnitSquare() geometry.Vector {
	s.setJump()

	sample := s.samples[s.jump+s.shuffledIndices[s.jump+s.count%s.Samples]]
	s.count++

	return sample
}

// setJump picks a new random set offset at the start of every Samples-long run.
// NOTE(review): reseeding the global RNG on every call is unnecessary and makes
// draws within the same nanosecond repeat; consider seeding once at startup.
func (s *Sampler) setJump() {
	rand.Seed(time.Now().UnixNano())
	if s.count%s.Samples == 0 {
		s.jump = (rand.Int() % s.Sets) * s.Samples
	}
}

// shuffleX shuffles the X coordinates of the samples within each set.
func shuffleX(numSamples, numSets int, samples *[]geometry.Vector) {
	rand.Seed(time.Now().UnixNano())

	for i := 0; i < numSets; i++ {
		for j := 0; j < numSamples-1; j++ {
			index := rand.Int()%numSamples + i*numSamples
			value := (*samples)[j+i*numSamples+1].X
			(*samples)[j+i*numSamples+1].X = (*samples)[index].X
			(*samples)[index].X = value
		}
	}
}

// shuffleY shuffles the Y coordinates of the samples within each set.
func shuffleY(numSamples, numSets int, samples *[]geometry.Vector) {
	rand.Seed(time.Now().UnixNano())

	for i := 0; i < numSets; i++ {
		for j := 0; j < numSamples-1; j++ {
			index := rand.Int()%numSamples + i*numSamples
			value := (*samples)[j+i*numSamples+1].Y
			(*samples)[j+i*numSamples+1].Y = (*samples)[index].Y
			(*samples)[index].Y = value
		}
	}
}
math/sampler/sampler.go
0.535098
0.456894
sampler.go
starcoder
// Package aferosteps provides godog step definitions for manipulating and
// asserting on afero file systems.
package aferosteps

import (
	"context"
	"fmt"
	"os"
	"path/filepath"
	"sync"

	"github.com/cucumber/godog"
	"github.com/godogx/expandvars"
	"github.com/nhatthm/aferoassert"
	"github.com/spf13/afero"
)

// defaultFs is the registry key of the file system used by the un-suffixed steps.
const defaultFs = "_default"

// TempDirer creates a temp dir every time it is called.
type TempDirer interface {
	TempDir() string
}

// Option is to configure Manager.
type Option func(m *Manager)

// Manager manages a list of file systems and provides steps for godog.
type Manager struct {
	td TempDirer // temp-dir provider, set per scenario via WithTempDirer.

	fss map[string]afero.Fs // registered file systems, keyed by name.

	testDir string // working directory captured at construction; restored between scenarios.

	trackedFiles map[string][]string // per-fs paths created by steps, removed in cleanup().

	mu sync.Mutex // guards td, fss and trackedFiles.
}

// registerExpander makes $TEST_DIR, $CWD and $WORKING_DIR expandable in step
// arguments.
func (m *Manager) registerExpander(ctx *godog.ScenarioContext) {
	expandvars.NewStepExpander(
		func() expandvars.Pairs {
			cwd, err := os.Getwd()
			mustNoError(err)

			return expandvars.Pairs{
				"TEST_DIR":    m.testDir,
				"CWD":         cwd,
				"WORKING_DIR": cwd,
			}
		},
	).RegisterExpander(ctx)
}

// RegisterContext registers all the steps.
func (m *Manager) RegisterContext(td TempDirer, ctx *godog.ScenarioContext) {
	m.registerExpander(ctx)

	// Reset working directory before each scenario, and clean up tracked files
	// (plus reset the directory again) afterwards.
	ctx.Before(func(context.Context, *godog.Scenario) (context.Context, error) {
		m.WithTempDirer(td)
		_ = m.resetDir() // nolint: errcheck

		return nil, nil
	})

	ctx.After(func(context.Context, *godog.Scenario, error) (context.Context, error) {
		m.cleanup()
		_ = m.resetDir() // nolint: errcheck

		return nil, nil
	})

	// Utils.
	ctx.Step(`(?:current|working) directory is temporary`, m.chTempDir)
	ctx.Step(`(?:current|working) directory is "([^"]+)"`, m.chDir)
	ctx.Step(`changes? (?:current|working) directory to "([^"]+)"`, m.chDir)
	ctx.Step(`resets? (?:current|working) directory`, m.resetDir)

	// Default FS.
	ctx.Step(`^there is no (?:file|directory) "([^"]+)"$`, m.removeFile)
	ctx.Step(`^there is a file "([^"]+)"$`, m.createFile)
	ctx.Step(`^there is a directory "([^"]+)"$`, m.createDirectory)
	ctx.Step(`^there is a file "([^"]+)" with content:`, m.createFileWithContent)
	ctx.Step(`changes? "([^"]+)" permission to ([0-9]+)$`, m.chmod)
	ctx.Step(`^(?:file|directory) "([^"]+)" permission is ([0-9]+)$`, m.chmod)
	ctx.Step(`^there should be a file "([^"]+)"$`, m.assertFileExists)
	ctx.Step(`^there should be a directory "([^"]+)"$`, m.assertDirectoryExists)
	ctx.Step(`^there should be a file "([^"]+)" with content:`, m.assertFileContent)
	ctx.Step(`^there should be a file "([^"]+)" with content matches:`, m.assertFileContentRegexp)
	ctx.Step(`^(?:file|directory) "([^"]+)" permission should be ([0-9]+)$`, m.assertFilePerm)
	ctx.Step(`^there should be only these files:`, m.assertTreeEqual)
	ctx.Step(`^there should be these files:`, m.assertTreeContains)
	ctx.Step(`^there should be only these files in "([^"]+)":`, m.assertTreeEqualInPath)
	ctx.Step(`^there should be these files in "([^"]+)":`, m.assertTreeContainsInPath)

	// Another FS.
	ctx.Step(`^there is no (?:file|directory) "([^"]+)" in "([^"]+)" (?:fs|filesystem|file system)$`, m.removeFileInFs)
	ctx.Step(`^there is a file "([^"]+)" in "([^"]+)" (?:fs|filesystem|file system)$`, m.createFileInFs)
	ctx.Step(`^there is a directory "([^"]+)" in "([^"]+)" (?:fs|filesystem|file system)$`, m.createDirectoryInFs)
	ctx.Step(`^there is a file "([^"]+)" in "([^"]+)" (?:fs|filesystem|file system) with content:`, m.createFileInFsWithContent)
	ctx.Step(`changes? "([^"]+)" permission in "([^"]+)" (?:fs|filesystem|file system) to ([0-9]+)$`, m.chmodInFs)
	ctx.Step(`^(?:file|directory) "([^"]+)" permission in "([^"]+)" (?:fs|filesystem|file system) is ([0-9]+)$`, m.chmodInFs)
	ctx.Step(`^there should be a file "([^"]+)" in "([^"]+)" (?:fs|filesystem|file system)$`, m.assertFileExistsInFs)
	ctx.Step(`^there should be a directory "([^"]+)" in "([^"]+)" (?:fs|filesystem|file system)$`, m.assertDirectoryExistsInFs)
	ctx.Step(`^there should be a file "([^"]+)" in "([^"]+)" (?:fs|filesystem|file system) with content:`, m.assertFileContentInFs)
	ctx.Step(`^there should be a file "([^"]+)" in "([^"]+)" (?:fs|filesystem|file system) with content matches:`, m.assertFileContentRegexpInFs)
	ctx.Step(`^(?:file|directory) "([^"]+)" permission in "([^"]+)" (?:fs|filesystem|file system) should be ([0-9]+)$`, m.assertFilePermInFs)
	ctx.Step(`^there should be only these files in "([^"]+)" (?:fs|filesystem|file system):`, m.assertTreeEqualInFs)
	ctx.Step(`^there should be these files in "([^"]+)" (?:fs|filesystem|file system):`, m.assertTreeContainsInFs)
	ctx.Step(`^there should be only these files in "([^"]+)" in "([^"]+)" (?:fs|filesystem|file system):`, m.assertTreeEqualInPathInFs)
	ctx.Step(`^there should be these files in "([^"]+)" in "([^"]+)" (?:fs|filesystem|file system):`, m.assertTreeContainsInPathInFs)
}

// WithTempDirer sets the TempDirer.
func (m *Manager) WithTempDirer(td TempDirer) *Manager {
	m.mu.Lock()
	defer m.mu.Unlock()

	m.td = td

	return m
}

// cleanup removes every tracked path from its file system and resets tracking.
func (m *Manager) cleanup() {
	for id, files := range m.trackedFiles {
		fs := m.fs(id)

		for _, f := range files {
			_ = fs.RemoveAll(f) // nolint: errcheck
		}
	}

	m.mu.Lock()
	defer m.mu.Unlock()

	m.trackedFiles = make(map[string][]string)
}

// trackPath walks up from path and returns the topmost ancestor (or path
// itself) that does not yet exist — i.e. the root that a create step will
// bring into being — or "" if the path already exists.
func (m *Manager) trackPath(fs afero.Fs, path string) (string, error) {
	parent := filepath.Dir(path)
	if parent != "." {
		track, err := m.trackPath(fs, parent)
		if err != nil {
			return "", err
		}

		if track != "" {
			return track, nil
		}
	}

	if _, err := fs.Stat(path); err != nil {
		if os.IsNotExist(err) {
			return path, nil
		}

		return "", fmt.Errorf("could not stat(%q): %w", path, err)
	}

	return "", nil
}

// track records the not-yet-existing root of path in fs so cleanup() can
// remove it after the scenario.
func (m *Manager) track(fs string, path string) error {
	if _, ok := m.trackedFiles[fs]; !ok {
		m.trackedFiles[fs] = make([]string, 0)
	}

	path, err := m.trackPath(m.fs(fs), filepath.Clean(path))
	if err != nil {
		return err
	}

	if path == "" {
		return nil
	}

	m.trackedFiles[fs] = append(m.trackedFiles[fs], path)

	return nil
}

// fs returns the registered file system for the given name.
func (m *Manager) fs(name string) afero.Fs {
	m.mu.Lock()
	defer m.mu.Unlock()

	return m.fss[name]
}

// chTempDir switches the working directory to a fresh temp dir.
func (m *Manager) chTempDir() error {
	// TempDir will be deleted automatically, we don't need to track it manually.
	return m.chDir(m.td.TempDir())
}

// chDir changes the process working directory.
func (m *Manager) chDir(dir string) error {
	return os.Chdir(dir)
}

// resetDir restores the working directory captured at construction time.
func (m *Manager) resetDir() error {
	return m.chDir(m.testDir)
}

// Default-FS step bodies; each delegates to the *InFs variant with defaultFs.

func (m *Manager) chmod(path, perm string) error {
	return m.chmodInFs(path, defaultFs, perm)
}

func (m *Manager) removeFile(path string) error {
	return m.removeFileInFs(path, defaultFs)
}

func (m *Manager) createFile(path string) error {
	return m.createFileInFs(path, defaultFs)
}

func (m *Manager) createDirectory(path string) error {
	return m.createDirectoryInFs(path, defaultFs)
}

func (m *Manager) createFileWithContent(path string, body *godog.DocString) error {
	return m.createFileInFsWithContent(path, defaultFs, body)
}

func (m *Manager) assertFileExists(path string) error {
	return m.assertFileExistsInFs(path, defaultFs)
}

func (m *Manager) assertDirectoryExists(path string) error {
	return m.assertDirectoryExistsInFs(path, defaultFs)
}

func (m *Manager) assertFileContent(path string, body *godog.DocString) error {
	return m.assertFileContentInFs(path, defaultFs, body)
}

func (m *Manager) assertFileContentRegexp(path string, body *godog.DocString) error {
	return m.assertFileContentRegexpInFs(path, defaultFs, body)
}

func (m *Manager) assertFilePerm(path string, perm string) error {
	return m.assertFilePermInFs(path, defaultFs, perm)
}

func (m *Manager) assertTreeEqual(body *godog.DocString) error {
	return m.assertTreeEqualInFs(defaultFs, body)
}

func (m *Manager) assertTreeEqualInPath(path string, body *godog.DocString) error {
	return m.assertTreeEqualInPathInFs(path, defaultFs, body)
}

func (m *Manager) assertTreeContains(body *godog.DocString) error {
	return m.assertTreeContainsInFs(defaultFs, body)
}

func (m *Manager) assertTreeContainsInPath(path string, body *godog.DocString) error {
	return m.assertTreeContainsInPathInFs(path, defaultFs, body)
}

// chmodInFs changes the permission of path in the named fs; permStr is octal
// (parsed by strToPerm, defined elsewhere in the package).
func (m *Manager) chmodInFs(path string, fs string, permStr string) error {
	perm, err := strToPerm(permStr)
	if err != nil {
		return err
	}

	return m.fs(fs).Chmod(path, perm)
}

// removeFileInFs deletes path (file or directory tree) from the named fs.
func (m *Manager) removeFileInFs(path, fs string) error {
	return m.fs(fs).RemoveAll(path)
}

// createFileInFs creates an empty file at path in the named fs.
func (m *Manager) createFileInFs(path, fs string) error {
	return m.createFileInFsWithContent(path, fs, nil)
}

// createDirectoryInFs creates (and tracks for cleanup) a directory tree.
func (m *Manager) createDirectoryInFs(path, fs string) error {
	if err := m.track(fs, path); err != nil {
		return fmt.Errorf("could not track directory: %w", err)
	}

	path = filepath.Clean(path)

	if err := m.fs(fs).MkdirAll(path, 0o755); err != nil {
		return fmt.Errorf("could not mkdir %q: %w", path, err)
	}

	return nil
}

// createFileInFsWithContent creates (and tracks for cleanup) a file with the
// given doc-string content; nil body yields an empty file.
func (m *Manager) createFileInFsWithContent(path, fsID string, body *godog.DocString) error {
	if err := m.track(fsID, path); err != nil {
		return fmt.Errorf("could not track file: %w", err)
	}

	fs := m.fs(fsID)

	parent := filepath.Dir(path)
	if err := fs.MkdirAll(parent, 0o755); err != nil {
		return fmt.Errorf("could not mkdir %q: %w", parent, err)
	}

	path = filepath.Clean(path)

	f, err := fs.Create(path)
	if err != nil {
		return fmt.Errorf("could not create %q: %w", path, err)
	}

	defer f.Close() // nolint: errcheck

	if body != nil {
		if _, err = f.WriteString(body.Content); err != nil {
			return fmt.Errorf("could not write file %q: %w", path, err)
		}
	}

	return nil
}

// The assert*InFs helpers run an aferoassert check through teeError (defined
// elsewhere in the package), which captures the assertion failure as an error.

func (m *Manager) assertFileExistsInFs(path string, fs string) error {
	t := teeError()
	if !aferoassert.FileExists(t, m.fs(fs), path) {
		return t.LastError()
	}

	return nil
}

func (m *Manager) assertDirectoryExistsInFs(path string, fs string) error {
	t := teeError()
	if !aferoassert.DirExists(t, m.fs(fs), path) {
		return t.LastError()
	}

	return nil
}

func (m *Manager) assertFileContentInFs(path string, fs string, body *godog.DocString) error {
	t := teeError()
	if !aferoassert.FileContent(t, m.fs(fs), path, body.Content) {
		return t.LastError()
	}

	return nil
}

func (m *Manager) assertFileContentRegexpInFs(path string, fs string, body *godog.DocString) error {
	t := teeError()
	if !aferoassert.FileContentRegexp(t, m.fs(fs), path, fileContentRegexp(body.Content)) {
		return t.LastError()
	}

	return nil
}

func (m *Manager) assertFilePermInFs(path string, fs string, permStr string) error {
	perm, err := strToPerm(permStr)
	if err != nil {
		return err
	}

	t := teeError()
	if !aferoassert.Perm(t, m.fs(fs), path, perm) {
		return t.LastError()
	}

	return nil
}

func (m *Manager) assertTreeEqualInFs(fs string, body *godog.DocString) error {
	return m.assertTreeEqualInPathInFs("", fs, body)
}

func (m *Manager) assertTreeEqualInPathInFs(path, fs string, body *godog.DocString) error {
	t := teeError()
	if !aferoassert.YAMLTreeEqual(t, m.fs(fs), body.Content, path) {
		return t.LastError()
	}

	return nil
}

func (m *Manager) assertTreeContainsInFs(fs string, body *godog.DocString) error {
	return m.assertTreeContainsInPathInFs("", fs, body)
}

func (m *Manager) assertTreeContainsInPathInFs(path, fs string, body *godog.DocString) error {
	t := teeError()
	if !aferoassert.YAMLTreeContains(t, m.fs(fs), body.Content, path) {
		return t.LastError()
	}

	return nil
}

// NewManager initiates a new Manager.
func NewManager(options ...Option) *Manager {
	cwd, err := os.Getwd()
	mustNoError(err)

	m := &Manager{
		fss: map[string]afero.Fs{
			defaultFs: afero.NewOsFs(),
		},
		testDir:      cwd,
		trackedFiles: make(map[string][]string),
	}

	for _, o := range options {
		o(m)
	}

	return m
}

// WithFs sets a file system by name.
func WithFs(name string, fs afero.Fs) Option {
	return func(m *Manager) {
		m.fss[name] = fs
	}
}

// WithDefaultFs sets the default file system.
func WithDefaultFs(fs afero.Fs) Option {
	return func(m *Manager) {
		m.fss[defaultFs] = fs
	}
}
manager.go
0.520009
0.477006
manager.go
starcoder
package evaluator import ( "clint/ast" "clint/object" ) var ( NULL = &object.Null{} TRUE = &object.Boolean{Value: true} FALSE = &object.Boolean{Value: false} ) func Eval(node ast.Node) object.Object { switch node := node.(type) { case *ast.Program: return evalProgram(node) case *ast.ExpressionStatement: return Eval(node.Expression) case *ast.PrefixExpression: right := Eval(node.RightHand) return evalPrefixExpression(node.Operator, right) case *ast.InfixExpression: left := Eval(node.LeftHand) right := Eval(node.RightHand) return evalInfixExpression(node.Operator, left, right) case *ast.IntegerLiteral: return &object.Integer{Value: node.Value} case *ast.Boolean: return nativeBoolToBooleanObject(node.Value) case *ast.BlockStatement: return evalBlockStatement(node) case *ast.IfExpression: return evalIfExpression(node) case *ast.ReturnStatement: val := Eval(node.ReturnValue) return &object.ReturnValue{Value: val} } return nil } func evalProgram(program *ast.Program) object.Object { var result object.Object for _, statement := range program.Statements { result = Eval(statement) if returnValue, ok := result.(*object.ReturnValue); ok { return returnValue.Value } } return result } func evalBlockStatement(block *ast.BlockStatement) object.Object { var result object.Object for _, statement := range block.Statements { result = Eval(statement) if result != nil && result.Type() == object.RETURN_VALUE_OBJ { return result } } return result } func evalPrefixExpression(operator string, right object.Object) object.Object { switch operator { case "!": return evalBangOperatorExpression(right) case "-": return evalMinusPrefixOperatorExpression(right) default: return NULL } } func evalInfixExpression(operator string, left, right object.Object) object.Object { switch { case left.Type() == object.INTEGER_OBJ && right.Type() == object.INTEGER_OBJ: return evalIntegerInfixExpression(operator, left, right) default: return NULL } } func evalIntegerInfixExpression(operator string, left, right 
object.Object) object.Object { leftHandValue := left.(*object.Integer).Value rightHandValue := right.(*object.Integer).Value switch operator { case "+": return &object.Integer{Value: leftHandValue + rightHandValue} case "-": return &object.Integer{Value: leftHandValue - rightHandValue} case "*": return &object.Integer{Value: leftHandValue * rightHandValue} case "/": return &object.Integer{Value: leftHandValue / rightHandValue} case "<": return nativeBoolToBooleanObject(leftHandValue < rightHandValue) case ">": return nativeBoolToBooleanObject(leftHandValue > rightHandValue) case "==": return nativeBoolToBooleanObject(leftHandValue == rightHandValue) case "!=": return nativeBoolToBooleanObject(leftHandValue != rightHandValue) default: return NULL } } func evalIfExpression(exp *ast.IfExpression) object.Object { condition := Eval(exp.Condition) if isTruthy(condition) { return Eval(exp.Consequence) } else if exp.Alternative != nil { return Eval(exp.Alternative) } else { return NULL } } func evalBangOperatorExpression(right object.Object) object.Object { switch right { case TRUE: return FALSE case FALSE: return TRUE case NULL: return TRUE default: return FALSE } } func evalMinusPrefixOperatorExpression(right object.Object) object.Object { if right.Type() != object.INTEGER_OBJ { return NULL } value := right.(*object.Integer).Value return &object.Integer{Value: -value} } func nativeBoolToBooleanObject(input bool) *object.Boolean { if input { return TRUE } return FALSE } func isTruthy(obj object.Object) bool { switch obj { case NULL: return false case TRUE: return true case FALSE: return false default: return true } }
evaluator/evaluator.go
0.60871
0.427038
evaluator.go
starcoder
package types import "github.com/attic-labs/noms/go/hash" type SkipValueCallback func(v Value) bool // WalkValues loads prolly trees progressively by walking down the tree. We don't wants to invoke // the value callback on internal sub-trees (which are valid values) because they are not logical // values in the graph type valueRec struct { v Value cb bool } const maxRefCount = 1 << 12 // ~16MB of data // WalkValues recursively walks over all types.Values reachable from r and calls cb on them. func WalkValues(target Value, vr ValueReader, cb SkipValueCallback) { visited := hash.HashSet{} refs := map[hash.Hash]bool{} values := []valueRec{{target, true}} for len(values) > 0 || len(refs) > 0 { for len(values) > 0 { rec := values[len(values)-1] values = values[:len(values)-1] v := rec.v if rec.cb && cb(v) { continue } if _, ok := v.(Blob); ok { continue // don't traverse into blob ptrees } if r, ok := v.(Ref); ok { refs[r.TargetHash()] = true continue } if col, ok := v.(Collection); ok && !col.asSequence().isLeaf() { col.WalkRefs(func(r Ref) { refs[r.TargetHash()] = false }) continue } v.WalkValues(func(sv Value) { values = append(values, valueRec{sv, true}) }) } if len(refs) == 0 { continue } hs := make(hash.HashSlice, 0, len(refs)) oldRefs := refs refs = map[hash.Hash]bool{} for h := range oldRefs { if _, ok := visited[h]; ok { continue } if len(hs) >= maxRefCount { refs[h] = oldRefs[h] continue } hs = append(hs, h) visited.Insert(h) } if len(hs) > 0 { readValues := vr.ReadManyValues(hs) for i, sv := range readValues { values = append(values, valueRec{sv, oldRefs[hs[i]]}) } } } } func mightContainStructs(t *Type) (mightHaveStructs bool) { if t.TargetKind() == StructKind || t.TargetKind() == ValueKind { mightHaveStructs = true return } t.WalkValues(func(v Value) { mightHaveStructs = mightHaveStructs || mightContainStructs(v.(*Type)) }) return }
go/types/walk.go
0.577257
0.410934
walk.go
starcoder
package nlp import ( "io" "math" "github.com/james-bowman/sparse" "gonum.org/v1/gonum/mat" ) // TfidfTransformer takes a raw term document matrix and weights each raw term frequency // value depending upon how commonly it occurs across all documents within the corpus. // For example a very commonly occurring word like `the` is likely to occur in all documents // and so would be weighted down. // More precisely, TfidfTransformer applies a tf-idf algorithm to the matrix where each // term frequency is multiplied by the inverse document frequency. Inverse document // frequency is calculated as log(n/df) where df is the number of documents in which the // term occurs and n is the total number of documents within the corpus. We add 1 to both n // and df before division to prevent division by zero. // weightPadding can be used to add a value to weights after calculation to make sure terms with zero idf don't get suppressed entirely // l2Normalization can be used to l2 normalize the values in the matrix after a Transform() is done, done on either each row or each column // smoothIDF can be used to prevent zero divisions by adding 1 to the numerator and denominator of all IDF calculations as if an extra document with 1 instance of each term was seen type TfidfTransformer struct { transform *sparse.DIA weightPadding float64 l2Normalization int smoothIDF bool } //L2 Normalization options for the TF-IDF Transformer const ( NoL2Normalization = iota RowBasedL2Normalization ColBasedL2Normalization ) // NewTfidfTransformer constructs a new TfidfTransformer. 
func NewTfidfTransformer() *TfidfTransformer { return &TfidfTransformer{} } // GetSmoothIDF retrieives a boolean that represents if the current TfidfTransformer is configured to smooth IDF values func (t *TfidfTransformer) GetSmoothIDF() bool { return t.smoothIDF } // SetSmoothIDF sets the TfidfTransformer configuration to either smooth IDF values or during calculation or to leave them raw func (t *TfidfTransformer) SetSmoothIDF(smoothIDF bool) { t.smoothIDF = smoothIDF } // GetWeightPadding retrieves the weight padding that is added to weights during Fit() func (t *TfidfTransformer) GetWeightPadding() float64 { return t.weightPadding } // SetWeightPadding sets the weight padding that is added to weights during Fit() func (t *TfidfTransformer) SetWeightPadding(wp float64) { t.weightPadding = wp } //GetL2Normalization retrieves the type of normalization done during Transform() func (t *TfidfTransformer) GetL2Normalization() int { return t.l2Normalization } // SetL2Normalization sets the type of normalization done during Transform() func (t *TfidfTransformer) SetL2Normalization(ln int) { t.l2Normalization = ln } // Fit takes a training term document matrix, counts term occurrences across all documents // and constructs an inverse document frequency transform to apply to matrices in subsequent // calls to Transform(). func (t *TfidfTransformer) Fit(matrix mat.Matrix) Transformer { if t, isTypeConv := matrix.(sparse.TypeConverter); isTypeConv { matrix = t.ToCSR() } m, n := matrix.Dims() smoothing := 0 if t.smoothIDF { smoothing = 1 } weights := make([]float64, m) var df int if csr, ok := matrix.(*sparse.CSR); ok { for i := 0; i < m; i++ { // weight padding can be used to ensure terms with zero idf don't get suppressed entirely. 
weights[i] = math.Log(float64(smoothing+n)/float64(smoothing+csr.RowNNZ(i))) + t.weightPadding } } else { for i := 0; i < m; i++ { df = 0 for j := 0; j < n; j++ { if matrix.At(i, j) != 0 { df++ } } // weight padding can be used to ensure terms with zero idf don't get suppressed entirely. weights[i] = math.Log(float64(smoothing+n)/float64(smoothing+df)) + t.weightPadding } } // build a diagonal matrix from array of term weighting values for subsequent // multiplication with term document matrics t.transform = sparse.NewDIA(m, m, weights) return t } // Transform applies the inverse document frequency (IDF) transform by multiplying // each term frequency by its corresponding IDF value. This has the effect of weighting // each term frequency according to how often it appears across the whole document corpus // so that naturally frequent occurring words are given less weight than uncommon ones. // The returned matrix is a sparse matrix type. func (t *TfidfTransformer) Transform(matrix mat.Matrix) (mat.Matrix, error) { if t, isTypeConv := matrix.(sparse.TypeConverter); isTypeConv { matrix = t.ToCSR() } var product sparse.CSR // simply multiply the matrix by our idf transform (the diagonal matrix of term weights) product.Mul(t.transform, matrix) //Perform L2 normalization of the matrix if the option is selected if t.l2Normalization != NoL2Normalization { //Transpose the matrix to normalize based on columns if t.l2Normalization == ColBasedL2Normalization { product.Clone(product.T().(*sparse.CSC).ToCSR()) } rawProduct := product.RawMatrix() //Perform normalization for i := 0; i < rawProduct.I; i++ { sum := 0.0 for j := rawProduct.Indptr[i]; j < rawProduct.Indptr[i+1]; j++ { sum += rawProduct.Data[j] * rawProduct.Data[j] } if sum == 0.0 { continue } sum = math.Sqrt(sum) for j := rawProduct.Indptr[i]; j < rawProduct.Indptr[i+1]; j++ { rawProduct.Data[j] /= sum } } //Transpose the matrix back to original format if Column based normalization if t.l2Normalization == 
ColBasedL2Normalization { product.Clone(product.T().(*sparse.CSC).ToCSR()) } } return &product, nil } // FitTransform is exactly equivalent to calling Fit() followed by Transform() on the // same matrix. This is a convenience where separate training data is not being // used to fit the model i.e. the model is fitted on the fly to the test data. // The returned matrix is a sparse matrix type. func (t *TfidfTransformer) FitTransform(matrix mat.Matrix) (mat.Matrix, error) { if t, isTypeConv := matrix.(sparse.TypeConverter); isTypeConv { matrix = t.ToCSR() } return t.Fit(matrix).Transform(matrix) } // Save binary serialises the model and writes it into w. This is useful for persisting // a trained model to disk so that it may be loaded (using the Load() method)in another // context (e.g. production) for reproducible results. func (t TfidfTransformer) Save(w io.Writer) error { _, err := t.transform.MarshalBinaryTo(w) return err } // Load binary deserialises the previously serialised model into the receiver. This is // useful for loading a previously trained and saved model from another context // (e.g. offline training) for use within another context (e.g. production) for // reproducible results. Load should only be performed with trusted data. func (t *TfidfTransformer) Load(r io.Reader) error { var model sparse.DIA if _, err := model.UnmarshalBinaryFrom(r); err != nil { return err } t.transform = &model return nil }
weightings.go
0.880258
0.671152
weightings.go
starcoder
package packed // Efficient sequential read/write of packed integers. type BulkOperationPacked1 struct { *BulkOperationPacked } func newBulkOperationPacked1() BulkOperation { return &BulkOperationPacked1{newBulkOperationPacked(1)} } func (op *BulkOperationPacked1) decodeLongToInt(blocks []int64, values []int32, iterations int) { blocksOffset, valuesOffset := 0, 0 for i := 0; i < iterations; i++ { block := blocks[blocksOffset] blocksOffset++ for shift := uint(63); shift >= 0; shift -= 1 { values[valuesOffset] = int32((int64(uint64(block) >> shift)) & 1) valuesOffset++ } } } func (op *BulkOperationPacked1) DecodeByteToInt(blocks []byte, values []int32, iterations int) { blocksOffset, valuesOffset := 0, 0 for j := 0; j < iterations; j++ { block := blocks[blocksOffset] blocksOffset++ values[valuesOffset] = int32(byte(uint8(block))>>7) & 1 valuesOffset++ values[valuesOffset] = int32(byte(uint8(block))>>6) & 1 valuesOffset++ values[valuesOffset] = int32(byte(uint8(block))>>5) & 1 valuesOffset++ values[valuesOffset] = int32(byte(uint8(block))>>4) & 1 valuesOffset++ values[valuesOffset] = int32(byte(uint8(block))>>3) & 1 valuesOffset++ values[valuesOffset] = int32(byte(uint8(block))>>2) & 1 valuesOffset++ values[valuesOffset] = int32(byte(uint8(block))>>1) & 1 valuesOffset++ values[valuesOffset] = int32(block & 1) valuesOffset++ } } func (op *BulkOperationPacked1) DecodeLongToLong(blocks []int64, values []int64, iterations int) { blocksOffset, valuesOffset := 0, 0 for i := 0; i < iterations; i++ { block := blocks[blocksOffset] blocksOffset++ for shift := uint(63); shift >= 0; shift -= 1 { values[valuesOffset] = (int64(uint64(block) >> shift)) & 1 valuesOffset++ } } } func (op *BulkOperationPacked1) decodeByteToLong(blocks []byte, values []int64, iterations int) { blocksOffset, valuesOffset := 0, 0 for j := 0; j < iterations; j++ { block := blocks[blocksOffset] blocksOffset++ values[valuesOffset] = int64(byte(uint8(block))>>7) & 1 valuesOffset++ values[valuesOffset] = 
int64(byte(uint8(block))>>6) & 1 valuesOffset++ values[valuesOffset] = int64(byte(uint8(block))>>5) & 1 valuesOffset++ values[valuesOffset] = int64(byte(uint8(block))>>4) & 1 valuesOffset++ values[valuesOffset] = int64(byte(uint8(block))>>3) & 1 valuesOffset++ values[valuesOffset] = int64(byte(uint8(block))>>2) & 1 valuesOffset++ values[valuesOffset] = int64(byte(uint8(block))>>1) & 1 valuesOffset++ values[valuesOffset] = int64(block & 1) valuesOffset++ } }
core/util/packed/bulkOperation1.go
0.629888
0.614625
bulkOperation1.go
starcoder
package dlx type problem struct { // input rows uint32 columns uint32 matrix [][]uint32 partialSolution []uint32 // internal nodes [][]*node sentinel *node candidate *candidate } // DLX : dancing link solver interface to solve the defined problem type DLX interface { Solve(callback SolutionCallback) } func newDLX(matrix [][]uint32) *problem { rows := uint32(len(matrix)) columns := uint32(0) if rows > 0 { columns = uint32(len(matrix[0])) } p := problem{ rows: rows, columns: columns, matrix: matrix, } return &p } // NewDLX : create a new dancing link solver for the constraint matrix provided func NewDLX(matrix [][]uint32) DLX { return newDLX(matrix) } // NewDLXWithPartialSolution : create a new dancing link solver for the constraint matrix provided // and it starts from the given partial solution func NewDLXWithPartialSolution(matrix [][]uint32, partialSolution []uint32) DLX { p := newDLX(matrix) p.partialSolution = partialSolution return p } func (d *problem) Solve(callback SolutionCallback) { p := leftmost{} d.solveInternal(&p, callback) } func (d *problem) solveInternal(heuristic picker, callback SolutionCallback) { d.createNodeMatrix() d.linkNodes() d.createHeaders() d.linkHeaders() d.candidate = &candidate{} d.fillInPartialSolution() d.solveRec(heuristic, callback) } func (d *problem) createNodeMatrix() { d.nodes = make([][]*node, d.rows) for i := uint32(0); i < d.rows; i++ { d.nodes[i] = make([]*node, d.columns) for j := uint32(0); j < d.columns; j++ { if d.matrix[i][j] != 0 { d.nodes[i][j] = &node{ Row: int64(i), Column: int64(j), } } } } } func (d *problem) linkNodes() { var first, current, previous *node for i := uint32(0); i < d.rows; i++ { current = nil first = nil previous = nil for j := uint32(0); j < d.columns; j++ { if d.nodes[i][j] != nil { current = d.nodes[i][j] if first == nil { first = current } if previous != nil { previous.Right = current current.Left = previous } previous = current } } if current == nil { panic("LinkNodes: LR Current is nil!") 
} current.Right = first first.Left = current } for j := uint32(0); j < d.columns; j++ { current = nil first = nil previous = nil for i := uint32(0); i < d.rows; i++ { if d.nodes[i][j] != nil { current = d.nodes[i][j] if first == nil { first = current } if previous != nil { previous.Down = current current.Up = previous } previous = current } } if current == nil { panic("LinkNodes: UD Current is nil!") } current.Down = first first.Up = current } } func (d *problem) createHeaders() { var first, last, current *node for i := int64(d.columns) - 1; i >= 0; i-- { current = &node{ Row: -1, Column: i, } if last == nil { last = current } if first == nil { first = current } current.Right = first current.Left = last first.Left = current last.Right = current first = current } d.sentinel = &node{ Row: -1, Column: -1, } d.sentinel.Right = first d.sentinel.Left = last if last == nil { last = d.sentinel } if first == nil { first = d.sentinel } first.Left = d.sentinel last.Right = d.sentinel } func (d *problem) linkHeaders() { var first *node header := d.sentinel.Right var count uint32 for i := uint32(0); i < d.columns; i++ { count = 0 for j := int64(d.rows) - 1; j >= 0; j-- { if d.nodes[j][i] != nil { count++ first = d.nodes[j][i] } } header.Up = first.Up first.Up.Down = header first.Up = header header.Down = first header.Count = count header = header.Right } } func coverUpDown(node *node) { node.Up.Down = node.Down node.Down.Up = node.Up } func uncoverUpDown(node *node) { node.Up.Down = node node.Down.Up = node } func coverLeftRight(node *node) { node.Right.Left = node.Left node.Left.Right = node.Right } func uncoverLeftRight(node *node) { node.Right.Left = node node.Left.Right = node } func cover(header *node) { coverLeftRight(header) for node := header.Down; node != header; node = node.Down { if node.isHeader() { node.Count = node.Count - 1 } for r := node.Right; r != node; r = r.Right { coverUpDown(r) } } } func uncover(header *node) { for node := header.Up; node != header; node 
= node.Up { if node.isHeader() { node.Count = node.Count + 1 } for l := node.Left; l != node; l = l.Left { uncoverUpDown(l) } } uncoverLeftRight(header) } func headerFor(node *node) *node { n := node for !n.isHeader() { n = n.Up } return n } func (d *problem) solveRec(heuristic picker, callback SolutionCallback) bool { if d.sentinel.Right == d.sentinel { d.candidate.notify(callback) return true } column := heuristic.pick(d.sentinel) cover(column) for node := column.Down; node != column; node = node.Down { d.candidate.addNode(node) for r := node.Right; r != node; r = r.Right { cover(headerFor(r)) } if d.solveRec(heuristic, callback) && !callback.More() { return true } d.candidate.removeNode(node) for l := node.Left; l != node; l = l.Left { uncover(headerFor(l)) } } uncover(column) return false } func (d *problem) fillInPartialSolution() { var line uint32 var node *node for i := 0; i < len(d.partialSolution); i++ { line = d.partialSolution[i] for j := uint32(0); j < d.columns; j++ { if d.nodes[line][j] != nil { node = d.nodes[line][j] d.candidate.addNode(node) cover(headerFor(node)) for r := node.Right; r != node; r = r.Right { cover(headerFor(r)) } break } } } }
pkg/dlx/dlx.go
0.646795
0.400808
dlx.go
starcoder
package stateful import ( "fmt" "regexp" "time" "github.com/influxdata/kapacitor/tick/ast" ) type EvalLambdaNode struct { nodeEvaluator NodeEvaluator constReturnType ast.ValueType state ExecutionState } func NewEvalLambdaNode(lambda *ast.LambdaNode) (*EvalLambdaNode, error) { nodeEvaluator, err := createNodeEvaluator(lambda.Expression) if err != nil { return nil, fmt.Errorf("Failed to handle node: %v", err) } return &EvalLambdaNode{ nodeEvaluator: nodeEvaluator, constReturnType: getConstantNodeType(lambda.Expression), // Create an independent state for this expression state: CreateExecutionState(), }, nil } func (n *EvalLambdaNode) String() string { return fmt.Sprintf("%s", n.nodeEvaluator) } func (n *EvalLambdaNode) Type(scope ReadOnlyScope) (ast.ValueType, error) { if n.constReturnType == ast.InvalidType { // We are dynamic and we need to figure out our type // Do NOT cache this result in n.returnType since it can change. return n.nodeEvaluator.Type(scope) } return n.constReturnType, nil } func (n *EvalLambdaNode) IsDynamic() bool { return n.nodeEvaluator.IsDynamic() } func (n *EvalLambdaNode) EvalRegex(scope *Scope, _ ExecutionState) (*regexp.Regexp, error) { typ, err := n.Type(scope) if err != nil { return nil, err } if typ == ast.TRegex { return n.nodeEvaluator.EvalRegex(scope, n.state) } return nil, ErrTypeGuardFailed{RequestedType: ast.TRegex, ActualType: typ} } func (n *EvalLambdaNode) EvalTime(scope *Scope, _ ExecutionState) (time.Time, error) { typ, err := n.Type(scope) if err != nil { return time.Time{}, err } return time.Time{}, ErrTypeGuardFailed{RequestedType: ast.TTime, ActualType: typ} } func (n *EvalLambdaNode) EvalDuration(scope *Scope, _ ExecutionState) (time.Duration, error) { typ, err := n.Type(scope) if err != nil { return 0, err } if typ == ast.TDuration { return n.nodeEvaluator.EvalDuration(scope, n.state) } return 0, ErrTypeGuardFailed{RequestedType: ast.TDuration, ActualType: typ} } func (n *EvalLambdaNode) EvalString(scope *Scope, _ 
ExecutionState) (string, error) { typ, err := n.Type(scope) if err != nil { return "", err } if typ == ast.TString { return n.nodeEvaluator.EvalString(scope, n.state) } return "", ErrTypeGuardFailed{RequestedType: ast.TString, ActualType: typ} } func (n *EvalLambdaNode) EvalFloat(scope *Scope, _ ExecutionState) (float64, error) { typ, err := n.Type(scope) if err != nil { return 0, err } if typ == ast.TFloat { return n.nodeEvaluator.EvalFloat(scope, n.state) } return 0, ErrTypeGuardFailed{RequestedType: ast.TFloat, ActualType: typ} } func (n *EvalLambdaNode) EvalInt(scope *Scope, _ ExecutionState) (int64, error) { typ, err := n.Type(scope) if err != nil { return 0, err } if typ == ast.TInt { return n.nodeEvaluator.EvalInt(scope, n.state) } return 0, ErrTypeGuardFailed{RequestedType: ast.TInt, ActualType: typ} } func (n *EvalLambdaNode) EvalBool(scope *Scope, _ ExecutionState) (bool, error) { typ, err := n.Type(scope) if err != nil { return false, err } if typ == ast.TBool { return n.nodeEvaluator.EvalBool(scope, n.state) } return false, ErrTypeGuardFailed{RequestedType: ast.TBool, ActualType: typ} } func (n *EvalLambdaNode) EvalMissing(scope *Scope, _ ExecutionState) (*ast.Missing, error) { typ, err := n.Type(scope) if err != nil { return nil, err } if typ == ast.TMissing { return n.nodeEvaluator.EvalMissing(scope, n.state) } return nil, ErrTypeGuardFailed{RequestedType: ast.TBool, ActualType: typ} }
tick/stateful/eval_lambda_node.go
0.630344
0.417034
eval_lambda_node.go
starcoder
package neotest import ( "time" "github.com/signalfx/golib/datapoint" "github.com/signalfx/golib/event" "github.com/signalfx/golib/trace" "github.com/signalfx/signalfx-agent/internal/core/dpfilters" "github.com/signalfx/signalfx-agent/internal/monitors/types" ) // TestOutput can be used in place of the normal monitor outut to provide a // simpler way of testing monitor output. type TestOutput struct { dpChan chan *datapoint.Datapoint eventChan chan *event.Event spanChan chan *trace.Span dimPropChan chan *types.DimProperties } // NewTestOutput creates a new initialized TestOutput instance func NewTestOutput() *TestOutput { return &TestOutput{ dpChan: make(chan *datapoint.Datapoint, 1000), eventChan: make(chan *event.Event, 1000), spanChan: make(chan *trace.Span, 1000), dimPropChan: make(chan *types.DimProperties, 1000), } } // Copy the output object func (to *TestOutput) Copy() types.Output { return to } // SendDatapoint accepts a datapoint and sticks it in a buffered queue func (to *TestOutput) SendDatapoint(dp *datapoint.Datapoint) { to.dpChan <- dp } // SendEvent accepts an event and sticks it in a buffered queue func (to *TestOutput) SendEvent(event *event.Event) { to.eventChan <- event } // SendSpan accepts a trace span and sticks it in a buffered queue func (to *TestOutput) SendSpan(span *trace.Span) { to.spanChan <- span } // SendDimensionProps accepts a dim prop update and sticks it in a buffered queue func (to *TestOutput) SendDimensionProps(dimProps *types.DimProperties) { to.dimPropChan <- dimProps } // AddExtraDimension is a noop here func (to *TestOutput) AddExtraDimension(key, value string) {} // RemoveExtraDimension is a noop here func (to *TestOutput) RemoveExtraDimension(key string) {} // FlushDatapoints returns all of the datapoints injected into the channel so // far. 
func (to *TestOutput) FlushDatapoints() []*datapoint.Datapoint { var out []*datapoint.Datapoint for { select { case dp := <-to.dpChan: out = append(out, dp) default: return out } } } // FlushEvents returns all of the datapoints injected into the channel so // far. func (to *TestOutput) FlushEvents() []*event.Event { var out []*event.Event for { select { case event := <-to.eventChan: out = append(out, event) default: return out } } } // WaitForDPs will keep pulling datapoints off of the internal queue until it // either gets the expected count or waitSeconds seconds have elapsed. It then // returns those datapoints. It will never return more than 'count' datapoints. func (to *TestOutput) WaitForDPs(count, waitSeconds int) []*datapoint.Datapoint { var dps []*datapoint.Datapoint timeout := time.After(time.Duration(waitSeconds) * time.Second) loop: for { select { case dp := <-to.dpChan: dps = append(dps, dp) if len(dps) >= count { break loop } case <-timeout: break loop } } return dps } // WaitForDimensionProps will keep pulling dimension property updates off of // the internal queue until it either gets the expected count or waitSeconds // seconds have elapsed. It then returns those dimension property updates. It // will never return more than 'count' objects. func (to *TestOutput) WaitForDimensionProps(count, waitSeconds int) []*types.DimProperties { var dps []*types.DimProperties timeout := time.After(time.Duration(waitSeconds) * time.Second) loop: for { select { case dp := <-to.dimPropChan: dps = append(dps, dp) if len(dps) >= count { break loop } case <-timeout: break loop } } return dps } // AddDatapointExclusionFilter is a noop here. func (to *TestOutput) AddDatapointExclusionFilter(f dpfilters.DatapointFilter) { }
internal/neotest/output.go
0.656218
0.424531
output.go
starcoder
package portalprocess

import (
	"errors"
	"math/big"
	"sort"

	"github.com/incognitochain/incognito-chain/common"
	"github.com/incognitochain/incognito-chain/dataaccessobject/statedb"
	"github.com/incognitochain/incognito-chain/portal/portalv3"
)

// RateInfo holds the exchange rate of a token together with the number of
// decimal digits of its smallest (nano) unit.
type RateInfo struct {
	Rate    uint64
	Decimal uint8
}

// PortalExchangeRateTool converts amounts between portal tokens, collateral
// tokens and USDT based on a snapshot of the final exchange rates.
type PortalExchangeRateTool struct {
	Rates map[string]RateInfo
}

// getDecimal returns decimal for portal token or collateral tokens.
// Unknown tokens yield 0, which callers treat as "no rate available".
func getDecimal(portalParams portalv3.PortalParams, tokenID string) uint8 {
	supportPortalCollateral := portalParams.SupportedCollateralTokens
	if portalParams.IsPortalToken(tokenID) || tokenID == common.PRVIDStr {
		return 9
	}
	for _, col := range supportPortalCollateral {
		if tokenID == col.ExternalTokenID {
			return col.Decimal
		}
	}
	return 0
}

// NewPortalExchangeRateTool builds a rate tool from the final exchange rate
// state.  Tokens whose decimal count is unknown (getDecimal == 0) are skipped.
func NewPortalExchangeRateTool(
	finalExchangeRate *statedb.FinalExchangeRatesState,
	portalParams portalv3.PortalParams,
) *PortalExchangeRateTool {
	t := new(PortalExchangeRateTool)
	t.Rates = map[string]RateInfo{}

	for tokenID, detail := range finalExchangeRate.Rates() {
		decimal := getDecimal(portalParams, tokenID)
		if decimal > 0 {
			t.Rates[tokenID] = RateInfo{
				Rate:    detail.Amount,
				Decimal: decimal,
			}
		}
	}
	return t
}

// pow10 returns 10^exp as a big.Int.  Using big.Int arithmetic instead of
// uint64(math.Pow10(...)) keeps the multiplier exact for any decimal count.
func pow10(exp uint8) *big.Int {
	return new(big.Int).Exp(big.NewInt(10), big.NewInt(int64(exp)), nil)
}

// Convert converts amount in nano units from tokenIDFrom to tokenIDTo.
// The result is in nano units (smallest unit of token).  It errors when
// either token has no known rate.
// NOTE(review): the final Uint64() silently truncates results that exceed
// uint64 range — presumably amounts are bounded upstream; confirm.
func (t *PortalExchangeRateTool) Convert(tokenIDFrom string, tokenIDTo string, amount uint64) (uint64, error) {
	rateFrom := t.Rates[tokenIDFrom]
	rateTo := t.Rates[tokenIDTo]
	if rateFrom.Rate == 0 || rateTo.Rate == 0 {
		return 0, errors.New("invalid exchange rate to convert")
	}

	res := new(big.Int).Mul(new(big.Int).SetUint64(amount), new(big.Int).SetUint64(rateFrom.Rate))
	res.Mul(res, pow10(rateTo.Decimal))
	res.Div(res, pow10(rateFrom.Decimal))
	res.Div(res, new(big.Int).SetUint64(rateTo.Rate))
	return res.Uint64(), nil
}

// ConvertToUSD converts amount to usdt amount (in nano).
func (t *PortalExchangeRateTool) ConvertToUSD(tokenIDFrom string, amount uint64) (uint64, error) {
	rateFrom := t.Rates[tokenIDFrom]
	if rateFrom.Rate == 0 {
		return 0, errors.New("invalid exchange rate to convert to usdt")
	}

	res := new(big.Int).Mul(new(big.Int).SetUint64(amount), new(big.Int).SetUint64(rateFrom.Rate))
	res.Div(res, pow10(rateFrom.Decimal))
	return res.Uint64(), nil
}

// ConvertFromUSD converts amount from usdt to token amount (in nano).
// (The original doc comment mislabeled this function as ConvertToUSD.)
func (t *PortalExchangeRateTool) ConvertFromUSD(tokenIDTo string, amount uint64) (uint64, error) {
	rateTo := t.Rates[tokenIDTo]
	if rateTo.Rate == 0 {
		return 0, errors.New("invalid exchange rate to convert to usdt")
	}

	res := new(big.Int).Mul(new(big.Int).SetUint64(amount), pow10(rateTo.Decimal))
	res.Div(res, new(big.Int).SetUint64(rateTo.Rate))
	return res.Uint64(), nil
}

// ConvertMapTokensToUSD sums the USDT value of every token amount in the map.
// BUG FIX: conversion errors are now propagated instead of being returned as
// a nil error alongside a zero amount.
func (t *PortalExchangeRateTool) ConvertMapTokensToUSD(tokens map[string]uint64) (uint64, error) {
	if len(tokens) == 0 {
		return 0, nil
	}

	res := uint64(0)
	for tokenID, amount := range tokens {
		amountInUSDT, err := t.ConvertToUSD(tokenID, amount)
		if err != nil {
			return 0, err
		}
		res += amountInUSDT
	}
	return res, nil
}

// ConvertMapTokensFromUSD covers amountInUSDT with PRV first (up to
// maxPRVAmount) and then with the other tokens, consuming the most valuable
// (by USDT value) first.  It returns the PRV amount used and a map of the
// other token amounts used.
// BUG FIX: conversion errors are now propagated instead of being dropped.
func (t *PortalExchangeRateTool) ConvertMapTokensFromUSD(amountInUSDT uint64, maxPRVAmount uint64, maxTokenAmounts map[string]uint64) (uint64, map[string]uint64, error) {
	if amountInUSDT == 0 {
		return 0, nil, nil
	}

	prvAmountRes := uint64(0)
	tokenAmountsRes := map[string]uint64{}

	// Convert to prv amount first.
	maxPRVInUSDT, err := t.ConvertToUSD(common.PRVIDStr, maxPRVAmount)
	if err != nil {
		return 0, nil, err
	}
	if maxPRVInUSDT <= amountInUSDT {
		prvAmountRes = maxPRVAmount
		amountInUSDT -= maxPRVInUSDT
	} else {
		prvAmountRes, err = t.ConvertFromUSD(common.PRVIDStr, amountInUSDT)
		if err != nil {
			return 0, nil, err
		}
		amountInUSDT = 0
	}
	if amountInUSDT == 0 {
		return prvAmountRes, tokenAmountsRes, nil
	}

	// Sort tokens by their USDT value, descending.
	type tokenInfo struct {
		tokenID      string
		amount       uint64
		amountInUSDT uint64
	}
	tokenInfos := make([]tokenInfo, 0, len(maxTokenAmounts))
	for tokenID, maxAmount := range maxTokenAmounts {
		maxAmountInUSDT, err := t.ConvertToUSD(tokenID, maxAmount)
		if err != nil {
			return 0, nil, err
		}
		tokenInfos = append(tokenInfos, tokenInfo{
			tokenID:      tokenID,
			amount:       maxAmount,
			amountInUSDT: maxAmountInUSDT,
		})
	}
	sort.SliceStable(tokenInfos, func(i, j int) bool {
		return tokenInfos[i].amountInUSDT > tokenInfos[j].amountInUSDT
	})

	for _, tInfo := range tokenInfos {
		if tInfo.amountInUSDT <= amountInUSDT {
			tokenAmountsRes[tInfo.tokenID] = tInfo.amount
			amountInUSDT -= tInfo.amountInUSDT
		} else {
			tokenAmountsRes[tInfo.tokenID], err = t.ConvertFromUSD(tInfo.tokenID, amountInUSDT)
			if err != nil {
				return 0, nil, err
			}
			amountInUSDT = 0
		}
		if amountInUSDT == 0 {
			return prvAmountRes, tokenAmountsRes, nil
		}
	}

	return 0, nil, errors.New("not enough tokens to convert")
}
portal/portalv3/portalprocess/portalexchangeratetool.go
0.685423
0.408306
portalexchangeratetool.go
starcoder
package main

import (
	"errors"
	"gopkg.in/gographics/imagick.v3/imagick"
	"gopkg.in/yaml.v2"
	"time"
)

// Identify mirrors the YAML document produced by ImageMagick's identify
// output (MagickWand.IdentifyImage).  Field tags match identify's keys,
// including keys that contain spaces and colons.
type Identify struct {
	Image struct {
		Filename     string `yaml:"Filename"`
		Format       string `yaml:"Format"`
		MimeType     string `yaml:"Mime type"`
		Class        string `yaml:"Class"`
		Geometry     string `yaml:"Geometry"`
		Resolution   string `yaml:"Resolution"`
		PrintSize    string `yaml:"Print size"`
		Units        string `yaml:"Units"`
		Colorspace   string `yaml:"Colorspace"`
		Type         string `yaml:"Type"`
		BaseType     string `yaml:"Base type"`
		Endianness   string `yaml:"Endianness"`
		Depth        string `yaml:"Depth"`
		ChannelDepth struct {
			Red   string `yaml:"Red"`
			Green string `yaml:"Green"`
			Blue  string `yaml:"Blue"`
			Alpha string `yaml:"Alpha"`
		} `yaml:"Channel depth"`
		ChannelStatistics struct {
			Pixels int `yaml:"Pixels"`
			Red    struct {
				Min               string  `yaml:"min"`
				Max               string  `yaml:"max"`
				Mean              string  `yaml:"mean"`
				Median            string  `yaml:"median"`
				StandardDeviation string  `yaml:"standard deviation"`
				Kurtosis          float64 `yaml:"kurtosis"`
				Skewness          float64 `yaml:"skewness"`
				Entropy           float64 `yaml:"entropy"`
			} `yaml:"Red"`
			Green struct {
				Min               string  `yaml:"min"`
				Max               string  `yaml:"max"`
				Mean              string  `yaml:"mean"`
				Median            string  `yaml:"median"`
				StandardDeviation string  `yaml:"standard deviation"`
				Kurtosis          float64 `yaml:"kurtosis"`
				Skewness          float64 `yaml:"skewness"`
				Entropy           float64 `yaml:"entropy"`
			} `yaml:"Green"`
			Blue struct {
				Min               string  `yaml:"min"`
				Max               string  `yaml:"max"`
				Mean              string  `yaml:"mean"`
				Median            string  `yaml:"median"`
				StandardDeviation string  `yaml:"standard deviation"`
				Kurtosis          float64 `yaml:"kurtosis"`
				Skewness          float64 `yaml:"skewness"`
				Entropy           float64 `yaml:"entropy"`
			} `yaml:"Blue"`
			// NOTE(review): Alpha uses string/int types where the other
			// channels use float64 — presumably matching identify's literal
			// output for the alpha channel; confirm against real samples.
			Alpha struct {
				Min               string `yaml:"min"`
				Max               string `yaml:"max"`
				Mean              string `yaml:"mean"`
				Median            string `yaml:"median"`
				StandardDeviation string `yaml:"standard deviation"`
				Kurtosis          string `yaml:"kurtosis"`
				Skewness          string `yaml:"skewness"`
				Entropy           int    `yaml:"entropy"`
			} `yaml:"Alpha"`
		} `yaml:"Channel statistics"`
		ImageStatistics struct {
			Overall struct {
				Min               string  `yaml:"min"`
				Max               string  `yaml:"max"`
				Mean              string  `yaml:"mean"`
				Median            string  `yaml:"median"`
				StandardDeviation string  `yaml:"standard deviation"`
				Kurtosis          float64 `yaml:"kurtosis"`
				Skewness          float64 `yaml:"skewness"`
				Entropy           float64 `yaml:"entropy"`
			} `yaml:"Overall"`
		} `yaml:"Image statistics"`
		RenderingIntent string  `yaml:"Rendering intent"`
		Gamma           float64 `yaml:"Gamma"`
		Chromaticity    struct {
			RedPrimary   string `yaml:"red primary"`
			GreenPrimary string `yaml:"green primary"`
			BluePrimary  string `yaml:"blue primary"`
			WhitePoint   string `yaml:"white point"`
		} `yaml:"Chromaticity"`
		MatteColor       string `yaml:"Matte color"`
		BackgroundColor  string `yaml:"Background color"`
		BorderColor      string `yaml:"Border color"`
		TransparentColor string `yaml:"Transparent color"`
		Interlace        string `yaml:"Interlace"`
		Intensity        string `yaml:"Intensity"`
		Compose          string `yaml:"Compose"`
		PageGeometry     string `yaml:"Page geometry"`
		Dispose          string `yaml:"Dispose"`
		Iterations       int    `yaml:"Iterations"`
		Scene            string `yaml:"Scene"`
		Compression      string `yaml:"Compression"`
		Orientation      string `yaml:"Orientation"`
		// Properties holds PNG-specific metadata keys emitted by identify.
		Properties struct {
			DateCreate             time.Time `yaml:"date:create"`
			DateModify             time.Time `yaml:"date:modify"`
			PngCHRM                string    `yaml:"png:cHRM"`
			PngGAMA                string    `yaml:"png:gAMA"`
			PngIHDRBitDepthOrig    int       `yaml:"png:IHDR.bit-depth-orig"`
			PngIHDRBitDepth        int       `yaml:"png:IHDR.bit_depth"`
			PngIHDRColorTypeOrig   int       `yaml:"png:IHDR.color-type-orig"`
			PngIHDRColorType       string    `yaml:"png:IHDR.color_type"`
			PngIHDRInterlaceMethod string    `yaml:"png:IHDR.interlace_method"`
			PngPHYs                string    `yaml:"png:pHYs"`
			PngSRGB                string    `yaml:"png:sRGB"`
			Signature              string    `yaml:"signature"`
		} `yaml:"Properties"`
		Tainted         bool   `yaml:"Tainted"`
		Filesize        string `yaml:"Filesize"`
		NumberPixels    int    `yaml:"Number pixels"`
		PixelsPerSecond string `yaml:"Pixels per second"`
		UserTime        string `yaml:"User time"`
		Version         string `yaml:"Version"`
	} `yaml:"Image"`
}

// ParseIdentify runs identify on the wand's current image and unmarshals the
// YAML output into an Identify struct.  An empty identify string is treated
// as a failure.
func ParseIdentify(wand *imagick.MagickWand) (Identify, error) {
	var identify Identify
	identifyData := wand.IdentifyImage()
	if identifyData == "" {
		return identify, errors.New("failed to get identify")
	}
	err := yaml.Unmarshal([]byte(identifyData), &identify)
	return identify, err
}
parseidentify.go
0.604516
0.498718
parseidentify.go
starcoder
package chared

/*
Design notes on color palettes:

We want to make using color palettes easier, especially creating palettes from
scratch for existing images.  For that we cannot rely solely on raw indexes —
we either need fixed indexes per color or a way to transform pictures from old
palettes to new ones.  Stable color indexes would let us switch palettes and
colors without changing each picture grid, which requires a good, general set
of color names.

Restricting ourselves to 256 colors would allow the default grid tile to be a
uint8, but the map editor (and later the game map) may need more tile states
(id + rotation + extras), so grids are not limited to 256 states in general.
Accepting at most 32 features with at most 8 colors per combined asset lets us
keep using the feature color groups below, and leaves plenty of space to
encode extra information such as POIs (points of interest — mount points for
head, limbs, items, clothes) or an optional decal layer that can be blended
in.
*/

// Feature group bases.  Each feature occupies a block of 8 color codes, so a
// pixel value is (feature base | color code).
const (
	FeatBasic = iota << 3
	FeatFace
	FeatSkin
	FeatHair
	FeatHat
	FeatShirt
	FeatPants
	FeatShoes
	FeatMat1
	FeatMat2
	FeatMat3
	// 21 more features available …
)

// feature: first, first-light, first-dark, outline, second, second-light, second-dark, highlight
const (
	CodeFst = iota
	CodeFstLight
	CodeFstDark
	CodeOutline
	CodeSnd
	CodeSndLight
	CodeSndDark
	CodeHighlight
)

// Names maps each feature base to its color names.  Index 0 is the feature's
// own name; indexes 1-8 correspond to color codes 0-7.
var Names = map[int][]string{
	FeatBasic: {"basic", "bg", "ol", "poi1", "poi2", "poi3", "poi4", "poi5", "poi6"},
	FeatFace:  {"face", "iris", "eye", "pupil", "tear", "lips", "teeth", "blush", "face-hl"},
	FeatSkin:  stdNames("skin"),
	FeatHair:  stdNames("hair"),
	FeatHat:   stdNames("hat"),
	FeatShirt: stdNames("shirt"),
	FeatPants: stdNames("pants"),
	FeatShoes: stdNames("shoes"),
	FeatMat1:  stdNames("mat1"),
	FeatMat2:  stdNames("mat2"),
	FeatMat3:  stdNames("mat3"),
	// 21 more features available …
}

// PixelName returns the color name for pixel p, looking up the feature by the
// high bits and the color code by the low three bits.  It returns "" for
// unknown features or out-of-range codes.
func PixelName(p Pixel) string {
	feat := Names[int(p>>3)]
	// +1 skips the feature-name entry at index 0.
	if c := int(1 + p&7); c < len(feat) {
		return feat[c]
	}
	return ""
}

// stdNames builds the standard nine-entry name list (feature name followed by
// the eight color-code names) for a feature called feat.
func stdNames(feat string) []string {
	return []string{
		feat,
		feat, feat + "-light", feat + "-dark", feat + "-ol",
		feat + "-snd", feat + "-snd-light", feat + "-snd-dark",
		feat + "-hl",
	}
}
chared/pal.go
0.712932
0.566558
pal.go
starcoder
package p2102

// Item is a (score, name) pair.  Ordering favors higher scores first, then
// lexicographically smaller names.
type Item struct {
	key int
	val string
}

// Less reports whether this sorts before that: higher key wins; equal keys
// fall back to smaller val.
func (this Item) Less(that Item) bool {
	return this.key > that.key || this.key == that.key && this.val < that.val
}

/**
 * Node is a node of an AVL tree ordered by Item.Less.  Each node stores a
 * duplicate count (cnt) and the size of its subtree (size, duplicates
 * included) to support order statistics.
 */
type Node struct {
	item        Item
	height      int
	cnt         int
	size        int
	left, right *Node
}

// Height returns the node's height; nil nodes have height 0.
func (node *Node) Height() int {
	if node == nil {
		return 0
	}
	return node.height
}

// Size returns the subtree size (including duplicates); nil nodes have size 0.
func (node *Node) Size() int {
	if node == nil {
		return 0
	}
	return node.size
}

func max(a, b int) int {
	if a >= b {
		return a
	}
	return b
}

// NewNode creates a leaf node holding item.
func NewNode(item Item) *Node {
	node := new(Node)
	node.item = item
	node.height = 1
	node.cnt = 1
	node.size = 1
	return node
}

// rightRotate rotates y's left child up.  y's height/size must be recomputed
// before x's, since y becomes x's child.
func rightRotate(y *Node) *Node {
	x := y.left
	t2 := x.right
	x.right = y
	y.left = t2
	y.height = max(y.left.Height(), y.right.Height()) + 1
	y.size = y.left.Size() + y.right.Size() + y.cnt
	x.height = max(x.left.Height(), x.right.Height()) + 1
	x.size = x.left.Size() + x.right.Size() + x.cnt
	return x
}

// leftRotate rotates x's right child up.  x's height/size must be recomputed
// before y's, since x becomes y's child.
func leftRotate(x *Node) *Node {
	y := x.right
	t2 := y.left
	y.left = x
	x.right = t2
	x.height = max(x.left.Height(), x.right.Height()) + 1
	x.size = x.left.Size() + x.right.Size() + x.cnt
	y.height = max(y.left.Height(), y.right.Height()) + 1
	y.size = y.left.Size() + y.right.Size() + y.cnt
	return y
}

// GetBalance returns left height minus right height (positive = left-heavy).
func (node *Node) GetBalance() int {
	if node == nil {
		return 0
	}
	return node.left.Height() - node.right.Height()
}

// FindEqualOrGreater returns the smallest node whose item is >= item, or nil.
func FindEqualOrGreater(node *Node, item Item) *Node {
	if node == nil {
		return nil
	}
	if node.item.Less(item) {
		return FindEqualOrGreater(node.right, item)
	}
	res := FindEqualOrGreater(node.left, item)
	if res == nil {
		return node
	}
	return res
}

// FindEqualOrLess returns the largest node whose item is <= item, or nil.
func FindEqualOrLess(node *Node, item Item) *Node {
	if node == nil {
		return nil
	}
	if item.Less(node.item) {
		return FindEqualOrLess(node.left, item)
	}
	res := FindEqualOrLess(node.right, item)
	if res == nil {
		return node
	}
	return res
}

// Insert inserts item into the subtree rooted at node and returns the new
// root.  Exact duplicates (struct equality) only bump cnt/size; otherwise the
// tree is rebalanced with the four standard AVL rotation cases.
func Insert(node *Node, item Item) *Node {
	if node == nil {
		return NewNode(item)
	}
	if node.item == item {
		node.cnt++
		node.size++
		return node
	}
	if item.Less(node.item) {
		node.left = Insert(node.left, item)
	} else {
		node.right = Insert(node.right, item)
	}
	node.height = max(node.left.Height(), node.right.Height()) + 1
	node.size = node.left.Size() + node.right.Size() + node.cnt
	balance := node.GetBalance()
	// Left-left case.
	if balance > 1 && item.Less(node.left.item) {
		return rightRotate(node)
	}
	// Right-right case.
	if balance < -1 && node.right.item.Less(item) {
		return leftRotate(node)
	}
	// Left-right case.
	if balance > 1 && node.left.item.Less(item) {
		node.left = leftRotate(node.left)
		return rightRotate(node)
	}
	// Right-left case.
	if balance < -1 && item.Less(node.right.item) {
		node.right = rightRotate(node.right)
		return leftRotate(node)
	}
	return node
}

// MinValueNode returns the leftmost (smallest) node of a non-nil subtree.
// NOTE(review): currently unused within this file.
func MinValueNode(root *Node) *Node {
	cur := root
	for cur.left != nil {
		cur = cur.left
	}
	return cur
}

// FindKth returns the k-th smallest node (1-based, duplicates counted).
// k must be within [1, root.Size()].
func FindKth(root *Node, k int) *Node {
	if root.left.Size() >= k {
		return FindKth(root.left, k)
	}
	// root.left.Size() < k
	if root.left.Size()+root.cnt >= k {
		return root
	}
	return FindKth(root.right, k-root.left.Size()-root.cnt)
}

// SORTracker tracks scores; Get returns the next-best name on each call
// (LeetCode 2102).
type SORTracker struct {
	root *Node
	cnt  int
}

// Constructor returns an empty tracker.
func Constructor() SORTracker {
	return SORTracker{nil, 0}
}

// Add records a (name, score) pair.
func (this *SORTracker) Add(name string, score int) {
	item := new(Item)
	item.key = score
	item.val = name
	this.root = Insert(this.root, *item)
}

// Get returns the i-th best name, where i increments on every call.
func (this *SORTracker) Get() string {
	this.cnt++
	node := FindKth(this.root, this.cnt)
	return node.item.val
}

/**
 * Your SORTracker object will be instantiated and called as such:
 * obj := Constructor();
 * obj.Add(name,score);
 * param_2 := obj.Get();
 */
src/leetcode/set1000/set2000/set2100/set2100/p2102/solution.go
0.781831
0.529993
solution.go
starcoder
package configifytest

import (
	"time"

	"github.com/robsignorelli/configify"
	"github.com/stretchr/testify/mock"
)

// NewMockSource creates a mock source that lets you instruct it exactly how to
// respond for specific inputs.
func NewMockSource(setup func(*MockSource)) configify.Source {
	// We call setup() before registering the `mock.Anything` fallbacks so that
	// testify tries your matchers first on invocation. So the stuff you set up
	// takes precedence over these defaults.
	s := new(MockSource)
	setup(s)

	s.On("Options", mock.Anything).Return(configify.Options{})
	s.On("String", mock.Anything).Return("", false)
	s.On("StringSlice", mock.Anything).Return([]string{}, false)
	s.On("Int", mock.Anything).Return(0, false)
	s.On("Int8", mock.Anything).Return(int8(0), false)
	s.On("Int16", mock.Anything).Return(int16(0), false)
	s.On("Int32", mock.Anything).Return(int32(0), false)
	s.On("Int64", mock.Anything).Return(int64(0), false)
	s.On("Uint", mock.Anything).Return(uint(0), false)
	s.On("Uint8", mock.Anything).Return(uint8(0), false)
	s.On("Uint16", mock.Anything).Return(uint16(0), false)
	s.On("Uint32", mock.Anything).Return(uint32(0), false)
	s.On("Uint64", mock.Anything).Return(uint64(0), false)
	s.On("Bool", mock.Anything).Return(false, false)
	s.On("Float32", mock.Anything).Return(float32(0), false)
	s.On("Float64", mock.Anything).Return(float64(0), false)
	s.On("Duration", mock.Anything).Return(time.Duration(0), false)
	s.On("Time", mock.Anything).Return(time.Time{}, false)
	return s
}

// MockSource is a testing source where you can train the exact values you want
// to regurgitate without having to have it backed by any "real" implementation.
//
// All methods use pointer receivers: mock.Mock embeds a sync mutex, and the
// original value receivers copied that lock (and the call log) on every
// invocation, which `go vet` (copylocks) flags.  NewMockSource already returns
// a *MockSource, so the configify.Source interface is still satisfied.
type MockSource struct {
	mock.Mock
}

// Options returns the trained value for this attribute.
func (s *MockSource) Options() configify.Options {
	return s.Called().Get(0).(configify.Options)
}

// String returns the trained value for this config attribute.
func (s *MockSource) String(key string) (string, bool) {
	args := s.Called(key)
	return args.Get(0).(string), args.Get(1).(bool)
}

// StringSlice returns the trained value for this config attribute.
func (s *MockSource) StringSlice(key string) ([]string, bool) {
	args := s.Called(key)
	return args.Get(0).([]string), args.Get(1).(bool)
}

// Int returns the trained value for this config attribute.
func (s *MockSource) Int(key string) (int, bool) {
	args := s.Called(key)
	return args.Get(0).(int), args.Get(1).(bool)
}

// Int8 returns the trained value for this config attribute.
func (s *MockSource) Int8(key string) (int8, bool) {
	args := s.Called(key)
	return args.Get(0).(int8), args.Get(1).(bool)
}

// Int16 returns the trained value for this config attribute.
func (s *MockSource) Int16(key string) (int16, bool) {
	args := s.Called(key)
	return args.Get(0).(int16), args.Get(1).(bool)
}

// Int32 returns the trained value for this config attribute.
func (s *MockSource) Int32(key string) (int32, bool) {
	args := s.Called(key)
	return args.Get(0).(int32), args.Get(1).(bool)
}

// Int64 returns the trained value for this config attribute.
func (s *MockSource) Int64(key string) (int64, bool) {
	args := s.Called(key)
	return args.Get(0).(int64), args.Get(1).(bool)
}

// Uint returns the trained value for this config attribute.
func (s *MockSource) Uint(key string) (uint, bool) {
	args := s.Called(key)
	return args.Get(0).(uint), args.Get(1).(bool)
}

// Uint8 returns the trained value for this config attribute.
func (s *MockSource) Uint8(key string) (uint8, bool) {
	args := s.Called(key)
	return args.Get(0).(uint8), args.Get(1).(bool)
}

// Uint16 returns the trained value for this config attribute.
func (s *MockSource) Uint16(key string) (uint16, bool) {
	args := s.Called(key)
	return args.Get(0).(uint16), args.Get(1).(bool)
}

// Uint32 returns the trained value for this config attribute.
func (s *MockSource) Uint32(key string) (uint32, bool) {
	args := s.Called(key)
	return args.Get(0).(uint32), args.Get(1).(bool)
}

// Uint64 returns the trained value for this config attribute.
func (s *MockSource) Uint64(key string) (uint64, bool) {
	args := s.Called(key)
	return args.Get(0).(uint64), args.Get(1).(bool)
}

// Float32 returns the trained value for this config attribute.
func (s *MockSource) Float32(key string) (float32, bool) {
	args := s.Called(key)
	return args.Get(0).(float32), args.Get(1).(bool)
}

// Float64 returns the trained value for this config attribute.
func (s *MockSource) Float64(key string) (float64, bool) {
	args := s.Called(key)
	return args.Get(0).(float64), args.Get(1).(bool)
}

// Bool returns the trained value for this config attribute.
func (s *MockSource) Bool(key string) (bool, bool) {
	args := s.Called(key)
	return args.Get(0).(bool), args.Get(1).(bool)
}

// Duration returns the trained value for this config attribute.
func (s *MockSource) Duration(key string) (time.Duration, bool) {
	args := s.Called(key)
	return args.Get(0).(time.Duration), args.Get(1).(bool)
}

// Time returns the trained value for this config attribute.
func (s *MockSource) Time(key string) (time.Time, bool) {
	args := s.Called(key)
	return args.Get(0).(time.Time), args.Get(1).(bool)
}
configifytest/mocks.go
0.755997
0.48688
mocks.go
starcoder
package perseus // InFloat32 returns whether i is in slice. func InFloat32(i float32, slice []float32) bool { for _, b := range slice { if b == i { return true } } return false } // IndexFloat32 returns the position of s in slice. If s is not found, return -1. func IndexFloat32(s float32, slice []float32) int { for i, b := range slice { if b == s { return i } } return -1 } // ShiftFloat32 returns the first element of slice and other element's slice. func ShiftFloat32(slice []float32) (float32, []float32) { return slice[0], slice[1:] } // UnshiftFloat32 add an element to the beginning of a slice. func UnshiftFloat32(sep float32, i []float32) []float32 { return append([]float32{sep}, i...) } // DeleteFloat32 Delete specified element from slice func DeleteFloat32(slice []float32, sep int) []float32 { return append(slice[:sep], slice[sep+1:]...) } // CutFloat32 Delete from i to j from the slice func CutFloat32(slice []float32, i, j int) []float32 { return append(slice[:i], slice[j:]...) } // InsertFloat32 Insert element to specified position func InsertFloat32(slice []float32, element float32, position int) []float32 { return append(slice[:position], append([]float32{element}, slice[position:]...)...) } // InsertVectorFloat32 Insert slice to specified position func InsertVectorFloat32(origin, insert []float32, position int) []float32 { return append(origin[:position], append(insert, origin[position:]...)...) } // PopFloat32 returns the last element of slice and other element's slice. func PopFloat32(slice []float32) (float32, []float32) { return slice[len(slice)-1], slice[:len(slice)-1] } // ReversedFloat32 returns reversed slice func ReversedFloat32(slice []float32) []float32 { for left, right := 0, len(slice)-1; left < right; left, right = left+1, right-1 { slice[left], slice[right] = slice[right], slice[left] } return slice } // ExtendFloat32 connect slices together func ExtendFloat32(A, B []float32) []float32 { return append(A, B...) 
} func sumFloat32(values ...float32) float64 { var sum float64 for _, v := range values { sum += float64(v) } return sum } // SumFloat32 calculate summaries of arguments func SumFloat32(values ...float32) float32 { return float32(sumFloat32(values...)) } // SumFloat32ToFloat64 calculate summaries of arguments func SumFloat32ToFloat64(values ...float32) float64 { return sumFloat32(values...) }
float32.go
0.885074
0.401219
float32.go
starcoder
package vector

import (
	"fmt"
	"math"
)

// min_to_breakup is the vector length above which ToString elides the middle
// elements; vector_print_format controls how each element is printed.
const min_to_breakup int = 7
const vector_print_format string = "%.3f"

// Vector wraps a slice of float64 values.
type Vector struct {
	items []float64
}

// MakeVector returns a zero-filled Vector of length n, or an error when n is
// negative.
// BUG FIX: the success path originally returned a single value from a
// two-result function, which does not compile; it now returns a nil error.
func MakeVector(n int) (v Vector, e error) {
	if n < 0 {
		return Vector{make([]float64, 0)}, fmt.Errorf("n cannot be negative: %d", n)
	}
	return Vector{make([]float64, n)}, nil
}

// Set stores value at index i. The value receiver still mutates the shared
// backing array through the slice header. Out-of-range indexes error.
func (v Vector) Set(i int, value float64) (val float64, e error) {
	n := len(v.items)
	if i < 0 || i >= n {
		return value, fmt.Errorf("index %d out of range for vector of length %d", i, n)
	}
	v.items[i] = value
	return value, nil
}

// Get returns the element at index i, or an error when i is out of range.
func (v Vector) Get(i int) (val float64, e error) {
	n := len(v.items)
	if i < 0 || i >= n {
		return 0, fmt.Errorf("index %d out of range for vector of length %d", i, n)
	}
	return v.items[i], nil
}

// operation combines two corresponding elements into a result element.
type operation func(float64, float64) float64

// operation_bool compares two corresponding elements.
type operation_bool func(float64, float64) bool

// apply_op applies op element-wise to two equal-length vectors, producing a
// new vector; it errors on a length mismatch.
func apply_op(v1 Vector, v2 Vector, op operation) (Vector, error) {
	v1_length := len(v1.items)
	v2_length := len(v2.items)
	if v1_length != v2_length {
		return Vector{make([]float64, 0)}, fmt.Errorf("vector length %d != %d", v1_length, v2_length)
	}
	result_v := Vector{make([]float64, v1_length)}
	for i := 0; i < v2_length; i++ {
		result_v.items[i] = op(v1.items[i], v2.items[i])
	}
	return result_v, nil
}

// Add returns the element-wise sum of v1 and v2.
func Add(v1 Vector, v2 Vector) (Vector, error) {
	add_op := func(a float64, b float64) float64 { return a + b }
	v, e := apply_op(v1, v2, add_op)
	return v, e
}

// AddParallel is a stub; it currently returns an empty vector.
// TODO: implement the parallel addition.
func AddParallel(v1 Vector, v2 Vector) (Vector, error) {
	return Vector{make([]float64, 0)}, nil
}

// Sub returns the element-wise difference v1 - v2.
func Sub(v1 Vector, v2 Vector) (Vector, error) {
	sub_op := func(a float64, b float64) float64 { return a - b }
	v, e := apply_op(v1, v2, sub_op)
	return v, e
}

// Mul returns the element-wise product of v1 and v2.
func Mul(v1 Vector, v2 Vector) (Vector, error) {
	mul_op := func(a float64, b float64) float64 { return a * b }
	v, e := apply_op(v1, v2, mul_op)
	return v, e
}

// operation_pointwise maps a single element to a result element.
type operation_pointwise func(float64) float64

// apply_op_pointwise applies op to every element, producing a new vector.
func apply_op_pointwise(v1 Vector, op operation_pointwise) (Vector, error) {
	n := len(v1.items)
	result_v := Vector{make([]float64, n)}
	for i := 0; i < n; i++ {
		result_v.items[i] = op(v1.items[i])
	}
	return result_v, nil
}

// AddScalar returns v1 with x added to every element.
func AddScalar(v1 Vector, x float64) (Vector, error) {
	add_op := func(a float64) float64 { return a + x }
	v, e := apply_op_pointwise(v1, add_op)
	return v, e
}

// SubScalar returns v1 with x subtracted from every element.
func SubScalar(v1 Vector, x float64) (Vector, error) {
	sub_op := func(a float64) float64 { return a - x }
	v, e := apply_op_pointwise(v1, sub_op)
	return v, e
}

// MulScalar returns v1 with every element multiplied by x.
func MulScalar(v1 Vector, x float64) (Vector, error) {
	mul_op := func(a float64) float64 { return a * x }
	v, e := apply_op_pointwise(v1, mul_op)
	return v, e
}

// Min returns the smallest element; for an empty vector it returns
// math.MaxFloat64.
func (v Vector) Min() float64 {
	var min float64 = math.MaxFloat64
	for _, x := range v.items {
		min = math.Min(min, x)
	}
	return min
}

// Max returns the largest element; for an empty vector it returns
// -math.MaxFloat64.
func (v Vector) Max() float64 {
	var max float64 = -math.MaxFloat64
	for _, x := range v.items {
		max = math.Max(max, x)
	}
	return max
}

// Mean returns the arithmetic mean; for an empty vector the result is NaN
// (0/0).
func (v Vector) Mean() float64 {
	var mean float64 = 0.0
	for _, x := range v.items {
		mean += x
	}
	return mean / float64(len(v.items))
}

// Var returns the population variance (divides by n, not n-1).
func (v Vector) Var() float64 {
	var variance float64 = 0.0
	mean := v.Mean()
	for _, x := range v.items {
		diff := x - mean
		variance += diff * diff
	}
	return variance / float64(len(v.items))
}

// Std returns the population standard deviation.
func (v Vector) Std() float64 {
	return math.Sqrt(v.Var())
}

// ToString renders the vector as "[a, b, c]". Vectors longer than
// min_to_breakup show only the first and last three elements around " ... ".
func (v Vector) ToString() string {
	var str string
	n := len(v.items)
	if n > min_to_breakup {
		first_3 := v.items[:3]
		last_3 := v.items[n-3 : n]
		str += "["
		for i, e := range first_3 {
			if i != 0 {
				str += ", "
			}
			str += fmt.Sprintf(vector_print_format, e)
		}
		str += " ... "
		for i, e := range last_3 {
			if i != 0 {
				str += ", "
			}
			str += fmt.Sprintf(vector_print_format, e)
		}
		str += "]"
	} else {
		str += "["
		for i, e := range v.items {
			if i != 0 {
				str += ", "
			}
			str += fmt.Sprintf(vector_print_format, e)
		}
		str += "]"
	}
	return str
}

// main demonstrates the package; it is an ordinary (unused) function here
// since the package is not "main".
func main() {
	v1 := Vector{[]float64{1, 2, 3, 4}}
	v2 := Vector{[]float64{-1, -2, -3, -4}}
	fmt.Println("Min (v1): ", v1.Min())
	fmt.Println("Max (v1): ", v1.Max())
	fmt.Println("Mean (v1): ", v1.Mean())
	fmt.Println("Std (v1): ", v1.Std())
	fmt.Println("Var (v1): ", v1.Var())
	v := Vector{}
	fmt.Println(v1.ToString())
	fmt.Println(v2.ToString())
	v, _ = Add(v1, v2)
	fmt.Println("Add:", v.ToString())
	v, _ = Sub(v1, v2)
	fmt.Println("Sub:", v.ToString())
	v, _ = Mul(v1, v2)
	fmt.Println("Mult:", v.ToString())
}
vector.go
0.74055
0.620018
vector.go
starcoder
package gt

import (
	"crypto/rand"
	"database/sql/driver"
	"encoding/hex"
	"fmt"
	"io"
)

/*
Creates a random UUID using `gt.ReadUuid` and "crypto/rand". Panics if random
bytes can't be read.
*/
func RandomUuid() Uuid {
	val, err := ReadUuid(rand.Reader)
	try(err)
	return val
}

// Creates a UUID (version 4 variant 1) from bytes from the provided reader.
func ReadUuid(src io.Reader) (val Uuid, err error) {
	_, err = io.ReadFull(src, val[:])
	if err != nil {
		err = fmt.Errorf(`[gt] failed to read random bytes for UUID: %w`, err)
		return
	}
	// Stamp the version/variant bits so the random bytes form a valid v4 UUID.
	val.setVersion()
	return
}

/*
Shortcut: parses successfully or panics. Should be used only in root scope.
When error handling is relevant, use `.Parse`.
*/
func ParseUuid(src string) (val Uuid) {
	try(val.Parse(src))
	return
}

/*
Simple UUID implementation. Features:

	* Reversible encoding/decoding in text.
	* Reversible encoding/decoding in JSON.
	* Reversible encoding/decoding in SQL.
	* Text encoding uses simplified format without dashes.
	* Text decoding supports formats with and without dashes, case-insensitive.

Differences from "github.com/google/uuid".UUID:

	* Text encoding uses simplified format without dashes.
	* Text decoding supports only simplified and canonical format.
	* Supports only version 4 (random except for a few bits).

When dealing with databases, it's highly recommended to use `NullUuid` instead.
*/
type Uuid [UuidLen]byte

// Compile-time interface conformance checks.
var (
	_ = Encodable(Uuid{})
	_ = Decodable((*Uuid)(nil))
)

// Implement `gt.Zeroable`. Equivalent to `reflect.ValueOf(self).IsZero()`.
func (self Uuid) IsZero() bool { return self == Uuid{} }

// Implement `gt.Nullable`. Always `false`.
func (self Uuid) IsNull() bool { return false }

// Implement `gt.Getter`, returning `[16]byte` understood by many DB drivers.
func (self Uuid) Get() interface{} { return [UuidLen]byte(self) }

// Implement `gt.Setter`, using `.Scan`. Panics on error.
func (self *Uuid) Set(src interface{}) { try(self.Scan(src)) }

// Implement `gt.Zeroer`, zeroing the receiver.
func (self *Uuid) Zero() {
	if self != nil {
		*self = Uuid{}
	}
}

/*
Implement `fmt.Stringer`, returning a simplified text representation: lowercase
without dashes.
*/
func (self Uuid) String() string {
	return bytesString(self.Append(nil))
}

/*
Implement `gt.Parser`, parsing a valid UUID representation. Supports both the
short format without dashes, and the canonical format with dashes. Parsing is
case-insensitive.
*/
func (self *Uuid) Parse(src string) (err error) {
	defer errParse(&err, src, `UUID`)

	switch len(src) {
	case 32:
		return self.maybeSet(uuidParseSimple(src))
	case 36:
		return self.maybeSet(uuidParseCanon(src))
	default:
		return errUnrecLength
	}
}

// Implement `gt.Appender`, using the same representation as `.String`.
func (self Uuid) Append(buf []byte) []byte {
	// Reserve space for the hex form, then encode into that tail in place.
	buf = append(buf, uuidStrZero[:]...)
	hex.Encode(buf[len(buf)-len(uuidStrZero):], self[:])
	return buf
}

// Implement `encoding.TextMarshaler`, using the same representation as `.String`.
func (self Uuid) MarshalText() ([]byte, error) {
	return self.Append(nil), nil
}

// Implement `encoding.TextUnmarshaler`, using the same algorithm as `.Parse`.
func (self *Uuid) UnmarshalText(src []byte) error {
	return self.Parse(bytesString(src))
}

// Implement `json.Marshaler`, using the same representation as `.String`.
func (self Uuid) MarshalJSON() ([]byte, error) {
	// Hex form surrounded by JSON double quotes, built without intermediate
	// allocations.
	var buf [UuidStrLen + 2]byte
	buf[0] = '"'
	hex.Encode(buf[1:len(buf)-1], self[:])
	buf[len(buf)-1] = '"'
	return buf[:], nil
}

// Implement `json.Unmarshaler`, using the same algorithm as `.Parse`.
func (self *Uuid) UnmarshalJSON(src []byte) error {
	if isJsonStr(src) {
		return self.UnmarshalText(cutJsonStr(src))
	}
	return errJsonString(src, self)
}

// Implement `driver.Valuer`, using `.Get`.
func (self Uuid) Value() (driver.Value, error) { return self.Get(), nil }

/*
Implement `sql.Scanner`, converting an arbitrary input to `gt.Uuid` and
modifying the receiver.

Acceptable inputs:

	* `string`       -> use `.Parse`
	* `[]byte`       -> use `.UnmarshalText`
	* `[16]byte`     -> assign
	* `gt.Uuid`      -> assign
	* `gt.NullUuid`  -> assign
	* `gt.Getter`    -> scan underlying value
*/
func (self *Uuid) Scan(src interface{}) error {
	switch src := src.(type) {
	case string:
		return self.Parse(src)

	case []byte:
		return self.UnmarshalText(src)

	case [UuidLen]byte:
		*self = Uuid(src)
		return nil

	case Uuid:
		*self = src
		return nil

	case NullUuid:
		*self = Uuid(src)
		return nil

	default:
		val, ok := get(src)
		if ok {
			return self.Scan(val)
		}
		return errScanType(self, src)
	}
}

// Equivalent to `a.String() < b.String()`. Useful for sorting.
// Compares the raw bytes lexicographically, which matches the hex encoding's
// order.
func (self Uuid) Less(other Uuid) bool {
	for i := range self {
		if self[i] < other[i] {
			return true
		}
		if self[i] > other[i] {
			return false
		}
	}
	return false
}

// Reminder: https://en.wikipedia.org/wiki/Universally_unique_identifier
func (self *Uuid) setVersion() {
	// Version 4.
	(*self)[6] = ((*self)[6] & 0b00001111) | 0b01000000
	// Variant 1.
	(*self)[8] = ((*self)[8] & 0b00111111) | 0b10000000
}

// maybeSet assigns val to the receiver only when err is nil, then passes the
// error through.
func (self *Uuid) maybeSet(val Uuid, err error) error {
	if err == nil {
		*self = val
	}
	return err
}

// uuidParseSimple decodes the 32-character dashless hex form.
func uuidParseSimple(src string) (val Uuid, err error) {
	if len(src) != 32 {
		err = errLengthMismatch
		return
	}
	_, err = hex.Decode(val[:], stringBytesUnsafe(src))
	return
}

// uuidParseCanon decodes the 36-character canonical form, validating the dash
// positions (8-4-4-4-12) before decoding each hex pair.
func uuidParseCanon(src string) (val Uuid, err error) {
	if len(src) != 36 {
		err = errLengthMismatch
		return
	}

	if !(src[8] == '-' && src[13] == '-' && src[18] == '-' && src[23] == '-') {
		err = errFormatMismatch
		return
	}

	for i, pair := range uuidGroups {
		char, ok := hexDecode(src[pair[0]], src[pair[1]])
		if !ok {
			err = errInvalidCharAt(src, pair[0])
			return
		}
		val[i] = char
	}
	return
}

/*
Implement `fmt.GoStringer`, returning valid Go code that constructs this value.
The rendered code is biased for readability over performance: it parses a
string instead of using a literal constructor.
*/ func (self Uuid) GoString() string { const fun = `gt.ParseUuid` var arr [len(fun) + len("(`") + len(uuidStrZero) + len("`)")]byte buf := arr[:0] buf = append(buf, fun...) buf = append(buf, "(`"...) buf = self.Append(buf) buf = append(buf, "`)"...) return string(buf) }
gt_uuid.go
0.763484
0.42668
gt_uuid.go
starcoder
package main import ( "container/heap" "log" "math" ) // min returns the minumum value of an array of floats func min(fs []float64) float64 { minVal := math.MaxFloat64 for _, f := range fs { if f < minVal { minVal = f } } return minVal } // max returns the maximum value of an array of floats func max(fs []float64) float64 { maxVal := -math.MaxFloat64 for _, f := range fs { if f > maxVal { maxVal = f } } return maxVal } // mean computes the mean value of a list of float64 values func mean(items []float64) float64 { var mean float64 for _, x := range items { mean += x } return mean / float64(len(items)) } // variance computes the variance of a list of float64 values func variance(items []float64) float64 { var mk, qk float64 // helper values for one pass variance computation for i, d := range items { k := float64(i + 1) qk += (k - 1) * (d - mk) * (d - mk) / k mk += (d - mk) / k } var variance float64 if len(items) > 1 { variance = qk / float64(len(items)-1) } return variance } // median computes the median of the provided func median(fs []float64) float64 { m := newMedData() for _, f := range fs { updateMedian(m, f) } return m.val } // medData holds the data structures needed to compute a running median. // Currently, the running median is implemented via a min and max heap data // structure and thus requires storage on the order of the data set size type medData struct { smaller, larger FloatHeap val float64 } // newMedData initializes the data structure for computing the running median func newMedData() *medData { var m medData heap.Init(&m.smaller) heap.Init(&m.larger) return &m } // updateMedian updates the running median using two heaps the each keep // track of elements smaller and larger than the current median. 
func updateMedian(m *medData, v float64) *medData { if len(m.smaller) == 0 && len(m.larger) == 0 { // insert first element heap.Push(&m.smaller, -v) } else if len(m.smaller) == 0 { // insert second element (first case) if v > m.larger[0] { heap.Push(&m.smaller, -heap.Pop(&m.larger).(float64)) heap.Push(&m.larger, v) } else { heap.Push(&m.smaller, -v) } } else if len(m.larger) == 0 { // insert second element (second case) if v < -m.smaller[0] { heap.Push(&m.larger, -heap.Pop(&m.smaller).(float64)) heap.Push(&m.smaller, -v) } else { heap.Push(&m.larger, v) } } else { // insert third and following elements if v < m.val { heap.Push(&m.smaller, -v) } else if v > m.val { heap.Push(&m.larger, v) } else { if len(m.smaller) <= len(m.larger) { heap.Push(&m.smaller, -v) } else { heap.Push(&m.larger, v) } } } // fix up heaps if they differ in length by more than 2 if len(m.smaller) == len(m.larger)+2 { heap.Push(&m.larger, -heap.Pop(&m.smaller).(float64)) } else if len(m.larger) == len(m.smaller)+2 { heap.Push(&m.smaller, -heap.Pop(&m.larger).(float64)) } // compute new median if len(m.smaller) == len(m.larger) { m.val = 0.5 * (m.larger[0] - m.smaller[0]) } else if len(m.smaller) > len(m.larger) { m.val = -m.smaller[0] } else { m.val = m.larger[0] } if math.Abs(float64(len(m.smaller)-len(m.larger))) > 1 { log.Panic("median heaps differ by more than 2") } return m } // FloatHeap is a min-heap of float64 type FloatHeap []float64 // implement heap interface for FloatHeap func (f FloatHeap) Len() int { return len(f) } func (f FloatHeap) Less(i, j int) bool { return f[i] < f[j] } func (f FloatHeap) Swap(i, j int) { f[i], f[j] = f[j], f[i] } // Push is part of heap interface func (f *FloatHeap) Push(x interface{}) { *f = append(*f, x.(float64)) } // Pop is part of heap interface func (f *FloatHeap) Pop() interface{} { old := *f n := len(old) x := old[n-1] *f = old[0 : n-1] return x }
statistics.go
0.780453
0.48054
statistics.go
starcoder
package v1b3 import ( "context" "reflect" "github.com/pulumi/pulumi/sdk/v3/go/pulumi" ) // Gets the state of the specified Cloud Dataflow job. To get the state of a job, we recommend using `projects.locations.jobs.get` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.get` is not recommended, as you can only get the state of jobs that are running in `us-central1`. func LookupJob(ctx *pulumi.Context, args *LookupJobArgs, opts ...pulumi.InvokeOption) (*LookupJobResult, error) { var rv LookupJobResult err := ctx.Invoke("google-native:dataflow/v1b3:getJob", args, &rv, opts...) if err != nil { return nil, err } return &rv, nil } type LookupJobArgs struct { JobId string `pulumi:"jobId"` Location string `pulumi:"location"` Project *string `pulumi:"project"` View *string `pulumi:"view"` } type LookupJobResult struct { // The client's unique identifier of the job, re-used across retried attempts. If this field is set, the service will ensure its uniqueness. The request to create a job will fail if the service has knowledge of a previously submitted job with the same client's ID and job name. The caller may use this field to ensure idempotence of job creation across retried attempts to create a job. By default, the field is empty and, in that case, the service ignores it. ClientRequestId string `pulumi:"clientRequestId"` // The timestamp when the job was initially created. Immutable and set by the Cloud Dataflow service. CreateTime string `pulumi:"createTime"` // If this is specified, the job's initial state is populated from the given snapshot. CreatedFromSnapshotId string `pulumi:"createdFromSnapshotId"` // The current state of the job. Jobs are created in the `JOB_STATE_STOPPED` state unless otherwise specified. A job in the `JOB_STATE_RUNNING` state may asynchronously enter a terminal state. After a job has reached a terminal state, no further state updates may be made. 
This field may be mutated by the Cloud Dataflow service; callers cannot mutate it. CurrentState string `pulumi:"currentState"` // The timestamp associated with the current state. CurrentStateTime string `pulumi:"currentStateTime"` // The environment for the job. Environment EnvironmentResponse `pulumi:"environment"` // This field is populated by the Dataflow service to support filtering jobs by the metadata values provided here. Populated for ListJobs and all GetJob views SUMMARY and higher. JobMetadata JobMetadataResponse `pulumi:"jobMetadata"` // User-defined labels for this job. The labels map can contain no more than 64 entries. Entries of the labels map are UTF8 strings that comply with the following restrictions: * Keys must conform to regexp: \p{Ll}\p{Lo}{0,62} * Values must conform to regexp: [\p{Ll}\p{Lo}\p{N}_-]{0,63} * Both keys and values are additionally constrained to be <= 128 bytes in size. Labels map[string]string `pulumi:"labels"` // The [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that contains this job. Location string `pulumi:"location"` // The user-specified Cloud Dataflow job name. Only one Job with a given name may exist in a project at any given time. If a caller attempts to create a Job with the same name as an already-existing Job, the attempt returns the existing Job. The name must match the regular expression `[a-z]([-a-z0-9]{0,38}[a-z0-9])?` Name string `pulumi:"name"` // Preliminary field: The format of this data may change at any time. A description of the user pipeline and stages through which it is executed. Created by Cloud Dataflow service. Only retrieved with JOB_VIEW_DESCRIPTION or JOB_VIEW_ALL. PipelineDescription PipelineDescriptionResponse `pulumi:"pipelineDescription"` // The ID of the Cloud Platform project that the job belongs to. Project string `pulumi:"project"` // If this job is an update of an existing job, this field is the job ID of the job it replaced. 
When sending a `CreateJobRequest`, you can update a job by specifying it here. The job named here is stopped, and its intermediate state is transferred to this job. ReplaceJobId string `pulumi:"replaceJobId"` // If another job is an update of this job (and thus, this job is in `JOB_STATE_UPDATED`), this field contains the ID of that job. ReplacedByJobId string `pulumi:"replacedByJobId"` // The job's requested state. `UpdateJob` may be used to switch between the `JOB_STATE_STOPPED` and `JOB_STATE_RUNNING` states, by setting requested_state. `UpdateJob` may also be used to directly set a job's requested state to `JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably terminating the job if it has not already reached a terminal state. RequestedState string `pulumi:"requestedState"` // Reserved for future use. This field is set only in responses from the server; it is ignored if it is set in any requests. SatisfiesPzs bool `pulumi:"satisfiesPzs"` // This field may be mutated by the Cloud Dataflow service; callers cannot mutate it. StageStates []ExecutionStageStateResponse `pulumi:"stageStates"` // The timestamp when the job was started (transitioned to JOB_STATE_PENDING). Flexible resource scheduling jobs are started with some delay after job creation, so start_time is unset before start and is updated when the job is started by the Cloud Dataflow service. For other jobs, start_time always equals to create_time and is immutable and set by the Cloud Dataflow service. StartTime string `pulumi:"startTime"` // Exactly one of step or steps_location should be specified. The top-level steps that constitute the entire job. Only retrieved with JOB_VIEW_ALL. Steps []StepResponse `pulumi:"steps"` // The Cloud Storage location where the steps are stored. StepsLocation string `pulumi:"stepsLocation"` // A set of files the system should be aware of that are used for temporary storage. These temporary files will be removed on job completion. No duplicates are allowed. 
No file patterns are supported. The supported files are: Google Cloud Storage: storage.googleapis.com/{bucket}/{object} bucket.storage.googleapis.com/{object} TempFiles []string `pulumi:"tempFiles"` // The map of transform name prefixes of the job to be replaced to the corresponding name prefixes of the new job. TransformNameMapping map[string]string `pulumi:"transformNameMapping"` // The type of Cloud Dataflow job. Type string `pulumi:"type"` } func LookupJobOutput(ctx *pulumi.Context, args LookupJobOutputArgs, opts ...pulumi.InvokeOption) LookupJobResultOutput { return pulumi.ToOutputWithContext(context.Background(), args). ApplyT(func(v interface{}) (LookupJobResult, error) { args := v.(LookupJobArgs) r, err := LookupJob(ctx, &args, opts...) return *r, err }).(LookupJobResultOutput) } type LookupJobOutputArgs struct { JobId pulumi.StringInput `pulumi:"jobId"` Location pulumi.StringInput `pulumi:"location"` Project pulumi.StringPtrInput `pulumi:"project"` View pulumi.StringPtrInput `pulumi:"view"` } func (LookupJobOutputArgs) ElementType() reflect.Type { return reflect.TypeOf((*LookupJobArgs)(nil)).Elem() } type LookupJobResultOutput struct{ *pulumi.OutputState } func (LookupJobResultOutput) ElementType() reflect.Type { return reflect.TypeOf((*LookupJobResult)(nil)).Elem() } func (o LookupJobResultOutput) ToLookupJobResultOutput() LookupJobResultOutput { return o } func (o LookupJobResultOutput) ToLookupJobResultOutputWithContext(ctx context.Context) LookupJobResultOutput { return o } // The client's unique identifier of the job, re-used across retried attempts. If this field is set, the service will ensure its uniqueness. The request to create a job will fail if the service has knowledge of a previously submitted job with the same client's ID and job name. The caller may use this field to ensure idempotence of job creation across retried attempts to create a job. By default, the field is empty and, in that case, the service ignores it. 
func (o LookupJobResultOutput) ClientRequestId() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.ClientRequestId }).(pulumi.StringOutput) } // The timestamp when the job was initially created. Immutable and set by the Cloud Dataflow service. func (o LookupJobResultOutput) CreateTime() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.CreateTime }).(pulumi.StringOutput) } // If this is specified, the job's initial state is populated from the given snapshot. func (o LookupJobResultOutput) CreatedFromSnapshotId() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.CreatedFromSnapshotId }).(pulumi.StringOutput) } // The current state of the job. Jobs are created in the `JOB_STATE_STOPPED` state unless otherwise specified. A job in the `JOB_STATE_RUNNING` state may asynchronously enter a terminal state. After a job has reached a terminal state, no further state updates may be made. This field may be mutated by the Cloud Dataflow service; callers cannot mutate it. func (o LookupJobResultOutput) CurrentState() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.CurrentState }).(pulumi.StringOutput) } // The timestamp associated with the current state. func (o LookupJobResultOutput) CurrentStateTime() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.CurrentStateTime }).(pulumi.StringOutput) } // The environment for the job. func (o LookupJobResultOutput) Environment() EnvironmentResponseOutput { return o.ApplyT(func(v LookupJobResult) EnvironmentResponse { return v.Environment }).(EnvironmentResponseOutput) } // This field is populated by the Dataflow service to support filtering jobs by the metadata values provided here. Populated for ListJobs and all GetJob views SUMMARY and higher. 
func (o LookupJobResultOutput) JobMetadata() JobMetadataResponseOutput { return o.ApplyT(func(v LookupJobResult) JobMetadataResponse { return v.JobMetadata }).(JobMetadataResponseOutput) } // User-defined labels for this job. The labels map can contain no more than 64 entries. Entries of the labels map are UTF8 strings that comply with the following restrictions: * Keys must conform to regexp: \p{Ll}\p{Lo}{0,62} * Values must conform to regexp: [\p{Ll}\p{Lo}\p{N}_-]{0,63} * Both keys and values are additionally constrained to be <= 128 bytes in size. func (o LookupJobResultOutput) Labels() pulumi.StringMapOutput { return o.ApplyT(func(v LookupJobResult) map[string]string { return v.Labels }).(pulumi.StringMapOutput) } // The [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) that contains this job. func (o LookupJobResultOutput) Location() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.Location }).(pulumi.StringOutput) } // The user-specified Cloud Dataflow job name. Only one Job with a given name may exist in a project at any given time. If a caller attempts to create a Job with the same name as an already-existing Job, the attempt returns the existing Job. The name must match the regular expression `[a-z]([-a-z0-9]{0,38}[a-z0-9])?` func (o LookupJobResultOutput) Name() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.Name }).(pulumi.StringOutput) } // Preliminary field: The format of this data may change at any time. A description of the user pipeline and stages through which it is executed. Created by Cloud Dataflow service. Only retrieved with JOB_VIEW_DESCRIPTION or JOB_VIEW_ALL. 
func (o LookupJobResultOutput) PipelineDescription() PipelineDescriptionResponseOutput { return o.ApplyT(func(v LookupJobResult) PipelineDescriptionResponse { return v.PipelineDescription }).(PipelineDescriptionResponseOutput) } // The ID of the Cloud Platform project that the job belongs to. func (o LookupJobResultOutput) Project() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.Project }).(pulumi.StringOutput) } // If this job is an update of an existing job, this field is the job ID of the job it replaced. When sending a `CreateJobRequest`, you can update a job by specifying it here. The job named here is stopped, and its intermediate state is transferred to this job. func (o LookupJobResultOutput) ReplaceJobId() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.ReplaceJobId }).(pulumi.StringOutput) } // If another job is an update of this job (and thus, this job is in `JOB_STATE_UPDATED`), this field contains the ID of that job. func (o LookupJobResultOutput) ReplacedByJobId() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.ReplacedByJobId }).(pulumi.StringOutput) } // The job's requested state. `UpdateJob` may be used to switch between the `JOB_STATE_STOPPED` and `JOB_STATE_RUNNING` states, by setting requested_state. `UpdateJob` may also be used to directly set a job's requested state to `JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably terminating the job if it has not already reached a terminal state. func (o LookupJobResultOutput) RequestedState() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.RequestedState }).(pulumi.StringOutput) } // Reserved for future use. This field is set only in responses from the server; it is ignored if it is set in any requests. 
func (o LookupJobResultOutput) SatisfiesPzs() pulumi.BoolOutput { return o.ApplyT(func(v LookupJobResult) bool { return v.SatisfiesPzs }).(pulumi.BoolOutput) } // This field may be mutated by the Cloud Dataflow service; callers cannot mutate it. func (o LookupJobResultOutput) StageStates() ExecutionStageStateResponseArrayOutput { return o.ApplyT(func(v LookupJobResult) []ExecutionStageStateResponse { return v.StageStates }).(ExecutionStageStateResponseArrayOutput) } // The timestamp when the job was started (transitioned to JOB_STATE_PENDING). Flexible resource scheduling jobs are started with some delay after job creation, so start_time is unset before start and is updated when the job is started by the Cloud Dataflow service. For other jobs, start_time always equals to create_time and is immutable and set by the Cloud Dataflow service. func (o LookupJobResultOutput) StartTime() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.StartTime }).(pulumi.StringOutput) } // Exactly one of step or steps_location should be specified. The top-level steps that constitute the entire job. Only retrieved with JOB_VIEW_ALL. func (o LookupJobResultOutput) Steps() StepResponseArrayOutput { return o.ApplyT(func(v LookupJobResult) []StepResponse { return v.Steps }).(StepResponseArrayOutput) } // The Cloud Storage location where the steps are stored. func (o LookupJobResultOutput) StepsLocation() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.StepsLocation }).(pulumi.StringOutput) } // A set of files the system should be aware of that are used for temporary storage. These temporary files will be removed on job completion. No duplicates are allowed. No file patterns are supported. 
The supported files are: Google Cloud Storage: storage.googleapis.com/{bucket}/{object} bucket.storage.googleapis.com/{object} func (o LookupJobResultOutput) TempFiles() pulumi.StringArrayOutput { return o.ApplyT(func(v LookupJobResult) []string { return v.TempFiles }).(pulumi.StringArrayOutput) } // The map of transform name prefixes of the job to be replaced to the corresponding name prefixes of the new job. func (o LookupJobResultOutput) TransformNameMapping() pulumi.StringMapOutput { return o.ApplyT(func(v LookupJobResult) map[string]string { return v.TransformNameMapping }).(pulumi.StringMapOutput) } // The type of Cloud Dataflow job. func (o LookupJobResultOutput) Type() pulumi.StringOutput { return o.ApplyT(func(v LookupJobResult) string { return v.Type }).(pulumi.StringOutput) } func init() { pulumi.RegisterOutputType(LookupJobResultOutput{}) }
sdk/go/google/dataflow/v1b3/getJob.go
0.770033
0.423875
getJob.go
starcoder
package cios import ( "encoding/json" ) // SeriesImage struct for SeriesImage type SeriesImage struct { Timestamp string `json:"timestamp"` // base64エンコードされた画像データ Image string `json:"image"` // 画像データのフォーマット ImageType string `json:"image_type"` } // NewSeriesImage instantiates a new SeriesImage object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewSeriesImage(timestamp string, image string, imageType string, ) *SeriesImage { this := SeriesImage{} this.Timestamp = timestamp this.Image = image this.ImageType = imageType return &this } // NewSeriesImageWithDefaults instantiates a new SeriesImage object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewSeriesImageWithDefaults() *SeriesImage { this := SeriesImage{} return &this } // GetTimestamp returns the Timestamp field value func (o *SeriesImage) GetTimestamp() string { if o == nil { var ret string return ret } return o.Timestamp } // GetTimestampOk returns a tuple with the Timestamp field value // and a boolean to check if the value has been set. func (o *SeriesImage) GetTimestampOk() (*string, bool) { if o == nil { return nil, false } return &o.Timestamp, true } // SetTimestamp sets field value func (o *SeriesImage) SetTimestamp(v string) { o.Timestamp = v } // GetImage returns the Image field value func (o *SeriesImage) GetImage() string { if o == nil { var ret string return ret } return o.Image } // GetImageOk returns a tuple with the Image field value // and a boolean to check if the value has been set. 
func (o *SeriesImage) GetImageOk() (*string, bool) { if o == nil { return nil, false } return &o.Image, true } // SetImage sets field value func (o *SeriesImage) SetImage(v string) { o.Image = v } // GetImageType returns the ImageType field value func (o *SeriesImage) GetImageType() string { if o == nil { var ret string return ret } return o.ImageType } // GetImageTypeOk returns a tuple with the ImageType field value // and a boolean to check if the value has been set. func (o *SeriesImage) GetImageTypeOk() (*string, bool) { if o == nil { return nil, false } return &o.ImageType, true } // SetImageType sets field value func (o *SeriesImage) SetImageType(v string) { o.ImageType = v } func (o SeriesImage) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if true { toSerialize["timestamp"] = o.Timestamp } if true { toSerialize["image"] = o.Image } if true { toSerialize["image_type"] = o.ImageType } return json.Marshal(toSerialize) } type NullableSeriesImage struct { value *SeriesImage isSet bool } func (v NullableSeriesImage) Get() *SeriesImage { return v.value } func (v *NullableSeriesImage) Set(val *SeriesImage) { v.value = val v.isSet = true } func (v NullableSeriesImage) IsSet() bool { return v.isSet } func (v *NullableSeriesImage) Unset() { v.value = nil v.isSet = false } func NewNullableSeriesImage(val *SeriesImage) *NullableSeriesImage { return &NullableSeriesImage{value: val, isSet: true} } func (v NullableSeriesImage) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableSeriesImage) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
cios/model_series_image.go
0.734501
0.442396
model_series_image.go
starcoder
package entity import ( //"fmt" "strconv" ) type Date struct { Year, Month, Day, Hour, Minute int } func IsValid(d Date) bool { if d.Year < 1000 || d.Year > 9999 || d.Month < 1 || d.Month > 12 || d.Day < 1 || d.Hour < 0 || d.Hour >= 24 || d.Minute < 0 || d.Minute >= 60 { return false } if d.Month == 1 || d.Month == 3 || d.Month == 5 || d.Month == 7 || d.Month == 8 || d.Month == 10 || d.Month == 12 { if d.Day > 31 { return false } } else if d.Month == 4 || d.Month == 6 || d.Month == 9 || d.Month == 11 { if d.Day > 30 { return false } } else { if (d.Year%4 == 0 && d.Year%100 != 0) || (d.Year%400 == 0) { if d.Day > 29 { return false } } else { if d.Day > 28 { return false } } } return true } func StringToDate(s string) Date { var i_Date = Date{ Year: 0, Month: 0, Day: 0, Hour: 0, Minute: 0, } if len(s) != 16 || s[4] != '-' || s[7] != '-' || s[10] != '/' || s[13] != ':' { return i_Date } for i := 0; i < 16; i++ { if i == 4 || i == 7 || i == 10 || i == 13 { continue } if s[i] < '0' || s[i] > '9' { return i_Date } } i_Date.Year, _ = strconv.Atoi(s[0:4]) i_Date.Month, _ = strconv.Atoi(s[5:7]) i_Date.Day, _ = strconv.Atoi(s[8:10]) i_Date.Hour, _ = strconv.Atoi(s[11:13]) i_Date.Minute, _ = strconv.Atoi(s[14:16]) return i_Date } func DateToString(d Date) string { var d_String string = "" var i_Time string = "0000-00-00/00:00" if !IsValid(d) { return i_Time } var s_Year string = strconv.Itoa(d.Year) var s_Month string = strconv.Itoa(d.Month) var s_Day string = strconv.Itoa(d.Day) var s_Hour string = strconv.Itoa(d.Hour) var s_Minute string = strconv.Itoa(d.Minute) if d.Month < 10 { s_Month = "0" + s_Month } if d.Day < 10 { s_Day = "0" + s_Day } if d.Hour < 10 { s_Hour = "0" + s_Hour } if d.Minute < 10 { s_Minute = "0" + s_Minute } d_String = s_Year + "-" + s_Month + "-" + s_Day + "/" + s_Hour + ":" + s_Minute return d_String } func IsOverlapping(s_d1 Date, e_d1 Date, s_d2 Date, e_d2 Date) bool { if (Date_MoreThan(s_d1, s_d2) && Date_LessThan(s_d1, e_d2)) || 
(Date_MoreThan(e_d1, s_d2) && Date_LessThan(e_d1, e_d2)) || (Date_MoreThan(s_d2, s_d1) && Date_LessThan(s_d2, e_d1)) || (Date_MoreThan(e_d2, s_d1) && Date_LessThan(e_d2, e_d1)) { return true } if (Date_Equal(s_d1, s_d2)&&Date_Equal(e_d1, e_d2)) { return true } return false } func Date_LessThan(d1 Date, d2 Date) bool { if Date_MoreThan(d1, d2) == false && Date_Equal(d1, d2) == false { return true } return false } func Date_Equal(d1 Date, d2 Date) bool { if d1.Year == d2.Year && d1.Month == d2.Month && d1.Day == d2.Day && d1.Hour == d2.Hour && d1.Minute == d2.Minute { return true } return false } func Date_MoreThan(d1 Date, d2 Date) bool { if d1.Year > d2.Year { return true } if d1.Year < d2.Year { return false } if d1.Month > d2.Month { return true } if d1.Month < d2.Month { return false } if d1.Day > d2.Day { return true } if d1.Day < d2.Day { return false } if d1.Hour > d2.Hour { return true } if d1.Hour < d2.Hour { return false } if d1.Minute > d2.Minute { return true } if d1.Minute < d2.Minute { return false } return false } func GetYear(a Date) int { return a.Year } func GetMonth(a Date) int { return a.Month } func GetDay(a Date) int { return a.Day } func GetHour(a Date) int { return a.Hour } func GetMinute(a Date) int { return a.Minute }
entity/Date.go
0.537284
0.437944
Date.go
starcoder
package slices import ( "sort" "github.com/go-board/std/optional" "golang.org/x/exp/constraints" ) type sortBy[T any] struct { less func(a, b T) bool inner []T } func (s sortBy[T]) Len() int { return len(s.inner) } func (s sortBy[T]) Less(i, j int) bool { return s.less(s.inner[i], s.inner[j]) } func (s sortBy[T]) Swap(i, j int) { s.inner[i], s.inner[j] = s.inner[j], s.inner[i] } // SortBy sorts the given slice in-place by the given less function. func SortBy[T any](slice []T, less func(a, b T) bool) { sort.Sort(sortBy[T]{less: less, inner: slice}) } // Sort sorts the given slice in-place. func Sort[T constraints.Ordered](slice []T) { SortBy(slice, func(a, b T) bool { return a < b }) } // IsSortedBy returns true if the given slice is sorted by the given less function. func IsSortedBy[T any](slice []T, less func(a, b T) bool) bool { return sort.IsSorted(sortBy[T]{less: less, inner: slice}) } // IsSorted returns true if the given slice is sorted. func IsSorted[T constraints.Ordered](slice []T) bool { return IsSortedBy(slice, func(a, b T) bool { return a < b }) } // Map returns a new slice with the results of applying the given function to each element in the given slice. func Map[T, U any](slice []T, f func(T) U) []U { result := make([]U, len(slice)) for i, v := range slice { result[i] = f(v) } return result } // ForEach iterates over the given slice and calls the given function for each element. func ForEach[T any](slice []T, f func(T)) { for _, v := range slice { f(v) } } // Filter returns a new slice with all elements that satisfy the given predicate. func Filter[T any](slice []T, f func(T) bool) []T { result := make([]T, 0, len(slice)) for _, v := range slice { if f(v) { result = append(result, v) } } return result } // Fold returns the result of applying the given function to each element in the given slice. 
func Fold[T, A any](slice []T, initial A, accumulator func(A, T) A) A { result := initial for _, v := range slice { result = accumulator(result, v) } return result } // Reduce returns the result of applying the given function to each element in the given slice. func Reduce[T any](slice []T, f func(T, T) T) optional.Optional[T] { if len(slice) == 0 { return optional.None[T]() } return optional.Some(Fold(slice[0:], slice[0], f)) } // Any returns true if any element in the given slice satisfies the given predicate. func Any[T any](slice []T, f func(T) bool) bool { for _, v := range slice { if f(v) { return true } } return false } // All returns true if all elements in the given slice satisfy the given predicate. func All[T any](slice []T, f func(T) bool) bool { for _, v := range slice { if !f(v) { return false } } return true } // None returns true if no element in the given slice satisfies the given predicate. func None[T any](slice []T, f func(T) bool) bool { return !Any(slice, f) } // FindIndexBy returns the index of the first element in the given slice that satisfies the given predicate. func FindIndexBy[T any](slice []T, v T, eq func(T, T) bool) int { for i, vv := range slice { if eq(v, vv) { return i } } return -1 } // ContainsBy returns true if the given slice contains an element that satisfies the given predicate. func ContainsBy[T any](slice []T, v T, cmp func(T, T) bool) bool { return Any(slice, func(t T) bool { return cmp(t, v) }) } // Contains returns true if the given slice contains the given element. func Contains[T comparable](slice []T, v T) bool { return ContainsBy(slice, v, func(t1, t2 T) bool { return t1 == t2 }) } // MaxBy returns the maximium element in the given slice that satisfies the given function. 
func MaxBy[T any](slice []T, less func(T, T) bool) optional.Optional[T] { return Reduce(slice, func(a, b T) T { if less(a, b) { return b } else { return a } }) } // MinBy returns the minimium element in the given slice that satisfies the given function. func MinBy[T any](slice []T, less func(T, T) bool) optional.Optional[T] { return Reduce(slice, func(a, b T) T { if less(a, b) { return a } else { return b } }) } // Nth returns the nth element in the given slice. func Nth[T any](slice []T, n int) optional.Optional[T] { if n < 0 { n = len(slice) + n } if n < 0 || n >= len(slice) { return optional.None[T]() } return optional.Some(slice[n]) } // Flatten returns a new slice with all elements in the given slice and all elements in all sub-slices. func Flatten[T any](slice [][]T) []T { result := make([]T, 0, len(slice)) for _, v := range slice { result = append(result, v...) } return result } // Chunk returns a new slice with the given slice split into smaller slices of the given size. func Chunk[T any](slice []T, chunk int) [][]T { result := make([][]T, 0, len(slice)/chunk+1) for i := 0; i < len(slice); i += chunk { result = append(result, slice[i:i+chunk]) } return result } // GroupBy returns a new map with the given slice split into smaller slices of the given size. func GroupBy[T any, TKey comparable](slice []T, group func(T) TKey) map[TKey][]T { result := make(map[TKey][]T) for _, v := range slice { key := group(v) result[key] = append(result[key], v) } return result } // EqualBy returns true if the given slices are equal by the given function. func EqualBy[T any](slice1 []T, slice2 []T, eq func(T, T) bool) bool { if len(slice1) != len(slice2) { return false } for i, v := range slice1 { if !eq(v, slice2[i]) { return false } } return true } // Equal returns true if the given slices are equal. 
func Equal[T comparable](slice1 []T, slice2 []T) bool { return EqualBy(slice1, slice2, func(a, b T) bool { return a == b }) } // DeepClone returns a new slice with the same elements as the given slice. func DeepClone[T any](slice []T, clone func(T) T) []T { return Map(slice, clone) } // ToSet returns a new set with the given slice. func ToSet[T comparable](slice []T) map[T]struct{} { result := make(map[T]struct{}, len(slice)) for _, v := range slice { result[v] = struct{}{} } return result } // IntersectionBy returns a new slice with the elements that are in both given slices by the given function. func IntersectionBy[T any](slice1 []T, slice2 []T, eq func(T, T) bool) []T { result := make([]T, 0, len(slice1)) for _, v := range slice1 { if ContainsBy(slice2, v, eq) { result = append(result, v) } } return result } // DifferenceBy returns a new slice with the elements that are in the first slice but not in the second by the given function. func DifferenceBy[T any](slice1 []T, slice2 []T, eq func(T, T) bool) []T { result := make([]T, 0, len(slice1)) for _, v := range slice1 { if !ContainsBy(slice2, v, eq) { result = append(result, v) } } return result }
slices/slice.go
0.893088
0.531331
slice.go
starcoder
package utils import ( "reflect" "sort" "strconv" "strings" "time" ) // Element Array elements as object type Element interface{} // WrapArray Array elements to object func WrapArray(arrayData interface{}) map[string]Element { return WrapArrayWithElemName(arrayData, "data") } // WrapArrayWithElemName Array elements to object with field name. func WrapArrayWithElemName(arrayData interface{}, elemName string) map[string]Element { jsonData := make(map[string]Element) dataSize := getArraySize(arrayData) if arrayData == nil || dataSize <= 0 { jsonData[elemName] = make([]Element, 0) } else { jsonData[elemName] = arrayData } return jsonData } func getArraySize(data interface{}) int { if data == nil { return 0 } switch reflect.TypeOf(data).Kind() { case reflect.Slice: return reflect.ValueOf(data).Len() default: return -1 } } // IndexOfInt returns index of value in the array. func IndexOfInt(arrayData []int, val int) int { for i, v := range arrayData { if v == val { return i } } return -1 } // ExistsInt returns the value exists in the array. func ExistsInt(arrayData []int, val int) bool { for _, v := range arrayData { if v == val { return true } } return false } // UniqInt returns duplicate value in the array. func UniqInt(arrayData []int) []int { results := make([]int, 0, len(arrayData)) encounters := map[int]bool{} for _, v := range arrayData { if !encounters[v] { results = append(results, v) encounters[v] = true } } return results } // ExistsString returns the value exists in the array. func ExistsString(arrayData []string, val string) bool { for _, v := range arrayData { if v == val { return true } } return false } // IntArrayToString 数値の配列を delimiter で結合した文字列で返却する. func IntArrayToString(arrayData []int, delimiter string) string { results := make([]string, 0, len(arrayData)) for _, data := range arrayData { results = append(results, strconv.Itoa(data)) } return strings.Join(results, delimiter) } // StringToIntArray delimiter で結合された文字列を数値の配列にする. 
func StringToIntArray(str string) ([]int, error) { if str == "" { return make([]int, 0), nil } splitedStrList := strings.Split(str, ",") result := make([]int, len(splitedStrList)) for i, splitedStr := range splitedStrList { val, err := strconv.ParseInt(splitedStr, 10, 64) if err != nil { return make([]int, 0), err } result[i] = int(val) } return result, nil } // RemoveNumInArray 指定した数字を数値配列内から全て除去する. func RemoveNumInArray(removeNum int, intArray []int) []int { result := []int{} for _, num := range intArray { if num != removeNum { result = append(result, num) } } return result } // SliceInt 指定した範囲のリストを取得する func SliceInt(offset int, limit int, intArray []int) []int { result := make([]int, 0, limit) to := offset + limit if offset >= len(intArray) { return []int{} } if to > len(intArray) { to = len(intArray) } for i := offset; i < to; i++ { result = append(result, intArray[i]) } return result } //SortMapByValueForTime Map(key:int,value:time.Time)をvalue別に降順ソートし、その時に対応するkeyのリストを返す func SortMapByValueForTime(srcMap map[int]time.Time) []int { type temp struct { key int value time.Time } var result []int var work []temp for k, v := range srcMap { work = append(work, temp{ key: k, value: v, }) } sort.Slice(work, func(a int, b int) bool { return work[a].value.After(work[b].value) //a.time > b.time }) for _, sortTemp := range work { result = append(result, sortTemp.key) } return result } //SortMapByValueForFloat Map(key:int,value:float64)をvalue別に降順ソートし、その時に対応するkeyのリストを返す func SortMapByValueForFloat(srcMap map[int]float64) []int { type temp struct { key int value float64 } var result []int var work []temp for k, v := range srcMap { work = append(work, temp{ key: k, value: v, }) } sort.Slice(work, func(a int, b int) bool { return work[a].value > work[b].value }) for _, sortTemp := range work { result = append(result, sortTemp.key) } return result }
utils/arrays.go
0.58522
0.400544
arrays.go
starcoder
package naiveIPAM import ( "math" "fmt" ) // 32-bit word input to count zero bits on right func getEndingZerocount(v uint32) uint { Mod37BitPosition := // map a bit value mod 37 to its position []uint{ 32, 0, 1, 26, 2, 23, 27, 0, 3, 16, 24, 30, 28, 11, 0, 13, 4, 7, 17, 0, 25, 22, 31, 15, 29, 10, 12, 6, 0, 21, 14, 9, 5, 20, 8, 19, 18, } return Mod37BitPosition[(-v&v)%37] } func getLogBase(v uint32) (r uint) { MultiplyDeBruijnBitPosition := []uint{ 0, 9, 1, 10, 13, 21, 2, 29, 11, 14, 16, 18, 22, 25, 3, 30, 8, 12, 20, 28, 15, 17, 24, 7, 19, 27, 23, 6, 26, 5, 4, 31, } v |= v >> 1 // first round down to one less than a power of 2 v |= v >> 2 v |= v >> 4 v |= v >> 8 v |= v >> 16 r = MultiplyDeBruijnBitPosition[(uint32)(v*0x07C4ACDD)>>27] return } //cut [f,l) range into cidr ranges //say [0,256) will return [0,256) //[4,256) will return [[4 8) [8 16) [16 32) [32 64) [64 128) [128 256)] func cutNumRangeIntoCidrPieces(numRange NumRange, maxSubnetLen uint32) []NumRange { return cutRangeIntoCidrPieces(numRange.Low, numRange.High, maxSubnetLen) } func GetFirstLargeEnoughNumRange(f, l uint32, wangtedSubNetLen uint32) NumRange { p := f for p < l { step := uint32(1 << getEndingZerocount(p)) if step == 0 { step = 1 << 31 } for p+step > l { step = step >> 1 } if step >= wangtedSubNetLen { return NumRange{p, p + step} } p += step } return NumRange{} } func cutRangeIntoCidrPieces(f, l uint32, maxSubnetLen uint32) []NumRange { res := make([]NumRange, 0) if maxSubnetLen == 0 { // avoid infinite loop maxSubnetLen = math.MaxUint32 } p := f for p < l { step := uint32(1 << getEndingZerocount(p)) if step == 0 { step = 1 << 31 } for p+step > l || step > maxSubnetLen { step = step >> 1 } //fmt.Println(p, p+step-1) res = append(res, NumRange{p, p + step}) p += step } return res } func ceiling(n uint32) uint32 { if 1<<uint32(getLogBase(n)) == n { return n } return 2 << uint32(getLogBase(n)) } func allocNIPs(f, l uint32, n int, maxSubnetLen uint32) []NumRange { ranges := 
cutRangeIntoCidrPieces(f, l, maxSubnetLen) nrest := uint32(n) ret := make([]NumRange, 0) for _, r := range ranges { rcount := r.High - r.Low + 1 want := ceiling(nrest) if rcount > want { ret = append(ret, NumRange{r.Low, r.Low + want}) return ret } ret = append(ret, r) nrest -= rcount } return ret } func allocCIDRs(f, l string, n int, maxSubnetLen uint32) []string { fi := StringIpToInt(f) li := StringIpToInt(l) res := allocNIPs(fi, li, n, maxSubnetLen) ret := make([]string, 0) for _, r := range res { ret = append(ret, rangeToCidr(r.Low, r.High)) } return ret } func rangeToCidr(u uint32, u2 uint32) string { size := u2 - u logsize := getLogBase(size) u = (math.MaxUint32 << logsize) & u return fmt.Sprintf("%v/%v", IpIntToString(u), 32-logsize) }
rangeCalc.go
0.524395
0.406509
rangeCalc.go
starcoder
package swagger import "math" type InterestPrincipalCalculator struct { ICalculator } // Calculate how much needs to be paid back for each month/fortnightly/weekly func (InterestPrincipalCalculator) CalculateRepayment(InterestRate float64, LoanTerm int32, LoanAmount float64, totalNumberOfPayments int32) (repayment float64) { if InterestRate != 0 { rate := (InterestRate / float64(totalNumberOfPayments) / 100) loanTermInMonths := LoanTerm * totalNumberOfPayments repayment = float64(LoanAmount) * (rate + (rate / (math.Pow(float64(rate+1), float64(loanTermInMonths)) - 1))) } return } // Calculate total interest payable for the whole loan term (e.g 1,2,3year...etc) func (InterestPrincipalCalculator) CalculateTotalInterestPayable(loanAmout, repayment float64, totalNumberOfPayments int32) float64 { return (repayment * float64(totalNumberOfPayments)) - loanAmout } // Calculate the reducing interest and principal for the loan term func (InterestPrincipalCalculator) CalculateAmountOwning(interestRate float64, loanTerm int32, loanAmount float64, repayment float64, totalNumberOfPayments int32) (loanRepaymentsAmountOwing []LoanRepaymentsAmountOwing) { interestrate := interestRate loanTermInMonths := loanTerm * totalNumberOfPayments var monthlyInterest = ((interestrate / 100) / float64(loanTermInMonths)) var initialPeriod LoanRepaymentsAmountOwing = LoanRepaymentsAmountOwing{} initialPeriod.Year = 0 initialPeriod.Principal = loanAmount currentInterest := (repayment*float64(loanTermInMonths) - float64(loanAmount)) initialPeriod.Interest = (math.Ceil(currentInterest)) initialPeriod.Total = loanAmount + float64(initialPeriod.Interest) loanRepaymentsAmountOwing = append(loanRepaymentsAmountOwing, initialPeriod) previousInterest := currentInterest for i := int32(1); i <= loanTermInMonths; i++ { currentInterest = loanAmount * monthlyInterest period := LoanRepaymentsAmountOwing{} period.Year = i reducingInterest := previousInterest - currentInterest period.Interest = 
math.Round(reducingInterest) previousInterest = reducingInterest reducingPrincipal := loanAmount - (repayment - currentInterest) period.Principal = math.Round(reducingPrincipal) period.Total = math.Round(reducingPrincipal + reducingInterest) loanAmount = reducingPrincipal loanRepaymentsAmountOwing = append(loanRepaymentsAmountOwing, period) } return }
go/interest_principal_calculator.go
0.746971
0.567577
interest_principal_calculator.go
starcoder
package try import ( "github.com/dairaga/gs" "github.com/dairaga/gs/funcs" ) // From is a Try builder returns Success with given v if err is nil, otherwise returns Failure with given err. func From[T any](v T, err error) gs.Try[T] { return funcs.BuildWithErr(v, err, gs.Failure[T], gs.Success[T]) } // FromWithBool is a Try builder returns Success with given v if ok is true, otherwise returns Failure with ErrUnsatisfied. func FromWithBool[T any](v T, ok bool) gs.Try[T] { return From(v, funcs.Cond(ok, nil, gs.ErrUnsatisfied)) } // ----------------------------------------------------------------------------- // TODO: refactor following functions to methods when go 1.19 releases. // Fold retuns result applying given succ function to successful value if given t is a Success, otherwise applying given fail function to failed value from t. func Fold[T, R any](t gs.Try[T], fail funcs.Func[error, R], succ funcs.Func[T, R]) R { return funcs.Build(t.Fetch, fail, succ) } // Collect tries to build from applying given function p to successful value from t, or returns new Failure with failed value from t. func Collect[T, R any](t gs.Try[T], p funcs.Try[T, R]) gs.Try[R] { return funcs.Build(t.Fetch, gs.Failure[R], funcs.TryRecover(p, From[R])) } // FlatMap returns result applying given function op to successful value from t, or returns new Try with failed value from t. func FlatMap[T, R any](t gs.Try[T], op funcs.Func[T, gs.Try[R]]) gs.Try[R] { return funcs.Build(t.Fetch, gs.Failure[R], op) } // Map returns a Success from applying given function op to successful value from t, or returns a Failure with failed value from t. func Map[T, R any](t gs.Try[T], op funcs.Func[T, R]) gs.Try[R] { return funcs.Build(t.Fetch, gs.Failure[R], funcs.AndThen(op, gs.Success[R])) } // TryMap returns new Try built from applying given function op to successfu value from t, or returns a Failure with failed value from t. 
func TryMap[T, R any](t gs.Try[T], op funcs.Try[T, R]) gs.Try[R] { return funcs.Build(t.Fetch, gs.Failure[R], funcs.TryRecover(op, From[R])) } // PartialMap returns a Success if successful value from t satifies given function p, or return a Failure wiht ErrUnsatisfied. // Otherwise, returns Failure with failed value from t. func PartialMap[T, R any](t gs.Try[T], p funcs.Partial[T, R]) gs.Try[R] { return funcs.Build( t.Fetch, gs.Failure[R], funcs.PartialTransform(p, FromWithBool[R]), ) } // Transform retruns new Try built from applying given succ function to successful value from t, or applying given fail function to failed value from t. func Transform[T, R any](t gs.Try[T], fail funcs.Func[error, gs.Try[R]], succ funcs.Func[T, gs.Try[R]]) gs.Try[R] { return funcs.Build(t.Fetch, fail, succ) }
try/try.go
0.630912
0.564519
try.go
starcoder
package crun import ( "fmt" "log" "math/rand" "regexp/syntax" "time" ) // MoreTimes Maximum omitted default value const MoreTimes = 18 // Regexp syntax tree translated from regexp/syntax type Regexp struct { Op Op Sub []Regexps Rune []rune Min, Max int } // Regexps syntax tree translated from regexp/syntax type Regexps []*Regexp // Compile parses a regular expression and returns. func Compile(str string) (Regexps, error) { reg, err := syntax.Parse(str, syntax.Perl) if err != nil { return nil, fmt.Errorf("crun: Compile(%q): %w", str, err) } return NewRegexps(reg), nil } // MustCompile is like Compile but panics if the expression cannot be parsed. // It simplifies safe initialization of global variables holding compiled regular // expressions. func MustCompile(str string) Regexps { reg, err := Compile(str) if err != nil { panic(err) } return reg } // NewRegexps returns regexps translated from regexp/syntax func NewRegexps(reg *syntax.Regexp) (out Regexps) { return std.NewRegexps(reg) } var std = &Optional{ MoreTimes: MoreTimes, AnyCharNotNL: []rune{33, 126}, AnyChar: []rune{33, 126}, } // Optional is optional related option for regexps type Optional struct { MoreTimes int AnyCharNotNL []rune AnyChar []rune Rand Rand } // NewRegexps returns regexps translated from regexp/syntax func (o *Optional) NewRegexps(reg *syntax.Regexp) (out Regexps) { ff := func(rs ...*Regexp) { out = append(out, rs...) 
} switch reg.Op { case syntax.OpNoMatch: // matches no strings case syntax.OpEmptyMatch: // matches empty string case syntax.OpLiteral: // matches Runes sequence ff(&Regexp{ Op: OpLiteral, Rune: reg.Rune, }) case syntax.OpCharClass: // matches Runes interpreted as range pair list ff(&Regexp{ Op: OpRepeat, Rune: reg.Rune, Max: 1, Min: 1, }) case syntax.OpAnyCharNotNL: // matches any character except newline ff(&Regexp{ Op: OpRepeat, Rune: o.AnyCharNotNL, Max: 1, Min: 1, }) case syntax.OpAnyChar: // matches any character ff(&Regexp{ Op: OpRepeat, Rune: o.AnyChar, Max: 1, Min: 1, }) case syntax.OpBeginLine: // matches empty string at beginning of line case syntax.OpEndLine: // matches empty string at end of line case syntax.OpBeginText: // matches empty string at beginning of text case syntax.OpEndText: // matches empty string at end of text case syntax.OpWordBoundary: // matches word boundary `\b` case syntax.OpNoWordBoundary: // matches word non-boundary `\B` case syntax.OpCapture: // capturing subexpression with index Cap, optional name Name for _, v := range reg.Sub { ff(o.NewRegexps(v)...) 
} case syntax.OpStar: // matches Sub[0] zero or more times sub := make([]Regexps, 0, len(reg.Sub)) for _, v := range reg.Sub { sub = append(sub, o.NewRegexps(v)) } ff(&Regexp{ Op: OpRepeat, Sub: sub, Max: o.MoreTimes, Min: 0, }) case syntax.OpPlus: // matches Sub[0] one or more times sub := make([]Regexps, 0, len(reg.Sub)) for _, v := range reg.Sub { sub = append(sub, o.NewRegexps(v)) } ff(&Regexp{ Op: OpRepeat, Sub: sub, Max: o.MoreTimes, Min: 1, }) case syntax.OpQuest: // matches Sub[0] zero or one times sub := make([]Regexps, 0, len(reg.Sub)) for _, v := range reg.Sub { sub = append(sub, o.NewRegexps(v)) } ff(&Regexp{ Op: OpRepeat, Sub: sub, Max: 1, Min: 0, }) case syntax.OpRepeat: // matches Sub[0] at least Min times, at most Max (Max == -1 is no limit) sub := make([]Regexps, 0, len(reg.Sub)) for _, v := range reg.Sub { sub = append(sub, o.NewRegexps(v)) } ff(&Regexp{ Op: OpRepeat, Sub: sub, Max: reg.Max, Min: reg.Min, }) case syntax.OpConcat: // matches concatenation of Subs for _, v := range reg.Sub { ff(o.NewRegexps(v)...) 
} case syntax.OpAlternate: // matches alternation of Subs sub := make([]Regexps, 0, len(reg.Sub)) for _, v := range reg.Sub { sub = append(sub, o.NewRegexps(v)) } ff(&Regexp{ Op: OpAlternate, Sub: sub, }) default: log.Printf("crun: unsupported op %v", reg.Op) } return out } // Size The number of possibilities that can match regularity func (r Regexps) Size() int { s := 0 size(r, &s) return s } // Range all possibilities func (r Regexps) Range(f func(string) bool) bool { return r.RangeWithRunes(func(s []rune) bool { return f(string(s)) }) } // RangeWithRuns all possibilities func (r Regexps) RangeWithRunes(f func([]rune) bool) bool { return ranges(r, []rune{}, 0, func(s []rune) bool { return f(s) }) } // Rand possibilities func (r Regexps) Rand() string { return string(r.RandWithRunes()) } // RandWithRunes possibilities func (r Regexps) RandWithRunes() []rune { return rands(r, stdRandSource, []rune{}) } // RandSource possibilities func (r Regexps) RandSource(rand Rand) string { return string(r.RandSourceWithRunes(rand)) } // RandSourceWithRunes possibilities func (r Regexps) RandSourceWithRunes(rand Rand) []rune { return rands(r, rand, []rune{}) } var stdRandSource = rand.New(rand.NewSource(time.Now().UnixNano()))
regexps.go
0.697918
0.430028
regexps.go
starcoder
package geo import ( "github.com/draeron/gopkgs/color" "github.com/fogleman/gg" "github.com/twpayne/go-geom" "math" ) type Triangle struct { coord geom.Coord poly *geom.Polygon side float64 Orientation int32 } func NewTriangle(pts Point, side int, orientation int32) *Triangle { t := Triangle{} t.side = float64(side) t.coord = pts.ToCoord() t.poly = geom.NewPolygon(geom.XY) h2 := t.side / 2 * math.Sqrt(3) p1 := Point{0, h2 * 2 / 3} p2 := Point{t.side / 2, h2 * -1 / 3} p3 := Point{-t.side / 2, h2 * -1 / 3} radian := Radians(int(orientation)) p1.Rotate(radian) p2.Rotate(radian) p3.Rotate(radian) p1.Translate(pts) p2.Translate(pts) p3.Translate(pts) t.Orientation = orientation ring := geom.NewLinearRing(geom.XY) ring.MustSetCoords([]geom.Coord{p1.ToCoord(), p2.ToCoord(), p3.ToCoord(), p1.ToCoord()}) t.poly.Push(ring) return &t } func (t Triangle) Bounds() Rectangle { return ToRect(t.bounds()) } func (t Triangle) bounds() *geom.Bounds { return t.poly.Bounds() } func (t *Triangle) Rotate(rad float64) { t.RotateAround(rad, t.Centroid()) } func (t *Triangle) RotateAround(rad float64, pivot Point) { ring := t.poly.LinearRing(0) coords := ring.Coords() center := t.Centroid() for i := 0; i < 4; i++ { p := toPoint(coords[i]) p.RotateAround(rad, pivot) coords[i].Set(p.ToCoord()) } t.poly.MustSetCoords([][]geom.Coord{coords}) center.RotateAround(rad, pivot) t.coord = center.ToCoord() } func (t *Triangle) Translate(p Point) { ctr := toPoint(t.coord) ctr.Translate(p) t.coord = ctr.ToCoord() ring := t.poly.LinearRing(0) coords := ring.Coords() for i := 0; i < len(coords); i++ { coords[i][0] += p.X coords[i][1] += p.Y } t.poly.MustSetCoords([][]geom.Coord{coords}) } func (t *Triangle) Scale(ratio float64) { t.Translate(t.Pos().Invert()) for i := 0; i < len(t.coord); i++ { t.coord[i] -= ratio } t.Translate(t.Pos()) } func (t *Triangle) Intersect(ge Geometry) bool { return intersect(t, ge) } func (t *Triangle) threshold() float64 { return t.side / 2 * math.Sqrt(3) * 0.57 } func (t 
*Triangle) Pos() Point { return toPoint(t.coord) } func (t *Triangle) Centroid() Point { return toPoint(t.coord) } func (t *Triangle) Draw(g *gg.Context) { t.DrawOutline(g) t.DrawCenter(g) } func (t *Triangle) DrawOutline(g *gg.Context) { g.Push() defer g.Pop() for i := 0; i < t.poly.NumCoords(); i++ { p1 := t.poly.Coord(i) g.LineTo(p1.X(), p1.Y()) } g.FillPreserve() g.SetColor(color.Black) g.Stroke() } func (t *Triangle) DrawCenter(g *gg.Context) { g.Push() defer g.Pop() //ctr := t.Centroid() //rgb := color.Black.RGB() //rgb.A = 25 //g.SetColor(rgb) //g.DrawCircle(ctr.X, ctr.Y, t.threshold()) //g.Fill() g.SetColor(color.CyanBlue) t.Centroid().Draw(g) }
geo/triangle.go
0.745861
0.633297
triangle.go
starcoder
package images import ( "math" ) type ResampleFilter struct { Support float64 Kernel func(float64) float64 } var NearestNeighbor ResampleFilter var Box ResampleFilter var Linear ResampleFilter var Hermite ResampleFilter var MitchellNetravali ResampleFilter var CatmullRom ResampleFilter var BSpline ResampleFilter var Gaussian ResampleFilter var Bartlett ResampleFilter var Lanczos ResampleFilter var Hann ResampleFilter var Hamming ResampleFilter var Blackman ResampleFilter var Welch ResampleFilter var Cosine ResampleFilter func bcspline(x, b, c float64) float64 { x = math.Abs(x) if x < 1.0 { return ((12-9*b-6*c)*x*x*x + (-18+12*b+6*c)*x*x + (6 - 2*b)) / 6 } if x < 2.0 { return ((-b-6*c)*x*x*x + (6*b+30*c)*x*x + (-12*b-48*c)*x + (8*b + 24*c)) / 6 } return 0 } func sinc(x float64) float64 { if x == 0 { return 1 } return math.Sin(math.Pi*x) / (math.Pi * x) } func init() { NearestNeighbor = ResampleFilter{ Support: 0.0, // special case - not applying the filter } Box = ResampleFilter{ Support: 0.5, Kernel: func(x float64) float64 { x = math.Abs(x) if x <= 0.5 { return 1.0 } return 0 }, } Linear = ResampleFilter{ Support: 1.0, Kernel: func(x float64) float64 { x = math.Abs(x) if x < 1.0 { return 1.0 - x } return 0 }, } Hermite = ResampleFilter{ Support: 1.0, Kernel: func(x float64) float64 { x = math.Abs(x) if x < 1.0 { return bcspline(x, 0.0, 0.0) } return 0 }, } MitchellNetravali = ResampleFilter{ Support: 2.0, Kernel: func(x float64) float64 { x = math.Abs(x) if x < 2.0 { return bcspline(x, 1.0/3.0, 1.0/3.0) } return 0 }, } CatmullRom = ResampleFilter{ Support: 2.0, Kernel: func(x float64) float64 { x = math.Abs(x) if x < 2.0 { return bcspline(x, 0.0, 0.5) } return 0 }, } BSpline = ResampleFilter{ Support: 2.0, Kernel: func(x float64) float64 { x = math.Abs(x) if x < 2.0 { return bcspline(x, 1.0, 0.0) } return 0 }, } Gaussian = ResampleFilter{ Support: 2.0, Kernel: func(x float64) float64 { x = math.Abs(x) if x < 2.0 { return math.Exp(-2 * x * x) } return 0 }, } 
Bartlett = ResampleFilter{ Support: 3.0, Kernel: func(x float64) float64 { x = math.Abs(x) if x < 3.0 { return sinc(x) * (3.0 - x) / 3.0 } return 0 }, } Lanczos = ResampleFilter{ Support: 3.0, Kernel: func(x float64) float64 { x = math.Abs(x) if x < 3.0 { return sinc(x) * sinc(x/3.0) } return 0 }, } Hann = ResampleFilter{ Support: 3.0, Kernel: func(x float64) float64 { x = math.Abs(x) if x < 3.0 { return sinc(x) * (0.5 + 0.5*math.Cos(math.Pi*x/3.0)) } return 0 }, } Hamming = ResampleFilter{ Support: 3.0, Kernel: func(x float64) float64 { x = math.Abs(x) if x < 3.0 { return sinc(x) * (0.54 + 0.46*math.Cos(math.Pi*x/3.0)) } return 0 }, } Blackman = ResampleFilter{ Support: 3.0, Kernel: func(x float64) float64 { x = math.Abs(x) if x < 3.0 { return sinc(x) * (0.42 - 0.5*math.Cos(math.Pi*x/3.0+math.Pi) + 0.08*math.Cos(2.0*math.Pi*x/3.0)) } return 0 }, } Welch = ResampleFilter{ Support: 3.0, Kernel: func(x float64) float64 { x = math.Abs(x) if x < 3.0 { return sinc(x) * (1.0 - (x * x / 9.0)) } return 0 }, } Cosine = ResampleFilter{ Support: 3.0, Kernel: func(x float64) float64 { x = math.Abs(x) if x < 3.0 { return sinc(x) * math.Cos((math.Pi/2.0)*(x/3.0)) } return 0 }, } }
filters.go
0.811041
0.468
filters.go
starcoder
package esbuilder import ( "fmt" "strconv" ) // literalNode defines a literal value. type literalNode struct { // value is the literal value. value string } // arrayNode defines an array literal value. type arrayNode struct { // values are the values in the array. values []ExpressionBuilder } // objectNode defines an object literal value. type objectNode struct { // entries are the entries in the object. entries []ExpressionBuilder } // objectNodeEntry defines a single entry in the object. type objectNodeEntry struct { // key is the key for the object entry. key string // value is the value for the object entry. value ExpressionBuilder } func (node literalNode) emit(sb *sourceBuilder) { sb.append(node.value) } func (node arrayNode) emit(sb *sourceBuilder) { sb.append("[") sb.emitSeparated(node.values, ",") sb.append("]") } func (node objectNode) emit(sb *sourceBuilder) { sb.append("{") sb.emitSeparated(node.entries, ",") sb.append("}") } func (node objectNodeEntry) emit(sb *sourceBuilder) { sb.emit(Value(node.key)) sb.append(":") sb.emitWrapped(node.value) } func (node literalNode) isStateless() bool { return true } func (node arrayNode) isStateless() bool { for _, expr := range node.values { if !expr.IsStateless() { return false } } return true } func (node objectNode) isStateless() bool { for _, entry := range node.entries { if !entry.IsStateless() { return false } } return true } func (node objectNodeEntry) isStateless() bool { return node.value.IsStateless() } // ObjectEntry returns an object literal entry. func ObjectEntry(key string, value ExpressionBuilder) ExpressionBuilder { return expressionBuilder{objectNodeEntry{key, value}, nil} } // Object returns an object literal. func Object(entries ...ExpressionBuilder) ExpressionBuilder { return expressionBuilder{objectNode{entries}, nil} } // Array returns an array literal. 
func Array(values ...ExpressionBuilder) ExpressionBuilder { return expressionBuilder{arrayNode{values}, nil} } // LiteralValue returns a literal value. func LiteralValue(value string) ExpressionBuilder { return expressionBuilder{literalNode{value}, nil} } // Value returns a literal value. func Value(value interface{}) ExpressionBuilder { switch t := value.(type) { case bool: if t { return LiteralValue("true") } else { return LiteralValue("false") } case int: return LiteralValue(strconv.Itoa(t)) case float64: return LiteralValue(strconv.FormatFloat(t, 'E', -1, 32)) case string: return LiteralValue(strconv.Quote(t)) default: panic(fmt.Sprintf("unexpected value type %T\n", t)) } }
generator/escommon/esbuilder/expressions_literal.go
0.732496
0.440409
expressions_literal.go
starcoder
package xlpp

// Registry maps every known wire Type code to a factory that allocates a
// fresh, zero-valued Value of the matching concrete type. Decoders look up
// the code read from the payload here to instantiate the right container
// before unmarshalling the value bytes into it.
var Registry = map[Type]func() Value{
	// LPP Types
	TypeDigitalInput:       func() Value { return new(DigitalInput) },
	TypeDigitalOutput:      func() Value { return new(DigitalOutput) },
	TypeAnalogInput:        func() Value { return new(AnalogInput) },
	TypeAnalogOutput:       func() Value { return new(AnalogOutput) },
	TypeLuminosity:         func() Value { return new(Luminosity) },
	TypePresence:           func() Value { return new(Presence) },
	TypeTemperature:        func() Value { return new(Temperature) },
	TypeRelativeHumidity:   func() Value { return new(RelativeHumidity) },
	TypeAccelerometer:      func() Value { return new(Accelerometer) },
	TypeBarometricPressure: func() Value { return new(BarometricPressure) },
	TypeGyrometer:          func() Value { return new(Gyrometer) },
	TypeGPS:                func() Value { return new(GPS) },
	// more LPP Types
	TypeVoltage:       func() Value { return new(Voltage) },
	TypeCurrent:       func() Value { return new(Current) },
	TypeFrequency:     func() Value { return new(Frequency) },
	TypePercentage:    func() Value { return new(Percentage) },
	TypeAltitude:      func() Value { return new(Altitude) },
	TypeConcentration: func() Value { return new(Concentration) },
	TypePower:         func() Value { return new(Power) },
	TypeDistance:      func() Value { return new(Distance) },
	TypeEnergy:        func() Value { return new(Energy) },
	TypeDirection:     func() Value { return new(Direction) },
	TypeUnixTime:      func() Value { return new(UnixTime) },
	TypeColour:        func() Value { return new(Colour) },
	TypeSwitch:        func() Value { return new(Switch) },
	TypeMosquito:      func() Value { return new(Mosquito) },
	// XLPP Types
	TypeInteger: func() Value { return new(Integer) },
	TypeNull:    func() Value { return new(Null) },
	TypeString:  func() Value { return new(String) },
	// True and false are encoded as two distinct type codes, so the
	// TypeBoolTrue factory returns a Bool preset to true while
	// TypeBoolFalse returns the zero (false) Bool.
	TypeBoolTrue: func() Value {
		b := new(Bool)
		*b = true
		return b
	},
	TypeBoolFalse: func() Value { return new(Bool) },
	TypeObject:    func() Value { return new(Object) },
	TypeArray:     func() Value { return new(Array) },
	// Note: endOfArray is returned by value (not via new); it is a
	// sentinel marking the end of an array in the stream.
	TypeEndOfArray: func() Value { return endOfArray{} },
	// TypeArrayOf: func() Value { return new(Array) },
	// TypeFlags: func() Value { return new(Flags) },
	TypeBinary: func() Value { return new(Binary) },
}
registry.go
0.579162
0.538862
registry.go
starcoder
package metrics // A Sample represents an OpenMetrics sample containing labels and the value. type Sample struct { Labels map[string]string Value uint64 } // MetricSet represents a set of metrics. type MetricSet struct { set map[MetricType][]Sample labels map[string]string } // MetricType is a numeric code identifying the metric. type MetricType int const ( // CPUSecondsTotal represents the total CPU seconds used CPUSecondsTotal MetricType = iota // DiskReadBytesTotal represents the read bytes for a disk DiskReadBytesTotal // DiskReadsCompletedTotal represents the completed for a disk DiskReadsCompletedTotal // DiskWrittenBytesTotal represents the written bytes for a disk DiskWrittenBytesTotal // DiskWritesCompletedTotal represents the completed writes for a disk DiskWritesCompletedTotal // FilesystemAvailBytes represents the available bytes on a filesystem FilesystemAvailBytes // FilesystemFreeBytes represents the free bytes on a filesystem FilesystemFreeBytes // FilesystemSizeBytes represents the size in bytes of a filesystem FilesystemSizeBytes // MemoryActiveAnonBytes represents the amount of anonymous memory on active LRU list MemoryActiveAnonBytes // MemoryActiveFileBytes represents the amount of file-backed memory on active LRU list MemoryActiveFileBytes // MemoryActiveBytes represents the amount of memory on active LRU list MemoryActiveBytes // MemoryCachedBytes represents the amount of cached memory MemoryCachedBytes // MemoryDirtyBytes represents the amount of memory waiting to get written back to the disk MemoryDirtyBytes // MemoryHugePagesFreeBytes represents the amount of free memory for hugetlb MemoryHugePagesFreeBytes // MemoryHugePagesTotalBytes represents the amount of used memory for hugetlb MemoryHugePagesTotalBytes // MemoryInactiveAnonBytes represents the amount of anonymous memory on inactive LRU list MemoryInactiveAnonBytes // MemoryInactiveFileBytes represents the amount of file-backed memory on inactive LRU list MemoryInactiveFileBytes // 
MemoryInactiveBytes represents the amount of memory on inactive LRU list MemoryInactiveBytes // MemoryMappedBytes represents the amount of mapped memory MemoryMappedBytes //MemoryMemAvailableBytes represents the amount of available memory MemoryMemAvailableBytes // MemoryMemFreeBytes represents the amount of free memory MemoryMemFreeBytes // MemoryMemTotalBytes represents the amount of used memory MemoryMemTotalBytes // MemoryRSSBytes represents the amount of anonymous and swap cache memory MemoryRSSBytes // MemoryShmemBytes represents the amount of cached filesystem data that is swap-backed MemoryShmemBytes // MemorySwapBytes represents the amount of swap memory MemorySwapBytes // MemoryUnevictableBytes represents the amount of unevictable memory MemoryUnevictableBytes // MemoryWritebackBytes represents the amount of memory queued for syncing to disk MemoryWritebackBytes // NetworkReceiveBytesTotal represents the amount of received bytes on a given interface NetworkReceiveBytesTotal // NetworkReceiveDropTotal represents the amount of received dropped bytes on a given interface NetworkReceiveDropTotal // NetworkReceiveErrsTotal represents the amount of received errors on a given interface NetworkReceiveErrsTotal // NetworkReceivePacketsTotal represents the amount of received packets on a given interface NetworkReceivePacketsTotal // NetworkTransmitBytesTotal represents the amount of transmitted bytes on a given interface NetworkTransmitBytesTotal // NetworkTransmitDropTotal represents the amount of transmitted dropped bytes on a given interface NetworkTransmitDropTotal // NetworkTransmitErrsTotal represents the amount of transmitted errors on a given interface NetworkTransmitErrsTotal // NetworkTransmitPacketsTotal represents the amount of transmitted packets on a given interface NetworkTransmitPacketsTotal // ProcsTotal represents the number of running processes ProcsTotal ) // MetricNames associates a metric type to its name. 
var MetricNames = map[MetricType]string{ CPUSecondsTotal: "lxd_cpu_seconds_total", DiskReadBytesTotal: "lxd_disk_read_bytes_total", DiskReadsCompletedTotal: "lxd_disk_reads_completed_total", DiskWrittenBytesTotal: "lxd_disk_written_bytes_total", DiskWritesCompletedTotal: "lxd_disk_writes_completed_total", FilesystemAvailBytes: "lxd_filesystem_avail_bytes", FilesystemFreeBytes: "lxd_filesystem_free_bytes", FilesystemSizeBytes: "lxd_filesystem_size_bytes", MemoryActiveAnonBytes: "lxd_memory_Active_anon_bytes", MemoryActiveFileBytes: "lxd_memory_Active_file_bytes", MemoryActiveBytes: "lxd_memory_Active_bytes", MemoryCachedBytes: "lxd_memory_Cached_bytes", MemoryDirtyBytes: "lxd_memory_Dirty_bytes", MemoryHugePagesFreeBytes: "lxd_memory_HugepagesFree_bytes", MemoryHugePagesTotalBytes: "lxd_memory_HugepagesTotal_bytes", MemoryInactiveAnonBytes: "lxd_memory_Inactive_anon_bytes", MemoryInactiveFileBytes: "lxd_memory_Inactive_file_bytes", MemoryInactiveBytes: "lxd_memory_Inactive_bytes", MemoryMappedBytes: "lxd_memory_Mapped_bytes", MemoryMemAvailableBytes: "lxd_memory_MemAvailable_bytes", MemoryMemFreeBytes: "lxd_memory_MemFree_bytes", MemoryMemTotalBytes: "lxd_memory_MemTotal_bytes", MemoryRSSBytes: "lxd_memory_RSS_bytes", MemoryShmemBytes: "lxd_memory_Shmem_bytes", MemorySwapBytes: "lxd_memory_Swap_bytes", MemoryUnevictableBytes: "lxd_memory_Unevictable_bytes", MemoryWritebackBytes: "lxd_memory_Writeback_bytes", NetworkReceiveBytesTotal: "lxd_network_receive_bytes_total", NetworkReceiveDropTotal: "lxd_network_receive_drop_total", NetworkReceiveErrsTotal: "lxd_network_receive_errs_total", NetworkReceivePacketsTotal: "lxd_network_receive_packets_total", NetworkTransmitBytesTotal: "lxd_network_transmit_bytes_total", NetworkTransmitDropTotal: "lxd_network_transmit_drop_total", NetworkTransmitErrsTotal: "lxd_network_transmit_errs_total", NetworkTransmitPacketsTotal: "lxd_network_transmit_packets_total", ProcsTotal: "lxd_procs_total", } // MetricHeaders represents the metric 
headers which contain help messages as specified by OpenMetrics. var MetricHeaders = map[MetricType]string{ CPUSecondsTotal: "# HELP lxd_cpu_seconds_total The total number of CPU seconds used in milliseconds.", DiskReadBytesTotal: "# HELP lxd_disk_read_bytes_total The total number of bytes read.", DiskReadsCompletedTotal: "# HELP lxd_disk_reads_completed_total The total number of completed reads.", DiskWrittenBytesTotal: "# HELP lxd_disk_written_bytes_total The total number of bytes written.", DiskWritesCompletedTotal: "# HELP lxd_disk_writes_completed_total The total number of completed writes.", FilesystemAvailBytes: "# HELP lxd_filesystem_avail_bytes The number of available space in bytes.", FilesystemFreeBytes: "# HELP lxd_filesystem_free_bytes The number of free space in bytes.", FilesystemSizeBytes: "# HELP lxd_filesystem_size_bytes The size of the filesystem in bytes.", MemoryActiveAnonBytes: "# HELP lxd_memory_Active_anon_bytes The amount of anonymous memory on active LRU list.", MemoryActiveFileBytes: "# HELP lxd_memory_Active_file_bytes The amount of file-backed memory on active LRU list.", MemoryActiveBytes: "# HELP lxd_memory_Active_bytes The amount of memory on active LRU list.", MemoryCachedBytes: "# HELP lxd_memory_Cached_bytes The amount of cached memory.", MemoryDirtyBytes: "# HELP lxd_memory_Dirty_bytes The amount of memory waiting to get written back to the disk.", MemoryHugePagesFreeBytes: "# HELP lxd_memory_HugepagesFree_bytes The amount of free memory for hugetlb.", MemoryHugePagesTotalBytes: "# HELP lxd_memory_HugepagesTotal_bytes The amount of used memory for hugetlb.", MemoryInactiveAnonBytes: "# HELP lxd_memory_Inactive_anon_bytes The amount of file-backed memory on inactive LRU list.", MemoryInactiveFileBytes: "# HELP lxd_memory_Inactive_file_bytes The amount of file-backed memory on inactive LRU list.", MemoryInactiveBytes: "# HELP lxd_memory_Inactive_bytes The amount of memory on inactive LRU list.", MemoryMappedBytes: "# HELP 
lxd_memory_Mapped_bytes The amount of mapped memory.", MemoryMemAvailableBytes: "# HELP lxd_memory_MemAvailable_bytes The amount of available memory.", MemoryMemFreeBytes: "# HELP lxd_memory_MemFree_bytes The amount of free memory.", MemoryMemTotalBytes: "# HELP lxd_memory_MemTotal_bytes The amount of used memory.", MemoryRSSBytes: "# HELP lxd_memory_RSS_bytes The amount of anonymous and swap cache memory.", MemoryShmemBytes: "# HELP lxd_memory_Shmem_bytes The amount of cached filesystem data that is swap-backed.", MemorySwapBytes: "# HELP lxd_memory_Swap_bytes The amount of used swap memory.", MemoryUnevictableBytes: "# HELP lxd_memory_Unevictable_bytes The amount of unevictable memory.", MemoryWritebackBytes: "# HELP lxd_memory_Writeback_bytes The amount of memory queued for syncing to disk.", NetworkReceiveBytesTotal: "# HELP lxd_network_receive_bytes_total The amount of received bytes on a given interface.", NetworkReceiveDropTotal: "# HELP lxd_network_receive_drop_total The amount of received dropped bytes on a given interface.", NetworkReceiveErrsTotal: "# HELP lxd_network_receive_errs_total The amount of received errors on a given interface.", NetworkReceivePacketsTotal: "# HELP lxd_network_receive_packets_total The amount of received packets on a given interface.", NetworkTransmitBytesTotal: "# HELP lxd_network_transmit_bytes_total The amount of transmitted bytes on a given interface.", NetworkTransmitDropTotal: "# HELP lxd_network_transmit_drop_total The amount of transmitted dropped bytes on a given interface.", NetworkTransmitErrsTotal: "# HELP lxd_network_transmit_errs_total The amount of transmitted errors on a given interface.", NetworkTransmitPacketsTotal: "# HELP lxd_network_transmit_packets_total The amount of transmitted packets on a given interface.", ProcsTotal: "# HELP lxd_procs_total The number of running processes.", }
lxd/metrics/types.go
0.688887
0.463687
types.go
starcoder
// Package util provides helper constructors and formatting utilities for the
// birlang interpreter.
package util

import (
	"crypto/rand"
	"encoding/hex"

	"github.com/canpacis/birlang/src/ast"
)

// GenerateIntPrimitive builds an int primitive expression node carrying the
// given value, anchored at the zero position.
func GenerateIntPrimitive(value int64) ast.IntPrimitiveExpression {
	return ast.IntPrimitiveExpression{
		Operation: "primitive",
		Value:     value,
		Type:      "int",
		Position: ast.Position{
			Line: 0,
			Col:  0,
		},
	}
}

// GenerateIntFromBool builds an int primitive carrying 1 for true and 0 for
// false, since the language models booleans as integers.
func GenerateIntFromBool(condition bool) ast.IntPrimitiveExpression {
	var value int64
	if condition {
		value = 1
	}
	return ast.IntPrimitiveExpression{
		Operation: "primitive",
		Value:     value,
		Type:      "int",
		Position: ast.Position{
			Line: 0,
			Col:  0,
		},
	}
}

// GenerateIdentifier builds a non-negative identifier node with the given
// name at the zero position.
func GenerateIdentifier(name string) ast.Identifier {
	return ast.Identifier{
		Operation: "identifier",
		Negative:  false,
		Value:     name,
		Position:  ast.Position{Line: 0, Col: 0},
	}
}

// IsPowerOfTen reports whether input is a positive power of ten (10, 100,
// 1000, ...). Zero, one and values not divisible by ten return false.
func IsPowerOfTen(input int64) bool {
	if input%10 != 0 || input == 0 {
		return false
	}
	if input == 10 {
		return true
	}
	return IsPowerOfTen(input / 10)
}

// GenerateNativeFunction wraps a Go-implemented function body in a block
// declaration so it can be registered under the given name.
func GenerateNativeFunction(name string, body ast.NativeFunction) ast.BlockDeclarationStatement {
	return ast.BlockDeclarationStatement{
		Name:         GenerateIdentifier(name),
		Native:       true,
		Verbs:        []ast.Identifier{},
		Arguments:    []ast.Identifier{},
		Implementing: false,
		Implements:   ast.Identifier{},
		Populate:     nil,
		Position:     ast.Position{Line: 1, Col: 0},
		Instance:     nil,
		Body:         body,
	}
}

// GenerateNativeFunctionReturn builds the return payload of a native
// function: an optional error/warning flag, a message and an int result.
func GenerateNativeFunctionReturn(_error bool, _warn bool, message string, value int64) ast.NativeFunctionReturn {
	return ast.NativeFunctionReturn{
		Error:   _error,
		Warn:    _warn,
		Message: message,
		Value:   GenerateIntPrimitive(value),
	}
}

// UUID returns a random RFC 4122 version-4 UUID string.
func UUID() string {
	b := make([]byte, 16)
	_, err := rand.Read(b)
	if err != nil {
		panic(err)
	}
	// BUGFIX: the previous implementation hex-encoded b[0:4] for every
	// group, so all five groups repeated the same four bytes. Encode the
	// standard 4-2-2-2-6 byte layout instead, and set the version (4)
	// and variant (10xx) bits per RFC 4122.
	b[6] = (b[6] & 0x0f) | 0x40
	b[8] = (b[8] & 0x3f) | 0x80
	return hex.EncodeToString(b[0:4]) + "-" +
		hex.EncodeToString(b[4:6]) + "-" +
		hex.EncodeToString(b[6:8]) + "-" +
		hex.EncodeToString(b[8:10]) + "-" +
		hex.EncodeToString(b[10:16])
}

// Color holds the ANSI escape sequences used for terminal output.
type Color struct {
	Red     string `json:"red"`
	Yellow  string `json:"yellow"`
	Cyan    string `json:"cyan"`
	Grey    string `json:"grey"`
	Default string `json:"default"`
}

// NewColor returns the ANSI palette when colored is true, or an all-empty
// palette (plain output) otherwise.
func NewColor(colored bool) Color {
	if colored {
		return Color{
			Red:     "\033[1;31m",
			Yellow:  "\033[1;33m",
			Cyan:    "\033[0;36m",
			Grey:    "\033[1;30m",
			Default: "\033[0m",
		}
	}
	return Color{
		Red:     "",
		Yellow:  "",
		Cyan:    "",
		Grey:    "",
		Default: "",
	}
}

// OutputRed wraps message in the red escape sequence.
func (color Color) OutputRed(message string) string {
	return color.Red + message + color.Default
}

// OutputYellow wraps message in the yellow escape sequence.
func (color Color) OutputYellow(message string) string {
	return color.Yellow + message + color.Default
}

// OutputCyan wraps message in the cyan escape sequence.
func (color Color) OutputCyan(message string) string {
	return color.Cyan + message + color.Default
}

// OutputGrey wraps message in the grey escape sequence.
func (color Color) OutputGrey(message string) string {
	return color.Grey + message + color.Default
}
src/util/util.go
0.775095
0.41324
util.go
starcoder
package main

import (
	"github.com/ByteArena/box2d"
	"github.com/wdevore/RangerGo/api"
	"github.com/wdevore/RangerGo/engine/nodes/custom"
)

// GroundComponent represents both the visual and physic components
type GroundComponent struct {
	visual    api.INode
	b2Body    *box2d.B2Body
	b2Shape   box2d.B2EdgeShape
	b2Fixture *box2d.B2Fixture
}

// NewGroundComponent constructs a component. The visual is a unit line
// (-1..1 on x) attached to parent; its physics body is created later by
// Configure.
func NewGroundComponent(name string, parent api.INode) *GroundComponent {
	o := new(GroundComponent)

	o.visual = custom.NewLineNode(name, parent.World(), parent)
	gln := o.visual.(*custom.LineNode)
	gln.SetPoints(-1.0, 0.0, 1.0, 0.0) // Set by unit coordinates

	return o
}

// Configure component: creates the static body, the edge shape and the
// initial fixture in the given physics world.
func (c *GroundComponent) Configure(b2World *box2d.B2World) {
	// A body def used to create bodies
	bDef := box2d.MakeB2BodyDef()
	bDef.Type = box2d.B2BodyType.B2_staticBody

	// An instance of a body to contain Fixtures
	c.b2Body = b2World.CreateBody(&bDef)

	// Every Fixture has a shape
	c.b2Shape = box2d.MakeB2EdgeShape()

	fd := box2d.MakeB2FixtureDef()
	fd.Shape = &c.b2Shape
	fd.Density = 1.0

	c.b2Fixture = c.b2Body.CreateFixtureFromDef(&fd) // attach Fixture to body
}

// SetColor sets the visual's color
func (c *GroundComponent) SetColor(color api.IPalette) {
	gr := c.visual.(*custom.LineNode)
	gr.SetColor(color)
}

// SetScale sets the component's length (half-extent on each side of the
// origin, matching the unit line visual scaled by the same factor).
func (c *GroundComponent) SetScale(scale float64) {
	c.visual.SetScale(scale)

	// To change a shape's property we need to destroy the old
	// fixture and create a new one based on the new value.
	c.b2Body.DestroyFixture(c.b2Fixture)

	c.b2Shape.Set(box2d.MakeB2Vec2(-scale, 0.0), box2d.MakeB2Vec2(scale, 0.0))

	fd := box2d.MakeB2FixtureDef()
	fd.Shape = &c.b2Shape
	fd.Density = 1.0

	c.b2Fixture = c.b2Body.CreateFixtureFromDef(&fd) // attach Fixture to body
}

// SetRotation sets the component's orientation, keeping the body's current
// position.
func (c *GroundComponent) SetRotation(angle float64) {
	c.visual.SetRotation(angle)
	c.b2Body.SetTransform(c.b2Body.GetPosition(), angle)
}

// SetPosition sets component's location, keeping the body's current angle.
func (c *GroundComponent) SetPosition(x, y float64) {
	c.visual.SetPosition(x, y)
	c.b2Body.SetTransform(box2d.MakeB2Vec2(x, y), c.b2Body.GetAngle())
}
examples/physics/basics/seesaw/ground_component.go
0.755637
0.515925
ground_component.go
starcoder
package main

import (
	"flag"
	"fmt"
	"image"
	"image/draw"
	"image/png"
	"log"
	"math"
	"os"

	"github.com/nfnt/resize"
)

// Command-line flags selecting the mix mode, the input images and the
// output destination.
var (
	mode       = flag.String("mode", "std4", "The `mode` to use, std3 or std4.")
	screenPath = flag.String("s", "", "The `path` to the screen image.")
	boxPath    = flag.String("b", "", "The `path` to the 3D boxart image.")
	cartPath   = flag.String("c", "", "The `path` to the 3D cartridge image.")
	wheelPath  = flag.String("w", "", "The `path` to the wheel iamge.")
	outputPath = flag.String("o", "comp_test.png", "The `path` to write the output.")
)

// PreDefValue are the predefined values for Values.
type PreDefValue int

const (
	Undefined PreDefValue = iota
	Center
	Left
	Right
	Up
	Down
)

// Value is a value which can be absolute, relative, or a predefined short hand.
type Value struct {
	Rel    float64
	Abs    int
	PreDef PreDefValue
}

// V returns the value based on the overall image dimension and the dimension
// of the overlayed image. Precedence: predefined shorthand, then relative
// fraction, then absolute pixels.
func (v Value) V(total, img int) int {
	switch v.PreDef {
	case Center:
		return total/2 - img/2
	case Left, Up:
		return 0
	case Right, Down:
		return total - img
	}
	if v.Rel != 0 {
		return int(math.Floor(v.Rel * float64(total)))
	}
	return v.Abs
}

// Element represents an element in the overall image.
type Element struct {
	Image    string
	Width    Value
	Height   Value
	TopLeftX Value
	TopLeftY Value
}

// Def represents the overall mix definition.
type Def struct {
	Width    int
	Height   int
	Elements []Element
}

// StandardThree creates a Def for the Standard 3 image mix.
func StandardThree(screen, box, wheel string) *Def { def := &Def{ Width: 600, Height: 400, } def.Elements = append(def.Elements, Element{ Image: screen, Width: Value{Rel: 0.9}, Height: Value{Rel: 0.85}, TopLeftX: Value{PreDef: Center}, TopLeftY: Value{PreDef: Up}, }) def.Elements = append(def.Elements, Element{ Image: box, Width: Value{Rel: 0.5}, Height: Value{Rel: 0.75}, TopLeftX: Value{PreDef: Left}, TopLeftY: Value{PreDef: Down}, }) def.Elements = append(def.Elements, Element{ Image: wheel, Width: Value{Rel: 0.5}, Height: Value{Rel: 0.333}, TopLeftX: Value{PreDef: Right}, TopLeftY: Value{PreDef: Down}, }) return def } // StandardFour creates a Def for the Standard 4 image mix. func StandardFour(screen, box, cart, wheel string) *Def { def := &Def{ Width: 600, Height: 400, } def.Elements = append(def.Elements, Element{ Image: screen, Width: Value{Rel: 0.9}, Height: Value{Rel: 0.85}, TopLeftX: Value{PreDef: Center}, TopLeftY: Value{PreDef: Up}, }) def.Elements = append(def.Elements, Element{ Image: box, Width: Value{Rel: 0.5}, Height: Value{Rel: 0.75}, TopLeftX: Value{PreDef: Left}, TopLeftY: Value{PreDef: Down}, }) def.Elements = append(def.Elements, Element{ Image: cart, Width: Value{Rel: 0.25}, Height: Value{Rel: 0.375}, TopLeftX: Value{Rel: 0.167}, TopLeftY: Value{PreDef: Down}, }) def.Elements = append(def.Elements, Element{ Image: wheel, Width: Value{Rel: 0.5}, Height: Value{Rel: 0.333}, TopLeftX: Value{PreDef: Right}, TopLeftY: Value{PreDef: Down}, }) return def } // Draw draws the mix image. 
func Draw(def *Def) (image.Image, error) { m := image.NewRGBA(image.Rect(0, 0, def.Width, def.Height)) for i, e := range def.Elements { f, err := os.Open(e.Image) if err != nil { return m, fmt.Errorf("%s: %q", e.Image, err) } img, _, err := image.Decode(f) if err != nil { return m, fmt.Errorf("%s: %q", e.Image, err) } f.Close() w := e.Width.V(def.Width, 0) h := e.Width.V(def.Height, 0) img = resize.Thumbnail(uint(w), uint(h), img, resize.Bilinear) b := img.Bounds() w = b.Dx() h = b.Dy() x := e.TopLeftX.V(def.Width, w) y := e.TopLeftY.V(def.Height, h) r := image.Rect(x, y, x+w, y+h) if i == 0 { draw.Draw(m, r, img, image.ZP, draw.Src) } else { draw.Draw(m, r, img, image.ZP, draw.Over) } } return m, nil } func main() { flag.Parse() var def *Def if *mode == "std4" { def = StandardFour(*screenPath, *boxPath, *cartPath, *wheelPath) } else { def = StandardThree(*screenPath, *boxPath, *wheelPath) } m, err := Draw(def) if err != nil { log.Fatal(err) } o, err := os.Create(*outputPath) if err != nil { log.Fatal(err) } err = png.Encode(o, m) if err != nil { log.Fatal(err) } o.Close() }
mix_experiment/mix.go
0.62681
0.424054
mix.go
starcoder
package neural import ( "github.com/gonum/matrix/mat64" "github.com/milosgajdos83/go-neural/pkg/matrix" ) // Cost is neural network training cost type Cost interface { // CostFunc defines neural network cost function for given input, output and labels. // It returns a single number: cost for given input and output CostFunc(mat64.Matrix, mat64.Matrix, mat64.Matrix) float64 // Delta implements function that calculates error in the last network layer // It returns the output error matrix Delta(mat64.Matrix, mat64.Matrix) mat64.Matrix } // CrossEntropy implements Cost interface type CrossEntropy struct{} // CostFunc implements cross entropy cost function. // C = -(sum(sum((out_k .* log(out) + (1 - out_k) .* log(1 - out)), 2)))/samples func (c CrossEntropy) CostFunc(inMx, outMx, labelsMx mat64.Matrix) float64 { // safe switch type as matrix.MakeLabelsMx returns *mat64.Dense lMx := labelsMx.(*mat64.Dense) oMx := outMx.(*mat64.Dense) // out_k .* log(out) costMxA := new(mat64.Dense) costMxA.Apply(matrix.LogMx, oMx) costMxA.MulElem(lMx, costMxA) // (1 - out_k) .* log(1 - out) costMxB := new(mat64.Dense) lMx.Apply(matrix.SubtrMx(1.0), lMx) oMx.Apply(matrix.SubtrMx(1.0), oMx) oMx.Apply(matrix.LogMx, oMx) costMxB.MulElem(labelsMx, oMx) // Cost matrix costMxB.Add(costMxA, costMxB) // calculate the cost samples, _ := inMx.Dims() cost := -(mat64.Sum(costMxB) / float64(samples)) return cost } // Delta calculates the error of the last layer and returns it // D = (out_k - out) func (c CrossEntropy) Delta(outMx, expMx mat64.Matrix) mat64.Matrix { deltaMx := new(mat64.Dense) deltaMx.Sub(outMx, expMx) return deltaMx } // LogLikelihood implements Cost interface type LogLikelihood struct{} // CostFunc implements log-likelihood cost function. 
// C = -sum(sum(out_k.*log(out))) func (c LogLikelihood) CostFunc(inMx, outMx, labelsMx mat64.Matrix) float64 { // safe switch type as matrix.MakeLabelsMx returns *mat64.Dense lMx := labelsMx.(*mat64.Dense) oMx := outMx.(*mat64.Dense) // out_k .* log(out) costMx := new(mat64.Dense) costMx.Apply(matrix.LogMx, oMx) costMx.MulElem(lMx, costMx) // calculate the cost samples, _ := inMx.Dims() cost := (-mat64.Sum(costMx) / float64(samples)) return cost } // Delta calculates the error of the last layer and returns it // D = (out_k - out) func (c LogLikelihood) Delta(outMx, expMx mat64.Matrix) mat64.Matrix { deltaMx := new(mat64.Dense) deltaMx.Sub(outMx, expMx) return deltaMx }
neural/cost.go
0.792183
0.675666
cost.go
starcoder
package factory

import (
	"encoding/json"

	"github.com/AlexanderFadeev/ood/lab4/color"
	"github.com/AlexanderFadeev/ood/lab4/point"
	"github.com/AlexanderFadeev/ood/lab4/shape"
	"github.com/pkg/errors"
)

// shapeDescription is the JSON representation of any shape. The per-shape
// structs are embedded so their fields unmarshal from the same flat object;
// pointer fields distinguish "absent" from "zero value".
type shapeDescription struct {
	Type   *shapeType   `json:"type"`
	Color  *color.Color `json:"color"`
	Center *point.Point `json:"center"`
	rectangleDescription
	triangleDescription
	ellipseDescription
	regularPolygonDescription
}

// newDescription parses a JSON shape description string.
func newDescription(descriptionStr string) (*shapeDescription, error) {
	var desc shapeDescription
	err := json.Unmarshal([]byte(descriptionStr), &desc)
	if err != nil {
		return nil, errors.Wrap(err, "Failed to unmarshal description")
	}
	return &desc, nil
}

// toShape validates the common fields and dispatches to the shape-specific
// converter for the declared type.
func (sd *shapeDescription) toShape() (shape.Shape, error) {
	if sd.Type == nil {
		return nil, errors.New("Missing shape type")
	}
	if sd.Color == nil {
		return nil, errors.New("Missing shape color")
	}

	switch *sd.Type {
	case shapeTypeRectangle:
		return sd.rectangleDescription.toShape(*sd.Color)
	case shapeTypeTriangle:
		return sd.triangleDescription.toShape(*sd.Color)
	case shapeTypeEllipse:
		return sd.ellipseDescription.toShape(*sd.Color, sd.Center)
	case shapeTypeRegularPolygon:
		return sd.regularPolygonDescription.toShape(*sd.Color, sd.Center)
	default:
		// BUGFIX: this branch handles a type that is present but not
		// recognized; it previously reported "Missing shape type",
		// duplicating the nil-type error above and misleading the user.
		return nil, errors.New("Unknown shape type")
	}
}

// rectangleDescription holds the rectangle-specific JSON fields.
type rectangleDescription struct {
	LeftTop     *point.Point `json:"left_top"`
	RightBottom *point.Point `json:"right_bottom"`
}

// toShape converts the description into a rectangle shape.
func (rd *rectangleDescription) toShape(color color.Color) (shape.Shape, error) {
	if rd.LeftTop == nil {
		return nil, errors.New("Missing rectangle left top vertex")
	}
	if rd.RightBottom == nil {
		return nil, errors.New("Missing rectangle right bottom vertex")
	}
	return shape.NewRectangle(*rd.LeftTop, *rd.RightBottom, color), nil
}

// triangleDescription holds the triangle-specific JSON fields.
type triangleDescription struct {
	VertexA *point.Point `json:"vertex_a"`
	VertexB *point.Point `json:"vertex_b"`
	VertexC *point.Point `json:"vertex_c"`
}

// toShape converts the description into a triangle shape.
func (td *triangleDescription) toShape(color color.Color) (shape.Shape, error) {
	if td.VertexA == nil {
		return nil, errors.New("Missing triangle vertex A")
	}
	if td.VertexB == nil {
		return nil, errors.New("Missing triangle vertex B")
	}
	if td.VertexC == nil {
		return nil, errors.New("Missing triangle vertex C")
	}
	return shape.NewTriangle(*td.VertexA, *td.VertexB, *td.VertexC, color), nil
}

// ellipseDescription holds the ellipse-specific JSON fields; the center
// comes from the shared top-level field.
type ellipseDescription struct {
	VerticalRadius   *float64 `json:"vertical_radius"`
	HorizontalRadius *float64 `json:"horizontal_radius"`
}

// toShape converts the description into an ellipse shape.
func (ed *ellipseDescription) toShape(color color.Color, center *point.Point) (shape.Shape, error) {
	if center == nil {
		return nil, errors.New("Missing ellipse center")
	}
	if ed.VerticalRadius == nil {
		return nil, errors.New("Missing ellipse vertical radius")
	}
	if ed.HorizontalRadius == nil {
		return nil, errors.New("Missing ellipse horizontal radius")
	}
	return shape.NewEllipse(*center, *ed.HorizontalRadius, *ed.VerticalRadius, color)
}

// regularPolygonDescription holds the regular-polygon-specific JSON fields;
// the center comes from the shared top-level field.
type regularPolygonDescription struct {
	Radius   *float64 `json:"radius"`
	Vertices *uint    `json:"vertices"`
}

// toShape converts the description into a regular polygon shape.
func (rpd *regularPolygonDescription) toShape(color color.Color, center *point.Point) (shape.Shape, error) {
	if center == nil {
		return nil, errors.New("Missing regular polygon center")
	}
	if rpd.Radius == nil {
		return nil, errors.New("Missing regular polygon radius")
	}
	if rpd.Vertices == nil {
		return nil, errors.New("Missing regular polygon vertices count")
	}
	return shape.NewRegularPolygon(*rpd.Vertices, *center, *rpd.Radius, color)
}
0.826782
0.414306
description.go
starcoder
package metal

/*
#cgo CFLAGS: -x objective-c
#cgo LDFLAGS: -framework Cocoa -framework Metal -framework MetalKit
#include "api.h"
#include <simd/matrix.h>
*/
import "C"

import (
	"math"
)

// Matrix_multiply returns a*b via the simd C helper.
func Matrix_multiply(a Matrix_float4x4, b Matrix_float4x4) Matrix_float4x4 {
	return Matrix_float4x4(C.simd_matrix_multiply(C.matrix_float4x4(a), C.matrix_float4x4(b)))
}

// Vector3_length returns the Euclidean length of a.
func Vector3_length(a Vector_float3) float32 {
	return float32(C.simd_vector3_length(&a[0]))
}

// Vector3_cross returns the cross product a x b.
func Vector3_cross(a Vector_float3, b Vector_float3) Vector_float3 {
	return Vector_float3(C.simd_vector3_cross(&a[0], &b[0]))
}

// Vector4_normalize returns a scaled to unit length.
func Vector4_normalize(a Vector_float4) Vector_float4 {
	return Vector_float4(C.simd_vector4_normalize(C.vector_float4(a)))
}

// NewMatrix_float4x4 builds a 4x4 matrix from four columns.
// NOTE(review): column order is assumed from the simd convention
// (column-major) — confirm against api.h.
func NewMatrix_float4x4(m []Vector_float4) Matrix_float4x4 {
	return Matrix_float4x4(C.new_matrix_float4x4(
		C.vector_float4(m[0]),
		C.vector_float4(m[1]),
		C.vector_float4(m[2]),
		C.vector_float4(m[3]),
	))
}

// NewMatrix_float3x3 builds a 3x3 matrix from three columns.
func NewMatrix_float3x3(m []Vector_float3) Matrix_float3x3 {
	// since m[0] is an array of 3 floats we have to pass the location of the initial element
	return Matrix_float3x3(C.new_matrix_float3x3(
		&m[0][0],
		&m[1][0],
		&m[2][0],
	))
}

// Matrix_float4x4_translation returns a homogeneous translation matrix
// moving points by t.
func Matrix_float4x4_translation(t Vector_float3) Matrix_float4x4 {
	X := C.vector_float4{1, 0, 0, 0}
	Y := C.vector_float4{0, 1, 0, 0}
	Z := C.vector_float4{0, 0, 1, 0}
	W := C.vector_float4{t[0], t[1], t[2], 1}
	return Matrix_float4x4(C.new_matrix_float4x4(X, Y, Z, W))
}

// Matrix_float4x4_uniform_scale returns a matrix scaling x, y and z by the
// same factor.
func Matrix_float4x4_uniform_scale(scale float32) Matrix_float4x4 {
	return Matrix_float4x4(C.new_matrix_float4x4(
		C.vector_float4{C.float(scale), 0, 0, 0},
		C.vector_float4{0, C.float(scale), 0, 0},
		C.vector_float4{0, 0, C.float(scale), 0},
		C.vector_float4{0, 0, 0, 1},
	))
}

// Matrix_float4x4_rotation returns an axis-angle rotation matrix.
// NOTE(review): axis is assumed to be normalized; the terms follow the
// standard axis-angle expansion — confirm handedness against the callers.
func Matrix_float4x4_rotation(axis Vector_float3, angle float32) Matrix_float4x4 {
	c := C.float(math.Cos(float64(angle)))
	s := C.float(math.Sin(float64(angle)))
	X := C.vector_float4{
		axis[0]*axis[0] + (1-axis[0]*axis[0])*c,
		axis[0]*axis[1]*(1-c) - axis[2]*s,
		axis[0]*axis[2]*(1-c) + axis[1]*s,
		0.0,
	}
	Y := C.vector_float4{
		axis[0]*axis[1]*(1-c) + axis[2]*s,
		axis[1]*axis[1] + (1-axis[1]*axis[1])*c,
		axis[1]*axis[2]*(1-c) - axis[0]*s,
		0.0,
	}
	Z := C.vector_float4{
		axis[0]*axis[2]*(1-c) - axis[1]*s,
		axis[1]*axis[2]*(1-c) + axis[0]*s,
		axis[2]*axis[2] + (1-axis[2]*axis[2])*c,
		0.0,
	}
	W := C.vector_float4{0.0, 0.0, 0.0, 1.0}
	return Matrix_float4x4(C.new_matrix_float4x4(X, Y, Z, W))
}

// Matrix_float4x4_perspective returns a right-handed perspective projection
// for the given aspect ratio, vertical field of view (radians) and near/far
// clip planes.
func Matrix_float4x4_perspective(aspect, fovy, near, far float32) Matrix_float4x4 {
	yScale := float32(1 / math.Tan(float64(fovy)*0.5))
	xScale := yScale / aspect
	zRange := far - near
	zScale := -(far + near) / zRange
	wzScale := -2 * far * near / zRange
	return Matrix_float4x4(C.new_matrix_float4x4(
		C.vector_float4{C.float(xScale), 0, 0, 0},
		C.vector_float4{0, C.float(yScale), 0, 0},
		C.vector_float4{0, 0, C.float(zScale), -1},
		C.vector_float4{0, 0, C.float(wzScale), 0},
	))
}

// Matrix_float4x4_extract_linear returns the upper-left 3x3 (the linear
// part: rotation/scale without translation) of m.
func Matrix_float4x4_extract_linear(m Matrix_float4x4) Matrix_float3x3 {
	return NewMatrix_float3x3([]Vector_float3{
		Vector_float4(m.columns[0]).XYZ(),
		Vector_float4(m.columns[1]).XYZ(),
		Vector_float4(m.columns[2]).XYZ(),
	})
}
0.677154
0.640622
utils.go
starcoder
package temperature

import (
	"errors"
	"fmt"
	"math"
)

// ErrNilArgument is an error when the argument is nil.
var ErrNilArgument = errors.New("argument can't be nil")

// temperatureChangeHandler is invoked whenever a temperature's value changes.
type temperatureChangeHandler func(Temperature)

// Stringer provides String method.
type Stringer interface {
	String() string
}

// Temperature provides all the function needed for a temperature.
type Temperature interface {
	Stringer
	Value() float64
	SetValue(float64)
	Unit() Convertible
	SetUnit(Convertible)
	SetTemperature(Temperature)
	// NOTE(review): "Temperate" looks like a typo for "Temperature", but
	// renaming it would break the exported interface.
	SetTemperateChangeHandler(temperatureChangeHandler)
}

// temperature is the default Temperature implementation: a value, its unit,
// and an optional change handler.
type temperature struct {
	v       float64
	unit    Convertible
	handler temperatureChangeHandler
}

// New returns a new Temperature.
func New(v float64, unit Convertible) Temperature {
	return &temperature{v: v, unit: unit}
}

// NewWithHandler returns a new temperature with a handler to handle
// temperature changes.
func NewWithHandler(v float64, unit Convertible, handler temperatureChangeHandler) Temperature {
	return &temperature{v: v, unit: unit, handler: handler}
}

// String renders the value rounded to two decimals followed by the unit.
func (t temperature) String() string {
	return fmt.Sprintf("%v %s", round(t.v, 2), t.unit)
}

// Value returns the value of the temperature.
func (t temperature) Value() float64 {
	return t.v
}

// SetValue sets the value of the temperature and notifies the handler, if any.
func (t *temperature) SetValue(v float64) {
	t.v = v
	if t.handler == nil {
		return
	}
	t.handler(t)
}

// Unit returns the unit of the temperature.
func (t temperature) Unit() Convertible {
	return t.unit
}

// SetUnit set the unit of the temperature and converts the stored value.
// Note: this does not fire the change handler (by design, it is the same
// temperature expressed differently).
func (t *temperature) SetUnit(u Convertible) {
	kelvin := t.unit.ToKelvin(t.v)
	t.v = u.FromKelvin(kelvin).Value()
	t.unit = u
}

// SetTemperature set the temperature value from any other unit temperature.
func (t *temperature) SetTemperature(temp Temperature) {
	kelvin := temp.Unit().ToKelvin(temp.Value())
	t.SetValue(t.unit.FromKelvin(kelvin).Value())
}

// SetTemperateChangeHandler is a setter for the temperature change handler.
func (t *temperature) SetTemperateChangeHandler(h temperatureChangeHandler) {
	t.handler = h
}

// round rounds num half away from zero to the given number of decimal places.
func round(num float64, precision int) float64 {
	scale := math.Pow(10, float64(precision))
	shifted := num*scale + math.Copysign(0.5, num)
	return float64(int(shifted)) / scale
}

// Convert a temperature to different unit.
func Convert(input Temperature, output Convertible) (Temperature, error) {
	if input == nil || output == nil {
		return nil, ErrNilArgument
	}
	kelvin := input.Unit().ToKelvin(input.Value())
	return output.FromKelvin(kelvin), nil
}

// Equals returns true if two temperature are equals (same instance, same
// unit and value, or equal in Kelvin to two decimal places).
func Equals(a Temperature, b Temperature) bool {
	switch {
	case a == b:
		return true
	case a.Unit() == b.Unit():
		return a.Value() == b.Value()
	}
	return round(a.Unit().ToKelvin(a.Value()), 2) == round(b.Unit().ToKelvin(b.Value()), 2)
}
0.907458
0.469946
temperature.go
starcoder
package diag import ( "math" "time" ) // Value represents a reportable value to be stored in a Field. // The Value struct provides a slot for primitive values that require only // 64bits, a string, or an arbitrary interface. The interpretation of the slots is up to the Reporter. type Value struct { Primitive uint64 String string Ifc interface{} Reporter Reporter } // Reporter defines the type and supports unpacking, querying the decoded Value. type Reporter interface { Type() Type // Ifc decodes the Value and reports the decoded value to as `interface{}` // to the provided callback. Ifc(*Value, func(interface{})) } // Type represents the possible types a Value can have. type Type uint8 const ( IfcType Type = iota BoolType IntType Int64Type Uint64Type Float64Type DurationType TimestampType StringType ) // Interface decodes and returns the value stored in Value. func (v *Value) Interface() (ifc interface{}) { v.Reporter.Ifc(v, func(tmp interface{}) { ifc = tmp }) return ifc } // ValBool creates a new Value representing a bool. func ValBool(b bool) Value { var x uint64 if b { x = 1 } return Value{Primitive: x, Reporter: _boolReporter} } type boolReporter struct{} var _boolReporter Reporter = boolReporter{} func (boolReporter) Type() Type { return BoolType } func (boolReporter) Ifc(v *Value, fn func(interface{})) { fn(bool(v.Primitive != 0)) } // ValInt create a new Value representing an int. func ValInt(i int) Value { return Value{Primitive: uint64(i), Reporter: _intReporter} } type intReporter struct{} var _intReporter Reporter = intReporter{} func (intReporter) Type() Type { return IntType } func (intReporter) Ifc(v *Value, fn func(interface{})) { fn(int(v.Primitive)) } // ValInt64 creates a new Value representing an int64. 
func ValInt64(i int64) Value { return Value{Primitive: uint64(i), Reporter: _int64Reporter} } type int64Reporter struct{} var _int64Reporter Reporter = int64Reporter{} func (int64Reporter) Type() Type { return Int64Type } func (int64Reporter) Ifc(v *Value, fn func(v interface{})) { fn(int64(v.Primitive)) } // ValUint creates a new Value representing an uint. func ValUint(i uint) Value { return ValUint64(uint64(i)) } // ValUint64 creates a new Value representing an uint64. func ValUint64(u uint64) Value { return Value{Primitive: u, Reporter: _uint64Reporter} } type uint64Reporter struct{} var _uint64Reporter Reporter = uint64Reporter{} func (uint64Reporter) Type() Type { return Int64Type } func (uint64Reporter) Ifc(v *Value, fn func(v interface{})) { fn(uint64(v.Primitive)) } // ValFloat creates a new Value representing a float. func ValFloat(f float64) Value { return Value{Primitive: math.Float64bits(f), Reporter: _float64Reporter} } type float64Reporter struct{} var _float64Reporter Reporter = float64Reporter{} func (float64Reporter) Type() Type { return Float64Type } func (float64Reporter) Ifc(v *Value, fn func(v interface{})) { fn(math.Float64frombits(v.Primitive)) } // ValString creates a new Value representing a string. func ValString(str string) Value { return Value{String: str, Reporter: _strReporter} } type strReporter struct{} var _strReporter Reporter = strReporter{} func (strReporter) Type() Type { return StringType } func (strReporter) Ifc(v *Value, fn func(v interface{})) { fn(v.String) } // ValDuration creates a new Value representing a duration. func ValDuration(dur time.Duration) Value { return Value{Primitive: uint64(dur), Reporter: _durReporter} } type durReporter struct{} var _durReporter Reporter = durReporter{} func (durReporter) Type() Type { return DurationType } func (durReporter) Ifc(v *Value, fn func(v interface{})) { fn(time.Duration(v.Primitive)) } // ValTime creates a new Value representing a timestamp. 
func ValTime(ts time.Time) Value { return Value{Ifc: ts, Reporter: _timeReporter} } type timeReporter struct{} var _timeReporter Reporter = timeReporter{} func (timeReporter) Type() Type { return TimestampType } func (timeReporter) Ifc(v *Value, fn func(v interface{})) { fn(v.Ifc) } // ValAny creates a new Value representing any value as interface. func ValAny(ifc interface{}) Value { return Value{Ifc: ifc, Reporter: _anyReporter} } func reportAny(v *Value, fn func(v interface{})) { fn(v.Ifc) } type anyReporter struct{} var _anyReporter Reporter = anyReporter{} func (anyReporter) Type() Type { return IfcType } func (anyReporter) Ifc(v *Value, fn func(v interface{})) { fn(v.Ifc) }
vendor/github.com/urso/diag/value.go
0.748812
0.433862
value.go
starcoder
package sstable import ( "encoding/binary" "errors" "io" "github.com/jmgilman/kv" ) // Segment implements kv.Segment using an SSTable. It uses a contingous body // of read-only, ordered, and encoded KVPair's in order to store the contents // of a MemoryStore into a more durable long-term format. Internally, it uses // a MemoryStore in order to build a sparse index of its stored KVPair's to // reduce the amount of IO required to find a key. type Segment struct { data io.ReadSeeker id kv.SegmentID encoder kv.Encoder index kv.MemoryStore size int } // Get searches the underlying SSTable for the given key by first checking // the internal index table to locate the approximate position and then reading // the contents of the SSTable at that position to find the key. func (s *Segment) Get(key string) (*kv.KVPair, error) { // Get range to search for the given key start, end, err := s.searchIndex(key) if err != nil { return nil, err } // Create a limited reader set to the range s.data.Seek(int64(start), io.SeekStart) reader := LimitReadSeeker(s.data, int64(end-start)) // Create a cursor to iterate over pairs in this range cursor := kv.NewCursor(s.encoder, reader) // Search the range for the given key for { pair, err := cursor.Next() if err != nil { if errors.Is(err, io.EOF) { break } else { return nil, err } } if key == pair.Key { if pair.Tombstone { return nil, kv.ErrorNoSuchKey } else { return &pair, nil } } } return nil, kv.ErrorNoSuchKey } // ID returns the unique ID of this segment. func (s *Segment) ID() kv.SegmentID { return s.id } // LoadIndex populates the internal index table of the segment by reading the // index table data from the internal data stream. 
func (s *Segment) LoadIndex() error { // Get the size of the index table data buf := make([]byte, 4) _, err := s.data.Seek(-4, io.SeekEnd) if err != nil { return err } _, err = s.data.Read(buf) if err != nil { return err } indexSize := binary.BigEndian.Uint32(buf) // Create the index table _, err = s.data.Seek(int64(0-(int64(indexSize)+4)), io.SeekEnd) if err != nil { return nil } reader := LimitReadSeeker(s.data, int64(indexSize)) cursor := kv.NewCursor(s.encoder, reader) for { pair, err := cursor.Next() if err != nil { if errors.Is(err, io.EOF) { break } else { return err } } s.index.Put(pair) } return nil } // Min returns the lowest key stored in this segment. func (s *Segment) Min() *kv.KVPair { return s.index.Min() } // Max returns the highest key stored in this segment. func (s *Segment) Max() *kv.KVPair { return s.index.Max() } // searchIndex searches the index table to find the range, in bytes, where the // key is expected to be found. Returns ErrorNoSuchKey if the key is outside // the range of the index table. 
func (s *Segment) searchIndex(key string) (start int, end int, err error) { min := s.index.Min() max := s.index.Max() // Return out of range is the key is outside the bounds of the table if key < min.Key || key > max.Key { return 0, 0, kv.ErrorNoSuchKey } left, right, err := s.index.Range(key) if err != nil { return 0, 0, err } // Assume start is the beginning and end is the end of the data start = 0 end = s.size // If left is not nil, start at its index position if left != nil { start = int(binary.BigEndian.Uint32(left.Value)) } // If right is not nil, stop at its index position if right != nil { end = int(binary.BigEndian.Uint32(right.Value)) } return start, end, nil } func NewSegment(data io.ReadSeeker, encoder kv.Encoder, index kv.MemoryStore, size int) Segment { return Segment{ data: data, encoder: encoder, index: index, size: size, } } // LimitedReadSeeker provides the same interface for io.LimitedReader for types // implementing io.ReadSeeker. type LimitedReadSeeker struct { R io.ReadSeeker N int64 } func (l *LimitedReadSeeker) Read(p []byte) (n int, err error) { if l.N <= 0 { return 0, io.EOF } if int64(len(p)) > l.N { p = p[0:l.N] } n, err = l.R.Read(p) l.N -= int64(n) return n, nil } func (l *LimitedReadSeeker) Seek(offset int64, whence int) (int64, error) { return l.R.Seek(offset, whence) } func LimitReadSeeker(r io.ReadSeeker, n int64) io.ReadSeeker { return &LimitedReadSeeker{ R: r, N: n, } }
sstable/segment.go
0.808294
0.482551
segment.go
starcoder
package aoc import "C" import ( "math" "strconv" "strings" "utils" ) func createWireLines(wire string) []utils.WirePart { var output []utils.WirePart wirePaths := strings.Split(wire, ",") currentX := 0 currentY := 0 for _, wirePath := range wirePaths { targetX := currentX targetY := currentY direction := wirePath[0:1] length, _ := strconv.Atoi(wirePath[1:]) switch direction { case "U": targetY = currentY + length case "D": targetY = currentY - length case "R": targetX = currentX + length case "L": targetX = currentX - length default: panic("noohoo!") } wirePart := utils.WirePart{P1: utils.Point{X: currentX, Y: currentY}, P2: utils.Point{X: targetX, Y: targetY},} output = append(output, wirePart) currentX = wirePart.P2.X currentY = wirePart.P2.Y } return output } func ccw(point1 utils.Point, point2 utils.Point, point3 utils.Point) bool { return (point3.Y-point1.Y)*(point2.X-point1.X) > (point2.Y-point1.Y)*(point3.X-point1.X) } // https://bryceboe.com/2006/10/23/line-segment-intersection-algorithm/ func hasIntersection(wirePart1 utils.WirePart, wirePart2 utils.WirePart) bool { return ccw(wirePart1.P1, wirePart2.P1, wirePart2.P2) != ccw(wirePart1.P2, wirePart2.P1, wirePart2.P2) && ccw(wirePart1.P1, wirePart1.P2, wirePart2.P1) != ccw(wirePart1.P1, wirePart1.P2, wirePart2.P2) } func findIntersection(wirePart1 utils.WirePart, wirePart2 utils.WirePart) utils.Point { a1 := wirePart1.P2.Y - wirePart1.P1.Y b1 := wirePart1.P1.X - wirePart1.P2.X c1 := a1*wirePart1.P1.X + b1*wirePart1.P1.Y a2 := wirePart2.P2.Y - wirePart2.P1.Y b2 := wirePart2.P1.X - wirePart2.P2.X c2 := a2*wirePart2.P1.X + b2*wirePart2.P1.Y delta := a1*b2 - a2*b1 return utils.Point{X: int((b2*c1 - b1*c2) / delta), Y: int((a1*c2 - a2*c1) / delta)} } func solveDay3(wireInput1 string, wireInput2 string) (int, int) { start := utils.Point{X: 0, Y: 0} wire1 := createWireLines(wireInput1) wire2 := createWireLines(wireInput2) minDistance := math.MaxInt32 minSteps := math.MaxInt32 wireDistance1 := 0 for _, wirePart1 := 
range wire1 { wireDistance2 := 0 for _, wirePart2 := range wire2 { if hasIntersection(wirePart1, wirePart2) { intersectionPoint := findIntersection(wirePart1, wirePart2) minDistance = utils.Min(minDistance, utils.ManhattanDistance(start, intersectionPoint)) missing := utils.PointDistance(wirePart1.P1, intersectionPoint) + utils.PointDistance(wirePart2.P1, intersectionPoint) minSteps = utils.Min(minSteps, wireDistance2+wireDistance1+missing) } wireDistance2 += utils.WireDistance(wirePart2) } wireDistance1 += utils.WireDistance(wirePart1) } return minSteps, minDistance }
src/aoc/day3.go
0.63375
0.40342
day3.go
starcoder
package ch // incidentEdge incident edge for certain vertex type incidentEdge struct { vertexID int64 weight float64 } // addInIncidentEdge Adds incident edge's to pool of "incoming" edges of given vertex. // Just an alias to append() function // incomingVertexID - Library defined ID of vertex // weight - Travel cost of incoming edge func (vertex *Vertex) addInIncidentEdge(incomingVertexID int64, weight float64) { vertex.inIncidentEdges = append(vertex.inIncidentEdges, &incidentEdge{incomingVertexID, weight}) } // addOutIncidentEdge Adds incident edge's to pool of "outcoming" edges of given vertex. // Just an alias to append() function // outcomingVertexID - Library defined ID of vertex // weight - Travel cost of outcoming edge func (vertex *Vertex) addOutIncidentEdge(outcomingVertexID int64, weight float64) { vertex.outIncidentEdges = append(vertex.outIncidentEdges, &incidentEdge{outcomingVertexID, weight}) } // findInIncidentEdge Returns index of incoming incident edge by vertex ID // If incoming incident edge is not found then this function returns -1 func (vertex *Vertex) findInIncidentEdge(vertexID int64) int { for i := range vertex.inIncidentEdges { if vertex.inIncidentEdges[i].vertexID == vertexID { return i } } return -1 } // findOutIncidentEdge Returns index of outcoming incident edge by vertex ID on the other side of that edge // If outcoming incident edge is not found then this function returns -1 func (vertex *Vertex) findOutIncidentEdge(vertexID int64) int { for i := range vertex.outIncidentEdges { if vertex.outIncidentEdges[i].vertexID == vertexID { return i } } return -1 } // updateInIncidentEdge Updates incoming incident edge's cost by vertex ID on the other side of that edge // If operation is not successful then this function returns False func (vertex *Vertex) updateInIncidentEdge(vertexID int64, weight float64) bool { idx := vertex.findInIncidentEdge(vertexID) if idx < 0 { return false } vertex.inIncidentEdges[idx].weight = weight return true } 
// updateOutIncidentEdge Updates outcoming incident edge's cost by vertex ID on the other side of that edge // If operation is not successful then this function returns False func (vertex *Vertex) updateOutIncidentEdge(vertexID int64, weight float64) bool { idx := vertex.findOutIncidentEdge(vertexID) if idx < 0 { return false } vertex.outIncidentEdges[idx].weight = weight return true }
incident_edge.go
0.787972
0.571049
incident_edge.go
starcoder
package gween // Sequence represents a sequence of Tweens, executed one after the other. type Sequence struct { Tweens []*Tween index int // yoyo makes the sequence "yoyo" back to the beginning after it reaches the end yoyo bool // reverse runs the sequence backwards when true reverse bool // loop is the initial number of loops for this sequence to make loop int // loopRemaining is the remaining number of times to loop through the sequence loopRemaining int } // NewSequence returns a new Sequence object. func NewSequence(tweens ...*Tween) *Sequence { seq := &Sequence{ Tweens: tweens, yoyo: false, reverse: false, loopRemaining: 1, loop: 1, } return seq } // Add adds one or more Tweens in order to the Sequence. func (seq *Sequence) Add(tweens ...*Tween) { seq.Tweens = append(seq.Tweens, tweens...) } // Remove removes a Tween of the specified index from the Sequence. func (seq *Sequence) Remove(index int) { if index >= 0 && index < len(seq.Tweens) { seq.Tweens = append(seq.Tweens[:index], seq.Tweens[index+1:]...) } } // Update updates the currently active Tween in the Sequence; once that Tween is done, the Sequence moves onto the next one. // Update() returns the current Tween's output, whether that Tween is complete, and whether the entire Sequence was completed // during this Update. 
func (seq *Sequence) Update(dt float32) (float32, bool, bool) { if !seq.HasTweens() { return 0, false, true } var completed []int remaining := dt yoyoed := false for { // Yoyoing never gets out of bounds if (yoyoed && seq.index == 0) || seq.index >= len(seq.Tweens) || seq.index <= -1 { if seq.loopRemaining >= 1 { seq.loopRemaining -= 1 } if seq.loopRemaining == 0 || remaining == 0 { index := seq.index if index >= len(seq.Tweens) { index = len(seq.Tweens) - 1 } if yoyoed && seq.index == 0 { return seq.Tweens[index].begin, len(completed) > 0, true } return seq.Tweens[index].end, len(completed) > 0, true } seq.index = 0 } v, tc := seq.Tweens[seq.index].Update(remaining) if !tc { return v, len(completed) > 0, false } remaining = seq.Tweens[seq.index].Overflow completed = append(completed, seq.index) yoyoed = seq.yoyoed() seq.Tweens[seq.index].reverse = seq.Reverse() seq.Tweens[seq.index].Reset() if remaining < 0 { remaining *= -1 } if !yoyoed { if seq.reverse { seq.index-- } else { seq.index++ } // On the way back, tweens need to be configured to not go forward if seq.index < len(seq.Tweens) && seq.index >= 0 { seq.Tweens[seq.index].reverse = seq.Reverse() seq.Tweens[seq.index].Reset() } } } } func (seq *Sequence) yoyoed() bool { if seq.yoyo { if seq.index == len(seq.Tweens)-1 && seq.Tweens[seq.index].reverse == false { seq.reverse = true return true } if seq.index == 0 && seq.Tweens[seq.index].reverse == true { seq.reverse = false return true } } return false } // Index returns the current index of the Sequence. Note that this can exceed the number of Tweens in the Sequence. func (seq *Sequence) Index() int { return seq.index } // SetIndex sets the current index of the Sequence, influencing which Tween is active at any given time. 
func (seq *Sequence) SetIndex(index int) { seq.Tweens[seq.index].reverse = seq.Reverse() seq.Tweens[seq.index].Reset() seq.index = index } // SetLoop sets the default loop and the current remaining loops func (seq *Sequence) SetLoop(amount int) { seq.loop = amount seq.loopRemaining = seq.loop } // SetYoyo sets whether the Sequence should yoyo off of the end of the last Tween and complete at the beginning of the first Tween func (seq *Sequence) SetYoyo(willYoyo bool) { seq.yoyo = willYoyo } // Reset resets the Sequence, resetting all Tweens and setting the Sequence's index back to 0. func (seq *Sequence) Reset() { seq.loopRemaining = seq.loop for _, tween := range seq.Tweens { tween.Reset() } seq.index = 0 } // HasTweens returns whether the Sequence is populated with Tweens or not. func (seq *Sequence) HasTweens() bool { return len(seq.Tweens) > 0 } // Reverse returns whether the Sequence currently running in reverse. func (seq *Sequence) Reverse() bool { return seq.reverse }
sequence.go
0.640861
0.436082
sequence.go
starcoder
package porous // State holds state variables for porous media with liquid and gas // References: // [1] <NAME> (2015) A consistent u-p formulation for porous media with hysteresis. // Int Journal for Numerical Methods in Engineering, 101(8) 606-634 // http://dx.doi.org/10.1002/nme.4808 // [2] <NAME> (2015) A solution to transient seepage in unsaturated porous media. // Computer Methods in Applied Mechanics and Engineering, 285 791-816 // http://dx.doi.org/10.1016/j.cma.2014.12.009 type State struct { A_ns0 float64 // 1 initial partial fraction of solids A_sl float64 // 2 liquid saturation A_ρL float64 // 3 real (intrinsic) density of liquid A_ρG float64 // 4 real (intrinsic) density of gas A_Δpc float64 // 5 step increment of capillary pressure A_wet bool // 6 wetting flag } // GetCopy returns a copy of State func (o State) GetCopy() *State { return &State{ o.A_ns0, // 1 o.A_sl, // 2 o.A_ρL, // 3 o.A_ρG, // 4 o.A_Δpc, // 5 o.A_wet, // 6 } } // Set sets this State with another State func (o *State) Set(s *State) { o.A_ns0 = s.A_ns0 // 1 o.A_sl = s.A_sl // 2 o.A_ρL = s.A_ρL // 3 o.A_ρG = s.A_ρG // 4 o.A_Δpc = s.A_Δpc // 5 o.A_wet = s.A_wet // 6 } // LsVars hold data for liquid-solid computations type LsVars struct { A_ρl, A_ρ, A_p, Cpl, Cvs float64 Dρdpl, Dpdpl, DCpldpl, DCvsdpl, Dklrdpl float64 DρldusM, DρdusM, DCpldusM float64 } // LgsVars hold data for liquid-gas-solid computations type LgsVars struct { A_ρl, A_ρg float64 A_ρ, A_p float64 Cpl, Cpg, Cvs float64 Dpl, Dpg, Dvs float64 Dklrdpl, Dklrdpg float64 Dkgrdpl, Dkgrdpg float64 DρlduM, DρgduM float64 Dρdpl, Dρdpg float64 DρduM float64 Dpdpl, Dpdpg float64 DCpldpl, DCpldpg float64 DCpgdpl, DCpgdpg float64 DCvsdpl, DCvsdpg float64 DDpldpl, DDpldpg float64 DDpgdpl, DDpgdpg float64 DDvsdpl, DDvsdpg float64 DCplduM, DCpgduM float64 DDplduM, DDpgduM float64 } // CalcLs calculates variables for liquid-solid simulations func (o Model) CalcLs(res *LsVars, sta *State, pl, divu float64, derivs bool) (err error) { // 
auxiliary ns0 := sta.A_ns0 sl := sta.A_sl ρL := sta.A_ρL Cl := o.Liq.C ρS := o.RhoS0 // n variables; Eqs (13) and (28) of [1] ns := (1.0 - divu) * ns0 nf := 1.0 - ns nl := nf * sl // ρ variables; Eq (13) of [1] ρs := ns * ρS res.A_ρl = nl * ρL res.A_ρ = res.A_ρl + ρs // capillary pressure and pore-fluid pressure pc := -pl res.A_p = pl * sl // Eq. (16) of [1] // moduli Ccb, e := o.Ccb(sta, pc) if e != nil { return e } res.Cpl = nf * (sl*Cl - ρL*Ccb) // Eq (32a) of [1] res.Cvs = sl * ρL // Eq (32b) of [1] // derivatives if derivs { // Ccd Ccd, e := o.Ccd(sta, pc) if e != nil { return e } // derivatives w.r.t pl res.Dρdpl = nf * (sl*Cl - ρL*Ccb) // Eq (A.9) of [1] res.Dpdpl = sl + pc*Ccb // Eq (A.11) of [1] res.DCpldpl = nf * (ρL*Ccd - 2.0*Ccb*Cl) // Eq (A.2) of[1] res.DCvsdpl = sl*Cl - Ccb*ρL // Eq (A.4) of [1] res.Dklrdpl = -o.Cnd.DklrDsl(sl) * Ccb // Eq (A.7) of [1] // derivatives w.r.t us (multipliers only) res.DρldusM = sl * ρL * ns0 res.DρdusM = (sl*ρL - ρS) * ns0 // Eq (A.10) of [1] res.DCpldusM = (sl*Cl - ρL*Ccb) * ns0 // Eq (A.3) of [1] } return } // CalcLgs calculates variables for liquid-gas-solid simulations func (o Model) CalcLgs(res *LgsVars, sta *State, pl, pg, divus float64, derivs bool) (err error) { // auxiliary ns0 := sta.A_ns0 sl := sta.A_sl sg := 1.0 - sl ρL := sta.A_ρL ρG := sta.A_ρG Cl := o.Liq.C Cg := o.Gas.C ρS := o.RhoS0 // n variables ns := (1.0 - divus) * ns0 nf := 1.0 - ns nl := nf * sl ng := nf * sg // ρ variables ρs := ns * ρS res.A_ρl = nl * ρL res.A_ρg = ng * ρG res.A_ρ = res.A_ρl + res.A_ρg + ρs // capillary pressure and pore-fluid pressure pc := pg - pl res.A_p = pl*sl + pg*sg // moduli Cc, e := o.Ccb(sta, pc) if e != nil { return e } res.Cpl = nf * (sl*Cl - ρL*Cc) res.Cpg = nf * ρL * Cc res.Cvs = sl * ρL res.Dpl = nf * ρG * Cc res.Dpg = nf * (sg*Cg - ρG*Cc) res.Dvs = sg * ρG // derivatives if derivs { // Ccd Ccd, e := o.Ccd(sta, pc) if e != nil { return e } // conductivity multipliers dklrdsl := o.Cnd.DklrDsl(sl) dkgrdsg := 
o.Cnd.DkgrDsg(sg) res.Dklrdpl = -dklrdsl * Cc res.Dklrdpg = dklrdsl * Cc res.Dkgrdpl = dkgrdsg * Cc res.Dkgrdpg = -dkgrdsg * Cc // partial densities res.DρlduM = sl * ρL * ns0 res.DρgduM = sg * ρG * ns0 // mixture density res.Dρdpl = nf * (sl*Cl - ρL*Cc + ρG*Cc) res.Dρdpg = nf * (sg*Cg - ρG*Cc + ρL*Cc) res.DρduM = (sl*ρL + sg*ρG - ρS) * ns0 // pressure in pores res.Dpdpl = sl + pc*Cc res.Dpdpg = sg - pc*Cc // derivatives of C coefficients res.DCpldpl = nf * (ρL*Ccd - 2.0*Cc*Cl) res.DCpldpg = nf * (Cc*Cl - ρL*Ccd) res.DCpgdpl = nf * (Cl*Cc - ρL*Ccd) res.DCpgdpg = nf * ρL * Ccd res.DCvsdpl = sl*Cl - Cc*ρL res.DCvsdpg = Cc * ρL // derivatives of D coefficients res.DDpldpl = -nf * ρG * Ccd res.DDpldpg = nf * (ρG*Ccd + Cg*Cc) res.DDpgdpl = nf * (Cc*Cg + ρG*Ccd) res.DDpgdpg = -nf * (ρG*Ccd + 2.0*Cg*Cc) res.DDvsdpl = Cc * ρG res.DDvsdpg = sg*Cg - Cc*ρG // derivatives w.r.t u (multiplier) res.DCplduM = (sl*Cl - ρL*Cc) * ns0 res.DCpgduM = ρL * Cc * ns0 res.DDplduM = ρG * Cc * ns0 res.DDpgduM = (sg*Cg - ρG*Cc) * ns0 } return }
mdl/porous/states.go
0.727007
0.505859
states.go
starcoder
package raster import ( "image" "image/color" "unsafe" ) const ( SUBPIXEL_SHIFT = 3 SUBPIXEL_COUNT = 1 << SUBPIXEL_SHIFT SUBPIXEL_FULL_COVERAGE = 0xff ) var SUBPIXEL_OFFSETS = SUBPIXEL_OFFSETS_SAMPLE_8_FIXED type SUBPIXEL_DATA uint8 type NON_ZERO_MASK_DATA_UNIT uint8 type Rasterizer8BitsSample struct { MaskBuffer []SUBPIXEL_DATA WindingBuffer []NON_ZERO_MASK_DATA_UNIT Width int BufferWidth int Height int ClipBound [4]float64 RemappingMatrix [6]float64 } /* width and height define the maximum output size for the filler. * The filler will output to larger bitmaps as well, but the output will * be cropped. */ func NewRasterizer8BitsSample(width, height int) *Rasterizer8BitsSample { var r Rasterizer8BitsSample // Scale the coordinates by SUBPIXEL_COUNT in vertical direction // The sampling point for the sub-pixel is at the top right corner. This // adjustment moves it to the pixel center. r.RemappingMatrix = [6]float64{1, 0, 0, SUBPIXEL_COUNT, 0.5 / SUBPIXEL_COUNT, -0.5 * SUBPIXEL_COUNT} r.Width = width r.Height = height // The buffer used for filling needs to be one pixel wider than the bitmap. // This is because the end flag that turns the fill of is the first pixel // after the actually drawn edge. 
r.BufferWidth = width + 1 //r.MaskBuffer = make([]SUBPIXEL_DATA, r.BufferWidth*height) //r.WindingBuffer = make([]NON_ZERO_MASK_DATA_UNIT, r.BufferWidth*height*SUBPIXEL_COUNT) r.ClipBound = clip(0, 0, width, height, SUBPIXEL_COUNT) return &r } func clip(x, y, width, height, scale int) [4]float64 { var clipBound [4]float64 offset := 0.99 / float64(scale) clipBound[0] = float64(x) + offset clipBound[2] = float64(x+width) - offset clipBound[1] = float64(y * scale) clipBound[3] = float64((y + height) * scale) return clipBound } func union(r1, r2 [4]float64) [4]float64 { if r1[0] > r2[0] { r1[0] = r2[0] } if r1[2] < r2[2] { r1[2] = r2[2] } if r1[1] > r2[1] { r1[1] = r2[1] } if r1[3] < r2[3] { r1[3] = r2[3] } return r1 } func intersect(r1, r2 [4]float64) [4]float64 { if r1[0] < r2[0] { r1[0] = r2[0] } if r1[2] > r2[2] { r1[2] = r2[2] } if r1[1] < r2[1] { r1[1] = r2[1] } if r1[3] > r2[3] { r1[3] = r2[3] } return r1 } func (r *Rasterizer8BitsSample) RenderEvenOdd(img *image.RGBA, color color.Color, polygons []Polygon, tr [6]float64) { // memset 0 the mask buffer r.MaskBuffer = make([]SUBPIXEL_DATA, r.BufferWidth*r.Height) // inline matrix multiplication transform := [6]float64{ tr[0]*r.RemappingMatrix[0] + tr[1]*r.RemappingMatrix[2], tr[1]*r.RemappingMatrix[3] + tr[0]*r.RemappingMatrix[1], tr[2]*r.RemappingMatrix[0] + tr[3]*r.RemappingMatrix[2], tr[3]*r.RemappingMatrix[3] + tr[2]*r.RemappingMatrix[1], tr[4]*r.RemappingMatrix[0] + tr[5]*r.RemappingMatrix[2] + r.RemappingMatrix[4], tr[5]*r.RemappingMatrix[3] + tr[4]*r.RemappingMatrix[1] + r.RemappingMatrix[5], } clipRect := clip(img.Bounds().Min.X, img.Bounds().Min.Y, img.Bounds().Dx(), img.Bounds().Dy(), SUBPIXEL_COUNT) clipRect = intersect(clipRect, r.ClipBound) var edges [32]PolygonEdge bound := [4]float64{polygons[0][0], polygons[0][1], polygons[0][0], polygons[0][1]} for _, polygon := range polygons { p := 0 l := len(polygon) / 2 for p < l { edgeCount, subbound := polygon.getEdges(p, 16, edges[:], transform, clipRect) 
bound = union(bound, subbound) for k := 0; k < edgeCount; k++ { r.addEvenOddEdge(&edges[k]) } p += 16 } } clipRect = intersect(clipRect, bound) r.fillEvenOdd(img, color, clipRect) } //! Adds an edge to be used with even-odd fill. func (r *Rasterizer8BitsSample) addEvenOddEdge(edge *PolygonEdge) { x := Fix(edge.X * FIXED_FLOAT_COEF) slope := Fix(edge.Slope * FIXED_FLOAT_COEF) slopeFix := Fix(0) if edge.LastLine-edge.FirstLine >= SLOPE_FIX_STEP { slopeFix = Fix(edge.Slope*SLOPE_FIX_STEP*FIXED_FLOAT_COEF) - (slope << SLOPE_FIX_SHIFT) } var mask SUBPIXEL_DATA var ySub uint32 var xp, yLine int for y := edge.FirstLine; y <= edge.LastLine; y++ { ySub = uint32(y & (SUBPIXEL_COUNT - 1)) xp = int((x + SUBPIXEL_OFFSETS[ySub]) >> FIXED_SHIFT) mask = SUBPIXEL_DATA(1 << ySub) yLine = y >> SUBPIXEL_SHIFT r.MaskBuffer[yLine*r.BufferWidth+xp] ^= mask x += slope if y&SLOPE_FIX_MASK == 0 { x += slopeFix } } } func convert(c color.Color) color.RGBA { if rgba, ok := c.(color.RGBA); ok { return rgba } r, g, b, a := c.RGBA() return color.RGBA{uint8(r >> 8), uint8(g >> 8), uint8(b >> 8), uint8(a >> 8)} } // Renders the mask to the canvas with even-odd fill. 
func (r *Rasterizer8BitsSample) fillEvenOdd(img *image.RGBA, c color.Color, clipBound [4]float64) { var x, y uint32 minX := uint32(clipBound[0]) maxX := uint32(clipBound[2]) minY := uint32(clipBound[1]) >> SUBPIXEL_SHIFT maxY := uint32(clipBound[3]) >> SUBPIXEL_SHIFT rgba := convert(c) pixColor := *(*uint32)(unsafe.Pointer(&rgba)) cs1 := pixColor & 0xff00ff cs2 := pixColor >> 8 & 0xff00ff stride := uint32(img.Stride) var mask SUBPIXEL_DATA maskY := minY * uint32(r.BufferWidth) minY *= stride maxY *= stride var tp []uint8 var pixelx uint32 for y = minY; y < maxY; y += stride { tp = img.Pix[y:] mask = 0 //i0 := (s.Y-r.Image.Rect.Min.Y)*r.Image.Stride + (s.X0-r.Image.Rect.Min.X)*4 //i1 := i0 + (s.X1-s.X0)*4 pixelx = minX * 4 for x = minX; x <= maxX; x++ { mask ^= r.MaskBuffer[maskY+x] // 8bits alpha := uint32(coverageTable[mask]) // 16bits //alpha := uint32(coverageTable[mask & 0xff] + coverageTable[(mask >> 8) & 0xff]) // 32bits //alpha := uint32(coverageTable[mask & 0xff] + coverageTable[(mask >> 8) & 0xff] + coverageTable[(mask >> 16) & 0xff] + coverageTable[(mask >> 24) & 0xff]) // alpha is in range of 0 to SUBPIXEL_COUNT p := (*uint32)(unsafe.Pointer(&tp[pixelx])) if alpha == SUBPIXEL_FULL_COVERAGE { *p = pixColor } else if alpha != 0 { invAlpha := SUBPIXEL_COUNT - alpha ct1 := *p & 0xff00ff * invAlpha ct2 := *p >> 8 & 0xff00ff * invAlpha ct1 = (ct1 + cs1*alpha) >> SUBPIXEL_SHIFT & 0xff00ff ct2 = (ct2 + cs2*alpha) << (8 - SUBPIXEL_SHIFT) & 0xff00ff00 *p = ct1 + ct2 } pixelx += 4 } maskY += uint32(r.BufferWidth) } } /* * Renders the polygon with non-zero winding fill. * param aTarget the target bitmap. * param aPolygon the polygon to render. * param aColor the color to be used for rendering. * param aTransformation the transformation matrix. 
*/ func (r *Rasterizer8BitsSample) RenderNonZeroWinding(img *image.RGBA, color color.Color, polygons []Polygon, tr [6]float64) { r.MaskBuffer = make([]SUBPIXEL_DATA, r.BufferWidth*r.Height) r.WindingBuffer = make([]NON_ZERO_MASK_DATA_UNIT, r.BufferWidth*r.Height*SUBPIXEL_COUNT) // inline matrix multiplication transform := [6]float64{ tr[0]*r.RemappingMatrix[0] + tr[1]*r.RemappingMatrix[2], tr[1]*r.RemappingMatrix[3] + tr[0]*r.RemappingMatrix[1], tr[2]*r.RemappingMatrix[0] + tr[3]*r.RemappingMatrix[2], tr[3]*r.RemappingMatrix[3] + tr[2]*r.RemappingMatrix[1], tr[4]*r.RemappingMatrix[0] + tr[5]*r.RemappingMatrix[2] + r.RemappingMatrix[4], tr[5]*r.RemappingMatrix[3] + tr[4]*r.RemappingMatrix[1] + r.RemappingMatrix[5], } clipRect := clip(img.Bounds().Min.X, img.Bounds().Min.Y, img.Bounds().Dx(), img.Bounds().Dy(), SUBPIXEL_COUNT) clipRect = intersect(clipRect, r.ClipBound) bound := [4]float64{polygons[0][0], polygons[0][1], polygons[0][0], polygons[0][1]} var edges [32]PolygonEdge for _, polygon := range polygons { p := 0 l := len(polygon) / 2 for p < l { edgeCount, subbound := polygon.getEdges(p, 16, edges[:], transform, clipRect) bound = union(bound, subbound) for k := 0; k < edgeCount; k++ { r.addNonZeroEdge(&edges[k]) } p += 16 } } clipRect = intersect(clipRect, bound) r.fillNonZero(img, color, clipRect) } //! Adds an edge to be used with non-zero winding fill. 
func (r *Rasterizer8BitsSample) addNonZeroEdge(edge *PolygonEdge) { x := Fix(edge.X * FIXED_FLOAT_COEF) slope := Fix(edge.Slope * FIXED_FLOAT_COEF) slopeFix := Fix(0) if edge.LastLine-edge.FirstLine >= SLOPE_FIX_STEP { slopeFix = Fix(edge.Slope*SLOPE_FIX_STEP*FIXED_FLOAT_COEF) - slope<<SLOPE_FIX_SHIFT } var mask SUBPIXEL_DATA var ySub uint32 var xp, yLine int winding := NON_ZERO_MASK_DATA_UNIT(edge.Winding) for y := edge.FirstLine; y <= edge.LastLine; y++ { ySub = uint32(y & (SUBPIXEL_COUNT - 1)) xp = int((x + SUBPIXEL_OFFSETS[ySub]) >> FIXED_SHIFT) mask = SUBPIXEL_DATA(1 << ySub) yLine = y >> SUBPIXEL_SHIFT r.MaskBuffer[yLine*r.BufferWidth+xp] |= mask r.WindingBuffer[(yLine*r.BufferWidth+xp)*SUBPIXEL_COUNT+int(ySub)] += winding x += slope if y&SLOPE_FIX_MASK == 0 { x += slopeFix } } } //! Renders the mask to the canvas with non-zero winding fill. func (r *Rasterizer8BitsSample) fillNonZero(img *image.RGBA, c color.Color, clipBound [4]float64) { var x, y uint32 minX := uint32(clipBound[0]) maxX := uint32(clipBound[2]) minY := uint32(clipBound[1]) >> SUBPIXEL_SHIFT maxY := uint32(clipBound[3]) >> SUBPIXEL_SHIFT rgba := convert(c) pixColor := *(*uint32)(unsafe.Pointer(&rgba)) cs1 := pixColor & 0xff00ff cs2 := pixColor >> 8 & 0xff00ff stride := uint32(img.Stride) var mask SUBPIXEL_DATA var n uint32 var values [SUBPIXEL_COUNT]NON_ZERO_MASK_DATA_UNIT maskY := minY * uint32(r.BufferWidth) minY *= stride maxY *= stride var pixelx uint32 for y = minY; y < maxY; y += stride { tp := img.Pix[y:] mask = 0 pixelx = minX * 4 for x = minX; x <= maxX; x++ { temp := r.MaskBuffer[maskY+x] if temp != 0 { var bit SUBPIXEL_DATA = 1 for n = 0; n < SUBPIXEL_COUNT; n++ { if temp&bit != 0 { t := values[n] values[n] += r.WindingBuffer[(maskY+x)*SUBPIXEL_COUNT+n] if (t == 0 || values[n] == 0) && t != values[n] { mask ^= bit } } bit <<= 1 } } // 8bits alpha := uint32(coverageTable[mask]) // 16bits //alpha := uint32(coverageTable[mask & 0xff] + coverageTable[(mask >> 8) & 0xff]) // 32bits 
//alpha := uint32(coverageTable[mask & 0xff] + coverageTable[(mask >> 8) & 0xff] + coverageTable[(mask >> 16) & 0xff] + coverageTable[(mask >> 24) & 0xff]) p := (*uint32)(unsafe.Pointer(&tp[pixelx])) if alpha == SUBPIXEL_FULL_COVERAGE { *p = pixColor } else if alpha != 0 { // alpha is in range of 0 to SUBPIXEL_COUNT invAlpha := uint32(SUBPIXEL_COUNT) - alpha ct1 := *p & 0xff00ff * invAlpha ct2 := *p >> 8 & 0xff00ff * invAlpha ct1 = (ct1 + cs1*alpha) >> SUBPIXEL_SHIFT & 0xff00ff ct2 = (ct2 + cs2*alpha) << (8 - SUBPIXEL_SHIFT) & 0xff00ff00 *p = ct1 + ct2 } pixelx += 4 } maskY += uint32(r.BufferWidth) } }
raster/fillerAA.go
0.606498
0.405272
fillerAA.go
starcoder
// Package cache provides a simple caching mechanism // that limits the age of cache entries and tries to avoid large // repopulation events by staggering refresh times. package cache import ( "math/rand" "sync" "time" "gopkg.in/errgo.v1" ) // entry holds a cache entry. The expire field // holds the time after which the entry will be // considered invalid. type entry struct { value interface{} expire time.Time } // Key represents a cache key. It must be a comparable type. type Key interface{} // Cache holds a time-limited set of values for arbitrary keys. type Cache struct { maxAge time.Duration // mu guards the fields below it. mu sync.Mutex // expire holds when the cache is due to expire. expire time.Time // We hold two maps so that can avoid scanning through all the // items in the cache when the cache needs to be refreshed. // Instead, we move items from old to new when they're accessed // and throw away the old map at refresh time. old, new map[Key]entry } // New returns a new Cache that will cache items for // at most maxAge. func New(maxAge time.Duration) *Cache { // A maxAge is < 2ns then the expiry code will panic because the // actual expiry time will be maxAge - a random value in the // interval [0, maxAge/2). If maxAge is < 2ns then this requires // a random interval in [0, 0) which causes a panic. if maxAge < 2*time.Nanosecond { maxAge = 2 * time.Nanosecond } // The returned cache will have a zero-valued expire // time, so will expire immediately, causing the new // map to be created. return &Cache{ maxAge: maxAge, } } // Len returns the total number of cached entries. func (c *Cache) Len() int { c.mu.Lock() defer c.mu.Unlock() return len(c.old) + len(c.new) } // Evict removes the entry with the given key from the cache if present. func (c *Cache) Evict(key Key) { c.mu.Lock() defer c.mu.Unlock() delete(c.new, key) delete(c.old, key) } // EvictAll removes all entries from the cache. 
func (c *Cache) EvictAll() { c.mu.Lock() defer c.mu.Unlock() c.new = make(map[Key]entry) c.old = nil } // Get returns the value for the given key, using fetch to fetch // the value if it is not found in the cache. // If fetch returns an error, the returned error from Get will have // the same cause. func (c *Cache) Get(key Key, fetch func() (interface{}, error)) (interface{}, error) { return c.getAtTime(key, fetch, time.Now()) } // getAtTime is the internal version of Get, useful for testing; now represents the current // time. func (c *Cache) getAtTime(key Key, fetch func() (interface{}, error), now time.Time) (interface{}, error) { if val, ok := c.cachedValue(key, now); ok { return val, nil } // Fetch the data without the mutex held // so that one slow fetch doesn't hold up // all the other cache accesses. val, err := fetch() if err != nil { // TODO consider caching cache misses. return nil, errgo.Mask(err, errgo.Any) } c.mu.Lock() defer c.mu.Unlock() // Add the new cache entry. Because it's quite likely that a // large number of cache entries will be initially fetched at // the same time, we want to avoid a thundering herd of fetches // when they all expire at the same time, so we set the expiry // time to a random interval between [now + t.maxAge/2, now + // t.maxAge] and so they'll be spread over time without // compromising the maxAge value. c.new[key] = entry{ value: val, expire: now.Add(c.maxAge - time.Duration(rand.Int63n(int64(c.maxAge/2)))), } return val, nil } // cachedValue returns any cached value for the given key // and whether it was found. 
func (c *Cache) cachedValue(key Key, now time.Time) (interface{}, bool) { c.mu.Lock() defer c.mu.Unlock() if now.After(c.expire) { c.old = c.new c.new = make(map[Key]entry) c.expire = now.Add(c.maxAge) } if e, ok := c.entry(c.new, key, now); ok { return e.value, true } if e, ok := c.entry(c.old, key, now); ok { // An old entry has been accessed; move it to the new // map so that we only use a single map access for // subsequent lookups. Note that because we use the same // duration for cache refresh (c.expire) as for max // entry age, this is strictly speaking unnecessary // because any entries in old will have expired by the // time it is dropped. c.new[key] = e delete(c.old, key) return e.value, true } return nil, false } // entry returns an entry from the map and whether it // was found. If the entry has expired, it is deleted from the map. func (c *Cache) entry(m map[Key]entry, key Key, now time.Time) (entry, bool) { e, ok := m[key] if !ok { return entry{}, false } if now.After(e.expire) { // Delete expired entries. delete(m, key) return entry{}, false } return e, true }
vendor/src/github.com/juju/utils/cache/cache.go
0.668231
0.454956
cache.go
starcoder
package main import ( "code.google.com/p/freetype-go/freetype" "code.google.com/p/freetype-go/freetype/raster" "code.google.com/p/freetype-go/freetype/truetype" "fmt" "github.com/disintegration/gift" "image" "image/color" "math" ) // boundingBoxer is a drawable image that reports a massive canvas. Ideally this means that // anything can be draw onto it. After done drawing, call boundingBoxer.complete(), then it will // report bounds that are just large enough to contain everything that was draw onto it. type boundingBoxer struct { boundsStarted bool // maxDist is the distance, above which, we don't bother to distinguish between distances. // this is so that we can spend more bits of precision right around an edge. maxDist float64 bounds image.Rectangle largeBounds image.Rectangle crossover map[image.Point]bool inside map[image.Point]bool // The grayscale image data. grayField []uint16 } func makeBoundingBoxer() *boundingBoxer { return &boundingBoxer{ largeBounds: image.Rect(-10000, -10000, 10000, 10000), crossover: make(map[image.Point]bool), inside: make(map[image.Point]bool), } } func (bb *boundingBoxer) At(x, y int) color.Color { if bb.maxDist == 0 { return color.Gray{0} } if !(image.Point{x, y}).In(bb.bounds) { return color.Gray{0} } return color.Gray16{bb.grayField[(x-bb.bounds.Min.X)+(y-bb.bounds.Min.Y)*bb.bounds.Dx()]} } func (bb *boundingBoxer) ColorModel() color.Model { return color.Gray16Model } func (bb *boundingBoxer) Bounds() image.Rectangle { if bb.maxDist == 0 { return bb.largeBounds } return bb.bounds } type offsetAndDist struct { x, y int dist float64 } func (bb *boundingBoxer) complete() { for insidePoint := range bb.inside { delete(bb.crossover, insidePoint) } // bb.maxDist = math.Sqrt(float64(bb.bounds.Dx()*bb.bounds.Dy())) / bb.something / 10 bb.maxDist = 50 bb.bounds.Min.X -= int(bb.maxDist) + 1 bb.bounds.Min.Y -= int(bb.maxDist) + 1 bb.bounds.Max.X += int(bb.maxDist) + 1 bb.bounds.Max.Y += int(bb.maxDist) + 1 distField := make([]float64, 
bb.bounds.Dx()*bb.bounds.Dy()) for i := range distField { distField[i] = bb.maxDist } max := int(bb.maxDist) // offsests will contain the offsets from a crossover point that we should test for, and dists // will contain the distance of that offset. This way we don't need to recalculate all of this // stuff for every crossover pixel we look at. var offsets []offsetAndDist for dy := -max; dy <= max; dy++ { for dx := -max; dx <= max; dx++ { dist := math.Sqrt(float64(dx*dx + dy*dy)) if dist >= bb.maxDist { continue } offsets = append(offsets, offsetAndDist{dx, dy, dist}) } } bbdx := bb.bounds.Dx() bbdy := bb.bounds.Dy() for point := range bb.crossover { for _, offset := range offsets { x := point.X + offset.x - bb.bounds.Min.X y := point.Y + offset.y - bb.bounds.Min.Y if x < 0 || x >= bbdx || y < 0 || y >= bbdy { continue } pos := x + y*bbdx if offset.dist < distField[pos] { distField[pos] = offset.dist } } } bb.grayField = make([]uint16, bb.bounds.Dx()*bb.bounds.Dy()) for i := range distField { x := (i % bb.bounds.Dx()) + bb.bounds.Min.X y := (i / bb.bounds.Dx()) + bb.bounds.Min.Y bb.grayField[i] = 32767 - uint16(32767*(distField[i]/bb.maxDist)) if bb.inside[image.Point{x, y}] { bb.grayField[i] = 65535 - bb.grayField[i] } } } func (bb *boundingBoxer) Set(x, y int, c color.Color) { bb.inside[image.Point{x, y}] = true bb.crossover[image.Point{x - 1, y}] = true bb.crossover[image.Point{x + 1, y}] = true bb.crossover[image.Point{x, y - 1}] = true bb.crossover[image.Point{x, y + 1}] = true // Update all bounds so that we can eventually report a bounding box for everything. if !bb.boundsStarted { bb.boundsStarted = true bb.bounds.Min = image.Point{x, y} bb.bounds.Max = image.Point{x, y} } if x < bb.bounds.Min.X { bb.bounds.Min.X = x } if y < bb.bounds.Min.Y { bb.bounds.Min.Y = y } if x > bb.bounds.Max.X { bb.bounds.Max.X = x } if y > bb.bounds.Max.Y { bb.bounds.Max.Y = y } } // Render draws rune r front the specified font at the specified dpi and scale. 
It returns a // grayscale image that is just large enough to contain the rune. func Render(font *truetype.Font, r rune, dpi, scale float64) (*image.Gray, error) { glyph := truetype.NewGlyphBuf() index := font.Index(r) glyph.Load(font, font.FUnitsPerEm(), index, truetype.FullHinting) ctx := freetype.NewContext() boxer := makeBoundingBoxer() ctx.SetSrc(image.NewUniform(color.White)) ctx.SetDst(boxer) ctx.SetClip(boxer.largeBounds) ctx.SetFontSize(250) ctx.SetDPI(dpi) ctx.SetFont(font) if err := glyph.Load(font, font.FUnitsPerEm(), font.Index(r), truetype.FullHinting); err != nil { return nil, fmt.Errorf("Unable to load glyph: %v\n", err) } var rp raster.Point rp.X = ctx.PointToFix32(0) rp.Y = ctx.PointToFix32(100) ctx.DrawString(string(r), rp) boxer.complete() g := gift.New( gift.Resize(int(float64(boxer.Bounds().Dx())*scale+0.5), int(float64(boxer.Bounds().Dy())*scale+0.5), gift.CubicResampling), ) dst := image.NewGray(g.Bounds(boxer.Bounds())) g.Draw(dst, boxer) return dst, nil }
text/tool/glyph.go
0.687945
0.490968
glyph.go
starcoder
package transfer import ( "encoding/gob" "github.com/MetaLife-Protocol/SuperNode/encoding" "github.com/ethereum/go-ethereum/common" ) /* Quick overview -------------- Goals: - Reliable failure recovery. Approach: - Use a write-ahead-log for state changes. Under a node restart the latest state snapshot can be recovered and the pending state changes reaplied. Requirements: - The function call `state_transition(curr_state, state_change)` must be deterministic, the recovery depends on the re-execution of the state changes from the WAL and must produce the same result. - StateChange must be idenpotent because the partner node might be recovering from a failure and a Event might be produced more than once. Requirements that are enforced: - A state_transition function must not produce a result that must be further processed, i.e. the state change must be self contained and the result state tree must be serializable to produce a snapshot. To enforce this inputs and outputs are separated under different class hierarquies (StateChange and Event). */ /* State is An isolated state, modified by StateChange messages. Notes: - Don't duplicate the same state data in two different States, instead use identifiers. - State objects may be nested. - State classes don't have logic by design. - Each iteration must operate on fresh copy of the state, treating the old objects as immutable. - This class is used as a marker for states. */ type State interface{} /* Event produced by the execution of a state change. Nomenclature convention: - 'Send' prefix for protocol messages. - 'ContractSend' prefix for smart contract function calls. - 'Event' for node events. Notes: - This class is used as a marker for events. - These objects don't have logic by design. - Separate events are preferred because there is a decoupling of what the upper layer will use the events for. */ type Event interface{} /* StateChange Declare the transition to be applied in a state object. 
StateChanges are incoming events that change this node state (eg. a blockchain event, a new packet, an error). It is not used for the node to communicate with the outer world. Nomenclature convention: - 'Receive' prefix for protocol messages. - 'ContractReceive' prefix for smart contract logs. - 'Action' prefix for other interactions. Notes: - These objects don't have logic by design. - This class is used as a marker for state changes. */ type StateChange interface{} //TransitionResult result of next state transition type TransitionResult struct { NewState State Events []Event } /* FuncStateTransition The mutable storage for the application state, this storage can do state transitions by applying the StateChanges to the current State. */ type FuncStateTransition func(state State, stateChange StateChange) *TransitionResult /* StateManager corresponding one MediatedTransfer it has State of the Transfer and Other Info for save and restore */ type StateManager struct { ID int64 `storm:"id,increment"` FuncStateTransition FuncStateTransition CurrentState State Identifier common.Hash //transfer identifier Name string LastReceivedMessage encoding.SignedMessager } //MessageTag for save and restore type MessageTag struct { EchoHash common.Hash } //NewStateManager create a StateManager func NewStateManager(stateTransition FuncStateTransition, currentState State, name string, identifier common.Hash, tokenAddress common.Address) *StateManager { return &StateManager{ FuncStateTransition: stateTransition, CurrentState: currentState, Name: name, Identifier: identifier, } } /* Dispatch Apply the `state_change` in the current machine and return the resulting events. stateChange : An object representation of a state change. Return: events: A list of events produced by the state transition, it's the upper layer's responsibility to decided how to handle these events. 
*/ func (sm *StateManager) Dispatch(stateChange StateChange) (events []Event) { /* the state objects must be treated as immutable, so make a copy of the current state and pass the copy to the state machine to be modified. */ transitionResult := sm.FuncStateTransition(sm.CurrentState, stateChange) sm.CurrentState, events = transitionResult.NewState, transitionResult.Events return } func init() { gob.Register(&StateManager{}) gob.Register(&TransitionResult{}) gob.Register(&MessageTag{}) }
transfer/architecture.go
0.762778
0.431524
architecture.go
starcoder
package kuznyechik func s(dst, src []byte) { for index, value := range src { dst[index] = nonlinear(value) } } func invertedS(dst, src []byte) { for index, value := range src { dst[index] = inverseNonlinear(value) } } func inverseS(high, low uint64) (highNew, lowNew uint64) { highNew = (uint64(inverseNonlinearPermutation[uint8(high>>56)]) << 56) ^ (uint64(inverseNonlinearPermutation[uint8(high>>48)]) << 48) ^ (uint64(inverseNonlinearPermutation[uint8(high>>40)]) << 40) ^ (uint64(inverseNonlinearPermutation[uint8(high>>32)]) << 32) ^ (uint64(inverseNonlinearPermutation[uint8(high>>24)]) << 24) ^ (uint64(inverseNonlinearPermutation[uint8(high>>16)]) << 16) ^ (uint64(inverseNonlinearPermutation[uint8(high>>8)]) << 8) ^ uint64(inverseNonlinearPermutation[uint8(high)]) lowNew = (uint64(inverseNonlinearPermutation[uint8(low>>56)]) << 56) ^ (uint64(inverseNonlinearPermutation[uint8(low>>48)]) << 48) ^ (uint64(inverseNonlinearPermutation[uint8(low>>40)]) << 40) ^ (uint64(inverseNonlinearPermutation[uint8(low>>32)]) << 32) ^ (uint64(inverseNonlinearPermutation[uint8(low>>24)]) << 24) ^ (uint64(inverseNonlinearPermutation[uint8(low>>16)]) << 16) ^ (uint64(inverseNonlinearPermutation[uint8(low>>8)]) << 8) ^ uint64(inverseNonlinearPermutation[uint8(low)]) return } func nonlinear(b byte) byte { return nonlinearPermutation[b] } func inverseNonlinear(b byte) byte { return inverseNonlinearPermutation[b] } var ( nonlinearPermutation = []byte{0xfc, 0xee, 0xdd, 0x11, 0xcf, 0x6e, 0x31, 0x16, 0xfb, 0xc4, 0xfa, 0xda, 0x23, 0xc5, 0x4, 0x4d, 0xe9, 0x77, 0xf0, 0xdb, 0x93, 0x2e, 0x99, 0xba, 0x17, 0x36, 0xf1, 0xbb, 0x14, 0xcd, 0x5f, 0xc1, 0xf9, 0x18, 0x65, 0x5a, 0xe2, 0x5c, 0xef, 0x21, 0x81, 0x1c, 0x3c, 0x42, 0x8b, 0x1, 0x8e, 0x4f, 0x5, 0x84, 0x2, 0xae, 0xe3, 0x6a, 0x8f, 0xa0, 0x6, 0xb, 0xed, 0x98, 0x7f, 0xd4, 0xd3, 0x1f, 0xeb, 0x34, 0x2c, 0x51, 0xea, 0xc8, 0x48, 0xab, 0xf2, 0x2a, 0x68, 0xa2, 0xfd, 0x3a, 0xce, 0xcc, 0xb5, 0x70, 0xe, 0x56, 0x8, 0xc, 0x76, 0x12, 0xbf, 0x72, 0x13, 0x47, 0x9c, 
0xb7, 0x5d, 0x87, 0x15, 0xa1, 0x96, 0x29, 0x10, 0x7b, 0x9a, 0xc7, 0xf3, 0x91, 0x78, 0x6f, 0x9d, 0x9e, 0xb2, 0xb1, 0x32, 0x75, 0x19, 0x3d, 0xff, 0x35, 0x8a, 0x7e, 0x6d, 0x54, 0xc6, 0x80, 0xc3, 0xbd, 0xd, 0x57, 0xdf, 0xf5, 0x24, 0xa9, 0x3e, 0xa8, 0x43, 0xc9, 0xd7, 0x79, 0xd6, 0xf6, 0x7c, 0x22, 0xb9, 0x3, 0xe0, 0xf, 0xec, 0xde, 0x7a, 0x94, 0xb0, 0xbc, 0xdc, 0xe8, 0x28, 0x50, 0x4e, 0x33, 0xa, 0x4a, 0xa7, 0x97, 0x60, 0x73, 0x1e, 0x0, 0x62, 0x44, 0x1a, 0xb8, 0x38, 0x82, 0x64, 0x9f, 0x26, 0x41, 0xad, 0x45, 0x46, 0x92, 0x27, 0x5e, 0x55, 0x2f, 0x8c, 0xa3, 0xa5, 0x7d, 0x69, 0xd5, 0x95, 0x3b, 0x7, 0x58, 0xb3, 0x40, 0x86, 0xac, 0x1d, 0xf7, 0x30, 0x37, 0x6b, 0xe4, 0x88, 0xd9, 0xe7, 0x89, 0xe1, 0x1b, 0x83, 0x49, 0x4c, 0x3f, 0xf8, 0xfe, 0x8d, 0x53, 0xaa, 0x90, 0xca, 0xd8, 0x85, 0x61, 0x20, 0x71, 0x67, 0xa4, 0x2d, 0x2b, 0x9, 0x5b, 0xcb, 0x9b, 0x25, 0xd0, 0xbe, 0xe5, 0x6c, 0x52, 0x59, 0xa6, 0x74, 0xd2, 0xe6, 0xf4, 0xb4, 0xc0, 0xd1, 0x66, 0xaf, 0xc2, 0x39, 0x4b, 0x63, 0xb6, } inverseNonlinearPermutation = []byte{0xa5, 0x2d, 0x32, 0x8f, 0xe, 0x30, 0x38, 0xc0, 0x54, 0xe6, 0x9e, 0x39, 0x55, 0x7e, 0x52, 0x91, 0x64, 0x3, 0x57, 0x5a, 0x1c, 0x60, 0x7, 0x18, 0x21, 0x72, 0xa8, 0xd1, 0x29, 0xc6, 0xa4, 0x3f, 0xe0, 0x27, 0x8d, 0xc, 0x82, 0xea, 0xae, 0xb4, 0x9a, 0x63, 0x49, 0xe5, 0x42, 0xe4, 0x15, 0xb7, 0xc8, 0x6, 0x70, 0x9d, 0x41, 0x75, 0x19, 0xc9, 0xaa, 0xfc, 0x4d, 0xbf, 0x2a, 0x73, 0x84, 0xd5, 0xc3, 0xaf, 0x2b, 0x86, 0xa7, 0xb1, 0xb2, 0x5b, 0x46, 0xd3, 0x9f, 0xfd, 0xd4, 0xf, 0x9c, 0x2f, 0x9b, 0x43, 0xef, 0xd9, 0x79, 0xb6, 0x53, 0x7f, 0xc1, 0xf0, 0x23, 0xe7, 0x25, 0x5e, 0xb5, 0x1e, 0xa2, 0xdf, 0xa6, 0xfe, 0xac, 0x22, 0xf9, 0xe2, 0x4a, 0xbc, 0x35, 0xca, 0xee, 0x78, 0x5, 0x6b, 0x51, 0xe1, 0x59, 0xa3, 0xf2, 0x71, 0x56, 0x11, 0x6a, 0x89, 0x94, 0x65, 0x8c, 0xbb, 0x77, 0x3c, 0x7b, 0x28, 0xab, 0xd2, 0x31, 0xde, 0xc4, 0x5f, 0xcc, 0xcf, 0x76, 0x2c, 0xb8, 0xd8, 0x2e, 0x36, 0xdb, 0x69, 0xb3, 0x14, 0x95, 0xbe, 0x62, 0xa1, 0x3b, 0x16, 0x66, 0xe9, 0x5c, 0x6c, 0x6d, 0xad, 0x37, 0x61, 0x4b, 0xb9, 0xe3, 0xba, 
0xf1, 0xa0, 0x85, 0x83, 0xda, 0x47, 0xc5, 0xb0, 0x33, 0xfa, 0x96, 0x6f, 0x6e, 0xc2, 0xf6, 0x50, 0xff, 0x5d, 0xa9, 0x8e, 0x17, 0x1b, 0x97, 0x7d, 0xec, 0x58, 0xf7, 0x1f, 0xfb, 0x7c, 0x9, 0xd, 0x7a, 0x67, 0x45, 0x87, 0xdc, 0xe8, 0x4f, 0x1d, 0x4e, 0x4, 0xeb, 0xf8, 0xf3, 0x3e, 0x3d, 0xbd, 0x8a, 0x88, 0xdd, 0xcd, 0xb, 0x13, 0x98, 0x2, 0x93, 0x80, 0x90, 0xd0, 0x24, 0x34, 0xcb, 0xed, 0xf4, 0xce, 0x99, 0x10, 0x44, 0x40, 0x92, 0x3a, 0x1, 0x26, 0x12, 0x1a, 0x48, 0x68, 0xf5, 0x81, 0x8b, 0xc7, 0xd6, 0x20, 0xa, 0x8, 0x0, 0x4c, 0xd7, 0x74, } )
cipher/kuznyechik/s.go
0.542984
0.649704
s.go
starcoder
package bst import ( "github.com/cinar/indicator/container" ) // BST node. type Node struct { value interface{} left *Node right *Node } // BST type. type Tree struct { root *Node } // New binary search tree. func New() *Tree { return &Tree{} } // Inserts the given value. func (t *Tree) Insert(value interface{}) { newNode := &Node{ value: value, } if t.root == nil { t.root = newNode return } curNode := t.root for { if container.Compare(newNode.value, curNode.value) <= 0 { if curNode.left == nil { curNode.left = newNode return } else { curNode = curNode.left } } else { if curNode.right == nil { curNode.right = newNode return } else { curNode = curNode.right } } } } // Removes the given value. func (t *Tree) Remove(value interface{}) bool { var parent *Node node := t.root for node != nil { switch container.Compare(value, node.value) { case 0: t.removeNode(parent, node) return true case -1: parent = node node = node.left case 1: parent = node node = node.right } } return false } // Min value. func (t *Tree) Min() interface{} { node, _ := minNode(t.root) if node == nil { return nil } return node.value } // Max value. func (t *Tree) Max() interface{} { node, _ := maxNode(t.root) if node == nil { return nil } return node.value } // Remove node. func (t *Tree) removeNode(parent, node *Node) { if node.left != nil && node.right != nil { min, minParent := minNode(node.right) if minParent == nil { minParent = node } t.removeNode(minParent, min) node.value = min.value } else { var child *Node if node.left != nil { child = node.left } else { child = node.right } if node == t.root { t.root = child } else if parent.left == node { parent.left = child } else { parent.right = child } } } // Min node. Returns min node and its parent. func minNode(root *Node) (*Node, *Node) { if root == nil { return nil, nil } var parent *Node node := root for node.left != nil { parent = node node = node.left } return node, parent } // Max node. Returns max node and its parent. 
func maxNode(root *Node) (*Node, *Node) { if root == nil { return nil, nil } var parent *Node node := root for node.right != nil { parent = node node = node.right } return node, parent }
container/bst/bst.go
0.770033
0.412353
bst.go
starcoder
package main import "fmt" // A square matrix, ie a 2D grid of numbers. Matrices are addressed in row-major // order, eg matrix[1, 2] is the value at the 1st row in the 2nd column (with // 0-based indexing). type Matrix struct { Size int store []float64 } // Create a 2x2 matrix. func MakeMatrix2(v1, v2, v3, v4 float64) Matrix { return Matrix{ Size: 2, store: []float64{ v1, v2, v3, v4, }, } } // Create a 3x3 matrix. func MakeMatrix3(v1, v2, v3, v4, v5, v6, v7, v8, v9 float64) Matrix { return Matrix{ Size: 3, store: []float64{ v1, v2, v3, v4, v5, v6, v7, v8, v9, }, } } // Create a 4x4 matrix. func MakeMatrix4(v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16 float64) Matrix { return Matrix{ Size: 4, store: []float64{ v1, v2, v3, v4, v5, v6, v7, v8, v9, v10, v11, v12, v13, v14, v15, v16, }, } } var ( IdentityMatrix4 = MakeMatrix4( 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, ) ) // Find a cofactor in the matrix. func (m Matrix) Cofactor(row, col int) float64 { minor := m.Minor(row, col) if (row+col)%2 == 1 { return -minor } return minor } // Find the determinant of the matrix. func (m Matrix) Determinant() float64 { if m.Size == 2 { return m.Get(0, 0)*m.Get(1, 1) - m.Get(0, 1)*m.Get(1, 0) } determinant := 0.0 for col := 0; col < m.Size; col++ { determinant += m.Get(0, col) * m.Cofactor(0, col) } return determinant } // Get the value at a specific row and column in the matrix. func (m Matrix) Get(row, column int) float64 { return m.store[row*m.Size+column] } // Determine if two matrices are equal. Matrices are equal if they contain the // same values in the same positions. 
func (m Matrix) Equals(other Matrix) bool { if m.Size != other.Size { return false } for row := 0; row < m.Size; row++ { for col := 0; col < m.Size; col++ { if !Float64Equal(m.Get(row, col), other.Get(row, col)) { return false } } } return true } func (m Matrix) Inverted() Matrix { if !m.IsInvertible() { panic(fmt.Sprintf("Tried to invert non-invertible matrix: %v", m)) } determinant := m.Determinant() inverse := Matrix{ Size: m.Size, store: make([]float64, m.Size*m.Size), } for row := 0; row < m.Size; row++ { for col := 0; col < m.Size; col++ { cofactor := m.Cofactor(row, col) // We swap the row and column here to effectively transpose the // inverse without an explicit step. inverse.Set(col, row, cofactor/determinant) } } return inverse } // Determine if a matrix is invertible. func (m Matrix) IsInvertible() bool { return !Float64Equal(m.Determinant(), 0) } // Find the minor of a matrix which is defined as the determinant of its // submatrix. The submatrix is determined by the given row and column, which are // removed from the matrix to produce the submatrix. func (m Matrix) Minor(row, col int) float64 { return m.Submatrix(row, col).Determinant() } // Multiply this matrix by another. func (m Matrix) Multiply(b Matrix) Matrix { c := Matrix{Size: m.Size, store: make([]float64, m.Size*m.Size)} for row := 0; row < m.Size; row++ { for col := 0; col < m.Size; col++ { cellValue := 0.0 for i := 0; i < m.Size; i++ { aVal := m.Get(row, i) bVal := b.Get(i, col) cellValue += aVal * bVal } c.Set(row, col, cellValue) } } return c } func (m *Matrix) Set(row, column int, value float64) { m.store[row*m.Size+column] = value } // Create a matrix that is a copy of the current matrix but with a specific row // and column removed. 
func (m Matrix) Submatrix(removedRow, removedCol int) Matrix { targetRow := 0 targetCol := 0 c := Matrix{ Size: m.Size - 1, store: make([]float64, (m.Size-1)*(m.Size-1)), } for row := 0; row < m.Size; row++ { if row == removedRow { continue } targetCol = 0 for col := 0; col < m.Size; col++ { if col == removedCol { continue } c.Set(targetRow, targetCol, m.Get(row, col)) targetCol++ } targetRow++ } return c } // Get the transpose of the target matrix. func (m Matrix) Transposed() Matrix { c := Matrix{ Size: m.Size, store: make([]float64, m.Size*m.Size), } for row := 0; row < m.Size; row++ { for col := 0; col < m.Size; col++ { c.Set(col, row, m.Get(row, col)) } } return c } // Multiply the target matrix by a tuple. func (m Matrix) TupleMultiply(b Tuple) Tuple { return Tuple{ X: m.Get(0, 0)*b.X + m.Get(0, 1)*b.Y + m.Get(0, 2)*b.Z + m.Get(0, 3)*b.W, Y: m.Get(1, 0)*b.X + m.Get(1, 1)*b.Y + m.Get(1, 2)*b.Z + m.Get(1, 3)*b.W, Z: m.Get(2, 0)*b.X + m.Get(2, 1)*b.Y + m.Get(2, 2)*b.Z + m.Get(2, 3)*b.W, W: m.Get(3, 0)*b.X + m.Get(3, 1)*b.Y + m.Get(3, 2)*b.Z + m.Get(3, 3)*b.W, } }
matrix.go
0.793346
0.628436
matrix.go
starcoder
package crypto import ( "crypto/elliptic" "crypto/rand" "fmt" "io" "math/big" "github.com/zeebo/blake3" ) /* High level api for operating on P256 elliptic curve Points. */ var ( curve = elliptic.P256() encodeLen = encodeLenWithCurve(curve) ) // encodeLenWithCurve returns the number of bytes needed to encode a point func encodeLenWithCurve(curve elliptic.Curve) int { return len(elliptic.MarshalCompressed(curve, curve.Params().Gx, curve.Params().Gy)) } // Point represents a point on the P256 elliptic curve type Point struct { x *big.Int y *big.Int } // NewPoint returns a Point func NewPoint() *Point { return &Point{x: new(big.Int), y: new(big.Int)} } // Marshal converts a Point to a byte slice representation func (p *Point) Marshal() []byte { return elliptic.MarshalCompressed(curve, p.x, p.y) } // Unmarshal takes in a marshaledPoint byte slice and extracts the Point object func (p *Point) Unmarshal(marshaledPoint []byte) error { x, y := elliptic.UnmarshalCompressed(curve, marshaledPoint) // on error of Unmarshal, x is nil if x == nil { return fmt.Errorf("error unmarshalling elliptic curve point") } p.x.Set(x) p.y.Set(y) return nil } // Add adds two points func (p *Point) Add(q *Point) *Point { x, y := curve.Add(p.x, p.y, q.x, q.y) return &Point{x: x, y: y} } // ScalarMult multiplies a point with a scalar func (p *Point) ScalarMult(scalar []byte) *Point { x, y := curve.ScalarMult(p.x, p.y, scalar) return &Point{x: x, y: y} } // Sub substracts point p from q func (p *Point) Sub(q *Point) *Point { // p - q = p.x + q.x, p.y - q.y x, y := curve.Add(p.x, p.y, q.x, new(big.Int).Neg(q.y)) return &Point{x: x, y: y} } // DeriveKeyFromECPoint returns a key of 32 byte func (p *Point) DeriveKeyFromECPoint() []byte { key := blake3.Sum256(p.x.Bytes()) return key[:] } // GenerateKey returns a secret and public key pair func GenerateKey() ([]byte, *Point, error) { secret, x, y, err := elliptic.GenerateKey(curve, rand.Reader) if err != nil { return nil, nil, err } return secret, 
&Point{x: x, y: y}, nil } // pointWriter for elliptic curve points type pointWriter struct { w io.Writer } // pointReader for elliptic curve points type pointReader struct { r io.Reader } // NewECPointWriter returns an elliptic curve point writer func NewECPointWriter(w io.Writer) *pointWriter { return &pointWriter{w: w} } // NewECPointReader returns an elliptic curve point reader func NewECPointReader(r io.Reader) *pointReader { return &pointReader{r: r} } // Write writes the marshalled elliptic curve point to writer func (w *pointWriter) Write(p *Point) (err error) { _, err = w.w.Write(p.Marshal()) return err } // Read reads a marshalled elliptic curve point from reader and stores it in point func (r *pointReader) Read(p *Point) (err error) { pt := make([]byte, encodeLen) if _, err = io.ReadFull(r.r, pt); err != nil { return err } return p.Unmarshal(pt) } // Equal returns true when 2 points are equal func (p *Point) equal(q *Point) bool { return p.x.Cmp(q.x) == 0 && p.y.Cmp(q.y) == 0 }
internal/crypto/point.go
0.917279
0.473718
point.go
starcoder
package sql // Result represents a query result. // Depending on the statement type it represents a stream of rows or an update count. // It is not concurrency-safe except the Close and Iterator method. type Result interface { // RowMetadata returns metadata information about rows. // An error is returned if result represents an update count. RowMetadata() (RowMetadata, error) // IsRowSet returns whether this result has rows to iterate using the HasNext method. IsRowSet() bool // UpdateCount returns the number of rows updated by the statement or -1 if this result is a row set. UpdateCount() int64 // Iterator returns the RowsIterator over the result rows. // The iterator may be requested only once. // An error is returned if the iterator is requested more than once, or if the result contains only update count. Iterator() (RowsIterator, error) // Close notifies the member to release resources for the corresponding query for results that represents a stream of rows. // It can be safely called more than once, and it is concurrency-safe. // If result represents an update count, it has no effect. Close() error } // RowsIterator provides means to iterate over SQL statement result. // It is not concurrency-safe. type RowsIterator interface { // HasNext prepares the next result row for reading via Next method. // It returns true on success, or false if there is no next result row or an error happened while preparing it. HasNext() bool // Next returns the current row. // Every call to Next, even the first one, must be preceded by a call to HasNext. Next() (Row, error) } // Row represents an SQL result row. type Row interface { // Get returns the value of the column by index. // If index is out of range, an error is returned. Get(index int) (interface{}, error) // GetByColumnName returns the value of the column by name. // If columns does not exist, an error is returned. 
GetByColumnName(name string) (interface{}, error) // Metadata returns the metadata information about the row. Metadata() RowMetadata }
sql/result.go
0.68595
0.448426
result.go
starcoder
package galoisfield import ( "bytes" "errors" "fmt" "strconv" ) var ( ErrIncompatibleFields = errors.New("cannot combine polynomials from different finite fields") ) // Polynomial implements polynomials with coefficients drawn from a Galois field. type Polynomial struct { field *GF coefficients []byte } // NewPolynomial returns a new polynomial with the given coefficients. // Coefficients are in little-endian order; that is, the first coefficient is // the constant term, the second coefficient is the linear term, etc. func NewPolynomial(field *GF, coefficients ...byte) Polynomial { if field == nil { field = Default } return Polynomial{field, reduce(coefficients)} } // Field returns the Galois field from which this polynomial's coefficients are drawn. func (a Polynomial) Field() *GF { return a.field } // IsZero returns true iff this polynomial has no terms. func (a Polynomial) IsZero() bool { return a.coefficients == nil } // Degree returns the degree of this polynomial, with the convention that the // polynomial of zero terms has degree 0. func (a Polynomial) Degree() uint { if a.IsZero() { return 0 } return uint(len(a.coefficients) - 1) } // Coefficients returns the coefficients of the terms of this polynomial. The // result is in little-endian order; see NewPolynomial for details. func (a Polynomial) Coefficients() []byte { return a.coefficients } // Coefficient returns the coefficient of the i'th term. func (a Polynomial) Coefficient(i uint) byte { if i >= uint(len(a.coefficients)) { return 0 } return a.coefficients[i] } // Scale multiplies this polynomial by a scalar. func (a Polynomial) Scale(s byte) Polynomial { if s == 0 { return Polynomial{a.field, nil} } if s == 1 { return a } coefficients := make([]byte, len(a.coefficients)) for i, coeff_i := range a.coefficients { coefficients[i] = a.field.Mul(coeff_i, s) } return NewPolynomial(a.field, coefficients...) } // Add returns the sum of one or more polynomials. 
func (first Polynomial) Add(rest ...Polynomial) Polynomial { n := maxCoeffLen(first, rest...) sum := expand(n, first.coefficients) for _, next := range rest { if first.field != next.field { panic(ErrIncompatibleFields) } if next.IsZero() { continue } for i, ki := range next.coefficients { sum[i] = first.field.Add(sum[i], ki) } } return NewPolynomial(first.field, sum...) } // Mul returns the product of one or more polynomials. func (first Polynomial) Mul(rest ...Polynomial) Polynomial { prod := first.coefficients for _, next := range rest { if first.field != next.field { panic(ErrIncompatibleFields) } a, b := prod, next.coefficients newprod := make([]byte, len(a)+len(b)) for bi := 0; bi < len(b); bi++ { for ai := 0; ai < len(a); ai++ { newprod[ai+bi] = first.field.Add( newprod[ai+bi], first.field.Mul(a[ai], b[bi])) } } prod = reduce(newprod) } return NewPolynomial(first.field, prod...) } // GoString returns a Go-syntax representation of this polynomial. func (a Polynomial) GoString() string { var buf bytes.Buffer buf.WriteString("NewPolynomial(") buf.WriteString(a.field.GoString()) for _, k := range a.coefficients { buf.WriteString(", ") buf.WriteString(strconv.Itoa(int(k))) } buf.WriteByte(')') return buf.String() } // String returns a human-readable algebraic representation of this polynomial. func (a Polynomial) String() string { if a.IsZero() { return "0" } var buf bytes.Buffer for d := len(a.coefficients) - 1; d >= 0; d-- { k := a.coefficients[d] if k == 0 { continue } if buf.Len() > 0 { buf.WriteString(" + ") } if k > 1 || d == 0 { fmt.Fprintf(&buf, "%d", k) } if d > 1 { fmt.Fprintf(&buf, "x^%d", d) } else if d == 1 { buf.WriteByte('x') } } return buf.String() } // Compare defines a partial order for polynomials: -1 if a < b, 0 if a == b, // +1 if a > b, or panic if a and b are drawn from different Galois fields. 
func (a Polynomial) Compare(b Polynomial) int { if cmp := a.field.Compare(b.field); cmp != 0 { return cmp } if len(a.coefficients) < len(b.coefficients) { return -1 } if len(a.coefficients) > len(b.coefficients) { return 1 } for i := len(a.coefficients) - 1; i >= 0; i-- { pi := a.coefficients[i] qi := b.coefficients[i] if pi < qi { return -1 } if pi > qi { return 1 } } return 0 } // Equal returns true iff a == b. func (a Polynomial) Equal(b Polynomial) bool { return a.Compare(b) == 0 } // Less returns true iff a < b. func (a Polynomial) Less(b Polynomial) bool { return a.Compare(b) < 0 } // Evaluate substitutes for x and returns the resulting value. func (a Polynomial) Evaluate(x byte) byte { var sum byte = 0 var pow byte = 1 for _, k := range a.coefficients { sum = a.field.Add(sum, a.field.Mul(k, pow)) pow = a.field.Mul(pow, x) } return sum } func reduce(coefficients []byte) []byte { for i := len(coefficients) - 1; i >= 0; i-- { if coefficients[i] != 0 { break } coefficients = coefficients[:i] } return coefficients } func expand(n int, coefficients []byte) []byte { dup := make([]byte, n) copy(dup[:len(coefficients)], coefficients) return dup } func maxCoeffLen(first Polynomial, rest ...Polynomial) int { n := len(first.coefficients) for _, next := range rest { l := len(next.coefficients) if l > n { n = l } } return n }
polynomial.go
0.794185
0.657325
polynomial.go
starcoder
package bungieapigo // Returns data about a character's status with a given Objective. Combine with // DestinyObjectiveDefinition static data for display purposes. type DestinyObjectiveProgress struct { // The unique identifier of the Objective being referred to. Use to look up the // DestinyObjectiveDefinition in static data. ObjectiveHash int `json:"objectiveHash"` // If the Objective has a Destination associated with it, this is the unique identifier of the // Destination being referred to. Use to look up the DestinyDestinationDefinition in static // data. This will give localized data about *where* in the universe the objective should be // achieved. DestinationHash int `json:"destinationHash"` // If the Objective has an Activity associated with it, this is the unique identifier of the // Activity being referred to. Use to look up the DestinyActivityDefinition in static data. This // will give localized data about *what* you should be playing for the objective to be achieved. ActivityHash int `json:"activityHash"` // If progress has been made, and the progress can be measured numerically, this will be the value // of that progress. You can compare it to the DestinyObjectiveDefinition.completionValue // property for current vs. upper bounds, and use DestinyObjectiveDefinition.valueStyle to // determine how this should be rendered. Note that progress, in Destiny 2, need not be a literal // numeric progression. It could be one of a number of possible values, even a Timestamp. Always // examine DestinyObjectiveDefinition.valueStyle before rendering progress. Progress int `json:"progress"` // As of Forsaken, objectives' completion value is determined dynamically at runtime. // This value represents the threshold of progress you need to surpass in order for this objective // to be considered "complete". // If you were using objective data, switch from using the DestinyObjectiveDefinition's // "completionValue" to this value. 
CompletionValue int `json:"completionValue"` // Whether or not the Objective is completed. Complete bool `json:"complete"` // If this is true, the objective is visible in-game. Otherwise, it's not yet visible to the // player. Up to you if you want to honor this property. Visible bool `json:"visible"` }
pkg/models/DestinyObjectiveProgress.go
0.713931
0.461745
DestinyObjectiveProgress.go
starcoder
package poly import ( "fmt" "strings" ) // Int64T is a single term containing an int64 coefficient. type Int64T struct { Ind C int64 } // Ind represents the indeterminates of a single term. type Ind []int64 // Mul returns the product of 't' and 'x'. func (t Int64T) Mul(x Int64T) Int64T { t.Ind = t.Ind.Mul(x.Ind) t.C *= x.C return t } // Less reports whether 't' should be sorted before 'x' in a polynomial. func (t Int64T) Less(x Int64T) bool { for i, k := range t.Ind { if k == x.Ind[i] { continue } return k > x.Ind[i] } return false } // String returns a compact, human-readable representation of the term. func (t Int64T) String() string { if t.C == 0 { return "0" } s := t.Ind.String() if t.C == 1 { return s } if s == "1" { return fmt.Sprintf("%d", t.C) } return fmt.Sprintf("%d%s", t.C, s) } // Mul returns the product of two indeterminates. func (i Ind) Mul(x Ind) Ind { if len(i) < len(x) { i, x = x, i } ret := make(Ind, len(i)) for k := range i { ret[k] = i[k] if k < len(x) { ret[k] += x[k] } } return ret } // Eq checks two indeterminates for equality. func (i Ind) Eq(x Ind) bool { if len(i) != len(x) { return false } for i, k := range i { if k != x[i] { return false } } return true } // String returns a compact, human-readable representation of the indeterminates. 
func (i Ind) String() string { var s []string const simple = "xyz" switch size := len(i); { case size == 0: return "1" case size <= len(simple): s = strings.Split(simple, "")[:size] default: for i := range i { s = append(s, "x"+sub(int64(i))) } } ret := "" for i, x := range i { if x != 0 { ret += s[i] if x != 1 { ret += sup(x) } } } if ret == "" { return "1" } return ret } func sub(i int64) string { if i == 0 { return "₀" } return smap(i, [...]rune{'₀', '₁', '₂', '₃', '₄', '₅', '₆', '₇', '₈', '₉', '₋'}) } func sup(i int64) string { return smap(i, [...]rune{'⁰', '¹', '²', '³', '⁴', '⁵', '⁶', '⁷', '⁸', '⁹', '¯'}) } func smap(i int64, m [11]rune) string { s := "" neg := i < 0 if neg { i = -i } for i > 0 { s = fmt.Sprintf("%c%s", m[i%10], s) i /= 10 } if neg { s = fmt.Sprintf("%c%s", m[10], s) } return s }
go/poly/int64_t.go
0.770983
0.471406
int64_t.go
starcoder
package Utilities import ( "fmt" "math" "marvin/SnakeProGo/Utilities/Physics" ) type Point struct { X, Y int } func (p *Point) ToVector() *Physics.Vector { return &Physics.Vector{float64(p.X),float64(p.Y),0} } func (p *Point) Print() string { return fmt.Sprintf("(%v, %v)", p.X, p.Y) } func (p *Point) Equals(p2 *Point) bool { if p.X == p2.X && p.Y == p2.Y { return true } return false } func (p *Point) Copy() *Point { return &Point{p.X, p.Y} } func (p *Point) CollidesWithWall(XTiles,YTiles float64) bool { if p.X >= 0 && p.Y >= 0 && p.X < int(XTiles) && p.Y < int(YTiles) { return false } return true } func (p *Point) CollidesWithPnts(pnts []*Point) *Point { for _,pnt := range(pnts) { if p.Equals(pnt) { return pnt } } return nil } func (p *Point) CollidesWithPnt(pnt *Point) bool { if p.Equals(pnt) { return true } return false } func (p *Point) IsContained(pnts []*Point) bool { for _,pnt := range(pnts) { if p.Equals(pnt) { return true } } return false } func (p *Point) GetDisTo(p2 *Point) float64 { return math.Sqrt(float64((p.X-p2.X)*(p.X-p2.X)+(p.Y-p2.Y)*(p.Y-p2.Y))) } func CollidePnts(pnts1,pnts2 []*Point) (collPnts []*Point) { for _,pnt1 := range(pnts1) { for _,pnt2 := range(pnts2) { if pnt1.Equals(pnt2) { collPnts = append(collPnts, pnt1) } } } return } type SnakeTiles struct { Ts []*Point } func (t *SnakeTiles) Print() string { out := "[" for i,_ := range(t.Ts) { out += t.Ts[i].Print() if i < len(t.Ts)-1 { out += "," } } out += "]" return out } func (t *SnakeTiles) GetLast() *Point { if len(t.Ts) > 0 { return t.Ts[len(t.Ts)-1] } return &Point{0,0} } func (t *SnakeTiles) GetFirst() *Point { if len(t.Ts) > 0 { return t.Ts[0] } return &Point{0,0} } func (t *SnakeTiles) AddBack(xdir, ydir, tiles int) { var LastP *Point for i := 0; i < tiles; i++ { LastP = t.GetLast() t.Ts = append(t.Ts, &Point{LastP.X+xdir, LastP.Y+ydir}) } } func (t *SnakeTiles) RemBack(tiles int) { t.Ts = t.Ts[:len(t.Ts)-tiles] } func (t *SnakeTiles) AddFront(xdir, ydir, tiles int) { FirstP := 
t.GetFirst() appList := make([]*Point, tiles) for i := tiles-1; i >= 0; i-- { appList[i] = &Point{FirstP.X+xdir, FirstP.Y+ydir} FirstP = appList[i] } newList := make([]*Point, 0) for i,_ := range(appList) { newList = append(newList, appList[i]) } for i,_ := range(t.Ts) { newList = append(newList, t.Ts[i]) } t.Ts = newList } func (t *SnakeTiles) RemFront(tiles int) { t.Ts = t.Ts[tiles:] }
GoFiles/Utilities/snakeTiles.go
0.535584
0.493836
snakeTiles.go
starcoder
package gorocksdb // #include "rocksdb/c.h" import "C" // A SliceTransform can be used as a prefix extractor. type SliceTransform interface { // Transform a src in domain to a dst in the range. Transform(src []byte) []byte // Determine whether this is a valid src upon the function applies. InDomain(src []byte) bool // Determine whether dst=Transform(src) for some src. InRange(src []byte) bool // Return the name of this transformation. Name() string } // This type is a bit of a hack and will not behave as expected if clients try to // call its methods. It is handled specially in Options. type nativeSliceTransform struct { c *C.rocksdb_slicetransform_t } func (st nativeSliceTransform) Transform(src []byte) []byte { return nil } func (st nativeSliceTransform) InDomain(src []byte) bool { return false } func (st nativeSliceTransform) InRange(src []byte) bool { return false } func (st nativeSliceTransform) Name() string { return "" } // NewFixedPrefixTransform creates a new fixed prefix transform. func NewFixedPrefixTransform(prefixLen int) SliceTransform { return NewNativeSliceTransform(C.rocksdb_slicetransform_create_fixed_prefix(C.size_t(prefixLen))) } func NewNoopTransform() SliceTransform { return NewNativeSliceTransform(C.rocksdb_slicetransform_create_noop()) } // NewNativeSliceTransform allocates a SliceTransform object. // The SliceTransform's methods are no-ops, but it is still used correctly by // RocksDB. 
func NewNativeSliceTransform(c *C.rocksdb_slicetransform_t) SliceTransform { return nativeSliceTransform{c} } //export gorocksdb_slicetransform_transform func gorocksdb_slicetransform_transform(handler *SliceTransform, cKey *C.char, cKeyLen C.size_t, cDstLen *C.size_t) *C.char { key := charToByte(cKey, cKeyLen) dst := (*handler).Transform(key) *cDstLen = C.size_t(len(dst)) return byteToChar(dst) } //export gorocksdb_slicetransform_in_domain func gorocksdb_slicetransform_in_domain(handler *SliceTransform, cKey *C.char, cKeyLen C.size_t) C.uchar { key := charToByte(cKey, cKeyLen) inDomain := (*handler).InDomain(key) return boolToChar(inDomain) } //export gorocksdb_slicetransform_in_range func gorocksdb_slicetransform_in_range(handler *SliceTransform, cKey *C.char, cKeyLen C.size_t) C.uchar { key := charToByte(cKey, cKeyLen) inRange := (*handler).InRange(key) return boolToChar(inRange) } //export gorocksdb_slicetransform_name func gorocksdb_slicetransform_name(handler *SliceTransform) *C.char { return stringToChar((*handler).Name()) }
slice_transform.go
0.751466
0.413714
slice_transform.go
starcoder
package primitives import ( "bytes" "fmt" ) type MemberId []byte func (x MemberId) String() string { return fmt.Sprintf("%x", []byte(x)) } func (x MemberId) Equal(y MemberId) bool { return bytes.Equal(x, y) } func (x MemberId) KeyForMap() string { return string(x) } type MemberWeight uint64 func (x MemberWeight) String() string { return fmt.Sprintf("%x", uint64(x)) } func (x MemberWeight) Equal(y MemberWeight) bool { return x == y } func (x MemberWeight) KeyForMap() uint64 { return uint64(x) } type Signature []byte func (x Signature) String() string { return fmt.Sprintf("%x", []byte(x)) } func (x Signature) Equal(y Signature) bool { return bytes.Equal(x, y) } func (x Signature) KeyForMap() string { return string(x) } type RandomSeedSignature []byte func (x RandomSeedSignature) String() string { return fmt.Sprintf("%x", []byte(x)) } func (x RandomSeedSignature) Equal(y RandomSeedSignature) bool { return bytes.Equal(x, y) } func (x RandomSeedSignature) KeyForMap() string { return string(x) } type Uint256 []byte func (x Uint256) String() string { return fmt.Sprintf("%x", []byte(x)) } func (x Uint256) Equal(y Uint256) bool { return bytes.Equal(x, y) } func (x Uint256) KeyForMap() string { return string(x) } type BlockHeight uint64 func (x BlockHeight) String() string { return fmt.Sprintf("%x", uint64(x)) } func (x BlockHeight) Equal(y BlockHeight) bool { return x == y } func (x BlockHeight) KeyForMap() uint64 { return uint64(x) } type View uint64 func (x View) String() string { return fmt.Sprintf("%x", uint64(x)) } func (x View) Equal(y View) bool { return x == y } func (x View) KeyForMap() uint64 { return uint64(x) } type InstanceId uint64 func (x InstanceId) String() string { return fmt.Sprintf("%x", uint64(x)) } func (x InstanceId) Equal(y InstanceId) bool { return x == y } func (x InstanceId) KeyForMap() uint64 { return uint64(x) } type BlockHash []byte func (x BlockHash) String() string { return fmt.Sprintf("%x", []byte(x)) } func (x BlockHash) Equal(y BlockHash) 
bool { return bytes.Equal(x, y) } func (x BlockHash) KeyForMap() string { return string(x) } type TimestampSeconds uint32 func (x TimestampSeconds) String() string { return fmt.Sprintf("%x", uint32(x)) } func (x TimestampSeconds) Equal(y TimestampSeconds) bool { return x == y } func (x TimestampSeconds) KeyForMap() uint32 { return uint32(x) }
spec/types/go/primitives/lean_helix_primitives.mb.go
0.72662
0.486149
lean_helix_primitives.mb.go
starcoder
package base import ( "container/heap" "gonum.org/v1/gonum/stat" "sort" ) // SparseIdSet manages the map between dense IDs and sparse IDs. type SparseIdSet struct { DenseIds map[int]int SparseIds []int } // NotId represents an ID not existed in the data set. const NotId = -1 // MakeSparseIdSet makes a SparseIdSet. func MakeSparseIdSet() SparseIdSet { return SparseIdSet{ DenseIds: make(map[int]int), SparseIds: make([]int, 0), } } // Len returns the number of IDs. func (set *SparseIdSet) Len() int { return len(set.SparseIds) } // Add adds a new ID to the ID set. func (set *SparseIdSet) Add(sparseId int) { if _, exist := set.DenseIds[sparseId]; !exist { set.DenseIds[sparseId] = len(set.SparseIds) set.SparseIds = append(set.SparseIds, sparseId) } } // ToDenseId converts a sparse ID to a dense ID. func (set *SparseIdSet) ToDenseId(sparseId int) int { if denseId, exist := set.DenseIds[sparseId]; exist { return denseId } return NotId } // ToSparseId converts a dense ID to a sparse ID. func (set *SparseIdSet) ToSparseId(denseId int) int { return set.SparseIds[denseId] } // SparseVector handles the sparse vector. type SparseVector struct { Indices []int Values []float64 Sorted bool } // MakeSparseVector makes a SparseVector. func MakeSparseVector() SparseVector { return SparseVector{ Indices: make([]int, 0), Values: make([]float64, 0), } } // NewSparseVector creates a SparseVector. func NewSparseVector() *SparseVector { return &SparseVector{ Indices: make([]int, 0), Values: make([]float64, 0), } } // MakeDenseSparseMatrix makes an array of SparseVectors. func MakeDenseSparseMatrix(row int) []SparseVector { mat := make([]SparseVector, row) for i := range mat { mat[i] = MakeSparseVector() } return mat } func SparseVectorsMean(a []SparseVector) []float64 { m := make([]float64, len(a)) for i := range a { m[i] = stat.Mean(a[i].Values, nil) } return m } // Add a new item. 
func (vec *SparseVector) Add(index int, value float64) { vec.Indices = append(vec.Indices, index) vec.Values = append(vec.Values, value) vec.Sorted = false } // Len returns the number of items. func (vec *SparseVector) Len() int { return len(vec.Values) } // Less compares indices of two items. func (vec *SparseVector) Less(i, j int) bool { return vec.Indices[i] < vec.Indices[j] } // Swap two items. func (vec *SparseVector) Swap(i, j int) { vec.Indices[i], vec.Indices[j] = vec.Indices[j], vec.Indices[i] vec.Values[i], vec.Values[j] = vec.Values[j], vec.Values[i] } // ForEach iterates items in the vector. func (vec *SparseVector) ForEach(f func(i, index int, value float64)) { for i := range vec.Indices { f(i, vec.Indices[i], vec.Values[i]) } } // SortIndex sorts items by indices. func (vec *SparseVector) SortIndex() { if !vec.Sorted { sort.Sort(vec) vec.Sorted = true } } // ForIntersection iterates items in the intersection of two vectors. func (vec *SparseVector) ForIntersection(other *SparseVector, f func(index int, a, b float64)) { // Sort indices of the left vector vec.SortIndex() // Sort indices of the right vector other.SortIndex() // Iterate i, j := 0, 0 for i < vec.Len() && j < other.Len() { if vec.Indices[i] == other.Indices[j] { f(vec.Indices[i], vec.Values[i], other.Values[j]) i++ j++ } else if vec.Indices[i] < other.Indices[j] { i++ } else { j++ } } } // KNNHeap is designed for neighbor-based models to store K nearest neighborhoods. type KNNHeap struct { SparseVector Similarities []float64 K int } // MakeKNNHeap makes a KNNHeap with k size. 
func MakeKNNHeap(k int) KNNHeap { return KNNHeap{ SparseVector: SparseVector{}, Similarities: make([]float64, 0), K: k, } } func (vec *KNNHeap) Less(i, j int) bool { return vec.Similarities[i] < vec.Similarities[j] } func (vec *KNNHeap) Swap(i, j int) { vec.SparseVector.Swap(i, j) vec.Similarities[i], vec.Similarities[j] = vec.Similarities[j], vec.Similarities[i] } type KNNHeapItem struct { Index int Value float64 Similarity float64 } func (vec *KNNHeap) Push(x interface{}) { item := x.(KNNHeapItem) vec.Indices = append(vec.Indices, item.Index) vec.Values = append(vec.Values, item.Value) vec.Similarities = append(vec.Similarities, item.Similarity) } func (vec *KNNHeap) Pop() interface{} { // Extract the minimum n := vec.Len() item := KNNHeapItem{ Index: vec.Indices[n-1], Value: vec.Values[n-1], Similarity: vec.Similarities[n-1], } // Remove last element vec.Indices = vec.Indices[0 : n-1] vec.Values = vec.Values[0 : n-1] vec.Similarities = vec.Similarities[0 : n-1] // We dont' expect return return item } // Add a new neighbor to the adjacent vector. func (vec *KNNHeap) Add(index int, value float64, similarity float64) { // Deprecate zero items if similarity == 0 { return } // Insert item heap.Push(vec, KNNHeapItem{index, value, similarity}) // Remove minimum if vec.Len() > vec.K { heap.Pop(vec) } }
base/sparse.go
0.734024
0.402862
sparse.go
starcoder
package provider import ( "github.com/lavaorg/lrtx/management" "github.com/lavaorg/northstar/portal/model" ) // PortalProvider defines the basic interface that implementers of the portal api functionality must fulfill. type PortalProvider interface { // CreateNotebook creates a new notebook CreateNotebook(token string, notebook *model.Notebook) (*model.Notebook, *management.Error) // UpdateNotebook updates an existing notebook. UpdateNotebook(token string, notebook *model.Notebook) *management.Error // ListNotebooks lists the existing notebooks for an account. ListNotebooks(token string) ([]model.Notebook, *management.Error) // GetNotebook retrieves the notebook matching a notebook ID. GetNotebook(token string, notebookID string) (*model.Notebook, *management.Error) // DeleteNotebook removes the notebook with the specified ID. DeleteNotebook(token string, notebookID string) *management.Error // ExecuteCell submits an execution request for a cell. ExecuteCell(token string, callbackURL string, cell *model.Cell) *management.Error // GetNotebookUsers returns a list of users that have access to the specified notebook GetNotebookUsers(token string, notebookId string) ([]model.User, *management.Error) // UpdateNotebookUsers updates the permissions of the users that have access to the specified notebook UpdateNotebookUsers(token string, notebookId string, users []model.User) *management.Error // ListPortfolios lists the existing portfolios for an account. ListPortfolios(token string) ([]model.Portfolio, *management.Error) // ListFiles lists the existing files for a portfolio. ListFiles(token string, portfolio string, prefix string, count int, marker string) ([]model.File, *management.Error) // GetFile gets the file for download. GetFile(token string, portfolio string, file string) (*model.Data, *management.Error) // CreateTransformation creates a new transformation. 
CreateTransformation(token string, transformation *model.Transformation) (*model.Transformation, *management.Error) // UpdateTransformation updates an existing Transformation. UpdateTransformation(token string, transformation *model.Transformation) *management.Error // ListTransformations lists the Transformations owned by an account. ListTransformations(token string) ([]model.Transformation, *management.Error) // GetTransformation retrieves the specified Transformation. GetTransformation(token string, transformationId string) (*model.Transformation, *management.Error) // GetTransformationResults returns the collection of execution results for the specified Transformation. GetTransformationResults(token string, transformationID string) ([]model.Output, *management.Error) // DeleteTransformation deletes the specified transformationId. DeleteTransformation(token string, transformationId string) *management.Error // ExecuteTransformation submits an execution request for a transformation. ExecuteTransformation(token string, callbackURL string, transformation *model.Transformation) *management.Error // Schedule CreateSchedule(token string, transformationId string, schedule *model.Schedule) *management.Error // GetSchedule retrieves the schedule for the specified transformationId. GetSchedule(token string, transformationId string) (*model.Schedule, *management.Error) // DeleteSchedule deletes the specified transformationId. DeleteSchedule(token string, transformationId string) *management.Error // GetSchemas retrieves the event schemas from thingspace via northstarapi GetScheduleEventSchemas(token string) ([]model.ScheduleEventSchema, *management.Error) // QueryUsers returns a list of Thingspace users filtered according to the provided query QueryUsers(token string, user *model.User) ([]model.User, *management.Error) // ProcessEvent parses the specified payload based on type. 
ProcessEvent(id, payloadType string, payload []byte) (*model.Event, *management.Error) // Templates ListTemplates(token string) ([]model.Template, *management.Error) }
portal/provider/provider.go
0.679817
0.438184
provider.go
starcoder
package check import "fmt" // float64CheckerProvider provides checks on type float64. type float64CheckerProvider struct{ baseCheckerProvider } // Is checks the gotten float64 is equal to the target. func (p float64CheckerProvider) Is(tar float64) Float64Checker { pass := func(got float64) bool { return got == tar } expl := func(label string, got interface{}) string { return p.explain(label, tar, got) } return NewFloat64Checker(pass, expl) } // Not checks the gotten float64 is not equal to the target. func (p float64CheckerProvider) Not(values ...float64) Float64Checker { var match float64 pass := func(got float64) bool { for _, v := range values { if got == v { match = v return false } } return true } expl := func(label string, got interface{}) string { return p.explainNot(label, match, got) } return NewFloat64Checker(pass, expl) } // InRange checks the gotten float64 is in the closed interval [lo:hi]. func (p float64CheckerProvider) InRange(lo, hi float64) Float64Checker { pass := func(got float64) bool { return p.inrange(got, lo, hi) } expl := func(label string, got interface{}) string { return p.explain(label, fmt.Sprintf("in range [%v:%v]", lo, hi), got) } return NewFloat64Checker(pass, expl) } // OutRange checks the gotten float64 is not in the closed interval [lo:hi]. func (p float64CheckerProvider) OutRange(lo, hi float64) Float64Checker { pass := func(got float64) bool { return !p.inrange(got, lo, hi) } expl := func(label string, got interface{}) string { return p.explainNot(label, fmt.Sprintf("in range [%v:%v]", lo, hi), got) } return NewFloat64Checker(pass, expl) } // GT checks the gotten float64 is greater than the target. 
func (p float64CheckerProvider) GT(tar float64) Float64Checker { pass := func(got float64) bool { return !p.lte(got, tar) } expl := func(label string, got interface{}) string { return p.explain(label, fmt.Sprintf("> %v", tar), got) } return NewFloat64Checker(pass, expl) } // GTE checks the gotten float64 is greater or equal to the target. func (p float64CheckerProvider) GTE(tar float64) Float64Checker { pass := func(got float64) bool { return !p.lt(got, tar) } expl := func(label string, got interface{}) string { return p.explain(label, fmt.Sprintf(">= %v", tar), got) } return NewFloat64Checker(pass, expl) } // LT checks the gotten float64 is lesser than the target. func (p float64CheckerProvider) LT(tar float64) Float64Checker { pass := func(got float64) bool { return p.lt(got, tar) } expl := func(label string, got interface{}) string { return p.explain(label, fmt.Sprintf("< %v", tar), got) } return NewFloat64Checker(pass, expl) } // LTE checks the gotten float64 is lesser or equal to the target. func (p float64CheckerProvider) LTE(tar float64) Float64Checker { pass := func(got float64) bool { return p.lte(got, tar) } expl := func(label string, got interface{}) string { return p.explain(label, fmt.Sprintf("<= %v", tar), got) } return NewFloat64Checker(pass, expl) } // Helpers func (float64CheckerProvider) lt(a, b float64) bool { return a < b } func (float64CheckerProvider) lte(a, b float64) bool { return a <= b } func (p float64CheckerProvider) inrange(n, lo, hi float64) bool { return !p.lt(n, lo) && p.lte(n, hi) }
check/providers_float64.go
0.805173
0.428473
providers_float64.go
starcoder
package v1alpha1 import ( v1alpha1 "github.com/enj/example-operator/pkg/apis/example/v1alpha1" "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" "k8s.io/client-go/tools/cache" ) // ExampleOperatorLister helps list ExampleOperators. type ExampleOperatorLister interface { // List lists all ExampleOperators in the indexer. List(selector labels.Selector) (ret []*v1alpha1.ExampleOperator, err error) // ExampleOperators returns an object that can list and get ExampleOperators. ExampleOperators(namespace string) ExampleOperatorNamespaceLister ExampleOperatorListerExpansion } // exampleOperatorLister implements the ExampleOperatorLister interface. type exampleOperatorLister struct { indexer cache.Indexer } // NewExampleOperatorLister returns a new ExampleOperatorLister. func NewExampleOperatorLister(indexer cache.Indexer) ExampleOperatorLister { return &exampleOperatorLister{indexer: indexer} } // List lists all ExampleOperators in the indexer. func (s *exampleOperatorLister) List(selector labels.Selector) (ret []*v1alpha1.ExampleOperator, err error) { err = cache.ListAll(s.indexer, selector, func(m interface{}) { ret = append(ret, m.(*v1alpha1.ExampleOperator)) }) return ret, err } // ExampleOperators returns an object that can list and get ExampleOperators. func (s *exampleOperatorLister) ExampleOperators(namespace string) ExampleOperatorNamespaceLister { return exampleOperatorNamespaceLister{indexer: s.indexer, namespace: namespace} } // ExampleOperatorNamespaceLister helps list and get ExampleOperators. type ExampleOperatorNamespaceLister interface { // List lists all ExampleOperators in the indexer for a given namespace. List(selector labels.Selector) (ret []*v1alpha1.ExampleOperator, err error) // Get retrieves the ExampleOperator from the indexer for a given namespace and name. 
Get(name string) (*v1alpha1.ExampleOperator, error) ExampleOperatorNamespaceListerExpansion } // exampleOperatorNamespaceLister implements the ExampleOperatorNamespaceLister // interface. type exampleOperatorNamespaceLister struct { indexer cache.Indexer namespace string } // List lists all ExampleOperators in the indexer for a given namespace. func (s exampleOperatorNamespaceLister) List(selector labels.Selector) (ret []*v1alpha1.ExampleOperator, err error) { err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { ret = append(ret, m.(*v1alpha1.ExampleOperator)) }) return ret, err } // Get retrieves the ExampleOperator from the indexer for a given namespace and name. func (s exampleOperatorNamespaceLister) Get(name string) (*v1alpha1.ExampleOperator, error) { obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) if err != nil { return nil, err } if !exists { return nil, errors.NewNotFound(v1alpha1.Resource("exampleoperator"), name) } return obj.(*v1alpha1.ExampleOperator), nil }
pkg/generated/listers/example/v1alpha1/exampleoperator.go
0.719384
0.406803
exampleoperator.go
starcoder
package pool

import (
	"sync"
)

// TwoDimBS is the shared pool of TwoDimByteSlice objects used by Release.
// NOTE(review): it is declared but not initialized here — Release nil-derefs
// unless an initializer elsewhere in the package sets it up; confirm.
var TwoDimBS *TwoDimByteSlicePool

// TwoDimByteSlicePool wraps a sync.Pool of *TwoDimByteSlice.
type TwoDimByteSlicePool struct {
	pool *sync.Pool
}

// TwoDimByteSlice is a two-dimensional, append-only byte arena
// (original comment: 二维递增数组, "two-dimensional growing array").
//
// Storage layout, as established by Append/AppendConcat/Index below:
//   - data holds every element back-to-back, each prefixed with one marker
//     byte: '+' for a real value, '-' for a nil element.
//   - flat holds one sliceHeader per element (offset/length into data,
//     length includes the marker byte).
//   - dim holds one sliceHeader per dimension: offset indexes into flat,
//     len counts the elements appended to that dimension.
// sliceHeader is declared elsewhere in this package; the positional literal
// in NewDim assumes its field order is {offset, len} — TODO confirm.
type TwoDimByteSlice struct {
	released bool          // guards against double-Release
	data     []byte        // marker-prefixed element bytes
	flat     []sliceHeader // per-element headers into data
	dim      []sliceHeader // per-dimension headers into flat
}

// Get fetches a TwoDimByteSlice from the pool and marks it in use.
func (p *TwoDimByteSlicePool) Get() *TwoDimByteSlice {
	bs := p.pool.Get().(*TwoDimByteSlice)
	bs.released = false
	return bs
}

// Released reports whether the object has been returned to the pool.
func (b *TwoDimByteSlice) Released() bool {
	return b.released
}

// Release resets the object and puts it back into the global pool.
// It panics when called twice on the same object.
func (b *TwoDimByteSlice) Release() {
	if b.released {
		panic("bug! release a released object")
	}
	b.released = true
	b.Reset()
	TwoDimBS.pool.Put(b)
}

// Reset truncates all three backing slices to length zero, keeping their
// capacity so pooled objects do not reallocate.
func (b *TwoDimByteSlice) Reset() {
	if len(b.data) > 0 {
		b.data = b.data[:0]
	}
	if len(b.flat) > 0 {
		b.flat = b.flat[:0]
	}
	if len(b.dim) > 0 {
		b.dim = b.dim[:0]
	}
}

// Grow extends the capacity of the three backing slices (lengths are
// preserved). NOTE(review): flat is sized as flatCap*dimCap — presumably
// flatCap is a per-dimension element count; confirm with callers. Also note
// the post-append capacity is only guaranteed to reach len+extend, which can
// be slightly below the requested cap when len < cap.
func (b *TwoDimByteSlice) Grow(dataCap, flatCap, dimCap int) {
	dataLen := len(b.data)
	dataExtend := dataCap - cap(b.data)
	if dataExtend > 0 {
		b.data = append(b.data, make([]byte, dataExtend)...)
		b.data = b.data[:dataLen]
	}
	flatLen := len(b.flat)
	flatExtend := flatCap*dimCap - cap(b.flat)
	if flatExtend > 0 {
		b.flat = append(b.flat, make([]sliceHeader, flatExtend)...)
		b.flat = b.flat[:flatLen]
	}
	dimLen := len(b.dim)
	dimExtend := dimCap - cap(b.dim)
	if dimExtend > 0 {
		b.dim = append(b.dim, make([]sliceHeader, dimExtend)...)
		b.dim = b.dim[:dimLen]
	}
}

// NewDim opens a new (empty) dimension starting at the current end of flat.
func (b *TwoDimByteSlice) NewDim() *TwoDimByteSlice {
	b.dim = append(b.dim, sliceHeader{len(b.flat), 0})
	return b
}

// Dim returns the number of dimensions.
func (b *TwoDimByteSlice) Dim() int {
	return len(b.dim)
}

// Len returns the number of elements in the given dimension.
func (b *TwoDimByteSlice) Len(dim int) int {
	return b.dim[dim].len
}

// Index returns element `index` of dimension `dim`, or nil when the element
// was stored as nil (marker byte '-'). The returned slice aliases internal
// storage — do not modify its contents.
func (b *TwoDimByteSlice) Index(dim, index int) []byte {
	flat := b.dim[dim].offset
	start := b.flat[flat+index].offset
	if b.data[start] == '-' {
		return nil
	}
	end := start + b.flat[flat+index].len
	// Skip the one-byte '+'/'-' marker.
	return b.data[start+1 : end]
}

// IsNil reports whether the element was stored as nil.
func (b *TwoDimByteSlice) IsNil(dim, index int) bool {
	return b.Index(dim, index) == nil
}

// CopyTo appends the element's bytes to buf; nil elements append nothing.
func (b *TwoDimByteSlice) CopyTo(dim, index int, buf []byte) []byte {
	bts := b.Index(dim, index)
	if bts == nil {
		return buf
	}
	buf = append(buf, bts...)
	return buf
}

// AppendConcat stores the concatenation of bs as ONE element of the current
// (last-opened) dimension. nil inputs are skipped; if every input is nil the
// element itself is recorded as nil by rewriting its marker byte to '-'.
func (b *TwoDimByteSlice) AppendConcat(bs ...[]byte) {
	used := len(b.data)
	b.data = append(b.data, '+')
	allNil := true
	length := 0
	for _, bts := range bs {
		if bts == nil {
			continue
		}
		allNil = false
		length = length + len(bts)
		b.data = append(b.data, bts...)
	}
	if allNil {
		b.data[used] = '-'
	}
	// +1 accounts for the marker byte.
	b.flat = append(b.flat, sliceHeader{used, length + 1})
	b.dim[len(b.dim)-1].len++
}

// Append stores each input as its OWN element of the current dimension;
// nil inputs become nil elements (marker '-').
func (b *TwoDimByteSlice) Append(bs ...[]byte) {
	for _, bts := range bs {
		used := len(b.data)
		if bts == nil {
			b.data = append(b.data, '-')
		} else {
			b.data = append(b.data, '+')
		}
		b.data = append(b.data, bts...)
		b.flat = append(b.flat, sliceHeader{used, len(bts) + 1})
		b.dim[len(b.dim)-1].len++
	}
}

// ToBytes appends every element of the dimension to bs (nil elements as
// nil). The appended slices alias internal storage — do not modify.
func (b *TwoDimByteSlice) ToBytes(dim int, bs [][]byte) [][]byte {
	start := b.dim[dim].offset
	end := start + b.dim[dim].len
	for i, h := range b.flat[start:end] {
		if b.IsNil(dim, i) {
			bs = append(bs, nil)
		} else {
			bs = append(bs, b.data[h.offset+1:h.offset+h.len])
		}
	}
	return bs
}

// ToByteSlice copies the dimension's elements into a pooled ByteSlice.
// BS and ByteSlice are declared elsewhere in this package — presumably a
// sibling one-dimensional pool; confirm.
func (b *TwoDimByteSlice) ToByteSlice(dim int) (bs *ByteSlice) {
	bs = BS.Get()
	for i := 0; i < b.Len(dim); i++ {
		bs.Append(b.Index(dim, i))
	}
	return bs
}
pool/two_dim_byteslice.go
0.521715
0.600598
two_dim_byteslice.go
starcoder
package core type PrimaryExpressionType int const ( VarPrimaryExpressionType PrimaryExpressionType = 1 << iota ConstPrimaryExpressionType DynamicStrPrimaryExpressionType ArrayPrimaryExpressionType ObjectPrimaryExpressionType ChainCallPrimaryExpressionType ElemFunctionCallPrimaryExpressionType FunctionPrimaryExpressionType SubListPrimaryExpressionType ElementPrimaryExpressionType FunctionCallPrimaryExpressionType NestedPrimaryExpressionType NotPrimaryExpressionType SelfIncrPrimaryExpressionType SelfDecrPrimaryExpressionType TernaryOperatorPrimaryExpressionType MultiAssignedPrimaryExpressionType ) type PrimaryExpression interface { isVar() bool isConst() bool isDynamicStr() bool isArray() bool isObject() bool isChainCall() bool isElemFunctionCall() bool isFunction() bool isSubList() bool isElement() bool isFunctionCall() bool isNot() bool Expression } type PrimaryExpressionImpl struct { t PrimaryExpressionType doExec func() Value ExpressionAdapter } func newPrimaryExpression() PrimaryExpression { return &PrimaryExpressionImpl{} } func (priExpr *PrimaryExpressionImpl) execute() Value { if priExpr == nil { return NULL } var res Value if priExpr.doExec != nil { res = priExpr.doExec() } else { runtimeExcption("ExpressionExecutor#evalPrimaryExpr: unknown primary expression type") } if res == nil { res = NULL } return res } func (priExpr *PrimaryExpressionImpl) isVar() bool { return priExpr.t&VarPrimaryExpressionType > 0 } func (priExpr *PrimaryExpressionImpl) isConst() bool { return priExpr.t&ConstPrimaryExpressionType > 0 } func (priExpr *PrimaryExpressionImpl) isDynamicStr() bool { return priExpr.t&DynamicStrPrimaryExpressionType > 0 } func (priExpr *PrimaryExpressionImpl) isArray() bool { return priExpr.t&ArrayPrimaryExpressionType > 0 } func (priExpr *PrimaryExpressionImpl) isObject() bool { return priExpr.t&ObjectPrimaryExpressionType > 0 } func (priExpr *PrimaryExpressionImpl) isChainCall() bool { return priExpr.t&ChainCallPrimaryExpressionType > 0 } func (priExpr 
*PrimaryExpressionImpl) isElemFunctionCall() bool { return priExpr.t&ElemFunctionCallPrimaryExpressionType > 0 } func (priExpr *PrimaryExpressionImpl) isFunction() bool { return priExpr.t&FunctionPrimaryExpressionType > 0 } func (priExpr *PrimaryExpressionImpl) isSubList() bool { return priExpr.t&SubListPrimaryExpressionType > 0 } func (priExpr *PrimaryExpressionImpl) isElement() bool { return priExpr.t&ElementPrimaryExpressionType > 0 } func (priExpr *PrimaryExpressionImpl) isFunctionCall() bool { return priExpr.t&FunctionCallPrimaryExpressionType > 0 } func (priExpr *PrimaryExpressionImpl) isNot() bool { return priExpr.t&NotPrimaryExpressionType > 0 } func (priExpr *PrimaryExpressionImpl) isTernaryOperator() bool { return priExpr.t&TernaryOperatorPrimaryExpressionType > 0 }
core/100expression_primary.go
0.563618
0.466724
100expression_primary.go
starcoder
package entity

import "github.com/cjduffett/synthea/utils"

// Demographics is a lookup hash for demographics used to seed Synthea.
//
// Layout (as used below):
//   "Race"      -> []utils.Choice          weighted race distribution
//   "Ethnicity" -> map[race][]utils.Choice weighted ethnicity per race
//   "BloodType" -> map[race][]utils.Choice weighted blood type per race
//
// NOTE(review): the "Hispanic" ethnicity weights sum to ~0.619
// (0.577 + 3x0.0141), not 1.0 — confirm whether the consumer renormalizes
// weights or whether these values are a data-entry error.
var Demographics = map[string]interface{}{
	"Race": []utils.Choice{
		// https://en.wikipedia.org/wiki/Demographics_of_Massachusetts#Race.2C_ethnicity.2C_and_ancestry
		utils.Choice{Weight: 0.694, Item: "White"},
		utils.Choice{Weight: 0.105, Item: "Hispanic"},
		utils.Choice{Weight: 0.081, Item: "Black"},
		utils.Choice{Weight: 0.06, Item: "Asian"},
		utils.Choice{Weight: 0.05, Item: "Native"},
		utils.Choice{Weight: 0.01, Item: "Other"},
	},
	"Ethnicity": map[string][]utils.Choice{
		"White": []utils.Choice{
			// https://en.wikipedia.org/wiki/Demographics_of_Massachusetts#Race.2C_ethnicity.2C_and_ancestry
			// Scaled out of 100% for each ethnicity.
			utils.Choice{Weight: 0.263, Item: "Irish"},
			utils.Choice{Weight: 0.160, Item: "Italian"},
			utils.Choice{Weight: 0.123, Item: "English"},
			utils.Choice{Weight: 0.090, Item: "French"},
			utils.Choice{Weight: 0.074, Item: "German"},
			utils.Choice{Weight: 0.057, Item: "Polish"},
			utils.Choice{Weight: 0.054, Item: "Portuguese"},
			utils.Choice{Weight: 0.050, Item: "American"},
			utils.Choice{Weight: 0.044, Item: "French Canadian"},
			utils.Choice{Weight: 0.028, Item: "Scottish"},
			utils.Choice{Weight: 0.022, Item: "Russian"},
			utils.Choice{Weight: 0.021, Item: "Swedish"},
			utils.Choice{Weight: 0.014, Item: "Greek"},
		},
		"Hispanic": []utils.Choice{
			// NOTE(review): weights sum to ~0.619 — see note on Demographics.
			utils.Choice{Weight: 0.577, Item: "Puerto Rican"},
			utils.Choice{Weight: 0.0141, Item: "Mexican"},
			utils.Choice{Weight: 0.0141, Item: "Central American"},
			utils.Choice{Weight: 0.0141, Item: "South American"},
		},
		"Black": []utils.Choice{
			utils.Choice{Weight: 0.34, Item: "African"},
			utils.Choice{Weight: 0.33, Item: "Dominican"},
			utils.Choice{Weight: 0.33, Item: "West Indian"},
		},
		"Asian": []utils.Choice{
			utils.Choice{Weight: 0.6, Item: "Chinese"},
			utils.Choice{Weight: 0.4, Item: "Asian Indian"},
		},
		"Native": []utils.Choice{
			utils.Choice{Weight: 1, Item: "American Indian"},
		},
		"Other": []utils.Choice{
			utils.Choice{Weight: 1, Item: "Arab"},
		},
	},
	"BloodType": map[string][]utils.Choice{
		// blood type data from http://www.redcrossblood.org/learn-about-blood/blood-types
		// data for Native and Other from https://en.wikipedia.org/wiki/Blood_type_distribution_by_country
		"White": []utils.Choice{
			utils.Choice{Weight: 0.37, Item: "o_positive"},
			utils.Choice{Weight: 0.08, Item: "o_negative"},
			utils.Choice{Weight: 0.33, Item: "a_positive"},
			utils.Choice{Weight: 0.07, Item: "a_negative"},
			utils.Choice{Weight: 0.09, Item: "b_positive"},
			utils.Choice{Weight: 0.02, Item: "b_negative"},
			utils.Choice{Weight: 0.03, Item: "ab_positive"},
			utils.Choice{Weight: 0.01, Item: "ab_negative"},
		},
		"Hispanic": []utils.Choice{
			utils.Choice{Weight: 0.52, Item: "o_positive"},
			utils.Choice{Weight: 0.04, Item: "o_negative"},
			utils.Choice{Weight: 0.29, Item: "a_positive"},
			utils.Choice{Weight: 0.02, Item: "a_negative"},
			utils.Choice{Weight: 0.09, Item: "b_positive"},
			utils.Choice{Weight: 0.01, Item: "b_negative"},
			utils.Choice{Weight: 0.02, Item: "ab_positive"},
			utils.Choice{Weight: 0.01, Item: "ab_negative"},
		},
		"Black": []utils.Choice{
			utils.Choice{Weight: 0.46, Item: "o_positive"},
			utils.Choice{Weight: 0.04, Item: "o_negative"},
			utils.Choice{Weight: 0.24, Item: "a_positive"},
			utils.Choice{Weight: 0.02, Item: "a_negative"},
			utils.Choice{Weight: 0.18, Item: "b_positive"},
			utils.Choice{Weight: 0.01, Item: "b_negative"},
			utils.Choice{Weight: 0.04, Item: "ab_positive"},
			utils.Choice{Weight: 0.01, Item: "ab_negative"},
		},
		"Asian": []utils.Choice{
			utils.Choice{Weight: 0.39, Item: "o_positive"},
			utils.Choice{Weight: 0.01, Item: "o_negative"},
			utils.Choice{Weight: 0.26, Item: "a_positive"},
			utils.Choice{Weight: 0.01, Item: "a_negative"},
			utils.Choice{Weight: 0.25, Item: "b_positive"},
			utils.Choice{Weight: 0.01, Item: "b_negative"},
			utils.Choice{Weight: 0.06, Item: "ab_positive"},
			utils.Choice{Weight: 0.01, Item: "ab_negative"},
		},
		"Native": []utils.Choice{
			utils.Choice{Weight: 0.374, Item: "o_positive"},
			utils.Choice{Weight: 0.066, Item: "o_negative"},
			utils.Choice{Weight: 0.357, Item: "a_positive"},
			utils.Choice{Weight: 0.063, Item: "a_negative"},
			utils.Choice{Weight: 0.085, Item: "b_positive"},
			utils.Choice{Weight: 0.015, Item: "b_negative"},
			utils.Choice{Weight: 0.034, Item: "ab_positive"},
			utils.Choice{Weight: 0.006, Item: "ab_negative"},
		},
		// "Other" intentionally mirrors "Native" (same Wikipedia source row).
		"Other": []utils.Choice{
			utils.Choice{Weight: 0.374, Item: "o_positive"},
			utils.Choice{Weight: 0.066, Item: "o_negative"},
			utils.Choice{Weight: 0.357, Item: "a_positive"},
			utils.Choice{Weight: 0.063, Item: "a_negative"},
			utils.Choice{Weight: 0.085, Item: "b_positive"},
			utils.Choice{Weight: 0.015, Item: "b_negative"},
			utils.Choice{Weight: 0.034, Item: "ab_positive"},
			utils.Choice{Weight: 0.006, Item: "ab_negative"},
		},
	},
}
entity/demographics.go
0.602179
0.439206
demographics.go
starcoder
package mapping

import ()

/*
When mapping to a struct, we map each field from a path into another object.
When mapping to a collection, we map each item from another collection.
When mapping to a pointer, we map from a set of pointers. If all are nil,
then map to nil.
*/

// Choice focus on Part of a Whole type.
type Choice interface {
	getChoice() Choice
}

// SumChoice choose a branch of a Sum type.
type SumChoice struct {
	Index int
}

// StructChoice choose a branch (Field) of a Struct type.
type StructChoice struct {
	Index string
}

// MappedField is a field of a new struct type created from an old struct type.
type MappedField struct {
	// SourcePath the sequence of choices from the old root struct to the
	// old Field this MappedField corresponds to.
	SourcePath []Choice
	// Name (of the Field) should correspond to the last item in SourcePath.
	Name string
	Atom MappedAtom
}

// MappedAtom is opaque to the shrinking algorithm.
// The atom may be shrinkable on the inside, but fields cannot be pulled from it.
// nil is the Identity mapping (no-op).
// NOTE(review): Print(int) is not implemented in this chunk — presumably in a
// sibling file; confirm.
type MappedAtom interface {
	getMappedAtom() MappedAtom
	Shrink()
	Print(int)
}

// MappedStruct a new struct type created from an old struct type.
// MappedStructs can be restructured using the shrinking algorithm.
type MappedStruct struct {
	// SourcePath the sequence of choices from the old root struct to the
	// old Struct this MappedStruct corresponds to
	SourcePath []Choice
	Fields     []*MappedField
}

// MappedSlice create a new Slice type by mapping elements of an old Slice type.
type MappedSlice struct {
	Elem MappedAtom
}

// MappedMap create a new Map type by mapping values of an old Map type.
type MappedMap struct {
	Value MappedAtom
}

// AppendedChoice get a copy of "prefix" with the next choice appended to it.
// The explicit copy prevents the returned slice from aliasing prefix's
// backing array (a plain append could stomp sibling paths).
func AppendedChoice(prefix []Choice, choice Choice) []Choice {
	newPrefix := make([]Choice, len(prefix))
	copy(newPrefix, prefix)
	return append(newPrefix, choice)
}

// incrementFieldCount bumps the occurrence count for fieldName.
func incrementFieldCount(fieldCounts map[string]int, fieldName string) {
	count := getFieldCount(fieldCounts, fieldName)
	fieldCounts[fieldName] = count + 1
}

// getFieldCount returns the occurrence count for fieldName (0 when absent).
func getFieldCount(fieldCounts map[string]int, fieldName string) int {
	if count, ok := fieldCounts[fieldName]; ok {
		return count
	}
	return 0
}

// Shrink the mapping for values.
func (m *MappedMap) Shrink() {
	if m.Value != nil {
		m.Value.Shrink()
	}
}

// Shrink the mapping for elements.
func (m *MappedSlice) Shrink() {
	if m.Elem != nil {
		m.Elem.Shrink()
	}
}

// Shrink move struct fields as close to the root struct as possible without
// creating field-name collisions. Runs to a fixed point: each pass promotes
// every uniquely-named subfield one level up, and the loop repeats until a
// pass promotes nothing.
// TODO: When promoting from structs with only one field,
// use the parent field name instead of the field name for the promoted field.
func (s *MappedStruct) Shrink() {
	for {
		promotionCount := 0
		// Count the occurrences of each field and subfield name.
		fieldCounts := map[string]int{}
		for _, field := range s.Fields {
			incrementFieldCount(fieldCounts, field.Name)
			if substruct, ok := field.Atom.(*MappedStruct); ok {
				for _, subfield := range substruct.Fields {
					incrementFieldCount(
						fieldCounts, subfield.Name)
				}
			}
		}
		// Promote the subfields with unique names.
		for fieldIx := 0; fieldIx < len(s.Fields); {
			deletedField := false
			field := s.Fields[fieldIx]
			if substruct, ok := field.Atom.(*MappedStruct); ok {
				for subfieldIx := 0; subfieldIx < len(substruct.Fields); {
					subfield := substruct.Fields[subfieldIx]
					if getFieldCount(fieldCounts, subfield.Name) == 1 {
						promotionCount++
						// NOTE(review): `||` short-circuits, so PromoteSubfield
						// is skipped once deletedField is true. This is safe
						// only because PromoteSubfield sets deletedField solely
						// when substruct becomes empty — which also terminates
						// this inner loop. An eager `|` would be less fragile;
						// confirm before relying on this in refactors.
						deletedField = deletedField || s.PromoteSubfield(fieldIx, substruct, subfieldIx)
					} else {
						subfieldIx++
					}
				}
			}
			if !deletedField {
				fieldIx++
			}
		}
		if promotionCount == 0 {
			break
		}
	}
	// Finished shrinking the top level. Shrink the next level down.
	for _, field := range s.Fields {
		if field.Atom != nil {
			field.Atom.Shrink()
		}
	}
}

// PromoteSubfield move a Field from its struct to the parent of its struct.
// If its original struct is now empty, delete this struct from its parent.
func (s *MappedStruct) PromoteSubfield(fieldIx int, substruct *MappedStruct, subfieldIx int) (deletedField bool) {
	subfield := substruct.Fields[subfieldIx]
	substruct.DeleteFieldAt(subfieldIx)
	s.InsertFieldAt(fieldIx, subfield)
	if len(substruct.Fields) == 0 {
		s.DeleteFieldAt(fieldIx)
		deletedField = true
	}
	return
}

// InsertFieldAt insert a new Field into the Struct at a given index.
func (s *MappedStruct) InsertFieldAt(fieldIx int, field *MappedField) {
	s.Fields = append(s.Fields[:fieldIx],
		append([]*MappedField{field}, s.Fields[fieldIx:]...)...)
}

// DeleteFieldAt delete the Field at a given index.
func (s *MappedStruct) DeleteFieldAt(fieldIx int) {
	s.Fields = append(s.Fields[:fieldIx], s.Fields[fieldIx+1:]...)
}

func (c *SumChoice) getChoice() Choice { return c }

func (c *StructChoice) getChoice() Choice { return c }

func (s *MappedStruct) getMappedAtom() MappedAtom { return s }

func (m *MappedSlice) getMappedAtom() MappedAtom { return m }

func (m *MappedMap) getMappedAtom() MappedAtom { return m }
mapping/mapping.go
0.622345
0.45302
mapping.go
starcoder
package image import ( "encoding/binary" "image" "image/color" "image/draw" _ "image/jpeg" "image/png" "io" ) // setBit will set the LSB of n to the requested value func setBit(n uint32, is1 bool) uint8 { n = n >> 8 n = n & 0xFE if is1 { n = n | 0x1 } return uint8(n) } // convertByteToBits is a helper function that takes one byte and // returns a slice of booleans representing the binary value of that byte func convertByteToBits(b byte) []bool { result := make([]bool, 8) for j := 0; j < 8; j++ { mask := byte(1 << uint(j)) result[7-j] = b&mask>>uint(j) == 1 } return result } // getBits returns a slice of booleans representing the binary value of data func getBits(data []byte) []bool { bs := make([]byte, 4) binary.BigEndian.PutUint32(bs, uint32(len(data))) data = append(bs, data...) var results []bool for _, b := range data { results = append(results, convertByteToBits(b)...) } return results } // Encode takes an image and encodes a payload into the LSB func LSBEncoder(r io.Reader, w io.Writer, payload []byte) error { img, _, err := image.Decode(r) if err != nil { return err } bounds := img.Bounds() cimg := image.NewRGBA(bounds) draw.Draw(cimg, bounds, img, image.Point{}, draw.Over) data := getBits(payload) dataIdx := 0 dataLen := len(data) for y := bounds.Min.Y; y < bounds.Max.Y; y++ { for x := bounds.Min.X; x < bounds.Max.X; x++ { r, g, b, a := cimg.At(x, y).RGBA() r8 := uint8(r >> 8) g8 := uint8(g >> 8) b8 := uint8(b >> 8) a8 := uint8(a >> 8) if dataIdx < dataLen { r8 = setBit(r, data[dataIdx]) dataIdx++ } if dataIdx < dataLen { g8 = setBit(g, data[dataIdx]) dataIdx++ } if dataIdx < dataLen { b8 = setBit(b, data[dataIdx]) dataIdx++ } cimg.Set(x, y, color.RGBA{R: r8, G: g8, B: b8, A: a8}) } } return png.Encode(w, cimg) } // assemble takes the LSB data from a payload and reconstructes the original message func assemble(data []uint8) []byte { var result []byte length := len(data) for i := 0; i < len(data)/8; i++ { b := uint8(0) for j := 0; j < 8; j++ { if i*8+j < 
length { b = b<<1 + data[i*8+j] } } result = append(result, b) } payloadSize := binary.BigEndian.Uint32(result[0:4]) return result[4 : payloadSize+4] } // Decode takes an image and prints the payload that was encoded func LSBDecoder(r io.Reader) ([]byte, error) { img, err := png.Decode(r) if err != nil { return nil, err } bounds := img.Bounds() var data []uint8 for y := bounds.Min.Y; y < bounds.Max.Y; y++ { for x := bounds.Min.X; x < bounds.Max.X; x++ { r, g, b, _ := img.At(x, y).RGBA() data = append(data, uint8(r>>8)&1) data = append(data, uint8(g>>8)&1) data = append(data, uint8(b>>8)&1) } } payload := assemble(data) return payload, nil }
kit/image/lsb.go
0.660282
0.410225
lsb.go
starcoder
// NOTE(review): this file matches the openapi-generator Go template layout;
// presumably machine-generated from the Onshape API spec — confirm before
// hand-editing.
package onshape

import (
	"encoding/json"
)

// BTPStatementBlock271 struct for BTPStatementBlock271
type BTPStatementBlock271 struct {
	// BTPStatement269 is embedded; MarshalJSON below flattens its fields
	// into this object's JSON representation.
	BTPStatement269
	BtType         *string     `json:"btType,omitempty"`
	SpaceAfterOpen *BTPSpace10 `json:"spaceAfterOpen,omitempty"`
}

// NewBTPStatementBlock271 instantiates a new BTPStatementBlock271 object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewBTPStatementBlock271() *BTPStatementBlock271 {
	this := BTPStatementBlock271{}
	return &this
}

// NewBTPStatementBlock271WithDefaults instantiates a new BTPStatementBlock271 object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewBTPStatementBlock271WithDefaults() *BTPStatementBlock271 {
	this := BTPStatementBlock271{}
	return &this
}

// GetBtType returns the BtType field value if set, zero value otherwise.
func (o *BTPStatementBlock271) GetBtType() string {
	if o == nil || o.BtType == nil {
		var ret string
		return ret
	}
	return *o.BtType
}

// GetBtTypeOk returns a tuple with the BtType field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementBlock271) GetBtTypeOk() (*string, bool) {
	if o == nil || o.BtType == nil {
		return nil, false
	}
	return o.BtType, true
}

// HasBtType returns a boolean if a field has been set.
func (o *BTPStatementBlock271) HasBtType() bool {
	if o != nil && o.BtType != nil {
		return true
	}
	return false
}

// SetBtType gets a reference to the given string and assigns it to the BtType field.
func (o *BTPStatementBlock271) SetBtType(v string) {
	o.BtType = &v
}

// GetSpaceAfterOpen returns the SpaceAfterOpen field value if set, zero value otherwise.
func (o *BTPStatementBlock271) GetSpaceAfterOpen() BTPSpace10 {
	if o == nil || o.SpaceAfterOpen == nil {
		var ret BTPSpace10
		return ret
	}
	return *o.SpaceAfterOpen
}

// GetSpaceAfterOpenOk returns a tuple with the SpaceAfterOpen field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementBlock271) GetSpaceAfterOpenOk() (*BTPSpace10, bool) {
	if o == nil || o.SpaceAfterOpen == nil {
		return nil, false
	}
	return o.SpaceAfterOpen, true
}

// HasSpaceAfterOpen returns a boolean if a field has been set.
func (o *BTPStatementBlock271) HasSpaceAfterOpen() bool {
	if o != nil && o.SpaceAfterOpen != nil {
		return true
	}
	return false
}

// SetSpaceAfterOpen gets a reference to the given BTPSpace10 and assigns it to the SpaceAfterOpen field.
func (o *BTPStatementBlock271) SetSpaceAfterOpen(v BTPSpace10) {
	o.SpaceAfterOpen = &v
}

// MarshalJSON serializes the embedded BTPStatement269 first, merges its
// fields into a flat map, then overlays this type's own optional fields.
func (o BTPStatementBlock271) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	serializedBTPStatement269, errBTPStatement269 := json.Marshal(o.BTPStatement269)
	if errBTPStatement269 != nil {
		return []byte{}, errBTPStatement269
	}
	errBTPStatement269 = json.Unmarshal([]byte(serializedBTPStatement269), &toSerialize)
	if errBTPStatement269 != nil {
		return []byte{}, errBTPStatement269
	}
	if o.BtType != nil {
		toSerialize["btType"] = o.BtType
	}
	if o.SpaceAfterOpen != nil {
		toSerialize["spaceAfterOpen"] = o.SpaceAfterOpen
	}
	return json.Marshal(toSerialize)
}

// NullableBTPStatementBlock271 distinguishes "explicit null" from "unset"
// for optional JSON fields of this type.
type NullableBTPStatementBlock271 struct {
	value *BTPStatementBlock271
	isSet bool
}

func (v NullableBTPStatementBlock271) Get() *BTPStatementBlock271 {
	return v.value
}

func (v *NullableBTPStatementBlock271) Set(val *BTPStatementBlock271) {
	v.value = val
	v.isSet = true
}

func (v NullableBTPStatementBlock271) IsSet() bool {
	return v.isSet
}

func (v *NullableBTPStatementBlock271) Unset() {
	v.value = nil
	v.isSet = false
}

func NewNullableBTPStatementBlock271(val *BTPStatementBlock271) *NullableBTPStatementBlock271 {
	return &NullableBTPStatementBlock271{value: val, isSet: true}
}

func (v NullableBTPStatementBlock271) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

func (v *NullableBTPStatementBlock271) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
onshape/model_btp_statement_block_271.go
0.658527
0.521288
model_btp_statement_block_271.go
starcoder
package utils import "time" type Comparator func(a, b interface{}) int // StringComparator provides a fast comparison on strings func StringComparator(a, b interface{}) int { s1 := a.(string) s2 := b.(string) min := len(s2) if len(s1) < len(s2) { min = len(s1) } diff := 0 for i := 0; i < min && diff == 0; i++ { diff = int(s1[i]) - int(s2[i]) } if diff == 0 { diff = len(s1) - len(s2) } if diff < 0 { return -1 } if diff > 0 { return 1 } return 0 } // IntComparator provides a basic comparison on int func IntComparator(a, b interface{}) int { aAsserted := a.(int) bAsserted := b.(int) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } // Int8Comparator provides a basic comparison on int8 func Int8Comparator(a, b interface{}) int { aAsserted := a.(int8) bAsserted := b.(int8) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } // Int16Comparator provides a basic comparison on int16 func Int16Comparator(a, b interface{}) int { aAsserted := a.(int16) bAsserted := b.(int16) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } // Int32Comparator provides a basic comparison on int32 func Int32Comparator(a, b interface{}) int { aAsserted := a.(int32) bAsserted := b.(int32) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } // Int64Comparator provides a basic comparison on int64 func Int64Comparator(a, b interface{}) int { aAsserted := a.(int64) bAsserted := b.(int64) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } // UIntComparator provides a basic comparison on uint func UIntComparator(a, b interface{}) int { aAsserted := a.(uint) bAsserted := b.(uint) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } // UInt8Comparator provides a basic comparison on 
uint8 func UInt8Comparator(a, b interface{}) int { aAsserted := a.(uint8) bAsserted := b.(uint8) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } // UInt16Comparator provides a basic comparison on uint16 func UInt16Comparator(a, b interface{}) int { aAsserted := a.(uint16) bAsserted := b.(uint16) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } // UInt32Comparator provides a basic comparison on uint32 func UInt32Comparator(a, b interface{}) int { aAsserted := a.(uint32) bAsserted := b.(uint32) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } // UInt64Comparator provides a basic comparison on uint64 func UInt64Comparator(a, b interface{}) int { aAsserted := a.(uint64) bAsserted := b.(uint64) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } // Float32Comparator provides a basic comparison on float32 func Float32Comparator(a, b interface{}) int { aAsserted := a.(float32) bAsserted := b.(float32) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } // Float64Comparator provides a basic comparison on float64 func Float64Comparator(a, b interface{}) int { aAsserted := a.(float64) bAsserted := b.(float64) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } // ByteComparator provides a basic comparison on byte func ByteComparator(a, b interface{}) int { aAsserted := a.(byte) bAsserted := b.(byte) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } // RuneComparator provides a basic comparison on rune func RuneComparator(a, b interface{}) int { aAsserted := a.(rune) bAsserted := b.(rune) switch { case aAsserted > bAsserted: return 1 case aAsserted < bAsserted: return -1 default: return 0 } } 
// TimeComparator provides a basic comparison on time.Time func TimeComparator(a, b interface{}) int { aAsserted := a.(time.Time) bAsserted := b.(time.Time) switch { case aAsserted.After(bAsserted): return 1 case aAsserted.Before(bAsserted): return -1 default: return 0 } }
utils/comparator.go
0.78968
0.561275
comparator.go
starcoder
package main import ( "encoding/hex" "fmt" "strconv" "strings" "time" "unicode/utf8" "github.com/bwmarrin/discordgo" mgo "gopkg.in/mgo.v2" "gopkg.in/mgo.v2/bson" ) // BotHelp is used to display a list of available commands or instructions on using a specified command func BotHelp(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { if len(command) == 1 { command = append(command, "nothing") } message := fmt.Sprintf("Need some help with the __%s__ command? EventsBot is happy to oblige :nerd:", command[1]) switch command[1] { case "nothing": message = "```List of EventsBot commands:" message = fmt.Sprintf("%s\r\n %slistevents", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n %sdetails", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n %snew", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n %snewevent", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n %scancelevent", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n %sedit", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n %ssignup", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n %sleave", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n %swisdom", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n %slisttimezones", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n\r\nCommands that require Admin priviledges", message) message = fmt.Sprintf("%s\r\n %saddserver", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n %saddtimezone", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n %sremovetimezone", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n %sroletimezone", message, config.CommandPrefix) //message = fmt.Sprintf("%s\r\n %simpersonate", message, config.CommandPrefix) //message = fmt.Sprintf("%s\r\n %sunimpersonate", message, config.CommandPrefix) message = fmt.Sprintf("%s```", message) message = fmt.Sprintf("%sYou can get help on 
any of these commands by typing %shelp followed by the name of the command", message, config.CommandPrefix) case "listevents": message = fmt.Sprintf("%s\r\nHere's how to get a list of upcoming events:", message) message = fmt.Sprintf("%s\r\n```%slistevents [Date] [@Username]\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n Date: The date for which you want to see events. This value is optional", message) message = fmt.Sprintf("%s\r\n @Username: The Discord user for which you want to see events. This value is optional.", message) message = fmt.Sprintf("%s\r\n\r\nNote: Both the date and username values are optional. You can specify either, neither or both but then they must be in the order shown above. If you omit the date, you will be shown all upcoming events and if you omit the user you will be shown events for all users.", message) message = fmt.Sprintf("%s```", message) case "details": message = fmt.Sprintf("%s\r\nHere's how to get details for an event:", message) message = fmt.Sprintf("%s\r\n```%sdetails EventID\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n EventID: That weird looking 7 character identifier that uniquely identifies the event. These values are case sensitive so do take care to get it right. 
It's your key to participation, enjoyment and a deeper level of zen.", message) message = fmt.Sprintf("%s```", message) case "new": message = fmt.Sprintf("%s\r\nHere's how to create a new event (interactive mode):", message) message = fmt.Sprintf("%s\r\n```%snew Name\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n Name: A name for your event.", message) message = fmt.Sprintf("%s```", message) message = fmt.Sprintf("%s\r\n\r\nHere's an example for you:", message) message = fmt.Sprintf("%s\r\n```%snew Last Wish Training Raid```", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\nThis will create an event named \"Last Wish Training Raid\" and the bot will prompt you for the remaining values required.", message) case "newevent": message = fmt.Sprintf("%s\r\nHere's how to create a new event (explicit mode):", message) message = fmt.Sprintf("%s\r\n```%snewevent Date Time (TimeZone) Duration Name Description Size\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n Date: In the format DD/MM/YYYY", message) message = fmt.Sprintf("%s\r\n Time: In the format HH:MM (24 hour clock)", message) message = fmt.Sprintf("%s\r\n TimeZone: A time zone abbreviation between brackets", message) message = fmt.Sprintf("%s\r\n Duration: Number of hours the event will last", message) message = fmt.Sprintf("%s\r\n Name: A name for your event. Surround it in quotes if it's more than one word", message) message = fmt.Sprintf("%s\r\nDescription: A longer description of your event. You totally want to surround this one in quotes", message) message = fmt.Sprintf("%s\r\n TeamSize: Just a number denoting how many players can sign up", message) message = fmt.Sprintf("%s\r\n\r\nNote: Specifying a time zone is optional, as can be seen in the example below. 
If no time zone is specified, the role default time zone will be used.", message) message = fmt.Sprintf("%s```", message) message = fmt.Sprintf("%s\r\n\r\nHere's an example for you:", message) message = fmt.Sprintf("%s\r\n```%snewevent %s 20:00 2 \"Normal Leviathan\" \"Fresh start of Leviathan raid\" 6```", message, config.CommandPrefix, time.Now().Format("02/01/2006")) message = fmt.Sprintf("%s\r\nThis will create a 2 hour event to start at 8pm tonight and which will allow 6 people to sign up", message) case "edit": message = fmt.Sprintf("%s\r\nHere's how to edit an event:", message) message = fmt.Sprintf("%s\r\n```%sedit EventID\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n EventID: That weird looking 4 character identifier that uniquely identifies the event. Take care to get it right. It's your key to participation, enjoyment and a deeper level of zen.", message) message = fmt.Sprintf("%s\r\nThis will bring up an interactive message allowing you to edit the", message) message = fmt.Sprintf("%s\r\n\r\nNote: Only the creator of an event or users with the EventsBotAdmin role assigned can edit an event.", message) message = fmt.Sprintf("%s```", message) case "cancelevent": message = fmt.Sprintf("%s\r\nHere's how to cancel an event:", message) message = fmt.Sprintf("%s\r\n```%scancelevent EventID\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n EventID: That weird looking 7 character identifier that uniquely identifies the event. These values are case sensitive so do take care to get it right. 
It's your key to participation, enjoyment and a deeper level of zen.", message) message = fmt.Sprintf("%s\r\n\r\nNote: Only the creator of an event or users with the EventsBotAdmin role assigned can cancel an event.", message) message = fmt.Sprintf("%s```", message) case "signup": message = fmt.Sprintf("%s\r\nHere's how to sign up to an event:", message) message = fmt.Sprintf("%s\r\n```%ssignup EventID [@Username] [@Username] ...\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n EventID: That weird looking 7 character identifier that uniquely identifies the event. These values are case sensitive so do take care to get it right. It's your key to participation, enjoyment and a deeper level of zen.", message) message = fmt.Sprintf("%s\r\n @Username: List of Discord users whom you wish to sign up to the event. Only the event creator and users with the EventsBotAdmin role assigned are allowed to sign users other than themselves up to an event. This value is optional.", message) message = fmt.Sprintf("%s\r\n\r\nNote: You can still sign up to an event even if it is already full. You will then be registered as a reserve for the event and promoted if someone leaves the event.", message) message = fmt.Sprintf("%s```", message) case "leave": message = fmt.Sprintf("%s\r\nHere's how to leave an event:", message) message = fmt.Sprintf("%s\r\n```%sleave EventID [@Username]\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n EventID: That weird looking 7 character identifier that uniquely identifies the event. These values are case sensitive so do take care to get it right. It's your key to participation, enjoyment and a deeper level of zen.", message) message = fmt.Sprintf("%s\r\n @Username: The Discord user whom you wish to remove from the event. Only the event creator and users with the EventsBotAdmin role assigned are allowed to remove users other than themselves from an event. 
This value is optional.", message) message = fmt.Sprintf("%s```", message) case "impersonate": message = fmt.Sprintf("%s\r\nHere's how to impersonate a user:", message) message = fmt.Sprintf("%s\r\n```%simpersonate @Username\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n @Username: The Discord user you wish to impersonate", message) message = fmt.Sprintf("%s\r\n\r\nNote: This will have the effect of any further commands you issue, until you've issued %sunimpersonate, behaving as if they originated from the specified user. This is dangerous of course and so only users with the EventsBotAdmin role assigned are allowed to issue this command. You have been warned.", message, config.CommandPrefix) message = fmt.Sprintf("%s```", message) case "unimpersonate": message = fmt.Sprintf("%s\r\nHere's how to stop impersonating a user:", message) message = fmt.Sprintf("%s\r\n```%sunimpersonate\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%sYes, it's that simple", message) message = fmt.Sprintf("%s```", message) case "wisdom": message = fmt.Sprintf("%s\r\nHere's how to obtain a nugget of wisdom:", message) message = fmt.Sprintf("%s\r\n```%swisdom\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%sYes, it's that simple. 
Just ask and you shall receive.", message) message = fmt.Sprintf("%s```", message) case "addnaughtylist": message = fmt.Sprintf("%s\r\nHere's how to add a user to the naughty list:", message) message = fmt.Sprintf("%s\r\n```%saddnaughtylist @Username\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n @Username: The Discord user you wish to add to the naughty list", message) message = fmt.Sprintf("%s```", message) case "addserver": message = fmt.Sprintf("%s\r\nHere's how to add a server to EventsBot:", message) message = fmt.Sprintf("%s\r\n```%saddserver\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%sYes, it's that simple", message) message = fmt.Sprintf("%s```", message) case "addtimezone": message = fmt.Sprintf("%s\r\nHere's how to add a time zone to EventsBot:", message) message = fmt.Sprintf("%s\r\n```%saddtimezone Abbrev Location [Emoji]\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n Abbrev: Abbreviation to be used for this time zone (ie. ET, CT, etc.)", message) message = fmt.Sprintf("%s\r\n Location: A location that represents the time zone (conforms to the tz database naming convention)", message) message = fmt.Sprintf("%s\r\n Emoji: A server emoji representing the time zone. 
This value is optional", message) message = fmt.Sprintf("%s\r\n\r\nNote: For more information on the tz database naming convention, see https://en.wikipedia.org/wiki/Tz_database", message) message = fmt.Sprintf("%s\r\n\r\nNote: EventsBot automatically adjusts times based on the specified location's Daylight Saving convention.", message) message = fmt.Sprintf("%s```", message) case "removetimezone": message = fmt.Sprintf("%s\r\nHere's how to remove a time zone from EventsBot:", message) message = fmt.Sprintf("%s\r\n Abbrev: Abbreviation for the time zone to be removed", message) message = fmt.Sprintf("%s```", message) case "listtimezones": message = fmt.Sprintf("%s\r\nHere's how to obtain a list of time zones:", message) message = fmt.Sprintf("%s\r\n```%slisttimezones\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%sYes, it's that simple. Just ask and you shall receive.", message) message = fmt.Sprintf("%s```", message) case "roletimezone": message = fmt.Sprintf("%s\r\nHere's how to associate a time zone to a server role:", message) message = fmt.Sprintf("%s\r\n```%sroletimezone Role Abbrev\r\n", message, config.CommandPrefix) message = fmt.Sprintf("%s\r\n Role: Server role to which time zone should be linked", message) message = fmt.Sprintf("%s\r\n Abbrev: Abbreviation provided when '%saddtimezone' command was issued", message, config.CommandPrefix) message = fmt.Sprintf("%s```", message) default: message = fmt.Sprintf("%s\r\nWait! What? Are you having me on? 
I don't know anything about %s", message, command[1]) message = fmt.Sprintf("%s\r\nEventsBot is not amused :expressionless:", message) } s.ChannelMessageSend(m.ChannelID, message) } // ListEvents is used to list all upcoming events on a specified (optional) date, for a specified (optional) user // ~listevents // ~listevents @username // ~listevents date // ~listevents date @username func ListEvents(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" // Test for correct number of arguments if len(command) > 3 { message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with listing events, type the following:\r\n```%shelp listevents```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } var specdate time.Time listuser := "all" // Check first argument if len(command) > 1 { if isUser(command[1]) { listuser = m.Mentions[0].Username } else if isDate(command[1]) { specdate, _ = time.ParseInLocation("02/01/2006", command[1], defaultLocation) } else { message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :confounded:") message = fmt.Sprintf("%s\r\nFor help with listing events, type the following:\r\n```%shelp listevents```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } } // Check second argument if len(command) > 2 { if isUser(command[2]) { listuser = m.Mentions[0].Username } else { message = fmt.Sprintf("Whoah, not so sure about those arguments. 
EventsBot is confused :anguished:") message = fmt.Sprintf("%s\r\nFor help with listing events, type the following:\r\n```%shelp listevents```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } } filter := bson.M{} filter["dateTime"] = bson.M{ "$gte": time.Now().Add(-1 * time.Hour), } if listuser != "all" { filter["participants.userName"] = listuser } if !specdate.IsZero() { filter["dateTime"] = bson.M{ "$gte": specdate, "$lt": specdate.AddDate(0, 0, 1), } } c := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("Events") var results []ClanEvent err := c.Find(filter).Sort("dateTime").All(&results) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to read the events. Sorry but EventsBot has no answers for you :cry:") return } // Get all time zones ctz := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("TimeZones") var tzs []TimeZone tzLookup := make(map[string]TimeZone) err = ctz.Find(bson.M{}).Sort("abbrev").All(&tzs) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to read the events. Sorry but EventsBot has no answers for you :cry:") return } for _, tz := range tzs { tzLookup[tz.Abbrev] = tz } var reply string if specdate.IsZero() { reply = fmt.Sprintf("%s - Upcoming events", g.Name) } else { reply = fmt.Sprintf("%s - Events on %s", g.Name, specdate.Format("Mon 02/01/2006")) } reply = fmt.Sprintf("%s for %s\r\n", reply, listuser) if len(results) == 0 { reply = fmt.Sprintf("%sZip. Nothing. Nada.\r\nWhat nonsense is this? 
EventsBot does not approve :frowning2:", reply) } else { for _, event := range results { tzInfo := "" eventLocation := defaultLocation if event.TimeZone != "" { tzInfo = fmt.Sprintf(" (%s)", event.TimeZone) eventLocation, _ = time.LoadLocation(tzLookup[event.TimeZone].Location) } freeSpace := event.TeamSize - len(event.Participants) curEvent := fmt.Sprintf("```%8v: %s%s - %s", event.EventID, event.DateTime.In(eventLocation).Format("Mon 02/01 15:04"), tzInfo, event.Name) // Add players to message if len(event.Participants) > 0 { curEvent = fmt.Sprintf("%s\r\n Players:", curEvent) for _, participant := range event.Participants { curEvent = fmt.Sprintf("%s %s,", curEvent, participant.DisplayName()) } // Remove trailing comma curEvent = fmt.Sprintf("%s", strings.TrimSuffix(curEvent, ",")) } // Add reserves to message if len(event.Reserves) > 0 { curEvent = fmt.Sprintf("%s\r\nReserves:", curEvent) for _, reserve := range event.Reserves { curEvent = fmt.Sprintf("%s %s,", curEvent, reserve.DisplayName()) } // Remove trailing comma curEvent = fmt.Sprintf("%s", strings.TrimSuffix(curEvent, ",")) } // Add status to message curEvent = fmt.Sprintf("%s\r\n Status: ", curEvent) switch freeSpace { case 0: curEvent = fmt.Sprintf("%sFULL", curEvent) case 1: curEvent = fmt.Sprintf("%s1 Space", curEvent) default: curEvent = fmt.Sprintf("%s%d Spaces", curEvent, freeSpace) } curEvent = fmt.Sprintf("%s```", curEvent) // Ensure that the message don't grow to over 2000 characters. 
If it does, post it as is and begin a new one for the rest of the events if len(reply)+len(curEvent) > 1980 { s.ChannelMessageSend(m.ChannelID, reply) reply = "" } reply = fmt.Sprintf("%s%s", reply, curEvent) } } s.ChannelMessageSend(m.ChannelID, reply) } // Details is used to display detailed information on a specified event // Usage: ~details EventID func Details(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" // Test for correct number of arguments if len(command) != 2 { message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with getting the details of an event, type the following:\r\n```%shelp details```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Find event in DB c := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("Events") var event ClanEvent err := c.Find(bson.M{"eventId": command[1]}).One(&event) if err != nil { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("EventsBot could find no such event. Are you sure you got that Event ID of %s right? Them's finicky numbers :grimacing:", command[1])) return } // Get time zone tzInfo := "" eventLocation := defaultLocation ctz := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("TimeZones") if event.TimeZone != "" { var tz TimeZone err = ctz.Find(bson.M{"abbrev": event.TimeZone}).One(&tz) if err != nil { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("EventsBot had trouble interpreting the time zone information of this event. Are we anywhere near a worm hole perhaps? 
:no_mouth:")) return } tzInfo = tz.Abbrev eventLocation, _ = time.LoadLocation(tz.Location) } message = fmt.Sprintf("**EventID:** %s", event.EventID) message = fmt.Sprintf("%s\r\n**Creator:** %s", message, event.Creator.Mention()) message = fmt.Sprintf("%s\r\n**Date:** %s", message, event.DateTime.In(eventLocation).Format("Mon 02/01/2006")) message = fmt.Sprintf("%s\r\n**Time:** %s for %d hours", message, event.DateTime.In(eventLocation).Format("15:04"), event.Duration) if event.TimeZone != "" { message = fmt.Sprintf("%s\r\n**Time Zone:** %s", message, tzInfo) } message = fmt.Sprintf("%s\r\n**Name:** %s", message, event.Name) message = fmt.Sprintf("%s\r\n**Description:** %s", message, event.Description) message = fmt.Sprintf("%s\r\n**Team Size:** %d of %d", message, len(event.Participants), event.TeamSize) if len(event.Participants) > 0 { message = fmt.Sprintf("%s\r\n**Participants:**", message) for _, participant := range event.Participants { message = fmt.Sprintf("%s\r\n - %s", message, participant.Mention()) } } if len(event.Reserves) > 0 { message = fmt.Sprintf("%s\r\n**Reserves:**", message) for _, reserve := range event.Reserves { message = fmt.Sprintf("%s\r\n - %s", message, reserve.Mention()) } } s.ChannelMessageSend(m.ChannelID, message) } // New is used to create a new event interactively func New(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { location, locAbbr := getLocation(g, s, m) newid := getEventID(time.Now()) gv, ok := guildVars[g.ID] if !ok { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("EventsBot had trouble obtaining the guild information :no_mouth:")) return } curUser := gv.impersonated year, month, day := time.Now().Date() if curUser.UserName == "" { curUser = ClanUser{ UserName: m.Author.Username, UserID: m.Author.ID, Nickname: getNickname(g, s, m.Author.ID), DateTime: time.Now(), } } eventName := strings.Join(command[1:], " ") // Test for name if len(eventName) > 50 { message := fmt.Sprintf("That's 
a very long name right there. You realise EventsBot has to memorise these things? Have a heart and keep it under 50 characters please. :triumph:") message = fmt.Sprintf("%s\r\nFor help with creating a new event, type the following:\r\n```%shelp new```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } event := ClanEvent{ EventID: newid, Creator: curUser, Name: eventName, DateTime: time.Date(year, month, day, 19, 0, 0, 0, location), TimeZone: locAbbr, Duration: 1, TeamSize: 6, } newEvent := DevelopingEvent{ TriggerMessage: m, State: stateNew, Event: event, } ShowDevelopingEvent(s, m, m.ChannelID, newEvent) } // NewEvent is used to create a new event with all values provided up front func NewEvent(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" var dateNdx, timeNdx, tzNdx, durationNdx, nameNdx, descrNdx, teamNdx = -1, -1, -1, -1, -1, -1, -1 // Test for correct number of arguments switch len(command) { //case 2: // nameNdx = 1 // descrNdx = 2 case 7: dateNdx = 1 timeNdx = 2 durationNdx = 3 nameNdx = 4 descrNdx = 5 teamNdx = 6 case 8: dateNdx = 1 timeNdx = 2 tzNdx = 3 durationNdx = 4 nameNdx = 5 descrNdx = 6 teamNdx = 7 default: message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with creating a new event, type the following:\r\n```%shelp newevent```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } locAbbr := "" var usrLocation *time.Location // Test for time zone specification if tzNdx > 0 { if !strings.HasPrefix(command[tzNdx], "(") || !strings.HasSuffix(command[tzNdx], ")") { message = fmt.Sprintf("Is %s supposed to be a time zone? 
Please put time zones in brackets :point_up:", command[tzNdx]) message = fmt.Sprintf("%s\r\nFor help with creating a new event, type the following:\r\n```%shelp newevent```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } locAbbr = command[tzNdx] locAbbr = strings.TrimPrefix(locAbbr, "(") locAbbr = strings.TrimSuffix(locAbbr, ")") // Check if timezone is known ctz := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("TimeZones") var tz TimeZone err := ctz.Find(bson.M{"abbrev": locAbbr}).One(&tz) if err != nil || tz.Location == "" { message = fmt.Sprintf("EventsBot doesn't know that %s time zone. Can we stick to time zones on earth please?", command[tzNdx]) message = fmt.Sprintf("%s\r\nTo see a list of available time zones, type the following:\r\n```%slisttimezones```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } usrLocation, err = time.LoadLocation(tz.Location) if err != nil { message = fmt.Sprintf("EventsBot is having trouble working with this time zone. Are we anywhere near a worm hole perhaps? :no_mouth:") message = fmt.Sprintf("%s\r\nFor help with creating a new event, type the following:\r\n```%shelp newevent```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } } else { usrLocation, locAbbr = getLocation(g, s, m) } // Test for date and time arguments datetime := fmt.Sprintf("%s %s", command[dateNdx], command[timeNdx]) dt, err := time.ParseInLocation("02/01/2006 15:04", datetime, usrLocation) if err != nil { message = fmt.Sprintf("Whoah, not so sure about that date and time (%s). EventsBot is confused :thinking:", datetime) message = fmt.Sprintf("%s\r\nFor help with creating a new event, type the following:\r\n```%shelp newevent```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } if dt.Before(time.Now()) { message = fmt.Sprintf("Are you trying to create an event in the past? 
EventsBot has lost his flux capacitor :robot:") message = fmt.Sprintf("%s\r\nFor help with creating a new event, type the following:\r\n```%shelp newevent```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Test for duration duration, err := strconv.Atoi(command[durationNdx]) if err != nil { message = fmt.Sprintf("What kind of a duration is %s? EventsBot needs a vacation of %s weeks :beach:", command[durationNdx], command[durationNdx]) message = fmt.Sprintf("%s\r\nFor help with creating a new event, type the following:\r\n```%shelp newevent```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Test for name if len(command[nameNdx]) > 50 { message = fmt.Sprintf("That's a very long name right there. You realise EventsBot has to memorise these things? Have a heart and keep it under 50 characters please. :triumph:") message = fmt.Sprintf("%s\r\nFor help with creating a new event, type the following:\r\n```%shelp newevent```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Test for description if len(command[descrNdx]) > 150 { message = fmt.Sprintf("That's a very long description right there. You realise EventsBot has to memorise these things? Have a heart and keep it under 150 characters please. :triumph:") message = fmt.Sprintf("%s\r\nFor help with creating a new event, type the following:\r\n```%shelp newevent```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Test for size teamSize, err := strconv.Atoi(command[teamNdx]) if err != nil { message = fmt.Sprintf("How many players you say? %s? 
EventsBot wouldn't do that if he were you :speak_no_evil:", command[teamNdx]) message = fmt.Sprintf("%s\r\nFor help with creating a new event, type the following:\r\n```%shelp newevent```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } newid := getEventID(time.Now()) curUser := guildVars[g.ID].impersonated if curUser.UserName == "" { curUser = ClanUser{ UserName: m.Author.Username, UserID: m.Author.ID, Nickname: getNickname(g, s, m.Author.ID), DateTime: time.Now(), } } newEvent := ClanEvent{ EventID: newid, Creator: curUser, DateTime: dt, TimeZone: locAbbr, Duration: duration, Name: command[nameNdx], Description: command[descrNdx], TeamSize: teamSize, Full: false, } c := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("Events") err = c.Insert(newEvent) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to create this event. Sorry but EventsBot has no answers for you :cry:") return } message = fmt.Sprintf("Woohoo! A new event has been created by %s. EventsBot is most pleased :ok_hand:", newEvent.Creator.Mention()) message = fmt.Sprintf("%s\r\nEvent ID: **%s**", message, newEvent.EventID) message = fmt.Sprintf("%s\r\n\r\nTo sign up for this event, type the following:", message) message = fmt.Sprintf("%s\r\n```%ssignup %s```", message, config.CommandPrefix, newEvent.EventID) s.ChannelMessageSend(m.ChannelID, message) signupCmd := []string{"signup", newEvent.EventID} Signup(g, s, m, signupCmd) } // Edit is used to edit an existing event func Edit(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" // Test for correct number of arguments if len(command) < 2 { message = fmt.Sprintf("Whoah, not so sure about those arguments. 
EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with editing an event, type the following:\r\n```%shelp edit```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } curUser := guildVars[g.ID].impersonated curUser.DateTime = time.Now() if curUser.UserName == "" { curUser = ClanUser{ UserName: m.Author.Username, UserID: m.Author.ID, Nickname: getNickname(g, s, m.Author.ID), DateTime: time.Now(), } } // Find event in DB c := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("Events") var event ClanEvent err := c.Find(bson.M{"eventId": strings.ToUpper(command[1])}).One(&event) if err != nil { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("EventsBot could find no such event. Are you sure you got that Event ID of %s right? Them's finicky numbers :grimacing:", command[1])) return } // Check that user has permissions allowed := false if event.Creator.UserName == curUser.UserName { allowed = true } else if hasRole(g, s, m, "EventsBotAdmin") { allowed = true } if !allowed { message = fmt.Sprintf("Yo yo yo. Back up a second dude. You don't have permissions to edit this event.\r\nEventsBot will not stand for this :point_up:") message = fmt.Sprintf("%s\r\nFor help with editing events, type the following:\r\n```%shelp edit```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } newMsg, _ := s.ChannelMessageSend(m.ChannelID, "EDIT EVENT") EditEvent(s, m, m.ChannelID, newMsg.ID, strings.ToUpper(command[1])) } // CancelEvent is used to delete a specified event // ~cancelevent EventID func CancelEvent(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" // Test for correct number of arguments if len(command) != 2 { message = fmt.Sprintf("Whoah, not so sure about those arguments. 
EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with cancelling an event, type the following:\r\n```%shelp cancelevent```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } curUser := guildVars[g.ID].impersonated curUser.DateTime = time.Now() if curUser.UserName == "" { curUser = ClanUser{ UserName: m.Author.Username, UserID: m.Author.ID, Nickname: getNickname(g, s, m.Author.ID), DateTime: time.Now(), } } // Find event in DB c := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("Events") var event ClanEvent err := c.Find(bson.M{"eventId": command[1]}).One(&event) if err != nil { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("EventsBot could find no such event. Are you sure you got that Event ID of %s right? Them's finicky numbers :grimacing:", command[1])) return } // Check that user has permissions allowed := false if event.Creator.UserName == curUser.UserName { allowed = true } else if hasRole(g, s, m, "EventsBotAdmin") { allowed = true } if !allowed { message = fmt.Sprintf("Yo yo yo. Back up a second dude. You don't have permissions to cancel this event.\r\nEventsBot will not stand for this :point_up:") message = fmt.Sprintf("%s\r\nFor help with cancelling events, type the following:\r\n```%shelp cancelevent```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Delete record err = c.Remove(bson.M{"eventId": command[1]}) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to create this event. Sorry but EventsBot has no answers for you :cry:") return } message = fmt.Sprintf("Tragedy! %s's event, %s, has been cancelled.", event.Creator.Mention(), event.Name) message = fmt.Sprintf("%s\r\n\r\n\"We don't have commit. Repeat. We are decommissioning the committal of the launch. It is now a negatory launch phase. We are in a no fly, no go phase. That is a November Gorgon phase, of non-flying. 
And we're gonna say 'goodnight, thank you, good work, over and out'\".", message) message = fmt.Sprintf("%s\r\n\r\nEventsBot will cry himself to sleep tonight :sob:", message) s.ChannelMessageSend(m.ChannelID, message) } // Signup is used to sign the author or a specified user up to an event // ~signup EventID // ~signup EventID @Username func Signup(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" curUser := guildVars[g.ID].impersonated curUser.DateTime = time.Now() if curUser.UserName == "" { curUser = ClanUser{ UserName: m.Author.Username, UserID: m.Author.ID, Nickname: getNickname(g, s, m.Author.ID), DateTime: time.Now(), } } signupUsers := []ClanUser{} if len(command) < 2 { message = fmt.Sprintf("Come on! Surely you're not expecting me to guess which event you're trying to sign up to :confused:") message = fmt.Sprintf("%s\r\nFor help with signing up to events, type the following:\r\n```%shelp signup```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Check first argument if len(command) > 2 { for i := 2; i < len(command); i++ { if isUser(command[i]) { // Find user in list of mentions for _, mentionedUser := range m.Mentions { if strings.Replace(command[i], "!", "", 1) == mentionedUser.Mention() { signupUser := ClanUser{ UserName: mentionedUser.Username, UserID: mentionedUser.ID, Nickname: getNickname(g, s, mentionedUser.ID), DateTime: time.Now(), } signupUsers = append(signupUsers, signupUser) } } } else { message = fmt.Sprintf("Whoah, not so sure about those arguments. 
EventsBot is confused :confounded:") message = fmt.Sprintf("%s\r\n%s doesn't look like anyone I recognise.", message, command[i]) message = fmt.Sprintf("%s\r\nFor help with signing up to events, type the following:\r\n```%shelp signup```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } } } else { signupUsers = append(signupUsers, curUser) } // Find event in DB c := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("Events") var event ClanEvent err := c.Find(bson.M{"eventId": command[1]}).One(&event) if err != nil { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("EventsBot could find no such event. Are you sure you got that Event ID of %s right? Them's finicky numbers :grimacing:", command[1])) return } // If different user is specified, check that current user has permissions if signupUsers[0] != curUser || len(signupUsers) > 1 { allowed := false if event.Creator.UserName == curUser.UserName { allowed = true } else if hasRole(g, s, m, "EventsBotAdmin") { allowed = true } if !allowed { message = fmt.Sprintf("Yo yo yo. Back up a second dude. 
You don't have permissions to sign other users up to events.\r\nEventsBot will not stand for this :point_up:") message = fmt.Sprintf("%s\r\nFor help with signing up to events, type the following:\r\n```%shelp signup```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } } // Check if any of the specified users are already signed up for this event for i1, signupUser := range signupUsers { for _, participant := range event.Participants { if participant.UserName == signupUser.UserName { if signupUser.UserName == curUser.UserName { s.ChannelMessageSend(m.ChannelID, "You are already signed up to this event.\r\nEventsBot hasn't got time for your shenanigans :rolling_eyes:") } else { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("%s is already signed up to this event.\r\nEventsBot hasn't got time for your shenanigans :rolling_eyes:", signupUser.DisplayName())) } return } } for _, reserve := range event.Reserves { if reserve.UserName == signupUser.UserName { if signupUser.UserName == curUser.UserName { s.ChannelMessageSend(m.ChannelID, "You are already a reserve for this event.\r\nCan you just relax please? EventsBot will let you know if a space opens up. Don't call us, we'll call you. :rolling_eyes:") } else { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("%s is already a reserve for this event.\r\nCan you just relax please? EventsBot will let you know if a space opens up. Don't call us, we'll call you. :rolling_eyes:", signupUser.DisplayName())) } return } } for i2 := 20; i2 < i1; i2++ { if signupUsers[i1].UserName == signupUsers[i2].UserName { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("How many times do you want to sign %s up for this event.\r\nNo one is _that_ important. :confused:", signupUser.DisplayName())) return } } } // Sign up all specified users for _, signupUser := range signupUsers { // Check if event is full if len(event.Participants) >= event.TeamSize { s.ChannelMessageSend(m.ChannelID, "Oh noes! 
This event is already full :cry:\r\nBut don't worry, EventsBot will put you on the reserves list and notify you if someone leaves.") for _, reserve := range event.Reserves { if reserve.UserName == curUser.UserName { continue } } event.Reserves = append(event.Reserves, signupUser) s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("%s is now signed up as a reserve for %s's event, %s.\r\nEventsBot approves :thumbsup:", signupUser.Mention(), event.Creator.Mention(), event.Name)) } else { event.Participants = append(event.Participants, signupUser) event.Full = len(event.Participants) >= event.TeamSize message := fmt.Sprintf("%s is now signed up for %s's event, %s.\r\n", signupUser.Mention(), event.Creator.Mention(), event.Name) if event.Full { message = fmt.Sprintf("%sThis event is now full. It's all systems go!\r\n", message) message = fmt.Sprintf("%sEventsBot definitely approves :thumbsup::thumbsup:", message) } else { if event.TeamSize-len(event.Participants) == 1 { message = fmt.Sprintf("%sThere is one space left\r\n", message) } else { message = fmt.Sprintf("%sThere are %d spaces left\r\n", message, event.TeamSize-len(event.Participants)) } message = fmt.Sprintf("%sEventsBot approves :thumbsup:", message) } s.ChannelMessageSend(m.ChannelID, message) } } err = c.Update(bson.M{"eventId": command[1]}, event) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to update the event. Sorry but EventsBot has no answers for you :cry:") return } } // Leave is used to remove the author or specified user from an event // ~leave EventID // ~leave EventID @Username func Leave(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" // Test for correct number of arguments if len(command) > 3 || len(command) < 2 { message = fmt.Sprintf("Whoah, not so sure about those arguments. 
EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with leaving an event, type the following:\r\n```%shelp leave```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } curUser := guildVars[g.ID].impersonated curUser.DateTime = time.Now() if curUser.UserName == "" { curUser = ClanUser{ UserName: m.Author.Username, UserID: m.Author.ID, Nickname: getNickname(g, s, m.Author.ID), DateTime: time.Now(), } } removeUser := curUser // Check first argument if len(command) > 2 { if isUser(command[2]) { removeUser = ClanUser{ UserName: m.Mentions[0].Username, UserID: m.Mentions[0].ID, Nickname: getNickname(g, s, m.Mentions[0].ID), DateTime: time.Now(), } } else { message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :confounded:") message = fmt.Sprintf("%s\r\nFor help with signing up to events, type the following:\r\n```%shelp signup```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } } // Find event in DB c := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("Events") var event ClanEvent err := c.Find(bson.M{"eventId": command[1]}).One(&event) if err != nil { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("EventsBot could find no such event. Are you sure you got that Event ID of %s right? Them's finicky numbers :grimacing:", command[1])) return } // If different user is specified, check that current user has permissions if removeUser != curUser { allowed := false if event.Creator.UserName == curUser.UserName { allowed = true } else if hasRole(g, s, m, "EventsBotAdmin") { allowed = true } if !allowed { message = fmt.Sprintf("Yo yo yo. Back up a second dude. 
You don't have permissions to remove other users from events.\r\nEventsBot will not stand for this :point_up:") message = fmt.Sprintf("%s\r\nFor help with leaving events, type the following:\r\n```%shelp signup```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } } // Check if user is in fact signed up for this event participantIndex := -1 for i, participant := range event.Participants { if participant.UserName == removeUser.UserName { participantIndex = i } } if participantIndex != -1 { // Remove participant from event event.Participants = append(event.Participants[:participantIndex], event.Participants[participantIndex+1:]...) message = fmt.Sprintf("Well okay then, %s has been removed from %s's event, %s\r\nEventsBot is sad to see you go :disappointed_relieved:", removeUser.Mention(), event.Creator.Mention(), event.Name) // Move first reserve into participants if len(event.Reserves) > 0 { message = fmt.Sprintf("%s\r\nBut hey! %s is on reserve so we're golden.\r\nEventsBot is relieved :relieved:", message, event.Reserves[0].Mention()) reserve := ClanUser{ UserName: event.Reserves[0].UserName, UserID: event.Reserves[0].UserID, Nickname: event.Reserves[0].Nickname, DateTime: event.Reserves[0].DateTime, } event.Participants = append(event.Participants, reserve) event.Reserves = append(event.Reserves[:0], event.Reserves[0+1:]...) err = c.Update(bson.M{"eventId": command[1]}, event) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to update the event. 
Sorry but EventsBot has no answers for you :cry:") return } } else { if event.TeamSize-len(event.Participants) == 1 { message = fmt.Sprintf("%s\r\nThere is now one space left\r\n", message) } else { message = fmt.Sprintf("%s\r\nThere are now %d spaces left\r\n", message, event.TeamSize-len(event.Participants)) } } } // Check if user is a reserve for this event reserveIndex := -1 for i, reserve := range event.Reserves { if reserve.UserName == removeUser.UserName { reserveIndex = i } } if reserveIndex != -1 { // Remove reserve from event event.Reserves = append(event.Reserves[:reserveIndex], event.Reserves[reserveIndex+1:]...) message = fmt.Sprintf("Well okay then, %s has been removed as a reserve from %s's event, %s\r\nEventsBot is sad to see you go :disappointed_relieved:", removeUser.Mention(), event.Creator.Mention(), event.Name) } if participantIndex == -1 && reserveIndex == -1 { if curUser.UserName == removeUser.UserName { s.ChannelMessageSend(m.ChannelID, "You are not signed up to this event.\r\nEventsBot does not find your jokes particularly funny :rolling_eyes:") } else { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("%s is not signed up to this event.\r\nEventsBot does not find your jokes particularly funny :rolling_eyes:", removeUser.DisplayName())) } return } err = c.Update(bson.M{"eventId": command[1]}, event) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to update the event. Sorry but EventsBot has no answers for you :cry:") return } s.ChannelMessageSend(m.ChannelID, message) } // Impersonate is used to assume the identity of another Discord user and issue commands on that user's behalf // TODO: There is a major problem with this functionality that needs to be fixed. The impersonated user is stored for each guild. This means that, on one Discord server, if Person B impersonates Person A, then everything Person C does will also be done in Person A impersonation mode. 
The impersonated user needs to stored per user, not per guild. func Impersonate(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" if !hasRole(g, s, m, "EventsBotAdmin") { message = fmt.Sprintf("Yo yo yo. Back up a second dude. You don't have permissions to impersonate other users.\r\nEventsBot will not stand for this :point_up:") message = fmt.Sprintf("%s\r\nFor help with impersonating users, type the following:\r\n```%shelp impersonate```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Test for correct number of arguments if len(command) > 2 { message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with impersonating users, type the following:\r\n```%shelp impersonate```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Check first argument if !isUser(command[1]) { message = fmt.Sprintf("Whoah, not so sure about those arguments. 
EventsBot is confused :confounded:") message = fmt.Sprintf("%s\r\nFor help with impersonating users, type the following:\r\n```%shelp impersonate```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } user := ClanUser{ UserName: m.Mentions[0].Username, UserID: m.Mentions[0].ID, Nickname: getNickname(g, s, m.Mentions[0].ID), DateTime: time.Now(), } guildVars[g.ID].impersonated = user s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("%s is now impersonated\r\nEventsBot is regarding this with some sense of apprehension :bust_in_silhouette:", guildVars[g.ID].impersonated.DisplayName())) } // Unimpersonate is used to return to the original user's identity after impersonating another user func Unimpersonate(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { guildVars[g.ID].impersonated = ClanUser{} s.ChannelMessageSend(m.ChannelID, "No more of this impersonation business!") } // Echo simply repeats the user's message func Echo(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf(strings.Join(command[1:], " "))) } // Test is used to simply check that the bot is online and responding func Test(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("EventsBot (Build Number %s) has been running since %s with DebugLevel=%d", buildNumber, liveTime.Format("2006/01/02 15:04:05"), config.DebugLevel)) } // Wisdom is used to deliver a nugget of wisdom func Wisdom(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := getInsult(m.Author.Mention()) sendMessage(m.ChannelID, message) } // AddNaughty is used to add a user to the naughty list func AddNaughty(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" // Test for correct number of arguments if 
len(command) != 2 { message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with adding a user to the naughty list, type the following:\r\n```%shelp addnaughtylist```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } var addUser ClanUser // Check first argument if isUser(command[1]) { addUser = ClanUser{ UserName: m.Mentions[0].Username, UserID: m.Mentions[0].ID, Nickname: getNickname(g, s, m.Mentions[0].ID), DateTime: time.Now(), } } else { message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :confounded:") message = fmt.Sprintf("%s\r\nFor help with adding a user to the naughty list, type the following:\r\n```%shelp addnaughtylist```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Check that current user has permissions if !hasRole(g, s, m, "EventsBotAdmin") { message = fmt.Sprintf("Yo yo yo. Back up a second dude. You don't have permissions to add users to the naughty list.\r\nIf you're not careful then EventsBot might just add you to the naughty list :point_up:") message = fmt.Sprintf("%s\r\nFor help with adding a user to the naughty list, type the following:\r\n```%shelp addnaughtylist```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } c := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("NaughtyList") filter := bson.M{"userName": addUser.UserName} _, err := c.Upsert(filter, addUser) if err != nil { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf(":scream::scream::scream:Something very weird happened when trying to add %s to the naughty list. 
Sorry but EventsBot has no answers for you :cry:", addUser.DisplayName())) return } message = fmt.Sprintf("%s has been added to the naughty list :angry:", addUser.DisplayName()) s.ChannelMessageSend(m.ChannelID, message) } // RemoveNaughty is used to remove a user from the naughty list func RemoveNaughty(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" // Test for correct number of arguments if len(command) != 2 { message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with removing a user from the naughty list, type the following:\r\n```%shelp removenaughtylist```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } var removeUser ClanUser // Check first argument if isUser(command[1]) { removeUser = ClanUser{ UserName: m.Mentions[0].Username, UserID: m.Mentions[0].ID, Nickname: getNickname(g, s, m.Mentions[0].ID), DateTime: time.Now(), } } else { message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :confounded:") message = fmt.Sprintf("%s\r\nFor help with removing a user from the naughty list, type the following:\r\n```%shelp removenaughtylist```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Check that current user has permissions if !hasRole(g, s, m, "EventsBotAdmin") { message = fmt.Sprintf("Yo yo yo. Back up a second dude. 
You don't have permissions to remove users from the naughty list.\r\nIf you're not careful then EventsBot might just add you to the naughty list :point_up:") message = fmt.Sprintf("%s\r\nFor help with removing a user from the naughty list, type the following:\r\n```%shelp removenaughtylist```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } c := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("NaughtyList") filter := bson.M{"userName": removeUser.UserName} info, err := c.RemoveAll(filter) if err != nil { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf(":scream::scream::scream:Something very weird happened when trying to remove %s from the naughty list. Sorry but EventsBot has no answers for you :cry:", removeUser.DisplayName())) return } if info.Removed > 0 { message = fmt.Sprintf("%s has been removed from the naughty list. Are we cool now? :kissing_heart:", removeUser.DisplayName()) } else { message = fmt.Sprintf("What are you talking about? %s is not on the naughty list. :shrug:", removeUser.DisplayName()) } s.ChannelMessageSend(m.ChannelID, message) } // AddServer is used to register a Discord server for ClanEvents to be able to run service functions for that server func AddServer(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" // Test for correct number of arguments if len(command) != 1 { message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with adding a server to EventsBot, type the following:\r\n```%shelp addserver```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Check that current user has permissions if !hasRole(g, s, m, "EventsBotAdmin") { message = fmt.Sprintf("Yo yo yo. Back up a second dude. 
You don't have permissions to register servers.\r\nEventsBot will not stand for this :point_up:") message = fmt.Sprintf("%s\r\nFor help with adding a server to EventsBot, type the following:\r\n```%shelp addserver```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } c1 := mongoSession.DB(fmt.Sprintf("ClanEvents")).C("Guilds") var guild Guild guild.ID = g.ID guild.Name = g.Name filter := bson.M{"discordId": guild.ID} _, err := c1.Upsert(filter, guild) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to register this server. Sorry but EventsBot has no answers for you :cry:") return } c2 := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("Config") var config ClanConfig config.DefaultChannel = m.ChannelID filter = bson.M{} _, err = c2.Upsert(filter, config) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to register this server. Sorry but EventsBot has no answers for you :cry:") return } c3 := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("Events") index := mgo.Index{ Key: []string{"eventId"}, Unique: true, DropDups: true, Background: true, Sparse: true, } err = c3.EnsureIndex(index) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to register this server. Sorry but EventsBot has no answers for you :cry:") return } message = fmt.Sprintf("%s has been registered with EventsBot", guild.Name) s.ChannelMessageSend(m.ChannelID, message) } // AddTimeZone is used to add capabilities for a time zone to ClanEvents func AddTimeZone(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { var newTZ TimeZone message := "" // Test for correct number of arguments if len(command) < 3 || len(command) > 4 { message = fmt.Sprintf("Whoah, not so sure about those arguments. 
EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with adding a time zone, type the following:\r\n```%shelp addtimezone```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } if len(command) > 3 { if len(command[3]) != 8 || utf8.RuneCountInString(command[3]) != 2 { message = fmt.Sprintf("Huh? What kind of an emoji is that. Pick somethign better please :expressionless:") message = fmt.Sprintf("%s\r\nFor help with adding a time zone, type the following:\r\n```%shelp addtimezone```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) } } // Check that current user has permissions if !hasRole(g, s, m, "EventsBotAdmin") { message = fmt.Sprintf("Yo yo yo. Back up a second dude. You don't have permissions to add time zones.\r\nYou don't look like you're from Gallifrey either :point_up:") message = fmt.Sprintf("%s\r\nFor help with adding a time zone, type the following:\r\n```%shelp addtimezone```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } gv, ok := guildVars[g.ID] if !ok { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("EventsBot had trouble obtaining the guild information :no_mouth:")) return } newTZ, found := gv.tzByAbbr[command[1]] if found { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("The abbreviation, %s, is already registered for %s", newTZ.Abbrev, newTZ.Location)) return } newTZ.Abbrev = command[1] newTZ.Location = command[2] if len(command) > 3 { newTZ.Emoji = fmt.Sprintf("%X", []byte(command[3])) } newLoc, err := time.LoadLocation(newTZ.Location) if err != nil { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("EventsBot is not so sure about that location. %s. Maybe you should double check that.", newTZ.Location)) return } _, err = time.ParseInLocation("02/01/2006 15:04", "24/10/1975 12:00", newLoc) if err != nil { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("EventsBot is not so sure about that location. %s. 
Maybe you should double check that.", newTZ.Location)) return } c := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("TimeZones") err = c.Insert(newTZ) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to add the time zone. Sorry but EventsBot has no answers for you :cry:") return } gv.timezones = append(gv.timezones, newTZ) tzBA, tzBE := constructTZMaps(gv.timezones) gv.tzByAbbr = tzBA gv.tzByEmoji = tzBE message = fmt.Sprintf("When you hear the signal it will be exactly %s, well in your newly registered timezone, %s, that is. Congrats... I guess.", time.Now().In(newLoc).Format("15:04"), newTZ.Abbrev) s.ChannelMessageSend(m.ChannelID, message) } // RemoveTimeZone is used to remove a time zone from ClanEvents func RemoveTimeZone(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" // Test for correct number of arguments if len(command) != 2 { message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with removing a time zone, type the following:\r\n```%shelp removetimezone```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Check that current user has permissions if !hasRole(g, s, m, "EventsBotAdmin") { message = fmt.Sprintf("Yo yo yo. Back up a second dude. 
You don't have permissions to remove time zones.\r\nYou don't look like you're from Gallifrey either :point_up:") message = fmt.Sprintf("%s\r\nFor help with adding a time zone, type the following:\r\n```%shelp addtimezone```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Get guild variables gv, ok := guildVars[g.ID] if !ok { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf("EventsBot had trouble obtaining the guild information :no_mouth:")) return } // Remove time zone from TimeZones collection ctz := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("TimeZones") filter := bson.M{"abbrev": command[1]} info, err := ctz.RemoveAll(filter) if err != nil { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf(":scream::scream::scream:Something very weird happened when trying to remove %s from the time zones. Sorry but EventsBot has no answers for you :cry:", command[1])) return } // Remove all role time zones referencing this time zone from RoleTimeZones collection crtz := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("RoleTimeZones") _, err = crtz.RemoveAll(filter) if err != nil { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf(":scream::scream::scream:Something very weird happened when trying to remove %s from the time zones. Sorry but EventsBot has no answers for you :cry:", command[1])) return } // Remove time zone from guild variables timezones slice index := -1 for i, tz := range gv.timezones { if tz.Abbrev == command[1] { index = i } } if index > -1 { gv.timezones = append(gv.timezones[:index], gv.timezones[index+1:]...) } tzBA, tzBE := constructTZMaps(gv.timezones) gv.tzByAbbr = tzBA gv.tzByEmoji = tzBE if info.Removed > 0 { message = fmt.Sprintf("%s has been removed from the list of time zones. Your world just got smaller.", command[1]) } else { message = fmt.Sprintf("Are you trying to glitch the universe? %s is not in the list of time zones. 
:shrug:", command[1]) } s.ChannelMessageSend(m.ChannelID, message) } // ListTimeZones is used to display a list of registered time zones func ListTimeZones(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" // Test for correct number of arguments if len(command) != 1 { message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with listing time zones, type the following:\r\n```%shelp listtimezones```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } ctz := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("TimeZones") var timezones []TimeZone err := ctz.Find(bson.M{}).Sort("abbrev").All(&timezones) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to list the time zones. Sorry but EventsBot has no answers for you :cry:") return } crtz := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("RoleTimeZones") var roletzs []ServerRoleTimeZone err = crtz.Find(bson.M{}).Sort("serverRole").All(&roletzs) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to list the time zones. Sorry but EventsBot has no answers for you :cry:") return } message = fmt.Sprintf("%sList of registered time zones for %s:\r\n", message, g.Name) if len(timezones) == 0 { message = fmt.Sprintf("%sUhm. Nope. There ain't none. :shrug:", message) } else { for _, tz := range timezones { tzLoc, err := time.LoadLocation(tz.Location) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to list the time zones. 
Sorry but EventsBot has no answers for you :cry:") return } message = fmt.Sprintf("%s**%s**: %s, current time %s", message, tz.Abbrev, tz.Location, time.Now().In(tzLoc).Format("15:04")) if tz.Emoji != "" { bytearray, err := hex.DecodeString(tz.Emoji) if err != nil { s.ChannelMessageSend(m.ChannelID, ":scream::scream::scream:Something very weird happened when trying to interpret the time zone icon. Sorry but EventsBot has no answers for you :cry:") return } emojistr := string(bytearray[:len(bytearray)]) message = fmt.Sprintf("%s %s\r\n", message, emojistr) } else { message = fmt.Sprintf("%s\r\n", message) } } } if len(roletzs) > 0 { message = fmt.Sprintf("%s\r\nServer roles with time zones:\r\n", message) for _, roletz := range roletzs { message = fmt.Sprintf("%s%s: %s\r\n", message, roletz.RoleName, roletz.Abbrev) } } s.ChannelMessageSend(m.ChannelID, message) } // RoleTimeZone is used to associate a time zone with a server role func RoleTimeZone(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, command []string) { message := "" // Test for correct number of arguments if len(command) != 3 { message = fmt.Sprintf("Whoah, not so sure about those arguments. EventsBot is confused :thinking:") message = fmt.Sprintf("%s\r\nFor help with linking a time zone to a server role, type the following:\r\n```%shelp roletimezone```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Check that current user has permissions if !hasRole(g, s, m, "EventsBotAdmin") { message = fmt.Sprintf("Yo yo yo. Back up a second dude. 
You don't have permissions to link server roles to time zones.\r\nIf you're not careful then EventsBot might just add you to the naughty list :point_up:") message = fmt.Sprintf("%s\r\nFor help with linking server roles to time zones, type the following:\r\n```%shelp roletimezone```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Check that the server role exists roleName := command[1] found := false for _, gRole := range g.Roles { if fmt.Sprintf("<@&%s>", gRole.ID) == roleName { found = true } } if !found { message = fmt.Sprintf("Say what? %s? EventsBot doesn't know any such server role.", roleName) message = fmt.Sprintf("%s\r\nFor help with linking server roles to time zones, type the following:\r\n```%shelp roletimezone```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Check that the time zone exists tz := command[2] ctz := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("TimeZones") count, err := ctz.Find(bson.M{"abbrev": tz}).Count() if err != nil || count == 0 { message = fmt.Sprintf("Say what? %s? EventsBot doesn't know any such time zone.", tz) message = fmt.Sprintf("%s\r\nFor help with linking server roles to time zones, type the following:\r\n```%shelp roletimezone```", message, config.CommandPrefix) s.ChannelMessageSend(m.ChannelID, message) return } // Link time zone to role var srtz ServerRoleTimeZone srtz.RoleName = roleName srtz.Abbrev = tz crtz := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("RoleTimeZones") filter := bson.M{"serverRole": roleName} _, err = crtz.Upsert(filter, srtz) if err != nil { s.ChannelMessageSend(m.ChannelID, fmt.Sprintf(":scream::scream::scream:Something very weird happened when trying to link the %s timezone to the %s server role. Sorry but EventsBot has no answers for you :cry:", srtz.Abbrev, srtz.RoleName)) return } message = fmt.Sprintf("Booyaa! 
EventsBot linked the %s time zone to the %s server role", srtz.Abbrev, srtz.RoleName) s.ChannelMessageSend(m.ChannelID, message) } func isUser(arg string) bool { return strings.HasPrefix(arg, "<@") } func isDate(arg string) bool { _, err := time.Parse("02/01/2006", arg) return err == nil } func getGuild(s *discordgo.Session, m *discordgo.MessageCreate) *discordgo.Guild { // Attempt to get the channel from the state. // If there is an error, fall back to the restapi channel, err := s.State.Channel(m.ChannelID) if err != nil { channel, err = s.Channel(m.ChannelID) if err != nil { return nil } } // Attempt to get the g from the state, // If there is an error, fall back to the restapi. g, err := s.State.Guild(channel.GuildID) if err != nil { g, err = s.Guild(channel.GuildID) if err != nil { return nil } } return g } func hasRole(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate, role string) bool { roleID := "" for _, gRole := range g.Roles { if gRole.Name == role { roleID = gRole.ID break } } found := false for _, role := range getRoles(g, m) { if role.ID == roleID { found = true break } } return found } func getRoles(g *discordgo.Guild, m *discordgo.MessageCreate) []*discordgo.Role { var retval []*discordgo.Role roles := make(map[string]*discordgo.Role) for _, gRole := range g.Roles { roles[gRole.ID] = gRole } for _, member := range g.Members { if member.User.Username == m.Author.Username { for _, memberRole := range member.Roles { retval = append(retval, roles[memberRole]) } break } } return retval } func getNickname(g *discordgo.Guild, s *discordgo.Session, userID string) string { guildMember, err := s.GuildMember(g.ID, userID) if err != nil { return "" } return guildMember.Nick } func getLocation(g *discordgo.Guild, s *discordgo.Session, m *discordgo.MessageCreate) (*time.Location, string) { retloc := defaultLocation retabbr := "" // Start by getting all time zones and server role time zones ctz := mongoSession.DB(fmt.Sprintf("ClanEvents%s", 
g.ID)).C("TimeZones") crtz := mongoSession.DB(fmt.Sprintf("ClanEvents%s", g.ID)).C("RoleTimeZones") var tzs []TimeZone var roletzs []ServerRoleTimeZone tzLookup := make(map[string]TimeZone) err := ctz.Find(bson.M{}).Sort("abbrev").All(&tzs) if err != nil { return retloc, retabbr } err = crtz.Find(bson.M{}).Sort("serverRole").All(&roletzs) if err != nil { return retloc, retabbr } for _, tz := range tzs { tzLookup[tz.Abbrev] = tz } // Get all roles for specified user memberRoles := getRoles(g, m) // Find highest order role for member which has a time zone linked to it highest := 0 for _, roletz := range roletzs { for _, memberRole := range memberRoles { if fmt.Sprintf("<@&%s>", memberRole.ID) == roletz.RoleName { if memberRole.Position > highest { highest = memberRole.Position retloc, _ = time.LoadLocation(tzLookup[roletz.Abbrev].Location) retabbr = roletz.Abbrev } } } } return retloc, retabbr }
commands.go
0.515376
0.432183
commands.go
starcoder
package graphic import ( "github.com/g3n/engine/core" "github.com/g3n/engine/geometry" "github.com/g3n/engine/gls" "github.com/g3n/engine/material" "github.com/g3n/engine/math32" ) // Mesh is a Graphic with uniforms for the model, view, projection, and normal matrices. type Mesh struct { Graphic // Embedded graphic uniMVm gls.Uniform // Model view matrix uniform location cache uniMVPm gls.Uniform // Model view projection matrix uniform cache uniNm gls.Uniform // Normal matrix uniform cache } // NewMesh creates and returns a pointer to a mesh with the specified geometry and material. // If the mesh has multi materials, the material specified here must be nil and the // individual materials must be add using "AddMaterial" or AddGroupMaterial". func NewMesh(igeom geometry.IGeometry, imat material.IMaterial) *Mesh { m := new(Mesh) m.Init(igeom, imat) return m } // Init initializes the Mesh and its uniforms. func (m *Mesh) Init(igeom geometry.IGeometry, imat material.IMaterial) { m.Graphic.Init(igeom, gls.TRIANGLES) // Initialize uniforms m.uniMVm.Init("ModelViewMatrix") m.uniMVPm.Init("MVP") m.uniNm.Init("NormalMatrix") // Adds single material if not nil if imat != nil { m.AddMaterial(imat, 0, 0) } } // AddMaterial adds a material for the specified subset of vertices. func (m *Mesh) AddMaterial(imat material.IMaterial, start, count int) { m.Graphic.AddMaterial(m, imat, start, count) } // AddGroupMaterial adds a material for the specified geometry group. func (m *Mesh) AddGroupMaterial(imat material.IMaterial, gindex int) { m.Graphic.AddGroupMaterial(m, imat, gindex) } // RenderSetup is called by the engine before drawing the mesh geometry // It is responsible to updating the current shader uniforms with // the model matrices. 
func (m *Mesh) RenderSetup(gs *gls.GLS, rinfo *core.RenderInfo) { // Transfer uniform for model view matrix mvm := m.ModelViewMatrix() location := m.uniMVm.Location(gs) gs.UniformMatrix4fv(location, 1, false, &mvm[0]) // Transfer uniform for model view projection matrix mvpm := m.ModelViewProjectionMatrix() location = m.uniMVPm.Location(gs) gs.UniformMatrix4fv(location, 1, false, &mvpm[0]) // Calculates normal matrix and transfer uniform var nm math32.Matrix3 nm.GetNormalMatrix(mvm) location = m.uniNm.Location(gs) gs.UniformMatrix3fv(location, 1, false, &nm[0]) } // Raycast checks intersections between this geometry and the specified raycaster // and if any found appends it to the specified intersects array. func (m *Mesh) Raycast(rc *core.Raycaster, intersects *[]core.Intersect) { // Transform this mesh geometry bounding sphere from model // to world coordinates and checks intersection with raycaster geom := m.GetGeometry() sphere := geom.BoundingSphere() matrixWorld := m.MatrixWorld() sphere.ApplyMatrix4(&matrixWorld) if !rc.IsIntersectionSphere(&sphere) { return } // Copy ray and transform to model coordinates // This ray will will also be used to check intersects with // the geometry, as is much less expensive to transform the // ray to model coordinates than the geometry to world coordinates. var inverseMatrix math32.Matrix4 inverseMatrix.GetInverse(&matrixWorld) var ray math32.Ray ray.Copy(&rc.Ray).ApplyMatrix4(&inverseMatrix) bbox := geom.BoundingBox() if !ray.IsIntersectionBox(&bbox) { return } // Local function to check the intersection of the ray from the raycaster with // the specified face defined by three poins. 
checkIntersection := func(mat *material.Material, pA, pB, pC, point *math32.Vector3) *core.Intersect { var intersect bool switch mat.Side() { case material.SideBack: intersect = ray.IntersectTriangle(pC, pB, pA, true, point) case material.SideFront: intersect = ray.IntersectTriangle(pA, pB, pC, true, point) case material.SideDouble: intersect = ray.IntersectTriangle(pA, pB, pC, false, point) } if !intersect { return nil } // Transform intersection point from model to world coordinates var intersectionPointWorld = *point intersectionPointWorld.ApplyMatrix4(&matrixWorld) // Calculates the distance from the ray origin to intersection point origin := rc.Ray.Origin() distance := origin.DistanceTo(&intersectionPointWorld) // Checks if distance is between the bounds of the raycaster if distance < rc.Near || distance > rc.Far { return nil } return &core.Intersect{ Distance: distance, Point: intersectionPointWorld, Object: m, } } // Get buffer with position vertices vboPos := geom.VBO("VertexPosition") if vboPos == nil { panic("mesh.Raycast(): VertexPosition VBO not found") } positions := vboPos.Buffer() indices := geom.Indices() var vA, vB, vC math32.Vector3 // Geometry has indexed vertices if indices.Size() > 0 { for i := 0; i < indices.Size(); i += 3 { // Get face indices a := indices[i] b := indices[i+1] c := indices[i+2] // Get face position vectors positions.GetVector3(int(3*a), &vA) positions.GetVector3(int(3*b), &vB) positions.GetVector3(int(3*c), &vC) // Checks intersection of the ray with this face mat := m.GetMaterial(i).GetMaterial() var point math32.Vector3 intersect := checkIntersection(mat, &vA, &vB, &vC, &point) if intersect != nil { intersect.Index = uint32(i) *intersects = append(*intersects, *intersect) } } // Geometry has NO indexed vertices } else { stride := vboPos.Stride() offset := vboPos.AttribOffset("VertexPosition") for i := offset; i < positions.Size(); i += stride { // Get face indices a := i / 3 b := a + 1 c := a + 2 // Set face position 
vectors positions.GetVector3(int(3*a), &vA) positions.GetVector3(int(3*b), &vB) positions.GetVector3(int(3*c), &vC) // Checks intersection of the ray with this face mat := m.GetMaterial(i).GetMaterial() var point math32.Vector3 intersect := checkIntersection(mat, &vA, &vB, &vC, &point) if intersect != nil { intersect.Index = uint32(a) *intersects = append(*intersects, *intersect) } } } }
graphic/mesh.go
0.774242
0.484563
mesh.go
starcoder
package caire import ( "image" "math" ) type kernel [][]int32 var ( kernelX kernel = kernel{ {-1, 0, 1}, {-2, 0, 2}, {-1, 0, 1}, } kernelY kernel = kernel{ {-1, -2, -1}, {0, 0, 0}, {1, 2, 1}, } ) // Detect image edges. // See https://en.wikipedia.org/wiki/Sobel_operator func SobelFilter(img *image.NRGBA, threshold float64) *image.NRGBA { var sumX, sumY int32 dx, dy := img.Bounds().Max.X, img.Bounds().Max.Y dst := image.NewNRGBA(img.Bounds()) // Get 3x3 window of pixels because image data given is just a 1D array of pixels maxPixelOffset := dx*dy + len(kernelX) - 1 data := getImageData(img) length := len(data) - maxPixelOffset magnitudes := make([]int32, length) for i := 0; i < length; i++ { // Sum each pixel with the kernel value sumX, sumY = 0, 0 for x := 0; x < len(kernelX); x++ { for y := 0; y < len(kernelY); y++ { px := data[i+(dx*y)+x] if len(px) > 0 { r := px[0] // We are using px[0] (i.e. R value) because the image is grayscale anyway sumX += int32(r) * kernelX[y][x] sumY += int32(r) * kernelY[y][x] } } } magnitude := math.Sqrt(float64(sumX*sumX) + float64(sumY*sumY)) // Check for pixel color boundaries if magnitude < 0 { magnitude = 0 } else if magnitude > 255 { magnitude = 255 } // Set magnitude to 0 if doesn't exceed threshold, else set to magnitude if magnitude > threshold { magnitudes[i] = int32(magnitude) } else { magnitudes[i] = 0 } } dataLength := dx * dy * 4 edges := make([]int32, dataLength) // Apply the kernel values. for i := 0; i < dataLength; i++ { if i%4 != 0 { m := magnitudes[i/4] if m != 0 { edges[i-1] = m } } } // Generate the new image with the sobel filter applied. for idx := 0; idx < len(edges); idx += 4 { dst.Pix[idx] = uint8(edges[idx]) dst.Pix[idx+1] = uint8(edges[idx+1]) dst.Pix[idx+2] = uint8(edges[idx+2]) dst.Pix[idx+3] = 255 } return dst } // Group pixels into 2D array, each one containing the pixel RGB value. 
func getImageData(img *image.NRGBA) [][]uint8 { dx, dy := img.Bounds().Max.X, img.Bounds().Max.Y pixels := make([][]uint8, dx*dy*4) for i := 0; i < len(pixels); i += 4 { pixels[i/4] = []uint8{ img.Pix[i], img.Pix[i+1], img.Pix[i+2], img.Pix[i+3], } } return pixels }
sobel.go
0.761804
0.536495
sobel.go
starcoder
package main // event struct used to read data from the perf ring buffer type event struct { // PID of the process making the syscall Pid uint32 // syscall number ID uint32 // Command which makes the syscall Command [16]byte // Stops tracing syscalls if true StopTracing bool } // the source is a bpf program compiled at runtime. Some macro's like // BPF_HASH and BPF_PERF_OUTPUT are expanded during compilation // by bcc. $PARENT_PID gets replaced before compilation with the PID of the container // Complete documentation is available at // https://github.com/iovisor/bcc/blob/master/docs/reference_guide.md const source string = ` #include <linux/bpf.h> #include <linux/nsproxy.h> #include <linux/pid_namespace.h> #include <linux/ns_common.h> #include <linux/sched.h> #include <linux/tracepoint.h> /* * mnt_namespace is defined in fs/mount.h and not part of the kernel headers. * Hence, we need a forward decl here to make the compiler eat the code. */ struct mnt_namespace { atomic_t count; struct ns_common ns; }; // BPF_HASH used to store the PID namespace of the parent PID // of the processes inside the container. BPF_HASH(parent_namespace, u64, unsigned int); BPF_HASH(seen_syscalls, int, u64); // Opens a custom BPF table to push data to user space via perf ring buffer BPF_PERF_OUTPUT(events); // data_t used to store the data received from the event struct syscall_data { // PID of the process u32 pid; // the syscall number u32 id; // command which is making the syscall char comm[16]; // Stops tracing syscalls if true bool stopTracing; }; // enter_trace : function is attached to the kernel tracepoint raw_syscalls:sys_enter it is // called whenever a syscall is made. The function stores the pid_namespace (task->nsproxy->pid_ns_for_children->ns.inum) of the PID which // starts the container in the BPF_HASH called parent_namespace. 
// The data of the syscall made by the process with the same pid_namespace as the parent_namespace is pushed to // userspace using perf ring buffer // specification of args from sys/kernel/debug/tracing/events/raw_syscalls/sys_enter/format int enter_trace(struct tracepoint__raw_syscalls__sys_enter* args) { struct syscall_data data = {}; u64 key = 0; unsigned int zero = 0; struct task_struct *task; struct nsproxy *nsproxy; struct mnt_namespace *mnt_ns; int id = (int)args->id; data.pid = bpf_get_current_pid_tgid(); data.id = id; bpf_get_current_comm(&data.comm, sizeof(data.comm)); task = (struct task_struct *)bpf_get_current_task(); nsproxy = task->nsproxy; mnt_ns = nsproxy->mnt_ns; unsigned int inum = mnt_ns->ns.inum; if (data.pid == $PARENT_PID) { parent_namespace.update(&key, &inum); } unsigned int* parent_inum = parent_namespace.lookup_or_init(&key, &zero); if (parent_inum != NULL && *parent_inum != inum) { return 0; } u64 seen = 0, *tmp = seen_syscalls.lookup(&id); if (tmp != NULL) seen = *tmp; // Syscalls are not recorded until prctl() is called. The first // invocation of prctl is guaranteed to happen by the supported // OCI runtimes (i.e., runc and crun) as it's being called when // setting the seccomp profile. if (id == __NR_prctl) { // The syscall was already notified. if (seen > 1) return 0; // The first time we see prctl, we record it without generating // any event. if (seen == 0) { goto record_and_exit; } } else { // The syscall was already notified. if (seen > 0) return 0; } data.stopTracing = false; events.perf_submit(args, &data, sizeof(data)); record_and_exit: seen++; seen_syscalls.update(&id, &seen); return 0; } // Checks if the container has exited int check_exit(struct tracepoint__sched__sched_process_exit* args) { if (args->pid == $PARENT_PID) { struct syscall_data data = {}; data.pid = args->pid; data.id = 0; data.stopTracing = true; events.perf_submit(args, &data, sizeof(data)); } return 0; } `
ebpf.go
0.532182
0.4184
ebpf.go
starcoder
package goqu import ( "time" "github.com/doug-martin/goqu/v9/internal/util" "github.com/doug-martin/goqu/v9/sqlgen" ) type DialectWrapper struct { dialect string } // Creates a new DialectWrapper to create goqu.Datasets or goqu.Databases with the specified dialect. func Dialect(dialect string) DialectWrapper { return DialectWrapper{dialect: dialect} } // Create a new dataset for creating SELECT sql statements func (dw DialectWrapper) From(table ...interface{}) *SelectDataset { return From(table...).WithDialect(dw.dialect) } // Create a new dataset for creating SELECT sql statements func (dw DialectWrapper) Select(cols ...interface{}) *SelectDataset { return newDataset(dw.dialect, nil).Select(cols...) } // Create a new dataset for creating UPDATE sql statements func (dw DialectWrapper) Update(table interface{}) *UpdateDataset { return Update(table).WithDialect(dw.dialect) } // Create a new dataset for creating INSERT sql statements func (dw DialectWrapper) Insert(table interface{}) *InsertDataset { return Insert(table).WithDialect(dw.dialect) } // Create a new dataset for creating DELETE sql statements func (dw DialectWrapper) Delete(table interface{}) *DeleteDataset { return Delete(table).WithDialect(dw.dialect) } // Create a new dataset for creating TRUNCATE sql statements func (dw DialectWrapper) Truncate(table ...interface{}) *TruncateDataset { return Truncate(table...).WithDialect(dw.dialect) } func (dw DialectWrapper) DB(db SQLDatabase) *Database { return newDatabase(dw.dialect, db) } func New(dialect string, db SQLDatabase) *Database { return newDatabase(dialect, db) } // Set the column rename function. This is used for struct fields that do not have a db tag to specify the column name // By default all struct fields that do not have a db tag will be converted lowercase func SetColumnRenameFunction(renameFunc func(string) string) { util.SetColumnRenameFunction(renameFunc) } // Set the location to use when interpolating time.Time instances. 
See https://golang.org/pkg/time/#LoadLocation // NOTE: This has no effect when using prepared statements. func SetTimeLocation(loc *time.Location) { sqlgen.SetTimeLocation(loc) }
goqu.go
0.688992
0.462655
goqu.go
starcoder
package chapter17 import "sort" // HeightWidth is a struct having height and width type HeightWidth struct { height int weight int } // returns true if main should be lined up before that. // note that it's possible that this.isBefore(that) and that.isBefore(this) are both false func (main HeightWidth) isBefore(that HeightWidth) bool { if main.height < that.height && main.weight < that.weight { return true } return false } func canAppend(solution []HeightWidth, value HeightWidth) bool { last := solution[len(solution)-1] return last.isBefore(value) } // LongestIncreasingSequence returns the longest way in which items can be stacked on top of each other. func LongestIncreasingSequence(items []HeightWidth) []HeightWidth { sort.Slice(items, func(i, j int) bool { first := items[i] second := items[j] if first.height != second.height { return first.height < second.height } return first.weight < second.weight }) var solutions [][]HeightWidth var bestSequence []HeightWidth // Find the longest sequence that terminates with each element // Track the longest overall subsequence as we go for i := 0; i < len(items); i++ { longestIndex := bestSequenceAtIndex(items, solutions, i) solutions = append(solutions, longestIndex) bestSequence = maxSequence(bestSequence, longestIndex) } return bestSequence } func maxSequence(seq1 []HeightWidth, seq2 []HeightWidth) []HeightWidth { if seq1 == nil { return seq2 } if seq2 == nil { return seq1 } if len(seq1) > len(seq2) { return seq1 } return seq2 } // find the longest sequence which terminates with this element func bestSequenceAtIndex(array []HeightWidth, solutions [][]HeightWidth, index int) []HeightWidth { value := array[index] var bestSequence []HeightWidth // find the longest subsequence that we can append this element to for i := 0; i < index; i++ { solution := solutions[i] if canAppend(solution, value) { bestSequence = maxSequence(solution, bestSequence) } } best := append([]HeightWidth{}, bestSequence...) return append(best, value) }
chapter17/8_circus_tower.go
0.773131
0.471406
8_circus_tower.go
starcoder
package csv import ( "encoding/csv" "fmt" "io" "sort" "github.com/attic-labs/noms/go/d" "github.com/attic-labs/noms/go/types" ) // StringToKind maps names of valid NomsKinds (e.g. Bool, Float32, etc) to their associated types.NomsKind var StringToKind = func(kindMap map[types.NomsKind]string) map[string]types.NomsKind { m := map[string]types.NomsKind{} for k, v := range kindMap { m[v] = k } return m }(types.KindToString) // StringsToKinds looks up each element of strs in the StringToKind map and returns a slice of answers func StringsToKinds(strs []string) KindSlice { kinds := make(KindSlice, len(strs)) for i, str := range strs { k, ok := StringToKind[str] d.PanicIfTrue(!ok, "StringToKind[%s] failed", str) kinds[i] = k } return kinds } // KindsToStrings looks up each element of kinds in the types.KindToString map and returns a slice of answers func KindsToStrings(kinds KindSlice) []string { strs := make([]string, len(kinds)) for i, k := range kinds { strs[i] = types.KindToString[k] } return strs } // MakeStructTypeFromHeaders creates a struct type from the headers using |kinds| as the type of each field. If |kinds| is empty, default to strings. 
func MakeStructTypeFromHeaders(headers []string, structName string, kinds KindSlice) (typ *types.Type, fieldOrder []int, kindMap []types.NomsKind) { useStringType := len(kinds) == 0 d.Chk.True(useStringType || len(headers) == len(kinds)) fieldMap := make(types.TypeMap, len(headers)) origOrder := make(map[string]int, len(headers)) fieldNames := make(sort.StringSlice, len(headers)) for i, key := range headers { fn := types.EscapeStructField(key) origOrder[fn] = i kind := types.StringKind if !useStringType { kind = kinds[i] } _, ok := fieldMap[fn] d.PanicIfTrue(ok, `Duplicate field name "%s"`, key) fieldMap[fn] = types.MakePrimitiveType(kind) fieldNames[i] = fn } sort.Sort(fieldNames) kindMap = make([]types.NomsKind, len(fieldMap)) fieldOrder = make([]int, len(fieldMap)) fieldTypes := make([]*types.Type, len(fieldMap)) for i, fn := range fieldNames { typ := fieldMap[fn] fieldTypes[i] = typ kindMap[i] = typ.Kind() fieldOrder[origOrder[fn]] = i } typ = types.MakeStructType(structName, fieldNames, fieldTypes) return } // Read takes a CSV reader and reads it into a typed List of structs. Each row gets read into a struct named structName, described by headers. If the original data contained headers it is expected that the input reader has already read those and are pointing at the first data row. // If kinds is non-empty, it will be used to type the fields in the generated structs; otherwise, they will be left as string-fields. // In addition to the list, Read returns the typeRef for the structs in the list, and last the typeDef of the structs. func ReadToList(r *csv.Reader, structName string, headers []string, kinds KindSlice, vrw types.ValueReadWriter) (l types.List, t *types.Type) { t, fieldOrder, kindMap := MakeStructTypeFromHeaders(headers, structName, kinds) valueChan := make(chan types.Value, 128) // TODO: Make this a function param? 
listChan := types.NewStreamingList(vrw, valueChan) for { row, err := r.Read() if err == io.EOF { close(valueChan) break } else if err != nil { panic(err) } fields := make(types.ValueSlice, len(headers)) for i, v := range row { if i < len(headers) { fieldOrigIndex := fieldOrder[i] val, err := StringToValue(v, kindMap[fieldOrigIndex]) if err != nil { d.Chk.Fail(fmt.Sprintf("Error parsing value for column '%s': %s", headers[i], err)) } fields[fieldOrigIndex] = val } } valueChan <- types.NewStructWithType(t, fields) } return <-listChan, t } func ReadToMap(r *csv.Reader, headersRaw []string, pkIdx int, kinds KindSlice, vrw types.ValueReadWriter) types.Map { headers := make([]string, 0, len(headersRaw)-1) for i, h := range headersRaw { if i != pkIdx { headers = append(headers, h) } } var pkKind types.NomsKind if len(kinds) == 0 { pkKind = types.StringKind } else { pkKind = kinds[pkIdx] kinds = append(kinds[:pkIdx], kinds[pkIdx+1:]...) } t, fieldOrder, kindMap := MakeStructTypeFromHeaders(headers, "", kinds) kvChan := make(chan types.Value, 128) mapChan := types.NewStreamingMap(vrw, kvChan) for { row, err := r.Read() if err == io.EOF { break } else if err != nil { panic(err) } fieldIndex := 0 var pk types.Value fields := make(types.ValueSlice, len(headers)) for x, v := range row { if x == pkIdx { pk, err = StringToValue(v, pkKind) } else if fieldIndex < len(headers) { fieldOrigIndex := fieldOrder[fieldIndex] fields[fieldOrigIndex], err = StringToValue(v, kindMap[fieldOrigIndex]) fieldIndex++ } if err != nil { d.Chk.Fail(fmt.Sprintf("Error parsing value for column '%s': %s", headers[x], err)) } } kvChan <- pk kvChan <- types.NewStructWithType(t, fields) } close(kvChan) return <-mapChan }
samples/go/csv/read.go
0.601008
0.406715
read.go
starcoder
package chow import ( "github.com/OpenWhiteBox/primitives/encoding" "github.com/OpenWhiteBox/primitives/matrix" "github.com/OpenWhiteBox/primitives/number" ) // findMatrix finds an invertible matrix in a basis. func findMatrix(basis []matrix.Row) matrix.Matrix { im := matrix.NewIncrementalMatrix(64) for _, row := range basis { im.Add(row) } size := im.Size() for i := 0; i < size; i++ { row := im.Row(i) cand := matrix.Matrix{} for _, v := range row { cand = append(cand, matrix.Row{v}) } if _, ok := cand.Invert(); ok { return cand } } panic("Couldn't find an invertible matrix in the given basis!") } // affineLayer implements methods for disambiguating an affine layer of the SPN. type affineLayer encoding.BlockAffine func (al affineLayer) Encode(in [16]byte) [16]byte { return encoding.BlockAffine(al).Encode(in) } func (al affineLayer) Decode(in [16]byte) [16]byte { return encoding.BlockAffine(al).Decode(in) } // clean gets the affine layer back to MixColumns and returns the input and output parasites. func (al *affineLayer) clean() (input, output encoding.ConcatenatedBlock) { // Clean off the non-GF(2^8) noise. for pos := 0; pos < 16; pos++ { input[pos] = al.inputParasite(pos) output[pos] = al.outputParasite(pos) } al.adjust(input, output) // Clean off as much of the GF(2^8) noise as possible. in, out := al.stripScalars() al.adjust(in, out) for pos := 0; pos < 16; pos++ { input[pos] = encoding.ComposedBytes{input[pos], in[pos]} output[pos] = encoding.ComposedBytes{out[pos], output[pos]} } return } // adjust fixes the affine layer for two concatenated block encodings which will be moved into the S-box layer. func (al *affineLayer) adjust(input, output encoding.ConcatenatedBlock) { temp, _ := encoding.DecomposeBlockAffine(encoding.ComposedBlocks{ encoding.InverseBlock{input}, encoding.BlockAffine(*al), encoding.InverseBlock{output}, }) *al = affineLayer(temp) } // inputParasite returns the non-GF(2^8) part of the parasite on the output at position col. 
func (al *affineLayer) inputParasite(col int) encoding.Byte { block := col / 4 row := 4 * (col / 4) row0, col0 := row%4, col%4 row1, col1 := (row0+1)%4, (col0+1)%4 rowT, colT := row1+(4*block), col1+(4*block) blockAA, blockAB := al.getBlock(row, col), al.getBlock(row, colT) blockBA, blockBB := al.getBlock(rowT, col), al.getBlock(rowT, colT) blockAA, _ = blockAA.Invert() blockBB, _ = blockBB.Invert() B := blockAA.Compose(blockAB).Compose(blockBB).Compose(blockBA) lambda := unMixColumns[row0][col0].Compose(mixColumns[row0][col1]). Compose(unMixColumns[row1][col1]).Compose(mixColumns[row1][col0]) return encoding.NewByteLinear(findMatrix( B.LeftStretch().Add(lambda.RightStretch()).NullSpace(), )) } // outputParasite returns the non-GF(2^8) part of the parasite on the output at position row. func (al *affineLayer) outputParasite(row int) encoding.Byte { block := row / 4 col := 4 * (row / 4) row0, col0 := row%4, col%4 row1, col1 := (row0+1)%4, (col0+1)%4 rowT, colT := row1+(4*block), col1+(4*block) blockAA, blockAB := al.getBlock(row, col), al.getBlock(row, colT) blockBA, blockBB := al.getBlock(rowT, col), al.getBlock(rowT, colT) blockAB, _ = blockAB.Invert() blockBA, _ = blockBA.Invert() B := blockAA.Compose(blockBA).Compose(blockBB).Compose(blockAB) lambda := mixColumns[row0][col0].Compose(unMixColumns[row1][col0]). Compose(mixColumns[row1][col1]).Compose(unMixColumns[row0][col1]) return encoding.NewByteLinear(findMatrix( B.RightStretch().Add(lambda.LeftStretch()).NullSpace(), )) } // stripScalars gets rid of unknown scalars in each block of the affine layer. It leaves it exactly equal to MixColumns, // but there is an unknown scalar in each block that will move into the S-box layers. 
func (al *affineLayer) stripScalars() (encoding.ConcatenatedBlock, encoding.ConcatenatedBlock) { input, output := [16]encoding.ByteLinear{}, [16]encoding.ByteLinear{} for pos := 0; pos < 16; pos += 4 { found := false for guess := 1; guess < 256 && !found; guess++ { // Take a guess for the input scalar on the first column. input[pos], _ = encoding.DecomposeByteLinear(encoding.NewByteMultiplication(number.ByteFieldElem(guess))) // Given input scalar on first column, calculate output scalars on all rows. for i := pos; i < pos+4; i++ { mc, _ := mixColumns[i%4][0].Invert() output[i] = encoding.NewByteLinear( al.getBlock(i, pos).Compose(input[pos].Backwards).Compose(mc), ) } // Given output scalar on each row, calculate input scalars on all columns. for i := pos + 1; i < pos+4; i++ { mc, _ := mixColumns[0][i%4].Invert() input[i] = encoding.NewByteLinear( al.getBlock(pos, i).Compose(output[pos].Backwards).Compose(mc), ) } // Verify that guess is consistent. found = true for i := pos; i < pos+4 && found; i++ { for j := pos; j < pos+4 && found; j++ { cand := al.getBlock(i, j).Compose(output[i].Backwards).Compose(input[j].Backwards) real := mixColumns[i%4][j%4] if !cand.Equals(real) { found = false } } } } if !found { panic("Failed to disambiguate block affine layer!") } } in, out := encoding.ConcatenatedBlock{}, encoding.ConcatenatedBlock{} for pos := 0; pos < 16; pos++ { in[pos], out[pos] = input[pos], output[pos] } return in, out } // getBlock returns the 8-by-8 block of the affine layer at the given position. func (al *affineLayer) getBlock(row, col int) matrix.Matrix { out := matrix.Matrix{} for i := 0; i < 8; i++ { out = append(out, matrix.Row{al.BlockLinear.Forwards[8*row+i][col]}) } return out }
cryptanalysis/chow/affine.go
0.83498
0.510192
affine.go
starcoder
package courier import ( "context" "fmt" "testing" "time" "github.com/bxcodec/faker/v3" "github.com/pkg/errors" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/gofrs/uuid" ) var ErrQueueEmpty = errors.New("queue is empty") type ( Persister interface { AddMessage(context.Context, *Message) error NextMessages(context.Context, uint8) ([]Message, error) SetMessageStatus(context.Context, uuid.UUID, MessageStatus) error LatestQueuedMessage(ctx context.Context) (*Message, error) } PersistenceProvider interface { CourierPersister() Persister } ) func TestPersister(p Persister) func(t *testing.T) { return func(t *testing.T) { t.Run("case=no messages in queue", func(t *testing.T) { m, err := p.NextMessages(context.Background(), 10) require.EqualError(t, err, ErrQueueEmpty.Error()) assert.Len(t, m, 0) _, err = p.LatestQueuedMessage(context.Background()) require.EqualError(t, err, ErrQueueEmpty.Error()) }) messages := make([]Message, 5) t.Run("case=add messages to the queue", func(t *testing.T) { for k := range messages { require.NoError(t, faker.FakeData(&messages[k])) require.NoError(t, p.AddMessage(context.Background(), &messages[k])) time.Sleep(time.Second) // wait a bit so that the timestamp ordering works in MySQL. 
} }) t.Run("case=latest message in queue", func(t *testing.T) { expected, err := p.LatestQueuedMessage(context.Background()) require.NoError(t, err) actual := messages[len(messages)-1] assert.Equal(t, expected.ID, actual.ID) assert.Equal(t, expected.Subject, actual.Subject) }) t.Run("case=pull messages from the queue", func(t *testing.T) { for k, expected := range messages { t.Run(fmt.Sprintf("message=%d", k), func(t *testing.T) { messages, err := p.NextMessages(context.Background(), 1) require.NoError(t, err) require.Len(t, messages, 1) actual := messages[0] assert.Equal(t, expected.ID, actual.ID) assert.Equal(t, expected.Subject, actual.Subject) assert.Equal(t, expected.Body, actual.Body) assert.Equal(t, expected.Status, actual.Status) assert.Equal(t, expected.Type, actual.Type) assert.Equal(t, expected.Recipient, actual.Recipient) require.NoError(t, p.SetMessageStatus(context.Background(), actual.ID, MessageStatusSent)) }) } _, err := p.NextMessages(context.Background(), 10) require.EqualError(t, err, ErrQueueEmpty.Error()) }) t.Run("case=setting message status", func(t *testing.T) { require.NoError(t, p.SetMessageStatus(context.Background(), messages[0].ID, MessageStatusQueued)) ms, err := p.NextMessages(context.Background(), 1) require.NoError(t, err) require.Len(t, ms, 1) assert.Equal(t, messages[0].ID, ms[0].ID) require.NoError(t, p.SetMessageStatus(context.Background(), messages[0].ID, MessageStatusSent)) _, err = p.NextMessages(context.Background(), 1) require.EqualError(t, err, ErrQueueEmpty.Error()) }) } }
courier/persistence.go
0.599368
0.538983
persistence.go
starcoder
// Package criteria implements a small recursive-descent parser for endly
// criteria expressions, e.g. `$a = 1 && ($b > 2 || $c : /expected/)`.
package criteria

import (
	"fmt"
	"github.com/viant/toolbox"
	"strings"
)

// Token identifiers recognised by the tokenizer (values from iota).
const (
	undefined int = iota
	eof
	illegal
	whitespaces
	operand
	operator
	logicalOperator
	assertlyExprMatcher
	quoted
	jsonObject
	jsonArray
	grouping
)

// matchers maps each token identifier to the toolbox matcher that
// recognises it in the input stream. Note that grouping/JSON/quoted
// bodies are matched as whole balanced spans, delimiters included.
var matchers = map[int]toolbox.Matcher{
	eof:         toolbox.EOFMatcher{},
	whitespaces: toolbox.CharactersMatcher{" \n\t"},
	operand:     toolbox.NewCustomIdMatcher(".", "_", "$", "[", "]", "{", "}", "!", "-", "(", ")", "/", "\\", "+", "-", "*"),
	operator: toolbox.KeywordsMatcher{
		Keywords:      []string{"=", ">=", "<=", "<>", ">", "<", "!=", ":"},
		CaseSensitive: false,
	},
	logicalOperator: toolbox.KeywordsMatcher{
		Keywords:      []string{"&&", "||"},
		CaseSensitive: false,
	},
	quoted:              &toolbox.BodyMatcher{"'", "'"},
	grouping:            &toolbox.BodyMatcher{"(", ")"},
	jsonObject:          &toolbox.BodyMatcher{"{", "}"},
	jsonArray:           &toolbox.BodyMatcher{"[", "]"},
	assertlyExprMatcher: toolbox.NewSequenceMatcher("&&", "||"),
}

//Parser represents endly criteria parser
type Parser struct{}

// expectOptionalWhitespaceFollowedBy scans the next token, skipping at most
// one run of leading whitespace, and requires it to be one of `expected`.
// expectedTokensMessage is used only to build the error message.
// Returns the matched token, or an illegalTokenParsingError on
// illegal input or unexpected EOF.
func (p *Parser) expectOptionalWhitespaceFollowedBy(tokenizer *toolbox.Tokenizer, expectedTokensMessage string, expected ...int) (*toolbox.Token, error) {
	// Whitespace is always acceptable in front of the expected tokens.
	var expectedTokens = make([]int, 0)
	expectedTokens = append(expectedTokens, whitespaces)
	expectedTokens = append(expectedTokens, expected...)

	token := tokenizer.Nexts(expectedTokens...)

	// EOF is only acceptable when the caller listed it explicitly.
	if token.Token == eof && !toolbox.HasSliceAnyElements(expectedTokens, eof) {
		return nil, newIllegalTokenParsingError(tokenizer.Index, expectedTokensMessage)
	}
	if token.Token == illegal {
		return nil, newIllegalTokenParsingError(tokenizer.Index, expectedTokensMessage)
	}
	if token.Token == whitespaces {
		// Consumed the optional whitespace; now the real token must follow.
		token = tokenizer.Nexts(expected...)
	}
	if token.Token == illegal {
		return nil, newIllegalTokenParsingError(tokenizer.Index, expectedTokensMessage)
	}
	// An eof token carrying matched text means trailing garbage before EOF.
	if token.Token == eof && len(token.Matched) > 0 {
		return nil, newIllegalTokenParsingError(tokenizer.Index, expectedTokensMessage)
	}
	return token, nil
}

//Parse parses supplied expression. It returns criteria or parsing error.
//
// The loop alternates between reading an operand (or a parenthesised
// sub-expression), an optional operator with its right operand, and a
// logical conjunction. When the conjunction changes (e.g. from && to ||)
// the remaining criteria are nested into a new sub-Predicate so that the
// original conjunction grouping is preserved.
func (p *Parser) Parse(expression string) (*Predicate, error) {
	result := NewPredicate("")
	tokenizer := toolbox.NewTokenizer(expression, illegal, eof, matchers)
	var criterion *Criterion
	var leftOperandTokens = []int{quoted, jsonObject, jsonArray, grouping, operand, operator}
	var rightOperandTokens = []int{quoted, jsonObject, jsonArray, operand, operator}
	// parsingCriteria points at the predicate currently being appended to;
	// it moves to a nested predicate whenever the conjunction changes.
	parsingCriteria := result
outer:
	for {
		expectedTokens := leftOperandTokens
		if criterion != nil && criterion.Operator != "" {
			expectedTokens = rightOperandTokens
		}
		token, err := p.expectOptionalWhitespaceFollowedBy(tokenizer, "id or grouping expression", expectedTokens...)
		if err != nil {
			return nil, err
		}
		switch token.Token {
		case grouping:
			// Strip the surrounding parentheses and parse recursively.
			groupingExpression := string(token.Matched[1 : len(token.Matched)-1])
			criteria, err := p.Parse(groupingExpression)
			if err != nil {
				return nil, err
			}
			if len(parsingCriteria.Criteria) == 0 {
				// First element: flatten the group into the current predicate.
				parsingCriteria.Criteria = criteria.Criteria
				parsingCriteria.LogicalOperator = criteria.LogicalOperator
			} else {
				parsingCriteria.Criteria = append(parsingCriteria.Criteria, &Criterion{
					Predicate: criteria,
				})
			}
		case operand, jsonObject, jsonArray, quoted:
			var matched = token.Matched
			if token.Token == quoted {
				matched = strings.Trim(token.Matched, "' ")
			}
			criterion = &Criterion{
				LeftOperand: matched,
			}
			parsingCriteria.Criteria = append(parsingCriteria.Criteria, criterion)
		case operator:
			// Operator with no left operand: start an empty criterion.
			criterion = &Criterion{}
			parsingCriteria.Criteria = append(parsingCriteria.Criteria, criterion)
		}
		if token.Token != operator {
			token, err = p.expectOptionalWhitespaceFollowedBy(tokenizer, "operator", operator, logicalOperator, eof)
			if err != nil {
				return nil, err
			}
		}
		if token.Token == eof {
			break outer
		} else if token.Token == operator {
			criterion.Operator = token.Matched
			if criterion.Operator == ":" {
				// assertly expression: consume everything up to the next && or ||.
				token, err = p.expectOptionalWhitespaceFollowedBy(tokenizer, "right operand", assertlyExprMatcher, eof)
			} else {
				token, err = p.expectOptionalWhitespaceFollowedBy(tokenizer, "right operand", quoted, jsonObject, jsonArray, operand, eof)
			}
			if err != nil {
				return nil, err
			}
			if token.Token == eof {
				break outer
			}
			var matched = token.Matched
			if token.Token == quoted {
				matched = strings.Trim(token.Matched, "' ")
			}
			criterion.RightOperand = matched
			token, err = p.expectOptionalWhitespaceFollowedBy(tokenizer, "logical conjunction", logicalOperator, eof)
			if err != nil {
				return nil, err
			}
			if token.Token == eof {
				break outer
			}
		}
		// token is now a logical operator (&& or ||).
		if parsingCriteria.LogicalOperator == "" {
			parsingCriteria.LogicalOperator = token.Matched
		}
		if parsingCriteria.LogicalOperator == token.Matched {
			continue
		}
		// Conjunction changed: nest the remainder in a new sub-predicate.
		conjunctionCriterion := &Criterion{}
		parsingCriteria.Criteria = append(parsingCriteria.Criteria, conjunctionCriterion)
		parsingCriteria = NewPredicate(token.Matched)
		conjunctionCriterion.Predicate = parsingCriteria
		criterion = nil
	}
	return result, nil
}

//NewParser creates a new criteria parser
func NewParser() *Parser {
	return &Parser{}
}

// illegalTokenParsingError reports an unexpected token, carrying the
// input position and a description of what was expected.
type illegalTokenParsingError struct {
	Index    int
	Expected string
	error    string
}

// Error implements the error interface.
func (e illegalTokenParsingError) Error() string {
	return e.error
}

// newIllegalTokenParsingError builds an illegalTokenParsingError with a
// pre-formatted message for the given position and expectation.
func newIllegalTokenParsingError(index int, expected string) error {
	return &illegalTokenParsingError{Index: index, Expected: expected, error: fmt.Sprintf("illegal token at %v, expected %v", index, expected)}
}
criteria/parser.go
0.693473
0.449816
parser.go
starcoder
package vox

import (
	"fmt"
	"math"
)

// A DenseWorld is an arbitrarily sized voxel model.
type DenseWorld struct {
	Min, Max [3]int // The range of coordinates that are valid.

	// A slice large enough to contain every voxel.
	// Voxels[0] is the voxel Min, and the last
	// element in the slice is the voxel Max.
	// The elements are stored in X, Y, Z min to max significance.
	Voxels []uint8
}

// NewDenseWorld creates a new dense world for the given cuboid.
// Both bounds are inclusive; max must be >= min on every axis.
func NewDenseWorld(min, max [3]int) (*DenseWorld, error) {
	if max[0] < min[0] || max[1] < min[1] || max[2] < min[2] {
		return nil, fmt.Errorf("the upper bounds of the cuboid %v must be at least as large as the lower bounds %v", max, min)
	}
	// Inclusive bounds, hence the +1 on each axis.
	sx := max[0] - min[0] + 1
	sy := max[1] - min[1] + 1
	sz := max[2] - min[2] + 1
	return &DenseWorld{min, max, make([]uint8, sx*sy*sz)}, nil
}

// Cuboid returns the size of the world.
func (d *DenseWorld) Cuboid() (min, max [3]int) {
	return d.Min, d.Max
}

// Resize changes the size of the world, copying voxels
// where the areas overlap. This is an expensive operation.
func (d *DenseWorld) Resize(min, max [3]int) error {
	if min == d.Min && max == d.Max {
		return nil
	}
	ndw, err := NewDenseWorld(min, max)
	if err != nil {
		return err
	}
	SX := d.Max[0] - d.Min[0] + 1
	SY := d.Max[1] - d.Min[1] + 1
	for i, c := range d.Voxels {
		// Invert the z*(SX*SY)+y*SX+x linear index back into coordinates.
		x := i % SX
		y := ((i - x) / SX) % SY
		z := ((i - x) / SX) / SY
		// Out-of-range targets are silently dropped by SetMaterialIndex.
		ndw.SetMaterialIndex([3]int{x + d.Min[0], y + d.Min[1], z + d.Min[2]}, c)
	}
	*d = *ndw
	return nil
}

// MaterialIndex returns the given voxel material.
// The second result is false when c lies outside the world bounds.
func (d *DenseWorld) MaterialIndex(c [3]int) (uint8, bool) {
	SX := d.Max[0] - d.Min[0] + 1
	SY := d.Max[1] - d.Min[1] + 1
	SZ := d.Max[2] - d.Min[2] + 1
	x := c[0] - d.Min[0]
	y := c[1] - d.Min[1]
	z := c[2] - d.Min[2]
	if x < 0 || x >= SX || y < 0 || y >= SY || z < 0 || z >= SZ {
		// out of range
		return 0, false
	}
	return d.Voxels[z*(SX*SY)+y*SX+x], true
}

// SetMaterialIndex sets the given voxel to the given material index.
// It reports if the assignment succeeded.
func (d *DenseWorld) SetMaterialIndex(c [3]int, matIdx uint8) bool {
	SX := d.Max[0] - d.Min[0] + 1
	SY := d.Max[1] - d.Min[1] + 1
	SZ := d.Max[2] - d.Min[2] + 1
	x := c[0] - d.Min[0]
	y := c[1] - d.Min[1]
	z := c[2] - d.Min[2]
	if x < 0 || x >= SX || y < 0 || y >= SY || z < 0 || z >= SZ {
		// out of range
		return false
	}
	d.Voxels[z*(SX*SY)+y*SX+x] = matIdx
	return true
}

// addVec returns the component-wise sum of two integer vectors.
func addVec(a, b [3]int) [3]int {
	return [3]int{a[0] + b[0], a[1] + b[1], a[2] + b[2]}
}

// abs returns the absolute value of an int.
func abs(x int) int {
	if x < 0 {
		return -x
	}
	return x
}

// DenseWorldFromModel takes a magicavoxel transform and a model, and builds
// a DenseWorld from it.
func DenseWorldFromModel(tf TransformFrame, m Model) (*DenseWorld, error) {
	mat := tf.R
	// Rotate the model's extent; components may come out negative,
	// so take absolute values. The -1 converts a size to an inclusive span.
	v := [3]int{m.X, m.Y, m.Z}
	mv := mat.MulVec(v)
	mv[0] = abs(mv[0]) - 1
	mv[1] = abs(mv[1]) - 1
	mv[2] = abs(mv[2]) - 1
	// magicvoxel puts the majority of the voxel block on the positive side of the zero axis.
	min := [3]int{-(mv[0] / 2), -(mv[1] / 2), -(mv[2] / 2)}
	max := [3]int{mv[0] + min[0], mv[1] + min[1], mv[2] + min[2]}
	// Apply the frame's translation to both bounds.
	T := [3]int{int(tf.T[0]), int(tf.T[1]), int(tf.T[2])}
	min = addVec(min, T)
	max = addVec(max, T)
	dw, err := NewDenseWorld(min, max)
	if err != nil {
		return nil, err
	}
	// find the corner of the model that maps to the smallest point.
	// NOTE(review): a corner is accepted only when ALL THREE components are
	// <= the running minimum; for some rotations no single corner dominates
	// component-wise and minCorner could stay at MaxInt64 — confirm this
	// holds for every valid magicavoxel rotation matrix.
	minCorner := [3]int{math.MaxInt64, math.MaxInt64, math.MaxInt64}
	for i := 0; i <= 1; i++ {
		for j := 0; j <= 1; j++ {
			for k := 0; k <= 1; k++ {
				x := [3]int{i * (m.X - 1), j * (m.Y - 1), k * (m.Z - 1)}
				mx := mat.MulVec(x)
				if mx[0] <= minCorner[0] && mx[1] <= minCorner[1] && mx[2] <= minCorner[2] {
					minCorner = mx
				}
			}
		}
	}
	// The translation that maps the unrotated model into the dense world coordinate space.
	trn := [3]int{min[0] - minCorner[0], min[1] - minCorner[1], min[2] - minCorner[2]}
	for _, vox := range m.V {
		voxLoc := [3]int{int(vox.X), int(vox.Y), int(vox.Z)}
		rv := mat.MulVec(voxLoc)
		rv = addVec(rv, trn)
		ok := dw.SetMaterialIndex(rv, vox.ColorIndex)
		if !ok {
			return nil, fmt.Errorf("rotation/translation is messed up. %x * %v = %v, out of bounds %v, %v", mat, voxLoc, rv, min, max)
		}
	}
	return dw, nil
}
world.go
0.804329
0.47171
world.go
starcoder
package input

import (
	"github.com/Jeffail/benthos/v3/internal/docs"
	"github.com/Jeffail/benthos/v3/lib/input/reader"
	"github.com/Jeffail/benthos/v3/lib/log"
	"github.com/Jeffail/benthos/v3/lib/metrics"
	"github.com/Jeffail/benthos/v3/lib/types"
	"github.com/Jeffail/benthos/v3/lib/util/aws/session"
)

//------------------------------------------------------------------------------

// init registers the SQS input constructor and its documentation under
// TypeSQS in the global Constructors table.
func init() {
	Constructors[TypeSQS] = TypeSpec{
		constructor: NewAmazonSQS,
		Summary: `
Receive messages from an Amazon SQS URL.`,
		Description: `
### Credentials

By default Benthos will use a shared credentials file when connecting to AWS
services. It's also possible to set them explicitly at the component level,
allowing you to transfer data across accounts. You can find out more
[in this document](/docs/guides/aws).

### Metadata

This input adds the following metadata fields to each message:

` + "```text" + `
- sqs_message_id
- sqs_receipt_handle
- sqs_approximate_receive_count
- All message attributes
` + "```" + `

You can access these metadata fields using
[function interpolation](/docs/configuration/interpolation#metadata).`,
		// Field specs: common fields first, then the shared AWS session
		// fields, then the advanced tuning knobs.
		FieldSpecs: append(
			append(docs.FieldSpecs{
				docs.FieldCommon("url", "The SQS URL to consume from."),
				docs.FieldAdvanced("delete_message", "Whether to delete the consumed message once it is acked. Disabling allows you to handle the deletion using a different mechanism."),
			}, session.FieldSpecs()...),
			docs.FieldAdvanced("timeout", "The period of time to wait before abandoning a request and trying again."),
			docs.FieldAdvanced("max_number_of_messages", "The maximum number of messages to consume from each request."),
		),
		Categories: []Category{
			CategoryServices,
			CategoryAWS,
		},
	}
}

//------------------------------------------------------------------------------

// NewAmazonSQS creates a new AWS SQS input type.
// It wraps the reader implementation in an async reader; acks are
// bundled via NewAsyncBundleUnacks before being sent upstream.
func NewAmazonSQS(conf Config, mgr types.Manager, log log.Modular, stats metrics.Type) (Type, error) {
	s, err := reader.NewAmazonSQS(conf.SQS, log, stats)
	if err != nil {
		return nil, err
	}
	return NewAsyncReader(TypeSQS, true, reader.NewAsyncBundleUnacks(s), log, stats)
}

//------------------------------------------------------------------------------
lib/input/sqs.go
0.739893
0.591458
sqs.go
starcoder
package fzf

import (
	"bytes"
	"fmt"
	"regexp"
	"strconv"
	"strings"

	"github.com/karnh/fzf/src/util"
)

// rangeEllipsis is the sentinel value meaning "open end" in a Range
// (e.g. the missing side of "..3" or "3..").
const rangeEllipsis = 0

// Range represents nth-expression
type Range struct {
	begin int
	end   int
}

// Token contains the tokenized part of the strings and its prefix length
type Token struct {
	text         *util.Chars
	prefixLength int32
}

// String returns the string representation of a Token.
func (t Token) String() string {
	return fmt.Sprintf("Token{text: %s, prefixLength: %d}", t.text, t.prefixLength)
}

// Delimiter for tokenizing the input
type Delimiter struct {
	regex *regexp.Regexp
	str   *string
}

// String returns the string representation of a Delimeter.
func (d Delimiter) String() string {
	return fmt.Sprintf("Delimiter{regex: %v, str: &%q}", d.regex, *d.str)
}

// newRange builds a Range, normalizing "1" and "-1" to the open-ended
// sentinel (a range starting at 1 or ending at -1 spans to the edge anyway).
func newRange(begin int, end int) Range {
	if begin == 1 {
		begin = rangeEllipsis
	}
	if end == -1 {
		end = rangeEllipsis
	}
	return Range{begin, end}
}

// ParseRange parses nth-expression and returns the corresponding Range object
// Accepted forms: "..", "..N", "N..", "N..M", and plain "N".
// 0 is rejected everywhere since token indices are 1-based (negatives count
// from the end).
func ParseRange(str *string) (Range, bool) {
	if (*str) == ".." {
		return newRange(rangeEllipsis, rangeEllipsis), true
	} else if strings.HasPrefix(*str, "..") {
		end, err := strconv.Atoi((*str)[2:])
		if err != nil || end == 0 {
			return Range{}, false
		}
		return newRange(rangeEllipsis, end), true
	} else if strings.HasSuffix(*str, "..") {
		begin, err := strconv.Atoi((*str)[:len(*str)-2])
		if err != nil || begin == 0 {
			return Range{}, false
		}
		return newRange(begin, rangeEllipsis), true
	} else if strings.Contains(*str, "..") {
		ns := strings.Split(*str, "..")
		if len(ns) != 2 {
			return Range{}, false
		}
		begin, err1 := strconv.Atoi(ns[0])
		end, err2 := strconv.Atoi(ns[1])
		if err1 != nil || err2 != nil || begin == 0 || end == 0 {
			return Range{}, false
		}
		return newRange(begin, end), true
	}

	n, err := strconv.Atoi(*str)
	if err != nil || n == 0 {
		return Range{}, false
	}
	return newRange(n, n), true
}

// withPrefixLengths wraps each string in a Token whose prefixLength is the
// rune offset of that piece within the original input, starting at `begin`.
func withPrefixLengths(tokens []string, begin int) []Token {
	ret := make([]Token, len(tokens))

	prefixLength := begin
	for idx := range tokens {
		chars := util.ToChars([]byte(tokens[idx]))
		ret[idx] = Token{&chars, int32(prefixLength)}
		prefixLength += chars.Length()
	}
	return ret
}

// States of the awk-style tokenizer state machine.
const (
	awkNil   = iota // only whitespace seen so far
	awkBlack        // inside a non-whitespace run
	awkWhite        // inside trailing whitespace attached to the previous run
)

// awkTokenizer splits input like awk does (\S+\s*): each token keeps its
// trailing whitespace. Returns the tokens and the length of the leading
// whitespace prefix. Only ASCII tab (9) and space (32) count as whitespace.
func awkTokenizer(input string) ([]string, int) {
	// 9, 32
	ret := []string{}
	prefixLength := 0
	state := awkNil
	begin := 0
	end := 0
	for idx := 0; idx < len(input); idx++ {
		r := input[idx]
		white := r == 9 || r == 32
		switch state {
		case awkNil:
			if white {
				prefixLength++
			} else {
				state, begin, end = awkBlack, idx, idx+1
			}
		case awkBlack:
			end = idx + 1
			if white {
				state = awkWhite
			}
		case awkWhite:
			if white {
				end = idx + 1
			} else {
				// New word starts: flush the previous token (with its
				// trailing whitespace) and restart.
				ret = append(ret, input[begin:end])
				state, begin, end = awkBlack, idx, idx+1
			}
		}
	}
	if begin < end {
		ret = append(ret, input[begin:end])
	}
	return ret, prefixLength
}

// Tokenize tokenizes the given string with the delimiter
func Tokenize(text string, delimiter Delimiter) []Token {
	if delimiter.str == nil && delimiter.regex == nil {
		// AWK-style (\S+\s*)
		tokens, prefixLength := awkTokenizer(text)
		return withPrefixLengths(tokens, prefixLength)
	}

	if delimiter.str != nil {
		return withPrefixLengths(strings.SplitAfter(text, *delimiter.str), 0)
	}

	// FIXME performance
	var tokens []string
	if delimiter.regex != nil {
		for len(text) > 0 {
			loc := delimiter.regex.FindStringIndex(text)
			if len(loc) < 2 {
				loc = []int{0, len(text)}
			}
			// Always consume at least one byte to guarantee progress even
			// when the regex matches an empty string at position 0.
			last := util.Max(loc[1], 1)
			tokens = append(tokens, text[:last])
			text = text[last:]
		}
	}
	return withPrefixLengths(tokens, 0)
}

// joinTokens concatenates the text of all tokens into a single string.
func joinTokens(tokens []Token) string {
	var output bytes.Buffer
	for _, token := range tokens {
		output.WriteString(token.text.ToString())
	}
	return output.String()
}

// Transform is used to transform the input when --with-nth option is given
// For each requested Range it selects the matching tokens (1-based;
// negative indices count from the end; rangeEllipsis is open-ended),
// merges them into a single token, and carries over the prefix length of
// the first selected token.
func Transform(tokens []Token, withNth []Range) []Token {
	transTokens := make([]Token, len(withNth))
	numTokens := len(tokens)
	for idx, r := range withNth {
		parts := []*util.Chars{}
		minIdx := 0
		if r.begin == r.end {
			// Single index (or the full ".." range).
			idx := r.begin
			if idx == rangeEllipsis {
				chars := util.ToChars([]byte(joinTokens(tokens)))
				parts = append(parts, &chars)
			} else {
				if idx < 0 {
					idx += numTokens + 1
				}
				if idx >= 1 && idx <= numTokens {
					minIdx = idx - 1
					parts = append(parts, tokens[idx-1].text)
				}
			}
		} else {
			var begin, end int
			if r.begin == rangeEllipsis { // ..N
				begin, end = 1, r.end
				if end < 0 {
					end += numTokens + 1
				}
			} else if r.end == rangeEllipsis { // N..
				begin, end = r.begin, numTokens
				if begin < 0 {
					begin += numTokens + 1
				}
			} else {
				begin, end = r.begin, r.end
				if begin < 0 {
					begin += numTokens + 1
				}
				if end < 0 {
					end += numTokens + 1
				}
			}
			minIdx = util.Max(0, begin-1)
			for idx := begin; idx <= end; idx++ {
				if idx >= 1 && idx <= numTokens {
					parts = append(parts, tokens[idx-1].text)
				}
			}
		}
		// Merge multiple parts
		var merged util.Chars
		switch len(parts) {
		case 0:
			merged = util.ToChars([]byte{})
		case 1:
			merged = *parts[0]
		default:
			var output bytes.Buffer
			for _, part := range parts {
				output.WriteString(part.ToString())
			}
			merged = util.ToChars(output.Bytes())
		}

		// Preserve the prefix length of the first selected token so that
		// match positions can be mapped back to the original line.
		var prefixLength int32
		if minIdx < numTokens {
			prefixLength = tokens[minIdx].prefixLength
		} else {
			prefixLength = 0
		}
		transTokens[idx] = Token{&merged, prefixLength}
	}
	return transTokens
}
src/tokenizer.go
0.624752
0.435902
tokenizer.go
starcoder
package iso20022

// Parameters for contracts which obligate the buyer to receive and the seller to deliver in the future the assets specified at an agreed price or contracts which grant to the holder either the privilege to purchase or the privilege to sell the assets specified at a predetermined price or formula at or within a time in the future.
type FutureOrOptionDetails1 struct {

	// Specifies the type of the contract for futures and options.
	FutureAndOptionContractType *FutureAndOptionContractType1Code `xml:"FutrAndOptnCtrctTp,omitempty"`

	// Last date/time by which the option for physical delivery may still be exercised.
	LastDeliveryDate *ISODateTime `xml:"LastDlvryDt,omitempty"`

	// Used to indicate the size of the underlying commodity on which the contract is based (e.g., 2500 lbs of lean cattle, 1000 barrels of crude oil, 1000 bushels of corn, etc.)
	UnitOfMeasure *UnitOfMeasure1Code `xml:"UnitOfMeasr,omitempty"`

	// Date on which future contracts settle.
	FutureDate *ISODateTime `xml:"FutrDt,omitempty"`

	// Specifies the minimum ratio or multiply factor used to convert from contracts to shares.
	MinimumSize *ActiveCurrencyAndAmount `xml:"MinSz,omitempty"`

	// Date/time, as announced by the issuer, at which the securities will be issued.
	AnnouncementDate *ISODateTime `xml:"AnncmntDt,omitempty"`

	// Specifies the deliverability of a security.
	Appearance *Appearance1Code `xml:"Apprnc,omitempty"`

	// Indicates whether the interest is separable from the principal.
	StrippableIndicator *YesNoIndicator `xml:"StrpblInd,omitempty"`

	// Indicates the maximum number of listed option contracts on a single security which can be held by an investor or group of investors acting jointly.
	PositionLimit *Number `xml:"PosLmt,omitempty"`

	// Position limit in the near-term contract for a given exchange-traded product.
	NearTermPositionLimit *Number `xml:"NearTermPosLmt,omitempty"`

	// Minimum price increase for a given exchange-traded Instrument
	MinimumTradingPricingIncrement *Number `xml:"MinTradgPricgIncrmt,omitempty"`

	// Reason for which money is raised through the issuance of a security.
	Purpose *Max256Text `xml:"Purp,omitempty"`

	// Specifies when the contract (i.e. MBS/TBA) will settle.
	ContractSettlementMonth *ISOYearMonth `xml:"CtrctSttlmMnth,omitempty"`

	// Date on which new securities begin trading.
	FirstDealingDate *DateAndDateTime1Choice `xml:"FrstDealgDt,omitempty"`

	// Ratio applied to convert the related security.
	Ratio []*UnderlyingRatio1 `xml:"Ratio,omitempty"`

	// Rating(s) of the security.
	Rating []*Rating1 `xml:"Ratg,omitempty"`

	// Initial issue price of a financial instrument.
	IssuePrice *Price4 `xml:"IssePric,omitempty"`

	// Rights to exercise the privilege to purchase or to sell the assets specified at a predetermined price or formula at or within a time in the future.
	OptionRights *OptionRight1Choice `xml:"OptnRghts,omitempty"`

	// Indicates whether or not this is the last transaction.
	LastTransaction *YesNoIndicator `xml:"LastTx,omitempty"`

	// Specifies that there will be one price and one transaction when two contracts are carried out simultaneously, one to buy and the other one to sell with two different expiration dates.
	SpreadTransaction *YesNoIndicator `xml:"SprdTx,omitempty"`
}

// SetFutureAndOptionContractType sets the contract type code.
func (f *FutureOrOptionDetails1) SetFutureAndOptionContractType(value string) {
	f.FutureAndOptionContractType = (*FutureAndOptionContractType1Code)(&value)
}

// SetLastDeliveryDate sets the last physical-delivery exercise date/time.
func (f *FutureOrOptionDetails1) SetLastDeliveryDate(value string) {
	f.LastDeliveryDate = (*ISODateTime)(&value)
}

// SetUnitOfMeasure sets the underlying commodity's unit of measure code.
func (f *FutureOrOptionDetails1) SetUnitOfMeasure(value string) {
	f.UnitOfMeasure = (*UnitOfMeasure1Code)(&value)
}

// SetFutureDate sets the future contract settlement date/time.
func (f *FutureOrOptionDetails1) SetFutureDate(value string) {
	f.FutureDate = (*ISODateTime)(&value)
}

// SetMinimumSize sets the minimum size as an amount with its currency.
func (f *FutureOrOptionDetails1) SetMinimumSize(value, currency string) {
	f.MinimumSize = NewActiveCurrencyAndAmount(value, currency)
}

// SetAnnouncementDate sets the issuer's announcement date/time.
func (f *FutureOrOptionDetails1) SetAnnouncementDate(value string) {
	f.AnnouncementDate = (*ISODateTime)(&value)
}

// SetAppearance sets the deliverability code of the security.
func (f *FutureOrOptionDetails1) SetAppearance(value string) {
	f.Appearance = (*Appearance1Code)(&value)
}

// SetStrippableIndicator sets whether interest is separable from principal.
func (f *FutureOrOptionDetails1) SetStrippableIndicator(value string) {
	f.StrippableIndicator = (*YesNoIndicator)(&value)
}

// SetPositionLimit sets the maximum number of contracts holdable.
func (f *FutureOrOptionDetails1) SetPositionLimit(value string) {
	f.PositionLimit = (*Number)(&value)
}

// SetNearTermPositionLimit sets the near-term contract position limit.
func (f *FutureOrOptionDetails1) SetNearTermPositionLimit(value string) {
	f.NearTermPositionLimit = (*Number)(&value)
}

// SetMinimumTradingPricingIncrement sets the minimum price increment.
func (f *FutureOrOptionDetails1) SetMinimumTradingPricingIncrement(value string) {
	f.MinimumTradingPricingIncrement = (*Number)(&value)
}

// SetPurpose sets the reason the issuance raises money.
func (f *FutureOrOptionDetails1) SetPurpose(value string) {
	f.Purpose = (*Max256Text)(&value)
}

// SetContractSettlementMonth sets the settlement year-month.
func (f *FutureOrOptionDetails1) SetContractSettlementMonth(value string) {
	f.ContractSettlementMonth = (*ISOYearMonth)(&value)
}

// AddFirstDealingDate allocates and returns the first-dealing-date choice
// so the caller can populate it.
func (f *FutureOrOptionDetails1) AddFirstDealingDate() *DateAndDateTime1Choice {
	f.FirstDealingDate = new(DateAndDateTime1Choice)
	return f.FirstDealingDate
}

// AddRatio appends a new conversion ratio and returns it for population.
func (f *FutureOrOptionDetails1) AddRatio() *UnderlyingRatio1 {
	newValue := new(UnderlyingRatio1)
	f.Ratio = append(f.Ratio, newValue)
	return newValue
}

// AddRating appends a new rating and returns it for population.
func (f *FutureOrOptionDetails1) AddRating() *Rating1 {
	newValue := new(Rating1)
	f.Rating = append(f.Rating, newValue)
	return newValue
}

// AddIssuePrice allocates and returns the issue price for population.
func (f *FutureOrOptionDetails1) AddIssuePrice() *Price4 {
	f.IssuePrice = new(Price4)
	return f.IssuePrice
}

// AddOptionRights allocates and returns the option rights choice.
func (f *FutureOrOptionDetails1) AddOptionRights() *OptionRight1Choice {
	f.OptionRights = new(OptionRight1Choice)
	return f.OptionRights
}

// SetLastTransaction sets whether this is the last transaction.
func (f *FutureOrOptionDetails1) SetLastTransaction(value string) {
	f.LastTransaction = (*YesNoIndicator)(&value)
}

// SetSpreadTransaction sets the spread-transaction indicator.
func (f *FutureOrOptionDetails1) SetSpreadTransaction(value string) {
	f.SpreadTransaction = (*YesNoIndicator)(&value)
}
FutureOrOptionDetails1.go
0.781122
0.482063
FutureOrOptionDetails1.go
starcoder
package geojson

import (
	"math"
	"strconv"

	"github.com/mmadfox/geojson/geo"
	"github.com/mmadfox/geojson/geometry"
)

// Circle is a center-and-radius geometry. Operations without a cheap exact
// form fall back to a polygon approximation with `steps` vertices.
type Circle struct {
	object    Object         // lazily-built polygon/point approximation
	center    geometry.Point // center as lon/lat (X=lon, Y=lat)
	meters    float64        // radius in meters
	haversine float64        // haversine of the radius, for fast point tests
	steps     int            // vertex count of the polygon approximation
	km        bool
	extra     *extra
}

// NewCircle returns an circle object.
// steps below 3 are clamped to 3 (minimum for a polygon approximation).
// A non-positive radius degenerates to a point.
func NewCircle(center geometry.Point, meters float64, steps int) *Circle {
	if steps < 3 {
		steps = 3
	}
	g := new(Circle)
	g.center = center
	g.meters = meters
	g.steps = steps
	if meters > 0 {
		meters = geo.NormalizeDistance(meters)
		g.haversine = geo.DistanceToHaversine(meters)
	}
	return g
}

// AppendJSON appends this circle, as a GeoJSON Feature with a Point
// geometry and Circle properties, to dst.
func (g *Circle) AppendJSON(dst []byte) []byte {
	dst = append(dst, `{"type":"Feature",`...)
	dst = append(dst, `"geometry":`...)
	dst = append(dst, `{"type":"Point","coordinates":[`...)
	dst = strconv.AppendFloat(dst, g.center.X, 'f', -1, 64)
	dst = append(dst, ',')
	dst = strconv.AppendFloat(dst, g.center.Y, 'f', -1, 64)
	dst = append(dst, `]},"properties":{"type":"Circle","radius":`...)
	dst = strconv.AppendFloat(dst, g.meters, 'f', -1, 64)
	dst = append(dst, `,"radius_units":"m"}}`...)
	return dst
}

// JSON returns the GeoJSON representation as a string.
func (g *Circle) JSON() string {
	return string(g.AppendJSON(nil))
}

// MarshalJSON implements json.Marshaler.
func (g *Circle) MarshalJSON() ([]byte, error) {
	return g.AppendJSON(nil), nil
}

// String returns the GeoJSON representation as a string.
func (g *Circle) String() string {
	return string(g.AppendJSON(nil))
}

// Meters returns the circle's radius
func (g *Circle) Meters() float64 {
	return g.meters
}

// Center returns the circle's center point
func (g *Circle) Center() geometry.Point {
	return g.center
}

// Haversine returns the haversine corresponding to circle's radius
func (g *Circle) Haversine() float64 {
	return g.haversine
}

// HaversineTo returns the haversine from a given point to circle's center
func (g *Circle) HaversineTo(p geometry.Point) float64 {
	return geo.Haversine(p.Y, p.X, g.center.Y, g.center.X)
}

// Within returns true if circle is contained inside object
func (g *Circle) Within(obj Object) bool {
	return obj.Contains(g)
}

// containsPoint returns true if circle contains a given point.
// Haversine is monotonic in great-circle distance, so comparing
// haversines is equivalent to comparing distances.
func (g *Circle) containsPoint(p geometry.Point) bool {
	h := geo.Haversine(p.Y, p.X, g.center.Y, g.center.X)
	return h <= g.haversine
}

// Contains returns true if the circle fully contains the other object.
func (g *Circle) Contains(obj Object) bool {
	switch other := obj.(type) {
	case *Point:
		return g.containsPoint(other.Center())
	case *SimplePoint:
		return g.containsPoint(other.Center())
	case *Circle:
		// BUGFIX: a circle contains another circle only when the distance
		// between centers plus the other radius fits within this radius.
		// The previous check (distance < sum of radii) is the INTERSECTION
		// condition and wrongly reported containment for circles that
		// merely overlap.
		if other.meters > g.meters {
			return false
		}
		// Compare in haversine space: monotonic in distance, so
		// centerDist + other.r <= g.r  <=>  hav(centerDist) <= hav(g.r - other.r).
		return other.HaversineTo(g.center) <=
			geo.DistanceToHaversine(geo.NormalizeDistance(g.meters-other.meters))
	case Collection:
		// Contains every child => contains the collection.
		for _, p := range other.Children() {
			if !g.Contains(p) {
				return false
			}
		}
		return true
	default:
		// No simple cases, so using polygon approximation.
		return g.getObject().Contains(other)
	}
}

// Intersects returns true the circle intersects other object
func (g *Circle) Intersects(obj Object) bool {
	switch other := obj.(type) {
	case *Point:
		return g.containsPoint(other.Center())
	case *Circle:
		// NOTE(review): Distance here is the polygon-approximation object
		// distance, not the center distance; comparing it to the sum of the
		// radii looks overly permissive — confirm intended semantics.
		return other.Distance(g) <= (other.meters + g.meters)
	case Collection:
		// Intersects any child => intersects the collection.
		for _, p := range other.Children() {
			if g.Intersects(p) {
				return true
			}
		}
		return false
	case *Feature:
		return g.Intersects(other.base)
	default:
		// No simple cases, so using polygon approximation.
		return g.getObject().Intersects(obj)
	}
}

// Empty reports whether the circle is empty; a circle never is.
func (g *Circle) Empty() bool {
	return false
}

// Valid reports whether the underlying geometry is valid.
func (g *Circle) Valid() bool {
	return g.getObject().Valid()
}

// ForEach visits this object only (a circle has no children).
func (g *Circle) ForEach(iter func(geom Object) bool) bool {
	return iter(g)
}

// NumPoints returns the number of points making up this object.
func (g *Circle) NumPoints() int {
	// should this be g.steps?
	return 1
}

// Distance returns the distance to another object, computed on the
// polygon approximation.
func (g *Circle) Distance(other Object) float64 {
	return g.getObject().Distance(other)
}

// Rect returns the bounding rectangle of the polygon approximation.
func (g *Circle) Rect() geometry.Rect {
	return g.getObject().Rect()
}

// Spatial returns the spatial interface of the polygon approximation.
func (g *Circle) Spatial() Spatial {
	return g.getObject().Spatial()
}

// Primative returns a primative GeoJSON object. Either a Polygon or Point.
func (g *Circle) Primative() Object {
	return g.getObject()
}

// getObject returns the cached approximation, building one if absent.
func (g *Circle) getObject() Object {
	if g.object != nil {
		return g.object
	}
	return makeCircleObject(g.center, g.meters, g.steps)
}

// makeCircleObject builds the polygon approximation (or a Point when the
// radius is non-positive) used by operations without an exact circle form.
func makeCircleObject(center geometry.Point, meters float64, steps int) Object {
	if meters <= 0 {
		return NewPoint(center)
	}
	meters = geo.NormalizeDistance(meters)
	points := make([]geometry.Point, 0, steps+1)

	// calc the four corners
	maxY, _ := geo.DestinationPoint(center.Y, center.X, meters, 0)
	_, maxX := geo.DestinationPoint(center.Y, center.X, meters, 90)
	minY, _ := geo.DestinationPoint(center.Y, center.X, meters, 180)
	_, minX := geo.DestinationPoint(center.Y, center.X, meters, 270)

	// TODO: detect of pole and antimeridian crossing and generate a
	// valid multigeometry

	// use the half width of the lat and lon
	lons := (maxX - minX) / 2
	lats := (maxY - minY) / 2

	// generate the vertices of the approximating ellipse
	for th := 0.0; th <= 360.0; th += 360.0 / float64(steps) {
		radians := (math.Pi / 180) * th
		x := center.X + lons*math.Cos(radians)
		y := center.Y + lats*math.Sin(radians)
		points = append(points, geometry.Point{X: x, Y: y})
	}

	// add last connecting point, make a total of steps+1
	points = append(points, points[0])

	return NewPolygon(
		geometry.NewPoly(points, nil, &geometry.IndexOptions{
			Kind: geometry.None,
		}),
	)
}
circle.go
0.826187
0.475849
circle.go
starcoder
package gofp

// The names and the package structure of gofp resemble the OWL Quick Reference Guide, found here:
// https://www.w3.org/2007/OWL/refcard
// For example, the gofp package "axioms" resembles the Guides section "2.5 Axioms".

// Some things in gofp are, surely, made wrong.
// At least, the following statements, found in:
// https://www.w3.org/TR/owl2-syntax/#Appendix:_Complete_Grammar_.28Normative.29
// were not considered yet:
// - OWL functional-style Syntax documents may have the strings "Prefix" or "Ontology" (case dependent) near the beginning of the document.
//   Remark: what means "near" ?
// - Sets written in one of the exchange syntaxes (e.g., XML or RDF/XML) are not necessarily expected to be duplicate free. Duplicates SHOULD be eliminated when ontology documents written in such syntaxes are converted into instances of the UML classes of the structural specification.
//   An ontology written in functional-style syntax can contain the following class expression:
//   ObjectUnionOf( a:Person a:Animal a:Animal )
//   During parsing, this expression should be "flattened" to the following expression:
//   ObjectUnionOf( a:Person a:Animal )
//   Remark: Gofp does not yet "flatten" that.
// - A functional-style syntax ontology document SHOULD use the UTF-8 encoding [RFC 3629].
//   Remark: For gofp, it MUST be UTF-8

//todo support Axiom := Declaration | ClassAxiom | ObjectPropertyAxiom | DataPropertyAxiom | DatatypeDefinition | HasKey | Assertion | AnnotationAxiom
//where axiomAnnotations := { Annotation }

import (
	"io"

	"github.com/shful/gofp/owlfunctional"
	"github.com/shful/gofp/owlfunctional/parser"
	"github.com/shful/gofp/parsehelper"
	"github.com/shful/gofp/storedefaults"
)

// OntologyFromReader parses an owl-functional file contents into an Ontology struct.
// r is the OWL-Functional file contents.
// sourceName: see parser.NewParser()
// For less convenience but more control, see the OntologyFromParser function.
func OntologyFromReader(r io.Reader, sourceName string) (ontology *owlfunctional.Ontology, err error) {
	p := parser.NewParser(r, sourceName)
	k := storedefaults.NewDefaultK()

	// In this convenience method, by default, accept implicit declarations which is OWL standard
	// When true, any declaration needs to be explicit written before usage, or the parser stops with a error.
	k.ExplicitDecls = false

	// The default store serves all three storage roles.
	rc := owlfunctional.StoreConfig{
		AxiomStore: k,
		Decls:      k,
		DeclStore:  k,
	}

	ontology, err = OntologyFromParser(p, rc)
	if err != nil {
		return
	}
	// When parsing into the default structures, we can set the convenience attribute Ontology.K
	// See package "store" for parsing into custom structures instead:
	ontology.K = k
	return
}

// OntologyFromParser uses the Parser p to create an Ontology struct.
// The configuration rc allows custom storage of Declarations and Axioms.
// As a usage example of OntologyFromParser, see the code of the OntologyFromReader function.
// Note that the API may change and Gofp, in its early state, does not use a semantic version number.
//
// The top-level grammar is a sequence of Prefix(...) declarations followed
// by a single Ontology(...) block, ending at EOF.
func OntologyFromParser(p *parser.Parser, rc owlfunctional.StoreConfig) (ontology *owlfunctional.Ontology, err error) {
	prefixes := map[string]string{}

	for {
		tok, lit, pos := p.ScanIgnoreWSAndComment()
		switch tok {
		case parser.Prefix:
			// Push the keyword back so parsePrefixTo can consume the whole expression.
			p.Unscan()
			if err = parsePrefixTo(prefixes, p); err != nil {
				err = pos.Errorf("Parsing prefix raised:%v", err)
				return
			}
		case parser.Ontology:
			p.Unscan()
			ontology = owlfunctional.NewOntology(prefixes, rc)
			if err = ontology.Parse(p); err != nil {
				return
			}
		case parser.EOF:
			return
		default:
			err = pos.ErrorfUnexpectedToken(tok, lit, "Prefix or Ontology")
			return
		}
	}
}

// parsePrefixTo parses the next Prefix expression and
// fills the given prefixes map.
// Accepted forms: Prefix(:=IRI) for the empty prefix, or Prefix(name:=IRI).
// A prefix name occurring twice is an error.
func parsePrefixTo(prefixes map[string]string, p *parser.Parser) (err error) {

	if err = p.ConsumeTokens(parser.Prefix, parser.B1); err != nil {
		return err
	}

	tok, prefix, pos := p.ScanIgnoreWSAndComment()
	if tok == parser.COLON { // empty Prefix(:=...)
		p.Unscan()
		prefix = ""
	} else { // Prefix(IDENT:=...)
		if tok != parser.IDENT {
			return pos.Errorf("unexpected \"%v\" when parsing prefix, need IDENT", prefix)
		}
	}

	if err = p.ConsumeTokens(parser.COLON, parser.EQUALS); err != nil {
		return err
	}
	prefixVal, err := parsehelper.ParseUnprefixedIRI(p)
	if err != nil {
		return pos.Errorf("unexpected \"%v\" when parsing prefix, need IRI", prefixVal)
	}
	if err = p.ConsumeTokens(parser.B2); err != nil {
		return err
	}

	if _, ok := prefixes[prefix]; ok {
		return pos.Errorf(`second occurrence of prefix "%v"`, prefix)
	}
	prefixes[prefix] = prefixVal
	return
}
gofp.go
0.604983
0.535159
gofp.go
starcoder
package standard import ( "github.com/simp7/nonogram/unit" ) type nonomap struct { width int height int bitmap [][]bool } /* nonomap is divided into 3 parts and has arguments equal or more than 3, which is separated by '/'. First two elements indicates width and height respectively. Rest elements indicates actual map which player has to solve. Each elements indicates map data of each line. They are designated by bitmap, which 0 is blank and 1 is filled one. Since the size of int is 32bits, width of maps can be equal or less than 32 mathematically. But because of display's limit, width and height can't be more than 25 When it comes to player's map, 2 is checked one where player thinks that cell is blank. The extension of file is nm(*.nm) */ //Prototype returns prototype of nonogram.Map in this package. func Prototype() unit.Map { return new(nonomap) } func (nm *nonomap) Init(bitmap [][]bool) unit.Map { result := new(nonomap) result.height = len(bitmap) result.width = len(bitmap[0]) result.bitmap = bitmap return result } func (nm *nonomap) ShouldFilled(x int, y int) bool { return nm.bitmap[y][x] } func getMaxLength(data [][]int) int { max := 0 for _, v := range data { if len(v) > max { max = len(v) } } return max } func (nm *nonomap) createHorizontalProblemData() [][]int { horizontal := make([][]int, nm.height) for i := 0; i < nm.height; i++ { previousCell := false tmp := 0 for j := 0; j < nm.width; j++ { if nm.bitmap[i][j] { tmp++ previousCell = true } else { if previousCell { horizontal[i] = append(horizontal[i], tmp) tmp = 0 } previousCell = false } } if previousCell { horizontal[i] = append(horizontal[i], tmp) } if len(horizontal[i]) == 0 { horizontal[i] = append(horizontal[i], 0) } } return horizontal } func (nm *nonomap) createVerticalProblemData() [][]int { vertical := make([][]int, nm.width) for i := 0; i < nm.width; i++ { previousCell := false tmp := 0 for j := 0; j < nm.height; j++ { if nm.bitmap[j][i] { tmp++ previousCell = true } else { if previousCell { 
vertical[i] = append(vertical[i], tmp) tmp = 0 } previousCell = false } } if previousCell { vertical[i] = append(vertical[i], tmp) } if len(vertical[i]) == 0 { vertical[i] = append(vertical[i], 0) } } return vertical } func (nm *nonomap) CreateProblem() unit.Problem { hData := nm.createHorizontalProblemData() vData := nm.createVerticalProblemData() hMax := getMaxLength(hData) vMax := getMaxLength(vData) return newProblem(hData, vData, hMax, vMax) } //This function returns height of nonomap func (nm *nonomap) Height() int { return nm.height } func (nm *nonomap) Width() int { return nm.width } func (nm *nonomap) FilledTotal() (total int) { total = 0 for n := range nm.bitmap { total += nm.countRow(n) } return } func (nm *nonomap) countRow(y int) int { result := 0 for _, v := range nm.bitmap[y] { if v { result++ } } return result } func (nm *nonomap) HeightLimit() int { return 30 } func (nm *nonomap) WidthLimit() int { return 30 } func (nm *nonomap) CheckValidity() error { if nm.height > nm.HeightLimit() || nm.width > nm.WidthLimit() || nm.height <= 0 || nm.width <= 0 { return invalidMap } return nil } func (nm *nonomap) GetFormatter() unit.Formatter { return newFormatter() }
unit/standard/nonomap.go
0.560253
0.544922
nonomap.go
starcoder
package jit

import (
	"fmt"
	"strings"

	"github.com/tetratelabs/wazero/internal/asm"
)

var (
	// unreservedGeneralPurposeIntRegisters contains unreserved general purpose registers of integer type.
	// Populated elsewhere (presumably per-architecture) — assumed sorted and contiguous.
	unreservedGeneralPurposeIntRegisters []asm.Register
	// unreservedGeneralPurposeFloatRegisters contains unreserved general purpose registers of scalar float type.
	unreservedGeneralPurposeFloatRegisters []asm.Register
)

// isNilRegister reports whether r is the nil (unset) register.
func isNilRegister(r asm.Register) bool {
	return r == asm.NilRegister
}

// isIntRegister reports whether r lies in the unreserved integer register range.
func isIntRegister(r asm.Register) bool {
	return unreservedGeneralPurposeIntRegisters[0] <= r && r <= unreservedGeneralPurposeIntRegisters[len(unreservedGeneralPurposeIntRegisters)-1]
}

// isFloatRegister reports whether r lies in the unreserved float register range.
func isFloatRegister(r asm.Register) bool {
	return unreservedGeneralPurposeFloatRegisters[0] <= r && r <= unreservedGeneralPurposeFloatRegisters[len(unreservedGeneralPurposeFloatRegisters)-1]
}

// valueLocation corresponds to each variable pushed onto the wazeroir (virtual) stack,
// and it has the information about where it exists in the physical machine.
// It might exist in registers, or maybe on in the non-virtual physical stack allocated in memory.
type valueLocation struct {
	regType generalPurposeRegisterType
	// register is set to asm.NilRegister if the value is stored in the memory stack.
	register asm.Register
	// conditionalRegister is set to conditionalRegisterStateUnset if the value is not on the conditional register.
	conditionalRegister asm.ConditionalRegisterState
	// stackPointer is the location of this value in the memory stack at runtime.
	stackPointer uint64
}

func (v *valueLocation) registerType() (t generalPurposeRegisterType) {
	return v.regType
}

func (v *valueLocation) setRegisterType(t generalPurposeRegisterType) {
	v.regType = t
}

// setRegister places the value in reg and clears any conditional register state.
func (v *valueLocation) setRegister(reg asm.Register) {
	v.register = reg
	v.conditionalRegister = asm.ConditionalRegisterStateUnset
}

// onRegister reports whether the value currently lives in a general purpose register.
func (v *valueLocation) onRegister() bool {
	return v.register != asm.NilRegister && v.conditionalRegister == asm.ConditionalRegisterStateUnset
}

// onStack reports whether the value currently lives in the memory stack.
func (v *valueLocation) onStack() bool {
	return v.register == asm.NilRegister && v.conditionalRegister == asm.ConditionalRegisterStateUnset
}

// onConditionalRegister reports whether the value is represented by a CPU flag state.
func (v *valueLocation) onConditionalRegister() bool {
	return v.conditionalRegister != asm.ConditionalRegisterStateUnset
}

func (v *valueLocation) String() string {
	var location string
	if v.onStack() {
		location = fmt.Sprintf("stack(%d)", v.stackPointer)
	} else if v.onConditionalRegister() {
		location = fmt.Sprintf("conditional(%d)", v.conditionalRegister)
	} else if v.onRegister() {
		location = fmt.Sprintf("register(%d)", v.register)
	}
	return fmt.Sprintf("{type=%s,location=%s}", v.regType, location)
}

func newValueLocationStack() *valueLocationStack {
	return &valueLocationStack{usedRegisters: map[asm.Register]struct{}{}}
}

// valueLocationStack represents the wazeroir virtual stack
// where each item holds the location information about where it exists
// on the physical machine at runtime.
// Notably this is only used in the compilation phase, not runtime,
// and we change the state of this struct at every wazeroir operation we compile.
// In this way, we can see where the operands of an operation (for example,
// two variables for wazeroir add operation.) exist and check the necessity for
// moving the variable to registers to perform actual CPU instruction
// to achieve wazeroir's add operation.
type valueLocationStack struct {
	// stack holds all the variables.
	stack []*valueLocation
	// sp is the current stack pointer.
	sp uint64
	// usedRegisters stores the used registers.
	usedRegisters map[asm.Register]struct{}
	// stackPointerCeil tracks max(.sp) across the lifespan of this struct.
	stackPointerCeil uint64
}

func (v *valueLocationStack) String() string {
	var stackStr []string
	for i := uint64(0); i < v.sp; i++ {
		stackStr = append(stackStr, v.stack[i].String())
	}
	var usedRegisters []string
	for reg := range v.usedRegisters {
		usedRegisters = append(usedRegisters, fmt.Sprintf("%d", reg))
	}
	return fmt.Sprintf("sp=%d, stack=[%s], used_registers=[%s]", v.sp, strings.Join(stackStr, ","), strings.Join(usedRegisters, ","))
}

// clone returns a deep copy of the stack; mutating the copy never affects v.
func (v *valueLocationStack) clone() *valueLocationStack {
	ret := &valueLocationStack{}
	ret.sp = v.sp
	// Fix: size the map by the SOURCE's register count. The previous code
	// used len(ret.usedRegisters), which is always zero at this point.
	ret.usedRegisters = make(map[asm.Register]struct{}, len(v.usedRegisters))
	for r := range v.usedRegisters {
		ret.markRegisterUsed(r)
	}
	ret.stack = make([]*valueLocation, len(v.stack))
	for i, v := range v.stack {
		ret.stack[i] = &valueLocation{
			regType:             v.regType,
			conditionalRegister: v.conditionalRegister,
			stackPointer:        v.stackPointer,
			register:            v.register,
		}
	}
	ret.stackPointerCeil = v.stackPointerCeil
	return ret
}

// pushValueLocationOnRegister creates a new valueLocation with a given register and pushes onto
// the location stack.
func (v *valueLocationStack) pushValueLocationOnRegister(reg asm.Register) (loc *valueLocation) {
	loc = &valueLocation{register: reg, conditionalRegister: asm.ConditionalRegisterStateUnset}
	if isIntRegister(reg) {
		loc.setRegisterType(generalPurposeRegisterTypeInt)
	} else if isFloatRegister(reg) {
		loc.setRegisterType(generalPurposeRegisterTypeFloat)
	}
	v.push(loc)
	return
}

// pushValueLocationOnStack creates a new stack-resident valueLocation and
// pushes it onto the location stack. (The previous comment misnamed this
// function "pushValueLocationOnRegister".)
func (v *valueLocationStack) pushValueLocationOnStack() (loc *valueLocation) {
	loc = &valueLocation{register: asm.NilRegister, conditionalRegister: asm.ConditionalRegisterStateUnset}
	v.push(loc)
	return
}

// pushValueLocationOnConditionalRegister creates a new valueLocation with a
// given conditional register state and pushes it onto the location stack.
// (The previous comment misnamed this function "pushValueLocationOnRegister".)
func (v *valueLocationStack) pushValueLocationOnConditionalRegister(state asm.ConditionalRegisterState) (loc *valueLocation) {
	loc = &valueLocation{register: asm.NilRegister, conditionalRegister: state}
	v.push(loc)
	return
}

// push pushes to a given valueLocation onto the stack.
func (v *valueLocationStack) push(loc *valueLocation) {
	loc.stackPointer = v.sp
	if v.sp >= uint64(len(v.stack)) {
		// This case we need to grow the stack capacity by appending the item,
		// rather than indexing.
		v.stack = append(v.stack, loc)
	} else {
		v.stack[v.sp] = loc
	}
	if v.sp > v.stackPointerCeil {
		v.stackPointerCeil = v.sp
	}
	v.sp++
}

func (v *valueLocationStack) pop() (loc *valueLocation) {
	v.sp--
	loc = v.stack[v.sp]
	return
}

func (v *valueLocationStack) peek() (loc *valueLocation) {
	loc = v.stack[v.sp-1]
	return
}

// releaseRegister frees loc's register back to the pool and resets loc to the
// stack-resident state.
func (v *valueLocationStack) releaseRegister(loc *valueLocation) {
	v.markRegisterUnused(loc.register)
	loc.register = asm.NilRegister
	loc.conditionalRegister = asm.ConditionalRegisterStateUnset
}

func (v *valueLocationStack) markRegisterUnused(regs ...asm.Register) {
	for _, reg := range regs {
		delete(v.usedRegisters, reg)
	}
}

func (v *valueLocationStack) markRegisterUsed(regs ...asm.Register) {
	for _, reg := range regs {
		v.usedRegisters[reg] = struct{}{}
	}
}

type generalPurposeRegisterType byte

const (
	generalPurposeRegisterTypeInt generalPurposeRegisterType = iota
	generalPurposeRegisterTypeFloat
)

func (tp generalPurposeRegisterType) String() (ret string) {
	switch tp {
	case generalPurposeRegisterTypeInt:
		ret = "int"
	case generalPurposeRegisterTypeFloat:
		ret = "float"
	}
	return
}

// takeFreeRegister searches for an unused register of the given type.
// Note: the returned register is NOT marked used here; callers are expected
// to do that via markRegisterUsed. (The previous comment claimed otherwise.)
func (v *valueLocationStack) takeFreeRegister(tp generalPurposeRegisterType) (reg asm.Register, found bool) {
	var targetRegs []asm.Register
	switch tp {
	case generalPurposeRegisterTypeFloat:
		targetRegs = unreservedGeneralPurposeFloatRegisters
	case generalPurposeRegisterTypeInt:
		targetRegs = unreservedGeneralPurposeIntRegisters
	}
	for _, candidate := range targetRegs {
		if _, ok := v.usedRegisters[candidate]; ok {
			continue
		}
		return candidate, true
	}
	return 0, false
}

// takeFreeRegisters collects up to num unused registers of the given type.
// found is true only when exactly num registers were available.
func (v *valueLocationStack) takeFreeRegisters(tp generalPurposeRegisterType, num int) (regs []asm.Register, found bool) {
	var targetRegs []asm.Register
	switch tp {
	case generalPurposeRegisterTypeFloat:
		targetRegs = unreservedGeneralPurposeFloatRegisters
	case generalPurposeRegisterTypeInt:
		targetRegs = unreservedGeneralPurposeIntRegisters
	}
	regs = make([]asm.Register, 0, num)
	for _, candidate := range targetRegs {
		if _, ok := v.usedRegisters[candidate]; ok {
			continue
		}
		regs = append(regs, candidate)
		if len(regs) == num {
			found = true
			break
		}
	}
	return
}

// takeStealTargetFromUsedRegister searches the stack from the bottom for the
// first value that holds a register of the requested type, so its register can
// be stolen.
func (v *valueLocationStack) takeStealTargetFromUsedRegister(tp generalPurposeRegisterType) (*valueLocation, bool) {
	for i := uint64(0); i < v.sp; i++ {
		loc := v.stack[i]
		if loc.onRegister() {
			switch tp {
			case generalPurposeRegisterTypeFloat:
				if isFloatRegister(loc.register) {
					return loc, true
				}
			case generalPurposeRegisterTypeInt:
				if isIntRegister(loc.register) {
					return loc, true
				}
			}
		}
	}
	return nil, false
}
vendor/github.com/tetratelabs/wazero/internal/wasm/jit/jit_value_location.go
0.694924
0.448366
jit_value_location.go
starcoder
package encoding

import (
	"encoding"
	stdjson "encoding/json"
	"fmt"
	"reflect"
	"strconv"
	"strings"
	"time"
)

// decodeToType converts the string value to a value of the primitive kind typ.
// Parse failures are deliberately swallowed: the zero value of the kind is
// returned instead. Unsupported kinds yield nil.
func decodeToType(typ reflect.Kind, value string) interface{} {
	switch typ {
	case reflect.String:
		return value
	case reflect.Bool:
		v, _ := strconv.ParseBool(value)
		return v
	case reflect.Int:
		v, _ := strconv.ParseInt(value, 10, 64)
		return int(v)
	case reflect.Int8:
		return int8(decodeToType(reflect.Int, value).(int))
	case reflect.Int16:
		return int16(decodeToType(reflect.Int, value).(int))
	case reflect.Int32:
		return int32(decodeToType(reflect.Int, value).(int))
	case reflect.Int64:
		return int64(decodeToType(reflect.Int, value).(int))
	case reflect.Uint:
		v, _ := strconv.ParseUint(value, 10, 64)
		return uint(v)
	case reflect.Uint8:
		return uint8(decodeToType(reflect.Uint, value).(uint))
	case reflect.Uint16:
		return uint16(decodeToType(reflect.Uint, value).(uint))
	case reflect.Uint32:
		return uint32(decodeToType(reflect.Uint, value).(uint))
	case reflect.Uint64:
		return uint64(decodeToType(reflect.Uint, value).(uint))
	case reflect.Float64:
		v, _ := strconv.ParseFloat(value, 64)
		return v
	case reflect.Float32:
		return float32(decodeToType(reflect.Float64, value).(float64))
	}
	return nil
}

// unmarshalToType decodes value into a freshly allocated instance of typ,
// trying encoding.TextUnmarshaler, then json.Unmarshaler, then generic JSON
// decoding. If typ is a pointer type a pointer is returned, otherwise the
// dereferenced value.
func unmarshalToType(typ reflect.Type, value string) (val interface{}, err error) {
	// If we get a pointer in, we'll return a pointer out
	if typ.Kind() == reflect.Ptr {
		typ = typ.Elem()
	}
	val = reflect.New(typ).Interface()
	defer func() {
		if err == nil && typ.Kind() != reflect.Ptr {
			val = reflect.Indirect(reflect.ValueOf(val)).Interface()
		}
	}()

	// Try Unmarshalers
	if um, ok := val.(encoding.TextUnmarshaler); ok {
		if err = um.UnmarshalText([]byte(value)); err == nil {
			return val, nil
		}
	}
	if um, ok := val.(stdjson.Unmarshaler); ok {
		if err = um.UnmarshalJSON([]byte(value)); err == nil {
			return val, nil
		}
	}

	// Try JSON as the last resort. NOTE(review): "json" is not the stdlib
	// package (that is imported as stdjson) — presumably a package-level JSON
	// implementation declared in a sibling file; confirm.
	if err = json.Unmarshal([]byte(value), val); err != nil {
		// Fix: the original trailing `return val, fmt.Errorf("No way to
		// unmarshal ...")` was unreachable dead code (this branch always
		// returned first). Keep its diagnostic, attaching the actual error.
		return nil, fmt.Errorf("no way to unmarshal %q to %s: %v", value, typ.Name(), err)
	}
	return val, nil
}

// FromStringStringMap decodes input into output with the same type as base.
// Only fields tagged by tagName get decoded. The tag options "squash" and
// "include" merge nested structs from the flat key space ("include" uses
// dotted "fieldName.sub" keys).
func FromStringStringMap(tagName string, base interface{}, input map[string]string) (output interface{}, err error) {
	baseType := reflect.TypeOf(base)
	valType := baseType
	if baseType.Kind() == reflect.Ptr {
		valType = valType.Elem()
	}
	// If we get a pointer in, we'll return a pointer out
	valPtr := reflect.New(valType)
	val := valPtr.Elem()
	output = valPtr.Interface()
	defer func() {
		if err == nil && baseType.Kind() != reflect.Ptr {
			output = reflect.Indirect(reflect.ValueOf(output)).Interface()
		}
	}()

	for i := 0; i < valType.NumField(); i++ {
		field := valType.Field(i)
		if field.PkgPath != "" { // unexported field
			continue
		}
		fieldName, opts := parseTag(field.Tag.Get(tagName))
		squash, include := opts.Has("squash"), opts.Has("include")
		if !squash && (fieldName == "" || fieldName == "-") {
			continue
		}
		inputStr, fieldInInput := input[fieldName]
		fieldType := field.Type
		fieldKind := field.Type.Kind()
		isPointerField := fieldKind == reflect.Ptr
		if isPointerField {
			// An explicit "null" leaves the pointer field nil.
			if inputStr == "null" {
				continue
			}
			fieldType = fieldType.Elem()
			fieldKind = fieldType.Kind()
		}

		var iface interface{}
		if (squash || include) && fieldKind == reflect.Struct {
			var subInput map[string]string
			if squash {
				subInput = input
			} else {
				// include: collect "fieldName.*" keys into a sub-map.
				subInput = make(map[string]string)
				for k, v := range input {
					if strings.HasPrefix(k, fieldName+".") {
						subInput[strings.TrimPrefix(k, fieldName+".")] = v
					}
				}
				if len(subInput) == 0 {
					continue
				}
			}
			subOutput, err := FromStringStringMap(tagName, val.Field(i).Interface(), subInput)
			if err != nil {
				return nil, err
			}
			val.Field(i).Set(reflect.ValueOf(subOutput))
			continue
		}

		if !fieldInInput || inputStr == "" {
			continue
		}

		switch fieldKind {
		case reflect.Struct, reflect.Array, reflect.Interface, reflect.Slice, reflect.Map:
			iface, err = unmarshalToType(fieldType, inputStr)
			if err != nil {
				return nil, err
			}
		default:
			// NOTE(review): decodeToType returns nil for kinds it does not
			// cover, which would panic in the Convert below — confirm that
			// all field kinds reaching here are supported.
			iface = decodeToType(fieldKind, inputStr)
		}
		// Normalize time values to UTC.
		if v, ok := iface.(time.Time); ok {
			iface = v.UTC()
		}
		fieldVal := reflect.ValueOf(iface).Convert(fieldType)
		if isPointerField {
			fieldValPtr := reflect.New(fieldType)
			fieldValPtr.Elem().Set(fieldVal)
			val.Field(i).Set(fieldValPtr)
		} else {
			val.Field(i).Set(fieldVal)
		}
	}
	return output, nil
}
encoding/decode.go
0.562898
0.437343
decode.go
starcoder
package validator

import (
	"bytes"
	"fmt"
	"net/http"
	"reflect"
	"sort"
	"strconv"
	"strings"
	"unicode/utf8"
)

// tagName is the struct tag key inspected by this package.
const tagName string = "valid"

// Validator construct
type Validator struct {
	Attributes    map[string]string
	CustomMessage map[string]string
	Translator    *Translator
}

// Default is a shared, ready-to-use instance of Validator.
var Default = New()

// New returns a new instance of Validator
func New() *Validator {
	return &Validator{}
}

// validateBetween check The field under validation must have a size between the given min and max. Strings, numerics, arrays, and files are evaluated in the same fashion as the size rule.
// Panics for unsupported kinds.
func validateBetween(v reflect.Value, params []string) bool {
	if len(params) != 2 {
		return false
	}
	switch v.Kind() {
	case reflect.String:
		min, _ := ToInt(params[0])
		max, _ := ToInt(params[1])
		return ValidateBetweenString(v.String(), min, max)
	case reflect.Slice, reflect.Map, reflect.Array:
		min, _ := ToInt(params[0])
		max, _ := ToInt(params[1])
		return ValidateDigitsBetweenInt64(int64(v.Len()), min, max)
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		min, _ := ToInt(params[0])
		max, _ := ToInt(params[1])
		return ValidateDigitsBetweenInt64(v.Int(), min, max)
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		min, _ := ToUint(params[0])
		max, _ := ToUint(params[1])
		return ValidateDigitsBetweenUint64(v.Uint(), min, max)
	case reflect.Float32, reflect.Float64:
		min, _ := ToFloat(params[0])
		max, _ := ToFloat(params[1])
		return ValidateDigitsBetweenFloat64(v.Float(), min, max)
	}
	panic(fmt.Sprintf("validator: Between unsupport Type %T", v.Interface()))
}

// ValidateBetween check The field under validation must have a size between the given min and max. Strings, numerics, arrays, and files are evaluated in the same fashion as the size rule.
func ValidateBetween(i interface{}, params []string) bool { v := reflect.ValueOf(i) return validateBetween(v, params) } // validateDigitsBetween check The field under validation must have a length between the given min and max. func validateDigitsBetween(v reflect.Value, params []string) bool { if len(params) != 2 { return false } switch v.Kind() { case reflect.String, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: min, _ := ToInt(params[0]) max, _ := ToInt(params[1]) var value string switch v.Kind() { case reflect.String: value = v.String() case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: value = ToString(v.Int()) case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: value = ToString(v.Uint()) } if value == "" || !IsNumeric(value) { return false } return ValidateBetweenString(value, min, max) } panic(fmt.Sprintf("validator: DigitsBetween unsupport Type %T", v.Interface())) } // ValidateDigitsBetween check The field under validation must have a length between the given min and max. func ValidateDigitsBetween(i interface{}, params []string) bool { v := reflect.ValueOf(i) return validateDigitsBetween(v, params) } // validateSize The field under validation must have a size matching the given value. // For string data, value corresponds to the number of characters. // For numeric data, value corresponds to a given integer value. // For an array | map | slice, size corresponds to the count of the array | map | slice. 
func validateSize(v reflect.Value, param []string) bool { switch v.Kind() { case reflect.String: p, _ := ToInt(param[0]) return compareString(v.String(), p, "==") case reflect.Slice, reflect.Map, reflect.Array: p, _ := ToInt(param[0]) return compareInt64(int64(v.Len()), p, "==") case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: p, _ := ToInt(param[0]) return compareInt64(v.Int(), p, "==") case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: p, _ := ToUint(param[0]) return compareUint64(v.Uint(), p, "==") case reflect.Float32, reflect.Float64: p, _ := ToFloat(param[0]) return compareFloat64(v.Float(), p, "==") } panic(fmt.Sprintf("validator: Size unsupport Type %T", v.Interface())) } // ValidateSize The field under validation must have a size matching the given value. // For string data, value corresponds to the number of characters. // For numeric data, value corresponds to a given integer value. // For an array | map | slice, size corresponds to the count of the array | map | slice. func ValidateSize(i interface{}, params []string) bool { v := reflect.ValueOf(i) return validateSize(v, params) } // validateMax is the validation function for validating if the current field's value is less than or equal to the param's value. 
func validateMax(v reflect.Value, param []string) bool { switch v.Kind() { case reflect.String: p, _ := ToInt(param[0]) return compareString(v.String(), p, "<=") case reflect.Slice, reflect.Map, reflect.Array: p, _ := ToInt(param[0]) return compareInt64(int64(v.Len()), p, "<=") case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: p, _ := ToInt(param[0]) return compareInt64(v.Int(), p, "<=") case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: p, _ := ToUint(param[0]) return compareUint64(v.Uint(), p, "<=") case reflect.Float32, reflect.Float64: p, _ := ToFloat(param[0]) return compareFloat64(v.Float(), p, "<=") } panic(fmt.Sprintf("validator: Max unsupport Type %T", v.Interface())) } // ValidatMax is the validation function for validating if the current field's value is less than or equal to the param's value. func ValidatMax(i interface{}, params []string) bool { v := reflect.ValueOf(i) return validateMax(v, params) } // validateMin is the validation function for validating if the current field's value is greater than or equal to the param's value. 
// Panics for unsupported kinds.
func validateMin(v reflect.Value, param []string) bool {
	switch v.Kind() {
	case reflect.String:
		p, _ := ToInt(param[0])
		return compareString(v.String(), p, ">=")
	case reflect.Slice, reflect.Map, reflect.Array:
		p, _ := ToInt(param[0])
		return compareInt64(int64(v.Len()), p, ">=")
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		p, _ := ToInt(param[0])
		return compareInt64(v.Int(), p, ">=")
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		p, _ := ToUint(param[0])
		return compareUint64(v.Uint(), p, ">=")
	case reflect.Float32, reflect.Float64:
		p, _ := ToFloat(param[0])
		return compareFloat64(v.Float(), p, ">=")
	}
	panic(fmt.Sprintf("validator: Min unsupport Type %T", v.Interface()))
}

// ValidateMin is the validation function for validating if the current field's value is greater than or equal to the param's value.
func ValidateMin(i interface{}, params []string) bool {
	v := reflect.ValueOf(i)
	return validateMin(v, params)
}

// validateSame is the validation function for validating if the current field's value equals the other field's value.
// Panics when the two kinds differ or are unsupported.
func validateSame(v reflect.Value, anotherField reflect.Value) bool {
	if v.Kind() != anotherField.Kind() {
		panic(fmt.Sprintf("validator: Same The two fields must be of the same type %T, %T", v.Interface(), anotherField.Interface()))
	}
	switch v.Kind() {
	case reflect.String:
		return v.String() == anotherField.String()
	case reflect.Slice, reflect.Map, reflect.Array:
		// Collections compare by length only, not element-wise.
		return compareInt64(int64(v.Len()), int64(anotherField.Len()), "==")
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return compareInt64(v.Int(), anotherField.Int(), "==")
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return compareUint64(v.Uint(), anotherField.Uint(), "==")
	case reflect.Float32, reflect.Float64:
		return compareFloat64(v.Float(), anotherField.Float(), "==")
	}
	panic(fmt.Sprintf("validator: Same unsupport Type %T", v.Interface()))
}

// ValidateSame is the validation function for validating if the current field's value equals the other field's value.
func ValidateSame(i interface{}, a interface{}) bool {
	v := reflect.ValueOf(i)
	anotherField := reflect.ValueOf(a)
	return validateSame(v, anotherField)
}

// validateLt is the validation function for validating if the current field's value is less than the other field's value.
// Panics when the two kinds differ or are unsupported.
func validateLt(v reflect.Value, anotherField reflect.Value) bool {
	if v.Kind() != anotherField.Kind() {
		panic(fmt.Sprintf("validator: Lt The two fields must be of the same type %T, %T", v.Interface(), anotherField.Interface()))
	}
	switch v.Kind() {
	case reflect.String:
		// Strings compare by length: v's length against the other's rune count.
		return compareString(v.String(), int64(utf8.RuneCountInString(anotherField.String())), "<")
	case reflect.Slice, reflect.Map, reflect.Array:
		return compareInt64(int64(v.Len()), int64(anotherField.Len()), "<")
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return compareInt64(v.Int(), anotherField.Int(), "<")
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return compareUint64(v.Uint(), anotherField.Uint(), "<")
	case reflect.Float32, reflect.Float64:
		return compareFloat64(v.Float(), anotherField.Float(), "<")
	}
	panic(fmt.Sprintf("validator: Lt unsupport Type %T", v.Interface()))
}

// ValidateLt is the validation function for validating if the current field's value is less than the other field's value.
func ValidateLt(i interface{}, a interface{}) bool {
	v := reflect.ValueOf(i)
	anotherField := reflect.ValueOf(a)
	return validateLt(v, anotherField)
}

// validateLte is the validation function for validating if the current field's value is less than or equal to the other field's value.
// Panics when the two kinds differ or are unsupported.
func validateLte(v reflect.Value, anotherField reflect.Value) bool {
	if v.Kind() != anotherField.Kind() {
		panic(fmt.Sprintf("validator: Lte The two fields must be of the same type %T, %T", v.Interface(), anotherField.Interface()))
	}
	switch v.Kind() {
	case reflect.String:
		return compareString(v.String(), int64(utf8.RuneCountInString(anotherField.String())), "<=")
	case reflect.Slice, reflect.Map, reflect.Array:
		return compareInt64(int64(v.Len()), int64(anotherField.Len()), "<=")
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return compareInt64(v.Int(), anotherField.Int(), "<=")
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return compareUint64(v.Uint(), anotherField.Uint(), "<=")
	case reflect.Float32, reflect.Float64:
		return compareFloat64(v.Float(), anotherField.Float(), "<=")
	}
	panic(fmt.Sprintf("validator: Lte unsupport Type %T", v.Interface()))
}

// ValidateLte is the validation function for validating if the current field's value is less than or equal to the other field's value.
func ValidateLte(i interface{}, a interface{}) bool {
	v := reflect.ValueOf(i)
	anotherField := reflect.ValueOf(a)
	return validateLte(v, anotherField)
}

// validateGt is the validation function for validating if the current field's value is greater than the other field's value.
// Panics when the two kinds differ or are unsupported.
func validateGt(v reflect.Value, anotherField reflect.Value) bool {
	if v.Kind() != anotherField.Kind() {
		panic(fmt.Sprintf("validator: Gt The two fields must be of the same type %T, %T", v.Interface(), anotherField.Interface()))
	}
	switch v.Kind() {
	case reflect.String:
		return compareString(v.String(), int64(utf8.RuneCountInString(anotherField.String())), ">")
	case reflect.Slice, reflect.Map, reflect.Array:
		return compareInt64(int64(v.Len()), int64(anotherField.Len()), ">")
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return compareInt64(v.Int(), anotherField.Int(), ">")
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return compareUint64(v.Uint(), anotherField.Uint(), ">")
	case reflect.Float32, reflect.Float64:
		return compareFloat64(v.Float(), anotherField.Float(), ">")
	}
	panic(fmt.Sprintf("validator: Gt unsupport Type %T", v.Interface()))
}

// ValidateGt is the validation function for validating if the current field's value is greater than the other field's value.
func ValidateGt(i interface{}, a interface{}) bool {
	v := reflect.ValueOf(i)
	anotherField := reflect.ValueOf(a)
	return validateGt(v, anotherField)
}

// validateGte is the validation function for validating if the current field's value is greater than or equal to the other field's value.
// Panics when the two kinds differ or are unsupported.
func validateGte(v reflect.Value, anotherField reflect.Value) bool {
	if v.Kind() != anotherField.Kind() {
		panic(fmt.Sprintf("validator: Gte The two fields must be of the same type %T, %T", v.Interface(), anotherField.Interface()))
	}
	switch v.Kind() {
	case reflect.String:
		return compareString(v.String(), int64(utf8.RuneCountInString(anotherField.String())), ">=")
	case reflect.Slice, reflect.Map, reflect.Array:
		return compareInt64(int64(v.Len()), int64(anotherField.Len()), ">=")
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return compareInt64(v.Int(), anotherField.Int(), ">=")
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return compareUint64(v.Uint(), anotherField.Uint(), ">=")
	case reflect.Float32, reflect.Float64:
		return compareFloat64(v.Float(), anotherField.Float(), ">=")
	}
	panic(fmt.Sprintf("validator: Gte unsupport Type %T", v.Interface()))
}

// ValidateGte is the validation function for validating if the current field's value is greater than or equal to the other field's value.
func ValidateGte(i interface{}, a interface{}) bool {
	v := reflect.ValueOf(i)
	anotherField := reflect.ValueOf(a)
	return validateGte(v, anotherField)
}

// validateDistinct is the validation function for validating an attribute is unique among other values.
func validateDistinct(v reflect.Value) bool { switch v.Kind() { case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, reflect.Float32, reflect.Float64: return true case reflect.Slice, reflect.Array: m := reflect.MakeMap(reflect.MapOf(v.Type().Elem(), v.Type())) for i := 0; i < v.Len(); i++ { m.SetMapIndex(v.Index(i), v) } return v.Len() == m.Len() case reflect.Map: m := reflect.MakeMap(reflect.MapOf(v.Type().Elem(), v.Type())) for _, k := range v.MapKeys() { m.SetMapIndex(v.MapIndex(k), v) } return v.Len() == m.Len() } panic(fmt.Sprintf("validator: Distinct unsupport Type %T", v.Interface())) } // ValidateDistinct is the validation function for validating an attribute is unique among other values. func ValidateDistinct(i interface{}) bool { v := reflect.ValueOf(i) return validateDistinct(v) } // ValidateMimeTypes is the validation function for the file must match one of the given MIME types. func ValidateMimeTypes(data []byte, mimeTypes []string) bool { mimeType := http.DetectContentType(data) for _, value := range mimeTypes { if mimeType == value { return true } } return false } // ValidateMimes is the validation function for the file must have a MIME type corresponding to one of the listed extensions. func ValidateMimes(data []byte, mimes []string) bool { mimeTypes := make([]string, len(mimes)) for i, mime := range mimes { if val, ok := Mimes[mime]; ok { mimeTypes[i] = Mimes[val] } else { panic(fmt.Sprintf("validator: Mimes unsupport Type %s", mime)) } } return ValidateMimeTypes(data, mimeTypes) } // ValidateImage is the validation function for the The file under validation must be an image (jpeg, png, bmp, gif, or svg) func ValidateImage(data []byte) bool { return ValidateMimes(data, []string{"jpeg", "png", "gif", "bmp", "svg"}) } // ValidateStruct use tags for fields. // result will be equal to `false` if there are any errors. 
func ValidateStruct(s interface{}) error {
	// Delegate to the package-level Default validator with empty namespaces.
	return Default.ValidateStruct(s, nil, nil)
}

// ValidateStruct use tags for fields.
// result will be equal to `false` if there are any errors.
// jsonNamespace and structNamespace carry the dotted path prefixes used when
// recursing into nested structs, map values and slice elements; pass nil at
// the top level.
func (v *Validator) ValidateStruct(s interface{}, jsonNamespace []byte, structNamespace []byte) error {
	if s == nil {
		return nil
	}
	var err error
	val := reflect.ValueOf(s)
	// Unwrap one level of interface/pointer indirection.
	if val.Kind() == reflect.Interface || val.Kind() == reflect.Ptr {
		val = val.Elem()
	}
	// we only accept structs
	if val.Kind() != reflect.Struct {
		return fmt.Errorf("function only accepts structs; got %s", val.Kind())
	}
	var errs Errors
	// cachedTypefields memoizes the tag parsing per struct type.
	fields := cachedTypefields(val.Type())
	for _, f := range fields {
		valuefield := val.Field(f.index[0])
		// A single field may yield multiple errors (nested structs); flatten.
		err := v.newTypeValidator(valuefield, &f, val, jsonNamespace, structNamespace)
		if err != nil {
			if errors, ok := err.(Errors); ok {
				for _, fieldError := range errors {
					errs = append(errs, fieldError)
				}
			} else {
				errs = append(errs, err)
			}
		}
	}
	if len(errs) > 0 {
		err = errs
	}
	return err
}

// newTypeValidator runs every applicable rule against one field value,
// recursing into nested structs, maps and slices. o is the enclosing struct
// value (used to resolve cross-field rules). Returns the first failure.
func (v *Validator) newTypeValidator(value reflect.Value, f *field, o reflect.Value, jsonNamespace []byte, structNamespace []byte) (resultErr error) {
	// Skip invalid values, and empty values for fields tagged omitempty.
	if !value.IsValid() || f.omitEmpty && Empty(value) {
		return nil
	}
	if value.Kind() == reflect.Interface || value.Kind() == reflect.Ptr {
		value = value.Elem()
	}
	name := string(append(jsonNamespace, f.nameBytes...))
	// NOTE(review): uses f.structName here but f.structNameBytes when
	// recursing below — confirm both spell the same name.
	structName := string(append(structNamespace, f.structName...))
	// required/requiredIf/... rules run first and short-circuit.
	if err := v.checkRequired(value, f, o, name, structName); err != nil {
		return err
	}
	// User-registered custom rules run before the built-in kind dispatch.
	for _, tag := range f.validTags {
		if validatefunc, ok := CustomTypeRuleMap.Get(tag.name); ok {
			if result := validatefunc(value, o, tag); !result {
				return v.formatsMessages(&FieldError{
					name:              name,
					structName:        structName,
					tag:               tag.name,
					messageName:       tag.messageName,
					messageParameters: parseValidatorMessageParameters(tag, o),
					attribute:         f.attribute,
					defaultAttribute:  f.defaultAttribute,
					value:             ToString(value.Interface()),
				})
			}
		}
	}
	switch value.Kind() {
	case reflect.Bool,
		reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
		reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr,
		reflect.Float32, reflect.Float64,
		reflect.String:
		// Scalar kinds: run dependent rules, parameterless rules,
		// parameterized rules, then string-only rules.
		for _, tag := range f.validTags {
			if err := v.checkDependentRules(tag, f, value, o, name, structName); err != nil {
				return err
			}
			if validfunc, ok := RuleMap[tag.name]; ok {
				isValid := validfunc(value)
				if !isValid {
					return v.formatsMessages(&FieldError{
						name:              name,
						structName:        structName,
						tag:               tag.name,
						messageName:       tag.messageName,
						messageParameters: parseValidatorMessageParameters(tag, o),
						attribute:         f.attribute,
						defaultAttribute:  f.defaultAttribute,
						value:             ToString(value.Interface()),
					})
				}
			}
			if validfunc, ok := ParamRuleMap[tag.name]; ok {
				isValid := validfunc(value, tag.params)
				if !isValid {
					return v.formatsMessages(&FieldError{
						name:              name,
						structName:        structName,
						tag:               tag.name,
						messageName:       tag.messageName,
						messageParameters: parseValidatorMessageParameters(tag, o),
						attribute:         f.attribute,
						defaultAttribute:  f.defaultAttribute,
						value:             ToString(value.Interface()),
					})
				}
			}
			switch value.Kind() {
			case reflect.String:
				if validfunc, ok := StringRulesMap[tag.name]; ok {
					isValid := validfunc(value.String())
					if !isValid {
						return v.formatsMessages(&FieldError{
							name:              name,
							structName:        structName,
							tag:               tag.name,
							messageName:       tag.messageName,
							messageParameters: parseValidatorMessageParameters(tag, o),
							attribute:         f.attribute,
							defaultAttribute:  f.defaultAttribute,
							value:             ToString(value.Interface()),
						})
					}
				}
			}
		}
		return nil
	case reflect.Map:
		// Only string-keyed maps are supported.
		if value.Type().Key().Kind() != reflect.String {
			return &UnsupportedTypeError{value.Type()}
		}
		for _, tag := range f.validTags {
			if err := v.checkDependentRules(tag, f, value, o, name, structName); err != nil {
				return err
			}
			if validfunc, ok := RuleMap[tag.name]; ok {
				isValid := validfunc(value)
				if !isValid {
					return v.formatsMessages(&FieldError{
						name:              name,
						structName:        structName,
						tag:               tag.name,
						messageName:       tag.messageName,
						messageParameters: parseValidatorMessageParameters(tag, o),
						attribute:         f.attribute,
						defaultAttribute:  f.defaultAttribute,
						value:             ToString(value.Interface()),
					})
				}
			}
			if validfunc, ok := ParamRuleMap[tag.name]; ok {
				isValid := validfunc(value, tag.params)
				if !isValid {
					return v.formatsMessages(&FieldError{
						name:              name,
						structName:        structName,
						tag:               tag.name,
						messageName:       tag.messageName,
						messageParameters: parseValidatorMessageParameters(tag, o),
						attribute:         f.attribute,
						defaultAttribute:  f.defaultAttribute,
						value:             ToString(value.Interface()),
					})
				}
			}
		}
		// Sort keys for deterministic recursion order.
		var sv stringValues
		sv = value.MapKeys()
		sort.Sort(sv)
		for _, k := range sv {
			var err error
			item := value.MapIndex(k)
			// NOTE(review): this checks value.Kind() (the map itself, never
			// Interface here) rather than item.Kind() — likely intended to
			// unwrap interface-typed map values; confirm.
			if value.Kind() == reflect.Interface {
				item = item.Elem()
			}
			if item.Kind() == reflect.Struct || item.Kind() == reflect.Ptr {
				// Extend both namespaces with "<field>.<key>." before recursing.
				newJSONNamespace := append(append(jsonNamespace, f.nameBytes...), '.')
				newJSONNamespace = append(append(newJSONNamespace, []byte(k.String())...), '.')
				newstructNamespace := append(append(structNamespace, f.structNameBytes...), '.')
				newstructNamespace = append(append(newstructNamespace, []byte(k.String())...), '.')
				err = v.ValidateStruct(item.Interface(), newJSONNamespace, newstructNamespace)
				if err != nil {
					return err
				}
			}
		}
		return nil
	case reflect.Slice, reflect.Array:
		for _, tag := range f.validTags {
			if err := v.checkDependentRules(tag, f, value, o, name, structName); err != nil {
				return err
			}
			if validfunc, ok := RuleMap[tag.name]; ok {
				isValid := validfunc(value)
				if !isValid {
					return v.formatsMessages(&FieldError{
						name:              name,
						structName:        structName,
						tag:               tag.name,
						messageName:       tag.messageName,
						messageParameters: parseValidatorMessageParameters(tag, o),
						attribute:         f.attribute,
						defaultAttribute:  f.defaultAttribute,
						value:             ToString(value.Interface()),
					})
				}
			}
			if validfunc, ok := ParamRuleMap[tag.name]; ok {
				isValid := validfunc(value, tag.params)
				if !isValid {
					return v.formatsMessages(&FieldError{
						name:              name,
						structName:        structName,
						tag:               tag.name,
						messageName:       tag.messageName,
						messageParameters: parseValidatorMessageParameters(tag, o),
						attribute:         f.attribute,
						defaultAttribute:  f.defaultAttribute,
						value:             ToString(value.Interface()),
					})
				}
			}
		}
		// Recurse into struct/pointer elements with "<field>.<index>." paths.
		for i := 0; i < value.Len(); i++ {
			var err error
			item := value.Index(i)
			if item.Kind() == reflect.Interface {
				item = item.Elem()
			}
			if item.Kind() == reflect.Struct || item.Kind() == reflect.Ptr {
				newJSONNamespace := append(append(jsonNamespace, f.nameBytes...), '.')
				newJSONNamespace = append(append(newJSONNamespace, []byte(strconv.Itoa(i))...), '.')
				newStructNamespace := append(append(structNamespace, f.structNameBytes...), '.')
				newStructNamespace = append(append(newStructNamespace, []byte(strconv.Itoa(i))...), '.')
				err = v.ValidateStruct(value.Index(i).Interface(), newJSONNamespace, newStructNamespace)
				if err != nil {
					return err
				}
			}
		}
		return nil
	case reflect.Struct:
		// Nested struct: recurse with the extended namespaces.
		jsonNamespace = append(append(jsonNamespace, f.nameBytes...), '.')
		structNamespace = append(append(structNamespace, f.structNameBytes...), '.')
		return v.ValidateStruct(value.Interface(), jsonNamespace, structNamespace)
	default:
		return &UnsupportedTypeError{value.Type()}
	}
}

// Empty determine whether a variable is empty
// (zero length, zero number, false bool, or nil pointer/interface).
func Empty(v reflect.Value) bool {
	switch v.Kind() {
	case reflect.String, reflect.Array:
		return v.Len() == 0
	case reflect.Map, reflect.Slice:
		return v.Len() == 0 || v.IsNil()
	case reflect.Bool:
		return !v.Bool()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return v.Int() == 0
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return v.Uint() == 0
	case reflect.Float32, reflect.Float64:
		return v.Float() == 0
	case reflect.Interface, reflect.Ptr:
		return v.IsNil()
	}
	// Fallback: compare against the type's zero value.
	return reflect.DeepEqual(v.Interface(), reflect.Zero(v.Type()).Interface())
}

// Error returns string equivalent for reflect.Type
func (e *UnsupportedTypeError) Error() string {
	return "validator: unsupported type: " + e.Type.String()
}

// stringValues implements sort.Interface over a slice of string-kinded
// reflect.Values (used to sort map keys deterministically).
func (sv stringValues) Len() int { return len(sv) }

func (sv stringValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }

func (sv stringValues) Less(i, j int) bool { return sv.get(i) < sv.get(j) }

func (sv stringValues) get(i int) string { return sv[i].String() }

// validateRequired reports whether v is non-empty (see Empty).
func validateRequired(v reflect.Value) bool {
	return !Empty(v)
}

// ValidateRequired check value required when anotherField str is a member of the set of strings params
func ValidateRequired(i interface{}) bool {
	v := reflect.ValueOf(i)
	return validateRequired(v)
}

// validateRequiredIf check value required when anotherField str is a member of the set of strings params.
// Returns false (invalid) when anotherField matches params and v is empty;
// on failure the matched value is recorded in tag.messageParameters.
func validateRequiredIf(v reflect.Value, anotherField reflect.Value, params []string, tag *ValidTag) bool {
	if anotherField.Kind() == reflect.Interface || anotherField.Kind() == reflect.Ptr {
		anotherField = anotherField.Elem()
	}
	// A missing other field imposes no requirement.
	if !anotherField.IsValid() {
		return true
	}
	switch anotherField.Kind() {
	case reflect.Bool,
		reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
		reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr,
		reflect.Float32, reflect.Float64,
		reflect.String:
		value := ToString(anotherField)
		// NOTE(review): the `tag != nil` guard makes a nil tag suppress the
		// failure entirely, while the Map/Slice branches below would panic on
		// a nil tag instead — inconsistent; confirm intended.
		if InString(value, params) && Empty(v) && tag != nil {
			tag.messageParameters = append(
				tag.messageParameters,
				messageParameter{
					Key:   "Value",
					Value: value,
				},
			)
			return false
		}
	case reflect.Map:
		values := []string{}
		var sv stringValues
		sv = anotherField.MapKeys()
		sort.Sort(sv)
		for _, k := range sv {
			// NOTE(review): indexes v with keys taken from anotherField —
			// almost certainly meant anotherField.MapIndex(k); confirm.
			value := v.MapIndex(k)
			if value.Kind() == reflect.Interface || value.Kind() == reflect.Ptr {
				value = value.Elem()
			}
			if value.Kind() != reflect.Struct {
				values = append(values, ToString(value.Interface()))
			} else {
				panic(fmt.Sprintf("validator: RequiredIf unsupport Type %T", value.Interface()))
			}
		}
		for _, value := range values {
			if InString(value, params) && Empty(v) {
				tag.messageParameters = append(
					tag.messageParameters,
					messageParameter{
						Key:   "Value",
						Value: value,
					},
				)
				return false
			}
		}
	case reflect.Slice, reflect.Array:
		values := []string{}
		// NOTE(review): iterates v although the switch is on
		// anotherField.Kind() — likely meant anotherField; confirm.
		for i := 0; i < v.Len(); i++ {
			value := v.Index(i)
			if value.Kind() == reflect.Interface || value.Kind() == reflect.Ptr {
				value = value.Elem()
			}
			if value.Kind() != reflect.Struct {
				values = append(values, ToString(value.Interface()))
			} else {
				panic(fmt.Sprintf("validator: RequiredIf unsupport Type %T", value.Interface()))
			}
		}
		for _, value := range values {
			if InString(value, params) {
				if Empty(v) {
					tag.messageParameters = append(
						tag.messageParameters,
						messageParameter{
							Key:   "Value",
							Value: value,
						},
					)
					return false
				}
			}
		}
	default:
		panic(fmt.Sprintf("validator: RequiredIf unsupport Type %T", anotherField.Interface()))
	}
	return true
}

// validateRequiredUnless check value required when anotherField str is a member of the set of strings params.
// Inverse of validateRequiredIf: v must be non-empty unless anotherField
// matches one of params.
func validateRequiredUnless(v reflect.Value, anotherField reflect.Value, params []string) bool {
	if anotherField.Kind() == reflect.Interface || anotherField.Kind() == reflect.Ptr {
		anotherField = anotherField.Elem()
	}
	if !anotherField.IsValid() {
		return true
	}
	switch anotherField.Kind() {
	case reflect.Bool,
		reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
		reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr,
		reflect.Float32, reflect.Float64,
		reflect.String:
		value := ToString(anotherField)
		if !InString(value, params) {
			if Empty(v) {
				return false
			}
		}
	case reflect.Map:
		values := []string{}
		var sv stringValues
		sv = anotherField.MapKeys()
		sort.Sort(sv)
		for _, k := range sv {
			// NOTE(review): same v/anotherField mix-up as validateRequiredIf;
			// confirm whether anotherField.MapIndex(k) was intended.
			value := v.MapIndex(k)
			if value.Kind() == reflect.Interface || value.Kind() == reflect.Ptr {
				value = value.Elem()
			}
			if value.Kind() != reflect.Struct {
				values = append(values, ToString(value.Interface()))
			} else {
				panic(fmt.Sprintf("validator: requiredUnless unsupport Type %T", value.Interface()))
			}
		}
		for _, value := range values {
			if !InString(value, params) {
				if Empty(v) {
					return false
				}
			}
		}
	case reflect.Slice, reflect.Array:
		values := []string{}
		// NOTE(review): iterates v though the switch is on anotherField.Kind();
		// confirm whether anotherField was intended.
		for i := 0; i < v.Len(); i++ {
			value := v.Index(i)
			if value.Kind() == reflect.Interface || value.Kind() == reflect.Ptr {
				value = value.Elem()
			}
			if value.Kind() != reflect.Struct {
				values = append(values, ToString(value.Interface()))
			} else {
				panic(fmt.Sprintf("validator: requiredUnless unsupport Type %T", value.Interface()))
			}
		}
		for _, value := range values {
			if !InString(value, params) {
				if Empty(v) {
					return false
				}
			}
		}
	default:
		panic(fmt.Sprintf("validator: requiredUnless unsupport Type %T", anotherField.Interface()))
	}
	return true
}

// allFailingRequired validate that an attribute exists when all other attributes do not.
// Returns true only when every named field is missing or empty.
func allFailingRequired(parameters []string, v reflect.Value) bool {
	for _, p := range parameters {
		anotherField, err := findField(p, v)
		if err != nil {
			// Unresolvable fields are treated as absent.
			continue
		}
		if !Empty(anotherField) {
			return false
		}
	}
	return true
}

// anyFailingRequired determine if any of the given attributes fail the required test.
func anyFailingRequired(parameters []string, v reflect.Value) bool {
	for _, p := range parameters {
		anotherField, err := findField(p, v)
		if err != nil {
			return true
		}
		if Empty(anotherField) {
			return true
		}
	}
	return false
}

// checkRequired evaluates every required* tag on f against value, returning
// a formatted FieldError for the first rule that fails, or nil.
// o is the enclosing struct, used to resolve referenced sibling fields.
func (v *Validator) checkRequired(value reflect.Value, f *field, o reflect.Value, name string, structName string) *FieldError {
	for _, tag := range f.requiredTags {
		isError := false
		switch tag.name {
		case "required":
			isError = !validateRequired(value)
		case "requiredIf":
			anotherField, err := findField(tag.params[0], o)
			// Rules with an unresolvable field or too few params are skipped.
			if err == nil && len(tag.params) >= 2 && !validateRequiredIf(value, anotherField, tag.params[1:], tag) {
				isError = true
			}
		case "requiredUnless":
			anotherField, err := findField(tag.params[0], o)
			if err == nil && len(tag.params) >= 2 && !validateRequiredUnless(value, anotherField, tag.params[1:]) {
				isError = true
			}
		// NOTE(review): the requiredWith* rules receive the field value, but
		// allFailingRequired/anyFailingRequired resolve sibling names via
		// FieldByName on it — they appear to expect the parent struct (o);
		// confirm against callers/tests.
		case "requiredWith":
			if !validateRequiredWith(tag.params, value) {
				isError = true
			}
		case "requiredWithAll":
			if !validateRequiredWithAll(tag.params, value) {
				isError = true
			}
		case "requiredWithout":
			if !validateRequiredWithout(tag.params, value) {
				isError = true
			}
		case "requiredWithoutAll":
			if !validateRequiredWithoutAll(tag.params, value) {
				isError = true
			}
		}
		if isError {
			return v.formatsMessages(&FieldError{
				name:              name,
				structName:        structName,
				tag:               tag.name,
				messageName:       tag.messageName,
				messageParameters: parseValidatorMessageParameters(tag, o),
				attribute:         f.attribute,
				defaultAttribute:  f.defaultAttribute,
				value:             ToString(value.Interface()),
			})
		}
	}
	return nil
}

// validateRequiredWith The field under validation must be present and not empty only if any of the other specified fields are present.
func validateRequiredWith(otherFields []string, v reflect.Value) bool {
	if !allFailingRequired(otherFields, v) {
		return validateRequired(v)
	}
	return true
}

// validateRequiredWithAll The field under validation must be present and not empty only if all of the other specified fields are present.
func validateRequiredWithAll(otherFields []string, v reflect.Value) bool {
	if !anyFailingRequired(otherFields, v) {
		return validateRequired(v)
	}
	return true
}

// RequiredWithout The field under validation must be present and not empty only when any of the other specified fields are not present.
func validateRequiredWithout(otherFields []string, v reflect.Value) bool {
	if anyFailingRequired(otherFields, v) {
		return validateRequired(v)
	}
	return true
}

// validateRequiredWithoutAll The field under validation must be present and not empty only when all of the other specified fields are not present.
func validateRequiredWithoutAll(otherFields []string, v reflect.Value) bool { if allFailingRequired(otherFields, v) { return validateRequired(v) } return true } func parseValidatorMessageParameters(validTag *ValidTag, o reflect.Value) MessageParameters { messageParameters := validTag.messageParameters switch validTag.name { case "requiredWith", "requiredWithAll", "requiredWithout", "requiredWithoutAll": first := true var buff bytes.Buffer for _, v := range validTag.params { if first { first = false } else { buff.WriteByte(' ') buff.WriteByte('/') buff.WriteByte(' ') } buff.WriteString(v) } messageParameters = append( messageParameters, messageParameter{ Key: "Values", Value: buff.String(), }, ) case "requiredIf", "requiredUnless", "same": other := getDisplayableAttribute(o, validTag.params[0]) messageParameters = append( messageParameters, messageParameter{ Key: "Other", Value: other, }, ) } return messageParameters } func (v *Validator) formatsMessages(fieldError *FieldError) *FieldError { var message string var ok bool if message, ok = v.CustomMessage[fieldError.structName+"."+fieldError.messageName]; ok { fieldError.err = fmt.Errorf(message) return fieldError } message, ok = MessageMap[fieldError.messageName] if ok { attribute := fieldError.attribute if customAttribute, ok := v.Attributes[fieldError.structName]; ok { attribute = customAttribute } else if fieldError.defaultAttribute != "" { attribute = fieldError.defaultAttribute } message = replaceAttributes(message, attribute, fieldError.messageParameters) fieldError.err = fmt.Errorf(message) return fieldError } fieldError.err = fmt.Errorf("validator: undefined message : %s", fieldError.messageName) return fieldError } func replaceAttributes(message string, attribute string, messageParameters MessageParameters) string { message = strings.Replace(message, "{{.Attribute}}", attribute, -1) for _, parameter := range messageParameters { message = strings.Replace(message, "{{."+parameter.Key+"}}", parameter.Value, 
-1) } return message } func getDisplayableAttribute(o reflect.Value, attribute string) string { attributes := strings.Split(attribute, ".") if len(attributes) > 0 { attribute = o.Type().Name() + attributes[0] } else { attribute = strings.Join(attributes[len(attributes)-2:], ".") } return attributes[len(attributes)-1] } func findField(fieldName string, v reflect.Value) (reflect.Value, error) { fields := strings.Split(fieldName, ".") current := v.FieldByName(fields[0]) i := 1 if len(fields) > i { for true { if current.Kind() == reflect.Interface || current.Kind() == reflect.Ptr { current = current.Elem() } if !current.IsValid() { return current, fmt.Errorf("validator: findField Struct is nil") } name := fields[i] current = current.FieldByName(name) if i == len(fields)-1 { break } i++ } } return current, nil } func (v *Validator) checkDependentRules(validTag *ValidTag, f *field, value reflect.Value, o reflect.Value, name string, structName string) error { isValid := true var anotherField reflect.Value var err error switch validTag.name { case "gt", "gte", "lt", "lte", "same": anotherField, err = findField(validTag.params[0], o) if err != nil { return nil } } switch validTag.name { case "gt": isValid = validateGt(value, anotherField) case "gte": isValid = validateGte(value, anotherField) case "lt": isValid = validateLt(value, anotherField) case "lte": isValid = validateLte(value, anotherField) case "same": isValid = validateSame(value, anotherField) } if !isValid { return v.formatsMessages(&FieldError{ name: name, structName: structName, tag: validTag.name, messageName: validTag.messageName, messageParameters: parseValidatorMessageParameters(validTag, o), attribute: f.attribute, defaultAttribute: f.defaultAttribute, value: ToString(value.Interface()), }) } return nil }
validator.go
0.743447
0.527803
validator.go
starcoder