code
stringlengths
114
1.05M
path
stringlengths
3
312
quality_prob
float64
0.5
0.99
learning_prob
float64
0.2
1
filename
stringlengths
3
168
kind
stringclasses
1 value
package imageutil

import (
	"image"
	"image/color"
)

// ResizeNearestNeighbor returns a new NRGBA image with the given width and
// height created by resizing the given image using the nearest neighbor
// algorithm.
//
// newWidth and newHeight must be positive; the source image is read through
// its generic At accessor, so any image.Image works.
func ResizeNearestNeighbor(img image.Image, newWidth, newHeight int) *image.NRGBA {
	w := img.Bounds().Max.X
	h := img.Bounds().Max.Y
	nimg := image.NewNRGBA(image.Rect(0, 0, newWidth, newHeight))
	// Source step per destination pixel in 16.16 fixed point. The +1 biases
	// the step up so integer truncation cannot drift the sampling position
	// below the intended source column/row.
	xr := (w<<16)/newWidth + 1
	yr := (h<<16)/newHeight + 1
	for yo := 0; yo < newHeight; yo++ {
		y2 := (yo * yr) >> 16
		for xo := 0; xo < newWidth; xo++ {
			x2 := (xo * xr) >> 16
			nimg.Set(xo, yo, img.At(x2, y2))
		}
	}
	return nimg
}

// ResizeBilinear returns a new NRGBA image with the given width and height
// created by resizing the given NRGBA image using bilinear interpolation.
//
// Each color channel is interpolated after weighting by the sample's alpha,
// then divided back out by the average alpha of the four samples, so fully
// transparent pixels do not bleed color into their neighbors.
func ResizeBilinear(img *image.NRGBA, newWidth, newHeight int) *image.NRGBA {
	w := img.Bounds().Max.X
	h := img.Bounds().Max.Y
	xr := float32(w-1) / float32(newWidth)
	yr := float32(h-1) / float32(newHeight)
	nimg := image.NewNRGBA(image.Rect(0, 0, newWidth, newHeight))
	for yo := 0; yo < newHeight; yo++ {
		y := int(yr * float32(yo))
		dy := yr*float32(yo) - float32(y)
		ody := 1 - dy
		for xo := 0; xo < newWidth; xo++ {
			x := int(xr * float32(xo))
			dx := xr*float32(xo) - float32(x)
			odx := 1 - dx

			// The four neighboring source samples. NRGBAAt is bounds-checked
			// and returns the zero color outside the image, so the x+1 / y+1
			// reads are always safe.
			a := img.NRGBAAt(x, y)
			b := img.NRGBAAt(x+1, y)
			c := img.NRGBAAt(x, y+1)
			d := img.NRGBAAt(x+1, y+1)

			// Bilinear weights: (1-dx)(1-dy), dx(1-dy), (1-dx)dy, dx*dy.
			alpha := float32(a.A)*odx*ody + float32(b.A)*dx*ody +
				float32(c.A)*dy*odx + float32(d.A)*dx*dy
			red := float32(a.R)*float32(a.A)*odx*ody + float32(b.R)*float32(b.A)*dx*ody +
				float32(c.R)*float32(c.A)*dy*odx + float32(d.R)*float32(d.A)*dx*dy
			green := float32(a.G)*float32(a.A)*odx*ody + float32(b.G)*float32(b.A)*dx*ody +
				float32(c.G)*float32(c.A)*dy*odx + float32(d.G)*float32(d.A)*dx*dy
			blue := float32(a.B)*float32(a.A)*odx*ody + float32(b.B)*float32(b.A)*dx*ody +
				float32(c.B)*float32(c.A)*dy*odx + float32(d.B)*float32(d.A)*dx*dy

			// Un-premultiply by the average alpha of the four samples,
			// clamping each channel to the valid 0..255 range.
			aavg := (float32(a.A) + float32(b.A) + float32(c.A) + float32(d.A)) / 4
			if aavg > 0 {
				red /= aavg
				green /= aavg
				blue /= aavg
				if red > 255 {
					red = 255
				}
				if green > 255 {
					green = 255
				}
				if blue > 255 {
					blue = 255
				}
			} else {
				red, green, blue = 0, 0, 0
			}
			nimg.SetNRGBA(xo, yo, color.NRGBA{
				R: uint8(red),
				G: uint8(green),
				B: uint8(blue),
				A: uint8(alpha),
			})
		}
	}
	return nimg
}
resize.go
0.689201
0.646028
resize.go
starcoder
package gofun

import "fmt"

// List represents an immutable-shaped singly linked list of arbitrary values.
// A List cell is either a cons cell (isCons == true) carrying a head value and
// a tail list, or the empty list (isCons == false).
type List struct {
	isCons bool
	head   interface{}
	tail   *List
}

// ListOrElse returns x if x is List pointer, otherwise y.
func ListOrElse(x interface{}, y *List) *List {
	if lst, ok := x.(*List); ok {
		return lst
	}
	return y
}

// Nil creates an empty list.
func Nil() *List {
	// The zero value already has isCons == false, head == nil, tail == nil.
	return &List{}
}

// Cons creates a list with a first element and a tail that is other list.
func Cons(head interface{}, tail *List) *List {
	return &List{isCons: true, head: head, tail: tail}
}

// IsNil returns true if list is empty, otherwise false.
func (ls *List) IsNil() bool {
	return !ls.isCons
}

// IsCons returns true if list isn't empty, otherwise false.
func (ls *List) IsCons() bool {
	return ls.isCons
}

// Head returns the first element.
func (ls *List) Head() interface{} {
	return ls.head
}

// HeadOption returns the optional first element: Some(head) for a non-empty
// list, None otherwise.
func (ls *List) HeadOption() *Option {
	if !ls.isCons {
		return None()
	}
	return Some(ls.head)
}

// Tail returns the list of elements except the first element.
func (ls *List) Tail() *List {
	return ls.tail
}

// TailOption returns the optional list of elements except the first element.
func (ls *List) TailOption() *Option {
	if !ls.isCons {
		return None()
	}
	return Some(ls.tail)
}

// SetTail sets a tail if the list isn't empty. If SetTail sets the tail, this
// method returns true; otherwise this method returns false. This method should
// be used to quick creates lists.
func (ls *List) SetTail(tail *List) bool {
	if !ls.isCons {
		return false
	}
	ls.tail = tail
	return true
}

// String renders the list as "List[e1 e2 ...]" with elements separated by a
// single space.
func (ls *List) String() string {
	out := "List["
	sep := ""
	for cur := ls; cur.isCons; cur = cur.tail {
		out += sep + fmt.Sprintf("%v", cur.head)
		sep = " "
	}
	return out + "]"
}

// Concat concatenates two lists.
func (xs *List) Concat(ys *List) *List { var zs *List = Nil() var prev *List = nil for l := xs; l.IsCons(); l = l.Tail() { l2 := Cons(l.Head(), Nil()) if prev != nil { prev.SetTail(l2) } else { zs = l2 } prev = l2 } if prev != nil { prev.SetTail(ys) } else { zs = ys } return zs }
list.go
0.836955
0.442817
list.go
starcoder
package bgls

import (
	"bytes"
	"fmt"
	"math/big"

	"github.com/dchest/blake2b"
	"github.com/ethereum/go-ethereum/crypto/bn256/cloudflare"
	gosha3 "github.com/ethereum/go-ethereum/crypto/sha3"
	"golang.org/x/crypto/sha3"
)

// altbn128 is the curve-operations receiver for the alt_bn128 pairing curve.
type altbn128 struct {
}

// altbn128Point1 wraps a bn256.G1 group element.
type altbn128Point1 struct {
	point *bn256.G1
}

// altbn128Point2 wraps a bn256.G2 group element.
type altbn128Point2 struct {
	point *bn256.G2
}

// altbn128PointT wraps a bn256.GT (pairing target group) element.
type altbn128PointT struct {
	point *bn256.GT
}

// Altbn128Inst is the instance for the altbn128 curve, with all of its functions.
var Altbn128 = &altbn128{}

// MakeG1Point copies points into []byte and unmarshals to get around curvePoint not being exported.
// x and y are big-endian affine coordinates, each at most 32 bytes; validity
// is delegated to bn256.G1.Unmarshal.
func (curve *altbn128) MakeG1Point(x, y *big.Int) (Point1, bool) {
	xBytes, yBytes := x.Bytes(), y.Bytes()
	// Right-align each coordinate into its 32-byte slot of the 64-byte buffer.
	ret := make([]byte, 64)
	copy(ret[32-len(xBytes):], xBytes)
	copy(ret[64-len(yBytes):], yBytes)
	result := new(bn256.G1)
	var ok error
	_, ok = result.Unmarshal(ret)
	if ok != nil {
		return nil, false
	}
	return &altbn128Point1{result}, true
}

// Add returns the group sum of this point and otherPoint1; false if the
// argument is not an altbn128 G1 point.
func (g1Point *altbn128Point1) Add(otherPoint1 Point1) (Point1, bool) {
	if other, ok := (otherPoint1).(*altbn128Point1); ok {
		sum := new(bn256.G1).Add(g1Point.point, other.point)
		ret := &altbn128Point1{sum}
		return ret, true
	}
	return nil, false
}

// Copy returns an independent copy of the point via a marshal round trip.
func (g1Point *altbn128Point1) Copy() Point1 {
	result := new(bn256.G1)
	result.Unmarshal(g1Point.point.Marshal())
	return &altbn128Point1{result}
}

// Equals compares the canonical marshaled encodings of the two points.
func (g1Point *altbn128Point1) Equals(otherPoint1 Point1) bool {
	if other, ok := (otherPoint1).(*altbn128Point1); ok {
		return bytes.Equal(g1Point.point.Marshal(), other.point.Marshal())
	}
	return false
}

// Marshal produces the 32-byte compressed encoding: the x coordinate, with
// the top bit of the first byte set when 2y > q (i.e. y is the "larger" root).
// Note the Mul mutates a throwaway copy produced by ToAffineCoords, not the point.
func (g1Point *altbn128Point1) Marshal() []byte {
	x, y := g1Point.ToAffineCoords()
	xBytes := pad32Bytes(x.Bytes())
	y.Mul(y, two)
	if y.Cmp(altbnG1Q) == 1 {
		xBytes[0] += 128
	}
	return xBytes
}

// pad32Bytes left-pads xBytes with zero bytes to exactly 32 bytes.
// Inputs already 32 bytes or longer are returned unchanged (not copied).
func pad32Bytes(xBytes []byte) []byte {
	if len(xBytes) < 32 {
		offset := 32 - len(xBytes)
		rawBytes := make([]byte, 32, 32)
		for i := 0; i < len(xBytes); i++ {
			rawBytes[i+offset] = xBytes[i]
		}
		return rawBytes
	}
	return xBytes
}

// Mul returns scalar * point in G1.
func (g1Point *altbn128Point1) Mul(scalar *big.Int) Point1 {
	prod := new(bn256.G1).ScalarMult(g1Point.point, scalar)
	ret := &altbn128Point1{prod}
	return ret
}

// Pair computes the bilinear pairing e(g1Point, g2Point); false if the
// argument is not an altbn128 G2 point.
func (g1Point *altbn128Point1) Pair(g2Point Point2) (PointT, bool) {
	if other, ok := (g2Point).(*altbn128Point2); ok {
		p3 := bn256.Pair(g1Point.point, other.point)
		ret := altbn128PointT{p3}
		return ret, true
	}
	return nil, false
}

// ToAffineCoords decodes the 64-byte marshaled form back into big-endian
// affine x and y coordinates (fresh big.Ints, safe to mutate).
func (g1Point *altbn128Point1) ToAffineCoords() (x, y *big.Int) {
	Bytestream := g1Point.point.Marshal()
	xBytes, yBytes := Bytestream[:32], Bytestream[32:64]
	x = new(big.Int).SetBytes(xBytes)
	y = new(big.Int).SetBytes(yBytes)
	return
}

// MakeG2Point copies points into []byte and unmarshals to get around twistPoint not being exported.
// The four arguments are the Fq2 coefficient parts of x and y in the order
// the underlying library expects (xx, xy, yx, yy).
func (curve *altbn128) MakeG2Point(xx, xy, yx, yy *big.Int) (Point2, bool) {
	xxBytes, xyBytes := pad32Bytes(xx.Bytes()), pad32Bytes(xy.Bytes())
	yxBytes, yyBytes := pad32Bytes(yx.Bytes()), pad32Bytes(yy.Bytes())
	ret := make([]byte, 128)
	copy(ret[:32], xxBytes)
	copy(ret[32:], xyBytes)
	copy(ret[64:], yxBytes)
	copy(ret[96:], yyBytes)
	result := new(bn256.G2)
	var ok error
	_, ok = result.Unmarshal(ret)
	if ok != nil {
		// NOTE(review): debug prints on the failure path — consider removing
		// or routing through a logger before release.
		fmt.Println(ok)
		fmt.Println(len(xxBytes), len(xyBytes), len(yxBytes), len(yyBytes))
		return nil, false
	}
	return &altbn128Point2{result}, true
}

// Add returns the group sum of this point and otherPoint2; false if the
// argument is not an altbn128 G2 point.
func (g2Point *altbn128Point2) Add(otherPoint2 Point2) (Point2, bool) {
	if other, ok := (otherPoint2).(*altbn128Point2); ok {
		sum := new(bn256.G2).Add(g2Point.point, other.point)
		ret := &altbn128Point2{sum}
		return ret, true
	}
	return nil, false
}

// Copy returns an independent copy of the point via a marshal round trip.
func (g2Point *altbn128Point2) Copy() Point2 {
	result := new(bn256.G2)
	result.Unmarshal(g2Point.point.Marshal())
	return &altbn128Point2{result}
}

// Equals compares the canonical marshaled encodings of the two points.
func (g2Point *altbn128Point2) Equals(otherPoint2 Point2) bool {
	if other, ok := (otherPoint2).(*altbn128Point2); ok {
		return bytes.Equal(g2Point.point.Marshal(), other.point.Marshal())
	}
	return false
}

// Marshal produces the 64-byte compressed encoding of a G2 point: both x
// coordinate parts, with the sign of each y part folded into the top bit of
// the corresponding x part when 2y > q.
func (g2Point *altbn128Point2) Marshal() []byte {
	xi, xr, yi, yr := g2Point.ToAffineCoords()
	xiBytes := pad32Bytes(xi.Bytes())
	xrBytes := pad32Bytes(xr.Bytes())
	// NOTE(review): y2 is computed here but never used afterwards — this
	// Exp looks like leftover code; confirm and remove.
	y2 := &complexNum{yi, yr}
	y2.Exp(y2, two, altbnG1Q)
	yi.Mul(yi, two)
	yr.Mul(yr, two)
	if yi.Cmp(altbnG1Q) == 1 {
		xiBytes[0] += 128
	}
	if yr.Cmp(altbnG1Q) == 1 {
		xrBytes[0] += 128
	}
	xBytes := make([]byte, 64, 64)
	copy(xBytes[:32], xiBytes)
	copy(xBytes[32:], xrBytes)
	return xBytes
}

// Mul returns scalar * point in G2.
func (g2Point *altbn128Point2) Mul(scalar *big.Int) Point2 {
	prod := new(bn256.G2).ScalarMult(g2Point.point, scalar)
	ret := &altbn128Point2{prod}
	return ret
}

// ToAffineCoords decodes the 128-byte marshaled form into the four big-endian
// Fq2 coefficient parts (xx, xy, yx, yy), each as a fresh big.Int.
func (g2Point *altbn128Point2) ToAffineCoords() (xx, xy, yx, yy *big.Int) {
	Bytestream := g2Point.point.Marshal()
	xxBytes, xyBytes := Bytestream[:32], Bytestream[32:64]
	yxBytes, yyBytes := Bytestream[64:96], Bytestream[96:128]
	xx = new(big.Int).SetBytes(xxBytes)
	xy = new(big.Int).SetBytes(xyBytes)
	yx = new(big.Int).SetBytes(yxBytes)
	yy = new(big.Int).SetBytes(yyBytes)
	return
}

// Add returns the GT group sum; false if the argument is not an altbn128 GT point.
func (gTPoint altbn128PointT) Add(otherPointT PointT) (PointT, bool) {
	if other, ok := (otherPointT).(altbn128PointT); ok {
		sum := new(bn256.GT).Add(gTPoint.point, other.point)
		ret := altbn128PointT{sum}
		return ret, true
	}
	return nil, false
}

// Copy returns an independent copy of the point via a marshal round trip.
// NOTE(review): this returns *altbn128PointT while the other methods use the
// value type altbn128PointT — confirm both satisfy PointT.
func (gTPoint altbn128PointT) Copy() PointT {
	result := new(bn256.GT)
	result.Unmarshal(gTPoint.point.Marshal())
	return &altbn128PointT{result}
}

// Marshal returns the underlying library's 384-byte GT encoding.
func (gTPoint altbn128PointT) Marshal() []byte {
	return gTPoint.point.Marshal()
}

// Equals compares the canonical marshaled encodings of the two points.
func (gTPoint altbn128PointT) Equals(otherPointT PointT) bool {
	if other, ok := (otherPointT).(altbn128PointT); ok {
		return bytes.Equal(gTPoint.Marshal(), other.Marshal())
	}
	return false
}

// Mul returns scalar * point in GT.
func (gTPoint altbn128PointT) Mul(scalar *big.Int) PointT {
	prod := new(bn256.GT).ScalarMult(gTPoint.point, scalar)
	ret := altbn128PointT{prod}
	return ret
}

// UnmarshalG1 decodes a G1 point from either the 64-byte uncompressed form or
// the 32-byte compressed form produced by altbn128Point1.Marshal.
// NOTE(review): in the compressed path the input slice is mutated in place
// (the sign bit is cleared from data[0]) — callers' buffers are modified.
func (curve *altbn128) UnmarshalG1(data []byte) (Point1, bool) {
	if data == nil || (len(data) != 64 && len(data) != 32) {
		return nil, false
	}
	if len(data) == 64 { // No point compression
		curvePoint := new(bn256.G1)
		if _, ok := curvePoint.Unmarshal(data); ok == nil {
			return &altbn128Point1{curvePoint}, true
		}
	} else if len(data) == 32 { // Point compression
		ySgn := (data[0] >= 128)
		if ySgn {
			data[0] -= 128
		}
		x := new(big.Int).SetBytes(data)
		if x.Cmp(zero) == 0 {
			return Altbn128.MakeG1Point(zero, zero)
		}
		y := Altbn128.g1XToYSquared(x)
		// Underlying library already checks that y is on the curve, thus isQuadRes isn't checked here
		y = calcQuadRes(y, altbnG1Q)
		// Pick the root whose doubled value matches the recorded sign bit.
		doubleY := new(big.Int).Mul(y, two)
		cmpRes := doubleY.Cmp(altbnG1Q)
		if ySgn && cmpRes == -1 {
			y.Sub(altbnG1Q, y)
		} else if !ySgn && cmpRes == 1 {
			y.Sub(altbnG1Q, y)
		}
		return Altbn128.MakeG1Point(x, y)
	}
	return nil, false
}

// UnmarshalG2 decodes a G2 point from either the 128-byte uncompressed form or
// the 64-byte compressed form produced by altbn128Point2.Marshal.
// NOTE(review): as in UnmarshalG1, the compressed path clears the sign bits in
// the caller's slice in place.
func (curve *altbn128) UnmarshalG2(data []byte) (Point2, bool) {
	if data == nil || (len(data) != 64 && len(data) != 128) {
		return nil, false
	}
	if len(data) == 128 { // No point compression
		curvePoint := new(bn256.G2)
		if _, ok := curvePoint.Unmarshal(data); ok == nil {
			return &altbn128Point2{curvePoint}, true
		}
	} else if len(data) == 64 { // Point compression
		xiBytes := data[:32]
		xrBytes := data[32:]
		yiSgn := (xiBytes[0] >= 128)
		yrSgn := (xrBytes[0] >= 128)
		if yiSgn {
			xiBytes[0] -= 128
		}
		if yrSgn {
			xrBytes[0] -= 128
		}
		xi := new(big.Int).SetBytes(xiBytes)
		xr := new(big.Int).SetBytes(xrBytes)
		if xi.Cmp(zero) == 0 && xr.Cmp(zero) == 0 {
			return Altbn128.MakeG2Point(zero, zero, zero, zero)
		}
		x := &complexNum{xi, xr}
		y := Altbn128.g2XToYSquared(x)
		// Underlying library already checks that y is on the curve, thus isQuadRes isn't checked here
		y = calcComplexQuadRes(y, altbnG1Q)
		// Pick, per component, the root whose doubled value matches the sign bit.
		doubleYRe := new(big.Int).Mul(y.re, two)
		doubleYIm := new(big.Int).Mul(y.im, two)
		cmpResRe := doubleYRe.Cmp(altbnG1Q)
		cmpResIm := doubleYIm.Cmp(altbnG1Q)
		if yiSgn && cmpResIm == -1 {
			y.im.Sub(altbnG1Q, y.im)
		} else if !yiSgn && cmpResIm == 1 {
			y.im.Sub(altbnG1Q, y.im)
		}
		if yrSgn && cmpResRe == -1 {
			y.re.Sub(altbnG1Q, y.re)
		} else if !yrSgn && cmpResRe == 1 {
			y.re.Sub(altbnG1Q, y.re)
		}
		return Altbn128.MakeG2Point(x.im, x.re, y.im, y.re)
	}
	return nil, false
}

// UnmarshalGT decodes a GT point from its 384-byte marshaled form.
func (curve *altbn128) UnmarshalGT(data []byte) (PointT, bool) {
	if data == nil || len(data) != 384 {
		return nil, false
	}
	curvePoint := new(bn256.GT)
	if _, ok := curvePoint.Unmarshal(data); ok == nil {
		return altbn128PointT{curvePoint}, true
	}
	return nil, false
}

// Curve parameter accessors (short Weierstrass form y^2 = x^3 + ax + b, a = 0).
func (curve *altbn128) getG1A() *big.Int {
	return zero
}

func (curve *altbn128) getG1B() *big.Int {
	return altbnG1B
}

func (curve *altbn128) getG1Q() *big.Int {
	return altbnG1Q
}

func (curve *altbn128) getG1Order() *big.Int {
	return altbnG1Order
}

// g1XToYSquared computes x^3 + b mod q, i.e. y^2 for a G1 x coordinate.
// NOTE(review): the final Add is not reduced mod q here — presumably the
// caller (calcQuadRes) reduces; confirm.
func (curve *altbn128) g1XToYSquared(x *big.Int) *big.Int {
	result := new(big.Int)
	result.Exp(x, three, altbnG1Q)
	result.Add(result, altbnG1B)
	return result
}

// g2XToYSquared computes x^3 + b' in Fq2 for a G2 x coordinate.
func (curve *altbn128) g2XToYSquared(x *complexNum) *complexNum {
	result := getComplexZero()
	result.Exp(x, three, altbnG1Q)
	result.Add(result, altbnG2B, altbnG1Q)
	return result
}

// GetG1 returns the canonical G1 generator.
func (curve *altbn128) GetG1() Point1 {
	return altbnG1
}

// GetG2 returns the canonical G2 generator.
func (curve *altbn128) GetG2() Point2 {
	return altbnG2
}

// GetGT returns e(G1, G2), the canonical GT generator.
func (curve *altbn128) GetGT() PointT {
	return altbnGT
}

//curve specific constants
var altbnG1B = big.NewInt(3)
var altbnG1Q, _ = new(big.Int).SetString("21888242871839275222246405745257275088696311157297823662689037894645226208583", 10)

var altbnG2BRe, _ = new(big.Int).SetString("19485874751759354771024239261021720505790618469301721065564631296452457478373", 10)
var altbnG2BIm, _ = new(big.Int).SetString("266929791119991161246907387137283842545076965332900288569378510910307636690", 10)
var altbnG2B = &complexNum{altbnG2BIm, altbnG2BRe}

//precomputed Z = (-1 + sqrt(-3))/2 in Fq
var altbnZ, _ = new(big.Int).SetString("2203960485148121921418603742825762020974279258880205651966", 10)

//precomputed sqrt(-3) in Fq
var altbnSqrtn3, _ = new(big.Int).SetString("4407920970296243842837207485651524041948558517760411303933", 10)

// Generators and the derived GT generator.
var altbnG1 = &altbn128Point1{new(bn256.G1).ScalarBaseMult(one)}
var altbnG2 = &altbn128Point2{new(bn256.G2).ScalarBaseMult(one)}
var altbnGT, _ = altbnG1.Pair(altbnG2)

var altbnG1Order, _ = new(big.Int).SetString("21888242871839275222246405745257275088548364400416034343698204186575808495617", 10)

// Note that the cofactor in this curve is just 1

// AltbnSha3 Hashes a message to a point on Altbn128 using SHA3 and try and increment
// The return value is the x,y affine coordinate pair.
func AltbnSha3(message []byte) (p1, p2 *big.Int) {
	p1, p2 = hash64(message, sha3.Sum512, Altbn128)
	return
}

// AltbnKeccak3 Hashes a message to a point on Altbn128 using Keccak3 and try and increment
// Keccak3 is only for compatability with Ethereum hashing.
// The return value is the x,y affine coordinate pair.
func AltbnKeccak3(message []byte) (p1, p2 *big.Int) {
	p1, p2 = hash32(message, EthereumSum256, Altbn128)
	return
}

// AltbnBlake2b Hashes a message to a point on Altbn128 using Blake2b and try and increment
// The return value is the x,y affine coordinate pair.
func AltbnBlake2b(message []byte) (p1, p2 *big.Int) {
	p1, p2 = hash64(message, blake2b.Sum512, Altbn128)
	return
}

// AltbnKang12 Hashes a message to a point on Altbn128 using KangarooTwelve and try and increment
// The return value is the x,y affine coordinate pair.
func AltbnKang12(message []byte) (p1, p2 *big.Int) {
	p1, p2 = hash64(message, kang12_64, Altbn128)
	return
}

// HashToG1 Hashes a message to a point on Altbn128 using Keccak3 and try and increment
// This is for compatability with Ethereum hashing.
// The return value is the altbn_128 library's internel representation for points.
func (curve *altbn128) HashToG1(message []byte) Point1 {
	x, y := AltbnKeccak3(message)
	p, _ := curve.MakeG1Point(x, y)
	return p
}

// EthereumSum256 returns the Keccak3-256 digest of the data. This is because Ethereum
// uses a non-standard hashing algo.
func EthereumSum256(data []byte) (digest [32]byte) {
	h := gosha3.NewKeccak256()
	h.Write(data)
	h.Sum(digest[:0])
	return
}
alt_bn128.go
0.725746
0.496643
alt_bn128.go
starcoder
package blueprint

import (
	"encoding/json"
	"fmt"
	"testing"

	"github.com/ingrammicro/cio/api/types"
	"github.com/ingrammicro/cio/utils"
	"github.com/stretchr/testify/assert"
)

// TODO exclude from release compile

// ListTemplatesMocked test mocked function.
// Happy path: the mocked Concerto service returns the marshaled templatesIn
// with HTTP 200, and ListTemplates must round-trip them unchanged.
func ListTemplatesMocked(t *testing.T, templatesIn []*types.Template) []*types.Template {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewTemplateService(cs)
	assert.Nil(err, "Couldn't load template service")
	assert.NotNil(ds, "Template service not instanced")

	// to json
	dIn, err := json.Marshal(templatesIn)
	assert.Nil(err, "Template test data corrupted")

	// call service
	cs.On("Get", APIPathBlueprintTemplates).Return(dIn, 200, nil)
	templatesOut, err := ds.ListTemplates()
	assert.Nil(err, "Error getting template list")
	assert.Equal(templatesIn, templatesOut, "ListTemplates returned different templates")

	return templatesOut
}

// ListTemplatesFailErrMocked test mocked function.
// Transport-error path: the mock returns an error alongside the payload, and
// ListTemplates must propagate it and yield no templates.
func ListTemplatesFailErrMocked(t *testing.T, templatesIn []*types.Template) []*types.Template {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewTemplateService(cs)
	assert.Nil(err, "Couldn't load template service")
	assert.NotNil(ds, "Template service not instanced")

	// to json
	dIn, err := json.Marshal(templatesIn)
	assert.Nil(err, "Template test data corrupted")

	// call service
	cs.On("Get", APIPathBlueprintTemplates).Return(dIn, 200, fmt.Errorf("mocked error"))
	templatesOut, err := ds.ListTemplates()
	assert.NotNil(err, "We are expecting an error")
	assert.Nil(templatesOut, "Expecting nil output")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")

	return templatesOut
}

// ListTemplatesFailStatusMocked test mocked function.
// Non-2xx path: the mock answers with status 499, which must surface as an
// error containing that status code.
func ListTemplatesFailStatusMocked(t *testing.T, templatesIn []*types.Template) []*types.Template {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewTemplateService(cs)
	assert.Nil(err, "Couldn't load template service")
	assert.NotNil(ds, "Template service not instanced")

	// to json
	dIn, err := json.Marshal(templatesIn)
	assert.Nil(err, "Template test data corrupted")

	// call service
	cs.On("Get", APIPathBlueprintTemplates).Return(dIn, 499, nil)
	templatesOut, err := ds.ListTemplates()
	assert.NotNil(err, "We are expecting an status code error")
	assert.Nil(templatesOut, "Expecting nil output")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")

	return templatesOut
}

// ListTemplatesFailJSONMocked test mocked function.
// Decode-failure path: the mock returns bytes that are not valid JSON, which
// must surface as an unmarshalling error.
func ListTemplatesFailJSONMocked(t *testing.T, templatesIn []*types.Template) []*types.Template {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewTemplateService(cs)
	assert.Nil(err, "Couldn't load template service")
	assert.NotNil(ds, "Template service not instanced")

	// wrong json
	dIn := []byte{10, 20, 30}

	// call service
	cs.On("Get", APIPathBlueprintTemplates).Return(dIn, 200, nil)
	templatesOut, err := ds.ListTemplates()
	assert.NotNil(err, "We are expecting a marshalling error")
	assert.Nil(templatesOut, "Expecting nil output")
	assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")

	return templatesOut
}

// GetTemplateMocked test mocked function.
// Happy path for GetTemplate: the mock returns the marshaled template for the
// ID-specific endpoint and GetTemplate must round-trip it unchanged.
func GetTemplateMocked(t *testing.T, template *types.Template) *types.Template {
	assert := assert.New(t)

	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewTemplateService(cs)
	assert.Nil(err, "Couldn't load template service")
	assert.NotNil(ds, "Template service not instanced")

	// to json
	dIn, err := json.Marshal(template)
	assert.Nil(err, "Template test data corrupted")

	// call service
	cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplate, template.ID)).Return(dIn, 200, nil)
	templateOut, err := ds.GetTemplate(template.ID)
	assert.Nil(err, "Error getting template")
	assert.Equal(*template, *templateOut, "GetTemplate returned different templates")

	return templateOut
}

// GetTemplateFailErrMocked test
mocked function func GetTemplateFailErrMocked(t *testing.T, template *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json dIn, err := json.Marshal(template) assert.Nil(err, "Template test data corrupted") // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplate, template.ID)).Return(dIn, 200, fmt.Errorf("mocked error")) templateOut, err := ds.GetTemplate(template.ID) assert.NotNil(err, "We are expecting an error") assert.Nil(templateOut, "Expecting nil output") assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'") return templateOut } // GetTemplateFailStatusMocked test mocked function func GetTemplateFailStatusMocked(t *testing.T, template *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json dIn, err := json.Marshal(template) assert.Nil(err, "Template test data corrupted") // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplate, template.ID)).Return(dIn, 499, nil) templateOut, err := ds.GetTemplate(template.ID) assert.NotNil(err, "We are expecting an status code error") assert.Nil(templateOut, "Expecting nil output") assert.Contains(err.Error(), "499", "Error should contain http code 499") return templateOut } // GetTemplateFailJSONMocked test mocked function func GetTemplateFailJSONMocked(t *testing.T, template *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // wrong json dIn := []byte{10, 20, 30} // call service cs.On("Get", 
fmt.Sprintf(APIPathBlueprintTemplate, template.ID)).Return(dIn, 200, nil) templateOut, err := ds.GetTemplate(template.ID) assert.NotNil(err, "We are expecting a marshalling error") assert.Nil(templateOut, "Expecting nil output") assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'") return templateOut } // CreateTemplateMocked test mocked function func CreateTemplateMocked(t *testing.T, templateIn *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*templateIn) assert.Nil(err, "Template test data corrupted") // to json dOut, err := json.Marshal(templateIn) assert.Nil(err, "Template test data corrupted") // call service cs.On("Post", APIPathBlueprintTemplates, mapIn).Return(dOut, 200, nil) templateOut, err := ds.CreateTemplate(mapIn) assert.Nil(err, "Error creating template list") assert.Equal(templateIn, templateOut, "CreateTemplate returned different templates") return templateOut } // CreateTemplateFailErrMocked test mocked function func CreateTemplateFailErrMocked(t *testing.T, templateIn *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*templateIn) assert.Nil(err, "Template test data corrupted") // to json dOut, err := json.Marshal(templateIn) assert.Nil(err, "Template test data corrupted") // call service cs.On("Post", APIPathBlueprintTemplates, mapIn).Return(dOut, 200, fmt.Errorf("mocked error")) templateOut, err := ds.CreateTemplate(mapIn) assert.NotNil(err, "We are expecting an error") assert.Nil(templateOut, "Expecting nil 
output") assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'") return templateOut } // CreateTemplateFailStatusMocked test mocked function func CreateTemplateFailStatusMocked(t *testing.T, templateIn *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*templateIn) assert.Nil(err, "Template test data corrupted") // to json dOut, err := json.Marshal(templateIn) assert.Nil(err, "Template test data corrupted") // call service cs.On("Post", APIPathBlueprintTemplates, mapIn).Return(dOut, 499, nil) templateOut, err := ds.CreateTemplate(mapIn) assert.NotNil(err, "We are expecting an status code error") assert.Nil(templateOut, "Expecting nil output") assert.Contains(err.Error(), "499", "Error should contain http code 499") return templateOut } // CreateTemplateFailJSONMocked test mocked function func CreateTemplateFailJSONMocked(t *testing.T, templateIn *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*templateIn) assert.Nil(err, "Template test data corrupted") // wrong json dOut := []byte{10, 20, 30} // call service cs.On("Post", APIPathBlueprintTemplates, mapIn).Return(dOut, 200, nil) templateOut, err := ds.CreateTemplate(mapIn) assert.NotNil(err, "We are expecting a marshalling error") assert.Nil(templateOut, "Expecting nil output") assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'") return templateOut } // UpdateTemplateMocked test mocked function func UpdateTemplateMocked(t *testing.T, templateIn 
*types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*templateIn) assert.Nil(err, "Template test data corrupted") // to json dOut, err := json.Marshal(templateIn) assert.Nil(err, "Template test data corrupted") // call service cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplate, templateIn.ID), mapIn).Return(dOut, 200, nil) templateOut, err := ds.UpdateTemplate(templateIn.ID, mapIn) assert.Nil(err, "Error updating template list") assert.Equal(templateIn, templateOut, "UpdateTemplate returned different templates") return templateOut } // UpdateTemplateFailErrMocked test mocked function func UpdateTemplateFailErrMocked(t *testing.T, templateIn *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*templateIn) assert.Nil(err, "Template test data corrupted") // to json dOut, err := json.Marshal(templateIn) assert.Nil(err, "Template test data corrupted") // call service cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplate, templateIn.ID), mapIn). 
Return(dOut, 200, fmt.Errorf("mocked error")) templateOut, err := ds.UpdateTemplate(templateIn.ID, mapIn) assert.NotNil(err, "We are expecting an error") assert.Nil(templateOut, "Expecting nil output") assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'") return templateOut } // UpdateTemplateFailStatusMocked test mocked function func UpdateTemplateFailStatusMocked(t *testing.T, templateIn *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*templateIn) assert.Nil(err, "Template test data corrupted") // to json dOut, err := json.Marshal(templateIn) assert.Nil(err, "Template test data corrupted") // call service cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplate, templateIn.ID), mapIn).Return(dOut, 499, nil) templateOut, err := ds.UpdateTemplate(templateIn.ID, mapIn) assert.NotNil(err, "We are expecting an status code error") assert.Nil(templateOut, "Expecting nil output") assert.Contains(err.Error(), "499", "Error should contain http code 499") return templateOut } // UpdateTemplateFailJSONMocked test mocked function func UpdateTemplateFailJSONMocked(t *testing.T, templateIn *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*templateIn) assert.Nil(err, "Template test data corrupted") // wrong json dOut := []byte{10, 20, 30} // call service cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplate, templateIn.ID), mapIn).Return(dOut, 200, nil) templateOut, err := ds.UpdateTemplate(templateIn.ID, mapIn) assert.NotNil(err, "We are expecting a marshalling error") 
assert.Nil(templateOut, "Expecting nil output") assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'") return templateOut } // CompileTemplateMocked test mocked function func CompileTemplateMocked(t *testing.T, templateIn *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json dOut, err := json.Marshal(templateIn) assert.Nil(err, "Template test data corrupted") // call service payload := new(map[string]interface{}) cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplateCompile, templateIn.ID), payload).Return(dOut, 200, nil) templateOut, err := ds.CompileTemplate(templateIn.ID, payload) assert.Nil(err, "Error compiling template list") assert.Equal(templateIn, templateOut, "CompileTemplate returned different templates") return templateOut } // CompileTemplateFailErrMocked test mocked function func CompileTemplateFailErrMocked(t *testing.T, templateIn *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json dOut, err := json.Marshal(templateIn) assert.Nil(err, "Template test data corrupted") // call service payload := new(map[string]interface{}) cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplateCompile, templateIn.ID), payload). 
Return(dOut, 200, fmt.Errorf("mocked error")) templateOut, err := ds.CompileTemplate(templateIn.ID, payload) assert.NotNil(err, "We are expecting an error") assert.Nil(templateOut, "Expecting nil output") assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'") return templateOut } // CompileTemplateFailStatusMocked test mocked function func CompileTemplateFailStatusMocked(t *testing.T, templateIn *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json dOut, err := json.Marshal(templateIn) assert.Nil(err, "Template test data corrupted") // call service payload := new(map[string]interface{}) cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplateCompile, templateIn.ID), payload).Return(dOut, 409, nil) templateOut, err := ds.CompileTemplate(templateIn.ID, payload) assert.NotNil(err, "We are expecting an status code error") assert.Nil(templateOut, "Expecting nil output") assert.Contains(err.Error(), "409", "Error should contain http code 409") return templateOut } // CompileTemplateFailJSONMocked test mocked function func CompileTemplateFailJSONMocked(t *testing.T, templateIn *types.Template) *types.Template { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // wrong json dOut := []byte{10, 20, 30} // call service payload := new(map[string]interface{}) cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplateCompile, templateIn.ID), payload).Return(dOut, 200, nil) templateOut, err := ds.CompileTemplate(templateIn.ID, payload) assert.NotNil(err, "We are expecting a marshalling error") assert.Nil(templateOut, "Expecting nil output") assert.Contains(err.Error(), "invalid character", "Error message should include the 
string 'invalid character'") return templateOut } // DeleteTemplateMocked test mocked function func DeleteTemplateMocked(t *testing.T, templateIn *types.Template) { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json dIn, err := json.Marshal(templateIn) assert.Nil(err, "Template test data corrupted") // call service cs.On("Delete", fmt.Sprintf(APIPathBlueprintTemplate, templateIn.ID)).Return(dIn, 200, nil) err = ds.DeleteTemplate(templateIn.ID) assert.Nil(err, "Error deleting template") } // DeleteTemplateFailErrMocked test mocked function func DeleteTemplateFailErrMocked(t *testing.T, templateIn *types.Template) { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json dIn, err := json.Marshal(templateIn) assert.Nil(err, "Template test data corrupted") // call service cs.On("Delete", fmt.Sprintf(APIPathBlueprintTemplate, templateIn.ID)).Return(dIn, 200, fmt.Errorf("mocked error")) err = ds.DeleteTemplate(templateIn.ID) assert.NotNil(err, "We are expecting an error") assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'") } // DeleteTemplateFailStatusMocked test mocked function func DeleteTemplateFailStatusMocked(t *testing.T, templateIn *types.Template) { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json dIn, err := json.Marshal(templateIn) assert.Nil(err, "Template test data corrupted") // call service cs.On("Delete", fmt.Sprintf(APIPathBlueprintTemplate, templateIn.ID)).Return(dIn, 499, nil) err = ds.DeleteTemplate(templateIn.ID) assert.NotNil(err, "We are 
expecting an status code error") assert.Contains(err.Error(), "499", "Error should contain http code 499") } // ListTemplateScriptsMocked test mocked function func ListTemplateScriptsMocked( t *testing.T, templateScriptsIn []*types.TemplateScript, templateID string, scriptType string, ) []*types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json drsIn, err := json.Marshal(templateScriptsIn) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplateScriptsByType, templateID, scriptType)).Return(drsIn, 200, nil) templateScriptsOut, err := ds.ListTemplateScripts(templateID, scriptType) assert.Nil(err, "Error getting template list") assert.Equal(templateScriptsIn, templateScriptsOut, "ListTemplates returned different templates") return templateScriptsOut } // ListTemplateScriptsFailErrMocked test mocked function func ListTemplateScriptsFailErrMocked( t *testing.T, templateScriptsIn []*types.TemplateScript, templateID string, scriptType string, ) []*types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json drsIn, err := json.Marshal(templateScriptsIn) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplateScriptsByType, templateID, scriptType)). 
Return(drsIn, 200, fmt.Errorf("mocked error")) templateScriptsOut, err := ds.ListTemplateScripts(templateID, scriptType) assert.NotNil(err, "We are expecting an error") assert.Nil(templateScriptsOut, "Expecting nil output") assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'") return templateScriptsOut } // ListTemplateScriptsFailStatusMocked test mocked function func ListTemplateScriptsFailStatusMocked( t *testing.T, templateScriptsIn []*types.TemplateScript, templateID string, scriptType string, ) []*types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json drsIn, err := json.Marshal(templateScriptsIn) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplateScriptsByType, templateID, scriptType)).Return(drsIn, 499, nil) templateScriptsOut, err := ds.ListTemplateScripts(templateID, scriptType) assert.NotNil(err, "We are expecting an status code error") assert.Nil(templateScriptsOut, "Expecting nil output") assert.Contains(err.Error(), "499", "Error should contain http code 499") return templateScriptsOut } // ListTemplateScriptsFailJSONMocked test mocked function func ListTemplateScriptsFailJSONMocked( t *testing.T, templateScriptsIn []*types.TemplateScript, templateID string, scriptType string, ) []*types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // wrong json drsIn := []byte{10, 20, 30} // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplateScriptsByType, templateID, scriptType)).Return(drsIn, 200, nil) templateScriptsOut, err := ds.ListTemplateScripts(templateID, scriptType) assert.NotNil(err, "We are expecting 
a marshalling error") assert.Nil(templateScriptsOut, "Expecting nil output") assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'") return templateScriptsOut } // GetTemplateScriptMocked test mocked function func GetTemplateScriptMocked(t *testing.T, dr *types.TemplateScript) *types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json drIn, err := json.Marshal(dr) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplateScript, dr.TemplateID, dr.ID)).Return(drIn, 200, nil) drOut, err := ds.GetTemplateScript(dr.TemplateID, dr.ID) assert.Nil(err, "Error getting template") assert.Equal(*dr, *drOut, "GetTemplateScript returned different template scripts") return drOut } // GetTemplateScriptFailErrMocked test mocked function func GetTemplateScriptFailErrMocked(t *testing.T, dr *types.TemplateScript) *types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json drIn, err := json.Marshal(dr) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplateScript, dr.TemplateID, dr.ID)). 
Return(drIn, 200, fmt.Errorf("mocked error")) drOut, err := ds.GetTemplateScript(dr.TemplateID, dr.ID) assert.NotNil(err, "We are expecting an error") assert.Nil(drOut, "Expecting nil output") assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'") return drOut } // GetTemplateScriptFailStatusMocked test mocked function func GetTemplateScriptFailStatusMocked(t *testing.T, dr *types.TemplateScript) *types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json drIn, err := json.Marshal(dr) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplateScript, dr.TemplateID, dr.ID)).Return(drIn, 499, nil) drOut, err := ds.GetTemplateScript(dr.TemplateID, dr.ID) assert.NotNil(err, "We are expecting an status code error") assert.Nil(drOut, "Expecting nil output") assert.Contains(err.Error(), "499", "Error should contain http code 499") return drOut } // GetTemplateScriptFailJSONMocked test mocked function func GetTemplateScriptFailJSONMocked(t *testing.T, dr *types.TemplateScript) *types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // wrong json drIn := []byte{10, 20, 30} // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplateScript, dr.TemplateID, dr.ID)).Return(drIn, 200, nil) drOut, err := ds.GetTemplateScript(dr.TemplateID, dr.ID) assert.NotNil(err, "We are expecting a marshalling error") assert.Nil(drOut, "Expecting nil output") assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'") return drOut } // CreateTemplateScriptMocked test mocked function func 
CreateTemplateScriptMocked(t *testing.T, dr *types.TemplateScript) *types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*dr) assert.Nil(err, "Template script test data corrupted") // to json drIn, err := json.Marshal(dr) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Post", fmt.Sprintf(APIPathBlueprintTemplateScripts, dr.TemplateID), mapIn).Return(drIn, 200, nil) drOut, err := ds.CreateTemplateScript(dr.TemplateID, mapIn) assert.Nil(err, "Error getting template") assert.Equal(*dr, *drOut, "CreateTemplateScript returned different template scripts") return drOut } // CreateTemplateScriptFailErrMocked test mocked function func CreateTemplateScriptFailErrMocked(t *testing.T, dr *types.TemplateScript) *types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*dr) assert.Nil(err, "Template script test data corrupted") // to json drIn, err := json.Marshal(dr) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Post", fmt.Sprintf(APIPathBlueprintTemplateScripts, dr.TemplateID), mapIn). 
Return(drIn, 200, fmt.Errorf("mocked error")) drOut, err := ds.CreateTemplateScript(dr.TemplateID, mapIn) assert.NotNil(err, "We are expecting an error") assert.Nil(drOut, "Expecting nil output") assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'") return drOut } // CreateTemplateScriptFailStatusMocked test mocked function func CreateTemplateScriptFailStatusMocked(t *testing.T, dr *types.TemplateScript) *types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*dr) assert.Nil(err, "Template script test data corrupted") // to json drIn, err := json.Marshal(dr) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Post", fmt.Sprintf(APIPathBlueprintTemplateScripts, dr.TemplateID), mapIn).Return(drIn, 499, nil) drOut, err := ds.CreateTemplateScript(dr.TemplateID, mapIn) assert.NotNil(err, "We are expecting an status code error") assert.Nil(drOut, "Expecting nil output") assert.Contains(err.Error(), "499", "Error should contain http code 499") return drOut } // CreateTemplateScriptFailJSONMocked test mocked function func CreateTemplateScriptFailJSONMocked(t *testing.T, dr *types.TemplateScript) *types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*dr) assert.Nil(err, "Template script test data corrupted") // wrong json drIn := []byte{10, 20, 30} // call service cs.On("Post", fmt.Sprintf(APIPathBlueprintTemplateScripts, dr.TemplateID), mapIn).Return(drIn, 200, nil) drOut, err := ds.CreateTemplateScript(dr.TemplateID, mapIn) assert.NotNil(err, "We are expecting a marshalling 
error") assert.Nil(drOut, "Expecting nil output") assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'") return drOut } // UpdateTemplateScriptMocked test mocked function func UpdateTemplateScriptMocked(t *testing.T, dr *types.TemplateScript) *types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*dr) assert.Nil(err, "Template script test data corrupted") // to json drIn, err := json.Marshal(dr) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplateScript, dr.TemplateID, dr.ID), mapIn).Return(drIn, 200, nil) drOut, err := ds.UpdateTemplateScript(dr.TemplateID, dr.ID, mapIn) assert.Nil(err, "Error updating template list") assert.Equal(*dr, *drOut, "UpdateTemplateScript returned different template scripts") return drOut } // UpdateTemplateScriptFailErrMocked test mocked function func UpdateTemplateScriptFailErrMocked(t *testing.T, dr *types.TemplateScript) *types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*dr) assert.Nil(err, "Template script test data corrupted") // to json drIn, err := json.Marshal(dr) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplateScript, dr.TemplateID, dr.ID), mapIn). 
Return(drIn, 200, fmt.Errorf("mocked error")) drOut, err := ds.UpdateTemplateScript(dr.TemplateID, dr.ID, mapIn) assert.NotNil(err, "We are expecting an error") assert.Nil(drOut, "Expecting nil output") assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'") return drOut } // UpdateTemplateScriptFailStatusMocked test mocked function func UpdateTemplateScriptFailStatusMocked(t *testing.T, dr *types.TemplateScript) *types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*dr) assert.Nil(err, "Template script test data corrupted") // to json drIn, err := json.Marshal(dr) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplateScript, dr.TemplateID, dr.ID), mapIn).Return(drIn, 499, nil) drOut, err := ds.UpdateTemplateScript(dr.TemplateID, dr.ID, mapIn) assert.NotNil(err, "We are expecting an status code error") assert.Nil(drOut, "Expecting nil output") assert.Contains(err.Error(), "499", "Error should contain http code 499") return drOut } // UpdateTemplateScriptFailJSONMocked test mocked function func UpdateTemplateScriptFailJSONMocked(t *testing.T, dr *types.TemplateScript) *types.TemplateScript { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // convertMap mapIn, err := utils.ItemConvertParams(*dr) assert.Nil(err, "Template script test data corrupted") // wrong json drIn := []byte{10, 20, 30} // call service cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplateScript, dr.TemplateID, dr.ID), mapIn).Return(drIn, 200, nil) drOut, err := ds.UpdateTemplateScript(dr.TemplateID, dr.ID, mapIn) assert.NotNil(err, "We 
are expecting a marshalling error") assert.Nil(drOut, "Expecting nil output") assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'") return drOut } // ReorderTemplateScriptMocked test mocked function func ReorderTemplateScriptMocked( t *testing.T, tsOut []*types.TemplateScript, templateID string, reorder []string, ) []*types.TemplateScript { assert := assert.New(t) cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") v := make(map[string]interface{}) v["script_ids"] = reorder // to json tsOutJSON, err := json.Marshal(tsOut) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplateScriptsReorder, templateID), &v).Return(tsOutJSON, 200, nil) out, err := ds.ReorderTemplateScript(templateID, &v) assert.Nil(err, "Error updating template list") assert.Equal(tsOut, out, "ReorderTemplateScript returned different template scripts") return out } // ReorderTemplateScriptFailErrMocked test mocked function func ReorderTemplateScriptFailErrMocked( t *testing.T, tsOut []*types.TemplateScript, templateID string, reorder []string, ) []*types.TemplateScript { assert := assert.New(t) cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") v := make(map[string]interface{}) v["script_ids"] = reorder // to json tsOutJSON, err := json.Marshal(tsOut) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplateScriptsReorder, templateID), &v). 
Return(tsOutJSON, 200, fmt.Errorf("mocked error")) out, err := ds.ReorderTemplateScript(templateID, &v) assert.NotNil(err, "We are expecting an error") assert.Nil(out, "Expecting nil output") assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'") return out } // ReorderTemplateScriptFailStatusMocked test mocked function func ReorderTemplateScriptFailStatusMocked( t *testing.T, tsOut []*types.TemplateScript, templateID string, reorder []string, ) []*types.TemplateScript { assert := assert.New(t) cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") v := make(map[string]interface{}) v["script_ids"] = reorder // to json tsOutJSON, err := json.Marshal(tsOut) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplateScriptsReorder, templateID), &v).Return(tsOutJSON, 499, nil) out, err := ds.ReorderTemplateScript(templateID, &v) assert.NotNil(err, "We are expecting an status code error") assert.Nil(out, "Expecting nil output") assert.Contains(err.Error(), "499", "Error should contain http code 499") return out } // ReorderTemplateScriptFailJSONMocked test mocked function func ReorderTemplateScriptFailJSONMocked( t *testing.T, tsOut []*types.TemplateScript, templateID string, reorder []string, ) []*types.TemplateScript { assert := assert.New(t) cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") v := make(map[string]interface{}) v["script_ids"] = reorder // wrong json tsOutJSON := []byte{10, 20, 30} // call service cs.On("Put", fmt.Sprintf(APIPathBlueprintTemplateScriptsReorder, templateID), &v).Return(tsOutJSON, 200, nil) out, err := ds.ReorderTemplateScript(templateID, &v) assert.NotNil(err, "We are expecting a marshalling error") assert.Nil(out, "Expecting 
nil output") assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'") return out } // DeleteTemplateScriptMocked test mocked function func DeleteTemplateScriptMocked(t *testing.T, dr *types.TemplateScript) { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json drIn, err := json.Marshal(dr) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Delete", fmt.Sprintf(APIPathBlueprintTemplateScript, dr.TemplateID, dr.ID)).Return(drIn, 200, nil) err = ds.DeleteTemplateScript(dr.TemplateID, dr.ID) assert.Nil(err, "Error deleting template script") } // DeleteTemplateScriptFailErrMocked test mocked function func DeleteTemplateScriptFailErrMocked(t *testing.T, dr *types.TemplateScript) { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json drIn, err := json.Marshal(dr) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Delete", fmt.Sprintf(APIPathBlueprintTemplateScript, dr.TemplateID, dr.ID)). 
Return(drIn, 200, fmt.Errorf("mocked error")) err = ds.DeleteTemplateScript(dr.TemplateID, dr.ID) assert.NotNil(err, "We are expecting an error") assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'") } // DeleteTemplateScriptFailStatusMocked test mocked function func DeleteTemplateScriptFailStatusMocked(t *testing.T, dr *types.TemplateScript) { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json drIn, err := json.Marshal(dr) assert.Nil(err, "Template script test data corrupted") // call service cs.On("Delete", fmt.Sprintf(APIPathBlueprintTemplateScript, dr.TemplateID, dr.ID)).Return(drIn, 499, nil) err = ds.DeleteTemplateScript(dr.TemplateID, dr.ID) assert.NotNil(err, "We are expecting an status code error") assert.Contains(err.Error(), "499", "Error should contain http code 499") } // ListTemplateServersMocked test mocked function func ListTemplateServersMocked( t *testing.T, templateServersIn []*types.TemplateServer, templateID string, ) []*types.TemplateServer { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json drsIn, err := json.Marshal(templateServersIn) assert.Nil(err, "Template server test data corrupted") // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplateServers, templateID)).Return(drsIn, 200, nil) templateServersOut, err := ds.ListTemplateServers(templateID) assert.Nil(err, "Error getting template server list") assert.Equal(templateServersIn, templateServersOut, "ListTemplates returned different template servers") return templateServersOut } // ListTemplateServersFailErrMocked test mocked function func ListTemplateServersFailErrMocked( t *testing.T, templateServersIn []*types.TemplateServer, 
templateID string, ) []*types.TemplateServer { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json drsIn, err := json.Marshal(templateServersIn) assert.Nil(err, "Template server test data corrupted") // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplateServers, templateID)). Return(drsIn, 200, fmt.Errorf("mocked error")) templateServersOut, err := ds.ListTemplateServers(templateID) assert.NotNil(err, "We are expecting an error") assert.Nil(templateServersOut, "Expecting nil output") assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'") return templateServersOut } // ListTemplateServersFailStatusMocked test mocked function func ListTemplateServersFailStatusMocked( t *testing.T, templateServersIn []*types.TemplateServer, templateID string, ) []*types.TemplateServer { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // to json drsIn, err := json.Marshal(templateServersIn) assert.Nil(err, "Template server test data corrupted") // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplateServers, templateID)).Return(drsIn, 499, nil) templateServersOut, err := ds.ListTemplateServers(templateID) assert.NotNil(err, "We are expecting an status code error") assert.Nil(templateServersOut, "Expecting nil output") assert.Contains(err.Error(), "499", "Error should contain http code 499") return templateServersOut } // ListTemplateServersFailJSONMocked test mocked function func ListTemplateServersFailJSONMocked( t *testing.T, templateServersIn []*types.TemplateServer, templateID string, ) []*types.TemplateServer { assert := assert.New(t) // wire up cs := &utils.MockConcertoService{} ds, err := NewTemplateService(cs) 
assert.Nil(err, "Couldn't load template service") assert.NotNil(ds, "Template service not instanced") // wrong json drsIn := []byte{10, 20, 30} // call service cs.On("Get", fmt.Sprintf(APIPathBlueprintTemplateServers, templateID)).Return(drsIn, 200, nil) templateServersOut, err := ds.ListTemplateServers(templateID) assert.NotNil(err, "We are expecting a marshalling error") assert.Nil(templateServersOut, "Expecting nil output") assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'") return templateServersOut }
api/blueprint/templates_api_mocked.go
0.520984
0.469338
templates_api_mocked.go
starcoder
package perfcounters import ( "fmt" "math" "sync" "time" ) /* RateOfCountsPerSecond32 A difference counter that shows the average number of operations completed during each second of the sample interval. Counters of this type measure time in ticks of the system clock. Formula: (N 1 - N 0) / ((D 1 -D 0) / F), where N 1 and N 0 are performance counter readings, D 1 and D 0 are their corresponding time readings, and F represents the number of ticks per second. Thus, the numerator represents the number of operations performed during the last sample interval, the denominator represents the number of ticks elapsed during the last sample interval, and F is the frequency of the ticks. The value of F is factored into the equation so that the result can be displayed in seconds. Counters of this type include System\ File Read Operations/sec. [[source: https://msdn.microsoft.com/en-us/library/system.diagnostics.performancecountertype(v=vs.90).aspx]] */ type RateOfCountsPerSecond32 struct { lastTime *time.Time lastCount int32 currentCount int32 mu sync.Mutex } func NewRateOfCountsPerSecond32() *RateOfCountsPerSecond32 { return &RateOfCountsPerSecond32{ lastTime: nil, lastCount: 0, currentCount: 0, } } func (self *RateOfCountsPerSecond32) Increment() { self.Add(1) } func (self *RateOfCountsPerSecond32) Add(value int32) { self.mu.Lock() defer self.mu.Unlock() self.currentCount += value if self.lastTime == nil { lastTime := time.Now() self.lastTime = &lastTime } } func (self *RateOfCountsPerSecond32) CalculatedValue() float64 { self.mu.Lock() defer self.mu.Unlock() currentTime := time.Now() if self.lastTime == nil { self.lastTime = &currentTime return 0 } lastTime := self.lastTime lastCount := self.lastCount currentCount := self.currentCount diff := currentTime.Sub(*lastTime) calculatedValue := float64(currentCount-lastCount) / diff.Seconds() if math.IsNaN(calculatedValue) || math.IsInf(calculatedValue, 1) || math.IsInf(calculatedValue, -1) { calculatedValue = 0.0 } self.lastCount 
= currentCount self.lastTime = &currentTime return calculatedValue } func (self *RateOfCountsPerSecond32) String() string { return fmt.Sprintf("%.3f", self.CalculatedValue()) } /* func main() { ctr := NewRateOfCountsPerSecond32() fmt.Println(ctr.String()) // expected: 0 ctr.Increment() time.Sleep(1000 * time.Millisecond) fmt.Println(ctr.String()) // expected: 1 ctr.Increment() ctr.Increment() time.Sleep(1000 * time.Millisecond) fmt.Println(ctr.String()) // expected: 2 ctr.Increment() time.Sleep(2000 * time.Millisecond) fmt.Println(ctr.String()) // expected: 0.5 } */
perfcounters/rateofcountspersecond32.go
0.804675
0.746486
rateofcountspersecond32.go
starcoder
package cluster import ( "fmt" "math" "math/rand" "sort" "time" "gonum.org/v1/gonum/mat" ) // KV is a structure that holds key-value pairs of type float64. type KV struct { Key float64 Value float64 } // InitHuang implements initialization of cluster centroids based on the // frequency of attributes as defined in paper written by Z.Huang in 1998. func InitHuang(X *DenseMatrix, clustersNumber int, distFunc DistanceFunction) (*DenseMatrix, error) { _, xCols := X.Dims() centroids := NewDenseMatrix(clustersNumber, xCols, nil) freqTable := CreateFrequencyTable(X) for j := 0; j < clustersNumber; j++ { for i := 0; i < xCols; i++ { if len(freqTable[i]) > j { centroids.Set(j, i, freqTable[i][j].Key) } else { // Change to setting to randomly chosen value instead of first // one. centroids.Set(j, i, freqTable[i][0].Key) } } } return centroids, nil } // InitCao implements initialization of cluster centroids based on the frequency // and density of attributes as defined in // "A new initialization method for categorical data clustering" by F.Cao(2009) func InitCao(X *DenseMatrix, clustersNumber int, distFunc DistanceFunction) (*DenseMatrix, error) { xRows, xCols := X.Dims() centroids := NewDenseMatrix(clustersNumber, xCols, nil) // Compute density table and, int the same time find index of vector with // the highest density. highestDensityIndex := 0 maxDensity := 0.0 densityTable := make([]float64, xRows) for i := 0; i < xCols; i++ { freq := make(map[float64]int) for j := 0; j < xRows; j++ { freq[X.At(j, i)]++ } for j := 0; j < xRows; j++ { densityTable[j] += float64(freq[X.At(j, i)]) / float64(xCols) } } for k := 0; k < xRows; k++ { densityTable[k] = densityTable[k] / float64(xRows) if densityTable[k] > maxDensity { maxDensity = densityTable[k] highestDensityIndex = k } } // Choose first cluster - vector with maximum density. centroids.SetRow(0, X.RawRowView(highestDensityIndex)) // Find the rest of clusters centers. 
for i := 1; i < clustersNumber; i++ { dd := make([][]float64, i) for z := 0; z < i; z++ { dd[z] = make([]float64, xRows) } for j := 0; j < i; j++ { for k := 0; k < xRows; k++ { dist, err := distFunc(&DenseVector{X.RowView(k).(*mat.VecDense)}, &DenseVector{centroids.RowView(j).(*mat.VecDense)}) if err != nil { return NewDenseMatrix(0, 0, nil), fmt.Errorf("cao init: cannot compute cluster: %v ", err) } dd[j][k] = densityTable[k] * dist } } indexMax := findIndexCao(xRows, i, dd) centroids.SetRow(i, X.RawRowView(indexMax)) } return centroids, nil } func findIndexCao(xRows, i int, dd [][]float64) int { // Find minimum value for each column. minValuesTable := make([]float64, xRows) for j := 0; j < xRows; j++ { minValuesTable[j] = math.MaxFloat64 } for j := 0; j < i; j++ { for k := 0; k < xRows; k++ { if dd[j][k] < minValuesTable[k] { minValuesTable[k] = dd[j][k] } } } // Find max value and its index among minValuesTable. maxVal := 0.0 indexMax := 0 for j := 0; j < xRows; j++ { if minValuesTable[j] > maxVal { maxVal = minValuesTable[j] indexMax = j } } return indexMax } // InitRandom randomly initializes cluster centers - vectors chosen from X table. func InitRandom(X *DenseMatrix, clustersNumber int, distFunc DistanceFunction) (*DenseMatrix, error) { xRows, xCols := X.Dims() centroids := NewDenseMatrix(clustersNumber, xCols, nil) rand.Seed(time.Now().UnixNano()) for i := 0; i < clustersNumber; i++ { centroids.SetRow(i, X.RawRowView(rand.Intn(xRows))) } return centroids, nil } // InitNum initializes cluster centers for numerical data - random // initialization. 
func InitNum(X *DenseMatrix, clustersNumber int, distFunc DistanceFunction) (*DenseMatrix, error) { xRows, xCols := X.Dims() centroids := NewDenseMatrix(clustersNumber, xCols, nil) rand.Seed(time.Now().UnixNano()) for i := 0; i < clustersNumber; i++ { center := X.RawRowView(rand.Intn(xRows - 1)) centroids.SetRow(i, center) } return centroids, nil } // CreateFrequencyTable creates frequency table for attributes in given matrix, // it returns attributes in frequency descending order. func CreateFrequencyTable(X *DenseMatrix) [][]KV { xRows, xCols := X.Dims() frequencyTable := make([][]KV, xCols) for i := 0; i < xCols; i++ { column := X.ColView(i) frequencies := make(map[float64]float64) for j := 0; j < xRows; j++ { frequencies[column.At(j, 0)] = frequencies[column.At(j, 0)] + 1 } for k, v := range frequencies { frequencyTable[i] = append(frequencyTable[i], KV{k, v}) } sort.Slice(frequencyTable[i], func(a, b int) bool { return frequencyTable[i][a].Value > frequencyTable[i][b].Value }) } return frequencyTable }
cluster/init_functions.go
0.729616
0.507385
init_functions.go
starcoder
package collection import ( "github.com/deatil/lakego-admin/lakego/support/collection" ) /** * Collect * * @create 2021-7-3 * @author deatil */ // Collect transforms src into Collection. The src could be json string, []string, // []map[string]interface{}, map[string]interface{}, []int, []int16, []int32, []int64, // []float32, []float64, []interface{}. func Collect(src interface{}) collection.Collection { return collection.Collect(src) } /* type Collection interface { Value() interface{} // All returns the underlying array represented by the collection. All() []interface{} // Length return the length of the collection. Length() int // ToStruct turn the collection to the specified struct using mapstructure. // https://github.com/mitchellh/mapstructure ToStruct(dist interface{}) // Select select the keys of collection and delete others. Select(keys ...string) Collection // Avg returns the average value of a given key. Avg(key ...string) decimal.Decimal // Sum returns the sum of all items in the collection. Sum(key ...string) decimal.Decimal // Min returns the minimum value of a given key. Min(key ...string) decimal.Decimal // Max returns the maximum value of a given key. Max(key ...string) decimal.Decimal // Join joins the collection's values with a string. Join(delimiter string) string // Combine combines the values of the collection, as keys, with the values of another array or collection. Combine(value []interface{}) Collection // Count returns the total number of items in the collection. Count() int // Pluck retrieves all of the values for a given key. Pluck(key string) Collection // Mode returns the mode value of a given key. Mode(key ...string) []interface{} // Only returns the items in the collection with the specified keys. Only(keys []string) Collection // Prepend adds an item to the beginning of the collection. Prepend(values ...interface{}) Collection // Pull removes and returns an item from the collection by its key. 
Pull(key interface{}) Collection // Put sets the given key and value in the collection:. Put(key string, value interface{}) Collection // SortBy sorts the collection by the given key. SortBy(key string) Collection // Take returns a new collection with the specified number of items. Take(num int) Collection // Chunk breaks the collection into multiple, smaller collections of a given size. Chunk(num int) MultiDimensionalArrayCollection // Collapse collapses a collection of arrays into a single, flat collection. Collapse() Collection // Concat appends the given array or collection values onto the end of the collection. Concat(value interface{}) Collection // Contains determines whether the collection contains a given item. Contains(value ...interface{}) bool // CountBy counts the occurrences of values in the collection. By default, the method counts the occurrences of every element. CountBy(callback ...interface{}) map[interface{}]int // CrossJoin cross joins the collection's values among the given arrays or collections, returning a Cartesian product with all possible permutations. CrossJoin(array ...[]interface{}) MultiDimensionalArrayCollection // Dd dumps the collection's items and ends execution of the script. Dd() // Diff compares the collection against another collection or a plain PHP array based on its values. // This method will return the values in the original collection that are not present in the given collection. Diff(interface{}) Collection // DiffAssoc compares the collection against another collection or a plain PHP array based on its keys and values. // This method will return the key / value pairs in the original collection that are not present in the given collection. DiffAssoc(map[string]interface{}) Collection // DiffKeys compares the collection against another collection or a plain PHP array based on its keys. // This method will return the key / value pairs in the original collection that are not present in the given collection. 
DiffKeys(map[string]interface{}) Collection // Dump dumps the collection's items. Dump() // Each iterates over the items in the collection and passes each item to a callback. Each(func(item, value interface{}) (interface{}, bool)) Collection // Every may be used to verify that all elements of a collection pass a given truth test. Every(CB) bool // Except returns all items in the collection except for those with the specified keys. Except([]string) Collection // Filter filters the collection using the given callback, keeping only those items that pass a given truth test. Filter(CB) Collection // First returns the first element in the collection that passes a given truth test. First(...CB) interface{} // FirstWhere returns the first element in the collection with the given key / value pair. FirstWhere(key string, values ...interface{}) map[string]interface{} // FlatMap iterates through the collection and passes each value to the given callback. FlatMap(func(value interface{}) interface{}) Collection // Flip swaps the collection's keys with their corresponding values. Flip() Collection // Forget removes an item from the collection by its key. Forget(string) Collection // ForPage returns a new collection containing the items that would be present on a given page number. ForPage(int, int) Collection // Get returns the item at a given key. If the key does not exist, null is returned. Get(string, ...interface{}) interface{} // GroupBy groups the collection's items by a given key. GroupBy(string) Collection // Has determines if a given key exists in the collection. Has(...string) bool // Implode joins the items in a collection. Its arguments depend on the type of items in the collection. Implode(string, string) string // Intersect removes any values from the original collection that are not present in the given array or collection. 
Intersect([]string) Collection // IntersectByKeys removes any keys from the original collection that are not present in the given array or collection. IntersectByKeys(map[string]interface{}) Collection // IsEmpty returns true if the collection is empty; otherwise, false is returned. IsEmpty() bool // IsNotEmpty returns true if the collection is not empty; otherwise, false is returned. IsNotEmpty() bool // KeyBy keys the collection by the given key. If multiple items have the same key, only the last one will // appear in the new collection. KeyBy(interface{}) Collection // Keys returns all of the collection's keys. Keys() Collection // Last returns the last element in the collection that passes a given truth test. Last(...CB) interface{} // MapToGroups groups the collection's items by the given callback. MapToGroups(MapCB) Collection // MapWithKeys iterates through the collection and passes each value to the given callback. MapWithKeys(MapCB) Collection // Median returns the median value of a given key. Median(...string) decimal.Decimal // Merge merges the given array or collection with the original collection. If a string key in the given items // matches a string key in the original collection, the given items's value will overwrite the value in the // original collection. Merge(interface{}) Collection // Pad will fill the array with the given value until the array reaches the specified size. Pad(int, interface{}) Collection // Partition separate elements that pass a given truth test from those that do not. Partition(PartCB) (Collection, Collection) // Pop removes and returns the last item from the collection. Pop() interface{} // Push appends an item to the end of the collection. Push(interface{}) Collection // Random returns a random item from the collection. Random(...int) Collection // Reduce reduces the collection to a single value, passing the result of each iteration into the subsequent iteration. 
Reduce(ReduceCB) interface{} // Reject filters the collection using the given callback. Reject(CB) Collection // Reverse reverses the order of the collection's items, preserving the original keys. Reverse() Collection // Search searches the collection for the given value and returns its key if found. If the item is not found, // -1 is returned. Search(interface{}) int // Shift removes and returns the first item from the collection. Shift() Collection // Shuffle randomly shuffles the items in the collection. Shuffle() Collection // Slice returns a slice of the collection starting at the given index. Slice(...int) Collection // Sort sorts the collection. Sort() Collection // SortByDesc has the same signature as the sortBy method, but will sort the collection in the opposite order. SortByDesc() Collection // Splice removes and returns a slice of items starting at the specified index. Splice(index ...int) Collection // Split breaks a collection into the given number of groups. Split(int) Collection // Unique returns all of the unique items in the collection. Unique() Collection // WhereIn filters the collection by a given key / value contained within the given array. WhereIn(string, []interface{}) Collection // WhereNotIn filters the collection by a given key / value not contained within the given array. WhereNotIn(string, []interface{}) Collection // ToJson converts the collection into a json string. ToJson() string // ToNumberArray converts the collection into a plain golang slice which contains decimal.Decimal. ToNumberArray() []decimal.Decimal // ToIntArray converts the collection into a plain golang slice which contains int. ToIntArray() []int // ToInt64Array converts the collection into a plain golang slice which contains int. ToInt64Array() []int64 // ToStringArray converts the collection into a plain golang slice which contains string. ToStringArray() []string // ToMultiDimensionalArray converts the collection into a multi dimensional array. 
ToMultiDimensionalArray() [][]interface{} // ToMap converts the collection into a plain golang map. ToMap() map[string]interface{} // ToMapArray converts the collection into a plain golang slice which contains map. ToMapArray() []map[string]interface{} // Where filters the collection by a given key / value pair. Where(key string, values ...interface{}) Collection } */
pkg/lakego-admin/lakego/collection/collection.go
0.848565
0.644756
collection.go
starcoder
package gini import ( "fmt" "github.com/shuLhan/numerus" "github.com/shuLhan/tekstus" "os" "strconv" ) var ( // DEBUG debug level, set from environment. DEBUG = 0 ) /* Gini contain slice of sorted index, slice of partition values, slice of Gini index, Gini value for all samples. */ type Gini struct { // Skip if its true, the gain value would not be searched on this // instance. Skip bool // IsContinue define whether the Gini index came from continuous // attribute or not. IsContinu bool // Value of Gini index for all value in attribute. Value float64 // MaxPartGain contain the index of partition which have the maximum // gain. MaxPartGain int // MaxGainValue contain maximum gain of index. MaxGainValue float64 // MinIndexPart contain the index of partition which have the minimum // Gini index. MinIndexPart int // MinIndexGini contain minimum Gini index value. MinIndexValue float64 // SortedIndex of attribute, sorted by values of attribute. This will // be used to reference the unsorted target attribute. SortedIndex []int // ContinuPart contain list of partition value for continuous attribute. ContinuPart []float64 // DiscretePart contain the possible combination of discrete values. DiscretePart tekstus.TableStrings // Index contain list of Gini Index for each partition. Index []float64 // Gain contain information gain for each partition. Gain []float64 } func init() { v := os.Getenv("GINI_DEBUG") if v == "" { DEBUG = 0 } else { DEBUG, _ = strconv.Atoi(v) } } /* ComputeDiscrete Given an attribute A with discreate value 'discval', and the target attribute T which contain N classes in C, compute the information gain of A. The result is saved as gain value in MaxGainValue for each partition. */ func (gini *Gini) ComputeDiscrete(A *[]string, discval *[]string, T *[]string, C *[]string) { gini.IsContinu = false // create partition for possible combination of discrete values. 
gini.createDiscretePartition((*discval)) if DEBUG >= 2 { fmt.Println("[gini] part :", gini.DiscretePart) } gini.Index = make([]float64, len(gini.DiscretePart)) gini.Gain = make([]float64, len(gini.DiscretePart)) gini.MinIndexValue = 1.0 // compute gini index for all samples gini.Value = gini.compute(T, C) gini.computeDiscreteGain(A, T, C) } /* computeDiscreteGain will compute Gini index and Gain for each partition. */ func (gini *Gini) computeDiscreteGain(A *[]string, T *[]string, C *[]string) { // number of samples nsample := float64(len(*A)) if DEBUG >= 3 { fmt.Println("[gini] sample:", T) fmt.Printf("[gini] Gini(a=%s) = %f\n", (*A), gini.Value) } // compute gini index for each discrete values for i, subPart := range gini.DiscretePart { // check if sub partition has at least an element if len(subPart) <= 0 { continue } sumGI := 0.0 for _, part := range subPart { ndisc := 0.0 var subT []string for _, el := range part { for t, a := range *A { if a != el { continue } // count how many sample with this discrete value ndisc++ // split the target by discrete value subT = append(subT, (*T)[t]) } } // compute gini index for subtarget giniIndex := gini.compute(&subT, C) // compute probabilites of discrete value through all samples p := ndisc / nsample probIndex := p * giniIndex // sum all probabilities times gini index. 
sumGI += probIndex if DEBUG >= 3 { fmt.Printf("[gini] subsample: %v\n", subT) fmt.Printf("[gini] Gini(a=%s) = %f/%f * %f = %f\n", part, ndisc, nsample, giniIndex, probIndex) } } gini.Index[i] = sumGI gini.Gain[i] = gini.Value - sumGI if DEBUG >= 3 { fmt.Printf("[gini] sample: %v\n", subPart) fmt.Printf("[gini] Gain(a=%s) = %f - %f = %f\n", subPart, gini.Value, sumGI, gini.Gain[i]) } if gini.MinIndexValue > gini.Index[i] && gini.Index[i] != 0 { gini.MinIndexValue = gini.Index[i] gini.MinIndexPart = i } if gini.MaxGainValue < gini.Gain[i] { gini.MaxGainValue = gini.Gain[i] gini.MaxPartGain = i } } } /* createDiscretePartition will create possible combination for discrete value in DiscretePart. */ func (gini *Gini) createDiscretePartition(discval tekstus.Strings) { // no discrete values ? if len(discval) <= 0 { return } // use set partition function to group the discrete values into two // subset. gini.DiscretePart = discval.Partitioning(2) } /* ComputeContinu Given an attribute A and the target attribute T which contain N classes in C, compute the information gain of A. The result of Gini partitions value, Gini Index, and Gini Gain is saved in ContinuPart, Index, and Gain. */ func (gini *Gini) ComputeContinu(A *[]float64, T *[]string, C *[]string) { gini.IsContinu = true // make a copy of attribute and target. A2 := make([]float64, len(*A)) copy(A2, *A) T2 := make([]string, len(*T)) copy(T2, *T) gini.SortedIndex = numerus.Floats64IndirectSort(A2, true) if DEBUG >= 1 { fmt.Println("[gini] attr sorted :", A2) } // sort the target attribute using sorted index. 
tekstus.StringsSortByIndex(&T2, gini.SortedIndex) // create partition gini.createContinuPartition(&A2) // create holder for gini index and gini gain gini.Index = make([]float64, len(gini.ContinuPart)) gini.Gain = make([]float64, len(gini.ContinuPart)) gini.MinIndexValue = 1.0 // compute gini index for all samples gini.Value = gini.compute(&T2, C) gini.computeContinuGain(&A2, &T2, C) } /* createContinuPartition for dividing class and computing Gini index. This is assuming that the data `A` has been sorted in ascending order. */ func (gini *Gini) createContinuPartition(A *[]float64) { l := len(*A) gini.ContinuPart = make([]float64, 0) // loop from first index until last index - 1 for i := 0; i < l-1; i++ { sum := (*A)[i] + (*A)[i+1] med := sum / 2.0 // If median is zero, its mean both left and right value is // zero. We are not allowing this, because it will result the // minimum Gini Index or maximum Gain value. if med == 0 { continue } // Reject if median is contained in attribute's value. // We use equality because if both A[i] and A[i+1] value is // equal, the median is equal to both of them. exist := false for j := 0; j <= i; j++ { if (*A)[j] == med { exist = true break } } if !exist { gini.ContinuPart = append(gini.ContinuPart, med) } } } /* compute value for attribute T. Return Gini value in the form of, 1 - sum (probability of each classes in T) */ func (gini *Gini) compute(T *[]string, C *[]string) float64 { n := float64(len(*T)) if n == 0 { return 0 } classCount := tekstus.WordsCountTokens(*T, *C, true) var sump2 float64 for x, v := range classCount { p := float64(v) / n sump2 += (p * p) if DEBUG >= 3 { fmt.Printf("[gini] compute (%s): (%f/%f)^2 = %f\n", (*C)[x], v, n, p*p) } } return 1 - sump2 } /* computeContinuGain for each partition. The Gini gain formula we used here is, Gain(part,S) = Gini(S) - ((count(left)/S * Gini(left)) + (count(right)/S * Gini(right))) where, - left is sub-sample from S that is less than part value. 
- right is sub-sample from S that is greater than part value. */ func (gini *Gini) computeContinuGain(A *[]float64, T *[]string, C *[]string) { var gleft, gright float64 var tleft, tright []string nsample := len(*A) if DEBUG >= 2 { fmt.Println("[gini] sorted data:", A) fmt.Println("[gini] Gini.Value:", gini.Value) } for p, contVal := range gini.ContinuPart { // find the split of samples between partition based on // partition value partidx := nsample for x, attrVal := range *A { if attrVal > contVal { partidx = x break } } nleft := partidx nright := nsample - partidx pleft := float64(nleft) / float64(nsample) pright := float64(nright) / float64(nsample) if partidx > 0 { tleft = (*T)[0:partidx] tright = (*T)[partidx:] gleft = gini.compute(&tleft, C) gright = gini.compute(&tright, C) } else { tleft = nil tright = (*T)[0:] gleft = 0 gright = gini.compute(&tright, C) } // count class in partition gini.Index[p] = ((pleft * gleft) + (pright * gright)) gini.Gain[p] = gini.Value - gini.Index[p] if DEBUG >= 3 { fmt.Println("[gini] tleft:", tleft) fmt.Println("[gini] tright:", tright) fmt.Printf("[gini] GiniGain(%v) = %f - (%f * %f) + (%f * %f) = %f\n", contVal, gini.Value, pleft, gleft, pright, gright, gini.Gain[p]) } if gini.MinIndexValue > gini.Index[p] && gini.Index[p] != 0 { gini.MinIndexValue = gini.Index[p] gini.MinIndexPart = p } if gini.MaxGainValue < gini.Gain[p] { gini.MaxGainValue = gini.Gain[p] gini.MaxPartGain = p } } } /* GetMaxPartGainValue return the partition that have the maximum Gini gain. */ func (gini *Gini) GetMaxPartGainValue() interface{} { if gini.IsContinu { return gini.ContinuPart[gini.MaxPartGain] } return gini.DiscretePart[gini.MaxPartGain] } /* GetMaxGainValue return the value of partition which contain the maximum Gini gain. */ func (gini *Gini) GetMaxGainValue() float64 { return gini.MaxGainValue } /* GetMinIndexPartValue return the partition that have the minimum Gini index. 
*/ func (gini *Gini) GetMinIndexPartValue() interface{} { if gini.IsContinu { return gini.ContinuPart[gini.MinIndexPart] } return gini.DiscretePart[gini.MinIndexPart] } /* GetMinIndexValue return the minimum Gini index value. */ func (gini *Gini) GetMinIndexValue() float64 { return gini.MinIndexValue } /* FindMaxGain find the attribute and value that have the maximum gain. The returned value is index of attribute. */ func FindMaxGain(gains *[]Gini) (MaxGainIdx int) { var gainValue = 0.0 var maxGainValue = 0.0 for i := range *gains { if (*gains)[i].Skip { continue } gainValue = (*gains)[i].GetMaxGainValue() if gainValue > maxGainValue { maxGainValue = gainValue MaxGainIdx = i } } return } /* FindMinGiniIndex return the index of attribute that have the minimum Gini index. */ func FindMinGiniIndex(ginis *[]Gini) (MinIndexIdx int) { var indexV = 0.0 var minIndexV = 1.0 for i := range *ginis { indexV = (*ginis)[i].GetMinIndexValue() if indexV > minIndexV { minIndexV = indexV MinIndexIdx = i } } return } /* String yes, it will print it JSON like format. */ func (gini Gini) String() (s string) { s = fmt.Sprint("{\n", " Skip :", gini.Skip, "\n", " IsContinu :", gini.IsContinu, "\n", " Index :", gini.Index, "\n", " Value :", gini.Value, "\n", " Gain :", gini.Gain, "\n", " MaxPartGain :", gini.MaxPartGain, "\n", " MaxGainValue :", gini.MaxGainValue, "\n", " MinIndexPart :", gini.MinIndexPart, "\n", " MinIndexValue :", gini.MinIndexValue, "\n", " SortedIndex :", gini.SortedIndex, "\n", " ContinuPart :", gini.ContinuPart, "\n", " DiscretePart :", gini.DiscretePart, "\n", "}") return }
gain/gini/gini.go
0.511961
0.463444
gini.go
starcoder
package iso3166 import ( "fmt" "strconv" ) // Country is a representation of a country. type Country uint16 // Alpha2 returns the ISO 3166-1 two-letter alphabetic code. func (c Country) Alpha2() string { return countries[c].alpha2 } // Alpha3 returns the ISO 3166-1 three-letter alphabetic code. func (c Country) Alpha3() string { return countries[c].alpha3 } // Numeric returns the ISO 3166-1 three-digit numeric code. func (c Country) Numeric() int { return countries[c].numeric } // StringNumeric returns the ISO 3166-1 three-digit numeric code. func (c Country) StringNumeric() string { return fmt.Sprintf("%03d", countries[c].numeric) } // Name returns the English name. func (c Country) Name() string { return countries[c].name } // FromAlpha2 returns Country for the two-letter alpha2 code. // Or an error if it does not exist. func FromAlpha2(alpha2 string) (Country, error) { for c, country := range countries { if country.alpha2 == alpha2 { return Country(c), nil } } return 0, Error("no country exists with alpha2-code " + alpha2) } // FromAlpha3 returns Country for the three-letter alpha3 code. // Or an error if it does not exist. func FromAlpha3(alpha3 string) (Country, error) { for c, country := range countries { if country.alpha3 == alpha3 { return Country(c), nil } } return 0, Error("no country exists with alpha3-code " + alpha3) } // FromNumeric returns Country for the three-digit numeric code. // Or an error if it does not exist. func FromNumeric(numeric string) (Country, error) { n, err := strconv.Atoi(numeric) if err != nil { return 0, Error(err.Error()) } for c, country := range countries { if country.numeric == n { return Country(c), nil } } return 0, Error("no country exists with numeric-code " + numeric) } // Must panics if err is non-nil and otherwise returns c. // Could be used to return a single value from FromAlpha2/FromAlpha3/FromNumeric. 
func Must(c Country, err error) Country { if err != nil { panic(err) } return c } // Error is the type of error returned by this package type Error string func (e Error) Error() string { return "iso3166: " + string(e) }
country.go
0.846514
0.565179
country.go
starcoder
package pure import ( "fmt" "time" "github.com/itchyny/timefmt-go" "github.com/rickb777/date/period" "github.com/benthosdev/benthos/v4/internal/bloblang/query" "github.com/benthosdev/benthos/v4/public/bloblang" ) func asDeprecated(s *bloblang.PluginSpec) *bloblang.PluginSpec { tmpSpec := *s newSpec := &tmpSpec newSpec = newSpec.Deprecated() return newSpec } func init() { // Note: The examples are run and tested from within // ./internal/bloblang/query/parsed_test.go tsRoundSpec := bloblang.NewPluginSpec(). Beta(). Static(). Category(query.MethodCategoryTime). Description(`Returns the result of rounding a timestamp to the nearest multiple of the argument duration (nanoseconds). The rounding behavior for halfway values is to round up. Timestamp values can either be a numerical unix time in seconds (with up to nanosecond precision via decimals), or a string in RFC 3339 format. The `+"[`ts_parse`](#ts_parse)"+` method can be used in order to parse different timestamp formats.`). Param(bloblang.NewInt64Param("duration").Description("A duration measured in nanoseconds to round by.")). Version("4.2.0"). Example("Use the method `parse_duration` to convert a duration string into an integer argument.", `root.created_at_hour = this.created_at.ts_round("1h".parse_duration())`, [2]string{ `{"created_at":"2020-08-14T05:54:23Z"}`, `{"created_at_hour":"2020-08-14T06:00:00Z"}`, }) tsRoundCtor := func(args *bloblang.ParsedParams) (bloblang.Method, error) { iDur, err := args.GetInt64("duration") if err != nil { return nil, err } dur := time.Duration(iDur) return bloblang.TimestampMethod(func(t time.Time) (interface{}, error) { return t.Round(dur), nil }), nil } if err := bloblang.RegisterMethodV2("ts_round", tsRoundSpec, tsRoundCtor); err != nil { panic(err) } tsTZSpec := bloblang.NewPluginSpec(). Beta(). Static(). Category(query.MethodCategoryTime). Description(`Returns the result of converting a timestamp to a specified timezone. 
Timestamp values can either be a numerical unix time in seconds (with up to nanosecond precision via decimals), or a string in RFC 3339 format. The `+"[`ts_parse`](#ts_parse)"+` method can be used in order to parse different timestamp formats.`). Param(bloblang.NewStringParam("tz").Description(`The timezone to change to. If set to "UTC" then the timezone will be UTC. If set to "Local" then the local timezone will be used. Otherwise, the argument is taken to be a location name corresponding to a file in the IANA Time Zone database, such as "America/New_York".`)). Version("4.3.0"). Example("", `root.created_at_utc = this.created_at.ts_tz("UTC")`, [2]string{ `{"created_at":"2021-02-03T17:05:06+01:00"}`, `{"created_at_utc":"2021-02-03T16:05:06Z"}`, }) tsTZCtor := func(args *bloblang.ParsedParams) (bloblang.Method, error) { timezoneStr, err := args.GetString("tz") if err != nil { return nil, err } timezone, err := time.LoadLocation(timezoneStr) if err != nil { return nil, fmt.Errorf("failed to parse timezone location name: %w", err) } return bloblang.TimestampMethod(func(target time.Time) (interface{}, error) { return target.In(timezone), nil }), nil } if err := bloblang.RegisterMethodV2("ts_tz", tsTZSpec, tsTZCtor); err != nil { panic(err) } //-------------------------------------------------------------------------- parseDurSpec := bloblang.NewPluginSpec(). Static(). Category(query.MethodCategoryTime). Description(`Attempts to parse a string as a duration and returns an integer of nanoseconds. A duration string is a possibly signed sequence of decimal numbers, each with an optional fraction and a unit suffix, such as "300ms", "-1.5h" or "2h45m". Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h".`). Example("", `root.delay_for_ns = this.delay_for.parse_duration()`, [2]string{ `{"delay_for":"50us"}`, `{"delay_for_ns":50000}`, }, ). 
Example("", `root.delay_for_s = this.delay_for.parse_duration() / 1000000000`, [2]string{ `{"delay_for":"2h"}`, `{"delay_for_s":7200}`, }, ) parseDurCtor := func(args *bloblang.ParsedParams) (bloblang.Method, error) { return bloblang.StringMethod(func(s string) (interface{}, error) { d, err := time.ParseDuration(s) if err != nil { return nil, err } return d.Nanoseconds(), nil }), nil } if err := bloblang.RegisterMethodV2("parse_duration", parseDurSpec, parseDurCtor); err != nil { panic(err) } parseDurISOSpec := bloblang.NewPluginSpec(). Category(query.MethodCategoryTime). Beta(). Static(). Description(`Attempts to parse a string using ISO-8601 rules as a duration and returns an integer of nanoseconds. A duration string is represented by the format "P[n]Y[n]M[n]DT[n]H[n]M[n]S" or "P[n]W". In these representations, the "[n]" is replaced by the value for each of the date and time elements that follow the "[n]". For example, "P3Y6M4DT12H30M5S" represents a duration of "three years, six months, four days, twelve hours, thirty minutes, and five seconds". The last field of the format allows fractions with one decimal place, so "P3.5S" will return 3500000000ns. Any additional decimals will be truncated.`). Example("Arbitrary ISO-8601 duration string to nanoseconds:", `root.delay_for_ns = this.delay_for.parse_duration_iso8601()`, [2]string{ `{"delay_for":"P3Y6M4DT12H30M5S"}`, `{"delay_for_ns":110839937000000000}`, }, ). Example("Two hours ISO-8601 duration string to seconds:", `root.delay_for_s = this.delay_for.parse_duration_iso8601() / 1000000000`, [2]string{ `{"delay_for":"PT2H"}`, `{"delay_for_s":7200}`, }, ). 
Example("Two and a half seconds ISO-8601 duration string to seconds:", `root.delay_for_s = this.delay_for.parse_duration_iso8601() / 1000000000`, [2]string{ `{"delay_for":"PT2.5S"}`, `{"delay_for_s":2.5}`, }, ) parseDurISOCtor := func(args *bloblang.ParsedParams) (bloblang.Method, error) { return bloblang.StringMethod(func(s string) (interface{}, error) { // No need to normalise the output since we need it expressed as nanoseconds. d, err := period.Parse(s, false) if err != nil { return nil, err } // The conversion is likely imprecise when the period specifies years, months and days. // See method documentation for details on precision. return d.DurationApprox().Nanoseconds(), nil }), nil } if err := bloblang.RegisterMethodV2("parse_duration_iso8601", parseDurISOSpec, parseDurISOCtor); err != nil { panic(err) } //-------------------------------------------------------------------------- parseTSSpec := bloblang.NewPluginSpec(). Category(query.MethodCategoryTime). Beta(). Static(). Description(`Attempts to parse a string as a timestamp following a specified format and outputs a timestamp, which can then be fed into methods such as ` + "[`ts_format`](#ts_format)" + `. The input format is defined by showing how the reference time, defined to be Mon Jan 2 15:04:05 -0700 MST 2006, would be displayed if it were the value. For an alternative way to specify formats check out the ` + "[`ts_strptime`](#ts_strptime)" + ` method.`). Param(bloblang.NewStringParam("format").Description("The format of the target string.")) parseTSSpecDep := asDeprecated(parseTSSpec) parseTSSpec = parseTSSpec. 
Example("", `root.doc.timestamp = this.doc.timestamp.ts_parse("2006-Jan-02")`, [2]string{ `{"doc":{"timestamp":"2020-Aug-14"}}`, `{"doc":{"timestamp":"2020-08-14T00:00:00Z"}}`, }, ) parseTSCtor := func(deprecated bool) bloblang.MethodConstructorV2 { return func(args *bloblang.ParsedParams) (bloblang.Method, error) { layout, err := args.GetString("format") if err != nil { return nil, err } return bloblang.StringMethod(func(s string) (interface{}, error) { ut, err := time.Parse(layout, s) if err != nil { return nil, err } if deprecated { return ut.Format(time.RFC3339Nano), nil } return ut, nil }), nil } } if err := bloblang.RegisterMethodV2("ts_parse", parseTSSpec, parseTSCtor(false)); err != nil { panic(err) } if err := bloblang.RegisterMethodV2("parse_timestamp", parseTSSpecDep, parseTSCtor(true)); err != nil { panic(err) } parseTSStrptimeSpec := bloblang.NewPluginSpec(). Category(query.MethodCategoryTime). Beta(). Static(). Description("Attempts to parse a string as a timestamp following a specified strptime-compatible format and outputs a timestamp, which can then be fed into [`ts_format`](#ts_format)."). Param(bloblang.NewStringParam("format").Description("The format of the target string.")) parseTSStrptimeSpecDep := asDeprecated(parseTSStrptimeSpec) parseTSStrptimeSpec = parseTSStrptimeSpec. Example( "The format consists of zero or more conversion specifiers and ordinary characters (except `%`). All ordinary characters are copied to the output string without modification. Each conversion specification begins with a `%` character followed by the character that determines the behaviour of the specifier. Please refer to [man 3 strptime](https://linux.die.net/man/3/strptime) for the list of format specifiers.", `root.doc.timestamp = this.doc.timestamp.ts_strptime("%Y-%b-%d")`, [2]string{ `{"doc":{"timestamp":"2020-Aug-14"}}`, `{"doc":{"timestamp":"2020-08-14T00:00:00Z"}}`, }, ). 
Example( "As an extension provided by the underlying formatting library, [itchyny/timefmt-go](https://github.com/itchyny/timefmt-go), the `%f` directive is supported for zero-padded microseconds, which originates from Python. Note that E and O modifier characters are not supported.", `root.doc.timestamp = this.doc.timestamp.ts_strptime("%Y-%b-%d %H:%M:%S.%f")`, [2]string{ `{"doc":{"timestamp":"2020-Aug-14 11:50:26.371000"}}`, `{"doc":{"timestamp":"2020-08-14T11:50:26.371Z"}}`, }, ) parseTSStrptimeCtor := func(deprecated bool) bloblang.MethodConstructorV2 { return func(args *bloblang.ParsedParams) (bloblang.Method, error) { layout, err := args.GetString("format") if err != nil { return nil, err } return bloblang.StringMethod(func(s string) (interface{}, error) { ut, err := timefmt.Parse(s, layout) if err != nil { return nil, err } if deprecated { return ut.Format(time.RFC3339Nano), nil } return ut, nil }), nil } } if err := bloblang.RegisterMethodV2("ts_strptime", parseTSStrptimeSpec, parseTSStrptimeCtor(false)); err != nil { panic(err) } if err := bloblang.RegisterMethodV2("parse_timestamp_strptime", parseTSStrptimeSpecDep, parseTSStrptimeCtor(true)); err != nil { panic(err) } //-------------------------------------------------------------------------- formatTSSpec := bloblang.NewPluginSpec(). Category(query.MethodCategoryTime). Beta(). Static(). Description(`Attempts to format a timestamp value as a string according to a specified format, or RFC 3339 by default. Timestamp values can either be a numerical unix time in seconds (with up to nanosecond precision via decimals), or a string in RFC 3339 format. The output format is defined by showing how the reference time, defined to be Mon Jan 2 15:04:05 -0700 MST 2006, would be displayed if it were the value. For an alternative way to specify formats check out the ` + "[`ts_strftime`](#ts_strftime)" + ` method.`). 
Param(bloblang.NewStringParam("format").Description("The output format to use.").Default(time.RFC3339Nano)). Param(bloblang.NewStringParam("tz").Description("An optional timezone to use, otherwise the timezone of the input string is used, or in the case of unix timestamps the local timezone is used.").Optional()) formatTSSpecDep := asDeprecated(formatTSSpec) formatTSSpec = formatTSSpec. Example("", `root.something_at = (this.created_at + 300).ts_format()`, // `{"created_at":1597405526}`, // `{"something_at":"2020-08-14T11:50:26.371Z"}`, ). Example( "An optional string argument can be used in order to specify the output format of the timestamp. The format is defined by showing how the reference time, defined to be Mon Jan 2 15:04:05 -0700 MST 2006, would be displayed if it were the value.", `root.something_at = (this.created_at + 300).ts_format("2006-Jan-02 15:04:05")`, // `{"created_at":1597405526}`, // `{"something_at":"2020-Aug-14 11:50:26"}`, ). Example( "A second optional string argument can also be used in order to specify a timezone, otherwise the timezone of the input string is used, or in the case of unix timestamps the local timezone is used.", `root.something_at = this.created_at.ts_format(format: "2006-Jan-02 15:04:05", tz: "UTC")`, [2]string{ `{"created_at":1597405526}`, `{"something_at":"2020-Aug-14 11:45:26"}`, }, [2]string{ `{"created_at":"2020-08-14T11:50:26.371Z"}`, `{"something_at":"2020-Aug-14 11:50:26"}`, }, ). 
Example( "And `ts_format` supports up to nanosecond precision with floating point timestamp values.", `root.something_at = this.created_at.ts_format("2006-Jan-02 15:04:05.999999", "UTC")`, [2]string{ `{"created_at":1597405526.123456}`, `{"something_at":"2020-Aug-14 11:45:26.123456"}`, }, [2]string{ `{"created_at":"2020-08-14T11:50:26.371Z"}`, `{"something_at":"2020-Aug-14 11:50:26.371"}`, }, ) formatTSCtor := func(args *bloblang.ParsedParams) (bloblang.Method, error) { layout, err := args.GetString("format") if err != nil { return nil, err } var timezone *time.Location tzOpt, err := args.GetOptionalString("tz") if err != nil { return nil, err } if tzOpt != nil { if timezone, err = time.LoadLocation(*tzOpt); err != nil { return nil, fmt.Errorf("failed to parse timezone location name: %w", err) } } return bloblang.TimestampMethod(func(target time.Time) (interface{}, error) { if timezone != nil { target = target.In(timezone) } return target.Format(layout), nil }), nil } if err := bloblang.RegisterMethodV2("ts_format", formatTSSpec, formatTSCtor); err != nil { panic(err) } if err := bloblang.RegisterMethodV2("format_timestamp", formatTSSpecDep, formatTSCtor); err != nil { panic(err) } formatTSStrftimeSpec := bloblang.NewPluginSpec(). Category(query.MethodCategoryTime). Beta(). Static(). Description("Attempts to format a timestamp value as a string according to a specified strftime-compatible format. Timestamp values can either be a numerical unix time in seconds (with up to nanosecond precision via decimals), or a string in RFC 3339 format."). Param(bloblang.NewStringParam("format").Description("The output format to use.")). Param(bloblang.NewStringParam("tz").Description("An optional timezone to use, otherwise the timezone of the input string is used.").Optional()) formatTSStrftimeSpecDep := asDeprecated(formatTSStrftimeSpec) formatTSStrftimeSpec = formatTSStrftimeSpec. 
Example( "The format consists of zero or more conversion specifiers and ordinary characters (except `%`). All ordinary characters are copied to the output string without modification. Each conversion specification begins with `%` character followed by the character that determines the behaviour of the specifier. Please refer to [man 3 strftime](https://linux.die.net/man/3/strftime) for the list of format specifiers.", `root.something_at = (this.created_at + 300).ts_strftime("%Y-%b-%d %H:%M:%S")`, // `{"created_at":1597405526}`, // `{"something_at":"2020-Aug-14 11:50:26"}`, ). Example( "A second optional string argument can also be used in order to specify a timezone, otherwise the timezone of the input string is used, or in the case of unix timestamps the local timezone is used.", `root.something_at = this.created_at.ts_strftime("%Y-%b-%d %H:%M:%S", "UTC")`, [2]string{ `{"created_at":1597405526}`, `{"something_at":"2020-Aug-14 11:45:26"}`, }, [2]string{ `{"created_at":"2020-08-14T11:50:26.371Z"}`, `{"something_at":"2020-Aug-14 11:50:26"}`, }, ). Example( "As an extension provided by the underlying formatting library, [itchyny/timefmt-go](https://github.com/itchyny/timefmt-go), the `%f` directive is supported for zero-padded microseconds, which originates from Python. 
Note that E and O modifier characters are not supported.", `root.something_at = this.created_at.ts_strftime("%Y-%b-%d %H:%M:%S.%f", "UTC")`, [2]string{ `{"created_at":1597405526}`, `{"something_at":"2020-Aug-14 11:45:26.000000"}`, }, [2]string{ `{"created_at":"2020-08-14T11:50:26.371Z"}`, `{"something_at":"2020-Aug-14 11:50:26.371000"}`, }, ) formatTSStrftimeCtor := func(args *bloblang.ParsedParams) (bloblang.Method, error) { layout, err := args.GetString("format") if err != nil { return nil, err } var timezone *time.Location tzOpt, err := args.GetOptionalString("tz") if err != nil { return nil, err } if tzOpt != nil { if timezone, err = time.LoadLocation(*tzOpt); err != nil { return nil, fmt.Errorf("failed to parse timezone location name: %w", err) } } return bloblang.TimestampMethod(func(target time.Time) (interface{}, error) { if timezone != nil { target = target.In(timezone) } return timefmt.Format(target, layout), nil }), nil } if err := bloblang.RegisterMethodV2("ts_strftime", formatTSStrftimeSpec, formatTSStrftimeCtor); err != nil { panic(err) } if err := bloblang.RegisterMethodV2("format_timestamp_strftime", formatTSStrftimeSpecDep, formatTSStrftimeCtor); err != nil { panic(err) } formatTSUnixSpec := bloblang.NewPluginSpec(). Category(query.MethodCategoryTime). Beta(). Static(). Description("Attempts to format a timestamp value as a unix timestamp. Timestamp values can either be a numerical unix time in seconds (with up to nanosecond precision via decimals), or a string in RFC 3339 format. The [`ts_parse`](#ts_parse) method can be used in order to parse different timestamp formats.") formatTSUnixSpecDep := asDeprecated(formatTSUnixSpec) formatTSUnixSpec = formatTSUnixSpec. 
Example("", `root.created_at_unix = this.created_at.ts_unix()`, [2]string{ `{"created_at":"2009-11-10T23:00:00Z"}`, `{"created_at_unix":1257894000}`, }, ) formatTSUnixCtor := func(args *bloblang.ParsedParams) (bloblang.Method, error) { return bloblang.TimestampMethod(func(target time.Time) (interface{}, error) { return target.Unix(), nil }), nil } if err := bloblang.RegisterMethodV2("ts_unix", formatTSUnixSpec, formatTSUnixCtor); err != nil { panic(err) } if err := bloblang.RegisterMethodV2("format_timestamp_unix", formatTSUnixSpecDep, formatTSUnixCtor); err != nil { panic(err) } formatTSUnixNanoSpec := bloblang.NewPluginSpec(). Category(query.MethodCategoryTime). Beta(). Static(). Description("Attempts to format a timestamp value as a unix timestamp with nanosecond precision. Timestamp values can either be a numerical unix time in seconds (with up to nanosecond precision via decimals), or a string in RFC 3339 format. The [`ts_parse`](#ts_parse) method can be used in order to parse different timestamp formats.") formatTSUnixNanoSpecDep := asDeprecated(formatTSUnixNanoSpec) formatTSUnixNanoSpec = formatTSUnixNanoSpec. Example("", `root.created_at_unix = this.created_at.ts_unix_nano()`, [2]string{ `{"created_at":"2009-11-10T23:00:00Z"}`, `{"created_at_unix":1257894000000000000}`, }, ) formatTSUnixNanoCtor := func(args *bloblang.ParsedParams) (bloblang.Method, error) { return bloblang.TimestampMethod(func(target time.Time) (interface{}, error) { return target.UnixNano(), nil }), nil } if err := bloblang.RegisterMethodV2("ts_unix_nano", formatTSUnixNanoSpec, formatTSUnixNanoCtor); err != nil { panic(err) } if err := bloblang.RegisterMethodV2("format_timestamp_unix_nano", formatTSUnixNanoSpecDep, formatTSUnixNanoCtor); err != nil { panic(err) } }
internal/impl/pure/bloblang_time.go
0.758511
0.499451
bloblang_time.go
starcoder
package openapi import ( "encoding/json" ) // AccountRouting struct for AccountRouting type AccountRouting struct { // The routing number used for US ACH payments. Only appears if `bank_countries` contains `US`. Value may be masked, in which case only the last four digits are returned. AchRoutingNumber *string `json:"ach_routing_number,omitempty"` // The countries that this bank operates the account in BankCountries []string `json:"bank_countries"` // The name of the bank managing the account BankName string `json:"bank_name"` // The routing number used for EFT payments, identifying a Canadian bank, consisting of the institution number and the branch number. Only appears if `bank_countries` contains `CA`. Value may be masked, in which case only the last four digits are returned. EftRoutingNumber *string `json:"eft_routing_number,omitempty"` // The SWIFT code for the bank. Value may be masked, in which case only the last four characters are returned. SwiftCode *string `json:"swift_code,omitempty"` // The routing number used for domestic wire payments. Only appears if `bank_countries` contains `US`. Value may be masked, in which case only the last four digits are returned. 
WireRoutingNumber *string `json:"wire_routing_number,omitempty"` } // NewAccountRouting instantiates a new AccountRouting object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewAccountRouting(bankCountries []string, bankName string) *AccountRouting { this := AccountRouting{} this.BankCountries = bankCountries this.BankName = bankName return &this } // NewAccountRoutingWithDefaults instantiates a new AccountRouting object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewAccountRoutingWithDefaults() *AccountRouting { this := AccountRouting{} return &this } // GetAchRoutingNumber returns the AchRoutingNumber field value if set, zero value otherwise. func (o *AccountRouting) GetAchRoutingNumber() string { if o == nil || o.AchRoutingNumber == nil { var ret string return ret } return *o.AchRoutingNumber } // GetAchRoutingNumberOk returns a tuple with the AchRoutingNumber field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *AccountRouting) GetAchRoutingNumberOk() (*string, bool) { if o == nil || o.AchRoutingNumber == nil { return nil, false } return o.AchRoutingNumber, true } // HasAchRoutingNumber returns a boolean if a field has been set. func (o *AccountRouting) HasAchRoutingNumber() bool { if o != nil && o.AchRoutingNumber != nil { return true } return false } // SetAchRoutingNumber gets a reference to the given string and assigns it to the AchRoutingNumber field. 
func (o *AccountRouting) SetAchRoutingNumber(v string) { o.AchRoutingNumber = &v } // GetBankCountries returns the BankCountries field value func (o *AccountRouting) GetBankCountries() []string { if o == nil { var ret []string return ret } return o.BankCountries } // GetBankCountriesOk returns a tuple with the BankCountries field value // and a boolean to check if the value has been set. func (o *AccountRouting) GetBankCountriesOk() (*[]string, bool) { if o == nil { return nil, false } return &o.BankCountries, true } // SetBankCountries sets field value func (o *AccountRouting) SetBankCountries(v []string) { o.BankCountries = v } // GetBankName returns the BankName field value func (o *AccountRouting) GetBankName() string { if o == nil { var ret string return ret } return o.BankName } // GetBankNameOk returns a tuple with the BankName field value // and a boolean to check if the value has been set. func (o *AccountRouting) GetBankNameOk() (*string, bool) { if o == nil { return nil, false } return &o.BankName, true } // SetBankName sets field value func (o *AccountRouting) SetBankName(v string) { o.BankName = v } // GetEftRoutingNumber returns the EftRoutingNumber field value if set, zero value otherwise. func (o *AccountRouting) GetEftRoutingNumber() string { if o == nil || o.EftRoutingNumber == nil { var ret string return ret } return *o.EftRoutingNumber } // GetEftRoutingNumberOk returns a tuple with the EftRoutingNumber field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *AccountRouting) GetEftRoutingNumberOk() (*string, bool) { if o == nil || o.EftRoutingNumber == nil { return nil, false } return o.EftRoutingNumber, true } // HasEftRoutingNumber returns a boolean if a field has been set. func (o *AccountRouting) HasEftRoutingNumber() bool { if o != nil && o.EftRoutingNumber != nil { return true } return false } // SetEftRoutingNumber gets a reference to the given string and assigns it to the EftRoutingNumber field. 
func (o *AccountRouting) SetEftRoutingNumber(v string) { o.EftRoutingNumber = &v } // GetSwiftCode returns the SwiftCode field value if set, zero value otherwise. func (o *AccountRouting) GetSwiftCode() string { if o == nil || o.SwiftCode == nil { var ret string return ret } return *o.SwiftCode } // GetSwiftCodeOk returns a tuple with the SwiftCode field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *AccountRouting) GetSwiftCodeOk() (*string, bool) { if o == nil || o.SwiftCode == nil { return nil, false } return o.SwiftCode, true } // HasSwiftCode returns a boolean if a field has been set. func (o *AccountRouting) HasSwiftCode() bool { if o != nil && o.SwiftCode != nil { return true } return false } // SetSwiftCode gets a reference to the given string and assigns it to the SwiftCode field. func (o *AccountRouting) SetSwiftCode(v string) { o.SwiftCode = &v } // GetWireRoutingNumber returns the WireRoutingNumber field value if set, zero value otherwise. func (o *AccountRouting) GetWireRoutingNumber() string { if o == nil || o.WireRoutingNumber == nil { var ret string return ret } return *o.WireRoutingNumber } // GetWireRoutingNumberOk returns a tuple with the WireRoutingNumber field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *AccountRouting) GetWireRoutingNumberOk() (*string, bool) { if o == nil || o.WireRoutingNumber == nil { return nil, false } return o.WireRoutingNumber, true } // HasWireRoutingNumber returns a boolean if a field has been set. func (o *AccountRouting) HasWireRoutingNumber() bool { if o != nil && o.WireRoutingNumber != nil { return true } return false } // SetWireRoutingNumber gets a reference to the given string and assigns it to the WireRoutingNumber field. 
func (o *AccountRouting) SetWireRoutingNumber(v string) { o.WireRoutingNumber = &v } func (o AccountRouting) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if o.AchRoutingNumber != nil { toSerialize["ach_routing_number"] = o.AchRoutingNumber } if true { toSerialize["bank_countries"] = o.BankCountries } if true { toSerialize["bank_name"] = o.BankName } if o.EftRoutingNumber != nil { toSerialize["eft_routing_number"] = o.EftRoutingNumber } if o.SwiftCode != nil { toSerialize["swift_code"] = o.SwiftCode } if o.WireRoutingNumber != nil { toSerialize["wire_routing_number"] = o.WireRoutingNumber } return json.Marshal(toSerialize) } type NullableAccountRouting struct { value *AccountRouting isSet bool } func (v NullableAccountRouting) Get() *AccountRouting { return v.value } func (v *NullableAccountRouting) Set(val *AccountRouting) { v.value = val v.isSet = true } func (v NullableAccountRouting) IsSet() bool { return v.isSet } func (v *NullableAccountRouting) Unset() { v.value = nil v.isSet = false } func NewNullableAccountRouting(val *AccountRouting) *NullableAccountRouting { return &NullableAccountRouting{value: val, isSet: true} } func (v NullableAccountRouting) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableAccountRouting) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
synctera/model_account_routing.go
0.837421
0.418222
model_account_routing.go
starcoder
package pedersen import ( "encoding/binary" "errors" "io" ristretto "github.com/bwesterb/go-ristretto" generator "github.com/dusk-network/dusk-blockchain/pkg/crypto/rangeproof/generators" ) // Pedersen represents a pedersen struct which holds // the necessary information to commit a vector or a scalar to a point type Pedersen struct { BaseVector *generator.Generator GenData []byte BlindPoint ristretto.Point BasePoint ristretto.Point } // New will setup the BaseVector // returning a Pedersen struct // genData is the byte slice, that will be used // to form the unique set of generators func New(genData []byte) *Pedersen { gen := generator.New(genData) var blindPoint ristretto.Point var basePoint ristretto.Point basePoint.Derive([]byte("blindPoint")) blindPoint.SetBase() return &Pedersen{ BaseVector: gen, GenData: genData, BlindPoint: blindPoint, BasePoint: basePoint, } } // Commitment represents a Pedersen Commitment // storing the value and the random blinding factor type Commitment struct { // Value is the point which has been commited to Value ristretto.Point // blinding factor is the blinding scalar. 
// Note that n vectors have 1 blinding factor BlindingFactor ristretto.Scalar } func (c *Commitment) Encode(w io.Writer) error { return binary.Write(w, binary.BigEndian, c.Value.Bytes()) } func EncodeCommitments(w io.Writer, comms []Commitment) error { lenV := uint32(len(comms)) err := binary.Write(w, binary.BigEndian, lenV) if err != nil { return err } for i := range comms { err := comms[i].Encode(w) if err != nil { return err } } return nil } func (c *Commitment) Decode(r io.Reader) error { if c == nil { return errors.New("struct is nil") } var cBytes [32]byte err := binary.Read(r, binary.BigEndian, &cBytes) if err != nil { return err } ok := c.Value.SetBytes(&cBytes) if !ok { return errors.New("could not set bytes for commitment, not an encodable point") } return nil } func DecodeCommitments(r io.Reader) ([]Commitment, error) { var lenV uint32 err := binary.Read(r, binary.BigEndian, &lenV) if err != nil { return nil, err } comms := make([]Commitment, lenV) for i := uint32(0); i < lenV; i++ { err := comms[i].Decode(r) if err != nil { return nil, err } } return comms, nil } func (c *Commitment) EqualValue(other Commitment) bool { return c.Value.Equals(&other.Value) } func (c *Commitment) Equals(other Commitment) bool { return c.EqualValue(other) && c.BlindingFactor.Equals(&other.BlindingFactor) } func (p *Pedersen) commitToScalars(blind *ristretto.Scalar, scalars ...ristretto.Scalar) ristretto.Point { n := len(scalars) var sum ristretto.Point sum.SetZero() if blind != nil { var blindPoint ristretto.Point blindPoint.ScalarMult(&p.BlindPoint, blind) sum.Add(&sum, &blindPoint) } if len(p.BaseVector.Bases) < n { diff := n - len(p.BaseVector.Bases) p.BaseVector.Compute(uint32(diff)) // num of scalars to commit should be equal or less than the number of precomputed generators } for i := 0; i < n; i++ { bi := scalars[i] Hi := p.BaseVector.Bases[i] // H_i * b_i product := ristretto.Point{} product.ScalarMult(&Hi, &bi) sum.Add(&sum, &product) } return sum } // 
CommitToScalar generates a Commitment to a scalar v, s.t. V = v * Base + blind * BlindingPoint func (p *Pedersen) CommitToScalar(v ristretto.Scalar) Commitment { // generate random blinder blind := ristretto.Scalar{} blind.Rand() // v * Base var vBase ristretto.Point vBase.ScalarMult(&p.BasePoint, &v) // blind * BlindPoint var blindPoint ristretto.Point blindPoint.ScalarMult(&p.BlindPoint, &blind) var sum ristretto.Point sum.SetZero() sum.Add(&vBase, &blindPoint) return Commitment{ Value: sum, BlindingFactor: blind, } } // CommitToVectors will take n set of vectors and form a commitment to them s.t. // V = aH + <v1, G1> + <v2, G2> + <v3, G3> // where a is a scalar, v1 is a vector of scalars, and G1 is a vector of points func (p *Pedersen) CommitToVectors(vectors ...[]ristretto.Scalar) Commitment { // Generate random blinding factor blind := ristretto.Scalar{} blind.Rand() // For each vector, we can use the commitToScalars, because a vector is just a slice of scalars var sum ristretto.Point sum.SetZero() for i, vector := range vectors { if i == 0 { // Commit to vector + blinding factor commit := p.commitToScalars(&blind, vector...) sum.Add(&sum, &commit) } else { genData := append(p.GenData, uint8(i)) ped2 := New(genData) commit := ped2.commitToScalars(nil, vector...) sum.Add(&sum, &commit) } } return Commitment{ Value: sum, BlindingFactor: blind, } }
pkg/crypto/rangeproof/pedersen/pedersen.go
0.727589
0.446495
pedersen.go
starcoder
package render import ( goimage "image" "image/color" "math" "photofield/internal/image" "github.com/tdewolff/canvas" "golang.org/x/image/draw" "golang.org/x/image/math/f64" ) func getRGBA(col color.Color) color.RGBA { r, g, b, a := col.RGBA() return color.RGBA{uint8(r >> 8), uint8(g >> 8), uint8(b >> 8), uint8(a >> 8)} } type Bitmap struct { Path string Sprite Sprite Orientation image.Orientation } func (bitmap *Bitmap) Draw(rimg draw.Image, c *canvas.Context, scales Scales, source *image.Source) error { if bitmap.Sprite.IsVisible(c, scales) { image, _, err := source.GetImage(bitmap.Path) if err != nil { return err } bitmap.DrawImage(rimg, image, c) } return nil } func (bitmap *Bitmap) DrawImage(rimg draw.Image, img goimage.Image, c *canvas.Context) { bounds := img.Bounds() model := bitmap.Sprite.Rect.GetMatrixFitBoundsRotate(bounds, bitmap.Orientation) // modelTopLeft := model.Dot(canvas.Point{X: 0, Y: 0}) // modelBottomRight := model.Dot(canvas.Point{X: float64(bounds.Max.X), Y: float64(bounds.Max.Y)}) m := c.View().Mul(model) renderImageFast(rimg, img, m) // renderImageFastCropped(rimg, img, m, bitmap.Sprite.Rect, modelTopLeft, modelBottomRight) } func renderImageFast(rimg draw.Image, img goimage.Image, m canvas.Matrix) { bounds := img.Bounds() origin := m.Dot(canvas.Point{X: 0, Y: float64(bounds.Size().Y)}) h := float64(rimg.Bounds().Size().Y) aff3 := f64.Aff3{ m[0][0], -m[0][1], origin.X, -m[1][0], m[1][1], h - origin.Y, } draw.ApproxBiLinear.Transform(rimg, aff3, img, bounds, draw.Src, nil) } // TODO finish implementation func renderImageFastCropped(rimg draw.Image, img goimage.Image, m canvas.Matrix, crop Rect, modelTopLeft canvas.Point, modelBottomRight canvas.Point) { bounds := img.Bounds() // bounds := goimage.Rect(0, 0, int(modelBounds.X), int(modelBounds.Y)) origin := m.Dot(canvas.Point{X: 0, Y: float64(bounds.Size().Y)}) h := float64(rimg.Bounds().Size().Y) aff3 := f64.Aff3{ m[0][0], -m[0][1], origin.X, -m[1][0], m[1][1], h - origin.Y, } // croptl := 
m.Dot(canvas.Point{crop.X, crop.Y}) // cropbr := m.Dot(canvas.Point{crop.X + crop.W, crop.Y + crop.H}) // println(bounds.String(), crop.String(), croptl.String(), cropbr.String()) // tx, ty := m.D model := Rect{ X: modelTopLeft.X, Y: modelTopLeft.Y, W: modelBottomRight.X - modelTopLeft.X, H: modelBottomRight.Y - modelTopLeft.Y, } println(bounds.String(), "crop", crop.String(), "model", model.String()) // bounds = bounds.Inset(10) // bounds = draw.ApproxBiLinear.Transform(rimg, aff3, img, bounds, draw.Src, nil) } func (bitmap *Bitmap) GetSize(source *image.Source) image.Size { info := source.GetInfo(bitmap.Path) return image.Size{X: info.Width, Y: info.Height} } func (bitmap *Bitmap) DrawOverdraw(c *canvas.Context, size goimage.Point) { style := c.Style pixelZoom := bitmap.Sprite.Rect.GetPixelZoom(c, size) // barWidth := -pixelZoom * 0.1 // barHeight := 0.04 alpha := pixelZoom * 0.025 * 0xFF max := 0.8 * float64(0xFF) if alpha < 0 { alpha = math.Min(max, math.Max(0, -alpha)) style.FillColor = getRGBA(color.NRGBA{0xFF, 0x00, 0x00, uint8(alpha)}) } else { alpha = math.Min(max, math.Max(0, alpha)) style.FillColor = getRGBA(color.NRGBA{0x00, 0x00, 0xFF, uint8(alpha)}) } bitmap.Sprite.DrawWithStyle(c, style) // style.FillColor = canvas.Yellowgreen // c.RenderPath( // canvas.Rectangle(bitmap.Sprite.Rect.W*0.5*barWidth, bitmap.Sprite.Rect.H*barHeight), // style, // c.View().Mul(bitmap.Sprite.Rect.GetMatrix()). 
// Translate( // bitmap.Sprite.Rect.W*0.5, // bitmap.Sprite.Rect.H*(0.5-barHeight*0.5), // ), // ) } func (bitmap *Bitmap) DrawVideoIcon(c *canvas.Context) { style := c.Style sprite := bitmap.Sprite iconSize := sprite.Rect.H * 0.04 marginTop := iconSize * 1.5 marginRight := iconSize * 1.5 style.FillColor = getRGBA(color.White) style.StrokeColor = getRGBA(color.RGBA{R: 0, G: 0, B: 0, A: 0xCC}) canvasIconSize := canvas.Rect{W: iconSize}.Transform(c.View()).W style.StrokeWidth = canvasIconSize * 0.2 style.StrokeJoiner = canvas.RoundJoiner{} c.RenderPath( canvas.RegularPolygon(3, iconSize, true), style, c.View().Mul(sprite.Rect.GetMatrix()).Translate(sprite.Rect.W-marginRight, sprite.Rect.H-marginTop).Rotate(30), ) }
internal/render/bitmap.go
0.556641
0.40392
bitmap.go
starcoder
package hand import ( "fmt" "log" "time" "github.com/aamcrae/config" "github.com/aamcrae/gpio" "github.com/aamcrae/gpio/action" ) // Configuration data for the clock hand, usually read from a configuration file. type ClockConfig struct { Name string // Name of the hand Gpio []int // Output pins for the stepper Speed float64 // Speed the stepper runs at (RPM) Period time.Duration // Period of the hand (e.g time.Hour) Update time.Duration // How often the hand updates. Steps int // Initial reference steps per revolution Encoder int // Input pin for encoder Notch int // Minimum width of encoder mark Offset int // Hand offset from midnight to encoder mark } // ClockHand combines the I/O for a hand and an encoder. // A clock is comprised of multiple hands, each of which runs independently. // Each clock hand consists of a Hand which generates move requests according to the current time, // an Encoder which provides feedback as to the actual location of the hand, // and the I/O providers for the Hand and Encoder. // A config for each hand is parsed from a configuration file. type ClockHand struct { Stepper *action.Stepper Input *io.Gpio Hand *Hand Encoder *Encoder Config *ClockConfig } // Config reads and validates a ClockHand config from a config file section. 
// Sample config: // [name] # name of hand e.g hours, minutes, seconds // stepper=4,17,27,22,3.0 # GPIOs for stepper motor, and speed in RPM // period=12h # The clock period for this hand // update=5m # The update rate as a duration // steps=4096 # Reference number of steps in a revolution // encoder=21 # GPIO for encoder // notch=100 # Min width of sensor mark // offset=2100 # The offset of the hand at the encoder mark func Config(conf *config.Config, name string) (*ClockConfig, error) { s := conf.GetSection(name) if s == nil { return nil, fmt.Errorf("no config for %s", name) } var err error var h ClockConfig h.Name = name h.Gpio = make([]int, 4) n, err := s.Parse("stepper", "%d,%d,%d,%d,%f", &h.Gpio[0], &h.Gpio[1], &h.Gpio[2], &h.Gpio[3], &h.Speed) if err != nil { return nil, fmt.Errorf("stepper: %v", err) } if n != 5 { return nil, fmt.Errorf("invalid stepper arguments") } n, err = s.Parse("steps", "%d", &h.Steps) if err != nil { return nil, fmt.Errorf("steps: %v", err) } if n != 1 { return nil, fmt.Errorf("steps: argument count") } p, err := s.GetArg("period") if err != nil { return nil, fmt.Errorf("period: %v", err) } h.Period, err = time.ParseDuration(p) if err != nil { return nil, fmt.Errorf("period: %v", err) } u, err := s.GetArg("update") if err != nil { return nil, fmt.Errorf("update: %v", err) } h.Update, err = time.ParseDuration(u) if err != nil { return nil, fmt.Errorf("update: %v", err) } n, err = s.Parse("encoder", "%d", &h.Encoder) if err != nil { return nil, fmt.Errorf("encoder: %v", err) } if n != 1 { return nil, fmt.Errorf("encoder: argument count") } n, err = s.Parse("notch", "%d", &h.Notch) if err != nil { return nil, fmt.Errorf("notch: %v", err) } if n != 1 { return nil, fmt.Errorf("notch: argument count") } n, err = s.Parse("offset", "%d", &h.Offset) if err != nil { return nil, fmt.Errorf("offset: %v", err) } if n != 1 { return nil, fmt.Errorf("offset: argument count") } return &h, nil } // NewClockHand initialises the I/O, Hand, and Encoder 
using the configuration provided. func NewClockHand(hc *ClockConfig) (*ClockHand, error) { c := new(ClockHand) c.Config = hc var gp [4]*io.Gpio var err error for i, v := range hc.Gpio { gp[i], err = io.OutputPin(v) if err != nil { return nil, fmt.Errorf("Pin %d: %v", v, err) } } c.Stepper = action.NewStepper(hc.Steps, gp[0], gp[1], gp[2], gp[3]) c.Hand = NewHand(hc.Name, hc.Period, c, hc.Update, int(hc.Steps), hc.Offset) c.Input, err = io.Pin(hc.Encoder) if err != nil { c.Close() return nil, fmt.Errorf("Encoder %d: %v", hc.Encoder, err) } err = c.Input.Edge(io.BOTH) if err != nil { c.Close() return nil, fmt.Errorf("Encoder %d: %v", hc.Encoder, err) } c.Encoder = NewEncoder(hc.Name, c.Stepper, c.Hand, c.Input, hc.Notch) return c, nil } // Run starts the clock hand, initially running a calibration so that // the encoder mark position can be discovered, and then starting the // hand processing if requested. func (c *ClockHand) Run() { Calibrate(true, c.Encoder, c.Hand, c.Config.Steps) } // Move moves the stepper motor the steps indicated. This is a // shim between the hand and the stepper so that the motor can be // turned off between movements. Waits until the motor completes the // steps before returning. // TODO: Turning the motor off immediately will miss steps under load, so // some kind of delay is needed. func (c *ClockHand) Move(steps int) { if c.Stepper != nil { c.Stepper.Step(c.Config.Speed, steps) c.Stepper.Wait() } } // GetLocation returns the current absolute location. func (c *ClockHand) GetLocation() int64 { return c.Stepper.GetStep() } // Close shuts down the clock hand and release the resources. func (c *ClockHand) Close() { if c.Stepper != nil { c.Stepper.Close() } if c.Input != nil { c.Input.Close() } } // Calibrate moves the hand at least 4 revolutions to allow // the encoder to measure the actual steps for 360 degrees of movement, and // to discover the location of the encoder mark. 
func Calibrate(run bool, e *Encoder, h *Hand, reference int) { log.Printf("%s: Starting calibration", h.Name) h.mover.Move(int(reference*4 + reference/2)) if e.Measured == 0 { log.Fatalf("Unable to calibrate") } log.Printf("%s: Calibration complete (%d steps), encoder: %d", h.Name, e.Measured, e.Location()) if run { h.Run() } }
hand/config.go
0.568536
0.460713
config.go
starcoder
package evaluator import ( "time" "github.com/sonirico/datetoken.go/models" "github.com/sonirico/datetoken.go/ast" "github.com/sonirico/datetoken.go/lexer" "github.com/sonirico/datetoken.go/parser" "github.com/sonirico/datetoken.go/token" ) // Evaluator takes a token payload to eval. Handles lexing and parsing too. type Evaluator struct { time.Time weekStartDay time.Weekday tz *time.Location current time.Time timeSet bool } // New returns a new instance of Evaluator func New() *Evaluator { return &Evaluator{ weekStartDay: time.Sunday, tz: time.UTC, timeSet: false, } } // SetTZ allows to configure a different time.Location rather than UTC func (e *Evaluator) SetTZ(tz *time.Location) { e.tz = tz } // SetWeeksStartDay allows to configure a different time.WeekDay rather than Sunday func (e *Evaluator) SetWeeksStartDay(wd time.Weekday) { e.weekStartDay = wd } // Override initial node value func (e *Evaluator) setInitial(date time.Time) { if e.timeSet { return } e.current = date if e.tz != nil { e.current = e.current.In(e.tz) } e.timeSet = true } func (e *Evaluator) evalValueNode(node *ast.ValueNode) { switch node.Literal() { case now: fallthrough default: e.setInitial(time.Now()) } } func (e *Evaluator) evalArithmeticNode(node *ast.ArithmeticNode) { amount := int(node.Amount) if token.Minus == node.Sign { amount = -amount } switch node.Unit { case second: e.addSeconds(amount) case minute: e.addMinutes(amount) case hour: e.addHours(amount) case day: e.addDays(amount) case week: e.addWeeks(amount) case month: e.addMonths(amount) case year: e.addYears(amount) } } func (e *Evaluator) evalStartSnap(node *ast.SnapNode) { switch node.Unit { // time units case minute: e.snapStartOfMinute() case hour: e.snapStartOfHour() case day: e.snapStartOfDay() case week: e.snapStartOfWeek() case businessWeek: e.snapStartOfBusinessWeek() case month: e.snapStartOfMonth() case year: e.snapStartOfYear() case quarter: e.snapStartOfCurrentQuarter() case quarter1: e.snapStartOfQuarter(0) case 
quarter2: e.snapStartOfQuarter(1) case quarter3: e.snapStartOfQuarter(2) case quarter4: e.snapStartOfQuarter(3) // weekdays case monday: e.previousMonday() case tuesday: e.previousTuesday() case wednesday: e.previousWednesday() case thursday: e.previousThursday() case friday: e.previousFriday() case saturday: e.previousSaturday() case sunday: e.previousSunday() } } func (e *Evaluator) evalEndSnap(node *ast.SnapNode) { switch node.Unit { // time unit case minute: e.snapEndOfMinute() case hour: e.snapEndOfHour() case day: e.snapEndOfDay() case week: e.snapEndOfWeek() case businessWeek: e.snapEndOfBusinessWeek() case month: e.snapEndOfMonth() case year: e.snapEndOfYear() case quarter: e.snapEndOfCurrentQuarter() case quarter1: e.snapEndOfQuarter(0) case quarter2: e.snapEndOfQuarter(1) case quarter3: e.snapEndOfQuarter(2) case quarter4: e.snapEndOfQuarter(3) // weekdays case monday: e.nextMonday() case tuesday: e.nextTuesday() case wednesday: e.nextWednesday() case thursday: e.nextThursday() case friday: e.nextFriday() case saturday: e.nextSaturday() case sunday: e.nextSunday() } } func (e *Evaluator) evalSnapNode(node *ast.SnapNode) { switch node.Token.Type { case token.SnapStart: e.evalStartSnap(node) case token.SnapEnd: e.evalEndSnap(node) } } func (e *Evaluator) evalNode(node ast.Node) { switch nod := node.(type) { case *ast.ValueNode: e.evalValueNode(nod) case *ast.ArithmeticNode: e.evalArithmeticNode(nod) case *ast.SnapNode: e.evalSnapNode(nod) } } // Eval takes a token payload and evaluates it. If correct, it returns the time.Time. 
Otherwise, the current // timestamp is returned along with errors func (e *Evaluator) Eval(payload string) (time.Time, error) { lex := lexer.New(payload) par := parser.New(lex) astRoot := par.Parse() if len(astRoot.Nodes) < 1 { return time.Now(), models.ErrEmptyToken } if len(par.Errors()) > 0 { return time.Now(), models.NewInvalidTokenError(payload, par.Errors()) } for _, node := range astRoot.Nodes { e.evalNode(node) } return e.current, nil }
evaluator/evaluator.go
0.711932
0.482551
evaluator.go
starcoder
package bites import "unicode/utf8" // Bites' purpose is to give byte slices some useful methods. // The Get methods snip things off the front, and return the remainder of the slice. // The Expect methods do a Get, and then compare it to the provided value. // If there is not enough space for a Get, or if the Expect does not match, the method // will return nil. // The int methods are big-endian by default, but they have Little-Endian versions too. // The float and complex methods put them in the form of IEE754 binary representation. type Get []byte func (b Get) Put() Put { return Put(b) } // Return a slice containing the last s bytes. func (b Get) Last(s int) Get { return b[len(b)-s:] } // Return a slice without the first s bytes. func (b Get) Skip(s int) Get { return b[s:] } // Return a slice with the last s bytes snipped off. func (b Get) Snip(s int) Get { return b[:len(b)-s] } // Split the slice into the first s bytes and the rest. func (b Get) Split(s int) (Get, Get) { return b[:s], b[s:] } // Make an exact copy of b and return it. // This will allocate. func (b Get) Clone() Get { clone := make(Get, len(b), len(b)) copy(clone, b) return clone } // Return the slice as a string (allocates). func (b Get) String() string { return string(b) } // Get one byte, and return the rest. func (b Get) GetByte(byt *byte) Get { *byt = b[0] return b[1:] } // Get a list of bools. // The bools are interpreted as though they were written by PutBool. // If a bool is nil, that bit is skipped. func (b Get) GetBool(bools ...*bool) Get { bytenum := (len(bools)-1)/8 + 1 if !b.Space(bytenum) { return nil } for i, bol := range bools { v := b[i/8] >> uint(7-(i&7)) & 1 if bol != nil { if v == 1 { *bol = true } else { *bol = false } } } return b[bytenum:] } // Get the first UTF8 rune in b. // If the rune is not valid UTF8 or b is empty, it returns nil. 
func (b Get) GetRune(r *rune, s *int) Get { char, size := utf8.DecodeRune(b) if char == utf8.RuneError && (size == 0 || size == 1) { return nil } *r = char if s != nil { *s = size } return b[size:] } // Get a slice of the given size. func (b Get) GetSlice(slice *[]byte, size int) Get { if !b.Space(size) { return nil } *slice = b[:size] return b[size:] } // Copy b to slice, and return what's left of b. // If there's not enough in b to fill the slice, return nil. func (b Get) GetSliceCopy(slice []byte) Get { if !b.Space(len(slice)) { return nil } s := copy(slice, b) return b[s:] } // Get a string of the given size. // This allocates. // If there's not enough in b to read the full string, return nil. func (b Get) GetString(str *string, size int) Get { if !b.Space(size) { return nil } var slice []byte b = b.GetSlice(&slice, size) *str = string(slice) return b } // Return true if the slice is nil. // This is poorly named, use More instead. func (b Get) Error() bool { return b == nil } // Return true if there is more data to get. func (b Get) More() bool { return b.Len() > 0 } // Return the length of the slice. func (b Get) Len() int { return len(b) } // Returns true if the length of the slice is at least expect. func (b Get) Space(expect int) bool { return b.Len() >= expect }
get.go
0.71403
0.480844
get.go
starcoder
package spoe

import (
	"encoding/binary"
	"fmt"
	"net"
	"unsafe"

	"github.com/pkg/errors"
)

// dataType identifies the wire type of a SPOE typed-data value.
type dataType byte

const (
	dataTypeNull   dataType = 0
	dataTypeBool   dataType = 1
	dataTypeInt32  dataType = 2
	dataTypeUInt32 dataType = 3
	dataTypeInt64  dataType = 4
	dataTypeUInt64 dataType = 5
	dataTypeIPV4   dataType = 6
	dataTypeIPV6   dataType = 7
	dataTypeString dataType = 8
	dataTypeBinary dataType = 9
)

const (
	// Low nibble carries the data type, high nibble carries flags.
	dataTypeMask byte = 0x0F
	dataFlagMask byte = 0xF0
	dataFlagTrue byte = 0x10
)

// decodeUint32 reads a big-endian uint32 (e.g. a frame length) from the
// head of b and returns the value and the number of bytes consumed.
func decodeUint32(b []byte) (uint32, int, error) {
	if len(b) < 4 {
		return 0, 0, fmt.Errorf("decode uint32: need at least 4 bytes, got %d", len(b))
	}
	v := binary.BigEndian.Uint32(b)
	return v, 4, nil
}

// decodeVarint decodes a SPOE variable-length integer from the head of b,
// returning the value and the number of bytes consumed. Values below 240
// are encoded in a single byte; larger values use the SPOE continuation
// scheme (4 bits in the first byte, 7 in each following byte).
func decodeVarint(b []byte) (int, int, error) {
	if len(b) == 0 {
		return 0, 0, fmt.Errorf("decode varint: unterminated sequence")
	}
	val := int(b[0])
	off := 1

	if val < 240 {
		return val, 1, nil
	}

	r := uint(4)
	for {
		if off > len(b)-1 {
			return 0, 0, fmt.Errorf("decode varint: unterminated sequence")
		}
		v := int(b[off])
		val += v << r
		off++
		r += 7
		if v < 128 {
			break
		}
	}
	return val, off, nil
}

// encodeVarint writes i into b using the SPOE variable-length integer
// encoding and returns the number of bytes written.
func encodeVarint(b []byte, i int) (int, error) {
	if len(b) == 0 {
		return 0, fmt.Errorf("encode varint: insufficient space in buffer")
	}
	if i < 240 {
		b[0] = byte(i)
		return 1, nil
	}

	n := 0
	b[n] = byte(i) | 240
	n++
	i = (i - 240) >> 4
	for i >= 128 {
		if n > len(b)-1 {
			return 0, fmt.Errorf("encode varint: insufficient space in buffer")
		}
		b[n] = byte(i) | 128
		n++
		i = (i - 128) >> 7
	}
	if n > len(b)-1 {
		return 0, fmt.Errorf("encode varint: insufficient space in buffer")
	}
	b[n] = byte(i)
	n++
	return n, nil
}

// decodeBytes reads a varint-length-prefixed byte string from b.
// The returned slice aliases b.
func decodeBytes(b []byte) ([]byte, int, error) {
	l, off, err := decodeVarint(b)
	if err != nil {
		return nil, 0, errors.Wrap(err, "decode bytes")
	}
	if len(b) < l+off {
		return nil, 0, fmt.Errorf("decode bytes: unterminated sequence")
	}
	return b[off : off+l], off + l, nil
}

// encodeBytes writes v into b as a varint-length-prefixed byte string.
func encodeBytes(b []byte, v []byte) (int, error) {
	l := len(v)
	n, err := encodeVarint(b, l)
	if err != nil {
		return 0, err
	}
	if l+n > len(b) {
		return 0, fmt.Errorf("encode bytes: insufficient space in buffer")
	}
	copy(b[n:], v)
	return n + l, nil
}

// decodeIPV4 reads a 4-byte IPv4 address from b. The result aliases b.
func decodeIPV4(b []byte) (net.IP, int, error) {
	if len(b) < net.IPv4len {
		return nil, 0, fmt.Errorf("decode ipv4: unterminated sequence")
	}
	return net.IP(b[:net.IPv4len]), net.IPv4len, nil
}

// encodeIPV4 writes a 4-byte IPv4 address into b.
func encodeIPV4(b []byte, ip net.IP) (int, error) {
	if len(b) < net.IPv4len {
		// BUG FIX: the message previously said "decode ipv4".
		return 0, fmt.Errorf("encode ipv4: insufficient space in buffer")
	}
	copy(b, ip)
	return net.IPv4len, nil
}

// encodeIPV6 writes a 16-byte IPv6 address into b.
func encodeIPV6(b []byte, ip net.IP) (int, error) {
	if len(b) < net.IPv6len {
		// BUG FIX: the message previously said "decode ipv4".
		return 0, fmt.Errorf("encode ipv6: insufficient space in buffer")
	}
	copy(b, ip)
	return net.IPv6len, nil
}

// decodeIPV6 reads a 16-byte IPv6 address from b. The result aliases b.
// BUG FIX: this previously used net.IPv4len and therefore read only the
// first 4 bytes of an IPv6 address, corrupting both the value and the
// reported offset.
func decodeIPV6(b []byte) (net.IP, int, error) {
	if len(b) < net.IPv6len {
		return nil, 0, fmt.Errorf("decode ipv6: unterminated sequence")
	}
	return net.IP(b[:net.IPv6len]), net.IPv6len, nil
}

// decodeString reads a varint-length-prefixed string from b.
// The unsafe conversion avoids a copy; the result aliases b and must not
// outlive the buffer.
func decodeString(b []byte) (string, int, error) {
	b, n, err := decodeBytes(b)
	return *(*string)(unsafe.Pointer(&b)), n, err
}

// encodeString writes v into b as a varint-length-prefixed string.
func encodeString(b []byte, v string) (int, error) {
	return encodeBytes(b, []byte(v))
}

// decodeKV reads one key/value pair (name, typed value) from b and returns
// the name, the decoded value and the number of bytes consumed.
func decodeKV(b []byte) (string, interface{}, int, error) {
	off := 0

	name, n, err := decodeString(b[off:])
	if err != nil {
		return "", nil, 0, errors.Wrap(err, "decode k/v")
	}
	off += n

	// BUG FIX: guard against a truncated buffer before reading the type byte.
	if off >= len(b) {
		return "", nil, 0, fmt.Errorf("decode k/v: unterminated sequence")
	}

	dbyte := b[off]
	dtype := dataType(dbyte & dataTypeMask)
	off++

	var value interface{}
	switch dtype {
	case dataTypeNull:
		// noop
	case dataTypeBool:
		value = dbyte&dataFlagTrue > 0
	case dataTypeInt32, dataTypeInt64:
		v, n, err := decodeVarint(b[off:])
		if err != nil {
			return "", nil, 0, errors.Wrap(err, "decode k/v")
		}
		off += n
		value = int(v)
	case dataTypeUInt32, dataTypeUInt64:
		v, n, err := decodeVarint(b[off:])
		if err != nil {
			return "", nil, 0, errors.Wrap(err, "decode k/v")
		}
		off += n
		value = uint(v)
	case dataTypeIPV4:
		v, n, err := decodeIPV4(b[off:])
		if err != nil {
			return "", nil, 0, errors.Wrap(err, "decode k/v")
		}
		off += n
		value = v
	case dataTypeIPV6:
		v, n, err := decodeIPV6(b[off:])
		if err != nil {
			return "", nil, 0, errors.Wrap(err, "decode k/v")
		}
		off += n
		value = v
	case dataTypeString:
		v, n, err := decodeString(b[off:])
		if err != nil {
			return "", nil, 0, errors.Wrap(err, "decode k/v")
		}
		off += n
		value = v
	case dataTypeBinary:
		v, n, err := decodeBytes(b[off:])
		if err != nil {
			return "", nil, 0, errors.Wrap(err, "decode k/v")
		}
		off += n
		value = v
	default:
		return "", nil, 0, fmt.Errorf("decode k/v: unknown data type %x", dtype)
	}

	return name, value, off, nil
}

// decodeKVs reads up to count key/value pairs from b into a map
// (count == -1 means "until the buffer is exhausted") and returns the map
// and the number of bytes consumed.
func decodeKVs(b []byte, count int) (map[string]interface{}, int, error) {
	ml := count
	if ml == -1 {
		ml = 1
	}
	res := make(map[string]interface{}, ml)

	off := 0
	for off < len(b) && (count == -1 || len(res) < count) {
		name, value, n, err := decodeKV(b[off:])
		if err != nil {
			return nil, 0, err
		}
		off += n
		res[name] = value
	}
	return res, off, nil
}

// encodeKV writes one key/value pair into b: the name as a string, then a
// type byte, then the typed value. It returns the number of bytes written.
func encodeKV(b []byte, name string, v interface{}) (int, error) {
	n, err := encodeString(b, name)
	if err != nil {
		// BUG FIX: err was previously passed twice (as cause and as %s arg),
		// duplicating it in the message.
		return 0, errors.Wrapf(err, "encode k/v (%s)", name)
	}

	if len(b) == n {
		return 0, fmt.Errorf("encode k/v (%s): insufficient space", name)
	}

	var m int
	switch val := v.(type) {
	case int:
		b[n] = byte(dataTypeInt64)
		n++
		m, err = encodeVarint(b[n:], int(val))
	case int64:
		b[n] = byte(dataTypeInt64)
		n++
		m, err = encodeVarint(b[n:], int(val))
	case uint:
		b[n] = byte(dataTypeUInt64)
		n++
		m, err = encodeVarint(b[n:], int(val))
	case uint64:
		b[n] = byte(dataTypeUInt64)
		n++
		m, err = encodeVarint(b[n:], int(val))
	case int32:
		b[n] = byte(dataTypeInt32)
		n++
		m, err = encodeVarint(b[n:], int(val))
	case uint32:
		b[n] = byte(dataTypeUInt32)
		n++
		m, err = encodeVarint(b[n:], int(val))
	case string:
		b[n] = byte(dataTypeString)
		n++
		m, err = encodeString(b[n:], val)
	case []byte:
		b[n] = byte(dataTypeBinary)
		n++
		m, err = encodeBytes(b[n:], val)
	case net.IP:
		if v4 := val.To4(); len(v4) > 0 {
			b[n] = byte(dataTypeIPV4)
			n++
			m, err = encodeIPV4(b[n:], v4)
		} else {
			b[n] = byte(dataTypeIPV6)
			n++
			m, err = encodeIPV6(b[n:], val)
		}
	case bool:
		// Booleans are carried entirely in the type byte via a flag.
		v := byte(dataTypeBool)
		if val {
			v |= dataFlagTrue
		}
		b[n] = v
		n++
	default:
		return 0, fmt.Errorf("encode k/v (%s): type %T is not handled", name, v)
	}
	if err != nil {
		return 0, err
	}

	return n + m, nil
}
encoding.go
0.607081
0.497376
encoding.go
starcoder
package main

import (
	"fmt"
	"math"
)

// Point is a 2-D point with float64 coordinates.
type Point struct {
	X, Y float64
}

// Abs returns the distance of p from the origin.
// It has a value receiver, so it operates on a copy of the Point.
func (p Point) Abs() float64 {
	x, y := p.X, p.Y
	return math.Sqrt(x*x + y*y)
}

// Scale multiplies both coordinates of p by f.
// It has a pointer receiver so it can mutate the Point it is called on.
func (p *Point) Scale(f float64) {
	p.X *= f
	p.Y *= f
}

// Abs is the method above written as a plain function taking a Point.
func Abs(p Point) float64 {
	x, y := p.X, p.Y
	return math.Sqrt(x*x + y*y)
}

// Scale is the method above written as a plain function taking a *Point.
func Scale(p *Point, f float64) {
	p.X *= f
	p.Y *= f
}

// MyInt shows that methods can be declared on non-struct types too.
type MyInt int

// Absolute returns the absolute value of i as an int.
func (i MyInt) Absolute() int {
	if i >= 0 {
		return int(i)
	}
	return int(-i)
}

func main() {
	// Method call on a struct value, then the equivalent function call.
	p1 := Point{3, 4}
	fmt.Println("p1 Abs method:", p1.Abs())
	fmt.Println("Abs function with p1 as argument:", Abs(p1))

	// Method call on a non-struct type.
	mi1 := MyInt(-5)
	fmt.Println("mi1 Absolute method:", mi1.Absolute())

	// A pointer-receiver method mutates its receiver; Go rewrites
	// p2.Scale(2) as (&p2).Scale(2) automatically.
	p2 := Point{1, 2}
	fmt.Println("p2 before Scale method:", p2)
	p2.Scale(2)
	fmt.Println("p2 after Scale method:", p2)

	// The same operations via the plain functions.
	p3 := Point{0.5, 0.5}
	Scale(&p3, 2)
	fmt.Println("Scale function with a pointer to p3 as argument:", p3)
	fmt.Println("Abs function with p3 as argument:", Abs(p3))

	// A value-receiver method may be called through a pointer as well;
	// the function form needs an explicit dereference.
	p4 := &Point{5, 6}
	fmt.Println("p4 Abs method:", p4.Abs())
	fmt.Println("Abs function with dereferencing on p4 pointer to struct:", Abs(*p4))
}
go-code-examples/methods-interfaces/methods/methods.go
0.727104
0.440168
methods.go
starcoder
package invasion

import (
	"fmt"
	"io/ioutil"
	"log"
	"math/rand"
	"strings"
	"time"

	"github.com/davecgh/go-spew/spew"
)

// City represents a city with a name and up to 4 roads to neighbour cities.
// Roads maps a direction (0=north, 1=east, 2=south, 3=west) to the name of
// the neighbouring city. Alien stores the ID of the alien currently in the
// city; 0 means no alien.
type City struct {
	Name  string
	Roads map[int]string
	Alien int
}

// Invasion holds a full map and implements methods to run the simulation.
type Invasion struct {
	Map        map[string]*City
	VerboseLog []string
	DebugLog   []string
	Iteration  int
	Statistics struct {
		MaxCities     int
		NumberOfMoves int
	}
}

// New constructs a new Invasion and sets up default values.
func New() *Invasion {
	rand.Seed(time.Now().UTC().UnixNano())

	return &Invasion{
		VerboseLog: make([]string, 0),
		DebugLog:   make([]string, 0),
		Iteration:  0,
	}
}

// Run starts the main simulation loop: deploys and moves aliens, prints events.
// NOTE(review): the loop condition is `<`, so at most iterations-1 movement
// rounds run even though the final message reports `iterations` — confirm
// whether this off-by-one is intended before changing it.
func (data *Invasion) Run(numAliens int, iterations int) {
	data.verbose(fmt.Sprintf("Deploying %d aliens into cities...", numAliens))
	data.Deploy(numAliens)

	defer data.PrintStatistics()

	for data.Iteration = 1; data.Iteration < iterations; data.Iteration++ {
		data.Move()

		if isAny := data.AnyCitiesLeft(); !isAny {
			data.Print(fmt.Sprintf("all cities (%d) have been destroyed", data.Statistics.MaxCities))
			return
		}
	}

	data.Print(fmt.Sprintf("simulation terminated after %d iterations", data.Iteration))
}

// PrintStatistics prints a summary of moves executed and cities destroyed.
func (data *Invasion) PrintStatistics() {
	cities := data.AllCities()

	var citiesStr string // just to make it beautiful for singular/plural
	if data.Statistics.MaxCities-len(cities) == 1 {
		citiesStr = "city was"
	} else {
		citiesStr = "cities were"
	}

	data.Print("Statistics:\n")
	data.Print(fmt.Sprintf("\tnumber of moves executed: %d", data.Statistics.NumberOfMoves))
	data.Print(fmt.Sprintf("\t%d %s destroyed out of %d, %d remained",
		data.Statistics.MaxCities-len(cities), citiesStr, data.Statistics.MaxCities, len(cities)))
}

// BuildMap reads a map file, builds City structs from its lines and stores
// the result in Invasion.Map. Lines starting with '#' and empty lines are
// ignored. It exits the process on I/O or validation errors.
func (data *Invasion) BuildMap(fileName string) {
	contents, err := ioutil.ReadFile(fileName)
	if err != nil {
		log.Fatal(err)
	}

	mapLines := strings.Split(string(contents), "\n")

	cities := make(map[string]*City)

	for _, line := range mapLines {
		// BUG FIX: the original trimmed "\r" in a separate loop but assigned
		// the result to the loop variable, which had no effect. Trim here so
		// Windows line endings don't corrupt the last direction token.
		line = strings.TrimSuffix(line, "\r")

		s := strings.Split(line, " ")
		cityName := s[0]

		if len(cityName) == 0 || cityName[0:1] == "#" {
			continue
		}

		if _, cityExists := cities[cityName]; cityExists {
			log.Fatalf("City '%s' is redefined in map, it's not supported, yet", cityName)
		}

		city := &City{
			Name:  cityName,
			Roads: make(map[int]string),
		}

		for _, directionData := range s[1:] {
			directions := strings.Split(directionData, "=") // splits <direction>=<cityName>
			city.Roads[dirNameToInt(directions[0])] = directions[1]
		}

		cities[cityName] = city
	}

	data.Map = cities
	data.Statistics.MaxCities = len(data.AllCities())

	if err := data.ValidateRoads(); err != nil {
		log.Fatal(err.Error())
	}
}

// ValidateRoads walks all defined roads and validates the source and
// destination points, returning an error describing the problem if a missing
// city or a missing back-road is found.
func (data *Invasion) ValidateRoads() error {
	for _, city := range data.Map {
		for _, direction := range AllRoads(city) {
			toCityName, toExists := city.Roads[direction]
			if !toExists {
				continue
			}

			toCity, toCityOk := data.Map[toCityName]
			if !toCityOk {
				return fmt.Errorf("Map validation error: road to %s from %s, but %s not found on map",
					toCityName, city.Name, toCityName)
			}

			if toCity.Roads[oppositeDirection(direction)] != city.Name {
				return fmt.Errorf("Map validation error: no back-road to %s from %s, but should be",
					toCity.Roads[oppositeDirection(direction)], city.Name)
			}
		}
	}

	return nil
}

// DestroyCity removes a city from the map, removes the roads leading to it
// from neighbour cities, and prints the destruction event.
func (data *Invasion) DestroyCity(cityName string, alien1 int, alien2 int) {
	city := data.Map[cityName]

	for _, direction := range AllRoads(city) {
		delete(data.Map[city.Roads[direction]].Roads, oppositeDirection(direction))
	}

	delete(data.Map, cityName)

	data.Print(fmt.Sprintf("%s has been destroyed by alien %d and alien %d\n", cityName, alien1, alien2))
}

// Deploy initially deploys aliens into random cities; if two aliens land in
// the same city it is destroyed immediately.
func (data *Invasion) Deploy(numAliens int) {
	for i := 1; i <= numAliens; i++ {
		// Re-read city keys as cities can be destroyed during deployment.
		// FIXME: could be more effective with a caching slice here.
		allCities := data.AllCities()
		numCities := len(allCities)
		randIndex := rand.Intn(numCities)
		data.MoveAlienTo(allCities[randIndex], i)
	}

	data.debug(data.Map)
}

// Move iterates over all cities and moves each resident alien along a random
// outgoing road, if any road is still present in that city.
func (data *Invasion) Move() {
	allCities := data.AllCities()
	// Prevents back-and-forth moves between neighbouring cities in one round.
	moveCache := make(map[string]bool)

	for _, cityName := range allCities {
		city, exists := data.Map[cityName]
		if !exists {
			// This city has already been destroyed in the movement phase.
			continue
		}

		if _, exists := moveCache[cityName]; exists {
			// An alien already moved into this one during this round.
			continue
		}

		if city.Alien == 0 {
			// No alien here to move.
			continue
		}

		targets := data.TargetCitiesFrom(city)
		if len(targets) == 0 {
			// No roads out of this city.
			continue
		}

		directionIndex := rand.Intn(len(targets))
		cityTo := targets[directionIndex]

		data.MoveAlienTo(cityTo, city.Alien)
		city.Alien = 0 // moved out from this city
		moveCache[cityTo] = true
	}
}

// MoveAlienTo moves an alien into a city; if another alien is already there
// the city is destroyed.
func (data *Invasion) MoveAlienTo(cityName string, alien int) {
	city := data.Map[cityName]

	data.Statistics.NumberOfMoves++

	if city.Alien == 0 {
		// No alien in this city yet, move him in.
		city.Alien = alien
	} else {
		// Already an alien here, so they fight and destroy this city.
		data.DestroyCity(cityName, city.Alien, alien)
	}
}

// TargetCitiesFrom returns all cities reachable from the given one.
func (data *Invasion) TargetCitiesFrom(city *City) (cities []string) {
	for direction := 0; direction < 4; direction++ {
		if cityName, exists := city.Roads[direction]; exists {
			cities = append(cities, cityName)
		}
	}

	return cities
}

// AnyCitiesLeft returns false if all of the cities have been destroyed.
func (data *Invasion) AnyCitiesLeft() bool {
	return len(data.AllCities()) != 0
}

// dirNameToInt converts a direction name (north, ...) to its integer value.
// NOTE(review): unknown direction names silently map to 0 (north) — confirm
// whether malformed input should be rejected instead.
func dirNameToInt(direction string) int {
	dirHash := map[string]int{"north": 0, "east": 1, "south": 2, "west": 3}
	return dirHash[direction]
}

// oppositeDirection returns the int representation of the opposite direction.
func oppositeDirection(direction int) int {
	return (direction + 2) % 4
}

// AllCities returns all city names present on the full map.
func (data *Invasion) AllCities() []string {
	cities := make([]string, len(data.Map))

	i := 0
	for k := range data.Map {
		cities[i] = k
		i++
	}

	return cities
}

// AllRoads returns a slice of ints with the roads leading out of the given
// city; it simplifies other loops.
func AllRoads(city *City) (roads []int) {
	for direction := 0; direction < 4; direction++ {
		if _, toOk := city.Roads[direction]; toOk {
			roads = append(roads, direction)
		}
	}

	return roads
}

// Print outputs a string in log format with an iteration prefix.
func (data *Invasion) Print(s string) {
	log.Printf("[iter %5d] %s", data.Iteration, s)
}

// Dump dumps full map data for debugging purposes.
func (data *Invasion) Dump() {
	spew.Dump(data)
}

// verbose is an internal verbose printer; it collects output in VerboseLog.
func (data *Invasion) verbose(str string) {
	data.VerboseLog = append(data.VerboseLog, str)
}

// debug is an internal debug printer; it collects output in DebugLog.
func (data *Invasion) debug(obj ...interface{}) {
	data.DebugLog = append(data.DebugLog, spew.Sdump(obj))
}
src/invasion/invasion.go
0.535341
0.430327
invasion.go
starcoder
package hraft

// peersInfoContent is the explanatory text written to the peers.info file in
// the server's data directory. It documents the peers.json outage-recovery
// format (adapted from Consul's peers.json documentation).
// NOTE(review): the original line breaks inside this raw string were lost in
// transit; the wording below is preserved verbatim — confirm the exact layout
// against the upstream file before relying on byte-identical output.
const peersInfoContent = ` As of Consul 0.7.0, the peers.json file is only used for recovery after an outage. The format of this file depends on what the server has configured for its Raft protocol version. Please see the agent configuration page at https://www.consul.io/docs/agent/options.html#_raft_protocol for more details about this parameter. For Raft protocol version 2 and earlier, this should be formatted as a JSON array containing the address and port of each Consul server in the cluster, like this: [ "10.1.0.1:8300", "10.1.0.2:8300", "10.1.0.3:8300" ] For Raft protocol version 3 and later, this should be formatted as a JSON array containing the node ID, address:port, and suffrage information of each Consul server in the cluster, like this: [ { "id": "adf4238a-882b-9ddc-4a9d-5b6758e4159e", "address": "10.1.0.1:8300", "non_voter": false }, { "id": "8b6dda82-3103-11e7-93ae-92361f002671", "address": "10.1.0.2:8300", "non_voter": false }, { "id": "97e17742-3103-11e7-93ae-92361f002671", "address": "10.1.0.3:8300", "non_voter": false } ] The "id" field is the node ID of the server. This can be found in the logs when the server starts up, or in the "node-id" file inside the server's data directory. The "address" field is the address and port of the server. The "non_voter" field controls whether the server is a non-voter, which is used in some advanced Autopilot configurations, please see https://www.consul.io/docs/guides/autopilot.html for more information. If "non_voter" is omitted it will default to false, which is typical for most clusters. Under normal operation, the peers.json file will not be present. When Consul starts for the first time, it will create this peers.info file and delete any existing peers.json file so that recovery doesn't occur on the first startup. Once this peers.info file is present, any peers.json file will be ingested at startup, and will set the Raft peer configuration manually to recover from an outage.
It's crucial that all servers in the cluster are shut down before creating the peers.json file, and that all servers receive the same configuration. Once the peers.json file is successfully ingested and applied, it will be deleted. Please see https://www.consul.io/docs/guides/outage.html for more information. `
hashicorp-raft/hraft/peers-info.go
0.609292
0.469946
peers-info.go
starcoder
package benchmarking

import (
	"math"
	"math/rand"

	"github.com/nathanhack/errorcorrectingcodes/linearblock/messagepassing/bec"
	mat "github.com/nathanhack/sparsemat"
	mat2 "gonum.org/v1/gonum/mat"
)

// RandomMessage creates a random binary message of the given length.
func RandomMessage(length int) mat.SparseVector {
	message := mat.CSRVec(length)
	for i := 0; i < length; i++ {
		message.Set(i, rand.Intn(2))
	}
	return message
}

// RandomMessageOnesCount creates a random message of the given length with a
// Hamming weight equal to min(onesCount, length).
// BUG FIX: previously an onesCount greater than length looped forever; the
// weight is now capped at length.
func RandomMessageOnesCount(length int, onesCount int) mat.SparseVector {
	message := mat.CSRVec(length)
	for message.HammingWeight() < onesCount && message.HammingWeight() < length {
		message.Set(rand.Intn(length), 1)
	}
	return message
}

// RandomFlipBitCount returns a copy of input with
// min(numberOfBitsToFlip, input.Len()) randomly chosen bits flipped.
// NOTE(review): flipping is done via Set(i, At(i)+1) — this assumes the
// sparsemat vector reduces values mod 2 (GF(2)); confirm with the library.
func RandomFlipBitCount(input mat.SparseVector, numberOfBitsToFlip int) mat.SparseVector {
	output := mat.CSRVecCopy(input)

	// Pick distinct indices to flip.
	flip := make(map[int]bool)
	for len(flip) < numberOfBitsToFlip && len(flip) < input.Len() {
		flip[rand.Intn(input.Len())] = true
	}

	for i := range flip {
		output.Set(i, output.At(i)+1)
	}
	return output
}

// RandomErase creates a new slice of ErasureBits with some of them set to
// Erased, given the probability of erasure.
func RandomErase(codeword []bec.ErasureBit, probabilityOfErasure float64) []bec.ErasureBit {
	return RandomEraseCount(codeword, int(math.Round(probabilityOfErasure*float64(len(codeword)))))
}

// RandomEraseCount creates a copy of the codeword and randomly sets
// min(numberOfBitsToFlip, len(codeword)) of its bits to Erased.
// BUG FIX: previously a count greater than len(codeword) looped forever;
// the count is now capped like in RandomFlipBitCount.
func RandomEraseCount(codeword []bec.ErasureBit, numberOfBitsToFlip int) []bec.ErasureBit {
	output := make([]bec.ErasureBit, len(codeword))

	// Randomly pick distinct indices to erase.
	flip := make(map[int]bool)
	for len(flip) < numberOfBitsToFlip && len(flip) < len(codeword) {
		flip[rand.Intn(len(codeword))] = true
	}

	// Copy the old data.
	for i := range codeword {
		output[i] = codeword[i]
	}

	// Set the erased bits.
	for i := range flip {
		output[i] = bec.Erased
	}
	return output
}

// RandomNoiseBPSK creates a noisy version of the BPSK vector using the
// E_b/N_0 (energy-per-bit to noise-density ratio) passed in.
func RandomNoiseBPSK(bpsk mat2.Vector, E_bPerN_0 float64) mat2.Vector {
	// Using σ^2 = N_0/2 and E_b = 1 we get σ = sqrt(1/(2*E_bPerN_0)).
	σ := math.Sqrt(1 / (2 * E_bPerN_0))

	result := mat2.NewVecDense(bpsk.Len(), nil)
	for i := 0; i < bpsk.Len(); i++ {
		result.SetVec(i, rand.NormFloat64()*σ)
	}
	result.AddVec(result, bpsk)
	return result
}
benchmarking/randomdata.go
0.675229
0.409723
randomdata.go
starcoder
package hraftd

import "reflect"

// DistributedApplier is the data applier with NodeID.
type DistributedApplier interface {
	Distribute(NodeID NodeID, item Identifier)
}

// Identifier gives the ID getter.
type Identifier interface {
	ID() string
}

// Distributor is the role in charge of distribution among the hraft cluster nodes.
type Distributor struct {
	// StickyMap remembers the node an item ID was last assigned to, so that
	// redistributions keep items on their previous node when possible.
	StickyMap map[string]NodeID
}

// NewDistributor makes a new Distributor.
func NewDistributor() *Distributor {
	d := &Distributor{
		StickyMap: make(map[string]NodeID),
	}

	return d
}

// Cached reflect.Type values for the two interfaces checked at runtime.
// nolint:gochecknoglobals
var (
	distributedApplierType = reflect.TypeOf((*DistributedApplier)(nil)).Elem()
	identifierType         = reflect.TypeOf((*Identifier)(nil)).Elem()
)

// Distribute spreads the items in data (a slice of Identifier) across peers,
// returning a slice of emptyReceiver's type with one element per item.
// Items sticky to a still-alive node stay there; the rest are assigned
// round-robin-capped via makePeersMap. Panics if data or emptyReceiver has
// the wrong type (see checkDataType / checkReceiverType).
func (d *Distributor) Distribute(peers []Peer, data, emptyReceiver interface{}) interface{} {
	rt := checkReceiverType(emptyReceiver)
	dv := d.checkDataType(data)

	d.cleanKeysNotIn(peers)

	dataLen := dv.Len()
	// Pre-compute how many items each node can take.
	peersNumMap := makePeersMap(peers, dataLen)
	// The distribution result.
	distributed := reflect.MakeSlice(reflect.SliceOf(rt), 0, dataLen)
	// Items that still need a fresh node assignment.
	newItems := make([]Identifier, 0, dataLen)

	// First pass: keep items sticky to their previously assigned node.
	for i := 0; i < dataLen; i++ {
		item := dv.Index(i).Interface().(Identifier)
		if nodeID := d.StickyMap[item.ID()]; nodeID != "" {
			v := reflect.New(rt)
			a := v.Interface().(DistributedApplier)
			a.Distribute(nodeID, item)
			distributed = reflect.Append(distributed, v.Elem())
			peersNumMap[nodeID]--
		} else {
			newItems = append(newItems, item)
		}
	}

	// Second pass: distribute the remaining items to nodes with capacity left.
	for _, item := range newItems {
		for _, peer := range peers {
			if peersNumMap[peer.ID] <= 0 {
				continue
			}

			d.Put(item.ID(), peer.ID)

			v := reflect.New(rt)
			a := v.Interface().(DistributedApplier)
			a.Distribute(peer.ID, item)
			distributed = reflect.Append(distributed, v.Elem())
			peersNumMap[peer.ID]--

			break
		}
	}

	return distributed.Interface()
}

// Put records the node ID related to id directly in the sticky map.
func (d *Distributor) Put(id string, nodeID NodeID) { d.StickyMap[id] = nodeID }

// checkDataType asserts that data is a slice whose elements implement
// Identifier, panicking otherwise, and returns its reflect.Value.
func (d *Distributor) checkDataType(data interface{}) reflect.Value {
	dv := reflect.ValueOf(data)
	if dv.Type().Kind() != reflect.Slice {
		panic("data should be slice")
	}

	if !dv.Type().Elem().Implements(identifierType) {
		panic("data should implements Identifier")
	}

	return dv
}

// checkReceiverType asserts that *T (for receiver type T, pointer or not)
// implements DistributedApplier, panicking otherwise, and returns the
// non-pointer element type T.
func checkReceiverType(emptyReceiver interface{}) reflect.Type {
	rt := reflect.TypeOf(emptyReceiver)
	rtPtr := rt

	if rt.Kind() == reflect.Ptr {
		rt = rt.Elem()
	} else {
		rtPtr = reflect.PtrTo(rt)
	}

	if !rtPtr.Implements(distributedApplierType) {
		panic("receiver type should implement *DistributedApplier")
	}

	return rt
}

// makePeersMap computes how many of dataLen items each peer may receive,
// spreading the counts as evenly as possible in peer order.
func makePeersMap(peers []Peer, dataLen int) map[NodeID]int {
	peersNumMap := make(map[NodeID]int)

	for i := 0; i < dataLen; i++ {
		p := peers[i%len(peers)]
		peersNumMap[p.ID]++
	}

	return peersNumMap
}

// CleanSticky cleans the sticky map state.
func (d *Distributor) CleanSticky() { d.StickyMap = make(map[string]NodeID) }

// cleanKeysNotIn drops sticky entries whose node is no longer in peers.
func (d *Distributor) cleanKeysNotIn(peers []Peer) {
	peersMap := make(map[NodeID]bool)
	for _, p := range peers {
		peersMap[p.ID] = true
	}

	for id, nodeID := range d.StickyMap {
		if _, ok := peersMap[nodeID]; !ok {
			delete(d.StickyMap, id)
		}
	}
}
distribute.go
0.507568
0.42925
distribute.go
starcoder
package goraph

import (
	"sort"
)

// Graph is implemented by all of the graph types. All of the graph
// algorithms use this data type instead of the concrete types.
type Graph interface {
	// AddVertex creates and returns a new vertex in the graph.
	AddVertex() Vertex

	// RemoveVertex permanently removes a vertex from the graph.
	RemoveVertex(v Vertex)

	// AddEdge adds an edge between u and v. If the graph is directional,
	// then the edge will go from u to v.
	AddEdge(u, v Vertex)

	// RemoveEdge removes the edge between u and v.
	RemoveEdge(u, v Vertex)

	// Vertices returns a slice of the graph's vertices.
	Vertices() []Vertex

	// Edges returns a slice of the graph's edges.
	Edges() []Edge

	// Neighbours returns a slice of the vertices that neighbour v.
	Neighbours(v Vertex) []Vertex
}

// Compile-time interface satisfaction checks.
var (
	_ Graph = &DirectedAdjacencyList{}
	_ Graph = &AdjacencyList{}
)

// Vertex represents a node in the graph. Users should create
// new Vertex values with AddVertex.
type Vertex int

// Edge represents an edge between two vertices.
// In a directed graph, the edge is from U to V.
type Edge struct{ U, V Vertex }

// AdjacencyList implements an undirected graph using an adjacency list.
// Each edge {u, v} is stored once, under min(u, v).
type AdjacencyList struct {
	edges      map[Vertex][]Vertex
	nextVertex Vertex
}

// NewAdjacencyList creates an empty graph.
func NewAdjacencyList() *AdjacencyList {
	return &AdjacencyList{edges: make(map[Vertex][]Vertex)}
}

// AddVertex creates and returns a new vertex.
func (g *AdjacencyList) AddVertex() Vertex {
	v := g.nextVertex
	g.edges[v] = make([]Vertex, 0)
	g.nextVertex++
	return v
}

// RemoveVertex removes v and every edge incident to it.
// BUG FIX: the original removed elements with append while still ranging
// over the stale slice header, so with duplicate edges some references to v
// could survive. Filtering in place removes every occurrence.
func (g *AdjacencyList) RemoveVertex(v Vertex) {
	delete(g.edges, v)
	for vtx, vertices := range g.edges {
		filtered := vertices[:0]
		for _, candidate := range vertices {
			if candidate != v {
				filtered = append(filtered, candidate)
			}
		}
		g.edges[vtx] = filtered
	}
}

// AddEdge adds the undirected edge {u, v}, normalized so it is stored
// under the smaller vertex. Duplicate edges are not rejected.
func (g *AdjacencyList) AddEdge(u, v Vertex) {
	if v < u {
		u, v = v, u
	}
	edges := g.edges[u]
	g.edges[u] = append(edges, v)
}

// RemoveEdge removes one occurrence of the edge {u, v}, if present.
func (g *AdjacencyList) RemoveEdge(u, v Vertex) {
	if v < u {
		u, v = v, u
	}
	vertices, ok := g.edges[u]
	if !ok {
		return
	}
	for idx, vtx := range vertices {
		if vtx == v {
			// Remove the edge.
			g.edges[u] = append(vertices[:idx], vertices[idx+1:]...)
			break
		}
	}
}

// VertexSlice is a convenience type for sorting vertices by ID.
type VertexSlice []Vertex

func (p VertexSlice) Len() int           { return len(p) }
func (p VertexSlice) Less(i, j int) bool { return p[i] < p[j] }
func (p VertexSlice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

// Sort sorts the slice in place.
func (p VertexSlice) Sort() { sort.Sort(p) }

// EdgeSlice is a convenience type for sorting edges by ID.
type EdgeSlice []Edge

func (p EdgeSlice) Len() int { return len(p) }
func (p EdgeSlice) Less(i, j int) bool {
	if p[i].U == p[j].U {
		return p[i].V < p[j].V
	}
	return p[i].U < p[j].U
}
func (p EdgeSlice) Swap(i, j int) { p[i], p[j] = p[j], p[i] }

// Sort sorts the slice in place.
func (p EdgeSlice) Sort() { sort.Sort(p) }

// Vertices returns the graph's vertices in map order (unsorted).
func (g *AdjacencyList) Vertices() []Vertex {
	vertices := make(VertexSlice, len(g.edges))
	var i int
	for k := range g.edges {
		vertices[i] = k
		i++
	}
	return vertices
}

// Edges returns the graph's edges, each in normalized (U <= V) form.
func (g *AdjacencyList) Edges() []Edge {
	var edges []Edge
	for k, neighbors := range g.edges {
		for _, n := range neighbors {
			edges = append(edges, Edge{k, n})
		}
	}
	return edges
}

// Neighbours returns the neighbour list stored under v.
// NOTE(review): because edges are stored only under min(u, v), this returns
// only neighbours with a larger ID — confirm callers expect that.
func (g *AdjacencyList) Neighbours(v Vertex) []Vertex {
	return g.edges[v]
}

// DirectedAdjacencyList is like AdjacencyList, but directed.
type DirectedAdjacencyList struct {
	edges      map[Vertex][]Vertex
	nextVertex Vertex
}

// NewDirectedAdjacencyList creates and initializes a DirectedAdjacencyList.
func NewDirectedAdjacencyList() *DirectedAdjacencyList {
	g := &DirectedAdjacencyList{edges: make(map[Vertex][]Vertex)}
	return g
}

// addVertex adds v to the graph in an idempotent fashion. The return value
// indicates whether or not the vertex was already in the graph; if false,
// the value was not in the graph before it was added.
func (g *DirectedAdjacencyList) addVertex(v Vertex) bool {
	_, ok := g.edges[v]
	if !ok {
		g.edges[v] = make([]Vertex, 0)
	}
	return ok
}

// AddEdge connects vertices u and v in the graph, from u to v.
func (g *DirectedAdjacencyList) AddEdge(u, v Vertex) {
	g.addVertex(u)
	g.addVertex(v)
	g.edges[u] = append(g.edges[u], v)
}

// RemoveEdge removes one occurrence of the edge u->v, if present.
func (g *DirectedAdjacencyList) RemoveEdge(u, v Vertex) {
	vertices, ok := g.edges[u]
	if !ok {
		return
	}
	for idx, vtx := range vertices {
		if vtx == v {
			// Remove the edge.
			g.edges[u] = append(vertices[:idx], vertices[idx+1:]...)
			break
		}
	}
}

// AddVertex creates and returns a new vertex.
func (g *DirectedAdjacencyList) AddVertex() Vertex {
	v := g.nextVertex
	g.addVertex(v)
	g.nextVertex++
	return v
}

// RemoveVertex removes v and every edge into or out of it.
// BUG FIX: same stale-slice removal bug as AdjacencyList.RemoveVertex;
// filtering in place removes every occurrence.
func (g *DirectedAdjacencyList) RemoveVertex(v Vertex) {
	delete(g.edges, v)
	for vtx, vertices := range g.edges {
		filtered := vertices[:0]
		for _, candidate := range vertices {
			if candidate != v {
				filtered = append(filtered, candidate)
			}
		}
		g.edges[vtx] = filtered
	}
}

// Vertices returns the graph's vertices in map order (unsorted).
func (g *DirectedAdjacencyList) Vertices() []Vertex {
	vertices := make([]Vertex, 0, len(g.edges))
	for k := range g.edges {
		vertices = append(vertices, k)
	}
	return vertices
}

// Edges returns the graph's directed edges.
func (g *DirectedAdjacencyList) Edges() []Edge {
	var edges []Edge
	for k, neighbors := range g.edges {
		for _, n := range neighbors {
			edges = append(edges, Edge{k, n})
		}
	}
	return edges
}

// Neighbours returns the vertices that v connects to directionally.
func (g *DirectedAdjacencyList) Neighbours(v Vertex) []Vertex {
	return g.edges[v]
}

// Predecessors returns a slice of vertices that connect to v directionally.
func (g *DirectedAdjacencyList) Predecessors(v Vertex) (result []Vertex) {
	for vtx, vertices := range g.edges {
		for _, candidate := range vertices {
			if candidate == v {
				result = append(result, vtx)
			}
		}
	}
	return
}

// Successors returns a slice of vertices that v connects to directionally.
// This method returns the same thing as Neighbours.
func (g *DirectedAdjacencyList) Successors(v Vertex) []Vertex {
	return g.edges[v]
}
graph.go
0.86031
0.638849
graph.go
starcoder
package contact

import (
	"math"

	"github.com/hueypark/physics/core/body"
	"github.com/hueypark/physics/core/math/rotator"
	"github.com/hueypark/physics/core/math/vector"
	"github.com/hueypark/physics/core/shape"
	"github.com/hueypark/physics/core/shape/circle"
	"github.com/hueypark/physics/core/shape/convex"
)

// DetectCollision dispatches to the narrow-phase routine matching the two
// bodies' shape types and stores the resulting contact normal, penetration
// depth and contact points on c. Asymmetric pairs are canonicalized with
// swap() so each helper always receives its arguments in a fixed order.
func (c *Contact) DetectCollision() {
	lhsType := c.lhs.Shape.Type()
	rhsType := c.rhs.Shape.Type()
	// Go switch cases do not fall through, so the explicit breaks the
	// original carried were redundant and have been removed.
	switch lhsType {
	case shape.BULLET:
		switch rhsType {
		case shape.BULLET:
			// Bullet vs. bullet collisions are not handled.
		case shape.CIRCLE:
			c.normal, c.penetration, c.points = bulletToCircle(c.lhs, c.rhs)
		case shape.CONVEX:
			c.normal, c.penetration, c.points = bulletToConvex(c.lhs, c.rhs)
		}
	case shape.CIRCLE:
		switch rhsType {
		case shape.BULLET:
			c.swap()
			c.normal, c.penetration, c.points = bulletToCircle(c.lhs, c.rhs)
		case shape.CIRCLE:
			c.normal, c.penetration, c.points = circleToCircle(c.lhs, c.rhs)
		case shape.CONVEX:
			c.normal, c.penetration, c.points = circleToConvex(c.lhs, c.rhs)
		}
	case shape.CONVEX:
		switch rhsType {
		case shape.BULLET:
			c.swap()
			c.normal, c.penetration, c.points = bulletToConvex(c.lhs, c.rhs)
		case shape.CIRCLE:
			c.swap()
			c.normal, c.penetration, c.points = circleToConvex(c.lhs, c.rhs)
		case shape.CONVEX:
			c.normal, c.penetration, c.points = convexToConvex(c.lhs, c.rhs)
		}
	}
}

// swap exchanges the two bodies so the narrow-phase helpers can assume a
// fixed argument order.
func (c *Contact) swap() {
	c.lhs, c.rhs = c.rhs, c.lhs
}

// bulletToCircle tests a point (bullet) body against a circle. On a miss it
// returns the raw, unnormalized offset as normal with zero penetration,
// matching the original behavior.
func bulletToCircle(lhs, rhs *body.Body) (normal vector.Vector, penetration float64, points []vector.Vector) {
	rhsCircle := rhs.Shape.(*circle.Circle)
	normal = vector.Subtract(rhs.Position(), lhs.Position())
	distanceSquared := normal.SizeSquared()
	if distanceSquared >= rhsCircle.Radius*rhsCircle.Radius {
		return
	}
	distance := math.Sqrt(distanceSquared)
	normal.Normalize()
	penetration = rhsCircle.Radius - distance
	points = append(points, vector.Add(
		lhs.Position(),
		vector.Multiply(normal, -0.5*penetration)))
	return normal, penetration, points
}

// bulletToConvex tests a point (bullet) body against a convex polygon by
// checking the point against every edge; the edge with the smallest
// perpendicular projection yields the contact normal and penetration.
func bulletToConvex(lhs, rhs *body.Body) (normal vector.Vector, penetration float64, points []vector.Vector) {
	rhsConvex := rhs.Shape.(*convex.Convex)
	penetration = math.MaxFloat64
	for _, edge := range rhsConvex.Edges() {
		worldStart := rhs.Rotation().RotateVector(edge.Start)
		worldStart.Add(rhs.Position())
		worldEnd := rhs.Rotation().RotateVector(edge.End)
		worldEnd.Add(rhs.Position())
		edgeVector := vector.Subtract(worldEnd, worldStart)
		pointVector := vector.Subtract(lhs.Position(), worldStart)
		if !pointVector.OnTheRight(edgeVector) {
			// The point lies outside this edge; no collision.
			normal = vector.Vector{}
			penetration = 0
			return normal, penetration, points
		}
		perpendicular := vector.Vector{-edgeVector.Y, edgeVector.X}
		perpendicular.Normalize()
		lhsVector := vector.Subtract(lhs.Position(), worldStart)
		proj := vector.Multiply(perpendicular, vector.Dot(lhsVector, perpendicular))
		if proj.Size() < penetration {
			normal = perpendicular
			penetration = proj.Size()
		}
	}
	points = append(points, vector.Add(
		lhs.Position(),
		vector.Multiply(normal, -0.5*penetration)))
	return normal, penetration, points
}

// circleToCircle tests two circles; they collide when the distance between
// centers is less than the sum of radii.
func circleToCircle(lhs, rhs *body.Body) (normal vector.Vector, penetration float64, points []vector.Vector) {
	lhsCircle := lhs.Shape.(*circle.Circle)
	rhsCircle := rhs.Shape.(*circle.Circle)
	normal = vector.Subtract(rhs.Position(), lhs.Position())
	distanceSquared := normal.SizeSquared()
	radius := lhsCircle.Radius + rhsCircle.Radius
	if distanceSquared >= radius*radius {
		return
	}
	distance := math.Sqrt(distanceSquared)
	normal.Normalize()
	penetration = radius - distance
	points = append(points, vector.Add(
		lhs.Position(),
		vector.Add(vector.Multiply(normal, lhsCircle.Radius),
			vector.Multiply(normal, -0.5*penetration))))
	return normal, penetration, points
}

// circleToConvex tests a circle against a convex polygon by projecting the
// circle's center onto each edge normal and keeping the edge of least
// penetration.
func circleToConvex(l, r *body.Body) (normal vector.Vector, penetration float64, points []vector.Vector) {
	lCircle := l.Shape.(*circle.Circle)
	rConvex := r.Shape.(*convex.Convex)
	minPenetration := math.MaxFloat64
	for _, edge := range rConvex.Edges() {
		edgeNormal := r.Rotation().RotateVector(edge.Normal)
		edgeStart := r.Rotation().RotateVector(edge.Start)
		p := -vector.Dot(edgeNormal, vector.Subtract(l.Position(), vector.Add(r.Position(), edgeStart)))
		if p < -lCircle.Radius {
			// A separating axis was found; no collision.
			return normal, penetration, points
		}
		if p < minPenetration {
			minPenetration = p
			normal = edgeNormal
		}
	}
	normal.Invert()
	penetration = lCircle.Radius + minPenetration
	points = append(points, vector.Add(
		l.Position(),
		vector.Add(vector.Multiply(normal, lCircle.Radius),
			vector.Multiply(normal, -0.5*penetration))))
	return normal, penetration, points
}

// convexToConvex tests two convex polygons with the separating-axis theorem,
// querying the axis of least penetration from both bodies' edge sets.
func convexToConvex(l, r *body.Body) (normal vector.Vector, penetration float64, points []vector.Vector) {
	lConvex := l.Shape.(*convex.Convex)
	rConvex := r.Shape.(*convex.Convex)
	lPenetration, lNormal, lPoint := findAxisLeastPenetration(lConvex, rConvex, l.Position(), r.Position(), l.Rotation(), r.Rotation())
	if lPenetration < 0.0 {
		return normal, penetration, points
	}
	rPenetration, rNormal, rPoint := findAxisLeastPenetration(rConvex, lConvex, r.Position(), l.Position(), r.Rotation(), l.Rotation())
	if rPenetration < 0.0 {
		return normal, penetration, points
	}
	if lPenetration < rPenetration {
		normal = lNormal
		penetration = lPenetration
		points = append(points, lPoint)
	} else {
		// Flip the normal so it always points from l toward r.
		normal = vector.Invert(rNormal)
		penetration = rPenetration
		points = append(points, rPoint)
	}
	return normal, -penetration, points
}

// findAxisLeastPenetration scans l's edge normals, finds r's support point
// against each, and returns the axis of least penetration together with the
// contact normal and point on that axis.
func findAxisLeastPenetration(l, r *convex.Convex, lPos, rPos vector.Vector, lRot, rRot rotator.Rotator) (minPenetration float64, bestNormal vector.Vector, bestPoint vector.Vector) {
	minPenetration = math.MaxFloat64
	for _, edge := range l.Edges() {
		normal := lRot.RotateVector(edge.Normal)
		s := r.Support(vector.Invert(normal), rRot)
		v := vector.Add(lRot.RotateVector(edge.Start), lPos)
		v.Subtract(rPos)
		penetration := -vector.Dot(normal, vector.Subtract(s, v))
		if penetration < minPenetration {
			bestNormal = normal
			minPenetration = penetration
			bestPoint = vector.Add(vector.Add(s, rPos), vector.Multiply(bestNormal, penetration*0.5))
		}
	}
	return minPenetration, bestNormal, bestPoint
}
core/contact/detect_collision.go
0.738103
0.545649
detect_collision.go
starcoder
package iso20022 // Set of characteristics that apply to the the direct debit transaction(s). type DirectDebitTransactionInformation2 struct { // Set of elements to reference a payment instruction. PaymentIdentification *PaymentIdentification2 `xml:"PmtId"` // Set of elements used to further specify the type of transaction. PaymentTypeInformation *PaymentTypeInformation4 `xml:"PmtTpInf,omitempty"` // Amount of money moved between the instructing agent and the instructed agent. InterbankSettlementAmount *CurrencyAndAmount `xml:"IntrBkSttlmAmt"` // Date on which the amount of money ceases to be available to the agent that owes it and when the amount of money becomes available to the agent to which it is due. InterbankSettlementDate *ISODate `xml:"IntrBkSttlmDt,omitempty"` // Amount of money to be moved between the debtor and creditor, before deduction of charges, expressed in the currency as ordered by the initiating party. InstructedAmount *CurrencyAndAmount `xml:"InstdAmt,omitempty"` // The factor used for conversion of an amount from one currency into another. This reflects the price at which one currency was bought with another currency. ExchangeRate *BaseOneRate `xml:"XchgRate,omitempty"` // Specifies which party/parties will bear the charges associated with the processing of the payment transaction. ChargeBearer *ChargeBearerType1Code `xml:"ChrgBr"` // Provides information on the charges related to the payment transaction. ChargesInformation []*ChargesInformation1 `xml:"ChrgsInf,omitempty"` // Date at which the creditor requests the amount of money to be collected from the debtor. RequestedCollectionDate *ISODate `xml:"ReqdColltnDt,omitempty"` // Set of elements providing information specific to the direct debit mandate. DirectDebitTransaction *DirectDebitTransaction1 `xml:"DrctDbtTx,omitempty"` // Party to which an amount of money is due. 
Creditor *PartyIdentification8 `xml:"Cdtr"` // Unambiguous identification of the account of the creditor to which a credit entry will be posted as a result of the payment transaction. CreditorAccount *CashAccount7 `xml:"CdtrAcct,omitempty"` // Financial institution servicing an account for the creditor. CreditorAgent *BranchAndFinancialInstitutionIdentification3 `xml:"CdtrAgt"` // Unambiguous identification of the account of the creditor agent at its servicing agent to which a credit entry will be made as a result of the payment transaction. CreditorAgentAccount *CashAccount7 `xml:"CdtrAgtAcct,omitempty"` // Ultimate party to which an amount of money is due. UltimateCreditor *PartyIdentification8 `xml:"UltmtCdtr,omitempty"` // Party that initiates the payment. In the payment context, this can either be the debtor (in a credit transfer), the creditor (in a direct debit), or a party that initiates the payment on behalf of the debtor or creditor. InitiatingParty *PartyIdentification8 `xml:"InitgPty,omitempty"` // Agent that instructs the next party in the chain to carry out the (set of) instruction(s). InstructingAgent *BranchAndFinancialInstitutionIdentification3 `xml:"InstgAgt,omitempty"` // Agent that is instructed by the previous party in the chain to carry out the (set of) instruction(s). InstructedAgent *BranchAndFinancialInstitutionIdentification3 `xml:"InstdAgt,omitempty"` // Agent between the debtor agent and creditor agent. // // Usage: If more than one intermediary agent is present, then IntermediaryAgent1 identifies the agent between the creditor agent and the intermediary agent 2. IntermediaryAgent1 *BranchAndFinancialInstitutionIdentification3 `xml:"IntrmyAgt1,omitempty"` // Unambiguous identification of the account of the intermediary agent 1 at its servicing agent in the payment chain. IntermediaryAgent1Account *CashAccount7 `xml:"IntrmyAgt1Acct,omitempty"` // Agent between the debtor agent and creditor agent. 
// // Usage: If more than two intermediary agents are present, then IntermediaryAgent2 identifies the agent between the intermediary agent 1 and the intermediary agent 3. IntermediaryAgent2 *BranchAndFinancialInstitutionIdentification3 `xml:"IntrmyAgt2,omitempty"` // Unambiguous identification of the account of the intermediary agent 2 at its servicing agent in the payment chain. IntermediaryAgent2Account *CashAccount7 `xml:"IntrmyAgt2Acct,omitempty"` // Agent between the debtor agent and creditor agent. // // Usage: If IntermediaryAgent3 is present, then it identifies the agent between the intermediary agent 2 and the debtor agent. IntermediaryAgent3 *BranchAndFinancialInstitutionIdentification3 `xml:"IntrmyAgt3,omitempty"` // Unambiguous identification of the account of the intermediary agent 3 at its servicing agent in the payment chain. IntermediaryAgent3Account *CashAccount7 `xml:"IntrmyAgt3Acct,omitempty"` // Party that owes an amount of money to the (ultimate) creditor. Debtor *PartyIdentification8 `xml:"Dbtr"` // Unambiguous identification of the account of the debtor to which a debit entry will be made as a result of the transaction. DebtorAccount *CashAccount7 `xml:"DbtrAcct"` // Financial institution servicing an account for the debtor. DebtorAgent *BranchAndFinancialInstitutionIdentification3 `xml:"DbtrAgt"` // Unambiguous identification of the account of the debtor agent at its servicing agent in the payment chain. DebtorAgentAccount *CashAccount7 `xml:"DbtrAgtAcct,omitempty"` // Ultimate party that owes an amount of money to the (ultimate) creditor. UltimateDebtor *PartyIdentification8 `xml:"UltmtDbtr,omitempty"` // Underlying reason for the payment transaction. // // Usage: Purpose is used by the end-customers, i.e. initiating party, (ultimate) debtor, (ultimate) creditor to provide information concerning the nature of the payment. Purpose is a content element, which is not used for processing by any of the agents involved in the payment chain. 
Purpose *Purpose1Choice `xml:"Purp,omitempty"` // Information needed due to regulatory and statutory requirements. RegulatoryReporting []*RegulatoryReporting2 `xml:"RgltryRptg,omitempty"` // Information related to the handling of the remittance information by any of the agents in the transaction processing chain. RelatedRemittanceInformation []*RemittanceLocation1 `xml:"RltdRmtInf,omitempty"` // Information supplied to enable the matching of an entry with the items that the transfer is intended to settle, such as commercial invoices in an accounts' receivable system. RemittanceInformation *RemittanceInformation1 `xml:"RmtInf,omitempty"` } func (d *DirectDebitTransactionInformation2) AddPaymentIdentification() *PaymentIdentification2 { d.PaymentIdentification = new(PaymentIdentification2) return d.PaymentIdentification } func (d *DirectDebitTransactionInformation2) AddPaymentTypeInformation() *PaymentTypeInformation4 { d.PaymentTypeInformation = new(PaymentTypeInformation4) return d.PaymentTypeInformation } func (d *DirectDebitTransactionInformation2) SetInterbankSettlementAmount(value, currency string) { d.InterbankSettlementAmount = NewCurrencyAndAmount(value, currency) } func (d *DirectDebitTransactionInformation2) SetInterbankSettlementDate(value string) { d.InterbankSettlementDate = (*ISODate)(&value) } func (d *DirectDebitTransactionInformation2) SetInstructedAmount(value, currency string) { d.InstructedAmount = NewCurrencyAndAmount(value, currency) } func (d *DirectDebitTransactionInformation2) SetExchangeRate(value string) { d.ExchangeRate = (*BaseOneRate)(&value) } func (d *DirectDebitTransactionInformation2) SetChargeBearer(value string) { d.ChargeBearer = (*ChargeBearerType1Code)(&value) } func (d *DirectDebitTransactionInformation2) AddChargesInformation() *ChargesInformation1 { newValue := new (ChargesInformation1) d.ChargesInformation = append(d.ChargesInformation, newValue) return newValue } func (d *DirectDebitTransactionInformation2) 
SetRequestedCollectionDate(value string) { d.RequestedCollectionDate = (*ISODate)(&value) } func (d *DirectDebitTransactionInformation2) AddDirectDebitTransaction() *DirectDebitTransaction1 { d.DirectDebitTransaction = new(DirectDebitTransaction1) return d.DirectDebitTransaction } func (d *DirectDebitTransactionInformation2) AddCreditor() *PartyIdentification8 { d.Creditor = new(PartyIdentification8) return d.Creditor } func (d *DirectDebitTransactionInformation2) AddCreditorAccount() *CashAccount7 { d.CreditorAccount = new(CashAccount7) return d.CreditorAccount } func (d *DirectDebitTransactionInformation2) AddCreditorAgent() *BranchAndFinancialInstitutionIdentification3 { d.CreditorAgent = new(BranchAndFinancialInstitutionIdentification3) return d.CreditorAgent } func (d *DirectDebitTransactionInformation2) AddCreditorAgentAccount() *CashAccount7 { d.CreditorAgentAccount = new(CashAccount7) return d.CreditorAgentAccount } func (d *DirectDebitTransactionInformation2) AddUltimateCreditor() *PartyIdentification8 { d.UltimateCreditor = new(PartyIdentification8) return d.UltimateCreditor } func (d *DirectDebitTransactionInformation2) AddInitiatingParty() *PartyIdentification8 { d.InitiatingParty = new(PartyIdentification8) return d.InitiatingParty } func (d *DirectDebitTransactionInformation2) AddInstructingAgent() *BranchAndFinancialInstitutionIdentification3 { d.InstructingAgent = new(BranchAndFinancialInstitutionIdentification3) return d.InstructingAgent } func (d *DirectDebitTransactionInformation2) AddInstructedAgent() *BranchAndFinancialInstitutionIdentification3 { d.InstructedAgent = new(BranchAndFinancialInstitutionIdentification3) return d.InstructedAgent } func (d *DirectDebitTransactionInformation2) AddIntermediaryAgent1() *BranchAndFinancialInstitutionIdentification3 { d.IntermediaryAgent1 = new(BranchAndFinancialInstitutionIdentification3) return d.IntermediaryAgent1 } func (d *DirectDebitTransactionInformation2) AddIntermediaryAgent1Account() 
*CashAccount7 { d.IntermediaryAgent1Account = new(CashAccount7) return d.IntermediaryAgent1Account } func (d *DirectDebitTransactionInformation2) AddIntermediaryAgent2() *BranchAndFinancialInstitutionIdentification3 { d.IntermediaryAgent2 = new(BranchAndFinancialInstitutionIdentification3) return d.IntermediaryAgent2 } func (d *DirectDebitTransactionInformation2) AddIntermediaryAgent2Account() *CashAccount7 { d.IntermediaryAgent2Account = new(CashAccount7) return d.IntermediaryAgent2Account } func (d *DirectDebitTransactionInformation2) AddIntermediaryAgent3() *BranchAndFinancialInstitutionIdentification3 { d.IntermediaryAgent3 = new(BranchAndFinancialInstitutionIdentification3) return d.IntermediaryAgent3 } func (d *DirectDebitTransactionInformation2) AddIntermediaryAgent3Account() *CashAccount7 { d.IntermediaryAgent3Account = new(CashAccount7) return d.IntermediaryAgent3Account } func (d *DirectDebitTransactionInformation2) AddDebtor() *PartyIdentification8 { d.Debtor = new(PartyIdentification8) return d.Debtor } func (d *DirectDebitTransactionInformation2) AddDebtorAccount() *CashAccount7 { d.DebtorAccount = new(CashAccount7) return d.DebtorAccount } func (d *DirectDebitTransactionInformation2) AddDebtorAgent() *BranchAndFinancialInstitutionIdentification3 { d.DebtorAgent = new(BranchAndFinancialInstitutionIdentification3) return d.DebtorAgent } func (d *DirectDebitTransactionInformation2) AddDebtorAgentAccount() *CashAccount7 { d.DebtorAgentAccount = new(CashAccount7) return d.DebtorAgentAccount } func (d *DirectDebitTransactionInformation2) AddUltimateDebtor() *PartyIdentification8 { d.UltimateDebtor = new(PartyIdentification8) return d.UltimateDebtor } func (d *DirectDebitTransactionInformation2) AddPurpose() *Purpose1Choice { d.Purpose = new(Purpose1Choice) return d.Purpose } func (d *DirectDebitTransactionInformation2) AddRegulatoryReporting() *RegulatoryReporting2 { newValue := new (RegulatoryReporting2) d.RegulatoryReporting = 
append(d.RegulatoryReporting, newValue) return newValue } func (d *DirectDebitTransactionInformation2) AddRelatedRemittanceInformation() *RemittanceLocation1 { newValue := new (RemittanceLocation1) d.RelatedRemittanceInformation = append(d.RelatedRemittanceInformation, newValue) return newValue } func (d *DirectDebitTransactionInformation2) AddRemittanceInformation() *RemittanceInformation1 { d.RemittanceInformation = new(RemittanceInformation1) return d.RemittanceInformation }
DirectDebitTransactionInformation2.go
0.77343
0.647109
DirectDebitTransactionInformation2.go
starcoder
package binmani const bitsPerByte int = 8 // Bit manipulation functions // GetMask creates a bitmask of size shifted left index bits. // GetMask(4, 1) -> 0b0000000000010000 // GetMask(2, 3) -> 0b0000000000011100 // GetMask(0, 8) -> 0b0000000011111111 // GetMask(3, 8) -> 0b0000011111111000 func GetMask(index, size uint8) uint16 { return ((1 << size) - 1) << index } // ReadFrom reads a specified bit or set of consecutive bits from data. // index works from the right of the data to the left. func ReadFrom(data uint16, index, size uint8) uint16 { return (data & GetMask(index, size)) >> index } // WriteTo writes a value to a specified bit or set of consecutive bits in data, and returns the result. // index works from the right of the data to the left. func WriteTo(data uint16, index, size uint8, value uint16) uint16 { return (data & (^GetMask(index, size))) | (value << index) } // BytesToBits converts a byte slice to a slice of each individual bit of the bytes. func BytesToBits(bytes []byte) *[]uint8 { bits := make([]uint8, len(bytes) * bitsPerByte) for i := 0; i < len(bytes); i++ { for j := 0; j < bitsPerByte; j++ { bits[i * bitsPerByte + j] = uint8(ReadFrom(uint16(bytes[i]), uint8(bitsPerByte - j - 1), 1)) } } return &bits } // BitsToBytes converts a slice of individual bits into a slice of bytes, effectively compressing them together. // padStart specifies whether to pad the start or end of the slice, if the length is not a multiple of 8. func BitsToBytes(bits []uint8, padStart bool) *[]byte { numBytes := len(bits) / bitsPerByte if len(bits) % bitsPerByte != 0 { numBytes++ } // Zero-pad the beginning/end of the array if the number of bits is not a multiple of 8 extraBits := make([]uint8, (8 - (len(bits) % bitsPerByte)) % 8) if padStart { bits = append(extraBits, bits...) } else { bits = append(bits, extraBits...) 
} bytes := make([]byte, numBytes) for i := 0; i < numBytes; i++ { for j := 0; j < bitsPerByte; j++ { bytes[i] = byte(WriteTo(uint16(bytes[i]), uint8(bitsPerByte - j - 1), 1, uint16(bits[i * bitsPerByte + j]))) } } return &bytes }
binmani.go
0.703957
0.668143
binmani.go
starcoder
package gen

import (
	"fmt"
	"math/rand"
)

// CityStateZipGenerator generates a City state zip line in the form: City, ST 99999
func CityStateZipGenerator() string {
	var state = StateNameGenerator()
	return fmt.Sprintf("%s, %s %s", CityNameGenerator(), state, ZipcodeGenerator(state))
}

// CityStateZipGenerator2 generates a City state zip line in the form: City, ST 99999 given a state
func CityStateZipGenerator2(state string) string {
	return fmt.Sprintf("%s, %s %s", CityNameGenerator(), state, ZipcodeGenerator(state))
}

// CityNameGenerator returns a random US city name.
func CityNameGenerator() string {
	return cityData[rand.Intn(len(cityData))]
}

var cityData = []string{
	"Abilene", "Akron", "Albuquerque", "Alexandria", "Allen", "Allentown", "Amarillo", "Anaheim",
	"Anchorage", "<NAME>", "Antioch", "Arlington", "Arvada", "Athens", "Atlanta", "Augusta",
	"Aurora", "Austin", "Bakersfield", "Baltimore", "Baton Rouge", "Beaumont", "Bellevue", "Berkeley",
	"Billings", "Birmingham", "Boise", "Boston", "Boulder", "Bridgeport", "Broken Arrow", "Brownsville",
	"Buffalo", "Burbank", "Cambridge", "Cape Coral", "Carlsbad", "Carrollton", "Cary", "Cedar Rapids",
	"Centennial", "Chandler", "Charleston", "Charlotte", "Chattanooga", "Chesapeake", "Chicago",
	"Chula Vista", "Cincinnati", "Clarksville", "Clearwater", "Cleveland", "Clinton", "Clovis",
	"College Station", "Colorado Springs", "Columbia", "Columbus", "Concord", "Coral Springs",
	"Corona", "<NAME>", "<NAME>", "Dallas", "Daly City", "Davenport", "Davie", "Dayton",
	"Denton", "Denver", "<NAME>", "Detroit", "Downey", "Durham", "Edison", "<NAME>",
	"<NAME>", "<NAME>", "Elgin", "Elizabeth", "El<NAME>", "Escondido", "Eugene",
	"Evansville", "Everett", "Fairfield", "Fargo", "Fayetteville", "Fontana", "<NAME>",
	"<NAME>", "<NAME>", "<NAME>", "Fremont", "Fresno", "Frisco", "Fullerton",
	"Gainesville", "Garden Grove", "Garland", "Gilbert", "Glendale", "Grand Prairie", "Grand Rapids",
	"Greeley", "Green Bay", "Greensboro", "Gresham", "Hampton", "Hartford", "Hayward", "Henderson",
	"Hialeah", "High Point", "Hillsboro", "Hollywood", "Honolulu", "Houston", "Huntington Beach",
	"Huntsville", "Independence", "Indianapolis", "Inglewood", "Irvine", "Irving", "Jackson",
	"Jacksonville", "Jersey City", "Joliet", "Jurupa Valley", "Kansas City", "Kenosha", "Kent",
	"Killeen", "Knoxville", "Lafayette", "Lakeland", "Lakewood", "Lancaster", "Lansing", "Laredo",
	"Las Cruces", "Las Vegas", "League City", "Lewisville", "Lexington", "Lincoln", "Little Rock",
	"Long Beach", "Los Angeles", "Louisville", "Lowell", "Lubbock", "Macon", "Madison", "Manchester",
	"McAllen", "McKinney", "Memphis", "Meridian", "Mesa", "Mesquite", "Miami", "<NAME>",
	"Midland", "Milwaukee", "Minneapolis", "Miramar", "Mobile", "Modesto", "Montgomery",
	"Moreno Valley", "Murfreesboro", "Murrieta", "Naperville", "Nashville", "New Haven",
	"New Orleans", "New York", "Newark", "Newport News", "Norfolk", "Norman", "North Charleston",
	"North Las Vegas", "Norwalk", "Oakland", "Oceanside", "Odessa", "Oklahoma City", "Olathe",
	"Omaha", "Ontario", "Orange", "Orlando", "Overland Park", "Oxnard", "Palm Bay", "Palmdale",
	"Pasadena", "Paterson", "Pearland", "Pembroke Pines", "Peoria", "Philadelphia", "Phoenix",
	"Pittsburgh", "Plano", "Pomona", "Pompano Beach", "Port St. Lucie", "Portland", "Providence",
	"Provo", "Pueblo", "Raleigh", "<NAME>", "Reno", "Renton", "Rialto", "Richardson",
	"Richmond", "Riverside", "Rochester", "Rockford", "Roseville", "Round Rock", "Sacramento",
	"<NAME>", "Salem", "Salinas", "Salt Lake City", "San Angelo", "San Antonio",
	"San Bernardino", "San Diego", "San Francisco", "San Jose", "San Mateo", "Sandy Springs",
	"Santa Ana", "Santa Clara", "Santa Clarita", "Santa Maria", "Santa Rosa", "Savannah",
	"Scottsdale", "Seattle", "Shreveport", "Simi Valley", "Sioux Falls", "South Bend", "Sparks",
	"Spokane", "Springfield", "St. Louis", "St. Petersburg", "Stamford", "Sterling Heights",
	"Stockton", "Sugar Land", "Sunnyvale", "Surprise", "Syracuse", "Tacoma", "Tallahassee",
	"Tampa", "Temecula", "Tempe", "Thornton", "Thousand Oaks", "Toledo", "Topeka", "Torrance",
	"Tucson", "Tulsa", "Tuscaloosa", "Tyler", "Vacaville", "Vallejo", "Vancouver", "Ventura",
	"Victorville", "Virginia Beach", "Visalia", "Vista", "Waco", "Warren", "Washington",
	"Waterbury", "West Covina", "West Jordan", "West Palm Beach", "West Valley City", "Westminster",
	"Wichita", "Wichita Falls", "Wilmington", "Winston–Salem", "Woodbridge", "Worcester", "Yonkers",
}

// StateNameGenerator returns a random two-letter US state code.
func StateNameGenerator() string {
	return stateData[rand.Intn(len(stateData))]
}

var stateData = []string{
	"AK", "AL", "AR", "AZ", "CA", "CO", "CT", "DC", "DE", "FL", "GA", "HI", "IA", "ID", "IL",
	"IN", "KS", "KY", "LA", "MA", "MD", "ME", "MI", "MN", "MO", "MS", "MT", "NC", "ND", "NE",
	"NH", "NJ", "NM", "NV", "NY", "OH", "OK", "OR", "PA", "RI", "SC", "SD", "TN", "TX", "UT",
	"VA", "VT", "WA", "WI", "WV", "WY",
}

// ZipcodeGenerator generates a random five-digit zipcode, drawn from the
// given state's zipcode range when the state is known.
func ZipcodeGenerator(state string) string {
	var zip int
	if rng, found := zipcodeData[state]; found {
		zip = rand.Intn(rng.max-rng.min) + rng.min
	} else {
		zip = rand.Intn(99999)
	}
	// %5.5d zero-pads, restoring any leading zeros the int form drops.
	return fmt.Sprintf("%5.5d", zip)
}

type zipRange struct {
	min int
	max int
}

// zipcodeData maps state codes to inclusive-min/exclusive-ish zip ranges.
// All values are decimal: the previous MA/NH/NJ entries used leading-zero
// literals (01001, 05544, 03031, 07001), which Go parses as OCTAL and which
// therefore produced zipcodes far outside the real ranges.
var zipcodeData = map[string]zipRange{
	"AK": {min: 99501, max: 99950}, "AL": {min: 35004, max: 36925}, "AR": {min: 71601, max: 75502},
	"AZ": {min: 85001, max: 86556}, "CA": {min: 90001, max: 96162}, "CO": {min: 80001, max: 81658},
	"CT": {min: 6001, max: 6928}, "DC": {min: 20001, max: 20799}, "DE": {min: 19701, max: 19980},
	"FL": {min: 32004, max: 34997}, "GA": {min: 30001, max: 39901}, "HI": {min: 96701, max: 96898},
	"IA": {min: 50001, max: 68120}, "ID": {min: 83201, max: 83876}, "IL": {min: 60001, max: 62999},
	"IN": {min: 46001, max: 47997}, "KS": {min: 66002, max: 67954}, "KY": {min: 40003, max: 42788},
	"LA": {min: 70001, max: 71497}, "MA": {min: 1001, max: 5544}, "MD": {min: 20331, max: 21930},
	"ME": {min: 3901, max: 4992}, "MI": {min: 48001, max: 49971}, "MN": {min: 55001, max: 56763},
	"MO": {min: 63001, max: 65899}, "MS": {min: 38601, max: 39776}, "MT": {min: 59001, max: 59937},
	"NC": {min: 27006, max: 28909}, "ND": {min: 58001, max: 58856}, "NE": {min: 68001, max: 69367},
	"NH": {min: 3031, max: 3897}, "NJ": {min: 7001, max: 8989}, "NM": {min: 87001, max: 88441},
	"NV": {min: 88901, max: 89883}, "NY": {min: 10001, max: 14975}, "OH": {min: 43001, max: 45999},
	"OK": {min: 73001, max: 74966}, "OR": {min: 97001, max: 97920}, "PA": {min: 15001, max: 19640},
	"RI": {min: 2801, max: 2940}, "SC": {min: 29001, max: 29948}, "SD": {min: 57001, max: 57799},
	"TN": {min: 37010, max: 38589}, "TX": {min: 73301, max: 79999}, "UT": {min: 84001, max: 84784},
	"VA": {min: 20040, max: 24658}, "VT": {min: 5001, max: 5907}, "WA": {min: 98001, max: 99403},
	"WI": {min: 53001, max: 54990}, "WV": {min: 24701, max: 26886}, "WY": {min: 82001, max: 83128},
}

// PhoneGenerator generates a phone number for a state, using one of the
// state's real area codes when the state is known.
func PhoneGenerator(state string) string {
	if areas, found := areacodeData[state]; found {
		area := areas[rand.Intn(len(areas))]
		return fmt.Sprintf("(%s) %3.3d-%4.4d", area, rand.Intn(1000), rand.Intn(10000))
	}
	return fmt.Sprintf("(%3.3d) %3.3d-%4.4d", rand.Intn(1000), rand.Intn(1000), rand.Intn(10000))
}

var areacodeData = map[string][]string{
	"AL": {"205", "251", "256", "334", "938"},
	"AK": {"907"},
	"AZ": {"480", "520", "602", "623", "928"},
	"AR": {"479", "501", "870"},
	"CA": {"209", "213", "310", "323", "408", "415", "424", "442", "510", "530", "559", "562", "619", "626", "628", "650", "657", "661", "669", "707", "714", "747", "760", "805", "818", "831", "858", "909", "916", "925", "949", "951"},
	"CO": {"303", "719", "720", "970"},
	"CT": {"203", "475", "860", "959"},
	"DE": {"302"},
	"DC": {"202"},
	"FL": {"239", "305", "321", "352", "386", "407", "561", "727", "754", "772", "786", "813", "850", "863", "904", "941", "954"},
	"GA": {"229", "404", "470", "478", "678", "706", "762", "770", "912"},
	"HI": {"808"},
	"ID": {"208"},
	"IL": {"217", "224", "309", "312", "331", "618", "630", "708", "773", "779", "815", "847", "872"},
	"IN": {"219", "260", "317", "463", "574", "765", "812", "930"},
	"IA": {"319", "515", "563", "641", "712"},
	"KS": {"316", "620", "785", "913"},
	"KY": {"270", "364", "502", "606", "859"},
	"LA": {"225", "318", "337", "504", "985"},
	"ME": {"207"},
	"MD": {"240", "301", "410", "443", "667"},
	"MA": {"339", "351", "413", "508", "617", "774", "781", "857", "978"},
	"MI": {"231", "248", "269", "313", "517", "586", "616", "679", "734", "810", "906", "947", "989"},
	"MN": {"218", "320", "507", "612", "651", "763", "952"},
	"MS": {"228", "601", "662", "769"},
	"MO": {"314", "417", "573", "636", "660", "816"},
	"MT": {"406"},
	"NE": {"308", "402", "531"},
	"NV": {"702", "725", "775"},
	"NH": {"603"},
	"NJ": {"201", "551", "609", "732", "848", "856", "862", "908", "973"},
	"NM": {"505", "575"},
	"NY": {"212", "315", "332", "347", "516", "518", "585", "607", "631", "646", "680", "716", "718", "838", "845", "914", "917", "929", "934"},
	"NC": {"252", "336", "704", "743", "828", "910", "919", "980", "984"},
	"ND": {"701"},
	"OH": {"216", "220", "234", "330", "380", "419", "440", "513", "567", "614", "740", "937"},
	"OK": {"405", "539", "580", "918"},
	"OR": {"458", "503", "541", "971"},
	"PA": {"215", "223", "267", "272", "412", "484", "570", "610", "717", "724", "814", "878"},
	"RI": {"401"},
	"SC": {"803", "843", "854", "864"},
	"SD": {"605"},
	"TN": {"423", "615", "629", "731", "865", "901", "931"},
	"TX": {"210", "214", "254", "281", "325", "346", "361", "409", "430", "432", "469", "512", "682", "713", "726", "737", "806", "817", "830", "832", "903", "915", "936", "940", "956", "972", "979"},
	"UT": {"385", "435", "801"},
	"VT": {"802"},
	"VA": {"276", "434", "540", "571", "703", "757", "804"},
	"WA": {"206", "253", "360", "425", "509", "564"},
	"WV": {"304", "681"},
	"WI": {"262", "414", "534", "608", "715", "920"},
	"WY": {"307"},
}
gen/city_state_zip_names.go
0.503174
0.42925
city_state_zip_names.go
starcoder
package shapes

import (
	"math"
	"sort"

	"github.com/factorion/graytracer/pkg/primitives"
)

// Bounds is an axis-aligned bounding box described by its minimum and
// maximum corner points.
type Bounds struct {
	Min, Max primitives.PV
}

// MinMax sorts the given slice of floats in place and returns its minimum
// and maximum values. It panics on an empty slice; callers guard against
// that before calling.
func MinMax(values []float64) (float64, float64) {
	sort.Float64s(values)
	return values[0], values[len(values)-1]
}

// CombineBounds combines a slice of bounds into a single bounding box that
// encloses all of them. It returns nil for an empty slice.
func CombineBounds(boundsSlice []*Bounds) *Bounds {
	if len(boundsSlice) == 0 {
		return nil
	}
	// Collect both corners of every box per axis, then take the extremes.
	xList := make([]float64, len(boundsSlice)*2)
	yList := make([]float64, len(boundsSlice)*2)
	zList := make([]float64, len(boundsSlice)*2)
	for index, bounds := range boundsSlice {
		xList[index*2] = bounds.Min.X
		xList[index*2+1] = bounds.Max.X
		yList[index*2] = bounds.Min.Y
		yList[index*2+1] = bounds.Max.Y
		zList[index*2] = bounds.Min.Z
		zList[index*2+1] = bounds.Max.Z
	}
	xMin, xMax := MinMax(xList)
	yMin, yMax := MinMax(yList)
	zMin, zMax := MinMax(zList)
	return &Bounds{
		Min: primitives.MakePoint(xMin, yMin, zMin),
		Max: primitives.MakePoint(xMax, yMax, zMax),
	}
}

// AddBounds grows b in place so that it also encloses the given bounds.
func (b *Bounds) AddBounds(bounds *Bounds) {
	xMin := math.Min(b.Min.X, bounds.Min.X)
	xMax := math.Max(b.Max.X, bounds.Max.X)
	yMin := math.Min(b.Min.Y, bounds.Min.Y)
	yMax := math.Max(b.Max.Y, bounds.Max.Y)
	zMin := math.Min(b.Min.Z, bounds.Min.Z)
	zMax := math.Max(b.Max.Z, bounds.Max.Z)
	b.Min = primitives.MakePoint(xMin, yMin, zMin)
	b.Max = primitives.MakePoint(xMax, yMax, zMax)
}

// Transform applies a transformation matrix to the bounding box and returns
// the axis-aligned box that encloses the transformed corners. The result can
// be larger than the transformed volume (e.g. under rotation), but is always
// a valid conservative bound.
func (b *Bounds) Transform(transform primitives.Matrix) *Bounds {
	xList := make([]float64, 8)
	yList := make([]float64, 8)
	zList := make([]float64, 8)
	// The eight corner points of the box: every Min/Max combination per axis.
	points := []primitives.PV{
		primitives.MakePoint(b.Min.X, b.Min.Y, b.Min.Z),
		primitives.MakePoint(b.Min.X, b.Min.Y, b.Max.Z),
		primitives.MakePoint(b.Min.X, b.Max.Y, b.Min.Z),
		primitives.MakePoint(b.Min.X, b.Max.Y, b.Max.Z),
		primitives.MakePoint(b.Max.X, b.Min.Y, b.Min.Z),
		primitives.MakePoint(b.Max.X, b.Min.Y, b.Max.Z),
		primitives.MakePoint(b.Max.X, b.Max.Y, b.Min.Z),
		primitives.MakePoint(b.Max.X, b.Max.Y, b.Max.Z),
	}
	// Transform each corner and gather its coordinates per axis.
	for i, p := range points {
		point := p.Transform(transform)
		xList[i] = point.X
		yList[i] = point.Y
		zList[i] = point.Z
	}
	// The extremes of the transformed corners are the new bounds.
	xMin, xMax := MinMax(xList)
	yMin, yMax := MinMax(yList)
	zMin, zMax := MinMax(zList)
	return &Bounds{
		Min: primitives.MakePoint(xMin, yMin, zMin),
		Max: primitives.MakePoint(xMax, yMax, zMax),
	}
}

// Intersect reports whether the given ray hits the bounding box, using the
// slab method: per-axis entry/exit distances from CheckAxis, intersected
// across the three axes.
func (b *Bounds) Intersect(ray primitives.Ray) bool {
	xtmin, xtmax := CheckAxis(ray.Origin.X, ray.Direction.X, b.Min.X, b.Max.X)
	ytmin, ytmax := CheckAxis(ray.Origin.Y, ray.Direction.Y, b.Min.Y, b.Max.Y)
	ztmin, ztmax := CheckAxis(ray.Origin.Z, ray.Direction.Z, b.Min.Z, b.Max.Z)
	tmin := math.Max(math.Max(xtmin, ytmin), ztmin)
	tmax := math.Min(math.Min(xtmax, ytmax), ztmax)
	return tmin <= tmax
}
pkg/shapes/bounds.go
0.799677
0.545104
bounds.go
starcoder
package unicornify

import (
	. "github.com/drbrain/go-unicornify/unicornify/core"
	"math"
	"sort"
)

// tv is a single animation keyframe: value v at phase time t.
// Phase times are expected in [0, 1); interpol treats them cyclically.
type tv struct {
	t float64
	v float64
}

// tvSorter sorts keyframes by ascending time for sort.Sort.
type tvSorter struct {
	tvs []tv
}

func (s tvSorter) Len() int {
	return len(s.tvs)
}

func (s tvSorter) Less(i, j int) bool {
	return s.tvs[i].t < s.tvs[j].t
}

func (s tvSorter) Swap(i, j int) {
	s.tvs[i], s.tvs[j] = s.tvs[j], s.tvs[i]
}

// interpol builds a cyclic (period 1) piecewise-linear interpolation
// function through the given keyframes. The returned function maps any
// phase value to the interpolated keyframe value; the keyframe slice is
// sorted in place as a side effect.
func interpol(tvs ...tv) func(float64) float64 {
	sort.Sort(tvSorter{tvs})
	// Pad the sorted keyframes with wrap-around sentinels: a copy of the
	// last keyframe shifted one period back and a copy of the first
	// shifted one period forward, so interpolation also works across the
	// 1 -> 0 phase boundary.
	l := make([]tv, len(tvs)+2)
	last := tvs[len(tvs)-1]
	l[0] = tv{t: last.t - 1, v: last.v}
	l[len(tvs)+1] = tv{t: tvs[0].t + 1, v: tvs[0].v}
	copy(l[1:len(tvs)+1], tvs)
	return func(t float64) float64 {
		// Normalize t into [0, 1).
		for t < 0 {
			t++
		}
		_, t = math.Modf(t)
		// Find the closest keyframe at or before t (t1, v1) and the
		// closest at or after t (t2, v2). The sentinels at -2/+2 are
		// always beaten by the padded list entries.
		var t1, v1, t2, v2 float64
		t1 = -2
		t2 = 2
		for _, tv := range l {
			if tv.t <= t && tv.t > t1 {
				t1, v1 = tv.t, tv.v
			}
			if tv.t >= t && tv.t < t2 {
				t2, v2 = tv.t, tv.v
			}
		}
		if t1 == t2 {
			// Exact keyframe hit; avoids division by zero below.
			return v1
		}
		// Linear interpolation between the two surrounding keyframes.
		return MixFloats(v1, v2, (t-t1)/(t2-t1))
	}
}

// RotatoryGallop poses the unicorn's four legs for a gallop cycle at the
// given phase (one full stride per unit of phase). Each leg's upper segment
// rotates around the hip and the lower segment around the knee, driven by
// keyframed angle curves; the left legs run the same curves phase-shifted.
func RotatoryGallop(u *Unicorn, phase float64) {
	// movement per phase: ca. 125
	fl, fr, bl, br := u.Legs[0], u.Legs[1], u.Legs[2], u.Legs[3]
	// approximated from http://commons.wikimedia.org/wiki/File:Horse_gif_slow.gif
	frontTop := interpol(tv{9. / 12, 74}, tv{2.5 / 12, -33})
	frontBottom := interpol(tv{2. / 12, 0}, tv{6. / 12, -107}, tv{8. / 12, -90}, tv{10. / 12, 0})
	backTop := interpol(tv{11. / 12, -53}, tv{4. / 12, 0}, tv{6. / 12, 0})
	backBottom := interpol(tv{11. / 12, 0}, tv{1.5 / 12, 90}, tv{6. / 12, 30}, tv{8. / 12, 50})
	// Per leg: rotate knee and hoof around the hip (upper segment), then
	// the hoof around the (already moved) knee (lower segment). The order
	// of these calls is significant.
	fr.Knee.RotateAround(*fr.Hip, frontTop(phase)*DEGREE, 2)
	fr.Hoof.RotateAround(*fr.Hip, frontTop(phase)*DEGREE, 2)
	fr.Hoof.RotateAround(*fr.Knee, frontBottom(phase)*DEGREE, 2)
	fl.Knee.RotateAround(*fl.Hip, frontTop(phase-.25)*DEGREE, 2)
	fl.Hoof.RotateAround(*fl.Hip, frontTop(phase-.25)*DEGREE, 2)
	fl.Hoof.RotateAround(*fl.Knee, frontBottom(phase-.25)*DEGREE, 2)
	br.Knee.RotateAround(*br.Hip, backTop(phase)*DEGREE, 2)
	br.Hoof.RotateAround(*br.Hip, backTop(phase)*DEGREE, 2)
	br.Hoof.RotateAround(*br.Knee, backBottom(phase)*DEGREE, 2)
	bl.Knee.RotateAround(*bl.Hip, backTop(phase-.167)*DEGREE, 2)
	bl.Hoof.RotateAround(*bl.Hip, backTop(phase-.167)*DEGREE, 2)
	bl.Hoof.RotateAround(*bl.Knee, backBottom(phase-.167)*DEGREE, 2)
}

// Walk poses the unicorn's four legs for a walking cycle at the given
// phase, with the same hip/knee rotation scheme as RotatoryGallop but
// different keyframe curves and phase offsets.
func Walk(u *Unicorn, phase float64) {
	fl, fr, bl, br := u.Legs[0], u.Legs[1], u.Legs[2], u.Legs[3]
	//approximated from http://de.wikipedia.org/w/index.php?title=Datei:Muybridge_horse_walking_animated.gif&filetimestamp=20061003154457
	frontTop := interpol(tv{6.5 / 9, 40}, tv{2.5 / 9, -35})
	frontBottom := interpol(tv{7. / 9, 0}, tv{2. / 9, 0}, tv{5. / 9, -70})
	// NOTE(review): the 6./12 below is inconsistent with the /9
	// denominators used elsewhere in this function and matches the gallop
	// curves above — possibly a copy-paste leftover; confirm intended value.
	backTop := interpol(tv{1. / 9, -35}, tv{4. / 9, 0}, tv{6. / 12, 0})
	backBottom := interpol(tv{5. / 9, 40}, tv{9. / 9, 10})
	fr.Knee.RotateAround(*fr.Hip, frontTop(phase)*DEGREE, 2)
	fr.Hoof.RotateAround(*fr.Hip, frontTop(phase)*DEGREE, 2)
	fr.Hoof.RotateAround(*fr.Knee, frontBottom(phase)*DEGREE, 2)
	fl.Knee.RotateAround(*fl.Hip, frontTop(phase-.56)*DEGREE, 2)
	fl.Hoof.RotateAround(*fl.Hip, frontTop(phase-.56)*DEGREE, 2)
	fl.Hoof.RotateAround(*fl.Knee, frontBottom(phase-.56)*DEGREE, 2)
	br.Knee.RotateAround(*br.Hip, backTop(phase)*DEGREE, 2)
	br.Hoof.RotateAround(*br.Hip, backTop(phase)*DEGREE, 2)
	br.Hoof.RotateAround(*br.Knee, backBottom(phase)*DEGREE, 2)
	bl.Knee.RotateAround(*bl.Hip, backTop(phase-.44)*DEGREE, 2)
	bl.Hoof.RotateAround(*bl.Hip, backTop(phase-.44)*DEGREE, 2)
	bl.Hoof.RotateAround(*bl.Knee, backBottom(phase-.44)*DEGREE, 2)
}

// Poses lists all available leg-posing functions; each takes the unicorn
// to pose and a cycle phase.
var Poses = [...]func(*Unicorn, float64){RotatoryGallop, Walk}
unicornify/pose.go
0.666062
0.448909
pose.go
starcoder
package rangesum /* * @lc app=leetcode id=304 lang=golang * * [304] Range Sum Query 2D - Immutable * * https://leetcode.com/problems/range-sum-query-2d-immutable/description/ * * algorithms * Medium (31.68%) * Total Accepted: 68.7K * Total Submissions: 213.6K * Testcase Example: '["NumMatrix","sumRegion","sumRegion","sumRegion"]\n[[[[3,0,1,4,2],[5,6,3,2,1],[1,2,0,1,5],[4,1,0,1,7],[1,0,3,0,5]]],[2,1,4,3],[1,1,2,2],[1,2,2,4]]' * * Given a 2D matrix matrix, find the sum of the elements inside the rectangle * defined by its upper left corner (row1, col1) and lower right corner (row2, * col2). * * * * The above rectangle (with the red border) is defined by (row1, col1) = (2, * 1) and (row2, col2) = (4, 3), which contains sum = 8. * * * Example: * * Given matrix = [ * ⁠ [3, 0, 1, 4, 2], * ⁠ [5, 6, 3, 2, 1], * ⁠ [1, 2, 0, 1, 5], * ⁠ [4, 1, 0, 1, 7], * ⁠ [1, 0, 3, 0, 5] * ] * * sumRegion(2, 1, 4, 3) -> 8 * sumRegion(1, 1, 2, 2) -> 11 * sumRegion(1, 2, 2, 4) -> 12 * * * * Note: * * You may assume that the matrix does not change. * There are many calls to sumRegion function. * You may assume that row1 ≤ row2 and col1 ≤ col2. 
* * */ type NumMatrix struct { sumMatrix [][]int } func Constructor(matrix [][]int) NumMatrix { var rows, columns int if len(matrix) == 0 { rows, columns = 1, 1 } else { rows, columns = len(matrix)+1, len(matrix[0])+1 } sum := make([][]int, rows, rows) for i := range sum { sum[i] = make([]int, columns, columns) } for i := 0; i < rows; i++ { for j := 0; j < columns; j++ { if i == 0 || j == 0 { sum[i][j] = 0 } else { sum[i][j] = sum[i-1][j] + sum[i][j-1] + matrix[i-1][j-1] - sum[i-1][j-1] } } } return NumMatrix{sum} } func (this *NumMatrix) SumRegion(row1 int, col1 int, row2 int, col2 int) int { return this.sumMatrix[row2+1][col2+1] - this.sumMatrix[row1][col2+1] - this.sumMatrix[row2+1][col1] + this.sumMatrix[row1][col1] } /** * Your NumMatrix object will be instantiated and called as such: * obj := Constructor(matrix); * param_1 := obj.SumRegion(row1,col1,row2,col2); */
304-rangesumquery/304.range-sum-query-2d-immutable.go
0.9069
0.501648
304.range-sum-query-2d-immutable.go
starcoder
package client

import (
	"os"

	"github.com/charmbracelet/lipgloss"
	"golang.org/x/term"
)

const (
	// In real life situations we'd adjust the document to fit the width we've
	// detected. In the case of this example we're hardcoding the width, and
	// later using the detected width only to truncate in order to avoid jaggy
	// wrapping.
	width       = 96
	columnWidth = 30
)

// Style definitions.
var (
	// General.

	// Adaptive colors pick the Light or Dark variant based on the
	// terminal's background.
	subtle    = lipgloss.AdaptiveColor{Light: "#D9DCCF", Dark: "#383838"}
	highlight = lipgloss.AdaptiveColor{Light: "#874BFD", Dark: "#7D56F4"}
	special   = lipgloss.AdaptiveColor{Light: "#43BF6D", Dark: "#73F59F"}

	// divider is a pre-rendered bullet separator string.
	divider = lipgloss.NewStyle().
		SetString("•").
		Padding(0, 1).
		Foreground(subtle).
		String()

	// urlRender colors a URL string with the special accent color.
	urlRender = lipgloss.NewStyle().Foreground(special).Render

	// Tabs.

	// activeTabBorder leaves the bottom edge open so the active tab
	// visually connects with the tab bar line.
	activeTabBorder = lipgloss.Border{
		Top:         "─",
		Bottom:      " ",
		Left:        "│",
		Right:       "│",
		TopLeft:     "╭",
		TopRight:    "╮",
		BottomLeft:  "┘",
		BottomRight: "└",
	}

	tabBorder = lipgloss.Border{
		Top:         "─",
		Bottom:      "─",
		Left:        "│",
		Right:       "│",
		TopLeft:     "╭",
		TopRight:    "╮",
		BottomLeft:  "┴",
		BottomRight: "┴",
	}

	// Title.
	titleStyle = lipgloss.NewStyle().
			MarginLeft(1).
			MarginRight(5).
			Padding(0, 1).
			Italic(true).
			Foreground(lipgloss.Color("#FFF7DB")).
			SetString("<NAME>")

	descStyle = lipgloss.NewStyle().MarginTop(1)

	infoStyle = lipgloss.NewStyle().
			BorderStyle(lipgloss.NormalBorder()).
			BorderTop(true).
			BorderForeground(subtle)

	// List.
	list = lipgloss.NewStyle().
		Border(lipgloss.NormalBorder(), false, true, false, false).
		BorderForeground(subtle).
		MarginRight(2).
		Height(8).
		Width(columnWidth + 1)

	// listHeader renders a list heading with a bottom rule.
	listHeader = lipgloss.NewStyle().
			BorderStyle(lipgloss.NormalBorder()).
			BorderBottom(true).
			BorderForeground(subtle).
			MarginRight(2).
			Render

	listItem = lipgloss.NewStyle().PaddingLeft(2).Render

	// checkMark is a pre-rendered check symbol used by listDone.
	checkMark = lipgloss.NewStyle().SetString("✓").
			Foreground(special).
			PaddingRight(1).
			String()

	// listDone renders a completed list item: check mark plus the text
	// struck through and dimmed.
	listDone = func(s string) string {
		return checkMark + lipgloss.NewStyle().
			Strikethrough(true).
			Foreground(lipgloss.AdaptiveColor{Light: "#969B86", Dark: "#696969"}).
			Render(s)
	}

	// Paragraphs/History.
	historyStyle = lipgloss.NewStyle().
			Align(lipgloss.Left).
			Foreground(lipgloss.Color("#FAFAFA")).
			Background(highlight).
			Margin(1, 3, 0, 0).
			Padding(1, 2).
			Height(19).
			Width(columnWidth)

	// Page.
	docStyle = lipgloss.NewStyle().Padding(1, 0, 1, 0)
)

var (
	// Dialog.
	dialogBoxStyle = lipgloss.NewStyle().
			Border(lipgloss.RoundedBorder()).
			BorderForeground(lipgloss.Color("#874BFD")).
			Padding(1, 0).
			BorderTop(true).
			BorderLeft(true).
			BorderRight(true).
			BorderBottom(true)

	buttonStyle = lipgloss.NewStyle().
			Foreground(lipgloss.Color("#FFF7DB")).
			Background(lipgloss.Color("#888B7E")).
			Padding(0, 3).
			MarginTop(1)

	// activeButtonStyle derives from buttonStyle with a highlighted
	// background and underline for the focused button.
	activeButtonStyle = buttonStyle.Copy().
				Foreground(lipgloss.Color("#FFF7DB")).
				Background(lipgloss.Color("#F25D94")).
				MarginRight(2).
				Underline(true)
)

// viewStyle groups terminal-related sizing helpers for rendering.
type viewStyle struct{}

// PhysicalWidth returns the current width, in columns, of the terminal
// attached to stdout.
// NOTE(review): the error from term.GetSize is discarded; when stdout is
// not a terminal the returned width may be 0 — confirm callers handle that.
func (r viewStyle) PhysicalWidth() int {
	physicalWidth, _, _ := term.GetSize(int(os.Stdout.Fd()))
	return physicalWidth
}
client/viewStyle.go
0.588061
0.439627
viewStyle.go
starcoder
package internal import ( "errors" "fmt" "math" "reflect" "sort" "github.com/lyraproj/dgo/dgo" "github.com/lyraproj/dgo/util" ) type ( array struct { slice []dgo.Value frozen bool } // defaultArrayType is the unconstrained array type defaultArrayType int // sizedArrayType represents array with element type constraint and a size constraint sizedArrayType struct { elementType dgo.Type min int max int } // tupleType represents an array with an exact number of ordered element types. tupleType struct { types []dgo.Value variadic bool } // exactArrayType only matches the array that it represents exactArrayType struct { deepExactType value *array } ) // DefaultArrayType is the unconstrained Array type const DefaultArrayType = defaultArrayType(0) func arrayTypeOne(args []interface{}) dgo.ArrayType { switch a0 := Value(args[0]).(type) { case dgo.Type: return newArrayType(a0, 0, math.MaxInt64) case dgo.Integer: return newArrayType(nil, int(a0.GoInt()), math.MaxInt64) default: panic(illegalArgument(`Array`, `Type or Integer`, args, 0)) } } func arrayTypeTwo(args []interface{}) dgo.ArrayType { a1, ok := Value(args[1]).(dgo.Integer) if !ok { panic(illegalArgument(`Array`, `Integer`, args, 1)) } switch a0 := Value(args[0]).(type) { case dgo.Type: return newArrayType(a0, int(a1.GoInt()), math.MaxInt64) case dgo.Integer: return newArrayType(nil, int(a0.GoInt()), int(a1.GoInt())) default: panic(illegalArgument(`Array`, `Type or Integer`, args, 0)) } } func arrayTypeThree(args []interface{}) dgo.ArrayType { a0, ok := Value(args[0]).(dgo.Type) if !ok { panic(illegalArgument(`Array`, `Type`, args, 0)) } a1, ok := Value(args[1]).(dgo.Integer) if !ok { panic(illegalArgument(`Array`, `Integer`, args, 1)) } a2, ok := Value(args[2]).(dgo.Integer) if !ok { panic(illegalArgument(`ArrayType`, `Integer`, args, 2)) } return newArrayType(a0, int(a1.GoInt()), int(a2.GoInt())) } // ArrayType returns a type that represents an Array value func ArrayType(args []interface{}) dgo.ArrayType { switch 
len(args) { case 0: return DefaultArrayType case 1: return arrayTypeOne(args) case 2: return arrayTypeTwo(args) case 3: return arrayTypeThree(args) default: panic(illegalArgumentCount(`Array`, 0, 3, len(args))) } } func newArrayType(elementType dgo.Type, min, max int) dgo.ArrayType { if min < 0 { min = 0 } if max < 0 { max = 0 } if max < min { t := max max = min min = t } if elementType == nil { elementType = DefaultAnyType } if min == 0 && max == math.MaxInt64 && elementType == DefaultAnyType { // Unbounded return DefaultArrayType } if min == 1 && min == max && dgo.IsExact(elementType) { return (&array{slice: []dgo.Value{elementType.(dgo.ExactType).ExactValue()}, frozen: true}).Type().(dgo.ArrayType) } return &sizedArrayType{elementType: elementType, min: min, max: max} } func (t defaultArrayType) Assignable(other dgo.Type) bool { switch other.(type) { case defaultArrayType, *tupleType, *exactArrayType, *sizedArrayType: return true } return CheckAssignableTo(nil, other, t) } func (t defaultArrayType) ElementType() dgo.Type { return DefaultAnyType } func (t defaultArrayType) Equals(other interface{}) bool { return t == other } func (t defaultArrayType) HashCode() int { return int(dgo.TiArray) } func (t defaultArrayType) Instance(value interface{}) bool { _, ok := value.(dgo.Array) return ok } func (t defaultArrayType) Max() int { return math.MaxInt64 } func (t defaultArrayType) Min() int { return 0 } func (t defaultArrayType) New(arg dgo.Value) dgo.Value { return newArray(t, arg) } func (t defaultArrayType) ReflectType() reflect.Type { return reflect.SliceOf(reflectAnyType) } func (t defaultArrayType) String() string { return TypeString(t) } func (t defaultArrayType) Type() dgo.Type { return &metaType{t} } func (t defaultArrayType) TypeIdentifier() dgo.TypeIdentifier { return dgo.TiArray } func (t defaultArrayType) Unbounded() bool { return true } func (t *sizedArrayType) Assignable(other dgo.Type) bool { return Assignable(nil, t, other) } func (t *sizedArrayType) 
DeepAssignable(guard dgo.RecursionGuard, other dgo.Type) bool { switch ot := other.(type) { case defaultArrayType: return false // lacks size case dgo.ArrayType: return t.min <= ot.Min() && ot.Max() <= t.max && t.elementType.Assignable(ot.ElementType()) } return CheckAssignableTo(guard, other, t) } func (t *sizedArrayType) ElementType() dgo.Type { return t.elementType } func (t *sizedArrayType) Equals(other interface{}) bool { return equals(nil, t, other) } func (t *sizedArrayType) deepEqual(seen []dgo.Value, other deepEqual) bool { if ot, ok := other.(*sizedArrayType); ok { return t.min == ot.min && t.max == ot.max && equals(seen, t.elementType, ot.elementType) } return false } func (t *sizedArrayType) HashCode() int { return deepHashCode(nil, t) } func (t *sizedArrayType) deepHashCode(seen []dgo.Value) int { h := int(dgo.TiArray) if t.min > 0 { h = h*31 + t.min } if t.max < math.MaxInt64 { h = h*31 + t.max } if DefaultAnyType != t.elementType { h = h*31 + deepHashCode(seen, t.elementType) } return h } func (t *sizedArrayType) Instance(value interface{}) bool { return Instance(nil, t, value) } func (t *sizedArrayType) DeepInstance(guard dgo.RecursionGuard, value interface{}) bool { if ov, ok := value.(*array); ok { l := len(ov.slice) return t.min <= l && l <= t.max && allInstance(guard, t.elementType, ov.slice) } return false } func (t *sizedArrayType) Max() int { return t.max } func (t *sizedArrayType) Min() int { return t.min } func (t *sizedArrayType) New(arg dgo.Value) dgo.Value { return newArray(t, arg) } func (t *sizedArrayType) Resolve(ap dgo.AliasAdder) { te := t.elementType t.elementType = DefaultAnyType t.elementType = ap.Replace(te).(dgo.Type) } func (t *sizedArrayType) ReflectType() reflect.Type { return reflect.SliceOf(t.elementType.ReflectType()) } func (t *sizedArrayType) String() string { return TypeString(t) } func (t *sizedArrayType) Type() dgo.Type { return &metaType{t} } func (t *sizedArrayType) TypeIdentifier() dgo.TypeIdentifier { return 
dgo.TiArray } func (t *sizedArrayType) Unbounded() bool { return t.min == 0 && t.max == math.MaxInt64 } func (t *exactArrayType) Element(index int) dgo.Type { return t.value.slice[index].Type() } func (t *exactArrayType) ElementType() dgo.Type { es := t.value.slice switch len(es) { case 0: return DefaultAnyType case 1: return es[0].Type() } return (*allOfValueType)(t.value) } func (t *exactArrayType) ElementTypes() dgo.Array { es := t.value.slice ts := make([]dgo.Value, len(es)) for i := range es { ts[i] = es[i].Type() } return &array{slice: ts, frozen: true} } func (t *exactArrayType) Generic() dgo.Type { return &sizedArrayType{elementType: Generic(t.ElementType()), min: 0, max: math.MaxInt64} } func (t *exactArrayType) Len() int { return t.value.Len() } func (t *exactArrayType) Max() int { return t.value.Len() } func (t *exactArrayType) Min() int { return t.value.Len() } func (t *exactArrayType) New(arg dgo.Value) dgo.Value { return newArray(t, arg) } func (t *exactArrayType) ReflectType() reflect.Type { return reflect.SliceOf(t.ElementType().ReflectType()) } func (t *exactArrayType) Resolve(ap dgo.AliasAdder) { t.value.Resolve(ap) } func (t *exactArrayType) ExactValue() dgo.Value { return t.value } func (t *exactArrayType) TypeIdentifier() dgo.TypeIdentifier { return dgo.TiArrayExact } func (t *exactArrayType) Unbounded() bool { return false } func (t *exactArrayType) Variadic() bool { return false } // DefaultTupleType is a tuple without size and type constraints var DefaultTupleType = &tupleType{variadic: true, types: []dgo.Value{DefaultAnyType}} // EmptyTupleType is a tuple that represents an empty array var EmptyTupleType = &tupleType{variadic: false, types: []dgo.Value{}} // TupleType creates a new TupleTupe based on the given types func TupleType(types []interface{}) dgo.TupleType { return newTupleType(types, false) } // VariadicTupleType returns a type that represents an Array value with a variadic number of elements. 
Each // given type determines the type of a corresponding element in an array except for the last one which // determines the remaining elements. func VariadicTupleType(types []interface{}) dgo.TupleType { n := len(types) if n == 0 { panic(errors.New(`a variadic tuple must have at least one element`)) } return newTupleType(types, true) } func newTupleType(types []interface{}, variadic bool) dgo.TupleType { l := len(types) if l == 0 { return EmptyTupleType } if variadic && l == 1 && DefaultAnyType.Equals(types[0]) { return DefaultTupleType } exact := !variadic es := make([]dgo.Value, l) for i := 0; i < l; i++ { et := types[i].(dgo.Type) if exact && !dgo.IsExact(et) { exact = false } es[i] = et } if exact { for i := 0; i < l; i++ { es[i] = es[i].(dgo.ExactType).ExactValue() } return (&array{slice: es, frozen: true}).Type().(dgo.TupleType) } return &tupleType{types: es, variadic: variadic} } func (t *tupleType) Assignable(other dgo.Type) bool { return Assignable(nil, t, other) } func (t *tupleType) DeepAssignable(guard dgo.RecursionGuard, other dgo.Type) bool { return tupleAssignable(guard, t, other) } func tupleAssignableTuple(guard dgo.RecursionGuard, t, ot dgo.TupleType) bool { if t.Min() > ot.Min() || ot.Max() > t.Max() { return false } var tv, ov dgo.Type tn := t.Len() if t.Variadic() { tn-- tv = t.Element(tn) } on := ot.Len() if ot.Variadic() { on-- ov = ot.Element(on) } // n := max(tn, on) n := tn if n < on { n = on } for i := 0; i < n; i++ { te := tv if i < tn { te = t.Element(i) } oe := ov if i < on { oe = ot.Element(i) } if te == nil || oe == nil || !Assignable(guard, te, oe) { return false } } return true } func tupleAssignableArray(guard dgo.RecursionGuard, t dgo.TupleType, ot *sizedArrayType) bool { if t.Min() <= ot.Min() && ot.Max() <= t.Max() { et := ot.ElementType() n := t.Len() if t.Variadic() { n-- } for i := 0; i < n; i++ { if !Assignable(guard, t.Element(i), et) { return false } } return !t.Variadic() || Assignable(guard, t.Element(n), et) } return 
false } func tupleAssignable(guard dgo.RecursionGuard, t dgo.TupleType, other dgo.Type) bool { switch ot := other.(type) { case defaultArrayType: return false case *exactArrayType: return Instance(guard, t, ot.ExactValue()) case dgo.TupleType: return tupleAssignableTuple(guard, t, ot) case *sizedArrayType: return tupleAssignableArray(guard, t, ot) } return CheckAssignableTo(guard, other, t) } func (t *tupleType) Element(index int) dgo.Type { return t.types[index].(dgo.Type) } func (t *tupleType) ElementType() dgo.Type { return tupleElementType(t) } func tupleElementType(t dgo.TupleType) (et dgo.Type) { switch t.Len() { case 0: et = DefaultAnyType case 1: et = t.Element(0) default: ea := t.ElementTypes().Unique() if ea.Len() == 1 { return ea.Get(0).(dgo.Type) } return (*allOfType)(ea.(*array)) } return } func (t *tupleType) ElementTypes() dgo.Array { return &array{slice: t.types, frozen: true} } func (t *tupleType) Equals(other interface{}) bool { return equals(nil, t, other) } func (t *tupleType) deepEqual(seen []dgo.Value, other deepEqual) bool { if ot, ok := other.(*tupleType); ok { return t.variadic == ot.variadic && sliceEquals(seen, t.types, ot.types) } return tupleEquals(seen, t, other) } func tupleEquals(seen []dgo.Value, t dgo.TupleType, other interface{}) bool { if ot, ok := other.(dgo.TupleType); ok { n := t.Len() if t.Variadic() == ot.Variadic() && n == ot.Len() { for i := 0; i < n; i++ { if !equals(seen, t.Element(i), ot.Element(i)) { return false } } return true } } return false } func (t *tupleType) Generic() dgo.Type { return newArrayType(Generic(t.ElementType()), 0, math.MaxInt64) } func (t *tupleType) HashCode() int { return tupleHashCode(t, nil) } func (t *tupleType) deepHashCode(seen []dgo.Value) int { return tupleHashCode(t, seen) } func tupleHashCode(t dgo.TupleType, seen []dgo.Value) int { h := 1 if t.Variadic() { h = 7 } l := t.Len() for i := 0; i < l; i++ { h = h*31 + deepHashCode(seen, t.Element(i)) } return h } func (t *tupleType) 
Instance(value interface{}) bool { return Instance(nil, t, value) } func (t *tupleType) DeepInstance(guard dgo.RecursionGuard, value interface{}) bool { return tupleInstance(guard, t, value) } func tupleInstance(guard dgo.RecursionGuard, t dgo.TupleType, value interface{}) bool { ov, ok := value.(*array) if !ok { return false } s := ov.slice n := len(s) if t.Variadic() { if t.Min() > n { return false } tn := t.Len() - 1 for i := 0; i < tn; i++ { if !Instance(guard, t.Element(i), s[i]) { return false } } vt := t.Element(tn) for ; tn < n; tn++ { if !Instance(guard, vt, s[tn]) { return false } } return true } if n != t.Len() { return false } for i := range s { if !Instance(guard, t.Element(i), s[i]) { return false } } return true } func (t *tupleType) Len() int { return len(t.types) } func (t *tupleType) Max() int { return tupleMax(t) } func tupleMax(t dgo.TupleType) int { n := t.Len() if t.Variadic() { n = math.MaxInt64 } return n } func (t *tupleType) Min() int { return tupleMin(t) } func (t *tupleType) New(arg dgo.Value) dgo.Value { return newArray(t, arg) } func tupleMin(t dgo.TupleType) int { n := t.Len() if t.Variadic() { n-- } return n } func (t *tupleType) ReflectType() reflect.Type { return reflect.SliceOf(t.ElementType().ReflectType()) } func (t *tupleType) Resolve(ap dgo.AliasAdder) { s := t.types t.types = nil resolveSlice(s, ap) t.types = s } func (t *tupleType) String() string { return TypeString(t) } func (t *tupleType) Type() dgo.Type { return &metaType{t} } func (t *tupleType) TypeIdentifier() dgo.TypeIdentifier { return dgo.TiTuple } func (t *tupleType) Unbounded() bool { return t.variadic } func (t *tupleType) Variadic() bool { return t.variadic } // Array returns a frozen dgo.Array that represents a copy of the given value. 
The value can be // a slice or an Iterable func Array(value interface{}) dgo.Array { switch value := value.(type) { case dgo.Array: return value.FrozenCopy().(dgo.Array) case dgo.Iterable: return arrayFromIterator(value.Len(), value.Each) case []dgo.Value: arr := make([]dgo.Value, len(value)) for i := range value { e := value[i] if f, ok := e.(dgo.Freezable); ok { e = f.FrozenCopy() } else if e == nil { e = Nil } arr[i] = e } return &array{slice: arr, frozen: true} case reflect.Value: return ValueFromReflected(value).(dgo.Array) default: return ValueFromReflected(reflect.ValueOf(value)).(dgo.Array) } } // arrayFromIterator creates an array from a size and an iterator goFunc. The // iterator goFunc is expected to call its actor exactly size number of times. func arrayFromIterator(size int, each func(dgo.Consumer)) *array { arr := make([]dgo.Value, size) i := 0 each(func(e dgo.Value) { if f, ok := e.(dgo.Freezable); ok { e = f.FrozenCopy() } arr[i] = e i++ }) return &array{slice: arr, frozen: true} } func sliceFromIterable(ir dgo.Iterable) []dgo.Value { es := make([]dgo.Value, ir.Len()) i := 0 ir.Each(func(e dgo.Value) { es[i] = e i++ }) return es } // ArrayFromReflected creates a new array that contains a copy of the given reflected slice func ArrayFromReflected(vr reflect.Value, frozen bool) dgo.Value { if vr.IsNil() { return Nil } var arr []dgo.Value if vr.CanInterface() { ix := vr.Interface() if bs, ok := ix.([]byte); ok { return Binary(bs, frozen) } if vs, ok := ix.([]dgo.Value); ok { arr = vs if frozen { arr = util.SliceCopy(arr) } } } if arr == nil { top := vr.Len() arr = make([]dgo.Value, top) for i := 0; i < top; i++ { arr[i] = ValueFromReflected(vr.Index(i)) } } if frozen { for i := range arr { if f, ok := arr[i].(dgo.Freezable); ok { arr[i] = f.FrozenCopy() } } } return &array{slice: arr, frozen: frozen} } // ArrayWithCapacity creates a new mutable array of the given type and initial capacity. 
func ArrayWithCapacity(capacity int) dgo.Array { return &array{slice: make([]dgo.Value, 0, capacity), frozen: false} } // WrapSlice wraps the given slice in an array. Unset entries in the slice will be replaced by Nil. func WrapSlice(values []dgo.Value) dgo.Array { ReplaceNil(values) return &array{slice: values, frozen: false} } // MutableValues returns a frozen dgo.Array that represents the given values func MutableValues(values []interface{}) dgo.Array { cp := make([]dgo.Value, len(values)) for i := range values { cp[i] = Value(values[i]) } return &array{slice: cp, frozen: false} } func newArray(t dgo.Type, arg dgo.Value) dgo.Array { if args, ok := arg.(dgo.Arguments); ok { args.AssertSize(`array`, 1, 1) arg = args.Get(0) } a := Array(arg) if !t.Instance(a) { panic(IllegalAssignment(t, a)) } return a } func valueSlice(values []interface{}, frozen bool) []dgo.Value { cp := make([]dgo.Value, len(values)) if frozen { for i := range values { v := Value(values[i]) if f, ok := v.(dgo.Freezable); ok { v = f.FrozenCopy() } cp[i] = v } } else { for i := range values { cp[i] = Value(values[i]) } } return cp } // Integers returns a dgo.Array that represents the given ints func Integers(values []int) dgo.Array { cp := make([]dgo.Value, len(values)) for i := range values { cp[i] = intVal(values[i]) } return &array{slice: cp, frozen: true} } // Strings returns a dgo.Array that represents the given strings func Strings(values []string) dgo.Array { cp := make([]dgo.Value, len(values)) for i := range values { cp[i] = makeHString(values[i]) } return &array{slice: cp, frozen: true} } // Values returns a frozen dgo.Array that represents the given values func Values(values []interface{}) dgo.Array { return &array{slice: valueSlice(values, true), frozen: true} } func (v *array) Add(vi interface{}) { if v.frozen { panic(frozenArray(`Add`)) } v.slice = append(v.slice, Value(vi)) } func (v *array) AddAll(values dgo.Iterable) { if v.frozen { panic(frozenArray(`AddAll`)) } a := v.slice if 
ar, ok := values.(*array); ok { a = ar.AppendToSlice(a) } else { values.Each(func(e dgo.Value) { a = append(a, e) }) } v.slice = a } func (v *array) AddValues(values ...interface{}) { if v.frozen { panic(frozenArray(`AddValues`)) } v.slice = append(v.slice, valueSlice(values, false)...) } func (v *array) All(predicate dgo.Predicate) bool { a := v.slice for i := range a { if !predicate(a[i]) { return false } } return true } func (v *array) Any(predicate dgo.Predicate) bool { a := v.slice for i := range a { if predicate(a[i]) { return true } } return false } func (v *array) AppendTo(w dgo.Indenter) { w.AppendRune('{') ew := w.Indent() a := v.slice for i := range a { if i > 0 { w.AppendRune(',') } ew.NewLine() ew.AppendValue(v.slice[i]) } w.NewLine() w.AppendRune('}') } func (v *array) AppendToSlice(slice []dgo.Value) []dgo.Value { return append(slice, v.slice...) } func (v *array) CompareTo(other interface{}) (int, bool) { return compare(nil, v, Value(other)) } func (v *array) deepCompare(seen []dgo.Value, other deepCompare) (int, bool) { ov, ok := other.(*array) if !ok { return 0, false } a := v.slice b := ov.slice top := len(a) max := len(b) r := 0 if top < max { r = -1 max = top } else if top > max { r = 1 } for i := 0; i < max; i++ { if _, ok = a[i].(dgo.Comparable); !ok { r = 0 break } var c int if c, ok = compare(seen, a[i], b[i]); !ok { r = 0 break } if c != 0 { r = c break } } return r, ok } func (v *array) Copy(frozen bool) dgo.Array { if frozen && v.frozen { return v } cp := util.SliceCopy(v.slice) if frozen { for i := range cp { if f, ok := cp[i].(dgo.Freezable); ok { cp[i] = f.FrozenCopy() } } } else { for i := range cp { if f, ok := cp[i].(dgo.Freezable); ok { cp[i] = f.ThawedCopy() } } } return &array{slice: cp, frozen: frozen} } func (v *array) ContainsAll(other dgo.Iterable) bool { return v.deepContainsAll(nil, other) } func (v *array) deepContainsAll(seen []dgo.Value, other dgo.Iterable) bool { a := v.slice l := len(a) if l < other.Len() { return 
false } if l == 0 { return true } var vs []dgo.Value if oa, ok := other.(*array); ok { vs = util.SliceCopy(oa.slice) } else { vs = sliceFromIterable(other) } // Keep track of elements that have been found equal using a copy // where such elements are set to nil. This avoids excessive calls // to Equals for i := range vs { ea := a[i] f := false for j := range vs { if be := vs[j]; be != nil { if equals(seen, be, ea) { vs[j] = nil f = true break } } } if !f { return false } } return true } func (v *array) Each(actor dgo.Consumer) { a := v.slice for i := range a { actor(a[i]) } } func (v *array) EachWithIndex(actor dgo.DoWithIndex) { a := v.slice for i := range a { actor(a[i], i) } } func (v *array) Equals(other interface{}) bool { return equals(nil, v, other) } func (v *array) deepEqual(seen []dgo.Value, other deepEqual) bool { if ov, ok := other.(*array); ok { return sliceEquals(seen, v.slice, ov.slice) } return false } func (v *array) Find(finder dgo.Mapper) interface{} { a := v.slice for i := range a { if fv := finder(a[i]); fv != nil { return fv } } return nil } func (v *array) Flatten() dgo.Array { a := v.slice for i := range a { if _, ok := a[i].(*array); ok { fs := make([]dgo.Value, i, len(a)*2) copy(fs, a) return &array{slice: flattenElements(a[i:], fs), frozen: v.frozen} } } return v } func flattenElements(elements, receiver []dgo.Value) []dgo.Value { for i := range elements { e := elements[i] if a, ok := e.(*array); ok { receiver = flattenElements(a.slice, receiver) } else { receiver = append(receiver, e) } } return receiver } func (v *array) Freeze() { if v.frozen { return } v.frozen = true a := v.slice for i := range a { if f, ok := a[i].(dgo.Freezable); ok { f.Freeze() } } } func (v *array) Frozen() bool { return v.frozen } func (v *array) FrozenCopy() dgo.Value { return v.Copy(true) } func (v *array) ThawedCopy() dgo.Value { return v.Copy(false) } func (v *array) GoSlice() []dgo.Value { if v.frozen { return util.SliceCopy(v.slice) } return v.slice } func 
(v *array) HashCode() int { return v.deepHashCode(nil) } func (v *array) deepHashCode(seen []dgo.Value) int { h := 1 s := v.slice for i := range s { h = h*31 + deepHashCode(seen, s[i]) } return h } func (v *array) Get(index int) dgo.Value { return v.slice[index] } func (v *array) IndexOf(vi interface{}) int { val := Value(vi) a := v.slice for i := range a { if val.Equals(a[i]) { return i } } return -1 } func (v *array) Insert(pos int, vi interface{}) { if v.frozen { panic(frozenArray(`Insert`)) } v.slice = append(v.slice[:pos], append([]dgo.Value{Value(vi)}, v.slice[pos:]...)...) } // InterfaceSlice returns the values held by the Array as a slice. The slice will // contain dgo.Value instances. func (v *array) InterfaceSlice() []interface{} { s := v.slice is := make([]interface{}, len(s)) for i := range s { is[i] = s[i] } return is } func (v *array) Len() int { return len(v.slice) } func (v *array) Map(mapper dgo.Mapper) dgo.Array { a := v.slice vs := make([]dgo.Value, len(a)) for i := range a { vs[i] = Value(mapper(a[i])) } return &array{slice: vs, frozen: v.frozen} } func (v *array) One(predicate dgo.Predicate) bool { a := v.slice f := false for i := range a { if predicate(a[i]) { if f { return false } f = true } } return f } func (v *array) Reduce(mi interface{}, reductor func(memo dgo.Value, elem dgo.Value) interface{}) dgo.Value { memo := Value(mi) a := v.slice for i := range a { memo = Value(reductor(memo, a[i])) } return memo } func (v *array) ReflectTo(value reflect.Value) { vt := value.Type() ptr := vt.Kind() == reflect.Ptr if ptr { vt = vt.Elem() } if vt.Kind() == reflect.Interface && vt.Name() == `` { vt = v.Type().ReflectType() } a := v.slice var s reflect.Value if !v.frozen && vt.Elem() == reflectValueType { s = reflect.ValueOf(a) } else { l := len(a) s = reflect.MakeSlice(vt, l, l) for i := range a { ReflectTo(a[i], s.Index(i)) } } if ptr { // The created slice cannot be addressed. 
A pointer to it is necessary x := reflect.New(s.Type()) x.Elem().Set(s) s = x } value.Set(s) } func (v *array) removePos(pos int) dgo.Value { a := v.slice if pos >= 0 && pos < len(a) { newLen := len(a) - 1 val := a[pos] copy(a[pos:], a[pos+1:]) a[newLen] = nil // release to GC v.slice = a[:newLen] return val } return nil } func (v *array) Remove(pos int) dgo.Value { if v.frozen { panic(frozenArray(`Remove`)) } return v.removePos(pos) } func (v *array) RemoveValue(value interface{}) bool { if v.frozen { panic(frozenArray(`RemoveValue`)) } return v.removePos(v.IndexOf(value)) != nil } func (v *array) Resolve(ap dgo.AliasAdder) { a := v.slice for i := range a { a[i] = ap.Replace(a[i]) } } func (v *array) Reject(predicate dgo.Predicate) dgo.Array { vs := make([]dgo.Value, 0) a := v.slice for i := range a { e := a[i] if !predicate(e) { vs = append(vs, e) } } return &array{slice: vs, frozen: v.frozen} } func (v *array) SameValues(other dgo.Iterable) bool { return len(v.slice) == other.Len() && v.ContainsAll(other) } func (v *array) Select(predicate dgo.Predicate) dgo.Array { vs := make([]dgo.Value, 0) a := v.slice for i := range a { e := a[i] if predicate(e) { vs = append(vs, e) } } return &array{slice: vs, frozen: v.frozen} } func (v *array) Set(pos int, vi interface{}) dgo.Value { if v.frozen { panic(frozenArray(`Set`)) } old := v.slice[pos] v.slice[pos] = Value(vi) return old } func (v *array) Slice(i, j int) dgo.Array { if v.frozen && i == 0 && j == len(v.slice) { return v } ss := v.slice[i:j] if !v.frozen { // a copy is needed. 
Two non frozen arrays cannot share the same slice storage ss = util.SliceCopy(ss) } return &array{slice: ss, frozen: v.frozen} } func (v *array) Sort() dgo.Array { sa := v.slice if len(sa) < 2 { return v } sorted := util.SliceCopy(sa) sort.SliceStable(sorted, func(i, j int) bool { a := sorted[i] b := sorted[j] if ac, ok := a.(dgo.Comparable); ok { var c int if c, ok = ac.CompareTo(b); ok { return c < 0 } } return a.Type().TypeIdentifier() < b.Type().TypeIdentifier() }) return &array{slice: sorted, frozen: v.frozen} } func (v *array) String() string { return util.ToStringERP(v) } func (v *array) ToMap() dgo.Map { ms := v.slice top := len(ms) ts := top / 2 if top%2 != 0 { ts++ } tbl := make([]*hashNode, tableSizeFor(ts)) hl := len(tbl) - 1 m := &hashMap{table: tbl, len: ts, frozen: v.frozen} for i := 0; i < top; { mk := ms[i] i++ var mv dgo.Value = Nil if i < top { mv = ms[i] i++ } hk := hl & hash(mk.HashCode()) nd := &hashNode{mapEntry: mapEntry{key: mk, value: mv}, hashNext: tbl[hk], prev: m.last} if m.first == nil { m.first = nd } else { m.last.next = nd } m.last = nd tbl[hk] = nd } return m } func (v *array) ToMapFromEntries() (dgo.Map, bool) { ms := v.slice top := len(ms) tbl := make([]*hashNode, tableSizeFor(top)) hl := len(tbl) - 1 m := &hashMap{table: tbl, len: top, frozen: v.frozen} for i := range ms { nd, ok := ms[i].(*hashNode) if !ok { var ea *array if ea, ok = ms[i].(*array); ok && len(ea.slice) == 2 { nd = &hashNode{mapEntry: mapEntry{key: ea.slice[0], value: ea.slice[1]}} } else { return nil, false } } else if nd.hashNext != nil { // Copy node, it belongs to another map c := *nd c.next = nil // this one might not get assigned below nd = &c } hk := hl & hash(nd.key.HashCode()) nd.hashNext = tbl[hk] nd.prev = m.last if m.first == nil { m.first = nd } else { m.last.next = nd } m.last = nd tbl[hk] = nd } return m, true } func (v *array) Type() dgo.Type { ea := &exactArrayType{value: v} ea.ExactType = ea return ea } func (v *array) Unique() dgo.Array { a := 
v.slice top := len(a) if top < 2 { return v } tbl := make([]*hashNode, tableSizeFor(int(float64(top)/loadFactor))) hl := len(tbl) - 1 u := make([]dgo.Value, top) ui := 0 nextVal: for i := range a { k := a[i] hk := hl & hash(k.HashCode()) for e := tbl[hk]; e != nil; e = e.hashNext { if k.Equals(e.key) { continue nextVal } } tbl[hk] = &hashNode{mapEntry: mapEntry{key: k}, hashNext: tbl[hk]} u[ui] = k ui++ } if ui == top { return v } return &array{slice: u[:ui], frozen: v.frozen} } func (v *array) Pop() (dgo.Value, bool) { if v.frozen { panic(frozenArray(`Pop`)) } p := len(v.slice) - 1 if p >= 0 { return v.removePos(p), true } return nil, false } func (v *array) With(vi interface{}) dgo.Array { return &array{slice: append(v.slice, Value(vi)), frozen: v.frozen} } func (v *array) WithAll(values dgo.Iterable) dgo.Array { if values.Len() == 0 { return v } c := v.Copy(false) if v.frozen { values = values.FrozenCopy().(dgo.Iterable) } c.AddAll(values) c.(*array).frozen = v.frozen return c } func (v *array) WithValues(values ...interface{}) dgo.Array { if len(values) == 0 { return v } return &array{slice: append(v.slice, valueSlice(values, v.frozen)...), frozen: v.frozen} } // ReplaceNil performs an in-place replacement of nil interfaces with the NilValue func ReplaceNil(vs []dgo.Value) { for i := range vs { if vs[i] == nil { vs[i] = Nil } } } // allInstance returns true when all elements of slice vs are assignable to the given type t func allInstance(guard dgo.RecursionGuard, t dgo.Type, vs []dgo.Value) bool { if t == DefaultAnyType { return true } for i := range vs { if !Instance(guard, t, vs[i]) { return false } } return true } func frozenArray(f string) error { return fmt.Errorf(`%s called on a frozen Array`, f) } func resolveSlice(ts []dgo.Value, ap dgo.AliasAdder) { for i := range ts { ts[i] = ap.Replace(ts[i]) } }
internal/array.go
0.696991
0.57069
array.go
starcoder
package vector

import (
	"math"
)

// Vector2D is a two-dimensional vector with float64 components.
type Vector2D struct {
	X float64
	Y float64
}

// Angle finds the angle of v relative to the y-axis.
// Clockwise is positive and counterclockwise is negative.
func Angle(v Vector2D) float64 {
	x, y := Components(v)
	rotation := math.Pi / 2
	// Negating y and the result converts the standard math convention
	// (counterclockwise from the x-axis) into this clockwise-from-y form.
	theta := math.Atan2(-y, x)
	return -theta + rotation
}

// AngleReg finds the standard angle (counterclockwise from the x-axis).
func AngleReg(v Vector2D) (theta float64) {
	x, y := Components(v)
	theta = math.Atan2(y, x)
	return theta
}

// Add adds v2 to v in place.
func (v *Vector2D) Add(v2 Vector2D) {
	v.X += v2.X
	v.Y += v2.Y
}

// Subtract subtracts v2 from v in place.
func (v *Vector2D) Subtract(v2 Vector2D) {
	v.X -= v2.X
	v.Y -= v2.Y
}

// Normalize scales v to unit length. The zero vector is left unchanged.
func (v *Vector2D) Normalize() {
	mag := math.Sqrt(v.MagnitudeSquared())
	if mag == 0 {
		return
	}
	v.Divide(mag)
}

// SetMagnitude scales v to have magnitude z, preserving its direction.
func (v *Vector2D) SetMagnitude(z float64) {
	v.Normalize()
	v.Multiply(z)
}

// MagnitudeSquared returns |v|^2 (avoids the sqrt of a full magnitude).
func (v *Vector2D) MagnitudeSquared() float64 {
	return v.X*v.X + v.Y*v.Y
}

// Divide scales v by 1/z in place. z must be non-zero.
func (v *Vector2D) Divide(z float64) {
	v.X /= z
	v.Y /= z
}

// Multiply scales v by z in place.
func (v *Vector2D) Multiply(z float64) {
	v.X *= z
	v.Y *= z
}

// Limit clamps the magnitude of v to at most max.
func (v *Vector2D) Limit(max float64) {
	magSq := v.MagnitudeSquared()
	if magSq > max*max {
		v.Divide(math.Sqrt(magSq))
		v.Multiply(max)
	}
}

// Mini boosts v to magnitude 1.5*max when its squared magnitude falls
// below 30% of max^2. The zero vector is left unchanged; previously it
// produced NaN components via a division by zero.
func (v *Vector2D) Mini(max float64) {
	magSq := v.MagnitudeSquared()
	barrier := .30 * max * max
	if magSq > 0 && magSq < barrier {
		v.Divide(math.Sqrt(magSq))
		v.Multiply(max * 1.5)
	}
}

// RotatePoints rotates points by an angle theta about an origin point.
// Rotates in-place.
func RotatePoints(theta float64, origin Vector2D, points ...*Vector2D) {
	sin, cos := math.Sincos(theta)
	originX, originY := Components(origin)
	for _, point := range points {
		// Translate to the origin, apply the 2D rotation matrix,
		// then translate back.
		tildeX, tildeY := point.X-originX, point.Y-originY
		point.X = cos*tildeX - sin*tildeY + originX
		point.Y = sin*tildeX + cos*tildeY + originY
	}
}

// Components returns the x and y components of the vector.
func Components(v Vector2D) (x, y float64) {
	return v.X, v.Y
}

// IsIntersect detects if two line segments intersect.
// They intersect iff 0 <= t <= 1 and 0 <= u <= 1.
// https://en.wikipedia.org/wiki/Line%E2%80%93line_intersection
// Points a and b form segment 1; points c and d form segment 2.
// Parallel (or degenerate) segments report no intersection with
// t = u = 0; previously the zero determinant produced ±Inf/NaN here.
func IsIntersect(a, b, c, d Vector2D) (t, u float64, intersected bool) {
	// Grabbing notation to look like the formula
	var (
		x1, y1 = a.X, a.Y
		x2, y2 = b.X, b.Y
		x3, y3 = c.X, c.Y
		x4, y4 = d.X, d.Y
	)
	den := (x1-x2)*(y3-y4) - (y1-y2)*(x3-x4)
	if den == 0 {
		// Parallel or coincident: no single intersection point.
		return 0, 0, false
	}
	z := 1 / den
	t = ((x1-x3)*(y3-y4) - (y1-y3)*(x3-x4)) * z
	u = ((x2-x1)*(y1-y3) - (y2-y1)*(x1-x3)) * z
	intersected = t >= 0. && t <= 1. && u >= 0. && u <= 1.
	return t, u, intersected
}

// IntersectionPoint finds the actual intersection point, using the t
// parameter from IsIntersect. The result is only meaningful when
// IsIntersect reported an intersection.
// a and b are position and newPos of boid respectively.
func IntersectionPoint(a, b Vector2D, t float64) (x, y float64) {
	return a.X + t*(b.X-a.X), a.Y + t*(b.Y-a.Y)
}

// Distance returns the Euclidean distance between v and v2.
func Distance(v, v2 Vector2D) float64 {
	// math.Hypot is more robust against overflow/underflow than
	// Sqrt(Pow(dx, 2) + Pow(dy, 2)).
	return math.Hypot(v2.X-v.X, v2.Y-v.Y)
}

// IsSeen returns the cosine of the angle between the heading of the
// current boid (pos -> newPos) and the direction to another boid
// (pos -> otherPos).
func IsSeen(pos, newPos, otherPos *Vector2D) (cosAngle float64) {
	var (
		dx1 = newPos.X - pos.X
		dy1 = newPos.Y - pos.Y
		dx2 = otherPos.X - pos.X
		dy2 = otherPos.Y - pos.Y
	)
	den := math.Sqrt(dx1*dx1+dy1*dy1) * math.Sqrt(dx2*dx2+dy2*dy2)
	cosAngle = (dx1*dx2 + dy1*dy2) / den
	return cosAngle
}

// Sign returns 1 for num >= 0 and -1 otherwise.
func Sign(num float64) float64 {
	if num >= 0 {
		return 1
	}
	return -1
}
vector/vector.go
0.801975
0.592637
vector.go
starcoder
package blockchain

import (
	"bytes"
	"crypto/sha256"
	"log"
	"math"
	"math/big"

	"github.com/neil-berg/blockchain/util"
)

// Difficulty is a static number here, but would be dynamically changing in
// reality to regulate block rates.
const Difficulty = 12

// ProofOfWork is the shape of a block's proof of work
type ProofOfWork struct {
	Block  *Block
	Target *big.Int
}

// CreateData returns a single slice of bytes of the POW's data consisting of the
// block's previous hash and data, along with the current nonce and the difficulty.
// It panics (via log.Panic) if any numeric field fails to serialize.
func (pow *ProofOfWork) CreateData(nonce int) []byte {
	nonceBytes, err := util.NumToBytes(int64(nonce))
	if err != nil {
		log.Panic(err)
	}
	difficultyBytes, err := util.NumToBytes(Difficulty)
	if err != nil {
		log.Panic(err)
	}
	timestampBytes, err := util.NumToBytes(pow.Block.Timestamp.Unix())
	if err != nil {
		log.Panic(err)
	}
	data := bytes.Join(
		[][]byte{
			pow.Block.PrevHash,
			pow.Block.Data,
			nonceBytes,
			difficultyBytes,
			timestampBytes,
		},
		[]byte{},
	)
	return data
}

// Run loops over a nearly infinite range of nonces, computing hashes with each
// until the hash's big int representation is less than the target hash's big
// int representation. When that happens, we declare the block to be signed.
func (pow *ProofOfWork) Run() (int, [32]byte) {
	var initHash big.Int
	var hash [32]byte

	nonce := 0
	// Compare through int64 so the bound compiles on 32-bit platforms,
	// where the untyped constant math.MaxInt64 is not representable as int.
	for int64(nonce) < math.MaxInt64 {
		data := pow.CreateData(nonce)
		hash = sha256.Sum256(data)

		// Convert the hash to a big int
		initHash.SetBytes(hash[:])

		// Compare the big int hash to the target big int hash
		if initHash.Cmp(pow.Target) == -1 {
			// Computed hash is less than the target hash, so we signed the block
			break
		}
		// Increment nonce and try again
		nonce++
	}
	return nonce, hash
}

// Validate takes the block's nonce, recomputes the block's hash, and confirms
// that the hash as a big int is less than the POW's target.
func (pow *ProofOfWork) Validate() bool {
	var intHash big.Int
	data := pow.CreateData(pow.Block.Nonce)
	hash := sha256.Sum256(data)
	intHash.SetBytes(hash[:])
	return intHash.Cmp(pow.Target) == -1
}

// NewProof builds a ProofOfWork for block, deriving the target from the
// package-level Difficulty.
func NewProof(block *Block) *ProofOfWork {
	// Initialize the target at 1, then left shift it by the difficulty
	target := big.NewInt(1)
	// 256 is the number of bits in the block's hash (sha256). As difficulty
	// increases, the target value decreases, making it harder to complete the proof.
	target.Lsh(target, uint(256-Difficulty))
	pow := &ProofOfWork{block, target}
	return pow
}
blockchain/proof.go
0.719876
0.47025
proof.go
starcoder
package remotewrite

import (
	"fmt"

	"github.com/prometheus/prometheus/pkg/timestamp"
	"github.com/prometheus/prometheus/prompb"

	"go.k6.io/k6/stats"
)

// Note: k6 Registry is not used here since Output is getting
// samples only from k6 engine, hence we assume they are already vetted.

// metricsStorage is an in-memory gather point for metrics
type metricsStorage struct {
	// m maps a metric name to the last aggregated sample for that metric.
	m map[string]stats.Sample
}

// newMetricsStorage returns an empty metricsStorage.
func newMetricsStorage() *metricsStorage {
	return &metricsStorage{
		m: make(map[string]stats.Sample),
	}
}

// update modifies metricsStorage and returns updated sample
// so that the stored metric and the returned metric hold the same value.
// When add is nil, the incoming sample is folded into the stored metric's
// Sink; otherwise add decides how the stored and incoming samples combine.
func (ms *metricsStorage) update(sample stats.Sample, add func(current, s stats.Sample) stats.Sample) stats.Sample {
	current, ok := ms.m[sample.Metric.Name]
	if !ok {
		// First observation of this metric: seed storage with the sample itself.
		sample.Metric.Sink.Add(sample)
		ms.m[sample.Metric.Name] = sample
		return sample
	}
	if add == nil {
		current.Metric.Sink.Add(sample)
	} else {
		current = add(current, sample)
	}
	current.Time = sample.Time // to avoid duplicates in timestamps
	// Sometimes remote write endpoint throws an error about duplicates even if the values
	// sent were different. By current observations, this is a hard to repeat case and
	// potentially a bug.
	// Related: https://github.com/prometheus/prometheus/issues/9210
	ms.m[current.Metric.Name] = current
	return current
}

// transform k6 sample into TimeSeries for remote-write
func (ms *metricsStorage) transform(mapping Mapping, sample stats.Sample, labels []prompb.Label) ([]prompb.TimeSeries, error) {
	var newts []prompb.TimeSeries

	switch sample.Metric.Type {
	case stats.Counter:
		newts = mapping.MapCounter(ms, sample, labels)

	case stats.Gauge:
		newts = mapping.MapGauge(ms, sample, labels)

	case stats.Rate:
		newts = mapping.MapRate(ms, sample, labels)

	case stats.Trend:
		newts = mapping.MapTrend(ms, sample, labels)

	default:
		return nil, fmt.Errorf("Something is really off as I cannot recognize the type of metric %s: `%s`", sample.Metric.Name, sample.Metric.Type)
	}

	return newts, nil
}

// Mapping represents the specific way k6 metrics can be mapped to metrics of
// remote agent. As each remote agent can use different ways to store metrics as well as
// expect different values on remote write endpoint, they must have their own support.
type Mapping interface {
	MapCounter(ms *metricsStorage, sample stats.Sample, labels []prompb.Label) []prompb.TimeSeries
	MapGauge(ms *metricsStorage, sample stats.Sample, labels []prompb.Label) []prompb.TimeSeries
	MapRate(ms *metricsStorage, sample stats.Sample, labels []prompb.Label) []prompb.TimeSeries
	MapTrend(ms *metricsStorage, sample stats.Sample, labels []prompb.Label) []prompb.TimeSeries

	// AdjustLabels(labels []prompb.Label) []prompb.Label
}

// NewMapping returns the Mapping implementation for the named remote agent.
// Anything other than "prometheus" falls back to the raw 1:1 mapping.
func NewMapping(mapping string) Mapping {
	switch mapping {
	case "prometheus":
		return &PrometheusMapping{}
	default:
		return &RawMapping{}
	}
}

// RawMapping maps every k6 sample to a single time series without any
// agent-specific aggregation or transformation.
type RawMapping struct{}

// MapCounter maps a counter sample 1:1 to a time series.
func (rm *RawMapping) MapCounter(ms *metricsStorage, sample stats.Sample, labels []prompb.Label) []prompb.TimeSeries {
	return rm.processSample(sample, labels)
}

// MapGauge maps a gauge sample 1:1 to a time series.
func (rm *RawMapping) MapGauge(ms *metricsStorage, sample stats.Sample, labels []prompb.Label) []prompb.TimeSeries {
	return rm.processSample(sample, labels)
}

// MapRate maps a rate sample 1:1 to a time series.
func (rm *RawMapping) MapRate(ms *metricsStorage, sample stats.Sample, labels []prompb.Label) []prompb.TimeSeries {
	return rm.processSample(sample, labels)
}

// MapTrend maps a trend sample 1:1 to a time series.
func (rm *RawMapping) MapTrend(ms *metricsStorage, sample stats.Sample, labels []prompb.Label) []prompb.TimeSeries {
	return rm.processSample(sample, labels)
}

// processSample wraps a single sample in a one-element TimeSeries slice,
// prefixing the metric name for the remote agent's namespace.
func (rm *RawMapping) processSample(sample stats.Sample, labels []prompb.Label) []prompb.TimeSeries {
	return []prompb.TimeSeries{
		{
			Labels: append(labels, prompb.Label{
				Name:  "__name__",
				Value: fmt.Sprintf("%s%s", defaultMetricPrefix, sample.Metric.Name),
			}),
			Samples: []prompb.Sample{
				{
					Value:     sample.Value,
					Timestamp: timestamp.FromTime(sample.Time),
				},
			},
		},
	}
}
pkg/remotewrite/metrics.go
0.82379
0.404478
metrics.go
starcoder
package bits

import (
	"io"
)

// BitWriter writes individual bits to an underlying byte stream.
type BitWriter interface {
	// Write writes a single bit out to the byte stream.
	// isOne: 1 for writing a '1', anything else for writing a '0'.
	// Returns an error if the bit failed to write.
	Write(isOne int) error

	// Flush writes the remaining partially-filled byte (padded with
	// trailing 0 bits) to the stream. A BitWriter can only write at a
	// byte level granularity, so this must be called after the last Write.
	Flush() error

	// NumBitsWritten returns the number of bits written so far.
	NumBitsWritten() int
}

// BitsWriter is the default BitWriter implementation. It accumulates
// bits into a scratch byte and emits each byte to the stream once full.
type BitsWriter struct {
	// NumBits counts the bits written so far (Flush padding excluded).
	NumBits int

	// OutputStream is the byte stream written to. Only whole bytes are
	// emitted, so the last byte may be zero padded.
	OutputStream io.ByteWriter

	// bitPos is the index (7 down to 0) of the next bit to set in
	// accumulationByte; bits fill from the most significant end.
	bitPos uint

	// accumulationByte is the scratch byte being filled. Once all 8
	// bits are set it is written out to the byte stream.
	accumulationByte byte
}

// NewBitsWriter creates a BitsWriter over stream. Note the bit writer
// can only write at a byte level granularity: if only the bits '101'
// remain buffered, Flush still emits the full byte '1010 0000'.
func NewBitsWriter(stream io.ByteWriter) *BitsWriter {
	return &BitsWriter{NumBits: 0, OutputStream: stream, bitPos: 7, accumulationByte: 0x00}
}

// Write writes a single bit to the stream. Bits fill each byte from
// left (most significant) to right (least significant).
// isOne: 1 writes a '1'; any other value writes a '0'.
func (w *BitsWriter) Write(isOne int) error {
	w.NumBits++
	if isOne == 1 {
		w.accumulationByte |= 1 << w.bitPos
	}
	if w.bitPos > 0 {
		w.bitPos--
		return nil
	}
	// The scratch byte is full: emit it and start a fresh one. The
	// previous version detected this by letting the unsigned bitPos
	// wrap around and truncating to int, which also left the writer in
	// a corrupt state if WriteByte failed; checking before the
	// decrement avoids both problems.
	if err := w.OutputStream.WriteByte(w.accumulationByte); err != nil {
		return err
	}
	w.bitPos = 7
	w.accumulationByte = 0x00
	return nil
}

// NumBitsWritten returns the number of bits written. This count does not
// include any padded 0's of the last byte which may have been emitted
// due to the byte level granularity of the BitWriter.
func (w *BitsWriter) NumBitsWritten() int {
	return w.NumBits
}

// Flush writes the remaining partially-filled byte (if any) to the
// output stream and resets the scratch state, so a second Flush without
// intervening writes is a no-op (the previous version re-emitted the
// same byte on repeated Flush calls).
func (w *BitsWriter) Flush() error {
	if w.bitPos == 7 {
		// Nothing buffered: the last full byte was already emitted.
		return nil
	}
	if err := w.OutputStream.WriteByte(w.accumulationByte); err != nil {
		return err
	}
	w.bitPos = 7
	w.accumulationByte = 0x00
	return nil
}
bits/bit_writer.go
0.724188
0.431584
bit_writer.go
starcoder
package tests

import (
	"bytes"
	"reflect"
	"testing"
)

// isZeroOfUnderlyingType reports whether x is nil or equal to the zero
// value of its dynamic type. Note that any []string is unconditionally
// treated as zero — presumably so string-slice fields are skipped by the
// struct walk in assert; TODO confirm that is intentional.
func isZeroOfUnderlyingType(x interface{}) bool {
	if x == nil {
		return true
	}
	if _, ok := x.([]string); ok {
		return true
	}
	return x == reflect.Zero(reflect.TypeOf(x)).Interface()
}

// objEq compares two values for equality. []byte is compared with
// bytes.Equal (with nil treated as equal only to nil); everything else
// falls back to reflect.DeepEqual.
func objEq(expected, actual interface{}) bool {
	if expected == nil || actual == nil {
		return expected == actual
	}
	if exp, ok := expected.([]byte); ok {
		act, ok := actual.([]byte)
		if !ok {
			return false
		} else if exp == nil || act == nil {
			return exp == nil && act == nil
		}
		return bytes.Equal(exp, act)
	}
	return reflect.DeepEqual(expected, actual)
}

// assertSlice fails the test unless expected and actual are equal,
// either directly (objEq) or after converting expected to actual's
// dynamic type.
func assertSlice(t *testing.T, expected, actual interface{}) {
	if objEq(expected, actual) {
		t.Logf("%s OK", reflect.TypeOf(actual))
		return
	}
	actualType := reflect.TypeOf(actual)
	if actualType == nil {
		t.Fatal()
	}
	expectedValue := reflect.ValueOf(expected)
	if expectedValue.IsValid() && expectedValue.Type().ConvertibleTo(actualType) {
		// Attempt comparison after type conversion
		if reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), actual) {
			t.Logf("%s OK", reflect.TypeOf(actual))
			return
		}
	}
	t.Fatalf("FAIL %s: expected %#v, got %#v", reflect.TypeOf(expected), expected, actual)
}

// isPrimitive reports whether exp's kind is a simple scalar kind that is
// safe to compare with ==. Composite and indirect kinds are excluded.
// Note: reflect.TypeOf(exp) is used without a nil check, so a nil exp
// would panic here — callers are expected to pass non-nil values.
func isPrimitive(exp interface{}) bool {
	t := reflect.TypeOf(exp)
	switch t.Kind() {
	case reflect.Interface:
		return false
	case reflect.Struct:
		return false
	case reflect.Array:
		return false
	case reflect.Func:
		return false
	case reflect.Map:
		return false
	case reflect.Ptr:
		return false
	case reflect.Slice:
		return false
	case reflect.UnsafePointer:
		return false
	default:
		return true
	}
}

// assert recursively compares expected against actual. Primitive kinds
// are compared with ==; anything else is assumed to be a pointer (e.g.
// to a struct), which is dereferenced and compared field by field.
// Zero-valued expected fields are skipped, so an expectation only needs
// to populate the fields it cares about.
// does not work on slices
func assert(t *testing.T, expected interface{}, actual interface{}) {
	prexp := reflect.ValueOf(expected)
	pract := reflect.ValueOf(actual)
	if isPrimitive(actual) {
		if expected != actual {
			t.Fatalf("expected %#v, got %#v", expected, actual)
		}
		t.Logf("OK %s", reflect.TypeOf(expected).Name())
		return
	}
	if pract.IsNil() {
		t.Errorf("nil actual value: %#v", actual)
		t.Fail()
		return
	}
	exp := prexp.Elem()
	act := pract.Elem()
	if !exp.IsValid() {
		t.Errorf("reflected expectation not valid (%#v)", expected)
		t.Fail()
	}
	if exp.Type() != act.Type() {
		t.Errorf("expected type %s, got %s", exp.Type(), act.Type())
		t.Fail()
	}
	for i := 0; i < exp.NumField(); i++ {
		expValueField := exp.Field(i)
		expTypeField := exp.Type().Field(i)
		actValueField := act.Field(i)
		actTypeField := act.Type().Field(i)
		if expTypeField.Name != actTypeField.Name {
			t.Errorf("expected type %s, got %s", expTypeField.Name, actTypeField.Name)
			t.Errorf("%#v", actual)
			t.Fail()
		}
		// A zero expected field means "don't care": skip it.
		if isZeroOfUnderlyingType(expValueField.Interface()) {
			continue
		}
		// Expected is set but actual is empty: report and stop walking.
		if !isZeroOfUnderlyingType(expValueField.Interface()) && isZeroOfUnderlyingType(actValueField.Interface()) {
			t.Errorf("expected %s, but was empty", expTypeField.Name)
			t.Errorf("%#v", actual)
			t.Fail()
			return
		}
		// Recurse into the field (primitive or nested pointer value).
		assert(t, expValueField.Interface(), actValueField.Interface())
		/*
			if expValueField.Interface() != actValueField.Interface() {
				t.Errorf("expected %s %#v, got %#v", expTypeField.Name, expValueField.Interface(), actValueField.Interface())
				t.Fail()
			}
		*/
	}
	if t.Failed() {
		t.Logf("FAIL %s", exp.Type().Name())
		return
	}
	t.Logf("OK %s", exp.Type().Name())
}
tests/integration/v2/assert.go
0.51879
0.632673
assert.go
starcoder
package datastore

import (
	"encoding/binary"
	"fmt"
	"math"
	"time"

	"code.google.com/p/log4go"
	"github.com/influxdb/influxdb/common"
)

const maxSeqNumber = (1 << 64) - 1

// keySize is the encoded size of a storageKey: three big-endian uint64
// fields (id, timestamp, sequence number).
const keySize = 8 * 3

// storageKey is the key that we use to store values in our key/value
// store engine. The key contains the field id, timestamp and sequence
// number of the value being stored.
type storageKey struct {
	// bytesBuf caches the encoded form when the key was parsed from bytes.
	bytesBuf []byte
	id       uint64
	// timestamp is in microseconds and can be negative.
	timestamp int64
	seq       uint64
}

// newStorageKey creates a new storageKey.
// timestamp: the timestamp in microseconds. timestamp can be negative.
func newStorageKey(id uint64, timestamp int64, seq uint64) storageKey {
	return storageKey{
		bytesBuf:  nil,
		id:        id,
		timestamp: timestamp,
		seq:       seq,
	}
}

// parseKey parses the given byte slice into a storageKey. The input
// slice is retained (aliased) as the key's cached encoded form, so the
// caller must not mutate it afterwards.
func parseKey(b []byte) (storageKey, error) {
	if len(b) != keySize {
		return storageKey{}, fmt.Errorf("Expected %d fields, found %d", keySize, len(b))
	}
	// Decode the three fixed-width fields directly. This avoids the
	// reflection overhead of binary.Read — whose errors the previous
	// version silently ignored — and cannot fail since the length was
	// checked above.
	sk := storageKey{
		id:        binary.BigEndian.Uint64(b[0:8]),
		timestamp: convertUintTimestampToInt64(binary.BigEndian.Uint64(b[8:16])),
		seq:       binary.BigEndian.Uint64(b[16:24]),
		bytesBuf:  b,
	}
	log4go.Debug("Parsed %v to %v", b, sk)
	return sk, nil
}

// bytes returns a byte representation of the storage key. If the byte
// representation was to be lexicographic sorted, then b1 < b2 iff
// id1 < id2, or id1 == id2 and t1 < t2, or id1 == id2 and t1 == t2 and
// seq1 < seq2. This means that the byte representation has the same
// sort properties as the tuple (id, time, sequence).
func (sk storageKey) bytes() []byte {
	if sk.bytesBuf != nil {
		return sk.bytesBuf
	}
	// sk is a value receiver, so the encoding cannot be cached back onto
	// the caller's key here (the previous version assigned sk.bytesBuf,
	// but that only ever mutated a local copy). Encode directly into a
	// fresh slice instead of going through bytes.Buffer + binary.Write.
	buf := make([]byte, keySize)
	binary.BigEndian.PutUint64(buf[0:8], sk.id)
	binary.BigEndian.PutUint64(buf[8:16], convertTimestampToUint(sk.timestamp))
	binary.BigEndian.PutUint64(buf[16:24], sk.seq)
	return buf
}

// time returns the key's timestamp as a time.Time.
func (sk storageKey) time() time.Time {
	return common.TimeFromMicroseconds(sk.timestamp)
}

// utility functions only used in this file

// convertTimestampToUint biases a signed microsecond timestamp by 2^63
// so that unsigned big-endian byte order matches signed time order.
func convertTimestampToUint(t int64) uint64 {
	if t < 0 {
		return uint64(math.MaxInt64 + t + 1)
	}
	return uint64(t) + uint64(math.MaxInt64) + uint64(1)
}

// convertUintTimestampToInt64 is the inverse of convertTimestampToUint.
func convertUintTimestampToInt64(t uint64) int64 {
	if t > uint64(math.MaxInt64) {
		return int64(t-math.MaxInt64) - int64(1)
	}
	return int64(t) - math.MaxInt64 - int64(1)
}
datastore/storage_key.go
0.757974
0.428771
storage_key.go
starcoder
package uid12 // Q: Why not use base32.StdEncoding? // A: The uid12 encoder is more terse, as it can make assumptions // about the size and shape of the value to encode. It also implements // aliases like I and 1, and O and 0, at no extra cost. // Benchmarks show ~2x improvement on encode and ~10x improvement // on decode (though this last one might be a benchmark error). const digitMask = 0x1f func rfc4648EncodeTo(value Value, encoded []byte) { if value < MinValue || value > MaxValue { return } if len(encoded) < 12 { // bounds hint return } v := int64(value) for i := 11; i >= 0; i-- { digit := v & digitMask encoded[i] = rfc4648EncodingAlphabet[digit] v >>= 5 } } func rfc4648Encode(value Value) string { if value < MinValue || value > MaxValue { return "" } var encoded [12]byte rfc4648EncodeTo(value, encoded[:]) return string(encoded[:]) } func rfc4648Decode(str string) Value { if len(str) != 12 { return Zero } v := int64(0) ch := str[0] digit := rfc4648DecodingAlphabet[ch] v |= digit if digit == 0 && ch != 'A' && ch != 'a' { return Zero } for i := 1; i < 12; i++ { ch = str[i] digit = rfc4648DecodingAlphabet[ch] v = (v << 5) | digit if digit == 0 && ch != 'A' && ch != 'a' { return Zero } } return Value(v) } const rfc4648EncodingAlphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567" var rfc4648DecodingAlphabet = [256]int64{ '0': 14, // alias of O '1': 8, // alias of I '2': 26, '3': 27, '4': 28, '5': 29, '6': 30, '7': 31, '8': 1, // alias of B '9': 15, // alias of P 'A': 0, 'B': 1, 'C': 2, 'D': 3, 'E': 4, 'F': 5, 'G': 6, 'H': 7, 'I': 8, 'J': 9, 'K': 10, 'L': 11, 'M': 12, 'N': 13, 'O': 14, 'P': 15, 'Q': 16, 'R': 17, 'S': 18, 'T': 19, 'U': 20, 'V': 21, 'W': 22, 'X': 23, 'Y': 24, 'Z': 25, 'a': 0, 'b': 1, 'c': 2, 'd': 3, 'e': 4, 'f': 5, 'g': 6, 'h': 7, 'i': 8, 'j': 9, 'k': 10, 'l': 11, 'm': 12, 'n': 13, 'o': 14, 'p': 15, 'q': 16, 'r': 17, 's': 18, 't': 19, 'u': 20, 'v': 21, 'w': 22, 'x': 23, 'y': 24, 'z': 25, }
base32.go
0.642208
0.442817
base32.go
starcoder
package linear

import (
	"bytes"
	"errors"
	"strconv"
)

// Heap represents a min heap backed by a slice.
// Slot 0 of the backing array is unused; elements occupy slots 1..count,
// with the children of slot i at 2i and 2i+1.
type Heap struct {
	array []int
	count int
}

// NewHeap returns a new empty heap.
func NewHeap() *Heap {
	heap := new(Heap)
	heap.array = make([]int, 20)
	heap.count = 0
	return heap
}

// String renders the element count and the elements in heap order.
func (H *Heap) String() string {
	var buffer bytes.Buffer
	buffer.WriteString("Count: ")
	buffer.WriteString(strconv.Itoa(H.count) + "\n[")
	for i := 1; i <= H.count; i++ {
		buffer.WriteString(strconv.Itoa(H.array[i]) + ", ")
	}
	buffer.WriteString("]")
	return buffer.String()
}

// Count returns the number of elements currently in the heap.
func (H *Heap) Count() int {
	return H.count
}

// Array returns the heap's elements in internal (heap) order.
func (H *Heap) Array() []int {
	return H.array[1 : H.count+1]
}

// Search determines if an element exists in the heap or not; the returned
// index is 0-based.
func (H *Heap) Search(value int) (int, bool) {
	//TODO improve; use tree pre-order and cut-off
	for i := 1; i <= H.count; i++ {
		if H.array[i] == value {
			return i - 1, true
		}
	}
	return 0, false
}

// ElementAt returns the element at index (0-based) and true, or 0 and
// false when the index is out of range.
func (H *Heap) ElementAt(index int) (int, bool) {
	if index >= H.count {
		return 0, false
	}
	return H.array[index+1], true
}

// Insert adds the value to the heap, expanding the underlying storage if
// necessary. Duplicate values are rejected with an error.
func (H *Heap) Insert(value int) error {
	// first search for the element; if it exists then return the error
	_, present := H.Search(value)
	if present {
		return errors.New("Value already exists in the heap.")
	}
	// +1 because slot 0 is unused
	if H.count+1 == len(H.array) {
		H.resize()
	}
	H.count++
	H.array[H.count] = value
	H.swimUp(H.count)
	return nil
}

// RemoveAt removes the element at the given 0-based index.
func (H *Heap) RemoveAt(index int) error {
	index++
	if index > H.count {
		return errors.New("Index out of bound")
	}
	H.swap(H.count, index)
	H.array[H.count] = 0
	H.count--
	// Re-heapify only if an element was actually moved into slot `index`.
	// When the LAST element was removed, `index` now lies past the heap;
	// the original code still ran swimUp on the cleared slot and could
	// swap a stray 0 back into the heap, corrupting it.
	if index <= H.count {
		if H.array[index] < H.array[H.parent(index)] {
			H.swimUp(index)
		} else {
			H.minHeapify(index)
		}
	}
	if H.count < len(H.array)/4 {
		H.resize()
	}
	return nil
}

// EnsureCapacity ensures that the backing array has at least length of
// capacity.
func (H *Heap) EnsureCapacity(capacity int) {
	if len(H.array) < capacity {
		newArray := make([]int, capacity)
		copy(newArray, H.array)
		H.array = newArray
	}
}

// DeleteRoot removes the root element; returns an error if the heap is
// empty.
func (H *Heap) DeleteRoot() error {
	if H.count < 1 {
		return errors.New("No element in heap")
	}
	return H.RemoveAt(0)
}

// Remove removes the value from the heap.
func (H *Heap) Remove(value int) error {
	index, present := H.Search(value)
	if !present {
		return errors.New("Value not present in heap")
	}
	return H.RemoveAt(index)
}

// GetRoot returns the minimum element and true, or 0 and false when the
// heap is empty.
func (H *Heap) GetRoot() (int, bool) {
	if H.count == 0 {
		return 0, false
	}
	// cause we don't use the first index
	return H.array[1], true
}

// DecreaseKey reduces the key oldValue to newValue and restores the heap
// property.
func (H *Heap) DecreaseKey(oldValue, newValue int) error {
	if newValue >= oldValue {
		return errors.New("New value cannot be greater than or equal to old value")
	}
	index, present := H.Search(oldValue)
	if !present {
		return errors.New("Value not present in heap")
	}
	// Search returns a 0-based index; the backing array is 1-based.
	index++
	H.array[index] = newValue
	H.swimUp(index)
	return nil
}

// IncreaseKey increases the key oldValue to newValue and restores the
// heap property.
func (H *Heap) IncreaseKey(oldValue, newValue int) error {
	if newValue <= oldValue {
		return errors.New("New value cannot be less than or equal to old value")
	}
	index, present := H.Search(oldValue)
	if !present {
		return errors.New("Value not present in heap")
	}
	// BUG FIX: Search returns a 0-based index but the backing array is
	// 1-based. The original code skipped this conversion (unlike
	// DecreaseKey) and overwrote the element one slot to the left —
	// clobbering the unused slot 0 or a sibling key.
	index++
	H.array[index] = newValue
	H.sinkDown(index)
	return nil
}

// resize grows (doubles) or shrinks (halves) the backing array depending
// on the current fill level.
func (H *Heap) resize() {
	var newLen int
	if H.count+1 == len(H.array) {
		// full: double
		newLen = len(H.array) * 2
	} else if H.count < len(H.array)/4 {
		// sparse: halve, but never below what is currently stored
		newLen = len(H.array) / 2
		if newLen < H.count+1 {
			newLen = H.count + 1
		}
	} else {
		// Neither condition holds: nothing to do. The original code fell
		// through and copied into a nil slice, losing the heap contents.
		return
	}
	newArray := make([]int, newLen)
	copy(newArray, H.array)
	H.array = newArray
}

// swap exchanges the elements at slots a and b.
func (H *Heap) swap(a, b int) {
	H.array[a], H.array[b] = H.array[b], H.array[a]
}

// parent returns the parent slot of index.
func (H *Heap) parent(index int) int {
	return index / 2
}

// swimUp moves the element at index up until the heap property holds.
func (H *Heap) swimUp(index int) {
	// direct comparison instead of the original subtraction, which could
	// overflow for extreme int values
	for index != 1 && H.array[index] < H.array[H.parent(index)] {
		H.swap(index, H.parent(index))
		index = H.parent(index)
	}
}

// left returns the left-child slot of index.
func (H *Heap) left(index int) int {
	return 2 * index
}

// right returns the right-child slot of index.
func (H *Heap) right(index int) int {
	return 2*index + 1
}

// minHeapify sinks the element at index until the heap property holds.
func (H *Heap) minHeapify(index int) {
	l := H.left(index)
	r := H.right(index)
	min := index
	if l < H.count+1 && H.array[l] < H.array[min] {
		min = l
	}
	if r < H.count+1 && H.array[r] < H.array[min] {
		min = r
	}
	if index != min {
		H.swap(min, index)
		H.minHeapify(min)
	}
}

// sinkDown is an alias for minHeapify.
func (H *Heap) sinkDown(index int) {
	H.minHeapify(index)
}

// NaiveMergeHeap inserts all elements of heap1 into the caller heap, one
// by one. Duplicates are rejected automatically.
func (H *Heap) NaiveMergeHeap(heap1 *Heap) {
	// some checking
	if heap1 == nil {
		return
	}
	array := heap1.Array()
	// BUG FIX: range over the VALUES. The original `for value := range
	// array` iterated over the indices 0..len-1 and inserted those
	// instead of the heap's elements.
	for _, value := range array {
		H.Insert(value)
	}
}

// MergeHeap merges two heaps in linear time using bottom-up heapify.
func (H *Heap) MergeHeap(heap1 *Heap) {
	// some checking
	if heap1 == nil {
		return
	}
	array1 := heap1.Array()
	// +1 because slot 0 of the backing array is unused; the original
	// capacity request was one short.
	H.EnsureCapacity(H.Count() + heap1.Count() + 1)
	index := H.Count() + 1
	for i := 0; i < heap1.Count(); i++ {
		H.array[index] = array1[i]
		index++
	}
	H.count = H.Count() + heap1.Count()
	// bottom-up heapify: O(n)
	for i := H.count / 2; i > 0; i-- {
		H.minHeapify(i)
	}
}
data/linear/heap.go
0.668123
0.411111
heap.go
starcoder
package primitives

import (
	"bytes"
	"fmt"
)

// NOTE(review): the ".mb.go" suffix and the strictly repetitive shape of
// this file suggest it is machine-generated — confirm before hand-editing.
// Every type below is a named []byte with the same three methods:
//   String    — lowercase hex rendering of the raw bytes
//   Equal     — byte-wise comparison via bytes.Equal
//   KeyForMap — the raw bytes as a string, suitable as a map key

// Sha256 holds a SHA-256 hash value as raw bytes.
type Sha256 []byte

func (x Sha256) String() string { return fmt.Sprintf("%x", []byte(x)) }

func (x Sha256) Equal(y Sha256) bool { return bytes.Equal(x, y) }

func (x Sha256) KeyForMap() string { return string(x) }

// Keccak256 holds a Keccak-256 hash value as raw bytes.
type Keccak256 []byte

func (x Keccak256) String() string { return fmt.Sprintf("%x", []byte(x)) }

func (x Keccak256) Equal(y Keccak256) bool { return bytes.Equal(x, y) }

func (x Keccak256) KeyForMap() string { return string(x) }

// Ed25519Sig holds an Ed25519 signature as raw bytes.
type Ed25519Sig []byte

func (x Ed25519Sig) String() string { return fmt.Sprintf("%x", []byte(x)) }

func (x Ed25519Sig) Equal(y Ed25519Sig) bool { return bytes.Equal(x, y) }

func (x Ed25519Sig) KeyForMap() string { return string(x) }

// Ed25519PublicKey holds an Ed25519 public key as raw bytes.
type Ed25519PublicKey []byte

func (x Ed25519PublicKey) String() string { return fmt.Sprintf("%x", []byte(x)) }

func (x Ed25519PublicKey) Equal(y Ed25519PublicKey) bool { return bytes.Equal(x, y) }

func (x Ed25519PublicKey) KeyForMap() string { return string(x) }

// Ed25519PrivateKey holds an Ed25519 private key as raw bytes.
type Ed25519PrivateKey []byte

func (x Ed25519PrivateKey) String() string { return fmt.Sprintf("%x", []byte(x)) }

func (x Ed25519PrivateKey) Equal(y Ed25519PrivateKey) bool { return bytes.Equal(x, y) }

func (x Ed25519PrivateKey) KeyForMap() string { return string(x) }

// Bls1Sig holds a BLS signature as raw bytes.
type Bls1Sig []byte

func (x Bls1Sig) String() string { return fmt.Sprintf("%x", []byte(x)) }

func (x Bls1Sig) Equal(y Bls1Sig) bool { return bytes.Equal(x, y) }

func (x Bls1Sig) KeyForMap() string { return string(x) }

// Bls1PublicKey holds a BLS public key as raw bytes.
type Bls1PublicKey []byte

func (x Bls1PublicKey) String() string { return fmt.Sprintf("%x", []byte(x)) }

func (x Bls1PublicKey) Equal(y Bls1PublicKey) bool { return bytes.Equal(x, y) }

func (x Bls1PublicKey) KeyForMap() string { return string(x) }

// Bls1PrivateKey holds a BLS private key as raw bytes.
type Bls1PrivateKey []byte

func (x Bls1PrivateKey) String() string { return fmt.Sprintf("%x", []byte(x)) }

func (x Bls1PrivateKey) Equal(y Bls1PrivateKey) bool { return bytes.Equal(x, y) }

func (x Bls1PrivateKey) KeyForMap() string { return string(x) }

// EcdsaSecp256K1Sig holds an ECDSA secp256k1 signature as raw bytes.
type EcdsaSecp256K1Sig []byte

func (x EcdsaSecp256K1Sig) String() string { return fmt.Sprintf("%x", []byte(x)) }

func (x EcdsaSecp256K1Sig) Equal(y EcdsaSecp256K1Sig) bool { return bytes.Equal(x, y) }

func (x EcdsaSecp256K1Sig) KeyForMap() string { return string(x) }

// EcdsaSecp256K1PublicKey holds an ECDSA secp256k1 public key as raw bytes.
type EcdsaSecp256K1PublicKey []byte

func (x EcdsaSecp256K1PublicKey) String() string { return fmt.Sprintf("%x", []byte(x)) }

func (x EcdsaSecp256K1PublicKey) Equal(y EcdsaSecp256K1PublicKey) bool { return bytes.Equal(x, y) }

func (x EcdsaSecp256K1PublicKey) KeyForMap() string { return string(x) }

// EcdsaSecp256K1PrivateKey holds an ECDSA secp256k1 private key as raw bytes.
type EcdsaSecp256K1PrivateKey []byte

func (x EcdsaSecp256K1PrivateKey) String() string { return fmt.Sprintf("%x", []byte(x)) }

func (x EcdsaSecp256K1PrivateKey) Equal(y EcdsaSecp256K1PrivateKey) bool { return bytes.Equal(x, y) }

func (x EcdsaSecp256K1PrivateKey) KeyForMap() string { return string(x) }

// BloomFilter holds the raw bit data of a Bloom filter.
type BloomFilter []byte

func (x BloomFilter) String() string { return fmt.Sprintf("%x", []byte(x)) }

func (x BloomFilter) Equal(y BloomFilter) bool { return bytes.Equal(x, y) }

func (x BloomFilter) KeyForMap() string { return string(x) }
types/go/primitives/crypto.mb.go
0.658088
0.483526
crypto.mb.go
starcoder
package scene

import (
	"fmt"
	"github.com/andrepxx/sydney/color"
	"github.com/andrepxx/sydney/coordinates"
	"image"
	"math"
)

/*
 * A scene is a plane onto which points are drawn.
 */
type Scene interface {
	Aggregate(data []coordinates.Cartesian)
	Clear()
	Render(mapping color.Mapping) (*image.NRGBA, error)
	Spread(amount uint8)
}

/*
 * Data structure representing a scene.
 */
type sceneStruct struct {
	bins   []uint64
	height uint32
	maxX   float64
	maxY   float64
	minX   float64
	minY   float64
	width  uint32
}

/*
 * Calculate a bin index based on a pair of (integer) coordinates.
 * The second return value is false when the coordinates fall outside
 * the scene.
 */
func (this *sceneStruct) index(x uint32, y uint32) (uint64, bool) {
	width := this.width
	height := this.height

	/*
	 * Check if coordinates are in valid range.
	 */
	if (x >= width) || (y >= height) {
		return 0, false
	} else {
		width64 := uint64(width)
		x64 := uint64(x)
		y64 := uint64(y)
		idx := (width64 * y64) + x64
		return idx, true
	}
}

/*
 * Aggregate data into the scene, incrementing the bin each point
 * falls into.
 */
func (this *sceneStruct) Aggregate(data []coordinates.Cartesian) {
	minX := this.minX
	maxX := this.maxX
	width := this.width
	widthFloat := float64(width)
	scaleX := widthFloat / (maxX - minX)
	minY := this.minY
	maxY := this.maxY
	height := this.height
	heightFloat := float64(height)
	scaleY := heightFloat / (maxY - minY)

	/*
	 * Iterate over all data points.
	 */
	for i := range data {
		point := &data[i]
		x := point.X()
		y := point.Y()

		/*
		 * Check if point lies within plot bounds.
		 */
		if ((x >= minX) && (x < maxX)) && ((y > minY) && (y <= maxY)) {
			plotX := uint32((x - minX) * scaleX)
			plotY := uint32((maxY - y) * scaleY)
			idx, ok := this.index(plotX, plotY)

			/*
			 * Check if point can be mapped to bin.
			 */
			if ok {
				val := this.bins[idx]

				/*
				 * Saturate counts at MaxUint32 even though the bins are
				 * uint64.
				 *
				 * NOTE(review): this looks like a deliberate cap (perhaps
				 * to leave headroom for Spread) — confirm before raising.
				 */
				if val < math.MaxUint32 {
					this.bins[idx] = val + 1
				}
			}
		}
	}
}

/*
 * Clear all data from the scene.
 */
func (this *sceneStruct) Clear() {
	bins := this.bins

	/*
	 * Reset the count in each bin to zero.
	 */
	for i := range bins {
		bins[i] = 0
	}
}

/*
 * Render a set of data points into an image using a color mapping.
 *
 * Generates an NRGBA-image of width times height pixels displaying
 * the data points with minX <= x < maxX and minY <= y < maxY.
 */
func (this *sceneStruct) Render(mapping color.Mapping) (*image.NRGBA, error) {

	/*
	 * Verify that color mapping is non-nil.
	 */
	if mapping == nil {
		return nil, fmt.Errorf("%s", "Color mapping must not be nil when rendering an image!")
	} else {
		data := this.bins
		colors := mapping.Map(data)

		/*
		 * Verify that color mapping returned non-nil slice.
		 */
		if colors == nil {
			return nil, fmt.Errorf("%s", "Color mapping must not map to nil slice when rendering an image!")
		} else {
			width := this.width
			widthInt := int(width)
			height := this.height
			heightInt := int(height)
			numColors := len(colors)
			expectedNumColors := widthInt * heightInt

			/*
			 * Verify that the color mapping returned a result of the
			 * expected length.
			 */
			if numColors != expectedNumColors {

				/*
				 * BUG FIX: the original call passed "%s" as the format
				 * and the real format string as an argument, so the %d
				 * verbs were never expanded and the counts appeared as
				 * "%!(EXTRA ...)" noise.
				 */
				return nil, fmt.Errorf("Color mapping returned %d pixels, but expected %d for a (%d * %d) image.", numColors, expectedNumColors, width, height)
			} else {
				rect := image.Rect(0, 0, widthInt, heightInt)
				img := image.NewNRGBA(rect)

				/*
				 * Iterate over the rows of the image.
				 */
				for y := uint32(0); y < height; y++ {
					yy := int(y)

					/*
					 * Iterate over the columns of the image and set pixel data.
					 */
					for x := uint32(0); x < width; x++ {
						xx := int(x)
						idx, ok := this.index(x, y)

						/*
						 * Check if index is valid.
						 */
						if ok {
							c := colors[idx]
							img.SetNRGBA(xx, yy, c)
						}
					}
				}

				return img, nil
			}
		}
	}
}

/*
 * Spreads data over multiple cells: each target bin becomes the
 * (saturating) sum of all source bins within `amount` cells in each
 * direction.
 */
func (this *sceneStruct) Spread(amount uint8) {

	/*
	 * Only spread if needed.
	 */
	if amount > 0 {
		bins := this.bins
		numBins := len(bins)
		binsNew := make([]uint64, numBins)
		height := this.height
		width := this.width
		amount64 := int64(amount)

		/*
		 * Iterate over the target rows.
		 */
		for y := uint32(0); y < height; y++ {
			y64 := int64(y)

			/*
			 * Iterate over the target columns.
			 */
			for x := uint32(0); x < width; x++ {
				x64 := int64(x)
				sum := uint64(0)

				/*
				 * Spread across rows.
				 */
				for j := -amount64; j <= amount64; j++ {

					/*
					 * Spread across columns.
					 */
					for i := -amount64; i <= amount64; i++ {
						xx64 := x64 + i
						yy64 := y64 + j

						/*
						 * Check if values fit in a uint32.
						 *
						 * BUG FIX: the original used strict `> 0`, which
						 * silently dropped every contribution from row 0
						 * and column 0, and mixed `<=` / `<` for the
						 * upper bounds. Row/column 0 are valid sources;
						 * index() below still rejects anything past the
						 * scene edge.
						 */
						if xx64 >= 0 && xx64 <= math.MaxUint32 && yy64 >= 0 && yy64 <= math.MaxUint32 {
							xx := uint32(xx64)
							yy := uint32(yy64)
							idxSource, ok := this.index(xx, yy)
							sumOld := sum

							/*
							 * Check if index is in range.
							 */
							if ok {
								sum += bins[idxSource]

								/*
								 * Check for overflow.
								 */
								if sum < sumOld {
									sum = math.MaxUint64
								}
							}
						}
					}
				}

				idxTarget, ok := this.index(x, y)

				/*
				 * Check if index was calculated.
				 */
				if ok {
					binsNew[idxTarget] = sum
				}
			}
		}

		this.bins = binsNew
	}
}

/*
 * Create a new scene.
 */
func Create(width uint32, height uint32, minX float64, maxX float64, minY float64, maxY float64) Scene {
	width64 := uint64(width)
	height64 := uint64(height)
	numBins := width64 * height64
	bins := make([]uint64, numBins)

	/*
	 * Create scene data structure.
	 */
	scn := sceneStruct{
		bins:   bins,
		height: height,
		maxX:   maxX,
		maxY:   maxY,
		minX:   minX,
		minY:   minY,
		width:  width,
	}

	return &scn
}
scene/scene.go
0.690872
0.466056
scene.go
starcoder
package bitarray

import (
	"math"
	"math/bits"
)

// Bit is a single bit value stored in a uint64.
type Bit = uint64

const (
	Zero = Bit(iota)
	One
)

// BitArray is an array data structure that compactly stores bits.
// Bits externally represented as `bool` are stored internally as `uint64`s.
// The total number of bits stored is set at creation and is immutable.
type BitArray struct {
	// buf is a backing array that bits writes into by default when the no. of bits requested to allocate is
	// < 512. Only if more is asked, we'll skip buf and allocate directly into bits
	buf  [8]Bit
	bits []Bit
	n    int // no. of bits
}

// New creates a new BitArray of `n` bits. If n <= 512, bits reuses the
// inline buffer.
//
// NOTE(review): because the result is returned by value while ba.bits may
// alias ba.buf, copying a BitArray afterwards keeps bits pointing at the
// original buffer — treat BitArray as move-only, or use Copy.
func New(n int) (ba BitArray) {
	nblk := nbitsToNblks(n)
	ba.n = n
	ba.bits = ba.buf[:]
	if nblk <= 8 {
		ba.bits = ba.buf[:nblk]
		return
	}
	ba.bits = append(ba.bits, make([]Bit, nblk-len(ba.buf))...)
	return
}

// Copy copies src into dst. Both must have the same size.
func Copy(dst, src *BitArray) {
	if dst != src && src != nil {
		if src.n != dst.n {
			panic("size of bit arrays must be the same for copy")
		}
		if dst.n == 0 {
			// nothing to do here, since the source has nothing to copy from
			return
		}
		copy(dst.bits, src.bits)
	}
}

// FromStr creates a BitArray from a bit string; '1' runes set the
// corresponding bit, everything else leaves it clear.
func FromStr(bs string) BitArray {
	ba := New(len(bs))
	for i, b := range bs {
		if b == '1' {
			ba.Set(i)
		}
	}
	return ba
}

// FromUint64 creates a 64-bit BitArray from the bit representation of u.
func FromUint64(u uint64) BitArray {
	ba := New(64)
	ba.bits[0] = u
	return ba
}

// Size returns the no. of bits stored.
func (ba *BitArray) Size() int { return ba.n }

// Set sets the bit at position k.
func (ba *BitArray) Set(k int) { bi, si := biandsi(k); set(&ba.bits[bi], si) }

// SetAll sets all the bits.
func (ba *BitArray) SetAll() {
	for i := range ba.bits {
		ba.bits[i] = math.MaxUint64
	}
	// BUG FIX: the original left the spare high bits of the last block
	// set, so Cnt() and String() over-reported after SetAll when n is
	// not a multiple of 64.
	ba.maskTail()
}

// maskTail clears the unused high bits of the last block so that no
// operation ever observes bits at positions >= n.
func (ba *BitArray) maskTail() {
	if r := uint64(ba.n) % 64; r != 0 && len(ba.bits) > 0 {
		ba.bits[len(ba.bits)-1] &= (1 << r) - 1
	}
}

// Clr clears the bit at position k.
func (ba *BitArray) Clr(k int) { bi, si := biandsi(k); clr(&ba.bits[bi], si) }

// ClrAll clears all the bits.
func (ba *BitArray) ClrAll() {
	for i := range ba.bits {
		ba.bits[i] = 0
	}
}

// ChkSet returns the value of the bit at position k before setting it.
func (ba *BitArray) ChkSet(k int) (b bool) {
	bi, si := biandsi(k)
	u := &ba.bits[bi]
	b = chk(*u, si) != 0
	if !b {
		set(u, si)
	}
	return
}

// ChkClr returns the value of the bit at position k before clearing it.
func (ba *BitArray) ChkClr(k int) (b bool) {
	bi, si := biandsi(k)
	u := &ba.bits[bi]
	b = chk(*u, si) != 0
	if b {
		clr(u, si)
	}
	return
}

// Tgl toggles the bit at position k.
func (ba *BitArray) Tgl(k int) {
	bi, si := biandsi(k)
	ba.bits[bi] ^= 1 << si
}

// Cnt returns the number of set bits.
func (ba *BitArray) Cnt() (n int) {
	for _, b := range ba.bits {
		n += bits.OnesCount64(b)
	}
	return
}

// Chk returns the value of the bit at position k.
func (ba *BitArray) Chk(k int) bool {
	bi, si := biandsi(k)
	return chk(ba.bits[bi], si) != 0
}

// Put sets the value of the bit at position k to v.
func (ba *BitArray) Put(k int, v Bit) {
	bi, si := biandsi(k)
	put(&ba.bits[bi], si, v)
}

// Swap swaps the value of bit at position k with v. On return, v contains
// the old value.
func (ba *BitArray) Swap(k int, b *Bit) {
	bi, si := biandsi(k)
	t := &ba.bits[bi]
	ob := chk(*t, si)
	if ob == *b {
		return
	}
	// BUG FIX: the original wrote the OLD bit back (`put(t, si, ob)`),
	// which is a no-op — the array never received the new value. Store
	// the caller's value, then hand the old one back.
	put(t, si, *b)
	*b = ob
}

// String renders the bits as a '0'/'1' string, position 0 first.
func (ba *BitArray) String() string {
	sb := make([]byte, ba.n)
	for i := range sb {
		sb[i] = '0'
		if ba.Chk(i) {
			sb[i] = '1'
		}
	}
	return string(sb)
}

// nbitsToNblks returns the number of 64-bit blocks needed to hold n bits.
func nbitsToNblks(n int) int { return int(math.Ceil(float64(n) / 64)) }

func set(u *uint64, si uint64) { *u |= 1 << si }

func clr(u *uint64, si uint64) { *u &= ^(1 << si) }

func chk(u uint64, si uint64) Bit { return (u >> si) & 1 }

func put(u *uint64, si uint64, b Bit) { *u = (*u & ^(1 << si)) | (b << si) }

// biandsi splits a bit position into (block index, shift inside block).
func biandsi(k int) (uint64, uint64) {
	i := uint64(k)
	return i / 64, i % 64
}
ba.go
0.757525
0.518241
ba.go
starcoder
package misc import ( "github.com/dreading/gospecfunc/integrals/internal/toms" ) // Abramowitz computes the Abramowitz function // ∫ 0 to ∞ of t^order exp( -t*t - x/t ) dt func Abramowitz(order int, x float64) float64 { switch order { case 0: return toms.ABRAM0(x) case 1: return toms.ABRAM1(x) case 2: return toms.ABRAM2(x) default: panic("Order must be 0, 1 or 2") } } // Clausen calculates the Clausen's integral, // ∫ 0 to x of (-ln(2*sin(t/2))) dt func Clausen(x float64) float64 { return toms.CLAUSN(x) } // Debye calculates the Debye function, defined as // order*∫ 0 to x of t^order/(exp(t)-1) dt] / x^order func Debye(order int, x float64) float64 { switch order { case 1: return toms.DEBYE1(x) case 2: return toms.DEBYE2(x) case 3: return toms.DEBYE3(x) case 4: return toms.DEBYE4(x) default: panic("Order must be between 1 and 4") } } // Goodst calculates the function defined as // ∫ 0 to ∞ of ( exp(-u*u)/(u+x) ) du func Goodst(x float64) float64 { return toms.GOODST(x) } // Lobach calculates the the Lobachewsky function L(x) // ∫ 0 to x of -ln | cos t | dt func Lobach(x float64) float64 { return toms.LOBACH(x) } // Strom calculates Stromgren's integral // ∫ 0 to x { t^7 exp(2t)/[exp(t)-1]^3 } dt func Strom(x float64) float64 { return toms.STROM(x) } // Synch calculates the synchrotron radiation function // For order 1: // x ∫ 0 to infinity { K(5/3)(t) } dt // where K(5/3) is a modified Bessel function of order 5/3. // For order 2: // x * K(2/3)(x) // where K(2/3) is a modified Bessel function of order 2/3. 
func Synch(order int, x float64) float64 { switch order { case 1: return toms.SYNCH1(x) case 2: return toms.SYNCH2(x) default: panic("Order must be 1 or 2") } } // Transport calculates the transport integral of order n // ∫ 0 to x {t^n exp(t)/[exp(t)-1]^2 } dt func Transport(order int, x float64) float64 { switch order { case 2: return toms.TRAN02(x) case 3: return toms.TRAN03(x) case 4: return toms.TRAN04(x) case 5: return toms.TRAN05(x) case 6: return toms.TRAN06(x) case 7: return toms.TRAN07(x) case 8: return toms.TRAN08(x) case 9: return toms.TRAN09(x) default: panic("order must be between 2 and 9") } } // Struve calculates the Struve function of order 0 and 1, H0(x) and H1(x) respectively. // H0(x) = (2/pi) ∫ {0 to pi/2} sin(x cos(t)) dt // and // H1(x) = (2/pi) ∫ {0 to pi/2} sin( x cos(t))*sin^2 t dt func Struve(order int, x float64) float64 { switch order { case 0: return toms.STRVH0(x) case 1: return toms.STRVH1(x) default: panic("order must be 1 or 2") } } // StruveModified calculates the modified Struve function of order 0 and 1, L0(x) and L1(x) respectively. 
// defined as the solution of the second-order equation // x*D(Df) + Df - x*f = 2x/pi // and // x^2*D(Df) + x*Df - (x^2+1)f = 2*x^2/pi func StruveModified(order int, x float64) float64 { switch order { case 0: return toms.STRVL0(x) case 1: return toms.STRVL1(x) default: panic("order must be 1 or 2") } } // BesselMinusStruveModified calculates // Ii(x) - Li(x) for i =1,2 // where Ii(x) is the modified Bessel function of the first kind of // order i, and Li(x) is the modified Struve function of order func BesselMinusStruveModified(order int, x float64) float64 { switch order { case 0: return toms.I0ML0(x) case 1: return toms.I1ML1(x) default: panic("order must be 1 or 2") } } // AtnInt calculates the value of the inverse-tangent integral defined by // ∫ 0 to x ( (arctan t)/t ) dt func AtnInt(x float64) float64 { return toms.ATNINT(x) } // Exp3 calculates the value of ∫ 0 to x (exp(-t*t*t)) dt func Exp3(x float64) float64 { return toms.EXP3(x) } // I0Int calculates the value of ∫ 0 to x I0(t) dt func I0Int(x float64) float64 { return toms.I0INT(x) } // J0Int calculates the value of ∫ 0 to x J0(t) dt func J0Int(x float64) float64 { return toms.J0INT(x) } // Y0Int calculates the value of ∫ 0 to x Y0(t) dt func Y0Int(x float64) float64 { return toms.Y0INT(x) } // K0Int calculates the value of ∫ 0 to x K0(t) dt func K0Int(x float64) float64 { return toms.K0INT(x) } // AiInt calculates the integral of the Airy function Ai, // ∫ 0 to x Ai(t) dt func AiInt(x float64) float64 { return toms.AIRINT(x) } // BiInt calculates the integral of the Airy function Bi, // ∫ 0 to x Bi(t) dt func BiInt(x float64) float64 { return toms.BIRINT(x) }
integrals/integrals.go
0.718496
0.700267
integrals.go
starcoder
package main

import (
	"fmt"
	"math"
	"math/bits"
	"sort"

	"github.com/pointlander/datum/iris"
)

// RealLayer is a neural network layer
type RealLayer struct {
	Columns int       // number of input columns; mask derivation assumes this is a power of two
	Weights []float32 // one stored weight per output neuron
	Biases  []float32 // one bias per output neuron
	Rand    Rand      // per-layer RNG; Inference draws from it, so call order matters
}

// RealNetwork is a neural network
type RealNetwork []RealLayer

// Inference performs inference on a neural network.
// For each neuron, exactly one randomly-chosen input uses the stored
// weight; all other inputs use freshly drawn random weights scaled by a
// He-style factor, followed by a sigmoid. The final layer's activations
// are copied into outputs.
// NOTE(review): because weights are partly random per call, repeated
// inference on the same input is only reproducible through the layer
// RNG state — confirm this is intended.
func (n RealNetwork) Inference(inputs, outputs []float32) {
	last := len(n) - 1
	for i, layer := range n {
		rnd := layer.Rand
		columns := len(outputs)
		if i < len(n)-1 {
			columns = n[i+1].Columns
		}
		// mask selects a random input index in [0, Columns) when Columns is a power of two
		mask, values, factor := uint32((1<<bits.TrailingZeros(uint(layer.Columns)))-1), make([]float32, columns), float32(math.Sqrt(2/float64(columns)))
		for j, weight := range layer.Weights {
			sum, index := layer.Biases[j], rnd.Uint32()&mask
			for k, input := range inputs {
				if k == int(index) {
					sum += input * weight
				} else {
					sum += input * (2*rnd.Float32() - 1) * factor
				}
			}
			// logistic sigmoid activation
			e := float32(math.Exp(float64(sum)))
			values[j] = e / (e + 1)
		}
		if i == last {
			copy(outputs, values)
		} else {
			inputs = values
		}
	}
}

// Copy copies a network, deep-copying each layer's weight and bias
// slices (the RNG state is copied by value).
func (n RealNetwork) Copy() RealNetwork {
	var network RealNetwork
	for _, layer := range n {
		l := RealLayer{
			Columns: layer.Columns,
			Weights: make([]float32, len(layer.Weights)),
			Biases:  make([]float32, len(layer.Biases)),
			Rand:    layer.Rand,
		}
		copy(l.Weights, layer.Weights)
		copy(l.Biases, layer.Biases)
		network = append(network, l)
	}
	return network
}

// RealNetworkModel is the real network model: it evolves a population of
// 4-4-3 networks against the Fisher iris dataset with a genetic
// algorithm (crossover + mutation over 128 generations) and returns the
// final miss rate of the best genome (lower is better).
// Relies on package-level LFSRInit, NumGenomes and the Rand type.
func RealNetworkModel(seed int) float64 {
	rnd := Rand(LFSRInit + seed)
	type Genome struct {
		Network RealNetwork
		Fitness float32
	}
	var genomes []Genome
	// addNetwork appends a fresh 4-input / 3-output two-layer genome,
	// initialising weights uniformly in (-factor, factor).
	addNetwork := func(i int) {
		var network RealNetwork
		layer := RealLayer{
			Columns: 4,
			Weights: make([]float32, 4),
			Biases:  make([]float32, 4),
			Rand:    Rand(LFSRInit + i + seed + NumGenomes),
		}
		factor := float32(math.Sqrt(2 / float64(4)))
		for i := range layer.Weights {
			layer.Weights[i] = (2*rnd.Float32() - 1) * factor
		}
		network = append(network, layer)
		layer = RealLayer{
			Columns: 4,
			Weights: make([]float32, 3),
			Biases:  make([]float32, 3),
			Rand:    Rand(LFSRInit + i + seed + 2*NumGenomes),
		}
		factor = float32(math.Sqrt(2 / float64(3)))
		for i := range layer.Weights {
			layer.Weights[i] = (2*rnd.Float32() - 1) * factor
		}
		network = append(network, layer)
		genomes = append(genomes, Genome{
			Network: network,
		})
	}
	for i := 0; i < NumGenomes; i++ {
		addNetwork(i)
	}
	datum, err := iris.Load()
	if err != nil {
		panic(err)
	}
	inputs, outputs := make([]float32, 4), make([]float32, 3)
	// get picks a parent index, favouring genomes with LOW fitness
	// (loss); it loops until a candidate passes the stochastic test.
	get := func() int {
		for {
			for i, genome := range genomes {
				if rnd.Float32() > genome.Fitness {
					return i
				}
			}
		}
	}
	i := 0
	for {
		// Evaluate fitness: normalised RMS loss over the whole dataset.
		for j, genome := range genomes {
			sum := float32(0)
			for _, flower := range datum.Fisher {
				for k, value := range flower.Measures {
					inputs[k] = float32(value)
				}
				genome.Network.Inference(inputs, outputs)
				expected := make([]float32, 3)
				expected[iris.Labels[flower.Label]] = 1
				loss := float32(0)
				for l, output := range outputs {
					diff := expected[l] - output
					loss += diff * diff
				}
				loss = float32(math.Sqrt(float64(loss)))
				sum += loss
			}
			// normalise so fitness lands in [0, 1] for the `get` sampler
			sum /= float32(len(datum.Fisher)) * float32(math.Sqrt(3))
			genomes[j].Fitness = sum
		}
		// Selection: keep the NumGenomes fittest (lowest loss).
		sort.Slice(genomes, func(i, j int) bool {
			return genomes[i].Fitness < genomes[j].Fitness
		})
		genomes = genomes[:NumGenomes]
		i++
		if i > 127 {
			break
		}
		// Crossover: swap one weight or bias between copies of two parents.
		for i := 0; i < 256; i++ {
			a, b := get(), get()
			layer, vector, valueA, valueB := rnd.Uint32()&1, rnd.Uint32()&1, rnd.Uint32()&3, rnd.Uint32()&3
			networkA, networkB := genomes[a].Network.Copy(), genomes[b].Network.Copy()
			layerA, layerB := networkA[layer], networkB[layer]
			if layer == 1 {
				// the output layer only has 3 weights/biases; re-roll out-of-range picks
				for valueA > 2 {
					valueA = rnd.Uint32() & 3
				}
				for valueB > 2 {
					valueB = rnd.Uint32() & 3
				}
			}
			if vector == 0 {
				layerA.Weights[valueA], layerB.Weights[valueB] = layerB.Weights[valueB], layerA.Weights[valueA]
			} else {
				layerA.Biases[valueA], layerB.Biases[valueB] = layerB.Biases[valueB], layerA.Biases[valueA]
			}
			genomes = append(genomes, Genome{
				Network: networkA,
			})
			genomes = append(genomes, Genome{
				Network: networkB,
			})
		}
		// Mutation: perturb one weight or bias of a copy of each survivor.
		for i := 0; i < NumGenomes; i++ {
			layer, vector, value := rnd.Uint32()&1, rnd.Uint32()&1, rnd.Uint32()&3
			network := genomes[i].Network.Copy()
			l := network[layer]
			if layer == 1 {
				for value > 2 {
					value = rnd.Uint32() & 3
				}
			}
			if vector == 0 {
				l.Weights[value] += ((2 * rnd.Float32()) - 1)
			} else {
				l.Biases[value] += ((2 * rnd.Float32()) - 1)
			}
			genomes = append(genomes, Genome{
				Network: network,
			})
		}
	}
	// Final evaluation: classification miss rate of the best genome.
	network := genomes[0].Network
	misses, total := 0, 0
	for _, flower := range datum.Fisher {
		for k, value := range flower.Measures {
			inputs[k] = float32(value)
		}
		network.Inference(inputs, outputs)
		max, index := float32(0), 0
		for j, output := range outputs {
			if output > max {
				max, index = output, j
			}
		}
		if index != iris.Labels[flower.Label] {
			misses++
		}
		total++
	}
	quality := float64(misses) / float64(total)
	fmt.Println(genomes[0].Fitness, quality)
	return quality
}
real.go
0.545044
0.551876
real.go
starcoder
package hashsets // New factory that creates a hash set func New[T comparable](values ...T) *HashSet[T] { set := HashSet[T]{data: make(map[T]struct{}, len(values))} set.Add(values...) return &set } // HashSet datastructure type HashSet[T comparable] struct { data map[T]struct{} } // Add adds values to the set func (s *HashSet[T]) Add(values ...T) { for _, value := range values { s.data[value] = struct{}{} } } // Remove removes values from the set func (s *HashSet[T]) Remove(values ...T) { for _, value := range values { delete(s.data, value) } } // Contains checks if the value is in the set func (s *HashSet[T]) Contains(value T) bool { _, exists := s.data[value] return exists } // ContainsAll checks if all values are in the set func (s *HashSet[T]) ContainsAll(values ...T) bool { for _, value := range values { if !s.Contains(value) { return false } } return true } // ContainsAny checks if any of the values are in the set func (s *HashSet[T]) ContainsAny(values ...T) bool { for _, value := range values { if s.Contains(value) { return true } } return false } // Merge the two sets func (s *HashSet[T]) Merge(sets ...*HashSet[T]) { for _, set := range sets { for _, value := range set.GetValues() { s.Add(value) } } } // Clear clears set func (s *HashSet[T]) Clear() { s.data = make(map[T]struct{}) } // GetValues returns values func (s *HashSet[T]) GetValues() []T { values := make([]T, 0, s.Size()) for key := range s.data { values = append(values, key) } return values } // IsEmpty checks if the set is empty func (s *HashSet[T]) IsEmpty() bool { return s.Size() == 0 } // Size returns size of the set func (s *HashSet[T]) Size() int { return len(s.data) } // Common set functions // Copy makes an identical copy of the set func (s *HashSet[T]) Copy() *HashSet[T] { return New[T](s.GetValues()...) 
} // Union makes a set that has all of the elements in either of two sets func (s *HashSet[T]) Union(ss *HashSet[T]) *HashSet[T] { new := s.Copy() new.Merge(ss) return new } // Intersection makes a set that has only the elements common to both of two sets func (s *HashSet[T]) Intersection(ss *HashSet[T]) *HashSet[T] { new := s.Copy() for _, v := range new.GetValues() { if !ss.Contains(v) { new.Remove(v) } } return new } // SymmetricDifference makes a set that has elements that are in one of two sets, but not both func (s *HashSet[T]) SymmetricDifference(ss *HashSet[T]) *HashSet[T] { new := &HashSet[T]{make(map[T]struct{}, s.Size())} for _, v := range s.GetValues() { if !ss.Contains(v) { new.Add(v) } } for _, v := range ss.GetValues() { if !s.Contains(v) { new.Add(v) } } return new } // Subtraction makes a set with the elements that are in the first set, but not the second func (s *HashSet[T]) Subtraction(ss *HashSet[T]) *HashSet[T] { new := s.Copy() for _, v := range ss.GetValues() { new.Remove(v) } return new }
datastructures/sets/hashsets/hash_set.go
0.804252
0.615059
hash_set.go
starcoder
package generator

import (
	"math"
	"math/rand"
	"time"

	"github.com/kwoodhouse93/audio-playground/source"
	"github.com/kwoodhouse93/audio-playground/types"
	"github.com/kwoodhouse93/audio-playground/utils"
)

// UniformNoiseM returns a mono uniform noise generator: each 2-channel
// sample carries the same random value in [-1, 1) on both channels.
func UniformNoiseM() source.Source {
	r := rand.New(rand.NewSource(time.Now().UnixNano()))
	return source.Cached(func(step int) types.Sample {
		// step is unused: noise does not depend on position in the stream.
		out := types.NewSample(2)
		out[0] = (r.Float32() * 2) - 1 // Float32 is [0,1); scale to [-1,1)
		out[1] = out[0]                // duplicate left channel -> mono
		return out
	})
}

// UniformNoiseS returns a stereo uniform noise generator: each channel
// gets an independent random value in [-1, 1).
func UniformNoiseS() source.Source {
	r := rand.New(rand.NewSource(time.Now().UnixNano()))
	return source.Cached(func(step int) types.Sample {
		out := types.NewSample(2)
		out[0] = (r.Float32() * 2) - 1
		out[1] = (r.Float32() * 2) - 1
		return out
	})
}

// SineM returns a mono sine wave generator.
// phase is the starting phase as a fraction of a full cycle.
func SineM(frequency, phase, sampleRate float64) source.Source {
	return applyWaveM(math.Sin, frequency, phase, sampleRate)
}

// SineS returns a stereo sine wave generator with independent frequency
// and phase per channel.
func SineS(frequencyL, frequencyR, phaseL, phaseR, sampleRate float64) source.Source {
	return applyWaveS(math.Sin, frequencyL, frequencyR, phaseL, phaseR, sampleRate)
}

// TriangleM returns a mono triangle wave generator.
func TriangleM(frequency, phase, sampleRate float64) source.Source {
	return applyWaveM(utils.Triangle, frequency, phase, sampleRate)
}

// TriangleS returns a stereo triangle wave generator.
func TriangleS(frequencyL, frequencyR, phaseL, phaseR, sampleRate float64) source.Source {
	return applyWaveS(utils.Triangle, frequencyL, frequencyR, phaseL, phaseR, sampleRate)
}

// SawtoothM returns a mono sawtooth wave generator.
func SawtoothM(frequency, phase, sampleRate float64) source.Source {
	return applyWaveM(utils.Sawtooth, frequency, phase, sampleRate)
}

// SawtoothS returns a stereo sawtooth wave generator.
func SawtoothS(frequencyL, frequencyR, phaseL, phaseR, sampleRate float64) source.Source {
	return applyWaveS(utils.Sawtooth, frequencyL, frequencyR, phaseL, phaseR, sampleRate)
}

// SquareM returns a mono square wave generator with the given duty cycle.
func SquareM(frequency, phase, dutyCycle, sampleRate float64) source.Source {
	return applyWaveM(utils.SquareFunc(dutyCycle), frequency, phase, sampleRate)
}

// SquareS returns a stereo square wave generator with the given duty cycle.
func SquareS(frequencyL, frequencyR, phaseL, phaseR, dutyCycle, sampleRate float64) source.Source {
	return applyWaveS(utils.SquareFunc(dutyCycle), frequencyL, frequencyR, phaseL, phaseR, sampleRate)
}

// applyWaveM builds a mono source that evaluates waveFunc at the current
// phase (scaled to radians) and writes the result to both channels.
//
// NOTE(review): the closure mutates the captured phase on every
// invocation, so correctness assumes source.Cached invokes it exactly
// once per step — confirm against source.Cached's contract.
func applyWaveM(waveFunc func(float64) float64, frequency, phase, sampleRate float64) source.Source {
	stepChange := frequency / sampleRate // phase advance per sample, in cycles
	return source.Cached(func(step int) types.Sample {
		out := types.NewSample(2)
		out[0] = float32(waveFunc(2 * math.Pi * phase))
		// Advance phase by one sample and keep only the fractional part
		// so it stays within [0, 1).
		_, phase = math.Modf(phase + stepChange)
		out[1] = out[0]
		return out
	})
}

// applyWaveS is the stereo counterpart of applyWaveM: each channel keeps
// its own phase accumulator and advances independently.
func applyWaveS(waveFunc func(float64) float64, frequencyL, frequencyR, phaseL, phaseR, sampleRate float64) source.Source {
	stepChangeL := frequencyL / sampleRate
	stepChangeR := frequencyR / sampleRate
	return source.Cached(func(step int) types.Sample {
		out := types.NewSample(2)
		out[0] = float32(waveFunc(2 * math.Pi * phaseL))
		_, phaseL = math.Modf(phaseL + stepChangeL)
		out[1] = float32(waveFunc(2 * math.Pi * phaseR))
		_, phaseR = math.Modf(phaseR + stepChangeR)
		return out
	})
}
generator/generator.go
0.898805
0.428413
generator.go
starcoder
package graph

import (
	"sort"

	"github.com/Tom-Johnston/mamba/ints"
	"github.com/Tom-Johnston/mamba/sortints"
)

//NumberOfCycles returns a slice where the ith element contains the number of cycles of length i.
//Any cycle is contained in a biconnected component so the algorithm first splits the graph into biconnected components. The algorithm involves finding a spanning tree and this implementation doesn't check it finds every vertex so splitting into at least components is necessary. Splitting into biconnected components should help prevent unnecessary XORing when checking all the cycles.
//We find a set of fundamental cycles according to the paper <NAME>, An algorithm for finding a fundamental set of cycles for an undirected linear graph, Comm. ACM 12 (1969), pp. 514-518. We can then find all cycles XORing together every combination of fundamental cycles and ignoring ones which are made of copies of 2 or more disjoint cycles. This is done according to Gibb's Algorithm from ALGORITHMIC APPROACHES TO CIRCUIT ENUMERATION PROBLEMS AND APPLICATIONS by <NAME> available here: http://dspace.mit.edu/bitstream/handle/1721.1/68106/FTL_R_1982_07.pdf.
//This effectively finds every cycle in the graph and could be adapted to output every cycle if required. Remember to switch from the labels in the biconnected component to the labels of the graph and that the edges are not stored in the order in the cycle.
func NumberOfCycles(g EditableGraph) []int {
	n := g.N()
	numberFound := make([]int, n+1)
	if n == 0 {
		return numberFound
	}
	//Find the biconnected components and then perform the count on each one.
	bicoms, _ := BiconnectedComponents(g)
	for _, bicom := range bicoms {
		a := g.InducedSubgraph(bicom)
		n = a.N()
		if n < 3 {
			//Can't have any cycles with fewer than 3 vertices.
			continue
		}
		h := a.Copy() //This is the working copy; edges are removed from it as the spanning tree is grown.
		//The fundamental cycles. They will be stored as a list of edges where the edge (i,j) with i < j is encoded as (j*(j-1))/2 + i.
		fundCycles := make([][]int, 0, 1)
		T := make([]int, n) //T[u] will store the parent vertex of each vertex in the tree (except the root 0). If the vertex v is not in the tree T[v] = -1.
		for i := 1; i < n; i++ {
			T[i] = -1
		}
		depth := make([]int, n) //Depth of the vertices in the tree. No need to set the value for vertices not in the tree as the check is always done on T.
		X := make([]int, 1, n)  //This holds what the paper calls T intersection X, the vertices not yet examined and in the tree. Starts as {0}, the root.
		var v int
		for len(X) > 0 {
			//Pop the next tree vertex to examine.
			X, v = X[:len(X)-1], X[len(X)-1]
			for _, u := range h.Neighbours(v) {
				if T[u] != -1 {
					//u is already in the tree, so (u, v) is a back edge closing a fundamental cycle.
					length := depth[v] - depth[T[u]] + 2 //As noted in the paper the back edge leads to something distance exactly one from the path to v.
					cycle := make([]int, length)         //To make it easy to backtrack given the array of parents the cycle is given as the parent of u, u, v,... parent of u.
					if T[u] < u {
						cycle[0] = (u*(u-1))/2 + T[u]
					} else {
						cycle[0] = (T[u]*(T[u]-1))/2 + u
					}
					if u < v {
						cycle[1] = (v*(v-1))/2 + u
					} else {
						cycle[1] = (u*(u-1))/2 + v
					}
					//Walk up the parent chain from v, encoding each tree edge.
					previous := v
					for i := 2; i < length; i++ {
						if previous < T[previous] {
							cycle[i] = (T[previous]*(T[previous]-1))/2 + previous
						} else {
							cycle[i] = (previous*(previous-1))/2 + T[previous]
						}
						previous = T[previous]
					}
					sort.Ints(cycle) //We sort the edges of the cycle according to their encoding to make it easy to XOR.
					fundCycles = append(fundCycles, cycle)
				} else {
					//u joins the tree with parent v.
					T[u] = v
					X = append(X, u)
					depth[u] = depth[v] + 1
				}
				h.RemoveEdge(u, v)
			}
		}
		if len(fundCycles) == 0 {
			//No need to do anything.
			continue
		}
		//Now find all cycles from the fundamental cycles using Gibb's algorithm.
		S := [][]int{fundCycles[0]}
		Q := [][]int{fundCycles[0]}
		R := [][]int{}
		P := [][]int{} //This is what the reference calls R* but that isn't convenient for programming.
		var V []int
		for i := 1; i < len(fundCycles); i++ {
			//Step 2: XOR the next fundamental cycle with everything found so far.
			//Note: appending to Q inside the range is safe — range iterates over the
			//slice header captured at loop entry.
			for _, t := range Q {
				tmp := sortints.XOR(t, fundCycles[i])
				if len(tmp) != len(t)+len(fundCycles[i]) {
					//They have some intersection
					R = append(R, tmp)
				} else {
					P = append(P, tmp)
				}
				Q = append(Q, tmp)
			}
			//Step 3: discard members of R that contain another member of R (edge-disjoint unions, not simple cycles).
			for j := len(R) - 1; j >= 0; j-- {
				V = R[j]
				for k := 0; k < len(R); k++ {
					if k == j {
						continue
					}
					if sortints.ContainsSorted(V, R[k]) {
						//Swap-delete R[j] and move it to P.
						R[j] = R[len(R)-1]
						R = R[:len(R)-1]
						P = append(P, V)
						break
					}
				}
			}
			//Step 4: the surviving combinations plus the fundamental cycle itself are cycles.
			S = append(S, R...)
			//TODO: Remove this and replace it with just counting the lengths?
			S = append(S, fundCycles[i])
			Q = append(Q, fundCycles[i])
			R = R[:0]
			P = P[:0]
		}
		//We have now found every cycle and we check the length of each one.
		for _, V = range S {
			numberFound[len(V)]++
		}
	}
	return numberFound
}

//NumberOfInducedPaths returns a slice of length n containing the number of induced paths in g which are of length at most k.
//The length of a path is the number of edges in the path. Each path is found once from each end, hence the final division by 2.
func NumberOfInducedPaths(g Graph, maxLength int) []int {
	n := g.N()
	if maxLength < 0 || maxLength > n-1 {
		maxLength = n - 1
	}
	r := make([]int, n)
	//path is a partial induced path under extension. bannedNeighbours are
	//vertices that may not be appended (the path so far and all neighbours of
	//interior vertices) so that every extension stays induced.
	type path struct {
		p                []int
		length           int
		bannedNeighbours sortints.SortedInts
	}
	com := ConnectedComponents(g)
	for _, v := range com {
		h := InducedSubgraph(g, v)
		n := h.N()
		for i := 0; i < n; i++ {
			var p path
			toCheck := make([]path, 1)
			toCheck[0] = path{[]int{i}, 0, []int{i}}
			//Look for paths in h starting at i, depth-first via an explicit stack.
			for len(toCheck) > 0 {
				p, toCheck = toCheck[len(toCheck)-1], toCheck[:len(toCheck)-1]
				options := sortints.SetMinus(h.Neighbours(p.p[len(p.p)-1]), p.bannedNeighbours)
				//Every legal extension yields one induced path that is one edge longer.
				r[p.length+1] += len(options)
				if p.length >= maxLength-1 {
					continue
				}
				for _, v := range options {
					tmpP := make([]int, p.length+2)
					copy(tmpP, p.p)
					tmpP[p.length+1] = v
					tmpBannedNeighbours := sortints.Union(p.bannedNeighbours, h.Neighbours(p.p[len(p.p)-1]))
					tmpBannedNeighbours.Add(v)
					toCheck = append(toCheck, path{tmpP, p.length + 1, tmpBannedNeighbours})
				}
			}
		}
	}
	//Each path of length >= 1 was counted once from each endpoint.
	for i := 1; i < len(r); i++ {
		r[i] /= 2
	}
	r[0] = n //The n single-vertex paths (this n is g.N(); the inner n shadowed it).
	return r
}

//NumberOfInducedCycles returns a slice of length n containing the number of induced cycles in g which are of length at most k.
//The length of a cycle is the number of edges in the cycle, or equivalently, the number of vertices in the cycle.
//Each cycle is found once per starting vertex and direction, hence the final division by 2i.
func NumberOfInducedCycles(g Graph, maxLength int) []int {
	n := g.N()
	if maxLength < 0 || maxLength > n {
		maxLength = n
	}
	r := make([]int, n+1)
	//cycle is a partial induced path that may be closed into a cycle.
	//allowedEnds are the neighbours of the start vertex that may still legally
	//close the cycle; bannedNeighbours is as in NumberOfInducedPaths.
	type cycle struct {
		p                []int
		length           int
		allowedEnds      sortints.SortedInts
		bannedNeighbours sortints.SortedInts
	}
	com := ConnectedComponents(g)
	for _, v := range com {
		h := InducedSubgraph(g, v)
		n := h.N()
		for i := 0; i < n; i++ {
			var p cycle
			toCheck := make([]cycle, 1)
			toCheck[0] = cycle{[]int{i}, 0, h.Neighbours(i), []int{i}}
			//Look for paths in h starting at i.
			for len(toCheck) > 0 {
				p, toCheck = toCheck[len(toCheck)-1], toCheck[:len(toCheck)-1]
				if p.length > 0 {
					//Any allowed end adjacent to the current endpoint closes an induced cycle.
					numCycles := len(sortints.Intersection(h.Neighbours(p.p[len(p.p)-1]), p.allowedEnds))
					r[p.length+2] += numCycles
				}
				if p.length >= maxLength-2 {
					continue
				}
				options := sortints.SetMinus(h.Neighbours(p.p[len(p.p)-1]), p.bannedNeighbours)
				for _, v := range options {
					tmpP := make([]int, p.length+2)
					copy(tmpP, p.p)
					tmpP[p.length+1] = v
					tmpBannedNeighbours := sortints.Union(p.bannedNeighbours, h.Neighbours(p.p[len(p.p)-1]))
					tmpBannedNeighbours.Add(v)
					var tmpAllowedEnds sortints.SortedInts
					if p.length > 0 {
						//The old endpoint becomes interior, so its neighbours can no longer close the cycle.
						tmpAllowedEnds = sortints.SetMinus(p.allowedEnds, h.Neighbours(p.p[len(p.p)-1]))
					} else {
						//First extension: copy the start's neighbours and drop v itself.
						tmpAllowedEnds = make([]int, len(p.allowedEnds))
						copy(tmpAllowedEnds, p.allowedEnds)
						tmpAllowedEnds.Remove(v)
					}
					toCheck = append(toCheck, cycle{tmpP, p.length + 1, tmpAllowedEnds, tmpBannedNeighbours})
				}
			}
		}
	}
	//Each cycle of length i was counted from each of its i vertices in both directions.
	for i := 1; i < len(r); i++ {
		r[i] /= 2 * i
	}
	return r
}

//inducedSubgraph is a lazy view of g restricted to verts; queries are answered
//against g at call time rather than by copying the subgraph.
type inducedSubgraph struct {
	verts   []int //The chosen vertices, in caller order; position = label in the subgraph.
	sortedV []int //verts sorted ascending, for fast sorted-set intersection.
	indices []int //indices[k] is the subgraph label of sortedV[k].
	g       Graph
}

//N returns the number of vertices in the induced subgraph.
func (h inducedSubgraph) N() int {
	return len(h.verts)
}

//Degrees returns the degree of each vertex within the induced subgraph.
func (h inducedSubgraph) Degrees() []int {
	degrees := make([]int, len(h.verts))
	for i, v := range h.verts {
		degrees[i] = sortints.IntersectionSize(h.g.Neighbours(v), h.sortedV)
	}
	return degrees
}

//M returns the number of edges in the induced subgraph (handshake lemma).
func (h inducedSubgraph) M() int {
	degrees := h.Degrees()
	sum := ints.Sum(degrees)
	return sum / 2
}

//IsEdge reports whether subgraph vertices i and j are adjacent, by mapping
//back to labels in g.
func (h inducedSubgraph) IsEdge(i, j int) bool {
	return h.g.IsEdge(h.verts[i], h.verts[j])
}

//Neighbours returns the neighbours of subgraph vertex v, relabelled to
//subgraph indices.
func (h inducedSubgraph) Neighbours(v int) []int {
	return intersectionByIndex(h.g.Neighbours(h.verts[v]), h.sortedV, h.indices)
}

//InducedSubgraph returns a graph which represents the subgraph of g induced by the vertices in V in the order they are in V.
//The properties of the induced subgraph are calculated from g when called and reflect the current state of g. If a vertex in V is no longer in the graph, the behaviour of this function is unspecified.
func InducedSubgraph(g Graph, V []int) Graph {
	values, indices := intsSort(V)
	return inducedSubgraph{verts: V, sortedV: values, indices: indices, g: g}
}

//sortWithIndex sorts a values slice while applying the same swaps to a
//parallel indices slice, implementing sort.Interface.
type sortWithIndex struct {
	values  *[]int
	indices *[]int
}

func (s sortWithIndex) Len() int {
	return len(*s.values)
}

func (s sortWithIndex) Swap(i, j int) {
	(*s.values)[i], (*s.values)[j] = (*s.values)[j], (*s.values)[i]
	(*s.indices)[i], (*s.indices)[j] = (*s.indices)[j], (*s.indices)[i]
}

func (s sortWithIndex) Less(i, j int) bool {
	return (*s.values)[i] < (*s.values)[j]
}

//intsSort returns a sorted copy of a together with indices such that
//values[k] == a[indices[k]]. a itself is not modified.
func intsSort(a []int) (values, indices []int) {
	values = make([]int, len(a))
	copy(values, a)
	indices = make([]int, len(a))
	for i := range indices {
		indices[i] = i
	}
	toSort := sortWithIndex{values: &values, indices: &indices}
	sort.Sort(toSort)
	return values, indices
}

//intersectionByIndex intersects the sorted sets a and b, but emits
//indicesOfB[j] (the caller's relabelling) for each common element rather
//than the element itself.
func intersectionByIndex(a, b sortints.SortedInts, indicesOfB []int) sortints.SortedInts {
	rV := make([]int, 0, sortints.IntersectionSize(a, b))
	r := sortints.SortedInts(rV)
	i := 0 //Point in a
	j := 0 //Point in b
	for i < len(a) && j < len(b) {
		if a[i] == b[j] {
			r.Add(indicesOfB[j])
			i++
			j++
		} else if a[i] > b[j] {
			j++
		} else {
			i++
		}
	}
	return r
}
graph/subgraph.go
0.59843
0.615088
subgraph.go
starcoder
package sexpr

import (
	"fmt"
	"io/ioutil" // NOTE(review): deprecated since Go 1.16; os.ReadFile is the drop-in replacement.
)

// ParseFile processes the given file and stores all the nodes it finds in
// the given AST instance. The parser uses the given syntax rule set to
// perform the parsing. Parsing the same file path twice is an error.
func ParseFile(ast *AST, file string, syntax *Syntax) (err error) {
	// "new" here shadows the predeclared identifier; it reports whether the
	// file was newly registered with the AST.
	fileindex, new := ast.addFile(file)
	if !new {
		return fmt.Errorf("Parsing duplicate file %q", file)
	}
	data, err := ioutil.ReadFile(file)
	if err != nil {
		return
	}
	return parseData(ast, data, syntax, fileindex)
}

// Parse processes the given data and stores all the nodes it finds in
// the given AST instance. The parser uses the given syntax rule set to
// perform the parsing. The data is registered under the pseudo file
// name "<raw data>".
func Parse(ast *AST, data []byte, syntax *Syntax) (err error) {
	fileindex, _ := ast.addFile("<raw data>")
	return parseData(ast, data, syntax, fileindex)
}

// ParseString processes the given data and stores all the nodes it finds in
// the given AST instance. The parser uses the given syntax rule set to
// perform the parsing.
func ParseString(ast *AST, data string, syntax *Syntax) (err error) {
	return Parse(ast, []byte(data), syntax)
}

// parseData processes the given data and stores all the nodes it finds in
// the given AST instance. The parser uses the given syntax rule set to
// perform the parsing.
//
// It consumes tokens from a lexer one at a time, maintaining `node` as the
// innermost currently-open list: TokListOpen pushes a child and descends,
// TokListClose ascends to the parent, any other token is appended as a leaf.
// Returns nil on TokEof, or a ParseError on a lexer error or a token that
// appears outside any list.
func parseData(ast *AST, data []byte, syntax *Syntax, fileindex int) (err error) {
	var tok Token
	var node *Node

	lex := NewLexer(data, syntax)

	for {
		lex.Next(&tok)

		// This would lead to an infinite loop otherwise.
		if len(tok.Data) == 0 && tok.Type != TokEof && tok.Type != TokErr {
			tok.Type = TokErr
		}

		switch tok.Type {
		case TokEof:
			// End of input: parsing finished successfully.
			return

		case TokErr:
			return NewParseError(ast.Files[fileindex], tok.Line, tok.Col, string(tok.Data))

		case TokListOpen:
			// Open a new list node and make it the current insertion point.
			// NOTE(review): File is truncated to uint8 — parsing more than
			// 255 files would silently wrap; confirm the intended limit.
			n := &Node{
				File: uint8(fileindex),
				Line: tok.Line,
				Col:  tok.Col,
				Data: tok.Data,
				Type: tok.Type,
			}

			if node == nil {
				// Top-level list: attach directly to the AST root.
				n.Parent = &ast.Root
				ast.Root.Children = append(ast.Root.Children, n)
				node = n
			} else {
				// Nested list: attach under the currently open list.
				n.Parent = node
				node.Children = append(node.Children, n)
				node = n
			}

		default:
			if node == nil {
				// Atoms (and list closers) are only legal inside a list.
				return NewParseError(ast.Files[fileindex], tok.Line, tok.Col,
					"Unexpected token %s; expected %s", tok, TokListOpen)
			}

			if tok.Type == TokListClose {
				// Close the current list; break leaves only the switch,
				// so the token loop continues.
				node = node.Parent
				break
			}

			// Leaf token: append to the currently open list.
			node.Children = append(node.Children, &Node{
				File: uint8(fileindex),
				Line: tok.Line,
				Col:  tok.Col,
				Data: tok.Data,
				Type: tok.Type,
			})
		}
	}
}
parse.go
0.615666
0.454593
parse.go
starcoder
package polynomial

import (
	"log"
)

// Polynomial represents a polynomial by its coefficients in ascending
// order of degree: a + bx + cx^2 + dx^3 -> {a, b, c, d}.
type Polynomial struct {
	c []float64 // Coefficients : a + bx + cx^2 + dx^3 -> {a, b, c, d}
}

// NewPolynomial creates a Polynomial with the given coefficients.
// The slice is stored directly (not copied), so callers should not
// mutate it afterwards.
func NewPolynomial(c []float64) (p Polynomial) {
	p.c = c
	return
}

// Coefficients returns the coefficient slice in ascending order of degree.
func (p Polynomial) Coefficients() []float64 {
	return p.c
}

// Plus returns a new polynomial in which x has been added to every
// coefficient. Note this adds x to each term's coefficient, not only
// to the constant term.
func (p Polynomial) Plus(x float64) (r Polynomial) {
	r.c = make([]float64, len(p.c))
	copy(r.c, p.c)
	for i := range r.c {
		r.c[i] = r.c[i] + x
	}
	return
}

// Minus returns a new polynomial in which x has been subtracted from
// every coefficient.
func (p Polynomial) Minus(x float64) (r Polynomial) {
	r.c = make([]float64, len(p.c))
	copy(r.c, p.c)
	for i := range r.c {
		r.c[i] = r.c[i] - x
	}
	return
}

// Multiply returns a new polynomial with every coefficient multiplied
// by x (i.e. the polynomial scaled by x).
func (p Polynomial) Multiply(x float64) Polynomial {
	return p.Muliply(x)
}

// Muliply returns a new polynomial with every coefficient multiplied by x.
//
// Deprecated: the name is a typo; use Multiply instead. Kept so existing
// callers keep compiling.
func (p Polynomial) Muliply(x float64) (r Polynomial) {
	r.c = make([]float64, len(p.c))
	copy(r.c, p.c)
	for i := range r.c {
		r.c[i] = r.c[i] * x
	}
	return
}

// Divide returns a new polynomial with every coefficient divided by x
// (the polynomial scaled by 1/x). Division by zero follows float64
// semantics (Inf/NaN coefficients).
func (p Polynomial) Divide(x float64) (r Polynomial) {
	r.c = make([]float64, len(p.c))
	copy(r.c, p.c)
	for i := range r.c {
		r.c[i] = r.c[i] / x
	}
	return
}

// PlusPolynomial returns the sum p + x. The result has the length of the
// longer coefficient slice; missing coefficients are treated as zero.
func (p Polynomial) PlusPolynomial(x Polynomial) (r Polynomial) {
	if len(p.c) > len(x.c) {
		r.c = make([]float64, len(p.c))
		copy(r.c, p.c)
		for i := range x.c {
			r.c[i] = r.c[i] + x.c[i]
		}
	} else {
		r.c = make([]float64, len(x.c))
		copy(r.c, x.c)
		for i := range p.c {
			r.c[i] = r.c[i] + p.c[i]
		}
	}
	return
}

// MinusPolynomial returns the difference p - x when p is longer. Note:
// when x has more coefficients than p the original implementation copies
// x and subtracts p from it, which computes p - x only in the overlapping
// terms and leaves x's high-order coefficients un-negated; this behavior
// is preserved for compatibility — confirm against callers before relying
// on it for polynomials of different lengths.
func (p Polynomial) MinusPolynomial(x Polynomial) (r Polynomial) {
	if len(p.c) > len(x.c) {
		r.c = make([]float64, len(p.c))
		copy(r.c, p.c)
		for i := range x.c {
			r.c[i] = r.c[i] - x.c[i]
		}
	} else {
		r.c = make([]float64, len(x.c))
		copy(r.c, x.c)
		for i := range p.c {
			r.c[i] = r.c[i] - p.c[i]
		}
	}
	return
}

// MultiplyPolynomial returns the product p * x via coefficient
// convolution; the result has degree deg(p)+deg(x).
func (p Polynomial) MultiplyPolynomial(x Polynomial) (r Polynomial) {
	r.c = make([]float64, len(p.c)+len(x.c)-1)
	for i := 0; i < len(p.c); i++ {
		for j := 0; j < len(x.c); j++ {
			r.c[i+j] += p.c[i] * x.c[j]
		}
	}
	return
}

// Evaluate computes p(x) by accumulating coefficient * x^i term by term.
func (p Polynomial) Evaluate(x float64) (calculated float64) {
	power := 1.0
	calculated = 0.0
	for _, coefficient := range p.c {
		calculated += coefficient * power
		power = power * x
	}
	return
}

// Derivative returns the derivative of the polynomial.
//
// With no arguments it returns the first derivative. With one argument k
// it returns the k-th derivative (k == 0 returns p unchanged). Any other
// number of arguments is a programming error and panics.
func (p Polynomial) Derivative(times ...int) (d_poly Polynomial) {
	switch len(times) {
	case 0:
		if len(p.c) == 0 {
			// Bug fix: the derivative of an empty polynomial is empty.
			// Without this guard, make([]float64, -1) below panics.
			return
		}
		d_poly.c = make([]float64, len(p.c)-1)
		for i := 0; i < len(p.c)-1; i++ {
			// d/dx of c[i+1]*x^(i+1) contributes c[i+1]*(i+1)*x^i.
			d_poly.c[i] = p.c[i+1] * float64(i+1)
		}
		return
	case 1:
		d_poly = p
		for i := 0; i < times[0]; i++ {
			// Bug fix: differentiate via the zero-argument form. The
			// original called d_poly.Derivative(1) here, which re-entered
			// this same case and recursed forever (stack overflow for any
			// times[0] >= 1).
			d_poly = d_poly.Derivative()
		}
		return
	default:
		log.Panic("Something is wrong. Derivative Parameter should be 0 or 1, but we got ", len(times), times)
		return
	}
}
polynomial/polynomial.go
0.667039
0.678017
polynomial.go
starcoder
package data // ConditionParams contains settings for one portion of a Run. A ConditionParams refers to a single ConditionParams, along with // other information such as the number of times to run each instantiated Block. A single Block can be referenced by many RunBlocks. type ConditionParams struct { Nm string `desc:"identifier for this type of configuration"` Desc string `desc:"description of this configuration"` TrialBlkNm string `desc:"trial group name"` FixedProb bool `desc:"fixed probability for each trial group"` NIters int `desc:"number of iterations to run"` BlocksPerIter int `desc:"number of blocks (1 block = one behavioral trial = sequence of CS, US) in each iteration -- needs to be higher if there are stochastic variables (probabilities)."` PermuteTrialGps bool `desc:"permute list of fully-instantiated trials after generation"` SaveFinalWts bool `desc:"save final weights after training"` SaveWtsInterval int `desc:"how frequently to save weights during training (in blocks)"` TestInterval int `desc:"how frequently (blocks) to run a test during training"` LogTrials bool `desc:"should trial-level data be saved to log files?"` LoadWeights bool `desc:"load initial weights from a file (specified in weights_file)"` WeightsFile string `desc:"full relative path (from project) of weights file to load -- use CRR: prefix to load from cluster run results directory"` LoadStBlk int `desc:"after loading weights, reset block counter to this value (-1 = leave at value from the loaded weights)"` LrsStepBlks int `desc:"learning rate schedule blocks per step of decrease in learning rate"` LrsNSteps int `desc:"number of steps in the learning rate schedule"` LrsBumpStep int `desc:"if positive (3 is typical), then bump up the learning rate at this step in the schedule -- can help improve final performance level"` } type ConditionParamsMap map[string]ConditionParams func AllConditionParams() ConditionParamsMap { sets := map[string]ConditionParams{ "RunMaster": { Nm: 
"RunMaster", Desc: "default values for basic training parameters -- this is a 'master' param set -- make changes here and all others in group will auto-update", TrialBlkNm: "PosAcq", FixedProb: true, NIters: 50, BlocksPerIter: 8, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NullStep": { Nm: "NullStep", Desc: "use for unused steps in sequences", TrialBlkNm: "BlankTemplate", FixedProb: true, NIters: 50, BlocksPerIter: 8, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "AutomatedTesting": { Nm: "AutomatedTesting", Desc: "This is the startup paramset for automated testing. The individual elements will get reset based on the sub Paramsets", TrialBlkNm: "PosAcq", FixedProb: true, NIters: 50, BlocksPerIter: 8, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "USDebug": { Nm: "USDebug", Desc: "For debugging, 100% reward, CS A", TrialBlkNm: "USDebug", FixedProb: true, NIters: 51, BlocksPerIter: 8, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosAcq_B50": { Nm: "PosAcq_B50", Desc: "Pavlovian conditioning w/ positively-valenced US: A_Rf_POS, B at 50%", TrialBlkNm: "PosAcq_B50", FixedProb: true, NIters: 51, BlocksPerIter: 8, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosAcq_A50": { Nm: "PosAcq_A50", Desc: 
"Pavlovian conditioning w/ positively-valenced US: A_Rf_POS at 50%", TrialBlkNm: "PosAcq_A50", FixedProb: true, NIters: 51, BlocksPerIter: 10, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "US0": { Nm: "US0", Desc: "No US at all", TrialBlkNm: "US0", FixedProb: true, NIters: 5, BlocksPerIter: 100, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosAcqPreSecondOrder": { Nm: "PosAcqPreSecondOrder", Desc: "Pavlovian conditioning w/ positively-valenced US: A_Rf_POS, B at 50%", TrialBlkNm: "PosAcqPreSecondOrder", FixedProb: true, NIters: 51, BlocksPerIter: 8, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosAcq_B50Cont": { Nm: "PosAcq_B50Cont", Desc: "Pavlovian conditioning w/ positively-valenced US: A_Rf_POS, B at 50% reinf, continue using prior weights", TrialBlkNm: "PosReacq", FixedProb: true, NIters: 50, BlocksPerIter: 8, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_cel_AB_POS_trn.00_0050.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosAcq_B100": { Nm: "PosAcq_B100", Desc: "Pavlovian conditioning w/ positively-valenced US: A_Rf_POS, B at 100%", TrialBlkNm: "PosAcq_B100", FixedProb: true, NIters: 50, BlocksPerIter: 8, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosAcq_B100Cont": { Nm: "PosAcq_B100Cont", Desc: "Pavlovian 
conditioning w/ positively-valenced US: A_Rf_POS -- continue w/ wts", TrialBlkNm: "PosAcq_B100_cont", FixedProb: true, NIters: 50, BlocksPerIter: 8, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_cel_AB_POS_trn.00_0050.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosAcqEarlyUS_test": { Nm: "PosAcqEarlyUS_test", Desc: "Testing session: after pos_acq trng, deliver US early or late", TrialBlkNm: "PosAcqEarlyUS_test", FixedProb: true, NIters: 5, BlocksPerIter: 2, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_cel_AB_POS_trn.00_0050.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosAcq_B25": { Nm: "PosAcq_B25", Desc: "Pavlovian conditioning w/ positively-valenced US: A_Rf_POS", TrialBlkNm: "PosAcq_B25", FixedProb: true, NIters: 200, BlocksPerIter: 8, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosExtinct": { Nm: "PosExtinct", Desc: "Pavlovian extinction: A_NRf_POS", TrialBlkNm: "PosExtinct", FixedProb: false, NIters: 50, BlocksPerIter: 8, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/PVLVNet_cel_AB_POS_trn.00_0050.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosCondInhib": { Nm: "PosCondInhib", Desc: "conditioned inhibition training: AX_NRf_POS, A_Rf_POS interleaved", TrialBlkNm: "PosCondInhib", FixedProb: false, NIters: 25, BlocksPerIter: 8, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_cel_AB_POS_trn.00_0050.wts.gz", LoadStBlk: 0, 
LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosSecondOrderCond": { Nm: "PosSecondOrderCond", Desc: "second order conditioning training: AB_NRf_POS, A_Rf_POS interleaved; A = 1st order, F = 2nd order CS", TrialBlkNm: "PosSecondOrderCond", FixedProb: false, NIters: 10, BlocksPerIter: 50, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_cel_AB_POS_trn.00_0050.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosCondInhib_test": { Nm: "PosCondInhib_test", Desc: "Testing session: A_NRf_POS, AX_NRf_POS, and X_NRf_POS cases", TrialBlkNm: "PosCondInhib_test", FixedProb: false, NIters: 5, BlocksPerIter: 6, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_cel_AB_POS_cond_inhib_trn.00_0050.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NegAcq": { Nm: "NegAcq", Desc: "Pavlovian conditioning w/ negatively-valenced US: D_Rf_NEG", TrialBlkNm: "NegAcq", FixedProb: false, NIters: 76, BlocksPerIter: 10, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NegAcqFixedProb": { Nm: "NegAcqFixedProb", Desc: "Pavlovian conditioning w/ negatively-valenced US: A_Rf_NEG", TrialBlkNm: "NegAcq", FixedProb: true, NIters: 150, BlocksPerIter: 8, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosAcqOmit": { Nm: "PosAcqOmit", Desc: "Pavlovian conditioning w/ positively-valenced US: A_Rf_POS, A_Rf_POS_omit trials, interleaved", TrialBlkNm: "PosAcqOmit", FixedProb: false, NIters: 10, BlocksPerIter: 0, PermuteTrialGps: false, SaveFinalWts: true, 
SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NegCondInh": { Nm: "NegCondInh", Desc: "condition inhibition w/ negatively-valenced US: CZ_NRf_NEG, C_Rf_NEG interleaved; i.e., Z = security signal", TrialBlkNm: "NegCondInhib", FixedProb: false, NIters: 75, BlocksPerIter: 10, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_DE_NEG_trn.00_0150.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NegCondInh_test": { Nm: "NegCondInh_test", Desc: "condition inhibition w/ negatively-valenced US: CZ_NRf_NEG, C_Rf_NEG interleaved; i.e., Z = security signal", TrialBlkNm: "NegCondInhib_test", FixedProb: false, NIters: 5, BlocksPerIter: 6, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_DU_NEG_trn.00_0150.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NegExtinct": { Nm: "NegExtinct", Desc: "Pavlovian conditioning w/ negatively-valenced US: A_Rf_NEG", TrialBlkNm: "NegExtinct", FixedProb: false, NIters: 75, BlocksPerIter: 8, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_DE_NEG_trn.00_0150.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosAcq_contextA": { Nm: "PosAcq_contextA", Desc: "Pavlovian conditioning w/ positively-valenced US: A_Rf_POS, A_Rf_POS_omit trials, interleaved", TrialBlkNm: "PosAcq_contextA", FixedProb: false, NIters: 26, BlocksPerIter: 10, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosExtinct_contextB": { Nm: 
"PosExtinct_contextB", Desc: "Pavlovian conditioning w/ positively-valenced US: A_Rf_POS, A_Rf_POS_omit trials, interleaved", TrialBlkNm: "PosExtinct_contextB", FixedProb: false, NIters: 25, BlocksPerIter: 10, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_A_contextA_vs.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosRenewal_contextA": { Nm: "PosRenewal_contextA", Desc: "Pavlovian conditioning w/ positively-valenced US: A_Rf_POS, A_Rf_POS_omit trials, interleaved", TrialBlkNm: "PosRenewal_contextA", FixedProb: false, NIters: 1, BlocksPerIter: 2, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_A_contextB_vs.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosBlocking_A_training": { Nm: "PosBlocking_A_training", Desc: "Blocking experiment", TrialBlkNm: "PosBlocking_A_training", FixedProb: false, NIters: 50, BlocksPerIter: 1, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosBlocking": { Nm: "PosBlocking", Desc: "Blocking experiment", TrialBlkNm: "PosBlocking", FixedProb: false, NIters: 50, BlocksPerIter: 2, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_posblocking_A_training_300.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosBlocking_test": { Nm: "PosBlocking_test", Desc: "Blocking experiment", TrialBlkNm: "PosBlocking_test", FixedProb: false, NIters: 25, BlocksPerIter: 1, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: 
"wts/bvPVLVNet_posblocking_A_AB_200.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosBlocking2_test": { Nm: "PosBlocking2_test", Desc: "Blocking experiment", TrialBlkNm: "PosBlocking2_test", FixedProb: false, NIters: 25, BlocksPerIter: 2, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_posblocking_A_AB_200.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NegBlocking_E_training": { Nm: "NegBlocking_E_training", Desc: "Blocking experiment", TrialBlkNm: "NegBlocking_E_training", FixedProb: false, NIters: 300, BlocksPerIter: 1, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NegBlocking": { Nm: "NegBlocking", Desc: "Blocking experiment", TrialBlkNm: "NegBlocking", FixedProb: false, NIters: 200, BlocksPerIter: 2, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_negblocking_E_training.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NegBlocking_test": { Nm: "NegBlocking_test", Desc: "Blocking experiment", TrialBlkNm: "NegBlocking_test", FixedProb: false, NIters: 25, BlocksPerIter: 1, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_negblocking_E_DE.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosAcqMag": { Nm: "PosAcqMag", Desc: "Magnitude experiment", TrialBlkNm: "PosAcqMagnitude", FixedProb: false, NIters: 50, BlocksPerIter: 8, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, 
LrsBumpStep: -1, }, "PosSumAcq": { Nm: "PosSumAcq", Desc: "Conditioned Inhibition - A+, C+", TrialBlkNm: "PosSumAcq", FixedProb: false, NIters: 450, BlocksPerIter: 3, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosSumCondInhib": { Nm: "PosSumCondInhib", Desc: "Conditioned Inhibition - AX-, A+", TrialBlkNm: "PosCondInhib_BY", FixedProb: false, NIters: 300, BlocksPerIter: 3, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_AC_POS_trn.00_0450.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosSum_test": { Nm: "PosSum_test", Desc: "Conditioned Inhibition Summation Test", TrialBlkNm: "PosSumCondInhib_test", FixedProb: false, NIters: 5, BlocksPerIter: 6, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_sum_AX_POS_trn.00_0300.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NegSumAcq": { Nm: "NegSumAcq", Desc: "Conditioned Inhibition - D-, E-", TrialBlkNm: "NegSumAcq", FixedProb: false, NIters: 50, BlocksPerIter: 3, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NegSumCondInhib": { Nm: "NegSumCondInhib", Desc: "Conditioned Inhibition - DU, D-", TrialBlkNm: "NegCondInhib_FV", FixedProb: false, NIters: 100, BlocksPerIter: 3, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_DEF_NEG_trn.00_0050.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NegSum_test": { Nm: "NegSum_test", Desc: 
"Conditioned Inhibition Summation Test", TrialBlkNm: "NegSumCondInhib_test", FixedProb: false, NIters: 5, BlocksPerIter: 6, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_sum_DU_NEG_trn.00_0050.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "Unblocking_train": { Nm: "Unblocking_train", Desc: "A+++,B+++,C+", TrialBlkNm: "Unblocking_train", FixedProb: false, NIters: 50, BlocksPerIter: 2, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "UnblockingValue": { Nm: "UnblockingValue", Desc: "AX+++,CZ+++", TrialBlkNm: "UnblockingValue", FixedProb: false, NIters: 25, BlocksPerIter: 1, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_ABC_POS_trn.00_0050.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "UnblockingValue_test": { Nm: "UnblockingValue_test", Desc: "A,X,C,Z", TrialBlkNm: "UnblockingValue_test", FixedProb: false, NIters: 5, BlocksPerIter: 1, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_AX_CZ_trn.00_0050.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "Unblocking_trainUS": { Nm: "Unblocking_trainUS", Desc: "A+++ (water) ,B+++ (food)", TrialBlkNm: "Unblocking_trainUS", FixedProb: false, NIters: 50, BlocksPerIter: 15, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "UnblockingIdentity": { Nm: "UnblockingIdentity", Desc: "AX+++(water),BY+++(water)", TrialBlkNm: "UnblockingIdentity", 
FixedProb: false, NIters: 25, BlocksPerIter: 20, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_ABC_POS_US_trn.00_0050.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "UnblockingIdentity_test": { Nm: "UnblockingIdentity_test", Desc: "A,X,B,Y", TrialBlkNm: "UnblockingIdentity_test", FixedProb: false, NIters: 5, BlocksPerIter: 4, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_AX_BY_trn.00_0050.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosAcqMagChange": { Nm: "PosAcqMagChange", Desc: "Magnitude experiment", TrialBlkNm: "PosAcqMagnitudeChange", FixedProb: false, NIters: 50, BlocksPerIter: 4, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_AB_POS_MAG_trn.00_0050.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NegAcqMag": { Nm: "NegAcqMag", Desc: "Magnitude experiment", TrialBlkNm: "NegAcqMagnitude", FixedProb: false, NIters: 51, BlocksPerIter: 8, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "NegAcqMagChange": { Nm: "NegAcqMagChange", Desc: "Magnitude experiment", TrialBlkNm: "NegAcqMagnitudeChange", FixedProb: false, NIters: 50, BlocksPerIter: 4, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "Overexpect_train": { Nm: "Overexpect_train", Desc: "Overexpectation training (A+, B+, C+, X+, Y-)", TrialBlkNm: "Overexpectation_train", FixedProb: false, NIters: 150, BlocksPerIter: 5, 
PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "OverexpectCompound": { Nm: "OverexpectCompound", Desc: "Overexpectation compound training (AX+, BY-, CX+, X+, Y-)", TrialBlkNm: "OverexpectationCompound", FixedProb: false, NIters: 150, BlocksPerIter: 5, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_ABCXY_trn.00_0150.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "Overexpect_test": { Nm: "Overexpect_test", Desc: "Overexpectation test ( A-, B-, C-, X-)", TrialBlkNm: "Overexpectation_test", FixedProb: false, NIters: 5, BlocksPerIter: 5, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "wts/bvPVLVNet_AXBYCY_trn.00_0150.wts.gz", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosNeg": { Nm: "PosNeg", Desc: "Positive negative test - W equally reinforced with reward + punishment", TrialBlkNm: "PosNeg", FixedProb: false, NIters: 150, BlocksPerIter: 6, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PosOrNegAcq": { Nm: "PosOrNegAcq", Desc: "Positive negative acquisition - with reward or punishment on interleaved trials according to user-set probabilities", TrialBlkNm: "PosOrNegAcq", FixedProb: false, NIters: 150, BlocksPerIter: 6, PermuteTrialGps: true, SaveFinalWts: false, SaveWtsInterval: 200, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "CondExp": { Nm: "CondExp", Desc: "", TrialBlkNm: "CondExp", FixedProb: false, NIters: 0, BlocksPerIter: 296, 
PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 0, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, "PainExp": { Nm: "PainExp", Desc: "", TrialBlkNm: "CondExp", FixedProb: false, NIters: 0, BlocksPerIter: 48, PermuteTrialGps: false, SaveFinalWts: false, SaveWtsInterval: 0, TestInterval: 0, LogTrials: false, LoadWeights: false, WeightsFile: "", LoadStBlk: 0, LrsStepBlks: 0, LrsNSteps: 7, LrsBumpStep: -1, }, } return sets }
ch7/pvlv/data/condition_params.go
0.704364
0.546073
condition_params.go
starcoder
package token

import (
	"unicode"

	"github.com/gnames/bayes"
	gner "github.com/gnames/gner/ent/token"
	"github.com/gnames/gnfinder/io/dict"
)

// tokenSN represents a word separated by spaces in a text. Words that are
// split by new lines are concatenated.
type tokenSN struct {
	gner.TokenNER

	// features is a collection of properties associated with the tokenSN.
	// They differ from properties coming from TokenNER.
	features Features

	// nlp contains NLP-related data.
	nlp NLP

	// indices of semantic elements of a possible name.
	indices Indices

	// decision tags the first token of a possible name with a classification
	// decision.
	decision Decision
}

// NLP collects data received from Bayes' algorithm.
type NLP struct {
	// Odds are posterior odds.
	Odds float64

	// OddsDetails are elements from which Odds are calculated.
	OddsDetails

	// LabelFreq is used to calculate prior odds of names appearing in a
	// document.
	LabelFreq bayes.LabelFreq
}

// OddsDetails are elements from which Odds are calculated, keyed first by
// label name, then by feature name and feature value.
type OddsDetails map[string]map[bayes.FeatureName]map[bayes.FeatureValue]float64

// NewOddsDetails converts Bayes likelihoods into an OddsDetails map, using
// each label's string form as the outer key.
func NewOddsDetails(l bayes.Likelihoods) OddsDetails {
	res := make(OddsDetails)
	for k, v := range l {
		res[k.String()] = v
	}
	return res
}

// Indices of the elements for a name candidate. A zero value means the
// corresponding element was not detected.
type Indices struct {
	Species      int
	Rank         int
	Infraspecies int
}

// NewTokenSN is a factory and a wrapper. It takes gner.TokenNER object and
// wraps it into the TokenSN interface.
func NewTokenSN(token gner.TokenNER) gner.TokenNER {
	t := &tokenSN{
		TokenNER: token,
	}
	return t
}

// Features returns features that are specific to scientific name finding.
func (t *tokenSN) Features() *Features {
	return &t.features
}

// NLP returns natural language processing features of a scientific name.
func (t *tokenSN) NLP() *NLP {
	return &t.nlp
}

// Indices returns the positions of semantic elements (species, rank,
// infraspecies) within a name candidate.
func (t *tokenSN) Indices() *Indices {
	return &t.indices
}

// Decision returns the decision for a name candidate.
func (t *tokenSN) Decision() Decision {
	return t.decision
}

// SetDecision saves the made decision into the object.
func (t *tokenSN) SetDecision(d Decision) {
	t.decision = d
}

// ProcessToken overrides the function in TokenNER and introduces logic that
// is needed for scientific names finding. The function sets the cleaned-up
// version of the raw token value and computes several properties of a token.
//
// NOTE(review): assumes the raw token is non-empty — raw[0] and raw[l-1]
// would panic on an empty rune slice; confirm the upstream tokenizer never
// emits empty tokens.
func (t *tokenSN) ProcessToken() {
	raw := t.Raw()
	l := len(raw)
	f := &t.features
	f.HasStartParens = raw[0] == rune('(')
	f.HasEndParens = raw[l-1] == rune(')')
	// Strip/replace non-letters and record the span of letter content.
	res, start, end := normalize(raw, f)
	f.setAbbr(t.Raw(), start, end)
	if f.IsCapitalized {
		// normalize lower-cased everything; restore the leading capital.
		res[0] = unicode.ToUpper(res[0])
		f.setPotentialBinomialGenus(t.Raw(), start, end)
		if f.Abbr {
			// Re-attach the trailing period of an abbreviated genus ("B.").
			res = append(res, rune('.'))
		}
	} else {
		// makes it impossible to have capitalized species
		f.setStartsWithLetter(start, end)
		f.setEndsWithLetter(t.Raw(), start, end)
	}
	t.SetCleaned(string(res))
}

// normalize returns the cleaned-up name and the indices of its start and
// end. The normalization includes removal of non-letters from the start and
// the end, and substitution of internal non-letters with '�'. As a side
// effect it records capitalization and dash presence on f.
func normalize(raw []rune, f *Features) ([]rune, int, int) {
	res := make([]rune, len(raw))
	firstLetter := true
	var start, end int
	for i := range raw {
		hasDash := raw[i] == rune('-')
		if unicode.IsLetter(raw[i]) || hasDash {
			if firstLetter {
				// First letter fixes the start index and capitalization.
				start = i
				f.IsCapitalized = unicode.IsUpper(raw[i])
				firstLetter = false
			}
			end = i
			res[i] = unicode.ToLower(raw[i])
		} else {
			// Internal non-letters become a placeholder rune; leading and
			// trailing ones are dropped by the final re-slice below.
			res[i] = rune('�')
		}
		if hasDash {
			f.HasDash = true
		}
	}
	return res[start : end+1], start, end
}

// SetIndices takes a slice of tokens that correspond to a name candidate.
// It analyses the tokens and sets Token.Indices according to the feasibility
// of the input tokens to form a scientific name. It checks if there is a
// possible species, rank, and infraspecies.
func SetIndices(ts []TokenSN, d *dict.Dictionary) {
	// ts[0] is always the uninomial/genus candidate.
	u := ts[0]
	uF := u.Features()
	uF.SetUninomialDict(u.Cleaned(), d)
	l := len(ts)
	if !uF.PotentialBinomialGenus || l == 1 {
		return
	}
	if l == 2 {
		// Exactly two tokens: only a "Genus species" binomial is possible.
		sp := ts[1]
		spF := sp.Features()
		if !spF.StartsWithLetter || spF.IsCapitalized || len(sp.Cleaned()) < 3 {
			return
		}
		u.Indices().Species = 1
		spF.SetSpeciesDict(sp.Cleaned(), d)
		return
	}
	// Three or more tokens: the species epithet may be shifted right by a
	// parenthesized subgenus, e.g. "Genus (Subgenus) species".
	spF := ts[1].Features()
	iSp := 1
	if spF.HasStartParens && spF.HasEndParens {
		iSp = 2
	}
	sp := ts[iSp]
	spF = sp.Features()
	if !spF.StartsWithLetter || spF.IsCapitalized || len(sp.Cleaned()) < 3 {
		return
	}
	u.Indices().Species = iSp
	sp.Features().SetSpeciesDict(sp.Cleaned(), d)
	if !sp.Features().EndsWithLetter || l == iSp+1 {
		return
	}
	// Look for an optional rank token ("var.", "subsp.", ...) followed by an
	// infraspecific epithet.
	iIsp := iSp + 1
	if l > iIsp+1 && checkRank(ts[iIsp], d) {
		u.Indices().Rank = iIsp
		iIsp++
	}
	tIsp := ts[iIsp]
	if l <= iIsp || tIsp.Features().IsCapitalized ||
		!tIsp.Features().StartsWithLetter || len(tIsp.Cleaned()) < 3 {
		return
	}
	u.Indices().Infraspecies = iIsp
	isp := ts[iIsp]
	isp.Features().SetSpeciesDict(isp.Cleaned(), d)
}

// checkRank marks the token's rank feature from the dictionary and reports
// whether the token looks like an infraspecific rank designation.
func checkRank(t TokenSN, d *dict.Dictionary) bool {
	t.Features().SetRank(string(t.Raw()), d)
	return t.Features().RankLike
}

// UpperIndex takes an index of a token and the length of the tokens slice
// and returns an upper index of what could be a slice of a name. We expect
// that most of the names will fit into 5 words. Other cases would require
// more thorough algorithms that we can run later as plugins.
func UpperIndex(i int, l int) int {
	upperIndex := i + 5
	if l < upperIndex {
		upperIndex = l
	}
	return upperIndex
}
ent/token/token.go
0.753285
0.464841
token.go
starcoder
package stream import "golang.org/x/exp/slices" // SliceComparableStream Generics constraints based on comparable type SliceComparableStream[E comparable] struct { SliceStream[E] } // NewSliceByComparable new stream instance, generics constraints based on comparable func NewSliceByComparable[E comparable](source []E) SliceComparableStream[E] { return SliceComparableStream[E]{SliceStream: NewSlice(source)} } // Distinct Returns a stream consisting of the distinct elements of this stream. // Remove duplicate according to map comparable. func (stream SliceComparableStream[E]) Distinct() SliceComparableStream[E] { stream.evaluation() if stream.source == nil && len(stream.source) < 2 { return stream } newSlice := make([]E, 0) distinct := map[E]struct{}{} for _, v := range stream.source { if _, ok := distinct[v]; ok { continue } distinct[v] = struct{}{} newSlice = append(newSlice, v) } stream.source = newSlice return stream } // Equal Returns whether the source in the stream is equal to the destination source. // Equal according to the slices.Equal. func (stream SliceComparableStream[E]) Equal(dest []E) bool { stream.evaluation() return slices.Equal(stream.source, dest) } // Find Returns the index of the first element in the stream that matches the target element. // If not found then -1 is returned. 
func (stream SliceComparableStream[E]) Find(dest E) int { stream.evaluation() for i, v := range stream.source { if v == dest { return i } } return -1 } // Parallel See: SliceStream.Parallel func (stream SliceComparableStream[E]) Parallel(goroutines int) SliceComparableStream[E] { stream.SliceStream = stream.SliceStream.Parallel(goroutines) return stream } // ForEach See: SliceStream.ForEach func (stream SliceComparableStream[E]) ForEach(action func(int, E)) SliceComparableStream[E] { stream.SliceStream.ForEach(action) return stream } // Filter See: SliceStream.Filter func (stream SliceComparableStream[E]) Filter(predicate func(E) bool) SliceComparableStream[E] { stream.SliceStream = stream.SliceStream.Filter(predicate) return stream } // Limit See: SliceStream.Limit func (stream SliceComparableStream[E]) Limit(maxSize int) SliceComparableStream[E] { stream.SliceStream = stream.SliceStream.Limit(maxSize) return stream } // Map See: SliceStream.Map func (stream SliceComparableStream[E]) Map(mapper func(E) E) SliceComparableStream[E] { stream.SliceStream = stream.SliceStream.Map(mapper) return stream } // SortFunc See: SliceStream.SortFunc func (stream SliceComparableStream[E]) SortFunc(less func(a, b E) bool) SliceComparableStream[E] { stream.SliceStream = stream.SliceStream.SortFunc(less) return stream }
slice_comparable.go
0.863837
0.408867
slice_comparable.go
starcoder
package holee import ( "math" "math/rand" "time" "github.com/konimarti/fixedincome/pkg/term" ) // HoLee implements the Ho-Lee interest rate model type HoLee struct { // R0 is the initial rate (known today) R0 float64 // Sigma is the standard deviation of the short term interest rate Sigma float64 // T is the maturity (up to which to calculate the interes rates) T float64 // N represents number of steps N int // Theta are the parameters of the Ho-Lee model Theta []float64 // Rng is the random number generator (NormFloat64) Rng *rand.Rand // Payoff returns the discounted payoff for the given simulated rates Payoff func([]float64) float64 } // New creates a new Ho-Lee model func New(ts term.Structure, sigma, t float64, n int, payoff func([]float64) float64) (*HoLee, error) { hl := &HoLee{ R0: ts.Rate(t / float64(n)), Sigma: sigma, T: t, N: n, Theta: make([]float64, n), Rng: rand.New(rand.NewSource(time.Now().UnixNano())), Payoff: payoff, } err := Calibrate(hl, ts) return hl, err } // Calibrate calculates the parameters of the Ho-Lee model (theta's) to match the current yield curve func Calibrate(hl *HoLee, ts term.Structure) error { n := hl.N dt := hl.T / float64(n) r := make([]float64, n+2) f := make([]float64, n+1) // calculate current rates, forward rates and thetas on the grid for i := 0; i < n+2; i += 1 { r[i] = ts.Rate(float64(i+1)*dt) / 100.0 } for i := 0; i < n+1; i += 1 { // f[i] = r[i] + float64(i+1)*(r[i+1]-r[i]) f[i] = -math.Log(ts.Z(float64(i+2)*dt)/ts.Z(float64(i+1)*dt)) / dt } for i := 0; i < n; i += 1 { hl.Theta[i] = (f[i+1]-f[i])/dt + math.Pow(hl.Sigma, 2.0)*float64(i+1)*dt } return nil } // Measurement implements the model interface for the Monte Carlo engine func (hl *HoLee) Measurement() float64 { n := hl.N dt := hl.T / float64(n) rates := make([]float64, n) // simulate interest rates rates[0] = hl.R0 / 100.0 for i := 0; i < (n - 1); i += 1 { rates[i+1] = rates[i] + hl.Theta[i]*dt + hl.Sigma*math.Sqrt(dt)*hl.Rng.NormFloat64() } return 
hl.Payoff(rates) }
pkg/mc/model/holee/holee.go
0.74382
0.639947
holee.go
starcoder
package continuous import ( "github.com/jtejido/stats" "github.com/jtejido/stats/err" "math" "math/rand" ) // Pareto type-II // At μ = 0, see https://en.wikipedia.org/wiki/Lomax_distribution // https://reference.wolfram.com/language/ref/ParetoDistribution.html type ParetoType2 struct { xmin, shape, location float64 // xm, α, μ src rand.Source } func NewParetoType2(xmin, shape, location float64) (*ParetoType2, error) { return NewParetoType2WithSource(xmin, shape, location, nil) } func NewParetoType2WithSource(xmin, shape, location float64, src rand.Source) (*ParetoType2, error) { if xmin <= 0 || shape <= 0 { return nil, err.Invalid() } return &ParetoType2{xmin, shape, location, src}, nil } // xm ∈ (0,∞) // α ∈ (0,∞) // μ ∈ (-∞,∞) func (p *ParetoType2) Parameters() stats.Limits { return stats.Limits{ "xm": stats.Interval{0, math.Inf(1), true, true}, "α": stats.Interval{0, math.Inf(1), true, true}, "μ": stats.Interval{math.Inf(-1), math.Inf(1), true, true}, } } // x ∈ [μ,∞) func (p *ParetoType2) Support() stats.Interval { return stats.Interval{p.location, math.Inf(1), false, true} } func (p *ParetoType2) Probability(x float64) float64 { if p.Support().IsWithinInterval(x) { return (p.shape * math.Pow((p.xmin+x-p.location)/p.xmin, -1-p.shape)) / p.xmin } return 0 } func (p *ParetoType2) Distribution(x float64) float64 { if p.Support().IsWithinInterval(x) { return 1 - math.Pow(1+((x-p.location)/p.xmin), -p.shape) } return 0 } func (p *ParetoType2) Inverse(q float64) float64 { if q <= 0 { return p.location } if q >= 1 { return math.Inf(1) } return p.xmin*math.Pow(-1+(1-q), (-1/p.shape)) + p.location } func (p *ParetoType2) Mean() float64 { if p.shape > 1 { return (p.xmin / (p.shape - 1)) + p.location } return math.NaN() } func (p *ParetoType2) Median() float64 { return (p.xmin * (math.Pow(2., 1/p.shape) - 1)) + p.location } func (p *ParetoType2) Mode() float64 { return p.location } func (p *ParetoType2) Variance() float64 { if p.shape > 2 { return (math.Pow(p.xmin, 2.) 
* p.shape) / (math.Pow(p.shape-1, 2.) * (p.shape - 2)) } if p.shape > 1 && p.shape <= 2 { return math.Inf(1) } return math.NaN() } func (p *ParetoType2) ExKurtosis() float64 { if p.shape > 4 { return (6 * ((p.shape * p.shape * p.shape) + (p.shape * p.shape) - 6*p.shape - 2)) / (p.shape * (p.shape - 3) * (p.shape - 4)) } return math.NaN() } func (p *ParetoType2) Skewness() float64 { if p.shape > 3 { return ((2 * (1 + p.shape)) / (p.shape - 3)) * math.Sqrt((p.shape-2)/p.shape) } return math.NaN() } func (p *ParetoType2) Rand() float64 { var rnd float64 if p.src == nil { rnd = rand.Float64() } else { rnd = rand.New(p.src).Float64() } return p.Inverse(rnd) }
dist/continuous/pareto_type_2.go
0.785103
0.584923
pareto_type_2.go
starcoder
package proxyproto import "encoding/binary" // parseTLVs processes the Type-Length-Value bits for Proxy Protocol V2 // the buffer is expected to only include the TLV portion of the payload // it can also be used to process the SSL sub-TLVs by passing that buffer // into this function func parseTLVs(buf []byte) map[TLVType][]byte { m := make(map[TLVType][]byte) i := 0 for i+2 < len(buf) { t := buf[i] l := int(binary.BigEndian.Uint16(buf[i+1 : i+3])) i += 3 if i+l <= len(buf) { m[TLVType(t)] = buf[i : i+l] } i += l } return m } // TLVGetALPN gets the ALPN TLV from the data. // It is for Application-Layer Protocol Negotiation (ALPN). It is a byte sequence defining // the upper layer protocol in use over the connection. The most common use case // will be to pass the exact copy of the ALPN extension of the Transport Layer // Security (TLS) protocol as defined by RFC7301. // The second return value will be false if the TLV is not provided func (d *Data) TLVGetALPN() (string, bool) { if d.TLVs == nil { return "", false } if d, ok := d.TLVs[TLVTypeALPN]; ok { return string(d), true } return "", false } // TLVGetAuthority gets the host name value passed by the client, as an UTF8-encoded string. 
// In case of TLS being used on the client connection, this is the exact copy of // the "server_name" extension as defined by RFC3546 // The second return value will be false if the TLV is not provided func (d *Data) TLVGetAuthority() (string, bool) { if d.TLVs == nil { return "", false } if d, ok := d.TLVs[TLVTypeAuthority]; ok { return string(d), true } return "", false } // TLVGetCRC32Checksum gets a 32-bit number storing the CRC32c checksum of the PROXY protocol header // The second return value will be false if the TLV is not provided func (d *Data) TLVGetCRC32Checksum() (uint32, bool) { if d.TLVs == nil { return 0, false } if d, ok := d.TLVs[TLVTypeCRC32C]; ok && len(d) == 4 { return binary.BigEndian.Uint32(d), true } return 0, false } // TLVGetSSL gets the SSL TLV // The second return value will be false if the TLV is not provided func (d *Data) TLVGetSSL() (*SSLTLVData, bool) { if d.TLVs == nil { return nil, false } if d, ok := d.TLVs[TLVTypeSSL]; ok && len(d) > 5 { subs := parseTLVs(d[5:]) dest := make(map[SSLTLVSubType][]byte) for k := range subs { dest[SSLTLVSubType(k)] = subs[k] } return &SSLTLVData{ Client: SSLTLVClientField(d[0]), Verified: binary.BigEndian.Uint32(d[1:5]) == 0, SubTLVs: dest, }, true } return nil, false } // TLVGetNetworkNamespace gets the value as the US-ASCII string representation // of the namespace's name. // The second return value will be false if the TLV is not provided func (d *Data) TLVGetNetworkNamespace() (string, bool) { if d.TLVs == nil { return "", false } if d, ok := d.TLVs[TLVTypeNetNS]; ok { return string(d), true } return "", false }
parsetlvs.go
0.608245
0.455986
parsetlvs.go
starcoder
package main //@danielmatthewsgrout //A very basic Self Organising Map implementation with limited parallelism import ( "math" "math/rand" "sync" ) //DistanceFunction a function to measure distance between 2 vectors of same len type DistanceFunction func(a1, a2 []float64) float64 //InitFunction - the function used to initialise the SOM type InitFunction func(dimensions, xySize int, vectors [][]float64) [][]float64 //tweak this if needed - I found this was the best value on a 4c/8t Intel const concurrent = 32 //BasicSOM a basicSom type BasicSOM struct { nodes [][]float64 xySize int df DistanceFunction } //TrainBasicSOM ronseal func TrainBasicSOM(dimensions, xySize, maxSteps int, learningDecayRate float64, vectors [][]float64) SelfOrgMap { initf := RandomInit df := EuclideanDistanceSquared nodes := initf(dimensions, xySize, vectors) println("SOM processing...") fXYSize := float64(xySize) fMaxSteps := float64(maxSteps) v := 0 for i := 0; i < maxSteps; i++ { fI := float64(i) + 1 selection := vectors[v] //select vector for this run v++ if v == len(vectors) { v = 0 } winner := getNearest(selection, nodes, df) //find the nearest node to this selection radius := fXYSize * math.Exp(-(fI / (fMaxSteps / math.Log(fXYSize)))) //calculate the radius for neighbour sarch lRate := learningDecayRate * math.Exp(-fI/fMaxSteps) //calculate the learning rate for this run sq := (radius * radius) x2 := winner % xySize y2 := winner / xySize i := 0 wg := sync.WaitGroup{} k := len(nodes) / concurrent //find the smallest for i < len(nodes) { if i+k >= len(nodes) { k = len(nodes) - i } wg.Add(1) go func(i, k int) { //dividde and conquer the area that needs updating for z := i; z < i+k; z++ { n := nodes[z] x1 := z % xySize y1 := z / xySize if dist := float64((x1-x2)*(x1-x2) + (y1-y2)*(y1-y2)); dist < sq { //caclulate the distance from our node to this node neighbour := math.Exp(-dist / (2 * sq)) //neighbour modification value - decays as it gets further away for v, s := range selection { n[v] 
+= lRate * neighbour * (s - n[v]) } } } wg.Done() }(i, k) i += k } wg.Wait() } println("SOM done.") return &BasicSOM{ nodes: nodes, xySize: xySize, df: df, } } //GetBMU get best matching unit x and y coords func (s *BasicSOM) GetBMU(vec []float64) (int, int) { winner := getNearest(vec, s.nodes, s.df) return (winner % s.xySize) + 1, (winner / s.xySize) + 1 } func getNearest(vec []float64, nodes [][]float64, df DistanceFunction) int { var minDistance float64 = math.MaxFloat64 var winner int i := 0 wg := sync.WaitGroup{} var lck sync.Mutex k := len(nodes) / concurrent //find the smallest for i < len(nodes) { if i+k >= len(nodes) { k = len(nodes) - i } wg.Add(1) go func(i, k int) { //divide and conquer the seach var md float64 = math.MaxFloat64 var w int for x := i; x < i+k; x++ { if d := df(vec, nodes[x]); d < md { md = d w = x } } lck.Lock() if md < minDistance { minDistance = md winner = w } lck.Unlock() wg.Done() }(i, k) i += k } wg.Wait() return winner } //EuclideanDistanceSquared squared Euclidean distance function func EuclideanDistanceSquared(a1, a2 []float64) float64 { var d float64 for i := range a1 { t := a1[i] - a2[i] d += t * t } return d } //RandomInit - init with 0 to 1 random values func RandomInit(dimensions, xySize int, vectors [][]float64) [][]float64 { //init with random values between 0,0 and 1.0 nodes := make([][]float64, xySize*xySize) println("initialising map") i := 0 for x := 0; x < xySize; x++ { for y := 0; y < xySize; y++ { weight := make([]float64, dimensions) for w := range weight { weight[w] = rand.Float64() } nodes[i] = weight i++ } } return nodes }
basicsom.go
0.53437
0.526769
basicsom.go
starcoder
package models // TableAdd defines the table to be added to an existing keyspace type TableAdd struct { Name string `validate:"required"` // Attempting to create an existing table returns an error unless the IF NOT EXISTS option is used. If the option is // used, the statement if a no-op is the table already exists. IfNotExists bool `json:"ifNotExists,omitempty"` ColumnDefinitions []ColumnDefinition `json:"columnDefinitions,omitempty"` // Defines a column list for the primary key. Can be either a single column, compound primary key, or composite partition key. PrimaryKey *PrimaryKey `json:"primaryKey" validate:"required"` TableOptions *TableOptions `json:"tableOptions,omitempty"` } // PrimaryKey defines a column list for the primary key. Can be either a single column, compound primary key, or composite partition // key. Provide multiple columns for the partition key to define a composite partition key. type PrimaryKey struct { // The column(s) that will constitute the partition key. PartitionKey []string `json:"partitionKey" validate:"required"` // The column(s) that will constitute the clustering key. ClusteringKey []string `json:"clusteringKey,omitempty"` } // TableOptions are various properties that tune data handling, including I/O operations, compression, and compaction. type TableOptions struct { // TTL (Time To Live) in seconds, where zero is disabled. The maximum configurable value is 630720000 (20 years). If // the value is greater than zero, TTL is enabled for the entire table and an expiration timestamp is added to each // column. A new TTL timestamp is calculated each time the data is updated and the row is removed after all the data expires. DefaultTimeToLive *int32 `json:"defaultTimeToLive,omitempty" validate:"gte=0,lte=630720000"` ClusteringExpression []ClusteringExpression `json:"clusteringExpression,omitempty"` } // ClusteringExpression allows for ordering rows so that storage is able to make use of the on-disk sorting of columns. 
Specifying // order can make query results more efficient. type ClusteringExpression struct { Column *string `json:"column" validate:"required"` Order *string `json:"order" validate:"required"` }
rest/models/model_table_add.go
0.804329
0.412353
model_table_add.go
starcoder
package objects import ( "errors" "io" "math/rand" ) // A simple two dimensional grid type Grid struct { Cells []bool Width int Height int } // Creates and returns a Grid of specified size func MakeGrid(width int, height int) Grid { g := Grid{ Cells: make([]bool, width*height), Width: width, Height: height, } return g } // Randomizes a grid's cells to alive/dead by weight func (g *Grid) Randomize(weight float32) (err error) { if weight < 0 || weight > 1 { err = errors.New("Weight must be between 0 and 1") } for i := range g.Cells { g.Cells[i] = rand.Float32() < weight } return err } // Checks if cell at specified position is alive func (g *Grid) Alive(x int, y int) bool { return g.Cells[g.Index(x, y)] } // Returns the total number of live cells in the grid func (g *Grid) CountLiveCells() int { var ret int for _, c := range g.Cells { if c { ret += 1 } } return ret } // Gets the life status of a cell func (g *Grid) Get(x int, y int) bool { if x < 0 || y < 0 { return false } if x >= g.Width || y >= g.Height { return false } return g.Cells[g.Index(x, y)] } // Sets the life status of a cell func (g *Grid) Set(x int, y int, state bool) { g.Cells[g.Index(x, y)] = state } // Projects the status of the grid to a new grid, based on Conways GoL func (in *Grid) Project(out *Grid) error { if len(in.Cells) != len(out.Cells) || in.Width != out.Width || in.Height != out.Height { return errors.New("in and out grids are not identical in size") } var n int for y := 0; y < in.Height; y++ { for x := 0; x < in.Width; x++ { n = in.AliveNeighbours(x, y) out.Set(x, y, n < 4 && n > 1) } } return nil } // Returns the base array index of the given coords func (g *Grid) Index(x int, y int) int { return y*g.Width + x } // Renders the Grid to stdout with X meaning alive and empty string meaing dead func (g *Grid) Render(out io.Writer) { var state string for i, c := range g.Cells { if c { state = "X" } else { state = " " } if i%g.Width == 0 { out.Write([]byte("\n")) } out.Write([]byte(state)) } } 
// Returns the x, y coords of a cell index func (g *Grid) GetCoords(i int) (x, y int) { x = i % g.Width y = i / g.Width return x, y } // Traverses the 8 squares around the coordinate and returns // how many are living int thouse squares func (g *Grid) AliveNeighbours(x int, y int) int { var ret int for _y := -1; _y <= 1; _y++ { for _x := -1; _x <= 1; _x++ { if !(_x == x && _y == y) && g.Get(x+_x, y+_y) { ret++ } } } return ret } // Shorthand for getting neighbour status by index func (g *Grid) AliveNeighboursByIndex(i int) int { x, y := g.GetCoords(i) return g.AliveNeighbours(x, y) }
objects/grid.go
0.812198
0.515742
grid.go
starcoder
package a2l import ( "errors" "strconv" "github.com/rs/zerolog/log" ) /*Description of rescaling the axis values of an adjustable object. A rescale axis consists mainly of a number of rescaling axis points pairs (axis i , virtual i ) which describe a rescale mapping between the axis points and a virtual axis that is used for the access of the table function values deposited in the control unit. Between two pairs the mapping is linear. Both, the axis points and the virtual axis points must be in ascending order. Consider, for example, the three rescale pairs (0x00, 0x00), (0x64, 0xC0) and (0xD8, 0xFF). Then all axis points between 0x00 and 0x64 are mapped linear to the virtual axis [0x00, 0xC0], and all axis points between 0x64 and 0xD8 are mapped linear to the virtual axis [0xC0, 0xFF]: Accordingly, to each axis point there is a virtual axis point. The virtual axis points are distributed equidistantly on the virtual axis including the axis limits, e.g. the virtual axis points can be derived from the size of the virtual axis and the number of axis points. According to the rescale mapping the axis point can be computed from the virtual axis points. The following algorithm can be applied, where D is the length of the (equidistant) intervals on virtual axis: The following example makes clear how the evaluation of the formula can be used to derive the actual axis points. We have no_of_rescale_pairs = 3 and virtual 1 = 0x00 = 0, virtual 2 = 0xC0 = 192, virtual 3 = 0xFF = 255, axis 1 = 0x00 = 0, axis 2 = 0x64 = 100, axis 3 = 0xD8 = 216. Assume no_axis_pts = 9, and therefore D = 32. The first of the two executions of the inner loop (j-loop) is on virtual 2 – virtual 1 / D = 192/32 = 6 iterations. For each iteration (axis 2 – axis 1 )/(virtual 2 – virtual 1 ) = 100/192, and therefore X 2 = 0 + 32 * 100/192 = 16,666, X 3 = 0 + 64 * 100/192 = 33,333, X 4 = 0 + 96 * 100/192 = 50, X 5 = 0 + 128 * 100/192 =66,666, X 6 = 0 + 160 * 100/192 = 83,333. 
For the second execution there are virtual 3 – virtual 2 / D = 2 iterations with (axis 3 – axis 2 )/(virtual 3 – virtual 2 ) = 116/64. Consequently X 7 = 100 + (192 – 192) * 116/64 = 100 and X 8 = 100 + (224 – 192) * 116/64 = 158. Also X 1 = axis 1 = 0 and X 9 = axis 3 = 216.*/ type axisRescaleX struct { //position of the rescale axis point value pairs in the deposit structure (description of sequence of elements in the data record). position uint16 positionSet bool datatype dataTypeEnum datatypeSet bool maxNumberOfRescalePairs uint16 maxNumberOfRescalePairsSet bool indexIncr indexOrderEnum indexIncrSet bool adressing addrTypeEnum adressingSet bool } func parseAxisRescaleX(tok *tokenGenerator) (axisRescaleX, error) { arX := axisRescaleX{} var err error forLoop: for { tok.next() if tok.current() == emptyToken { err = errors.New("unexpected end of file") log.Err(err).Msg("axisRescaleX could not be parsed") break forLoop } else if isKeyword(tok.current()) { err = errors.New("unexpected token " + tok.current()) log.Err(err).Msg("axisRescaleX could not be parsed") break forLoop } else if !arX.positionSet { var buf uint64 buf, err = strconv.ParseUint(tok.current(), 10, 16) if err != nil { log.Err(err).Msg("axisRescaleX position could not be parsed") break forLoop } arX.position = uint16(buf) arX.positionSet = true log.Info().Msg("axisRescaleX position successfully parsed") } else if !arX.datatypeSet { arX.datatype, err = parseDataTypeEnum(tok) if err != nil { log.Err(err).Msg("axisRescaleX datatype could not be parsed") break forLoop } arX.datatypeSet = true log.Info().Msg("axisRescaleX datatype successfully parsed") } else if !arX.maxNumberOfRescalePairsSet { var buf uint64 buf, err = strconv.ParseUint(tok.current(), 10, 16) if err != nil { log.Err(err).Msg("axisRescaleX maxNumberOfRescalePairs could not be parsed") break forLoop } arX.maxNumberOfRescalePairs = uint16(buf) arX.maxNumberOfRescalePairsSet = true log.Info().Msg("axisRescaleX maxNumberOfRescalePairs 
successfully parsed") } else if !arX.indexIncrSet { arX.indexIncr, err = parseIndexOrderEnum(tok) if err != nil { log.Err(err).Msg("axisRescaleX indexIncr could not be parsed") break forLoop } arX.indexIncrSet = true log.Info().Msg("axisRescaleX indexIncr successfully parsed") } else if !arX.adressingSet { arX.adressing, err = parseAddrTypeEnum(tok) if err != nil { log.Err(err).Msg("axisRescaleX adressing could not be parsed") break forLoop } arX.adressingSet = true log.Info().Msg("axisRescaleX adressing successfully parsed") break forLoop } } return arX, err }
a2l/axis_rescale_x.go
0.562417
0.787646
axis_rescale_x.go
starcoder
package world import ( "errors" "math" ) const degreeToRad = math.Pi / 180 const radToDegree = 180 / math.Pi type LatLonToGameFunc func(lat, lon float64) (x, y int) type GameToLatLonFunc func(x, y int) (lat, lon float64) // Creates functions for converting into and out of game pixels func CreateConverters(metadata *Metadata) (to LatLonToGameFunc, from GameToLatLonFunc, err error) { if metadata == nil { return nil, nil, errors.New("metadata cannot be nil") } // Define R such that the width of the map is correct R := float64(metadata.width) / ((metadata.lon2 * degreeToRad) - (metadata.lon1 * degreeToRad)) originX, originY := mercator(metadata.Lat1(), metadata.Lon1(), R) _, endY := mercator(metadata.Lat2(), metadata.Lon2(), R) // Determine the scale factor for height so that it comes out correctly scaleFactor := float64(metadata.height) / (endY - originY) to = func(lat, lon float64) (x, y int) { fx, fy := mercator(lat, lon, R) return int(math.Round(fx - originX)), int(math.Round((fy - originY) * scaleFactor)) } from = func(x, y int) (lat, lon float64) { lat, lon = invMercator(float64(x)+originX, float64(y)/scaleFactor+originY, R) return } return to, from, nil } // Converts from a latitude and longitude into game pixels. For batch conversions the CreateConverters function should // be used instead func LatLonToGame(metadata *Metadata, lat, lon float64) (x, y int, err error) { to, _, err := CreateConverters(metadata) if err != nil { return -1, -1, err } x, y = to(lat, lon) return } // Converts from game pixels into latitude and longitude. 
For batch conversions the CreateConverters function should // be used instead func GameToLatLon(metadata *Metadata, x, y int) (lat, lon float64, err error) { _, from, err := CreateConverters(metadata) if err != nil { return -1, -1, err } lat, lon = from(x, y) return } // https://en.wikipedia.org/wiki/Mercator_projection#Derivation_of_the_Mercator_projection func mercator(lat, lon, R float64) (x, y float64) { return R * lon * degreeToRad, R * math.Log(math.Tan((math.Pi/4)+(lat*degreeToRad/2.0))) } // https://en.wikipedia.org/wiki/Mercator_projection#Inverse_transformations func invMercator(x, y, R float64) (lat, lon float64) { return (2*math.Atan(math.Exp(y/R)) - (math.Pi / 2)) * radToDegree, (x / R) * radToDegree }
world/projection.go
0.851058
0.543469
projection.go
starcoder
package frontend type FieldType uint64 const ( FieldTypeBoolean FieldType = iota FieldTypeInteger FieldTypeString FieldTypeFloat FieldTypeNull ) func (f FieldType) String() string { switch f { case FieldTypeBoolean: return "boolean" case FieldTypeInteger: return "integer" case FieldTypeString: return "string" case FieldTypeFloat: return "float" case FieldTypeNull: return "null" } panic("programming error: unexpected field type in String() of FieldType") } type Value struct { Typ FieldType Val interface{} } func (v *Value) GetAsBoolean() bool { if v.Typ != FieldTypeBoolean { panic("programming error: expected type to be boolean") } return v.Val.(bool) } func (v *Value) GetAsInt() int64 { switch t := v.Val.(type) { case int64: return t case int: return int64(t) default: panic("programming error: expected type to be integer") } } func (v *Value) GetAsFloat() float64 { if v.Typ != FieldTypeFloat { panic("programming error: expected type to be float") } return v.Val.(float64) } func (v *Value) GetAsString() string { if v.Typ != FieldTypeString { panic("programming error: expected type to be string") } return v.Val.(string) } var ( // Types which can be operands of the '+' operator OperatorPlusOperandTypes = map[FieldType]bool{FieldTypeInteger: true, FieldTypeFloat: true, FieldTypeString: true} // Types which can be operands of the '-' operator OperatorMinusOperandTypes = map[FieldType]bool{FieldTypeInteger: true, FieldTypeFloat: true} // Types which can be operands of the '*' operator OperatorAsteriskOperandTypes = map[FieldType]bool{FieldTypeInteger: true, FieldTypeFloat: true} // Types which can be operands of the '/' operator OperatorSlashOperandTypes = map[FieldType]bool{FieldTypeInteger: true, FieldTypeFloat: true} // Types which can be operands of the '%' operator OperatorPercentOperandTypes = map[FieldType]bool{FieldTypeInteger: true} // Types which can be operands of the '>', '>=', '<' & '<=' operators OperatorComparisonOperandTypes = 
map[FieldType]bool{FieldTypeInteger: true, FieldTypeFloat: true, FieldTypeString: true} )
pkg/frontend/types.go
0.837554
0.448426
types.go
starcoder
package placement import ( "math" "sort" "github.com/pingcap/kvproto/pkg/metapb" "github.com/tikv/pd/server/core" ) // RegionFit is the result of fitting a region's peers to rule list. // All peers are divided into corresponding rules according to the matching // rules, and the remaining Peers are placed in the OrphanPeers list. type RegionFit struct { RuleFits []*RuleFit OrphanPeers []*metapb.Peer } // IsSatisfied returns if the rules are properly satisfied. // It means all Rules are fulfilled and there is no orphan peers. func (f *RegionFit) IsSatisfied() bool { if len(f.RuleFits) == 0 { return false } for _, r := range f.RuleFits { if !r.IsSatisfied() { return false } } return len(f.OrphanPeers) == 0 } // GetRuleFit returns the RuleFit that contains the peer. func (f *RegionFit) GetRuleFit(peerID uint64) *RuleFit { for _, rf := range f.RuleFits { for _, p := range rf.Peers { if p.GetId() == peerID { return rf } } } return nil } // CompareRegionFit determines the superiority of 2 fits. // It returns 1 when the first fit result is better. func CompareRegionFit(a, b *RegionFit) int { for i := range a.RuleFits { if i >= len(b.RuleFits) { break } if cmp := compareRuleFit(a.RuleFits[i], b.RuleFits[i]); cmp != 0 { return cmp } } switch { case len(a.OrphanPeers) < len(b.OrphanPeers): return 1 case len(a.OrphanPeers) > len(b.OrphanPeers): return -1 default: return 0 } } // RuleFit is the result of fitting status of a Rule. type RuleFit struct { Rule *Rule // Peers of the Region that are divided to this Rule. Peers []*metapb.Peer // PeersWithDifferentRole is subset of `Peers`. It contains all Peers that have // different Role from configuration (the Role can be migrated to target role // by scheduling). PeersWithDifferentRole []*metapb.Peer // IsolationScore indicates at which level of labeling these Peers are // isolated. A larger value is better. IsolationScore float64 } // IsSatisfied returns if the rule is properly satisfied. 
func (f *RuleFit) IsSatisfied() bool { return len(f.Peers) == f.Rule.Count && len(f.PeersWithDifferentRole) == 0 } func compareRuleFit(a, b *RuleFit) int { switch { case len(a.Peers) < len(b.Peers): return -1 case len(a.Peers) > len(b.Peers): return 1 case len(a.PeersWithDifferentRole) > len(b.PeersWithDifferentRole): return -1 case len(a.PeersWithDifferentRole) < len(b.PeersWithDifferentRole): return 1 case a.IsolationScore < b.IsolationScore: return -1 case a.IsolationScore > b.IsolationScore: return 1 default: return 0 } } // StoreSet represents the store container. type StoreSet interface { GetStores() []*core.StoreInfo GetStore(id uint64) *core.StoreInfo } // FitRegion tries to fit peers of a region to the rules. func FitRegion(stores StoreSet, region *core.RegionInfo, rules []*Rule) *RegionFit { w := newFitWorker(stores, region, rules) w.run() return &w.bestFit } type fitWorker struct { stores []*core.StoreInfo bestFit RegionFit // update during execution peers []*fitPeer // p.selected is updated during execution. rules []*Rule } func newFitWorker(stores StoreSet, region *core.RegionInfo, rules []*Rule) *fitWorker { regionPeers := region.GetPeers() peers := make([]*fitPeer, 0, len(regionPeers)) for _, p := range regionPeers { peers = append(peers, &fitPeer{ Peer: p, store: stores.GetStore(p.GetStoreId()), isLeader: region.GetLeader().GetId() == p.GetId(), }) } // Sort peers to keep the match result deterministic. sort.Slice(peers, func(i, j int) bool { return peers[i].GetId() < peers[j].GetId() }) return &fitWorker{ stores: stores.GetStores(), bestFit: RegionFit{RuleFits: make([]*RuleFit, len(rules))}, peers: peers, rules: rules, } } func (w *fitWorker) run() { w.fitRule(0) w.updateOrphanPeers(0) // All peers go to orphanList when RuleList is empty. } // Pick the most suitable peer combination for the rule. // Index specifies the position of the rule. // returns true if it replaces `bestFit` with a better alternative. 
func (w *fitWorker) fitRule(index int) bool { if index >= len(w.rules) { return false } var candidates []*fitPeer if checkRule(w.rules[index], w.stores) { // Only consider stores: // 1. Match label constraints // 2. Role match, or can match after transformed. // 3. Not selected by other rules. for _, p := range w.peers { if MatchLabelConstraints(p.store, w.rules[index].LabelConstraints) && p.matchRoleLoose(w.rules[index].Role) && !p.selected { candidates = append(candidates, p) } } } count := w.rules[index].Count if len(candidates) < count { count = len(candidates) } return w.enumPeers(candidates, nil, index, count) } // Recursively traverses all feasible peer combinations. // For each combination, call `compareBest` to determine whether it is better // than the existing option. // Returns true if it replaces `bestFit` with a better alternative. func (w *fitWorker) enumPeers(candidates, selected []*fitPeer, index int, count int) bool { if len(selected) == count { // We collect enough peers. End recursive. return w.compareBest(selected, index) } var better bool for i, p := range candidates { p.selected = true better = w.enumPeers(candidates[i+1:], append(selected, p), index, count) || better p.selected = false } return better } // compareBest checks if the selected peers is better then previous best. // Returns true if it replaces `bestFit` with a better alternative. func (w *fitWorker) compareBest(selected []*fitPeer, index int) bool { rf := newRuleFit(w.rules[index], selected) cmp := 1 if best := w.bestFit.RuleFits[index]; best != nil { cmp = compareRuleFit(rf, best) } switch cmp { case 1: w.bestFit.RuleFits[index] = rf // Reset previous result after position index. for i := index + 1; i < len(w.rules); i++ { w.bestFit.RuleFits[i] = nil } w.fitRule(index + 1) w.updateOrphanPeers(index + 1) return true case 0: if w.fitRule(index + 1) { w.bestFit.RuleFits[index] = rf return true } } return false } // determine the orphanPeers list based on fitPeer.selected flag. 
func (w *fitWorker) updateOrphanPeers(index int) { if index != len(w.rules) { return } w.bestFit.OrphanPeers = w.bestFit.OrphanPeers[:0] for _, p := range w.peers { if !p.selected { w.bestFit.OrphanPeers = append(w.bestFit.OrphanPeers, p.Peer) } } } func newRuleFit(rule *Rule, peers []*fitPeer) *RuleFit { rf := &RuleFit{Rule: rule, IsolationScore: isolationScore(peers, rule.LocationLabels)} for _, p := range peers { rf.Peers = append(rf.Peers, p.Peer) if !p.matchRoleStrict(rule.Role) { rf.PeersWithDifferentRole = append(rf.PeersWithDifferentRole, p.Peer) } } return rf } type fitPeer struct { *metapb.Peer store *core.StoreInfo isLeader bool selected bool } func (p *fitPeer) matchRoleStrict(role PeerRoleType) bool { switch role { case Voter: // Voter matches either Leader or Follower. return !core.IsLearner(p.Peer) case Leader: return p.isLeader case Follower: return !core.IsLearner(p.Peer) && !p.isLeader case Learner: return core.IsLearner(p.Peer) } return false } func (p *fitPeer) matchRoleLoose(role PeerRoleType) bool { // non-learner cannot become learner. All other roles can migrate to // others by scheduling. For example, Leader->Follower, Learner->Leader // are possible, but Voter->Learner is impossible. return role != Learner || core.IsLearner(p.Peer) } func isolationScore(peers []*fitPeer, labels []string) float64 { var score float64 if len(labels) == 0 || len(peers) <= 1 { return 0 } // NOTE: following loop is partially duplicated with `core.DistinctScore`. // The reason not to call it directly is that core.DistinctScore only // accepts `[]StoreInfo` not `[]*fitPeer` and I don't want alloc slice // here because it is kind of hot path. // After Go supports generics, we will be enable to do some refactor and // reuse `core.DistinctScore`. 
const replicaBaseScore = 100 for i, p1 := range peers { for _, p2 := range peers[i+1:] { if index := p1.store.CompareLocation(p2.store, labels); index != -1 { score += math.Pow(replicaBaseScore, float64(len(labels)-index-1)) } } } return score }
server/schedule/placement/fit.go
0.814791
0.47792
fit.go
starcoder
package math import ( "github.com/gopherjs/gopherjs/js" ) var math = js.Global.Get("Math") var zero float64 = 0 var posInf = 1 / zero var negInf = -1 / zero var nan = 0 / zero func Acos(x float64) float64 { return math.Call("acos", x).Float() } func Acosh(x float64) float64 { return math.Call("acosh", x).Float() } func Asin(x float64) float64 { return math.Call("asin", x).Float() } func Asinh(x float64) float64 { return math.Call("asinh", x).Float() } func Atan(x float64) float64 { return math.Call("atan", x).Float() } func Atanh(x float64) float64 { return math.Call("atanh", x).Float() } func Atan2(y, x float64) float64 { return math.Call("atan2", y, x).Float() } func Cbrt(x float64) float64 { return math.Call("cbrt", x).Float() } func Ceil(x float64) float64 { return math.Call("ceil", x).Float() } func Copysign(x, y float64) float64 { if (x < 0 || 1/x == negInf) != (y < 0 || 1/y == negInf) { return -x } return x } func Cos(x float64) float64 { return math.Call("cos", x).Float() } func Cosh(x float64) float64 { return math.Call("cosh", x).Float() } func Erf(x float64) float64 { return erf(x) } func Erfc(x float64) float64 { return erfc(x) } func Exp(x float64) float64 { return math.Call("exp", x).Float() } func Exp2(x float64) float64 { return math.Call("pow", 2, x).Float() } func Expm1(x float64) float64 { return expm1(x) } func Floor(x float64) float64 { return math.Call("floor", x).Float() } func Frexp(f float64) (frac float64, exp int) { return frexp(f) } func Hypot(p, q float64) float64 { return hypot(p, q) } func Inf(sign int) float64 { switch { case sign >= 0: return posInf default: return negInf } } func IsInf(f float64, sign int) bool { if f == posInf { return sign >= 0 } if f == negInf { return sign <= 0 } return false } func IsNaN(f float64) (is bool) { return f != f } func Ldexp(frac float64, exp int) float64 { if -1024 < exp && exp < 1024 { // Use Math.pow for small exp values where it's viable. For performance. 
if frac == 0 { return frac } return frac * math.Call("pow", 2, exp).Float() } return ldexp(frac, exp) } func Log(x float64) float64 { if x != x { // workaround for optimizer bug in V8, remove at some point return nan } return math.Call("log", x).Float() } func Log10(x float64) float64 { return log10(x) } func Log1p(x float64) float64 { return log1p(x) } func Log2(x float64) float64 { return log2(x) } func Max(x, y float64) float64 { return max(x, y) } func Min(x, y float64) float64 { return min(x, y) } func Mod(x, y float64) float64 { return js.Global.Call("$mod", x, y).Float() } func Modf(f float64) (float64, float64) { if f == posInf || f == negInf { return f, nan } if 1/f == negInf { return f, f } frac := Mod(f, 1) return f - frac, frac } func NaN() float64 { return nan } func Pow(x, y float64) float64 { if x == 1 || (x == -1 && (y == posInf || y == negInf)) { return 1 } return math.Call("pow", x, y).Float() } func Remainder(x, y float64) float64 { return remainder(x, y) } func Signbit(x float64) bool { return x < 0 || 1/x == negInf } func Sin(x float64) float64 { return math.Call("sin", x).Float() } func Sinh(x float64) float64 { return math.Call("sinh", x).Float() } func Sincos(x float64) (sin, cos float64) { return Sin(x), Cos(x) } func Sqrt(x float64) float64 { return math.Call("sqrt", x).Float() } func Tan(x float64) float64 { return math.Call("tan", x).Float() } func Tanh(x float64) float64 { return math.Call("tanh", x).Float() } func Trunc(x float64) float64 { if x == posInf || x == negInf || x != x || 1/x == negInf { return x } return Copysign(float64(int(x)), x) } var buf struct { uint32array [2]uint32 float32array [2]float32 float64array [1]float64 } func init() { ab := js.Global.Get("ArrayBuffer").New(8) js.InternalObject(buf).Set("uint32array", js.Global.Get("Uint32Array").New(ab)) js.InternalObject(buf).Set("float32array", js.Global.Get("Float32Array").New(ab)) js.InternalObject(buf).Set("float64array", js.Global.Get("Float64Array").New(ab)) } func 
Float32bits(f float32) uint32 { buf.float32array[0] = f return buf.uint32array[0] } func Float32frombits(b uint32) float32 { buf.uint32array[0] = b return buf.float32array[0] } func Float64bits(f float64) uint64 { buf.float64array[0] = f return uint64(buf.uint32array[1])<<32 + uint64(buf.uint32array[0]) } func Float64frombits(b uint64) float64 { buf.uint32array[0] = uint32(b) buf.uint32array[1] = uint32(b >> 32) return buf.float64array[0] }
vendor/github.com/gopherjs/gopherjs/compiler/natives/src/math/math.go
0.78403
0.649162
math.go
starcoder
package m32 import "math" const ( E = float32(2.71828182845904523536028747135266249775724709369995957496696763) // https://oeis.org/A001113 Pi = float32(3.14159265358979323846264338327950288419716939937510582097494459) // https://oeis.org/A000796 Phi = float32(1.61803398874989484820458683436563811772030917980576286213544862) // https://oeis.org/A001622 Sqrt2 = float32(1.41421356237309504880168872420969807856967187537694807317667974) // https://oeis.org/A002193 SqrtE = float32(1.64872127070012814684865078781416357165377610071014801157507931) // https://oeis.org/A019774 SqrtPi = float32(1.77245385090551602729816748334114518279754945612238712821380779) // https://oeis.org/A002161 SqrtPhi = float32(1.27201964951406896425242246173749149171560804184009624861664038) // https://oeis.org/A139339 Ln2 = float32(0.693147180559945309417232121458176568075500134360255254120680009) // https://oeis.org/A002162 Log2E = float32(1 / Ln2) Ln10 = float32(2.30258509299404568401799145468436420760110148862877297603332790) // https://oeis.org/A002392 Log10E = float32(1 / Ln10) MaxFloat32 = math.MaxFloat32 SmallestNonzeroFloat32 = math.SmallestNonzeroFloat32 ) // Abs = math.Abs func Abs(x float32) float32 { return float32(math.Abs(float64(x))) } // Acos = math.Acos func Acos(x float32) float32 { return float32(math.Acos(float64(x))) } // Acosh = math.Acosh func Acosh(x float32) float32 { return float32(math.Acosh(float64(x))) } // Asin = math.Asin func Asin(x float32) float32 { return float32(math.Asin(float64(x))) } // Asinh = math.Asinh func Asinh(x float32) float32 { return float32(math.Asinh(float64(x))) } // Atan = math.Atan func Atan(x float32) float32 { return float32(math.Atan(float64(x))) } // Atan2 = math.Atan2 func Atan2(y, x float32) float32 { return float32(math.Atan2(float64(x), float64(y))) } // Atanh = math.Atanh func Atanh(x float32) float32 { return float32(math.Atanh(float64(x))) } // Cbrt = math.Cbrt func Cbrt(x float32) float32 { return float32(math.Cbrt(float64(x))) } 
// Ceil = math.Ceil func Ceil(x float32) float32 { return float32(math.Ceil(float64(x))) } // Copysign = math.Copysign func Copysign(x, y float32) float32 { return float32(math.Copysign(float64(x), float64(y))) } // Cos = math.Cos func Cos(x float32) float32 { return float32(math.Cos(float64(x))) } // Cosh = math.Cosh func Cosh(x float32) float32 { return float32(math.Cosh(float64(x))) } // Dim = math.Dim func Dim(x, y float32) float32 { return float32(math.Dim(float64(x), float64(y))) } // Erf = math.Erf func Erf(x float32) float32 { return float32(math.Erf(float64(x))) } // Erfc = math.Erfc func Erfc(x float32) float32 { return float32(math.Erfc(float64(x))) } // Erfcinv = math.Erfcinv func Erfcinv(x float32) float32 { return float32(math.Erfcinv(float64(x))) } // Erfinv = math.Erfinv func Erfinv(x float32) float32 { return float32(math.Erfinv(float64(x))) } // Exp = math.Exp func Exp(x float32) float32 { return float32(math.Exp(float64(x))) } // Exp2 = math.Exp2 func Exp2(x float32) float32 { return float32(math.Exp2(float64(x))) } // Expm1 = math.Expm1 func Expm1(x float32) float32 { return float32(math.Expm1(float64(x))) } // Floor = math.Floor func Floor(x float32) float32 { return float32(math.Floor(float64(x))) } // Frexp = math.Frexp func Frexp(f float32) (float32, int) { frac, exp := math.Frexp(float64(f)) return float32(frac), exp } // Gamma = math.Gamma func Gamma(x float32) float32 { return float32(math.Gamma(float64(x))) } // Hypot = math.Hypot func Hypot(p, q float32) float32 { return float32(math.Hypot(float64(p), float64(q))) } // Ilogb = math.Ilogb func Ilogb(x float32) int { return math.Ilogb(float64(x)) } // Inf = math.Inf func Inf(sign int) float32 { return float32(math.Inf(sign)) } // IsInf = math.IsInf func IsInf(f float32, sign int) bool { return math.IsInf(float64(f), sign) } // IsNaN = math.IsNaN func IsNaN(f float32) bool { return math.IsNaN(float64(f)) } // J0 = math.J0 func J0(x float32) float32 { return float32(math.J0(float64(x))) } 
// J1 = math.J1 func J1(x float32) float32 { return float32(math.J1(float64(x))) } // Jn = math.Jn func Jn(n int, x float32) float32 { return float32(math.Jn(n, float64(x))) } // Ldexp = math.Ldexp func Ldexp(frac float32, exp int) float32 { return float32(math.Ldexp(float64(frac), exp)) } // Lgamma = math.Lgamma func Lgamma(x float32) (float32, int) { lgamma, sign := math.Lgamma(float64(x)) return float32(lgamma), sign } // Log = math.Log func Log(x float32) float32 { return float32(math.Log(float64(x))) } // Log10 = math.Log10 func Log10(x float32) float32 { return float32(math.Log10(float64(x))) } // Log1p = math.Log1p func Log1p(x float32) float32 { return float32(math.Log1p(float64(x))) } // Log2 = math.Log2 func Log2(x float32) float32 { return float32(math.Log2(float64(x))) } // Logb = math.Logb func Logb(x float32) float32 { return float32(math.Logb(float64(x))) } // Max = math.Max func Max(x, y float32) float32 { return float32(math.Max(float64(x), float64(y))) } // Min = math.Min func Min(x, y float32) float32 { return float32(math.Min(float64(x), float64(y))) } // Mod = math.Mod func Mod(x, y float32) float32 { return float32(math.Mod(float64(x), float64(y))) } // Modf = math.Modf func Modf(f float32) (float32, float32) { whole, frac := math.Modf(float64(f)) return float32(whole), float32(frac) } // NaN = math.NaN func NaN() float32 { return float32(math.NaN()) } // Pow = math.Pow func Pow(x, y float32) float32 { return float32(math.Pow(float64(x), float64(y))) } // Pow10 = math.Pow10 func Pow10(n int) float32 { return float32(math.Pow10(n)) } // Remainder = math.Remainder func Remainder(x, y float32) float32 { return float32(math.Remainder(float64(x), float64(y))) } // Round = math.Round func Round(x float32) float32 { return float32(math.Round(float64(x))) } // RoundToEven = math.RoundToEven func RoundToEven(x float32) float32 { return float32(math.RoundToEven(float64(x))) } // Signbit = math.Signbit func Signbit(x float32) bool { return 
math.Signbit(float64(x)) } // Sin = math.Sin func Sin(x float32) float32 { return float32(math.Sin(float64(x))) } // Sincos = math.Sincos func Sincos(x float32) (float32, float32) { sin, cos := math.Sincos(float64(x)) return float32(sin), float32(cos) } // Sinh = math.Sinh func Sinh(x float32) float32 { return float32(math.Sinh(float64(x))) } // Sqrt = math.Sqrt func Sqrt(x float32) float32 { return float32(math.Sqrt(float64(x))) } // Tan = math.Tan func Tan(x float32) float32 { return float32(math.Tan(float64(x))) } // Tanh = math.Tanh func Tanh(x float32) float32 { return float32(math.Tanh(float64(x))) } // Trunc = math.Trunc func Trunc(x float32) float32 { return float32(math.Trunc(float64(x))) } // Y0 = math.Y0 func Y0(x float32) float32 { return float32(math.Y0(float64(x))) } // Y1 = math.Y1 func Y1(x float32) float32 { return float32(math.Y1(float64(x))) } // Yn = math.Yn func Yn(n int, x float32) float32 { return float32(math.Yn(n, float64(x))) }
m32/m32.go
0.817429
0.584834
m32.go
starcoder
// Package skiplist is an implementation of a skiplist to store elements in increasing order. 递增排序 // It allows finding, insertion and deletion operations in approximately O(n log(n)). // Additionally, there are methods for retrieving the next and previous element as well as changing the actual value // without the need for re-insertion (as long as the key stays the same!) // Skiplist is a fast alternative to a balanced tree. package skiplist import ( "fmt" "math" "math/bits" "math/rand" "time" ) const ( // maxLevel denotes the maximum height of the skiplist. This height will keep the skiplist // efficient for up to 34m entries. If there is a need for much more, please adjust this constant accordingly. maxLevel = 25 eps = 0.00001 ) // ListElement is the interface to implement for elements that are inserted into the skiplist. type ListElement interface { // ExtractKey() returns a float64 representation of the key that is used for insertion/deletion/find. // It needs to establish an order over all elements ExtractKey() float64 // A string representation of the element. Can be used for pretty-printing the list. // Otherwise just return an empty string. String() string } // SkipListElement represents one actual Node in the skiplist structure. // It saves the actual element, pointers to the next nodes and a pointer // to one previous node. // 双向链表 type SkipListElement struct { next [maxLevel]*SkipListElement level int key float64 value ListElement // 具体值,接口类型 prev *SkipListElement } // SkipList is the actual skiplist representation. // It saves all nodes accessible from the start and end and keeps track of element count, eps and levels. type SkipList struct { startLevels [maxLevel]*SkipListElement endLevels [maxLevel]*SkipListElement maxNewLevel int maxLevel int elementCount int eps float64 // 误差 } // NewSeedEps returns a new empty, initialized Skiplist. // Given a seed, a deterministic height/list behaviour can be achieved. 
// Eps is used to compare keys given by the ExtractKey() function on equality. // Eps 用于比较 ExtractKey() 函数给出的键是否相等。 func NewSeedEps(seed int64, eps float64) SkipList { // Initialize random number generator. rand.Seed(seed) //fmt.Printf("SkipList seed: %v\n", seed) list := SkipList{ startLevels: [maxLevel]*SkipListElement{}, endLevels: [maxLevel]*SkipListElement{}, maxNewLevel: maxLevel, maxLevel: 0, elementCount: 0, eps: eps, } return list } // NewEps returns a new empty, initialized Skiplist. // Eps is used to compare keys given by the ExtractKey() function on equality. func NewEps(eps float64) SkipList { return NewSeedEps(time.Now().UTC().UnixNano(), eps) } // NewSeed returns a new empty, initialized Skiplist. // Given a seed, a deterministic height/list behaviour can be achieved. func NewSeed(seed int64) SkipList { return NewSeedEps(seed, eps) } // New returns a new empty, initialized Skiplist. func New() SkipList { return NewSeedEps(time.Now().UTC().UnixNano(), eps) } // IsEmpty checks, if the skiplist is empty. func (t *SkipList) IsEmpty() bool { return t.startLevels[0] == nil } func (t *SkipList) generateLevel(maxLevel int) int { level := maxLevel - 1 // First we apply some mask which makes sure that we don't get a level // above our desired level. Then we find the first set bit. var x uint64 = rand.Uint64() & ((1 << uint(maxLevel-1)) - 1) // func TrailingZeros64(x uint64) int zeroes := bits.TrailingZeros64(x) if zeroes <= maxLevel { level = zeroes } return level } func (t *SkipList) findEntryIndex(key float64, level int) int { // Find good entry point so we don't accidentally skip half the list. 
for i := t.maxLevel; i >= 0; i-- { if t.startLevels[i] != nil && t.startLevels[i].key <= key || i <= level { return i } } return 0 } func (t *SkipList) findExtended(key float64, findGreaterOrEqual bool) (foundElem *SkipListElement, ok bool) { foundElem = nil ok = false if t.IsEmpty() { return } index := t.findEntryIndex(key, 0) var currentNode *SkipListElement currentNode = t.startLevels[index] nextNode := currentNode // In case, that our first element is already greater-or-equal! if findGreaterOrEqual && currentNode.key > key { foundElem = currentNode ok = true return } for { if math.Abs(currentNode.key-key) <= t.eps { foundElem = currentNode ok = true return } nextNode = currentNode.next[index] // Which direction are we continuing next time? if nextNode != nil && nextNode.key <= key { // Go right currentNode = nextNode } else { if index > 0 { // Early exit if currentNode.next[0] != nil && math.Abs(currentNode.next[0].key-key) <= t.eps { foundElem = currentNode.next[0] ok = true return } // Go down index-- } else { // Element is not found and we reached the bottom. if findGreaterOrEqual { foundElem = nextNode ok = nextNode != nil } return } } } } // Find tries to find an element in the skiplist based on the key from the given ListElement. // elem can be used, if ok is true. // Find runs in approx. O(log(n)) func (t *SkipList) Find(e ListElement) (elem *SkipListElement, ok bool) { if t == nil || e == nil { return } elem, ok = t.findExtended(e.ExtractKey(), false) return elem, ok } // FindGreaterOrEqual finds the first element, that is greater or equal to the given ListElement e. // The comparison is done on the keys (So on ExtractKey()). // FindGreaterOrEqual runs in approx. O(log(n)) func (t *SkipList) FindGreaterOrEqual(e ListElement) (elem *SkipListElement, ok bool) { if t == nil || e == nil { return } elem, ok = t.findExtended(e.ExtractKey(), true) return } // Delete removes an element equal to e from the skiplist, if there is one. 
// If there are multiple entries with the same value, Delete will remove one of them // (Which one will change based on the actual skiplist layout) // Delete runs in approx. O(log(n)) func (t *SkipList) Delete(e ListElement) { if t == nil || t.IsEmpty() || e == nil { return } key := e.ExtractKey() index := t.findEntryIndex(key, 0) var currentNode *SkipListElement nextNode := currentNode for { if currentNode == nil { nextNode = t.startLevels[index] } else { nextNode = currentNode.next[index] } // Found and remove! if nextNode != nil && math.Abs(nextNode.key-key) <= t.eps { if currentNode != nil { currentNode.next[index] = nextNode.next[index] } if index == 0 { if nextNode.next[index] != nil { nextNode.next[index].prev = currentNode } t.elementCount-- } // Link from start needs readjustments. if t.startLevels[index] == nextNode { t.startLevels[index] = nextNode.next[index] // This was our currently highest node! if t.startLevels[index] == nil { t.maxLevel = index - 1 } } // Link from end needs readjustments. if nextNode.next[index] == nil { t.endLevels[index] = currentNode } nextNode.next[index] = nil } if nextNode != nil && nextNode.key < key { // Go right currentNode = nextNode } else { // Go down index-- if index < 0 { break } } } } // Insert inserts the given ListElement into the skiplist. // Insert runs in approx. O(log(n)) func (t *SkipList) Insert(e ListElement) { if t == nil || e == nil { return } level := t.generateLevel(t.maxNewLevel) // Only grow the height of the skiplist by one at a time! 
if level > t.maxLevel { level = t.maxLevel + 1 t.maxLevel = level } elem := &SkipListElement{ next: [maxLevel]*SkipListElement{}, level: level, key: e.ExtractKey(), value: e, } t.elementCount++ newFirst := true newLast := true if !t.IsEmpty() { newFirst = elem.key < t.startLevels[0].key newLast = elem.key > t.endLevels[0].key } normallyInserted := false if !newFirst && !newLast { normallyInserted = true index := t.findEntryIndex(elem.key, level) var currentNode *SkipListElement nextNode := t.startLevels[index] for { if currentNode == nil { nextNode = t.startLevels[index] } else { nextNode = currentNode.next[index] } // Connect node to next if index <= level && (nextNode == nil || nextNode.key > elem.key) { elem.next[index] = nextNode if currentNode != nil { currentNode.next[index] = elem } if index == 0 { elem.prev = currentNode if nextNode != nil { nextNode.prev = elem } } } if nextNode != nil && nextNode.key <= elem.key { // Go right currentNode = nextNode } else { // Go down index-- if index < 0 { break } } } } // Where we have a left-most position that needs to be referenced! for i := level; i >= 0; i-- { didSomething := false if newFirst || normallyInserted { if t.startLevels[i] == nil || t.startLevels[i].key > elem.key { if i == 0 && t.startLevels[i] != nil { t.startLevels[i].prev = elem } elem.next[i] = t.startLevels[i] t.startLevels[i] = elem } // link the endLevels to this element! if elem.next[i] == nil { t.endLevels[i] = elem } didSomething = true } if newLast { // Places the element after the very last element on this level! // This is very important, so we are not linking the very first element (newFirst AND newLast) to itself! if !newFirst { if t.endLevels[i] != nil { t.endLevels[i].next[i] = elem } if i == 0 { elem.prev = t.endLevels[i] } t.endLevels[i] = elem } // Link the startLevels to this element! 
if t.startLevels[i] == nil || t.startLevels[i].key > elem.key { t.startLevels[i] = elem } didSomething = true } if !didSomething { break } } } // GetValue extracts the ListElement value from a skiplist node. func (e *SkipListElement) GetValue() ListElement { return e.value } // GetSmallestNode returns the very first/smallest node in the skiplist. // GetSmallestNode runs in O(1) func (t *SkipList) GetSmallestNode() *SkipListElement { return t.startLevels[0] } // GetLargestNode returns the very last/largest node in the skiplist. // GetLargestNode runs in O(1) func (t *SkipList) GetLargestNode() *SkipListElement { return t.endLevels[0] } // Next returns the next element based on the given node. // Next will loop around to the first node, if you call it on the last! func (t *SkipList) Next(e *SkipListElement) *SkipListElement { if e.next[0] == nil { return t.startLevels[0] } return e.next[0] } // Prev returns the previous element based on the given node. // Prev will loop around to the last node, if you call it on the first! func (t *SkipList) Prev(e *SkipListElement) *SkipListElement { if e.prev == nil { return t.endLevels[0] } return e.prev } // GetNodeCount returns the number of nodes currently in the skiplist. func (t *SkipList) GetNodeCount() int { return t.elementCount } // ChangeValue can be used to change the actual value of a node in the skiplist // without the need of Deleting and reinserting the node again. // Be advised, that ChangeValue only works, if the actual key from ExtractKey() will stay the same! // ok is an indicator, wether the value is actually changed. func (t *SkipList) ChangeValue(e *SkipListElement, newValue ListElement) (ok bool) { // The key needs to stay correct, so this is very important! if (newValue.ExtractKey() - e.key) <= t.eps { e.value = newValue ok = true } else { ok = false } return } // String returns a string format of the skiplist. Useful to get a graphical overview and/or debugging. 
func (t *SkipList) String() string { s := "" s += " --> " for i, l := range t.startLevels { if l == nil { break } if i > 0 { s += " -> " } next := "---" if l != nil { next = l.value.String() } s += fmt.Sprintf("[%v]", next) if i == 0 { s += " " } } s += "\n" node := t.startLevels[0] for node != nil { s += fmt.Sprintf("%v: ", node.value) for i := 0; i <= node.level; i++ { l := node.next[i] next := "---" if l != nil { next = l.value.String() } if i == 0 { prev := "---" if node.prev != nil { prev = node.prev.value.String() } s += fmt.Sprintf("[%v|%v]", prev, next) } else { s += fmt.Sprintf("[%v]", next) } if i < node.level { s += " -> " } } s += "\n" node = node.next[0] } s += " --> " for i, l := range t.endLevels { if l == nil { break } if i > 0 { s += " -> " } next := "---" if l != nil { next = l.value.String() } s += fmt.Sprintf("[%v]", next) if i == 0 { s += " " } } s += "\n" return s }
MauriceGit_skiplist/skiplist.go
0.682256
0.627124
skiplist.go
starcoder
package elliptic import ( nativeelliptic "crypto/elliptic" "encoding/hex" "errors" "fmt" "math/big" ) // Point represents a point on an EllipticCurve. type Point struct { X *big.Int Y *big.Int } /* y**2 = x**3 + a*x + b % p */ // Curve represents the parameters of a short Weierstrass equation elliptic curve. type Curve struct { A *big.Int B *big.Int P *big.Int G Point N *big.Int H *big.Int Name string } // dump dumps the bytes of a point for debugging. func (p *Point) dump() { fmt.Print(p.format()) } // format formats the bytes of a point for debugging. func (p *Point) format() string { if p.X == nil && p.Y == nil { return "(inf,inf)" } return fmt.Sprintf("(%s,%s)", hex.EncodeToString(p.X.Bytes()), hex.EncodeToString(p.Y.Bytes())) } func (ec Curve) Params() *nativeelliptic.CurveParams { return &nativeelliptic.CurveParams{ P: ec.P, N: ec.N, B: ec.B, Gx: ec.G.X, Gy: ec.G.Y, BitSize: 256, Name: ec.Name, } } /*** Modular Arithmetic ***/ /* NOTE: Returning a new z each time below is very space inefficient, but the * alternate accumulator based design makes the point arithmetic functions look * absolutely hideous. I may still change this in the future. */ // addMod computes z = (x + y) % p. func addMod(x *big.Int, y *big.Int, p *big.Int) (z *big.Int) { z = new(big.Int).Add(x, y) z.Mod(z, p) return z } // subMod computes z = (x - y) % p. func subMod(x *big.Int, y *big.Int, p *big.Int) (z *big.Int) { z = new(big.Int).Sub(x, y) z.Mod(z, p) return z } // mulMod computes z = (x * y) % p. func mulMod(x *big.Int, y *big.Int, p *big.Int) (z *big.Int) { n := new(big.Int).Set(x) z = big.NewInt(0) for i := 0; i < y.BitLen(); i++ { if y.Bit(i) == 1 { z = addMod(z, n, p) } n = addMod(n, n, p) } return z } // invMod computes z = (1/x) % p. func invMod(x *big.Int, p *big.Int) (z *big.Int) { z = new(big.Int).ModInverse(x, p) return z } // expMod computes z = (x^e) % p. 
func expMod(x *big.Int, y *big.Int, p *big.Int) (z *big.Int) { z = new(big.Int).Exp(x, y, p) return z } // sqrtMod computes z = sqrt(x) % p. func sqrtMod(x *big.Int, p *big.Int) (z *big.Int) { /* assert that p % 4 == 3 */ if new(big.Int).Mod(p, big.NewInt(4)).Cmp(big.NewInt(3)) != 0 { panic("p is not equal to 3 mod 4!") } /* z = sqrt(x) % p = x^((p+1)/4) % p */ /* e = (p+1)/4 */ e := new(big.Int).Add(p, big.NewInt(1)) e = e.Rsh(e, 2) z = expMod(x, e, p) return z } /*** Point Arithmetic on Curve ***/ // IsInfinity checks if point P is infinity on EllipticCurve ec. func (ec *Curve) IsInfinity(P Point) bool { /* We use (nil,nil) to represent O, the point at infinity. */ if P.X == nil && P.Y == nil { return true } return false } // IsOnCurve checks if point P is on EllipticCurve ec. func (ec Curve) IsOnCurve(P1, P2 *big.Int) bool { P := Point{P1, P2} if ec.IsInfinity(P) { return false } /* y**2 = x**3 + a*x + b % p */ lhs := mulMod(P.Y, P.Y, ec.P) rhs := addMod( addMod( expMod(P.X, big.NewInt(3), ec.P), mulMod(ec.A, P.X, ec.P), ec.P), ec.B, ec.P) if lhs.Cmp(rhs) == 0 { return true } return false } // Add computes R = P + Q on EllipticCurve ec. 
func (ec Curve) Add(P1, P2, Q1, Q2 *big.Int) (R1 *big.Int, R2 *big.Int) { /* See rules 1-5 on SEC1 pg.7 http://www.secg.org/collateral/sec1_final.pdf */ P := Point{P1, P2} Q := Point{Q1, Q2} R := Point{} if ec.IsInfinity(P) && ec.IsInfinity(Q) { /* Rule #1 Identity */ /* R = O + O = O */ R.X = nil R.Y = nil } else if ec.IsInfinity(P) { /* Rule #2 Identity */ /* R = O + Q = Q */ R.X = new(big.Int).Set(Q.X) R.Y = new(big.Int).Set(Q.Y) } else if ec.IsInfinity(Q) { /* Rule #2 Identity */ /* R = P + O = P */ R.X = new(big.Int).Set(P.X) R.Y = new(big.Int).Set(P.Y) } else if P.X.Cmp(Q.X) == 0 && addMod(P.Y, Q.Y, ec.P).Sign() == 0 { /* Rule #3 Identity */ /* R = (x,y) + (x,-y) = O */ R.X = nil R.Y = nil } else if P.X.Cmp(Q.X) == 0 && P.Y.Cmp(Q.Y) == 0 && P.Y.Sign() != 0 { /* Rule #5 Point doubling */ /* R = P + P */ /* Lambda = (3*P.X*P.X + a) / (2*P.Y) */ num := addMod( mulMod(big.NewInt(3), mulMod(P.X, P.X, ec.P), ec.P), ec.A, ec.P) den := invMod(mulMod(big.NewInt(2), P.Y, ec.P), ec.P) lambda := mulMod(num, den, ec.P) /* R.X = lambda*lambda - 2*P.X */ R.X = subMod( mulMod(lambda, lambda, ec.P), mulMod(big.NewInt(2), P.X, ec.P), ec.P) /* R.Y = lambda*(P.X - R.X) - P.Y */ R.Y = subMod( mulMod(lambda, subMod(P.X, R.X, ec.P), ec.P), P.Y, ec.P) } else if P.X.Cmp(Q.X) != 0 { /* Rule #4 Point addition */ /* R = P + Q */ /* Lambda = (Q.Y - P.Y) / (Q.X - P.X) */ num := subMod(Q.Y, P.Y, ec.P) den := invMod(subMod(Q.X, P.X, ec.P), ec.P) lambda := mulMod(num, den, ec.P) /* R.X = lambda*lambda - P.X - Q.X */ R.X = subMod( subMod( mulMod(lambda, lambda, ec.P), P.X, ec.P), Q.X, ec.P) /* R.Y = lambda*(P.X - R.X) - P.Y */ R.Y = subMod( mulMod(lambda, subMod(P.X, R.X, ec.P), ec.P), P.Y, ec.P) } else { panic(fmt.Sprintf("Unsupported point addition: %v + %v", P.format(), Q.format())) } return R.X, R.Y } // ScalarMult computes Q = k * P on EllipticCurve ec. 
func (ec Curve) ScalarMult(P1, P2 *big.Int, l []byte) (Q1, Q2 *big.Int) {
	/* Note: this function is not constant time, due to the branching nature of
	 * the underlying point Add() function. */

	/* Montgomery Ladder Point Multiplication
	 *
	 * Implementation based on pseudocode here:
	 * See https://en.wikipedia.org/wiki/Elliptic_curve_point_multiplication#Montgomery_ladder
	 */
	P := Point{P1, P2}
	// The scalar k is taken from the big-endian bytes l.
	k := big.Int{}
	k.SetBytes(l)

	// Ladder invariant: R1 = R0 + P. R0 starts at infinity, R1 at P.
	var R0 Point
	var R1 Point
	R0.X = nil
	R0.Y = nil
	R1.X = new(big.Int).Set(P.X)
	R1.Y = new(big.Int).Set(P.Y)

	// Iterates over the bit length of the curve order N, not of k itself;
	// assumes k fits in N.BitLen() bits — TODO confirm callers reduce k mod N.
	for i := ec.N.BitLen() - 1; i >= 0; i-- {
		if k.Bit(i) == 0 {
			R1.X, R1.Y = ec.Add(R0.X, R0.Y, R1.X, R1.Y)
			R0.X, R0.Y = ec.Add(R0.X, R0.Y, R0.X, R0.Y)
		} else {
			R0.X, R0.Y = ec.Add(R0.X, R0.Y, R1.X, R1.Y)
			R1.X, R1.Y = ec.Add(R1.X, R1.Y, R1.X, R1.Y)
		}
	}

	return R0.X, R0.Y
}

// ScalarBaseMult computes Q = k * G on EllipticCurve ec,
// where G is the curve's base point.
func (ec Curve) ScalarBaseMult(k []byte) (Q1, Q2 *big.Int) {
	return ec.ScalarMult(ec.G.X, ec.G.Y, k)
}

// Decompress decompresses coordinate x and ylsb (y's least significant bit) into a Point P on EllipticCurve ec.
func (ec *Curve) Decompress(x *big.Int, ylsb uint) (P Point, err error) {
	/* y**2 = x**3 + a*x + b % p */
	rhs := addMod(
		addMod(
			expMod(x, big.NewInt(3), ec.P),
			mulMod(ec.A, x, ec.P),
			ec.P),
		ec.B, ec.P)

	/* y = sqrt(rhs) % p */
	// sqrtMod requires ec.P % 4 == 3 and panics otherwise.
	y := sqrtMod(rhs, ec.P)

	/* Use -y if opposite lsb is required */
	// The two candidate roots are y and p-y; pick the one matching ylsb.
	if y.Bit(0) != (ylsb & 0x1) {
		y = subMod(big.NewInt(0), y, ec.P)
	}

	P.X = x
	P.Y = y

	if !ec.IsOnCurve(P.X, P.Y) {
		return P, errors.New("Compressed (x, ylsb) not on curve.")
	}

	return P, nil
}

// Double is a stub: it ignores its arguments and always returns (0, 0).
// NOTE(review): presumably meant to compute 2*(x1,y1) via Add — confirm
// before relying on it.
func (ec Curve) Double(x1, y1 *big.Int) (x, y *big.Int) {
	x = &big.Int{}
	x.SetBytes([]byte{0x00})
	y = &big.Int{}
	y.SetBytes([]byte{0x00})
	return x, y
}
pkg/crypto/elliptic/elliptic.go
0.706494
0.504883
elliptic.go
starcoder
Components of a Slice:
 1. Pointer - points to the first element of the underlying array that is
    accessible through the slice (not necessarily the array's first element).
 2. Length - the number of elements currently in the slice.
 3. Capacity - the maximum size up to which the slice can grow without
    reallocating.
*/
package main

import "fmt"

func main() {

	// Creating an array (note: "chanllenge" typo kept — string literals
	// are program output, not comments).
	arr := [7]string{"This", "is", "day-13", "of", "GoLang", "Programming", "chanllenge"}

	// Displaying array
	fmt.Println("Array:", arr)

	// Creating a slice covering arr[1] through arr[5]
	myslice := arr[1:6]

	// Displaying slice
	fmt.Println("Slice:", myslice)

	// Displaying length of the slice
	fmt.Printf("Length of the slice: %d", len(myslice))

	// Displaying the capacity of the slice
	fmt.Printf("\nCapacity of the slice: %d", cap(myslice))
}

/*
A slice can be created and initialized using:
 1. a slice literal
 2. an existing array
 3. an already existing slice (re-slicing)
 4. the make() function
*/
/*
Ways to iterate over a slice:
 1. a classic for loop
 2. range in a for loop
 3. a blank identifier in a for loop
*/
/*
Important points about slices:
 1. A nil slice contains no elements; its length and capacity are both 0.
 2. A slice is a reference type: it refers to an underlying array, so any
    change made through the slice is also visible in the array.
 3. Slices are not comparable: the only comparison allowed with == is
    against nil. Comparing two slices with == or != is a compile-time
    error; compare contents with a loop or reflect.DeepEqual.
 4. Multi-dimensional slices are like multi-dimensional arrays, except
    that a slice does not carry a fixed size.
 5. Elements of a slice can be sorted. The Go standard library's sort
    package provides sorting helpers for slices of ints, float64s, and
    strings.
These helpers always sort the elements in the slice in ascending order.
*/
day-13/Sanskriti/day13.go
0.672439
0.733822
day13.go
starcoder
package sif import ( "fmt" "time" ) // String will return a string corresponding to the Datatype. func (d Datatype) String() string { switch d { case DataDeffile: return "Def.FILE" case DataEnvVar: return "Env.Vars" case DataLabels: return "JSON.Labels" case DataPartition: return "FS" case DataSignature: return "Signature" case DataGenericJSON: return "JSON.Generic" case DataGeneric: return "Generic/Raw" case DataCryptoMessage: return "Cryptographic Message" } return "Unknown" } // readableSize returns the size in human readable format. func readableSize(size uint64) string { var divs int var conversion string for ; size != 0; size >>= 10 { if size < 1024 { break } divs++ } switch divs { case 0: conversion = fmt.Sprintf("%d", size) case 1: conversion = fmt.Sprintf("%dKB", size) case 2: conversion = fmt.Sprintf("%dMB", size) case 3: conversion = fmt.Sprintf("%dGB", size) case 4: conversion = fmt.Sprintf("%dTB", size) } return conversion } // FmtHeader formats the output of a SIF file global header. func (fimg *FileImage) FmtHeader() string { s := fmt.Sprintln("Launch: ", trimZeroBytes(fimg.Header.Launch[:])) s += fmt.Sprintln("Magic: ", trimZeroBytes(fimg.Header.Magic[:])) s += fmt.Sprintln("Version: ", trimZeroBytes(fimg.Header.Version[:])) s += fmt.Sprintln("Arch: ", GetGoArch(trimZeroBytes(fimg.Header.Arch[:]))) s += fmt.Sprintln("ID: ", fimg.Header.ID) s += fmt.Sprintln("Ctime: ", time.Unix(fimg.Header.Ctime, 0).UTC()) s += fmt.Sprintln("Mtime: ", time.Unix(fimg.Header.Mtime, 0).UTC()) s += fmt.Sprintln("Dfree: ", fimg.Header.Dfree) s += fmt.Sprintln("Dtotal: ", fimg.Header.Dtotal) s += fmt.Sprintln("Descoff: ", fimg.Header.Descroff) s += fmt.Sprintln("Descrlen:", readableSize(uint64(fimg.Header.Descrlen))) s += fmt.Sprintln("Dataoff: ", fimg.Header.Dataoff) s += fmt.Sprintln("Datalen: ", readableSize(uint64(fimg.Header.Datalen))) return s } // fstypeStr returns a string representation of a file system type. 
// fstypeStr maps a Fstype enum value to its display name; unknown values
// yield the "Unknown fs-type" sentinel.
func fstypeStr(ftype Fstype) string {
	switch ftype {
	case FsSquash:
		return "Squashfs"
	case FsExt3:
		return "Ext3"
	case FsImmuObj:
		return "Archive"
	case FsRaw:
		return "Raw"
	case FsEncryptedSquashfs:
		return "Encrypted squashfs"
	}
	return "Unknown fs-type"
}

// parttypeStr returns a string representation of a partition type.
func parttypeStr(ptype Parttype) string {
	switch ptype {
	case PartSystem:
		return "System"
	case PartPrimSys:
		// The leading '*' marks the primary system partition.
		return "*System"
	case PartData:
		return "Data"
	case PartOverlay:
		return "Overlay"
	}
	return "Unknown part-type"
}

// hashtypeStr returns a string representation of a hash type.
func hashtypeStr(htype Hashtype) string {
	switch htype {
	case HashSHA256:
		return "SHA256"
	case HashSHA384:
		return "SHA384"
	case HashSHA512:
		return "SHA512"
	case HashBLAKE2S:
		return "BLAKE2S"
	case HashBLAKE2B:
		return "BLAKE2B"
	}
	return "Unknown hash-type"
}

// formattypeStr returns a string representation of a format type.
func formattypeStr(ftype Formattype) string {
	switch ftype {
	case FormatOpenPGP:
		return "OpenPGP"
	case FormatPEM:
		return "PEM"
	}
	return "Unknown format-type"
}

// messagetypeStr returns a string representation of a message type.
func messagetypeStr(mtype Messagetype) string {
	switch mtype {
	case MessageClearSignature:
		return "Clear Signature"
	case MessageRSAOAEP:
		return "RSA-OAEP"
	}
	return "Unknown message-type"
}

// FmtDescrList formats the output of a list of all active descriptors from a SIF file.
func (fimg *FileImage) FmtDescrList() string { s := fmt.Sprintf("%-4s %-8s %-8s %-26s %s\n", "ID", "|GROUP", "|LINK", "|SIF POSITION (start-end)", "|TYPE") s += fmt.Sprintln("------------------------------------------------------------------------------") for _, v := range fimg.DescrArr { if !v.Used { continue } else { s += fmt.Sprintf("%-4d ", v.ID) if v.Groupid == DescrUnusedGroup { s += fmt.Sprintf("|%-7s ", "NONE") } else { s += fmt.Sprintf("|%-7d ", v.Groupid&^DescrGroupMask) } if v.Link == DescrUnusedLink { s += fmt.Sprintf("|%-7s ", "NONE") } else { if v.Link&DescrGroupMask == DescrGroupMask { s += fmt.Sprintf("|%-3d (G) ", v.Link&^DescrGroupMask) } else { s += fmt.Sprintf("|%-7d ", v.Link) } } fposbuf := fmt.Sprintf("|%d-%d ", v.Fileoff, v.Fileoff+v.Filelen) s += fmt.Sprintf("%-26s ", fposbuf) switch v.Datatype { case DataPartition: f, _ := v.GetFsType() p, _ := v.GetPartType() a, _ := v.GetArch() s += fmt.Sprintf("|%s (%s/%s/%s)\n", v.Datatype, fstypeStr(f), parttypeStr(p), GetGoArch(trimZeroBytes(a[:]))) case DataSignature: h, _ := v.GetHashType() s += fmt.Sprintf("|%s (%s)\n", v.Datatype, hashtypeStr(h)) case DataCryptoMessage: f, _ := v.GetFormatType() m, _ := v.GetMessageType() s += fmt.Sprintf("|%s (%s/%s)\n", v.Datatype, formattypeStr(f), messagetypeStr(m)) default: s += fmt.Sprintf("|%s\n", v.Datatype) } } } return s } // FmtDescrInfo formats the output of detailed info about a descriptor from a SIF file. 
func (fimg *FileImage) FmtDescrInfo(id uint32) string { var s string for i, v := range fimg.DescrArr { if !v.Used { continue } else if v.ID == id { s = fmt.Sprintln("Descr slot#:", i) s += fmt.Sprintln(" Datatype: ", v.Datatype) s += fmt.Sprintln(" ID: ", v.ID) s += fmt.Sprintln(" Used: ", v.Used) if v.Groupid == DescrUnusedGroup { s += fmt.Sprintln(" Groupid: ", "NONE") } else { s += fmt.Sprintln(" Groupid: ", v.Groupid&^DescrGroupMask) } if v.Link == DescrUnusedLink { s += fmt.Sprintln(" Link: ", "NONE") } else { if v.Link&DescrGroupMask == DescrGroupMask { s += fmt.Sprintln(" Link: ", v.Link&^DescrGroupMask, "(G)") } else { s += fmt.Sprintln(" Link: ", v.Link) } } s += fmt.Sprintln(" Fileoff: ", v.Fileoff) s += fmt.Sprintln(" Filelen: ", v.Filelen) s += fmt.Sprintln(" Ctime: ", time.Unix(v.Ctime, 0).UTC()) s += fmt.Sprintln(" Mtime: ", time.Unix(v.Mtime, 0).UTC()) s += fmt.Sprintln(" UID: ", v.UID) s += fmt.Sprintln(" Gid: ", v.Gid) s += fmt.Sprintln(" Name: ", trimZeroBytes(v.Name[:])) switch v.Datatype { case DataPartition: f, _ := v.GetFsType() p, _ := v.GetPartType() a, _ := v.GetArch() s += fmt.Sprintln(" Fstype: ", fstypeStr(f)) s += fmt.Sprintln(" Parttype: ", parttypeStr(p)) s += fmt.Sprintln(" Arch: ", GetGoArch(trimZeroBytes(a[:]))) case DataSignature: h, _ := v.GetHashType() e, _ := v.GetEntityString() s += fmt.Sprintln(" Hashtype: ", hashtypeStr(h)) s += fmt.Sprintln(" Entity: ", e) case DataCryptoMessage: f, _ := v.GetFormatType() m, _ := v.GetMessageType() s += fmt.Sprintln(" Fmttype: ", formattypeStr(f)) s += fmt.Sprintln(" Msgtype: ", messagetypeStr(m)) } return s } } return "" }
pkg/sif/fmt.go
0.598782
0.409693
fmt.go
starcoder
package canvas

import (
	"strconv"
)

// Canvas represents a canvas of pixels (colors) of size width X height.
// Pixels is indexed row-major: Pixels[row][column], i.e. Pixels[y][x].
type Canvas struct {
	Width, Height int
	Pixels        [][]*Color
}

// NewCanvas returns a new Canvas with given width and height,
// with every pixel initialized to black (Color{0, 0, 0}).
func NewCanvas(width, height int) *Canvas {
	pixels := make([][]*Color, 0, 0)
	for i := 0; i < height; i++ {
		col := make([]*Color, 0, 0)
		for j := 0; j < width; j++ {
			defaultEl := &Color{0.0, 0.0, 0.0}
			col = append(col, defaultEl)
		}
		pixels = append(pixels, col[:])
	}
	return &Canvas{Width: width, Height: height, Pixels: pixels}
}

// WritePixel writes a color to a given pixel.
// Note: the parameters are coordinates — width is the column (x) and
// height is the row (y) — matching the row-major Pixels layout.
func (c *Canvas) WritePixel(width, height int, color *Color) {
	c.Pixels[height][width] = color
}

// ToPpmHeader returns the string header format for the image displayed by a ppm file
// ("P3", dimensions, then the maximum color value).
// maxColorValue dictates the upper bound that colors between 0 and 1 should be scaled to.
func (c *Canvas) ToPpmHeader(maxColorValue int) string {
	s := "P3\n"
	s += strconv.Itoa(c.Width) + " "
	s += strconv.Itoa(c.Height) + "\n"
	s += strconv.Itoa(maxColorValue) + "\n"
	return s
}

// ToPpmBody returns the string body format for image displayed by a ppm file.
// (Comment typo fixed: was "ToPomBody".)
// maxColorValue dictates the upper bound that colors between 0 and 1 should be scaled to.
func (c *Canvas) ToPpmBody(maxColorValue int) string {
	res := ""
	for i := 0; i < c.Height; i++ {
		s := ""
		for j := 0; j < c.Width; j++ {
			p := c.Pixels[i][j]
			red, green, blue := clampValuesAndScale(p.Red(), p.Green(), p.Blue(), maxColorValue)
			s = addValuesToPpm(s, red, green, blue)
		}
		res += s
		res += "\n"
	}
	return res
}

// addValuesToPpm appends the three channel values to s, inserting newlines
// to keep lines short.
// NOTE(review): this approximates PPM's 70-character line limit by tracking
// the total length of s modulo 70, not the length of the current line, so
// the computed column can drift after a wrap — verify the output against a
// strict PPM reader.
func addValuesToPpm(s string, red, green, blue int) string {
	prev := len(s) % 70
	redS := strconv.Itoa(red)
	greenS := strconv.Itoa(green)
	blueS := strconv.Itoa(blue)
	if (prev+len(redS))%70 < prev {
		s += "\n"
		s += redS + " "
	} else {
		s += redS + " "
	}
	prev = len(s) % 70
	if (prev+len(greenS))%70 < prev {
		s += "\n"
		s += greenS + " "
	} else {
		s += greenS + " "
	}
	prev = len(s) % 70
	if (prev+len(blueS))%70 < prev {
		s += "\n"
		s += blueS + " "
	} else {
		s += blueS + " "
	}
	return s
}

// clampValuesAndScale clamps each channel to [0, 1] and scales it to
// [0, maxColorValue], truncating toward zero.
func clampValuesAndScale(red, green, blue float64, maxColorValue int) (int, int, int) {
	m := float64(maxColorValue)
	rred := red
	rgreen := green
	rblue := blue
	if red < 0 {
		rred = 0
	}
	if green < 0 {
		rgreen = 0
	}
	if blue < 0 {
		rblue = 0
	}
	if red > 1 {
		rred = 1
	}
	if green > 1 {
		rgreen = 1
	}
	if blue > 1 {
		rblue = 1
	}
	return int(rred * m), int(rgreen * m), int(rblue * m)
}
pkg/canvas/canvas.go
0.86164
0.510435
canvas.go
starcoder
package board

import "errors"

// These two variables allow for conversion of each square status to
// the status string and vice-versa. This allows for statuses to be stored
// as integers for faster lookup and comparison.
// Encoding: 0 Empty, 1 Miss, 2-6 ship types, 7 Hit.
var (
	values = map[string]int{
		"Empty":      0,
		"Miss":       1,
		"Destroyer":  2,
		"Submarine":  3,
		"Cruiser":    4,
		"Battleship": 5,
		"Carrier":    6,
		"Hit":        7,
	}
	status = [8]string{
		"Empty",
		"Miss",
		"Destroyer",
		"Submarine",
		"Cruiser",
		"Battleship",
		"Carrier",
		"Hit",
	}
)

// Board is a type for holding a standard 10x10 Battleship game board.
// Indexed as Board[letter][number] via a Square's coordinates.
type Board [10][10]int

// Board update methods

// SetString sets a board value to a given string value
// (one of the keys of the values map above).
func (b *Board) SetString(s Square, value string) error {
	if val, ok := values[value]; ok {
		b[s.Letter][s.Number] = val
		return nil
	}
	return errors.New("Given value is not a valid value")
}

// SetInt sets a board value to a given integer value.
// Accepts 1-7 only; note that 0 ("Empty") is rejected, so a square cannot
// be cleared through SetInt.
func (b *Board) SetInt(s Square, value int) error {
	if value > 0 && value < 8 {
		b[s.Letter][s.Number] = value
		return nil
	}
	return errors.New("Given value out of range")
}

// SetPiece sets board values from a given piece's ship type and coordinates.
func (b *Board) SetPiece(p Piece) {
	value := values[p.Type.GetType()]
	for _, square := range p.Coords {
		b.SetInt(square, value)
	}
}

// PlacePiece sets board values from a given piece's ship type, but only if the
// squares are all empty. Useful when using Board for a player's ship placement,
// as opposed to tracking hits and misses.
func (b *Board) PlacePiece(p Piece) error {
	for _, square := range p.Coords {
		if !b.IsEmpty(square) {
			return errors.New("Piece coordinates are not empty")
		}
	}
	b.SetPiece(p)
	return nil
}

// Board retrieval methods

// GetString returns a given Square's string value.
func (b Board) GetString(s Square) string {
	return status[b[s.Letter][s.Number]]
}

// GetInt returns a given Square's integer value.
func (b Board) GetInt(s Square) int {
	return b[s.Letter][s.Number]
}

// Board boolean methods

// IsEmpty returns whether a given Square is empty.
func (b Board) IsEmpty(s Square) bool {
	return (b[s.Letter][s.Number] == 0)
}

// IsMiss returns whether a given Square is a miss.
func (b Board) IsMiss(s Square) bool {
	return (b[s.Letter][s.Number] == 1)
}

// IsHit returns whether a given square is any type of hit
// (either an unsunk "Hit" or any ship-type value).
func (b Board) IsHit(s Square) bool {
	return (b[s.Letter][s.Number] > 1)
}

// IsUnsunk returns whether a given square is to an unsunk ship.
func (b Board) IsUnsunk(s Square) bool {
	return (b[s.Letter][s.Number] == 7)
}

// IsSunk returns whether a given square is to any sunk ship.
// NOTE(review): the scheme treats ship-type values (2-6) as sunk and the
// generic "Hit" (7) as unsunk — confirm this is the intended semantics.
func (b Board) IsSunk(s Square) bool {
	val := b[s.Letter][s.Number]
	return (val > 1 && val < 7)
}

// IsShip returns whether a square belongs to a specific ship type.
func (b Board) IsShip(s Square, sh Ship) bool {
	return (status[b[s.Letter][s.Number]] == string(sh))
}
pkg/board/board.go
0.862656
0.575648
board.go
starcoder
package xrand import ( "encoding/binary" "fmt" "math" "math/bits" "time" ) // https://prng.di.unimi.it/xoroshiro1024plusplus.c type Xoroshiro1024pp struct { s [16]uint64 p int } func NewXoroshiro1024pp(seed int64) *Xoroshiro1024pp { x := Xoroshiro1024pp{} x.Seed(seed) return &x } func (x Xoroshiro1024pp) State() []byte { s := make([]byte, 128) binary.BigEndian.PutUint64(s[:8], x.s[0]) binary.BigEndian.PutUint64(s[8:16], x.s[1]) binary.BigEndian.PutUint64(s[16:24], x.s[2]) binary.BigEndian.PutUint64(s[24:32], x.s[3]) binary.BigEndian.PutUint64(s[32:40], x.s[4]) binary.BigEndian.PutUint64(s[40:48], x.s[5]) binary.BigEndian.PutUint64(s[48:56], x.s[6]) binary.BigEndian.PutUint64(s[56:64], x.s[7]) binary.BigEndian.PutUint64(s[64:72], x.s[8]) binary.BigEndian.PutUint64(s[72:80], x.s[9]) binary.BigEndian.PutUint64(s[80:88], x.s[10]) binary.BigEndian.PutUint64(s[88:96], x.s[11]) binary.BigEndian.PutUint64(s[96:104], x.s[12]) binary.BigEndian.PutUint64(s[104:112], x.s[13]) binary.BigEndian.PutUint64(s[112:120], x.s[14]) binary.BigEndian.PutUint64(s[120:128], x.s[15]) return s } func (x *Xoroshiro1024pp) SetState(state []byte) { mix := NewSplitMix64(time.Now().UTC().UnixNano()) x.s[0] = bytesToState64(state, 0, &mix) x.s[1] = bytesToState64(state, 1, &mix) x.s[2] = bytesToState64(state, 2, &mix) x.s[3] = bytesToState64(state, 3, &mix) x.s[4] = bytesToState64(state, 4, &mix) x.s[5] = bytesToState64(state, 5, &mix) x.s[6] = bytesToState64(state, 6, &mix) x.s[7] = bytesToState64(state, 7, &mix) x.s[8] = bytesToState64(state, 8, &mix) x.s[9] = bytesToState64(state, 9, &mix) x.s[10] = bytesToState64(state, 10, &mix) x.s[11] = bytesToState64(state, 11, &mix) x.s[12] = bytesToState64(state, 12, &mix) x.s[13] = bytesToState64(state, 13, &mix) x.s[14] = bytesToState64(state, 14, &mix) x.s[15] = bytesToState64(state, 15, &mix) } func (x *Xoroshiro1024pp) Seed(seed int64) { s := NewSplitMix64(seed) x.s[0] = s.Uint64() x.s[1] = s.Uint64() x.s[2] = s.Uint64() x.s[3] = s.Uint64() x.s[4] = 
s.Uint64() x.s[5] = s.Uint64() x.s[6] = s.Uint64() x.s[7] = s.Uint64() x.s[8] = s.Uint64() x.s[9] = s.Uint64() x.s[10] = s.Uint64() x.s[11] = s.Uint64() x.s[12] = s.Uint64() x.s[13] = s.Uint64() x.s[14] = s.Uint64() x.s[15] = s.Uint64() } func (x *Xoroshiro1024pp) Uint64() uint64 { q := x.p x.p = (x.p + 1) & 15 s0 := x.s[x.p] s15 := x.s[q] result := bits.RotateLeft64(s0+s15, 23) + s15 s15 ^= s0 x.s[q] = bits.RotateLeft64(s0, 25) ^ s15 ^ (s15 << 27) x.s[x.p] = bits.RotateLeft64(s15, 36) return result } func (x *Xoroshiro1024pp) Int64() int64 { return unsafeUint64ToInt64(x.Uint64()) } func (x *Xoroshiro1024pp) Int63() int64 { return int64(x.Uint64() & (1<<63 - 1)) } func (x *Xoroshiro1024pp) Float64() float64 { return math.Float64frombits(0x3ff<<52|x.Uint64()>>12) - 1.0 } // Call Uint64() * 2^512 func (x *Xoroshiro1024pp) Jump() { var ( jump = [...]uint64{ 0x931197d8e3177f17, 0xb59422e0b9138c5f, 0xf06a6afb49d668bb, 0xacb8a6412c8a1401, 0x12304ec85f0b3468, 0xb7dfe7079209891e, 0x405b7eec77d9eb14, 0x34ead68280c44e4a, 0xe0e4ba3e0ac9e366, 0x8f46eda8348905b7, 0x328bf4dbad90d6ff, 0xc8fd6fb31c9effc3, 0xe899d452d4b67652, 0x45f387286ade3205, 0x03864f454a8920bd, 0xa68fa28725b1b384, } ) s := [...]uint64{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} for i := range jump { for b := 0; b < 64; b++ { if (jump[i] & 1 << b) != 0 { s[0] ^= x.s[(0+x.p)&15] s[1] ^= x.s[(1+x.p)&15] s[2] ^= x.s[(2+x.p)&15] s[3] ^= x.s[(3+x.p)&15] s[4] ^= x.s[(4+x.p)&15] s[5] ^= x.s[(5+x.p)&15] s[6] ^= x.s[(6+x.p)&15] s[7] ^= x.s[(7+x.p)&15] s[8] ^= x.s[(8+x.p)&15] s[9] ^= x.s[(9+x.p)&15] s[10] ^= x.s[(10+x.p)&15] s[11] ^= x.s[(11+x.p)&15] s[12] ^= x.s[(12+x.p)&15] s[13] ^= x.s[(13+x.p)&15] s[14] ^= x.s[(14+x.p)&15] s[15] ^= x.s[(15+x.p)&15] } } x.Uint64() } x.s[(0+x.p)&15] = s[0] x.s[(1+x.p)&15] = s[1] x.s[(2+x.p)&15] = s[2] x.s[(3+x.p)&15] = s[3] x.s[(4+x.p)&15] = s[4] x.s[(5+x.p)&15] = s[5] x.s[(6+x.p)&15] = s[6] x.s[(7+x.p)&15] = s[7] x.s[(8+x.p)&15] = s[8] x.s[(9+x.p)&15] = s[9] x.s[(10+x.p)&15] = 
s[10] x.s[(11+x.p)&15] = s[11] x.s[(12+x.p)&15] = s[12] x.s[(13+x.p)&15] = s[13] x.s[(14+x.p)&15] = s[14] x.s[(15+x.p)&15] = s[15] } // Call Uint64() * 2^768 func (x *Xoroshiro1024pp) LongJump() { var ( jump = [...]uint64{ 0x7374156360bbf00f, 0x4630c2efa3b3c1f6, 0x6654183a892786b1, 0x94f7bfcbfb0f1661, 0x27d8243d3d13eb2d, 0x9701730f3dfb300f, 0x2f293baae6f604ad, 0xa661831cb60cd8b6, 0x68280c77d9fe008c, 0x50554160f5ba9459, 0x2fc20b17ec7b2a9a, 0x49189bbdc8ec9f8f, 0x92a65bca41852cc1, 0xf46820dd0509c12a, 0x52b00c35fbf92185, 0x1e5b3b7f589e03c1, } ) s := [...]uint64{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} for i := range jump { for b := 0; b < 64; b++ { if (jump[i] & 1 << b) != 0 { s[0] ^= x.s[(0+x.p)&15] s[1] ^= x.s[(1+x.p)&15] s[2] ^= x.s[(2+x.p)&15] s[3] ^= x.s[(3+x.p)&15] s[4] ^= x.s[(4+x.p)&15] s[5] ^= x.s[(5+x.p)&15] s[6] ^= x.s[(6+x.p)&15] s[7] ^= x.s[(7+x.p)&15] s[8] ^= x.s[(8+x.p)&15] s[9] ^= x.s[(9+x.p)&15] s[10] ^= x.s[(10+x.p)&15] s[11] ^= x.s[(11+x.p)&15] s[12] ^= x.s[(12+x.p)&15] s[13] ^= x.s[(13+x.p)&15] s[14] ^= x.s[(14+x.p)&15] s[15] ^= x.s[(15+x.p)&15] } } x.Uint64() } x.s[(0+x.p)&15] = s[0] x.s[(1+x.p)&15] = s[1] x.s[(2+x.p)&15] = s[2] x.s[(3+x.p)&15] = s[3] x.s[(4+x.p)&15] = s[4] x.s[(5+x.p)&15] = s[5] x.s[(6+x.p)&15] = s[6] x.s[(7+x.p)&15] = s[7] x.s[(8+x.p)&15] = s[8] x.s[(9+x.p)&15] = s[9] x.s[(10+x.p)&15] = s[10] x.s[(11+x.p)&15] = s[11] x.s[(12+x.p)&15] = s[12] x.s[(13+x.p)&15] = s[13] x.s[(14+x.p)&15] = s[14] x.s[(15+x.p)&15] = s[15] } func (x Xoroshiro1024pp) String() string { return fmt.Sprintf("%064x", x.State()) } func (x Xoroshiro1024pp) GoString() string { return "xrand.Xoshiro1024pp{state:\"" + x.String() + "\"}" }
xoroshiro1024pp.go
0.547464
0.405302
xoroshiro1024pp.go
starcoder
package chart import ( "math" "sort" ) // Return p percentil of pre-sorted integer data. 0 <= p <= 100. func PercentilInt(data []int, p int) int { n := len(data) if n == 0 { return 0 } if n == 1 { return data[0] } pos := float64(p) * float64(n+1) / 100 fpos := math.Floor(pos) intPos := int(fpos) dif := pos - fpos if intPos < 1 { return data[0] } if intPos >= n { return data[n-1] } lower := data[intPos-1] upper := data[intPos] val := float64(lower) + dif*float64(upper-lower) return int(math.Floor(val + 0.5)) } // Return p percentil of pre-sorted float64 data. 0 <= p <= 100. func percentilFloat64(data []float64, p int) float64 { n := len(data) if n == 0 { return 0 } if n == 1 { return data[0] } pos := float64(p) * float64(n+1) / 100 fpos := math.Floor(pos) intPos := int(fpos) dif := pos - fpos if intPos < 1 { return data[0] } if intPos >= n { return data[n-1] } lower := data[intPos-1] upper := data[intPos] val := lower + dif*(upper-lower) return val } // Compute minimum, p percentil, median, average, 100-p percentil and maximum of values in data. func SixvalInt(data []int, p int) (min, lq, med, avg, uq, max int) { min, max = math.MaxInt32, math.MinInt32 sum, n := 0, len(data) if n == 0 { return } if n == 1 { min = data[0] lq = data[0] med = data[0] avg = data[0] uq = data[0] max = data[0] return } for _, v := range data { if v < min { min = v } if v > max { max = v } sum += v } avg = sum / n sort.Ints(data) if n%2 == 1 { med = data[(n-1)/2] } else { med = (data[n/2] + data[n/2-1]) / 2 } lq = PercentilInt(data, p) uq = PercentilInt(data, 100-p) return } // Compute minimum, p percentil, median, average, 100-p percentil and maximum of values in data. 
func SixvalFloat64(data []float64, p int) (min, lq, med, avg, uq, max float64) { n := len(data) // Special cases 0 and 1 if n == 0 { return } if n == 1 { min = data[0] lq = data[0] med = data[0] avg = data[0] uq = data[0] max = data[0] return } // First pass (min, max, coarse average) var sum float64 min, max = math.MaxFloat64, -math.MaxFloat64 for _, v := range data { if v < min { min = v } if v > max { max = v } sum += v } avg = sum / float64(n) // Second pass: Correct average var corr float64 for _, v := range data { corr += v - avg } avg += corr / float64(n) // Median sort.Float64s(data) if n%2 == 1 { med = data[(n-1)/2] } else { med = (data[n/2] + data[n/2-1]) / 2 } // Percentiles if p < 0 { p = 0 } if p > 100 { p = 100 } lq = percentilFloat64(data, p) uq = percentilFloat64(data, 100-p) return }
stat.go
0.645232
0.561936
stat.go
starcoder
package main

import (
	"fmt"
	"flag"
	"image/color"
	"math"
	"math/big"

	"gonum.org/v1/plot"
	"gonum.org/v1/plot/plotter"
	"gonum.org/v1/plot/vg"
)

//
// https://projecteuler.net/problem=25

// getDigitLengthBinet returns the number of decimal digits of the n-th
// Fibonacci number, via a logarithm of Binet's formula:
// digits(F(n)) = ceil(n*log10(phi) - log10(5)/2).
func getDigitLengthBinet(n int) int {
	// Applying log10 to Binet's closed form F(n) ≈ phi^n / sqrt(5)
	// yields the digit count directly, without computing F(n).
	if n < 2 {
		return 1
	}
	var phi float64 = (1 + math.Sqrt(5)) / 2
	nDigits := float64(n)*math.Log10(phi) - (math.Log10(5) / 2)
	return int(math.Ceil(nDigits))
}

// plotDifference saves a plot of getFnIndex(x) - getFnIndex(x-1) for
// x in [0, num] as "differences-<num>.png".
func plotDifference(num float64) {
	p, err := plot.New()
	if err != nil {
		panic(err)
	}

	p.Title.Text = "Differences"
	p.X.Label.Text = "X"
	p.Y.Label.Text = "Y"

	exp := plotter.NewFunction(func(x float64) float64 {
		diff := float64(getFnIndex(int(x)) - getFnIndex(int(x-1)))
		return diff
	})
	exp.Dashes = []vg.Length{vg.Points(4), vg.Points(4)}
	exp.Width = vg.Points(2)
	exp.Color = color.RGBA{G: 255, A: 255}

	// Add the functions and their legend entries.
	p.Add(exp)
	p.Legend.Add("len(F(n)) - len(F(n-1))", exp)
	p.Legend.ThumbnailWidth = 2 * vg.Inch

	// Set the axis ranges. Unlike other data sets,
	// functions don't set the axis ranges automatically
	// since functions don't necessarily have a
	// finite range of x and y values.
	p.X.Min = 0
	p.X.Max = num
	p.Y.Min = 0
	p.Y.Max = 10

	// Save the plot to a PNG file.
	if err := p.Save(16*vg.Inch, 16*vg.Inch, fmt.Sprintf("differences-%1f.png", num)); err != nil {
		panic(err)
	}
}

// plotFunc saves a plot comparing k/5, getDigitLengthBinet(k), and their
// difference for k in [0, num] as "functions-<num>.png".
func plotFunc(num float64) {
	p, err := plot.New()
	if err != nil {
		panic(err)
	}

	p.Title.Text = "Functions"
	p.X.Label.Text = "X(5*k)"
	p.Y.Label.Text = "Y(k)"

	quad := plotter.NewFunction(func(x float64) float64 { return x / 5 })
	quad.Color = color.RGBA{B: 255, A: 255}

	exp := plotter.NewFunction(func(x float64) float64 {
		return float64(getDigitLengthBinet(int(x)))
	})
	exp.Dashes = []vg.Length{vg.Points(1), vg.Points(1)}
	exp.Width = vg.Points(2)
	exp.Color = color.RGBA{G: 255, A: 255}

	sin := plotter.NewFunction(func(x float64) float64 {
		return -(x / 5) + float64(getDigitLengthBinet(int(x)))
	})
	sin.Dashes = []vg.Length{vg.Points(4), vg.Points(5)}
	sin.Width = vg.Points(4)
	sin.Color = color.RGBA{R: 255, A: 255}

	// Add the functions and their legend entries.
	p.Add(quad, exp, sin)
	p.Legend.Add("5 * k", quad)
	p.Legend.Add("getDigitLengthBinet", exp)
	p.Legend.Add("getDigitLengthBinet - 5 * k", sin)
	p.Legend.ThumbnailWidth = 2 * vg.Inch
	p.Legend.Top = true

	// Set the axis ranges. Unlike other data sets,
	// functions don't set the axis ranges automatically
	// since functions don't necessarily have a
	// finite range of x and y values.
	p.X.Min = 0
	p.X.Max = num
	p.Y.Min = 0
	p.Y.Max = num * 1 / 4

	// Save the plot to a PNG file.
	if err := p.Save(16*vg.Inch, 16*vg.Inch, fmt.Sprintf("functions-%1f.png", num)); err != nil {
		panic(err)
	}
}

// getDigitLength computes F(n) with arbitrary precision (phi^n / sqrt(5))
// and derives its decimal digit count from the base-2 exponent.
func getDigitLength(n int) int {
	bigFive := big.NewFloat(5)
	sqrt5 := big.NewFloat(1).Sqrt(bigFive)
	phi := big.NewFloat(1)
	phi.Quo(phi.Add(big.NewFloat(1), sqrt5), big.NewFloat(2))
	temp := n
	val := big.NewFloat(1)
	// Raise val to the power of n
	// math/big doesn't have Exp on Float
	// So I just use a simple loop to multiply itself
	for temp > 0 {
		val = val.Mul(val, phi)
		temp -= 1
	}
	ans := val.Quo(val, sqrt5)
	// NOTE(review): n is an int, so the %f verbs below are mismatched
	// (go vet flags this; output contains %!f(int=...)). Should be %d for
	// n — left unchanged here as it alters program output.
	fmt.Printf("Value at n: %f in fibonnaci sequence is: %f\n", n, ans)
	// Go returns the exp in powers of 2
	// 2 ^^ y == 10 ^^ x
	// Taking log
	// y log 2 = x log 10 = x
	// x in base10 = y log 2 hence the addition of that factor
	return int(float64(ans.MantExp(ans))*math.Log10(2)) + 1
}

// getFnIndex returns the index of the first Fibonacci number with the
// given number of digits, searching downward from the 5*digits estimate.
func getFnIndex(numberOfDigits int) int {
	// From my analysis, the fibonacci number index follows a plot along 5x of the number
	// approximately in relation to its number of digits
	maxDelta := 5 * (numberOfDigits)
	for maxDelta > 0 {
		if numberOfDigits > getDigitLengthBinet(maxDelta) {
			fmt.Println("Approximate value", 5*numberOfDigits)
			fmt.Println("Actual value", maxDelta+1)
			fmt.Println("Offset iterations", 5*numberOfDigits-(maxDelta+1))
			fmt.Println("Iteration Percentage", float64((5*numberOfDigits)-(maxDelta+1))*100/float64(maxDelta+1))
			break
		}
		maxDelta--
	}
	return maxDelta + 1
}

// main parses flags, optionally renders the analysis plots, prints the gaps
// between indices where the digit count grows, then solves for the target
// digit count.
func main() {
	numberOfDigits := flag.Int("number", 2, "Number of digits to look out for")
	plotGraphs := flag.Bool("plot", false, "Boolean to plot graph of deviations")
	flag.Parse()
	maxDelta := 5 * *numberOfDigits
	if *plotGraphs {
		plotFunc(float64(maxDelta))
		plotDifference(float64(maxDelta))
	}
	i := 2
	result := []int{}
	prevDigitCount := 1
	prevIndex := 0
	for i < *numberOfDigits {
		if prevDigitCount < getDigitLengthBinet(i) {
			result = append(result, i-prevIndex)
			prevIndex = i
			prevDigitCount = getDigitLengthBinet(i)
		}
		i += 1
	}
	fmt.Println(result)
	// getDigitLength(*numberOfDigits)
	getFnIndex(*numberOfDigits)
}
Problem_25_Euler/main.go
0.680135
0.528838
main.go
starcoder
package naive_bayesian

import "math"

// Feature value encodings: each attribute maps a categorical label to the
// small integer mark stored in the fruit struct.
var (
	shape  = map[string]int{"irregular": 1, "round": 2, "oval": 3}
	color  = map[string]int{"red": 1, "orange": 2, "green": 3}
	grain  = map[string]int{"noGrain": 1, "grain": 2}
	weight = map[string]int{"0-200": 1, "200-500": 2, "500>1000": 3}
	grip   = map[string]int{"hard": 1, "soft": 2}
	taste  = map[string]int{"sour": 1, "sweet": 2}
)

// fruit is one training sample; each field holds the integer mark of the
// corresponding attribute value (see the encoding maps above).
type fruit struct {
	shape  int
	color  int
	grain  int
	weight int
	grip   int
	taste  int
}

// Probability maps an attribute name (e.g. "shape") to the distribution of
// its values.
type Probability map[string]PAttr

// PAttr maps an attribute value label (e.g. "round") to its probability.
type PAttr map[string]float64

// ClassInTotal calculates the probability P(Class).
// total[0] is the target class whose probability is calculated.
func ClassInTotal(total ...[]fruit) float64 {
	class := total[0]
	classCount := len(class)
	var totalCount int
	for i := range total {
		totalCount += len(total[i])
	}
	return float64(classCount) / float64(totalCount)
}

// AttributeAtTotal calculates the probability P(Attribute) over all classes.
func AttributeAtTotal(total ...[]fruit) *Probability {
	var totalCount int
	for i := range total {
		totalCount += len(total[i])
	}
	var shapeCount = map[int]int{}
	var colorCount = map[int]int{}
	var grainCount = map[int]int{}
	var weightCount = map[int]int{}
	var gripCount = map[int]int{}
	var tasteCount = map[int]int{}
	var probability = Probability{
		"shape":  PAttr{},
		"color":  PAttr{},
		"grain":  PAttr{},
		"weight": PAttr{},
		"grip":   PAttr{},
		"taste":  PAttr{},
	}
	// Tally the occurrences of every attribute value across all samples.
	for _, class := range total {
		for j := range class {
			shapeCount[class[j].shape]++
			colorCount[class[j].color]++
			grainCount[class[j].grain]++
			weightCount[class[j].weight]++
			gripCount[class[j].grip]++
			tasteCount[class[j].taste]++
		}
	}
	calculateProbability("shape", shapeCount, shape, &probability, totalCount)
	calculateProbability("color", colorCount, color, &probability, totalCount)
	calculateProbability("grain", grainCount, grain, &probability, totalCount)
	calculateProbability("weight", weightCount, weight, &probability, totalCount)
	calculateProbability("grip", gripCount, grip, &probability, totalCount)
	calculateProbability("taste", tasteCount, taste, &probability, totalCount)
	return &probability
}

// ClassAtAttribute calculates the probability P(Class|Attribute) via the
// naive-Bayes product over the given properties, e.g.
// properties["shape"] = "irregular".
// Zero probabilities are smoothed to 0.01 to avoid collapsing the product
// (or dividing by zero).
func ClassAtAttribute(properties map[string]string, pClass float64, pAttrAtClass, pAttrAtTotal *Probability) float64 {
	var p float64 = 1
	for property, attr := range properties {
		p1 := (*pAttrAtClass)[property][attr]
		p2 := (*pAttrAtTotal)[property][attr]
		if isEqual(p1, 0) {
			p1 = 0.01
		}
		if isEqual(p2, 0) {
			p2 = 0.01
		}
		p *= p1 * pClass / p2
	}
	return p
}

// AttributeAtClass calculates the probability P(Attribute|Class) for every
// attribute value within a single class.
func AttributeAtClass(class []fruit) *Probability {
	fruitCount := len(class)
	var shapeCount = map[int]int{}
	var colorCount = map[int]int{}
	var grainCount = map[int]int{}
	var weightCount = map[int]int{}
	var gripCount = map[int]int{}
	var tasteCount = map[int]int{}
	var probability = Probability{
		"shape":  PAttr{},
		"color":  PAttr{},
		"grain":  PAttr{},
		"weight": PAttr{},
		"grip":   PAttr{},
		"taste":  PAttr{},
	}
	for _, f := range class {
		shapeCount[f.shape]++
		colorCount[f.color]++
		grainCount[f.grain]++
		weightCount[f.weight]++
		gripCount[f.grip]++
		tasteCount[f.taste]++
	}
	calculateProbability("shape", shapeCount, shape, &probability, fruitCount)
	calculateProbability("color", colorCount, color, &probability, fruitCount)
	calculateProbability("grain", grainCount, grain, &probability, fruitCount)
	calculateProbability("weight", weightCount, weight, &probability, fruitCount)
	calculateProbability("grip", gripCount, grip, &probability, fruitCount)
	calculateProbability("taste", tasteCount, taste, &probability, fruitCount)
	return &probability
}

// calculateProbability fills p[property] with count/total for every
// attribute label whose integer mark appears in count.
func calculateProbability(property string, count map[int]int, relations map[string]int, p *Probability, total int) {
	for mark1, count := range count {
		for attr, mark2 := range relations {
			if mark1 == mark2 {
				(*p)[property][attr] = float64(count) / float64(total)
			}
		}
	}
}

// isEqual reports whether f1 and f2 differ by less than a small epsilon.
// Fixed: the original used math.Dim(f1, f2), which clamps negative
// differences to 0 and therefore reported *any* f1 < f2 as equal.
func isEqual(f1, f2 float64) bool {
	return math.Abs(f1-f2) < 0.0000001
}
pkg/naive_bayesian/naive_beyesian.go
0.673406
0.50354
naive_beyesian.go
starcoder
package sip import () /** * This interface represents a unique IP network listening point, * which consists of port and transport. A ListeningPoint is a Java * representation of the port that a SipProvider messaging entity uses to send * and receive messages. * <p> * For any address and port that a server listens on for UDP, it MUST listen on * that same port and address for TCP. This is because a message may need to * be sent using TCP, rather than UDP, if it is too large. To handle this * a Listening point with the same port but with TCP transport would be * created and attached to a new SipProvider, upon which the SipListener is * registered. However the converse is not true, a server need not listen for * UDP on a particular address and port just because it is listening on that * same address and port for TCP. * <p> * ListeningPoints can be created from the * {@link SipStack#createListeningPoint(int, String)}. A SipStack object may * have multiple ListeningPoints, while a SipProvider as a messaging entity * may only have a single ListeningPoint. * * @author Sun Microsystems * @version 1.1 */ //Constants /** * Transport constant: TCP */ const TCP = "TCP" /** * Transport constant: UDP */ const UDP = "UDP" /** * Transport constant: SCTP * * */ const SCTP = "SCTP" /** * Transport constant: TLS over TCP * * */ const TLS = "TLS" /** * Port Constant: Default port 5060. This constant should only be used * when the transport of the ListeningPoint is set to UDP, TCP or SCTP. * * */ const PORT_5060 = 5060 /** * Port Constant: Default port 5061. This constant should only be used * when the transport of the Listening Point is set to TLS over TCP or TCP * assuming the scheme is "sips". * * */ const PORT_5061 = 5061 type ListeningPoint interface { // extends Cloneable, Serializable { /** * Gets the port of the ListeningPoint. The default port of a ListeningPoint * is dependent on the scheme and transport. 
For example: * <ul> * <li>The default port is 5060, if the transport UDP and the scheme is * <i>sip:</i>. * <li>The default port is 5060, if the transport is TCP and the scheme * is <i>sip:</i>. * <li>The default port is 5060, if the transport is SCTP and the scheme * is <i>sip:</i>. * <li>The default port is 5061, if the transport is TLS over TCP and the * scheme is <i>sip:</i>. * <li>The default port is 5061, if the transport is TCP and the scheme * is <i>sips:</i>. * </ul> * * @return the integer value of the port. */ GetPort() int /** * Gets the transport of the ListeningPoint. * * @return the string value of the transport. */ GetTransport() string /** * This method indicates whether the specified object is equal to this * Listening Point. The specified object is equal to this ListeningPoint * if the specified object is a ListeningPoint and the transport and port * in the specified Listening Point is the same as this Listening Point. * * @param obj - the object with which to compare this ListeningPoint. * @return true if this ListeningPoint is "equal to" the obj argument; * false otherwise. */ Equals(obj interface{}) bool }
sip/ListeningPoint.go
0.886174
0.407392
ListeningPoint.go
starcoder
package cube

import (
	"github.com/bahadrix/cardinalitycube/cube/pb"
	"sync"
)

// Board is a table like data structure which consists of rows.
// It is of course thread safe: rowMap access is guarded by rowLock, and
// cell creation is serialized by cellLock.
type Board struct {
	cube     *Cube
	rowMap   map[string]*Row
	rowLock  sync.RWMutex
	cellLock sync.Mutex
}

// A BoardSnapshot contains rows data of specific time
type BoardSnapshot map[string]*RowSnapshot

// NewBoard creates a new board for given cube
func NewBoard(cube *Cube) *Board {
	return &Board{
		cube:   cube,
		rowMap: make(map[string]*Row),
	}
}

// GetCell returns cell that resides in given row.
// If row or cell not found function returns nil unless createIfNotExists
// is set, in which case the missing row and/or cell are created.
func (b *Board) GetCell(rowName string, cellName string, createIfNotExists bool) *Cell {
	var cell *Cell
	b.rowLock.RLock()
	row := b.rowMap[rowName]
	b.rowLock.RUnlock()
	if row == nil {
		if !createIfNotExists {
			return nil
		}
		b.rowLock.Lock() // row sync in ----
		// Re-check under the write lock: another goroutine may have created
		// the row between RUnlock above and this Lock.
		row = b.rowMap[rowName]
		if row == nil {
			row = NewRow()
			b.rowMap[rowName] = row
		}
		b.rowLock.Unlock() // row sync out ---
	}
	cell = row.GetCell(cellName)
	if cell == nil && createIfNotExists {
		b.cellLock.Lock() // cell sync in ----
		// Double-checked creation, mirroring the row path above.
		cell = row.GetCell(cellName)
		if cell == nil {
			cell = b.cube.generateCell()
			row.SetCell(cellName, cell)
		}
		b.cellLock.Unlock() // cell sync out ---
	}
	return cell
}

// GetRowSnapshot Returns snapshot of given row.
// Blocks row while getting its snapshot
func (b *Board) GetRowSnapshot(rowName string) *RowSnapshot {
	b.rowLock.RLock()
	row := b.rowMap[rowName]
	b.rowLock.RUnlock()
	if row == nil {
		return nil
	}
	return row.GetSnapshot()
}

// GetSnapshot return board's snapshot.
// Blocks whole board while getting snapshot.
func (b *Board) GetSnapshot() *BoardSnapshot {
	ss := make(BoardSnapshot)
	b.rowLock.RLock()
	for key, row := range b.rowMap {
		ss[key] = row.GetSnapshot()
	}
	b.rowLock.RUnlock()
	return &ss
}

// CheckRowExists return true if row exists in board.
func (b *Board) CheckRowExists(rowName string) bool {
	b.rowLock.RLock()
	_, exists := b.rowMap[rowName]
	b.rowLock.RUnlock()
	return exists
}

// DropRow drops given row from board if it exists
func (b *Board) DropRow(rowName string) {
	b.rowLock.Lock()
	delete(b.rowMap, rowName) // delete on a missing key is a no-op
	b.rowLock.Unlock()
}

// GetRowKeys returns row names. Read blocking operation.
func (b *Board) GetRowKeys() []string {
	b.rowLock.RLock()
	keys := make([]string, 0, len(b.rowMap))
	for key := range b.rowMap {
		keys = append(keys, key)
	}
	b.rowLock.RUnlock()
	return keys
}

// GetRowCount returns row count. Read blocking operation.
// Fixed: the original read b.rowMap without holding rowLock, racing with
// concurrent writers (GetCell/DropRow/LoadData).
func (b *Board) GetRowCount() int {
	b.rowLock.RLock()
	defer b.rowLock.RUnlock()
	return len(b.rowMap)
}

// GetCellKeys returns cell keys of row. Read blocking operation.
func (b *Board) GetCellKeys(rowName string) (keys []string) {
	b.rowLock.RLock()
	r, rowExists := b.rowMap[rowName]
	b.rowLock.RUnlock()
	if !rowExists {
		return
	}
	return r.GetCellKeys()
}

// GetCellCount returns cell count of row. Read blocking operation.
func (b *Board) GetCellCount(rowName string) int {
	b.rowLock.RLock()
	r, rowExists := b.rowMap[rowName]
	b.rowLock.RUnlock()
	if !rowExists {
		return 0
	}
	return r.GetCellCount()
}

// Dump serializes the whole board into a protobuf BoardData.
// Read blocks the board for the duration of the dump.
func (b *Board) Dump() (*pb.BoardData, error) {
	b.rowLock.RLock()
	defer b.rowLock.RUnlock()
	dataMap := make(map[string]*pb.RowData, len(b.rowMap))
	var err error
	for k, r := range b.rowMap {
		dataMap[k], err = r.Dump()
		if err != nil {
			return nil, err
		}
	}
	return &pb.BoardData{RowMap: dataMap}, err
}

// LoadData merges the given serialized board data into this board,
// creating missing rows and overwriting cells that already exist.
// Write blocks the board for the duration of the load.
func (b *Board) LoadData(data *pb.BoardData) error {
	b.rowLock.Lock()
	defer b.rowLock.Unlock()
	for rowName, rowData := range data.RowMap {
		row, rowExists := b.rowMap[rowName]
		if !rowExists {
			row = NewRow()
			b.rowMap[rowName] = row
		}
		for cellName, cellData := range rowData.CellMap {
			cell, err := b.cube.deserializeCell(cellData.CoreData)
			if err != nil {
				return err
			}
			row.SetCell(cellName, cell)
		}
	}
	return nil
}
cube/board.go
0.696681
0.401923
board.go
starcoder
package sheetfile

import (
	"fmt"
	"github.com/fourstring/sheetfs/master/config"
	"gorm.io/gorm"
	"gorm.io/gorm/clause"
	"strings"
	"text/template"
)

/*
Cell Represent a cell of a sheet.
Every Cell is stored in a Chunk, starting at a fixed offset(see SheetFile), and
every Chunk contains multiple Cells. The number of cells stored in a Chunk is
specified by config.MaxCellsPerChunk. Because every Chunk is composed of slots
for config.MaxCellsPerChunk, the Size of a Cell is set to
config.BytesPerChunk / config.MaxCellsPerChunk, except for the special MetaCell,
where the metadata of a SheetFile are stored(see SheetFile too).

Cell plays as an index from row, column number to concrete Chunk which actually
stores data, providing applications an interface to manipulate cell directly,
instead of computing offset of some cell manually. This index is critical to
API of our filesystem, and must be persistent.

Cell is also a gorm model. All Cell of a SheetFile are stored in a sqlite
table, named as 'cells_<SheetName>'. Cell belongs to different SheetFile will
be stored in different tables, we implement this by executing templated SQL.
See also create_tmpl below.
*/
type Cell struct {
	gorm.Model
	// CellID is used to accelerate looking up cell by row and column number
	// CellID is composed of row and column number, which makes CellID a standalone sqlite index,
	// rather than maintaining a joined index on (row,col)
	// uint64 is not supported in sqlite, use int64 as a workaround
	CellID int64 `gorm:"index"`
	Offset uint64
	Size uint64
	ChunkID uint64
	// SheetName is in-memory only (gorm:"-"): it selects the per-sheet table
	// via TableName and is never persisted as a column.
	SheetName string `gorm:"-"`
}

// NewCell constructs an in-memory Cell; nothing is persisted until
// Persistent is called during checkpointing.
func NewCell(cellID int64, offset uint64, size uint64, chunkID uint64, sheetName string) *Cell {
	return &Cell{CellID: cellID, Offset: offset, Size: size, ChunkID: chunkID, SheetName: sheetName}
}

/*
TableName Returns the table name which contains cells of some SheetFile.
*/
func (c *Cell) TableName() string {
	return GetCellTableName(c.SheetName)
}

/*
Snapshot Returns a *Cell points to the copy of c.
See SheetFile for the necessity of Snapshot.

@return
	*Cell points to the copy of c.
*/
func (c *Cell) Snapshot() *Cell {
	var nc Cell
	nc = *c // shallow copy is a full copy: all fields are value types
	return &nc
}

/*
GetCellTableName Same as Cell.TableName, for creation of Cell.

@return
	sqlite table name to store Cell of sheetName
*/
func GetCellTableName(sheetName string) string {
	return fmt.Sprintf("cells_%s", sheetName)
}

/*
GetCellID Compute CellID by row and column number.
It is almost impossible for a sheet to scale up to 4294967295x4294967295,
so it's enough to use an uint32 to represent row and column. Due to this,
CellID is formed by put row number in higher 32bits of an uint64, and put
column number in lower 32bits.

@return
	uint64 CellID of Cell located at (row, col)
*/
func GetCellID(row uint32, col uint32) int64 {
	return int64(row)<<32 | int64(col)
}

/*
GetSheetCellsAll Load all Cell of a SheetFile from sqlite database.
This method should only be used to load checkpoints in sqlite. After loading
from sqlite, subsequent mutations should be conducted in memory, and rely on
journaling to tolerate failure, until checkpointing next time.

@return
	[]*Cell: All Cell stored in table corresponding to sheetName
*/
func GetSheetCellsAll(db *gorm.DB, sheetName string) []*Cell {
	cells := []*Cell{}
	db.Table(GetCellTableName(sheetName)).Find(&cells)
	return cells
}

// _tableName is the data carrier for create_tmpl below.
type _tableName struct {
	Name string
}

/*
create_tmpl
create_tmpl is a SQL template used to create Cell table for a new SheetFile.
These SQLs is generated by gorm from currently definition of Cell. If Cell
are modified, remember to update the template too.

Currently there is no check against .Name in template to counter SQL
injection, applications should take care of it.
*/
var create_tmpl *template.Template

func init() {
	create_tmpl, _ = template.New("create_table").Parse("CREATE TABLE `{{ .Name}}` (`id` integer,`created_at` datetime,`updated_at` datetime,`deleted_at` datetime,`cell_id` integer,`offset` integer,`size` integer,`chunk_id` integer,PRIMARY KEY (`id`),CONSTRAINT `fk_chunks_cells` FOREIGN KEY (`chunk_id`) REFERENCES `chunks`(`id`));" +
		"CREATE INDEX `idx_{{ .Name}}_cell_id` ON `{{ .Name}}`(`cell_id`);" +
		"CREATE INDEX `idx_{{ .Name}}_deleted_at` ON `{{ .Name}}`(`deleted_at`);")
}

/*
CreateCellTableIfNotExists Query the sqlite_master table to check whether
Cell table for sheetName has existed or not. If not, create such a table
with create_tmpl.

Creating a table in transactions is not allowed in sqlite, so this function
should not be called in db.Transaction.

@para
	db: a gorm connection, should not be a transaction.
	sheetName: filename of Cell belongs to.

@return
	error from execution of queries. If this function is called in a
	transaction, a 'database is locked' will be returned.
*/
func CreateCellTableIfNotExists(db *gorm.DB, sheetName string) error {
	// Drop to the raw database/sql handle: DDL must bypass gorm transactions.
	rawdb, err := db.DB()
	if err != nil {
		return err
	}
	rows, err := rawdb.Query("SELECT name FROM sqlite_master WHERE type='table' AND name= ?;", GetCellTableName(sheetName))
	if err != nil {
		return err
	}
	if !rows.Next() {
		// No row returned: the table does not exist yet. Close the result
		// set before issuing DDL on the same connection.
		rows.Close()
		tn := _tableName{Name: GetCellTableName(sheetName)}
		var b strings.Builder
		err = create_tmpl.Execute(&b, tn)
		if err != nil {
			return err
		}
		createQuery := b.String()
		_, err = rawdb.Exec(createQuery)
		if err != nil {
			return err
		}
	}
	rows.Close()
	return nil
}

/*
Persistent Flush Cell data in memory into sqlite. This method should be used
only for checkpointing, and is supposed to be called in a transaction for
atomicity.
*/
func (c *Cell) Persistent(tx *gorm.DB) {
	// Upsert: overwrite an existing row for the same primary key.
	tx.Table(c.TableName()).Clauses(clause.OnConflict{
		UpdateAll: true,
	}).Create(c)
}

/*
IsMeta Returns true if c is the MetaCell.
(See SheetFile)
*/
func (c *Cell) IsMeta() bool {
	return c.CellID == config.SheetMetaCellID
}
master/sheetfile/cell.go
0.610453
0.55447
cell.go
starcoder
package common

import (
	"log"
	"math/rand"
	"strconv"
	"time"
)

// RngSeed sets the rand seed global variable using the randSeed and setSeed
// arguments. randSeed seeds from the current time; setSeed (any value other
// than -1) seeds deterministically. Supplying both is a fatal error.
func RngSeed(randSeed bool, setSeed int64) {
	if randSeed && setSeed != -1 {
		log.Fatalf("Cannot use a set seed and also a random seed.")
	}
	if randSeed {
		rand.Seed(time.Now().UnixNano())
	} else if setSeed != -1 {
		rand.Seed(setSeed)
	}
}

// IntSliceContains returns true if a slice of ints a contains an int b,
// false otherwise.
func IntSliceContains(a []int, b int) bool {
	for i := 0; i < len(a); i++ {
		if a[i] == b {
			return true
		}
	}
	return false
}

// StringToBool parses a string into a bool and will exit on error
func StringToBool(s string) bool {
	b, err := strconv.ParseBool(s)
	if err != nil {
		log.Panicf("Error: trouble converting \"%s\" to a bool", s)
	}
	return b
}

// StringToInt parses a string into an int and will exit on error
func StringToInt(s string) int {
	n, err := strconv.Atoi(s)
	if err != nil {
		log.Panicf("Error: trouble converting \"%s\" to a int", s)
	}
	return n
}

// StringToFloat32 parses a string into a float32 and will exit on error
func StringToFloat32(s string) float32 {
	n, err := strconv.ParseFloat(s, 32)
	if err != nil {
		log.Panicf("Error: trouble converting \"%s\" to a float32\n", s)
	}
	return float32(n)
}

// StringToFloat64 parses a string into a float64 and will exit on error
func StringToFloat64(s string) float64 {
	n, err := strconv.ParseFloat(s, 64)
	if err != nil {
		log.Panicf("Error: trouble converting \"%s\" to a float64\n", s)
	}
	return n
}

// StringToInt8 parses a string into an int8 and will exit on error
func StringToInt8(s string) int8 {
	n, err := strconv.ParseInt(s, 10, 8)
	if err != nil {
		log.Panicf("Error: trouble converting \"%s\" to a int8\n", s)
	}
	return int8(n)
}

// StringToInt16 parses a string into an int16 and will exit on error
func StringToInt16(s string) int16 {
	n, err := strconv.ParseInt(s, 10, 16)
	if err != nil {
		log.Panicf("Error: trouble converting \"%s\" to a int16\n", s)
	}
	return int16(n)
}

// StringToInt32 parses a string into an int32 and will exit on error
func StringToInt32(s string) int32 {
	n, err := strconv.ParseInt(s, 10, 32)
	if err != nil {
		log.Panicf("Error: trouble converting \"%s\" to a int32\n", s)
	}
	return int32(n)
}

// StringToInt64 parses a string into an int64 and will exit on error
func StringToInt64(s string) int64 {
	n, err := strconv.ParseInt(s, 10, 64)
	if err != nil {
		log.Panicf("Error: trouble converting \"%s\" to a int64\n", s)
	}
	return n
}

// StringToUint64 parses a string into a uint64 and will exit on error
func StringToUint64(s string) uint64 {
	n, err := strconv.ParseUint(s, 10, 64)
	if err != nil {
		log.Panicf("Error: trouble converting \"%s\" to a uint64\n", s)
	}
	return n
}

// StringToUint32 parses a string into a uint32 and will exit on error
func StringToUint32(s string) uint32 {
	n, err := strconv.ParseUint(s, 10, 32)
	if err != nil {
		log.Panicf("Error: trouble converting \"%s\" to a uint32\n", s)
	}
	return uint32(n)
}

// StringToUint16 parses a string into a uint16 and will exit on error
func StringToUint16(s string) uint16 {
	n, err := strconv.ParseUint(s, 10, 16)
	if err != nil {
		log.Panicf("Error: trouble converting \"%s\" to a uint16\n", s)
	}
	return uint16(n)
}

// StringToUint8 parses a string into a uint8 and will exit on error
func StringToUint8(s string) uint8 {
	n, err := strconv.ParseUint(s, 10, 8)
	if err != nil {
		log.Panicf("Error: trouble converting \"%s\" to a uint8\n", s)
	}
	return uint8(n)
}
common/math.go
0.547706
0.447641
math.go
starcoder
package main

import (
	"math/rand"
	"os"

	"github.com/nate-anderson/traffic-simulator"
)

/*
Simulate an intersection of two roads, each with two lanes passing each
direction.
*/

// a traffic signal to alternate between north-south and east-west traffic
type alternatingTrafficSignal struct {
	northSouth bool
}

// Proceed reports whether the given lane may enter the junction, and flips
// the signal's phase on every call.
// NOTE(review): the phase toggles per Proceed call, not per simulation tick —
// if the junction queries Proceed once per lane, lanes within one tick see
// alternating answers; confirm against the traffic package's contract.
func (s *alternatingTrafficSignal) Proceed(lane traffic.Lane) bool {
	if s.northSouth {
		s.northSouth = !s.northSouth
		return lane.Direction() == traffic.DirectionN || lane.Direction() == traffic.DirectionS
	}
	s.northSouth = !s.northSouth
	return lane.Direction() == traffic.DirectionE || lane.Direction() == traffic.DirectionW
}

// fourWayIntersection demos a two-lane, four-way intersection with each lane
// permitting either straight or turning traffic
func fourWayIntersection() {
	// create lanes
	// Naming: "In" lanes approach the junction, "Out" lanes leave it.
	// NOTE(review): each In/Out pair shares the same display name (e.g. both
	// eastbound north lanes are "eastbound north") — presumably intentional
	// (same street), but verify the report output stays unambiguous.
	eastboundInN := traffic.NewFIFOLane("eastbound north", traffic.DirectionE)
	eastboundInS := traffic.NewFIFOLane("eastbound south", traffic.DirectionE)
	eastboundOutN := traffic.NewFIFOLane("eastbound north", traffic.DirectionE)
	eastboundOutS := traffic.NewFIFOLane("eastbound south", traffic.DirectionE)
	westboundInN := traffic.NewFIFOLane("westbound north", traffic.DirectionW)
	westboundInS := traffic.NewFIFOLane("westbound south", traffic.DirectionW)
	westboundOutN := traffic.NewFIFOLane("westbound north", traffic.DirectionW)
	westboundOutS := traffic.NewFIFOLane("westbound south", traffic.DirectionW)
	northboundInE := traffic.NewFIFOLane("northbound east", traffic.DirectionN)
	northboundInW := traffic.NewFIFOLane("northbound west", traffic.DirectionN)
	northboundOutE := traffic.NewFIFOLane("northbound east", traffic.DirectionN)
	northboundOutW := traffic.NewFIFOLane("northbound west", traffic.DirectionN)
	southboundInE := traffic.NewFIFOLane("southbound east", traffic.DirectionS)
	southboundInW := traffic.NewFIFOLane("southbound west", traffic.DirectionS)
	southboundOutE := traffic.NewFIFOLane("southbound east", traffic.DirectionS)
	southboundOutW := traffic.NewFIFOLane("southbound west", traffic.DirectionS)

	incomingLanes := []traffic.Lane{
		eastboundInN, eastboundInS,
		westboundInN, westboundInS,
		northboundInE, northboundInW,
		southboundInE, southboundInW,
	}

	// link inbound lanes to outbound lanes
	// Each approach lane may continue straight or turn into the cross street.
	eastboundInN.AddDestination(northboundOutW, eastboundOutN)
	eastboundInS.AddDestination(eastboundOutS, southboundOutW)
	westboundInN.AddDestination(westboundOutN, northboundOutE)
	westboundInS.AddDestination(westboundOutS, southboundOutE)
	northboundInE.AddDestination(northboundOutE, eastboundOutS)
	northboundInW.AddDestination(northboundOutW, westboundOutS)
	southboundInE.AddDestination(southboundOutE, eastboundOutN)
	southboundInW.AddDestination(southboundOutW, westboundOutN)

	// create traffic signal for the junction
	signal := alternatingTrafficSignal{false}

	// create junction
	intersection := traffic.NewJunction("four-way", signal.Proceed)
	intersection.AddEnteringLanes(incomingLanes...)

	// add some vehicles to initial simulation state in incoming lanes
	maxPerLane := 15
	for _, incoming := range incomingLanes {
		n := rand.Intn(maxPerLane + 1)
		vehicles := traffic.MakeNVehicles(n)
		incoming.AddInitialVehicles(vehicles...)
	}

	// create simulation, 10 ticks, 3 vehicles per tick
	sim := traffic.NewSimulation("Four-way Intersection Simulation", 3, 10)
	sim.AddJunctions(intersection)
	report := sim.Run()
	report.Write(os.Stdout)
}
demos/fourway.go
0.722821
0.482246
fourway.go
starcoder
package geometry

import (
	"github.com/g3n/engine/gls"
	"github.com/g3n/engine/math32"
	"math"
)

// NewDisk creates a disk (filled circle) geometry with the specified
// radius and number of radial segments/triangles (minimum 3).
func NewDisk(radius float64, segments int) *Geometry {
	return NewDiskSector(radius, segments, 0, 2*math.Pi)
}

// NewDiskSector creates a disk (filled circle) or disk sector geometry with the specified radius,
// number of radial segments/triangles (minimum 3), sector start angle in radians, and sector size angle in radians.
// The center of the disk is at the origin, and theta runs counter-clockwise on the XY plane, starting at (x,y,z)=(1,0,0).
// The mesh is built as a triangle fan: vertex 0 is the center, followed by
// segments+1 rim vertices (the extra one closes the fan / ends the sector).
func NewDiskSector(radius float64, segments int, thetaStart, thetaLength float64) *Geometry {

	d := NewGeometry()

	// Validate arguments
	if segments < 3 {
		panic("Invalid argument: segments. The number of segments needs to be greater or equal to 3.")
	}

	// Create buffers
	positions := math32.NewArrayF32(0, 16)
	normals := math32.NewArrayF32(0, 16)
	uvs := math32.NewArrayF32(0, 16)
	indices := math32.NewArrayU32(0, 16)

	// Append circle center position
	center := math32.NewVector3(0, 0, 0)
	positions.AppendVector3(center)

	// Append circle center normal; the disk lies in the XY plane, so every
	// vertex shares the +Z normal.
	var normal math32.Vector3
	normal.Z = 1
	normals.AppendVector3(&normal)

	// Append circle center uv coordinate
	centerUV := math32.NewVector2(0.5, 0.5)
	uvs.AppendVector2(centerUV)

	// Generate the segments: segments+1 rim vertices from thetaStart to
	// thetaStart+thetaLength inclusive.
	for i := 0; i <= segments; i++ {
		segment := thetaStart + float64(i)/float64(segments)*thetaLength

		vx := float32(radius * math.Cos(segment))
		vy := float32(radius * math.Sin(segment))

		// Appends vertex position, normal and uv coordinates.
		// UVs map the rim onto the unit square: (vx/radius+1)/2 rescales
		// [-radius, radius] to [0, 1].
		positions.Append(vx, vy, 0)
		normals.AppendVector3(&normal)
		uvs.Append((vx/float32(radius)+1)/2, (vy/float32(radius)+1)/2)
	}

	// Triangle fan indices: each triangle is (rim i, rim i+1, center 0);
	// rim vertices occupy indices 1..segments+1.
	for i := 1; i <= segments; i++ {
		indices.Append(uint32(i), uint32(i)+1, 0)
	}

	d.SetIndices(indices)
	d.AddVBO(gls.NewVBO(positions).AddAttrib(gls.VertexPosition))
	d.AddVBO(gls.NewVBO(normals).AddAttrib(gls.VertexNormal))
	d.AddVBO(gls.NewVBO(uvs).AddAttrib(gls.VertexTexcoord))

	// Update volume: a flat disk encloses no volume.
	d.volume = 0
	d.volumeValid = true

	return d
}
geometry/disk.go
0.796728
0.575499
disk.go
starcoder
package d3 import ( "strconv" "strings" "github.com/adamcolton/geom/angle" "github.com/adamcolton/geom/calc/cmpr" "github.com/adamcolton/geom/geomerr" ) // T is a 3D transform. type T [4][4]float64 /* | x | | a b c d | | y | * | e f g h | = | ax+by+cz+d ex+fy+gz+h ix+jy+kz+l mx+ny+pz+q | | z | | i j k l | | 1 | | m n p q | | (0,0) (1,0) (2,0) | | [0][0] [0][1] [0][2] | | (0,1) (1,1) (2,1) | = | [1][0] [1][1] [1][2] | | (0,2) (1,2) (2,2) | | [2][0] [2][1] [2][2] | | a b c | | j k l | | aj+bm+cp ak+bn+cq al+bo+cr | | d e f | * | m n o | = | dj+em+fp dk+en+fq dl+eo+fr | | g h i | | p q r | | gj+hm+ip gk+hn+iq gl+ho+ir | | a1 b1 c1 d1 | | a2 b2 c2 d2 | | a1a2+b1e2+c1i2+d1m2 a1b2+b1f2+c1j2+d1n2 a1c2+b1g2+c1k2+d1p2 | | e1 f1 g1 h1 | * | e2 f2 g2 h2 | = | | i1 j1 k1 l1 | | i2 j2 k2 l2 | | | m1 n1 p1 q1 | | m2 n2 p2 q2 | | */ // Pt applies T to pt. func (t *T) Pt(pt Pt) Pt { return Pt{ pt.X*t[0][0] + pt.Y*t[0][1] + pt.Z*t[0][2] + t[0][3], pt.X*t[1][0] + pt.Y*t[1][1] + pt.Z*t[1][2] + t[1][3], pt.X*t[2][0] + pt.Y*t[2][1] + pt.Z*t[2][2] + t[2][3], } } // Pts applies T to pts. func (t *T) Pts(pts []Pt) []Pt { out := make([]Pt, len(pts)) for i, pt := range pts { out[i] = t.Pt(pt) } return out } // Pts applies T to v. func (t *T) V(v V) V { return V{ v.X*t[0][0] + v.Y*t[0][1] + v.Z*t[0][2] + t[0][3], v.X*t[1][0] + v.Y*t[1][1] + v.Z*t[1][2] + t[1][3], v.X*t[2][0] + v.Y*t[2][1] + v.Z*t[2][2] + t[2][3], } } // PtF applies T to pt and also returns the scalar. func (t *T) PtF(pt Pt) (Pt, float64) { return Pt{ pt.X*t[0][0] + pt.Y*t[0][1] + pt.Z*t[0][2] + t[0][3], pt.X*t[1][0] + pt.Y*t[1][1] + pt.Z*t[1][2] + t[1][3], pt.X*t[2][0] + pt.Y*t[2][1] + pt.Z*t[2][2] + t[2][3], }, pt.X*t[3][0] + pt.Y*t[3][1] + pt.Z*t[3][2] + t[3][3] } // PtScl applies T the pt and divides the result by the scalar. 
func (t *T) PtScl(pt Pt) Pt { w := pt.X*t[3][0] + pt.Y*t[3][1] + pt.Z*t[3][2] + t[3][3] return Pt{ (pt.X*t[0][0] + pt.Y*t[0][1] + pt.Z*t[0][2] + t[0][3]) / w, (pt.X*t[1][0] + pt.Y*t[1][1] + pt.Z*t[1][2] + t[1][3]) / w, (pt.X*t[2][0] + pt.Y*t[2][1] + pt.Z*t[2][2] + t[2][3]) / w, } } // PtsScl applies to the pts and divides the results by the scalar. func (t *T) PtsScl(pts []Pt) []Pt { out := make([]Pt, len(pts)) for i, pt := range pts { out[i] = t.PtScl(pt) } return out } // VF applies T to v and returns the result and the scalar. func (t *T) VF(v V) (V, float64) { return V{ v.X*t[0][0] + v.Y*t[0][1] + v.Z*t[0][2] + t[0][3], v.X*t[1][0] + v.Y*t[1][1] + v.Z*t[1][2] + t[1][3], v.X*t[2][0] + v.Y*t[2][1] + v.Z*t[2][2] + t[2][3], }, v.X*t[3][0] + v.Y*t[3][1] + v.Z*t[3][2] + t[3][3] } // T applies t2 to t. func (t *T) T(t2 *T) *T { return &T{ { t[0][0]*t2[0][0] + t[1][0]*t2[0][1] + t[2][0]*t2[0][2] + t[3][0]*t2[0][3], t[0][1]*t2[0][0] + t[1][1]*t2[0][1] + t[2][1]*t2[0][2] + t[3][1]*t2[0][3], t[0][2]*t2[0][0] + t[1][2]*t2[0][1] + t[2][2]*t2[0][2] + t[3][2]*t2[0][3], t[0][3]*t2[0][0] + t[1][3]*t2[0][1] + t[2][3]*t2[0][2] + t[3][3]*t2[0][3], }, { t[0][0]*t2[1][0] + t[1][0]*t2[1][1] + t[2][0]*t2[1][2] + t[3][0]*t2[1][3], t[0][1]*t2[1][0] + t[1][1]*t2[1][1] + t[2][1]*t2[1][2] + t[3][1]*t2[1][3], t[0][2]*t2[1][0] + t[1][2]*t2[1][1] + t[2][2]*t2[1][2] + t[3][2]*t2[1][3], t[0][3]*t2[1][0] + t[1][3]*t2[1][1] + t[2][3]*t2[1][2] + t[3][3]*t2[1][3], }, { t[0][0]*t2[2][0] + t[1][0]*t2[2][1] + t[2][0]*t2[2][2] + t[3][0]*t2[2][3], t[0][1]*t2[2][0] + t[1][1]*t2[2][1] + t[2][1]*t2[2][2] + t[3][1]*t2[2][3], t[0][2]*t2[2][0] + t[1][2]*t2[2][1] + t[2][2]*t2[2][2] + t[3][2]*t2[2][3], t[0][3]*t2[2][0] + t[1][3]*t2[2][1] + t[2][3]*t2[2][2] + t[3][3]*t2[2][3], }, { t[0][0]*t2[3][0] + t[1][0]*t2[3][1] + t[2][0]*t2[3][2] + t[3][0]*t2[3][3], t[0][1]*t2[3][0] + t[1][1]*t2[3][1] + t[2][1]*t2[3][2] + t[3][1]*t2[3][3], t[0][2]*t2[3][0] + t[1][2]*t2[3][1] + t[2][2]*t2[3][2] + t[3][2]*t2[3][3], 
t[0][3]*t2[3][0] + t[1][3]*t2[3][1] + t[2][3]*t2[3][2] + t[3][3]*t2[3][3], }, } } // AssertEqual fulfils geomtest.AssertEqualizer func (t *T) AssertEqual(to interface{}, tol cmpr.Tolerance) error { if err := geomerr.NewTypeMismatch(t, to); err != nil { return err } t2 := to.(*T) return geomerr.NewSliceErrs(4, 4, func(x int) error { return geomerr.NewSliceErrs(4, 4, func(y int) error { a, b := t[y][x], t2[y][x] return geomerr.NewNotEqual(tol.Equal(a, b), a, b) }) }) } // TProd returns the product of multiple transforms. func TProd(ts ...*T) *T { if len(ts) == 0 { return Identity() } t := ts[0] for _, t2 := range ts[1:] { t = t.T(t2) } return t } // Identity returns a copy of the identity transform. func Identity() *T { return &T{ {1, 0, 0, 0}, {0, 1, 0, 0}, {0, 0, 1, 0}, {0, 0, 0, 1}, } } // Scale fulfills TGen and produces a T that scales by V. type Scale V // T fulfills TGen. func (s Scale) T() *T { return &T{ {s.X, 0, 0, 0}, {0, s.Y, 0, 0}, {0, 0, s.Z, 0}, {0, 0, 0, 1}, } } // TInv fulfills TGenInv. func (s Scale) TInv() *T { return &T{ {1.0 / s.X, 0, 0, 0}, {0, 1.0 / s.Y, 0, 0}, {0, 0, 1.0 / s.Z, 0}, {0, 0, 0, 1}, } } // Pair fulfills TGenPair returning the Scale transform represented by V and // it's inverse. func (s Scale) Pair() [2]*T { return [2]*T{ { {s.X, 0, 0, 0}, {0, s.Y, 0, 0}, {0, 0, s.Z, 0}, {0, 0, 0, 1}, }, { {1.0 / s.X, 0, 0, 0}, {0, 1.0 / s.Y, 0, 0}, {0, 0, 1.0 / s.Z, 0}, {0, 0, 0, 1}, }, } } // ScaleF returns Scale with all dimensions equal to f. func ScaleF(f float64) Scale { return Scale(V{f, f, f}) } // Translate fulfills TGen that translates by V type Translate V // T fulfills TGen. func (t Translate) T() *T { return &T{ {1, 0, 0, t.X}, {0, 1, 0, t.Y}, {0, 0, 1, t.Z}, {0, 0, 0, 1}, } } // TInv fulfills TGenInv func (t Translate) TInv() *T { return &T{ {1, 0, 0, -t.X}, {0, 1, 0, -t.Y}, {0, 0, 1, -t.Z}, {0, 0, 0, 1}, } } // Pair fulfills TGenPair returning the translate transform represented by V and // it's inverse. 
func (t Translate) Pair() [2]*T { return [2]*T{ { {1, 0, 0, t.X}, {0, 1, 0, t.Y}, {0, 0, 1, t.Z}, {0, 0, 0, 1}, }, { {1, 0, 0, -t.X}, {0, 1, 0, -t.Y}, {0, 0, 1, -t.Z}, {0, 0, 0, 1}, }, } } // RotationPlane represents the planes for rotation. type RotationPlane byte // The 3 planes for rotation const ( XY RotationPlane = iota XZ YZ ) // Rotation fulfill TGen type Rotation struct { Angle angle.Rad Plane RotationPlane } // T fullfils TGen and produces a rotation of the given angle in the plane. func (r Rotation) T() *T { s, c := r.Angle.Sincos() if r.Plane == XZ { return &T{ {c, 0, -s, 0}, {0, 1, 0, 0}, {s, 0, c, 0}, {0, 0, 0, 1}, } } if r.Plane == YZ { return &T{ {1, 0, 0, 0}, {0, c, -s, 0}, {0, s, c, 0}, {0, 0, 0, 1}, } } return &T{ {c, -s, 0, 0}, {s, c, 0, 0}, {0, 0, 1, 0}, {0, 0, 0, 1}, } } // TInv fulfills TGenPair. func (r Rotation) TInv() *T { s, c := r.Angle.Sincos() if r.Plane == XZ { return &T{ {c, 0, s, 0}, {0, 1, 0, 0}, {-s, 0, c, 0}, {0, 0, 0, 1}, } } if r.Plane == YZ { return &T{ {1, 0, 0, 0}, {0, c, s, 0}, {0, -s, c, 0}, {0, 0, 0, 1}, } } return &T{ {c, s, 0, 0}, {-s, c, 0, 0}, {0, 0, 1, 0}, {0, 0, 0, 1}, } } // Pair fulfills GetTPair. func (r Rotation) Pair() [2]*T { s, c := r.Angle.Sincos() if r.Plane == XZ { return [2]*T{ { {c, 0, -s, 0}, {0, 1, 0, 0}, {s, 0, c, 0}, {0, 0, 0, 1}, }, { {c, 0, s, 0}, {0, 1, 0, 0}, {-s, 0, c, 0}, {0, 0, 0, 1}, }, } } if r.Plane == YZ { return [2]*T{ { {1, 0, 0, 0}, {0, c, -s, 0}, {0, s, c, 0}, {0, 0, 0, 1}, }, { {1, 0, 0, 0}, {0, c, s, 0}, {0, -s, c, 0}, {0, 0, 0, 1}, }, } } return [2]*T{ { {c, -s, 0, 0}, {s, c, 0, 0}, {0, 0, 1, 0}, {0, 0, 0, 1}, }, { {c, s, 0, 0}, {-s, c, 0, 0}, {0, 0, 1, 0}, {0, 0, 0, 1}, }, } } // String fulfills Stringer. 
func (t T) String() string {
	// Renders as "T[ (a, b, c, d), (...), (...), (...) ]" with every
	// component formatted at the package-wide precision Prec.
	var b strings.Builder
	b.WriteString("T[ ")
	for i, row := range t {
		if i > 0 {
			b.WriteString(", ")
		}
		b.WriteString("(")
		for j, v := range row {
			if j > 0 {
				b.WriteString(", ")
			}
			b.WriteString(strconv.FormatFloat(v, 'f', Prec, 64))
		}
		b.WriteString(")")
	}
	b.WriteString(" ]")
	return b.String()
}

// TInv fulfills TGenInv. Some transforms do not have an inversion and in those
// cases the returned inverse will not be accurate.
func (t *T) TInv() *T {
	inv, _ := t.Inversion()
	return inv
}

// Inversion returns the inverse of T and a bool indicating if the inversion is
// valid.
func (t *T) Inversion() (*T, bool) {
	//https://stackoverflow.com/questions/1148309/inverting-a-4x4-matrix
	// Classical adjugate method: each out[i][j] below is a cofactor of t
	// (note the transposed indexing — out is filled column by column), and
	// the whole matrix is scaled by 1/det at the end.
	out := &T{}
	out[0][0] = 0 +
		t[1][1]*t[2][2]*t[3][3] - t[1][1]*t[2][3]*t[3][2] -
		t[2][1]*t[1][2]*t[3][3] + t[2][1]*t[1][3]*t[3][2] +
		t[3][1]*t[1][2]*t[2][3] - t[3][1]*t[1][3]*t[2][2]
	out[1][0] = 0 -
		t[1][0]*t[2][2]*t[3][3] + t[1][0]*t[2][3]*t[3][2] +
		t[2][0]*t[1][2]*t[3][3] - t[2][0]*t[1][3]*t[3][2] -
		t[3][0]*t[1][2]*t[2][3] + t[3][0]*t[1][3]*t[2][2]
	out[2][0] = 0 +
		t[1][0]*t[2][1]*t[3][3] - t[1][0]*t[2][3]*t[3][1] -
		t[2][0]*t[1][1]*t[3][3] + t[2][0]*t[1][3]*t[3][1] +
		t[3][0]*t[1][1]*t[2][3] - t[3][0]*t[1][3]*t[2][1]
	out[3][0] = 0 -
		t[1][0]*t[2][1]*t[3][2] + t[1][0]*t[2][2]*t[3][1] +
		t[2][0]*t[1][1]*t[3][2] - t[2][0]*t[1][2]*t[3][1] -
		t[3][0]*t[1][1]*t[2][2] + t[3][0]*t[1][2]*t[2][1]
	out[0][1] = 0 -
		t[0][1]*t[2][2]*t[3][3] + t[0][1]*t[2][3]*t[3][2] +
		t[2][1]*t[0][2]*t[3][3] - t[2][1]*t[0][3]*t[3][2] -
		t[3][1]*t[0][2]*t[2][3] + t[3][1]*t[0][3]*t[2][2]
	out[1][1] = 0 +
		t[0][0]*t[2][2]*t[3][3] - t[0][0]*t[2][3]*t[3][2] -
		t[2][0]*t[0][2]*t[3][3] + t[2][0]*t[0][3]*t[3][2] +
		t[3][0]*t[0][2]*t[2][3] - t[3][0]*t[0][3]*t[2][2]
	out[2][1] = 0 -
		t[0][0]*t[2][1]*t[3][3] + t[0][0]*t[2][3]*t[3][1] +
		t[2][0]*t[0][1]*t[3][3] - t[2][0]*t[0][3]*t[3][1] -
		t[3][0]*t[0][1]*t[2][3] + t[3][0]*t[0][3]*t[2][1]
	out[3][1] = 0 +
		t[0][0]*t[2][1]*t[3][2] - t[0][0]*t[2][2]*t[3][1] -
		t[2][0]*t[0][1]*t[3][2] + t[2][0]*t[0][2]*t[3][1] +
		t[3][0]*t[0][1]*t[2][2] - t[3][0]*t[0][2]*t[2][1]
	out[0][2] = 0 +
		t[0][1]*t[1][2]*t[3][3] - t[0][1]*t[1][3]*t[3][2] -
		t[1][1]*t[0][2]*t[3][3] + t[1][1]*t[0][3]*t[3][2] +
		t[3][1]*t[0][2]*t[1][3] - t[3][1]*t[0][3]*t[1][2]
	out[1][2] = 0 -
		t[0][0]*t[1][2]*t[3][3] + t[0][0]*t[1][3]*t[3][2] +
		t[1][0]*t[0][2]*t[3][3] - t[1][0]*t[0][3]*t[3][2] -
		t[3][0]*t[0][2]*t[1][3] + t[3][0]*t[0][3]*t[1][2]
	out[2][2] = 0 +
		t[0][0]*t[1][1]*t[3][3] - t[0][0]*t[1][3]*t[3][1] -
		t[1][0]*t[0][1]*t[3][3] + t[1][0]*t[0][3]*t[3][1] +
		t[3][0]*t[0][1]*t[1][3] - t[3][0]*t[0][3]*t[1][1]
	out[3][2] = 0 -
		t[0][0]*t[1][1]*t[3][2] + t[0][0]*t[1][2]*t[3][1] +
		t[1][0]*t[0][1]*t[3][2] - t[1][0]*t[0][2]*t[3][1] -
		t[3][0]*t[0][1]*t[1][2] + t[3][0]*t[0][2]*t[1][1]
	out[0][3] = 0 -
		t[0][1]*t[1][2]*t[2][3] + t[0][1]*t[1][3]*t[2][2] +
		t[1][1]*t[0][2]*t[2][3] - t[1][1]*t[0][3]*t[2][2] -
		t[2][1]*t[0][2]*t[1][3] + t[2][1]*t[0][3]*t[1][2]
	out[1][3] = 0 +
		t[0][0]*t[1][2]*t[2][3] - t[0][0]*t[1][3]*t[2][2] -
		t[1][0]*t[0][2]*t[2][3] + t[1][0]*t[0][3]*t[2][2] +
		t[2][0]*t[0][2]*t[1][3] - t[2][0]*t[0][3]*t[1][2]
	out[2][3] = 0 -
		t[0][0]*t[1][1]*t[2][3] + t[0][0]*t[1][3]*t[2][1] +
		t[1][0]*t[0][1]*t[2][3] - t[1][0]*t[0][3]*t[2][1] -
		t[2][0]*t[0][1]*t[1][3] + t[2][0]*t[0][3]*t[1][1]
	out[3][3] = 0 +
		t[0][0]*t[1][1]*t[2][2] - t[0][0]*t[1][2]*t[2][1] -
		t[1][0]*t[0][1]*t[2][2] + t[1][0]*t[0][2]*t[2][1] +
		t[2][0]*t[0][1]*t[1][2] - t[2][0]*t[0][2]*t[1][1]
	// Laplace expansion of the determinant along the first row, reusing the
	// cofactors already stored in out's first column.
	det := t[0][0]*out[0][0] + t[0][1]*out[1][0] + t[0][2]*out[2][0] + t[0][3]*out[3][0]
	if det == 0 {
		// Singular matrix: the returned adjugate is not a valid inverse.
		return out, false
	}
	det = 1.0 / det
	for y := 0; y < 4; y++ {
		for x := 0; x < 4; x++ {
			out[y][x] *= det
		}
	}
	return out, true
}

// TransformSet builds up a chain of transformations.
type TransformSet struct {
	Head, Middle, Tail []*T
}

// NewTSet creates a TransformSet.
func NewTSet() *TransformSet {
	return &TransformSet{}
}

// AddBoth appends the transform and its inverse to the head and tail.
func (ts *TransformSet) AddBoth(t [2]*T) *TransformSet {
	ts.Head = append(ts.Head, t[0])
	ts.Tail = append(ts.Tail, t[1])
	return ts
}

// Add t to the middle
func (ts *TransformSet) Add(t *T) *TransformSet {
	ts.Middle = append(ts.Middle, t)
	return ts
}

// Get produces a transform by applying the transforms in head, then middle,
// then applying tail in reverse.
func (ts *TransformSet) Get() *T {
	h := TProd(ts.Head...)
	m := TProd(ts.Middle...)
	var t *T
	if ln := len(ts.Tail); ln > 0 {
		// Fold the tail right-to-left so it is applied in reverse order.
		t = ts.Tail[ln-1]
		for i := ln - 2; i >= 0; i-- {
			t = t.T(ts.Tail[i])
		}
	}
	// NOTE(review): when Tail is empty, t stays nil and TProd receives a nil
	// *T — confirm that T.T tolerates a nil argument, otherwise this panics.
	return TProd(h, m, t)
}
d3/t.go
0.625552
0.623635
t.go
starcoder
package main

import "fmt"

// point is a 2D coordinate.
type point struct {
	x, y float64
}

// circle is a center point plus a radius.
type circle struct {
	x, y, r float64
}

// printer is satisfied by any shape that can print itself.
type printer interface {
	print()
}

func (p *point) print() {
	fmt.Println(p.x, p.y)
}

func (c *circle) print() {
	fmt.Println(c.x, c.y, c.r)
}

func main() {
	// A single polymorphic variable: both concrete types satisfy the printer
	// interface, and each call dispatches to the underlying type's method
	// without main knowing which type is stored.
	var i printer
	i = newPoint(3, 4)
	i.print()
	i = newCircle(5, 12, 13)
	i.print()
}

// Everything below covers the rest of the task. None of it is needed for a
// case this simple, and writing these functions is not idiomatic Go; they
// exist only to demonstrate the requested features.

// Accessors. Idiomatic Go accesses struct fields directly; to allow access
// from another package you export the field by capitalizing its name.

func (p *point) getX() float64  { return p.x }
func (p *point) getY() float64  { return p.y }
func (p *point) setX(v float64) { p.x = v }
func (p *point) setY(v float64) { p.y = v }

func (c *circle) getX() float64  { return c.x }
func (c *circle) getY() float64  { return c.y }
func (c *circle) getR() float64  { return c.r }
func (c *circle) setX(v float64) { c.x = v }
func (c *circle) setY(v float64) { c.y = v }
func (c *circle) setR(v float64) { c.r = v }

// Copy constructors, not idiomatic: structs are assignable, so declaring a
// new variable and assigning does the same thing.
func (p *point) clone() *point {
	dup := *p
	return &dup
}

func (c *circle) clone() *circle {
	dup := *c
	return &dup
}

// Assignment methods, not idiomatic: the assignment operator does this job.
func (p *point) set(q *point)   { *p = *q }
func (c *circle) set(d *circle) { *c = *d }

// Constructors are idiomatic only when construction involves more than
// assigning initial values. By default structs are created as zero values,
// and struct literals can initialize all fields or any named subset; those
// features replace trivial default constructors. When real initialization is
// needed, the convention is New, New<Type>, or (within a package) new<Type>.
func newPoint(x, y float64) *point {
	return &point{x: x, y: y}
}

func newCircle(x, y, r float64) *circle {
	return &circle{x: x, y: y, r: r}
}

// Destructors are never used in Go. Objects are garbage collected.
//\Polymorphism\polymorphism.go
tasks/Polymorphism/polymorphism.go
0.623606
0.407923
polymorphism.go
starcoder
package collections import ( "github.com/freetaxii/libstix2/objects/bundle" "github.com/freetaxii/libstix2/resources/manifest" "github.com/freetaxii/libstix2/resources/properties" ) // ---------------------------------------------------------------------- // Define Message Type // ---------------------------------------------------------------------- /* Collections - This type implements the TAXII 2 Collections Resource and defines all of the properties and methods needed to create and work with the TAXII Collections Resource. All of the methods not defined local to this type are inherited from the individual properties. The following information comes directly from the TAXII 2 specification documents. This Endpoint provides information about the Collections hosted under this API Root. This is similar to the response to get a Collection (see section 5.2), but rather than providing information about one Collection it provides information about all of the Collections. Most importantly, it provides the Collection's id, which is used to request objects or manifest entries from the Collection. The collections resource is a simple wrapper around a list of collection resources. */ type Collections struct { Collections []Collection `json:"collections,omitempty"` } /* Collection - This type implements the TAXII 2 Collection Resource and defines all of the properties and methods needed to create and work with the TAXII Collection Resource. All of the methods not defined local to this type are inherited from the individual properties. 
DatastoreID = A unique integer that represents this collection DateAdded = The date that this collection was added to the system Enabled = Is this collection currently enabled Hidden = Is this collection currently hidden for the directory listing Size = The current size of the collection ID = The collection ID, a UUIDv4 value Title = The title of this collection Description = A long description about this collection CanRead = A boolean flag that indicates if one can read from this collection CanWrite = A boolean flag that indicates if one can write to this collection MediaTypes = A slice of strings of the media types that are found in this collection The following information comes directly from the TAXII 2 specification documents. This Endpoint provides general information about a Collection, which can be used to help users and clients decide whether and how they want to interact with it. For example, it will tell clients what it's called and what permissions they have to it. The collection resource contains general information about a Collection, such as its id, a human-readable title and description, an optional list of supported media_types (representing the media type of objects can be requested from or added to it), and whether the TAXII Client, as authenticated, can get objects from the Collection and/or add objects to it. */ type Collection struct { DatastoreID int `json:"-"` DateAdded string `json:"-"` Enabled bool `json:"-"` Hidden bool `json:"-"` Size int `json:"-"` properties.IDProperty properties.TitleProperty properties.DescriptionProperty CanRead bool `json:"can_read"` CanWrite bool `json:"can_write"` MediaTypes []string `json:"media_types,omitempty"` } /* CollectionQuery - This struct will hold all of the variables that a user can use to query a collection. 
*/ type CollectionQuery struct { CollectionID string CollectionDatastoreID int STIXID []string // Passed in from the URL STIXType []string // Passed in from the URL STIXVersion []string // Passed in from the URL AddedAfter []string // Passed in from the URL AddedBefore []string // Passed in from the URL Limit []string // Passed in from the URL ServerRecordLimit int // Server defined value in the configuration file // RangeBegin int // Passed in from Range Headers // RangeEnd int // Passed in from Range Headers } /* CollectionQueryResult - This struct contains the various bits of meta data that are returned from a query against a collection on a TAXII server. This is done so that the method signatures do not need to change as time goes on and we add more meta data that needs to be returned. It is important to note that a collection may have more entries than the server or client wants to transmit. So it is important to keep track of which records are actually being delivered to the client. Size = The total size of the dataset returned from the database query. DateAddedFirst = The added date of the first record being sent to the client. DateAddedLast = The added date of the last record being sent to the client. BundleData = The STIX bundle that contains the requested data from the collection. ManifestData = The TAXII manifest resource that contains the requested data from the collection. RangeBegin = The range value of the first record being sent to the client. RangeEnd = The range value of the last record being sent to the client. 
*/ type CollectionQueryResult struct { Size int DateAddedFirst string DateAddedLast string BundleData bundle.Bundle ManifestData manifest.Manifest // RangeBegin int // RangeEnd int } // ---------------------------------------------------------------------- // Initialization Functions // ---------------------------------------------------------------------- /* New - This function will create a new TAXII Collections object and return it as a pointer. */ func New() *Collections { var obj Collections return &obj } /* NewCollection - This function will create a new TAXII Collection object and return it as a pointer. */ func NewCollection() *Collection { var obj Collection return &obj } /* NewCollectionQuery - This function will take in a collection ID as a string and the Server Record Limit and return a CollectionQueryType object. */ func NewCollectionQuery(id string, limit int) *CollectionQuery { var obj CollectionQuery obj.CollectionID = id obj.ServerRecordLimit = limit return &obj } // ---------------------------------------------------------------------- // Public Methods - Collections // ---------------------------------------------------------------------- /* AddCollection - This method takes in an object that represents a collection and adds it to the list in the collections property and returns an integer of the location in the slice where the collection object was added. This method would be used if the collection was created separately and it just needs to be added in whole to the collections list. */ func (r *Collections) AddCollection(o *Collection) (int, error) { //r.initCollectionsProperty() positionThatAppendWillUse := len(r.Collections) r.Collections = append(r.Collections, *o) return positionThatAppendWillUse, nil } /* NewCollection - This method is used to create a collection and automatically add it to the collections array. It returns a resources.Collection which is a pointer to the actual Collection that was created in the collections slice. 
*/ func (r *Collections) NewCollection() (*Collection, error) { //r.initCollectionsProperty() o := NewCollection() positionThatAppendWillUse := len(r.Collections) r.Collections = append(r.Collections, *o) return &r.Collections[positionThatAppendWillUse], nil } // ---------------------------------------------------------------------- // Private Methods - Collections // ---------------------------------------------------------------------- /* initCollectionsProperty - This method will initialize the Collections slice if it has not already been initialized. */ // func (r *Collections) initCollectionsProperty() error { // if r.Collections == nil { // a := make([]Collection, 0) // r.Collections = a // } // return nil // } // ---------------------------------------------------------------------- // Public Methods - Collection // ---------------------------------------------------------------------- /* SetEnabled - This method will set the collection to be enabled. */ func (r *Collection) SetEnabled() error { r.Enabled = true return nil } /* SetDisabled - This method will set the collection to be disabled. */ func (r *Collection) SetDisabled() error { r.Enabled = false return nil } /* SetHidden - This method will set the collection to be hidden. */ func (r *Collection) SetHidden() error { r.Hidden = true return nil } /* SetVisible - This method will set the collection to be visible. */ func (r *Collection) SetVisible() error { r.Hidden = false return nil } /* SetCanRead - This method will set the can_read boolean to true. */ func (r *Collection) SetCanRead() error { r.CanRead = true return nil } /* GetCanRead - This method will return the value of Can Read. */ func (r *Collection) GetCanRead() bool { return r.CanRead } /* SetCanWrite - This method will set the can_write boolean to true. */ func (r *Collection) SetCanWrite() error { r.CanWrite = true return nil } /* GetCanWrite - This method will return the value of Can Write. 
*/ func (r *Collection) GetCanWrite() bool { return r.CanWrite } /* AddMediaType - This method takes in a string value that represents a version of the TAXII api that is supported and adds it to the list in media types property. */ func (r *Collection) AddMediaType(s string) error { if r.MediaTypes == nil { a := make([]string, 0) r.MediaTypes = a } r.MediaTypes = append(r.MediaTypes, s) return nil }
resources/collections/collections.go
0.625667
0.41834
collections.go
starcoder
package binary_tree import "github.com/99_problems/go/src/data_structures" type Comparable interface { LessThan(value data_structures.Any) bool GreaterThan(value data_structures.Any) bool } type Tree interface { Add(any Comparable) AddAll(any ...Comparable) Remove(any Comparable) bool Contains(any Comparable) bool Size() int IsEmpty() bool } type Node struct { Left *Node Right *Node Value Comparable } func newNode(value Comparable) *Node { return &Node{Value: value} } type BinaryTree struct { Head *Node } func New() Tree { return &BinaryTree{} } func (tree *BinaryTree) AddAll(values ...Comparable) { for _, value := range values { tree.Add(value) } } func (tree *BinaryTree) Add(value Comparable) { if tree.IsEmpty() { tree.Head = newNode(value) return } add(tree.Head, value) } func add(node *Node, value Comparable) { // == if node.Value == value { return } // < if value.LessThan(node.Value) { if node.Left == nil { node.Left = newNode(value) return } add(node.Left, value) return } // > if value.GreaterThan(node.Value) { if node.Right == nil { node.Right = newNode(value) return } add(node.Right, value) return } } func addAll(from *Node, to *Node) { if to == nil { panic("To node can't be nil") } if from == nil { return } if from.Left != nil { addAll(from.Left, to) } if from.Right != nil { addAll(from.Right, to) } } func (tree BinaryTree) Size() int { return size(tree.Head) } func size(node *Node) int { if node == nil { return 0 } if hasBothLeafs(node) { return size(node.Left) + size(node.Right) } if node.Left != nil { return 1 + size(node.Left) } if node.Right != nil { return 1 + size(node.Right) } return 1 } func (tree BinaryTree) IsEmpty() bool { if tree.Head == nil { return true } return false } func (tree *BinaryTree) Remove(value Comparable) bool { parent := findParent(tree.Head, value) if parent.isEmpty() { return false } if parent.pointToHead { tree.Head = nil return true } var old *Node if parent.node.Left != nil && parent.node.Left.Value == value { old = 
parent.node.Left parent.node.Left = nil } else { old = parent.node.Right parent.node.Right = nil } addAll(old, tree.Head) return true } type referenceToParent struct { node *Node pointToHead bool } func referenceToHeader(node *Node) referenceToParent { return referenceToParent{node: node, pointToHead: true} } func emptyReferenceToParent() referenceToParent { return referenceToParent{} } func (r referenceToParent) isEmpty() bool { var empty referenceToParent return r == empty } // return: node, is point to header func findParent(node *Node, value Comparable) referenceToParent { if node == nil { return emptyReferenceToParent() } if node.Value == value { return referenceToHeader(node) } if hasBothLeafs(node) { foundLeft := findParent(node.Left, value) if foundLeft.node != nil { return foundLeft } foundRight := findParent(node.Right, value) if foundRight.node != nil { return foundRight } return emptyReferenceToParent() } if node.Left != nil { if node.Left.Value == value { return referenceToParent{node: node} } return findParent(node.Left, value) } if node.Right != nil { if node.Right.Value == value { return referenceToParent{node: node} } return findParent(node.Right, value) } return emptyReferenceToParent() } func hasBothLeafs(node *Node) bool { return node.Left != nil && node.Right != nil } func (tree BinaryTree) Contains(value Comparable) bool { if tree.IsEmpty() { return false } return contains(tree.Head, value) } func contains(node *Node, value Comparable) bool { if node == nil { return false } if node.Value == value { return true } if value.LessThan(node.Value) { return contains(node.Left, value) } else { return contains(node.Right, value) } }
go/src/data_structures/binary_tree/binary_tree.go
0.855369
0.470737
binary_tree.go
starcoder
package stack import ( "eslang/core" "fmt" ) // StackValue interface  Represents a value in the stack. type StackValue interface { Type() core.Type Value() any TestTruthy() (bool, error) } // Stack struct  Represents a stack of elements. type Stack struct { // TODO: considering to use liked list instead of slice. stack []StackValue variables map[string]StackValue } // NewStack function  Returns a new stack. func NewStack() Stack { // TODO: Set a limit on the stack size. (add a flag to control this) return Stack{ stack: make([]StackValue, 0), variables: make(map[string]StackValue), } } // Content method  Returns the stack content. func (s *Stack) Content() []StackValue { return s.stack } // Size method  Returns the number of elements in the stack. func (s *Stack) Size() int { return len(s.stack) } // GetVariable method  Returns a StackValue for the variable and a boolean indicating if the variable was found. func (s *Stack) GetVariable(name string) (StackValue, bool) { value, found := s.variables[name] return value, found } // SetVariable method  Sets the value of the variable. func (s *Stack) SetVariable(name string, value StackValue) { s.variables[name] = value } // IsEmpty method  Returns true if the stack is empty. func (s *Stack) IsEmpty() bool { return len(s.stack) == 0 } // Push method  Pushes the element onto the stack. func (s *Stack) Push(value StackValue) { s.stack = append(s.stack, value) } // PeekAt method  Returns the element at the specified index. // Accepts negative indices, which are relative to the end of the stack. func (s *Stack) PeekAt(index int) (StackValue, error) { if index < 0 { index = len(s.stack) + index } if index < 0 || index >= len(s.stack) { return nil, fmt.Errorf("index out of bounds") } return s.stack[index], nil } func (s *Stack) Peek() (StackValue, error) { if s.IsEmpty() { return nil, fmt.Errorf("can not perform `Stack.Peek()`, stack is empty") } // Get the index of the top most element. 
index := len(s.stack) - 1 // Index into the slice and obtain the element. value := (s.stack)[index] return value, nil } // PeekTwo method  Returns the top two elements of the stack func (s *Stack) PeekTwo() (lhs, rhs StackValue, err error) { rhs, err = s.Peek() if err != nil { return lhs, rhs, err } lhs, err = s.PeekAt(-2) if err != nil { return lhs, rhs, err } return lhs, rhs, err } // Pop method  Removes and returns the top element of the stack. // TODO: Improve this method, it is not very efficient. func (s *Stack) Pop() (StackValue, error) { if s.IsEmpty() { return nil, fmt.Errorf("can not perform `Stack.Pop()`, stack is empty") } // Get the index of the top most element. index := len(s.stack) - 1 // Index into the slice and obtain the element. value := (s.stack)[index] // Remove it from the stack by slicing it off. s.stack = (s.stack)[:index] return value, nil } // PopTwo method  Removes and returns the top two elements of the // stack as pairs of cells. func (s *Stack) PopTwo() (lhs, rhs StackValue, err error) { rhs, err = s.Pop() if err != nil { return nil, nil, err } lhs, err = s.Pop() if err != nil { return nil, nil, err } return lhs, rhs, nil }
interpreter/stack/stack.go
0.557123
0.503418
stack.go
starcoder
package txscript import ( "sync" "github.com/p9c/p9/pkg/chainhash" "github.com/p9c/p9/pkg/ecc" ) // sigCacheEntry represents an entry in the SigCache. Entries within the SigCache are keyed according to the sigHash of // the signature. In the scenario of a cache-hit (according to the sigHash), an additional comparison of the signature, // and public key will be executed in order to ensure a complete match. In the occasion that two sigHashes collide, the // newer sigHash will simply overwrite the existing entry. type sigCacheEntry struct { sig *ecc.Signature pubKey *ecc.PublicKey } // SigCache implements an ECDSA signature verification cache with a randomized entry eviction policy. Only valid // signatures will be added to the cache. The benefits of SigCache are two fold. Firstly, usage of SigCache mitigates a // DoS attack wherein an attack causes a victim's client to hang due to worst-case behavior triggered while processing // attacker crafted invalid transactions. A detailed description of the mitigated DoS attack can be found here: // https://bitslog.wordpress.com/2013/01/23/fixed-bitcoin-vulnerability-explanation-why-the-signature-cache-is-a-dos-protection/. // Secondly, usage of the SigCache introduces a signature verification optimization which speeds up the validation of // transactions within a block, if they've already been seen and verified within the mempool. type SigCache struct { sync.RWMutex validSigs map[chainhash.Hash]sigCacheEntry maxEntries uint } // NewSigCache creates and initializes a new instance of SigCache. Its sole parameter 'maxEntries' represents the // maximum number of entries allowed to exist in the SigCache at any particular moment. Random entries are evicted make // room for new entries that would cause the number of entries in the cache to exceed the max. 
func NewSigCache(maxEntries uint) *SigCache { return &SigCache{ validSigs: make(map[chainhash.Hash]sigCacheEntry, maxEntries), maxEntries: maxEntries, } } // Exists returns true if an existing entry of 'sig' over 'sigHash' for public key 'pubKey' is found within the // SigCache. Otherwise, false is returned. NOTE: This function is safe for concurrent access. Readers won't be blocked // unless there exists a writer, adding an entry to the SigCache. func (s *SigCache) Exists(sigHash chainhash.Hash, sig *ecc.Signature, pubKey *ecc.PublicKey) bool { s.RLock() entry, ok := s.validSigs[sigHash] s.RUnlock() return ok && entry.pubKey.IsEqual(pubKey) && entry.sig.IsEqual(sig) } // Add adds an entry for a signature over 'sigHash' under public key 'pubKey' to the signature cache. In the event that // the SigCache is 'full', an existing entry is randomly chosen to be evicted in order to make space for the new entry. // NOTE: This function is safe for concurrent access. Writers will block simultaneous readers until function execution // has concluded. func (s *SigCache) Add(sigHash chainhash.Hash, sig *ecc.Signature, pubKey *ecc.PublicKey) { s.Lock() defer s.Unlock() if s.maxEntries <= 0 { return } // If adding this new entry will put us over the max number of allowed entries, then evict an entry. if uint(len(s.validSigs)+1) > s.maxEntries { // Remove a random entry from the map. Relying on the random starting point of Go's map iteration. It's worth // noting that the random iteration starting point is not 100% guaranteed by the spec, however most Go compilers // support it. Ultimately, the iteration order isn't important here because in order to manipulate which items // are evicted, an adversary would need to be able to execute preimage attacks on the hashing function in order // to start eviction at a specific entry. for sigEntry := range s.validSigs { delete(s.validSigs, sigEntry) break } } s.validSigs[sigHash] = sigCacheEntry{sig, pubKey} }
pkg/txscript/sigcache.go
0.681939
0.460046
sigcache.go
starcoder
package interpreter import ( "errors" "fmt" ) // A type that contains information about values in a given scope type Environment map[string]Value /** * Simply determines if an S-Expression is one of the supported special forms by checking * the name of the function/form to evaulate. */ func isSpecialForm(formName string) bool { return formName == "define" || formName == "if" || formName == "function" } /** * Evaluate a `define` form to extract the names of variables to assign to, evaluate values, * and update the environment. */ func EvaluateDefine(sexp SExpression, env Environment) (error, Value, Environment) { // Each value is (or should be) an S-Expression with a name to assign to and a value to evalute var lastValue Value for _, definition := range sexp.Values { switch definition.(type) { case SExpression: def := definition.(SExpression) if len(def.Values) != 1 { errMsg := "Definitions must be S-Expressions of the form (name <thing-to-evaluate>)." return errors.New(errMsg), Value{}, env } evalErr, value, newEnv := Evaluate(def.Values[0], env) if evalErr != nil { return evalErr, value, newEnv } lastValue = value newEnv[def.FormName.Contained] = value env = newEnv default: errMsg := "Pairs of names to assign to and their corresponding values must be contained in S-Expressions." return errors.New(errMsg), Value{}, env } } return nil, lastValue, env } /** * Evaluate an `if` form to extract and evaluate the condition and then evaluate the appropriate * branch expression. 
*/ func EvaluateIf(sexp SExpression, env Environment) (error, Value, Environment) { if len(sexp.Values) != 3 { return errors.New("If expects one condition and two branches."), Value{}, env } conditionErr, conditionResult, newEnv := Evaluate(sexp.Values[0], env) if conditionErr != nil { return conditionErr, conditionResult, newEnv } if conditionResult.Type != BooleanT { return errors.New("Conditions for branching must evaluate to either true or false."), conditionResult, newEnv } if conditionResult.Boolean.Contained { return Evaluate(sexp.Values[1], newEnv) } else { return Evaluate(sexp.Values[2], newEnv) } } /** * Evaluate a `function` form to extract the list of argument names and the body expression. */ func EvaluateFunction(sexp SExpression, env Environment) (error, Value, Environment) { if len(sexp.Values) != 2 { errMsg := "Function declarations expect one S-Expression with a set of argument names and one with a body." return errors.New(errMsg), Value{}, env } switch sexp.Values[0].(type) { case SExpression: break default: return errors.New("Function argument names must be declared in an S-Expression."), Value{}, env } argumentNames := make([]string, 0) argumentList := sexp.Values[0].(SExpression) // Expect the character "_" to signify that a function takes no arguments, as opposed to an empty S-Expression if argumentList.FormName.Contained != "_" { argumentNames = append(argumentNames, argumentList.FormName.Contained) for i := 0; i < len(argumentList.Values); i++ { switch argumentList.Values[i].(type) { case Value: if argumentList.Values[i].(Value).Type != NameT { return errors.New("All items in a function argument list must be names."), Value{}, env } default: return errors.New("All items in a function argument list must be names."), Value{}, env } argumentNames = append(argumentNames, argumentList.Values[i].(Value).Name.Contained) } } newFn := NewFunction("tempname", argumentNames, sexp.Values[1]) for k, v := range env { newFn.Function.Scope[k] = v } return 
nil, newFn, env } /** * Once a special form is encountered, determine which one it is and call the appropriate evaluator. */ func EvaluateSpecialForm(sexp SExpression, env Environment) (error, Value, Environment) { switch sexp.FormName.Contained { case "define": return EvaluateDefine(sexp, env) case "if": return EvaluateIf(sexp, env) case "function": return EvaluateFunction(sexp, env) } return errors.New("Unrecognized special form " + sexp.FormName.Contained), Value{}, env } /** * Evaluate a value by resolving a name to its associated value or just returning the value itself. */ func EvaluateValue(value Value, env Environment) (error, Value, Environment) { if value.Type == NameT { varName := value.Name.Contained actual, found := env[varName] if !found { return errors.New("Variable " + varName + " not assigned."), Value{}, env } else { return nil, actual, env } } else { // Already a value return nil, value, env } } /** * Evaluate an S-Expression by evaluating the first name as either a function name or a special form * and either applying the successive values as arguments to the function or having the special form handled. */ func EvaluateSexp(sexp SExpression, env Environment) (error, Value, Environment) { fnName := sexp.FormName.Contained function, found := env[fnName] if !found { return errors.New("No such function " + fnName), Value{}, env } arguments := make([]Value, 0) for _, arg := range sexp.Values { evalErr, value, newEnv := Evaluate(arg, env) if evalErr != nil { return evalErr, Value{}, newEnv } arguments = append(arguments, value) } for k, v := range env { function.Function.Scope[k] = v } value, err := Apply(function.Function, arguments...) return err, value, env } /** * The catch-all evaluate function that determines the type of its contents and invokes the appropriate * evaluator for that type. 
*/ func Evaluate(thing interface{}, env Environment) (error, Value, Environment) { switch thing.(type) { case Value: return EvaluateValue(thing.(Value), env) case SExpression: sexp := thing.(SExpression) if isSpecialForm(sexp.FormName.Contained) { return EvaluateSpecialForm(sexp, env) } else { return EvaluateSexp(thing.(SExpression), env) } default: return errors.New(fmt.Sprintf("No way to evaluate %v\n", thing)), Value{}, env } } /** * Apply a function to supplied arguments. If the function was defined in fig code, then the body expression * will be evaluated with a new scope relative to the function. * The scope will be augmented with the function's argument names so that they are available in deeper * scopes. */ func Apply(fn Function, arguments ...Value) (Value, error) { for i := 0; i < len(fn.ArgumentNames); i++ { if i >= len(arguments) { return Value{}, errors.New("Not enough arguments passed to " + fn.FunctionName.Contained) } fn.Scope[fn.ArgumentNames[i].Contained] = arguments[i] } var err error var computedValue Value //var newEnv Environment if fn.IsCallable { goValues := make([]interface{}, len(arguments)) for i, arg := range arguments { goValues[i] = Unwrap(arg) } computedValue, err = fn.Call(goValues...) } else { err, computedValue, _ = Evaluate(fn.Body, fn.Scope) } if err != nil { return Value{}, err } err, computedValue, _ = EvaluateValue(computedValue, fn.Scope) return computedValue, err }
src/interpreter/evaluator.go
0.695545
0.496155
evaluator.go
starcoder
package argparse import "github.com/skillian/errors" // Namespace maps argument destination names with their values. Values // are of the type the Argument's Type function converts them to (string, by // default). If an argument's Nargs are >1, then the value is a slice of // interface{} with the elements being the type set by the argument's Type // function. type Namespace map[string]interface{} // Append a set of values to the namespace. func (ns Namespace) Append(a *Argument, vs ...interface{}) { var values []interface{} existing, ok := ns[a.Dest] if ok { values, ok = existing.([]interface{}) if !ok { values = make([]interface{}, 1, len(vs)+1) values[0] = existing } } values = append(values, vs...) ns[a.Dest] = values } // Get the value from the Namespace associated with the given argument's Dest. func (ns Namespace) Get(a *Argument) (v interface{}, ok bool) { v, ok = ns[a.Dest] return } // MustGet retrieves an argument from the given namespace. It panics if the // argument wasn't found in the namespace. func (ns Namespace) MustGet(a *Argument) interface{} { v, ok := ns.Get(a) if !ok { panic(errors.Errorf("failed to get argument %q", a.Dest)) } return v } // GetStrings is a helper function to get an argument's associated values as // a slice of strings. func (ns Namespace) GetStrings(a *Argument) ([]string, error) { v := ns.MustGet(a) vs, ok := v.([]interface{}) if !ok { return nil, errors.Errorf( "%v (type: %T) is not %v (type: %T)", v, v, vs, vs) } ss := make([]string, len(vs)) for i, v := range vs { ss[i], ok = v.(string) if !ok { return nil, errors.Errorf( "index %d of argument %v is %v (type: %T), "+ "not type %T", i, a, v, v, "") } } return ss, nil } // MustGetStrings gets the arguments associated with a as a slice of strings. // This function panics if a's values are not a slice of strings. 
func (ns Namespace) MustGetStrings(a *Argument) []string { ss, err := ns.GetStrings(a) if err != nil { panic(err) } return ss } // Set a value in the namespace for the given Arg. func (ns Namespace) Set(a *Argument, v interface{}) { ns[a.Dest] = v }
namespace.go
0.678007
0.518241
namespace.go
starcoder
package taxi // Data file contains sample data obtained from google big query. // Dataset: bigquery-public-data.new_york.tlc_yellow_trips_2015. // Development to be done using sample data before calling the big query api. // Tests are also ran using this file. // Data: sample for fare amount and pickup locations // Fields: pickup_latitude, pickup_longitude, fare_amount var FaresData = []string{ `[ { "pickup_longitude": "-73.994873046875", "pickup_latitude": "40.703060150146484", "fare_amount": "27.0" }, { "pickup_longitude": "-73.92514038085938", "pickup_latitude": "40.807518005371094", "fare_amount": "0.0" }, { "pickup_longitude": "-51.1395916", "pickup_latitude": "-30.0436491", "fare_amount": "10.0" }, { "pickup_longitude": "-51.1905683", "pickup_latitude": "-30.0320944", "fare_amount": "10.0" } ]`, `[ { "pickup_longitude": "-51.1408982", "pickup_latitude" : "-30.0441054", "fare_amount": "4.32" }, { "pickup_longitude": "-73.91737365722656", "pickup_latitude": "40.61402130126953", "fare_amount": "27.5" }, { "pickup_longitude": "-73.9610824584961", "pickup_latitude": "40.7188835144043", "fare_amount": "24.5" }, { "pickup_longitude": "-73.97857666015625", "pickup_latitude": "40.670406341552734", "fare_amount": "26.5" }, { "pickup_longitude": "-73.85911560058594", "pickup_latitude": "40.73643112182617", "fare_amount": "40.0" }, { "pickup_longitude": "-73.85911560058594", "pickup_latitude": "40.73643112182617", "fare_amount": "35.0" }, { "pickup_longitude": "-73.85911560058594", "pickup_latitude": "40.73643112182617", "fare_amount": "37.8" } ]`, `[ { "pickup_longitude": "-51.1905683", "pickup_latitude": "-30.0331944", "fare_amount": "10.0" }, { "pickup_longitude": "-74.005106", "pickup_latitude": "40.710977", "fare_amount": "10.0" }, { "pickup_longitude": "-74.005433", "pickup_latitude": "40.711306", "fare_amount": "15.0" }, { "pickup_longitude": "-74.009666", "pickup_latitude": "40.714963", "fare_amount": "10.0" }, { "pickup_longitude": "-74.010205", 
"pickup_latitude": "40.715170", "fare_amount": "15.0" }, { "pickup_longitude": "-74.009881", "pickup_latitude": "40.714709", "fare_amount": "30.0", "test_property":12345 }, { "pickup_longitude": "-74.009881", "pickup_latitude": "40.714709", "fare_amount": "40.0", "test_property":12345 }, { "pickup_longitude": "-74.009881", "pickup_latitude": "40.714709", "fare_amount": "50.0", "test_property":12345 }, { "pickup_longitude": "-74.009881", "pickup_latitude": "40.714709", "fare_amount": "60.0", "test_property":12345 } ]`, ``, } // Data: sample for average speed. // Fields: average_speed var AverageSpeedData = []string{ `[ { "average_speed": 14.1 } ]`, `[ { "average_speed": 5.0, "date": 2 } ]`, `[ { } ]`, } // Data: sample for total trips on a day. // Fields: date, tota_trips var TotalTripsData = []string{ `[ { "date": "2015-01-01", "total_trips": 382014 }, { "date": "2015-01-02", "total_trips": 345296 }, { "date": "2015-01-03", "total_trips": 406769 } ]`, ` [ { "date": "2015-01-04", "total_trips": 328848 }, { "date": "2015-01-05", "total_trips": 363454 }, { "date": "2015-01-06", "total_trips": 384324 }, { "date": "2015-01-07", "total_trips": 429653 } ]`, ` [ { "date": "2015-01-08", "total_trips": 450920 }, { "date": "2015-01-09", "total_trips": 447947 }, { "date": "2015-01-10", "total_trips": 515540 }, { "date": "2015-01-11", "total_trips": 419629 }, { "date": "2015-01-12", "total_trips": 396367, "random_property": "123" } ]`, } // Expected result for test for total trips. 
var totalTripsResult = [][]TotalTripsByDay{ { { Date: "2015-01-01", TotalTrips: 382014, }, { Date: "2015-01-02", TotalTrips: 345296, }, { Date: "2015-01-03", TotalTrips: 406769, }, }, { { Date: "2015-01-04", TotalTrips: 328848, }, { Date: "2015-01-05", TotalTrips: 363454, }, { Date: "2015-01-06", TotalTrips: 384324, }, { Date: "2015-01-07", TotalTrips: 429653, }, }, { { Date: "2015-01-08", TotalTrips: 450920, }, { Date: "2015-01-09", TotalTrips: 447947, }, { Date: "2015-01-10", TotalTrips: 515540, }, { Date: "2015-01-11", TotalTrips: 419629, }, { Date: "2015-01-12", TotalTrips: 396367, }, }, {}, } // Expected result for test for average speed. var averageSpeedResult = [][]AverageSpeedByDay{ { { AverageSpeed: 14.1, }, }, { { AverageSpeed: 5.0, }, }, { {}, }, } // Expected result for average fare of a location. var averageFaresLocationResult = [][]S2idFare{ { { S2id: "89c25a3a1", Fare: 27.0, }, { S2id: "89c2f5dd3", Fare: 0.0, }, { S2id: "951977d37", Fare: 10.0, }, { S2id: "951978321", Fare: 10.0, }, }, { { S2id: "951977d39", Fare: 4.32, }, { S2id: "89c243469", Fare: 27.5, }, { S2id: "89c259671", Fare: 24.5, }, { S2id: "89c25b03f", Fare: 26.5, }, { S2id: "89c25e335", Fare: 37.6, }, }, { { S2id: "95197831f", Fare: 10, }, { S2id: "89c25a229", Fare: 12.5, }, { S2id: "89c25a1ed", Fare: 34.166666666666664, }, }, {}, }
taxi/data.go
0.538741
0.523725
data.go
starcoder
package distuvx import ( "golang.org/x/exp/rand" "gonum.org/v1/gonum/stat/distuv" ) // PERT represents a PERT distribution, which is a four parameter Beta // distribution described by the parameters min, max, and mode, as well as the // requirement that the mean = (max + 4 * mod + min) / 6. // (https://en.wikipedia.org/wiki/PERT_distribution) type PERT struct { min float64 max float64 mode float64 bd distuv.Beta } // NewPERT constructs a new PERT distribution using the given min, max, and // mode. Constraints are min < max and min ≤ mode ≤ max. func NewPERT(min, max, mode float64, src rand.Source) PERT { checkPERTParameters(min, max, mode) alpha := 1 + 4*(mode-min)/(max-min) beta := 1 + 4*(max-mode)/(max-min) return PERT{ min: min, max: max, mode: mode, bd: distuv.Beta{ Alpha: alpha, Beta: beta, Src: src, }, } } // NewPERTOne create a new PERT distribution one time. Thereafter, the same // fixed value is always returned. func NewPERTOne(a, b, c float64, src rand.Source) Fixed { pert := NewPERT(a, b, c, src) val := pert.Rand() return NewFixed(val) } func checkPERTParameters(min, max, mode float64) { if min >= max { panic("pert: constraint of min < max violated") } if min > mode { panic("pert: constraint of min <= mode violated") } if mode > max { panic("pert: constraint of mode <= max violated") } } // CDF computes the value of the cumulative distribution function at y. func (p PERT) CDF(y float64) float64 { x := (y - p.min) / (p.max - p.min) return p.bd.CDF(x) / (p.max - p.min) } // Mean returns the mean of the PERT probability distribution. func (p PERT) Mean() float64 { return (p.min + 4*p.mode + p.max) / 6 } // Mode returns the mode of the PERT probability distribution. func (p PERT) Mode() float64 { return p.mode } // NumParameters returns the number of parameters in the PERT distribution. func (p PERT) NumParameters() int { return 3 } // Prob computes the value of the probability density function at y. 
func (p PERT) Prob(y float64) float64 { x := (y - p.min) / (p.max - p.min) return p.bd.Prob(x) / (p.max - p.min) } // Rand implements the Rander interface for the PERT distribution. func (p PERT) Rand() float64 { return p.bd.Rand()*(p.max-p.min) + p.min }
pert.go
0.89197
0.464902
pert.go
starcoder
package dsl import "fmt" var helpText = `keybite A flexible, serverless key-value store USAGE: keybite (keybite will start in standalone server mode when started with no args) keybite [command] [index] [options] [data] FLAGS: -h, --help Display this help text. QUERY COMMANDS: - Auto-incrementing indexes (keybite assigns an integer ID): query Retrieve a value from an auto-index by ID Example: query user 13 insert Insert a value into an auto index. Returns the assigned integer ID. Example: insert user <EMAIL> update Update an existing value in an auto index. Returns the ID. Example: update user 10 <EMAIL> delete Delete a record from an auto index. Returns the ID. Example: delete user 10 list List the contents of an index in the order of insertion. Optional limit and offset. Example: list user 10 50 count Count the records in an index. Example: count user - Map indexes (user assigns a string or integer key) query_key Retrieve a value from a map index by key Example: query_key user_email <EMAIL> insert_key Insert a value into a map index with the specified key. Returns the key. Example: insert_key user_email <EMAIL> 10 update_key Update an existing value at the provided key. Returns the key. Example: update_key user_email <EMAIL> 9 upsert_key If a record with the specified key exists, update it, else insert a new one. Returns the key Example: upsert_key user_email <EMAIL> 9 delete_key Delete the record with the specified key if it exists. Example: delete_key user_email <EMAIL> list_key List the contents of an index in the order of the key hashes (roughly alphabetical, but long keys can cause integer overflow and break alphabetization). Optional limit and offset. Example: list_key user_email 10 50 count_key Count the records in an index. Example: count user_email CONFIGURATION: Keybite requires some configuration to work. 
All configuration is pulled from the environment, so exporting environment variables or prefixing the keybite binary launch with environment variables is a valid approach. The recommended approach is to use a .env file. ENVIRONMENT VARS: DATA_DIR The directory where keybite should store its data. AUTO_PAGE_SIZE The number of records to store per file for auto indexes. This value should be decided based on the environment and use case for the server. When using an S3 bucket, the entire page file must be transmitted across the network when retrieving records, so smaller sizes are preferable. In local environments. Because IDs are automatically incremented in auto-indexes, each page will be completely filled before a new page is created. When records are deleted, the size of a page will be reduced. MAP_PAGE_SIZE The number of records to store per file for map indexes. Since string keys are hashed to integers and stored in a page file based on the hashed ID, map pages will usually be much more sparse than auto pages. In most cases, the map page size should be quite a bit larger than the auto page size. HTTP_PORT Required when running as a standalone server. Unused when running in CLI or Lambda modes. DRIVER The storage driver for storing data. Should be set to 'filesystem' when running on a server or when using an EFS volume with Lambda, and 's3' when using an S3 bucket. PAGE_EXTENSION=.kb The file extension for keybite data files. AWS_ACCESS_KEY_ID The AWS access key ID. Only required when using the S3 driver. This environment variable is set automatically in Lambda environments. AWS_SECRET_ACCESS_KEY The AWS access key ID and access key. Only required when using the S3 driver. This environment variable is set automatically in Lambda environments. BUCKET_NAME The name of the S3 bucket where keybite should store data. ENVIRONMENT=linux The environment in which keybite is running. Either 'linux' or 'lambda'. 
LOG_LEVEL=debug The detail level of logs that should be printed to stderr. One of 'error', 'warn', 'info' or 'debug'. 'error' only logs critical errors. Default is 'warn' when an invalid log level is provided. LOCK_DURATION_FS Duration of write locks in milliseconds when using the filesystem driver. Unnecessary when using S3 driver. LOCK_DURATION_S3 Duration of write locks in milliseconds when using the S3 driver. Unnecessary when using filesystem driver. ` // DisplayHelp displays help text func DisplayHelp() { fmt.Println(helpText) }
dsl/help.go
0.672439
0.444866
help.go
starcoder
package pnm import ( "fmt" "image" "io" "math" "strings" ) type PNMType int // Known PNM file types. const ( BitmapAscii PNMType = iota BitmapBinary GraymapAscii GraymapBinary PixmapAscii PixmapBinary ) // Encode writes the Image m to w in PPM format. // The type of output file is determined by the given PNM type value. // Any image can be encoded, but depending on the chosen type, // the encoding may be lossy. func Encode(w io.Writer, m image.Image, ptype PNMType) (err error) { defer func() { if x := recover(); x != nil { err = fmt.Errorf("DecodeConfig: %v", x) } }() switch ptype { case BitmapAscii: encodeP1(w, m) case GraymapAscii: encodeP2(w, m) case PixmapAscii: encodeP3(w, m) case BitmapBinary: encodeP4(w, m) case GraymapBinary: encodeP5(w, m) case PixmapBinary: encodeP6(w, m) default: return fmt.Errorf("Invalid PPM type %d", ptype) } return } func encodeP1(w io.Writer, m image.Image) { b := m.Bounds() row := make([]string, b.Dx()) write(w, "P1\n%d %d\n", b.Dx(), b.Dy()) for y := 0; y < b.Dy(); y++ { for x := 0; x < b.Dx(); x++ { r, _, _, _ := m.At(x, y).RGBA() if byte(r)/0xff == 1 { row[x] = "1" } else { row[x] = "0" } } write(w, "%s\n", strings.Join(row, " ")) } } func encodeP2(w io.Writer, m image.Image) { b := m.Bounds() row := make([]string, b.Dx()) write(w, "P2\n%d %d\n255\n", b.Dx(), b.Dy()) for y := 0; y < b.Dy(); y++ { for x := 0; x < b.Dx(); x++ { r, _, _, _ := m.At(x, y).RGBA() row[x] = fmt.Sprintf("%d", byte(r)) } write(w, "%s\n", strings.Join(row, " ")) } } func encodeP3(w io.Writer, m image.Image) { b := m.Bounds() row := make([]string, b.Dx()) write(w, "P3\n%d %d\n255\n", b.Dx(), b.Dy()) for y := 0; y < b.Dy(); y++ { for x := 0; x < b.Dx(); x++ { r, g, b, _ := m.At(x, y).RGBA() row[x] = fmt.Sprintf("%d %d %d", byte(r), byte(g), byte(b)) } write(w, "%s\n", strings.Join(row, " ")) } } func encodeP4(w io.Writer, m image.Image) { var bit int b := m.Bounds() bytes := int(math.Ceil((float64(b.Dx()) / 8))) bits := newBitset(uint(bytes * b.Dy() * 8)) 
pad := (bytes * 8) - b.Dx() write(w, "P4\n%d %d\n", b.Dx(), b.Dy()) for y := 0; y < b.Dy(); y++ { for x := 0; x < b.Dx(); x++ { r, _, _, _ := m.At(x, y).RGBA() if r > 0 { bits.Set(bit) } bit++ } bit += pad } _, err := w.Write(bits) check(err) } func encodeP5(w io.Writer, m image.Image) { b := m.Bounds() data := make([]byte, 0, b.Dx()*b.Dy()) write(w, "P5\n%d %d\n255\n", b.Dx(), b.Dy()) for y := 0; y < b.Dy(); y++ { for x := 0; x < b.Dx(); x++ { r, _, _, _ := m.At(x, y).RGBA() data = append(data, byte(r)) } } _, err := w.Write(data) check(err) } func encodeP6(w io.Writer, m image.Image) { b := m.Bounds() data := make([]byte, 0, b.Dx()*b.Dy()*3) write(w, "P6\n%d %d\n255\n", b.Dx(), b.Dy()) for y := 0; y < b.Dy(); y++ { for x := 0; x < b.Dx(); x++ { r, g, b, _ := m.At(x, y).RGBA() data = append(data, byte(r), byte(g), byte(b)) } } _, err := w.Write(data) check(err) } func write(w io.Writer, f string, argv ...interface{}) { _, err := fmt.Fprintf(w, f, argv...) check(err) }
encode.go
0.614047
0.464416
encode.go
starcoder
package crypto import ( "crypto/cipher" "fmt" "github.com/turtledex/errors" "github.com/turtledex/fastrand" //lint:ignore SA1019 keeping for compatibility "golang.org/x/crypto/twofish" ) const ( // twofishOverhead is the number of bytes added by EncryptBytes. twofishOverhead = 28 ) var ( // ErrInsufficientLen is an error when supplied ciphertext is not // long enough to contain a nonce. ErrInsufficientLen = errors.New("supplied ciphertext is not long enough to contain a nonce") ) type ( // twofishKey is a key used for encrypting and decrypting data. twofishKey [EntropySize]byte ) // generateTwofishKey produces a twofishKey that can be used for encrypting and // decrypting data using Twofish-GCM. func generateTwofishKey() (key twofishKey) { fastrand.Read(key[:]) return } // newCipher creates a new Twofish cipher from the key. func (key twofishKey) newCipher() cipher.Block { cipher, err := twofish.NewCipher(key[:]) if err != nil { panic("NewCipher only returns an error if len(key) != 16, 24, or 32.") } return cipher } // newTwofishKey creates a new twofishKey from a given entropy. func newTwofishKey(entropy []byte) (key twofishKey, err error) { // check key length if len(entropy) != len(key) { err = fmt.Errorf("twofish key should have size %v but was %v", EntropySize, len(entropy)) return } // create key copy(key[:], entropy) return } // DecryptBytes decrypts a ciphertext created by EncryptPiece. The nonce is // expected to be the first 12 bytes of the ciphertext. func (key twofishKey) DecryptBytes(ct Ciphertext) ([]byte, error) { // Create the cipher. aead, err := cipher.NewGCM(key.newCipher()) if err != nil { return nil, errors.AddContext(err, "NewGCM should only return an error if twofishCipher.BlockSize != 16") } return DecryptWithNonce(ct, aead) } // DecryptBytesInPlace decrypts the ciphertext created by EncryptBytes. The // nonce is expected to be the first 12 bytes of the ciphertext. 
// DecryptBytesInPlace reuses the memory of ct to be able to operate in-place. // This means that ct can't be reused after calling DecryptBytesInPlace. func (key twofishKey) DecryptBytesInPlace(ct Ciphertext, blockIndex uint64) ([]byte, error) { if blockIndex != 0 { return nil, errors.New("twofish doesn't support a blockIndex != 0") } // Create the cipher. aead, err := cipher.NewGCM(key.newCipher()) if err != nil { return nil, errors.AddContext(err, "NewGCM should only return an error if twofishCipher.BlockSize != 16") } // Check for a nonce. if len(ct) < aead.NonceSize() { return nil, ErrInsufficientLen } // Decrypt the data. nonce := ct[:aead.NonceSize()] ciphertext := ct[aead.NonceSize():] return aead.Open(ciphertext[:0], nonce, ciphertext, nil) } // Derive derives a child key for a given combination of chunk and piece index. func (key twofishKey) Derive(chunkIndex, pieceIndex uint64) CipherKey { entropy := HashAll(key, chunkIndex, pieceIndex) ck, err := NewTurtleDexKey(TypeTwofish, entropy[:]) if err != nil { panic("this should not be possible when deriving from a valid key") } return ck } // EncryptBytes encrypts arbitrary data using the TwofishKey, prepending a 12 // byte nonce to the ciphertext in the process. GCM and prepends the nonce (12 // bytes) to the ciphertext. func (key twofishKey) EncryptBytes(piece []byte) Ciphertext { // Create the cipher. aead, err := cipher.NewGCM(key.newCipher()) if err != nil { panic("NewGCM only returns an error if twofishCipher.BlockSize != 16") } return EncryptWithNonce(piece, aead) } // Key returns the twofish key. func (key twofishKey) Key() []byte { return key[:] } // Type returns the type of the twofish key. func (twofishKey) Type() CipherType { return TypeTwofish }
crypto/twofish.go
0.743913
0.41739
twofish.go
starcoder
package vile import ( "bytes" ) // VectorEqual - return true of the two vectors are equal, i.e. the same length and // all the elements are also equal func VectorEqual(v1 *Object, v2 *Object) bool { el1 := v1.elements el2 := v2.elements count := len(el1) if count != len(el2) { return false } for i := 0; i < count; i++ { if !Equal(el1[i], el2[i]) { return false } } return true } func vectorToString(vec *Object) string { el := vec.elements var buf bytes.Buffer buf.WriteString("[") count := len(el) if count > 0 { buf.WriteString(el[0].String()) for i := 1; i < count; i++ { buf.WriteString(" ") buf.WriteString(el[i].String()) } } buf.WriteString("]") return buf.String() } // MakeVector - create a new <vector> object of the specified size, with all elements initialized to // the specified value func MakeVector(size int, init *Object) *Object { elements := make([]*Object, size) for i := 0; i < size; i++ { elements[i] = init } return VectorFromElementsNoCopy(elements) } // Vector - create a new <vector> object from the given element objects. func Vector(elements ...*Object) *Object { return VectorFromElements(elements, len(elements)) } // VectorFromElements - return a new <vector> object from the given slice of elements. The slice is copied. func VectorFromElements(elements []*Object, count int) *Object { el := make([]*Object, count) copy(el, elements[0:count]) return VectorFromElementsNoCopy(el) } // VectorFromElementsNoCopy - create a new <vector> object from the given slice of elements. The slice is NOT copied. 
func VectorFromElementsNoCopy(elements []*Object) *Object { vec := new(Object) vec.Type = VectorType vec.elements = elements return vec } // CopyVector - return a copy of the <vector> func CopyVector(vec *Object) *Object { return VectorFromElements(vec.elements, len(vec.elements)) } // ToVector - convert the object to a <vector>, if possible func ToVector(obj *Object) (*Object, error) { switch obj.Type { case VectorType: return obj, nil case ListType: return listToVector(obj), nil case StructType: return structToVector(obj), nil case StringType: return stringToVector(obj), nil } return nil, Error(ArgumentErrorKey, "to-vector expected <vector>, <list>, <struct>, or <string>, got a ", obj.Type) }
src/vector.go
0.761095
0.529689
vector.go
starcoder
package nn

import (
	"encoding/json"
	"fmt"
	"os"

	tsr "../tensor"
)

// NeuralNetwork is a basic neural network that can handle multiple layer
// types. Inputs are fed through the layers in order; training back-propagates
// deltas through them in reverse order.
type NeuralNetwork struct {
	layers []Layer
}

// NewNeuralNetwork creates a new, empty instance of a NeuralNetwork.
func NewNeuralNetwork() *NeuralNetwork {
	return &NeuralNetwork{layers: []Layer{}}
}

// Copy creates a deep copy of the neural network by copying every layer.
func (neuralNetwork *NeuralNetwork) Copy() *NeuralNetwork {
	newNeuralNetwork := NewNeuralNetwork()
	for _, layer := range neuralNetwork.layers {
		// Add cannot fail here: the copied layers already form a valid
		// shape chain in the source network.
		newNeuralNetwork.Add(layer.Copy())
	}
	return newNeuralNetwork
}

// LayerCount returns the number of layers in the neural network.
func (neuralNetwork *NeuralNetwork) LayerCount() int {
	return len(neuralNetwork.layers)
}

// LayerAt gets a layer at a certain index, or nil if the index is out of
// range.
func (neuralNetwork *NeuralNetwork) LayerAt(index int) Layer {
	if index < 0 || index >= len(neuralNetwork.layers) {
		return nil
	}
	return neuralNetwork.layers[index]
}

// Add appends a number of new layers to the neural network, in order. It
// stops and returns an error if a layer's input shape does not match the
// output shape of the layer preceding it.
func (neuralNetwork *NeuralNetwork) Add(layers ...Layer) error {
	for _, layer := range layers {
		if n := len(neuralNetwork.layers); n > 0 {
			lastLayer := neuralNetwork.layers[n-1]
			if lastLayer.OutputShape() != layer.InputShape() {
				return fmt.Errorf(
					"output shape of last layer does not match input shape of new layer: (%d, %d, %d) != (%d, %d, %d)",
					lastLayer.OutputShape().Rows, lastLayer.OutputShape().Cols, lastLayer.OutputShape().Frames,
					layer.InputShape().Rows, layer.InputShape().Cols, layer.InputShape().Frames,
				)
			}
		}
		neuralNetwork.layers = append(neuralNetwork.layers, layer)
	}
	return nil
}

// Predict generates a prediction for a certain set of inputs.
func (neuralNetwork *NeuralNetwork) Predict(inputs [][][]float32) ([][][]float32, error) {
	outputs, err := neuralNetwork.feedForward(inputs)
	if err != nil {
		return nil, err
	}
	// Copied before export — presumably so callers cannot mutate tensor
	// state owned by the layers; confirm against tsr.GetAll semantics.
	return outputs.Copy().GetAll(), nil
}

// Train takes a set of inputs and their respective targets, and adjusts the
// layers to produce the given outputs through supervised learning.
func (neuralNetwork *NeuralNetwork) Train(inputs [][][]float32, targets [][][]float32, learningRate float32, momentum float32) error {
	outputs, err := neuralNetwork.feedForward(inputs)
	if err != nil {
		return err
	}
	// deltas = targets - outputs, the error signal to back-propagate.
	deltas := tsr.NewValueTensor3D(targets)
	err = deltas.SubtractTensor(outputs)
	if err != nil {
		return err
	}
	return neuralNetwork.backPropagate(deltas, learningRate, momentum)
}

// feedForward runs the inputs through every layer in order and returns the
// final layer's output tensor.
func (neuralNetwork *NeuralNetwork) feedForward(inputs [][][]float32) (*tsr.Tensor, error) {
	nextInputs := tsr.NewValueTensor3D(inputs)
	var err error
	for _, layer := range neuralNetwork.layers {
		nextInputs, err = layer.FeedForward(nextInputs)
		if err != nil {
			return nil, err
		}
	}
	return nextInputs, nil
}

// backPropagate pushes the deltas through every layer in reverse order,
// letting each layer adjust its weights.
func (neuralNetwork *NeuralNetwork) backPropagate(deltas *tsr.Tensor, learningRate float32, momentum float32) error {
	nextDeltas := deltas
	var err error
	for i := len(neuralNetwork.layers) - 1; i >= 0; i-- {
		layer := neuralNetwork.layers[i]
		nextDeltas, err = layer.BackPropagate(nextDeltas, learningRate, momentum)
		if err != nil {
			return err
		}
	}
	return nil
}

// SaveToFile saves a neural network to a file as JSON.
func (neuralNetwork *NeuralNetwork) SaveToFile(fileName string) error {
	file, err := os.Create(fileName)
	if err != nil {
		return err
	}
	defer file.Close()
	neuralNetworkData := struct {
		Layers []Layer `json:"layers"`
	}{
		Layers: neuralNetwork.layers,
	}
	return json.NewEncoder(file).Encode(neuralNetworkData)
}

// LoadFromFile loads a neural network from a JSON file, reconstructing each
// layer from its serialized "type" field.
func (neuralNetwork *NeuralNetwork) LoadFromFile(fileName string) error {
	file, err := os.Open(fileName)
	if err != nil {
		return err
	}
	defer file.Close()
	neuralNetworkData := struct {
		Layers []map[string]interface{} `json:"layers"`
	}{}
	err = json.NewDecoder(file).Decode(&neuralNetworkData)
	if err != nil {
		return err
	}
	for _, layerData := range neuralNetworkData.Layers {
		// Previously the failed type assertion and the json.Marshal error
		// were silently discarded; surface both explicitly.
		layerType, ok := layerData["type"].(string)
		if !ok {
			return fmt.Errorf("layer entry is missing a string \"type\" field")
		}
		layer, err := layerForType(LayerType(layerType))
		if err != nil {
			return err
		}
		layerBytes, err := json.Marshal(layerData)
		if err != nil {
			return err
		}
		if err := json.Unmarshal(layerBytes, layer); err != nil {
			return err
		}
		if err := neuralNetwork.Add(layer); err != nil {
			return err
		}
	}
	return nil
}
nn/neuralNetwork.go
0.827689
0.549641
neuralNetwork.go
starcoder
package partitioner import "github.com/Shopify/sarama" // Murmur2Partitioner is a matching implemention for the current (10.1) kafka partitioner. It will generate the same partition ID's a the default Kafka client and hence can be as a replacement with correct inter op type Murmur2Partitioner struct { random sarama.Partitioner } // NewMurmur2Partitioner creates a partitioner func NewMurmur2Partitioner(topic string) sarama.Partitioner { p := new(Murmur2Partitioner) p.random = sarama.NewRandomPartitioner(topic) return p } // Called by Samza to get the partition for a message. // Will return a random partition if the message key is nil func (p *Murmur2Partitioner) Partition(message *sarama.ProducerMessage, numPartitions int32) (int32, error) { if message.Key == nil { return p.random.Partition(message, numPartitions) } bytes, err := message.Key.Encode() if err != nil { return -1, err } return Murmur2Partition(bytes, numPartitions), nil } // RequiresConsistency is always true for this implemention func (p *Murmur2Partitioner) RequiresConsistency() bool { return true } func Murmur2Partition(bytes []byte, numPartitions int32) int32 { hash := MurmurHash2(bytes) partition := positive(hash) % numPartitions return partition } // From https://github.com/apache/kafka/blob/0.10.1/clients/src/main/java/org/apache/kafka/common/utils/Utils.java#L728 func positive(v int32) int32 { return v & 0x7fffffff } // The original MurmurHash2 32-bit algorithm by <NAME>. 
// Taken from https://github.com/aviddiviner/go-murmur by <NAME>
// Adapted to match the behavior of the Java Kafka Client: the shifts are done
// on uint32 so they behave like Java's unsigned >>> operator.
func MurmurHash2(data []byte) int32 {
	const (
		m = 0x5bd1e995
		r = 24
		// Seed used by the Java client, see
		// https://github.com/apache/kafka/blob/0.10.1/clients/src/main/java/org/apache/kafka/common/utils/Utils.java#L342
		seed = int32(-1756908916)
	)

	h := seed ^ int32(len(data))

	// Mix 4 bytes at a time into the hash.
	i := 0
	for ; len(data)-i >= 4; i += 4 {
		k := int32(data[i]) | int32(data[i+1])<<8 | int32(data[i+2])<<16 | int32(data[i+3])<<24
		k *= m
		k ^= int32(uint32(k) >> r) // unsigned shift, to match the Kafka impl
		k *= m
		h *= m
		h ^= k
	}

	// Fold in the 0-3 trailing bytes.
	tail := data[i:]
	switch len(tail) {
	case 3:
		h ^= int32(tail[2]) << 16
		fallthrough
	case 2:
		h ^= int32(tail[1]) << 8
		fallthrough
	case 1:
		h ^= int32(tail[0])
		h *= m
	}

	// Final mixes so the last few bytes are well incorporated.
	h ^= int32(uint32(h) >> 13)
	h *= m
	h ^= int32(uint32(h) >> 15)
	return h
}
kafka/vendor/github.com/movio/go-kafka/partitioner/murmur2.go
0.670824
0.445952
murmur2.go
starcoder
package render import ( "github.com/emily33901/lambda-core/core/mesh" "github.com/emily33901/lambda-core/core/mesh/util" "github.com/go-gl/mathgl/mgl32" ) type Composition struct { Vertices []float32 Normals []float32 UVs []float32 Tangents []float32 LightmapCoordinates []float32 Colors []float32 materialCompositions []*compositionMesh indices []uint32 } // AddVertex func (mesh *Composition) AddVertex(vertex []mgl32.Vec3) { for _, x := range vertex { mesh.Vertices = append(mesh.Vertices, x.X(), x.Y(), x.Z()) } } // AddNormal func (mesh *Composition) AddNormal(normal []mgl32.Vec3) { for _, x := range normal { mesh.Normals = append(mesh.Normals, x.X(), x.Y(), x.Z()) } } // AddUV func (mesh *Composition) AddUV(uv []mgl32.Vec2) { for _, x := range uv { mesh.UVs = append(mesh.UVs, x.X(), x.Y()) } } // AddTangent func (mesh *Composition) AddTangent(tangent []mgl32.Vec4) { for _, x := range tangent { mesh.Tangents = append(mesh.Tangents, x.X(), x.Y(), x.Z(), x.W()) } } // AddLightmapCoordinate func (mesh *Composition) AddLightmapCoordinate(uv []mgl32.Vec3) { for _, x := range uv { mesh.LightmapCoordinates = append(mesh.LightmapCoordinates, x.X(), x.Y(), x.Z()) } } // Compose constructs the indices information for the current state of the Composition func (comp *Composition) Compose() { comp.indices = make([]uint32, 0) for _, materialComposition := range comp.materialCompositions { materialComposition.GenerateIndicesList() comp.indices = append(comp.indices, materialComposition.indices...) 
} } // MaterialMeshes returns composed material information func (comp *Composition) MaterialMeshes() []*compositionMesh { return comp.materialCompositions } // Indices returns the indices of this compositions faces func (comp *Composition) Indices() []uint32 { return comp.indices } // AddMesh func (comp *Composition) AddMesh(mat *compositionMesh) { comp.materialCompositions = append(comp.materialCompositions, mat) } func (mesh *Composition) AddColor(colors ...float32) { mesh.Colors = append(mesh.Colors, colors...) } func (comp *Composition) GenerateTangents() { comp.Tangents = util.GenerateTangentsOld(comp.Vertices, comp.Normals, comp.UVs) } // NewComposition returns a new Composition. func NewComposition() *Composition { return &Composition{} } type compositionMesh struct { texturePath string offset int length int indices []uint32 } func (texMesh *compositionMesh) Material() string { return texMesh.texturePath } // Indices returns all indices for vertices that use this material func (texMesh *compositionMesh) Indices() []uint32 { return texMesh.indices } // Indices returns the Offset for vertices that use this material func (texMesh *compositionMesh) Offset() int32 { return int32(texMesh.offset) } // Indices returns the number for vertices that use this material func (texMesh *compositionMesh) Length() int32 { return int32(texMesh.length) } // GenerateIndicesList generates the indices list from offset and length of Composition vertex data. 
func (texMesh *compositionMesh) GenerateIndicesList() { indices := make([]uint32, 0) for i := texMesh.offset; i < texMesh.offset+texMesh.length; i++ { indices = append(indices, uint32(i)) } texMesh.indices = indices } // NewCompositionMesh returns a new compositionMesh func NewCompositionMesh(texName string, offset int, length int) *compositionMesh { return &compositionMesh{ texturePath: texName, length: length, offset: offset, } } // Compositor is a struct that provides a mechanism to compose 1 or more models into a single renderable set of data, // indexed by material. // This is super handy for reducing draw calls down a bunch. // A resultant Composition should result in a single set of vertex data + 1 pair of index offset+length info per material // referenced by all models composed. type Compositor struct { meshes []mesh.IMesh isOutdated bool } // AddModel adds a new model to be composed. func (compositor *Compositor) AddMesh(m mesh.IMesh) { compositor.meshes = append(compositor.meshes, m) compositor.isOutdated = true } func (compositor *Compositor) IsOutdated() bool { return compositor.isOutdated } // ComposeScene builds a sceneComposition mesh for rendering func (compositor *Compositor) ComposeScene() *Composition { compositor.isOutdated = false texMappings := map[string][]mesh.IMesh{} // Step 1. Map meshes into contiguous groups by texture for idx, m := range compositor.meshes { if m.Material() == nil { texMappings["nil"] = append(texMappings["nil"], compositor.meshes[idx]) continue } if _, ok := texMappings[m.Material().FilePath()]; !ok { texMappings[m.Material().FilePath()] = make([]mesh.IMesh, 0) } texMappings[m.Material().FilePath()] = append(texMappings[m.Material().FilePath()], compositor.meshes[idx]) } // Step 2. 
Construct a single vertex object Composition ordered by material sceneComposition := NewComposition() vertCount := 0 for key, texMesh := range texMappings { // TODO verify if this is the vertex offset of the actual array offset (vertexOffset * 3) matVertOffset := vertCount matVertCount := 0 for _, sMesh := range texMesh { sceneComposition.AddVertex(sMesh.Vertices()) sceneComposition.AddNormal(sMesh.Normals()) sceneComposition.AddUV(sMesh.UVs()) sceneComposition.AddColor(sMesh.Colors()...) matVertCount += len(sMesh.Vertices()) } sceneComposition.AddMesh(NewCompositionMesh(key, matVertOffset, matVertCount)) vertCount += matVertCount } sceneComposition.GenerateTangents() // Step 3. Generate indices from composed materials sceneComposition.Compose() return sceneComposition }
render/compositor.go
0.778228
0.41941
compositor.go
starcoder
package processor

import (
	"context"
	"fmt"

	"github.com/benthosdev/benthos/v4/internal/bloblang/field"
	"github.com/benthosdev/benthos/v4/internal/component/metrics"
	"github.com/benthosdev/benthos/v4/internal/component/processor"
	"github.com/benthosdev/benthos/v4/internal/docs"
	"github.com/benthosdev/benthos/v4/internal/interop"
	"github.com/benthosdev/benthos/v4/internal/log"
	"github.com/benthosdev/benthos/v4/internal/message"
	"github.com/benthosdev/benthos/v4/internal/tracing"
)

//------------------------------------------------------------------------------

// init registers the group_by_value processor constructor and its
// documentation spec in the package-level Constructors table.
func init() {
	Constructors[TypeGroupByValue] = TypeSpec{
		constructor: func(conf Config, mgr interop.Manager, log log.Modular, stats metrics.Type) (processor.V1, error) {
			p, err := newGroupByValue(conf.GroupByValue, mgr)
			if err != nil {
				return nil, err
			}
			// Wrap the batched V2 implementation in the V1 interface
			// expected by the constructor table.
			return processor.NewV2BatchedToV1Processor("group_by_value", p, mgr.Metrics()), nil
		},
		Categories: []string{
			"Composition",
		},
		Summary: `
Splits a batch of messages into N batches, where each resulting batch contains a group of messages determined by a [function interpolated string](/docs/configuration/interpolation#bloblang-queries) evaluated per message.`,
		Description: `
This allows you to group messages using arbitrary fields within their content or metadata, process them individually, and send them to unique locations as per their group.`,
		Footnotes: `
## Examples

If we were consuming Kafka messages and needed to group them by their key, archive the groups, and send them to S3 with the key as part of the path we could achieve that with the following:

` + "```yaml" + `
pipeline:
  processors:
    - group_by_value:
        value: ${! meta("kafka_key") }
    - archive:
        format: tar
    - compress:
        algorithm: gzip

output:
  aws_s3:
    bucket: TODO
    path: docs/${! meta("kafka_key") }/${! count("files") }-${! timestamp_unix_nano() }.tar.gz
` + "```" + ``,
		Config: docs.FieldComponent().WithChildren(
			docs.FieldString(
				"value", "The interpolated string to group based on.",
				"${! meta(\"kafka_key\") }", "${! json(\"foo.bar\") }-${! meta(\"baz\") }",
			).IsInterpolated(),
		),
		UsesBatches: true,
	}
}

//------------------------------------------------------------------------------

// GroupByValueConfig is a configuration struct containing fields for the
// GroupByValue processor, which breaks message batches down into N batches of a
// smaller size according to a function interpolated string evaluated per
// message part.
type GroupByValueConfig struct {
	// Value is the interpolated expression evaluated per message; messages
	// with equal results end up in the same output batch.
	Value string `json:"value" yaml:"value"`
}

// NewGroupByValueConfig returns a GroupByValueConfig with default values.
func NewGroupByValueConfig() GroupByValueConfig {
	return GroupByValueConfig{
		Value: "",
	}
}

//------------------------------------------------------------------------------

// groupByValueProc is the batched processor implementation behind the
// group_by_value registration above.
type groupByValueProc struct {
	log   log.Modular
	value *field.Expression
}

// newGroupByValue parses the configured interpolation expression and returns
// the processor implementation, or an error if the expression is invalid.
func newGroupByValue(conf GroupByValueConfig, mgr interop.Manager) (processor.V2Batched, error) {
	value, err := mgr.BloblEnvironment().NewField(conf.Value)
	if err != nil {
		return nil, fmt.Errorf("failed to parse value expression: %v", err)
	}
	return &groupByValueProc{
		log:   mgr.Logger(),
		value: value,
	}, nil
}

//------------------------------------------------------------------------------

// ProcessBatch splits the input batch into one output batch per distinct
// interpolated value. Batches are emitted in the order each group's value was
// first seen, and messages keep their relative order within a group.
func (g *groupByValueProc) ProcessBatch(ctx context.Context, spans []*tracing.Span, batch *message.Batch) ([]*message.Batch, error) {
	if batch.Len() == 0 {
		return nil, nil
	}

	// groupKeys preserves first-seen ordering; groupMap holds the batches.
	groupKeys := []string{}
	groupMap := map[string]*message.Batch{}

	_ = batch.Iter(func(i int, p *message.Part) error {
		// Evaluate the grouping expression against message i of the batch.
		v := g.value.String(i, batch)

		spans[i].LogKV(
			"event", "grouped",
			"type", v,
		)
		spans[i].SetTag("group", v)

		if group, exists := groupMap[v]; exists {
			group.Append(p)
		} else {
			g.log.Tracef("New group formed: %v\n", v)
			groupKeys = append(groupKeys, v)
			newMsg := message.QuickBatch(nil)
			newMsg.Append(p)
			groupMap[v] = newMsg
		}
		return nil
	})

	// Re-assemble batches in first-seen key order.
	msgs := []*message.Batch{}
	for _, key := range groupKeys {
		msgs = append(msgs, groupMap[key])
	}
	if len(msgs) == 0 {
		return nil, nil
	}
	return msgs, nil
}

// Close is a no-op; the processor holds no resources.
func (g *groupByValueProc) Close(context.Context) error {
	return nil
}
internal/old/processor/group_by_value.go
0.681303
0.446253
group_by_value.go
starcoder
package wallGenerator

import (
	"fmt"
)

// LineSegment is a segment of a line: the interval
// [BeginPosition, EndPosition] with thickness Thk.
type LineSegment struct {
	BeginPosition float64
	EndPosition   float64
	Thk           float64
}

// Line is an ordered sequence of contiguous line segments, by position of X.
type Line []LineSegment

// NewLine creates a new line of the given length, consisting of a single
// segment with the given thickness.
func NewLine(length float64, thk float64) Line {
	return Line{{
		BeginPosition: 0.0,
		EndPosition:   length,
		Thk:           thk,
	}}
}

// String lists every segment of the line, one per row.
func (l *Line) String() (s string) {
	ls := []LineSegment(*l)
	s += fmt.Sprintf("Line\n")
	for inx := range ls {
		s += fmt.Sprintf("Segment %v\t", inx)
		s += fmt.Sprintf("{%.5e,%5e,%5e}\n", ls[inx].BeginPosition, ls[inx].EndPosition, ls[inx].Thk)
	}
	return s
}

// AddStiffiner adds a stiffener to the line at positionX.
// NOTE(review): the stiffener itself is currently unused — only a split point
// is inserted at its position; confirm whether its properties should be
// incorporated.
func (l *Line) AddStiffiner(positionX float64, s Stiffiner) (Line, error) {
	_ = s // stiffener properties are not yet used
	return l.AddPoint(positionX)
}

// AddPoint splits the segment that strictly contains positionX into two
// segments meeting at positionX, keeping the thickness. It returns an error
// when positionX lies outside the line, or when it coincides with an existing
// segment boundary (no segment strictly contains it).
func (l *Line) AddPoint(positionX float64) (Line, error) {
	ls := []LineSegment(*l)
	if positionX < ls[0].BeginPosition {
		return *l, fmt.Errorf("wrong position less begin")
	}
	if ls[len(ls)-1].EndPosition < positionX {
		return *l, fmt.Errorf("wrong position more end")
	}
	for inx, segment := range ls {
		if segment.BeginPosition < positionX && positionX < segment.EndPosition {
			// Rebuild the slice with the containing segment split in two.
			buffer := make([]LineSegment, 0, len(ls)+1)
			buffer = append(buffer, ls[:inx]...)
			buffer = append(buffer,
				LineSegment{BeginPosition: segment.BeginPosition, EndPosition: positionX, Thk: segment.Thk},
				LineSegment{BeginPosition: positionX, EndPosition: segment.EndPosition, Thk: segment.Thk},
			)
			buffer = append(buffer, ls[inx+1:]...)
			return Line(buffer), nil
		}
	}
	// positionX matches an existing boundary point exactly.
	return *l, fmt.Errorf("point is not on line")
}

// AddShape adds a shape to the line at positionX: split points are inserted
// for every segment of the shape's side04 (left of positionX) and side05
// (right of positionX) lines, and the line thickness is increased over the
// spans they cover.
func (l *Line) AddShape(positionX float64, shape Shape) (line Line, err error) {
	r := ([5]side(shape))
	line, err = l.AddPoint(positionX)
	if err != nil {
		return line, err
	}
	if shape[side04].exist {
		// side04 extends to the left of positionX.
		segments := []LineSegment(r[side04].line)
		for inx := range segments {
			length := segments[inx].EndPosition
			line, err = line.AddPoint(positionX - length)
			if err != nil {
				return line, err
			}
		}
		line.modifyThickness((shape[side04].line)[0].Thk,
			positionX-segments[len(segments)-1].EndPosition, positionX)
	}
	if shape[side05].exist {
		// side05 extends to the right of positionX.
		segments := []LineSegment(r[side05].line)
		for inx := range segments {
			length := segments[inx].EndPosition
			line, err = line.AddPoint(positionX + length)
			if err != nil {
				return line, err
			}
		}
		line.modifyThickness((shape[side05].line)[0].Thk,
			positionX, positionX+segments[len(segments)-1].EndPosition)
	}
	return line, nil
}

// modifyThickness adds thk to every segment fully contained in
// [fromPosition, toPosition]. Segments are ordered by position, so iteration
// stops at the first segment starting past toPosition.
func (l *Line) modifyThickness(thk, fromPosition, toPosition float64) {
	segments := []LineSegment(*l)
	for inx := range segments {
		if toPosition < segments[inx].BeginPosition {
			break
		}
		if fromPosition <= segments[inx].BeginPosition && segments[inx].EndPosition <= toPosition {
			segments[inx].Thk += thk
		}
	}
}
wallGenerator/line.go
0.54819
0.416144
line.go
starcoder