code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package trxdbtest
import (
"context"
"testing"
"time"
ct "github.com/dfuse-io/dfuse-eosio/codec/testing"
pbcodec "github.com/dfuse-io/dfuse-eosio/pb/dfuse/eosio/codec/v1"
"github.com/streamingfast/kvdb"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// Fixed reference timestamps shared by the timeline explorer test cases.
var noon = time.Date(2020, time.February, 02, 12, 0, 0, 0, time.UTC)
var twopm = time.Date(2020, time.February, 02, 14, 0, 0, 0, time.UTC)
var fourpm = time.Date(2020, time.February, 02, 16, 0, 0, 0, time.UTC)

// timelineExplorerTests lists the driver-agnostic test functions that
// exercise the timeline explorer capabilities of a trxdb driver.
var timelineExplorerTests = []DriverTestFunc{
	TestBlockIDAt,
	TestBlockIDAfter,
	TestBlockIDBefore,
}
// TestBlockIDAt verifies that the driver resolves an exact block timestamp
// to its block ID via BlockIDAt, and returns kvdb.ErrNotFound when no block
// carries the requested timestamp (including on an empty database).
func TestBlockIDAt(t *testing.T, driverFactory DriverFactory) {
	tests := []struct {
		name          string
		blocks        []*pbcodec.Block // irreversible blocks seeded before the query
		time          time.Time        // timestamp passed to BlockIDAt
		expectBlockID string           // expected ID on success
		expectErr     error            // expected error, if any
	}{
		{
			name: "sunny path",
			blocks: []*pbcodec.Block{
				ct.Block(t, "00000008aa", ct.BlockTimestamp(noon)),
				ct.Block(t, "00000003aa", ct.BlockTimestamp(twopm)),
			},
			time:          noon,
			expectBlockID: "00000008aa",
		},
		{
			name: "no block that matches",
			blocks: []*pbcodec.Block{
				ct.Block(t, "00000008aa", ct.BlockTimestamp(noon)),
				ct.Block(t, "00000003aa", ct.BlockTimestamp(twopm)),
			},
			time:      fourpm,
			expectErr: kvdb.ErrNotFound,
		},
		{
			name:      "no blocks",
			blocks:    []*pbcodec.Block{},
			time:      fourpm,
			expectErr: kvdb.ErrNotFound,
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			var ctx = context.Background()
			db, clean := driverFactory()
			defer clean()
			// Seed the driver: each block is written and immediately marked
			// irreversible so it becomes visible to the timeline index.
			for _, blk := range test.blocks {
				require.NoError(t, db.PutBlock(ctx, blk))
				require.NoError(t, db.UpdateNowIrreversibleBlock(ctx, blk))
			}
			require.NoError(t, db.Flush(ctx))
			id, err := db.BlockIDAt(ctx, test.time)
			if test.expectErr != nil {
				assert.Equal(t, test.expectErr, err)
			} else {
				require.NoError(t, err)
				assert.Equal(t, test.expectBlockID, id)
			}
		})
	}
}
// TestBlockIDAfter verifies that BlockIDAfter returns the first irreversible
// block strictly after the given time — or at the given time when inclusive
// is true — and kvdb.ErrNotFound when no such block exists.
func TestBlockIDAfter(t *testing.T, driverFactory DriverFactory) {
	tests := []struct {
		name          string
		blocks        []*pbcodec.Block // irreversible blocks seeded before the query
		time          time.Time        // lower bound passed to BlockIDAfter
		inclusive     bool             // whether a block exactly at time may match
		expectBlockID string
		expectTime    time.Time
		expectErr     error
	}{
		{
			name: "sunny path",
			blocks: []*pbcodec.Block{
				ct.Block(t, "00000008aa", ct.BlockTimestamp(noon)),
				ct.Block(t, "00000003aa", ct.BlockTimestamp(fourpm)),
			},
			time:          twopm,
			expectTime:    fourpm,
			expectBlockID: "00000003aa",
		},
		{
			name: "no block that matches",
			blocks: []*pbcodec.Block{
				ct.Block(t, "00000008aa", ct.BlockTimestamp(noon)),
				ct.Block(t, "00000003aa", ct.BlockTimestamp(twopm)),
			},
			time:      fourpm,
			expectErr: kvdb.ErrNotFound,
		},
		{
			name: "should not match block when not inclusive",
			blocks: []*pbcodec.Block{
				ct.Block(t, "00000008aa", ct.BlockTimestamp(noon)),
				ct.Block(t, "00000003aa", ct.BlockTimestamp(twopm)),
			},
			inclusive: false,
			time:      twopm,
			expectErr: kvdb.ErrNotFound,
		},
		{
			name: "should match block when inclusive",
			blocks: []*pbcodec.Block{
				ct.Block(t, "00000008aa", ct.BlockTimestamp(noon)),
				ct.Block(t, "00000003aa", ct.BlockTimestamp(twopm)),
			},
			inclusive:     true,
			time:          twopm,
			expectTime:    twopm,
			expectBlockID: "00000003aa",
		},
		{
			name:      "no blocks",
			blocks:    []*pbcodec.Block{},
			time:      fourpm,
			expectErr: kvdb.ErrNotFound,
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			var ctx = context.Background()
			db, clean := driverFactory()
			defer clean()
			// Seed the driver with irreversible blocks (see TestBlockIDAt).
			for _, blk := range test.blocks {
				require.NoError(t, db.PutBlock(ctx, blk))
				require.NoError(t, db.UpdateNowIrreversibleBlock(ctx, blk))
			}
			require.NoError(t, db.Flush(ctx))
			id, foundTime, err := db.BlockIDAfter(ctx, test.time, test.inclusive)
			if test.expectErr != nil {
				assert.Equal(t, test.expectErr, err)
			} else {
				require.NoError(t, err)
				assert.Equal(t, test.expectBlockID, id)
				// Normalize to UTC: drivers may return a different location.
				assert.Equal(t, test.expectTime, foundTime.UTC())
			}
		})
	}
}
// TestBlockIDBefore verifies that BlockIDBefore returns the last irreversible
// block strictly before the given time — or at the given time when inclusive
// is true — and kvdb.ErrNotFound when no such block exists.
func TestBlockIDBefore(t *testing.T, driverFactory DriverFactory) {
	tests := []struct {
		name          string
		blocks        []*pbcodec.Block // irreversible blocks seeded before the query
		time          time.Time        // upper bound passed to BlockIDBefore
		inclusive     bool             // whether a block exactly at time may match
		expectBlockID string
		expectTime    time.Time
		expectErr     error
	}{
		{
			name: "no block that matches",
			blocks: []*pbcodec.Block{
				ct.Block(t, "00000008aa", ct.BlockTimestamp(twopm)),
				ct.Block(t, "00000003aa", ct.BlockTimestamp(fourpm)),
			},
			time:      noon,
			expectErr: kvdb.ErrNotFound,
		},
		{
			name: "sunny path",
			blocks: []*pbcodec.Block{
				ct.Block(t, "00000008aa", ct.BlockTimestamp(noon)),
				ct.Block(t, "00000003aa", ct.BlockTimestamp(fourpm)),
			},
			time:          twopm,
			expectTime:    noon,
			expectBlockID: "00000008aa",
		},
		{
			name: "should not match block when not inclusive",
			blocks: []*pbcodec.Block{
				ct.Block(t, "00000008aa", ct.BlockTimestamp(noon)),
				ct.Block(t, "00000003aa", ct.BlockTimestamp(twopm)),
			},
			inclusive: false,
			time:      noon,
			expectErr: kvdb.ErrNotFound,
		},
		{
			name: "should match block when inclusive",
			blocks: []*pbcodec.Block{
				ct.Block(t, "00000008aa", ct.BlockTimestamp(noon)),
				ct.Block(t, "00000003aa", ct.BlockTimestamp(twopm)),
			},
			inclusive:     true,
			time:          noon,
			expectTime:    noon,
			expectBlockID: "00000008aa",
		},
		{
			name:      "no blocks",
			blocks:    []*pbcodec.Block{},
			time:      fourpm,
			expectErr: kvdb.ErrNotFound,
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			var ctx = context.Background()
			db, clean := driverFactory()
			defer clean()
			// Seed the driver with irreversible blocks (see TestBlockIDAt).
			for _, blk := range test.blocks {
				require.NoError(t, db.PutBlock(ctx, blk))
				require.NoError(t, db.UpdateNowIrreversibleBlock(ctx, blk))
			}
			require.NoError(t, db.Flush(ctx))
			id, foundTime, err := db.BlockIDBefore(ctx, test.time, test.inclusive)
			if test.expectErr != nil {
				assert.Equal(t, test.expectErr, err)
			} else {
				require.NoError(t, err)
				assert.Equal(t, test.expectBlockID, id)
				// Normalize to UTC: drivers may return a different location.
				assert.Equal(t, test.expectTime, foundTime.UTC())
			}
		})
	}
}
package semantics
// TwoInts is a flat struct of two machine words, used to exercise
// struct copy and field-update semantics.
type TwoInts struct {
	x uint64
	y uint64
}

// S nests a TwoInts between scalar fields to exercise nested-struct
// load/store semantics.
type S struct {
	a uint64
	b TwoInts
	c bool
}
// NewS allocates an S with distinguishable non-zero field values.
func NewS() *S {
	return &S{
		a: 2,
		b: TwoInts{x: 1, y: 2},
		c: true,
	}
}

// readA loads the scalar field a through a pointer receiver.
func (s *S) readA() uint64 {
	return s.a
}

// readB returns a copy of the nested struct through a pointer receiver.
func (s *S) readB() TwoInts {
	return s.b
}

// readBVal returns the nested struct from a value receiver (the entire S
// is copied at the call site).
func (s S) readBVal() TwoInts {
	return s.b
}

// updateBValX writes through the receiver to the nested struct's x field.
func (s *S) updateBValX(i uint64) {
	s.b.x = i
}

// negateC flips the boolean field in place.
func (s *S) negateC() {
	s.c = !s.c
}
// failing_testStructUpdates checks copy-vs-reference semantics of struct
// reads: a value returned by readB is a copy (mutating it must not be
// visible through the original), while pointer-receiver updates must be.
// NOTE(review): the "failing_" prefix presumably means the verification
// framework cannot yet prove this function — confirm against the harness.
func failing_testStructUpdates() bool {
	var ok = true
	ns := NewS()
	ok = ok && (ns.readA() == 2)
	var b1 = ns.readB()
	ok = ok && (b1.x == 1)
	ns.negateC()
	ok = ok && (ns.c == false)
	// b1 is a copy: writing to it must not affect ns.b.
	b1.x = 3
	var b2 = ns.readB()
	ok = ok && (b2.x == 1)
	var b3 = &ns.b
	ok = ok && b3.x == 1
	// Updating through the pointer-receiver method must be visible later.
	ns.updateBValX(4)
	ok = ok && (ns.readBVal().x == 4)
	return ok
}
// testNestedStructUpdates checks that a pointer to a nested struct field
// aliases the field: writes through either the pointer or the field path
// must be visible via the other.
func testNestedStructUpdates() bool {
	var ok = true
	// Direct write through the field path.
	var ns = NewS()
	ns.b.x = 5
	ok = ok && ns.b.x == 5
	// Write through a pointer to the nested field, read via the field path.
	ns = NewS()
	var p = &ns.b
	p.x = 5
	ok = ok && ns.b.x == 5
	// Write via the field path, read through an explicit dereference.
	ns = NewS()
	p = &ns.b
	ns.b.x = 5
	ok = ok && (*p).x == 5
	// Same, reading through the implicit dereference.
	ns = NewS()
	p = &ns.b
	ns.b.x = 5
	ok = ok && p.x == 5
	return ok
}
// testStructConstructions checks that the zero value, composite literals in
// either field order, and new() all produce equal structs, and that struct
// equality compares contents while pointer equality compares identity.
func testStructConstructions() bool {
	var ok = true
	var p1 *TwoInts // p1 == nil
	var p2 TwoInts  // p2 == TwoInts{0, 0}
	p3 := TwoInts{y: 0, x: 0} // p3 == TwoInts{0, 0}
	p4 := TwoInts{x: 0, y: 0} // p4 == TwoInts{0, 0}
	ok = ok && (p1 == nil)
	p1 = new(TwoInts) // p1 == &TwoInts{0, 0}
	ok = ok && (p2 == p3)
	ok = ok && (p3 == p4)
	ok = ok && (p4 == *p1)
	// Distinct allocations are never pointer-equal, even with equal contents.
	ok = ok && (&p4 != p1)
	return ok
}
// testIncompleteStruct checks that fields omitted from a composite literal
// are initialized to their zero values (including nested structs).
func testIncompleteStruct() bool {
	var ok = true
	p1 := TwoInts{x: 0}
	ok = ok && (p1.y == 0)
	p2 := S{a: 2}
	ok = ok && (p2.b.x == 0)
	ok = ok && (p2.c == false)
	return ok
}
// StructWrap wraps a single scalar to exercise stores through struct
// values and pointers.
type StructWrap struct {
	i uint64
}

// testStoreInStructVar stores into a field of a struct-typed variable.
func testStoreInStructVar() bool {
	var p StructWrap = StructWrap{i: 0}
	p.i = 5
	return p.i == 5
}

// testStoreInStructPointerVar stores through a pointer from new().
func testStoreInStructPointerVar() bool {
	var p *StructWrap = new(StructWrap)
	p.i = 5
	return p.i == 5
}

// testStoreComposite assigns a whole composite literal through a pointer.
func testStoreComposite() bool {
	p := new(TwoInts)
	*p = TwoInts{x: 3, y: 4}
	return (*p).y == 4
}

// testStoreSlice stores a slice header through a pointer and reads back
// its length.
func testStoreSlice() bool {
	p := new([]uint64)
	s := make([]uint64, 3)
	*p = s
	return uint64(len(*p)) == uint64(3)
}
package clustering
import (
"time"
)
// ClusterResults represents the results of clustering a list of
// test failures.
type ClusterResults struct {
	// AlgorithmsVersion is the version of clustering algorithms used to
	// cluster test results in this chunk. (This is a version over the
	// set of algorithms, distinct from the versions of a single algorithm,
	// e.g.: v1 -> {failurereason-v1}, v2 -> {failurereason-v1, testname-v1},
	// v3 -> {failurereason-v2, testname-v1}.)
	AlgorithmsVersion int64
	// RulesVersion is the version of failure association rules used
	// to cluster test results. This is the RulesLastUpdated
	// time of the most-recently-updated failure association rule in
	// the snapshot of failure association rules used to cluster
	// the test results.
	RulesVersion time.Time
	// Algorithms is the set of algorithms that were used to cluster
	// the test results. Each entry is an algorithm name.
	// When stored alongside the clustered test results, this allows only
	// the new algorithms to be run when re-clustering (for efficiency).
	Algorithms map[string]struct{}
	// Clusters records the clusters each test result is in;
	// one slice of ClusterIDs for each test result. Entry i corresponds
	// to test result i of the chunk.
	Clusters [][]*ClusterID
}
// AlgorithmsAndClustersEqual returns whether two cluster results ran the
// same set of algorithms and assigned an equivalent set of clusters to
// each test result.
func AlgorithmsAndClustersEqual(a *ClusterResults, b *ClusterResults) bool {
	if !setsEqual(a.Algorithms, b.Algorithms) {
		return false
	}
	if len(a.Clusters) != len(b.Clusters) {
		return false
	}
	// Compare per-test-result cluster sets pairwise.
	for i := range a.Clusters {
		if !ClusterSetsEqual(a.Clusters[i], b.Clusters[i]) {
			return false
		}
	}
	return true
}
// ClusterSetsEqual returns whether the set of clusters represented by the
// slice `as` is equivalent to the set represented by the slice `bs`.
// Order is not considered; each cluster within a slice should be unique.
func ClusterSetsEqual(as []*ClusterID, bs []*ClusterID) bool {
	if len(as) != len(bs) {
		return false
	}
	// keysOf projects a slice of clusters onto the set of their keys.
	keysOf := func(ids []*ClusterID) map[string]struct{} {
		keys := make(map[string]struct{})
		for _, id := range ids {
			keys[id.Key()] = struct{}{}
		}
		return keys
	}
	return setsEqual(keysOf(as), keysOf(bs))
}
// setsEqual returns whether two string sets contain exactly the same keys.
func setsEqual(a map[string]struct{}, b map[string]struct{}) bool {
	if len(a) != len(b) {
		return false
	}
	// Equal sizes, so one-directional membership is sufficient.
	missing := false
	for key := range b {
		if _, ok := a[key]; !ok {
			missing = true
			break
		}
	}
	return !missing
}
package inventory
// nolint[lll]
import "sigs.k8s.io/k8s-container-image-promoter/lib/container"
// Various set manipulation operations. Some set operations are missing,
// because, we don't use them.
// Minus returns the RegInvImageDigest entries present in a but not in b.
func (a RegInvImageDigest) Minus(b RegInvImageDigest) RegInvImageDigest {
	return setToRegInvImageDigest(a.ToSet().Minus(b.ToSet()))
}

// Intersection returns the RegInvImageDigest entries present in both a and b.
func (a RegInvImageDigest) Intersection(b RegInvImageDigest) RegInvImageDigest {
	return setToRegInvImageDigest(a.ToSet().Intersection(b.ToSet()))
}

// ToSet converts a RegInvImageDigest to a generic container.Set.
func (a RegInvImageDigest) ToSet() container.Set {
	set := make(container.Set)
	for digest, tags := range a {
		set[digest] = tags
	}
	return set
}

// setToRegInvImageDigest converts a generic container.Set back into a
// RegInvImageDigest by asserting the concrete key and value types.
func setToRegInvImageDigest(a container.Set) RegInvImageDigest {
	result := make(RegInvImageDigest)
	for key, val := range a {
		result[key.(ImageDigest)] = val.(TagSlice)
	}
	return result
}
// ToSet converts a RegInvFlat to a generic container.Set.
func (a RegInvFlat) ToSet() container.Set {
	set := make(container.Set)
	for key, val := range a {
		set[key] = val
	}
	return set
}
// Minus returns the RegInvImageTag entries present in a but not in b.
func (a RegInvImageTag) Minus(b RegInvImageTag) RegInvImageTag {
	return setToRegInvImageTag(a.ToSet().Minus(b.ToSet()))
}

// Intersection returns the RegInvImageTag entries present in both a and b.
func (a RegInvImageTag) Intersection(b RegInvImageTag) RegInvImageTag {
	return setToRegInvImageTag(a.ToSet().Intersection(b.ToSet()))
}

// ToSet converts a RegInvImageTag to a generic container.Set.
func (a RegInvImageTag) ToSet() container.Set {
	set := make(container.Set)
	for imageTag, digest := range a {
		set[imageTag] = digest
	}
	return set
}

// setToRegInvImageTag converts a generic container.Set back into a
// RegInvImageTag by asserting the concrete key and value types.
func setToRegInvImageTag(a container.Set) RegInvImageTag {
	result := make(RegInvImageTag)
	for key, val := range a {
		result[key.(ImageTag)] = val.(Digest)
	}
	return result
}
// ToSet converts a RegInvImage to a generic container.Set.
func (a RegInvImage) ToSet() container.Set {
	set := make(container.Set)
	for name, digestTags := range a {
		set[name] = digestTags
	}
	return set
}

// toRegistryInventory converts a generic container.Set back into a
// RegInvImage by asserting the concrete key and value types.
func toRegistryInventory(a container.Set) RegInvImage {
	result := make(RegInvImage)
	for key, val := range a {
		result[key.(ImageName)] = val.(DigestTags)
	}
	return result
}

// Minus returns the RegInvImage entries present in a but not in b.
func (a RegInvImage) Minus(b RegInvImage) RegInvImage {
	return toRegistryInventory(a.ToSet().Minus(b.ToSet()))
}

// Union returns the combined RegInvImage entries of a and b.
func (a RegInvImage) Union(b RegInvImage) RegInvImage {
	return toRegistryInventory(a.ToSet().Union(b.ToSet()))
}
// ToTagSet converts a TagSlice to a TagSet.
func (a TagSlice) ToTagSet() TagSet {
	set := make(TagSet)
	for _, tag := range a {
		// Only key membership matters; the value is unused.
		set[tag] = nil
	}
	return set
}

// Minus returns the tags present in a but not in b.
func (a TagSlice) Minus(b TagSlice) TagSet {
	return a.ToTagSet().Minus(b.ToTagSet())
}

// Union returns the combined tags of a and b.
func (a TagSlice) Union(b TagSlice) TagSet {
	return a.ToTagSet().Union(b.ToTagSet())
}

// Intersection returns the tags present in both a and b.
func (a TagSlice) Intersection(b TagSlice) TagSet {
	return a.ToTagSet().Intersection(b.ToTagSet())
}
// ToSet converts a TagSet to a Set.
func (a TagSet) ToSet() container.Set {
b := make(container.Set)
for t := range a {
// The value doesn't matter.
b[t] = nil
}
return b
}
func setToTagSet(a container.Set) TagSet {
b := make(TagSet)
for k := range a {
b[k.(Tag)] = nil
}
return b
}
// Minus is a set operation.
func (a TagSet) Minus(b TagSet) TagSet {
aSet := a.ToSet()
bSet := b.ToSet()
cSet := aSet.Minus(bSet)
return setToTagSet(cSet)
}
// Union is a set operation.
func (a TagSet) Union(b TagSet) TagSet {
aSet := a.ToSet()
bSet := b.ToSet()
cSet := aSet.Union(bSet)
return setToTagSet(cSet)
}
// Intersection is a set operation.
func (a TagSet) Intersection(b TagSet) TagSet {
aSet := a.ToSet()
bSet := b.ToSet()
cSet := aSet.Intersection(bSet)
return setToTagSet(cSet)
} | lib/dockerregistry/set.go | 0.759404 | 0.449211 | set.go | starcoder |
package wonderEffectMath
import "math"
// ApplyEasing maps a normalized progress value t through the easing curve
// named by easingFunction and returns the eased value. An unrecognized
// (or empty) name returns t unchanged, i.e. linear easing.
// A plain switch keeps dispatch allocation-free; new curves are added here
// as they get implemented.
func ApplyEasing(t float32, easingFunction string) float32 {
	switch easingFunction {
	case "easeInSine":
		return easeInSine(t)
	case "easeInCubic":
		return easeInCubic(t)
	case "easeInQuint":
		return easeInQuint(t)
	case "easeInCirc":
		return easeInCirc(t)
	case "easeInElastic":
		return easeInElastic(t)
	case "easeOutSine":
		return easeOutSine(t)
	case "easeOutCubic":
		return easeOutCubic(t)
	case "easeOutQuint":
		return easeOutQuint(t)
	case "easeOutCirc":
		return easeOutCirc(t)
	case "easeOutElastic":
		return easeOutElastic(t)
	case "easeInOutSine":
		return easeInOutSine(t)
	case "easeInOutCubic":
		return easeInOutCubic(t)
	case "easeInOutQuint":
		return easeInOutQuint(t)
	case "easeInOutCirc":
		return easeInOutCirc(t)
	case "easeInOutElastic":
		return easeInOutElastic(t)
	case "easeInOutBack":
		return easeInOutBack(t)
	case "easeInBounce":
		return easeInBounce(t)
	case "easeOutBounce":
		return easeOutBounce(t)
	case "easeInOutBounce":
		return easeInOutBounce(t)
	}
	return t
}
// easeIn family: curves that accelerate from zero.

// easeInSine follows the first quarter of an inverted cosine wave.
// https://easings.net/#easeInSine
func easeInSine(x float32) float32 {
	return float32(1 - math.Cos(float64(x)*math.Pi/2))
}

// easeInCubic is the cubic curve x³. https://easings.net/#easeInCubic
func easeInCubic(x float32) float32 {
	return x * x * x
}

// easeInQuint is the quintic curve x⁵. https://easings.net/#easeInQuint
func easeInQuint(x float32) float32 {
	return x * x * x * x * x
}

// easeInCirc traces a quarter circle. https://easings.net/#easeInCirc
func easeInCirc(x float32) float32 {
	root := math.Sqrt(1 - math.Pow(float64(x), 2))
	return 1 - float32(root)
}

// easeInElastic oscillates with exponentially growing amplitude before
// snapping to 1; the endpoints are returned exactly.
// https://easings.net/#easeInElastic
func easeInElastic(x float32) float32 {
	if x == 0 {
		return 0
	}
	if x == 1 {
		return 1
	}
	v := float64(x)
	return float32(-math.Pow(2, 10*v-10) * math.Sin((v*10-10.75)*((2*math.Pi)/3)))
}
// easeOut family: curves that decelerate to rest.

// easeOutSine follows the first quarter of a sine wave.
// https://easings.net/#easeOutSine
func easeOutSine(x float32) float32 {
	return float32(math.Sin(float64(x) * math.Pi / 2))
}

// easeOutCubic decelerates with a cubic curve. https://easings.net/#easeOutCubic
func easeOutCubic(x float32) float32 {
	inv := 1 - float64(x)
	return 1 - float32(math.Pow(inv, 3))
}

// easeOutQuint decelerates with a quintic curve. https://easings.net/#easeOutQuint
func easeOutQuint(x float32) float32 {
	inv := 1 - float64(x)
	return 1 - float32(math.Pow(inv, 5))
}

// easeOutCirc traces a quarter circle. https://easings.net/#easeOutCirc
func easeOutCirc(x float32) float32 {
	sq := math.Pow(float64(x)-1, 2)
	return float32(math.Sqrt(1 - sq))
}

// easeOutElastic overshoots with exponentially decaying oscillation;
// the endpoints are returned exactly. https://easings.net/#easeOutElastic
func easeOutElastic(x float32) float32 {
	if x == 0 {
		return 0
	}
	if x == 1 {
		return 1
	}
	v := float64(x)
	return float32(math.Pow(2, -10*v)*math.Sin((v*10-0.75)*(2*math.Pi)/3)) + 1
}
// easeInOut family: curves that accelerate, then decelerate.

// easeInOutSine follows half a cosine wave. https://easings.net/#easeInOutSine
func easeInOutSine(x float32) float32 {
	return -float32(math.Cos(math.Pi*float64(x))-1) / 2
}

// easeInOutCubic joins two cubic halves at x = 0.5.
// https://easings.net/#easeInOutCubic
func easeInOutCubic(x float32) float32 {
	if x < 0.5 {
		return 4 * x * x * x
	}
	return 1 - float32(math.Pow(-2*float64(x)+2, 3)/2)
}

// easeInOutQuint joins two quintic halves at x = 0.5.
// https://easings.net/#easeInOutQuint
func easeInOutQuint(x float32) float32 {
	if x < 0.5 {
		return 16 * x * x * x * x * x
	}
	return 1 - float32(math.Pow(-2*float64(x)+2, 5)/2)
}

// easeInOutCirc joins two quarter-circle arcs at x = 0.5.
// https://easings.net/#easeInOutCirc
func easeInOutCirc(x float32) float32 {
	v := float64(x)
	if x < 0.5 {
		return float32(1-math.Sqrt(1-math.Pow(2*v, 2))) / 2
	}
	return float32(math.Sqrt(1-math.Pow(-2*v+2, 2))+1) / 2
}

// easeInOutElastic oscillates elastically on both ends; the endpoints are
// returned exactly. https://easings.net/#easeInOutElastic
func easeInOutElastic(x float32) float32 {
	const c5 = (2 * math.Pi) / 4.5
	if x == 0 {
		return 0
	}
	if x == 1 {
		return 1
	}
	v := float64(x)
	if x < 0.5 {
		return -float32(math.Pow(2, 20*v-10)*math.Sin((20*v-11.125)*c5)) / 2
	}
	return float32(math.Pow(2, -20*v+10)*math.Sin((20*v-11.125)*c5))/2 + 1
}
// easeInOutBack overshoots slightly past the range on both ends before
// settling. https://easings.net/#easeInOutBack
func easeInOutBack(x float32) float32 {
	const c1 = 1.70158
	const c2 = c1 * 1.525
	v := float64(x)
	if x < 0.5 {
		return float32(math.Pow(2*v, 2)*((c2+1)*2*v-c2)) / 2
	}
	return float32(math.Pow(2*v-2, 2)*((c2+1)*(v*2-2)+c2)+2) / 2
}
// Bounce family, all built from easeOutBounce.

// easeInBounce is the time-reversed mirror of easeOutBounce.
// https://easings.net/#easeInBounce
func easeInBounce(x float32) float32 {
	return 1 - easeOutBounce(1-x)
}

// easeOutBounce simulates a ball settling to rest: four parabolic arcs of
// decreasing height. https://easings.net/#easeOutBounce
func easeOutBounce(x float32) float32 {
	const n1 = 7.5625
	const d1 = 2.75
	switch {
	case x < 1/d1:
		return n1 * x * x
	case x < 2/d1:
		x -= 1.5 / d1
		return n1*x*x + 0.75
	case x < 2.5/d1:
		x -= 2.25 / d1
		return n1*x*x + 0.9375
	default:
		x -= 2.625 / d1
		return n1*x*x + 0.984375
	}
}

// easeInOutBounce bounces in during the first half and out during the
// second. https://easings.net/#easeInOutBounce
func easeInOutBounce(x float32) float32 {
	if x < 0.5 {
		return (1 - easeOutBounce(1-2*x)) / 2
	}
	return (1 + easeOutBounce(2*x-1)) / 2
}
package main
import (
"log"
"math"
"strings"
"github.com/hajimehoshi/ebiten/v2"
)
// BallScale is the scale factor of the ball made by the Renderer#GeoM
const BallScale = 0.05

// Properties for ball movement prior to any interactions
const (
	// BallInitialVelocity is the starting speed (distance moved per update).
	BallInitialVelocity = 1.75
	// BallInitialAngle is the starting direction in degrees (up and to the left).
	BallInitialAngle = -135
)

// Ball represents a ball: its texture, position, scaled size, and polar
// velocity (speed plus angle in degrees).
type Ball struct {
	tex *ebiten.Image
	x, y float64
	width, height float64
	velocity, angle float64
	// visited concatenates the names of block colors already hit, so each
	// color's speed bonus is applied only once (see IncreaseVelocity).
	visited string
}
// CreateBall loads the ball texture from the game assets and returns a Ball
// centered horizontally, 65px above the bottom edge, moving at the initial
// velocity and angle. It terminates the program if the texture fails to load.
func CreateBall(g *Game) *Ball {
	// Load the texture
	src := g.assets["assets/ball.png"]
	texture, err := LoadTexture(src)
	if err != nil {
		log.Fatalf("creating ball sprite: %v", err)
	}
	// Set the width and height to the texture size adjusted by the scale factor
	w, h := texture.Size()
	width := float64(w) * BallScale
	height := float64(h) * BallScale
	return &Ball{
		tex: texture,
		x: WindowSizeWidth/2 - width/2,
		y: WindowSizeHeight - 65,
		width: width,
		height: height,
		velocity: BallInitialVelocity,
		angle: BallInitialAngle,
		visited: "",
	}
}
// Update advances the ball one frame and draws it.
//
// While the game is active it: ends the game when the ball falls past the
// bottom edge, reflects the ball off the paddle (steering the bounce angle
// by how far from the paddle's center it hit), reflects it off the walls,
// and integrates the position from the polar velocity. When the game is
// inactive the ball is drawn at its current position without moving.
func (b *Ball) Update(g *Game, screen *ebiten.Image, c *Cursor) {
	// Rebuild the draw transform from scratch each frame.
	g.renderer.GeoM.Reset()
	g.renderer.GeoM.Scale(BallScale, BallScale)
	if g.active {
		// The ball fell below the playfield: the game is over.
		// Fix: the vertical bound previously compared against b.width;
		// use b.height so a non-square texture is bounded correctly.
		if b.y > float64(WindowSizeHeight-b.height) {
			g.ended = true
			g.active = false
		}
		paddle := g.sprites["paddle"]
		w, h := paddle.Dimensions()
		/* Check coordinates to see if paddle and ball are colliding
		   (Colliding() method doesn't work here): the ball must be within
		   2px above the paddle's top edge and horizontally inside it. */
		if (int(paddle.Y())-h)-int(b.y) <= 2 && int(b.x) >= int(paddle.X())-w/2 && int(b.x) <= int(paddle.X())+w/2 {
			// Play the hit sound effect
			g.audioPlayer["hit"].Play()
			// Steer the bounce angle by the distance from the paddle's
			// center, so edge hits deflect more sharply than center hits.
			val := paddle.X() - float64(b.x)
			if val > 0 {
				b.angle = float64(-180 + (45 - val/2))
			} else {
				b.angle = float64(360 - (45 + val/2))
			}
		}
		// Reflect off the side walls (mirror horizontally)...
		if b.x < 0 || b.x > float64(WindowSizeWidth-b.width) {
			b.angle = 180 - b.angle
			g.audioPlayer["hit"].Play()
		} else if b.y < 0 || b.y > float64(WindowSizeHeight-b.height) {
			// ...or off the top/bottom edges (mirror vertically).
			g.audioPlayer["hit"].Play()
			b.angle = 360 - b.angle
		}
		// Integrate the position from the polar velocity.
		b.x += b.velocity * math.Cos(b.angle*math.Pi/180)
		b.y += b.velocity * math.Sin(b.angle*math.Pi/180)
	}
	// Position the sprite and draw it (hoisted out of both branches).
	g.renderer.GeoM.Translate(b.x, b.y)
	screen.DrawImage(b.tex, g.renderer.DrawImageOptions)
}
// IncreaseVelocity increases the velocity of the ball by the color-specific
// increment the first time a block of that color is hit; later hits of the
// same color have no effect.
func (b *Ball) IncreaseVelocity(color string) {
	// NOTE(review): substring matching means a color whose name is contained
	// in another (e.g. "red" inside "darkred") would be treated as already
	// visited — confirm the color names are substring-free of each other.
	if !strings.Contains(b.visited, color) {
		b.velocity += velocities[color]
		b.visited += color
	}
}

// Dimensions returns the ball's scaled width and height in pixels.
func (b *Ball) Dimensions() (int, int) {
	return int(b.width), int(b.height)
}

// X returns the ball x position
func (b *Ball) X() float64 {
	return b.x
}

// Y returns the ball y position
func (b *Ball) Y() float64 {
	return b.y
}
package tr
import (
"strings"
)
// trset is a translation character set, possibly containing unexpanded
// "a-z" style ranges (see Set and expandRanges).
type trset string

// EmptySet is the empty translation set.
var EmptySet = trset("")
// Replace translates, squeezes, or deletes characters from the src string.
// If the first character of from is '^' then the from set is complemented.
// Ranges are specified with '-' between to characters.
// If the to set is empty, then characters in the from set are deleted.
// If the to set is shorter than the from set, then
// the last character in the to set is repeated to make the sets the same length,
// and this repeated character is never put more than once in a row in the output.
func Replace(src string, from trset, to trset) string {
	srclen := len(src)
	if srclen == 0 || len(from) == 0 {
		return src
	}
	// A leading '^' complements the from set.
	allbut := from[0] == '^'
	if allbut {
		from = from[1:]
	}
	// Fast path: scan for the first character that would be affected; if
	// none is found, return the input unchanged without allocating.
	si := 0
	for ; si < srclen; si++ {
		c := src[si]
		p := strings.IndexByte(string(from), c)
		if allbut == (p == -1) {
			break
		}
	}
	if si == srclen {
		return src // no changes
	}
	buf := strings.Builder{}
	buf.Grow(srclen)
	buf.WriteString(src[:si])
	lastto := len(to)
	// Squeeze mode: when the to set is shorter than from (or from is
	// complemented), runs that map to the final to character collapse to
	// a single output character.
	collapse := lastto > 0 && (allbut || lastto < len(from))
	lastto--
scan:
	for ; si < srclen; si++ {
		c := src[si]
		i := xindex(from, c, allbut, lastto)
		if collapse && i >= lastto {
			// Emit one copy of the repeated character, then skip the rest
			// of the run that maps to it.
			buf.WriteByte(to[lastto])
			for {
				si++
				if si >= srclen {
					break scan
				}
				c = src[si]
				i = xindex(from, c, allbut, lastto)
				if i < lastto {
					break
				}
			}
		}
		if i < 0 {
			buf.WriteByte(c) // not in from: copy through unchanged
		} else if lastto >= 0 {
			buf.WriteByte(to[i]) // translate to the matching to character
		} /* else
		delete */
	}
	return buf.String()
}
// Set builds a trset from a tr(1)-style specification, expanding "a-z"
// style ranges when present. Strings too short to contain a range, or with
// no interior '-', are returned unchanged.
func Set(s string) trset {
	if len(s) < 3 {
		return trset(s)
	}
	// Skip a leading '^' complement marker when locating ranges.
	i := 0
	if s[0] == '^' {
		i++
	}
	// Look for a '-' strictly inside the string: a dash in the first or
	// last position is a literal character, not a range.
	dash := strings.IndexByte(s[i+1:len(s)-1], '-')
	if dash == -1 {
		return trset(s) // no ranges to expand
	}
	return expandRanges(s)
}
func expandRanges(s string) trset {
slen := len(s)
buf := strings.Builder{}
buf.Grow(slen)
if s[0] == '^' {
buf.WriteByte('^')
s = s[1:]
slen--
}
for i := 0; i < slen; i++ {
c := s[i]
if c == '-' && i > 0 && i+1 < slen {
for r := s[i-1] + 1; r < s[i+1]; r++ {
buf.WriteByte(r)
}
} else {
buf.WriteByte(c)
}
}
return trset(buf.String())
}
func xindex(from trset, c byte, allbut bool, lastto int) int {
i := strings.IndexByte(string(from), c)
if allbut {
if i == -1 {
return lastto + 1
}
return -1
}
return i
} | util/tr/tr.go | 0.518546 | 0.411761 | tr.go | starcoder |
package primitive
import (
"fmt"
"math"
"github.com/fogleman/gg"
)
// Triangle is a candidate shape defined by three integer vertices, plus the
// Worker that owns the target image dimensions, RNG, and scratch buffers.
type Triangle struct {
	Worker *Worker
	X1, Y1 int
	X2, Y2 int
	X3, Y3 int
}
// NewRandomTriangle generates a triangle with one vertex uniformly random
// inside the image and the other two within a 31x31 box (±15px) around it,
// then mutates it (which also retries until the triangle is Valid).
func NewRandomTriangle(worker *Worker) *Triangle {
	rnd := worker.Rnd
	x1 := rnd.Intn(worker.W)
	y1 := rnd.Intn(worker.H)
	x2 := x1 + rnd.Intn(31) - 15
	y2 := y1 + rnd.Intn(31) - 15
	x3 := x1 + rnd.Intn(31) - 15
	y3 := y1 + rnd.Intn(31) - 15
	t := &Triangle{worker, x1, y1, x2, y2, x3, y3}
	t.Mutate()
	return t
}
// Draw fills the triangle path on the drawing context. The scale parameter
// is unused here; scaling is handled by the context's transform.
func (t *Triangle) Draw(dc *gg.Context, scale float64) {
	dc.LineTo(float64(t.X1), float64(t.Y1))
	dc.LineTo(float64(t.X2), float64(t.Y2))
	dc.LineTo(float64(t.X3), float64(t.Y3))
	dc.ClosePath()
	dc.Fill()
}

// SVG renders the triangle as an SVG <polygon> element carrying the given
// attribute string (fill, opacity, etc.).
func (t *Triangle) SVG(attrs string) string {
	return fmt.Sprintf(
		"<polygon %s points=\"%d,%d %d,%d %d,%d\" />",
		attrs, t.X1, t.Y1, t.X2, t.Y2, t.X3, t.Y3)
}

// Copy returns a shallow copy of the triangle (the Worker is shared).
func (t *Triangle) Copy() Shape {
	a := *t
	return &a
}
// Mutate perturbs one randomly chosen vertex by a Gaussian offset
// (standard deviation 16px), clamps it to the image bounds extended by a
// 16px margin, and retries until the resulting triangle is Valid.
func (t *Triangle) Mutate() {
	w := t.Worker.W
	h := t.Worker.H
	rnd := t.Worker.Rnd
	const m = 16 // margin: how far a vertex may wander outside the image
	for {
		switch rnd.Intn(3) {
		case 0:
			t.X1 = clampInt(t.X1+int(rnd.NormFloat64()*16), -m, w-1+m)
			t.Y1 = clampInt(t.Y1+int(rnd.NormFloat64()*16), -m, h-1+m)
		case 1:
			t.X2 = clampInt(t.X2+int(rnd.NormFloat64()*16), -m, w-1+m)
			t.Y2 = clampInt(t.Y2+int(rnd.NormFloat64()*16), -m, h-1+m)
		case 2:
			t.X3 = clampInt(t.X3+int(rnd.NormFloat64()*16), -m, w-1+m)
			t.Y3 = clampInt(t.Y3+int(rnd.NormFloat64()*16), -m, h-1+m)
		}
		if t.Valid() {
			break
		}
	}
}
// Valid reports whether every interior angle of the triangle is larger
// than 15 degrees, rejecting degenerate sliver-thin triangles that
// rasterize poorly. The third angle is derived from the other two since
// the angles of a triangle sum to 180 degrees.
func (t *Triangle) Valid() bool {
	const minDegrees = 15
	a1 := vertexAngle(t.X1, t.Y1, t.X2, t.Y2, t.X3, t.Y3)
	a2 := vertexAngle(t.X2, t.Y2, t.X1, t.Y1, t.X3, t.Y3)
	a3 := 180 - a1 - a2
	return a1 > minDegrees && a2 > minDegrees && a3 > minDegrees
}

// vertexAngle returns the angle in degrees at vertex (px, py) formed by the
// rays toward (ax, ay) and (bx, by): both rays are normalized and the angle
// is recovered from their dot product.
func vertexAngle(px, py, ax, ay, bx, by int) float64 {
	x1 := float64(ax - px)
	y1 := float64(ay - py)
	x2 := float64(bx - px)
	y2 := float64(by - py)
	d1 := math.Sqrt(x1*x1 + y1*y1)
	d2 := math.Sqrt(x2*x2 + y2*y2)
	x1 /= d1
	y1 /= d1
	x2 /= d2
	y2 /= d2
	return degrees(math.Acos(x1*x2 + y1*y2))
}
// Rasterize converts the triangle to scanlines, reusing the worker's
// scratch buffer, and crops them to the image bounds.
func (t *Triangle) Rasterize() []Scanline {
	buf := t.Worker.Lines[:0]
	lines := rasterizeTriangle(t.X1, t.Y1, t.X2, t.Y2, t.X3, t.Y3, buf)
	return cropScanlines(lines, t.Worker.W, t.Worker.H)
}

// rasterizeTriangle appends the triangle's scanlines to buf using the
// classic split method: sort the vertices by y, handle flat-bottom and
// flat-top triangles directly, and otherwise split at the middle vertex's
// height into one of each.
func rasterizeTriangle(x1, y1, x2, y2, x3, y3 int, buf []Scanline) []Scanline {
	// Sort the vertices so that y1 <= y2 <= y3.
	if y1 > y3 {
		x1, x3 = x3, x1
		y1, y3 = y3, y1
	}
	if y1 > y2 {
		x1, x2 = x2, x1
		y1, y2 = y2, y1
	}
	if y2 > y3 {
		x2, x3 = x3, x2
		y2, y3 = y3, y2
	}
	if y2 == y3 {
		// Flat-bottom triangle.
		return rasterizeTriangleBottom(x1, y1, x2, y2, x3, y3, buf)
	} else if y1 == y2 {
		// Flat-top triangle.
		return rasterizeTriangleTop(x1, y1, x2, y2, x3, y3, buf)
	} else {
		// General case: (x4, y4) is the point on the long edge at the
		// middle vertex's height, splitting the triangle in two.
		x4 := x1 + int((float64(y2-y1)/float64(y3-y1))*float64(x3-x1))
		y4 := y2
		buf = rasterizeTriangleBottom(x1, y1, x2, y2, x4, y4, buf)
		buf = rasterizeTriangleTop(x2, y2, x4, y4, x3, y3, buf)
		return buf
	}
}
// rasterizeTriangleBottom emits scanlines for a flat-bottom triangle (apex
// at (x1, y1), base at y2 == y3), walking both edges down one row at a time.
// NOTE(review): assumes y1 < y2 — a fully flat input would divide by zero
// into Inf slopes; presumably prevented upstream by Valid(). Confirm.
func rasterizeTriangleBottom(x1, y1, x2, y2, x3, y3 int, buf []Scanline) []Scanline {
	s1 := float64(x2-x1) / float64(y2-y1) // slope of edge 1->2 (dx per row)
	s2 := float64(x3-x1) / float64(y3-y1) // slope of edge 1->3 (dx per row)
	ax := float64(x1)
	bx := float64(x1)
	for y := y1; y <= y2; y++ {
		a := int(ax)
		b := int(bx)
		ax += s1
		bx += s2
		// Keep the span's endpoints ordered left-to-right.
		if a > b {
			a, b = b, a
		}
		buf = append(buf, Scanline{y, a, b, 0xffff})
	}
	return buf
}

// rasterizeTriangleTop emits scanlines for a flat-top triangle (base at
// y1 == y2, apex at (x3, y3)), walking both edges upward from the apex.
// The row at y1 itself is emitted by the matching flat-bottom half.
func rasterizeTriangleTop(x1, y1, x2, y2, x3, y3 int, buf []Scanline) []Scanline {
	s1 := float64(x3-x1) / float64(y3-y1) // slope of edge 1->3 (dx per row)
	s2 := float64(x3-x2) / float64(y3-y2) // slope of edge 2->3 (dx per row)
	ax := float64(x3)
	bx := float64(x3)
	for y := y3; y > y1; y-- {
		ax -= s1
		bx -= s2
		a := int(ax)
		b := int(bx)
		// Keep the span's endpoints ordered left-to-right.
		if a > b {
			a, b = b, a
		}
		buf = append(buf, Scanline{y, a, b, 0xffff})
	}
	return buf
}
// Package compliance provides the tools to validate the compliance of driver
// implementations and BQL behavior testing. The compliance package is built
// around stories. A story is a collection of graphs and a sequence of
// assertions against the provided data. An assertion is defined by a tuple
// containing a BQL, the execution status, and the expected result table.
package compliance
import (
"encoding/json"
"fmt"
"time"
"github.com/google/badwolf/bql/table"
"github.com/google/badwolf/triple/literal"
"github.com/google/badwolf/triple/node"
"github.com/google/badwolf/triple/predicate"
)
// Graph contains the graph binding name and the list of parseable triples
// that define it.
type Graph struct {
	// ID of the binding name to use for the graph.
	ID string
	// Facts contains the parseable triples which define the graph.
	Facts []string
}

// Assertion contains a BQL, the expected status of the BQL query execution,
// and the returned results table.
type Assertion struct {
	// Requires of the assertion.
	Requires string
	// Statement contains the BQL query to assert.
	Statement string
	// WillFail indicates if the query should fail with and error.
	WillFail bool
	// MustReturn contains the table containing the expected results provided
	// by the BQL statement execution.
	MustReturn []map[string]string
	// The equivalent table representation of the MustReturn information,
	// lazily built and memoized by OutputTable.
	table *table.Table
}

// AssertionOutcome contains the result of running one assertion of a given
// story.
type AssertionOutcome struct {
	// Equal indicates whether the obtained table matched the expected one.
	Equal bool
	// Got is the table actually produced by executing the assertion.
	Got *table.Table
	// Want is the table the assertion declared as expected.
	Want *table.Table
}

// Story contains the available graphs and the collection of assertions to
// validate.
type Story struct {
	// Name of the story.
	Name string
	// Sources contains the list of graphs used in the story.
	Sources []*Graph
	// Assertions that need to be validated against the provided sources.
	Assertions []*Assertion
}
// Marshal serializes the story into an indented, human-readable JSON string.
func (s *Story) Marshal() (string, error) {
	b, err := json.MarshalIndent(s, "", " ")
	if err != nil {
		return "", err
	}
	return string(b), nil
}

// Unmarshal rebuilds a story in place from a JSON string, such as one
// produced by Marshal.
func (s *Story) Unmarshal(ss string) error {
	return json.Unmarshal([]byte(ss), s)
}
// inferCell builds a Cell out of the provided string by trying each typed
// parse in order — node, predicate, literal, RFC 3339 timestamp — and
// falling back to a plain string cell when nothing parses.
func inferCell(s string) *table.Cell {
	if n, err := node.Parse(s); err == nil {
		return &table.Cell{N: n}
	}
	if p, err := predicate.Parse(s); err == nil {
		return &table.Cell{P: p}
	}
	if l, err := literal.DefaultBuilder().Parse(s); err == nil {
		return &table.Cell{L: l}
	}
	t, err := time.Parse(time.RFC3339Nano, s)
	if err == nil {
		return &table.Cell{T: &t}
	}
	// Last resort: treat the value as an opaque string.
	return &table.Cell{S: table.CellString(s)}
}
// OutputTable returns the expected result table for the must result table
// provided by the story.
func (a *Assertion) OutputTable(bo []string) (*table.Table, error) {
// Return the already computed output table.
if a.table != nil {
return a.table, nil
}
// Compute the output table.
var (
first bool
mBdngs map[string]bool
data []table.Row
bs []string
)
mBdngs, first = make(map[string]bool), true
for _, row := range a.MustReturn {
nr := table.Row{}
for k, v := range row {
_, ok := mBdngs[k]
if first && !ok {
bs = append(bs, k)
}
if !first && !ok {
return nil, fmt.Errorf("unknow binding %q; available ones are %v", k, mBdngs)
}
mBdngs[k], nr[k] = true, inferCell(v)
}
data = append(data, nr)
first = false
}
if first {
// No data was provided. This will create the empty table with the right
// bindings.
bs = bo
}
// Build the table.
if len(bo) != len(bs) {
return nil, fmt.Errorf("incompatible bindings; got %v, want %v", bs, bo)
}
for _, b := range bo {
if _, ok := mBdngs[b]; !first && !ok {
return nil, fmt.Errorf("missing binding %q; want bining in %v", b, bo)
}
}
t, err := table.New(bo)
if err != nil {
return nil, err
}
for _, r := range data {
t.AddRow(r)
}
return t, nil
} | tools/compliance/entry.go | 0.737536 | 0.707569 | entry.go | starcoder |
package dataframe
import (
"fmt"
"sync"
"strings"
"github.com/isuruceanu/gota/series"
)
// GroupedDataFrame a DataFrame which is grouped by columns
type GroupedDataFrame struct {
DataFrame
groupedBy []string
}
// Group create a GroupedDataFrame with cols groups
func (d DataFrame) Group(cols ...string) GroupedDataFrame {
r := GroupedDataFrame{DataFrame: d}
for _, col := range cols {
colIndex := d.ColIndex(col)
if colIndex < 0 {
r.Err = fmt.Errorf("colname %v doesn't exist", col)
return r
}
r.groupedBy = append(r.groupedBy, col)
}
return r
}
func (g GroupedDataFrame) Summarize(f func(DataFrame) series.Series) DataFrame {
keyIndexes := g.parseInternal()
rowlen := -1
elements := make([][]series.Element, len(keyIndexes))
i := 0
for k, indexes := range keyIndexes {
row := f(g.Subset(indexes))
keys := strings.Split(k, "$_$")
if len(keys) != len(g.groupedBy) {
return DataFrame{
Err: fmt.Errorf("error keys lens differs from len of groups %v: %v", len(keys), len(g.groupedBy)),
}
}
if row.Err != nil {
return DataFrame{
Err: fmt.Errorf("error applying function on row %v: %v", keys, row.Err),
}
}
if rowlen != -1 && rowlen != row.Len() {
return DataFrame{
Err: fmt.Errorf("error applying function: rows have different lengths"),
}
}
rowlen = row.Len()
groupedLevels := series.Strings(keys)
rowElems := make([]series.Element, rowlen+len(keys))
//Add group levels
for j := 0; j < len(keys); j++ {
rowElems[j] = groupedLevels.Elem(j)
}
for j := 0; j < rowlen; j++ {
rowElems[j+len(keys)] = row.Elem(j)
}
elements[i] = rowElems
i++
}
ncol := rowlen + len(g.groupedBy)
nrow := len(keyIndexes)
// Cast columns if necessary
columns := make([]series.Series, ncol)
for j := 0; j < ncol; j++ {
types := make([]series.Type, nrow)
for i := 0; i < nrow; i++ {
types[i] = elements[i][j].Type()
}
colType := detectType(types)
s := series.New(nil, colType, "").Empty()
for i := 0; i < nrow; i++ {
s.Append(elements[i][j])
}
columns[j] = s
}
names := make([]string, len(g.groupedBy)+rowlen)
orders := make([]Order, len(g.groupedBy))
for i := 0; i < len(g.groupedBy)+rowlen; i++ {
if i < len(g.groupedBy) {
names[i] = g.groupedBy[i]
orders[i] = Sort(g.groupedBy[i])
} else {
names[i] = fmt.Sprintf("X%v", i-len(g.groupedBy))
}
}
dfr := New(columns...)
dfr.SetNames(names)
dfr = dfr.Arrange(orders...)
return dfr
}
func (g GroupedDataFrame) SummarizeAsync(f func(DataFrame) series.Series) DataFrame {
keyIndexes := g.parseInternal()
out := make(chan DataFrame)
input := make(chan series.Series)
var wg sync.WaitGroup
go g.handleSummarizeAsyncResults(input, out, &wg)
for key, indexes := range keyIndexes {
wg.Add(1)
go g.aggregateGroup(f, key, indexes, input)
}
wg.Wait()
close(input)
df := <-out
return df
}
func (g GroupedDataFrame) aggregateGroup(f func(DataFrame) series.Series, key string, indexes []int, row chan series.Series) {
agg := f(g.SubsetNoColumnCopy(indexes))
if agg.Err != nil {
row <- agg
return
}
keys := strings.Split(key, "$_$")
if len(keys) != len(g.groupedBy) {
agg.Err = fmt.Errorf("error keys lens differs from len of groups %v: %v", len(keys), len(g.groupedBy))
}
agg.Name = key
row <- agg
}
func (g GroupedDataFrame) handleSummarizeAsyncResults(input chan series.Series, out chan DataFrame, wg *sync.WaitGroup) {
var results []series.Series
colLen := -1
for s := range input {
wg.Done()
if s.Err != nil {
out <- DataFrame{Err: s.Err}
return
}
keys := strings.Split(s.Name, "$_$")
if len(keys) != len(g.groupedBy) {
s.Err = fmt.Errorf("error keys lens differs from len of groups %v: %v", len(keys), len(g.groupedBy))
results = append(results, s)
continue
}
s := series.Strings(keys).Concat(s)
if colLen == -1 {
colLen = s.Len()
} else if colLen != s.Len() {
panic("The length of aggregation differ from preview value")
}
results = append(results, s)
}
columns := make([]series.Series, colLen)
orders := make([]Order, len(g.groupedBy))
rowLen := len(results)
for j := 0; j < colLen; j++ {
elements := make([]series.Element, rowLen)
for i := 0; i < rowLen; i++ {
elements[i] = results[i].Elem(j)
}
colType := series.Float
colName := ""
if j < len(g.groupedBy) {
colType = series.String
colName = g.groupedBy[j]
orders[j] = Sort(colName)
}
columns[j] = series.New(elements, colType, colName)
}
df := New(columns...).Arrange(orders...)
out <- df
}
func (g GroupedDataFrame) parseInternal() map[string][]int {
groupSO := make(map[string][]int)
key := make([]string, len(g.groupedBy))
groupedColumnOnlyIdxs := make([]int, len(g.groupedBy))
for i, groupName := range g.groupedBy {
idx := findInStringSlice(groupName, g.Names())
if idx < 0 {
panic(fmt.Sprintf("filter: can't find column name %s", groupName))
}
groupedColumnOnlyIdxs[i] = idx
}
for i := 0; i < g.Nrow(); i++ {
for j := 0; j < len(groupedColumnOnlyIdxs); j++ {
key[j] = g.columns[groupedColumnOnlyIdxs[j]].Elem(i).String()
}
dkey := strings.Join(key, "$_$")
groupSO[dkey] = append(groupSO[dkey], i)
}
return groupSO
} | dataframe/dataframe.grouped.go | 0.618089 | 0.432842 | dataframe.grouped.go | starcoder |
package continuous
import (
"github.com/jtejido/stats"
"github.com/jtejido/stats/err"
"math"
"math/rand"
)
// Wigner semicircle distribution
// https://en.wikipedia.org/wiki/Wigner_semicircle_distribution
type WignerSemiCircle struct {
radius, center float64
src rand.Source
}
func NewWignerSemiCircle(radius, center float64) (*WignerSemiCircle, error) {
return NewWignerSemiCircleWithSource(radius, center, nil)
}
func NewWignerSemiCircleWithSource(radius, center float64, src rand.Source) (*WignerSemiCircle, error) {
if radius <= 0 {
return nil, err.Invalid()
}
return &WignerSemiCircle{radius, center, src}, nil
}
// a ∈ (-∞,∞)
// R ∈ (0,∞)
func (ws *WignerSemiCircle) Parameters() stats.Limits {
return stats.Limits{
"a": stats.Interval{math.Inf(-1), math.Inf(1), true, true},
"R": stats.Interval{0, math.Inf(1), true, true},
}
}
// x ∈ [a-R,a+R]
func (ws *WignerSemiCircle) Support() stats.Interval {
return stats.Interval{ws.center - ws.radius, ws.center + ws.radius, false, false}
}
func (ws *WignerSemiCircle) Probability(x float64) float64 {
if ws.Support().IsWithinInterval(x) {
return (2. / math.Pi * (ws.radius * ws.radius)) * math.Sqrt((ws.radius*ws.radius)-math.Pow(-ws.center+x, 2))
} else if x >= ws.center+ws.radius {
return 1
}
return 0
}
func (ws *WignerSemiCircle) Distribution(x float64) float64 {
if ws.Support().IsWithinInterval(x) {
return .5 + ((-ws.center+x)*math.Sqrt((ws.radius*ws.radius)-math.Pow(-ws.center+x, 2)))/(math.Pi*(ws.radius*ws.radius)) + math.Asin((-ws.center+x)/ws.radius)/math.Pi
} else if x >= ws.center+ws.radius {
return 1
}
return 0
}
func (ws *WignerSemiCircle) Mean() float64 {
return ws.center
}
func (ws *WignerSemiCircle) Median() float64 {
return ws.center
}
func (ws *WignerSemiCircle) Mode() float64 {
return 0
}
func (ws *WignerSemiCircle) Skewness() float64 {
return 0
}
func (ws *WignerSemiCircle) ExKurtosis() float64 {
return -1
}
func (ws *WignerSemiCircle) Entropy() float64 {
return math.Log(math.Pi*ws.radius) - (1. / 2)
}
func (ws *WignerSemiCircle) Variance() float64 {
return (ws.radius * ws.radius) / 4
}
func (ws *WignerSemiCircle) Rand() float64 {
var rnd float64
if ws.src == nil {
rnd = rand.Float64()
} else {
rnd = rand.New(ws.src).Float64()
}
rnd += rnd - 1
return ws.radius * rnd
} | dist/continuous/wigner_semi_circle.go | 0.806358 | 0.462594 | wigner_semi_circle.go | starcoder |
package lamb
import (
"math"
"github.com/nlpodyssey/spago/gd"
"github.com/nlpodyssey/spago/mat"
"github.com/nlpodyssey/spago/mat/float"
"github.com/nlpodyssey/spago/nn"
)
var _ gd.MethodConfig = &Config{}
// Config provides configuration settings for Lamb optimizer.
type Config struct {
gd.MethodConfig
StepSize float64
Beta1 float64
Beta2 float64
Epsilon float64
Lambda float64
}
// NewConfig returns a new Lamb Config.
func NewConfig(stepSize, beta1, beta2, epsilon, lambda float64) Config {
if !(beta1 >= 0.0 && beta1 < 1.0) {
panic("lamb: `beta1` must be in the range [0.0, 1.0)")
}
if !(beta2 >= 0.0 && beta2 < 1.0) {
panic("lamb: `beta2` must be in the range [0.0, 1.0)")
}
return Config{
StepSize: stepSize,
Beta1: beta1,
Beta2: beta2,
Epsilon: epsilon,
Lambda: lambda,
}
}
// NewDefaultConfig returns a new Config with generically reasonable default values.
func NewDefaultConfig() Config {
return Config{
StepSize: 0.001,
Beta1: 0.9,
Beta2: 0.999,
Epsilon: 1.0e-8,
Lambda: 0.1,
}
}
var _ gd.Method = &Lamb[float32]{}
// Lamb implements the Lamb gradient descent optimization method.
type Lamb[T float.DType] struct {
Config
Alpha float64
TimeStep int
}
// New returns a new Lamb optimizer, initialized according to the given configuration.
func New[T float.DType](c Config) *Lamb[T] {
lamb := &Lamb[T]{
Config: c,
Alpha: c.StepSize,
}
lamb.IncExample() // initialize 'alpha' coefficient
return lamb
}
// Label returns the enumeration-like value which identifies this gradient descent method.
func (o *Lamb[_]) Label() int {
return gd.Lamb
}
const (
v int = 0
m int = 1
buf1 int = 2 // contains 'grads.ProdScalar(1.0 - beta1)'
buf2 int = 3 // contains 'grads.Prod(grads).ProdScalar(1.0 - beta2)'
buf3 int = 4
)
// NewSupport returns a new support structure with the given dimensions.
func (o *Lamb[T]) NewSupport(r, c int) *nn.Payload {
supp := make([]mat.Matrix, 5)
supp[v] = mat.NewEmptyDense[T](r, c)
supp[m] = mat.NewEmptyDense[T](r, c)
supp[buf1] = mat.NewEmptyDense[T](r, c)
supp[buf2] = mat.NewEmptyDense[T](r, c)
supp[buf3] = mat.NewEmptyDense[T](r, c)
return &nn.Payload{
Label: o.Label(),
Data: supp,
}
}
// IncExample beats the occurrence of a new example.
func (o *Lamb[_]) IncExample() {
o.TimeStep++
o.updateAlpha()
}
func (o *Lamb[T]) updateAlpha() {
ts := float64(o.TimeStep)
o.Alpha = o.StepSize * math.Sqrt(1.0-math.Pow(o.Beta2, ts)) / (1.0 - math.Pow(o.Beta1, ts))
}
// Delta returns the difference between the current params and where the method wants it to be.
func (o *Lamb[T]) Delta(param nn.Param) mat.Matrix {
return o.calcDelta(param.Grad(), gd.GetOrSetPayload(param, o).Data, param.Value())
}
// v = v*beta1 + grads*(1.0-beta1)
// m = m*beta2 + (grads*grads)*(1.0-beta2)
// weights = ||params|| / || (v / (sqrt(m) + eps)) + (lambda * weights)
// d = (v / (sqrt(m) + eps)) + (lambda * weights) * alpha
func (o *Lamb[T]) calcDelta(grads mat.Matrix, supp []mat.Matrix, weights mat.Matrix) mat.Matrix {
updateV(grads, supp, o.Beta1)
updateM(grads, supp, o.Beta2)
buf := supp[m].Sqrt().AddScalarInPlace(o.Epsilon)
defer mat.ReleaseMatrix(buf)
suppDiv := supp[v].Div(buf)
if o.Lambda != 0.0 {
scaledW := weights.ProdScalar(o.Lambda)
suppDiv.AddInPlace(scaledW)
}
weightsNorm := norm(weights)
adamStepNorm := norm(suppDiv)
var trustRatio float64 = 1
if !(weightsNorm == 0.0 || adamStepNorm == 0.0) {
trustRatio = weightsNorm / adamStepNorm
}
defer mat.ReleaseMatrix(suppDiv)
supp[buf3].ProdMatrixScalarInPlace(suppDiv, o.Alpha*trustRatio)
return supp[buf3]
}
// v = v*beta1 + grads*(1.0-beta1)
func updateV(grads mat.Matrix, supp []mat.Matrix, beta1 float64) {
supp[v].ProdScalarInPlace(beta1)
supp[buf1].ProdMatrixScalarInPlace(grads, 1.0-beta1)
supp[v].AddInPlace(supp[buf1])
}
// m = m*beta2 + (grads*grads)*(1.0-beta2)
func updateM(grads mat.Matrix, supp []mat.Matrix, beta2 float64) {
supp[m].ProdScalarInPlace(beta2)
sqGrad := grads.Prod(grads)
defer mat.ReleaseMatrix(sqGrad)
supp[buf2].ProdMatrixScalarInPlace(sqGrad, 1.0-beta2)
supp[m].AddInPlace(supp[buf2])
}
func norm(grads mat.Matrix) float64 {
prod := grads.Prod(grads)
defer mat.ReleaseMatrix(prod)
sum := prod.Sum()
defer mat.ReleaseMatrix(sum)
return math.Sqrt(sum.Scalar().F64())
} | gd/lamb/lamb.go | 0.82887 | 0.452838 | lamb.go | starcoder |
package opensimplex
const (
stretchConstant2D = -0.211324865405187 // (1/Math.sqrt(2+1)-1)/2
squishConstant2D = 0.366025403784439 // (Math.sqrt(2+1)-1)/2
stretchConstant3D = -1.0 / 6 // (1/Math.sqrt(3+1)-1)/3
squishConstant3D = 1.0 / 3 // (Math.sqrt(3+1)-1)/3
stretchConstant4D = -0.138196601125011 // (1/Math.sqrt(4+1)-1)/4
squishConstant4D = 0.309016994374947 // (Math.sqrt(4+1)-1)/4
normConstant2D = 47
normConstant3D = 103
normConstant4D = 30
defaultSeed = 0
)
func (s *noise) extrapolate2(xsb, ysb int32, dx, dy float64) float64 {
index := s.perm[(int32(s.perm[xsb&0xFF])+ysb)&0xFF] & 0x0E
return float64(gradients2D[index])*dx + float64(gradients2D[index+1])*dy
}
func (s *noise) extrapolate3(xsb, ysb, zsb int32, dx, dy, dz float64) float64 {
index := s.permGradIndex3D[(int32(s.perm[(int32(s.perm[xsb&0xFF])+ysb)&0xFF])+zsb)&0xFF]
return float64(gradients3D[index])*dx + float64(gradients3D[index+1])*dy + float64(gradients3D[index+2])*dz
}
func (s *noise) extrapolate4(xsb, ysb, zsb, wsb int32, dx, dy, dz, dw float64) float64 {
index := s.perm[(int32(s.perm[(int32(s.perm[(int32(s.perm[xsb&0xFF])+ysb)&0xFF])+zsb)&0xFF])+wsb)&0xFF] & 0xFC
return float64(gradients4D[index])*dx + float64(gradients4D[index+1])*dy + float64(gradients4D[index+2])*dz + float64(gradients4D[index+3])*dw
}
// Gradients for 2D. They approximate the directions to the
// vertices of an octagon from the center.
var gradients2D = []int8{
5, 2, 2, 5,
-5, 2, -2, 5,
5, -2, 2, -5,
-5, -2, -2, -5,
}
// Gradients for 3D. They approximate the directions to the
// vertices of a rhombicuboctahedron from the center, skewed so
// that the triangular and square facets can be inscribed inside
// circles of the same radius.
var gradients3D = []int8{
-11, 4, 4, -4, 11, 4, -4, 4, 11,
11, 4, 4, 4, 11, 4, 4, 4, 11,
-11, -4, 4, -4, -11, 4, -4, -4, 11,
11, -4, 4, 4, -11, 4, 4, -4, 11,
-11, 4, -4, -4, 11, -4, -4, 4, -11,
11, 4, -4, 4, 11, -4, 4, 4, -11,
-11, -4, -4, -4, -11, -4, -4, -4, -11,
11, -4, -4, 4, -11, -4, 4, -4, -11,
}
// Gradients for 4D. They approximate the directions to the
// vertices of a disprismatotesseractihexadecachoron from the center,
// skewed so that the tetrahedral and cubic facets can be inscribed inside
// spheres of the same radius.
var gradients4D = []int8{
3, 1, 1, 1, 1, 3, 1, 1, 1, 1, 3, 1, 1, 1, 1, 3,
-3, 1, 1, 1, -1, 3, 1, 1, -1, 1, 3, 1, -1, 1, 1, 3,
3, -1, 1, 1, 1, -3, 1, 1, 1, -1, 3, 1, 1, -1, 1, 3,
-3, -1, 1, 1, -1, -3, 1, 1, -1, -1, 3, 1, -1, -1, 1, 3,
3, 1, -1, 1, 1, 3, -1, 1, 1, 1, -3, 1, 1, 1, -1, 3,
-3, 1, -1, 1, -1, 3, -1, 1, -1, 1, -3, 1, -1, 1, -1, 3,
3, -1, -1, 1, 1, -3, -1, 1, 1, -1, -3, 1, 1, -1, -1, 3,
-3, -1, -1, 1, -1, -3, -1, 1, -1, -1, -3, 1, -1, -1, -1, 3,
3, 1, 1, -1, 1, 3, 1, -1, 1, 1, 3, -1, 1, 1, 1, -3,
-3, 1, 1, -1, -1, 3, 1, -1, -1, 1, 3, -1, -1, 1, 1, -3,
3, -1, 1, -1, 1, -3, 1, -1, 1, -1, 3, -1, 1, -1, 1, -3,
-3, -1, 1, -1, -1, -3, 1, -1, -1, -1, 3, -1, -1, -1, 1, -3,
3, 1, -1, -1, 1, 3, -1, -1, 1, 1, -3, -1, 1, 1, -1, -3,
-3, 1, -1, -1, -1, 3, -1, -1, -1, 1, -3, -1, -1, 1, -1, -3,
3, -1, -1, -1, 1, -3, -1, -1, 1, -1, -3, -1, 1, -1, -1, -3,
-3, -1, -1, -1, -1, -3, -1, -1, -1, -1, -3, -1, -1, -1, -1, -3,
} | vendor/github.com/ojrac/opensimplex-go/opensimplex_internal.go | 0.54359 | 0.541106 | opensimplex_internal.go | starcoder |
package zgeo
import (
"fmt"
"image"
"math"
"github.com/torlangballe/zutil/zmath"
)
type Pos struct {
X float64 `json:"x"`
Y float64 `json:"y"`
}
func (p Pos) String() string {
return fmt.Sprintf("%g,%g", p.X, p.Y)
}
func (p Pos) Vertice(vertical bool) float64 {
if vertical {
return p.Y
}
return p.X
}
func (p *Pos) VerticeP(vertical bool) *float64 {
if vertical {
return &p.Y
}
return &p.X
}
func (p *Pos) SetOne(vertical bool, v float64) {
if vertical {
p.Y = v
}
p.X = v
}
func (p Pos) Size() Size {
return Size{p.X, p.Y}
}
func (p *Pos) Set(x, y float64) {
*p = Pos{x, y}
}
func (p *Pos) SetF(x, y float32) {
*p = Pos{float64(x), float64(y)}
}
func (p *Pos) Swap() {
*p = Pos{p.Y, p.X}
}
func (p Pos) Swapped() Pos {
return Pos{p.Y, p.X}
}
func (p Pos) Max(a Pos) Pos {
return Pos{math.Max(p.X, a.X), math.Max(p.Y, a.Y)}
}
func (p Pos) Min(a Pos) Pos {
return Pos{math.Min(p.X, a.X), math.Min(p.Y, a.Y)}
}
func (p Pos) GetRot90CW() Pos { return Pos{p.Y, -p.X} }
func (p Pos) Dot(a Pos) float64 { return p.X*a.X + p.Y*a.Y }
func (p Pos) Length() float64 { return math.Sqrt(p.X*p.X + p.Y*p.Y) }
func (p Pos) IsNull() bool { return p.X == 0.0 && p.Y == 0.0 }
func (p Pos) GetNormalized() Pos { return p.DividedByD(p.Length()) }
func (p Pos) Sign() Pos { return Pos{zmath.Sign(p.X), zmath.Sign(p.Y)} }
func (p Pos) Negative() Pos {
return Pos{-p.X, -p.Y}
}
func (p Pos) Abs() Pos {
return Pos{math.Abs(p.X), math.Abs(p.Y)}
}
func (p Pos) IsSameDirection(pos Pos) bool {
if p == pos {
return true
}
if zmath.Sign(pos.X) != zmath.Sign(p.X) || zmath.Sign(pos.Y) != zmath.Sign(p.Y) {
return false
}
if pos.Y == 0.0 {
return p.Y == 0.0
}
if p.Y == 0.0 {
return p.Y == 0.0
}
if p.X/p.Y == pos.X/pos.Y {
return true
}
return false
}
func (p Pos) RotatedCCW(angle float64) Pos {
s := math.Sin(angle)
c := math.Cos(angle)
return Pos{p.X*c - p.Y*s, p.X*s + p.Y*c}
}
func (p *Pos) MultiplyD(a float64) {
p.X *= a
p.Y *= a
}
func (p Pos) GoPoint() image.Point {
return image.Pt(int(p.X), int(p.Y))
}
func PosFromGoPoint(point image.Point) Pos {
return Pos{float64(point.X), float64(point.Y)}
}
func (p Pos) PlusD(a float64) Pos { return Pos{p.X + a, p.Y + a} }
func (p Pos) MinusD(a float64) Pos { return Pos{p.X - a, p.Y - a} }
func (p Pos) TimesD(a float64) Pos { return Pos{p.X * a, p.Y * a} }
func (p Pos) DividedByD(a float64) Pos { return Pos{p.X / a, p.Y / a} }
func (p Pos) Plus(a Pos) Pos { return Pos{p.X + a.X, p.Y + a.Y} }
func (p Pos) Minus(a Pos) Pos { return Pos{p.X - a.X, p.Y - a.Y} }
func (p Pos) Times(a Pos) Pos { return Pos{p.X * a.X, p.Y * a.Y} }
func (p Pos) DividedBy(a Pos) Pos { return Pos{p.X / a.X, p.Y / a.Y} }
func (p Pos) AddedSize(s Size) Pos { return Pos{p.X + s.W, p.Y + s.H} }
func (p Pos) Equals(a Pos) bool { return p.X == a.X && p.Y == a.Y }
func (p *Pos) Add(a Pos) { p.X += a.X; p.Y += a.Y }
func (p *Pos) Subtract(a Pos) { p.X -= a.X; p.Y -= a.Y }
type FPos struct {
X float32 `json:"x"`
Y float32 `json:"y"`
}
func (p FPos) Pos() Pos {
return Pos{float64(p.X), float64(p.Y)}
}
// itterates through positions, making vector between them, optionally closing
func ForVectors(positions []Pos, close bool, handle func(s Pos, v Pos) bool) {
var i = 0
for i < len(positions) {
s := positions[i]
e := Pos{}
if i == len(positions)-1 {
if close {
e = positions[0].Minus(s)
} else {
break
}
} else {
e = positions[i+1]
}
if !handle(s, e.Minus(s)) {
break
}
i++
}
}
func GetTPositionInPosPath(path []Pos, t float64, close bool) Pos {
var length = 0.0
var resultPos = Pos{}
if t <= 0 {
return path[0]
}
ForVectors(path, close, func(s, v Pos) bool {
length += v.Length()
return true
})
if t >= 1 {
if close {
return path[0]
}
return path[len(path)-1]
}
tlen := t * length
length = 0.0
ForVectors(path, close, func(s, v Pos) bool {
vlen := v.Length()
l := length + vlen
if l >= tlen {
ldiff := tlen - length
f := ldiff / vlen
resultPos = s.Plus(v.TimesD(f))
return false
}
length = l
return true
})
return resultPos
}
func (p Pos) Copy() Pos {
return p
}
func PosFromAngleDeg(deg float64) Pos {
return Pos{math.Sin(zmath.DegToRad(deg)), -math.Cos(zmath.DegToRad(deg))}
}
func (p Pos) ToAngleDeg() float64 {
return zmath.RadToDeg(p.ArcTanToRad())
}
func PosLongLatToMeters(pos1 Pos, pos2 Pos) float64 {
R := 6371.0 // Radius of the earth in km
dLat := zmath.DegToRad(pos2.Y - pos1.Y)
dLon := zmath.DegToRad(pos2.X - pos1.X)
a := (math.Pow(math.Sin(dLat/2.0), 2.0) + math.Cos(zmath.DegToRad(pos1.Y))) * math.Cos(zmath.DegToRad(pos2.Y)) * math.Pow(math.Sin(dLon/2.0), 2.0)
c := 2.0 * float64(math.Asin(math.Sqrt(math.Abs(a))))
return c * R * 1000.0
}
func (pos Pos) ArcTanToRad() float64 {
var a = float64(math.Atan2(pos.Y, pos.X))
if a < 0 {
a += math.Pi * 2
}
return a
} | zgeo/pos.go | 0.829561 | 0.529385 | pos.go | starcoder |
package operator
import (
"github.com/matrixorigin/matrixone/pkg/container/nulls"
"github.com/matrixorigin/matrixone/pkg/container/types"
"github.com/matrixorigin/matrixone/pkg/container/vector"
"github.com/matrixorigin/matrixone/pkg/vectorize/ne"
"github.com/matrixorigin/matrixone/pkg/vm/process"
)
func nequal[T OrderedValue](d1, d2 interface{}, aScale, bScale int32) bool {
l, v := d1.(T), d2.(T)
return l != v
}
func nequal_B(d1, d2 interface{}, aScale, bScale int32) bool {
l, v := d1.(bool), d2.(bool)
return l != v
}
func nequal_D(d1, d2 interface{}, aScale, bScale int32) bool {
l, v := d1.(types.Decimal128), d2.(types.Decimal128)
return types.CompareDecimal128Decimal128(l, v, aScale, bScale) != 0
}
type NeOpFunc = func(d1, d2 interface{}, aScale, bScale int32) bool
var NeOpFuncMap = map[int]NeOpFunc{}
var NeOpFuncVec = []NeOpFunc{
nequal[int8], nequal[int16], nequal[int32], nequal[int64], nequal[uint8], nequal[uint16], nequal[uint32],
nequal[uint64], nequal[float32], nequal[float64], nequal[string], nequal_B, nequal[types.Date],
nequal[types.Datetime], nequal[types.Decimal64], nequal_D,
}
func InitNeOpFuncMap() {
for i := 0; i < len(NeOpFuncVec); i++ {
NeOpFuncMap[i] = NeOpFuncVec[i]
}
}
var StrNeOpFuncMap = map[int]StrCompOpFunc{}
var StrNeOpFuncVec = []StrCompOpFunc{
nequalCol_Col, nequalCol_Const, nequalConst_Col, nequalConst_Const,
}
func nequalCol_Col(d1, d2 interface{}) []bool {
lvs, rvs := d1.(*types.Bytes), d2.(*types.Bytes)
rs := make([]int64, len(lvs.Lengths))
rs = ne.StrNe(lvs, rvs, rs)
col := make([]bool, len(lvs.Lengths))
rsi := 0
for i := 0; i < len(col); i++ {
if rsi >= len(rs) {
break
}
if int64(i) == rs[rsi] {
col[i] = true
rsi++
} else {
col[i] = false
}
}
return col
}
func nequalCol_Const(d1, d2 interface{}) []bool {
lvs, rvs := d1.(*types.Bytes), d2.(*types.Bytes)
rs := make([]int64, len(lvs.Lengths))
rs = ne.StrNeScalar(rvs.Data, lvs, rs)
col := make([]bool, len(lvs.Lengths))
rsi := 0
for i := 0; i < len(col); i++ {
if rsi >= len(rs) {
break
}
if int64(i) == rs[rsi] {
col[i] = true
rsi++
} else {
col[i] = false
}
}
return col
}
func nequalConst_Col(d1, d2 interface{}) []bool {
lvs, rvs := d1.(*types.Bytes), d2.(*types.Bytes)
rs := make([]int64, len(rvs.Lengths))
rs = ne.StrNeScalar(lvs.Data, rvs, rs)
col := make([]bool, len(rvs.Lengths))
rsi := 0
for i := 0; i < len(col); i++ {
if rsi >= len(rs) {
break
}
if int64(i) == rs[rsi] {
col[i] = true
rsi++
} else {
col[i] = false
}
}
return col
}
func nequalConst_Const(d1, d2 interface{}) []bool {
lvs, rvs := d1.(*types.Bytes), d2.(*types.Bytes)
return []bool{string(lvs.Data) != string(rvs.Data)}
}
func InitStrNeOpFuncMap() {
for i := 0; i < len(StrEqOpFuncVec); i++ {
StrNeOpFuncMap[i] = StrNeOpFuncVec[i]
}
}
func ColNeCol[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
n := GetRetColLen[T](lv)
vec, err := proc.AllocVector(proc.GetBoolTyp(lv.Typ), int64(n)*1)
if err != nil {
return nil, err
}
nulls.Or(lv.Nsp, rv.Nsp, vec.Nsp)
vector.SetCol(vec, GetRetCol[T](lv, rv, col_col, NeOpFuncMap, StrNeOpFuncMap))
return vec, nil
}
func ColNeConst[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
n := GetRetColLen[T](lv)
vec, err := proc.AllocVector(proc.GetBoolTyp(lv.Typ), int64(n)*1)
if err != nil {
return nil, err
}
nulls.Or(lv.Nsp, rv.Nsp, vec.Nsp)
vector.SetCol(vec, GetRetCol[T](lv, rv, col_const, NeOpFuncMap, StrNeOpFuncMap))
return vec, nil
}
func ColNeNull[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
return proc.AllocScalarNullVector(proc.GetBoolTyp(lv.Typ)), nil
}
func ConstNeCol[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
return ColNeConst[T](rv, lv, proc)
}
func ConstNeConst[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
vec := proc.AllocScalarVector(proc.GetBoolTyp(lv.Typ))
vector.SetCol(vec, GetRetCol[T](lv, rv, const_const, NeOpFuncMap, StrNeOpFuncMap))
return vec, nil
}
func ConstNeNull[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
return proc.AllocScalarNullVector(proc.GetBoolTyp(lv.Typ)), nil
}
func NullNeCol[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
return proc.AllocScalarNullVector(proc.GetBoolTyp(lv.Typ)), nil
}
func NullNeConst[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
return proc.AllocScalarNullVector(proc.GetBoolTyp(lv.Typ)), nil
}
func NullNeNull[T DataValue](lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error) {
return proc.AllocScalarNullVector(proc.GetBoolTyp(lv.Typ)), nil
}
type NeFunc = func(lv, rv *vector.Vector, proc *process.Process) (*vector.Vector, error)
var NeFuncMap = map[int]NeFunc{}
var NeFuncVec = []NeFunc{
ColNeCol[int8], ColNeCol[int16], ColNeCol[int32], ColNeCol[int64], ColNeCol[uint8], ColNeCol[uint16],
ColNeCol[uint32], ColNeCol[uint64], ColNeCol[float32], ColNeCol[float64], ColNeCol[string], ColNeCol[bool],
ColNeCol[types.Date], ColNeCol[types.Datetime], ColNeCol[types.Decimal64], ColNeCol[types.Decimal128],
ColNeConst[int8], ColNeConst[int16], ColNeConst[int32], ColNeConst[int64], ColNeConst[uint8], ColNeConst[uint16],
ColNeConst[uint32], ColNeConst[uint64], ColNeConst[float32], ColNeConst[float64], ColNeConst[string], ColNeConst[bool],
ColNeConst[types.Date], ColNeConst[types.Datetime], ColNeConst[types.Decimal64], ColNeConst[types.Decimal128],
ColNeNull[int8], ColNeNull[int16], ColNeNull[int32], ColNeNull[int64], ColNeNull[uint8], ColNeNull[uint16],
ColNeNull[uint32], ColNeNull[uint64], ColNeNull[float32], ColNeNull[float64], ColNeNull[string], ColNeNull[bool],
ColNeNull[types.Date], ColNeNull[types.Datetime], ColNeNull[types.Decimal64], ColNeNull[types.Decimal128],
ConstNeCol[int8], ConstNeCol[int16], ConstNeCol[int32], ConstNeCol[int64], ConstNeCol[uint8], ConstNeCol[uint16],
ConstNeCol[uint32], ConstNeCol[uint64], ConstNeCol[float32], ConstNeCol[float64], ConstNeCol[string], ConstNeCol[bool],
ConstNeCol[types.Date], ConstNeCol[types.Datetime], ConstNeCol[types.Decimal64], ConstNeCol[types.Decimal128],
ConstNeConst[int8], ConstNeConst[int16], ConstNeConst[int32], ConstNeConst[int64], ConstNeConst[uint8], ConstNeConst[uint16],
ConstNeConst[uint32], ConstNeConst[uint64], ConstNeConst[float32], ConstNeConst[float64], ConstNeConst[string], ConstNeConst[bool],
ConstNeConst[types.Date], ConstNeConst[types.Datetime], ConstNeConst[types.Decimal64], ConstNeConst[types.Decimal128],
ConstNeNull[int8], ConstNeNull[int16], ConstNeNull[int32], ConstNeNull[int64], ConstNeNull[uint8], ConstNeNull[uint16],
ConstNeNull[uint32], ConstNeNull[uint64], ConstNeNull[float32], ConstNeNull[float64], ConstNeNull[string], ConstNeNull[bool],
ConstNeNull[types.Date], ConstNeNull[types.Datetime], ConstNeNull[types.Decimal64], ConstNeNull[types.Decimal128],
NullNeCol[int8], NullNeCol[int16], NullNeCol[int32], NullNeCol[int64], NullNeCol[uint8], NullNeCol[uint16],
NullNeCol[uint32], NullNeCol[uint64], NullNeCol[float32], NullNeCol[float64], NullNeCol[string], NullNeCol[bool],
NullNeCol[types.Date], NullNeCol[types.Datetime], NullNeCol[types.Decimal64], NullNeCol[types.Decimal128],
NullNeConst[int8], NullNeConst[int16], NullNeConst[int32], NullNeConst[int64], NullNeConst[uint8], NullNeConst[uint16],
NullNeConst[uint32], NullNeConst[uint64], NullNeConst[float32], NullNeConst[float64], NullNeConst[string], NullNeConst[bool],
NullNeConst[types.Date], NullNeConst[types.Datetime], NullNeConst[types.Decimal64], NullNeConst[types.Decimal128],
NullNeNull[int8], NullNeNull[int16], NullNeNull[int32], NullNeNull[int64], NullNeNull[uint8], NullNeNull[uint16],
NullNeNull[uint32], NullNeNull[uint64], NullNeNull[float32], NullNeNull[float64], NullNeNull[string], NullNeNull[bool],
NullNeNull[types.Date], NullNeNull[types.Datetime], NullNeNull[types.Decimal64], NullNeNull[types.Decimal128],
}
func InitNeFuncMap() {
InitNeOpFuncMap()
InitStrNeOpFuncMap()
for i := 0; i < len(NeFuncVec); i++ {
NeFuncMap[i] = NeFuncVec[i]
}
}
func NeDataValue[T DataValue](vectors []*vector.Vector, proc *process.Process) (*vector.Vector, error) {
lv := vectors[0]
rv := vectors[1]
lt, rt := GetTypeID(lv), GetTypeID(rv)
dataID := GetDatatypeID[T]()
vec, err := NeFuncMap[(lt*3+rt)*dataTypeNum+dataID](lv, rv, proc)
if err != nil {
return nil, err
}
return vec, nil
} | pkg/sql/plan2/function/operator/ne.go | 0.591487 | 0.46557 | ne.go | starcoder |
package ast
import (
"fmt"
)
func walkLexNode(node LexNode, visitor LexNodeVisitor) LexNodeVisitor {
switch n := node.(type) {
case *LexAlt:
return n.Walk(visitor)
case *LexCharLit:
return n.Walk(visitor)
case *LexCharRange:
return n.Walk(visitor)
case *LexDot:
return n.Walk(visitor)
case *LexGroupPattern:
return n.Walk(visitor)
case *LexIgnoredTokDef:
return n.Walk(visitor)
case *LexImports:
return n.Walk(visitor)
case *LexOptPattern:
return n.Walk(visitor)
case *LexPattern:
return n.Walk(visitor)
case *LexProductions:
return n.Walk(visitor)
case *LexRegDef:
return n.Walk(visitor)
case *LexRegDefId:
return n.Walk(visitor)
case *LexRepPattern:
return n.Walk(visitor)
case *LexTokDef:
return n.Walk(visitor)
}
panic(fmt.Sprintf("Unsupported LexNode type %T", node))
}
func (this *LexAlt) Walk(visitor LexNodeVisitor) LexNodeVisitor {
for _, term := range this.Terms {
if v := walkLexNode(term, visitor); v == nil {
return nil
}
}
return visitor.Visit(this)
}
func (this *LexCharLit) Walk(visitor LexNodeVisitor) LexNodeVisitor {
return visitor.Visit(this)
}
func (this *LexCharRange) Walk(visitor LexNodeVisitor) LexNodeVisitor {
return visitor.Visit(this)
}
func (this *LexDot) Walk(visitor LexNodeVisitor) LexNodeVisitor {
return visitor.Visit(this)
}
func (this *LexGroupPattern) Walk(visitor LexNodeVisitor) LexNodeVisitor {
for _, term := range this.LexPattern.Alternatives {
if v := walkLexNode(term, visitor); v == nil {
return nil
}
}
return visitor.Visit(this)
}
func (this *LexIgnoredTokDef) Walk(visitor LexNodeVisitor) LexNodeVisitor {
fmt.Printf(" walk on: %s\n", this.Id())
return visitor.Visit(this)
}
func (this *LexImports) Walk(visitor LexNodeVisitor) LexNodeVisitor {
return visitor.Visit(this)
}
func (this *LexOptPattern) Walk(visitor LexNodeVisitor) LexNodeVisitor {
for _, term := range this.LexPattern.Alternatives {
if v := walkLexNode(term, visitor); v == nil {
return nil
}
}
return visitor.Visit(this)
}
func (this *LexPattern) Walk(visitor LexNodeVisitor) LexNodeVisitor {
for _, term := range this.Alternatives {
if v := walkLexNode(term, visitor); v == nil {
return nil
}
}
return visitor.Visit(this)
}
func (this *LexProductions) Walk(visitor LexNodeVisitor) LexNodeVisitor {
for _, term := range this.Productions {
if v := walkLexNode(term, visitor); v == nil {
return nil
}
}
return visitor.Visit(this)
}
func (this *LexRegDef) Walk(visitor LexNodeVisitor) LexNodeVisitor {
return visitor.Visit(this)
}
func (this *LexRegDefId) Walk(visitor LexNodeVisitor) LexNodeVisitor {
return visitor.Visit(this)
}
func (this *LexRepPattern) Walk(visitor LexNodeVisitor) LexNodeVisitor {
for _, term := range this.LexPattern.Alternatives {
if v := walkLexNode(term, visitor); v == nil {
return nil
}
}
return visitor.Visit(this)
}
func (this *LexTokDef) Walk(visitor LexNodeVisitor) LexNodeVisitor {
return visitor.Visit(this)
} | internal/ast/lexnodewalker.go | 0.609757 | 0.490053 | lexnodewalker.go | starcoder |
package factory
import (
"github.com/Yiling-J/carrier/examples/ent_recipe/ent"
"context"
)
type EntIngredientMutator struct {
Name string
}
type entIngredientMutation struct {
nameType int
nameFunc func(ctx context.Context, i *EntIngredientMutator, c int, creator *ent.IngredientCreate) error
afterCreateFunc func(ctx context.Context, i *ent.Ingredient) error
}
type EntIngredientMetaFactory struct {
mutation entIngredientMutation
}
type entIngredientTrait struct {
mutation entIngredientMutation
updates []func(m *entIngredientMutation)
}
// EntIngredientTrait returns an empty trait ready to collect mutation updates.
func EntIngredientTrait() *entIngredientTrait {
	return &entIngredientTrait{}
}

// afterCreateMutateFunc returns an update that installs fn as the hook run
// after each Ingredient has been saved.
func (*entIngredientMutation) afterCreateMutateFunc(fn func(ctx context.Context, i *ent.Ingredient) error) func(m *entIngredientMutation) {
	return func(m *entIngredientMutation) {
		m.afterCreateFunc = fn
	}
}
// nameSequenceMutateFunc returns an update installing fn as a sequence-typed
// Name generator; fn receives the per-entity counter value c.
func (*entIngredientMutation) nameSequenceMutateFunc(fn func(ctx context.Context, i int) (string, error)) func(m *entIngredientMutation) {
	return func(m *entIngredientMutation) {
		m.nameType = TypeSequence
		m.nameFunc = func(ctx context.Context, i *EntIngredientMutator, c int, creator *ent.IngredientCreate) error {
			// A nil generator is a no-op: the field is simply not set.
			if fn == nil {
				return nil
			}
			value, err := fn(ctx, c)
			if err != nil {
				return err
			}
			creator.SetName(value)
			i.Name = value
			return nil
		}
	}
}

// nameLazyMutateFunc returns an update installing fn as a lazy-typed Name
// generator; fn may read fields already produced on the mutator.
func (*entIngredientMutation) nameLazyMutateFunc(fn func(ctx context.Context, i *EntIngredientMutator) (string, error)) func(m *entIngredientMutation) {
	return func(m *entIngredientMutation) {
		m.nameType = TypeLazy
		m.nameFunc = func(ctx context.Context, i *EntIngredientMutator, c int, creator *ent.IngredientCreate) error {
			// A nil generator is a no-op: the field is simply not set.
			if fn == nil {
				return nil
			}
			value, err := fn(ctx, i)
			if err != nil {
				return err
			}
			creator.SetName(value)
			i.Name = value
			return nil
		}
	}
}

// nameDefaultMutateFunc returns an update that always assigns the fixed
// value v to Name.
func (*entIngredientMutation) nameDefaultMutateFunc(v string) func(m *entIngredientMutation) {
	return func(m *entIngredientMutation) {
		m.nameType = TypeDefault
		m.nameFunc = func(ctx context.Context, i *EntIngredientMutator, c int, creator *ent.IngredientCreate) error {
			creator.SetName(v)
			i.Name = v
			return nil
		}
	}
}

// nameFactoryMutateFunc returns an update installing fn as a factory-typed
// Name generator; fn is called afresh for each created entity.
func (*entIngredientMutation) nameFactoryMutateFunc(fn func(ctx context.Context) (string, error)) func(m *entIngredientMutation) {
	return func(m *entIngredientMutation) {
		m.nameType = TypeFactory
		m.nameFunc = func(ctx context.Context, i *EntIngredientMutator, c int, creator *ent.IngredientCreate) error {
			// A nil generator is a no-op: the field is simply not set.
			if fn == nil {
				return nil
			}
			value, err := fn(ctx)
			if err != nil {
				return err
			}
			creator.SetName(value)
			i.Name = value
			return nil
		}
	}
}
// SetNameSequence configures Name to be generated from the creation counter.
func (f *EntIngredientMetaFactory) SetNameSequence(fn func(ctx context.Context, i int) (string, error)) *EntIngredientMetaFactory {
	f.mutation.nameSequenceMutateFunc(fn)(&f.mutation)
	return f
}

// SetNameLazy configures Name to be generated after other fields, with
// access to the in-progress mutator.
func (f *EntIngredientMetaFactory) SetNameLazy(fn func(ctx context.Context, i *EntIngredientMutator) (string, error)) *EntIngredientMetaFactory {
	f.mutation.nameLazyMutateFunc(fn)(&f.mutation)
	return f
}

// SetNameDefault configures Name to always take the fixed value v.
func (f *EntIngredientMetaFactory) SetNameDefault(v string) *EntIngredientMetaFactory {
	f.mutation.nameDefaultMutateFunc(v)(&f.mutation)
	return f
}

// SetNameFactory configures Name to be produced by fn for every entity.
func (f *EntIngredientMetaFactory) SetNameFactory(fn func(ctx context.Context) (string, error)) *EntIngredientMetaFactory {
	f.mutation.nameFactoryMutateFunc(fn)(&f.mutation)
	return f
}

// SetNameSequence records a sequence-typed Name mutation on the trait; it is
// applied to a mutation only when the trait itself is applied.
func (t *entIngredientTrait) SetNameSequence(fn func(ctx context.Context, i int) (string, error)) *entIngredientTrait {
	t.updates = append(t.updates, t.mutation.nameSequenceMutateFunc(fn))
	return t
}

// SetNameLazy records a lazy-typed Name mutation on the trait.
func (t *entIngredientTrait) SetNameLazy(fn func(ctx context.Context, i *EntIngredientMutator) (string, error)) *entIngredientTrait {
	t.updates = append(t.updates, t.mutation.nameLazyMutateFunc(fn))
	return t
}

// SetNameDefault records a fixed-value Name mutation on the trait.
func (t *entIngredientTrait) SetNameDefault(v string) *entIngredientTrait {
	t.updates = append(t.updates, t.mutation.nameDefaultMutateFunc(v))
	return t
}

// SetNameFactory records a factory-typed Name mutation on the trait.
func (t *entIngredientTrait) SetNameFactory(fn func(ctx context.Context) (string, error)) *entIngredientTrait {
	t.updates = append(t.updates, t.mutation.nameFactoryMutateFunc(fn))
	return t
}

// SetAfterCreateFunc installs fn as the hook run after each save.
func (f *EntIngredientMetaFactory) SetAfterCreateFunc(fn func(ctx context.Context, i *ent.Ingredient) error) *EntIngredientMetaFactory {
	f.mutation.afterCreateFunc = fn
	return f
}

// SetAfterCreateFunc records the post-save hook on the trait.
func (t *entIngredientTrait) SetAfterCreateFunc(fn func(ctx context.Context, i *ent.Ingredient) error) *entIngredientTrait {
	t.updates = append(t.updates, t.mutation.afterCreateMutateFunc(fn))
	return t
}
// Build finalizes the meta factory into a reusable EntIngredientFactory with
// a fresh creation counter.
func (f *EntIngredientMetaFactory) Build() *EntIngredientFactory {
	return &EntIngredientFactory{meta: *f, counter: &Counter{}}
}

// EntIngredientFactory creates Ingredient entities from a built meta factory,
// sharing one counter across every builder it spawns.
type EntIngredientFactory struct {
	meta EntIngredientMetaFactory
	counter *Counter
	client *ent.Client
}
// SetName starts a single-entity builder with Name overridden to i.
func (f *EntIngredientFactory) SetName(i string) *EntIngredientBuilder {
	builder := &EntIngredientBuilder{mutation: f.meta.mutation, counter: f.counter, factory: f}
	builder.SetName(i)
	builder.client = f.client
	return builder
}

// Create builds and saves one Ingredient using the factory's configuration.
func (f *EntIngredientFactory) Create(ctx context.Context) (*ent.Ingredient, error) {
	builder := &EntIngredientBuilder{mutation: f.meta.mutation, counter: f.counter, factory: f}
	builder.client = f.client
	return builder.Create(ctx)
}

// CreateV is like Create but returns the entity by value.
func (f *EntIngredientFactory) CreateV(ctx context.Context) (ent.Ingredient, error) {
	builder := &EntIngredientBuilder{mutation: f.meta.mutation, counter: f.counter, factory: f}
	builder.client = f.client
	return builder.CreateV(ctx)
}

// CreateBatch builds and saves n Ingredients, returning pointers.
func (f *EntIngredientFactory) CreateBatch(ctx context.Context, n int) ([]*ent.Ingredient, error) {
	builder := &EntIngredientBuilder{mutation: f.meta.mutation, counter: f.counter, factory: f}
	builder.client = f.client
	return builder.CreateBatch(ctx, n)
}

// CreateBatchV builds and saves n Ingredients, returning values.
func (f *EntIngredientFactory) CreateBatchV(ctx context.Context, n int) ([]ent.Ingredient, error) {
	builder := &EntIngredientBuilder{mutation: f.meta.mutation, counter: f.counter, factory: f}
	builder.client = f.client
	return builder.CreateBatchV(ctx, n)
}

// Client sets the ent client that spawned builders will persist through.
func (f *EntIngredientFactory) Client(c *ent.Client) *EntIngredientFactory {
	f.client = c
	return f
}
// EntIngredientBuilder assembles a single Ingredient, allowing per-build
// field overrides on top of the factory's configured mutation.
type EntIngredientBuilder struct {
	factory *EntIngredientFactory
	mutation entIngredientMutation
	counter *Counter
	nameOverride string
	nameOverriden bool
	client *ent.Client
}

// Client sets the ent client used to persist the entity.
func (b *EntIngredientBuilder) Client(c *ent.Client) *EntIngredientBuilder {
	b.client = c
	return b
}

// SetName overrides the Name field for this build only; the override takes
// precedence over any configured mutation.
func (b *EntIngredientBuilder) SetName(i string) *EntIngredientBuilder {
	b.nameOverride = i
	b.nameOverriden = true
	return b
}
// CreateV builds and saves one Ingredient, returning it by value. On error
// the zero Ingredient is returned alongside the error.
func (b *EntIngredientBuilder) CreateV(ctx context.Context) (ent.Ingredient, error) {
	ptr, err := b.Create(ctx)
	if err != nil {
		return ent.Ingredient{}, err
	}
	return *ptr, nil
}
// Create builds one Ingredient and saves it. Field mutations run in phases:
// pre (default/factory/sequence fields), then lazy (fields that may read
// earlier results from the mutator), then the save, then the optional
// afterCreate hook, then post functions.
func (b *EntIngredientBuilder) Create(ctx context.Context) (*ent.Ingredient, error) {
	var preSlice = []func(ctx context.Context, i *EntIngredientMutator, c int, creator *ent.IngredientCreate) error{}
	var lazySlice = []func(ctx context.Context, i *EntIngredientMutator, c int, creator *ent.IngredientCreate) error{}
	var postSlice = []func(ctx context.Context, i *ent.Ingredient, c int, creator *ent.IngredientCreate) error{}
	// Each created entity gets an increasing index from the shared counter
	// (consumed by sequence-typed field functions via c).
	index := b.counter.Get()
	_ = index
	client := b.client
	entBuilder := client.Ingredient.Create()
	if b.nameOverriden {
		// An explicit SetName on the builder wins over any configured mutation.
		preSlice = append(preSlice, func(ctx context.Context, i *EntIngredientMutator, c int, creator *ent.IngredientCreate) error {
			value := b.nameOverride
			creator.SetName(value)
			i.Name = value
			return nil
		})
	} else {
		// Schedule the configured Name mutation in the phase matching its type.
		switch b.mutation.nameType {
		case TypeDefault:
			preSlice = append(preSlice, b.mutation.nameFunc)
		case TypeLazy:
			lazySlice = append(lazySlice, b.mutation.nameFunc)
		case TypeSequence:
			preSlice = append(preSlice, b.mutation.nameFunc)
		case TypeFactory:
			preSlice = append(preSlice, b.mutation.nameFunc)
		}
	}
	v := &EntIngredientMutator{}
	for _, f := range preSlice {
		err := f(ctx, v, index, entBuilder)
		if err != nil {
			return nil, err
		}
	}
	// Lazy functions run after all pre functions so they can read the mutator.
	for _, f := range lazySlice {
		err := f(ctx, v, index, entBuilder)
		if err != nil {
			return nil, err
		}
	}
	new, err := entBuilder.Save(ctx)
	if err != nil {
		return nil, err
	}
	if b.mutation.afterCreateFunc != nil {
		err := b.mutation.afterCreateFunc(ctx, new)
		if err != nil {
			return nil, err
		}
	}
	for _, f := range postSlice {
		err := f(ctx, new, index, entBuilder)
		if err != nil {
			return nil, err
		}
	}
	return new, nil
}
// CreateBatch builds and saves n Ingredients one at a time. On failure the
// entities created so far are returned together with the error.
func (b *EntIngredientBuilder) CreateBatch(ctx context.Context, n int) ([]*ent.Ingredient, error) {
	var out []*ent.Ingredient
	for created := 0; created < n; created++ {
		item, err := b.Create(ctx)
		if err != nil {
			return out, err
		}
		out = append(out, item)
	}
	return out, nil
}
func (b *EntIngredientBuilder) CreateBatchV(ctx context.Context, n int) ([]ent.Ingredient, error) {
var results []ent.Ingredient
for i := 0; i < n; i++ {
d, err := b.CreateV(ctx)
if err != nil {
return results, err
}
results = append(results, d)
}
return results, nil
} | examples/ent_recipe/carrier/factory/ent_ingredient.go | 0.510741 | 0.484685 | ent_ingredient.go | starcoder |
package mapqueryparam
import (
"encoding/json"
"fmt"
"math"
"net/url"
"reflect"
"strconv"
"time"
)
// zeroValue is the invalid (zero) reflect.Value, used as a sentinel meaning
// "no previous value" when walking struct fields.
var zeroValue reflect.Value

// DecodeValues takes a set of query parameters and uses reflection to decode the content into an output structure.
// Output must be a pointer to a struct. Same as Decode.
func DecodeValues(query url.Values, v interface{}) error {
	return Decode(query, v)
}
// Decode takes a set of query parameters and uses reflection to decode the content into an output structure.
// Output must be a pointer to a struct. Same as DecodeValues.
func Decode(query map[string][]string, v interface{}) error {
	val := reflect.ValueOf(v)
	t := reflect.TypeOf(v)
	if val.Kind() != reflect.Ptr {
		return newDecodeError("must decode to pointer", "", nil)
	}
	// Walk down through (possibly nested) pointers, allocating along the way.
	for t.Kind() == reflect.Ptr {
		t = t.Elem()
		if val.IsNil() {
			val.Set(reflect.New(t))
		}
		val = val.Elem()
	}
	if t.Kind() != reflect.Struct {
		return newDecodeError(fmt.Sprintf("cannot decode into value of type: %s", t.String()), "", nil)
	}
	// Decode into a fresh value first so a failed decode leaves v untouched;
	// the original value is still consulted for fields absent from the query.
	newVal := reflect.New(t)
	err := decodeFields(query, val, newVal.Elem())
	if err != nil {
		return err
	}
	val.Set(newVal.Elem())
	return nil
}
// decodeFields iterates over the fields of the value passed to it, decodes the query values appropriate for the field,
// and stores the values in the field. The original value is also passed and is used for fields that are found in the
// query.
func decodeFields(query map[string][]string, oldVal reflect.Value, newVal reflect.Value) error {
t := newVal.Type()
for i := 0; i < newVal.NumField(); i++ {
f := t.Field(i)
// don't decode to unexported fields
isUnexported := f.PkgPath != ""
if isUnexported {
continue
}
fTyp := f.Type
fVal := newVal.Field(i)
oldFVal := zeroValue
if oldVal != zeroValue {
oldFVal = oldVal.Field(i)
}
// iterate over embedded fields
if f.Anonymous {
for fTyp.Kind() == reflect.Ptr {
fTyp = fTyp.Elem()
if fVal.IsNil() {
fVal.Set(reflect.New(fTyp))
}
if oldFVal != zeroValue && oldFVal.IsNil() {
oldFVal = zeroValue
}
fVal = fVal.Elem()
if oldFVal != zeroValue {
oldFVal = oldFVal.Elem()
}
}
err := decodeFields(query, oldFVal, fVal)
if err != nil {
return err
}
continue
}
var s []string
var tag string
var ok bool
fieldTags := getFieldTags(f)
for _, tag = range fieldTags {
if s, ok = query[tag]; ok {
break
}
}
if len(s) == 0 {
if oldFVal != zeroValue {
fVal.Set(oldFVal)
}
continue
}
err := decodeField(s, fVal)
if err != nil {
return newDecodeError(fmt.Sprintf("unable to decode value in field '%s'", tag), tag, err)
}
}
return nil
}
// decodeField decodes a set of parameter strings as a field of the output struct. Arrays and slices are represented as
// multiple values. Other values are decoded as a single value.
func decodeField(s []string, v reflect.Value) error {
	if len(s) == 0 {
		return nil
	}
	switch v.Kind() {
	case reflect.Array:
		// Fill at most len(v) elements; extra query values are ignored.
		for i := 0; i < v.Len() && i < len(s); i++ {
			iVal := v.Index(i)
			err := decodeValue(s[i], iVal.Addr())
			if err != nil {
				return err
			}
		}
	case reflect.Slice:
		// Accumulate into a fresh slice so a partial decode is never stored.
		sVal := reflect.New(v.Type()).Elem()
		for i := 0; i < len(s); i++ {
			iVal := reflect.New(v.Type().Elem())
			err := decodeValue(s[i], iVal)
			if err != nil {
				return err
			}
			sVal = reflect.Append(sVal, iVal.Elem())
		}
		v.Set(sVal)
	case reflect.Ptr:
		// Allocate through nil pointers, then decode into the pointee.
		if v.IsNil() {
			v.Set(reflect.New(v.Type().Elem()))
		}
		return decodeField(s, v.Elem())
	default:
		// Scalar / struct / map field: only the first query value is used.
		return decodeValue(s[0], v.Addr())
	}
	return nil
}
// decodeValue decodes a parameter string as a value. Base types are parsed using `strconv`. Maps and structs are
// decoded as json objects using standard json unmarshaling. Channels and functions are skipped, as they're not
// supported.
//
// v is a pointer (Addr) to the destination, so v.Elem() is the value to set.
func decodeValue(s string, v reflect.Value) error {
	switch v.Elem().Kind() {
	case reflect.String:
		v.Elem().SetString(s)
	case reflect.Bool:
		b, err := strconv.ParseBool(s)
		if err != nil {
			return err
		}
		v.Elem().SetBool(b)
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		i, err := strconv.ParseInt(s, 10, 64)
		if err != nil {
			return err
		}
		v.Elem().SetInt(i)
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		i, err := strconv.ParseUint(s, 10, 64)
		if err != nil {
			return err
		}
		v.Elem().SetUint(i)
	case reflect.Float32, reflect.Float64:
		f, err := strconv.ParseFloat(s, 64)
		if err != nil {
			return err
		}
		v.Elem().SetFloat(f)
	case reflect.Complex64, reflect.Complex128:
		f, err := strconv.ParseComplex(s, 128)
		if err != nil {
			return err
		}
		v.Elem().SetComplex(f)
	case reflect.Map, reflect.Struct:
		i := v.Interface()
		switch i.(type) {
		case *time.Time:
			// time.Time gets the flexible parser (RFC3339 / unix / JSON)
			// instead of plain JSON unmarshaling.
			t, err := parseTime(s)
			if err != nil {
				return err
			}
			v.Elem().Set(reflect.ValueOf(t))
		default:
			err := json.Unmarshal([]byte(s), i)
			if err != nil {
				return err
			}
		}
	case reflect.Chan, reflect.Func:
		// Unsupported but deliberately skipped without error.
	default:
		return fmt.Errorf("unsupported field kind: %s", v.Elem().Kind().String())
	}
	return nil
}
// parseTime interprets s as a time.Time. Three forms are tried in order:
// an RFC3339(Nano) timestamp, a float count of unix seconds, and a
// JSON-marshalled time.Time value. When all three fail, the error from the
// RFC3339 attempt is returned.
func parseTime(s string) (time.Time, error) {
	t, rfcErr := time.Parse(time.RFC3339Nano, s)
	if rfcErr == nil {
		return t, nil
	}
	if secs, ferr := strconv.ParseFloat(s, 64); ferr == nil {
		whole, frac := math.Modf(secs)
		return time.Unix(int64(whole), int64(frac*(1e9))), nil
	}
	if jerr := json.Unmarshal([]byte(s), &t); jerr == nil {
		return t, nil
	}
	return time.Time{}, rfcErr
}
package timeseries
import (
"errors"
"time"
tu "github.com/grokify/simplego/time/timeutil"
)
// TimeSeriesSimple is a bare series: a name, a display name, and an
// unaggregated list of observation times.
type TimeSeriesSimple struct {
	Name string
	DisplayName string
	Times []time.Time
}

// NewTimeSeriesSimple returns an empty TimeSeriesSimple with the given names.
func NewTimeSeriesSimple(name, displayName string) TimeSeriesSimple {
	return TimeSeriesSimple{
		Name: name,
		DisplayName: displayName,
		Times: []time.Time{}}
}
// ToTimeSeriesQuarter aggregates the raw times into a TimeSeries, adding one
// item of value 1 at the start of the quarter containing each observation.
func (tss *TimeSeriesSimple) ToTimeSeriesQuarter() TimeSeries {
	ts := NewTimeSeries(tss.Name)
	ts.SeriesName = tss.Name
	for _, t := range tss.Times {
		ts.AddItems(TimeItem{
			SeriesName: tss.Name,
			Time: tu.QuarterStart(t),
			Value: int64(1)})
	}
	return ts
}
// TimeSeriesFunnel is a named collection of simple series plus the order in
// which they should be presented.
type TimeSeriesFunnel struct {
	Series map[string]TimeSeriesSimple
	Order []string
}

// Times returns every observation time across all series, unsorted.
func (tsf *TimeSeriesFunnel) Times() []time.Time {
	times := []time.Time{}
	for _, s := range tsf.Series {
		times = append(times, s.Times...)
	}
	return times
}

// TimesSorted returns every observation time across all series, sorted.
func (tsf *TimeSeriesFunnel) TimesSorted() []time.Time {
	times := tsf.Times()
	return tu.Sort(times)
}
func (tsf *TimeSeriesFunnel) TimeSeriesSetByQuarter() (TimeSeriesSet, error) {
dss := TimeSeriesSet{Order: tsf.Order}
seriesMapQuarter := map[string]TimeSeries{}
allTimes := []time.Time{}
for _, s := range tsf.Series {
allTimes = append(allTimes, s.Times...)
}
if len(allTimes) == 0 {
return dss, errors.New("No times")
}
earliest, err := tu.Earliest(allTimes, false)
if err != nil {
return dss, err
}
latest, err := tu.Latest(allTimes, false)
if err != nil {
return dss, err
}
earliestQuarter := tu.QuarterStart(earliest)
latestQuarter := tu.QuarterStart(latest)
sliceQuarter := tu.QuarterSlice(earliestQuarter, latestQuarter)
dss.Times = sliceQuarter
for name, tss := range tsf.Series {
timeSeries := tss.ToTimeSeriesQuarter()
timeSeries.SeriesName = tss.Name
for _, q := range sliceQuarter {
q = q.UTC()
rfc := q.Format(time.RFC3339)
if _, ok := timeSeries.ItemMap[rfc]; !ok {
timeSeries.AddItems(TimeItem{
SeriesName: tss.Name,
Time: q,
Value: int64(0)})
}
}
seriesMapQuarter[name] = timeSeries
}
dss.Series = seriesMapQuarter
return dss, nil
} | data/timeseries/time_series_simple.go | 0.636805 | 0.418994 | time_series_simple.go | starcoder |
package values
import (
"fmt"
"github.com/influxdata/flux/codes"
"github.com/influxdata/flux/internal/errors"
"github.com/influxdata/flux/semantic"
)
// Scope is a nested set of name-to-value bindings with an optional parent,
// plus a slot for the scope's return value.
type Scope interface {
	// Lookup a name in the scope.
	Lookup(name string) (Value, bool)
	// LocalLookup a name in current scope only.
	LocalLookup(name string) (Value, bool)
	// Set binds a variable in the current scope.
	Set(name string, v Value)
	// SetOption binds a variable in the package option scope.
	// Setting an option must occur on the specific package value.
	// If the package cannot be found no option is set, in which case the boolean return is false.
	// An error is reported if the specified package is not a package value.
	SetOption(pkg, name string, v Value) (bool, error)
	// Nest creates a new scope by nesting the current scope.
	// If the passed in object is not nil, its values will be added to the new nested scope.
	Nest(Object) Scope
	// Pop returns the parent of the current scope.
	Pop() Scope
	// Size is the number of visible names in scope.
	Size() int
	// Range iterates over all variable bindings in scope applying f.
	Range(f func(k string, v Value))
	// LocalRange iterates over all variable bindings only in the current scope.
	LocalRange(f func(k string, v Value))
	// SetReturn binds the return value of the scope.
	SetReturn(Value)
	// Return reports the bound return value of the scope.
	Return() Value
	// Copy creates a deep copy of the scope, values are not copied.
	// Copy preserves the nesting structure.
	Copy() Scope
}

// scope is the default Scope implementation: local bindings in an Object,
// an optional parent scope, and the recorded return value.
type scope struct {
	parent Scope
	values Object
	returnValue Value
}
// NewScope returns an empty root scope with no parent.
func NewScope() Scope {
	return &scope{values: NewObject()}
}

// NewNestedScope returns a scope whose parent is parent and whose local
// bindings come from obj; an empty object is used when obj is nil.
func NewNestedScope(parent Scope, obj Object) Scope {
	if obj == nil {
		obj = NewObject()
	}
	return &scope{parent: parent, values: obj}
}
// Lookup resolves name, consulting this scope first and then its ancestors.
func (s *scope) Lookup(name string) (Value, bool) {
	v, found := s.values.Get(name)
	if found || s.parent == nil {
		return v, found
	}
	return s.parent.Lookup(name)
}

// LocalLookup resolves name against this scope's own bindings only.
func (s *scope) LocalLookup(name string) (Value, bool) {
	return s.values.Get(name)
}

// Set binds name to v in this scope.
func (s *scope) Set(name string, v Value) {
	s.values.Set(name, v)
}
// SetOption sets option name to v on the package bound as pkg, searching
// ancestor scopes when the package is not bound locally. It reports whether
// the package was found; binding pkg to a non-package value is an error.
func (s *scope) SetOption(pkg, name string, v Value) (bool, error) {
	pv, found := s.LocalLookup(pkg)
	if !found {
		if parent := s.Pop(); parent != nil {
			return parent.SetOption(pkg, name, v)
		}
		return false, nil
	}
	p, isPkg := pv.(Package)
	if !isPkg {
		return false, errors.Newf(codes.Invalid, "cannot set option %q is not a package", pkg)
	}
	p.SetOption(name, v)
	return true, nil
}
// Nest returns a child scope of s seeded with the bindings in obj.
func (s *scope) Nest(obj Object) Scope {
	return NewNestedScope(s, obj)
}

// Pop returns the parent of this scope (nil for the root).
func (s *scope) Pop() Scope {
	return s.parent
}

// Size reports the total number of bindings visible from this scope,
// including those inherited from ancestors.
func (s *scope) Size() int {
	n := s.values.Len()
	if s.parent != nil {
		n += s.parent.Size()
	}
	return n
}

// Range applies f to every binding visible from this scope, innermost first.
func (s *scope) Range(f func(k string, v Value)) {
	s.values.Range(f)
	if s.parent != nil {
		s.parent.Range(f)
	}
}

// LocalRange applies f to the bindings of this scope only.
func (s *scope) LocalRange(f func(k string, v Value)) {
	s.values.Range(f)
}
// SetReturn records v as the scope's return value.
func (s *scope) SetReturn(v Value) {
	s.returnValue = v
}

// Return reports the value recorded by SetReturn.
func (s *scope) Return() Value {
	return s.returnValue
}

// Copy deep-copies the scope chain: the Object containers are duplicated at
// every level, while the Values they hold are shared with the original.
func (s *scope) Copy() Scope {
	dup := NewObjectWithBacking(s.values.Len())
	s.values.Range(func(k string, v Value) {
		dup.Set(k, v)
	})
	copied := &scope{values: dup}
	if s.parent != nil {
		copied.parent = s.parent.Copy()
	}
	return copied
}
// FormattedScope produces a fmt.Formatter for pretty printing a scope.
func FormattedScope(scope Scope) fmt.Formatter {
	return scopeFormatter{scope}
}

// scopeFormatter adapts a Scope to fmt.Formatter for debug output.
type scopeFormatter struct {
	scope Scope
}

// Format writes the scope chain as "[{k = v, ...} -> {...} -> nil ]",
// innermost scope first.
func (s scopeFormatter) Format(state fmt.State, _ rune) {
	state.Write([]byte("["))
	for scope := s.scope; scope != nil; scope = scope.Pop() {
		state.Write([]byte("{"))
		j := 0
		scope.LocalRange(func(k string, v Value) {
			// Comma-separate all bindings after the first.
			if j != 0 {
				state.Write([]byte(", "))
			}
			fmt.Fprintf(state, "%s = %v", k, v)
			j++
		})
		state.Write([]byte("} -> "))
	}
	state.Write([]byte("nil ]"))
}
// BuildExternAssignments constructs nested semantic.ExternAssignment nodes mirroring the nested structure of the scope.
func BuildExternAssignments(node semantic.Node, scope Scope) semantic.Node {
var n = node
for s := scope; s != nil; s = s.Pop() {
extern := &semantic.Extern{
Block: &semantic.ExternBlock{
Node: n,
},
}
s.LocalRange(func(k string, v Value) {
extern.Assignments = append(extern.Assignments, &semantic.ExternalVariableAssignment{
Identifier: &semantic.Identifier{Name: k},
ExternType: v.PolyType(),
})
})
n = extern
}
return n
} | vendor/github.com/influxdata/flux/values/scope.go | 0.677581 | 0.422683 | scope.go | starcoder |
package euler1
import (
"math/big"
"github.com/andrew-field/maths"
)
// LargeSum returns an int whose digits are the first ten digits of the sum of the following one-hundred 50-digit numbers.
func LargeSum() int {
	numbers := make([]big.Int, 100)
	numbers[0].SetString("37107287533902102798797998220837590246510135740250", 10)
	numbers[1].SetString("46376937677490009712648124896970078050417018260538", 10)
	numbers[2].SetString("74324986199524741059474233309513058123726617309629", 10)
	numbers[3].SetString("91942213363574161572522430563301811072406154908250", 10)
	numbers[4].SetString("23067588207539346171171980310421047513778063246676", 10)
	numbers[5].SetString("89261670696623633820136378418383684178734361726757", 10)
	numbers[6].SetString("28112879812849979408065481931592621691275889832738", 10)
	numbers[7].SetString("44274228917432520321923589422876796487670272189318", 10)
	numbers[8].SetString("47451445736001306439091167216856844588711603153276", 10)
	numbers[9].SetString("70386486105843025439939619828917593665686757934951", 10)
	numbers[10].SetString("62176457141856560629502157223196586755079324193331", 10)
	numbers[11].SetString("64906352462741904929101432445813822663347944758178", 10)
	numbers[12].SetString("92575867718337217661963751590579239728245598838407", 10)
	numbers[13].SetString("58203565325359399008402633568948830189458628227828", 10)
	numbers[14].SetString("80181199384826282014278194139940567587151170094390", 10)
	numbers[15].SetString("35398664372827112653829987240784473053190104293586", 10)
	numbers[16].SetString("86515506006295864861532075273371959191420517255829", 10)
	numbers[17].SetString("71693888707715466499115593487603532921714970056938", 10)
	numbers[18].SetString("54370070576826684624621495650076471787294438377604", 10)
	numbers[19].SetString("53282654108756828443191190634694037855217779295145", 10)
	numbers[20].SetString("36123272525000296071075082563815656710885258350721", 10)
	numbers[21].SetString("45876576172410976447339110607218265236877223636045", 10)
	numbers[22].SetString("17423706905851860660448207621209813287860733969412", 10)
	numbers[23].SetString("81142660418086830619328460811191061556940512689692", 10)
	numbers[24].SetString("51934325451728388641918047049293215058642563049483", 10)
	numbers[25].SetString("62467221648435076201727918039944693004732956340691", 10)
	numbers[26].SetString("15732444386908125794514089057706229429197107928209", 10)
	numbers[27].SetString("55037687525678773091862540744969844508330393682126", 10)
	numbers[28].SetString("18336384825330154686196124348767681297534375946515", 10)
	numbers[29].SetString("80386287592878490201521685554828717201219257766954", 10)
	numbers[30].SetString("78182833757993103614740356856449095527097864797581", 10)
	numbers[31].SetString("16726320100436897842553539920931837441497806860984", 10)
	numbers[32].SetString("48403098129077791799088218795327364475675590848030", 10)
	numbers[33].SetString("87086987551392711854517078544161852424320693150332", 10)
	numbers[34].SetString("59959406895756536782107074926966537676326235447210", 10)
	numbers[35].SetString("69793950679652694742597709739166693763042633987085", 10)
	numbers[36].SetString("41052684708299085211399427365734116182760315001271", 10)
	numbers[37].SetString("65378607361501080857009149939512557028198746004375", 10)
	numbers[38].SetString("35829035317434717326932123578154982629742552737307", 10)
	numbers[39].SetString("94953759765105305946966067683156574377167401875275", 10)
	numbers[40].SetString("88902802571733229619176668713819931811048770190271", 10)
	numbers[41].SetString("25267680276078003013678680992525463401061632866526", 10)
	numbers[42].SetString("36270218540497705585629946580636237993140746255962", 10)
	numbers[43].SetString("24074486908231174977792365466257246923322810917141", 10)
	numbers[44].SetString("91430288197103288597806669760892938638285025333403", 10)
	numbers[45].SetString("34413065578016127815921815005561868836468420090470", 10)
	numbers[46].SetString("23053081172816430487623791969842487255036638784583", 10)
	numbers[47].SetString("11487696932154902810424020138335124462181441773470", 10)
	numbers[48].SetString("63783299490636259666498587618221225225512486764533", 10)
	numbers[49].SetString("67720186971698544312419572409913959008952310058822", 10)
	numbers[50].SetString("95548255300263520781532296796249481641953868218774", 10)
	numbers[51].SetString("76085327132285723110424803456124867697064507995236", 10)
	numbers[52].SetString("37774242535411291684276865538926205024910326572967", 10)
	numbers[53].SetString("23701913275725675285653248258265463092207058596522", 10)
	numbers[54].SetString("29798860272258331913126375147341994889534765745501", 10)
	numbers[55].SetString("18495701454879288984856827726077713721403798879715", 10)
	numbers[56].SetString("38298203783031473527721580348144513491373226651381", 10)
	numbers[57].SetString("34829543829199918180278916522431027392251122869539", 10)
	numbers[58].SetString("40957953066405232632538044100059654939159879593635", 10)
	numbers[59].SetString("29746152185502371307642255121183693803580388584903", 10)
	numbers[60].SetString("41698116222072977186158236678424689157993532961922", 10)
	numbers[61].SetString("62467957194401269043877107275048102390895523597457", 10)
	numbers[62].SetString("23189706772547915061505504953922979530901129967519", 10)
	numbers[63].SetString("86188088225875314529584099251203829009407770775672", 10)
	numbers[64].SetString("11306739708304724483816533873502340845647058077308", 10)
	numbers[65].SetString("82959174767140363198008187129011875491310547126581", 10)
	numbers[66].SetString("97623331044818386269515456334926366572897563400500", 10)
	numbers[67].SetString("42846280183517070527831839425882145521227251250327", 10)
	numbers[68].SetString("55121603546981200581762165212827652751691296897789", 10)
	numbers[69].SetString("32238195734329339946437501907836945765883352399886", 10)
	numbers[70].SetString("75506164965184775180738168837861091527357929701337", 10)
	numbers[71].SetString("62177842752192623401942399639168044983993173312731", 10)
	numbers[72].SetString("32924185707147349566916674687634660915035914677504", 10)
	numbers[73].SetString("99518671430235219628894890102423325116913619626622", 10)
	numbers[74].SetString("73267460800591547471830798392868535206946944540724", 10)
	numbers[75].SetString("76841822524674417161514036427982273348055556214818", 10)
	numbers[76].SetString("97142617910342598647204516893989422179826088076852", 10)
	numbers[77].SetString("87783646182799346313767754307809363333018982642090", 10)
	numbers[78].SetString("10848802521674670883215120185883543223812876952786", 10)
	numbers[79].SetString("71329612474782464538636993009049310363619763878039", 10)
	numbers[80].SetString("62184073572399794223406235393808339651327408011116", 10)
	numbers[81].SetString("66627891981488087797941876876144230030984490851411", 10)
	numbers[82].SetString("60661826293682836764744779239180335110989069790714", 10)
	numbers[83].SetString("85786944089552990653640447425576083659976645795096", 10)
	numbers[84].SetString("66024396409905389607120198219976047599490197230297", 10)
	numbers[85].SetString("64913982680032973156037120041377903785566085089252", 10)
	numbers[86].SetString("16730939319872750275468906903707539413042652315011", 10)
	numbers[87].SetString("94809377245048795150954100921645863754710598436791", 10)
	numbers[88].SetString("78639167021187492431995700641917969777599028300699", 10)
	numbers[89].SetString("15368713711936614952811305876380278410754449733078", 10)
	numbers[90].SetString("40789923115535562561142322423255033685442488917353", 10)
	numbers[91].SetString("44889911501440648020369068063960672322193204149535", 10)
	numbers[92].SetString("41503128880339536053299340368006977710650566631954", 10)
	numbers[93].SetString("81234880673210146739058568557934581403627822703280", 10)
	numbers[94].SetString("82616570773948327592232845941706525094512325230608", 10)
	numbers[95].SetString("22918802058777319719839450180888072429661980811197", 10)
	numbers[96].SetString("77158542502016545090413245809786882778948721859617", 10)
	numbers[97].SetString("72107838435069186155435662884062257473692284509516", 10)
	numbers[98].SetString("20849603980134001723930671666823555245252804609722", 10)
	numbers[99].SetString("53503534226472524250874054075591789781264330331690", 10)
	// Sum the numbers. Index into the slice so each element is added in
	// place; the previous range-by-value form copied every big.Int into the
	// loop variable and took the address of that copy — big.Int values are
	// not meant to be copied.
	sum := big.NewInt(0)
	for i := range numbers {
		sum.Add(sum, &numbers[i])
	}
	numberOfDigits := maths.NumberOfDigitsBig(sum)
	digits := maths.DigitsBig(sum)
	// Discard all but the ten most significant digits. This assumes
	// maths.DigitsBig yields digits least-significant first — TODO confirm;
	// the remaining ten are reassembled with increasing powers of ten.
	for i := 0; i < numberOfDigits-10; i++ {
		<-digits
	}
	firstTen := 0
	powerOfTen := 1
	for val := range digits {
		firstTen += val * powerOfTen
		powerOfTen *= 10
	}
	return firstTen
}
// Go makes this quite easy. This returns an int instead of individual ints so it is easier to test.
package indexeddb
import (
"errors"
"reflect"
)
// Compare compares two values, returning -1 when left orders before right,
// 1 when it orders after, and 0 when the two are considered equal.
//
// Mixed kinds: array/slice/string values sort after values of any other
// kind; other mixed-kind pairs are an error. Same-kind values are compared
// natively, except that maps and interfaces are only checked for deep
// equality (never -1) and arrays/slices are compared by length alone.
//
// Fixes: the unsigned branch previously tested reflect.Int/Int8/Int16/Int32,
// and the float branch tested reflect.Int32 — those conditions were never
// true in their branches, so every uint/uint8/uint16/uint32/uintptr and every
// float32 value fell through to the uint64/float64 type assertion and
// panicked. The checks now use the matching reflect.Uint*/Float32 kinds, and
// the Uintptr case (already listed in the branch condition) is handled.
func Compare(left interface{}, right interface{}) (int, error) {
	leftRt := reflect.TypeOf(left)
	rightRt := reflect.TypeOf(right)
	leftKind := leftRt.Kind()
	rightKind := rightRt.Kind()
	if leftKind != rightKind {
		if leftKind == reflect.Array ||
			leftKind == reflect.Slice ||
			leftKind == reflect.String {
			return 1, nil
		} else if rightKind == reflect.Array ||
			rightKind == reflect.Slice ||
			rightKind == reflect.String {
			return -1, nil
		}
		return -1, errors.New("Invalid type comparison")
	}
	if leftKind == reflect.Slice || leftKind == reflect.Array {
		// NOTE(review): these assertions only succeed for []interface{}; a
		// typed slice (e.g. []string) or a true array would panic here —
		// confirm callers only pass []interface{} values.
		leftArr := left.([]interface{})
		rightArr := right.([]interface{})
		if len(leftArr) > len(rightArr) {
			return 1, nil
		} else if len(leftArr) < len(rightArr) {
			return -1, nil
		}
		return 0, nil
	} else if leftKind == reflect.Interface || leftKind == reflect.Map {
		if reflect.DeepEqual(left, right) {
			return 0, nil
		} else {
			return 1, nil
		}
	} else if leftKind == reflect.Bool {
		// true sorts after false.
		if left == right {
			return 0, nil
		} else if left.(bool) == true {
			return 1, nil
		}
		return -1, nil
	} else if leftKind == reflect.Int ||
		leftKind == reflect.Int8 ||
		leftKind == reflect.Int16 ||
		leftKind == reflect.Int32 ||
		leftKind == reflect.Int64 {
		var leftInt int64
		var rightInt int64
		if leftKind == reflect.Int {
			leftInt = int64(left.(int))
			rightInt = int64(right.(int))
		} else if leftKind == reflect.Int8 {
			leftInt = int64(left.(int8))
			rightInt = int64(right.(int8))
		} else if leftKind == reflect.Int16 {
			leftInt = int64(left.(int16))
			rightInt = int64(right.(int16))
		} else if leftKind == reflect.Int32 {
			leftInt = int64(left.(int32))
			rightInt = int64(right.(int32))
		} else {
			leftInt = left.(int64)
			rightInt = right.(int64)
		}
		if leftInt > rightInt {
			return 1, nil
		} else if leftInt < rightInt {
			return -1, nil
		}
		return 0, nil
	} else if leftKind == reflect.Uint ||
		leftKind == reflect.Uint8 ||
		leftKind == reflect.Uint16 ||
		leftKind == reflect.Uint32 ||
		leftKind == reflect.Uint64 ||
		leftKind == reflect.Uintptr {
		var leftInt uint64
		var rightInt uint64
		if leftKind == reflect.Uint {
			leftInt = uint64(left.(uint))
			rightInt = uint64(right.(uint))
		} else if leftKind == reflect.Uint8 {
			leftInt = uint64(left.(uint8))
			rightInt = uint64(right.(uint8))
		} else if leftKind == reflect.Uint16 {
			leftInt = uint64(left.(uint16))
			rightInt = uint64(right.(uint16))
		} else if leftKind == reflect.Uint32 {
			leftInt = uint64(left.(uint32))
			rightInt = uint64(right.(uint32))
		} else if leftKind == reflect.Uintptr {
			leftInt = uint64(left.(uintptr))
			rightInt = uint64(right.(uintptr))
		} else {
			leftInt = left.(uint64)
			rightInt = right.(uint64)
		}
		if leftInt > rightInt {
			return 1, nil
		} else if leftInt < rightInt {
			return -1, nil
		}
		return 0, nil
	} else if leftKind == reflect.Float32 || leftKind == reflect.Float64 {
		var leftFloat float64
		var rightFloat float64
		if leftKind == reflect.Float32 {
			leftFloat = float64(left.(float32))
			rightFloat = float64(right.(float32))
		} else {
			leftFloat = left.(float64)
			rightFloat = right.(float64)
		}
		if leftFloat > rightFloat {
			return 1, nil
		} else if leftFloat < rightFloat {
			return -1, nil
		}
		return 0, nil
	} else if leftKind == reflect.String {
		if left.(string) > right.(string) {
			return 1, nil
		} else if left.(string) < right.(string) {
			return -1, nil
		}
		return 0, nil
	}
	return 0, errors.New("Unknown type")
}
package chart
import (
"fmt"
"math"
"strings"
)
// TicksProvider is a type that provides ticks.
type TicksProvider interface {
	// GetTicks returns the ticks to draw for an axis, given the renderer used
	// for text measurement, the default style, and the value formatter.
	GetTicks(r Renderer, defaults Style, vf ValueFormatter) []Tick
}
// Tick represents a label on an axis.
type Tick struct {
	Value float64 // position of the tick along the axis, in data space
	Label string  // text rendered at the tick position
}
// Ticks is an array of ticks. Via its Len, Swap and Less methods it satisfies
// sort.Interface, ordering ticks by ascending Value.
type Ticks []Tick
// Len returns the length of the ticks set (part of sort.Interface).
func (t Ticks) Len() int {
	return len(t)
}
// Swap exchanges the ticks at positions i and j (part of sort.Interface).
func (t Ticks) Swap(i, j int) {
	tmp := t[i]
	t[i] = t[j]
	t[j] = tmp
}
// Less reports whether the tick at i sorts before the tick at j, comparing
// by Value (part of sort.Interface).
func (t Ticks) Less(i, j int) bool {
	vi, vj := t[i].Value, t[j].Value
	return vi < vj
}
// String returns a string representation of the set of ticks, rendering each
// tick as "[index: label]" joined with ", ".
func (t Ticks) String() string {
	var b strings.Builder
	for index, tick := range t {
		if index > 0 {
			b.WriteString(", ")
		}
		fmt.Fprintf(&b, "[%d: %s]", index, tick.Label)
	}
	return b.String()
}
// GenerateContinuousTicks generates a set of ticks for a continuous range:
// the range's min and max plus as many evenly spaced intermediate ticks as
// fit in the range's domain, given the rendered size of a sample label.
// Ticks are emitted in axis order (max first for a descending range).
func GenerateContinuousTicks(r Renderer, ra Range, isVertical bool, style Style, vf ValueFormatter) []Tick {
	if vf == nil {
		vf = FloatValueFormatter
	}
	min, max := ra.GetMin(), ra.GetMax()
	isDescending := ra.IsDescending()
	// Measure one representative label to estimate the space a tick needs.
	minLabel := vf(min)
	style.GetTextOptions().WriteToRenderer(r)
	labelBox := r.MeasureText(minLabel)
	var tickSize float64
	if isVertical {
		tickSize = float64(labelBox.Height() + DefaultMinimumTickVerticalSpacing)
	} else {
		tickSize = float64(labelBox.Width() + DefaultMinimumTickHorizontalSpacing)
	}
	// Reserve room for the two end ticks; the remainder determines how many
	// intermediate ticks fit.
	domain := float64(ra.GetDomain())
	domainRemainder := domain - (tickSize * 2)
	intermediateTickCount := int(math.Floor(domainRemainder / tickSize))
	rangeDelta := math.Abs(max - min)
	// NOTE(review): tickStep is computed before the sanity clamp below; when
	// intermediateTickCount is 0 this divides by zero, yielding +Inf, which is
	// only harmless because the intermediate loop then does not execute.
	tickStep := rangeDelta / float64(intermediateTickCount)
	roundTo := GetRoundToForDelta(rangeDelta) / 10
	intermediateTickCount = MinInt2(intermediateTickCount, DefaultTickCountSanityCheck)
	// check if intermediateTickCount is < 0
	tickAmount := 2
	if intermediateTickCount > 0 {
		tickAmount += intermediateTickCount - 1
	}
	ticks := make([]Tick, 0, tickAmount)
	// First end tick: max for descending axes, min otherwise.
	if isDescending {
		ticks = append(ticks, Tick{
			Value: max,
			Label: vf(max),
		})
	} else {
		ticks = append(ticks, Tick{
			Value: min,
			Label: vf(min),
		})
	}
	// Intermediate ticks, stepping from the first end tick and rounding each
	// step up to a "nice" value.
	for x := 1; x < intermediateTickCount; x++ {
		var tickValue float64
		if isDescending {
			tickValue = max - RoundUp(tickStep*float64(x), roundTo)
		} else {
			tickValue = min + RoundUp(tickStep*float64(x), roundTo)
		}
		ticks = append(ticks, Tick{
			Value: tickValue,
			Label: vf(tickValue),
		})
	}
	// Closing end tick: min for descending axes, max otherwise.
	if isDescending {
		ticks = append(ticks, Tick{
			Value: min,
			Label: vf(min),
		})
	} else {
		ticks = append(ticks, Tick{
			Value: max,
			Label: vf(max),
		})
	}
	return ticks
}
package geom
// Simplify returns a simplified version of the geometry using the
// Ramer-Douglas-Peucker algorithm. Sometimes a simplified geometry can become
// invalid, in which case an error is returned rather than attempting to fix
// the geometry. Validation of the result can be skipped by making use of the
// geometry constructor options.
func Simplify(g Geometry, threshold float64, opts ...ConstructorOption) (Geometry, error) {
	s := simplifier{threshold, opts}
	switch g.gtype {
	case TypePoint, TypeMultiPoint:
		// Point-like geometries have nothing to simplify.
		return g, nil
	case TypeLineString:
		ls, err := s.simplifyLineString(g.AsLineString())
		return ls.AsGeometry(), wrapSimplified(err)
	case TypePolygon:
		poly, err := s.simplifyPolygon(g.AsPolygon())
		return poly.AsGeometry(), wrapSimplified(err)
	case TypeMultiLineString:
		mls, err := s.simplifyMultiLineString(g.AsMultiLineString())
		return mls.AsGeometry(), wrapSimplified(err)
	case TypeMultiPolygon:
		mp, err := s.simplifyMultiPolygon(g.AsMultiPolygon())
		return mp.AsGeometry(), wrapSimplified(err)
	case TypeGeometryCollection:
		gc, err := s.simplifyGeometryCollection(g.AsGeometryCollection())
		return gc.AsGeometry(), wrapSimplified(err)
	default:
		panic("unknown geometry: " + g.gtype.String())
	}
}
// simplifier carries the parameters shared by the per-geometry-type
// simplification helpers.
type simplifier struct {
	threshold float64             // Ramer-Douglas-Peucker distance threshold
	opts      []ConstructorOption // options forwarded to geometry constructors
}
// simplifyLineString simplifies ls with Ramer-Douglas-Peucker. If the result
// collapses below two distinct points, an empty LineString is returned.
func (s simplifier) simplifyLineString(ls LineString) (LineString, error) {
	coords := ls.Coordinates()
	simplified := NewSequence(s.ramerDouglasPeucker(nil, coords), coords.CoordinatesType())
	if simplified.Length() > 0 && !hasAtLeast2DistinctPointsInSeq(simplified) {
		return LineString{}, nil
	}
	return NewLineString(simplified, s.opts...)
}
// simplifyMultiLineString simplifies each member LineString, dropping any
// that collapse to empty.
func (s simplifier) simplifyMultiLineString(mls MultiLineString) (MultiLineString, error) {
	count := mls.NumLineStrings()
	kept := make([]LineString, 0, count)
	for idx := 0; idx < count; idx++ {
		simplified, err := s.simplifyLineString(mls.LineStringN(idx))
		if err != nil {
			return MultiLineString{}, err
		}
		if simplified.IsEmpty() {
			continue
		}
		kept = append(kept, simplified)
	}
	return NewMultiLineStringFromLineStrings(kept, s.opts...), nil
}
// simplifyPolygon simplifies the exterior and interior rings of poly. If the
// exterior collapses below 4 coordinates (i.e. can no longer close a ring),
// an empty Polygon is returned; interior rings that no longer form a ring are
// silently dropped.
func (s simplifier) simplifyPolygon(poly Polygon) (Polygon, error) {
	exterior, err := s.simplifyLineString(poly.ExteriorRing())
	if err != nil {
		return Polygon{}, err
	}
	// If we don't have at least 4 coordinates, then we can't form a ring, and
	// the polygon has collapsed either to a point or a single linear element.
	// Both cases are represented by an empty polygon.
	if exterior.Coordinates().Length() < 4 {
		return Polygon{}, nil
	}
	n := poly.NumInteriorRings()
	rings := make([]LineString, 0, n+1)
	rings = append(rings, exterior)
	for i := 0; i < n; i++ {
		interior, err := s.simplifyLineString(poly.InteriorRingN(i))
		if err != nil {
			return Polygon{}, err
		}
		// NOTE(review): interiors are filtered with IsRing() while the
		// exterior only gets the 4-coordinate check above — presumably
		// NewPolygonFromRings validates the exterior; confirm.
		if interior.IsRing() {
			rings = append(rings, interior)
		}
	}
	return NewPolygonFromRings(rings, s.opts...)
}
// simplifyMultiPolygon simplifies each member Polygon, dropping any that
// collapse to empty.
func (s simplifier) simplifyMultiPolygon(mp MultiPolygon) (MultiPolygon, error) {
	count := mp.NumPolygons()
	kept := make([]Polygon, 0, count)
	for idx := 0; idx < count; idx++ {
		simplified, err := s.simplifyPolygon(mp.PolygonN(idx))
		if err != nil {
			return MultiPolygon{}, err
		}
		if simplified.IsEmpty() {
			continue
		}
		kept = append(kept, simplified)
	}
	return NewMultiPolygonFromPolygons(kept, s.opts...)
}
// simplifyGeometryCollection simplifies each child geometry in turn.
func (s simplifier) simplifyGeometryCollection(gc GeometryCollection) (GeometryCollection, error) {
	n := gc.NumGeometries()
	geoms := make([]Geometry, n)
	for i := 0; i < n; i++ {
		var err error
		// BUG FIX: forward the constructor options to the recursive call.
		// Previously they were dropped here — every other simplify* method
		// passes s.opts — so geometries nested inside a collection ignored
		// any ConstructorOption given to Simplify (e.g. skipping validation).
		geoms[i], err = Simplify(gc.GeometryN(i), s.threshold, s.opts...)
		if err != nil {
			return GeometryCollection{}, err
		}
	}
	return NewGeometryCollection(geoms, s.opts...), nil
}
// ramerDouglasPeucker appends a simplified copy of seq's points to dst and
// returns it. This is an iterative variant of the Ramer-Douglas-Peucker
// algorithm: from the current anchor it repeatedly shrinks the candidate end
// point to the point of maximum perpendicular deviation until that deviation
// is within the threshold, emits the anchor, and advances the anchor to the
// accepted end point. The first and last points are always kept.
func (s simplifier) ramerDouglasPeucker(dst []float64, seq Sequence) []float64 {
	// With two or fewer points there is nothing to remove.
	if seq.Length() <= 2 {
		return seq.appendAllPoints(dst)
	}

	start := 0
	end := seq.Length() - 1

	for start < end {
		dst = seq.appendPoint(dst, start)
		newEnd := end
		for {
			// Find the point between start and newEnd that deviates most
			// from the chord start->newEnd.
			var maxDist float64
			var maxDistIdx int
			for i := start + 1; i < newEnd; i++ {
				if d := perpendicularDistance(
					seq.GetXY(i),
					seq.GetXY(start),
					seq.GetXY(newEnd),
				); d > maxDist {
					maxDistIdx = i
					maxDist = d
				}
			}
			// All intermediate points are within the threshold: the chord
			// start->newEnd is an acceptable simplification of this stretch.
			if maxDist <= s.threshold {
				break
			}
			// Otherwise retry with the worst offender as the new end point.
			newEnd = maxDistIdx
		}
		start = newEnd
	}
	dst = seq.appendPoint(dst, end)
	return dst
}
// perpendicularDistance is the distance from 'p' to the infinite line going
// through 'a' and 'b'. If 'a' and 'b' are the same, then the distance between
// 'a'/'b' and 'p' is returned.
func perpendicularDistance(p, a, b XY) float64 {
if a == b {
return p.Sub(a).Length()
}
aSubP := a.Sub(p)
bSubA := b.Sub(a)
unit := bSubA.Scale(1 / bSubA.Length())
perpendicular := aSubP.Sub(unit.Scale(aSubP.Dot(unit)))
return perpendicular.Length()
} | geom/alg_simplify.go | 0.807612 | 0.58602 | alg_simplify.go | starcoder |
package main
import (
"crypto/sha1"
"errors"
"flag"
"fmt"
"io/ioutil"
"sort"
"github.com/Binject/debug/pe"
)
// main computes and prints (hex-encoded) the Authenticode-style hash of the
// PE file named by the -i flag.
func main() {
	// again, not sure this should really be in bananaphone, will move/etc when sensible..
	input := flag.String("i", "", "File to get PE hash of")
	flag.Parse()
	if *input == "" {
		panic("need an in file pls")
	}
	fmt.Printf("%x\n", getHash(*input))
}
// getHash computes the Authenticode-style SHA-1 hash of the x64 PE file at
// loc, following the procedure quoted below (skipping the checksum field and
// the Certificate Table directory entry, then hashing the sorted sections).
// It panics on any error and on non-amd64 images. NOTE(review): step 14 of
// the procedure (hashing trailing data beyond the last section, minus the
// certificate table) is not implemented, so files with overlay data will
// hash differently from a conforming implementation — confirm intended.
func getHash(loc string) []byte {
	/*
		https://www.symbolcrash.com/wp-content/uploads/2019/02/Authenticode_PE-1.pdf
		1. Load the image header into memory.
		2. Initialize a hash algorithm context.
		3. Hash the image header from its base to immediately before the start of the checksum address, as specified in Optional Header Windows-Specific Fields.
		4. Skip over the checksum, which is a 4-byte field.
		5. Hash everything from the end of the checksum field to immediately before the start of the Certificate Table entry, as specified in Optional Header Data Directories.
		6. Get the Attribute Certificate Table address and size from the Certificate Table entry. For details, see section 5.7 of the PE/COFF specification.
		7. Exclude the Certificate Table entry from the calculation and hash everything from the end of the Certificate Table entry to the end of image header, including Section Table (headers).The Certificate Table entry is 8 bytes long, as specified in Optional Header Data Directories.
		8. Create a counter called SUM_OF_BYTES_HASHED, which is not part of the signature. Set this counter to the SizeOfHeaders field, as specified in Optional Header Windows-Specific Field.
		9. Build a temporary table of pointers to all of the section headers in the image. The NumberOfSections field of COFF File Header indicates how big the table should be. Do not include any section headers in the table whose SizeOfRawData field is zero.
		10. Using the PointerToRawData field (offset 20) in the referenced SectionHeader structure as a key, arrange the table's elements in ascending order. In other words, sort the section headers in ascending order according to the disk-file offset of the sections.
		11. Walk through the sorted table, load the corresponding section into memory, and hash the entire section. Use the SizeOfRawData field in the SectionHeader structure to determine the amount of data to hash.
		12. Add the section’s SizeOfRawData value to SUM_OF_BYTES_HASHED.
		13. Repeat steps 11 and 12 for all of the sections in the sorted table.
		14. Create a value called FILE_SIZE, which is not part of the signature. Set this value to the image’s file size, acquired from the underlying file system. If FILE_SIZE is greater than SUM_OF_BYTES_HASHED, the file contains extra data that must be added to the hash. This data begins at the SUM_OF_BYTES_HASHED file offset, and its length is:
		(File Size) – ((Size of AttributeCertificateTable) + SUM_OF_BYTES_HASHED)
		Note: The size of Attribute Certificate Table is specified in the second ULONG value in the Certificate Table entry (32 bit: offset 132, 64 bit: offset 148) in Optional Header Data Directories.
		15. Finalize the hash algorithm context.
		Note: This procedure uses offset values from the PE/COFF specification, version 8.1 . For authoritative offset values, refer to the most recent version of the PE/COFF specification.
	*/
	// Parse the PE structure (headers, section table) for offsets below.
	p, e := pe.Open(loc)
	if e != nil {
		panic(e)
	}
	hasher := sha1.New() //todo, pick hashing alg I guess?
	//bb, e := p.Bytes()
	// The raw file bytes are hashed directly; the parsed PE is only used for
	// offsets and section headers.
	bb, e := ioutil.ReadFile(loc) //todo, work out why binject gives a different value
	if e != nil {
		panic(e)
	}
	ctr := 0
	// Steps 2-3: hash from the start of the image up to (but excluding) the
	// optional-header checksum field at file offset 0xd8.
	n, e := hasher.Write(bb[:0xd8])
	if e != nil {
		panic(e)
	}
	ctr = 0xd8 + 4 //skip checksum
	//this shoudl be checked to see if some silly assembler has placed the cert table in a section (it shouldn't, but it would be valid..? probably?)
	//var certTableOffset, certTableSize uint32
	switch p.FileHeader.Machine {
	case pe.IMAGE_FILE_MACHINE_I386:
		//certTableOffset = p.OptionalHeader.(*pe.OptionalHeader32).DataDirectory[pe.CERTIFICATE_TABLE].VirtualAddress
		//certTableSize = p.OptionalHeader.(*pe.OptionalHeader32).DataDirectory[pe.CERTIFICATE_TABLE].Size
		panic("non x64 not supported soz lol")
	case pe.IMAGE_FILE_MACHINE_AMD64:
		//certTableOffset = p.OptionalHeader.(*pe.OptionalHeader64).DataDirectory[pe.CERTIFICATE_TABLE].VirtualAddress
		//certTableSize = p.OptionalHeader.(*pe.OptionalHeader64).DataDirectory[pe.CERTIFICATE_TABLE].Size
		// Step 5: hash from the end of the checksum up to (but excluding) the
		// Certificate Table data-directory entry at file offset 0x128 (x64).
		n, e = hasher.Write(bb[ctr:0x128])
		if e != nil {
			panic(e)
		}
		ctr = 0x128 + 8 //skip cert table entry thing
	default:
		panic(errors.New("architecture not supported"))
	}
	// Step 7: hash the rest of the headers (through the section table).
	n, e = hasher.Write(bb[ctr:p.OptionalHeader.(*pe.OptionalHeader64).SizeOfHeaders])
	if e != nil {
		panic(e)
	}
	hashcount := int(p.OptionalHeader.(*pe.OptionalHeader64).SizeOfHeaders) //this isn't actually used (yet)
	//9 get section headers, sort by raw data offset
	sections := make([]*pe.Section, 0, p.NumberOfSections)
	for _, x := range p.Sections {
		if x.Size != 0 {
			sections = append(sections, x)
		}
	}
	sort.Sort(sortBy(sections))
	// Steps 11-13: hash each section's raw data in file-offset order.
	// NOTE(review): the write error is not checked here (hash writers never
	// error in practice), and hashcount mirrors SUM_OF_BYTES_HASHED but is
	// never consumed because step 14 is unimplemented.
	for _, sec := range sections {
		n, e = hasher.Write(bb[sec.Offset : sec.Offset+sec.Size])
		hashcount += n
	}
	return hasher.Sum(nil)
}
// sortBy orders PE sections by ascending raw-data file offset, implementing
// sort.Interface as required by step 10 of the Authenticode hashing procedure.
type sortBy []*pe.Section

func (a sortBy) Len() int           { return len(a) }
func (a sortBy) Swap(i, j int)      { a[i], a[j] = a[j], a[i] }
func (a sortBy) Less(i, j int) bool { return a[i].Offset < a[j].Offset }
package evaluation
import (
"github.com/vodinhphuc/golearn/base"
"math/rand"
)
// GetCrossValidatedMetric returns the mean and the (population) variance of
// the confusion-matrix-derived metric across all folds. An empty input yields
// (0, 0); previously it produced NaN for both results via 0/0 division.
func GetCrossValidatedMetric(in []ConfusionMatrix, metric func(ConfusionMatrix) float64) (mean, variance float64) {
	if len(in) == 0 {
		return 0, 0
	}
	scores := make([]float64, len(in))
	for i, c := range in {
		scores[i] = metric(c)
	}
	n := float64(len(scores))

	// Mean of the per-fold scores.
	var total float64
	for _, s := range scores {
		total += s
	}
	mean = total / n

	// Population variance (divides by n, not n-1, matching the original).
	var sqDiff float64
	for _, s := range scores {
		d := s - mean
		sqDiff += d * d
	}
	variance = sqDiff / n
	return mean, variance
}
// GenerateCrossFoldValidationConfusionMatrices divides the data into a number of folds
// then trains and evaluates the classifier on each fold, producing a new ConfusionMatrix
// per fold (one element per fold in the returned slice).
func GenerateCrossFoldValidationConfusionMatrices(data base.FixedDataGrid, cls base.Classifier, folds int) ([]ConfusionMatrix, error) {
	_, rows := data.Size()
	// Assign each row to a fold, uniformly at random — fold sizes are only
	// approximately equal.
	// NOTE(review): foldMap is populated but never read afterwards; and with
	// random assignment a fold can come out empty (likely for small datasets),
	// giving an empty test view below — confirm downstream code tolerates it.
	foldMap := make([]int, rows)
	inverseFoldMap := make(map[int][]int)
	for i := 0; i < rows; i++ {
		fold := rand.Intn(folds)
		foldMap[i] = fold
		if _, ok := inverseFoldMap[fold]; !ok {
			inverseFoldMap[fold] = make([]int, 0)
		}
		inverseFoldMap[fold] = append(inverseFoldMap[fold], i)
	}
	ret := make([]ConfusionMatrix, folds)
	// Create training/test views for each fold
	for i := 0; i < folds; i++ {
		// Fold i is for testing
		testData := base.NewInstancesViewFromVisible(data, inverseFoldMap[i], data.AllAttributes())
		// Every other fold forms the training set.
		otherRows := make([]int, 0)
		for j := 0; j < folds; j++ {
			if i == j {
				continue
			}
			otherRows = append(otherRows, inverseFoldMap[j]...)
		}
		trainData := base.NewInstancesViewFromVisible(data, otherRows, data.AllAttributes())
		// Train
		err := cls.Fit(trainData)
		if err != nil {
			return nil, err
		}
		// Predict
		pred, err := cls.Predict(testData)
		if err != nil {
			return nil, err
		}
		// Evaluate
		cf, err := GetConfusionMatrix(testData, pred)
		if err != nil {
			return nil, err
		}
		ret[i] = cf
	}
	return ret, nil
}
package task2
import (
"aoc-2021-day22/geometry"
"aoc-2021-day22/input"
)
// Solve executes the reboot operations and returns how many cubes end up on.
//
// It maintains a set of pairwise-disjoint "on" cuboids. For each operation,
// every existing cuboid that intersects the operation's cuboid is replaced by
// up to six complementary axis-aligned slabs covering exactly the part of it
// that lies *outside* the operation's cuboid (the intersection is carved
// away). If the operation turns cubes on, its whole cuboid is then appended;
// since overlaps were removed first, the disjointness invariant holds and the
// answer is simply the sum of the remaining cuboids' volumes.
func Solve(data input.Data) (uint64, error) {
	cuboids := make([]geometry.Cuboid, 0)
	for _, op := range data.Operations {
		cuboidsAfterOp := make([]geometry.Cuboid, 0)
		for _, existing := range cuboids {
			// Cuboids untouched by this operation survive unchanged.
			if notIntersect(op.Cuboid, existing) {
				cuboidsAfterOp = append(cuboidsAfterOp, existing)
				continue
			}
			// Slab of 'existing' strictly below op on the X axis
			// (full Y/Z extent of 'existing').
			if op.Cuboid.LowerX > existing.LowerX {
				cube := geometry.Cuboid{
					LowerX: existing.LowerX,
					UpperX: op.Cuboid.LowerX - 1,
					LowerY: existing.LowerY,
					UpperY: existing.UpperY,
					LowerZ: existing.LowerZ,
					UpperZ: existing.UpperZ,
				}
				cuboidsAfterOp = append(cuboidsAfterOp, cube)
			}
			// Slab strictly above op on the X axis.
			if op.Cuboid.UpperX < existing.UpperX {
				cube := geometry.Cuboid{
					LowerX: op.Cuboid.UpperX + 1,
					UpperX: existing.UpperX,
					LowerY: existing.LowerY,
					UpperY: existing.UpperY,
					LowerZ: existing.LowerZ,
					UpperZ: existing.UpperZ,
				}
				cuboidsAfterOp = append(cuboidsAfterOp, cube)
			}
			// Slab below op on the Y axis; X is clamped to the overlap so the
			// slab does not double-count the X slabs emitted above.
			if op.Cuboid.LowerY > existing.LowerY {
				cube := geometry.Cuboid{
					LowerX: max(op.Cuboid.LowerX, existing.LowerX),
					UpperX: min(op.Cuboid.UpperX, existing.UpperX),
					LowerY: existing.LowerY,
					UpperY: op.Cuboid.LowerY - 1,
					LowerZ: existing.LowerZ,
					UpperZ: existing.UpperZ,
				}
				cuboidsAfterOp = append(cuboidsAfterOp, cube)
			}
			// Slab above op on the Y axis (X clamped to the overlap).
			if op.Cuboid.UpperY < existing.UpperY {
				cube := geometry.Cuboid{
					LowerX: max(op.Cuboid.LowerX, existing.LowerX),
					UpperX: min(op.Cuboid.UpperX, existing.UpperX),
					LowerY: op.Cuboid.UpperY + 1,
					UpperY: existing.UpperY,
					LowerZ: existing.LowerZ,
					UpperZ: existing.UpperZ,
				}
				cuboidsAfterOp = append(cuboidsAfterOp, cube)
			}
			// Slab below op on the Z axis; X and Y both clamped to the overlap.
			if op.Cuboid.LowerZ > existing.LowerZ {
				cube := geometry.Cuboid{
					LowerX: max(op.Cuboid.LowerX, existing.LowerX),
					UpperX: min(op.Cuboid.UpperX, existing.UpperX),
					LowerY: max(op.Cuboid.LowerY, existing.LowerY),
					UpperY: min(op.Cuboid.UpperY, existing.UpperY),
					LowerZ: existing.LowerZ,
					UpperZ: op.Cuboid.LowerZ - 1,
				}
				cuboidsAfterOp = append(cuboidsAfterOp, cube)
			}
			// Slab above op on the Z axis (X and Y clamped to the overlap).
			if op.Cuboid.UpperZ < existing.UpperZ {
				cube := geometry.Cuboid{
					LowerX: max(op.Cuboid.LowerX, existing.LowerX),
					UpperX: min(op.Cuboid.UpperX, existing.UpperX),
					LowerY: max(op.Cuboid.LowerY, existing.LowerY),
					UpperY: min(op.Cuboid.UpperY, existing.UpperY),
					LowerZ: op.Cuboid.UpperZ + 1,
					UpperZ: existing.UpperZ,
				}
				cuboidsAfterOp = append(cuboidsAfterOp, cube)
			}
		}
		// "on" operations contribute their whole cuboid; "off" operations
		// contribute nothing (the carved region simply stays removed).
		if op.On {
			cuboidsAfterOp = append(cuboidsAfterOp, op.Cuboid)
		}
		cuboids = cuboidsAfterOp
	}
	totalVol := uint64(0)
	for _, x := range cuboids {
		totalVol += uint64(x.Volume())
	}
	return totalVol, nil
}
// min returns the smaller of a and b.
func min(a, b int64) int64 {
	if a > b {
		return b
	}
	return a
}
// max returns the larger of a and b.
func max(a, b int64) int64 {
	if a < b {
		return b
	}
	return a
}
// notIntersect reports whether the two cuboids share no cells: they fail to
// overlap on at least one of the three axes.
func notIntersect(c1, c2 geometry.Cuboid) bool {
	overlapX := c1.LowerX <= c2.UpperX && c2.LowerX <= c1.UpperX
	overlapY := c1.LowerY <= c2.UpperY && c2.LowerY <= c1.UpperY
	overlapZ := c1.LowerZ <= c2.UpperZ && c2.LowerZ <= c1.UpperZ
	return !(overlapX && overlapY && overlapZ)
}
// valid reports whether num lies in the inclusive range [-50, 50].
// (Unused within this file; presumably shared with the part-one solver.)
func valid(num int64) bool {
	if num < -50 || num > 50 {
		return false
	}
	return true
}
package pcapng
import (
"bytes"
"encoding/hex"
"fmt"
"github.com/bearmini/pcapng-go/pcapng/blocktype"
"github.com/pkg/errors"
)
/*
4.4. Simple Packet Block
The Simple Packet Block (SPB) is a lightweight container for storing
the packets coming from the network. Its presence is optional.
A Simple Packet Block is similar to an Enhanced Packet Block (see
Section 4.3), but it is smaller, simpler to process and contains only
a minimal set of information. This block is preferred to the
standard Enhanced Packet Block when performance or space occupation
are critical factors, such as in sustained traffic capture
applications. A capture file can contain both Enhanced Packet Blocks
and Simple Packet Blocks: for example, a capture tool could switch
from Enhanced Packet Blocks to Simple Packet Blocks when the hardware
resources become critical.
The Simple Packet Block does not contain the Interface ID field.
Therefore, it MUST be assumed that all the Simple Packet Blocks have
been captured on the interface previously specified in the first
Interface Description Block.
Figure 12 shows the format of the Simple Packet Block.
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+---------------------------------------------------------------+
0 | Block Type = 0x00000003 |
+---------------------------------------------------------------+
4 | Block Total Length |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
8 | Original Packet Length |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
12 / /
/ Packet Data /
/ variable length, padded to 32 bits /
/ /
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Block Total Length |
+---------------------------------------------------------------+
Figure 12: Simple Packet Block Format
The Simple Packet Block has the following fields:
o Block Type: The block type of the Simple Packet Block is 3.
o Block Total Length: total size of this block, as described in
Section 3.1.
o Original Packet Length: actual length of the packet when it was
transmitted on the network. It can be different from length of
the Packet Data field's length if the packet has been truncated by
the capture process, in which case the SnapLen value in
Section 4.2 will be less than this Original Packet Length value,
and the SnapLen value MUST be used to determine the size of the
Packet Data field length.
o Packet Data: the data coming from the network, including link-
layer headers. The length of this field can be derived from the
field Block Total Length, present in the Block Header, and it is
the minimum value among the SnapLen (present in the Interface
Description Block) and the Original Packet Length (present in this
header). The format of the data within this Packet Data field
depends on the LinkType field specified in the Interface
Description Block (see Section 4.2) and it is specified in the
entry for that format in the tcpdump.org link-layer header types
registry [3].
The Simple Packet Block does not contain the timestamp because this
is often one of the most costly operations on PCs. Additionally,
there are applications that do not require it; e.g. an Intrusion
Detection System is interested in packets, not in their timestamp.
A Simple Packet Block cannot be present in a Section that has more
than one interface because of the impossibility to refer to the
correct one (it does not contain any Interface ID field).
The Simple Packet Block is very efficient in term of disk space: a
snapshot whose length is 100 octets requires only 16 octets of
overhead, which corresponds to an efficiency of more than 86%.
*/
// SimplePacketBlock is a decoded Simple Packet Block (SPB, block type 3): a
// lightweight packet container carrying no timestamp, options, or interface
// ID. See the format description in the comment above.
type SimplePacketBlock struct {
	BlockType            blocktype.BlockType // always blocktype.SimplePacket
	BlockTotalLength     uint32              // total block size in octets, from the block header
	OriginalPacketLength uint32              // packet length as transmitted on the network
	PacketData           []byte              // captured packet bytes, including link-layer headers
}
// String renders the block as a compact, single-line, human-readable summary
// with the packet data hex-encoded.
func (b *SimplePacketBlock) String() string {
	typeName := b.BlockType.String()
	payload := hex.EncodeToString(b.PacketData)
	return fmt.Sprintf("%s block_len:%d orig_len:%d data:%s",
		typeName, b.BlockTotalLength, b.OriginalPacketLength, payload)
}
// GetType returns the block's type (always blocktype.SimplePacket).
func (b *SimplePacketBlock) GetType() blocktype.BlockType {
	return b.BlockType
}
// parseSimplePacketBlock decodes a Simple Packet Block body (the bytes after
// the Block Type and Block Total Length fields) using the reader's detected
// endianness. blockTotalLength is the value taken from the block header.
func (r *Reader) parseSimplePacketBlock(blockTotalLength uint32, bodyBytes []byte) (*SimplePacketBlock, error) {
	br := newEndiannessAwareReader(r.endian, bytes.NewReader(bodyBytes))
	// Original Packet Length: the packet's on-the-wire length.
	opl, err := br.readUint32()
	if err != nil {
		return nil, errors.Wrap(err, "unable to read original packet length")
	}
	// NOTE(review): per the spec quoted above, the captured data length is the
	// *minimum* of the interface SnapLen and the Original Packet Length.
	// Reading exactly opl bytes assumes the packet was not truncated by the
	// capture; for a snapped packet (SnapLen < opl) this read would fail or
	// consume padding — confirm against truncated captures.
	data, err := br.readBytes(uint(opl))
	if err != nil {
		return nil, errors.Wrap(err, "unable to read packet data")
	}
	return &SimplePacketBlock{
		BlockType:            blocktype.SimplePacket,
		BlockTotalLength:     blockTotalLength,
		OriginalPacketLength: opl,
		PacketData:           data,
	}, nil
}
package validator
// ErrorTemplates returns a map of validation tags with error messages as html
// templates. Each template may reference {{.Namespace}} (the field's
// namespaced name), {{.Param}} (the tag parameter, if any) and {{.Kind}} (the
// field's reflect.Kind name, as a string).
//
// BUG FIX: in every length-style template (min/max/len/gt/gte/lt/lte and the
// *field variants) the inner branch previously tested
// `or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "String")`. The "String"
// term there was dead (the outer branch already handles strings) and "Slice"
// was missing, so slice fields got the numeric wording ("be 3 or less")
// instead of the collection wording ("contain at maximum 3 items"). The inner
// term is now `(eq .Kind "Slice")`.
func ErrorTemplates() map[string]string {
	return map[string]string{
		"alpha":                   `{{.Namespace}} can only contain alphabetic characters`,
		"alphanum":                `{{.Namespace}} can only contain alphanumeric characters`,
		"alphanumunicode":         `{{.Namespace}} can only contain unicode alphanumeric characters`,
		"alphaunicode":            `{{.Namespace}} can only contain unicode alphabetic characters`,
		"ascii":                   `{{.Namespace}} must contain only ascii characters`,
		"base64":                  `{{.Namespace}} must be a valid Base64 string`,
		"base64url":               `{{.Namespace}} must be a valid Base64 URL string`,
		"btc_addr":                `{{.Namespace}} must be a valid Bitcoin address`,
		"btc_addr_bech32":         `{{.Namespace}} must be a valid bech32 Bitcoin address`,
		"cidr":                    `{{.Namespace}} must contain a valid CIDR notation`,
		"cidrv4":                  `{{.Namespace}} must contain a valid CIDR notation for an IPv4 address`,
		"cidrv6":                  `{{.Namespace}} must contain a valid CIDR notation for an IPv6 address`,
		"contains":                `{{.Namespace}} must contain the text '{{.Param}}'`,
		"containsany":             `{{.Namespace}} must contain at least one of the following characters '{{.Param}}'`,
		"containsrune":            `{{.Namespace}} must contain the following '{{.Param}}'`,
		"datauri":                 `{{.Namespace}} must contain a valid Data URI`,
		"datetime":                `{{.Namespace}} does not match the {{.Param}} format`,
		"dir":                     `{{.Namespace}} must be a valid directory`,
		"e164":                    `{{.Namespace}} must be a valid E.164 formatted phone number`,
		"e164noplus":              `{{.Namespace}} must be a valid E.164 formatted phone number without the leading '+' symbol`,
		"ein":                     `{{.Namespace}} must be a valid US EIN tax code`,
		"email":                   `{{.Namespace}} must be a valid email address`,
		"endsnotwith":             `{{.Namespace}} must not end with {{.Param}}`,
		"endswith":                `{{.Namespace}} must end with {{.Param}}`,
		"eq":                      `{{.Namespace}} must be equal to {{.Param}}`,
		"eqcsfield":               `{{.Namespace}} must be equal to {{.Param}}`,
		"eqfield":                 `{{.Namespace}} must be equal to {{.Param}}`,
		"eth_addr":                `{{.Namespace}} must be a valid Ethereum address`,
		"excluded_with":           `{{.Namespace}} must not be present or it must be empty`,
		"excluded_with_all":       `{{.Namespace}} must not be present or it must be empty`,
		"excluded_without":        `{{.Namespace}} must not be present or it must be empty`,
		"excluded_without_all":    `{{.Namespace}} must not be present or it must be empty`,
		"excludes":                `{{.Namespace}} cannot contain the text '{{.Param}}'`,
		"excludesall":             `{{.Namespace}} cannot contain any of the following characters '{{.Param}}'`,
		"excludesrune":            `{{.Namespace}} cannot contain the following '{{.Param}}'`,
		"fieldcontains":           `{{.Namespace}} must contain the field {{.Param}}`,
		"fieldexcludes":           `{{.Namespace}} must not contain the field {{.Param}}`,
		"file":                    `{{.Namespace}} must be a valid file path`,
		"fqdn":                    `{{.Namespace}} must be a Fully Qualified Domain Name (FQDN)`,
		"gt":                      `{{.Namespace}} must {{ if (eq .Kind "String") }}be greater than {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain more than {{.Param}} items{{ else }}be greater than {{.Param}}{{ end }}{{ end }}`,
		"gtcsfield":               `{{.Namespace}} must {{ if (eq .Kind "String") }}be greater than {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain more than {{.Param}} items{{ else }}be greater than {{.Param}}{{ end }}{{ end }}`,
		"gte":                     `{{.Namespace}} must {{ if (eq .Kind "String") }}be at least {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain at least {{.Param}} items{{ else }}be {{.Param}} or greater{{ end }}{{ end }}`,
		"gtecsfield":              `{{.Namespace}} must {{ if (eq .Kind "String") }}be at least {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain at least {{.Param}} items{{ else }}be {{.Param}} or greater{{ end }}{{ end }}`,
		"gtefield":                `{{.Namespace}} must {{ if (eq .Kind "String") }}be at least {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain at least {{.Param}} items{{ else }}be {{.Param}} or greater{{ end }}{{ end }}`,
		"gtfield":                 `{{.Namespace}} must {{ if (eq .Kind "String") }}be greater than {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain more than {{.Param}} items{{ else }}be greater than {{.Param}}{{ end }}{{ end }}`,
		"hexadecimal":             `{{.Namespace}} must be a valid hexadecimal`,
		"hexcolor":                `{{.Namespace}} must be a valid HEX color`,
		"hostname":                `{{.Namespace}} must be a valid hostname as per RFC 952`,
		"hostname_port":           `{{.Namespace}} must be in the format DNS:PORT`,
		"hostname_rfc1123":        `{{.Namespace}} must be a valid hostname as per RFC 1123`,
		"hsl":                     `{{.Namespace}} must be a valid HSL color`,
		"hsla":                    `{{.Namespace}} must be a valid HSLA color`,
		"html":                    `{{.Namespace}} must be valid HTML`,
		"html_encoded":            `{{.Namespace}} must be HTML-encoded`,
		"ip":                      `{{.Namespace}} must be a valid IP address`,
		"ip4_addr":                `{{.Namespace}} must be a resolvable IPv4 address`,
		"ip6_addr":                `{{.Namespace}} must be a resolvable IPv6 address`,
		"ip_addr":                 `{{.Namespace}} must be a resolvable IP address`,
		"ipv4":                    `{{.Namespace}} must be a valid IPv4 address`,
		"ipv6":                    `{{.Namespace}} must be a valid IPv6 address`,
		"isbn":                    `{{.Namespace}} must be a valid ISBN number`,
		"isbn10":                  `{{.Namespace}} must be a valid ISBN-10 number`,
		"isbn13":                  `{{.Namespace}} must be a valid ISBN-13 number`,
		"iscolor":                 `{{.Namespace}} must be a valid color`,
		"isdefault":               `{{.Namespace}} must not be present or it must be empty`,
		"iso3166_1_alpha2":        `{{.Namespace}} must be a valid iso3166-1 alpha-2 country code`,
		"iso3166_1_alpha3":        `{{.Namespace}} must be a valid iso3166-1 alpha-3 country code`,
		"iso3166_1_alpha_numeric": `{{.Namespace}} must be a valid iso3166-1 alpha-numeric country code`,
		"json":                    `{{.Namespace}} must be a valid json string`,
		"latitude":                `{{.Namespace}} must contain valid latitude coordinates`,
		"len":                     `{{.Namespace}} must {{ if (eq .Kind "String") }}be {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain {{.Param}} items{{ else }}be equal to {{.Param}}{{ end }}{{ end }}`,
		"longitude":               `{{.Namespace}} must contain a valid longitude coordinates`,
		"lowercase":               `{{.Namespace}} must be a lowercase string`,
		"lt":                      `{{.Namespace}} must {{ if (eq .Kind "String") }}be less than {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain less than {{.Param}} items{{ else }}be less than {{.Param}}{{ end }}{{ end }}`,
		"ltcsfield":               `{{.Namespace}} must {{ if (eq .Kind "String") }}be less than {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain less than {{.Param}} items{{ else }}be less than {{.Param}}{{ end }}{{ end }}`,
		"lte":                     `{{.Namespace}} must {{ if (eq .Kind "String") }}be a maximum of {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain at maximum {{.Param}} items{{ else }}be {{.Param}} or less{{ end }}{{ end }}`,
		"ltecsfield":              `{{.Namespace}} must {{ if (eq .Kind "String") }}be a maximum of {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain at maximum {{.Param}} items{{ else }}be {{.Param}} or less{{ end }}{{ end }}`,
		"ltefield":                `{{.Namespace}} must {{ if (eq .Kind "String") }}be a maximum of {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain at maximum {{.Param}} items{{ else }}be {{.Param}} or less{{ end }}{{ end }}`,
		"ltfield":                 `{{.Namespace}} must {{ if (eq .Kind "String") }}be less than {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain less than {{.Param}} items{{ else }}be less than {{.Param}}{{ end }}{{ end }}`,
		"mac":                     `{{.Namespace}} must contain a valid MAC address`,
		"max":                     `{{.Namespace}} must {{ if (eq .Kind "String") }}be a maximum of {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain at maximum {{.Param}} items{{ else }}be {{.Param}} or less{{ end }}{{ end }}`,
		"min":                     `{{.Namespace}} must {{ if (eq .Kind "String") }}be at least {{.Param}} characters in length{{ else }}{{ if or (eq .Kind "Array") (eq .Kind "Map") (eq .Kind "Slice") }}contain at least {{.Param}} items{{ else }}be {{.Param}} or greater{{ end }}{{ end }}`,
		"multibyte":               `{{.Namespace}} must contain multibyte characters`,
		"ne":                      `{{.Namespace}} must be different than {{.Param}}`,
		"necsfield":               `{{.Namespace}} must be different than {{.Param}}`,
		"nefield":                 `{{.Namespace}} must be different than {{.Param}}`,
		"number":                  `{{.Namespace}} must be a valid number`,
		"numeric":                 `{{.Namespace}} must be a valid numeric value`,
		"oneof":                   `{{.Namespace}} must be one of {{.Param}}`,
		"printascii":              `{{.Namespace}} must contain only printable ascii characters`,
		"required":                `{{.Namespace}} is required`,
		"required_if":             `{{.Namespace}} is required`,
		"required_unless":         `{{.Namespace}} is required`,
		"required_with":           `{{.Namespace}} is required`,
		"required_with_all":       `{{.Namespace}} is required`,
		"required_without":        `{{.Namespace}} is required`,
		"required_without_all":    `{{.Namespace}} is required`,
		"rgb":                     `{{.Namespace}} must be a valid RGB color`,
		"rgba":                    `{{.Namespace}} must be a valid RGBA color`,
		"ssn":                     `{{.Namespace}} must be a valid US SSN number`,
		"startsnotwith":           `{{.Namespace}} must not start with {{.Param}}`,
		"startswith":              `{{.Namespace}} must start with {{.Param}}`,
		"tcp4_addr":               `{{.Namespace}} must be a valid IPv4 TCP address`,
		"tcp6_addr":               `{{.Namespace}} must be a valid IPv6 TCP address`,
		"tcp_addr":                `{{.Namespace}} must be a valid TCP address`,
		"timezone":                `{{.Namespace}} must be a valid time zone string`,
		"udp4_addr":               `{{.Namespace}} must be a valid IPv4 UDP address`,
		"udp6_addr":               `{{.Namespace}} must be a valid IPv6 UDP address`,
		"udp_addr":                `{{.Namespace}} must be a valid UDP address`,
		"unique":                  `{{.Namespace}} must contain unique values`,
		"unix_addr":               `{{.Namespace}} must be a resolvable UNIX address`,
		"uppercase":               `{{.Namespace}} must be an uppercase string`,
		"uri":                     `{{.Namespace}} must be a valid URI`,
		"url":                     `{{.Namespace}} must be a valid URL`,
		"url_encoded":             `{{.Namespace}} must be URL-encoded`,
		"urn_rfc2141":             `{{.Namespace}} must be a valid URN as per RFC 2141`,
		"usstate":                 `{{.Namespace}} must be a valid 2-letter US state`,
		"usterritory":             `{{.Namespace}} must be a valid 2-letter US territory`,
		"uuid":                    `{{.Namespace}} must be a valid UUID`,
		"uuid3":                   `{{.Namespace}} must be a valid version 3 UUID`,
		"uuid3_rfc4122":           `{{.Namespace}} must be a valid version 3 UUID as per RFC 4122`,
		"uuid4":                   `{{.Namespace}} must be a valid version 4 UUID`,
		"uuid4_rfc4122":           `{{.Namespace}} must be a valid version 4 UUID as per RFC 4122`,
		"uuid5":                   `{{.Namespace}} must be a valid version 5 UUID`,
		"uuid5_rfc4122":           `{{.Namespace}} must be a valid version 5 UUID as per RFC 4122`,
		"uuid_rfc4122":            `{{.Namespace}} must be a valid UUID as per RFC 4122`,
		"zipcode":                 `{{.Namespace}} must be a valid US ZIP code`,
	}
}
package broker
import (
"time"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
)
//------------------------------------------------------------------------------
// FanIn is a broker that implements types.Producer, takes an array of inputs
// and routes them through a single message channel.
type FanIn struct {
	stats metrics.Type
	// transactions is the single merged output channel fed by all inputs.
	transactions chan types.Transaction
	// closables holds every input that also implements types.Closable so it
	// can be shut down via CloseAsync.
	closables []types.Closable
	// inputClosedChan receives the index of each input whose channel closes.
	inputClosedChan chan int
	// inputMap tracks the indexes of inputs that are still open.
	inputMap map[int]struct{}
	// closedChan is closed by loop once every input has terminated.
	closedChan chan struct{}
}
// NewFanIn creates a new FanIn type by providing inputs.
// One goroutine is launched per input to forward its transactions onto the
// shared channel, plus one supervisor goroutine (loop) that waits for all
// of them to finish before closing the broker's channels.
func NewFanIn(inputs []types.Producer, stats metrics.Type) (*FanIn, error) {
	i := &FanIn{
		stats:           stats,
		transactions:    make(chan types.Transaction),
		inputClosedChan: make(chan int),
		inputMap:        make(map[int]struct{}),
		closables:       []types.Closable{},
		closedChan:      make(chan struct{}),
	}
	for n, input := range inputs {
		// Only inputs that expose a close API are tracked for shutdown.
		if closable, ok := input.(types.Closable); ok {
			i.closables = append(i.closables, closable)
		}
		// Keep track of # open inputs
		i.inputMap[n] = struct{}{}
		// Launch goroutine that async writes input into single channel
		go func(index int) {
			defer func() {
				// If the input closes we need to signal to the broker
				i.inputClosedChan <- index
			}()
			for {
				in, open := <-inputs[index].TransactionChan()
				if !open {
					return
				}
				i.transactions <- in
			}
		}(n)
	}
	go i.loop()
	return i, nil
}
//------------------------------------------------------------------------------
// TransactionChan returns the channel used for consuming transactions from
// this broker. The channel is closed once every input has terminated.
func (i *FanIn) TransactionChan() <-chan types.Transaction {
	return i.transactions
}
// Connected returns a boolean indicating whether this output is currently
// connected to its target. It reports true only if every input that exposes
// a Connected method reports true; inputs without one are ignored.
func (i *FanIn) Connected() bool {
	type connector interface {
		Connected() bool
	}
	for _, closable := range i.closables {
		c, ok := closable.(connector)
		if !ok {
			continue
		}
		if !c.Connected() {
			return false
		}
	}
	return true
}
//------------------------------------------------------------------------------
// loop is an internal loop that brokers incoming messages to many outputs.
// It blocks until every input goroutine has reported closure via
// inputClosedChan, then tears down the broker: closing transactions signals
// downstream consumers, and closing closedChan unblocks WaitForClose.
func (i *FanIn) loop() {
	defer func() {
		close(i.inputClosedChan)
		close(i.transactions)
		close(i.closedChan)
	}()
	for len(i.inputMap) > 0 {
		index := <-i.inputClosedChan
		delete(i.inputMap, index)
	}
}
// CloseAsync shuts down the FanIn broker and stops processing requests by
// asynchronously closing each tracked input.
func (i *FanIn) CloseAsync() {
	for n := range i.closables {
		i.closables[n].CloseAsync()
	}
}
// WaitForClose blocks until the FanIn broker has closed down, or until the
// timeout elapses, in which case types.ErrTimeout is returned.
func (i *FanIn) WaitForClose(timeout time.Duration) error {
	deadline := time.After(timeout)
	select {
	case <-deadline:
		return types.ErrTimeout
	case <-i.closedChan:
		return nil
	}
}
//------------------------------------------------------------------------------
package truetype
// The Truetype opcodes are summarized at
// https://developer.apple.com/fonts/TTRefMan/RM07/appendixA.html
const (
opSVTCA0 = 0x00 // Set freedom and projection Vectors To Coordinate Axis
opSVTCA1 = 0x01 // .
opSPVTCA0 = 0x02 // Set Projection Vector To Coordinate Axis
opSPVTCA1 = 0x03 // .
opSFVTCA0 = 0x04 // Set Freedom Vector to Coordinate Axis
opSFVTCA1 = 0x05 // .
opSPVTL0 = 0x06 // Set Projection Vector To Line
opSPVTL1 = 0x07 // .
opSFVTL0 = 0x08 // Set Freedom Vector To Line
opSFVTL1 = 0x09 // .
opSPVFS = 0x0a // Set Projection Vector From Stack
opSFVFS = 0x0b // Set Freedom Vector From Stack
opGPV = 0x0c // Get Projection Vector
opGFV = 0x0d // Get Freedom Vector
opSFVTPV = 0x0e // Set Freedom Vector To Projection Vector
opISECT = 0x0f // moves point p to the InterSECTion of two lines
opSRP0 = 0x10 // Set Reference Point 0
opSRP1 = 0x11 // Set Reference Point 1
opSRP2 = 0x12 // Set Reference Point 2
opSZP0 = 0x13 // Set Zone Pointer 0
opSZP1 = 0x14 // Set Zone Pointer 1
opSZP2 = 0x15 // Set Zone Pointer 2
opSZPS = 0x16 // Set Zone PointerS
opSLOOP = 0x17 // Set LOOP variable
opRTG = 0x18 // Round To Grid
opRTHG = 0x19 // Round To Half Grid
opSMD = 0x1a // Set Minimum Distance
opELSE = 0x1b // ELSE clause
opJMPR = 0x1c // JuMP Relative
opSCVTCI = 0x1d // Set Control Value Table Cut-In
opSSWCI = 0x1e // Set Single Width Cut-In
opSSW = 0x1f // Set Single Width
opDUP = 0x20 // DUPlicate top stack element
opPOP = 0x21 // POP top stack element
opCLEAR = 0x22 // CLEAR the stack
opSWAP = 0x23 // SWAP the top two elements on the stack
opDEPTH = 0x24 // DEPTH of the stack
opCINDEX = 0x25 // Copy the INDEXed element to the top of the stack
opMINDEX = 0x26 // Move the INDEXed element to the top of the stack
opALIGNPTS = 0x27 // ALIGN PoinTS
op_0x28 = 0x28 // deprecated
opUTP = 0x29 // UnTouch Point
opLOOPCALL = 0x2a // LOOP and CALL function
opCALL = 0x2b // CALL function
opFDEF = 0x2c // Function DEFinition
opENDF = 0x2d // END Function definition
opMDAP0 = 0x2e // Move Direct Absolute Point
opMDAP1 = 0x2f // .
opIUP0 = 0x30 // Interpolate Untouched Points through the outline
opIUP1 = 0x31 // .
opSHP0 = 0x32 // SHift Point using reference point
opSHP1 = 0x33 // .
opSHC0 = 0x34 // SHift Contour using reference point
opSHC1 = 0x35 // .
opSHZ0 = 0x36 // SHift Zone using reference point
opSHZ1 = 0x37 // .
opSHPIX = 0x38 // SHift point by a PIXel amount
opIP = 0x39 // Interpolate Point
opMSIRP0 = 0x3a // Move Stack Indirect Relative Point
opMSIRP1 = 0x3b // .
opALIGNRP = 0x3c // ALIGN to Reference Point
opRTDG = 0x3d // Round To Double Grid
opMIAP0 = 0x3e // Move Indirect Absolute Point
opMIAP1 = 0x3f // .
opNPUSHB = 0x40 // PUSH N Bytes
opNPUSHW = 0x41 // PUSH N Words
opWS = 0x42 // Write Store
opRS = 0x43 // Read Store
opWCVTP = 0x44 // Write Control Value Table in Pixel units
opRCVT = 0x45 // Read Control Value Table entry
opGC0 = 0x46 // Get Coordinate projected onto the projection vector
opGC1 = 0x47 // .
opSCFS = 0x48 // Sets Coordinate From the Stack using projection vector and freedom vector
opMD0 = 0x49 // Measure Distance
opMD1 = 0x4a // .
opMPPEM = 0x4b // Measure Pixels Per EM
opMPS = 0x4c // Measure Point Size
opFLIPON = 0x4d // set the auto FLIP Boolean to ON
opFLIPOFF = 0x4e // set the auto FLIP Boolean to OFF
opDEBUG = 0x4f // DEBUG call
opLT = 0x50 // Less Than
opLTEQ = 0x51 // Less Than or EQual
opGT = 0x52 // Greater Than
opGTEQ = 0x53 // Greater Than or EQual
opEQ = 0x54 // EQual
opNEQ = 0x55 // Not EQual
opODD = 0x56 // ODD
opEVEN = 0x57 // EVEN
opIF = 0x58 // IF test
opEIF = 0x59 // End IF
opAND = 0x5a // logical AND
opOR = 0x5b // logical OR
opNOT = 0x5c // logical NOT
opDELTAP1 = 0x5d // DELTA exception P1
opSDB = 0x5e // Set Delta Base in the graphics state
opSDS = 0x5f // Set Delta Shift in the graphics state
opADD = 0x60 // ADD
opSUB = 0x61 // SUBtract
opDIV = 0x62 // DIVide
opMUL = 0x63 // MULtiply
opABS = 0x64 // ABSolute value
opNEG = 0x65 // NEGate
opFLOOR = 0x66 // FLOOR
opCEILING = 0x67 // CEILING
opROUND00 = 0x68 // ROUND value
opROUND01 = 0x69 // .
opROUND10 = 0x6a // .
opROUND11 = 0x6b // .
opNROUND00 = 0x6c // No ROUNDing of value
opNROUND01 = 0x6d // .
opNROUND10 = 0x6e // .
opNROUND11 = 0x6f // .
opWCVTF = 0x70 // Write Control Value Table in Funits
opDELTAP2 = 0x71 // DELTA exception P2
opDELTAP3 = 0x72 // DELTA exception P3
opDELTAC1 = 0x73 // DELTA exception C1
opDELTAC2 = 0x74 // DELTA exception C2
opDELTAC3 = 0x75 // DELTA exception C3
opSROUND = 0x76 // Super ROUND
opS45ROUND = 0x77 // Super ROUND 45 degrees
opJROT = 0x78 // Jump Relative On True
opJROF = 0x79 // Jump Relative On False
opROFF = 0x7a // Round OFF
op_0x7b = 0x7b // deprecated
opRUTG = 0x7c // Round Up To Grid
opRDTG = 0x7d // Round Down To Grid
opSANGW = 0x7e // Set ANGle Weight
opAA = 0x7f // Adjust Angle
opFLIPPT = 0x80 // FLIP PoinT
opFLIPRGON = 0x81 // FLIP RanGe ON
opFLIPRGOFF = 0x82 // FLIP RanGe OFF
op_0x83 = 0x83 // deprecated
op_0x84 = 0x84 // deprecated
opSCANCTRL = 0x85 // SCAN conversion ConTRoL
opSDPVTL0 = 0x86 // Set Dual Projection Vector To Line
opSDPVTL1 = 0x87 // .
opGETINFO = 0x88 // GET INFOrmation
opIDEF = 0x89 // Instruction DEFinition
opROLL = 0x8a // ROLL the top three stack elements
opMAX = 0x8b // MAXimum of top two stack elements
opMIN = 0x8c // MINimum of top two stack elements
opSCANTYPE = 0x8d // SCANTYPE
opINSTCTRL = 0x8e // INSTRuction execution ConTRoL
op_0x8f = 0x8f
op_0x90 = 0x90
op_0x91 = 0x91
op_0x92 = 0x92
op_0x93 = 0x93
op_0x94 = 0x94
op_0x95 = 0x95
op_0x96 = 0x96
op_0x97 = 0x97
op_0x98 = 0x98
op_0x99 = 0x99
op_0x9a = 0x9a
op_0x9b = 0x9b
op_0x9c = 0x9c
op_0x9d = 0x9d
op_0x9e = 0x9e
op_0x9f = 0x9f
op_0xa0 = 0xa0
op_0xa1 = 0xa1
op_0xa2 = 0xa2
op_0xa3 = 0xa3
op_0xa4 = 0xa4
op_0xa5 = 0xa5
op_0xa6 = 0xa6
op_0xa7 = 0xa7
op_0xa8 = 0xa8
op_0xa9 = 0xa9
op_0xaa = 0xaa
op_0xab = 0xab
op_0xac = 0xac
op_0xad = 0xad
op_0xae = 0xae
op_0xaf = 0xaf
opPUSHB000 = 0xb0 // PUSH Bytes
opPUSHB001 = 0xb1 // .
opPUSHB010 = 0xb2 // .
opPUSHB011 = 0xb3 // .
opPUSHB100 = 0xb4 // .
opPUSHB101 = 0xb5 // .
opPUSHB110 = 0xb6 // .
opPUSHB111 = 0xb7 // .
opPUSHW000 = 0xb8 // PUSH Words
opPUSHW001 = 0xb9 // .
opPUSHW010 = 0xba // .
opPUSHW011 = 0xbb // .
opPUSHW100 = 0xbc // .
opPUSHW101 = 0xbd // .
opPUSHW110 = 0xbe // .
opPUSHW111 = 0xbf // .
opMDRP00000 = 0xc0 // Move Direct Relative Point
opMDRP00001 = 0xc1 // .
opMDRP00010 = 0xc2 // .
opMDRP00011 = 0xc3 // .
opMDRP00100 = 0xc4 // .
opMDRP00101 = 0xc5 // .
opMDRP00110 = 0xc6 // .
opMDRP00111 = 0xc7 // .
opMDRP01000 = 0xc8 // .
opMDRP01001 = 0xc9 // .
opMDRP01010 = 0xca // .
opMDRP01011 = 0xcb // .
opMDRP01100 = 0xcc // .
opMDRP01101 = 0xcd // .
opMDRP01110 = 0xce // .
opMDRP01111 = 0xcf // .
opMDRP10000 = 0xd0 // .
opMDRP10001 = 0xd1 // .
opMDRP10010 = 0xd2 // .
opMDRP10011 = 0xd3 // .
opMDRP10100 = 0xd4 // .
opMDRP10101 = 0xd5 // .
opMDRP10110 = 0xd6 // .
opMDRP10111 = 0xd7 // .
opMDRP11000 = 0xd8 // .
opMDRP11001 = 0xd9 // .
opMDRP11010 = 0xda // .
opMDRP11011 = 0xdb // .
opMDRP11100 = 0xdc // .
opMDRP11101 = 0xdd // .
opMDRP11110 = 0xde // .
opMDRP11111 = 0xdf // .
opMIRP00000 = 0xe0 // Move Indirect Relative Point
opMIRP00001 = 0xe1 // .
opMIRP00010 = 0xe2 // .
opMIRP00011 = 0xe3 // .
opMIRP00100 = 0xe4 // .
opMIRP00101 = 0xe5 // .
opMIRP00110 = 0xe6 // .
opMIRP00111 = 0xe7 // .
opMIRP01000 = 0xe8 // .
opMIRP01001 = 0xe9 // .
opMIRP01010 = 0xea // .
opMIRP01011 = 0xeb // .
opMIRP01100 = 0xec // .
opMIRP01101 = 0xed // .
opMIRP01110 = 0xee // .
opMIRP01111 = 0xef // .
opMIRP10000 = 0xf0 // .
opMIRP10001 = 0xf1 // .
opMIRP10010 = 0xf2 // .
opMIRP10011 = 0xf3 // .
opMIRP10100 = 0xf4 // .
opMIRP10101 = 0xf5 // .
opMIRP10110 = 0xf6 // .
opMIRP10111 = 0xf7 // .
opMIRP11000 = 0xf8 // .
opMIRP11001 = 0xf9 // .
opMIRP11010 = 0xfa // .
opMIRP11011 = 0xfb // .
opMIRP11100 = 0xfc // .
opMIRP11101 = 0xfd // .
opMIRP11110 = 0xfe // .
opMIRP11111 = 0xff // .
)
// popCount is the number of stack elements that each opcode pops.
// Indexed directly by opcode byte (see the constants above); for example
// popCount[opADD] == 2 and popCount[opDUP] == 1. Entries of 0 cover both
// opcodes that pop nothing and opcodes whose operands come from the
// instruction stream rather than the stack (e.g. the NPUSHB/PUSHB/PUSHW
// families).
var popCount = [256]uint8{
	// 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, a, b, c, d, e, f
	0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 0, 0, 0, 5, // 0x00 - 0x0f
	1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, // 0x10 - 0x1f
	1, 1, 0, 2, 0, 1, 1, 2, 0, 1, 2, 1, 1, 0, 1, 1, // 0x20 - 0x2f
	0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 2, 2, 0, 0, 2, 2, // 0x30 - 0x3f
	0, 0, 2, 1, 2, 1, 1, 1, 2, 2, 2, 0, 0, 0, 0, 0, // 0x40 - 0x4f
	2, 2, 2, 2, 2, 2, 1, 1, 1, 0, 2, 2, 1, 1, 1, 1, // 0x50 - 0x5f
	2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0x60 - 0x6f
	2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 0, 0, 0, 0, 1, 1, // 0x70 - 0x7f
	0, 2, 2, 0, 0, 1, 2, 2, 1, 1, 3, 2, 2, 1, 2, 0, // 0x80 - 0x8f
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0x90 - 0x9f
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0xa0 - 0xaf
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // 0xb0 - 0xbf
	1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0xc0 - 0xcf
	1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 0xd0 - 0xdf
	2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // 0xe0 - 0xef
	2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, // 0xf0 - 0xff
}
package bmp
// x holds reference documentation (quoted from Microsoft's BMP format
// documentation) describing the BI_RLE8 and BI_RLE4 run-length encodings
// that this package decodes. It is kept as a string purely for reference
// and is not used at runtime.
var x = `
BI_RLE8
When the biCompression member is set to BI_RLE8, the bitmap is compressed
using a run-length encoding format for an 8-bit bitmap. This format may be
compressed in either of two modes: encoded and absolute. Both modes can occur
anywhere throughout a single bitmap.
Encoded mode consists of two bytes: the first byte specifies the number of
consecutive pixels to be drawn using the color index contained in the second
byte. In addition, the first byte of the pair can be set to zero to indicate
an escape that denotes an end of line, end of bitmap, or a delta. The
interpretation of the escape depends on the value of the second byte of the
pair. The following list shows the meaning of the second byte:
Value Meaning
0 End of line.
1 End of bitmap.
2 Delta. The two bytes following the escape contain unsigned values
indicating the horizontal and vertical offset of the next pixel from the
current position.
Absolute mode is signaled by the first byte set to zero and the second byte
set to a value between 0x03 and 0xFF. In absolute mode, the second byte
represents the number of bytes that follow, each of which contains the color
index of a single pixel. When the second byte is set to 2 or less, the escape
has the same meaning as in encoded mode. In absolute mode, each run must be
aligned on a word boundary. The following example shows the hexadecimal
values of an 8-bit compressed bitmap:
BI_RLE4
When the biCompression member is set to BI_RLE4, the bitmap is compressed
using a run-length encoding (RLE) format for a 4-bit bitmap, which also uses
encoded and absolute modes. In encoded mode, the first byte of the pair
contains the number of pixels to be drawn using the color indexes in the
second byte. The second byte contains two color indexes, one in its
high-order nibble (that is, its low-order four bits) and one in its low-order
nibble. The first of the pixels is drawn using the color specified by the
high-order nibble, the second is drawn using the color in the low-order
nibble, the third is drawn with the color in the high-order nibble, and so
on, until all the pixels specified by the first byte have been drawn. In
absolute mode, the first byte contains zero, the second byte contains the
number of color indexes that follow, and subsequent bytes contain color
indexes in their high- and low-order nibbles, one color index for each pixel.
In absolute mode, each run must be aligned on a word boundary. The
end-of-line, end-of-bitmap, and delta escapes also apply to BI_RLE4.
`
package function
import (
"fmt"
"strings"
errors "gopkg.in/src-d/go-errors.v1"
"github.com/dolthub/go-mysql-server/sql"
"github.com/dolthub/go-mysql-server/sql/expression"
)
// STX is a function that returns the x value from a given point.
// With a second argument it instead returns a new point whose x value is
// replaced by that argument (see Eval).
type STX struct {
	expression.NaryExpression
}
// Compile-time assertion that STX implements sql.FunctionExpression.
var _ sql.FunctionExpression = (*STX)(nil)
// ErrInvalidType is returned when an argument is not a point. It is shared
// by the other point accessors in this file.
var ErrInvalidType = errors.NewKind("%s received non-point type")
// NewSTX creates a new STX expression.
// Accepts one argument (read x) or two arguments (replace x).
func NewSTX(args ...sql.Expression) (sql.Expression, error) {
	if len(args) != 1 && len(args) != 2 {
		return nil, sql.ErrInvalidArgumentNumber.New("ST_X", "1 or 2", len(args))
	}
	return &STX{expression.NaryExpression{ChildExpressions: args}}, nil
}
// FunctionName implements sql.FunctionExpression
func (s *STX) FunctionName() string {
	return "st_x"
}
// Description implements sql.FunctionExpression
func (s *STX) Description() string {
	return "returns the x value of given point. If given a second argument, returns a new point with second argument as x value."
}
// Type implements the sql.Expression interface.
// One argument yields a float (the x value); two arguments yield a point.
func (s *STX) Type() sql.Type {
	if len(s.ChildExpressions) == 1 {
		return sql.Float64
	} else {
		return sql.PointType{}
	}
}
// String renders the expression as ST_X(arg[,arg]).
func (s *STX) String() string {
	var args = make([]string, len(s.ChildExpressions))
	for i, arg := range s.ChildExpressions {
		args[i] = arg.String()
	}
	return fmt.Sprintf("ST_X(%s)", strings.Join(args, ","))
}
// WithChildren implements the Expression interface.
func (s *STX) WithChildren(children ...sql.Expression) (sql.Expression, error) {
	return NewSTX(children...)
}
// Eval implements the sql.Expression interface.
// Returns nil (SQL NULL) if either argument evaluates to NULL.
func (s *STX) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	// Evaluate point
	p, err := s.ChildExpressions[0].Eval(ctx, row)
	if err != nil {
		return nil, err
	}
	// Return null if geometry is null
	if p == nil {
		return nil, nil
	}
	// Check that it is a point
	_p, ok := p.(sql.Point)
	if !ok {
		return nil, ErrInvalidType.New(s.FunctionName())
	}
	// If just one argument, return X
	if len(s.ChildExpressions) == 1 {
		return _p.X, nil
	}
	// Evaluate second argument
	x, err := s.ChildExpressions[1].Eval(ctx, row)
	if err != nil {
		return nil, err
	}
	// Return null if second argument is null
	if x == nil {
		return nil, nil
	}
	// Convert to float64
	_x, err := sql.Float64.Convert(x)
	if err != nil {
		return nil, err
	}
	// Create point with new X and old Y
	return sql.Point{SRID: _p.SRID, X: _x.(float64), Y: _p.Y}, nil
}
// STY is a function that returns the y value from a given point.
// With a second argument it instead returns a new point whose y value is
// replaced by that argument (see Eval).
type STY struct {
	expression.NaryExpression
}
// Compile-time assertion that STY implements sql.FunctionExpression.
var _ sql.FunctionExpression = (*STY)(nil)
// NewSTY creates a new STY expression.
// Accepts one argument (read y) or two arguments (replace y).
func NewSTY(args ...sql.Expression) (sql.Expression, error) {
	if len(args) != 1 && len(args) != 2 {
		return nil, sql.ErrInvalidArgumentNumber.New("ST_Y", "1 or 2", len(args))
	}
	return &STY{expression.NaryExpression{ChildExpressions: args}}, nil
}
// FunctionName implements sql.FunctionExpression
func (s *STY) FunctionName() string {
	return "st_y"
}
// Description implements sql.FunctionExpression
func (s *STY) Description() string {
	return "returns the y value of given point. If given a second argument, returns a new point with second argument as y value."
}
// Type implements the sql.Expression interface.
// One argument yields a float (the y value); two arguments yield a point.
func (s *STY) Type() sql.Type {
	if len(s.ChildExpressions) == 1 {
		return sql.Float64
	} else {
		return sql.PointType{}
	}
}
// String renders the expression as ST_Y(arg[,arg]).
func (s *STY) String() string {
	var args = make([]string, len(s.ChildExpressions))
	for i, arg := range s.ChildExpressions {
		args[i] = arg.String()
	}
	return fmt.Sprintf("ST_Y(%s)", strings.Join(args, ","))
}
// WithChildren implements the Expression interface.
func (s *STY) WithChildren(children ...sql.Expression) (sql.Expression, error) {
	return NewSTY(children...)
}
// Eval implements the sql.Expression interface.
// Returns nil (SQL NULL) if either argument evaluates to NULL.
func (s *STY) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	// Evaluate point
	p, err := s.ChildExpressions[0].Eval(ctx, row)
	if err != nil {
		return nil, err
	}
	// Return null if geometry is null
	if p == nil {
		return nil, nil
	}
	// Check that it is a point
	_p, ok := p.(sql.Point)
	if !ok {
		return nil, ErrInvalidType.New(s.FunctionName())
	}
	// If just one argument, return Y
	if len(s.ChildExpressions) == 1 {
		return _p.Y, nil
	}
	// Evaluate second argument
	y, err := s.ChildExpressions[1].Eval(ctx, row)
	if err != nil {
		return nil, err
	}
	// Return null if second argument is null
	if y == nil {
		return nil, nil
	}
	// Convert to float64
	_y, err := sql.Float64.Convert(y)
	if err != nil {
		return nil, err
	}
	// Create point with old X and new Y
	return sql.Point{SRID: _p.SRID, X: _p.X, Y: _y.(float64)}, nil
}
// Longitude is a function that returns the longitude (x) value from a given
// geographic point. With a second argument it instead returns a new point
// whose longitude is replaced by that argument (see Eval).
type Longitude struct {
	expression.NaryExpression
}
// Compile-time assertion that Longitude implements sql.FunctionExpression.
var _ sql.FunctionExpression = (*Longitude)(nil)
// ErrNonGeographic is returned when a point's SRID is not a geographic
// spatial reference system.
var ErrNonGeographic = errors.NewKind("function %s is only defined for geographic spatial reference systems, but one of its argument is in SRID %v, which is not geographic")
// ErrLatitudeOutOfRange is returned when a latitude falls outside
// [-90.0, 90.0].
var ErrLatitudeOutOfRange = errors.NewKind("latitude %v is out of range in function %s. it must be within [-90.0, 90.0]")
// ErrLongitudeOutOfRange is returned when a longitude falls outside
// [-180.0, 180.0].
var ErrLongitudeOutOfRange = errors.NewKind("longitude %v is out of range in function %s. it must be within [-180.0, 180.0]")
// NewLongitude creates a new ST_LONGITUDE expression.
// Accepts one argument (read longitude) or two (replace longitude).
func NewLongitude(args ...sql.Expression) (sql.Expression, error) {
	if len(args) != 1 && len(args) != 2 {
		return nil, sql.ErrInvalidArgumentNumber.New("ST_LONGITUDE", "1 or 2", len(args))
	}
	return &Longitude{expression.NaryExpression{ChildExpressions: args}}, nil
}
// FunctionName implements sql.FunctionExpression
func (l *Longitude) FunctionName() string {
	return "st_longitude"
}
// Description implements sql.FunctionExpression
func (l *Longitude) Description() string {
	return "returns the longitude value of given point. If given a second argument, returns a new point with second argument as longitude value."
}
// Type implements the sql.Expression interface.
// One argument yields a float (the longitude); two arguments yield a point.
func (l *Longitude) Type() sql.Type {
	if len(l.ChildExpressions) == 1 {
		return sql.Float64
	} else {
		return sql.PointType{}
	}
}
// String renders the expression as ST_LONGITUDE(arg[,arg]).
func (l *Longitude) String() string {
	var args = make([]string, len(l.ChildExpressions))
	for i, arg := range l.ChildExpressions {
		args[i] = arg.String()
	}
	return fmt.Sprintf("ST_LONGITUDE(%s)", strings.Join(args, ","))
}
// WithChildren implements the Expression interface.
func (l *Longitude) WithChildren(children ...sql.Expression) (sql.Expression, error) {
	return NewLongitude(children...)
}
// Eval implements the sql.Expression interface.
// Returns nil (SQL NULL) if either argument evaluates to NULL; errors if the
// point is not geographic or the new longitude is outside [-180.0, 180.0].
func (l *Longitude) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	// Evaluate point
	p, err := l.ChildExpressions[0].Eval(ctx, row)
	if err != nil {
		return nil, err
	}
	// Return null if geometry is null
	if p == nil {
		return nil, nil
	}
	// Check that it is a point
	_p, ok := p.(sql.Point)
	if !ok {
		return nil, ErrInvalidType.New(l.FunctionName())
	}
	// Point needs to have SRID 4326
	// TODO: might need to be == Cartesian instead for other SRIDs
	if _p.SRID != GeoSpatialSRID {
		return nil, ErrNonGeographic.New(l.FunctionName(), _p.SRID)
	}
	// If just one argument, return X
	if len(l.ChildExpressions) == 1 {
		return _p.X, nil
	}
	// Evaluate second argument
	x, err := l.ChildExpressions[1].Eval(ctx, row)
	if err != nil {
		return nil, err
	}
	// Return null if second argument is null
	if x == nil {
		return nil, nil
	}
	// Convert to float64
	x, err = sql.Float64.Convert(x)
	if err != nil {
		return nil, err
	}
	// Check that value is within longitude range [-180, 180]
	_x := x.(float64)
	if _x < -180.0 || _x > 180.0 {
		return nil, ErrLongitudeOutOfRange.New(_x, l.FunctionName())
	}
	// Create point with new X and old Y
	return sql.Point{SRID: _p.SRID, X: _x, Y: _p.Y}, nil
}
// Latitude is a function that returns the latitude (y) value from a given
// geographic point. With a second argument it instead returns a new point
// whose latitude is replaced by that argument (see Eval).
type Latitude struct {
	expression.NaryExpression
}
// Compile-time assertion that Latitude implements sql.FunctionExpression.
var _ sql.FunctionExpression = (*Latitude)(nil)
// NewLatitude creates a new ST_LATITUDE expression.
// Accepts one argument (read latitude) or two (replace latitude).
func NewLatitude(args ...sql.Expression) (sql.Expression, error) {
	if len(args) != 1 && len(args) != 2 {
		return nil, sql.ErrInvalidArgumentNumber.New("ST_LATITUDE", "1 or 2", len(args))
	}
	return &Latitude{expression.NaryExpression{ChildExpressions: args}}, nil
}
// FunctionName implements sql.FunctionExpression
func (l *Latitude) FunctionName() string {
	return "st_latitude"
}
// Description implements sql.FunctionExpression
func (l *Latitude) Description() string {
	return "returns the latitude value of given point. If given a second argument, returns a new point with second argument as latitude value."
}
// Type implements the sql.Expression interface.
// One argument yields a float (the latitude); two arguments yield a point.
func (l *Latitude) Type() sql.Type {
	if len(l.ChildExpressions) == 1 {
		return sql.Float64
	} else {
		return sql.PointType{}
	}
}
// String renders the expression as ST_LATITUDE(arg[,arg]).
func (l *Latitude) String() string {
	var args = make([]string, len(l.ChildExpressions))
	for i, arg := range l.ChildExpressions {
		args[i] = arg.String()
	}
	return fmt.Sprintf("ST_LATITUDE(%s)", strings.Join(args, ","))
}
// WithChildren implements the Expression interface.
func (l *Latitude) WithChildren(children ...sql.Expression) (sql.Expression, error) {
	return NewLatitude(children...)
}
// Eval implements the sql.Expression interface.
func (l *Latitude) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
// Evaluate point
p, err := l.ChildExpressions[0].Eval(ctx, row)
if err != nil {
return nil, err
}
// Return null if geometry is null
if p == nil {
return nil, nil
}
// Check that it is a point
_p, ok := p.(sql.Point)
if !ok {
return nil, ErrInvalidType.New(l.FunctionName())
}
// Point needs to have SRID 4326
// TODO: might need to be == Cartesian instead for other SRIDs
if _p.SRID != GeoSpatialSRID {
return nil, ErrNonGeographic.New(l.FunctionName(), _p.SRID)
}
// If just one argument, return Y
if len(l.ChildExpressions) == 1 {
return _p.Y, nil
}
// Evaluate second argument
y, err := l.ChildExpressions[1].Eval(ctx, row)
if err != nil {
return nil, err
}
// Return null if second argument is null
if y == nil {
return nil, nil
}
// Convert to float64
y, err = sql.Float64.Convert(y)
if err != nil {
return nil, err
}
// Check that value is within latitude range [-90, 90]
_y := y.(float64)
if _y < -90.0 || _y > 90.0 {
return nil, ErrLongitudeOutOfRange.New(_y, l.FunctionName())
}
// Create point with old X and new Y
return sql.Point{SRID: _p.SRID, X: _p.X, Y: _y}, nil
} | sql/expression/function/x_y_latitude_longitude.go | 0.782039 | 0.449876 | x_y_latitude_longitude.go | starcoder |
package times
import (
"math"
"time"
)
// GetDateOfFirstMondayOfMonth : get date of first monday
//
// All GetDateOfFirst*OfMonth helpers share one scheme: take the weekday
// index of the 1st of t's month via getDayOfFirstDateOfMonth (which goes
// through the package's DaysMap table — the arithmetic below is consistent
// with Sunday->0 ... Saturday->6, but confirm against DaysMap) and return
// the day-of-month (1-7) on which the requested weekday first occurs.
func GetDateOfFirstMondayOfMonth(t time.Time) int {
	dayFirstDateOfMonth := getDayOfFirstDateOfMonth(t)
	if dayFirstDateOfMonth < 2 {
		return 2 - dayFirstDateOfMonth
	}
	return 9 - dayFirstDateOfMonth
}
// GetDateOfFirstTuesdayOfMonth : get date of first tuesday
func GetDateOfFirstTuesdayOfMonth(t time.Time) int {
	dayFirstDateOfMonth := getDayOfFirstDateOfMonth(t)
	if dayFirstDateOfMonth < 3 {
		return 3 - dayFirstDateOfMonth
	}
	return 10 - dayFirstDateOfMonth
}
// GetDateOfFirstWednesdayOfMonth : get date of first wednesday
func GetDateOfFirstWednesdayOfMonth(t time.Time) int {
	dayFirstDateOfMonth := getDayOfFirstDateOfMonth(t)
	if dayFirstDateOfMonth < 4 {
		return 4 - dayFirstDateOfMonth
	}
	return 11 - dayFirstDateOfMonth
}
// GetDateOfFirstThursdayOfMonth : get date of first thursday
func GetDateOfFirstThursdayOfMonth(t time.Time) int {
	dayFirstDateOfMonth := getDayOfFirstDateOfMonth(t)
	if dayFirstDateOfMonth < 5 {
		return 5 - dayFirstDateOfMonth
	}
	return 12 - dayFirstDateOfMonth
}
// GetDateOfFirstFridayOfMonth : get date of first friday
func GetDateOfFirstFridayOfMonth(t time.Time) int {
	dayFirstDateOfMonth := getDayOfFirstDateOfMonth(t)
	if dayFirstDateOfMonth < 6 {
		return 6 - dayFirstDateOfMonth
	}
	return 13 - dayFirstDateOfMonth
}
// GetDateOfFirstSaturdayOfMonth : get date of first saturday
func GetDateOfFirstSaturdayOfMonth(t time.Time) int {
	return 7 - getDayOfFirstDateOfMonth(t)
}
// GetDateOfFirstSundayOfMonth : get date of first sunday
func GetDateOfFirstSundayOfMonth(t time.Time) int {
	dayFirstDateOfMonth := getDayOfFirstDateOfMonth(t)
	if dayFirstDateOfMonth == 0 {
		// Month starts on the weekday mapped to 0, so its first sunday is day 1
		// (assuming DaysMap maps Sunday->0 — confirm).
		return 1
	}
	return 8 - dayFirstDateOfMonth
}
// GetNthWeekOfMonth : get {n}th Week of Month
// Regard the start of the week as Sunday
//
// Any date on or before the month's first Saturday falls in week 1; after
// that boundary each further 7-day span (Sunday through Saturday) starts a
// new week.
func GetNthWeekOfMonth(t time.Time) int {
	date := t.Day()
	daysOfFirstSaturdayOfMonth := GetDateOfFirstSaturdayOfMonth(t)
	if daysOfFirstSaturdayOfMonth >= date {
		return 1
	}
	return 1 + int(math.Ceil(float64(date-daysOfFirstSaturdayOfMonth)/7))
}
// GetLastTimeOfMonth : get Last TimeObject Of Request Month
// (midnight UTC on the final day of t's month).
func GetLastTimeOfMonth(t time.Time) time.Time {
	firstOfMonth := getFirstTimeOfMonth(t)
	return firstOfMonth.AddDate(0, 1, -1)
}
// GetFirstWorkingTimeOfMonth : get First Working TimeObject Of Request Month
// (midnight UTC on the first weekday of the month: a month starting on
// Sunday yields the 2nd, one starting on Saturday yields the 3rd).
// month is translated through the package's MonthMapByInt table.
func GetFirstWorkingTimeOfMonth(year, month int) time.Time {
	firstDateOfMonth := time.Date(year, MonthMapByInt[month], 1, 0, 0, 0, 0, time.UTC)
	// Use the named time.Weekday constants instead of the bare literals 0/6.
	switch firstDateOfMonth.Weekday() {
	case time.Sunday:
		return firstDateOfMonth.AddDate(0, 0, 1)
	case time.Saturday:
		return firstDateOfMonth.AddDate(0, 0, 2)
	default:
		return firstDateOfMonth
	}
}
// getDayOfFirstDateOfMonth : get day of first date
// Looks up the weekday of the 1st of t's month in the package's DaysMap
// table (declared elsewhere in this package).
func getDayOfFirstDateOfMonth(t time.Time) int {
	return DaysMap[(getFirstTimeOfMonth(t)).Weekday()]
}
// getFirstTimeOfMonth : First time of Month
func getFirstTimeOfMonth(t time.Time) time.Time {
return time.Date(t.Year(), t.Month(), 1, 0, 0, 0, 0, time.UTC)
} | times.go | 0.721351 | 0.88258 | times.go | starcoder |
package main
import (
"de.knallisworld/aoc/aoc2018/dayless"
"errors"
"fmt"
"math"
"strconv"
"strings"
"time"
)
// AocDay is the Advent of Code day number solved by this program.
const AocDay = 6
// AocDayName is the directory name holding this day's puzzle input.
const AocDayName = "day06"
// main solves both parts of AoC 2018 day 6: part 1 finds the largest finite
// closest-coordinate area; part 2 counts cells whose summed distance to all
// coordinates is under the puzzle threshold.
func main() {
	dayless.PrintDayHeader(AocDay)
	defer dayless.TimeTrack(time.Now(), AocDayName)
	dayless.PrintStepHeader(1)
	lines, _ := dayless.ReadFileToArray(AocDayName + "/puzzle.txt")
	grid, coordinates, _ := createGrid(lines)
	fillGridManhattenDistance(grid, coordinates)
	// fmt.Println(renderGrid(grid))
	maxAreaSize, _ := getLargestFiniteArea(grid)
	fmt.Printf("Size of the largest area: %d\n", maxAreaSize)
	fmt.Println()
	dayless.PrintStepHeader(2)
	// Part 2 reuses the coordinates but needs a freshly zeroed grid.
	grid, _, _ = createGrid(lines)
	fillGridManhattenDistanceSum(grid, coordinates)
	// Count the cells flagged 1 by fillGridManhattenDistanceSum.
	sum := 0
	for _, row := range grid.rows {
		for _, col := range row {
			if col == 1 {
				sum++
			}
		}
	}
	fmt.Printf("Size of region: %d\n", sum)
	// fmt.Println(renderGrid(grid))
	fmt.Println()
}
// coordinate is one puzzle input point. id is its index in the input list
// and doubles as the area identifier written into the grid.
type coordinate struct {
	x  int
	y  int
	id int
}
// grid maps x -> (y -> value). Depending on the fill pass, the value is
// either the id of the unique closest coordinate (-1 on ties) or a 1/-1
// inside/outside-region flag.
type grid struct {
	rows map[int]map[int]int
}
// createGrid parses "x, y" input lines into coordinates (id = line index)
// and allocates a grid spanning [0, maxX] x [0, maxY], every cell
// initialized to -1 (unassigned).
func createGrid(lines []string) (grid, []coordinate, error) {
	var coordinates []coordinate
	maxX := 0
	maxY := 0
	for i, line := range lines {
		parts := strings.Split(line, ", ")
		if len(parts) != 2 {
			return grid{}, nil, errors.New("invalid size of numbers in line")
		}
		x, err := strconv.Atoi(parts[0])
		if err != nil {
			return grid{}, nil, errors.New("invalid number")
		}
		y, err := strconv.Atoi(parts[1])
		if err != nil {
			return grid{}, nil, errors.New("invalid number")
		}
		if x > maxX {
			maxX = x
		}
		if y > maxY {
			maxY = y
		}
		coordinates = append(coordinates, coordinate{x: x, y: y, id: i})
	}
	// +1 so the extreme coordinates themselves fit inside the grid.
	maxX++
	maxY++
	var rows = make(map[int]map[int]int, maxX)
	for x := 0; x < maxX; x++ {
		var row = make(map[int]int, maxY)
		for y := 0; y < maxY; y++ {
			row[y] = -1
		}
		rows[x] = row
	}
	return grid{rows: rows}, coordinates, nil
}
// fillGridManhattenDistance writes into each grid cell the id of the unique
// closest coordinate by Manhattan distance, or -1 when no unique closest
// coordinate exists (part 1 semantics).
func fillGridManhattenDistance(grid grid, coordinates []coordinate) {
	for x := 0; x < len(grid.rows); x++ {
		row := grid.rows[x]
		for y := 0; y < len(row); y++ {
			closestCoordinate, _, err := calcClosestManhattenDistance(coordinates, coordinate{x: x, y: y, id: -1})
			if err != nil {
				// Tie: the cell belongs to no single area.
				row[y] = -1
			} else {
				row[y] = closestCoordinate.id
			}
		}
	}
}
// fillGridManhattenDistanceSum marks each cell 1 if the sum of its Manhattan
// distances to every coordinate is under 10000 (the part-2 puzzle
// threshold), and -1 otherwise.
func fillGridManhattenDistanceSum(grid grid, coordinates []coordinate) {
	for x := 0; x < len(grid.rows); x++ {
		row := grid.rows[x]
		for y := 0; y < len(row); y++ {
			sum := 0
			for _, c := range coordinates {
				sum += calcManhattenDistance(c, coordinate{x: x, y: y, id: -1})
			}
			// 10000 is the puzzle-defined "safe region" limit.
			if sum < 10000 {
				row[y] = 1
			} else {
				row[y] = -1
			}
		}
	}
}
// calcManhattenDistance returns the Manhattan (taxicab) distance between two
// coordinates: |x1-x2| + |y1-y2|.
// Computed with plain integer arithmetic instead of the previous
// int -> float64 -> math.Abs -> int round trip.
func calcManhattenDistance(from coordinate, to coordinate) int {
	dx := from.x - to.x
	if dx < 0 {
		dx = -dx
	}
	dy := from.y - to.y
	if dy < 0 {
		dy = -dy
	}
	return dx + dy
}
// calcClosestManhattenDistance returns the coordinate in froms that is
// closest to "to" by Manhattan distance, together with that distance.
// An error is returned when two or more coordinates tie for the minimum
// distance, or when froms is empty.
//
// Bug fix: the previous version errored out as soon as any coordinate
// matched the current running minimum, even if a strictly closer (and
// unique) coordinate appeared later in froms; only ties at the final
// minimum should count. It also returned a zero coordinate with a nil
// error for empty input.
func calcClosestManhattenDistance(froms []coordinate, to coordinate) (coordinate, int, error) {
	min := math.MaxInt32
	minC := coordinate{}
	ties := 0
	for _, from := range froms {
		distance := calcManhattenDistance(from, to)
		switch {
		case distance < min:
			min = distance
			minC = from
			ties = 1
		case distance == min:
			ties++
		}
	}
	if ties != 1 {
		return coordinate{}, 0, errors.New("same distance")
	}
	return minC, min, nil
}
// getLargestFiniteArea returns the size and id of the largest finite area
// in the grid. An area is "infinite" if its id appears anywhere on the grid
// border, since it would extend forever beyond the grid; -1 cells (ties)
// are likewise collected here and therefore never counted.
func getLargestFiniteArea(grid grid) (int, int) {
	// skip edges (0, max)
	// Collect every id seen on the top/bottom columns and first/last cells
	// of each row — i.e. the full border of the grid.
	infiniteAreaIds := make(map[int]struct{})
	for _, id := range grid.rows[0] {
		if _, ok := infiniteAreaIds[id]; !ok {
			infiniteAreaIds[id] = struct{}{}
		}
	}
	for _, id := range grid.rows[len(grid.rows)-1] {
		if _, ok := infiniteAreaIds[id]; !ok {
			infiniteAreaIds[id] = struct{}{}
		}
	}
	for _, row := range grid.rows {
		id := row[0]
		if _, ok := infiniteAreaIds[id]; !ok {
			infiniteAreaIds[id] = struct{}{}
		}
		id = row[len(row)-1]
		if _, ok := infiniteAreaIds[id]; !ok {
			infiniteAreaIds[id] = struct{}{}
		}
	}
	// Count cells per finite area. The loops stop one short of the border;
	// border cells always belong to ids marked infinite above, so excluding
	// them does not change any finite count.
	var areaSizes = make(map[int]int)
	for x := 0; x < len(grid.rows)-1; x++ {
		row := grid.rows[x]
		for y := 0; y < len(row)-1; y++ {
			id := row[y]
			if _, ok := infiniteAreaIds[id]; ok {
				continue // infinite area
			}
			areaSizes[id] += 1
		}
	}
	// Pick the area with the largest cell count.
	maxSize := 0
	maxId := 0
	for id, size := range areaSizes {
		if size > maxSize {
			maxSize = size
			maxId = id
		}
	}
	return maxSize, maxId
}
func renderGrid(grid grid) string {
var result = ""
for x := 0; x < len(grid.rows); x++ {
row := grid.rows[x]
result += fmt.Sprintf("%03d | ", x)
for y := 0; y < len(row); y++ {
col := fmt.Sprintf("%3d", row[y])
result += col
}
result += "\n"
}
return result
} | day06/main.go | 0.580947 | 0.442335 | main.go | starcoder |
package meta
import (
"fmt"
"strings"
)
/*
SatisfyingValueSchema represents a high-level schema of a given leaf's
satisfying values. It is used to generate value schema predicates by
validating its representative values against the provided schema using
a JSON schema validator.
SatisfyingValueSchema is an immutable type.
*/
type SatisfyingValueSchema struct {
	// generateRepresentativeValue wraps an end value in the structure
	// accumulated so far via AddObject/AddArray.
	generateRepresentativeValue func(interface{}) interface{}
	// representativeValues is non-nil only once the schema has been
	// completed by one of the EndsWith* methods (see isComplete).
	representativeValues []interface{}
}
// NewSatisfyingValueSchema returns an empty schema whose representative
// value generator is the identity function.
func NewSatisfyingValueSchema() SatisfyingValueSchema {
	identity := func(v interface{}) interface{} { return v }
	return SatisfyingValueSchema{generateRepresentativeValue: identity}
}
// AddObject adds an object with the specified key to svs. The key is
// upcased because matching is case-insensitive: we only care about the
// first matching key k such that upcase(k) == upcase(key).
//
// Idiom: empty-string check is written as key == "" rather than
// len(key) <= 0, and the loop-invariant strings.ToUpper call is hoisted
// out of the generator closure so it runs once, not per generated value.
func (svs SatisfyingValueSchema) AddObject(key string) SatisfyingValueSchema {
	if key == "" {
		panic("svs.AddObject called with an empty key")
	}
	upcased := strings.ToUpper(key)
	return svs.add(func(value interface{}) interface{} {
		return map[string]interface{}{upcased: value}
	})
}
// AddArray adds an array segment to svs: generated representative values
// will wrap the end value in a single-element array.
func (svs SatisfyingValueSchema) AddArray() SatisfyingValueSchema {
	wrapInArray := func(value interface{}) interface{} {
		return []interface{}{value}
	}
	return svs.add(wrapInArray)
}
// EndsWithPrimitiveValue completes the schema, indicating that it ends
// with a primitive value (represented here by nil).
func (svs SatisfyingValueSchema) EndsWithPrimitiveValue() SatisfyingValueSchema {
	return svs.endsWith(nil)
}
// EndsWithObject completes the schema, indicating that it ends with an
// object (represented by an empty map).
func (svs SatisfyingValueSchema) EndsWithObject() SatisfyingValueSchema {
	return svs.endsWith(map[string]interface{}{})
}
// EndsWithArray completes the schema, indicating that it ends with an
// array (represented by an empty slice).
func (svs SatisfyingValueSchema) EndsWithArray() SatisfyingValueSchema {
	return svs.endsWith([]interface{}{})
}
// EndsWithAnything completes the schema, indicating that it can end with
// any value: an object, an array, or a primitive (nil) — one
// representative value is generated for each.
func (svs SatisfyingValueSchema) EndsWithAnything() SatisfyingValueSchema {
	return svs.endsWith(
		map[string]interface{}{},
		[]interface{}{},
		nil,
	)
}
// add returns a new schema whose generator first wraps an end value with
// segmentRepresentativeValue and then applies the structure accumulated so
// far. Panics if the schema was already completed by an EndsWith* method.
func (svs SatisfyingValueSchema) add(segmentRepresentativeValue func(interface{}) interface{}) SatisfyingValueSchema {
	if svs.isComplete() {
		// NOTE(review): %T prints the type name, not the value — %v or %+v
		// was presumably intended; confirm before changing the message.
		panic(fmt.Sprintf("svs#add: attempting to add to a completed SatisfyingValueSchema %T", svs))
	}
	return SatisfyingValueSchema{
		// Compose: outer structure (svs) wraps the new segment.
		generateRepresentativeValue: func(endValue interface{}) interface{} {
			return svs.generateRepresentativeValue(segmentRepresentativeValue(endValue))
		},
	}
}
// endsWith completes the schema by generating one representative value for
// every supplied end value. The resulting slice is always non-nil, which
// is what marks the schema as complete.
func (svs SatisfyingValueSchema) endsWith(endValues ...interface{}) SatisfyingValueSchema {
	generated := make([]interface{}, 0, len(endValues))
	for _, endValue := range endValues {
		generated = append(generated, svs.generateRepresentativeValue(endValue))
	}
	return SatisfyingValueSchema{representativeValues: generated}
}
func (svs SatisfyingValueSchema) isComplete() bool {
return svs.representativeValues != nil
} | api/rql/internal/primary/meta/satisfyingValueSchema.go | 0.725843 | 0.428771 | satisfyingValueSchema.go | starcoder |
package ecc
import (
"fmt"
"math/big"
)
// s256Field allows finite field math for 256 bit integers.
type s256Field struct {
	// Num is the element's value; constructors enforce 0 <= Num < Prime.
	Num *big.Int
	// Prime is the field modulus.
	Prime *big.Int
}
// newS256FieldFromInt64 is a convenience constructor building a field
// element from plain int64 values; range checking is delegated to
// newS256Field.
func newS256FieldFromInt64(num int64, prime int64) *s256Field {
	return newS256Field(big.NewInt(num), big.NewInt(prime))
}
// newS256Field constructs a field element, panicking when num lies outside
// the valid range [0, prime).
func newS256Field(num *big.Int, prime *big.Int) *s256Field {
	inRange := num.Sign() >= 0 && num.Cmp(prime) < 0
	if !inRange {
		panic(fmt.Sprintf("Num %d not in valid field range", num))
	}
	return &s256Field{Num: num, Prime: prime}
}
// String implements fmt.Stringer, rendering the value followed by the
// modulus.
func (field *s256Field) String() string {
	return fmt.Sprintf("s256Field(%d)(%d)", field.Num, field.Prime)
}
// Eq reports whether other is a *s256Field with the same value and the
// same modulus. A nil other is never equal; a non-*s256Field other panics
// on the type assertion, as in the original.
func (field *s256Field) Eq(other FieldInteger) bool {
	if other == nil {
		return false
	}
	o := other.(*s256Field)
	sameNum := field.Num.Cmp(o.Num) == 0
	samePrime := field.Prime.Cmp(o.Prime) == 0
	return sameNum && samePrime
}
// Ne is the negation of Eq: true when other differs in value, modulus, or
// is nil.
func (field *s256Field) Ne(other FieldInteger) bool {
	return !field.Eq(other)
}
// Add sets field to (x + y) mod Prime, in place, and returns field.
// Both operands must belong to the same field (same Prime) or Add panics.
func (field *s256Field) Add(x, y FieldInteger) FieldInteger {
	fx, fy := x.(*s256Field), y.(*s256Field)
	if fx.Prime.Cmp(fy.Prime) != 0 {
		panic("Cannot add two numbers in different Fields")
	}
	// Compute into a fresh big.Int so x and y are never aliased mid-update.
	var num = new(big.Int)
	num.Add(fx.Num, fy.Num).Mod(num, fx.Prime)
	*field = s256Field{Num: num, Prime: fx.Prime}
	return field
}
// Sub sets field to (x - y) mod Prime, in place, and returns field.
// Both operands must belong to the same field (same Prime) or Sub panics.
func (field *s256Field) Sub(x, y FieldInteger) FieldInteger {
	fx, fy := x.(*s256Field), y.(*s256Field)
	if fx.Prime.Cmp(fy.Prime) != 0 {
		panic("Cannot subtract two numbers in different Fields")
	}
	var num = new(big.Int)
	num.Sub(fx.Num, fy.Num).Mod(num, fx.Prime)
	// Defensive: big.Int.Mod already yields a result in [0, m) for a
	// positive modulus (Euclidean modulus), so this branch should not fire.
	if num.Sign() < 0 {
		num.Add(num, fx.Prime)
	}
	*field = s256Field{Num: num, Prime: fx.Prime}
	return field
}
// Mul sets field to (x * y) mod Prime, in place, and returns field.
// Both operands must belong to the same field (same Prime) or Mul panics.
func (field *s256Field) Mul(x, y FieldInteger) FieldInteger {
	fx, fy := x.(*s256Field), y.(*s256Field)
	if fx.Prime.Cmp(fy.Prime) != 0 {
		panic("Cannot multiply two numbers in different Fields")
	}
	var num = new(big.Int)
	num.Mul(fx.Num, fy.Num).Mod(num, fx.Prime)
	*field = s256Field{Num: num, Prime: fx.Prime}
	return field
}
// Div sets field to x * y⁻¹ mod Prime, in place, and returns field. The
// inverse is computed via Fermat's little theorem, which requires Prime to
// actually be prime. Panics when the operands have different moduli.
func (field *s256Field) Div(x, y FieldInteger) FieldInteger {
	fx, fy := x.(*s256Field), y.(*s256Field)
	if fx.Prime.Cmp(fy.Prime) != 0 {
		panic("Cannot divide two numbers in different Fields")
	}
	/*
	 * field.num and other.num are the actual values
	 * field.prime is what we need to mod against
	 * use fermat's little theorem:
	 * field.num**(p-1) % p == 1
	 * this means:
	 * 1/n == pow(n, p-2, p)
	 */
	var num = new(big.Int)
	var b = new(big.Int)
	var e = new(big.Int)
	e.Sub(fx.Prime, big.NewInt(2))
	b.Exp(fy.Num, e, fx.Prime) // b = y^(p-2) mod p = y⁻¹
	num.Mul(fx.Num, b).Mod(num, fx.Prime)
	*field = s256Field{Num: num, Prime: fx.Prime}
	return field
}
// Pow sets field to n^exponent mod Prime, in place, and returns field.
// The exponent is first reduced mod (Prime-1); by Fermat's little theorem
// this preserves the result and also maps negative exponents onto their
// equivalent positive ones.
func (field *s256Field) Pow(n FieldInteger, exponent *big.Int) FieldInteger {
	f := n.(*s256Field)
	var num = new(big.Int)
	var e = new(big.Int)
	var m = new(big.Int)
	m.Sub(f.Prime, big.NewInt(1))
	e.Mod(exponent, m)
	num.Exp(f.Num, e, f.Prime)
	*field = s256Field{Num: num, Prime: f.Prime}
	return field
}
// Cmul sets field to n * coefficient mod Prime (scalar multiplication),
// in place, and returns field. The coefficient is reduced first so huge or
// negative scalars are handled uniformly.
func (field *s256Field) Cmul(n FieldInteger, coefficient *big.Int) FieldInteger {
	var num = new(big.Int)
	var c = new(big.Int)
	f := n.(*s256Field)
	c.Mod(coefficient, f.Prime)
	num.Mul(f.Num, c).Mod(num, f.Prime)
	*field = s256Field{Num: num, Prime: f.Prime}
	return field
}
// Copy returns an independent copy of the field element.
//
// Fix: Prime is now deep-copied as well. Previously the Prime pointer was
// shared between the copy and the original, and Set mutates Prime in
// place — so calling Set on one element silently changed the modulus of
// every element sharing that pointer.
func (field *s256Field) Copy() FieldInteger {
	num := new(big.Int).Set(field.Num)
	prime := new(big.Int).Set(field.Prime)
	return &s256Field{num, prime}
}
// Set copies n's value and modulus into field, in place, and returns field.
//
// NOTE(review): Prime.Set mutates the existing Prime big.Int in place; if
// that pointer is shared with another element (e.g. via a shallow copy),
// the other element's modulus changes too — verify callers.
func (field *s256Field) Set(n FieldInteger) FieldInteger {
	f := n.(*s256Field)
	field.Num.Set(f.Num)
	field.Prime.Set(f.Prime)
	return field
}
func (field *s256Field) Sqrt() *s256Field {
e := new(big.Int)
e.Add(field.Prime, big.NewInt(1))
e.Div(e, big.NewInt(4))
result := field.Copy()
return result.Pow(result, e).(*s256Field)
} | ecc/s256field.go | 0.734215 | 0.401306 | s256field.go | starcoder |
package june
import "sort"
/*
# Two City Scheduling
# https://leetcode.com/explore/challenge/card/june-leetcoding-challenge/539/week-1-june-1st-june-7th/3349/
There are 2N people a company is planning to interview. The cost of flying the i-th person to city A is costs[i][0], and the cost of flying the i-th person to city B is costs[i][1].
Return the minimum cost to fly every person to a city such that exactly N people arrive in each city.
Example 1:
Input: [[10,20],[30,200],[400,50],[30,20]]
Output: 110
Explanation:
The first person goes to city A for a cost of 10.
The second person goes to city A for a cost of 30.
The third person goes to city B for a cost of 50.
The fourth person goes to city B for a cost of 20.
The total minimum cost is 10 + 30 + 50 + 20 = 110 to have half the people interviewing in each city.
Note:
1 <= costs.length <= 100
It is guaranteed that costs.length is even.
1 <= costs[i][0], costs[i][1] <= 1000
*/
// TwoCitySchedCost is the exported entry point; it delegates to
// twoCitySchedCost. costs[i][0] is person i's cost to city A,
// costs[i][1] the cost to city B.
func TwoCitySchedCost(costs [][]int) int {
	return twoCitySchedCost(costs)
}
// twoCitySchedCost first sends everyone to city B, then moves the half of
// the people with the smallest (costA - costB) difference over to city A:
// after sorting the differences ascending, the first half is the cheapest
// set to relocate.
func twoCitySchedCost(costs [][]int) int {
	total := 0
	diffs := make([]int, len(costs))
	for i, c := range costs {
		total += c[1]
		diffs[i] = c[0] - c[1]
	}
	sort.Ints(diffs)
	for _, d := range diffs[:len(costs)/2] {
		total += d
	}
	return total
}
/*
# Queue Reconstruction by Height
# https://leetcode.com/explore/challenge/card/june-leetcoding-challenge/539/week-1-june-1st-june-7th/3352/
Suppose you have a random list of people standing in a queue. Each person is described by a pair of integers (h, k),
where h is the height of the person and k is the number of people in front of this person who have a height greater than or equal to h.
Write an algorithm to reconstruct the queue.
Note:
The number of people is less than 1,100.
Example
Input:
[[7,0], [4,4], [7,1], [5,0], [6,1], [5,2]]
Output:
[[5,0], [7,0], [5,2], [6,1], [4,4], [7,1]]
*/
// ReconstructQueue is the exported entry point; it delegates to
// reconstructQueue. Each person is (h, k): height h with exactly k people
// of height >= h ahead of them in the reconstructed queue.
func ReconstructQueue(people [][]int) [][]int {
	return reconstructQueue(people)
}
// reconstructQueue sorts people tallest-first (ties broken by smaller k)
// and then inserts each person at index k: everyone already placed is at
// least as tall, so exactly k taller-or-equal people end up ahead.
// Note: the input slice is sorted in place, as in the original.
func reconstructQueue(people [][]int) [][]int {
	sort.Slice(people, func(i, j int) bool {
		a, b := people[i], people[j]
		if a[0] != b[0] {
			return a[0] > b[0]
		}
		return a[1] < b[1]
	})
	var queue [][]int
	for _, person := range people {
		k := person[1]
		if k >= len(queue) {
			queue = append(queue, person)
			continue
		}
		queue = append(queue, nil)   // grow by one slot
		copy(queue[k+1:], queue[k:]) // shift the tail right
		queue[k] = person            // place at index k
	}
	return queue
}
package plaid
import (
"encoding/json"
)
// PSLFStatus Information about the student's eligibility in the Public Service Loan Forgiveness program. This is only returned if the institution is Fedloan (`ins_116527`).
type PSLFStatus struct {
	// The estimated date borrower will have completed 120 qualifying monthly payments. Returned in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format (YYYY-MM-DD).
	EstimatedEligibilityDate NullableString `json:"estimated_eligibility_date"`
	// The number of qualifying payments that have been made.
	PaymentsMade NullableFloat32 `json:"payments_made"`
	// The number of qualifying payments remaining.
	PaymentsRemaining NullableFloat32 `json:"payments_remaining"`
	// AdditionalProperties captures any JSON keys not declared above so
	// they survive an unmarshal/marshal round trip.
	AdditionalProperties map[string]interface{}
}
// _PSLFStatus mirrors PSLFStatus without its method set, letting
// UnmarshalJSON decode the declared fields without recursing into itself.
type _PSLFStatus PSLFStatus
// NewPSLFStatus instantiates a new PSLFStatus object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewPSLFStatus(estimatedEligibilityDate NullableString, paymentsMade NullableFloat32, paymentsRemaining NullableFloat32) *PSLFStatus {
	return &PSLFStatus{
		EstimatedEligibilityDate: estimatedEligibilityDate,
		PaymentsMade:             paymentsMade,
		PaymentsRemaining:        paymentsRemaining,
	}
}
// NewPSLFStatusWithDefaults instantiates a new PSLFStatus object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewPSLFStatusWithDefaults() *PSLFStatus {
	return &PSLFStatus{}
}
// GetEstimatedEligibilityDate returns the EstimatedEligibilityDate field value
// If the value is explicit nil, the zero value for string will be returned
func (o *PSLFStatus) GetEstimatedEligibilityDate() string {
	if o != nil {
		if v := o.EstimatedEligibilityDate.Get(); v != nil {
			return *v
		}
	}
	var zero string
	return zero
}

// GetEstimatedEligibilityDateOk returns a tuple with the EstimatedEligibilityDate field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *PSLFStatus) GetEstimatedEligibilityDateOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return o.EstimatedEligibilityDate.Get(), o.EstimatedEligibilityDate.IsSet()
}

// SetEstimatedEligibilityDate sets field value
func (o *PSLFStatus) SetEstimatedEligibilityDate(v string) {
	o.EstimatedEligibilityDate.Set(&v)
}
// GetPaymentsMade returns the PaymentsMade field value
// If the value is explicit nil, the zero value for float32 will be returned
func (o *PSLFStatus) GetPaymentsMade() float32 {
	if o != nil {
		if v := o.PaymentsMade.Get(); v != nil {
			return *v
		}
	}
	var zero float32
	return zero
}

// GetPaymentsMadeOk returns a tuple with the PaymentsMade field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *PSLFStatus) GetPaymentsMadeOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return o.PaymentsMade.Get(), o.PaymentsMade.IsSet()
}

// SetPaymentsMade sets field value
func (o *PSLFStatus) SetPaymentsMade(v float32) {
	o.PaymentsMade.Set(&v)
}
// GetPaymentsRemaining returns the PaymentsRemaining field value
// If the value is explicit nil, the zero value for float32 will be returned
func (o *PSLFStatus) GetPaymentsRemaining() float32 {
	if o != nil {
		if v := o.PaymentsRemaining.Get(); v != nil {
			return *v
		}
	}
	var zero float32
	return zero
}

// GetPaymentsRemainingOk returns a tuple with the PaymentsRemaining field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *PSLFStatus) GetPaymentsRemainingOk() (*float32, bool) {
	if o == nil {
		return nil, false
	}
	return o.PaymentsRemaining.Get(), o.PaymentsRemaining.IsSet()
}

// SetPaymentsRemaining sets field value
func (o *PSLFStatus) SetPaymentsRemaining(v float32) {
	o.PaymentsRemaining.Set(&v)
}
// MarshalJSON serializes the three required fields (always emitted, as
// null when unset) plus any captured additional properties into a single
// JSON object.
//
// Cleanup: the generated `if true { ... }` wrappers were dead conditionals
// and have been removed; behavior is unchanged.
func (o PSLFStatus) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"estimated_eligibility_date": o.EstimatedEligibilityDate.Get(),
		"payments_made":              o.PaymentsMade.Get(),
		"payments_remaining":         o.PaymentsRemaining.Get(),
	}
	for key, value := range o.AdditionalProperties {
		toSerialize[key] = value
	}
	return json.Marshal(toSerialize)
}
// UnmarshalJSON decodes the declared fields, then stores every remaining
// JSON key in AdditionalProperties.
//
// Fix: the original only assigned *o when the struct decode succeeded but
// kept going; if the subsequent map decode succeeded (e.g. valid JSON
// object with a wrongly-typed field), the first error was overwritten and
// a nil error was returned while o stayed untouched. The first error is
// now returned immediately.
func (o *PSLFStatus) UnmarshalJSON(bytes []byte) (err error) {
	varPSLFStatus := _PSLFStatus{}
	if err = json.Unmarshal(bytes, &varPSLFStatus); err != nil {
		return err
	}
	*o = PSLFStatus(varPSLFStatus)

	additionalProperties := make(map[string]interface{})
	if err = json.Unmarshal(bytes, &additionalProperties); err == nil {
		delete(additionalProperties, "estimated_eligibility_date")
		delete(additionalProperties, "payments_made")
		delete(additionalProperties, "payments_remaining")
		o.AdditionalProperties = additionalProperties
	}

	return err
}
// NullablePSLFStatus wraps a PSLFStatus pointer and distinguishes "never
// set" from an explicit JSON null.
type NullablePSLFStatus struct {
	value *PSLFStatus
	isSet bool
}

// Get returns the wrapped value (nil when unset or explicitly null).
func (v NullablePSLFStatus) Get() *PSLFStatus {
	return v.value
}

// Set stores val (possibly nil) and marks the wrapper as set.
func (v *NullablePSLFStatus) Set(val *PSLFStatus) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether a value (possibly nil) has been assigned.
func (v NullablePSLFStatus) IsSet() bool {
	return v.isSet
}

// Unset clears both the value and the set flag.
func (v *NullablePSLFStatus) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullablePSLFStatus wraps val in an already-set NullablePSLFStatus.
func NewNullablePSLFStatus(val *PSLFStatus) *NullablePSLFStatus {
	return &NullablePSLFStatus{value: val, isSet: true}
}

// MarshalJSON emits the wrapped value (null when unset or nil).
func (v NullablePSLFStatus) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON stores the decoded value and marks the wrapper as set.
func (v *NullablePSLFStatus) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package graph
import (
i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55 "github.com/microsoft/kiota/abstractions/go/serialization"
)
// RecurrencePattern describes how often an event repeats and on which
// days/months, via a pattern type plus the fields that type requires.
type RecurrencePattern struct {
    // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
    additionalData map[string]interface{};
    // The day of the month on which the event occurs. Required if type is absoluteMonthly or absoluteYearly.
    dayOfMonth *int32;
    // A collection of the days of the week on which the event occurs. The possible values are: sunday, monday, tuesday, wednesday, thursday, friday, saturday. If type is relativeMonthly or relativeYearly, and daysOfWeek specifies more than one day, the event falls on the first day that satisfies the pattern. Required if type is weekly, relativeMonthly, or relativeYearly.
    daysOfWeek []DayOfWeek;
    // The first day of the week. The possible values are: sunday, monday, tuesday, wednesday, thursday, friday, saturday. Default is sunday. Required if type is weekly.
    firstDayOfWeek *DayOfWeek;
    // Specifies on which instance of the allowed days specified in daysOfWeek the event occurs, counted from the first instance in the month. The possible values are: first, second, third, fourth, last. Default is first. Optional and used if type is relativeMonthly or relativeYearly.
    index *WeekIndex;
    // The number of units between occurrences, where units can be in days, weeks, months, or years, depending on the type. Required.
    interval *int32;
    // The month in which the event occurs. This is a number from 1 to 12.
    month *int32;
    // The recurrence pattern type: daily, weekly, absoluteMonthly, relativeMonthly, absoluteYearly, relativeYearly. Required.
    // (Named type_escaped presumably because `type` is a Go keyword.)
    type_escaped *RecurrencePatternType;
}
// NewRecurrencePattern instantiates a new recurrencePattern and sets the default values.
func NewRecurrencePattern()(*RecurrencePattern) {
    instance := &RecurrencePattern{}
    instance.SetAdditionalData(map[string]interface{}{})
    return instance
}
// GetAdditionalData gets the AdditionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
// All getters below are nil-receiver safe and return nil in that case.
// Idiom fix: the redundant `else` after a terminating `if ... return` has
// been dropped throughout (Go Code Review Comments: indent error flow).
func (m *RecurrencePattern) GetAdditionalData()(map[string]interface{}) {
    if m == nil {
        return nil
    }
    return m.additionalData
}
// GetDayOfMonth gets the dayOfMonth property value. The day of the month on which the event occurs. Required if type is absoluteMonthly or absoluteYearly.
func (m *RecurrencePattern) GetDayOfMonth()(*int32) {
    if m == nil {
        return nil
    }
    return m.dayOfMonth
}
// GetDaysOfWeek gets the daysOfWeek property value. A collection of the days of the week on which the event occurs. The possible values are: sunday, monday, tuesday, wednesday, thursday, friday, saturday. If type is relativeMonthly or relativeYearly, and daysOfWeek specifies more than one day, the event falls on the first day that satisfies the pattern. Required if type is weekly, relativeMonthly, or relativeYearly.
func (m *RecurrencePattern) GetDaysOfWeek()([]DayOfWeek) {
    if m == nil {
        return nil
    }
    return m.daysOfWeek
}
// GetFirstDayOfWeek gets the firstDayOfWeek property value. The first day of the week. The possible values are: sunday, monday, tuesday, wednesday, thursday, friday, saturday. Default is sunday. Required if type is weekly.
func (m *RecurrencePattern) GetFirstDayOfWeek()(*DayOfWeek) {
    if m == nil {
        return nil
    }
    return m.firstDayOfWeek
}
// GetIndex gets the index property value. Specifies on which instance of the allowed days specified in daysOfWeek the event occurs, counted from the first instance in the month. The possible values are: first, second, third, fourth, last. Default is first. Optional and used if type is relativeMonthly or relativeYearly.
func (m *RecurrencePattern) GetIndex()(*WeekIndex) {
    if m == nil {
        return nil
    }
    return m.index
}
// GetInterval gets the interval property value. The number of units between occurrences, where units can be in days, weeks, months, or years, depending on the type. Required.
func (m *RecurrencePattern) GetInterval()(*int32) {
    if m == nil {
        return nil
    }
    return m.interval
}
// GetMonth gets the month property value. The month in which the event occurs. This is a number from 1 to 12.
func (m *RecurrencePattern) GetMonth()(*int32) {
    if m == nil {
        return nil
    }
    return m.month
}
// GetType_escaped gets the type_escaped property value. The recurrence pattern type: daily, weekly, absoluteMonthly, relativeMonthly, absoluteYearly, relativeYearly. Required.
func (m *RecurrencePattern) GetType_escaped()(*RecurrencePatternType) {
    if m == nil {
        return nil
    }
    return m.type_escaped
}
// GetFieldDeserializers returns the deserialization information for the
// current model: a map from JSON property name to a setter that parses the
// node's value and stores it on the model. Each entry returns nil on a
// nil/absent value and propagates parse errors.
func (m *RecurrencePattern) GetFieldDeserializers()(map[string]func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode)(error)) {
    res := make(map[string]func(interface{}, i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode)(error))
    res["dayOfMonth"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetInt32Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetDayOfMonth(val)
        }
        return nil
    }
    res["daysOfWeek"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetCollectionOfEnumValues(ParseDayOfWeek)
        if err != nil {
            return err
        }
        if val != nil {
            // Unbox each element from the generic collection into the enum slice.
            res := make([]DayOfWeek, len(val))
            for i, v := range val {
                res[i] = *(v.(*DayOfWeek))
            }
            m.SetDaysOfWeek(res)
        }
        return nil
    }
    res["firstDayOfWeek"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetEnumValue(ParseDayOfWeek)
        if err != nil {
            return err
        }
        if val != nil {
            cast := val.(DayOfWeek)
            m.SetFirstDayOfWeek(&cast)
        }
        return nil
    }
    res["index"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetEnumValue(ParseWeekIndex)
        if err != nil {
            return err
        }
        if val != nil {
            cast := val.(WeekIndex)
            m.SetIndex(&cast)
        }
        return nil
    }
    res["interval"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetInt32Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetInterval(val)
        }
        return nil
    }
    res["month"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetInt32Value()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetMonth(val)
        }
        return nil
    }
    res["type_escaped"] = func (o interface{}, n i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.ParseNode) error {
        val, err := n.GetEnumValue(ParseRecurrencePatternType)
        if err != nil {
            return err
        }
        if val != nil {
            cast := val.(RecurrencePatternType)
            m.SetType_escaped(&cast)
        }
        return nil
    }
    return res
}
// IsNil reports whether the receiver itself is nil.
func (m *RecurrencePattern) IsNil()(bool) {
    return m == nil
}
// Serialize writes the current model to the given writer. Int fields are
// written unconditionally (nil pointers included); enum-backed fields are
// written only when non-nil, converted to their string form first; any
// additional data captured during deserialization is written last.
func (m *RecurrencePattern) Serialize(writer i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.SerializationWriter)(error) {
    {
        err := writer.WriteInt32Value("dayOfMonth", m.GetDayOfMonth())
        if err != nil {
            return err
        }
    }
    {
        // NOTE(review): daysOfWeek is serialized as a string collection via
        // SerializeDayOfWeek, unlike the single-enum fields below — confirm
        // this asymmetry is intended by the generator.
        err := writer.WriteCollectionOfStringValues("daysOfWeek", SerializeDayOfWeek(m.GetDaysOfWeek()))
        if err != nil {
            return err
        }
    }
    if m.GetFirstDayOfWeek() != nil {
        cast := m.GetFirstDayOfWeek().String()
        err := writer.WriteStringValue("firstDayOfWeek", &cast)
        if err != nil {
            return err
        }
    }
    if m.GetIndex() != nil {
        cast := m.GetIndex().String()
        err := writer.WriteStringValue("index", &cast)
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteInt32Value("interval", m.GetInterval())
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteInt32Value("month", m.GetMonth())
        if err != nil {
            return err
        }
    }
    if m.GetType_escaped() != nil {
        cast := m.GetType_escaped().String()
        err := writer.WriteStringValue("type_escaped", &cast)
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteAdditionalData(m.GetAdditionalData())
        if err != nil {
            return err
        }
    }
    return nil
}
// SetAdditionalData sets the AdditionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
func (m *RecurrencePattern) SetAdditionalData(value map[string]interface{})() {
    m.additionalData = value
}
// SetDayOfMonth sets the dayOfMonth property value. The day of the month on which the event occurs. Required if type is absoluteMonthly or absoluteYearly.
// A nil pointer stores "no value".
func (m *RecurrencePattern) SetDayOfMonth(value *int32)() {
    m.dayOfMonth = value
}
// SetDaysOfWeek sets the daysOfWeek property value. A collection of the days of the week on which the event occurs. The possible values are: sunday, monday, tuesday, wednesday, thursday, friday, saturday. If type is relativeMonthly or relativeYearly, and daysOfWeek specifies more than one day, the event falls on the first day that satisfies the pattern. Required if type is weekly, relativeMonthly, or relativeYearly.
func (m *RecurrencePattern) SetDaysOfWeek(value []DayOfWeek)() {
    m.daysOfWeek = value
}
// SetFirstDayOfWeek sets the firstDayOfWeek property value. The first day of the week. The possible values are: sunday, monday, tuesday, wednesday, thursday, friday, saturday. Default is sunday. Required if type is weekly.
func (m *RecurrencePattern) SetFirstDayOfWeek(value *DayOfWeek)() {
    m.firstDayOfWeek = value
}
// SetIndex sets the index property value. Specifies on which instance of the allowed days specified in daysOfWeek the event occurs, counted from the first instance in the month. The possible values are: first, second, third, fourth, last. Default is first. Optional and used if type is relativeMonthly or relativeYearly.
func (m *RecurrencePattern) SetIndex(value *WeekIndex)() {
    m.index = value
}
// SetInterval sets the interval property value. The number of units between occurrences, where units can be in days, weeks, months, or years, depending on the type. Required.
func (m *RecurrencePattern) SetInterval(value *int32)() {
    m.interval = value
}
// SetMonth sets the month property value. The month in which the event occurs. This is a number from 1 to 12.
func (m *RecurrencePattern) SetMonth(value *int32)() {
    m.month = value
}
// SetType_escaped sets the type_escaped property value. The recurrence pattern type: daily, weekly, absoluteMonthly, relativeMonthly, absoluteYearly, relativeYearly. Required.
func (m *RecurrencePattern) SetType_escaped(value *RecurrencePatternType)() {
    m.type_escaped = value
}
package knn
import (
"github.com/gonum/matrix/mat64"
"github.com/sjwhitworth/golearn/base"
pairwiseMetrics "github.com/sjwhitworth/golearn/metrics/pairwise"
util "github.com/sjwhitworth/golearn/utilities"
)
// A KNNClassifier consists of a data matrix, associated labels in the same order as the matrix, and a distance function.
// The accepted distance functions at this time are 'euclidean' and 'manhattan'.
type KNNClassifier struct {
	base.BaseEstimator
	// TrainingData is stored verbatim by Fit and scanned on every Predict.
	TrainingData base.FixedDataGrid
	// DistanceFunc selects the metric: "euclidean" or "manhattan".
	DistanceFunc string
	// NearestNeighbours is the k in k-NN.
	NearestNeighbours int
}
// NewKnnClassifier returns a new classifier configured with the given
// distance function name and neighbour count.
func NewKnnClassifier(distfunc string, neighbours int) *KNNClassifier {
	return &KNNClassifier{
		DistanceFunc:      distfunc,
		NearestNeighbours: neighbours,
	}
}
// Fit stores the training data for later; no model is built up front —
// Predict re-scans the training set on every call (instance-based k-NN).
func (KNN *KNNClassifier) Fit(trainingData base.FixedDataGrid) {
	KNN.TrainingData = trainingData
}
// Predict returns a classification for each row of `what` using the KNN
// algorithm: for each prediction row it computes the distance to every
// training row over the shared numeric attributes, takes the k nearest,
// and assigns the majority class label among them.
//
// Fixes: `for i, _ := range` simplified to `for i := range`; k is clamped
// to the number of training rows so the slice expression cannot panic when
// NearestNeighbours exceeds the training set size.
func (KNN *KNNClassifier) Predict(what base.FixedDataGrid) base.FixedDataGrid {
	// Check what distance function we are using
	var distanceFunc pairwiseMetrics.PairwiseDistanceFunc
	switch KNN.DistanceFunc {
	case "euclidean":
		distanceFunc = pairwiseMetrics.NewEuclidean()
	case "manhattan":
		distanceFunc = pairwiseMetrics.NewManhattan()
	default:
		panic("unsupported distance function")
	}
	// Check compatability
	allAttrs := base.CheckCompatable(what, KNN.TrainingData)
	if allAttrs == nil {
		// Don't have the same Attributes
		return nil
	}
	// Remove the Attributes which aren't numeric
	allNumericAttrs := make([]base.Attribute, 0)
	for _, a := range allAttrs {
		if fAttr, ok := a.(*base.FloatAttribute); ok {
			allNumericAttrs = append(allNumericAttrs, fAttr)
		}
	}
	// Generate return vector
	ret := base.GeneratePredictionVector(what)
	// Resolve Attribute specifications for both
	whatAttrSpecs := base.ResolveAttributes(what, allNumericAttrs)
	trainAttrSpecs := base.ResolveAttributes(KNN.TrainingData, allNumericAttrs)
	// Reserve storage for the most similar items
	distances := make(map[int]float64)
	// Reserve storage for voting map
	maxmap := make(map[string]int)
	// Reserve storage for row computations
	trainRowBuf := make([]float64, len(allNumericAttrs))
	predRowBuf := make([]float64, len(allNumericAttrs))
	// Iterate over all outer rows
	what.MapOverRows(whatAttrSpecs, func(predRow [][]byte, predRowNo int) (bool, error) {
		// Read the float values out
		for i := range allNumericAttrs {
			predRowBuf[i] = base.UnpackBytesToFloat(predRow[i])
		}
		predMat := util.FloatsToMatrix(predRowBuf)
		// Find the closest match in the training data
		KNN.TrainingData.MapOverRows(trainAttrSpecs, func(trainRow [][]byte, srcRowNo int) (bool, error) {
			// Read the float values out
			for i := range allNumericAttrs {
				trainRowBuf[i] = base.UnpackBytesToFloat(trainRow[i])
			}
			// Compute the distance
			trainMat := util.FloatsToMatrix(trainRowBuf)
			distances[srcRowNo] = distanceFunc.Distance(predMat, trainMat)
			return true, nil
		})
		sorted := util.SortIntMap(distances)
		// Clamp k so a training set smaller than NearestNeighbours cannot
		// make the slice expression panic.
		k := KNN.NearestNeighbours
		if k > len(sorted) {
			k = len(sorted)
		}
		values := sorted[:k]
		// Reset maxmap between rows
		for a := range maxmap {
			maxmap[a] = 0
		}
		// Vote: count the class label of each neighbour
		for _, elem := range values {
			label := base.GetClass(KNN.TrainingData, elem)
			if _, ok := maxmap[label]; ok {
				maxmap[label]++
			} else {
				maxmap[label] = 1
			}
		}
		// Pick the majority class
		var maxClass string
		maxVal := -1
		for a := range maxmap {
			if maxmap[a] > maxVal {
				maxVal = maxmap[a]
				maxClass = a
			}
		}
		base.SetClass(ret, predRowNo, maxClass)
		return true, nil
	})
	return ret
}
// A KNNRegressor consists of a data matrix, associated result variables in the same order as the matrix, and a name.
type KNNRegressor struct {
	base.BaseEstimator
	// Values holds the regression target for each training row.
	Values []float64
	// DistanceFunc selects the metric: "euclidean" or "manhattan".
	DistanceFunc string
}
// NewKnnRegressor mints a new regressor with the given distance function
// name.
func NewKnnRegressor(distfunc string) *KNNRegressor {
	return &KNNRegressor{DistanceFunc: distfunc}
}
// Fit stores the feature matrix (rows x cols, row-major in numbers) and
// the regression targets. Panics when rows does not match len(values).
func (KNN *KNNRegressor) Fit(values []float64, numbers []float64, rows int, cols int) {
	if rows != len(values) {
		panic(mat64.ErrShape)
	}
	// Data lives on the embedded base.BaseEstimator.
	KNN.Data = mat64.NewDense(rows, cols, numbers)
	KNN.Values = values
}
func (KNN *KNNRegressor) Predict(vector *mat64.Dense, K int) float64 {
// Get the number of rows
rows, _ := KNN.Data.Dims()
rownumbers := make(map[int]float64)
labels := make([]float64, 0)
// Check what distance function we are using
var distanceFunc pairwiseMetrics.PairwiseDistanceFunc
switch KNN.DistanceFunc {
case "euclidean":
distanceFunc = pairwiseMetrics.NewEuclidean()
case "manhattan":
distanceFunc = pairwiseMetrics.NewManhattan()
default:
panic("unsupported distance function")
}
for i := 0; i < rows; i++ {
row := KNN.Data.RowView(i)
rowMat := util.FloatsToMatrix(row)
distance := distanceFunc.Distance(rowMat, vector)
rownumbers[i] = distance
}
sorted := util.SortIntMap(rownumbers)
values := sorted[:K]
var sum float64
for _, elem := range values {
value := KNN.Values[elem]
labels = append(labels, value)
sum += value
}
average := sum / float64(K)
return average
} | knn/knn.go | 0.817319 | 0.539954 | knn.go | starcoder |
package list
// Map applies mapping to values and returns the results as a new slice.
func Map[T, R any](mapping func(T) R, values []T) []R {
	results := make([]R, len(values))
	Iteri(func(idx int, v T) { results[idx] = mapping(v) }, values)
	return results
}
// Map2 applies mapping to pairs of values from the two slices and returns the results as a new slice.
// The result length is the length of the shorter input.
//
// Consistency: the result variable is named `output` to match the sibling
// Map/Map3 helpers (it was previously the misleading `t3`).
func Map2[T, T2, R any](mapping func(T, T2) R, values1 []T, values2 []T2) []R {
	min, _ := Min(len(values1), len(values2))
	output := make([]R, min)
	iter := func(i int, t T, t2 T2) { output[i] = mapping(t, t2) }
	Iteri2(iter, values1, values2)
	return output
}
// Map3 applies mapping to three values from the three slices and returns the results as a new slice.
// The result length is the length of the shortest input.
//
// Consistency: the function parameter is named `mapping` to match the doc
// comment and every sibling helper (it was previously `action`).
func Map3[T, T2, T3, R any](mapping func(T, T2, T3) R, values1 []T, values2 []T2, values3 []T3) []R {
	min, _ := Min(len(values1), len(values2), len(values3))
	output := make([]R, min)
	iter := func(i int, t T, t2 T2, t3 T3) { output[i] = mapping(t, t2, t3) }
	Iteri3(iter, values1, values2, values3)
	return output
}
// Mapi applies mapping to values with the index of each value and returns the results as a new slice.
func Mapi[T, R any](mapping func(int, T) R, values []T) []R {
	results := make([]R, len(values))
	Iteri(func(idx int, v T) { results[idx] = mapping(idx, v) }, values)
	return results
}
// Mapi2 applies mapping to pairs of values with the index of each value from the two slices and returns the results as a new slice.
// The result length is the length of the shorter input.
//
// Consistency: the result variable is named `output` to match the sibling
// helpers (it was previously the misleading `t3`).
func Mapi2[T, T2, R any](mapping func(int, T, T2) R, values1 []T, values2 []T2) []R {
	min, _ := Min(len(values1), len(values2))
	output := make([]R, min)
	iter := func(i int, t T, t2 T2) { output[i] = mapping(i, t, t2) }
	Iteri2(iter, values1, values2)
	return output
}
// Mapi3 applies mapping to three values with the index of each value from the three slices and returns the results as a new slice.
func Mapi3[T, T2, T3, R any](mapping func(int, T, T2, T3) R, values1 []T, values2 []T2, values3 []T3) []R {
min, _ := Min(len(values1), len(values2), len(values3))
output := make([]R, min)
iter := func(i int, t T, t2 T2, t3 T3) { output[i] = mapping(i, t, t2, t3) }
Iteri3(iter, values1, values2, values3)
return output
}
// Map2D applies mapping to each value in the two dimensional slice and returns the results as a new two dimensional slice.
func Map2D[T, R any](mapping func(T) R, values [][]T) [][]R {
	result := CreateFromStructure2D[T, R](values)
	Iteri2D(func(i, j int, v T) { result[i][j] = mapping(v) }, values)
	return result
}

// Mapi2D applies mapping to each value in the two dimensional slice with the indexes and returns the results as a new two dimensional slice.
func Mapi2D[T, R any](mapping func(int, int, T) R, values [][]T) [][]R {
	result := CreateFromStructure2D[T, R](values)
	Iteri2D(func(i, j int, v T) { result[i][j] = mapping(i, j, v) }, values)
	return result
}
// Map3D applies mapping to each value in the three dimensional slice and returns the results as a new three dimensional slice.
func Map3D[T, R any](mapping func(T) R, values [][][]T) [][][]R {
output := CreateFromStructure3D[T, R](values)
iter := func(i, j, k int, t T) { output[i][j][k] = mapping(t) }
Iteri3D(iter, values)
return output
}
// Mapi3D applies mapping to each value in the three dimensional slice with the indexes and returns the results as a new three dimensional slice.
func Mapi3D[T, R any](mapping func(int, int, int, T) R, values [][][]T) [][][]R {
output := CreateFromStructure3D[T, R](values)
iter := func(i, j, k int, t T) { output[i][j][k] = mapping(i, j, k, t) }
Iteri3D(iter, values)
return output
} | list/map.go | 0.859133 | 0.851212 | map.go | starcoder |
package auth
import "github.com/Jeffail/benthos/v3/internal/docs"
// Description returns a markdown description of NATS authentication options
// (NKey files and User Credentials files), shared by the Benthos NATS components.
func Description() string {
	return `### Authentication

There are several components within Benthos which utilise NATS services. You will find that each of these components
support optional advanced authentication parameters for [NKeys](https://docs.nats.io/nats-server/configuration/securing_nats/auth_intro/nkey_auth)
and [User Credentials](https://docs.nats.io/developing-with-nats/security/creds).

An in depth tutorial can be found [here](https://docs.nats.io/developing-with-nats/tutorials/jwt).

#### NKey file

The NATS server can use these NKeys in several ways for authentication. The simplest is for the server to be configured
with a list of known public keys and for the clients to respond to the challenge by signing it with its private NKey
configured in the ` + "`nkey_file`" + ` field.

More details [here](https://docs.nats.io/developing-with-nats/security/nkey).

#### User Credentials file

NATS server supports decentralized authentication based on JSON Web Tokens (JWT). Clients need an [user JWT](https://docs.nats.io/nats-server/configuration/securing_nats/jwt#json-web-tokens)
and a corresponding [NKey secret](https://docs.nats.io/developing-with-nats/security/nkey) when connecting to a server
which is configured to use this authentication scheme.

The ` + "`user_credentials_file`" + ` field should point to a file containing both the private key and the JWT and can be
generated with the [nsc tool](https://docs.nats.io/nats-tools/nsc).

More details [here](https://docs.nats.io/developing-with-nats/security/creds).`
}
// FieldSpec returns documentation authentication specs for NATS components
func FieldSpec() docs.FieldSpec {
return docs.FieldAdvanced("auth", "Optional configuration of NATS authentication parameters. More information can be found [in this document](/docs/guides/nats).").WithChildren(
docs.FieldString("nkey_file", "An optional file containing a NKey seed.", "./seed.nk").Optional(),
docs.FieldString("user_credentials_file", "An optional file containing user credentials which consist of an user JWT and corresponding NKey seed.", "./user.creds").Optional(),
)
} | internal/impl/nats/auth/docs.go | 0.851398 | 0.475666 | docs.go | starcoder |
package graphhopper
// MatrixRequest is the request body for a GraphHopper matrix query. Either
// Points (symmetric NxN matrix) or FromPoints/ToPoints (asymmetric matrix)
// should be set, not both.
type MatrixRequest struct {
	// Specify multiple points for which the weight-, route-, time- or distance-matrix should be calculated. In this case the starts are identical to the destinations. If there are N points, then NxN entries will be calculated. The order of the point parameter is important. Specify at least three points. Cannot be used together with from_point or to_point. Each point is a [longitude, latitude] pair.
	Points [][]float64 `json:"points,omitempty"`
	// The starting points for the routes. E.g. if you want to calculate the three routes A->1, A->2, A->3 then you have one from_point parameter and three to_point parameters. Each point is a [longitude, latitude] pair.
	FromPoints [][]float64 `json:"from_points,omitempty"`
	// The destination points for the routes. Each point is a [longitude, latitude] pair.
	ToPoints [][]float64 `json:"to_points,omitempty"`
	// Optional parameter. Specifies a hint for each point in the `points` array to prefer a certain street for the closest location lookup. E.g. if there is an address or house with two or more neighboring streets you can control for which street the closest location is looked up.
	PointHints []string `json:"point_hints,omitempty"`
	// More information for the `from_points` array. See `point_hints`
	FromPointHints []string `json:"from_point_hints,omitempty"`
	// More information for the `to_points` array. See `point_hints`
	ToPointHints []string `json:"to_point_hints,omitempty"`
	// Specifies which arrays should be included in the response. Specify one or more of the following options 'weights', 'times', 'distances'. To specify more than one array use e.g. out_array=times&out_array=distances. The units of the entries of distances are meters, of times are seconds and of weights is arbitrary and it can differ for different vehicles or versions of this API.
	OutArrays []string `json:"out_arrays,omitempty"`
	// The vehicle for which the route should be calculated. Other vehicles are foot, small_truck etc, see here for the details.
	Vehicle string `json:"vehicle,omitempty"`
}
package tetromino
// SRS: https://tetris.fandom.com/wiki/SRS

// Rotate maps the sprite-local cell (x, y) to the index into Piece after
// rotating the sprite clockwise by `clockwise` quarter turns. The mapping
// depends on the sprite size (3x3, 4x3 or 4x4).
func (s SRSSprite) Rotate(x, y, clockwise int) int {
	switch len(s.Piece) {
	case 9:
		return s.Rotate9(x, y, clockwise)
	case 12:
		return s.Rotate12(x, y, clockwise)
	case 16:
		return s.Rotate16(x, y, clockwise)
	default:
		panic("unreachable")
	}
}

// Rotate9 rotates a 3x3 sprite; the grids in the comments show which Piece
// index lands at each (x, y) cell for that rotation.
func (s SRSSprite) Rotate9(x, y, clockwise int) int {
	switch clockwise % 4 {
	case 0:
		// 0 1 2
		// 3 4 5
		// 6 7 8
		return y*3 + x
	case 1:
		// 6 3 0
		// 7 4 1
		// 8 5 2
		return 6 + y - x*3
	case 2:
		// 8 7 6
		// 5 4 3
		// 2 1 0
		return 8 - y*3 - x
	case 3:
		// 2 5 8
		// 1 4 7
		// 0 3 6
		return 2 - y + x*3
	default:
		panic("invalid clockwise")
	}
}

// Rotate12 handles the 4x3 sprite, which never rotates.
func (s SRSSprite) Rotate12(x, y, clockwise int) int {
	// Never rotates.
	// 0 1 2 3
	// 4 5 6 7
	// 8 9 A B
	return y*4 + x
}

// Rotate16 rotates a 4x4 sprite; the grids in the comments show which Piece
// index lands at each (x, y) cell for that rotation.
func (s SRSSprite) Rotate16(x, y, clockwise int) int {
	switch clockwise % 4 {
	case 0:
		// 0 1 2 3
		// 4 5 6 7
		// 8 9 A B
		// C D E F
		return y*4 + x
	case 1:
		// C 8 4 0
		// D 9 5 1
		// E A 6 2
		// F B 7 3
		return 12 + y - x*4
	case 2:
		// F E D C
		// B A 9 8
		// 7 6 5 4
		// 3 2 1 0
		// BUG FIX: the 180-degree map is index = 15 - (y*4 + x); the previous
		// `15 - y - x*4` mixed up the row and column strides and did not match
		// the grid above (cf. Rotate9's `8 - y*3 - x`).
		return 15 - y*4 - x
	case 3:
		// 3 7 B F
		// 2 6 A E
		// 1 5 9 D
		// 0 4 8 C
		return 3 - y + x*4
	default:
		panic("invalid clockwise")
	}
}

// SRSSprite is a tetromino bitmap of Width x Height cells stored row-major
// in Piece (true = occupied).
type SRSSprite struct {
	Width  int
	Height int
	Piece  []bool
}

// NewSRSSprite builds a sprite from a row-major mask string where '.' marks an
// empty cell and any other character marks an occupied cell. The mask length
// must be 9 (3x3), 12 (4x3) or 16 (4x4); anything else panics.
func NewSRSSprite(mask string) SRSSprite {
	size := len(mask)
	sprite := SRSSprite{}
	switch size {
	case 9:
		sprite.Width = 3
		sprite.Height = 3
	case 12:
		sprite.Width = 4
		sprite.Height = 3
	case 16:
		sprite.Width = 4
		sprite.Height = 4
	default:
		panic("invalid mask size")
	}
	sprite.Piece = make([]bool, size)
	for i := 0; i < size; i++ {
		sprite.Piece[i] = mask[i] != '.'
	}
	return sprite
}

// The seven standard tetrominoes. Note: the "PEICE" misspelling is kept
// because these names are exported API.
var (
	SRS_I_PIECE = NewSRSSprite("....IIII........")
	SRS_J_PEICE = NewSRSSprite("J..JJJ...")
	SRS_L_PEICE = NewSRSSprite("..LLLL...")
	SRS_O_PIECE = NewSRSSprite(".OO..OO.....")
	SRS_S_PEICE = NewSRSSprite(".SSSS....")
	SRS_T_PEICE = NewSRSSprite(".T.TTT...")
	SRS_Z_PEICE = NewSRSSprite("ZZ..ZZ...")
)
) | tetromino/srs.go | 0.717111 | 0.492981 | srs.go | starcoder |
package ch11q03a
import "errors"
// Given an array and a guarantee that any element is no more than k positions
// away from where it would be if the array were sorted, sort the array in
// place.
//
// A min-heap holding at most k+1 elements is slid across the array: once the
// first k elements have been inserted, the overall minimum must be among them,
// so each extraction yields the next element of the sorted order. Runs in
// O(n log k).
func SortAlmostSortedArray(arr []int, k int) {
	if k <= 0 {
		// Every element is already at its sorted position (k < 0 is treated
		// the same as 0; previously a negative k caused an index panic).
		return
	}
	if k > len(arr) {
		// The guarantee is vacuous; push every element through the heap so
		// the array is fully sorted. (Previously this case returned without
		// sorting at all.)
		k = len(arr)
	}
	minHeap := MinHeap{}
	for i := 0; i < k; i++ {
		minHeap.Insert(arr[i])
	}
	for i := k; i < len(arr); i++ {
		minHeap.Insert(arr[i])
		arr[i-k], _ = minHeap.ExtractMin()
	}
	// Drain the remaining k elements in sorted order.
	for i := len(arr) - k; i < len(arr); i++ {
		arr[i], _ = minHeap.ExtractMin()
	}
}

// MinHeap is a binary min-heap of ints backed by a slice. The zero value is
// an empty, ready-to-use heap.
type MinHeap struct {
	heap []int
	size int
}

// Insert adds val to the heap, sifting it up until the heap property holds.
func (h *MinHeap) Insert(val int) {
	h.heap = append(h.heap, val)
	h.size++
	currIndex := len(h.heap) - 1
	parentIndex := h.parentIndex(currIndex)
	// parentIndex(0) == 0, so the loop naturally stops at the root: an
	// element is never strictly greater than itself.
	for h.heap[parentIndex] > h.heap[currIndex] {
		h.heap[parentIndex], h.heap[currIndex] = h.heap[currIndex], h.heap[parentIndex]
		currIndex = parentIndex
		parentIndex = h.parentIndex(currIndex)
	}
}

// ExtractMin removes and returns the smallest element. On an empty heap it
// returns an error and leaves the heap unchanged.
func (h *MinHeap) ExtractMin() (result int, err error) {
	if h.size <= 0 {
		// Return early without touching size: the old code fell through to
		// `h.size--`, driving size negative and corrupting the heap.
		err = errors.New("ExtractMin() failed on an empty heap")
		return
	}
	if h.size == 1 {
		result = h.heap[0]
		h.heap = []int{}
	} else {
		// Move the last element to the root, shrink, then sift down by
		// repeatedly swapping with the smaller child.
		result = h.heap[0]
		h.heap[0], h.heap[len(h.heap)-1] = h.heap[len(h.heap)-1], h.heap[0]
		h.heap = h.heap[:len(h.heap)-1]
		if len(h.heap) > 1 {
			currIndex := 0
			leftIndex, rightIndex := h.childIndices(currIndex)
			for h.heap[leftIndex] < h.heap[currIndex] || h.heap[rightIndex] < h.heap[currIndex] {
				if h.heap[leftIndex] < h.heap[rightIndex] {
					h.heap[currIndex], h.heap[leftIndex] = h.heap[leftIndex], h.heap[currIndex]
					currIndex = leftIndex
				} else {
					h.heap[currIndex], h.heap[rightIndex] = h.heap[rightIndex], h.heap[currIndex]
					currIndex = rightIndex
				}
				leftIndex, rightIndex = h.childIndices(currIndex)
				if leftIndex == -1 {
					break
				}
			}
		}
	}
	h.size--
	return
}

// Min returns the smallest element without removing it. Panics on an empty heap.
func (h *MinHeap) Min() int {
	return h.heap[0]
}

// Size returns the number of elements currently in the heap.
func (h *MinHeap) Size() int {
	return h.size
}

// parentIndex returns the parent of index; index 0 maps to itself, which lets
// Insert's sift-up loop terminate at the root without a special case.
func (h *MinHeap) parentIndex(index int) (result int) {
	if index < 1 {
		result = 0
	} else {
		if index%2 == 0 {
			result = index/2 - 1
		} else {
			result = index / 2
		}
	}
	return
}

// childIndices returns the child indices of index. When only the left child
// exists, both results are the left index; when neither exists, both are -1.
func (h *MinHeap) childIndices(index int) (leftIndex, rightIndex int) {
	leftIndexVal := index*2 + 1
	rightIndexVal := index*2 + 2
	if leftIndexVal > len(h.heap)-1 {
		leftIndex = -1
	} else {
		leftIndex = leftIndexVal
	}
	if rightIndexVal > len(h.heap)-1 {
		if leftIndex != -1 {
			rightIndex = leftIndex
		} else {
			rightIndex = -1
		}
	} else {
		rightIndex = rightIndexVal
	}
	return
}
package gt
import (
"database/sql/driver"
"io"
)
/*
Creates a random UUID using `gt.RandomUuid` and "crypto/rand". Panics if
random bytes can't be read.
*/
func RandomNullUuid() NullUuid {
	return NullUuid(RandomUuid())
}

// Creates a UUID (version 4 variant 1) from bytes from the provided reader,
// delegating to `gt.ReadUuid`.
func ReadNullUuid(src io.Reader) (NullUuid, error) {
	val, err := ReadUuid(src)
	return NullUuid(val), err
}

/*
Shortcut: parses successfully or panics. Should be used only in root scope. When
error handling is relevant, use `.Parse`.
*/
func ParseNullUuid(src string) (val NullUuid) {
	try(val.Parse(src))
	return
}
/*
Variant of `gt.Uuid` where zero value is considered empty in text, and null in
JSON and SQL. Features:
	* Reversible encoding/decoding in text. Zero value is "".
	* Reversible encoding/decoding in JSON. Zero value is `null`.
	* Reversible encoding/decoding in SQL. Zero value is `null`.
	* Text encoding uses simplified format without dashes.
	* Text decoding supports formats with and without dashes, case-insensitive.
Differences from `"github.com/google/uuid".UUID`:
	* Text encoding uses simplified format without dashes.
	* Text decoding supports only simplified and canonical format.
	* Supports only version 4 (random except for a few bits).
	* Zero value is considered empty in text, and null in JSON and SQL.
Differences from `"github.com/google/uuid".NullUUID`:
	* Fewer states: there is NO "00000000000000000000000000000000".
	* Easier to use: `NullUuid` is a typedef of `Uuid`, not a wrapper.
For database columns, `NullUuid` is recommended over `Uuid`, even when columns
are non-nullable. It prevents you from accidentally using zero-initialized
"00000000000000000000000000000000" in SQL or JSON, without the hassle of
pointers or additional fields.
*/
type NullUuid Uuid

// Compile-time checks that NullUuid satisfies the package's codec interfaces.
var (
	_ = Encodable(NullUuid{})
	_ = Decodable((*NullUuid)(nil))
)

// Implement `gt.Zeroable`. Equivalent to `reflect.ValueOf(self).IsZero()`.
func (self NullUuid) IsZero() bool { return Uuid(self).IsZero() }

// Implement `gt.Nullable`. True if zero; the zero UUID is what encodes as
// empty text / SQL null / JSON null.
func (self NullUuid) IsNull() bool { return self.IsZero() }

/*
Implement `gt.Getter`. If zero, returns `nil`, otherwise returns `[16]byte`
understood by many DB drivers.
*/
func (self NullUuid) Get() interface{} {
	if self.IsNull() {
		return nil
	}
	return Uuid(self).Get()
}

// Implement `gt.Setter`, using `.Scan`. Panics on error.
func (self *NullUuid) Set(src interface{}) { try(self.Scan(src)) }

// Implement `gt.Zeroer`, zeroing the receiver.
func (self *NullUuid) Zero() { (*Uuid)(self).Zero() }

/*
Implement `fmt.Stringer`. If zero, returns an empty string. Otherwise returns a
simplified text representation: lowercase without dashes.
*/
func (self NullUuid) String() string {
	if self.IsNull() {
		return ``
	}
	return Uuid(self).String()
}

/*
Implement `gt.Parser`. If the input is empty, zeroes the receiver. Otherwise
requires a valid UUID representation. Supports both the short format without
dashes, and the canonical format with dashes. Parsing is case-insensitive.
*/
func (self *NullUuid) Parse(src string) error {
	if len(src) == 0 {
		self.Zero()
		return nil
	}
	return (*Uuid)(self).Parse(src)
}

// Implement `gt.Appender`, using the same representation as `.String`.
// Appends nothing when zero.
func (self NullUuid) Append(buf []byte) []byte {
	if self.IsNull() {
		return buf
	}
	return Uuid(self).Append(buf)
}
/*
Implement `encoding.TextMarshaler`. If zero, returns nil. Otherwise returns the
same representation as `.String`.
*/
func (self NullUuid) MarshalText() ([]byte, error) {
	if self.IsNull() {
		return nil, nil
	}
	return Uuid(self).MarshalText()
}

// Implement `encoding.TextUnmarshaler`, using the same algorithm as `.Parse`.
func (self *NullUuid) UnmarshalText(src []byte) error {
	return self.Parse(bytesString(src))
}

/*
Implement `json.Marshaler`. If zero, returns bytes representing `null`.
Otherwise returns bytes representing a JSON string with the same text as in
`.String`.
*/
func (self NullUuid) MarshalJSON() ([]byte, error) {
	if self.IsNull() {
		return bytesNull, nil
	}
	return Uuid(self).MarshalJSON()
}

/*
Implement `json.Unmarshaler`. If the input is empty or represents JSON `null`,
zeroes the receiver. Otherwise parses a JSON string, using the same algorithm
as `.Parse`. Any other JSON type is an error.
*/
func (self *NullUuid) UnmarshalJSON(src []byte) error {
	if isJsonEmpty(src) {
		self.Zero()
		return nil
	}
	if isJsonStr(src) {
		return self.UnmarshalText(cutJsonStr(src))
	}
	return errJsonString(src, self)
}

// Implement `driver.Valuer`, using `.Get` (nil when zero, `[16]byte` otherwise).
func (self NullUuid) Value() (driver.Value, error) {
	return self.Get(), nil
}
/*
Implement `sql.Scanner`, converting an arbitrary input to `gt.NullUuid` and
modifying the receiver. Acceptable inputs:
	* `nil`         -> use `.Zero`
	* `string`      -> use `.Parse`
	* `[16]byte`    -> assign
	* `*[16]byte`   -> use `.Zero` or assign
	* `gt.Uuid`     -> assign
	* `gt.NullUuid` -> assign
	* `gt.Getter`   -> scan underlying value
Anything else is an error.
*/
func (self *NullUuid) Scan(src interface{}) error {
	switch src := src.(type) {
	case nil:
		self.Zero()
		return nil

	case string:
		return self.Parse(src)

	case []byte:
		return self.UnmarshalText(src)

	case [UuidLen]byte:
		*self = NullUuid(src)
		return nil

	case *[UuidLen]byte:
		if src == nil {
			self.Zero()
		} else {
			*self = NullUuid(*src)
		}
		return nil

	case Uuid:
		*self = NullUuid(src)
		return nil

	case NullUuid:
		*self = src
		return nil

	default:
		// Last resort: unwrap a `gt.Getter` and retry with its inner value.
		val, ok := get(src)
		if ok {
			return self.Scan(val)
		}
		return errScanType(self, src)
	}
}

// Equivalent to `a.String() < b.String()`. Useful for sorting.
func (self NullUuid) Less(other NullUuid) bool {
	return Uuid(self).Less(Uuid(other))
}
/*
Implement `fmt.GoStringer`, returning valid Go code that constructs this value.
The rendered code is biased for readability over performance: it parses a
string instead of using a literal constructor.
*/
func (self NullUuid) GoString() string {
	if self.IsNull() {
		return `gt.NullUuid{}`
	}

	const fun = `gt.ParseNullUuid`

	// The array is sized exactly for the worst case output, so the whole
	// result is built in one stack-friendly buffer.
	var arr [len(fun) + len("(`") + len(uuidStrZero) + len("`)")]byte
	buf := arr[:0]
	buf = append(buf, fun...)
	buf = append(buf, "(`"...)
	buf = Uuid(self).Append(buf) // `NullUuid.Append` would use another zero check.
	buf = append(buf, "`)"...)
	return string(buf)
}
package hm
import (
"fmt"
)
// Type represents all the possible type constructors.
type Type interface {
	Substitutable
	Name() string                                   // Name is the name of the constructor
	Normalize(TypeVarSet, TypeVarSet) (Type, error) // Normalize normalizes all the type variable names in the type
	Types() Types                                   // If the type is made up of smaller types, then it will return them
	Eq(Type) bool                                   // equality operation

	fmt.Formatter
	fmt.Stringer
}

// Substitutable is any type that can have a set of substitutions applied on it, as well as being able to know what its free type variables are
type Substitutable interface {
	Apply(Subs) Substitutable
	FreeTypeVar() TypeVarSet
}
// TypeConst are the default implementation of a constant type. Feel free to implement your own. TypeConsts should be immutable (so no pointer types plz)
type TypeConst string

// A constant type has no substructure: it is its own name, has no free type
// variables, normalizes to itself, and is equal only to an identical constant.
func (t TypeConst) Name() string                            { return string(t) }
func (t TypeConst) Apply(Subs) Substitutable                { return t }
func (t TypeConst) FreeTypeVar() TypeVarSet                 { return nil }
func (t TypeConst) Normalize(k, v TypeVarSet) (Type, error) { return t, nil }
func (t TypeConst) Types() Types                            { return nil }
func (t TypeConst) String() string                          { return string(t) }
func (t TypeConst) Format(s fmt.State, c rune)              { fmt.Fprintf(s, "%s", string(t)) }
func (t TypeConst) Eq(other Type) bool                      { return other == t }
// Record is a basic record/tuple type. It takes an optional name.
type Record struct {
	ts   []Type // component types, in order
	name string // optional; when empty, Name() falls back to String()
}

// NewRecordType creates a new Record Type from the given component types.
func NewRecordType(name string, ts ...Type) *Record {
	return &Record{
		ts:   ts,
		name: name,
	}
}
// Apply applies the substitutions to every component type and returns the
// result as a fresh Record with the same name.
func (t *Record) Apply(subs Subs) Substitutable {
	applied := make([]Type, len(t.ts))
	for i, typ := range t.ts {
		applied[i] = typ.Apply(subs).(Type)
	}
	return NewRecordType(t.name, applied...)
}

// FreeTypeVar returns the union of the free type variables of all components.
func (t *Record) FreeTypeVar() TypeVarSet {
	var free TypeVarSet
	for _, typ := range t.ts {
		free = typ.FreeTypeVar().Union(free)
	}
	return free
}
// Name returns the record's explicit name, falling back to its String()
// rendering when no name was given.
func (t *Record) Name() string {
	if t.name != "" {
		return t.name
	}
	return t.String()
}

// Normalize normalizes every component type, returning a fresh Record with
// the same name, or the first error encountered.
func (t *Record) Normalize(k, v TypeVarSet) (Type, error) {
	ts := make([]Type, len(t.ts))
	var err error
	for i, tt := range t.ts {
		if ts[i], err = tt.Normalize(k, v); err != nil {
			return nil, err
		}
	}
	return NewRecordType(t.name, ts...), nil
}

// Types returns a copy of the component types in a borrowed slice
// (see BorrowTypes).
func (t *Record) Types() Types {
	ts := BorrowTypes(len(t.ts))
	copy(ts, t.ts)
	return ts
}

// Eq reports structural equality: other must also be a *Record with the same
// number of components, pairwise equal. Note the name is not compared.
func (t *Record) Eq(other Type) bool {
	if ot, ok := other.(*Record); ok {
		if len(ot.ts) != len(t.ts) {
			return false
		}
		for i, v := range t.ts {
			if !v.Eq(ot.ts[i]) {
				return false
			}
		}
		return true
	}
	return false
}
// Format implements fmt.Formatter, rendering the record as a parenthesized,
// comma-separated list of its component types, e.g. "(a, b)". The previous
// version emitted the closing paren only inside the loop, so an empty record
// rendered as an unbalanced "("; it now renders as "()".
func (t *Record) Format(f fmt.State, c rune) {
	f.Write([]byte("("))
	for i, v := range t.ts {
		if i > 0 {
			fmt.Fprint(f, ", ")
		}
		fmt.Fprintf(f, "%v", v)
	}
	f.Write([]byte(")"))
}

// String implements fmt.Stringer via Format.
func (t *Record) String() string { return fmt.Sprintf("%v", t) }
// Clone implements Cloner. Components that themselves implement Cloner are
// deep-cloned; all others are copied by reference. The component slice comes
// from the BorrowTypes pool.
func (t *Record) Clone() interface{} {
	retVal := new(Record)

	ts := BorrowTypes(len(t.ts))
	for i, tt := range t.ts {
		if c, ok := tt.(Cloner); ok {
			ts[i] = c.Clone().(Type)
		} else {
			ts[i] = tt
		}
	}
	retVal.ts = ts
	retVal.name = t.name

	return retVal
}
package simplifier
import "github.com/twtiger/gosecco/tree"
// reduceTransformers applies each transformer in order, feeding the output of
// one as the input of the next, and returns the final expression.
func reduceTransformers(inp tree.Expression, ss ...tree.Transformer) tree.Expression {
	current := inp
	for _, transformer := range ss {
		current = transformer.Transform(current)
	}
	return current
}
// SimplifyPolicy will take a policy and simplify all expressions in it.
// NOTE(review): the assignment to r.Body only mutates the policy if Rules
// holds pointers; if it were a slice of values this loop would silently
// modify copies — confirm against tree.Policy's declaration.
func SimplifyPolicy(pol *tree.Policy) {
	for _, r := range pol.Rules {
		r.Body = Simplify(r.Body)
	}
}
// Simplify will take an expression and reduce it as much as possible using state operations
func Simplify(inp tree.Expression) tree.Expression {
	return reduceTransformers(inp,
		// X in [P]  ==>  X == P
		// X in [P, Q, R] where X and R can be determined to not be equal ==> X in [P, Q]
		// X in [P, Q, R] where X and one of the values can be determined to be equal ==> true
		// X notIn [P] ==> X != P
		// X notIn [P, Q, R] where X and R can be determined to not be equal ==> X notIn [P, Q]
		// X notIn [P, Q, R] where X and one of the values can be determined to be equal ==> false
		createInclusionSimplifier(),

		// X in [P, Q, R]  ==>  X == P || X == Q || X == R
		// X notIn [P, Q, R]  ==>  X != P && X != Q && X != R
		createInclusionRemoverSimplifier(),

		// X < Y  ==>  Y >= X
		// X <= Y  ==>  Y > X
		createLtExpressionsSimplifier(),

		// Where X and Y can be determined statically:
		// X + Y   ==>  [X+Y]
		// X - Y   ==>  [X-Y]
		// X * Y   ==>  [X*Y]
		// X / Y   ==>  [X/Y]
		// X % Y   ==>  [X%Y]
		// X & Y   ==>  [X&Y]
		// X | Y   ==>  [X|Y]
		// X ^ Y   ==>  [X^Y]
		// X << Y  ==>  [X<<Y]
		// X >> Y  ==>  [X>>Y]
		// ~X      ==>  [~X]
		// Note that these calculations will all be done on 64bit unsigned values
		// - this could lead to different result than if they were evaluated by the BPF engine.
		createArithmeticSimplifier(),

		// Where X and Y can be determined statically:
		// X == Y where X == Y  ==>  true
		// X == Y where X != Y  ==>  false
		// X != Y where X == Y  ==>  false
		// X != Y where X != Y  ==>  true
		// X > Y where X > Y    ==>  true
		// X > Y where X <= Y   ==>  false
		// X >= Y where X >= Y  ==>  true
		// X >= Y where X < Y   ==>  false
		// X < Y where X < Y    ==>  true
		// X < Y where X >= Y   ==>  false
		// X <= Y where X <= Y  ==>  true
		// X <= Y where X > Y   ==>  false
		createComparisonSimplifier(),

		// !true          ==>  false
		// !false         ==>  true
		// false || Y     ==>  Y
		// false || true  ==>  true
		// false || false ==>  false
		// true || Y      ==>  true
		// true && true   ==>  true
		// true && false  ==>  false
		// true && Y      ==>  Y
		// false && [any] ==>  false
		createBooleanSimplifier(),

		// ~X  ==>  X ^ 0xFFFFFFFFFFFFFFFF
		createBinaryNegationSimplifier(),

		// Where X can be determined statically (the opposite order is also valid)
		// arg0 == X     ==>  argL0 == X.low && argH0 == X.high
		// arg0 != X     ==>  argL0 != X.low || argH0 != X.high
		// arg0 > X      ==>  argH0 > X.high || (argH0 == X.high && argL0 > X.low)
		// arg0 >= X     ==>  argH0 > X.high || (argH0 == X.high && argL0 >= X.low)
		// arg0 == arg1  ==>  argL0 == argL1 && argH0 == argH1
		// arg0 != arg1  ==>  argL0 != argL1 || argH0 != argH1
		// arg0 > arg1   ==>  argH0 > argH1 || (argH0 == argH1 && argL0 > argL1)
		// arg0 >= arg1  ==>  argH0 > argH1 || (argH0 == argH1 && argL0 >= argL1)
		createFullArgumentSplitterSimplifier(),

		// We repeat some of the simplifiers in the hope that the above operations have opened up new avenues of simplification
		createArithmeticSimplifier(),
		createComparisonSimplifier(),
		createBooleanSimplifier(),
		createBinaryNegationSimplifier(),
	)
}
func potentialExtractFullArgument(a tree.Expression) (int, bool) {
v, ok := a.(tree.Argument)
if ok && v.Type == tree.Full {
return v.Index, ok
}
return 0, false
}
func potentialExtractValue(a tree.Numeric) (uint64, bool) {
v, ok := a.(tree.NumericLiteral)
if ok {
return v.Value, ok
}
return 0, false
}
func potentialExtractValueParts(a tree.Numeric) (uint64, uint64, bool) {
v, ok := a.(tree.NumericLiteral)
if ok {
low := v.Value & 0xFFFFFFFF
high := (v.Value >> 32) & 0xFFFFFFFF
return low, high, ok
}
return 0, 0, false
}
func potentialExtractBooleanValue(a tree.Boolean) (bool, bool) {
v, ok := a.(tree.BooleanLiteral)
if ok {
return v.Value, ok
}
return false, false
} | vendor/github.com/twtiger/gosecco/simplifier/simplifier.go | 0.560734 | 0.633821 | simplifier.go | starcoder |
package rtc
import "math"
// SmoothTriangle returns a new smooth SmoothTriangleT built from three vertex
// positions and their corresponding vertex normals.
func SmoothTriangle(p1, p2, p3, n1, n2, n3 Tuple) *SmoothTriangleT {
	base := Triangle(p1, p2, p3)
	st := &SmoothTriangleT{
		TriangleT: *base,
		N1:        n1,
		N2:        n2,
		N3:        n3,
	}
	return st
}
// SmoothTriangleT represents a smooth triangle object: a TriangleT plus
// per-vertex normals N1..N3 used for normal interpolation.
type SmoothTriangleT struct {
	TriangleT
	N1 Tuple
	N2 Tuple
	N3 Tuple
}

// Compile-time check that SmoothTriangleT satisfies the Object interface.
var _ Object = &SmoothTriangleT{}
// SetTransform sets the object's transform 4x4 matrix and returns the object
// to allow chaining.
func (s *SmoothTriangleT) SetTransform(m M4) Object {
	s.Transform = m
	return s
}

// SetMaterial sets the object's material and returns the object to allow
// chaining.
func (s *SmoothTriangleT) SetMaterial(material MaterialT) Object {
	s.Material = material
	return s
}

// SetParent sets the object's parent object and returns the object to allow
// chaining.
func (s *SmoothTriangleT) SetParent(parent Object) Object {
	s.Parent = parent
	return s
}

// Bounds returns the minimum bounding box of the object in object
// (untransformed) space.
func (s *SmoothTriangleT) Bounds() *BoundsT {
	return s.bounds
}
// LocalIntersect returns a slice of IntersectionT values where the
// transformed (object space) ray intersects the object. The structure follows
// a Möller–Trumbore-style test: a near-zero determinant means the ray is
// parallel to the triangle, and u/v are the barycentric coordinates recorded
// on the intersection for later normal interpolation.
func (s *SmoothTriangleT) LocalIntersect(ray RayT) []IntersectionT {
	dirCrossE2 := ray.Direction.Cross(s.E2)
	det := s.E1.Dot(dirCrossE2)
	if math.Abs(det) < epsilon {
		// Ray is (numerically) parallel to the triangle plane: no hit.
		return nil
	}

	f := 1 / det
	p1ToOrigin := ray.Origin.Sub(s.P1)
	u := f * p1ToOrigin.Dot(dirCrossE2)
	if u < 0 || u > 1 {
		// Hit point lies outside the P1-P3 edge.
		return nil
	}

	originCrossE1 := p1ToOrigin.Cross(s.E1)
	v := f * ray.Direction.Dot(originCrossE1)
	if v < 0 || u+v > 1 {
		// Hit point lies outside the remaining edges.
		return nil
	}

	tv := f * s.E2.Dot(originCrossE1)
	return Intersections(IntersectionWithUV(tv, s, u, v))
}
// LocalNormalAt returns the normal vector at the given point of intersection
// (transformed to object space), interpolating the three vertex normals with
// the hit's barycentric u/v coordinates.
func (s *SmoothTriangleT) LocalNormalAt(objectPoint Tuple, hit *IntersectionT) Tuple {
	n2Part := s.N2.MultScalar(hit.U)
	n3Part := s.N3.MultScalar(hit.V)
	n1Part := s.N1.MultScalar(1 - hit.U - hit.V)
	return n2Part.Add(n3Part.Add(n1Part))
}
// Includes returns whether this object includes (or actually is) the
// other object. A triangle has no children, so this is pointer identity.
func (s *SmoothTriangleT) Includes(other Object) bool {
	return s == other
}
package engine
import (
"math"
"math/rand"
)
// init registers the ext_make3dgrains script function with the engine.
func init() {
	DeclFunc("ext_make3dgrains", Voronoi3d, "3D Voronoi tesselation over shape (grain size, starting region number, num regions, shape, seed)")
}
// Voronoi3d tesselates inputShape into random grains of average diameter
// grainsize, assigning each grain a region number in
// [startRegion, startRegion+numRegions), and renders the result into the
// global regions field.
func Voronoi3d(grainsize float64, startRegion int, numRegions int, inputShape Shape, seed int) {
	Refer("Lel2014")
	SetBusy(true)
	defer SetBusy(false)

	t := newTesselation3d(grainsize, numRegions, int64(seed), startRegion, inputShape)
	// Keep the region function in history so the tesselation survives re-rendering.
	regions.hist = append(regions.hist, t.RegionOf)
	regions.render(t.RegionOf)
}
// tesselation3d holds the parameters and the generated Voronoi centers of a
// 3D grain tesselation.
type tesselation3d struct {
	grainsize   float64    // target average grain diameter (m)
	maxRegion   int        // number of distinct region values to use
	rnd         *rand.Rand // seeded source for reproducible center/region choice
	startRegion int        // first region number to assign
	shape       Shape      //Shape of the tesselated region
	centers     []center3d //List of Voronoi centers
}

// Stores location of each Voronoi center
type center3d struct {
	x, y, z float64 // center position (m)
	region  byte    // region for all cells near center
}

// Stores location of each cell
type cellLocs struct{ x, y, z float64 }
// newTesselation3d builds a tesselation over inputShape and immediately
// generates its random Voronoi centers. nRegion exclusive: region numbers are
// drawn from [startRegion, startRegion+nRegion).
func newTesselation3d(grainsize float64, nRegion int, seed int64, startRegion int, inputShape Shape) *tesselation3d {
	t := tesselation3d{grainsize,
		nRegion,
		//make(map[int3d][]center3d),
		rand.New(rand.NewSource(seed)),
		startRegion,
		inputShape,
		make([]center3d, 0)}
	t.makeRandomCenters()
	return &t
}
// Permutes the slice of cell locations, returning a new slice.
// NOTE(review): this uses the package-level rand source, not the seeded
// t.rnd, so the shuffle is not reproducible from the user-supplied seed —
// confirm whether that is intended.
func shuffleCells(src []cellLocs) []cellLocs {
	dest := make([]cellLocs, len(src))
	perm := rand.Perm(len(src))
	for i, v := range perm {
		dest[v] = src[i]
	}
	return dest
}
// makeRandomCenters picks random grain centers among the cells of the shape
// and assigns each a random region number.
func (t *tesselation3d) makeRandomCenters() {
	// Make a list of all the cells in the shape.
	cells := t.tabulateCells()
	cells = shuffleCells(cells)
	if len(cells) == 0 {
		// Empty shape: nothing to tesselate (also avoids Intn(0) panicking below).
		return
	}

	// Choose number of grains to make: expected count is the shape volume
	// divided by the volume of a sphere of diameter grainsize, (1/6)*pi*d^3.
	shapeVolume := cellVolume() * float64(len(cells))
	grainVolume := (float64(1) / 6) * math.Pi * t.grainsize * t.grainsize * t.grainsize
	nAvgGrains := shapeVolume / grainVolume
	nGrains := t.truncNorm(nAvgGrains)

	//TODO: same cell can be chosen twice by random chance
	t.centers = make([]center3d, nGrains)
	for p := 0; p < nGrains; p++ {
		// Index over all cells, not over nGrains: the previous
		// t.rnd.Intn(nGrains) panicked whenever nGrains > len(cells) and
		// otherwise restricted centers to a prefix of the shuffled list.
		rndCell := cells[t.rnd.Intn(len(cells))]
		t.centers[p].x = rndCell.x
		t.centers[p].y = rndCell.y
		t.centers[p].z = rndCell.z
		randRegion := t.startRegion + t.rnd.Intn(t.maxRegion)
		t.centers[p].region = byte(randRegion)
	}
}
// Creates a slice of all cells which fall in the shape specified in the
// constructor, by testing the center coordinate of every mesh cell against
// the shape predicate.
func (t *tesselation3d) tabulateCells() []cellLocs {

	//Initialze array of cells
	cells := make([]cellLocs, 0)

	//Get the mesh size
	meshSize := MeshSize()

	//Iterate across all cells in the mesh, and append those that are inside the shape
	for ix := 0; ix < meshSize[0]; ix++ {
		for iy := 0; iy < meshSize[1]; iy++ {
			for iz := 0; iz < meshSize[2]; iz++ {
				cell := Index2Coord(ix, iy, iz)
				x := cell.X()
				y := cell.Y()
				z := cell.Z()
				if t.shape(x, y, z) {
					cells = append(cells, cellLocs{x, y, z})
				}
			}
		}
	}
	// Progress/diagnostic output.
	print("Number of cells in region: ", len(cells), "\n")
	print("Number of cells in universe: ", meshSize[0]*meshSize[1]*meshSize[2], "\n")
	return cells
}
// Find the nearest Voronoi center to the point (x, y, z) by linear scan over
// squared distances. Only points inside the shape are assigned a region;
// points outside get -1, which is not rastered when regions are rendered.
func (t *tesselation3d) RegionOf(x, y, z float64) int {
	if !t.shape(x, y, z) {
		return -1
	}
	nearest := center3d{x, y, z, 0}
	mindist := math.Inf(1)
	for _, c := range t.centers {
		d := sqr(x-c.x) + sqr(y-c.y) + sqr(z-c.z)
		if d < mindist {
			nearest = c
			mindist = d
		}
	}
	return int(nearest.region)
}
// Generate normally distributed numbers; mean = lambda, variance = lambda. If generated number < 0, return 1.
// Equivalent to Poisson distribution (with mean = lambda) for large lambda (which is usually true, since the volume
// of a grain is usually much less than the simulation volume.
func (t *tesselation3d) truncNorm(lambda float64) int {
ret := lambda + math.Sqrt(lambda)*t.rnd.NormFloat64()
if ret <= 0 {
return 1
} else {
return int(ret + 0.5)
}
} | engine/ext_make3dgrains.go | 0.626924 | 0.601359 | ext_make3dgrains.go | starcoder |
package canvas
import (
"image"
"image/color"
"math"
)
// LinearGradient defines a Gradient travelling straight at a given angle.
// Generate recognizes the angles 0/180 (vertical) and 90/270 (horizontal);
// any other value falls back to the vertical (0) behaviour.
type LinearGradient struct {
	baseObject

	StartColor color.Color // The beginning RGBA color of the gradient
	EndColor   color.Color // The end RGBA color of the gradient
	Angle      float64     // The angle of the gradient (0/180 for vertical; 90/270 for horizontal)
}
// Generate calculates an image of the gradient with the specified width and
// height. The generator maps a pixel position to a blend fraction in [0, 1];
// unrecognized angles fall back to the plain vertical gradient.
func (g *LinearGradient) Generate(w, h int) image.Image {
	var generator func(x, y, w, h float64) float64
	switch g.Angle {
	case 90:
		// horizontal flipped
		generator = func(x, _, w, _ float64) float64 {
			return ((w - 1) - x) / (w - 1)
		}
	case 270:
		// horizontal
		generator = func(x, _, w, _ float64) float64 {
			return x / (w - 1)
		}
	case 180:
		// vertical flipped
		generator = func(_, y, _, h float64) float64 {
			return ((h - 1) - y) / (h - 1)
		}
	default:
		// vertical
		generator = func(_, y, _, h float64) float64 {
			return y / (h - 1)
		}
	}
	return computeGradient(generator, w, h, g.StartColor, g.EndColor)
}
// RadialGradient defines a Gradient travelling radially from a center point outward.
type RadialGradient struct {
	baseObject
	StartColor color.Color // The beginning RGBA color of the gradient (at the center)
	EndColor   color.Color // The end RGBA color of the gradient (at the edge)
	// The offset of the center for generation of the gradient.
	// This is not a DP measure but relates to the width/height.
	// A value of 0.5 would move the center by the half width/height.
	CenterOffsetX, CenterOffsetY float64
}
// Generate calculates an image of the gradient with the specified width and height.
// The blend factor is the elliptical distance from the (possibly offset)
// center, normalised so the gradient reaches EndColor at the image edge.
func (g *RadialGradient) Generate(w, h int) image.Image {
	generator := func(x, y, w, h float64) float64 {
		// Center of the gradient, shifted by the relative offsets.
		cx := w/2 + w*g.CenterOffsetX
		cy := h/2 + h*g.CenterOffsetY
		// Semi-axes of the ellipse; a negative offset mirrors the
		// measurement so the gradient still spans the visible side.
		rx, ry := cx, cy
		if g.CenterOffsetX < 0 {
			rx = w - cx
		}
		if g.CenterOffsetY < 0 {
			ry = h - cy
		}
		// Elliptical distance from the center, clamped to [0, 1].
		dx, dy := cx-x, cy-y
		d := math.Sqrt(dx*dx + dy*dy*rx*rx/ry/ry)
		if d > rx {
			return 1
		}
		return d / rx
	}
	return computeGradient(generator, w, h, g.StartColor, g.EndColor)
}
func calculatePixel(d float64, startColor, endColor color.Color) *color.RGBA64 {
// fetch RGBA values
aR, aG, aB, aA := startColor.RGBA()
bR, bG, bB, bA := endColor.RGBA()
// Get difference
dR := float64(bR) - float64(aR)
dG := float64(bG) - float64(aG)
dB := float64(bB) - float64(aB)
dA := float64(bA) - float64(aA)
// Apply gradations
pixel := &color.RGBA64{
R: uint16(float64(aR) + d*dR),
B: uint16(float64(aB) + d*dB),
G: uint16(float64(aG) + d*dG),
A: uint16(float64(aA) + d*dA),
}
return pixel
}
// computeGradient renders a w x h image by evaluating generator at every
// pixel and interpolating between startColor and endColor by the returned
// factor. A nil start or end color is treated as transparent; if both are
// nil the untouched (fully transparent) image is returned as-is.
func computeGradient(generator func(x, y, w, h float64) float64, w, h int, startColor, endColor color.Color) image.Image {
	img := image.NewRGBA(image.Rect(0, 0, w, h))
	if startColor == nil && endColor == nil {
		return img
	}
	if startColor == nil {
		startColor = color.Transparent
	}
	if endColor == nil {
		endColor = color.Transparent
	}
	fw, fh := float64(w), float64(h)
	for y := 0; y < h; y++ {
		for x := 0; x < w; x++ {
			d := generator(float64(x), float64(y), fw, fh)
			img.Set(x, y, calculatePixel(d, startColor, endColor))
		}
	}
	return img
}
// NewHorizontalGradient creates a new horizontally travelling linear gradient.
func NewHorizontalGradient(start, end color.Color) *LinearGradient {
	return &LinearGradient{StartColor: start, EndColor: end, Angle: 270}
}
// NewRadialGradient creates a new radial gradient.
func NewRadialGradient(start, end color.Color) *RadialGradient {
	g := &RadialGradient{}
	g.StartColor = start
	g.EndColor = end
	return g
}
// NewVerticalGradient creates a new vertically travelling linear gradient.
func NewVerticalGradient(start color.Color, end color.Color) *LinearGradient {
return &LinearGradient{StartColor: start, EndColor: end}
} | canvas/gradient.go | 0.922517 | 0.698856 | gradient.go | starcoder |
package adaptortest
import (
"context"
"github.com/dogmatiq/dogma"
"github.com/dogmatiq/dogma/fixtures"
"github.com/dogmatiq/projectionkit/resource"
"github.com/onsi/ginkgo"
"github.com/onsi/ginkgo/extensions/table"
"github.com/onsi/gomega"
)
// DescribeAdaptor declares generic behavioral tests for a specific adaptor
// implementation. The suite exercises the optimistic-concurrency (OCC)
// behavior of HandleEvent, resource-version bookkeeping, CloseResource,
// and the low-level resource helpers. Pointers are supplied so the caller
// can populate the context and adaptor in their own BeforeEach before
// these hooks dereference them.
func DescribeAdaptor(
	ctxP *context.Context,
	adaptorP *dogma.ProjectionMessageHandler,
) {
	var (
		ctx     context.Context
		adaptor dogma.ProjectionMessageHandler
	)

	ginkgo.BeforeEach(func() {
		// Dereference per-spec so the caller's setup has already run.
		ctx = *ctxP
		adaptor = *adaptorP
	})

	// OCC happy path and version-mismatch rejection.
	ginkgo.Describe("func HandleEvent()", func() {
		ginkgo.It("does not produce errors when OCC parameters are supplied correctly", func() {
			ginkgo.By("persisting the initial resource version")

			ok, err := adaptor.HandleEvent(
				context.Background(),
				[]byte("<resource>"),
				nil,
				[]byte("<version 01>"),
				nil,
				fixtures.MessageA1,
			)
			gomega.Expect(ok).Should(gomega.BeTrue())
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())

			v, err := adaptor.ResourceVersion(
				context.Background(),
				[]byte("<resource>"),
			)
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
			gomega.Expect(v).To(gomega.Equal([]byte("<version 01>")))

			ginkgo.By("persisting the next resource version")

			ok, err = adaptor.HandleEvent(
				context.Background(),
				[]byte("<resource>"),
				[]byte("<version 01>"),
				[]byte("<version 02>"),
				nil,
				fixtures.MessageA2,
			)
			gomega.Expect(ok).Should(gomega.BeTrue())
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())

			v, err = adaptor.ResourceVersion(
				context.Background(),
				[]byte("<resource>"),
			)
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
			gomega.Expect(v).To(gomega.Equal([]byte("<version 02>")))

			ginkgo.By("discarding a resource if the next resource version is empty")

			ok, err = adaptor.HandleEvent(
				context.Background(),
				[]byte("<resource>"),
				[]byte("<version 02>"),
				nil,
				nil,
				fixtures.MessageA3,
			)
			gomega.Expect(ok).Should(gomega.BeTrue())
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())

			v, err = adaptor.ResourceVersion(
				context.Background(),
				[]byte("<resource>"),
			)
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
			gomega.Expect(v).To(gomega.BeEmpty())
		})

		ginkgo.It("returns false if supplied resource version is not the current version", func() {
			ok, err := adaptor.HandleEvent(
				context.Background(),
				[]byte("<resource>"),
				nil,
				[]byte("<version 01>"),
				nil,
				fixtures.MessageA1,
			)
			gomega.Expect(ok).Should(gomega.BeTrue())
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())

			// OCC conflict: mismatch must reject without error.
			ok, err = adaptor.HandleEvent(
				context.Background(),
				[]byte("<resource>"),
				[]byte("<incorrect current version>"),
				[]byte("<version 02>"),
				nil,
				fixtures.MessageA2,
			)
			gomega.Expect(ok).Should(gomega.BeFalse())
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
		})
	})

	// Version lookup for known and unknown resources.
	ginkgo.Describe("func ResourceVersion()", func() {
		ginkgo.It("returns a resource version", func() {
			ok, err := adaptor.HandleEvent(
				context.Background(),
				[]byte("<resource>"),
				nil,
				[]byte("<version 01>"),
				nil,
				fixtures.MessageA1,
			)
			gomega.Expect(ok).Should(gomega.BeTrue())
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())

			v, err := adaptor.ResourceVersion(
				context.Background(),
				[]byte("<resource>"),
			)
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
			gomega.Expect(v).To(gomega.Equal([]byte("<version 01>")))
		})

		ginkgo.It("returns nil if no current resource version present in the database", func() {
			v, err := adaptor.ResourceVersion(
				context.Background(),
				[]byte("<resource>"),
			)
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
			gomega.Expect(v).To(gomega.BeEmpty())
		})
	})

	// Closing a resource must erase its stored version.
	ginkgo.Describe("func CloseResource()", func() {
		ginkgo.It("removes a resource version", func() {
			ok, err := adaptor.HandleEvent(
				context.Background(),
				[]byte("<resource>"),
				nil,
				[]byte("<version 01>"),
				nil,
				fixtures.MessageA2,
			)
			gomega.Expect(ok).Should(gomega.BeTrue())
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())

			err = adaptor.CloseResource(
				context.Background(),
				[]byte("<resource>"),
			)
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())

			v, err := adaptor.ResourceVersion(
				context.Background(),
				[]byte("<resource>"),
			)
			gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
			gomega.Expect(v).To(gomega.BeEmpty())
		})
	})

	// Direct exercises of the resource helper package against the adaptor.
	ginkgo.Context("low-level resource API", func() {
		ginkgo.When("the resource does not exist", func() {
			ginkgo.It("stores the version", func() {
				err := resource.StoreVersion(
					ctx,
					adaptor,
					[]byte("<resource>"),
					[]byte("<version>"),
				)
				gomega.Expect(err).ShouldNot(gomega.HaveOccurred())

				ver, err := adaptor.ResourceVersion(
					ctx,
					[]byte("<resource>"),
				)
				gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
				gomega.Expect(ver).To(gomega.Equal([]byte("<version>")))
			})

			table.DescribeTable(
				"it updates the version",
				func(current []byte) {
					ok, err := resource.UpdateVersion(
						ctx,
						adaptor,
						[]byte("<resource>"),
						current,
						[]byte("<version>"),
					)
					gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
					gomega.Expect(ok).To(gomega.BeTrue())

					ver, err := adaptor.ResourceVersion(
						ctx,
						[]byte("<resource>"),
					)
					gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
					gomega.Expect(ver).To(gomega.Equal([]byte("<version>")))
				},
				table.Entry("nil byte-slice", nil),
				table.Entry("empty byte-slice", []byte{}),
			)

			ginkgo.It("does not update the version if the supplied current version is incorrect", func() {
				ok, err := resource.UpdateVersion(
					ctx,
					adaptor,
					[]byte("<resource>"),
					[]byte("<incorrect>"),
					[]byte("<version>"),
				)
				gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
				gomega.Expect(ok).To(gomega.BeFalse())
			})
		})

		ginkgo.When("the resource exists", func() {
			ginkgo.BeforeEach(func() {
				err := resource.StoreVersion(
					ctx,
					adaptor,
					[]byte("<resource>"),
					[]byte("<version>"),
				)
				gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
			})

			ginkgo.It("reports the expected version", func() {
				ver, err := adaptor.ResourceVersion(
					ctx,
					[]byte("<resource>"),
				)
				gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
				gomega.Expect(ver).To(gomega.Equal([]byte("<version>")))
			})

			ginkgo.It("stores the version", func() {
				err := resource.StoreVersion(
					ctx,
					adaptor,
					[]byte("<resource>"),
					[]byte("<version>"),
				)
				gomega.Expect(err).ShouldNot(gomega.HaveOccurred())

				ver, err := adaptor.ResourceVersion(
					ctx,
					[]byte("<resource>"),
				)
				gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
				gomega.Expect(ver).To(gomega.Equal([]byte("<version>")))
			})

			table.DescribeTable(
				"it stores an empty version",
				func(next []byte) {
					err := resource.StoreVersion(
						ctx,
						adaptor,
						[]byte("<resource>"),
						next,
					)
					gomega.Expect(err).ShouldNot(gomega.HaveOccurred())

					ver, err := adaptor.ResourceVersion(
						ctx,
						[]byte("<resource>"),
					)
					gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
					gomega.Expect(ver).To(gomega.BeEmpty())
				},
				table.Entry("nil byte-slice", nil),
				table.Entry("empty byte-slice", []byte{}),
			)

			table.DescribeTable(
				"it updates the version",
				func(next []byte) {
					ok, err := resource.UpdateVersion(
						ctx,
						adaptor,
						[]byte("<resource>"),
						[]byte("<version>"),
						next,
					)
					gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
					gomega.Expect(ok).To(gomega.BeTrue())

					ver, err := adaptor.ResourceVersion(
						ctx,
						[]byte("<resource>"),
					)
					gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
					gomega.Expect(ver).To(gomega.Equal(next))
				},
				table.Entry("nil byte-slice", nil),
				table.Entry("empty byte-slice", []byte{}),
				table.Entry("non-empty byte-slice", []byte("<next-version>")),
			)

			table.DescribeTable(
				"it does not update the version if the supplied current version is incorrect",
				func(current []byte) {
					ok, err := resource.UpdateVersion(
						ctx,
						adaptor,
						[]byte("<resource>"),
						current,
						[]byte("<version>"),
					)
					gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
					gomega.Expect(ok).To(gomega.BeFalse())
				},
				table.Entry("nil byte-slice", nil),
				table.Entry("empty byte-slice", []byte{}),
				table.Entry("non-empty byte-slice", []byte("<incorrect>")),
			)

			ginkgo.It("can delete the resource", func() {
				err := resource.DeleteResource(
					ctx,
					adaptor,
					[]byte("<resource>"),
				)
				gomega.Expect(err).ShouldNot(gomega.HaveOccurred())

				ver, err := adaptor.ResourceVersion(
					ctx,
					[]byte("<resource>"),
				)
				gomega.Expect(err).ShouldNot(gomega.HaveOccurred())
				gomega.Expect(ver).To(gomega.BeEmpty())
			})
		})
	})
}
package clang
// #include "./clang-c/Index.h"
// #include "go-clang.h"
import "C"
import "fmt"
/*
Describes a single piece of text within a code-completion string.

Each "chunk" within a code-completion string (CXCompletionString) is
either a piece of text with a specific "kind" that describes how that text
should be interpreted by the client or is another completion string.

The values mirror libclang's CXCompletionChunkKind enumeration.
*/
type CompletionChunkKind uint32
const (
	/*
		A code-completion string that describes "optional" text that
		could be a part of the template (but is not required).

		The Optional chunk is the only kind of chunk that has a code-completion
		string for its representation, which is accessible via
		clang_getCompletionChunkCompletionString(). The code-completion string
		describes an additional part of the template that is completely optional.
		For example, optional chunks can be used to describe the placeholders for
		arguments that match up with defaulted function parameters, e.g. given:

		\code
		void f(int x, float y = 3.14, double z = 2.71828);
		\endcode

		The code-completion string for this function would contain:
		- a TypedText chunk for "f".
		- a LeftParen chunk for "(".
		- a Placeholder chunk for "int x"
		- an Optional chunk containing the remaining defaulted arguments, e.g.,
		- a Comma chunk for ","
		- a Placeholder chunk for "float y"
		- an Optional chunk containing the last defaulted argument:
		- a Comma chunk for ","
		- a Placeholder chunk for "double z"
		- a RightParen chunk for ")"

		There are many ways to handle Optional chunks. Two simple approaches are:
		- Completely ignore optional chunks, in which case the template for the
		function "f" would only include the first parameter ("int x").
		- Fully expand all optional chunks, in which case the template for the
		function "f" would have all of the parameters.
	*/
	CompletionChunk_Optional CompletionChunkKind = C.CXCompletionChunk_Optional
	/*
		Text that a user would be expected to type to get this
		code-completion result.

		There will be exactly one "typed text" chunk in a semantic string, which
		will typically provide the spelling of a keyword or the name of a
		declaration that could be used at the current code point. Clients are
		expected to filter the code-completion results based on the text in this
		chunk.
	*/
	CompletionChunk_TypedText = C.CXCompletionChunk_TypedText
	/*
		Text that should be inserted as part of a code-completion result.

		A "text" chunk represents text that is part of the template to be
		inserted into user code should this particular code-completion result
		be selected.
	*/
	CompletionChunk_Text = C.CXCompletionChunk_Text
	/*
		Placeholder text that should be replaced by the user.

		A "placeholder" chunk marks a place where the user should insert text
		into the code-completion template. For example, placeholders might mark
		the function parameters for a function declaration, to indicate that the
		user should provide arguments for each of those parameters. The actual
		text in a placeholder is a suggestion for the text to display before
		the user replaces the placeholder with real code.
	*/
	CompletionChunk_Placeholder = C.CXCompletionChunk_Placeholder
	/*
		Informative text that should be displayed but never inserted as
		part of the template.

		An "informative" chunk contains annotations that can be displayed to
		help the user decide whether a particular code-completion result is the
		right option, but which is not part of the actual template to be inserted
		by code completion.
	*/
	CompletionChunk_Informative = C.CXCompletionChunk_Informative
	/*
		Text that describes the current parameter when code-completion is
		referring to function call, message send, or template specialization.

		A "current parameter" chunk occurs when code-completion is providing
		information about a parameter corresponding to the argument at the
		code-completion point. For example, given a function

		\code
		int add(int x, int y);
		\endcode

		and the source code add(, where the code-completion point is after the
		"(", the code-completion string will contain a "current parameter" chunk
		for "int x", indicating that the current argument will initialize that
		parameter. After typing further, to add(17, (where the code-completion
		point is after the ","), the code-completion string will contain a
		"current parameter" chunk to "int y".
	*/
	CompletionChunk_CurrentParameter = C.CXCompletionChunk_CurrentParameter
	// A left parenthesis ('('), used to initiate a function call or signal the beginning of a function parameter list.
	CompletionChunk_LeftParen = C.CXCompletionChunk_LeftParen
	// A right parenthesis (')'), used to finish a function call or signal the end of a function parameter list.
	CompletionChunk_RightParen = C.CXCompletionChunk_RightParen
	// A left bracket ('[').
	CompletionChunk_LeftBracket = C.CXCompletionChunk_LeftBracket
	// A right bracket (']').
	CompletionChunk_RightBracket = C.CXCompletionChunk_RightBracket
	// A left brace ('{').
	CompletionChunk_LeftBrace = C.CXCompletionChunk_LeftBrace
	// A right brace ('}').
	CompletionChunk_RightBrace = C.CXCompletionChunk_RightBrace
	// A left angle bracket ('<').
	CompletionChunk_LeftAngle = C.CXCompletionChunk_LeftAngle
	// A right angle bracket ('>').
	CompletionChunk_RightAngle = C.CXCompletionChunk_RightAngle
	// A comma separator (',').
	CompletionChunk_Comma = C.CXCompletionChunk_Comma
	/*
		Text that specifies the result type of a given result.

		This special kind of informative chunk is not meant to be inserted into
		the text buffer. Rather, it is meant to illustrate the type that an
		expression using the given completion string would have.
	*/
	CompletionChunk_ResultType = C.CXCompletionChunk_ResultType
	// A colon (':').
	CompletionChunk_Colon = C.CXCompletionChunk_Colon
	// A semicolon (';').
	CompletionChunk_SemiColon = C.CXCompletionChunk_SemiColon
	// An '=' sign.
	CompletionChunk_Equal = C.CXCompletionChunk_Equal
	// Horizontal space (' ').
	CompletionChunk_HorizontalSpace = C.CXCompletionChunk_HorizontalSpace
	// Vertical space ('\n'), after which it is generally a good idea to perform indentation.
	CompletionChunk_VerticalSpace = C.CXCompletionChunk_VerticalSpace
)
// Spelling returns a human-readable name for the completion chunk kind,
// or a fallback string embedding the numeric value for unknown kinds.
// NOTE: this file is generated (see the _gen.go suffix); edit the
// generator rather than this switch.
func (cck CompletionChunkKind) Spelling() string {
	switch cck {
	case CompletionChunk_Optional:
		return "CompletionChunk=Optional"
	case CompletionChunk_TypedText:
		return "CompletionChunk=TypedText"
	case CompletionChunk_Text:
		return "CompletionChunk=Text"
	case CompletionChunk_Placeholder:
		return "CompletionChunk=Placeholder"
	case CompletionChunk_Informative:
		return "CompletionChunk=Informative"
	case CompletionChunk_CurrentParameter:
		return "CompletionChunk=CurrentParameter"
	case CompletionChunk_LeftParen:
		return "CompletionChunk=LeftParen"
	case CompletionChunk_RightParen:
		return "CompletionChunk=RightParen"
	case CompletionChunk_LeftBracket:
		return "CompletionChunk=LeftBracket"
	case CompletionChunk_RightBracket:
		return "CompletionChunk=RightBracket"
	case CompletionChunk_LeftBrace:
		return "CompletionChunk=LeftBrace"
	case CompletionChunk_RightBrace:
		return "CompletionChunk=RightBrace"
	case CompletionChunk_LeftAngle:
		return "CompletionChunk=LeftAngle"
	case CompletionChunk_RightAngle:
		return "CompletionChunk=RightAngle"
	case CompletionChunk_Comma:
		return "CompletionChunk=Comma"
	case CompletionChunk_ResultType:
		return "CompletionChunk=ResultType"
	case CompletionChunk_Colon:
		return "CompletionChunk=Colon"
	case CompletionChunk_SemiColon:
		return "CompletionChunk=SemiColon"
	case CompletionChunk_Equal:
		return "CompletionChunk=Equal"
	case CompletionChunk_HorizontalSpace:
		return "CompletionChunk=HorizontalSpace"
	case CompletionChunk_VerticalSpace:
		return "CompletionChunk=VerticalSpace"
	}

	return fmt.Sprintf("CompletionChunkKind unknown %d", int(cck))
}
// String implements fmt.Stringer by delegating to Spelling.
func (cck CompletionChunkKind) String() string {
	return cck.Spelling()
}
package mortems
// templateContent is the post-mortem markdown scaffold served to users.
// The raw string below is runtime data emitted verbatim — do not reformat it.
var templateContent = `<!--
^ The bits with these tags are just helpful comments, and won't show up on your post-mortem
Post Mortem Agenda:
---
Initial statement
- Read aloud: "We're trying to prepare for a future where we are as stupid as we are today"
- During the meeting we will disallow counterfactual phrases "would have" and "could have"
- When creating your timeline, try and stick to actual metrics rather than subjective human judgement
Reach an agreement on a complete timeline of the incident
- Severity
- Total down time
- Time to detect
- Time to resolve
Reach an agreement on the list of corrective actions
Follow-up: set a reminder (maybe in slack) to follow up on the corrective actions
-->
<!-- The title of your incident. Make sure the title is a h1 (single #)-->
# Love Lost Globally: Jerry Develops Malicious App
<!-- The date which the incident started on. The no letters after the numbers please 1, 2, 3 NOT 1st, 2nd, 3rd -->
## Date: July 1, 2020
<!-- The owner of the post mortem, responsible for following up on actions -->
## Owner: <NAME>
A short description of the event. Rick help develop the malicious app of an innocent alien.
Hostile aliens almost take over the planets water supply.
## Timeline
| Time | Event |
| --- | --- |
| 9:16 | Breakfast. Rick introduces alien. "Do not develop my app" is tattooed on forehead |
| 10:37 | Jerry begins app development with alien |
| 12:30 | App released |
| 12:34 | Morty realises aliens app is released |
| 15:36 | Lovefinderrz reaches 100,000 users |
| 18:44 | Jerry and Morty install paywall, number of users rapidly decreases |
| 20:03 | No app users remain |
<!-- Crucial metrics to agree on. Format: x unit[, x smaller_unit] -->
<!-- Units can be seconds, minutes, hours, days. Use full unit (1 second, not 1s) -->
<!-- Severity can be on your own scale, it is tracked as a category rather than a metric -->
<!-- Could be: 1 = Service down completely, 2 = Service hindered for many users, 3 = Service hindered for some -->
## Metrics
| Metric | Time |
| --- | --- |
| Severity | 1 |
| Time To Detect | 4 minutes |
| Time To Resolve | 6 hours, 14 minutes |
| Total Downtime | 6 hours, 28 minutes | <!-- Detect + Resolve, Sanity check. -->
## Cause of the Problem
Alien with malicious intent invited into the house. Family members not informed of the severity
of alien app. Forehead tattoo documentation inadequate.
## Corrective Actions with Owners
* All house members must be debriefed before high risk aliens are brought into house [Rick]
  - Enforced using debrief document created in family-process repo
* App review process to require 3 reviewers before release [Jerry]
`
package Util
import (
"encoding/json"
"fmt"
"reflect"
"strconv"
"strings"
)
// ToBool converts an arbitrary value to a bool.
// Numbers are true when non-zero, strings and byte-slices are parsed via
// str2bool, nil is false, containers are true when non-empty, and any
// other valid value is true.
func ToBool(v interface{}) bool {
	switch val := v.(type) {
	case bool:
		return val
	case float32, float64:
		// reflection covers both widths without a lossy direct conversion
		return reflect.ValueOf(v).Float() != 0
	case int, int8, int16, int32, int64:
		return reflect.ValueOf(v).Int() != 0
	case uint, uint8, uint16, uint32, uint64:
		return reflect.ValueOf(v).Uint() != 0
	case string:
		return str2bool(val)
	case []byte:
		return str2bool(string(val))
	case nil:
		return false
	}

	rv := reflect.ValueOf(v)
	if rv.Kind() == reflect.Ptr || rv.Kind() == reflect.Interface {
		rv = rv.Elem()
	}
	switch rv.Kind() {
	case reflect.Array, reflect.Slice, reflect.Map:
		// containers are truthy only when they hold elements
		return rv.Len() != 0
	}
	// any other valid value is truthy
	return rv.IsValid()
}
// ToInt converts an arbitrary value to an int.
// bool maps to 0/1, numeric types are truncated, strings and byte-slices
// are parsed via str2int, nil is 0; any other type panics.
func ToInt(v interface{}) int {
	switch val := v.(type) {
	case bool:
		if val {
			return 1
		}
		return 0
	case float32, float64:
		// reflection covers both widths without a lossy direct conversion
		return int(reflect.ValueOf(v).Float())
	case int, int8, int16, int32, int64:
		return int(reflect.ValueOf(v).Int())
	case uint, uint8, uint16, uint32, uint64:
		return int(reflect.ValueOf(v).Uint())
	case string:
		return str2int(val)
	case []byte:
		return str2int(string(val))
	case nil:
		return 0
	}
	panic(fmt.Sprintf("ToInt: invalid type: %T", v))
}
// ToFloat converts an arbitrary value to a float64.
// bool maps to 0/1, numeric types are widened, strings and byte-slices
// are parsed via str2float, nil is 0; any other type panics.
func ToFloat(v interface{}) float64 {
	switch val := v.(type) {
	case bool:
		if val {
			return 1
		}
		return 0
	case float32, float64:
		// reflection covers both widths without a lossy direct conversion
		return reflect.ValueOf(v).Float()
	case int, int8, int16, int32, int64:
		return float64(reflect.ValueOf(v).Int())
	case uint, uint8, uint16, uint32, uint64:
		return float64(reflect.ValueOf(v).Uint())
	case string:
		return str2float(val)
	case []byte:
		return str2float(string(val))
	case nil:
		return 0
	}
	panic(fmt.Sprintf("ToFloat: invalid type: %T", v))
}
// ToString converts an arbitrary value to its string representation.
// Scalars use strconv formatting, errors and fmt.Stringers use their own
// methods, and anything else falls back to JSON encoding, then to
// fmt's default "%+v" rendering.
func ToString(v interface{}) string {
	switch val := v.(type) {
	case string:
		return val
	case []byte:
		return string(val)
	case bool:
		return strconv.FormatBool(val)
	case int, int8, int16, int32, int64:
		return strconv.FormatInt(reflect.ValueOf(v).Int(), 10)
	case uint, uint8, uint16, uint32, uint64:
		return strconv.FormatUint(reflect.ValueOf(v).Uint(), 10)
	case float32, float64:
		return strconv.FormatFloat(reflect.ValueOf(v).Float(), 'g', -1, 64)
	case error:
		return val.Error()
	case fmt.Stringer:
		return val.String()
	}

	// prefer a JSON rendering for composite values
	if j, e := json.Marshal(v); e == nil {
		return string(j)
	}
	// last resort: fmt's default struct formatting
	return fmt.Sprintf("%+v", v)
}
// str2bool parses a trimmed string as a bool literal; when it is not a
// recognised literal, any non-empty (after trimming) string counts as true.
func str2bool(s string) bool {
	trimmed := strings.TrimSpace(s)
	b, err := strconv.ParseBool(trimmed)
	if err != nil {
		return len(trimmed) != 0
	}
	return b
}
// str2int parses a trimmed string as an integer (decimal, hex or octal via
// base 0), falling back to truncating a float literal; unparseable input
// yields 0.
func str2int(s string) int {
	s = strings.TrimSpace(s)
	if n, err := strconv.ParseInt(s, 0, 0); err == nil {
		return int(n)
	}
	if f, err := strconv.ParseFloat(s, 64); err == nil {
		return int(f)
	}
	return 0
}
// str2float parses a trimmed string as a float literal, falling back to an
// integer literal (decimal, hex or octal via base 0); unparseable input
// yields 0.
func str2float(s string) float64 {
	s = strings.TrimSpace(s)
	if f, err := strconv.ParseFloat(s, 64); err == nil {
		return f
	}
	if n, err := strconv.ParseInt(s, 0, 0); err == nil {
		return float64(n)
	}
	return 0
}
package bitset
// uintSize is the width of uint in bits (32 or 64, depending on platform).
const uintSize = 32 << (^uint(0) >> 32 & 1)
// Set a set of unsigned integers which stores unique values, without any particular order
type Set struct {
	m []uint // bitmap: bit (e % uintSize) of word m[e/uintSize] is set when e is a member
}
// New creates a new Set, initially empty set structure.
// It pre-allocates a single machine word (uintSize bits) of storage.
func New() *Set {
	return NewWithSize(uintSize)
}
// NewWithSize creates a new Set, initially empty set structure.
// size is measured in bits: the backing slice gets size/uintSize words, so
// a size below uintSize allocates no storage at all (Add grows on demand).
func NewWithSize(size uint) *Set {
	return &Set{
		m: make([]uint, size/uintSize),
	}
}
// Add adds elements to the Set; values already present are unaffected.
// The backing slice grows (roughly doubling) when a value needs a word
// beyond the current storage.
func (s *Set) Add(x ...uint) {
	for _, e := range x {
		word, bit := getIntegerAndRemainder(e)
		if word >= uint(len(s.m)) {
			newLen := uint(2)
			if word > 0 {
				newLen = word * 2
			}
			grown := make([]uint, newLen)
			copy(grown, s.m)
			s.m = grown
		}
		s.m[word] |= 1 << bit
	}
}
// AddInt adds elements to the Set, silently dropping negative values.
func (s *Set) AddInt(x ...int) {
	values := make([]uint, 0, len(x))
	for _, e := range x {
		if e >= 0 {
			values = append(values, uint(e))
		}
	}
	s.Add(values...)
}
// AddInt64 adds elements to the Set, silently dropping negative values.
func (s *Set) AddInt64(x ...int64) {
	values := make([]uint, 0, len(x))
	for _, e := range x {
		if e >= 0 {
			values = append(values, uint(e))
		}
	}
	s.Add(values...)
}
// Remove removes elements from the Set, if present. Values beyond the
// backing storage cannot be members and are skipped.
func (s *Set) Remove(x ...uint) {
	for _, e := range x {
		i, j := getIntegerAndRemainder(e)
		if uint(len(s.m)) < i+1 {
			// BUG fix: previously this was `return`, which aborted the
			// whole call and left the remaining arguments un-removed.
			continue
		}
		s.m[i] &= ^(1 << j)
	}
}
// RemoveInt removes elements from the Set, if present, silently dropping
// negative values.
func (s *Set) RemoveInt(x ...int) {
	y := []uint{}
	for _, e := range x {
		if e < 0 {
			continue
		}
		y = append(y, uint(e))
	}
	// BUG fix: the collected values were previously never passed on,
	// making RemoveInt a silent no-op.
	s.Remove(y...)
}
// RemoveInt64 removes elements from the Set, if present, silently dropping
// negative values.
func (s *Set) RemoveInt64(x ...int64) {
	y := []uint{}
	for _, e := range x {
		if e < 0 {
			continue
		}
		y = append(y, uint(e))
	}
	// BUG fix: the collected values were previously never passed on,
	// making RemoveInt64 a silent no-op.
	s.Remove(y...)
}
// Contains reports whether the value x is a member of the Set.
func (s Set) Contains(x uint) bool {
	word, bit := getIntegerAndRemainder(x)
	if word >= uint(len(s.m)) {
		// beyond the backing storage: cannot be a member
		return false
	}
	return s.m[word]>>bit&1 == 1
}
// ContainsInt reports whether the value x is a member of the Set;
// negative values are never members.
func (s Set) ContainsInt(x int) bool {
	if x >= 0 {
		return s.Contains(uint(x))
	}
	return false
}
// ContainsInt64 reports whether the value x is a member of the Set;
// negative values are never members.
func (s Set) ContainsInt64(x int64) bool {
	if x >= 0 {
		return s.Contains(uint(x))
	}
	return false
}
// IsEmpty checks whether the set Set is empty.
// NOTE(review): this reports whether the backing slice has zero words, not
// whether any bit is set — a set created via New() is never "empty" by this
// definition even before anything is added. Confirm intended semantics.
func (s Set) IsEmpty() bool {
	return len(s.m) == 0
}
// Enumerate returns a slice of all member values, in ascending order.
func (s *Set) Enumerate() []uint {
	values := []uint{}
	for word, bits := range s.m {
		for bit := 0; bit < uintSize; bit++ {
			if bits>>bit&1 == 1 {
				values = append(values, uint(word*uintSize+bit))
			}
		}
	}
	return values
}
// Union adds every member of each set in ss to s.
func (s *Set) Union(ss ...*Set) {
	for _, other := range ss {
		s.Add(other.Enumerate()...)
	}
}
// Intersection reduces s to the intersection of s with every set in ss.
func (s *Set) Intersection(ss ...*Set) {
	for _, n := range ss {
		for i := uint(0); i < s.Size(); i++ {
			if i < n.Size() {
				s.m[i] &= n.m[i]
			} else {
				// BUG fix: n has no storage for this word, so none of its
				// bits are members of n; previously this indexed past n.m
				// and panicked whenever n's backing slice was shorter.
				s.m[i] = 0
			}
		}
	}
}
// Difference makes the difference of set s with one or more set ss.
// Implementation: s first becomes the union of itself and all of ss, then
// every pairwise intersection (including with the pre-union snapshot of s)
// is removed — i.e. s ends up holding elements that appear in exactly one
// of the input sets (a symmetric difference).
func (s *Set) Difference(ss ...*Set) {
	// Snapshot s before Union mutates it.
	// NOTE(review): NewWithSize expects a size in bits but Size() returns a
	// word count; harmless here only because tmp.m is replaced immediately.
	tmp := NewWithSize(s.Size())
	tmpM := make([]uint, s.Size())
	copy(tmpM, s.m)
	tmp.m = tmpM
	s.Union(ss...)
	ss = append(ss, tmp)
	for i := 0; i < len(ss); i++ {
		for j := i + 1; j < len(ss); j++ {
			// NOTE(review): the scratch copy is sized by s.Size() even when
			// ss[i].m is longer, so the copy below may truncate — verify
			// against sets larger than the receiver.
			tmp := NewWithSize(ss[i].Size())
			tmpM := make([]uint, s.Size())
			copy(tmpM, ss[i].m)
			tmp.m = tmpM
			tmp.Intersection(ss[j])
			s.Remove(tmp.Enumerate()...)
		}
	}
}
// Size returns the number of machine words in the set's backing slice
// (its bit capacity divided by uintSize) — NOT the count of stored
// elements, despite earlier documentation. Union/Intersection/Difference
// rely on this word-count semantics.
func (s Set) Size() uint {
	return uint(len(s.m))
}
func getIntegerAndRemainder(value uint) (x uint, y uint) {
x = value / uintSize
y = value % uintSize
return
} | bitset.go | 0.565299 | 0.405684 | bitset.go | starcoder |
package variable
import (
"github.com/pkg/errors"
)
// NewUndoableDecisionVariables returns an empty, ready-to-use collection.
func NewUndoableDecisionVariables() UndoableDecisionVariables {
	variables := make(UndoableDecisionVariables, 0)
	return variables
}
// UndoableDecisionVariables offers up a name-indexed collection of UndoableDecisionVariable instances, along with
// convenience methods for the collection's management. It is typically expected that a model would contain only a
// single instance of UndoableDecisionVariables to house all of its decision variables.
// Name lookups are linear scans over the slice; variable names are assumed unique.
type UndoableDecisionVariables []UndoableDecisionVariable
// Add appends the supplied variables to the collection.
func (vs *UndoableDecisionVariables) Add(newVariables ...UndoableDecisionVariable) {
	for _, variable := range newVariables {
		*vs = append(*vs, variable)
	}
}
// NewForName creates a new SimpleUndoableDecisionVariable with the supplied
// name and adds it to the collection.
func (vs *UndoableDecisionVariables) NewForName(name string) {
	variable := new(SimpleUndoableDecisionVariable)
	variable.SetName(name)
	*vs = append(*vs, variable)
}
// SetValue looks up the variable with the supplied name and assigns value
// to it. Panics when no variable with that name exists.
func (vs *UndoableDecisionVariables) SetValue(name string, value float64) {
	variable := vs.find(name)
	variable.SetValue(value)
}
// variableMissing builds the panic payload used when a named variable
// cannot be found in the collection.
func variableMissing(name string) error {
	message := "decision variable [" + name + "] does not exist."
	return errors.New(message)
}
// Variable returns the variable in the collection with the supplied name.
// Panics when no variable with that name exists.
func (vs *UndoableDecisionVariables) Variable(name string) UndoableDecisionVariable {
	variable := vs.find(name)
	return variable
}
// find performs a linear scan for the named variable, panicking on a miss.
func (vs *UndoableDecisionVariables) find(name string) UndoableDecisionVariable {
	for _, candidate := range *vs {
		if candidate.Name() != name {
			continue
		}
		return candidate
	}
	panic(variableMissing(name))
}
// Value returns the value of the variable in the collection with the
// supplied name. Panics when no variable with that name exists.
func (vs *UndoableDecisionVariables) Value(name string) float64 {
	variable := vs.find(name)
	return variable.Value()
}
// DifferenceInValues reports the difference in values of the named variable.
// Panics when no variable with that name exists.
func (vs *UndoableDecisionVariables) DifferenceInValues(variableName string) float64 {
	return vs.Variable(variableName).DifferenceInValues()
}
// AcceptAll applies the pending ("done") value of every variable in the collection.
func (vs *UndoableDecisionVariables) AcceptAll() {
	for _, variable := range *vs {
		variable.ApplyDoneValue()
	}
}
// RejectAll rejects the inductive Value of all the BaseInductiveDecisionVariable instances in its collection.
func (vs *UndoableDecisionVariables) RejectAll() {
for _, variable := range *vs {
variable.ApplyUndoneValue()
}
} | internal/pkg/model/variable/UndoableDecisionVariables.go | 0.718199 | 0.423279 | UndoableDecisionVariables.go | starcoder |
package main
import (
"fmt"
"math"
"math/rand"
"image"
"image/color"
"image/png"
"os"
"gfx/bitmap"
)
// Output canvas dimensions in pixels.
const width = 512
const height = 512
// Bitmap wraps an RGBA raster of fixed dimensions.
// NOTE(review): this type duplicates the imported gfx/bitmap package that the
// drawing routines below actually use; it appears to be dead code — confirm
// before removing.
type Bitmap struct {
	image *image.RGBA
}

// New creates a Bitmap backed by a width x height RGBA image.
func New(width int, height int) Bitmap {
	img := image.NewRGBA(image.Rect(0, 0, width, height))
	return Bitmap{image: img}
}

// Putpixel sets the pixel at (x, y) to the given fully opaque RGB colour.
func (bitmap Bitmap) Putpixel(x int, y int, red byte, green byte, blue byte) {
	c := color.RGBA{red, green, blue, 255}
	bitmap.image.SetRGBA(x, y, c)
}

// Save encodes the bitmap as a PNG into the named file.
// Fix: encoding and close errors were previously discarded; they are now
// propagated to the caller.
func (bitmap Bitmap) Save(filename string) error {
	outfile, err := os.Create(filename)
	if err != nil {
		return err
	}
	if err := png.Encode(outfile, bitmap.image); err != nil {
		outfile.Close()
		return err
	}
	return outfile.Close()
}
// abs returns the absolute value of a.
func abs(a int) int {
	if a >= 0 {
		return a
	}
	return -a
}
// max returns the larger of a and b.
func max(a int, b int) int {
	if b > a {
		return b
	}
	return a
}
// min returns the smaller of a and b.
func min(a int, b int) int {
	if b < a {
		return b
	}
	return a
}
// hLine draws a horizontal black segment on row y between x1 and x2
// (inclusive, either order).
func hLine(bitmap *bitmap.Bitmap, x1 int, x2 int, y int) {
	if x1 > x2 {
		x1, x2 = x2, x1
	}
	for x := x1; x <= x2; x++ {
		bitmap.Putpixel(x, y, 0, 0, 0)
	}
}
// vLine draws a vertical black segment on column x between y1 and y2
// (inclusive, either order).
func vLine(bitmap *bitmap.Bitmap, x int, y1 int, y2 int) {
	if y1 > y2 {
		y1, y2 = y2, y1
	}
	for y := y1; y <= y2; y++ {
		bitmap.Putpixel(x, y, 0, 0, 0)
	}
}
// line draws an anti-aliased line from (x1,y1) to (x2,y2) with an
// error-accumulation stepping scheme: at each step along the major axis it
// paints two adjacent pixels with complementary grey levels (c1, c2) derived
// from the running error term e (0..255). Purely horizontal/vertical lines
// are delegated to hLine/vLine.
func line(bitmap *bitmap.Bitmap, x1 int, y1 int, x2 int, y2 int) {
	dx := x2 - x1
	dy := y2 - y1
	var s float64        // minor-axis slope per major-axis step
	var p float64        // per-step error decrement (slope scaled to 0..256)
	var e float64 = 255.0 // running coverage/error accumulator
	var x, y, xdelta, ydelta, xpdelta, ypdelta, xp, yp int
	var imin, imax int
	if x1 == x2 {
		vLine(bitmap, x1, y1, y2)
		return
	}
	if y1 == y2 {
		hLine(bitmap, x1, x2, y1)
		return
	}
	// Normalise so the line is always walked left-to-right. Note dx/dy were
	// computed before the swap; abs() and the dy/dx ratio are unaffected
	// because both signs flip together.
	if x1 > x2 {
		x1, x2 = x2, x1
		y1, y2 = y2, y1
	}
	if abs(dx) > abs(dy) {
		// x-major: step x by 1, occasionally step y when the error wraps.
		s = float64(dy) / float64(dx)
		imin = x1
		imax = x2
		x = x1
		y = y1
		xdelta = 1
		ydelta = 0
		xpdelta = 0
		xp = 0
		if y2 > y1 {
			ypdelta = 1
			yp = 1
		} else {
			s = -s
			ypdelta = -1
			yp = -1
		}
	} else {
		// y-major: step y by 1, occasionally step x when the error wraps.
		s = float64(dx) / float64(dy)
		xdelta = 0
		ydelta = 1
		ypdelta = 0
		yp = 0
		if y2 > y1 {
			imin = y1
			imax = y2
			x = x1
			y = y1
			xpdelta = 1
			xp = 1
		} else {
			s = -s
			imin = y2
			imax = y1
			x = x2
			y = y2
			xpdelta = -1
			xp = -1
		}
	}
	p = s * 256.0
	for i := imin; i <= imax; i++ {
		// Split the 0..255 coverage between the pixel and its neighbour
		// on the minor axis.
		var c1 byte = byte(e)
		var c2 byte = 255 - c1
		bitmap.Putpixel(x+xp, y+yp, c1, c1, c1)
		bitmap.Putpixel(x, y, c2, c2, c2)
		e = e - p
		x += xdelta
		y += ydelta
		if e < 0.0 {
			// Error wrapped: take one step on the minor axis.
			e += 256.0
			x += xpdelta
			y += ypdelta
		}
	}
}
// nprLine draws a hand-drawn-looking ("non-photorealistic") line by
// recursive midpoint displacement: long segments are split at a midpoint
// jittered along the segment normal, short segments are rendered with the
// anti-aliased line routine. maxd bounds the jitter and shrinks each level.
func nprLine(bitmap *bitmap.Bitmap, x1 int, y1 int, x2 int, y2 int, maxd float64) {
	manhattan := abs(x2-x1) + abs(y2-y1)
	if manhattan < 20 {
		line(bitmap, x1, y1, x2, y2)
		return
	}
	mx := (x1 + x2) >> 1
	my := (y1 + y2) >> 1
	// Unit normal of the segment.
	nx := float64(y1 - y2)
	ny := float64(x2 - x1)
	norm := math.Sqrt(nx*nx + ny*ny)
	nx /= norm
	ny /= norm
	// Displace the midpoint by up to ±maxd/2 along the normal.
	offset := maxd * (rand.Float64() - 0.5)
	mx += int(nx * offset)
	my += int(ny * offset)
	nprLine(bitmap, x1, y1, mx, my, maxd/1.8)
	nprLine(bitmap, mx, my, x2, y2, maxd/1.8)
}
// drawDemo draws a stack of horizontal squiggly lines with increasing
// displacement amplitude (i/2).
func drawDemo(bitmap *bitmap.Bitmap) {
	for i := 0; i < 500; i += 25 {
		// Reseeding with the same fixed seed each iteration makes every line
		// reuse the same random sequence, so all lines share one wiggle shape
		// and only the amplitude grows — presumably intentional; confirm.
		rand.Seed(6503)
		nprLine(bitmap, 50, 25+i, width-50, 25+i, float64(i)/2.0)
	}
}
// drawSquare draws a sketchy 180x180 square with its top-left corner at (20, 20).
func drawSquare(bitmap *bitmap.Bitmap) {
	edges := [][4]int{
		{20, 20, 200, 20},
		{200, 20, 200, 200},
		{20, 200, 200, 200},
		{20, 200, 20, 20},
	}
	for _, e := range edges {
		nprLine(bitmap, e[0], e[1], e[2], e[3], 10)
	}
}
// drawDiamond draws a sketchy diamond of half-diagonal W centred at (60, 160).
func drawDiamond(bitmap *bitmap.Bitmap) {
	const W = 50
	edges := [][4]int{
		{60 - W, 160, 60, 160 - W},
		{60, 160 - W, 60 + W, 160},
		{60 + W, 160, 60, 160 + W},
		{60, 160 + W, 60 - W, 160},
	}
	for _, e := range edges {
		nprLine(bitmap, e[0], e[1], e[2], e[3], 10)
	}
}
// drawStar draws a five-pointed star centred at (400, 220) by connecting
// every third vertex of a regular pentagon of radius 200.
func drawStar(bitmap *bitmap.Bitmap) {
	const STEP = 5
	for i := 0; i < 360; i += 360 / STEP {
		a1 := float64(i) * math.Pi / 180.0
		a2 := (float64(i) + 3*360/STEP) * math.Pi / 180.0
		x1, y1 := 200.0*math.Cos(a1), 200.0*math.Sin(a1)
		x2, y2 := 200.0*math.Cos(a2), 200.0*math.Sin(a2)
		nprLine(bitmap, 400+int(x1), 220+int(y1), 400+int(x2), 220+int(y2), 10)
	}
}
func main() {
bitmap := bitmap.New(width, height)
for y := 0; y < height; y++ {
for x := 0; x < width; x++ {
bitmap.Putpixel(x, y, 255, 255, 255)
}
}
fmt.Println("Draw lines")
drawDemo(&bitmap)
drawSquare(&bitmap)
drawDiamond(&bitmap)
drawStar(&bitmap)
err := bitmap.Save("test.png")
if err != nil {
panic(err)
}
} | npr_lines/npr_lines.go | 0.629319 | 0.48749 | npr_lines.go | starcoder |
package main
import (
vec "github.com/etic4/vecmath"
"github.com/etic4/vecmath/maths"
)
var (
	// defaulAngle is the initial joint angle (90° in radians) given to new
	// segments. NOTE(review): name is missing a "t" ("defaultAngle"); left
	// as-is since it may be referenced elsewhere in the package.
	defaulAngle float64 = maths.Rad(90)
	// defaultMaxForce caps steering force; not referenced in this file's
	// visible code — confirm it is used elsewhere before removing.
	defaultMaxForce float64 = 3
)
// tentacle is a chain of segments anchored at base whose tip steers toward
// nearby targets; it embeds the root *segment of the chain.
type tentacle struct {
	base         vec.Vec2 // anchor point of the root segment
	nbrSegs      int      // number of segments in the chain
	segSize      float64  // length of each segment
	speed        vec.Vec2 // current steering velocity of the tip
	maxSpeed     float64  // upper bound on the tip's desired speed
	maxForce     float64  // upper bound on the steering force
	detectionRay float64  // radius within which targets are noticed
	angle        float64  // initial joint angle for new segments
	thick        int      // thickness of the root segment (tapers toward tip)
	restPos      vec.Vec2 // tip position the tentacle returns to when idle
	segments     []*segment // NOTE(review): never populated in this file — confirm use
	*segment // root segment of the chain
}
// newTentacle builds a tentacle anchored at base with the given movement
// limits, then constructs its segment chain via build.
func newTentacle(base vec.Vec2, nbrSegs int, segSize float64, speed vec.Vec2, maxSpeed float64, maxForce float64, thick int, detectionRay float64) *tentacle {
	t := &tentacle{
		base:         base,
		nbrSegs:      nbrSegs,
		segSize:      segSize,
		speed:        speed,
		maxSpeed:     maxSpeed,
		maxForce:     maxForce,
		thick:        thick,
		detectionRay: detectionRay,
		angle:        defaulAngle,
	}
	t.build()
	return t
}
// build creates the root segment at the base and chains nbrSegs-1 children
// onto it, linearly tapering thickness from t.thick down toward 1, then
// records the final tip position as the rest position.
func (t *tentacle) build() {
	t.segment = newSegment(t.base, t.angle, t.segSize)
	t.segment.thick = t.thick
	parent := t.segment
	for i := 1; i < t.nbrSegs; i++ {
		next := newChild(parent, parent.angle, parent.length)
		// Taper thickness toward the tip.
		next.thick = int(maths.Lerp(float64(t.thick), float64(1), float64(i)/float64(t.nbrSegs)))
		parent = next
	}
	// The last segment's endpoint is the idle/rest position of the tip.
	t.restPos = parent.b
}
// followNearest steers the tentacle toward the closest of the given balls.
// getNearest never returns nil: with no ball in range it yields a
// zero-valued ball, whose zero center makes followSteering fall back to the
// rest position.
func (t *tentacle) followNearest(objects []*ball) {
	nearest := t.getNearest(objects)
	t.followSteering(nearest.center)
}
// followSteering applies a seek steering behaviour toward pos: the desired
// velocity points at the target clipped to maxSpeed, the steering force is
// desired minus current velocity clipped to maxForce. A zero-valued pos or
// one outside the detection radius retargets the rest position instead.
func (t *tentacle) followSteering(pos vec.Vec2) {
	if pos == (vec.Vec2{}) || !t.isInDetectionRay(pos) {
		pos = t.restPos
	}
	desired := pos.Sub(t.getHead().b)
	desired = desired.LimitMag(-t.maxSpeed, t.maxSpeed)
	steer := desired.Sub(t.speed)
	steer = steer.LimitMag(-t.maxForce, t.maxForce)
	t.speed = t.speed.Add(steer)
	t.followPos(t.getHead().b.Add(t.speed))
}
// followPos drags the head of the chain to pos, then re-anchors the chain
// back at the original base so the root stays fixed.
func (t *tentacle) followPos(pos vec.Vec2) {
	base := t.a // remember the root anchor before dragging the chain
	t.getHead().moveHeadTo(pos)
	t.moveBack(base)
}
// isInDetectionRay reports whether pos lies within detectionRay of the tip.
func (t *tentacle) isInDetectionRay(pos vec.Vec2) bool {
	d := t.getHead().b.Distance(pos)
	return d <= t.detectionRay
}
// getNearest returns the closest detected ball. When nothing is in range it
// returns a pointer to a zero-valued ball (never nil); callers rely on its
// zero center as a sentinel.
// NOTE(review): candidates are filtered against the head position
// (isInDetectionRay) but the nearest comparison uses t.b — the embedded root
// segment's endpoint, not the head — confirm this asymmetry is intended.
func (t *tentacle) getNearest(objects []*ball) *ball {
	detected := []*ball{}
	for _, obj := range objects {
		if t.isInDetectionRay(obj.center) {
			detected = append(detected, obj)
		}
	}
	var nearest = new(ball)
	if len(detected) > 0 {
		nearest = detected[0]
		for i := 1; i < len(detected); i++ {
			if t.b.Distance(detected[i].center) < t.b.Distance(nearest.center) {
				nearest = detected[i]
			}
		}
	}
	return nearest
}
package pano2cube
import (
"image"
"math"
"github.com/gonum/floats"
"image/color"
)
// outImgToXYZ maps output-image pixel coordinates to a point on the unit
// cube's surface.
// i, j are pixel coords; faceIdx selects the cube face (0 back, 1 left,
// 2 front, 3 right, 4 top, 5 bottom); faceSize is the face edge length.
// An out-of-range faceIdx yields (0, 0, 0).
func outImgToXYZ(i int, j int, faceIdx int, faceSize int) (float64, float64, float64) {
	a := 2.0 * float64(i) / float64(faceSize)
	b := 2.0 * float64(j) / float64(faceSize)
	var x, y, z float64
	switch faceIdx {
	case 0: // back
		x, y, z = -1.0, 1.0-a, 1.0-b
	case 1: // left
		x, y, z = a-1.0, -1.0, 1.0-b
	case 2: // front
		x, y, z = 1.0, a-1.0, 1.0-b
	case 3: // right
		x, y, z = 1.0-a, 1.0, 1.0-b
	case 4: // top
		x, y, z = b-1.0, a-1.0, 1.0
	case 5: // bottom
		x, y, z = 1.0-b, a-1.0, -1.0
	}
	return x, y, z
}
// clip truncates vi to an int and clamps the result into [min, max].
func clip(vi float64, min int, max int) int {
	v := int(vi)
	if v < min {
		return min
	}
	if v > max {
		return max
	}
	return v
}
// round rounds src to the nearest integer via gonum's floats.Round with 0
// decimal places (half values rounded away from zero, per the gonum docs).
func round(src float64) int{
	return int(floats.Round(src, 0))
}
// convertFace fills one cube face (faceIdx 0..5) of imgOut by inverse
// transformation: each output pixel is projected onto the unit cube, turned
// into spherical angles (theta, phi), mapped to equirectangular coordinates
// in imgIn, and sampled with bilinear interpolation.
// Assumes imgOut is square and imgIn is a 2:1 equirectangular panorama
// (vf is scaled by inSize.X, i.e. height == width/2) — confirm at call site.
func convertFace(imgIn *image.RGBA, imgOut *image.RGBA, faceIdx int) {
	inSize := imgIn.Rect.Size()
	outSize := imgOut.Rect.Size()
	faceSize := outSize.X
	for xOut := 0; xOut < faceSize; xOut++ {
		for yOut := 0; yOut < faceSize; yOut++ {
			x, y, z := outImgToXYZ(xOut, yOut, faceIdx, faceSize)
			theta := math.Atan2(y, x) // range -pi to pi
			rad := math.Hypot(x, y)
			phi := math.Atan2(z, rad) // range -pi/2 to pi/2
			// source img coords
			uf := 0.5 * float64(inSize.X) * (theta + math.Pi) / math.Pi
			vf := 0.5 * float64(inSize.X) * (math.Pi/2 - phi) / math.Pi
			// Use bilinear interpolation between the four surrounding pixels
			ui := math.Floor(uf) // coord of pixel to bottom left
			vi := math.Floor(vf)
			u2 := ui + 1 // coords of pixel to top right
			v2 := vi + 1
			mu := uf - ui // fraction of way across pixel
			nu := vf - vi
			// Pixel values of four corners; u wraps around the panorama seam,
			// v is clamped at the poles.
			A := imgIn.RGBAAt(int(ui)%inSize.X, clip(vi, 0, inSize.Y-1))
			B := imgIn.RGBAAt(int(u2)%inSize.X, clip(vi, 0, inSize.Y-1))
			C := imgIn.RGBAAt(int(ui)%inSize.X, clip(v2, 0, inSize.Y-1))
			D := imgIn.RGBAAt(int(u2)%inSize.X, clip(v2, 0, inSize.Y-1))
			// interpolate
			r, g, b := float64(A.R)*(1.0-mu)*(1.0-nu)+float64(B.R)*((mu)*(1.0-nu))+float64(C.R)*((1.0-mu)*nu)+float64(D.R)*(mu*nu),
				float64(A.G)*((1-mu)*(1-nu))+float64(B.G)*((mu)*(1-nu))+float64(C.G)*((1-mu)*nu)+float64(D.G)*(mu*nu),
				float64(A.B)*((1-mu)*(1-nu))+float64(B.B)*((mu)*(1-nu))+float64(C.B)*((1-mu)*nu)+float64(D.B)*(mu*nu)
			imgOut.SetRGBA(xOut, yOut, color.RGBA{R: uint8(round(r)), G: uint8(round(g)), B: uint8(round(b)), A: uint8(255)})
		}
	}
}
package rendering
import (
. "github.com/drbrain/go-unicornify/unicornify/core"
"math"
)
// FacetTracer spatially partitions tracers into a countRoot x countRoot grid
// of GroupTracers over its bounds, so point queries only consult the single
// cell containing the query point.
type FacetTracer struct {
	countRoot  int            // cells per axis
	countRootF float64        // countRoot as float64, cached for coordinate math
	facets     []*GroupTracer // row-major grid of cells; nil means empty cell
	bounds     Bounds
	isEmpty    bool // true until the first tracer is added
}
// NewFacetTracer creates an empty facet tracer covering bounds with a
// countRoot x countRoot grid of cells.
func NewFacetTracer(bounds Bounds, countRoot int) *FacetTracer {
	t := &FacetTracer{
		countRoot:  countRoot,
		countRootF: float64(countRoot),
		facets:     make([]*GroupTracer, countRoot*countRoot),
		bounds:     bounds,
		isEmpty:    true,
	}
	return t
}
// IsEmpty reports whether no tracer has been added yet.
func (t *FacetTracer) IsEmpty() bool {
	return t.isEmpty
}
// Trace delegates to the grid cell containing (x, y); an empty (nil) cell
// means no hit.
func (t *FacetTracer) Trace(x, y float64, ray Vector) (bool, float64, Vector, Color) {
	cell := t.facets[t.facetNum(x, y)]
	if cell == nil {
		return false, 0, NoDirection, Color{}
	}
	return cell.Trace(x, y, ray)
}
// Add inserts each tracer into every grid cell overlapped by its bounding
// box, lazily allocating the per-cell GroupTracers.
func (t *FacetTracer) Add(ts ...Tracer) {
	for _, tracer := range ts {
		t.isEmpty = false
		b := tracer.GetBounds()
		minx, miny := t.facetCoords(b.XMin, b.YMin)
		maxx, maxy := t.facetCoords(b.XMax, b.YMax)
		for y := miny; y <= maxy; y++ {
			for x := minx; x <= maxx; x++ {
				idx := y*t.countRoot + x
				if t.facets[idx] == nil {
					t.facets[idx] = NewGroupTracer()
				}
				t.facets[idx].Add(tracer)
			}
		}
	}
}
// TraceDeep delegates to the grid cell containing (x, y); an empty (nil)
// cell yields no intervals.
func (t *FacetTracer) TraceDeep(x, y float64, ray Vector) (bool, TraceIntervals) {
	cell := t.facets[t.facetNum(x, y)]
	if cell == nil {
		return false, EmptyIntervals
	}
	return cell.TraceDeep(x, y, ray)
}
// facetNum returns the row-major index of the grid cell containing (x, y).
func (t *FacetTracer) facetNum(x, y float64) int {
	cx, cy := t.facetCoords(x, y)
	return cy*t.countRoot + cx
}
// facetCoords maps a point to its grid cell coordinates, clamping points
// outside the bounds onto the border cells.
func (t *FacetTracer) facetCoords(x, y float64) (fx, fy int) {
	b := t.bounds
	cx := math.Min(t.countRootF-1, math.Max(0, t.countRootF*(x-b.XMin)/b.Dx()))
	cy := math.Min(t.countRootF-1, math.Max(0, t.countRootF*(y-b.YMin)/b.Dy()))
	fx, fy = int(cx), int(cy)
	return
}
// GetBounds returns the area covered by the grid.
func (t *FacetTracer) GetBounds() Bounds {
	return t.bounds
}
// Sort sorts the contents of every non-empty grid cell.
func (t *FacetTracer) Sort() {
	for _, cell := range t.facets {
		if cell == nil {
			continue
		}
		cell.Sort()
	}
}
// Pruned returns a pruned version of this tracer for the given rendering
// parameters.
func (t *FacetTracer) Pruned(rp RenderingParameters) Tracer {
	return SimplyPruned(t, rp) // FIXME maybe? facet tracers are the *result* of pruning, so may be fine
}
package cudnn
// #include <cudnn.h>
import "C"
import (
"runtime"
"unsafe"
"github.com/pkg/errors"
)
// Op is a tuple describing the operation that needs to be done.
// It owns a cuDNN op-tensor descriptor configured from the Go-side fields.
type Op struct {
	internal C.cudnnOpTensorDescriptor_t // underlying cuDNN descriptor handle

	op             OpTensorOp     // The Operation that needs to be done
	dataType       DataType       // The Data type
	nanPropagation NanPropagation // NaN propagation strategies
}
// NewOp creates a new Op with the provided settings.
// The Op owns a cuDNN op-tensor descriptor; a finalizer releases it when the
// Go value becomes unreachable.
func NewOp(op OpTensorOp, dt DataType, prop NanPropagation) (*Op, error) {
	var internal C.cudnnOpTensorDescriptor_t
	if err := result(C.cudnnCreateOpTensorDescriptor(&internal)); err != nil {
		return nil, err
	}
	if err := result(C.cudnnSetOpTensorDescriptor(internal, op.C(), dt.C(), prop.C())); err != nil {
		// Fix: release the freshly created descriptor on configuration
		// failure; previously it leaked.
		C.cudnnDestroyOpTensorDescriptor(internal)
		return nil, err
	}
	retVal := &Op{
		internal:       internal,
		op:             op,
		dataType:       dt,
		nanPropagation: prop,
	}
	// Release the C-side descriptor when the Go value is collected.
	runtime.SetFinalizer(retVal, destroyOp)
	return retVal, nil
}
// Op returns the operation that needs to be done.
func (op *Op) Op() OpTensorOp { return op.op }

// DataType returns the data type that the operation is supposed to work on.
func (op *Op) DataType() DataType { return op.dataType }

// NaNPropagation returns the NaN propagation strategy.
func (op *Op) NaNPropagation() NanPropagation { return op.nanPropagation }
// DoOp actually performs the operation, computing
// C = op(alpha1*A, alpha2*B) + beta*C per the cuDNN cudnnOpTensor contract.
// All three tensor descriptors must share data type and shape; for double
// tensors the op descriptor must also be double, and the scaling factors are
// marshalled as C float or double to match the op's compute type.
func (ctx *Context) DoOp(op *Op,
	alpha1 float64, aDesc *TensorDescriptor, aData Memory,
	alpha2 float64, bDesc *TensorDescriptor, bData Memory,
	beta float64, cDesc *TensorDescriptor, cData Memory) error {

	// dtype checks
	if !(aDesc.dataType == bDesc.dataType && bDesc.dataType == cDesc.dataType) {
		return errors.Errorf(dtypeMismatch3, cDesc.dataType, aDesc.dataType, bDesc.dataType)
	}
	if cDesc.dataType == Double && op.dataType != cDesc.dataType {
		return errors.Errorf(dtypeMismatch3, Double, cDesc.dataType, op.dataType)
	}
	if op.dataType != Float && op.dataType != Double {
		return errors.Errorf(dtypeMismatch2, Float, Double, op.dataType)
	}

	// shapecheck
	if !(shapeEq(aDesc.shape, bDesc.shape) && shapeEq(bDesc.shape, cDesc.shape)) {
		return errors.Errorf(shapeMismatch3, aDesc.shape, bDesc.shape, cDesc.shape)
	}

	// location check
	if bData.Uintptr() == cData.Uintptr() && aData.Uintptr() != cData.Uintptr() {
		// If the input tensor B is the same tensor as the destination tensor C,
		// then the input tensor A also must be the same tensor as the destination tensor C.
		return errors.Errorf(memoryError3, cData.Uintptr(), aData.Uintptr(), bData.Uintptr())
	}

	// alpha/beta generation: cuDNN expects host scalars whose width matches
	// the op's compute type, passed as void pointers.
	var alpha1C, alpha2C, betaC unsafe.Pointer
	if op.dataType == Float {
		var a1, a2, b C.float
		a1 = C.float(float32(alpha1))
		a2 = C.float(float32(alpha2))
		b = C.float(float32(beta))
		alpha1C = unsafe.Pointer(&a1)
		alpha2C = unsafe.Pointer(&a2)
		betaC = unsafe.Pointer(&b)
	} else {
		var a1, a2, b C.double
		a1 = C.double(alpha1)
		a2 = C.double(alpha2)
		b = C.double(beta)
		alpha1C = unsafe.Pointer(&a1)
		alpha2C = unsafe.Pointer(&a2)
		betaC = unsafe.Pointer(&b)
	}
	res := C.cudnnOpTensor(ctx.internal, op.internal,
		alpha1C, aDesc.internal, aData.Pointer(),
		alpha2C, bDesc.internal, bData.Pointer(),
		betaC, cDesc.internal, cData.Pointer(),
	)
	return result(res)
}
func destroyOp(obj *Op) { C.cudnnDestroyOpTensorDescriptor(obj.internal) } | vendor/gorgonia.org/cu/dnn/optensor.go | 0.713332 | 0.507751 | optensor.go | starcoder |
package funcs
import (
"strings"
)
// ToLower returns a toLower function with the input function as its parameter.
func ToLower(s String) String {
	return TrimString(&toLower{
		S:           s,
		hash:        Hash("toLower", s),
		hasVariable: s.HasVariable(),
	})
}

// toLower lazily applies strings.ToLower to the result of evaluating S.
type toLower struct {
	S           String
	hash        uint64
	hasVariable bool
}

// Eval evaluates the wrapped expression and lowercases its result.
func (f *toLower) Eval() (string, error) {
	v, err := f.S.Eval()
	if err != nil {
		return "", err
	}
	return strings.ToLower(v), nil
}

// Compare orders expressions by hash first, then structurally, falling back
// to string form for foreign types.
func (f *toLower) Compare(that Comparable) int {
	if f.Hash() != that.Hash() {
		if f.Hash() < that.Hash() {
			return -1
		}
		return 1
	}
	other, ok := that.(*toLower)
	if !ok {
		return strings.Compare(f.String(), that.String())
	}
	return f.S.Compare(other.S)
}

// HasVariable reports whether the wrapped expression contains a variable.
func (f *toLower) HasVariable() bool {
	return f.hasVariable
}

// String returns the textual form of the expression.
func (f *toLower) String() string {
	return "toLower(" + f.S.String() + ")"
}

// Hash returns the precomputed hash of the expression.
func (f *toLower) Hash() uint64 {
	return f.hash
}

func init() {
	Register("toLower", ToLower)
}
// ToUpper returns a toUpper function with the input function as its parameter.
func ToUpper(s String) String {
	return TrimString(&toUpper{
		S:           s,
		hash:        Hash("toUpper", s),
		hasVariable: s.HasVariable(),
	})
}

// toUpper lazily applies strings.ToUpper to the result of evaluating S.
type toUpper struct {
	S           String
	hash        uint64
	hasVariable bool
}

// Eval evaluates the wrapped expression and uppercases its result.
func (f *toUpper) Eval() (string, error) {
	v, err := f.S.Eval()
	if err != nil {
		return "", err
	}
	return strings.ToUpper(v), nil
}

// Compare orders expressions by hash first, then structurally, falling back
// to string form for foreign types.
func (f *toUpper) Compare(that Comparable) int {
	if f.Hash() != that.Hash() {
		if f.Hash() < that.Hash() {
			return -1
		}
		return 1
	}
	other, ok := that.(*toUpper)
	if !ok {
		return strings.Compare(f.String(), that.String())
	}
	return f.S.Compare(other.S)
}

// HasVariable reports whether the wrapped expression contains a variable.
func (f *toUpper) HasVariable() bool {
	return f.hasVariable
}

// String returns the textual form of the expression.
func (f *toUpper) String() string {
	return "toUpper(" + f.S.String() + ")"
}

// Hash returns the precomputed hash of the expression.
func (f *toUpper) Hash() uint64 {
	return f.hash
}

func init() {
	Register("toUpper", ToUpper)
}
// Contains returns a contains function with the two input functions as its parameters.
func Contains(s, sub String) Bool {
	return TrimBool(&contains{
		S:           s,
		Substr:      sub,
		hash:        Hash("contains", s, sub),
		hasVariable: s.HasVariable() || sub.HasVariable(),
	})
}

// contains lazily tests whether S's value contains Substr's value.
type contains struct {
	S           String
	Substr      String
	hash        uint64
	hasVariable bool
}

// Eval evaluates both expressions and applies strings.Contains.
func (f *contains) Eval() (bool, error) {
	v, err := f.S.Eval()
	if err != nil {
		return false, err
	}
	sub, err := f.Substr.Eval()
	if err != nil {
		return false, err
	}
	return strings.Contains(v, sub), nil
}

// Compare orders expressions by hash first, then structurally, falling back
// to string form for foreign types.
func (f *contains) Compare(that Comparable) int {
	if f.Hash() != that.Hash() {
		if f.Hash() < that.Hash() {
			return -1
		}
		return 1
	}
	other, ok := that.(*contains)
	if !ok {
		return strings.Compare(f.String(), that.String())
	}
	if c := f.S.Compare(other.S); c != 0 {
		return c
	}
	return f.Substr.Compare(other.Substr)
}

// HasVariable reports whether either operand contains a variable.
func (f *contains) HasVariable() bool {
	return f.hasVariable
}

// String returns the textual form of the expression.
func (f *contains) String() string {
	return "contains(" + sjoin(f.S, f.Substr) + ")"
}

// Hash returns the precomputed hash of the expression.
func (f *contains) Hash() uint64 {
	return f.hash
}

func init() {
	Register("contains", Contains)
}
// EqualFold returns an eqFold function with the two input functions as its parameters.
func EqualFold(s, t String) Bool {
	return TrimBool(&equalFold{
		S:           s,
		T:           t,
		hash:        Hash("eqFold", s, t),
		hasVariable: s.HasVariable() || t.HasVariable(),
	})
}

// equalFold lazily compares S and T case-insensitively (Unicode folding).
type equalFold struct {
	S           String
	T           String
	hash        uint64
	hasVariable bool
}

// Eval evaluates both expressions and applies strings.EqualFold.
func (f *equalFold) Eval() (bool, error) {
	a, err := f.S.Eval()
	if err != nil {
		return false, err
	}
	b, err := f.T.Eval()
	if err != nil {
		return false, err
	}
	return strings.EqualFold(a, b), nil
}

// Compare orders expressions by hash first, then structurally, falling back
// to string form for foreign types.
func (f *equalFold) Compare(that Comparable) int {
	if f.Hash() != that.Hash() {
		if f.Hash() < that.Hash() {
			return -1
		}
		return 1
	}
	other, ok := that.(*equalFold)
	if !ok {
		return strings.Compare(f.String(), that.String())
	}
	if c := f.S.Compare(other.S); c != 0 {
		return c
	}
	return f.T.Compare(other.T)
}

// HasVariable reports whether either operand contains a variable.
func (f *equalFold) HasVariable() bool {
	return f.hasVariable
}

// String returns the textual form of the expression.
func (f *equalFold) String() string {
	return "eqFold(" + sjoin(f.S, f.T) + ")"
}

// Hash returns the precomputed hash of the expression.
func (f *equalFold) Hash() uint64 {
	return f.hash
}

func init() {
	Register("eqFold", EqualFold)
}
// HasPrefix returns a hasPrefix function with the two input functions as its parameters.
func HasPrefix(a, b String) Bool {
	return TrimBool(&hasPrefix{
		V1:          a,
		V2:          b,
		hash:        Hash("hasPrefix", a, b),
		hasVariable: a.HasVariable() || b.HasVariable(),
	})
}

// hasPrefix lazily tests whether V1's value starts with V2's value.
type hasPrefix struct {
	V1          String
	V2          String
	hash        uint64
	hasVariable bool
}

// Eval evaluates both expressions and applies strings.HasPrefix.
func (f *hasPrefix) Eval() (bool, error) {
	a, err := f.V1.Eval()
	if err != nil {
		return false, err
	}
	b, err := f.V2.Eval()
	if err != nil {
		return false, err
	}
	return strings.HasPrefix(a, b), nil
}

// Compare orders expressions by hash first, then structurally, falling back
// to string form for foreign types.
func (f *hasPrefix) Compare(that Comparable) int {
	if f.Hash() != that.Hash() {
		if f.Hash() < that.Hash() {
			return -1
		}
		return 1
	}
	other, ok := that.(*hasPrefix)
	if !ok {
		return strings.Compare(f.String(), that.String())
	}
	if c := f.V1.Compare(other.V1); c != 0 {
		return c
	}
	return f.V2.Compare(other.V2)
}

// HasVariable reports whether either operand contains a variable.
func (f *hasPrefix) HasVariable() bool {
	return f.hasVariable
}

// String returns the textual form of the expression.
func (f *hasPrefix) String() string {
	return "hasPrefix(" + sjoin(f.V1, f.V2) + ")"
}

// Hash returns the precomputed hash of the expression.
func (f *hasPrefix) Hash() uint64 {
	return f.hash
}

func init() {
	Register("hasPrefix", HasPrefix)
}
//HasSuffix returns a hasSuffix function with the two input functions as its parameters.
func HasSuffix(a, b String) Bool {
return TrimBool(&hasSuffix{
V1: a,
V2: b,
hash: Hash("hasSuffix", a, b),
hasVariable: a.HasVariable() || b.HasVariable(),
})
}
type hasSuffix struct {
V1 String
V2 String
hash uint64
hasVariable bool
}
func (this *hasSuffix) Eval() (bool, error) {
v1, err := this.V1.Eval()
if err != nil {
return false, err
}
v2, err := this.V2.Eval()
if err != nil {
return false, err
}
return strings.HasSuffix(v1, v2), nil
}
func (this *hasSuffix) Compare(that Comparable) int {
if this.Hash() != that.Hash() {
if this.Hash() < that.Hash() {
return -1
}
return 1
}
if other, ok := that.(*hasSuffix); ok {
if c := this.V1.Compare(other.V1); c != 0 {
return c
}
if c := this.V2.Compare(other.V2); c != 0 {
return c
}
return 0
}
return strings.Compare(this.String(), that.String())
}
func (this *hasSuffix) HasVariable() bool {
return this.hasVariable
}
func (this *hasSuffix) String() string {
return "hasSuffix(" + sjoin(this.V1, this.V2) + ")"
}
func (this *hasSuffix) Hash() uint64 {
return this.hash
}
func init() {
Register("hasSuffix", HasSuffix)
} | relapse/funcs/string.go | 0.792223 | 0.443118 | string.go | starcoder |
package slices
import (
"constraints"
"github.com/dairaga/gs"
"github.com/dairaga/gs/funcs"
)
// S is a generic slice type carrying the package's collection helpers.
type S[T any] []T

// Empty returns an empty slice.
func Empty[T any]() S[T] {
	return S[T]{}
}

// One returns a slice holding the single element v.
func One[T any](v T) S[T] {
	return S[T]{v}
}

// From returns a slice from given elements.
func From[T any](a ...T) S[T] {
	return S[T](a)
}

// Fill returns a slice of length size with every element set to v.
func Fill[T any](size int, v T) S[T] {
	ret := make(S[T], size)
	for i := 0; i < size; i++ {
		ret[i] = v
	}
	return ret
}
// FillWith returns a slice that contains the results of applying given function op with n times.
// op is invoked once per element, in index order.
func FillWith[T any](n int, op funcs.Unit[T]) S[T] {
	ret := make(S[T], n)
	for i := range ret {
		ret[i] = op()
	}
	return ret
}
// Range returns a slice containing equally spaced values in the half-open
// interval [start, end), stepping by step.
// Fix: a non-positive step previously spun forever when start < end; it now
// yields an empty slice.
func Range[T gs.Numeric](start, end, step T) S[T] {
	ret := Empty[T]()
	if step <= 0 {
		return ret
	}
	for i := start; i < end; i += step {
		ret = append(ret, i)
	}
	return ret
}
// Tabulate returns a slice containing values of a given function op over a
// range of integer values from 0 (inclusive) to n (exclusive).
// A non-positive n yields an empty slice.
func Tabulate[T any](n int, op funcs.Func[int, T]) S[T] {
	if n <= 0 {
		return Empty[T]()
	}
	ret := make([]T, n)
	for i := 0; i < n; i++ {
		ret[i] = op(i)
	}
	return ret
}
// -----------------------------------------------------------------------------
// TODO: refactor following functions to methods when go 1.19 releases.
// IndexFromFunc returns the index of the first element equal (per eq) to x
// at or after the start index; absence semantics come from IndexWhereFrom,
// defined elsewhere.
func IndexFromFunc[T, U any](s S[T], x U, start int, eq funcs.Equal[T, U]) int {
	p := funcs.EqualTo(x, eq)
	return s.IndexWhereFrom(p, start)
}

// IndexFunc returns the index of the first element equal (per eq) to x.
func IndexFunc[T, U any](s S[T], x U, eq funcs.Equal[T, U]) int {
	return IndexFromFunc(s, x, 0, eq)
}

// IndexFrom returns the index of the first element == x at or after start.
func IndexFrom[T comparable](s S[T], x T, start int) int {
	return IndexFromFunc(s, x, start, funcs.Same[T])
}

// Index returns the index of the first element == x.
func Index[T comparable](s S[T], x T) int {
	return IndexFrom(s, x, 0)
}
// LastIndexFromFunc returns the index of the last element equal (per eq) to
// x at or before the end index; an end of -1 appears to mean "from the last
// element" — the semantics live in LastIndexWhereFrom, defined elsewhere.
func LastIndexFromFunc[T, U any](s S[T], x U, end int, eq funcs.Equal[T, U]) int {
	p := funcs.EqualTo(x, eq)
	return s.LastIndexWhereFrom(p, end)
}

// LastIndexFunc returns the index of the last element equal (per eq) to x.
func LastIndexFunc[T, U any](s S[T], x U, eq funcs.Equal[T, U]) int {
	return LastIndexFromFunc(s, x, -1, eq)
}

// LastIndexFrom returns the index of the last element == x at or before end.
func LastIndexFrom[T comparable](s S[T], x T, end int) int {
	return LastIndexFromFunc(s, x, end, funcs.Same[T])
}

// LastIndex returns the index of the last element == x.
func LastIndex[T comparable](s S[T], x T) int {
	return LastIndexFrom(s, x, -1)
}

// Contain reports whether s contains x (using ==).
func Contain[T comparable](s S[T], x T) bool {
	return Index(s, x) >= 0
}

// ContainFunc reports whether s contains an element equal (per eq) to x.
func ContainFunc[T, U any](s S[T], x U, eq funcs.Equal[T, U]) bool {
	return IndexFunc(s, x, eq) >= 0
}
// EqualFunc reports whether s1 and s2 have equal length and pairwise-equal
// elements under eq.
func EqualFunc[T, U any](s1 S[T], s2 S[U], eq funcs.Equal[T, U]) bool {
	if len(s1) != len(s2) {
		return false
	}
	for i := range s1 {
		if !eq(s1[i], s2[i]) {
			return false
		}
	}
	return true
}

// Equal reports whether s1 and s2 are elementwise identical (using ==).
func Equal[T comparable](s1 S[T], s2 S[T]) bool {
	return EqualFunc(s1, s2, funcs.Same[T])
}
// Collect returns a new slice containing the results of applying the partial
// function p to every element on which it is defined.
func Collect[T, U any](s S[T], p funcs.Partial[T, U]) S[U] {
	return Fold(
		s,
		Empty[U](),
		func(z S[U], v T) S[U] {
			if u, ok := p(v); ok {
				return append(z, u)
			}
			return z
		},
	)
}

// CollectFirst returns the first result of applying p successfully, or None
// when p is defined for no element.
func CollectFirst[T, U any](s S[T], p funcs.Partial[T, U]) gs.Option[U] {
	for i := range s {
		if u, ok := p(s[i]); ok {
			return gs.Some(u)
		}
	}
	return gs.None[U]()
}
// FoldLeft applies op to the start value z and all elements of s from left
// to right, returning the accumulated value.
func FoldLeft[T, U any](s S[T], z U, op func(U, T) U) U {
	acc := z
	for _, v := range s {
		acc = op(acc, v)
	}
	return acc
}

// FoldRight applies op to the start value z and all elements of s from right
// to left, returning the accumulated value.
func FoldRight[T, U any](s S[T], z U, op func(T, U) U) U {
	acc := z
	for i := len(s) - 1; i >= 0; i-- {
		acc = op(s[i], acc)
	}
	return acc
}

// Fold is the same as FoldLeft.
func Fold[T, U any](s S[T], z U, op func(U, T) U) U {
	return FoldLeft(s, z, op)
}
// ScanLeft produces a slice of the cumulative results of applying op to z
// and all elements of s from left to right; the result starts with z and has
// len(s)+1 elements.
func ScanLeft[T, U any](s S[T], z U, op func(U, T) U) S[U] {
	return FoldLeft(s, One(z), func(a S[U], b T) S[U] {
		return append(a, op(a[len(a)-1], b))
	})
}

// ScanRight produces the cumulative results of applying op from right to
// left, reversed in place so the output reads left-to-right and ends with z.
func ScanRight[T, U any](s S[T], z U, op func(T, U) U) (ret S[U]) {
	ret = FoldRight(s, One(z), func(a T, b S[U]) S[U] {
		return append(b, op(a, b[len(b)-1]))
	})
	ret.ReverseSelf()
	return
}

// Scan is the same as ScanLeft.
func Scan[T, U any](s S[T], z U, op func(U, T) U) S[U] {
	return ScanLeft(s, z, op)
}
// FlatMap applies op to every element of s and concatenates the resulting
// slices into one.
func FlatMap[T, U any](s S[T], op funcs.Func[T, S[U]]) S[U] {
	ret := Empty[U]()
	for _, v := range s {
		ret = append(ret, op(v)...)
	}
	return ret
}

// Map returns a new slice holding op applied to each element of s.
func Map[T, U any](s S[T], op funcs.Func[T, U]) S[U] {
	ret := Empty[U]()
	for _, v := range s {
		ret = append(ret, op(v))
	}
	return ret
}
// PartitionMap applies op to each element and returns a pair of slices: the
// first contains the Left results, the second the Right results, each in
// input order.
func PartitionMap[T, A, B any](s S[T], op func(T) gs.Either[A, B]) (S[A], S[B]) {
	t2 := Fold(
		s,
		gs.T2(Empty[A](), Empty[B]()),
		func(z gs.Tuple2[S[A], S[B]], v T) gs.Tuple2[S[A], S[B]] {
			e := op(v)
			if e.IsRight() {
				z.V2 = append(z.V2, e.Right())
			} else {
				z.V1 = append(z.V1, e.Left())
			}
			return z
		},
	)
	return t2.V1, t2.V2
}
// GroupMap partitions a slice into a map according to the discriminator
// function key; all values sharing a discriminator are transformed by val
// and collected in input order.
func GroupMap[T any, K comparable, V any](s S[T], key funcs.Func[T, K], val funcs.Func[T, V]) map[K]S[V] {
	return Fold(
		s,
		make(map[K]S[V]),
		func(z map[K]S[V], x T) map[K]S[V] {
			k := key(x)
			v := val(x)
			z[k] = append(z[k], v)
			return z
		},
	)
}

// GroupBy partitions a slice into a map of slices according to the
// discriminator function key, keeping the elements themselves.
func GroupBy[T any, K comparable](s S[T], key funcs.Func[T, K]) map[K]S[T] {
	return GroupMap(s, key, funcs.Self[T])
}

// GroupMapReduce groups by key, transforms each group's values with val, and
// reduces each group to a single value with op. Map iteration order is
// nondeterministic, but each group's reduction is over its in-order slice.
func GroupMapReduce[T any, K comparable, V any](s S[T], key funcs.Func[T, K], val funcs.Func[T, V], op func(V, V) V) map[K]V {
	m := GroupMap(s, key, val)
	ret := make(map[K]V)
	for k := range m {
		ret[k] = m[k].Reduce(op).Get()
	}
	return ret
}
// MaxBy returns Some of the maximum element of s, ordered by the values
// produced by op; absence semantics come from Max, defined elsewhere.
func MaxBy[T any, R constraints.Ordered](s S[T], op funcs.Orderize[T, R]) gs.Option[T] {
	return s.Max(func(a, b T) int { return funcs.Order(op(a), op(b)) })
}

// MinBy returns Some of the minimum element of s, ordered by the values
// produced by op.
func MinBy[T any, R constraints.Ordered](s S[T], op funcs.Orderize[T, R]) gs.Option[T] {
	return s.Min(func(a, b T) int { return funcs.Order(op(a), op(b)) })
}

// SortBy sorts s by the ordered values produced by op.
func SortBy[T any, R constraints.Ordered](s S[T], op funcs.Orderize[T, R]) S[T] {
	return s.Sort(func(a, b T) int {
		return funcs.Order(op(a), op(b))
	})
}
// IsEmpty returns true if the given slice is empty.
func IsEmpty[T any](s S[T]) bool {
return len(s) <= 0
} | slices/slices.go | 0.812979 | 0.608507 | slices.go | starcoder |
package rgif
import (
"image/gif"
"os"
r "github.com/lachee/raylib-goplus/raylib"
)
// FrameDisposal mirrors the GIF frame-disposal byte values (0..3) that
// image/gif exposes via GIF.Disposal; LoadGifFromFile converts them directly.
type FrameDisposal int

const (
	FrameDisposalNone FrameDisposal = iota // 0: no disposal specified
	FrameDisposalDontDispose               // 1: leave the frame in place
	FrameDisposalRestoreBackground         // 2: restore the background
	FrameDisposalRestorePrevious           // 3: restore the previous frame
)
// GifImage represents an animated gif texture.
type GifImage struct {
	// Texture is the current frame of the gif.
	Texture r.Texture2D
	// Width is the width of a single frame.
	Width int
	// Height is the height of a single frame.
	Height int
	// Frames is the number of frames available.
	Frames int
	// Timing is the delay (in 100ths of seconds) a frame has.
	Timing []int
	// Disposal is the disposal for each frame.
	Disposal []FrameDisposal

	pixels        [][]r.Color // cache of each frame's fully composited pixels
	currentFrame  int         // the current frame index
	lastFrameTime float32     // elapsed time (in 100ths of seconds) since the last frame change
}
// LoadGifFromFile decodes a GIF file, pre-composites every frame's pixels
// with its disposal method applied, and returns a GifImage whose texture is
// initialised to the first frame.
//
// Fixes relative to the original: the file handle is only deferred for close
// after a successful Open, and the outer frame slice no longer passes the
// per-frame pixel count as its capacity (which could panic if frames
// exceeded imgWidth*imgHeight). Assumes the decoded GIF has at least one
// frame — gif.DecodeAll is expected to error otherwise; confirm.
func LoadGifFromFile(fileName string) (*GifImage, error) {
	// Read the GIF file
	file, err := os.Open(fileName)
	if err != nil {
		return nil, err
	}
	defer file.Close()

	// Decode the gif
	gif, err := gif.DecodeAll(file)
	if err != nil {
		return nil, err
	}

	// Prepare the tilesheet and overpaint image.
	imgWidth, imgHeight := getGifDimensions(gif)
	frames := len(gif.Image)
	disposals := make([]FrameDisposal, frames)
	images := make([][]r.Color, frames)
	clumative := make([]r.Color, imgWidth*imgHeight)
	previousNonDisposed := gif.Image[0]
	for i, img := range gif.Image {
		disposals[i] = FrameDisposal(gif.Disposal[i])
		pixels := make([]r.Color, imgWidth*imgHeight)
		for y := 0; y < imgHeight; y++ {
			for x := 0; x < imgWidth; x++ {
				color := img.At(x, y)
				red, green, blue, alpha := color.RGBA()
				switch disposals[i] {
				case FrameDisposalNone:
					// Use all our pixels always
					pixels[x+y*imgWidth] = r.NewColor(uint8(red), uint8(green), uint8(blue), uint8(alpha))
					clumative[x+y*imgWidth] = pixels[x+y*imgWidth]
					previousNonDisposed = img
				case FrameDisposalDontDispose:
					if alpha > 0 {
						// Use our own pixels (clumative)
						pixels[x+y*imgWidth] = r.NewColor(uint8(red), uint8(green), uint8(blue), uint8(alpha))
						clumative[x+y*imgWidth] = pixels[x+y*imgWidth]
					} else {
						// Use the previous pixels
						pixels[x+y*imgWidth] = clumative[x+y*imgWidth]
					}
					previousNonDisposed = img
				case FrameDisposalRestoreBackground:
					if disposals[0] == FrameDisposalDontDispose && alpha == 0 {
						red, green, blue, alpha = gif.Image[0].At(x, y).RGBA()
					}
					pixels[x+y*imgWidth] = r.NewColor(uint8(red), uint8(green), uint8(blue), uint8(alpha))
					clumative[x+y*imgWidth] = pixels[x+y*imgWidth]
				case FrameDisposalRestorePrevious:
					if alpha == 0 {
						red, green, blue, alpha = previousNonDisposed.At(x, y).RGBA()
					}
					pixels[x+y*imgWidth] = r.NewColor(uint8(red), uint8(green), uint8(blue), uint8(alpha))
					clumative[x+y*imgWidth] = pixels[x+y*imgWidth]
				}
			}
		}
		images[i] = pixels
	}

	// Load the first initial texture
	texture := r.LoadTextureFromGo(gif.Image[0])
	return &GifImage{
		Texture:  texture,
		pixels:   images,
		Width:    imgWidth,
		Height:   imgHeight,
		Frames:   frames,
		Timing:   gif.Delay,
		Disposal: disposals,
	}, nil
}
//Step advances the animation clock by timeSinceLastStep (seconds) and moves
//to the next frame once the current frame's delay has elapsed.
func (gif *GifImage) Step(timeSinceLastStep float32) {
	//Timing is expressed in 1/100ths of a second, so scale seconds by 100.
	gif.lastFrameTime += (timeSinceLastStep * 100)
	diff := gif.lastFrameTime - float32(gif.Timing[gif.currentFrame])
	if diff >= 0 {
		gif.NextFrame()
	}
}

//NextFrame increments the frame counter (wrapping at Frames), rolls any
//excess time over into the new frame, and uploads the new frame's pixels.
func (gif *GifImage) NextFrame() {
	//Subtract the finished frame's delay first so leftover time carries over.
	gif.lastFrameTime -= float32(gif.Timing[gif.currentFrame])
	gif.currentFrame = (gif.currentFrame + 1) % gif.Frames
	if gif.lastFrameTime < 0 {
		gif.lastFrameTime = 0
	}
	gif.Texture.UpdateTexture(gif.pixels[gif.currentFrame])
}

//Reset clears the last frame time and resets the current frame to zero.
//NOTE(review): this does not re-upload frame 0 to Texture; the texture only
//refreshes on the next NextFrame call — confirm this is intended.
func (gif *GifImage) Reset() {
	gif.currentFrame = 0
	gif.lastFrameTime = 0
}

//Unload releases the GPU texture, making this gif unusable. The CPU-side
//pixel cache is left to the garbage collector.
func (gif *GifImage) Unload() {
	gif.Texture.Unload()
}

//CurrentFrame returns the current frame index
func (gif *GifImage) CurrentFrame() int { return gif.currentFrame }

//CurrentTiming gets the delay (in 1/100ths of a second) of the current frame
func (gif *GifImage) CurrentTiming() int { return gif.Timing[gif.currentFrame] }

//GetRectangle gets a rectangle crop for a specified frame, laid out as if
//the frames were tiled horizontally on a sprite sheet.
func (gif *GifImage) GetRectangle(frame int) r.Rectangle {
	return r.NewRectangle(float32(gif.Width*frame), 0, float32(gif.Width), float32(gif.Height))
}

//DrawGif draws the gif's current frame at (x, y) with the given tint
func DrawGif(gif *GifImage, x int, y int, tint r.Color) {
	r.DrawTexture(gif.Texture, x, y, tint)
}

//DrawGifEx draws the gif's current frame with rotation and scale
func DrawGifEx(gif *GifImage, position r.Vector2, rotation float32, scale float32, tint r.Color) {
	r.DrawTextureEx(gif.Texture, position, rotation, scale, tint)
}
func getGifDimensions(gif *gif.GIF) (x, y int) {
var lowestX int
var lowestY int
var highestX int
var highestY int
for _, img := range gif.Image {
if img.Rect.Min.X < lowestX {
lowestX = img.Rect.Min.X
}
if img.Rect.Min.Y < lowestY {
lowestY = img.Rect.Min.Y
}
if img.Rect.Max.X > highestX {
highestX = img.Rect.Max.X
}
if img.Rect.Max.Y > highestY {
highestY = img.Rect.Max.Y
}
}
return highestX - lowestX, highestY - lowestY
} | raylib-gif/gif.go | 0.653238 | 0.407569 | gif.go | starcoder |
package phys
import (
"phys/vect"
"log"
)
// k_scalar_body computes the effective inverse mass of one body for an
// impulse applied at offset r along direction n: m_inv + i_inv*(r x n)^2.
func k_scalar_body(body *Body, r, n vect.Vect) float32 {
	rcn := vect.Cross(r, n)
	return body.m_inv + (body.i_inv * rcn * rcn)
}

// k_scalar combines the effective inverse masses of both bodies of a
// contact/constraint. A zero result means neither body can move (infinite
// mass and inertia), so no impulse can resolve the constraint.
func k_scalar(a, b *Body, r1, r2, n vect.Vect) float32 {
	value := k_scalar_body(a, r1, n) + k_scalar_body(b, r2, n)
	if value == 0.0 {
		log.Printf("Warning: Unsolvable collision or constraint.")
	}
	return value
}

// k_scalar2 is a manually inlined variant of k_scalar (the cross products
// are expanded in place); it must stay in sync with k_scalar_body.
func k_scalar2(a, b *Body, r1, r2, n vect.Vect) float32 {
	rcn := (r1.X * n.Y) - (r1.Y * n.X)
	rcn = a.m_inv + (a.i_inv * rcn * rcn)
	rcn2 := (r2.X * n.Y) - (r2.Y * n.X)
	rcn2 = b.m_inv + (b.i_inv * rcn2 * rcn2)
	value := rcn + rcn2
	if value == 0.0 {
		log.Printf("Warning: Unsolvable collision or constraint.")
	}
	return value
}

// relative_velocity2 returns the velocity of contact offset r2 on body b
// relative to contact offset r1 on body a (linear velocity plus the
// rotational contribution Perp(r)*w).
func relative_velocity2(a, b *Body, r1, r2 vect.Vect) vect.Vect {
	v1 := vect.Add(b.v, vect.Mult(vect.Perp(r2), b.w))
	v2 := vect.Add(a.v, vect.Mult(vect.Perp(r1), a.w))
	return vect.Sub(v1, v2)
}

// relative_velocity is the inlined, allocation-light form of
// relative_velocity2 (Perp(r)*w expanded as (-r.Y*w, r.X*w)).
func relative_velocity(a, b *Body, r1, r2 vect.Vect) vect.Vect {
	return vect.Vect{(-r2.Y*b.w + b.v.X) - (-r1.Y*a.w + a.v.X), (r2.X*b.w + b.v.Y) - (r1.X*a.w + a.v.Y)}
}

// normal_relative_velocity projects the relative contact velocity onto the
// contact normal n.
func normal_relative_velocity(a, b *Body, r1, r2, n vect.Vect) float32 {
	return vect.Dot(relative_velocity(a, b, r1, r2), n)
}

// apply_impulses applies impulse j at contact offsets r1/r2: the negated
// impulse to body a and the positive impulse to body b, updating both
// linear (v) and angular (w) velocity.
func apply_impulses(a, b *Body, r1, r2, j vect.Vect) {
	j1 := vect.Vect{-j.X, -j.Y}
	a.v.Add(vect.Mult(j1, a.m_inv))
	a.w += a.i_inv * vect.Cross(r1, j1)
	b.v.Add(vect.Mult(j, b.m_inv))
	b.w += b.i_inv * vect.Cross(r2, j)
}

// apply_bias_impulses is apply_impulses applied to the separate bias
// velocities (v_bias/w_bias) used for position correction by the solver.
func apply_bias_impulses(a, b *Body, r1, r2, j vect.Vect) {
	j1 := vect.Vect{-j.X, -j.Y}
	a.v_bias.Add(vect.Mult(j1, a.m_inv))
	a.w_bias += a.i_inv * vect.Cross(r1, j1)
	b.v_bias.Add(vect.Mult(j, b.m_inv))
	b.w_bias += b.i_inv * vect.Cross(r2, j)
}
/*
Reference: fully-inlined (allocation-free) variants of the impulse helpers,
kept as commented-out code. Leftover merge-conflict markers and the
duplicated copy of this block have been removed.

func apply_impulses(a, b *Body, r1, r2, j vect.Vect) {
	a.v = vect.Vect{(-j.X * a.m_inv) + a.v.X, (-j.Y * a.m_inv) + a.v.Y}
	a.w += a.i_inv * ((r1.X * -j.Y) - (r1.Y * -j.X))

	b.v = vect.Vect{(j.X * b.m_inv) + b.v.X, (j.Y * b.m_inv) + b.v.Y}
	b.w += b.i_inv * ((r2.X * j.Y) - (r2.Y * j.X))
}

func apply_bias_impulses(a, b *Body, r1, r2, j vect.Vect) {
	a.v_bias = vect.Vect{(-j.X * a.m_inv) + a.v_bias.X, (-j.Y * a.m_inv) + a.v_bias.Y}
	a.w_bias += a.i_inv * ((r1.X * -j.Y) - (r1.Y * -j.X))

	b.v_bias = vect.Vect{(j.X * b.m_inv) + b.v_bias.X, (j.Y * b.m_inv) + b.v_bias.Y}
	b.w_bias += b.i_inv * ((r2.X * j.Y) - (r2.Y * j.X))
}
*/
package eoy
import (
"fmt"
"time"
)
//Donor provides a primary key (SupporterID) for storing stats per supporter.
//NOTE(review): the original comment said "stats by month" and "ID is
//YYYY-MM", apparently copied from a month-keyed type — these fields are
//per-supporter.
type Donor struct {
	//SupporterID is the supporter's unique ID, joined against stats.id.
	SupporterID string `gorm:"supporter_id"`
	FirstName   string
	LastName    string
	CreatedDate *time.Time
}

//DonorResult holds a supporter's identifying fields plus an embedded stats
//record, as produced by the supporters-join-stats queries below.
type DonorResult struct {
	SupporterID string `gorm:"supporter_id"`
	FirstName   string
	LastName    string
	Stat
}

//TopDonor is Donor specialised for the "top donors" sheet.
type TopDonor struct {
	Donor
}

//TopDonorResult is DonorResult specialised for the "top donors" sheet.
type TopDonorResult struct {
	DonorResult
}
//KeyValue implements KeyValuer by returning the i'th key column of a
//DonorResult: 0 = first name, 1 = last name. Any other index is a
//programming error: the offending record is printed and the function panics.
func (r DonorResult) KeyValue(i int) (key interface{}) {
	switch i {
	case 0:
		key = r.FirstName
	case 1:
		key = r.LastName
	default:
		fmt.Printf("Error in DonorResult\n%+v\n", r)
		err := fmt.Errorf("Not a valid DonorResult index, %v", i)
		panic(err)
	}
	return key
}

//FillKeys implements KeyFiller by writing this result's key columns into the
//sheet starting at (row, col), one cell per entry in sheet.KeyNames, using
//the matching per-key style. Returns the (unchanged) row.
func (r DonorResult) FillKeys(rt *Runtime, sheet Sheet, row, col int) int {
	for j := 0; j < len(sheet.KeyNames); j++ {
		v := r.KeyValue(j)
		s := sheet.KeyStyles[j]
		rt.Cell(sheet.Name, row, col+j, v, s)
	}
	return row
}
//FillKeys implements KeyFiller by writing this result's key columns (first
//and last name) into the sheet.
//
//Bug fix: the previous implementation delegated to a zero-value
//DonorResult{}, which wrote empty names into the sheet instead of the
//receiver's data. Delegate to the embedded DonorResult so the actual
//values are used.
func (r TopDonorResult) FillKeys(rt *Runtime, sheet Sheet, row, col int) int {
	return r.DonorResult.FillKeys(rt, sheet, row, col)
}
//Fill implements Filler by querying all supporters joined to their stats,
//ordered by total donation amount (descending), and writing one sheet row
//per supporter (keys, then stats). Returns the next free row.
func (r Donor) Fill(rt *Runtime, sheet Sheet, row, col int) int {
	var a []DonorResult
	rt.DB.Table("supporters").Select("supporters.first_name, supporters.last_name, stats.*").Joins("left join stats on stats.id = supporters.supporter_id").Order("stats.all_amount desc").Scan(&a)
	for _, r := range a {
		//Insert below the current row so any existing rows shift down.
		rt.Spreadsheet.InsertRow(sheet.Name, row+1)
		r.FillKeys(rt, sheet, row, 0)
		r.Stat.Fill(rt, sheet.Name, row, len(sheet.KeyNames))
		row++
	}
	return row
}

//Fill implements Filler like Donor.Fill but limits the query to the
//configured number of top donors (rt.TopDonorLimit).
func (r TopDonor) Fill(rt *Runtime, sheet Sheet, row, col int) int {
	var a []DonorResult
	rt.DB.Order("stats.all_amount desc").Table("supporters").Select("first_name, last_name, stats.*").Joins("left join stats on stats.id = supporters.supporter_id").Limit(rt.TopDonorLimit).Scan(&a)
	for _, r := range a {
		rt.Spreadsheet.InsertRow(sheet.Name, row+1)
		r.FillKeys(rt, sheet, row, 0)
		r.Stat.Fill(rt, sheet.Name, row, len(sheet.KeyNames))
		row++
	}
	return row
}
//NewAllDonorsSheet builds the sheet description used to decorate the
//"all donors" sheet for the configured year.
//NOTE(review): KeyStyles has three entries while KeyNames has two —
//confirm whether the extra style is intentional or a copy/paste leftover.
func (rt *Runtime) NewAllDonorsSheet() Sheet {
	filler := Donor{}
	result := DonorResult{}
	name := fmt.Sprintf("All donors, %d", rt.Year)
	sheet := Sheet{
		Titles: []string{
			fmt.Sprintf("Ranked donors for %d", rt.Year),
		},
		Name:      name,
		KeyNames:  []string{"<NAME>", "<NAME>"},
		KeyStyles: []int{rt.KeyStyle, rt.KeyStyle, rt.KeyStyle},
		Filler:    filler,
		KeyFiller: result,
	}
	return sheet
}

//NewTopDonorsSheet builds the sheet description used to decorate the
//"top donors" sheet, limited to rt.TopDonorLimit donors.
//NOTE(review): same KeyStyles/KeyNames count mismatch as above.
func (rt *Runtime) NewTopDonorsSheet() Sheet {
	filler := TopDonor{}
	result := TopDonorResult{}
	name := fmt.Sprintf("Top donors for %d", rt.Year)
	sheet := Sheet{
		Titles: []string{
			fmt.Sprintf("Top %d donors for %d", rt.TopDonorLimit, rt.Year),
		},
		Name:      name,
		KeyNames:  []string{"<NAME>", "<NAME>"},
		KeyStyles: []int{rt.KeyStyle, rt.KeyStyle, rt.KeyStyle},
		Filler:    filler,
		KeyFiller: result,
	}
	return sheet
}
package reducer
// GroupBy groups the input using the given key function.
// For each key, one instance of valReduce is created lazily (on first
// sighting of that key) to further process the values with that key.
// Once a per-key reducer's Step returns false, that key is marked done and
// later values for it are ignored; the outer Step itself always returns
// true, because other keys may still accept input.
func GroupBy[A any, K comparable, V any](key func(A) K, valReduce Reducer[A, V]) Reducer[A, map[K]V] {
	return func() ReducerInstance[A, map[K]V] {
		// One reducer instance per observed key, created on demand.
		reducers := make(map[K]ReducerInstance[A, V])
		// Keys whose reducer has signalled it wants no further input.
		done := make(map[K]bool)
		return ReducerInstance[A, map[K]V]{
			Complete: func() map[K]V {
				res := make(map[K]V)
				for k, r := range reducers {
					res[k] = r.Complete()
				}
				return res
			},
			Step: func(v A) bool {
				k := key(v)
				if done[k] {
					return true
				}
				i, ok := reducers[k]
				if !ok {
					i = valReduce()
					reducers[k] = i
				}
				cont := i.Step(v)
				if !cont {
					done[k] = true
				}
				return true
			},
		}
	}
}
// GroupByCollect groups the input using the given key function.
// The resulting map contains all values sharing a key as a slice under that key.
func GroupByCollect[V any, K comparable](key func(V) K) Reducer[V, map[K][]V] {
	return GroupBy(key, ToSlice[V]())
}

// ToMap turns the input into a map using the given key and value functions
// to extract key and value from elements in the input.
// If a key appears multiple times, only the first element's value is kept.
func ToMap[T any, K comparable, V any](key func(T) K, value func(T) V) Reducer[T, map[K]V] {
	return GroupBy(key, Map(value, First[V]()))
}

// ToMapId turns the input into a map using the given key function to extract
// a key; values are the elements themselves (first occurrence per key wins).
func ToMapId[V any, K comparable](key func(V) K) Reducer[V, map[K]V] {
	return GroupBy(key, First[V]())
}

// ToSet turns the input into a set represented as a map[T]bool
// (every present element maps to true).
func ToSet[T comparable]() Reducer[T, map[T]bool] {
	return GroupBy(func(a T) T { return a }, Constant[T](true))
}

// Constant is a reducer that ignores its input and always completes with the
// given constant value. Step returns false immediately, signalling that no
// further input is needed.
func Constant[S, T any](value T) Reducer[S, T] {
	return func() ReducerInstance[S, T] {
		return ReducerInstance[S, T]{
			Complete: func() T {
				return value
			},
			Step: func(a S) bool {
				return false
			},
		}
	}
}
package tuple
import (
"fmt"
"golang.org/x/exp/constraints"
)
// T5 is a tuple type holding 5 generic values.
type T5[Ty1, Ty2, Ty3, Ty4, Ty5 any] struct {
	V1 Ty1
	V2 Ty2
	V3 Ty3
	V4 Ty4
	V5 Ty5
}

// Len returns the number of values held by the tuple (always 5).
func (t T5[Ty1, Ty2, Ty3, Ty4, Ty5]) Len() int {
	return 5
}

// Values returns the values held by the tuple.
func (t T5[Ty1, Ty2, Ty3, Ty4, Ty5]) Values() (Ty1, Ty2, Ty3, Ty4, Ty5) {
	return t.V1, t.V2, t.V3, t.V4, t.V5
}

// Array returns an array of the tuple values, type-erased to any.
func (t T5[Ty1, Ty2, Ty3, Ty4, Ty5]) Array() [5]any {
	return [5]any{
		t.V1,
		t.V2,
		t.V3,
		t.V4,
		t.V5,
	}
}

// Slice returns a slice of the tuple values, type-erased to any.
func (t T5[Ty1, Ty2, Ty3, Ty4, Ty5]) Slice() []any {
	a := t.Array()
	return a[:]
}

// String returns the string representation of the tuple.
func (t T5[Ty1, Ty2, Ty3, Ty4, Ty5]) String() string {
	return tupString(t.Slice())
}

// GoString returns a Go-syntax representation of the tuple.
func (t T5[Ty1, Ty2, Ty3, Ty4, Ty5]) GoString() string {
	return tupGoString(t.Slice())
}
// New5 creates a new tuple holding 5 generic values.
func New5[Ty1, Ty2, Ty3, Ty4, Ty5 any](v1 Ty1, v2 Ty2, v3 Ty3, v4 Ty4, v5 Ty5) T5[Ty1, Ty2, Ty3, Ty4, Ty5] {
	return T5[Ty1, Ty2, Ty3, Ty4, Ty5]{
		V1: v1,
		V2: v2,
		V3: v3,
		V4: v4,
		V5: v5,
	}
}

// FromArray5 returns a tuple from an array of length 5.
// If any of the values can not be converted to the generic type, an error is returned.
func FromArray5[Ty1, Ty2, Ty3, Ty4, Ty5 any](arr [5]any) (T5[Ty1, Ty2, Ty3, Ty4, Ty5], error) {
	v1, ok := arr[0].(Ty1)
	if !ok {
		return T5[Ty1, Ty2, Ty3, Ty4, Ty5]{}, fmt.Errorf("value at array index 0 expected to have type %s but has type %T", typeName[Ty1](), arr[0])
	}
	v2, ok := arr[1].(Ty2)
	if !ok {
		return T5[Ty1, Ty2, Ty3, Ty4, Ty5]{}, fmt.Errorf("value at array index 1 expected to have type %s but has type %T", typeName[Ty2](), arr[1])
	}
	v3, ok := arr[2].(Ty3)
	if !ok {
		return T5[Ty1, Ty2, Ty3, Ty4, Ty5]{}, fmt.Errorf("value at array index 2 expected to have type %s but has type %T", typeName[Ty3](), arr[2])
	}
	v4, ok := arr[3].(Ty4)
	if !ok {
		return T5[Ty1, Ty2, Ty3, Ty4, Ty5]{}, fmt.Errorf("value at array index 3 expected to have type %s but has type %T", typeName[Ty4](), arr[3])
	}
	v5, ok := arr[4].(Ty5)
	if !ok {
		return T5[Ty1, Ty2, Ty3, Ty4, Ty5]{}, fmt.Errorf("value at array index 4 expected to have type %s but has type %T", typeName[Ty5](), arr[4])
	}
	return New5(v1, v2, v3, v4, v5), nil
}

// FromArray5X returns a tuple from an array of length 5.
// If any of the values can not be converted to the generic type, the function panics.
// It delegates to FromSlice5X; the length check there always passes for an array.
func FromArray5X[Ty1, Ty2, Ty3, Ty4, Ty5 any](arr [5]any) T5[Ty1, Ty2, Ty3, Ty4, Ty5] {
	return FromSlice5X[Ty1, Ty2, Ty3, Ty4, Ty5](arr[:])
}

// FromSlice5 returns a tuple from a slice of length 5.
// If the length of the slice doesn't match, or any of the values can not be converted to the generic type, an error is returned.
func FromSlice5[Ty1, Ty2, Ty3, Ty4, Ty5 any](values []any) (T5[Ty1, Ty2, Ty3, Ty4, Ty5], error) {
	if len(values) != 5 {
		return T5[Ty1, Ty2, Ty3, Ty4, Ty5]{}, fmt.Errorf("slice length %d must match number of tuple values 5", len(values))
	}
	v1, ok := values[0].(Ty1)
	if !ok {
		return T5[Ty1, Ty2, Ty3, Ty4, Ty5]{}, fmt.Errorf("value at slice index 0 expected to have type %s but has type %T", typeName[Ty1](), values[0])
	}
	v2, ok := values[1].(Ty2)
	if !ok {
		return T5[Ty1, Ty2, Ty3, Ty4, Ty5]{}, fmt.Errorf("value at slice index 1 expected to have type %s but has type %T", typeName[Ty2](), values[1])
	}
	v3, ok := values[2].(Ty3)
	if !ok {
		return T5[Ty1, Ty2, Ty3, Ty4, Ty5]{}, fmt.Errorf("value at slice index 2 expected to have type %s but has type %T", typeName[Ty3](), values[2])
	}
	v4, ok := values[3].(Ty4)
	if !ok {
		return T5[Ty1, Ty2, Ty3, Ty4, Ty5]{}, fmt.Errorf("value at slice index 3 expected to have type %s but has type %T", typeName[Ty4](), values[3])
	}
	v5, ok := values[4].(Ty5)
	if !ok {
		return T5[Ty1, Ty2, Ty3, Ty4, Ty5]{}, fmt.Errorf("value at slice index 4 expected to have type %s but has type %T", typeName[Ty5](), values[4])
	}
	return New5(v1, v2, v3, v4, v5), nil
}

// FromSlice5X returns a tuple from a slice of length 5.
// If the length of the slice doesn't match, or any of the values can not be
// converted to the generic type, the function panics (via the unchecked
// type assertions below).
func FromSlice5X[Ty1, Ty2, Ty3, Ty4, Ty5 any](values []any) T5[Ty1, Ty2, Ty3, Ty4, Ty5] {
	if len(values) != 5 {
		panic(fmt.Errorf("slice length %d must match number of tuple values 5", len(values)))
	}
	v1 := values[0].(Ty1)
	v2 := values[1].(Ty2)
	v3 := values[2].(Ty3)
	v4 := values[3].(Ty4)
	v5 := values[4].(Ty5)
	return New5(v1, v2, v3, v4, v5)
}
// Equal5 returns whether the host tuple is equal to the other tuple.
// All tuple elements of the host and guest parameters must match the "comparable" built-in constraint.
// To test equality of tuples that hold custom Equalable values, use the Equal5E function.
// To test equality of tuples that hold custom Comparable values, use the Equal5C function.
// Otherwise, use Equal or reflect.DeepEqual to test tuples of any types.
func Equal5[Ty1, Ty2, Ty3, Ty4, Ty5 comparable](host, guest T5[Ty1, Ty2, Ty3, Ty4, Ty5]) bool {
	return host.V1 == guest.V1 && host.V2 == guest.V2 && host.V3 == guest.V3 && host.V4 == guest.V4 && host.V5 == guest.V5
}

// Equal5E returns whether the host tuple is semantically equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the Equalable constraint.
// To test equality of tuples that hold built-in "comparable" values, use the Equal5 function.
// To test equality of tuples that hold custom Comparable values, use the Equal5C function.
// Otherwise, use Equal or reflect.DeepEqual to test tuples of any types.
func Equal5E[Ty1 Equalable[Ty1], Ty2 Equalable[Ty2], Ty3 Equalable[Ty3], Ty4 Equalable[Ty4], Ty5 Equalable[Ty5]](host, guest T5[Ty1, Ty2, Ty3, Ty4, Ty5]) bool {
	return host.V1.Equal(guest.V1) && host.V2.Equal(guest.V2) && host.V3.Equal(guest.V3) && host.V4.Equal(guest.V4) && host.V5.Equal(guest.V5)
}

// Equal5C returns whether the host tuple is semantically equal to the guest
// tuple, using each element's CompareTo and testing for EQ.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To test equality of tuples that hold built-in "comparable" values, use the Equal5 function.
// To test equality of tuples that hold custom Equalable values, use the Equal5E function.
// Otherwise, use Equal or reflect.DeepEqual to test tuples of any types.
func Equal5C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2], Ty3 Comparable[Ty3], Ty4 Comparable[Ty4], Ty5 Comparable[Ty5]](host, guest T5[Ty1, Ty2, Ty3, Ty4, Ty5]) bool {
	return host.V1.CompareTo(guest.V1).EQ() && host.V2.CompareTo(guest.V2).EQ() && host.V3.CompareTo(guest.V3).EQ() && host.V4.CompareTo(guest.V4).EQ() && host.V5.CompareTo(guest.V5).EQ()
}

// Compare5 returns whether the host tuple is semantically less than, equal
// to, or greater than the guest tuple (lexicographic order: V1 first, V5
// last, later elements only consulted on ties).
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the Compare5C function.
func Compare5[Ty1, Ty2, Ty3, Ty4, Ty5 constraints.Ordered](host, guest T5[Ty1, Ty2, Ty3, Ty4, Ty5]) OrderedComparisonResult {
	return multiCompare(
		func() OrderedComparisonResult { return compareOrdered(host.V1, guest.V1) },
		func() OrderedComparisonResult { return compareOrdered(host.V2, guest.V2) },
		func() OrderedComparisonResult { return compareOrdered(host.V3, guest.V3) },
		func() OrderedComparisonResult { return compareOrdered(host.V4, guest.V4) },
		func() OrderedComparisonResult { return compareOrdered(host.V5, guest.V5) },
	)
}

// Compare5C returns whether the host tuple is semantically less than, equal
// to, or greater than the guest tuple (lexicographic, like Compare5).
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the Compare5 function.
func Compare5C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2], Ty3 Comparable[Ty3], Ty4 Comparable[Ty4], Ty5 Comparable[Ty5]](host, guest T5[Ty1, Ty2, Ty3, Ty4, Ty5]) OrderedComparisonResult {
	return multiCompare(
		func() OrderedComparisonResult { return host.V1.CompareTo(guest.V1) },
		func() OrderedComparisonResult { return host.V2.CompareTo(guest.V2) },
		func() OrderedComparisonResult { return host.V3.CompareTo(guest.V3) },
		func() OrderedComparisonResult { return host.V4.CompareTo(guest.V4) },
		func() OrderedComparisonResult { return host.V5.CompareTo(guest.V5) },
	)
}
// LessThan5 returns whether the host tuple is semantically less than the
// guest tuple. Thin wrapper over Compare5 testing for LT.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the LessThan5C function.
func LessThan5[Ty1, Ty2, Ty3, Ty4, Ty5 constraints.Ordered](host, guest T5[Ty1, Ty2, Ty3, Ty4, Ty5]) bool {
	return Compare5(host, guest).LT()
}

// LessThan5C returns whether the host tuple is semantically less than the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the LessThan5 function.
func LessThan5C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2], Ty3 Comparable[Ty3], Ty4 Comparable[Ty4], Ty5 Comparable[Ty5]](host, guest T5[Ty1, Ty2, Ty3, Ty4, Ty5]) bool {
	return Compare5C(host, guest).LT()
}

// LessOrEqual5 returns whether the host tuple is semantically less than or
// equal to the guest tuple. Thin wrapper over Compare5 testing for LE.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the LessOrEqual5C function.
func LessOrEqual5[Ty1, Ty2, Ty3, Ty4, Ty5 constraints.Ordered](host, guest T5[Ty1, Ty2, Ty3, Ty4, Ty5]) bool {
	return Compare5(host, guest).LE()
}

// LessOrEqual5C returns whether the host tuple is semantically less than or equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the LessOrEqual5 function.
func LessOrEqual5C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2], Ty3 Comparable[Ty3], Ty4 Comparable[Ty4], Ty5 Comparable[Ty5]](host, guest T5[Ty1, Ty2, Ty3, Ty4, Ty5]) bool {
	return Compare5C(host, guest).LE()
}

// GreaterThan5 returns whether the host tuple is semantically greater than
// the guest tuple. Thin wrapper over Compare5 testing for GT.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the GreaterThan5C function.
func GreaterThan5[Ty1, Ty2, Ty3, Ty4, Ty5 constraints.Ordered](host, guest T5[Ty1, Ty2, Ty3, Ty4, Ty5]) bool {
	return Compare5(host, guest).GT()
}

// GreaterThan5C returns whether the host tuple is semantically greater than the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the GreaterThan5 function.
func GreaterThan5C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2], Ty3 Comparable[Ty3], Ty4 Comparable[Ty4], Ty5 Comparable[Ty5]](host, guest T5[Ty1, Ty2, Ty3, Ty4, Ty5]) bool {
	return Compare5C(host, guest).GT()
}

// GreaterOrEqual5 returns whether the host tuple is semantically greater
// than or equal to the guest tuple. Thin wrapper over Compare5 testing for GE.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the GreaterOrEqual5C function.
func GreaterOrEqual5[Ty1, Ty2, Ty3, Ty4, Ty5 constraints.Ordered](host, guest T5[Ty1, Ty2, Ty3, Ty4, Ty5]) bool {
	return Compare5(host, guest).GE()
}

// GreaterOrEqual5C returns whether the host tuple is semantically greater than or equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the GreaterOrEqual5 function.
func GreaterOrEqual5C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2], Ty3 Comparable[Ty3], Ty4 Comparable[Ty4], Ty5 Comparable[Ty5]](host, guest T5[Ty1, Ty2, Ty3, Ty4, Ty5]) bool {
	return Compare5C(host, guest).GE()
}
package geometry
import (
_ "text/tabwriter"
)
// Mat1x1 is a 1×1 matrix of float64 (a scalar in matrix form).
type Mat1x1 [1]float64

// Add returns the element-wise sum of a and b.
func (a Mat1x1) Add(b Mat1x1) Mat1x1 {
	return Mat1x1{a[0] + b[0]}
}

// AddScalar adds f to the single element.
func (m Mat1x1) AddScalar(f float64) Mat1x1 {
	return Mat1x1{
		m[0] + f,
	}
}

// Concatenate appends f, widening the row to a Mat1x2.
func (m Mat1x1) Concatenate(f float64) Mat1x2 {
	return Mat1x2{m[0], f}
}

// Homogenize lifts the matrix into homogeneous form, adding a [0 1] row.
func (m Mat1x1) Homogenize() Mat2x2 {
	return Mat2x2{
		Mat1x2{m[0], 0},
		Mat1x2{0, 1},
	}
}

// Len returns the number of elements (always 1).
func (m Mat1x1) Len() int {
	return 1
}

// Inverse returns the matrix inverse. For a 1×1 matrix [v] the inverse is
// [1/v]. (Bug fix: the previous implementation returned the matrix
// unchanged, which is only an inverse when v == 1.) A zero element follows
// ordinary float division and yields ±Inf, consistent with the matrix
// being singular.
func (m Mat1x1) Inverse() Mat1x1 {
	return Mat1x1{1.0 / m[0]}
}

// MultiplyScalar scales the element by f.
func (m Mat1x1) MultiplyScalar(f float64) Mat1x1 {
	return Mat1x1{m[0] * f}
}

// MultiplyMat1x1 multiplies two 1×1 matrices.
func (a Mat1x1) MultiplyMat1x1(b Mat1x1) Mat1x1 {
	return Mat1x1{a[0] * b[0]}
}

// MultiplyMat1x2 multiplies this 1×1 matrix by a 1×2 row.
func (a Mat1x1) MultiplyMat1x2(b Mat1x2) Mat1x2 {
	return Mat1x2{
		a[0] * b[0],
		a[0] * b[1],
	}
}

// MultiplyMat1x3 multiplies this 1×1 matrix by a 1×3 row.
func (a Mat1x1) MultiplyMat1x3(b Mat1x3) Mat1x3 {
	return Mat1x3{
		a[0] * b[0],
		a[0] * b[1],
		a[0] * b[2],
	}
}

// MultiplyMat1x4 multiplies this 1×1 matrix by a 1×4 row.
func (a Mat1x1) MultiplyMat1x4(b Mat1x4) Mat1x4 {
	return Mat1x4{
		a[0] * b[0],
		a[0] * b[1],
		a[0] * b[2],
		a[0] * b[3],
	}
}

// MultiplyVec1 multiplies the matrix with a 1-vector, yielding a scalar.
func (m Mat1x1) MultiplyVec1(v Vec1) float64 {
	return m[0] * v[0]
}

// String is an unimplemented stub that returns the empty string.
// TODO(review): implement a real textual representation (text/tabwriter is
// imported blank at the top of the file, presumably for this purpose).
func (m Mat1x1) String() string {
	return ""
}

// Subtract returns the element-wise difference a - b.
func (a Mat1x1) Subtract(b Mat1x1) Mat1x1 {
	return Mat1x1{a[0] - b[0]}
}

// SubtractScalar subtracts f from the single element.
func (m Mat1x1) SubtractScalar(f float64) Mat1x1 {
	return Mat1x1{
		m[0] - f,
	}
}

// Trace returns the sum of the diagonal (the single element).
func (m Mat1x1) Trace() float64 {
	return m[0]
}

// Transpose returns the transpose, which for a 1×1 matrix is itself.
func (m Mat1x1) Transpose() Mat1x1 {
	return Mat1x1{m[0]}
}
// Mat1x2 is a 1×2 row matrix of float64.
type Mat1x2 [2]float64

// Add returns the element-wise sum of a and b.
func (a Mat1x2) Add(b Mat1x2) Mat1x2 {
	return Mat1x2{
		a[0] + b[0],
		a[1] + b[1],
	}
}

// AddScalar adds f to every element.
func (m Mat1x2) AddScalar(f float64) Mat1x2 {
	return Mat1x2{
		m[0] + f,
		m[1] + f,
	}
}

// Concatenate appends f, widening the row to a Mat1x3.
func (m Mat1x2) Concatenate(f float64) Mat1x3 {
	return Mat1x3{m[0], m[1], f}
}

// Homogenize appends a zero coordinate, producing a Mat1x3.
// NOTE(review): Mat1x1.Homogenize adds a full extra row instead; the two
// conventions differ — confirm which is intended.
func (m Mat1x2) Homogenize() Mat1x3 {
	return Mat1x3{m[0], m[1], 0}
}

// Len returns the number of elements (always 2).
func (m Mat1x2) Len() int {
	return 2
}

// Mat1x1 truncates to the first element.
func (m Mat1x2) Mat1x1() Mat1x1 {
	return Mat1x1{m[0]}
}

// MultiplyScalar scales every element by f.
func (m Mat1x2) MultiplyScalar(f float64) Mat1x2 {
	return Mat1x2{m[0] * f, m[1] * f}
}

// MultiplyMat2x1 computes the 1×2 · 2×1 product (a scalar as Mat1x1).
func (a Mat1x2) MultiplyMat2x1(b Mat2x1) Mat1x1 {
	return Mat1x1{a[0]*b[0][0] + a[1]*b[1][0]}
}

// MultiplyMat2x2 computes the 1×2 · 2×2 product.
func (a Mat1x2) MultiplyMat2x2(b Mat2x2) Mat1x2 {
	return Mat1x2{
		a[0]*b[0][0] + a[1]*b[1][0],
		a[0]*b[0][1] + a[1]*b[1][1],
	}
}

// MultiplyMat2x3 computes the 1×2 · 2×3 product.
func (a Mat1x2) MultiplyMat2x3(b Mat2x3) Mat1x3 {
	return Mat1x3{
		a[0]*b[0][0] + a[1]*b[1][0],
		a[0]*b[0][1] + a[1]*b[1][1],
		a[0]*b[0][2] + a[1]*b[1][2],
	}
}

// MultiplyMat2x4 computes the 1×2 · 2×4 product.
func (a Mat1x2) MultiplyMat2x4(b Mat2x4) Mat1x4 {
	return Mat1x4{
		a[0]*b[0][0] + a[1]*b[1][0],
		a[0]*b[0][1] + a[1]*b[1][1],
		a[0]*b[0][2] + a[1]*b[1][2],
		a[0]*b[0][3] + a[1]*b[1][3],
	}
}

// MultiplyVec2 computes the dot product with a 2-vector.
func (m Mat1x2) MultiplyVec2(v Vec2) float64 {
	return m[0]*v[0] + m[1]*v[1]
}

// Subtract returns the element-wise difference a - b.
func (a Mat1x2) Subtract(b Mat1x2) Mat1x2 {
	return Mat1x2{
		a[0] - b[0],
		a[1] - b[1],
	}
}

// SubtractScalar subtracts f from every element.
func (m Mat1x2) SubtractScalar(f float64) Mat1x2 {
	return Mat1x2{
		m[0] - f,
		m[1] - f,
	}
}

// Transpose returns the 2×1 column transpose.
func (m Mat1x2) Transpose() Mat2x1 {
	return Mat2x1{
		Mat1x1{m[0]},
		Mat1x1{m[1]},
	}
}
// Mat1x3 is a 1×3 row matrix of float64.
type Mat1x3 [3]float64

// Add returns the element-wise sum of a and b.
func (a Mat1x3) Add(b Mat1x3) Mat1x3 {
	return Mat1x3{
		a[0] + b[0],
		a[1] + b[1],
		a[2] + b[2],
	}
}

// AddScalar adds f to every element.
func (m Mat1x3) AddScalar(f float64) Mat1x3 {
	return Mat1x3{
		m[0] + f,
		m[1] + f,
		m[2] + f,
	}
}

// Concatenate appends f, widening the row to a Mat1x4.
func (m Mat1x3) Concatenate(f float64) Mat1x4 {
	return Mat1x4{m[0], m[1], m[2], f}
}

// Homogenize appends a zero coordinate, producing a Mat1x4.
func (m Mat1x3) Homogenize() Mat1x4 {
	return Mat1x4{m[0], m[1], m[2], 0}
}

// Len returns the number of elements (always 3).
func (m Mat1x3) Len() int {
	return 3
}

// Mat1x1 truncates to the first element.
func (m Mat1x3) Mat1x1() Mat1x1 {
	return Mat1x1{m[0]}
}

// Mat1x2 truncates to the first two elements.
func (m Mat1x3) Mat1x2() Mat1x2 {
	return Mat1x2{m[0], m[1]}
}

// MultiplyScalar scales every element by f.
func (m Mat1x3) MultiplyScalar(f float64) Mat1x3 {
	return Mat1x3{m[0] * f, m[1] * f, m[2] * f}
}

// MultiplyMat3x1 computes the 1×3 · 3×1 product (a scalar as Mat1x1).
func (a Mat1x3) MultiplyMat3x1(b Mat3x1) Mat1x1 {
	return Mat1x1{a[0]*b[0][0] + a[1]*b[1][0] + a[2]*b[2][0]}
}

// MultiplyMat3x2 computes the 1×3 · 3×2 product.
func (a Mat1x3) MultiplyMat3x2(b Mat3x2) Mat1x2 {
	return Mat1x2{
		a[0]*b[0][0] + a[1]*b[1][0] + a[2]*b[2][0],
		a[0]*b[0][1] + a[1]*b[1][1] + a[2]*b[2][1],
	}
}

// MultiplyMat3x3 computes the 1×3 · 3×3 product.
func (a Mat1x3) MultiplyMat3x3(b Mat3x3) Mat1x3 {
	return Mat1x3{
		a[0]*b[0][0] + a[1]*b[1][0] + a[2]*b[2][0],
		a[0]*b[0][1] + a[1]*b[1][1] + a[2]*b[2][1],
		a[0]*b[0][2] + a[1]*b[1][2] + a[2]*b[2][2],
	}
}

// MultiplyMat3x4 computes the 1×3 · 3×4 product.
func (a Mat1x3) MultiplyMat3x4(b Mat3x4) Mat1x4 {
	return Mat1x4{
		a[0]*b[0][0] + a[1]*b[1][0] + a[2]*b[2][0],
		a[0]*b[0][1] + a[1]*b[1][1] + a[2]*b[2][1],
		a[0]*b[0][2] + a[1]*b[1][2] + a[2]*b[2][2],
		a[0]*b[0][3] + a[1]*b[1][3] + a[2]*b[2][3],
	}
}

// MultiplyVec3 computes the dot product with a 3-vector.
func (m Mat1x3) MultiplyVec3(v Vec3) float64 {
	return m[0]*v[0] + m[1]*v[1] + m[2]*v[2]
}

// Subtract returns the element-wise difference a - b.
func (a Mat1x3) Subtract(b Mat1x3) Mat1x3 {
	return Mat1x3{
		a[0] - b[0],
		a[1] - b[1],
		a[2] - b[2],
	}
}

// SubtractScalar subtracts f from every element.
func (m Mat1x3) SubtractScalar(f float64) Mat1x3 {
	return Mat1x3{
		m[0] - f,
		m[1] - f,
		m[2] - f,
	}
}

// Transpose returns the 3×1 column transpose.
func (m Mat1x3) Transpose() Mat3x1 {
	return Mat3x1{
		Mat1x1{m[0]},
		Mat1x1{m[1]},
		Mat1x1{m[2]},
	}
}
// Mat1x4 is a 1×4 row matrix of float64.
type Mat1x4 [4]float64

// Add returns the element-wise sum of a and b.
func (a Mat1x4) Add(b Mat1x4) Mat1x4 {
	return Mat1x4{
		a[0] + b[0],
		a[1] + b[1],
		a[2] + b[2],
		a[3] + b[3],
	}
}

// AddScalar adds f to every element.
func (m Mat1x4) AddScalar(f float64) Mat1x4 {
	return Mat1x4{
		m[0] + f,
		m[1] + f,
		m[2] + f,
		m[3] + f,
	}
}

// Concatenate appends f, widening the row to a variable-length Mat1xN.
func (m Mat1x4) Concatenate(f float64) Mat1xN {
	return Mat1xN{m[0], m[1], m[2], m[3], f}
}

// Homogenize appends a zero coordinate, producing a Mat1xN.
func (m Mat1x4) Homogenize() Mat1xN {
	return Mat1xN{m[0], m[1], m[2], m[3], 0}
}

// Len returns the number of elements (always 4).
func (m Mat1x4) Len() int {
	return 4
}

// Mat1x1 truncates to the first element.
func (m Mat1x4) Mat1x1() Mat1x1 {
	return Mat1x1{m[0]}
}

// Mat1x2 truncates to the first two elements.
func (m Mat1x4) Mat1x2() Mat1x2 {
	return Mat1x2{m[0], m[1]}
}

// Mat1x3 truncates to the first three elements.
func (m Mat1x4) Mat1x3() Mat1x3 {
	return Mat1x3{m[0], m[1], m[2]}
}

// MultiplyScalar scales every element by f.
func (m Mat1x4) MultiplyScalar(f float64) Mat1x4 {
	return Mat1x4{m[0] * f, m[1] * f, m[2] * f, m[3] * f}
}

// MultiplyMat4x1 computes the 1×4 · 4×1 product (a scalar as Mat1x1).
func (a Mat1x4) MultiplyMat4x1(b Mat4x1) Mat1x1 {
	return Mat1x1{a[0]*b[0][0] + a[1]*b[1][0] + a[2]*b[2][0] + a[3]*b[3][0]}
}

// MultiplyMat4x2 computes the 1×4 · 4×2 product.
func (a Mat1x4) MultiplyMat4x2(b Mat4x2) Mat1x2 {
	return Mat1x2{
		a[0]*b[0][0] + a[1]*b[1][0] + a[2]*b[2][0] + a[3]*b[3][0],
		a[0]*b[0][1] + a[1]*b[1][1] + a[2]*b[2][1] + a[3]*b[3][1],
	}
}

// MultiplyMat4x3 computes the 1×4 · 4×3 product.
func (a Mat1x4) MultiplyMat4x3(b Mat4x3) Mat1x3 {
	return Mat1x3{
		a[0]*b[0][0] + a[1]*b[1][0] + a[2]*b[2][0] + a[3]*b[3][0],
		a[0]*b[0][1] + a[1]*b[1][1] + a[2]*b[2][1] + a[3]*b[3][1],
		a[0]*b[0][2] + a[1]*b[1][2] + a[2]*b[2][2] + a[3]*b[3][2],
	}
}

// MultiplyMat4x4 computes the 1×4 · 4×4 product.
func (a Mat1x4) MultiplyMat4x4(b Mat4x4) Mat1x4 {
	return Mat1x4{
		a[0]*b[0][0] + a[1]*b[1][0] + a[2]*b[2][0] + a[3]*b[3][0],
		a[0]*b[0][1] + a[1]*b[1][1] + a[2]*b[2][1] + a[3]*b[3][1],
		a[0]*b[0][2] + a[1]*b[1][2] + a[2]*b[2][2] + a[3]*b[3][2],
		a[0]*b[0][3] + a[1]*b[1][3] + a[2]*b[2][3] + a[3]*b[3][3],
	}
}

// MultiplyVec4 computes the dot product with a 4-vector.
func (m Mat1x4) MultiplyVec4(v Vec4) float64 {
	return m[0]*v[0] + m[1]*v[1] + m[2]*v[2] + m[3]*v[3]
}

// Subtract returns the element-wise difference a - b.
func (a Mat1x4) Subtract(b Mat1x4) Mat1x4 {
	return Mat1x4{
		a[0] - b[0],
		a[1] - b[1],
		a[2] - b[2],
		a[3] - b[3],
	}
}

// SubtractScalar subtracts f from every element.
func (m Mat1x4) SubtractScalar(f float64) Mat1x4 {
	return Mat1x4{
		m[0] - f,
		m[1] - f,
		m[2] - f,
		m[3] - f,
	}
}

// Transpose returns the 4×1 column transpose.
func (m Mat1x4) Transpose() Mat4x1 {
	return Mat4x1{
		Mat1x1{m[0]},
		Mat1x1{m[1]},
		Mat1x1{m[2]},
		Mat1x1{m[3]},
	}
}
// Mat1xN is a 1×N row matrix of arbitrary length.
type Mat1xN []float64

// Concatenate returns a new Mat1xN consisting of m's elements followed by f.
// The result is always backed by fresh storage: the previous implementation
// returned append(m, f) directly, which could share — and on a subsequent
// Concatenate clobber — the caller's backing array whenever m had spare
// capacity.
func (m Mat1xN) Concatenate(f float64) Mat1xN {
	out := make(Mat1xN, len(m), len(m)+1)
	copy(out, m)
	return append(out, f)
}
package geometry
import "math"
// Direction3D wraps a normalised (unit-length) vector. The field is private
// so instances can only be produced through the normalising constructors
// (except for the zero vector, which is stored as-is — see
// NewDirection_FromVector).
type Direction3D struct {
	direction Vector3D
}

// NewDirection builds a direction from components, normalising them.
func NewDirection(x float32, y float32, z float32) Direction3D {
	return NewDirection_FromVector(NewVector(x, y, z))
}

// NewDirection_BetweenPoints returns the normalised direction pointing
// from 'from' towards 'to'.
func NewDirection_BetweenPoints(from Point3D, to Point3D) Direction3D {
	return NewDirection_FromVector(NewVector_BetweenPoints(from, to))
}

// NewDirection_FromPoint treats the point as a position vector from the
// origin and normalises it.
func NewDirection_FromPoint(point Point3D) Direction3D {
	return NewDirection_FromVector(NewVector_FromPoint(point))
}

// NewDirection_FromVector normalises the given vector. A zero-magnitude
// vector is stored unchanged (there is no meaningful direction), so callers
// should not assume the wrapped vector is always unit length.
func NewDirection_FromVector(vector Vector3D) Direction3D {
	var magnitude = vector.Magnitude()
	if magnitude > 0.0 {
		return Direction3D{direction: vector.Scale(1.0 / magnitude)}
	} else {
		return Direction3D{direction: vector}
	}
}

// newDirection_FromNormalizedVector wraps a vector the caller guarantees is
// already unit length, skipping the normalisation step.
func newDirection_FromNormalizedVector(normalizedVector Vector3D) Direction3D {
	return Direction3D{direction: normalizedVector}
}

// UnitX returns the +X axis direction.
func UnitX() Direction3D {
	return Direction3D{direction: Vector3D{X: 1.0, Y: 0.0, Z: 0.0}}
}

// UnitY returns the +Y axis direction.
func UnitY() Direction3D {
	return Direction3D{direction: Vector3D{X: 0.0, Y: 1.0, Z: 0.0}}
}

// UnitZ returns the +Z axis direction.
func UnitZ() Direction3D {
	return Direction3D{direction: Vector3D{X: 0.0, Y: 0.0, Z: 1.0}}
}
func (direction Direction3D) ToVector() Vector3D {
return direction.direction
}
func (direction Direction3D) X() float32 {
return direction.direction.X
}
func (direction Direction3D) Y() float32 {
return direction.direction.Y
}
func (direction Direction3D) Z() float32 {
return direction.direction.Z
}
// ToOrthonormalBasis extends this direction to a full orthonormal basis
// returned as a Matrix3D of the three basis vectors.
//
// The branch is chosen by the direction's dominant-magnitude component:
// the direction itself becomes that axis of the basis, a perpendicular
// unit vector is built by rotating its projection onto the plane of the
// two largest components (guaranteeing the inverse-magnitude divisor is
// well away from zero), and the third vector is the cross product of
// the first two (dirZ = dirX x dirY, dirX = dirY x dirZ, dirY = dirZ x
// dirX respectively), so the basis is consistently right-handed.
//
// NOTE(review): a zero direction (possible via NewDirection_FromVector
// with a zero vector) would divide by zero here and yield NaN/Inf
// entries — confirm callers never pass one.
func (direction Direction3D) ToOrthonormalBasis() Matrix3D {
	if math.Abs(float64(direction.direction.X)) >= math.Abs(float64(direction.direction.Y)) && math.Abs(float64(direction.direction.X)) >= math.Abs(float64(direction.direction.Z)) {
		// X dominates: direction becomes the X basis vector.
		var dirX = direction.direction
		var invXYMagnitude = float32(1.0 / math.Sqrt(float64(dirX.X*dirX.X+dirX.Y*dirX.Y)))
		// Perpendicular in the XY plane (a 90° rotation of the projection).
		var dirY = NewVector(
			-dirX.Y*invXYMagnitude,
			dirX.X*invXYMagnitude,
			0.0)
		// dirZ = dirX x dirY (expanded, using dirY.Z == 0).
		var dirZ = NewVector(
			-dirX.Z*dirY.Y,
			dirX.Z*dirY.X,
			dirX.X*dirY.Y-dirX.Y*dirY.X)
		return NewMatrix(dirX, dirY, dirZ)
	} else if math.Abs(float64(direction.direction.Y)) >= math.Abs(float64(direction.direction.Z)) {
		// Y dominates: direction becomes the Y basis vector.
		var dirY = direction.direction
		var invYZMagnitude = float32(1.0 / math.Sqrt(float64(dirY.Y*dirY.Y+dirY.Z*dirY.Z)))
		var dirZ = NewVector(
			0.0,
			-dirY.Z*invYZMagnitude,
			dirY.Y*invYZMagnitude)
		// dirX = dirY x dirZ (expanded, using dirZ.X == 0).
		var dirX = NewVector(
			dirY.Y*dirZ.Z-dirY.Z*dirZ.Y,
			-dirY.X*dirZ.Z,
			dirY.X*dirZ.Y)
		return NewMatrix(dirX, dirY, dirZ)
	} else {
		// Z dominates: direction becomes the Z basis vector.
		var dirZ = direction.direction
		var invZXMagnitude = float32(1.0 / math.Sqrt(float64(dirZ.Z*dirZ.Z+dirZ.X*dirZ.X)))
		var dirX = NewVector(
			dirZ.Z*invZXMagnitude,
			0.0,
			-dirZ.X*invZXMagnitude)
		// dirY = dirZ x dirX (expanded, using dirX.Y == 0).
		var dirY = NewVector(
			dirZ.Y*dirX.Z,
			dirZ.Z*dirX.X-dirZ.X*dirX.Z,
			-dirZ.Y*dirX.X)
		return NewMatrix(dirX, dirY, dirZ)
	}
}
// Equals reports whether the two directions are equal; comparison is
// delegated to the wrapped Vector3D.Equals.
func (direction Direction3D) Equals(other Direction3D) bool {
	return direction.direction.Equals(other.direction)
}

// EqualsTol reports whether the two directions are equal within the
// given tolerance; comparison is delegated to Vector3D.EqualsTol.
func (direction Direction3D) EqualsTol(other Direction3D, tolerance float32) bool {
	return direction.direction.EqualsTol(other.direction, tolerance)
}
func (direction Direction3D) Neg() Direction3D {
return newDirection_FromNormalizedVector(direction.direction.Neg())
} | geometry/Direction3D.go | 0.872931 | 0.873647 | Direction3D.go | starcoder |
package signdigest
import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// SignDigestRequestBody provides operations to call the signDigest method.
// NOTE(review): this model follows the Microsoft Kiota generated-code
// conventions (additionalData bag + nil-safe accessors); keep manual
// edits consistent with regeneration.
type SignDigestRequestBody struct {
    // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
    additionalData map[string]interface{}
    // The digest property
    digest []byte
}
// NewSignDigestRequestBody instantiates a new signDigestRequestBody and sets the default values
// (an empty additionalData map).
func NewSignDigestRequestBody()(*SignDigestRequestBody) {
    m := &SignDigestRequestBody{
    }
    m.SetAdditionalData(make(map[string]interface{}));
    return m
}
// CreateSignDigestRequestBodyFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value.
// The parse node is unused because this type has no subtypes to discriminate between.
func CreateSignDigestRequestBodyFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
    return NewSignDigestRequestBody(), nil
}
// GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
// Nil-safe: returns nil when called on a nil receiver.
func (m *SignDigestRequestBody) GetAdditionalData()(map[string]interface{}) {
    if m == nil {
        return nil
    } else {
        return m.additionalData
    }
}
// GetDigest gets the digest property value. The digest property.
// Nil-safe: returns nil when called on a nil receiver.
func (m *SignDigestRequestBody) GetDigest()([]byte) {
    if m == nil {
        return nil
    } else {
        return m.digest
    }
}
// GetFieldDeserializers returns the deserialization information for the current model:
// a map from wire field name to a closure that parses that field into m.
func (m *SignDigestRequestBody) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
    res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error))
    res["digest"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
        val, err := n.GetByteArrayValue()
        if err != nil {
            return err
        }
        if val != nil {
            m.SetDigest(val)
        }
        return nil
    }
    return res
}
// Serialize serializes information the current object: the digest field
// followed by any additional (unmodeled) data.
func (m *SignDigestRequestBody) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
    {
        err := writer.WriteByteArrayValue("digest", m.GetDigest())
        if err != nil {
            return err
        }
    }
    {
        err := writer.WriteAdditionalData(m.GetAdditionalData())
        if err != nil {
            return err
        }
    }
    return nil
}
// SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
// Nil-safe: a nil receiver is silently ignored.
func (m *SignDigestRequestBody) SetAdditionalData(value map[string]interface{})() {
    if m != nil {
        m.additionalData = value
    }
}
// SetDigest sets the digest property value. The digest property
func (m *SignDigestRequestBody) SetDigest(value []byte)() {
if m != nil {
m.digest = value
}
} | informationprotection/signdigest/sign_digest_request_body.go | 0.774839 | 0.438244 | sign_digest_request_body.go | starcoder |
package protos
import (
"fmt"
"github.com/golang/protobuf/proto"
"bytes"
"errors"
"sort"
"reflect"
)
// IsValidBlockExtension checks whether the other txSetStateValue is a valid extension of this txSetStateValue blockwise
// meaning it only adds new blocks or nothing, and the txNumber is consistent with the total number of transactions
// declared. Returns nil when the extension is valid, otherwise an error
// describing the first inconsistency found.
func (txSetStateValue *TxSetStateValue) IsValidBlockExtension(other *TxSetStateValue) error {
	if txSetStateValue.TxNumber > other.TxNumber {
		return fmt.Errorf("The next state for this transactions set contains less transactions. "+
			"Number of transactions info at current state: %d; other state: %d", txSetStateValue.TxNumber, other.TxNumber)
	}
	// Guard the slice accesses below: a malformed state with no index
	// information would otherwise panic on IndexAtBlock[len-1], and the
	// pairwise loop would panic if our index info were longer than the
	// extension's.
	if len(other.IndexAtBlock) == 0 {
		return errors.New("The next state for this transactions set contains no index information.")
	}
	if len(txSetStateValue.IndexAtBlock) > len(other.IndexAtBlock) {
		return fmt.Errorf("The next state for this transactions set contains less index information entries. Current state: %d; other state: %d", len(txSetStateValue.IndexAtBlock), len(other.IndexAtBlock))
	}
	// The last recorded entry must match the declared totals.
	if nextTxInx := other.IndexAtBlock[len(other.IndexAtBlock)-1].InBlockIndex; nextTxInx != other.TxNumber-1 {
		return fmt.Errorf("The index of the new set is not correct. Expected: [%d], Actual: [%d]", other.TxNumber-1, nextTxInx)
	}
	if nextBlock := other.IndexAtBlock[len(other.IndexAtBlock)-1].BlockNr; nextBlock != other.LastModifiedAtBlock {
		return fmt.Errorf("The block of the new set is not correct. Expected: [%d], Actual: [%d]", other.LastModifiedAtBlock, nextBlock)
	}
	// Every already-recorded (block, index) pair must be preserved
	// unchanged by the extension.
	for i, indexInfo := range txSetStateValue.IndexAtBlock {
		if indexInfo.BlockNr != other.IndexAtBlock[i].BlockNr || indexInfo.InBlockIndex != other.IndexAtBlock[i].InBlockIndex {
			return fmt.Errorf("The next state for this transactions set contains conflicting index information at "+
				"IndexAtBlock[%d]. Previous: Block[%d], StartInx[%d], next: Block[%d], StartInx[%d].", i, indexInfo.BlockNr, indexInfo.InBlockIndex, other.IndexAtBlock[i].BlockNr, other.IndexAtBlock[i].InBlockIndex)
		}
	}
	// Once the set has been introduced, an extension may not move the
	// active index (that is a mutation, see IsValidMutation).
	if txSetStateValue.IntroBlock != 0 && other.Index != txSetStateValue.Index {
		return errors.New("It is not possible to modify the index in a set extension.")
	}
	return nil
}
// IsValidMutation checks whether other is a valid mutation of this
// txSetStateValue: the modification block must be strictly newer, the
// set size and the per-block index information must stay unchanged, and
// the active index must actually change while remaining in bounds.
// Returns nil when valid, otherwise an error describing the violation.
func (txSetStateValue *TxSetStateValue) IsValidMutation(other *TxSetStateValue) error {
	if txSetStateValue.LastModifiedAtBlock >= other.LastModifiedAtBlock {
		return fmt.Errorf("It is not allow to modify a transaction before the last time it was modified. Block last time modified: [%d], Current modifying block: [%d]", txSetStateValue.LastModifiedAtBlock, other.LastModifiedAtBlock)
	}
	if txSetStateValue.TxNumber != other.TxNumber {
		return errors.New("A mutant transaction cannot extend a set.")
	}
	if txSetStateValue.Index == other.Index {
		return errors.New("Mutating, but the active index did not change.")
	}
	if other.Index >= other.TxNumber {
		return fmt.Errorf("Provided an out of bound new index for the transaction. Num transactions: [%d], provided new index: [%d]", other.TxNumber, other.Index)
	}
	if !reflect.DeepEqual(txSetStateValue.IndexAtBlock, other.IndexAtBlock) {
		// Fixed copy-pasted error text: this branch rejects altered
		// per-block index information, not a set extension.
		return errors.New("A mutant transaction cannot change the set's index information.")
	}
	return nil
}
// PositionForIndex binary-searches IndexAtBlock (which must be sorted
// ascending by InBlockIndex) for the first entry whose InBlockIndex is
// >= inx and returns its position. When inx lies beyond the last entry
// it returns len(IndexAtBlock) together with a not-found error.
func (txSetStateValue *TxSetStateValue) PositionForIndex(inx uint64) (int, error) {
	i := sort.Search(len(txSetStateValue.IndexAtBlock), func(i int) bool { return inx <= txSetStateValue.IndexAtBlock[i].InBlockIndex})
	if i < len(txSetStateValue.IndexAtBlock) {
		return i, nil
	} else {
		return i, fmt.Errorf("Block for index [%d] not found.", inx)
	}
}
// Bytes returns this block as an array of bytes (the protobuf wire
// encoding of the state value), or an error when marshalling fails.
func (txStateValue *TxSetStateValue) Bytes() ([]byte, error) {
	data, err := proto.Marshal(txStateValue)
	if err == nil {
		return data, nil
	}
	return nil, fmt.Errorf("Could not marshal txSetStateValue: %s", err)
}
// ToString renders the state value as a human-readable multi-line
// summary: nonce, introduction/modification block numbers, active
// index, declared transaction count, and the per-block index table.
func (txSetStVal *TxSetStateValue) ToString() string {
	var buffer bytes.Buffer
	buffer.WriteString(fmt.Sprintln("Nonce:", txSetStVal.Nonce))
	buffer.WriteString(fmt.Sprintln("Introduced at block number:", txSetStVal.IntroBlock))
	buffer.WriteString(fmt.Sprintln("Last modified at block number:", txSetStVal.LastModifiedAtBlock))
	buffer.WriteString(fmt.Sprintln("Active transaction index:", txSetStVal.Index))
	buffer.WriteString(fmt.Sprintln("Number of transactions in the set:", txSetStVal.TxNumber))
	buffer.WriteString(fmt.Sprintln("Number of transactions belonging to this set at a given block:"))
	buffer.WriteString(fmt.Sprintln("Block\t\t\tLast Index"))
	for _, inx := range txSetStVal.IndexAtBlock {
		buffer.WriteString(fmt.Sprint(inx.BlockNr, "\t\t\t", inx.InBlockIndex, "\n"))
	}
	return buffer.String()
}
// UnmarshalTxSetStateValue converts a byte array generated by Bytes() back to a block.
func UnmarshalTxSetStateValue(marshalledState []byte) (*TxSetStateValue, error) {
stateValue := &TxSetStateValue{}
err := proto.Unmarshal(marshalledState, stateValue)
if err != nil {
return nil, fmt.Errorf("Could not unmarshal txSetStateValue: %s", err)
}
return stateValue, nil
} | protos/txsetstatevalue.go | 0.638046 | 0.420064 | txsetstatevalue.go | starcoder |
package texttable
import (
"fmt"
"math"
)
// BoundingBox is a rectangular, axis-aligned bounding box described by
// its minimum and maximum corner coordinates.
type BoundingBox struct {
	XMin float64 `json:"xMin" xml:"xMin,attr"`
	YMin float64 `json:"yMin" xml:"yMin,attr"`
	XMax float64 `json:"xMax" xml:"xMax,attr"`
	YMax float64 `json:"yMax" xml:"yMax,attr"`
}

// Size returns the width and height of the BoundingBox.
func (bb BoundingBox) Size() (width, height float64) {
	return bb.XMax - bb.XMin, bb.YMax - bb.YMin
}

// SizeIsZero reports whether the box is degenerate, i.e. both its
// width and its height are zero.
func (bb BoundingBox) SizeIsZero() bool {
	return bb.XMax == bb.XMin && bb.YMax == bb.YMin
}

// IsZero reports whether all four coordinates are zero.
func (bb BoundingBox) IsZero() bool {
	return bb.XMin == 0 && bb.YMin == 0 && bb.XMax == 0 && bb.YMax == 0
}

// Width returns XMax - XMin.
func (bb BoundingBox) Width() float64 {
	return bb.XMax - bb.XMin
}

// Height returns YMax - YMin.
func (bb BoundingBox) Height() float64 {
	return bb.YMax - bb.YMin
}

// Center returns the x/y coordinates of the box center.
func (bb BoundingBox) Center() (x, y float64) {
	return bb.XMin + bb.Width()/2, bb.YMin + bb.Height()/2
}

// Contains reports whether the point x/y lies within the box
// (boundaries inclusive).
func (bb BoundingBox) Contains(x, y float64) bool {
	return x >= bb.XMin && x <= bb.XMax && y >= bb.YMin && y <= bb.YMax
}

// Include modifies bb to include other.
// If the size of bb is zero, then all values from other are assigned,
// so a zero box does not artificially pin the result to the origin.
func (bb *BoundingBox) Include(other BoundingBox) {
	if bb.SizeIsZero() {
		*bb = other
		return
	}
	bb.XMin = math.Min(bb.XMin, other.XMin)
	bb.YMin = math.Min(bb.YMin, other.YMin)
	bb.XMax = math.Max(bb.XMax, other.XMax)
	bb.YMax = math.Max(bb.YMax, other.YMax)
}

// Validate returns an error when any coordinate is NaN/Inf or when the
// box has negative width or height, and nil otherwise.
func (bb BoundingBox) Validate() error {
	if isInvalidFloat(bb.XMin) {
		return fmt.Errorf("invalid XMin in %s", bb)
	}
	if isInvalidFloat(bb.XMax) {
		return fmt.Errorf("invalid XMax in %s", bb)
	}
	if isInvalidFloat(bb.YMin) {
		return fmt.Errorf("invalid YMin in %s", bb)
	}
	if isInvalidFloat(bb.YMax) {
		return fmt.Errorf("invalid YMax in %s", bb)
	}
	if bb.Width() < 0 {
		return fmt.Errorf("negative width of %s", bb)
	}
	if bb.Height() < 0 {
		// Fixed typo in the error message ("heght" -> "height").
		return fmt.Errorf("negative height of %s", bb)
	}
	return nil
}

// String implements fmt.Stringer, formatting the two corner points.
func (bb BoundingBox) String() string {
	return fmt.Sprintf("BoundingBox((%f, %f), (%f, %f))", bb.XMin, bb.YMin, bb.XMax, bb.YMax)
}

// isInvalidFloat reports whether f is NaN or +/-Inf.
func isInvalidFloat(f float64) bool {
	return math.IsNaN(f) || math.IsInf(f, 0)
}
package unrolledlinkedlist
import (
"errors"
List "github.com/zimmski/container/list"
)
// node holds a single node with values of a unrolled linked list.
// Each node stores up to list.maxElements values contiguously.
type node struct {
	next *node // The node after this node in the list
	previous *node // The node before this node in the list
	values []interface{} // The values stored with this node
}

// iterator holds the iterator for a doubly linked list: a node pointer
// plus the offset of the current element within that node's values.
type iterator struct {
	current *node // The current node in traversal
	i int // The current index of the current node
}

// Next iterates to the next element in the list and returns the iterator, or nil if there is no next element.
// Stepping past the end of the current node advances to the following node.
func (iter *iterator) Next() List.Iterator {
	iter.i++
	if iter.current != nil && iter.i >= len(iter.current.values) {
		iter.i = 0
		iter.current = iter.current.next
	}
	if iter.current == nil {
		return nil
	}
	return iter
}

// Previous iterates to the previous element in the list and returns the iterator, or nil if there is no previous element.
// Stepping before the start of the current node moves to the last slot of the preceding node.
func (iter *iterator) Previous() List.Iterator {
	iter.i--
	if iter.current != nil && iter.i < 0 {
		iter.current = iter.current.previous
		if iter.current != nil {
			iter.i = len(iter.current.values) - 1
		}
	}
	if iter.current == nil {
		return nil
	}
	return iter
}

// Get returns the value of the iterator's current element.
// Calling it on an exhausted iterator (nil current) panics.
func (iter *iterator) Get() interface{} {
	return iter.current.values[iter.i]
}

// Set sets the value of the iterator's current element.
func (iter *iterator) Set(v interface{}) {
	iter.current.values[iter.i] = v
}

// list holds a unrolled linked list: a doubly linked chain of nodes,
// each carrying a small slice of values, plus cached length metadata.
type list struct {
	first *node // The first node of the list
	last *node // The last node of the list
	maxElements int // Maximum of elements per node
	len int // The current list length
}

// New returns a new unrolled linked list
// @param maxElements defines how many elements should fit in a node;
// values below 1 are rejected with a panic (programmer error).
func New(maxElements int) *list {
	if maxElements < 1 {
		panic("maxElements must be at least 1")
	}
	l := new(list)
	l.Clear()
	l.maxElements = maxElements
	return l
}

// Clear resets the list to zero elements and resets the list's meta data.
// Every node is unlinked and its value slice dropped so the garbage
// collector can reclaim them even if external iterators linger.
func (l *list) Clear() {
	i := l.first
	for i != nil {
		j := i.next
		i.next = nil
		i.previous = nil
		i.values = nil
		i = j
	}
	l.first = nil
	l.last = nil
	l.len = 0
}

// Len returns the current list length.
func (l *list) Len() int {
	return l.len
}

// Empty returns true if the current list length is zero.
func (l *list) Empty() bool {
	return l.len == 0
}
// insertElement inserts the given value at index ic in the given node,
// splitting or adding nodes as needed, and bumps the list length.
func (l *list) insertElement(v interface{}, c *node, ic int) {
	if c == nil || ic == 0 || len(c.values) == 0 { // begin of node
		// Insert a fresh node before c (or as the only node) holding
		// just the new value.
		n := l.insertNode(c, false)
		n.values = append(n.values, v)
	} else if len(c.values) == ic { // end of node
		n := c
		if len(n.values) == cap(n.values) {
			// c is full: append a new node after it.
			n = l.insertNode(c, true)
			// move half of the old node if possible (keeps nodes
			// balanced; only worthwhile for larger node capacities)
			if l.maxElements > 3 {
				ic = (len(c.values) + 1) / 2
				n.values = append(n.values, c.values[ic:len(c.values)]...)
				c.values = c.values[:ic]
			}
		}
		n.values = append(n.values, v)
	} else { // "middle" of the node
		// Split: everything from ic onwards moves into a new node after
		// c, then v takes slot ic of c.
		n := l.insertNode(c, true)
		n.values = append(n.values, c.values[ic:len(c.values)]...)
		c.values[ic] = v
		c.values = c.values[:ic+1]
	}
	l.len++
}

// removeElement removes the value at index ic in the given node and
// returns it. Underfilled nodes are merged with (or refilled from) the
// next node to keep occupancy near maxElements/2.
func (l *list) removeElement(c *node, ic int) interface{} {
	v := c.values[ic]
	// Shift the remaining values left over the removed slot.
	for ; ic < len(c.values)-1; ic++ {
		c.values[ic] = c.values[ic+1]
	}
	c.values = c.values[:len(c.values)-1]
	l.len--
	if len(c.values) == 0 {
		l.removeNode(c)
	} else if n := c.next; l.maxElements > 3 && n != nil && len(c.values) < l.maxElements/2 {
		if len(n.values)-2 < l.maxElements/2 { // copy the next node into the current node
			for _, v := range n.values {
				c.values = append(c.values, v)
			}
			l.removeNode(n)
		} else { // copy 2 elements of the next node to the current node
			c.values = append(c.values, n.values[0], n.values[1])
			for ic = 2; ic < len(n.values); ic++ {
				n.values[ic-2] = n.values[ic]
			}
			n.values = n.values[:len(n.values)-2]
		}
	}
	return v
}

// newNode returns a new node for the list with capacity preallocated
// to maxElements so appends within a node never reallocate.
func (l *list) newNode() *node {
	return &node{
		values: make([]interface{}, 0, l.maxElements),
	}
}

// getNode returns the node with the given value index and the elements index, or nil and -1 if there is no such element.
// It walks node by node, subtracting each node's value count from i.
func (l *list) getNode(i int) (*node, int) {
	for c := l.first; c != nil; c = c.next {
		if i < len(c.values) {
			return c, i
		}
		i -= len(c.values)
	}
	return nil, -1
}
// insertNode creates a new empty node, links it in after (after=true)
// or before (after=false) node p, updates first/last as needed, and
// returns the new node. When the list is empty, p is ignored and the
// new node becomes both first and last.
func (l *list) insertNode(p *node, after bool) *node {
	n := l.newNode()
	if l.len == 0 {
		l.first = n
		l.last = n
	} else if after {
		n.next = p.next
		if p.next != nil {
			p.next.previous = n
		}
		p.next = n
		n.previous = p
		if p == l.last {
			l.last = n
		}
	} else {
		if p == l.first {
			l.first = n
		} else {
			if p.previous != nil {
				p.previous.next = n
				n.previous = p.previous
			}
		}
		n.next = p
		p.previous = n
	}
	return n
}

// removeNode unlinks the given node from the list, clears its pointers
// and values (so stale iterators cannot keep data alive), and returns it.
// NOTE(review): a non-first node with a nil previous pointer would be
// left linked from its successor; that state should be unreachable via
// this package's own mutations — confirm if externally constructed.
func (l *list) removeNode(c *node) *node {
	if c == l.first {
		l.first = c.next
		if c.next != nil {
			c.next.previous = nil
		}
		// c is the last node
		if c == l.last {
			l.last = nil
		}
	} else {
		if c.previous != nil {
			c.previous.next = c.next
			if c.next != nil {
				c.next.previous = c.previous
			} else if c == l.last {
				l.last = c.previous
			}
		}
	}
	c.next = nil
	c.previous = nil
	c.values = nil
	return c
}

// newIterator returns a new iterator positioned at offset i of the
// given node.
func (l *list) newIterator(current *node, i int) *iterator {
	return &iterator{
		i: i,
		current: current,
	}
}
// Chan returns a channel which iterates from the front to the back of the list.
// A goroutine feeds the channel and closes it when exhausted; the caller
// must drain it fully or the goroutine leaks.
// NOTE(review): n is unused here — presumably intended as the channel
// buffer size required by the List interface; confirm and wire through.
func (l *list) Chan(n int) <-chan interface{} {
	ch := make(chan interface{})
	go func() {
		for iter := l.Iter(); iter != nil; iter = iter.Next() {
			ch <- iter.Get()
		}
		close(ch)
	}()
	return ch
}

// ChanBack returns a channel which iterates from the back to the front of the list.
// Same draining/leak caveat and unused n as Chan.
func (l *list) ChanBack(n int) <-chan interface{} {
	ch := make(chan interface{})
	go func() {
		for iter := l.IterBack(); iter != nil; iter = iter.Previous() {
			ch <- iter.Get()
		}
		close(ch)
	}()
	return ch
}

// Iter returns an iterator which starts at the front of the list, or nil if there are no elements in the list.
func (l *list) Iter() List.Iterator {
	if l.len == 0 {
		return nil
	}
	return l.newIterator(l.first, 0)
}

// IterBack returns an iterator which starts at the back of the list, or nil if there are no elements in the list.
func (l *list) IterBack() List.Iterator {
	if l.len == 0 {
		return nil
	}
	return l.newIterator(l.last, len(l.last.values)-1)
}
// First returns the first value of the list and true, or false if there is no value.
func (l *list) First() (interface{}, bool) {
	if l.len == 0 {
		return nil, false
	}
	return l.first.values[0], true
}

// Last returns the last value of the list and true, or false if there is no value.
func (l *list) Last() (interface{}, bool) {
	if l.len == 0 {
		return nil, false
	}
	return l.last.values[len(l.last.values)-1], true
}

// Get returns the value of the given index and nil, or an out of bound error if the index is incorrect.
// Linear in the number of nodes before the target.
func (l *list) Get(i int) (interface{}, error) {
	if i > -1 && i < l.len {
		for c := l.first; c != nil; c = c.next {
			if i < len(c.values) {
				return c.values[i], nil
			}
			i -= len(c.values)
		}
	}
	return nil, errors.New("index bounds out of range")
}

// GetFunc returns the value of the first element selected by the given function and true, or false if there is no such element.
func (l *list) GetFunc(m func(v interface{}) bool) (interface{}, bool) {
	for iter := l.Iter(); iter != nil; iter = iter.Next() {
		if m(iter.Get()) {
			return iter.Get(), true
		}
	}
	return nil, false
}

// Set sets the value of the given index and returns nil, or an out of bound error if the index is incorrect.
func (l *list) Set(i int, v interface{}) error {
	if i > -1 && i < l.len {
		for c := l.first; c != nil; c = c.next {
			if i < len(c.values) {
				c.values[i] = v
				return nil
			}
			i -= len(c.values)
		}
	}
	return errors.New("index bounds out of range")
}

// SetFunc sets the value of the first element selected by the given function and returns true, or false if there is no such element.
func (l *list) SetFunc(m func(v interface{}) bool, v interface{}) bool {
	for iter := l.Iter(); iter != nil; iter = iter.Next() {
		if m(iter.Get()) {
			iter.Set(v)
			return true
		}
	}
	return false
}

// Swap swaps the value of index i with the value of index j.
// Out-of-range indexes are silently ignored (getNode returns nil).
func (l *list) Swap(i, j int) {
	ni, ici := l.getNode(i)
	nj, icj := l.getNode(j)
	if ni != nil && nj != nil {
		ni.values[ici], nj.values[icj] = nj.values[icj], ni.values[ici]
	}
}

// Contains returns true if the value exists in the list, or false if it does not.
// Comparison uses == and therefore panics for uncomparable value types.
func (l *list) Contains(v interface{}) bool {
	_, ok := l.IndexOf(v)
	return ok
}

// IndexOf returns the first index of the given value and true, or false if it does not exists.
func (l *list) IndexOf(v interface{}) (int, bool) {
	i := 0
	for n := l.first; n != nil; n = n.next {
		for _, c := range n.values {
			if c == v {
				return i, true
			}
			i++
		}
	}
	return -1, false
}

// LastIndexOf returns the last index of the given value and true, or false if it does not exists.
// Scans nodes back to front so the match nearest the tail wins.
func (l *list) LastIndexOf(v interface{}) (int, bool) {
	i := l.len - 1
	for n := l.last; n != nil; n = n.previous {
		for j := len(n.values) - 1; j > -1; j-- {
			if n.values[j] == v {
				return i, true
			}
			i--
		}
	}
	return -1, false
}
// Copy returns an exact copy of the list (same maxElements, values
// pushed in order; values themselves are shared, not deep-copied).
func (l *list) Copy() List.List {
	n := New(l.maxElements)
	for iter := l.Iter(); iter != nil; iter = iter.Next() {
		n.Push(iter.Get())
	}
	return n
}

// Slice returns a copy of the list as slice.
func (l *list) Slice() []interface{} {
	a := make([]interface{}, l.len)
	j := 0
	for iter := l.Iter(); iter != nil; iter = iter.Next() {
		a[j] = iter.Get()
		j++
	}
	return a
}

// Insert inserts a value into the list and returns nil, or an out of bound error if the index is incorrect.
// Inserting at index len is allowed and equivalent to Push.
func (l *list) Insert(i int, v interface{}) error {
	if i < 0 || i > l.len {
		return errors.New("index bounds out of range")
	}
	if i != l.len {
		c, ic := l.getNode(i)
		l.insertElement(v, c, ic)
	} else { // getNode returns nil for lastIndex + 1
		l.Push(v)
	}
	return nil
}

// Remove removes and returns the value with the given index and nil, or an out of bound error if the index is incorrect.
func (l *list) Remove(i int) (interface{}, error) {
	if i < 0 || i >= l.len {
		return nil, errors.New("index bounds out of range")
	}
	return l.removeElement(l.getNode(i)), nil
}

// RemoveFirstOccurrence removes the first occurrence of the given value in the list and returns true, or false if there is no such element.
func (l *list) RemoveFirstOccurrence(v interface{}) bool {
	for n := l.first; n != nil; n = n.next {
		for ic, c := range n.values {
			if c == v {
				l.removeElement(n, ic)
				return true
			}
		}
	}
	return false
}

// RemoveLastOccurrence removes the last occurrence of the given value in the list and returns true, or false if there is no such element.
func (l *list) RemoveLastOccurrence(v interface{}) bool {
	for n := l.last; n != nil; n = n.previous {
		for ic := len(n.values) - 1; ic > -1; ic-- {
			if n.values[ic] == v {
				l.removeElement(n, ic)
				return true
			}
		}
	}
	return false
}
// Pop removes and returns the last element and true, or false if the
// list is empty. Success is derived from Remove's error so that a
// stored nil value is still reported as a successful pop (the previous
// `r != nil` check misreported it).
func (l *list) Pop() (interface{}, bool) {
	v, err := l.Remove(l.len - 1)
	return v, err == nil
}
// Push inserts the given value at the end of the list.
func (l *list) Push(v interface{}) {
	if l.last == nil {
		// Empty list: insertElement creates the first node.
		l.insertElement(v, nil, 0)
	} else {
		l.insertElement(v, l.last, len(l.last.values))
	}
}

// PushList pushes the given list, appending its values in order.
func (l *list) PushList(l2 List.List) {
	for iter := l2.Iter(); iter != nil; iter = iter.Next() {
		l.Push(iter.Get())
	}
}
// Shift removes and returns the first element and true, or false if the
// list is empty. Success is derived from Remove's error so that a
// stored nil value is still reported as a successful shift (the
// previous `r != nil` check misreported it).
func (l *list) Shift() (interface{}, bool) {
	v, err := l.Remove(0)
	return v, err == nil
}
// Unshift inserts the given value at the beginning of the list.
func (l *list) Unshift(v interface{}) {
	l.insertElement(v, l.first, 0)
}

// UnshiftList unshifts the given list. Note: because each value is
// unshifted individually, l2's values end up in reverse order at the
// front of l.
func (l *list) UnshiftList(l2 List.List) {
	for iter := l2.Iter(); iter != nil; iter = iter.Next() {
		l.Unshift(iter.Get())
	}
}
// MoveAfter moves the element at index i after the element at index m and returns nil, or an out of bound error if an index is incorrect.
// Implemented as Remove + Insert; when i precedes m the target index
// shifts left by one after removal, hence the m-- adjustment.
func (l *list) MoveAfter(i, m int) error {
	if i < 0 || i >= l.len {
		return errors.New("i bounds out of range")
	} else if m < 0 || m >= l.len {
		return errors.New("m bounds out of range")
	}
	// Already in place (i == m or i directly follows m): nothing to do.
	if i == m || i-1 == m {
		return nil
	}
	v, _ := l.Remove(i)
	if i < m {
		m--
	}
	l.Insert(m+1, v)
	return nil
}

// MoveToBack moves the element at index i to the back of the list and returns nil, or an out of bound error if the index is incorrect.
func (l *list) MoveToBack(i int) error {
	return l.MoveAfter(i, l.len-1)
}

// MoveBefore moves the element at index i before the element at index m and returns nil, or an out of bound error if an index is incorrect.
// Same Remove/Insert strategy and index adjustment as MoveAfter.
func (l *list) MoveBefore(i, m int) error {
	if i < 0 || i >= l.len {
		return errors.New("i bounds out of range")
	} else if m < 0 || m >= l.len {
		return errors.New("m bounds out of range")
	}
	// Already in place (i == m or i directly precedes m): nothing to do.
	if i == m || i == m-1 {
		return nil
	}
	v, _ := l.Remove(i)
	if i < m {
		m--
	}
	l.Insert(m, v)
	return nil
}
// MoveToFront moves the element at index i to the front of the list and returns nil, or an out of bound error if the index is incorrect
func (l *list) MoveToFront(i int) error {
return l.MoveBefore(i, 0)
} | list/unrolledlinkedlist/unrolledlinkedlist.go | 0.664431 | 0.543348 | unrolledlinkedlist.go | starcoder |
package kuddle
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"os"
"github.com/ghodss/yaml"
kyaml "k8s.io/apimachinery/pkg/util/yaml"
)
/*
Using the full k8s API to deserialize their typed, versioned objects is...
somewhat of an interesting errand, and one this code doesn't pursue.
One of the nearest examples I see to a clear entrypoint to the
factory/registry/codec/schema shenanigans is this snippet from the k8s main repo:
```
switch obj.(type) {
case runtime.Unstructured, *runtime.Unknown:
actualObj, err = runtime.Decode(
api.Codecs.UniversalDecoder(),
[]byte(/.../)))
default:
actualObj = obj
err = nil
}
```
However, the overall impression of this is:
- very complicated
- still results in a huge swath of case-switches for the real types
- using it is likely to result in a kind of tight coupling which will be
unpleasant for our own maintainability;
- and does not cause our code to Do The Right Thing for types it doesn't
recognize, which doesn't play well with extensions, nor ease upgrade cycles.
Instead, we proceed from some observations:
- PodSpecs are the single most solid and consistent part of the k8s API over time;
- PodSpecs are often embedded in other API types;
- but the way *we* are interested in PodSpecs never changes,
no matter what they're embedded in.
As a result, it makes sense for us to
deserialize objects nearly schema-free,
detect PodSpecs by their (quite clear) structure,
alter our fields of interest patch-wise (leaving the rest untouched),
and emit the result without further processing.
*/
// Interpolate decodes the given multi-document YAML/JSON manifest
// stream, rewrites every PodSpec-shaped object via interpolateObj, and
// re-emits the documents separated by "---". On error it returns the
// documents processed so far together with the error.
func Interpolate(k8sDocuments []byte, getFrm FormulaLoader) (result []byte, err error) {
	decoder := kyaml.NewYAMLOrJSONDecoder(bytes.NewBuffer(k8sDocuments), 2<<6)
	resultBuf := bytes.Buffer{}
	for {
		var slot interface{}
		if err := decoder.Decode(&slot); err == io.EOF {
			// Clean end of the document stream.
			return resultBuf.Bytes(), nil
		} else if err != nil {
			return resultBuf.Bytes(), err
		}
		if err := interpolateObj(slot, getFrm); err != nil {
			return resultBuf.Bytes(), err
		}
		// Check the marshal result BEFORE writing: the previous code
		// emitted the "---" separator (and a nil body) even when
		// marshalling failed.
		bs, err := yaml.Marshal(&slot)
		if err != nil {
			return resultBuf.Bytes(), err
		}
		resultBuf.WriteString("---\n")
		resultBuf.Write(bs)
	}
}
// InterpolateFile reads the manifests at k8sDocumentPath, runs them
// through Interpolate, and writes the result to writePath (mode 0644).
func InterpolateFile(k8sDocumentPath string, getFrm FormulaLoader, writePath string) error {
	f, err := os.Open(k8sDocumentPath)
	if err != nil {
		return err
	}
	defer f.Close()
	bs, err := ioutil.ReadAll(f)
	if err == nil {
		bs, err = Interpolate(bs, getFrm)
	}
	if err != nil {
		return err
	}
	return ioutil.WriteFile(writePath, bs, 0644)
}
func interpolateObj(obj interface{}, getFrm FormulaLoader) error {
switch obj2 := obj.(type) {
case map[string]interface{}:
spec, ok := obj2["spec"]
if ok { // Might be a PodSpec!
specMap, ok := spec.(map[string]interface{})
if !ok {
goto notAPod
}
containers, ok := specMap["containers"]
if !ok {
goto notAPod
}
_, ok = containers.([]interface{})
if !ok {
goto notAPod
}
// Looks like a PodSpec!
formulize(specMap, getFrm)
}
notAPod:
// If this object didn't contain any pod spec, recurse; its children might.
for _, v := range obj2 {
if err := interpolateObj(v, getFrm); err != nil {
return err
}
}
return nil
case []interface{}:
// Always recurse.
for _, v := range obj2 {
if err := interpolateObj(v, getFrm); err != nil {
return err
}
}
return nil
case interface{}:
// All leaf types like string and float64 end up here.
// None of which can contain a PodSpec, so, ignore.
return nil
default:
panic(fmt.Errorf("unhandled type %T", obj))
}
} | pkg/kuddle/serial.go | 0.573559 | 0.47591 | serial.go | starcoder |
package v1alpha2
// GetIsDefault returns IsDefault, or false (the devfile:default:value
// default) when the field pointer is nil, i.e. unset in the devfile.
func (in *CommandGroup) GetIsDefault() bool {
	return getBoolOrDefault(in.IsDefault, false)
}

// GetHotReloadCapable returns HotReloadCapable, or false (the
// devfile:default:value default) when unset.
func (in *ExecCommand) GetHotReloadCapable() bool {
	return getBoolOrDefault(in.HotReloadCapable, false)
}

// GetParallel returns Parallel, or false (the devfile:default:value
// default) when unset.
func (in *CompositeCommand) GetParallel() bool {
	return getBoolOrDefault(in.Parallel, false)
}

// GetDedicatedPod returns DedicatedPod, or false (the
// devfile:default:value default) when unset.
func (in *Container) GetDedicatedPod() bool {
	return getBoolOrDefault(in.DedicatedPod, false)
}

// GetAutoBuild returns AutoBuild, or false (the devfile:default:value
// default) when unset.
func (in *ImageUnion) GetAutoBuild() bool {
	return getBoolOrDefault(in.AutoBuild, false)
}

// GetRootRequired returns RootRequired, or false (the
// devfile:default:value default) when unset.
func (in *Dockerfile) GetRootRequired() bool {
	return getBoolOrDefault(in.RootRequired, false)
}

// GetDeployByDefault returns DeployByDefault, or false (the
// devfile:default:value default) when unset.
func (in *K8sLikeComponent) GetDeployByDefault() bool {
	return getBoolOrDefault(in.DeployByDefault, false)
}

// GetEphemeral returns Ephemeral, or false (the devfile:default:value
// default) when unset.
func (in *Volume) GetEphemeral() bool {
	return getBoolOrDefault(in.Ephemeral, false)
}

// GetSecure returns Secure, or false (the devfile:default:value
// default) when unset.
func (in *Endpoint) GetSecure() bool {
	return getBoolOrDefault(in.Secure, false)
}
// getBoolOrDefault dereferences input, falling back to defaultVal when
// the pointer is nil (i.e. the optional boolean was not set).
func getBoolOrDefault(input *bool, defaultVal bool) bool {
	if input != nil {
		return *input
	}
	return defaultVal
}
package bheap
import (
"container/heap"
"godev/basic"
)
// Heap implements heap.Interface from "container/heap" over a slice of
// values ordered by the configured comparator. Push/Pop/Len/Less/Swap
// exist to satisfy that interface and should only be invoked through
// the container/heap package functions, which maintain the heap
// invariant.
type Heap struct {
	values []interface{}
	comparator basic.Comparator
}

// Len reports the number of stored values (sort.Interface).
func (h Heap) Len() int {
	return len(h.values)
}

// Less orders values by the comparator (sort.Interface).
func (h Heap) Less(i, j int) bool {
	return h.comparator(h.values[i], h.values[j]) < 0
}

// Swap exchanges the values at i and j (sort.Interface). A value
// receiver is fine here because the shared backing slice is mutated.
func (h Heap) Swap(i, j int) {
	h.values[i], h.values[j] = h.values[j], h.values[i]
}

// Push appends value v (heap.Interface contract; container/heap then
// sifts it into position).
func (h *Heap) Push(v interface{}) {
	h.values = append(h.values, v)
}

// Pop removes and returns the LAST element, per the heap.Interface
// contract: heap.Pop first swaps the root to the end, so the returned
// value is the heap root when called via container/heap.
func (h *Heap) Pop() interface{} {
	size := len(h.values)
	lastNode := h.values[size-1]
	// avoid memory leak
	h.values[size-1] = nil
	h.values = h.values[:size-1]
	return lastNode
}

// Size returns the number of values inside the heap
func (h *Heap) Size() int {
	return len(h.values)
}

// Empty returns true if no value inside the heap
func (h *Heap) Empty() bool {
	return len(h.values) == 0
}

// Clear clears values inside the heap
func (h *Heap) Clear() {
	h.values = ([]interface{})(nil)
}

// Values returns values inside the heap
func (h *Heap) Values() []interface{} {
	return h.values
}

// set overwrites the value at index i without re-establishing the heap
// invariant (callers must follow up with heap.Fix); panics on an
// out-of-range index.
func (h *Heap) set(i int, v interface{}) {
	if i > len(h.values)-1 || i < 0 {
		panic("invalid index")
	}
	h.values[i] = v
}
// MinHeap keeps its smallest value (per Comparator) at the root.
type MinHeap struct {
	heap       *Heap
	Comparator basic.Comparator
}

// Init must be called before use; it wires the comparator into the
// backing Heap and establishes the heap invariant.
func (h *MinHeap) Init() {
	h.heap = &Heap{comparator: h.Comparator}
	heap.Init(h.heap)
}

// Push inserts v into the heap.
func (h *MinHeap) Push(v interface{}) {
	heap.Push(h.heap, v)
}

// Pop removes and returns the minimum value.
func (h *MinHeap) Pop() interface{} {
	return heap.Pop(h.heap)
}

// Remove deletes and returns the value at index i.
func (h *MinHeap) Remove(i int) interface{} {
	return heap.Remove(h.heap, i)
}

// Set replaces the value at index i with v and re-establishes the heap
// invariant around that position.
func (h *MinHeap) Set(i int, v interface{}) {
	h.heap.set(i, v)
	heap.Fix(h.heap, i)
}

// Size returns the number of values inside the heap.
func (h *MinHeap) Size() int {
	return h.heap.Size()
}

// Empty reports whether the heap holds no values.
func (h *MinHeap) Empty() bool {
	return h.heap.Empty()
}

// Clear drops all stored values.
func (h *MinHeap) Clear() {
	h.heap.Clear()
}

// Values exposes the underlying storage.
func (h *MinHeap) Values() []interface{} {
	return h.heap.Values()
}
// MaxHeap heap stored maximum value in its root
type MaxHeap struct {
heap *Heap
Comparator basic.Comparator
}
// Init initializes MaxHeap
func (h *MaxHeap) Init() {
h.heap = &Heap{
values: nil,
comparator: func(a, b interface{}) int {
return -h.Comparator(a, b)
},
}
heap.Init(h.heap)
}
// Push pushes values into the heap
func (h *MaxHeap) Push(v interface{}) {
heap.Push(h.heap, v)
}
// Pop pops heap roof
func (h *MaxHeap) Pop() interface{} {
return heap.Pop(h.heap)
}
// Remove removes value with index `i`
func (h *MaxHeap) Remove(i int) interface{} {
return heap.Remove(h.heap, i)
}
// Set sets index `i` with value `v`
func (h *MaxHeap) Set(i int, v interface{}) {
h.heap.set(i, v)
heap.Fix(h.heap, i)
}
// Size returns the number of values inside the heap
func (h *MaxHeap) Size() int {
return len(h.heap.values)
}
// Empty returns true if no value inside the heap
func (h *MaxHeap) Empty() bool {
return len(h.heap.values) == 0
}
// Clear clears values inside the heap
func (h *MaxHeap) Clear() {
h.heap.values = ([]interface{})(nil)
}
// Values returns values inside the heap
func (h *MaxHeap) Values() []interface{} {
return h.heap.values
} | basic/datastructure/heap/bheap/bheap.go | 0.809012 | 0.431824 | bheap.go | starcoder |
package seq
import (
"errors"
"fmt"
"runtime"
"sync"
"github.com/shenwei356/util/byteutil"
)
/*Alphabet defines a biological alphabet. Attention that,
**the letters are case sensitive**.

For example, DNA:

	DNA, _ = NewAlphabet(
		"DNA",
		[]byte("acgtACGT"),
		[]byte("tgcaTGCA"),
		[]byte(" -"),
		[]byte("nN"))
*/
type Alphabet struct {
	t         string // name of the alphabet, e.g. "DNA"
	isUnlimit bool   // when true, every letter is considered valid
	letters   []byte // primary letters
	pairs     []byte // complement of each primary letter, index-aligned
	gap       []byte // gap symbols
	ambiguous []byte // ambiguity codes

	allLetters  []byte // letters + gap + ambiguous
	pairLetters []byte // lookup table indexed by byte value; 0 means invalid
}

// NewAlphabet is the constructor for *Alphabet. letters and pairs must
// have the same length (pairs[i] is the complement of letters[i]); gap
// and ambiguous symbols map to themselves. For an unlimited alphabet the
// letter arguments are ignored.
func NewAlphabet(
	t string,
	isUnlimit bool,
	letters []byte,
	pairs []byte,
	gap []byte,
	ambiguous []byte,
) (*Alphabet, error) {

	a := &Alphabet{
		t:           t,
		isUnlimit:   isUnlimit,
		letters:     letters,
		pairs:       pairs,
		gap:         gap,
		ambiguous:   ambiguous,
		allLetters:  []byte{},
		pairLetters: []byte{},
	}

	if isUnlimit {
		return a, nil
	}

	if len(letters) != len(pairs) {
		return a, errors.New("mismatch of length of letters and pairs")
	}

	a.allLetters = append(a.allLetters, letters...)
	// add gap and ambiguity codes
	a.allLetters = append(a.allLetters, gap...)
	a.allLetters = append(a.allLetters, ambiguous...)

	// Build a lookup table indexed by the byte value itself -- faster
	// than a map. Size it to the largest letter seen.
	max := -1
	for _, b := range a.allLetters {
		if int(b) > max {
			max = int(b)
		}
	}
	a.pairLetters = make([]byte, max+1)
	for i, b := range letters {
		a.pairLetters[b] = pairs[i]
	}
	for _, b := range gap {
		a.pairLetters[b] = b // gaps pair with themselves
	}
	for _, b := range ambiguous {
		a.pairLetters[b] = b // ambiguity codes pair with themselves
	}

	return a, nil
}

// Clone returns a deep copy of the Alphabet; every byte slice is copied.
func (a *Alphabet) Clone() *Alphabet {
	cp := func(b []byte) []byte { return []byte(string(b)) }
	return &Alphabet{
		t:           a.t,
		isUnlimit:   a.isUnlimit,
		letters:     cp(a.letters),
		pairs:       cp(a.pairs),
		gap:         cp(a.gap),
		ambiguous:   cp(a.ambiguous),
		allLetters:  cp(a.allLetters),
		pairLetters: cp(a.pairLetters),
	}
}
// Type returns the name of the alphabet (e.g. "DNA", "Protein").
func (a *Alphabet) Type() string {
	return a.t
}

// Letters returns the primary letters (without gaps and ambiguity codes).
func (a *Alphabet) Letters() []byte {
	return a.letters
}

// Gaps returns the gap symbols.
func (a *Alphabet) Gaps() []byte {
	return a.gap
}

// AmbiguousLetters returns the ambiguity codes.
func (a *Alphabet) AmbiguousLetters() []byte {
	return a.ambiguous
}

// AllLetters returns all letters: primary letters plus gaps and ambiguity codes.
func (a *Alphabet) AllLetters() []byte {
	return a.allLetters
}

// String returns the name of the alphabet, making Alphabet a fmt.Stringer.
func (a *Alphabet) String() string {
	return a.t
}
// IsValidLetter reports whether b belongs to the alphabet (always true
// for an unlimited alphabet).
func (a *Alphabet) IsValidLetter(b byte) bool {
	if a.isUnlimit {
		return true
	}
	idx := int(b)
	// a zero entry in the lookup table marks an invalid letter
	return idx < len(a.pairLetters) && a.pairLetters[idx] != 0
}
// ValidSeqLengthThreshold is the minimum sequence length that triggers
// parallel checking in IsValid; shorter sequences are checked serially.
var ValidSeqLengthThreshold = 10000

// ValidateWholeSeq determines whether IsValid checks every base of a
// sequence, or only the portion covered by the worker chunks.
var ValidateWholeSeq = true

// ValidSeqThreads is the number of goroutines used when checking a
// sequence in parallel.
var ValidSeqThreads = runtime.NumCPU()

// seqCheckStatus carries the result of one validation worker.
type seqCheckStatus struct {
	err error
}
// IsValid checks that every byte of s is a letter (or gap/ambiguity code)
// of the alphabet. It returns nil for empty input, a nil receiver, or an
// unlimited alphabet.
//
// Sequences shorter than ValidSeqLengthThreshold are checked serially;
// longer ones are split into chunks validated concurrently by up to
// ValidSeqThreads goroutines. When ValidateWholeSeq is true the last
// worker also absorbs the integer-division remainder, so no byte is
// skipped (the previous implementation left the tail unchecked);
// otherwise only the evenly divisible prefix is inspected, preserving the
// documented best-effort behavior.
//
// This version fixes several defects of the original: a data race on a
// shared "finished" flag, a possible double close of the result channel
// (and of the done channel when two workers failed concurrently), and the
// mutation of the package-level ValidSeqThreads as a side effect.
func (a *Alphabet) IsValid(s []byte) error {
	if len(s) == 0 {
		return nil
	}
	if a == nil || a.isUnlimit {
		return nil
	}

	l := len(s)
	if l < ValidSeqLengthThreshold {
		// short sequence: a serial scan is cheaper than spawning workers
		for _, b := range s {
			i := int(b)
			if i >= len(a.pairLetters) || a.pairLetters[i] == 0 {
				return fmt.Errorf("seq: invalid %s letter: %s", a, []byte{b})
			}
		}
		return nil
	}

	// Use a local worker count instead of mutating ValidSeqThreads.
	threads := ValidSeqThreads
	if threads <= 0 {
		threads = 1
	}
	if threads > l {
		threads = l
	}
	chunkSize := l / threads

	var (
		wg       sync.WaitGroup
		mu       sync.Mutex
		firstErr error // first invalid-letter error found by any worker
	)
	for t := 0; t < threads; t++ {
		start := t * chunkSize
		end := start + chunkSize
		// Integer division can leave a remainder; when the whole
		// sequence must be validated, the last worker takes it.
		if ValidateWholeSeq && t == threads-1 {
			end = l
		}

		wg.Add(1)
		go func(start, end int) {
			defer wg.Done()
			for i := start; i < end; i++ {
				j := int(s[i])
				if j >= len(a.pairLetters) || a.pairLetters[j] == 0 {
					mu.Lock()
					if firstErr == nil {
						firstErr = fmt.Errorf("seq: invalid %s letter: %s at %d", a, []byte{s[i]}, i)
					}
					mu.Unlock()
					return
				}
			}
		}(start, end)
	}
	wg.Wait()

	return firstErr
}
// PairLetter returns the complement of b (or b itself for gaps and
// ambiguity codes). An error is returned for letters outside the
// alphabet; an unlimited alphabet maps every byte to itself.
func (a *Alphabet) PairLetter(b byte) (byte, error) {
	if a.isUnlimit {
		return b, nil
	}
	idx := int(b)
	// a zero entry in the lookup table marks an invalid letter
	if idx >= len(a.pairLetters) || a.pairLetters[idx] == 0 {
		return b, fmt.Errorf("seq: invalid letter: %c", b)
	}
	return a.pairLetters[idx], nil
}
/*Six alphabets are pre-defined:

  DNA           Deoxyribonucleotide code
  DNAredundant  DNA + Ambiguity Codes
  RNA           Ribonucleotide code
  RNAredundant  RNA + Ambiguity Codes
  Protein       Amino Acid single-letter Code
  Unlimit       Self-defined, including all 26 English letters

*/
var (
	DNA          *Alphabet
	DNAredundant *Alphabet
	RNA          *Alphabet
	RNAredundant *Alphabet
	Protein      *Alphabet
	Unlimit      *Alphabet

	// letter-set maps used by GuessAlphabet, built in init
	abProtein      map[byte]bool
	abDNAredundant map[byte]bool
	abDNA          map[byte]bool
	abRNAredundant map[byte]bool
	abRNA          map[byte]bool
)
// init builds the pre-defined alphabets and the letter-set maps used by
// GuessAlphabet. NewAlphabet cannot fail for these fixed tables (letters
// and pairs have equal lengths), so the errors are discarded.
func init() {
	DNA, _ = NewAlphabet(
		"DNA",
		false,
		[]byte("acgtACGT"),
		[]byte("tgcaTGCA"),
		[]byte(" -."),
		[]byte("nN."))

	DNAredundant, _ = NewAlphabet(
		"DNAredundant",
		false,
		[]byte("acgtryswkmbdhvACGTRYSWKMBDHV"),
		[]byte("tgcayrswmkvhdbTGCAYRSWMKVHDB"),
		[]byte(" -."),
		[]byte("nN."))

	RNA, _ = NewAlphabet(
		"RNA",
		false,
		[]byte("acguACGU"),
		[]byte("ugcaUGCA"),
		[]byte(" -."),
		[]byte("nN"))

	RNAredundant, _ = NewAlphabet(
		"RNAredundant",
		false,
		[]byte("acguryswkmbdhvACGURYSWKMBDHV"),
		[]byte("ugcayrswmkvhdbUGCAYRSWMKVHDB"),
		[]byte(" -."),
		[]byte("nN"))

	// note: 'x'/'X' act as the ambiguity code for Protein
	Protein, _ = NewAlphabet(
		"Protein",
		false,
		[]byte("abcdefghijklmnopqrstuvwyzABCDEFGHIJKLMNOPQRSTUVWYZ"),
		[]byte("abcdefghijklmnopqrstuvwyzABCDEFGHIJKLMNOPQRSTUVWYZ"),
		[]byte(" -"),
		[]byte("xX*_."))

	Unlimit, _ = NewAlphabet(
		"Unlimit",
		true,
		nil,
		nil,
		nil,
		nil)

	// letter sets for alphabet guessing
	abProtein = slice2map(byteutil.Alphabet(Protein.AllLetters()))
	abDNAredundant = slice2map(byteutil.Alphabet(DNAredundant.AllLetters()))
	abDNA = slice2map(byteutil.Alphabet(DNA.AllLetters()))
	abRNAredundant = slice2map(byteutil.Alphabet(RNAredundant.AllLetters()))
	abRNA = slice2map(byteutil.Alphabet(RNA.AllLetters()))
}
// AlphabetGuessSeqLengthThreshold is the length of the sequence prefix of
// the first FASTA record on which FastaRecord guesses the sequence type.
// 0 means the whole sequence is inspected.
var AlphabetGuessSeqLengthThreshold = 10000
// GuessAlphabet guesses the alphabet of a sequence by checking its letter
// set against the pre-defined alphabets, from the most restrictive (DNA)
// to the least (Protein); Unlimit is the fallback. Only the first
// AlphabetGuessSeqLengthThreshold bytes are inspected (unless 0).
func GuessAlphabet(seqs []byte) *Alphabet {
	if len(seqs) == 0 {
		return Unlimit
	}

	sample := seqs
	if AlphabetGuessSeqLengthThreshold > 0 && len(seqs) > AlphabetGuessSeqLengthThreshold {
		// only look at a prefix to reduce guessing time
		sample = seqs[0:AlphabetGuessSeqLengthThreshold]
	}
	letters := slice2map(byteutil.Alphabet(sample))

	// candidates ordered from most to least restrictive
	for _, c := range []struct {
		set      map[byte]bool
		alphabet *Alphabet
	}{
		{abDNA, DNA},
		{abRNA, RNA},
		{abDNAredundant, DNAredundant},
		{abRNAredundant, RNAredundant},
		{abProtein, Protein},
	} {
		if isSubset(letters, c.set) {
			return c.alphabet
		}
	}
	return Unlimit
}
// GuessAlphabetLessConservatively guesses like GuessAlphabet but widens
// DNA to DNAredundant and RNA to RNAredundant.
func GuessAlphabetLessConservatively(seqs []byte) *Alphabet {
	switch ab := GuessAlphabet(seqs); ab {
	case DNA:
		return DNAredundant
	case RNA:
		return RNAredundant
	default:
		return ab
	}
}
// isSubset reports whether every key of query is also a key of subject.
func isSubset(query, subject map[byte]bool) bool {
	for b := range query {
		_, found := subject[b]
		if !found {
			return false
		}
	}
	return true
}
// slice2map converts a byte slice into a set (map with true values);
// duplicates collapse into a single key.
func slice2map(s []byte) map[byte]bool {
	set := make(map[byte]bool, len(s))
	for _, b := range s {
		set[b] = true
	}
	return set
}
package discovery
import "fmt"
// Humidifier describes an MQTT-discoverable humidifier/dehumidifier for
// Home Assistant MQTT discovery. Field comments and defaults follow the
// discovery schema.
type Humidifier struct {
	// A list of MQTT topics subscribed to receive availability (online/offline) updates. Must not be used together with `availability_topic`
	// Default: <no value>
	Availability []Availability `json:"availability,omitempty"`

	// When `availability` is configured, this controls the conditions needed to set the entity to `available`. Valid entries are `all`, `any`, and `latest`. If set to `all`, `payload_available` must be received on all configured availability topics before the entity is marked as online. If set to `any`, `payload_available` must be received on at least one configured availability topic before the entity is marked as online. If set to `latest`, the last `payload_available` or `payload_not_available` received on any configured availability topic controls the availability
	// Default: latest
	AvailabilityMode string `json:"availability_mode,omitempty"`

	// Defines a [template](/docs/configuration/templating/#processing-incoming-data) to extract device's availability from the `availability_topic`. To determine the devices's availability result of this template will be compared to `payload_available` and `payload_not_available`
	// Default: <no value>
	AvailabilityTemplate string `json:"availability_template,omitempty"`

	// The MQTT topic subscribed to receive availability (online/offline) updates. Must not be used together with `availability`
	// Default: <no value>
	AvailabilityTopic string `json:"availability_topic,omitempty"`

	// Defines a [template](/docs/configuration/templating/#processing-incoming-data) to generate the payload to send to `command_topic`
	// Default: <no value>
	CommandTemplate string `json:"command_template,omitempty"`

	// The MQTT topic to publish commands to change the humidifier state
	// Default: <no value>
	CommandTopic string `json:"command_topic"`

	// Information about the device this humidifier is a part of to tie it into the [device registry](https://developers.home-assistant.io/docs/en/device_registry_index.html). Only works through [MQTT discovery](/docs/mqtt/discovery/) and when [`unique_id`](#unique_id) is set. At least one of identifiers or connections must be present to identify the device
	// Default: <no value>
	Device *Device `json:"device,omitempty"`

	// The device class of the MQTT device. Must be either `humidifier` or `dehumidifier`
	// Default: humidifier
	DeviceClass string `json:"device_class,omitempty"`

	// Flag which defines if the entity should be enabled when first added
	// Default: true
	EnabledByDefault bool `json:"enabled_by_default,omitempty"`

	// The [category](https://developers.home-assistant.io/docs/core/entity#generic-properties) of the entity
	// Default: None
	EntityCategory string `json:"entity_category,omitempty"`

	// [Icon](/docs/configuration/customizing-devices/#icon) for the entity
	// Default: <no value>
	Icon string `json:"icon,omitempty"`

	// Defines a [template](/docs/configuration/templating/#processing-incoming-data) to extract the JSON dictionary from messages received on the `json_attributes_topic`. Usage example can be found in [MQTT sensor](/integrations/sensor.mqtt/#json-attributes-template-configuration) documentation
	// Default: <no value>
	JsonAttributesTemplate string `json:"json_attributes_template,omitempty"`

	// The MQTT topic subscribed to receive a JSON dictionary payload and then set as sensor attributes. Usage example can be found in [MQTT sensor](/integrations/sensor.mqtt/#json-attributes-topic-configuration) documentation
	// Default: <no value>
	JsonAttributesTopic string `json:"json_attributes_topic,omitempty"`

	// The maximum target humidity percentage that can be set
	// Default: 100
	MaxHumidity int `json:"max_humidity,omitempty"`

	// The minimum target humidity percentage that can be set
	// Default: 0
	MinHumidity int `json:"min_humidity,omitempty"`

	// Defines a [template](/docs/configuration/templating/#processing-incoming-data) to generate the payload to send to `mode_command_topic`
	// Default: <no value>
	ModeCommandTemplate string `json:"mode_command_template,omitempty"`

	// The MQTT topic to publish commands to change the `mode` on the humidifier. This attribute must be configured together with the `modes` attribute
	// Default: <no value>
	ModeCommandTopic string `json:"mode_command_topic,omitempty"`

	// Defines a [template](/docs/configuration/templating/#processing-incoming-data) to extract a value for the humidifier `mode` state
	// Default: <no value>
	ModeStateTemplate string `json:"mode_state_template,omitempty"`

	// The MQTT topic subscribed to receive the humidifier `mode`
	// Default: <no value>
	ModeStateTopic string `json:"mode_state_topic,omitempty"`

	// List of available modes this humidifier is capable of running at. Common examples include `normal`, `eco`, `away`, `boost`, `comfort`, `home`, `sleep`, `auto` and `baby`. These examples offer built-in translations but other custom modes are allowed as well. This attribute must be configured together with the `mode_command_topic` attribute
	// Default: []
	// NOTE(review): declared as string although the default `[]` and the
	// schema wording suggest a list of strings — confirm against the
	// discovery schema before changing the type.
	Modes string `json:"modes,omitempty"`

	// The name of the humidifier
	// Default: MQTT humidifier
	Name string `json:"name,omitempty"`

	// Used instead of `name` for automatic generation of `entity_id
	// Default: <no value>
	ObjectId string `json:"object_id,omitempty"`

	// Flag that defines if humidifier works in optimistic mode
	// Default: `true` if no state topic defined, else `false`.
	Optimistic bool `json:"optimistic,omitempty"`

	// The payload that represents the available state
	// Default: online
	PayloadAvailable string `json:"payload_available,omitempty"`

	// The payload that represents the unavailable state
	// Default: offline
	PayloadNotAvailable string `json:"payload_not_available,omitempty"`

	// The payload that represents the stop state
	// Default: OFF
	PayloadOff string `json:"payload_off,omitempty"`

	// The payload that represents the running state
	// Default: ON
	PayloadOn string `json:"payload_on,omitempty"`

	// A special payload that resets the `target_humidity` state attribute to `None` when received at the `target_humidity_state_topic`
	// Default: None
	PayloadResetHumidity string `json:"payload_reset_humidity,omitempty"`

	// A special payload that resets the `mode` state attribute to `None` when received at the `mode_state_topic`
	// Default: None
	PayloadResetMode string `json:"payload_reset_mode,omitempty"`

	// The maximum QoS level of the state topic
	// Default: 0
	Qos int `json:"qos,omitempty"`

	// If the published message should have the retain flag on or not
	// Default: true
	Retain bool `json:"retain,omitempty"`

	// The MQTT topic subscribed to receive state updates
	// Default: <no value>
	StateTopic string `json:"state_topic,omitempty"`

	// Defines a [template](/docs/configuration/templating/#processing-incoming-data) to extract a value from the state
	// Default: <no value>
	StateValueTemplate string `json:"state_value_template,omitempty"`

	// Defines a [template](/docs/configuration/templating/#processing-incoming-data) to generate the payload to send to `target_humidity_command_topic`
	// Default: <no value>
	TargetHumidityCommandTemplate string `json:"target_humidity_command_template,omitempty"`

	// The MQTT topic to publish commands to change the humidifier target humidity state based on a percentage
	// Default: <no value>
	TargetHumidityCommandTopic string `json:"target_humidity_command_topic"`

	// Defines a [template](/docs/configuration/templating/#processing-incoming-data) to extract a value for the humidifier `target_humidity` state
	// Default: <no value>
	TargetHumidityStateTemplate string `json:"target_humidity_state_template,omitempty"`

	// The MQTT topic subscribed to receive humidifier target humidity
	// Default: <no value>
	TargetHumidityStateTopic string `json:"target_humidity_state_topic,omitempty"`

	// An ID that uniquely identifies this humidifier. If two humidifiers have the same unique ID, Home Assistant will raise an exception
	// Default: <no value>
	UniqueId string `json:"unique_id,omitempty"`
}
// AnnounceTopic returns the topic to announce the discoverable Humidifier
// Topic has the format below:
// <discovery_prefix>/<component>/<object_id>/config
// 'object_id' is either the UniqueId, the Name, or a hash of the Humidifier
func (d *Humidifier) AnnounceTopic(prefix string) string {
topicFormat := "%s/humidifier/%s/config"
objectID := ""
switch {
case d.UniqueId != "":
objectID = d.UniqueId
case d.Name != "":
objectID = d.Name
default:
objectID = hash(d)
}
return fmt.Sprintf(topicFormat, prefix, objectID)
} | humidifier.go | 0.828662 | 0.457016 | humidifier.go | starcoder |
package dfs
import (
"fmt"
"github.com/wangyoucao577/algorithms_practice/graph"
)
// implementMethod selects which DFS implementation to run.
type implementMethod int

const (
	// Recurse implements DFS by recursion.
	Recurse implementMethod = iota

	// StackBased implements DFS with an explicit stack and a loop.
	StackBased
)

// dfsTree records one tree of the DFS forest by its root node.
type dfsTree struct {
	root graph.NodeID // DFS start node, i.e. root of a tree
}

// Dfs defined a structure to store result after DFS search
type Dfs struct {
	time int // as timestamp during DFS, should be a global var during DFS

	forest []dfsTree // generated forest by DFS

	nodesAttr nodeAttrArray // store nodes' attr during DFS
	edgesAttr edgeAttrArray // store edges' attr during DFS
}

// SearchControlCondition is returned by a SearchControl callback to tell
// the search whether to continue or break.
type SearchControlCondition int

const (
	// Break will let the search func break immediately.
	Break SearchControlCondition = iota

	// Continue will let the search go on.
	Continue
)

// SearchControl is a callback invoked per visited node; its return value
// controls whether the search continues or breaks.
type SearchControl func(graph.NodeID) SearchControlCondition
// NewDfs runs a depth-first search on g starting from root and returns
// the resulting search context. m selects the recursive or the
// stack-based implementation. The returned error is currently always nil.
func NewDfs(g graph.Graph, root graph.NodeID, m implementMethod) (*Dfs, error) {
	d := &Dfs{0, []dfsTree{}, nodeAttrArray{}, edgeAttrArray{}}
	d.initialize(g)

	// the start node becomes the root of the single tree in the forest
	d.forest = append(d.forest, dfsTree{root})

	switch m {
	case Recurse:
		d.dfsRecurseVisit(g, root)
	case StackBased:
		d.dfsStackBasedVisit(g, root, nil)
	}

	return d, nil
}
// NewControllableDfs runs a stack-based depth-first search on g starting
// from root. The control callback can stop the search early by returning
// Break; only the stack-based implementation supports this. The returned
// error is currently always nil.
func NewControllableDfs(g graph.Graph, root graph.NodeID, control SearchControl) (*Dfs, error) {
	d := &Dfs{0, []dfsTree{}, nodeAttrArray{}, edgeAttrArray{}}
	d.initialize(g)

	// the start node becomes the root of the single tree in the forest
	d.forest = append(d.forest, dfsTree{root})

	d.dfsStackBasedVisit(g, root, control)
	return d, nil
}
// NewDfsForest runs depth-first search over every node of g, producing a
// forest with one tree per undiscovered start node. m selects the
// recursive or the stack-based implementation. The returned error is
// currently always nil.
func NewDfsForest(g graph.Graph, m implementMethod) (*Dfs, error) {
	d := &Dfs{0, []dfsTree{}, nodeAttrArray{}, edgeAttrArray{}}
	d.initialize(g)

	g.IterateAllNodes(func(n graph.NodeID) {
		if d.nodesAttr[n].nodeColor != white {
			return // already discovered in a previous tree
		}

		// n starts a new tree of the forest
		d.forest = append(d.forest, dfsTree{n})
		switch m {
		case Recurse:
			d.dfsRecurseVisit(g, n)
		case StackBased:
			d.dfsStackBasedVisit(g, n, nil)
		}
	})

	return d, nil
}
// Query retrieve path from source to target based on a dfs tree/forest
func (d *Dfs) Query(source, target graph.NodeID) (graph.Path, error) {
path := graph.Path{}
curr := target
for {
path = append(path, curr)
parent := d.nodesAttr[curr].parent
if parent == source {
path = append(path, source)
break
} else if parent == graph.InvalidNodeID {
return nil, fmt.Errorf("no valid path from %v to %v", source, target)
}
curr = parent
}
for i, j := 0, len(path)-1; i < j; i, j = i+1, j-1 {
path[i], path[j] = path[j], path[i]
}
return path, nil
} | dfs/dfs.go | 0.617974 | 0.480479 | dfs.go | starcoder |
package pcapng
import (
"bytes"
"fmt"
"strings"
"github.com/bearmini/pcapng-go/pcapng/blocktype"
"github.com/bearmini/pcapng-go/pcapng/optioncode"
"github.com/pkg/errors"
)
/*
4.6. Interface Statistics Block
The Interface Statistics Block (ISB) contains the capture statistics
for a given interface and it is optional. The statistics are
referred to the interface defined in the current Section identified
by the Interface ID field. An Interface Statistics Block is normally
placed at the end of the file, but no assumptions can be taken about
its position - it can even appear multiple times for the same
interface.
The format of the Interface Statistics Block is shown in Figure 14.
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+---------------------------------------------------------------+
0 | Block Type = 0x00000005 |
+---------------------------------------------------------------+
4 | Block Total Length |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
8 | Interface ID |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
12 | Timestamp (High) |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
16 | Timestamp (Low) |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
20 / /
/ Options (variable) /
/ /
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Block Total Length |
+---------------------------------------------------------------+
Figure 14: Interface Statistics Block Format
The fields have the following meaning:
o Block Type: The block type of the Interface Statistics Block is 5.
o Block Total Length: total size of this block, as described in
Section 3.1.
o Interface ID: specifies the interface these statistics refers to;
the correct interface will be the one whose Interface Description
Block (within the current Section of the file) is identified by
same number (see Section 4.2) of this field.
o Timestamp: time this statistics refers to. The format of the
timestamp is the same already defined in the Enhanced Packet Block
(Section 4.3).
o Options: optionally, a list of options (formatted according to the
rules defined in Section 3.5) can be present.
All the statistic fields are defined as options in order to deal with
systems that do not have a complete set of statistics. Therefore, In
addition to the options defined in Section 3.5, the following options
are valid within this block:
+------------------+------+--------+-------------------+
| Name | Code | Length | Multiple allowed? |
+------------------+------+--------+-------------------+
| isb_starttime | 2 | 8 | no |
| isb_endtime | 3 | 8 | no |
| isb_ifrecv | 4 | 8 | no |
| isb_ifdrop | 5 | 8 | no |
| isb_filteraccept | 6 | 8 | no |
| isb_osdrop | 7 | 8 | no |
| isb_usrdeliv | 8 | 8 | no |
+------------------+------+--------+-------------------+
Table 7: Interface Statistics Block Options
isb_starttime:
The isb_starttime option specifies the time the capture
started; time will be stored in two blocks of four octets
each. The format of the timestamp is the same as the one
defined in the Enhanced Packet Block (Section 4.3).
Example: '97 c3 04 00 aa 47 ca 64' in Little Endian, decodes
to 06/29/2012 06:16:50 UTC.
isb_endtime:
The isb_endtime option specifies the time the capture ended;
time will be stored in two blocks of four octets each. The
format of the timestamp is the same as the one defined in the
Enhanced Packet Block (Section 4.3).
Example: '96 c3 04 00 73 89 6a 65', in Little Endian, decodes
to 06/29/2012 06:17:00 UTC.
isb_ifrecv:
The isb_ifrecv option specifies the 64-bit unsigned integer
number of packets received from the physical interface
starting from the beginning of the capture.
Example: the decimal number 100.
isb_ifdrop:
The isb_ifdrop option specifies the 64-bit unsigned integer
number of packets dropped by the interface due to lack of
resources starting from the beginning of the capture.
Example: '0'.
isb_filteraccept:
The isb_filteraccept option specifies the 64-bit unsigned
integer number of packets accepted by filter starting from
the beginning of the capture.
Example: the decimal number 100.
isb_osdrop:
The isb_osdrop option specifies the 64-bit unsigned integer
number of packets dropped by the operating system starting
from the beginning of the capture.
Example: '0'.
isb_usrdeliv:
The isb_usrdeliv option specifies the 64-bit unsigned integer
number of packets delivered to the user starting from the
beginning of the capture. The value contained in this field
can be different from the value 'isb_filteraccept -
isb_osdrop' because some packets could still be in the OS
buffers when the capture ended.
Example: '0'.
All the fields that refer to packet counters are 64-bit values,
represented with the octet order of the current section. Special
care must be taken in accessing these fields: since all the blocks
are aligned to a 32-bit boundary, such fields are not guaranteed to
be aligned on a 64-bit boundary.
*/
// InterfaceStatisticsBlock (ISB, block type 0x00000005) carries capture
// statistics for one interface of the current section; see the format
// description above. It may appear anywhere in the file, even multiple
// times for the same interface.
type InterfaceStatisticsBlock struct {
	BlockType        blocktype.BlockType
	BlockTotalLength uint32
	InterfaceID      uint32 // index of the Interface Description Block these statistics refer to
	TimestampHigh    uint32 // upper 32 bits of the 64-bit timestamp
	TimestampLow     uint32 // lower 32 bits of the 64-bit timestamp
	Options          InterfaceStatisticsBlockOptions
}
// GetType returns the block type stored in the block header.
func (b *InterfaceStatisticsBlock) GetType() blocktype.BlockType {
	return b.BlockType
}
// String renders the block header fields and its options on one line for
// debugging/logging.
func (b *InterfaceStatisticsBlock) String() string {
	return fmt.Sprintf(
		"%s block_len:%d if_id:%d ts_hi:%d ts_lo:%d options:{%s}",
		b.BlockType.String(),
		b.BlockTotalLength,
		b.InterfaceID,
		b.TimestampHigh,
		b.TimestampLow,
		b.Options.String(),
	)
}
// InterfaceStatisticsBlockOptions holds the optional statistics of an
// ISB. A nil pointer means the option was absent; all counters are
// 64-bit values per the pcapng format.
type InterfaceStatisticsBlockOptions struct {
	StartTime    *uint64 // isb_starttime (code 2): time the capture started
	EndTime      *uint64 // isb_endtime (code 3): time the capture ended
	IFRecv       *uint64 // isb_ifrecv (code 4): packets received from the interface
	IFDrop       *uint64 // isb_ifdrop (code 5): packets dropped by the interface
	FilterAccept *uint64 // isb_filteraccept (code 6): packets accepted by the filter
	OSDrop       *uint64 // isb_osdrop (code 7): packets dropped by the OS
	UsrDeliv     *uint64 // isb_usrdeliv (code 8): packets delivered to the user

	Comments      []string
	CustomOptions []CustomOption
}
// String renders the set options as a comma-separated "name:value" list;
// unset options are omitted. Comments and CustomOptions are not included.
func (o InterfaceStatisticsBlockOptions) String() string {
	options := make([]string, 0)
	if o.StartTime != nil {
		// *o.StartTime is a uint64, so %d is the correct verb (the old
		// %s printed "%!s(uint64=...)").
		options = append(options, fmt.Sprintf("start_time:%d", *o.StartTime))
	}
	if o.EndTime != nil {
		options = append(options, fmt.Sprintf("end_time:%d", *o.EndTime))
	}
	if o.IFRecv != nil {
		options = append(options, fmt.Sprintf("if_recv:%d", *o.IFRecv))
	}
	if o.IFDrop != nil {
		options = append(options, fmt.Sprintf("if_drop:%d", *o.IFDrop))
	}
	if o.FilterAccept != nil {
		options = append(options, fmt.Sprintf("filter_accept:%d", *o.FilterAccept))
	}
	if o.OSDrop != nil {
		options = append(options, fmt.Sprintf("os_drop:%d", *o.OSDrop))
	}
	if o.UsrDeliv != nil {
		options = append(options, fmt.Sprintf("usr_deliv:%d", *o.UsrDeliv))
	}
	return strings.Join(options, ",")
}
func (r *Reader) parseInterfaceStatisticsBlock(blockTotalLength uint32, bodyBytes []byte) (*InterfaceStatisticsBlock, error) {
br := newEndiannessAwareReader(r.endian, bytes.NewReader(bodyBytes))
ifid, err := br.readUint32()
if err != nil {
return nil, errors.Wrap(err, "unable to read interface id")
}
tshi, err := br.readUint32()
if err != nil {
return nil, errors.Wrap(err, "unable to read timestamp hi")
}
tslo, err := br.readUint32()
if err != nil {
return nil, errors.Wrap(err, "unable to read timestamp lo")
}
var opts InterfaceStatisticsBlockOptions
loop:
for {
oc, err := br.readUint16()
if err != nil {
break
}
ol, err := br.readUint16()
if err != nil {
break
}
switch optioncode.OptionCode(oc) {
case optioncode.EndOfOpt:
break loop
case optioncode.Comment:
readCommonOptionComment(ol, br, &opts.Comments)
case optioncode.CustomUTF8, optioncode.CustomUTF8WithoutNull, optioncode.CustomBinary, optioncode.CustomBinaryShouldNotCopied:
err := readCustomOption(oc, ol, br, &opts.CustomOptions)
if err != nil {
return nil, err
}
case optioncode.ISB_StartTime:
ov, err := br.readUint64()
if err != nil {
return nil, errors.Wrap(err, "unable to read isb_starttime")
}
opts.StartTime = &ov
case optioncode.ISB_EndTime:
ov, err := br.readUint64()
if err != nil {
return nil, errors.Wrap(err, "unable to read isb_endtime")
}
opts.EndTime = &ov
case optioncode.ISB_IFRecv:
ov, err := br.readUint64()
if err != nil {
return nil, errors.Wrap(err, "unable to read isb_ifrecv")
}
opts.IFRecv = &ov
case optioncode.ISB_IFDrop:
ov, err := br.readUint64()
if err != nil {
return nil, errors.Wrap(err, "unable to read isb_ifdrop")
}
opts.IFDrop = &ov
case optioncode.ISB_FilterAccept:
ov, err := br.readUint64()
if err != nil {
return nil, errors.Wrap(err, "unable to read isb_filteraccept")
}
opts.FilterAccept = &ov
case optioncode.ISB_OSDrop:
ov, err := br.readUint64()
if err != nil {
return nil, errors.Wrap(err, "unable to read isb_osdrop")
}
opts.OSDrop = &ov
case optioncode.ISB_UsrDeliv:
ov, err := br.readUint64()
if err != nil {
return nil, errors.Wrap(err, "unable to read isb_usrdeliv")
}
opts.UsrDeliv = &ov
default:
_, err := br.readBytes(uint(ol))
if err != nil {
return nil, errors.Wrapf(err, "unable to read unknown option (%d)", oc)
}
}
// read padding
padLen := 4 - (ol & 0x3)
_, err = br.readBytes(uint(padLen))
if err != nil {
return nil, errors.Wrap(err, "unable to read padding in an option value")
}
}
return &InterfaceStatisticsBlock{
BlockType: blocktype.InterfaceStatistics,
BlockTotalLength: blockTotalLength,
InterfaceID: ifid,
TimestampHigh: tshi,
TimestampLow: tslo,
Options: opts,
}, nil
} | pcapng/interface_statistics_block.go | 0.692642 | 0.435181 | interface_statistics_block.go | starcoder |
package ros
import (
"bytes"
"encoding/binary"
"math"
"github.com/pkg/errors"
)
// LEByteDecoder is a little-endian byte decoder, implements the ByteDecoder interface.
type LEByteDecoder struct{}

// compile-time check that LEByteDecoder satisfies ByteDecoder
var _ ByteDecoder = LEByteDecoder{}
// Helpers
func CheckSize(buf *bytes.Reader, size int) error {
if size < 0 {
return errors.New("unexpected negative size")
}
if buf.Len() < size {
return errors.New("buffer size too small")
}
return nil
}
// Array decoders.

// DecodeBoolArray reads size bytes from buf and decodes each one as a
// boolean: any non-zero byte is true.
func (d LEByteDecoder) DecodeBoolArray(buf *bytes.Reader, size int) ([]bool, error) {
	if err := CheckSize(buf, size); err != nil {
		return nil, errors.Wrap(err, "decoding bool array")
	}

	slice := make([]bool, size)
	for i := range slice {
		b, err := buf.ReadByte()
		if err != nil {
			return slice, errors.New("Could not read 1 byte from buffer")
		}
		slice[i] = b != 0x00
	}
	return slice, nil
}
// DecodeInt8Array reads size bytes and reinterprets each one as an int8.
func (d LEByteDecoder) DecodeInt8Array(buf *bytes.Reader, size int) ([]int8, error) {
	if err := CheckSize(buf, size); err != nil {
		return nil, errors.Wrap(err, "decoding i8 array")
	}
	out := make([]int8, size)
	var raw [1]byte
	for i := range out {
		if n, err := buf.Read(raw[:]); n != 1 || err != nil {
			return out, errors.New("Could not read 1 byte from buffer")
		}
		out[i] = int8(raw[0])
	}
	return out, nil
}
// DecodeUint8Array reads size raw bytes from buf with a single Read call.
func (d LEByteDecoder) DecodeUint8Array(buf *bytes.Reader, size int) ([]uint8, error) {
	if err := CheckSize(buf, size); err != nil {
		return nil, errors.Wrap(err, "decoding u8 array")
	}
	out := make([]uint8, size)
	if size == 0 {
		// Early return to avoid a potential EOF error on an empty read.
		return out, nil
	}
	if n, err := buf.Read(out); n != size || err != nil {
		return out, errors.New("Did not read entire uint8 buffer")
	}
	return out, nil
}
// DecodeInt16Array decodes size little-endian int16 values (2 bytes each).
func (d LEByteDecoder) DecodeInt16Array(buf *bytes.Reader, size int) ([]int16, error) {
	if err := CheckSize(buf, size*2); err != nil {
		return nil, errors.Wrap(err, "decoding i16 array")
	}
	out := make([]int16, size)
	var raw [2]byte
	for i := range out {
		if n, err := buf.Read(raw[:]); n != 2 || err != nil {
			return out, errors.New("Could not read 2 bytes from buffer")
		}
		out[i] = int16(binary.LittleEndian.Uint16(raw[:]))
	}
	return out, nil
}
// DecodeUint16Array decodes size little-endian uint16 values (2 bytes each).
func (d LEByteDecoder) DecodeUint16Array(buf *bytes.Reader, size int) ([]uint16, error) {
	if err := CheckSize(buf, size*2); err != nil {
		return nil, errors.Wrap(err, "decoding u16 array")
	}
	out := make([]uint16, size)
	var raw [2]byte
	for i := range out {
		if n, err := buf.Read(raw[:]); n != 2 || err != nil {
			return out, errors.New("Could not read 2 bytes from buffer")
		}
		out[i] = binary.LittleEndian.Uint16(raw[:])
	}
	return out, nil
}
// DecodeInt32Array decodes size little-endian int32 values (4 bytes each).
func (d LEByteDecoder) DecodeInt32Array(buf *bytes.Reader, size int) ([]int32, error) {
	if err := CheckSize(buf, size*4); err != nil {
		return nil, errors.Wrap(err, "decoding i32 array")
	}
	out := make([]int32, size)
	var raw [4]byte
	for i := range out {
		if n, err := buf.Read(raw[:]); n != 4 || err != nil {
			return out, errors.New("Could not read 4 bytes from buffer")
		}
		out[i] = int32(binary.LittleEndian.Uint32(raw[:]))
	}
	return out, nil
}
// DecodeUint32Array decodes size little-endian uint32 values (4 bytes each).
func (d LEByteDecoder) DecodeUint32Array(buf *bytes.Reader, size int) ([]uint32, error) {
	if err := CheckSize(buf, size*4); err != nil {
		return nil, errors.Wrap(err, "decoding u32 array")
	}
	out := make([]uint32, size)
	var raw [4]byte
	for i := range out {
		if n, err := buf.Read(raw[:]); n != 4 || err != nil {
			return out, errors.New("Could not read 4 bytes from buffer")
		}
		out[i] = binary.LittleEndian.Uint32(raw[:])
	}
	return out, nil
}
// DecodeFloat32Array decodes size little-endian IEEE-754 single-precision
// values, wrapping each in a JsonFloat32.
func (d LEByteDecoder) DecodeFloat32Array(buf *bytes.Reader, size int) ([]JsonFloat32, error) {
	if err := CheckSize(buf, size*4); err != nil {
		return nil, errors.Wrap(err, "decoding f32 array")
	}
	out := make([]JsonFloat32, size)
	var raw [4]byte
	for i := range out {
		if n, err := buf.Read(raw[:]); n != 4 || err != nil {
			return out, errors.New("Could not read 4 bytes from buffer")
		}
		out[i] = JsonFloat32{F: math.Float32frombits(binary.LittleEndian.Uint32(raw[:]))}
	}
	return out, nil
}
// DecodeInt64Array decodes size little-endian int64 values (8 bytes each).
func (d LEByteDecoder) DecodeInt64Array(buf *bytes.Reader, size int) ([]int64, error) {
	if err := CheckSize(buf, size*8); err != nil {
		return nil, errors.Wrap(err, "decoding i64 array")
	}
	out := make([]int64, size)
	var raw [8]byte
	for i := range out {
		if n, err := buf.Read(raw[:]); n != 8 || err != nil {
			return out, errors.New("Could not read 8 bytes from buffer")
		}
		out[i] = int64(binary.LittleEndian.Uint64(raw[:]))
	}
	return out, nil
}
// DecodeUint64Array decodes size little-endian uint64 values (8 bytes each).
func (d LEByteDecoder) DecodeUint64Array(buf *bytes.Reader, size int) ([]uint64, error) {
	if err := CheckSize(buf, size*8); err != nil {
		return nil, errors.Wrap(err, "decoding u64 array")
	}
	out := make([]uint64, size)
	var raw [8]byte
	for i := range out {
		if n, err := buf.Read(raw[:]); n != 8 || err != nil {
			return out, errors.New("Could not read 8 bytes from buffer")
		}
		out[i] = binary.LittleEndian.Uint64(raw[:])
	}
	return out, nil
}
// DecodeFloat64Array decodes size little-endian IEEE-754 double-precision
// values, wrapping each in a JsonFloat64.
func (d LEByteDecoder) DecodeFloat64Array(buf *bytes.Reader, size int) ([]JsonFloat64, error) {
	if err := CheckSize(buf, size*8); err != nil {
		return nil, errors.Wrap(err, "decoding f64 array")
	}
	out := make([]JsonFloat64, size)
	var raw [8]byte
	for i := range out {
		if n, err := buf.Read(raw[:]); n != 8 || err != nil {
			return out, errors.New("Could not read 8 bytes from buffer")
		}
		out[i] = JsonFloat64{F: math.Float64frombits(binary.LittleEndian.Uint64(raw[:]))}
	}
	return out, nil
}
// DecodeStringArray decodes size length-prefixed strings. Each entry is
// encoded as [size|bytes] where size is a little-endian u32, so the upfront
// check uses 4 bytes per entry as the minimum possible payload.
func (d LEByteDecoder) DecodeStringArray(buf *bytes.Reader, size int) ([]string, error) {
	if err := CheckSize(buf, size*4); err != nil {
		return nil, errors.Wrap(err, "decoding string array")
	}
	out := make([]string, size)
	for i := range out {
		strLen, err := d.DecodeUint32(buf)
		if err != nil {
			return out, errors.Wrap(err, "decoding string array")
		}
		raw, err := d.DecodeUint8Array(buf, int(strLen))
		if err != nil {
			return out, errors.Wrap(err, "decoding string array")
		}
		out[i] = string(raw)
	}
	return out, nil
}
// DecodeTimeArray decodes size Time structs, each encoded as [sec|nanosec]
// where both fields are little-endian u32 (8 bytes per entry).
func (d LEByteDecoder) DecodeTimeArray(buf *bytes.Reader, size int) ([]Time, error) {
	if err := CheckSize(buf, size*8); err != nil {
		return nil, errors.Wrap(err, "decoding time array")
	}
	out := make([]Time, size)
	for i := range out {
		var err error
		if out[i].Sec, err = d.DecodeUint32(buf); err != nil {
			return out, errors.Wrap(err, "decoding time array")
		}
		if out[i].NSec, err = d.DecodeUint32(buf); err != nil {
			return out, errors.Wrap(err, "decoding time array")
		}
	}
	return out, nil
}
// DecodeDurationArray decodes size Duration structs, each encoded as
// [sec|nanosec] where both fields are little-endian u32 (8 bytes per entry).
func (d LEByteDecoder) DecodeDurationArray(buf *bytes.Reader, size int) ([]Duration, error) {
	if err := CheckSize(buf, size*8); err != nil {
		return nil, errors.Wrap(err, "decoding duration array")
	}
	out := make([]Duration, size)
	for i := range out {
		var err error
		if out[i].Sec, err = d.DecodeUint32(buf); err != nil {
			return out, errors.Wrap(err, "decoding duration array")
		}
		if out[i].NSec, err = d.DecodeUint32(buf); err != nil {
			return out, errors.Wrap(err, "decoding duration array")
		}
	}
	return out, nil
}
// DecodeMessageArray decodes size DynamicMessages of the given type.
func (d LEByteDecoder) DecodeMessageArray(buf *bytes.Reader, size int, msgType *DynamicMessageType) ([]Message, error) {
	// Not an exact size check, but it at least rejects an impossible allocation.
	if err := CheckSize(buf, size); err != nil {
		return nil, errors.Wrap(err, "decoding message array")
	}
	out := make([]Message, size)
	for i := range out {
		// Skip the zero-value initialization; Deserialize overwrites it anyway.
		msg := &DynamicMessage{dynamicType: msgType}
		if err := msg.Deserialize(buf); err != nil {
			return out, errors.Wrap(err, "decoding message array")
		}
		out[i] = msg
	}
	return out, nil
}
// Singular decodes.

// DecodeBool decodes a boolean: any non-zero byte is true.
func (d LEByteDecoder) DecodeBool(buf *bytes.Reader) (bool, error) {
	b, err := d.DecodeUint8(buf)
	if err != nil {
		return false, err
	}
	return b != 0x00, nil
}
// DecodeInt8 decodes a single int8.
func (d LEByteDecoder) DecodeInt8(buf *bytes.Reader) (int8, error) {
	v, err := d.DecodeUint8(buf)
	if err != nil {
		return 0, err
	}
	return int8(v), nil
}
// DecodeUint8 decodes a single byte as a uint8.
func (d LEByteDecoder) DecodeUint8(buf *bytes.Reader) (uint8, error) {
	var raw [1]byte
	if n, err := buf.Read(raw[:]); n != 1 || err != nil {
		return 0, errors.New("Could not read 1 byte from buffer")
	}
	return raw[0], nil
}
// DecodeInt16 decodes a little-endian int16.
func (d LEByteDecoder) DecodeInt16(buf *bytes.Reader) (int16, error) {
	v, err := d.DecodeUint16(buf)
	if err != nil {
		return 0, err
	}
	return int16(v), nil
}
// DecodeUint16 decodes a little-endian uint16 (2 bytes).
func (d LEByteDecoder) DecodeUint16(buf *bytes.Reader) (uint16, error) {
	var raw [2]byte
	if n, err := buf.Read(raw[:]); n != 2 || err != nil {
		return 0, errors.New("Could not read 2 bytes from buffer")
	}
	return binary.LittleEndian.Uint16(raw[:]), nil
}
// DecodeInt32 decodes a little-endian int32.
func (d LEByteDecoder) DecodeInt32(buf *bytes.Reader) (int32, error) {
	v, err := d.DecodeUint32(buf)
	if err != nil {
		return 0, err
	}
	return int32(v), nil
}
// DecodeUint32 decodes a little-endian uint32 (4 bytes).
func (d LEByteDecoder) DecodeUint32(buf *bytes.Reader) (uint32, error) {
	var raw [4]byte
	if n, err := buf.Read(raw[:]); n != 4 || err != nil {
		return 0, errors.New("Could not read 4 bytes from buffer")
	}
	return binary.LittleEndian.Uint32(raw[:]), nil
}
// DecodeFloat32 decodes a little-endian IEEE-754 single-precision value.
func (d LEByteDecoder) DecodeFloat32(buf *bytes.Reader) (JsonFloat32, error) {
	bits, err := d.DecodeUint32(buf)
	if err != nil {
		return JsonFloat32{}, err
	}
	return JsonFloat32{F: math.Float32frombits(bits)}, nil
}
// DecodeInt64 decodes a little-endian int64.
func (d LEByteDecoder) DecodeInt64(buf *bytes.Reader) (int64, error) {
	v, err := d.DecodeUint64(buf)
	if err != nil {
		return 0, err
	}
	return int64(v), nil
}
// DecodeUint64 decodes a little-endian uint64 (8 bytes).
func (d LEByteDecoder) DecodeUint64(buf *bytes.Reader) (uint64, error) {
	var raw [8]byte
	if n, err := buf.Read(raw[:]); n != 8 || err != nil {
		return 0, errors.New("Could not read 8 bytes from buffer")
	}
	return binary.LittleEndian.Uint64(raw[:]), nil
}
// DecodeFloat64 decodes a little-endian IEEE-754 double-precision value.
func (d LEByteDecoder) DecodeFloat64(buf *bytes.Reader) (JsonFloat64, error) {
	bits, err := d.DecodeUint64(buf)
	if err != nil {
		return JsonFloat64{}, err
	}
	return JsonFloat64{F: math.Float64frombits(bits)}, nil
}
// DecodeString decodes one length-prefixed string: [size|bytes] where size
// is a little-endian u32.
func (d LEByteDecoder) DecodeString(buf *bytes.Reader) (string, error) {
	strLen, err := d.DecodeUint32(buf)
	if err != nil {
		return "", errors.Wrap(err, "decoding string")
	}
	raw, err := d.DecodeUint8Array(buf, int(strLen))
	if err != nil {
		return "", errors.Wrap(err, "decoding string")
	}
	return string(raw), nil
}
// DecodeTime decodes a Time struct encoded as [sec|nanosec], both
// little-endian u32.
func (d LEByteDecoder) DecodeTime(buf *bytes.Reader) (Time, error) {
	var out Time
	var err error
	if out.Sec, err = d.DecodeUint32(buf); err != nil {
		return Time{}, errors.Wrap(err, "decoding time")
	}
	if out.NSec, err = d.DecodeUint32(buf); err != nil {
		return Time{}, errors.Wrap(err, "decoding time")
	}
	return out, nil
}
// DecodeDuration decodes a Duration struct encoded as [sec|nanosec], both
// little-endian u32.
func (d LEByteDecoder) DecodeDuration(buf *bytes.Reader) (Duration, error) {
	var out Duration
	var err error
	if out.Sec, err = d.DecodeUint32(buf); err != nil {
		return Duration{}, errors.Wrap(err, "decoding duration")
	}
	if out.NSec, err = d.DecodeUint32(buf); err != nil {
		return Duration{}, errors.Wrap(err, "decoding duration")
	}
	return out, nil
}
// DecodeMessage decodes a DynamicMessage.
func (d LEByteDecoder) DecodeMessage(buf *bytes.Reader, msgType *DynamicMessageType) (Message, error) {
// Skip the zero value initialization, this would just get discarded anyway.
msg := &DynamicMessage{}
msg.dynamicType = msgType
if err := msg.Deserialize(buf); err != nil {
return nil, errors.Wrap(err, "decoding message")
}
return msg, nil
} | ros/byte_decoder_le.go | 0.761627 | 0.435781 | byte_decoder_le.go | starcoder |
// Package flow contains a number of constructors for Flow nodes
// that are convenient for testing.
package flow
import (
"math/rand"
"net/url"
"regexp"
"github.com/grailbio/reflow"
"github.com/grailbio/reflow/flow"
"github.com/grailbio/reflow/values"
)
// randString returns a random alphanumeric identifier of 10-20 characters,
// used to give test flow nodes distinct Idents.
func randString() string {
	const (
		minLen = 10
		maxLen = 20
		// BUG FIX: the charset previously read "...wxyx...", which duplicated
		// 'x' and omitted 'z' entirely.
		chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
	)
	l := minLen + rand.Intn(maxLen-minLen+1)
	b := make([]byte, l)
	for i := range b {
		b[i] = chars[rand.Intn(len(chars))]
	}
	return string(b)
}
// Exec constructs a new flow.Exec node with a random identifier.
func Exec(image, cmd string, resources reflow.Resources, deps ...*flow.Flow) *flow.Flow {
	return &flow.Flow{
		Op:        flow.Exec,
		Ident:     randString(),
		Deps:      deps,
		Cmd:       cmd,
		Image:     image,
		Resources: resources,
	}
}
// Intern constructs a new flow.Intern node; panics if rawurl does not parse.
func Intern(rawurl string) *flow.Flow {
	u, err := url.Parse(rawurl)
	if err != nil {
		panic(err)
	}
	return &flow.Flow{
		Op:    flow.Intern,
		Ident: randString(),
		URL:   u,
	}
}
// Extern constructs a new flow.Extern node over dep; panics if rawurl does
// not parse.
func Extern(rawurl string, dep *flow.Flow) *flow.Flow {
	u, err := url.Parse(rawurl)
	if err != nil {
		panic(err)
	}
	return &flow.Flow{
		Op:    flow.Extern,
		Ident: randString(),
		Deps:  []*flow.Flow{dep},
		URL:   u,
	}
}
// K constructs a new flow.K node whose digest and Ident derive from id.
func K(id string, k func(vs []values.T) *flow.Flow, deps ...*flow.Flow) *flow.Flow {
	return &flow.Flow{
		Op:         flow.K,
		Deps:       deps,
		K:          k,
		FlowDigest: reflow.Digester.FromString(id),
		Ident:      id,
	}
}
// Groupby constructs a new flow.Groupby node; panics if re is not a valid regexp.
func Groupby(re string, dep *flow.Flow) *flow.Flow {
	return &flow.Flow{
		Op:    flow.Groupby,
		Ident: randString(),
		Deps:  []*flow.Flow{dep},
		Re:    regexp.MustCompile(re),
	}
}
// Map constructs a new flow.Map node and initializes its map state.
func Map(fn func(*flow.Flow) *flow.Flow, dep *flow.Flow) *flow.Flow {
	f := &flow.Flow{
		Op:      flow.Map,
		Ident:   randString(),
		Deps:    []*flow.Flow{dep},
		MapFunc: fn,
	}
	f.MapInit()
	return f
}
// Collect constructs a new flow.Collect node; panics if re is not a valid regexp.
func Collect(re, repl string, dep *flow.Flow) *flow.Flow {
	return &flow.Flow{
		Op:    flow.Collect,
		Ident: randString(),
		Re:    regexp.MustCompile(re),
		Repl:  repl,
		Deps:  []*flow.Flow{dep},
	}
}
// Merge constructs a new flow.Merge node over deps.
func Merge(deps ...*flow.Flow) *flow.Flow {
	return &flow.Flow{
		Op:    flow.Merge,
		Ident: randString(),
		Deps:  deps,
	}
}
// Pullup constructs a new flow.Pullup node over deps.
func Pullup(deps ...*flow.Flow) *flow.Flow {
	return &flow.Flow{
		Op:    flow.Pullup,
		Ident: randString(),
		Deps:  deps,
	}
}
// Val constructs a new flow.Val node holding v, already in the Done state.
func Val(v reflow.Fileset) *flow.Flow {
	return &flow.Flow{
		Op:    flow.Val,
		Ident: randString(),
		Value: values.T(v),
		State: flow.Done,
	}
}
// Data constructs a new reflow.Data node.
func Data(b []byte) *flow.Flow {
return &flow.Flow{Op: flow.Data, Ident: randString(), Data: b}
} | test/flow/constructor.go | 0.754463 | 0.499023 | constructor.go | starcoder |
package life
import (
"log"
"math/rand"
"strconv"
"time"
)
// World is our representation of the board or grid of Cells.
// Coordinates are 1-based: x in [1, param.Width], y in [1, param.Height].
type World struct {
	// emptyCell is the shared placeholder returned for out-of-grid lookups.
	emptyCell Cell
	// param holds the board dimensions, wrap flags, and life rule tables.
	param Param
	// step counts the generations simulated so far.
	step int64
	// grid stores cells keyed by "x|y" strings (see gridReference).
	grid map[string]*Cell
}
// neighboursAt lists the (dx, dy) offsets of the eight cells surrounding a
// given cell; used when counting live neighbours.
var neighboursAt = [8][2]int{
	{-1, 1}, {0, 1}, {1, 1}, // above
	{-1, 0} /* self */, {1, 0}, // beside
	{-1, -1}, {0, -1}, {1, -1}, // below
}
// Init prepares the World: parses the board/rule parameters, resets the
// generation counter, and populates the grid either randomly or from the
// fixed predefined pattern.
func (w *World) Init(board string, rules string, generateRandom bool) {
	log.Printf("Initialising World on '%s' board with '%s' rules", board, rules)
	w.emptyCell = Cell{false, false}
	w.param.Init(board, rules)
	w.step = 0
	w.grid = make(map[string]*Cell)
	if !generateRandom {
		w.predefinedWorld("../test_world.gol")
		return
	}
	w.randomiseWorld()
}
// randomiseWorld fills every grid position with a cell whose chance of
// starting alive is param.SpawnPercent (out of 100).
func (w *World) randomiseWorld() {
	rand.Seed(time.Now().UnixNano())
	for x := 1; x <= w.param.Width; x++ {
		for y := 1; y <= w.param.Height; y++ {
			w.addCell(x, y, rand.Intn(100) < w.param.SpawnPercent)
		}
	}
}
// predefinedWorld fills the grid with a fixed pattern: a vertical
// three-cell line at column 5, rows 4-6 (a "blinker"); all other cells
// start dead.
// NOTE(review): the fileName parameter is never read — the pattern is
// hard-coded. Confirm whether loading the pattern from the file was
// intended.
func (w *World) predefinedWorld(fileName string) {
	var x, y int
	for x = 1; x <= w.param.Width; x++ {
		for y = 1; y <= w.param.Height; y++ {
			isAlive := false
			if x == 5 && (y == 4 || y == 5 || y == 6) {
				isAlive = true
			}
			w.addCell(x, y, isAlive)
		}
	}
}
// addCell creates a Cell in the given state and stores it under the grid
// key for (x, y).
func (w *World) addCell(x, y int, isAlive bool) {
	cell := Cell{}
	cell.Init(isAlive)
	w.grid[w.gridReference(x, y)] = &cell
}
// gridReference maps (x, y) to the "x|y" key used by the grid, wrapping or
// clamping each coordinate according to the board parameters.
func (w *World) gridReference(x, y int) string {
	wx := w.wrapCoord(x, 1, w.param.Width, w.param.WrapX)
	wy := w.wrapCoord(y, 1, w.param.Height, w.param.WrapY)
	return strconv.Itoa(wx) + "|" + strconv.Itoa(wy)
}
// wrapCoord normalises a coordinate that stepped outside [min, max]. With
// wrapping enabled the coordinate re-enters from the opposite edge;
// otherwise it is pushed to the sentinel just past the edge (min-1 or
// max+1), which gridReference resolves to the shared empty cell.
// NOTE(review): the wrap arithmetic (max - val / val - max) only yields an
// in-range result when val is within one step of the range, as it is for
// the neighbour offsets used here — confirm before wider reuse.
func (w *World) wrapCoord(val, min, max int, wrapEnabled bool) int {
	switch {
	case val < min:
		if wrapEnabled {
			return max - val
		}
		return min - 1
	case val > max:
		if wrapEnabled {
			return val - max
		}
		return max + 1
	}
	return val
}
// Render generates a text display of the World state, one row per line,
// optionally appending "<br />" to each row for HTML output.
func (w *World) Render(htmlise bool) string {
	output := ""
	for y := 1; y <= w.param.Height; y++ {
		row := ""
		for x := 1; x <= w.param.Width; x++ {
			// String() auto-stringification is not recognised in '+' concatenation.
			row += w.cellAt(x, y).String()
		}
		if htmlise {
			row += "<br />"
		}
		output += row + "\n"
	}
	return output
}
// cellAt returns the Cell at (x, y), or the shared empty cell when the
// (possibly clamped) coordinate has no grid entry.
func (w *World) cellAt(x, y int) *Cell {
	if cell, ok := w.grid[w.gridReference(x, y)]; ok {
		return cell
	}
	return &w.emptyCell
}
// Step returns the count of iterations (generations) the World has gone
// through since Init.
func (w *World) Step() int64 {
	return w.step
}
// countNeighbours returns how many of the eight cells surrounding (x, y)
// are alive.
func (w *World) countNeighbours(x, y int) int {
	alive := 0
	for _, offset := range neighboursAt {
		if w.cellAt(x+offset[0], y+offset[1]).IsAlive() {
			alive++
		}
	}
	return alive
}
// Calculate performs the next iteration of the simulation across all Cells in the World
func (w *World) Calculate() {
for y := 1; y <= w.param.Height; y++ {
for x := 1; x <= w.param.Width; x++ {
cell := w.cellAt(x, y)
nCount := w.countNeighbours(x, y)
if cell.IsAlive() {
willSurvive := w.param.RuleValues["s"][strconv.Itoa(nCount)]
if !willSurvive {
cell.UpdateState(false)
}
} else {
willSpawn := w.param.RuleValues["b"][strconv.Itoa(nCount)]
if willSpawn {
cell.UpdateState(true)
}
}
}
}
for y := 1; y <= w.param.Height; y++ {
for x := 1; x <= w.param.Width; x++ {
cell := w.cellAt(x, y)
cell.Refresh()
}
}
w.step++
} | go/life/world.go | 0.582254 | 0.508727 | world.go | starcoder |
package main
/*
* Quick validation
*
*/
import (
"fmt"
"log"
"reflect"
"time"
"github.com/blewater/rsaints/lib"
)
// assertBool aborts the program when res differs from expected, naming the
// function under test in the failure message.
func assertBool(funcName string, res, expected bool) {
	if res == expected {
		return
	}
	log.Fatalf("%s resulted in %t, expected %t\n", funcName, res, expected)
}
// assertEqInt64 aborts the program when res differs from expected.
func assertEqInt64(funcName string, res, expected lib.Integer) {
	if res == expected {
		return
	}
	log.Fatalf("%s resulted in %d, expected %d\n", funcName, res, expected)
}
// assertEqFactors aborts the program when the factor slices differ
// (compared element-wise via reflect.DeepEqual).
func assertEqFactors(funcName string, res, expected lib.Factors) {
	if reflect.DeepEqual(res, expected) {
		return
	}
	log.Fatalf("%s resulted in %v, expected %v\n", funcName, res, expected)
}
// main exercises the rsaints library end to end: primality checks, integer
// factorisation, Euclid's GCD, modular multiplicative inverses, and RSA
// round-trips. Each assert* helper aborts on the first failure.
func main() {
	assertBool("IsPrimeOptimized",
		lib.IsPrimeOptimized(5), true)
	assertBool("IsPrimeOptimized",
		lib.IsPrimeOptimized(23), true)
	assertBool("IsPrimeOptimized",
		lib.IsPrimeOptimized(81), false)
	// non-prime number: 7 * 157 * 8365633
	assertBool("IsPrimeOptimized",
		lib.IsPrimeOptimized(9193830667), false)
	// known 10 digit prime number < 0.5 sec
	start := time.Now()
	res := lib.IsPrimeOptimized(9576890767)
	fmt.Printf("large prime number check took %v\n", time.Since(start))
	assertBool("IsPrimeOptimized", res, true)
	assertEqFactors("Factor",
		lib.Factor(23), lib.Factors{23})
	assertEqFactors("Factor",
		lib.Factor(26), lib.Factors{2, 13})
	assertEqFactors("Factor",
		lib.Factor(81), lib.Factors{3, 3, 3, 3})
	assertEqFactors("Factor",
		lib.Factor(150), lib.Factors{2, 3, 5, 5})
	assertEqFactors("Factor",
		lib.Factor(147), lib.Factors{3, 7, 7})
	// NOTE(review): Factor(150) is asserted twice; the repeat below appears
	// redundant.
	assertEqFactors("Factor",
		lib.Factor(150), lib.Factors{2, 3, 5, 5})
	assertEqFactors("Factor",
		lib.Factor(330), lib.Factors{2, 3, 5, 11})
	// non-prime number: 7 * 157 * 8365633
	assertEqFactors("Factor",
		lib.Factor(9193830667), lib.Factors{7, 157, 8365633})
	// known 10 digit prime number
	assertEqFactors("Factor", lib.Factor(9576890767),
		lib.Factors{9576890767})
	assertEqInt64("Euclid",
		lib.CalcEuclid(499017086208, 676126714752),
		93312)
	assertEqInt64("Euclid",
		lib.CalcEuclid(5988737349, 578354589),
		9)
	assertEqInt64("Mod Mult Inverse",
		lib.CalcModInvByEuclid(15, 26),
		7)
	assertEqInt64("Mod Mult Inverse",
		lib.CalcModInvByEuclid(342952340, 4230493243),
		583739113)
	// NOTE(review): presumably CheckRSA(msg, a, b) encrypts then decrypts
	// msg and compares — confirm argument meaning against the lib package.
	assertBool("Validate RSA Encryption and Decryption",
		lib.CheckRSA(654321, 937513, 638471), true)
	assertBool("Validate RSA Encryption and Decryption",
		lib.CheckRSA(10000, 937513, 638471), true)
	assertBool("Validate RSA Encryption and Decryption",
		lib.CheckRSA(937512, 937513, 638471), true)
	assertBool("Validate RSA Encryption and Decryption",
		lib.CheckRSA(1, 937513, 638471), true)
	fmt.Println("Successful completion of all tests.")
}
package indicator
import (
"fmt"
"time"
"github.com/rodrigo-brito/ninjabot/model"
"github.com/rodrigo-brito/ninjabot/plot"
"github.com/markcheno/go-talib"
)
// Supertrend returns a SuperTrend plot indicator configured with the given
// ATR period, band multiplier (factor), and line color.
func Supertrend(period int, factor float64, color string) plot.Indicator {
	return &supertrend{
		Period: period,
		Factor: factor,
		Color:  color,
	}
}

// Spertrend is a misspelled alias of Supertrend, kept so existing callers
// keep compiling.
//
// Deprecated: use Supertrend instead.
func Spertrend(period int, factor float64, color string) plot.Indicator {
	return Supertrend(period, factor, color)
}
// supertrend holds the configuration and computed series for the SuperTrend
// indicator: Basic*/Final* are the intermediate band series, SuperTrend is
// the plotted line, and Time holds its matching timestamps.
// NOTE(review): the Close field appears unused in this file — confirm
// whether it can be removed.
type supertrend struct {
	Period int
	Factor float64
	Color  string

	Close model.Series

	BasicUpperBand model.Series
	FinalUpperBand model.Series

	BasicLowerBand model.Series
	FinalLowerBand model.Series

	SuperTrend model.Series
	Time       []time.Time
}
// Name returns the indicator label shown on the chart,
// e.g. "SuperTrend(10,3.0)".
func (s supertrend) Name() string {
	return fmt.Sprintf("SuperTrend(%d,%.1f)", s.Period, s.Factor)
}
// Overlay reports that this indicator is drawn on the price chart itself
// rather than in a separate pane.
func (s supertrend) Overlay() bool {
	return true
}
// Load computes the SuperTrend series from the dataframe using the ATR of
// the configured period: basic upper/lower bands are median ± factor*ATR,
// the final bands carry the previous band forward unless price action
// resets them, and the SuperTrend line flips between the two final bands.
// The first Period samples are trimmed from the published series.
func (s *supertrend) Load(df *model.Dataframe) {
	if len(df.Time) < s.Period {
		return
	}

	atr := talib.Atr(df.High, df.Low, df.Close, s.Period)
	n := len(atr)
	s.BasicUpperBand = make([]float64, n)
	s.BasicLowerBand = make([]float64, n)
	s.FinalUpperBand = make([]float64, n)
	s.FinalLowerBand = make([]float64, n)
	s.SuperTrend = make([]float64, n)

	// BUG FIX: this loop previously started at i = 1, which made every
	// `i == 0` branch below unreachable and left index 0 of each band at
	// its zero value; starting at 0 seeds the recurrences as the branch
	// structure clearly intends.
	for i := 0; i < n; i++ {
		median := (df.High[i] + df.Low[i]) / 2.0
		s.BasicUpperBand[i] = median + atr[i]*s.Factor
		s.BasicLowerBand[i] = median - atr[i]*s.Factor

		// Final upper band: reset to the basic band on the first sample,
		// on a tighter basic band, or after a close above the prior band.
		if i == 0 || s.BasicUpperBand[i] < s.FinalUpperBand[i-1] ||
			df.Close[i-1] > s.FinalUpperBand[i-1] {
			s.FinalUpperBand[i] = s.BasicUpperBand[i]
		} else {
			s.FinalUpperBand[i] = s.FinalUpperBand[i-1]
		}

		// Final lower band: mirror image of the upper-band rule.
		if i == 0 || s.BasicLowerBand[i] > s.FinalLowerBand[i-1] ||
			df.Close[i-1] < s.FinalLowerBand[i-1] {
			s.FinalLowerBand[i] = s.BasicLowerBand[i]
		} else {
			s.FinalLowerBand[i] = s.FinalLowerBand[i-1]
		}

		// The line tracks the upper band in a downtrend and the lower band
		// in an uptrend, flipping when price crosses the active band.
		if i == 0 || s.FinalUpperBand[i-1] == s.SuperTrend[i-1] {
			if df.Close[i] > s.FinalUpperBand[i] {
				s.SuperTrend[i] = s.FinalLowerBand[i]
			} else {
				s.SuperTrend[i] = s.FinalUpperBand[i]
			}
		} else {
			if df.Close[i] < s.FinalLowerBand[i] {
				s.SuperTrend[i] = s.FinalUpperBand[i]
			} else {
				s.SuperTrend[i] = s.FinalLowerBand[i]
			}
		}
	}

	s.Time = df.Time[s.Period:]
	s.SuperTrend = s.SuperTrend[s.Period:]
}
// Metrics exposes the computed SuperTrend line as a single scatter series
// for the plotting layer.
func (s supertrend) Metrics() []plot.IndicatorMetric {
	return []plot.IndicatorMetric{
		{
			Style: "scatter",
			Color: s.Color,
			Values: s.SuperTrend,
			Time: s.Time,
		},
	}
}
package models
import (
"github.com/shopspring/decimal"
"time"
)
// MarketOrder is just a simple struct to homologue the exchange responses:
// a single order-book level, i.e. how much is offered and at what price.
type MarketOrder struct {
	Amount decimal.Decimal `json:"amount"`
	Price decimal.Decimal `json:"price"`
}
// FiatRates is the struct to handle internally the OpenRate response.
// Rates maps a currency code to its rate; LastUpdated records when the
// rates were last refreshed so callers can decide whether to re-fetch.
type FiatRates struct {
	Rates map[string]float64
	LastUpdated time.Time
}
// RateV2 is the struct to homologue exchange rate responses (v2 shape:
// unlike Rate, the currency code is not carried in the payload).
type RateV2 struct {
	Name string `json:"name"`
	Rate float64 `json:"rate"`
}
// Rate is the struct to homologue exchange rate responses.
// Code is the currency code and Name its human-readable currency name.
type Rate struct {
	Code string `json:"code"`
	Name string `json:"name"`
	Rate float64 `json:"rate"`
}
// FixerRates is the structure of the FixerRates response
type FixerRates struct {
Success bool `json:"success"`
Timestamp int `json:"timestamp"`
Base string `json:"base"`
Date string `json:"date"`
Error FixerError `json:"error"`
Rates struct {
AED float64 `json:"AED"`
AFN float64 `json:"AFN"`
ALL float64 `json:"ALL"`
AMD float64 `json:"AMD"`
ANG float64 `json:"ANG"`
AOA float64 `json:"AOA"`
ARS float64 `json:"ARS"`
AUD float64 `json:"AUD"`
AWG float64 `json:"AWG"`
AZN float64 `json:"AZN"`
BAM float64 `json:"BAM"`
BBD float64 `json:"BBD"`
BDT float64 `json:"BDT"`
BGN float64 `json:"BGN"`
BHD float64 `json:"BHD"`
BIF float64 `json:"BIF"`
BMD float64 `json:"BMD"`
BND float64 `json:"BND"`
BOB float64 `json:"BOB"`
BRL float64 `json:"BRL"`
BSD float64 `json:"BSD"`
BTC float64 `json:"BTC"`
BTN float64 `json:"BTN"`
BWP float64 `json:"BWP"`
BYN float64 `json:"BYN"`
BYR float64 `json:"BYR"`
BZD float64 `json:"BZD"`
CAD float64 `json:"CAD"`
CDF float64 `json:"CDF"`
CHF float64 `json:"CHF"`
CLF float64 `json:"CLF"`
CLP float64 `json:"CLP"`
CNY float64 `json:"CNY"`
COP float64 `json:"COP"`
CRC float64 `json:"CRC"`
CUC float64 `json:"CUC"`
CUP float64 `json:"CUP"`
CVE float64 `json:"CVE"`
CZK float64 `json:"CZK"`
DJF float64 `json:"DJF"`
DKK float64 `json:"DKK"`
DOP float64 `json:"DOP"`
DZD float64 `json:"DZD"`
EGP float64 `json:"EGP"`
ERN float64 `json:"ERN"`
ETB float64 `json:"ETB"`
EUR int `json:"EUR"`
FJD float64 `json:"FJD"`
FKP float64 `json:"FKP"`
GBP float64 `json:"GBP"`
GEL float64 `json:"GEL"`
GGP float64 `json:"GGP"`
GHS float64 `json:"GHS"`
GIP float64 `json:"GIP"`
GMD float64 `json:"GMD"`
GNF float64 `json:"GNF"`
GTQ float64 `json:"GTQ"`
GYD float64 `json:"GYD"`
HKD float64 `json:"HKD"`
HNL float64 `json:"HNL"`
HRK float64 `json:"HRK"`
HTG float64 `json:"HTG"`
HUF float64 `json:"HUF"`
IDR float64 `json:"IDR"`
ILS float64 `json:"ILS"`
IMP float64 `json:"IMP"`
INR float64 `json:"INR"`
IQD float64 `json:"IQD"`
IRR float64 `json:"IRR"`
ISK float64 `json:"ISK"`
JEP float64 `json:"JEP"`
JMD float64 `json:"JMD"`
JOD float64 `json:"JOD"`
JPY float64 `json:"JPY"`
KES float64 `json:"KES"`
KGS float64 `json:"KGS"`
KHR float64 `json:"KHR"`
KMF float64 `json:"KMF"`
KPW float64 `json:"KPW"`
KRW float64 `json:"KRW"`
KWD float64 `json:"KWD"`
KYD float64 `json:"KYD"`
KZT float64 `json:"KZT"`
LAK float64 `json:"LAK"`
LBP float64 `json:"LBP"`
LKR float64 `json:"LKR"`
LRD float64 `json:"LRD"`
LSL float64 `json:"LSL"`
LTL float64 `json:"LTL"`
LVL float64 `json:"LVL"`
LYD float64 `json:"LYD"`
MAD float64 `json:"MAD"`
MDL float64 `json:"MDL"`
MGA float64 `json:"MGA"`
MKD float64 `json:"MKD"`
MMK float64 `json:"MMK"`
MNT float64 `json:"MNT"`
MOP float64 `json:"MOP"`
MRO float64 `json:"MRO"`
MUR float64 `json:"MUR"`
MVR float64 `json:"MVR"`
MWK float64 `json:"MWK"`
MXN float64 `json:"MXN"`
MYR float64 `json:"MYR"`
MZN float64 `json:"MZN"`
NAD float64 `json:"NAD"`
NGN float64 `json:"NGN"`
NIO float64 `json:"NIO"`
NOK float64 `json:"NOK"`
NPR float64 `json:"NPR"`
NZD float64 `json:"NZD"`
OMR float64 `json:"OMR"`
PAB float64 `json:"PAB"`
PEN float64 `json:"PEN"`
PGK float64 `json:"PGK"`
PHP float64 `json:"PHP"`
PKR float64 `json:"PKR"`
PLN float64 `json:"PLN"`
PYG float64 `json:"PYG"`
QAR float64 `json:"QAR"`
RON float64 `json:"RON"`
RSD float64 `json:"RSD"`
RUB float64 `json:"RUB"`
RWF float64 `json:"RWF"`
SAR float64 `json:"SAR"`
SBD float64 `json:"SBD"`
SCR float64 `json:"SCR"`
SDG float64 `json:"SDG"`
SEK float64 `json:"SEK"`
SGD float64 `json:"SGD"`
SHP float64 `json:"SHP"`
SLL float64 `json:"SLL"`
SOS float64 `json:"SOS"`
SRD float64 `json:"SRD"`
STD float64 `json:"STD"`
SVC float64 `json:"SVC"`
SYP float64 `json:"SYP"`
SZL float64 `json:"SZL"`
THB float64 `json:"THB"`
TJS float64 `json:"TJS"`
TMT float64 `json:"TMT"`
TND float64 `json:"TND"`
TOP float64 `json:"TOP"`
TRY float64 `json:"TRY"`
TTD float64 `json:"TTD"`
TWD float64 `json:"TWD"`
TZS float64 `json:"TZS"`
UAH float64 `json:"UAH"`
UGX float64 `json:"UGX"`
USD float64 `json:"USD"`
UYU float64 `json:"UYU"`
UZS float64 `json:"UZS"`
VEF float64 `json:"VEF"`
VND float64 `json:"VND"`
VUV float64 `json:"VUV"`
WST float64 `json:"WST"`
XAF float64 `json:"XAF"`
XAG float64 `json:"XAG"`
XAU float64 `json:"XAU"`
XCD float64 `json:"XCD"`
XDR float64 `json:"XDR"`
XOF float64 `json:"XOF"`
XPF float64 `json:"XPF"`
YER float64 `json:"YER"`
ZAR float64 `json:"ZAR"`
ZMK float64 `json:"ZMK"`
ZMW float64 `json:"ZMW"`
ZWL float64 `json:"ZWL"`
} `json:"rates"`
}
// FixerError mirrors the error object embedded in a Fixer API response
// (populated when Success is false in FixerRates).
type FixerError struct {
	Code int `json:"code"`
	Info string `json:"info"`
}
var FixerRatesNames = map[string]string{
"AED": "United Arab Emirates Dirham",
"AFN": "Afghan Afghani",
"ALL": "Albanian Lek",
"AMD": "Armenian Dram",
"ANG": "Netherlands Antillean Guilder",
"AOA": "Angolan Kwanza",
"ARS": "Argentine Peso",
"AUD": "Australian Dollar",
"AWG": "Aruban Florin",
"AZN": "Azerbaijani Manat",
"BAM": "Bosnia-Herzegovina Convertible Mark",
"BBD": "Barbadian Dollar",
"BDT": "Bangladeshi Taka",
"BGN": "Bulgarian Lev",
"BHD": "Bahraini Dinar",
"BIF": "Burundian Franc",
"BMD": "Bermudan Dollar",
"BND": "Brunei Dollar",
"BOB": "Bolivian Boliviano",
"BRL": "Brazilian Real",
"BSD": "Bahamian Dollar",
"BTC": "Bitcoin",
"BTN": "Bhutanese Ngultrum",
"BWP": "Botswanan Pula",
"BYN": "New Belarusian Ruble",
"BYR": "Belarusian Ruble",
"BZD": "Belize Dollar",
"CAD": "Canadian Dollar",
"CDF": "Congolese Franc",
"CHF": "Swiss Franc",
"CLF": "Chilean Unit of Account (UF)",
"CLP": "Chilean Peso",
"CNY": "Chinese Yuan",
"COP": "Colombian Peso",
"CRC": "Costa Rican Colón",
"CUC": "Cuban Convertible Peso",
"CUP": "Cuban Peso",
"CVE": "Cape Verdean Escudo",
"CZK": "Czech Republic Koruna",
"DJF": "Djiboutian Franc",
"DKK": "Danish Krone",
"DOP": "Dominican Peso",
"DZD": "Algerian Dinar",
"EGP": "Egyptian Pound",
"ERN": "Eritrean Nakfa",
"ETB": "Ethiopian Birr",
"EUR": "Euro",
"FJD": "Fijian Dollar",
"FKP": "Falkland Islands Pound",
"GBP": "British Pound Sterling",
"GEL": "Georgian Lari",
"GGP": "Guernsey Pound",
"GHS": "Ghanaian Cedi",
"GIP": "Gibraltar Pound",
"GMD": "Gambian Dalasi",
"GNF": "Guinean Franc",
"GTQ": "Guatemalan Quetzal",
"GYD": "Guyanaese Dollar",
"HKD": "Hong Kong Dollar",
"HNL": "Honduran Lempira",
"HRK": "Croatian Kuna",
"HTG": "Haitian Gourde",
"HUF": "Hungarian Forint",
"IDR": "Indonesian Rupiah",
"ILS": "Israeli New Sheqel",
"IMP": "Manx pound",
"INR": "Indian Rupee",
"IQD": "Iraqi Dinar",
"IRR": "Iranian Rial",
"ISK": "Icelandic Króna",
"JEP": "Jersey Pound",
"JMD": "Jamaican Dollar",
"JOD": "Jordanian Dinar",
"JPY": "Japanese Yen",
"KES": "Kenyan Shilling",
"KGS": "Kyrgystani Som",
"KHR": "Cambodian Riel",
"KMF": "Comorian Franc",
"KPW": "North Korean Won",
"KRW": "South Korean Won",
"KWD": "Kuwaiti Dinar",
"KYD": "Cayman Islands Dollar",
"KZT": "Kazakhstani Tenge",
"LAK": "Laotian Kip",
"LBP": "Lebanese Pound",
"LKR": "Sri Lankan Rupee",
"LRD": "Liberian Dollar",
"LSL": "<NAME>",
"LTL": "Lithuanian Litas",
"LVL": "Latvian Lats",
"LYD": "Libyan Dinar",
"MAD": "<NAME>",
"MDL": "Moldovan Leu",
"MGA": "Malagasy Ariary",
"MKD": "Macedonian Denar",
"MMK": "Myanma Kyat",
"MNT": "Mongolian Tugrik",
"MOP": "Macanese Pataca",
"MRO": "Mauritanian Ouguiya",
"MUR": "Mauritian Rupee",
"MVR": "Maldivian Rufiyaa",
"MWK": "Malawian Kwacha",
"MXN": "Mexican Peso",
"MYR": "Malaysian Ringgit",
"MZN": "Mozambican Metical",
"NAD": "Namibian Dollar",
"NGN": "Nigerian Naira",
"NIO": "Nicaraguan Córdoba",
"NOK": "Norwegian Krone",
"NPR": "Nepalese Rupee",
"NZD": "New Zealand Dollar",
"OMR": "Omani Rial",
"PAB": "Panamanian Balboa",
"PEN": "Peruvian Nuevo Sol",
"PGK": "Papua New Guinean Kina",
"PHP": "Philippine Peso",
"PKR": "Pakistani Rupee",
"PLN": "Polish Zloty",
"PYG": "Paraguayan Guarani",
"QAR": "Qatari Rial",
"RON": "Romanian Leu",
"RSD": "Serbian Dinar",
"RUB": "Russian Ruble",
"RWF": "Rwandan Franc",
"SAR": "Saudi Riyal",
"SBD": "Solomon Islands Dollar",
"SCR": "Seychellois Rupee",
"SDG": "Sudanese Pound",
"SEK": "Swedish Krona",
"SGD": "Singapore Dollar",
"SHP": "Saint Helena Pound",
"SLL": "Sierra Leonean Leone",
"SOS": "Somali Shilling",
"SRD": "Surinamese Dollar",
"STD": "São Tomé and Príncipe Dobra",
"SVC": "Salvadoran Colón",
"SYP": "Syrian Pound",
"SZL": "Swazi Lilangeni",
"THB": "Thai Baht",
"TJS": "Tajikistani Somoni",
"TMT": "Turkmenistani Manat",
"TND": "Tunisian Dinar",
"TOP": "Tongan Paʻanga",
"TRY": "Turkish Lira",
"TTD": "Trinidad and Tobago Dollar",
"TWD": "New Taiwan Dollar",
"TZS": "Tanzanian Shilling",
"UAH": "Ukrainian Hryvnia",
"UGX": "Ugandan Shilling",
"USD": "United States Dollar",
"UYU": "Uruguayan Peso",
"UZS": "Uzbekistan Som",
"VEF": "Venezuelan Bolívar Fuerte",
"VND": "Vietnamese Dong",
"VUV": "Vanuatu Vatu",
"WST": "Samoan Tala",
"XAF": "CFA Franc BEAC",
"XAG": "Silver (troy ounce)",
"XAU": "Gold (troy ounce)",
"XCD": "East Caribbean Dollar",
"XDR": "Special Drawing Rights",
"XOF": "CFA Franc BCEAO",
"XPF": "CFP Franc",
"YER": "Yemeni Rial",
"ZAR": "South African Rand",
"ZMK": "Zambian Kwacha (pre-2013)",
"ZMW": "Zambian Kwacha",
"ZWL": "Zimbabwean Dollar",
} | models/rates.go | 0.597138 | 0.437884 | rates.go | starcoder |
package main
import (
"math"
"math/rand"
"time"
)
// Boid is a single agent in the flocking simulation, tracked on the shared
// boidMap occupancy grid by its (truncated) Position.
type Boid struct {
	Position Vector2D // current location in screen coordinates
	Velocity Vector2D // displacement applied each simulation step
	Id       int      // index of this boid in the package-level boids slice
}
// start runs this boid's simulation loop forever, advancing one step every
// 5µs. It is intended to be launched as a goroutine (see CreateBoid).
//
// Fix: the receiver is now a pointer. With the original value receiver each
// moveOne call operated on a throwaway copy, so Position/Velocity updates
// never persisted between iterations.
func (b *Boid) start() {
	for {
		b.moveOne()
		time.Sleep(5 * time.Microsecond)
	}
}
// CalculateAcceleration scans the occupancy grid within viewRadius of this
// boid and combines the three classic flocking forces — alignment (match
// neighbors' average velocity), cohesion (steer toward their average
// position) and separation (steer away from very close neighbors) — plus a
// border-bounce term (borderBounce is defined elsewhere in this package).
//
// Fix: field accesses used lowercase names (b.position, b.id, .velocity)
// that do not exist on Boid, whose fields are exported (Position, Id,
// Velocity); they are corrected to the declared names.
func (b Boid) CalculateAcceleration() Vector2D {
	upper, lower := b.Position.AddV(viewRadius), b.Position.AddV(-viewRadius)
	avgPosition, avgVelocity, separation := Vector2D{0, 0}, Vector2D{0, 0}, Vector2D{0, 0}
	count := 0.0
	// Walk every grid cell in the clamped view square around the boid.
	for i := math.Max(lower.x, 0); i <= math.Min(upper.x, screenWidth); i++ {
		for j := math.Max(lower.y, 0); j <= math.Min(upper.y, screenHeight); j++ {
			if otherBoidId := boidMap[int(i)][int(j)]; otherBoidId != -1 && otherBoidId != b.Id {
				if dist := boids[otherBoidId].Position.Distance(b.Position); dist < viewRadius {
					count++
					avgVelocity = avgVelocity.Add(boids[otherBoidId].Velocity)
					avgPosition = avgPosition.Add(boids[otherBoidId].Position)
					// Separation is weighted by 1/dist so closer boids repel harder.
					separation = separation.Add(b.Position.Subtract(boids[otherBoidId].Position).DivisionV(dist))
				}
			}
		}
	}
	accel := Vector2D{b.borderBounce(b.Position.x, screenWidth), b.borderBounce(b.Position.y, screenHeight)}
	if count > 0 {
		avgPosition, avgVelocity = avgPosition.DivisionV(count), avgVelocity.DivisionV(count)
		accelAlignment := avgVelocity.Subtract(b.Velocity).MultiplyV(adjRate)
		accelCohesion := avgPosition.Subtract(b.Position).MultiplyV(adjRate)
		accelSeparation := separation.MultiplyV(adjRate)
		accel = accel.Add(accelAlignment).Add(accelCohesion).Add(accelSeparation)
	}
	return accel
}
// moveOne advances the boid one simulation step: apply acceleration, move it
// on the shared occupancy grid, then reflect the velocity if the following
// step would leave the screen.
//
// Fixes: pointer receiver (a value receiver silently discarded all
// mutations), and consistent screenWidth/screenHeight casing (the file mixed
// screenwidth/screenWidth — assumes the package-level names are the
// camelCase ones used by CalculateAcceleration; confirm).
func (b *Boid) moveOne() {
	b.Velocity = b.Velocity.Add(b.CalculateAcceleration())
	// Vacate the old cell before moving. NOTE(review): boidMap indexing
	// assumes Position stays within the screen bounds — confirm the bounce
	// below is sufficient to guarantee that.
	boidMap[int(b.Position.x)][int(b.Position.y)] = -1
	b.Position = b.Position.Add(b.Velocity)
	boidMap[int(b.Position.x)][int(b.Position.y)] = b.Id
	next := b.Position.Add(b.Velocity)
	if next.x >= screenWidth || next.x < 0 {
		b.Velocity = Vector2D{-b.Velocity.x, b.Velocity.y}
	}
	if next.y >= screenHeight || next.y < 0 {
		b.Velocity = Vector2D{b.Velocity.x, -b.Velocity.y}
	}
}
// CreateBoid spawns boid id at a random on-screen position with a random
// per-axis velocity in [-1, 1), registers it in the shared boids slice and
// occupancy grid, then starts its simulation goroutine.
//
// Fix: uses the camelCase screenWidth/screenHeight names consistently with
// CalculateAcceleration (the file mixed two casings — confirm).
func CreateBoid(id int) {
	b := Boid{
		Position: Vector2D{rand.Float64() * screenWidth, rand.Float64() * screenHeight},
		Velocity: Vector2D{(rand.Float64() * 2) - 1.0, (rand.Float64() * 2) - 1.0},
		Id:       id,
	}
	boids[id] = &b
	boidMap[int(b.Position.x)][int(b.Position.y)] = b.Id
	go b.start()
} | boids/boid.go | 0.675229 | 0.529872 | boid.go | starcoder |
package parse
import (
"github.com/cdkini/Okra/src/okraerr"
"github.com/cdkini/Okra/src/interpreter/ast"
)
// A Parser evaluates a collection of tokens and constructs abstract syntax trees (ASTs) out of
// the resulting expressions and statements. It is also responsible for consolidating parse errors
// and providing useful feedback to the user.
type Parser struct {
	tokens []ast.Token // As created by the scanner
	// current is the index into tokens of the next token to be evaluated.
	current int
	// hadError records whether a parse error occurred; not read or written
	// anywhere in this chunk — confirm it is used elsewhere or remove.
	hadError bool
}
// NewParser returns a Parser positioned at the first of the given tokens,
// with no error recorded.
func NewParser(tokens []ast.Token) *Parser {
	p := Parser{
		tokens:   tokens,
		current:  0,
		hadError: false,
	}
	return &p
}
// Parse drives the recursive descent over the token stream, matching it
// against Okra's context-free grammar and collecting one statement AST per
// top-level declaration until the EOF token is reached.
// Returns: the slice of statements to interpret.
func (p *Parser) Parse() []ast.Stmt {
	statements := make([]ast.Stmt, 0)
	for {
		if p.isAtEOF() {
			return statements
		}
		p.parse(&statements)
	}
}
// parse is a helper method used in Parse that performs one step of the
// recursive descent, appending the parsed declaration to the accumulating
// statement slice.
// NOTE(review): the original comment claimed this method synchronizes the
// parser after a parse error, but no synchronization logic is present here —
// confirm whether error recovery was intended.
// Args: stmts [*[]Stmt] - The resulting slice to be returned and interpreted upon completion of parsing.
// Returns: nil
func (p *Parser) parse(stmts *[]ast.Stmt) {
	stmt := p.declaration()
	*stmts = append(*stmts, stmt)
}
// consume checks that the next token has the expected type t and consumes
// it, returning the consumed token. When the type does not match, an error
// carrying msg and the offending token's position is reported via okraerr
// before the parser moves on.
// Args: t (ast.TokenType) - the token type the parser expects next
//       msg (string) - the message used if an error must be reported
// Returns: the next token in the sequence
func (p *Parser) consume(t ast.TokenType, msg string) ast.Token {
	if p.check(t) {
		return p.advance()
	}
	bad := p.currToken()
	okraerr.ReportErr(bad.Line, bad.Col, msg)
	return p.advance()
}
// match reports whether the next token is any of the given types; when a
// match is found the token is consumed.
func (p *Parser) match(types ...ast.TokenType) bool {
	for _, candidate := range types {
		if !p.check(candidate) {
			continue
		}
		p.advance()
		return true
	}
	return false
}
// check reports whether the next token has type t without consuming it.
// At end of input it always reports false.
func (p *Parser) check(t ast.TokenType) bool {
	return !p.isAtEOF() && p.peek().Type == t
}
// advance consumes the current token (unless already at EOF) and returns
// the token that was just consumed.
func (p *Parser) advance() ast.Token {
	if !p.isAtEOF() {
		p.current++
	}
	return p.prevToken()
}

// isAtEOF reports whether the parser has reached the end-of-file token.
func (p *Parser) isAtEOF() bool {
	return p.peek().Type == ast.EOF
}

// peek returns the next token without consuming it.
func (p *Parser) peek() ast.Token {
	return p.tokens[p.current]
}

// currToken returns the token currently under consideration. It is kept as
// a readability alias at call sites; it now delegates to peek instead of
// duplicating its body.
func (p *Parser) currToken() ast.Token {
	return p.peek()
}
// prevToken returns the most recently consumed token. It panics if called
// before any token has been consumed (current == 0).
func (p *Parser) prevToken() ast.Token {
	return p.tokens[p.current-1]
} | src/interpreter/parse/parser.go | 0.770637 | 0.456228 | parser.go | starcoder |
package main
import (
"fmt"
"math"
"strconv"
"strings"
"time"
"github.com/sanderploegsma/advent-of-code/2019/utils"
)
// main solves both parts of the puzzle, timing each: part one finds the
// intersection closest to the origin, part two the one reachable in the
// fewest combined wire steps.
func main() {
	// One comma-separated instruction string per wire; the read error is
	// deliberately ignored — a missing input file surfaces as a panic below.
	paths, _ := utils.ReadLines("input.txt")
	start := time.Now()
	result := DistanceToClosestIntersection(paths[0], paths[1])
	end := time.Since(start)
	fmt.Printf("[PART ONE] distance to closest intersection: %d (took %s)\n", result, end)
	start = time.Now()
	result = FewestStepsToIntersection(paths[0], paths[1])
	end = time.Since(start)
	fmt.Printf("[PART TWO] fewest steps to intersection: %d (took %s)\n", result, end)
}
// DistanceToClosestIntersection finds all intersections between the given
// wires `a` and `b` and returns the Manhattan distance from the origin to
// the closest one. If the wires never cross, 0 is returned.
//
// Perf: the original compared every point of one path against every point
// of the other (O(n*m)); indexing one path in a set makes this O(n+m).
func DistanceToClosestIntersection(a, b string) int {
	pathA := generatePath(a)
	pathB := generatePath(b)

	onA := make(map[point]struct{}, len(pathA))
	for _, p := range pathA {
		onA[p] = struct{}{}
	}

	best := 0 // 0 doubles as "no intersection found yet", as in the original
	for _, p := range pathB {
		if _, ok := onA[p]; !ok {
			continue
		}
		if d := p.DistanceToOrigin(); best == 0 || d < best {
			best = d
		}
	}
	return best
}
// FewestStepsToIntersection finds all intersections between the given wires
// `a` and `b` and returns the fewest combined steps needed to reach one.
// Returns math.MaxInt32 when the wires never cross (as before).
//
// Perf: indexes wire A's first visit to each point so the scan is O(n+m)
// instead of the original O(n*m) pairwise comparison. Taking the first
// visit is equivalent: it minimizes the step count for each point.
func FewestStepsToIntersection(a, b string) int {
	pathA := generatePath(a)
	pathB := generatePath(b)

	// Index 0 of a path is one step in, so steps = index + 1.
	stepsA := make(map[point]int, len(pathA))
	for i, p := range pathA {
		if _, seen := stepsA[p]; !seen {
			stepsA[p] = i + 1
		}
	}

	minSteps := math.MaxInt32
	for j, p := range pathB {
		if sa, ok := stepsA[p]; ok && sa+j+1 < minSteps {
			minSteps = sa + j + 1
		}
	}
	return minSteps
}
// generatePath expands comma-separated move instructions such as "U1,R2"
// into the ordered list of grid points visited, starting adjacent to the
// origin (the origin itself is not included).
func generatePath(line string) []point {
	path := make([]point, 0)
	cursor := point{0, 0}
	for _, instruction := range strings.Split(line, ",") {
		dx, dy := move(instruction[0:1])
		steps, _ := strconv.Atoi(instruction[1:])
		for step := 0; step < steps; step++ {
			cursor = point{cursor.X + dx, cursor.Y + dy}
			path = append(path, cursor)
		}
	}
	return path
}
// move translates a direction letter into a unit step (dx, dy):
// "U" is (0,1), "D" is (0,-1), "L" is (-1,0), "R" is (1,0).
// Any other input yields (0,0), i.e. no movement.
func move(dir string) (dx, dy int) {
	switch dir {
	case "U":
		dy = 1
	case "D":
		dy = -1
	case "L":
		dx = -1
	case "R":
		dx = 1
	}
	return dx, dy
}
// point is a 2D grid coordinate on the wire panel.
type point struct {
	X, Y int
}
// DistanceToOrigin returns the Manhattan distance |X| + |Y| to (0, 0).
func (p *point) DistanceToOrigin() int {
	dist := utils.Abs(p.X)
	dist += utils.Abs(p.Y)
	return dist
}
// Equals returns true if the given point has the same coordinates as this
func (p *point) Equals(o point) bool {
return p.X == o.X && p.Y == o.Y
} | 2019/go/03/main.go | 0.815673 | 0.45417 | main.go | starcoder |
package streaming
import "github.com/ipld/go-ipld-prime/schema"
// GetHeaderType builds an IPLD type system describing a Tendermint block
// Header together with the nested types it references (Proof, Part,
// PartSetHeader, BlockID), and returns the "Header" type. The registered
// types mirror the commented IPLD schema at the bottom of this file.
func GetHeaderType() schema.Type {
	ts := schema.TypeSystem{}
	ts.Init()
	// Merkle-proof support types.
	ts.Accumulate(schema.SpawnList("Aunts", "Hash", false))
	ts.Accumulate(schema.SpawnStruct("Proof",
		[]schema.StructField{
			schema.SpawnStructField("Total", "Int", false, false),
			schema.SpawnStructField("Index", "Int", false, false),
			schema.SpawnStructField("LeafHash", "Hash", false, false),
			schema.SpawnStructField("Aunts", "Aunts", false, false),
		},
		schema.SpawnStructRepresentationMap(nil),
	))
	// Block part-set types: a block is split into parts, each with a proof.
	ts.Accumulate(schema.SpawnStruct("Part",
		[]schema.StructField{
			schema.SpawnStructField("Index", "Uint", false, false),
			schema.SpawnStructField("Bytes", "HexBytes", false, false),
			schema.SpawnStructField("Proof", "Proof", false, false),
		},
		schema.SpawnStructRepresentationMap(nil),
	))
	ts.Accumulate(schema.SpawnList("PartSet", "Part", false))
	ts.Accumulate(schema.SpawnStruct("PartSetHeader",
		[]schema.StructField{
			schema.SpawnStructField("Total", "Uint", false, false),
			schema.SpawnStructField("Hash", "Link", false, false), // link to the root node of a merkle tree created from part set
		},
		schema.SpawnStructRepresentationMap(nil),
	))
	ts.Accumulate(schema.SpawnStruct("BlockID",
		[]schema.StructField{
			schema.SpawnStructField("Hash", "Link", false, false), // HeaderCID, link to the root node of a merkle tree created from all the consensus fields in a header
			schema.SpawnStructField("PartSetHeader", "PartSetHeader", false, false),
		},
		schema.SpawnStructRepresentationMap(nil),
	))
	// The Header itself; several fields are commented out, presumably
	// pending support for their types (Version, Time, BlockID, Address) —
	// confirm before relying on this schema being complete.
	ts.Accumulate(schema.SpawnStruct("Header",
		[]schema.StructField{
			// schema.SpawnStructField("Version", "Version", false, false),
			schema.SpawnStructField("ChainID", "String", true, true),
			schema.SpawnStructField("Height", "Int", true, true),
			// schema.SpawnStructField("Time", "Time", true, true),
			// schema.SpawnStructField("LastBlockID", "BlockID", true, true),
			schema.SpawnStructField("LastCommitHash", "Link", true, true), // CommitTreeCID
			schema.SpawnStructField("DataHash", "Link", true, true), // TxTreeCID
			schema.SpawnStructField("ValidatorsHash", "Link", true, true), // ValidatorTreeCID
			schema.SpawnStructField("NextValidatorsHash", "Link", true, true), // ValidatorTreeCID
			schema.SpawnStructField("ConsensusHash", "Link", true, true), // HashedParamsCID
			schema.SpawnStructField("AppHash", "Link", true, true), // AppStateTreeCID
			schema.SpawnStructField("LastResultsHash", "Link", true, true), // LastResultsHash
			schema.SpawnStructField("EvidenceHash", "Link", true, true), // EvidenceTreeCID
			// schema.SpawnStructField("ProposerAddress", "Address", false, false),
		},
		schema.SpawnStructRepresentationMap(nil),
	))
	return ts.TypeByName("Header")
}
/*
# HashedParamsCID is a CID link to the HashedParams for this Header
# This CID is composed of the SHA_256 multihash of the linked protobuf encoded HashedParams struct and the HashedParmas codec (tbd)
type HashParamsCID &HashedParams
# EvidenceTreeCID is a CID link to the root node of a Evidence merkle tree
# This CID is composed of the SHA_256 multihash of the root node in the Evidence merkle tree and the Evidence codec (tbd)
# The Evidence merkle tree is Merkle tree build from the list of evidence of Byzantine behaviour included in this block.
type EvidenceTreeCID &MerkleTreeNode
# ResultTreeCID is a CID link to the root node of a Result merkle tree
# This CID is composed of the SHA_256 multihash of the root node in a Result merkle tree and the Result codec (tbd)
# Result merkle tree is a Merkle tree built from ResponseDeliverTx responses (Log, Info, Codespace and Events fields are ignored)
type ResultTreeCID &MerkleTreeNode
# AppStateTreeCID is a CID link to the state root returned by the state machine after executing and committing the previous block
# It serves as the basis for validating any Merkle proofs that comes from the ABCI application and represents the state of the actual application rather than the state of the blockchain itself.
# This nature of the hash is determined by the application, Tendermint can not perform validation on it
type AppStateReference &MerkleTreeNode
# ValidatorTreeCID is a CID link to the root node of a Validator merkle tree
# This CID is composed of the SHA_256 multihash of the root node in the Validator merkle tree and the Validator codec (tbd)
# Validator merkle tree is a Merkle tree built from the set of validators for the given block
# The validators are first sorted by voting power (descending), then by address (ascending) prior to computing the MerkleRoot
type ValidatorTreeCID &MerkleTreeNode
# TxTreeCID is a CID link to the root node of a Tx merkle tree
# This CID is composed of the SHA_256 multihash of the root node in the Tx merkle tree and the Tx codec (tbd)
# Tx merkle tree is a Merkle tree built from the set of Txs at the given block
# Note: The transactions are hashed before being included in the Merkle tree, the leaves of the Merkle tree contain the hashes, not the transactions themselves.
type TxTreeCID &MerkleTreeNode
# CommitTreeCID is a CID link to the root node of a Commit merkle tree
# This CID is composed of the SHA_256 multihash of the root node in a Commit merkle tree and the Commit codec (tbd)
# Commit merkle tree is a Merkle tree built from a set of validator's commits
type CommitTreeCID &MerkleTreeNode
# BlockID contains two distinct Merkle roots of the block.
# The BlockID includes these two hashes, as well as the number of parts (ie. len(MakeParts(block)))
type BlockID struct {
Hash HeaderCID
PartSetHeader PartSetHeader
}
# HeaderCID is a CID link to the root node of a Header merkle tree
# This CID is composed of the SHA_256 multihash of the root node in the Header merkle tree and the Header codec (tbd)
# Header merkle tree is a Merklization of all of the fields in the header
type HeaderCID &MerkleTreeNode
*/ | x/anconprotocol/store/streaming/dag_cosmos_schema.go | 0.507324 | 0.510192 | dag_cosmos_schema.go | starcoder |
package mesh
import (
"github.com/go-gl/mathgl/mgl32"
)
// Mesh stores indexed geometry as flat per-attribute arrays.
type Mesh struct {
	Indices   []uint32  // element indices into the vertex attribute arrays
	Positions []float32 // vertex positions packed as x,y,z triples
	Colors    []float32 // vertex colors packed as r,g,b,a quadruples
	Coords    []float32 // texture coordinates (not populated in this file)
	Normals   []float32 // vertex normals (not populated in this file)
	Target    []int32   // per-vertex target data; semantics not evident from this file
}
// NewMesh returns an empty Mesh with all attribute slices left nil.
func NewMesh() *Mesh {
	m := Mesh{}
	return &m
}
// FromGob loads a Mesh previously serialized with ToGob from the gob file at
// path. It panics if the file cannot be read or decoded (readGob is defined
// elsewhere in this package).
func FromGob(path string) (m *Mesh) {
	m = NewMesh()
	err := readGob(path, m)
	if err != nil {
		panic(err)
	}
	return
}

// ToGob serializes m to the gob file at path, panicking on any write or
// encode error (writeGob is defined elsewhere in this package).
func ToGob(m *Mesh, path string) {
	err := writeGob(path, *m)
	if err != nil {
		panic(err)
	}
}
// Scale uniformly multiplies every position component by f and returns the
// mesh to allow call chaining.
func (m *Mesh) Scale(f float32) *Mesh {
	for i := 0; i < len(m.Positions); i++ {
		m.Positions[i] *= f
	}
	return m
}
// ScaleXYZ multiplies each vertex position component-wise by (x, y, z),
// returning the mesh to allow call chaining. Positions are packed x,y,z.
func (m *Mesh) ScaleXYZ(x, y, z float32) *Mesh {
	for base := 0; base < len(m.Positions); base += 3 {
		m.Positions[base] *= x
		m.Positions[base+1] *= y
		m.Positions[base+2] *= z
	}
	return m
}
// Rotate is currently a stub: it performs no rotation and returns the mesh
// unchanged. TODO(review): implement or remove.
func (m *Mesh) Rotate() *Mesh {
	return m
}
// GenerateBoundingBoxMeshSolid builds a solid, triangulated axis-aligned
// bounding box around this mesh's vertex positions. Each of the six faces
// gets four dedicated vertices so it can carry a flat per-face color.
func (m *Mesh) GenerateBoundingBoxMeshSolid() *Mesh {
	// Delegate to MinMaxPositions instead of repeating the min/max scan
	// that was previously duplicated in three places in this file.
	min, max := m.MinMaxPositions()
	minx, miny, minz := min.X(), min.Y(), min.Z()
	maxx, maxy, maxz := max.X(), max.Y(), max.Z()

	// Six quads in order: x-min, x-max, y-min, z-min, z-max, y-max;
	// four corners each (same vertex order as the original literal).
	positions := []float32{
		minx, miny, minz,
		minx, miny, maxz,
		minx, maxy, minz,
		minx, maxy, maxz,
		maxx, miny, minz,
		maxx, miny, maxz,
		maxx, maxy, minz,
		maxx, maxy, maxz,
		minx, miny, minz,
		minx, miny, maxz,
		maxx, miny, minz,
		maxx, miny, maxz,
		minx, miny, minz,
		minx, maxy, minz,
		maxx, miny, minz,
		maxx, maxy, minz,
		minx, miny, maxz,
		maxx, miny, maxz,
		minx, maxy, maxz,
		maxx, maxy, maxz,
		minx, maxy, minz,
		maxx, maxy, minz,
		minx, maxy, maxz,
		maxx, maxy, maxz,
	}

	// Two triangles per face: (0,1,2) and (2,1,3) relative to the face base.
	indices := make([]uint32, 0, 6*6)
	for face := uint32(0); face < 6; face++ {
		base := face * 4
		indices = append(indices, base, base+1, base+2, base+2, base+1, base+3)
	}

	// One flat RGBA color per face, replicated across its four vertices.
	faceColors := [6][4]float32{
		{0.1, 0.1, 0.7, 1.0},
		{0.5, 0.1, 0.1, 1.0},
		{0.1, 0.5, 0.1, 1.0},
		{0.7, 0.1, 0.7, 1.0},
		{0.5, 0.5, 0.1, 1.0},
		{0.1, 0.5, 0.5, 1.0},
	}
	colors := make([]float32, 0, 6*4*4)
	for _, c := range faceColors {
		for v := 0; v < 4; v++ {
			colors = append(colors, c[0], c[1], c[2], c[3])
		}
	}

	return &Mesh{
		Positions: positions,
		Indices:   indices,
		Colors:    colors,
	}
}
// GenerateBoundingBoxMeshWireframe builds a wireframe (line-list)
// axis-aligned bounding box around this mesh's vertex positions: eight
// corner vertices and twelve edges, all in a single uniform color.
func (m *Mesh) GenerateBoundingBoxMeshWireframe() *Mesh {
	// Delegate to MinMaxPositions instead of repeating the min/max scan.
	min, max := m.MinMaxPositions()
	minx, miny, minz := min.X(), min.Y(), min.Z()
	maxx, maxy, maxz := max.X(), max.Y(), max.Z()

	// The eight corners: z-min quad first, then the z-max quad.
	positions := []float32{
		minx, miny, minz,
		maxx, miny, minz,
		maxx, maxy, minz,
		minx, maxy, minz,
		minx, miny, maxz,
		maxx, miny, maxz,
		maxx, maxy, maxz,
		minx, maxy, maxz,
	}

	// Twelve edges as index pairs: z-min quad, z-max quad, connecting struts.
	indices := []uint32{
		0, 1, 1, 2, 2, 3, 3, 0,
		4, 5, 5, 6, 6, 7, 7, 4,
		0, 4, 1, 5, 2, 6, 3, 7,
	}

	// The same RGBA color for every corner.
	colors := make([]float32, 0, 8*4)
	for v := 0; v < 8; v++ {
		colors = append(colors, 0.1, 0.1, 0.7, 1.0)
	}

	return &Mesh{
		Positions: positions,
		Indices:   indices,
		Colors:    colors,
	}
}
// MinMaxPositions returns the component-wise minimum and maximum corners of
// the axis-aligned bounding box of m's vertex positions. An empty (or
// malformed, fewer than 3 floats) position array yields two zero vectors.
//
// Fix: bounds are now seeded from the first vertex instead of from zero, so
// a mesh lying entirely on one side of the origin no longer has its box
// incorrectly stretched to include (0, 0, 0).
func (m *Mesh) MinMaxPositions() (min, max mgl32.Vec3) {
	if len(m.Positions) < 3 {
		return
	}
	minx, miny, minz := m.Positions[0], m.Positions[1], m.Positions[2]
	maxx, maxy, maxz := minx, miny, minz
	for i := 3; i+2 < len(m.Positions); i += 3 {
		x, y, z := m.Positions[i], m.Positions[i+1], m.Positions[i+2]
		if x < minx {
			minx = x
		} else if x > maxx {
			maxx = x
		}
		if y < miny {
			miny = y
		} else if y > maxy {
			maxy = y
		}
		if z < minz {
			minz = z
		} else if z > maxz {
			maxz = z
		}
	}
	return mgl32.Vec3{minx, miny, minz}, mgl32.Vec3{maxx, maxy, maxz}
} | pkg/mesh/mesh.go | 0.50415 | 0.4133 | mesh.go | starcoder |
package deep
import (
"math"
)
// GetLoss maps a LossType onto its implementation. Unknown values fall back
// to cross-entropy, matching the original switch's default.
func GetLoss(loss LossType) Loss {
	if loss == LossMeanSquared {
		return MeanSquared{}
	}
	if loss == LossBinaryCrossEntropy {
		return BinaryCrossEntropy{}
	}
	return CrossEntropy{}
}
// LossType identifies a loss function.
type LossType int

const (
	// LossNone signifies unspecified loss
	LossNone LossType = 0
	// LossCrossEntropy is cross entropy loss
	LossCrossEntropy LossType = 1
	// LossBinaryCrossEntropy is the special case of binary cross entropy loss
	LossBinaryCrossEntropy LossType = 2
	// LossMeanSquared is MSE
	LossMeanSquared LossType = 3
)

// String returns a short human-readable tag for the loss type, or "N/A"
// when the value is unknown.
func (l LossType) String() string {
	names := map[LossType]string{
		LossCrossEntropy:       "CE",
		LossBinaryCrossEntropy: "BinCE",
		LossMeanSquared:        "MSE",
	}
	if name, ok := names[l]; ok {
		return name
	}
	return "N/A"
}
// Loss is satisfied by loss functions.
type Loss interface {
	// F computes the aggregate loss between batches of estimates and ideals.
	F(estimate, ideal [][]float64) float64
	// Df computes the loss derivative for a single output, given the
	// estimate, the ideal, and an activation term (presumably the
	// activation derivative — see MeanSquared.Df; confirm).
	Df(estimate, ideal, activation float64) float64
}
// CrossEntropy is CE loss.
type CrossEntropy struct{}

// F returns the mean cross-entropy over the batch:
// -(1/N) * sum_i sum_j ideal[i][j] * log(estimate[i][j]).
// Fix: a small epsilon now guards the logarithm against estimates of
// exactly 0, which previously produced Inf/NaN losses; this matches the
// epsilon already used by BinaryCrossEntropy.F.
func (l CrossEntropy) F(estimate, ideal [][]float64) float64 {
	const epsilon = 1e-16
	var sum float64
	for i := range estimate {
		ce := 0.0
		for j := range estimate[i] {
			ce += ideal[i][j] * math.Log(estimate[i][j]+epsilon)
		}
		sum -= ce
	}
	return sum / float64(len(estimate))
}

// Df is CE'(...): the derivative of cross-entropy combined with a softmax
// output layer simplifies to (estimate - ideal); activation is unused.
func (l CrossEntropy) Df(estimate, ideal, activation float64) float64 {
	return estimate - ideal
}
// BinaryCrossEntropy is binary CE loss.
type BinaryCrossEntropy struct{}

// F returns the mean binary cross-entropy over the batch:
// -(1/N) * sum [ y*log(p) + (1-y)*log(1-p) ], with a small epsilon guarding
// both logarithms.
// Fix: the (1-y)*log(1-p) term was previously nested inside the
// multiplication by y, so samples with ideal == 0 contributed nothing and
// the loss was wrong whenever ideal != 1.
func (l BinaryCrossEntropy) F(estimate, ideal [][]float64) float64 {
	const epsilon = 1e-16
	var sum float64
	for i := range estimate {
		ce := 0.0
		for j := range estimate[i] {
			y, p := ideal[i][j], estimate[i][j]
			ce += y*math.Log(p+epsilon) + (1.0-y)*math.Log(1.0-p+epsilon)
		}
		sum -= ce
	}
	return sum / float64(len(estimate))
}

// Df is CE'(...): with a sigmoid output layer the derivative simplifies to
// (estimate - ideal); activation is unused.
func (l BinaryCrossEntropy) Df(estimate, ideal, activation float64) float64 {
	return estimate - ideal
}
// MeanSquared is MSE loss.
type MeanSquared struct{}

// F returns the mean of the squared differences over every element of the
// batch (rows are assumed to all have the width of the first row).
func (l MeanSquared) F(estimate, ideal [][]float64) float64 {
	var total float64
	for i := range estimate {
		for j := range estimate[i] {
			total += math.Pow(estimate[i][j]-ideal[i][j], 2)
		}
	}
	return total / float64(len(estimate)*len(estimate[0]))
}
// Df is MSE'(...): the error term (estimate - ideal) scaled by the
// activation term supplied by the caller.
func (l MeanSquared) Df(estimate, ideal, activation float64) float64 {
	return activation * (estimate - ideal)
} | plugins/data/learn/ml-libs-godeep/loss.go | 0.883695 | 0.712732 | loss.go | starcoder |
package curves
import (
"github.com/elainaaa/gosu-pp/math/vector"
"sync"
)
// BezierQuantization is the flatness tolerance used by IsFlatEnough; the
// squared form below lets the flatness test avoid a sqrt per point.
const BezierQuantization = 0.5
const BezierQuantizationSq = BezierQuantization * BezierQuantization
// ItemStack is a mutex-guarded LIFO stack of control-point buffers.
type ItemStack struct {
	items [][]vector.Vector2f
	lock  sync.RWMutex
}

// NewStack returns an empty, ready-to-use stack.
func NewStack() *ItemStack {
	return &ItemStack{items: make([][]vector.Vector2f, 0)}
}

// Push places t on top of the stack.
func (s *ItemStack) Push(t []vector.Vector2f) {
	s.lock.Lock()
	defer s.lock.Unlock()
	s.items = append(s.items, t)
}

// Pop removes and returns the top of the stack. It must not be called on an
// empty stack (the index would underflow).
func (s *ItemStack) Pop() []vector.Vector2f {
	s.lock.Lock()
	defer s.lock.Unlock()
	top := len(s.items) - 1
	item := s.items[top]
	s.items = s.items[:top]
	return item
}

// Count returns the number of items currently stored (read without locking,
// as in the original).
func (s *ItemStack) Count() int {
	return len(s.items)
}
// BezierApproximator flattens one Bézier curve, given by its control points,
// into a piecewise-linear approximation via adaptive subdivision.
type BezierApproximator struct {
	// count is the number of control points (curve degree + 1).
	count         int
	controlPoints []vector.Vector2f
	// Scratch buffers reused across subdivisions to avoid per-step
	// allocations: buffer1 has len count, buffer2 has len count*2-1.
	subdivisionBuffer1 []vector.Vector2f
	subdivisionBuffer2 []vector.Vector2f
}

// NewBezierApproximator prepares an approximator for the given control
// points, allocating both scratch buffers up front.
func NewBezierApproximator(controlPoints []vector.Vector2f) *BezierApproximator {
	return &BezierApproximator{count: len(controlPoints), controlPoints: controlPoints, subdivisionBuffer1: make([]vector.Vector2f, len(controlPoints)), subdivisionBuffer2: make([]vector.Vector2f, len(controlPoints)*2-1)}
}
// IsFlatEnough reports whether the control polygon is close enough to a
// straight line to be emitted directly: at every interior point the squared
// second difference must stay within the Bézier quantization tolerance.
func IsFlatEnough(controlPoints []vector.Vector2f) bool {
	for i := 1; i+1 < len(controlPoints); i++ {
		secondDiff := controlPoints[i-1].Sub(controlPoints[i].Scl(2)).Add(controlPoints[i+1])
		if secondDiff.LenSq() > BezierQuantizationSq {
			return false
		}
	}
	return true
}
// Subdivide performs one De Casteljau subdivision step on controlPoints,
// writing the left half-curve's control points into l and the right half's
// into r. Both outputs must have length >= approximator.count.
//
// NOTE(review): midpoints aliases subdivisionBuffer1, and Approximate passes
// that same buffer as r; the r values then come from the in-place decay of
// midpoints across iterations. Confirm this aliasing before refactoring the
// buffer usage.
func (approximator *BezierApproximator) Subdivide(controlPoints, l, r []vector.Vector2f) {
	midpoints := approximator.subdivisionBuffer1
	for i := 0; i < approximator.count; i++ {
		midpoints[i] = controlPoints[i]
	}
	for i := 0; i < approximator.count; i++ {
		l[i] = midpoints[0]
		r[approximator.count-i-1] = midpoints[approximator.count-i-1]
		// Collapse midpoints in place: each entry becomes the average of
		// itself and its right neighbor.
		for j := 0; j < approximator.count-i-1; j++ {
			midpoints[j] = (midpoints[j].Add(midpoints[j+1])).Scl(0.5)
		}
	}
}
// Approximate appends a piecewise-linear approximation of the (sufficiently
// flat) curve controlPoints to output, emitting one point per control point
// via an extension of De Casteljau's algorithm.
// NOTE(review): l and r alias the approximator's scratch buffers; see
// Subdivide for the aliasing subtleties.
func (approximator *BezierApproximator) Approximate(controlPoints []vector.Vector2f, output *[]vector.Vector2f) {
	l := approximator.subdivisionBuffer2
	r := approximator.subdivisionBuffer1
	approximator.Subdivide(controlPoints, l, r)
	// Stitch the right half-curve after the left, forming a single
	// 2*count-1 point polyline in l.
	for i := 0; i < approximator.count-1; i++ {
		l[approximator.count+i] = r[i+1]
	}
	*output = append(*output, controlPoints[0])
	// Interior points are smoothed as weighted averages of triples in l.
	for i := 1; i < approximator.count-1; i++ {
		index := 2 * i
		p := (l[index-1].Add(l[index].Scl(2.0)).Add(l[index+1])).Scl(0.25)
		*output = append(*output, p)
	}
}
// CreateBezier returns a piecewise-linear approximation of the whole curve.
// It adaptively subdivides the control polygon — depth-first via an explicit
// stack rather than recursion — until every piece passes IsFlatEnough, then
// emits the flattened pieces in curve order, ending on the final control
// point.
func (approximator *BezierApproximator) CreateBezier() []vector.Vector2f {
	output := make([]vector.Vector2f, 0)

	if approximator.count == 0 {
		return output
	}

	toFlatten := NewStack()
	freeBuffers := NewStack()

	// "toFlatten" contains all the curves which are not yet approximated well enough.
	// We use a stack to emulate recursion without the risk of running into a stack overflow.
	// (More specifically, we iteratively and adaptively refine our curve with a
	// <a href="https://en.wikipedia.org/wiki/Depth-first_search">Depth-first search</a>
	// over the tree resulting from the subdivisions we make.)
	nCP := make([]vector.Vector2f, len(approximator.controlPoints))
	copy(nCP, approximator.controlPoints)
	toFlatten.Push(nCP)

	leftChild := approximator.subdivisionBuffer2

	for toFlatten.Count() > 0 {
		parent := toFlatten.Pop()
		if IsFlatEnough(parent) {
			// If the control points we currently operate on are sufficiently "flat", we use
			// an extension to De Casteljau's algorithm to obtain a piecewise-linear approximation
			// of the bezier curve represented by our control points, consisting of the same amount
			// of points as there are control points.
			approximator.Approximate(parent, &output)
			freeBuffers.Push(parent)
			continue
		}

		// If we do not yet have a sufficiently "flat" (in other words, detailed) approximation we keep
		// subdividing the curve we are currently operating on.
		var rightChild []vector.Vector2f = nil
		if freeBuffers.Count() > 0 {
			rightChild = freeBuffers.Pop()
		} else {
			rightChild = make([]vector.Vector2f, approximator.count)
		}
		approximator.Subdivide(parent, leftChild, rightChild)

		// We re-use the buffer of the parent for one of the children, so that we save one allocation per iteration.
		for i := 0; i < approximator.count; i++ {
			parent[i] = leftChild[i]
		}

		// Push the right child first so the left (earlier-in-curve) half is
		// processed next, keeping output points in curve order.
		toFlatten.Push(rightChild)
		toFlatten.Push(parent)
	}

	output = append(output, approximator.controlPoints[approximator.count-1])
	return output
} | math/curves/bezierapproximator.go | 0.759582 | 0.481881 | bezierapproximator.go | starcoder |
package processor
import (
"fmt"
"time"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
"github.com/Jeffail/benthos/v3/lib/x/docs"
jmespath "github.com/jmespath/go-jmespath"
"github.com/opentracing/opentracing-go"
)
//------------------------------------------------------------------------------
// init registers the jmespath processor constructor and its user-facing
// documentation with the package-level Constructors registry.
func init() {
	Constructors[TypeJMESPath] = TypeSpec{
		constructor: NewJMESPath,
		Summary: `
Parses a message as a JSON document and attempts to apply a JMESPath expression
to it, replacing the contents of the part with the result. Please refer to the
[JMESPath website](http://jmespath.org/) for information and tutorials regarding
the syntax of expressions.`,
		Description: `
For example, with the following config:

` + "``` yaml" + `
jmespath:
  query: locations[?state == 'WA'].name | sort(@) | {Cities: join(', ', @)}
` + "```" + `

If the initial contents of a message were:

` + "``` json" + `
{
  "locations": [
    {"name": "Seattle", "state": "WA"},
    {"name": "New York", "state": "NY"},
    {"name": "Bellevue", "state": "WA"},
    {"name": "Olympia", "state": "WA"}
  ]
}
` + "```" + `

Then the resulting contents would be:

` + "``` json" + `
{"Cities": "Bellevue, Olympia, Seattle"}
` + "```" + `

It is possible to create boolean queries with JMESPath, in order to filter
messages with boolean queries please instead use the
` + "[`jmespath`](/docs/components/conditions/jmespath)" + ` condition.`,
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon("query", "The JMESPath query to apply to messages."),
			partsFieldSpec,
		},
	}
}
//------------------------------------------------------------------------------
// JMESPathConfig contains configuration fields for the JMESPath processor.
type JMESPathConfig struct {
	// Parts lists the message part indexes the query is applied to.
	// NOTE(review): an empty list appears to target every part via
	// IteratePartsWithSpan — confirm against that helper's contract.
	Parts []int `json:"parts" yaml:"parts"`
	// Query is the JMESPath expression applied to each targeted part.
	Query string `json:"query" yaml:"query"`
}

// NewJMESPathConfig returns a JMESPathConfig with default values:
// no explicit parts and an empty query.
func NewJMESPathConfig() JMESPathConfig {
	return JMESPathConfig{
		Parts: []int{},
		Query: "",
	}
}
//------------------------------------------------------------------------------
// JMESPath is a processor that executes JMESPath queries on a message part and
// replaces the contents with the result.
type JMESPath struct {
	// parts are the message part indexes this processor targets.
	parts []int
	// query is the compiled JMESPath expression.
	query *jmespath.JMESPath
	conf  Config
	log   log.Modular
	stats metrics.Type
	// Metric counters, resolved once at construction time.
	mCount     metrics.StatCounter
	mErrJSONP  metrics.StatCounter
	mErrJMES   metrics.StatCounter
	mErrJSONS  metrics.StatCounter
	mErr       metrics.StatCounter
	mSent      metrics.StatCounter
	mBatchSent metrics.StatCounter
}
// NewJMESPath returns a JMESPath processor, compiling the configured query
// up front and pre-resolving every metric counter it reports to.
func NewJMESPath(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	compiled, err := jmespath.Compile(conf.JMESPath.Query)
	if err != nil {
		return nil, fmt.Errorf("failed to compile JMESPath query: %v", err)
	}
	proc := JMESPath{
		parts: conf.JMESPath.Parts,
		query: compiled,
		conf:  conf,
		log:   log,
		stats: stats,

		mCount:     stats.GetCounter("count"),
		mErrJSONP:  stats.GetCounter("error.json_parse"),
		mErrJMES:   stats.GetCounter("error.jmespath_search"),
		mErrJSONS:  stats.GetCounter("error.json_set"),
		mErr:       stats.GetCounter("error"),
		mSent:      stats.GetCounter("sent"),
		mBatchSent: stats.GetCounter("batch.sent"),
	}
	return &proc, nil
}
//------------------------------------------------------------------------------
// safeSearch evaluates the compiled JMESPath expression against part,
// converting any panic raised inside the jmespath library into an error.
func safeSearch(part interface{}, j *jmespath.JMESPath) (res interface{}, err error) {
	defer func() {
		if recovered := recover(); recovered != nil {
			err = fmt.Errorf("jmespath panic: %v", recovered)
		}
	}()
	res, err = j.Search(part)
	return res, err
}
// ProcessMessage applies the processor to a message, either creating >0
// resulting messages or a response to be sent back to the message source.
// Each targeted part of a copy of the message is parsed as JSON, queried
// with the compiled JMESPath expression, and overwritten with the result;
// failures are counted per stage and flagged on the offending part.
func (p *JMESPath) ProcessMessage(msg types.Message) ([]types.Message, types.Response) {
	p.mCount.Incr(1)
	newMsg := msg.Copy()

	// proc transforms one part of newMsg in place; a non-nil error marks
	// the part as failed for IteratePartsWithSpan.
	proc := func(index int, span opentracing.Span, part types.Part) error {
		jsonPart, err := part.JSON()
		if err != nil {
			p.mErrJSONP.Incr(1)
			p.mErr.Incr(1)
			p.log.Debugf("Failed to parse part into json: %v\n", err)
			return err
		}

		var result interface{}
		if result, err = safeSearch(jsonPart, p.query); err != nil {
			p.mErrJMES.Incr(1)
			p.mErr.Incr(1)
			p.log.Debugf("Failed to search json: %v\n", err)
			return err
		}

		if err = newMsg.Get(index).SetJSON(result); err != nil {
			p.mErrJSONS.Incr(1)
			p.mErr.Incr(1)
			p.log.Debugf("Failed to convert jmespath result into part: %v\n", err)
			return err
		}
		return nil
	}

	IteratePartsWithSpan(TypeJMESPath, p.parts, newMsg, proc)

	msgs := [1]types.Message{newMsg}

	p.mBatchSent.Incr(1)
	p.mSent.Incr(int64(newMsg.Len()))
	return msgs[:], nil
}
// CloseAsync shuts down the processor and stops processing requests.
// The processor holds no background resources, so this is a no-op.
func (p *JMESPath) CloseAsync() {
}

// WaitForClose blocks until the processor has closed down.
// Since CloseAsync is a no-op, this returns nil immediately.
func (p *JMESPath) WaitForClose(timeout time.Duration) error {
	return nil
}
//------------------------------------------------------------------------------ | lib/processor/jmespath.go | 0.690872 | 0.801198 | jmespath.go | starcoder |
package funk
import (
"bytes"
"math/rand"
"reflect"
)
// numericZeros enumerates the zero value of each built-in numeric type;
// IsEmpty compares against these so numeric zeros count as "empty".
var numericZeros = []interface{}{
	int(0),
	int8(0),
	int16(0),
	int32(0),
	int64(0),
	uint(0),
	uint8(0),
	uint16(0),
	uint32(0),
	uint64(0),
	float32(0),
	float64(0),
}
// ToFloat64 converts any built-in numeric value to float64. The boolean
// result reports whether the input was a supported numeric type.
//
// Generalized: plain uint was previously missing even though uint8..uint64
// were handled; it is now accepted too (previously it returned (0, false)).
func ToFloat64(x interface{}) (float64, bool) {
	switch v := x.(type) {
	case uint:
		return float64(v), true
	case uint8:
		return float64(v), true
	case uint16:
		return float64(v), true
	case uint32:
		return float64(v), true
	case uint64:
		return float64(v), true
	case int:
		return float64(v), true
	case int8:
		return float64(v), true
	case int16:
		return float64(v), true
	case int32:
		return float64(v), true
	case int64:
		return float64(v), true
	case float32:
		return float64(v), true
	case float64:
		return v, true
	default:
		return 0, false
	}
}
// PtrOf makes a copy of the given interface value and returns a pointer to
// the copy. When itf is already a pointer, that pointer is returned as-is
// so no double indirection is introduced.
func PtrOf(itf interface{}) interface{} {
	typ := reflect.TypeOf(itf)
	clone := reflect.New(typ)
	clone.Elem().Set(reflect.ValueOf(itf))
	if typ.Kind() != reflect.Ptr {
		return clone.Interface()
	}
	// Unwrap one level so pointer inputs stay single pointers.
	return clone.Elem().Interface()
}
// IsFunc reports whether in is a function, optionally constrained to have
// an input arity listed in numIns and an output arity listed in numOut
// (an empty slice leaves that arity unconstrained).
func IsFunc(in interface{}, numIns []int, numOut []int) bool {
	funcType := reflect.TypeOf(in)
	if funcType.Kind() != reflect.Func {
		return false
	}
	if len(numIns) > 0 && !ContainsInt(numIns, funcType.NumIn()) {
		return false
	}
	if len(numOut) > 0 && !ContainsInt(numOut, funcType.NumOut()) {
		return false
	}
	return true
}
// IsEqual returns whether the two objects are equal, using bytes.Equal for
// []byte operands and reflect.DeepEqual for everything else. Nil interfaces
// are only equal to each other.
//
// Fix: previously a nil []byte compared equal to ANY []byte — including
// non-empty ones — because a shortcut returned true when either typed slice
// was nil. bytes.Equal already treats nil and empty correctly, so the
// shortcut is removed.
func IsEqual(expected interface{}, actual interface{}) bool {
	if expected == nil || actual == nil {
		return expected == actual
	}

	if exp, ok := expected.([]byte); ok {
		act, ok := actual.([]byte)
		if !ok {
			return false
		}
		return bytes.Equal(exp, act)
	}

	return reflect.DeepEqual(expected, actual)
}
// IsType reports whether expected and actual share the same dynamic type.
func IsType(expected interface{}, actual interface{}) bool {
	expType := reflect.TypeOf(expected)
	actType := reflect.TypeOf(actual)
	return IsEqual(expType, actType)
}
// Equal returns if the two objects are equal; it is an alias of IsEqual.
func Equal(expected interface{}, actual interface{}) bool {
	return IsEqual(expected, actual)
}

// NotEqual returns if the two objects are not equal; the negation of IsEqual.
func NotEqual(expected interface{}, actual interface{}) bool {
	return !IsEqual(expected, actual)
}
// IsIteratee reports whether in can be iterated over: an array, a slice or
// a map. A nil interface is never an iteratee.
func IsIteratee(in interface{}) bool {
	if in == nil {
		return false
	}
	kind := reflect.TypeOf(in).Kind()
	return kind == reflect.Array || kind == reflect.Slice || kind == reflect.Map
}
// IsCollection returns whether in is an indexed collection, i.e. an array
// or a slice (maps are iteratees but not collections).
func IsCollection(in interface{}) bool {
	if in == nil {
		// Mirror IsIteratee's guard: reflect.TypeOf(nil) is a nil Type
		// and calling Kind() on it panics.
		return false
	}
	switch reflect.TypeOf(in).Kind() {
	case reflect.Array, reflect.Slice:
		return true
	}
	return false
}
// SliceOf wraps in into a one-element slice of its own element type,
// returned as an interface{} (e.g. SliceOf(42) yields []int{42}).
func SliceOf(in interface{}) interface{} {
	elem := reflect.ValueOf(in)
	empty := reflect.MakeSlice(reflect.SliceOf(elem.Type()), 0, 0)
	return reflect.Append(empty, elem).Interface()
}
// Any reports whether at least one of the given objects is non-empty
// (per IsEmpty). With no arguments it returns false, matching the
// documented "empty iterable" behavior.
func Any(objs ...interface{}) bool {
	for _, obj := range objs {
		if !IsEmpty(obj) {
			return true
		}
	}
	return false
}
// All reports whether none of the given objects is empty (per IsEmpty).
// With no arguments it returns true (vacuous truth).
func All(objs ...interface{}) bool {
	for _, obj := range objs {
		if IsEmpty(obj) {
			return false
		}
	}
	return true
}
// IsEmpty returns if the object is considered as empty or not.
//
// Empty means: a nil interface, the untyped-compatible values "" and
// false, any typed numeric zero listed in the package-level numericZeros
// slice, a zero-length map/slice/channel, a struct deep-equal to its zero
// value, or a nil pointer / pointer whose ultimate target is zero.
// NOTE(review): other kinds fall through to false even when zero — e.g. a
// named string type holding "" does not match the obj == "" interface
// comparison and Kind() == String is not handled below.
func IsEmpty(obj interface{}) bool {
	// Fast path: interface nil and the two common untyped empties.
	if obj == nil || obj == "" || obj == false {
		return true
	}
	// numericZeros holds typed zero values (int(0), uint8(0), ...);
	// interface equality matches both type and value.
	for _, v := range numericZeros {
		if obj == v {
			return true
		}
	}
	objValue := reflect.ValueOf(obj)
	switch objValue.Kind() {
	case reflect.Map:
		fallthrough
	case reflect.Slice, reflect.Chan:
		// Length-bearing kinds: empty means zero length.
		return objValue.Len() == 0
	case reflect.Struct:
		return reflect.DeepEqual(obj, ZeroOf(obj))
	case reflect.Ptr:
		if objValue.IsNil() {
			return true
		}
		// Follow the pointer chain (redirectValue is a package-level
		// helper) and compare the pointee against its zero value.
		obj = redirectValue(objValue).Interface()
		return reflect.DeepEqual(obj, ZeroOf(obj))
	}
	return false
}
// IsZero returns if the object is considered as zero value.
//
// nil, "" and false are zero; typed numeric zeros are matched against the
// package-level numericZeros list as a cheap interface comparison; every
// other value is compared with reflect.DeepEqual against the zero value
// of its own dynamic type.
func IsZero(obj interface{}) bool {
	if obj == nil || obj == "" || obj == false {
		return true
	}
	// Fast path before falling back to reflection.
	for _, v := range numericZeros {
		if obj == v {
			return true
		}
	}
	return reflect.DeepEqual(obj, ZeroOf(obj))
}
// NotEmpty returns if the object is considered as non-empty or not.
// It is the negation of IsEmpty.
func NotEmpty(obj interface{}) bool {
	return !IsEmpty(obj)
}
// ZeroOf returns the zero value of in's dynamic type, or nil when in is
// itself a nil interface (whose Type is nil).
func ZeroOf(in interface{}) interface{} {
	t := reflect.TypeOf(in)
	if t == nil {
		return nil
	}
	return reflect.Zero(t).Interface()
}
// RandomInt returns a uniformly random int in the half-open interval
// [min, max). When max <= min it returns min instead of panicking:
// rand.Intn panics for non-positive arguments, so the unguarded original
// crashed on equal or inverted bounds.
func RandomInt(min, max int) int {
	if max <= min {
		return min
	}
	return min + rand.Intn(max-min)
}
// Shard splits the first width*depth bytes of str into depth chunks of
// width bytes each. The final element is the remainder of the string when
// restOnly is true, or the whole original string otherwise. Panics when
// len(str) < width*depth (out-of-range slicing), like the original.
func Shard(str string, width int, depth int, restOnly bool) []string {
	var parts []string
	for level := 0; level < depth; level++ {
		parts = append(parts, str[level*width:(level+1)*width])
	}
	if restOnly {
		return append(parts, str[depth*width:])
	}
	return append(parts, str)
}
// defaultLetters is the alphanumeric alphabet RandomString draws from
// when no custom character set is supplied.
var defaultLetters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
// RandomString returns a random string of length n (in runes) drawn from
// allowedChars[0] when provided, or from defaultLetters otherwise.
func RandomString(n int, allowedChars ...[]rune) string {
	letters := defaultLetters
	if len(allowedChars) > 0 {
		letters = allowedChars[0]
	}
	out := make([]rune, n)
	for i := range out {
		out[i] = letters[rand.Intn(len(letters))]
	}
	return string(out)
}
package gotrader
import (
"github.com/cornelk/hashmap"
"go.uber.org/atomic"
)
// Side represents the type of position, which can be short or long.
type Side int
const (
	// Short represents a selling position.
	Short Side = iota
	// Long represents a buying position.
	Long
)
// String implements fmt.Stringer. Values other than Short and Long panic
// with an out-of-range index, exactly as the named-array lookup implies.
func (s Side) String() string {
	return [...]string{"SHORT", "LONG"}[s]
}
// Position represents the total exposure in a single side of an instrument.
// Is the aggregation of all the trades of that side.
//
// NOTE(review): the float64 aggregate fields below are plain (non-atomic)
// and are rewritten by the calculate* methods; no lock is visible here —
// confirm callers provide external synchronization.
type Position struct {
	// side is Short or Long, fixed at construction.
	side Side
	// trades maps trade id -> *Trade (concurrent hashmap).
	trades *hashmap.HashMap
	// tradesTimeOrder keeps trade ids in open-time order.
	tradesTimeOrder *sortedTrades
	// tradesNumber counts currently open trades.
	tradesNumber *atomic.Int32
	// units is the total unit count across all open trades.
	units *atomic.Int32
	// Aggregates, refreshed by calculateUnrealized/calculateMarginUsed
	// and incrementally adjusted by openTrade/closeTrade.
	unrealizedNetProfit float64
	unrealizedEffectiveProfit float64
	marginUsed float64
	chargedFees float64
	averagePrice float64
}
/**************************
*
* Internal Methods
*
***************************/
// newPosition builds an empty Position for the given side with all trade
// containers and counters initialized to zero; aggregate floats start at
// their zero values.
func newPosition(side Side) *Position {
	return &Position{
		side: side,
		trades: &hashmap.HashMap{},
		tradesTimeOrder: newSortedTrades(),
		tradesNumber: atomic.NewInt32(0),
		units: atomic.NewInt32(0),
	}
}
// openTrade registers a newly opened trade: records it in both containers,
// folds its open price into the volume-weighted average, adds its units
// and accumulates its margin.
func (p *Position) openTrade(trade *Trade) {
	p.tradesTimeOrder.Append(trade.id)
	p.trades.Set(trade.id, trade)
	p.tradesNumber.Inc()
	// Weighted average uses the unit count *before* units.Add below; the
	// ordering of these two statements matters.
	// NOTE(review): assumes trade.units > 0 — a zero total would divide
	// by zero. Confirm upstream validation.
	p.averagePrice = (p.averagePrice*float64(p.units.Load()) + trade.openPrice*float64(trade.units)) /
		float64(p.units.Load()+trade.units)
	p.units.Add(trade.units)
	trade.calculateMarginUsed()
	p.marginUsed += trade.marginUsed
}
// closeTrade removes a trade from the position: drops it from both
// containers, re-derives the volume-weighted average open price of the
// remaining trades, subtracts the trade's units and releases its margin.
func (p *Position) closeTrade(trade *Trade) {
	p.tradesTimeOrder.Delete(trade.id)
	p.trades.Del(trade.id)
	p.tradesNumber.Dec()
	remaining := p.units.Load() - trade.units
	if remaining == 0 {
		// Closing the last trade: the weighted-average formula below
		// would divide by zero and set averagePrice to NaN until the
		// next calculateUnrealized. An empty position has no price.
		p.averagePrice = 0
	} else {
		p.averagePrice = (p.averagePrice*float64(p.units.Load()) - trade.openPrice*float64(trade.units)) /
			float64(remaining)
	}
	p.units.Sub(trade.units)
	trade.calculateMarginUsed()
	p.marginUsed -= trade.marginUsed
}
// calculateUnrealized walks every open trade and rebuilds the position's
// aggregates from scratch: net/effective unrealized profit, charged fees
// and the volume-weighted average open price.
func (p *Position) calculateUnrealized() {
	unrealizedNet := 0.0
	unrealizedEffective := 0.0
	averagePrice := 0.0 // recalculate to prevent possible cumulative errors
	totalUnits := 0.0
	chargedFees := 0.0
	for kv := range p.trades.Iter() {
		trade := kv.Value.(*Trade)
		// Refresh the per-trade figures before summing them.
		trade.calculateUnrealized()
		unrealizedNet += trade.unrealizedNetProfit
		unrealizedEffective += trade.unrealizedEffectiveProfit
		chargedFees += trade.chargedFees.Load()
		// Incremental weighted average; NOTE(review): a trade with zero
		// units on the first iteration would divide 0 by 0 — confirm
		// units are always positive.
		averagePrice = (averagePrice*totalUnits + trade.openPrice*float64(trade.units)) / (totalUnits + float64(trade.units))
		totalUnits += float64(trade.units)
	}
	// Publish the freshly computed aggregates (averagePrice stays 0 when
	// there are no trades).
	p.unrealizedNetProfit = unrealizedNet
	p.unrealizedEffectiveProfit = unrealizedEffective
	p.averagePrice = averagePrice
	p.chargedFees = chargedFees
}
// calculateMarginUsed rebuilds the position's total margin from scratch,
// refreshing each trade's own margin figure along the way.
func (p *Position) calculateMarginUsed() {
	total := 0.0
	for entry := range p.trades.Iter() {
		t := entry.Value.(*Trade)
		t.calculateMarginUsed()
		total += t.marginUsed
	}
	p.marginUsed = total
}
/**************************
*
* Accessible Methods
*
***************************/
// Side returns whether this position is Short or Long.
func (p *Position) Side() Side {
	return p.side
}
// TradeByOrder returns the trade at the given chronological (open-time)
// index, delegating the id lookup to Trade (which yields nil when the id
// is no longer present).
func (p *Position) TradeByOrder(index int) *Trade {
	return p.Trade(p.tradesTimeOrder.Get(index))
}
// TradesByAscendingOrder streams up to tradesNumber trades in ascending
// open-time order over an unbuffered channel, which is closed once the
// iteration is exhausted. Ids with no matching trade are skipped.
func (p *Position) TradesByAscendingOrder(tradesNumber int) <-chan *Trade {
	out := make(chan *Trade)
	go func() {
		defer close(out)
		for id := range p.tradesTimeOrder.AscendIter(tradesNumber) {
			if t, ok := p.trades.GetStringKey(id); ok {
				out <- t.(*Trade)
			}
		}
	}()
	return out
}
// TradesByDescendingOrder streams up to tradesNumber trades in descending
// open-time order over an unbuffered channel, which is closed once the
// iteration is exhausted. Ids with no matching trade are skipped.
func (p *Position) TradesByDescendingOrder(tradesNumber int) <-chan *Trade {
	out := make(chan *Trade)
	go func() {
		defer close(out)
		for id := range p.tradesTimeOrder.DescendIter(tradesNumber) {
			if t, ok := p.trades.GetStringKey(id); ok {
				out <- t.(*Trade)
			}
		}
	}()
	return out
}
// Trade looks up an open trade by its id, returning nil when absent.
func (p *Position) Trade(id string) *Trade {
	if t, ok := p.trades.GetStringKey(id); ok {
		return t.(*Trade)
	}
	return nil
}
// Trades streams every open trade (in hashmap iteration order) over an
// unbuffered channel, closing it when the iteration finishes.
func (p *Position) Trades() <-chan *Trade {
	out := make(chan *Trade)
	go func() {
		defer close(out)
		for kv := range p.trades.Iter() {
			out <- kv.Value.(*Trade)
		}
	}()
	return out
}
// TradesNumber returns the current count of open trades in this position.
func (p *Position) TradesNumber() int32 {
	return p.tradesNumber.Load()
}
// Units returns the total units held across all open trades.
func (p *Position) Units() int32 {
	return p.units.Load()
}
// UnrealizedNetProfit returns the net profit last aggregated by
// calculateUnrealized.
func (p *Position) UnrealizedNetProfit() float64 {
	return p.unrealizedNetProfit
}
// UnrealizedEffectiveProfit returns the effective profit last aggregated
// by calculateUnrealized.
func (p *Position) UnrealizedEffectiveProfit() float64 {
	return p.unrealizedEffectiveProfit
}
// MarginUsed returns the aggregate margin currently allocated to this
// position.
func (p *Position) MarginUsed() float64 {
	return p.marginUsed
}
// ChargedFees returns the total fees last aggregated by
// calculateUnrealized.
func (p *Position) ChargedFees() float64 {
	return p.chargedFees
}
// AveragePrice returns the volume-weighted average open price of the
// position's trades.
func (p *Position) AveragePrice() float64 {
	return p.averagePrice
}
package ents
import (
"fmt"
"strings"
"entgo.io/ent/dialect/sql"
"github.com/volatiletech/boilbench/ents/airport"
)
// Airport is the model entity for the Airport schema.
type Airport struct {
	// embedded ent config (driver, etc.); excluded from JSON output.
	config `json:"-"`
	// ID of the ent.
	ID int `json:"id,omitempty"`
	// Size holds the value of the "size" field.
	Size int `json:"size,omitempty"`
	// Edges holds the relations/edges for other nodes in the graph.
	// The values are being populated by the AirportQuery when eager-loading is set.
	Edges AirportEdges `json:"edges"`
}
// AirportEdges holds the relations/edges for other nodes in the graph.
type AirportEdges struct {
	// Jets holds the value of the jets edge.
	Jets []*Jet `json:"jets,omitempty"`
	// loadedTypes holds the information for reporting if a
	// type was loaded (or requested) in eager-loading or not.
	// Index 0 corresponds to the Jets edge (see JetsOrErr).
	loadedTypes [1]bool
}
// JetsOrErr returns the Jets value when the edge was eager-loaded, or a
// *NotLoadedError when it was not requested during loading.
func (e AirportEdges) JetsOrErr() ([]*Jet, error) {
	if !e.loadedTypes[0] {
		return nil, &NotLoadedError{edge: "jets"}
	}
	return e.Jets, nil
}
// scanValues returns the types for scanning values from sql.Rows.
// For each requested column it allocates a *sql.NullInt64 destination
// that database/sql can scan into; a column name outside the Airport
// schema yields an error.
func (*Airport) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case airport.FieldID, airport.FieldSize:
			values[i] = new(sql.NullInt64)
		default:
			return nil, fmt.Errorf("unexpected column %q for type Airport", columns[i])
		}
	}
	return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after
// scanning) to the Airport fields. values must be at least as long as
// columns, and each entry must be the *sql.NullInt64 allocated by
// scanValues.
func (a *Airport) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case airport.FieldID:
			value, ok := values[i].(*sql.NullInt64)
			if !ok {
				// Report the type of the actual scanned value. The typed
				// `value` is a nil *sql.NullInt64 on assertion failure,
				// so %T on it would always (misleadingly) print
				// *sql.NullInt64 — matching the FieldSize branch below.
				return fmt.Errorf("unexpected type %T for field id", values[i])
			}
			a.ID = int(value.Int64)
		case airport.FieldSize:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field size", values[i])
			} else if value.Valid {
				a.Size = int(value.Int64)
			}
		}
	}
	return nil
}
// QueryJets queries the "jets" edge of the Airport entity.
// It builds the query through a transient AirportClient sharing this
// entity's config.
func (a *Airport) QueryJets() *JetQuery {
	return (&AirportClient{config: a.config}).QueryJets(a)
}
// Update returns a builder for updating this Airport.
// Note that you need to call Airport.Unwrap() before calling this method if this Airport
// was returned from a transaction, and the transaction was committed or rolled back.
func (a *Airport) Update() *AirportUpdateOne {
	return (&AirportClient{config: a.config}).UpdateOne(a)
}
// Unwrap unwraps the Airport entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
// It panics when the entity's driver is not a transactional one.
func (a *Airport) Unwrap() *Airport {
	tx, ok := a.config.driver.(*txDriver)
	if !ok {
		panic("ents: Airport is not a transactional entity")
	}
	a.config.driver = tx.drv
	return a
}
// String implements the fmt.Stringer.
// Output has the form "Airport(id=<ID>, size=<Size>)", byte-identical to
// the previous builder-based implementation.
func (a *Airport) String() string {
	return fmt.Sprintf("Airport(id=%v, size=%v)", a.ID, a.Size)
}
// Airports is a parsable slice of Airport.
type Airports []*Airport
// config propagates the given ent config to every Airport in the slice,
// so later queries/updates through these entities use the same driver.
func (a Airports) config(cfg config) {
	for _i := range a {
		a[_i].config = cfg
	}
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.