code
stringlengths
114
1.05M
path
stringlengths
3
312
quality_prob
float64
0.5
0.99
learning_prob
float64
0.2
1
filename
stringlengths
3
168
kind
stringclasses
1 value
package model import ( "encoding/json" "fmt" "go.etcd.io/bbolt" "reflect" "strconv" "strings" ) // Encapsulates all persistence operations for a particular data type represented by a struct. type table struct { bolt *bbolt.DB recordType reflect.Type name string bucketKey []byte idFieldIndex *int manualId bool } // Registers a new table for a struct, given its zero value. func (database *Database) newTable(recordType interface{}) (*table, error) { recordTypeValue := reflect.ValueOf(recordType) if recordTypeValue.Kind() != reflect.Struct { return nil, fmt.Errorf("record type must be a struct; got %v", recordTypeValue.Kind()) } var table table table.bolt = database.bolt table.recordType = reflect.TypeOf(recordType) table.name = table.recordType.Name() table.bucketKey = []byte(table.name) // Determine which field in the struct is tagged as the ID and cache its index. idFound := false for i := 0; i < recordTypeValue.Type().NumField(); i++ { field := recordTypeValue.Type().Field(i) tags := map[string]struct{}{} for _, tag := range strings.Split(field.Tag.Get("db"), ",") { tags[tag] = struct{}{} } if _, ok := tags["id"]; ok { if field.Type.Kind() != reflect.Int { return nil, fmt.Errorf( "field in struct %s tagged with 'id' must be an int; got %v", table.name, field.Type.Kind(), ) } table.idFieldIndex = new(int) *table.idFieldIndex = i idFound = true _, table.manualId = tags["manual"] break } } if !idFound { return nil, fmt.Errorf("struct %s has no field tagged as the id", table.name) } // Create the Bolt bucket corresponding to the struct. err := table.bolt.Update(func(tx *bbolt.Tx) error { _, err := tx.CreateBucketIfNotExists(table.bucketKey) return err }) if err != nil { return nil, err } return &table, nil } // Populates the given double pointer to a record with the data from the record with the given ID, or nil if it doesn't // exist. 
func (table *table) getById(id int, record interface{}) error { if err := table.validateType(record, reflect.Ptr, reflect.Ptr, reflect.Struct); err != nil { return err } return table.bolt.View(func(tx *bbolt.Tx) error { bucket, err := table.getBucket(tx) if err != nil { return err } if recordJson := bucket.Get(idToKey(id)); recordJson != nil { return json.Unmarshal(recordJson, record) } // If the record does not exist, set the record pointer to nil. recordPointerValue := reflect.ValueOf(record).Elem() recordPointerValue.Set(reflect.Zero(recordPointerValue.Type())) return nil }) } // Populates the given slice passed by pointer with the data from every record in the table, ordered by ID. func (table *table) getAll(recordSlice interface{}) error { if err := table.validateType(recordSlice, reflect.Ptr, reflect.Slice, reflect.Struct); err != nil { return err } return table.bolt.View(func(tx *bbolt.Tx) error { bucket, err := table.getBucket(tx) if err != nil { return err } recordSliceValue := reflect.ValueOf(recordSlice).Elem() recordSliceValue.Set(reflect.MakeSlice(recordSliceValue.Type(), 0, 0)) return bucket.ForEach(func(key, value []byte) error { record := reflect.New(table.recordType) err := json.Unmarshal(value, record.Interface()) if err != nil { return err } recordSliceValue.Set(reflect.Append(recordSliceValue, record.Elem())) return nil }) }) } // Persists the given record as a new row in the table. func (table *table) create(record interface{}) error { if err := table.validateType(record, reflect.Ptr, reflect.Struct); err != nil { return err } // Validate that the record has its ID set to zero or not as expected, depending on whether it is configured for // autogenerated IDs. 
value := reflect.ValueOf(record).Elem() id := int(value.Field(*table.idFieldIndex).Int()) if table.manualId && id == 0 { return fmt.Errorf("can't create %s with zero ID since table is configured for manual IDs", table.name) } else if !table.manualId && id != 0 { return fmt.Errorf( "can't create %s with non-zero ID since table is configured for autogenerated IDs: %d", table.name, id, ) } return table.bolt.Update(func(tx *bbolt.Tx) error { bucket, err := table.getBucket(tx) if err != nil { return err } if !table.manualId { // Generate a new ID for the record. newSequence, err := bucket.NextSequence() if err != nil { return err } id = int(newSequence) value.Field(*table.idFieldIndex).SetInt(int64(id)) } // Ensure that a record having the same ID does not already exist in the table. key := idToKey(id) oldRecord := bucket.Get(key) if oldRecord != nil { return fmt.Errorf("%s with ID %d already exists: %s", table.name, id, string(oldRecord)) } recordJson, err := json.Marshal(record) if err != nil { return err } return bucket.Put(key, recordJson) }) } // Persists the given record as an update to the existing row in the table. Returns an error if the record does not // already exist. func (table *table) update(record interface{}) error { if err := table.validateType(record, reflect.Ptr, reflect.Struct); err != nil { return err } // Validate that the record has a non-zero ID. value := reflect.ValueOf(record).Elem() id := int(value.Field(*table.idFieldIndex).Int()) if id == 0 { return fmt.Errorf("can't update %s with zero ID", table.name) } return table.bolt.Update(func(tx *bbolt.Tx) error { bucket, err := table.getBucket(tx) if err != nil { return err } // Ensure that a record having the same ID exists in the table. 
key := idToKey(id) oldRecord := bucket.Get(key) if oldRecord == nil { return fmt.Errorf("can't update non-existent %s with ID %d", table.name, id) } recordJson, err := json.Marshal(record) if err != nil { return err } return bucket.Put(key, recordJson) }) } // Deletes the record having the given ID from the table. Returns an error if the record does not exist. func (table *table) delete(id int) error { return table.bolt.Update(func(tx *bbolt.Tx) error { bucket, err := table.getBucket(tx) if err != nil { return err } // Ensure that a record having the same ID exists in the table. key := idToKey(id) oldRecord := bucket.Get(key) if oldRecord == nil { return fmt.Errorf("can't delete non-existent %s with ID %d", table.name, id) } return bucket.Delete(key) }) } // Deletes all records from the table. func (table *table) truncate() error { return table.bolt.Update(func(tx *bbolt.Tx) error { _, err := table.getBucket(tx) if err != nil { return err } // Carry out the truncation by way of deleting the whole bucket and then recreate it. err = tx.DeleteBucket(table.bucketKey) if err != nil { return err } _, err = tx.CreateBucket(table.bucketKey) return err }) } // Obtains the Bolt bucket belonging to the table. func (table *table) getBucket(tx *bbolt.Tx) (*bbolt.Bucket, error) { bucket := tx.Bucket(table.bucketKey) if bucket == nil { return nil, fmt.Errorf("unknown table %s", table.name) } return bucket, nil } // Validates that the given record is of the expected derived type (e.g. pointer, slice, etc.), that the base type is // the same as that stored in the table, and that the table is configured correctly. func (table *table) validateType(record interface{}, kinds ...reflect.Kind) error { // Check the hierarchy of kinds against the expected list until reaching the base record type. 
recordType := reflect.ValueOf(record).Type() expectedKind := "" actualKind := "" for i, kind := range kinds { if i > 0 { expectedKind += " -> " actualKind += " -> " } expectedKind += kind.String() actualKind += recordType.Kind().String() if recordType.Kind() != kind { return fmt.Errorf("input must be a %s; got a %s", expectedKind, actualKind) } if i < len(kinds)-1 { recordType = recordType.Elem() } } if recordType != table.recordType { return fmt.Errorf("given record of type %s does not match expected type for table %s", recordType, table.name) } if table.idFieldIndex == nil { return fmt.Errorf("struct %s has no field tagged as the id", table.name) } return nil } // Serializes the given integer ID to a byte array containing its Base-10 string representation. func idToKey(id int) []byte { return []byte(strconv.Itoa(id)) }
model/table.go
0.666171
0.439868
table.go
starcoder
package v2dot0 import ( "encoding/json" "fmt" "testing" dtoErrorV2dot0 "github.com/edgexfoundry/edgex-go/internal/pkg/v2/application/dto/v2dot0/common/error" dtoV2dot0 "github.com/edgexfoundry/edgex-go/internal/pkg/v2/application/dto/v2dot0/common/metrics" "github.com/edgexfoundry/edgex-go/internal/pkg/v2/infrastructure" "github.com/edgexfoundry/edgex-go/internal/pkg/v2/ui/common/batchdto" "github.com/stretchr/testify/assert" ) // assertValid validates metrics response; we can't predict what will be returned so we consider valid // response to have all non-zero fields. func assertValid(t *testing.T, response *dtoV2dot0.Response, requestIDs []string) { inList := func() bool { for index := range requestIDs { if response.RequestID == requestIDs[index] { return true } } return false } if !inList() { assert.Fail(t, fmt.Sprintf("requestID %s not in list %v", response.RequestID, requestIDs)) } assert.NotEqual(t, 0, response.Alloc) assert.NotEqual(t, 0, response.TotalAlloc) assert.NotEqual(t, 0, response.Sys) assert.NotEqual(t, 0, response.Mallocs) assert.NotEqual(t, 0, response.Frees) assert.NotEqual(t, 0, response.LiveObjects) assert.NotEqual(t, 0, response.CpuBusyAvg) } // assertValidV2dot0UseCaseMetricsResponse validates metrics response; we can't predict what will be returned so we // consider valid response to have all non-zero fields. func assertValidV2dot0UseCaseMetricsResponse(t *testing.T, actual []byte, requestIDs []string) { var responseDTO dtoV2dot0.Response // single response? err := json.Unmarshal(actual, &responseDTO) if err == nil { assertValid(t, &responseDTO, requestIDs) return } // multiple responses? 
var responseDTOs []dtoV2dot0.Response err = json.Unmarshal(actual, &responseDTOs) if err == nil { for i := range responseDTOs { assertValid(t, &responseDTOs[i], requestIDs) } return } assert.Fail(t, "unable to validate metrics response: %s", err.Error()) } // assertV2dot0UseCaseMetricsResponseOneValidAndOneError validates one successful result and one error result of a // specific type. func assertV2dot0UseCaseMetricsResponseOneValidAndOneError( t *testing.T, actual []byte, validRequestID string, status infrastructure.Status) { var responseDTOs []*json.RawMessage if err := json.Unmarshal(actual, &responseDTOs); err != nil { assert.Fail(t, "unable to unmarshal: %s", err.Error()) return } assert.Equal(t, 2, len(responseDTOs)) var validExists = false for i := range responseDTOs { var responseDTO dtoV2dot0.Response if err := json.Unmarshal(*responseDTOs[i], &responseDTO); err != nil { continue } if responseDTO.StatusCode != infrastructure.StatusSuccess { continue } validExists = true assertValid(t, &responseDTO, []string{validRequestID}) } assert.True(t, validExists) var invalidExists = false for i := range responseDTOs { var responseDTO dtoErrorV2dot0.Response if err := json.Unmarshal(*responseDTOs[i], &responseDTO); err != nil { continue } if responseDTO.StatusCode == infrastructure.StatusSuccess { continue } invalidExists = true assert.Equal(t, status, responseDTO.StatusCode) } assert.True(t, invalidExists) } // assertValidV2dot0BatchMetricsResponse validates metrics response; we can't predict what will be returned so we // consider valid response to have all non-zero fields. 
func assertValidV2dot0BatchMetricsResponse( t *testing.T, actual []byte, version string, kind string, action string, requestIDs []string) { assertValidBatch := func(response *batchdto.TestResponse) error { assert.Equal(t, version, response.Version) assert.Equal(t, kind, response.Kind) assert.Equal(t, action, response.Action) var responseDTO dtoV2dot0.Response err := json.Unmarshal(*response.Content, &responseDTO) if err == nil { assertValid(t, &responseDTO, requestIDs) } return err } responseDTOs := batchdto.EmptyTestResponseSlice() err := json.Unmarshal(actual, &responseDTOs) if err == nil { for i := range responseDTOs { if err := assertValidBatch(&responseDTOs[i]); err != nil { assert.Fail(t, "unable to unmarshal responseDTO: %s", err.Error()) } } return } assert.Fail(t, "unable to validate response: %s", err.Error()) }
internal/pkg/v2/ui/http/acceptance/common/metrics/v2dot0/assert.go
0.564819
0.440951
assert.go
starcoder
package czml // Polyline is a line in the scene composed of multiple segments. // https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/Polyline type Polyline struct { Show *bool `json:"show,omitempty"` Positions *PositionList `json:"positions"` ArcType *ArcType `json:"arcType,omitempty"` Width *float64 `json:"width,omitempty"` Granularity *float64 `json:"granularity,omitempty"` Material *PolylineMaterial `json:"material,omitempty"` FollowSurface *bool `json:"followSurface,omitempty"` Shadows ShadowMode `json:"shadows,omitempty"` DepthFailMaterial *PolylineMaterial `json:"depthFailMaterial,omitempty"` DistanceDisplayCondition *DistanceDisplayCondition `json:"distanceDisplayCondition,omitempty"` ClampToGround *bool `json:"clampToGround,omitempty"` ClassificationType ClassificationType `json:"classificationType,omitempty"` ZIndex *int `json:"zIndex,omitempty"` } // PolylineVolume is a polyline with a volume, defined as a 2D shape extruded along a polyline // that conforms to the curvature of the globe. 
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/PolylineVolume type PolylineVolume struct { Show *bool `json:"show,omitempty"` Positions *PositionList `json:"positions"` Shape *Shape `json:"shape"` CornerType *CornerType `json:"cornerType,omitempty"` Granularity *float64 `json:"granularity,omitempty"` Fill *bool `json:"fill,omitempty"` Material *PolylineMaterial `json:"material,omitempty"` Outline *bool `json:"outline,omitempty"` OutlineColor *Color `json:"outlineColor,omitempty"` OutlineWidth *float64 `json:"outlineWidth,omitempty"` Shadows ShadowMode `json:"shadows,omitempty"` DistanceDisplayCondition *DistanceDisplayCondition `json:"distanceDisplayCondition,omitempty"` } // UpdateColor adds or updates a solid-colored line specified by rgba value func (p *Polyline) UpdateColor(rgba []int) { c := Color{Rgba: rgba} s := SolidColorMaterial{Color: &c} m := PolylineMaterial{SolidColor: &s} p.Material = &m } // AddPoint adds a geographical point func (p *Polyline) AddPoint(lat, lon, ele float64) { if p.Positions == nil { p.Positions = &PositionList{} } else if p.Positions.CartographicDegrees == nil { p.Positions.CartographicDegrees = []float64{} } p.Positions.CartographicDegrees = append(p.Positions.CartographicDegrees, lon, lat, ele) }
polyline.go
0.835685
0.415492
polyline.go
starcoder
package advent var _ Problem = &lanternFish{} type lanternFish struct { dailyProblem } func NewLanternFish() Problem { return &lanternFish{ dailyProblem{ day: 6, }, } } func (l *lanternFish) Solve() interface{} { input := l.GetInputLines() var results []int results = append(results, l.fishCount80(input)) results = append(results, l.fishCount256(input)) return results } /* The sea floor is getting steeper. Maybe the sleigh keys got carried this way? A massive school of glowing lanternfish swims past. They must spawn quickly to reach such large numbers - maybe exponentially quickly? You should model their growth rate to be sure. Although you know nothing about this specific species of lanternfish, you make some guesses about their attributes. Surely, each lanternfish creates a new lanternfish once every 7 days. However, this process isn't necessarily synchronized between every lanternfish - one lanternfish might have 2 days left until it creates another lanternfish, while another might have 4. So, you can model each fish as a single number that represents the number of days until it creates a new lanternfish. Furthermore, you reason, a new lanternfish would surely need slightly longer before it's capable of producing more lanternfish: two more days for its first cycle. So, suppose you have a lanternfish with an internal timer value of 3: After one day, its internal timer would become 2. After another day, its internal timer would become 1. After another day, its internal timer would become 0. After another day, its internal timer would reset to 6, and it would create a new lanternfish with an internal timer of 8. After another day, the first lanternfish would have an internal timer of 5, and the second lanternfish would have an internal timer of 7. A lanternfish that creates a new fish resets its timer to 6, not 7 (because 0 is included as a valid timer value). The new lanternfish starts with an internal timer of 8 and does not start counting down until the next day. 
Realizing what you're trying to do, the submarine automatically produces a list of the ages of several hundred nearby lanternfish (your puzzle input). For example, suppose you were given the following list: 3,4,3,1,2 This list means that the first fish has an internal timer of 3, the second fish has an internal timer of 4, and so on until the fifth fish, which has an internal timer of 2. Simulating these fish over several days would proceed as follows: Initial state: 3,4,3,1,2 After 1 day: 2,3,2,0,1 After 2 days: 1,2,1,6,0,8 After 3 days: 0,1,0,5,6,7,8 After 4 days: 6,0,6,4,5,6,7,8,8 After 5 days: 5,6,5,3,4,5,6,7,7,8 After 6 days: 4,5,4,2,3,4,5,6,6,7 After 7 days: 3,4,3,1,2,3,4,5,5,6 After 8 days: 2,3,2,0,1,2,3,4,4,5 After 9 days: 1,2,1,6,0,1,2,3,3,4,8 After 10 days: 0,1,0,5,6,0,1,2,2,3,7,8 After 11 days: 6,0,6,4,5,6,0,1,1,2,6,7,8,8,8 After 12 days: 5,6,5,3,4,5,6,0,0,1,5,6,7,7,7,8,8 After 13 days: 4,5,4,2,3,4,5,6,6,0,4,5,6,6,6,7,7,8,8 After 14 days: 3,4,3,1,2,3,4,5,5,6,3,4,5,5,5,6,6,7,7,8 After 15 days: 2,3,2,0,1,2,3,4,4,5,2,3,4,4,4,5,5,6,6,7 After 16 days: 1,2,1,6,0,1,2,3,3,4,1,2,3,3,3,4,4,5,5,6,8 After 17 days: 0,1,0,5,6,0,1,2,2,3,0,1,2,2,2,3,3,4,4,5,7,8 After 18 days: 6,0,6,4,5,6,0,1,1,2,6,0,1,1,1,2,2,3,3,4,6,7,8,8,8,8 Each day, a 0 becomes a 6 and adds a new 8 to the end of the list, while each other number decreases by 1 if it was present at the start of the day. In this example, after 18 days, there are a total of 26 fish. After 80 days, there would be a total of 5934. Find a way to simulate lanternfish. How many lanternfish would there be after 80 days? */ func (l *lanternFish) fishCount80(input []string) int { return l.fishAfterDays(l.parseInput(input[0]), 80) } /* Suppose the lanternfish live forever and have unlimited food and space. Would they take over the entire ocean? After 256 days in the example above, there would be a total of 26984457539 lanternfish! How many lanternfish would there be after 256 days? 
*/ func (l *lanternFish) fishCount256(input []string) int { return l.fishAfterDays(l.parseInput(input[0]), 256) } func (l *lanternFish) fishAfterDays(initialStates []int, days int) int { var states [9]int for _, state := range initialStates { states[state]++ } for day := 0; day < days; day++ { temp := states[0] //move states down one day for i := 0; i < 8; i++ { states[i] = states[i+1] } states[8] = temp //add new fist at day 8 states[6] += temp //reset 0 day fish } sum := 0 for _, state := range states { sum += state } return sum } func (l *lanternFish) parseInput(input string) []int { var nums []int for i := 0; i < len(input); i += 2 { nums = append(nums, int(input[i])-48) //48 = '0' } return nums }
internal/advent/day6.go
0.644896
0.505737
day6.go
starcoder
package internal import ( "math" "github.com/go-gl/mathgl/mgl32" ) const order = mgl32.ZXY func Clamp(value, min, max float64) float64 { return math.Max(min, math.Min(max, value)) } func Euler(eulers mgl32.Vec3) mgl32.Quat { switch order { case mgl32.ZXY: return mgl32.AnglesToQuat(mgl32.DegToRad(eulers.Z()), mgl32.DegToRad(eulers.X()), mgl32.DegToRad(eulers.Y()), mgl32.ZXY) case mgl32.YZX: return mgl32.AnglesToQuat(mgl32.DegToRad(eulers.Y()), mgl32.DegToRad(eulers.Z()), mgl32.DegToRad(eulers.X()), mgl32.YZX) } panic("unsupport order") } func QuatMulVec3(q mgl32.Quat, v mgl32.Vec3) mgl32.Vec3 { return q.Mul(mgl32.Quat{V: v}).Mul(q.Inverse()).V } func toEulerYZX(rotation mgl32.Quat) mgl32.Vec3 { var x, y, z, w float32 r := rotation.Normalize() w, x, y, z = r.W, r.V[0], r.V[1], r.V[2] m11 := float64(1 - 2*y*y - 2*z*z) m13 := float64(2*x*z + 2*w*y) m21 := float64(2*x*y + 2*w*z) m22 := float64(1 - 2*x*x - 2*z*z) m23 := float64(2*y*z - 2*w*x) m31 := float64(2*x*z - 2*w*y) m33 := float64(1 - 2*x*x - 2*y*y) z = float32(math.Asin(Clamp(m21, -1, 1))) if math.Abs(m21) < 0.9999999 { x = float32(math.Atan2(-m23, m22)) y = float32(math.Atan2(-m31, m11)) } else { x = float32(math.Atan2(m13, m33)) y = 0 } return mgl32.Vec3{mgl32.RadToDeg(x), mgl32.RadToDeg(y), mgl32.RadToDeg(z)} } func toEulerZXY(rotation mgl32.Quat) mgl32.Vec3 { var x, y, z, w float32 r := rotation.Normalize() w, x, y, z = r.W, r.V[0], r.V[1], r.V[2] m11 := float64(1 - 2*y*y - 2*z*z) m12 := float64(2*x*y - 2*w*z) m21 := float64(2*x*y + 2*w*z) m22 := float64(1 - 2*x*x - 2*z*z) m31 := float64(2*x*z - 2*w*y) m32 := float64(2*y*z + 2*w*x) m33 := float64(1 - 2*x*x - 2*y*y) x = float32(math.Asin(Clamp(m32, -1, 1))) if math.Abs(m32) < 0.9999999 { y = float32(math.Atan2(-m31, m33)) z = float32(math.Atan2(-m12, m22)) } else { y = 0 z = float32(math.Atan2(m21, m11)) } return mgl32.Vec3{mgl32.RadToDeg(x), mgl32.RadToDeg(y), mgl32.RadToDeg(z)} } func ToEuler(rotation mgl32.Quat) mgl32.Vec3 { switch order { case mgl32.ZXY: 
return toEulerZXY(rotation) case mgl32.YZX: return toEulerYZX(rotation) } panic("unsupport order") } func LookAt(position, target mgl32.Vec3) mgl32.Quat { direction := target.Sub(position).Normalize() return mgl32.QuatBetweenVectors(mgl32.Vec3{0, 0, 1}, direction) }
gameplay/internal/quatutil.go
0.779364
0.781289
quatutil.go
starcoder
package is import ( "errors" "fmt" "reflect" "strings" "testing" "github.com/go-test/deep" ) // Is is provides helpers for writing tests. type Is func(cond bool, msg string, i ...interface{}) // Equal checks if the given values are equal func (is Is) Equal(v1, v2 interface{}, msg string, i ...interface{}) { if !reflect.DeepEqual(v1, v2) { if dif := deep.Equal(v1, v2); len(dif) != 0 { is.T().Helper() is(false, fmt.Sprintf("%s\nValues are not equal:\n\t%s", fmt.Sprintf(msg, i...), strings.Join(dif, "\n\t"))) } } } // Fail immediately fails the test. func (is Is) Fail(msg string, i ...interface{}) { is.T().Helper() is(false, msg, i...) } // Err checks if any error in err's chain matches target. func (is Is) Err(err, target error, msg string, i ...interface{}) { if !errors.Is(err, target) { is.T().Helper() is(false, fmt.Sprintf("%s\nError `%s` is not `%s`", fmt.Sprintf(msg, i...), err, target)) } } // Panic checks if calling the given function causes a panic. // If the given function does not panic the test fails. func (is Is) Panic(panicable func(), msg string, i ...interface{}) { if !callPanic(panicable) { is.T().Helper() is(false, fmt.Sprintf("%s\nFunction did not panic", fmt.Sprintf(msg, i...))) } } // Log logs the given message. // This is the equivalent of calling is.T().Log(msg). // This function can be called from multiple goroutines concurrently. func (is Is) Log(msg string, i ...interface{}) { t := is.T() t.Helper() t.Logf(msg, i...) } // Run runs the given test. func (is Is) Run(name string, f func(Is)) { is.T().Run(name, func(t *testing.T) { f(New(t)) }) } // RunP runs the given test in parallel with the current test. func (is Is) RunP(name string, f func(Is)) { is.T().Run(name, func(t *testing.T) { t.Parallel(); f(New(t)) }) } // T gets the underlying *testing.T for this test. func (is Is) T() (t *testing.T) { // This is a ugly hack to get the testing.T value from `is`. 
// Calling `is(false, "", internalIsCall, **testing.T)` sets the the value to the given ptr. // This is done by calling `setT`. is(false, "", internalIsCall, &t) return } var internalIsCall = new(uint16) func setT(t *testing.T, msg string, i []interface{}) (ok bool) { if msg == "" && len(i) == 2 { if i[0] == internalIsCall { var dst **testing.T if dst, ok = i[1].(**testing.T); ok { *dst = t } return ok } } return } func callPanic(f func()) (paniced bool) { defer func() { if r := recover(); r != nil { paniced = true } }() f() return } // New creates a new test func New(t *testing.T) Is { return func(cond bool, msg string, i ...interface{}) { t.Helper() if !cond { if ok := setT(t, msg, i); ok { // see comment in is.T() return } t.Errorf(msg, i...) t.FailNow() } } }
is.go
0.561215
0.400515
is.go
starcoder
package disruptor // Cursors should be a party of the same backing array to keep them as close together as possible: // https://news.ycombinator.com/item?id=7800825 type ( Wireup struct { capacity int64 groups [][]Consumer cursors []*Cursor // backing array keeps cursors (with padding) in contiguous memory } ) func Configure(capacity int64) Wireup { return Wireup{ capacity: capacity, groups: [][]Consumer{}, cursors: []*Cursor{NewCursor()}, } } func (this Wireup) WithConsumerGroup(consumers ...Consumer) Wireup { if len(consumers) == 0 { return this } target := make([]Consumer, len(consumers)) copy(target, consumers) for i := 0; i < len(consumers); i++ { this.cursors = append(this.cursors, NewCursor()) } this.groups = append(this.groups, target) return this } func (this Wireup) Build() Disruptor { allReaders := []*Reader{} written := this.cursors[0] var upstream Barrier = this.cursors[0] cursorIndex := 1 // 0 index is reserved for the writer Cursor for groupIndex, group := range this.groups { groupReaders, groupBarrier := this.buildReaders(groupIndex, cursorIndex, written, upstream) for _, item := range groupReaders { allReaders = append(allReaders, item) } upstream = groupBarrier cursorIndex += len(group) } writer := NewWriter(written, upstream, this.capacity) return Disruptor{writer: writer, readers: allReaders} } func (this Wireup) BuildShared() SharedDisruptor { allReaders := []*Reader{} written := this.cursors[0] writerBarrier := NewSharedWriterBarrier(written, this.capacity) var upstream Barrier = writerBarrier cursorIndex := 1 // 0 index is reserved for the writer Cursor for groupIndex, group := range this.groups { groupReaders, groupBarrier := this.buildReaders(groupIndex, cursorIndex, written, upstream) for _, item := range groupReaders { allReaders = append(allReaders, item) } upstream = groupBarrier cursorIndex += len(group) } writer := NewSharedWriter(writerBarrier, upstream) return SharedDisruptor{writer: writer, readers: allReaders} } func (this Wireup) 
buildReaders(consumerIndex, cursorIndex int, written *Cursor, upstream Barrier) ([]*Reader, Barrier) { barrierCursors := []*Cursor{} readers := []*Reader{} for _, consumer := range this.groups[consumerIndex] { cursor := this.cursors[cursorIndex] barrierCursors = append(barrierCursors, cursor) reader := NewReader(cursor, written, upstream, consumer) readers = append(readers, reader) cursorIndex++ } if len(this.groups[consumerIndex]) == 1 { return readers, barrierCursors[0] } else { return readers, NewCompositeBarrier(barrierCursors...) } }
vendor/github.com/smartystreets-prototypes/go-disruptor/wireup.go
0.740644
0.41745
wireup.go
starcoder
package neptune import ( "strings" "sync" ) // batchProcessor defines a generic function type to process a batch and may return a result and an error. type batchProcessor = func(map[string]string) (map[string]string, error) // processInConcurrentBatches splits the provided items in batches and calls processBatch for each batch batch, concurrently. // The results of the batch Processor functions, if provided, are aggregated as unique items and returned. // note that the items are not processed in any deterministic order func processInConcurrentBatches(items map[string]string, processBatch batchProcessor, batchSize, maxWorkers int) (result map[string]string, numChunks int, errs []error) { wg := sync.WaitGroup{} chWait := make(chan struct{}) chErr := make(chan error) chSemaphore := make(chan struct{}, maxWorkers) result = make(map[string]string) lockResult := sync.Mutex{} // worker add delta to workgroup and acquire semaphore acquire := func() { wg.Add(1) chSemaphore <- struct{}{} } // worker release semaphore and workgroup delta release := func() { <-chSemaphore wg.Done() } // func executed in each go-routine to process the batch, aggregate results, and send errors to the error channel doProcessBatch := func(chunk map[string]string) { defer release() res, err := processBatch(chunk) if err != nil { chErr <- err return } lockResult.Lock() for k, v := range res { result[k] = v } lockResult.Unlock() } // func that triggers the batch processing for a chunk, in a parallel go-routine goProcessBatch := func(chunk map[string]string) { acquire() go doProcessBatch(chunk) } // split in batches, and trigger a go-routine for each batch numChunks = processInBatches(items, goProcessBatch, batchSize) // func that will close wait channel when all go-routines complete their execution go func() { wg.Wait() close(chWait) }() // Block until all workers finish their work, keeping track of errors for { select { case err := <-chErr: errs = append(errs, err) case <-chWait: return } } } // 
processInBatches is an aux function that splits the provided items in batches and calls processBatch for each batch // note that the items are not processed in any deterministic order func processInBatches(items map[string]string, processBatch func(map[string]string), batchSize int) (numChunks int) { numChunks = 0 // process full bathes, reseting the batch at the end of each process batch := make(map[string]string, batchSize) for k, v := range items { batch[k] = v if len(batch) == batchSize { numChunks++ processBatch(batch) batch = make(map[string]string, batchSize) } } // process any remaining items if len(batch) > 0 { processBatch(batch) numChunks++ } return numChunks } // unique returns an array containing the unique elements of the provided array func unique(duplicated []string) (unique []string) { return createArray(createMapFromArrays(duplicated)) } // createArray creates an array of keys from the provided map func createArray(m map[string]string) (a []string) { for k := range m { a = append(a, k) } return a } // createMapFromArrays creates a map whose keys are the unique values of the provided array(s). // values are empty structs for memory efficiency reasons (no storage used) func createMapFromArrays(a ...[]string) (m map[string]string) { m = make(map[string]string) for _, aa := range a { for _, val := range aa { m[val] = "" } } return m } // createMap creates a map whose keys are the unique values of the provided array(s). 
// values are empty strings func createStringMapFromArrays(a ...[]string) (m map[string]string) { m = make(map[string]string) for _, aa := range a { for _, val := range aa { m[val] = "" } } return m } // statementSummary returns a summarized statement for logging, removing long lists of IDs or codes func statementSummary(stmt string) string { if strings.HasPrefix(stmt, "g.V('") { i := strings.Index(stmt, "')") return "g.V(...)" + stmt[i+2:] } if i := strings.Index(stmt, "within(["); i != -1 { j := strings.Index(stmt[i:], "])") return stmt[:i] + "within([...])" + stmt[i+j+2:] } return stmt }
neptune/utils.go
0.660172
0.420659
utils.go
starcoder
package osgb import ( "fmt" "math" ) const ( degreeInRadians = 180 / math.Pi radianInDegrees = 1 / degreeInRadians ) type direction string const ( north direction = "N" south direction = "S" east direction = "E" west direction = "W" ) // ETRS89Coordinate represents a coordinate position in // the ETRS89 geodetic datum. Whilst not being identical // this can be treated as a GPS coordinate. type ETRS89Coordinate struct { // Longitude in decimal degrees Lon float64 // Latitude in decimal degrees Lat float64 // Height in metres Height float64 } // NewETRS89Coord creates a new coordinate position in the ETRS89 geodetic datum. func NewETRS89Coord(lon, lat, height float64) *ETRS89Coordinate { return &ETRS89Coordinate{ Lon: lon, Lat: lat, Height: height, } } // OSGB36Coordinate represents a coordinate position in // the OSGB36/ODN geodetic datum. type OSGB36Coordinate struct { // Easting in metres Easting float64 // Northing in metres Northing float64 // Height in metres Height float64 } // NewOSGB36Coord creates a new coordinate position in the OSGB36/ODN geodetic datum. 
func NewOSGB36Coord(easting, northing, height float64) *OSGB36Coordinate { return &OSGB36Coordinate{ Easting: easting, Northing: northing, Height: height, } } type geographicCoord struct { lat, lon, height float64 } type cartesianCoord struct { x, y, z float64 } type planeCoord struct { easting, northing float64 } func dmsToDecimal(degrees, minutes, seconds float64, direction direction) (float64, error) { if direction == "N" || direction == "S" { if degrees < 0 || degrees > 90 { return 0, fmt.Errorf("invalid latitude degrees %f", degrees) } } else if direction == "E" || direction == "W" { if degrees < 0 || degrees > 180 { return 0, fmt.Errorf("invalid longitude degrees %f", degrees) } } else { return 0, fmt.Errorf("invalid direction %s", direction) } if minutes < 0 || minutes > 60 { return 0, fmt.Errorf("invalid minutes %f", minutes) } if seconds < 0 || seconds > 60 { return 0, fmt.Errorf("invalid secondss %f", seconds) } rad := (degrees + minutes/60 + seconds/3600) if direction == "N" || direction == "E" { return rad, nil } return rad * -1, nil } func radiansToDegrees(rad float64) float64 { return rad * degreeInRadians } func degreesToRadians(degrees float64) float64 { return degrees * radianInDegrees }
coordinates.go
0.853027
0.515498
coordinates.go
starcoder
package strings import ( "io" "unicode/utf8" "unsafe" ) // A Builder is used to efficiently build a string using Write methods. It // minimizes memory copying. The zero value is ready to use. Do not copy a non- // zero Builder. type Builder struct { addr *Builder // of receiver, to detect copies by value buf []byte off int // read at &buf[off], write at &buf[len(buf)] lastRead readOp // last read operation, so that Unread* can work correctly. } // The readOp constants describe the last action performed on the buffer, so // that UnreadRune and UnreadByte can check for invalid usage. opReadRuneX // constants are chosen such that converted to int they correspond to the rune // size that was read. type readOp int8 // Don't use iota for these, as the values need to correspond with the names and // comments, which is easier to see when being explicit. const ( opRead readOp = -1 // Any other read operation. opInvalid readOp = 0 // Non-read operation. ) // noescape hides a pointer from escape analysis. noescape is the identity // function but escape analysis doesn't think the output depends on the input. // noescape is inlined and currently compiles down to zero instructions. USE // CAREFULLY! This was copied from the runtime; see issues 23382 and 7921. //go:nosplit func noescape(p unsafe.Pointer) unsafe.Pointer { x := uintptr(p) return unsafe.Pointer(x ^ 0) } func (b *Builder) copyCheck() { if b.addr == nil { // This hack works around a failing of Go's escape analysis that was // causing b to escape and be heap allocated. // See issue 23382. // TODO: once issue 7921 is fixed, this should be reverted to just // "b.addr = b". b.addr = (*Builder)(noescape(unsafe.Pointer(b))) } else if b.addr != b { panic("strings: illegal use of non-zero Builder copied by value") } } // String returns the accumulated string. It doesn't matter if the Read() method // has been called, it always returns the contents. 
func (b *Builder) String() string { return *(*string)(unsafe.Pointer(&b.buf)) } // Len returns the number of accumulated bytes; b.Len() == len(b.String()). func (b *Builder) Len() int { return len(b.buf) } // Reset resets the Builder to be empty. func (b *Builder) Reset() { b.addr = nil b.buf = nil } // grow copies the buffer to a new, larger buffer so that there are at least n // bytes of capacity beyond len(b.buf). func (b *Builder) grow(n int) { buf := make([]byte, len(b.buf), 2*cap(b.buf)+n) copy(buf, b.buf) b.buf = buf } // Grow grows b's capacity, if necessary, to guarantee space for another n // bytes. After Grow(n), at least n bytes can be written to b without another // allocation. If n is negative, Grow panics. func (b *Builder) Grow(n int) { b.copyCheck() if n < 0 { panic("strings.Builder.Grow: negative count") } if cap(b.buf)-len(b.buf) < n { b.grow(n) } } // Write appends the contents of p to b's buffer. Write always returns len(p), // nil. func (b *Builder) Write(p []byte) (int, error) { b.copyCheck() b.lastRead = opInvalid b.buf = append(b.buf, p...) return len(p), nil } // WriteByte appends the byte c to b's buffer. The returned error is always nil. func (b *Builder) WriteByte(c byte) error { b.copyCheck() b.lastRead = opInvalid b.buf = append(b.buf, c) return nil } // WriteBytes appends the s to b's buffer. The returned error is always nil. func (b *Builder) WriteBytes(s []byte) (int, error) { b.copyCheck() b.lastRead = opInvalid b.buf = append(b.buf, s...) return len(s), nil } // WriteRune appends the UTF-8 encoding of Unicode code point r to b's buffer. // It returns the length of r and a nil error. 
func (b *Builder) WriteRune(r rune) (int, error) { b.copyCheck() b.lastRead = opInvalid if r < utf8.RuneSelf { b.buf = append(b.buf, byte(r)) return 1, nil } l := len(b.buf) if cap(b.buf)-l < utf8.UTFMax { b.grow(utf8.UTFMax) } n := utf8.EncodeRune(b.buf[l:l+utf8.UTFMax], r) b.buf = b.buf[:l+n] return n, nil } // WriteString appends the contents of s to b's buffer. It returns the length of // s and a nil error. func (b *Builder) WriteString(s string) (int, error) { b.copyCheck() b.lastRead = opInvalid b.buf = append(b.buf, s...) return len(s), nil } // Bytes returns a slice of length b.Len() holding the unread portion of the // buffer. The slice is valid for use only until the next buffer modification // (that is, only until the next call to a method like Read, Write, Reset, or // Truncate). The slice aliases the buffer content at least until the next // buffer modification, so immediate changes to the slice will affect the result // of future reads. func (b *Builder) Bytes() []byte { return b.buf[b.off:] } // Read reads the next len(p) bytes from the Builder or until all contents are // read. The return value n is the number of bytes read. If the Builder has no // data to return, err is io.EOF (unless len(p) is zero); otherwise it is nil. // This method doesn't alter the internal buffer. func (b *Builder) Read(p []byte) (n int, err error) { b.lastRead = opInvalid if len(b.buf) <= b.off { b.lastRead = opInvalid if len(p) == 0 { return 0, nil } return 0, io.EOF } n = copy(p, b.buf[b.off:]) b.off += n if n > 0 { b.lastRead = opRead } return n, nil }
builder.go
0.518059
0.434101
builder.go
starcoder
package bst import ( "github.com/Kaiser925/algorithms4go/base" ) type node struct { key interface{} value interface{} left *node right *node n int //The total number of nodes in the subtree Rooted at this node. } // BST is a binary search tree. type BST struct { Root *node Comparator base.CompareFunc } // NewBST returns a new BST with Comparator. func NewBST(Comparator base.CompareFunc) *BST { return &BST{ nil, Comparator, } } func (x *node) size() int { if x == nil { return 0 } return x.n } func (t *BST) put(x *node, key, val interface{}) *node { if x == nil { x = &node{ key: key, value: val, left: nil, right: nil, n: 1, } } cmp := t.Comparator(key, x.key) if cmp < 0 { x.left = t.put(x.left, key, val) } else if cmp > 0 { x.right = t.put(x.right, key, val) } else { x.value = val } x.n = 1 + x.left.size() + x.right.size() return x } func (t *BST) get(x *node, key interface{}) interface{} { if x == nil { return nil } cmp := t.Comparator(key, x.key) if cmp < 0 { return t.get(x.left, key) } else if cmp > 0 { return t.get(x.right, key) } else { return x.value } } // min returns the min node in tree. 
func (t *BST) min(x *node) *node { if x.left == nil { return x } return t.min(x.left) } func (t *BST) max(x *node) *node { if x.right == nil { return x } return t.max(x.right) } func (t *BST) deleteMin(x *node) *node { if x.left == nil { return x.right } x.left = t.deleteMin(x.left) x.n = 1 + x.left.size() + x.right.size() return x } func (t *BST) deleteMax(x *node) *node { if x.right == nil { return x.left } x.right = t.deleteMax(x.right) x.n = 1 + x.left.size() + x.right.size() return x } func (t *BST) delete(x *node, key interface{}) *node { if x == nil { return nil } cmp := t.Comparator(key, x.key) if cmp < 0 { x.left = t.delete(x.left, key) } else if cmp > 0 { x.right = t.delete(x.right, key) } else { if x.right == nil { return x.left } if x.left == nil { return x.right } tmp := x x = t.min(tmp.right) x.right = t.deleteMin(x.right) x.left = tmp.left } return x } func (t *BST) selects(x *node, k int) interface{} { if x == nil { return nil } n := x.left.size() if n > k { return t.selects(x.left, k) } else if n < k { return t.selects(x.right, k-n-1) } else { return x.key } } func (t *BST) rank(x *node, key interface{}) int { if x == nil { return 0 } cmp := t.Comparator(key, x.key) if cmp < 0 { return t.rank(x.left, key) } else if cmp > 0 { return 1 + x.left.size() + t.rank(x.right, key) } else { return x.left.size() } } func (t *BST) keysByIndex(x *node, keys *[]interface{}, lo interface{}, hi interface{}, cur *int) { if x == nil { return } cmplo := t.Comparator(lo, x.key) cmphi := t.Comparator(hi, x.key) if cmplo < 0 { t.keysByIndex(x.left, keys, lo, hi, cur) } if cmplo <= 0 && cmphi >= 0 { (*keys)[*cur] = x.key *cur++ } if cmphi > 0 { t.keysByIndex(x.right, keys, lo, hi, cur) } } func (t *BST) floor(x *node, key interface{}) *node { if x == nil { return nil } cmp := t.Comparator(key, x.key) if cmp == 0 { return x } else if cmp < 0 { return t.floor(x.left, key) } tmp := t.floor(x.right, key) if tmp != nil { return tmp } return x } func (t *BST) ceiling(x *node, 
key interface{}) *node { if x == nil { return nil } cmp := t.Comparator(key, x.key) if cmp == 0 { return x } else if cmp > 0 { return t.ceiling(x.right, key) } tmp := t.ceiling(x.left, key) if tmp != nil { return tmp } return x } // Put inserts key-value into the tree. // If there is already a "key" in the tree, Enqueue will update the value of key. func (t *BST) Put(key, val interface{}) { t.Root = t.put(t.Root, key, val) } // Get returns value of node by its key or nil if key is not found in tree. func (t *BST) Get(key interface{}) interface{} { return t.get(t.Root, key) } // Contains returns true if tree contains key or false if doesn't contain. func (t *BST) Contains(key interface{}) bool { return t.Get(key) != nil } // Delete deletes node from tree by key. func (t *BST) Delete(key interface{}) { t.Root = t.delete(t.Root, key) } // DeleteMin deletes min node of tree. func (t *BST) DeleteMin() { t.Root = t.deleteMin(t.Root) } // DeleteMax deletes max node of tree. func (t *BST) DeleteMax() { t.Root = t.deleteMax(t.Root) } // Min returns the min value in tree. func (t *BST) Min() interface{} { x := t.min(t.Root) if x == nil { return nil } return x.key } // Max returns the max value in tree. func (t *BST) Max() interface{} { x := t.max(t.Root) if x == nil { return nil } return x.key } // Select returns the key in the symbol table whose rank is k. // This is the (k+1)st smallest key in the symbol table. // t.Select(k) == Keys[k] func (t *BST) Select(k int) interface{} { if k < 0 || k > t.Size() { return nil } return t.selects(t.Root, k) } // Rank returns the number of keys in the symbol table strictly less than input key. func (t *BST) Rank(key interface{}) int { if key == nil { return -1 } return t.rank(t.Root, key) } // Size returns number of nodes in the tree. func (t *BST) Size() int { return t.Root.size() } // Keys returns all keys in order. 
func (t *BST) Keys() []interface{} { return t.KeysByIndex(t.Min(), t.Max()) } // KeysByIndex returns all keys between "lo" and "hi" in order. func (t *BST) KeysByIndex(lo, hi interface{}) []interface{} { keys := make([]interface{}, t.Root.size()) cur := 0 t.keysByIndex(t.Root, &keys, lo, hi, &cur) return keys } // Floor returns floor key of the input key, or nil if no floor is found. func (t *BST) Floor(key interface{}) interface{} { x := t.floor(t.Root, key) if x == nil { return nil } return x.key } // Ceiling returns ceiling key of the input key, or nil if no ceiling is found. func (t *BST) Ceiling(key interface{}) interface{} { x := t.ceiling(t.Root, key) if x == nil { return nil } return x.key } // Empty returns true if there is no node, else return false. func (t *BST) Empty() bool { return t.Root.size() == 0 }
tree/bst/bst.go
0.747155
0.419588
bst.go
starcoder
package birthday import ( "errors" "time" "github.com/hyperjiang/php" ) // Birthday is the birthday detail type Birthday struct { Date time.Time Year int Month time.Month Day int Age int Constellation string } // ParseFromTime parse birthday from given time func ParseFromTime(t time.Time) *Birthday { birthday := &Birthday{ Date: t, Year: t.Year(), Month: t.Month(), Day: t.Day(), } now := time.Now() birthday.Age = now.Year() - birthday.Year if now.YearDay() < t.YearDay() && birthday.Age > 0 { birthday.Age-- } birthday.Constellation = parseConstellation(birthday.Month, birthday.Day) return birthday } // Parse parse birthday from given string date func Parse(date string) (*Birthday, error) { t, err := php.DateCreate(date) if err != nil { return nil, err } return ParseFromTime(t), nil } // Format returns the birthday in given format func (b Birthday) Format(layout string) string { return b.Date.Format(layout) } // String returns normalized date func (b Birthday) String() string { return b.Format("2006-01-02") } // GetConstellation gets constellation in given language func (b Birthday) GetConstellation(lang string) (string, error) { var m map[string]string var ok bool if m, ok = constellations[lang]; !ok { return b.Constellation, errors.New("Unsupported language: " + lang) } if v, ok := m[b.Constellation]; ok { return v, nil } return b.Constellation, nil } func parseConstellation(month time.Month, day int) string { switch month { case time.January: if day >= 20 { return "Aquarius" } return "Capricorn" case time.February: if day >= 19 { return "Pisces" } return "Aquarius" case time.March: if day >= 21 { return "Aries" } return "Pisces" case time.April: if day >= 20 { return "Taurus" } return "Aries" case time.May: if day >= 21 { return "Gemini" } return "Taurus" case time.June: if day >= 22 { return "Cancer" } return "Gemini" case time.July: if day >= 23 { return "Leo" } return "Cancer" case time.August: if day >= 23 { return "Virgo" } return "Leo" case time.September: 
if day >= 23 { return "Libra" } return "Virgo" case time.October: if day >= 24 { return "Scorpio" } return "Libra" case time.November: if day >= 23 { return "Sagittarius" } return "Scorpio" case time.December: if day >= 22 { return "Capricorn" } return "Sagittarius" } return "Unknown" }
birthday.go
0.704872
0.412234
birthday.go
starcoder
package platform import ( "context" "reflect" "strconv" "strings" "chromiumos/tast/errors" "chromiumos/tast/local/croshealthd" "chromiumos/tast/testing" ) func init() { testing.AddTest(&testing.Test{ Func: CrosHealthdProbeCPUInfo, Desc: "Check that we can probe cros_healthd for CPU info", Contacts: []string{ "<EMAIL>", "<EMAIL>", "<EMAIL>", }, Attr: []string{"group:mainline"}, SoftwareDeps: []string{"diagnostics"}, }) } func verifyPhysicalCPU(lines []string) error { // Make sure we've received at least nine lines. The first should be the // physical CPU header, followed by one line of keys, one line of values. if len(lines) < 3 { return errors.New("could not find any lines of physical CPU info") } // Verify the first line is the correct header. actualHeader := lines[0] expectedHeader := "Physical CPU:" if actualHeader != expectedHeader { return errors.Errorf("incorrect physical CPU header: got %v, want %v", actualHeader, expectedHeader) } // Verify the key is correct. want := "model_name" got := lines[1] if want != got { return errors.Errorf("incorrect physical CPU key: got %v; want %v", got, want) } // Verify the value is a non-empty string. if lines[2] == "" { return errors.New("empty model_name") } // Verify each logical CPU. for start, i := 3, 4; i <= len(lines); i++ { if i == len(lines) || lines[i] == "Logical CPU:" { if err := verifyLogicalCPU(lines[start:i]); err != nil { return errors.Wrap(err, "failed to verify logical CPU") } start = i } } return nil } func verifyLogicalCPU(lines []string) error { // Make sure we've received at least three lines. The first should be the // logical CPU header, followed by one line of keys, and one line of values. if len(lines) < 3 { return errors.New("could not find any lines of logical CPU info") } // Verify the first line is the correct header. 
actualHeader := lines[0] expectedHeader := "Logical CPU:" if actualHeader != expectedHeader { return errors.Errorf("incorrect logical CPU header: got %v, want %v", actualHeader, expectedHeader) } // Verify the keys are correct. want := []string{"max_clock_speed_khz", "scaling_max_frequency_khz", "scaling_current_frequency_khz", "user_time_user_hz", "system_time_user_hz", "idle_time_user_hz"} got := strings.Split(lines[1], ",") if !reflect.DeepEqual(want, got) { return errors.Errorf("incorrect logical CPU keys: got %v; want %v", got, want) } // Check for error values. vals := strings.Split(lines[2], ",") if len(vals) != len(want) { return errors.Errorf("wrong number of logical CPU values: got %v, want %v", len(vals), len(want)) } for i, val := range vals { if parsed, err := strconv.Atoi(val); err != nil { return errors.Wrapf(err, "failed to convert %q to integer: %q", want[i], val) } else if parsed < 0 { return errors.Errorf("invalid %q: %v", want[i], parsed) } } return verifyCStates(lines[3:]) } func verifyCStates(lines []string) error { // Make sure we've received at least two lines. The first should be the // C-state header, followed by one line of keys and zero or more lines of // C-states. if len(lines) < 2 { return errors.New("could not find any lines of C-state info") } // Verify the first line is the correct header. actualHeader := lines[0] expectedHeader := "C-states:" if actualHeader != expectedHeader { return errors.Errorf("incorrect C-state header: got %v, want %v", actualHeader, expectedHeader) } // Verify the keys are correct. want := []string{"name", "time_in_state_since_last_boot_us"} got := strings.Split(lines[1], ",") if !reflect.DeepEqual(want, got) { return errors.Errorf("incorrect C-state keys: got %v; want %v", got, want) } // Verify each C-state value that exists. 
for _, line := range lines[2:] { vals := strings.Split(line, ",") if len(vals) != 2 { return errors.Errorf("wrong number of C-state values: got %v, want 2", len(vals)) } if vals[0] == "" { return errors.New("empty name") } if i, err := strconv.ParseInt(vals[1], 10, 64); err != nil { return errors.Wrapf(err, "failed to convert time_in_state_since_last_boot_us to integer: %q", vals[1]) } else if i < 0 { return errors.Errorf("invalid time_in_state_since_last_boot_us: %d", i) } } return nil } func CrosHealthdProbeCPUInfo(ctx context.Context, s *testing.State) { b, err := croshealthd.RunTelem(ctx, croshealthd.TelemCategoryCPU, s.OutDir()) if err != nil { s.Fatal("Failed to run telem command: ", err) } // Every board should have at least one physical CPU, which contains at // least one logical CPU. That section is at least nine lines long, and is // preceded by one line of keys and one line of values. lines := strings.Split(strings.TrimRight(string(b), "\n"), "\n") if len(lines) < 11 { s.Fatal("Could not find any lines of CPU info") } for i := range lines { lines[i] = strings.TrimSpace(lines[i]) } // Verify the top-level CpuInfo keys are correct. want := []string{"num_total_threads", "architecture"} got := strings.Split(lines[0], ",") if !reflect.DeepEqual(want, got) { s.Fatalf("Incorrect CpuInfo keys: got %v; want %v", got, want) } // Verify the CpuInfo values are valid. vals := strings.Split(lines[1], ",") if len(vals) != 2 { s.Fatalf("Wrong number of values: got %v, want 2", len(vals)) } if numThreads, err := strconv.Atoi(vals[0]); err != nil { s.Error("Failed to convert num_total_threads to integer: ", err) } else if numThreads <= 0 { s.Error("Invalid num_total_threads") } if vals[1] == "" { s.Error("Empty architecture") } // Verify the output for each physical CPU. Start on the third line, because // that should always be the first line of "Physical CPU:". If it isn't, the // test will fail verifying the first physical CPU, so it's a safe // assumption. 
Don't verify the temperature channels, because they are // optional. for start, i := 2, 3; i < len(lines); i++ { line := lines[i] if i == len(lines) || line == "Physical CPU:" || line == "Temperature Channels:" { err := verifyPhysicalCPU(lines[start:i]) if err != nil { s.Error("Failed to verify physical CPU: ", err) } if line == "Temperature Channels:" { break } start = i } } }
src/chromiumos/tast/local/bundles/cros/platform/cros_healthd_probe_cpu_info.go
0.55929
0.405861
cros_healthd_probe_cpu_info.go
starcoder
package quantile const ( agentBufCap = 512 ) var agentConfig = Default() // An Agent sketch is an insert optimized version of the sketch for use in the // datadog-agent. type Agent struct { Sketch Sketch Buf []Key CountBuf []KeyCount } // IsEmpty returns true if the sketch is empty func (a *Agent) IsEmpty() bool { return a.Sketch.Basic.Cnt == 0 && len(a.Buf) == 0 } // Finish flushes any pending inserts and returns a deep copy of the sketch. func (a *Agent) Finish() *Sketch { a.flush() if a.IsEmpty() { return nil } return a.Sketch.Copy() } // flush buffered values into the sketch. func (a *Agent) flush() { if len(a.Buf) != 0 { a.Sketch.insert(agentConfig, a.Buf) a.Buf = nil } if len(a.CountBuf) != 0 { a.Sketch.insertCounts(agentConfig, a.CountBuf) a.CountBuf = nil } } // Reset the agent sketch to the empty state. func (a *Agent) Reset() { a.Sketch.Reset() a.Buf = nil // TODO: pool } // Insert v into the sketch. func (a *Agent) Insert(v float64) { a.Sketch.Basic.Insert(v) a.Buf = append(a.Buf, agentConfig.key(v)) if len(a.Buf) < agentBufCap { return } a.flush() } // InsertInterpolate linearly interpolates a count from the given lower to upper bounds func (a *Agent) InsertInterpolate(lower float64, upper float64, count uint) { keys := make([]Key, 0) for k := agentConfig.key(lower); k <= agentConfig.key(upper); k++ { keys = append(keys, k) } whatsLeft := int(count) distance := upper - lower kStartIdx := 0 lowerB := agentConfig.binLow(keys[kStartIdx]) kEndIdx := 1 var remainder float64 for kEndIdx < len(keys) && whatsLeft > 0 { upperB := agentConfig.binLow(keys[kEndIdx]) // ((upperB - lowerB) / distance) is the ratio of the distance between the current buckets to the total distance // which tells us how much of the remaining value to put in this bucket fkn := ((upperB - lowerB) / distance) * float64(count) // only track the remainder if fkn is >1 because we designed this to not store a bunch of 0 count buckets if fkn > 1 { remainder += fkn - float64(int(fkn)) } kn := 
int(fkn) if remainder > 1 { kn++ remainder-- } if kn > 0 { // Guard against overflow at the end if kn > whatsLeft { kn = whatsLeft } a.Sketch.Basic.InsertN(lowerB, uint(kn)) a.CountBuf = append(a.CountBuf, KeyCount{k: keys[kStartIdx], n: uint(kn)}) whatsLeft -= kn kStartIdx = kEndIdx lowerB = upperB } kEndIdx++ } if whatsLeft > 0 { a.Sketch.Basic.InsertN(agentConfig.binLow(keys[kStartIdx]), uint(whatsLeft)) a.CountBuf = append(a.CountBuf, KeyCount{k: keys[kStartIdx], n: uint(whatsLeft)}) } a.flush() }
pkg/quantile/agent.go
0.6488
0.406626
agent.go
starcoder
package vaa import ( "bytes" "crypto/ecdsa" "encoding/binary" "encoding/hex" "fmt" "io" "strings" "time" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/crypto" ) type ( // VAA is a verifiable action approval of the Wormhole protocol VAA struct { // Version of the VAA schema Version uint8 // GuardianSetIndex is the index of the guardian set that signed this VAA GuardianSetIndex uint32 // SignatureData is the signature of the guardian set Signatures []*Signature // Timestamp when the VAA was created Timestamp time.Time // Nonce of the VAA Nonce uint32 // Sequence of the VAA Sequence uint64 /// ConsistencyLevel of the VAA ConsistencyLevel uint8 // EmitterChain the VAA was emitted on EmitterChain ChainID // EmitterAddress of the contract that emitted the Message EmitterAddress Address // Payload of the message Payload []byte } // ChainID of a Wormhole chain ChainID uint16 // Action of a VAA Action uint8 // Address is a Wormhole protocol address, it contains the native chain's address. If the address data type of a // chain is < 32bytes the value is zero-padded on the left. 
Address [32]byte // Signature of a single guardian Signature struct { // Index of the validator Index uint8 // Signature data Signature SignatureData } SignatureData [65]byte ) func (a Address) MarshalJSON() ([]byte, error) { return []byte(fmt.Sprintf(`"%s"`, a)), nil } func (a Address) String() string { return hex.EncodeToString(a[:]) } func (a Address) Bytes() []byte { return a[:] } func (a SignatureData) MarshalJSON() ([]byte, error) { return []byte(fmt.Sprintf(`"%s"`, a)), nil } func (a SignatureData) String() string { return hex.EncodeToString(a[:]) } func (c ChainID) String() string { switch c { case ChainIDUnset: return "unset" case ChainIDSolana: return "solana" case ChainIDEthereum: return "ethereum" case ChainIDTerra: return "terra" case ChainIDBSC: return "bsc" case ChainIDPolygon: return "polygon" case ChainIDAvalanche: return "avalanche" case ChainIDOasis: return "oasis" case ChainIDAlgorand: return "algorand" case ChainIDEthereumRopsten: return "ethereum-ropsten" default: return fmt.Sprintf("unknown chain ID: %d", c) } } func ChainIDFromString(s string) (ChainID, error) { s = strings.ToLower(s) switch s { case "solana": return ChainIDSolana, nil case "ethereum": return ChainIDEthereum, nil case "terra": return ChainIDTerra, nil case "bsc": return ChainIDBSC, nil case "polygon": return ChainIDPolygon, nil case "avalanche": return ChainIDAvalanche, nil case "oasis": return ChainIDOasis, nil case "algorand": return ChainIDAlgorand, nil case "ethereum-ropsten": return ChainIDEthereumRopsten, nil default: return ChainIDUnset, fmt.Errorf("unknown chain ID: %s", s) } } const ( ChainIDUnset ChainID = 0 // ChainIDSolana is the ChainID of Solana ChainIDSolana ChainID = 1 // ChainIDEthereum is the ChainID of Ethereum ChainIDEthereum ChainID = 2 // ChainIDTerra is the ChainID of Terra ChainIDTerra ChainID = 3 // ChainIDBSC is the ChainID of Binance Smart Chain ChainIDBSC ChainID = 4 // ChainIDPolygon is the ChainID of Polygon ChainIDPolygon ChainID = 5 // 
ChainIDAvalanche is the ChainID of Avalanche ChainIDAvalanche ChainID = 6 // ChainIDOasis is the ChainID of Oasis ChainIDOasis ChainID = 7 // ChainIDAlgorand is the ChainID of Algorand ChainIDAlgorand ChainID = 8 // ChainIDEthereumRopsten is the ChainID of Ethereum Ropsten ChainIDEthereumRopsten ChainID = 10001 minVAALength = 1 + 4 + 52 + 4 + 1 + 1 SupportedVAAVersion = 0x01 ) // Unmarshal deserializes the binary representation of a VAA func Unmarshal(data []byte) (*VAA, error) { if len(data) < minVAALength { return nil, fmt.Errorf("VAA is too short") } v := &VAA{} v.Version = data[0] if v.Version != SupportedVAAVersion { return nil, fmt.Errorf("unsupported VAA version: %d", v.Version) } reader := bytes.NewReader(data[1:]) if err := binary.Read(reader, binary.BigEndian, &v.GuardianSetIndex); err != nil { return nil, fmt.Errorf("failed to read guardian set index: %w", err) } lenSignatures, er := reader.ReadByte() if er != nil { return nil, fmt.Errorf("failed to read signature length") } v.Signatures = make([]*Signature, lenSignatures) for i := 0; i < int(lenSignatures); i++ { index, err := reader.ReadByte() if err != nil { return nil, fmt.Errorf("failed to read validator index [%d]", i) } signature := [65]byte{} if n, err := reader.Read(signature[:]); err != nil || n != 65 { return nil, fmt.Errorf("failed to read signature [%d]: %w", i, err) } v.Signatures[i] = &Signature{ Index: index, Signature: signature, } } unixSeconds := uint32(0) if err := binary.Read(reader, binary.BigEndian, &unixSeconds); err != nil { return nil, fmt.Errorf("failed to read timestamp: %w", err) } v.Timestamp = time.Unix(int64(unixSeconds), 0) if err := binary.Read(reader, binary.BigEndian, &v.Nonce); err != nil { return nil, fmt.Errorf("failed to read nonce: %w", err) } if err := binary.Read(reader, binary.BigEndian, &v.EmitterChain); err != nil { return nil, fmt.Errorf("failed to read emitter chain: %w", err) } emitterAddress := Address{} if n, err := reader.Read(emitterAddress[:]); err != 
nil || n != 32 { return nil, fmt.Errorf("failed to read emitter address [%d]: %w", n, err) } v.EmitterAddress = emitterAddress if err := binary.Read(reader, binary.BigEndian, &v.Sequence); err != nil { return nil, fmt.Errorf("failed to read sequence: %w", err) } if err := binary.Read(reader, binary.BigEndian, &v.ConsistencyLevel); err != nil { return nil, fmt.Errorf("failed to read commitment: %w", err) } payload := make([]byte, 1000) n, err := reader.Read(payload) if err != nil || n == 0 { return nil, fmt.Errorf("failed to read payload [%d]: %w", n, err) } v.Payload = payload[:n] return v, nil } // signingBody returns the binary representation of the data that is relevant for signing and verifying the VAA func (v *VAA) signingBody() []byte { return v.serializeBody() } // SigningMsg returns the hash of the signing body. This is used for signature generation and verification func (v *VAA) SigningMsg() common.Hash { // In order to save space in the solana signature verification instruction, we hash twice so we only need to pass in // the first hash (32 bytes) vs the full body data. hash := crypto.Keccak256Hash(crypto.Keccak256Hash(v.signingBody()).Bytes()) return hash } // VerifySignatures verifies the signature of the VAA given the signer addresses. // Returns true if the signatures were verified successfully. 
func (v *VAA) VerifySignatures(addresses []common.Address) bool { if len(addresses) < len(v.Signatures) { return false } h := v.SigningMsg() for _, sig := range v.Signatures { if int(sig.Index) >= len(addresses) { return false } pubKey, err := crypto.Ecrecover(h.Bytes(), sig.Signature[:]) if err != nil { return false } addr := common.BytesToAddress(crypto.Keccak256(pubKey[1:])[12:]) if addr != addresses[sig.Index] { return false } } return true } // Marshal returns the binary representation of the VAA func (v *VAA) Marshal() ([]byte, error) { buf := new(bytes.Buffer) MustWrite(buf, binary.BigEndian, v.Version) MustWrite(buf, binary.BigEndian, v.GuardianSetIndex) // Write signatures MustWrite(buf, binary.BigEndian, uint8(len(v.Signatures))) for _, sig := range v.Signatures { MustWrite(buf, binary.BigEndian, sig.Index) buf.Write(sig.Signature[:]) } // Write Body buf.Write(v.serializeBody()) return buf.Bytes(), nil } // MessageID returns a human-readable emitter_chain/emitter_address/sequence tuple. func (v *VAA) MessageID() string { return fmt.Sprintf("%d/%s/%d", v.EmitterChain, v.EmitterAddress, v.Sequence) } // HexDigest returns the hex-encoded digest. 
func (v *VAA) HexDigest() string { return hex.EncodeToString(v.SigningMsg().Bytes()) } func (v *VAA) serializeBody() []byte { buf := new(bytes.Buffer) MustWrite(buf, binary.BigEndian, uint32(v.Timestamp.Unix())) MustWrite(buf, binary.BigEndian, v.Nonce) MustWrite(buf, binary.BigEndian, v.EmitterChain) buf.Write(v.EmitterAddress[:]) MustWrite(buf, binary.BigEndian, v.Sequence) MustWrite(buf, binary.BigEndian, v.ConsistencyLevel) buf.Write(v.Payload) return buf.Bytes() } func (v *VAA) AddSignature(key *ecdsa.PrivateKey, index uint8) { sig, err := crypto.Sign(v.SigningMsg().Bytes(), key) if err != nil { panic(err) } sigData := [65]byte{} copy(sigData[:], sig) v.Signatures = append(v.Signatures, &Signature{ Index: index, Signature: sigData, }) } // MustWrite calls binary.Write and panics on errors func MustWrite(w io.Writer, order binary.ByteOrder, data interface{}) { if err := binary.Write(w, order, data); err != nil { panic(fmt.Errorf("failed to write binary data: %v", data).Error()) } } // StringToAddress converts a hex-encoded adress into a vaa.Address func StringToAddress(value string) (Address, error) { var address Address res, err := hex.DecodeString(value) if err != nil { return address, err } copy(address[:], res) return address, nil }
node/pkg/vaa/structs.go
0.580947
0.416559
structs.go
starcoder
package dirtrally import ( "bytes" "encoding/binary" "encoding/json" "github.com/grafana/grafana-plugin-sdk-go/backend/log" "github.com/grafana/grafana-plugin-sdk-go/data" "time" ) type TelemetryFrame struct { Time float32 LapTime float32 LapDistance float32 TotalDistance float32 X float32 // World space position Y float32 // World space position Z float32 // World space position Speed float32 Xv float32 // Velocity in world space Yv float32 // Velocity in world space Zv float32 // Velocity in world space Xr float32 // World space right direction Yr float32 // World space right direction Zr float32 // World space right direction Xd float32 // World space forward direction Yd float32 // World space forward direction Zd float32 // World space forward direction SuspPosBL float32 SuspPosBR float32 SuspPosFL float32 SuspPosFR float32 SuspVelBL float32 SuspVelBR float32 SuspVelFL float32 SuspVelFR float32 WheelSpeedBL float32 WheelSpeedBR float32 WheelSpeedFL float32 WheelSpeedFR float32 Throttle float32 Steer float32 Brake float32 Clutch float32 Gear float32 GForceLat float32 GForceLon float32 Lap float32 EngineRate float32 SliProNativeSupport float32 // SLI Pro support CarPosition float32 // car race position KersLevel float32 // kers energy left KersMaxLevel float32 // kers maximum energy Drs float32 // 0 = off, 1 = on TractionControl float32 // 0 (off) - 2 (high) AntiLockBRakes float32 // 0 (off) - 1 (on) FuelInTank float32 // current fuel mass FuelCapacity float32 // fuel capacity InPits float32 // 0 = none, 1 = pitting, 2 = in pit area Sector float32 // 0 = sector1, 1 = sector2; 2 = sector3 Sector1Time float32 // time of sector1 (or 0) Sector2Time float32 // time of sector2 (or 0) BRakesTempBL float32 // brakes temperature (centigrade) BRakesTempBR float32 // brakes temperature (centigrade) BRakesTempFL float32 // brakes temperature (centigrade) BRakesTempFR float32 // brakes temperature (centigrade) TeamInfo float32 // team ID TotalLaps float32 // total number of 
laps in this race TrackSize float32 // track size meters LastLapTime float32 // last lap time MaxGears float32 // maximum number of gears SessionType float32 // 0 = unknown, 1 = practice, 2 = qualifying, 3 = race DRSAllowed float32 // 0 = not allowed, 1 = allowed, -1 = invalid / unknown TrackNumber float32 // -1 for unknown, 0-21 for tracks MaxRPM float32 // cars max RPM, at which point the rev limiter will kick in IdleRPM float32 // cars idle RPM VehicleFIAFlags float32 // -1 = invalid/unknown, 0 = none, 1 = green, 2 = blue, 3 = yellow, 4 = red } func ReadPacket(b []byte) (*TelemetryFrame, error) { buf := bytes.NewReader(b) frame := &TelemetryFrame{} err := binary.Read(buf, binary.LittleEndian, frame) if err != nil { return nil, err } return frame, nil } func TelemetryToDataFrame(tf TelemetryFrame) *data.Frame { frame := data.NewFrame("response") telemetryMap := telemetryFrameToMap(tf) frame.Fields = append(frame.Fields, data.NewField("time", nil, []time.Time{time.Now()}), ) for name, value := range telemetryMap { frame.Fields = append(frame.Fields, data.NewField(name, nil, []float32{value}), ) } return frame } func telemetryFrameToMap(frame TelemetryFrame) map[string]float32 { var frameMap map[string]float32 frame = convertTelemetryValues(frame) frameJson, err := json.Marshal(&frame) if err != nil { log.DefaultLogger.Error("Error converting frame", "error", err) } json.Unmarshal(frameJson, &frameMap) return frameMap } func convertTelemetryValues(f TelemetryFrame) TelemetryFrame { f.Speed = f.Speed * 3.6 f.EngineRate = f.EngineRate * 10 return f }
pkg/dirtrally/packet.go
0.589126
0.419291
packet.go
starcoder
package transaction import ( "fmt" "math/big" "Chain3Go/lib/common/hexutil" "Chain3Go/lib/crypto" "Chain3Go/lib/log" ) type bytesBacked interface { Bytes() []byte } const ( // BloomByteLength represents the number of bytes used in a header log bloom. BloomByteLength = 256 // BloomBitLength represents the number of bits used in a header log bloom. BloomBitLength = 8 * BloomByteLength ) // Bloom represents a 2048 bit bloom filter. type Bloom [BloomByteLength]byte // BytesToBloom converts a byte slice to a bloom filter. // It panics if b is not of suitable size. func BytesToBloom(b []byte) Bloom { var bloom Bloom bloom.SetBytes(b) return bloom } // SetBytes sets the content of b to the given bytes. // It panics if d is not of suitable size. func (b *Bloom) SetBytes(d []byte) { if len(b) < len(d) { panic(fmt.Sprintf("bloom bytes too big %d %d", len(b), len(d))) } copy(b[BloomByteLength-len(d):], d) } // Add adds d to the filter. Future calls of Test(d) will return true. func (b *Bloom) Add(d *big.Int) { bin := new(big.Int).SetBytes(b[:]) bin.Or(bin, bloom9(d.Bytes())) b.SetBytes(bin.Bytes()) } // Big converts b to a big integer. func (b Bloom) Big() *big.Int { return new(big.Int).SetBytes(b[:]) } func (b Bloom) Bytes() []byte { return b[:] } func (b Bloom) Test(test *big.Int) bool { return BloomLookup(b, test) } func (b Bloom) TestBytes(test []byte) bool { return b.Test(new(big.Int).SetBytes(test)) } // MarshalText encodes b as a hex string with 0x prefix. func (b Bloom) MarshalText() ([]byte, error) { return hexutil.Bytes(b[:]).MarshalText() } // UnmarshalText b as a hex string with 0x prefix. 
func (b *Bloom) UnmarshalText(input []byte) error { log.Info("[core/types/bloom9.go->Bloom.UnmarshalText] input=" + string(input)) return hexutil.UnmarshalFixedText("Bloom", input, b[:]) } func CreateBloom(receipts Receipts) Bloom { bin := new(big.Int) for _, receipt := range receipts { bin.Or(bin, LogsBloom(receipt.Logs)) } return BytesToBloom(bin.Bytes()) } func LogsBloom(logs []*Log) *big.Int { bin := new(big.Int) for _, log := range logs { bin.Or(bin, bloom9(log.Address.Bytes())) for _, b := range log.Topics { bin.Or(bin, bloom9(b[:])) } } return bin } func bloom9(b []byte) *big.Int { b = crypto.Keccak256(b[:]) r := new(big.Int) for i := 0; i < 6; i += 2 { t := big.NewInt(1) b := (uint(b[i+1]) + (uint(b[i]) << 8)) & 2047 r.Or(r, t.Lsh(t, b)) } return r } var Bloom9 = bloom9 func BloomLookup(bin Bloom, topic bytesBacked) bool { bloom := bin.Big() cmp := bloom9(topic.Bytes()[:]) return bloom.And(bloom, cmp).Cmp(cmp) == 0 }
Chain3Go/lib/transaction/bloom9.go
0.793706
0.552359
bloom9.go
starcoder
package tsdbutil import ( "math" "github.com/prometheus/tsdb" ) // BufferedSeriesIterator wraps an iterator with a look-back buffer. type BufferedSeriesIterator struct { it tsdb.SeriesIterator buf *sampleRing lastTime int64 } // NewBuffer returns a new iterator that buffers the values within the time range // of the current element and the duration of delta before. func NewBuffer(it tsdb.SeriesIterator, delta int64) *BufferedSeriesIterator { return &BufferedSeriesIterator{ it: it, buf: newSampleRing(delta, 16), lastTime: math.MinInt64, } } // PeekBack returns the previous element of the iterator. If there is none buffered, // ok is false. func (b *BufferedSeriesIterator) PeekBack() (t int64, v float64, ok bool) { return b.buf.last() } // Buffer returns an iterator over the buffered data. func (b *BufferedSeriesIterator) Buffer() tsdb.SeriesIterator { return b.buf.iterator() } // Seek advances the iterator to the element at time t or greater. func (b *BufferedSeriesIterator) Seek(t int64) bool { t0 := t - b.buf.delta // If the delta would cause us to seek backwards, preserve the buffer // and just continue regular advancement while filling the buffer on the way. if t0 > b.lastTime { b.buf.reset() ok := b.it.Seek(t0) if !ok { return false } b.lastTime, _ = b.At() } if b.lastTime >= t { return true } for b.Next() { if b.lastTime >= t { return true } } return false } // Next advances the iterator to the next element. func (b *BufferedSeriesIterator) Next() bool { // Add current element to buffer before advancing. b.buf.add(b.it.At()) ok := b.it.Next() if ok { b.lastTime, _ = b.At() } return ok } // At returns the current element of the iterator. func (b *BufferedSeriesIterator) At() (int64, float64) { return b.it.At() } // Err returns the last encountered error. 
func (b *BufferedSeriesIterator) Err() error { return b.it.Err() } type sample struct { t int64 v float64 } type sampleRing struct { delta int64 buf []sample // lookback buffer i int // position of most recent element in ring buffer f int // position of first element in ring buffer l int // number of elements in buffer } func newSampleRing(delta int64, sz int) *sampleRing { r := &sampleRing{delta: delta, buf: make([]sample, sz)} r.reset() return r } func (r *sampleRing) reset() { r.l = 0 r.i = -1 r.f = 0 } func (r *sampleRing) iterator() tsdb.SeriesIterator { return &sampleRingIterator{r: r, i: -1} } type sampleRingIterator struct { r *sampleRing i int } func (it *sampleRingIterator) Next() bool { it.i++ return it.i < it.r.l } func (it *sampleRingIterator) Seek(int64) bool { return false } func (it *sampleRingIterator) Err() error { return nil } func (it *sampleRingIterator) At() (int64, float64) { return it.r.at(it.i) } func (r *sampleRing) at(i int) (int64, float64) { j := (r.f + i) % len(r.buf) s := r.buf[j] return s.t, s.v } // add adds a sample to the ring buffer and frees all samples that fall // out of the delta range. func (r *sampleRing) add(t int64, v float64) { l := len(r.buf) // Grow the ring buffer if it fits no more elements. if l == r.l { buf := make([]sample, 2*l) copy(buf[l+r.f:], r.buf[r.f:]) copy(buf, r.buf[:r.f]) r.buf = buf r.i = r.f r.f += l } else { r.i++ if r.i >= l { r.i -= l } } r.buf[r.i] = sample{t: t, v: v} r.l++ // Free head of the buffer of samples that just fell out of the range. for r.buf[r.f].t < t-r.delta { r.f++ if r.f >= l { r.f -= l } r.l-- } } // last returns the most recent element added to the ring. 
func (r *sampleRing) last() (int64, float64, bool) { if r.l == 0 { return 0, 0, false } s := r.buf[r.i] return s.t, s.v, true } func (r *sampleRing) samples() []sample { res := make([]sample, r.l) var k = r.f + r.l var j int if k > len(r.buf) { k = len(r.buf) j = r.l - k + r.f } n := copy(res, r.buf[r.f:k]) copy(res[n:], r.buf[:j]) return res }
tsdbutil/buffer.go
0.826327
0.477311
buffer.go
starcoder
package delphix_dct_api import ( "encoding/json" "time" ) // DataPointByTimestampParameters struct for DataPointByTimestampParameters type DataPointByTimestampParameters struct { // The point in time from which to execute the operation. Mutually exclusive with timestamp_in_database_timezone. If the timestamp is not set, selects the latest point. Timestamp *time.Time `json:"timestamp,omitempty"` // The point in time from which to execute the operation, expressed as a date-time in the timezone of the source database. Mutually exclusive with timestamp. TimestampInDatabaseTimezone *string `json:"timestamp_in_database_timezone,omitempty"` } // NewDataPointByTimestampParameters instantiates a new DataPointByTimestampParameters object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewDataPointByTimestampParameters() *DataPointByTimestampParameters { this := DataPointByTimestampParameters{} return &this } // NewDataPointByTimestampParametersWithDefaults instantiates a new DataPointByTimestampParameters object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewDataPointByTimestampParametersWithDefaults() *DataPointByTimestampParameters { this := DataPointByTimestampParameters{} return &this } // GetTimestamp returns the Timestamp field value if set, zero value otherwise. func (o *DataPointByTimestampParameters) GetTimestamp() time.Time { if o == nil || o.Timestamp == nil { var ret time.Time return ret } return *o.Timestamp } // GetTimestampOk returns a tuple with the Timestamp field value if set, nil otherwise // and a boolean to check if the value has been set. 
func (o *DataPointByTimestampParameters) GetTimestampOk() (*time.Time, bool) { if o == nil || o.Timestamp == nil { return nil, false } return o.Timestamp, true } // HasTimestamp returns a boolean if a field has been set. func (o *DataPointByTimestampParameters) HasTimestamp() bool { if o != nil && o.Timestamp != nil { return true } return false } // SetTimestamp gets a reference to the given time.Time and assigns it to the Timestamp field. func (o *DataPointByTimestampParameters) SetTimestamp(v time.Time) { o.Timestamp = &v } // GetTimestampInDatabaseTimezone returns the TimestampInDatabaseTimezone field value if set, zero value otherwise. func (o *DataPointByTimestampParameters) GetTimestampInDatabaseTimezone() string { if o == nil || o.TimestampInDatabaseTimezone == nil { var ret string return ret } return *o.TimestampInDatabaseTimezone } // GetTimestampInDatabaseTimezoneOk returns a tuple with the TimestampInDatabaseTimezone field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *DataPointByTimestampParameters) GetTimestampInDatabaseTimezoneOk() (*string, bool) { if o == nil || o.TimestampInDatabaseTimezone == nil { return nil, false } return o.TimestampInDatabaseTimezone, true } // HasTimestampInDatabaseTimezone returns a boolean if a field has been set. func (o *DataPointByTimestampParameters) HasTimestampInDatabaseTimezone() bool { if o != nil && o.TimestampInDatabaseTimezone != nil { return true } return false } // SetTimestampInDatabaseTimezone gets a reference to the given string and assigns it to the TimestampInDatabaseTimezone field. 
func (o *DataPointByTimestampParameters) SetTimestampInDatabaseTimezone(v string) { o.TimestampInDatabaseTimezone = &v } func (o DataPointByTimestampParameters) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if o.Timestamp != nil { toSerialize["timestamp"] = o.Timestamp } if o.TimestampInDatabaseTimezone != nil { toSerialize["timestamp_in_database_timezone"] = o.TimestampInDatabaseTimezone } return json.Marshal(toSerialize) } type NullableDataPointByTimestampParameters struct { value *DataPointByTimestampParameters isSet bool } func (v NullableDataPointByTimestampParameters) Get() *DataPointByTimestampParameters { return v.value } func (v *NullableDataPointByTimestampParameters) Set(val *DataPointByTimestampParameters) { v.value = val v.isSet = true } func (v NullableDataPointByTimestampParameters) IsSet() bool { return v.isSet } func (v *NullableDataPointByTimestampParameters) Unset() { v.value = nil v.isSet = false } func NewNullableDataPointByTimestampParameters(val *DataPointByTimestampParameters) *NullableDataPointByTimestampParameters { return &NullableDataPointByTimestampParameters{value: val, isSet: true} } func (v NullableDataPointByTimestampParameters) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableDataPointByTimestampParameters) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
model_data_point_by_timestamp_parameters.go
0.82176
0.415551
model_data_point_by_timestamp_parameters.go
starcoder
package framework import pkgFramework "github.com/stackrox/rox/pkg/compliance/framework" // A Check is a single piece of logic executed as part of a compliance run. It usually corresponds to one or multiple // controls in a compliance standard. type Check interface { // ID returns an ID uniquely identifying a check. ID() string // AppliesToScope checks if the check applies to the given scope. This has no effect as to how the check is executed // by the framework (see `Run` below), but informs how results are collected and, in particular, how missing results // are detected. AppliesToScope(scope pkgFramework.TargetKind) bool // Scope returns the target of the check Scope() pkgFramework.TargetKind // DataDependencies is a list of IDs for data required by a check. DataDependencies() []string // InterpretationText returns a string describing how StackRox is implementing this check. InterpretationText() string // Run is the entry point for the check logic. It is *always* invoked on a context with a 'cluster' target kind; // it is the responsibility of the implementation to call `RunForTarget`/`ForEachNode`/`ForEachDeployment` to cover // all objects at the indicated scope. Run(ctx ComplianceContext) } // CheckMetadata stores metadata associated with a check. type CheckMetadata struct { ID string Scope pkgFramework.TargetKind AdditionalScopes []pkgFramework.TargetKind DataDependencies []string InterpretationText string RemoteCheck bool } // CheckFunc is the function realizing a compliance check. While every `Check` has a `CheckFunc` (namely `chk.Run` for // a Check `chk`), not every `CheckFunc` corresponds to a check. Rather, a `Check` (or a `CheckFunc`) can be realized // by invoking multiple `CheckFunc`s, e.g., one for each node/deployment in the cluster (remember that a `Check` is // always invoked at cluster scope). 
type CheckFunc func(ComplianceContext) type checkFromFunc struct { metadata CheckMetadata checkFn CheckFunc } // NewCheckFromFunc returns a new check with the given metadata from the given `CheckFunc`. func NewCheckFromFunc(metadata CheckMetadata, checkFn CheckFunc) Check { return &checkFromFunc{ metadata: metadata, checkFn: checkFn, } } func (c *checkFromFunc) ID() string { return c.metadata.ID } func (c *checkFromFunc) InterpretationText() string { return c.metadata.InterpretationText } func (c *checkFromFunc) Scope() pkgFramework.TargetKind { return c.metadata.Scope } func (c *checkFromFunc) AppliesToScope(scope pkgFramework.TargetKind) bool { if c.metadata.Scope == scope { return true } for _, addlScope := range c.metadata.AdditionalScopes { if addlScope == scope { return true } } return false } func (c *checkFromFunc) DataDependencies() []string { return c.metadata.DataDependencies } func (c *checkFromFunc) Run(ctx ComplianceContext) { if c.metadata.RemoteCheck { return } c.checkFn(ctx) }
central/compliance/framework/check.go
0.788583
0.507507
check.go
starcoder
package datadog import ( "encoding/json" ) // LogsRetentionAggSumUsage Object containing indexed logs usage aggregated across organizations and months for a retention period. type LogsRetentionAggSumUsage struct { // Total indexed logs for this retention period. LogsIndexedLogsUsageAggSum *int64 `json:"logs_indexed_logs_usage_agg_sum,omitempty"` // Live indexed logs for this retention period. LogsLiveIndexedLogsUsageAggSum *int64 `json:"logs_live_indexed_logs_usage_agg_sum,omitempty"` // Rehydrated indexed logs for this retention period. LogsRehydratedIndexedLogsUsageAggSum *int64 `json:"logs_rehydrated_indexed_logs_usage_agg_sum,omitempty"` // The retention period in days or \"custom\" for all custom retention periods. Retention *string `json:"retention,omitempty"` // UnparsedObject contains the raw value of the object if there was an error when deserializing into the struct UnparsedObject map[string]interface{} `json:-` } // NewLogsRetentionAggSumUsage instantiates a new LogsRetentionAggSumUsage object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewLogsRetentionAggSumUsage() *LogsRetentionAggSumUsage { this := LogsRetentionAggSumUsage{} return &this } // NewLogsRetentionAggSumUsageWithDefaults instantiates a new LogsRetentionAggSumUsage object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewLogsRetentionAggSumUsageWithDefaults() *LogsRetentionAggSumUsage { this := LogsRetentionAggSumUsage{} return &this } // GetLogsIndexedLogsUsageAggSum returns the LogsIndexedLogsUsageAggSum field value if set, zero value otherwise. 
func (o *LogsRetentionAggSumUsage) GetLogsIndexedLogsUsageAggSum() int64 { if o == nil || o.LogsIndexedLogsUsageAggSum == nil { var ret int64 return ret } return *o.LogsIndexedLogsUsageAggSum } // GetLogsIndexedLogsUsageAggSumOk returns a tuple with the LogsIndexedLogsUsageAggSum field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *LogsRetentionAggSumUsage) GetLogsIndexedLogsUsageAggSumOk() (*int64, bool) { if o == nil || o.LogsIndexedLogsUsageAggSum == nil { return nil, false } return o.LogsIndexedLogsUsageAggSum, true } // HasLogsIndexedLogsUsageAggSum returns a boolean if a field has been set. func (o *LogsRetentionAggSumUsage) HasLogsIndexedLogsUsageAggSum() bool { if o != nil && o.LogsIndexedLogsUsageAggSum != nil { return true } return false } // SetLogsIndexedLogsUsageAggSum gets a reference to the given int64 and assigns it to the LogsIndexedLogsUsageAggSum field. func (o *LogsRetentionAggSumUsage) SetLogsIndexedLogsUsageAggSum(v int64) { o.LogsIndexedLogsUsageAggSum = &v } // GetLogsLiveIndexedLogsUsageAggSum returns the LogsLiveIndexedLogsUsageAggSum field value if set, zero value otherwise. func (o *LogsRetentionAggSumUsage) GetLogsLiveIndexedLogsUsageAggSum() int64 { if o == nil || o.LogsLiveIndexedLogsUsageAggSum == nil { var ret int64 return ret } return *o.LogsLiveIndexedLogsUsageAggSum } // GetLogsLiveIndexedLogsUsageAggSumOk returns a tuple with the LogsLiveIndexedLogsUsageAggSum field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *LogsRetentionAggSumUsage) GetLogsLiveIndexedLogsUsageAggSumOk() (*int64, bool) { if o == nil || o.LogsLiveIndexedLogsUsageAggSum == nil { return nil, false } return o.LogsLiveIndexedLogsUsageAggSum, true } // HasLogsLiveIndexedLogsUsageAggSum returns a boolean if a field has been set. 
func (o *LogsRetentionAggSumUsage) HasLogsLiveIndexedLogsUsageAggSum() bool { if o != nil && o.LogsLiveIndexedLogsUsageAggSum != nil { return true } return false } // SetLogsLiveIndexedLogsUsageAggSum gets a reference to the given int64 and assigns it to the LogsLiveIndexedLogsUsageAggSum field. func (o *LogsRetentionAggSumUsage) SetLogsLiveIndexedLogsUsageAggSum(v int64) { o.LogsLiveIndexedLogsUsageAggSum = &v } // GetLogsRehydratedIndexedLogsUsageAggSum returns the LogsRehydratedIndexedLogsUsageAggSum field value if set, zero value otherwise. func (o *LogsRetentionAggSumUsage) GetLogsRehydratedIndexedLogsUsageAggSum() int64 { if o == nil || o.LogsRehydratedIndexedLogsUsageAggSum == nil { var ret int64 return ret } return *o.LogsRehydratedIndexedLogsUsageAggSum } // GetLogsRehydratedIndexedLogsUsageAggSumOk returns a tuple with the LogsRehydratedIndexedLogsUsageAggSum field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *LogsRetentionAggSumUsage) GetLogsRehydratedIndexedLogsUsageAggSumOk() (*int64, bool) { if o == nil || o.LogsRehydratedIndexedLogsUsageAggSum == nil { return nil, false } return o.LogsRehydratedIndexedLogsUsageAggSum, true } // HasLogsRehydratedIndexedLogsUsageAggSum returns a boolean if a field has been set. func (o *LogsRetentionAggSumUsage) HasLogsRehydratedIndexedLogsUsageAggSum() bool { if o != nil && o.LogsRehydratedIndexedLogsUsageAggSum != nil { return true } return false } // SetLogsRehydratedIndexedLogsUsageAggSum gets a reference to the given int64 and assigns it to the LogsRehydratedIndexedLogsUsageAggSum field. func (o *LogsRetentionAggSumUsage) SetLogsRehydratedIndexedLogsUsageAggSum(v int64) { o.LogsRehydratedIndexedLogsUsageAggSum = &v } // GetRetention returns the Retention field value if set, zero value otherwise. 
func (o *LogsRetentionAggSumUsage) GetRetention() string { if o == nil || o.Retention == nil { var ret string return ret } return *o.Retention } // GetRetentionOk returns a tuple with the Retention field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *LogsRetentionAggSumUsage) GetRetentionOk() (*string, bool) { if o == nil || o.Retention == nil { return nil, false } return o.Retention, true } // HasRetention returns a boolean if a field has been set. func (o *LogsRetentionAggSumUsage) HasRetention() bool { if o != nil && o.Retention != nil { return true } return false } // SetRetention gets a reference to the given string and assigns it to the Retention field. func (o *LogsRetentionAggSumUsage) SetRetention(v string) { o.Retention = &v } func (o LogsRetentionAggSumUsage) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if o.UnparsedObject != nil { return json.Marshal(o.UnparsedObject) } if o.LogsIndexedLogsUsageAggSum != nil { toSerialize["logs_indexed_logs_usage_agg_sum"] = o.LogsIndexedLogsUsageAggSum } if o.LogsLiveIndexedLogsUsageAggSum != nil { toSerialize["logs_live_indexed_logs_usage_agg_sum"] = o.LogsLiveIndexedLogsUsageAggSum } if o.LogsRehydratedIndexedLogsUsageAggSum != nil { toSerialize["logs_rehydrated_indexed_logs_usage_agg_sum"] = o.LogsRehydratedIndexedLogsUsageAggSum } if o.Retention != nil { toSerialize["retention"] = o.Retention } return json.Marshal(toSerialize) } func (o *LogsRetentionAggSumUsage) UnmarshalJSON(bytes []byte) (err error) { raw := map[string]interface{}{} all := struct { LogsIndexedLogsUsageAggSum *int64 `json:"logs_indexed_logs_usage_agg_sum,omitempty"` LogsLiveIndexedLogsUsageAggSum *int64 `json:"logs_live_indexed_logs_usage_agg_sum,omitempty"` LogsRehydratedIndexedLogsUsageAggSum *int64 `json:"logs_rehydrated_indexed_logs_usage_agg_sum,omitempty"` Retention *string `json:"retention,omitempty"` }{} err = json.Unmarshal(bytes, &all) if err != nil { err = 
json.Unmarshal(bytes, &raw) if err != nil { return err } o.UnparsedObject = raw return nil } o.LogsIndexedLogsUsageAggSum = all.LogsIndexedLogsUsageAggSum o.LogsLiveIndexedLogsUsageAggSum = all.LogsLiveIndexedLogsUsageAggSum o.LogsRehydratedIndexedLogsUsageAggSum = all.LogsRehydratedIndexedLogsUsageAggSum o.Retention = all.Retention return nil } type NullableLogsRetentionAggSumUsage struct { value *LogsRetentionAggSumUsage isSet bool } func (v NullableLogsRetentionAggSumUsage) Get() *LogsRetentionAggSumUsage { return v.value } func (v *NullableLogsRetentionAggSumUsage) Set(val *LogsRetentionAggSumUsage) { v.value = val v.isSet = true } func (v NullableLogsRetentionAggSumUsage) IsSet() bool { return v.isSet } func (v *NullableLogsRetentionAggSumUsage) Unset() { v.value = nil v.isSet = false } func NewNullableLogsRetentionAggSumUsage(val *LogsRetentionAggSumUsage) *NullableLogsRetentionAggSumUsage { return &NullableLogsRetentionAggSumUsage{value: val, isSet: true} } func (v NullableLogsRetentionAggSumUsage) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableLogsRetentionAggSumUsage) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
api/v1/datadog/model_logs_retention_agg_sum_usage.go
0.70912
0.421105
model_logs_retention_agg_sum_usage.go
starcoder
package canvas import ( "image" "fyne.io/fyne" ) // ImageFill defines the different type of ways an image can stretch to fill its space. type ImageFill int const ( // ImageFillStretch will scale the image to match the Size() values. // This is the default and does not maintain aspect ratio. ImageFillStretch ImageFill = iota // ImageFillContain makes the image fit within the object Size(), // centrally and maintaining aspect ratio. // There may be transparent sections top and bottom or left and right. ImageFillContain //(Fit) // ImageFillOriginal ensures that the container grows to the pixel dimensions // required to fit the original image. The aspect of the image will be maintained so, // as with ImageFillContain there may be transparent areas around the image. // Note that the minSize may be smaller than the image dimensions if scale > 1. ImageFillOriginal ) // Declare conformity with CanvasObject interface var _ fyne.CanvasObject = (*Image)(nil) // Image describes a drawable image area that can render in a Fyne canvas // The image may be a vector or a bitmap representation and it will fill the area. // The fill mode can be changed by setting FillMode to a different ImageFill. type Image struct { baseObject // one of the following sources will provide our image data File string // Load the image from a file Resource fyne.Resource // Load the image from an in-memory resource Image image.Image // Specify a loaded image to use in this canvas object Translucency float64 // Set a translucency value > 0.0 to fade the image FillMode ImageFill // Specify how the image should scale to fill or fit } // Alpha is a convenience function that returns the alpha value for an image // based on its Translucency value. The result is 1.0 - Translucency. func (i *Image) Alpha() float64 { return 1.0 - i.Translucency } // NewImageFromFile creates a new image from a local file. // Images returned from this method will scale to fit the canvas object. 
// The method for scaling can be set using the Fill field. func NewImageFromFile(file string) *Image { return &Image{ File: file, } } // NewImageFromResource creates a new image by loading the specified resource. // Images returned from this method will scale to fit the canvas object. // The method for scaling can be set using the Fill field. func NewImageFromResource(res fyne.Resource) *Image { return &Image{ Resource: res, } } // NewImageFromImage returns a new Image instance that is rendered from the Go // image.Image passed in. // Images returned from this method will scale to fit the canvas object. // The method for scaling can be set using the Fill field. func NewImageFromImage(img image.Image) *Image { return &Image{ Image: img, } }
canvas/image.go
0.810479
0.496704
image.go
starcoder
package benchmark

import (
	"reflect"
	"testing"
)

// The isXxxToInt32FuncCalibrated helpers report whether the given supplier has
// been calibrated against the Xxx -> int32 benchmark family; the matching
// setXxxToInt32FuncCalibrated helpers record that calibration. Both delegate
// to the shared isCalibrated/setCalibrated registry keyed by (input kind,
// output kind, supplier function pointer).

func isBoolToInt32FuncCalibrated(supplier func() bool) bool {
	return isCalibrated(reflect.Bool, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func isIntToInt32FuncCalibrated(supplier func() int) bool {
	return isCalibrated(reflect.Int, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func isInt8ToInt32FuncCalibrated(supplier func() int8) bool {
	return isCalibrated(reflect.Int8, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func isInt16ToInt32FuncCalibrated(supplier func() int16) bool {
	return isCalibrated(reflect.Int16, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func isInt32ToInt32FuncCalibrated(supplier func() int32) bool {
	return isCalibrated(reflect.Int32, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func isInt64ToInt32FuncCalibrated(supplier func() int64) bool {
	return isCalibrated(reflect.Int64, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func isUintToInt32FuncCalibrated(supplier func() uint) bool {
	return isCalibrated(reflect.Uint, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func isUint8ToInt32FuncCalibrated(supplier func() uint8) bool {
	return isCalibrated(reflect.Uint8, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func isUint16ToInt32FuncCalibrated(supplier func() uint16) bool {
	return isCalibrated(reflect.Uint16, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func isUint32ToInt32FuncCalibrated(supplier func() uint32) bool {
	return isCalibrated(reflect.Uint32, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func isUint64ToInt32FuncCalibrated(supplier func() uint64) bool {
	return isCalibrated(reflect.Uint64, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func setBoolToInt32FuncCalibrated(supplier func() bool) {
	setCalibrated(reflect.Bool, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func setIntToInt32FuncCalibrated(supplier func() int) {
	setCalibrated(reflect.Int, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func setInt8ToInt32FuncCalibrated(supplier func() int8) {
	setCalibrated(reflect.Int8, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func setInt16ToInt32FuncCalibrated(supplier func() int16) {
	setCalibrated(reflect.Int16, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func setInt32ToInt32FuncCalibrated(supplier func() int32) {
	setCalibrated(reflect.Int32, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func setInt64ToInt32FuncCalibrated(supplier func() int64) {
	setCalibrated(reflect.Int64, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func setUintToInt32FuncCalibrated(supplier func() uint) {
	setCalibrated(reflect.Uint, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func setUint8ToInt32FuncCalibrated(supplier func() uint8) {
	setCalibrated(reflect.Uint8, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func setUint16ToInt32FuncCalibrated(supplier func() uint16) {
	setCalibrated(reflect.Uint16, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func setUint32ToInt32FuncCalibrated(supplier func() uint32) {
	setCalibrated(reflect.Uint32, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

func setUint64ToInt32FuncCalibrated(supplier func() uint64) {
	setCalibrated(reflect.Uint64, reflect.Int32, reflect.ValueOf(supplier).Pointer())
}

// BoolToInt32Func benchmarks a function with the signature:
// func(bool) int32
// ID: B-5-1
func BoolToInt32Func(b *testing.B, supplier func() bool, toInt32Func func(bool) int32) {
	if !isBoolSupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isBoolToInt32FuncCalibrated(supplier) {
		panic("BoolToInt32Func not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toInt32Func(supplier())
	}
}

// IntToInt32Func benchmarks a function with the signature:
// func(int) int32
// ID: B-5-2
func IntToInt32Func(b *testing.B, supplier func() int, toInt32Func func(int) int32) {
	if !isIntSupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isIntToInt32FuncCalibrated(supplier) {
		panic("IntToInt32Func not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toInt32Func(supplier())
	}
}

// Int8ToInt32Func benchmarks a function with the signature:
// func(int8) int32
// ID: B-5-3
func Int8ToInt32Func(b *testing.B, supplier func() int8, toInt32Func func(int8) int32) {
	if !isInt8SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isInt8ToInt32FuncCalibrated(supplier) {
		panic("Int8ToInt32Func not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toInt32Func(supplier())
	}
}

// Int16ToInt32Func benchmarks a function with the signature:
// func(int16) int32
// ID: B-5-4
func Int16ToInt32Func(b *testing.B, supplier func() int16, toInt32Func func(int16) int32) {
	if !isInt16SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isInt16ToInt32FuncCalibrated(supplier) {
		panic("Int16ToInt32Func not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toInt32Func(supplier())
	}
}

// Int32ToInt32Func benchmarks a function with the signature:
// func(int32) int32
// ID: B-5-5
func Int32ToInt32Func(b *testing.B, supplier func() int32, toInt32Func func(int32) int32) {
	if !isInt32SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isInt32ToInt32FuncCalibrated(supplier) {
		panic("Int32ToInt32Func not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toInt32Func(supplier())
	}
}

// Int64ToInt32Func benchmarks a function with the signature:
// func(int64) int32
// ID: B-5-6
func Int64ToInt32Func(b *testing.B, supplier func() int64, toInt32Func func(int64) int32) {
	if !isInt64SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isInt64ToInt32FuncCalibrated(supplier) {
		panic("Int64ToInt32Func not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toInt32Func(supplier())
	}
}

// UintToInt32Func benchmarks a function with the signature:
// func(uint) int32
// ID: B-5-7
func UintToInt32Func(b *testing.B, supplier func() uint, toInt32Func func(uint) int32) {
	if !isUintSupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isUintToInt32FuncCalibrated(supplier) {
		panic("UintToInt32Func not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toInt32Func(supplier())
	}
}

// Uint8ToInt32Func benchmarks a function with the signature:
// func(uint8) int32
// ID: B-5-8
func Uint8ToInt32Func(b *testing.B, supplier func() uint8, toInt32Func func(uint8) int32) {
	if !isUint8SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isUint8ToInt32FuncCalibrated(supplier) {
		panic("Uint8ToInt32Func not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toInt32Func(supplier())
	}
}

// Uint16ToInt32Func benchmarks a function with the signature:
// func(uint16) int32
// ID: B-5-9
func Uint16ToInt32Func(b *testing.B, supplier func() uint16, toInt32Func func(uint16) int32) {
	if !isUint16SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isUint16ToInt32FuncCalibrated(supplier) {
		panic("Uint16ToInt32Func not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toInt32Func(supplier())
	}
}

// Uint32ToInt32Func benchmarks a function with the signature:
// func(uint32) int32
// ID: B-5-10
func Uint32ToInt32Func(b *testing.B, supplier func() uint32, toInt32Func func(uint32) int32) {
	if !isUint32SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isUint32ToInt32FuncCalibrated(supplier) {
		panic("Uint32ToInt32Func not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toInt32Func(supplier())
	}
}

// Uint64ToInt32Func benchmarks a function with the signature:
// func(uint64) int32
// ID: B-5-11
func Uint64ToInt32Func(b *testing.B, supplier func() uint64, toInt32Func func(uint64) int32) {
	if !isUint64SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isUint64ToInt32FuncCalibrated(supplier) {
		panic("Uint64ToInt32Func not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toInt32Func(supplier())
	}
}
common/benchmark/05_to_int32_func.go
0.699254
0.804866
05_to_int32_func.go
starcoder
package goexpression

import (
	"math"
)

// Floater is implemented by context values that can present themselves as a
// float64.
type Floater interface {
	Float64() float64
}

// expression pairs a parsed AST with the variable context it is evaluated in.
type expression struct {
	ast     *TreeNode
	context map[string]interface{}
}

// Bug(zdebeer): functions is eval from right to left instead from left to right.

// Eval parses input and evaluates it against the given context, returning the
// numeric result. It panics on parse or evaluation errors.
func Eval(input string, context map[string]interface{}) float64 {
	node, err := Parse(input)
	if err != nil {
		panic(err)
	}
	expr := &expression{node, context}
	return expr.eval(expr.ast)
}

// eval walks basenode's children and returns the value of the first function
// or value node encountered; group nodes (CatOther) are recursed into via
// switchOther without contributing a return value. Panics if no child yields
// a value.
func (this *expression) eval(basenode *TreeNode) float64 {
	for _, node := range basenode.items {
		switch node.Value.Category() {
		case CatFunction:
			return this.switchFunction(node)
		case CatValue:
			return this.getNumber(node)
		case CatOther:
			this.switchOther(node)
		}
	}
	panic("eval failed. f.")
}

// switchOther handles nodes that are neither functions nor values. Only "()"
// group tokens are accepted; anything else panics.
// NOTE(review): the group's computed value is discarded here — confirm that
// is intentional, since eval only returns from function/value branches.
func (this *expression) switchOther(node *TreeNode) {
	switch v1 := node.Value.(type) {
	case *GroupToken:
		if v1.GroupType == "()" {
			this.eval(node)
			return
		}
	}
	panic("Invalid Node " + node.String())
}

// switchFunction dispatches an operator node to the matching arithmetic
// implementation, applied across the node's children.
func (this *expression) switchFunction(node *TreeNode) float64 {
	val1 := node.Value.(*OperatorToken)
	switch val1.Operator {
	case "+":
		return this.evalMathOperator(this.evalMathPlus, node.Items())
	case "-":
		return this.evalMathOperator(this.evalMathMinus, node.Items())
	case "*":
		return this.evalMathOperator(this.evalMathMultiply, node.Items())
	case "/":
		return this.evalMathOperator(this.evalMathDevide, node.Items())
	case "^":
		return this.evalMathOperator(this.evalMathPower, node.Items())
	default:
		panic("Function not supported")
	}
}

// getNumber resolves a node to a float64: literal numbers directly, identifiers
// via the context, operators by recursive dispatch, and "()" groups by
// evaluating their contents.
func (this *expression) getNumber(node *TreeNode) float64 {
	switch v := node.Value.(type) {
	case *NumberToken:
		return v.Value
	case *IdentityToken:
		r1 := this.getValue(v)
		return this.toFloat64(r1)
	case *OperatorToken:
		return this.switchFunction(node)
	case *GroupToken:
		if v.GroupType == "()" {
			return this.eval(node)
		}
		panic("Unexpected grouping type: " + node.String())
	default:
		panic("Unexpected value: " + node.String())
	}
}

// evalMathOperator left-folds the binary operator fn over the argument nodes.
// Panics if fewer than two arguments are supplied.
func (this *expression) evalMathOperator(fn func(float64, float64) float64, args []*TreeNode) float64 {
	cnt := len(args)
	switch {
	case cnt < 2:
		panic("Operator Missing Arguments.")
	case cnt == 2:
		return fn(this.getNumber(args[0]), this.getNumber(args[1]))
	default:
		answ := fn(this.getNumber(args[0]), this.getNumber(args[1]))
		for i := 2; i < cnt; i++ {
			answ = fn(answ, this.getNumber(args[i]))
		}
		return answ
	}
}

func (this *expression) evalMathPlus(val1, val2 float64) float64 {
	return val1 + val2
}

func (this *expression) evalMathMinus(val1, val2 float64) float64 {
	return val1 - val2
}

func (this *expression) evalMathMultiply(val1, val2 float64) float64 {
	return val1 * val2
}

func (this *expression) evalMathDevide(val1, val2 float64) float64 {
	return val1 / val2
}

func (this *expression) evalMathPower(val1, val2 float64) float64 {
	return math.Pow(val1, val2)
}

// getValue looks an identifier up in the context map; missing names yield nil,
// which toFloat64 will reject with a panic.
func (this *expression) getValue(token *IdentityToken) interface{} {
	return this.context[token.Name]
}

// toFloat64 converts the supported numeric context types (and Floater
// implementations) to float64, panicking on anything else.
func (this *expression) toFloat64(value interface{}) float64 {
	switch i := value.(type) {
	case float64:
		return i
	case float32:
		return float64(i)
	case int64:
		return float64(i)
	case int32:
		return float64(i)
	case int:
		return float64(i)
	case Floater:
		return i.Float64()
	default:
		panic("toFloat: unknown value is of incompatible type")
	}
}
eval.go
0.506347
0.476823
eval.go
starcoder
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Copyright ©1984, ©1987, ©1995 by <NAME>
// Portions Copyright ©2017 The Gonum Authors. All rights reserved.

package cephes

import "math"

// IgamI computes the inverse of the incomplete Gamma function. That is, it
// returns the x such that:
//  IgamC(a, x) = p
// The input argument a must be positive and p must be between 0 and 1
// inclusive or IgamI will panic. IgamI should return a positive number, but
// can return 0 even with non-zero y due to underflow.
func IgamI(a, p float64) float64 {
	// Bound the solution: [x1, x0] brackets x, [yl, yh] brackets IgamC(a, x).
	x0 := math.MaxFloat64
	yl := 0.0
	x1 := 0.0
	yh := 1.0
	dithresh := 5.0 * machEp

	if p < 0 || p > 1 || a <= 0 {
		panic(paramOutOfBounds)
	}
	if p == 0 {
		return math.Inf(1)
	}
	if p == 1 {
		return 0.0
	}

	// Starting with the approximate value
	//  x = a y^3
	// where
	//  y = 1 - d - ndtri(p) sqrt(d)
	// and
	//  d = 1/9a
	// the routine performs up to 10 Newton iterations to find the root of
	//  IgamC(a, x) - p = 0
	d := 1.0 / (9.0 * a)
	y := 1.0 - d - Ndtri(p)*math.Sqrt(d)
	x := a * y * y * y
	lgm := lgam(a)

	for i := 0; i < 10; i++ {
		// Abandon Newton if the iterate escapes the current bracket.
		if x > x0 || x < x1 {
			break
		}
		y = IgamC(a, x)
		if y < yl || y > yh {
			break
		}
		// Tighten the bracket around the root.
		if y < p {
			x0 = x
			yl = y
		} else {
			x1 = x
			yh = y
		}
		// Compute the derivative of the function at this point
		d = (a-1)*math.Log(x) - x - lgm
		if d < -maxLog {
			break
		}
		d = -math.Exp(d)
		// Compute the step to the next approximation of x
		d = (y - p) / d
		if math.Abs(d/x) < machEp {
			return x
		}
		x = x - d
	}

	// Newton did not converge: resort to interval halving if not close enough.
	d = 0.0625
	if x0 == math.MaxFloat64 {
		// No upper bound was established above; grow x geometrically until
		// IgamC(a, x) falls below p.
		if x <= 0 {
			x = 1
		}
		for x0 == math.MaxFloat64 {
			x = (1 + d) * x
			y = IgamC(a, x)
			if y < p {
				x0 = x
				yl = y
				break
			}
			d = d + d
		}
	}

	// Bisection-style refinement; dir tracks consecutive moves in the same
	// direction so the step factor d can adapt.
	d = 0.5
	dir := 0
	for i := 0; i < 400; i++ {
		x = x1 + d*(x0-x1)
		y = IgamC(a, x)
		lgm = (x0 - x1) / (x1 + x0)
		if math.Abs(lgm) < dithresh {
			break
		}
		lgm = (y - p) / p
		if math.Abs(lgm) < dithresh {
			break
		}
		if x <= 0 {
			break
		}
		if y >= p {
			x1 = x
			yh = y
			if dir < 0 {
				dir = 0
				d = 0.5
			} else if dir > 1 {
				d = 0.5*d + 0.5
			} else {
				d = (p - yl) / (yh - yl)
			}
			dir++
		} else {
			x0 = x
			yl = y
			if dir > 0 {
				dir = 0
				d = 0.5
			} else if dir < -1 {
				d = 0.5 * d
			} else {
				d = (p - yl) / (yh - yl)
			}
			dir--
		}
	}
	return x
}
mathext/internal/cephes/igami.go
0.725162
0.449211
igami.go
starcoder
package datebin

import (
	"time"
)

// SetWeekStartsAt sets which day a week starts on, given one of the Week*
// day-name constants. Unrecognized names leave the setting unchanged.
func (this Datebin) SetWeekStartsAt(day string) Datebin {
	if this.IsInvalid() {
		return this
	}

	// Map the day-name constant to the corresponding time.Weekday.
	switch day {
	case WeekMonday:
		this.weekStartAt = time.Monday
	case WeekTuesday:
		this.weekStartAt = time.Tuesday
	case WeekWednesday:
		this.weekStartAt = time.Wednesday
	case WeekThursday:
		this.weekStartAt = time.Thursday
	case WeekFriday:
		this.weekStartAt = time.Friday
	case WeekSaturday:
		this.weekStartAt = time.Saturday
	case WeekSunday:
		this.weekStartAt = time.Sunday
	}

	return this
}

// SetDatetimeWithNanosecond sets the full date and time, with nanoseconds,
// in the value's current location.
func (this Datebin) SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond int) Datebin {
	if this.IsInvalid() {
		return this
	}

	this.time = time.Date(year, time.Month(month), day, hour, minute, second, nanosecond, this.loc)
	return this
}

// SetDatetimeWithMicrosecond sets the full date and time, with microseconds.
func (this Datebin) SetDatetimeWithMicrosecond(year, month, day, hour, minute, second, microsecond int) Datebin {
	nanosecond := microsecond * 1e3
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}

// SetDatetimeWithMillisecond sets the full date and time, with milliseconds.
func (this Datebin) SetDatetimeWithMillisecond(year, month, day, hour, minute, second, millisecond int) Datebin {
	nanosecond := millisecond * 1e6
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}

// SetDatetime sets the date and time, preserving the current nanoseconds.
func (this Datebin) SetDatetime(year, month, day, hour, minute, second int) Datebin {
	nanosecond := this.Nanosecond()
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}

// SetDate sets the date part, preserving the current time of day.
func (this Datebin) SetDate(year, month, day int) Datebin {
	hour, minute, second := this.Time()
	nanosecond := this.Nanosecond()
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}

// SetTime sets the time of day, preserving the current date.
func (this Datebin) SetTime(hour, minute, second int) Datebin {
	year, month, day := this.Date()
	nanosecond := this.Nanosecond()
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}

// SetYear sets the year, preserving all other components.
func (this Datebin) SetYear(year int) Datebin {
	_, month, day, hour, minute, second, nanosecond := this.DatetimeWithNanosecond()
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}

// SetMonth sets the month, preserving all other components.
func (this Datebin) SetMonth(month int) Datebin {
	year, _, day, hour, minute, second, nanosecond := this.DatetimeWithNanosecond()
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}

// SetDay sets the day of the month, preserving all other components.
func (this Datebin) SetDay(day int) Datebin {
	year, month, _, hour, minute, second, nanosecond := this.DatetimeWithNanosecond()
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}

// SetHour sets the hour, preserving all other components.
func (this Datebin) SetHour(hour int) Datebin {
	year, month, day, _, minute, second, nanosecond := this.DatetimeWithNanosecond()
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}

// SetMinute sets the minute, preserving all other components.
func (this Datebin) SetMinute(minute int) Datebin {
	year, month, day, hour, _, second, nanosecond := this.DatetimeWithNanosecond()
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}

// SetSecond sets the second, preserving all other components.
func (this Datebin) SetSecond(second int) Datebin {
	year, month, day, hour, minute, _, nanosecond := this.DatetimeWithNanosecond()
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}

// SetMillisecond sets the sub-second part to the given milliseconds.
// NOTE(review): unlike SetYear..SetSecond, this replaces the entire
// nanosecond component rather than preserving any finer-grained part.
func (this Datebin) SetMillisecond(millisecond int) Datebin {
	year, month, day, hour, minute, second := this.Datetime()
	nanosecond := millisecond * 1e6
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}

// SetMicrosecond sets the sub-second part to the given microseconds.
// NOTE(review): replaces the entire nanosecond component, as above.
func (this Datebin) SetMicrosecond(microsecond int) Datebin {
	year, month, day, hour, minute, second := this.Datetime()
	nanosecond := microsecond * 1e3
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}

// SetNanosecond sets the sub-second part to the given nanoseconds.
func (this Datebin) SetNanosecond(nanosecond int) Datebin {
	year, month, day, hour, minute, second := this.Datetime()
	return this.SetDatetimeWithNanosecond(year, month, day, hour, minute, second, nanosecond)
}
pkg/lakego-pkg/go-datebin/datebin/set.go
0.581541
0.462716
set.go
starcoder
package async

import (
	"sort"
	"sync"
)

// IterAsync executes the given function f n times concurrently.
// Each call is done in a separate goroutine. On each iteration, the function f
// will be called with a unique sequential index i such that the index can be
// used to reference an element in an array or slice. If an error is returned
// by the function f for any index, the first error received will be returned.
// Otherwise, a nil result will be returned once all function calls have
// completed. Note that on error, the remaining goroutines still run to
// completion in the background; the buffered channel keeps them from blocking.
func IterAsync(n int, f func(i int) error) error {
	wg := sync.WaitGroup{}
	asyncErrors := make(chan error, n)

	wg.Add(n)
	for i := 0; i < n; i++ {
		go func(j int) {
			err := f(j)
			if err != nil {
				asyncErrors <- err
			}
			wg.Done()
		}(i)
	}

	// Close the error channel once all workers are done so the range below
	// terminates when no errors occurred.
	go func() {
		wg.Wait()
		close(asyncErrors)
	}()

	for err := range asyncErrors {
		return err
	}
	return nil
}

// ExecuteAsync executes the given function f n times concurrently and returns
// a slice containing the result of each call, in no particular order.
// Each call is done in a separate goroutine. On each iteration, the function f
// will be called with a unique sequential index i such that the index can be
// used to reference an element in an array or slice. If an error is returned
// by the function f for any index, a nil slice and the first error received
// will be returned.
func ExecuteAsync(n int, f func(i int) (interface{}, error)) ([]interface{}, error) {
	var wg sync.WaitGroup
	errCh := make(chan error, n)
	resCh := make(chan interface{}, n)

	wg.Add(n)
	for idx := 0; idx < n; idx++ {
		go func(k int) {
			defer wg.Done()
			v, err := f(k)
			if err != nil {
				errCh <- err
				return
			}
			resCh <- v
		}(idx)
	}

	// Close both channels once every worker has finished so the range loops
	// below terminate.
	go func() {
		wg.Wait()
		close(errCh)
		close(resCh)
	}()

	// Surface the first error observed, if any.
	for err := range errCh {
		return nil, err
	}

	out := make([]interface{}, 0, n)
	for v := range resCh {
		out = append(out, v)
	}
	return out, nil
}

// ExecuteOrderedAsync executes the given function f n times concurrently and
// returns a slice containing the result of each call, ordered by the index i.
// Each call is done in a separate goroutine. On each iteration, the function f
// will be called with a unique sequential index i such that the index can be
// used to reference an element in an array or slice. If an error is returned
// by the function f for any index, a nil slice and the first error received
// will be returned.
func ExecuteOrderedAsync(n int, f func(i int) (interface{}, error)) ([]interface{}, error) {
	var wg sync.WaitGroup
	errCh := make(chan error, n)
	resCh := make(chan *asyncResult, n)

	wg.Add(n)
	for idx := 0; idx < n; idx++ {
		go func(k int) {
			defer wg.Done()
			v, err := f(k)
			if err != nil {
				errCh <- err
				return
			}
			resCh <- &asyncResult{i: k, result: v}
		}(idx)
	}

	go func() {
		wg.Wait()
		close(errCh)
		close(resCh)
	}()

	// Fail fast on the first error observed.
	for err := range errCh {
		return nil, err
	}

	// Collect the unordered results, then restore index order.
	collected := make([]*asyncResult, 0, n)
	for r := range resCh {
		collected = append(collected, r)
	}
	sort.Slice(collected, func(a, b int) bool {
		return collected[a].i < collected[b].i
	})

	ordered := make([]interface{}, n)
	for pos, r := range collected {
		ordered[pos] = r.result
	}
	return ordered, nil
}

// asyncResult pairs a call's result with the index it was produced for, so
// results can be reassembled in submission order.
type asyncResult struct {
	i      int
	result interface{}
}
pkg/util/async/async.go
0.703549
0.420957
async.go
starcoder
package processor

import (
	"reflect"

	"github.com/twitchscience/aws_utils/logger"
)

var (
	// CriticalPercentage is the percentage of events that a property must be seen in, in order to be considered part of the schema for an event.
	CriticalPercentage = 0.0
	// CriticalThreshold is the number of events of a specific event name that must occur for the event to be summarized.
	CriticalThreshold = 2
)

// EventProcessor processes events of a certain type and flushes metadata about the schema.
type EventProcessor interface {
	Accept(map[string]interface{})
	Flush(string)
}

// Outputter outputs a given event's property summary and number of rows.
type Outputter interface {
	Output(string, []PropertySummary, int) error
}

// NonTrackedEventProcessor takes in events and aggregates their properties
// until flushed.
type NonTrackedEventProcessor struct {
	// Out outputs events to a directory.
	Out Outputter
	// Aggregator summarizes the properties for this event for the purposes of creating a SQL table.
	Aggregator *EventAggregator
	// In is the channel of event properties.
	In chan map[string]interface{}
	// F is a channel that receives the event name when we're done aggregating and want to compute the transformation.
	F chan string
}

// PropertySummary gives information about a field contained in an event.
type PropertySummary struct {
	// Name of the property.
	Name string
	// OccurrenceProbability is an estimate of how often the field appears when the event is sent.
	OccurrenceProbability float64
	// T is the Go type of the property.
	T reflect.Type
	// Len gives an approximate length of the values for this property if it is a string.
	Len int
}

// NewNonTrackedEventProcessor allocates a new NonTrackedEventProcessor and
// starts its Listen goroutine.
func NewNonTrackedEventProcessor(outputDir string) EventProcessor {
	p := &NonTrackedEventProcessor{
		Out: NewOutputter(outputDir),
		In:  make(chan map[string]interface{}, 100),
		F:   make(chan string),
	}
	logger.Go(p.Listen)
	return p
}

// Listen consumes property bags from In and aggregates them until a flush
// request arrives on F; it then summarizes, outputs and returns.
func (e *NonTrackedEventProcessor) Listen() {
	for {
		select {
		case p := <-e.In:
			// Lazily create the aggregator on first use.
			if e.Aggregator == nil {
				e.Aggregator = NewEventAggregator(CriticalPercentage)
			}
			e.Aggregator.Aggregate(p)
		case eventName := <-e.F:
			// Drain any events still buffered in e.In before summarizing.
			// NOTE(review): closing e.In here means any Accept call made
			// after Flush will panic (send on closed channel) — confirm
			// callers never reuse the processor after flushing.
			close(e.In)
			for p := range e.In {
				if e.Aggregator == nil {
					e.Aggregator = NewEventAggregator(CriticalPercentage)
				}
				e.Aggregator.Aggregate(p)
			}
			// Only emit a summary when enough events were seen.
			nRows, cols := e.Aggregator.Summarize()
			if nRows > CriticalThreshold {
				err := e.Out.Output(eventName, cols, nRows)
				if err != nil {
					logger.WithError(err).Error("Outputter failed")
				}
			}
			e.Aggregator = NewEventAggregator(CriticalPercentage)
			return
		}
	}
}

// Accept an event's properties. Blocks if the input buffer is full; panics if
// called after Flush (see note in Listen).
func (e *NonTrackedEventProcessor) Accept(propertyBag map[string]interface{}) {
	e.In <- propertyBag
}

// Flush events received. Label the flush with a given name.
func (e *NonTrackedEventProcessor) Flush(eventName string) {
	e.F <- eventName
}
schema_suggestor/processor/processor.go
0.71602
0.425725
processor.go
starcoder
package native

import (
	"reflect"
	"unsafe"

	"github.com/pkg/errors"
	. "gorgonia.org/tensor"
)

// checkNativeSelectable validates that a native Select* call is possible on t:
// the data must be natively accessible, the axis in range (scalars allow axis
// 0), the layout row-major and packed, and the Dtype must match dt exactly.
func checkNativeSelectable(t *Dense, axis int, dt Dtype) error {
	if !t.IsNativelyAccessible() {
		return errors.New("Cannot select on non-natively accessible data")
	}
	if axis >= t.Shape().Dims() && !(t.IsScalar() && axis == 0) {
		return errors.Errorf("Cannot select on axis %d. Shape is %v", axis, t.Shape())
	}
	if t.F() || t.RequiresIterator() {
		return errors.Errorf("Not yet implemented: native select for colmajor or unpacked matrices")
	}
	if t.Dtype() != dt {
		return errors.Errorf("Native selection only works on %v. Got %v", dt, t.Dtype())
	}
	return nil
}

/* Native Select for bool */

// SelectB creates a slice of flat data types. See Example of NativeSelectF64.
// The returned row slices alias the tensor's backing data (no copy); mutating
// them mutates t.
func SelectB(t *Dense, axis int) (retVal [][]bool, err error) {
	if err := checkNativeSelectable(t, axis, Bool); err != nil {
		return nil, err
	}

	switch t.Shape().Dims() {
	case 0, 1:
		retVal = make([][]bool, 1)
		retVal[0] = t.Bools()
	case 2:
		if axis == 0 {
			return MatrixB(t)
		}
		fallthrough
	default:
		// size := t.Shape()[axis]
		data := t.Bools()
		stride := t.Strides()[axis]
		upper := ProdInts(t.Shape()[: axis+1])
		retVal = make([][]bool, 0, upper)
		// Each iteration manufactures a stride-length window over data via a
		// raw SliceHeader, so no element copying occurs.
		for i, r := 0, 0; r < upper; i += stride {
			s := make([]bool, 0)
			hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s))
			hdr.Data = uintptr(unsafe.Pointer(&data[i]))
			hdr.Cap = stride
			hdr.Len = stride
			retVal = append(retVal, s)
			r++
		}
		return retVal, nil
	}
	return
}

/* Native Select for int */

// SelectI creates a slice of flat data types. See Example of NativeSelectF64.
func SelectI(t *Dense, axis int) (retVal [][]int, err error) { if err := checkNativeSelectable(t, axis, Int); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]int, 1) retVal[0] = t.Ints() case 2: if axis == 0 { return MatrixI(t) } fallthrough default: // size := t.Shape()[axis] data := t.Ints() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]int, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]int, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for int8 */ // SelectI8 creates a slice of flat data types. See Example of NativeSelectF64. func SelectI8(t *Dense, axis int) (retVal [][]int8, err error) { if err := checkNativeSelectable(t, axis, Int8); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]int8, 1) retVal[0] = t.Int8s() case 2: if axis == 0 { return MatrixI8(t) } fallthrough default: // size := t.Shape()[axis] data := t.Int8s() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]int8, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]int8, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for int16 */ // SelectI16 creates a slice of flat data types. See Example of NativeSelectF64. 
func SelectI16(t *Dense, axis int) (retVal [][]int16, err error) { if err := checkNativeSelectable(t, axis, Int16); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]int16, 1) retVal[0] = t.Int16s() case 2: if axis == 0 { return MatrixI16(t) } fallthrough default: // size := t.Shape()[axis] data := t.Int16s() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]int16, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]int16, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for int32 */ // SelectI32 creates a slice of flat data types. See Example of NativeSelectF64. func SelectI32(t *Dense, axis int) (retVal [][]int32, err error) { if err := checkNativeSelectable(t, axis, Int32); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]int32, 1) retVal[0] = t.Int32s() case 2: if axis == 0 { return MatrixI32(t) } fallthrough default: // size := t.Shape()[axis] data := t.Int32s() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]int32, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]int32, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for int64 */ // SelectI64 creates a slice of flat data types. See Example of NativeSelectF64. 
func SelectI64(t *Dense, axis int) (retVal [][]int64, err error) { if err := checkNativeSelectable(t, axis, Int64); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]int64, 1) retVal[0] = t.Int64s() case 2: if axis == 0 { return MatrixI64(t) } fallthrough default: // size := t.Shape()[axis] data := t.Int64s() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]int64, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]int64, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for uint */ // SelectU creates a slice of flat data types. See Example of NativeSelectF64. func SelectU(t *Dense, axis int) (retVal [][]uint, err error) { if err := checkNativeSelectable(t, axis, Uint); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]uint, 1) retVal[0] = t.Uints() case 2: if axis == 0 { return MatrixU(t) } fallthrough default: // size := t.Shape()[axis] data := t.Uints() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]uint, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]uint, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for uint8 */ // SelectU8 creates a slice of flat data types. See Example of NativeSelectF64. 
func SelectU8(t *Dense, axis int) (retVal [][]uint8, err error) { if err := checkNativeSelectable(t, axis, Uint8); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]uint8, 1) retVal[0] = t.Uint8s() case 2: if axis == 0 { return MatrixU8(t) } fallthrough default: // size := t.Shape()[axis] data := t.Uint8s() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]uint8, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]uint8, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for uint16 */ // SelectU16 creates a slice of flat data types. See Example of NativeSelectF64. func SelectU16(t *Dense, axis int) (retVal [][]uint16, err error) { if err := checkNativeSelectable(t, axis, Uint16); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]uint16, 1) retVal[0] = t.Uint16s() case 2: if axis == 0 { return MatrixU16(t) } fallthrough default: // size := t.Shape()[axis] data := t.Uint16s() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]uint16, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]uint16, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for uint32 */ // SelectU32 creates a slice of flat data types. See Example of NativeSelectF64. 
func SelectU32(t *Dense, axis int) (retVal [][]uint32, err error) { if err := checkNativeSelectable(t, axis, Uint32); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]uint32, 1) retVal[0] = t.Uint32s() case 2: if axis == 0 { return MatrixU32(t) } fallthrough default: // size := t.Shape()[axis] data := t.Uint32s() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]uint32, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]uint32, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for uint64 */ // SelectU64 creates a slice of flat data types. See Example of NativeSelectF64. func SelectU64(t *Dense, axis int) (retVal [][]uint64, err error) { if err := checkNativeSelectable(t, axis, Uint64); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]uint64, 1) retVal[0] = t.Uint64s() case 2: if axis == 0 { return MatrixU64(t) } fallthrough default: // size := t.Shape()[axis] data := t.Uint64s() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]uint64, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]uint64, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for float32 */ // SelectF32 creates a slice of flat data types. See Example of NativeSelectF64. 
func SelectF32(t *Dense, axis int) (retVal [][]float32, err error) { if err := checkNativeSelectable(t, axis, Float32); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]float32, 1) retVal[0] = t.Float32s() case 2: if axis == 0 { return MatrixF32(t) } fallthrough default: // size := t.Shape()[axis] data := t.Float32s() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]float32, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]float32, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for float64 */ // SelectF64 creates a slice of flat data types. See Example of NativeSelectF64. func SelectF64(t *Dense, axis int) (retVal [][]float64, err error) { if err := checkNativeSelectable(t, axis, Float64); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]float64, 1) retVal[0] = t.Float64s() case 2: if axis == 0 { return MatrixF64(t) } fallthrough default: // size := t.Shape()[axis] data := t.Float64s() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]float64, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]float64, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for complex64 */ // SelectC64 creates a slice of flat data types. See Example of NativeSelectF64. 
func SelectC64(t *Dense, axis int) (retVal [][]complex64, err error) { if err := checkNativeSelectable(t, axis, Complex64); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]complex64, 1) retVal[0] = t.Complex64s() case 2: if axis == 0 { return MatrixC64(t) } fallthrough default: // size := t.Shape()[axis] data := t.Complex64s() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]complex64, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]complex64, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for complex128 */ // SelectC128 creates a slice of flat data types. See Example of NativeSelectF64. func SelectC128(t *Dense, axis int) (retVal [][]complex128, err error) { if err := checkNativeSelectable(t, axis, Complex128); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]complex128, 1) retVal[0] = t.Complex128s() case 2: if axis == 0 { return MatrixC128(t) } fallthrough default: // size := t.Shape()[axis] data := t.Complex128s() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]complex128, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]complex128, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return } /* Native Select for string */ // SelectStr creates a slice of flat data types. See Example of NativeSelectF64. 
func SelectStr(t *Dense, axis int) (retVal [][]string, err error) { if err := checkNativeSelectable(t, axis, String); err != nil { return nil, err } switch t.Shape().Dims() { case 0, 1: retVal = make([][]string, 1) retVal[0] = t.Strings() case 2: if axis == 0 { return MatrixStr(t) } fallthrough default: // size := t.Shape()[axis] data := t.Strings() stride := t.Strides()[axis] upper := ProdInts(t.Shape()[:axis+1]) retVal = make([][]string, 0, upper) for i, r := 0, 0; r < upper; i += stride { s := make([]string, 0) hdr := (*reflect.SliceHeader)(unsafe.Pointer(&s)) hdr.Data = uintptr(unsafe.Pointer(&data[i])) hdr.Cap = stride hdr.Len = stride retVal = append(retVal, s) r++ } return retVal, nil } return }
native/iterator_native2.go
0.770119
0.444927
iterator_native2.go
starcoder
package iso20022 // Specifies the billing adjustments for a specific service. type BillingServiceAdjustment1 struct { // Identifies the type of adjustment. Type *ServiceAdjustmentType1Code `xml:"Tp"` // Free-form description and clarification of the adjustment. Description *Max140Text `xml:"Desc"` // Amount of the adjustment, expressed in the settlement currency. // // Usage: If the amount would reduce charges due then the amount should be negatively signed. Amount *AmountAndDirection34 `xml:"Amt"` // Specifies whether the balance amount requires an adjustment. BalanceRequiredAmount *AmountAndDirection34 `xml:"BalReqrdAmt,omitempty"` // Date on which the situation causing the service adjustment occurred. If the date is not known then used the last day of the month in which the situation occurred or the date of the billing statement which reported the original service to which this adjustment applies. ErrorDate *ISODate `xml:"ErrDt,omitempty"` // Financial institution's own, internal service identification code, used to uniquely identify the service within the financial institution. AdjustmentIdentification *Max35Text `xml:"AdjstmntId,omitempty"` // Defines the financial institution sub-service identification if the financial institution service identification code is used for more than one service. SubService *BillingSubServiceIdentification1 `xml:"SubSvc,omitempty"` // Change in the service price, expressed in the pricing currency. A negative value indicates a price reduction. PriceChange *AmountAndDirection34 `xml:"PricChng,omitempty"` // Price that was applied to the service, prior to the change, expressed in the pricing currency. OriginalPrice *AmountAndDirection34 `xml:"OrgnlPric,omitempty"` // New, adjusted service price, expressed in the pricing currency. NewPrice *AmountAndDirection34 `xml:"NewPric,omitempty"` // Change in the service volume. A negative value indicates a volume reduction. 
VolumeChange *DecimalNumber `xml:"VolChng,omitempty"` // Original service volume. OriginalVolume *DecimalNumber `xml:"OrgnlVol,omitempty"` // New, adjusted service volume. NewVolume *DecimalNumber `xml:"NewVol,omitempty"` // Service charge that was applied to the service, prior to the change, expressed in the pricing currency. OriginalChargeAmount *AmountAndDirection34 `xml:"OrgnlChrgAmt,omitempty"` // New, adjusted service charge, expressed in the pricing currency. NewChargeAmount *AmountAndDirection34 `xml:"NewChrgAmt,omitempty"` } func (b *BillingServiceAdjustment1) SetType(value string) { b.Type = (*ServiceAdjustmentType1Code)(&value) } func (b *BillingServiceAdjustment1) SetDescription(value string) { b.Description = (*Max140Text)(&value) } func (b *BillingServiceAdjustment1) AddAmount() *AmountAndDirection34 { b.Amount = new(AmountAndDirection34) return b.Amount } func (b *BillingServiceAdjustment1) AddBalanceRequiredAmount() *AmountAndDirection34 { b.BalanceRequiredAmount = new(AmountAndDirection34) return b.BalanceRequiredAmount } func (b *BillingServiceAdjustment1) SetErrorDate(value string) { b.ErrorDate = (*ISODate)(&value) } func (b *BillingServiceAdjustment1) SetAdjustmentIdentification(value string) { b.AdjustmentIdentification = (*Max35Text)(&value) } func (b *BillingServiceAdjustment1) AddSubService() *BillingSubServiceIdentification1 { b.SubService = new(BillingSubServiceIdentification1) return b.SubService } func (b *BillingServiceAdjustment1) AddPriceChange() *AmountAndDirection34 { b.PriceChange = new(AmountAndDirection34) return b.PriceChange } func (b *BillingServiceAdjustment1) AddOriginalPrice() *AmountAndDirection34 { b.OriginalPrice = new(AmountAndDirection34) return b.OriginalPrice } func (b *BillingServiceAdjustment1) AddNewPrice() *AmountAndDirection34 { b.NewPrice = new(AmountAndDirection34) return b.NewPrice } func (b *BillingServiceAdjustment1) SetVolumeChange(value string) { b.VolumeChange = (*DecimalNumber)(&value) } func (b 
*BillingServiceAdjustment1) SetOriginalVolume(value string) { b.OriginalVolume = (*DecimalNumber)(&value) } func (b *BillingServiceAdjustment1) SetNewVolume(value string) { b.NewVolume = (*DecimalNumber)(&value) } func (b *BillingServiceAdjustment1) AddOriginalChargeAmount() *AmountAndDirection34 { b.OriginalChargeAmount = new(AmountAndDirection34) return b.OriginalChargeAmount } func (b *BillingServiceAdjustment1) AddNewChargeAmount() *AmountAndDirection34 { b.NewChargeAmount = new(AmountAndDirection34) return b.NewChargeAmount }
BillingServiceAdjustment1.go
0.773644
0.427994
BillingServiceAdjustment1.go
starcoder
package config import ( "encoding/json" "os" temperature "github.com/turing-complete/temperature/analytic" ) // Config is a configuration of a problem. type Config struct { Inherit string // The system System System // The quantity of interest Quantity Quantity // The probability model Uncertainty Uncertainty // The approximation algorithm Solution Solution // The assessment procedure Assessment Assessment // The flag for displaying diagnostic information. Verbose bool } // System is a configuration of the system. type System struct { // The TGFF file describing the platform and application. Specification string // The static-power model. StaticPower StaticPower temperature.Config } // StaticPower is a configuration of the static-power model. type StaticPower struct { // The portion of the total power ascribed to the static power. Contribution float64 // ∈ [0, 1) // The temperature values for fitting. Temperature []float64 // The coefficient of proportionality for fitting. Coefficient []float64 } // Quantity is a configuration of the quantity of interest. type Quantity struct { // The name of the quantity. The options are “end-to-end-delay,” // “total-energy,” and “maximum-temperature.” Name string } // Uncertainty is a configuration of the probability model. type Uncertainty struct { // The tasks whose execution times should be considered as uncertain. Tasks string // ⊂ {0, …, #tasks-1} // The marginal distributions of tasks’ delays. Distribution string // The multiplier used to calculate the range of deviation. Deviation float64 // ≥ 0 // The strength of correlations between tasks. Correlation float64 // > 0 // The portion of the variance to be preserved. Variance float64 // ∈ (0, 1] } // Solution is a configuration of the approximation algorithm. type Solution struct { // The flag for interpolating with the probability distribution of the // uncertain parameters embedded into the surrogate. 
Aleatory bool // The quadrature rule, which is either “closed” or “open.” Rule string // The total order of polynomials. Power uint // The minimum level of approximation. MinLevel uint // The maximum level of approximation. MaxLevel uint // The maximum number of evaluations. MaxEvaluations uint // The tolerance of the absolute error. AbsoluteError float64 // The tolerance of the relative error. RelativeError float64 // The tolerance of the score error. ScoreError float64 } // Assessment is a configuration of the assessment procedure. type Assessment struct { // The seed for generating samples. Seed int64 // The number of samples to draw. Samples uint } func New(path string) (*Config, error) { paths := []string{path} for { config := Config{} if err := populate(&config, path); err != nil { return nil, err } if len(config.Inherit) > 0 { path = config.Inherit paths = append([]string{path}, paths...) continue } break } config := &Config{} for _, path := range paths { if err := populate(config, path); err != nil { return nil, err } } return config, nil } func populate(config *Config, path string) error { file, err := os.Open(path) if err != nil { return err } defer file.Close() return json.NewDecoder(file).Decode(config) }
src/internal/config/main.go
0.750278
0.438966
main.go
starcoder
package function import ( "errors" "fmt" ) const ( _SRT_HEADER_SIZE = 4 * 256 // freqs ) // SRT Sorted Ranks Transform // Sorted Ranks Transform is typically used after a BWT to reduce the variance // of the data prior to entropy coding. type SRT struct { } // NewSRT creates a new instance of SRT func NewSRT() (*SRT, error) { this := &SRT{} return this, nil } // NewSRTWithCtx creates a new instance of SRT using a // configuration map as parameter. func NewSRTWithCtx(ctx *map[string]interface{}) (*SRT, error) { this := &SRT{} return this, nil } // Forward applies the function to the src and writes the result // to the destination. Returns number of bytes read, number of bytes // written and possibly an error. func (this *SRT) Forward(src, dst []byte) (uint, uint, error) { if len(src) == 0 { return 0, 0, nil } if &src[0] == &dst[0] { return 0, 0, errors.New("Input and output buffers cannot be equal") } if n := this.MaxEncodedLen(len(src)); len(dst) < n { return 0, 0, fmt.Errorf("Output buffer is too small - size: %d, required %d", len(dst), n) } count := len(src) s2r := [256]byte{} r2s := [256]byte{} freqs := [256]int32{} // find first symbols and count occurrences for i, b := 0, 0; i < count; { c := src[i] var j int for j = i + 1; (j < count) && (src[j] == c); j++ { } if freqs[c] == 0 { r2s[b] = c s2r[c] = byte(b) b++ } freqs[c] += int32(j - i) i = j } // init arrays symbols := [256]byte{} nbSymbols := this.preprocess(freqs[:], symbols[:]) buckets := [256]int{} for i, bucketPos := 0, 0; i < nbSymbols; i++ { c := symbols[i] buckets[c] = bucketPos bucketPos += int(freqs[c]) } headerSize, err := this.encodeHeader(freqs[:], dst) if err != nil { return 0, 0, err } dst = dst[headerSize:] // encoding for i := 0; i < count; { c := src[i] r := s2r[c] p := buckets[c] dst[p] = r p++ if r > 0 { for { r2s[r] = r2s[r-1] s2r[r2s[r]] = r if r == 1 { break } r-- } r2s[0] = c s2r[c] = 0 } j := i + 1 for (j < count) && (src[j] == c) { dst[p] = 0 p++ j++ } buckets[c] = p i = j } return 
uint(count), uint(count + _SRT_HEADER_SIZE), nil } func (this SRT) preprocess(freqs []int32, symbols []byte) int { nbSymbols := 0 for i := range freqs { if freqs[i] == 0 { continue } symbols[nbSymbols] = byte(i) nbSymbols++ } h := 4 for h < nbSymbols { h = h*3 + 1 } for { h /= 3 for i := h; i < nbSymbols; i++ { t := symbols[i] var b int for b = i - h; (b >= 0) && ((freqs[symbols[b]] < freqs[t]) || ((freqs[t] == freqs[symbols[b]]) && (t < symbols[b]))); b -= h { symbols[b+h] = symbols[b] } symbols[b+h] = t } if h == 1 { break } } return nbSymbols } // Inverse applies the reverse function to the src and writes the result // to the destination. Returns number of bytes read, number of bytes // written and possibly an error. func (this *SRT) Inverse(src, dst []byte) (uint, uint, error) { if len(src) == 0 { return 0, 0, nil } if &src[0] == &dst[0] { return 0, 0, errors.New("Input and output buffers cannot be equal") } // init arrays freqs := [256]int32{} headerSize, err := this.decodeHeader(src, freqs[:]) if err != nil { return 0, 0, err } src = src[headerSize:] count := len(src) symbols := [256]byte{} nbSymbols := this.preprocess(freqs[:], symbols[:]) buckets := [256]int{} bucketEnds := [256]int{} r2s := [256]byte{} for i, bucketPos := 0, 0; i < nbSymbols; i++ { c := symbols[i] r2s[src[bucketPos]] = c buckets[c] = bucketPos + 1 bucketPos += int(freqs[c]) bucketEnds[c] = bucketPos } // decoding c := r2s[0] for i := range dst { dst[i] = c if buckets[c] < bucketEnds[c] { r := src[buckets[c]] buckets[c]++ if r == 0 { continue } for s := byte(0); s < r; s++ { r2s[s] = r2s[s+1] } r2s[r] = c c = r2s[0] } else { if nbSymbols == 1 { continue } nbSymbols-- for s := 0; s < nbSymbols; s++ { r2s[s] = r2s[s+1] } c = r2s[0] } } return uint(count + _SRT_HEADER_SIZE), uint(count), nil } func (this SRT) encodeHeader(freqs []int32, dst []byte) (int, error) { if len(dst) < _SRT_HEADER_SIZE { return 0, errors.New("SRT forward failed: cannot encode header") } for i := range freqs { dst[4*i] 
= byte(freqs[i] >> 24) dst[4*i+1] = byte(freqs[i] >> 16) dst[4*i+2] = byte(freqs[i] >> 8) dst[4*i+3] = byte(freqs[i]) } return _SRT_HEADER_SIZE, nil } func (this SRT) decodeHeader(src []byte, freqs []int32) (int, error) { if len(src) < _SRT_HEADER_SIZE { return 0, errors.New("SRT inverse failed: cannot decode header") } for i := range freqs { f1 := int32(src[4*i]) f2 := int32(src[4*i+1]) f3 := int32(src[4*i+2]) f4 := int32(src[4*i+3]) freqs[i] = (f1 << 24) | (f2 << 16) | (f3 << 8) | f4 } return _SRT_HEADER_SIZE, nil } // MaxEncodedLen returns the max size required for the encoding output buffer func (this SRT) MaxEncodedLen(srcLen int) int { return srcLen + _SRT_HEADER_SIZE }
function/SRT.go
0.796807
0.422445
SRT.go
starcoder
package tensor import ( "errors" "github.com/dereklstinson/gocunets/devices/gpu/nvidia/cudnn" gocudnn "github.com/dereklstinson/gocudnn" ) /* cudnnOpTensor from the cudnn sdk documentation This function implements the equation C = op ( alpha1[0] * A, alpha2[0] * B ) + beta[0] * C, given tensors A, B, and C and scaling factors alpha1, alpha2, and beta. The op to use is indicated by the descriptor opTensorDesc. Currently-supported ops are listed by the cudnnOpTensorOp_t enum. Each dimension of the input tensor A must match the corresponding dimension of the destination tensor C, and each dimension of the input tensor B must match the corresponding dimension of the destination tensor C or must be equal to 1. In the latter case, the same value from the input tensor B for those dimensions will be used to blend into the C tensor. The data types of the input tensors A and B must match. If the data type of the destination tensor C is double, then the data type of the input tensors also must be double. If the data type of the destination tensor C is double, then opTensorCompType in opTensorDesc must be double. Else opTensorCompType must be float. If the input tensor B is the same tensor as the destination tensor C, then the input tensor A also must be the same tensor as the destination tensor C. 
*/ //OpAdd does addition Operation C = op ( alpha1[0] * A, alpha2[0] * B ) + beta[0] * C, //Or vol= op(alpha1 *A, alpha2 *B)+(beta *vol) type tensops struct { add optensorop mult optensorop min optensorop max optensorop sqrt optensorop not optensorop flg gocudnn.OpTensorOp } type optensorop struct { mode gocudnn.OpTensorOp desc *gocudnn.OPTensorD } func (o *optensorop) isset() bool { if o.desc == nil { return false } return true } //OpAdd adds the op into t func (t *Volume) OpAdd(h *cudnn.Handler, A, B *Volume, alpha1, alpha2, beta float64) error { if !t.op.add.isset() { _, dtypet, _, err := t.Properties() if err != nil { return err } t.op.add.desc, err = gocudnn.CreateOpTensorDescriptor() err = t.op.add.desc.Set(t.op.flg.Add(), dtypet, t.propnan) if err != nil { return errorappend("NewOpTensorDescriptor: ", err) } return t.op.add.desc.OpTensor(h.Cudnn(), alpha1, A.current.tD, A, alpha2, B.current.tD, B, beta, t.current.tD, t) } return t.op.add.desc.OpTensor(h.Cudnn(), alpha1, A.current.tD, A, alpha2, B.current.tD, B, beta, t.current.tD, t) } func errorappend(comment string, err error) error { return errors.New(comment + ": " + err.Error()) } //OpMult does a multiplication Operation C = op ( alpha1[0] * A, alpha2[0] * B ) + beta[0] * C, func (t *Volume) OpMult(h *cudnn.Handler, A, B *Volume, alpha1, alpha2, beta float64) error { if !t.op.mult.isset() { _, dtypet, _, err := t.Properties() if err != nil { return err } t.op.mult.desc, err = gocudnn.CreateOpTensorDescriptor() err = t.op.mult.desc.Set(t.op.flg.Mul(), dtypet, t.propnan) if err != nil { return errorappend("NewOpTensorDescriptor: ", err) } if err != nil { return errorappend("NewOpTensorDescriptor: ", err) } return t.op.mult.desc.OpTensor(h.Cudnn(), alpha1, A.current.tD, A, alpha2, B.current.tD, B, beta, t.current.tD, t) } return t.op.mult.desc.OpTensor(h.Cudnn(), alpha1, A.current.tD, A, alpha2, B.current.tD, B, beta, t.current.tD, t) } //OpNot does negation Operation performed on only the A C = op ( 
alpha1[0] * A) + beta[0] * C, func (t *Volume) OpNot(h *cudnn.Handler, A *Volume, alpha1, beta float64) error { if !t.op.not.isset() { _, dtypet, _, err := t.Properties() if err != nil { return err } t.op.not.desc, err = gocudnn.CreateOpTensorDescriptor() if err != nil { return errorappend("NewOpTensorDescriptor: ", err) } err = t.op.not.desc.Set(t.op.flg.Not(), dtypet, t.propnan) if err != nil { return err } return t.op.not.desc.OpTensor(h.Cudnn(), alpha1, A.current.tD, A, 0, nil, nil, beta, t.current.tD, t) } return t.op.not.desc.OpTensor(h.Cudnn(), alpha1, A.current.tD, A, 0, nil, nil, beta, t.current.tD, t) } //OpMax does max comparison Operation C = op ( alpha1[0] * A, alpha2[0] * B ) + beta[0] * C, func (t *Volume) OpMax(h *cudnn.Handler, A, B *Volume, alpha1, alpha2, beta float64) error { if !t.op.max.isset() { _, dtypet, _, err := t.Properties() if err != nil { return err } t.op.max.desc, err = gocudnn.CreateOpTensorDescriptor() if err != nil { return errorappend("NewOpTensorDescriptor: ", err) } err = t.op.max.desc.Set(t.op.flg.Max(), dtypet, t.propnan) if err != nil { return errorappend("NewOpTensorDescriptor: ", err) } return t.op.max.desc.OpTensor(h.Cudnn(), alpha1, A.current.tD, A, alpha2, B.current.tD, B, beta, t.current.tD, t) } return t.op.max.desc.OpTensor(h.Cudnn(), alpha1, A.current.tD, A, alpha2, B.current.tD, B, beta, t.current.tD, t) } //OpMin does min comparison Operation C = op ( alpha1[0] * A, alpha2[0] * B ) + beta[0] * C, func (t *Volume) OpMin(h *cudnn.Handler, A, B *Volume, alpha1, alpha2, beta float64) error { if !t.op.min.isset() { _, dtypet, _, err := t.Properties() if err != nil { return err } t.op.min.desc, err = gocudnn.CreateOpTensorDescriptor() if err != nil { return errorappend("NewOpTensorDescriptor: ", err) } err = t.op.min.desc.Set(t.op.flg.Min(), dtypet, t.propnan) if err != nil { return errorappend("NewOpTensorDescriptor: ", err) } return t.op.min.desc.OpTensor(h.Cudnn(), alpha1, A.current.tD, A, alpha2, B.current.tD, B, 
beta, t.current.tD, t) } return t.op.min.desc.OpTensor(h.Cudnn(), alpha1, A.current.tD, A, alpha2, B.current.tD, B, beta, t.current.tD, t) } //OpSqrt does squareroot Operation C = op ( alpha1[0] * A ) + beta[0] * C, func (t *Volume) OpSqrt(h *cudnn.Handler, A *Volume, alpha1, beta float64) error { if !t.op.sqrt.isset() { _, dtypet, _, err := t.Properties() if err != nil { return err } t.op.sqrt.desc, err = gocudnn.CreateOpTensorDescriptor() if err != nil { return errorappend("NewOpTensorDescriptor: ", err) } err = t.op.sqrt.desc.Set(t.op.flg.Sqrt(), dtypet, t.propnan) if err != nil { return errorappend("NewOpTensorDescriptor: ", err) } return t.op.sqrt.desc.OpTensor(h.Cudnn(), alpha1, A.current.tD, A, 0, nil, nil, beta, t.current.tD, t) } return t.op.sqrt.desc.OpTensor(h.Cudnn(), alpha1, A.current.tD, A, 0, nil, nil, beta, t.current.tD, t) }
devices/gpu/nvidia/cudnn/tensor/ops.go
0.687945
0.652075
ops.go
starcoder
package main import ( "log" "runtime" "time" ) type Transaction struct { TxID string Address string InputTxID string Value uint } type TestData struct { Txs []Transaction } // Analyze a block of transactions from a Bitcoin family blockchain and, given 2 addresses, // find all transaction chains that lead from our address to the banned address. // Implement function findChains that returns an array fo chains: // linked list of TxIDs in as array where every element is the TxID of the tx originating from the previous, // starting from transaction that sends funds to our address. // There is only one transaction that sends funds to our address. // There are no transactions from the banned address. func main() { startTime := time.Now() var m1, m2 runtime.MemStats runtime.ReadMemStats(&m1) testData := [1]TestData{} testData[0] = TestData{Txs: []Transaction{ Transaction{TxID: "tx1", Address: "bgl", Value: 10, InputTxID: "none"}, Transaction{TxID: "tx2", Address: "address1", Value: 1, InputTxID: "tx1"}, Transaction{TxID: "tx3", Address: "address2", Value: 2, InputTxID: "tx1"}, Transaction{TxID: "tx4", Address: "address3", Value: 3, InputTxID: "tx1"}, Transaction{TxID: "tx5", Address: "address4", Value: 4, InputTxID: "tx1"}, Transaction{TxID: "tx6", Address: "address5", Value: 1, InputTxID: "tx2"}, Transaction{TxID: "tx7", Address: "address6", Value: 1, InputTxID: "tx3"}, Transaction{TxID: "tx8", Address: "bgl", Value: 1, InputTxID: "tx3"}, Transaction{TxID: "tx9", Address: "address6", Value: 1, InputTxID: "tx4"}, Transaction{TxID: "tx10", Address: "address7", Value: 2, InputTxID: "tx5"}, Transaction{TxID: "tx11", Address: "banned", Value: 2, InputTxID: "tx100"}, Transaction{TxID: "tx12", Address: "banned", Value: 2, InputTxID: "tx5"}, Transaction{TxID: "tx13", Address: "banned", Value: 1, InputTxID: "tx7"}, }} for _, td := range testData { txsChain := findChains("bgl", "banned", td.Txs) log.Println(txsChain) } endTime := time.Now() log.Println("Time: ", 
endTime.Sub(startTime)) runtime.ReadMemStats(&m2) log.Println("Memory: ", (m2.TotalAlloc-m1.TotalAlloc)/1024) } func findChains(beginAddress string, endAddress string, txs []Transaction) [][]string { chain := map[string]bool{} ans := [][]string{} var startTx Transaction for _, tx := range txs { if tx.Address == beginAddress { startTx = tx break } } traverse(endAddress, startTx, chain, txs, []string{}, &ans) return ans } func traverse(endAddress string, startTx Transaction, visited map[string]bool, txs []Transaction, chain []string, ans *[][]string) { visited[startTx.TxID] = true subChain := make([]string, len(chain)+1) copy(subChain, append(chain, startTx.TxID)) log.Println(subChain) if startTx.Address == endAddress { *ans = append(*ans, subChain) } else { for _, tx := range txs { if !visited[tx.TxID] && tx.InputTxID == startTx.TxID { traverse(endAddress, tx, visited, txs, subChain, ans) } } } }
bitcoin_transactions/bitcoin_transactions.go
0.645232
0.453322
bitcoin_transactions.go
starcoder
package parser import ( "strconv" "strings" "github.com/alecthomas/participle" "github.com/alecthomas/participle/lexer" "github.com/alecthomas/participle/lexer/stateful" ) var ( Lexer = stateful.MustSimple([]stateful.Rule{ {"Whitespace", `\s+`, nil}, {"Bool", `(?i)\b(TRUE|FALSE)\b`, nil}, {"Type", `(?i)\b(STRING|NUMBER|BINARY)\b`, nil}, {"Null", `(?i)\bNULL\b`, nil}, {"Keyword", keywordsRe(), nil}, {"QuotedIdent", "`[^`]+`", nil}, {"Ident", `[a-zA-Z_][a-zA-Z0-9_]*`, nil}, {"Number", `[-+]?\d*\.?\d+([eE][-+]?\d+)?`, nil}, {"String", `'[^']*'|"[^"]*"`, nil}, {"Operator", `<>|!=|<=|>=|[-+*/%:?,.()=<>\[\]{};]`, nil}, }, ) parser = participle.MustBuild( &AST{}, participle.Lexer(Lexer), participle.Unquote("String"), UnquoteIdent(), participle.CaseInsensitive("Keyword", "Bool", "Type", "Null"), participle.UseLookahead(2), participle.Elide("Whitespace"), ) ) var keywords = []string{ "SELECT", "FROM", "WHERE", "LIMIT", "OFFSET", "INSERT", "INTO", "VALUES", "NOT", "BETWEEN", "AND", "OR", "USE", "INDEX", "ASC", "DESC", "DROP", "CREATE", "TABLE", "HASH", "RANGE", "PROJECTION", "PROVISIONED", "THROUGHPUT", "READ", "WRITE", "GLOBAL", "LOCAL", "INDEX", "SECONDARY", "RETURNING", "NONE", "ALL_OLD", "UPDATED_OLD", "ALL_NEW", "UPDATED_NEW", "DELETE", "CHECK", } func keywordsRe() string { return `(?i)\b(` + strings.Join(keywords, "|") + `)\b` } func Parse(s string) (*AST, error) { var ast AST err := parser.ParseString("", s, &ast) return &ast, err } // EBNF grammar for the SQL parser. 
func EBNF() string { return parser.String() } // UnquoteIdent removes surrounding backticks (`) from quoted identifiers func UnquoteIdent() participle.Option { return participle.Map(func(t lexer.Token) (lexer.Token, error) { t.Value = t.Value[1 : len(t.Value)-1] return t, nil }, "QuotedIdent") } type Boolean bool func (b *Boolean) Capture(values []string) error { *b = strings.ToUpper(values[0]) == "TRUE" return nil } type ScanDescending bool func (b *ScanDescending) Capture(values []string) error { *b = strings.ToUpper(values[0]) == "DESC" return nil } // Node is an interface implemented by all AST nodes. type Node interface { children() (children []Node) } type AST struct { Select *Select `( @@` Insert *InsertOrReplace ` | @@` CreateTable *CreateTable ` | @@` DropTable *DropTable ` | @@ ) ";"?` } func (a *AST) children() (children []Node) { return []Node{a.Select, a.Insert, a.CreateTable, a.DropTable} } type JSONObjectEntry struct { Key string `@(Ident | String)` Value *JSONValue `":" @@` } func (j *JSONObjectEntry) children() (children []Node) { return []Node{j.Value} } type JSONObject struct { Entries []*JSONObjectEntry `"{" (@@ ("," @@)* ","?)? "}"` } func (j *JSONObject) children() (children []Node) { for _, entry := range j.Entries { children = append(children, entry) } return } func (j *JSONObject) String() string { out := make([]string, 0, len(j.Entries)) for _, entry := range j.Entries { out = append(out, strconv.Quote(entry.Key)+":"+entry.Value.String()) } return "{" + strings.Join(out, ",") + "}" } type JSONArray struct { Entries []*JSONValue `"[" (@@ ("," @@)* ","?)? 
"]"` } func (j *JSONArray) children() (children []Node) { for _, entry := range j.Entries { children = append(children, entry) } return } func (j *JSONArray) String() string { out := make([]string, 0, len(j.Entries)) for _, v := range j.Entries { out = append(out, v.String()) } return "[" + strings.Join(out, ",") + "]" } type JSONValue struct { Scalar Object *JSONObject `| @@` Array *JSONArray `| @@` } func (j *JSONValue) children() (children []Node) { return append(j.Scalar.children(), j.Object, j.Array) } type Scalar struct { Number *float64 ` @Number` Str *string `| @String` Boolean *Boolean `| @Bool` Null bool `| @Null` } func (l *Scalar) children() []Node { return nil } func (l *Scalar) String() string { switch { case l.Number != nil: return strconv.FormatFloat(*l.Number, 'g', -1, 64) case l.Str != nil: return strconv.Quote(*l.Str) case l.Boolean != nil: return strconv.FormatBool(bool(*l.Boolean)) case l.Null: return "NULL" default: panic("unexpected code path") } } type Value struct { Scalar PlaceHolder *string `| @":" @Ident ` PositionalPlaceholder bool `| @"?" ` } func (v *Value) children() (children []Node) { return v.Scalar.children() } func (v Value) String() string { switch { case v.PlaceHolder != nil: return *v.PlaceHolder case v.PositionalPlaceholder: return "?" default: return v.Scalar.String() } }
parser/parser.go
0.510008
0.416025
parser.go
starcoder
package vme import ( "fmt" "reflect" ) func BoolType() reflect.Type { return reflect.TypeOf(false) } func Int8Type() reflect.Type { return reflect.TypeOf(int8(0)) } func Int16Type() reflect.Type { return reflect.TypeOf(int16(0)) } func Int32Type() reflect.Type { return reflect.TypeOf(int32(0)) } func Int64Type() reflect.Type { return reflect.TypeOf(int64(0)) } func IntType() reflect.Type { return reflect.TypeOf(int(0)) } func Uint8Type() reflect.Type { return reflect.TypeOf(uint8(0)) } func Uint16Type() reflect.Type { return reflect.TypeOf(uint16(0)) } func Uint32Type() reflect.Type { return reflect.TypeOf(uint32(0)) } func Uint64Type() reflect.Type { return reflect.TypeOf(uint64(0)) } func UintType() reflect.Type { return reflect.TypeOf(uint(0)) } func Float32Type() reflect.Type { return reflect.TypeOf(float32(0)) } func Float64Type() reflect.Type { return reflect.TypeOf(float64(0)) } func StringType() reflect.Type { return reflect.TypeOf("") } func BytesType() reflect.Type { return reflect.TypeOf([]byte{}) } func StructOf(fieldTypes...reflect.Type) reflect.Type { count := len(fieldTypes) fs := make([]reflect.StructField, count) for i := range fs { fs[i] = reflect.StructField{ Name: fmt.Sprintf("Field_%d", i), Type: fieldTypes[i], } } return reflect.StructOf(fs) } func StructValue(fieldValues...interface{}) interface{} { count := len(fieldValues) types := make([]reflect.Type, count) for i := range types { types[i] = reflect.TypeOf(fieldValues[i]) } val := reflect.New(StructOf(types...)).Elem() for i := 0; i < count; i++ { val.Field(i).Set(reflect.ValueOf(fieldValues[i])) } return val.Interface() } type structBuilder struct { fields []reflect.StructField } func NewStructBuilder() *structBuilder { return &structBuilder{} } func (b *structBuilder) AddField(name string, typ reflect.Type, tag string) *structBuilder { b.fields = append(b.fields, reflect.StructField{ Name: name, Type: typ, Tag: reflect.StructTag(tag), }) return b } func (b *structBuilder) Build() 
reflect.Type { return reflect.StructOf(b.fields) }
common/encoding/vme/type.go
0.617743
0.564158
type.go
starcoder
package iso20022 // Provides the details of the security pledge as collateral. type Collateral14 struct { // Provides the values of the security pledged as collateral. Valuation *SecuredCollateral2Choice `xml:"Valtn"` // Risk control measure applied to underlying collateral whereby the value of that underlying collateral is calculated as the market value of the assets reduced by a certain percentage. // // For reporting purposes the collateral haircut will be calculated as 100 minus the ratio between the cash lent/borrowed and the market value including accrued interest of the collateral pledged times 100. // // In the case of multi-collateral repos the haircut will be based on the ratio between the cash borrowed/lent and the market value, including accrued interest of each of the individual collateral pledged. // // Only actual values, as opposed to estimated or default values will be reported for this variable. // Haircut *PercentageRate `xml:"Hrcut,omitempty"` // Identifies all repurchase agreements conducted against general collateral and those conducted against special collateral. // - General Collateral is a repurchase transaction in which the security lender may choose the security to pledge as collateral with the cash provider amongst a relatively wide range of securities meeting predefined criteria; // - Special Collateral is a repurchase transaction in which the cash provider requests a specific security (individual ISIN) to be provided by the cash borrower. // // Usage: // This field is optional and it should be provided only in case it is feasible for the reporting agent. 
SpecialCollateralIndicator *SpecialCollateral1Code `xml:"SpclCollInd,omitempty"` } func (c *Collateral14) AddValuation() *SecuredCollateral2Choice { c.Valuation = new(SecuredCollateral2Choice) return c.Valuation } func (c *Collateral14) SetHaircut(value string) { c.Haircut = (*PercentageRate)(&value) } func (c *Collateral14) SetSpecialCollateralIndicator(value string) { c.SpecialCollateralIndicator = (*SpecialCollateral1Code)(&value) }
Collateral14.go
0.708011
0.508056
Collateral14.go
starcoder
package shp // shapes var tet4, tet10 Shape // register shapes func init() { // tet4 tet4.Type = "tet4" tet4.Func = Tet4 tet4.FaceFunc = Tri3 tet4.BasicType = "tet4" tet4.FaceType = "tri3" tet4.Gndim = 3 tet4.Nverts = 4 tet4.VtkCode = VTK_TETRA tet4.FaceNverts = 3 tet4.FaceLocalV = [][]int{{0, 3, 2}, {0, 1, 3}, {0, 2, 1}, {1, 2, 3}} tet4.NatCoords = [][]float64{ {0, 1, 0, 0}, {0, 0, 1, 0}, {0, 0, 0, 1}, } tet4.init_scratchpad() factory["tet4"] = &tet4 ipsfactory["tet4_0"] = ips_tet_1 ipsfactory["tet4_1"] = ips_tet_1 ipsfactory["tet4_4"] = ips_tet_4 ipsfactory["tet4_5"] = ips_tet_5 // tet10 tet10.Type = "tet10" tet10.Func = Tet10 tet10.FaceFunc = Tri6 tet10.BasicType = "tet4" tet10.FaceType = "tri6" tet10.Gndim = 3 tet10.Nverts = 10 tet10.VtkCode = VTK_QUADRATIC_TETRA tet10.FaceNverts = 6 tet10.FaceLocalV = [][]int{{0, 3, 2, 7, 9, 6}, {0, 1, 3, 4, 8, 7}, {0, 2, 1, 6, 5, 4}, {1, 2, 3, 5, 9, 8}} tet10.NatCoords = [][]float64{ {0, 1, 0, 0, 0.5, 0.5, 0, 0, 0.5, 0}, {0, 0, 1, 0, 0, 0.5, 0.5, 0, 0, 0.5}, {0, 0, 0, 1, 0, 0, 0, 0.5, 0.5, 0.5}, } tet10.init_scratchpad() factory["tet10"] = &tet10 ipsfactory["tet10_0"] = ips_tet_4 ipsfactory["tet10_4"] = ips_tet_4 ipsfactory["tet10_5"] = ips_tet_5 } // Tet4 calculates the shape functions (S) and derivatives of shape functions (dSdR) of tet4 // elements at {r,s,t} natural coordinates. The derivatives are calculated only if derivs==true. func Tet4(S []float64, dSdR [][]float64, r, s, t float64, derivs bool) { /* t | 3 /|`. || `, / | ', | | \ / | `. | | `, / | `, | | \ / | `. | | ', / | \ | 0.,,_ `. | / ``'-.,,__ `. 
| / ``''-.,,_ ', | / `` 2 ,,s | ,' ,.-`` | , _,-'` ' / ,.'` | / _.-`` '/ ,-'` |/ ,.-`` / _,-`` 1 '` / r */ S[0] = 1.0 - r - s - t S[1] = r S[2] = s S[3] = t if !derivs { return } dSdR[0][0] = -1.0 dSdR[1][0] = 1.0 dSdR[2][0] = 0.0 dSdR[3][0] = 0.0 dSdR[0][1] = -1.0 dSdR[1][1] = 0.0 dSdR[2][1] = 1.0 dSdR[3][1] = 0.0 dSdR[0][2] = -1.0 dSdR[1][2] = 0.0 dSdR[2][2] = 0.0 dSdR[3][2] = 1.0 } // Tet10 calculates the shape functions (S) and derivatives of shape functions (dSdR) of tet10 // elements at {r,s,t} natural coordinates. The derivatives are calculated only if derivs==true. func Tet10(S []float64, dSdR [][]float64, r, s, t float64, derivs bool) { /* t | 3 /|`. || `, / | ', | | \ / | `. | | `, / 7 9 | | \ / | `. | | ', 8 | \ | 0 ,,_ `. | / ``'-., 6 `. | / `''-.,,_ ', | / ``'2 ,,s | ' ,.-`` | 4 _,-'` ' / ,.'` | / _ 5 ` '/ ,-'` |/ ,.-`` / _,-`` 1 '` / r */ u := 1.0 - r - s - t S[0] = u * (2.0*u - 1.0) S[1] = r * (2.0*r - 1.0) S[2] = s * (2.0*s - 1.0) S[3] = t * (2.0*t - 1.0) S[4] = 4.0 * u * r S[5] = 4.0 * r * s S[6] = 4.0 * s * u S[7] = 4.0 * u * t S[8] = 4.0 * r * t S[9] = 4.0 * s * t if !derivs { return } dSdR[0][0] = 4.0*(r+s+t) - 3.0 dSdR[1][0] = 4.0*r - 1.0 dSdR[2][0] = 0.0 dSdR[3][0] = 0.0 dSdR[4][0] = 4.0 - 8.0*r - 4.0*s - 4.0*t dSdR[5][0] = 4.0 * s dSdR[6][0] = -4.0 * s dSdR[7][0] = -4.0 * t dSdR[8][0] = 4.0 * t dSdR[9][0] = 0.0 dSdR[0][1] = 4.0*(r+s+t) - 3.0 dSdR[1][1] = 0.0 dSdR[2][1] = 4.0*s - 1.0 dSdR[3][1] = 0.0 dSdR[4][1] = -4.0 * r dSdR[5][1] = 4.0 * r dSdR[6][1] = 4.0 - 4.0*r - 8.0*s - 4.0*t dSdR[7][1] = -4.0 * t dSdR[8][1] = 0.0 dSdR[9][1] = 4.0 * t dSdR[0][2] = 4.0*(r+s+t) - 3.0 dSdR[1][2] = 0.0 dSdR[2][2] = 0.0 dSdR[3][2] = 4.0*t - 1.0 dSdR[4][2] = -4.0 * r dSdR[5][2] = 0.0 dSdR[6][2] = -4.0 * s dSdR[7][2] = 4.0 - 4.0*r - 4.0*s - 8.0*t dSdR[8][2] = 4.0 * r dSdR[9][2] = 4.0 * s }
shp/tets.go
0.568056
0.461441
tets.go
starcoder
package topojson import geojson "github.com/paulmach/go.geojson" func (t *Topology) ToGeoJSON() *geojson.FeatureCollection { fc := geojson.NewFeatureCollection() for _, obj := range t.Objects { switch obj.Type { case geojson.GeometryCollection: for _, geometry := range obj.Geometries { feat := geojson.NewFeature(t.toGeometry(geometry)) feat.ID = geometry.ID feat.Properties = geometry.Properties fc.AddFeature(feat) } default: feat := geojson.NewFeature(t.toGeometry(obj)) feat.ID = obj.ID feat.Properties = obj.Properties fc.AddFeature(feat) } } return fc } func (t *Topology) toGeometry(g *Geometry) *geojson.Geometry { switch g.Type { case geojson.GeometryPoint: return geojson.NewPointGeometry(t.packPoint(g.Point)) case geojson.GeometryMultiPoint: return geojson.NewMultiPointGeometry(t.packPoints(g.MultiPoint)...) case geojson.GeometryLineString: return geojson.NewLineStringGeometry(t.packLinestring(g.LineString)) case geojson.GeometryMultiLineString: return geojson.NewMultiLineStringGeometry(t.packMultiLinestring(g.MultiLineString)...) case geojson.GeometryPolygon: return geojson.NewPolygonGeometry(t.packMultiLinestring(g.Polygon)) case geojson.GeometryMultiPolygon: polygons := make([][][][]float64, len(g.MultiPolygon)) for i, poly := range g.MultiPolygon { polygons[i] = t.packMultiLinestring(poly) } return geojson.NewMultiPolygonGeometry(polygons...) case geojson.GeometryCollection: geometries := make([]*geojson.Geometry, len(g.Geometries)) for i, geometry := range g.Geometries { geometries[i] = t.toGeometry(geometry) } return geojson.NewCollectionGeometry(geometries...) 
} return nil } func (t *Topology) packPoint(in []float64) []float64 { if t.Transform == nil { return in } out := make([]float64, len(in)) for i, v := range in { out[i] = v if i < 2 { out[i] = v*t.Transform.Scale[i] + t.Transform.Translate[i] } } return out } func (t *Topology) packPoints(in [][]float64) [][]float64 { out := make([][]float64, len(in)) for i, p := range in { out[i] = t.packPoint(p) } return out } func (t *Topology) packLinestring(ls []int) [][]float64 { result := make([][]float64, 0) for _, a := range ls { reverse := false if a < 0 { a = ^a reverse = true } arc := t.Arcs[a] // Copy arc newArc := make([][]float64, len(arc)) for i, point := range arc { newArc[i] = append([]float64{}, point...) } if t.Transform != nil { x := float64(0) y := float64(0) for k, p := range newArc { x += p[0] y += p[1] newArc[k][0] = x*t.Transform.Scale[0] + t.Transform.Translate[0] newArc[k][1] = y*t.Transform.Scale[1] + t.Transform.Translate[1] } } if reverse { for j := len(newArc) - 1; j >= 0; j-- { result = append(result, newArc[j]) } } else { result = append(result, newArc...) } } return result } func (t *Topology) packMultiLinestring(ls [][]int) [][][]float64 { result := make([][][]float64, len(ls)) for i, l := range ls { result[i] = t.packLinestring(l) } return result }
vendor/github.com/rubenv/topojson/geojson.go
0.648021
0.470615
geojson.go
starcoder
package main import ( "bytes" "encoding/binary" "fmt" "log" "unsafe" "github.com/lukeroth/gdal" ) type NdvSlab struct { RangeByBand [][2]float64 } func (self *NdvSlab) Empty() bool { return len(self.RangeByBand) == 0 } func contains_templated(interval [2]float64, p interface{}) bool { switch p.(type) { case byte: return float64(p.(byte)) >= interval[0] && float64(p.(byte)) <= interval[1] case uint16: return float64(p.(uint16)) >= interval[0] && float64(p.(uint16)) <= interval[1] case int16: return float64(p.(int16)) >= interval[0] && float64(p.(int16)) <= interval[1] case int32: return float64(p.(int32)) >= interval[0] && float64(p.(int32)) <= interval[1] case uint32: return float64(p.(uint32)) >= interval[0] && float64(p.(uint32)) <= interval[1] case float32: return float64(p.(float32)) >= interval[0] && float64(p.(float32)) <= interval[1] case float64: return p.(float64) >= interval[0] && p.(float64) <= interval[1] case complex64: return float64(real(p.(complex64))) >= interval[0] && float64(real(p.(complex64))) <= interval[1] case complex128: return real(p.(complex128)) >= interval[0] && real(p.(complex128)) <= interval[1] default: log.Fatal("interval doesnot match any type") return false } } type NdvDef struct { Slabs []NdvSlab Invert bool } func (self *NdvDef) PrintUsage() { print( "No-data values:\n", " Ndv [[val val]] Set a no-data value\n", " Ndv '[[val1 val1] [val2 val2] [val3 val3] ...' Set a no-data value using all input bands\n", " Ndv '[[valMin1 valMax1] [valMin2 valMax2] [valMin3 valMax3] ...' Set a range of no-data values\n", " Ndv (-Inf and Inf are allowed; [[math.Inf(-1) math.Inf(1)]])\n", " ValidRange '[[valMin1 valMax1] [valMin2 valMax2] [valMin3 valMax3] ...' 
Set a range of valid data values\n", ) } func (self *NdvDef) DebugPrint() { print("=== NDV\n") for i, iv := range self.Slabs { for j, jv := range iv.RangeByBand { fmt.Printf("range %d,%d = [%e,%e]\n", i, j, jv[0], jv[1]) } } print("=== end NDV\n") } func (self *NdvDef) Empty() bool { return len(self.Slabs) == 0 } func (self *NdvDef) IsInert() bool { return self.Invert } func (self *NdvDef) GetNdvMaskA(band float64, dt gdal.DataType, mask_out []uint8, num_pixels int) { var ( bands []float64 dt_list []gdal.DataType ) bands = append(bands, band) dt_list = append(dt_list, dt) self.GetNdvMaskB(bands, dt_list, mask_out, num_pixels) } func (self *NdvDef) GetNdvMaskB(bands []float64, dt_list []gdal.DataType, mask_out []uint8, num_pixels int) { var ( in_p [][]uint8 dt_sizes []int ) for i, v := range bands { in_p = append(in_p, convertToBytes(v)) dt_sizes = append(dt_sizes, (dt_list[i] / 8).Size()) } // MemSet(unsafe.Pointer(&mask_out), 0, uintptr(num_pixels)) for pix_idx := 0; pix_idx < num_pixels; pix_idx++ { mask_out[pix_idx] = 0 for j := range bands { if gdal_scalar_pointer_isnan(in_p[j][0], dt_list[j]) { mask_out[pix_idx] = 1 } } for _, v := range self.Slabs { var all_match uint8 = 1 for j := 0; j < len(bands); j++ { var k int if len(v.RangeByBand) == 1 { k = 0 } else { k = j } if !contains_templated(v.RangeByBand[k], dt_list[j]) { all_match = 0 } } mask_out[pix_idx] |= all_match } if self.Invert { if mask_out[pix_idx] == 0 { mask_out[pix_idx] = 1 } else { mask_out[pix_idx] = 0 } } for band_idx := 0; band_idx < len(bands); band_idx++ { in_p[band_idx] = in_p[band_idx][dt_sizes[band_idx]:] } } } func (self *NdvDef) GetNdvMaskC(bands [][]float64, mask_out []uint8, num_pixels int) { dt := gdal.Float64 var ( band_p []float64 dt_list []gdal.DataType ) for _, v := range bands { band_p = append(band_p, v[0]) dt_list = append(dt_list, dt) } self.GetNdvMaskB(band_p, dt_list, mask_out, num_pixels) } func (self *NdvDef) GetNdvMaskD(bands [][]float64, dt_list []gdal.DataType, 
mask_out []uint8, num_pixels int) { var band_p []float64 for _, v := range bands { band_p = append(band_p, v[0]) } self.GetNdvMaskB(band_p, dt_list, mask_out, num_pixels) } func gdal_scalar_pointer_isnan(p interface{}, dt gdal.DataType) bool { var ok bool switch dt { case gdal.Byte: _, ok = p.(uint8) case gdal.UInt16: _, ok = p.(uint16) case gdal.Int16: _, ok = p.(int16) case gdal.UInt32: _, ok = p.(uint32) case gdal.Int32: _, ok = p.(int32) case gdal.Float32: _, ok = p.(float32) case gdal.Float64: _, ok = p.(float64) // case gdal.CInt16: // case gdal.CInt32: case gdal.CFloat32: _, ok = p.(complex64) case gdal.CFloat64: _, ok = p.(complex128) default: log.Fatal("unrecognized datatype") } return ok } func convertToBytes(n interface{}) []uint8 { var buf bytes.Buffer err := binary.Write(&buf, binary.BigEndian, n) if err != nil { log.Panic(err) } return buf.Bytes() } func MemSet(s unsafe.Pointer, c byte, n uintptr) { var ptr uintptr ptr = uintptr(s) var i uintptr for i = 0; i < n; i++ { pByte := (*byte)(unsafe.Pointer(ptr + i)) *pByte = c } }
ndv.go
0.58059
0.536252
ndv.go
starcoder
package iso20022 // Chain of parties involved in the settlement of a transaction, including receipts and deliveries, book transfers, treasury deals, or other activities, resulting in the movement of a security or amount of money from one account to another. type DeliveringPartiesAndAccount13 struct { // Party that sells goods or services, or a financial instrument. DelivererDetails *InvestmentAccount55 `xml:"DlvrrDtls,omitempty"` // Party that acts on behalf of the seller of securities when the seller does not have a direct relationship with the delivering agent. DeliverersCustodianDetails *PartyIdentificationAndAccount124 `xml:"DlvrrsCtdnDtls,omitempty"` // Party that the deliverer's custodian uses to effect the delivery of a security, when the deliverer's custodian does not have a direct relationship with the delivering agent. DeliverersIntermediary1Details *PartyIdentificationAndAccount124 `xml:"DlvrrsIntrmy1Dtls,omitempty"` // Party that interacts with the deliverer's intermediary. DeliverersIntermediary2Details *PartyIdentificationAndAccount124 `xml:"DlvrrsIntrmy2Dtls,omitempty"` // Party that delivers securities to the receiving agent at the place of settlement, for example, central securities depository. DeliveringAgentDetails *PartyIdentificationAndAccount124 `xml:"DlvrgAgtDtls"` // Identifies the securities settlement system to be used. SecuritiesSettlementSystem *Max35Text `xml:"SctiesSttlmSys,omitempty"` // Place where settlement of the securities takes place. 
PlaceOfSettlementDetails *PartyIdentification97 `xml:"PlcOfSttlmDtls,omitempty"` } func (d *DeliveringPartiesAndAccount13) AddDelivererDetails() *InvestmentAccount55 { d.DelivererDetails = new(InvestmentAccount55) return d.DelivererDetails } func (d *DeliveringPartiesAndAccount13) AddDeliverersCustodianDetails() *PartyIdentificationAndAccount124 { d.DeliverersCustodianDetails = new(PartyIdentificationAndAccount124) return d.DeliverersCustodianDetails } func (d *DeliveringPartiesAndAccount13) AddDeliverersIntermediary1Details() *PartyIdentificationAndAccount124 { d.DeliverersIntermediary1Details = new(PartyIdentificationAndAccount124) return d.DeliverersIntermediary1Details } func (d *DeliveringPartiesAndAccount13) AddDeliverersIntermediary2Details() *PartyIdentificationAndAccount124 { d.DeliverersIntermediary2Details = new(PartyIdentificationAndAccount124) return d.DeliverersIntermediary2Details } func (d *DeliveringPartiesAndAccount13) AddDeliveringAgentDetails() *PartyIdentificationAndAccount124 { d.DeliveringAgentDetails = new(PartyIdentificationAndAccount124) return d.DeliveringAgentDetails } func (d *DeliveringPartiesAndAccount13) SetSecuritiesSettlementSystem(value string) { d.SecuritiesSettlementSystem = (*Max35Text)(&value) } func (d *DeliveringPartiesAndAccount13) AddPlaceOfSettlementDetails() *PartyIdentification97 { d.PlaceOfSettlementDetails = new(PartyIdentification97) return d.PlaceOfSettlementDetails }
DeliveringPartiesAndAccount13.go
0.650134
0.431884
DeliveringPartiesAndAccount13.go
starcoder
Package srp Secure Remote Password protocol The principle interface provided by this package is the SRP type. The end aim of the caller is to to have an SRP server and SRP client arrive at the same Key. See the documentation for the SRP structure and its methods for the nitty gritty of use. BUG(jpg): This does not use the same padding and hashing scheme as in RFC 5054, and therefore is not interoperable with those clients and servers. Perhaps someday we'll add an RFC 5054 mode that does that, but today is not that day. The SRP protocol It would be nice if this package could be used without having some understanding of the SRP protocol, but too much of the language and naming is depends on at least some familiarity. Here is a summary. The Secure Remote Password protocol involves a server and a client proving to each other that they know (or can derive) their long term secrets. The client long term secret is known as "x" and the corresponding server secret, the verifier, is known as "v". The verifier is mathematically related to x and is computed by the client on first enrollment and transmistted to the server. Typically the server will store the verifier and the client will derive x from a user secret such as a password. Because the verifier can used like a password hash with respect to cracking, the derivation of x should be designed to resist password cracking if the verifier compromised. The client and the server must both use the same Diffie-Hellman group to perform their computations. The server and the client each send an ephemeral public key to each other (The client sends A; the server sends B) With their private knowledge of their own ephemeral secrets (a or b) and their private knowledge of x (for the client) and v (for the server) along with public knowledge they are able to prove to each other that they know their respective secrets and can generate a session key, K, which may be used for further encryption during the session. 
Quoting from http://srp.stanford.edu/design.html (with some modification for KDF) Names and notation N A large safe prime (N = 2q+1, where q is prime) All arithmetic is done modulo N. g A generator modulo N k Multiplier parameter (k = H(N, g) in SRP-6a; k = 3 for legacy SRP-6; k is a hash of the session ID within 1Password H() One-way hash function ^ (Modular) Exponentiation u Random scrambling parameter a,b Secret ephemeral values A,B Public ephemeral values x Long term client secret (derived via KDF) v Long term server Verifier (derived from x) s Salt for key derivation function I User identifiers (username, account ID, etc) KDF() Key Derivation Function The authentication protocol itself goes as follows User -> Host: I, A = g^a (identifies self, a = random number) Host -> User: s, B = kv + g^b (sends salt, b = random number) Both: u = H(A, B) User: x = KDF(s, ...) (user derives x) User: S = (B - kg^x) ^ (a + ux) (computes raw session key) User: K = H(S) (computes session key) Host: S = (Av^u) ^ b (computes raw session key) Host: K = H(S) (computes session key) Now the two parties have a shared, strong session key K. To complete authentication, they need to prove to each other that their keys match. This package does not address the actual communication between client and server. But through the SRP type it not only performs the calculations needed, it also performs safety and sanity checks on its input, and it hides everything from the caller except what the caller absolutely needs to provide. The key derivation function, KDF() x is computed by client via KDF, user secrets, and random salt, s. x = KDF(...) v = g^x v is sent to the server on first enrollment. v should be transmitted over a secure channel. The server then stores {I, s, v} long term. v needs to be protected in the same way that a password hash should be protected. User's security responsibilities The consumer is responsible for 1. 
Both client and server: Checking whether methods have returned without error. This is particularly true of SRP.Key() and SetOthersPublic() 2. Client: Using an appropriate key derivation function for deriving x from the user's password (and nudging user toward a good password) 3. Server: Storing the v (send by the client on first enrollment) securely. A captured v can be used to masquerade as the server and be used like a password hash in a password cracking attempt 4. Both: Proving to each other that both have the same key. The package includes methods that can assist with that. */ package srp // This file is just for the package documentation
doc.go
0.639286
0.831656
doc.go
starcoder
package webrtc import ( "fmt" "github.com/pion/ice" ) func supportedNetworkTypes() []NetworkType { return []NetworkType{ NetworkTypeUDP4, NetworkTypeUDP6, // NetworkTypeTCP4, // Not supported yet // NetworkTypeTCP6, // Not supported yet } } // NetworkType represents the type of network type NetworkType int const ( // NetworkTypeUDP4 indicates UDP over IPv4. NetworkTypeUDP4 NetworkType = iota + 1 // NetworkTypeUDP6 indicates UDP over IPv6. NetworkTypeUDP6 // NetworkTypeTCP4 indicates TCP over IPv4. NetworkTypeTCP4 // NetworkTypeTCP6 indicates TCP over IPv6. NetworkTypeTCP6 ) // This is done this way because of a linter. const ( networkTypeUDP4Str = "udp4" networkTypeUDP6Str = "udp6" networkTypeTCP4Str = "tcp4" networkTypeTCP6Str = "tcp6" ) func (t NetworkType) String() string { switch t { case NetworkTypeUDP4: return networkTypeUDP4Str case NetworkTypeUDP6: return networkTypeUDP6Str case NetworkTypeTCP4: return networkTypeTCP4Str case NetworkTypeTCP6: return networkTypeTCP6Str default: return ErrUnknownType.Error() } } // Protocol returns udp or tcp func (t NetworkType) Protocol() string { switch t { case NetworkTypeUDP4: return "udp" case NetworkTypeUDP6: return "udp" case NetworkTypeTCP4: return "tcp" case NetworkTypeTCP6: return "tcp" default: return ErrUnknownType.Error() } } // NewNetworkType allows create network type from string // It will be useful for getting custom network types from external config. 
func NewNetworkType(raw string) (NetworkType, error) { switch raw { case networkTypeUDP4Str: return NetworkTypeUDP4, nil case networkTypeUDP6Str: return NetworkTypeUDP6, nil case networkTypeTCP4Str: return NetworkTypeTCP4, nil case networkTypeTCP6Str: return NetworkTypeTCP6, nil default: return NetworkType(Unknown), fmt.Errorf("unknown network type: %s", raw) } } func getNetworkType(iceNetworkType ice.NetworkType) (NetworkType, error) { switch iceNetworkType { case ice.NetworkTypeUDP4: return NetworkTypeUDP4, nil case ice.NetworkTypeUDP6: return NetworkTypeUDP6, nil case ice.NetworkTypeTCP4: return NetworkTypeTCP4, nil case ice.NetworkTypeTCP6: return NetworkTypeTCP6, nil default: return NetworkType(Unknown), fmt.Errorf("unknown network type: %s", iceNetworkType.String()) } }
networktype.go
0.620277
0.432723
networktype.go
starcoder
package datasetAPI import ( "encoding/json" "time" "github.com/globalsign/mgo/bson" "github.com/ONSdigital/dp-api-tests/testDataSetup/mongo" datasetAPI "github.com/ONSdigital/dp-dataset-api/models" ) var alert = mongo.Alert{ Date: "2017-12-10", Description: "A correction to an observation for males of age 25, previously 11 now changed to 12", Type: "Correction", } var contact = mongo.ContactDetails{ Email: "<EMAIL>", Name: "<NAME>", Telephone: "+44 (0)1633 123456", } var latestChanges = mongo.LatestChange{ Description: "The border of Southampton changed after the south east cliff face fell into the sea.", Name: "Changes in Classification", Type: "Summary of Changes", } var methodology = mongo.GeneralDetails{ Description: "Consumer price inflation is the rate at which the prices of the goods and services bought by households rise or fall, and is estimated by using consumer price indices.", HRef: "https://www.ons.gov.uk/economy/inflationandpriceindices/qmis/consumerpriceinflationqmi", Title: "Consumer Price Inflation (includes all 3 indices – CPIH, CPI and RPI)", } var publication = mongo.GeneralDetails{ Description: "Price indices, percentage changes and weights for the different measures of consumer price inflation.", HRef: "https://www.ons.gov.uk/economy/inflationandpriceindices/bulletins/consumerpriceinflation/aug2017", Title: "UK consumer price inflation: August 2017", } var relatedDatasets = mongo.GeneralDetails{ HRef: "https://www.ons.gov.uk/economy/inflationandpriceindices/datasets/consumerpriceindices", Title: "Consumer Price Inflation time series dataset", } var dimension = mongo.CodeList{ Description: "A list of ages between 18 and 75+", HRef: "http://localhost:8080/codelists/408064B3-A808-449B-9041-EA3A2F72CFAC", ID: "408064B3-A808-449B-9041-EA3A2F72CFAC", Name: "age", } var dimensionTwo = mongo.CodeList{ Description: "An aggregate of the data", HRef: "http://localhost:8080/codelists/508064B3-A808-449B-9041-EA3A2F72CFAD", ID: 
"508064B3-A808-449B-9041-EA3A2F72CFAD", Name: "aggregate", } var dimensionThree = mongo.CodeList{ Description: "The time in which this dataset spans", HRef: "http://localhost:8080/codelists/608064B3-A808-449B-9041-EA3A2F72CFAE", ID: "508064B3-A808-449B-9041-EA3A2F72CFAD", Name: "time", } var dimensionFour = mongo.CodeList{ Description: "The locations in which this dataset spans", HRef: "http://localhost:8080/codelists/708064B3-A808-449B-9041-EA3A2F72CFAF", ID: "708064B3-A808-449B-9041-EA3A2F72CFAF", Name: "geography", } var temporal = mongo.TemporalFrequency{ EndDate: "2017-09-09", Frequency: "monthly", StartDate: "2014-09-09", } // ValidPublishedWithUpdatesDatasetData returns an example of a published dataset func ValidPublishedWithUpdatesDatasetData(datasetID string) bson.M { return bson.M{ "$set": bson.M{ "id": datasetID, "current.contacts": []mongo.ContactDetails{contact}, "current.description": "Comprehensive database of time series covering measures of inflation data including CPIH, CPI and RPI.", "current.id": datasetID, "current.keywords": []string{"cpi", "boy"}, "current.last_updated": "2017-06-06", // TODO this should be an isodate "current.license": "ONS license", "current.links.access_rights.href": "http://ons.gov.uk/accessrights", "current.links.editions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions", "current.links.latest_version.id": "1", "current.links.latest_version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/2017/versions/1", "current.links.self.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "current.methodologies": []mongo.GeneralDetails{methodology}, "current.national_statistic": true, "current.next_release": "2017-10-10", "current.publications": []mongo.GeneralDetails{publication}, "current.publisher.name": "Automation Tester", "current.publisher.type": "publisher", "current.publisher.href": "https://www.ons.gov.uk/economy/inflationandpriceindices/bulletins/consumerpriceinflation/aug2017", 
"current.qmi.description": "Consumer price inflation is the rate at which the prices of goods and services bought by households rise and fall", "current.qmi.href": "https://www.ons.gov.uk/economy/inflationandpriceindices/qmis/consumerpriceinflationqmi", "current.qmi.title": "Consumer Price Inflation (includes all 3 indices – CPIH, CPI and RPI)", "current.related_datasets": []mongo.GeneralDetails{relatedDatasets}, "current.release_frequency": "Monthly", "current.state": "published", "current.theme": "Goods and services", "current.title": "CPI", "current.unit_of_measure": "Pounds Sterling", "current.uri": "https://www.ons.gov.uk/economy/inflationandpriceindices/datasets/consumerpriceinflation", "next.contacts": []mongo.ContactDetails{contact}, "next.description": "Comprehensive database of time series covering measures of inflation data including CPIH, CPI and RPI.", "next.id": datasetID, "next.keywords": []string{"cpi", "boy"}, "next.last_updated": "2017-10-11", // TODO this should be an isodate "next.license": "ONS license", "next.links.access_rights.href": "http://ons.gov.uk/accessrights", "next.links.editions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions", "next.links.latest_version.id": "1", "next.links.latest_version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/2018/versions/1", "next.links.self.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "next.methodologies": []mongo.GeneralDetails{methodology}, "next.national_statistic": true, "next.next_release": "2018-10-10", "next.publications": []mongo.GeneralDetails{publication}, "next.publisher.name": "Automation Tester", "next.publisher.type": "publisher", "next.publisher.href": "https://www.ons.gov.uk/economy/inflationandpriceindices/bulletins/consumerpriceinflation/aug2017", "next.qmi.description": "Consumer price inflation is the rate at which the prices of goods and services bought by households rise and fall", "next.qmi.href": 
"https://www.ons.gov.uk/economy/inflationandpriceindices/qmis/consumerpriceinflationqmi", "next.qmi.title": "Consumer Price Inflation (includes all 3 indices – CPIH, CPI and RPI)", "next.related_datasets": []mongo.GeneralDetails{relatedDatasets}, "next.release_frequency": "Monthly", "next.state": "created", "next.theme": "Goods and services", "next.title": "CPI", "next.unit_of_measure": "Pounds Sterling", "next.uri": "https://www.ons.gov.uk/economy/inflationandpriceindices/datasets/consumerpriceinflation", "test_data": "true", }, } } func validPublishedDatasetData(datasetID string) bson.M { return bson.M{ "$set": bson.M{ "id": datasetID, "current.contacts": []mongo.ContactDetails{contact}, "current.description": "Comprehensive database of time series covering measures of inflation data including CPIH, CPI and RPI.", "current.id": datasetID, "current.keywords": []string{"cpi", "boy"}, "current.last_updated": "2017-06-06", // TODO this should be an isodate "current.license": "ONS license", "current.links.access_rights.href": "http://ons.gov.uk/accessrights", "current.links.editions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions", "current.links.latest_version.id": "1", "current.links.latest_version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/2017/versions/1", "current.links.self.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "current.methodologies": []mongo.GeneralDetails{methodology}, "current.national_statistic": true, "current.next_release": "2017-10-10", "current.publications": []mongo.GeneralDetails{publication}, "current.publisher.name": "Automation Tester", "current.publisher.type": "publisher", "current.publisher.href": "https://www.ons.gov.uk/economy/inflationandpriceindices/bulletins/consumerpriceinflation/aug2017", "current.qmi.description": "Consumer price inflation is the rate at which the prices of goods and services bought by households rise and fall", "current.qmi.href": 
"https://www.ons.gov.uk/economy/inflationandpriceindices/qmis/consumerpriceinflationqmi", "current.qmi.title": "Consumer Price Inflation (includes all 3 indices – CPIH, CPI and RPI)", "current.related_datasets": []mongo.GeneralDetails{relatedDatasets}, "current.release_frequency": "Monthly", "current.state": "published", "current.theme": "Goods and services", "current.title": "CPI", "current.unit_of_measure": "Pounds Sterling", "current.uri": "https://www.ons.gov.uk/economy/inflationandpriceindices/datasets/consumerpriceinflation", "next.contacts": []mongo.ContactDetails{contact}, "next.description": "Comprehensive database of time series covering measures of inflation data including CPIH, CPI and RPI.", "next.id": datasetID, "next.keywords": []string{"cpi", "boy"}, "next.last_updated": "2017-06-06", // TODO this should be an isodate "next.license": "ONS license", "next.links.access_rights.href": "http://ons.gov.uk/accessrights", "next.links.editions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions", "next.links.latest_version.id": "1", "next.links.latest_version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/2018/versions/1", "next.links.self.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "next.methodologies": []mongo.GeneralDetails{methodology}, "next.national_statistic": true, "next.next_release": "2017-10-10", "next.publications": []mongo.GeneralDetails{publication}, "next.publisher.name": "Automation Tester", "next.publisher.type": "publisher", "next.publisher.href": "https://www.ons.gov.uk/economy/inflationandpriceindices/bulletins/consumerpriceinflation/aug2017", "next.qmi.description": "Consumer price inflation is the rate at which the prices of goods and services bought by households rise and fall", "next.qmi.href": "https://www.ons.gov.uk/economy/inflationandpriceindices/qmis/consumerpriceinflationqmi", "next.qmi.title": "Consumer Price Inflation (includes all 3 indices – CPIH, CPI and RPI)", 
"next.related_datasets": []mongo.GeneralDetails{relatedDatasets}, "next.release_frequency": "Monthly", "next.state": "published", "next.theme": "Goods and services", "next.title": "CPI", "next.unit_of_measure": "Pounds Sterling", "next.uri": "https://www.ons.gov.uk/economy/inflationandpriceindices/datasets/consumerpriceinflation", "test_data": "true", }, } } func validAssociatedDatasetData(datasetID string) bson.M { return bson.M{ "$set": bson.M{ "id": datasetID, "next.collection_id": "208064B3-A808-449B-9041-EA3A2F72CFAB", "next.contacts": []mongo.ContactDetails{contact}, "next.description": "Comprehensive database of time series covering measures of inflation data including CPIH, CPI and RPI.", "next.id": datasetID, "next.keywords": []string{"cpi", "boy"}, "next.last_updated": "2017-10-11", // TODO this should be an isodate "next.links.access_rights.href": "http://ons.gov.uk/accessrights", "next.links.editions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions", "next.links.latest_version.id": "1", "next.links.latest_version.href": cfg.DatasetAPIURL + "/datasets" + datasetID + "/editions/2018/versions/1", "next.links.self.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "next.methodologies": []mongo.GeneralDetails{methodology}, "next.national_statistic": true, "next.next_release": "2018-10-10", "next.publications": []mongo.GeneralDetails{publication}, "next.publisher.name": "Automation Tester", "next.publisher.type": "publisher", "next.publisher.href": "https://www.ons.gov.uk/economy/inflationandpriceindices/bulletins/consumerpriceinflation/aug2017", "next.qmi.description": "Consumer price inflation is the rate at which the prices of goods and services bought by households rise and fall", "next.qmi.href": "https://www.ons.gov.uk/economy/inflationandpriceindices/qmis/consumerpriceinflationqmi", "next.qmi.title": "Consumer Price Inflation (includes all 3 indices – CPIH, CPI and RPI)", "next.related_datasets": 
[]mongo.GeneralDetails{relatedDatasets}, "next.release_frequency": "Monthly", "next.state": "associated", "next.theme": "Goods and services", "next.title": "CPI", "next.unit_of_measure": "Pounds Sterling", "next.uri": "https://www.ons.gov.uk/economy/inflationandpriceindices/datasets/consumerpriceinflation", "test_data": "true", }, } } func validCreatedDatasetData(datasetID string) bson.M { return bson.M{ "$set": bson.M{ "id": datasetID, "next.contacts": []mongo.ContactDetails{contact}, "next.description": "Comprehensive database of time series covering measures of inflation data including CPIH, CPI and RPI.", "next.id": datasetID, "next.keywords": []string{"cpi", "boy"}, "next.last_updated": "2017-10-11", // TODO this should be an isodate "next.links.access_rights.href": "http://ons.gov.uk/accessrights", "next.links.editions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions", "next.links.latest_version.id": "1", "next.links.latest_version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/2018/versions/1", "next.links.self.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "next.methodologies": []mongo.GeneralDetails{methodology}, "next.national_statistic": true, "next.next_release": "2018-10-10", "next.publications": []mongo.GeneralDetails{publication}, "next.publisher.name": "Automation Tester", "next.publisher.type": "publisher", "next.publisher.href": "https://www.ons.gov.uk/economy/inflationandpriceindices/bulletins/consumerpriceinflation/aug2017", "next.qmi.description": "Consumer price inflation is the rate at which the prices of goods and services bought by households rise and fall", "next.qmi.href": "https://www.ons.gov.uk/economy/inflationandpriceindices/qmis/consumerpriceinflationqmi", "next.qmi.title": "Consumer Price Inflation (includes all 3 indices – CPIH, CPI and RPI)", "next.related_datasets": []mongo.GeneralDetails{relatedDatasets}, "next.release_frequency": "Monthly", "next.state": "created", "next.theme": "Goods 
and services", "next.title": "CPI", "next.unit_of_measure": "Pounds Sterling", "next.uri": "https://www.ons.gov.uk/economy/inflationandpriceindices/datasets/consumerpriceinflation", "test_data": "true", }, } } func validTimeDimensionsData(dimensionID, instanceID string) bson.M { return bson.M{ "$set": bson.M{ "_id": dimensionID, "instance_id": instanceID, "name": "time", "option": "202.45", "links.code_list.id": "64d384f1-ea3b-445c-8fb8-aa453f96e58a", "links.code_list.href": cfg.CodeListAPIURL + "/code-lists/64d384f1-ea3b-445c-8fb8-aa453f96e58a", "links.code.id": "202.45", "links.code.href": cfg.CodeListAPIURL + "/code-lists/64d384f1-ea3b-445c-8fb8-aa453f96e58a/codes/202.45", "node_id": "", "last_updated": "2017-09-09", // TODO Should be isodate "test_data": "true", }, } } func validTimeDimensionsDataWithOutOptions(dimensionID, instanceID string) bson.M { return bson.M{ "$set": bson.M{ "_id": dimensionID, "instance_id": instanceID, "name": "time", "links.code_list.id": "64d384f1-ea3b-445c-8fb8-aa453f96e58a", "links.code_list.href": cfg.CodeListAPIURL + "/code-lists/64d384f1-ea3b-445c-8fb8-aa453f96e58a", "links.code.id": "202.45", "links.code.href": cfg.CodeListAPIURL + "/code-lists/64d384f1-ea3b-445c-8fb8-aa453f96e58a/codes/202.45", "node_id": "", "last_updated": "2017-09-09", // TODO Should be isodate "test_data": "true", }, } } func validAggregateDimensionsData(dimensionID, instanceID string) bson.M { return bson.M{ "$set": bson.M{ "_id": dimensionID, "instance_id": instanceID, "name": "aggregate", "option": "cpi1dimA19", "label": "CPI (Overall Index)", "links.code_list.id": "64d384f1-ea3b-445c-8fb8-aa453f96e58a", "links.code_list.href": cfg.CodeListAPIURL + "/code-lists/64d384f1-ea3b-445c-8fb8-aa453f96e58a", "links.code.id": "cpi1dimA19", "links.code.href": cfg.CodeListAPIURL + "/code-lists/64d384f1-ea3b-445c-8fb8-aa453f96e58a/codes/cpi1dimA19", "last_updated": "2017-09-08", // TODO Should be isodate "test_data": "true", }, } } // ValidPublishedEditionData 
returns an example bson object for a published edition resource func ValidPublishedEditionData(datasetID, editionID, edition string) bson.M { return bson.M{ "$set": bson.M{ "id": editionID, "current.edition": edition, "current.last_updated": "2017-09-08", // TODO Should be isodate "current.links.dataset.id": datasetID, "current.links.dataset.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "current.links.latest_version.id": "1", "current.links.latest_version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions/1", "current.links.self.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition, "current.links.versions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions", "current.state": "published", "next.edition": edition, "next.last_updated": "2017-09-08", // TODO Should be isodate "next.links.dataset.id": datasetID, "next.links.dataset.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "next.links.latest_version.id": "1", "next.links.latest_version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions/1", "next.links.self.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition, "next.links.versions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions", "next.state": "edition-confirmed", "test_data": "true", }, } } func ValidUnpublishedEditionData(datasetID, editionID, edition string) bson.M { return bson.M{ "$set": bson.M{ "next.edition": edition, "id": editionID, "next.last_updated": "2017-10-08", // TODO Should be isodate "next.links.dataset.id": datasetID, "next.links.dataset.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "next.links.self.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition, "next.links.versions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions", "next.links.latest_version.id": "2", 
"next.links.latest_version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions/2", "next.state": "edition-confirmed", "test_data": "true", }, } } func validPublishedInstanceData(datasetID, edition, instanceID string, uniqueTimestamp bson.MongoTimestamp) bson.M { return bson.M{ "$set": bson.M{ "alerts": []mongo.Alert{alert}, "dimensions": []mongo.CodeList{dimensionTwo, dimensionThree, dimensionFour}, "downloads.csv.href": cfg.DatasetAPIURL + "/aws/census-2017-1-csv", "downloads.csv.size": "10", "downloads.csv.public": "https://s3-eu-west-1.amazon.com/public/myfile.csv", "downloads.csv.private": "s3://private/myfile.csv", "downloads.csvw.href": cfg.DatasetAPIURL + "/aws/census-2017-1-csv-metadata.json", "downloads.csvw.size": "10", "downloads.csvw.public": "https://s3-eu-west-1.amazon.com/public/myfile.csv-metadata.json", "downloads.csvw.private": "s3://private/myfile.csv-metadata.json", "downloads.xls.href": cfg.DatasetAPIURL + "/aws/census-2017-1-xls", "downloads.xls.size": "24", "downloads.xls.public": "https://s3-eu-west-1.amazon.com/public/myfile.xls", "downloads.xls.private": "s3://private/myfile.xls", "edition": edition, "headers": []string{"v4_0", "time", "time", "uk-only", "geography", "cpi1dim1aggid", "aggregate"}, "id": instanceID, "import_tasks.import_observations.state": "completed", "import_tasks.import_observations.total_inserted_observations": 1000, "latest_changes": []mongo.LatestChange{latestChanges}, "last_updated": "2017-09-08", // TODO Should be isodate "license": "ONS License", "links.job.id": "042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.job.href": cfg.DatasetAPIURL + "/jobs/042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.dataset.id": datasetID, "links.dataset.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "links.dimensions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions/1/dimensions", "links.edition.id": edition, "links.edition.href": cfg.DatasetAPIURL + 
"/datasets/" + datasetID + "/editions/" + edition, "links.self.href": cfg.DatasetAPIURL + "/instances/" + instanceID, "links.spatial.href": "http://ons.gov.uk/geographylist", "links.version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions/1", "links.version.id": "1", "release_date": "2017-12-12", // TODO Should be isodate "state": "published", "temporal": []mongo.TemporalFrequency{temporal}, "total_inserted_observations": 1000, "total_observations": 1000, "unique_timestamp": uniqueTimestamp, "version": 1, "test_data": "true", }, } } func validAssociatedInstanceData(datasetID, edition, instanceID string, uniqueTimestamp bson.MongoTimestamp) bson.M { return bson.M{ "$set": bson.M{ "collection_id": "208064B3-A808-449B-9041-EA3A2F72CFAB", "dimensions": []mongo.CodeList{dimensionTwo, dimensionThree, dimensionFour}, "downloads.csv.href": cfg.DatasetAPIURL + "/aws/census-2017-2-csv", "downloads.csv.size": "10", "downloads.csv.public": "https://s3-eu-west-1.amazon.com/public/myfile.csv", "downloads.csv.private": "s3://private/myfile.csv", "downloads.csvw.href": cfg.DatasetAPIURL + "/aws/census-2017-2-csv-metadata.json", "downloads.csvw.size": "10", "downloads.csvw.public": "https://s3-eu-west-1.amazon.com/public/myfile.csv-metadata.json", "downloads.csvw.private": "s3://private/myfile.csv-metadata.json", "downloads.xls.href": cfg.DatasetAPIURL + "/aws/census-2017-2-xls", "downloads.xls.size": "24", "downloads.xls.public": "https://s3-eu-west-1.amazon.com/public/myfile.xls", "downloads.xls.private": "s3://private/myfile.xls", "edition": edition, "headers": []string{"v4_0", "time", "time", "uk-only", "geography", "cpi1dim1aggid", "aggregate"}, "id": instanceID, "import_tasks.import_observations.total_inserted_observations": 1000, "import_tasks.import_observations.state": "completed", "last_updated": "2017-09-08", // TODO Should be isodate "latest_changes": []mongo.LatestChange{latestChanges}, "license": "ONS license", "links.job.id": 
"042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.job.href": cfg.DatasetAPIURL + "/jobs/042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.dataset.id": datasetID, "links.dataset.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "links.dimensions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions/2/dimensions", "links.edition.id": edition, "links.edition.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition, "links.self.href": cfg.DatasetAPIURL + "/instances/" + instanceID, "links.spatial.href": "http://ons.gov.uk/geographylist", "links.version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions/2", "links.version.id": "2", "release_date": "2017-12-12", // TODO Should be isodate "state": "associated", "temporal": []mongo.TemporalFrequency{temporal}, "total_observations": 1000, "unique_timestamp": uniqueTimestamp, "version": 2, "test_data": "true", }, } } func validEditionConfirmedInstanceData(datasetID, edition, instanceID string, uniqueTimestamp bson.MongoTimestamp) bson.M { return bson.M{ "$set": bson.M{ "dimensions": []mongo.CodeList{dimension}, "downloads.csv.href": cfg.DatasetAPIURL + "/aws/census-2017-2-csv", "downloads.csv.private": "s3://private/myfile.csv", "downloads.csv.size": "10", "downloads.csvw.href": cfg.DatasetAPIURL + "/aws/census-2017-2-csv-metadata.json", "downloads.csvw.private": "s3://private/myfile.csv-metadata.json", "downloads.csvw.size": "10", "downloads.xls.href": cfg.DatasetAPIURL + "/aws/census-2017-2-xls", "downloads.xls.private": "s3://private/myfile.xls", "downloads.xls.size": "24", "edition": edition, "headers": []string{"time", "geography"}, "id": instanceID, "import_tasks.import_observations.state": "completed", "import_tasks.import_observations.total_inserted_observations": 1000, "last_updated": "2017-09-08", // TODO Should be isodate "license": "ONS license", "links.job.id": "042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.job.href": 
cfg.DatasetAPIURL + "/jobs/042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.dataset.id": datasetID, "links.dataset.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "links.dimensions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions/2/dimensions", "links.edition.id": edition, "links.edition.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition, "links.self.href": cfg.DatasetAPIURL + "/instances/" + instanceID, "links.spatial.href": "http://ons.gov.uk/geographylist", "links.version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions/2", "links.version.id": "2", "release_date": "2017-12-12", // TODO Should be isodate "state": "edition-confirmed", "temporal": []mongo.TemporalFrequency{temporal}, "total_observations": 1000, "unique_timestamp": uniqueTimestamp, "version": 2, "test_data": "true", }, } } func editionConfirmedInstanceInvalidFields(datasetID, edition, instanceID string, uniqueTimestamp bson.MongoTimestamp) bson.M { return bson.M{ "$set": bson.M{ "dimensions": []mongo.CodeList{dimension}, "downloads.csv.href": cfg.DatasetAPIURL + "/aws/census-2017-2-csv", "downloads.csv.private": "s3://private/myfile.csv", "downloads.csv.size": "ten", "downloads.csvw.href": cfg.DatasetAPIURL + "/aws/census-2017-2-csv-metadata.json", "downloads.csvw.private": "s3://private/myfile.csv-metadata.json", "downloads.csvw.size": "ten", "downloads.xls.href": cfg.DatasetAPIURL + "/aws/census-2017-2-xls", "downloads.xls.private": "s3://private/myfile.xls", "downloads.xls.size": "twenty four", "edition": edition, "headers": []string{"time", "geography"}, "id": instanceID, "import_tasks.import_observations.state": "completed", "import_tasks.import_observations.total_inserted_observations": 1000, "last_updated": "2017-09-08", // TODO Should be isodate "license": "ONS license", "links.job.id": "042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.job.href": cfg.DatasetAPIURL + 
"/jobs/042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.dataset.id": datasetID, "links.dataset.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "links.dimensions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions/2/dimensions", "links.edition.id": edition, "links.edition.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition, "links.self.href": cfg.DatasetAPIURL + "/instances/" + instanceID, "links.spatial.href": "http://ons.gov.uk/geographylist", "links.version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions/2", "links.version.id": "2", "release_date": "2017-12-12", // TODO Should be isodate "state": "edition-confirmed", "temporal": []mongo.TemporalFrequency{temporal}, "unique_timestamp": uniqueTimestamp, "version": 2, "test_data": "true", "total_observations": 1000, }, } } func editionConfirmedInstanceMissingMandatoryFields(datasetID, edition, instanceID string, uniqueTimestamp bson.MongoTimestamp) bson.M { return bson.M{ "$set": bson.M{ "dimensions": []mongo.CodeList{dimension}, "downloads.csv.url": cfg.DatasetAPIURL + "/aws/census-2017-2-csv", "downloads.csvw.url": cfg.DatasetAPIURL + "/aws/census-2017-2-csv-metadata.json", "downloads.xls.url": cfg.DatasetAPIURL + "/aws/census-2017-2-xls", "edition": edition, "headers": []string{"time", "geography"}, "id": instanceID, "import_tasks.import_observations.state": "completed", "import_tasks.import_observations.total_inserted_observations": 1000, "last_updated": "2017-09-08", // TODO Should be isodate "license": "ONS license", "links.job.id": "042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.job.href": cfg.DatasetAPIURL + "/jobs/042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.dataset.id": datasetID, "links.dataset.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "links.dimensions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions/2/dimensions", "links.edition.id": edition, 
"links.edition.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition, "links.self.href": cfg.DatasetAPIURL + "/instances/" + instanceID, "links.spatial.href": "http://ons.gov.uk/geographylist", "links.version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/" + edition + "/versions/2", "links.version.id": "2", "state": "edition-confirmed", "temporal": []mongo.TemporalFrequency{temporal}, "total_observations": 1000, "unique_timestamp": uniqueTimestamp, "version": 2, "test_data": "true", }, } } func validCompletedInstanceData(datasetID, edition, instanceID string, uniqueTimestamp bson.MongoTimestamp) bson.M { return bson.M{ "$set": bson.M{ "collection_id": "208064B3-A808-449B-9041-EA3A2F72CFAB", "downloads.csv.href": cfg.DatasetAPIURL + "/aws/census-2017-2-csv", "downloads.csv.private": "s3://private/myfile.csv", "downloads.csv.size": "10", "downloads.csvw.href": cfg.DatasetAPIURL + "/aws/census-2017-2-csv-metadata.json", "downloads.csvw.private": "s3://private/myfile.csv-metadata.json", "downloads.csvw.size": "10", "downloads.xls.href": cfg.DatasetAPIURL + "/aws/census-2017-2-xls", "downloads.xls.private": "s3://private/myfile.xls", "downloads.xls.size": "24", "edition": edition, "headers": []string{"time", "geography"}, "id": instanceID, "import_tasks.import_observations.state": "completed", "import_tasks.import_observations.total_inserted_observations": 1000, "last_updated": "2017-09-08", // TODO Should be isodate "latest_changes": []mongo.LatestChange{latestChanges}, "license": "ONS license", "links.job.id": "042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.job.href": cfg.DatasetAPIURL + "/jobs/042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.dataset.id": datasetID, "links.dataset.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "links.dimensions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/2017/versions/2/dimensions", "links.edition.id": edition, "links.edition.href": cfg.DatasetAPIURL + "/datasets/" + 
datasetID + "/editions/2017", "links.self.href": cfg.DatasetAPIURL + "/instances/" + instanceID, "release_date": "2017-12-12", // TODO Should be isodate "state": "completed", "total_observations": 1000, "test_data": "true", "unique_timestamp": uniqueTimestamp, }, } } var hierarchy = mongo.BuildHierarchyTask{ State: "created", DimensionName: "geography", CodeListID: "K02000001", } var searchIndex = mongo.BuildSearchIndexTask{ State: "created", DimensionName: "geography", } func validSubmittedInstanceData(datasetID, edition, instanceID, state string, uniqueTimestamp bson.MongoTimestamp) bson.M { return bson.M{ "$set": bson.M{ "collection_id": "208064B3-A808-449B-9041-EA3A2F72CFAB", "dimensions": []mongo.CodeList{dimensionFour}, "edition": edition, "headers": []string{"time", "geography"}, "id": instanceID, "import_tasks.import_observations.state": "created", "import_tasks.import_observations.total_inserted_observations": 1000, "import_tasks.build_hierarchies": []mongo.BuildHierarchyTask{hierarchy}, "import_tasks.build_search_indexes": []mongo.BuildSearchIndexTask{searchIndex}, "last_updated": "2017-09-08", // TODO Should be isodate "latest_changes": []mongo.LatestChange{latestChanges}, "license": "ONS license", "links.job.id": "042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.job.href": cfg.DatasetAPIURL + "/jobs/042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.dataset.id": datasetID, "links.dataset.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "links.dimensions.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/2017/versions/2/dimensions", "links.edition.id": edition, "links.edition.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/2017", "links.self.href": cfg.DatasetAPIURL + "/instances/" + instanceID, "links.version.href": cfg.DatasetAPIURL + "/datasets/" + datasetID + "/editions/2017/versions/2", "links.version.id": "2", "release_date": "2017-12-12", // TODO Should be isodate "state": state, "total_observations": 1000, "test_data": 
"true", "unique_timestamp": uniqueTimestamp, }, } } func validCreatedInstanceData(datasetID, edition, instanceID, state string, uniqueTimestamp bson.MongoTimestamp) bson.M { return bson.M{ "$set": bson.M{ "edition": edition, "headers": []string{"time", "geography"}, "id": instanceID, "last_updated": "2017-09-08", // TODO Should be isodate "links.job.id": "042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.job.href": cfg.DatasetAPIURL + "/jobs/042e216a-7822-4fa0-a3d6-e3f5248ffc35", "links.dataset.id": datasetID, "links.dataset.href": cfg.DatasetAPIURL + "/datasets/" + datasetID, "links.self.href": cfg.DatasetAPIURL + "/instances/" + instanceID, "state": state, "total_observations": 1000, "test_data": "true", "unique_timestamp": uniqueTimestamp, }, } } var validPOSTCreateDatasetJSON = ` { "collection_id": "108064B3-A808-449B-9041-EA3A2F72CFAA", "contacts": [ { "email": "<EMAIL>", "name": "<NAME>", "telephone": "+44 (0)1633 123456" } ], "description": "Comprehensive database of time series covering measures of inflation data including CPIH, CPI and RPI.", "keywords": [ "cpi" ], "license": "ONS license", "links": { "access_rights": { "href": "http://ons.gov.uk/accessrights" } }, "methodologies": [ { "description": "Consumer price inflation is the rate at which the prices of the goods and services bought by households rise or fall, and is estimated by using consumer price indices.", "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/qmis/consumerpriceinflationqmi", "title": "Consumer Price Inflation (includes all 3 indices – CPIH, CPI and RPI)" } ], "national_statistic": true, "next_release": "17 October 2017", "publications": [ { "description": "Price indices, percentage changes and weights for the different measures of consumer price inflation.", "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/bulletins/consumerpriceinflation/aug2017", "title": "UK consumer price inflation: August 2017" } ], "publisher": { "name": "<NAME>", "type": 
"publisher", "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/bulletins/consumerpriceinflation/aug2017" }, "qmi": { "description": "Consumer price inflation is the rate at which the prices of goods and services bought by households rise and fall", "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/qmis/consumerpriceinflationqmi", "title": "Consumer Price Inflation (includes all 3 indices – CPIH, CPI and RPI)" }, "related_datasets": [ { "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/datasets/consumerpriceindices", "title": "Consumer Price Inflation time series dataset" } ], "release_frequency": "Monthly", "state": "created", "theme": "Goods and services", "title": "CPI", "unit_of_measure": "Pounds Sterling", "uri": "https://www.ons.gov.uk/economy/inflationandpriceindices/datasets/consumerpriceinflation" }` var validPUTUpdateDatasetJSON = `{ "collection_id": "308064B3-A808-449B-9041-EA3A2F72CFAC", "contacts": [ { "email": "<EMAIL>", "name": "<NAME>", "telephone": "+44 (0)1833 456123" } ], "description": "Producer Price Indices (PPIs) are a series of economic indicators that measure the price movement of goods bought and sold by UK manufacturers", "keywords": [ "rpi" ], "methodologies": [ { "description": "The Producer Price Index (PPI) is a monthly survey that measures the price changes of goods bought and sold by UK manufacturers", "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/qmis/producerpriceindicesqmi", "title": "Producer price indices QMI" } ], "national_statistic": false, "next_release": "18 September 2017", "publications": [ { "description": "Changes in the prices of goods bought and sold by UK manufacturers including price indices of materials and fuels purchased (input prices) and factory gate prices (output prices)", "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/bulletins/producerpriceinflation/september2017", "title": "Producer price inflation, UK: September 2017" } ], 
"publisher": { "name": "Test Automation Engineer", "type": "publisher", "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/bulletins/producerpriceinflation/september2017" }, "qmi": { "description": "PPI provides an important measure of inflation", "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/qmis/producerpriceindicesqmi", "title": "The Producer Price Index (PPI) is a monthly survey that measures the price changes" }, "related_datasets": [ { "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/datasets/producerpriceindex", "title": "Producer Price Index time series dataset" } ], "release_frequency": "Quarterly", "state": "associated", "theme": "Price movement of goods", "title": "RPI", "unit_of_measure": "Pounds", "uri": "https://www.ons.gov.uk/economy/inflationandpriceindices/datasets/producerpriceindex" }` var validPUTUpdateDatasetWithoutStateJSON = `{ "collection_id": "308064B3-A808-449B-9041-EA3A2F72CFAC", "contacts": [ { "email": "<EMAIL>", "name": "<NAME>", "telephone": "+44 (0)1833 456123" } ], "description": "Producer Price Indices (PPIs) are a series of economic indicators that measure the price movement of goods bought and sold by UK manufacturers", "keywords": [ "rpi" ], "methodologies": [ { "description": "The Producer Price Index (PPI) is a monthly survey that measures the price changes of goods bought and sold by UK manufacturers", "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/qmis/producerpriceindicesqmi", "title": "Producer price indices QMI" } ], "national_statistic": false, "next_release": "18 September 2017", "publications": [ { "description": "Changes in the prices of goods bought and sold by UK manufacturers including price indices of materials and fuels purchased (input prices) and factory gate prices (output prices)", "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/bulletins/producerpriceinflation/september2017", "title": "Producer price inflation, UK: September 
2017" } ], "publisher": { "name": "Test Automation Engineer", "type": "publisher", "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/bulletins/producerpriceinflation/september2017" }, "qmi": { "description": "PPI provides an important measure of inflation", "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/qmis/producerpriceindicesqmi", "title": "The Producer Price Index (PPI) is a monthly survey that measures the price changes" }, "related_datasets": [ { "href": "https://www.ons.gov.uk/economy/inflationandpriceindices/datasets/producerpriceindex", "title": "Producer Price Index time series dataset" } ], "release_frequency": "Quarterly", "theme": "Price movement of goods", "title": "RPI", "unit_of_measure": "Pounds", "uri": "https://www.ons.gov.uk/economy/inflationandpriceindices/datasets/producerpriceindex" }` var validPOSTCreateInstanceJSON = ` { "links": { "job": { "id": "042e216a-7822-4fa0-a3d6-e3f5248ffc35", "href": "http://localhost:21800/jobs/042e216a-7822-4fa0-a3d6-e3f5248ffc35" } } }` var validPOSTCreateFullInstanceJSON = ` { "links": { "job": { "id": "042e216a-7822-4fa0-a3d6-e3f5248ffc35", "href": "http://localhost:21800/jobs/042e216a-7822-4fa0-a3d6-e3f5248ffc35" }, "dataset": { "id": "34B13D18-B4D8-4227-9820-492B2971E221", "href": "http://localhost:21800/datasets/34B13D18-B4D8-4227-9820-492B2971E221" } }, "dimensions": [ { "description": "The age ranging from 16 to 75+", "href": "http://localhost:22400/code-lists/43513D18-B4D8-4227-9820-492B2971E7T5", "id": "43513D18-B4D8-4227-9820-492B2971E7T5", "name": "age" } ] }` var validPUTUpdateInstanceJSON = ` { "state": "edition-confirmed" }` var validPUTCompletedInstanceJSON = ` { "state": "completed" }` var validPUTFullInstanceJSON = ` { "alerts": [ { "date": "2017-04-05", "description": "All data entries (observations) for Plymouth have been updated", "type": "Correction" } ], "dimensions": [ { "description": "The age ranging from 16 to 75+", "href": 
"http://localhost:22400//code-lists/43513D18-B4D8-4227-9820-492B2971E7T5", "id": "43513D18-B4D8-4227-9820-492B2971E7T5", "name": "age" } ], "latest_changes": [ { "description": "change to the period frequency from quarterly to monthly", "name": "Changes to the period frequency", "type": "Summary of Changes" } ], "links": { "spatial": { "href": "http://ons.gov.uk/geography-list" } }, "release_date": "2017-11-11", "temporal": [ { "start_date": "2014-10-10", "end_date": "2016-10-10", "frequency": "monthly" } ] }` var validPUTEditionConfirmedInstanceJSON = ` { "alerts": [ { "date": "2017-04-05", "description": "All data entries (observations) for Plymouth have been updated", "type": "Correction" } ], "dimensions": [ { "description": "The age ranging from 16 to 75+", "href": "http://localhost:22400//code-lists/43513D18-B4D8-4227-9820-492B2971E7T5", "id": "43513D18-B4D8-4227-9820-492B2971E7T5", "name": "age" } ], "latest_changes": [ { "description": "change to the period frequency from quarterly to monthly", "name": "Changes to the period frequency", "type": "Summary of Changes" } ], "links": { "spatial": { "href": "http://ons.gov.uk/geography-list" } }, "release_date": "2017-11-11", "state": "edition-confirmed", "temporal": [ { "start_date": "2014-10-10", "end_date": "2016-10-10", "frequency": "monthly" } ], "total_inserted_observations": 1000 }` var validPUTUpdateVersionMetaDataJSON = ` { "alerts": [ { "date": "2017-04-05", "description": "All data entries (observations) for Plymouth have been updated", "type": "Correction" } ], "usage_notes": [ { "title": "Coefficients of variation", "note": "CV" }, { "title": "Data Markings", "note":"x - value not reliable" } ], "latest_changes": [ { "description": "change to the period frequency from quarterly to monthly", "name": "Changes to the period frequency", "type": "Summary of Changes" } ], "links": { "spatial": { "href": "http://ons.gov.uk/new-geography-list" }, "self": { "href": "http://bogus/bad-link" } }, "release_date": 
"2018-11-11", "temporal": [ { "start_date": "2014-11-11", "end_date": "2017-11-11", "frequency": "monthly" } ] }` var validPUTUpdateVersionAlertsJSON = ` { "alerts": [ { "date": "2017-04-05", "description": "All data entries (observations) for Plymouth have been updated", "type": "Correction" } ], }` var validPUTUpdateVersionToAssociatedJSON = ` { "state": "associated", "collection_id": "45454545" }` var validPUTUpdateVersionFromAssociatedToEditionConfirmedJSON = ` { "collection_id": "" }` var validPUTUpdateVersionToPublishedWithCollectionIDJSON = ` { "collection_id": "33333333", "state": "published" }` var validPUTUpdateVersionToPublishedJSON = ` { "state": "published" }` var invalidPOSTCreateInstanceJSON = ` { "links": { "dataset": { "id": "34B13D18-B4D8-4227-9820-492B2971E221", "href": "http://localhost:21800/datasets/34B13D18-B4D8-4227-9820-492B2971E221" } } }` var validPOSTAgeDimensionJSON = ` { "code": "ABC123DEF456", "code_list": "age-list", "dimension": "age", "label": "25", "option": "25" }` var invalidPOSTDimensionJSONMissingDimension = ` { "code": "ABC123DEF456", "code_list": "age-list", "label": "25", "option": "25" }` var invalidPOSTDimensionJSONMissingOptionAndCodelist = ` { "code": "ABC123DEF456", "dimension": "age", "label": "25" }` var validPUTGeographyDimensionJSON = ` { "label": "geo-sites", "description": "The sites in which this dataset spans" }` var validPUTAgeDimensionJSON = ` { "label": "age", "description": "age ranging from 1 to 75" }` var validObservationImportTaskJSON = ` { "import_observations": { "state": "completed" } }` var validHierarchyImportTaskJSON = ` { "build_hierarchies": [ { "state": "completed", "dimension_name": "geography" } ] }` var validSearchIndexImportTaskJSON = ` { "build_search_indexes": [ { "state": "completed", "dimension_name": "geography" } ] }` var validMultipleImportTaskJSON = ` { "import_observations": { "state": "completed" }, "build_hierarchies": [ { "state": "completed", "dimension_name": "geography" } ], 
"build_search_indexes": [ { "state": "completed", "dimension_name": "geography" } ] }` func createValidPOSTEventJSON(time time.Time) ([]byte, error) { event := &datasetAPI.Event{ Message: "unable to add observation to neo4j", MessageOffset: "5", Time: &time, Type: "error", } return json.Marshal(event) } func createInvalidPOSTEventJSONWithoutMessage(time time.Time) ([]byte, error) { event := &datasetAPI.Event{ MessageOffset: "5", Time: &time, Type: "error", } return json.Marshal(event) } func createInvalidPOSTEventJSONWithoutMessageOffset(time time.Time) ([]byte, error) { event := &datasetAPI.Event{ Message: "unable to add observation to neo4j", Time: &time, Type: "error", } return json.Marshal(event) } func createInvalidPOSTEventJSONWithoutTime() ([]byte, error) { event := &datasetAPI.Event{ Message: "unable to add observation to neo4j", MessageOffset: "5", Type: "error", } return json.Marshal(event) } func createInvalidPOSTEventJSONWithoutType(time time.Time) ([]byte, error) { event := &datasetAPI.Event{ Message: "unable to add observation to neo4j", MessageOffset: "5", Time: &time, } return json.Marshal(event) }
publishing/datasetAPI/json.go
0.515864
0.504272
json.go
starcoder
package untyped

import (
	"fmt"

	"github.com/liquidata-inc/dolt/go/libraries/doltcore/row"
	"github.com/liquidata-inc/dolt/go/libraries/doltcore/schema"
	"github.com/liquidata-inc/dolt/go/libraries/doltcore/table/typed"
	"github.com/liquidata-inc/dolt/go/store/types"
)

// NewUntypedSchema takes an array of field names and returns a schema where the fields use the provided names, are of
// kind types.StringKind, and are not required. Tags are assigned sequentially starting at 0.
func NewUntypedSchema(colNames ...string) (map[string]uint64, schema.Schema) {
	return NewUntypedSchemaWithFirstTag(0, colNames...)
}

// NewUntypedSchemaWithFirstTag behaves like NewUntypedSchema but assigns tags sequentially starting at firstTag.
// It returns both the name-to-tag mapping and the resulting schema.
func NewUntypedSchemaWithFirstTag(firstTag uint64, colNames ...string) (map[string]uint64, schema.Schema) {
	cols := make([]schema.Column, len(colNames))
	nameToTag := make(map[string]uint64, len(colNames))

	for i, name := range colNames {
		tag := uint64(i) + firstTag

		// We need at least one primary key col, so choose the first one
		isPk := i == 0
		cols[i] = schema.NewColumn(name, tag, types.StringKind, isPk)
		nameToTag[name] = tag
	}

	colColl, _ := schema.NewColCollection(cols...)
	sch := schema.SchemaFromCols(colColl)

	return nameToTag, sch
}

// NewRowFromStrings is a utility method that takes a schema for an untyped row, and a slice of strings and uses the strings
// as the field values for the row by converting them to noms type.String. The i-th string is assigned to tag i; if the
// schema has no column for some tag an error is returned.
func NewRowFromStrings(nbf *types.NomsBinFormat, sch schema.Schema, valStrs []string) (row.Row, error) {
	allCols := sch.GetAllCols()

	taggedVals := make(row.TaggedValues)
	for i, valStr := range valStrs {
		tag := uint64(i)
		_, ok := allCols.GetByTag(tag)
		if !ok {
			// Previously this was panic("") — an empty panic in a function that already
			// returns an error. Surface a descriptive error instead.
			return nil, fmt.Errorf("schema has no column for tag %d (value %q)", tag, valStr)
		}

		taggedVals[tag] = types.String(valStr)
	}

	return row.New(nbf, sch, taggedVals)
}

// NewRowFromTaggedStrings takes an untyped schema and a map of column tag to string value and returns a row.
func NewRowFromTaggedStrings(nbf *types.NomsBinFormat, sch schema.Schema, taggedStrs map[uint64]string) (row.Row, error) {
	taggedVals := make(row.TaggedValues)
	for tag, valStr := range taggedStrs {
		taggedVals[tag] = types.String(valStr)
	}

	return row.New(nbf, sch, taggedVals)
}

// UntypeSchema takes a schema and returns a schema with the same columns, but with the types of each of those columns
// as types.StringKind.
func UntypeSchema(sch schema.Schema) (schema.Schema, error) {
	var cols []schema.Column
	err := sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
		col.Kind = types.StringKind
		cols = append(cols, col)
		return false, nil
	})

	if err != nil {
		return nil, err
	}

	colColl, err := schema.NewColCollection(cols...)

	if err != nil {
		return nil, err
	}

	return schema.SchemaFromCols(colColl), nil
}

// UnkeySchema takes a schema and returns a schema with the same columns and types, but stripped of constraints and
// primary keys. Meant for use in result sets.
func UnkeySchema(sch schema.Schema) (schema.Schema, error) {
	var cols []schema.Column
	err := sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
		col.IsPartOfPK = false
		col.Constraints = nil
		cols = append(cols, col)
		return false, nil
	})

	if err != nil {
		return nil, err
	}

	colColl, err := schema.NewColCollection(cols...)

	if err != nil {
		return nil, err
	}

	return schema.UnkeyedSchemaFromCols(colColl), nil
}

// UntypeUnkeySchema takes a schema and returns a schema with the same columns, but stripped of constraints and primary
// keys and using only string types. Meant for displaying output and tests.
func UntypeUnkeySchema(sch schema.Schema) (schema.Schema, error) {
	var cols []schema.Column
	err := sch.GetAllCols().Iter(func(tag uint64, col schema.Column) (stop bool, err error) {
		col.Kind = types.StringKind
		col.IsPartOfPK = false
		col.Constraints = nil
		cols = append(cols, col)
		return false, nil
	})

	if err != nil {
		return nil, err
	}

	colColl, err := schema.NewColCollection(cols...)

	if err != nil {
		return nil, err
	}

	return schema.UnkeyedSchemaFromCols(colColl), nil
}

// UntypedSchemaUnion takes an arbitrary number of schemas and provides the union of all of their key and non-key columns.
// The columns will all be of type types.StringKind and IsPartOfPK will be false for every column, and all of the
// columns will be in the schemas non-key ColumnCollection.
func UntypedSchemaUnion(schemas ...schema.Schema) (schema.Schema, error) {
	unionSch, err := typed.TypedSchemaUnion(schemas...)
	if err != nil {
		return nil, err
	}

	return UntypeSchema(unionSch)
}
go/libraries/doltcore/table/untyped/untyped_rows.go
0.680772
0.443058
untyped_rows.go
starcoder
package ggrenderer

import (
	"unsafe"

	"github.com/EngoEngine/glm"
	"github.com/oyberntzen/gogame/ggdebug"
)

// Batch-renderer limits: quads submitted between BeginScene/EndScene are
// accumulated into one vertex buffer and flushed in a single draw call.
const (
	maxQuads        uint32 = 10_000
	maxVertices     uint32 = maxQuads * 4
	maxIndices      uint32 = maxQuads * 6
	maxTextureSlots uint32 = 32
)

var (
	quadVertexArray  VertexArray
	quadVertexBuffer VertexBuffer
	textureShader    Shader
	whiteTexture     Texture // 1x1 white texture in slot 0, used for untextured quads

	quadIndexCount uint32 = 0
	quadVertices   []quadVertex

	textureSlots     [maxTextureSlots]Texture
	textureSlotIndex uint32 = 1 // slot 0 is reserved for whiteTexture

	quadVertexPositions [4]glm.Vec4 // unit quad corners, transformed per draw

	stats Statistics
)

// Statistics accumulates per-frame render counters (reset via Renderer2DResetStats).
type Statistics struct {
	DrawCalls uint32
	QuadCount uint32
}

// GetTotalVertexCount returns the number of vertices submitted (4 per quad).
func (stats Statistics) GetTotalVertexCount() uint32 {
	return stats.QuadCount * 4
}

// GetTotalIndexCount returns the number of indices submitted (6 per quad).
func (stats Statistics) GetTotalIndexCount() uint32 {
	return stats.QuadCount * 6
}

// Quad2D describes a single quad to draw: position/size/rotation plus color,
// optional texture and tiling factor.
type Quad2D struct {
	Position     *glm.Vec2
	Z            float32
	Size         *glm.Vec2
	Rotation     float32
	Color        *glm.Vec4
	Texture      Texture
	TilingFactor float32
}

// NewQuad2D returns a Quad2D with sensible defaults: unit size, white color,
// no rotation, no texture, tiling factor 1.
func NewQuad2D() *Quad2D {
	return &Quad2D{
		Position:     &glm.Vec2{0, 0},
		Z:            0,
		Size:         &glm.Vec2{1, 1},
		Rotation:     0,
		Color:        &glm.Vec4{1, 1, 1, 1},
		Texture:      nil,
		TilingFactor: 1,
	}
}

// quadVertex is the per-vertex layout uploaded to the vertex buffer; it must
// match the BufferLayout declared in Renderer2DInit.
type quadVertex struct {
	Position     glm.Vec3
	Color        glm.Vec4
	TexCoord     glm.Vec2
	TexIndex     float32
	TilingFactor float32
}

// Renderer2DInit allocates the GPU resources (vertex/index buffers, shader,
// white texture) used by the 2D batch renderer. Must be called once before
// any scene is rendered.
func Renderer2DInit() {
	defer ggdebug.Stop(ggdebug.Start())

	// FIX: capacity was maxQuads, but up to maxVertices (= maxQuads*4) vertices
	// are appended per batch, which forced repeated slice growth.
	quadVertices = make([]quadVertex, 0, maxVertices)

	quadVertexBuffer = NewEmptyVertexBuffer(maxVertices * uint32(unsafe.Sizeof(quadVertex{})))
	quadVertexBuffer.SetLayout(NewBufferLayout([]*BufferElement{
		NewBufferElement(ShaderDataTypeFloat3, "a_Position", false),
		NewBufferElement(ShaderDataTypeFloat4, "a_Color", false),
		NewBufferElement(ShaderDataTypeFloat2, "a_TexCoord", false),
		NewBufferElement(ShaderDataTypeFloat, "a_TexIndex", false),
		NewBufferElement(ShaderDataTypeFloat, "a_TilingFactor", false),
	}))

	// Precompute the index pattern: two triangles (0,1,2 / 2,3,0) per quad.
	indices := make([]uint32, maxIndices)
	var offset uint32 = 0
	for i := 0; i < int(maxIndices); i += 6 {
		indices[i+0] = offset + 0
		indices[i+1] = offset + 1
		indices[i+2] = offset + 2
		indices[i+3] = offset + 2
		indices[i+4] = offset + 3
		indices[i+5] = offset + 0
		offset += 4
	}
	indexBuffer := NewIndexBuffer(indices)

	quadVertexArray = NewVertexArray()
	quadVertexArray.AddVertexBuffer(quadVertexBuffer)
	quadVertexArray.SetIndexBuffer(indexBuffer)

	// Slot 0: a 1x1 white texture so untextured quads sample pure color.
	whiteTexture = NewTexture2DEmpty(1, 1)
	whiteTexture.SetData(unsafe.Pointer(&[4]uint8{255, 255, 255, 255}))
	textureSlots[0] = whiteTexture

	vertex := `
	#version 330 core

	layout(location = 0) in vec3 a_Position;
	layout(location = 1) in vec4 a_Color;
	layout(location = 2) in vec2 a_TexCoord;
	layout(location = 3) in float a_TexIndex;
	layout(location = 4) in float a_TilingFactor;

	uniform mat4 u_ViewProjection;

	out vec4 v_Color;
	out vec2 v_TexCoord;
	out float v_TexIndex;
	out float v_TilingFactor;

	void main()
	{
		v_Color = a_Color;
		v_TexCoord = a_TexCoord;
		v_TexIndex = a_TexIndex;
		v_TilingFactor = a_TilingFactor;
		gl_Position = u_ViewProjection * vec4(a_Position, 1.0);
	}
	`

	fragment := `
	#version 330 core

	layout(location = 0) out vec4 color;

	uniform sampler2D u_Textures[32];

	in vec4 v_Color;
	in vec2 v_TexCoord;
	in float v_TexIndex;
	in float v_TilingFactor;

	void main()
	{
		color = texture(u_Textures[int(v_TexIndex)], v_TexCoord * v_TilingFactor) * v_Color;
	}
	`

	textureShader = NewShaderFromSrc("texture", vertex, fragment)

	// Bind sampler uniforms to slots 0..31.
	samplers := make([]int32, maxTextureSlots)
	for i := 0; i < len(samplers); i++ {
		samplers[i] = int32(i)
	}
	textureShader.Bind()
	textureShader.(*OpenGLShader).UploadUniformIntArray("u_Textures", samplers)

	quadVertexPositions[0] = glm.Vec4{-0.5, -0.5, 0, 1}
	quadVertexPositions[1] = glm.Vec4{0.5, -0.5, 0, 1}
	quadVertexPositions[2] = glm.Vec4{0.5, 0.5, 0, 1}
	quadVertexPositions[3] = glm.Vec4{-0.5, 0.5, 0, 1}
}

// Renderer2DShutdown releases the GPU resources created by Renderer2DInit.
func Renderer2DShutdown() {
	defer ggdebug.Stop(ggdebug.Start())
	quadVertexArray.Delete()
}

// Renderer2DBeginScene starts a new batch for the given camera: uploads the
// view-projection matrix and clears the accumulated vertex data.
func Renderer2DBeginScene(camera *OrthographicCamera) {
	defer ggdebug.Stop(ggdebug.Start())

	textureShader.Bind()
	textureShader.(*OpenGLShader).UploadUniformMat4("u_ViewProjection", camera.GetViewProjectionMatrix())
	textureShader.(*OpenGLShader).UploadUniformInt("u_Texture", 0)

	reset()
}

// Renderer2DEndScene uploads the accumulated vertices and issues the draw call.
func Renderer2DEndScene() {
	defer ggdebug.Stop(ggdebug.Start())

	quadVertexBuffer.SetData(quadVertices, uint32(unsafe.Sizeof(quadVertex{}))*uint32(len(quadVertices)))
	flush()
}

// Renderer2DDrawQuad appends one quad to the current batch, flushing first if
// the index buffer or the texture-slot table is full.
func Renderer2DDrawQuad(quad *Quad2D) {
	defer ggdebug.Stop(ggdebug.Start())

	if quadIndexCount >= maxIndices {
		Renderer2DEndScene()
		reset()
	}

	// Resolve the quad's texture to a slot index; 0 means "white texture".
	var textureIndex uint32 = 0
	if quad.Texture != nil {
		for i := 0; i < int(textureSlotIndex); i++ {
			if textureSlots[i] == quad.Texture {
				textureIndex = uint32(i)
				break
			}
		}
		if textureIndex == 0 {
			// FIX: the 33rd distinct texture previously wrote textureSlots[32]
			// and panicked (index out of range). Flush the batch to free slots.
			if textureSlotIndex >= maxTextureSlots {
				Renderer2DEndScene()
				reset()
			}
			textureIndex = textureSlotIndex
			textureSlots[textureIndex] = quad.Texture
			textureSlotIndex++
		}
	}

	// Build the model transform: translate, scale, then optional Z rotation.
	transform := glm.Translate3D(quad.Position.X(), quad.Position.Y(), quad.Z)
	scale := glm.Scale3D(quad.Size.X(), quad.Size.Y(), 1)
	transform.Mul4With(&scale)
	if quad.Rotation != 0 {
		rotation := glm.HomogRotate3DZ(quad.Rotation)
		transform.Mul4With(&rotation)
	}

	position := transform.Mul4x1(&quadVertexPositions[0])
	quadVertices = append(quadVertices, quadVertex{
		Position:     glm.Vec3{position.X(), position.Y(), quad.Z},
		Color:        *quad.Color,
		TexCoord:     glm.Vec2{0, 0},
		TexIndex:     float32(textureIndex),
		TilingFactor: quad.TilingFactor,
	})

	position = transform.Mul4x1(&quadVertexPositions[1])
	quadVertices = append(quadVertices, quadVertex{
		Position:     glm.Vec3{position.X(), position.Y(), quad.Z},
		Color:        *quad.Color,
		TexCoord:     glm.Vec2{1, 0},
		TexIndex:     float32(textureIndex),
		TilingFactor: quad.TilingFactor,
	})

	position = transform.Mul4x1(&quadVertexPositions[2])
	quadVertices = append(quadVertices, quadVertex{
		Position:     glm.Vec3{position.X(), position.Y(), quad.Z},
		Color:        *quad.Color,
		TexCoord:     glm.Vec2{1, 1},
		TexIndex:     float32(textureIndex),
		TilingFactor: quad.TilingFactor,
	})

	position = transform.Mul4x1(&quadVertexPositions[3])
	quadVertices = append(quadVertices, quadVertex{
		Position:     glm.Vec3{position.X(), position.Y(), quad.Z},
		Color:        *quad.Color,
		TexCoord:     glm.Vec2{0, 1},
		TexIndex:     float32(textureIndex),
		TilingFactor: quad.TilingFactor,
	})

	quadIndexCount += 6
	stats.QuadCount++
}

// Renderer2DResetStats zeroes the per-frame counters.
func Renderer2DResetStats() {
	stats.DrawCalls = 0
	stats.QuadCount = 0
}

// Renderer2DStats returns a copy of the current counters.
func Renderer2DStats() Statistics {
	return stats
}

// flush binds every in-use texture slot and issues the indexed draw call.
func flush() {
	var i uint32
	for i = 0; i < textureSlotIndex; i++ {
		textureSlots[i].Bind(i)
	}
	RenderCommandDrawIndexed(quadVertexArray, quadIndexCount)
	stats.DrawCalls++
}

// reset clears the batch state for the next accumulation pass, keeping the
// vertex slice's backing storage and the white texture in slot 0.
func reset() {
	quadVertices = quadVertices[0:0]
	quadIndexCount = 0
	textureSlotIndex = 1
}
ggrenderer/renderer2D.go
0.588416
0.406096
renderer2D.go
starcoder
package phys

import (
	"fmt"
	"math"
)

// Point is a location in traditional Cartesian space.
// X is the primary axis, Y is the secondary axis.
// X>0 => right
// X<0 => left
// Y>0 => up
// Y<0 => down
type Point struct {
	X Meters
	Y Meters
}

// String renders the point for debugging.
func (p Point) String() string {
	return fmt.Sprintf("Point{X: %v, Y: %v}", p.X, p.Y)
}

// Pose couples a location with an orientation.
// Theta==0 => facing primary axis (eg +X, or right)
// Theta==pi/2 => facing secondary axis (eg +Y, or up)
// Theta==pi => facing away from primary axis (eg -X, or left)
// Theta==3*pi/2 => facing away from secondary axis (eg -Y, or down)
// Theta==-pi/2 => same as (3*pi/2)
type Pose struct {
	Point
	Theta Radians
}

// String renders the pose for debugging.
func (p Pose) String() string {
	return fmt.Sprintf("Pose{X: %v, Y: %v, Theta: %v}", p.X, p.Y, p.Theta)
}

// Dist returns the Cartesian (Euclidean) distance between two points.
func Dist(p1, p2 Point) Meters {
	deltaX := p1.X - p2.X
	deltaY := p1.Y - p2.Y
	sumSquares := (deltaX * deltaX) + (deltaY * deltaY)
	return Meters(math.Sqrt(float64(sumSquares)))
}

// AdvancePose advances by p2, starting from Pose p1.
// Specifically:
//  1. X/Y translation in the direction of p1.Theta
//  2. Rotation by p2.Theta
func (p1 Pose) AdvancePose(p2 Pose) Pose {
	// Pretend p1 sits at the origin: rotate p2's displacement into
	// p1's heading via polar coordinates.
	polar := Point{X: p2.X, Y: p2.Y}.ToPolarPoint()
	polar.A = NormalizeRadians(p1.Theta + polar.A)
	moved := polar.ToPoint()

	// Re-apply p1's original displacement.
	moved.X += p1.X
	moved.Y += p1.Y

	// The resulting heading is the sum of both orientations, normalized.
	return Pose{
		Point: Point{X: moved.X, Y: moved.Y},
		Theta: NormalizeRadians(p1.Theta + p2.Theta),
	}
}

// RelativeTo expresses pose p1 relative to pose p2 frame-of-reference.
func (p1 Pose) RelativeTo(p2 Pose) Pose {
	// Shift so p2 sits at the origin.
	shifted := Point{X: p1.X - p2.X, Y: p1.Y - p2.Y}

	// Undo p2's rotation about the origin.
	polar := shifted.ToPolarPoint()
	polar.A = NormalizeRadians(polar.A - p2.Theta)
	rotated := polar.ToPoint()

	// Express the heading relative to p2's heading, normalized.
	return Pose{
		Point: rotated,
		Theta: NormalizeRadians(p1.Theta - p2.Theta),
	}
}

// PolarPoint is a polar representation of a point, ie radius + angle.
type PolarPoint struct {
	R Meters
	A Radians
}

// String renders the polar point for debugging.
func (pp PolarPoint) String() string {
	return fmt.Sprintf("PolarPoint{R: %v, A: %v}", pp.R, pp.A)
}

// ToPolarPoint converts a Cartesian Point to its PolarPoint representation.
func (p Point) ToPolarPoint() PolarPoint {
	radius := Meters(math.Sqrt(float64((p.X * p.X) + (p.Y * p.Y))))
	angle := Radians(math.Atan2(float64(p.Y), float64(p.X)))
	// TODO: Is normalizing here the right thing to do?
	angle = NormalizeRadians(angle)
	return PolarPoint{R: radius, A: angle}
}

// ToPoint converts a PolarPoint to its Cartesian Point representation.
func (pp PolarPoint) ToPoint() Point {
	// TODO: Is normalizing the right thing to do? Result is in [-Pi,+Pi].
	pp.A = NormalizeRadians(pp.A)
	return Point{
		X: pp.R * Meters(math.Cos(float64(pp.A))),
		Y: pp.R * Meters(math.Sin(float64(pp.A))),
	}
}
goverdrive/phys/coord.go
0.756897
0.451085
coord.go
starcoder
package animagi

import (
	"errors"
	"reflect"
)

const (
	dstError                  = "dst must be settable"
	unsupportedTransformation = "could not transform to dst"
)

// typeDescription records the static type and value of a single source field,
// keyed elsewhere by the field's dotted path within the source struct.
type typeDescription struct {
	FieldType  reflect.Type
	FieldValue reflect.Value
}

/*
Transform will map the data from src into dst
by calculating the fields most similar counterpart
and copying the values over.

If src and dst are of the same type then Transform
basically does a copy.

dst must be settable or an error will be returned
*/
func Transform(src, dst interface{}) (err error) {
	if cannotModifyField(dst) {
		return errors.New(dstError)
	}

	valueOfSrc := findValueOf(src)
	valueOfDst := findValueOf(dst)

	// Only same-kind transformations are supported (struct->struct, int->int, ...).
	if valueOfSrc.Kind() == valueOfDst.Kind() {
		switch valueOfDst.Kind() {
		case reflect.Struct:
			// Flatten src into a path->description map, then walk dst
			// assigning each field its matching source value.
			srcDescription := describeStructure(src)
			mapToDestination("", dst, srcDescription)
		default:
			setValueOfDst(valueOfDst, valueOfSrc)
		}
	} else {
		err = errors.New(unsupportedTransformation)
	}

	return err
}

// describeStructure flattens a struct into a map from dotted field path
// (e.g. "Outer.Inner.Field") to a typeDescription of that field.
func describeStructure(structure interface{}) map[string]typeDescription {
	structureDescription := make(map[string]typeDescription)
	structureValue := findValueOf(structure)

	for i := 0; i < structureValue.NumField(); i++ {
		field := structureValue.Field(i)
		fieldName := structureValue.Type().Field(i).Name

		switch reflect.Indirect(field).Kind() {
		case reflect.Struct:
			// Recurse into nested structs, prefixing keys with this field's name.
			subDescription := describeStructure(field)
			for k, v := range subDescription {
				structureDescription[fieldName+"."+k] = v
			}
		default:
			structureDescription[fieldName] = typeDescription{field.Type(), findValueOf(field)}
		}
	}

	return structureDescription
}

// mapToDestination walks dst's fields (recursing into nested structs) and
// assigns each settable field the source value registered under the same
// dotted path in srcDescription, if any.
func mapToDestination(currentLevel string, dst interface{}, srcDescription map[string]typeDescription) {
	dstValue := findValueOf(dst)

	for i := 0; i < dstValue.NumField(); i++ {
		field := dstValue.Field(i)
		fieldName := dstValue.Type().Field(i).Name
		fullPathName := appendFieldName(currentLevel, fieldName)

		if field.IsValid() && field.CanSet() {
			switch field.Kind() {
			case reflect.Struct:
				mapToDestination(fullPathName, field, srcDescription)
			case reflect.Ptr:
				if val, found := findMostSimilarSource(fullPathName, srcDescription); found {
					// Allocate a fresh target of the pointee type, then fill it.
					field.Set(reflect.New(reflect.TypeOf(field.Interface()).Elem()))
					setValueOfDst(field.Elem(), val.FieldValue)
				}
			default:
				if val, found := findMostSimilarSource(fullPathName, srcDescription); found {
					setValueOfDst(field, val.FieldValue)
				}
			}
		}
	}
}

// findMostSimilarSource looks up the source field registered under the given
// dotted path. (Renamed from the misspelled findMostSimlilarSource.)
func findMostSimilarSource(fullPathName string, srcDescription map[string]typeDescription) (typeDescription, bool) {
	val, ok := srcDescription[fullPathName]
	return val, ok
}

// setValueOfDst assigns src to dst when the types match exactly, or converts
// src when its type is convertible to dst's. Otherwise it is a no-op.
func setValueOfDst(dst, src reflect.Value) {
	if dst.Type() == reflect.Indirect(src).Type() {
		dst.Set(reflect.Indirect(src))
	} else if reflect.Indirect(src).Type().ConvertibleTo(reflect.Indirect(dst).Type()) {
		dst.Set(reflect.Indirect(src).Convert(reflect.Indirect(dst).Type()))
	}
}

// findValueOf normalizes val to a dereferenced reflect.Value, accepting either
// an arbitrary interface{} or an existing reflect.Value.
func findValueOf(val interface{}) (valueOf reflect.Value) {
	if reflect.TypeOf(val) != reflect.TypeOf(valueOf) {
		valueOf = reflect.Indirect(reflect.ValueOf(val))
	} else {
		valueOf = val.(reflect.Value)
	}
	return valueOf
}

// appendFieldName joins a path prefix and a field name with a dot, omitting
// the dot when the prefix is empty.
func appendFieldName(prefix, fieldName string) (fullName string) {
	if len(prefix) != 0 {
		fullName = prefix + "." + fieldName
	} else {
		fullName = fieldName
	}
	return fullName
}

// cannotModifyField reports whether field is not a settable pointer target.
func cannotModifyField(field interface{}) bool {
	return reflect.ValueOf(field).Kind() != reflect.Ptr || !reflect.ValueOf(field).Elem().CanSet()
}
animagi.go
0.61451
0.416025
animagi.go
starcoder
package main

import "fmt"

// Bus carries Passengers from A to B if they have a valid bus ticket.
type Bus struct {
	Company     BusCompany
	name        string
	passengers  Passengers
	stops       []*BusStop
	currentStop int16
}

// NewBus returns a new Bus with an empty passenger set.
func NewBus(name string) Bus {
	return Bus{
		name:        name,
		currentStop: -1,
		passengers:  NewPassengerSet(),
	}
}

// AddStop adds the given BusStop to the list of stops that the Bus will stop at. Each stop is visited in order.
func (b *Bus) AddStop(busStop *BusStop) {
	b.stops = append(b.stops, busStop)
}

// add adds a single Passenger to the Bus. For brevity, we don't care too much about accidentally adding the same Passenger more than once.
func (b *Bus) add(p Passenger) {
	if b.passengers == nil {
		b.passengers = make(map[string]Passenger)
	}
	b.passengers[p.SSN] = p
	fmt.Printf("%s: boarded passenger %q\n", b.name, p.Name)
	fmt.Println("")
}

// Remove removes a single Passenger from the Bus.
func (b *Bus) Remove(p Passenger) {
	delete(b.passengers, p.SSN)
	fmt.Printf("\n%s: unboarded passenger %q\n\n", b.name, p.Name)
}

// Board adds the given Passenger to the Bus and charges them a ticket price calculated by chargeFn if they don't already have a paid ticket.
// Board returns false if the Passenger was not allowed to board the Bus.
func (b *Bus) Board(p *Passenger, chargeFn PriceCalculator) bool {
	if !p.HasValidTicket {
		// Charge for a ticket before boarding.
		charged := p.Charge(chargeFn(*p))
		p = &charged
	}
	b.add(*p)
	return true
}

// Go takes the Bus to the next BusStop. Go returns true if there are still more stops to visit.
func (b *Bus) Go() bool {
	b.currentStop++
	lastIndex := int16(len(b.stops) - 1)

	if b.currentStop == lastIndex {
		// End of the line: everyone gets off.
		fmt.Printf("%s: reached the end of the line, everybody out\n", b.name)
		b.VisitPassengers(func(p Passenger) {
			b.Remove(p)
		})
		return false
	}

	switch b.currentStop {
	case 0:
		fmt.Printf("%s: starting\n", b.name)
	default:
		fmt.Println("")
		fmt.Printf("%s: carrying %d passengers: heading for next stop\n", b.name, len(b.passengers))
	}

	stop := b.stops[b.currentStop]
	fmt.Println("")
	fmt.Printf("%s: arriving at %q\n", b.name, stop.Name)
	fmt.Println("")
	stop.NotifyBusArrival(b)

	return b.currentStop < lastIndex
}

// Manifest asks Passengers for a SSN manifest and returns it.
func (b Bus) Manifest() []string {
	return b.passengers.Manifest()
}

// VisitPassengers calls function visitor for each Passenger on the bus.
func (b *Bus) VisitPassengers(visitor func(Passenger)) {
	b.passengers.Visit(visitor)
}

// FindPassenger returns the Passenger that matches the given SSN, if found. Otherwise, an empty Passenger is returned.
func (b *Bus) FindPassenger(ssn string) Passenger {
	p, ok := b.passengers[ssn]
	if !ok {
		return Passenger{} // A nobody.
	}
	return p
}

// UpdatePassengers calls function visitor for each Passenger on the bus. Passengers are passed by reference and may be modified.
func (b *Bus) UpdatePassengers(visitor func(*Passenger)) {
	updated := make(map[string]Passenger, len(b.passengers))
	for ssn, passenger := range b.passengers {
		visitor(&passenger)
		updated[ssn] = passenger
	}
	b.passengers = updated
}

// NotifyBoardingIntent is called by BusStop every time a Prospect arrives and instructs the Bus to signal its arrival when at that BusStop.
func (b *Bus) NotifyBoardingIntent(busStop *BusStop) {
	if b.StopsAt(busStop) {
		return // We already intend to stop here.
	}
	b.AddStop(busStop)
}

// NotifyArrival notifies the current BusStop that the Bus has arrived.
func (b *Bus) NotifyArrival() {
	b.stops[b.currentStop].NotifyBusArrival(b)
}

// StopsAt checks if Bus stops at the given BusStop, and returns true if it does, and false otherwise.
func (b Bus) StopsAt(busStop *BusStop) bool {
	for _, candidate := range b.stops {
		if candidate.Equals(busStop) {
			return true
		}
	}
	return false
}

// CurrentStop returns the BusStop that the Bus is currently stopped at.
func (b Bus) CurrentStop() *BusStop {
	return b.stops[b.currentStop]
}
bus-service/bus.go
0.670608
0.442877
bus.go
starcoder
package mock

import (
	"fmt"
	"strconv"
	"strings"

	testing "github.com/mitchellh/go-testing-interface"

	"github.com/hashicorp/nomad/nomad/structs"
	"github.com/stretchr/testify/assert"
)

// StateStore defines the methods required from state.StateStore but avoids a
// circular dependency.
type StateStore interface {
	UpsertACLPolicies(index uint64, policies []*structs.ACLPolicy) error
	UpsertACLTokens(index uint64, tokens []*structs.ACLToken) error
}

// NamespacePolicy is a helper for generating the policy hcl for a given
// namespace. Either policy or capabilities may be nil but not both.
func NamespacePolicy(namespace string, policy string, capabilities []string) string {
	policyHCL := fmt.Sprintf("namespace %q {", namespace)
	if policy != "" {
		policyHCL += fmt.Sprintf("\n\tpolicy = %q", policy)
	}
	if len(capabilities) != 0 {
		// Quote each capability into a copy so that the caller's slice is
		// not mutated as a side effect (the original wrote back into
		// capabilities[i]).
		quoted := make([]string, len(capabilities))
		for i, s := range capabilities {
			if !strings.HasPrefix(s, "\"") {
				quoted[i] = strconv.Quote(s)
			} else {
				quoted[i] = s
			}
		}
		policyHCL += fmt.Sprintf("\n\tcapabilities = [%v]", strings.Join(quoted, ","))
	}
	policyHCL += "\n}"
	return policyHCL
}

// AgentPolicy is a helper for generating the hcl for a given agent policy.
func AgentPolicy(policy string) string {
	return fmt.Sprintf("agent {\n\tpolicy = %q\n}\n", policy)
}

// NodePolicy is a helper for generating the hcl for a given node policy.
func NodePolicy(policy string) string {
	return fmt.Sprintf("node {\n\tpolicy = %q\n}\n", policy)
}

// QuotaPolicy is a helper for generating the hcl for a given quota policy.
func QuotaPolicy(policy string) string {
	return fmt.Sprintf("quota {\n\tpolicy = %q\n}\n", policy)
}

// CreatePolicy creates a policy with the given name and rule.
func CreatePolicy(t testing.T, state StateStore, index uint64, name, rule string) {
	t.Helper()

	// Build the ACLPolicy, hash it (required before upsert), and insert it
	// at the given raft index. The assertion fails the test on upsert error.
	policy := &structs.ACLPolicy{
		Name:  name,
		Rules: rule,
	}
	policy.SetHash()
	assert.Nil(t, state.UpsertACLPolicies(index, []*structs.ACLPolicy{policy}))
}

// CreateToken creates a local, client token for the given policies
func CreateToken(t testing.T, state StateStore, index uint64, policies []string) *structs.ACLToken {
	t.Helper()

	// Start from the mock ACLToken, attach the requested policies, re-hash,
	// and upsert at the given index.
	token := ACLToken()
	token.Policies = policies
	token.SetHash()
	assert.Nil(t, state.UpsertACLTokens(index, []*structs.ACLToken{token}))
	return token
}

// CreatePolicyAndToken creates a policy and then returns a token configured for
// just that policy. CreatePolicyAndToken uses the given index and index+1.
func CreatePolicyAndToken(t testing.T, state StateStore, index uint64, name, rule string) *structs.ACLToken {
	CreatePolicy(t, state, index, name, rule)
	return CreateToken(t, state, index+1, []string{name})
}
vendor/github.com/hashicorp/nomad/nomad/mock/acl.go
0.714329
0.410756
acl.go
starcoder
package gaia import ( "fmt" "time" "github.com/globalsign/mgo/bson" "github.com/mitchellh/copystructure" "go.aporeto.io/elemental" ) // GraphEdgeDestinationTypeValue represents the possible values for attribute "destinationType". type GraphEdgeDestinationTypeValue string const ( // GraphEdgeDestinationTypeExternalNetwork represents the value ExternalNetwork. GraphEdgeDestinationTypeExternalNetwork GraphEdgeDestinationTypeValue = "ExternalNetwork" // GraphEdgeDestinationTypeNamespace represents the value Namespace. GraphEdgeDestinationTypeNamespace GraphEdgeDestinationTypeValue = "Namespace" // GraphEdgeDestinationTypeNode represents the value Node. GraphEdgeDestinationTypeNode GraphEdgeDestinationTypeValue = "Node" // GraphEdgeDestinationTypeProcessingUnit represents the value ProcessingUnit. GraphEdgeDestinationTypeProcessingUnit GraphEdgeDestinationTypeValue = "ProcessingUnit" // GraphEdgeDestinationTypeRemoteController represents the value RemoteController. GraphEdgeDestinationTypeRemoteController GraphEdgeDestinationTypeValue = "RemoteController" ) // GraphEdgeSourceTypeValue represents the possible values for attribute "sourceType". type GraphEdgeSourceTypeValue string const ( // GraphEdgeSourceTypeExternalNetwork represents the value ExternalNetwork. GraphEdgeSourceTypeExternalNetwork GraphEdgeSourceTypeValue = "ExternalNetwork" // GraphEdgeSourceTypeNamespace represents the value Namespace. GraphEdgeSourceTypeNamespace GraphEdgeSourceTypeValue = "Namespace" // GraphEdgeSourceTypeNode represents the value Node. GraphEdgeSourceTypeNode GraphEdgeSourceTypeValue = "Node" // GraphEdgeSourceTypeProcessingUnit represents the value ProcessingUnit. GraphEdgeSourceTypeProcessingUnit GraphEdgeSourceTypeValue = "ProcessingUnit" // GraphEdgeSourceTypeRemoteController represents the value RemoteController. GraphEdgeSourceTypeRemoteController GraphEdgeSourceTypeValue = "RemoteController" ) // GraphEdgeIdentity represents the Identity of the object. 
var GraphEdgeIdentity = elemental.Identity{ Name: "graphedge", Category: "graphedges", Package: "meteor", Private: true, } // GraphEdgesList represents a list of GraphEdges type GraphEdgesList []*GraphEdge // Identity returns the identity of the objects in the list. func (o GraphEdgesList) Identity() elemental.Identity { return GraphEdgeIdentity } // Copy returns a pointer to a copy the GraphEdgesList. func (o GraphEdgesList) Copy() elemental.Identifiables { copy := append(GraphEdgesList{}, o...) return &copy } // Append appends the objects to the a new copy of the GraphEdgesList. func (o GraphEdgesList) Append(objects ...elemental.Identifiable) elemental.Identifiables { out := append(GraphEdgesList{}, o...) for _, obj := range objects { out = append(out, obj.(*GraphEdge)) } return out } // List converts the object to an elemental.IdentifiablesList. func (o GraphEdgesList) List() elemental.IdentifiablesList { out := make(elemental.IdentifiablesList, len(o)) for i := 0; i < len(o); i++ { out[i] = o[i] } return out } // DefaultOrder returns the default ordering fields of the content. func (o GraphEdgesList) DefaultOrder() []string { return []string{} } // ToSparse returns the GraphEdgesList converted to SparseGraphEdgesList. // Objects in the list will only contain the given fields. No field means entire field set. func (o GraphEdgesList) ToSparse(fields ...string) elemental.Identifiables { out := make(SparseGraphEdgesList, len(o)) for i := 0; i < len(o); i++ { out[i] = o[i].ToSparse(fields...).(*SparseGraphEdge) } return out } // Version returns the version of the content. func (o GraphEdgesList) Version() int { return 1 } // GraphEdge represents the model of a graphedge type GraphEdge struct { // DB Identifier of the edge. ID string `json:"-" msgpack:"-" bson:"-" mapstructure:"-,omitempty"` // Number of accepted flows in the edge. 
AcceptedFlows bool `json:"acceptedFlows" msgpack:"acceptedFlows" bson:"acceptedflows" mapstructure:"acceptedFlows,omitempty"` // The date for the day bucket. BucketDay time.Time `json:"-" msgpack:"-" bson:"bucketday" mapstructure:"-,omitempty"` // The date for the hour bucket. BucketHour time.Time `json:"-" msgpack:"-" bson:"buckethour" mapstructure:"-,omitempty"` // The date for the minute bucket. BucketMinute time.Time `json:"-" msgpack:"-" bson:"bucketminute" mapstructure:"-,omitempty"` // The date for the month bucket. BucketMonth time.Time `json:"-" msgpack:"-" bson:"bucketmonth" mapstructure:"-,omitempty"` // The truth value that indicates if there are accepted default flows. DefaultAcceptedFlows bool `json:"defaultAcceptedFlows" msgpack:"defaultAcceptedFlows" bson:"defaultacceptedflows" mapstructure:"defaultAcceptedFlows,omitempty"` // The truth value that indicates if there are rejected default flows. DefaultRejectedFlows bool `json:"defaultRejectedFlows" msgpack:"defaultRejectedFlows" bson:"defaultrejectedflows" mapstructure:"defaultRejectedFlows,omitempty"` // Name of the remote destination controller if different than the current one. DestinationController string `json:"destinationController,omitempty" msgpack:"destinationController,omitempty" bson:"destinationcontroller,omitempty" mapstructure:"destinationController,omitempty"` // ID of the destination `GraphNode` of the edge. DestinationID string `json:"destinationID" msgpack:"destinationID" bson:"destinationid" mapstructure:"destinationID,omitempty"` // Type of the destination `GraphNode` of the edge. DestinationType GraphEdgeDestinationTypeValue `json:"destinationType" msgpack:"destinationType" bson:"destinationtype" mapstructure:"destinationType,omitempty"` // Contains more flow details grouped by their destination protocol/ports. 
Details map[string]*GraphEdgeFlowDetails `json:"details,omitempty" msgpack:"details,omitempty" bson:"-" mapstructure:"details,omitempty"` // The number of encrypted flows in the edge. Encrypted bool `json:"encrypted" msgpack:"encrypted" bson:"encrypted" mapstructure:"encrypted,omitempty"` // Contains the date when the edge was first seen. FirstSeen time.Time `json:"firstSeen,omitempty" msgpack:"firstSeen,omitempty" bson:"firstseen,omitempty" mapstructure:"firstSeen,omitempty"` // Identifier of the edge. FlowID string `json:"ID" msgpack:"ID" bson:"flowid" mapstructure:"ID,omitempty"` // Contains the date when the edge was last seen. LastSeen time.Time `json:"lastSeen,omitempty" msgpack:"lastSeen,omitempty" bson:"lastseen,omitempty" mapstructure:"lastSeen,omitempty"` // Namespace of the object that reported the flow. Namespace string `json:"namespace" msgpack:"namespace" bson:"namespace" mapstructure:"namespace,omitempty"` // Number of accepted observed flows. ObservedAcceptedFlows bool `json:"observedAcceptedFlows" msgpack:"observedAcceptedFlows" bson:"observedacceptedflows" mapstructure:"observedAcceptedFlows,omitempty"` // Number of encrypted observed flows. ObservedEncrypted bool `json:"observedEncrypted" msgpack:"observedEncrypted" bson:"observedencrypted" mapstructure:"observedEncrypted,omitempty"` // Number of rejected observed flows. ObservedRejectedFlows bool `json:"observedRejectedFlows" msgpack:"observedRejectedFlows" bson:"observedrejectedflows" mapstructure:"observedRejectedFlows,omitempty"` // Number of rejected flows in the edge. RejectedFlows bool `json:"rejectedFlows" msgpack:"rejectedFlows" bson:"rejectedflows" mapstructure:"rejectedFlows,omitempty"` // Namespace of the object that was targeted by the flow. RemoteNamespace string `json:"remoteNamespace,omitempty" msgpack:"remoteNamespace,omitempty" bson:"remotenamespace,omitempty" mapstructure:"remoteNamespace,omitempty"` // Name of the remote source controller if different than the current one. 
SourceController string `json:"sourceController,omitempty" msgpack:"sourceController,omitempty" bson:"sourcecontroller,omitempty" mapstructure:"sourceController,omitempty"` // ID of the source `GraphNode` of the edge. SourceID string `json:"sourceID" msgpack:"sourceID" bson:"sourceid" mapstructure:"sourceID,omitempty"` // Type of the source `GraphNode` of the edge. SourceType GraphEdgeSourceTypeValue `json:"sourceType" msgpack:"sourceType" bson:"sourcetype" mapstructure:"sourceType,omitempty"` // geographical hash of the data. This is used for sharding and // georedundancy. ZHash int `json:"-" msgpack:"-" bson:"zhash" mapstructure:"-,omitempty"` // Logical storage zone. Used for sharding. Zone int `json:"-" msgpack:"-" bson:"zone" mapstructure:"-,omitempty"` ModelVersion int `json:"-" msgpack:"-" bson:"_modelversion"` } // NewGraphEdge returns a new *GraphEdge func NewGraphEdge() *GraphEdge { return &GraphEdge{ ModelVersion: 1, Details: map[string]*GraphEdgeFlowDetails{}, } } // Identity returns the Identity of the object. func (o *GraphEdge) Identity() elemental.Identity { return GraphEdgeIdentity } // Identifier returns the value of the object's unique identifier. func (o *GraphEdge) Identifier() string { return o.ID } // SetIdentifier sets the value of the object's unique identifier. func (o *GraphEdge) SetIdentifier(id string) { o.ID = id } // GetBSON implements the bson marshaling interface. // This is used to transparently convert ID to MongoDBID as ObectID. 
func (o *GraphEdge) GetBSON() (interface{}, error) {

	if o == nil {
		return nil, nil
	}

	s := &mongoAttributesGraphEdge{}

	// Only set the ObjectId when an ID is present; ObjectIdHex panics on an
	// empty/invalid hex string.
	if o.ID != "" {
		s.ID = bson.ObjectIdHex(o.ID)
	}
	// Copy every stored field into the mongo attribute struct. Note that
	// Details is intentionally absent (tagged bson:"-").
	s.AcceptedFlows = o.AcceptedFlows
	s.BucketDay = o.BucketDay
	s.BucketHour = o.BucketHour
	s.BucketMinute = o.BucketMinute
	s.BucketMonth = o.BucketMonth
	s.DefaultAcceptedFlows = o.DefaultAcceptedFlows
	s.DefaultRejectedFlows = o.DefaultRejectedFlows
	s.DestinationController = o.DestinationController
	s.DestinationID = o.DestinationID
	s.DestinationType = o.DestinationType
	s.Encrypted = o.Encrypted
	s.FirstSeen = o.FirstSeen
	s.FlowID = o.FlowID
	s.LastSeen = o.LastSeen
	s.Namespace = o.Namespace
	s.ObservedAcceptedFlows = o.ObservedAcceptedFlows
	s.ObservedEncrypted = o.ObservedEncrypted
	s.ObservedRejectedFlows = o.ObservedRejectedFlows
	s.RejectedFlows = o.RejectedFlows
	s.RemoteNamespace = o.RemoteNamespace
	s.SourceController = o.SourceController
	s.SourceID = o.SourceID
	s.SourceType = o.SourceType
	s.ZHash = o.ZHash
	s.Zone = o.Zone

	return s, nil
}

// SetBSON implements the bson marshaling interface.
// This is used to transparently convert ID to MongoDB ID as ObjectID.
func (o *GraphEdge) SetBSON(raw bson.Raw) error {

	if o == nil {
		return nil
	}

	s := &mongoAttributesGraphEdge{}
	if err := raw.Unmarshal(s); err != nil {
		return err
	}

	// Convert the ObjectId back to its hex string form, then copy every
	// stored field. This mirrors GetBSON; Details is not persisted.
	o.ID = s.ID.Hex()
	o.AcceptedFlows = s.AcceptedFlows
	o.BucketDay = s.BucketDay
	o.BucketHour = s.BucketHour
	o.BucketMinute = s.BucketMinute
	o.BucketMonth = s.BucketMonth
	o.DefaultAcceptedFlows = s.DefaultAcceptedFlows
	o.DefaultRejectedFlows = s.DefaultRejectedFlows
	o.DestinationController = s.DestinationController
	o.DestinationID = s.DestinationID
	o.DestinationType = s.DestinationType
	o.Encrypted = s.Encrypted
	o.FirstSeen = s.FirstSeen
	o.FlowID = s.FlowID
	o.LastSeen = s.LastSeen
	o.Namespace = s.Namespace
	o.ObservedAcceptedFlows = s.ObservedAcceptedFlows
	o.ObservedEncrypted = s.ObservedEncrypted
	o.ObservedRejectedFlows = s.ObservedRejectedFlows
	o.RejectedFlows = s.RejectedFlows
	o.RemoteNamespace = s.RemoteNamespace
	o.SourceController = s.SourceController
	o.SourceID = s.SourceID
	o.SourceType = s.SourceType
	o.ZHash = s.ZHash
	o.Zone = s.Zone

	return nil
}

// Version returns the hardcoded version of the model.
func (o *GraphEdge) Version() int {

	return 1
}

// BleveType implements the bleve.Classifier Interface.
func (o *GraphEdge) BleveType() string {

	return "graphedge"
}

// DefaultOrder returns the list of default ordering fields.
func (o *GraphEdge) DefaultOrder() []string {

	return []string{}
}

// Doc returns the documentation for the object
func (o *GraphEdge) Doc() string {

	return `Represents an edge from the dependency map.`
}

func (o *GraphEdge) String() string {

	return fmt.Sprintf("<%s:%s>", o.Identity().Name, o.Identifier())
}

// GetZHash returns the ZHash of the receiver.
func (o *GraphEdge) GetZHash() int {

	return o.ZHash
}

// SetZHash sets the property ZHash of the receiver using the given value.
func (o *GraphEdge) SetZHash(zHash int) {

	o.ZHash = zHash
}

// GetZone returns the Zone of the receiver.
func (o *GraphEdge) GetZone() int { return o.Zone } // SetZone sets the property Zone of the receiver using the given value. func (o *GraphEdge) SetZone(zone int) { o.Zone = zone } // ToSparse returns the sparse version of the model. // The returned object will only contain the given fields. No field means entire field set. func (o *GraphEdge) ToSparse(fields ...string) elemental.SparseIdentifiable { if len(fields) == 0 { // nolint: goimports return &SparseGraphEdge{ ID: &o.ID, AcceptedFlows: &o.AcceptedFlows, BucketDay: &o.BucketDay, BucketHour: &o.BucketHour, BucketMinute: &o.BucketMinute, BucketMonth: &o.BucketMonth, DefaultAcceptedFlows: &o.DefaultAcceptedFlows, DefaultRejectedFlows: &o.DefaultRejectedFlows, DestinationController: &o.DestinationController, DestinationID: &o.DestinationID, DestinationType: &o.DestinationType, Details: &o.Details, Encrypted: &o.Encrypted, FirstSeen: &o.FirstSeen, FlowID: &o.FlowID, LastSeen: &o.LastSeen, Namespace: &o.Namespace, ObservedAcceptedFlows: &o.ObservedAcceptedFlows, ObservedEncrypted: &o.ObservedEncrypted, ObservedRejectedFlows: &o.ObservedRejectedFlows, RejectedFlows: &o.RejectedFlows, RemoteNamespace: &o.RemoteNamespace, SourceController: &o.SourceController, SourceID: &o.SourceID, SourceType: &o.SourceType, ZHash: &o.ZHash, Zone: &o.Zone, } } sp := &SparseGraphEdge{} for _, f := range fields { switch f { case "ID": sp.ID = &(o.ID) case "acceptedFlows": sp.AcceptedFlows = &(o.AcceptedFlows) case "bucketDay": sp.BucketDay = &(o.BucketDay) case "bucketHour": sp.BucketHour = &(o.BucketHour) case "bucketMinute": sp.BucketMinute = &(o.BucketMinute) case "bucketMonth": sp.BucketMonth = &(o.BucketMonth) case "defaultAcceptedFlows": sp.DefaultAcceptedFlows = &(o.DefaultAcceptedFlows) case "defaultRejectedFlows": sp.DefaultRejectedFlows = &(o.DefaultRejectedFlows) case "destinationController": sp.DestinationController = &(o.DestinationController) case "destinationID": sp.DestinationID = &(o.DestinationID) case 
"destinationType": sp.DestinationType = &(o.DestinationType) case "details": sp.Details = &(o.Details) case "encrypted": sp.Encrypted = &(o.Encrypted) case "firstSeen": sp.FirstSeen = &(o.FirstSeen) case "flowID": sp.FlowID = &(o.FlowID) case "lastSeen": sp.LastSeen = &(o.LastSeen) case "namespace": sp.Namespace = &(o.Namespace) case "observedAcceptedFlows": sp.ObservedAcceptedFlows = &(o.ObservedAcceptedFlows) case "observedEncrypted": sp.ObservedEncrypted = &(o.ObservedEncrypted) case "observedRejectedFlows": sp.ObservedRejectedFlows = &(o.ObservedRejectedFlows) case "rejectedFlows": sp.RejectedFlows = &(o.RejectedFlows) case "remoteNamespace": sp.RemoteNamespace = &(o.RemoteNamespace) case "sourceController": sp.SourceController = &(o.SourceController) case "sourceID": sp.SourceID = &(o.SourceID) case "sourceType": sp.SourceType = &(o.SourceType) case "zHash": sp.ZHash = &(o.ZHash) case "zone": sp.Zone = &(o.Zone) } } return sp } // Patch apply the non nil value of a *SparseGraphEdge to the object. 
func (o *GraphEdge) Patch(sparse elemental.SparseIdentifiable) { if !sparse.Identity().IsEqual(o.Identity()) { panic("cannot patch from a parse with different identity") } so := sparse.(*SparseGraphEdge) if so.ID != nil { o.ID = *so.ID } if so.AcceptedFlows != nil { o.AcceptedFlows = *so.AcceptedFlows } if so.BucketDay != nil { o.BucketDay = *so.BucketDay } if so.BucketHour != nil { o.BucketHour = *so.BucketHour } if so.BucketMinute != nil { o.BucketMinute = *so.BucketMinute } if so.BucketMonth != nil { o.BucketMonth = *so.BucketMonth } if so.DefaultAcceptedFlows != nil { o.DefaultAcceptedFlows = *so.DefaultAcceptedFlows } if so.DefaultRejectedFlows != nil { o.DefaultRejectedFlows = *so.DefaultRejectedFlows } if so.DestinationController != nil { o.DestinationController = *so.DestinationController } if so.DestinationID != nil { o.DestinationID = *so.DestinationID } if so.DestinationType != nil { o.DestinationType = *so.DestinationType } if so.Details != nil { o.Details = *so.Details } if so.Encrypted != nil { o.Encrypted = *so.Encrypted } if so.FirstSeen != nil { o.FirstSeen = *so.FirstSeen } if so.FlowID != nil { o.FlowID = *so.FlowID } if so.LastSeen != nil { o.LastSeen = *so.LastSeen } if so.Namespace != nil { o.Namespace = *so.Namespace } if so.ObservedAcceptedFlows != nil { o.ObservedAcceptedFlows = *so.ObservedAcceptedFlows } if so.ObservedEncrypted != nil { o.ObservedEncrypted = *so.ObservedEncrypted } if so.ObservedRejectedFlows != nil { o.ObservedRejectedFlows = *so.ObservedRejectedFlows } if so.RejectedFlows != nil { o.RejectedFlows = *so.RejectedFlows } if so.RemoteNamespace != nil { o.RemoteNamespace = *so.RemoteNamespace } if so.SourceController != nil { o.SourceController = *so.SourceController } if so.SourceID != nil { o.SourceID = *so.SourceID } if so.SourceType != nil { o.SourceType = *so.SourceType } if so.ZHash != nil { o.ZHash = *so.ZHash } if so.Zone != nil { o.Zone = *so.Zone } } // DeepCopy returns a deep copy if the GraphEdge. 
func (o *GraphEdge) DeepCopy() *GraphEdge {

	if o == nil {
		return nil
	}

	out := &GraphEdge{}
	o.DeepCopyInto(out)

	return out
}

// DeepCopyInto copies the receiver into the given *GraphEdge.
// It panics if the reflection-based copy fails.
func (o *GraphEdge) DeepCopyInto(out *GraphEdge) {

	target, err := copystructure.Copy(o)
	if err != nil {
		panic(fmt.Sprintf("Unable to deepcopy GraphEdge: %s", err))
	}

	*out = *target.(*GraphEdge)
}

// Validate validates the current information stored into the structure.
func (o *GraphEdge) Validate() error {

	errors := elemental.Errors{}
	requiredErrors := elemental.Errors{}

	// destinationType must be one of the allowed enum values.
	if err := elemental.ValidateStringInList("destinationType", string(o.DestinationType), []string{"ProcessingUnit", "ExternalNetwork", "Namespace", "Node", "RemoteController"}, false); err != nil {
		errors = errors.Append(err)
	}

	// Validate each nested flow-details entry, skipping nil values.
	for _, sub := range o.Details {
		if sub == nil {
			continue
		}
		elemental.ResetDefaultForZeroValues(sub)
		if err := sub.Validate(); err != nil {
			errors = errors.Append(err)
		}
	}

	// sourceType must be one of the allowed enum values.
	if err := elemental.ValidateStringInList("sourceType", string(o.SourceType), []string{"ProcessingUnit", "ExternalNetwork", "Namespace", "Node", "RemoteController"}, false); err != nil {
		errors = errors.Append(err)
	}

	// Required-field errors take precedence over other validation errors.
	if len(requiredErrors) > 0 {
		return requiredErrors
	}

	if len(errors) > 0 {
		return errors
	}

	return nil
}

// SpecificationForAttribute returns the AttributeSpecification for the given attribute name key.
func (*GraphEdge) SpecificationForAttribute(name string) elemental.AttributeSpecification {

	if v, ok := GraphEdgeAttributesMap[name]; ok {
		return v
	}

	// We could not find it, so let's check on the lower case indexed spec map
	return GraphEdgeLowerCaseAttributesMap[name]
}

// AttributeSpecifications returns the full attribute specifications map.
func (*GraphEdge) AttributeSpecifications() map[string]elemental.AttributeSpecification {

	return GraphEdgeAttributesMap
}

// ValueForAttribute returns the value for the given attribute.
// This is a very advanced function that you should not need but in some // very specific use cases. func (o *GraphEdge) ValueForAttribute(name string) interface{} { switch name { case "ID": return o.ID case "acceptedFlows": return o.AcceptedFlows case "bucketDay": return o.BucketDay case "bucketHour": return o.BucketHour case "bucketMinute": return o.BucketMinute case "bucketMonth": return o.BucketMonth case "defaultAcceptedFlows": return o.DefaultAcceptedFlows case "defaultRejectedFlows": return o.DefaultRejectedFlows case "destinationController": return o.DestinationController case "destinationID": return o.DestinationID case "destinationType": return o.DestinationType case "details": return o.Details case "encrypted": return o.Encrypted case "firstSeen": return o.FirstSeen case "flowID": return o.FlowID case "lastSeen": return o.LastSeen case "namespace": return o.Namespace case "observedAcceptedFlows": return o.ObservedAcceptedFlows case "observedEncrypted": return o.ObservedEncrypted case "observedRejectedFlows": return o.ObservedRejectedFlows case "rejectedFlows": return o.RejectedFlows case "remoteNamespace": return o.RemoteNamespace case "sourceController": return o.SourceController case "sourceID": return o.SourceID case "sourceType": return o.SourceType case "zHash": return o.ZHash case "zone": return o.Zone } return nil } // GraphEdgeAttributesMap represents the map of attribute for GraphEdge. 
var GraphEdgeAttributesMap = map[string]elemental.AttributeSpecification{ "ID": { AllowedChoices: []string{}, BSONFieldName: "_id", ConvertedName: "ID", Description: `DB Identifier of the edge.`, Identifier: true, Name: "ID", Stored: true, Type: "string", }, "AcceptedFlows": { AllowedChoices: []string{}, BSONFieldName: "acceptedflows", ConvertedName: "AcceptedFlows", Description: `Number of accepted flows in the edge.`, Exposed: true, Name: "acceptedFlows", Stored: true, Type: "boolean", }, "BucketDay": { AllowedChoices: []string{}, BSONFieldName: "bucketday", ConvertedName: "BucketDay", Description: `The date for the day bucket.`, Name: "bucketDay", Stored: true, Type: "time", }, "BucketHour": { AllowedChoices: []string{}, BSONFieldName: "buckethour", ConvertedName: "BucketHour", Description: `The date for the hour bucket.`, Name: "bucketHour", Stored: true, Type: "time", }, "BucketMinute": { AllowedChoices: []string{}, BSONFieldName: "bucketminute", ConvertedName: "BucketMinute", Description: `The date for the minute bucket.`, Name: "bucketMinute", Stored: true, Type: "time", }, "BucketMonth": { AllowedChoices: []string{}, BSONFieldName: "bucketmonth", ConvertedName: "BucketMonth", Description: `The date for the month bucket.`, Name: "bucketMonth", Stored: true, Type: "time", }, "DefaultAcceptedFlows": { AllowedChoices: []string{}, BSONFieldName: "defaultacceptedflows", ConvertedName: "DefaultAcceptedFlows", Description: `The truth value that indicates if there are accepted default flows.`, Exposed: true, Name: "defaultAcceptedFlows", Stored: true, Type: "boolean", }, "DefaultRejectedFlows": { AllowedChoices: []string{}, BSONFieldName: "defaultrejectedflows", ConvertedName: "DefaultRejectedFlows", Description: `The truth value that indicates if there are rejected default flows.`, Exposed: true, Name: "defaultRejectedFlows", Stored: true, Type: "boolean", }, "DestinationController": { AllowedChoices: []string{}, BSONFieldName: "destinationcontroller", 
ConvertedName: "DestinationController", Description: `Name of the remote destination controller if different than the current one.`, Exposed: true, Name: "destinationController", Stored: true, Type: "string", }, "DestinationID": { AllowedChoices: []string{}, BSONFieldName: "destinationid", ConvertedName: "DestinationID", Description: `ID of the destination ` + "`" + `GraphNode` + "`" + ` of the edge.`, Exposed: true, Name: "destinationID", Stored: true, Type: "string", }, "DestinationType": { AllowedChoices: []string{"ProcessingUnit", "ExternalNetwork", "Namespace", "Node", "RemoteController"}, BSONFieldName: "destinationtype", ConvertedName: "DestinationType", Description: `Type of the destination ` + "`" + `GraphNode` + "`" + ` of the edge.`, Exposed: true, Name: "destinationType", Stored: true, Type: "enum", }, "Details": { AllowedChoices: []string{}, ConvertedName: "Details", Description: `Contains more flow details grouped by their destination protocol/ports.`, Exposed: true, Name: "details", SubType: "graphedgeflowdetails", Type: "refMap", }, "Encrypted": { AllowedChoices: []string{}, BSONFieldName: "encrypted", ConvertedName: "Encrypted", Description: `The number of encrypted flows in the edge.`, Exposed: true, Name: "encrypted", Stored: true, Type: "boolean", }, "FirstSeen": { AllowedChoices: []string{}, BSONFieldName: "firstseen", ConvertedName: "FirstSeen", Description: `Contains the date when the edge was first seen.`, Exposed: true, Name: "firstSeen", Stored: true, Type: "time", }, "FlowID": { AllowedChoices: []string{}, BSONFieldName: "flowid", ConvertedName: "FlowID", Description: `Identifier of the edge.`, Exposed: true, Name: "flowID", Stored: true, Type: "string", }, "LastSeen": { AllowedChoices: []string{}, BSONFieldName: "lastseen", ConvertedName: "LastSeen", Description: `Contains the date when the edge was last seen.`, Exposed: true, Name: "lastSeen", Stored: true, Type: "time", }, "Namespace": { AllowedChoices: []string{}, BSONFieldName: 
"namespace", ConvertedName: "Namespace", Description: `Namespace of the object that reported the flow.`, Exposed: true, Name: "namespace", Stored: true, Type: "string", }, "ObservedAcceptedFlows": { AllowedChoices: []string{}, BSONFieldName: "observedacceptedflows", ConvertedName: "ObservedAcceptedFlows", Description: `Number of accepted observed flows.`, Exposed: true, Name: "observedAcceptedFlows", Stored: true, Type: "boolean", }, "ObservedEncrypted": { AllowedChoices: []string{}, BSONFieldName: "observedencrypted", ConvertedName: "ObservedEncrypted", Description: `Number of encrypted observed flows.`, Exposed: true, Name: "observedEncrypted", Stored: true, Type: "boolean", }, "ObservedRejectedFlows": { AllowedChoices: []string{}, BSONFieldName: "observedrejectedflows", ConvertedName: "ObservedRejectedFlows", Description: `Number of rejected observed flows.`, Exposed: true, Name: "observedRejectedFlows", Stored: true, Type: "boolean", }, "RejectedFlows": { AllowedChoices: []string{}, BSONFieldName: "rejectedflows", ConvertedName: "RejectedFlows", Description: `Number of rejected flows in the edge.`, Exposed: true, Name: "rejectedFlows", Stored: true, Type: "boolean", }, "RemoteNamespace": { AllowedChoices: []string{}, BSONFieldName: "remotenamespace", ConvertedName: "RemoteNamespace", Description: `Namespace of the object that was targeted by the flow.`, Exposed: true, Name: "remoteNamespace", Stored: true, Type: "string", }, "SourceController": { AllowedChoices: []string{}, BSONFieldName: "sourcecontroller", ConvertedName: "SourceController", Description: `Name of the remote source controller if different than the current one.`, Exposed: true, Name: "sourceController", Stored: true, Type: "string", }, "SourceID": { AllowedChoices: []string{}, BSONFieldName: "sourceid", ConvertedName: "SourceID", Description: `ID of the source ` + "`" + `GraphNode` + "`" + ` of the edge.`, Exposed: true, Name: "sourceID", Stored: true, Type: "string", }, "SourceType": { 
AllowedChoices: []string{"ProcessingUnit", "ExternalNetwork", "Namespace", "Node", "RemoteController"}, BSONFieldName: "sourcetype", ConvertedName: "SourceType", Description: `Type of the source ` + "`" + `GraphNode` + "`" + ` of the edge.`, Exposed: true, Name: "sourceType", Stored: true, Type: "enum", }, "ZHash": { AllowedChoices: []string{}, Autogenerated: true, BSONFieldName: "zhash", ConvertedName: "ZHash", Description: `geographical hash of the data. This is used for sharding and georedundancy.`, Getter: true, Name: "zHash", ReadOnly: true, Setter: true, Stored: true, Type: "integer", }, "Zone": { AllowedChoices: []string{}, Autogenerated: true, BSONFieldName: "zone", ConvertedName: "Zone", Description: `Logical storage zone. Used for sharding.`, Getter: true, Name: "zone", ReadOnly: true, Setter: true, Stored: true, Transient: true, Type: "integer", }, } // GraphEdgeLowerCaseAttributesMap represents the map of attribute for GraphEdge. var GraphEdgeLowerCaseAttributesMap = map[string]elemental.AttributeSpecification{ "id": { AllowedChoices: []string{}, BSONFieldName: "_id", ConvertedName: "ID", Description: `DB Identifier of the edge.`, Identifier: true, Name: "ID", Stored: true, Type: "string", }, "acceptedflows": { AllowedChoices: []string{}, BSONFieldName: "acceptedflows", ConvertedName: "AcceptedFlows", Description: `Number of accepted flows in the edge.`, Exposed: true, Name: "acceptedFlows", Stored: true, Type: "boolean", }, "bucketday": { AllowedChoices: []string{}, BSONFieldName: "bucketday", ConvertedName: "BucketDay", Description: `The date for the day bucket.`, Name: "bucketDay", Stored: true, Type: "time", }, "buckethour": { AllowedChoices: []string{}, BSONFieldName: "buckethour", ConvertedName: "BucketHour", Description: `The date for the hour bucket.`, Name: "bucketHour", Stored: true, Type: "time", }, "bucketminute": { AllowedChoices: []string{}, BSONFieldName: "bucketminute", ConvertedName: "BucketMinute", Description: `The date for the minute 
bucket.`, Name: "bucketMinute", Stored: true, Type: "time", }, "bucketmonth": { AllowedChoices: []string{}, BSONFieldName: "bucketmonth", ConvertedName: "BucketMonth", Description: `The date for the month bucket.`, Name: "bucketMonth", Stored: true, Type: "time", }, "defaultacceptedflows": { AllowedChoices: []string{}, BSONFieldName: "defaultacceptedflows", ConvertedName: "DefaultAcceptedFlows", Description: `The truth value that indicates if there are accepted default flows.`, Exposed: true, Name: "defaultAcceptedFlows", Stored: true, Type: "boolean", }, "defaultrejectedflows": { AllowedChoices: []string{}, BSONFieldName: "defaultrejectedflows", ConvertedName: "DefaultRejectedFlows", Description: `The truth value that indicates if there are rejected default flows.`, Exposed: true, Name: "defaultRejectedFlows", Stored: true, Type: "boolean", }, "destinationcontroller": { AllowedChoices: []string{}, BSONFieldName: "destinationcontroller", ConvertedName: "DestinationController", Description: `Name of the remote destination controller if different than the current one.`, Exposed: true, Name: "destinationController", Stored: true, Type: "string", }, "destinationid": { AllowedChoices: []string{}, BSONFieldName: "destinationid", ConvertedName: "DestinationID", Description: `ID of the destination ` + "`" + `GraphNode` + "`" + ` of the edge.`, Exposed: true, Name: "destinationID", Stored: true, Type: "string", }, "destinationtype": { AllowedChoices: []string{"ProcessingUnit", "ExternalNetwork", "Namespace", "Node", "RemoteController"}, BSONFieldName: "destinationtype", ConvertedName: "DestinationType", Description: `Type of the destination ` + "`" + `GraphNode` + "`" + ` of the edge.`, Exposed: true, Name: "destinationType", Stored: true, Type: "enum", }, "details": { AllowedChoices: []string{}, ConvertedName: "Details", Description: `Contains more flow details grouped by their destination protocol/ports.`, Exposed: true, Name: "details", SubType: "graphedgeflowdetails", 
Type: "refMap", }, "encrypted": { AllowedChoices: []string{}, BSONFieldName: "encrypted", ConvertedName: "Encrypted", Description: `The number of encrypted flows in the edge.`, Exposed: true, Name: "encrypted", Stored: true, Type: "boolean", }, "firstseen": { AllowedChoices: []string{}, BSONFieldName: "firstseen", ConvertedName: "FirstSeen", Description: `Contains the date when the edge was first seen.`, Exposed: true, Name: "firstSeen", Stored: true, Type: "time", }, "flowid": { AllowedChoices: []string{}, BSONFieldName: "flowid", ConvertedName: "FlowID", Description: `Identifier of the edge.`, Exposed: true, Name: "flowID", Stored: true, Type: "string", }, "lastseen": { AllowedChoices: []string{}, BSONFieldName: "lastseen", ConvertedName: "LastSeen", Description: `Contains the date when the edge was last seen.`, Exposed: true, Name: "lastSeen", Stored: true, Type: "time", }, "namespace": { AllowedChoices: []string{}, BSONFieldName: "namespace", ConvertedName: "Namespace", Description: `Namespace of the object that reported the flow.`, Exposed: true, Name: "namespace", Stored: true, Type: "string", }, "observedacceptedflows": { AllowedChoices: []string{}, BSONFieldName: "observedacceptedflows", ConvertedName: "ObservedAcceptedFlows", Description: `Number of accepted observed flows.`, Exposed: true, Name: "observedAcceptedFlows", Stored: true, Type: "boolean", }, "observedencrypted": { AllowedChoices: []string{}, BSONFieldName: "observedencrypted", ConvertedName: "ObservedEncrypted", Description: `Number of encrypted observed flows.`, Exposed: true, Name: "observedEncrypted", Stored: true, Type: "boolean", }, "observedrejectedflows": { AllowedChoices: []string{}, BSONFieldName: "observedrejectedflows", ConvertedName: "ObservedRejectedFlows", Description: `Number of rejected observed flows.`, Exposed: true, Name: "observedRejectedFlows", Stored: true, Type: "boolean", }, "rejectedflows": { AllowedChoices: []string{}, BSONFieldName: "rejectedflows", ConvertedName: 
"RejectedFlows", Description: `Number of rejected flows in the edge.`, Exposed: true, Name: "rejectedFlows", Stored: true, Type: "boolean", }, "remotenamespace": { AllowedChoices: []string{}, BSONFieldName: "remotenamespace", ConvertedName: "RemoteNamespace", Description: `Namespace of the object that was targeted by the flow.`, Exposed: true, Name: "remoteNamespace", Stored: true, Type: "string", }, "sourcecontroller": { AllowedChoices: []string{}, BSONFieldName: "sourcecontroller", ConvertedName: "SourceController", Description: `Name of the remote source controller if different than the current one.`, Exposed: true, Name: "sourceController", Stored: true, Type: "string", }, "sourceid": { AllowedChoices: []string{}, BSONFieldName: "sourceid", ConvertedName: "SourceID", Description: `ID of the source ` + "`" + `GraphNode` + "`" + ` of the edge.`, Exposed: true, Name: "sourceID", Stored: true, Type: "string", }, "sourcetype": { AllowedChoices: []string{"ProcessingUnit", "ExternalNetwork", "Namespace", "Node", "RemoteController"}, BSONFieldName: "sourcetype", ConvertedName: "SourceType", Description: `Type of the source ` + "`" + `GraphNode` + "`" + ` of the edge.`, Exposed: true, Name: "sourceType", Stored: true, Type: "enum", }, "zhash": { AllowedChoices: []string{}, Autogenerated: true, BSONFieldName: "zhash", ConvertedName: "ZHash", Description: `geographical hash of the data. This is used for sharding and georedundancy.`, Getter: true, Name: "zHash", ReadOnly: true, Setter: true, Stored: true, Type: "integer", }, "zone": { AllowedChoices: []string{}, Autogenerated: true, BSONFieldName: "zone", ConvertedName: "Zone", Description: `Logical storage zone. Used for sharding.`, Getter: true, Name: "zone", ReadOnly: true, Setter: true, Stored: true, Transient: true, Type: "integer", }, } // SparseGraphEdgesList represents a list of SparseGraphEdges type SparseGraphEdgesList []*SparseGraphEdge // Identity returns the identity of the objects in the list. 
func (o SparseGraphEdgesList) Identity() elemental.Identity { return GraphEdgeIdentity } // Copy returns a pointer to a copy the SparseGraphEdgesList. func (o SparseGraphEdgesList) Copy() elemental.Identifiables { copy := append(SparseGraphEdgesList{}, o...) return &copy } // Append appends the objects to the a new copy of the SparseGraphEdgesList. func (o SparseGraphEdgesList) Append(objects ...elemental.Identifiable) elemental.Identifiables { out := append(SparseGraphEdgesList{}, o...) for _, obj := range objects { out = append(out, obj.(*SparseGraphEdge)) } return out } // List converts the object to an elemental.IdentifiablesList. func (o SparseGraphEdgesList) List() elemental.IdentifiablesList { out := make(elemental.IdentifiablesList, len(o)) for i := 0; i < len(o); i++ { out[i] = o[i] } return out } // DefaultOrder returns the default ordering fields of the content. func (o SparseGraphEdgesList) DefaultOrder() []string { return []string{} } // ToPlain returns the SparseGraphEdgesList converted to GraphEdgesList. func (o SparseGraphEdgesList) ToPlain() elemental.IdentifiablesList { out := make(elemental.IdentifiablesList, len(o)) for i := 0; i < len(o); i++ { out[i] = o[i].ToPlain() } return out } // Version returns the version of the content. func (o SparseGraphEdgesList) Version() int { return 1 } // SparseGraphEdge represents the sparse version of a graphedge. type SparseGraphEdge struct { // DB Identifier of the edge. ID *string `json:"-" msgpack:"-" bson:"-" mapstructure:"-,omitempty"` // Number of accepted flows in the edge. AcceptedFlows *bool `json:"acceptedFlows,omitempty" msgpack:"acceptedFlows,omitempty" bson:"acceptedflows,omitempty" mapstructure:"acceptedFlows,omitempty"` // The date for the day bucket. BucketDay *time.Time `json:"-" msgpack:"-" bson:"bucketday,omitempty" mapstructure:"-,omitempty"` // The date for the hour bucket. 
BucketHour *time.Time `json:"-" msgpack:"-" bson:"buckethour,omitempty" mapstructure:"-,omitempty"` // The date for the minute bucket. BucketMinute *time.Time `json:"-" msgpack:"-" bson:"bucketminute,omitempty" mapstructure:"-,omitempty"` // The date for the month bucket. BucketMonth *time.Time `json:"-" msgpack:"-" bson:"bucketmonth,omitempty" mapstructure:"-,omitempty"` // The truth value that indicates if there are accepted default flows. DefaultAcceptedFlows *bool `json:"defaultAcceptedFlows,omitempty" msgpack:"defaultAcceptedFlows,omitempty" bson:"defaultacceptedflows,omitempty" mapstructure:"defaultAcceptedFlows,omitempty"` // The truth value that indicates if there are rejected default flows. DefaultRejectedFlows *bool `json:"defaultRejectedFlows,omitempty" msgpack:"defaultRejectedFlows,omitempty" bson:"defaultrejectedflows,omitempty" mapstructure:"defaultRejectedFlows,omitempty"` // Name of the remote destination controller if different than the current one. DestinationController *string `json:"destinationController,omitempty" msgpack:"destinationController,omitempty" bson:"destinationcontroller,omitempty" mapstructure:"destinationController,omitempty"` // ID of the destination `GraphNode` of the edge. DestinationID *string `json:"destinationID,omitempty" msgpack:"destinationID,omitempty" bson:"destinationid,omitempty" mapstructure:"destinationID,omitempty"` // Type of the destination `GraphNode` of the edge. DestinationType *GraphEdgeDestinationTypeValue `json:"destinationType,omitempty" msgpack:"destinationType,omitempty" bson:"destinationtype,omitempty" mapstructure:"destinationType,omitempty"` // Contains more flow details grouped by their destination protocol/ports. Details *map[string]*GraphEdgeFlowDetails `json:"details,omitempty" msgpack:"details,omitempty" bson:"-" mapstructure:"details,omitempty"` // The number of encrypted flows in the edge. 
Encrypted *bool `json:"encrypted,omitempty" msgpack:"encrypted,omitempty" bson:"encrypted,omitempty" mapstructure:"encrypted,omitempty"` // Contains the date when the edge was first seen. FirstSeen *time.Time `json:"firstSeen,omitempty" msgpack:"firstSeen,omitempty" bson:"firstseen,omitempty" mapstructure:"firstSeen,omitempty"` // Identifier of the edge. FlowID *string `json:"ID,omitempty" msgpack:"ID,omitempty" bson:"flowid,omitempty" mapstructure:"ID,omitempty"` // Contains the date when the edge was last seen. LastSeen *time.Time `json:"lastSeen,omitempty" msgpack:"lastSeen,omitempty" bson:"lastseen,omitempty" mapstructure:"lastSeen,omitempty"` // Namespace of the object that reported the flow. Namespace *string `json:"namespace,omitempty" msgpack:"namespace,omitempty" bson:"namespace,omitempty" mapstructure:"namespace,omitempty"` // Number of accepted observed flows. ObservedAcceptedFlows *bool `json:"observedAcceptedFlows,omitempty" msgpack:"observedAcceptedFlows,omitempty" bson:"observedacceptedflows,omitempty" mapstructure:"observedAcceptedFlows,omitempty"` // Number of encrypted observed flows. ObservedEncrypted *bool `json:"observedEncrypted,omitempty" msgpack:"observedEncrypted,omitempty" bson:"observedencrypted,omitempty" mapstructure:"observedEncrypted,omitempty"` // Number of rejected observed flows. ObservedRejectedFlows *bool `json:"observedRejectedFlows,omitempty" msgpack:"observedRejectedFlows,omitempty" bson:"observedrejectedflows,omitempty" mapstructure:"observedRejectedFlows,omitempty"` // Number of rejected flows in the edge. RejectedFlows *bool `json:"rejectedFlows,omitempty" msgpack:"rejectedFlows,omitempty" bson:"rejectedflows,omitempty" mapstructure:"rejectedFlows,omitempty"` // Namespace of the object that was targeted by the flow. 
RemoteNamespace *string `json:"remoteNamespace,omitempty" msgpack:"remoteNamespace,omitempty" bson:"remotenamespace,omitempty" mapstructure:"remoteNamespace,omitempty"` // Name of the remote source controller if different than the current one. SourceController *string `json:"sourceController,omitempty" msgpack:"sourceController,omitempty" bson:"sourcecontroller,omitempty" mapstructure:"sourceController,omitempty"` // ID of the source `GraphNode` of the edge. SourceID *string `json:"sourceID,omitempty" msgpack:"sourceID,omitempty" bson:"sourceid,omitempty" mapstructure:"sourceID,omitempty"` // Type of the source `GraphNode` of the edge. SourceType *GraphEdgeSourceTypeValue `json:"sourceType,omitempty" msgpack:"sourceType,omitempty" bson:"sourcetype,omitempty" mapstructure:"sourceType,omitempty"` // geographical hash of the data. This is used for sharding and // georedundancy. ZHash *int `json:"-" msgpack:"-" bson:"zhash,omitempty" mapstructure:"-,omitempty"` // Logical storage zone. Used for sharding. Zone *int `json:"-" msgpack:"-" bson:"zone,omitempty" mapstructure:"-,omitempty"` ModelVersion int `json:"-" msgpack:"-" bson:"_modelversion"` } // NewSparseGraphEdge returns a new SparseGraphEdge. func NewSparseGraphEdge() *SparseGraphEdge { return &SparseGraphEdge{} } // Identity returns the Identity of the sparse object. func (o *SparseGraphEdge) Identity() elemental.Identity { return GraphEdgeIdentity } // Identifier returns the value of the sparse object's unique identifier. func (o *SparseGraphEdge) Identifier() string { if o.ID == nil { return "" } return *o.ID } // SetIdentifier sets the value of the sparse object's unique identifier. func (o *SparseGraphEdge) SetIdentifier(id string) { if id != "" { o.ID = &id } else { o.ID = nil } } // GetBSON implements the bson marshaling interface. // This is used to transparently convert ID to MongoDBID as ObectID. 
func (o *SparseGraphEdge) GetBSON() (interface{}, error) { if o == nil { return nil, nil } s := &mongoAttributesSparseGraphEdge{} if o.ID != nil { s.ID = bson.ObjectIdHex(*o.ID) } if o.AcceptedFlows != nil { s.AcceptedFlows = o.AcceptedFlows } if o.BucketDay != nil { s.BucketDay = o.BucketDay } if o.BucketHour != nil { s.BucketHour = o.BucketHour } if o.BucketMinute != nil { s.BucketMinute = o.BucketMinute } if o.BucketMonth != nil { s.BucketMonth = o.BucketMonth } if o.DefaultAcceptedFlows != nil { s.DefaultAcceptedFlows = o.DefaultAcceptedFlows } if o.DefaultRejectedFlows != nil { s.DefaultRejectedFlows = o.DefaultRejectedFlows } if o.DestinationController != nil { s.DestinationController = o.DestinationController } if o.DestinationID != nil { s.DestinationID = o.DestinationID } if o.DestinationType != nil { s.DestinationType = o.DestinationType } if o.Encrypted != nil { s.Encrypted = o.Encrypted } if o.FirstSeen != nil { s.FirstSeen = o.FirstSeen } if o.FlowID != nil { s.FlowID = o.FlowID } if o.LastSeen != nil { s.LastSeen = o.LastSeen } if o.Namespace != nil { s.Namespace = o.Namespace } if o.ObservedAcceptedFlows != nil { s.ObservedAcceptedFlows = o.ObservedAcceptedFlows } if o.ObservedEncrypted != nil { s.ObservedEncrypted = o.ObservedEncrypted } if o.ObservedRejectedFlows != nil { s.ObservedRejectedFlows = o.ObservedRejectedFlows } if o.RejectedFlows != nil { s.RejectedFlows = o.RejectedFlows } if o.RemoteNamespace != nil { s.RemoteNamespace = o.RemoteNamespace } if o.SourceController != nil { s.SourceController = o.SourceController } if o.SourceID != nil { s.SourceID = o.SourceID } if o.SourceType != nil { s.SourceType = o.SourceType } if o.ZHash != nil { s.ZHash = o.ZHash } if o.Zone != nil { s.Zone = o.Zone } return s, nil } // SetBSON implements the bson marshaling interface. // This is used to transparently convert ID to MongoDBID as ObectID. 
func (o *SparseGraphEdge) SetBSON(raw bson.Raw) error { if o == nil { return nil } s := &mongoAttributesSparseGraphEdge{} if err := raw.Unmarshal(s); err != nil { return err } id := s.ID.Hex() o.ID = &id if s.AcceptedFlows != nil { o.AcceptedFlows = s.AcceptedFlows } if s.BucketDay != nil { o.BucketDay = s.BucketDay } if s.BucketHour != nil { o.BucketHour = s.BucketHour } if s.BucketMinute != nil { o.BucketMinute = s.BucketMinute } if s.BucketMonth != nil { o.BucketMonth = s.BucketMonth } if s.DefaultAcceptedFlows != nil { o.DefaultAcceptedFlows = s.DefaultAcceptedFlows } if s.DefaultRejectedFlows != nil { o.DefaultRejectedFlows = s.DefaultRejectedFlows } if s.DestinationController != nil { o.DestinationController = s.DestinationController } if s.DestinationID != nil { o.DestinationID = s.DestinationID } if s.DestinationType != nil { o.DestinationType = s.DestinationType } if s.Encrypted != nil { o.Encrypted = s.Encrypted } if s.FirstSeen != nil { o.FirstSeen = s.FirstSeen } if s.FlowID != nil { o.FlowID = s.FlowID } if s.LastSeen != nil { o.LastSeen = s.LastSeen } if s.Namespace != nil { o.Namespace = s.Namespace } if s.ObservedAcceptedFlows != nil { o.ObservedAcceptedFlows = s.ObservedAcceptedFlows } if s.ObservedEncrypted != nil { o.ObservedEncrypted = s.ObservedEncrypted } if s.ObservedRejectedFlows != nil { o.ObservedRejectedFlows = s.ObservedRejectedFlows } if s.RejectedFlows != nil { o.RejectedFlows = s.RejectedFlows } if s.RemoteNamespace != nil { o.RemoteNamespace = s.RemoteNamespace } if s.SourceController != nil { o.SourceController = s.SourceController } if s.SourceID != nil { o.SourceID = s.SourceID } if s.SourceType != nil { o.SourceType = s.SourceType } if s.ZHash != nil { o.ZHash = s.ZHash } if s.Zone != nil { o.Zone = s.Zone } return nil } // Version returns the hardcoded version of the model. func (o *SparseGraphEdge) Version() int { return 1 } // ToPlain returns the plain version of the sparse model. 
func (o *SparseGraphEdge) ToPlain() elemental.PlainIdentifiable { out := NewGraphEdge() if o.ID != nil { out.ID = *o.ID } if o.AcceptedFlows != nil { out.AcceptedFlows = *o.AcceptedFlows } if o.BucketDay != nil { out.BucketDay = *o.BucketDay } if o.BucketHour != nil { out.BucketHour = *o.BucketHour } if o.BucketMinute != nil { out.BucketMinute = *o.BucketMinute } if o.BucketMonth != nil { out.BucketMonth = *o.BucketMonth } if o.DefaultAcceptedFlows != nil { out.DefaultAcceptedFlows = *o.DefaultAcceptedFlows } if o.DefaultRejectedFlows != nil { out.DefaultRejectedFlows = *o.DefaultRejectedFlows } if o.DestinationController != nil { out.DestinationController = *o.DestinationController } if o.DestinationID != nil { out.DestinationID = *o.DestinationID } if o.DestinationType != nil { out.DestinationType = *o.DestinationType } if o.Details != nil { out.Details = *o.Details } if o.Encrypted != nil { out.Encrypted = *o.Encrypted } if o.FirstSeen != nil { out.FirstSeen = *o.FirstSeen } if o.FlowID != nil { out.FlowID = *o.FlowID } if o.LastSeen != nil { out.LastSeen = *o.LastSeen } if o.Namespace != nil { out.Namespace = *o.Namespace } if o.ObservedAcceptedFlows != nil { out.ObservedAcceptedFlows = *o.ObservedAcceptedFlows } if o.ObservedEncrypted != nil { out.ObservedEncrypted = *o.ObservedEncrypted } if o.ObservedRejectedFlows != nil { out.ObservedRejectedFlows = *o.ObservedRejectedFlows } if o.RejectedFlows != nil { out.RejectedFlows = *o.RejectedFlows } if o.RemoteNamespace != nil { out.RemoteNamespace = *o.RemoteNamespace } if o.SourceController != nil { out.SourceController = *o.SourceController } if o.SourceID != nil { out.SourceID = *o.SourceID } if o.SourceType != nil { out.SourceType = *o.SourceType } if o.ZHash != nil { out.ZHash = *o.ZHash } if o.Zone != nil { out.Zone = *o.Zone } return out } // GetZHash returns the ZHash of the receiver. 
func (o *SparseGraphEdge) GetZHash() (out int) { if o.ZHash == nil { return } return *o.ZHash } // SetZHash sets the property ZHash of the receiver using the address of the given value. func (o *SparseGraphEdge) SetZHash(zHash int) { o.ZHash = &zHash } // GetZone returns the Zone of the receiver. func (o *SparseGraphEdge) GetZone() (out int) { if o.Zone == nil { return } return *o.Zone } // SetZone sets the property Zone of the receiver using the address of the given value. func (o *SparseGraphEdge) SetZone(zone int) { o.Zone = &zone } // DeepCopy returns a deep copy if the SparseGraphEdge. func (o *SparseGraphEdge) DeepCopy() *SparseGraphEdge { if o == nil { return nil } out := &SparseGraphEdge{} o.DeepCopyInto(out) return out } // DeepCopyInto copies the receiver into the given *SparseGraphEdge. func (o *SparseGraphEdge) DeepCopyInto(out *SparseGraphEdge) { target, err := copystructure.Copy(o) if err != nil { panic(fmt.Sprintf("Unable to deepcopy SparseGraphEdge: %s", err)) } *out = *target.(*SparseGraphEdge) } type mongoAttributesGraphEdge struct { ID bson.ObjectId `bson:"_id,omitempty"` AcceptedFlows bool `bson:"acceptedflows"` BucketDay time.Time `bson:"bucketday"` BucketHour time.Time `bson:"buckethour"` BucketMinute time.Time `bson:"bucketminute"` BucketMonth time.Time `bson:"bucketmonth"` DefaultAcceptedFlows bool `bson:"defaultacceptedflows"` DefaultRejectedFlows bool `bson:"defaultrejectedflows"` DestinationController string `bson:"destinationcontroller,omitempty"` DestinationID string `bson:"destinationid"` DestinationType GraphEdgeDestinationTypeValue `bson:"destinationtype"` Encrypted bool `bson:"encrypted"` FirstSeen time.Time `bson:"firstseen,omitempty"` FlowID string `bson:"flowid"` LastSeen time.Time `bson:"lastseen,omitempty"` Namespace string `bson:"namespace"` ObservedAcceptedFlows bool `bson:"observedacceptedflows"` ObservedEncrypted bool `bson:"observedencrypted"` ObservedRejectedFlows bool `bson:"observedrejectedflows"` RejectedFlows bool 
`bson:"rejectedflows"` RemoteNamespace string `bson:"remotenamespace,omitempty"` SourceController string `bson:"sourcecontroller,omitempty"` SourceID string `bson:"sourceid"` SourceType GraphEdgeSourceTypeValue `bson:"sourcetype"` ZHash int `bson:"zhash"` Zone int `bson:"zone"` } type mongoAttributesSparseGraphEdge struct { ID bson.ObjectId `bson:"_id,omitempty"` AcceptedFlows *bool `bson:"acceptedflows,omitempty"` BucketDay *time.Time `bson:"bucketday,omitempty"` BucketHour *time.Time `bson:"buckethour,omitempty"` BucketMinute *time.Time `bson:"bucketminute,omitempty"` BucketMonth *time.Time `bson:"bucketmonth,omitempty"` DefaultAcceptedFlows *bool `bson:"defaultacceptedflows,omitempty"` DefaultRejectedFlows *bool `bson:"defaultrejectedflows,omitempty"` DestinationController *string `bson:"destinationcontroller,omitempty"` DestinationID *string `bson:"destinationid,omitempty"` DestinationType *GraphEdgeDestinationTypeValue `bson:"destinationtype,omitempty"` Encrypted *bool `bson:"encrypted,omitempty"` FirstSeen *time.Time `bson:"firstseen,omitempty"` FlowID *string `bson:"flowid,omitempty"` LastSeen *time.Time `bson:"lastseen,omitempty"` Namespace *string `bson:"namespace,omitempty"` ObservedAcceptedFlows *bool `bson:"observedacceptedflows,omitempty"` ObservedEncrypted *bool `bson:"observedencrypted,omitempty"` ObservedRejectedFlows *bool `bson:"observedrejectedflows,omitempty"` RejectedFlows *bool `bson:"rejectedflows,omitempty"` RemoteNamespace *string `bson:"remotenamespace,omitempty"` SourceController *string `bson:"sourcecontroller,omitempty"` SourceID *string `bson:"sourceid,omitempty"` SourceType *GraphEdgeSourceTypeValue `bson:"sourcetype,omitempty"` ZHash *int `bson:"zhash,omitempty"` Zone *int `bson:"zone,omitempty"` }
graphedge.go
0.773901
0.509215
graphedge.go
starcoder
package tree import ( "sort" "testing" . "github.com/stretchr/testify/assert" ) // VRaw holds the unsorted value for basic tree tests var VRaw = []interface{}{5, 3, 1, 4, 6, 2} // V holds the sorted value for basic tree tests var V = []interface{}{1, 2, 3, 4, 5, 6} // VLen is the length of V var VLen = len(V) // TreeTest is the base for all tests of trees type TreeTest struct { New func(t *testing.T) Tree } // Run executes all basic tree tests func (tt *TreeTest) Run(t *testing.T) { tt.NewFilledTree(t) tt.TestBasic(t) tt.TestIterator(t) tt.TestChannels(t) tt.TestSlice(t) tt.TestRemove(t) tt.TestClear(t) tt.TestCopy(t) tt.TestContains(t) tt.TestGetSet(t) tt.TestFuncs(t) } // FillTree fills up a given tree with V func (tt *TreeTest) FillTree(t *testing.T, tr Tree) { for i, va := range VRaw { tr.Insert(va) Equal(t, tr.Len(), i+1) vr, ok := tr.Get(va) True(t, ok) Equal(t, vr, va) } Equal(t, tr.Len(), VLen) n, ok := tr.First() True(t, ok) Equal(t, n, V[0]) n, ok = tr.Last() True(t, ok) Equal(t, n, V[VLen-1]) } // NewFilledTree creates a new tree and calls FillTree on it func (tt *TreeTest) NewFilledTree(t *testing.T) Tree { tr := tt.New(t) tt.FillTree(t, tr) return tr } // TestBasic tests basic tree functionality func (tt *TreeTest) TestBasic(t *testing.T) { tr := tt.New(t) Equal(t, tr.Len(), 0) True(t, tr.Empty()) n, ok := tr.First() False(t, ok) Nil(t, n) n, ok = tr.Last() False(t, ok) Nil(t, n) n, ok = tr.Pop() Nil(t, n) False(t, ok) n, ok = tr.Shift() Nil(t, n) False(t, ok) tt.FillTree(t, tr) i := 0 iter := tr.Iter() NotNil(t, iter) for i < VLen { Equal(t, V[i], iter.Get()) i++ iter = iter.Next() if i < VLen { NotNil(t, iter) } else { Nil(t, iter) } } i = VLen - 1 n, ok = tr.Pop() for i > -1 && n != nil { Equal(t, V[i], n) True(t, ok) Equal(t, tr.Len(), i) if i == 0 { True(t, tr.Empty()) } else { False(t, tr.Empty()) } i-- n, ok = tr.Pop() } Equal(t, i, -1) Nil(t, n) False(t, ok) Equal(t, tr.Len(), 0) True(t, tr.Empty()) tt.FillTree(t, tr) i = 0 n, ok = tr.Shift() 
for i < VLen && n != nil { Equal(t, V[i], n) True(t, ok) Equal(t, tr.Len(), VLen-i-1) i++ n, ok = tr.Shift() } Equal(t, i, VLen) Nil(t, n) Equal(t, tr.Len(), 0) } // TestIterator tests tree iterators func (tt *TreeTest) TestIterator(t *testing.T) { // empty iterators tr := tt.New(t) Nil(t, tr.Iter()) Nil(t, tr.IterBack()) // one element tr.Insert(V[0]) iter := tr.Iter() NotNil(t, iter) Equal(t, V[0], iter.Get()) Nil(t, iter.Next()) iter = tr.IterBack() NotNil(t, iter) Equal(t, V[0], iter.Get()) Nil(t, iter.Previous()) // full iterators tr = tt.NewFilledTree(t) i := 0 for iter = tr.Iter(); iter != nil; iter = iter.Next() { Equal(t, iter.Get(), V[i]) i++ } Equal(t, i, VLen) tr = tt.NewFilledTree(t) i = VLen - 1 for iter = tr.IterBack(); iter != nil; iter = iter.Previous() { Equal(t, iter.Get(), V[i]) i-- } Equal(t, i, -1) // iterate in wrong direction iter = tr.Iter() Nil(t, iter.Previous()) iter = tr.IterBack() Nil(t, iter.Next()) // iterate only within the left lane tr = tt.New(t) for i := 6; i > -1; i-- { tr.Insert(i) } iter = tr.Iter() for i := 0; i <= 6; i++ { Equal(t, iter.Get(), i) iter = iter.Next() } Nil(t, iter) // iterate only within the right lane tr = tt.New(t) for i := 0; i < 6; i++ { tr.Insert(i) } iter = tr.Iter() for i := 0; i < 6; i++ { Equal(t, iter.Get(), i) iter = iter.Next() } Nil(t, iter) // full tree testFullTree := func(cV []int) { max := len(cV) - 1 tr = tt.New(t) for _, v := range cV { tr.Insert(v) } sort.Ints(cV) iter = tr.Iter() for _, v := range cV { Equal(t, iter.Get(), v) iter = iter.Next() } Nil(t, iter) // traverse back and forth iter = tr.Iter() i = 0 for iter.Get() != max { Equal(t, iter.Get(), cV[i]) iter = iter.Next() i++ } for iter.Get() != 0 { Equal(t, iter.Get(), cV[i]) iter = iter.Previous() i-- } for iter.Get() != max { Equal(t, iter.Get(), cV[i]) iter = iter.Next() i++ } Nil(t, iter.Next()) iter = tr.IterBack() i = len(cV) - 1 for iter.Get() != 0 { Equal(t, iter.Get(), cV[i]) iter = iter.Previous() i-- } for iter.Get() != 
max { Equal(t, iter.Get(), cV[i]) iter = iter.Next() i++ } for iter.Get() != 0 { Equal(t, iter.Get(), cV[i]) iter = iter.Previous() i-- } Nil(t, iter.Previous()) } testFullTree([]int{7, 3, 2, 0, 1, 5, 4, 6, 11, 9, 8, 10, 13, 12, 14}) testFullTree([]int{8, 3, 1, 0, 2, 6, 5, 4, 7, 13, 10, 9, 11, 12, 15, 14, 16}) testFullTree([]int{5, 1, 0, 4, 3, 2}) // change direction in the middle of the tree tr = tt.NewFilledTree(t) iter = tr.Iter() for i := 1; i <= 3; i++ { Equal(t, iter.Get(), V[i-1]) iter = iter.Next() } Equal(t, iter.Get(), V[3]) for i := 4; i > 0; i-- { Equal(t, iter.Get(), V[i-1]) iter = iter.Previous() } Nil(t, iter) iter = tr.IterBack() for i := 6; i > 3; i-- { Equal(t, iter.Get(), V[i-1]) iter = iter.Previous() } Equal(t, iter.Get(), V[2]) for i := 3; i <= 6; i++ { Equal(t, iter.Get(), V[i-1]) iter = iter.Next() } Nil(t, iter) } // TestChannels tests tree channels func (tt *TreeTest) TestChannels(t *testing.T) { // empty channels tr := tt.New(t) i := 0 for v := range tr.Chan(0) { Equal(t, v, V[i]) i++ } Equal(t, i, 0) i = 0 for v := range tr.ChanBack(0) { Equal(t, v, V[i]) i++ } Equal(t, i, 0) // one element tr.Insert(1) i = 0 for v := range tr.Chan(0) { Equal(t, v, V[i]) i++ } Equal(t, i, 1) i = 0 for v := range tr.ChanBack(0) { Equal(t, v, V[i]) i++ } Equal(t, i, 1) // full iterators tr = tt.NewFilledTree(t) i = 0 for v := range tr.Chan(0) { Equal(t, v, V[i]) i++ } Equal(t, i, VLen) i = VLen - 1 for v := range tr.ChanBack(0) { Equal(t, v, V[i]) i-- } Equal(t, i, -1) } // TestSlice tests converting the tree to slice func (tt *TreeTest) TestSlice(t *testing.T) { tr := tt.New(t) Equal(t, tr.Slice(), []interface{}{}) tt.FillTree(t, tr) Equal(t, tr.Slice(), V) tr.Shift() Equal(t, tr.Slice(), V[1:]) tr.Pop() Equal(t, tr.Slice(), V[1:len(V)-1]) } // TestRemove tests some remove methods func (tt *TreeTest) TestRemove(t *testing.T) { tr := tt.NewFilledTree(t) // remove leaf v, ok := tr.Remove(4) True(t, ok) Equal(t, v, 4) Equal(t, tr.Slice(), []interface{}{1, 2, 
3, 5, 6}) // remove parent with left child v, ok = tr.Remove(3) True(t, ok) Equal(t, v, 3) Equal(t, tr.Slice(), []interface{}{1, 2, 5, 6}) // remove parent with right child v, ok = tr.Remove(1) True(t, ok) Equal(t, v, 1) Equal(t, tr.Slice(), []interface{}{2, 5, 6}) // remove parent with both childs v, ok = tr.Remove(5) True(t, ok) Equal(t, v, 5) Equal(t, tr.Slice(), []interface{}{2, 6}) // remove last v, ok = tr.Remove(2) True(t, ok) Equal(t, v, 2) Equal(t, tr.Slice(), []interface{}{6}) v, ok = tr.Remove(6) True(t, ok) Equal(t, v, 6) Equal(t, tr.Slice(), []interface{}{}) // remove nothing v, ok = tr.Remove(-100) False(t, ok) v, ok = tr.Remove(100) False(t, ok) tr = tt.New(t) v, ok = tr.Remove(-100) False(t, ok) v, ok = tr.Remove(100) False(t, ok) tr = tt.NewFilledTree(t) v, ok = tr.Remove(-100) False(t, ok) v, ok = tr.Remove(100) False(t, ok) // prepare special cases tr = tt.New(t) for _, v := range []interface{}{4, 1, 3, 2, 5, 6, 12, 10, 7, 8, 9, 11} { tr.Insert(v) } // remove right child with left child v, ok = tr.Remove(3) True(t, ok) Equal(t, v, 3) Equal(t, tr.Slice(), []interface{}{1, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12}) // remove right child with right child v, ok = tr.Remove(6) True(t, ok) Equal(t, v, 6) Equal(t, tr.Slice(), []interface{}{1, 2, 4, 5, 7, 8, 9, 10, 11, 12}) // remove with two children put removed right children at the end of left right children v, ok = tr.Remove(10) True(t, ok) Equal(t, v, 10) Equal(t, tr.Slice(), []interface{}{1, 2, 4, 5, 7, 8, 9, 11, 12}) } // TestClear tests clearing the list func (tt *TreeTest) TestClear(t *testing.T) { tr := tt.NewFilledTree(t) tr.Clear() Equal(t, tr.Len(), 0) n, ok := tr.First() False(t, ok) Nil(t, n) n, ok = tr.Last() False(t, ok) Nil(t, n) n, ok = tr.Pop() Nil(t, n) False(t, ok) } // TestCopy tests copying a list func (tt *TreeTest) TestCopy(t *testing.T) { l1 := tt.NewFilledTree(t) l2 := l1.Copy() Equal(t, l1.Len(), l2.Len()) n1 := l1.Iter() NotNil(t, n1) n2 := l2.Iter() NotNil(t, n2) if n1 != nil && n2 
!= nil { for { Equal(t, n1.Get(), n2.Get()) n1 = n1.Next() n2 = n2.Next() if (n1 == nil && n2 != nil) || (n1 != nil && n2 == nil) { Fail(t, "n1 not equal to n2") } if n1 == nil { break } } } } // TestContains tests contains methods func (tt *TreeTest) TestContains(t *testing.T) { tr := tt.New(t) for _, vi := range V { ok := tr.Contains(vi) Equal(t, ok, false) } tr = tt.NewFilledTree(t) for _, vi := range V { ok := tr.Contains(vi) Equal(t, ok, true) } } // TestGetSet tests getters and setters func (tt *TreeTest) TestGetSet(t *testing.T) { tr := tt.New(t) for i := range V { n, ok := tr.Get(V[i]) False(t, ok) Nil(t, n) ok = tr.Set(V[i], i+10) False(t, ok) n, ok = tr.Get(i + 10) False(t, ok) Nil(t, n) } tt.FillTree(t, tr) for i := range V { n, ok := tr.Get(V[i]) True(t, ok) Equal(t, n, V[i]) ok = tr.Set(V[i], i+10) True(t, ok) n, ok = tr.Get(i + 10) True(t, ok) Equal(t, n, i+10) } } // TestFuncs tests all methods with functions as parameters func (tt *TreeTest) TestFuncs(t *testing.T) { tr := tt.NewFilledTree(t) n, ok := tr.GetFunc(func(v interface{}) bool { return v == 2 }) Equal(t, V[1], n) True(t, ok) n, ok = tr.GetFunc(func(v interface{}) bool { return v == 100 }) Nil(t, nil) False(t, ok) True(t, tr.SetFunc(func(v interface{}) bool { return v == 4 }, 99)) Equal(t, tr.Slice(), []interface{}{1, 2, 3, 5, 6, 99}) False(t, tr.SetFunc(func(v interface{}) bool { return v == 100 }, 100)) Equal(t, tr.Slice(), []interface{}{1, 2, 3, 5, 6, 99}) }
tree/treeTest.go
0.611846
0.683378
treeTest.go
starcoder
package math // CartesianToSpherical converts 3-dimensional cartesian coordinates (x,y,z) to // spherical coordinates with radius r, inclination theta, and azimuth phi. func CartesianToSpherical(coord Vec3) (r, theta, phi float32) { r = coord.Len() theta = Acos(coord[2] / r) phi = Atan2(coord[1], coord[0]) return } // SphericalToCartesian converts spherical coordinates with radius r, // inclination theta, and azimuth phi to cartesian coordinates (x,y,z). func SphericalToCartesian(r, theta, phi float32) Vec3 { st, ct := Sincos(theta) sp, cp := Sincos(phi) return Vec3{r * st * cp, r * st * sp, r * ct} } // CartesianToCylindrical converts 3-dimensional cartesian coordinates (x,y,z) // to cylindrical coordinates with radial distance r, azimuth phi, and height z. func CartesianToCylindrical(coord Vec3) (rho, phi, z float32) { rho = Hypot(coord[0], coord[1]) phi = Atan2(coord[1], coord[0]) z = coord[2] return } // CylindricalToCartesian converts cylindrical coordinates with radial distance // r, azimuth phi, and height z to cartesian coordinates (x,y,z). func CylindricalToCartesian(rho, phi, z float32) Vec3 { s, c := Sincos(phi) return Vec3{rho * c, rho * s, z} } // SphericalToCylindrical converts spherical coordinates with radius r, // inclination theta, and azimuth phi to cylindrical coordinates with radial // distance r, azimuth phi, and height z. func SphericalToCylindrical(r, theta, phi float32) (rho, phi2, z float32) { s, c := Sincos(theta) rho = r * s z = r * c phi2 = phi return } // CylindricalToSpherical converts cylindrical coordinates with radial distance // r, azimuth phi, and height z to spherical coordinates with radius r, // inclination theta, and azimuth phi. 
func CylindricalToSpherical(rho, phi, z float32) (r, theta, phi2 float32) { r = Hypot(rho, z) phi2 = phi theta = Atan2(rho, z) return } // DegToRad converts degrees to radians func DegToRad(angle float32) float32 { return angle * Pi / 180 } // RadToDeg converts radians to degrees func RadToDeg(angle float32) float32 { return angle * 180 / Pi }
math/conv.go
0.907021
0.894283
conv.go
starcoder
package uitext // StarSep provides a 75 character star separator for terminal output const StarSep = "*****************************************************************" // DashSep provides a 75 character dash separator for terminal output const DashSep = "-----------------------------------------------------------------" // ConfirmNewWritePrompt provides a basic text prompt asking for confirmation of writing new file to disk const ConfirmNewWritePrompt = "Are you sure you want to write the new file to disk? (y/n)" // ConfirmOverwritePrompt provides a basic text prompt asking for confirmation of overwriting a files on disk const ConfirmOverwritePrompt = "Are you sure you want to overwrite the file with new content? This cannot be reversed. (y/n)" // RootUse is the usage description for the pufctl command const RootUse = "pufctl" // RootShort is the short description for the pufctl command const RootShort = "pufctl is a multitool for Puppetfiles" // RootLong is the long description for the pufctl command const RootLong = ` A tool for doing everything you need to with Puppetfiles right from the command line.` // ShowUse is the usage description for the pufctl show command const ShowUse = "show" // ShowShort is the short description for the pufctl show command const ShowShort = "show prints a sorted and organized Puppetfile to screen" // ShowLong is the long description for the pufctl show command const ShowLong = ` The pufctl show command prints a sorted Puppetfile to screen. As pufctl is an opinionated tool, pufctl show gives you a look at how all pufctl commands will organize your Puppetfile, should you decide to save any results. 
` // AddUse is the usage description for the pufctl add command const AddUse = "add [type]" // AddShort is the short description for the pufctl add command const AddShort = "add new content to the Puppetfile" // AddLong is the long description for the pufctl add command const AddLong = ` The pufctl add command adds new content such as modules and metadata to the Puppetfile` // AddModuleUse is the usage description for the pufctl add module command const AddModuleUse = "module [modulename]" // AddModuleShort is the short description for the pufctl add module command const AddModuleShort = "module adds a new module to the Puppetfile" // AddModuleLong is the long description for the pufctl add module command const AddModuleLong = ` The pufctl add module command adds a new module to the Puppetfile. By just specifying a name, the module will be added with the format "mod '<name>'". In order to resolve the modules dependencies, the module name must be in slug format (<namespace>-<modulename>) if it is a Forge module. You can also specify a URL to a git repository to add non-Forge modules. Flags can be passed to add properties and metadata to the module entry. ` // AddMetaUse is the usage description for the pufctl add meta command const AddMetaUse = "meta [<modulename>|top|bottom]" // AddMetaShort is the short description for the pufctl add meta command const AddMetaShort = "meta adds new metadata to a module or the top/bottom comment block" // AddMetaLong is the long description for the pufctl add meta command const AddMetaLong = ` The pufctl add meta command allows you to add metadata to a Puppetfile programatically. This metadata can be a regular comment or a meta tag with optional data. To add a module comment of metadata to a module, the first argument should be the module name as it appears in the Puppetfile, such as "puppetlabs-apache". To add a top-block comment or metadata to the top block, the first argument should be "top". 
To add a bottom-block comment or metadata to the bottom block, the first argument should be "bottom". Use the --metadata (-m) flag to specify the metadata you want to add. You can specify multiple metadata statements / comments at a time by putting them in a comma-separated list. Metadata and comments should be single-quoted. Examples: $ pufctl add meta puppetlabs-apache -m '# @maintainer: puppetlabs' $ pufctl add meta top -m '# Production Puppetfile' ` // DiffUse is the usage description for the pufctl diff command const DiffUse = "diff [Puppetfile] [<optional> Puppetfile]" // DiffShort is the short description for the pufctl diff command const DiffShort = "diff finds the difference between two Puppetfiles" // DiffLong is the long description for the pufctl diff command const DiffLong = ` The pufctl diff command compares Puppetfiles at the parsed object level to find differences in them. You must supply the path to at least one Puppetfile. If only one Puppetfile is specified, the configured default Puppetfile will be used as the first Puppetfile in the comparison. Meaningful exit codes have been added to the diff command to assist with programatic implementations of this command (such as use in CI/CD systems). Exit Codes: 0: No difference between the Puppetfiles 3: Only the first Puppetfile has differences 4: Only the second Puppetfile has differences 5: Differences in both Puppetfiles ` // SearchUse is the usage description for the pufctl search command const SearchUse = "search [subcommand]" // SearchShort is the short description for the pufctl search command const SearchShort = "search operations for all things Puppetfile related" // SearchLong is the long description of the pufctl search command const SearchLong = ` The pufctl search command allows you to search for all things Puppetfile realted. 
Read the descriptions of the subcommands for more information.` // ForgeSearchUse is the usage description of the pufctl search forge command const ForgeSearchUse = "forge [query]" // ForgeSearchShort is the short description of the pufctl search forge command const ForgeSearchShort = "forge allows you to search for modules on the Puppet Forge" // ForgeSearchLong is the long description of the pufctl search forge command const ForgeSearchLong = ` The pufctl search forge command allows you to search for modules on the Puppet Forge. Search queries are passed as command args, and the search results can be fine-tuned with flags. ` // BumpUse is the usage description of the pufctl bump command const BumpUse = "bump [module]..." // BumpShort is the short desccription of the pufctl bump command const BumpShort = "bump the semantic version of a module" // BumpLong is the long description of the pufctl bump command const BumpLong = ` The pufctl bump command allows you "bump" (increment by 1) a module's semantic version, if it has one. You must use valid module name slugs, <org>-<module name>, as command arguments. The command will look for a semver in the modules properties, specifically the :ref and :tag symbols. The command works with both regular semver strings, as well as semver strings that have a leading "v". By default, pufctl bump increments the Patch portion of the module's semver (x.y.Z). Which portion of the semver gets bumped can be changed with flags. ` // EditUse is the usage description of the pufctl edit command const EditUse = "edit [subcommand]" //EditShort is the short description of the pufctl edit command const EditShort = "edit objects in a Puppetfile" //EditLong is the long description of the pufctl edit command const EditLong = ` The pufctl edit command allows you to edit objects in a Puppetfile such as modules, metadata, and the Forge declaration. 
Read the descriptions of the subcommands for more details.` // EditModuleUse is the usage description of the pufctl edit module command const EditModuleUse = "module [name]" // EditModuleShort is the short description of the pufctl edit module command const EditModuleShort = "edit module name/properties" // EditModuleLong is the long description of the pufctl edit module command const EditModuleLong = ` The pufctl edit module command allows you to edit a specific module in the Puppetfile. You can use the --name (-n) flag to edit a module's name. You can use the --key (-k) flag to specify the Module property you would like to edit. When using the --key (-k) flag, you should specify Module properties as strings with the form ':key=>value'. The --key (-k) flag accepts multiple values as a comma-separated string, ex. --key ':key=>value',':key=>value',':key=>value'. If you attempt to edit a Module property that doesn't exist, that property will be created. If you want to edit a "bare" property (i.e. :latest or single version string), your --key (-k) flag string would look like this: --key 'bare=>value'. Since bare values are mutually exclusive, a bare value will overwrite all other properties of the Module. ` // CompletionUse is the usage description for the pufctl completion command const CompletionUse = "completion [bash|zsh|powershell]" // CompletionShort is the short description of the pufctl completion command const CompletionShort = "completion generates a completion script" // CompletionLong is the long description of the pufctl completion command const CompletionLong = ` The completion command generates scripts that provide tab completions for all Pufctl commands. 
To load completions: Bash: $ source <(pufctl completion bash) To load completions for each session, execute once: Linux: $ pufctl completion bash > /etc/bash_completion.d/pufctl MacOS: $ pufctl completion bash > /usr/local/etc/bash_completion.d/pufctl Zsh: If shell completion is not already enabled in your environment you will need to enable it. You can execute the following once: $ echo "autoload -U compinit; compinit" >> ~/.zshrc To load completions for each session, execute once: $ pufctl completion zsh > "${fpath[1]}/_pufctl" You will need to start a new shell for this setup to take effect. PowerShell: PS > pufctl.exe completion powershell | Out-File pufctl-completion.ps1 Add the following line to your $PROFILE: . C:\path\to\where\you\saved\pufctl-completion.ps1 You will need to close and reopen PowerShell for it to take effect. ` // DocGenUse is the usage description of the pufctl docgen command const DocGenUse = "docgen" // DocGenShort is the short description of the pufctl docgen command const DocGenShort = "docgen generates pufctl documentation" // DocGenLong is the long description of the pufctl docgen command const DocGenLong = ` Generate pufctl documentation in markdown. The documentation will be saved to a directory specified by the docgen_path config option. ` // ConfGenUse is the usage description of the pufctl confgen command const ConfGenUse = "confgen" // ConfGenShort is the short description of the pufctl confgen command const ConfGenShort = "confgen generates a default config file" // ConfGenLong is the long description of the pufctl confgen command const ConfGenLong = ` Generate a default pufctl config file. You can use the --out-file flag to specify where the config file will be generated. If --out-file is not used, the config file will be generated at $HOME/.pufctl.yaml. 
` // NewUse is the usage description of the pufctl new command const NewUse = "new [subcommand]" // NewShort is the the short description of the pufctl new command const NewShort = "new allows you to create new Puppetfiles or .fixtures.yml files" // NewLong is the long description of the pufctl new command const NewLong = ` Create a new Puppetfile or .fixtures.yml file. NOT CURRENTLY IMPLEMENTED ` // NewFixturesUse is the usage description of the pufctl new fixtures command const NewFixturesUse = "fixtures [nameslug|URL]" // NewFixturesShort is the short description of the pufctl new fixtures command const NewFixturesShort = "fixtures allows you to create a new .fixtures.yml file" // NewFixturesLong is the long description of the pufctl new fixtures command const NewFixturesLong = ` Create a new .fixtures.yml file from a module name slug (<org>-<module>) or a Git URL. If you pass in a Git URL, the module must have a metadata.json file so that the dependencies can be properly parsed. NOT CURRENTLY IMPLEMENTED `
internal/uitext/uitext.go
0.728652
0.446133
uitext.go
starcoder
package geo

import (
	"math"

	"github.com/edwindvinas/bleve/numeric"
)

// GeoBits is the number of bits used for a single geo point
// Currently this is 32bits for lon and 32bits for lat
var GeoBits uint = 32

var minLon = -180.0
var minLat = -90.0
var geoTolerance = 1E-6

// lonScale/latScale map the full lon/lat range onto the GeoBits-wide
// unsigned integer range used by the morton encoding below.
var lonScale = float64((uint64(0x1)<<GeoBits)-1) / 360.0
var latScale = float64((uint64(0x1)<<GeoBits)-1) / 180.0

// MortonHash computes the morton hash value for the provided geo point
// This point is ordered as lon, lat.
func MortonHash(lon, lat float64) uint64 {
	return numeric.Interleave(scaleLon(lon), scaleLat(lat))
}

// scaleLon maps lon from [-180,180] to the unsigned integer range.
// No range checking is performed; out-of-range input wraps via the
// float-to-uint conversion.
func scaleLon(lon float64) uint64 {
	rv := uint64((lon - minLon) * lonScale)
	return rv
}

// scaleLat maps lat from [-90,90] to the unsigned integer range.
func scaleLat(lat float64) uint64 {
	rv := uint64((lat - minLat) * latScale)
	return rv
}

// MortonUnhashLon extracts the longitude value from the provided morton hash.
func MortonUnhashLon(hash uint64) float64 {
	return unscaleLon(numeric.Deinterleave(hash))
}

// MortonUnhashLat extracts the latitude value from the provided morton hash.
// The hash is shifted by one so Deinterleave picks up the odd (lat) bits.
func MortonUnhashLat(hash uint64) float64 {
	return unscaleLat(numeric.Deinterleave(hash >> 1))
}

// unscaleLon is the inverse of scaleLon (lossy: quantized to GeoBits).
func unscaleLon(lon uint64) float64 {
	return (float64(lon) / lonScale) + minLon
}

// unscaleLat is the inverse of scaleLat (lossy: quantized to GeoBits).
func unscaleLat(lat uint64) float64 {
	return (float64(lat) / latScale) + minLat
}

// compareGeo will compare two float values and see if they are the same
// taking into consideration a known geo tolerance.
// Returns 0 when |a-b| <= geoTolerance, otherwise the signed difference a-b.
func compareGeo(a, b float64) float64 {
	compare := a - b
	if math.Abs(compare) <= geoTolerance {
		return 0
	}
	return compare
}

// RectIntersects checks whether rectangles a and b intersect
func RectIntersects(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY float64) bool {
	return !(aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY)
}

// RectWithin checks whether box a is within box b
func RectWithin(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY float64) bool {
	rv := !(aMinX < bMinX || aMinY < bMinY || aMaxX > bMaxX || aMaxY > bMaxY)
	return rv
}

// BoundingBoxContains checks whether the lon/lat point is within the box
// (inclusive on all edges, subject to geoTolerance via compareGeo).
func BoundingBoxContains(lon, lat, minLon, minLat, maxLon, maxLat float64) bool {
	return compareGeo(lon, minLon) >= 0 && compareGeo(lon, maxLon) <= 0 &&
		compareGeo(lat, minLat) >= 0 && compareGeo(lat, maxLat) <= 0
}

// ComputeBoundingBox will compute a bounding box around the provided point
// which surrounds a circle of the provided radius (in meters).
// It projects the radius along the four cardinal bearings (N/E/S/W) and
// normalizes the resulting edges back into valid lon/lat ranges.
func ComputeBoundingBox(centerLon, centerLat, radius float64) (upperLeftLon float64, upperLeftLat float64, lowerRightLon float64, lowerRightLat float64) {
	_, tlat := pointFromLonLatBearing(centerLon, centerLat, 0, radius)
	rlon, _ := pointFromLonLatBearing(centerLon, centerLat, 90, radius)
	_, blat := pointFromLonLatBearing(centerLon, centerLat, 180, radius)
	llon, _ := pointFromLonLatBearing(centerLon, centerLat, 270, radius)
	return normalizeLon(llon), normalizeLat(tlat), normalizeLon(rlon), normalizeLat(blat)
}

// Ellipsoid constants; the semi-major axis and flattening values match the
// WGS84 reference ellipsoid.
const degreesToRadian = math.Pi / 180
const radiansToDegrees = 180 / math.Pi
const flattening = 1.0 / 298.257223563
const semiMajorAxis = 6378137
const semiMinorAxis = semiMajorAxis * (1.0 - flattening)
const semiMajorAxis2 = semiMajorAxis * semiMajorAxis
const semiMinorAxis2 = semiMinorAxis * semiMinorAxis

// DegreesToRadians converts an angle in degrees to radians
func DegreesToRadians(d float64) float64 {
	return d * degreesToRadian
}

// RadiansToDegrees converts an angle in radians to degress
func RadiansToDegrees(r float64) float64 {
	return r * radiansToDegrees
}

// pointFromLonLatBearing starts that the provide lon,lat
// then moves in the bearing direction (in degrees)
// this move continues for the provided distance (in meters)
// The lon, lat of this destination location is returned.
// NOTE(review): this appears to implement Vincenty's direct geodesic
// formula on the ellipsoid above — confirm against Vincenty (1975) before
// relying on the naming.
func pointFromLonLatBearing(lon, lat, bearing, dist float64) (float64, float64) {
	alpha1 := DegreesToRadians(bearing)
	cosA1 := math.Cos(alpha1)
	sinA1 := math.Sin(alpha1)
	tanU1 := (1 - flattening) * math.Tan(DegreesToRadians(lat))
	cosU1 := 1 / math.Sqrt(1+tanU1*tanU1)
	sinU1 := tanU1 * cosU1

	sig1 := math.Atan2(tanU1, cosA1)
	sinAlpha := cosU1 * sinA1
	cosSqAlpha := 1 - sinAlpha*sinAlpha
	uSq := cosSqAlpha * (semiMajorAxis2 - semiMinorAxis2) / semiMinorAxis2
	A := 1 + uSq/16384*(4096+uSq*(-768+uSq*(320-175*uSq)))
	B := uSq / 1024 * (256 + uSq*(-128+uSq*(74-47*uSq)))

	// First estimate of the angular distance on the sphere.
	sigma := dist / (semiMinorAxis * A)

	cos25SigmaM := math.Cos(2*sig1 + sigma)
	sinSigma := math.Sin(sigma)
	cosSigma := math.Cos(sigma)
	deltaSigma := B * sinSigma * (cos25SigmaM + (B/4)*
		(cosSigma*(-1+2*cos25SigmaM*cos25SigmaM)-(B/6)*cos25SigmaM*
			(-1+4*sinSigma*sinSigma)*(-3+4*cos25SigmaM*cos25SigmaM)))
	sigmaP := sigma
	sigma = dist/(semiMinorAxis*A) + deltaSigma

	// Iterate sigma until it converges (tolerance 1e-12 radians).
	for math.Abs(sigma-sigmaP) > 1E-12 {
		cos25SigmaM = math.Cos(2*sig1 + sigma)
		sinSigma = math.Sin(sigma)
		cosSigma = math.Cos(sigma)
		deltaSigma = B * sinSigma * (cos25SigmaM + (B/4)*
			(cosSigma*(-1+2*cos25SigmaM*cos25SigmaM)-(B/6)*cos25SigmaM*
				(-1+4*sinSigma*sinSigma)*(-3+4*cos25SigmaM*cos25SigmaM)))
		sigmaP = sigma
		sigma = dist/(semiMinorAxis*A) + deltaSigma
	}

	tmp := sinU1*sinSigma - cosU1*cosSigma*cosA1
	lat2 := math.Atan2(sinU1*cosSigma+cosU1*sinSigma*cosA1,
		(1-flattening)*math.Sqrt(sinAlpha*sinAlpha+tmp*tmp))
	lamda := math.Atan2(sinSigma*sinA1, cosU1*cosSigma-sinU1*sinSigma*cosA1)
	c := flattening / 16 * cosSqAlpha * (4 + flattening*(4-3*cosSqAlpha))
	lam := lamda - (1-c)*flattening*sinAlpha*
		(sigma+c*sinSigma*(cos25SigmaM+c*cosSigma*(-1+2*cos25SigmaM*cos25SigmaM)))
	rvlon := lon + RadiansToDegrees(lam)
	rvlat := RadiansToDegrees(lat2)

	return rvlon, rvlat
}

// normalizeLon normalizes a longitude value within the -180 to 180 range
func normalizeLon(lonDeg float64) float64 {
	if lonDeg >= -180 && lonDeg <= 180 {
		return lonDeg
	}

	off := math.Mod(lonDeg+180, 360)
	if off < 0 {
		return 180 + off
	} else if off == 0 && lonDeg > 0 {
		return 180
	}
	return -180 + off
}

// normalizeLat normalizes a latitude value within the -90 to 90 range
func normalizeLat(latDeg float64) float64 {
	if latDeg >= -90 && latDeg <= 90 {
		return latDeg
	}
	off := math.Abs(math.Mod(latDeg+90, 360))
	if off <= 180 {
		return off - 90
	}
	return (360 - off) - 90
}
geo/geo.go
0.781164
0.54583
geo.go
starcoder
package levels

import (
	"math"

	mgl "github.com/go-gl/mathgl/mgl32"

	"github.com/inkyblackness/hacked/editor/graphics"
	"github.com/inkyblackness/hacked/editor/render"
	"github.com/inkyblackness/hacked/ss1/content/archive/level"
	"github.com/inkyblackness/hacked/ui/opengl"
)

var mapTexturesVertexShaderSource = `
#version 150
precision mediump float;

in vec3 vertexPosition;

uniform mat4 modelMatrix;
uniform mat4 viewMatrix;
uniform mat4 projectionMatrix;
uniform mat4 uvMatrix;

out vec2 uv;

void main(void) {
	gl_Position = projectionMatrix * viewMatrix * modelMatrix * vec4(vertexPosition, 1.0);
	uv = (uvMatrix * vec4(vertexPosition, 1.0)).xy;
}
`

var mapTexturesFragmentShaderSource = `
#version 150
precision mediump float;

in vec2 uv;

uniform sampler2D palette;
uniform sampler2D bitmap;

out vec4 fragColor;

void main(void) {
	vec4 pixel = texture(bitmap, uv);
	fragColor = texture(palette, vec2(pixel.r, 0.5));
}
`

// TextureQuery is a getter function to retrieve the texture for the given
// level texture index.
type TextureQuery func(index level.TextureIndex) (*graphics.BitmapTexture, error)

// TileTextureQuery is a getter function to retrieve properties for rendering a texture of a tile.
type TileTextureQuery func(pos level.TilePosition) (tileType level.TileType, textureIndex level.TextureIndex, textureRotations int)

// MapTextures is a renderable for textures.
type MapTextures struct {
	context *render.Context

	program                 uint32
	vao                     *opengl.VertexArrayObject
	vertexPositionBuffer    uint32
	vertexPositionAttrib    int32
	modelMatrixUniform      opengl.Matrix4Uniform
	viewMatrixUniform       opengl.Matrix4Uniform
	projectionMatrixUniform opengl.Matrix4Uniform
	uvMatrixUniform         opengl.Matrix4Uniform
	paletteUniform          int32
	bitmapUniform           int32

	textureQuery TextureQuery
	// lastTileType caches which vertex set is currently uploaded to the
	// vertex buffer, so renderTileType can skip redundant BufferData calls.
	lastTileType level.TileType
}

// tileTypeVertices holds the triangle vertices used to draw one tile type,
// plus the tile type whose geometry is equivalent (used as the cache key).
type tileTypeVertices struct {
	vertices       []float32
	equivalentType level.TileType
}

var tileTypeVerticesLookup []tileTypeVertices
var uvRotations [4]*mgl.Mat4

func init() {
	// Pre-compute the four UV transforms (one per 90-degree rotation step),
	// rotating around the texture center and flipping the V axis.
	for i := 0; i < 4; i++ {
		matrix := mgl.Translate3D(0.5, 0.5, 0.0).
			Mul4(mgl.HomogRotate3DZ(math.Pi * float32(i) / -2.0)).
			Mul4(mgl.Translate3D(-0.5, -0.5, 0.0)).
			Mul4(mgl.Scale3D(1.0, -1.0, 1.0))
		uvRotations[i] = &matrix
	}

	// Build the per-tile-type vertex lookup. Diagonal tile types use a
	// single triangle; every other non-solid type falls through to the
	// default full quad (two triangles).
	for _, tileType := range level.TileTypes() {
		for int(tileType) >= len(tileTypeVerticesLookup) {
			tileTypeVerticesLookup = append(tileTypeVerticesLookup, tileTypeVertices{equivalentType: level.TileTypeSolid})
		}
		switch tileType {
		case level.TileTypeSolid:
			// Solid tiles are never drawn; keep the zero-value entry.
		case level.TileTypeDiagonalOpenNorthEast:
			tileTypeVerticesLookup[int(tileType)] = tileTypeVertices{
				vertices: []float32{
					0.0, 1.0, 0.0,
					1.0, 1.0, 0.0,
					1.0, 0.0, 0.0,
				},
				equivalentType: level.TileTypeDiagonalOpenNorthEast,
			}
		case level.TileTypeDiagonalOpenNorthWest:
			tileTypeVerticesLookup[int(tileType)] = tileTypeVertices{
				vertices: []float32{
					0.0, 1.0, 0.0,
					1.0, 1.0, 0.0,
					0.0, 0.0, 0.0,
				},
				equivalentType: level.TileTypeDiagonalOpenNorthWest,
			}
		case level.TileTypeDiagonalOpenSouthEast:
			tileTypeVerticesLookup[int(tileType)] = tileTypeVertices{
				vertices: []float32{
					1.0, 1.0, 0.0,
					1.0, 0.0, 0.0,
					0.0, 0.0, 0.0,
				},
				equivalentType: level.TileTypeDiagonalOpenSouthEast,
			}
		case level.TileTypeDiagonalOpenSouthWest:
			tileTypeVerticesLookup[int(tileType)] = tileTypeVertices{
				vertices: []float32{
					0.0, 1.0, 0.0,
					1.0, 0.0, 0.0,
					0.0, 0.0, 0.0,
				},
				equivalentType: level.TileTypeDiagonalOpenSouthWest,
			}
		default:
			tileTypeVerticesLookup[int(tileType)] = tileTypeVertices{
				vertices: []float32{
					0.0, 0.0, 0.0,
					1.0, 0.0, 0.0,
					1.0, 1.0, 0.0,

					1.0, 1.0, 0.0,
					0.0, 1.0, 0.0,
					0.0, 0.0, 0.0,
				},
				equivalentType: level.TileTypeOpen,
			}
		}
	}
}

// NewMapTextures returns a new instance of a renderable for tile map textures.
// Panics if the shader program cannot be linked.
func NewMapTextures(context *render.Context, textureQuery TextureQuery) *MapTextures {
	gl := context.OpenGL
	program, programErr := opengl.LinkNewStandardProgram(gl, mapTexturesVertexShaderSource, mapTexturesFragmentShaderSource)

	if programErr != nil {
		panic(opengl.NamedShaderError{Name: "MapTexturesShader", Nested: programErr})
	}
	renderable := &MapTextures{
		context:                 context,
		program:                 program,
		vao:                     opengl.NewVertexArrayObject(gl, program),
		vertexPositionBuffer:    gl.GenBuffers(1)[0],
		vertexPositionAttrib:    gl.GetAttribLocation(program, "vertexPosition"),
		modelMatrixUniform:      opengl.Matrix4Uniform(gl.GetUniformLocation(program, "modelMatrix")),
		viewMatrixUniform:       opengl.Matrix4Uniform(gl.GetUniformLocation(program, "viewMatrix")),
		projectionMatrixUniform: opengl.Matrix4Uniform(gl.GetUniformLocation(program, "projectionMatrix")),
		uvMatrixUniform:         opengl.Matrix4Uniform(gl.GetUniformLocation(program, "uvMatrix")),
		paletteUniform:          gl.GetUniformLocation(program, "palette"),
		bitmapUniform:           gl.GetUniformLocation(program, "bitmap"),
		textureQuery:            textureQuery,
		lastTileType:            level.TileTypeSolid,
	}
	renderable.vao.WithSetter(func(gl opengl.OpenGL) {
		gl.EnableVertexAttribArray(uint32(renderable.vertexPositionAttrib))
		gl.BindBuffer(opengl.ARRAY_BUFFER, renderable.vertexPositionBuffer)
		gl.VertexAttribOffset(uint32(renderable.vertexPositionAttrib), 3, opengl.FLOAT, false, 0, 0)
		gl.BindBuffer(opengl.ARRAY_BUFFER, 0)
	})

	return renderable
}

// Dispose releases any internal resources.
func (renderable *MapTextures) Dispose() {
	gl := renderable.context.OpenGL
	renderable.vao.Dispose()
	gl.DeleteProgram(renderable.program)
	gl.DeleteBuffers([]uint32{renderable.vertexPositionBuffer})
}

// Render renders the textures.
// It walks the columns x rows tile grid, skipping solid tiles and tiles
// without a resolvable texture, and draws each remaining tile with its
// texture rotation applied through the UV matrix.
func (renderable *MapTextures) Render(columns, rows int, tileTextureQuery TileTextureQuery, paletteTexture *graphics.PaletteTexture) {
	gl := renderable.context.OpenGL

	renderable.vao.OnShader(func() {
		renderable.viewMatrixUniform.Set(gl, renderable.context.ViewMatrix)
		renderable.projectionMatrixUniform.Set(gl, &renderable.context.ProjectionMatrix)

		// Texture unit 0: palette; unit 1: the per-tile bitmap (bound in the loop).
		textureUnit := int32(0)
		gl.ActiveTexture(opengl.TEXTURE0 + uint32(textureUnit))
		gl.BindTexture(opengl.TEXTURE_2D, paletteTexture.Handle())
		gl.Uniform1i(renderable.paletteUniform, textureUnit)

		textureUnit = 1
		gl.ActiveTexture(opengl.TEXTURE0 + uint32(textureUnit))
		gl.Uniform1i(renderable.bitmapUniform, textureUnit)

		scaling := mgl.Scale3D(level.FineCoordinatesPerTileSide, level.FineCoordinatesPerTileSide, 1.0)
		for y := 0; y < rows; y++ {
			for x := 0; x < columns; x++ {
				tileType, textureIndex, textureRotations := tileTextureQuery(level.TilePosition{X: byte(x), Y: byte(y)})
				if tileType == level.TileTypeSolid {
					continue
				}
				// Tiles whose texture cannot be resolved are skipped silently.
				texture, _ := renderable.textureQuery(textureIndex)
				if texture == nil {
					continue
				}
				modelMatrix := mgl.Translate3D(float32(x)*level.FineCoordinatesPerTileSide, float32(y)*level.FineCoordinatesPerTileSide, 0.0).
					Mul4(scaling)
				uvMatrix := uvRotations[textureRotations]

				renderable.uvMatrixUniform.Set(gl, uvMatrix)
				renderable.modelMatrixUniform.Set(gl, &modelMatrix)
				gl.BindTexture(opengl.TEXTURE_2D, texture.Handle())
				renderable.renderTileType(tileType)
			}
		}
		gl.BindTexture(opengl.TEXTURE_2D, 0)
	})
}

// renderTileType draws the triangles for the given tile type, re-uploading
// the vertex buffer only when the geometry differs from the previous call.
func (renderable *MapTextures) renderTileType(tileType level.TileType) {
	info := tileTypeVerticesLookup[int(tileType)]
	vertexCount := len(info.vertices) / 3
	gl := renderable.context.OpenGL
	if info.equivalentType != renderable.lastTileType {
		renderable.lastTileType = info.equivalentType
		gl.BindBuffer(opengl.ARRAY_BUFFER, renderable.vertexPositionBuffer)
		gl.BufferData(opengl.ARRAY_BUFFER, len(info.vertices)*4, info.vertices, opengl.STATIC_DRAW)
		gl.BindBuffer(opengl.ARRAY_BUFFER, 0)
	}
	gl.DrawArrays(opengl.TRIANGLES, 0, int32(vertexCount))
}
0.727879
0.527195
MapTextures.go
starcoder
package cellular import ( "bytes" "encoding/base64" "encoding/json" //"fmt" "log" "math/rand" //"time" ) // Field represents a two-dimensional field of cells. type Field struct { s [][]uint8 w, h int } // Neighborhood represents the total color counts in the surrounding cells. func NewNeighborhood() map[uint8]uint8 { return map[uint8]uint8{ 1: 0, // Red 2: 0, // Green 3: 0, // Blue } } // WhoEatsMe returns the color that consumes the specified color, if nearby. // If will only return a 0 if it passes through all of the cases. func WhoEatsMe(myColor uint8) uint8 { switch myColor { case 1: return 3 case 2: return 1 case 3: return 2 } return 0 } // Abundance the color(s), which corresponds to a value [1-3], // and the number of counted neighbors of that color [0-8]. // Since there could be a tie for the highest count, // multiple colors are returned in an array. func Abundance(neighborhood map[uint8]uint8) ([]uint8, uint8) { var greatColor []uint8 var greatValue uint8 for k, v := range neighborhood { if v == greatValue { greatColor = append(greatColor, k) continue } if v > greatValue { greatColor = []uint8{k} greatValue = v } } return greatColor, greatValue } // NewField returns an empty field of the specified width and height. func NewField(w, h int) *Field { s := make([][]uint8, h) for i := range s { s[i] = make([]uint8, w) } return &Field{s: s, w: w, h: h} } // Set sets the state of the specified cell to the given value. func (f *Field) Set(x, y int, b uint8) { f.s[y][x] = b } // WhatsIs reports the number that is at the specified cell. // NOTE: THIS WRAPS AROUND THE MAP. func (f *Field) WhatIs(x, y int) uint8 { //x += f.w //x %= f.w //y += f.h //y %= f.h if (x < 0) || (y < 0) || (x >= f.w) || (y >= f.h) { return 0 } return f.s[y][x] } // Next returns the state of the specified cell at the next time step. func (f *Field) Next(x, y int) uint8 { // Count the adjacent cells that are alive. 
n := NewNeighborhood() for i := -1; i <= 1; i++ { for j := -1; j <= 1; j++ { if j != 0 || i != 0 { switch f.WhatIs(x+i, y+j) { case 1: n[1]++ case 2: n[2]++ case 3: n[3]++ } } } } // Return next state according to the game rules: me := f.WhatIs(x, y) // Rules if you are an empty square. Contested squares are not filled. if me == 0 { greatColors, greatValue := Abundance(n) if ((greatValue == 3) || (greatValue == 5)) && len(greatColors) == 1 { return greatColors[0] } else { return 0 } } // Find out what color you are, and what color your enemy is. enemy := WhoEatsMe(me) if n[enemy] >= 5 { return enemy } totalN := n[1] + n[2] + n[3] if totalN > 5 { return 0 } if n[enemy] >= 2 { return enemy } // If no enemies are nearby, and you have 2 or 3 allies, you stay. if (n[me] == 3) || (n[me] == 2) { return me } // If an enemy is nearby, then it will consume your square. if n[enemy] == 1 { return enemy } return 0 } // Life stores the state of a round of Conway's Game of Life. type Life struct { a, b *Field w, h int } // NewLife returns a new Life game state with a random initial state. func NewLife(w, h int) *Life { a := NewField(w, h) for i := 0; i < (w * h / 1); i++ { a.Set(rand.Intn(w), rand.Intn(h), uint8(rand.Intn(4))) } return &Life{ a: a, b: NewField(w, h), w: w, h: h, } } // Step advances the game by one instant, recomputing and updating all cells. func (l *Life) Step() { // Update the state of the next field (b) from the current field (a). for y := 0; y < l.h; y++ { for x := 0; x < l.w; x++ { l.b.Set(x, y, l.a.Next(x, y)) } } // Swap fields a and b. l.a, l.b = l.b, l.a } // String returns the game board as a string. 
func (l *Life) String() string { var buf bytes.Buffer for y := 0; y < l.h; y++ { for x := 0; x < l.w; x++ { b := ' ' // █ // ░ // ▒ // ▓ //b = '💙' //b = '💚' //b = '💗' switch l.a.WhatIs(x, y) { case 1: b = '▓' case 2: b = '▒' case 3: b = '█' } buf.WriteRune(b) } buf.WriteByte('\n') } return buf.String() } type GridState struct { GridState string } // encodeFieldData converts the field of cells into an array, then encodes // them in base64. Then, it is encapsulated in a json message called // "GridState". The JSON is returned in the form of a byte array. func (f *Field) encodeFieldData() []byte { arr := []byte{} for _, v := range f.s { arr = append(arr, v...) } b64 := base64.StdEncoding.EncodeToString(arr) msg, err := json.Marshal(GridState{b64}) if err != nil { log.Println(err) return []byte{} } return msg } // LifeStateMessage returns an encoded Json message, ready to be sent. func (l *Life) LifeStateMessage() []byte { return l.a.encodeFieldData() } // AlterAt changes the value at a specific position of the field. func (l *Life) AlterAt(x, y int, val uint8) { if (x < 0) || (y < 0) || (x >= l.w) || (y >= l.h) || (val > 3) { return } l.a.Set(x, y, val) } /* func main() { iters := 100 l := NewLife(48, 48) for i := 0; i < iters; i++ { l.Step() fmt.Print("\x0c") // Clear screen and print field. x := l.a.encodeFieldData() fmt.Println(len(x)) fmt.Println(string(x)[:100]) fmt.Print(l) time.Sleep(time.Second / 30) } } */ //░ // ▒ // ▓
cellular/special-cells.go
0.696268
0.415729
special-cells.go
starcoder
package eval import ( "math/big" "golang.org/x/xerrors" "github.com/mmcloughlin/ec3/efd/op3/ast" "github.com/mmcloughlin/ec3/internal/errutil" ) type Evaluator struct { state map[ast.Variable]*big.Int m *big.Int } // NewEvaluator builds an evaluator using arithmetic modulo m. func NewEvaluator(m *big.Int) *Evaluator { return &Evaluator{ state: make(map[ast.Variable]*big.Int), m: m, } } // Load the variable v. func (e *Evaluator) Load(v ast.Variable) (*big.Int, bool) { x, ok := e.state[v] return x, ok } // Store x into the variable v. func (e *Evaluator) Store(v ast.Variable, x *big.Int) { e.state[v] = x } // Initialize the variable v to x. Errors if v is already defined. func (e *Evaluator) Initialize(v ast.Variable, x *big.Int) error { if _, ok := e.Load(v); ok { return xerrors.Errorf("variable %q is already defined", v) } e.Store(v, x) return nil } // Execute the program p. func (e *Evaluator) Execute(p *ast.Program) error { for _, a := range p.Assignments { if err := e.assignment(a); err != nil { return err } } return nil } func (e *Evaluator) assignment(a ast.Assignment) error { lhs := e.dst(a.LHS) switch expr := a.RHS.(type) { case ast.Pow: x, err := e.operands(expr.X, expr.N) if err != nil { return err } lhs.Exp(x[0], x[1], e.m) case ast.Inv: x, err := e.operand(expr.X) if err != nil { return err } lhs.ModInverse(x, e.m) case ast.Mul: x, err := e.operands(expr.X, expr.Y) if err != nil { return err } lhs.Mul(x[0], x[1]) case ast.Neg: x, err := e.operand(expr.X) if err != nil { return err } lhs.Neg(x) case ast.Add: x, err := e.operands(expr.X, expr.Y) if err != nil { return err } lhs.Add(x[0], x[1]) case ast.Sub: x, err := e.operands(expr.X, expr.Y) if err != nil { return err } lhs.Sub(x[0], x[1]) case ast.Cond: x, err := e.operands(expr.X, expr.C) if err != nil { return err } if x[1].Sign() != 0 { lhs.Set(x[0]) } case ast.Variable, ast.Constant: x, err := e.operand(expr) if err != nil { return err } lhs.Set(x) default: return errutil.UnexpectedType(expr) } 
lhs.Mod(lhs, e.m) return nil } func (e Evaluator) dst(v ast.Variable) *big.Int { if x, ok := e.Load(v); ok { return x } x := new(big.Int) e.Store(v, x) return x } func (e *Evaluator) operands(operands ...ast.Operand) ([]*big.Int, error) { xs := make([]*big.Int, 0, len(operands)) for _, operand := range operands { x, err := e.operand(operand) if err != nil { return nil, err } xs = append(xs, x) } return xs, nil } func (e *Evaluator) operand(operand ast.Operand) (*big.Int, error) { switch op := operand.(type) { case ast.Variable: x, ok := e.Load(op) if !ok { return nil, xerrors.Errorf("variable %q is not defined", op) } return x, nil case ast.Constant: return new(big.Int).SetUint64(uint64(op)), nil default: return nil, errutil.UnexpectedType(op) } }
efd/op3/eval/eval.go
0.605682
0.441553
eval.go
starcoder
package neo4j import ( "fmt" "time" ) // Date represents a date value, without a time zone and time related components. type Date struct { epochDays int64 } // LocalTime represents a local time value. type LocalTime struct { nanosOfDay time.Duration } // OffsetTime represents a time value with a UTC offset. type OffsetTime struct { nanosOfDay time.Duration offset int } // LocalDateTime represents a local date time value, without a time zone. type LocalDateTime struct { sec int64 nsec int } // Duration represents temporal amount containing months, days, seconds and nanoseconds. type Duration struct { months int64 days int64 seconds int64 nanos int } const ( nanosPerDay int64 = 24 * int64(time.Hour) ) var ( epochUtc = time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC) ) // DateOf creates a local date from the provided instant by extracting year, month and day fields. func DateOf(of time.Time) Date { ofUtc := time.Date(of.Year(), of.Month(), of.Day(), 0, 0, 0, 0, time.UTC) diffHours := ofUtc.Sub(epochUtc).Hours() epochDays := diffHours / 24 return Date{epochDays: int64(epochDays)} } // Time converts the local date to a time instant with fields other than year, month and day set to 0. // Returned time's location is time.UTC. func (date Date) Time() time.Time { return epochUtc.Add(time.Duration(date.epochDays) * time.Duration(24) * time.Hour) } // Year returns the year component of this instance. func (date Date) Year() int { return date.Time().Year() } // Month returns the month component of this instance. func (date Date) Month() time.Month { return date.Time().Month() } // Day returns the day component of this instance. func (date Date) Day() int { return date.Time().Day() } // String returns the string representation of this Date in ISO-8601 compliant form. func (date Date) String() string { return date.Time().Format("2006-01-02") } // LocalTimeOf creates a local time from the provided instant by extracting hour, minute, second and nanosecond // fields. 
func LocalTimeOf(of time.Time) LocalTime { nanosOfDay := time.Duration(of.Hour())*time.Hour + time.Duration(of.Minute())*time.Minute + time.Duration(of.Second())*time.Second + time.Duration(of.Nanosecond()) return LocalTime{nanosOfDay} } // Time converts the local time to a time instant with fields other than hour, minute, second and nanosecond // set to 0. Returned time's location is time.Local. func (localTime LocalTime) Time() time.Time { return time.Date(0, 0, 0, 0, 0, 0, 0, time.Local).Add(localTime.nanosOfDay) } // Hour returns the hour component of this instance. func (localTime LocalTime) Hour() int { return localTime.Time().Hour() } // Minute returns the minute component of this instance. func (localTime LocalTime) Minute() int { return localTime.Time().Minute() } // Second returns the second component of this instance. func (localTime LocalTime) Second() int { return localTime.Time().Second() } // Nanosecond returns the nanosecond component of this instance. func (localTime LocalTime) Nanosecond() int { return localTime.Time().Nanosecond() } // String returns the string representation of this LocalTime in ISO-8601 compliant form. func (localTime LocalTime) String() string { return localTime.Time().Format("15:04:05.999999999") } // OffsetTimeOf creates an offset time from the provided instant by extracting hour, minute, second and nanosecond // fields and it's zone offset. func OffsetTimeOf(of time.Time) OffsetTime { nanosOfDay := time.Duration(of.Hour())*time.Hour + time.Duration(of.Minute())*time.Minute + time.Duration(of.Second())*time.Second + time.Duration(of.Nanosecond()) _, offset := of.Zone() return OffsetTime{nanosOfDay, offset} } // Time converts the offset time to a time instant with fields other than hour, minute, second and nanosecond // set to 0. Returned time's location is a fixed zone with name 'Offset' and corresponding offset value. 
func (offsetTime OffsetTime) Time() time.Time { year, month, day := time.Now().Date() return time.Date(year, month, day, 0, 0, 0, 0, time.FixedZone("Offset", offsetTime.offset)).Add(offsetTime.nanosOfDay) } // Hour returns the hour component of this instance. func (offsetTime OffsetTime) Hour() int { return offsetTime.Time().Hour() } // Minute returns the minute component of this instance. func (offsetTime OffsetTime) Minute() int { return offsetTime.Time().Minute() } // Second returns the second component of this instance. func (offsetTime OffsetTime) Second() int { return offsetTime.Time().Second() } // Nanosecond returns the nanosecond component of this instance. func (offsetTime OffsetTime) Nanosecond() int { return offsetTime.Time().Nanosecond() } // Offset returns the offset of this instance in seconds. func (offsetTime OffsetTime) Offset() int { return offsetTime.offset } // String returns the string representation of this OffsetTime in ISO-8601 compliant form. func (offsetTime OffsetTime) String() string { return offsetTime.Time().Format("15:04:05.999999999Z07:00") } // LocalDateTimeOf creates an local date time from the provided instant by extracting its temporal fields. func LocalDateTimeOf(of time.Time) LocalDateTime { utc := time.Date(of.Year(), of.Month(), of.Day(), of.Hour(), of.Minute(), of.Second(), of.Nanosecond(), time.UTC) return LocalDateTime{utc.Unix(), utc.Nanosecond()} } // Time converts the local date time to a corresponding time instant. // Returned time's location is time.UTC. func (localDateTime LocalDateTime) Time() time.Time { return time.Date(1970, 1, 1, 0, 0, 0, 0, time.UTC).Add(time.Second*time.Duration(localDateTime.sec) + time.Duration(localDateTime.nsec)) } // Year returns the year component of this instance. func (localDateTime LocalDateTime) Year() int { return localDateTime.Time().Year() } // Month returns the month component of this instance. 
func (localDateTime LocalDateTime) Month() time.Month { return localDateTime.Time().Month() } // Day returns the day component of this instance. func (localDateTime LocalDateTime) Day() int { return localDateTime.Time().Day() } // Hour returns the hour component of this instance. func (localDateTime LocalDateTime) Hour() int { return localDateTime.Time().Hour() } // Minute returns the minute component of this instance. func (localDateTime LocalDateTime) Minute() int { return localDateTime.Time().Minute() } // Second returns the second component of this instance. func (localDateTime LocalDateTime) Second() int { return localDateTime.Time().Second() } // Nanosecond returns the nanosecond component of this instance. func (localDateTime LocalDateTime) Nanosecond() int { return localDateTime.Time().Nanosecond() } // String returns the string representation of this LocalDateTime in ISO-8601 compliant form. func (localDateTime LocalDateTime) String() string { return localDateTime.Time().Format("2006-01-02T15:04:05.999999999") } // DurationOf creates a Duration with provided temporal fields. func DurationOf(months int64, days int64, seconds int64, nanos int) Duration { return Duration{months, days, seconds, nanos} } // Months returns the number of months in this duration. func (duration Duration) Months() int64 { return duration.months } // Days returns the number of days in this duration. func (duration Duration) Days() int64 { return duration.days } // Seconds returns the number of seconds in this duration. func (duration Duration) Seconds() int64 { return duration.seconds } // Nanos returns the number of nanoseconds in this duration. func (duration Duration) Nanos() int { return duration.nanos } // String returns the string representation of this Duration in ISO-8601 compliant form. 
func (duration Duration) String() string { sign := "" if duration.seconds < 0 && duration.nanos > 0 { duration.seconds++ duration.nanos = int(time.Second) - duration.nanos if duration.seconds == 0 { sign = "-" } } timePart := "" if duration.nanos == 0 { timePart = fmt.Sprintf("%s%d", sign, duration.seconds) } else { timePart = fmt.Sprintf("%s%d.%09d", sign, duration.seconds, duration.nanos) } return fmt.Sprintf("P%dM%dDT%sS", duration.months, duration.days, timePart) }
neo4j/temporaltypes.go
0.932476
0.516108
temporaltypes.go
starcoder
package trader // CurrencyInformation contains all the information relevant to a currency type CurrencyInformation struct { // Number is the iso 4217 number of the currency Number uint // Places is the number of places after decimal separator Places int // FullName is the full name of the currency FullName string // Countries is a list of country names using said currency Countries []string } // Verify returns whether a currency is valid according to the ISO 4217 func (c CurrencyCode) Verify() bool { _, ok := validCurrencies[c.format()] return ok } // Information returns the information about said currency (see CurrencyInformation) // If said currency doesn't exist, nil is retured. (Might want to call Verify() first) func (c CurrencyCode) Information() *CurrencyInformation { new, ok := validCurrencies[c.format()] if !ok { return nil } return &new } // ValidCurrencies according to the ISO 4217, though it is recommended // to pass through Verify() or Information() to access this map. // This was parsed from https://en.wikipedia.org/wiki/ISO_4217#cite_note-ReferenceA-6 func ValidCurrencies() map[CurrencyCode]CurrencyInformation { return validCurrencies } // Unofficial currencies have an iso 4217 number of 0 (BTC, XBT, ETH) var ( validCurrencies = map[CurrencyCode]CurrencyInformation{ "AED": CurrencyInformation{784, 2, "United Arab Emirates dirham", []string{"United Arab Emirates"}}, "AFN": CurrencyInformation{971, 2, "Afghan afghani", []string{"Afghanistan"}}, "ALL": CurrencyInformation{8, 2, "Albanian lek", []string{"Albania"}}, "AMD": CurrencyInformation{51, 2, "Armenian dram", []string{"Armenia"}}, "ANG": CurrencyInformation{532, 2, "Netherlands Antillean guilder", []string{"Curaçao (CW)", "Sint Maarten (SX)"}}, "AOA": CurrencyInformation{973, 2, "Angolan kwanza", []string{"Angola"}}, "ARS": CurrencyInformation{32, 2, "Argentine peso", []string{"Argentina"}}, "AUD": CurrencyInformation{36, 2, "Australian dollar", []string{"Australia", "Christmas Island (CX)", 
"Cocos (Keeling) Islands (CC)", "Heard Island and McDonald Islands (HM)", "Kiribati (KI)", "Nauru (NR)", "Norfolk Island (NF)", "Tuvalu (TV)", "Australian Antarctic Territory"}}, "AWG": CurrencyInformation{533, 2, "Aruban florin", []string{"Aruba"}}, "AZN": CurrencyInformation{944, 2, "Azerbaijani manat", []string{"Azerbaijan"}}, "BAM": CurrencyInformation{977, 2, "Bosnia and Herzegovina convertible mark", []string{"Bosnia and Herzegovina"}}, "BBD": CurrencyInformation{52, 2, "Barbados dollar", []string{"Barbados"}}, "BDT": CurrencyInformation{50, 2, "Bangladeshi taka", []string{"Bangladesh"}}, "BGN": CurrencyInformation{975, 2, "Bulgarian lev", []string{"Bulgaria"}}, "BHD": CurrencyInformation{48, 3, "Bahraini dinar", []string{"Bahrain"}}, "BIF": CurrencyInformation{108, 0, "Burundian franc", []string{"Burundi"}}, "BMD": CurrencyInformation{60, 2, "Bermudian dollar", []string{"Bermuda"}}, "BND": CurrencyInformation{96, 2, "Brunei dollar", []string{"Brunei", "auxiliary in Singapore (SG)"}}, "BOB": CurrencyInformation{68, 2, "Boliviano", []string{"Bolivia"}}, "BOV": CurrencyInformation{984, 2, "Bolivian Mvdol (funds code)", []string{"Bolivia"}}, "BRL": CurrencyInformation{986, 2, "Brazilian real", []string{"Brazil"}}, "BSD": CurrencyInformation{44, 2, "Bahamian dollar", []string{"Bahamas"}}, "BTC": CurrencyInformation{0, 8, "Bitcoin", []string{}}, "XBT": CurrencyInformation{0, 8, "Bitcoin", []string{}}, "BTN": CurrencyInformation{64, 2, "Bhutanese ngultrum", []string{"Bhutan"}}, "BWP": CurrencyInformation{72, 2, "Botswana pula", []string{"Botswana"}}, "BYN": CurrencyInformation{933, 2, "Belarusian ruble", []string{"Belarus"}}, "BYR": CurrencyInformation{974, 0, "Belarusian ruble", []string{"Belarus"}}, "BZD": CurrencyInformation{84, 2, "Belize dollar", []string{"Belize"}}, "CAD": CurrencyInformation{124, 2, "Canadian dollar", []string{"Canada"}}, "CDF": CurrencyInformation{976, 2, "Congolese franc", []string{"Democratic Republic of the Congo"}}, "CHE": 
CurrencyInformation{947, 2, "WIR Euro (complementary currency)", []string{"Switzerland"}}, "CHF": CurrencyInformation{756, 2, "Swiss franc", []string{"Switzerland", "Liechtenstein (LI)"}}, "CHW": CurrencyInformation{948, 2, "WIR Franc (complementary currency)", []string{"Switzerland"}}, "CLF": CurrencyInformation{990, 4, "Unidad de Fomento (funds code)", []string{"Chile"}}, "CLP": CurrencyInformation{152, 0, "Chilean peso", []string{"Chile"}}, "CNY": CurrencyInformation{156, 2, "Chinese yuan", []string{"China"}}, "COP": CurrencyInformation{170, 2, "Colombian peso", []string{"Colombia"}}, "COU": CurrencyInformation{970, 2, "Unidad de Valor Real (UVR) (funds code)", []string{"Colombia"}}, "CRC": CurrencyInformation{188, 2, "Costa Rican colon", []string{"Costa Rica"}}, "CUC": CurrencyInformation{931, 2, "Cuban convertible peso", []string{"Cuba"}}, "CUP": CurrencyInformation{192, 2, "Cuban peso", []string{"Cuba"}}, "CVE": CurrencyInformation{132, 0, "Cape Verde escudo", []string{"Cape Verde"}}, "CZK": CurrencyInformation{203, 2, "Czech koruna", []string{"Czech Republic"}}, "DJF": CurrencyInformation{262, 0, "Djiboutian franc", []string{"Djibouti"}}, "DKK": CurrencyInformation{208, 2, "Danish krone", []string{"Denmark", "Faroe Islands (FO)", "Greenland (GL)"}}, "DOP": CurrencyInformation{214, 2, "Dominican peso", []string{"Dominican Republic"}}, "DZD": CurrencyInformation{12, 2, "Algerian dinar", []string{"Algeria"}}, "EGP": CurrencyInformation{818, 2, "Egyptian pound", []string{"Egypt", "auxiliary in Gaza Strip"}}, "ERN": CurrencyInformation{232, 2, "Eritrean nakfa", []string{"Eritrea"}}, "ETB": CurrencyInformation{230, 2, "Ethiopian birr", []string{"Ethiopia"}}, "ETH": CurrencyInformation{0, 2, "Ether", []string{}}, "EUR": CurrencyInformation{978, 2, "Euro", []string{"Akrotiri and Dhekelia", "Andorra (AD)", "Austria (AT)", "Belgium (BE)", "Cyprus (CY)", "Estonia (EE)", "Finland (FI)", "France (FR)", "Germany (DE)", "Greece (GR)", "Guadeloupe (GP)", "Ireland (IE)", 
"Italy (IT)", "Kosovo", "Latvia (LV)", "Lithuania (LT)", "Luxembourg (LU)", "Malta (MT)", "Martinique (MQ)", "Mayotte (YT)", "Monaco (MC)", "Montenegro (ME)", "Netherlands (NL)", "Portugal (PT)", "Réunion (RE)", "Saint Barthélemy (BL)", "Saint Pierre and Miquelon (PM)", "San Marino (SM)", "Slovakia (SK)", "Slovenia (SI)", "Spain (ES)", "Vatican City (VA); see Eurozone"}}, "FJD": CurrencyInformation{242, 2, "Fiji dollar", []string{"Fiji"}}, "FKP": CurrencyInformation{238, 2, "Falkland Islands pound", []string{"Falkland Islands (pegged to GBP 1:1)"}}, "GBP": CurrencyInformation{826, 2, "Pound sterling", []string{"United Kingdom", "the Isle of Man (IM", "see Manx pound)", "Jersey (JE", "see Jersey pound)", "Guernsey (GG", "see Guernsey pound)", "South Georgia and the South Sandwich Islands (GS)", "British Indian Ocean Territory (IO) (also uses USD)", "Tristan da Cunha (SH-TA)", "and British Antarctic Territory"}}, "GEL": CurrencyInformation{981, 2, "Georgian lari", []string{"Georgia (except Abkhazia (GE-AB) and South Ossetia)"}}, "GHS": CurrencyInformation{936, 2, "Ghanaian cedi", []string{"Ghana"}}, "GIP": CurrencyInformation{292, 2, "Gibraltar pound", []string{"Gibraltar (pegged to GBP 1:1)"}}, "GMD": CurrencyInformation{270, 2, "Gambian dalasi", []string{"Gambia"}}, "GNF": CurrencyInformation{324, 0, "Guinean franc", []string{"Guinea"}}, "GTQ": CurrencyInformation{320, 2, "Guatemalan quetzal", []string{"Guatemala"}}, "GYD": CurrencyInformation{328, 2, "Guyanese dollar", []string{"Guyana"}}, "HKD": CurrencyInformation{344, 2, "Hong Kong dollar", []string{"Hong Kong", "Macao (MO)"}}, "HNL": CurrencyInformation{340, 2, "Honduran lempira", []string{"Honduras"}}, "HRK": CurrencyInformation{191, 2, "Croatian kuna", []string{"Croatia"}}, "HTG": CurrencyInformation{332, 2, "Haitian gourde", []string{"Haiti"}}, "HUF": CurrencyInformation{348, 2, "Hungarian forint", []string{"Hungary"}}, "IDR": CurrencyInformation{360, 2, "Indonesian rupiah", []string{"Indonesia"}}, "ILS": 
CurrencyInformation{376, 2, "Israeli new shekel", []string{"Israel", "State of Palestine (PS)"}}, "INR": CurrencyInformation{356, 2, "Indian rupee", []string{"India", "Bhutan", "Nepal", "Zimbabwe"}}, "IQD": CurrencyInformation{368, 3, "Iraqi dinar", []string{"Iraq"}}, "IRR": CurrencyInformation{364, 2, "Iranian rial", []string{"Iran"}}, "ISK": CurrencyInformation{352, 0, "Icelandic króna", []string{"Iceland"}}, "JMD": CurrencyInformation{388, 2, "Jamaican dollar", []string{"Jamaica"}}, "JOD": CurrencyInformation{400, 3, "Jordanian dinar", []string{"Jordan", "auxiliary in West Bank"}}, "JPY": CurrencyInformation{392, 0, "Japanese yen", []string{"Japan"}}, "KES": CurrencyInformation{404, 2, "Kenyan shilling", []string{"Kenya"}}, "KGS": CurrencyInformation{417, 2, "Kyrgyzstani som", []string{"Kyrgyzstan"}}, "KHR": CurrencyInformation{116, 2, "Cambodian riel", []string{"Cambodia"}}, "KMF": CurrencyInformation{174, 0, "Comoro franc", []string{"Comoros"}}, "KPW": CurrencyInformation{408, 2, "North Korean won", []string{"North Korea"}}, "KRW": CurrencyInformation{410, 0, "South Korean won", []string{"South Korea"}}, "KWD": CurrencyInformation{414, 3, "Kuwaiti dinar", []string{"Kuwait"}}, "KYD": CurrencyInformation{136, 2, "Cayman Islands dollar", []string{"Cayman Islands"}}, "KZT": CurrencyInformation{398, 2, "Kazakhstani tenge", []string{"Kazakhstan"}}, "LAK": CurrencyInformation{418, 2, "Lao kip", []string{"Laos"}}, "LBP": CurrencyInformation{422, 2, "Lebanese pound", []string{"Lebanon"}}, "LKR": CurrencyInformation{144, 2, "Sri Lankan rupee", []string{"Sri Lanka"}}, "LRD": CurrencyInformation{430, 2, "Liberian dollar", []string{"Liberia"}}, "LSL": CurrencyInformation{426, 2, "Lesotho loti", []string{"Lesotho"}}, "LYD": CurrencyInformation{434, 3, "Libyan dinar", []string{"Libya"}}, "MAD": CurrencyInformation{504, 2, "Moroccan dirham", []string{"Morocco"}}, "MDL": CurrencyInformation{498, 2, "Moldovan leu", []string{"Moldova (except Transnistria)"}}, "MGA": 
CurrencyInformation{969, 1, "Malagasy ariary", []string{"Madagascar"}}, "MKD": CurrencyInformation{807, 2, "Macedonian denar", []string{"Macedonia"}}, "MMK": CurrencyInformation{104, 2, "Myanmar kyat", []string{"Myanmar"}}, "MNT": CurrencyInformation{496, 2, "Mongolian tögrög", []string{"Mongolia"}}, "MOP": CurrencyInformation{446, 2, "Macanese pataca", []string{"Macao"}}, "MRO": CurrencyInformation{478, 1, "Mauritanian ouguiya", []string{"Mauritania"}}, "MUR": CurrencyInformation{480, 2, "Mauritian rupee", []string{"Mauritius"}}, "MVR": CurrencyInformation{462, 2, "Maldivian rufiyaa", []string{"Maldives"}}, "MWK": CurrencyInformation{454, 2, "Malawian kwacha", []string{"Malawi"}}, "MXN": CurrencyInformation{484, 2, "Mexican peso", []string{"Mexico"}}, "MXV": CurrencyInformation{979, 2, "Mexican Unidad de Inversion (UDI) (funds code)", []string{"Mexico"}}, "MYR": CurrencyInformation{458, 2, "Malaysian ringgit", []string{"Malaysia"}}, "MZN": CurrencyInformation{943, 2, "Mozambican metical", []string{"Mozambique"}}, "NAD": CurrencyInformation{516, 2, "Namibian dollar", []string{"Namibia"}}, "NGN": CurrencyInformation{566, 2, "Nigerian naira", []string{"Nigeria"}}, "NIO": CurrencyInformation{558, 2, "Nicaraguan córdoba", []string{"Nicaragua"}}, "NOK": CurrencyInformation{578, 2, "Norwegian krone", []string{"Norway", "Svalbard and <NAME> (SJ)", "Bouvet Island (BV)", "Queen Maud Land", "Peter I Island"}}, "NPR": CurrencyInformation{524, 2, "Nepalese rupee", []string{"Nepal"}}, "NZD": CurrencyInformation{554, 2, "New Zealand dollar", []string{"New Zealand", "Cook Islands (CK)", "Niue (NU)", "Pitcairn Islands (PN; see also Pitcairn Islands dollar)", "Tokelau (TK)", "Ross Dependency"}}, "OMR": CurrencyInformation{512, 3, "Omani rial", []string{"Oman"}}, "PAB": CurrencyInformation{590, 2, "Panamanian balboa", []string{"Panama"}}, "PEN": CurrencyInformation{604, 2, "Peruvian Sol", []string{"Peru"}}, "PGK": CurrencyInformation{598, 2, "Papua New Guinean kina", []string{"Papua 
New Guinea"}}, "PHP": CurrencyInformation{608, 2, "Philippine peso", []string{"Philippines"}}, "PKR": CurrencyInformation{586, 2, "Pakistani rupee", []string{"Pakistan"}}, "PLN": CurrencyInformation{985, 2, "Polish złoty", []string{"Poland"}}, "PYG": CurrencyInformation{600, 0, "Paraguayan guaraní", []string{"Paraguay"}}, "QAR": CurrencyInformation{634, 2, "Qatari riyal", []string{"Qatar"}}, "RON": CurrencyInformation{946, 2, "Romanian leu", []string{"Romania"}}, "RSD": CurrencyInformation{941, 2, "Serbian dinar", []string{"Serbia"}}, "RUB": CurrencyInformation{643, 2, "Russian ruble", []string{"Russia", "Abkhazia (GE-AB)", "South Ossetia", "Crimea"}}, "RWF": CurrencyInformation{646, 0, "Rwandan franc", []string{"Rwanda"}}, "SAR": CurrencyInformation{682, 2, "Saudi riyal", []string{"Saudi Arabia"}}, "SBD": CurrencyInformation{90, 2, "Solomon Islands dollar", []string{"Solomon Islands"}}, "SCR": CurrencyInformation{690, 2, "Seychelles rupee", []string{"Seychelles"}}, "SDG": CurrencyInformation{938, 2, "Sudanese pound", []string{"Sudan"}}, "SEK": CurrencyInformation{752, 2, "Swedish krona/kronor", []string{"Sweden"}}, "SGD": CurrencyInformation{702, 2, "Singapore dollar", []string{"Singapore", "auxiliary in Brunei (BN)"}}, "SHP": CurrencyInformation{654, 2, "Saint Helena pound", []string{"Saint Helena (SH-SH)", "Ascension Island (SH-AC) (pegged to GBP 1:1)"}}, "SLL": CurrencyInformation{694, 2, "Sierra Leonean leone", []string{"Sierra Leone"}}, "SOS": CurrencyInformation{706, 2, "Somali shilling", []string{"Somalia (except Somaliland)"}}, "SRD": CurrencyInformation{968, 2, "Surinamese dollar", []string{"Suriname"}}, "SSP": CurrencyInformation{728, 2, "South Sudanese pound", []string{"South Sudan"}}, "STD": CurrencyInformation{678, 2, "São Tomé and Príncipe dobra", []string{"São Tomé and Príncipe"}}, "SVC": CurrencyInformation{222, 2, "Salvadoran colón", []string{"El Salvador"}}, "SYP": CurrencyInformation{760, 2, "Syrian pound", []string{"Syria"}}, "SZL": 
CurrencyInformation{748, 2, "Swazi lilangeni", []string{"Swaziland"}}, "THB": CurrencyInformation{764, 2, "Thai baht", []string{"Thailand", "Cambodia", "Myanmar", "Laos"}}, "TJS": CurrencyInformation{972, 2, "Tajikistani somoni", []string{"Tajikistan"}}, "TMT": CurrencyInformation{934, 2, "Turkmenistani manat", []string{"Turkmenistan"}}, "TND": CurrencyInformation{788, 3, "Tunisian dinar", []string{"Tunisia"}}, "TOP": CurrencyInformation{776, 2, "Tongan paʻanga", []string{"Tonga"}}, "TRY": CurrencyInformation{949, 2, "Turkish lira", []string{"Turkey", "Northern Cyprus"}}, "TTD": CurrencyInformation{780, 2, "Trinidad and Tobago dollar", []string{"Trinidad and Tobago"}}, "TWD": CurrencyInformation{901, 2, "New Taiwan dollar", []string{"Taiwan"}}, "TZS": CurrencyInformation{834, 2, "Tanzanian shilling", []string{"Tanzania"}}, "UAH": CurrencyInformation{980, 2, "Ukrainian hryvnia", []string{"Ukraine"}}, "UGX": CurrencyInformation{800, 0, "Ugandan shilling", []string{"Uganda"}}, "USD": CurrencyInformation{840, 2, "United States dollar", []string{"United States", "American Samoa (AS)", "Barbados (BB) (as well as Barbados Dollar)", "Bermuda (BM) (as well as Bermudian Dollar)", "British Indian Ocean Territory (IO) (also uses GBP)", "British Virgin Islands (VG)", "Caribbean Netherlands (BQ - Bonaire", "Sint Eustatius and Saba)", "Ecuador (EC)", "El Salvador (SV)", "Guam (GU)", "Haiti (HT)", "Marshall Islands (MH)", "Federated States of Micronesia (FM)", "Northern Mariana Islands (MP)", "Palau (PW)", "Panama (PA)", "Puerto Rico (PR)", "Timor-Leste (TL)", "Turks and Caicos Islands (TC)", "U.S. 
Virgin Islands (VI)", "Zimbabwe (ZW)"}}, "USN": CurrencyInformation{997, 2, "United States dollar (next day) (funds code)", []string{"United States"}}, "UYI": CurrencyInformation{940, 0, "Uruguay Peso en Unidades Indexadas (URUIURUI) (funds code)", []string{"Uruguay"}}, "UYU": CurrencyInformation{858, 2, "Uruguayan peso", []string{"Uruguay"}}, "UZS": CurrencyInformation{860, 2, "Uzbekistan som", []string{"Uzbekistan"}}, "VEF": CurrencyInformation{937, 2, "Venezuelan bolívar", []string{"Venezuela"}}, "VND": CurrencyInformation{704, 0, "Vietnamese dong", []string{"Vietnam"}}, "VUV": CurrencyInformation{548, 0, "Vanuatu vatu", []string{"Vanuatu"}}, "WST": CurrencyInformation{882, 2, "Samoan tala", []string{"Samoa"}}, "XAF": CurrencyInformation{950, 0, "CFA franc BEAC", []string{"Cameroon (CM)", "Central African Republic (CF)", "Republic of the Congo (CG)", "Chad (TD)", "Equatorial Guinea (GQ)", "Gabon (GA)"}}, "XAG": CurrencyInformation{961, -1, "Silver (one troy ounce)", []string{}}, "XAU": CurrencyInformation{959, -1, "Gold (one troy ounce)", []string{}}, "XBA": CurrencyInformation{955, -1, "European Composite Unit (EURCO) (bond market unit)", []string{}}, "XBB": CurrencyInformation{956, -1, "European Monetary Unit (E.M.U.-6) (bond market unit)", []string{}}, "XBC": CurrencyInformation{957, -1, "European Unit of Account 9 (E.U.A.-9) (bond market unit)", []string{}}, "XBD": CurrencyInformation{958, -1, "European Unit of Account 17 (E.U.A.-17) (bond market unit)", []string{}}, "XCD": CurrencyInformation{951, 2, "East Caribbean dollar", []string{"Anguilla (AI)", "Antigua and Barbuda (AG)", "Dominica (DM)", "Grenada (GD)", "Montserrat (MS)", "Saint Kitts and Nevis (KN)", "Saint Lucia (LC)", "Saint Vincent and the Grenadines (VC)"}}, "XDR": CurrencyInformation{960, -1, "Special drawing rights", []string{"International Monetary Fund"}}, "XOF": CurrencyInformation{952, 0, "CFA franc BCEAO", []string{"Benin (BJ)", "Burkina Faso (BF)", "Côte d'Ivoire (CI)", "Guinea-Bissau 
(GW)", "Mali (ML)", "Niger (NE)", "Senegal (SN)", "Togo (TG)"}}, "XPD": CurrencyInformation{964, -1, "Palladium (one troy ounce)", []string{}}, "XPF": CurrencyInformation{953, 0, "CFP franc (franc Pacifique)", []string{"French territories of the Pacific Ocean: French Polynesia (PF)", "New Caledonia (NC)", "Wallis and Futuna (WF)"}}, "XPT": CurrencyInformation{962, -1, "Platinum (one troy ounce)", []string{}}, "XSU": CurrencyInformation{994, -1, "SUCRE", []string{"Unified System for Regional Compensation (SUCRE)"}}, "XTS": CurrencyInformation{963, -1, "Code reserved for testing purposes", []string{}}, "XUA": CurrencyInformation{965, -1, "ADB Unit of Account", []string{"African Development Bank"}}, "XXX": CurrencyInformation{999, -1, "No currency", []string{}}, "YER": CurrencyInformation{886, 2, "Yemeni rial", []string{"Yemen"}}, "ZAR": CurrencyInformation{710, 2, "South African rand", []string{"South Africa"}}, "ZMW": CurrencyInformation{967, 2, "Zambian kwacha", []string{"Zambia"}}, "ZWL": CurrencyInformation{932, 2, "Zimbabwean dollar A/10", []string{"Zimbabwe"}}, } )
currency-list.go
0.695441
0.515437
currency-list.go
starcoder
package db

// Package-level note: every struct below is a MongoDB persistence model.
//
// BUG FIX: the original tags were written without quotes (e.g.
// `bson:duration`). Go struct tags are a sequence of key:"value" pairs;
// an unquoted value is unparseable, so reflect.StructTag.Get("bson")
// returned "" for those fields and the bson codec fell back to its
// default (lower-cased field name) instead of the declared name.
// All tags are now properly quoted so the declared names take effect.
// Field names, types, and order are unchanged.

// DBDescriptor stores the descriptor (repayment terms) of a loan.
type DBDescriptor struct {
	ID                   string `bson:"_id"`
	FirstObligation      string `bson:"first_obligation"`
	TotalObligation      string `bson:"total_obligation"`
	Duration             string `bson:"duration"`
	InterestRate         string `bson:"interest_rate"`
	PunitiveInterestRate string `bson:"punitive_interest_rate"`
	Frequency            string `bson:"frequency"`
	Installments         string `bson:"installments"`
}

// DBLoan is the persisted representation of a loan request/agreement.
type DBLoan struct {
	ID         string `bson:"_id"`
	Open       bool   `bson:"open"`
	Approved   bool   `bson:"approved"`
	Expiration string `bson:"expiration"`
	Amount     string `bson:"amount"`
	Cosigner   string `bson:"cosigner"`
	Model      string `bson:"model"`
	Creator    string `bson:"creator"`
	Oracle     string `bson:"oracle"`
	Borrower   string `bson:"borrower"`
	LoanData   string `bson:"loanData"`
	Created    string `bson:"created"`
	Currency   string `bson:"currency"`
	Status     string `bson:"status"`
	Canceled   bool   `bson:"canceled"`
}

// DBDebt is the persisted representation of an outstanding debt.
type DBDebt struct {
	ID      string `bson:"_id"`
	Error   bool   `bson:"error"`
	Balance string `bson:"balance"`
	Model   string `bson:"model"`
	Creator string `bson:"creator"`
	Oracle  string `bson:"oracle"`
	Created string `bson:"created"`
	Owner   string `bson:"owner"`
}

// State holds the mutable repayment state of an installments plan.
type State struct {
	Status      string `bson:"status"`
	Clock       string `bson:"clock"`
	LastPayment string `bson:"last_payment"`
	Paid        string `bson:"paid"`
	PaidBase    string `bson:"paid_base"`
	Interest    string `bson:"interest"`
}

// Config holds the immutable configuration of an installments plan.
type Config struct {
	Installments string `bson:"installments"`
	TimeUnit     string `bson:"time_unit"`
	Duration     string `bson:"duration"`
	LentTime     string `bson:"lent_time"`
	Cuota        string `bson:"cuota"`
	InterestRate string `bson:"interest_rate"`
}

// DBInstallment pairs an installments plan's Config with its State.
type DBInstallment struct {
	ID     string  `bson:"_id"`
	Config *Config `bson:"config"`
	State  *State  `bson:"state"`
}

// DBCollateral is the persisted representation of collateral backing a debt.
type DBCollateral struct {
	ID               string `bson:"_id"`
	DebtID           string `bson:"debt_id"`
	Oracle           string `bson:"oracle"`
	Token            string `bson:"token"`
	LiquidationRatio string `bson:"liquidation_ratio"`
	BalanceRatio     string `bson:"balance_ratio"`
	BurnFee          string `bson:"burn_fee"`
	RewardFee        string `bson:"reward_fee"`
	Owner            string `bson:"owner"`
	Amount           string `bson:"amount"`
	Status           string `bson:"status"`
}

// DAL is the data-access layer consumed by the rest of the service.
type DAL interface {
	// LOAN
	GetLoan(id string) (*DBLoan, error)
	SaveLoan(loan *DBLoan) error
	UpdateLoan(loan *DBLoan) error
	DeleteLoan(loan *DBLoan) error

	// DEBT
	GetDebt(id string) (*DBDebt, error)
	SaveDebt(debt *DBDebt) error
	UpdateDebt(debt *DBDebt) error
	DeleteDebt(debt *DBDebt) error

	// INSTALLMENTS
	GetInstallment(id string) (*DBInstallment, error)
	SaveInstallment(installment *DBInstallment) error
	UpdateInstallment(installment *DBInstallment) error
	DeleteInstallment(installment *DBInstallment) error

	// Collateral
	GetCollateral(id string) (*DBCollateral, error)
	GetCollaterals(debtID string) ([]DBCollateral, error)
	SaveCollateral(collateral *DBCollateral) error
	UpdateCollateral(collateral *DBCollateral) error
	DeleteCollateral(collateral *DBCollateral) error

	// Descriptor
	GetDescriptor(id string) (*DBDescriptor, error)
	SaveDescriptor(descriptor *DBDescriptor) error
}
db/DAL.go
0.655557
0.64225
DAL.go
starcoder
package sort

import (
	"github.com/howz97/algorithm/strings/alphabet"
)

// Quick3 seems to be a faster string sorting algorithm than the sort.Strings method in the standard library.
// It is a 3-way (Bentley–Sedgewick) radix quicksort over the raw bytes of the strings.
func Quick3(strings []string) {
	quick3(strings, 0, len(strings)-1, 0)
}

// quick3 sorts strings[lo..hi] (inclusive) assuming all elements already
// share a common prefix of length `depth`. It partitions on the byte at
// position `depth` into <, ==, > regions, then recurses.
func quick3(strings []string, lo, hi, depth int) {
	// Base case: the range holds at most two elements.
	if lo+1 >= hi {
		if lo >= hi {
			return
		}
		// Exactly two elements: skip their common prefix, then swap if out of order.
		for depth < len(strings[lo]) && depth < len(strings[hi]) && byteAt(strings[lo], depth) == byteAt(strings[hi], depth) {
			depth++
		}
		if byteAt(strings[lo], depth) > byteAt(strings[hi], depth) {
			strings[lo], strings[hi] = strings[hi], strings[lo]
		}
		return
	}
	// Median-of-three pivot selection; the pivot byte is then read from strings[lo].
	median(strings, lo, hi, depth)
	middleV := byteAt(strings[lo], depth)
	// Dijkstra 3-way partition:
	//   [lo, tail)   < middleV
	//   [tail, i)    = middleV
	//   (head, hi]   > middleV
	//   [i, head]    not yet examined
	tail, i := lo, lo+1
	head := hi
	for i <= head {
		v := byteAt(strings[i], depth)
		switch true {
		case v < middleV:
			strings[tail], strings[i] = strings[i], strings[tail]
			tail++
			i++
		case v > middleV:
			strings[i], strings[head] = strings[head], strings[i]
			head--
		default:
			i++
		}
	}
	// bytes[0...tail] < middleV
	// bytes[tail...head] = middleV
	// bytes[head...] > middleV
	quick3(strings, lo, tail-1, depth)
	// middleV == -1 means every string in the equal region has ended at this
	// depth; they are fully sorted and need no deeper recursion.
	if middleV >= 0 {
		quick3(strings, tail, head, depth+1)
	}
	quick3(strings, head+1, hi, depth)
}

// byteAt returns the byte of str at position depth, or -1 past the end
// (so shorter strings sort before longer ones with the same prefix).
func byteAt(str string, depth int) int {
	if depth >= len(str) {
		return -1
	}
	return int(str[depth])
}

// median orders bytes[lo], bytes[m], bytes[hi] by their byte at `depth`,
// then swaps m and hi.
//
// NOTE(review): after the three-way ordering this final swap places the
// median at `hi` (and the largest of the three at `m`), yet quick3 reads
// its pivot from strings[lo] — the smallest of the sampled three. A
// conventional median-of-three would move the median to `lo`. The sort is
// still correct (any pivot value works for 3-way partitioning), but the
// pivot choice looks unintentionally suboptimal — verify against the
// author's intent.
func median(bytes []string, lo, hi, depth int) {
	m := int(uint(lo+hi) >> 1)
	if byteAt(bytes[m], depth) < byteAt(bytes[lo], depth) {
		bytes[m], bytes[lo] = bytes[lo], bytes[m]
	}
	if byteAt(bytes[hi], depth) < byteAt(bytes[m], depth) {
		bytes[hi], bytes[m] = bytes[m], bytes[hi]
		if byteAt(bytes[m], depth) < byteAt(bytes[lo], depth) {
			bytes[lo], bytes[m] = bytes[m], bytes[lo]
		}
	}
	bytes[hi], bytes[m] = bytes[m], bytes[hi]
}

// Quick3Alp sorts data using 3-way radix quicksort over rune indices in
// the given alphabet instead of raw bytes. The strings are converted to
// rune slices once up front and converted back after sorting.
func Quick3Alp(a alphabet.IAlp, data []string) {
	runes := make([][]rune, len(data))
	for i := range runes {
		runes[i] = []rune(data[i])
	}
	quick3alp(a, runes, 0, len(data)-1, 0)
	for i := range data {
		data[i] = string(runes[i])
	}
}

// quick3alp mirrors quick3 but compares alphabet indices (via toIndex,
// defined elsewhere in this package) instead of raw bytes.
func quick3alp(a alphabet.IAlp, runes [][]rune, lo, hi, depth int) {
	// Base case: at most two elements.
	if lo+1 >= hi {
		if lo >= hi {
			return
		}
		for toIndex(a, runes[lo], depth) >= 0 && toIndex(a, runes[hi], depth) >= 0 && toIndex(a, runes[lo], depth) == toIndex(a, runes[hi], depth) {
			depth++
		}
		if toIndex(a, runes[lo], depth) > toIndex(a, runes[hi], depth) {
			runes[lo], runes[hi] = runes[hi], runes[lo]
		}
		return
	}
	medianAlp(a, runes, lo, hi, depth)
	middleV := toIndex(a, runes[lo], depth)
	tail, i := lo, lo+1
	head := hi
	for i <= head {
		v := toIndex(a, runes[i], depth)
		switch true {
		case v < middleV:
			runes[tail], runes[i] = runes[i], runes[tail]
			tail++
			i++
		case v > middleV:
			runes[i], runes[head] = runes[head], runes[i]
			head--
		default:
			i++
		}
	}
	// runes[0...tail] < middleV
	// runes[tail...head] = middleV
	// runes[head...] > middleV
	quick3alp(a, runes, lo, tail-1, depth)
	// middleV < 0 means the equal region is exhausted at this depth.
	if middleV >= 0 {
		quick3alp(a, runes, tail, head, depth+1)
	}
	quick3alp(a, runes, head+1, hi, depth)
}

// medianAlp is the alphabet-index analogue of median; the same
// NOTE(review) about the final swap placing the median at `hi` applies.
func medianAlp(a alphabet.IAlp, runes [][]rune, lo, hi, depth int) {
	m := int(uint(lo+hi) >> 1)
	if toIndex(a, runes[m], depth) < toIndex(a, runes[lo], depth) {
		runes[m], runes[lo] = runes[lo], runes[m]
	}
	if toIndex(a, runes[hi], depth) < toIndex(a, runes[m], depth) {
		runes[hi], runes[m] = runes[m], runes[hi]
		if toIndex(a, runes[m], depth) < toIndex(a, runes[lo], depth) {
			runes[lo], runes[m] = runes[m], runes[lo]
		}
	}
	runes[hi], runes[m] = runes[m], runes[hi]
}
strings/sort/quick3.go
0.553505
0.631552
quick3.go
starcoder
package engine

// The orderbook keeps engine state in four mongo-backed structures:
//
//  1. Pricepoints set        — ordered set of all pricepoints, keyed by
//     pair addresses + side (BUY or SELL); a sorted set whose ranks are
//     all effectively 0.
//  2. Pricepoints volume set — volume per pricepoint, keyed by
//     pair addresses + side + pricepoint.
//  3. Pricepoints hashes set — order hashes per pricepoint, ranked by
//     creation time, keyed by pair addresses + side + pricepoint.
//  4. Orders map             — serialized orders, keyed by hash.

import (
	"sync"

	"github.com/tomochain/tomox-sdk/interfaces"
	"github.com/tomochain/tomox-sdk/rabbitmq"
	"github.com/tomochain/tomox-sdk/types"
)

// OrderBook serializes all order/stop-order mutations for one trading
// pair behind a single mutex.
type OrderBook struct {
	rabbitMQConn *rabbitmq.Connection
	orderDao     interfaces.OrderDao
	stopOrderDao interfaces.StopOrderDao
	tradeDao     interfaces.TradeDao
	pair         *types.Pair
	mutex        *sync.Mutex
	topic        string
}

// NewOrderBook wires up an OrderBook for the given pair with its DAOs
// and rabbitmq connection.
func NewOrderBook(
	rabbitMQConn *rabbitmq.Connection,
	orderDao interfaces.OrderDao,
	stopOrderDao interfaces.StopOrderDao,
	tradeDao interfaces.TradeDao,
	p types.Pair,
) *OrderBook {
	book := &OrderBook{
		rabbitMQConn: rabbitMQConn,
		orderDao:     orderDao,
		stopOrderDao: stopOrderDao,
		tradeDao:     tradeDao,
		pair:         &p,
		mutex:        &sync.Mutex{},
	}
	return book
}

// newOrder persists a newly received order under this pair's encoded
// topic. The lock keeps recovery/cancel operations from interleaving.
func (book *OrderBook) newOrder(o *types.Order) error {
	book.mutex.Lock()
	defer book.mutex.Unlock()

	if err := book.orderDao.AddNewOrder(o, book.pair.EncodedTopic()); err != nil {
		logger.Error(err)
		return err
	}
	return nil
}

// newStopOrder upserts a stop order into the "stop_orders" collection
// (FindAndModify deduplicates by hash).
func (book *OrderBook) newStopOrder(so *types.StopOrder) error {
	book.mutex.Lock()
	defer book.mutex.Unlock()

	if _, err := book.stopOrderDao.FindAndModify(so.Hash, so); err != nil {
		logger.Error(err)
		return err
	}
	return nil
}

// cancelOrder removes the order from the orderbook.
func (book *OrderBook) cancelOrder(o *types.Order) error {
	book.mutex.Lock()
	defer book.mutex.Unlock()

	if err := book.orderDao.CancelOrder(o, book.pair.EncodedTopic()); err != nil {
		logger.Error(err)
		return err
	}
	return nil
}

// cancelStopOrder marks the stop order cancelled, persists the change,
// and publishes a STOP_ORDER_CANCELLED engine response.
func (book *OrderBook) cancelStopOrder(so *types.StopOrder) error {
	book.mutex.Lock()
	defer book.mutex.Unlock()

	so.Status = types.StopOrderStatusCancelled
	if err := book.stopOrderDao.UpdateByHash(so.Hash, so); err != nil {
		logger.Error(err)
		return err
	}

	res := &types.EngineResponse{
		Status:    "STOP_ORDER_CANCELLED",
		StopOrder: so,
	}
	if err := book.rabbitMQConn.PublishEngineResponse(res); err != nil {
		logger.Error(err)
		return err
	}
	return nil
}
engine/orderbook.go
0.638835
0.413714
orderbook.go
starcoder
package numberline

import (
	"errors"
	"log"
	"strconv"
	"strings"
)

// Range is an integer interval with inclusive bounds on both ends.
type Range struct {
	LowerBound, UpperBound int
}

var (
	errInvalidRange error = errors.New("error: invalid range limits")
	errInvalidValue error = errors.New("error: invalid value")
)

// NewRange parses an interval expression such as "[1,5]" or "(1,5)" into
// a Range with inclusive bounds: '[' / ']' keep the endpoint, '(' / ')'
// exclude it (the stored bound is shifted inward by one).
//
// Fixes over the original implementation:
//   - the delimiter bytes 40/91/41/93/44 are now the self-documenting
//     char literals '(', '[', ')', ']', ','
//   - an expression shorter than 2 bytes no longer panics with an
//     index-out-of-range; it returns errInvalidRange
//   - an expression with more than one comma (e.g. "[1,2,3]") is now
//     rejected with errInvalidValue instead of being silently mis-parsed
//     as {1, 23}
func (r Range) NewRange(expression string) (Range, error) {
	validLowerRange := [2]byte{'(', '['}
	validUpperRange := [2]byte{')', ']'}

	// Guard before indexing so "" and single-character input cannot panic.
	if len(expression) < 2 {
		return Range{}, errInvalidRange
	}
	if !contains(expression[0], validLowerRange) || !contains(expression[len(expression)-1], validUpperRange) {
		return Range{}, errInvalidRange
	}

	// Split the interior on the single separating comma.
	lowerBound, upperBound, ok := strings.Cut(expression[1:len(expression)-1], ",")
	if !ok {
		return Range{}, errInvalidValue
	}

	lowerLimit, err := strconv.Atoi(lowerBound)
	if err != nil {
		return Range{}, errInvalidValue
	}
	upperLimit, err := strconv.Atoi(upperBound)
	if err != nil {
		return Range{}, errInvalidValue
	}

	// Exclusive delimiters shift the stored (inclusive) bound inward.
	if expression[0] == '(' {
		lowerLimit++
	}
	if expression[len(expression)-1] == ')' {
		upperLimit--
	}
	return Range{lowerLimit, upperLimit}, nil
}

// Contains reports whether Range r contains all of the specified numbers.
func (r Range) Contains(numbers ...int) bool {
	for _, number := range numbers {
		if !(r.LowerBound <= number && r.UpperBound >= number) {
			return false
		}
	}
	return true
}

// DoesNotContain reports whether Range r fails to contain all of the specified numbers.
func (r Range) DoesNotContain(numbers ...int) bool {
	return !r.Contains(numbers...)
}

// GetAllPoints returns a slice with all the integers inside Range r, or
// nil when the range is empty (UpperBound < LowerBound).
func (r Range) GetAllPoints() []int {
	n := r.UpperBound - r.LowerBound + 1
	if n <= 0 {
		return nil
	}
	points := make([]int, 0, n)
	for i := r.LowerBound; i <= r.UpperBound; i++ {
		points = append(points, i)
	}
	return points
}

// ContainsRange reports whether Range r contains the range described by expression.
//
// NOTE(review): an unparseable expression terminates the process via
// log.Fatalln — kept for backward compatibility, but a (bool, error)
// return would be friendlier to callers.
func (r Range) ContainsRange(expression string) bool {
	var comparingRange Range
	comparingRange, err := comparingRange.NewRange(expression)
	if err != nil {
		log.Fatalln(err)
	}
	return r.LowerBound <= comparingRange.LowerBound && r.UpperBound >= comparingRange.UpperBound
}

// DoesNotContainRange reports whether Range r does not contain the range
// described by expression.
func (r Range) DoesNotContainRange(expression string) bool {
	return !r.ContainsRange(expression)
}

// GetEndPoints returns the lower and upper bound of Range r.
func (r Range) GetEndPoints() (lower, upper int) {
	return r.LowerBound, r.UpperBound
}

// OverlapsRange reports whether the range described by expression shares
// at least one endpoint-interval point with Range r.
// An unparseable expression terminates the process (see ContainsRange).
func (r Range) OverlapsRange(expression string) bool {
	var comparingRange Range
	comparingRange, err := comparingRange.NewRange(expression)
	if err != nil {
		log.Fatalln(err)
	}
	containsLowerBound := r.LowerBound <= comparingRange.LowerBound && r.UpperBound >= comparingRange.LowerBound
	containsUpperBound := r.LowerBound <= comparingRange.UpperBound && r.UpperBound >= comparingRange.UpperBound
	return containsLowerBound || containsUpperBound
}

// Equals reports whether Range r equals the range described by expression.
// An unparseable expression terminates the process (see ContainsRange).
func (r Range) Equals(expression string) bool {
	var comparingRange Range
	comparingRange, err := comparingRange.NewRange(expression)
	if err != nil {
		log.Fatalln(err)
	}
	return r == comparingRange
}

// NotEquals reports whether Range r differs from the range described by expression.
func (r Range) NotEquals(expression string) bool {
	return !r.Equals(expression)
}

// contains reports whether char is one of the two delimiter bytes.
func contains(char byte, byteArr [2]byte) bool {
	return char == byteArr[0] || char == byteArr[1]
}
numberline/range.go
0.752377
0.476336
range.go
starcoder
package sliceutil

import (
	"errors"
	"reflect"
)

// Filter returns a new slice holding the elements of `slice` for which
// `function` (a func(elem) bool) returns true. The result is typed
// interface{} and must be type-asserted back by the caller.
func Filter(slice, function interface{}) interface{} {
	result, _ := filter(slice, function, false)
	return result
}

// FilterInPlace filters the slice pointed to by slicePtr in place,
// compacting kept elements to the front and truncating the length.
// Panics if slicePtr is not a pointer (to a slice).
func FilterInPlace(slicePtr, function interface{}) {
	in := reflect.ValueOf(slicePtr)
	if in.Kind() != reflect.Ptr {
		panic("FilterInPlace: not a pointer to slice")
	}
	_, n := filter(in.Elem().Interface(), function, true)
	in.Elem().SetLen(n)
}

// filter is the shared reflect-based implementation.
// When inPlace is true it reuses the input's backing array (the caller
// truncates via SetLen); otherwise it allocates a fresh slice of exactly
// the matching elements. Returns the result and the number of matches.
// NOTE: `function` is only verified to take one argument of the element
// type; its return value is assumed to be bool (Bool() panics otherwise).
func filter(slice, function interface{}, inPlace bool) (interface{}, int) {
	sliceInType := reflect.ValueOf(slice)
	if sliceInType.Kind() != reflect.Slice {
		panic("filter: not slice")
	}
	fn := reflect.ValueOf(function)
	elemType := sliceInType.Type().Elem()
	if !verifyFuncSignature(fn, elemType, nil) {
		panic("filter: function must be of type func(" + sliceInType.Type().Elem().String() + ") outputElemType")
	}
	// First pass: record the indices of the elements to keep.
	var count []int
	for i := 0; i < sliceInType.Len(); i++ {
		if fn.Call([]reflect.Value{sliceInType.Index(i)})[0].Bool() {
			count = append(count, i)
		}
	}
	out := sliceInType
	if !inPlace {
		out = reflect.MakeSlice(sliceInType.Type(), len(count), len(count))
	}
	// Second pass: compact the kept elements to the front of `out`.
	for i := range count {
		out.Index(i).Set(sliceInType.Index(count[i]))
	}
	return out.Interface(), len(count)
}

// Transform maps each element through `function` and returns the results
// as a new slice (element type taken from the function's return type).
func Transform(slice, function interface{}) interface{} {
	return transform(slice, function, false)
}

// TransformInPlace maps each element through `function`, overwriting the
// original slice. The function's return type must equal the element type.
func TransformInPlace(slice, function interface{}) interface{} {
	return transform(slice, function, true)
}

// transform is the shared reflect-based implementation behind
// Transform/TransformInPlace.
func transform(slice, function interface{}, inPlace bool) interface{} {
	sliceInType := reflect.ValueOf(slice)
	if sliceInType.Kind() != reflect.Slice {
		panic("transform: not slice")
	}
	fn := reflect.ValueOf(function)
	elemType := sliceInType.Type().Elem()
	if !verifyFuncSignature(fn, elemType, nil) {
		panic("Transform: function must be of type func(" + sliceInType.Type().Elem().String() + ") outputElemType")
	}
	sliceOutType := sliceInType
	if !inPlace {
		sliceOutType = reflect.MakeSlice(reflect.SliceOf(fn.Type().Out(0)), sliceInType.Len(), sliceInType.Len())
	}
	for i := 0; i < sliceInType.Len(); i++ {
		sliceOutType.Index(i).Set(fn.Call([]reflect.Value{sliceInType.Index(i)})[0])
	}
	return sliceOutType.Interface()
}

// GroupItems is a single aggregation bucket produced by GroupBy.
type GroupItems struct {
	Group interface{}   `json:"group"` // key returned by the callback
	Count int           `json:"count"` // number of items in this bucket
	Items []interface{} `json:"items"` // the grouped elements, in input order
}

// GroupBy partitions the slice into buckets keyed by the callback's
// return value. Buckets appear in first-seen order.
// NOTE(review): the callback's return value is used as a map key, so it
// must be a comparable type — an uncomparable key panics at runtime.
func GroupBy(slice, function interface{}) interface{} {
	sliceInType := reflect.ValueOf(slice)
	if sliceInType.Kind() != reflect.Slice {
		panic("GroupBy: not slice")
	}
	fn := reflect.ValueOf(function)
	elemType := sliceInType.Type().Elem()
	if !verifyFuncSignature(fn, elemType, nil) {
		panic("GroupBy: function must be of type func(" + sliceInType.Type().Elem().String() + ") outputElemType")
	}
	// group by callback function
	result := make([]*GroupItems, 0)
	flag := make(map[interface{}]*GroupItems)
	for i := 0; i < sliceInType.Len(); i++ {
		group := fn.Call([]reflect.Value{sliceInType.Index(i)})[0].Interface()
		if node, exist := flag[group]; !exist {
			node = &GroupItems{Group: group}
			result = append(result, node)
			flag[group] = node
		}
		flag[group].Items = append(flag[group].Items, sliceInType.Index(i).Interface())
		flag[group].Count++
	}
	return result
}

// verifyFuncSignature checks that fn is a function whose input parameter
// types match types[:len-1] and whose single return type matches the last
// entry of types (a nil last entry skips the return-type check).
func verifyFuncSignature(fn reflect.Value, types ...reflect.Type) bool {
	// check is a function
	if fn.Kind() != reflect.Func {
		return false
	}
	// check parameter count
	if (fn.Type().NumIn() != len(types)-1) || (fn.Type().NumOut() != 1) {
		return false
	}
	// check in parameter type
	for i := 0; i < len(types)-1; i++ {
		if fn.Type().In(i) != types[i] {
			return false
		}
	}
	// check out parameter type
	outType := types[len(types)-1]
	if outType != nil && fn.Type().Out(0) != outType {
		return false
	}
	return true
}

// Min returns the smallest value in the slice, or an error for an empty slice.
func Min(slice []int) (int, error) {
	if len(slice) == 0 {
		return 0, errors.New("empty slice")
	}
	min := slice[0]
	for _, v := range slice {
		if v < min {
			min = v
		}
	}
	return min, nil
}

// InArray reports whether val occurs in array (which must be a slice;
// any other kind yields exists=false, index=-1). Elements are compared
// with reflect.DeepEqual.
func InArray(val interface{}, array interface{}) (exists bool, index int) {
	exists = false
	index = -1
	switch reflect.TypeOf(array).Kind() {
	case reflect.Slice:
		s := reflect.ValueOf(array)
		for i := 0; i < s.Len(); i++ {
			if reflect.DeepEqual(val, s.Index(i).Interface()) == true {
				index = i
				exists = true
				return
			}
		}
	}
	return
}
sliceutil/sliceutil.go
0.591133
0.428054
sliceutil.go
starcoder
package slicez

import (
	"errors"
	"github.com/modfin/henry/compare"
	"github.com/modfin/henry/slicez/sort"
	"math/rand"
	"time"
)

// Equal takes two slices of that is of the interface comparable. It returns true if they are of equal length and each
// element in a[x] == b[x] for every element
func Equal[A comparable](s1, s2 []A) bool {
	return EqualFunc(s1, s2, compare.Equal[A])
}

// EqualFunc takes two slices and an equality check function. It returns true if they are of equal length and each
// element in eq(a[x], b[x]) == true for every element
func EqualFunc[E1, E2 any](s1 []E1, s2 []E2, eq func(E1, E2) bool) bool {
	if len(s1) != len(s2) {
		return false
	}
	for i, v1 := range s1 {
		v2 := s2[i]
		if !eq(v1, v2) {
			return false
		}
	}
	return true
}

// Index finds the first index of an element in an array. It returns -1 if it is not present
func Index[E comparable](s []E, needle E) int {
	return IndexFunc(s, func(e E) bool {
		return needle == e
	})
}

// IndexFunc finds the first index of an element where the passed in function returns true. It returns -1 if it is not present
func IndexFunc[E any](s []E, f func(E) bool) int {
	for i, v := range s {
		if f(v) {
			return i
		}
	}
	return -1
}

// LastIndex finds the last index of an element in an array. It returns -1 if it is not present
func LastIndex[E comparable](s []E, needle E) int {
	return LastIndexFunc(s, func(e E) bool {
		return e == needle
	})
}

// LastIndexFunc finds the last index of an element where the passed in function returns true. It returns -1 if it is not present
func LastIndexFunc[E any](s []E, f func(E) bool) int {
	n := len(s)
	for i := 0; i < n; i++ {
		// Scan from the back by mirroring the forward index.
		if f(s[n-i-1]) {
			return n - i - 1
		}
	}
	return -1
}

// Cut will cut a slice into a left and a right part at the first instance where the needle is found.
// The needle is not included
func Cut[E comparable](s []E, needle E) (left, right []E, found bool) {
	return CutFunc(s, func(e E) bool {
		return e == needle
	})
}

// CutFunc will cut a slice into a left and a right part at the first instance where the on function returns true.
// The element that makes the "on" function return true will not be included.
func CutFunc[E any](s []E, on func(E) bool) (left, right []E, found bool) {
	i := IndexFunc(s, on)
	if i == -1 {
		return s, nil, false
	}
	return s[:i], s[i+1:], true
}

// Find will find the first instance of an element in a slice where the equal func returns true
func Find[E any](s []E, equal func(E) bool) (e E, found bool) {
	i := IndexFunc(s, equal)
	if i == -1 {
		return e, false
	}
	return s[i], true
}

// FindLast will find the last instance of an element in a slice where the equal func returns true
func FindLast[E any](s []E, equal func(E) bool) (e E, found bool) {
	i := LastIndexFunc(s, equal)
	if i == -1 {
		return e, false
	}
	return s[i], true
}

// Join will join a two-dimensional slice into a one dimensional slice with the glue slice between them.
// Similar to strings.Join or bytes.Join
func Join[E any](slices [][]E, glue []E) []E {
	if len(slices) == 0 {
		return []E{}
	}
	if len(slices) == 1 {
		return append([]E(nil), slices[0]...)
	}
	// Compute the exact output length up front to allocate once.
	n := len(glue) * (len(slices) - 1)
	for _, v := range slices {
		n += len(v)
	}
	b := make([]E, n)
	bp := copy(b, slices[0])
	for _, v := range slices[1:] {
		bp += copy(b[bp:], glue)
		bp += copy(b[bp:], v)
	}
	return b
}

// Contains returns true if the needle is present in the slice
func Contains[E comparable](s []E, needle E) bool {
	return Index(s, needle) >= 0
}

// ContainsFunc returns true if the passed in func returns true on any of the element in the slice
func ContainsFunc[E any](s []E, f func(e E) bool) bool {
	return IndexFunc(s, f) >= 0
}

// Clone will create a copy of the slice
func Clone[E any](s []E) []E {
	// Preserve nil in case it matters.
	if s == nil {
		return nil
	}
	return append([]E{}, s...)
}

// Compare will compare two slices
func Compare[E compare.Ordered](s1, s2 []E) int {
	return CompareFunc(s1, s2, compare.Compare[E])
}

// CompareFunc will compare two slices using a compare function
func CompareFunc[E1, E2 any](s1 []E1, s2 []E2, cmp func(E1, E2) int) int {
	s2len := len(s2)
	for i, v1 := range s1 {
		if i >= s2len {
			return +1
		}
		v2 := s2[i]
		if c := cmp(v1, v2); c != 0 {
			return c
		}
	}
	if len(s1) < s2len {
		return -1
	}
	return 0
}

// Concat will concatenate supplied slices in the given order into a new slice
func Concat[A any](slices ...[]A) []A {
	var capacity int
	for _, s := range slices {
		capacity += len(s)
	}
	var ret = make([]A, 0, capacity)
	for _, slice := range slices {
		ret = append(ret, slice...)
	}
	return ret
}

// Reverse will return a news slice, but reversed of the original one
func Reverse[A any](slice []A) []A {
	l := len(slice)
	res := make([]A, l)
	for i, val := range slice {
		res[l-i-1] = val
	}
	return res
}

// Head will return the first element of the slice, or an error if the length of the slice is 0
func Head[A any](slice []A) (A, error) {
	if len(slice) > 0 {
		return slice[0], nil
	}
	var zero A
	return zero, errors.New("slice does not have any elements")
}

// Tail will return a new slice with all but the first element
func Tail[A any](slice []A) []A {
	return Drop(slice, 1)
}

// Last will return the last element of the slice, or an error if the length of the slice is 0
func Last[A any](slice []A) (A, error) {
	if len(slice) > 0 {
		return slice[len(slice)-1], nil
	}
	var zero A
	return zero, errors.New("slice does not have any elements")
}

// Nth will return the nth element in the slice. It returns the zero value if len(slice) == 0.
// Nth looks as the slice of a modul group and will wrap around from both ends. Eg Nth(-1) will return the last element
// and Nth(10) where len(slice) == 10 will return the first element
func Nth[A any](slice []A, i int) A {
	var zero A
	n := len(slice)
	if n == 0 {
		return zero
	}
	if n == 1 {
		return slice[0]
	}
	i = i % n
	if i < 0 {
		// Go's % keeps the sign of the dividend; shift into [0, n).
		i = len(slice) + i
	}
	return slice[i]
}

// Each will apply the "apply" func on each element of the slice
func Each[A any](slice []A, apply func(a A)) {
	for _, a := range slice {
		apply(a)
	}
}

// TakeWhile will produce a new slice containing all elements from the left until the "take" func returns false
func TakeWhile[A any](slice []A, take func(a A) bool) []A {
	var res []A
	for _, val := range slice {
		if !take(val) {
			break
		}
		res = append(res, val)
	}
	return res
}

// TakeRightWhile will produce a new slice containing all elements from the right until the "take" func returns false
func TakeRightWhile[A any](slice []A, take func(a A) bool) []A {
	var l = len(slice)
	var res []A
	for i := range slice {
		// Walk from the back; prepend to keep original order.
		i = l - i - 1
		val := slice[i]
		if !take(val) {
			break
		}
		res = append([]A{val}, res...)
	}
	return res
}

// Take will produce a new slice containing the "i" first element of the passed in slice
func Take[A any](slice []A, i int) []A {
	var j int
	return TakeWhile(slice, func(_ A) bool {
		res := j < i
		j += 1
		return res
	})
}

// TakeRight will produce a new slice containing the "i" last element of the passed in slice
func TakeRight[A any](slice []A, i int) []A {
	i = len(slice) - i - 1
	j := len(slice) - 1
	return TakeRightWhile(slice, func(_ A) bool {
		res := j > i
		j -= 1
		return res
	})
}

// DropWhile will produce a new slice, where the left most elements are dropped until the first instance the
// "drop" function returns false
func DropWhile[A any](slice []A, drop func(a A) bool) []A {
	if len(slice) == 0 {
		return nil
	}
	// index tracks the last dropped position; -1 means nothing was dropped.
	var index int = -1
	for i, val := range slice {
		if !drop(val) {
			break
		}
		index = i
	}
	var a []A
	if index == -1 {
		a = make([]A, len(slice))
		copy(a, slice)
		return a
	}
	if index+1 < len(slice) {
		a = make([]A, len(slice)-index-1)
		copy(a, slice[index+1:])
		return a
	}
	return a
}

// DropRightWhile will produce a new slice, where the right most elements are dropped until the first instance the
// "drop" function returns false
func DropRightWhile[A any](slice []A, drop func(a A) bool) []A {
	if len(slice) == 0 {
		return nil
	}
	// index tracks the last (leftmost) dropped position from the right.
	var index int = -1
	var l = len(slice)
	for i := range slice {
		i = l - i - 1
		val := slice[i]
		if !drop(val) {
			break
		}
		index = i
	}
	var a []A
	if index == -1 {
		a = make([]A, len(slice))
		copy(a, slice)
		return a
	}
	if 0 < index && index < len(slice) {
		a = make([]A, index)
		copy(a, slice[:index])
		return a
	}
	return a
}

// Drop will produce a new slice where the "i" first element of the passed in slice are removed
func Drop[A any](slice []A, i int) []A {
	var j int
	return DropWhile(slice, func(_ A) bool {
		res := j < i
		j += 1
		return res
	})
}

// DropRight will produce a new slice where the "i" last element of the passed in slice are removed
func DropRight[A any](slice []A, i int) []A {
	i = len(slice) - i - 1
	j := len(slice) - 1
	return DropRightWhile(slice, func(_ A) bool {
		res := j > i
		j -= 1
		return res
	})
}

// Filter will produce a new slice only containing elements where the "include" function returns true
func Filter[A any](slice []A, include func(a A) bool) []A {
	var res []A
	for _, val := range slice {
		if include(val) {
			res = append(res, val)
		}
	}
	return res
}

// Reject is the complement of Filter and will produce a new slice only containing elements where the "exclude" function returns false
func Reject[A any](slice []A, exclude func(a A) bool) []A {
	return Filter(slice, func(a A) bool {
		return !exclude(a)
	})
}

// Every returns true if every element in the slice is equal to the needle
func Every[A comparable](slice []A, needle A) bool {
	return EveryFunc(slice, compare.EqualOf[A](needle))
}

// EveryFunc returns true if the predicate function returns true for every element in the slice
func EveryFunc[A any](slice []A, predicate func(A) bool) bool {
	for _, val := range slice {
		if !predicate(val) {
			return false
		}
	}
	return true
}

// Some returns true there exist an element in the slice that is equal to the needle, an alias for Contains
func Some[A comparable](slice []A, needle A) bool {
	return SomeFunc(slice, compare.EqualOf[A](needle))
}

// SomeFunc returns true if there is an element in the slice for which the predicate function returns true
func SomeFunc[A any](slice []A, predicate func(A) bool) bool {
	for _, val := range slice {
		if predicate(val) {
			return true
		}
	}
	return false
}

// None returns true if there is no element in the slice that matches the needle
func None[A comparable](slice []A, needle A) bool {
	return !SomeFunc(slice, compare.EqualOf[A](needle))
}

// NoneFunc returns true if there are no element in the slice for which the predicate function returns true
func NoneFunc[A any](slice []A, predicate func(A) bool) bool {
	return !SomeFunc(slice, predicate)
}

// Partition will partition a slice into two slices. One where every element for which the predicate function returns true
// and where it returns false
func Partition[A any](slice []A, predicate func(a A) bool) (satisfied, notSatisfied []A) {
	for _, a := range slice {
		if predicate(a) {
			satisfied = append(satisfied, a)
			continue
		}
		notSatisfied = append(notSatisfied, a)
	}
	return satisfied, notSatisfied
}

// Shuffle will return a new slice where the elements from the original slice is shuffled
//
// NOTE(review): rand.Seed is deprecated since Go 1.20 (the global source
// is auto-seeded there) and re-seeding with the wall clock on every call
// is redundant; on pre-1.20 toolchains it is what keeps the global
// source non-deterministic. Worth revisiting once the minimum Go version
// is known.
func Shuffle[A any](slice []A) []A {
	var ret = append([]A{}, slice...)
	rand.Seed(time.Now().UnixNano())
	rand.Shuffle(len(ret), func(i, j int) {
		ret[i], ret[j] = ret[j], ret[i]
	})
	return ret
}

// Sample will return a slice containing "n" random elements from the original slice
func Sample[A any](slice []A, n int) []A {
	var ret []A
	if n > len(slice) {
		n = len(slice)
	}
	// For large samples a full shuffle is cheaper than rejection sampling.
	if n > len(slice)/3 { // square root?
		ret = Shuffle(slice)
		return ret[:n]
	}
	// Rejection-sample distinct indices for small n.
	idxs := map[int]struct{}{}
	rand.Seed(time.Now().UnixNano())
	for i := 0; i < n; i++ {
		var idx int
		for {
			idx = rand.Intn(len(slice))
			_, found := idxs[idx]
			if found {
				continue
			}
			idxs[idx] = struct{}{}
			break
		}
		ret = append(ret, slice[idx])
	}
	return ret
}

// Sort will return a new slice that is sorted in the natural order
func Sort[A compare.Ordered](slice []A) []A {
	return SortFunc(slice, compare.Less[A])
}

// SortFunc will return a new slice that is sorted using the supplied less function for natural ordering
func SortFunc[A any](slice []A, less func(a, b A) bool) []A {
	var res = append([]A{}, slice...)
	sort.Slice(res, less)
	return res
}

// Search given a slice data sorted in ascending order,
// the call
//	Search[int](data, func(e int) bool { return e >= 23 })
// returns the smallest index i and element e such that e >= 23.
func Search[A any](slice []A, f func(e A) bool) (index int, e A) {
	return sort.Search(slice, f)
}

// Compact will remove any duplicate elements following each other in a slice, eg
// {1,1,2,1,2,2,2} => {1,2,1,2}
func Compact[A comparable](slice []A) []A {
	return CompactFunc(slice, compare.Equal[A])
}

// CompactFunc will remove any duplicate elements following each other determined by the equal func.
// eg removing duplicate whitespaces from a string might look like
//  CompactFunc([]rune("a    b"), func(a, b rune) {
//    return a == ' ' && a == b
//  })
// resulting in "a b"
func CompactFunc[A any](slice []A, equal func(a, b A) bool) []A {
	if len(slice) == 0 {
		return slice
	}
	head := slice[0]
	// `last` is captured by the fold closure and tracks the previous kept element.
	last := head
	tail := Fold(slice[1:], func(accumulator []A, current A) []A {
		if equal(last, current) {
			return accumulator
		}
		last = current
		return append(accumulator, current)
	}, []A{})
	return append([]A{head}, tail...)
}

// Max returns the largest element of the slice
func Max[E compare.Ordered](slice ...E) E {
	var zero E
	if slice == nil || len(slice) == 0 {
		return zero
	}
	cur := slice[0]
	for _, c := range slice {
		if cur < c {
			cur = c
		}
	}
	return cur
}

// Min returns the smallest element of the slice
func Min[E compare.Ordered](slice ...E) E {
	var zero E
	if slice == nil || len(slice) == 0 {
		return zero
	}
	cur := slice[0]
	for _, c := range slice {
		if cur > c {
			cur = c
		}
	}
	return cur
}

// Flatten will flatten a 2d slice into a 1d slice
func Flatten[A any](slice [][]A) []A {
	var capacity int
	for _, s := range slice {
		capacity += len(s)
	}
	var res = make([]A, 0, capacity)
	for _, val := range slice {
		res = append(res, val...)
	}
	return res
}

// Map will map entries in one slice to entries in another slice
func Map[A any, B any](slice []A, f func(a A) B) []B {
	res := make([]B, 0, len(slice))
	for _, a := range slice {
		res = append(res, f(a))
	}
	return res
}

// FlatMap will map entries in one slice to entries in another slice and then flatten the map
func FlatMap[A any, B any](slice []A, f func(a A) []B) []B {
	return Flatten(Map(slice, f))
}

// Fold will iterate through the slice, from the left, and execute the combine function on each element accumulating the result into a value
func Fold[I any, A any](slice []I, combined func(accumulator A, val I) A, init A) A {
	for _, val := range slice {
		init = combined(init, val)
	}
	return init
}

// FoldRight will iterate through the slice, from the right, and execute the combine function on each element accumulating the result into a value
func FoldRight[I any, A any](slice []I, combined func(accumulator A, val I) A, init A) A {
	l := len(slice)
	for i := range slice {
		i := l - i - 1
		init = combined(init, slice[i])
	}
	return init
}

// KeyBy will iterate through the slice and create a map where the key function generates the key value pair.
// If multiple values generate the same key, it is the first value that is stored in the map
func KeyBy[A any, B comparable](slice []A, key func(a A) B) map[B]A {
	m := make(map[B]A)
	for _, v := range slice {
		k := key(v)
		_, exist := m[k]
		if exist {
			continue
		}
		m[k] = v
	}
	return m
}

// GroupBy will iterate through the slice and create a map where entries are grouped into slices, keyed by the value the key function generates.
func GroupBy[A any, B comparable](slice []A, key func(a A) B) map[B][]A { m := make(map[B][]A) for _, v := range slice { k := key(v) m[k] = append(m[k], v) } return m } // Uniq returns a slice with no duplicate entries func Uniq[A comparable](slice []A) []A { return UniqBy(slice, compare.Identity[A]) } // UniqBy returns a slice with no duplicate entries using the by function to determine the key func UniqBy[A any, B comparable](slice []A, by func(a A) B) []A { var res []A var set = map[B]struct{}{} for _, e := range slice { key := by(e) _, exist := set[key] if exist { continue } set[key] = struct{}{} res = append(res, e) } return res } // Union will return the union of an arbitrary number of slices. This is equivalent to Uniq(Concat(sliceA, sliceB)) func Union[A comparable](slices ...[]A) []A { return UnionBy(compare.Identity[A], slices...) } // UnionBy will return the union of an arbitrary number of slices where the by function is used to determine the key. This is equivalent to UniqBy(Concat(sliceA, sliceB), by) func UnionBy[A any, B comparable](by func(a A) B, slices ...[]A) []A { if len(slices) == 0 { return nil } var maxCapacity = 0 for _, slice := range slices { if len(slice) > maxCapacity { maxCapacity = len(slice) } } var res = make([]A, 0, maxCapacity) var set = map[B]struct{}{} for _, slice := range slices { for _, e := range slice { key := by(e) _, ok := set[key] if ok { continue } set[key] = struct{}{} res = append(res, e) } } return res } // Intersection returns a slice containing the intersection between passed in slices func Intersection[A comparable](slices ...[]A) []A { return IntersectionBy(compare.Identity[A], slices...) 
} // IntersectionBy returns a slice containing the intersection between passed in slices determined by the "by" function func IntersectionBy[A any, B comparable](by func(a A) B, slices ...[]A) []A { if len(slices) == 0 { return nil } var res = UniqBy(slices[0], by) for _, slice := range slices[1:] { var set = map[B]bool{} for _, e := range slice { set[by(e)] = true } res = Filter(res, func(a A) bool { return set[by(a)] }) } return res } // Difference returns a slice containing the difference between passed in slices func Difference[A comparable](slices ...[]A) []A { return DifferenceBy(compare.Identity[A], slices...) } // DifferenceBy returns a slice containing the difference between passed in slices determined by the "by" function func DifferenceBy[A any, B comparable](by func(a A) B, slices ...[]A) []A { if len(slices) == 0 { return nil } var exclude = map[B]bool{} for _, v := range IntersectionBy(by, slices...) { exclude[by(v)] = true } var res []A for _, slice := range slices { for _, e := range slice { key := by(e) if exclude[key] { continue } exclude[key] = true res = append(res, e) } } return res } // Complement returns a slice containing all elements in "b" that is not present in "a" func Complement[A comparable](a, b []A) []A { return ComplementBy(compare.Identity[A], a, b) } // ComplementBy returns a slice containing all elements in "b" that is not present in "a" determined using the "by" function func ComplementBy[A any, B comparable](by func(a A) B, a, b []A) []A { if len(a) == 0 { return b } var exclude = map[B]bool{} for _, e := range a { exclude[by(e)] = true } var res []A for _, e := range b { key := by(e) if exclude[key] { continue } exclude[key] = true res = append(res, e) } return res } // Zip will zip two slices, a and b, into one slice, c, using the zip function to combined elements func Zip[A any, B any, C any](aSlice []A, bSlice []B, zipper func(a A, b B) C) []C { var capacity = Min(len(aSlice), len(bSlice)) var cSlice = make([]C, 0, 
capacity) for i := 0; i < capacity; i++ { cSlice = append(cSlice, zipper(aSlice[i], bSlice[i])) } return cSlice } // Unzip will unzip a slice slices, c, into two slices, a and b, using the supplied unziper function func Unzip[A any, B any, C any](cSlice []C, unzipper func(c C) (a A, b B)) ([]A, []B) { var aSlice = make([]A, 0, len(cSlice)) var bSlice = make([]B, 0, len(cSlice)) for _, c := range cSlice { a, b := unzipper(c) aSlice = append(aSlice, a) bSlice = append(bSlice, b) } return aSlice, bSlice } // Zip2 will zip three slices, a, b and c, into one slice, d, using the zip function to combined elements func Zip2[A any, B any, C any, D any](aSlice []A, bSlice []B, cSlice []C, zipper func(a A, b B, c C) D) []D { var capacity = Min(len(aSlice), len(bSlice), len(cSlice)) var dSlice = make([]D, 0, capacity) for i := 0; i < capacity; i++ { dSlice = append(dSlice, zipper(aSlice[i], bSlice[i], cSlice[i])) } return dSlice } // Unzip2 will unzip a slice slices, d, into three slices, a, b and c, using the supplied unziper function func Unzip2[A any, B any, C any, D any](dSlice []D, unzipper func(d D) (a A, b B, c C)) ([]A, []B, []C) { var aSlice = make([]A, 0, len(dSlice)) var bSlice = make([]B, 0, len(dSlice)) var cSlice = make([]C, 0, len(dSlice)) for _, d := range dSlice { a, b, c := unzipper(d) aSlice = append(aSlice, a) bSlice = append(bSlice, b) cSlice = append(cSlice, c) } return aSlice, bSlice, cSlice }
slicez/slices.go
0.844729
0.519217
slices.go
starcoder
package openapi import ( "reflect" ) // setSchemaMax sets the given maximum to the appropriate // schema field based on the given type. func setSchemaMax(schema *Schema, max int, t reflect.Type) { if isNumber(t) { schema.Maximum = max } else if isString(t) { if max >= 0 { schema.MaxLength = max } } else if isMap(t) { if max >= 0 { schema.MaxProperties = max } } else if t.Kind() == reflect.Slice { if max >= 0 { schema.MaxItems = max } } } // setSchemaMin sets the given minimum to the appropriate // schema field based on the given type. func setSchemaMin(schema *Schema, min int, t reflect.Type) { if isNumber(t) { schema.Minimum = min } else if isString(t) { if min >= 0 { schema.MinLength = min } } else if isMap(t) { if min >= 0 { schema.MinProperties = min } } else if t.Kind() == reflect.Slice { if min >= 0 { schema.MinItems = min } } } // setSchemaEq sets the given equals value to the appropriate // schema field based on the given type. func setSchemaEq(schema *Schema, eq int, t reflect.Type) { // For numbers and strings, equals tag would translate // to the `const` property of the JSON Validation spec // but OpenAPI doesn't support it. if isNumber(t) || isString(t) { return } setSchemaLen(schema, eq, t) } // setSchemaLen sets the given len to the appropriate // schema field based on the given type. func setSchemaLen(schema *Schema, len int, t reflect.Type) { setSchemaMax(schema, len, t) setSchemaMin(schema, len, t) } // isString returns whether the given reflect type represents a string. func isString(typ reflect.Type) bool { return typ.Kind() == reflect.String } // isMap returns whether the given reflect type represents a string. func isMap(typ reflect.Type) bool { return typ.Kind() == reflect.Map } // isNumber returns whether the given reflect type // represents a number. 
func isNumber(typ reflect.Type) bool { switch typ.Kind() { case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64: return true } return false }
openapi/validation.go
0.697506
0.441432
validation.go
starcoder
package draw import ( "encoding/hex" "fmt" "image/color" "log" "math" "math/rand" "strconv" ) // RandomColorFromArrayWithFreq returns a random color from the given array. // It will return the first color with a specific frequency, // and all the other colors with the complementary frequency. func RandomColorFromArrayWithFreq(colors []color.RGBA, prob float64) color.RGBA { if rf := rand.Float64(); rf < prob { return colors[0] } return RandomColorFromArray(colors[1:]) } // RandomColorFromArray returns a random color from the given array. func RandomColorFromArray(colors []color.RGBA) color.RGBA { r := rand.Intn(len(colors)) return colors[r] } // RandomIndexFromArrayWithFreq returns a random index from the given array. // It will return the first index with a specific frequency, // and all the other indexes with the complementary frequency. func RandomIndexFromArrayWithFreq(colors []color.RGBA, prob float64) int { if rf := rand.Float64(); rf < prob { return 0 } return RandomIndexFromArray(colors[1:]) + 1 } // RandomIndexFromArray returns an index from the given array. func RandomIndexFromArray(colors []color.RGBA) int { r := rand.Intn(len(colors)) return r } // ColorByPercentage returns a color based on the given percentage and the // number of colors present in the 'colors' array in a gradient way. func ColorByPercentage(colors []color.RGBA, percentage int) color.RGBA { r := rand.Intn(100) colorChange := 100 / len(colors) frontier := 0 frontier = int(math.Ceil(float64(r) / float64(colorChange))) if r < percentage { if frontier == 0 { frontier++ } return RandomColorFromArray(colors[:frontier]) } if frontier == len(colors) { frontier-- } return RandomColorFromArray(colors[frontier:]) } // FillFromRGBA return a "fill" SVG style from a color.RGBA func FillFromRGBA(c color.RGBA) string { return fmt.Sprintf("fill:rgb(%d,%d,%d)", c.R, c.G, c.B) } // PickColor returns a color given a key string, an array of colors and an index. // key: should be a md5 hash string. 
// index: is an index from the key string. Should be in interval [0, 16] // Algorithm: PickColor converts the key[index] value to a decimal value. // We pick the ith colors that respects the equality value%numberOfColors == i. func PickColor(key string, colors []color.RGBA, index int) color.RGBA { n := len(colors) i := PickIndex(key, n, index) return colors[i] } // PickIndex returns an index of given a key string, the size of an array of colors // and an index. // key: should be a md5 hash string. // index: is an index from the key string. Should be in interval [0, 16] // Algorithm: PickIndex converts the key[index] value to a decimal value. // We pick the ith index that respects the equality value%sizeOfArray == i. func PickIndex(key string, n int, index int) int { s := hex.EncodeToString([]byte{key[index]}) if r, err := strconv.ParseInt(s, 16, 0); err == nil { for i := 0; i < n; i++ { if int(r)%n == i { return i } } } else { log.Printf("Error calling ParseInt(%v, 16, 0): %v\n", s, err) } return 0 } // RGBToHex converts an RGB triple to an Hex string. func RGBToHex(r, g, b uint8) string { return fmt.Sprintf("#%02X%02X%02X", r, g, b) } // DefaultSvgAttributes returns the default attributes to apply to SVG canvases func DefaultSvgAttributes() []string { // shape-rendering provides hints to the renderer about how to render the image // crispedges and optimizeSpeed prevent the edges of generated blocks from being // antialiased causing a tiled effect (visible lines between each block) return []string{"shape-rendering = \"optimizeSpeed\""} }
draw/tools.go
0.764628
0.527621
tools.go
starcoder
package neat import "math" // Represents a neural network type Network interface { // Activates the neural network using the inputs. Returns the ouput values. Activate(inputs []float64) (outputs []float64, err error) } type NeuronType byte const ( Bias NeuronType = iota + 1 // 1 Input // 2 Hidden // 3 Output // 4 ) func (n NeuronType) String() string { switch n { case Bias: return "Bias" case Input: return "Input" case Hidden: return "Hidden" case Output: return "Output" default: return "Unknown NeuronType" } } type ActivationType byte const ( Direct ActivationType = iota + 1 // 1 SteependSigmoid // 2 Sigmoid // 3 Tanh // 4 InverseAbs // 5 ) var ( Activations []ActivationType = []ActivationType{SteependSigmoid, Sigmoid, Tanh, InverseAbs} ) func (a ActivationType) String() string { switch a { case Direct: return "Direct" case SteependSigmoid: return "Steepend Sigmoid" case Sigmoid: return "Sigmoid" case Tanh: return "Tanh" case InverseAbs: return "Inverse ABS" default: return "Unknown ActivationType" } } func (a ActivationType) Range() (float64, float64) { switch a { case Direct: return math.Inf(-1), math.Inf(1) case SteependSigmoid: return 0, 1.0 case Sigmoid: return 0, 1.0 case Tanh: return -1.0, 1.0 case InverseAbs: return -1.0, 1.0 default: return math.NaN(), math.NaN() } } func DirectActivation(x float64) float64 { return x } func SigmoidActivation(x float64) float64 { return 1.0 / (1.0 + exp1(-x)) } func SteependSigmoidActivation(x float64) float64 { return 1.0 / (1.0 + exp1(-4.9*x)) } func TanhActivation(x float64) float64 { return math.Tanh(0.9 * x) } func InverseAbsActivation(x float64) float64 { return x / (1.0 + math.Abs(x)) } // Speed up over math.Exp by using less precision // https://codingforspeed.com/using-faster-exponential-approximation/ func exp1(x float64) float64 { x = 1.0 + x/256.0 x *= x x *= x x *= x x *= x x *= x x *= x x *= x x *= x return x } func exp2(x float64) float64 { x = 1.0 + x/1024 x *= x x *= x x *= x x *= x x *= x x *= x x *= x x 
*= x x *= x x *= x return x }
network.go
0.822546
0.637609
network.go
starcoder
package curves import ( "crypto/elliptic" crand "crypto/rand" "crypto/sha512" "fmt" "io" "math/big" "filippo.io/edwards25519" "github.com/btcsuite/btcd/btcec" "github.com/bwesterb/go-ristretto" "github.com/coinbase/kryptology/internal" "github.com/coinbase/kryptology/pkg/core" "github.com/coinbase/kryptology/pkg/core/curves/native/bls12381" ) type EcScalar interface { Add(x, y *big.Int) *big.Int Sub(x, y *big.Int) *big.Int Neg(x *big.Int) *big.Int Mul(x, y *big.Int) *big.Int Hash(input []byte) *big.Int Div(x, y *big.Int) *big.Int Random() (*big.Int, error) IsValid(x *big.Int) bool Bytes(x *big.Int) []byte // fixed-length byte array } type K256Scalar struct{} // Static interface assertion var _ EcScalar = (*K256Scalar)(nil) // warning: the Euclidean alg which Mod uses is not constant-time. func NewK256Scalar() *K256Scalar { return &K256Scalar{} } func (k K256Scalar) Add(x, y *big.Int) *big.Int { v := new(big.Int).Add(x, y) v.Mod(v, btcec.S256().N) return v } func (k K256Scalar) Sub(x, y *big.Int) *big.Int { v := new(big.Int).Sub(x, y) v.Mod(v, btcec.S256().N) return v } func (k K256Scalar) Neg(x *big.Int) *big.Int { v := new(big.Int).Sub(btcec.S256().N, x) v.Mod(v, btcec.S256().N) return v } func (k K256Scalar) Mul(x, y *big.Int) *big.Int { v := new(big.Int).Mul(x, y) v.Mod(v, btcec.S256().N) return v } func (k K256Scalar) Div(x, y *big.Int) *big.Int { t := new(big.Int).ModInverse(y, btcec.S256().N) return k.Mul(x, t) } func (k K256Scalar) Hash(input []byte) *big.Int { return new(ScalarK256).Hash(input).BigInt() } func (k K256Scalar) Random() (*big.Int, error) { b := make([]byte, 48) n, err := crand.Read(b) if err != nil { return nil, err } if n != 48 { return nil, fmt.Errorf("insufficient bytes read") } v := new(big.Int).SetBytes(b) v.Mod(v, btcec.S256().N) return v, nil } func (k K256Scalar) IsValid(x *big.Int) bool { return core.In(x, btcec.S256().N) == nil } func (k K256Scalar) Bytes(x *big.Int) []byte { bytes := make([]byte, 32) x.FillBytes(bytes) // 
big-endian; will left-pad. return bytes } type P256Scalar struct{} // Static interface assertion var _ EcScalar = (*P256Scalar)(nil) func NewP256Scalar() *P256Scalar { return &P256Scalar{} } func (k P256Scalar) Add(x, y *big.Int) *big.Int { v := new(big.Int).Add(x, y) v.Mod(v, elliptic.P256().Params().N) return v } func (k P256Scalar) Sub(x, y *big.Int) *big.Int { v := new(big.Int).Sub(x, y) v.Mod(v, elliptic.P256().Params().N) return v } func (k P256Scalar) Neg(x *big.Int) *big.Int { v := new(big.Int).Sub(elliptic.P256().Params().N, x) v.Mod(v, elliptic.P256().Params().N) return v } func (k P256Scalar) Mul(x, y *big.Int) *big.Int { v := new(big.Int).Mul(x, y) v.Mod(v, elliptic.P256().Params().N) return v } func (k P256Scalar) Div(x, y *big.Int) *big.Int { t := new(big.Int).ModInverse(y, elliptic.P256().Params().N) return k.Mul(x, t) } func (k P256Scalar) Hash(input []byte) *big.Int { return new(ScalarP256).Hash(input).BigInt() } func (k P256Scalar) Random() (*big.Int, error) { b := make([]byte, 48) n, err := crand.Read(b) if err != nil { return nil, err } if n != 48 { return nil, fmt.Errorf("insufficient bytes read") } v := new(big.Int).SetBytes(b) v.Mod(v, elliptic.P256().Params().N) return v, nil } func (k P256Scalar) IsValid(x *big.Int) bool { return core.In(x, elliptic.P256().Params().N) == nil } func (k P256Scalar) Bytes(x *big.Int) []byte { bytes := make([]byte, 32) x.FillBytes(bytes) // big-endian; will left-pad. 
return bytes } type Bls12381Scalar struct{} // Static interface assertion var _ EcScalar = (*Bls12381Scalar)(nil) func NewBls12381Scalar() *Bls12381Scalar { return &Bls12381Scalar{} } func (k Bls12381Scalar) Add(x, y *big.Int) *big.Int { a := bls12381.Bls12381FqNew().SetBigInt(x) b := bls12381.Bls12381FqNew().SetBigInt(y) return a.Add(a, b).BigInt() } func (k Bls12381Scalar) Sub(x, y *big.Int) *big.Int { a := bls12381.Bls12381FqNew().SetBigInt(x) b := bls12381.Bls12381FqNew().SetBigInt(y) return a.Sub(a, b).BigInt() } func (k Bls12381Scalar) Neg(x *big.Int) *big.Int { a := bls12381.Bls12381FqNew().SetBigInt(x) return a.Neg(a).BigInt() } func (k Bls12381Scalar) Mul(x, y *big.Int) *big.Int { a := bls12381.Bls12381FqNew().SetBigInt(x) b := bls12381.Bls12381FqNew().SetBigInt(y) return a.Mul(a, b).BigInt() } func (k Bls12381Scalar) Div(x, y *big.Int) *big.Int { c := bls12381.Bls12381FqNew() a := bls12381.Bls12381FqNew().SetBigInt(x) b := bls12381.Bls12381FqNew().SetBigInt(y) _, wasInverted := c.Invert(b) c.Mul(a, c) tt := map[bool]int{false: 0, true: 1} return a.CMove(a, c, tt[wasInverted]).BigInt() } func (k Bls12381Scalar) Hash(input []byte) *big.Int { return new(ScalarBls12381).Hash(input).BigInt() } func (k Bls12381Scalar) Random() (*big.Int, error) { a := BLS12381G1().NewScalar().Random(crand.Reader) if a == nil { return nil, fmt.Errorf("invalid random value") } return a.BigInt(), nil } func (k Bls12381Scalar) Bytes(x *big.Int) []byte { bytes := make([]byte, 32) x.FillBytes(bytes) // big-endian; will left-pad. 
return bytes } func (k Bls12381Scalar) IsValid(x *big.Int) bool { a := bls12381.Bls12381FqNew().SetBigInt(x) return a.BigInt().Cmp(x) == 0 } // taken from https://datatracker.ietf.org/doc/html/rfc8032 var ed25519N, _ = new(big.Int).SetString("1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", 16) type Ed25519Scalar struct{} // Static interface assertion var _ EcScalar = (*Ed25519Scalar)(nil) func NewEd25519Scalar() *Ed25519Scalar { return &Ed25519Scalar{} } func (k Ed25519Scalar) Add(x, y *big.Int) *big.Int { a, err := internal.BigInt2Ed25519Scalar(x) if err != nil { panic(err) } b, err := internal.BigInt2Ed25519Scalar(y) if err != nil { panic(err) } a.Add(a, b) return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes())) } func (k Ed25519Scalar) Sub(x, y *big.Int) *big.Int { a, err := internal.BigInt2Ed25519Scalar(x) if err != nil { panic(err) } b, err := internal.BigInt2Ed25519Scalar(y) if err != nil { panic(err) } a.Subtract(a, b) return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes())) } func (k Ed25519Scalar) Neg(x *big.Int) *big.Int { a, err := internal.BigInt2Ed25519Scalar(x) if err != nil { panic(err) } a.Negate(a) return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes())) } func (k Ed25519Scalar) Mul(x, y *big.Int) *big.Int { a, err := internal.BigInt2Ed25519Scalar(x) if err != nil { panic(err) } b, err := internal.BigInt2Ed25519Scalar(y) if err != nil { panic(err) } a.Multiply(a, b) return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes())) } func (k Ed25519Scalar) Div(x, y *big.Int) *big.Int { b, err := internal.BigInt2Ed25519Scalar(y) if err != nil { panic(err) } b.Invert(b) a, err := internal.BigInt2Ed25519Scalar(x) if err != nil { panic(err) } a.Multiply(a, b) return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes())) } func (k Ed25519Scalar) Hash(input []byte) *big.Int { v := new(ristretto.Scalar).Derive(input) var data [32]byte v.BytesInto(&data) return 
new(big.Int).SetBytes(internal.ReverseScalarBytes(data[:])) } func (k Ed25519Scalar) Bytes(x *big.Int) []byte { a, err := internal.BigInt2Ed25519Scalar(x) if err != nil { panic(err) } return internal.ReverseScalarBytes(a.Bytes()) } func (k Ed25519Scalar) Random() (*big.Int, error) { return k.RandomWithReader(crand.Reader) } func (k Ed25519Scalar) RandomWithReader(r io.Reader) (*big.Int, error) { b := make([]byte, 64) n, err := r.Read(b) if err != nil { return nil, err } if n != 64 { return nil, fmt.Errorf("insufficient bytes read") } digest := sha512.Sum512(b) var hBytes [32]byte copy(hBytes[:], digest[:]) s, err := edwards25519.NewScalar().SetBytesWithClamping(hBytes[:]) if err != nil { return nil, err } return new(big.Int).SetBytes(internal.ReverseScalarBytes(s.Bytes())), nil } func (k Ed25519Scalar) IsValid(x *big.Int) bool { return x.Cmp(ed25519N) == -1 }
pkg/core/curves/ec_scalar.go
0.670285
0.409339
ec_scalar.go
starcoder
package types

import (
	"bytes"
	"fmt"
	"io"
	"math"
	"strings"

	"reflect"
	"regexp"

	"github.com/lyraproj/pcore/px"
	"github.com/lyraproj/pcore/utils"
)

type (
	// String that is unconstrained
	stringType struct{}

	// String constrained to content
	vcStringType struct {
		stringType
		value string
	}

	// String constrained by length of string
	scStringType struct {
		stringType
		size *IntegerType
	}

	// stringValue represents string as a pcore.Value
	stringValue string
)

// stringTypeDefault is the shared instance of the unconstrained String type.
var stringTypeDefault = &stringType{}

// stringTypeNotEmpty is a String type requiring at least one character.
var stringTypeNotEmpty = &scStringType{size: NewIntegerType(1, math.MaxInt64)}

// StringMetaType is the Pcore meta type for String; assigned in init.
var StringMetaType px.ObjectType

func init() {
	// Register the meta type whose single parameter is either a length
	// constraint (Type[Integer]) or an exact string value.
	StringMetaType = newObjectType(`Pcore::StringType`, `Pcore::ScalarDataType { attributes => { size_type_or_value => { type => Variant[Undef,String,Type[Integer]], value => undef }, } }`, func(ctx px.Context, args []px.Value) px.Value {
		return newStringType2(args...)
	})

	// Register the `String` conversion function: String(value) or
	// String(value, formats) renders any value as a string.
	newGoConstructor2(`String`,
		func(t px.LocalTypes) {
			t.Type2(`Format`, NewPatternType([]*RegexpType{NewRegexpTypeR(px.FormatPattern)}))
			t.Type(`ContainerFormat`, `Struct[{ Optional[format] => Format, Optional[separator] => String, Optional[separator2] => String, Optional[string_formats] => Hash[Type, Format] }]`)
			t.Type(`TypeMap`, `Hash[Type, Variant[Format, ContainerFormat]]`)
			t.Type(`Formats`, `Variant[Default, String[1], TypeMap]`)
		},

		func(d px.Dispatch) {
			d.Param(`Any`)
			d.OptionalParam(`Formats`)
			d.Function(func(c px.Context, args []px.Value) px.Value {
				f := None
				if len(args) > 1 {
					var err error
					f, err = px.NewFormatContext3(args[0], args[1])
					if err != nil {
						panic(illegalArgument(`String`, 1, err.Error()))
					}
				}
				return stringValue(px.ToString2(args[0], f))
			})
		},
	)
}

// DefaultStringType returns the unconstrained String type.
func DefaultStringType() *stringType {
	return stringTypeDefault
}

// NewStringType creates a String type constrained either by a length range
// (rng) or by an exact value (s). An empty s with a nil or all-positive range
// yields the default unconstrained type.
func NewStringType(rng *IntegerType, s string) px.Type {
	if s == `` {
		if rng == nil || *rng == *IntegerTypePositive {
			return DefaultStringType()
		}
		return &scStringType{size: rng}
	}
	return &vcStringType{value: s}
}

// newStringType2 creates a String type from Pcore type parameters:
// String[], String[value], String[min], or String[min, max].
func newStringType2(args ...px.Value) px.Type {
	var rng *IntegerType
	var ok bool
	switch len(args) {
	case 0:
		return DefaultStringType()
	case 1:
		var value stringValue
		if value, ok = args[0].(stringValue); ok {
			return NewStringType(nil, string(value))
		}
		rng, ok = args[0].(*IntegerType)
		if !ok {
			var min int64
			min, ok = toInt(args[0])
			if !ok {
				panic(illegalArgumentType(`String[]`, 0, `String, Integer or Type[Integer]`, args[0]))
			}
			rng = NewIntegerType(min, math.MaxInt64)
		}
	case 2:
		var min, max int64
		min, ok = toInt(args[0])
		if !ok {
			panic(illegalArgumentType(`String[]`, 0, `Integer`, args[0]))
		}
		max, ok = toInt(args[1])
		if !ok {
			panic(illegalArgumentType(`String[]`, 1, `Integer`, args[1]))
		}
		rng = NewIntegerType(min, max)
	default:
		panic(illegalArgumentCount(`String[]`, `0 - 2`, len(args)))
	}
	return NewStringType(rng, ``)
}

// Visitor support.

func (t *stringType) Accept(v px.Visitor, g px.Guard) {
	v(t)
}

func (t *scStringType) Accept(v px.Visitor, g px.Guard) {
	v(t)
	t.size.Accept(v, g)
}

func (t *stringType) Default() px.Type {
	return stringTypeDefault
}

// Equality: types are equal per kind, comparing the size range or the exact
// value where one exists.

func (t *stringType) Equals(o interface{}, g px.Guard) bool {
	_, ok := o.(*stringType)
	return ok
}

func (t *scStringType) Equals(o interface{}, g px.Guard) bool {
	if ot, ok := o.(*scStringType); ok {
		return t.size.Equals(ot.size, g)
	}
	return false
}

func (t *vcStringType) Equals(o interface{}, g px.Guard) bool {
	if ot, ok := o.(*vcStringType); ok {
		return t.value == ot.value
	}
	return false
}

// Get exposes the single meta-type attribute `size_type_or_value`.

func (t *stringType) Get(key string) (value px.Value, ok bool) {
	switch key {
	case `size_type_or_value`:
		return IntegerTypePositive, true
	}
	return nil, false
}

func (t *scStringType) Get(key string) (value px.Value, ok bool) {
	switch key {
	case `size_type_or_value`:
		return t.size, true
	}
	return nil, false
}

func (t *vcStringType) Get(key string) (value px.Value, ok bool) {
	switch key {
	case `size_type_or_value`:
		return stringValue(t.value), true
	}
	return nil, false
}

// Assignability rules.

func (t *stringType) IsAssignable(o px.Type, g px.Guard) bool {
	// The unconstrained String accepts all string-ish types.
	switch o.(type) {
	case *stringType, *scStringType, *vcStringType, *EnumType, *PatternType:
		return true
	}
	return false
}

func (t *scStringType) IsAssignable(o px.Type, g px.Guard) bool {
	switch o := o.(type) {
	case *vcStringType:
		// NOTE(review): len() here is a byte count, not a rune count —
		// presumably intentional throughout this file; confirm.
		return t.size.IsInstance3(len(o.value))
	case *scStringType:
		return t.size.IsAssignable(o.size, g)
	case *EnumType:
		// Every enum member must satisfy the length constraint.
		for _, str := range o.values {
			if !t.size.IsInstance3(len(string(str))) {
				return false
			}
		}
		return true
	}
	return false
}

func (t *vcStringType) IsAssignable(o px.Type, g px.Guard) bool {
	if st, ok := o.(*vcStringType); ok {
		return t.value == st.value
	}
	return false
}

// Instance checks.

func (t *stringType) IsInstance(o px.Value, g px.Guard) bool {
	_, ok := o.(stringValue)
	return ok
}

func (t *scStringType) IsInstance(o px.Value, g px.Guard) bool {
	str, ok := o.(stringValue)
	return ok && t.size.IsInstance3(len(string(str)))
}

func (t *vcStringType) IsInstance(o px.Value, g px.Guard) bool {
	str, ok := o.(stringValue)
	return ok && t.value == string(str)
}

func (t *stringType) MetaType() px.ObjectType {
	return StringMetaType
}

func (t *stringType) Name() string {
	return `String`
}

func (t *stringType) Parameters() []px.Value {
	return px.EmptyValues
}

func (t *scStringType) Parameters() []px.Value {
	return t.size.Parameters()
}

func (t *stringType) ReflectType(c px.Context) (reflect.Type, bool) {
	return reflect.TypeOf(`x`), true
}

func (t *stringType) CanSerializeAsString() bool {
	return true
}

func (t *stringType) SerializationString() string {
	return t.String()
}

func (t *stringType) String() string {
	return px.ToString2(t, None)
}

func (t *scStringType) String() string {
	return px.ToString2(t, None)
}

func (t *vcStringType) String() string {
	return px.ToString2(t, None)
}

func (t *stringType) Size() px.Type {
	return IntegerTypePositive
}

func (t *scStringType) Size() px.Type {
	return t.size
}

func (t *stringType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) {
	TypeToString(t, b, s, g)
}

func (t *scStringType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) {
	TypeToString(t, b, s, g)
}

func (t *vcStringType) ToString(b io.Writer, s px.FormatContext, g px.RDetect) {
	TypeToString(t, b, s, g)
}

func (t *stringType) PType() px.Type {
	return &TypeType{t}
}

func (t *stringType) Value() *string {
	return nil
}

func (t *vcStringType) Value() *string {
	return &t.value
}

// WrapString wraps a native string as a pcore string value.
func WrapString(str string) px.StringValue {
	return stringValue(str)
}

// Add concatenates another string value; only strings are accepted.
func (sv stringValue) Add(v px.Value) px.List {
	if ov, ok := v.(stringValue); ok {
		return stringValue(string(sv) + string(ov))
	}
	panic(fmt.Sprintf(`No auto conversion from %s to String`, v.PType().String()))
}

// OneCharStringType is the element type of a string viewed as a list.
var OneCharStringType = NewStringType(NewIntegerType(1, 1), ``)

func (sv stringValue) AddAll(tv px.List) px.List {
	s := bytes.NewBufferString(sv.String())
	tv.Each(func(e px.Value) {
		ev, ok := e.(stringValue)
		if !ok {
			panic(fmt.Sprintf(`No auto conversion from %s to String`, e.PType().String()))
		}
		s.WriteString(string(ev))
	})
	return stringValue(s.String())
}

// List operations treat the string as a sequence of one-rune strings.

func (sv stringValue) All(predicate px.Predicate) bool {
	for _, c := range sv.String() {
		if !predicate(stringValue(string(c))) {
			return false
		}
	}
	return true
}

func (sv stringValue) Any(predicate px.Predicate) bool {
	for _, c := range sv.String() {
		if predicate(stringValue(string(c))) {
			return true
		}
	}
	return false
}

func (sv stringValue) AppendTo(slice []px.Value) []px.Value {
	for _, c := range sv.String() {
		slice = append(slice, stringValue(string(c)))
	}
	return slice
}

func (sv stringValue) AsArray() px.List {
	return WrapValues(sv.Elements())
}

// At returns the single byte at index i as a string value.
// NOTE(review): byte indexing — a multi-byte rune yields a fragment; the
// rest of this file mixes byte and rune iteration, confirm intent.
func (sv stringValue) At(i int) px.Value {
	if i >= 0 && i < len(sv.String()) {
		return stringValue(sv.String()[i : i+1])
	}
	return undef
}

func (sv stringValue) Delete(v px.Value) px.List {
	panic(`Operation not supported`)
}

func (sv stringValue) DeleteAll(tv px.List) px.List {
	panic(`Operation not supported`)
}

// Elements returns the string's characters as values.
// NOTE(review): the slice is sized by byte length but indexed by the rune's
// byte offset; for non-ASCII input some slots stay nil and offsets skip —
// looks like this assumes ASCII, confirm.
func (sv stringValue) Elements() []px.Value {
	str := sv.String()
	top := len(str)
	el := make([]px.Value, top)
	for idx, c := range str {
		el[idx] = stringValue(string(c))
	}
	return el
}

func (sv stringValue) Each(consumer px.Consumer) {
	for _, c := range sv.String() {
		consumer(stringValue(string(c)))
	}
}

// EachSlice feeds n-byte chunks of the string to consumer (last chunk may be
// shorter).
func (sv stringValue) EachSlice(n int, consumer px.SliceConsumer) {
	s := sv.String()
	top := len(s)
	for i := 0; i < top; i += n {
		e := i + n
		if e > top {
			e = top
		}
		consumer(stringValue(s[i:e]))
	}
}

// EachWithIndex passes each rune with its byte offset (not rune index).
func (sv stringValue) EachWithIndex(consumer px.IndexedConsumer) {
	for i, c := range sv.String() {
		consumer(stringValue(string(c)), i)
	}
}

func (sv stringValue) ElementType() px.Type {
	return OneCharStringType
}

func (sv stringValue) Equals(o interface{}, g px.Guard) bool {
	if ov, ok := o.(stringValue); ok {
		return string(sv) == string(ov)
	}
	return false
}

func (sv stringValue) EqualsIgnoreCase(o px.Value) bool {
	if os, ok := o.(stringValue); ok {
		return strings.EqualFold(string(sv), string(os))
	}
	return false
}

func (sv stringValue) Find(predicate px.Predicate) (px.Value, bool) {
	for _, c := range string(sv) {
		e := stringValue(string(c))
		if predicate(e) {
			return e, true
		}
	}
	return nil, false
}

func (sv stringValue) Flatten() px.List {
	return sv
}

func (sv stringValue) IsEmpty() bool {
	return sv.Len() == 0
}

func (sv stringValue) IsHashStyle() bool {
	return false
}

// Len returns the length in bytes, not runes.
func (sv stringValue) Len() int {
	return len(sv)
}

func (sv stringValue) Map(mapper px.Mapper) px.List {
	s := sv.String()
	mapped := make([]px.Value, len(s))
	for i, c := range s {
		mapped[i] = mapper(stringValue(string(c)))
	}
	return WrapValues(mapped)
}

func (sv stringValue) Reduce(redactor px.BiMapper) px.Value {
	s := sv.String()
	if len(s) == 0 {
		return undef
	}
	// Seed with the first character, then fold the remainder.
	return reduceString(s[1:], sv.At(0), redactor)
}

func (sv stringValue) Reduce2(initialValue px.Value, redactor px.BiMapper) px.Value {
	return reduceString(sv.String(), initialValue, redactor)
}

func (sv stringValue) Reflect(c px.Context) reflect.Value {
	return reflect.ValueOf(sv.String())
}

// ReflectTo writes the string into a Go value: interfaces get the string,
// pointers get a *string, anything else is assumed string-settable.
func (sv stringValue) ReflectTo(c px.Context, value reflect.Value) {
	switch value.Kind() {
	case reflect.Interface:
		value.Set(sv.Reflect(c))
	case reflect.Ptr:
		s := string(sv)
		value.Set(reflect.ValueOf(&s))
	default:
		value.SetString(string(sv))
	}
}

func (sv stringValue) Reject(predicate px.Predicate) px.List {
	selected := bytes.NewBufferString(``)
	for _, c := range sv.String() {
		if !predicate(stringValue(string(c))) {
			selected.WriteRune(c)
		}
	}
	return stringValue(selected.String())
}

func (sv stringValue) Select(predicate px.Predicate) px.List {
	selected := bytes.NewBufferString(``)
	for _, c := range sv.String() {
		if predicate(stringValue(string(c))) {
			selected.WriteRune(c)
		}
	}
	return stringValue(selected.String())
}

// Slice returns the byte range [i, j).
func (sv stringValue) Slice(i int, j int) px.List {
	return stringValue(sv.String()[i:j])
}

func (sv stringValue) Split(pattern *regexp.Regexp) px.List {
	parts := pattern.Split(sv.String(), -1)
	result := make([]px.Value, len(parts))
	for i, s := range parts {
		result[i] = stringValue(s)
	}
	return WrapValues(result)
}

func (sv stringValue) String() string {
	return string(sv)
}

// ToString renders the string according to the format character:
// s/p plain or quoted, c/C capitalized, u/d upper/lower, t trimmed.
func (sv stringValue) ToString(b io.Writer, s px.FormatContext, g px.RDetect) {
	f := px.GetFormat(s.FormatMap(), sv.PType())
	val := string(sv)
	switch f.FormatChar() {
	case 's':
		_, err := fmt.Fprintf(b, f.OrigFormat(), val)
		if err != nil {
			panic(err)
		}
	case 'p':
		f.ApplyStringFlags(b, val, true)
	case 'c':
		val = utils.CapitalizeSegment(val)
		f.ReplaceFormatChar('s').ApplyStringFlags(b, val, f.IsAlt())
	case 'C':
		val = utils.CapitalizeSegments(val)
		f.ReplaceFormatChar('s').ApplyStringFlags(b, val, f.IsAlt())
	case 'u':
		val = strings.ToUpper(val)
		f.ReplaceFormatChar('s').ApplyStringFlags(b, val, f.IsAlt())
	case 'd':
		val = strings.ToLower(val)
		f.ReplaceFormatChar('s').ApplyStringFlags(b, val, f.IsAlt())
	case 't':
		val = strings.TrimSpace(val)
		f.ReplaceFormatChar('s').ApplyStringFlags(b, val, f.IsAlt())
	default:
		//noinspection SpellCheckingInspection
		panic(s.UnsupportedFormat(sv.PType(), `cCudspt`, f))
	}
}

func (sv stringValue) ToKey() px.HashKey {
	return px.HashKey(sv.String())
}

func (sv stringValue) ToLower() px.StringValue {
	return stringValue(strings.ToLower(string(sv)))
}

func (sv stringValue) ToUpper() px.StringValue {
	return stringValue(strings.ToUpper(string(sv)))
}

// PType reports the most specific type: the exact-value String type.
func (sv stringValue) PType() px.Type {
	return &vcStringType{value: string(sv)}
}

// Unique removes duplicate runes, keeping first occurrences in order.
func (sv stringValue) Unique() px.List {
	s := sv.String()
	top := len(s)
	if top < 2 {
		return sv
	}
	result := bytes.NewBufferString(``)
	exists := make(map[rune]bool, top)
	for _, c := range s {
		if !exists[c] {
			exists[c] = true
			result.WriteRune(c)
		}
	}
	// Nothing removed: return the receiver unchanged.
	if result.Len() == len(s) {
		return sv
	}
	return stringValue(result.String())
}

// reduceString folds the string's runes into memo using redactor.
func reduceString(slice string, initialValue px.Value, redactor px.BiMapper) px.Value {
	memo := initialValue
	for _, v := range slice {
		memo = redactor(memo, stringValue(string(v)))
	}
	return memo
}
types/stringtype.go
0.610918
0.447581
stringtype.go
starcoder
package creature

import (
	"math"

	"github.com/karlek/reason/ui"
	"github.com/karlek/worc/area"
	"github.com/karlek/worc/coord"
)

// DrawFOV draws a field of view around a creature as well as the creature's
// memory of already explored areas.
func (c Creature) DrawFOV(a *area.Area) {
	// Clear screen.
	ui.Clear()

	// Get viewport coordinate offset.
	camX, camY := camXY(c, a)

	// Draw already explored areas.
	a.DrawExplored(ui.Area, camX, camY)

	// Draw hero.
	a.Draw(c.X(), c.Y(), camX, camY, ui.Area)

	// Visible coordinates of character.
	cs := c.FOV(a)
	for p := range cs {
		// Set terrain as explored.
		a.Terrain[p.X][p.Y].IsExplored = true

		// TODO(_): refactor cam.
		a.Draw(p.X, p.Y, camX, camY, ui.Area)
	}
}

// FOV returns the set of coordinates visible to the creature: every
// coordinate within the creature's sight radius reachable by an unbroken
// line of sight.
func (c *Creature) FOV(a *area.Area) map[coord.Coord]struct{} {
	radius := c.Sight
	cs := make(map[coord.Coord]struct{})
	for x := c.X() - radius; x <= c.X()+radius; x++ {
		for y := c.Y() - radius; y <= c.Y()+radius; y++ {
			// Discriminate coordinates outside of the sight circle.
			// The integer comparison dx²+dy² > r² is exact and equivalent
			// to the former Sqrt(Pow(dx,2)+Pow(dy,2)) > r float round-trip.
			dx := x - c.X()
			dy := y - c.Y()
			if dx*dx+dy*dy > radius*radius {
				continue
			}
			// Walk the line of sight towards (x, y), skipping the
			// hero's own tile ([1:]).
			for _, p := range getLine(c.X(), c.Y(), x, y)[1:] {
				if !a.ExistsXY(p.X, p.Y) {
					break
				}
				cs[p] = struct{}{}
				// Terrain that breaks line of sight.
				if !a.IsXYPathable(p.X, p.Y) {
					break
				}
			}
		}
	}
	return cs
}

// getLine returns the coordinates on the line from (x1, y1) to (x2, y2)
// computed with Bresenham's line algorithm, ordered from start to end.
func getLine(x1, y1, x2, y2 int) (points []coord.Coord) {
	points = make([]coord.Coord, 0)

	// Work in the octant where the slope is at most 1 by transposing steep
	// lines; points are untransposed when emitted.
	steep := math.Abs(float64(y2-y1)) > math.Abs(float64(x2-x1))
	if steep {
		x1, y1 = y1, x1
		x2, y2 = y2, x2
	}

	// Always iterate left to right; restore the original order afterwards.
	rev := false
	if x1 > x2 {
		x1, x2 = x2, x1
		y1, y2 = y2, y1
		rev = true
	}

	dx := x2 - x1
	dy := int(math.Abs(float64(y2 - y1)))
	err := dx / 2
	y := y1
	ystep := 1
	if y1 >= y2 {
		ystep = -1
	}
	for x := x1; x <= x2; x++ {
		if steep {
			points = append(points, coord.Coord{X: y, Y: x})
		} else {
			points = append(points, coord.Coord{X: x, Y: y})
		}
		err -= dy
		if err < 0 {
			y += ystep
			err += dx
		}
	}
	if rev {
		reverse(points)
	}
	return points
}

// reverse reverses s in place and returns it for convenience.
func reverse(s []coord.Coord) []coord.Coord {
	for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 {
		s[i], s[j] = s[j], s[i]
	}
	return s
}

// camXY returns the coordinate of offset for the viewport. Since the area can
// be larger than the viewport.
func camXY(c Creature, a *area.Area) (int, int) {
	cameraX, cameraY := camX(c, a), camY(c, a)
	// If the whole area fits inside the viewport no offset is needed.
	if ui.Area.Width > len(a.Terrain) {
		cameraX = 0
	}
	if ui.Area.Height > len(a.Terrain[0]) {
		cameraY = 0
	}
	return cameraX, cameraY
}

// camX returns the x offset of the viewport, clamped to the area's bounds.
func camX(c Creature, a *area.Area) int {
	// ui.Area is the viewport size.
	cameraX := c.X() - ui.Area.Width/2
	if c.X() < ui.Area.Width/2 {
		cameraX = 0
	}
	if c.X() >= a.Width-ui.Area.Width/2 {
		cameraX = a.Width - ui.Area.Width
	}
	return cameraX
}

// camY returns the y offset of the viewport, clamped to the area's bounds.
func camY(c Creature, a *area.Area) int {
	cameraY := c.Y() - ui.Area.Height/2
	if c.Y() < ui.Area.Height/2 {
		cameraY = 0
	}
	// >= for consistency with camX: the previous > let the camera sit one
	// step short of the clamp when the hero stood exactly on the boundary.
	if c.Y() >= a.Height-ui.Area.Height/2 {
		cameraY = a.Height - ui.Area.Height
	}
	return cameraY
}
creature/fov.go
0.577972
0.41484
fov.go
starcoder
package math3

import (
	"fmt"
	"math"
	"regexp"
	"strconv"

	"github.com/golang/glog"

	"github.com/rydrman/three.go"
)

// Color is an RGB color with each channel in the range [0, 1].
type Color struct {
	R float64
	G float64
	B float64
}

// NewColor returns a new white color (1, 1, 1).
func NewColor() *Color {
	return &Color{
		1, 1, 1,
	}
}

// R32, G32 and B32 return the channels as float32 for GL-style consumers.

func (color *Color) R32() float32 {
	return float32(color.R)
}

func (color *Color) G32() float32 {
	return float32(color.G)
}

func (color *Color) B32() float32 {
	return float32(color.B)
}

// Set assigns all three channels and returns the color for chaining.
func (color *Color) Set(r, g, b float64) *Color {
	color.R = r
	color.G = g
	color.B = b
	return color
}

// SetScalar assigns the same value to all three channels (a gray level).
func (color *Color) SetScalar(scalar float64) *Color {
	color.R = scalar
	color.G = scalar
	color.B = scalar
	return color
}

// SetHex assigns the color from a packed 24-bit 0xRRGGBB integer.
func (color *Color) SetHex(hex int) *Color {
	color.R = float64(hex>>16&255) / 255
	color.G = float64(hex>>8&255) / 255
	color.B = float64(hex&255) / 255
	return color
}

// SetRGB assigns the three channels (identical to Set; kept for parity with
// the three.js API).
func (color *Color) SetRGB(r, g, b float64) *Color {
	color.R = r
	color.G = g
	color.B = b
	return color
}

// SetHSL assigns the color from hue, saturation and lightness.
// h, s, l ranges are in 0.0 - 1.0; h wraps, s and l are clamped.
func (color *Color) SetHSL(h, s, l float64) *Color {
	h = EuclideanModulo(h, 1)
	s = Clamp(s, 0, 1)
	l = Clamp(l, 0, 1)

	if s == 0 {
		// Achromatic: all channels equal lightness.
		color.R = l
		color.G = l
		color.B = l
	} else {
		var p float64
		if l <= 0.5 {
			p = l * (1 + s)
		} else {
			p = l + s - (l * s)
		}
		q := (2 * l) - p
		color.R = hue2RGB(q, p, h+1.0/3.0)
		color.G = hue2RGB(q, p, h)
		color.B = hue2RGB(q, p, h-1.0/3.0)
	}
	return color
}

// Precompiled patterns used by SetStyle; compiling once at package scope
// avoids recompiling on every call.
var (
	styleFuncRe  = regexp.MustCompile(`^((?:rgb|hsl)a?)\(\s*([^\)]*)\)`)
	rgbIntRe     = regexp.MustCompile(`^(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*(,\s*([0-9]*\.?[0-9]+)\s*)?$`)
	rgbPercentRe = regexp.MustCompile(`^(\d+)\%\s*,\s*(\d+)\%\s*,\s*(\d+)\%\s*(,\s*([0-9]*\.?[0-9]+)\s*)?$`)
	hslRe        = regexp.MustCompile(`^([0-9]*\.?[0-9]+)\s*,\s*(\d+)\%\s*,\s*(\d+)\%\s*(,\s*([0-9]*\.?[0-9]+)\s*)?$`)
	hexRe        = regexp.MustCompile(`^\#([A-Fa-f0-9]+)$`)
)

// SetStyle parses a CSS-like color string: rgb(255,0,0), rgba(255,0,0,0.5),
// rgb(100%,0%,0%), hsl(120,50%,50%), #ff0, #ff0000 or a named keyword
// ("red"). Alpha components are parsed but ignored (a warning is logged).
// Unrecognized styles leave the color unchanged apart from a logged warning.
func (color *Color) SetStyle(style string) *Color {
	match := styleFuncRe.FindStringSubmatch(style)
	if len(match) > 0 {
		// rgb / hsl functional notation
		name := match[1]
		components := match[2]

		switch name {
		case "rgb", "rgba":
			if colorMatch := rgbIntRe.FindStringSubmatch(components); len(colorMatch) > 0 {
				// rgb(255,0,0) rgba(255,0,0,0.5)
				r, _ := strconv.ParseInt(colorMatch[1], 10, 32)
				g, _ := strconv.ParseInt(colorMatch[2], 10, 32)
				b, _ := strconv.ParseInt(colorMatch[3], 10, 32)
				color.R = math.Min(255, float64(r)) / 255
				color.G = math.Min(255, float64(g)) / 255
				color.B = math.Min(255, float64(b)) / 255
				handleAlpha(colorMatch[5])
				return color
			}
			if colorMatch := rgbPercentRe.FindStringSubmatch(components); len(colorMatch) > 0 {
				// rgb(100%,0%,0%) rgba(100%,0%,0%,0.5)
				r, _ := strconv.ParseInt(colorMatch[1], 10, 32)
				g, _ := strconv.ParseInt(colorMatch[2], 10, 32)
				b, _ := strconv.ParseInt(colorMatch[3], 10, 32)
				color.R = math.Min(100, float64(r)) / 100
				color.G = math.Min(100, float64(g)) / 100
				color.B = math.Min(100, float64(b)) / 100
				handleAlpha(colorMatch[5])
				return color
			}
		case "hsl", "hsla":
			if colorMatch := hslRe.FindStringSubmatch(components); len(colorMatch) > 0 {
				// hsl(120,50%,50%) hsla(120,50%,50%,0.5)
				// bitSize must be 32 or 64 for ParseFloat; the original
				// passed 10.
				h, _ := strconv.ParseFloat(colorMatch[1], 64)
				h = h / 360
				s, _ := strconv.ParseInt(colorMatch[2], 10, 32)
				sf := float64(s) / 100
				l, _ := strconv.ParseInt(colorMatch[3], 10, 32)
				lf := float64(l) / 100
				handleAlpha(colorMatch[5])
				return color.SetHSL(h, sf, lf)
			}
		}
	} else if match = hexRe.FindStringSubmatch(style); len(match) > 0 {
		// hex color
		hex := match[1]
		switch len(hex) {
		case 3: // #ff0
			r, _ := strconv.ParseInt(three.CharAt(hex, 0)+three.CharAt(hex, 0), 16, 32)
			g, _ := strconv.ParseInt(three.CharAt(hex, 1)+three.CharAt(hex, 1), 16, 32)
			b, _ := strconv.ParseInt(three.CharAt(hex, 2)+three.CharAt(hex, 2), 16, 32)
			color.R = float64(r) / 255
			color.G = float64(g) / 255
			color.B = float64(b) / 255
			return color
		case 6: // #ff0000
			// All three components use bitSize 32 (the original passed a
			// typo'd 34 for red).
			r, _ := strconv.ParseInt(three.CharAt(hex, 0)+three.CharAt(hex, 1), 16, 32)
			g, _ := strconv.ParseInt(three.CharAt(hex, 2)+three.CharAt(hex, 3), 16, 32)
			b, _ := strconv.ParseInt(three.CharAt(hex, 4)+three.CharAt(hex, 5), 16, 32)
			color.R = float64(r) / 255
			color.G = float64(g) / 255
			color.B = float64(b) / 255
			return color
		}
	}

	if len(style) > 0 {
		// color keywords; the comma-ok form (rather than hex > 0) is needed
		// so that "black" (0x000000) is recognized.
		if hex, ok := ColorKeywords[style]; ok {
			color.SetHex(hex)
		} else {
			// unknown color
			glog.Warningf("three.Color: Unknown color %s", style)
		}
	}
	return color
}

// Clone returns a new color with the same channel values.
func (color *Color) Clone() *Color {
	return NewColor().Copy(color)
}

// Copy assigns src's channels to the receiver and returns the receiver.
func (color *Color) Copy(src *Color) *Color {
	color.R = src.R
	color.G = src.G
	color.B = src.B
	return color
}

// CopyGammaToLinear copies gammaColor converted from gamma to linear space
// using a fixed gamma factor of 2.
func (color *Color) CopyGammaToLinear(gammaColor *Color) *Color {
	gammaFactor := 2.0
	color.R = math.Pow(gammaColor.R, gammaFactor)
	color.G = math.Pow(gammaColor.G, gammaFactor)
	color.B = math.Pow(gammaColor.B, gammaFactor)
	return color
}

// CopyLinearToGamma copies gammaColor converted from linear to gamma space
// using a fixed gamma factor of 2 (i.e. exponent 0.5).
func (color *Color) CopyLinearToGamma(gammaColor *Color) *Color {
	gammaFactor := 2.0
	// NOTE(review): gammaFactor is a local constant so this branch is always
	// taken; presumably a leftover from a configurable gamma — confirm.
	if gammaFactor > 0 {
		gammaFactor = 1.0 / gammaFactor
	} else {
		gammaFactor = 1.0
	}
	color.R = math.Pow(gammaColor.R, gammaFactor)
	color.G = math.Pow(gammaColor.G, gammaFactor)
	color.B = math.Pow(gammaColor.B, gammaFactor)
	return color
}

// ConvertGammaToLinear squares each channel in place (gamma factor 2).
func (color *Color) ConvertGammaToLinear() *Color {
	r := color.R
	g := color.G
	b := color.B
	color.R = r * r
	color.G = g * g
	color.B = b * b
	return color
}

// ConvertLinearToGamma takes the square root of each channel in place.
func (color *Color) ConvertLinearToGamma() *Color {
	color.R = math.Sqrt(color.R)
	color.G = math.Sqrt(color.G)
	color.B = math.Sqrt(color.B)
	return color
}

// GetHex returns the color packed as a 24-bit 0xRRGGBB integer.
func (color *Color) GetHex() int {
	return int(color.R*255)<<16 ^ int(color.G*255)<<8 ^ int(color.B*255)<<0
}

// GetHexString returns the hex representation without a leading '#'.
// NOTE(review): %x does not zero-pad, so black yields "0" rather than
// "000000" — confirm whether callers expect six digits before changing.
func (color *Color) GetHexString() string {
	return fmt.Sprintf("%x", color.GetHex())
}

// GetHSL returns the color as hue, saturation and lightness.
// h,s,l ranges are in 0.0 - 1.0.
func (color *Color) GetHSL() (hue, saturation, lightness float64) {
	r := color.R
	g := color.G
	b := color.B

	max := math.Max(r, math.Max(g, b))
	min := math.Min(r, math.Min(g, b))

	hue = (min + max) / 2.0
	saturation = hue
	lightness = hue

	if min == max {
		// Achromatic.
		hue = 0
		saturation = 0
	} else {
		delta := max - min
		if lightness <= 0.5 {
			saturation = delta / (max + min)
		} else {
			saturation = delta / (2 - max - min)
		}
		switch max {
		case r:
			if g < b {
				hue = (g-b)/delta + 6
			} else {
				hue = (g-b)/delta + 0
			}
		case g:
			hue = (b-r)/delta + 2
		case b:
			hue = (r-g)/delta + 4
		}
		hue /= 6
	}
	return
}

// GetStyle returns a CSS rgb() string with channels scaled to 0-255.
func (color *Color) GetStyle() string {
	return fmt.Sprintf(
		"rgb(%003d,%003d,%003d)",
		int(color.R*255),
		int(color.G*255),
		int(color.B*255))
}

// OffsetHSL shifts the color's hue, saturation and lightness by the given
// deltas.
func (color *Color) OffsetHSL(h, s, l float64) *Color {
	hc, sc, lc := color.GetHSL()
	hc += h
	sc += s
	lc += l
	color.SetHSL(hc, sc, lc)
	return color
}

// Add adds c channel-wise (no clamping).
func (color *Color) Add(c *Color) *Color {
	color.R += c.R
	color.G += c.G
	color.B += c.B
	return color
}

// AddColors assigns the channel-wise sum of color1 and color2.
func (color *Color) AddColors(color1, color2 *Color) *Color {
	color.R = color1.R + color2.R
	color.G = color1.G + color2.G
	color.B = color1.B + color2.B
	return color
}

// AddScalar adds s to every channel.
func (color *Color) AddScalar(s float64) *Color {
	color.R += s
	color.G += s
	color.B += s
	return color
}

// Sub subtracts c channel-wise, clamping each channel at 0.
func (color *Color) Sub(c *Color) *Color {
	color.R = math.Max(0, color.R-c.R)
	color.G = math.Max(0, color.G-c.G)
	color.B = math.Max(0, color.B-c.B)
	return color
}

// Multiply multiplies channel-wise by c.
func (color *Color) Multiply(c *Color) *Color {
	color.R *= c.R
	color.G *= c.G
	color.B *= c.B
	return color
}

// MultiplyScalar scales every channel by s.
func (color *Color) MultiplyScalar(s float64) *Color {
	color.R *= s
	color.G *= s
	color.B *= s
	return color
}

// Lerp moves the color towards c by the fraction alpha (0 = unchanged,
// 1 = equal to c).
func (color *Color) Lerp(c *Color, alpha float64) *Color {
	color.R += (c.R - color.R) * alpha
	color.G += (c.G - color.G) * alpha
	color.B += (c.B - color.B) * alpha
	return color
}

// Equals reports exact channel equality.
func (color *Color) Equals(c *Color) bool {
	return (c.R == color.R) && (c.G == color.G) && (c.B == color.B)
}

// FromArray reads R, G, B from array starting at offset.
func (color *Color) FromArray(array []float64, offset int) *Color {
	color.R = array[offset]
	color.G = array[offset+1]
	color.B = array[offset+2]
	return color
}

// ToArray writes R, G, B into array starting at offset, allocating a
// sufficiently large slice when array is nil.
func (color *Color) ToArray(array []float64, offset int) []float64 {
	if array == nil {
		array = make([]float64, offset+3)
	}
	array[offset] = color.R
	array[offset+1] = color.G
	array[offset+2] = color.B
	return array
}

// hue2RGB converts one hue segment to a channel value for SetHSL.
func hue2RGB(p, q, t float64) float64 {
	if t < 0 {
		t += 1
	}
	if t > 1 {
		t -= 1
	}
	if t < 1.0/6.0 {
		return p + (q-p)*6*t
	}
	if t < 1.0/2.0 {
		return q
	}
	if t < 2.0/3.0 {
		// The original wrote (2/3 - t): 2/3 is untyped integer constant
		// division (== 0), which broke this segment of the hue ramp.
		return p + (q-p)*6*(2.0/3.0-t)
	}
	return p
}

// handleAlpha warns when a non-opaque alpha component is supplied, since
// Color has no alpha channel.
func handleAlpha(str string) {
	if str == "" {
		return
	}
	// bitSize must be 32 or 64 for ParseFloat; the original passed 10.
	if f, _ := strconv.ParseFloat(str, 64); f < 1 {
		glog.Warningf("three.Color: Alpha component of %s will be ignored.", str)
	}
}

// Colors returns a new color for the given CSS keyword name (black for
// unknown names, since the map's zero value is 0).
func Colors(name string) *Color {
	val := ColorKeywords[name]
	return NewColor().SetHex(val)
}

// ColorKeywords maps CSS color keyword names to packed 0xRRGGBB values.
var ColorKeywords = map[string]int{
	"aliceblue": 0xF0F8FF, "antiquewhite": 0xFAEBD7, "aqua": 0x00FFFF,
	"aquamarine": 0x7FFFD4, "azure": 0xF0FFFF, "beige": 0xF5F5DC,
	"bisque": 0xFFE4C4, "black": 0x000000, "blanchedalmond": 0xFFEBCD,
	"blue": 0x0000FF, "blueviolet": 0x8A2BE2, "brown": 0xA52A2A,
	"burlywood": 0xDEB887, "cadetblue": 0x5F9EA0, "chartreuse": 0x7FFF00,
	"chocolate": 0xD2691E, "coral": 0xFF7F50, "cornflowerblue": 0x6495ED,
	"cornsilk": 0xFFF8DC, "crimson": 0xDC143C, "cyan": 0x00FFFF,
	"darkblue": 0x00008B, "darkcyan": 0x008B8B, "darkgoldenrod": 0xB8860B,
	"darkgray": 0xA9A9A9, "darkgreen": 0x006400, "darkgrey": 0xA9A9A9,
	"darkkhaki": 0xBDB76B, "darkmagenta": 0x8B008B, "darkolivegreen": 0x556B2F,
	"darkorange": 0xFF8C00, "darkorchid": 0x9932CC, "darkred": 0x8B0000,
	"darksalmon": 0xE9967A, "darkseagreen": 0x8FBC8F, "darkslateblue": 0x483D8B,
	"darkslategray": 0x2F4F4F, "darkslategrey": 0x2F4F4F, "darkturquoise": 0x00CED1,
	"darkviolet": 0x9400D3, "deeppink": 0xFF1493, "deepskyblue": 0x00BFFF,
	"dimgray": 0x696969, "dimgrey": 0x696969, "dodgerblue": 0x1E90FF,
	"firebrick": 0xB22222, "floralwhite": 0xFFFAF0, "forestgreen": 0x228B22,
	"fuchsia": 0xFF00FF, "gainsboro": 0xDCDCDC, "ghostwhite": 0xF8F8FF,
	"gold": 0xFFD700, "goldenrod": 0xDAA520, "gray": 0x808080,
	"green": 0x008000, "greenyellow": 0xADFF2F, "grey": 0x808080,
	"honeydew": 0xF0FFF0, "hotpink": 0xFF69B4, "indianred": 0xCD5C5C,
	"indigo": 0x4B0082, "ivory": 0xFFFFF0, "khaki": 0xF0E68C,
	"lavender": 0xE6E6FA, "lavenderblush": 0xFFF0F5, "lawngreen": 0x7CFC00,
	"lemonchiffon": 0xFFFACD, "lightblue": 0xADD8E6, "lightcoral": 0xF08080,
	"lightcyan": 0xE0FFFF, "lightgoldenrodyellow": 0xFAFAD2, "lightgray": 0xD3D3D3,
	"lightgreen": 0x90EE90, "lightgrey": 0xD3D3D3, "lightpink": 0xFFB6C1,
	"lightsalmon": 0xFFA07A, "lightseagreen": 0x20B2AA, "lightskyblue": 0x87CEFA,
	"lightslategray": 0x778899, "lightslategrey": 0x778899, "lightsteelblue": 0xB0C4DE,
	"lightyellow": 0xFFFFE0, "lime": 0x00FF00, "limegreen": 0x32CD32,
	"linen": 0xFAF0E6, "magenta": 0xFF00FF, "maroon": 0x800000,
	"mediumaquamarine": 0x66CDAA, "mediumblue": 0x0000CD, "mediumorchid": 0xBA55D3,
	"mediumpurple": 0x9370DB, "mediumseagreen": 0x3CB371, "mediumslateblue": 0x7B68EE,
	"mediumspringgreen": 0x00FA9A, "mediumturquoise": 0x48D1CC, "mediumvioletred": 0xC71585,
	"midnightblue": 0x191970, "mintcream": 0xF5FFFA, "mistyrose": 0xFFE4E1,
	"moccasin": 0xFFE4B5, "navajowhite": 0xFFDEAD, "navy": 0x000080,
	"oldlace": 0xFDF5E6, "olive": 0x808000, "olivedrab": 0x6B8E23,
	"orange": 0xFFA500, "orangered": 0xFF4500, "orchid": 0xDA70D6,
	"palegoldenrod": 0xEEE8AA, "palegreen": 0x98FB98, "paleturquoise": 0xAFEEEE,
	"palevioletred": 0xDB7093, "papayawhip": 0xFFEFD5, "peachpuff": 0xFFDAB9,
	"peru": 0xCD853F, "pink": 0xFFC0CB, "plum": 0xDDA0DD,
	"powderblue": 0xB0E0E6, "purple": 0x800080, "red": 0xFF0000,
	"rosybrown": 0xBC8F8F, "royalblue": 0x4169E1, "saddlebrown": 0x8B4513,
	"salmon": 0xFA8072, "sandybrown": 0xF4A460, "seagreen": 0x2E8B57,
	"seashell": 0xFFF5EE, "sienna": 0xA0522D, "silver": 0xC0C0C0,
	"skyblue": 0x87CEEB, "slateblue": 0x6A5ACD, "slategray": 0x708090,
	"slategrey": 0x708090, "snow": 0xFFFAFA, "springgreen": 0x00FF7F,
	"steelblue": 0x4682B4, "tan": 0xD2B48C, "teal": 0x008080,
	// "thistle" was corrupted to "colortle" (a bad this->color replace).
	"thistle": 0xD8BFD8, "tomato": 0xFF6347, "turquoise": 0x40E0D0,
	"violet": 0xEE82EE, "wheat": 0xF5DEB3, "white": 0xFFFFFF,
	"whitesmoke": 0xF5F5F5, "yellow": 0xFFFF00, "yellowgreen": 0x9ACD32,
}
math3/color.go
0.731922
0.458046
color.go
starcoder
package goroslib

import (
	"time"
)

// TimeNow returns the current time.
// It supports simulated clocks provided by ROS clock servers.
func (n *Node) TimeNow() time.Time {
	if n.simtimeEnabled {
		n.simtimeMutex.RLock()
		now := n.simtimeValue
		n.simtimeMutex.RUnlock()
		return now
	}
	return time.Now()
}

// TimeSleepChan returns a channel that allows to sleep for the given amount
// of time.
// It supports simulated clocks provided by ROS clock servers.
func (n *Node) TimeSleepChan(d time.Duration) <-chan time.Time {
	if !n.simtimeEnabled {
		return time.After(d)
	}

	// Register a pending sleep; the simulated-clock handler fires the
	// channel once the simulated time reaches the deadline.
	ch := make(chan time.Time)
	n.simtimeMutex.Lock()
	n.simtimeSleeps = append(n.simtimeSleeps, &simtimeSleep{n.simtimeValue.Add(d), ch})
	n.simtimeMutex.Unlock()
	return ch
}

// TimeSleep sleeps for the given amount of time.
// It supports simulated clocks provided by ROS clock servers.
func (n *Node) TimeSleep(d time.Duration) {
	<-n.TimeSleepChan(d)
}

// NodeRate allows to sleep with a given period.
type NodeRate struct {
	n         *Node
	d         time.Duration
	lastSleep *simtimeSleep
}

// SleepChan returns a channel that allows to sleep with a given period.
// It supports simulated clocks provided by ROS clock servers.
func (nr *NodeRate) SleepChan() <-chan time.Time {
	if !nr.n.simtimeEnabled {
		// Schedule relative to the previous deadline when one exists, so
		// the period stays stable regardless of how late we are called.
		now := time.Now()
		var deadline time.Time
		if nr.lastSleep != nil {
			deadline = nr.lastSleep.value.Add(nr.d)
		} else {
			deadline = now.Add(nr.d)
		}
		nr.lastSleep = &simtimeSleep{deadline, nil}
		return time.After(deadline.Sub(now))
	}

	// Same scheme against the simulated clock.
	ch := make(chan time.Time)
	nr.n.simtimeMutex.Lock()
	var deadline time.Time
	if nr.lastSleep != nil {
		deadline = nr.lastSleep.value.Add(nr.d)
	} else {
		deadline = nr.n.simtimeValue.Add(nr.d)
	}
	nr.lastSleep = &simtimeSleep{deadline, ch}
	nr.n.simtimeSleeps = append(nr.n.simtimeSleeps, nr.lastSleep)
	nr.n.simtimeMutex.Unlock()
	return ch
}

// Sleep sleeps with a given period.
// It supports simulated clocks provided by ROS clock servers.
func (nr *NodeRate) Sleep() {
	<-nr.SleepChan()
}

// TimeRate returns an object that can be used to sleep periodically.
// It supports simulated clocks provided by ROS clock servers.
func (n *Node) TimeRate(d time.Duration) *NodeRate {
	return &NodeRate{
		n: n,
		d: d,
	}
}
nodefuncstime.go
0.787768
0.448487
nodefuncstime.go
starcoder
package calendar

/*title: Module State

In this file, we'll implement `api.ModuleState`.
A module's state will be written to and loaded from the file system and has
interfaces to both the module's renderer and the Web Client.
*/

import (
	"github.com/QuestScreen/api/comms"
	"github.com/QuestScreen/api/modules"
	"github.com/QuestScreen/api/server"
	shared "github.com/QuestScreen/plugin-tutorial"
	"gopkg.in/yaml.v3"
)

type state struct {
	Date shared.UniversityDate
}

/*
Our state only holds a UniversityDate.
The `Date` field must be publicly visible so that it can be serialized properly.
*/

type endpoint struct {
	*state
}

/*
The endpoint is the object that is handling requests coming from the Web Client
via HTTP.
We need an endpoint object (instead of defining its methods directly on `state`)
since we can have more than one endpoint.
*/

func newState(input *yaml.Node, ctx server.Context,
	ms server.MessageSender) (modules.State, error) {
	s := new(state)
	if input == nil {
		return s, nil
	}
	if err := input.Decode(&s.Date); err != nil {
		ms.Warning("unable to load UniversityDate: " + err.Error())
		s.Date = shared.UniversityDate(0)
	}
	return s, nil
}

/*
This is the constructor that is used for creating a `state` from YAML input.
As discussed previously, YAML is the file format all persistent data is stored
in.
The input is a subtree of the whole state which also contains other modules.
You do not need to know details about YAML, just use `Decode`.

`input` may be **`nil`** if the currently stored state has no information about
our module.
This will always be the case after adding a module to a scene or loading a new
group the first time, so we need to deal with it.
Here, we just return a state with the default value (which will be 0,
corresponding to 1st of Ick, year 0).

An error during decoding means that the input data is corrupted.
If that is the case, we issue a warning and load the default value.
Returning an error from the module constructor will halt the main app, so don't
do it as long as you can load some default value!
*/

func (s *state) Send(ctx server.Context) interface{} {
	return s.Date
}

func (s *state) Persist(ctx server.Context) interface{} {
	return s.Date
}

/*
Now come the serialization functions.
`Send` returns the data that should be serialized to JSON and sent to the web
client.
The caller will use JSON serialization on the returned value, which in turn will
use `UniversityDate`'s `MarshalJSON` method.

In `Persist`, we need to give the same data we `Decode` the input to in the
constructor.
This is the data that will be written to the scene state on the file system.
*/

func (s *state) CreateRendererData(ctx server.Context) interface{} {
	return s.Date
}

/*
This function defines the data we send to the renderer so that it can rebuild
its state (e.g. when the group is loaded or the scene changes).
As the renderer runs in another thread, it has its own state and cannot access
the `state` object.

The returned value must not contain a pointer to data owned by `state` for
thread safety as it will be received by the renderer in another thread.
`Date` neither is nor contains a pointer, so we are safe here.
*/

func (s *state) PureEndpoint(index int) modules.PureEndpoint {
	if index != 0 {
		panic("Endpoint index out of bounds")
	}
	return endpoint{s}
}

/*
This function creates our endpoint and implements `api.PureEndpointProvider`.
The module's descriptor will later describe how many and what kind of endpoints
a module has, which in turn leads to calls to this function.
Since we only have one endpoint, we can assume that index is always `0`.
*/

func (e endpoint) Post(payload []byte) (interface{}, interface{}, server.Error) {
	var daysDelta int
	if err := comms.ReceiveData(payload, &daysDelta); err != nil {
		return nil, nil, &server.BadRequest{Inner: err, Message: "received invalid data"}
	}
	e.state.Date = e.state.Date.Add(daysDelta)
	// first value is sent back to client as JSON.
	// second value is sent to Renderer.InitTransition.
	return e.state.Date, e.state.Date, nil
}

/*
Finally, this is our endpoint implementation.
We receive the delta (in days) we want to change, and simply apply it to our
date.
`comms.ReceiveData` is a helper function that uses JSON unmarshaling, wraps any
error into a sendable server error, and can do some additional validation (which
we do not need here).

We send the full date back to the Web Client and also to the Renderer.
Generally, a call to an endpoint might lead to a smaller change that does not
update the whole data.
For example, think about when you want to hide just one of your heroes with the
herolist plugin.
In such a case, we would not send the whole data to the renderer, but a data
object that identifies the change.
This way, we can animate small changes (one hero fading out) while keeping other
stuff intact.
In our case, to keep things simple, we only want the old date to fade out and
the new one to replace it, so there's no point in sending a smaller data
package, and thus we just send the whole data.

This is everything we need to do in order to implement `api.ModuleState`.
*/
calendar/state.go
0.8415
0.648605
state.go
starcoder
package logic

import (
	"math"
	"time"

	"k8s.io/autoscaler/vertical-pod-autoscaler/pkg/recommender/model"
)

// TODO: Split the estimator to have a separate estimator object for CPU and memory.

// ResourceEstimator is a function from AggregateContainerState to
// model.Resources, e.g. a prediction of resources needed by a group of
// containers.
type ResourceEstimator interface {
	GetResourceEstimation(s *model.AggregateContainerState) model.Resources
}

// Implementation of ResourceEstimator that returns constant amount of
// resources. This can be used as by a fake recommender for test purposes.
type constEstimator struct {
	resources model.Resources
}

// Simple implementation of the ResourceEstimator interface. It returns specific
// percentiles of CPU usage distribution and memory peaks distribution.
type percentileEstimator struct {
	cpuPercentile    float64
	memoryPercentile float64
}

// marginEstimator adds a safety margin (a fixed fraction of each resource) on
// top of the estimation produced by the wrapped base estimator.
type marginEstimator struct {
	marginFraction float64
	baseEstimator  ResourceEstimator
}

// minResourcesEstimator raises each resource produced by the wrapped base
// estimator to at least the configured minimum.
type minResourcesEstimator struct {
	minResources  model.Resources
	baseEstimator ResourceEstimator
}

// confidenceMultiplier scales the base estimation depending on how much
// historical data backs it; see its GetResourceEstimation for the formula.
type confidenceMultiplier struct {
	multiplier    float64
	exponent      float64
	baseEstimator ResourceEstimator
}

// NewConstEstimator returns a new constEstimator with given resources.
func NewConstEstimator(resources model.Resources) ResourceEstimator {
	return &constEstimator{resources}
}

// NewPercentileEstimator returns a new percentileEstimator that uses provided percentiles.
func NewPercentileEstimator(cpuPercentile float64, memoryPercentile float64) ResourceEstimator {
	return &percentileEstimator{cpuPercentile, memoryPercentile}
}

// WithMargin returns a given ResourceEstimator with margin applied.
// The returned resources are equal to the original resources plus (originalResource * marginFraction)
func WithMargin(marginFraction float64, baseEstimator ResourceEstimator) ResourceEstimator {
	return &marginEstimator{marginFraction, baseEstimator}
}

// WithMinResources returns a given ResourceEstimator with minResources applied.
// The returned resources are equal to the max(original resources, minResources)
func WithMinResources(minResources model.Resources, baseEstimator ResourceEstimator) ResourceEstimator {
	return &minResourcesEstimator{minResources, baseEstimator}
}

// WithConfidenceMultiplier returns a given ResourceEstimator with confidenceMultiplier applied.
func WithConfidenceMultiplier(multiplier, exponent float64, baseEstimator ResourceEstimator) ResourceEstimator {
	return &confidenceMultiplier{multiplier, exponent, baseEstimator}
}

// Returns a constant amount of resources.
func (e *constEstimator) GetResourceEstimation(s *model.AggregateContainerState) model.Resources {
	return e.resources
}

// Returns specific percentiles of CPU and memory peaks distributions.
func (e *percentileEstimator) GetResourceEstimation(s *model.AggregateContainerState) model.Resources {
	return model.Resources{
		model.ResourceCPU: model.CPUAmountFromCores(
			s.AggregateCPUUsage.Percentile(e.cpuPercentile)),
		model.ResourceMemory: model.MemoryAmountFromBytes(
			s.AggregateMemoryPeaks.Percentile(e.memoryPercentile)),
	}
}

// Returns a non-negative real number that heuristically measures how much
// confidence the history aggregated in the AggregateContainerState provides.
// For a workload producing a steady stream of samples over N days at the rate
// of 1 sample per minute, this metric is equal to N.
// This implementation is a very simple heuristic which looks at the total count
// of samples and the time between the first and the last sample.
func getConfidence(s *model.AggregateContainerState) float64 {
	// Distance between the first and the last observed sample time, measured in days.
	lifespanInDays := float64(s.LastSampleStart.Sub(s.FirstSampleStart)) / float64(time.Hour*24)
	// Total count of samples normalized such that it equals the number of days for
	// frequency of 1 sample/minute.
	samplesAmount := float64(s.TotalSamplesCount) / (60 * 24)
	// Take the weaker of the two signals so that neither a short burst of many
	// samples nor a long but sparse history inflates the confidence.
	return math.Min(lifespanInDays, samplesAmount)
}

// Returns resources computed by the underlying estimator, scaled based on the
// confidence metric, which depends on the amount of available historical data.
// Each resource is transformed as follows:
//     scaledResource = originalResource * (1 + 1/confidence)^exponent.
// This can be used to widen or narrow the gap between the lower and upper bound
// estimators depending on how much input data is available to the estimators.
func (e *confidenceMultiplier) GetResourceEstimation(s *model.AggregateContainerState) model.Resources {
	confidence := getConfidence(s)
	originalResources := e.baseEstimator.GetResourceEstimation(s)
	scaledResources := make(model.Resources)
	for resource, resourceAmount := range originalResources {
		scaledResources[resource] = model.ScaleResource(
			resourceAmount, math.Pow(1.+e.multiplier/confidence, e.exponent))
	}
	return scaledResources
}

// Returns the base estimation with (resource * marginFraction) added on top of
// each resource.
func (e *marginEstimator) GetResourceEstimation(s *model.AggregateContainerState) model.Resources {
	originalResources := e.baseEstimator.GetResourceEstimation(s)
	newResources := make(model.Resources)
	for resource, resourceAmount := range originalResources {
		margin := model.ScaleResource(resourceAmount, e.marginFraction)
		newResources[resource] = originalResources[resource] + margin
	}
	return newResources
}

// Returns the base estimation with each resource clamped from below to the
// configured minimum.
func (e *minResourcesEstimator) GetResourceEstimation(s *model.AggregateContainerState) model.Resources {
	originalResources := e.baseEstimator.GetResourceEstimation(s)
	newResources := make(model.Resources)
	for resource, resourceAmount := range originalResources {
		if resourceAmount < e.minResources[resource] {
			resourceAmount = e.minResources[resource]
		}
		newResources[resource] = resourceAmount
	}
	return newResources
}
vertical-pod-autoscaler/pkg/recommender/logic/estimator.go
0.668556
0.477311
estimator.go
starcoder
package graphics2d

import "github.com/jphsd/graphics2d/util"

// CurveStyle determines how the curve behaves relative to the path points. With Bezier, the
// path will intersect the mid-point of each path step. With CatmullRom, the path will intersect
// each point.
type CurveStyle int

// Constants for curve styles.
const (
	Bezier CurveStyle = iota
	CatmullRom
)

// CurveProc replaces the steps on a path with cubics. The locations of the control points
// are controlled by the Style setting and whether or not the path is closed.
type CurveProc struct {
	Scale float64
	Style CurveStyle
}

// Process implements the PathProcessor interface.
// It returns a single path whose steps are cubic Beziers derived from the
// end points of the original path's steps.
func (cp *CurveProc) Process(p *Path) []*Path {
	steps := p.Steps()
	ns := len(steps)
	// Nothing to smooth with fewer than two steps.
	if ns < 2 {
		return []*Path{p}
	}

	// Truncate steps to end points
	points := make([][]float64, ns)
	for i, step := range steps {
		points[i] = step[len(step)-1]
	}
	// Drop a duplicated closing point so it isn't treated as a distinct vertex.
	if p.closed && util.EqualsP(points[0], points[ns-1]) {
		ns--
	}

	res := []*Path{}

	// Bezier
	if cp.Style == Bezier {
		// Calc mid points
		mp := make([][]float64, ns)
		for i := 0; i < ns-1; i++ {
			mp[i] = util.Centroid(points[i], points[i+1])
		}
		// Wrap-around mid point (only meaningful for closed paths).
		mp[ns-1] = util.Centroid(points[ns-1], points[0])

		// Create path
		if p.closed {
			res = append(res, NewPath(mp[0]))
		} else {
			res = append(res, NewPath(points[0]))
			res[0].AddStep(mp[0])
		}
		for i := 1; i < ns-1; i++ {
			// Control points pulled from the adjacent mid points towards the vertex.
			c1 := Lerp(cp.Scale, mp[i-1], points[i])
			c2 := Lerp(cp.Scale, mp[i], points[i])
			res[0].AddStep(c1, c2, mp[i])
		}
		if p.closed {
			c1 := Lerp(cp.Scale, mp[ns-2], points[ns-1])
			c2 := Lerp(cp.Scale, mp[ns-1], points[ns-1])
			res[0].AddStep(c1, c2, mp[ns-1])
			c1 = Lerp(cp.Scale, mp[ns-1], points[0])
			c2 = Lerp(cp.Scale, mp[0], points[0])
			res[0].AddStep(c1, c2, mp[0])
			res[0].Close()
		} else {
			res[0].AddStep(points[ns-1])
		}
		return res
	}

	// Catmull-Rom
	// Calc opposite tangents
	ops := make([][]float64, ns)
	for i := 1; i < ns-1; i++ {
		// Tangent at an interior point is half the vector between its neighbors.
		ops[i] = []float64{(points[i+1][0] - points[i-1][0]) / 2, (points[i+1][1] - points[i-1][1]) / 2}
	}
	if p.closed {
		// End tangents wrap around to the other end of the point list.
		ops[0] = []float64{points[1][0] - points[ns-1][0], points[1][1] - points[ns-1][1]}
		ops[ns-1] = []float64{points[0][0] - points[ns-2][0], points[0][1] - points[ns-2][1]}
	} else {
		// Open paths get zero tangents at the ends.
		ops[0] = []float64{0, 0}
		ops[ns-1] = ops[0]
	}

	// Create path
	res = append(res, NewPath(points[0]))
	if p.closed {
		for i := 0; i < ns-1; i++ {
			c1, c2 := cp.calcControlOpp(points[i], ops[i], points[i+1], ops[i+1])
			res[0].AddStep(c1, c2, points[i+1])
		}
		c1, c2 := cp.calcControlOpp(points[ns-1], ops[ns-1], points[0], ops[0])
		res[0].AddStep(c1, c2, points[0])
		res[0].Close()
	} else {
		// Insert quads for start and end
		_, c2 := cp.calcControlOpp(points[0], ops[0], points[1], ops[1])
		res[0].AddStep(c2, points[1])
		for i := 1; i < ns-2; i++ {
			c1, c2 := cp.calcControlOpp(points[i], ops[i], points[i+1], ops[i+1])
			res[0].AddStep(c1, c2, points[i+1])
		}
		c1, _ := cp.calcControlOpp(points[ns-2], ops[ns-2], points[ns-1], ops[ns-1])
		res[0].AddStep(c1, points[ns-1])
	}
	return res
}

// Lerp performs a linear interpolation between two points.
func Lerp(t float64, p1, p2 []float64) []float64 {
	return []float64{util.Lerp(t, p1[0], p2[0]), util.Lerp(t, p1[1], p2[1])}
}

// calcControlOpp derives the two cubic control points for the segment p1->p2
// from the scaled tangents op1 (leaving p1) and op2 (arriving at p2, negated).
func (cp *CurveProc) calcControlOpp(p1, op1, p2, op2 []float64) ([]float64, []float64) {
	dx1, dy1 := op1[0]*cp.Scale, op1[1]*cp.Scale
	dx2, dy2 := -op2[0]*cp.Scale, -op2[1]*cp.Scale
	return []float64{p1[0] + dx1, p1[1] + dy1}, []float64{p2[0] + dx2, p2[1] + dy2}
}
curveproc.go
0.756807
0.500793
curveproc.go
starcoder
package flate

import (
	"fmt"
	"math"
	"math/bits"

	"github.com/chronos-tachyon/assert"
	"github.com/chronos-tachyon/huffman"
)

// tokenType classifies the kinds of tokens produced while encoding a DEFLATE
// stream: literal/copy/stop tokens for the data itself, and tree* tokens for
// the run-length-encoded Huffman code-length alphabet.
type tokenType byte

const (
	invalidToken tokenType = iota
	copyToken
	literalToken
	stopToken
	treeLenToken
	treeDupToken
	treeSZRToken
	treeLZRToken
)

// token packs every token kind into two uint16 fields. distance != 0 marks a
// copy token; otherwise literalOrLength encodes the kind via disjoint ranges:
// 0-255 literal, 256 stop, 512+ tree length, 1024+ tree dup, 2048+/4096+ zero runs.
type token struct {
	literalOrLength uint16
	distance        uint16
}

// makeCopyToken builds a back-reference token (length 3-258, distance 1-32768).
func makeCopyToken(length uint16, distance uint16) token {
	assert.Assertf(length >= 3, "copy length %d < minimum 3", length)
	assert.Assertf(length <= 258, "copy length %d > maximum 258", length)
	assert.Assertf(distance >= 1, "copy distance %d < minimum 1", distance)
	assert.Assertf(distance <= 32768, "copy distance %d > maximum 32768", distance)
	return token{literalOrLength: length, distance: distance}
}

// makeLiteralToken builds a token for a single literal byte.
func makeLiteralToken(ch byte) token {
	return token{literalOrLength: uint16(ch), distance: 0}
}

// makeStopToken builds the end-of-block token (symbol 256).
func makeStopToken() token {
	return token{literalOrLength: 256, distance: 0}
}

// makeTreeLenToken builds a tree token carrying one symbol bit length (0-15).
func makeTreeLenToken(size byte) token {
	assert.Assertf(size < 16, "symbol bit length %d >= 16", size)
	return token{literalOrLength: 512 + uint16(size), distance: 0}
}

// makeTreeDupToken builds a tree token repeating the previous bit length 3-6 times.
func makeTreeDupToken(count uint) token {
	assert.Assertf(count >= 3, "symbol bit length copy count %d < minimum 3", count)
	assert.Assertf(count <= 6, "symbol bit length copy count %d > maximum 6", count)
	return token{literalOrLength: 1024 + uint16(count-3), distance: 0}
}

// makeTreeZeroRunToken builds a tree token for a run of 3-138 zero bit lengths;
// short runs (3-10) and long runs (11-138) use distinct internal ranges.
func makeTreeZeroRunToken(count uint) token {
	assert.Assertf(count >= 3, "symbol bit length zero count %d < minimum 3", count)
	assert.Assertf(count <= 138, "symbol bit length zero count %d > maximum 138", count)
	if count < 11 {
		return token{literalOrLength: 2048 + uint16(count-3), distance: 0}
	}
	return token{literalOrLength: 4096 + uint16(count-11), distance: 0}
}

// makeInvalidToken builds a token that no valid range covers.
func makeInvalidToken() token {
	return token{literalOrLength: math.MaxUint16, distance: 0}
}

// tokenType decodes which kind of token this is from the packed fields;
// it raises an assertion failure for values outside every valid range.
func (t token) tokenType() tokenType {
	switch {
	case t.distance != 0:
		return copyToken
	case t.literalOrLength < 256:
		return literalToken
	case t.literalOrLength == 256:
		return stopToken
	case t.literalOrLength >= 512 && t.literalOrLength < (512+16):
		return treeLenToken
	case t.literalOrLength >= 1024 && t.literalOrLength < (1024+4):
		return treeDupToken
	case t.literalOrLength >= 2048 && t.literalOrLength < (2048+8):
		return treeSZRToken
	case t.literalOrLength >= 4096 && t.literalOrLength < (4096+128):
		return treeLZRToken
	default:
		assert.Raisef("invalid token literalOrLength=%d distance=%d", t.literalOrLength, t.distance)
		return invalidToken
	}
}

// symbolLL maps the token onto the DEFLATE literal/length alphabet (symbols
// 0-285), plus the count and value of extra bits that follow the symbol.
// Returns huffman.InvalidSymbol when the token has no literal/length symbol.
func (t token) symbolLL() (symbol huffman.Symbol, bitLen byte, bitBlock block) {
	switch {
	case t.distance == 0 && t.literalOrLength > 256:
		return huffman.InvalidSymbol, 0, 0
	case t.distance == 0 && t.literalOrLength == 256:
		return huffman.Symbol(256), 0, 0
	case t.distance == 0:
		return huffman.Symbol(t.literalOrLength), 0, 0
	case t.literalOrLength <= 2:
		assert.Raisef("copy length %d < minimum 3", t.literalOrLength)
		return huffman.InvalidSymbol, 0, 0
	case t.literalOrLength <= 10:
		// Lengths 3-10 map directly onto symbols 257-264, no extra bits.
		return huffman.Symbol(254 + t.literalOrLength), 0, 0
	case t.literalOrLength <= 18:
		x := (t.literalOrLength - 11)
		y, z := (x / 2), (x % 2)
		return huffman.Symbol(265 + y), 1, block(z)
	case t.literalOrLength <= 34:
		x := (t.literalOrLength - 19)
		y, z := (x / 4), (x % 4)
		return huffman.Symbol(269 + y), 2, block(z)
	case t.literalOrLength <= 66:
		x := (t.literalOrLength - 35)
		y, z := (x / 8), (x % 8)
		return huffman.Symbol(273 + y), 3, block(z)
	case t.literalOrLength <= 130:
		x := (t.literalOrLength - 67)
		y, z := (x / 16), (x % 16)
		return huffman.Symbol(277 + y), 4, block(z)
	case t.literalOrLength <= 257:
		x := (t.literalOrLength - 131)
		y, z := (x / 32), (x % 32)
		return huffman.Symbol(281 + y), 5, block(z)
	case t.literalOrLength == 258:
		// Maximum length has its own dedicated symbol.
		return huffman.Symbol(285), 0, 0
	default:
		assert.Raisef("copy length %d > maximum 258", t.literalOrLength)
		return huffman.InvalidSymbol, 0, 0
	}
}

// symbolD maps a copy token's distance onto the DEFLATE distance alphabet
// (symbols 0-29) plus trailing extra bits. Returns huffman.InvalidSymbol for
// non-copy tokens.
func (t token) symbolD() (symbol huffman.Symbol, bitLen byte, bitBlock block) {
	switch {
	case t.distance == 0:
		return huffman.InvalidSymbol, 0, 0
	case t.distance <= 4:
		// Distances 1-4 map directly onto symbols 0-3, no extra bits.
		d := (t.distance - 1)
		return huffman.Symbol(0 + d), 0, 0
	case t.distance <= 32768:
		d := (t.distance - 1)
		// k is the bit width of d; each width contributes two symbols,
		// distinguished by the second-highest bit of d.
		k := 16 - bits.LeadingZeros16(d)
		code := k*2 - 1
		if bit := uint16(1) << (k - 2); (d & bit) == 0 {
			code--
		}
		size := byte((code / 2) - 1)
		mask := (uint16(1) << size) - 1
		bits := (d & mask)
		return huffman.Symbol(code), size, block(bits)
	default:
		assert.Raisef("copy distance %d > maximum 32768", t.distance)
		return huffman.InvalidSymbol, 0, 0
	}
}

// symbolX maps a tree token onto the code-length alphabet (symbols 0-18) plus
// the extra bits that encode the repeat count. Returns huffman.InvalidSymbol
// for non-tree tokens.
func (t token) symbolX() (symbol huffman.Symbol, bitLen byte, bitBlock block) {
	switch {
	case t.distance != 0:
		return huffman.InvalidSymbol, 0, 0
	case t.literalOrLength < 512:
		return huffman.InvalidSymbol, 0, 0
	case t.literalOrLength >= 512 && t.literalOrLength < (512+16):
		return huffman.Symbol(t.literalOrLength - 512), 0, 0
	case t.literalOrLength >= 1024 && t.literalOrLength < (1024+4):
		return huffman.Symbol(16), 2, block(t.literalOrLength - 1024)
	case t.literalOrLength >= 2048 && t.literalOrLength < (2048+8):
		return huffman.Symbol(17), 3, block(t.literalOrLength - 2048)
	case t.literalOrLength >= 4096 && t.literalOrLength < (4096+128):
		return huffman.Symbol(18), 7, block(t.literalOrLength - 4096)
	default:
		return huffman.InvalidSymbol, 0, 0
	}
}

// encodeLLD writes the token's literal/length symbol (and, for copies, its
// distance symbol), each followed by any extra bits, using the given Huffman
// encoders. Returns false if the bit writer ran out of space.
func (t token) encodeLLD(bw bitwriter, hLL *huffman.Encoder, hD *huffman.Encoder) bool {
	symLL, sizeLL, bitsLL := t.symbolLL()
	symD, sizeD, bitsD := t.symbolD()
	if symLL >= 0 {
		hc := hLL.Encode(symLL)
		if !bw.outputBitsWriteHC(hc) {
			return false
		}
		if sizeLL != 0 {
			if !bw.outputBitsWrite(sizeLL, bitsLL) {
				return false
			}
		}
		if symD >= 0 {
			hc := hD.Encode(symD)
			if !bw.outputBitsWriteHC(hc) {
				return false
			}
			if sizeD != 0 {
				if !bw.outputBitsWrite(sizeD, bitsD) {
					return false
				}
			}
		}
	}
	return true
}

// encodeX writes the token's code-length-alphabet symbol plus extra bits using
// the given Huffman encoder. Returns false if the bit writer ran out of space.
func (t token) encodeX(bw bitwriter, hX *huffman.Encoder) bool {
	symX, sizeX, bitsX := t.symbolX()
	if symX >= 0 {
		hc := hX.Encode(symX)
		if !bw.outputBitsWriteHC(hc) {
			return false
		}
		if sizeX != 0 {
			if !bw.outputBitsWrite(sizeX, bitsX) {
				return false
			}
		}
	}
	return true
}

// String renders the token for debugging.
func (t token) String() string {
	switch t.tokenType() {
	case copyToken:
		return fmt.Sprintf("[copy token: distance=%d length=%d]", t.distance, t.literalOrLength)
	case literalToken:
		return fmt.Sprintf("[literal token: %#02x]", t.literalOrLength)
	case stopToken:
		return "[stop token]"
	case treeLenToken:
		return fmt.Sprintf("[tree len token: length=%d]", t.literalOrLength-512)
	case treeDupToken:
		return fmt.Sprintf("[tree dup token: count=%d]", t.literalOrLength-1024+3)
	case treeSZRToken:
		return fmt.Sprintf("[tree short zero repeat token: count=%d]", t.literalOrLength-2048+3)
	case treeLZRToken:
		return fmt.Sprintf("[tree long zero repeat token: count=%d]", t.literalOrLength-4096+11)
	default:
		return fmt.Sprintf("[invalid token: ll=%d d=%d]", t.literalOrLength, t.distance)
	}
}
token.go
0.799912
0.493958
token.go
starcoder
package onshape

import (
	"encoding/json"
)

// BTOrFilter167 struct for BTOrFilter167.
// It embeds BTQueryFilter183 and represents a logical OR of two query filters
// (Operand1, Operand2). The accessor pattern below follows the usual
// OpenAPI-generator style: Get/GetOk/Has/Set per optional field.
type BTOrFilter167 struct {
	BTQueryFilter183
	BtType   *string           `json:"btType,omitempty"`
	Operand1 *BTQueryFilter183 `json:"operand1,omitempty"`
	Operand2 *BTQueryFilter183 `json:"operand2,omitempty"`
}

// NewBTOrFilter167 instantiates a new BTOrFilter167 object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewBTOrFilter167() *BTOrFilter167 {
	this := BTOrFilter167{}
	return &this
}

// NewBTOrFilter167WithDefaults instantiates a new BTOrFilter167 object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewBTOrFilter167WithDefaults() *BTOrFilter167 {
	this := BTOrFilter167{}
	return &this
}

// GetBtType returns the BtType field value if set, zero value otherwise.
func (o *BTOrFilter167) GetBtType() string {
	if o == nil || o.BtType == nil {
		var ret string
		return ret
	}
	return *o.BtType
}

// GetBtTypeOk returns a tuple with the BtType field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTOrFilter167) GetBtTypeOk() (*string, bool) {
	if o == nil || o.BtType == nil {
		return nil, false
	}
	return o.BtType, true
}

// HasBtType returns a boolean if a field has been set.
func (o *BTOrFilter167) HasBtType() bool {
	if o != nil && o.BtType != nil {
		return true
	}

	return false
}

// SetBtType gets a reference to the given string and assigns it to the BtType field.
func (o *BTOrFilter167) SetBtType(v string) {
	o.BtType = &v
}

// GetOperand1 returns the Operand1 field value if set, zero value otherwise.
func (o *BTOrFilter167) GetOperand1() BTQueryFilter183 {
	if o == nil || o.Operand1 == nil {
		var ret BTQueryFilter183
		return ret
	}
	return *o.Operand1
}

// GetOperand1Ok returns a tuple with the Operand1 field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTOrFilter167) GetOperand1Ok() (*BTQueryFilter183, bool) {
	if o == nil || o.Operand1 == nil {
		return nil, false
	}
	return o.Operand1, true
}

// HasOperand1 returns a boolean if a field has been set.
func (o *BTOrFilter167) HasOperand1() bool {
	if o != nil && o.Operand1 != nil {
		return true
	}

	return false
}

// SetOperand1 gets a reference to the given BTQueryFilter183 and assigns it to the Operand1 field.
func (o *BTOrFilter167) SetOperand1(v BTQueryFilter183) {
	o.Operand1 = &v
}

// GetOperand2 returns the Operand2 field value if set, zero value otherwise.
func (o *BTOrFilter167) GetOperand2() BTQueryFilter183 {
	if o == nil || o.Operand2 == nil {
		var ret BTQueryFilter183
		return ret
	}
	return *o.Operand2
}

// GetOperand2Ok returns a tuple with the Operand2 field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTOrFilter167) GetOperand2Ok() (*BTQueryFilter183, bool) {
	if o == nil || o.Operand2 == nil {
		return nil, false
	}
	return o.Operand2, true
}

// HasOperand2 returns a boolean if a field has been set.
func (o *BTOrFilter167) HasOperand2() bool {
	if o != nil && o.Operand2 != nil {
		return true
	}

	return false
}

// SetOperand2 gets a reference to the given BTQueryFilter183 and assigns it to the Operand2 field.
func (o *BTOrFilter167) SetOperand2(v BTQueryFilter183) {
	o.Operand2 = &v
}

// MarshalJSON flattens the embedded BTQueryFilter183 into the same JSON object
// as this type's own optional fields, so the wire format has a single level.
func (o BTOrFilter167) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	serializedBTQueryFilter183, errBTQueryFilter183 := json.Marshal(o.BTQueryFilter183)
	if errBTQueryFilter183 != nil {
		return []byte{}, errBTQueryFilter183
	}
	errBTQueryFilter183 = json.Unmarshal([]byte(serializedBTQueryFilter183), &toSerialize)
	if errBTQueryFilter183 != nil {
		return []byte{}, errBTQueryFilter183
	}
	if o.BtType != nil {
		toSerialize["btType"] = o.BtType
	}
	if o.Operand1 != nil {
		toSerialize["operand1"] = o.Operand1
	}
	if o.Operand2 != nil {
		toSerialize["operand2"] = o.Operand2
	}
	return json.Marshal(toSerialize)
}

// NullableBTOrFilter167 distinguishes "explicit null"/"unset" from a zero
// value when (de)serializing JSON.
type NullableBTOrFilter167 struct {
	value *BTOrFilter167
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableBTOrFilter167) Get() *BTOrFilter167 {
	return v.value
}

// Set stores the value and marks it as set.
func (v *NullableBTOrFilter167) Set(val *BTOrFilter167) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set (or UnmarshalJSON) has been called.
func (v NullableBTOrFilter167) IsSet() bool {
	return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableBTOrFilter167) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableBTOrFilter167 wraps val and marks it as set.
func NewNullableBTOrFilter167(val *BTOrFilter167) *NullableBTOrFilter167 {
	return &NullableBTOrFilter167{value: val, isSet: true}
}

// MarshalJSON serializes the wrapped value (nil marshals to JSON null).
func (v NullableBTOrFilter167) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON records that a value was present and decodes it.
func (v *NullableBTOrFilter167) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
onshape/model_bt_or_filter_167.go
0.709523
0.441372
model_bt_or_filter_167.go
starcoder
package ft232h

import (
	"fmt"
	"math"
	"math/bits"
)

// Pin defines the methods required for representing an FT232H port pin.
type Pin interface {
	IsMPSSE() bool     // true if DPin (port "D"), false if CPin (GPIO/port "C")
	Mask() uint8       // the bitmask used to address the pin, equal to 1<<Pos()
	Pos() uint         // the ordinal pin number (0-7), equal to log2(Mask())
	String() string    // the string representation "D#" or "C#", with # = Pos()
	Valid() bool       // true IFF bitmask has exactly one bit set
	Equals(q Pin) bool // true IFF p and q have equal port and bitmask
}

// IsMPSSE is true for pins on FT232H port "D".
func (p DPin) IsMPSSE() bool { return true }

// IsMPSSE is false for pins on FT232H port "C".
func (p CPin) IsMPSSE() bool { return false }

// Mask is the bitmask used to address the pin on port "D".
func (p DPin) Mask() uint8 { return uint8(p) }

// Mask is the bitmask used to address the pin on port "C".
func (p CPin) Mask() uint8 { return uint8(p) }

// Pos is the ordinal pin number (0-7) on port "D".
// Only meaningful when the receiver is Valid (exactly one bit set).
func (p DPin) Pos() uint { return uint(math.Log2(float64(p))) }

// Pos is the ordinal pin number (0-7) on port "C".
// Only meaningful when the receiver is Valid (exactly one bit set).
func (p CPin) Pos() uint { return uint(math.Log2(float64(p))) }

// String is the string representation "D#" of the pin, with # equal to Pos.
func (p DPin) String() string { return fmt.Sprintf("D%d", p.Pos()) }

// String is the string representation "C#" of the pin, with # equal to Pos.
func (p CPin) String() string { return fmt.Sprintf("C%d", p.Pos()) }

// Valid is true if the pin bitmask has exactly one bit set, otherwise false.
func (p DPin) Valid() bool { return 1 == bits.OnesCount64(uint64(p)) }

// Valid is true if the pin bitmask has exactly one bit set, otherwise false.
func (p CPin) Valid() bool { return 1 == bits.OnesCount64(uint64(p)) }

// Equals is true if the given pin is on port "D" and has the same bitmask,
// otherwise false.
func (p DPin) Equals(q Pin) bool { return q.IsMPSSE() && p.Mask() == q.Mask() }

// Equals is true if the given pin is on port "C" and has the same bitmask,
// otherwise false.
func (p CPin) Equals(q Pin) bool { return !q.IsMPSSE() && p.Mask() == q.Mask() }

// Dir represents the direction of a GPIO pin
type Dir bool

// Constants of GPIO pin direction type Dir
const (
	Input  Dir = false // GPIO input pins (bit clear)
	Output Dir = true  // GPIO output pins (bit set)
)

// Types representing individual port pins.
type (
	DPin uint8 // pin bitmask on MPSSE low-byte lines (port "D" of FT232H)
	CPin uint8 // pin bitmask on MPSSE high-byte lines (port "C" of FT232H)
)

// Constants related to GPIO pin configuration
const (
	PinLO byte = 0 // pin value clear
	PinHI byte = 1 // pin value set
	PinIN byte = 0 // pin direction input
	PinOT byte = 1 // pin direction output

	NumDPins = 8 // number of MPSSE low-byte line pins, port "D"
	NumCPins = 8 // number of MPSSE high-byte line pins, port "C"
)

// D returns a DPin bitmask with only the given bit at position pin set.
// If the given pin position is greater than 7, the invalid bitmask (0) is
// returned.
func D(pin uint) DPin {
	// NOTE: the redundant `pin >= 0` guard was removed — pin is unsigned,
	// so that comparison was always true (flagged by staticcheck); this also
	// makes D symmetric with C below.
	if pin < NumDPins {
		return DPin(1 << pin)
	}
	return DPin(0) // invalid DPin
}

// C returns a CPin bitmask with only the given bit at position pin set.
// If the given pin position is greater than 7, the invalid bitmask (0) is
// returned.
func C(pin uint) CPin {
	if pin < NumCPins {
		return CPin(1 << pin)
	}
	return CPin(0) // invalid CPin
}
pin.go
0.632049
0.425665
pin.go
starcoder
package common

import (
	"fmt"
	"strconv"
)

// newDataFeatures builds one empty DataFeature per column name in the header
// row. Shared by both importers below.
func newDataFeatures(header []string) []*DataFeature {
	features := make([]*DataFeature, len(header))
	for i, name := range header {
		features[i] = new(DataFeature)
		features[i].Sets = make(map[int]float64)
		features[i].FeatureName = name
	}
	return features
}

// ImportFeaturesForLinReg import linear regression features from file.
// The first row of fileRows is the feature-name header; every following row
// holds one sample with a float value per feature.
// NOTE(review): rows shorter than the header would panic on index — assumes
// the caller provides rectangular CSV data; confirm upstream.
func ImportFeaturesForLinReg(fileRows [][]string) ([]*DataFeature, error) {
	// Guard against both nil and a non-nil empty slice; the previous
	// nil-only check panicked on fileRows[0] for an empty (non-nil) input.
	if len(fileRows) == 0 {
		return nil, fmt.Errorf("empty file content")
	}

	// read the first row to get all features
	features := newDataFeatures(fileRows[0])
	featureNum := len(features)

	// read from all rows to get feature values
	sample := 0
	for row := 1; row < len(fileRows); row++ {
		for i := 0; i < featureNum; i++ {
			value, err := strconv.ParseFloat(fileRows[row][i], 64)
			if err != nil {
				return nil, fmt.Errorf("failed to parse value, err: %v", err)
			}
			features[i].Sets[sample] = value
		}
		sample++
	}

	return features, nil
}

// ImportFeaturesForLogReg import logic regression features from file, target variable imported as 1 or 0
// - fileRows file rows, first row is feature list
// - label target feature
// - labelName target variable
// NOTE(review): rows shorter than the header would panic on index — assumes
// the caller provides rectangular CSV data; confirm upstream.
func ImportFeaturesForLogReg(fileRows [][]string, label, labelName string) ([]*DataFeature, error) {
	// Guard against both nil and a non-nil empty slice (see ImportFeaturesForLinReg).
	if len(fileRows) == 0 {
		return nil, fmt.Errorf("empty file content")
	}

	// read the first row to get all features
	features := newDataFeatures(fileRows[0])
	featureNum := len(features)

	// read from all rows to get feature values
	sample := 0
	for row := 1; row < len(fileRows); row++ {
		for i := 0; i < featureNum; i++ {
			if features[i].FeatureName == label {
				// parse target feature variable to 0 or 1
				if fileRows[row][i] == labelName {
					features[i].Sets[sample] = 1.0
				} else {
					features[i].Sets[sample] = 0.0
				}
			} else {
				value, err := strconv.ParseFloat(fileRows[row][i], 64)
				if err != nil {
					return nil, fmt.Errorf("failed to parse value, err: %v", err)
				}
				features[i].Sets[sample] = value
			}
		}
		sample++
	}

	return features, nil
}
crypto/core/machine_learning/common/feature_import.go
0.575111
0.462898
feature_import.go
starcoder
package xpath

import (
	"errors"
	"strconv"
	"strings"
)

// The XPath function list.

// predicate returns q's node test when q implements Predicater, otherwise a
// test that accepts every node.
func predicate(q query) func(NodeNavigator) bool {
	type Predicater interface {
		Test(NodeNavigator) bool
	}
	if p, ok := q.(Predicater); ok {
		return p.Test
	}
	return func(NodeNavigator) bool { return true }
}

// positionFunc is a XPath Node Set functions position().
func positionFunc(q query, t iterator) interface{} {
	var (
		count = 1
		node  = t.Current()
	)
	test := predicate(q)
	// Count matching siblings before the current node; position is 1-based.
	for node.MoveToPrevious() {
		if test(node) {
			count++
		}
	}
	return float64(count)
}

// lastFunc is a XPath Node Set functions last().
func lastFunc(q query, t iterator) interface{} {
	var (
		count = 0
		node  = t.Current()
	)
	node.MoveToFirst()
	test := predicate(q)
	for {
		if test(node) {
			count++
		}
		if !node.MoveToNext() {
			break
		}
	}
	return float64(count)
}

// countFunc is a XPath Node Set functions count(node-set).
func countFunc(q query, t iterator) interface{} {
	var count = 0
	test := predicate(q)
	switch typ := q.Evaluate(t).(type) {
	case query:
		for node := typ.Select(t); node != nil; node = typ.Select(t) {
			if test(node) {
				count++
			}
		}
	}
	return float64(count)
}

// sumFunc is a XPath Node Set functions sum(node-set).
func sumFunc(q query, t iterator) interface{} {
	var sum float64
	switch typ := q.Evaluate(t).(type) {
	case query:
		// Unparseable node values are silently skipped.
		for node := typ.Select(t); node != nil; node = typ.Select(t) {
			if v, err := strconv.ParseFloat(node.Value(), 64); err == nil {
				sum += v
			}
		}
	case float64:
		sum = typ
	case string:
		v, err := strconv.ParseFloat(typ, 64)
		if err != nil {
			panic(errors.New("sum() function argument type must be a node-set or number"))
		}
		sum = v
	}
	return sum
}

// nameFunc is a XPath functions name([node-set]).
// NOTE(review): this returns the local name of the iterator's current node and
// ignores q; the optional node-set argument and any namespace prefix are not
// handled — confirm against the dispatcher before relying on name() semantics.
func nameFunc(q query, t iterator) interface{} {
	return t.Current().LocalName()
}

// startwithFunc is a XPath functions starts-with(string, string).
func startwithFunc(arg1, arg2 query) func(query, iterator) interface{} {
	return func(q query, t iterator) interface{} {
		var (
			m, n string
			ok   bool
		)
		switch typ := arg1.Evaluate(t).(type) {
		case string:
			m = typ
		case query:
			node := typ.Select(t)
			if node == nil {
				return false
			}
			m = node.Value()
		default:
			panic(errors.New("starts-with() function argument type must be string"))
		}
		n, ok = arg2.Evaluate(t).(string)
		if !ok {
			panic(errors.New("starts-with() function argument type must be string"))
		}
		return strings.HasPrefix(m, n)
	}
}

// endwithFunc is a XPath functions ends-with(string, string).
func endwithFunc(arg1, arg2 query) func(query, iterator) interface{} {
	return func(q query, t iterator) interface{} {
		var (
			m, n string
			ok   bool
		)
		switch typ := arg1.Evaluate(t).(type) {
		case string:
			m = typ
		case query:
			node := typ.Select(t)
			if node == nil {
				return false
			}
			m = node.Value()
		default:
			panic(errors.New("ends-with() function argument type must be string"))
		}
		n, ok = arg2.Evaluate(t).(string)
		if !ok {
			panic(errors.New("ends-with() function argument type must be string"))
		}
		return strings.HasSuffix(m, n)
	}
}

// containsFunc is a XPath functions contains(string or @attr, string).
func containsFunc(arg1, arg2 query) func(query, iterator) interface{} {
	return func(q query, t iterator) interface{} {
		var (
			m, n string
			ok   bool
		)
		switch typ := arg1.Evaluate(t).(type) {
		case string:
			m = typ
		case query:
			node := typ.Select(t)
			if node == nil {
				return false
			}
			m = node.Value()
		default:
			panic(errors.New("contains() function argument type must be string"))
		}
		n, ok = arg2.Evaluate(t).(string)
		if !ok {
			panic(errors.New("contains() function argument type must be string"))
		}
		return strings.Contains(m, n)
	}
}

// normalizespaceFunc is XPath functions normalize-space(string?)
func normalizespaceFunc(q query, t iterator) interface{} {
	var m string
	switch typ := q.Evaluate(t).(type) {
	case string:
		m = typ
	case query:
		node := typ.Select(t)
		if node == nil {
			return false
		}
		m = node.Value()
	}
	// XPath 1.0 §4.2: normalize-space() strips leading/trailing whitespace
	// AND collapses internal runs of whitespace to a single space.
	// The previous TrimSpace-only implementation handled just the edges.
	return strings.Join(strings.Fields(m), " ")
}

// substringFunc is XPath functions substring function returns a part of a given string.
// NOTE(review): XPath substring() is 1-based (substring("12345",2,3) = "234"),
// but this implementation indexes m directly with start, i.e. 0-based, and
// panics on out-of-range start/length instead of clamping — confirm intended
// semantics with callers before changing.
func substringFunc(arg1, arg2, arg3 query) func(query, iterator) interface{} {
	return func(q query, t iterator) interface{} {
		var m string
		switch typ := arg1.Evaluate(t).(type) {
		case string:
			m = typ
		case query:
			node := typ.Select(t)
			if node == nil {
				return ""
			}
			m = node.Value()
		}

		var start, length float64
		var ok bool

		if start, ok = arg2.Evaluate(t).(float64); !ok {
			panic(errors.New("substring() function first argument type must be int"))
		}
		if arg3 != nil {
			if length, ok = arg3.Evaluate(t).(float64); !ok {
				panic(errors.New("substring() function second argument type must be int"))
			}
		}
		if (len(m) - int(start)) < int(length) {
			panic(errors.New("substring() function start and length argument out of range"))
		}
		if length > 0 {
			return m[int(start):int(length+start)]
		}
		return m[int(start):]
	}
}

// stringLengthFunc is XPATH string-length( [string] ) function that returns a number
// equal to the number of characters in a given string.
// NOTE(review): len() counts bytes, not Unicode characters — differs from the
// XPath spec for non-ASCII input; confirm before relying on it.
func stringLengthFunc(arg1 query) func(query, iterator) interface{} {
	return func(q query, t iterator) interface{} {
		switch v := arg1.Evaluate(t).(type) {
		case string:
			return float64(len(v))
		case query:
			node := v.Select(t)
			if node == nil {
				break
			}
			return float64(len(node.Value()))
		}
		return float64(0)
	}
}

// notFunc is XPATH functions not(expression) function operation.
func notFunc(q query, t iterator) interface{} {
	switch v := q.Evaluate(t).(type) {
	case bool:
		return !v
	case query:
		node := v.Select(t)
		return node == nil
	default:
		return false
	}
}

// concatFunc is the concat function concatenates two or more
// strings and returns the resulting string.
// concat( string1 , string2 [, stringn]* ) func concatFunc(args ...query) func(query, iterator) interface{} { return func(q query, t iterator) interface{} { var a []string for _, v := range args { switch v := v.Evaluate(t).(type) { case string: a = append(a, v) case query: node := v.Select(t) if node != nil { a = append(a, node.Value()) } } } return strings.Join(a, "") } }
vendor/github.com/antchfx/xpath/func.go
0.68215
0.40157
func.go
starcoder
package chess // PieceKind is the type representing for the pieces on the board type PieceKind int const ( // EMPTY_SQUARE aka Empty EMPTY_SQUARE PieceKind = 0 + iota // PAWN (♙, ♟) PAWN // ROOK (♖, ♜) ROOK // KNIGHT (♘, ♞) KNIGHT // BISHOP (♗, ♝) BISHOP // QUEEN (♕, ♛) QUEEN // KING (♔, ♚) KING ) // Position is the place where a piece stand type Position struct { x int y int } // IsValid returns if a position is on the board or not func (p Position) IsValid() bool { return p.x >= 0 && p.x < 8 && p.y >= 0 && p.y < 8 } // Equal returns if two positions are in the same place func (p Position) Equal(p2 Position) bool { return p.x == p2.x && p.y == p2.y } // Side can be either white or black type Side int const ( // EMPTY is where neither white nor black piece stand on it EMPTY Side = 0 + iota // WHITE is the side that start the game second WHITE // BLACK move after the WHITE side is moved at the beginning BLACK ) // Square represents for one of the 64 squares on the board type Square interface { String() string PieceKind() PieceKind Side() Side Move(b *Board, pos1, pos2 Position) bool } // The Piece stand at wherever on the board type Piece struct { pk PieceKind sd Side } func (p Piece) String() string { return " " } // Side returns the side of the piece func (p Piece) Side() Side { return p.sd } // Movable is used to attach to the piece that need recording if // it's been moved or not type Movable struct { moved bool } // isMoved check if a square is moved or not func (m Movable) isMoved() bool { return m.moved } // Pawn - the weakest type Pawn struct { Piece *Movable } func (p Pawn) String() string { if p.sd == WHITE { return "♙" } return "♟" } // PieceKind of Pawn func (p Pawn) PieceKind() PieceKind { return PAWN } // Move like a Pawn func (p Pawn) Move(b *Board, pos1, pos2 Position) bool { // Allowed move count amc := 1 if p.moved == false { amc = 2 } dir := 1 if p.Side() == BLACK { dir = -1 } for i := 1; i <= amc; i++ { dx := dir * i if !(Position{pos1.x + dx, 
pos2.y}).IsValid() { break } if pos2.Equal(Position{pos1.x + dx, pos1.y}) { s2 := b.squares[pos2.x][pos2.y] if p.Side() == s2.Side() { return false } b.squares[pos2.x][pos2.y] = b.squares[pos1.x][pos1.y] movedPawn := b.squares[pos2.x][pos2.y].(Pawn) movedPawn.moved = true b.squares[pos1.x][pos1.y] = EmptySquare{} return true } } ds := [][]int{{1, 1}, {1, -1}} for _, d := range ds { x, y := pos1.x+d[0]*dir, pos1.y+d[1]*dir if !(Position{x, y}).IsValid() { break } if pos2.Equal(Position{x, y}) { s2 := b.squares[pos2.x][pos2.y] if s2.Side() == p.Side()%2+1 { b.squares[pos2.x][pos2.y] = b.squares[pos1.x][pos1.y] b.squares[pos1.x][pos1.y] = EmptySquare{} return true } return false } } return false } // Rook - aka. the Tower type Rook struct { Piece *Movable } func (r Rook) String() string { if r.sd == WHITE { return "♖" } return "♜" } // PieceKind of Rook func (r Rook) PieceKind() PieceKind { return ROOK } // Move like a Rook func (r Rook) Move(b *Board, pos1, pos2 Position) bool { dirs := [][]int{{1, 0}, {-1, 0}, {0, 1}, {0, -1}} for _, dir := range dirs { dx, dy := dir[0], dir[1] for i, j := pos1.x+dx, pos1.y+dy; (Position{i, j}.IsValid()); i, j = i+dx, j+dy { p2 := b.squares[i][j] // blocked by it's own piece if r.Side() == p2.Side() { break } if pos2.Equal(Position{i, j}) { b.squares[pos2.x][pos2.y] = b.squares[pos1.x][pos1.y] b.squares[pos1.x][pos1.y] = EmptySquare{} movedRook := b.squares[pos2.x][pos2.y].(Rook) movedRook.moved = true return true } } } return false } // Knight - Piece that moves in the weird way type Knight struct { Piece } func (k Knight) String() string { if k.sd == WHITE { return "♘" } return "♞" } // PieceKind of Knight func (k Knight) PieceKind() PieceKind { return KNIGHT } // Move like a Knight func (k Knight) Move(b *Board, pos1, pos2 Position) bool { for i := -1; i <= 1; i++ { for j := -1; j <= 1; j++ { if i == 0 && j == 0 { continue } for t := 0; t <= 1; t++ { dirs := []int{i, j} dirs[t] *= 2 x, y := pos1.x+dirs[0], pos1.y+dirs[1] if 
pos2.Equal(Position{x, y}) { p2 := b.squares[pos2.x][pos2.y] if p2.Side() == k.Side() { return false } b.squares[pos2.x][pos2.y] = b.squares[pos1.x][pos1.y] b.squares[pos1.x][pos1.y] = EmptySquare{} return true } } } } return false } // Bishop - The bishop type Bishop struct { Piece } func (b Bishop) String() string { if b.sd == WHITE { return "♗" } return "♝" } // PieceKind of Bishop func (b Bishop) PieceKind() PieceKind { return BISHOP } // Move like a Bishop func (b Bishop) Move(bd *Board, pos1, pos2 Position) bool { dirs := [][]int{{1, 1}, {1, -1}, {-1, 1}, {-1, -1}} for _, dir := range dirs { dx, dy := dir[0], dir[1] for i, j := pos1.x+dx, pos1.y+dy; (Position{i, j}.IsValid()); i, j = i+dx, j+dy { p2 := bd.squares[i][j] // blocked by it's own piece if b.Side() == p2.Side() { break } if pos2.Equal(Position{i, j}) { bd.squares[pos2.x][pos2.y] = bd.squares[pos1.x][pos1.y] bd.squares[pos1.x][pos1.y] = EmptySquare{} return true } } } return false } // Queen - The strongest piece on the board type Queen struct { Piece } func (q Queen) String() string { if q.sd == WHITE { return "♕" } return "♛" } // PieceKind of Queen func (q Queen) PieceKind() PieceKind { return QUEEN } // Move like the Queen func (q Queen) Move(b *Board, pos1, pos2 Position) bool { for dx := -1; dx <= 1; dx++ { for dy := -1; dy <= 1; dy++ { if dx == 0 && dy == 0 { break } for i, j := pos1.x+dx, pos1.y+dy; (Position{i, j}.IsValid()); i, j = i+dx, j+dy { p2 := b.squares[i][j] // blocked by it's own piece if q.Side() == p2.Side() { break } if pos2.Equal(Position{i, j}) { b.squares[pos2.x][pos2.y] = b.squares[pos1.x][pos1.y] b.squares[pos1.x][pos1.y] = EmptySquare{} return true } } } } return false } // King - The piece to protect type King struct { Piece *Movable } func (k King) String() string { if k.sd == WHITE { return "♔" } return "♚" } // PieceKind of KING func (k King) PieceKind() PieceKind { return KING } // Move in the King's way func (k King) Move(b *Board, pos1, pos2 Position) bool { for i 
:= -1; i <= 1; i++ { for j := -1; j <= 1; j++ { if i == 0 && j == 0 || !pos2.Equal(Position{pos1.x + i, pos1.y + j}) { continue } s2 := b.squares[pos2.x][pos2.y] if k.Side() == s2.Side() { return false } b.squares[pos2.x][pos2.y] = b.squares[pos1.x][pos1.y] b.squares[pos1.x][pos1.y] = EmptySquare{} movedKing := b.squares[pos2.x][pos2.y].(King) movedKing.moved = true return true } } if k.castling(b, pos1, pos2) { return true } return false } func (k King) castling(b *Board, pos1, pos2 Position) bool { s2 := b.squares[pos2.x][pos2.y] if s2.Side() != k.Side() || s2.PieceKind() != ROOK { return false } if k.moved == false && s2.(Rook).moved == false { var vec int if pos1.y-pos2.y > 0 { vec = 1 } else { vec = -1 } nrp := Position{pos2.x, pos2.y + vec*2} nkp := Position{pos2.x, pos2.y + vec} if b.squares[nrp.x][nrp.y].PieceKind() != EMPTY_SQUARE || b.squares[nkp.x][nkp.y].PieceKind() != EMPTY_SQUARE { return false } b.squares[nkp.x][nkp.y] = b.squares[pos1.x][pos1.y] b.squares[pos1.x][pos1.y] = EmptySquare{} movedKing := b.squares[nkp.x][nkp.y].(King) movedKing.moved = true b.squares[nrp.x][nrp.y] = b.squares[pos2.x][pos2.y] b.squares[pos2.x][pos2.y] = EmptySquare{} movedRook := b.squares[nrp.x][nrp.y].(Rook) movedRook.moved = true return true } return false } // EmptySquare - aka no piece on the board type EmptySquare struct { Piece } // PieceKind of KING func (es EmptySquare) PieceKind() PieceKind { return EMPTY_SQUARE } // Move always returns false if the squre is empty func (es EmptySquare) Move(b *Board, pos1, pos2 Position) bool { return false }
pieces.go
0.706596
0.573499
pieces.go
starcoder
package loops import ( "fmt" "math" ) type Mat4 [16]float32 func NewIdentityMat4() Mat4 { return Mat4{ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, } } func NewTranslateMatrix(tx, ty, tz float32) Mat4 { return Mat4{ 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, tx, ty, tz, 1, } } func NewScaleMatrix(sx, sy, sz float32) Mat4 { return Mat4{ sx, 0, 0, 0, 0, sy, 0, 0, 0, 0, sz, 0, 0, 0, 0, 1, } } func NewRotate2DMatrix(theta float32) Mat4 { c := float32(math.Cos(float64(theta))) s := float32(math.Sin(float64(theta))) return Mat4{ c, s, 0, 0, -s, c, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, } } func NewRotateMatrix(theta, x, y, z float32) Mat4 { dir := NewVec3(x, y, z).Normalize() c := float32(math.Cos(float64(theta))) s := float32(math.Sin(float64(theta))) x = dir[0] y = dir[1] z = dir[2] c1 := 1 - c return Mat4{ x*x*c1 + c, x*y*c1 - z*s, x*z*c1 + y*s, 0, y*x*c1 + z*s, y*y*c1 + c, y*z*c1 - x*s, 0, x*z*c1 - y*s, y*z*c1 + x*s, z*z*c1 + c, 0, 0, 0, 0, 1, }.Transpose() } // fov: field of view in degrees // aspect: width/height func NewPerspectiveMatrix(fovy, aspect, zNear, zFar float64) Mat4 { fovy = fovy / 180.0 * math.Pi // deg to rad f := math.Tan(fovy / 2.0) nearFar := zNear - zFar return Mat4{ float32(f / aspect), 0, 0, 0, 0, float32(f), 0, 0, 0, 0, float32((zFar + zNear) / nearFar), float32(2 * zFar * zNear / nearFar), 0, 0, -1, 0, }.Transpose() } func (self Mat4) Mul(other Mat4) Mat4 { return Mat4{ self[0]*other[0] + self[1]*other[4] + self[2]*other[8] + self[3]*other[12], self[0]*other[1] + self[1]*other[5] + self[2]*other[9] + self[3]*other[13], self[0]*other[2] + self[1]*other[6] + self[2]*other[10] + self[3]*other[14], self[0]*other[3] + self[1]*other[7] + self[2]*other[11] + self[3]*other[15], self[4]*other[0] + self[5]*other[4] + self[6]*other[8] + self[7]*other[12], self[4]*other[1] + self[5]*other[5] + self[6]*other[9] + self[7]*other[13], self[4]*other[2] + self[5]*other[6] + self[6]*other[10] + self[7]*other[14], self[4]*other[3] + self[5]*other[7] + self[6]*other[11] + 
self[7]*other[15], self[8]*other[0] + self[9]*other[4] + self[10]*other[8] + self[11]*other[12], self[8]*other[1] + self[9]*other[5] + self[10]*other[9] + self[11]*other[13], self[8]*other[2] + self[9]*other[6] + self[10]*other[10] + self[11]*other[14], self[8]*other[3] + self[9]*other[7] + self[10]*other[11] + self[11]*other[15], self[12]*other[0] + self[13]*other[4] + self[14]*other[8] + self[15]*other[12], self[12]*other[1] + self[13]*other[5] + self[14]*other[9] + self[15]*other[13], self[12]*other[2] + self[13]*other[6] + self[14]*other[10] + self[15]*other[14], self[12]*other[3] + self[13]*other[7] + self[14]*other[11] + self[15]*other[15], } } func (self Mat4) Print() { for i, val := range self { if i != 0 && i%4 == 0 { fmt.Print("\n") } fmt.Printf("%.2f, ", val) } fmt.Print("\n") } func (self Mat4) Translate(tx, ty, tz float32) Mat4 { return self.Mul(NewTranslateMatrix(tx, ty, tz)) } func (self Mat4) TranslateVec3(v Vec3) Mat4 { return self.Mul(NewTranslateMatrix(v[0], v[1], v[2])) } func (self Mat4) Scale(sx, sy, sz float32) Mat4 { return self.Mul(NewScaleMatrix(sx, sy, sz)) } func (self Mat4) Rotate(theta float32) Mat4 { return self.Mul(NewRotate2DMatrix(theta)) } func (self Mat4) Rotate3d(theta, x, y, z float32) Mat4 { return self.Mul(NewRotateMatrix(theta, x, y, z)) } func (self Mat4) Transpose() Mat4 { return Mat4{ self[0], self[4], self[8], self[12], self[1], self[5], self[9], self[13], self[2], self[6], self[10], self[14], self[3], self[7], self[11], self[15], } }
loops/matrix.go
0.646014
0.535888
matrix.go
starcoder
package openapi import ( "encoding/json" ) // SchemaField SchemaField defines the properties of a field in the schema. type SchemaField struct { // The name of the field. Name string `json:"name"` // The description of the field. Description *string `json:"description,omitempty"` Type SchemaFieldType `json:"type"` Mode SchemaFieldMode `json:"mode"` // Array indicates if the field is an array of values. For example, if `type` is string and `array` is `true`, then the field is an array of strings. Array *bool `json:"array,omitempty"` // The required length of the array, if `array` is `true`. This allows you to enforce that an array contains an exact number of items. For example, to store a 2x2 vector, you could set `type` to float, `array` to `true` and `array_length` to `4`. ArrayLength *int32 `json:"array_length,omitempty"` } // NewSchemaField instantiates a new SchemaField object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewSchemaField(name string, type_ SchemaFieldType, mode SchemaFieldMode) *SchemaField { this := SchemaField{} this.Name = name this.Type = type_ this.Mode = mode return &this } // NewSchemaFieldWithDefaults instantiates a new SchemaField object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewSchemaFieldWithDefaults() *SchemaField { this := SchemaField{} var type_ SchemaFieldType = "TYPE_UNSPECIFIED" this.Type = type_ var mode SchemaFieldMode = "MODE_UNSPECIFIED" this.Mode = mode return &this } // GetName returns the Name field value func (o *SchemaField) GetName() string { if o == nil { var ret string return ret } return o.Name } // GetNameOk returns a tuple with the Name field value // and a boolean to check if the value has been set. 
func (o *SchemaField) GetNameOk() (*string, bool) { if o == nil { return nil, false } return &o.Name, true } // SetName sets field value func (o *SchemaField) SetName(v string) { o.Name = v } // GetDescription returns the Description field value if set, zero value otherwise. func (o *SchemaField) GetDescription() string { if o == nil || o.Description == nil { var ret string return ret } return *o.Description } // GetDescriptionOk returns a tuple with the Description field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *SchemaField) GetDescriptionOk() (*string, bool) { if o == nil || o.Description == nil { return nil, false } return o.Description, true } // HasDescription returns a boolean if a field has been set. func (o *SchemaField) HasDescription() bool { if o != nil && o.Description != nil { return true } return false } // SetDescription gets a reference to the given string and assigns it to the Description field. func (o *SchemaField) SetDescription(v string) { o.Description = &v } // GetType returns the Type field value func (o *SchemaField) GetType() SchemaFieldType { if o == nil { var ret SchemaFieldType return ret } return o.Type } // GetTypeOk returns a tuple with the Type field value // and a boolean to check if the value has been set. func (o *SchemaField) GetTypeOk() (*SchemaFieldType, bool) { if o == nil { return nil, false } return &o.Type, true } // SetType sets field value func (o *SchemaField) SetType(v SchemaFieldType) { o.Type = v } // GetMode returns the Mode field value func (o *SchemaField) GetMode() SchemaFieldMode { if o == nil { var ret SchemaFieldMode return ret } return o.Mode } // GetModeOk returns a tuple with the Mode field value // and a boolean to check if the value has been set. 
func (o *SchemaField) GetModeOk() (*SchemaFieldMode, bool) { if o == nil { return nil, false } return &o.Mode, true } // SetMode sets field value func (o *SchemaField) SetMode(v SchemaFieldMode) { o.Mode = v } // GetArray returns the Array field value if set, zero value otherwise. func (o *SchemaField) GetArray() bool { if o == nil || o.Array == nil { var ret bool return ret } return *o.Array } // GetArrayOk returns a tuple with the Array field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *SchemaField) GetArrayOk() (*bool, bool) { if o == nil || o.Array == nil { return nil, false } return o.Array, true } // HasArray returns a boolean if a field has been set. func (o *SchemaField) HasArray() bool { if o != nil && o.Array != nil { return true } return false } // SetArray gets a reference to the given bool and assigns it to the Array field. func (o *SchemaField) SetArray(v bool) { o.Array = &v } // GetArrayLength returns the ArrayLength field value if set, zero value otherwise. func (o *SchemaField) GetArrayLength() int32 { if o == nil || o.ArrayLength == nil { var ret int32 return ret } return *o.ArrayLength } // GetArrayLengthOk returns a tuple with the ArrayLength field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *SchemaField) GetArrayLengthOk() (*int32, bool) { if o == nil || o.ArrayLength == nil { return nil, false } return o.ArrayLength, true } // HasArrayLength returns a boolean if a field has been set. func (o *SchemaField) HasArrayLength() bool { if o != nil && o.ArrayLength != nil { return true } return false } // SetArrayLength gets a reference to the given int32 and assigns it to the ArrayLength field. 
func (o *SchemaField) SetArrayLength(v int32) { o.ArrayLength = &v } func (o SchemaField) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if true { toSerialize["name"] = o.Name } if o.Description != nil { toSerialize["description"] = o.Description } if true { toSerialize["type"] = o.Type } if true { toSerialize["mode"] = o.Mode } if o.Array != nil { toSerialize["array"] = o.Array } if o.ArrayLength != nil { toSerialize["array_length"] = o.ArrayLength } return json.Marshal(toSerialize) } type NullableSchemaField struct { value *SchemaField isSet bool } func (v NullableSchemaField) Get() *SchemaField { return v.value } func (v *NullableSchemaField) Set(val *SchemaField) { v.value = val v.isSet = true } func (v NullableSchemaField) IsSet() bool { return v.isSet } func (v *NullableSchemaField) Unset() { v.value = nil v.isSet = false } func NewNullableSchemaField(val *SchemaField) *NullableSchemaField { return &NullableSchemaField{value: val, isSet: true} } func (v NullableSchemaField) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableSchemaField) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
internal/openapi/model_schema_field.go
0.836638
0.514583
model_schema_field.go
starcoder
package challenge43 import ( "crypto/rand" "math/big" ) type DSA struct { P *big.Int Q *big.Int G *big.Int } type UserKey struct { Private *big.Int Public *big.Int } type MessageSignature struct { R *big.Int S *big.Int } func (d *DSA) Initialize() { d.P = new(big.Int).SetBytes(P) d.Q = new(big.Int).SetBytes(Q) d.G = new(big.Int).SetBytes(G) } func (d *DSA) GenerateUserKey() UserKey { // generate random x, 0 < x < q x, err := rand.Int(rand.Reader, d.Q) if err != nil { panic(err) } // calculate public key y = g^x mod p y := new(big.Int).Exp(d.G, x, d.P) return UserKey{ Private: x, Public: y, } } func (d *DSA) Sign(messageHash []byte, privateKey *big.Int) MessageSignature { r := new(big.Int) s := new(big.Int) k := new(big.Int) zero := big.NewInt(0) for s.Cmp(zero) == 0 { for r.Cmp(zero) == 0 { // generate a per message k 1 < k < q tmp, err := rand.Int(rand.Reader, d.Q) if err != nil { panic(err) } tmp.Add(tmp, big.NewInt(2)) // to make sure 1 < k k = tmp r = new(big.Int).Exp(d.G, k, d.P) r.Exp(r, big.NewInt(1), d.Q) } // calculate s = modInverse(k, q) * (H(m) + xr) mod q xr := new(big.Int).Mul(privateKey, r) hxr := new(big.Int).SetBytes(messageHash) hxr.Add(hxr, xr) mi := new(big.Int).ModInverse(k, d.Q) s = new(big.Int).Mul(mi, hxr) s.Exp(s, big.NewInt(1), d.Q) } return MessageSignature{ r, s, } } func (d *DSA) Verify(messageHash []byte, sig MessageSignature, publicKey *big.Int) bool { zero := big.NewInt(0) one := big.NewInt(1) // reject if 0 < r < q and 0 < s < q is not true if zero.Cmp(sig.R) != -1 || d.Q.Cmp(sig.R) != 1 || zero.Cmp(sig.S) != -1 || d.Q.Cmp(sig.S) != 1 { return false } w := new(big.Int).ModInverse(sig.S, d.Q) u1 := new(big.Int).SetBytes(messageHash) u1.Mul(u1, w) u1.Exp(u1, one, d.Q) u2 := new(big.Int).Mul(sig.R, w) u2.Exp(u2, one, d.Q) // v = (g^u1 * y^u2 mod p) mod q gu1 := new(big.Int).Exp(d.G, u1, d.P) gu2 := new(big.Int).Exp(publicKey, u2, d.P) gy := new(big.Int).Mul(gu1, gu2) gy.Exp(gy, one, d.P) v := new(big.Int).Exp(gy, one, d.Q) if 
v.Cmp(sig.R) == 0 { return true } return false }
set6/challenge43/43_utility.go
0.502441
0.427516
43_utility.go
starcoder
package rvm

import (
	"fmt"
	"math"
)

// InvalidRoundingMode describes an out-of-range RoundingMode. It implements
// error and is used as the panic value for invalid modes.
type InvalidRoundingMode RoundingMode

func (i InvalidRoundingMode) Error() string {
	return fmt.Sprintf("invalid rounding mode: %x", i)
}

// RoundingMode selects how fractional values are rounded to integers.
type RoundingMode uint

const (
	RoundTruncate RoundingMode = iota
	RoundNearest
	RoundFloor
	RoundCeil
)

// round rounds v according to mode, coercing non-arithmetic values via
// toarith first. It panics with InvalidRoundingMode for unknown modes
// (previously a bare string panic; the declared error type was unused).
func round(v Value, mode RoundingMode) Value {
	if mode > RoundCeil {
		panic(InvalidRoundingMode(mode))
	}
loop:
	switch vx := v.(type) {
	case Uint, Int:
		return vx
	case Float:
		return vx.Round(mode)
	case Rounder:
		return vx.Round(mode)
	case float64:
		return Float(vx).Round(mode)
	default:
		v = toarith(vx)
		goto loop
	}
}

type (
	Float float64
	Int   int64
	Uint  uint64

	// Arith is the arithmetic operation set shared by all numeric types.
	Arith interface {
		Add(Arith) Arith
		Sub(Arith) Arith
		Neg() Arith
		Mul(Arith) Arith
		Div(Arith) Arith
		Mod(Arith) Arith
		Pow(Arith) Arith
		Sqrt() Arith
	}

	// Bitwise extends Arith with bit operations (integer types only).
	Bitwise interface {
		Arith
		Xor(Bitwise) Bitwise
		And(Bitwise) Bitwise
		Or(Bitwise) Bitwise
		Not() Bitwise
	}

	// ArithmeticShifter shifts preserving sign.
	ArithmeticShifter interface {
		ArithShift(bits Value) Value
	}

	// BitShifter shifts the raw bit pattern (logical shift).
	BitShifter interface {
		BitShift(bits Value) Value
	}

	Rounder interface {
		Round(RoundingMode) Value
	}

	FloatValuer interface {
		Float64() float64
	}

	IntValuer interface {
		Int64() int64
	}

	UintValuer interface {
		Uint64() uint64
	}
)

var (
	_ Arith   = Float(0)
	_ Arith   = Int(0)
	_ Arith   = Uint(0)
	_ Bitwise = Int(0)
	_ Bitwise = Uint(0)
	_ Rounder = Float(0)
	_ Rounder = Int(0)
	_ Rounder = Uint(0)
)

// Float64

func (lhs Float) Float64() float64 { return float64(lhs) }
func (lhs Float) Int64() int64     { return int64(lhs) }
func (lhs Float) Uint64() uint64   { return uint64(lhs) }

func (lhs Float) Add(rhs Arith) Arith { return lhs + tofloat(rhs) }
func (lhs Float) Sub(rhs Arith) Arith { return lhs - tofloat(rhs) }
func (lhs Float) Mul(rhs Arith) Arith { return lhs * tofloat(rhs) }
func (lhs Float) Div(rhs Arith) Arith { return lhs / tofloat(rhs) }
func (lhs Float) Neg() Arith          { return -lhs }
func (lhs Float) Sqrt() Arith         { return Float(math.Sqrt(float64(lhs))) }

// Round rounds to an integral value.
//
// FIX: results are returned as Float — previously raw float64 values were
// returned, inconsistent with every other Round implementation and with the
// package's typed Value convention.
func (lhs Float) Round(mode RoundingMode) Value {
	switch x := float64(lhs); mode {
	case RoundTruncate:
		return Float(math.Trunc(x))
	case RoundNearest:
		return Float(math.Trunc(x + math.Copysign(0.5, x)))
	case RoundFloor:
		return Float(math.Floor(x))
	case RoundCeil:
		return Float(math.Ceil(x))
	}
	panic(InvalidRoundingMode(mode))
}

func (lhs Float) Pow(rhs Arith) Arith {
	return Float(math.Pow(float64(lhs), float64(tofloat(rhs))))
}

func (lhs Float) Mod(rhs Arith) Arith {
	return Float(math.Mod(float64(lhs), float64(tofloat(rhs))))
}

// Signed integer

func (lhs Int) Float64() float64 { return float64(lhs) }
func (lhs Int) Int64() int64     { return int64(lhs) }
func (lhs Int) Uint64() uint64   { return uint64(lhs) }

func (lhs Int) Neg() Arith               { return -lhs }
func (lhs Int) Round(RoundingMode) Value { return lhs }

// ArithShift: negative bit counts shift left, positive shift right,
// preserving the sign bit.
func (lhs Int) ArithShift(bits Value) Value {
	if bits := toint(bits); bits < 0 {
		return lhs << uint(-bits)
	} else if bits > 0 {
		return lhs >> uint(bits)
	}
	return lhs
}

// BitShift: logical shift of the raw bit pattern.
func (lhs Int) BitShift(bits Value) Value {
	if bits := toint(bits); bits < 0 {
		return Int(uint64(lhs) << uint(-bits))
	} else if bits > 0 {
		return Int(uint64(lhs) >> uint(bits))
	}
	return lhs
}

func (lhs Int) Add(rhs Arith) Arith {
	switch rhs := toarith(rhs).(type) {
	case Int:
		return Int(int64(lhs) + int64(rhs))
	case Uint:
		return Int(int64(lhs) + int64(rhs))
	case Float:
		return Float(float64(lhs) + float64(rhs))
	}
	panic("unreachable")
}

func (lhs Int) Sub(rhs Arith) Arith {
	switch rhs := toarith(rhs).(type) {
	case Int:
		return Int(int64(lhs) - int64(rhs))
	case Uint:
		return Int(int64(lhs) - int64(rhs))
	case Float:
		return Float(float64(lhs) - float64(rhs))
	}
	panic("unreachable")
}

func (lhs Int) Mul(rhs Arith) Arith {
	switch rhs := toarith(rhs).(type) {
	case Int:
		return Int(int64(lhs) * int64(rhs))
	case Uint:
		return Int(int64(lhs) * int64(rhs))
	case Float:
		return Float(float64(lhs) * float64(rhs))
	}
	panic("unreachable")
}

func (lhs Int) Div(rhs Arith) Arith {
	switch rhs := toarith(rhs).(type) {
	case Int:
		return Int(int64(lhs) / int64(rhs))
	case Uint:
		return Int(int64(lhs) / int64(rhs))
	case Float:
		return Float(float64(lhs) / float64(rhs))
	}
	panic("unreachable")
}

func (lhs Int) Mod(rhs Arith) Arith {
	switch rhs := toarith(rhs).(type) {
	case Int:
		return Int(int64(lhs) % int64(rhs))
	case Uint:
		return Int(int64(lhs) % int64(rhs))
	case Float:
		return Float(math.Mod(float64(lhs), float64(rhs)))
	}
	panic("unreachable")
}

func (lhs Int) Sqrt() Arith { return Int(math.Sqrt(float64(lhs))) }

// Pow raises lhs to the power rhs.
//
// FIX: the integer loops previously seeded the accumulator with lhs and then
// multiplied rhs more times, computing lhs^(rhs+1). The accumulator now
// starts at 1. The rhs == 0 result for an Int base is now Int(1) (was
// Uint(1)), consistent with the receiver type.
func (lhs Int) Pow(rhs Arith) Arith {
	switch rhs := toarith(rhs).(type) {
	case Int:
		if rhs == 0 {
			return Int(1)
		} else if rhs < 0 {
			return Float(math.Pow(float64(lhs), float64(rhs)))
		}
		result := Int(1)
		for i := Int(0); i < rhs; i++ {
			result *= lhs
		}
		return result
	case Uint:
		if rhs == 0 {
			return Int(1)
		}
		result := Int(1)
		for i := Uint(0); i < rhs; i++ {
			result *= lhs
		}
		return result
	case Float:
		return Float(math.Pow(float64(lhs), float64(rhs)))
	}
	panic("unreachable")
}

func (lhs Int) Xor(rhs Bitwise) Bitwise { return Int(uint64(lhs) ^ uint64(touint(rhs))) }
func (lhs Int) And(rhs Bitwise) Bitwise { return Int(uint64(lhs) & uint64(touint(rhs))) }
func (lhs Int) Or(rhs Bitwise) Bitwise  { return Int(uint64(lhs) | uint64(touint(rhs))) }
func (lhs Int) Not() Bitwise            { return Int(^uint64(lhs)) }

// Unsigned integer

func (lhs Uint) Float64() float64 { return float64(lhs) }
func (lhs Uint) Int64() int64     { return int64(lhs) }
func (lhs Uint) Uint64() uint64   { return uint64(lhs) }

func (lhs Uint) Neg() Arith               { return -lhs }
func (lhs Uint) Round(RoundingMode) Value { return lhs }

// ArithShift routes through int64 so right shifts replicate the top bit.
func (lhs Uint) ArithShift(bits Value) Value {
	if bits := toint(bits); bits < 0 {
		return Uint(int64(lhs) << uint(-bits))
	} else if bits > 0 {
		return Uint(int64(lhs) >> uint(bits))
	}
	return lhs
}

func (lhs Uint) BitShift(bits Value) Value {
	if bits := toint(bits); bits < 0 {
		return lhs << uint(-bits)
	} else if bits > 0 {
		return lhs >> uint(bits)
	}
	return lhs
}

func (lhs Uint) Add(rhs Arith) Arith {
	switch rhs := toarith(rhs).(type) {
	case Uint:
		return Uint(uint64(lhs) + uint64(rhs))
	case Int:
		return Uint(int64(lhs) + int64(rhs))
	case Float:
		return Float(float64(lhs) + float64(rhs))
	}
	panic("unreachable")
}

func (lhs Uint) Sub(rhs Arith) Arith {
	switch rhs := toarith(rhs).(type) {
	case Uint:
		return Uint(uint64(lhs) - uint64(rhs))
	case Int:
		return Uint(int64(lhs) - int64(rhs))
	case Float:
		return Float(float64(lhs) - float64(rhs))
	}
	panic("unreachable")
}

// Mul, Div and Mod: FIX — the Uint/Uint cases previously returned Int(...),
// silently changing the result's type; they now return Uint like Add and Sub.
func (lhs Uint) Mul(rhs Arith) Arith {
	switch rhs := toarith(rhs).(type) {
	case Uint:
		return Uint(uint64(lhs) * uint64(rhs))
	case Int:
		return Uint(int64(lhs) * int64(rhs))
	case Float:
		return Float(float64(lhs) * float64(rhs))
	}
	panic("unreachable")
}

func (lhs Uint) Div(rhs Arith) Arith {
	switch rhs := toarith(rhs).(type) {
	case Uint:
		return Uint(uint64(lhs) / uint64(rhs))
	case Int:
		return Uint(int64(lhs) / int64(rhs))
	case Float:
		return Float(float64(lhs) / float64(rhs))
	}
	panic("unreachable")
}

func (lhs Uint) Mod(rhs Arith) Arith {
	switch rhs := toarith(rhs).(type) {
	case Uint:
		return Uint(uint64(lhs) % uint64(rhs))
	case Int:
		return Uint(int64(lhs) % int64(rhs))
	case Float:
		return Float(math.Mod(float64(lhs), float64(rhs)))
	}
	panic("unreachable")
}

func (lhs Uint) Sqrt() Arith { return Uint(math.Sqrt(float64(lhs))) }

// Pow raises lhs to the power rhs.
//
// FIX: same off-by-one as Int.Pow — the loops computed lhs^(rhs+1); the
// accumulator now starts at 1.
func (lhs Uint) Pow(rhs Arith) Arith {
	switch rhs := toarith(rhs).(type) {
	case Uint:
		if rhs == 0 {
			return Uint(1)
		}
		result := Uint(1)
		for i := Uint(0); i < rhs; i++ {
			result *= lhs
		}
		return result
	case Int:
		if rhs == 0 {
			return Uint(1)
		} else if rhs < 0 {
			return Float(math.Pow(float64(lhs), float64(rhs)))
		}
		result := Uint(1)
		for i := Int(0); i < rhs; i++ {
			result *= lhs
		}
		return result
	case Float:
		return Float(math.Pow(float64(lhs), float64(rhs)))
	}
	panic("unreachable")
}

func (lhs Uint) Xor(rhs Bitwise) Bitwise { return lhs ^ touint(rhs) }
func (lhs Uint) And(rhs Bitwise) Bitwise { return lhs & touint(rhs) }
func (lhs Uint) Or(rhs Bitwise) Bitwise  { return lhs | touint(rhs) }
func (lhs Uint) Not() Bitwise            { return ^lhs }

// toarith converts any supported numeric Value to an Arith.
// FIX: int8 was the only fixed-width numeric type missing from the switch.
func toarith(v Value) (r Arith) {
	switch v := v.(type) {
	case Arith:
		return v
	case FloatValuer:
		return Float(v.Float64())
	case IntValuer:
		return Int(v.Int64())
	case UintValuer:
		return Uint(v.Uint64())
	case int:
		return Int(v)
	case int64:
		return Int(v)
	case int32:
		return Int(v)
	case int16:
		return Int(v)
	case int8:
		return Int(v)
	case float64:
		return Float(v)
	case float32:
		return Float(v)
	case uint:
		return Uint(v)
	case uint64:
		return Uint(v)
	case uint32:
		return Uint(v)
	case uint16:
		return Uint(v)
	case uint8:
		return Uint(v)
	default:
		panic(fmt.Errorf("unable to convert %T to arithmetic type", v))
	}
}

// tobitwise converts any supported numeric Value to a Bitwise.
// FIX: int8 added, mirroring toarith.
func tobitwise(v Value) (r Bitwise) {
	switch v := v.(type) {
	case Bitwise:
		return v
	case IntValuer:
		return Int(v.Int64())
	case UintValuer:
		return Uint(v.Uint64())
	case float64:
		return Int(v)
	case float32:
		return Int(v)
	case int:
		return Int(v)
	case int64:
		return Int(v)
	case int32:
		return Int(v)
	case int16:
		return Int(v)
	case int8:
		return Int(v)
	case uint:
		return Uint(v)
	case uint64:
		return Uint(v)
	case uint32:
		return Uint(v)
	case uint16:
		return Uint(v)
	case uint8:
		return Uint(v)
	default:
		panic(fmt.Errorf("unable to convert %T to bitwise type", v))
	}
}

func tofloat(v Value) Float {
	switch v := toarith(v).(type) {
	case Float:
		return v
	case Int:
		return Float(v)
	case Uint:
		return Float(v)
	}
	panic("unreachable")
}

func toint(v Value) Int {
	switch v := toarith(v).(type) {
	case Int:
		return v
	case Float:
		return Int(v)
	case Uint:
		return Int(v)
	}
	panic("unreachable")
}

func touint(v Value) Uint {
	switch v := toarith(v).(type) {
	case Int:
		return Uint(v)
	case Float:
		return Uint(v)
	case Uint:
		return v
	}
	panic("unreachable")
}

// arithShift applies an arithmetic shift, coercing v at most once.
func arithShift(v, bits Value) Value {
	var (
		ov  = v
		try bool
	)
loop:
	switch vx := v.(type) {
	case Uint:
		return vx.ArithShift(bits)
	case Int:
		return vx.ArithShift(bits)
	case ArithmeticShifter:
		return vx.ArithShift(bits)
	default:
		if try {
			panic(fmt.Errorf("invalid type for arithmetic shift: %T", ov))
		}
		try = true
		v = tobitwise(v)
		goto loop
	}
}

// bitwiseShift applies a logical shift, coercing v at most once.
func bitwiseShift(v, bits Value) Value {
	var (
		ov  = v
		try bool
	)
loop:
	switch vx := v.(type) {
	case Uint:
		return vx.BitShift(bits)
	case Int:
		return vx.BitShift(bits)
	case BitShifter:
		return vx.BitShift(bits)
	default:
		if try {
			panic(fmt.Errorf("invalid type for bitwise shift: %T", ov))
		}
		try = true
		v = tobitwise(v)
		goto loop
	}
}
rvm/arith.go
0.611266
0.414603
arith.go
starcoder
package levels import ( mgl "github.com/go-gl/mathgl/mgl32" "github.com/inkyblackness/hacked/editor/render" "github.com/inkyblackness/hacked/ui/opengl" ) var highlighterVertexShaderSource = ` #version 150 precision mediump float; in vec3 vertexPosition; uniform mat4 modelMatrix; uniform mat4 viewMatrix; uniform mat4 projectionMatrix; void main(void) { gl_Position = projectionMatrix * viewMatrix * modelMatrix * vec4(vertexPosition, 1.0); } ` var highlighterFragmentShaderSource = ` #version 150 precision mediump float; uniform vec4 inColor; out vec4 fragColor; void main(void) { fragColor = inColor; } ` // Highlighter draws a simple highlighting of a rectangular area. type Highlighter struct { context *render.Context program uint32 vao *opengl.VertexArrayObject vertexPositionBuffer uint32 vertexPositionAttrib int32 modelMatrixUniform opengl.Matrix4Uniform viewMatrixUniform opengl.Matrix4Uniform projectionMatrixUniform opengl.Matrix4Uniform inColorUniform opengl.Vector4Uniform } // NewHighlighter returns a new instance of Highlighter. 
func NewHighlighter(context *render.Context) *Highlighter { gl := context.OpenGL program, programErr := opengl.LinkNewStandardProgram(gl, highlighterVertexShaderSource, highlighterFragmentShaderSource) if programErr != nil { panic(opengl.NamedShaderError{Name: "HighlighterShader", Nested: programErr}) } highlighter := &Highlighter{ context: context, program: program, vao: opengl.NewVertexArrayObject(gl, program), vertexPositionBuffer: gl.GenBuffers(1)[0], vertexPositionAttrib: gl.GetAttribLocation(program, "vertexPosition"), modelMatrixUniform: opengl.Matrix4Uniform(gl.GetUniformLocation(program, "modelMatrix")), viewMatrixUniform: opengl.Matrix4Uniform(gl.GetUniformLocation(program, "viewMatrix")), projectionMatrixUniform: opengl.Matrix4Uniform(gl.GetUniformLocation(program, "projectionMatrix")), inColorUniform: opengl.Vector4Uniform(gl.GetUniformLocation(program, "inColor"))} { gl.BindBuffer(opengl.ARRAY_BUFFER, highlighter.vertexPositionBuffer) half := float32(0.5) var vertices = []float32{ -half, -half, 0.0, half, -half, 0.0, half, half, 0.0, half, half, 0.0, -half, half, 0.0, -half, -half, 0.0} gl.BufferData(opengl.ARRAY_BUFFER, len(vertices)*4, vertices, opengl.STATIC_DRAW) gl.BindBuffer(opengl.ARRAY_BUFFER, 0) } highlighter.vao.OnShader(func() { gl.EnableVertexAttribArray(uint32(highlighter.vertexPositionAttrib)) gl.BindBuffer(opengl.ARRAY_BUFFER, highlighter.vertexPositionBuffer) gl.VertexAttribOffset(uint32(highlighter.vertexPositionAttrib), 3, opengl.FLOAT, false, 0, 0) gl.BindBuffer(opengl.ARRAY_BUFFER, 0) }) return highlighter } // Dispose releases all resources. func (highlighter *Highlighter) Dispose() { gl := highlighter.context.OpenGL highlighter.vao.Dispose() gl.DeleteBuffers([]uint32{highlighter.vertexPositionBuffer}) gl.DeleteShader(highlighter.program) } // Render renders the highlights. 
func (highlighter *Highlighter) Render(positions []MapPosition, sideLength float32, color [4]float32) { gl := highlighter.context.OpenGL highlighter.vao.OnShader(func() { highlighter.viewMatrixUniform.Set(gl, highlighter.context.ViewMatrix) highlighter.projectionMatrixUniform.Set(gl, &highlighter.context.ProjectionMatrix) highlighter.inColorUniform.Set(gl, &color) for _, pos := range positions { modelMatrix := mgl.Ident4(). Mul4(mgl.Translate3D(float32(pos.X), float32(pos.Y), 0.0)). Mul4(mgl.Scale3D(sideLength, sideLength, 1.0)) highlighter.modelMatrixUniform.Set(gl, &modelMatrix) gl.DrawArrays(opengl.TRIANGLES, 0, 6) } }) }
editor/levels/Highlighter.go
0.763043
0.538862
Highlighter.go
starcoder
type block struct { signer string // Account that signed this particular block voted string // Optional value if the signer voted on adding/removing someone auth bool // Whether the vote was to authorize (or deauthorize) checkpoint []string // List of authorized signers if this is an epoch block } // Define the various voting scenarios to test tests := []struct { epoch uint64 // Number of blocks in an epoch (unset = 30000) signers []string // Initial list of authorized signers in the genesis blocks []block // Chain of signed blocks, potentially influencing auths results []string // Final list of authorized signers after all blocks failure error // Failure if some block is invalid according to the rules }{ { // Single signer, no votes cast signers: []string{"A"}, blocks: []block{ {signer: "A"} }, results: []string{"A"}, }, { // Single signer, voting to add two others (only accept first, second needs 2 votes) signers: []string{"A"}, blocks: []block{ {signer: "A", voted: "B", auth: true}, {signer: "B"}, {signer: "A", voted: "C", auth: true}, }, results: []string{"A", "B"}, }, { // Two signers, voting to add three others (only accept first two, third needs 3 votes already) signers: []string{"A", "B"}, blocks: []block{ {signer: "A", voted: "C", auth: true}, {signer: "B", voted: "C", auth: true}, {signer: "A", voted: "D", auth: true}, {signer: "B", voted: "D", auth: true}, {signer: "C"}, {signer: "A", voted: "E", auth: true}, {signer: "B", voted: "E", auth: true}, }, results: []string{"A", "B", "C", "D"}, }, { // Single signer, dropping itself (weird, but one less cornercase by explicitly allowing this) signers: []string{"A"}, blocks: []block{ {signer: "A", voted: "A", auth: false}, }, results: []string{}, }, { // Two signers, actually needing mutual consent to drop either of them (not fulfilled) signers: []string{"A", "B"}, blocks: []block{ {signer: "A", voted: "B", auth: false}, }, results: []string{"A", "B"}, }, { // Two signers, actually needing mutual consent to 
drop either of them (fulfilled) signers: []string{"A", "B"}, blocks: []block{ {signer: "A", voted: "B", auth: false}, {signer: "B", voted: "B", auth: false}, }, results: []string{"A"}, }, { // Three signers, two of them deciding to drop the third signers: []string{"A", "B", "C"}, blocks: []block{ {signer: "A", voted: "C", auth: false}, {signer: "B", voted: "C", auth: false}, }, results: []string{"A", "B"}, }, { // Four signers, consensus of two not being enough to drop anyone signers: []string{"A", "B", "C", "D"}, blocks: []block{ {signer: "A", voted: "C", auth: false}, {signer: "B", voted: "C", auth: false}, }, results: []string{"A", "B", "C", "D"}, }, { // Four signers, consensus of three already being enough to drop someone signers: []string{"A", "B", "C", "D"}, blocks: []block{ {signer: "A", voted: "D", auth: false}, {signer: "B", voted: "D", auth: false}, {signer: "C", voted: "D", auth: false}, }, results: []string{"A", "B", "C"}, }, { // Authorizations are counted once per signer per target signers: []string{"A", "B"}, blocks: []block{ {signer: "A", voted: "C", auth: true}, {signer: "B"}, {signer: "A", voted: "C", auth: true}, {signer: "B"}, {signer: "A", voted: "C", auth: true}, }, results: []string{"A", "B"}, }, { // Authorizing multiple accounts concurrently is permitted signers: []string{"A", "B"}, blocks: []block{ {signer: "A", voted: "C", auth: true}, {signer: "B"}, {signer: "A", voted: "D", auth: true}, {signer: "B"}, {signer: "A"}, {signer: "B", voted: "D", auth: true}, {signer: "A"}, {signer: "B", voted: "C", auth: true}, }, results: []string{"A", "B", "C", "D"}, }, { // Deauthorizations are counted once per signer per target signers: []string{"A", "B"}, blocks: []block{ {signer: "A", voted: "B", auth: false}, {signer: "B"}, {signer: "A", voted: "B", auth: false}, {signer: "B"}, {signer: "A", voted: "B", auth: false}, }, results: []string{"A", "B"}, }, { // Deauthorizing multiple accounts concurrently is permitted signers: []string{"A", "B", "C", 
"D"}, blocks: []block{ {signer: "A", voted: "C", auth: false}, {signer: "B"}, {signer: "C"}, {signer: "A", voted: "D", auth: false}, {signer: "B"}, {signer: "C"}, {signer: "A"}, {signer: "B", voted: "D", auth: false}, {signer: "C", voted: "D", auth: false}, {signer: "A"}, {signer: "B", voted: "C", auth: false}, }, results: []string{"A", "B"}, }, { // Votes from deauthorized signers are discarded immediately (deauth votes) signers: []string{"A", "B", "C"}, blocks: []block{ {signer: "C", voted: "B", auth: false}, {signer: "A", voted: "C", auth: false}, {signer: "B", voted: "C", auth: false}, {signer: "A", voted: "B", auth: false}, }, results: []string{"A", "B"}, }, { // Votes from deauthorized signers are discarded immediately (auth votes) signers: []string{"A", "B", "C"}, blocks: []block{ {signer: "C", voted: "D", auth: true}, {signer: "A", voted: "C", auth: false}, {signer: "B", voted: "C", auth: false}, {signer: "A", voted: "D", auth: true}, }, results: []string{"A", "B"}, }, { // Cascading changes are not allowed, only the account being voted on may change signers: []string{"A", "B", "C", "D"}, blocks: []block{ {signer: "A", voted: "C", auth: false}, {signer: "B"}, {signer: "C"}, {signer: "A", voted: "D", auth: false}, {signer: "B", voted: "C", auth: false}, {signer: "C"}, {signer: "A"}, {signer: "B", voted: "D", auth: false}, {signer: "C", voted: "D", auth: false}, }, results: []string{"A", "B", "C"}, }, { // Changes reaching consensus out of bounds (via a deauth) execute on touch signers: []string{"A", "B", "C", "D"}, blocks: []block{ {signer: "A", voted: "C", auth: false}, {signer: "B"}, {signer: "C"}, {signer: "A", voted: "D", auth: false}, {signer: "B", voted: "C", auth: false}, {signer: "C"}, {signer: "A"}, {signer: "B", voted: "D", auth: false}, {signer: "C", voted: "D", auth: false}, {signer: "A"}, {signer: "C", voted: "C", auth: true}, }, results: []string{"A", "B"}, }, { // Changes reaching consensus out of bounds (via a deauth) may go out of consensus 
on first touch signers: []string{"A", "B", "C", "D"}, blocks: []block{ {signer: "A", voted: "C", auth: false}, {signer: "B"}, {signer: "C"}, {signer: "A", voted: "D", auth: false}, {signer: "B", voted: "C", auth: false}, {signer: "C"}, {signer: "A"}, {signer: "B", voted: "D", auth: false}, {signer: "C", voted: "D", auth: false}, {signer: "A"}, {signer: "B", voted: "C", auth: true}, }, results: []string{"A", "B", "C"}, }, { // Ensure that pending votes don't survive authorization status changes. This // corner case can only appear if a signer is quickly added, removed and then // readded (or the inverse), while one of the original voters dropped. If a // past vote is left cached in the system somewhere, this will interfere with // the final signer outcome. signers: []string{"A", "B", "C", "D", "E"}, blocks: []block{ {signer: "A", voted: "F", auth: true}, // Authorize F, 3 votes needed {signer: "B", voted: "F", auth: true}, {signer: "C", voted: "F", auth: true}, {signer: "D", voted: "F", auth: false}, // Deauthorize F, 4 votes needed (leave A's previous vote "unchanged") {signer: "E", voted: "F", auth: false}, {signer: "B", voted: "F", auth: false}, {signer: "C", voted: "F", auth: false}, {signer: "D", voted: "F", auth: true}, // Almost authorize F, 2/3 votes needed {signer: "E", voted: "F", auth: true}, {signer: "B", voted: "A", auth: false}, // Deauthorize A, 3 votes needed {signer: "C", voted: "A", auth: false}, {signer: "D", voted: "A", auth: false}, {signer: "B", voted: "F", auth: true}, // Finish authorizing F, 3/3 votes needed }, results: []string{"B", "C", "D", "E", "F"}, }, { // Epoch transitions reset all votes to allow chain checkpointing epoch: 3, signers: []string{"A", "B"}, blocks: []block{ {signer: "A", voted: "C", auth: true}, {signer: "B"}, {signer: "A", checkpoint: []string{"A", "B"}}, {signer: "B", voted: "C", auth: true}, }, results: []string{"A", "B"}, }, { // An unauthorized signer should not be able to sign blocks signers: []string{"A"}, 
blocks: []block{ {signer: "B"}, }, failure: errUnauthorizedSigner, }, { // An authorized signer that signed recenty should not be able to sign again signers: []string{"A", "B"}, blocks []block{ {signer: "A"}, {signer: "A"}, }, failure: errRecentlySigned, }, { // Recent signatures should not reset on checkpoint blocks imported in a batch epoch: 3, signers: []string{"A", "B", "C"}, blocks: []block{ {signer: "A"}, {signer: "B"}, {signer: "A", checkpoint: []string{"A", "B", "C"}}, {signer: "A"}, }, failure: errRecentlySigned, },, }
clique/EIP225.go
0.628635
0.494446
EIP225.go
starcoder
package collect

import (
	"math"
	"reflect"
)

// number mirrors constraints.Integer | constraints.Float from
// golang.org/x/exp/constraints (same type set), inlined so this package
// depends only on the standard library.
type number interface {
	~int | ~int8 | ~int16 | ~int32 | ~int64 |
		~uint | ~uint8 | ~uint16 | ~uint32 | ~uint64 | ~uintptr |
		~float32 | ~float64
}

// IsNumber reports whether v's dynamic type is one of the built-in numeric
// types (signed/unsigned integers and floats; uintptr excluded).
func IsNumber(v any) bool {
	switch v.(type) {
	case int, int8, int16, int32, int64,
		uint, uint8, uint16, uint32, uint64,
		float32, float64:
		return true
	default:
		return false
	}
}

// NumberCompare compares a and b with the given operator ("=", "!=", "<",
// "<=", ">", ">="). Float equality uses a 1e-9 tolerance and treats
// NaN == NaN as true. Unknown operators yield false.
//
// FIX: the NaN special case was only applied to float64; float32 now gets
// the same treatment.
func NumberCompare[T number](a T, operator string, b T) bool {
	switch operator {
	case "=", "!=":
		var eq bool
		switch av := any(a).(type) {
		case float64:
			bv := any(b).(float64)
			if math.IsNaN(av) && math.IsNaN(bv) {
				eq = true
			} else {
				eq = math.Abs(av-bv) <= 1e-9
			}
		case float32:
			av64, bv64 := float64(av), float64(any(b).(float32))
			if math.IsNaN(av64) && math.IsNaN(bv64) {
				eq = true
			} else {
				eq = math.Abs(av64-bv64) <= 1e-9
			}
		default:
			eq = a == b
		}
		if operator == "=" {
			return eq
		}
		return !eq
	case "<":
		return a < b
	case "<=":
		return a <= b
	case ">":
		return a > b
	case ">=":
		return a >= b
	}
	return false
}

// AnyNumberCompare compares two dynamically typed numbers. Operands of
// different numeric classes (signed vs unsigned vs float) are never "=";
// nil or non-numeric a yields false.
func AnyNumberCompare(a any, operator string, b any) bool {
	if a == nil || b == nil {
		return false
	} else if !IsNumber(a) {
		return false
	}
	ar, br := reflect.ValueOf(a), reflect.ValueOf(b)
	switch {
	case ar.CanInt():
		if !br.CanInt() {
			return operator != "="
		}
		return NumberCompare(ar.Int(), operator, br.Int())
	case ar.CanUint():
		if !br.CanUint() {
			return operator != "="
		}
		return NumberCompare(ar.Uint(), operator, br.Uint())
	case ar.CanFloat():
		if !br.CanFloat() {
			return operator != "="
		}
		return NumberCompare(ar.Float(), operator, br.Float())
	}
	return false
}

// Compare is a generic comparison: numbers are compared numerically; other
// comparable values support "="/"!="; slices, maps and funcs are compared by
// identity (underlying pointer).
func Compare(a any, operator string, b any) bool {
	if a == nil && b == nil {
		return operator == "="
	} else if a == nil || b == nil {
		return operator != "="
	}
	if IsNumber(a) || IsNumber(b) {
		return AnyNumberCompare(a, operator, b)
	} else if operator != "=" && operator != "!=" {
		return false
	}
	ar, br := reflect.TypeOf(a), reflect.TypeOf(b)
	ak, bk := ar.Kind(), br.Kind()
	if ak != bk {
		return operator == "!="
	}
	if ak != reflect.Slice && ak != reflect.Map && ak != reflect.Func {
		switch operator {
		case "=":
			return a == b
		case "!=":
			return a != b
		}
	}
	p := reflect.ValueOf(a).UnsafePointer()
	switch operator {
	case "=":
		return p == reflect.ValueOf(b).UnsafePointer()
	case "!=":
		return p != reflect.ValueOf(b).UnsafePointer()
	}
	return false
}

// ComparisonSet is a membership set keyed by (normalized value, kind).
// With LooseNumber set, all signed integers collapse to int64, unsigned to
// uint64 and floats to float64, so e.g. int(1) matches int64(1).
type ComparisonSet struct {
	LooseNumber bool
	z           map[any]map[reflect.Kind]struct{}
}

// Normalize maps v to the (kind, key) pair used for set storage. Reference
// types (slice, map, func) are keyed by identity.
func (c *ComparisonSet) Normalize(v reflect.Value) (reflect.Kind, any) {
	kind := v.Kind()
	if kind == reflect.Slice || kind == reflect.Func || kind == reflect.Map {
		return kind, v.UnsafePointer()
	}
	if c.LooseNumber {
		switch {
		case v.CanInt():
			return reflect.Int64, v.Int()
		case v.CanUint():
			return reflect.Uint64, v.Uint()
		case v.CanFloat():
			return reflect.Float64, v.Float()
		}
	}
	return kind, v.Interface()
}

// Add inserts v into the set.
func (c *ComparisonSet) Add(v any) {
	kind, value := c.Normalize(reflect.ValueOf(v))
	if _, ok := c.z[value]; !ok {
		c.z[value] = make(map[reflect.Kind]struct{})
	}
	c.z[value][kind] = struct{}{}
}

// Has reports whether v is in the set.
func (c *ComparisonSet) Has(v any) bool {
	kind, value := c.Normalize(reflect.ValueOf(v))
	if m, ok := c.z[value]; ok {
		_, ok := m[kind]
		return ok
	}
	return false
}

// NewComparisonSet returns an empty set with the given number-matching mode.
func NewComparisonSet(looseNumber bool) *ComparisonSet {
	return &ComparisonSet{looseNumber, make(map[any]map[reflect.Kind]struct{})}
}
comparator.go
0.682891
0.446314
comparator.go
starcoder
package xnumber

import (
	"strconv"
)

// parse

// ParseInt parses a string to int using given base.
func ParseInt(s string, base int) (int, error) { i, e := strconv.ParseInt(s, base, 0); return int(i), e }

// ParseInt8 parses a string to int8 using given base.
func ParseInt8(s string, base int) (int8, error) { i, e := strconv.ParseInt(s, base, 8); return int8(i), e }

// ParseInt16 parses a string to int16 using given base.
func ParseInt16(s string, base int) (int16, error) { i, e := strconv.ParseInt(s, base, 16); return int16(i), e }

// ParseInt32 parses a string to int32 using given base.
func ParseInt32(s string, base int) (int32, error) { i, e := strconv.ParseInt(s, base, 32); return int32(i), e }

// ParseInt64 parses a string to int64 using given base.
func ParseInt64(s string, base int) (int64, error) { i, e := strconv.ParseInt(s, base, 64); return i, e }

// ParseUint parses a string to uint using given base.
func ParseUint(s string, base int) (uint, error) { u, e := strconv.ParseUint(s, base, 0); return uint(u), e }

// ParseUint8 parses a string to uint8 using given base.
func ParseUint8(s string, base int) (uint8, error) { u, e := strconv.ParseUint(s, base, 8); return uint8(u), e }

// ParseUint16 parses a string to uint16 using given base.
func ParseUint16(s string, base int) (uint16, error) { u, e := strconv.ParseUint(s, base, 16); return uint16(u), e }

// ParseUint32 parses a string to uint32 using given base.
func ParseUint32(s string, base int) (uint32, error) { u, e := strconv.ParseUint(s, base, 32); return uint32(u), e }

// ParseUint64 parses a string to uint64 using given base.
func ParseUint64(s string, base int) (uint64, error) { u, e := strconv.ParseUint(s, base, 64); return u, e }

// ParseFloat32 parses a string to float32.
func ParseFloat32(s string) (float32, error) { f, e := strconv.ParseFloat(s, 32); return float32(f), e }

// ParseFloat64 parses a string to float64.
func ParseFloat64(s string) (float64, error) { f, e := strconv.ParseFloat(s, 64); return f, e }

// parseOr

// ParseIntOr parses a string to int using given base with a fallback value.
func ParseIntOr(s string, base int, o int) int { i, e := ParseInt(s, base); if e != nil { return o }; return i }

// ParseInt8Or parses a string to int8 using given base with a fallback value.
func ParseInt8Or(s string, base int, o int8) int8 { i, e := ParseInt8(s, base); if e != nil { return o }; return i }

// ParseInt16Or parses a string to int16 using given base with a fallback value.
func ParseInt16Or(s string, base int, o int16) int16 { i, e := ParseInt16(s, base); if e != nil { return o }; return i }

// ParseInt32Or parses a string to int32 using given base with a fallback value.
func ParseInt32Or(s string, base int, o int32) int32 { i, e := ParseInt32(s, base); if e != nil { return o }; return i }

// ParseInt64Or parses a string to int64 using given base with a fallback value.
func ParseInt64Or(s string, base int, o int64) int64 { i, e := ParseInt64(s, base); if e != nil { return o }; return i }

// ParseUintOr parses a string to uint using given base with a fallback value.
func ParseUintOr(s string, base int, o uint) uint { u, e := ParseUint(s, base); if e != nil { return o }; return u }

// ParseUint8Or parses a string to uint8 using given base with a fallback value.
func ParseUint8Or(s string, base int, o uint8) uint8 { u, e := ParseUint8(s, base); if e != nil { return o }; return u }

// ParseUint16Or parses a string to uint16 using given base with a fallback value.
func ParseUint16Or(s string, base int, o uint16) uint16 { u, e := ParseUint16(s, base); if e != nil { return o }; return u }

// ParseUint32Or parses a string to uint32 using given base with a fallback value.
func ParseUint32Or(s string, base int, o uint32) uint32 { u, e := ParseUint32(s, base); if e != nil { return o }; return u }

// ParseUint64Or parses a string to uint64 using given base with a fallback value.
func ParseUint64Or(s string, base int, o uint64) uint64 { u, e := ParseUint64(s, base); if e != nil { return o }; return u }

// ParseFloat32Or parses a string to float32 with a fallback value.
func ParseFloat32Or(s string, o float32) float32 { f, e := ParseFloat32(s); if e != nil { return o }; return f }

// ParseFloat64Or parses a string to float64 with a fallback value.
func ParseFloat64Or(s string, o float64) float64 { f, e := ParseFloat64(s); if e != nil { return o }; return f }

// atoX

// Atoi parses a string to int in base 10.
func Atoi(s string) (int, error) { return ParseInt(s, 10) }

// Atoi8 parses a string to int8 in base 10.
func Atoi8(s string) (int8, error) { return ParseInt8(s, 10) }

// Atoi16 parses a string to int16 in base 10.
func Atoi16(s string) (int16, error) { return ParseInt16(s, 10) }

// Atoi32 parses a string to int32 in base 10.
func Atoi32(s string) (int32, error) { return ParseInt32(s, 10) }

// Atoi64 parses a string to int64 in base 10.
func Atoi64(s string) (int64, error) { return ParseInt64(s, 10) }

// Atou parses a string to uint in base 10.
func Atou(s string) (uint, error) { return ParseUint(s, 10) }

// Atou8 parses a string to uint8 in base 10.
func Atou8(s string) (uint8, error) { return ParseUint8(s, 10) }

// Atou16 parses a string to uint16 in base 10.
func Atou16(s string) (uint16, error) { return ParseUint16(s, 10) }

// Atou32 parses a string to uint32 in base 10.
func Atou32(s string) (uint32, error) { return ParseUint32(s, 10) }

// Atou64 parses a string to uint64 in base 10.
func Atou64(s string) (uint64, error) { return ParseUint64(s, 10) }

// Atof32 parses a string to float32, same as ParseFloat32.
func Atof32(s string) (float32, error) { return ParseFloat32(s) }

// Atof64 parses a string to float64, same as ParseFloat64.
func Atof64(s string) (float64, error) { return ParseFloat64(s) }

// atoXOr

// AtoiOr parses a string to int in base 10 with a fallback value.
func AtoiOr(s string, o int) int { i, e := Atoi(s); if e != nil { return o }; return i }

// Atoi8Or parses a string to int8 in base 10 with a fallback value.
func Atoi8Or(s string, o int8) int8 { i, e := Atoi8(s); if e != nil { return o }; return i }

// Atoi16Or parses a string to int16 in base 10 with a fallback value.
func Atoi16Or(s string, o int16) int16 { i, e := Atoi16(s); if e != nil { return o }; return i }

// Atoi32Or parses a string to int32 in base 10 with a fallback value.
func Atoi32Or(s string, o int32) int32 { i, e := Atoi32(s); if e != nil { return o }; return i }

// Atoi64Or parses a string to int64 in base 10 with a fallback value.
func Atoi64Or(s string, o int64) int64 { i, e := Atoi64(s); if e != nil { return o }; return i }

// AtouOr parses a string to uint in base 10 with a fallback value.
func AtouOr(s string, o uint) uint { u, e := Atou(s); if e != nil { return o }; return u }

// Atou8Or parses a string to uint8 in base 10 with a fallback value.
func Atou8Or(s string, o uint8) uint8 { u, e := Atou8(s); if e != nil { return o }; return u }

// Atou16Or parses a string to uint16 in base 10 with a fallback value.
func Atou16Or(s string, o uint16) uint16 { u, e := Atou16(s); if e != nil { return o }; return u }

// Atou32Or parses a string to uint32 in base 10 with a fallback value.
func Atou32Or(s string, o uint32) uint32 { u, e := Atou32(s); if e != nil { return o }; return u }

// Atou64Or parses a string to uint64 in base 10 with a fallback value.
func Atou64Or(s string, o uint64) uint64 { u, e := Atou64(s); if e != nil { return o }; return u }

// Atof32Or parses a string to float32 with a fallback value.
func Atof32Or(s string, o float32) float32 { f, e := Atof32(s); if e != nil { return o }; return f }

// Atof64Or parses a string to float64 with a fallback value.
func Atof64Or(s string, o float64) float64 { f, e := Atof64(s); if e != nil { return o }; return f }

// format

// FormatInt formats an int to string using given base.
func FormatInt(i int, base int) string { return strconv.FormatInt(int64(i), base) }

// FormatInt8 formats an int8 to string using given base.
func FormatInt8(i int8, base int) string { return strconv.FormatInt(int64(i), base) }

// FormatInt16 formats an int16 to string using given base.
func FormatInt16(i int16, base int) string { return strconv.FormatInt(int64(i), base) }

// FormatInt32 formats an int32 to string using given base.
func FormatInt32(i int32, base int) string { return strconv.FormatInt(int64(i), base) }

// FormatInt64 formats an int64 to string using given base.
func FormatInt64(i int64, base int) string { return strconv.FormatInt(i, base) }

// FormatUint formats a uint to string using given base.
func FormatUint(u uint, base int) string { return strconv.FormatUint(uint64(u), base) }

// FormatUint8 formats a uint8 to string using given base.
func FormatUint8(u uint8, base int) string { return strconv.FormatUint(uint64(u), base) }

// FormatUint16 formats a uint16 to string using given base.
func FormatUint16(u uint16, base int) string { return strconv.FormatUint(uint64(u), base) }

// FormatUint32 formats a uint32 to string using given base.
func FormatUint32(u uint32, base int) string { return strconv.FormatUint(uint64(u), base) }

// FormatUint64 formats a uint64 to string using given base.
func FormatUint64(u uint64, base int) string { return strconv.FormatUint(u, base) }

// FormatFloat32 formats a float32 to string using given format and precision.
func FormatFloat32(f float32, fmt byte, prec int) string { return strconv.FormatFloat(float64(f), fmt, prec, 32) }

// FormatFloat64 formats a float64 to string using given format and precision.
func FormatFloat64(f float64, fmt byte, prec int) string { return strconv.FormatFloat(f, fmt, prec, 64) }

// Xtoa

// Itoa formats an int to string in base 10.
func Itoa(i int) string { return FormatInt(i, 10) }

// I8toa formats an int8 to string in base 10.
func I8toa(i int8) string { return FormatInt8(i, 10) }

// I16toa formats an int16 to string in base 10.
func I16toa(i int16) string { return FormatInt16(i, 10) }

// I32toa formats an int32 to string in base 10.
func I32toa(i int32) string { return FormatInt32(i, 10) }

// I64toa formats an int64 to string in base 10.
func I64toa(i int64) string { return FormatInt64(i, 10) }

// Utoa formats a uint to string in base 10.
func Utoa(u uint) string { return FormatUint(u, 10) }

// U8toa formats a uint8 to string in base 10.
func U8toa(u uint8) string { return FormatUint8(u, 10) }

// U16toa formats a uint16 to string in base 10.
func U16toa(u uint16) string { return FormatUint16(u, 10) }

// U32toa formats a uint32 to string in base 10.
func U32toa(u uint32) string { return FormatUint32(u, 10) }

// U64toa formats a uint64 to string in base 10.
func U64toa(u uint64) string { return FormatUint64(u, 10) }

// F32toa formats a float32 to string using default format ('f', shortest precision).
func F32toa(f float32) string { return FormatFloat32(f, 'f', -1) }

// F64toa formats a float64 to string using default format ('f', shortest precision).
func F64toa(f float64) string { return FormatFloat64(f, 'f', -1) }
xnumber/xnumber_parse.go
0.843219
0.457258
xnumber_parse.go
starcoder
package continuous import ( "github.com/jtejido/ggsl/specfunc" "github.com/jtejido/stats" "github.com/jtejido/stats/err" smath "github.com/jtejido/stats/math" "math" "math/rand" ) // Rice distribution // https://en.wikipedia.org/wiki/Rice_distribution type Rice struct { distance, spread float64 // v, σ src rand.Source } func NewRice(distance, spread float64) (*Rice, error) { return NewRiceWithSource(distance, spread, nil) } func NewRiceWithSource(distance, spread float64, src rand.Source) (*Rice, error) { if distance < 0 || spread < 0 { return nil, err.Invalid() } return &Rice{distance, spread, src}, nil } // μ ∈ [0,∞) // σ ∈ [0,∞) func (r *Rice) Parameters() stats.Limits { return stats.Limits{ "v": stats.Interval{0, math.Inf(1), true, true}, "σ": stats.Interval{0, math.Inf(1), true, true}, } } // x ∈ [0,∞) func (r *Rice) Support() stats.Interval { return stats.Interval{0, math.Inf(1), false, true} } func (r *Rice) Probability(x float64) float64 { if r.Support().IsWithinInterval(x) { return (x / (r.spread * r.spread)) * math.Exp(-((x*x)+(r.distance*r.distance))/(2*(r.spread*r.spread))) * specfunc.Bessel_I0((x*r.distance)/(r.spread*r.spread)) } return 0 } func (r *Rice) Distribution(x float64) float64 { if r.Support().IsWithinInterval(x) { return 1 - smath.MarcumQ(1, r.distance/r.spread, x/r.spread) } return 0 } func (r *Rice) Inverse(p float64) float64 { if p <= 0 { return 0 } if p >= 1 { return math.Inf(1) } ncs := NonCentralChiSquared{2, math.Pow(r.distance/r.spread, 2), nil} return math.Sqrt(ncs.Inverse(p)) * r.spread } func (r *Rice) Mean() float64 { return r.spread * math.Sqrt(math.Pi/2) * smath.Laguerre(1./2, math.Pow(-r.distance, 2.)/(2*(r.spread*r.spread))) } func (r *Rice) Variance() float64 { return 2*(r.spread*r.spread) + (r.distance * r.distance) - ((math.Pi*(r.spread*r.spread))/2)*math.Pow(smath.Laguerre(1./2, math.Pow(-r.distance, 2.)/(2*(r.spread*r.spread))), 2.) 
} func (r *Rice) Skewness() float64 { m1 := r.rm(1) m2 := r.rm(2) m3 := r.rm(3) return (m1*(2*(m1*m1)-3*m2) + m3) / math.Pow(m2-(m1*m1), 3./2) } func (r *Rice) ExKurtosis() float64 { m1 := r.rm(1) m2 := r.rm(2) m3 := r.rm(3) m4 := r.rm(4) return (-3*(m1*m1*m1*m1) + 6*(m1*m1)*m2 - 4*m1*m3 + m4 - 3*((m2-(m1*m1))*(m2-(m1*m1)))) / ((m2 - (m1 * m1)) * (m2 - (m1 * m1))) } func (r *Rice) rm(k float64) float64 { return math.Pow(r.spread, k) * math.Pow(2, k/2) * specfunc.Gamma(1+k/2) * smath.Laguerre(k/2, -(r.distance*r.distance)/(2*(r.spread*r.spread))) } func (r *Rice) Rand() float64 { n := &Normal{0, 1, r.src, nil} x := r.spread*n.Rand() + r.distance y := r.spread * n.Rand() return math.Sqrt((x * x) + (y * y)) }
dist/continuous/rice.go
0.757346
0.47591
rice.go
starcoder
package main import ( "log" "os" "sort" "text/template" "github.com/prometheus/client_golang/prometheus" "github.com/percona/rds_exporter/enhanced" ) type Group struct { Name string metrics []Metric } type Metric struct { Group string Name string Help string } func (m Metric) FqName() string { namespace, subsystem, name, _, _ := enhanced.MapToNode(m.Group, m.Name) switch m.Group { case "disk": namespace = "node" } return prometheus.BuildFQName(namespace, subsystem, name) } func (m Metric) Labels() []string { labels := []string{ "instance", "region", } _, _, _, extraLabels, _ := enhanced.MapToNode(m.Group, m.Name) return append(labels, extraLabels...) } func (m Metric) ConstLabels() map[string]string { switch m.Group { case "cpuUtilization": return map[string]string{ "cpu": "All", } } return nil } func (g Group) Metrics() []Metric { switch g.Name { case "cpuUtilization": return []Metric{ { Group: g.Name, Name: "cpu_average", Help: "The percentage of CPU utilization. Units: Percent", }, } } return g.metrics } var ( // http://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_Monitoring.OS.html docs = map[string]map[string]string{ "General": { "engine": "The database engine for the DB instance.", "instanceID": "The DB instance identifier.", "instanceResourceID": "A region-unique, immutable identifier for the DB instance, also used as the log stream identifier.", "numVCPUs": "The number of virtual CPUs for the DB instance.", "timestamp": "The time at which the metrics were taken.", "uptime": "The amount of time that the DB instance has been active.", "version": "The version of the OS metrics' stream JSON format.", }, "cpuUtilization": { "guest": "The percentage of CPU in use by guest programs.", "idle": "The percentage of CPU that is idle.", "irq": "The percentage of CPU in use by software interrupts.", "nice": "The percentage of CPU in use by programs running at lowest priority.", "steal": "The percentage of CPU in use by other virtual machines.", "system": "The 
percentage of CPU in use by the kernel.", "total": "The total percentage of the CPU in use. This value includes the nice value.", "user": "The percentage of CPU in use by user programs.", "wait": "The percentage of CPU unused while waiting for I/O access.", }, "diskIO": { "avgQueueLen": "The number of requests waiting in the I/O device's queue. This metric is not available for Amazon Aurora.", "avgReqSz": "The average request size, in kilobytes. This metric is not available for Amazon Aurora.", "await": "The number of milliseconds required to respond to requests, including queue time and service time. This metric is not available for Amazon Aurora.", "device": "The identifier of the disk device in use. This metric is not available for Amazon Aurora.", "readIOsPS": "The number of read operations per second. This metric is not available for Amazon Aurora.", "readKb": "The total number of kilobytes read. This metric is not available for Amazon Aurora.", "readKbPS": "The number of kilobytes read per second. This metric is not available for Amazon Aurora.", "rrqmPS": "The number of merged read requests queued per second. This metric is not available for Amazon Aurora.", "tps": "The number of I/O transactions per second. This metric is not available for Amazon Aurora.", "util": "The percentage of CPU time during which requests were issued. This metric is not available for Amazon Aurora.", "writeIOsPS": "The number of write operations per second. This metric is not available for Amazon Aurora.", "writeKb": "The total number of kilobytes written. This metric is not available for Amazon Aurora.", "writeKbPS": "The number of kilobytes written per second. This metric is not available for Amazon Aurora.", "wrqmPS": "The number of merged write requests queued per second. 
This metric is not available for Amazon Aurora.", "readLatency": "The average amount of time taken per disk I/O operation.", "writeLatency": "The average amount of time taken per disk I/O operation.", "writeThroughput": "The average number of bytes written to disk per second.", "readThroughput": "The average number of bytes read from disk per second.", "diskQueueDepth": "The number of outstanding read/write requests waiting to access the disk.", }, "fileSys": { "maxFiles": "The maximum number of files that can be created for the file system.", "mountPoint": "The path to the file system.", "name": "The name of the file system.", "total": "The total number of disk space available for the file system, in kilobytes.", "used": "The amount of disk space used by files in the file system, in kilobytes.", "usedFilePercent": "The percentage of available files in use.", "usedFiles": "The number of files in the file system.", "usedPercent": "The percentage of the file-system disk space in use.", }, "loadAverageMinute": { "fifteen": "The number of processes requesting CPU time over the last 15 minutes.", "five": "The number of processes requesting CPU time over the last 5 minutes.", "one": "The number of processes requesting CPU time over the last minute.", }, "memory": { "active": "The amount of assigned memory, in kilobytes.", "buffers": "The amount of memory used for buffering I/O requests prior to writing to the storage device, in kilobytes.", "cached": "The amount of memory used for caching file system–based I/O.", "dirty": "The amount of memory pages in RAM that have been modified but not written to their related data block in storage, in kilobytes.", "free": "The amount of unassigned memory, in kilobytes.", "hugePagesFree": "The number of free huge pages.Huge pages are a feature of the Linux kernel.", "hugePagesRsvd": "The number of committed huge pages.", "hugePagesSize": "The size for each huge pages unit, in kilobytes.", "hugePagesSurp": "The number of available 
surplus huge pages over the total.", "hugePagesTotal": "The total number of huge pages for the system.", "inactive": "The amount of least-frequently used memory pages, in kilobytes.", "mapped": "The total amount of file-system contents that is memory mapped inside a process address space, in kilobytes.", "pageTables": "The amount of memory used by page tables, in kilobytes.", "slab": "The amount of reusable kernel data structures, in kilobytes.", "total": "The total amount of memory, in kilobytes.", "writeback": "The amount of dirty pages in RAM that are still being written to the backing storage, in kilobytes.", }, "network": { "interface": "The identifier for the network interface being used for the DB instance.", "rx": "The number of bytes received per second.", "tx": "The number of bytes uploaded per second.", }, "processList": { "cpuUsedPc": "The percentage of CPU used by the process.", "id": "The identifier of the process.", "memoryUsedPc": "The amount of memory used by the process, in kilobytes.", "name": "The name of the process.", "parentID": "The process identifier for the parent process of the process.", "rss": "The amount of RAM allocated to the process, in kilobytes.", "tgid": "The thread group identifier, which is a number representing the process ID to which a thread belongs.This identifier is used to group threads from the same process.", "vss": "The amount of virtual memory allocated to the process, in kilobytes.", "vmlimit": "TODO", }, "swap": { "cached": "The amount of swap memory, in kilobytes, used as cache memory.", "free": "The total amount of swap memory free, in kilobytes.", "total": "The total amount of swap memory available, in kilobytes.", "in": "Number of kilobytes the system has swapped in from disk per second (disk reads).", "out": "Number of kilobytes the system has swapped out to disk per second (disk writes).", }, "tasks": { "blocked": "The number of tasks that are blocked.", "running": "The number of tasks that are running.", 
"sleeping": "The number of tasks that are sleeping.", "stopped": "The number of tasks that are stopped.", "total": "The total number of tasks.", "zombie": "The number of child tasks that are inactive with an active parent task.", }, "disk": { "bytes_read": "The total number of kilobytes read. This metric is not available for Amazon Aurora.", "bytes_written": "The total number of kilobytes written. This metric is not available for Amazon Aurora.", }, } ) func main() { f, err := os.Create("metrics.go") if err != nil { log.Fatal(err) } defer f.Close() groups := []Group{} for groupName, doc := range docs { metrics := []Metric{} for metricName, metricHelp := range doc { metric := Metric{ Group: groupName, Name: metricName, Help: metricHelp, } metrics = append(metrics, metric) } sort.SliceStable(metrics, func(i, j int) bool { return metrics[i].Name < metrics[j].Name }) group := Group{ Name: groupName, metrics: metrics, } groups = append(groups, group) } sort.SliceStable(groups, func(i, j int) bool { return groups[i].Name < groups[j].Name }) packageTemplate.Execute(f, struct { Groups []Group }{ Groups: groups, }) } var packageTemplate = template.Must(template.New("").Parse(`// Code generated by go generate; DO NOT EDIT. package enhanced import ( "github.com/prometheus/client_golang/prometheus" ) var Metrics = map[string]Metric{ {{- range .Groups }} {{- range .Metrics }} "{{.FqName}}" : { Name: "{{.Name}}", Desc: prometheus.NewDesc( "{{.FqName}}", "{{.Help}}", {{printf "%#v" .Labels}}, {{printf "%#v" .ConstLabels}}, ), }, {{- end }} {{- end }} } `))
enhanced/generate/main.go
0.580947
0.441372
main.go
starcoder
package indexspace import ( "fmt" "errors" ) /* Matrix represents an [m][n] matrix The Matrix type is intended for small matrix manipulations, and providing a tailored interface to geometric questions that arise in the manipulation of index spaces and domains of computation */ type Matrix [][]float64 // Clone creates a deep copy of the receiver matrix func (m Matrix) Clone() Matrix { newMatrix := make([][]float64, m.Rows()) for i := 0; i < m.Rows(); i++ { if newMatrix[i] == nil { newMatrix[i] = make([]float64, m.Rows()) } for j := 0; j < m.Columns(); j++ { newMatrix[i][j] = m[i][j] } } return newMatrix } // Rows returns the number of rows in the Matrix func (m Matrix) Rows() int { return len(m) } // Columns returns the number of columns in the Matrix func (m Matrix) Columns() int { if m.Rows() == 0 { return 0 } return len(m[0]) } // Dimensionality returns the dimension of the Matrix func (mat Matrix) Dimensionality() int { return mat.Columns() } // NewMatrix creates a ready to use Matrix of size rows x columns // This is an alternative to using a Matrix shell with AddRow to create matrices. 
func NewMatrix(rows, columns int) Matrix { mat := make(Matrix, rows) for i := 0; i < rows; i++ { mat[i] = make(Vector, columns) } return mat } // NewMatrixShell creates an empty Matrix slice that can be used with AddRow to compose a Matrix func NewMatrixShell() Matrix { return make(Matrix, 0) // since we are using append to add to the slice } // AddRow checks the dimensionality of the argument and adds it if consistent, or returns old set and error func (mat Matrix) AddRow(vec Vector) (Matrix, error) { if len(mat) != 0 && len(vec) != len(mat[0]) { return mat, errors.New("Dimensionality of the new vector is not consistent with the existing matrix") } mat = append(mat, vec) return mat, nil } // GetRow returns the row vector at index 'index' func (mat Matrix) GetRow(index int) (Vector, error) { if index >= len(mat) { return nil, errors.New("Index out of bound") } return mat[index], nil } /* String represents the Matrix in row-order form */ func (mat Matrix) String() string { if len(mat) == 0 { return "Empty Matrix" } var str string dim := mat.Columns() for _, v := range mat { for i := 0; i < dim; i++ { str = str + fmt.Sprintf("%6.2f ", v[i]) } str = str + "\n" } return str }
indexspace/matrix.go
0.827201
0.69298
matrix.go
starcoder
// Package vision wraps the Cloud Vision API and provides an auth method that // allows you to check an input image for an input term. package vision import ( "context" "io" "net/url" "os" "strings" vision "cloud.google.com/go/vision/apiv1" pb "google.golang.org/genproto/googleapis/cloud/vision/v1" ) // Auth takes a picture and a term and compares them to each other to see if // the an item matching the input term is contained in the image func Auth(term, file string) (AuthResult, error) { var image *pb.Image var err error if isValidURL(file) { image = vision.NewImageFromURI(file) } else { image, err = imageFromFile(file) if err != nil { return AuthResult{}, err } } return compareAuth(image, term) } // AuthFromReader takes a picture and a term and compares them to each other // to see if the an item matching the input term is contained in the image func AuthFromReader(term string, file io.Reader) (AuthResult, error) { image, err := vision.NewImageFromReader(file) if err != nil { return AuthResult{}, err } return compareAuth(image, term) } func compareAuth(image *pb.Image, term string) (AuthResult, error) { resp, err := findLabels(image) if err != nil { return resp, err } resp.AuthTerm(term) return resp, nil } func findLabels(image *pb.Image) (AuthResult, error) { ctx := context.Background() resp := AuthResult{} client, err := vision.NewImageAnnotatorClient(ctx) if err != nil { return resp, err } defer client.Close() annotations, err := client.DetectLabels(ctx, image, nil, 10) if err != nil { return resp, err } resp.Raw = annotations return resp, nil } func imageFromFile(path string) (*pb.Image, error) { f, err := os.Open(path) if err != nil { return nil, err } img, err := vision.NewImageFromReader(f) if err != nil { return nil, err } return img, nil } func isValidURL(toTest string) bool { r, err := url.ParseRequestURI(toTest) if err != nil { return false } if r.Scheme == "http" || r.Scheme == "https" || r.Scheme == "gs" { return true } return false } // AuthResult 
is the return from auth operations. It allows us to show // tbe pure result and the work. type AuthResult struct { Result bool `json:"result"` Raw []*pb.EntityAnnotation `json:"raw"` } // AuthTerm does the check to see if the language query worked func (l *AuthResult) AuthTerm(term string) { var labels []string for _, annotation := range l.Raw { labels = append(labels, annotation.Description) } for _, v := range labels { if strings.Contains(strings.ToUpper(v), strings.ToUpper(term)) { l.Result = true return } } return }
vision/vision.go
0.762778
0.408336
vision.go
starcoder
package main import ( "fmt" "strconv" "strings" "time" "github.com/google/go-tpm/tpm2" "github.com/google/go-tpm/tpmutil" ) // parseHandle parses a string (typically from the command line) into tpmutil.Handle func parseHandle(s string) (tpmutil.Handle, error) { i, err := strconv.ParseUint(s, 0, 32) return tpmutil.Handle(i), err } // parseDuration takes a string "<years>:<months>:<days>" and adds it to time.Now(). func parseDuration(s string) (time.Time, error) { var years, months, days int _, err := fmt.Sscanf(s, "%d:%d:%d", &years, &months, &days) return time.Now().UTC().AddDate(years, months, days), err } // parseOptionsMap breaks up a slice of <key>=<value> strings into a map. Used to parse // SSH certificate options and extensions. func parseOptionsMap(opt []string) map[string]string { m := make(map[string]string) for _, o := range opt { s := strings.SplitN(o, "=", 2) if len(s) > 1 { m[s[0]] = s[1] continue } m[s[0]] = "" } return m } // Parses a string of key properties as specified in the command line and returns // the propery value. For example "sign|fixedtpm|fixedparent" becomes // tpm2.FlagSign | tpm2.FlagFixedTPM | tpm2.FlagFixedParent. func parseKeyAttributes(s string) (tpm2.KeyProp, error) { var keyProp tpm2.KeyProp s = strings.Replace(s, " ", "", -1) for _, prop := range strings.Split(s, "|") { v, ok := stringToKeyAttribute[prop] if !ok { return keyProp, fmt.Errorf("unknown attribute property '%s'", prop) } keyProp |= v } return keyProp, nil } // Parses a string of NV properties as specified in the command line and returns // the propery value. For example "ownerwrite|ownerread|authread|ppread" becomes // tpm2.AttrOwnerWrite | tpm2.AttrOwnerRead | tpm2.AttrAuthRead | tpm2.AttrPPRead. 
func parseNVAttributes(s string) (tpm2.NVAttr, error) { var nvAttr tpm2.NVAttr s = strings.Replace(s, " ", "", -1) for _, prop := range strings.Split(s, "|") { v, ok := stringToNVAttribute[prop] if !ok { return nvAttr, fmt.Errorf("unknown attribute '%s'", prop) } nvAttr |= v } return nvAttr, nil } var stringToKeyAttribute = map[string]tpm2.KeyProp{ "fixedtpm": tpm2.FlagFixedTPM, "fixedparent": tpm2.FlagFixedParent, "sensitivedataorigin": tpm2.FlagSensitiveDataOrigin, "userwithauth": tpm2.FlagUserWithAuth, "adminwithpolicy": tpm2.FlagAdminWithPolicy, "noda": tpm2.FlagNoDA, "restricted": tpm2.FlagRestricted, "decrypt": tpm2.FlagDecrypt, "sign": tpm2.FlagSign, } var stringToNVAttribute = map[string]tpm2.NVAttr{ "ppwrite": tpm2.AttrPPWrite, "ownerwrite": tpm2.AttrOwnerWrite, "authwrite": tpm2.AttrAuthWrite, "policywrite": tpm2.AttrPolicyWrite, "policydelete": tpm2.AttrPolicyDelete, "writelocked": tpm2.AttrWriteLocked, "writeall": tpm2.AttrWriteAll, "writedefine": tpm2.AttrWriteDefine, "writestclear": tpm2.AttrWriteSTClear, "globallock": tpm2.AttrGlobalLock, "ppread": tpm2.AttrPPRead, "ownerread": tpm2.AttrOwnerRead, "authread": tpm2.AttrAuthRead, "policyread": tpm2.AttrPolicyRead, "noda": tpm2.AttrNoDA, "orderly": tpm2.AttrOrderly, "clearstclear": tpm2.AttrClearSTClear, "readlocked": tpm2.AttrReadLocked, "written": tpm2.AttrWritten, "platformcreate": tpm2.AttrPlatformCreate, "readstclear": tpm2.AttrReadSTClear, }
cmd/tpmk/args.go
0.608478
0.404802
args.go
starcoder
package rules func (this piece) getPawnCoverageFrom(from square, board board) (covered []square) { var captures = whitePawnCaptureOffsets if this.Player() == Black { captures = blackPawnCaptureOffsets } for _, offset := range captures { covered = append(covered, from.Offset(offset)) } return covered } func (this piece) calculatePawnMovesFrom(from square, board board) (moves []move) { return NewPawnMoveCalculator(this, from, board).calculateLegalMoves() } /**************************************************************************/ type PawnMoveCalculator struct { piece piece from square board board advancement []square captures []square promotions []piece moves []move } func NewPawnMoveCalculator(piece piece, from square, board board) *PawnMoveCalculator { calculator := &PawnMoveCalculator{ piece: piece, from: from, board: board, } calculator.determinePossibilities() return calculator } func (this *PawnMoveCalculator) determinePossibilities() { if this.piece.Player() == White { this.determineWhitePawnPossibilities() } else { this.determineBlackPawnPossibilities() } } func (this *PawnMoveCalculator) determineWhitePawnPossibilities() { this.captures = whitePawnCaptureOffsets this.promotions = whitePawnPromotions if this.from.Rank() == "2" { this.advancement = whitePawnInitialAdvancementOffsets } else { this.advancement = whitePawnAdvancementOffsets } } func (this *PawnMoveCalculator) determineBlackPawnPossibilities() { this.captures = blackPawnCaptureOffsets this.promotions = blackPawnPromotions if this.from.Rank() == "7" { this.advancement = blackPawnInitialAdvancementOffsets } else { this.advancement = blackPawnAdvancementOffsets } } func (this *PawnMoveCalculator) calculateLegalMoves() []move { this.calculateAdvancements() this.calculateCaptures() return this.moves } func (this *PawnMoveCalculator) calculateAdvancements() { for _, offset := range this.advancement { if target := this.from.Offset(offset); this.canAdvanceTo(target) { this.calculateAdvancement(target) 
} } } func (this *PawnMoveCalculator) canAdvanceTo(target square) bool { return this.board.GetPieceAt(target) == Void } func (this *PawnMoveCalculator) calculateAdvancement(target square) { if this.canPromoteOnNextMove(target) { this.appendAdvancementPromotions(target) } else { this.appendAdvancement(target) } } func (this *PawnMoveCalculator) canPromoteOnNextMove(target square) bool { rank := target.Rank() return rank == "8" || rank == "1" } func (this *PawnMoveCalculator) appendAdvancementPromotions(target square) { for _, promotion := range this.promotions { this.moves = append(this.moves, move{ Piece: this.piece, From: this.from, To: target, Promotion: promotion, }) } } func (this *PawnMoveCalculator) appendAdvancement(target square) { this.moves = append(this.moves, move{ Piece: this.piece, From: this.from, To: target, }) } func (this *PawnMoveCalculator) calculateCaptures() { for _, offset := range this.captures { targetSquare := this.from.Offset(offset) targetPiece := this.board.GetPieceAt(targetSquare) this.calculateCapture(targetSquare, targetPiece) } } func (this *PawnMoveCalculator) calculateCapture(targetSquare square, targetPiece piece) { isEnPassant := targetSquare == this.board.GetEnPassantTarget() if isEnPassant { if this.piece == WhitePawn { targetPiece = BlackPawn } else if this.piece == BlackPawn { targetPiece = WhitePawn } } if this.canCapture(targetPiece) { if this.canPromoteOnNextMove(targetSquare) { this.appendCapturingPromotions(targetSquare, targetPiece) } else { this.appendCapture(targetSquare, targetPiece, isEnPassant) } } } func (this *PawnMoveCalculator) canCapture(targetPiece piece) bool { return targetPiece.Player() == this.piece.Player().Other() } func (this *PawnMoveCalculator) appendCapturingPromotions(targetSquare square, targetPiece piece) { for _, promotion := range this.promotions { this.moves = append(this.moves, move{ Piece: this.piece, From: this.from, To: targetSquare, Captured: targetPiece, CapturedOn: targetSquare, 
Promotion: promotion, }) } } func (this *PawnMoveCalculator) appendCapture(targetSquare square, targetPiece piece, isEnPassant bool) { this.moves = append(this.moves, move{ Piece: this.piece, From: this.from, To: targetSquare, Captured: targetPiece, CapturedOn: targetSquare, EnPassant: isEnPassant, }) } /**************************************************************************/ func calculateEnPassantTarget(move move) square { if isBlackPawnDoubleAdvancement(move) { return IntSquare(move.To.Int() + 8) } else if isWhitePawnDoubleAdvancement(move) { return IntSquare(move.From.Int() + 8) } else { return IntSquare(-1) } } func isWhitePawnDoubleAdvancement(move move) bool { if move.Piece != WhitePawn { return false } if move.From.Rank() != "2" { return false } return move.To.Rank() == "4" } func isBlackPawnDoubleAdvancement(move move) bool { if move.Piece != BlackPawn { return false } if move.From.Rank() != "7" { return false } return move.To.Rank() == "5" }
rules/pieces_pawn.go
0.655887
0.447702
pieces_pawn.go
starcoder
package fastmatch import ( "bytes" "fmt" "math" ) // The maximum allowable state value. Can be overridden for testing. var maxState uint64 = math.MaxUint64 // stateMachine holds the mapping between a match and the intermediate state // changes (runes encountered) leading up to a match. type stateMachine struct { next uint64 base uint64 final map[string][]uint64 possible [][]rune changes []map[rune]uint64 noMore []map[rune][]string offset int continued *stateMachine collapsed map[string]uint64 } // foreachNoMore iterates over (length, final rune, key) tuples in the // stateMachine.noMore map. func (state *stateMachine) foreachNoMore(f func(int, rune, string)) { for len := range state.noMore { for r := range state.noMore[len] { for _, key := range state.noMore[len][r] { f(len, r, key) } } } } // newStateMachine initializes a stateMachine. func newStateMachine(keys []string) *stateMachine { state := &stateMachine{ next: 1, base: 1, final: make(map[string][]uint64, len(keys)), } for _, key := range keys { state.final[key] = make([]uint64, 0, len(key)) } return state } // makeNextStateMachine initializes an additional state machine once we've // exceeded the number of intermediate states which fit in a uint64. func (state *stateMachine) makeNextStateMachine(realOffset int) { offset := realOffset - state.offset if offset < 1 { // This should only be possible during testing, when maxState // != math.MaxUint64. panic("maxState too small") } // The current switch statement is incomplete, so truncate any // internal state learned on this pass. state.possible = state.possible[:offset] state.changes = state.changes[:offset] state.noMore = state.noMore[:offset] // Make a note of keys which finished at previous offsets; they don't // need to be copied to the next state machine. (This will be // zero-length if we're not doing partial matching.) 
finishedKeys := make(map[string]bool, len(state.final)) state.foreachNoMore(func(_ int, _ rune, key string) { finishedKeys[key] = true }) // Now create the next state machine, copying remaining keys to it. state.continued = &stateMachine{ next: 1, offset: realOffset, final: make(map[string][]uint64, len(state.final)-len(finishedKeys)), collapsed: make(map[string]uint64, len(state.final)-len(finishedKeys)), } for key := range state.final { if finishedKeys[key] { continue } // The current switch statement is incomplete, so forget any // intermediate state we've noted for this key. if state.offset == 0 { state.final[key] = state.final[key][:offset] } else { // Need to include initial value from previous // stateMachine. state.final[key] = state.final[key][:offset+1] } // The current sum gets "collapsed" into a new state value in // the next machine. Note that many keys may share the same // intermediate state. before := state.finalString(key) after := state.continued.collapsed[before] if after == 0 { after = state.continued.next state.continued.next++ state.continued.collapsed[before] = after } state.continued.final[key] = append(make([]uint64, 0, len(key)-realOffset+1), after) } state.continued.base = state.continued.next } // indexKeys assigns a unique state value to each possible state change. For // partial matching, this method also notes where the state should be checked // against possible final values. 
func (state *stateMachine) indexKeys(equiv runeEquivalents, partialMatch bool) {
	// Collect the keys and find the longest one; the scan below walks
	// every offset of the longest key.
	longestKey := 0
	keys := make([]string, 0, len(state.final))
	for key := range state.final {
		keys = append(keys, key)
		if len(key) > longestKey {
			longestKey = len(key)
		}
	}

	// needShift is true whenever state values were handed out at the
	// previous offset, meaning base must be re-anchored before new values
	// are assigned (so sums from different offsets can't collide).
	needShift := true
	state.possible = make([][]rune, longestKey-state.offset)
	state.changes = make([]map[rune]uint64, longestKey-state.offset)
	state.noMore = make([]map[rune][]string, longestKey-state.offset)
	for realOffset := state.offset; realOffset < longestKey; realOffset++ {
		// offset indexes this machine's slices; realOffset indexes the keys.
		offset := realOffset - state.offset
		state.possible[offset] = equiv.uniqueAtOffset(keys, realOffset)
		if len(state.possible[offset]) > 1 {
			if needShift {
				// This ensures new intermediate state values
				// do not overlap with previous ones.
				state.base = state.next
				needShift = false
			}
			state.changes[offset] = make(map[rune]uint64, len(keys))
			for _, r := range state.possible[offset] {
				needIncr := false
				for _, key := range keys {
					if partialMatch && realOffset >= len(key)-1 {
						// Key ends at or before this offset;
						// it is handled via noMore below.
						continue
					}
					// NOTE(review): key[realOffset] is a *byte*
					// index, so rune(key[realOffset]) only matches
					// r correctly for single-byte (ASCII) keys —
					// presumably keys are validated upstream;
					// confirm. Without partialMatch this also
					// assumes realOffset < len(key) for every key
					// (equal-length keys) — TODO confirm caller
					// guarantees this.
					if equiv.isEquiv(rune(key[realOffset]), r) {
						state.final[key] = append(state.final[key], state.next)
						needIncr = true
					}
				}
				if needIncr {
					// Overflow check: if next+base would exceed
					// maxState, chain a continuation machine and
					// let it index the remaining offsets.
					if state.base > maxState-state.next {
						state.makeNextStateMachine(realOffset)
						state.continued.indexKeys(equiv, partialMatch)
						return
					}
					state.changes[offset][r] = state.next
					state.next += state.base
					needShift = true
				}
			}
		} else {
			// All of the keys share the same rune at this offset,
			// so there's no state change. However, we still need
			// to write something to each key's state.final, so
			// that offsets within that array match key offset.
			// The zeroes will be omitted by state.finalString().
			for _, key := range keys {
				state.final[key] = append(state.final[key], 0)
			}
		}

		// For partial matching, record which keys end exactly at this
		// offset with which rune, so the generated code knows where a
		// match may terminate early.
		state.noMore[offset] = make(map[rune][]string, len(state.possible[offset]))
		if partialMatch {
			for _, r := range state.possible[offset] {
				for _, key := range keys {
					if len(key)-1 == realOffset && equiv.isEquiv(rune(key[realOffset]), r) {
						state.noMore[offset][r] = append(state.noMore[offset][r], key)
					}
				}
			}
		}
	}
}

// remove removes a string from a slice of strings if present, in the same
// manner the delete builtin can remove a key from a map. Returns nil when
// the result is empty.
//
// NOTE(review): after removing element n the loop does not revisit index n,
// so an immediately adjacent duplicate would be skipped; callers pass keys
// that originate from map keys and are therefore unique, so this doesn't
// bite in practice — confirm if reused elsewhere.
func remove(a []string, s string) []string {
	for n := 0; n < len(a); n++ {
		if a[n] == s {
			if n < len(a)-1 {
				copy(a[n:], a[n+1:])
			}
			a = a[:len(a)-1]
		}
	}
	if len(a) == 0 {
		return nil
	}
	return a
}

// deleteKey forgets about a possible match. This is called by checkAmbiguity
// to prune redundant keys, so that we don't output duplicate or unreachable
// case statements. It removes the key both from final and from every noMore
// entry.
func (state *stateMachine) deleteKey(key string) {
	delete(state.final, key)
	for _, noMore := range state.noMore {
		for r := range noMore {
			noMore[r] = remove(noMore[r], key)
		}
	}
}

// finalState returns the uint64 state value for a given key: the sum of the
// intermediate state values accrued at each offset.
func (state *stateMachine) finalState(key string) (sum uint64) {
	for _, value := range state.final[key] {
		sum += value
	}
	return
}

// finalString returns a string representing the final state of each key. To
// make the generated code slightly more readable, this consists of an
// expression summing each intermediate state value (in hex). Zero entries
// (offsets with no state change) are omitted; an all-zero key yields "0".
func (state *stateMachine) finalString(key string) string {
	var b bytes.Buffer
	for _, value := range state.final[key] {
		if value == 0 {
			continue
		}
		if b.Len() != 0 {
			b.WriteString(" + ")
		}
		b.WriteString(fmt.Sprintf("0x%x", value))
	}
	if b.Len() == 0 {
		return "0"
	}
	return b.String()
}
state.go
0.570092
0.465995
state.go
starcoder
package vector import ( "github.com/hsiafan/glow/v2/container/optional" "github.com/hsiafan/glow/v2/container/slicex" ) // Vector is a slice but with mutable data-pointer/len/cap. type Vector[T any] []T // Make create a new Vector func Make[T any](values ...T) Vector[T] { return values } // Append add a new value to the end of Vector func (v *Vector[T]) Append(value T) { s := []T(*v) *v = append(s, value) } // AppendAll add new values to the end of Vector func (v *Vector[T]) AppendAll(value ...T) { s := []T(*v) *v = append(s, value...) } // Extend add new values in another Vector to the end of Vector func (v *Vector[T]) Extend(vec Vector[T]) { s := []T(*v) *v = append(s, []T(vec)...) } // Insert inserts value at index func (v *Vector[T]) Insert(index int, value T) { s := []T(*v) *v = slicex.Insert(s, index, value) } // RemoveAt removes value at index func (v *Vector[T]) RemoveAt(index int) { s := []T(*v) *v = slicex.RemoveAt(s, index) } // RemoveBy removes all value match the given predicate func (v *Vector[T]) RemoveBy(predicate func(value T) bool) { s := []T(*v) *v = slicex.RemoveBy(s, predicate) } // At returns element at index func (v *Vector[T]) At(idx int) T { s := []T(*v) return s[idx] } // Set sets element at index func (v *Vector[T]) Set(idx int, value T) { s := []T(*v) s[idx] = value } // SetSlice sets elements begin from index func (v *Vector[T]) SetSlice(idx int, values []T) { s := []T(*v) copy(s[idx:], values) } // First returns the first element of Vector, in an Optional. // If Vector is empty, return empty Optional func (v *Vector[T]) First() optional.Optional[T] { s := []T(*v) if len(s) == 0 { return optional.Empty[T]() } return optional.OfValue(s[0]) } // Last returns the last element of Vector, in an Optional. 
// If Vector is empty, return empty Optional func (v *Vector[T]) Last() optional.Optional[T] { s := []T(*v) if len(s) == 0 { return optional.Empty[T]() } return optional.OfValue(s[len(s)-1]) } // RemoveLast remove and returns the last element of Vector, in an Optional. // If Vector is empty, return empty Optional func (v *Vector[T]) RemoveLast() optional.Optional[T] { s := []T(*v) if len(s) == 0 { return optional.Empty[T]() } o := optional.OfValue(s[len(s)-1]) *v = s[:len(s)-1] return o } // Clear vector content, but do not reset capacity func (v *Vector[T]) Clear() { s := []T(*v) *v = s[:0] } // Size returns elements count in Vector func (v *Vector[T]) Size() int { s := []T(*v) return len(s) } // Empty returns if Vector has no element func (v *Vector[T]) Empty() bool { s := []T(*v) return len(s) == 0 } // Reserve requests that the vector capacity be at least enough to contain n elements func (v *Vector[T]) Reserve(capacity int) { s := []T(*v) if cap(s) >= capacity { return } ns := make([]T, len(s), capacity) copy(ns, s) *v = ns } // ShrinkToFit requests the Vector to reduce its capacity to fit its size. func (v *Vector[T]) ShrinkToFit() { s := []T(*v) if cap(s) > len(s) { ns := slicex.Copy(s) *v = ns } } // Capacity returns capacity of Vector func (v *Vector[T]) Capacity() int { s := []T(*v) return cap(s) } // FindBy find first value match the given predicate, return the index. // If not found, return -1 func (v *Vector[T]) FindBy(predicate func(v T) bool) int { s := []T(*v) return slicex.FindBy(s, predicate) } // Select return a new Vector contains the values match given predicate func (v *Vector[T]) Select(predicate func(v T) bool) Vector[T] { s := []T(*v) return Vector[T](slicex.Select(s, predicate)) } // SortBy sorts the vector in place, a less function is specified to compare slice values. 
func (v *Vector[T]) SortBy(less func(v1, v2 T) bool) { s := []T(*v) slicex.SortBy(s, less) } // ForEach travers all elements in Vector func (v *Vector[T]) ForEach(f func(v T)) { s := []T(*v) for _, e := range s { f(e) } } // SortStableBy sorts the vector in place, with stable sort algorithm. A less function is specified to compare slice values. func (v *Vector[T]) SortStableBy(less func(v1, v2 T) bool) { s := []T(*v) slicex.SortStableBy(s, less) } // Copy return a new Vector contains same values as the origin Vector func (v *Vector[T]) Copy() Vector[T] { s := []T(*v) t := make([]T, len(s)) copy(t, s) return t } // Convert apply convert func on the origin Vector, return a new Vector contains the generated values. func Convert[T any, R any](s Vector[T], convert func(v T) R) Vector[R] { result := make([]R, len(s)) for idx, v := range []T(s) { result[idx] = convert(v) } return result } // AsSlice return the underlying slice, the slice share same data with Vector. func (v *Vector[T]) AsSlice() []T { return *v } // ToSlice return a new slice, the slice contains a copy of data in Vector. func (v *Vector[T]) ToSlice() []T { return slicex.Copy(*v) }
container/vector/vector.go
0.862612
0.591163
vector.go
starcoder
package bst

import (
	"fmt" // used by the traversal functions to print node values
)

// Node represents a node of a Binary Search Tree.
type Node struct {
	value int
	left  *Node
	right *Node
}

// Insert adds key to the BST rooted at root and returns the (possibly new)
// root. Inserting a key that is already present is a no-op, so the tree
// holds no duplicates.
func Insert(root *Node, key int) *Node {
	if root == nil {
		return &Node{value: key}
	}
	// Recurse into the side that keeps the BST ordering invariant.
	if key < root.value {
		root.left = Insert(root.left, key)
	} else if key > root.value {
		root.right = Insert(root.right, key)
	}
	return root
}

// Search returns the node holding key in the BST rooted at root, or nil if
// the key is not present.
func Search(root *Node, key int) *Node {
	if root == nil || root.value == key {
		return root
	}
	if key > root.value {
		return Search(root.right, key)
	}
	return Search(root.left, key)
}

// Minval returns the node with the smallest value in the subtree rooted at
// root (its leftmost node), or nil if root is nil. Delete uses it to locate
// the inorder successor.
func Minval(root *Node) *Node {
	if root == nil {
		return nil
	}
	current := root
	for current.left != nil {
		current = current.left
	}
	return current
}

// Delete removes key from the BST rooted at root and returns the new root.
// Deleting a key that is not present leaves the tree unchanged.
func Delete(root *Node, key int) *Node {
	if root == nil {
		// Base case: key not found.
		return nil
	}
	switch {
	case key < root.value:
		// Key, if present, lies in the left subtree.
		root.left = Delete(root.left, key)
	case key > root.value:
		// Key, if present, lies in the right subtree.
		root.right = Delete(root.right, key)
	default:
		// Found the node. With zero or one child, splice the
		// (possibly nil) child into this node's place.
		if root.left == nil {
			return root.right
		}
		if root.right == nil {
			return root.left
		}
		// Two children: copy the inorder successor's value (the
		// minimum of the right subtree) into this node, then delete
		// that successor from the right subtree.
		successor := Minval(root.right)
		root.value = successor.value
		root.right = Delete(root.right, successor.value)
	}
	return root
}

// PreorderTraverse prints the BST in Root, Left, Right order, one value per
// line.
func PreorderTraverse(root *Node) {
	if root == nil {
		return
	}
	fmt.Println(root.value)
	PreorderTraverse(root.left)
	PreorderTraverse(root.right)
}

// InorderTraverse prints the BST in Left, Root, Right order (i.e. in sorted
// order), one value per line.
func InorderTraverse(root *Node) {
	if root == nil {
		return
	}
	InorderTraverse(root.left)
	fmt.Println(root.value)
	InorderTraverse(root.right)
}

// PostorderTraverse prints the BST in Left, Right, Root order, one value per
// line.
func PostorderTraverse(root *Node) {
	if root == nil {
		return
	}
	PostorderTraverse(root.left)
	PostorderTraverse(root.right)
	fmt.Println(root.value)
}

// IdenticalTrees reports whether trees a and b have the same shape and hold
// the same values. Two nil trees are identical.
func IdenticalTrees(a *Node, b *Node) bool {
	if a == nil || b == nil {
		return a == b
	}
	return a.value == b.value &&
		IdenticalTrees(a.left, b.left) &&
		IdenticalTrees(a.right, b.right)
}
bst/binarySearchTree.go
0.705582
0.603289
binarySearchTree.go
starcoder