code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package seq
import (
"github.com/fogfish/golem/generic"
)
// AnyT is a Seq data type built of `generic.T` elements.
// It is defined as `[]generic.T` (a named type, not a true Go type alias).
type AnyT []generic.T
// Contain reports whether the sequence contains the element e.
func (seq AnyT) Contain(e generic.T) bool {
	sameAs := func(x generic.T) bool { return x == e }
	return seq.Exists(sameAs)
}
// ContainSlice tests if sequence contains a sub-sequence
// func (seq AnyT) ContainSlice(subseq AnyT) bool
// Count returns the number of elements that satisfy the predicate p.
func (seq AnyT) Count(p func(generic.T) bool) int {
	total := 0
	for _, item := range seq {
		if p(item) {
			total++
		}
	}
	return total
}
// Diff computes the difference between sequences: seq -- that
// func (seq AnyT) Diff(that AnyT) AnyT
// Distinct builds a new sequence keeping only the first occurrence of each
// element, preserving the original order. Runs in O(n^2) comparisons.
func (seq AnyT) Distinct() AnyT {
	unique := AnyT{}
	for _, item := range seq {
		if unique.Contain(item) {
			continue
		}
		unique = append(unique, item)
	}
	return unique
}
// Drop removes n elements from the head of the sequence and returns a copy
// of the remaining suffix. Values of n outside [0, len(seq)] are clamped so
// the call never panics: a negative n copies the whole sequence, an
// oversized n yields an empty sequence.
func (seq AnyT) Drop(n int) AnyT {
	if n < 0 {
		n = 0
	}
	if n > len(seq) {
		n = len(seq)
	}
	// seq[:0:0] forces a fresh backing array so the result does not alias seq.
	return append(seq[:0:0], seq[n:]...)
}
// DropWhile removes elements from the sequence head while the predicate
// returns true and returns a copy of the remaining suffix.
func (seq AnyT) DropWhile(p func(generic.T) bool) AnyT {
	for idx, item := range seq {
		if p(item) {
			continue
		}
		// First element that fails the predicate: copy the suffix from here.
		return append(seq[:0:0], seq[idx:]...)
	}
	return AnyT{}
}
// Exists tests if the predicate holds for at least one element.
func (seq AnyT) Exists(p func(generic.T) bool) bool {
	for idx := range seq {
		if p(seq[idx]) {
			return true
		}
	}
	return false
}
// Filter selects all elements which satisfy the predicate, preserving order.
func (seq AnyT) Filter(p func(generic.T) bool) AnyT {
	matched := AnyT{}
	for idx := range seq {
		if p(seq[idx]) {
			matched = append(matched, seq[idx])
		}
	}
	return matched
}
// Find returns the first element that satisfies the predicate; the zero
// value of generic.T is returned when no element matches.
func (seq AnyT) Find(p func(generic.T) bool) (e generic.T) {
	for _, candidate := range seq {
		if p(candidate) {
			e = candidate
			return
		}
	}
	return
}
// ForAll tests whether the predicate holds for every element of the sequence.
// NOTE(review): an empty sequence yields false here, which deviates from the
// usual vacuous-truth convention (ForAll over an empty set is normally true)
// — confirm callers depend on this before changing it.
func (seq AnyT) ForAll(p func(generic.T) bool) bool {
	if len(seq) == 0 {
		return false
	}
	for _, x := range seq {
		if !p(x) {
			return false
		}
	}
	return true
}
// FMap applies the high-order function (closure) f to every element of the
// sequence for its side effects; no result is collected.
func (seq AnyT) FMap(f func(generic.T)) {
	for _, x := range seq {
		f(x)
	}
}
// Fold applies the binary operator f over the sequence starting from the
// value empty. Note the argument order: the element comes first and the
// accumulator second, i.e. acc = f(x, acc).
func (seq AnyT) Fold(f func(generic.T, generic.T) generic.T, empty generic.T) generic.T {
	result := empty
	for _, item := range seq {
		result = f(item, result)
	}
	return result
}
// GroupBy shards the sequence into a map of sub-sequences keyed by the
// discriminator function f. Elements keep their original relative order
// within each shard.
func (seq AnyT) GroupBy(f func(generic.T) int) map[int]AnyT {
	s := make(map[int]AnyT)
	for _, x := range seq {
		key := f(x)
		// append on a nil slice allocates, so a missing key needs no
		// special case — this replaces the explicit exists-branch.
		s[key] = append(s[key], x)
	}
	return s
}
// Join appends the elements of subseq to seq and returns the combined
// sequence. NOTE(review): despite the original comment, no flattening takes
// place — this is a plain concatenation; append may reuse seq's backing
// array, so callers should only use the returned value.
func (seq AnyT) Join(subseq AnyT) AnyT {
	seq = append(seq, subseq...)
	return seq
}
// Intersect computes the intersection of sequences: seq ^ that
// func (seq AnyT) Intersect(that AnyT) AnyT
// Map applies the high-order function f to every element of the sequence
// and returns a new sequence of the results.
func (seq AnyT) Map(f func(generic.T) generic.T) AnyT {
	// Preallocate the full capacity: the output has exactly len(seq) elements.
	s := make(AnyT, 0, len(seq))
	for _, x := range seq {
		s = append(s, f(x))
	}
	return s
}
// Partition splits the sequence into two sequences according to the
// predicate: the first holds elements for which p is true, the second the
// rest. It is equivalent to consecutive calls to Filter/FilterNot.
func (seq AnyT) Partition(p func(generic.T) bool) (AnyT, AnyT) {
	matched, rest := AnyT{}, AnyT{}
	for _, item := range seq {
		switch {
		case p(item):
			matched = append(matched, item)
		default:
			rest = append(rest, item)
		}
	}
	return matched, rest
}
// Reverse returns a new sequence with elements in reserve order
// func (seq AnyT) Reverse() AnyT
// Span splits the sequences into prefix/suffix pair according to predicate
// It is equivalent of consequent calls to TakeWhile/DropWhile
// func (seq AnyT) Span(p Predicate) AnyT, AnyT
// Split partitions the sequence into nested sequences, cutting at every
// element for which the predicate holds.
// NOTE(review): the delimiter element is kept as the first element of the
// chunk that follows it (i is set to the delimiter's index, not past it),
// and the produced chunks alias seq's backing array — confirm both are
// intentional before relying on them.
func (seq AnyT) Split(p func(generic.T) bool) []AnyT {
	s := make([]AnyT, 0)
	// i marks the start of the current chunk.
	i := 0
	for j, x := range seq {
		if p(x) {
			if i != j {
				s = append(s, seq[i:j])
			}
			i = j
		}
	}
	// The trailing chunk (possibly the whole sequence) is always emitted.
	s = append(s, seq[i:])
	return s
}
// Take returns a copy of the first n elements of the sequence. Values of n
// outside [0, len(seq)] are clamped so the call never panics: a negative n
// yields an empty sequence, an oversized n a copy of the whole sequence.
func (seq AnyT) Take(n int) AnyT {
	if n < 0 {
		n = 0
	}
	if n > len(seq) {
		n = len(seq)
	}
	// seq[:0:0] forces a fresh backing array so the result does not alias seq.
	return append(seq[:0:0], seq[:n]...)
}
// TakeWhile accumulates elements from the sequence head while the predicate
// returns true and returns a copy of this prefix. When every element
// satisfies the predicate, a copy of the whole sequence is returned.
func (seq AnyT) TakeWhile(p func(generic.T) bool) AnyT {
	for i, x := range seq {
		if !p(x) {
			// seq[:0:0] forces a fresh backing array (no aliasing of seq).
			return append(seq[:0:0], seq[:i]...)
		}
	}
	return append(AnyT{}, seq...)
} | seq/seq.go | 0.807157 | 0.446676 | seq.go | starcoder |
package strassen
import (
"context"
"log"
"github.com/rossmerr/graphblas"
"github.com/rossmerr/graphblas/constraints"
)
// Multiply multiplies matrix a by matrix b using the Strassen algorithm,
// with a default crossover point of 64 (at or below that dimension the
// standard multiplication is used instead).
func Multiply[T constraints.Number](ctx context.Context, a, b graphblas.Matrix[T]) graphblas.Matrix[T] {
	return MultiplyCrossoverPoint(ctx, a, b, 64)
}
// MultiplyCrossoverPoint multiplies a matrix by another matrix using the
// Strassen algorithm. The crossover point is the dimension at or below which
// the standard matrix multiplication is used, which is more efficient for
// small matrices.
//
// NOTE(review): the recursion reads a.At(r+size, c+size) etc., so it assumes
// square operands with an even dimension at every level — confirm callers
// guarantee this.
func MultiplyCrossoverPoint[T constraints.Number](ctx context.Context, a, b graphblas.Matrix[T], crossover int) graphblas.Matrix[T] {
	if a.Columns() != b.Rows() {
		log.Panicf("Can not multiply matrices found length miss match %+v, %+v", a.Columns(), b.Rows())
	}
	n := b.Rows()
	if n <= crossover {
		// Base case: delegate to the standard multiplication.
		matrix := graphblas.NewDenseMatrix[T](a.Rows(), b.Columns())
		graphblas.MatrixMatrixMultiply[T](ctx, a, b, nil, matrix)
		return matrix
	}
	size := n / 2
	a11 := graphblas.NewDenseMatrix[T](size, size)
	a12 := graphblas.NewDenseMatrix[T](size, size)
	a21 := graphblas.NewDenseMatrix[T](size, size)
	a22 := graphblas.NewDenseMatrix[T](size, size)
	b11 := graphblas.NewDenseMatrix[T](size, size)
	b12 := graphblas.NewDenseMatrix[T](size, size)
	b21 := graphblas.NewDenseMatrix[T](size, size)
	b22 := graphblas.NewDenseMatrix[T](size, size)
	// dividing the matrices in 4 sub-matrices:
split:
	for r := 0; r < size; r++ {
		for c := 0; c < size; c++ {
			select {
			case <-ctx.Done():
				// BUG FIX: a plain break inside select only left the select
				// statement, so cancellation kept the loops spinning; leave
				// both loops via the label instead.
				break split
			default:
				a11.Set(r, c, a.At(r, c))           // top left
				a12.Set(r, c, a.At(r, c+size))      // top right
				a21.Set(r, c, a.At(r+size, c))      // bottom left
				a22.Set(r, c, a.At(r+size, c+size)) // bottom right
				b11.Set(r, c, b.At(r, c))           // top left
				b12.Set(r, c, b.At(r, c+size))      // top right
				b21.Set(r, c, b.At(r+size, c))      // bottom left
				b22.Set(r, c, b.At(r+size, c+size)) // bottom right
			}
		}
	}
	// Compute the seven Strassen products M1..M7 concurrently.
	out := make(chan *mPlace[T])
	go subMatrixM(ctx, out, 1, a11.Add(a22), b11.Add(b22), crossover)
	go subMatrixM[T](ctx, out, 2, a21.Add(a22), b11, crossover)
	go subMatrixM[T](ctx, out, 3, a11, b12.Subtract(b22), crossover)
	go subMatrixM[T](ctx, out, 4, a22, b21.Subtract(b11), crossover)
	go subMatrixM[T](ctx, out, 5, a11.Add(a12), b22, crossover)
	go subMatrixM(ctx, out, 6, a21.Subtract(a11), b11.Add(b12), crossover)
	go subMatrixM(ctx, out, 7, a12.Subtract(a22), b21.Add(b22), crossover)
	// Index 0 is unused so results can be addressed as m[1]..m[7].
	m := [8]graphblas.Matrix[T]{}
	for i := 0; i < 7; i++ {
		mtx := <-out
		m[mtx.m] = mtx.matrix
	}
	c11 := m[1].Add(m[4]).Subtract(m[5]).Add(m[7])
	c12 := m[3].Add(m[5])
	c21 := m[2].Add(m[4])
	c22 := m[1].Subtract(m[2]).Add(m[3]).Add(m[6])
	matrix := graphblas.NewDenseMatrix[T](c11.Rows()*2, c11.Rows()*2)
	shift := c11.Rows()
	// Combine the results
combine:
	for r := 0; r < c11.Rows(); r++ {
		for c := 0; c < c11.Columns(); c++ {
			select {
			case <-ctx.Done():
				// BUG FIX: see above — leave both loops on cancellation.
				break combine
			default:
				matrix.Set(r, c, c11.At(r, c))
				matrix.Set(r, c+shift, c12.At(r, c))
				matrix.Set(r+shift, c, c21.At(r, c))
				matrix.Set(r+shift, c+shift, c22.At(r, c))
			}
		}
	}
	return matrix
}
// subMatrixM computes one of the seven Strassen sub-products and sends the
// result, tagged with its index m (1..7), on the out channel.
func subMatrixM[T constraints.Number](ctx context.Context, out chan *mPlace[T], m int, a, b graphblas.Matrix[T], crossover int) {
	out <- &mPlace[T]{
		m: m,
		matrix: MultiplyCrossoverPoint(ctx, a, b, crossover),
	}
}
// mPlace carries a computed Strassen sub-product together with its index
// (1..7) so results arriving on a channel can be placed correctly.
type mPlace[T constraints.Number] struct {
	m int
	matrix graphblas.Matrix[T]
} | math/strassen/strassen.go | 0.750461 | 0.695079 | strassen.go | starcoder |
package objects
import "strconv"
// User structure for the user: a position in 3-D space plus presentation
// and messaging state.
type User struct {
	// X, Y, Z is the user's position in world coordinates.
	X, Y, Z int64
	// C is a string channel — presumably for outgoing messages to this
	// user; confirm against the callers that feed/drain it.
	C chan string
	// Name is the user's display name.
	Name string
	// SentTimeUpdate tracks a time-update counter — verify its exact
	// semantics with the code that reads it.
	SentTimeUpdate int
}
// ToString renders the user's position as "X, Y, Z" using base-10 integers.
func (u *User) ToString() string {
	format := func(v int64) string { return strconv.FormatInt(v, 10) }
	return format(u.X) + ", " + format(u.Y) + ", " + format(u.Z)
}
// Move shifts the user's position one unit in the given direction:
// 0 = +Y, 1 = +X, 2 = -X, 3 = -Y, 4 = +Z, 5 = -Z.
// Unknown direction codes leave the position unchanged.
func (u *User) Move(direction int) {
	switch direction {
	case 0:
		u.Y++
	case 1:
		u.X++
	case 2:
		u.X--
	case 3:
		u.Y--
	case 4:
		u.Z++
	case 5:
		u.Z--
	}
}
/*
Strength - A measure of the characters ability to exert physical
force. The attribute is also referred to as Physical Strength. The
Strength attribute factors into the ability to lift and carry heavy
loads; the amount of damage inflicted through a blow; and restricts
the types of manual tools that can be operated easily.
Dexterity - The relative ability to react physically to a brief
event. Dexterity is also called Agility, Reflexes, or Physical
Prowess. It differs from pure exertion of strength, and requires tight
coordination between the central nervous system and the
musculature. This attribute is used to evaluate most competitive
physical skills, such as striking a target with a missile, or dodging
a blow.
Fatigue - The short term physical energy that the character can expend
before becoming exhausted. It is also called Energy. This attribute
places a more realistic cap on the characters activities during a
battle, and is also used to limit magical powers.
Endurance - A rating of how much cumulative damage a creature can
withstand before dying. Endurance is also referred to as Hit Points or
Body. In systems that do not include defensive skills, Hit Points are
also used to measure the amount of combat experience. This mixture of
unrelated physiological and combat capabilities within a single
parameter can have a distorting effect, resulting in an unrealistic
model. However, it does have the side benefits of making combat less
bloody and more protracted at higher levels, as the characters are
rarely eliminated by a single blow.
Appearance - The physical attractiveness of the character,
particularly with regards to the opposite sex. This attribute has also
been called Comeliness and Beauty. The effect on game play is less
useful than most physical attributes, being required primarily during
social situations. However, physical beauty has been known to enjoy
subtle effects on the human psyche, inspiring loyalty and trust beyond
the norm.
Intelligence - The mental ability to remember facts and employ
reasoning to analyze a problem. Known also as I.Q., Memory, and
Reasoning. Intelligence is also a crude rating of the ability to use
mathematics, solve puzzles, learn a new language, create music, etc.
Quickness - The speed of reaction to a rapidly changing
situation. Quickness is also known as Reaction or Initiative. It
determines who reacts first during a crisis, allowing a quick witted
character to get the jump on a foe. Quickness is closely related to
the Dexterity attribute.
Size - The gross physical proportions of a creature. Also known as
Height or Growth. This attribute is often used to handle scaling
issues, such as visibility, melee reach, minimum opening required to
enter a room, etc.
Cool - This measures the ability of the character to remain calm under
conditions of duress, such as during combat or when facing a truly
terrifying sight. It is also called Morale, and is a measure of a
creatures steady reaction to a panic situation. In AD&D, this
Attribute is only used for creatures, and the morale aspects of the
characters are handled directly by the players.
Wisdom - An Attribute that measures a characters worldly knowledge and
common sense. Also known as empathy. It is used, probably incorrectly,
as a magical attribute for priestly magic. Most modern systems use
willpower or piety, and leave knowledge evaluation to a skills system,
various advantages, and/or role-playing.
Cunning - A primal form of intelligence available to many animals,
cunning measures a creatures ability to quickly formulate good
reactions to stressful conditions. For humans, this measures wits, and
is closely related (if not identical) to quickness. Cunning is useful
for evaluating a changing tactical situation, coming up with a witty
rejoinder during a discussion, taking advantage of a lucky break, and
so on.
Willpower - The amount of self-control a character has over his own
mind and body. This attribute is also called Mental Endurance, Self
Discipline, and Ego. It is often used as a measure of a characters
control over arcane forces, the ability to resist the imposition of
another's will, and the degree of vulnerability to fearful thoughts
and experiences. Willpower is closely related to cool.
Leadership - The ability to influence the behavior of others using a
commanding presence, persuasive dialogue, and appealing behavior. This
attribute has also called Charisma, Mental Affinity, Power, or
Presence. It does not imply an ability to lead sensibly, but does
enhance the loyalty and morale of friends and allies under
discouraging conditions. Some systems use a separate set of leadership
skills.
Fellowship - The ease with which a character associates with others in
a social environment. Many systems use a skill to handle this ability,
although some people do seem to have an innate ability to get along
well with others.
Movement - This derived attribute is used to determine how far a
creature can move during an interval of time. Movement is also called
Speed. Typically the movement rate is a fixed value for each race,
with a modifier based on the Dexterity Attribute. Separate factors are
also used for measuring different means of movement, such a swimming,
flying, tunneling, etc.
*/ | lib/objects/user.go | 0.573201 | 0.706026 | user.go | starcoder |
package cw
import (
"context"
"strings"
"time"
"unicode"
)
// WPMToSeconds returns the duration of a dit in seconds at the given speed
// in words per minute, using 50 dit units per word (the PARIS convention).
func WPMToSeconds(wpm int) float64 {
	ditsPerMinute := float64(50 * wpm)
	return 60 / ditsPerMinute
}
// BPMToSeconds returns the duration of a dit in seconds at the given speed
// in BpM; one word counts as five characters.
func BPMToSeconds(bpm int) float64 {
	const charsPerWord = 5
	return WPMToSeconds(charsPerWord * bpm)
}
// WPMToDit returns the duration of a dit at the given speed in WpM.
func WPMToDit(wpm int) time.Duration {
	seconds := WPMToSeconds(wpm)
	return time.Duration(seconds * float64(time.Second))
}
// BPMToDit returns the duration of a dit at the given speed in BpM.
func BPMToDit(bpm int) time.Duration {
	return WPMToDit(5 * bpm)
}
// Symbol represents the morse symbols: dits, das and breaks.
type Symbol struct {
	// Weight is the symbol's duration in dit units (1 for a dit, 3 for a da).
	Weight int
	// KeyDown reports whether the key is pressed for the symbol's duration.
	KeyDown bool
}
// All symbols, defined by their length in dit units and key state.
var (
	Dit = Symbol{1, true} // short mark: one unit, key down
	Da = Symbol{3, true} // long mark: three units, key down
	SymbolBreak = Symbol{1, false} // gap between symbols of one character
	CharBreak = Symbol{3, false} // gap between characters
	WordBreak = Symbol{7, false} // gap between words
)
// Code contains the morse code table, mapping a lower-case rune to its
// symbol sequence (without the inter-symbol breaks).
var Code = map[rune][]Symbol{
	// characters
	'a': {Dit, Da},
	'b': {Da, Dit, Dit, Dit},
	'c': {Da, Dit, Da, Dit},
	'd': {Da, Dit, Dit},
	'e': {Dit},
	'f': {Dit, Dit, Da, Dit},
	'g': {Da, Da, Dit},
	'h': {Dit, Dit, Dit, Dit},
	'i': {Dit, Dit},
	'j': {Dit, Da, Da, Da},
	'k': {Da, Dit, Da},
	'l': {Dit, Da, Dit, Dit},
	'm': {Da, Da},
	'n': {Da, Dit},
	'o': {Da, Da, Da},
	'p': {Dit, Da, Da, Dit},
	'q': {Da, Da, Dit, Da},
	'r': {Dit, Da, Dit},
	's': {Dit, Dit, Dit},
	't': {Da},
	'u': {Dit, Dit, Da},
	'v': {Dit, Dit, Dit, Da},
	'w': {Dit, Da, Da},
	'x': {Da, Dit, Dit, Da},
	'y': {Da, Dit, Da, Da},
	'z': {Da, Da, Dit, Dit},
	// diacritics
	'ä': {Dit, Da, Dit, Da},
	'ö': {Da, Da, Da, Dit},
	'ü': {Dit, Dit, Da, Da},
	// numbers
	'0': {Da, Da, Da, Da, Da},
	'1': {Dit, Da, Da, Da, Da},
	'2': {Dit, Dit, Da, Da, Da},
	'3': {Dit, Dit, Dit, Da, Da},
	'4': {Dit, Dit, Dit, Dit, Da},
	'5': {Dit, Dit, Dit, Dit, Dit},
	'6': {Da, Dit, Dit, Dit, Dit},
	'7': {Da, Da, Dit, Dit, Dit},
	'8': {Da, Da, Da, Dit, Dit},
	'9': {Da, Da, Da, Da, Dit},
	// punctuation
	'+': {Dit, Da, Dit, Da, Dit},
	'-': {Da, Dit, Dit, Dit, Dit, Da},
	'=': {Da, Dit, Dit, Dit, Da},
	'.': {Dit, Da, Dit, Da, Dit, Da},
	':': {Da, Da, Da, Dit, Dit, Dit},
	',': {Da, Da, Dit, Dit, Da, Da},
	';': {Da, Dit, Da, Dit, Da, Dit},
	'?': {Dit, Dit, Da, Da, Dit, Dit},
	'\'': {Dit, Da, Da, Da, Da, Dit},
	'"': {Dit, Da, Dit, Dit, Da, Dit},
	'(': {Da, Dit, Da, Da, Dit},
	')': {Da, Dit, Da, Da, Dit, Da},
	'_': {Dit, Dit, Da, Da, Dit, Da},
	'@': {Dit, Da, Da, Dit, Da, Dit},
	// specials
	'[': {Da, Dit, Da, Dit, Da}, // "Spruchanfang"
	']': {Dit, Dit, Dit, Da, Dit, Da}, // transmission end, "slient key"
	'%': {Dit, Dit, Dit, Da, Dit}, // understood, "seen"
	'~': {Dit, Da, Dit, Dit, Dit}, // wait
	'§': {Dit, Dit, Dit, Dit, Dit, Dit, Dit, Dit}, // correction
}
// WriteToSymbolStream writes the content of the given text as morse symbols
// to the given stream. The first written symbol is always a Dit or a Da
// (key down), the last written symbol is always a WordBreak (key up).
// Characters without a morse code and repeated whitespace are skipped.
// The function returns as soon as the context is canceled.
//
// BUG FIX: previously cancellation was only checked at the top of the outer
// loop, so the remaining symbols of the current character were still pushed
// after cancel, and the final assignment to the canceled flag was dead
// (staticcheck SA4006). Every write is now checked and returns immediately.
func WriteToSymbolStream(ctx context.Context, symbols chan<- Symbol, text string) {
	normalized := strings.ToLower(text)
	wasWhitespace := true
	for _, r := range normalized {
		if unicode.IsSpace(r) {
			if !wasWhitespace {
				if writeSymbol(ctx, symbols, WordBreak) {
					return
				}
			}
			wasWhitespace = true
			continue
		}
		code, knownCode := Code[r]
		if !knownCode {
			// Unsupported character: skip silently.
			continue
		}
		if !wasWhitespace {
			if writeSymbol(ctx, symbols, CharBreak) {
				return
			}
		}
		for i, s := range code {
			if i > 0 {
				if writeSymbol(ctx, symbols, SymbolBreak) {
					return
				}
			}
			if writeSymbol(ctx, symbols, s) {
				return
			}
		}
		wasWhitespace = false
	}
	if !wasWhitespace {
		// Terminate the transmission with a word break; the return value is
		// irrelevant because the function ends either way.
		writeSymbol(ctx, symbols, WordBreak)
	}
}
// writeSymbol sends one symbol on the channel and reports true when the
// context was canceled before the send could complete.
func writeSymbol(ctx context.Context, symbols chan<- Symbol, symbol Symbol) bool {
	select {
	case <-ctx.Done():
		return true
	case symbols <- symbol:
		return false
	}
}
// Send reads CW symbols from the given stream and transmits them using the
// given setKeyDown function with the given speed in WpM. It returns (with
// the key released) once the context is canceled.
//
// NOTE(review): polling with time.After every microsecond allocates a fresh
// timer per iteration and effectively busy-spins the scheduler; a single
// timer set to the symbol end would be cheaper — confirm the timing
// requirements before changing this.
func Send(ctx context.Context, setKeyDown func(bool), symbols <-chan Symbol, wpm int) {
	dit := WPMToDit(wpm)
	// Start with an end time in the past so the first symbol is read at once.
	symbolEnd := time.Now().Add(-1 * time.Second)
	keyDown := false
	canceled := false
	for {
		select {
		case now := <-time.After(1 * time.Microsecond):
			// Keep the current key state until the running symbol has elapsed.
			if now.Before(symbolEnd) {
				continue
			}
			symbolEnd, keyDown, canceled = decodeSymbol(ctx, symbols, dit)
			if canceled {
				setKeyDown(false)
				return
			}
			setKeyDown(keyDown)
		case <-ctx.Done():
			setKeyDown(false)
			return
		}
	}
}
// decodeSymbol waits for the next symbol and returns the wall-clock time at
// which it ends, its key state, and whether the context was canceled while
// waiting.
func decodeSymbol(ctx context.Context, symbols <-chan Symbol, dit time.Duration) (time.Time, bool, bool) {
	select {
	case symbol := <-symbols:
		// The symbol lasts Weight dit units from now.
		duration := time.Duration(symbol.Weight) * dit
		end := time.Now().Add(duration)
		keyDown := symbol.KeyDown
		return end, keyDown, false
	case <-ctx.Done():
		return time.Now(), false, true
	}
} | cw/cw.go | 0.736874 | 0.639609 | cw.go | starcoder |
package migrate
import (
"reflect"
"strconv"
"strings"
"time"
"github.com/google/uuid"
"github.com/neuronlabs/neuron/errors"
"github.com/neuronlabs/neuron/mapping"
)
// Predefined postgres column data types, grouped by SQL category.
var (
	/** Character types */
	// FChar is the 'char' field type.
	FChar = &ParameterDataType{SQLName: "char", DataType: DataType{Name: "char"}}
	// FVarChar is the 'varchar' field type.
	FVarChar = &ParameterDataType{SQLName: "varchar", DataType: DataType{Name: "varchar"}}
	// FText is the 'text' field type.
	FText = &BasicDataType{SQLName: "text", DataType: DataType{Name: "text"}}
	/** Numerics */
	// FSmallInt is the 2 bytes signed 'smallint' - int16.
	FSmallInt = &BasicDataType{SQLName: "smallint", DataType: DataType{Name: "smallint"}}
	// FInteger is the 4 bytes signed 'integer' type - int32.
	FInteger = &BasicDataType{SQLName: "integer", DataType: DataType{Name: "integer"}}
	// FBigInt is the 8 bytes signed 'bigint' type - int64.
	FBigInt = &BasicDataType{SQLName: "bigint", DataType: DataType{Name: "bigint"}}
	// FDecimal is the variable 'decimal' type.
	FDecimal = &ParameterDataType{SQLName: "decimal", DataType: DataType{Name: "decimal"}}
	// FNumeric is the variable 'numeric' type.
	FNumeric = &ParameterDataType{SQLName: "numeric", DataType: DataType{Name: "numeric"}}
	// FReal is the 4 bytes - 6 decimal digits precision 'real' type.
	FReal = &BasicDataType{SQLName: "real", DataType: DataType{Name: "real"}}
	// FDouble is the 8 bytes - 15 decimal digits precision 'double precision' type.
	FDouble = &BasicDataType{SQLName: "double precision", DataType: DataType{Name: "double"}}
	// FSerial is the 4 bytes - autoincrement integer 'serial' type.
	FSerial = &BasicDataType{SQLName: "serial", DataType: DataType{Name: "serial"}}
	// FBigSerial is the 8 bytes autoincrement big integer - 'bigserial' type.
	FBigSerial = &BasicDataType{SQLName: "bigserial", DataType: DataType{Name: "bigserial"}}
	// FUUID is the uuid type.
	FUUID = &BasicDataType{SQLName: "uuid", DataType: DataType{Name: "uuid"}}
	// FLTree is the LTree extension type.
	FLTree = &BasicDataType{SQLName: "ltree", DataType: DataType{Name: "ltree"}}
	// FHStore is the HStore extension type.
	FHStore = &BasicDataType{SQLName: "hstore", DataType: DataType{Name: "hstore"}}
	/** Binary */
	// FBytea is the 1 or 4 bytes plus the actual binary string data type 'bytea'.
	FBytea = &BasicDataType{SQLName: "bytea", DataType: DataType{Name: "bytea"}}
	// FBoolean is the 'boolean' pq data type.
	FBoolean = &BasicDataType{SQLName: "boolean", DataType: DataType{Name: "boolean"}}
	/** Date / Time */
	// FDate is the 'date' field kind.
	FDate = &BasicDataType{SQLName: "date", DataType: DataType{Name: "date"}}
	// FTimestamp is the 'timestamp' without time zone data type.
	FTimestamp = &OptionalParameterDataType{SQLNames: []string{"timestamp"}, ParameterIndex: 1, DataType: DataType{Name: "timestamp"}}
	// FTimestampTZ is the 'timestamp with time zone' data type.
	FTimestampTZ = &OptionalParameterDataType{SQLNames: []string{"timestamp", "with time zone"}, ParameterIndex: 1, DataType: DataType{Name: "timestamptz"}}
	// FTime is the 'time' without time zone data type.
	FTime = &OptionalParameterDataType{SQLNames: []string{"time"}, ParameterIndex: 1, DataType: DataType{Name: "time"}}
	// FTimeTZ is the 'time with time zone' data type.
	FTimeTZ = &OptionalParameterDataType{SQLNames: []string{"time", "with time zone"}, ParameterIndex: 1, DataType: DataType{Name: "timetz"}}
)
// dataTypes is the global registry mapping key names to data types, together
// with the default mappings from Go kinds/types to SQL data types.
var (
	dataTypes = make(map[string]DataTyper)
	// defaultKindDT maps Go reflect kinds to their default SQL data types.
	defaultKindDT = map[reflect.Kind]DataTyper{
		reflect.Bool: FBoolean,
		reflect.Int: FInteger,
		reflect.Int8: FSmallInt,
		reflect.Int16: FSmallInt,
		reflect.Int32: FInteger,
		reflect.Int64: FBigInt,
		reflect.Uint: FInteger,
		reflect.Uint8: FSmallInt,
		reflect.Uint16: FSmallInt,
		reflect.Uint32: FInteger,
		reflect.Uint64: FBigInt,
		reflect.String: FText,
		reflect.Float32: FReal,
		reflect.Float64: FDouble,
	}
	// defaultTypeDT maps concrete Go types to their default SQL data types;
	// it takes precedence over the kind-based defaults.
	defaultTypeDT = map[reflect.Type]DataTyper{
		reflect.TypeOf(map[string]string{}): FHStore,
		reflect.TypeOf(time.Time{}): FTimestamp,
		reflect.TypeOf(&time.Time{}): FTimestamp,
		reflect.TypeOf(uuid.UUID{}): FUUID,
		reflect.TypeOf(&uuid.UUID{}): FUUID,
	}
	// defaultTypes lists the built-in data types registered by default.
	defaultTypes = []DataTyper{
		// Characters
		FChar, FVarChar, FText,
		// Numerics
		FInteger, FSmallInt, FBigInt, FDecimal, FNumeric, FReal, FDouble, FBigSerial, FSerial,
		// Binaries
		FBytea, FBoolean,
		// Times
		FDate, FTimestamp, FTimestampTZ, FTime, FTimeTZ,
		// UUID
		FUUID,
		// Extensions
		FLTree, FHStore,
	}
)
// ParameterSetter is implemented by data types that accept parameters
// (e.g. varchar(255)); SetParameters validates and stores them.
type ParameterSetter interface {
	SetParameters(params []string) error
}
// Cached reflect.Types used to detect []byte and *[]byte fields.
var (
	byteSlice = reflect.TypeOf([]byte{})
	byteSlicePtr = reflect.TypeOf(&[]byte{})
)
// findDataType resolves the SQL data type for the provided model field.
//
// Resolution order:
//  1. an explicit field.DatabaseType (possibly parameterized, e.g. "varchar(255)"),
//  2. serial/bigserial for integer primary keys,
//  3. special-cased Go types (uuid-like [16]uint8 arrays, []byte, timestamp fields),
//  4. the registered defaults per reflect.Type, then per reflect.Kind.
//
// Slices and arrays (other than byte slices) are wrapped in an ArrayDataType.
func findDataType(field *mapping.StructField) (DataTyper, error) {
	// For predefined database type
	if field.DatabaseType != "" {
		v, err := parseDataType(field.DatabaseType)
		if err != nil {
			return nil, err
		}
		dt, ok := dataTypes[v[0]]
		if !ok {
			return nil, errors.WrapDetf(mapping.ErrMapping, "model: '%s' field: '%s' database type: '%s' is unknown in the postgres repository", field.ModelStruct(), field, field.DatabaseType)
		}
		dt = dt.Copy()
		// BUG FIX: parseDataType always returns at least the type name at
		// v[0], so the previous 'len(v) > 0' guard was always true and
		// SetParameters (including its validator) ran with an empty list
		// for unparameterized declarations. Only set parameters when some
		// were actually parsed.
		if len(v) > 1 {
			if pm, ok := dt.(ParameterSetter); ok {
				if err := pm.SetParameters(v[1:]); err != nil {
					return nil, err
				}
			}
		}
		return dt, nil
	}
	t := field.ReflectField().Type
	if field.Kind() == mapping.KindPrimary {
		// by default for the integer primary keys set the serial or bigserial type
		switch t.Kind() {
		case reflect.Int, reflect.Int16, reflect.Int8, reflect.Int32, reflect.Uint, reflect.Uint32, reflect.Uint8, reflect.Uint16:
			return FSerial.Copy(), nil
		case reflect.Int64, reflect.Uint64:
			return FBigSerial.Copy(), nil
		}
	}
	// Check if the field is a UUID: a [16]uint8 array whose type name is
	// (case-insensitively) "uuid".
	if strings.ToLower(t.Name()) == "uuid" && t.Kind() == reflect.Array && t.Len() == 16 && t.Elem().Kind() == reflect.Uint8 {
		return FUUID, nil
	}
	if byteSlice == t || t == byteSlicePtr {
		return FBytea, nil
	}
	// Timestamp bookkeeping fields are stored with a time zone.
	if field.IsCreatedAt() || field.IsDeletedAt() || field.IsUpdatedAt() {
		return FTimestampTZ.Copy(), nil
	}
	var (
		isArray bool
		arrayLen int
	)
	if t.Kind() == reflect.Slice {
		isArray = true
		t = t.Elem()
		if t.Name() == "byte" {
			// Byte slice maps to bytea.
			return FBytea, nil
		}
	} else if t.Kind() == reflect.Array {
		isArray = true
		arrayLen = t.Len()
		t = t.Elem()
	}
	// at first check type
	dt, ok := defaultTypeDT[t]
	if !ok {
		if t.Kind() == reflect.Ptr {
			dt, ok = defaultTypeDT[t.Elem()]
		}
	}
	if !ok {
		// Fall back to the kind-based defaults (dereferencing pointers).
		k := t.Kind()
		if k == reflect.Ptr {
			k = t.Elem().Kind()
		}
		dt, ok = defaultKindDT[k]
		if !ok {
			return nil, errors.WrapDetf(errors.ErrInternal, "postgres field type not found. Model: '%s', Field: '%s'", field.ModelStruct().Type().Name(), field.Name())
		}
	}
	dt = dt.Copy()
	if isArray {
		dt = &ArrayDataType{Len: arrayLen, Subtype: dt}
	}
	return dt, nil
}
// ExternalDataTyper is the interface for data types whose column is created
// outside the table definition.
type ExternalDataTyper interface {
	DataTyper
	// ExternalFunction is the method used to create the column outside of the table definition.
	ExternalFunction(field *mapping.StructField) string
}
// DataTyper is the interface for basic data type methods.
type DataTyper interface {
	// KeyName gets the sql key name used to register and look up the type.
	KeyName() string
	// GetName creates the column type string used within the table definition.
	GetName() string
	// Copy returns an independent copy of the data type.
	Copy() DataTyper
}
// DataType is the pq base model defining the data type.
type DataType struct {
	Name string
}

// KeyName gets the name of the data type.
func (d *DataType) KeyName() string {
	return d.Name
}
// BasicDataType is the DataTyper for simple column types whose inline SQL
// definition is just the type's SQLName (e.g. "text", "boolean").
type BasicDataType struct {
	SQLName string
	DataType
}

// GetName creates the inline column definition on the base of its SQLName.
func (b *BasicDataType) GetName() string {
	return b.SQLName
}

// Copy returns an independent copy of the data type.
// BUG FIX: the previous implementation returned &(*b), which in Go is the
// very same pointer as b (no copy is made — see the spec on address
// operators), so callers shared and could mutate the registered value.
func (b *BasicDataType) Copy() DataTyper {
	cp := *b
	return &cp
}

// compile time check of BasicDataType
var _ DataTyper = &BasicDataType{}
// ParameterDataType is the data type that contains the variable parameters.
// i.e. varchar(2) has a single parameter '2'.
type ParameterDataType struct {
	DataType
	SQLName string
	// Validate, when non-nil, checks the parameters before they are stored.
	Validate func(params []string) error
	Parameters []string
}

// Copy implements DataTyper interface.
func (p *ParameterDataType) Copy() DataTyper {
	cp := &ParameterDataType{
		DataType: p.DataType,
		SQLName: p.SQLName,
		Validate: p.Validate,
		Parameters: make([]string, len(p.Parameters)),
	}
	copy(cp.Parameters, p.Parameters)
	return cp
}

// SetParameters validates (when a validator is configured) and stores the
// parameters for this data type.
func (p *ParameterDataType) SetParameters(params []string) error {
	if p.Validate != nil {
		err := p.Validate(params)
		if err != nil {
			return err
		}
	}
	p.Parameters = params
	return nil
}

// GetName creates the inline column definition on the base of it's SQLName and Parameters.
// NOTE(review): with no parameters set this renders as "name()" — confirm
// parameterized types are always used with at least one parameter.
func (p *ParameterDataType) GetName() string {
	return p.SQLName + "(" + strings.Join(p.Parameters, ",") + ")"
}
// OptionalParameterDataType is the data type that contains optional
// parameters spliced into its SQL name at ParameterIndex
// (e.g. "timestamp(6) with time zone").
type OptionalParameterDataType struct {
	DataType
	// SQLNames are the words of the SQL type name.
	SQLNames []string
	// ParameterIndex is the word position where the "(...)" group goes.
	ParameterIndex int
	// Parameters is the rendered word list after SetParameters.
	Parameters []string
}

// Copy implements DataTyper interface.
func (p *OptionalParameterDataType) Copy() DataTyper {
	cp := &OptionalParameterDataType{
		DataType: p.DataType,
		SQLNames: make([]string, len(p.SQLNames)),
		ParameterIndex: p.ParameterIndex,
		Parameters: make([]string, len(p.Parameters)),
	}
	copy(cp.SQLNames, p.SQLNames)
	copy(cp.Parameters, p.Parameters)
	return cp
}

// SetParameters renders the parameter group and splices it into the name
// words at ParameterIndex (replacing the word there, or appending when the
// index is one past the end).
// BUG FIX: the previous implementation appended to sub-slices of p.SQLNames,
// which could write through the shared backing array and corrupt SQLNames on
// repeated calls. The parameters are now built in a fresh slice.
func (p *OptionalParameterDataType) SetParameters(params []string) error {
	param := "(" + strings.Join(params, ",") + ")"
	parameters := make([]string, 0, len(p.SQLNames)+1)
	parameters = append(parameters, p.SQLNames[:p.ParameterIndex]...)
	parameters = append(parameters, param)
	if p.ParameterIndex < len(p.SQLNames) {
		parameters = append(parameters, p.SQLNames[p.ParameterIndex+1:]...)
	}
	p.Parameters = parameters
	return nil
}

// GetName creates the inline column definition on the base of it's SQLName and Parameters.
func (p *OptionalParameterDataType) GetName() string {
	if len(p.Parameters) == 0 {
		return strings.Join(p.SQLNames, " ")
	}
	return strings.Join(p.Parameters, " ")
}
// ArrayDataType describes a postgres array column; a Len of zero means an
// unbounded array.
type ArrayDataType struct {
	Len int
	Subtype DataTyper
}

// Copy implements DataTyper.
func (a *ArrayDataType) Copy() DataTyper {
	return &ArrayDataType{
		Len: a.Len,
		Subtype: a.Subtype.Copy(),
	}
}

// KeyName returns the registry key of the array type, e.g. "integer[]".
func (a *ArrayDataType) KeyName() string {
	return a.Subtype.KeyName() + "[]"
}

// GetName renders the SQL column definition, including the bound when set.
func (a *ArrayDataType) GetName() string {
	if a.Len == 0 {
		return a.Subtype.GetName() + "[]"
	}
	return a.Subtype.GetName() + "[" + strconv.Itoa(a.Len) + "]"
}
// RegisterDataType registers the provided datatype in the global registry,
// keyed by its KeyName. It fails when that key is already registered.
func RegisterDataType(dt DataTyper) error {
	return registerDataType(dt)
}
// registerDataType adds dt to the global registry, rejecting duplicates.
func registerDataType(dt DataTyper) error {
	key := dt.KeyName()
	if _, exists := dataTypes[key]; exists {
		return errors.WrapDetf(errors.ErrInternal, "postgres data type: '%s' is already registered", key)
	}
	dataTypes[key] = dt
	return nil
}
// MapDataType maps provided type 't' to the given data type 'dt'. The
// optional override flag allows replacing an existing mapping.
func MapDataType(t interface{}, dt DataTyper, override ...bool) error {
	return registerRefTypeDT(reflect.TypeOf(t), dt, override...)
}
// RegisterRefTypeDT registers the default data type for the provided
// reflect.Type. The optional override flag allows replacing an existing
// mapping.
func RegisterRefTypeDT(t reflect.Type, dt DataTyper, override ...bool) error {
	return registerRefTypeDT(t, dt, override...)
}
// registerRefTypeDT stores dt as the default data typer for reflect type t.
// Unless override is requested, registering over an existing entry fails.
func registerRefTypeDT(t reflect.Type, dt DataTyper, override ...bool) error {
	allowOverride := len(override) > 0 && override[0]
	if _, exists := defaultTypeDT[t]; exists && !allowOverride {
		return errors.WrapDetf(errors.ErrInternal, "default data typer is already set for given type: '%s'", t.Name())
	}
	defaultTypeDT[t] = dt
	return nil
}
// parseDataType splits a database type declaration like "varchar(255)" into
// the type name followed by its raw parameters, e.g. ["varchar", "255"].
// A declaration without parentheses yields a single-element slice; a "("
// without a trailing ")" is an error.
// NOTE(review): parameters are not whitespace-trimmed, so "varchar(2, 3)"
// yields " 3" — confirm downstream tolerates this.
func parseDataType(v string) ([]string, error) {
	i := strings.Index(v, "(")
	if i == -1 {
		return []string{v}, nil
	} else if v[len(v)-1] != ')' {
		return nil, errors.WrapDetf(errors.ErrInternal, "invalid postgres DataType value: '%s'", v)
	}
	return append([]string{v[:i]}, strings.Split(v[i+1:len(v)-1], ",")...), nil
} | repository/postgres/migrate/types.go | 0.516839 | 0.498779 | types.go | starcoder |
package bls12381
// pair couples a G1 point with a G2 point as one input of the pairing.
type pair struct {
	g1 *PointG1
	g2 *PointG2
}

// newPair builds a pair from the given points.
func newPair(g1 *PointG1, g2 *PointG2) pair {
	return pair{g1, g2}
}
// Engine is BLS12-381 elliptic curve pairing engine
type Engine struct {
	// G1 and G2 expose the two source groups of the pairing.
	G1 *G1
	G2 *G2
	// fp12 and fp2 are the tower-field arithmetic backends.
	fp12 *fp12
	fp2 *fp2
	// pairingEngineTemp embeds reusable scratch space.
	pairingEngineTemp
	// pairs accumulates the (G1, G2) inputs for the next pairing computation.
	pairs []pair
}
// NewEngine creates a new pairing engine instance with freshly constructed
// field towers, groups, and scratch temporaries.
func NewEngine() *Engine {
	fp2 := newFp2()
	fp6 := newFp6(fp2)
	fp12 := newFp12(fp6)
	g1 := NewG1()
	g2 := newG2(fp2)
	return &Engine{
		fp2: fp2,
		fp12: fp12,
		G1: g1,
		G2: g2,
		pairingEngineTemp: newEngineTemp(),
	}
}
// pairingEngineTemp holds preallocated temporaries reused across pairing
// operations to avoid per-call allocations.
type pairingEngineTemp struct {
	t2 [10]*fe2
	t12 [9]fe12
}

// newEngineTemp allocates the scratch elements: ten fe2 pointers and an
// array of nine fe12 values.
func newEngineTemp() pairingEngineTemp {
	t2 := [10]*fe2{}
	for i := 0; i < 10; i++ {
		t2[i] = &fe2{}
	}
	t12 := [9]fe12{}
	return pairingEngineTemp{t2, t12}
}
// AddPair adds a g1, g2 point pair to pairing engine. Pairs in which either
// point is zero (the identity) are skipped since they contribute the neutral
// element to the pairing product.
// NOTE(review): Affine normalizes the caller's points in place — confirm
// callers tolerate this mutation.
func (e *Engine) AddPair(g1 *PointG1, g2 *PointG2) *Engine {
	p := newPair(g1, g2)
	if !(e.G1.IsZero(p.g1) || e.G2.IsZero(p.g2)) {
		e.G1.Affine(p.g1)
		e.G2.Affine(p.g2)
		e.pairs = append(e.pairs, p)
	}
	return e
}
// AddPairInv adds a G1, G2 point pair to pairing engine. G1 point is negated.
func (e *Engine) AddPairInv(g1 *PointG1, g2 *PointG2) *Engine {
	// NOTE(review): the Set(g1) below looks redundant since Neg(ng1, g1)
	// should overwrite ng1 entirely — confirm against the G1 implementation.
	ng1 := e.G1.New().Set(g1)
	e.G1.Neg(ng1, g1)
	e.AddPair(ng1, g2)
	return e
}
// Reset deletes the accumulated pairs so the engine can be reused.
func (e *Engine) Reset() *Engine {
	e.pairs = make([]pair, 0)
	return e
}
// doublingStep doubles the G2 point r in place (projective coordinates)
// and writes the coefficients of the corresponding tangent-line function
// into coeff, for later evaluation against a G1 point in the Miller loop.
// NOTE(review): the formulas appear to follow the standard projective
// doubling used for ate pairings on curves with sextic twists — verify
// any intermediate step against the reference material, not this comment.
func (e *Engine) doublingStep(coeff *fe6, r *PointG2) {
    fp2 := e.fp2
    t := e.t2
    fp2.mul(t[0], &r[0], &r[1])
    fp2.mul0(t[0], t[0], twoInv)
    fp2.square(t[1], &r[1])
    fp2.square(t[2], &r[2])
    fp2.double(t[7], t[2])
    fp2.addAssign(t[7], t[2])
    fp2.mulByB(t[3], t[7])
    fp2.double(t[4], t[3])
    fp2.addAssign(t[4], t[3])
    fp2.add(t[5], t[1], t[4])
    fp2.mul0(t[5], t[5], twoInv)
    fp2.add(t[6], &r[1], &r[2])
    fp2.squareAssign(t[6])
    fp2.add(t[7], t[2], t[1])
    fp2.subAssign(t[6], t[7])
    fp2.sub(&coeff[0], t[3], t[1])
    fp2.square(t[7], &r[0])
    fp2.sub(t[4], t[1], t[4])
    fp2.mul(&r[0], t[4], t[0])
    fp2.square(t[2], t[3])
    fp2.double(t[3], t[2])
    fp2.addAssign(t[3], t[2])
    fp2.squareAssign(t[5])
    fp2.sub(&r[1], t[5], t[3])
    fp2.mul(&r[2], t[1], t[6])
    fp2.double(t[0], t[7])
    fp2.add(&coeff[1], t[0], t[7])
    fp2.neg(&coeff[2], t[6])
}
// additionStep mixes the (affine) point q into the projective point r in
// place and writes the coefficients of the chord-line function through the
// two points into coeff for later evaluation in the Miller loop.
// NOTE(review): order of operations is significant (scratch registers are
// reused aggressively) — do not reorder without checking the derivation.
func (e *Engine) additionStep(coeff *fe6, r, q *PointG2) {
    fp2 := e.fp2
    t := e.t2
    fp2.mul(t[0], &q[1], &r[2])
    fp2.neg(t[0], t[0])
    fp2.addAssign(t[0], &r[1])
    fp2.mul(t[1], &q[0], &r[2])
    fp2.neg(t[1], t[1])
    fp2.addAssign(t[1], &r[0])
    fp2.square(t[2], t[0])
    fp2.square(t[3], t[1])
    fp2.mul(t[4], t[1], t[3])
    fp2.mul(t[2], &r[2], t[2])
    fp2.mulAssign(t[3], &r[0])
    fp2.double(t[5], t[3])
    fp2.sub(t[5], t[4], t[5])
    fp2.addAssign(t[5], t[2])
    fp2.mul(&r[0], t[1], t[5])
    fp2.subAssign(t[3], t[5])
    fp2.mulAssign(t[3], t[0])
    fp2.mul(t[2], &r[1], t[4])
    fp2.sub(&r[1], t[3], t[2])
    fp2.mulAssign(&r[2], t[4])
    fp2.mul(t[2], t[1], &q[1])
    fp2.mul(t[3], t[0], &q[0])
    fp2.sub(&coeff[0], t[3], t[2])
    fp2.neg(&coeff[1], t[0])
    coeff[2].set(t[1])
}
// precompute runs the point half of the Miller loop once per pair: it
// walks bits 62..0 of the loop parameter x, collecting one set of line
// coefficients per doubling step and an extra set per addition step (at
// each set bit of x). The fixed size 68 corresponds to the 63 doubling
// steps plus the addition steps for the set bits of x.
func (e *Engine) precompute() [][68]fe6 {
    n := len(e.pairs)
    coeffs := make([][68]fe6, len(e.pairs))
    for i := 0; i < n; i++ {
        // Work on a copy so the stored pair's G2 point stays affine.
        r := new(PointG2).Set(e.pairs[i].g2)
        j := 0
        for k := 62; k >= 0; k-- {
            e.doublingStep(&coeffs[i][j], r)
            if x.Bit(k) != 0 {
                j++
                e.additionStep(&coeffs[i][j], r, e.pairs[i].g2)
            }
            j++
        }
    }
    return coeffs
}

// lineEval evaluates the j-th stored line of every pair at that pair's G1
// point and multiplies the result into f using the sparse mul014 routine.
func (e *Engine) lineEval(f *fe12, coeffs [][68]fe6, j int) {
    t := e.t2
    for i := 0; i < len(e.pairs); i++ {
        e.fp2.mul0(t[0], &coeffs[i][j][2], &e.pairs[i].g1[1])
        e.fp2.mul0(t[1], &coeffs[i][j][1], &e.pairs[i].g1[0])
        e.fp12.mul014(f, &coeffs[i][j][0], t[1], t[0])
    }
}
// millerLoop accumulates the Miller-loop product over all added pairs
// into f, consuming the precomputed line coefficients in step with the
// bits of the loop parameter x. The final conjugation accounts for x
// being negative (x = -15132376222941642752 for BLS12-381).
func (e *Engine) millerLoop(f *fe12) {
    coeffs := e.precompute()
    f.one()
    j := 0
    for i := 62; i >= 0; i-- {
        // No squaring on the very first iteration: f is still one.
        if i != 62 {
            e.fp12.square(f, f)
        }
        e.lineEval(f, coeffs, j)
        if x.Bit(i) != 0 {
            j++
            e.lineEval(f, coeffs, j)
        }
        j++
    }
    e.fp12.conjugate(f, f)
}
// exp raises element by x = -15132376222941642752.
// The absolute value is computed with an addition chain; the result is
// then conjugated (inversion in the cyclotomic subgroup) because x < 0.
func (e *Engine) exp(c, a *fe12) {
    // Adapted from https://github.com/supranational/blst/blob/master/src/pairing.c
    fp12 := e.fp12
    // chain multiplies the accumulator by a and then squares it n times.
    chain := func(n int) {
        fp12.mulAssign(c, a)
        for i := 0; i < n; i++ {
            fp12.cyclotomicSquare(c, c)
        }
    }
    fp12.cyclotomicSquare(c, a) // (a ^ 2)
    chain(2)                    // (a ^ (2 + 1)) ^ (2 ^ 2) = a ^ 12
    chain(3)                    // (a ^ (12 + 1)) ^ (2 ^ 3) = a ^ 104
    chain(9)                    // (a ^ (104 + 1)) ^ (2 ^ 9) = a ^ 53760
    chain(32)                   // (a ^ (53760 + 1)) ^ (2 ^ 32) = a ^ 230901736800256
    chain(16)                   // (a ^ (230901736800256 + 1)) ^ (2 ^ 16) = a ^ 15132376222941642752
    // invert chain result since x is negative
    fp12.conjugate(c, c)
}
// finalExp performs the final exponentiation of the pairing: the cheap
// "easy part" followed by the "hard part" per the cited paper, mapping
// the Miller-loop output into the target group.
func (e *Engine) finalExp(f *fe12) {
    fp12, t := e.fp12, e.t12
    // easy part
    fp12.inverse(&t[1], f)        // t1 = f0 ^ -1
    fp12.conjugate(&t[0], f)      // t0 = f0 ^ p6
    fp12.mul(&t[2], &t[0], &t[1]) // t2 = f0 ^ (p6 - 1)
    t[1].set(&t[2])               // t1 = f0 ^ (p6 - 1)
    fp12.frobeniusMap2(&t[2])     // t2 = f0 ^ ((p6 - 1) * p2)
    fp12.mulAssign(&t[2], &t[1])  // t2 = f0 ^ ((p6 - 1) * (p2 + 1))
    // f = f0 ^ ((p6 - 1) * (p2 + 1))
    // hard part
    // https://eprint.iacr.org/2016/130
    // On the Computation of the Optimal Ate Pairing at the 192-bit Security Level
    // Section 3
    // f ^ d = λ_0 + λ_1 * p + λ_2 * p^2 + λ_3 * p^3
    fp12.conjugate(&t[1], &t[2])
    fp12.cyclotomicSquare(&t[1], &t[1]) // t1 = f ^ (-2)
    e.exp(&t[3], &t[2])                 // t3 = f ^ (u)
    fp12.cyclotomicSquare(&t[4], &t[3]) // t4 = f ^ (2u)
    fp12.mul(&t[5], &t[1], &t[3])       // t5 = f ^ (u - 2)
    e.exp(&t[1], &t[5])                 // t1 = f ^ (u^2 - 2 * u)
    e.exp(&t[0], &t[1])                 // t0 = f ^ (u^3 - 2 * u^2)
    e.exp(&t[6], &t[0])                 // t6 = f ^ (u^4 - 2 * u^3)
    fp12.mulAssign(&t[6], &t[4])        // t6 = f ^ (u^4 - 2 * u^3 + 2 * u)
    e.exp(&t[4], &t[6])                 // t4 = f ^ (u^4 - 2 * u^3 + 2 * u^2)
    fp12.conjugate(&t[5], &t[5])        // t5 = f ^ (2 - u)
    fp12.mulAssign(&t[4], &t[5])        // t4 = f ^ (u^4 - 2 * u^3 + 2 * u^2 - u + 2)
    fp12.mulAssign(&t[4], &t[2])        // f_λ_0 = t4 = f ^ (u^4 - 2 * u^3 + 2 * u^2 - u + 3)
    fp12.conjugate(&t[5], &t[2])        // t5 = f ^ (-1)
    fp12.mulAssign(&t[5], &t[6])        // t5 = f ^ (u^4 - 2 * u^3 + 2 * u - 1)
    fp12.frobeniusMap1(&t[5])           // f_λ_1 = t5 = f ^ ((u^4 - 2 * u^3 + 2 * u - 1) ^ p)
    fp12.mulAssign(&t[3], &t[0])        // t3 = f ^ (u^3 - 2 * u^2 + u)
    fp12.frobeniusMap2(&t[3])           // f_λ_2 = t3 = f ^ ((u^3 - 2 * u^2 + u) ^ p^2)
    fp12.mulAssign(&t[1], &t[2])        // t1 = f ^ (u^2 - 2 * u + 1)
    fp12.frobeniusMap3(&t[1])           // f_λ_3 = t1 = f ^ ((u^2 - 2 * u + 1) ^ p^3)
    // out = f ^ (λ_0 + λ_1 + λ_2 + λ_3)
    fp12.mulAssign(&t[3], &t[1])
    fp12.mulAssign(&t[3], &t[5])
    fp12.mul(f, &t[3], &t[4])
}
// expDrop raises element by x = -15132376222941642752 / 2
// func (e *Engine) expDrop(c, a *fe12) {
// // Adapted from https://github.com/supranational/blst/blob/master/src/pairing.c
// fp12 := e.fp12
// chain := func(n int) {
// fp12.mulAssign(c, a)
// for i := 0; i < n; i++ {
// fp12.cyclotomicSquare(c, c)
// }
// }
// fp12.cyclotomicSquare(c, a) // (a ^ 2)
// chain(2) // (a ^ (2 + 1)) ^ (2 ^ 2) = a ^ 12
// chain(3) // (a ^ (12 + 1)) ^ (2 ^ 3) = a ^ 104
// chain(9) // (a ^ (104 + 1)) ^ (2 ^ 9) = a ^ 53760
// chain(32) // (a ^ (53760 + 1)) ^ (2 ^ 32) = a ^ 230901736800256
// chain(15) // (a ^ (230901736800256 + 1)) ^ (2 ^ 16) = a ^ 15132376222941642752 / 2
// // invert chain result since x is negative
// fp12.conjugate(c, c)
// }
// func (e *Engine) finalExp(f *fe12) {
// fp12, t := e.fp12, e.t12
// // easy part
// fp12.inverse(&t[1], f) // t1 = f0 ^ -1
// fp12.conjugate(&t[0], f) // t0 = f0 ^ p6
// fp12.mul(&t[2], &t[0], &t[1]) // t2 = f0 ^ (p6 - 1)
// t[1].set(&t[2]) // t1 = f0 ^ (p6 - 1)
// fp12.frobeniusMap2(&t[2]) // t2 = f0 ^ ((p6 - 1) * p2)
// fp12.mulAssign(&t[2], &t[1]) // t2 = f0 ^ ((p6 - 1) * (p2 + 1))
// // f = f0 ^ ((p6 - 1) * (p2 + 1))
// // hard part
// // https://eprint.iacr.org/2016/130
// // On the Computation of the Optimal Ate Pairing at the 192-bit Security Level
// // Section 4, Algorithm 2
// // f ^ d = λ_0 + λ_1 * p + λ_2 * p^2 + λ_3 * p^3
// f.set(&t[2])
// fp12.cyclotomicSquare(&t[0], f) // t0 = f ^ (2)
// e.exp(&t[1], &t[0]) // t1 = f ^ (2 * u)
// e.expDrop(&t[2], &t[1]) // t2 = f ^ (u ^ 2)
// fp12.conjugate(&t[3], f) // t3 = f ^ (-1)
// fp12.mulAssign(&t[1], &t[3]) // t1 = f ^ (2 * u - 1)
// fp12.conjugate(&t[1], &t[1]) // t1 = f ^ (-2 * u + 1 )
// fp12.mulAssign(&t[1], &t[2]) // f ^ λ_3 = &t[1] = f ^ (u^2 - 2 * u + 1)
// e.exp(&t[2], &t[1]) // f ^ λ_2 = &t[2] = f ^ (u^3 - 2 * u^2 + u)
// e.exp(&t[3], &t[2]) // t3 = f ^ (u^4 - 2 * u^3 + u^2)
// fp12.conjugate(&t[4], &t[1]) // t4 = f ^ (-λ_3)
// fp12.mulAssign(&t[3], &t[4]) // t2 = f ^ (λ_1)
// fp12.frobeniusMap3(&t[1]) // t1 = f ^ (λ_3 * (p ^ 3))
// fp12.frobeniusMap2(&t[2]) // t2 = f ^ (λ_2 * (p ^ 2))
// fp12.mulAssign(&t[1], &t[2]) // t1 = f ^ (λ_2 * (p ^ 2) + λ_3 * (p ^ 3))
// e.exp(&t[2], &t[3]) // t2 = f ^ (λ_1 * u)
// fp12.mulAssign(&t[2], &t[0]) // t2 = f ^ (λ_1 * u + 2)
// fp12.mulAssign(&t[2], f) // t2 = f ^ (λ_0 * u)
// // out = f ^ (λ_0 + λ_1 + λ_2 + λ_3)
// fp12.mulAssign(&t[1], &t[2])
// fp12.frobeniusMap1(&t[3])
// fp12.mul(f, &t[1], &t[3])
// }
// calculate executes the accumulated pairing product: one Miller loop
// followed by the final exponentiation. With no pairs added the result
// is the identity of the target group.
func (e *Engine) calculate() *fe12 {
    f := e.fp12.one()
    if len(e.pairs) == 0 {
        return f
    }
    e.millerLoop(f)
    e.finalExp(f)
    return f
}

// Check computes pairing and checks if result is equal to one.
// NOTE(review): unlike Result, Check does not Reset the engine — confirm
// callers are expected to reset (or reuse) the pairs themselves.
func (e *Engine) Check() bool {
    return e.calculate().isOne()
}

// Result computes pairing and returns target group element as result.
// The engine is reset afterwards, ready for a new batch of pairs.
func (e *Engine) Result() *E {
    r := e.calculate()
    e.Reset()
    return r
}
// GT returns target group instance.
func (e *Engine) GT() *GT {
return NewGT()
} | pairing.go | 0.615319 | 0.569254 | pairing.go | starcoder |
package rest
type RestClient interface {
// Makes a GET request to the shift api at "/staged".
// Returns result as array of mappings of strings to interfaces
// where each map in the array represents a migration, and each
// migration contains keys and values describing all of the fields
// of a migration.
Staged() (RestResponseItems, error)
// Makes a POST request to the shift api at "/unstage".
// Returns result as map of strings to interfaces, where keys and
// values describe all of the fields of a migration.
Unstage(params map[string]string) (RestResponseItem, error)
// Makes a POST request to the shift api at "/next_step".
// Returns result as map of strings to interfaces, where keys and
// values describe all of the fields of a migration.
NextStep(params map[string]string) (RestResponseItem, error)
// Makes a PUT request to the shift api at "/{id}".
// Returns result as map of strings to interfaces, where keys and
// values describe all of the fields of a migration.
Update(params map[string]string) (RestResponseItem, error)
// Makes a POST request to the shift api at "/complete".
// Returns result as map of strings to interfaces, where keys and
// values describe all of the fields of a migration.
Complete(params map[string]string) (RestResponseItem, error)
// Makes a POST request to the shift api at "/cancel".
// Returns result as map of strings to interfaces, where keys and
// values describe all of the fields of a migration.
Cancel(params map[string]string) (RestResponseItem, error)
// Makes a POST request to the shift api at "/fail".
// Returns result as map of strings to interfaces, where keys and
// values describe all of the fields of a migration.
Fail(params map[string]string) (RestResponseItem, error)
// Makes a POST request to the shift api at "/error".
// Returns result as map of strings to interfaces, where keys and
// values describe all of the fields of a migration.
Error(params map[string]string) (RestResponseItem, error)
// Makes a POST request to the shift api at "/offer".
// Returns result as map of strings to interfaces, where keys and
// values describe all of the fields of a migration
Offer(params map[string]string) (RestResponseItem, error)
// Makes a POST request to the shift api at "/unpin_run_host".
// Returns result as map of strings to interfaces, where keys and
// values describe all of the fields of a migration
UnpinRunHost(params map[string]string) (RestResponseItem, error)
// Makes a POST request to the shift api at "/append_to_file".
// Returns result as map of strings to interfaces, where keys and
// values describe all of the fields of the file minus the contents
AppendToFile(params map[string]string) (RestResponseItem, error)
// Makes a POST request to the shift api at "/write_file".
// Returns result as map of strings to interfaces, where keys and
// values describe all of the fields of the file minus the contents
WriteFile(params map[string]string) (RestResponseItem, error)
// Makes a GET request to the shift api at "/get_file".
// Returns result as map of strings to interfaces, where keys and
// values describe aff of the fields of the file including the contents
GetFile(params map[string]string) (RestResponseItem, error)
} | runner/pkg/rest/rest_api.go | 0.641085 | 0.455259 | rest_api.go | starcoder |
package builder
import (
"path"
"github.com/gravitational/gravity/lib/storage"
)
// DependencyForServer looks up a dependency in the list of sub-phases of the given phase
// that references the specified server and returns a reference to it.
// If no server has been found, it returns the reference to the phase itself.
func DependencyForServer(phase *Phase, server storage.Server) *Phase {
    for _, sub := range phase.phases {
        data := sub.p.Data
        if data == nil || data.Server == nil {
            continue
        }
        if data.Server.AdvertiseIP == server.AdvertiseIP {
            return sub
        }
    }
    return phase
}
// ResolveInline returns a new plan with phases from the specified root after resolving
// phase dependencies and rendering phase IDs as absolute.
func ResolveInline(root *Phase, emptyPlan storage.OperationPlan) *storage.OperationPlan {
    return Resolve(root.phases, emptyPlan)
}

// Resolve returns a new plan with specified phases after resolving
// phase dependencies and rendering phase IDs as absolute.
// The emptyPlan argument supplies all plan metadata; only its Phases
// field is populated here.
func Resolve(phases []*Phase, emptyPlan storage.OperationPlan) *storage.OperationPlan {
    // Rewrite relative IDs and requirements in place before flattening
    // the builder tree into the storage representation.
    resolveIDs(nil, phases)
    resolveRequirements(nil, phases)
    result := make([]storage.OperationPhase, len(phases))
    render(result, phases)
    plan := emptyPlan
    plan.Phases = result
    return &plan
}
// NewPhase returns a new phase using the specified phase as a template.
func NewPhase(phase storage.OperationPhase) *Phase {
    return &Phase{
        p: phase,
    }
}

// HasSubphases returns true if this phase has sub-phases.
func (p *Phase) HasSubphases() bool {
    return len(p.phases) != 0
}
// AddSequential will append sub-phases which depend one upon another:
// each appended sub-phase requires the previously appended one.
func (p *Phase) AddSequential(subs ...*Phase) {
    for i := range subs {
        if len(p.phases) != 0 {
            subs[i].Require(p.phases[len(p.phases)-1])
        }
        p.phases = append(p.phases, subs[i])
    }
}

// AddParallel will append sub-phases which depend on parent only.
func (p *Phase) AddParallel(subs ...*Phase) {
    p.phases = append(p.phases, subs...)
}

// AddParallelRaw will append sub-phases which depend on parent only,
// wrapping each raw storage phase in a builder Phase first.
func (p *Phase) AddParallelRaw(subs ...storage.OperationPhase) {
    for _, sub := range subs {
        phase := NewPhase(sub)
        p.phases = append(p.phases, phase)
    }
}
// AddSequentialRaw will append sub-phases which depend one upon another,
// wrapping each raw storage phase in a builder Phase first.
func (p *Phase) AddSequentialRaw(subs ...storage.OperationPhase) {
    for _, sub := range subs {
        phase := NewPhase(sub)
        if len(p.phases) != 0 {
            phase.Require(p.phases[len(p.phases)-1])
        }
        p.phases = append(p.phases, phase)
    }
}

// AddWithDependency sets phase dep as explicit dependency on each of subs
// before appending them as sub-phases.
func (p *Phase) AddWithDependency(dep *Phase, subs ...*Phase) {
    for i := range subs {
        subs[i].Require(dep)
        p.phases = append(p.phases, subs[i])
    }
}
// Require adds the specified phases reqs as requirements for this phase
// and returns the receiver for chaining.
func (p *Phase) Require(reqs ...*Phase) *Phase {
    // Appending the variadic slice directly replaces the element-by-element loop.
    p.requires = append(p.requires, reqs...)
    return p
}
// Phase wraps an operation phase and adds builder-specific extensions.
type Phase struct {
    p        storage.OperationPhase // the underlying storage representation
    phases   []*Phase               // sub-phases of this phase
    requires []*Phase               // phases this phase depends on
}

// child formats sub as a child of this phase and returns the path.
func (p *Phase) child(sub *Phase) string {
    return p.childLiteral(sub.p.ID)
}

// childLiteral formats sub as a child of this phase and returns the path.
// A nil receiver denotes the root, yielding an absolute "/"-rooted path.
func (p *Phase) childLiteral(sub string) string {
    if p == nil {
        return path.Join("/", sub)
    }
    return path.Join(p.p.ID, sub)
}
// resolveIDs traverses the phase tree and turns relative IDs into absolute
// ones by prefixing each with its parent's path (nil parent = root).
func resolveIDs(parent *Phase, phases []*Phase) {
    for i, phase := range phases {
        if !path.IsAbs(phases[i].p.ID) {
            phases[i].p.ID = parent.child(phase)
        }
        resolveIDs(phases[i], phases[i].phases)
    }
}

// resolveRequirements traverses the phase tree and resolves relative IDs in
// requirements into absolute ones. Relative requirements are resolved
// against the parent, i.e. they refer to sibling phases.
func resolveRequirements(parent *Phase, phases []*Phase) {
    for i := range phases {
        var requires []string
        for _, req := range phases[i].requires {
            if path.IsAbs(req.p.ID) {
                requires = append(requires, req.p.ID)
            } else {
                requires = append(requires, parent.child(req))
            }
        }
        phases[i].p.Requires = requires
        resolveRequirements(phases[i], phases[i].phases)
    }
}
// render converts the specified phases into storage format in result.
// Works recursively on sub-phases.
// expects len(result) == len(phases)
func render(result []storage.OperationPhase, phases []*Phase) {
for i, phase := range phases {
result[i] = phase.p
if len(phase.phases) == 0 {
continue
}
result[i].Phases = make([]storage.OperationPhase, len(phase.phases))
render(result[i].Phases, phase.phases)
}
} | lib/update/internal/builder/builder.go | 0.808635 | 0.407333 | builder.go | starcoder |
package avaclient
// PositionDto struct for PositionDto
type PositionDto struct {
// Elements GUID identifier.
Id string `json:"id"`
// This is used to store the GAEB XML Id within this IElement. This data is not used for any calculations or evaluations but only for GAEB serialization and deserialization.
GaebXmlId string `json:"gaebXmlId,omitempty"`
ElementTypeDiscriminator string `json:"elementTypeDiscriminator"`
// Will return the price per unit, rounded according to the project settings or the default value of three decimal places
UnitPrice float32 `json:"unitPrice"`
// You can use this property to directly set the unit price for this position. This will override any given PriceComponents
UnitPriceOverride float32 `json:"unitPriceOverride,omitempty"`
// Will return this Position's total quantity, rounded to three decimal places.
Quantity float32 `json:"quantity"`
// You can use this property to directly set the quantity for this position. This will override any given QuantityComponents
QuantityOverride float32 `json:"quantityOverride,omitempty"`
// The tag of the unit used for this positions quantity.
UnitTag string `json:"unitTag,omitempty"`
LabourComponents LabourPriceComponentDto `json:"labourComponents,omitempty"`
// The single price components in this Position. Price components should not be handled directly on a per-position basis but set globally in the parent Projects ProjectInformation.
PriceComponents []PriceComponentDto `json:"priceComponents,omitempty"`
// The quantity components of this Position.
QuantityComponents []CalculationDto `json:"quantityComponents,omitempty"`
// Further structuring of this Position.
SubDescriptions []SubDescriptionDto `json:"subDescriptions,omitempty"`
ComissionStatus ComissionStatusDto `json:"comissionStatus"`
// A list of positions that complement this Position. The positions are referenced by their GUIDs. It might be used together with ComplementedByQuantities in case that only a given quantity is complemented by positions.
ComplementedBy []string `json:"complementedBy,omitempty"`
// Will indicate if this Position is complemented in this ServiceSpecification by other Positions. It can not be set to false when there are entries in the ComplementedBy property.
Complemented bool `json:"complemented"`
// Indicates that the amount for this Position is to be set by the bidder.
AmountToBeEnteredByBidder bool `json:"amountToBeEnteredByBidder"`
// Indicates if the bidder demands for prices to be broken up into their price components.
PriceCompositionRequired bool `json:"priceCompositionRequired"`
// Indicates if this Position should use a different TaxRate than what is the default for the Project.
UseDifferentTaxRate bool `json:"useDifferentTaxRate"`
// Will return either the parent ServiceSpecification's TaxRate or it's own if it has a different value. (For example, in Germany Water has a different TaxRate than regular Positions)
TaxRate float32 `json:"taxRate"`
ItemNumber ItemNumberDto `json:"itemNumber,omitempty"`
// The rate of deductions, i.e. 0.12m means 12% price deduction.
DeductionFactor float32 `json:"deductionFactor"`
// Returns the product of UnitPrice times Quantity.
TotalPrice float32 `json:"totalPrice"`
// The total gross price for this Position.
TotalPriceGross float32 `json:"totalPriceGross"`
// Total gross price after applied deductions.
TotalPriceGrossDeducted float32 `json:"totalPriceGrossDeducted"`
// Net price after applied deductions.
DeductedPrice float32 `json:"deductedPrice"`
PositionType PositionTypeDto `json:"positionType"`
PriceType PriceTypeDto `json:"priceType"`
ServiceType ServiceTypeDto `json:"serviceType"`
ProductData ProductDataDto `json:"productData,omitempty"`
// Short description for this DescriptionBase element.
ShortText string `json:"shortText,omitempty"`
// Detailed description for this DescriptionBase element. When the HtmlLongText is set, this is automatically overwritten and filled with the appropriate plain text representation of the Html text. Vice versa, setting this property overrides the HtmlLongText.
LongText string `json:"longText,omitempty"`
// This contains the Html representation of the Longtext. When the LongText is set, this is automatically overwritten and filled with the appropriate Html representation of the plaintext. Vice versa, setting this property overrides the LongText. GAEB 90 and GAEB 2000 exports do not support any image functionality. In GAEB XML, only images that use an embedded Base64 data uri are exported, regular url references are cleared before written out.
HtmlLongText string `json:"htmlLongText,omitempty"`
AdditionType AdditionTypeDto `json:"additionType"`
ElementType string `json:"elementType,omitempty"`
// Quantity assignments are, in contrast to SubDescriptions, used to categorize parts of this Position. For example, it could be categorized by cost group - e.g. a Position describing concrete walls could follow the German DIN 276 Cost Groups Standard and specify that of the total 1.000m² wall, 500m² are classified as exterior walls and 500m² are classified as interior walls. They would then have different cost groups associated, e.g. for accounting purposes.
QuantityAssignments []QuantityAssignmentDto `json:"quantityAssignments,omitempty"`
CommerceProperties CommercePropertiesDto `json:"commerceProperties,omitempty"`
// If this position is an Alternative, then this property should point to the id of the position in this service specification that it can replace.
AlternativeTo string `json:"alternativeTo,omitempty"`
// If this is true, it indicates that the position is intended to be a lump sum, \"Pauschal\" in German. This means the position total price that is being invoiced should not depend on the actual quantity. In practice, partial invoices are still often used for partial services rendered. This property does not affect the price calculation for this position. Typically, the Quantity should be set to 1.0 when this flag is used.
IsLumpSum bool `json:"isLumpSum"`
// This identifier can be used to point to the Id of a position in the same ServiceSpecification that acts as a base position. It matches \"Bezugsposition\" in GAEB. This can be used for positions that repeat partially or are linked together
RepetitionTo string `json:"repetitionTo,omitempty"`
// These are Catalogues that are used within this Position. Catalogues are used to describe catalogues, or collections, that can be used to describe elements with commonly known properties. For example, QuantityAssignments use these to categorize themselves. They are propagate to all child elements, e.g. other containers and QuantityAssignments. In the context of a ServiceSpecification, all elements share the same instance of the collection.
ProjectCatalogues []CatalogueDto `json:"projectCatalogues,omitempty"`
// Referenced catalogues for this Position.
CatalogueReferences []CatalogueReferenceDto `json:"catalogueReferences,omitempty"`
Type string `json:"type,omitempty"`
StandardizedDescription StandardizedDescriptionDto `json:"standardizedDescription,omitempty"`
// This list contains references to positions that complement this one, additionally also specifying a quantity for which the addition is intended. This does not replace the ComplementedBy property and there are no automatic checks being done between these two properties, so it's up to the user code to ensure deletions (and additions, if desired) are performed for both properties. When copying withing keeping Ids, this list will not be part of the copy process, since it would only contain quantities without actual position references. Containers, however, will rebuild the list with updated position references when copying positions that contain entries here.
ComplementedByQuantities []ComplementedByQuantityDto `json:"complementedByQuantities,omitempty"`
// This identifier can be used to point to the Id of an ExecutionDescription in the same ServiceSpecification. ExecutionDescriptions act as a way to centrally describe how positions should be executed in practice. Often, the position itself still has text of its own to highlight deviations from that or add more details.
ExecutionDescriptionReference string `json:"executionDescriptionReference,omitempty"`
} | model_position_dto.go | 0.819352 | 0.400955 | model_position_dto.go | starcoder |
package types
import (
"strconv"
)
// Vector3int16 is a three-dimensional Euclidean vector whose components
// carry 16-bit integer precision.
// It is a type alias of `[]generic.T`-style value semantics: a plain struct.
type Vector3int16 struct {
    X, Y, Z int16
}

// NewVector3int16 builds a vector from the given components, truncating
// each one to 16 bits.
func NewVector3int16(x, y, z int) Vector3int16 {
    return Vector3int16{int16(x), int16(y), int16(z)}
}

// Add returns the component-wise sum of the two vectors.
func (v Vector3int16) Add(op Vector3int16) Vector3int16 {
    return Vector3int16{v.X + op.X, v.Y + op.Y, v.Z + op.Z}
}

// Sub returns the component-wise difference of the two vectors.
func (v Vector3int16) Sub(op Vector3int16) Vector3int16 {
    return Vector3int16{v.X - op.X, v.Y - op.Y, v.Z - op.Z}
}

// Mul returns the component-wise product of the two vectors.
func (v Vector3int16) Mul(op Vector3int16) Vector3int16 {
    return Vector3int16{v.X * op.X, v.Y * op.Y, v.Z * op.Z}
}

// Div returns the component-wise quotient of the two vectors
// (integer division; a zero component in op panics).
func (v Vector3int16) Div(op Vector3int16) Vector3int16 {
    return Vector3int16{v.X / op.X, v.Y / op.Y, v.Z / op.Z}
}

// MulN scales every component by the number, truncating toward zero.
func (v Vector3int16) MulN(op float64) Vector3int16 {
    return Vector3int16{
        int16(float64(v.X) * op),
        int16(float64(v.Y) * op),
        int16(float64(v.Z) * op),
    }
}

// DivN divides every component by the number, truncating toward zero.
func (v Vector3int16) DivN(op float64) Vector3int16 {
    return Vector3int16{
        int16(float64(v.X) / op),
        int16(float64(v.Y) / op),
        int16(float64(v.Z) / op),
    }
}

// Neg returns the vector with every component negated.
func (v Vector3int16) Neg() Vector3int16 {
    return Vector3int16{-v.X, -v.Y, -v.Z}
}

// Type returns a string that identifies the type.
func (Vector3int16) Type() string {
    return "Vector3int16"
}

// String returns the components as a comma-separated list, e.g. "1, 2, 3".
func (v Vector3int16) String() string {
    return strconv.Itoa(int(v.X)) + ", " +
        strconv.Itoa(int(v.Y)) + ", " +
        strconv.Itoa(int(v.Z))
}
// Copy returns a copy of the value.
func (v Vector3int16) Copy() PropValue {
return v
} | Vector3int16.go | 0.921087 | 0.616388 | Vector3int16.go | starcoder |
package vmath
import (
"fmt"
"math"
"github.com/maja42/vmath/mathi"
)
// Vec4i is a four-component integer vector.
type Vec4i [4]int

// String returns a human-readable representation of the vector.
func (v Vec4i) String() string {
    return v.Format("Vec4i[%d x %d x %d x %d]")
}

// Format renders the vector with the given format string, which must
// consume four integer operands.
func (v Vec4i) Format(format string) string {
    return fmt.Sprintf(format, v[0], v[1], v[2], v[3])
}
// Vec4f returns a float representation of the vector.
func (v Vec4i) Vec4f() Vec4f {
    return Vec4f{float32(v[0]), float32(v[1]), float32(v[2]), float32(v[3])}
}

// Split returns the vector's components.
func (v Vec4i) Split() (x, y, z, w int) {
    return v[0], v[1], v[2], v[3]
}

// X returns the vector's first component.
// Performance is equivalent to using v[0].
func (v Vec4i) X() int {
    return v[0]
}

// Y returns the vector's second component.
// Performance is equivalent to using v[1].
func (v Vec4i) Y() int {
    return v[1]
}

// Z returns the vector's third component.
// Performance is equivalent to using v[2].
func (v Vec4i) Z() int {
    return v[2]
}

// W returns the vector's fourth component.
// Performance is equivalent to using v[3].
func (v Vec4i) W() int {
    return v[3]
}

// XY returns a 2D vector with the X and Y components.
func (v Vec4i) XY() Vec2i {
    return Vec2i{v[0], v[1]}
}

// XYZ returns a 3D vector with the X, Y and Z components.
func (v Vec4i) XYZ() Vec3i {
    return Vec3i{v[0], v[1], v[2]}
}
// Abs returns a vector with the components turned into absolute values.
func (v Vec4i) Abs() Vec4i {
    return Vec4i{mathi.Abs(v[0]), mathi.Abs(v[1]), mathi.Abs(v[2]), mathi.Abs(v[3])}
}

// Add performs component-wise addition between two vectors.
func (v Vec4i) Add(other Vec4i) Vec4i {
    return Vec4i{v[0] + other[0], v[1] + other[1], v[2] + other[2], v[3] + other[3]}
}

// AddScalar performs a component-wise scalar addition.
func (v Vec4i) AddScalar(s int) Vec4i {
    return Vec4i{v[0] + s, v[1] + s, v[2] + s, v[3] + s}
}

// Sub performs component-wise subtraction between two vectors.
func (v Vec4i) Sub(other Vec4i) Vec4i {
    return Vec4i{v[0] - other[0], v[1] - other[1], v[2] - other[2], v[3] - other[3]}
}

// SubScalar performs a component-wise scalar subtraction.
func (v Vec4i) SubScalar(s int) Vec4i {
    return Vec4i{v[0] - s, v[1] - s, v[2] - s, v[3] - s}
}

// Mul performs a component-wise multiplication.
func (v Vec4i) Mul(other Vec4i) Vec4i {
    return Vec4i{v[0] * other[0], v[1] * other[1], v[2] * other[2], v[3] * other[3]}
}

// MulScalar performs a scalar multiplication.
func (v Vec4i) MulScalar(s int) Vec4i {
    return Vec4i{v[0] * s, v[1] * s, v[2] * s, v[3] * s}
}

// Div performs a component-wise division.
// Integer division truncates toward zero; a zero component in other panics.
func (v Vec4i) Div(other Vec4i) Vec4i {
    return Vec4i{v[0] / other[0], v[1] / other[1], v[2] / other[2], v[3] / other[3]}
}

// DivScalar performs a scalar division (integer division, truncating).
func (v Vec4i) DivScalar(s int) Vec4i {
    return Vec4i{v[0] / s, v[1] / s, v[2] / s, v[3] / s}
}
// Length returns the vector's length (euclidean norm),
// computed via a float64 square root and returned as float32.
func (v Vec4i) Length() float32 {
    return float32(math.Sqrt(float64(v[0]*v[0] + v[1]*v[1] + v[2]*v[2] + v[3]*v[3])))
}

// SquareLength returns the vector's squared length.
func (v Vec4i) SquareLength() int {
    return v[0]*v[0] + v[1]*v[1] + v[2]*v[2] + v[3]*v[3]
}

// IsZero returns true if all components are zero.
func (v Vec4i) IsZero() bool {
    return v[0] == 0 && v[1] == 0 && v[2] == 0 && v[3] == 0
}

// Equal compares two vectors component-wise.
func (v Vec4i) Equal(other Vec4i) bool {
    return v[0] == other[0] && v[1] == other[1] && v[2] == other[2] && v[3] == other[3]
}

// Clamp clamps each component to the range of [min, max].
func (v Vec4i) Clamp(min, max int) Vec4i {
    return Vec4i{
        Clampi(v[0], min, max),
        Clampi(v[1], min, max),
        Clampi(v[2], min, max),
        Clampi(v[3], min, max),
    }
}

// Negate inverts all components.
func (v Vec4i) Negate() Vec4i {
    return Vec4i{-v[0], -v[1], -v[2], -v[3]}
}

// Dot performs a dot product with another vector.
func (v Vec4i) Dot(other Vec4i) int {
    return v[0]*other[0] + v[1]*other[1] + v[2]*other[2] + v[3]*other[3]
}
// Distance returns the euclidean distance to another position.
func (v Vec4i) Distance(other Vec4i) float32 {
return other.Sub(v).Length()
}
// SquareDistance returns the squared euclidean distance to another position.
func (v Vec4i) SquareDistance(other Vec4i) int {
return other.Sub(v).SquareLength()
} | vec4i.go | 0.906547 | 0.636664 | vec4i.go | starcoder |
package box2d
import (
"fmt"
"math"
)
/// Weld joint definition. You need to specify local anchor points
/// where they are attached and the relative body angle. The position
/// of the anchor points is important for computing the reaction torque.
type B2WeldJointDef struct {
    B2JointDef
    /// The local anchor point relative to bodyA's origin.
    LocalAnchorA B2Vec2
    /// The local anchor point relative to bodyB's origin.
    LocalAnchorB B2Vec2
    /// The bodyB angle minus bodyA angle in the reference state (radians).
    ReferenceAngle float64
    /// The rotational stiffness in N*m
    /// Disable softness with a value of 0
    Stiffness float64
    /// The rotational damping in N*m*s
    Damping float64
}

// MakeB2WeldJointDef returns a weld joint definition with defaults:
// anchors at the body origins, zero reference angle, softness disabled.
func MakeB2WeldJointDef() B2WeldJointDef {
    res := B2WeldJointDef{
        B2JointDef: MakeB2JointDef(),
    }
    res.Type = B2JointType.E_weldJoint
    res.LocalAnchorA.Set(0.0, 0.0)
    res.LocalAnchorB.Set(0.0, 0.0)
    res.ReferenceAngle = 0.0
    res.Stiffness = 0.0
    res.Damping = 0.0
    return res
}
/// A weld joint essentially glues two bodies together. A weld joint may
/// distort somewhat because the island constraint solver is approximate.
type B2WeldJoint struct {
    *B2Joint
    M_stiffness float64 // rotational stiffness (N*m); 0 disables softness
    M_damping   float64 // rotational damping (N*m*s)
    M_bias      float64
    // Solver shared
    M_localAnchorA   B2Vec2
    M_localAnchorB   B2Vec2
    M_referenceAngle float64
    M_gamma          float64
    M_impulse        B2Vec3 // accumulated impulse, warm-started between steps
    // Solver temp
    M_indexA       int
    M_indexB       int
    M_rA           B2Vec2
    M_rB           B2Vec2
    M_localCenterA B2Vec2
    M_localCenterB B2Vec2
    M_invMassA     float64
    M_invMassB     float64
    M_invIA        float64
    M_invIB        float64
    M_mass         B2Mat33
}
/// GetLocalAnchorA returns the local anchor point relative to bodyA's origin.
func (j B2WeldJoint) GetLocalAnchorA() B2Vec2 {
	return j.M_localAnchorA
}
/// GetLocalAnchorB returns the local anchor point relative to bodyB's origin.
func (j B2WeldJoint) GetLocalAnchorB() B2Vec2 {
	return j.M_localAnchorB
}
/// GetReferenceAngle returns the reference angle (bodyB angle minus bodyA
/// angle in the reference state, radians).
func (j B2WeldJoint) GetReferenceAngle() float64 {
	return j.M_referenceAngle
}
/// SetStiffness sets the rotational stiffness in N*m (0 disables softness).
func (j *B2WeldJoint) SetStiffness(stiffness float64) {
	j.M_stiffness = stiffness
}
/// GetStiffness returns the rotational stiffness in N*m.
func (j B2WeldJoint) GetStiffness() float64 {
	return j.M_stiffness
}
/// SetDamping sets the rotational damping in N*m*s.
func (j *B2WeldJoint) SetDamping(damping float64) {
	j.M_damping = damping
}
/// GetDamping returns the rotational damping in N*m*s.
func (j B2WeldJoint) GetDamping() float64 {
	return j.M_damping
}
// // Point-to-point constraint
// // C = p2 - p1
// // Cdot = v2 - v1
// // = v2 + cross(w2, r2) - v1 - cross(w1, r1)
// // J = [-I -r1_skew I r2_skew ]
// // Identity used:
// // w k % (rx i + ry j) = w * (-ry i + rx j)
// // Angle constraint
// // C = angle2 - angle1 - referenceAngle
// // Cdot = w2 - w1
// // J = [0 0 -1 0 0 1]
// // K = invI1 + invI2
/// Initialize sets the two bodies and derives the local anchor points and
/// reference angle from a single world-space anchor.
/// @param bA the first body connected by this joint
/// @param bB the second body connected by this joint
/// @param anchor the point of connection in world coordinates
func (def *B2WeldJointDef) Initialize(bA *B2Body, bB *B2Body, anchor B2Vec2) {
	def.BodyA = bA
	def.BodyB = bB
	def.LocalAnchorA = bA.GetLocalPoint(anchor)
	def.LocalAnchorB = bB.GetLocalPoint(anchor)
	def.ReferenceAngle = bB.GetAngle() - bA.GetAngle()
}
// MakeB2WeldJoint constructs a weld joint from its definition, copying the
// anchors, reference angle and spring parameters, and zeroing the
// accumulated impulse.
func MakeB2WeldJoint(def *B2WeldJointDef) *B2WeldJoint {
	joint := &B2WeldJoint{B2Joint: MakeB2Joint(def)}
	joint.M_localAnchorA = def.LocalAnchorA
	joint.M_localAnchorB = def.LocalAnchorB
	joint.M_referenceAngle = def.ReferenceAngle
	joint.M_stiffness = def.Stiffness
	joint.M_damping = def.Damping
	joint.M_impulse.SetZero()
	return joint
}
// InitVelocityConstraints caches per-body solver data (island indices, local
// centers, inverse mass/inertia), computes the world-space anchor arms, and
// builds the 3x3 effective-mass matrix for the weld constraint. When
// stiffness > 0 the angular row is softened into a spring/damper via
// gamma/bias; otherwise the rigid system is inverted. Finally, if warm
// starting is enabled, last step's accumulated impulse is re-applied.
func (joint *B2WeldJoint) InitVelocityConstraints(data B2SolverData) {
	joint.M_indexA = joint.M_bodyA.M_islandIndex
	joint.M_indexB = joint.M_bodyB.M_islandIndex
	joint.M_localCenterA = joint.M_bodyA.M_sweep.LocalCenter
	joint.M_localCenterB = joint.M_bodyB.M_sweep.LocalCenter
	joint.M_invMassA = joint.M_bodyA.M_invMass
	joint.M_invMassB = joint.M_bodyB.M_invMass
	joint.M_invIA = joint.M_bodyA.M_invI
	joint.M_invIB = joint.M_bodyB.M_invI
	// Current angles and velocities from the solver arrays.
	aA := data.Positions[joint.M_indexA].A
	vA := data.Velocities[joint.M_indexA].V
	wA := data.Velocities[joint.M_indexA].W
	aB := data.Positions[joint.M_indexB].A
	vB := data.Velocities[joint.M_indexB].V
	wB := data.Velocities[joint.M_indexB].W
	qA := MakeB2RotFromAngle(aA)
	qB := MakeB2RotFromAngle(aB)
	// Anchor arms rotated into world space, measured from each center of mass.
	joint.M_rA = B2RotVec2Mul(qA, B2Vec2Sub(joint.M_localAnchorA, joint.M_localCenterA))
	joint.M_rB = B2RotVec2Mul(qB, B2Vec2Sub(joint.M_localAnchorB, joint.M_localCenterB))
	// J = [-I -r1_skew I r2_skew]
	//     [ 0       -1 0       1]
	// r_skew = [-ry; rx]
	// Matlab
	// K = [ mA+r1y^2*iA+mB+r2y^2*iB,  -r1y*iA*r1x-r2y*iB*r2x,          -r1y*iA-r2y*iB]
	//     [  -r1y*iA*r1x-r2y*iB*r2x, mA+r1x^2*iA+mB+r2x^2*iB,           r1x*iA+r2x*iB]
	//     [          -r1y*iA-r2y*iB,           r1x*iA+r2x*iB,                   iA+iB]
	mA := joint.M_invMassA
	mB := joint.M_invMassB
	iA := joint.M_invIA
	iB := joint.M_invIB
	// Symmetric effective-mass matrix K; only the lower triangle is computed
	// directly, the rest mirrors it.
	var K B2Mat33
	K.Ex.X = mA + mB + joint.M_rA.Y*joint.M_rA.Y*iA + joint.M_rB.Y*joint.M_rB.Y*iB
	K.Ey.X = -joint.M_rA.Y*joint.M_rA.X*iA - joint.M_rB.Y*joint.M_rB.X*iB
	K.Ez.X = -joint.M_rA.Y*iA - joint.M_rB.Y*iB
	K.Ex.Y = K.Ey.X
	K.Ey.Y = mA + mB + joint.M_rA.X*joint.M_rA.X*iA + joint.M_rB.X*joint.M_rB.X*iB
	K.Ez.Y = joint.M_rA.X*iA + joint.M_rB.X*iB
	K.Ex.Z = K.Ez.X
	K.Ey.Z = K.Ez.Y
	K.Ez.Z = iA + iB
	if joint.M_stiffness > 0.0 {
		// Soft angular constraint: invert only the 2x2 linear block; the
		// angular row is handled by the spring/damper below.
		K.GetInverse22(&joint.M_mass)
		invM := iA + iB
		C := aB - aA - joint.M_referenceAngle
		// Damping coefficient
		d := joint.M_damping
		// Spring stiffness
		k := joint.M_stiffness
		// magic formulas (soft-constraint gamma/bias derivation)
		h := data.Step.Dt
		joint.M_gamma = h * (d + h*k)
		if joint.M_gamma != 0.0 {
			joint.M_gamma = 1.0 / joint.M_gamma
		} else {
			joint.M_gamma = 0.0
		}
		joint.M_bias = C * h * k * joint.M_gamma
		invM += joint.M_gamma
		if invM != 0.0 {
			joint.M_mass.Ez.Z = 1.0 / invM
		} else {
			joint.M_mass.Ez.Z = 0.0
		}
	} else if K.Ez.Z == 0.0 {
		// No angular inertia available: fall back to the 2x2 linear block.
		K.GetInverse22(&joint.M_mass)
		joint.M_gamma = 0.0
		joint.M_bias = 0.0
	} else {
		// Fully rigid weld: invert the whole symmetric 3x3 system.
		K.GetSymInverse33(&joint.M_mass)
		joint.M_gamma = 0.0
		joint.M_bias = 0.0
	}
	if data.Step.WarmStarting {
		// Scale impulses to support a variable time step.
		joint.M_impulse.OperatorScalarMulInplace(data.Step.DtRatio)
		P := MakeB2Vec2(joint.M_impulse.X, joint.M_impulse.Y)
		vA.OperatorMinusInplace(B2Vec2MulScalar(mA, P))
		wA -= iA * (B2Vec2Cross(joint.M_rA, P) + joint.M_impulse.Z)
		vB.OperatorPlusInplace(B2Vec2MulScalar(mB, P))
		wB += iB * (B2Vec2Cross(joint.M_rB, P) + joint.M_impulse.Z)
	} else {
		joint.M_impulse.SetZero()
	}
	// Write the (possibly warm-started) velocities back to the solver.
	data.Velocities[joint.M_indexA].V = vA
	data.Velocities[joint.M_indexA].W = wA
	data.Velocities[joint.M_indexB].V = vB
	data.Velocities[joint.M_indexB].W = wB
}
// SolveVelocityConstraints performs one velocity iteration. In the soft case
// (stiffness > 0) the angular row is solved first with the spring/damper
// bias, then the linear 2x2 block; in the rigid case the full 3-DOF system
// is solved at once. Impulses accumulate on joint.M_impulse.
func (joint *B2WeldJoint) SolveVelocityConstraints(data B2SolverData) {
	vA := data.Velocities[joint.M_indexA].V
	wA := data.Velocities[joint.M_indexA].W
	vB := data.Velocities[joint.M_indexB].V
	wB := data.Velocities[joint.M_indexB].W
	mA := joint.M_invMassA
	mB := joint.M_invMassB
	iA := joint.M_invIA
	iB := joint.M_invIB
	if joint.M_stiffness > 0.0 {
		// Angular (soft) row: relative angular velocity plus bias/gamma terms.
		Cdot2 := wB - wA
		impulse2 := -joint.M_mass.Ez.Z * (Cdot2 + joint.M_bias + joint.M_gamma*joint.M_impulse.Z)
		joint.M_impulse.Z += impulse2
		wA -= iA * impulse2
		wB += iB * impulse2
		// Linear rows: relative velocity of the two anchor points.
		Cdot1 := B2Vec2Sub(B2Vec2Sub(B2Vec2Add(vB, B2Vec2CrossScalarVector(wB, joint.M_rB)), vA), B2Vec2CrossScalarVector(wA, joint.M_rA))
		impulse1 := B2Vec2Mul22(joint.M_mass, Cdot1).OperatorNegate()
		joint.M_impulse.X += impulse1.X
		joint.M_impulse.Y += impulse1.Y
		P := impulse1
		vA.OperatorMinusInplace(B2Vec2MulScalar(mA, P))
		wA -= iA * B2Vec2Cross(joint.M_rA, P)
		vB.OperatorPlusInplace(B2Vec2MulScalar(mB, P))
		wB += iB * B2Vec2Cross(joint.M_rB, P)
	} else {
		// Rigid weld: solve linear + angular rows together via the 3x3 mass.
		Cdot1 := B2Vec2Sub(B2Vec2Sub(B2Vec2Add(vB, B2Vec2CrossScalarVector(wB, joint.M_rB)), vA), B2Vec2CrossScalarVector(wA, joint.M_rA))
		Cdot2 := wB - wA
		Cdot := MakeB2Vec3(Cdot1.X, Cdot1.Y, Cdot2)
		impulse := B2Vec3Mat33Mul(joint.M_mass, Cdot).OperatorNegate()
		joint.M_impulse.OperatorPlusInplace(impulse)
		P := MakeB2Vec2(impulse.X, impulse.Y)
		vA.OperatorMinusInplace(B2Vec2MulScalar(mA, P))
		wA -= iA * (B2Vec2Cross(joint.M_rA, P) + impulse.Z)
		vB.OperatorPlusInplace(B2Vec2MulScalar(mB, P))
		wB += iB * (B2Vec2Cross(joint.M_rB, P) + impulse.Z)
	}
	data.Velocities[joint.M_indexA].V = vA
	data.Velocities[joint.M_indexA].W = wA
	data.Velocities[joint.M_indexB].V = vB
	data.Velocities[joint.M_indexB].W = wB
}
// SolvePositionConstraints performs one position-correction iteration,
// rebuilding the effective-mass matrix at the current poses and pushing the
// bodies toward satisfying the weld. Returns true when both the linear and
// angular errors are within the solver slop tolerances.
func (joint *B2WeldJoint) SolvePositionConstraints(data B2SolverData) bool {
	cA := data.Positions[joint.M_indexA].C
	aA := data.Positions[joint.M_indexA].A
	cB := data.Positions[joint.M_indexB].C
	aB := data.Positions[joint.M_indexB].A
	qA := MakeB2RotFromAngle(aA)
	qB := MakeB2RotFromAngle(aB)
	mA := joint.M_invMassA
	mB := joint.M_invMassB
	iA := joint.M_invIA
	iB := joint.M_invIB
	// Anchor arms at the *current* (not cached) orientations.
	rA := B2RotVec2Mul(qA, B2Vec2Sub(joint.M_localAnchorA, joint.M_localCenterA))
	rB := B2RotVec2Mul(qB, B2Vec2Sub(joint.M_localAnchorB, joint.M_localCenterB))
	positionError := 0.0
	angularError := 0.0
	// Same symmetric K matrix as in InitVelocityConstraints.
	var K B2Mat33
	K.Ex.X = mA + mB + rA.Y*rA.Y*iA + rB.Y*rB.Y*iB
	K.Ey.X = -rA.Y*rA.X*iA - rB.Y*rB.X*iB
	K.Ez.X = -rA.Y*iA - rB.Y*iB
	K.Ex.Y = K.Ey.X
	K.Ey.Y = mA + mB + rA.X*rA.X*iA + rB.X*rB.X*iB
	K.Ez.Y = rA.X*iA + rB.X*iB
	K.Ex.Z = K.Ez.X
	K.Ey.Z = K.Ez.Y
	K.Ez.Z = iA + iB
	if joint.M_stiffness > 0.0 {
		// Soft joint: only the linear separation is corrected positionally;
		// the spring handles the angle.
		C1 := B2Vec2Sub(B2Vec2Sub(B2Vec2Add(cB, rB), cA), rA)
		positionError = C1.Length()
		angularError = 0.0
		P := K.Solve22(C1).OperatorNegate()
		cA.OperatorMinusInplace(B2Vec2MulScalar(mA, P))
		aA -= iA * B2Vec2Cross(rA, P)
		cB.OperatorPlusInplace(B2Vec2MulScalar(mB, P))
		aB += iB * B2Vec2Cross(rB, P)
	} else {
		// Rigid joint: correct linear and angular errors together.
		C1 := B2Vec2Sub(B2Vec2Sub(B2Vec2Add(cB, rB), cA), rA)
		C2 := aB - aA - joint.M_referenceAngle
		positionError = C1.Length()
		angularError = math.Abs(C2)
		C := MakeB2Vec3(C1.X, C1.Y, C2)
		var impulse B2Vec3
		if K.Ez.Z > 0.0 {
			impulse = K.Solve33(C).OperatorNegate()
		} else {
			// No angular inertia: degrade gracefully to the 2x2 solve.
			impulse2 := K.Solve22(C1).OperatorNegate()
			impulse.Set(impulse2.X, impulse2.Y, 0.0)
		}
		P := MakeB2Vec2(impulse.X, impulse.Y)
		cA.OperatorMinusInplace(B2Vec2MulScalar(mA, P))
		aA -= iA * (B2Vec2Cross(rA, P) + impulse.Z)
		cB.OperatorPlusInplace(B2Vec2MulScalar(mB, P))
		aB += iB * (B2Vec2Cross(rB, P) + impulse.Z)
	}
	data.Positions[joint.M_indexA].C = cA
	data.Positions[joint.M_indexA].A = aA
	data.Positions[joint.M_indexB].C = cB
	data.Positions[joint.M_indexB].A = aB
	return positionError <= B2_linearSlop && angularError <= B2_angularSlop
}
// GetAnchorA returns bodyA's anchor point in world coordinates.
func (j B2WeldJoint) GetAnchorA() B2Vec2 {
	return j.M_bodyA.GetWorldPoint(j.M_localAnchorA)
}
// GetAnchorB returns bodyB's anchor point in world coordinates.
func (j B2WeldJoint) GetAnchorB() B2Vec2 {
	return j.M_bodyB.GetWorldPoint(j.M_localAnchorB)
}
// GetReactionForce converts the accumulated linear impulse into a force by
// multiplying with the inverse time step.
func (j B2WeldJoint) GetReactionForce(inv_dt float64) B2Vec2 {
	linearImpulse := MakeB2Vec2(j.M_impulse.X, j.M_impulse.Y)
	return B2Vec2MulScalar(inv_dt, linearImpulse)
}
// GetReactionTorque converts the accumulated angular impulse into a torque
// by multiplying with the inverse time step.
func (j B2WeldJoint) GetReactionTorque(inv_dt float64) float64 {
	return inv_dt * j.M_impulse.Z
}
// Dump prints this joint's configuration to stdout as C++-style
// b2WeldJointDef setup code, for reconstructing a scene from a log.
func (joint *B2WeldJoint) Dump() {
	indexA := joint.M_bodyA.M_islandIndex
	indexB := joint.M_bodyB.M_islandIndex
	fmt.Printf("  b2WeldJointDef jd;\n")
	fmt.Printf("  jd.bodyA = bodies[%d];\n", indexA)
	fmt.Printf("  jd.bodyB = bodies[%d];\n", indexB)
	fmt.Printf("  jd.collideConnected = bool(%v);\n", joint.M_collideConnected)
	fmt.Printf("  jd.localAnchorA.Set(%.15f, %.15f);\n", joint.M_localAnchorA.X, joint.M_localAnchorA.Y)
	fmt.Printf("  jd.localAnchorB.Set(%.15f, %.15f);\n", joint.M_localAnchorB.X, joint.M_localAnchorB.Y)
	fmt.Printf("  jd.referenceAngle = %.15f;\n", joint.M_referenceAngle)
	// NOTE(review): the labels below use the legacy frequencyHz/dampingRatio
	// field names while the values printed are M_stiffness/M_damping — confirm
	// which b2WeldJointDef field names the consumer of this dump expects.
	fmt.Printf("  jd.frequencyHz = %.15f;\n", joint.M_stiffness)
	fmt.Printf("  jd.dampingRatio = %.15f;\n", joint.M_damping)
	fmt.Printf("  joints[%d] = m_world.CreateJoint(&jd);\n", joint.M_index)
} | DynamicsB2JointWeld.go | 0.886574 | 0.746347 | DynamicsB2JointWeld.go | starcoder |
package main
import (
"fmt"
"gopkg.in/yaml.v2"
)
// Dataset is a wrapper around a configuration for a dataset export.
// It is populated from YAML by NewDatasetFromYAML.
type Dataset struct {
	// Output directory for all generated files.
	Directory string `yaml:"directory"`
	// Name of the zip archive to produce.
	ZipFileName string `yaml:"zipFileName"`
	// Name of the primary-key file listing the entity identifiers.
	PrimaryKeyFileName string `yaml:"pkFileName"`
	// Number of entities to generate data for.
	NumberOfEntities uint16 `yaml:"numberOfEntities"`
	// Total time span covered by time-series files, in hours.
	TotalTimeInHours uint16 `yaml:"totalTimeInHours"`
	// Per-file generation configurations.
	Files []DsFile `yaml:"files"`
}
// DsFile is a wrapper around the configuration necessary to generate a file.
// DataType selects between "static" and time-series generation; the
// remaining fields only apply to the matching mode (see toGeneratedFile).
type DsFile struct {
	FileName string `yaml:"fileName"`
	// "static" or any other value for time-series.
	DataType string `yaml:"dataType"`
	ValueType string `yaml:"valueType"`
	TimeStepInMilliseconds uint32 `yaml:"timeStepMillis"`
	// Candidate values for static files.
	PossibleValues []string `yaml:"values"`
	Minimum float64 `yaml:"minValue"`
	Maximum float64 `yaml:"maxValue"`
	TimeStepVariance float64 `yaml:"timeVariance"`
	ValueVariance float64 `yaml:"valueVariance"`
}
// primaryKeyFile generates the file listing each entity's primary key.
type primaryKeyFile struct {
	filePath         string
	numberOfEntities uint16
}

// staticFile generates one constant value per entity, drawn from
// possibleValues.
type staticFile struct {
	filePath         string
	numberOfEntities uint16
	possibleValues   []string
}

// timeSeriesFile generates a value stream per entity over totalTimeInHours,
// sampled every timeStepInMilliseconds with the configured jitter.
type timeSeriesFile struct {
	filePath               string
	numberOfEntities       uint16
	valueType              string
	totalTimeInHours       uint16
	timeStepInMilliseconds uint32
	minimum                float64
	maximum                float64
	timeStepVariance       float64
	valueVariance          float64
}
// NewDatasetFromYAML parses a YAML document into a Dataset configuration.
// On malformed input the zero Dataset is returned together with the
// unmarshal error.
func NewDatasetFromYAML(data string) (Dataset, error) {
	var dataset Dataset
	err := yaml.Unmarshal([]byte(data), &dataset)
	return dataset, err
}
// String implements fmt.Stringer for Dataset, rendering every configured
// field (the original omitted ZipFileName) for debugging/logging.
func (d Dataset) String() string {
	return fmt.Sprintf(`Dataset {
	Directory: "%v"
	ZipFileName: "%v"
	PrimaryKeyFileName: "%v"
	NumberOfEntities: "%v"
	TotalTimeInHours: "%v"
	Files: %v
}`, d.Directory, d.ZipFileName, d.PrimaryKeyFileName, d.NumberOfEntities, d.TotalTimeInHours, d.Files)
}
// String implements fmt.Stringer for DsFile; it is used indirectly by
// Dataset.String when rendering the Files slice via %v.
func (f DsFile) String() string {
	return fmt.Sprintf(`
	File {
		FileName: "%v"
		DataType: "%v"
		ValueType: "%v"
		TimeStepInMilliseconds: "%v"
		PossibleValues: %v
		Minimum: "%v"
		Maximum: "%v"
		TimeStepVariance: "%v"
		ValueVariance: "%v"
	}`, f.FileName, f.DataType, f.ValueType, f.TimeStepInMilliseconds, f.PossibleValues, f.Minimum, f.Maximum, f.TimeStepVariance, f.ValueVariance)
}
// getFiles assembles the full list of data generators for this dataset:
// the primary-key file first, followed by one generator per configured file.
func (d Dataset) getFiles() []dataGenerator {
	generators := make([]dataGenerator, 0, len(d.Files)+1)
	generators = append(generators, primaryKeyFile{
		filePath:         d.PrimaryKeyFileName,
		numberOfEntities: d.NumberOfEntities,
	})
	for _, f := range d.Files {
		generators = append(generators, f.toGeneratedFile(d))
	}
	return generators
}
// toGeneratedFile converts this file configuration into its concrete
// generator: a staticFile when DataType is "static", otherwise a
// timeSeriesFile. Dataset-wide settings (entity count, total hours) are
// copied from d.
func (f DsFile) toGeneratedFile(d Dataset) dataGenerator {
	fp := f.FileName
	if f.DataType == "static" {
		return staticFile{
			filePath:         fp,
			numberOfEntities: d.NumberOfEntities,
			possibleValues:   f.PossibleValues,
		}
	}
	return timeSeriesFile{
		filePath:               fp,
		numberOfEntities:       d.NumberOfEntities,
		valueType:              f.ValueType,
		totalTimeInHours:       d.TotalTimeInHours,
		timeStepInMilliseconds: f.TimeStepInMilliseconds,
		minimum:                f.Minimum,
		maximum:                f.Maximum,
		timeStepVariance:       f.TimeStepVariance,
		valueVariance:          f.ValueVariance,
	}
} | model.go | 0.667581 | 0.417331 | model.go | starcoder |
package main
import (
"fmt"
"image/color"
"math"
"math/rand"
"os"
"runtime"
"time"
"github.com/veandco/go-sdl2/sdl"
"github.com/xyproto/pf"
"github.com/xyproto/pixelpusher"
"github.com/xyproto/sdl2utils"
)
const (
	// Size of "worldspace pixels", measured in "screenspace pixels"
	pixelscale = 4
	// The resolution (worldspace)
	width  = 320
	height = 200
	// The width of the pixel buffer, used when calculating where to place
	// pixels (index = y*pitch + x)
	pitch = width
	// Target framerate (used as the per-frame SDL delay divisor)
	frameRate = 60
	// Alpha value for opaque colors
	opaque = 255
)
var (
	// rb returns a uniformly random byte in [0, 255].
	// Fix: rand.Intn(255) yielded [0, 254] and could never produce 255;
	// Intn's argument is exclusive, so a full byte needs Intn(256).
	// NOTE: this name is shadowed by a local `rb` inside Convolution —
	// consider renaming one of them.
	rb = func() uint8 { return uint8(rand.Intn(256)) }
)
// Strobe blurs the interior of the pixel buffer in place: every non-border
// pixel is replaced by the average of its left, right, own and
// next-row-neighbour values.
//
// The average is computed on the packed uint32 colour values, not per
// channel, so channels bleed into each other — that is the intended
// "strobe" artefact. The redundant int32(...) conversions on the loop
// bounds (the parameters are already int32) have been removed.
func Strobe(pixels []uint32, width, height, pitch int32) {
	for y := int32(1); y < height-1; y++ {
		for x := int32(1); x < width-1; x++ {
			left := pixels[y*pitch+x-1]
			right := pixels[y*pitch+x+1]
			this := pixels[y*pitch+x]
			above := pixels[(y+1)*pitch+x]
			// Deliberately dividing the raw uint32 color value!
			pixels[y*pitch+x] = (left + right + this + above) / 4
		}
	}
}
// Convolution applies one of six neighbourhood-averaging effects (selected
// by enr) over the whole buffer in place, with wrap-around addressing.
// time in [0,1] modulates both the sample offsets and the averaging divisor.
func Convolution(time float32, pixels []uint32, width, height, pitch int32, enr int) {
	// Make the effect increase and decrease in intensity instead of increasing and then dropping down to 0 again
	stime := float32(math.Sin(float64(time) * math.Pi))
	var left, right, this, above uint32
	// Time-dependent sample offsets used by the individual effects.
	two1 := int32(2.0 - stime*4.0)
	two2 := int32(2.0 - time*4.0)
	one1 := int32(1.0 - stime*2.0)
	one2 := int32(1.0 - time*2.0)
	size := width * height
	for y := int32(0); y < height; y++ {
		for x := int32(0); x < width; x++ {
			// Pick the four sample positions according to the effect number.
			switch enr {
			case 0:
				// "snow patterns"
				left = pixelpusher.GetWrap(pixels, y*pitch+x-1, size)
				right = pixelpusher.GetWrap(pixels, y*pitch+x+1, size)
				this = pixelpusher.GetWrap(pixels, y*pitch+x, size)
				above = pixelpusher.GetWrap(pixels, (y+1)*pitch+x, size)
			case 1:
				// "highway"
				left = pixelpusher.GetWrap(pixels, (y-1)*pitch+x-1, size)
				right = pixelpusher.GetWrap(pixels, (y-1)*pitch+x+1, size)
				this = pixelpusher.GetWrap(pixels, y*pitch+x, size)
				above = pixelpusher.GetWrap(pixels, (y-1)*pitch+x, size)
			case 2:
				// "dither highway"
				left = pixelpusher.GetWrap(pixels, (y-1)*pitch+x-1, size)
				right = pixelpusher.GetWrap(pixels, (y-1)*pitch+x+1, size)
				this = pixelpusher.GetWrap(pixels, (y-1)*pitch+(x-1), size)
				above = pixelpusher.GetWrap(pixels, (y+1)*pitch+(x+1), size)
			case 3:
				// "butterfly"
				left = pixelpusher.GetWrap(pixels, y*pitch+(x-two1), size)
				right = pixelpusher.GetWrap(pixels, y*pitch+(x+two1), size)
				this = pixelpusher.GetWrap(pixels, y*pitch+x*two2, size)
				above = pixelpusher.GetWrap(pixels, (y-two1)*pitch+x*two2, size)
			case 4:
				// ?
				left = pixelpusher.GetWrap(pixels, y*pitch+(x-two2), size)
				right = pixelpusher.GetWrap(pixels, y*pitch+(x+two1), size)
				this = pixelpusher.GetWrap(pixels, y*pitch+int32(float32(x)*stime), size)
				above = pixelpusher.GetWrap(pixels, (y-two2)*pitch+int32(float32(x)*stime), size)
			case 5:
				// "castle"
				left = pixelpusher.GetWrap(pixels, y*pitch+(x-one1), size)
				right = pixelpusher.GetWrap(pixels, y*pitch+(x+one1), size)
				this = pixelpusher.GetWrap(pixels, y*pitch+x*two1, size)
				above = pixelpusher.GetWrap(pixels, (y-one2)*pitch+x*two1, size)
			}
			// Split each sample into channels, then average per channel with a
			// time-varying divisor (4.8 - stime) for a pulsing brightness.
			lr, lg, lb, _ := pixelpusher.ColorValueToRGBA(left)
			// NOTE(review): the local `rb` below shadows the package-level
			// random-byte helper of the same name.
			rr, rg, rb, _ := pixelpusher.ColorValueToRGBA(right)
			tr, tg, tb, _ := pixelpusher.ColorValueToRGBA(this)
			ar, ag, ab, _ := pixelpusher.ColorValueToRGBA(above)
			averageR := uint8(float32(lr+rr+tr+ar) / float32(4.8-stime))
			averageG := uint8(float32(lg+rg+tg+ag) / float32(4.8-stime))
			averageB := uint8(float32(lb+rb+tb+ab) / float32(4.8-stime))
			pixelpusher.SetWrap(pixels, y*pitch+x, width*height, pixelpusher.RGBAToColorValue(averageR, averageG, averageB, 0xff))
		}
	}
}
// clamp converts v to a uint8 limited to the range [0, max].
// Fixes two defects of the original: it returned the hard-coded 255
// instead of max when over the limit, and a negative v hit the
// implementation-defined float->uint32 conversion (now clamped to 0).
func clamp(v float32, max uint8) uint8 {
	if v < 0 {
		return 0
	}
	u := uint32(v)
	if u > uint32(max) {
		return max
	}
	return uint8(u)
}
// TriangleDance draws a dancing triangle, as time goes from 0.0 to 1.0.
// The returned value signals to which degree the graphics should be
// transitioned out (currently always 0.0).
// xdirection/ydirection select movement: >0 sweeps with time, <0 sweeps
// against it, 0 pins that axis to the centre. The centre (0,0) triangle is
// drawn red; all others get a random colour.
func TriangleDance(cores int, time float32, pixels []uint32, width, height, pitch int32, xdirection, ydirection int) (transition float32) {
	// Ease-in: square the time so motion accelerates.
	time *= time
	size := int32(120)
	//var bgColorValue uint32 = 0x4e7f9eff
	// The function is responsible for clearing the pixels,
	// it might want to reuse the pixels from the last time (flame effect)
	//pixelpusher.FastClear(pixels, bgColorValue)
	// Find a suitable placement and color
	var x int32
	if xdirection > 0 {
		x = pixelpusher.Clamp(int32(float32(width)*time), size, width-size)
	} else if xdirection == 0 {
		x = int32(width / 2)
	} else {
		x = pixelpusher.Clamp(int32(float32(width)*(1.0-time)), size, width-size)
	}
	var y int32
	if ydirection > 0 {
		y = pixelpusher.Clamp(int32(float32(height)*time), size, height-size)
	} else if ydirection == 0 {
		y = int32(height / 2)
	} else {
		y = pixelpusher.Clamp(int32(float32(height)*(1.0-time)), size, height-size)
	}
	// Make the center triangle red
	var c color.RGBA
	if xdirection == 0 && ydirection == 0 {
		c = color.RGBA{0xff, 0, 0, 0xff}
	} else {
		c = color.RGBA{rb(), rb(), rb(), 0xff}
	}
	// One fixed corner at (x, y); the other two jittered within +/- size/2.
	x1 := x
	y1 := y
	x2 := x + rand.Int31n(int32(size)) - int32(size/2)
	y2 := y + rand.Int31n(int32(size)) - int32(size/2)
	x3 := x + rand.Int31n(int32(size)) - int32(size/2)
	y3 := y + rand.Int31n(int32(size)) - int32(size/2)
	pixelpusher.Triangle(cores, pixels, x1, y1, x2, y2, x3, y3, c, pitch)
	return 0.0
}
// run sets up the SDL window/renderer/texture, then drives the demo's main
// loop (draw, post-process, present, handle input) until quit. It returns
// the process exit code; keeping it separate from main lets the deferred
// SDL cleanup run before os.Exit.
func run() int {
	sdl.Init(uint32(sdl.INIT_VIDEO))
	defer sdl.Quit()
	var (
		window   *sdl.Window
		renderer *sdl.Renderer
		err      error
	)
	window, err = sdl.CreateWindow("Strobing Triangles", sdl.WINDOWPOS_UNDEFINED, sdl.WINDOWPOS_UNDEFINED, int32(width*pixelscale), int32(height*pixelscale), sdl.WINDOW_SHOWN)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to create window: %s\n", err)
		return 1
	}
	defer window.Destroy()
	renderer, err = sdl.CreateRenderer(window, -1, sdl.RENDERER_ACCELERATED)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to create renderer: %s\n", err)
		return 1
	}
	defer renderer.Destroy()
	// Fill the render buffer with black
	renderer.SetDrawColor(0, 0, 0, opaque)
	renderer.Clear()
	texture, err := renderer.CreateTexture(sdl.PIXELFORMAT_ARGB8888, sdl.TEXTUREACCESS_STREAMING, width, height)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Failed to create texture: %s\n", err)
		return 1
	}
	rand.Seed(time.Now().UnixNano())
	// Loop state: the worldspace pixel buffer, a post-processing copy, and
	// the interactive toggles (pause/record/glitch).
	var (
		pixels    = make([]uint32, width*height)
		pixelCopy = make([]uint32, width*height)
		cores     = runtime.NumCPU()
		event     sdl.Event
		quit      bool
		pause     bool
		recording bool
		glitch    bool = true
	)
	// cycleTime drives TriangleDance over [0,1); convTime oscillates inside
	// [flameStart, 0.81) to drive the flame-like Convolution intensity.
	cycleTime := float32(0.0)
	flameStart := float32(0.75)
	convTime := flameStart
	convTimeAdd := float32(0.0001)
	var loopCounter uint64 = 0
	var frameCounter uint64 = 0
	// effect number (Convolution variant, cycled 0..3)
	enr := 3
	// PixelFunction for inverting the colors
	lineEffectFunction := pf.Combine(pf.Invert, pf.SetBlueBits)
	// Innerloop
	for !quit {
		if !pause {
			// Invert pixels, and or with Blue, before drawing
			if glitch {
				pf.GlitchyMap(cores, lineEffectFunction, pixels)
			} else {
				pf.Map(cores, lineEffectFunction, pixels)
			}
			// Only draw new triangles every 4th frame.
			if loopCounter%4 == 0 {
				// Draw to the pixel buffer
				TriangleDance(cores, cycleTime, pixels, width, height, pitch, 1, 0)
				TriangleDance(cores, cycleTime, pixels, width, height, pitch, -1, 0)
				TriangleDance(cores, cycleTime, pixels, width, height, pitch, 0, 1)
				TriangleDance(cores, cycleTime, pixels, width, height, pitch, 0, -1)
				Convolution(convTime, pixels, width, height, pitch, enr)
			}
			Convolution(convTime, pixels, width, height, pitch, enr)
			// Keep track of the time given to TriangleDance
			cycleTime += 0.002
			if cycleTime >= 1.0 {
				cycleTime = 0.0
				enr++
				if enr > 3 {
					enr = 0
				}
			}
			// Keep track of the time given to Convolution
			convTime += convTimeAdd
			if convTime >= 0.81 {
				convTime = flameStart
				convTimeAdd = -convTimeAdd
			} else if convTime <= flameStart {
				convTime = flameStart
				convTimeAdd = -convTimeAdd
			}
			// Take a copy before applying post-processing
			copy(pixelCopy, pixels)
			// Invert the pixels back after adding all the things above
			if glitch {
				pf.GlitchyMap(cores, lineEffectFunction, pixels)
			} else {
				pf.Map(cores, lineEffectFunction, pixels)
			}
			// Stretch the contrast on a copy of the pixels
			if glitch {
				pixelpusher.GlitchyStretchContrast(cores, pixelCopy, pitch, cycleTime)
			} else {
				pixelpusher.StretchContrast(cores, pixelCopy, pitch, cycleTime)
			}
			// Upload the post-processed copy and present it.
			texture.UpdateRGBA(nil, pixelCopy, pitch)
			renderer.Copy(texture, nil, nil)
			renderer.Present()
			if recording {
				filename := fmt.Sprintf("frame%05d.png", frameCounter)
				pixelpusher.SavePixelsToPNG(pixelCopy, pitch, filename, true)
				frameCounter++
			}
		}
		// Check for events
		for event = sdl.PollEvent(); event != nil; event = sdl.PollEvent() {
			switch event.(type) {
			case *sdl.QuitEvent:
				quit = true
			case *sdl.KeyboardEvent:
				ke := event.(*sdl.KeyboardEvent)
				if ke.Type == sdl.KEYDOWN {
					ks := ke.Keysym
					switch ks.Sym {
					case sdl.K_ESCAPE:
						quit = true
					case sdl.K_q:
						quit = true
					case sdl.K_RETURN:
						altHeldDown := ks.Mod == sdl.KMOD_LALT || ks.Mod == sdl.KMOD_RALT
						if !altHeldDown {
							// alt+enter is not pressed
							break
						}
						// alt+enter is pressed
						fallthrough
					case sdl.K_f, sdl.K_F11:
						sdl2utils.ToggleFullscreen(window)
					case sdl.K_p:
						// pause toggle
						pause = !pause
					case sdl.K_s:
						ctrlHeldDown := ks.Mod == sdl.KMOD_LCTRL || ks.Mod == sdl.KMOD_RCTRL
						if !ctrlHeldDown {
							// ctrl+s is not pressed
							break
						}
						// ctrl+s is pressed
						fallthrough
					case sdl.K_SPACE:
						// Toggle the use of the alternative glitch functions, and back
						glitch = !glitch
					case sdl.K_F12:
						// screenshot
						sdl2utils.Screenshot(renderer, "screenshot.png", true)
					case sdl.K_r:
						// recording
						recording = !recording
						frameCounter = 0
					}
				}
			}
		}
		sdl.Delay(1000 / frameRate)
		loopCounter++
	}
	return 0
}
// main delegates to run and exits with its return code.
func main() {
	// This is to allow the deferred functions in run() to kick in at exit
	os.Exit(run())
} | cmd/strobe/main.go | 0.674694 | 0.493714 | main.go | starcoder |
package models
// There are a few methods of PatternPhase we can implement uniformly, and
// that we also want access to when implementing the unique logic for a given
// phase. We can embed this type in our specific implementations in order to
// gain access to these methods.
type phaseCoreAuto struct {
	ticker *PriceTicker
	// The current length in price periods for this phase.
	length int
	// Should be incremented every time `PossibleLengths` is called. Used to determine
	// what length calculation pass we are on
	pass int
	// Set when there will be no further possible lengths
	possibilitiesComplete bool
	// Set once a length is recorded after possibilities are complete.
	isFinal bool
}
// SetTicker is called by the predictor when setting up a prediction; it
// stores the real-world pricing information for this phase.
func (p *phaseCoreAuto) SetTicker(ticker *PriceTicker) {
	p.ticker = ticker
}
// Ticker returns the price ticker previously stored via SetTicker.
func (p *phaseCoreAuto) Ticker() *PriceTicker {
	return p.ticker
}
// Pass reports how many times IncrementPass has been called — i.e. how many
// times the predictor has requested PossibleLengths while iterating over
// potential lengths.
func (p *phaseCoreAuto) Pass() int {
	return p.pass
}
// IncrementPass advances the pass counter; call it whenever PossibleLengths
// is invoked so the predictor's iteration count is tracked.
func (p *phaseCoreAuto) IncrementPass() {
	p.pass += 1
}
// Length returns the phase's current length in price periods. This may be a
// temporary value: some phases pass through several candidate lengths before
// the calculation settles.
func (p *phaseCoreAuto) Length() int {
	return p.length
}
// SetLength records a candidate length for this phase (called by the
// predictor). When no further possibilities remain, the candidate is also
// the final length and the phase is finalized.
func (p *phaseCoreAuto) SetLength(length int) {
	p.length = length
	if p.possibilitiesComplete {
		// No more candidates will be offered, so this value sticks.
		p.isFinal = true
	}
}
// IsFinal reports whether the current length is the final length.
func (p *phaseCoreAuto) IsFinal() bool {
	return p.isFinal
}
// After this is called, the next SetLength will mark the phase final
// (IsFinal() will then return true).
func (phase *phaseCoreAuto) PossibilitiesComplete() {
	phase.possibilitiesComplete = true
} | models/phaseAutoCore.go | 0.841077 | 0.544075 | phaseAutoCore.go | starcoder |
package models
import (
i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization"
)
// WorkbookSortField describes one sorting condition (key column/row, sort
// direction, and what aspect of the cell to sort on) in a workbook sort.
type WorkbookSortField struct {
	// Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well.
	additionalData map[string]interface{}
	// Represents whether the sorting is done in an ascending fashion.
	ascending *bool
	// Represents the color that is the target of the condition if the sorting is on font or cell color.
	color *string
	// Represents additional sorting options for this field. Possible values are: Normal, TextAsNumber.
	dataOption *string
	// Represents the icon that is the target of the condition if the sorting is on the cell's icon.
	icon WorkbookIconable
	// Represents the column (or row, depending on the sort orientation) that the condition is on. Represented as an offset from the first column (or row).
	key *int32
	// Represents the type of sorting of this condition. Possible values are: Value, CellColor, FontColor, Icon.
	sortOn *string
}
// NewWorkbookSortField instantiates a new WorkbookSortField with an empty
// additional-data map.
func NewWorkbookSortField() *WorkbookSortField {
	field := &WorkbookSortField{}
	field.SetAdditionalData(make(map[string]interface{}))
	return field
}
// CreateWorkbookSortFieldFromDiscriminatorValue creates a new instance of the
// appropriate class based on discriminator value. The parse node is unused
// because WorkbookSortField has no subtypes to discriminate between.
func CreateWorkbookSortFieldFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) (i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) {
	return NewWorkbookSortField(), nil
}
// GetAdditionalData gets the additionalData property value: data not
// described in the OpenAPI description found when deserializing. Returns nil
// on a nil receiver. (Rewritten with an early return instead of else-after-return.)
func (m *WorkbookSortField) GetAdditionalData() map[string]interface{} {
	if m == nil {
		return nil
	}
	return m.additionalData
}
// GetAscending gets the ascending property value: whether the sorting is
// done in an ascending fashion. Returns nil on a nil receiver.
func (m *WorkbookSortField) GetAscending() *bool {
	if m == nil {
		return nil
	}
	return m.ascending
}
// GetColor gets the color property value: the color targeted when sorting on
// font or cell color. Returns nil on a nil receiver.
func (m *WorkbookSortField) GetColor() *string {
	if m == nil {
		return nil
	}
	return m.color
}
// GetDataOption gets the dataOption property value: additional sorting
// options (Normal, TextAsNumber). Returns nil on a nil receiver.
func (m *WorkbookSortField) GetDataOption() *string {
	if m == nil {
		return nil
	}
	return m.dataOption
}
// GetFieldDeserializers returns, keyed by JSON property name, the callbacks
// that parse each property of the current model. Each callback only assigns
// the field when the parsed value is non-nil.
func (m *WorkbookSortField) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) {
	res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error))
	res["ascending"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
		val, err := n.GetBoolValue()
		if err != nil {
			return err
		}
		if val != nil {
			m.SetAscending(val)
		}
		return nil
	}
	res["color"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
		val, err := n.GetStringValue()
		if err != nil {
			return err
		}
		if val != nil {
			m.SetColor(val)
		}
		return nil
	}
	res["dataOption"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
		val, err := n.GetStringValue()
		if err != nil {
			return err
		}
		if val != nil {
			m.SetDataOption(val)
		}
		return nil
	}
	res["icon"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
		// icon is a nested object; delegate to its factory.
		val, err := n.GetObjectValue(CreateWorkbookIconFromDiscriminatorValue)
		if err != nil {
			return err
		}
		if val != nil {
			m.SetIcon(val.(WorkbookIconable))
		}
		return nil
	}
	res["key"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
		val, err := n.GetInt32Value()
		if err != nil {
			return err
		}
		if val != nil {
			m.SetKey(val)
		}
		return nil
	}
	res["sortOn"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error {
		val, err := n.GetStringValue()
		if err != nil {
			return err
		}
		if val != nil {
			m.SetSortOn(val)
		}
		return nil
	}
	return res
}
// GetIcon gets the icon property value: the icon targeted when sorting on
// the cell's icon. Returns nil on a nil receiver.
func (m *WorkbookSortField) GetIcon() WorkbookIconable {
	if m == nil {
		return nil
	}
	return m.icon
}
// GetKey gets the key property value: the column (or row, depending on sort
// orientation) the condition is on, as an offset from the first column/row.
// Returns nil on a nil receiver.
func (m *WorkbookSortField) GetKey() *int32 {
	if m == nil {
		return nil
	}
	return m.key
}
// GetSortOn gets the sortOn property value: the type of sorting (Value,
// CellColor, FontColor, Icon). Returns nil on a nil receiver.
func (m *WorkbookSortField) GetSortOn() *string {
	if m == nil {
		return nil
	}
	return m.sortOn
}
// Serialize writes each property of the sort field, followed by the
// additional-data bag, to the given serialization writer, returning the
// first write error encountered.
func (m *WorkbookSortField) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) {
	{
		err := writer.WriteBoolValue("ascending", m.GetAscending())
		if err != nil {
			return err
		}
	}
	{
		err := writer.WriteStringValue("color", m.GetColor())
		if err != nil {
			return err
		}
	}
	{
		err := writer.WriteStringValue("dataOption", m.GetDataOption())
		if err != nil {
			return err
		}
	}
	{
		err := writer.WriteObjectValue("icon", m.GetIcon())
		if err != nil {
			return err
		}
	}
	{
		err := writer.WriteInt32Value("key", m.GetKey())
		if err != nil {
			return err
		}
	}
	{
		err := writer.WriteStringValue("sortOn", m.GetSortOn())
		if err != nil {
			return err
		}
	}
	{
		err := writer.WriteAdditionalData(m.GetAdditionalData())
		if err != nil {
			return err
		}
	}
	return nil
}
// SetAdditionalData sets the additionalData property value: data not
// described in the OpenAPI description. No-op on a nil receiver.
// (Rewritten with an early return instead of a wrapping if.)
func (m *WorkbookSortField) SetAdditionalData(value map[string]interface{}) {
	if m == nil {
		return
	}
	m.additionalData = value
}
// SetAscending sets the ascending property value: whether sorting is done in
// an ascending fashion. No-op on a nil receiver.
func (m *WorkbookSortField) SetAscending(value *bool) {
	if m == nil {
		return
	}
	m.ascending = value
}
// SetColor sets the color property value: the color targeted when sorting on
// font or cell color. No-op on a nil receiver.
func (m *WorkbookSortField) SetColor(value *string) {
	if m == nil {
		return
	}
	m.color = value
}
// SetDataOption sets the dataOption property value: additional sorting
// options (Normal, TextAsNumber). No-op on a nil receiver.
func (m *WorkbookSortField) SetDataOption(value *string) {
	if m == nil {
		return
	}
	m.dataOption = value
}
// SetIcon sets the icon property value. Represents the icon that is the target of the condition if the sorting is on the cell's icon.
func (m *WorkbookSortField) SetIcon(value WorkbookIconable)() {
if m != nil {
m.icon = value
}
}
// SetKey sets the key property value. Represents the column (or row, depending on the sort orientation) that the condition is on. Represented as an offset from the first column (or row).
func (m *WorkbookSortField) SetKey(value *int32)() {
if m != nil {
m.key = value
}
}
// SetSortOn sets the sortOn property value. Represents the type of sorting of this condition. Possible values are: Value, CellColor, FontColor, Icon.
func (m *WorkbookSortField) SetSortOn(value *string)() {
if m != nil {
m.sortOn = value
}
} | models/workbook_sort_field.go | 0.72594 | 0.600569 | workbook_sort_field.go | starcoder |
package binding
import (
"fmt"
"reflect"
"runtime"
)
// isStructPtr reports whether value is a pointer to a struct.
func isStructPtr(value interface{}) bool {
    v := reflect.ValueOf(value)
    return v.Kind() == reflect.Ptr && v.Elem().Kind() == reflect.Struct
}
// isFunction reports whether the given value is a function.
func isFunction(value interface{}) bool {
    kind := reflect.ValueOf(value).Kind()
    return kind == reflect.Func
}
// isStruct returns true if the value given is a struct value
// (not a pointer to one).
func isStruct(value interface{}) bool {
    return reflect.ValueOf(value).Kind() == reflect.Struct
}
func (b *Bindings) getMethods(value interface{}) ([]*BoundMethod, error) {
// Create result placeholder
var result []*BoundMethod
// Check type
if !isStructPtr(value) {
if isStruct(value) {
name := reflect.ValueOf(value).Type().Name()
return nil, fmt.Errorf("%s is a struct, not a pointer to a struct", name)
}
if isFunction(value) {
name := runtime.FuncForPC(reflect.ValueOf(value).Pointer()).Name()
return nil, fmt.Errorf("%s is a function, not a pointer to a struct. Wails v2 has deprecated the binding of functions. Please wrap your functions up in a struct and bind a pointer to that struct.", name)
}
return nil, fmt.Errorf("not a pointer to a struct.")
}
// Process Struct
structType := reflect.TypeOf(value)
structValue := reflect.ValueOf(value)
baseName := structType.String()[1:]
// Process Methods
for i := 0; i < structType.NumMethod(); i++ {
methodDef := structType.Method(i)
methodName := methodDef.Name
fullMethodName := baseName + "." + methodName
method := structValue.MethodByName(methodName)
methodReflectName := runtime.FuncForPC(methodDef.Func.Pointer()).Name()
if b.exemptions.Contains(methodReflectName) {
continue
}
// Create new method
boundMethod := &BoundMethod{
Name: fullMethodName,
Inputs: nil,
Outputs: nil,
Comments: "",
Method: method,
}
// Iterate inputs
methodType := method.Type()
inputParamCount := methodType.NumIn()
var inputs []*Parameter
for inputIndex := 0; inputIndex < inputParamCount; inputIndex++ {
input := methodType.In(inputIndex)
thisParam := newParameter("", input)
// Process struct pointer params
if input.Kind() == reflect.Ptr {
if input.Elem().Kind() == reflect.Struct {
typ := input.Elem()
a := reflect.New(typ)
s := reflect.Indirect(a).Interface()
b.converter.Add(s)
}
}
// Process struct params
if input.Kind() == reflect.Struct {
a := reflect.New(input)
s := reflect.Indirect(a).Interface()
b.converter.Add(s)
}
inputs = append(inputs, thisParam)
}
boundMethod.Inputs = inputs
// Iterate outputs
// TODO: Determine what to do about limiting return types
// especially around errors.
outputParamCount := methodType.NumOut()
var outputs []*Parameter
for outputIndex := 0; outputIndex < outputParamCount; outputIndex++ {
output := methodType.Out(outputIndex)
thisParam := newParameter("", output)
outputs = append(outputs, thisParam)
}
boundMethod.Outputs = outputs
// Save method in result
result = append(result, boundMethod)
}
return result, nil
} | v2/internal/binding/reflect.go | 0.598077 | 0.416975 | reflect.go | starcoder |
package tart
// On Balance Volume (OBV) measures buying and selling
// pressure as a cumulative indicator, adding volume on
// up days and subtracting it on down days. OBV was
// developed by <NAME> and introduced in his 1963
// book Granville's New Key to Stock Market Profits.
// It was one of the first indicators to measure positive
// and negative volume flow. Chartists can look for
// divergences between OBV and price to predict price
// movements or use OBV to confirm price trends.
// https://school.stockcharts.com/doku.php?id=technical_indicators:on_balance_volume_obv
// https://www.investopedia.com/terms/o/onbalancevolume.asp
// https://www.fidelity.com/learning-center/trading-investing/technical-analysis/technical-indicator-guide/obv
type Obv struct {
    prev float64 // most recently observed close price
    obv float64 // running on-balance volume total
    sz int64 // number of samples processed so far
}
// NewObv creates an On Balance Volume accumulator in its initial state.
func NewObv() *Obv {
    // The zero value (no samples, zero totals) is exactly the initial state.
    return &Obv{}
}
// Update feeds one (close, volume) sample and returns the running OBV.
// The very first sample seeds the total with its volume; afterwards volume
// is added on up-closes, subtracted on down-closes and ignored when the
// close is unchanged.
func (o *Obv) Update(c, v float64) float64 {
    o.sz++
    last := o.prev
    o.prev = c
    if o.sz == 1 {
        o.obv = v
        return o.obv
    }
    switch {
    case c > last:
        o.obv += v
    case c < last:
        o.obv -= v
    }
    return o.obv
}
// InitPeriod returns the number of warm-up samples required before the
// output is meaningful; OBV needs none.
func (o *Obv) InitPeriod() int64 {
    return 0
}
// Valid reports whether the indicator has seen enough data to produce
// valid output; OBV is valid from the first sample.
func (o *Obv) Valid() bool {
    return true
}
// On Balance Volume (OBV) measures buying and selling
// pressure as a cumulative indicator, adding volume on
// up days and subtracting it on down days. OBV was
// developed by <NAME> and introduced in his 1963
// book Granville's New Key to Stock Market Profits.
// It was one of the first indicators to measure positive
// and negative volume flow. Chartists can look for
// divergences between OBV and price to predict price
// movements or use OBV to confirm price trends.
// https://school.stockcharts.com/doku.php?id=technical_indicators:on_balance_volume_obv
// https://www.investopedia.com/terms/o/onbalancevolume.asp
// https://www.fidelity.com/learning-center/trading-investing/technical-analysis/technical-indicator-guide/obv
func ObvArr(c, v []float64) []float64 {
out := make([]float64, len(c))
o := NewObv()
for i := 0; i < len(c); i++ {
out[i] = o.Update(c[i], v[i])
}
return out
} | obv.go | 0.640186 | 0.53777 | obv.go | starcoder |
package channeld
import (
"errors"
"fmt"
"math"
"channeld.clewcat.com/channeld/pkg/channeldpb"
"channeld.clewcat.com/channeld/pkg/common"
"go.uber.org/zap"
"google.golang.org/protobuf/encoding/protojson"
"google.golang.org/protobuf/types/known/anypb"
)
// SpatialController defines how the world is partitioned into spatial
// channels: position-to-channel mapping, region/adjacency queries, channel
// creation for spatial servers, and handover notification.
type SpatialController interface {
    // Notify() is called in the spatial channels (shared instance)
    common.SpatialInfoChangedNotifier
    // Called in GLOBAL and spatial channels
    GetChannelId(info common.SpatialInfo) (ChannelId, error)
    // Called in GLOBAL channel
    GetRegions() ([]*channeldpb.SpatialRegion, error)
    // Called in any spatial channel
    GetAdjacentChannels(spatialChannelId ChannelId) ([]ChannelId, error)
    // Called in GLOBAL channel
    CreateChannels(ctx MessageContext) ([]*Channel, error)
    // Called in GLOBAL channel
    Tick()
}
// A channeld instance should have only one SpatialController
var spatialController SpatialController
// InitSpatialController installs the process-wide spatial controller.
// It is not synchronized; call it once during start-up, before any
// channel traffic.
func InitSpatialController(controller SpatialController) {
    spatialController = controller
}
// Divide the world into GridCols x GridRows static squares on the XZ plane. Each square(grid) represents a spatial channel.
// Typically, a player's view distance is 150m, so a grid is sized at 50x50m.
// A 100x100 km world has 2000x2000 grids, which needs about 2^22 spatial channels.
// By default, we support up to 2^32-2^16 grid-based spatial channels.
type StaticGrid2DSpatialController struct {
    SpatialController
    /* Defines how the world is divided into grids */
    // The width of a grid in simulation/engine units
    GridWidth float64
    // The heights of a grid in the simulation/engine units
    GridHeight float64
    // How many grids the world has in X axis. The width of the world = GridWidth x GridCols.
    GridCols uint32
    // How many grids the world has in Z axis. The height of the world = GridHeight x GridRows.
    GridRows uint32
    // WorldWidth float64
    // WorldHeight float64
    // In the right-handed coordinate system, the difference between the world origin and the top-right corner of the first grid, in the simulation/engine units.
    // This is how we uses the offset value to calculate which grid a (x,z) point is in: gridX = Floor((x - OffsetX) / GridWidth), gridY = Floor((y - OffsetY) / GridHeight)
    // If the world origin is exactly in the middle of the world, the offset should be (-WorldWidth*0.5, -WorldHeight*0.5).
    WorldOffsetX float64
    WorldOffsetZ float64
    /* Defines the authority area of a spatial server, as well as the number of the servers (= ServerCols * ServerRows) */
    // How many servers the world has in X axis.
    ServerCols uint32
    // How many servers the world has in Z axis.
    ServerRows uint32
    /* Defines the extra interest area a spatial server has, adjacent to the authority area */
    // For each side of a server's grids (authority area), how many grids(spatial channels) the server subscribes to, as the extend of its interest area.
    // For example, ServerInterestBorderSize = 1 means a spatial server of 3x3 grids has interest area of 4x4 grids.
    // Remarks: the value should always be less than the size of the authority area (=Min(GridCols/ServerCols, GridRows/ServerRows))
    ServerInterestBorderSize uint32
    //serverIndex uint32
    // Connection of each spatial server, indexed by server index; filled
    // lazily as servers call CreateChannels.
    serverConnections []ConnectionInChannel
}
// GetChannelId maps a world-space position to its spatial channel id,
// applying the configured world offset.
func (ctl *StaticGrid2DSpatialController) GetChannelId(info common.SpatialInfo) (ChannelId, error) {
    return ctl.GetChannelIdWithOffset(info, ctl.WorldOffsetX, ctl.WorldOffsetZ)
}
// GetChannelIdNoOffset maps a position to a channel id assuming a (0,0)
// origin; used internally when positions are synthesized from grid indices.
func (ctl *StaticGrid2DSpatialController) GetChannelIdNoOffset(info common.SpatialInfo) (ChannelId, error) {
    return ctl.GetChannelIdWithOffset(info, 0, 0)
}
// GetChannelIdWithOffset converts a world position into a spatial channel
// id using the given origin offset. An error is returned when the position
// falls outside the [0,GridCols) x [0,GridRows) grid range.
func (ctl *StaticGrid2DSpatialController) GetChannelIdWithOffset(info common.SpatialInfo, offsetX float64, offsetZ float64) (ChannelId, error) {
    col := int(math.Floor((info.X - offsetX) / ctl.GridWidth))
    if col < 0 || col >= int(ctl.GridCols) {
        return 0, fmt.Errorf("gridX=%d when X=%f. GridX should be in [0,%d)", col, info.X, ctl.GridCols)
    }
    row := int(math.Floor((info.Z - offsetZ) / ctl.GridHeight))
    if row < 0 || row >= int(ctl.GridRows) {
        return 0, fmt.Errorf("gridY=%d when Z=%f. GridY should be in [0,%d)", row, info.Z, ctl.GridRows)
    }
    // Row-major grid index, offset into the spatial channel id range.
    index := uint32(col) + uint32(row)*ctl.GridCols
    return ChannelId(index) + GlobalSettings.SpatialChannelIdStart, nil
}
// GetRegions returns one SpatialRegion per grid, carrying its world-space
// bounds, channel id, and the index of the spatial server whose authority
// area contains it.
func (ctl *StaticGrid2DSpatialController) GetRegions() ([]*channeldpb.SpatialRegion, error) {
    // How many grids a server has in X axis (ceiling division)
    serverGridCols := ctl.GridCols / ctl.ServerCols
    if ctl.GridCols%ctl.ServerCols > 0 {
        serverGridCols++
    }
    // How many grids a server has in Z axis (ceiling division)
    serverGridRows := ctl.GridRows / ctl.ServerRows
    if ctl.GridRows%ctl.ServerRows > 0 {
        serverGridRows++
    }
    regions := make([]*channeldpb.SpatialRegion, ctl.GridCols*ctl.GridRows)
    for y := uint32(0); y < ctl.GridRows; y++ {
        for x := uint32(0); x < ctl.GridCols; x++ {
            // Row-major grid index; also the offset of the region's channel id.
            index := x + y*ctl.GridCols
            // Which server's authority area this grid falls into.
            serverX := x / serverGridCols
            serverY := y / serverGridRows
            regions[index] = &channeldpb.SpatialRegion{
                Min: &channeldpb.SpatialInfo{
                    X: ctl.WorldOffsetX + ctl.GridWidth*float64(x),
                    Z: ctl.WorldOffsetZ + ctl.GridHeight*float64(y),
                },
                Max: &channeldpb.SpatialInfo{
                    X: ctl.WorldOffsetX + ctl.GridWidth*float64(x+1),
                    Z: ctl.WorldOffsetZ + ctl.GridHeight*float64(y+1),
                },
                ChannelId: uint32(GlobalSettings.SpatialChannelIdStart) + index,
                ServerIndex: serverX + serverY*ctl.ServerCols,
            }
        }
    }
    return regions, nil
}
// GetAdjacentChannels returns the channel ids of the up-to-8 grids
// surrounding the given spatial channel, clipped to the world bounds and
// excluding the channel itself.
func (ctl *StaticGrid2DSpatialController) GetAdjacentChannels(spatialChannelId ChannelId) ([]ChannelId, error) {
    index := uint32(spatialChannelId - GlobalSettings.SpatialChannelIdStart)
    gridX := int32(index % ctl.GridCols)
    gridY := int32(index / ctl.GridCols)
    channelIds := make([]ChannelId, 0)
    for y := gridY - 1; y <= gridY+1; y++ {
        // BUGFIX: valid row indices are [0, GridRows); the previous bound
        // (GridRows-1) wrongly excluded the last row.
        if y < 0 || y >= int32(ctl.GridRows) {
            continue
        }
        for x := gridX - 1; x <= gridX+1; x++ {
            // BUGFIX: same off-by-one for the last column.
            if x < 0 || x >= int32(ctl.GridCols) {
                continue
            }
            // Skip the center grid itself.
            if x == gridX && y == gridY {
                continue
            }
            channelIndex := uint32(x) + uint32(y)*ctl.GridCols
            channelIds = append(channelIds, ChannelId(channelIndex)+GlobalSettings.SpatialChannelIdStart)
        }
    }
    return channelIds, nil
}
// CreateChannels allocates the next free spatial-server slot to the calling
// connection, creates all the spatial channels in that server's authority
// area, and — once every server slot is filled — subscribes every server to
// its adjacent border channels.
func (ctl *StaticGrid2DSpatialController) CreateChannels(ctx MessageContext) ([]*Channel, error) {
    ctl.initServerConnections()
    serverIndex := ctl.nextServerIndex()
    if serverIndex >= ctl.ServerCols*ctl.ServerRows {
        // BUGFIX: "spatail" typo in the error message.
        return nil, fmt.Errorf("failed to create spatial channel as all %d grids are allocated to %d servers", ctl.GridCols*ctl.GridRows, ctl.ServerCols*ctl.ServerRows)
    }
    msg, ok := ctx.Msg.(*channeldpb.CreateChannelMessage)
    if !ok {
        return nil, errors.New("ctx.Msg is not a CreateChannelMessage, will not be handled")
    }
    // How many grids a server has in X axis (ceiling division)
    serverGridCols := ctl.GridCols / ctl.ServerCols
    if ctl.GridCols%ctl.ServerCols > 0 {
        serverGridCols++
    }
    // How many grids a server has in Z axis (ceiling division)
    serverGridRows := ctl.GridRows / ctl.ServerRows
    if ctl.GridRows%ctl.ServerRows > 0 {
        serverGridRows++
    }
    channelIds := make([]ChannelId, serverGridCols*serverGridRows)
    serverX := serverIndex % ctl.ServerCols
    serverY := serverIndex / ctl.ServerCols
    var spatialInfo common.SpatialInfo
    // Compute the channel id of every grid in this server's authority area.
    for y := uint32(0); y < serverGridRows; y++ {
        for x := uint32(0); x < serverGridCols; x++ {
            spatialInfo.X = float64(serverX*serverGridCols+x) * ctl.GridWidth
            spatialInfo.Z = float64(serverY*serverGridRows+y) * ctl.GridHeight
            channelId, err := ctl.GetChannelIdNoOffset(spatialInfo)
            if err != nil {
                return nil, err
            }
            channelIds[x+y*serverGridCols] = channelId
        }
    }
    channels := make([]*Channel, len(channelIds))
    for index, channelId := range channelIds {
        channel := createChannelWithId(channelId, channeldpb.ChannelType_SPATIAL, ctx.Connection)
        if msg.Data != nil {
            dataMsg, err := msg.Data.UnmarshalNew()
            if err != nil {
                return nil, fmt.Errorf("failed to unmarshal data message for the new channel: %v", err)
            } else {
                channel.InitData(dataMsg, msg.MergeOptions)
            }
        } else {
            // Channel data should always be initialized
            channel.InitData(nil, msg.MergeOptions)
        }
        channels[index] = channel
    }
    // Save the connection for later use
    ctl.serverConnections[serverIndex] = ctx.Connection
    //ctl.serverIndex++
    serverIndex = ctl.nextServerIndex()
    // When all spatial channels are created, subscribe each server to its adjacent grids(channels)
    if serverIndex == ctl.ServerCols*ctl.ServerRows {
        for i := uint32(0); i < serverIndex; i++ {
            err := ctl.subToAdjacentChannels(i, serverGridCols, serverGridRows, msg.SubOptions)
            if err != nil {
                return channels, fmt.Errorf("failed to sub to adjacent channels of server connection %d, err: %v", ctl.serverConnections[i].Id(), err)
            }
        }
    }
    return channels, nil
}
// subToAdjacentChannels subscribes the spatial server at serverIndex to the
// grid channels that border its authority area, extending its interest area
// by ServerInterestBorderSize grids on each side that has a neighboring
// server. All spatial channels must already exist.
// NOTE(review): the "Right"/"Left" labels below cover grids at smaller and
// larger X respectively; whether that is visually right or left depends on
// the engine's axis convention — confirm.
func (ctl *StaticGrid2DSpatialController) subToAdjacentChannels(serverIndex uint32, serverGridCols uint32, serverGridRows uint32, subOptions *channeldpb.ChannelSubscriptionOptions) error {
    serverConn := ctl.serverConnections[serverIndex]
    serverX := serverIndex % ctl.ServerCols
    serverY := serverIndex / ctl.ServerCols
    // Position of the server's first (min-X, min-Z) grid.
    spatialInfo := common.SpatialInfo{
        X: float64(serverX*serverGridCols) * ctl.GridWidth,
        Z: float64(serverY*serverGridRows) * ctl.GridHeight,
    }
    serverChannelId, err := ctl.GetChannelIdNoOffset(spatialInfo)
    if err != nil {
        return err
    }
    serverChannel := GetChannel(serverChannelId)
    if serverChannel == nil {
        return fmt.Errorf("failed to subscribe to adjacent channels for %d as it doesn't exist", serverChannelId)
    }
    // Right border (grids just below the server's min X)
    if serverX > 0 {
        for y := uint32(0); y < serverGridRows; y++ {
            for x := uint32(1); x <= ctl.ServerInterestBorderSize; x++ {
                spatialInfo.X = float64(serverX*serverGridCols-x) * ctl.GridWidth
                spatialInfo.Z = float64(serverY*serverGridRows+y) * ctl.GridHeight
                channelId, err := ctl.GetChannelIdNoOffset(spatialInfo)
                if err != nil {
                    return err
                }
                channelToSub := GetChannel(channelId)
                if channelToSub == nil {
                    return fmt.Errorf("failed to subscribe border channel %d as it doesn't exist", channelId)
                }
                // A non-nil subscription state means a new subscription was made;
                // notify the server connection about it.
                cs := serverConn.SubscribeToChannel(channelToSub, subOptions)
                if cs != nil {
                    serverConn.sendSubscribed(MessageContext{}, channelToSub, serverConn, 0, &cs.options)
                }
            }
        }
    }
    // Left border (grids just past the server's max X)
    if serverX < ctl.ServerCols-1 {
        for y := uint32(0); y < serverGridRows; y++ {
            for x := uint32(0); x < ctl.ServerInterestBorderSize; x++ {
                spatialInfo.X = float64((serverX+1)*serverGridCols+x) * ctl.GridWidth
                spatialInfo.Z = float64(serverY*serverGridRows+y) * ctl.GridHeight
                channelId, err := ctl.GetChannelIdNoOffset(spatialInfo)
                if err != nil {
                    return err
                }
                channelToSub := GetChannel(channelId)
                if channelToSub == nil {
                    return fmt.Errorf("failed to subscribe border channel %d as it doesn't exist", channelId)
                }
                cs := serverConn.SubscribeToChannel(channelToSub, subOptions)
                if cs != nil {
                    serverConn.sendSubscribed(MessageContext{}, channelToSub, serverConn, 0, &cs.options)
                }
            }
        }
    }
    // Top border (grids just below the server's min Z)
    if serverY > 0 {
        for y := uint32(1); y <= ctl.ServerInterestBorderSize; y++ {
            for x := uint32(0); x < serverGridCols; x++ {
                spatialInfo.X = float64(serverX*serverGridCols+x) * ctl.GridWidth
                spatialInfo.Z = float64(serverY*serverGridRows-y) * ctl.GridHeight
                channelId, err := ctl.GetChannelIdNoOffset(spatialInfo)
                if err != nil {
                    return err
                }
                channelToSub := GetChannel(channelId)
                if channelToSub == nil {
                    return fmt.Errorf("failed to subscribe border channel %d as it doesn't exist", channelId)
                }
                cs := serverConn.SubscribeToChannel(channelToSub, subOptions)
                if cs != nil {
                    serverConn.sendSubscribed(MessageContext{}, channelToSub, serverConn, 0, &cs.options)
                }
            }
        }
    }
    // Bottom border (grids just past the server's max Z)
    if serverY < ctl.ServerRows-1 {
        for y := uint32(0); y < ctl.ServerInterestBorderSize; y++ {
            for x := uint32(0); x < serverGridCols; x++ {
                spatialInfo.X = float64(serverX*serverGridCols+x) * ctl.GridWidth
                spatialInfo.Z = float64((serverY+1)*serverGridRows+y) * ctl.GridHeight
                channelId, err := ctl.GetChannelIdNoOffset(spatialInfo)
                if err != nil {
                    return err
                }
                channelToSub := GetChannel(channelId)
                if channelToSub == nil {
                    return fmt.Errorf("failed to subscribe border channel %d as it doesn't exist", channelId)
                }
                cs := serverConn.SubscribeToChannel(channelToSub, subOptions)
                if cs != nil {
                    serverConn.sendSubscribed(MessageContext{}, channelToSub, serverConn, 0, &cs.options)
                }
            }
        }
    }
    return nil
}
// Compact (single-line) JSON formatting for debug-logging handover data.
var dataMarshalOptions = protojson.MarshalOptions{Multiline: false}
// Notify runs in the source spatial channel (shared controller instance).
// It maps the old and new spatial info to channel ids and, when they
// differ, broadcasts a ChannelDataHandoverMessage to every connection
// subscribed to either channel, taking care to send each connection the
// message only once.
func (ctl *StaticGrid2DSpatialController) Notify(oldInfo common.SpatialInfo, newInfo common.SpatialInfo, handoverDataProvider func() common.ChannelDataMessage) {
    srcChannelId, err := ctl.GetChannelId(oldInfo)
    if err != nil {
        rootLogger.Error("failed to calculate srcChannelId", zap.Error(err))
        return
    }
    dstChannelId, err := ctl.GetChannelId(newInfo)
    if err != nil {
        rootLogger.Error("failed to calculate dstChannelId", zap.Error(err))
        return
    }
    // No migration between channels
    if dstChannelId == srcChannelId {
        return
    }
    srcChannel := GetChannel(srcChannelId)
    if srcChannel == nil {
        rootLogger.Error("channel doesn't exist, failed to handover channel data", zap.Uint32("srcChannelId", uint32(srcChannelId)))
        return
    }
    // A missing owner is logged but does not abort the handover.
    if !srcChannel.HasOwner() {
        rootLogger.Error("channel doesn't have owner, failed to handover channel data", zap.Uint32("srcChannelId", uint32(srcChannelId)))
    }
    dstChannel := GetChannel(dstChannelId)
    if dstChannel == nil {
        rootLogger.Error("channel doesn't exist, failed to handover channel data", zap.Uint32("dstChannelId", uint32(dstChannelId)))
        return
    }
    // BUGFIX: previously re-checked srcChannel while logging about the dst
    // channel; the destination's owner is what this check reports on.
    if !dstChannel.HasOwner() {
        rootLogger.Error("channel doesn't have owner, failed to handover channel data", zap.Uint32("dstChannelId", uint32(dstChannelId)))
    }
    // Handover data is provided by the Merger [channeld.MergeableChannelData]
    handoverData := handoverDataProvider()
    if handoverData == nil {
        // BUGFIX: "provider" -> "provide" in the log message.
        rootLogger.Error("failed to provide handover channel data", zap.Uint32("srcChannelId", uint32(srcChannelId)), zap.Uint32("dstChannelId", uint32(dstChannelId)))
        return
    }
    rootLogger.Debug("handover channel data", zap.Uint32("srcChannelId", uint32(srcChannelId)), zap.Uint32("dstChannelId", uint32(dstChannelId)),
        zap.String("data", dataMarshalOptions.Format(handoverData)))
    anyData, err := anypb.New(handoverData)
    if err != nil {
        rootLogger.Error("failed to marshall handover data", zap.Error(err))
        return
    }
    /*
        newChannel.PutMessage(&channeldpb.ChannelDataUpdateMessage{
            Data: anyData,
        }, handleChannelDataUpdate, internalDummyConnection, &channeldpb.MessagePack{
            MsgType:   uint32(channeldpb.MessageType_CHANNEL_DATA_UPDATE),
            Broadcast: channeldpb.BroadcastType_NO_BROADCAST,
            StubId:    0,
            ChannelId: uint32(dstChannelId),
        })
    */
    handoverMsg := &channeldpb.ChannelDataHandoverMessage{
        SrcChannelId:  uint32(srcChannelId),
        DstChannelId:  uint32(dstChannelId),
        Data:          anyData,
        ContextConnId: uint32(srcChannel.latestDataUpdateConnId),
    }
    // Avoid duplicate sending: connections subscribed to both channels only
    // receive the message once (tagged with the dst channel id).
    // Race Condition: reading dstChannel's subscribedConnections in srcChannel
    conns := dstChannel.GetAllConnections()
    for conn := range srcChannel.subscribedConnections {
        if _, exists := conns[conn]; !exists {
            conn.Send(MessageContext{
                MsgType:   channeldpb.MessageType_CHANNEL_DATA_HANDOVER,
                Msg:       handoverMsg,
                Broadcast: channeldpb.BroadcastType_NO_BROADCAST,
                StubId:    0,
                ChannelId: uint32(srcChannelId),
            })
        }
    }
    for conn := range conns {
        conn.Send(MessageContext{
            MsgType:   channeldpb.MessageType_CHANNEL_DATA_HANDOVER,
            Msg:       handoverMsg,
            Broadcast: channeldpb.BroadcastType_NO_BROADCAST,
            StubId:    0,
            ChannelId: uint32(dstChannelId),
        })
    }
    /* Broadcast in both channels can cause duplicate sending
    srcChannel.Broadcast(MessageContext{
        MsgType:   channeldpb.MessageType_CHANNEL_DATA_HANDOVER,
        Msg:       handoverMsg,
        Broadcast: channeldpb.BroadcastType_ALL,
        StubId:    0,
        ChannelId: uint32(srcChannelId),
    })
    var broadcastType = channeldpb.BroadcastType_ALL
    // Don't send the spatial server twice (if it's the owner of both channels)
    if dstChannel.ownerConnection == srcChannel.ownerConnection {
        broadcastType = channeldpb.BroadcastType_ALL_BUT_OWNER
    }
    dstChannel.Broadcast(MessageContext{
        MsgType:   channeldpb.MessageType_CHANNEL_DATA_HANDOVER,
        Msg:       handoverMsg,
        Broadcast: broadcastType,
        StubId:    0,
        ChannelId: uint32(dstChannelId),
    })
    */
    /* Unsub and Sub is controlled by server/client, and has nothing to do with src/dst channel in most cases.
    // Unsub from srcChannel & Sub to dstChannel
    clientConn := GetConnection(ConnectionId(ctl.contextConnId))
    if clientConn != nil {
        subOptions, err := clientConn.UnsubscribeFromChannel(srcChannel)
        if err != nil {
            rootLogger.Error("failed to unsub from channel",
                zap.String("channelType", srcChannel.channelType.String()),
                zap.Uint32("channelId", uint32(srcChannel.id)),
                zap.Error(err))
        }
        clientConn.sendUnsubscribed(MessageContext{}, srcChannel, clientConn, 0)
        clientConn.SubscribeToChannel(dstChannel, subOptions)
        clientConn.sendSubscribed(MessageContext{}, dstChannel, clientConn, 0, subOptions)
    }
    */
}
// initServerConnections lazily allocates the per-server connection table,
// one slot per spatial server. Safe to call repeatedly.
func (ctl *StaticGrid2DSpatialController) initServerConnections() {
    if ctl.serverConnections != nil {
        return
    }
    ctl.serverConnections = make([]ConnectionInChannel, ctl.ServerCols*ctl.ServerRows)
}
// nextServerIndex returns the index of the first free (nil or removing)
// server slot, or the slot count when every server is connected.
func (ctl *StaticGrid2DSpatialController) nextServerIndex() uint32 {
    for i, conn := range ctl.serverConnections {
        if conn == nil || conn.IsRemoving() {
            return uint32(i)
        }
    }
    return uint32(len(ctl.serverConnections))
}
// Tick clears the slots of spatial servers whose connections are being
// removed so the slots can be re-assigned by CreateChannels.
func (ctl *StaticGrid2DSpatialController) Tick() {
    ctl.initServerConnections()
    for i, conn := range ctl.serverConnections {
        if conn != nil && conn.IsRemoving() {
            ctl.serverConnections[i] = nil
            rootLogger.Info("reset spatial server connection", zap.Int("serverIndex", i))
        }
    }
}
/*
// Used for sending message between channels
var internalDummyConnection = &Connection{
id: math.MaxUint32,
connectionType: channeldpb.ConnectionType_NO_CONNECTION,
compressionType: channeldpb.CompressionType_NO_COMPRESSION,
conn: nil,
reader: nil,
writer: nil,
sender: nil, //&queuedMessageSender{},
sendQueue: nil, //make(chan MessageContext, 128),
logger: logger.With(
zap.String("connType", "Internal"),
),
removing: 0,
}
*/ | pkg/channeld/spatial.go | 0.6508 | 0.40486 | spatial.go | starcoder |
package canvas
// Object represents an object that can be drawn on the canvas.
type Object interface {
    // Stroke draws an outline of the Object, anchored at the given point.
    Stroke(Point)
    // Fill fills the area of the canvas represented by the Object,
    // anchored at the given point.
    Fill(Point)
    // Set various styles having to do with drawing lines. For more
    // information, see the CanvasRenderingContext2D documentation.
    SetLineWidth(float64)
    SetLineCap(LineCap)
    SetLineJoin(LineJoin)
    SetMiterLimit(float64)
    SetLineDash([]float64)
}
// pathObj is an Object backed by a user-supplied path-building callback;
// the embedded style is applied to the context before each fill/stroke.
type pathObj struct {
    c *Canvas
    path func(*PathBuilder)
    style
}
// render rebuilds the path starting at pt, applies the stored styles, and
// then invokes the given canvas drawing operation ("fill" or "stroke").
func (p pathObj) render(pt Point, op string) {
    pb := &PathBuilder{p.c}
    pb.begin(pt)
    p.path(pb)
    p.set(p.c)
    p.c.ctx.Call(op)
}
// Fill fills the area enclosed by the path.
func (p pathObj) Fill(pt Point) { p.render(pt, "fill") }
// Stroke draws the outline of the path.
func (p pathObj) Stroke(pt Point) { p.render(pt, "stroke") }
// PathBuilder is a type that is passed to a function used to create a
// custom, path-based object. See the documentation for
// (*Canvas).Path().
type PathBuilder struct {
    c *Canvas
}
// begin starts a fresh path on the underlying context and moves the
// current position to p.
func (pb *PathBuilder) begin(p Point) {
    pb.c.ctx.Call("beginPath")
    pb.c.ctx.Call("moveTo", p.X, p.Y)
}
// Rect adds the rectangle r to the path. The rectangle is canonicalized
// first so that its Min/Max corners are well-ordered.
func (pb PathBuilder) Rect(r Rectangle) {
    r = r.Canon()
    pb.c.ctx.Call("rect", r.Min.X, r.Min.Y, r.Dx(), r.Dy())
}
// MoveTo moves the current position to p without drawing.
func (pb PathBuilder) MoveTo(p Point) {
    pb.c.ctx.Call("moveTo", p.X, p.Y)
}
// Line adds a straight line from the current position to p.
func (pb PathBuilder) Line(p Point) {
    pb.c.ctx.Call("lineTo", p.X, p.Y)
}
// Bezier adds a cubic bezier curve from the current position to end,
// using cp1 and cp2 as the two control points.
func (pb PathBuilder) Bezier(cp1, cp2, end Point) {
    pb.c.ctx.Call("bezierCurveTo",
        cp1.X,
        cp1.Y,
        cp2.X,
        cp2.Y,
        end.X,
        end.Y,
    )
}
// Quadratic adds a quadratic curve from the current position to end,
// using cp as the control point.
func (pb PathBuilder) Quadratic(cp, end Point) {
    pb.c.ctx.Call("quadraticCurveTo",
        cp.X,
        cp.Y,
        end.X,
        end.Y,
    )
}
// Arc adds an arc centered at c with radius r. The remaining arguments are
// forwarded to the context's arc() call: sa and ea are presumably the
// start/end angles in radians and cc the counter-clockwise flag — see the
// CanvasRenderingContext2D.arc documentation to confirm.
func (pb PathBuilder) Arc(c Point, r float64, sa, ea float64, cc bool) {
    pb.c.ctx.Call("arc",
        c.X,
        c.Y,
        r,
        sa,
        ea,
        cc,
    )
}
// textObj is an Object that draws a string on the canvas. A negative mw
// means no maximum-width argument is passed to the text call.
type textObj struct {
    c *Canvas
    text string
    mw float64
    style
}
// Fill draws the text filled at p, forwarding the max-width constraint
// when one is configured (mw >= 0).
func (t textObj) Fill(p Point) {
    if t.mw >= 0 {
        t.c.ctx.Call("fillText", t.text, p.X, p.Y, t.mw)
        return
    }
    t.c.ctx.Call("fillText", t.text, p.X, p.Y)
}
func (t textObj) Stroke(p Point) {
if t.mw < 0 {
t.c.ctx.Call("strokeText", t.text, p.X, p.Y)
return
}
t.c.ctx.Call("strokeText", t.text, p.X, p.Y, t.mw)
} | object.go | 0.864882 | 0.573141 | object.go | starcoder |
package entropy
import (
"errors"
"kanzi"
)
const (
    // Top of the 56-bit coding interval (56 bits set).
    BINARY_ENTROPY_TOP = uint64(0x00FFFFFFFFFFFFFF)
    // Selects bits 24..55; used to detect when low and high agree in their upper bits.
    MASK_24_56 = uint64(0x00FFFFFFFF000000)
    // Low 24 bits.
    MASK_0_24 = uint64(0x0000000000FFFFFF)
    // Low 32 bits.
    MASK_0_32 = uint64(0x00000000FFFFFFFF)
)
// Predictor is the adaptive probability model driving the entropy codec.
type Predictor interface {
    // Update the probability model
    Update(bit byte)
    // Return the split value representing the probability of 1 in the [0..4095] range.
    // E.G. 410 represents roughly a probability of 10% for 1
    Get() uint
}
// BinaryEntropyEncoder arithmetic-codes individual bits into a bitstream,
// narrowing the [low, high] interval according to the predictor.
type BinaryEntropyEncoder struct {
    predictor Predictor
    low uint64 // lower bound of the current coding interval
    high uint64 // upper bound of the current coding interval
    bitstream kanzi.OutputBitStream
    disposed bool // set once the final bits have been flushed by Dispose
}
// NewBinaryEntropyEncoder creates a binary entropy coder writing to bs and
// driven by the given bit predictor. Both arguments are required.
func NewBinaryEntropyEncoder(bs kanzi.OutputBitStream, predictor Predictor) (*BinaryEntropyEncoder, error) {
    if bs == nil {
        return nil, errors.New("Invalid null bitstream parameter")
    }
    if predictor == nil {
        return nil, errors.New("Invalid null predictor parameter")
    }
    return &BinaryEntropyEncoder{
        predictor: predictor,
        low:       0,
        high:      BINARY_ENTROPY_TOP,
        bitstream: bs,
    }, nil
}
// encodeByte codes the eight bits of val, most-significant first.
func (this *BinaryEntropyEncoder) encodeByte(val byte) {
    for shift := uint(8); shift > 0; shift-- {
        this.encodeBit((val >> (shift - 1)) & 1)
    }
}
// encodeBit codes a single bit: the current interval is split at a point
// proportional to the predictor's probability of a 1, the matching
// sub-interval is kept, and any settled upper bits are flushed.
func (this *BinaryEntropyEncoder) encodeBit(bit byte) {
    // Calculate interval split
    // Written in a way to maximize accuracy of multiplication/division
    split := (((this.high - this.low) >> 7) * uint64(this.predictor.Get())) >> 5
    // Update fields with new interval bounds.
    // bitmask is all-ones when bit == 0 and all-zeros when bit == 1, so a 1
    // shrinks high down to low+split while a 0 raises low past the split.
    bitmask := uint64(bit) - 1
    this.high -= (^bitmask & (this.high - this.low - split))
    this.low += (bitmask & (split + 1))
    // Update predictor
    this.predictor.Update(bit)
    // Write unchanged first 32 bits to bitstream
    for (this.low^this.high)&MASK_24_56 == 0 {
        this.flush()
    }
}
// Encode entropy-codes every byte of block and reports how many bytes were
// consumed (always the full block).
func (this *BinaryEntropyEncoder) Encode(block []byte) (int, error) {
    for _, b := range block {
        this.encodeByte(b)
    }
    return len(block), nil
}
// flush emits the settled top 32 bits of the interval to the bitstream and
// rescales low/high by shifting in fresh zero/one bits.
func (this *BinaryEntropyEncoder) flush() {
    this.bitstream.WriteBits(this.high>>24, 32)
    this.low <<= 32
    this.high = (this.high << 32) | MASK_0_32
}
// BitStream returns the destination bitstream.
func (this *BinaryEntropyEncoder) BitStream() kanzi.OutputBitStream {
    return this.bitstream
}
// Dispose flushes the final low-register bits to the bitstream. It is
// idempotent: only the first call writes anything.
func (this *BinaryEntropyEncoder) Dispose() {
    // Idiom fix: was "if this.disposed == true".
    if this.disposed {
        return
    }
    this.disposed = true
    this.bitstream.WriteBits(this.low|MASK_0_24, 56)
}
// BinaryEntropyDecoder mirrors BinaryEntropyEncoder, reconstructing bits
// from the coded stream using the same predictor sequence.
type BinaryEntropyDecoder struct {
    predictor Predictor
    low uint64 // lower bound of the current coding interval
    high uint64 // upper bound of the current coding interval
    current uint64 // 56-bit window of coded input being consumed
    initialized bool // true once the first 56 bits have been read
    bitstream kanzi.InputBitStream
}
// NewBinaryEntropyDecoder creates a binary entropy decoder reading from bs
// and driven by the given bit predictor. Both arguments are required.
// Stream reading is deferred to the first Decode call: the constructor
// performs no I/O.
func NewBinaryEntropyDecoder(bs kanzi.InputBitStream, predictor Predictor) (*BinaryEntropyDecoder, error) {
    if bs == nil {
        return nil, errors.New("Invalid null bitstream parameter")
    }
    if predictor == nil {
        return nil, errors.New("Invalid null predictor parameter")
    }
    return &BinaryEntropyDecoder{
        predictor: predictor,
        low:       0,
        high:      BINARY_ENTROPY_TOP,
        bitstream: bs,
    }, nil
}
// decodeByte reads back one byte, most-significant bit first, mirroring
// encodeByte on the encoder side.
func (this *BinaryEntropyDecoder) decodeByte() byte {
    var res byte
    for i := 0; i < 8; i++ {
        res = (res << 1) | this.decodeBit()
    }
    return res
}
// Initialized reports whether the initial 56 bits have been read from the
// bitstream.
func (this *BinaryEntropyDecoder) Initialized() bool {
    return this.initialized
}
// Initialize reads the first 56 coded bits into 'current'. It is
// idempotent; it is deferred to decode time because the bitstream may not
// be ready when the decoder is constructed.
func (this *BinaryEntropyDecoder) Initialize() {
    // Idiom fix: was "if this.initialized == true".
    if this.initialized {
        return
    }
    this.current = this.bitstream.ReadBits(56)
    this.initialized = true
}
// decodeBit recovers one bit by testing which side of the predictor-derived
// split the coded value falls on, then rescales the interval exactly as the
// encoder did so both sides stay in lockstep.
func (this *BinaryEntropyDecoder) decodeBit() byte {
    // Calculate interval split
    // Written in a way to maximize accuracy of multiplication/division
    xmid := ((((this.high - this.low) >> 7) * uint64(this.predictor.Get())) >> 5) + this.low
    var bit byte
    if this.current <= xmid {
        bit = 1
        this.high = xmid
    } else {
        bit = 0
        this.low = xmid + 1
    }
    // Update predictor
    this.predictor.Update(bit)
    // Read 32 bits from bitstream
    for (this.low^this.high)&MASK_24_56 == 0 {
        this.read()
    }
    return bit
}
// read rescales the interval and shifts the next 32 coded bits into the
// current value, mirroring the encoder's flush.
func (this *BinaryEntropyDecoder) read() {
    this.low = this.low << 32
    this.high = (this.high << 32) | MASK_0_32
    this.current = (this.current << 32) | this.bitstream.ReadBits(32)
}
// Decode fills block with decoded bytes and reports how many were produced
// (always the full block).
func (this *BinaryEntropyDecoder) Decode(block []byte) (int, error) {
    // Deferred initialization: the bitstream may not be ready at build time.
    // Initialize 'current' with bytes read from the bitstream.
    // Idiom fixes: dropped the pointless "err := error(nil)" and the
    // "== false" comparison.
    if !this.Initialized() {
        this.Initialize()
    }
    for i := range block {
        block[i] = this.decodeByte()
    }
    return len(block), nil
}
// BitStream returns the source bitstream.
func (this *BinaryEntropyDecoder) BitStream() kanzi.InputBitStream {
    return this.bitstream
}
// Dispose is a no-op: the decoder holds no state that needs flushing.
func (this *BinaryEntropyDecoder) Dispose() {
}
package models
import (
"errors"
)
// DataType enumerates the value types supported by detection/compliance
// rule settings. (The previous comment about validateComplianceScript was
// a code-generation artifact.)
type DataType int
// dataTypeNames holds the canonical wire name of each DataType, indexed by
// its numeric value; String and ParseDataType are both driven by it.
var dataTypeNames = []string{"none", "boolean", "int64", "double", "string", "dateTime", "version", "base64", "xml", "booleanArray", "int64Array", "doubleArray", "stringArray", "dateTimeArray", "versionArray"}
const (
    // None data type.
    NONE_DATATYPE DataType = iota
    // Boolean data type.
    BOOLEAN_DATATYPE
    // Int64 data type.
    INT64_DATATYPE
    // Double data type.
    DOUBLE_DATATYPE
    // String data type.
    STRING_DATATYPE
    // DateTime data type.
    DATETIME_DATATYPE
    // Version data type.
    VERSION_DATATYPE
    // Base64 data type.
    BASE64_DATATYPE
    // Xml data type.
    XML_DATATYPE
    // Boolean array data type.
    BOOLEANARRAY_DATATYPE
    // Int64 array data type.
    INT64ARRAY_DATATYPE
    // Double array data type.
    DOUBLEARRAY_DATATYPE
    // String array data type.
    STRINGARRAY_DATATYPE
    // DateTime array data type.
    DATETIMEARRAY_DATATYPE
    // Version array data type.
    VERSIONARRAY_DATATYPE
)
// String returns the wire name of the data type.
func (i DataType) String() string {
    return dataTypeNames[i]
}
// ParseDataType converts a wire name to a *DataType, returning an error
// for unknown names.
func ParseDataType(v string) (interface{}, error) {
    for idx, name := range dataTypeNames {
        if name == v {
            result := DataType(idx)
            return &result, nil
        }
    }
    return 0, errors.New("Unknown DataType value: " + v)
}
// SerializeDataType maps each value to its wire name.
func SerializeDataType(values []DataType) []string {
    result := make([]string, len(values))
    for i, v := range values {
        result[i] = v.String()
    }
    return result
}
package parse
import (
"encoding/json"
"flag"
"fmt"
"reflect"
"strconv"
"strings"
"time"
)
// Parser is an interface that allows the contents of a flag.Getter to be set.
type Parser interface {
	flag.Getter
	// SetValue stores a value of the parser's concrete Go type;
	// implementations type-assert and panic on a mismatched dynamic type.
	SetValue(interface{})
}
// BoolValue adapts a bool to the Parser interface.
type BoolValue bool

// Set parses s as a boolean ("true", "1", "false", ...) and stores it.
func (b *BoolValue) Set(s string) error {
	parsed, err := strconv.ParseBool(s)
	*b = BoolValue(parsed)
	return err
}

// Get returns the underlying bool.
func (b *BoolValue) Get() interface{} { return bool(*b) }

func (b *BoolValue) String() string { return strconv.FormatBool(bool(*b)) }

// IsBoolFlag reports that this flag may be supplied without "=value".
func (b *BoolValue) IsBoolFlag() bool { return true }

// SetValue stores the given value, which must be a bool.
func (b *BoolValue) SetValue(val interface{}) {
	*b = BoolValue(val.(bool))
}
// BoolFlag optional interface to indicate boolean flags that can be
// supplied without "=value" text
// (mirrors the unexported boolFlag convention of the standard flag package).
type BoolFlag interface {
	flag.Value
	IsBoolFlag() bool
}
// IntValue adapts an int to the Parser interface.
type IntValue int

// Set parses s as an integer (base 0: decimal, 0x hex or 0 octal) and
// stores the result.
func (i *IntValue) Set(s string) error {
	parsed, err := strconv.ParseInt(s, 0, 64)
	*i = IntValue(parsed)
	return err
}

// Get returns the underlying int.
func (i *IntValue) Get() interface{} { return int(*i) }

func (i *IntValue) String() string { return strconv.Itoa(int(*i)) }

// SetValue stores the given value, which must be an int.
func (i *IntValue) SetValue(val interface{}) {
	*i = IntValue(val.(int))
}
// Int64Value int64 Value
type Int64Value int64
// Set sets int64 value from the given string value.
// Base 0 accepts decimal, 0x-prefixed hex and 0-prefixed octal.
func (i *Int64Value) Set(s string) error {
	v, err := strconv.ParseInt(s, 0, 64)
	*i = Int64Value(v)
	return err
}
// Get returns the int64 value.
func (i *Int64Value) Get() interface{} { return int64(*i) }
func (i *Int64Value) String() string { return fmt.Sprintf("%v", *i) }
// SetValue sets the Int64Value from the given int64-asserted value.
// Panics if val is not an int64.
func (i *Int64Value) SetValue(val interface{}) {
	*i = Int64Value(val.(int64))
}
// UintValue uint Value
type UintValue uint
// Set sets uint value from the given string value.
// NOTE(review): parsed with bitSize 64 then truncated to uint, which can
// overflow on 32-bit platforms — confirm intended.
func (i *UintValue) Set(s string) error {
	v, err := strconv.ParseUint(s, 0, 64)
	*i = UintValue(v)
	return err
}
// Get returns the uint value.
func (i *UintValue) Get() interface{} { return uint(*i) }
func (i *UintValue) String() string { return fmt.Sprintf("%v", *i) }
// SetValue sets the UintValue from the given uint-asserted value.
// Panics if val is not a uint.
func (i *UintValue) SetValue(val interface{}) {
	*i = UintValue(val.(uint))
}
// Uint64Value uint64 Value
type Uint64Value uint64
// Set sets uint64 value from the given string value.
// Base 0 accepts decimal, 0x-prefixed hex and 0-prefixed octal.
func (i *Uint64Value) Set(s string) error {
	v, err := strconv.ParseUint(s, 0, 64)
	*i = Uint64Value(v)
	return err
}
// Get returns the uint64 value.
func (i *Uint64Value) Get() interface{} { return uint64(*i) }
func (i *Uint64Value) String() string { return fmt.Sprintf("%v", *i) }
// SetValue sets the Uint64Value from the given uint64-asserted value.
// Panics if val is not a uint64.
func (i *Uint64Value) SetValue(val interface{}) {
	*i = Uint64Value(val.(uint64))
}
// StringValue adapts a string to the Parser interface.
type StringValue string

// Set stores the string verbatim; it never fails.
func (sv *StringValue) Set(val string) error {
	*sv = StringValue(val)
	return nil
}

// Get returns the underlying string.
func (sv *StringValue) Get() interface{} { return string(*sv) }

func (sv *StringValue) String() string { return string(*sv) }

// SetValue stores the given value, which must be a string.
func (sv *StringValue) SetValue(val interface{}) {
	*sv = StringValue(val.(string))
}
// Float64Value float64 Value
type Float64Value float64
// Set sets float64 value from the given string value.
func (f *Float64Value) Set(s string) error {
	v, err := strconv.ParseFloat(s, 64)
	*f = Float64Value(v)
	return err
}
// Get returns the float64 value.
func (f *Float64Value) Get() interface{} { return float64(*f) }
func (f *Float64Value) String() string { return fmt.Sprintf("%v", *f) }
// SetValue sets the Float64Value from the given float64-asserted value.
// Panics if val is not a float64.
func (f *Float64Value) SetValue(val interface{}) {
	*f = Float64Value(val.(float64))
}
// Duration is a custom type suitable for parsing duration values.
// It supports `time.ParseDuration`-compatible values and suffix-less digits; in
// the latter case, seconds are assumed.
type Duration time.Duration
// Set sets the duration from the given string value.
// A bare integer is interpreted as a number of SECONDS; anything else is
// handed to time.ParseDuration ("1h30m", "500ms", ...).
func (d *Duration) Set(s string) error {
	if v, err := strconv.Atoi(s); err == nil {
		*d = Duration(time.Duration(v) * time.Second)
		return nil
	}
	v, err := time.ParseDuration(s)
	*d = Duration(v)
	return err
}
// Get returns the duration value.
func (d *Duration) Get() interface{} { return time.Duration(*d) }
// String returns a string representation of the duration value.
func (d *Duration) String() string { return (*time.Duration)(d).String() }
// SetValue sets the duration from the given Duration-asserted value.
// Note: expects parse.Duration, not time.Duration; panics otherwise.
func (d *Duration) SetValue(val interface{}) {
	*d = val.(Duration)
}
// MarshalText serialize the given duration value into a text.
func (d *Duration) MarshalText() ([]byte, error) {
	return []byte(d.String()), nil
}
// UnmarshalText deserializes the given text into a duration value.
// It is meant to support TOML decoding of durations.
// Bare integers are seconds here (delegates to Set).
func (d *Duration) UnmarshalText(text []byte) error {
	return d.Set(string(text))
}
// MarshalJSON serializes the given duration value.
// time.Duration marshals as its int64 NANOSECOND count.
func (d *Duration) MarshalJSON() ([]byte, error) {
	return json.Marshal(time.Duration(*d))
}
// UnmarshalJSON deserializes the given text into a duration value.
// NOTE(review): a bare JSON number is treated as NANOSECONDS (matching
// MarshalJSON), while Set/UnmarshalText treat bare digits as seconds —
// asymmetry is visible in the code; confirm it is intentional.
func (d *Duration) UnmarshalJSON(text []byte) error {
	if v, err := strconv.Atoi(string(text)); err == nil {
		*d = Duration(time.Duration(v))
		return nil
	}
	// We use json unmarshal on value because we have the quoted version
	var value string
	err := json.Unmarshal(text, &value)
	if err != nil {
		return err
	}
	v, err := time.ParseDuration(value)
	*d = Duration(v)
	return err
}
// TimeValue time.Time Value
type TimeValue time.Time
// Set sets time.Time value from the given string value.
// The input must be RFC 3339 formatted (e.g. "2006-01-02T15:04:05Z").
func (t *TimeValue) Set(s string) error {
	v, err := time.Parse(time.RFC3339, s)
	*t = TimeValue(v)
	return err
}
// Get returns the time.Time value.
func (t *TimeValue) Get() interface{} { return time.Time(*t) }
func (t *TimeValue) String() string { return (*time.Time)(t).String() }
// SetValue sets the TimeValue from the given time.Time-asserted value.
// Panics if val is not a time.Time.
func (t *TimeValue) SetValue(val interface{}) {
	*t = TimeValue(val.(time.Time))
}
// SliceStrings accumulates string flag values, splitting each input on
// ',' and ';'.
type SliceStrings []string

// Set splits str on ',' and ';' and appends the resulting fields.
// It never fails.
func (s *SliceStrings) Set(str string) error {
	isSep := func(r rune) bool {
		return r == ',' || r == ';'
	}
	*s = append(*s, strings.FieldsFunc(str, isSep)...)
	return nil
}

// Get returns the accumulated []string.
func (s *SliceStrings) Get() interface{} { return []string(*s) }

// String renders the slice for display.
func (s *SliceStrings) String() string { return fmt.Sprintf("%v", *s) }

// SetValue replaces the contents; val must be a []string.
func (s *SliceStrings) SetValue(val interface{}) {
	*s = SliceStrings(val.([]string))
}
// LoadParsers loads default parsers and custom parsers given as parameter.
// Return a map [reflect.Type]parsers
// bool, int, int64, uint, uint64, float64,
func LoadParsers(customParsers map[reflect.Type]Parser) (map[reflect.Type]Parser, error) {
parsers := map[reflect.Type]Parser{}
var boolParser BoolValue
parsers[reflect.TypeOf(true)] = &boolParser
var intParser IntValue
parsers[reflect.TypeOf(1)] = &intParser
var int64Parser Int64Value
parsers[reflect.TypeOf(int64(1))] = &int64Parser
var uintParser UintValue
parsers[reflect.TypeOf(uint(1))] = &uintParser
var uint64Parser Uint64Value
parsers[reflect.TypeOf(uint64(1))] = &uint64Parser
var stringParser StringValue
parsers[reflect.TypeOf("")] = &stringParser
var float64Parser Float64Value
parsers[reflect.TypeOf(float64(1.5))] = &float64Parser
var durationParser Duration
parsers[reflect.TypeOf(Duration(time.Second))] = &durationParser
var timeParser TimeValue
parsers[reflect.TypeOf(time.Now())] = &timeParser
for rType, parser := range customParsers {
parsers[rType] = parser
}
return parsers, nil
} | vendor/github.com/containous/flaeg/parse/parse.go | 0.805288 | 0.499756 | parse.go | starcoder |
package main
import (
"bufio"
"fmt"
"io"
"math/bits"
"os"
)
// State is a game state bitboard encoding the entire game state. No
// methods actually modify the State, rather return an updated State.
// Layout: bits 0-24 player-0 pieces, bits 25-49 player-1 pieces,
// bits 50 and up hold the turn counter.
type State uint64
// Mask efficiently validate moves, mostly behaving like a State. No
// methods actually modify the Mask, rather return an updated Mask.
// Same bit layout as State, but set bits mark blocked cells.
type Mask uint64
// Turn returns the 0-indexed turn count (stored in bits 50 and up).
func (s State) Turn() int {
	return int(s >> 50)
}
// Turn returns the 0-indexed turn count (stored in bits 50 and up).
func (m Mask) Turn() int {
	return int(m >> 50)
}
// Pass the current turn without placing a piece.
// Keeps the 50 board bits (0x3ffffffffffff) and increments the turn.
func (s State) Pass() State {
	turn := s.Turn()
	bits := s & 0x3ffffffffffff
	return State(turn+1)<<50 | bits
}
// Pass the current turn without placing a piece.
// Keeps the 50 board bits (0x3ffffffffffff) and increments the turn.
func (m Mask) Pass() Mask {
	turn := m.Turn()
	bits := m & 0x3ffffffffffff
	return Mask(turn+1)<<50 | bits
}
// Place a piece at a specific position and advance the turn.
// The mover is turn%2; player 0 owns bits 0-24, player 1 bits 25-49.
func (s State) Place(i int) State {
	turn := s.Turn()
	who := turn % 2
	bits := s & 0x3ffffffffffff
	bit := State(1) << (who*25 + i)
	return State(turn+1)<<50 | bits | bit
}
// masks holds one 25-bit pattern per board position. Mask.Place ORs
// masks[i] into the opponent's half to mark cells they may no longer
// use (presumably the placed cell and its neighbours — TODO confirm
// against the game rules).
var masks = [...]Mask{
	0x0000023, 0x0000047, 0x000008e, 0x000011c, 0x0000218,
	0x0000461, 0x00008e2, 0x00011c4, 0x0002388, 0x0004310,
	0x0008c20, 0x0011c40, 0x0023880, 0x0047100, 0x0086200,
	0x0118400, 0x0238800, 0x0471000, 0x08e2000, 0x10c4000,
	0x0308000, 0x0710000, 0x0e20000, 0x1c40000, 0x1880000,
}
// Place a piece at a specific position and advance the turn.
// Marks cell i as used by the mover and blocks the masks[i] pattern
// in the opponent's half.
func (m Mask) Place(i int) Mask {
	turn := m.Turn()
	who := turn % 2
	bits := m & 0x3ffffffffffff
	other := masks[i] << ((1 ^ who) * 25)
	self := Mask(1) << (who*25 + i)
	return Mask(turn+1)<<50 | bits | other | self
}
// Derive a validation mask from a game state.
// It collects each player's piece positions, then replays them as
// alternating Place calls (inserting a Pass when a player has no piece
// recorded for that turn) until the mask reaches the state's turn count.
func (s State) Derive() Mask {
	var ns [2]int        // number of pieces found per player
	var moves [2][25]int // piece positions per player
	for i := 0; i < 25; i++ {
		if s>>i&1 == 1 {
			moves[0][ns[0]] = i
			ns[0]++
		}
		if s>>(i+25)&1 == 1 {
			moves[1][ns[1]] = i
			ns[1]++
		}
	}
	var m Mask
	for i := 0; m.Turn() < s.Turn(); i++ {
		if i/2 < ns[i%2] {
			m = m.Place(moves[i%2][i/2])
		} else {
			m = m.Pass()
		}
	}
	return m
}
// Transpose around the 0-6-12-18-24 diagonal.
// Each shift/mask pair relocates one diagonal band of the 5x5 grid; the
// constants cover both 25-bit player halves simultaneously.
func (s State) Transpose() State {
	return ((s >> 16) & 0x00000020000010) |
		((s >> 12) & 0x00000410000208) |
		((s >> 8) & 0x00008208004104) |
		((s >> 4) & 0x00104104082082) |
		((s >> 0) & 0xfe082083041041) |
		((s << 4) & 0x01041040820820) |
		((s << 8) & 0x00820800410400) |
		((s << 12) & 0x00410000208000) |
		((s << 16) & 0x00200000100000)
}
// Flip vertically.
// Rows are 5 bits wide, so shifts of ±10 and ±20 exchange rows around
// the middle row; both player halves are handled at once.
func (s State) Flip() State {
	return ((s >> 20) & 0x0000003e00001f) |
		((s >> 10) & 0x000007c00003e0) |
		((s >> 0) & 0xfc00f800007c00) |
		((s << 10) & 0x001f00000f8000) |
		((s << 20) & 0x03e00001f00000)
}
// Canonicalize returns the smallest (numerically) of the eight
// symmetries of the board, generated by alternating transposes and
// vertical flips — a canonical representative for memoization.
func (s State) Canonicalize() State {
	c := s
	for i := 0; i < 7; i++ {
		if i%2 == 0 {
			s = s.Transpose()
		} else {
			s = s.Flip()
		}
		if s < c {
			c = s
		}
	}
	return c
}
// Valid indicates if a move is permitted.
// On the opening turn the center square (index 12) is forbidden;
// afterwards a cell is valid when not masked for the current player.
func (m Mask) Valid(i int) bool {
	turn := m.Turn()
	who := turn % 2
	if turn == 0 {
		return i != 12
	}
	return (m >> (who*25 + i) & 1) == 0
}
// NoMoves indicates if the current player has no moves.
// True when all 25 of the player's cells are occupied (state) or
// blocked (mask).
func (s State) NoMoves(m Mask) bool {
	turn := s.Turn()
	who := turn % 2
	const M = 0x1ffffff
	return ((uint64(s)>>(who*25) | uint64(m)>>(who*25)) & M) == M
}
// IsComplete indicates if the game has completed (no more moves).
// Both players' 25-cell halves must be fully occupied or blocked.
func (s State) IsComplete(m Mask) bool {
	return ((uint64(s)>>25|uint64(m)>>25)&0x1ffffff) == 0x1ffffff &&
		((uint64(s)>>0|uint64(m)>>0)&0x1ffffff) == 0x1ffffff
}
// Print an ANSI-escape representation of the game state.
// Solid blue/red blocks are placed pieces, shaded blocks are cells
// blocked for one player, blanks are blocked for both, '∙' is free.
func (s State) Print(w io.Writer, m Mask) error {
	buf := bufio.NewWriter(w)
	for y := 0; y < 5; y++ {
		for x := 0; x < 5; x++ {
			i := y*5 + x
			p0 := s >> i & 1
			p1 := s >> (i + 25) & 1
			x0 := m >> (i + 25) & 1
			x1 := m >> i & 1
			c := "∙"
			if p0 == 1 {
				c = "\x1b[94m█\x1b[0m"
			} else if p1 == 1 {
				c = "\x1b[91m█\x1b[0m"
			} else if x0 == 1 && x1 == 1 {
				c = " "
			} else if x0 == 1 {
				c = "\x1b[94m░\x1b[0m"
			} else if x1 == 1 {
				c = "\x1b[91m░\x1b[0m"
			}
			buf.WriteString(c)
		}
		buf.WriteRune('\n')
	}
	buf.WriteRune('\n')
	return buf.Flush()
}
// InitScore returns the initial minimax score for this turn: +25 (the
// worst case) when the minimizing player (turn%2 == 1) is to move,
// otherwise -25 for the maximizing player.
func (s State) InitScore() int {
	if s.Turn()%2 == 1 {
		return +25
	}
	return -25
}
// Score computes the final game score: player-0 piece count minus
// player-1 piece count (popcount of each 25-bit half).
func (s State) Score() int {
	p0 := bits.OnesCount(uint(s & 0x1ffffff))
	p1 := bits.OnesCount(uint(s >> 25 & 0x1ffffff))
	return p0 - p1
}
// Minimax is a game evaluator storing the explored game tree. It always
// explores to game completion and plays perfectly.
// Keys are canonicalized states; values are memoized minimax scores.
type Minimax map[State]int8
// New returns an empty minimax tree.
func New() Minimax {
	return make(map[State]int8)
}
// Evaluate the minimax score at a game state.
// Results are memoized per canonical state. Terminal positions score by
// piece difference; a player with no moves passes; otherwise the player
// at turn%2==0 maximizes and turn%2==1 minimizes over all valid moves.
func (t Minimax) Evaluate(s State, m Mask) int {
	s0 := s.Canonicalize()
	score8, ok := t[s0]
	if ok {
		return int(score8)
	}
	if s.IsComplete(m) {
		score := s0.Score()
		t[s0] = int8(score)
		return score
	}
	if s.NoMoves(m) {
		score := t.Evaluate(s.Pass(), m.Pass())
		t[s0] = int8(score)
		return score
	}
	score := s.InitScore()
	for i := 0; i < 5*5; i++ {
		if m.Valid(i) {
			tmp := t.Evaluate(s.Place(i), m.Place(i))
			if s.Turn()%2 == 1 {
				if tmp < score {
					score = tmp // min
				}
			} else {
				if tmp > score {
					score = tmp // max
				}
			}
		}
	}
	t[s0] = int8(score)
	return score
}
// Print an ANSI-escape representation of the scores for each position.
// Each valid cell shows its absolute minimax score in hex, blue for
// player-0-favorable, red for player-1-favorable; invalid cells show '-'.
func (t Minimax) Print(w io.Writer, s State, m Mask) error {
	buf := bufio.NewWriter(w)
	for i := 0; i < 5*5; i++ {
		if m.Valid(i) {
			score := t.Evaluate(s.Place(i), m.Place(i))
			if score > 0 {
				fmt.Fprintf(buf, "\x1b[94m%x\x1b[0m", +score)
			} else if score < 0 {
				fmt.Fprintf(buf, "\x1b[91m%x\x1b[0m", -score)
			} else {
				buf.WriteRune('0')
			}
		} else {
			buf.WriteRune('-')
		}
		if i%5 == 4 {
			buf.WriteRune('\n')
		}
	}
	buf.WriteRune('\n')
	return buf.Flush()
}
// main evaluates the complete game tree from the empty board, reports
// the tree size and the win/tie counts over all terminal states, then
// renders the per-move score table and the board after opening at cell 6.
func main() {
	t := New()
	t.Evaluate(0, 0)
	fmt.Println(len(t))
	var p1Wins, p2Wins, ties int
	for s, score := range t {
		m := s.Derive()
		if s.IsComplete(m) {
			if score > 0 {
				p1Wins++
			} else if score < 0 {
				p2Wins++
			} else {
				ties++
			}
		}
	}
	fmt.Printf("Total endings: %d\n", p1Wins+p2Wins+ties)
	fmt.Printf("Player 1 wins: %d\n", p1Wins)
	fmt.Printf("Player 2 wins: %d\n", p2Wins)
	t.Print(os.Stdout, State(0).Place(6), Mask(0).Place(6))
	State(0).Place(6).Print(os.Stdout, Mask(0).Place(6))
} | misc/bsquare.go | 0.684159 | 0.437163 | bsquare.go | starcoder
package go_kd_segment_tree
import (
"errors"
"fmt"
mapset "github.com/deckarep/golang-set"
"sort"
)
// ConjunctionNode matches segments by conjunction: it keeps one inverted
// index per dimension and a query matches a segment only when every
// dimension of the segment's rect is satisfied.
type ConjunctionNode struct {
	TreeNode
	Tree *Tree
	DimName interface{} // dimension this node was created for
	Level int // depth in the enclosing tree
	DecreasePercent float64 // split statistic recorded at build time — TODO confirm semantics
	segments []*Segment // all segments stored at this node, referenced by index
	dimNode map[interface{}]ConjunctionDimNode // per-dimension inverted index
}
// Search returns the union of data of every segment whose rect accepts
// point p on all of its dimensions: per-dimension hits are counted and a
// segment matches when its hit count equals its rect's dimension count.
func (node *ConjunctionNode) Search(p Point) []interface{} {
	segCounter := make(map[int]int)
	for dimName, d := range p {
		if node.dimNode[dimName] == nil {
			continue
		}
		for _, segIndex := range node.dimNode[dimName].Search(d) {
			segCounter[segIndex] += 1
		}
	}
	var result = mapset.NewSet()
	for segIndex, matchNum := range segCounter {
		if len(node.segments[segIndex].Rect) == matchNum {
			result = result.Union(node.segments[segIndex].Data)
		}
	}
	return result.ToSlice()
}
// SearchRect is the range-query counterpart of Search: a segment matches
// when every dimension of its rect intersects the query rect r.
func (node *ConjunctionNode) SearchRect(r Rect) []interface{} {
	segCounter := make(map[int]int)
	for dimName, d := range r {
		if node.dimNode[dimName] == nil {
			continue
		}
		for _, seg := range node.dimNode[dimName].SearchRect(d) {
			segCounter[seg] += 1
		}
	}
	var result = mapset.NewSet()
	for segIndex, matchNum := range segCounter {
		if len(node.segments[segIndex].Rect) == matchNum {
			result = result.Union(node.segments[segIndex].Data)
		}
	}
	return result.ToSlice()
}
// Insert is not supported: conjunction nodes are immutable after
// construction, so insertion always returns an error.
func (node *ConjunctionNode) Insert(seg *Segment) error {
	return errors.New("conjunction node not support insert yet")
}
// NewConjunctionNode builds a conjunction index over the given segments:
// one inverted sub-index per tree dimension, chosen by the dimension's
// declared type (discrete or real).
func NewConjunctionNode(tree *Tree,
	segments []*Segment,
	dimName interface{},
	decreasePercent float64,
	level int,
) *ConjunctionNode {
	var node = &ConjunctionNode{
		Tree: tree,
		DimName: dimName,
		Level: level,
		segments: segments,
		DecreasePercent: decreasePercent,
	dimNode: make(map[interface{}]ConjunctionDimNode),
	}
	// NOTE: the loop variable shadows the dimName parameter above.
	for dimName, dimType := range tree.dimTypes {
		switch dimType.Type {
		case DimTypeDiscrete.Type:
			node.dimNode[dimName] = NewDiscreteConjunctionNode(segments, dimName)
		case DimTypeReal.Type:
			node.dimNode[dimName] = NewConjunctionRealNode(segments, dimName)
		}
	}
	return node
}
// MaxInvertNodeNum sums, over all dimensions, the size of each
// dimension's largest posting list — a rough worst-case scan cost.
func (node *ConjunctionNode) MaxInvertNodeNum() int {
	totalInvertNode := 0
	for _, dimNode := range node.dimNode {
		if dimNode == nil {
			continue
		}
		totalInvertNode += dimNode.MaxInvertNode()
	}
	return totalInvertNode
}
// Dumps renders a one-line, prefix-indented description of the node for
// tree debugging output.
func (node *ConjunctionNode) Dumps(prefix string) string {
	return fmt.Sprintf("%v conjunction_node{max_invert_node=%v}\n", prefix, node.MaxInvertNodeNum())
}
// ConjunctionDimNode is a per-dimension inverted index: it maps a point
// value (Search) or a range/scatter query (SearchRect) to candidate
// segment indexes, and reports the size of its largest posting list.
type ConjunctionDimNode interface {
	Search(measure Measure) []int
	SearchRect(rect interface{}) []int
	MaxInvertNode() int
}
// ConjunctionDimRealNode indexes a real-valued dimension. splitPoints is
// the sorted, de-duplicated list of all interval endpoints; segments
// maps an elementary-interval key "lo_hi" to the indexes of segments
// whose interval fully covers that elementary interval.
type ConjunctionDimRealNode struct {
	ConjunctionDimNode
	dimName interface{}
	splitPoints []Measure
	segments map[string][]int
}
// Search locates the elementary interval containing measure and returns
// the segment indexes registered on it (nil when out of range).
func (dimNode *ConjunctionDimRealNode) Search(measure Measure) []int {
	if dimNode == nil || len(dimNode.splitPoints) == 0 {
		return nil
	}
	pos := dimNode.searchPos(measure)
	if pos < 0 || pos >= len(dimNode.splitPoints) {
		return nil
	}
	return dimNode.segments[fmt.Sprintf("%v_%v",
		dimNode.splitPoints[pos], dimNode.splitPoints[pos+1])]
}
// searchPos binary-searches splitPoints for the elementary interval
// holding measure, returning i such that
// splitPoints[i] <= measure < splitPoints[i+1], or -1 when the measure
// lies outside every interval.
func (dimNode *ConjunctionDimRealNode) searchPos(measure Measure) int {
	if dimNode == nil || len(dimNode.splitPoints) == 0 {
		return -1
	}
	start := 0
	end := len(dimNode.splitPoints) - 1
	for start < end {
		mid := (start + end) / 2
		if dimNode.splitPoints[mid].SmallerOrEqual(measure) &&
			dimNode.splitPoints[mid+1].Bigger(measure) {
			return mid
		} else if dimNode.splitPoints[mid+1].SmallerOrEqual(measure) {
			start = mid + 1
		} else if dimNode.splitPoints[mid].Bigger(measure) {
			end = mid
		} else {
			break
		}
	}
	return -1
}
// MaxInvertNode returns the size of the largest posting list held by
// this dimension's index (0 for an empty index).
func (dimNode *ConjunctionDimRealNode) MaxInvertNode() int {
	if dimNode == nil || len(dimNode.segments) == 0 {
		return 0
	}
	maxNodeNum := 0
	for _, nodes := range dimNode.segments {
		if len(nodes) > maxNodeNum {
			maxNodeNum = len(nodes)
		}
	}
	return maxNodeNum
}
// SearchRect returns the indexes of segments registered on any
// elementary interval fully contained in the query interval. A non-
// Interval argument yields nil.
func (dimNode *ConjunctionDimRealNode) SearchRect(measure interface{}) []int {
	if dimNode == nil || len(dimNode.splitPoints) == 0 {
		return nil
	}

	interval, ok := measure.(Interval)
	if !ok {
		return nil
	}

	matchSegments := mapset.NewSet()
	for i := range dimNode.splitPoints[:len(dimNode.splitPoints)-1] {
		// An elementary interval counts only when both endpoints fall
		// inside the query interval.
		if interval.Contains(dimNode.splitPoints[i]) &&
			interval.Contains(dimNode.splitPoints[i+1]) {
			for _, seg := range dimNode.segments[fmt.Sprintf("%v_%v",
				dimNode.splitPoints[i], dimNode.splitPoints[i+1])] {
				matchSegments.Add(seg)
			}
		}
	}

	var result []int
	for _, seg := range matchSegments.ToSlice() {
		result = append(result, seg.(int))
	}
	return result
}
// NewConjunctionRealNode builds the real-valued index for dimName:
// gather every segment's interval endpoints, sort and de-duplicate them
// in place, then register each segment on every elementary interval its
// own interval fully covers. Returns nil when no segment has this
// dimension.
func NewConjunctionRealNode(segments []*Segment, dimName interface{}) *ConjunctionDimRealNode {
	var allSplit = []Measure{}
	for _, seg := range segments {
		if seg.Rect[dimName] == nil {
			continue
		}
		allSplit = append(allSplit, seg.Rect[dimName].(Interval)[0])
		allSplit = append(allSplit, seg.Rect[dimName].(Interval)[1])
	}
	var dimNode = &ConjunctionDimRealNode{
		dimName: dimName,
		splitPoints: allSplit,
		segments: make(map[string][]int),
	}
	if len(dimNode.splitPoints) == 0 {
		return nil
	}
	sort.Sort(&sortMeasures{measures: dimNode.splitPoints})
	// In-place de-duplication of the sorted endpoints.
	toIndex := 0
	for _, m := range dimNode.splitPoints {
		if dimNode.splitPoints[toIndex].Equal(m) {
			continue
		} else {
			dimNode.splitPoints[toIndex+1] = m
			toIndex += 1
		}
	}
	dimNode.splitPoints = dimNode.splitPoints[:toIndex+1]
	// Register each segment on the elementary intervals it covers,
	// starting from the elementary interval holding its lower endpoint.
	for index, seg := range segments {
		if seg.Rect[dimName] == nil {
			continue
		}
		pos := dimNode.searchPos(seg.Rect[dimName].(Interval)[0])
		if pos < 0 || pos >= len(dimNode.splitPoints) {
			continue
		}
		for i, m := range dimNode.splitPoints[pos : len(dimNode.splitPoints)-1] {
			nextM := dimNode.splitPoints[i+pos+1]
			if seg.Rect[dimName] == nil {
				continue
			}
			if seg.Rect[dimName].(Interval)[1].Smaller(m) {
				break
			}
			if seg.Rect[dimName].(Interval)[0].SmallerOrEqual(m) &&
				seg.Rect[dimName].(Interval)[1].BiggerOrEqual(nextM) {
				key := fmt.Sprintf("%v_%v", m, nextM)
				dimNode.segments[key] = append(dimNode.segments[key], index)
			}
		}
	}
	return dimNode
}
// ConjunctionDimDiscreteNode indexes a discrete dimension: one posting
// list of segment indexes per exact value.
// NOTE(review): it embeds the ConjunctionNode struct while the real
// variant embeds the ConjunctionDimNode interface — possibly a
// copy/paste slip; confirm before relying on the embedded fields.
type ConjunctionDimDiscreteNode struct {
	ConjunctionNode
	dimName interface{}
	segments map[Measure][]int
}
// Search returns the posting list for the exact value, or nil.
func (node *ConjunctionDimDiscreteNode) Search(measure Measure) []int {
	if node == nil || node.segments == nil {
		return nil
	}
	return node.segments[measure]
}
// MaxInvertNode returns the size of the largest posting list held by
// this dimension's index (0 for an empty index).
func (node *ConjunctionDimDiscreteNode) MaxInvertNode() int {
	if node == nil || len(node.segments) == 0 {
		return 0
	}
	maxNodeNum := 0
	for _, nodes := range node.segments {
		if len(nodes) > maxNodeNum {
			maxNodeNum = len(nodes)
		}
	}
	return maxNodeNum
}
// SearchRect returns the deduplicated union of posting lists for every
// value in the query scatter set; a non-Measures argument yields nil.
func (node *ConjunctionDimDiscreteNode) SearchRect(scatters interface{}) []int {
	if node == nil || node.segments == nil {
		return nil
	}
	if _, ok := scatters.(Measures); ok == false {
		return nil
	}
	matchSegments := mapset.NewSet()
	for _, d := range scatters.(Measures) {
		for _, seg := range node.segments[d] {
			matchSegments.Add(seg)
		}
	}
	var result []int
	for _, seg := range matchSegments.ToSlice() {
		result = append(result, seg.(int))
	}
	return result
}
// NewDiscreteConjunctionNode builds the discrete index for dimName by
// registering each segment's index under every value its rect lists.
// Returns nil when no segment has this dimension.
func NewDiscreteConjunctionNode(segments []*Segment, dimName interface{}) *ConjunctionDimDiscreteNode {
	node := &ConjunctionDimDiscreteNode{
		dimName: dimName,
		segments: make(map[Measure][]int),
	}
	for segIndex, seg := range segments {
		if seg.Rect[dimName] == nil {
			continue
		}
		for _, m := range seg.Rect[dimName].(Measures) {
			node.segments[m] = append(node.segments[m], segIndex)
		}
	}
	if len(node.segments) == 0 {
		return nil
	}
	return node
} | node_conjunction.go | 0.551574 | 0.507873 | node_conjunction.go | starcoder
package main
import (
"log"
"time"
"github.com/ikester/gpio"
)
// Indexes into the Led pixel array.
const redIndex int = 0
const greenIndex int = 1
const blueIndex int = 2
const brightnessIndex int = 3
// default raw brightness. Not to be used user-side
const defaultBrightnessInt int = 10
//upper and lower bounds for user specified brightness
const minBrightness float64 = 0.0
const maxBrightness float64 = 1.0
// NewLed creates an Led with the default brightness, optionally
// overridden by a value in [0, 1] (only the first variadic value is
// used; out-of-range values terminate the program).
func NewLed(brightness ...float64) Led {
	brightnessInt := defaultBrightnessInt
	if len(brightness) > 0 {
		brightnessInt = convertBrightnessToInt(brightness[0])
	}
	return Led{
		pixel: initPixel(brightnessInt),
	}
}
// Led holds the pixel state as [red, green, blue, rawBrightness],
// addressed via the *Index constants.
type Led struct {
	pixel [4]int
}
// pulse sends a pulse through the LedData/CLK pins: data is held low
// while the clock is toggled the given number of times.
func pulse(pulses int) {
	gpio.DigitalWrite(LedData, 0)
	for i := 0; i < pulses; i++ {
		gpio.DigitalWrite(LedClock, 1)
		gpio.DigitalWrite(LedClock, 0)
	}
}
// eof end of file or signal, from Python library: 36 clock pulses
// terminate a frame.
func eof() {
	pulse(36)
}
// sof start of file (name from Python library): 32 clock pulses open a
// frame.
func sof() {
	pulse(32)
}
// writeByte shifts val out MSB-first on the data pin, toggling the
// clock once per bit.
func writeByte(val int) {
	for i := 0; i < 8; i++ {
		// 0b10000000 = 128
		gpio.DigitalWrite(LedData, val&128)
		gpio.DigitalWrite(LedClock, 1)
		val = val << 1
		gpio.DigitalWrite(LedClock, 0)
	}
}
// convertBrightnessToInt maps a user brightness in [0, 1] onto the raw
// 0-31 hardware range; out-of-range input terminates the program via
// log.Fatalf.
func convertBrightnessToInt(brightness float64) int {
	if !inRangeFloat(minBrightness, brightness, maxBrightness) {
		log.Fatalf("Supplied brightness was %#v - value should be between: %#v and %#v", brightness, minBrightness, maxBrightness)
	}
	return int(brightness * 31.0)
}
// inRangeFloat reports whether testVal lies within the inclusive range
// [minVal, maxVal].
func inRangeFloat(minVal float64, testVal float64, maxVal float64) bool {
	if testVal < minVal {
		return false
	}
	return testVal <= maxVal
}
// Delay sleeps for the given number of milliseconds.
func Delay(ms int) {
	time.Sleep(time.Duration(ms) * time.Millisecond)
}
// Clear switches the LED off (black) and pushes the update.
// NOTE(review): SetPixel already calls show, so the extra show here
// looks redundant — confirm before removing.
func (bl *Led) Clear() {
	bl.SetPixel(0, 0, 0)
	bl.show()
}
// show writes the current pixel to the device: start-of-frame, then a
// brightness byte (0b111 header OR'd with the 5-bit brightness), then
// the blue, green and red bytes, then end-of-frame.
func (bl *Led) show() {
	sof()
	brightness := bl.pixel[brightnessIndex]
	r := bl.pixel[redIndex]
	g := bl.pixel[greenIndex]
	b := bl.pixel[blueIndex]
	// 0b11100000 (224)
	bitwise := 224
	writeByte(bitwise | brightness)
	writeByte(b)
	writeByte(g)
	writeByte(r)
	eof()
}
// SetPixel stores the RGB components and pushes the update to the
// device.
func (bl *Led) SetPixel(r int, g int, b int) {
	bl.pixel[redIndex] = r
	bl.pixel[greenIndex] = g
	bl.pixel[blueIndex] = b
	bl.show()
}
// SetBrightness converts and stores a user brightness in [0, 1], pushes
// the update, and returns the receiver for chaining. Out-of-range
// values terminate the program (see convertBrightnessToInt).
func (bl *Led) SetBrightness(brightness float64) *Led {
	brightnessInt := convertBrightnessToInt(brightness)
	bl.pixel[brightnessIndex] = brightnessInt
	bl.show()
	return bl
}
// initPixel returns a pixel array initialized to black with the given
// raw brightness.
func initPixel(brightness int) [4]int {
	var pixels [4]int
	pixels[redIndex] = 0
	pixels[greenIndex] = 0
	pixels[blueIndex] = 0
	pixels[brightnessIndex] = brightness
	return pixels
} | led.go | 0.743541 | 0.471041 | led.go | starcoder
package sql
import (
"fmt"
"github.com/sheenobu/cm.go"
)
// ValueColumn is a value column that contains metadata about the database column
type ValueColumn struct {
name string
ctype string
null bool
fns map[string]func() interface{}
}
// Column returns a column object given the name and type
func Column(name string, ctype string) ValueColumn {
return ValueColumn{name, ctype, true, make(map[string]func() interface{})}
}
// Varchar returns a column object given the name and size of string
func Varchar(name string, size int) ValueColumn {
return Column(name,
fmt.Sprintf("varchar(%d)", size))
}
// Integer returns an integer given the size
func Integer(name string, size int) ValueColumn {
//TODO: make size a specific flag (BIGINT, SMALLINT, etc)
return Column(name,
fmt.Sprintf("integer"))
}
// PrimaryKey returns a value column that is a primary key
func (c ValueColumn) PrimaryKey() ValueColumn {
c.ctype = c.ctype + " PRIMARY KEY "
return c
}
// FromFunction returns a value column that is populated, on insert, from
// the given function
func (c ValueColumn) FromFunction(fn func() interface{}) ValueColumn {
c.fns["insert"] = fn
return c
}
// AutoIncrement returns the value column definition with autoincrement added
func (c ValueColumn) AutoIncrement() ValueColumn {
c.ctype = c.ctype + " AUTOINCREMENT "
return c
}
// NotNull returns a column object that does not allow null values
func (c ValueColumn) NotNull() ValueColumn {
c.null = false
return c
}
// Build builds the SQL column expression
func (c ValueColumn) Build() string {
s := c.name + " " + c.ctype + " "
if !c.null {
s = s + " not null"
}
return s
}
// begin ValueColumn implementation
// Name returns the name of the SQL value column
func (c ValueColumn) Name() string {
return c.name
}
// Type returns the type of the SQL value column
func (c ValueColumn) Type() string {
return c.ctype
}
// Eq creates an equal predicate used for filtering
func (c ValueColumn) Eq(i interface{}) cm.Predicate {
return &EqPredicate{c, i}
}
// NotEq creates a not equal predicate used for filtering
func (c ValueColumn) NotEq(i interface{}) cm.Predicate {
return &NotEqPredicate{c, i}
}
// Like creates a like predicate used for filtering
func (c ValueColumn) Like(caseSensitive bool, i interface{}) cm.Predicate {
return &LikePredicate{c, i, caseSensitive}
}
// Set creates a modify operation
func (c ValueColumn) Set(i interface{}) cm.Operation {
return &UpdateOperation{c, i}
}
// end ValueColumn implementation | sql/column.go | 0.569374 | 0.4133 | column.go | starcoder |
package main
import (
"math"
"math/rand"
. "github.com/jakecoffman/cp"
"github.com/jakecoffman/cp/examples"
)
const (
COLLISION_TYPE_STICKY = 1
STICK_SENSOR_THICKNESS = 2.5
)
func PostStepAddJoint(space *Space, key, _ interface{}) {
space.AddConstraint(key.(*Constraint))
}
func StickyPreSolve(arb *Arbiter, space *Space, data interface{}) bool {
// We want to fudge the collisions a bit to allow shapes to overlap more.
// This simulates their squishy sticky surface, and more importantly
// keeps them from separating and destroying the joint.
// Track the deepest collision point and use that to determine if a rigid collision should occur.
deepest := INFINITY
contacts := arb.ContactPointSet()
for i := 0; i < contacts.Count; i++ {
// Sink the contact points into the surface of each shape.
contacts.Points[i].PointA = contacts.Points[i].PointA.Sub(contacts.Normal.Mult(STICK_SENSOR_THICKNESS))
contacts.Points[i].PointB = contacts.Points[i].PointB.Sub(contacts.Normal.Mult(STICK_SENSOR_THICKNESS))
deepest = math.Min(deepest, contacts.Points[i].Distance)
}
arb.SetContactPointSet(&contacts)
// If the shapes are overlapping enough, then create a
// joint that sticks them together at the first contact point.
if arb.UserData == nil && deepest <= 0 {
bodyA, bodyB := arb.Bodies()
// Create a joint at the contact point to hold the body in place.
anchorA := bodyA.WorldToLocal(contacts.Points[0].PointA)
anchorB := bodyB.WorldToLocal(contacts.Points[0].PointB)
joint := NewPivotJoint2(bodyA, bodyB, anchorA, anchorB)
// Give it a finite force for the stickiness.
joint.SetMaxForce(3e3)
// Schedule a post-step() callback to add the joint.
space.AddPostStepCallback(PostStepAddJoint, joint, nil)
// Store the joint on the arbiter so we can remove it later.
arb.UserData = joint
}
// Position correction and velocity are handled separately so changing
// the overlap distance alone won't prevent the collision from occuring.
// Explicitly the collision for this frame if the shapes don't overlap using the new distance.
return deepest <= 0
// Lots more that you could improve upon here as well:
// * Modify the joint over time to make it plastic.
// * Modify the joint in the post-step to make it conditionally plastic (like clay).
// * Track a joint for the deepest contact point instead of the first.
// * Track a joint for each contact point. (more complicated since you only get one data pointer).
}
func PostStepRemoveJoint(space *Space, key, _ interface{}) {
space.RemoveConstraint(key.(*Constraint))
}
func StickySeparate(arb *Arbiter, space *Space, _ interface{}) {
if arb.UserData != nil {
joint := arb.UserData.(*Constraint)
// The joint won't be removed until the step is done.
// Need to disable it so that it won't apply itself.
// Setting the force to 0 will do just that
joint.SetMaxForce(0)
// Perform the removal in a post-step() callback.
space.AddPostStepCallback(PostStepRemoveJoint, joint, nil)
// NULL out the reference to the joint.
// Not required, but it's a good practice.
arb.UserData = nil
}
}
func main() {
space := NewSpace()
space.Iterations = 10
space.SetGravity(Vector{0, -1000})
space.SetCollisionSlop(2.0)
walls := []Vector{
{-340, -260}, {-340, 260},
{340, -260}, {340, 260},
{-340, -260}, {340, -260},
{-340, 260}, {340, 260},
}
for i := 0; i < len(walls)-1; i += 2 {
shape := space.AddShape(NewSegment(space.StaticBody, walls[i], walls[i+1], 20))
shape.SetElasticity(1)
shape.SetFriction(1)
shape.SetFilter(examples.NotGrabbableFilter)
}
mass := 0.15
radius := 10.0
for i := 0; i < 200; i++ {
body := space.AddBody(NewBody(mass, MomentForCircle(mass, 0, radius, Vector{})))
body.SetPosition(Vector{Lerp(-150, 150, rand.Float64()), Lerp(-150, 150, rand.Float64())})
shape := space.AddShape(NewCircle(body, radius+STICK_SENSOR_THICKNESS, Vector{}))
shape.SetFriction(0.9)
shape.SetCollisionType(COLLISION_TYPE_STICKY)
}
handler := space.NewWildcardCollisionHandler(COLLISION_TYPE_STICKY)
handler.PreSolveFunc = StickyPreSolve
handler.SeparateFunc = StickySeparate
examples.Main(space, 1.0/60.0, update, examples.DefaultDraw)
}
// update advances the simulation by dt seconds; called once per frame.
func update(space *Space, dt float64) {
	space.Step(dt)
} | examples/sticky/sticky.go | 0.682891 | 0.423398 | sticky.go | starcoder
package texture
import (
"log"
"time"
"github.com/kasworld/h4o/_examples/app"
"github.com/kasworld/h4o/geometry"
"github.com/kasworld/h4o/graphic"
"github.com/kasworld/h4o/light"
"github.com/kasworld/h4o/material"
"github.com/kasworld/h4o/math32"
"github.com/kasworld/h4o/texture"
"github.com/kasworld/h4o/util/helper"
)
// init registers the demo under "texture.cone-cylinder" so the demo
// runner can find it by name.
func init() {
	app.DemoMap["texture.cone-cylinder"] = &TexConeCylinder{}
}

// TexConeCylinder is a demo showing textured cylinders and a truncated cone.
type TexConeCylinder struct {
	mesh1 *graphic.Mesh // left cylinder (brick texture)
	mesh2 *graphic.Mesh // middle cylinder (moss texture)
	mesh3 *graphic.Mesh // right truncated cone (checkerboard texture)
}
// Start is called once at the start of the demo. It lights the scene with
// three colored directional lights, adds three textured meshes (two
// cylinders and a truncated cone), and an axes helper.
// It terminates the program if any texture fails to load.
func (t *TexConeCylinder) Start(a *app.App) {
	// Add directional red light from right
	l1 := light.NewDirectional(&math32.Color{1, 0, 0}, 1.0)
	l1.SetPosition(1, 0, 0)
	a.Scene().Add(l1)
	// Add directional green light from top
	l2 := light.NewDirectional(&math32.Color{0, 1, 0}, 1.0)
	l2.SetPosition(0, 1, 0)
	a.Scene().Add(l2)
	// Add directional blue light from front
	l3 := light.NewDirectional(&math32.Color{0, 0, 1}, 1.0)
	l3.SetPosition(0, 0, 1)
	a.Scene().Add(l3)
	// Left cylinder
	tex, err := texture.NewTexture2DFromImage(a.DirData() + "/images/brick1.jpg")
	if err != nil {
		// log.Fatal does not interpret printf verbs; Fatalf is required.
		log.Fatalf("Error loading texture: %s", err)
	}
	geom1 := geometry.NewCylinder(0.8, 2, 16, 2, true, true)
	mat1 := material.NewStandard(&math32.Color{1, 1, 1})
	mat1.SetSide(material.SideDouble)
	mat1.AddTexture(tex)
	t.mesh1 = graphic.NewMesh(geom1, mat1)
	t.mesh1.SetPosition(-2, 0, 0)
	a.Scene().Add(t.mesh1)
	// Middle cylinder
	tex, err = texture.NewTexture2DFromImage(a.DirData() + "/images/moss.png")
	if err != nil {
		log.Fatalf("Error loading texture: %s", err)
	}
	geom2 := geometry.NewCylinder(0.8, 2, 32, 16, false, true)
	mat2 := material.NewStandard(&math32.Color{1, 1, 1})
	mat2.SetSide(material.SideDouble)
	mat2.AddTexture(tex)
	t.mesh2 = graphic.NewMesh(geom2, mat2)
	t.mesh2.SetPosition(0, 0, 0)
	a.Scene().Add(t.mesh2)
	// Right cylinder
	tex, err = texture.NewTexture2DFromImage(a.DirData() + "/images/checkerboard.jpg")
	if err != nil {
		log.Fatalf("Error loading texture: %s", err)
	}
	geom3 := geometry.NewTruncatedCone(0.4, 0.8, 2, 32, 1, false, true)
	mat3 := material.NewStandard(&math32.Color{1, 1, 1})
	mat3.SetSide(material.SideDouble)
	mat3.AddTexture(tex)
	t.mesh3 = graphic.NewMesh(geom3, mat3)
	t.mesh3.SetPosition(2, 0, 0)
	a.Scene().Add(t.mesh3)
	// Create axes helper
	axes := helper.NewAxes(2)
	a.Scene().Add(axes)
}
// Update is called every frame; it spins each mesh at a slightly
// different fixed rate (not yet scaled by deltaTime).
func (t *TexConeCylinder) Update(a *app.App, deltaTime time.Duration) {
	// TODO Use deltaTime
	t.mesh1.RotateY(0.005)
	t.mesh2.RotateY(-0.004)
	t.mesh3.RotateY(0.003)
}
// Cleanup is called once at the end of the demo.
func (t *TexConeCylinder) Cleanup(a *app.App) {} | _examples/demos/texture/cylinder.go | 0.51562 | 0.41052 | cylinder.go | starcoder
package protocol
// size is the side length (in pixels) of the square LED matrix.
const size int = 16

// ColorModel is one pixel of the matrix: an RGB triplet plus a hold value.
type ColorModel struct {
	Color [3]uint8 // red, green, blue channel values
	Hold uint8
}

// LedMatrixModel is the full size-by-size pixel matrix.
type LedMatrixModel struct {
	Matrix [size][size]ColorModel
}
// ConvertMatrixToFrame serialises the 16x16 matrix into two output frames.
// Frame 1 (FrameNumber 0x01) covers matrix rows 0-7, frame 2 (0x02) rows
// 8-15. Each frame is walked as a 4-tile-high by 8-tile-wide panel of 2x2
// pixel blocks; for every block the four pixels are emitted in the order
// top-left, top-right, bottom-left, bottom-right, three bytes (R, G, B)
// per pixel — exactly the byte order the original duplicated loops produced.
func (l *LedMatrixModel) ConvertMatrixToFrame() (FrameModelOutput, FrameModelOutput) {
	var frames [2]FrameModelOutput
	frames[0].FrameNumber = 0x01
	frames[1].FrameNumber = 0x02
	for f := range frames {
		// Panel height: 4 tile rows per frame (tiles 0-3, then 4-7).
		for column := f * 4; column < (f+1)*4; column++ {
			// Panel width: 8 tile columns.
			for lign := 0; lign < 8; lign++ {
				// The four pixels of the 2x2 block at this tile position.
				block := [4]ColorModel{
					l.Matrix[column*2][lign*2],
					l.Matrix[column*2][lign*2+1],
					l.Matrix[column*2+1][lign*2],
					l.Matrix[column*2+1][lign*2+1],
				}
				for _, c := range block {
					frames[f].Data = append(frames[f].Data, c.Color[0], c.Color[1], c.Color[2])
				}
			}
		}
	}
	return frames[0], frames[1]
} | GOLANG/protocol_paillettes/protocol/LedMatrixModel.go | 0.515376 | 0.635548 | LedMatrixModel.go | starcoder
package interest
import (
"github.com/strongo/decimal"
"time"
"fmt"
)
// Credit describes the interest terms of a credit agreement.
type Credit interface {
	Formula() Formula
	RatePeriod() RatePeriodInDays
	RatePercent() decimal.Decimal64p2
	MinimumPeriod() int
	GracePeriod() int
}

// Deal is a Credit together with its start time and the amount lent.
type Deal interface {
	Credit
	Time() time.Time
	LentAmount() decimal.Decimal64p2
}

// deal is the immutable Deal implementation returned by NewDeal.
type deal struct {
	formula Formula
	time time.Time
	lentAmount decimal.Decimal64p2
	ratePeriod RatePeriodInDays
	ratePercent decimal.Decimal64p2
	minimumPeriod int
	gracePeriod int
}
// Formula returns the interest formula of the deal.
func (d deal) Formula() Formula {
	return d.formula
}
// LentAmount returns the amount originally lent.
func (d deal) LentAmount() decimal.Decimal64p2 {
	return d.lentAmount
}
// RatePercent returns the interest rate per rate period, in percent.
func (d deal) RatePercent() decimal.Decimal64p2 {
	return d.ratePercent
}
// RatePeriod returns the length of the rate period in days.
func (d deal) RatePeriod() RatePeriodInDays {
	return d.ratePeriod
}
// MinimumPeriod returns the minimum charged period, in days.
func (d deal) MinimumPeriod() int {
	return d.minimumPeriod
}
// GracePeriod returns the interest-free grace period, in days.
func (d deal) GracePeriod() int {
	return d.gracePeriod
}
// Time returns the start time of the deal.
func (d deal) Time() time.Time {
	return d.time
}
// NewDeal validates its arguments and returns an immutable Deal.
// It panics when lentAmount, ratePercent, minimumPeriod, or gracePeriod is
// negative, or when ratePeriod is not positive. The panic messages now
// match the conditions actually checked (they previously all claimed
// "<= 0" even though zero values are accepted for everything but
// ratePeriod).
func NewDeal(formula Formula, time time.Time, lentAmount, ratePercent decimal.Decimal64p2, ratePeriod RatePeriodInDays, minimumPeriod, gracePeriod int) Deal {
	if lentAmount < 0 {
		panic(fmt.Sprintf("lentAmount < 0: %v", lentAmount))
	}
	if ratePercent < 0 {
		panic(fmt.Sprintf("ratePercent < 0: %v", ratePercent))
	}
	if minimumPeriod < 0 {
		panic(fmt.Sprintf("minimumPeriod < 0: %v", minimumPeriod))
	}
	if gracePeriod < 0 {
		panic(fmt.Sprintf("gracePeriod < 0: %v", gracePeriod))
	}
	if ratePeriod <= 0 {
		panic(fmt.Sprintf("ratePeriod <= 0: %v", ratePeriod))
	}
	return deal{
		formula: formula,
		time: time,
		lentAmount: lentAmount,
		ratePeriod: ratePeriod,
		ratePercent: ratePercent,
		minimumPeriod: minimumPeriod,
		gracePeriod: gracePeriod,
	}
}
// Payment is a repayment of a given amount at a given time.
type Payment interface {
	Time() time.Time
	Amount() decimal.Decimal64p2
}

// payment is the immutable Payment implementation returned by NewPayment.
type payment struct {
	time time.Time
	amount decimal.Decimal64p2
}

// Time returns when the payment was made.
func (p payment) Time() time.Time {
	return p.time
}

// Amount returns the amount paid.
func (p payment) Amount() decimal.Decimal64p2 {
	return p.amount
}

// NewPayment creates a Payment with the given time and amount.
// NOTE(review): unlike NewDeal, no validation is done here — negative
// amounts are accepted; confirm whether that is intentional.
func NewPayment(time time.Time, amount decimal.Decimal64p2) Payment {
	return payment{time: time, amount: amount}
}
// Calculator computes the accrued interest and outstanding balance for a
// deal and its payments as of reportTime, using a specific Formula.
type Calculator interface {
	Formula() Formula
	Calculate(reportTime time.Time, deal Deal, payments []Payment) (interest, outstanding decimal.Decimal64p2, err error)
} | interfaces.go | 0.705785 | 0.434281 | interfaces.go | starcoder
package fy
import "github.com/MaxSlyugrov/cldr"
// calendar holds the CLDR date and time formatting data (patterns, month,
// day, and day-period names) for this locale ("fy", Western Frisian).
// Generated-style data: edit with care and keep in sync with CLDR.
var calendar = cldr.Calendar{
	Formats: cldr.CalendarFormats{
		Date: cldr.CalendarDateFormat{Full: "EEEE d MMMM y", Long: "d MMMM y", Medium: "d MMM y", Short: "dd-MM-yy"},
		Time: cldr.CalendarDateFormat{Full: "HH:mm:ss zzzz", Long: "HH:mm:ss z", Medium: "HH:mm:ss", Short: "HH:mm"},
		DateTime: cldr.CalendarDateFormat{Full: "{1} {0}", Long: "{1} {0}", Medium: "{1} {0}", Short: "{1} {0}"},
	},
	FormatNames: cldr.CalendarFormatNames{
		Months: cldr.CalendarMonthFormatNames{
			Abbreviated: cldr.CalendarMonthFormatNameValue{Jan: "jan", Feb: "feb", Mar: "mrt", Apr: "apr", May: "mai", Jun: "jun", Jul: "jul", Aug: "aug", Sep: "sep", Oct: "okt", Nov: "nov", Dec: "des"},
			Narrow: cldr.CalendarMonthFormatNameValue{Jan: "J", Feb: "F", Mar: "M", Apr: "A", May: "M", Jun: "J", Jul: "J", Aug: "A", Sep: "S", Oct: "O", Nov: "N", Dec: "D"},
			Short: cldr.CalendarMonthFormatNameValue{},
			Wide: cldr.CalendarMonthFormatNameValue{Jan: "jannewaris", Feb: "febrewaris", Mar: "maart", Apr: "april", May: "maaie", Jun: "juny", Jul: "july", Aug: "augustus", Sep: "septimber", Oct: "oktober", Nov: "novimber", Dec: "desimber"},
		},
		Days: cldr.CalendarDayFormatNames{
			Abbreviated: cldr.CalendarDayFormatNameValue{Sun: "si", Mon: "mo", Tue: "ti", Wed: "wo", Thu: "to", Fri: "fr", Sat: "so"},
			Narrow: cldr.CalendarDayFormatNameValue{Sun: "Z", Mon: "M", Tue: "D", Wed: "W", Thu: "D", Fri: "V", Sat: "Z"},
			Short: cldr.CalendarDayFormatNameValue{Sun: "si", Mon: "mo", Tue: "ti", Wed: "wo", Thu: "to", Fri: "fr", Sat: "so"},
			Wide: cldr.CalendarDayFormatNameValue{Sun: "snein", Mon: "moandei", Tue: "tiisdei", Wed: "woansdei", Thu: "tongersdei", Fri: "freed", Sat: "sneon"},
		},
		Periods: cldr.CalendarPeriodFormatNames{
			Abbreviated: cldr.CalendarPeriodFormatNameValue{AM: "a.m.", PM: "p.m."},
			Narrow: cldr.CalendarPeriodFormatNameValue{AM: "AM", PM: "PM"},
			Short: cldr.CalendarPeriodFormatNameValue{},
			Wide: cldr.CalendarPeriodFormatNameValue{AM: "foarmiddei", PM: "p.m."},
		},
	},
} | resources/locales/fy/calendar.go | 0.516352 | 0.425009 | calendar.go | starcoder
package cal
import (
"fmt"
"time"
)
// Canadian holidays
// Source: https://en.wikipedia.org/wiki/Public_holidays_in_Canada
// Other source with conflicting data: https://www.timeanddate.com/holidays/canada/
// Wikipedia was chosen over timeanddate, an actual Canadian should go over the data.
var (
	// National holidays
	caNewYear = newYear.SetLabel("New year")
	caGoodFriday = goodFriday.SetLabel("Good friday")
	caCanadaDay = NewHoliday(time.July, 1).SetLabel("Canada day")
	caLabourDay = NewHolidayFloat(time.September, time.Monday, 1).SetLabel("Labour day") // first Monday of September
	caChristmasDay = christmas.SetLabel("Christmas day")

	// Per-province holidays, some are on the same day but are not the same
	// holidays, entries are duplicated for future-proofing.

	// AB, BC (special case below), NB, ON, SK — third Monday of February
	caFamilyDay = NewHolidayFloat(time.February, time.Monday, 3).SetLabel("Family day")
	// Acts as QC National Patriot's Day and Victoria Day for all the rest except NL.
	caVictoriaDay = NewHolidayFloat(time.May, time.Monday, 3).SetLabel("Victoria day")
	// NT, YT
	caAboriginalDay = NewHoliday(time.June, 21).SetLabel("Aboriginal day")
	// NL, QC, YT
	caDiscoveryDay = NewHoliday(time.June, 24).SetLabel("Discovery day")
	// Everyone except MB, ON, QC
	caRemembranceDay = NewHoliday(time.November, 11).SetLabel("Remembrance day")
	// AB, NB, NS, ON, PE
	caBoxingDay = christmas2.SetLabel("Boxing day")
	// aka. Civic Holiday, Heritage Day, New Brunswick Day, Natal Day
	// Everyone except QC — first Monday of August
	caCivicHoliday = NewHolidayFloat(time.August, time.Monday, 1).SetLabel("Civic Holiday")
	// Everyone except NB, NL, PE — second Monday of October
	caThanksgiving = NewHolidayFloat(time.October, time.Monday, 2).SetLabel("Thanksgiving")
	caBCFamilyDay = NewHolidayFactory(calculateBritishColumbiaFamilyDay).SetLabel("Family day")
	caMBLouisRielDay = NewHolidayFloat(time.February, time.Monday, 3).SetLabel("Louis Riel day")
	caNLOrangemensDay = NewHoliday(time.July, 12).SetLabel("Orangemens Day")
	caNLSaintGeorgesDay = NewHoliday(time.April, 23).SetLabel("Saint George's day")
	caNLSaintPatricksDay = NewHoliday(time.March, 17).SetLabel("Saint Patrick's day")
	caNSHeritageDay = NewHolidayFloat(time.February, time.Monday, 3).SetLabel("Heritage day")
	caPEEasterMonday = easterMonday.SetLabel("Easter monday")
	caPEGoldCupParadeDay = NewHolidayFloat(time.August, time.Friday, 3).SetLabel("Gold Cup parade day")
	caPEIslanderDay = NewHolidayFloat(time.February, time.Monday, 3).SetLabel("Islander day")
	caQCNationalHoliday = NewHoliday(time.June, 24).SetLabel("National Holiday")
	caQCPatriotsDay = caVictoriaDay.SetLabel("Patriot's day")
	caYTDiscoveryDay = NewHolidayFloat(time.August, time.Monday, 3).SetLabel("Discovery day")
	caYTSaintJeanBaptiste = caDiscoveryDay.SetLabel("Saint John the Baptist")
)
// addCanadianHolidays adds the nation-wide Canadian holidays to the calendar.
// NOTE(review): caBoxingDay is added here nation-wide AND again by several
// provinces in addCanadaProvinceHolidays, while the variable's own comment
// lists it as province-specific (AB, NB, NS, ON, PE) — confirm intended.
func addCanadianHolidays(c *Calendar) {
	c.AddHoliday(
		caBoxingDay,
		caNewYear,
		caGoodFriday,
		caCanadaDay,
		caLabourDay,
		caChristmasDay,
	)
}
// addCanadaProvinceHolidays adds the province-specific holidays for the
// given ISO 3166-2 code (e.g. "CA-ON") to the calendar. An error is
// returned for unrecognised province codes.
func addCanadaProvinceHolidays(c *Calendar, province string) error { // nolint:funlen
	switch province {
	case "CA-AB": // Alberta
		c.AddHoliday(
			caCivicHoliday,
			caFamilyDay,
			caRemembranceDay,
			caThanksgiving,
			caVictoriaDay,
		)
	case "CA-BC": // British Columbia
		c.AddHoliday(
			caBCFamilyDay,
			caCivicHoliday,
			caRemembranceDay,
			caThanksgiving,
			caVictoriaDay,
		)
	case "CA-MB": // Manitoba
		c.AddHoliday(
			caCivicHoliday,
			caMBLouisRielDay,
			caThanksgiving,
			caVictoriaDay,
		)
	case "CA-NB": // New Brunswick
		c.AddHoliday(
			caBoxingDay,
			caCivicHoliday,
			caFamilyDay,
			caRemembranceDay,
			caVictoriaDay,
		)
	case "CA-NL": // Newfoundland and Labrador
		c.AddHoliday(
			caCivicHoliday,
			caDiscoveryDay,
			caNLOrangemensDay,
			caNLSaintGeorgesDay,
			caNLSaintPatricksDay,
			caRemembranceDay,
		)
	case "CA-NT": // Northwest Territories
		c.AddHoliday(
			caAboriginalDay,
			caCivicHoliday,
			caRemembranceDay,
			caThanksgiving,
			caVictoriaDay,
		)
	case "CA-NS": // Nova Scotia
		c.AddHoliday(
			caBoxingDay,
			caCivicHoliday,
			caNSHeritageDay,
			caRemembranceDay,
			caThanksgiving,
			caVictoriaDay,
		)
	case "CA-NU": // Nunavut
		c.AddHoliday(
			caCivicHoliday,
			caRemembranceDay,
			caThanksgiving,
			caVictoriaDay,
		)
	case "CA-ON": // Ontario
		c.AddHoliday(
			caBoxingDay,
			caCivicHoliday,
			caFamilyDay,
			caThanksgiving,
			caVictoriaDay,
		)
	case "CA-PE": // Prince Edward Island
		c.AddHoliday(
			caBoxingDay,
			caCivicHoliday,
			caPEEasterMonday,
			caPEGoldCupParadeDay,
			caPEIslanderDay,
			caRemembranceDay,
			caVictoriaDay,
		)
	case "CA-QC": // Quebec
		c.AddHoliday(
			caDiscoveryDay,
			caQCNationalHoliday,
			caQCPatriotsDay,
			caThanksgiving,
		)
	case "CA-SK": // Saskatchewan
		c.AddHoliday(
			caCivicHoliday,
			caFamilyDay,
			caRemembranceDay,
			caThanksgiving,
			caVictoriaDay,
		)
	case "CA-YT": // Yukon
		c.AddHoliday(
			caAboriginalDay,
			caCivicHoliday,
			caRemembranceDay,
			caThanksgiving,
			caVictoriaDay,
			caYTDiscoveryDay,
			caYTSaintJeanBaptiste, // Saint Jean-Baptiste
		)
	default:
		return fmt.Errorf("unknown province %s", province)
	}
	return nil
}
// https://www.cbc.ca/news/canada/british-columbia/b-c-s-first-family-day-set-for-feb-11-2013-1.1240359
// https://www.cbc.ca/news/canada/british-columbia/b-c-family-day-moving-one-week-later-starting-in-2019-1.4528735
// calculateBritishColumbiaFamilyDay returns BC Family Day for the given
// year: the third Monday of February from 2019 onward, the second Monday
// for 2013-2018, and the zero Holiday (no holiday) before 2013.
func calculateBritishColumbiaFamilyDay(year int, loc *time.Location) Holiday {
	if year >= 2019 {
		return NewHolidayFloat(time.February, time.Monday, 3)
	}
	if year >= 2013 {
		return NewHolidayFloat(time.February, time.Monday, 2)
	}
	return Holiday{}
} | v2/holiday_defs_ca.go | 0.53607 | 0.547101 | holiday_defs_ca.go | starcoder
package main
import (
"bufio"
"fmt"
"math"
"os"
"strconv"
)
// Grid for cellular automata, mazes, etc.
type Grid [][]byte

// Coordinate is a position on a Grid, i.e., grid[y][x]
type Coordinate struct {
	y int
	x int
}

// Get returns the value at a map coordinate, or the zero byte if (i, j)
// is out of bounds.
func (g Grid) Get(i, j int) byte {
	if i >= 0 && i < len(g) && j >= 0 && j < len(g[i]) {
		return g[i][j]
	}
	return 0
}

// Print writes the grid to stdout, one row per line.
func (g Grid) Print() {
	for _, row := range g {
		fmt.Printf("%s\n", row)
	}
}

// Copy returns a deep copy of the grid; the rows of the copy share no
// backing storage with the original.
func (g Grid) Copy() Grid {
	c := make(Grid, 0, len(g))
	for _, row := range g {
		c = append(c, append([]byte(nil), row...))
	}
	return c
}
// parseInt converts str to an int, panicking on malformed input
// (puzzle input is trusted, so a parse failure is a programmer error).
func parseInt(str string) int {
	n, err := strconv.Atoi(str)
	if err != nil {
		panic(err)
	}
	return n
}
const (
	// sideLen is the pixel width/height of one tile edge.
	sideLen = 10
)

// reverseBits reverses the low sideLen bits of num.
func reverseBits(num int) (res int) {
	for i := 0; i < sideLen; i++ {
		res = res<<1 | num>>i&1
	}
	return
}

// intSqrt returns the exact integer square root of num and panics when
// num is not a perfect square.
func intSqrt(num int) int {
	root := int(math.Sqrt(float64(num)))
	if root*root != num {
		panic(num)
	}
	return root
}

// Tile holds the top, right, bottom, and left edges of a tile, in
// clockwise order, each edge read starting at the least significant bit.
type Tile [4]int

// Rotate returns the tile rotated one quarter turn (edges shift one slot).
func (t Tile) Rotate() Tile {
	return Tile{t[3], t[0], t[1], t[2]}
}

// Flip returns the mirror image: every edge is bit-reversed and the left
// and right edges trade places.
func (t Tile) Flip() Tile {
	return Tile{reverseBits(t[0]), reverseBits(t[3]), reverseBits(t[2]), reverseBits(t[1])}
}

// DihedralFour returns all eight orientations of the tile: the four
// rotations of t followed by the four rotations of its mirror image.
func (t Tile) DihedralFour() (result [8]Tile) {
	result[0], result[4] = t, t.Flip()
	for i := 1; i < 4; i++ {
		result[i] = result[i-1].Rotate()
		result[i+4] = result[i+3].Rotate()
	}
	return
}
// TileArrangement is a square grid of oriented tiles.
type TileArrangement [][]Tile

// compatible reports whether tile t may be placed at (y, x) in a: its top
// edge must match the bottom edge of the tile above, and its left edge the
// right edge of the tile to the left. Edges are stored clockwise, so the
// neighbour's edge is compared against the bit-reversed candidate edge.
func compatible(a TileArrangement, y, x int, t Tile) bool {
	if y > 0 && a[y-1][x][2] != reverseBits(t[0]) {
		return false
	}
	if x > 0 && a[y][x-1][1] != reverseBits(t[3]) {
		return false
	}
	return true
}
// recursiveBacktrack tries to place every remaining tile, in any of its
// eight orientations, at successive grid positions (row-major), and
// backtracks when no orientation fits. result (tile IDs) and
// tileArrangement (oriented tiles) are filled in place; tiles is restored
// before returning false. Returns true once all tiles are placed.
// (Also normalises the previously mixed tileId/tileID spelling to the Go
// initialism convention.)
func recursiveBacktrack(tiles map[int]Tile, result [][]int, tileArrangement [][]Tile, y, x int) (success bool) {
	if len(tiles) == 0 {
		return true
	}
	// Snapshot the keys up front: the map is mutated while candidates
	// are being tried.
	curTiles := make([]int, 0, len(tiles))
	for tileID := range tiles {
		curTiles = append(curTiles, tileID)
	}
	nextY := y
	nextX := (x + 1) % len(result[0])
	if nextX == 0 {
		nextY = (y + 1) % len(result)
	}
	for _, tileID := range curTiles {
		tile := tiles[tileID]
		delete(tiles, tileID)
		for _, r := range tile.DihedralFour() {
			if compatible(tileArrangement, y, x, r) {
				result[y][x] = tileID
				tileArrangement[y][x] = r
				if recursiveBacktrack(tiles, result, tileArrangement, nextY, nextX) {
					return true
				}
			}
		}
		tiles[tileID] = tile // backtrack: put the tile back
	}
	return false
}
// arrange places all tiles into a square grid so that adjacent edges match
// and returns the grid of tile IDs; it panics when no arrangement exists.
func arrange(tiles map[int]Tile) (result [][]int) {
	side := intSqrt(len(tiles))
	ta := make(TileArrangement, side)
	result = make([][]int, side)
	for i := 0; i < side; i++ {
		result[i] = make([]int, side)
		ta[i] = make([]Tile, side)
	}
	if !recursiveBacktrack(tiles, result, ta, 0, 0) {
		panic("fail")
	}
	return
}
// solve parses the "Tile NNNN:" blocks from the puzzle input, encodes each
// tile's four edges as sideLen-bit integers (clockwise: top L-to-R, right
// T-to-B, bottom R-to-L, left B-to-T), arranges the tiles into a square,
// and returns the product of the four corner tile IDs.
// The error result is currently always nil.
func solve(input []string) (result int, err error) {
	tiles := make(map[int]Tile)
	for len(input) > 0 {
		line := input[0]
		input = input[1:]
		// Header "Tile NNNN:" — strip "Tile " and the trailing colon.
		tileNum := parseInt(line[5 : len(line)-1])
		var grid Grid
		for {
			grid = append(grid, []byte(input[0]))
			input = input[1:]
			if input[0] == "" {
				break
			}
		}
		var tile Tile
		// Top edge, left to right.
		for i := 0; i < sideLen; i++ {
			if grid[0][i] == '#' {
				tile[0] |= 1 << i
			}
		}
		// Right edge, top to bottom.
		for i := 0; i < sideLen; i++ {
			if grid[i][sideLen-1] == '#' {
				tile[1] |= 1 << i
			}
		}
		// Bottom edge, right to left (keeps clockwise orientation).
		for i := 0; i < sideLen; i++ {
			if grid[sideLen-1][(sideLen-1)-i] == '#' {
				tile[2] |= 1 << i
			}
		}
		// Left edge, bottom to top (keeps clockwise orientation).
		for i := 0; i < sideLen; i++ {
			if grid[(sideLen-1)-i][0] == '#' {
				tile[3] |= 1 << i
			}
		}
		tiles[tileNum] = tile
		input = input[1:] // skip the blank separator line
	}
	squareSide := intSqrt(len(tiles))
	arrangement := arrange(tiles)
	result = 1
	result *= arrangement[0][0]
	result *= arrangement[0][squareSide-1]
	result *= arrangement[squareSide-1][0]
	result *= arrangement[squareSide-1][squareSide-1]
	return
}
// main reads the puzzle input from stdin and prints the product of the
// four corner tile IDs.
func main() {
	var input []string
	scanner := bufio.NewScanner(os.Stdin)
	for scanner.Scan() {
		line := scanner.Text()
		input = append(input, line)
	}
	// NOTE(review): scanner.Err() is not checked; a read error would be
	// silently treated as end of input.
	solution, err := solve(input)
	if err != nil {
		panic(err)
	}
	fmt.Println(solution)
} | 2020/20/a.go | 0.669745 | 0.403126 | a.go | starcoder
package schema
// PayloadSchema is the JSON Schema (draft-04) used to validate incoming
// metrics payloads. This constant lives in a "generated" package — do not
// hand-edit the schema text; regenerate it from the spec instead.
const PayloadSchema = `{
    "$schema": "http://json-schema.org/draft-04/schema#",
    "$id": "docs/spec/metrics/payload.json",
    "title": "Metrics payload",
    "description": "Metrics for correlation with other APM data",
    "type": "object",
    "properties": {
        "metrics": {
            "type": "array",
            "items": {
                    "$schema": "http://json-schema.org/draft-04/schema#",
    "$id": "docs/spec/metrics/metric.json",
    "type": "object",
    "description": "Metric data captured by an APM agent",
    "properties": {
        "samples": {
            "type": ["object"],
            "description": "Sampled application metrics collected from the agent",
            "patternProperties": {
                "^[^*\"]*$": {
                        "$schema": "http://json-schema.org/draft-04/schema#",
    "$id": "docs/spec/metrics/sample.json",
    "type": ["object", "null"],
    "description": "A single metric sample.",
    "properties": {
        "value": {"type": "number"}
    },
    "required": ["value"]
                }
            },
            "additionalProperties": false
        },
        "tags": {
            "type": ["object", "null"],
            "description": "A flat mapping of user-defined tags with string values",
            "patternProperties": {
                "^[^*\"]*$": {
                    "type": ["string", "null"],
                    "maxLength": 1024
                }
            },
            "additionalProperties": false
        },
        "timestamp": {
            "type": "string",
            "format": "date-time",
            "pattern": "Z$",
            "description": "Recorded time of the metric, UTC based and formatted as YYYY-MM-DDTHH:mm:ss.sssZ"
        }
    },
    "required": ["samples", "timestamp"]
            },
            "minItems": 1
        },
        "process": {
              "$id": "doc/spec/process.json",
  "title": "Process",
  "type": ["object", "null"],
  "properties": {
      "pid": {
          "description": "Process ID of the service",
          "type": ["integer"]
      },
      "ppid": {
          "description": "Parent process ID of the service",
          "type": ["integer", "null"]
      },
      "title": {
          "type": ["string", "null"],
          "maxLength": 1024
      },
      "argv": {
        "description": "Command line arguments used to start this process",
        "type": ["array", "null"],
        "minItems": 0,
        "items": {
           "type": "string"
        }
    }
  },
  "required": ["pid"]
        },
        "service": {
                "$id": "doc/spec/service.json",
    "title": "Service",
    "type": "object",
    "properties": {
        "agent": {
            "description": "Name and version of the Elastic APM agent",
            "type": "object",
            "properties": {
                "name": {
                    "description": "Name of the Elastic APM agent, e.g. \"Python\"",
                    "type": "string",
                    "maxLength": 1024
                },
                "version": {
                    "description": "Version of the Elastic APM agent, e.g.\"1.0.0\"",
                    "type": "string",
                    "maxLength": 1024
                }
            },
            "required": ["name", "version"]
        },
        "framework": {
            "description": "Name and version of the web framework used",
            "type": ["object", "null"],
            "properties": {
                "name": {
                    "type": "string",
                    "maxLength": 1024
                },
                "version": {
                    "type": "string",
                    "maxLength": 1024
                }
            },
            "required": ["name", "version"]
        },
        "language": {
            "description": "Name and version of the programming language used",
            "type": ["object", "null"],
            "properties": {
                "name": {
                    "type": "string",
                    "maxLength": 1024
                },
                "version": {
                    "type": ["string", "null"],
                    "maxLength": 1024
                }
            },
            "required": ["name"]
        },
        "name": {
            "description": "Immutable name of the service emitting this event",
            "type": "string",
            "pattern": "^[a-zA-Z0-9 _-]+$",
            "maxLength": 1024
        },
        "environment": {
            "description": "Environment name of the service, e.g. \"production\" or \"staging\"",
            "type": ["string", "null"],
            "maxLength": 1024
        },
        "runtime": {
            "description": "Name and version of the language runtime running this service",
            "type": ["object", "null"],
            "properties": {
                "name": {
                    "type": "string",
                    "maxLength": 1024
                },
                "version": {
                    "type": "string",
                    "maxLength": 1024
                }
            },
            "required": ["name", "version"]
        },
        "version": {
            "description": "Version of the service emitting this event",
            "type": ["string", "null"],
            "maxLength": 1024
        }
    },
    "required": ["agent", "name"]
        },
        "system": {
                "$id": "doc/spec/system.json",
    "title": "System",
    "type": ["object", "null"],
    "properties": {
        "architecture": {
            "description": "Architecture of the system the agent is running on.",
            "type": ["string", "null"],
            "maxLength": 1024
        },
        "hostname": {
            "description": "Hostname of the system the agent is running on.",
            "type": ["string", "null"],
            "maxLength": 1024
        },
        "platform": {
            "description": "Name of the system platform the agent is running on.",
            "type": ["string", "null"],
            "maxLength": 1024
        }
    }
        }
    },
    "required": ["service", "metrics"]
}
` | model/metric/generated/schema/payload.go | 0.82386 | 0.536191 | payload.go | starcoder
package task
import (
"fmt"
"path"
"github.com/pkg/errors"
"github.com/uncharted-distil/distil-compute/model"
"github.com/uncharted-distil/distil-compute/primitive/compute"
"github.com/uncharted-distil/distil-compute/primitive/compute/description"
"github.com/uncharted-distil/distil-compute/primitive/compute/result"
"github.com/uncharted-distil/distil-compute/metadata"
"github.com/uncharted-distil/distil/api/serialization"
"github.com/uncharted-distil/distil/api/util"
)
// GeocodedPoint contains data that has been geocoded.
type GeocodedPoint struct {
	D3MIndex string // d3mIndex of the row the point belongs to
	SourceField string // name of the source column that was geocoded
	Latitude string
	Longitude string
}
// GeocodeForwardDataset geocodes fields that are types of locations.
// The results are appended to the dataset as new lat/lon columns and the
// whole is output to disk; the path of the new schema file is returned.
func GeocodeForwardDataset(schemaFile string, dataset string, config *IngestTaskConfig) (string, error) {
	outputPath := createDatasetPaths(schemaFile, dataset, compute.D3MLearningData)

	// load metadata from original schema
	meta, err := metadata.LoadMetadataFromClassification(schemaFile, path.Join(path.Dir(schemaFile), config.ClassificationOutputPathRelative), false, true)
	if err != nil {
		return "", errors.Wrap(err, "unable to load original schema file")
	}
	mainDR := meta.GetMainDataResource()
	d3mIndexVariable := getD3MIndexField(mainDR)

	// read raw data
	dataPath := path.Join(outputPath.sourceFolder, mainDR.ResPath)
	lines, err := util.ReadCSVFile(dataPath, config.HasHeader)
	if err != nil {
		return "", errors.Wrap(err, "error reading raw data")
	}

	// index d3m indices by row since primitive returns row numbers.
	// header row already skipped in the readCSVFile call.
	rowIndex := make(map[int]string)
	for i, line := range lines {
		rowIndex[i] = line[d3mIndexVariable]
	}

	// Geocode location fields, one pipeline run per location column.
	datasetInputDir := outputPath.sourceFolder
	colsToGeocode := geocodeColumns(meta)
	geocodedData := make([][]*GeocodedPoint, 0)
	for _, col := range colsToGeocode {
		geocoded, err := GeocodeForward(datasetInputDir, dataset, col)
		if err != nil {
			return "", err
		}
		geocodedData = append(geocodedData, geocoded)
	}

	// map geocoded data by d3m index, and register the two new metadata
	// variables (lat, lon) per geocoded source field.
	indexedData := make(map[string][]*GeocodedPoint)
	fields := make(map[string][]*model.Variable)
	for _, field := range geocodedData {
		latName, lonName := getLatLonVariableNames(field[0].SourceField)
		latDesc := fmt.Sprintf("latitude obtained from field %s", field[0].SourceField)
		lonDesc := fmt.Sprintf("longitude obtained from field %s", field[0].SourceField)
		fields[field[0].SourceField] = []*model.Variable{
			model.NewVariable(len(mainDR.Variables), latName, "label", latName, latName, model.LatitudeType, model.LatitudeType, latDesc, []string{"attribute"}, []string{model.VarDistilRoleMetadata}, nil, mainDR.Variables, false),
			model.NewVariable(len(mainDR.Variables)+1, lonName, "label", latName, lonName, model.LongitudeType, model.LongitudeType, lonDesc, []string{"attribute"}, []string{model.VarDistilRoleMetadata}, nil, mainDR.Variables, false),
		}
		mainDR.Variables = append(mainDR.Variables, fields[field[0].SourceField]...)
		for _, gc := range field {
			if indexedData[gc.D3MIndex] == nil {
				indexedData[gc.D3MIndex] = make([]*GeocodedPoint, 0)
			}
			indexedData[gc.D3MIndex] = append(indexedData[gc.D3MIndex], gc)
		}
	}

	// add the geocoded data to the raw data (lat, lon appended per field)
	for i, line := range lines {
		geocodedFields := indexedData[line[d3mIndexVariable]]
		for _, geo := range geocodedFields {
			line = append(line, geo.Latitude)
			line = append(line, geo.Longitude)
		}
		lines[i] = line
	}

	// output the header
	header := make([]string, len(mainDR.Variables))
	for _, v := range mainDR.Variables {
		header[v.Index] = v.HeaderName
	}
	output := [][]string{header}
	output = append(output, lines...)

	// output the data with the new feature
	datasetStorage := serialization.GetStorage(outputPath.outputData)
	err = datasetStorage.WriteData(outputPath.outputData, output)
	if err != nil {
		return "", errors.Wrap(err, "error writing feature output")
	}
	mainDR.ResPath = path.Dir(outputPath.outputData)

	// write the new schema to file
	err = datasetStorage.WriteMetadata(outputPath.outputSchema, meta, true, false)
	if err != nil {
		return "", errors.Wrap(err, "unable to store feature schema")
	}
	return outputPath.outputSchema, nil
}
// GeocodeForward will geocode a column into lat & lon values by running a
// Goat forward pipeline over the dataset and parsing its CSV result.
// The result rows are expected to contain the d3m index plus
// "<field>_latitude" and "<field>_longitude" columns.
func GeocodeForward(datasetInputDir string, dataset string, variable *model.Variable) ([]*GeocodedPoint, error) {
	// create & submit the solution request
	pip, err := description.CreateGoatForwardPipeline("mountain", "", variable)
	if err != nil {
		return nil, errors.Wrap(err, "unable to create Goat pipeline")
	}

	datasetURI, err := submitPipeline([]string{datasetInputDir}, pip, true)
	if err != nil {
		return nil, errors.Wrap(err, "unable to run Goat pipeline")
	}

	// parse primitive response (col index,importance)
	res, err := result.ParseResultCSV(datasetURI)
	if err != nil {
		return nil, errors.Wrap(err, "unable to parse Goat pipeline result")
	}

	// result should be d3m index, lat, lon
	header, err := castTypeArray(res[0])
	if err != nil {
		return nil, errors.Wrap(err, "unable to parse Goat pipeline header")
	}
	// skip the header
	res = res[1:]
	geocodedData := make([]*GeocodedPoint, len(res))
	latIndex := getFieldIndex(header, fmt.Sprintf("%s_latitude", variable.HeaderName))
	lonIndex := getFieldIndex(header, fmt.Sprintf("%s_longitude", variable.HeaderName))
	d3mIndexIndex := getFieldIndex(header, model.D3MIndexFieldName)
	for i, v := range res {
		lat := v[latIndex].(string)
		lon := v[lonIndex].(string)

		d3mIndex := v[d3mIndexIndex].(string)

		geocodedData[i] = &GeocodedPoint{
			D3MIndex: d3mIndex,
			SourceField: variable.Key,
			Latitude: lat,
			Longitude: lon,
		}
	}
	return geocodedData, nil
}
func getLatLonVariableNames(variableName string) (string, string) {
lat := fmt.Sprintf("_lat_%s", variableName)
lon := fmt.Sprintf("_lon_%s", variableName)
return lat, lon
}
// geocodeColumns returns the variables of the main data resource that have
// at least one location-typed suggested type. Each variable is returned at
// most once: the original appended the variable once per matching
// suggested type, which caused it to be geocoded repeatedly and produced
// duplicate lat/lon columns with identical names.
func geocodeColumns(meta *model.Metadata) []*model.Variable {
	colsToGeocode := make([]*model.Variable, 0)
	for _, v := range meta.DataResources[0].Variables {
		for _, t := range v.SuggestedTypes {
			if isLocationType(t.Type) {
				colsToGeocode = append(colsToGeocode, v)
				break // one match is enough; avoid duplicates
			}
		}
	}
	return colsToGeocode
}
// isLocationType reports whether typ is one of the model types that can be
// forward-geocoded into coordinates.
func isLocationType(typ string) bool {
	switch typ {
	case model.AddressType, model.CityType, model.CountryType,
		model.PostalCodeType, model.StateType, model.TA2LocationType:
		return true
	}
	return false
} | api/task/geocoding.go | 0.696991 | 0.420064 | geocoding.go | starcoder
package transformer
import "gopkg.in/bblfsh/sdk.v2/uast/nodes"
// MapEach applies mapping m to every element of an array by wrapping both
// sides of m in an Each operation bound to variable vr.
func MapEach(vr string, m Mapping) Mapping {
	src, dst := m.Mapping()
	return Map(Each(vr, src), Each(vr, dst))
}
// ArrayOp is a subset of operations that operates on an arrays with a pre-defined size. See Arr.
type ArrayOp interface {
	Op
	// arr resolves this operation to a fixed list of per-element ops,
	// possibly consulting variables stored in st.
	arr(st *State) (opArr, error)
}

// Arr checks if the current object is a list with a number of elements
// matching a number of ops, and applies ops to corresponding elements.
// Reversal creates a list of the size that matches the number of ops
// and creates each element with the corresponding op.
func Arr(ops ...Op) ArrayOp {
	return opArr(ops)
}
// opArr is a fixed-size array operation: element i is handled by op i.
type opArr []Op

// Kinds reports that this op only applies to array nodes.
func (opArr) Kinds() nodes.Kind {
	return nodes.KindArray
}

// arr returns the op itself; a literal opArr needs no state resolution.
func (op opArr) arr(_ *State) (opArr, error) {
	return op, nil
}

// Check matches n when it is an array of exactly len(op) elements and
// every element matches its corresponding sub-op.
func (op opArr) Check(st *State, n nodes.Node) (bool, error) {
	arr, ok := n.(nodes.Array)
	if !ok {
		return filtered("%+v is not a list, %+v", n, op)
	} else if len(arr) != len(op) {
		return filtered("%+v has wrong len for %+v", n, op)
	}
	for i, sub := range op {
		if ok, err := sub.Check(st, arr[i]); err != nil {
			// annotate the error with the element index and sub-op
			return false, errElem.Wrap(err, i, sub)
		} else if !ok {
			return false, nil
		}
	}
	return true, nil
}

// Construct builds an array with one element per sub-op, constructed in
// order. n must be nil (this op creates the node, it does not transform one).
func (op opArr) Construct(st *State, n nodes.Node) (nodes.Node, error) {
	if err := noNode(n); err != nil {
		return nil, err
	}
	arr := make(nodes.Array, 0, len(op))
	for i, sub := range op {
		nn, err := sub.Construct(st, n)
		if err != nil {
			return nil, errElem.Wrap(err, i, sub)
		}
		arr = append(arr, nn)
	}
	return arr, nil
}
// One is a shorthand for a list with one element.
func One(op Op) ArrayOp {
	return Arr(op)
}
// AnyElem check matches if any of list elements matches sub-check.
// A nil selector defaults to Is(nil).
func AnyElem(s Sel) Sel {
	sel := s
	if sel == nil {
		sel = Is(nil)
	}
	return &opAnyElem{sel: sel}
}

// opAnyElem implements AnyElem.
type opAnyElem struct {
	sel Sel
}

// Kinds reports that this selector only applies to array nodes.
func (*opAnyElem) Kinds() nodes.Kind {
	return nodes.KindArray
}

// Check returns true as soon as one element matches; each element is
// checked against a clone of the state so bindings do not leak.
func (op *opAnyElem) Check(st *State, n nodes.Node) (bool, error) {
	arr, ok := n.(nodes.Array)
	if !ok {
		return false, nil
	}
	for _, el := range arr {
		ok, err := op.sel.Check(st.Clone(), el)
		if err != nil {
			return false, err
		}
		if ok {
			return true, nil
		}
	}
	return false, nil
}
// All check matches if all list elements matches sub-check.
func All(s Sel) Sel {
return &opAll{sel: s}
}
type opAll struct {
sel Sel
}
func (*opAll) Kinds() nodes.Kind {
return nodes.KindArray
}
func (op *opAll) Check(st *State, n nodes.Node) (bool, error) {
l, ok := n.(nodes.Array)
if !ok {
return false, nil
}
for _, o := range l {
if ok, err := op.sel.Check(st.Clone(), o); err != nil || !ok {
return false, err
}
}
return true, nil
}
// LookupArrOpVar is like LookupOpVar but returns an array operation.
// Default value can be specified by setting the nil key.
// LookupArrOpVar is like LookupOpVar but returns an array operation.
// Default value can be specified by setting the nil key.
// Note: the nil entry is removed from the caller's map.
func LookupArrOpVar(vr string, cases map[nodes.Value]ArrayOp) ArrayOp {
	def := cases[nil]
	delete(cases, nil)
	return opLookupArrOp{vr: vr, cases: cases, def: def}
}

// opLookupArrOp dispatches to one of several array ops based on the
// value currently bound to variable vr.
type opLookupArrOp struct {
	vr    string
	def   ArrayOp
	cases map[nodes.Value]ArrayOp
}

// Kinds reports that this op applies only to array nodes.
func (opLookupArrOp) Kinds() nodes.Kind {
	return nodes.KindArray
}

// arr resolves the op by reading vr from the state, selecting the matching
// case (or the default), and delegating to that op's arr.
func (op opLookupArrOp) arr(st *State) (opArr, error) {
	vn, err := st.MustGetVar(op.vr)
	if err != nil {
		return nil, err
	}
	v, ok := vn.(nodes.Value)
	if !ok {
		return nil, ErrExpectedValue.New(vn)
	}
	sub, ok := op.cases[v]
	if !ok {
		if op.def == nil {
			return nil, ErrUnhandledValueIn.New(v, op.cases)
		}
		sub = op.def
	}
	return sub.arr(st)
}
// Check resolves the op via the lookup variable, then runs the resolved check.
func (op opLookupArrOp) Check(st *State, n nodes.Node) (bool, error) {
	sub, err := op.arr(st)
	if err != nil {
		return false, err
	}
	return sub.Check(st, n)
}

// Construct resolves the op via the lookup variable, then constructs with it.
func (op opLookupArrOp) Construct(st *State, n nodes.Node) (nodes.Node, error) {
	sub, err := op.arr(st)
	if err != nil {
		return nil, err
	}
	return sub.Construct(st, n)
}
// PrependOne prepends a single element to an array.
// PrependOne prepends a single element to an array.
func PrependOne(first Op, arr Op) Op {
	return prependOne{first: first, tail: arr}
}

// prependOne splits an array into its head element and the remaining tail,
// checking/constructing each with a separate op.
type prependOne struct {
	first, tail Op
}

// Kinds reports that this op applies only to array nodes.
func (prependOne) Kinds() nodes.Kind {
	return nodes.KindArray
}

// Check matches non-empty arrays only: the first element must pass the head
// op and the remaining slice must pass the tail op.
func (op prependOne) Check(st *State, n nodes.Node) (bool, error) {
	arr, ok := n.(nodes.Array)
	if !ok {
		return false, nil
	} else if len(arr) < 1 {
		return false, nil
	}
	first, tail := arr[0], arr[1:]
	if ok, err := op.first.Check(st, first); err != nil || !ok {
		return false, err
	}
	if ok, err := op.tail.Check(st, tail); err != nil || !ok {
		return false, err
	}
	return true, nil
}
// Construct builds the head element and the tail array separately, then
// concatenates them. A nil tail is accepted and treated as an empty array,
// so the result then contains only the head element.
func (op prependOne) Construct(st *State, n nodes.Node) (nodes.Node, error) {
	first, err := op.first.Construct(st, n)
	if err != nil {
		return nil, err
	}
	tail, err := op.tail.Construct(st, n)
	if err != nil {
		return nil, err
	}
	arr, ok := tail.(nodes.Array)
	if !ok && tail != nil {
		return nil, ErrExpectedList.New(tail)
	}
	out := make(nodes.Array, 0, len(arr)+1)
	out = append(out, first)
	out = append(out, arr...)
	return out, nil
}
// Append is like AppendArr but allows to set more complex first operation.
// Result of this operation should still be an array.
// Append is like AppendArr but allows to set more complex first operation.
// Result of this operation should still be an array.
// With no items it degenerates to the base op itself.
func Append(to Op, items ...ArrayOp) Op {
	if len(items) == 0 {
		return to
	}
	return opAppend{op: to, arrs: opAppendArr{arrs: items}}
}

// opAppend applies a base op to an array prefix and fixed array ops to
// a suffix of known size.
type opAppend struct {
	op   Op
	arrs opAppendArr
}

// Kinds reports that this op applies only to array nodes.
func (opAppend) Kinds() nodes.Kind {
	return nodes.KindArray
}

// Check splits the array so the last len(sarr) elements are matched by the
// appended ops and the remaining prefix is matched by the base op.
func (op opAppend) Check(st *State, n nodes.Node) (bool, error) {
	arr, ok := n.(nodes.Array)
	if !ok {
		return filtered("%+v is not a list, %+v", n, op)
	}
	sarr, err := op.arrs.arr(st)
	if err != nil {
		return false, err
	}
	if len(sarr) > len(arr) {
		return filtered("array %+v is too small for %+v", n, op)
	}
	// split into array part that will go to sub op,
	// and the part we will use for sub-array checks
	tail := len(arr) - len(sarr)
	sub, arrs := arr[:tail], arr[tail:]
	if ok, err := op.op.Check(st, sub); err != nil {
		return false, errAppend.Wrap(err)
	} else if !ok {
		return false, nil
	}
	return sarr.Check(st, arrs)
}
// Construct builds the base array with the wrapped op, builds the suffix
// elements with the stored array ops, and appends the suffix to the base.
func (op opAppend) Construct(st *State, n nodes.Node) (nodes.Node, error) {
	n, err := op.op.Construct(st, n)
	if err != nil {
		return nil, err
	}
	arr, ok := n.(nodes.Array)
	if !ok {
		return nil, ErrExpectedList.New(n)
	}
	sarr, err := op.arrs.arr(st)
	if err != nil {
		return nil, err
	}
	nn, err := sarr.Construct(st, nil)
	if err != nil {
		return nil, err
	}
	arr2, ok := nn.(nodes.Array)
	if !ok {
		// Report the value that failed the assertion (nn), not the base
		// array n, which was already verified above.
		return nil, ErrExpectedList.New(nn)
	}
	arr = append(arr, arr2...)
	return arr, nil
}
// AppendArr asserts that a node is a Array and checks that it contains a defined set of nodes at the end.
// Reversal uses sub-operation to create a Array and appends provided element lists at the end of it.
// AppendArr asserts that a node is a Array and checks that it contains a defined set of nodes at the end.
// Reversal uses sub-operation to create a Array and appends provided element lists at the end of it.
// A single item is returned as-is; zero items yields an empty concatenation.
func AppendArr(items ...ArrayOp) ArrayOp {
	if len(items) == 1 {
		return items[0]
	}
	return opAppendArr{arrs: items}
}

// opAppendArr concatenates several fixed-size array ops into one.
type opAppendArr struct {
	arrs []ArrayOp
}

// Kinds reports that this op applies only to array nodes.
func (opAppendArr) Kinds() nodes.Kind {
	return nodes.KindArray
}

// arr resolves each sub-op and concatenates their element ops in order.
func (op opAppendArr) arr(st *State) (opArr, error) {
	var arr opArr
	for _, sub := range op.arrs {
		a, err := sub.arr(st)
		if err != nil {
			return nil, err
		}
		arr = append(arr, a...)
	}
	return arr, nil
}

// Check resolves the concatenated ops and runs the fixed-size array check.
func (op opAppendArr) Check(st *State, n nodes.Node) (bool, error) {
	sarr, err := op.arr(st)
	if err != nil {
		return false, err
	}
	return sarr.Check(st, n)
}

// Construct resolves the concatenated ops and builds the resulting array.
func (op opAppendArr) Construct(st *State, n nodes.Node) (nodes.Node, error) {
	sarr, err := op.arr(st)
	if err != nil {
		return nil, err
	}
	return sarr.Construct(st, n)
}
// Each checks that current node is an array and applies sub-operation to each element.
// It uses a variable to store state of each element.
// Each checks that current node is an array and applies sub-operation to each element.
// It uses a variable to store state of each element.
func Each(vr string, op Op) Op {
	return opEach{vr: vr, op: op}
}

// opEach applies a sub-op to every array element, collecting one sub-state
// per element under the variable vr.
type opEach struct {
	vr string
	op Op
}

// Kinds allows nil as well as arrays, so a missing list round-trips as nil.
func (opEach) Kinds() nodes.Kind {
	return nodes.KindNil | nodes.KindArray
}

// Check runs the sub-op against each element in a fresh state, and stores
// the resulting state slice in vr. A nil node stores a nil slice, which
// Construct later turns back into nil.
func (op opEach) Check(st *State, n nodes.Node) (bool, error) {
	arr, ok := n.(nodes.Array)
	if !ok && n != nil {
		return filtered("%+v is not a list, %+v", n, op)
	}
	var subs []*State
	if arr != nil {
		subs = make([]*State, 0, len(arr))
	}
	for i, sub := range arr {
		sst := NewState()
		ok, err := op.op.Check(sst, sub)
		if err != nil {
			return false, errElem.Wrap(err, i, sub)
		} else if !ok {
			return false, nil
		}
		subs = append(subs, sst)
	}
	if err := st.SetStateVar(op.vr, subs); err != nil {
		return false, err
	}
	return true, nil
}
// Construct rebuilds the array from the per-element states stored by Check.
// n must be nil; a nil stored slice reproduces a nil node.
func (op opEach) Construct(st *State, n nodes.Node) (nodes.Node, error) {
	if err := noNode(n); err != nil {
		return nil, err
	}
	subs, ok := st.GetStateVar(op.vr)
	if !ok {
		return nil, ErrVariableNotDefined.New(op.vr)
	}
	if subs == nil {
		return nil, nil
	}
	arr := make(nodes.Array, 0, len(subs))
	for i, stt := range subs {
		sub, err := op.op.Construct(stt, nil)
		if err != nil {
			return nil, errElem.Wrap(err, i, nil)
		}
		arr = append(arr, sub)
	}
	return arr, nil
}
// ArrWith matches an array that contains the given items (in any position),
// removes them, and applies the base op to the remainder.
// With no items it degenerates to the base op itself.
func ArrWith(arr Op, items ...Op) Op {
	if len(items) == 0 {
		return arr
	}
	return opArrWith{arr: arr, items: items}
}

// opArrWith extracts specific items from an array before delegating the
// rest to a base op.
type opArrWith struct {
	arr   Op
	items []Op
}

// Kinds intersects array/nil with whatever the base op accepts.
func (op opArrWith) Kinds() nodes.Kind {
	return (nodes.KindArray | nodes.KindNil) & op.arr.Kinds()
}

// Check locates each item op's first match in the array, applies its state
// changes, and removes that element; items with no match are checked against
// nil (letting optional items pass). The remaining elements go to the base op.
// The array is cloned first so the caller's node is never mutated.
func (op opArrWith) Check(st *State, n nodes.Node) (bool, error) {
	arr, ok := n.(nodes.Array)
	if !ok {
		return false, nil
	}
	arr = arr.CloneList()
	// find items in the array and remove them from it
	for _, s := range op.items {
		found := false
		for i, v := range arr {
			sst := st.Clone()
			if ok, err := s.Check(sst, v); err != nil {
				return false, err
			} else if ok {
				st.ApplyFrom(sst)
				arr = append(arr[:i], arr[i+1:]...)
				found = true
				break
			}
		}
		if !found {
			if ok, err := s.Check(st, nil); err != nil || !ok {
				return false, err
			}
		}
	}
	return op.arr.Check(st, arr)
}
// Construct builds the base array first, then appends one node per item op.
// Note the items end up at the tail, regardless of where Check found them.
func (op opArrWith) Construct(st *State, n nodes.Node) (nodes.Node, error) {
	n, err := op.arr.Construct(st, n)
	if err != nil {
		return nil, err
	}
	arr, ok := n.(nodes.Array)
	if !ok {
		return nil, ErrExpectedList.New(n)
	}
	for _, s := range op.items {
		v, err := s.Construct(st, nil)
		if err != nil {
			return nil, err
		}
		arr = append(arr, v)
	}
	return arr, nil
}
package simplex
import (
"math"
)
// Skewing and unskewing factors for 2, 3, and 4 dimensions
// Skewing and unskewing factors for 2, 3, and 4 dimensions
// (standard constants from Gustavson's simplex noise reference).
// NOTE(review): F4/G4 are exported while f2..g3 are not — presumably
// unintentional inconsistency; confirm no external package relies on them.
var (
	f2 float64 = 0.5 * (math.Sqrt(3.0) - 1.0)
	g2 float64 = (3.0 - math.Sqrt(3.0)) / 6.0
	f3 float64 = 1.0 / 3.0
	g3 float64 = 1.0 / 6.0
	F4 float64 = (math.Sqrt(5.0) - 1.0) / 4.0
	G4 float64 = (5.0 - math.Sqrt(5.0)) / 20.0
)
// Faster than int(math.Floor(x)), check out simplex_test.go
func fastfloor(x float64) int {
var xi = int(x)
if x < float64(xi) {
return xi - 1
}
return xi
}
// dot2 returns the 2D dot product of gradient g with (x, y).
func dot2(g grad, x, y float64) float64 {
	return g.x*x + g.y*y
}

// dot3 returns the 3D dot product of gradient g with (x, y, z).
func dot3(g grad, x, y, z float64) float64 {
	return g.x*x + g.y*y + g.z*z
}

// dot4 returns the 4D dot product of gradient g with (x, y, z, w).
func dot4(g grad, x, y, z, w float64) float64 {
	return g.x*x + g.y*y + g.z*z + g.w*w
}
// At2d computes 2D simplex noise.
func At2d(x, y float64) float64 {
// Noise contributions from the three corners
var n0, n1, n2 float64
// Skew the input space to determine which simplex cell we're in
var s float64 = (x + y) * f2 // Hairy factor for 2D
var i int = fastfloor(x + s)
var j int = fastfloor(y + s)
var t = float64(i+j) * g2
// Unskew the cell origin back to (x,y) space
var ox0 = float64(i) - t
var oy0 = float64(j) - t
// The x,y distances from the cell origin
var x0 float64 = x - ox0
var y0 float64 = y - oy0
// For the 2D case, the simplex shape is an equilateral triangle.
// Determine which simplex we are in.
var i1, j1 int // Offsets for second (middle) corner of simplex in (i,j) coords
if x0 > y0 {
// lower triangle, XY order: (0,0)->(1,0)->(1,1)
i1, j1 = 1, 0
} else {
// upper triangle, YX order: (0,0)->(0,1)->(1,1)
i1, j1 = 0, 1
}
// A step of (1,0) in (i,j) means a step of (1-c,-c) in (x,y), and
// a step of (0,1) in (i,j) means a step of (-c,1-c) in (x,y), where
// c = (3-sqrt(3))/6
// Offsets for middle corner in (x,y) unskewed coords
var x1 float64 = x0 - float64(i1) + g2
var y1 float64 = y0 - float64(j1) + g2
// Offsets for last corner in (x,y) unskewed coords
var x2 float64 = x0 - 1.0 + 2.0*g2
var y2 float64 = y0 - 1.0 + 2.0*g2
// Work out the hashed gradient indices of the three simplex corners
var ii = i & 0xff
var jj = j & 0xff
var gi0 int = permMod12[ii+perm[jj]]
var gi1 int = permMod12[ii+i1+perm[jj+j1]]
var gi2 int = permMod12[ii+1+perm[jj+1]]
// Calculate the contribution from the three corners
var t0 float64 = 0.5 - x0*x0 - y0*y0
if t0 < 0 {
n0 = 0.0
} else {
t0 *= t0
n0 = t0 * t0 * dot2(grad3[gi0], x0, y0) // (x,y) of grad3 used for 2D gradient
}
var t1 float64 = 0.5 - x1*x1 - y1*y1
if t1 < 0 {
n1 = 0.0
} else {
t1 *= t1
n1 = t1 * t1 * dot2(grad3[gi1], x1, y1)
}
var t2 float64 = 0.5 - x2*x2 - y2*y2
if t2 < 0 {
n2 = 0.0
} else {
t2 *= t2
n2 = t2 * t2 * dot2(grad3[gi2], x2, y2)
}
// Sum and scale to [-1,1]
return 70.0 * (n0 + n1 + n2)
}
// At3d computes 3D simplex noise.
func At3d(x, y, z float64) float64 {
// Noise contributions from the four corners
var n0, n1, n2, n3 float64
// Skew the input space to determine which simplex cell we're in
var s float64 = (x + y + z) * f3 // Very nice and simple skew factor for 3D
var i = int(fastfloor(x + s))
var j = int(fastfloor(y + s))
var k = int(fastfloor(z + s))
var t = float64(i+j+k) * g3
// Unskew the cell origin back to (x,y,z) space
var ox0 = float64(i) - t
var oy0 = float64(j) - t
var oz0 = float64(k) - t
// The x,y,z distances from the cell origin
var x0 float64 = x - ox0
var y0 float64 = y - oy0
var z0 float64 = z - oz0
// For the 3D case, the simplex shape is a slightly irregular tetrahedron.
// Determine which simplex we are in.
var i1, j1, k1 int // Offsets for second corner of simplex in (i,j,k) coords
var i2, j2, k2 int // Offsets for third corner of simplex in (i,j,k) coords
if x0 >= y0 {
if y0 >= z0 {
i1, j1, k1 = 1, 0, 0
i2, j2, k2 = 1, 1, 0
} else if x0 >= z0 {
i1, j1, k1 = 1, 0, 0
i2, j2, k2 = 1, 0, 1
} else {
i1, j1, k1 = 0, 0, 1
i2, j2, k2 = 1, 0, 1
}
} else {
if y0 < z0 {
i1, j1, k1 = 0, 0, 1
i2, j2, k2 = 0, 1, 1
} else if x0 < z0 {
i1, j1, k1 = 0, 1, 0
i2, j2, k2 = 0, 1, 1
} else {
i1, j1, k1 = 0, 1, 0
i2, j2, k2 = 1, 1, 0
}
}
// A step of (1,0,0) in (i,j,k) means a step of (1-c,-c,-c) in (x,y,z),
// a step of (0,1,0) in (i,j,k) means a step of (-c,1-c,-c) in (x,y,z), and
// a step of (0,0,1) in (i,j,k) means a step of (-c,-c,1-c) in (x,y,z), where
// c = 1/6.
// Offsets for second corner in (x,y,z) coords
var x1 = x0 - float64(i1) + g3
var y1 = y0 - float64(j1) + g3
var z1 = z0 - float64(k1) + g3
// Offsets for third corner in (x,y,z) coords
var x2 = x0 - float64(i2) + 2.0*g3
var y2 = y0 - float64(j2) + 2.0*g3
var z2 = z0 - float64(k2) + 2.0*g3
// Offsets for last corner in (x,y,z) coords
var x3 = x0 - 1.0 + 3.0*g3
var y3 = y0 - 1.0 + 3.0*g3
var z3 = z0 - 1.0 + 3.0*g3
// Work out the hashed gradient indices of the four simplex corners
var ii int = i & 0xff
var jj int = j & 0xff
var kk int = k & 0xff
var gi0 int = permMod12[ii+perm[jj+perm[kk]]]
var gi1 int = permMod12[ii+i1+perm[jj+j1+perm[kk+k1]]]
var gi2 int = permMod12[ii+i2+perm[jj+j2+perm[kk+k2]]]
var gi3 int = permMod12[ii+1+perm[jj+1+perm[kk+1]]]
// Calculate the contribution from the four corners
var t0 float64 = 0.5 - x0*x0 - y0*y0 - z0*z0
if t0 < 0 {
n0 = 0.0
} else {
t0 *= t0
n0 = t0 * t0 * dot3(grad3[gi0], x0, y0, z0)
}
var t1 float64 = 0.5 - x1*x1 - y1*y1 - z1*z1
if t1 < 0 {
n1 = 0.0
} else {
t1 *= t1
n1 = t1 * t1 * dot3(grad3[gi1], x1, y1, z1)
}
var t2 float64 = 0.5 - x2*x2 - y2*y2 - z2*z2
if t2 < 0 {
n2 = 0.0
} else {
t2 *= t2
n2 = t2 * t2 * dot3(grad3[gi2], x2, y2, z2)
}
var t3 float64 = 0.5 - x3*x3 - y3*y3 - z3*z3
if t3 < 0 {
n3 = 0.0
} else {
t3 *= t3
n3 = t3 * t3 * dot3(grad3[gi3], x3, y3, z3)
}
// Sum and scale to [-1,1]
return 32.0 * (n0 + n1 + n2 + n3)
}
// At4d computes 4D simplex noise at (x, y, z, w), returning a value in [-1, 1].
func At4d(x, y, z, w float64) float64 {
	// Noise contributions from the five corners
	var n0, n1, n2, n3, n4 float64
	// Skew the (x,y,z,w) space to determine which cell of 24 simplices we're in
	var s float64 = (x + y + z + w) * F4 // Factor for 4D skewing
	var i int = fastfloor(x + s)
	var j int = fastfloor(y + s)
	var k int = fastfloor(z + s)
	var l int = fastfloor(w + s)
	var t = float64(i+j+k+l) * G4 // Factor for 4D unskewing
	// Unskew the cell origin back to (x,y,z,w) space
	var ox0 = float64(i) - t
	var oy0 = float64(j) - t
	var oz0 = float64(k) - t
	var ow0 = float64(l) - t
	// The x,y,z,w distances from the cell origin
	var x0 float64 = x - ox0
	var y0 float64 = y - oy0
	var z0 float64 = z - oz0
	var w0 float64 = w - ow0
	// For the 4D case, the simplex is a 4D shape I won't even try to describe.
	// To find out which of the 24 possible simplices we're in, we need to
	// determine the magnitude ordering of x0, y0, z0 and w0.
	var rankx, ranky, rankz, rankw int
	// Six pair-wise comparisons are performed between each possible pair
	// of the four coordinates, and the results are used to rank the numbers.
	if x0 > y0 {
		rankx++
	} else {
		ranky++
	}
	if x0 > z0 {
		rankx++
	} else {
		rankz++
	}
	if x0 > w0 {
		rankx++
	} else {
		rankw++
	}
	if y0 > z0 {
		ranky++
	} else {
		rankz++
	}
	if y0 > w0 {
		ranky++
	} else {
		rankw++
	}
	if z0 > w0 {
		rankz++
	} else {
		rankw++
	}
	// The integer offsets for the second, third and fourth simplex corner
	var i1, j1, k1, l1 int
	var i2, j2, k2, l2 int
	var i3, j3, k3, l3 int
	// simplex[c] is a 4-vector with the numbers 0, 1, 2 and 3 in some order.
	// Many values of c will never occur, since e.g. x>y>z>w makes x<z, y<w and x<w
	// impossible. Only the 24 indices which have non-zero entries make any sense.
	// We use a thresholding to set the coordinates in turn from the largest magnitude.
	// Rank 3 denotes the largest coordinate.
	if rankx >= 3 {
		i1 = 1
	} else {
		i1 = 0
	}
	if ranky >= 3 {
		j1 = 1
	} else {
		j1 = 0
	}
	if rankz >= 3 {
		k1 = 1
	} else {
		k1 = 0
	}
	if rankw >= 3 {
		l1 = 1
	} else {
		l1 = 0
	}
	// Rank 2 denotes the second largest coordinate.
	if rankx >= 2 {
		i2 = 1
	} else {
		i2 = 0
	}
	if ranky >= 2 {
		j2 = 1
	} else {
		j2 = 0
	}
	if rankz >= 2 {
		k2 = 1
	} else {
		k2 = 0
	}
	if rankw >= 2 {
		l2 = 1
	} else {
		l2 = 0
	}
	// Rank 1 denotes the second smallest coordinate.
	if rankx >= 1 {
		i3 = 1
	} else {
		i3 = 0
	}
	if ranky >= 1 {
		j3 = 1
	} else {
		j3 = 0
	}
	if rankz >= 1 {
		k3 = 1
	} else {
		k3 = 0
	}
	if rankw >= 1 {
		l3 = 1
	} else {
		l3 = 0
	}
	// The fifth corner has all coordinate offsets = 1, so no need to compute that.
	// Offsets for second corner in (x,y,z,w) coords
	var x1 = x0 - float64(i1) + G4
	var y1 = y0 - float64(j1) + G4
	var z1 = z0 - float64(k1) + G4
	var w1 = w0 - float64(l1) + G4
	// Offsets for third corner in (x,y,z,w) coords
	var x2 = x0 - float64(i2) + 2.0*G4
	var y2 = y0 - float64(j2) + 2.0*G4
	var z2 = z0 - float64(k2) + 2.0*G4
	var w2 = w0 - float64(l2) + 2.0*G4
	// Offsets for fourth corner in (x,y,z,w) coords
	var x3 = x0 - float64(i3) + 3.0*G4
	var y3 = y0 - float64(j3) + 3.0*G4
	var z3 = z0 - float64(k3) + 3.0*G4
	var w3 = w0 - float64(l3) + 3.0*G4
	// Offsets for last corner in (x,y,z,w) coords
	var x4 = x0 - 1.0 + 4.0*G4
	var y4 = y0 - 1.0 + 4.0*G4
	var z4 = z0 - 1.0 + 4.0*G4
	var w4 = w0 - 1.0 + 4.0*G4
	// Work out the hashed gradient indices of the five simplex corners
	var ii int = i & 0xff
	var jj int = j & 0xff
	var kk int = k & 0xff
	var ll int = l & 0xff
	var gi0 int = perm[ii+perm[jj+perm[kk+perm[ll]]]] % 32
	var gi1 int = perm[ii+i1+perm[jj+j1+perm[kk+k1+perm[ll+l1]]]] % 32
	var gi2 int = perm[ii+i2+perm[jj+j2+perm[kk+k2+perm[ll+l2]]]] % 32
	var gi3 int = perm[ii+i3+perm[jj+j3+perm[kk+k3+perm[ll+l3]]]] % 32
	var gi4 int = perm[ii+1+perm[jj+1+perm[kk+1+perm[ll+1]]]] % 32
	// Calculate the contribution from the five corners;
	// each is (0.5 - d^2)^4 * (grad . d), zero outside its radius.
	var t0 float64 = 0.5 - x0*x0 - y0*y0 - z0*z0 - w0*w0
	if t0 < 0 {
		n0 = 0.0
	} else {
		t0 *= t0
		n0 = t0 * t0 * dot4(grad4[gi0], x0, y0, z0, w0)
	}
	var t1 float64 = 0.5 - x1*x1 - y1*y1 - z1*z1 - w1*w1
	if t1 < 0 {
		n1 = 0.0
	} else {
		t1 *= t1
		n1 = t1 * t1 * dot4(grad4[gi1], x1, y1, z1, w1)
	}
	var t2 float64 = 0.5 - x2*x2 - y2*y2 - z2*z2 - w2*w2
	if t2 < 0 {
		n2 = 0.0
	} else {
		t2 *= t2
		n2 = t2 * t2 * dot4(grad4[gi2], x2, y2, z2, w2)
	}
	var t3 float64 = 0.5 - x3*x3 - y3*y3 - z3*z3 - w3*w3
	if t3 < 0 {
		n3 = 0.0
	} else {
		t3 *= t3
		n3 = t3 * t3 * dot4(grad4[gi3], x3, y3, z3, w3)
	}
	var t4 float64 = 0.5 - x4*x4 - y4*y4 - z4*z4 - w4*w4
	if t4 < 0 {
		n4 = 0.0
	} else {
		t4 *= t4
		n4 = t4 * t4 * dot4(grad4[gi4], x4, y4, z4, w4)
	}
	// Sum and scale to [-1,1]
	return 27.0 * (n0 + n1 + n2 + n3 + n4)
}
package linkedlist
import (
"errors"
"fmt"
col "github.com/raunakjodhawat/gorithims/src/dataStructure/collection"
"reflect"
)
// Node holds the value/data and a pointer to next and previous value
// Node holds the value/data and a pointer to next and previous value.
// NOTE(review): this type appears unused in this file — the list operates
// on col.Node throughout; confirm whether it can be deprecated.
type Node struct {
	Val  interface{}
	Next *Node
	Prev *Node
}

// List type, stores the head and tail. It represents a list of address with
// each list instantiated having a default nil head and tail, delegating
// storage to the shared collection implementation.
type List struct {
	listElements col.Collection
}
// Add Inserts the specified element at the specified position in this list. By default add to the end of list
func (l *List) Add(element interface{}, startIndexSlice ...int) error {
return l.listElements.Add(element, startIndexSlice...)
}
// AddAll Inserts all of the elements in the specified collection into this list, starting at the specified position. or by default at the end
func (l *List) AddAll(elementsInterface interface{}, startIndexSlice ...int) error {
slice := reflect.ValueOf(elementsInterface)
if slice.Kind() != reflect.Slice {
return fmt.Errorf("A slice input is required for this function expected: Slice, got: %v", slice.Kind())
}
return l.listElements.Add(elementsInterface, startIndexSlice...)
}
// AddFirst Inserts the specified element at the beginning of this list.
func (l *List) AddFirst(elementsInterface interface{}) error {
return l.listElements.Add(elementsInterface, 0)
}
// AddLast Appends the specified element to the end of this list.
func (l *List) AddLast(elementsInterface interface{}) error {
return l.listElements.Add(elementsInterface, l.listElements.Size())
}
// Clear Removes all of the elements from this list.
func (l *List) Clear() {
l.listElements.Head = nil
l.listElements.Tail = nil
l.listElements.Length = 0
}
// Clone Returns a shallow copy of this LinkedList.
func (l *List) Clone() List {
return *l
}
// Contains Returns true if this list contains the specified element.
func (l *List) Contains(element interface{}) bool {
matchIndex, _ := col.Iterate(&l.listElements, false, element, false, false)
return matchIndex != -1
}
// Element Retrieves, but does not remove, the head (first element) of this list.
func (l *List) Element() *col.Node {
return l.listElements.Head
}
// Get Returns the element at the specified position in this list.
func (l *List) Get(index int) (interface{}, error) {
if index >= l.listElements.Size() {
return nil, fmt.Errorf("received %v, which is greater than the list size: %v", index, l.listElements.Size())
}
_, NodeElement := col.Iterate(&l.listElements, false, index, false, true)
return NodeElement.Val, nil
}
// GetFirst Returns the first element in this list.
func (l *List) GetFirst() (interface{}, error) {
if l.listElements.Head != nil {
return l.listElements.Head.Val, nil
}
return -1, errors.New("head is not yet initialized")
}
// GetLast Returns the last element in this list.
func (l *List) GetLast() (interface{}, error) {
if l.listElements.Head != nil {
return l.listElements.Tail.Val, nil
}
return -1, errors.New("head is not yet initialized")
}
// IndexOf Returns the index of the first occurrence of the specified element in this list, or -1 if this list does not contain the element.
func (l *List) IndexOf(searchKey interface{}) int {
index, _ := col.Iterate(&l.listElements, false, searchKey, false, false)
return index
}
// LastIndexOf Returns the index of the last occurrence of the specified element in this list, or -1 if this list does not contain the element.
func (l *List) LastIndexOf(searchKey interface{}) int {
index, _ := col.ReverseIterate(&l.listElements, false, searchKey, false, false)
return index
}
// ListIterator Returns a list-iterator of the elements in this list (in proper sequence), starting at the specified position in the list.
func (l *List) ListIterator(index int) (*col.Node, error) {
if index >= l.listElements.Length {
return nil, fmt.Errorf("received %v, which is greater than the list size: %v", index, l.Size())
}
_, itrPointer := col.Iterate(&l.listElements, false, index, false, true)
return itrPointer, nil
}
// Offer Adds the specified element as the tail (last element) of this list.
func (l *List) Offer(element interface{}) (bool, error) {
return l.offerHelper(element)
}
// OfferFirst Inserts the specified element at the front of this list.
func (l *List) OfferFirst(element interface{}) (bool, error) {
return l.offerHelper(element, 0)
}
// OfferLast Inserts the specified element at the end of this list.
func (l *List) OfferLast(element interface{}) (bool, error) {
return l.offerHelper(element)
}
// Peek Retrieves, but does not remove, the head (first element) of this list.
func (l *List) Peek() (*col.Node, error) {
return l.peekHelper(l.listElements.Head)
}
// PeekFirst Retrieves, but does not remove, the first element of this list, or returns null if this list is empty.
func (l *List) PeekFirst() (*col.Node, error) {
return l.peekHelper(l.listElements.Head)
}
// PeekLast Retrieves, but does not remove, the last element of this list, or returns null if this list is empty.
func (l *List) PeekLast() (*col.Node, error) {
return l.peekHelper(l.listElements.Tail)
}
// Poll Retrieves and removes the head (first element) of this list.
func (l *List) Poll() (*col.Node, error) {
return l.listElements.Remove()
}
// PollFirst Retrieves and removes the first element of this list, or returns null if this list is empty.
func (l *List) PollFirst() (*col.Node, error) {
return l.listElements.Remove(0)
}
// PollLast Retrieves and removes the last element of this list, or returns null if this list is empty.
func (l *List) PollLast() (*col.Node, error) {
return l.listElements.Remove(l.Size() - 1)
}
// Pop Pops an element from the stack represented by this list.
func (l *List) Pop() (interface{}, error) {
currentNode, err := l.Peek()
if err != nil {
return nil, err
}
_, err = l.Poll()
if err != nil {
return nil, err
}
return currentNode.Val, nil
}
// Push Pushes an element onto the stack represented by this list.
func (l *List) Push(element interface{}) error {
return l.listElements.Add(element, 0)
}
// Remove Retrieves and removes the head (first element) of this list.
func (l *List) Remove(startIndexSlice ...int) (*col.Node, error) {
return l.listElements.Remove(startIndexSlice...)
}
// RemoveFirst Removes and returns the first element from this list.
func (l *List) RemoveFirst() (*col.Node, error) {
return l.listElements.Remove()
}
// RemoveLast Removes and returns the last element from this list.
func (l *List) RemoveLast() (*col.Node, error) {
return l.listElements.Remove(l.Size() - 1)
}
// RemoveFirstOccurrence Removes the first occurrence of the specified element in this list (when traversing the list from head to tail).
func (l *List) RemoveFirstOccurrence(searchKey interface{}) (*col.Node, error) {
index, _ := col.Iterate(&l.listElements, false, searchKey, false, false)
if index != -1 {
return l.listElements.Remove(index)
}
return nil, fmt.Errorf("%v, is not present in the list", searchKey)
}
// RemoveLastOccurrence Removes the last occurrence of the specified element in this list (when traversing the list from head to tail).
func (l *List) RemoveLastOccurrence(searchKey interface{}) (*col.Node, error) {
index, _ := col.ReverseIterate(&l.listElements, false, searchKey, false, false)
if index != -1 {
return l.listElements.Remove(index)
}
return nil, fmt.Errorf("%v, is not present in the list", searchKey)
}
// Set Replaces the element at the specified position in this list with the specified element.
func (l *List) Set(index int, element interface{}) error {
if index < l.Size() {
_, oldElement := col.Iterate(&l.listElements, false, index, false, true)
oldElement.Val = element
return nil
}
return errors.New("index is greater than the list length")
}
// Size returns the length of the linked list
func (l *List) Size() int {
return l.listElements.Length
}
// ToArray Returns an array containing all of the elements in this list in proper sequence (from first to last element)
func (l *List) ToArray() []interface{} {
listToArray := make([]interface{}, l.Size())
var i int
curr := l.listElements.Head
for curr != nil {
listToArray[i] = curr.Val
curr = curr.Next
i++
}
return listToArray
}
// Print prints the linked list element. optional debug argument as true or false can be provided
func (l *List) Print(debug ...bool) {
col.Iterate(&l.listElements, true, nil, l.printHelper(debug...), false)
}
// PrintReverse prints the linked list in reverse order. optional debug argument as true or false can be provided
func (l *List) PrintReverse(debug ...bool) {
col.ReverseIterate(&l.listElements, true, nil, l.printHelper(debug...), false)
}
// PrintPretty prints the linked list as a slice, so it is easier to debug
func (l *List) PrintPretty() {
fmt.Println(l.ToArray())
}
// unexported and utility functions used by above functions
func (l *List) offerHelper(element interface{}, startIndex ...int) (bool, error) {
err := l.listElements.Add(element, startIndex...)
if err != nil {
return false, err
}
return true, err
}
func (l *List) peekHelper(searchNode *col.Node) (*col.Node, error) {
if searchNode != nil {
return searchNode, nil
}
return nil, errors.New("list is not yet initialized")
}
func (l *List) printHelper(debug ...bool) bool {
var shouldDebug bool
if len(debug) != 0 {
if debug[0] {
shouldDebug = true
}
}
return shouldDebug
} | src/dataStructure/collection/linkedList/linkedList.go | 0.746693 | 0.4081 | linkedList.go | starcoder |
package main
import (
"github.com/srabraham/advent-of-code-2019/internal/seanmath"
"io/ioutil"
"log"
"math"
"sort"
"strings"
)
// f aborts the program when err is non-nil (fatal error helper).
func f(err error) {
	if err != nil {
		log.Fatal(err)
	}
}
// GridPos is an (x, y) coordinate on the asteroid grid.
type GridPos struct {
	x int
	y int
}

// Grid is a sparse map from positions to their single-character cell value,
// plus the exclusive bounds of the grid.
type Grid struct {
	vals map[GridPos]string
	maxX int
	maxY int
}
// ValAt returns the value stored at (x, y) and whether that position exists
// on the grid.
func (g Grid) ValAt(x, y int) (string, bool) {
	pos := GridPos{x: x, y: y}
	v, ok := g.vals[pos]
	return v, ok
}
// String renders the grid row by row, cells separated by spaces.
// Uses strings.Builder instead of += concatenation, which is quadratic.
func (g Grid) String() string {
	var b strings.Builder
	for y := 0; y < g.maxY; y++ {
		for x := 0; x < g.maxX; x++ {
			b.WriteString(g.vals[GridPos{x: x, y: y}])
			b.WriteString(" ")
		}
		b.WriteString("\n")
	}
	return b.String()
}
// ReadGrid parses the named file into a Grid: one row per line, one cell per
// character. Aborts the program on read failure.
func ReadGrid(filename string) Grid {
	b, err := ioutil.ReadFile(filename)
	f(err)
	rows := strings.Split(string(b), "\n")
	grid := Grid{
		vals: make(map[GridPos]string),
		maxX: len(rows[0]),
		maxY: len(rows),
	}
	for y, row := range rows {
		for x, col := range row {
			grid.vals[GridPos{x: x, y: y}] = string(col)
			// Track the widest row seen, in case rows have uneven length.
			if x > grid.maxX {
				grid.maxX = x
			}
		}
		if y > grid.maxY {
			grid.maxY = y
		}
	}
	//log.Printf("grid =\n%v", grid)
	return grid
}
// countCollisions counts the asteroids ("#") visible from (x, y): for every
// other asteroid, it walks the reduced-direction ray from (x, y) toward it
// and counts it only when no other asteroid blocks the line of sight.
func countCollisions(grid Grid, x int, y int) int {
	collisions := 0
	for checkX := 0; checkX < grid.maxX; checkX++ {
		for checkY := 0; checkY < grid.maxY; checkY++ {
			if checkX == x && checkY == y {
				continue
			}
			checkVal, _ := grid.ValAt(checkX, checkY)
			if checkVal == "#" {
				// Reduce the direction vector to its smallest integer step.
				rise := checkY - y
				run := checkX - x
				gcd := GCD(rise, run)
				rise = rise / gcd
				run = run / gcd
				xRay := x
				yRay := y
				intersect := false
			loop:
				// Idiomatic infinite loop: `for`, not `for true`.
				for {
					xRay += run
					yRay += rise
					if xRay == checkX && yRay == checkY {
						break loop
					}
					rayVal, inBounds := grid.ValAt(xRay, yRay)
					if !inBounds {
						break loop
					}
					if rayVal == "#" {
						// An asteroid sits between us and the target.
						intersect = true
						break loop
					}
				}
				if !intersect {
					collisions++
				}
			}
		}
	}
	return collisions
}
// Part1 finds the asteroid with the most other asteroids in direct line of
// sight, logs its position, and returns the visibility count.
func Part1(filename string) int {
	g := ReadGrid(filename)
	collisions := make(map[GridPos]int)
	for x := 0; x < g.maxX; x++ {
		for y := 0; y < g.maxY; y++ {
			val, _ := g.ValAt(x, y)
			if val == "#" {
				collisions[GridPos{x: x, y: y}] = countCollisions(g, x, y)
			}
		}
	}
	// Scan for the best asteroid.
	var max int
	var maxX, maxY int
	for x := 0; x < g.maxX; x++ {
		//log.Print(row)
		for y := 0; y < g.maxY; y++ {
			v := collisions[GridPos{x: x, y: y}]
			if v > max {
				max = v
				maxX = x
				maxY = y
			}
		}
	}
	log.Printf("max = %v, maxX = %v, maxY = %v", max, maxX, maxY)
	return max
}
// RelAst is the relative position of an asteroid with respect to some other x,y point
type RelAst struct {
pos GridPos
distX int
distY int
}
func (ra RelAst) angle() float64 {
return math.Atan2(-float64(ra.distX), float64(ra.distY))
}
func (ra *RelAst) length() float64 {
return math.Sqrt(float64(ra.distX*ra.distX + ra.distY*ra.distY))
}
// Part2 simulates the rotating laser at (x, y): asteroids are grouped by
// angle, sorted by distance within each angle, and destroyed one per angle
// per full rotation. Returns the position of the targetMove-th destroyed
// asteroid.
func Part2(filename string, x, y, targetMove int) GridPos {
	g := ReadGrid(filename)
	var ra []RelAst
	// find all the other asteroids on the board and calculate their positions
	// relative to the x,y input pair
	for checkX := 0; checkX < g.maxX; checkX++ {
		for checkY := 0; checkY < g.maxY; checkY++ {
			if checkX == x && checkY == y {
				continue
			}
			checkVal, _ := g.ValAt(checkX, checkY)
			if checkVal == "#" {
				r := RelAst{
					pos:   GridPos{checkX, checkY},
					distX: checkX - x,
					distY: checkY - y,
				}
				ra = append(ra, r)
				//log.Printf("created ra %v", r)
			}
		}
	}
	// map of angles to other asteroids on the ray extending at that angle from x,y
	anglesToAsteroids := make(map[int64][]RelAst)
	for _, r := range ra {
		// round all of the angles since these are floats
		angle := int64(math.Round(r.angle() * 1_000_000.0))
		anglesToAsteroids[angle] = append(anglesToAsteroids[angle], r)
	}
	// all of the known angles
	angles := make([]int64, 0)
	for k := range anglesToAsteroids {
		angles = append(angles, k)
	}
	seanmath.Sort(angles)
	// within each ray, closest asteroid is destroyed first
	for _, s := range anglesToAsteroids {
		sort.Slice(s, func(i, j int) bool {
			return s[i].length() < s[j].length()
		})
	}
	var targetResult GridPos
	blast := 0
	blastsThisPass := 1
	// each pass is a full 2pi rotation of the blaster ray.
	// keep doing passes until no asteroids remain.
	g.vals[GridPos{x: x, y: y}] = "S"
	for pass := 0; blastsThisPass > 0; pass++ {
		blastsThisPass = 0
		for _, a := range angles {
			ma, found := anglesToAsteroids[a]
			if !found {
				continue
			}
			// on pass N, the N-th closest asteroid on this ray is destroyed
			if len(ma) <= pass {
				continue
			}
			blast++
			blastsThisPass++
			val := ma[pass]
			if blast == targetMove {
				targetResult = val.pos
			}
			g.vals[val.pos] = "X"
			//log.Printf("%v entry is %v, grid =\n%v", blast, val, g)
		}
	}
	return targetResult
}
// main runs both puzzle parts; Part2 uses the best station found by Part1
// (11, 11) and asks for the 200th destroyed asteroid.
func main() {
	Part1("cmd/day10/input10-1.txt")
	Part2("cmd/day10/input10-1.txt", 11, 11, 200)
}
// GCD returns the greatest common divisor of a and b via the Euclidean
// algorithm. The result is always non-negative, even for negative inputs.
func GCD(a, b int) int {
	for b != 0 {
		a, b = b, a%b
	}
	if a < 0 {
		return -a
	}
	return a
}
package curves
import (
math2 "github.com/wieku/danser-go/bmath"
"github.com/wieku/danser-go/bmath"
"math"
)
// Catmull is a centripetal Catmull-Rom curve segment defined by four control
// points; the drawable span runs between points[1] and points[2].
type Catmull struct {
	points []math2.Vector2d // exactly four control points (enforced by NewCatmull)
	ApproxLength float64 // approximate arc length, filled in by NewCatmull
}
// NewCatmull builds a Catmull-Rom segment from exactly four control points
// (panics otherwise) and estimates its arc length by sampling the curve once
// per unit of the straight-line distance between points[1] and points[2].
func NewCatmull(points []math2.Vector2d) Catmull {
	if len(points) != 4 {
		panic("4 points are needed to create centripetal catmull rom")
	}
	cm := &Catmull{points: points}
	pointLength := points[1].Dst(points[2])
	pointLength = math.Ceil(pointLength)
	// sum chord lengths between pointLength+1 evenly spaced parameter samples
	for i := 1; i <= int(pointLength); i++ {
		cm.ApproxLength += cm.NPointAt(float64(i) / pointLength).Dst(cm.NPointAt(float64(i-1) / pointLength))
	}
	return *cm
}
// NPointAt evaluates the raw (non-arc-length-parameterized) curve at t in
// [0, 1]; parameter speed is not constant along the curve.
func (cm Catmull) NPointAt(t float64) math2.Vector2d {
	return findPoint(cm.points[0], cm.points[1], cm.points[2], cm.points[3], t)
}
// findPoint evaluates the standard cubic Catmull-Rom basis for control points
// vec1..vec4 at parameter t; t=0 yields vec2 and t=1 yields vec3.
func findPoint(vec1, vec2, vec3, vec4 bmath.Vector2d, t float64) bmath.Vector2d {
	t2 := t * t
	t3 := t * t2
	return bmath.NewVec2d(0.5*(2*vec2.X+(-vec1.X+vec3.X)*t+(2*vec1.X-5*vec2.X+4*vec3.X-vec4.X)*t2+(-vec1.X+3*vec2.X-3*vec3.X+vec4.X)*t3),
		0.5*(2*vec2.Y+(-vec1.Y+vec3.Y)*t+(2*vec1.Y-5*vec2.Y+4*vec3.Y-vec4.Y)*t2+(-vec1.Y+3*vec2.Y-3*vec3.Y+vec4.Y)*t3))
}
//It's not a neat solution, but it works
//This calculates point on catmull curve with constant velocity
//
// PointAt walks the curve in small parameter steps, accumulating chord
// length, and returns the sample just before the accumulated length reaches
// t * ApproxLength — i.e. an approximate arc-length (constant speed)
// parameterization of NPointAt.
func (cm Catmull) PointAt(t float64) math2.Vector2d {
	desiredWidth := cm.ApproxLength * t
	width := 0.0
	pos := cm.points[1] // curve start (NPointAt(0))
	c := 0.0
	for width < desiredWidth {
		pt := cm.NPointAt(c)
		width += pt.Dst(pos)
		if width > desiredWidth {
			return pos
		}
		pos = pt
		// step size chosen so roughly 2*ApproxLength samples cover [0, 1]
		c += 1.0 / float64(cm.ApproxLength*2-1)
	}
	return pos
}
// GetLength returns the approximate arc length computed by NewCatmull.
func (cm Catmull) GetLength() float64 {
	return cm.ApproxLength
}
// GetStartAngle returns the angle from the leading control point to a sample
// just after the curve start. NOTE(review): this uses points[0], which is the
// control point *before* the drawable span (the curve itself starts at
// points[1]) — confirm this is intended.
func (cm Catmull) GetStartAngle() float64 {
	return cm.points[0].AngleRV(cm.NPointAt(1.0 / cm.ApproxLength))
}
// GetEndAngle returns the angle from the trailing control point to a sample
// just before the curve end. NOTE(review): points[len-1] is the control point
// *after* the drawable span (the curve ends at points[2]) — confirm intended,
// mirroring the asymmetry in GetStartAngle.
func (cm Catmull) GetEndAngle() float64 {
	return cm.points[len(cm.points)-1].AngleRV(cm.NPointAt((cm.ApproxLength - 1) / cm.ApproxLength))
}
func (ln Catmull) GetPoints(num int) []math2.Vector2d {
t0 := 1 / float64(num-1)
points := make([]math2.Vector2d, num)
t := 0.0
for i := 0; i < num; i += 1 {
points[i] = ln.PointAt(t)
t += t0
}
return points
} | bmath/curves/catmull.go | 0.777046 | 0.621311 | catmull.go | starcoder |
package conditions
import (
"strings"
"time"
)
// Binary string operators
// Maps an operator name to a predicate over (a, b). Note the asymmetry:
// "contains" tests that a contains b, while "substring" tests that a is a
// substring of b (arguments flipped).
var stringComparisons = map[string]func(string, string) bool{
	"equals": func(a, b string) bool { return a == b },
	"equalsFoldCase": strings.EqualFold,
	"notEquals": func(a, b string) bool { return a != b },
	"beginsWith": strings.HasPrefix,
	"endsWith": strings.HasSuffix,
	"contains": strings.Contains,
	"substring": func(a, b string) bool { return strings.Contains(b, a) },
}
// Binary number operators
// Maps an operator name to a predicate comparing a against b as float64.
var numberComparisons = map[string]func(float64, float64) bool{
	"equals": func(a, b float64) bool { return a == b },
	"notEquals": func(a, b float64) bool { return a != b },
	"lessThan": func(a, b float64) bool { return a < b },
	"greaterThan": func(a, b float64) bool { return a > b },
	"lessThanOrEqualTo": func(a, b float64) bool { return a <= b },
	"greaterThanOrEqualTo": func(a, b float64) bool { return a >= b },
}
/*
From go docs https://golang.org/pkg/time/#example_Time_Equal:
Equal reports whether t and u represent the same time instant. Two times can be equal even if they are in different locations.
For example, 6:00 +0200 and 4:00 UTC are Equal. See the documentation on the Time type for the pitfalls of using == with
Time values; most code should use Equal instead.
Simple golang "==", "<", ">", etc. operations will not work in many cases. Conversion to a date object resolves these issues.
so long as conversion is also supported by the golang lib
*/
// Binary date operators
var dateComparisons = map[string]func(*time.Time, *time.Time) bool{ //nolint:gochecknoglobals
"equals": func(a, b *time.Time) bool { return a.Equal(*b) },
"notEquals": func(a, b *time.Time) bool { return !a.Equal(*b) },
"lessThan": func(a, b *time.Time) bool { return a.Before(*b) },
"greaterThan": func(a, b *time.Time) bool { return a.After(*b) },
"lessThanOrEqualTo": func(a, b *time.Time) bool { return a.Equal(*b) || a.Before(*b) },
"greaterThanOrEqualTo": func(a, b *time.Time) bool { return a.Equal(*b) || a.After(*b) },
} | credential/presentation/conditions/operations.go | 0.826922 | 0.509398 | operations.go | starcoder |
// Renders a textured spinning cube using GLFW 3 and OpenGL 4.1 core forward-compatible profile.
// VS Code, left hand column: Green lines are new lines (since last commit), blue lines are changed from last commit,
// and red arrows mean deletion since last commit.
package main
import (
_ "image/png"
"github.com/go-gl/gl/v4.6-core/gl"
"github.com/go-gl/glfw/v3.3/glfw"
"github.com/go-gl/mathgl/mgl32"
"github.com/purelazy/modlib/internal/utils"
)
// main renders a Box2D-driven, textured cube inside a skybox: it creates the
// window/camera, steps the physics world each frame, draws the cubemap first
// (no depth test, covers every pixel), then draws the cube with the model
// transform taken from the Box2D body's position and rotation.
func main() {
	// Create the OpenGL context, window and camera
	window, cam := utils.GetWindowAndCamera(800, 600)
	defer window.Destroy()
	// Set up Box2D world (setupPhysics is defined elsewhere in this package)
	world, boxBody := setupPhysics()
	// Load textures
	cubemapTexture := utils.Cubemap(utils.Faces)
	gl.BindTexture(gl.TEXTURE_CUBE_MAP, cubemapTexture)
	modelTexture := utils.NewTexture("square.png")
	gl.BindTexture(gl.TEXTURE_2D, modelTexture.ID)
	// Compile model and cubemap shaders
	lighting := utils.NewProgram(utils.ReadShader("Lighting.vs.glsl"), utils.ReadShader("Lighting.fs.glsl"))
	cubemapShader := utils.NewProgram(utils.ReadShader("cubemap.vs.glsl"), utils.ReadShader("cubemap.fs.glsl"))
	defer gl.DeleteProgram(lighting)
	defer gl.DeleteProgram(cubemapShader)
	// ------------------------- Compute and set static uniforms
	projection := mgl32.Perspective(cam.Fovy, cam.Aspect, cam.Near, cam.Far)
	cubeVAO, uModelLocation, uViewLocation, uViewPosLocation, indices := setupModel(lighting, &projection[0])
	skyboxVAO, uViewCubemapLocation := setupSkybox(cubemapShader, &projection[0])
	for !window.ShouldClose() {
		// View is used in multiple programs
		view := mgl32.LookAtV(cam.Position, cam.Position.Add(cam.Forward), cam.Up)
		{ // ----------------Physics
			// Reverse gravity while the camera is paused
			if cam.Paused {
				boxBody.SetGravityScale(-1)
			} else {
				boxBody.SetGravityScale(1)
			}
			// Step through time (60 Hz fixed step, 8 velocity / 3 position iterations)
			world.Step(1.0/60.0, 8, 3)
		}
		{ // ----------------Draw the skybox (36 verts)
			gl.UseProgram(cubemapShader)
			// Drawing the skybox first will draw every pixel, so the screen does not
			// need to be cleared and not depth testing
			gl.Disable(gl.DEPTH_TEST)
			// The skybox does not move, relative to the view. So all translation is set to zero
			viewWithoutTranslation := view.Mat3().Mat4()
			gl.UniformMatrix4fv(uViewCubemapLocation, 1, false, &viewWithoutTranslation[0])
			// Arm GPU with VAO and Render
			gl.BindVertexArray(skyboxVAO)
			gl.DrawArrays(gl.TRIANGLES, 0, 36)
		}
		{ // ----------------Draw the model
			gl.UseProgram(lighting)
			gl.Enable(gl.CULL_FACE) // Only front-facing triangles will be drawn
			// Calculate uniforms from the physics body's pose
			position := boxBody.GetPosition()
			angle := boxBody.GetAngle()
			rotate := mgl32.HomogRotate3D(float32(angle), mgl32.Vec3{0, 0, 1})
			translate := mgl32.Translate3D(float32(position.X), float32(position.Y), 0)
			model := translate.Mul4(rotate)
			// Set uniforms
			gl.UniformMatrix4fv(uViewLocation, 1, false, &view[0])
			gl.UniformMatrix4fv(uModelLocation, 1, false, &model[0])
			gl.Uniform3fv(uViewPosLocation, 1, &cam.Position[0])
			// Arm GPU with VAO and Render
			gl.BindVertexArray(cubeVAO)
			gl.DrawElements(gl.TRIANGLES, int32(len(*indices)), gl.UNSIGNED_INT, gl.PtrOffset(0))
		}
		// Swap and Poll
		window.SwapBuffers()
		glfw.PollEvents()
	}
} | cmd/Basics/07-Box2D/main.go | 0.735926 | 0.486088 | main.go | starcoder |
package rules
// Move-offset tables for every chess piece, expressed as (file, rank) deltas
// from the piece's square. "Lines" tables group offsets by direction so a
// blocked square terminates the rest of that ray.
var (
	// king: one step in any of the eight directions
	kingMoveOffsets = []square{
		{file: -1, rank: 1}, {file: 0, rank: 1}, {file: 1, rank: 1},
		{file: -1, rank: 0} /*****************/, {file: 1, rank: 0},
		{file: -1, rank: -1}, {file: 0, rank: -1}, {file: 1, rank: -1},
	}
	// knight: the eight L-shaped jumps
	knightMoveOffsets = []square{
		{file: -1, rank: 2}, {file: -1, rank: -2},
		{file: 1, rank: 2}, {file: 1, rank: -2},
		{file: -2, rank: 1}, {file: -2, rank: -1},
		{file: 2, rank: 1}, {file: 2, rank: -1},
	}
	// rook: four orthogonal rays, each ordered outward from the piece
	rookMoveOffsetLines = [][]square{
		{
			{rank: 1}, {rank: 2}, {rank: 3}, {rank: 4}, {rank: 5}, {rank: 6}, {rank: 7},
		},
		{
			{rank: -1}, {rank: -2}, {rank: -3}, {rank: -4}, {rank: -5}, {rank: -6}, {rank: -7},
		},
		{
			{file: 1}, {file: 2}, {file: 3}, {file: 4}, {file: 5}, {file: 6}, {file: 7},
		},
		{
			{file: -1}, {file: -2}, {file: -3}, {file: -4}, {file: -5}, {file: -6}, {file: -7},
		},
	}
	// bishop: four diagonal rays, each ordered outward from the piece
	bishopMoveOffsetLines = [][]square{
		{
			{rank: 1, file: 1}, {rank: 2, file: 2}, {rank: 3, file: 3}, {rank: 4, file: 4},
			{rank: 5, file: 5}, {rank: 6, file: 6}, {rank: 7, file: 7},
		},
		{
			{rank: -1, file: 1}, {rank: -2, file: 2}, {rank: -3, file: 3}, {rank: -4, file: 4},
			{rank: -5, file: 5}, {rank: -6, file: 6}, {rank: -7, file: 7},
		},
		{
			{rank: 1, file: -1}, {rank: 2, file: -2}, {rank: 3, file: -3}, {rank: 4, file: -4},
			{rank: 5, file: -5}, {rank: 6, file: -6}, {rank: 7, file: -7},
		},
		{
			{rank: -1, file: -1}, {rank: -2, file: -2}, {rank: -3, file: -3}, {rank: -4, file: -4},
			{rank: -5, file: -5}, {rank: -6, file: -6}, {rank: -7, file: -7},
		},
	}
	// queen: union of rook and bishop rays
	queenMoveOffsetLines = append(rookMoveOffsetLines, bishopMoveOffsetLines...)
	// pawns: single advance, double advance from the initial rank, diagonal
	// captures, and promotion piece choices — mirrored per color
	whitePawnAdvancementOffsets = []square{{file: 0, rank: 1}}
	blackPawnAdvancementOffsets = []square{{file: 0, rank: -1}}
	whitePawnInitialAdvancementOffsets = append(whitePawnAdvancementOffsets, square{file: 0, rank: 2})
	blackPawnInitialAdvancementOffsets = append(blackPawnAdvancementOffsets, square{file: 0, rank: -2})
	whitePawnCaptureOffsets = []square{
		{file: -1, rank: 1},
		{file: 1, rank: 1},
	}
	blackPawnCaptureOffsets = []square{
		{file: -1, rank: -1},
		{file: 1, rank: -1},
	}
	whitePawnPromotions = []piece{WhiteQueen, WhiteRook, WhiteBishop, WhiteKnight}
	blackPawnPromotions = []piece{BlackQueen, BlackRook, BlackBishop, BlackKnight}
) | rules/pieces_offsets.go | 0.525856 | 0.611556 | pieces_offsets.go | starcoder |
package privacy
import (
"errors"
"math/big"
"github.com/incognitochain/incognito-chain/common"
)
// SchnorrPublicKey represents Schnorr Publickey
// PK = G^SK + H^R
type SchnorrPublicKey struct {
	publicKey *EllipticPoint // PK = sk*G + r*H (additive notation)
	g, h *EllipticPoint // the two Pedersen generators used for this key
}
// GetPublicKey returns the underlying public-key point PK.
func (schnorrPubKey SchnorrPublicKey) GetPublicKey() *EllipticPoint {
	return schnorrPubKey.publicKey
}
// SchnorrPrivateKey represents Schnorr Privatekey
type SchnorrPrivateKey struct {
	privateKey *big.Int // secret scalar sk
	randomness *big.Int // blinding scalar r; zero means "no privacy" mode in Sign
	publicKey *SchnorrPublicKey // derived public key, built by Set
}
// GetPublicKey returns the public key derived from this private key by Set.
func (schnPrivKey SchnorrPrivateKey) GetPublicKey() *SchnorrPublicKey {
	return schnPrivKey.publicKey
}
// SchnSignature represents Schnorr Signature
type SchnSignature struct {
	e, z1, z2 *big.Int // challenge e and responses z1, z2 (z2 is nil without privacy)
}
// Set sets Schnorr private key
// It stores sk and r, copies the two Pedersen generators, and derives the
// public key point PK = sk*G + r*H.
func (privateKey *SchnorrPrivateKey) Set(sk *big.Int, r *big.Int) {
	privateKey.privateKey = sk
	privateKey.randomness = r
	privateKey.publicKey = new(SchnorrPublicKey)
	privateKey.publicKey.g = new(EllipticPoint)
	privateKey.publicKey.g.Set(PedCom.G[PedersenPrivateKeyIndex].x, PedCom.G[PedersenPrivateKeyIndex].y)
	privateKey.publicKey.h = new(EllipticPoint)
	privateKey.publicKey.h.Set(PedCom.G[PedersenRandomnessIndex].x, PedCom.G[PedersenRandomnessIndex].y)
	privateKey.publicKey.publicKey = PedCom.G[PedersenPrivateKeyIndex].ScalarMult(sk).Add(PedCom.G[PedersenRandomnessIndex].ScalarMult(r))
}
// Set sets Schnorr public key
// It copies the given point and the two Pedersen generators into the key.
func (publicKey *SchnorrPublicKey) Set(pk *EllipticPoint) {
	publicKey.publicKey = new(EllipticPoint)
	publicKey.publicKey.Set(pk.x, pk.y)
	publicKey.g = new(EllipticPoint)
	publicKey.g.Set(PedCom.G[PedersenPrivateKeyIndex].x, PedCom.G[PedersenPrivateKeyIndex].y)
	publicKey.h = new(EllipticPoint)
	publicKey.h.Set(PedCom.G[PedersenRandomnessIndex].x, PedCom.G[PedersenRandomnessIndex].y)
}
// Sign produces a Schnorr signature over data, which must be a 32-byte hash.
// With a non-zero randomness ("privacy" mode) it commits to both generators
// and returns responses (z1, z2); otherwise it is a plain single-generator
// Schnorr signature and z2 is left nil.
func (privateKey SchnorrPrivateKey) Sign(data []byte) (*SchnSignature, error) {
	if len(data) != common.HashSize {
		return nil, NewPrivacyErr(UnexpectedErr, errors.New("hash length must be 32 bytes"))
	}
	signature := new(SchnSignature)
	// has privacy
	if privateKey.randomness.Cmp(big.NewInt(0)) != 0 {
		// generates random numbers s1, s2 in [0, Curve.Params().N - 1]
		s1 := RandScalar()
		s2 := RandScalar()
		// t = s1*G + s2*H
		t := privateKey.publicKey.g.ScalarMult(s1).Add(privateKey.publicKey.h.ScalarMult(s2))
		// E is the hash of elliptic point t and data need to be signed
		signature.e = Hash(*t, data)
		// z1 = s1 - e*sk, z2 = s2 - e*r (mod N)
		signature.z1 = new(big.Int).Sub(s1, new(big.Int).Mul(privateKey.privateKey, signature.e))
		signature.z1.Mod(signature.z1, Curve.Params().N)
		signature.z2 = new(big.Int).Sub(s2, new(big.Int).Mul(privateKey.randomness, signature.e))
		signature.z2.Mod(signature.z2, Curve.Params().N)
		return signature, nil
	}
	// generates random number s in [0, Curve.Params().N - 1]
	s := RandScalar()
	// t = s*G
	t := privateKey.publicKey.g.ScalarMult(s)
	// E is the hash of elliptic point t and data need to be signed
	signature.e = Hash(*t, data)
	// Z1 = s - e*sk
	signature.z1 = new(big.Int).Sub(s, new(big.Int).Mul(privateKey.privateKey, signature.e))
	signature.z1.Mod(signature.z1, Curve.Params().N)
	return signature, nil
}
// Verify checks that signature was produced over data by the holder of this
// public key: it recomputes the commitment as z1*G + z2*H + e*PK and accepts
// when hashing it with data reproduces the challenge e.
func (publicKey SchnorrPublicKey) Verify(signature *SchnSignature, data []byte) bool {
	if signature == nil {
		return false
	}
	rv := publicKey.g.ScalarMult(signature.z1).Add(publicKey.h.ScalarMult(signature.z2))
	rv = rv.Add(publicKey.publicKey.ScalarMult(signature.e))
	ev := Hash(*rv, data)
	return ev.Cmp(signature.e) == 0
}
// Bytes serializes the signature as e || z1 [|| z2], each component padded to
// common.BigIntSize bytes; z2 is omitted for non-privacy signatures.
func (sig SchnSignature) Bytes() []byte {
	bytes := append(common.AddPaddingBigInt(sig.e, common.BigIntSize), common.AddPaddingBigInt(sig.z1, common.BigIntSize)...)
	// Z2 is nil when has no privacy
	if sig.z2 != nil {
		bytes = append(bytes, common.AddPaddingBigInt(sig.z2, common.BigIntSize)...)
	}
	return bytes
}
// SetBytes deserializes a signature produced by Bytes: two mandatory
// BigIntSize-byte segments (e, z1) followed by the optional z2 tail.
//
// Fixed: the previous version only rejected empty input, so any non-empty
// slice shorter than 2*BigIntSize caused a slice-bounds panic; such inputs
// now return InvalidInputToSetBytesErr instead.
func (sig *SchnSignature) SetBytes(bytes []byte) error {
	if len(bytes) < 2*common.BigIntSize {
		return NewPrivacyErr(InvalidInputToSetBytesErr, nil)
	}
	sig.e = new(big.Int).SetBytes(bytes[0:common.BigIntSize])
	sig.z1 = new(big.Int).SetBytes(bytes[common.BigIntSize : 2*common.BigIntSize])
	// with no z2 tail this yields a zero z2, matching the previous behavior
	sig.z2 = new(big.Int).SetBytes(bytes[2*common.BigIntSize:])
	return nil
}
// Hash calculates a hash concatenating a given message bytes with a given EC Point. H(p||m)
func Hash(p EllipticPoint, m []byte) *big.Int {
b := append(common.AddPaddingBigInt(p.x, common.BigIntSize), common.AddPaddingBigInt(p.y, common.BigIntSize)...)
b = append(b, m...)
return new(big.Int).SetBytes(common.HashB(b))
} | privacy/schnorr.go | 0.766992 | 0.405596 | schnorr.go | starcoder |
A full reptend prime in base b is a prime such that b is a primitive root modulo p. In this case 10
is the base.
A number m is a primitive root modulo n, if the multiplicative order of the number m modulo n is
equal to phi(n). (totient function) (Primitive roots modulo n)
Phi(p) = p - 1. Therefore the order of 10 modulo p must be a divisor of p - 1 ( ord[p](10)|p-1 )
(Multiplicative order).
10 is a primitive root mod p only if the order is equal to phi(p) or p - 1. Therefore, for each
proper divisor of p - 1, q1,q2,q3... where q[i]<p-1, if 10^q[i]=1 mod p for any one of them, then
10 is not a primitive root modulo p.
Other References: http://math.stackexchange.com/questions/74884/quick-algorithm-for-computing-
orders-mod-n http://rosettacode.org/wiki/Multiplicative_order
*/
package main
import (
"fmt"
"math"
"strconv"
"time"
)
// Shorthand aliases for fmt's printing helpers, used throughout this file.
var p = fmt.Println
var pf = fmt.Printf // NOTE(review): unused in the visible code; kept for parity
// timeTrack prints how long a section took; invoke it as
// `defer timeTrack(time.Now(), "name")` at the top of the function to time.
func timeTrack(start time.Time, name string) {
	fmt.Printf("%s took %s \n", name, time.Since(start))
}
// inSlice reports whether value occurs in slice, along with every index at
// which it occurs (empty when absent).
func inSlice(slice []int, value int) (bool, []int) {
	positions := []int{}
	for i, v := range slice {
		if v == value {
			positions = append(positions, i)
		}
	}
	return len(positions) > 0, positions
}
// repeatingDecimalLength returns the length of the repeating cycle in the
// decimal expansion of 1/divisor (0 when the expansion terminates). It
// performs long division, recording each remainder; the cycle length is the
// distance between two occurrences of the same remainder.
//
// Improved: remainder lookup now uses a map (O(1)) instead of a linear scan
// of a slice, turning the overall cost from quadratic in the cycle length to
// linear.
func repeatingDecimalLength(divisor int) int {
	// start the division with the smallest power of ten >= divisor's width
	dividend := int(math.Pow(10, float64(len(strconv.Itoa(divisor)))))
	limit := dividend
	seen := make(map[int]int) // remainder -> step index at which it first appeared
	for len(seen) <= limit {
		diff := dividend % divisor
		if diff == 0 {
			return 0 // terminating decimal, no repeat
		}
		if first, ok := seen[diff]; ok {
			return len(seen) - first
		}
		seen[diff] = len(seen)
		dividend = diff * 10
	}
	return -1 // error condition
}
// longestRepeatingDecimal returns the n in [1, lim) whose reciprocal 1/n has
// the longest repeating decimal cycle, printing that cycle length as a side
// effect (Project Euler problem 26).
//
// BUG FIX: the original compared each cycle length against the best *number*
// found so far (`repeatLength > numberWithLongestRepeat`) rather than the
// best *length*, which can skip valid improvements; it now compares against
// longestLength.
func longestRepeatingDecimal(lim int) int {
	defer timeTrack(time.Now(), "longestRepeatingDecimal()")
	longestLength, numberWithLongestRepeat := 0, 0
	for i := 1; i < lim; i++ {
		repeatLength := repeatingDecimalLength(i)
		if repeatLength > longestLength {
			numberWithLongestRepeat = i
			longestLength = repeatLength
		}
	}
	p(longestLength)
	return numberWithLongestRepeat
}
// JGB: Could speed up by only testing primes
func main() {
p(longestRepeatingDecimal(1000))
} | Problems/euler026.go | 0.666171 | 0.552298 | euler026.go | starcoder |
package brotli
/* Copyright 2013 Google Inc. All Rights Reserved.
Distributed under MIT license.
See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/
/* Functions to estimate the bit cost of Huffman trees. */
/* shannonEntropy returns -sum(p_i * log2(p_i)) scaled by the total count for
   the first `size` buckets of the histogram, and writes the total count into
   *total.

   Improved: the original was a C transliteration that emulated pointer
   arithmetic with a `-cap(...) < -cap(...)` loop; this is the equivalent
   idiomatic range over the first `size` elements. */
func shannonEntropy(population []uint32, size uint, total *uint) float64 {
	var sum uint
	var retval float64
	for _, x := range population[:size] {
		p := uint(x)
		sum += p
		retval -= float64(p) * fastLog2(p)
	}
	if sum != 0 {
		retval += float64(sum) * fastLog2(sum)
	}
	*total = sum
	return retval
}
/* bitsEntropy returns the Shannon entropy of the first `size` histogram
   buckets, clamped from below to one bit per symbol occurrence. */
func bitsEntropy(population []uint32, size uint) float64 {
	var sum uint
	var retval float64 = shannonEntropy(population, size, &sum)
	if retval < float64(sum) {
		/* At least one bit per literal is needed. */
		retval = float64(sum)
	}
	return retval
}
/* Fixed bit costs used as shortcuts when a histogram contains exactly
   1, 2, 3 or 4 distinct symbols (header overhead of the tiny Huffman tree). */
const kOneSymbolHistogramCost float64 = 12
const kTwoSymbolHistogramCost float64 = 20
const kThreeSymbolHistogramCost float64 = 28
const kFourSymbolHistogramCost float64 = 37
/* populationCostLiteral estimates, in bits, the cost of Huffman-encoding the
   literal histogram: fixed shortcut costs when there are at most four
   distinct symbols, otherwise an entropy estimate plus the cost of the
   code-length code. Structurally identical to populationCostCommand and
   populationCostDistance (pre-generics triplication over histogram types). */
func populationCostLiteral(histogram *histogramLiteral) float64 {
	var data_size uint = histogramDataSizeLiteral()
	var count int = 0
	var s [5]uint
	var bits float64 = 0.0
	var i uint
	if histogram.total_count_ == 0 {
		return kOneSymbolHistogramCost
	}
	/* collect up to five distinct symbols (five means "more than four") */
	for i = 0; i < data_size; i++ {
		if histogram.data_[i] > 0 {
			s[count] = i
			count++
			if count > 4 {
				break
			}
		}
	}
	if count == 1 {
		return kOneSymbolHistogramCost
	}
	if count == 2 {
		return kTwoSymbolHistogramCost + float64(histogram.total_count_)
	}
	if count == 3 {
		var histo0 uint32 = histogram.data_[s[0]]
		var histo1 uint32 = histogram.data_[s[1]]
		var histo2 uint32 = histogram.data_[s[2]]
		var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2))
		return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax)
	}
	if count == 4 {
		var histo [4]uint32
		var h23 uint32
		var histomax uint32
		for i = 0; i < 4; i++ {
			histo[i] = histogram.data_[s[i]]
		}
		/* Sort */
		for i = 0; i < 4; i++ {
			var j uint
			for j = i + 1; j < 4; j++ {
				if histo[j] > histo[i] {
					var tmp uint32 = histo[j]
					histo[j] = histo[i]
					histo[i] = tmp
				}
			}
		}
		h23 = histo[2] + histo[3]
		histomax = brotli_max_uint32_t(h23, histo[0])
		return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax)
	}
	{
		var max_depth uint = 1
		var depth_histo = [codeLengthCodes]uint32{0}
		/* In this loop we compute the entropy of the histogram and simultaneously
		   build a simplified histogram of the code length codes where we use the
		   zero repeat code 17, but we don't use the non-zero repeat code 16. */
		var log2total float64 = fastLog2(histogram.total_count_)
		for i = 0; i < data_size; {
			if histogram.data_[i] > 0 {
				var log2p float64 = log2total - fastLog2(uint(histogram.data_[i]))
				/* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) =
				   = log2(total_count) - log2(count(symbol)) */
				var depth uint = uint(log2p + 0.5)
				/* Approximate the bit depth by round(-log2(P(symbol))) */
				bits += float64(histogram.data_[i]) * log2p
				if depth > 15 {
					depth = 15
				}
				if depth > max_depth {
					max_depth = depth
				}
				depth_histo[depth]++
				i++
			} else {
				var reps uint32 = 1
				/* Compute the run length of zeros and add the appropriate number of 0
				   and 17 code length codes to the code length code histogram. */
				var k uint
				for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ {
					reps++
				}
				i += uint(reps)
				if i == data_size {
					/* Don't add any cost for the last zero run, since these are encoded
					   only implicitly. */
					break
				}
				if reps < 3 {
					depth_histo[0] += reps
				} else {
					reps -= 2
					for reps > 0 {
						depth_histo[repeatZeroCodeLength]++
						/* Add the 3 extra bits for the 17 code length code. */
						bits += 3
						reps >>= 3
					}
				}
			}
		}
		/* Add the estimated encoding cost of the code length code histogram. */
		bits += float64(18 + 2*max_depth)
		/* Add the entropy of the code length code histogram. */
		bits += bitsEntropy(depth_histo[:], codeLengthCodes)
	}
	return bits
}
/* populationCostCommand estimates, in bits, the cost of Huffman-encoding the
   command histogram. See populationCostLiteral — the body is identical apart
   from the histogram type. */
func populationCostCommand(histogram *histogramCommand) float64 {
	var data_size uint = histogramDataSizeCommand()
	var count int = 0
	var s [5]uint
	var bits float64 = 0.0
	var i uint
	if histogram.total_count_ == 0 {
		return kOneSymbolHistogramCost
	}
	for i = 0; i < data_size; i++ {
		if histogram.data_[i] > 0 {
			s[count] = i
			count++
			if count > 4 {
				break
			}
		}
	}
	if count == 1 {
		return kOneSymbolHistogramCost
	}
	if count == 2 {
		return kTwoSymbolHistogramCost + float64(histogram.total_count_)
	}
	if count == 3 {
		var histo0 uint32 = histogram.data_[s[0]]
		var histo1 uint32 = histogram.data_[s[1]]
		var histo2 uint32 = histogram.data_[s[2]]
		var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2))
		return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax)
	}
	if count == 4 {
		var histo [4]uint32
		var h23 uint32
		var histomax uint32
		for i = 0; i < 4; i++ {
			histo[i] = histogram.data_[s[i]]
		}
		/* Sort */
		for i = 0; i < 4; i++ {
			var j uint
			for j = i + 1; j < 4; j++ {
				if histo[j] > histo[i] {
					var tmp uint32 = histo[j]
					histo[j] = histo[i]
					histo[i] = tmp
				}
			}
		}
		h23 = histo[2] + histo[3]
		histomax = brotli_max_uint32_t(h23, histo[0])
		return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax)
	}
	{
		var max_depth uint = 1
		var depth_histo = [codeLengthCodes]uint32{0}
		/* In this loop we compute the entropy of the histogram and simultaneously
		   build a simplified histogram of the code length codes where we use the
		   zero repeat code 17, but we don't use the non-zero repeat code 16. */
		var log2total float64 = fastLog2(histogram.total_count_)
		for i = 0; i < data_size; {
			if histogram.data_[i] > 0 {
				var log2p float64 = log2total - fastLog2(uint(histogram.data_[i]))
				/* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) =
				   = log2(total_count) - log2(count(symbol)) */
				var depth uint = uint(log2p + 0.5)
				/* Approximate the bit depth by round(-log2(P(symbol))) */
				bits += float64(histogram.data_[i]) * log2p
				if depth > 15 {
					depth = 15
				}
				if depth > max_depth {
					max_depth = depth
				}
				depth_histo[depth]++
				i++
			} else {
				var reps uint32 = 1
				/* Compute the run length of zeros and add the appropriate number of 0
				   and 17 code length codes to the code length code histogram. */
				var k uint
				for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ {
					reps++
				}
				i += uint(reps)
				if i == data_size {
					/* Don't add any cost for the last zero run, since these are encoded
					   only implicitly. */
					break
				}
				if reps < 3 {
					depth_histo[0] += reps
				} else {
					reps -= 2
					for reps > 0 {
						depth_histo[repeatZeroCodeLength]++
						/* Add the 3 extra bits for the 17 code length code. */
						bits += 3
						reps >>= 3
					}
				}
			}
		}
		/* Add the estimated encoding cost of the code length code histogram. */
		bits += float64(18 + 2*max_depth)
		/* Add the entropy of the code length code histogram. */
		bits += bitsEntropy(depth_histo[:], codeLengthCodes)
	}
	return bits
}
/* populationCostDistance estimates, in bits, the cost of Huffman-encoding the
   distance histogram. See populationCostLiteral — the body is identical apart
   from the histogram type. */
func populationCostDistance(histogram *histogramDistance) float64 {
	var data_size uint = histogramDataSizeDistance()
	var count int = 0
	var s [5]uint
	var bits float64 = 0.0
	var i uint
	if histogram.total_count_ == 0 {
		return kOneSymbolHistogramCost
	}
	for i = 0; i < data_size; i++ {
		if histogram.data_[i] > 0 {
			s[count] = i
			count++
			if count > 4 {
				break
			}
		}
	}
	if count == 1 {
		return kOneSymbolHistogramCost
	}
	if count == 2 {
		return kTwoSymbolHistogramCost + float64(histogram.total_count_)
	}
	if count == 3 {
		var histo0 uint32 = histogram.data_[s[0]]
		var histo1 uint32 = histogram.data_[s[1]]
		var histo2 uint32 = histogram.data_[s[2]]
		var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2))
		return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax)
	}
	if count == 4 {
		var histo [4]uint32
		var h23 uint32
		var histomax uint32
		for i = 0; i < 4; i++ {
			histo[i] = histogram.data_[s[i]]
		}
		/* Sort */
		for i = 0; i < 4; i++ {
			var j uint
			for j = i + 1; j < 4; j++ {
				if histo[j] > histo[i] {
					var tmp uint32 = histo[j]
					histo[j] = histo[i]
					histo[i] = tmp
				}
			}
		}
		h23 = histo[2] + histo[3]
		histomax = brotli_max_uint32_t(h23, histo[0])
		return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax)
	}
	{
		var max_depth uint = 1
		var depth_histo = [codeLengthCodes]uint32{0}
		/* In this loop we compute the entropy of the histogram and simultaneously
		   build a simplified histogram of the code length codes where we use the
		   zero repeat code 17, but we don't use the non-zero repeat code 16. */
		var log2total float64 = fastLog2(histogram.total_count_)
		for i = 0; i < data_size; {
			if histogram.data_[i] > 0 {
				var log2p float64 = log2total - fastLog2(uint(histogram.data_[i]))
				/* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) =
				   = log2(total_count) - log2(count(symbol)) */
				var depth uint = uint(log2p + 0.5)
				/* Approximate the bit depth by round(-log2(P(symbol))) */
				bits += float64(histogram.data_[i]) * log2p
				if depth > 15 {
					depth = 15
				}
				if depth > max_depth {
					max_depth = depth
				}
				depth_histo[depth]++
				i++
			} else {
				var reps uint32 = 1
				/* Compute the run length of zeros and add the appropriate number of 0
				   and 17 code length codes to the code length code histogram. */
				var k uint
				for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ {
					reps++
				}
				i += uint(reps)
				if i == data_size {
					/* Don't add any cost for the last zero run, since these are encoded
					   only implicitly. */
					break
				}
				if reps < 3 {
					depth_histo[0] += reps
				} else {
					reps -= 2
					for reps > 0 {
						depth_histo[repeatZeroCodeLength]++
						/* Add the 3 extra bits for the 17 code length code. */
						bits += 3
						reps >>= 3
					}
				}
			}
		}
		/* Add the estimated encoding cost of the code length code histogram. */
		bits += float64(18 + 2*max_depth)
		/* Add the entropy of the code length code histogram. */
		bits += bitsEntropy(depth_histo[:], codeLengthCodes)
	}
	return bits
} | vendor/github.com/andybalholm/brotli/bit_cost.go | 0.684053 | 0.475423 | bit_cost.go | starcoder |
package p273
import "bytes"
/**
Convert a non-negative integer to its english words representation.
Given input is guaranteed to be less than 2^31 - 1.
123 -> "One Hundred Twenty Three"
12345 -> "Twelve Thousand Three Hundred Forty Five"
1234567 -> "One Million Two Hundred Thirty Four Thousand Five Hundred Sixty Seven"
*/
// digitToWords returns the English word for a single digit 0-9 ("Zero" ...
// "Nine"); any other value yields "".
func digitToWords(digit int) string {
	if digit < 0 || digit > 9 {
		return ""
	}
	names := []string{"Zero", "One", "Two", "Three", "Four",
		"Five", "Six", "Seven", "Eight", "Nine"}
	return names[digit]
}
// tenToWords returns the English word for digit*10 when digit is 1-9
// ("Ten" ... "Ninety"); any other value yields "". Index 0 ("Zero") is a
// placeholder and unreachable.
func tenToWords(digit int) string {
	if digit <= 0 || digit >= 10 {
		return ""
	}
	tens := []string{"Zero", "Ten", "Twenty", "Thirty", "Forty",
		"Fifty", "Sixty", "Seventy", "Eighty", "Ninety"}
	return tens[digit]
}
// doubleDigitToWords returns the English word for the teens 10-19
// ("Ten" ... "Nineteen"); any other value yields "".
func doubleDigitToWords(digit int) string {
	if digit < 10 || digit > 19 {
		return ""
	}
	teens := []string{"Ten", "Eleven", "Twelve", "Thirteen", "Fourteen",
		"Fifteen", "Sixteen", "Seventeen", "Eighteen", "Nineteen"}
	return teens[digit-10]
}
// unitNumberToWords spells out a three-digit group (0-999) in English, e.g.
// 345 -> "Three Hundred Forty Five". Returns "" for 0 so callers can skip
// empty groups.
func unitNumberToWords(num int) string {
	buf := bytes.Buffer{}
	if num >= 100 {
		buf.WriteString(digitToWords(num / 100))
		buf.WriteByte(' ')
		buf.WriteString("Hundred")
	}
	num %= 100
	if num > 0 {
		if buf.Len() > 0 {
			buf.WriteByte(' ')
		}
		// 1-9 and 10-19 are single words; 20+ needs a tens word, and num is
		// zeroed in the first two branches so the ones digit below is skipped
		if num < 10 {
			buf.WriteString(digitToWords(num))
			num = 0
		} else if num < 20 {
			buf.WriteString(doubleDigitToWords(num))
			num = 0
		} else {
			buf.WriteString(tenToWords(num / 10))
		}
	}
	num %= 10
	if num > 0 {
		if buf.Len() > 0 {
			buf.WriteByte(' ')
		}
		buf.WriteString(digitToWords(num))
	}
	return buf.String()
}
func numberToWords(num int) string {
unit := num % 1000
num /= 1000
thousand := num % 1000
num /= 1000
million := num % 1000
num /= 1000
billion := num % 1000
buf := bytes.Buffer{}
if billion > 0 {
buf.WriteString(unitNumberToWords(billion))
buf.WriteByte(' ')
buf.WriteString("Billion")
}
if million > 0 {
if buf.Len() > 0 {
buf.WriteByte(' ')
}
buf.WriteString(unitNumberToWords(million))
buf.WriteByte(' ')
buf.WriteString("Million")
}
if thousand > 0 {
if buf.Len() > 0 {
buf.WriteByte(' ')
}
buf.WriteString(unitNumberToWords(thousand))
buf.WriteByte(' ')
buf.WriteString("Thousand")
}
if unit > 0 {
if buf.Len() > 0 {
buf.WriteByte(' ')
}
buf.WriteString(unitNumberToWords(unit))
}
if buf.Len() == 0 {
buf.WriteString(digitToWords(num))
}
return buf.String()
} | algorithms/p273/273.go | 0.614163 | 0.439266 | 273.go | starcoder |
package ast
// Hierarchy provides helper methods to access a children's parent node and
// some node's children nodes.
//
// The parents and children maps are lazily-refreshed caches: lookups that
// miss trigger a re-walk of the relevant subtree via visitor.
type Hierarchy struct {
	root Node // the AST root the hierarchy was built from
	visitor NodeVisitor // hierarchyVisitor that records parent/child links while walking
	parents map[Node]Node // child -> parent cache
	children map[Node][]Node // parent -> ordered children cache
}
// Root returns the root node this hierarchy was built from.
func (h *Hierarchy) Root() Node {
	return h.root
}
// Parent finds the parent node of some child node.
// Rewalks the root node of the hierarchy if there was no parent
// previously found. Returns the parent (if any) and a boolean
// indicating whether or not the tree was successfully walked.
func (h *Hierarchy) Parent(node Node) (Node, bool) {
	if parent, ok := h.parents[node]; ok {
		return parent, true
	}
	// Rewalk the root node in case this node is new to the AST
	h.root.Accept(h.visitor)
	if parent, ok := h.parents[node]; ok {
		return parent, true
	}
	// node is unknown even after a fresh walk (e.g. detached or the root itself)
	return nil, false
}
// Siblings returns every direct sibling of node — all other children of its
// parent, excluding node itself. The result is nil when the node has no
// known parent.
func (h *Hierarchy) Siblings(node Node) []Node {
	parent, _ := h.Parent(node)
	if parent == nil {
		return nil
	}
	var siblings []Node
	for _, child := range h.Children(parent) {
		if child != node {
			siblings = append(siblings, child)
		}
	}
	return siblings
}
// Children gets all the direct children nodes of a certain node.
// Rewalks the root node of the hierarchy if the children could not
// be found, then returns nil if the node could not be walked.
func (h *Hierarchy) Children(node Node) []Node {
	if children, ok := h.children[node]; ok {
		return children
	}
	// Rewalk the node in case it's a new node and needs to be cached
	node.Accept(h.visitor)
	return h.children[node]
}
// ChildIdx returns the position of child among parent's children, or -1 when
// child is not one of them.
func (h *Hierarchy) ChildIdx(parent Node, child Node) int {
	for i, c := range h.Children(parent) {
		if c == child {
			return i
		}
	}
	return -1
}
// NewHierarchy builds a Hierarchy for root and performs the initial walk so
// the parent/child caches are populated before the first lookup.
func NewHierarchy(root Node) *Hierarchy {
	hier := &Hierarchy{
		root: root,
		parents: make(map[Node]Node),
		children: make(map[Node][]Node),
	}
	hier.visitor = &hierarchyVisitor{hier}
	hier.root.Accept(hier.visitor)
	return hier
}
// hierarchyVisitor walks the AST and records parent/child relationships into
// the owning Hierarchy's caches.
type hierarchyVisitor struct {
	hierarchy *Hierarchy
}
// setParent records parent as child's parent in the cache.
func (v *hierarchyVisitor) setParent(parent Node, child Node) {
	v.hierarchy.parents[child] = parent
}
// setChildren records parent's children, recursively visiting each child so
// the whole subtree is cached.
func (v *hierarchyVisitor) setChildren(parent Node, children []Node) {
	for _, child := range children {
		child.Accept(v)
		v.setParent(parent, child)
	}
	v.hierarchy.children[parent] = children
}
// noChildren marks parent as a leaf (an explicit empty child list, so later
// lookups don't trigger a re-walk).
func (v *hierarchyVisitor) noChildren(parent Node) {
	v.setChildren(parent, []Node{})
}
// VisitBlockStmt records every statement in the block as a child.
func (v *hierarchyVisitor) VisitBlockStmt(node *BlockStmt) {
	children := make([]Node, 0, len(node.Nodes))
	for _, stmt := range node.Nodes {
		children = append(children, stmt)
	}
	v.setChildren(node, children)
}
// VisitAliasDecl: alias declarations are leaves.
func (v *hierarchyVisitor) VisitAliasDecl(node *AliasDecl) {
	v.noChildren(node)
}
// VisitClassDecl records the class body as the only child.
func (v *hierarchyVisitor) VisitClassDecl(node *ClassDecl) {
	v.setChildren(node, []Node{node.Body})
}
func (v *hierarchyVisitor) VisitVarDecl(node *VarDecl) {
children := []Node{}
if node.Value != nil {
children = append(children, node.Value)
}
v.setChildren(node, children)
}
func (v *hierarchyVisitor) VisitParamDecl(node *ParamDecl) {
v.noChildren(node)
}
func (v *hierarchyVisitor) VisitMemberDecl(node *MemberDecl) {
children := []Node{}
if node.Value != nil {
children = append(children, node.Value)
}
v.setChildren(node, children)
}
func (v *hierarchyVisitor) VisitGetterStmt(node *GetterStmt) {
v.setChildren(node, []Node{node.Body})
}
func (v *hierarchyVisitor) VisitSetterStmt(node *SetterStmt) {
v.setChildren(node, []Node{
node.SetterVariable,
node.Body,
})
}
// VisitIfStmt: condition, body, and the else branch when present.
func (v *hierarchyVisitor) VisitIfStmt(node *IfStmt) {
	children := []Node{
		node.Condition,
		node.Body,
	}
	if node.Else != nil {
		children = append(children, node.Else)
	}
	v.setChildren(node, children)
}

// VisitConstIfStmt: condition, body, and the else branch when present.
func (v *hierarchyVisitor) VisitConstIfStmt(node *ConstIfStmt) {
	children := []Node{
		node.Condition,
		node.Body,
	}
	if node.Else != nil {
		children = append(children, node.Else)
	}
	v.setChildren(node, children)
}

// VisitLoopStmt: the optional initializer, condition, and afterthought
// (in that order), followed by the mandatory loop body.
func (v *hierarchyVisitor) VisitLoopStmt(node *LoopStmt) {
	children := []Node{}
	if node.Initializer != nil {
		children = append(children, node.Initializer)
	}
	if node.Condition != nil {
		children = append(children, node.Condition)
	}
	if node.Afterthought != nil {
		children = append(children, node.Afterthought)
	}
	children = append(children, node.Body)
	v.setChildren(node, children)
}

// VisitPropertyDecl: the optional getter and setter statements.
func (v *hierarchyVisitor) VisitPropertyDecl(node *PropertyDecl) {
	children := []Node{}
	if node.Getter != nil {
		children = append(children, node.Getter)
	}
	if node.Setter != nil {
		children = append(children, node.Setter)
	}
	v.setChildren(node, children)
}
// VisitEnumDecl: enums have no child nodes.
func (v *hierarchyVisitor) VisitEnumDecl(node *EnumDecl) {
	v.noChildren(node)
}

// VisitExtensionDecl: the extension body is the only child.
func (v *hierarchyVisitor) VisitExtensionDecl(node *ExtensionDecl) {
	v.setChildren(node, []Node{
		node.Body,
	})
}

// VisitInterfaceDecl: the interface body is the only child.
func (v *hierarchyVisitor) VisitInterfaceDecl(node *InterfaceDecl) {
	v.setChildren(node, []Node{
		node.Body,
	})
}

// VisitPackageDecl: package declarations have no child nodes.
func (v *hierarchyVisitor) VisitPackageDecl(node *PackageDecl) {
	v.noChildren(node)
}

// VisitImportDecl: import declarations have no child nodes.
func (v *hierarchyVisitor) VisitImportDecl(node *ImportDecl) {
	v.noChildren(node)
}

// VisitTryStmt: the try body, each catch clause, and the finally block
// when present.
func (v *hierarchyVisitor) VisitTryStmt(node *TryStmt) {
	children := []Node{node.Body}
	for _, catchStmt := range node.Catch {
		children = append(children, catchStmt)
	}
	if node.Finally != nil {
		children = append(children, node.Finally)
	}
	v.setChildren(node, children)
}

// VisitCatchStmt: the caught variable and the handler body.
func (v *hierarchyVisitor) VisitCatchStmt(node *CatchStmt) {
	v.setChildren(node, []Node{
		node.Variable,
		node.Body,
	})
}
// VisitFunctionStmt: each parameter followed by the function body.
func (v *hierarchyVisitor) VisitFunctionStmt(node *FunctionStmt) {
	children := []Node{}
	for _, child := range node.Parameters {
		children = append(children, child)
	}
	children = append(children, node.Body)
	v.setChildren(node, children)
}

// VisitExternFuncStmt: each parameter (extern functions have no body).
func (v *hierarchyVisitor) VisitExternFuncStmt(node *ExternFuncStmt) {
	children := []Node{}
	for _, child := range node.Parameters {
		children = append(children, child)
	}
	v.setChildren(node, children)
}

// VisitReturnStmt: the returned expression when present.
func (v *hierarchyVisitor) VisitReturnStmt(node *ReturnStmt) {
	children := []Node{}
	if node.Value != nil {
		children = append(children, node.Value)
	}
	v.setChildren(node, children)
}

// VisitArrayExpr: each array member expression.
func (v *hierarchyVisitor) VisitArrayExpr(node *ArrayExpr) {
	children := []Node{}
	for _, child := range node.Members {
		children = append(children, child)
	}
	v.setChildren(node, children)
}

// VisitUnaryExpr: the single operand.
func (v *hierarchyVisitor) VisitUnaryExpr(node *UnaryExpr) {
	v.setChildren(node, []Node{node.Operand})
}

// VisitBinaryExpr: the left- and right-hand operands.
func (v *hierarchyVisitor) VisitBinaryExpr(node *BinaryExpr) {
	v.setChildren(node, []Node{node.LHS, node.RHS})
}

// VisitCallExpr: the callee object followed by each argument.
func (v *hierarchyVisitor) VisitCallExpr(node *CallExpr) {
	children := []Node{node.Object}
	for _, arg := range node.Arguments {
		children = append(children, arg)
	}
	v.setChildren(node, children)
}

// VisitArrayAccessExpr: the indexed object and the index expression.
func (v *hierarchyVisitor) VisitArrayAccessExpr(node *ArrayAccessExpr) {
	v.setChildren(node, []Node{node.Object, node.Index})
}

// VisitMemberAccessExpr: the accessed object.
func (v *hierarchyVisitor) VisitMemberAccessExpr(node *MemberAccessExpr) {
	v.setChildren(node, []Node{node.Object})
}

// VisitGenericInst: the instantiated object.
func (v *hierarchyVisitor) VisitGenericInst(node *GenericInst) {
	v.setChildren(node, []Node{node.Object})
}
// The visitors below cover leaf expressions; none of them have child
// nodes, so each records an explicit empty child list.

func (v *hierarchyVisitor) VisitThisExpr(node *ThisExpr) {
	v.noChildren(node)
}

func (v *hierarchyVisitor) VisitSuperExpr(node *SuperExpr) {
	v.noChildren(node)
}

func (v *hierarchyVisitor) VisitStringExpr(node *StringExpr) {
	v.noChildren(node)
}

func (v *hierarchyVisitor) VisitBoolExpr(node *BoolExpr) {
	v.noChildren(node)
}

func (v *hierarchyVisitor) VisitCharExpr(node *CharExpr) {
	v.noChildren(node)
}

func (v *hierarchyVisitor) VisitIntExpr(node *IntExpr) {
	v.noChildren(node)
}

func (v *hierarchyVisitor) VisitUIntExpr(node *UIntExpr) {
	v.noChildren(node)
}

func (v *hierarchyVisitor) VisitDoubleExpr(node *DoubleExpr) {
	v.noChildren(node)
}

func (v *hierarchyVisitor) VisitFloatExpr(node *FloatExpr) {
	v.noChildren(node)
}

func (v *hierarchyVisitor) VisitNamedIDExpr(node *NamedIDExpr) {
	v.noChildren(node)
}
func (v *hierarchyVisitor) VisitIDAccessExpr(node *IDAccessExpr) {
v.setChildren(node, []Node{node.LHS})
} | pkg/ast/hierarchy.go | 0.738858 | 0.535402 | hierarchy.go | starcoder |
package fault
import (
"errors"
"math/rand"
"net/http"
"sync"
)
const (
	// defaultRandSeed is used when a random seed is not set explicitly.
	defaultRandSeed = 1
)

var (
	// ErrNilInjector is returned when a nil Injector is passed.
	ErrNilInjector = errors.New("injector cannot be nil")
	// ErrInvalidPercent is returned when a percent is outside of [0.0,1.0].
	ErrInvalidPercent = errors.New("percent must be 0.0 <= percent <= 1.0")
)
// Fault combines an Injector with options on when to use that Injector.
// Construct with NewFault; a zero-value Fault has no rand source.
type Fault struct {
	// enabled determines if the fault should evaluate.
	enabled bool
	// injector is the Injector that will be injected.
	injector Injector
	// participation is the percent of requests that run the injector. 0.0 <= p <= 1.0.
	participation float32
	// pathBlocklist is a map of paths that the Injector will never run against.
	pathBlocklist map[string]bool
	// pathAllowlist, if set, is a map of the only paths that the Injector will run against.
	pathAllowlist map[string]bool
	// headerBlocklist is a map of headers that the Injector will never run against.
	headerBlocklist map[string]string
	// headerAllowlist, if set, is a map of the only headers the Injector will run against.
	headerAllowlist map[string]string
	// randSeed is a number to seed rand with.
	randSeed int64
	// rand is our random number source.
	rand *rand.Rand
	// randF is a function that returns a float32 [0.0,1.0).
	randF func() float32
	// randMtx protects Fault.rand, which is not thread safe.
	randMtx sync.Mutex
}
// Option configures a Fault.
type Option interface {
	// applyFault applies the option's value to f, returning an error for
	// invalid values.
	applyFault(f *Fault) error
}

// enabledOption carries the value for WithEnabled.
type enabledOption bool

func (o enabledOption) applyFault(f *Fault) error {
	f.enabled = bool(o)
	return nil
}

// WithEnabled sets if the Fault should evaluate.
func WithEnabled(e bool) Option {
	return enabledOption(e)
}

// participationOption carries the value for WithParticipation.
type participationOption float32

func (o participationOption) applyFault(f *Fault) error {
	// Reject values outside [0.0, 1.0].
	if o < 0.0 || o > 1.0 {
		return ErrInvalidPercent
	}
	f.participation = float32(o)
	return nil
}

// WithParticipation sets the percent of requests that run the Injector. 0.0 <= p <= 1.0.
func WithParticipation(p float32) Option {
	return participationOption(p)
}
// pathBlocklistOption carries the value for WithPathBlocklist.
type pathBlocklistOption []string

func (o pathBlocklistOption) applyFault(f *Fault) error {
	// Convert the list to a set for O(1) per-request lookups.
	blocklist := make(map[string]bool, len(o))
	for _, path := range o {
		blocklist[path] = true
	}
	f.pathBlocklist = blocklist
	return nil
}

// WithPathBlocklist is a list of paths that the Injector will not run against.
func WithPathBlocklist(blocklist []string) Option {
	return pathBlocklistOption(blocklist)
}

// pathAllowlistOption carries the value for WithPathAllowlist.
type pathAllowlistOption []string

func (o pathAllowlistOption) applyFault(f *Fault) error {
	// Convert the list to a set for O(1) per-request lookups.
	allowlist := make(map[string]bool, len(o))
	for _, path := range o {
		allowlist[path] = true
	}
	f.pathAllowlist = allowlist
	return nil
}

// WithPathAllowlist is, if set, a list of the only paths that the Injector will run against.
func WithPathAllowlist(allowlist []string) Option {
	return pathAllowlistOption(allowlist)
}
// headerBlocklistOption carries the value for WithHeaderBlocklist.
type headerBlocklistOption map[string]string

func (o headerBlocklistOption) applyFault(f *Fault) error {
	// Copy the map so later caller mutations cannot affect the Fault.
	blocklist := make(map[string]string, len(o))
	for key, val := range o {
		blocklist[key] = val
	}
	f.headerBlocklist = blocklist
	return nil
}

// WithHeaderBlocklist is a map of header keys to values that the Injector will not run against.
func WithHeaderBlocklist(blocklist map[string]string) Option {
	return headerBlocklistOption(blocklist)
}

// headerAllowlistOption carries the value for WithHeaderAllowlist.
type headerAllowlistOption map[string]string

func (o headerAllowlistOption) applyFault(f *Fault) error {
	// Copy the map so later caller mutations cannot affect the Fault.
	allowlist := make(map[string]string, len(o))
	for key, val := range o {
		allowlist[key] = val
	}
	f.headerAllowlist = allowlist
	return nil
}

// WithHeaderAllowlist is, if set, a map of header keys to values of the only headers that the
// Injector will run against.
func WithHeaderAllowlist(allowlist map[string]string) Option {
	return headerAllowlistOption(allowlist)
}
// RandSeedOption configures things that can set a random seed.
type RandSeedOption interface {
	Option
	RandomInjectorOption
}

// randSeedOption carries the value for WithRandSeed.
type randSeedOption int64

func (o randSeedOption) applyFault(f *Fault) error {
	f.randSeed = int64(o)
	return nil
}

// WithRandSeed sets the rand.Rand seed for this struct.
func WithRandSeed(s int64) RandSeedOption {
	return randSeedOption(s)
}

// randFloat32FuncOption carries the value for WithRandFloat32Func.
type randFloat32FuncOption func() float32

func (o randFloat32FuncOption) applyFault(f *Fault) error {
	f.randF = o
	return nil
}

// WithRandFloat32Func sets the function that will be used to randomly get our float value. Default
// rand.Float32. Always returns a float32 between [0.0,1.0) to avoid errors.
func WithRandFloat32Func(f func() float32) Option {
	return randFloat32FuncOption(f)
}
// NewFault sets/validates the Injector and Options and returns a usable Fault.
// The Injector must be non-nil; options are applied in order and the first
// option error aborts construction.
func NewFault(i Injector, opts ...Option) (*Fault, error) {
	if i == nil {
		return nil, ErrNilInjector
	}
	// set defaults
	f := &Fault{
		injector: i,
		randSeed: defaultRandSeed,
		randF:    nil,
	}
	// apply options
	for _, opt := range opts {
		err := opt.applyFault(f)
		if err != nil {
			return nil, err
		}
	}
	// set seeded rand source and function; an option-supplied randF wins
	// over the seeded default.
	f.rand = rand.New(rand.NewSource(f.randSeed))
	if f.randF == nil {
		f.randF = f.rand.Float32
	}
	return f, nil
}
// Handler determines if the Injector should execute and runs it if so.
// The fault evaluates only when it is enabled, the request passes the
// allow/block lists, and the request is selected for participation.
// Each condition short-circuits the ones after it (so the random draw
// only happens for requests that pass the list checks).
func (f *Fault) Handler(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		shouldEvaluate := f.enabled &&
			f.checkAllowBlockLists(true, r) &&
			f.participate()
		if shouldEvaluate {
			f.injector.Handler(next).ServeHTTP(w, r)
			return
		}
		next.ServeHTTP(w, r)
	})
}
// SetEnabled updates the enabled state of the Fault.
//
// NOTE(review): the parameter is the unexported type enabledOption, so
// external callers can only obtain one via WithEnabled — confirm this
// asymmetry with the Option API is intentional.
func (f *Fault) SetEnabled(o enabledOption) error {
	return o.applyFault(f)
}

// SetParticipation updates the participation percentage of the Fault.
// The option's own [0.0,1.0] validation is applied.
func (f *Fault) SetParticipation(o participationOption) error {
	return o.applyFault(f)
}
// checkAllowBlockLists checks the request against the configured allowlists
// and blocklists, returning true if the request may proceed and false
// otherwise. The incoming shouldEvaluate flag short-circuits the whole
// check. Rewritten with early returns: the original kept &&-ing every
// list even after the outcome was already false.
func (f *Fault) checkAllowBlockLists(shouldEvaluate bool, r *http.Request) bool {
	if !shouldEvaluate {
		return false
	}
	// Blocked paths never evaluate.
	if f.pathBlocklist[r.URL.Path] {
		return false
	}
	// If a path allowlist exists, the path must be on it.
	if len(f.pathAllowlist) > 0 && !f.pathAllowlist[r.URL.Path] {
		return false
	}
	// Any matching blocklisted header blocks the request.
	for key, val := range f.headerBlocklist {
		if r.Header.Get(key) == val {
			return false
		}
	}
	// If a header allowlist exists, every entry must match.
	// (Ranging over an empty map is a no-op, so no length guard is needed.)
	for key, val := range f.headerAllowlist {
		if r.Header.Get(key) != val {
			return false
		}
	}
	return true
}
// participate randomly decides (returns true) if the Injector should run based on f.participation.
// Numbers outside of [0.0,1.0] will always return false.
func (f *Fault) participate() bool {
f.randMtx.Lock()
rn := f.randF()
f.randMtx.Unlock()
if rn < f.participation && f.participation <= 1.0 {
return true
}
return false
} | fault.go | 0.684897 | 0.410697 | fault.go | starcoder |
package orbit
import (
"math"
"github.com/dayaftereh/discover/server/mathf"
)
// Orbit derives classical Keplerian orbital elements from a state vector
// (position r, velocity v) around a central body with gravitational
// parameter mu.
//
// Ported from https://github.com/jordanstephens/kepler.js
type Orbit struct {
	r *mathf.Vec3 // position
	v *mathf.Vec3 // velocity
	mu float64 // mu = G*M
	CentralBodyRadius float64
}

// NewOrbit creates an Orbit from position r, velocity v, the standard
// gravitational parameter mu, and the central body's radius.
func NewOrbit(r *mathf.Vec3, v *mathf.Vec3, mu float64, centralBodyRadius float64) *Orbit {
	return &Orbit{
		r: r,
		v: v,
		mu: mu,
		CentralBodyRadius: centralBodyRadius,
	}
}
// AngularMomentum returns the specific angular momentum h = r × v.
func (orbit *Orbit) AngularMomentum() *mathf.Vec3 {
	return orbit.r.Cross(orbit.v)
}

// RadialVelocity returns the radial speed (r · v) / |r|.
func (orbit *Orbit) RadialVelocity() float64 {
	return orbit.r.Dot(orbit.v) / orbit.r.Length()
}

// Eccentricity returns the eccentricity vector
// e = ((|v|² − mu/|r|)·r − (|r|·vr)·v) / mu.
func (orbit *Orbit) Eccentricity() *mathf.Vec3 {
	return orbit.r.Multiply(
		math.Pow(orbit.v.Length(), 2) - (orbit.mu / orbit.r.Length()),
	).SubtractVec(
		orbit.v.Multiply(orbit.r.Length() * orbit.RadialVelocity()),
	).Multiply(1.0 / orbit.mu)
}

// SemimajorAxis returns a = (h²/mu) · 1/(1 − e²).
func (orbit *Orbit) SemimajorAxis() float64 {
	h := orbit.AngularMomentum().Length()
	e := orbit.Eccentricity().Length()
	return (math.Pow(h, 2) / orbit.mu) * (1.0 / (1.0 - math.Pow(e, 2)))
}

// SemiminorAxis returns b = a·sqrt(1 − e²).
func (orbit *Orbit) SemiminorAxis() float64 {
	a := orbit.SemimajorAxis()
	e := orbit.Eccentricity().Length()
	return a * math.Sqrt(1.0-math.Pow(e, 2))
}

// SemilatusRectum returns p = h²/mu.
func (orbit *Orbit) SemilatusRectum() float64 {
	return math.Pow(orbit.AngularMomentum().Length(), 2) / orbit.mu
}
// Inclination returns the angle, in degrees, between the angular
// momentum vector and the reference K axis (0, 0, 1).
func (orbit *Orbit) Inclination() float64 {
	h := orbit.AngularMomentum()
	K := mathf.NewVec3(0, 0, 1)
	return mathf.ToDegress(math.Acos(K.Dot(h) / h.Length()))
}

// NodeLine returns the ascending node line vector n = K × h.
func (orbit *Orbit) NodeLine() *mathf.Vec3 {
	K := mathf.NewVec3(0, 0, 1)
	return K.Cross(orbit.AngularMomentum())
}

// RightAscension returns the right ascension of the ascending node in
// degrees, or 0 for equatorial orbits (zero-length node line).
func (orbit *Orbit) RightAscension() float64 {
	n := orbit.NodeLine()
	if mathf.CloseZero(n.Length()) {
		return 0.0
	}
	omega := mathf.ToDegress(math.Acos(n.X / n.Length()))
	// The node's Y sign disambiguates the acos quadrant.
	if n.Y < 0 {
		return 360.0 - omega
	}
	return omega
}

// ArgumentOfPeriapsis returns the argument of periapsis in degrees, or
// 0 for equatorial orbits (zero-length node line).
func (orbit *Orbit) ArgumentOfPeriapsis() float64 {
	n := orbit.NodeLine()
	if mathf.CloseZero(n.Length()) {
		return 0.0
	}
	e := orbit.Eccentricity()
	w := mathf.ToDegress(math.Acos(n.Dot(e) / (n.Length() * e.Length())))
	// NOTE(review): the quadrant check below tests n.Z; the usual
	// convention tests e.Z — confirm against the kepler.js reference.
	if n.Z < 0 {
		return 360.0 - w
	}
	return w
}
// TrueAnomaly returns the true anomaly in degrees: the angle from
// periapsis (or a fallback reference direction for circular orbits) to
// the current position.
func (orbit *Orbit) TrueAnomaly() float64 {
	e := orbit.Eccentricity()
	eNorm := e.Length()
	n := orbit.NodeLine()
	nNorm := n.Length()
	var u float64
	if mathf.CloseZero(eNorm) && mathf.CloseZero(nNorm) {
		// Circular equatorial orbit: measure from the X axis.
		// math.Min clamps rounding noise so Acos never sees > 1.
		u = mathf.ToDegress(math.Acos(math.Min(1.0, orbit.r.X/orbit.r.Length())))
	} else {
		// Measure from the eccentricity vector, or from the node line for
		// circular (but inclined) orbits.
		l := e
		if mathf.CloseZero(eNorm) {
			l = n
		}
		u = mathf.ToDegress(math.Acos(math.Min(1.0, l.Dot(orbit.r)/(l.Length()*orbit.r.Length()))))
	}
	// Negative radial velocity means the angle lies in (180, 360).
	if orbit.r.Dot(orbit.v) < 0.0 {
		return 360.0 - u
	}
	return u
}

// Apoapsis returns the orbital radius at true anomaly 180 degrees.
func (orbit *Orbit) Apoapsis() float64 {
	h := orbit.AngularMomentum()
	e := orbit.Eccentricity()
	return (math.Pow(h.Length(), 2.0) / orbit.mu) * (1.0 / (1.0 + e.Length()*math.Cos(math.Pi)))
}

// Periapsis returns the orbital radius at true anomaly 0.
func (orbit *Orbit) Periapsis() float64 {
	h := orbit.AngularMomentum()
	e := orbit.Eccentricity()
	return (math.Pow(h.Length(), 2.0) / orbit.mu) * (1.0 / (1.0 + e.Length()*math.Cos(0)))
}

// Period returns the orbital period T = (2π/√mu)·√(a³).
func (orbit *Orbit) Period() float64 {
	a := orbit.SemimajorAxis()
	return (2.0 * math.Pi / math.Sqrt(orbit.mu)) * math.Sqrt(math.Pow(a, 3))
}
// UniversalAnomaly solves the universal Kepler equation for the
// universal anomaly after a time step dt, using Laguerre's root-finding
// method on the universal-variable formulation.
func (orbit *Orbit) UniversalAnomaly(dt float64) float64 {
	a := orbit.SemimajorAxis()
	// initial guess of x
	// NOTE(review): this computes sqrt(mu)/(dt/a) == sqrt(mu)*a/dt; the
	// usual starting guess is sqrt(mu)*dt/|a|. Laguerre's method may still
	// converge from here, but confirm against the kepler.js reference.
	x := math.Sqrt(orbit.mu) / (dt / a)
	r := orbit.r
	v := orbit.v
	mu := orbit.mu
	// f and its first and second derivatives for the Laguerre iteration.
	f := func(x2 float64) float64 {
		return UniversalFormulationF(x2, a, r, v, mu, dt)
	}
	df := func(x2 float64) float64 {
		return UniversalFormulationDFDT(x2, a, r, v, mu)
	}
	d2f := func(x2 float64) float64 {
		return UniversalFormulationD2FDT(x2, a, r, v, mu)
	}
	return LaguerreSolve(x, f, df, d2f)
}
// Update advances the orbit by dt using the Lagrange f and g
// coefficients and returns a new Orbit; the receiver is not modified.
func (orbit *Orbit) Update(dt float64) *Orbit {
	x := orbit.UniversalAnomaly(dt)
	a := orbit.SemimajorAxis()
	z := UniversalFormulationZ(x, a)
	r0 := orbit.r
	v0 := orbit.v
	mu := orbit.mu
	// r = f·r0 + g·v0; v = fdot·r0 + gdot·v0 (fdot/gdot need the new r).
	r := r0.Multiply(LagrangeF(x, z, orbit.r)).AddVec(v0.Multiply(LagrangeG(x, z, mu, dt)))
	v := r0.Multiply(LagrangeDf(x, z, r, r0, mu)).AddVec(v0.Multiply(LagrangeDg(x, z, r)))
	return &Orbit{
		r: r,
		v: v,
		mu: mu,
		CentralBodyRadius: orbit.CentralBodyRadius,
	}
}
func (orbit *Orbit) Position() *mathf.Vec3 {
return orbit.r.Clone()
}
func (orbit *Orbit) Velocity() *mathf.Vec3 {
return orbit.v.Clone()
} | server/mathf/orbit/orbit.go | 0.868269 | 0.569673 | orbit.go | starcoder |
package runner
import (
"errors"
"fmt"
"os"
"sort"
"strings"
"testing"
"text/tabwriter"
)
// NewRegistry returns a pointer to a new, empty TestRegistry.
func NewRegistry() *TestRegistry {
	return &TestRegistry{
		tests:      make(map[string]Test),
		TestSuites: make(map[string]TestSuite),
	}
}

// NewTestSuite returns a pointer to a new, empty TestSuite with the
// given name.
func NewTestSuite(name string) *TestSuite {
	return &TestSuite{
		name:  name,
		tests: make(map[string]Test),
	}
}
// Test is a test function.
type Test func(t *testing.T)

// TestRegistry holds every registered test plus named groupings of
// them (suites).
type TestRegistry struct {
	tests      map[string]Test
	TestSuites map[string]TestSuite
}

// TestSuite is a named group of tests that can be run together.
type TestSuite struct {
	name  string
	tests map[string]Test
}
// RegisterTest registers a test to the registry and to each of the
// given suites.
func (r *TestRegistry) RegisterTest(name string, test Test, suites []*TestSuite) {
	r.tests[name] = test
	for _, suite := range suites {
		//fmt.Println("Registering test: ", name, "on suite: ",suite.name)
		suite.registerTest(name, test)
	}
}

// registerTest registers a test into this suite.
func (r *TestSuite) registerTest(name string, test Test) {
	r.tests[name] = test
}

// RegisterTestSuite registers a test suite into the registry, keyed by
// its name.
func (r *TestRegistry) RegisterTestSuite(testGroup TestSuite) {
	r.TestSuites[testGroup.name] = testGroup
}
// GetTestNames returns the names of all registered tests, sorted
// alphabetically.
func (r *TestRegistry) GetTestNames() []string {
	names := make([]string, 0, len(r.tests))
	for name := range r.tests {
		names = append(names, name)
	}
	// sort.Strings replaces the hand-rolled sort.Slice comparator.
	sort.Strings(names)
	return names
}
// GetTestNames returns the names of all tests in this suite, sorted
// alphabetically.
func (r *TestSuite) GetTestNames() []string {
	names := make([]string, 0, len(r.tests))
	for name := range r.tests {
		names = append(names, name)
	}
	// sort.Strings replaces the hand-rolled sort.Slice comparator.
	sort.Strings(names)
	return names
}
// GetTestSuiteNames returns the names of all registered suites, sorted
// alphabetically.
func (r *TestRegistry) GetTestSuiteNames() []string {
	names := make([]string, 0, len(r.TestSuites))
	for name := range r.TestSuites {
		names = append(names, name)
	}
	// sort.Strings replaces the hand-rolled sort.Slice comparator.
	sort.Strings(names)
	return names
}
// PrintTestSuites prints every suite and its tests as a tab-aligned
// table on stdout.
func (r *TestRegistry) PrintTestSuites() {
	writer := new(tabwriter.Writer)
	writer.Init(os.Stdout, 0, 0, 3, ' ', tabwriter.FilterHTML)
	fmt.Fprintln(writer, "SUITE\tTESTS")
	for name, suite := range r.TestSuites {
		// Fprintf avoids the intermediate Sprintf allocation.
		fmt.Fprintf(writer, "%s\t%s\n", name, strings.Join(suite.GetTestNames(), ", "))
	}
	writer.Flush()
}
// TestRunner runs integration tests registered in its Registry.
type TestRunner struct {
	Registry *TestRegistry
}
// RunTests runs the named tests (or every registered test when args is
// empty) through the standard testing package. An unknown test name
// aborts before any test runs.
func (r *TestRunner) RunTests(args []string) error {
	tests := make([]testing.InternalTest, 0, len(args))
	if len(args) > 0 {
		for _, name := range args {
			test, ok := r.Registry.tests[name]
			if !ok {
				return errors.New("unknown test " + name)
			}
			tests = append(tests, testing.InternalTest{
				Name: name,
				F:    test,
			})
		}
	} else {
		// No names given: run everything (in map order, i.e. unsorted).
		for name, test := range r.Registry.tests {
			tests = append(tests, testing.InternalTest{
				Name: name,
				F:    test,
			})
		}
	}
	// Hack to enable verbose testing.
	os.Args = []string{
		os.Args[0],
		"-test.v",
	}
	// Run the integration tests via the testing package.
	// The matcher always returns true so every InternalTest is run.
	testing.Main(func(_, _ string) (bool, error) { return true, nil }, tests, nil, nil)
	return nil
}
// RunTestSuites Runs the tests groups
func (r *TestRunner) RunTestSuites(args []string) error {
tests := make([]testing.InternalTest, 0, len(args))
if len(args) > 0 {
for _, name := range args {
testSuite, ok := r.Registry.TestSuites[name]
if !ok {
return errors.New("unknown test suite" + name)
}
testNames := []string{}
for testName := range testSuite.tests {
testNames = append(testNames,testName)
}
r.RunTests(testNames)
}
} else {
return nil
}
// Hack to enable verbose testing.
os.Args = []string{
os.Args[0],
"-test.v",
}
// Run the integration tests via the testing package.
testing.Main(func(_, _ string) (bool, error) { return true, nil }, tests, nil, nil)
return nil
} | test/runner/runner.go | 0.732687 | 0.449755 | runner.go | starcoder |
package merkletree2
import "fmt"
// ValueConstructor is an interface for constructing values, so that typed
// values can be pulled out of the Merkle Tree. All Values must have the same
// type; however, multiple types can be encoded by having this type implement
// the codec.Selfer interface.
type ValueConstructor interface {
	// Construct returns a new empty value template for a leaf, so that the
	// unmarshalling routine has the correct concrete type to decode into.
	Construct() interface{}
}
// Config defines the shape of the MerkleTree.
type Config struct {
	// encoder is used to compute hashes in this configuration, and also
	// manages the blinding secrets (see useBlindedValueHashes).
	encoder Encoder
	// useBlindedValueHashes controls whether this tree blinds hashes of
	// KeyValuePairs with a per (Key,Seqno) specific secret (which is itself
	// derived from a per Seqno specific secret which is stored together with
	// the tree). This ensures values stored in the tree are not leaked
	// by the membership proofs (but keys can leak, as well as the rough tree
	// size). If the tree is rebuilt at every Seqno, this also hides whether
	// values are changing (but not when a value is first inserted).
	useBlindedValueHashes bool
	// The number of children per node. Must be a power of two. Some children
	// can be empty.
	childrenPerNode int
	// The maximum number of KeyValuePairs in a leaf node before we split.
	maxValuesPerLeaf int
	// The number of bits necessary to represent a ChildIndex, i.e.
	// log2(childrenPerNode).
	bitsPerIndex uint8
	// The length of all the keys which will be stored in the tree. For
	// simplicity, we enforce that all the keys have the same length and that
	// bitsPerIndex divides keysByteLength*8.
	keysByteLength int
	// valueConstructor is an interface to construct empty values to be used for
	// deserialization.
	valueConstructor ValueConstructor
}
// NewConfig makes a new config object. It takes an Encoder, logChildrenPerNode
// (the base 2 logarithm of the number of children per interior node),
// maxValuesPerLeaf (the maximum number of entries in a leaf before the leaf is
// split into multiple nodes at a lower level in the tree), and keysByteLength
// (the length of the Keys which the tree will store).
func NewConfig(h Encoder, useBlindedValueHashes bool, logChildrenPerNode uint8, maxValuesPerLeaf int, keysByteLength int) (Config, error) {
	// Validate the range FIRST: the modulo below would panic (division by
	// zero) when logChildrenPerNode == 0, which the original allowed.
	if logChildrenPerNode > 63 {
		return Config{}, NewInvalidConfigError("This package does not support more than 2^63 children per internal node")
	}
	if logChildrenPerNode < 1 {
		return Config{}, NewInvalidConfigError(fmt.Sprintf("Need at least 2 children per node, but logChildrenPerNode = %v", logChildrenPerNode))
	}
	if (keysByteLength*8)%int(logChildrenPerNode) != 0 {
		return Config{}, NewInvalidConfigError("The key bit length does not divide logChildrenPerNode")
	}
	childrenPerNode := 1 << logChildrenPerNode
	return Config{encoder: h, useBlindedValueHashes: useBlindedValueHashes, childrenPerNode: childrenPerNode, maxValuesPerLeaf: maxValuesPerLeaf, bitsPerIndex: logChildrenPerNode, keysByteLength: keysByteLength, valueConstructor: nil}, nil
}
// MasterSecret is a secret used to hide wether a leaf value has changed between
// different versions (Seqnos) in a blinded merkle tree. One MasterSecret per
// tree is generated for each Seqno, and such secret is then used to generate a
// KeySpecific secret per leaf.
type MasterSecret []byte
// MasterSecret is a secret used to hide wether a leaf value has changed between
// different versions (Seqnos) in a blinded merkle tree. This is derived from a
// per-Seqno MasterSecret as specified by the Encoder
type KeySpecificSecret []byte
// Encoder is an interface for hashing MerkleTree data structures into their
// cryptographic hashes. It also manages blinding secrets.
type Encoder interface {
EncodeAndHashGeneric(interface{}) (Hash, error)
HashKeyValuePairWithKeySpecificSecret(KeyValuePair, KeySpecificSecret) (Hash, error)
GenerateMasterSecret(Seqno) (MasterSecret, error)
ComputeKeySpecificSecret(MasterSecret, Key) KeySpecificSecret
} | go/merkletree2/config.go | 0.711531 | 0.51068 | config.go | starcoder |
package GoTrees
import (
	"strconv"
	"strings"
)
// bTreeNode is one node of a B-tree: a sorted list of key/value entries
// plus child pointers.
type bTreeNode struct {
	nodes []*keyValue // sorted entries; only the first `length` are live
	length int
	children []*bTreeNode // child pointers; only the first `numChildren` are live
	numChildren int
}

// newbTreeNode returns a node whose backing slices are created with
// length alloc.
// NOTE(review): make([]T, alloc) creates slices of LENGTH alloc filled
// with nils while length/numChildren start at 0 — confirm the nil
// padding is intended rather than make([]T, 0, alloc).
func newbTreeNode(alloc int) bTreeNode {
	return bTreeNode{nodes: make([]*keyValue, alloc), length: 0, children: make([]*bTreeNode, alloc), numChildren: 0}
}
// AddToList inserts n into the sorted entry list at its binary-search
// position; it performs no B-tree split logic.
// NOTE(review): on a duplicate key the search breaks early and the
// duplicate is still inserted next to the existing entry — confirm
// duplicates are intended.
func (btn *bTreeNode) AddToList(n *keyValue) {
	min := 0
	max := btn.length
	midPoint := (min + max) / 2
	for min < max {
		if btn.nodes[midPoint].key > n.key {
			max = midPoint
		} else if btn.nodes[midPoint].key < n.key {
			min = midPoint + 1
		} else {
			// this means node is duplicate key
			break
		}
		midPoint = (min + max) / 2
	}
	if btn.length <= midPoint || btn.length == 0 {
		// Insertion point is at (or past) the end of the live entries.
		// NOTE(review): this appends after any preallocated nil padding in
		// nodes (see newbTreeNode) — verify length and slice stay in sync.
		btn.nodes = append(btn.nodes, n)
	} else {
		// Shift entries right by one and place n at midPoint.
		btn.nodes = append(btn.nodes[:midPoint+1], btn.nodes[midPoint:]...)
		btn.nodes[midPoint] = n
	}
	btn.length++
}
// MergeRightSilbing appends the entries and children of the right-hand
// sibling onto this node. (The method name keeps the existing "Silbing"
// misspelling of "Sibling" so callers are unaffected.)
func (btn *bTreeNode) MergeRightSilbing(right *bTreeNode) {
	btn.nodes = append(btn.nodes, right.nodes...)
	btn.length += right.length
	btn.children = append(btn.children, right.children...)
	btn.numChildren += right.numChildren
}
// RemoveFromList removes the entry with the given key from the sorted
// list; it performs no B-tree rebalancing.
// NOTE(review): Search returns an insertion index even when the key is
// absent, so calling this with a missing key can drop an unrelated
// entry — confirm callers only pass keys known to be present.
func (btn *bTreeNode) RemoveFromList(key int) {
	_, i := btn.Search(key)
	if i >= 0 {
		if i >= btn.length {
			// Index at/after the live region: just truncate.
			btn.length--
			btn.nodes = btn.nodes[:i]
		} else {
			// Shift the tail left over the removed slot.
			btn.length--
			btn.nodes = append(btn.nodes[:i], btn.nodes[i+1:]...)
		}
	}
}

// RemoveFromListAt removes the entry at index (no bounds checking).
func (btn *bTreeNode) RemoveFromListAt(index int) {
	btn.length--
	btn.nodes = append(btn.nodes[:index], btn.nodes[index+1:]...)
}

// ReplaceFromListAt overwrites the entry at index (no bounds checking).
func (btn *bTreeNode) ReplaceFromListAt(new *keyValue, index int) {
	btn.nodes[index] = new
}
// Search performs a binary search for key over the live entries. It
// returns the matching entry and its index; when the key is absent it
// returns (nil, insertion-index).
func (btn *bTreeNode) Search(key int) (*keyValue, int) {
	lo, hi := 0, btn.length
	for lo < hi {
		mid := (lo + hi) / 2
		switch {
		case btn.nodes[mid].key > key:
			hi = mid
		case btn.nodes[mid].key < key:
			lo = mid + 1
		default:
			return btn.nodes[mid], mid
		}
	}
	return nil, lo
}
// SplitInTwo splits the node around its middle entry, returning the
// middle entry plus the new left and right halves. alloc sizes the
// freshly allocated backing slices.
// NOTE(review): the left half keeps (sub)slices of the original backing
// arrays while the right half gets fresh copies — later appends through
// the left could clobber shared storage; confirm this aliasing is safe
// for the callers.
func (btn *bTreeNode) SplitInTwo(alloc int) (*keyValue, *bTreeNode, *bTreeNode) {
	mid := btn.length / 2
	var left *bTreeNode = nil
	var right *bTreeNode = nil
	nodeAlloc := max(alloc, btn.length/2)
	childAlloc := max(alloc, btn.length/2+1)
	if btn.numChildren > 0 {
		// Interior node: children move with their halves.
		left = &bTreeNode{nodes: btn.nodes[:mid], length: btn.length / 2, children: btn.children[:mid+1], numChildren: btn.numChildren / 2}
		right = &bTreeNode{nodes: make([]*keyValue, nodeAlloc), length: btn.length / 2, children: make([]*bTreeNode, childAlloc), numChildren: btn.numChildren / 2}
		copy(right.children, btn.children[mid+1:])
	} else {
		// splitting a leaf, these nodes will need new child lists allocated
		left = &bTreeNode{nodes: btn.nodes[:mid], length: btn.length / 2, children: make([]*bTreeNode, btn.length/2+1), numChildren: 0}
		right = &bTreeNode{nodes: make([]*keyValue, nodeAlloc), length: btn.length / 2, children: make([]*bTreeNode, alloc+1), numChildren: 0}
	}
	// Only copy the right hand nodes, the left memory can be recycled
	copy(right.nodes, btn.nodes[mid+1:])
	return btn.nodes[mid], left, right
}
// max returns the larger of a and b.
func max(a, b int) int {
	if a >= b {
		return a
	}
	return b
}
// AddChild appends a child to the end of the child list.
func (btn *bTreeNode) AddChild(other *bTreeNode) {
	btn.children = append(btn.children, other)
	btn.numChildren++
}
// PrependChild inserts a child at the front of the child list.
// Fixed: the original append(btn.children[1:], btn.children...) dropped
// the first child and duplicated the remainder instead of shifting.
func (btn *bTreeNode) PrependChild(other *bTreeNode) {
	// Grow by one, shift everything right (copy handles the overlap),
	// then place the new child at the front.
	btn.children = append(btn.children, nil)
	copy(btn.children[1:], btn.children)
	btn.children[0] = other
	btn.numChildren++
}
// DeleteChild removes the child at index from the child list; negative
// indices are ignored.
func (btn *bTreeNode) DeleteChild(index int) {
	if index >= 0 {
		btn.numChildren--
		btn.children = append(btn.children[:index], btn.children[index+1:]...)
	}
}
// InsertTwoChildren replaces the child at index with the pair
// (left, right) — typically the halves produced by splitting that
// child — shifting later children one slot to the right.
func (btn *bTreeNode) InsertTwoChildren(left *bTreeNode, right *bTreeNode, index int) {
	// making room for children nodes
	btn.children = append(btn.children, nil)
	// shift over current children
	for i := btn.numChildren - 1; i >= index; i-- {
		btn.children[i+1] = btn.children[i]
	}
	// assign new children (note this will overwrite an existing node on purpose)
	btn.children[index] = left
	btn.children[index+1] = right
	btn.numChildren += 1
}
func (btn *bTreeNode) String() string {
str := "["
for _, k := range btn.nodes {
str += strconv.Itoa(k.key) + " "
}
str += "]"
return str
} | BTreeNode.go | 0.584864 | 0.42662 | BTreeNode.go | starcoder |
/*
Package etag calculates the qetag hash value of a file.

The algorithm is based on the qetag algorithm from Qiniu Cloud:
https://github.com/qiniu/qetag

This package extends qetag with two exported functions,
GetEtagByString and GetEtagByBytes, and re-implements GetEtagByPath.
*/
package goetag
import (
"bytes"
"crypto/sha1"
"encoding/base64"
"io"
"os"
"runtime"
)
const (
	// BlockSize is the qetag chunk size: input is hashed in 4 MiB blocks.
	BlockSize int64 = 4194304
)

// Reader is the minimal contract needed for etag computation:
// sequential reads plus random access for concurrent per-block hashing.
type Reader interface {
	Read([]byte) (int, error)
	ReadAt([]byte, int64) (int, error)
}
// GetEtagByString calculates the qetag hash of the given string.
func GetEtagByString(str string) (string, error) {
	return GetEtagByBytes([]byte(str))
}
// GetEtagByPath calculates the qetag hash of the file at filepath.
// (The original comment was mislabeled as GetEtagByString.)
// Rewritten with early returns; the original deferred Close before the
// Open error check, invoking Close on a possibly-nil *os.File.
func GetEtagByPath(filepath string) (string, error) {
	file, err := os.Open(filepath)
	if err != nil {
		return "", err
	}
	defer file.Close()
	stat, err := file.Stat()
	if err != nil {
		return "", err
	}
	return getEtagByReader(file, stat.Size()), nil
}
// GetEtagByBytes calculates the qetag hash of the given byte slice.
// (The original comment was mislabeled as GetEtagByString.)
func GetEtagByBytes(content []byte) (string, error) {
	reader := bytes.NewReader(content)
	size := reader.Size()
	return getEtagByReader(reader, size), nil
}
// getEtagByReader computes the etag: single-block inputs are hashed
// directly with prefix byte 0x16; multi-block inputs hash each block and
// then hash the concatenated digests with prefix byte 0x96. The result
// is URL-safe base64.
func getEtagByReader(reader Reader, size int64) string {
	buffer := make([]byte, 0, 21) // 1 prefix byte + 20-byte SHA-1
	if count := blockCount(size); count > 1 {
		buffer = getHugeEtag(reader, count)
	} else {
		buffer = getTinyEtag(reader, buffer)
	}
	return base64.URLEncoding.EncodeToString(buffer)
}

// getTinyEtag hashes a single-block input: prefix 0x16 followed by the
// SHA-1 of the whole content, appended to buffer.
func getTinyEtag(reader Reader, buffer []byte) []byte {
	buffer = append(buffer, 0x16)
	buffer = getSha1ByReader(buffer, reader)
	return buffer
}
// doEtagWork is a hashing worker: for each block offset received it
// SHA-1s that 4 MiB section (via ReadAt, so workers may read
// concurrently) and reports {offset: digest} on conseqChan.
func doEtagWork(reader Reader, offsetChan <-chan int, conseqChan chan<- map[int][]byte) {
	for offset := range offsetChan {
		data := io.NewSectionReader(reader, int64(offset)*BlockSize, BlockSize)
		sha1 := getSha1ByReader(nil, data)
		conseqChan <- map[int][]byte{
			offset: sha1,
		}
	}
}

// getHugeEtag hashes a multi-block input by fanning the block offsets
// out to NumCPU workers and folding the per-block digests back together.
// Both channels are buffered to `count`, so neither side can block.
func getHugeEtag(reader Reader, count int64) []byte {
	conseqChan := make(chan map[int][]byte, count)
	offsetChan := make(chan int, count)
	for i := 1; i <= runtime.NumCPU(); i++ {
		go doEtagWork(reader, offsetChan, conseqChan)
	}
	for offset := 0; offset < int(count); offset++ {
		offsetChan <- offset
	}
	// Closing lets the workers exit once all offsets are drained.
	close(offsetChan)
	return getSha1ByConseqChan(conseqChan, count)
}
// getSha1ByConseqChan collects count per-block SHA-1 digests from
// conseqChan, reassembles them in block order, and returns the final etag
// payload: a 0x96 marker byte followed by the SHA-1 of the concatenated
// block digests.
func getSha1ByConseqChan(conseqChan chan map[int][]byte, count int64) (conseq []byte) {
	// Results arrive in arbitrary worker order; key them by block offset.
	sha1Map := make(map[int][]byte, 0)
	for i := 0; i < int(count); i++ {
		eachChan := <-conseqChan
		for k, v := range eachChan {
			sha1Map[k] = v
		}
	}
	// Concatenate the 20-byte digests in ascending block order.
	blockSha1 := make([]byte, 0, count*20)
	for i := 0; int64(i) < count; i++ {
		blockSha1 = append(blockSha1, sha1Map[i]...)
	}
	conseq = make([]byte, 0, 21)
	conseq = append(conseq, 0x96)
	conseq = getSha1ByReader(conseq, bytes.NewReader(blockSha1))
	return
}
// getSha1ByReader appends the SHA-1 digest of reader's remaining contents
// to buffer and returns the extended slice (buffer may be nil).
// NOTE(review): the io.Copy error is ignored, so a failed or short read
// silently yields a digest of partial data — consider propagating it.
func getSha1ByReader(buffer []byte, reader Reader) []byte {
	hash := sha1.New()
	io.Copy(hash, reader)
	return hash.Sum(buffer)
}
func blockCount(size int64) int64 {
if size > BlockSize {
count := size / BlockSize
if size&BlockSize == 0 {
return count
}
return count + 1
}
return 1
} | etag.go | 0.718594 | 0.442757 | etag.go | starcoder |
package engine
import (
"bytes"
"github.com/wesrobin/battlesnakes/model"
"log"
"math"
)
// getCoordAfterMove returns the coordinate reached by applying move to
// coord; an unknown move yields the zero coordinate.
func getCoordAfterMove(coord model.Coord, move model.Move) model.Coord {
	dx, dy := 0, 0
	switch move {
	case model.Up:
		dy = 1
	case model.Down:
		dy = -1
	case model.Left:
		dx = -1
	case model.Right:
		dx = 1
	default:
		return model.Coord{} // Hiss
	}
	return model.Coord{X: coord.X + dx, Y: coord.Y + dy}
}
// inBounds reports whether coord lies on the board.
func inBounds(board model.Board, coord model.Coord) bool {
	return coord.X >= 0 && coord.X < board.Width &&
		coord.Y >= 0 && coord.Y < board.Height
}
// legalCoord reports whether coord is on the board and not occupied by a
// snake body segment or head (food and empty squares are legal).
func legalCoord(s state, board model.Board, coord model.Coord) bool {
	return inBounds(board, coord) && s.gobjs[coord] != model.Body && s.gobjs[coord] != model.Head
}
// step advances the board one turn by applying mv to the first snake,
// operating on a deep copy so the input board b is left untouched.
// Pls only call with legal moves <3
func step(b model.Board, mv model.Move) model.Board {
	board := deepCopy(b)
	snek := board.Snakes[0]
	snek.Health-- // every move costs one health point
	newHead := getCoordAfterMove(snek.Head, mv)
	l := len(snek.Body)
	var hazCheez bool
	for i, cheezes := range board.Food {
		if newHead == cheezes {
			// Eating restores full health, grows the snake by one
			// segment, and removes that food item from the board.
			hazCheez = true
			snek.Length++
			snek.Health = 100
			l++
			board.Food = append(board.Food[:i], board.Food[i+1:]...)
			break
		}
	}
	// Shift the body forward: new head first, then the old body minus its
	// tail segment; the tail is retained only when food was eaten.
	newBod := make([]model.Coord, l)
	newBod[0] = newHead
	for i := 1; i < len(snek.Body); i++ {
		newBod[i] = snek.Body[i-1]
	}
	if hazCheez {
		newBod[len(newBod)-1] = snek.Body[len(snek.Body)-1]
	}
	snek.Body = newBod
	snek.Head = newHead
	board.Snakes[0] = snek
	return board
}
// deepCopy returns a copy of board whose Snakes and Food slices are fully
// independent of the original, so mutating the copy can never alias the
// source board.
func deepCopy(board model.Board) model.Board {
	b := model.Board{
		Height: board.Height,
		Width:  board.Width,
	}
	b.Snakes = append(b.Snakes, board.Snakes...)
	// BUG FIX: the struct copy above still shares each snake's Body
	// backing array with the source; clone them to make the copy deep.
	for i := range b.Snakes {
		b.Snakes[i].Body = append([]model.Coord(nil), board.Snakes[i].Body...)
	}
	b.Food = append(b.Food, board.Food...)
	return b
}
// getPossibleMoves returns every move the first snake can legally make
// from its current head position.
func getPossibleMoves(s state, board model.Board) []model.Move {
	var moves []model.Move
	head := board.Snakes[0].Head
	for mv := range model.PossibleMoves {
		if next := getCoordAfterMove(head, mv); legalCoord(s, board, next) {
			moves = append(moves, mv)
		}
	}
	return moves
}
// printMap logs an ASCII rendering of the board: '◦' empty square,
// '⚕' food, 'X' snake head, 'O' snake body. Rows are emitted from the
// highest Y down so the printout matches the game's coordinate system.
func printMap(state model.Board) {
	var o bytes.Buffer
	board := make([][]rune, state.Width)
	for i := range board {
		board[i] = make([]rune, state.Height)
	}
	for y := 0; y < state.Height; y++ {
		for x := 0; x < state.Width; x++ {
			board[x][y] = '◦'
		}
	}
	for _, f := range state.Food {
		board[f.X][f.Y] = '⚕'
	}
	for _, s := range state.Snakes {
		for i, b := range s.Body {
			if i == 0 {
				board[b.X][b.Y] = 'X'
			} else {
				board[b.X][b.Y] = 'O'
			}
		}
	}
	o.WriteRune('\n')
	for y := state.Height - 1; y >= 0; y-- {
		for x := 0; x < state.Width; x++ {
			o.WriteRune(board[x][y])
		}
		o.WriteString("\n")
	}
	log.Println(o.String())
}
// dist returns the Euclidean distance between a and b.
func dist(a, b model.Coord) float64 {
	// math.Hypot computes sqrt(dx*dx+dy*dy) without intermediate
	// overflow/underflow, replacing the manual Sqrt(Pow+Pow).
	return math.Hypot(float64(a.X-b.X), float64(a.Y-b.Y))
}
// distTaxi calculates distance using https://en.wikipedia.org/wiki/Taxicab_geometry
func distTaxi(a, b model.Coord) int64 {
return int64(math.Abs(float64(a.X-b.X)) + math.Abs(float64(a.Y-b.Y)))
} | engine/util.go | 0.615435 | 0.494812 | util.go | starcoder |
package tables
import (
"github.com/sudachen/go-tables/internal"
"github.com/sudachen/go-tables/util"
"reflect"
)
// Column is a view over a single table column: the reflected slice of
// values plus a bitset marking NA (missing) entries.
type Column struct {
	column reflect.Value
	na util.Bits
}
// Col wraps a slice value in a Column with no NA entries. It panics when
// the argument is not a slice.
func Col(a interface{}) *Column {
	v := reflect.ValueOf(a)
	if v.Kind() != reflect.Slice {
		// BUG FIX: corrected the typo "anly" in the panic message.
		panic("only a slice is allowed as an argument")
	}
	return &Column{v, util.Bits{}}
}
/*
Col returns Column object for the table' column selected by the name.
Columns are created lazily and cached in t.cols on first access.

	t := tables.New([]struct{Name string; Age int; Rate float32}{{"Ivanov",42,1.2},{"Petrov",42,1.5}})
	t.Col("Name").String(0) -> "Ivanov"
	t.Col("Name").Len() -> 2
*/
func (t *Table) Col(column string) *Column {
	for i, n := range t.raw.Names {
		if n == column {
			if t.cols == nil {
				t.cols = make([]*Column, len(t.raw.Names))
			}
			if t.cols[i] == nil {
				c := &Column{t.raw.Columns[i], t.raw.Na[i]}
				t.cols[i] = c
			}
			return t.cols[i]
		}
	}
	// BUG FIX: corrected the grammar of the panic message
	// ("there is not column" -> "there is no column").
	panic("there is no column with name " + column)
}
/*
String returns column' value converted to string
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Name").String(0) -> "Ivanov"
t.Col("Name").Index(0).String() -> "Ivanov"
*/
func (c *Column) String(row int) string {
return c.Index(row).String()
}
func (c *Column) Na(i int) bool {
return c.na.Bit(i)
}
/*
Strings extracts column' values as []string
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Name").Strings() -> {"Ivanov","Petrow"}
*/
func (c *Column) Strings() []string {
return c.ExtractAs(internal.StringType).([]string)
}
/*
Int returns column' value converted to int
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Int(0) -> 32
t.Col("Age").Index(0).Int() -> 32
*/
func (c *Column) Int(row int) int {
return c.Index(row).Int()
}
/*
Int8 returns column' value converted to int8
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Int8(0) -> 32
t.Col("Age").Index().Int8() -> 32
*/
func (c *Column) Int8(row int) int8 {
return c.Index(row).Int8()
}
/*
Int16 returns column' value converted to int16
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Int16(0) -> 32
t.Col("Age").Index().Int16() -> 32
*/
func (c *Column) Int16(row int) int16 {
return c.Index(row).Int16()
}
/*
Int32 returns column' value converted to int32
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Int32(0) -> 32
t.Col("Age").Index(0).Int32() -> 32
*/
func (c *Column) Int32(row int) int32 {
return c.Index(row).Int32()
}
/*
Int64 returns column' value converted to int64
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Int64(0) -> 32
t.Col("Age").Index(0).Int64() -> 32
*/
func (c *Column) Int64(row int) int64 {
return c.Index(row).Int64()
}
/*
Uint returns column' value converted to uint
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Uint(0) -> 32
*/
func (c *Column) Uint(row int) uint {
v := c.column.Index(row)
return util.Convert(v, internal.UintType).(uint)
}
/*
Uint8 returns column' value converted to uint8
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Uint8(0) -> 32
*/
func (c *Column) Uint8(row int) uint8 {
v := c.column.Index(row)
return util.Convert(v, internal.Uint8Type).(uint8)
}
/*
Uint16 returns column' value converted to uint16
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Uint16(0) -> 32
*/
func (c *Column) Uint16(row int) uint16 {
v := c.column.Index(row)
return util.Convert(v, internal.Uint16Type).(uint16)
}
/*
Uint32 returns column' value converted to uint32
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Uint32(0) -> 32
*/
func (c *Column) Uint32(row int) uint32 {
v := c.column.Index(row)
return util.Convert(v, internal.Uint32Type).(uint32)
}
/*
Uint64 returns column' value converted to uint64
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Uint64(0) -> 32
t.Col("Age").Index(0).Uint64() -> 32
*/
func (c *Column) Uint64(row int) uint64 {
return c.Index(row).Uint64()
}
/*
Ints extracts column' values as []int
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Ints() -> {32,44}
*/
func (c *Column) Ints() []int {
return c.ExtractAs(internal.IntType).([]int)
}
/*
Ints8 extracts column' values as []int8
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Ints8() -> {32,44}
*/
func (c *Column) Ints8() []int8 {
return c.ExtractAs(internal.Int8Type).([]int8)
}
/*
Ints16 extracts column' values as []int16
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Ints16() -> {32,44}
*/
func (c *Column) Ints16() []int16 {
return c.ExtractAs(internal.Int16Type).([]int16)
}
/*
Ints32 extracts column' values as []int32
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Ints32() -> {32,44}
*/
func (c *Column) Ints32() []int32 {
return c.ExtractAs(internal.Int32Type).([]int32)
}
/*
Ints64 extracts column' values as []int64
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Ints64() -> {32,44}
*/
func (c *Column) Ints64() []int64 {
return c.ExtractAs(internal.Int64Type).([]int64)
}
/*
Uints extracts column' values as []uint
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Uints() -> {32,44}
*/
func (c *Column) Uints() []uint {
return c.ExtractAs(internal.UintType).([]uint)
}
/*
Uints8 extracts column' values as []uint8
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Uints8() -> {32,44}
*/
func (c *Column) Uints8() []uint8 {
return c.ExtractAs(internal.Uint8Type).([]uint8)
}
/*
Uints16 extracts column' values as []uint16
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Uints16() -> {32,44}
*/
func (c *Column) Uints16() []uint16 {
return c.ExtractAs(internal.Uint16Type).([]uint16)
}
/*
Uints32 extracts column' values as []uint32
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Uints32() -> {32,44}
*/
func (c *Column) Uints32() []uint32 {
return c.ExtractAs(internal.Uint32Type).([]uint32)
}
/*
Uints64 extracts column' values as []uint64
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Uints64() -> {32,44}
*/
func (c *Column) Uints64() []uint64 {
return c.ExtractAs(internal.Uint64Type).([]uint64)
}
/*
Float returns column' value converted to float32
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Rate").Float(0) -> 1.2
*/
func (c *Column) Float(row int) float32 {
return c.Index(row).Float()
}
/*
Float64 returns column' value converted to float64
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Rate").Float64(0) -> 1.2
*/
func (c *Column) Float64(row int) float64 {
return c.Index(row).Float64()
}
/*
Floats extracts column' values as []float32
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Rate").Floats() -> {1.2,1.5}
*/
func (c *Column) Floats() []float32 {
return c.ExtractAs(internal.FloatType).([]float32)
}
/*
Floats64 extracts column' values as []float64
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Rate").Floats64() -> {1.2,1.5}
*/
func (c *Column) Floats64() []float64 {
return c.ExtractAs(internal.Float64Type).([]float64)
}
/*
Interface returns column' value as is
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Rate").Interface(0).(float32) -> 1.2
t.Col("Rate").Index(0).Interface().(float32) -> 1.2
*/
func (c *Column) Interface(row int) interface{} {
return c.Index(row).Interface()
}
/*
ExtractAs extracts values as array with specified type

	t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
	t.Col("Age").ExtractAs(reflect.TypeOf("")).([]string)[0] -> "32"
	t.Col("Rate").ExtractAs(reflect.TypeOf(int(0))).([]int)[0] -> 1
*/
func (c *Column) ExtractAs(tp reflect.Type) interface{} {
	// When the element type differs, delegate the per-element conversion.
	if c.column.Type().Elem() != tp {
		return util.Convert(c.column, tp)
	}
	// Same element type: return a detached copy of the backing slice.
	n := c.column.Len()
	out := reflect.MakeSlice(c.column.Type(), n, n)
	reflect.Copy(out, c.column)
	return out.Interface()
}
/*
Inspect returns raw array of column's values
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Name").Inspect().([]string)[0] -> "Ivanov"
t.Col("Age").Inspect().([]int)[0] -> 32
t.Col("Rate").Inspect().([]float32)[0] -> 1.2
*/
func (c *Column) Inspect() interface{} {
return c.column.Interface()
}
/*
Type returns (reflect) type of column' values
*/
func (c *Column) Type() reflect.Type {
return c.column.Type().Elem()
}
/*
Len returns length of column
t := tables.New([]struct{Name string}{{"Ivanov"}})
c1 := t.Col("Name")
t.Append([]struct{Name string}{{"Petrov"}})
c2 := t.Col("Name")
c1.Len() -> 1
c2.Len() -> 2
*/
func (c *Column) Len() int {
return c.column.Len()
}
/*
Unique returns column with only unique values

	t := tables.New([]struct{Name string}{{"Ivanov"}})
	u1 := t.Col("Name").Unique()
	t = t.Append([]struct{Name string}{{"Petrov"},{"Petrov"}})
	u2 := t.Col("Name").Unique()
	u1.Unique().Inspect() -> {}
	u2.Unique().Len() -> 2
*/
func (c *Column) Unique() *Column {
	v := reflect.ValueOf(true)
	// A reflected map[elemType]bool acts as the set of already-seen values.
	m := reflect.MakeMap(reflect.MapOf(c.column.Type().Elem(), v.Type()))
	r := reflect.MakeSlice(c.column.Type(), 0, 0)
	for i := 0; i < c.column.Len(); i++ {
		x := c.column.Index(i)
		q := m.MapIndex(x)
		if !q.IsValid() {
			// First occurrence: keep the value and mark it as seen.
			r = reflect.Append(r, x)
			m.SetMapIndex(x, v)
		}
	}
	// NOTE(review): the result drops the source column's NA bits — confirm
	// that is intended.
	return &Column{r, util.Bits{}}
}
/*
Index returns cell with value at specified index
t := tables.New([]struct{Age int}{{"33"}})
c := t.Col("Age").Index(0)
c.String() -> "33"
c.Float() -> 33.0
c.Int() -> 33
*/
func (c *Column) Index(i int) Cell {
return Cell{c.column.Index(i)}
}
/*
Max returns cell with max column' maximal value
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Max().Int() -> 44
t.Col("Rate").Max().Float() -> 1.5
*/
func (c *Column) Max() Cell {
return Cell{util.Max(c.column)}
}
/*
Min returns cell with column' minimal value
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").Min().Int() -> 32
t.Col("Rate").Min().Float() -> 1.2
*/
func (c *Column) Min() Cell {
return Cell{util.Min(c.column)}
}
/*
MaxIndex returns index of first column' maximal value
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").MaxIndex() -> 1
*/
func (c *Column) MaxIndex() int {
return util.MaxIndex(c.column)
}
/*
MinIndex returns index of first column' minimal value
t := table.New([]struct{Name string; Age int; Rate float}{{"Ivanov",32,1.2},{"Petrov",44,1.5}})
t.Col("Age").MinIndex() -> 0
*/
func (c *Column) MinIndex() int {
return util.MinIndex(c.column)
} | tables/column.go | 0.71721 | 0.413004 | column.go | starcoder |
package consensus
import (
"context"
"fmt"
"github.com/filecoin-project/go-filecoin/internal/pkg/block"
"github.com/filecoin-project/go-filecoin/internal/pkg/clock"
)
// BlockValidator defines an interface used to validate a blocks syntax and
// semantics.
type BlockValidator interface {
BlockSemanticValidator
BlockSyntaxValidator
}
// SyntaxValidator defines and interface used to validate block's syntax and the
// syntax of constituent messages
type SyntaxValidator interface {
BlockSyntaxValidator
}
// BlockSemanticValidator defines an interface used to validate a blocks
// semantics.
type BlockSemanticValidator interface {
ValidateSemantic(ctx context.Context, child *block.Block, parents block.TipSet) error
}
// BlockSyntaxValidator defines an interface used to validate a blocks
// syntax.
type BlockSyntaxValidator interface {
ValidateSyntax(ctx context.Context, blk *block.Block) error
}
// DefaultBlockValidator implements the BlockValidator interface.
type DefaultBlockValidator struct {
clock.ChainEpochClock
}
// NewDefaultBlockValidator returns a new DefaultBlockValidator. It uses `blkTime`
// to validate blocks and uses the DefaultBlockValidationClock.
func NewDefaultBlockValidator(c clock.ChainEpochClock) *DefaultBlockValidator {
return &DefaultBlockValidator{
ChainEpochClock: c,
}
}
// NotFutureBlock errors if the block belongs to a future epoch according to
// the chain clock.
func (dv *DefaultBlockValidator) NotFutureBlock(b *block.Block) error {
	currentEpoch := dv.EpochAtTime(dv.Now())
	if b.Height > currentEpoch {
		// Fixed the error-message grammar ("generate in" -> "generated in")
		// to match the wording used by TimeMatchesEpoch.
		return fmt.Errorf("block %s with timestamp %d generated in future epoch %d", b.Cid().String(), b.Timestamp, b.Height)
	}
	return nil
}
// TimeMatchesEpoch errors if the epoch and time don't match according to the
// chain clock.
func (dv *DefaultBlockValidator) TimeMatchesEpoch(b *block.Block) error {
	earliest, latest := dv.EpochRangeAtTimestamp(b.Timestamp)
	epoch := b.Height
	// Accept any epoch inside the inclusive window the clock allows for
	// this timestamp.
	if epoch >= earliest && epoch <= latest {
		return nil
	}
	return fmt.Errorf(
		"block %s with timestamp %d generated in wrong epoch %d, expected epoch in range [%d, %d]",
		b.Cid().String(),
		b.Timestamp,
		b.Height,
		earliest,
		latest,
	)
}
// ValidateSemantic checks validation conditions on a header that can be
// checked given only the parent header.
func (dv *DefaultBlockValidator) ValidateSemantic(ctx context.Context, child *block.Block, parents block.TipSet) error {
	parentHeight, err := parents.Height()
	if err != nil {
		return err
	}
	// A child must be strictly taller than its parent tipset.
	if child.Height > parentHeight {
		return nil
	}
	return fmt.Errorf("block %s has invalid height %d", child.Cid().String(), child.Height)
}
// ValidateSyntax validates a single block is correctly formed: it must not
// be from the future, its timestamp must agree with its epoch, and its
// state root, miner address, ticket, and signature must be present.
// TODO this is an incomplete implementation #3277
func (dv *DefaultBlockValidator) ValidateSyntax(ctx context.Context, blk *block.Block) error {
	// TODO special handling for genesis block #3121
	if blk.Height == 0 {
		return nil
	}
	err := dv.NotFutureBlock(blk)
	if err != nil {
		return err
	}
	err = dv.TimeMatchesEpoch(blk)
	if err != nil {
		return err
	}
	if !blk.StateRoot.Defined() {
		return fmt.Errorf("block %s has nil StateRoot", blk.Cid())
	}
	if blk.Miner.Empty() {
		return fmt.Errorf("block %s has nil miner address", blk.Cid())
	}
	if len(blk.Ticket.VRFProof) == 0 {
		return fmt.Errorf("block %s has nil ticket", blk.Cid())
	}
	if blk.BlockSig == nil {
		return fmt.Errorf("block %s has nil signature", blk.Cid())
	}
	return nil
}
package heisenberg
import (
"fmt"
"math"
"math/cmplx"
)
// Dense64 is an algebriac matrix
type Dense64 struct {
R, C int
Matrix []complex64
}
func (a Dense64) String() string {
output := ""
for i := 0; i < a.R; i++ {
for j := 0; j < a.C; j++ {
output += fmt.Sprintf("%f ", a.Matrix[i*a.C+j])
}
output += fmt.Sprintf("\n")
}
return output
}
// MachineDense64 is a 64 bit dense matrix machine
type MachineDense64 struct {
Dense64
Qubits int
}
// Zero adds a zero to the matrix
func (a *MachineDense64) Zero() Qubit {
qubit := Qubit(a.Qubits)
a.Qubits++
zero := Dense64{
R: 2,
C: 1,
Matrix: []complex64{
1, 0,
},
}
if qubit == 0 {
a.Dense64 = zero
return qubit
}
a.Dense64 = *a.Tensor(&zero)
return qubit
}
// One adds a one to the matrix
func (a *MachineDense64) One() Qubit {
qubit := Qubit(a.Qubits)
a.Qubits++
one := Dense64{
R: 2,
C: 1,
Matrix: []complex64{
0, 1,
},
}
if qubit == 0 {
a.Dense64 = one
return qubit
}
a.Dense64 = *a.Tensor(&one)
return qubit
}
// Tensor returns the Kronecker (tensor) product a ⊗ b.
func (a *Dense64) Tensor(b *Dense64) *Dense64 {
	output := make([]complex64, 0, len(a.Matrix)*len(b.Matrix))
	// Each element of a scales a full copy of b; iterating block rows
	// (x,y) then block columns (i,j) emits the result in row-major order.
	for x := 0; x < a.R; x++ {
		for y := 0; y < b.R; y++ {
			for i := 0; i < a.C; i++ {
				for j := 0; j < b.C; j++ {
					output = append(output, a.Matrix[x*a.C+i]*b.Matrix[y*b.C+j])
				}
			}
		}
	}
	return &Dense64{
		R:      a.R * b.R,
		C:      a.C * b.C,
		Matrix: output,
	}
}
// Multiply returns the matrix product a*b. It panics when the inner
// dimensions disagree.
func (a *Dense64) Multiply(b *Dense64) *Dense64 {
	if a.C != b.R {
		panic("invalid dimensions")
	}
	output := make([]complex64, 0, a.R*b.C)
	// BUG FIX: elements must be emitted row-major (row outer, column
	// inner) to match the Matrix[i*C+j] indexing used everywhere else.
	// The original column-major emission was only coincidentally correct
	// when b was a column vector (b.C == 1); for general matrices it
	// produced the transpose of the product.
	for x := 0; x < a.R; x++ {
		for j := 0; j < b.C; j++ {
			var sum complex64
			for y := 0; y < a.C; y++ {
				sum += a.Matrix[x*a.C+y] * b.Matrix[y*b.C+j]
			}
			output = append(output, sum)
		}
	}
	return &Dense64{
		R:      a.R,
		C:      b.C,
		Matrix: output,
	}
}
// Transpose transposes the matrix in place.
func (a *Dense64) Transpose() {
	// BUG FIX: the original wrote transposed elements back into the same
	// slice while still needing the originals, corrupting any matrix
	// larger than 1x1. Build the result in a fresh slice instead.
	out := make([]complex64, len(a.Matrix))
	for i := 0; i < a.R; i++ {
		for j := 0; j < a.C; j++ {
			out[j*a.R+i] = a.Matrix[i*a.C+j]
		}
	}
	a.Matrix = out
	a.R, a.C = a.C, a.R
}
// Copy returns a deep copy of the matrix.
func (a *Dense64) Copy() *Dense64 {
	m := make([]complex64, len(a.Matrix))
	copy(m, a.Matrix)
	return &Dense64{
		R:      a.R,
		C:      a.C,
		Matrix: m,
	}
}
// ControlledNot applies a multi-controlled NOT gate: the target qubit t is
// flipped in every basis state where all control qubits c are 1. The gate
// is constructed as a row permutation of the 2^n x 2^n identity, applied
// to the state vector, and returned.
func (a *MachineDense64) ControlledNot(c []Qubit, t Qubit) *Dense64 {
	n := a.Qubits
	// p is the 2x2 identity; q grows into the 2^n x 2^n identity.
	p := &Dense64{
		R: 2,
		C: 2,
		Matrix: []complex64{
			1, 0,
			0, 1,
		},
	}
	q := p
	for i := 0; i < n-1; i++ {
		q = p.Tensor(q)
	}
	d := q.R
	// For each basis state i compute the basis state it maps to. Qubit 0
	// is the most significant bit of the basis index.
	index := make([]int64, 0)
	for i := 0; i < d; i++ {
		bits := int64(i)
		// Apply X
		apply := true
		for _, j := range c {
			if (bits>>(Qubit(n-1)-j))&1 == 0 {
				apply = false
				break
			}
		}
		if apply {
			// All controls are set: toggle the target bit.
			if (bits>>(Qubit(n-1)-t))&1 == 0 {
				bits |= 1 << (Qubit(n-1) - t)
			} else {
				bits &= ^(1 << (Qubit(n-1) - t))
			}
		}
		index = append(index, bits)
	}
	// Assemble the permutation matrix by copying identity rows into their
	// mapped positions, then apply it to the state vector.
	g := Dense64{
		R:      q.R,
		C:      q.C,
		Matrix: make([]complex64, q.R*q.C),
	}
	for i, ii := range index {
		copy(g.Matrix[i*g.C:(i+1)*g.C], q.Matrix[int(ii)*g.C:int(ii+1)*g.C])
	}
	a.Dense64 = *g.Multiply(&a.Dense64)
	return &g
}
// Multiply applies the single-qubit gate b to every qubit listed in
// qubits: the full 2^n x 2^n operator is built as a tensor product with b
// at the listed positions and the identity everywhere else, then
// multiplied into the state vector.
func (a *MachineDense64) Multiply(b *Dense64, qubits ...Qubit) {
	indexes := make(map[int]bool)
	for _, value := range qubits {
		indexes[int(value)] = true
	}
	identity := IDense64()
	d := IDense64()
	// Seed the product with either the gate or the identity for qubit 0,
	// then extend one factor per remaining qubit.
	if indexes[0] {
		d = b.Copy()
	}
	for i := 1; i < a.Qubits; i++ {
		if indexes[i] {
			d = d.Tensor(b)
			continue
		}
		d = d.Tensor(identity)
	}
	a.Dense64 = *d.Multiply(&a.Dense64)
}
// IDense64 identity matrix
func IDense64() *Dense64 {
return &Dense64{
R: 2,
C: 2,
Matrix: []complex64{
1, 0,
0, 1,
},
}
}
// I multiply by identity
func (a *MachineDense64) I(qubits ...Qubit) *MachineDense64 {
a.Multiply(IDense64(), qubits...)
return a
}
// HDense64 Hadamard matrix
func HDense64() *Dense64 {
v := complex(1/math.Sqrt2, 0)
return &Dense64{
R: 2,
C: 2,
Matrix: []complex64{
complex64(v), complex64(v),
complex64(v), complex64(-v),
},
}
}
// H multiply by Hadamard gate
func (a *MachineDense64) H(qubits ...Qubit) *MachineDense64 {
a.Multiply(HDense64(), qubits...)
return a
}
// XDense64 Pauli X matrix
func XDense64() *Dense64 {
return &Dense64{
R: 2,
C: 2,
Matrix: []complex64{
0, 1,
1, 0,
},
}
}
// X multiply by Pauli X matrix
func (a *MachineDense64) X(qubits ...Qubit) *MachineDense64 {
a.Multiply(XDense64(), qubits...)
return a
}
// YDense64 Pauli Y matrix
func YDense64() *Dense64 {
return &Dense64{
R: 2,
C: 2,
Matrix: []complex64{
0, -1i,
1i, 0,
},
}
}
// Y multiply by Pauli Y matrix
func (a *MachineDense64) Y(qubits ...Qubit) *MachineDense64 {
a.Multiply(YDense64(), qubits...)
return a
}
// ZDense64 Pauli Z matrix
func ZDense64() *Dense64 {
return &Dense64{
R: 2,
C: 2,
Matrix: []complex64{
1, 0,
0, -1,
},
}
}
// Z multiply by Pauli Z matrix
func (a *MachineDense64) Z(qubits ...Qubit) *MachineDense64 {
a.Multiply(ZDense64(), qubits...)
return a
}
// SDense64 phase gate
func SDense64() *Dense64 {
return &Dense64{
R: 2,
C: 2,
Matrix: []complex64{
1, 0,
0, 1i,
},
}
}
// S multiply by phase matrix
func (a *MachineDense64) S(qubits ...Qubit) *MachineDense64 {
a.Multiply(SDense64(), qubits...)
return a
}
// TDense64 T gate
func TDense64() *Dense64 {
return &Dense64{
R: 2,
C: 2,
Matrix: []complex64{
1, 0,
0, complex64(cmplx.Exp(1i * math.Pi / 4)),
},
}
}
// T multiply by T matrix
func (a *MachineDense64) T(qubits ...Qubit) *MachineDense64 {
a.Multiply(TDense64(), qubits...)
return a
}
// UDense64 U gate
func UDense64(theta, phi, lambda float64) *Dense64 {
v := complex(theta/2, 0)
return &Dense64{
R: 2,
C: 2,
Matrix: []complex64{
complex64(cmplx.Cos(v)), complex64(-1 * cmplx.Exp(complex(0, lambda)) * cmplx.Sin(v)),
complex64(cmplx.Exp(complex(0, phi)) * cmplx.Sin(v)), complex64(cmplx.Exp(complex(0, (phi+lambda))) * cmplx.Cos(v)),
},
}
}
// U multiply by U matrix
func (a *MachineDense64) U(theta, phi, lambda float64, qubits ...Qubit) *MachineDense64 {
a.Multiply(UDense64(theta, phi, lambda), qubits...)
return a
}
// RXDense64 x rotation matrix
func RXDense64(theta complex128) *Dense64 {
return &Dense64{
R: 2,
C: 2,
Matrix: []complex64{
complex64(cmplx.Cos(complex128(theta))), -1i * complex64(cmplx.Sin(complex128(theta))),
-1i * complex64(cmplx.Sin(complex128(theta))), complex64(cmplx.Cos(complex128(theta))),
},
}
}
// RX rotate X gate
func (a *MachineDense64) RX(theta float64, qubits ...Qubit) *MachineDense64 {
a.Multiply(RXDense64(complex(theta/2, 0)), qubits...)
return a
}
// RYDense64 y rotation matrix
func RYDense64(theta complex128) *Dense64 {
return &Dense64{
R: 2,
C: 2,
Matrix: []complex64{
complex64(cmplx.Cos(complex128(theta))), -1 * complex64(cmplx.Sin(complex128(theta))),
complex64(cmplx.Sin(complex128(theta))), complex64(cmplx.Cos(complex128(theta))),
},
}
}
// RY rotate Y gate
func (a *MachineDense64) RY(theta float64, qubits ...Qubit) *MachineDense64 {
a.Multiply(RYDense64(complex(theta/2, 0)), qubits...)
return a
}
// RZDense64 z rotation matrix
func RZDense64(theta complex128) *Dense64 {
return &Dense64{
R: 2,
C: 2,
Matrix: []complex64{
complex64(cmplx.Exp(-1 * complex128(theta))), 0,
0, complex64(cmplx.Exp(complex128(theta))),
},
}
}
// RZ rotate Z gate
func (a *MachineDense64) RZ(theta float64, qubits ...Qubit) *MachineDense64 {
a.Multiply(RZDense64(complex(theta/2, 0)), qubits...)
return a
}
// Swap swaps qubits`
func (a *MachineDense64) Swap(qubits ...Qubit) *MachineDense64 {
length := len(qubits)
for i := 0; i < length/2; i++ {
c, t := qubits[i], qubits[(length-1)-i]
a.ControlledNot([]Qubit{c}, t)
a.ControlledNot([]Qubit{t}, c)
a.ControlledNot([]Qubit{c}, t)
}
return a
}
// Dense128 is an algebriac matrix
type Dense128 struct {
R, C int
Matrix []complex128
}
func (a Dense128) String() string {
output := ""
for i := 0; i < a.R; i++ {
for j := 0; j < a.C; j++ {
output += fmt.Sprintf("%f ", a.Matrix[i*a.C+j])
}
output += fmt.Sprintf("\n")
}
return output
}
// MachineDense128 is a 128 bit dense matrix machine
type MachineDense128 struct {
Dense128
Qubits int
}
// Zero adds a zero to the matrix
func (a *MachineDense128) Zero() Qubit {
qubit := Qubit(a.Qubits)
a.Qubits++
zero := Dense128{
R: 2,
C: 1,
Matrix: []complex128{
1, 0,
},
}
if qubit == 0 {
a.Dense128 = zero
return qubit
}
a.Dense128 = *a.Tensor(&zero)
return qubit
}
// One adds a one to the matrix
func (a *MachineDense128) One() Qubit {
qubit := Qubit(a.Qubits)
a.Qubits++
one := Dense128{
R: 2,
C: 1,
Matrix: []complex128{
0, 1,
},
}
if qubit == 0 {
a.Dense128 = one
return qubit
}
a.Dense128 = *a.Tensor(&one)
return qubit
}
// Tensor product is the tensor product
func (a *Dense128) Tensor(b *Dense128) *Dense128 {
output := make([]complex128, 0, len(a.Matrix)*len(b.Matrix))
for x := 0; x < a.R; x++ {
for y := 0; y < b.R; y++ {
for i := 0; i < a.C; i++ {
for j := 0; j < b.C; j++ {
output = append(output, a.Matrix[x*a.C+i]*b.Matrix[y*b.C+j])
}
}
}
}
return &Dense128{
R: a.R * b.R,
C: a.C * b.C,
Matrix: output,
}
}
// Multiply returns the matrix product a*b. It panics when the inner
// dimensions disagree.
func (a *Dense128) Multiply(b *Dense128) *Dense128 {
	if a.C != b.R {
		panic("invalid dimensions")
	}
	output := make([]complex128, 0, a.R*b.C)
	// BUG FIX: emit row-major (row outer, column inner) to match the
	// Matrix[i*C+j] indexing used everywhere else; the original
	// column-major order was only correct for column vectors (b.C == 1).
	for x := 0; x < a.R; x++ {
		for j := 0; j < b.C; j++ {
			var sum complex128
			for y := 0; y < a.C; y++ {
				sum += a.Matrix[x*a.C+y] * b.Matrix[y*b.C+j]
			}
			output = append(output, sum)
		}
	}
	return &Dense128{
		R:      a.R,
		C:      b.C,
		Matrix: output,
	}
}
// Transpose transposes the matrix in place.
func (a *Dense128) Transpose() {
	// BUG FIX: the original overwrote elements it still had to read,
	// corrupting any matrix larger than 1x1; build into a fresh slice.
	out := make([]complex128, len(a.Matrix))
	for i := 0; i < a.R; i++ {
		for j := 0; j < a.C; j++ {
			out[j*a.R+i] = a.Matrix[i*a.C+j]
		}
	}
	a.Matrix = out
	a.R, a.C = a.C, a.R
}
// Copy copies a matrix`
func (a *Dense128) Copy() *Dense128 {
cp := &Dense128{
R: a.R,
C: a.C,
Matrix: make([]complex128, len(a.Matrix)),
}
copy(cp.Matrix, a.Matrix)
return cp
}
// ControlledNot controlled not gate
func (a *MachineDense128) ControlledNot(c []Qubit, t Qubit) *Dense128 {
n := a.Qubits
p := &Dense128{
R: 2,
C: 2,
Matrix: []complex128{
1, 0,
0, 1,
},
}
q := p
for i := 0; i < n-1; i++ {
q = p.Tensor(q)
}
d := q.R
index := make([]int64, 0)
for i := 0; i < d; i++ {
bits := int64(i)
// Apply X
apply := true
for _, j := range c {
if (bits>>(Qubit(n-1)-j))&1 == 0 {
apply = false
break
}
}
if apply {
if (bits>>(Qubit(n-1)-t))&1 == 0 {
bits |= 1 << (Qubit(n-1) - t)
} else {
bits &= ^(1 << (Qubit(n-1) - t))
}
}
index = append(index, bits)
}
g := Dense128{
R: q.R,
C: q.C,
Matrix: make([]complex128, q.R*q.C),
}
for i, ii := range index {
copy(g.Matrix[i*g.C:(i+1)*g.C], q.Matrix[int(ii)*g.C:int(ii+1)*g.C])
}
a.Dense128 = *g.Multiply(&a.Dense128)
return &g
}
// Multiply multiplies the machine by a matrix
func (a *MachineDense128) Multiply(b *Dense128, c ...Qubit) {
indexes := make(map[int]bool)
for _, value := range c {
indexes[int(value)] = true
}
identity := IDense128()
d := IDense128()
if indexes[0] {
d = b.Copy()
}
for i := 1; i < a.Qubits; i++ {
if indexes[i] {
d = d.Tensor(b)
continue
}
d = d.Tensor(identity)
}
a.Dense128 = *d.Multiply(&a.Dense128)
}
// IDense128 identity matrix
func IDense128() *Dense128 {
return &Dense128{
R: 2,
C: 2,
Matrix: []complex128{
1, 0,
0, 1,
},
}
}
// I multiply by identity
func (a *MachineDense128) I(qubits ...Qubit) *MachineDense128 {
a.Multiply(IDense128(), qubits...)
return a
}
// HDense128 Hadamard matrix
func HDense128() *Dense128 {
v := complex(1/math.Sqrt2, 0)
return &Dense128{
R: 2,
C: 2,
Matrix: []complex128{
v, v,
v, -v,
},
}
}
// H multiply by Hadamard gate
func (a *MachineDense128) H(qubits ...Qubit) *MachineDense128 {
a.Multiply(HDense128(), qubits...)
return a
}
// XDense128 Pauli X matrix
func XDense128() *Dense128 {
return &Dense128{
R: 2,
C: 2,
Matrix: []complex128{
0, 1,
1, 0,
},
}
}
// X multiply by Pauli X matrix
func (a *MachineDense128) X(qubits ...Qubit) *MachineDense128 {
a.Multiply(XDense128(), qubits...)
return a
}
// YDense128 Pauli Y matrix
func YDense128() *Dense128 {
return &Dense128{
R: 2,
C: 2,
Matrix: []complex128{
0, -1i,
1i, 0,
},
}
}
// Y multiply by Pauli Y matrix
func (a *MachineDense128) Y(qubits ...Qubit) *MachineDense128 {
a.Multiply(YDense128(), qubits...)
return a
}
// ZDense128 Pauli Z matrix
func ZDense128() *Dense128 {
return &Dense128{
R: 2,
C: 2,
Matrix: []complex128{
1, 0,
0, -1,
},
}
}
// Z multiply by Pauli Z matrix
func (a *MachineDense128) Z(qubits ...Qubit) *MachineDense128 {
a.Multiply(ZDense128(), qubits...)
return a
}
// SDense128 phase gate (sqrt(Z)): diag(1, i).
func SDense128() *Dense128 {
	return &Dense128{
		R: 2,
		C: 2,
		Matrix: []complex128{
			1, 0,
			0, 1i,
		},
	}
}
// S multiply by phase matrix, applied to the given qubits.
func (a *MachineDense128) S(qubits ...Qubit) *MachineDense128 {
	a.Multiply(SDense128(), qubits...)
	return a
}
// TDense128 T gate (pi/8 gate): diag(1, e^{i*pi/4}).
func TDense128() *Dense128 {
	return &Dense128{
		R: 2,
		C: 2,
		Matrix: []complex128{
			1, 0,
			0, cmplx.Exp(1i * math.Pi / 4),
		},
	}
}
// T multiply by T matrix, applied to the given qubits.
func (a *MachineDense128) T(qubits ...Qubit) *MachineDense128 {
	a.Multiply(TDense128(), qubits...)
	return a
}
// UDense128 U gate: the general single-qubit rotation parameterized by
// theta (rotation angle), phi and lambda (phase angles), all in radians.
func UDense128(theta, phi, lambda float64) *Dense128 {
	// v holds theta/2; the matrix uses cos(theta/2) and sin(theta/2)
	v := complex(theta/2, 0)
	return &Dense128{
		R: 2,
		C: 2,
		Matrix: []complex128{
			cmplx.Cos(v), -1 * cmplx.Exp(complex(0, lambda)) * cmplx.Sin(v),
			cmplx.Exp(complex(0, phi)) * cmplx.Sin(v), cmplx.Exp(complex(0, (phi+lambda))) * cmplx.Cos(v),
		},
	}
}
// U multiply by U matrix with the given angles, applied to the given qubits.
func (a *MachineDense128) U(theta, phi, lambda float64, qubits ...Qubit) *MachineDense128 {
	a.Multiply(UDense128(theta, phi, lambda), qubits...)
	return a
}
// RXDense128 x rotation matrix for the HALF angle theta (callers pass theta/2):
// [[cos θ, -i sin θ], [-i sin θ, cos θ]].
func RXDense128(theta complex128) *Dense128 {
	return &Dense128{
		R: 2,
		C: 2,
		Matrix: []complex128{
			cmplx.Cos(theta), -1i * cmplx.Sin(theta),
			-1i * cmplx.Sin(theta), cmplx.Cos(theta),
		},
	}
}
// RX rotate X gate by theta radians (half-angle convention handled here).
func (a *MachineDense128) RX(theta float64, qubits ...Qubit) *MachineDense128 {
	a.Multiply(RXDense128(complex(theta/2, 0)), qubits...)
	return a
}
// RYDense128 y rotation matrix for the HALF angle theta (callers pass theta/2):
// [[cos θ, -sin θ], [sin θ, cos θ]].
func RYDense128(theta complex128) *Dense128 {
	return &Dense128{
		R: 2,
		C: 2,
		Matrix: []complex128{
			cmplx.Cos(theta), -1 * cmplx.Sin(theta),
			cmplx.Sin(theta), cmplx.Cos(theta),
		},
	}
}
// RY rotate Y gate by theta radians (half-angle convention handled here).
func (a *MachineDense128) RY(theta float64, qubits ...Qubit) *MachineDense128 {
	a.Multiply(RYDense128(complex(theta/2, 0)), qubits...)
	return a
}
// RZDense128 z rotation matrix for the HALF angle theta (callers pass theta/2):
// diag(e^{-iθ}, e^{iθ}), so that RZ(θ) = diag(e^{-iθ/2}, e^{iθ/2}).
func RZDense128(theta complex128) *Dense128 {
	return &Dense128{
		R: 2,
		C: 2,
		Matrix: []complex128{
			// BUG FIX: the exponent must be imaginary (e^{±iθ}); the previous
			// code computed the real exponential exp(±θ), which is not a
			// unitary rotation (cf. RX/RY above, which correctly use ±i).
			cmplx.Exp(-1i * theta), 0,
			0, cmplx.Exp(1i * theta),
		},
	}
}
// RZ rotate Z gate by theta radians (half-angle convention handled here).
func (a *MachineDense128) RZ(theta float64, qubits ...Qubit) *MachineDense128 {
	a.Multiply(RZDense128(complex(theta/2, 0)), qubits...)
	return a
}
// Swap swaps qubits`
func (a *MachineDense128) Swap(qubits ...Qubit) *MachineDense128 {
length := len(qubits)
for i := 0; i < length/2; i++ {
c, t := qubits[i], qubits[(length-1)-i]
a.ControlledNot([]Qubit{c}, t)
a.ControlledNot([]Qubit{t}, c)
a.ControlledNot([]Qubit{c}, t)
}
return a
} | dense.go | 0.741487 | 0.535463 | dense.go | starcoder |
package v1alpha1
// NOTE(review): the empty *Expansion interfaces below follow the standard
// Kubernetes code-generator "lister expansion" pattern — each is an extension
// point where hand-written helper methods for the corresponding lister may be
// declared. Presumably this file is code-generated; confirm before editing.
// DataTransferConfigListerExpansion allows custom methods to be added to
// DataTransferConfigLister.
type DataTransferConfigListerExpansion interface{}
// DataTransferConfigNamespaceListerExpansion allows custom methods to be added to
// DataTransferConfigNamespaceLister.
type DataTransferConfigNamespaceListerExpansion interface{}
// DatasetListerExpansion allows custom methods to be added to
// DatasetLister.
type DatasetListerExpansion interface{}
// DatasetNamespaceListerExpansion allows custom methods to be added to
// DatasetNamespaceLister.
type DatasetNamespaceListerExpansion interface{}
// DatasetAccessListerExpansion allows custom methods to be added to
// DatasetAccessLister.
type DatasetAccessListerExpansion interface{}
// DatasetAccessNamespaceListerExpansion allows custom methods to be added to
// DatasetAccessNamespaceLister.
type DatasetAccessNamespaceListerExpansion interface{}
// DatasetIamBindingListerExpansion allows custom methods to be added to
// DatasetIamBindingLister.
type DatasetIamBindingListerExpansion interface{}
// DatasetIamBindingNamespaceListerExpansion allows custom methods to be added to
// DatasetIamBindingNamespaceLister.
type DatasetIamBindingNamespaceListerExpansion interface{}
// DatasetIamMemberListerExpansion allows custom methods to be added to
// DatasetIamMemberLister.
type DatasetIamMemberListerExpansion interface{}
// DatasetIamMemberNamespaceListerExpansion allows custom methods to be added to
// DatasetIamMemberNamespaceLister.
type DatasetIamMemberNamespaceListerExpansion interface{}
// DatasetIamPolicyListerExpansion allows custom methods to be added to
// DatasetIamPolicyLister.
type DatasetIamPolicyListerExpansion interface{}
// DatasetIamPolicyNamespaceListerExpansion allows custom methods to be added to
// DatasetIamPolicyNamespaceLister.
type DatasetIamPolicyNamespaceListerExpansion interface{}
// JobListerExpansion allows custom methods to be added to
// JobLister.
type JobListerExpansion interface{}
// JobNamespaceListerExpansion allows custom methods to be added to
// JobNamespaceLister.
type JobNamespaceListerExpansion interface{}
// ReservationListerExpansion allows custom methods to be added to
// ReservationLister.
type ReservationListerExpansion interface{}
// ReservationNamespaceListerExpansion allows custom methods to be added to
// ReservationNamespaceLister.
type ReservationNamespaceListerExpansion interface{}
// RoutineListerExpansion allows custom methods to be added to
// RoutineLister.
type RoutineListerExpansion interface{}
// RoutineNamespaceListerExpansion allows custom methods to be added to
// RoutineNamespaceLister.
type RoutineNamespaceListerExpansion interface{}
// TableListerExpansion allows custom methods to be added to
// TableLister.
type TableListerExpansion interface{}
// TableNamespaceListerExpansion allows custom methods to be added to
// TableNamespaceLister.
type TableNamespaceListerExpansion interface{}
// TableIamBindingListerExpansion allows custom methods to be added to
// TableIamBindingLister.
type TableIamBindingListerExpansion interface{}
// TableIamBindingNamespaceListerExpansion allows custom methods to be added to
// TableIamBindingNamespaceLister.
type TableIamBindingNamespaceListerExpansion interface{}
// TableIamMemberListerExpansion allows custom methods to be added to
// TableIamMemberLister.
type TableIamMemberListerExpansion interface{}
// TableIamMemberNamespaceListerExpansion allows custom methods to be added to
// TableIamMemberNamespaceLister.
type TableIamMemberNamespaceListerExpansion interface{}
// TableIamPolicyListerExpansion allows custom methods to be added to
// TableIamPolicyLister.
type TableIamPolicyListerExpansion interface{}
// TableIamPolicyNamespaceListerExpansion allows custom methods to be added to
// TableIamPolicyNamespaceLister.
type TableIamPolicyNamespaceListerExpansion interface{}
// package shp implements shape structures/routines
package shp
import (
"github.com/cpmech/gosl/chk"
"github.com/cpmech/gosl/gm"
"github.com/cpmech/gosl/io"
"github.com/cpmech/gosl/la"
"github.com/cpmech/gosl/utl"
)
// constants
const MINDET = 1.0e-14 // minimum determinant allowed for dxdR (below this, inversion fails)
// ShpFunc is the shape functions callback function
//   S       -- (output) shape function values at r
//   dSdR    -- (output) derivatives of S w.r.t natural coordinates (filled only when derivs is true)
//   r       -- natural (local) coordinates
//   derivs  -- whether dSdR must be computed
//   idxface -- local face index, or -1 when evaluating volume (non-face) functions
type ShpFunc func(S []float64, dSdR [][]float64, r []float64, derivs bool, idxface int)
// Shape holds geometry data for one cell/element type, plus scratchpad arrays
// that are (re)filled by CalcAtIp / CalcAtFaceIp. A Shape value is therefore
// stateful and not safe for concurrent use (see Get's goroutineId parameter).
type Shape struct {
	// geometry
	Type string // name; e.g. "lin2"
	Func ShpFunc // shape/derivs function callback function
	FaceType string // geometry of face; e.g. "qua8" => "lin3"
	Gndim int // geometry of shape; e.g. "lin3" => gnd == 1 (even in 3D simulations)
	Nverts int // number of vertices in cell; e.g. "qua8" => 8
	VtkCode int // VTK code
	VtkNverts int // number of vertices to use in VTK file; e.g. "qua9" => 8 vertices
	NatCoords [][]float64 // natural coordinates [gndim][nverts]
	// face data
	FaceFunc ShpFunc // face shape/derivs function callback function
	FaceNvertsMax int // max number of vertices on face
	FaceLocalVerts [][]int // face local vertices [nfaces][...]
	FaceFlip []bool // [nfaces] flip normal
	// basic type => for plotting or LBB cells
	BasicType string // geometry of basic cell; e.g. "qua8" => "qua4"
	BasicNverts int // number of vertices in basic cell; e.g. 8 => 4
	BasicVtkCode int // VTK code of basic cell
	// geometry: for seams (3D-edges)
	SeamType int // geometry of seam (3D-edge); e.g. "hex8" => "lin2"
	SeamLocalVerts [][]int // seam (3d-edge) local vertices [nseams][nVertsOnSeam]
	// scratchpad: volume
	S []float64 // [nverts] shape functions
	G [][]float64 // [nverts][gndim] G == dSdx. derivative of shape function
	J float64 // Jacobian: determinant of dxdr
	DSdR [][]float64 // [nverts][gndim] derivatives of S w.r.t natural coordinates
	DxdR [][]float64 // [gndim][gndim] derivatives of real coordinates w.r.t natural coordinates
	DRdx [][]float64 // [gndim][gndim] dRdx == inverse(dxdR)
	// scratchpad: line
	Jvec3d []float64 // Jacobian: norm of dxdr for line elements (size==3)
	Gvec []float64 // [nverts] G == dSdx. derivative of shape function
	// scratchpad: face
	Sf []float64 // [FaceNvertsMax] shape functions values
	Fnvec []float64 // [gndim] face normal vector multiplied by Jf
	DSfdRf [][]float64 // [FaceNvertsMax][gndim-1] derivatives of Sf w.r.t natural coordinates
	DxfdRf [][]float64 // [gndim][gndim-1] derivatives of real coordinates w.r.t natural coordinates
	// NURBS
	Nurbs *gm.Nurbs // pointer to NURBS structure => indicates that this shape strucutre is based on NURBS
	NurbsFaces []*gm.Nurbs // boundaries (surfaces) of NURBS [normalTo0, normalTo0, normalTo1, normalTo1, normalTo2, normalTo2]
	Span []int // NURBS knots' indices defining cell/element; e.g. [2, 3, 1, 2] for x-quad/y-lin cell
	Ibasis []int // indices of basis functions corresponding to Span == local indices of control points
	SpanFace [][]int // NURBS knots' indices defining cell/element; e.g. [2, 3, 1, 2] for x-quad/y-lin cell
	IbasisFace [][]int // indices of basis functions corresponding to Span == local indices of control points
	U []float64 // [gndim] NURBS' parametric space coordinates
}
// GetCopy returns a deep copy of this shape structure: all scratchpad slices
// and matrices are cloned so the copy can be mutated independently.
// Panics (via chk.Panic) if the shape is NURBS-based, since the NURBS pointers
// cannot be deep-copied here.
// NOTE(review): VtkNverts, BasicNverts and BasicVtkCode are not copied (they
// stay at their zero values), matching the original behavior — confirm intent.
func (o Shape) GetCopy() *Shape {
	// refuse NURBS-based shapes up front
	if o.Nurbs != nil {
		chk.Panic("cannot get a copy of Shape if it's NURBS")
	}
	faceFlip := make([]bool, len(o.FaceFlip))
	copy(faceFlip, o.FaceFlip)
	return &Shape{
		// geometry
		Type:      o.Type,
		Func:      o.Func,
		BasicType: o.BasicType,
		FaceType:  o.FaceType,
		Gndim:     o.Gndim,
		Nverts:    o.Nverts,
		VtkCode:   o.VtkCode,
		NatCoords: la.MatClone(o.NatCoords),
		// face data
		FaceFunc:       o.FaceFunc,
		FaceNvertsMax:  o.FaceNvertsMax,
		FaceLocalVerts: utl.IntClone(o.FaceLocalVerts),
		FaceFlip:       faceFlip,
		// geometry: for seams (3D-edges)
		SeamType:       o.SeamType,
		SeamLocalVerts: utl.IntClone(o.SeamLocalVerts),
		// scratchpad: volume
		S:    la.VecClone(o.S),
		G:    la.MatClone(o.G),
		J:    o.J,
		DSdR: la.MatClone(o.DSdR),
		DxdR: la.MatClone(o.DxdR),
		DRdx: la.MatClone(o.DRdx),
		// scratchpad: line
		Jvec3d: la.VecClone(o.Jvec3d),
		Gvec:   la.VecClone(o.Gvec),
		// scratchpad: face
		Sf:     la.VecClone(o.Sf),
		Fnvec:  la.VecClone(o.Fnvec),
		DSfdRf: la.MatClone(o.DSfdRf),
		DxfdRf: la.MatClone(o.DxfdRf),
	}
}
// factory holds all Shapes available
var factory = make(map[string]*Shape)
// _shapes (internal) holds pre-allocated shapes with goroutineId > 0; key => Shape
var _shapes = make(map[string]*Shape)
// Get returns an existent Shape structure
// Note: 1) returns nil on errors (unknown geoType)
// 2) use goroutineId > 0 to get a per-goroutine copy (the scratchpad
// arrays inside Shape are stateful, so sharing one across goroutines
// would corrupt results)
// NOTE(review): access to the _shapes cache is not synchronized; this assumes
// Get is only called during single-threaded setup/allocation — confirm.
func Get(geoType string, goroutineId int) *Shape {
	s, ok := factory[geoType]
	if !ok {
		return nil
	}
	if goroutineId > 0 {
		// cache one deep copy per (geoType, goroutineId) pair
		key := io.Sf("%s_%d", geoType, goroutineId)
		if shape, found := _shapes[key]; found {
			return shape
		}
		shape := s.GetCopy()
		_shapes[key] = shape
		return shape
	}
	return s
}
// IpRealCoords maps an integration point from natural to real coordinates;
// i.e. it computes y[i] = Σ_m S_m(ip) * x[i][m]. The shape-function values
// are evaluated into the o.S scratchpad (derivatives are skipped).
func (o *Shape) IpRealCoords(x [][]float64, ip Ipoint) (y []float64) {
	o.Func(o.S, o.DSdR, ip, false, -1) // fill o.S only (derivs == false)
	y = make([]float64, len(x))
	for i := range x {
		for m := 0; m < o.Nverts; m++ {
			y[i] += o.S[m] * x[i][m]
		}
	}
	return
}
// FaceIpRealCoords returns the real coordinates (y) of an integration point @ face;
// i.e. y[i] = Σ_k Sf_k(ipf) * x[i][n] with n ranging over the face's local vertices.
// TODO: check this function
func (o *Shape) FaceIpRealCoords(x [][]float64, ipf Ipoint, idxface int) (y []float64) {
	ndim := len(x)
	y = make([]float64, ndim)
	// evaluate face shape functions into o.Sf (derivs == false)
	o.FaceFunc(o.Sf, o.DSfdRf, ipf, false, idxface)
	for i := 0; i < ndim; i++ {
		for k, n := range o.FaceLocalVerts[idxface] {
			y[i] += o.Sf[k] * x[i][n]
		}
	}
	return
}
// CalcAtIp calculates volume data such as S and G at natural coordinate r
// Input:
// x[ndim][nverts+?] -- coordinates matrix of solid element
// ip -- integration point (natural coordinates)
// derivs -- whether derivatives (G, J, DxdR, DRdx) must be computed as well
// Output (stored in the Shape scratchpad):
// S, DSdR, DxdR, DRdx, G, and J
// Returns an error if the Jacobian matrix cannot be inverted (|det| < MINDET).
func (o *Shape) CalcAtIp(x [][]float64, ip Ipoint, derivs bool) (err error) {
	// S and dSdR
	o.Func(o.S, o.DSdR, ip, derivs, -1)
	if !derivs {
		return
	}
	// 1D (line) elements: the "Jacobian" is the norm of the dxdR vector,
	// since dxdR is not square and cannot be inverted
	if o.Gndim == 1 {
		// calculate Jvec3d == dxdR
		o.Jvec3d[0], o.Jvec3d[1], o.Jvec3d[2] = 0, 0, 0
		for i := 0; i < len(x); i++ {
			for m := 0; m < o.Nverts; m++ {
				o.Jvec3d[i] += x[i][m] * o.DSdR[m][0] // dxdR := x * dSdR
			}
		}
		// calculate J = norm of Jvec3d
		o.J = la.VecNorm(o.Jvec3d)
		// calculate G (derivatives w.r.t the single natural coordinate, scaled by 1/J)
		for m := 0; m < o.Nverts; m++ {
			o.Gvec[m] = o.DSdR[m][0] / o.J
		}
		return
	}
	// dxdR := sum_n x * dSdR => dx_i/dR_j := sum_n x^n_i * dS^n/dR_j
	for i := 0; i < len(x); i++ {
		for j := 0; j < o.Gndim; j++ {
			o.DxdR[i][j] = 0.0
			for n := 0; n < o.Nverts; n++ {
				o.DxdR[i][j] += x[i][n] * o.DSdR[n][j]
			}
		}
	}
	// dRdx := inv(dxdR); also yields J = det(dxdR)
	o.J, err = la.MatInv(o.DRdx, o.DxdR, MINDET)
	if err != nil {
		return
	}
	// G == dSdx := dSdR * dRdx => dS^m/dR_i := sum_i dS^m/dR_i * dR_i/dx_j
	la.MatMul(o.G, 1, o.DSdR, o.DRdx)
	return
}
// CalcAtR calculates volume data such as S and G at natural coordinate r.
// Thin wrapper over CalcAtIp (an Ipoint is used here as plain natural coordinates).
// Input:
// x[ndim][nverts+?] -- coordinates matrix of solid element
// R[3] -- local/natural coordinates
// Output:
// S, DSdR, DxdR, DRdx, G, and J (stored in the Shape scratchpad)
func (o *Shape) CalcAtR(x [][]float64, R []float64, derivs bool) (err error) {
	return o.CalcAtIp(x, R, derivs)
}
// CalcAtFaceIp calculates face data such as Sf and Fnvec
// Input:
// x[ndim][nverts+?] -- coordinates matrix of solid element
// ipf -- local/natural coordinates of face
// idxface -- local index of face
// Output (stored in the Shape scratchpad):
// Sf and Fnvec (face normal scaled by the face Jacobian)
func (o *Shape) CalcAtFaceIp(x [][]float64, ipf Ipoint, idxface int) (err error) {
	// skip 1D elements (they have no faces)
	if o.Gndim == 1 {
		return
	}
	// Sf and dSfdR (derivs == true: dSfdRf is needed for the normal below)
	o.FaceFunc(o.Sf, o.DSfdRf, ipf, true, idxface)
	// dxfdRf := sum_n x * dSfdRf => dxf_i/dRf_j := sum_n xf^n_i * dSf^n/dRf_j
	for i := 0; i < len(x); i++ {
		for j := 0; j < o.Gndim-1; j++ {
			o.DxfdRf[i][j] = 0.0
			for k, n := range o.FaceLocalVerts[idxface] {
				o.DxfdRf[i][j] += x[i][n] * o.DSfdRf[k][j]
			}
		}
	}
	// face normal vector: 2D uses the perpendicular of the tangent;
	// 3D uses the cross product of the two tangent vectors
	if o.Gndim == 2 {
		o.Fnvec[0] = o.DxfdRf[1][0]
		o.Fnvec[1] = -o.DxfdRf[0][0]
	} else {
		o.Fnvec[0] = o.DxfdRf[1][0]*o.DxfdRf[2][1] - o.DxfdRf[2][0]*o.DxfdRf[1][1]
		o.Fnvec[1] = o.DxfdRf[2][0]*o.DxfdRf[0][1] - o.DxfdRf[0][0]*o.DxfdRf[2][1]
		o.Fnvec[2] = o.DxfdRf[0][0]*o.DxfdRf[1][1] - o.DxfdRf[1][0]*o.DxfdRf[0][1]
	}
	// flip normal vector so it points outward (per-face configuration)
	if len(o.FaceFlip) > 0 {
		if o.FaceFlip[idxface] {
			for i := 0; i < o.Gndim; i++ {
				o.Fnvec[i] *= -1.0
			}
		}
	}
	return
}
// AxisymGetRadius returns the x0 == radius for axisymmetric computations:
// the x-coordinate of the integration point, interpolated from the vertices.
// Note: must be called after CalcAtIp (it consumes the S values stored there).
func (o *Shape) AxisymGetRadius(x [][]float64) (radius float64) {
	sum := 0.0
	for m := 0; m < o.Nverts; m++ {
		sum += o.S[m] * x[0][m]
	}
	return sum
}
// AxisymGetRadiusF (face) returns the x0 == radius for axisymmetric computations,
// interpolated over the face's local vertices.
// Note: must be called after CalcAtFaceIp (it consumes the Sf values stored there).
func (o *Shape) AxisymGetRadiusF(x [][]float64, idxface int) (radius float64) {
	sum := 0.0
	verts := o.FaceLocalVerts[idxface]
	for m := 0; m < o.FaceNvertsMax; m++ {
		sum += o.Sf[m] * x[0][verts[m]]
	}
	return sum
}
// init_scratchpad initialise volume data (scratchpad): allocates all the
// working arrays whose sizes depend on Nverts, Gndim and FaceNvertsMax.
// Must be called after those fields are set and before CalcAtIp/CalcAtFaceIp.
func (o *Shape) init_scratchpad() {
	// volume data
	o.S = make([]float64, o.Nverts)
	o.DSdR = la.MatAlloc(o.Nverts, o.Gndim)
	o.DxdR = la.MatAlloc(o.Gndim, o.Gndim)
	o.DRdx = la.MatAlloc(o.Gndim, o.Gndim)
	o.G = la.MatAlloc(o.Nverts, o.Gndim)
	// face data (only for 2D/3D shapes; faces have dimension Gndim-1)
	if o.Gndim > 1 {
		o.Sf = make([]float64, o.FaceNvertsMax)
		o.DSfdRf = la.MatAlloc(o.FaceNvertsMax, o.Gndim-1)
		o.DxfdRf = la.MatAlloc(o.Gndim, o.Gndim-1)
		o.Fnvec = make([]float64, o.Gndim)
	}
	// lin data (1D shapes use the Jvec3d/Gvec scratchpad instead)
	if o.Gndim == 1 {
		o.Jvec3d = make([]float64, 3)
		o.Gvec = make([]float64, o.Nverts)
	}
}
package probe
import (
"fmt"
"reflect"
"regexp"
"strconv"
"strings"
"github.com/litmuschaos/litmus-go/pkg/log"
)
//Model contains the operands and operator for a comparison operation:
// a holds the expected value, b holds the actual value, and operator holds
// the comparison criteria to apply
type Model struct {
	a        interface{}
	b        interface{}
	operator string
}
//FirstValue creates a fresh Model and stores the first operand (expected value)
func FirstValue(a interface{}) *Model {
	m := &Model{}
	return m.FirstValue(a)
}
//FirstValue stores the first operand (expected value)
func (model *Model) FirstValue(a interface{}) *Model {
	model.a = a
	return model
}
//SecondValue stores the second operand (actual value)
func (model *Model) SecondValue(b interface{}) *Model {
	model.b = b
	return model
}
//Criteria stores the comparison operator
func (model *Model) Criteria(criteria string) *Model {
	model.operator = criteria
	return model
}
// CompareInt parses both operands as integers and applies the operator;
// it supports >=, <=, >, <, == and !=. A nil return means the actual value
// (b) satisfied the criteria against the expected value (a).
func (model Model) CompareInt() error {
	expected, err := strconv.Atoi(reflect.ValueOf(model.a).String())
	if err != nil {
		return err
	}
	actual, err := strconv.Atoi(reflect.ValueOf(model.b).String())
	if err != nil {
		return err
	}
	var matched bool
	switch model.operator {
	case ">=":
		matched = actual >= expected
	case "<=":
		matched = actual <= expected
	case ">":
		matched = actual > expected
	case "<":
		matched = actual < expected
	case "==":
		matched = actual == expected
	case "!=":
		matched = actual != expected
	default:
		return fmt.Errorf("criteria '%s' not supported in the probe", model.operator)
	}
	if !matched {
		return fmt.Errorf("The probe output didn't match with expected criteria")
	}
	return nil
}
// CompareFloat parses both operands as float64 and applies the operator;
// it supports >=, <=, >, <, == and !=. A nil return means the actual value
// (b) satisfied the criteria against the expected value (a).
func (model Model) CompareFloat() error {
	expected, err := strconv.ParseFloat(reflect.ValueOf(model.a).String(), 64)
	if err != nil {
		return err
	}
	actual, err := strconv.ParseFloat(reflect.ValueOf(model.b).String(), 64)
	if err != nil {
		return err
	}
	var matched bool
	switch model.operator {
	case ">=":
		matched = actual >= expected
	case "<=":
		matched = actual <= expected
	case ">":
		matched = actual > expected
	case "<":
		matched = actual < expected
	case "==":
		matched = actual == expected
	case "!=":
		matched = actual != expected
	default:
		return fmt.Errorf("criteria '%s' not supported in the probe", model.operator)
	}
	if !matched {
		return fmt.Errorf("The probe output didn't match with expected criteria")
	}
	return nil
}
// CompareString compares strings for specific operation
// it check for the equal, not equal and contains(sub-string) operations
func (model Model) CompareString() error {
expectedOutput := reflect.ValueOf(model.a).String()
actualOutput := reflect.ValueOf(model.b).String()
log.Infof("actual: %v, expected: %v, operator: %v", actualOutput, expectedOutput, model.operator)
switch model.operator {
case "equal", "Equal":
if !(actualOutput == expectedOutput) {
return fmt.Errorf("The probe output didn't match with expected criteria")
}
case "notEqual", "NotEqual":
if !(actualOutput != expectedOutput) {
return fmt.Errorf("The probe output didn't match with expected criteria")
}
case "contains", "Contains":
if !strings.Contains(actualOutput, expectedOutput) {
return fmt.Errorf("The probe output didn't match with expected criteria")
}
case "matches", "Matches":
re, err := regexp.Compile(expectedOutput)
if err != nil {
return fmt.Errorf("The probe regex '%s' is not a valid expression", expectedOutput)
}
if !re.MatchString(actualOutput) {
return fmt.Errorf("The probe output didn't match with expected criteria")
}
case "notMatches", "NotMatches":
re, err := regexp.Compile(expectedOutput)
if err != nil {
return fmt.Errorf("The probe regex '%s' is not a valid expression", expectedOutput)
}
if re.MatchString(actualOutput) {
return fmt.Errorf("The probe output didn't match with expected criteria")
}
default:
return fmt.Errorf("criteria '%s' not supported in the probe", model.operator)
}
return nil
} | pkg/probe/comparator.go | 0.650134 | 0.509764 | comparator.go | starcoder |
package maths
import (
"fmt"
"math"
"github.com/wdevore/Ranger-Go-IGE/api"
)
// vector is the concrete 2D float32 implementation behind api.IVector.
type vector struct {
	x, y float32
}
// NewVector constructs a new IVector initialized to (0, 0).
func NewVector() api.IVector {
	o := new(vector)
	return o
}
// NewVectorUsing constructs a new IVector using the given components.
func NewVectorUsing(x, y float32) api.IVector {
	o := new(vector)
	o.x = x
	o.y = y
	return o
}
// Components returns both components at once.
func (v *vector) Components() (x, y float32) {
	return v.x, v.y
}
// X returns the x component.
func (v *vector) X() float32 {
	return v.x
}
// Y returns the y component.
func (v *vector) Y() float32 {
	return v.y
}
// SetByComp sets both components from scalars.
func (v *vector) SetByComp(x, y float32) {
	v.x = x
	v.y = y
}
// SetByPoint copies the components from a point.
func (v *vector) SetByPoint(ip api.IPoint) {
	v.x = ip.X()
	v.y = ip.Y()
}
// SetByAngle sets the vector to the unit direction (cos θ, sin θ).
func (v *vector) SetByAngle(radians float64) {
	v.x = float32(math.Cos(radians))
	v.y = float32(math.Sin(radians))
}
// SetByVector copies the components from another vector.
func (v *vector) SetByVector(ip api.IVector) {
	v.x = ip.X()
	v.y = ip.Y()
}
// Length returns the Euclidean length sqrt(x² + y²).
func (v *vector) Length() float32 {
	return float32(math.Sqrt(float64(v.x*v.x + v.y*v.y)))
}
// LengthSqr returns the squared length (avoids the sqrt).
func (v *vector) LengthSqr() float32 {
	return v.x*v.x + v.y*v.y
}
// Add adds the scalar components to this vector in place.
func (v *vector) Add(x, y float32) {
	v.x += x
	v.y += y
}
// Sub subtracts the scalar components from this vector in place.
func (v *vector) Sub(x, y float32) {
	v.x -= x
	v.y -= y
}
// AddV adds another vector to this vector in place.
func (v *vector) AddV(iv api.IVector) {
	v.x += iv.X()
	v.y += iv.Y()
}
// SubV subtracts another vector from this vector in place.
func (v *vector) SubV(iv api.IVector) {
	v.x -= iv.X()
	v.y -= iv.Y()
}
// Add performs: out = v1 + v2
func Add(v1, v2, out api.IVector) {
	out.SetByComp(v1.X()+v2.X(), v1.Y()+v2.Y())
}
// Sub performs: out = v1 - v2
func Sub(v1, v2, out api.IVector) {
	out.SetByComp(v1.X()-v2.X(), v1.Y()-v2.Y())
}
// Scale multiplies both components by s in place.
func (v *vector) Scale(s float32) {
	v.x = v.x * s
	v.y = v.y * s
}
// ScaleBy performs: out = v * s
func ScaleBy(v api.IVector, s float32, out api.IVector) {
	out.SetByComp(v.X()*s, v.Y()*s)
}
// Div divides both components by d in place (no zero check — caller's duty).
func (v *vector) Div(d float32) {
	v.x = v.x / d
	v.y = v.y / d
}
// tmp is shared scratch storage for package-level helpers.
// NOTE(review): this makes VectorDistance (and Angle below) non-reentrant and
// unsafe for concurrent use — confirm callers are single-threaded.
var tmp = NewVector()
// VectorDistance returns the Euclidean distance between two vectors
// (treated as points), i.e. |v1 - v2|.
func VectorDistance(v1, v2 api.IVector) float32 {
	Sub(v1, v2, tmp)
	return tmp.Length()
}
// AngleX returns the angle of vo relative to the x-axis, in radians.
// NOTE(review): the receiver v is not used here — this looks like it could be
// a free function, or the intent may have been to measure relative to v; confirm.
func (v *vector) AngleX(vo api.IVector) float32 {
	return float32(math.Atan2(float64(vo.Y()), float64(vo.X())))
}
// Normalize scales the vector to unit length in place; the zero vector is
// left unchanged (guarded by the len != 0 check).
func (v *vector) Normalize() {
	len := v.Length()
	if len != 0.0 {
		v.Div(len)
	}
}
// SetDirection sets the vector to the unit direction (cos θ, sin θ).
// NOTE(review): duplicates SetByAngle above — consider consolidating.
func (v *vector) SetDirection(radians float64) {
	v.SetByComp(float32(math.Cos(radians)), float32(math.Sin(radians)))
}
// VectorDot computes the dot-product between the vectors.
func VectorDot(v1, v2 api.IVector) float32 {
	return v1.X()*v2.X() + v1.Y()*v2.Y()
}
// Cross computes the scalar (z-component) cross-product of two 2D vectors.
func Cross(v1, v2 api.IVector) float32 {
	return v1.X()*v2.Y() - v1.Y()*v2.X()
}
// CrossCW rotates the vector 90 degrees clockwise in place: (x, y) -> (y, -x).
func (v *vector) CrossCW() {
	v.SetByComp(v.Y(), -v.X())
}
// CrossCCW rotates the vector 90 degrees counter-clockwise in place: (x, y) -> (-y, x).
func (v *vector) CrossCCW() {
	v.SetByComp(-v.Y(), v.X())
}
// tmp2 is additional shared scratch storage for Angle (see the note on tmp:
// not safe for concurrent use).
var tmp2 = NewVector()
// Angle computes the angle in radians between two vector directions,
// using atan2 of the cross- and dot-products of their normalized forms.
// Results with magnitude below Epsilon are clamped to exactly 0.
func Angle(v1, v2 api.IVector) float32 {
	tmp.SetByVector(v1)
	tmp2.SetByVector(v2)
	tmp.Normalize()  // a2
	tmp2.Normalize() // b2
	angle := math.Atan2(float64(Cross(tmp, tmp2)), float64(VectorDot(tmp, tmp2)))
	if math.Abs(angle) < Epsilon {
		return 0.0
	}
	return float32(angle)
}
// String implements fmt.Stringer: "<x,y>" with three decimal places.
func (v vector) String() string {
	return fmt.Sprintf("<%0.3f,%0.3f>", v.x, v.y)
}
package pg
import (
"github.com/lib/pq/oid"
)
// RowDescriptionMessageType identifies RowDescriptionMessage message
// ('T' on the wire, per the PostgreSQL frontend/backend protocol).
const RowDescriptionMessageType = 'T'
// RowDescriptionMessage represents a message sent by a backend to describe query result fields.
type RowDescriptionMessage struct {
	// List of fields to be returned
	Fields []*FieldDescriptor
}
// FieldDescriptor describes a field of a DataRow.
type FieldDescriptor struct {
	// The field name.
	Name string
	// If the field can be identified as a column of a specific table, the object ID of the table; otherwise zero.
	TableOID oid.Oid
	// If the field can be identified as a column of a specific table, the attribute number of the column; otherwise zero.
	ColumnIndex int16
	// The object ID of the field's data type.
	DataTypeOID oid.Oid
	// The data type size (see pg_type.typlen). Note that negative values denote variable-width types.
	DataTypeSize int16
	// The type modifier (see pg_attribute.atttypmod). The meaning of the modifier is type-specific.
	DataTypeModifier int32
	// The format code being used for the field. Currently will be zero (text) or one (binary). In a RowDescription
	// returned from the statement variant of Describe, the format code is not yet known and will always be zero.
	Format DataFormat
}
// DataFormat is a code for PostgreSQL data format.
type DataFormat int16
// Known data format codes.
const (
	DataFormatText DataFormat = 0 // Plain text
	DataFormatBinary DataFormat = 1 // Binary representation
)
// Compile time check to make sure that RowDescriptionMessage implements the Message interface.
var _ Message = &RowDescriptionMessage{}
// ParseRowDescriptionMessage parses RowDescriptionMessage from a network frame.
// Fields are decoded in wire order (name, table OID, column index, type OID,
// type size, type modifier, format code); any short read aborts with the
// underlying error. Returns ErrMalformedMessage if the frame is not type 'T'.
func ParseRowDescriptionMessage(frame Frame) (*RowDescriptionMessage, error) {
	// Assert the message type
	if frame.MessageType() != RowDescriptionMessageType {
		return nil, ErrMalformedMessage
	}
	messageData := ReadBuffer(frame.MessageBody())
	// Number of fields (could be 0)
	// NOTE(review): a negative int16 here would make the make() below panic —
	// consider validating fieldsCount >= 0 for untrusted input.
	fieldsCount, err := messageData.ReadInt16()
	if err != nil {
		return nil, err
	}
	fields := make([]*FieldDescriptor, fieldsCount)
	for i := 0; i < int(fieldsCount); i++ {
		name, err := messageData.ReadString()
		if err != nil {
			return nil, err
		}
		tableOID, err := messageData.ReadInt32()
		if err != nil {
			return nil, err
		}
		columnNum, err := messageData.ReadInt16()
		if err != nil {
			return nil, err
		}
		dataTypeOID, err := messageData.ReadInt32()
		if err != nil {
			return nil, err
		}
		dataTypeSize, err := messageData.ReadInt16()
		if err != nil {
			return nil, err
		}
		dataTypeModifier, err := messageData.ReadInt32()
		if err != nil {
			return nil, err
		}
		format, err := messageData.ReadInt16()
		if err != nil {
			return nil, err
		}
		fields[i] = &FieldDescriptor{
			Name: name,
			TableOID: oid.Oid(tableOID),
			ColumnIndex: columnNum,
			DataTypeOID: oid.Oid(dataTypeOID),
			DataTypeSize: dataTypeSize,
			DataTypeModifier: dataTypeModifier,
			Format: DataFormat(format),
		}
	}
	return &RowDescriptionMessage{Fields: fields}, nil
}
// Frame serializes the message into a network frame.
func (m *RowDescriptionMessage) Frame() Frame {
var messageBuffer WriteBuffer
messageBuffer.WriteInt16(int16(len(m.Fields)))
for i := 0; i < len(m.Fields); i++ {
f := m.Fields[i]
messageBuffer.WriteString(f.Name)
messageBuffer.WriteInt32(int32(f.TableOID))
messageBuffer.WriteInt16(f.ColumnIndex)
messageBuffer.WriteInt32(int32(f.DataTypeOID))
messageBuffer.WriteInt16(f.DataTypeSize)
messageBuffer.WriteInt32(f.DataTypeModifier)
messageBuffer.WriteInt16(int16(f.Format))
}
return NewStandardFrame(RowDescriptionMessageType, messageBuffer)
} | pkg/pg/row_description_message.go | 0.763396 | 0.422803 | row_description_message.go | starcoder |
package models
import (
"math"
)
type Vector struct {
data [3]float64
}
func NewEmptyVector() *Vector {
return &Vector{
[3]float64{0, 0, 0},
}
}
func NewVectorFromArray(data [3]float64) *Vector {
return &Vector{data}
}
func NewVector(x, y, z float64) *Vector {
return &Vector{
[3]float64{x, y, z},
}
}
func (v *Vector) Update(x, y, z float64) *Vector {
v.data[0] = x
v.data[1] = y
v.data[2] = z
return v
}
func (v *Vector) Copy() *Vector {
return &Vector{
[3]float64{v.data[0], v.data[1], v.data[2]},
}
}
func (v *Vector) X() float64 {
return v.data[0]
}
func (v *Vector) Y() float64 {
return v.data[1]
}
func (v *Vector) Z() float64 {
return v.data[2]
}
func (v *Vector) Reflect(n *Vector) *Vector {
return v.AddScaledVector(n, -2*v.Dot(n))
}
func Refract(v, n *Vector, ni, nt float64) (bool, *Vector) {
uv := v.Copy().MakeUnitVector()
cosθ := uv.Dot(n)
snellRatio := ni / nt
discriminator := 1 - snellRatio*snellRatio*(1-cosθ*cosθ)
if discriminator > 0 {
//(uv - n*cosθ)*snellRatio - n*sqrt(disc)
refracted := uv.SubtractScaledVector(n, cosθ).Scale(snellRatio).
SubtractScaledVector(n, math.Sqrt(discriminator))
return true, refracted
}
return false, nil
}
func (v *Vector) Negate() *Vector {
v.data[0] = -v.data[0]
v.data[1] = -v.data[1]
v.data[2] = -v.data[2]
return v
}
func (v *Vector) SquaredLength() float64 {
return v.Dot(v)
}
func (v *Vector) Length() float64 {
return math.Sqrt(v.SquaredLength())
}
func (v *Vector) MakeUnitVector() *Vector {
length := v.Length()
v.data[0] /= length
v.data[1] /= length
v.data[2] /= length
return v
}
func (v *Vector) Scale(t float64) *Vector {
v.data[0] *= t
v.data[1] *= t
v.data[2] *= t
return v
}
func (v *Vector) AddVector(v1 *Vector) *Vector {
v.data[0] += v1.data[0]
v.data[1] += v1.data[1]
v.data[2] += v1.data[2]
return v
}
func (v *Vector) AddScaledVector(v1 *Vector, t float64) *Vector {
v.data[0] += v1.data[0] * t
v.data[1] += v1.data[1] * t
v.data[2] += v1.data[2] * t
return v
}
func (v *Vector) SubtractVector(v1 *Vector) *Vector {
v.data[0] -= v1.data[0]
v.data[1] -= v1.data[1]
v.data[2] -= v1.data[2]
return v
}
func (v *Vector) SubtractScaledVector(v1 *Vector, t float64) *Vector {
return v.AddScaledVector(v1, -t)
}
func (v *Vector) MultiplyVector(v1 *Vector) *Vector {
v.data[0] *= v1.data[0]
v.data[1] *= v1.data[1]
v.data[2] *= v1.data[2]
return v
}
func (v *Vector) Dot(v1 *Vector) float64 {
return v1.data[0]*v.data[0] + v1.data[1]*v.data[1] + v1.data[2]*v.data[2]
}
func (v *Vector) VectorCrossProduct(v1, v2 *Vector) *Vector {
v.data[0] = v1.data[1]*v2.data[2] - v1.data[2]*v2.data[1]
v.data[1] = v1.data[2]*v2.data[0] - v1.data[0]*v2.data[2]
v.data[2] = v1.data[0]*v2.data[1] - v1.data[1]*v2.data[0]
return v
} | models/vector.go | 0.757884 | 0.757593 | vector.go | starcoder |
// Package dbf implements a database of f(t,{x}) functions (e.g. time-space functions). The
// functions in this package are accompanied by derivatives w.r.t t and gradients w.r.t {x}. For
// instance: g(t,{x}) = df/dt, h(t,{x}) = dg/dt, and grad = df/d{x}
package dbf
import (
"os"
"github.com/cpmech/gosl/chk"
"github.com/cpmech/gosl/io"
"github.com/cpmech/gosl/plt"
"github.com/cpmech/gosl/utl"
)
// T defines the interface for t-x functions; i.e. f(t, {x}), together with
// its time derivatives and spatial gradient.
type T interface {
	Init(prms Params) // initialise function parameters
	F(t float64, x []float64) float64 // y = F(t, x)
	G(t float64, x []float64) float64 // ∂y/∂t_cteX = G(t, x)
	H(t float64, x []float64) float64 // ∂²y/∂t²_cteX = H(t, x)
	Grad(v []float64, t float64, x []float64) // ∇F = ∂y/∂x = Grad(t, x)
}
// allocators maps function type to function allocator
var allocators = map[string]func() T{} // type => function allocator
// New allocates function by name and initialises it with prms.
// Panics (via chk.Panic) if the name is not registered.
// Note: "zero" returns the shared Zero singleton (declared elsewhere);
// presumably it is stateless, so callers must not mutate it — confirm.
func New(name string, prms Params) T {
	if name == "zero" {
		return &Zero
	}
	allocator, ok := allocators[name]
	if !ok {
		chk.Panic("cannot find function named %q\n", name)
	}
	o := allocator()
	o.Init(prms)
	return o
}
// PlotT plots F, G and H for varying t and fixed coordinates x
// fnkey -- filename key (without extension); if empty, the figure is not saved
// args{F,G,H} -- if any is "", the corresponding plot is not created
func PlotT(o T, dirout, fnkey string, t0, tf float64, xcte []float64, np int) {
	// variables
	t := utl.LinSpace(t0, tf, np)
	var f, g, h []float64
	withF := true
	withG := true
	withH := true
	// NOTE(review): withF/withG/withH are hard-coded to true, so all three
	// subplots are always drawn and the nrow == 0 panic below is unreachable;
	// these look like vestiges of the older args{F,G,H} API mentioned in the
	// doc comment above — confirm before removing.
	nrow := 0
	// y-values: sample F, G and H over the time grid
	if withF {
		f = make([]float64, np)
		for i := 0; i < np; i++ {
			f[i] = o.F(t[i], xcte)
		}
		nrow++
	}
	if withG {
		g = make([]float64, np)
		for i := 0; i < np; i++ {
			g[i] = o.G(t[i], xcte)
		}
		nrow++
	}
	if withH {
		h = make([]float64, np)
		for i := 0; i < np; i++ {
			h[i] = o.H(t[i], xcte)
		}
		nrow++
	}
	if nrow == 0 {
		chk.Panic("one of args{F,G,H} must be provided")
	}
	// labels: embed the fixed x coordinates in the LaTeX axis labels
	labelT := "$t$"
	labelX := ""
	for _, x := range xcte {
		labelX += io.Sf(",%g", x)
	}
	labelF := "$f(t" + labelX + ")$"
	labelG := "$g(t" + labelX + ")=\\frac{\\mathrm{d}f}{\\mathrm{d}t}$"
	labelH := "$h(t" + labelX + ")=\\frac{\\mathrm{d}^2f}{\\mathrm{d}t^2}$"
	// plot F
	pidx := 1
	if withF {
		if nrow > 1 {
			plt.Subplot(nrow, 1, pidx)
		}
		plt.Plot(t, f, nil)
		plt.Gll(labelT, labelF, nil)
		pidx++
	}
	// plot G
	if withG {
		if nrow > 1 {
			plt.Subplot(nrow, 1, pidx)
		}
		plt.Plot(t, g, nil)
		plt.Gll(labelT, labelG, nil)
		pidx++
	}
	// plot H
	if withH {
		if nrow > 1 {
			plt.Subplot(nrow, 1, pidx)
		}
		plt.Plot(t, h, nil)
		plt.Gll(labelT, labelH, nil)
	}
	// save figure
	if fnkey != "" {
		plt.Save(dirout, fnkey)
	}
}
// PlotX plots F and the gradient of F, Gx and Gy, for varying x and fixed t
// hlZero -- highlight F(t,x) = 0
// axEqual -- use axis['equal']
func PlotX(o T, dirout, fnkey string, tcte float64, xmin, xmax []float64, np int) {
withGrad := true
hlZero := true
axEqual := true
if len(xmin) == 3 {
chk.Panic("PlotX works in 2D only")
}
X, Y := utl.MeshGrid2d(xmin[0], xmax[0], xmin[1], xmax[1], np, np)
F := utl.Alloc(np, np)
var Gx, Gy [][]float64
nrow := 1
if withGrad {
Gx = utl.Alloc(np, np)
Gy = utl.Alloc(np, np)
nrow++
}
x := make([]float64, 2)
g := make([]float64, 2)
for i := 0; i < np; i++ {
for j := 0; j < np; j++ {
x[0], x[1] = X[i][j], Y[i][j]
F[i][j] = o.F(tcte, x)
if withGrad {
o.Grad(g, tcte, x)
Gx[i][j] = g[0]
Gy[i][j] = g[1]
}
}
}
figsize := &plt.A{Prop: 1, WidthPt: 600, Dpi: 200}
os.MkdirAll(dirout, 0777)
if withGrad {
figsize.Prop = 2
plt.Reset(true, figsize)
plt.Subplot(nrow, 1, 1)
plt.Title("F(t,x)", nil)
} else {
plt.Reset(true, figsize)
}
plt.ContourF(X, Y, F, nil)
if hlZero {
plt.ContourL(X, Y, F, &plt.A{Levels: []float64{0}, Lw: 2, Colors: []string{"yellow"}})
}
if axEqual {
plt.Equal()
}
plt.Gll("x", "y", nil)
if withGrad {
plt.Subplot(2, 1, 2)
plt.Title("gradient", nil)
plt.Quiver(X, Y, Gx, Gy, nil)
if axEqual {
plt.Equal()
}
plt.Gll("x", "y", nil)
}
if fnkey != "" {
plt.Save(dirout, fnkey)
}
} | fun/dbf/dbfunctions.go | 0.726037 | 0.563498 | dbfunctions.go | starcoder |
package sbvector
// BitVectorBuilderData holds bit vector data to build.
type BitVectorBuilderData struct {
	vec *BitVectorData // vector under construction; replaced by a fresh one after Build
}
// SuccinctBitVectorBuilder is interface of succinct bit vector builder.
type SuccinctBitVectorBuilder interface {
	// Set assigns val to the bit at index i.
	Set(i uint64, val bool)
	// Get returns the bit at index i.
	Get(i uint64) (bool, error)
	// PushBack appends a single bit.
	PushBack(b bool)
	// PushBackBits appends `length` bits of x.
	PushBackBits(x uint64, length uint64)
	// GetBits returns `length` bits starting at position pos.
	GetBits(pos uint64, length uint64) (uint64, error)
	// Size returns the size of the bit vector.
	Size() uint64
	// Build finalises the vector, creating rank/select indexes, and resets
	// the builder for reuse.
	Build(enableFasterSelect1 bool, enableFasterSelect0 bool) (SuccinctBitVector, error)
}
// NewVectorBuilder returns new succinct bit vector builder backed by an
// empty BitVectorData.
func NewVectorBuilder() SuccinctBitVectorBuilder {
	return &BitVectorBuilderData{vec: new(BitVectorData)}
}
// NewVectorBuilderWithInit returns new succinct bit vector builder
// initialised from vec. The argument must be a *BitVectorData; the type
// assertion panics otherwise.
func NewVectorBuilderWithInit(vec SuccinctBitVector) SuccinctBitVectorBuilder {
	return &BitVectorBuilderData{vec: vec.(*BitVectorData)}
}
// Set assigns val to the bit of the underlying vector at index i.
func (builder *BitVectorBuilderData) Set(i uint64, val bool) {
	builder.vec.set(i, val)
}

// Get returns the bit of the underlying vector at index i.
func (builder *BitVectorBuilderData) Get(i uint64) (bool, error) {
	return builder.vec.Get(i)
}

// PushBack appends a single bit to the underlying vector.
func (builder *BitVectorBuilderData) PushBack(b bool) {
	builder.vec.pushBack(b)
}

// PushBackBits appends `length` bits of x to the underlying vector.
func (builder *BitVectorBuilderData) PushBackBits(x uint64, length uint64) {
	builder.vec.pushBackBits(x, length)
}

// GetBits returns `length` bits of the underlying vector starting at pos.
func (builder *BitVectorBuilderData) GetBits(pos uint64, length uint64) (uint64, error) {
	return builder.vec.GetBits(pos, length)
}

// Size returns the size of the underlying bit vector.
func (builder *BitVectorBuilderData) Size() uint64 {
	return builder.vec.Size()
}
// Build creates indexes for succinct bit vector(rank index, ...).
// If `enableFasterSelect1` is true, creates index for select1 make faster.
// If `enableFasterSelect0` is true, creates index for select0 make faster.
func (builder *BitVectorBuilderData) Build(enableFasterSelect1 bool, enableFasterSelect0 bool) (SuccinctBitVector, error) {
builder.vec.build(enableFasterSelect1, enableFasterSelect0)
vec := builder.vec
builder.vec = new(BitVectorData)
return vec, nil
} | sbvector_builder.go | 0.847653 | 0.571139 | sbvector_builder.go | starcoder |
package lango
const jsonList = `
{
"aa": {
"code": "aa",
"name": [
"Afar"
],
"native": [
"Afaraf"
]
},
"ab": {
"code": "ab",
"name": [
"Abkhaz"
],
"native": [
"аҧсуа бызшәа",
"аҧсшәа"
]
},
"ae": {
"code": "ae",
"name": [
"Avestan"
],
"native": [
"avesta"
]
},
"af": {
"code": "af",
"name": [
"Afrikaans"
],
"native": [
"Afrikaans"
]
},
"ak": {
"code": "ak",
"name": [
"Akan"
],
"native": [
"Akan"
]
},
"am": {
"code": "am",
"name": [
"Amharic"
],
"native": [
"አማርኛ"
]
},
"an": {
"code": "an",
"name": [
"Aragonese"
],
"native": [
"aragonés"
]
},
"ar": {
"code": "ar",
"name": [
"Arabic"
],
"native": [
"العربية"
]
},
"as": {
"code": "as",
"name": [
"Assamese"
],
"native": [
"অসমীয়া"
]
},
"av": {
"code": "av",
"name": [
"Avaric"
],
"native": [
"авар мацӀ",
"магӀарул мацӀ"
]
},
"ay": {
"code": "ay",
"name": [
"Aymara"
],
"native": [
"aymar aru"
]
},
"az": {
"code": "az",
"name": [
"Azerbaijani"
],
"native": [
"azərbaycan dili"
]
},
"ba": {
"code": "ba",
"name": [
"Bashkir"
],
"native": [
"башҡорт теле"
]
},
"be": {
"code": "be",
"name": [
"Belarusian"
],
"native": [
"беларуская мова"
]
},
"bg": {
"code": "bg",
"name": [
"Bulgarian"
],
"native": [
"български език"
]
},
"bh": {
"code": "bh",
"name": [
"Bihari"
],
"native": [
"भोजपुरी"
]
},
"bi": {
"code": "bi",
"name": [
"Bislama"
],
"native": [
"Bislama"
]
},
"bm": {
"code": "bm",
"name": [
"Bambara"
],
"native": [
"bamanankan"
]
},
"bn": {
"code": "bn",
"name": [
"Bengali",
"Bangla"
],
"native": [
"বাংলা"
]
},
"bo": {
"code": "bo",
"name": [
"Tibetan Standard",
"Tibetan",
"Central"
],
"native": [
"བོད་ཡིག"
]
},
"br": {
"code": "br",
"name": [
"Breton"
],
"native": [
"brezhoneg"
]
},
"bs": {
"code": "bs",
"name": [
"Bosnian"
],
"native": [
"bosanski jezik"
]
},
"ca": {
"code": "ca",
"name": [
"Catalan"
],
"native": [
"català"
]
},
"ce": {
"code": "ce",
"name": [
"Chechen"
],
"native": [
"нохчийн мотт"
]
},
"ch": {
"code": "ch",
"name": [
"Chamorro"
],
"native": [
"Chamoru"
]
},
"co": {
"code": "co",
"name": [
"Corsican"
],
"native": [
"corsu",
"lingua corsa"
]
},
"cr": {
"code": "cr",
"name": [
"Cree"
],
"native": [
"ᓀᐦᐃᔭᐍᐏᐣ"
]
},
"cs": {
"code": "cs",
"name": [
"Czech"
],
"native": [
"čeština",
"český jazyk"
]
},
"cu": {
"code": "cu",
"name": [
"Old Church Slavonic",
"Church Slavonic",
"Old Bulgarian"
],
"native": [
"ѩзыкъ словѣньскъ"
]
},
"cv": {
"code": "cv",
"name": [
"Chuvash"
],
"native": [
"чӑваш чӗлхи"
]
},
"cy": {
"code": "cy",
"name": [
"Welsh"
],
"native": [
"Cymraeg"
]
},
"da": {
"code": "da",
"name": [
"Danish"
],
"native": [
"dansk"
]
},
"de": {
"code": "de",
"name": [
"German"
],
"native": [
"Deutsch"
]
},
"dv": {
"code": "dv",
"name": [
"Divehi",
"Dhivehi",
"Maldivian"
],
"native": [
"ދިވެހި"
]
},
"dz": {
"code": "dz",
"name": [
"Dzongkha"
],
"native": [
"རྫོང་ཁ"
]
},
"ee": {
"code": "ee",
"name": [
"Ewe"
],
"native": [
"Eʋegbe"
]
},
"el": {
"code": "el",
"name": [
"Greek"
],
"native": [
"ελληνικά"
]
},
"en": {
"code": "en",
"name": [
"English"
],
"native": [
"English"
]
},
"eo": {
"code": "eo",
"name": [
"Esperanto"
],
"native": [
"Esperanto"
]
},
"es": {
"code": "es",
"name": [
"Spanish"
],
"native": [
"Español"
]
},
"et": {
"code": "et",
"name": [
"Estonian"
],
"native": [
"eesti",
"eesti keel"
]
},
"eu": {
"code": "eu",
"name": [
"Basque"
],
"native": [
"euskara",
"euskera"
]
},
"fa": {
"code": "fa",
"name": [
"Persian",
"Farsi"
],
"native": [
"فارسی"
]
},
"ff": {
"code": "ff",
"name": [
"Fula",
"Fulah",
"Pulaar",
"Pular"
],
"native": [
"Fulfulde",
"Pulaar",
"Pular"
]
},
"fi": {
"code": "fi",
"name": [
"Finnish"
],
"native": [
"suomi",
"suomen kieli"
]
},
"fj": {
"code": "fj",
"name": [
"Fijian"
],
"native": [
"vosa Vakaviti"
]
},
"fo": {
"code": "fo",
"name": [
"Faroese"
],
"native": [
"føroyskt"
]
},
"fr": {
"code": "fr",
"name": [
"French"
],
"native": [
"français",
"langue française"
]
},
"fy": {
"code": "fy",
"name": [
"Western Frisian"
],
"native": [
"Frysk"
]
},
"ga": {
"code": "ga",
"name": [
"Irish"
],
"native": [
"Gaeilge"
]
},
"gd": {
"code": "gd",
"name": [
        "Scottish Gaelic",
"Gaelic"
],
"native": [
"Gàidhlig"
]
},
"gl": {
"code": "gl",
"name": [
"Galician"
],
"native": [
"galego"
]
},
"gn": {
"code": "gn",
"name": [
"Guaraní"
],
"native": [
"Avañe'ẽ"
]
},
"gu": {
"code": "gu",
"name": [
"Gujarati"
],
"native": [
"ગુજરાતી"
]
},
"gv": {
"code": "gv",
"name": [
"Manx"
],
"native": [
"Gaelg",
"Gailck"
]
},
"ha": {
"code": "ha",
"name": [
"Hausa"
],
"native": [
"Hausa",
"هَوُسَ"
]
},
"he": {
"code": "he",
"name": [
"Hebrew"
],
"native": [
"עברית"
]
},
"hi": {
"code": "hi",
"name": [
"Hindi"
],
"native": [
"हिन्दी",
"हिंदी"
]
},
"ho": {
"code": "ho",
"name": [
        "Hiri Motu"
],
"native": [
"Hiri Motu"
]
},
"hr": {
"code": "hr",
"name": [
"Croatian"
],
"native": [
"hrvatski jezik"
]
},
"ht": {
"code": "ht",
"name": [
"Haitian",
        "Haitian Creole"
],
"native": [
"Kreyòl ayisyen"
]
},
"hu": {
"code": "hu",
"name": [
"Hungarian"
],
"native": [
"magyar"
]
},
"hy": {
"code": "hy",
"name": [
"Armenian"
],
"native": [
"Հայերեն"
]
},
"hz": {
"code": "hz",
"name": [
"Herero"
],
"native": [
"Otjiherero"
]
},
"ia": {
"code": "ia",
"name": [
"Interlingua"
],
"native": [
"Interlingua"
]
},
"id": {
"code": "id",
"name": [
"Indonesian"
],
"native": [
"Bahasa Indonesia"
]
},
"ie": {
"code": "ie",
"name": [
"Interlingue"
],
"native": [
"Interlingue"
]
},
"ig": {
"code": "ig",
"name": [
"Igbo"
],
"native": [
"Asụsụ Igbo"
]
},
"ii": {
"code": "ii",
"name": [
"Nuosu"
],
"native": [
"ꆈꌠ꒿ Nuosuhxop"
]
},
"ik": {
"code": "ik",
"name": [
"Inupiaq"
],
"native": [
"Iñupiaq",
"Iñupiatun"
]
},
"io": {
"code": "io",
"name": [
"Ido"
],
"native": [
"Ido"
]
},
"is": {
"code": "is",
"name": [
"Icelandic"
],
"native": [
"Íslenska"
]
},
"it": {
"code": "it",
"name": [
"Italian"
],
"native": [
"Italiano"
]
},
"iu": {
"code": "iu",
"name": [
"Inuktitut"
],
"native": [
"ᐃᓄᒃᑎᑐᑦ"
]
},
"ja": {
"code": "ja",
"name": [
"Japanese"
],
"native": [
"日本語",
"にほんご"
]
},
"jv": {
"code": "jv",
"name": [
"Javanese"
],
"native": [
"ꦧꦱꦗꦮ",
"Basa Jawa"
]
},
"ka": {
"code": "ka",
"name": [
"Georgian"
],
"native": [
"ქართული"
]
},
"kg": {
"code": "kg",
"name": [
"Kongo"
],
"native": [
"Kikongo"
]
},
"ki": {
"code": "ki",
"name": [
"Kikuyu",
"Gikuyu"
],
"native": [
"Gĩkũyũ"
]
},
"kj": {
"code": "kj",
"name": [
"Kwanyama",
"Kuanyama"
],
"native": [
"Kuanyama"
]
},
"kk": {
"code": "kk",
"name": [
"Kazakh"
],
"native": [
"қазақ тілі"
]
},
"kl": {
"code": "kl",
"name": [
"Kalaallisut",
"Greenlandic"
],
"native": [
"kalaallisut",
"kalaallit oqaasii"
]
},
"km": {
"code": "km",
"name": [
"Khmer"
],
"native": [
"ខ្មែរ",
"ខេមរភាសា",
"ភាសាខ្មែរ"
]
},
"kn": {
"code": "kn",
"name": [
"Kannada"
],
"native": [
"ಕನ್ನಡ"
]
},
"ko": {
"code": "ko",
"name": [
"Korean"
],
"native": [
"한국어"
]
},
"kr": {
"code": "kr",
"name": [
"Kanuri"
],
"native": [
"Kanuri"
]
},
"ks": {
"code": "ks",
"name": [
"Kashmiri"
],
"native": [
"कश्मीरी",
"كشميري"
]
},
"ku": {
"code": "ku",
"name": [
"Kurdish"
],
"native": [
"Kurdî",
"كوردی"
]
},
"kv": {
"code": "kv",
"name": [
"Komi"
],
"native": [
"коми кыв"
]
},
"kw": {
"code": "kw",
"name": [
"Cornish"
],
"native": [
"Kernewek"
]
},
"ky": {
"code": "ky",
"name": [
"Kyrgyz"
],
"native": [
"Кыргызча",
"Кыргыз тили"
]
},
"la": {
"code": "la",
"name": [
"Latin"
],
"native": [
"latine",
"lingua latina"
]
},
"lb": {
"code": "lb",
"name": [
"Luxembourgish",
"Letzeburgesch"
],
"native": [
"Lëtzebuergesch"
]
},
"lg": {
"code": "lg",
"name": [
"Ganda"
],
"native": [
"Luganda"
]
},
"li": {
"code": "li",
"name": [
"Limburgish",
"Limburgan",
"Limburger"
],
"native": [
"Limburgs"
]
},
"ln": {
"code": "ln",
"name": [
"Lingala"
],
"native": [
"Lingála"
]
},
"lo": {
"code": "lo",
"name": [
"Lao"
],
"native": [
"ພາສາລາວ"
]
},
"lt": {
"code": "lt",
"name": [
"Lithuanian"
],
"native": [
"lietuvių kalba"
]
},
"lu": {
"code": "lu",
"name": [
"Luba-Katanga"
],
"native": [
"Tshiluba"
]
},
"lv": {
"code": "lv",
"name": [
"Latvian"
],
"native": [
"latviešu valoda"
]
},
"mg": {
"code": "mg",
"name": [
"Malagasy"
],
"native": [
"fiteny malagasy"
]
},
"mh": {
"code": "mh",
"name": [
"Marshallese"
],
"native": [
"Kajin M̧ajeļ"
]
},
"mi": {
"code": "mi",
"name": [
"Māori"
],
"native": [
"te reo Māori"
]
},
"mk": {
"code": "mk",
"name": [
"Macedonian"
],
"native": [
"македонски јазик"
]
},
"ml": {
"code": "ml",
"name": [
"Malayalam"
],
"native": [
"മലയാളം"
]
},
"mn": {
"code": "mn",
"name": [
"Mongolian"
],
"native": [
"Монгол хэл"
]
},
"mr": {
"code": "mr",
"name": [
"Marathi",
"Marāṭhī"
],
"native": [
"मराठी"
]
},
"ms": {
"code": "ms",
"name": [
"Malay"
],
"native": [
"bahasa Melayu",
"بهاس ملايو"
]
},
"mt": {
"code": "mt",
"name": [
"Maltese"
],
"native": [
"Malti"
]
},
"my": {
"code": "my",
"name": [
"Burmese"
],
"native": [
"ဗမာစာ"
]
},
"na": {
"code": "na",
"name": [
"Nauruan"
],
"native": [
        "Dorerin Naoero"
]
},
"nb": {
"code": "nb",
"name": [
"Norwegian Bokmål"
],
"native": [
"Norsk bokmål"
]
},
"nd": {
"code": "nd",
"name": [
"Northern Ndebele"
],
"native": [
"isiNdebele"
]
},
"ne": {
"code": "ne",
"name": [
"Nepali"
],
"native": [
"नेपाली"
]
},
"ng": {
"code": "ng",
"name": [
"Ndonga"
],
"native": [
"Owambo"
]
},
"nl": {
"code": "nl",
"name": [
"Dutch"
],
"native": [
"Nederlands",
"Vlaams"
]
},
"nn": {
"code": "nn",
"name": [
"Norwegian Nynorsk"
],
"native": [
"Norsk nynorsk"
]
},
"no": {
"code": "no",
"name": [
"Norwegian"
],
"native": [
"Norsk"
]
},
"nr": {
"code": "nr",
"name": [
"Southern Ndebele"
],
"native": [
"isiNdebele"
]
},
"nv": {
"code": "nv",
"name": [
"Navajo",
"Navaho"
],
"native": [
"Diné bizaad"
]
},
"ny": {
"code": "ny",
"name": [
"Chichewa",
"Chewa",
"Nyanja"
],
"native": [
"chiCheŵa",
"chinyanja"
]
},
"oc": {
"code": "oc",
"name": [
"Occitan"
],
"native": [
"occitan",
"lenga d'òc"
]
},
"oj": {
"code": "oj",
"name": [
"Ojibwe",
"Ojibwa"
],
"native": [
"ᐊᓂᔑᓈᐯᒧᐎᓐ"
]
},
"om": {
"code": "om",
"name": [
"Oromo"
],
"native": [
"Afaan Oromoo"
]
},
"or": {
"code": "or",
"name": [
"Oriya"
],
"native": [
"ଓଡ଼ିଆ"
]
},
"os": {
"code": "os",
"name": [
"Ossetian",
"Ossetic"
],
"native": [
"ирон æвзаг"
]
},
"pa": {
"code": "pa",
"name": [
"Eastern Punjabi",
"Punjabi"
],
"native": [
"ਪੰਜਾਬੀ"
]
},
"pi": {
"code": "pi",
"name": [
"Pāli"
],
"native": [
"पाऴि"
]
},
"pl": {
"code": "pl",
"name": [
"Polish"
],
"native": [
        "język polski",
"polszczyzna"
]
},
"ps": {
"code": "ps",
"name": [
"Pashto",
"Pushto"
],
"native": [
"پښتو"
]
},
"pt": {
"code": "pt",
"name": [
"Portuguese"
],
"native": [
"Português"
]
},
"qu": {
"code": "qu",
"name": [
"Quechua"
],
"native": [
        "Runa Simi",
"Kichwa"
]
},
"rm": {
"code": "rm",
"name": [
"Romansh"
],
"native": [
"rumantsch grischun"
]
},
"rn": {
"code": "rn",
"name": [
"Kirundi"
],
"native": [
"Ikirundi"
]
},
"ro": {
"code": "ro",
"name": [
"Romanian"
],
"native": [
"Română"
]
},
"ru": {
"code": "ru",
"name": [
"Russian"
],
"native": [
"Русский"
]
},
"rw": {
"code": "rw",
"name": [
"Kinyarwanda"
],
"native": [
"Ikinyarwanda"
]
},
"sa": {
"code": "sa",
"name": [
"Sanskrit",
"Saṁskṛta"
],
"native": [
"संस्कृतम्"
]
},
"sc": {
"code": "sc",
"name": [
"Sardinian"
],
"native": [
"sardu"
]
},
"sd": {
"code": "sd",
"name": [
"Sindhi"
],
"native": [
"सिन्धी",
"سنڌي، سندھی"
]
},
"se": {
"code": "se",
"name": [
        "Northern Sami"
],
"native": [
"Davvisámegiella"
]
},
"sg": {
"code": "sg",
"name": [
"Sango"
],
"native": [
"yângâ tî sängö"
]
},
"si": {
"code": "si",
"name": [
"Sinhalese",
"Sinhala"
],
"native": [
"සිංහල"
]
},
"sk": {
"code": "sk",
"name": [
"Slovak"
],
"native": [
"slovenčina",
"slovenský jazyk"
]
},
"sl": {
"code": "sl",
"name": [
"Slovene"
],
"native": [
"slovenski jezik",
"slovenščina"
]
},
"sm": {
"code": "sm",
"name": [
"Samoan"
],
"native": [
"gagana fa'a Samoa"
]
},
"sn": {
"code": "sn",
"name": [
"Shona"
],
"native": [
"chiShona"
]
},
"so": {
"code": "so",
"name": [
"Somali"
],
"native": [
"Soomaaliga",
"af Soomaali"
]
},
"sq": {
"code": "sq",
"name": [
"Albanian"
],
"native": [
"Shqip"
]
},
"sr": {
"code": "sr",
"name": [
"Serbian"
],
"native": [
"српски језик"
]
},
"ss": {
"code": "ss",
"name": [
"Swati"
],
"native": [
"SiSwati"
]
},
"st": {
"code": "st",
"name": [
        "Southern Sotho"
],
"native": [
"Sesotho"
]
},
"su": {
"code": "su",
"name": [
"Sundanese"
],
"native": [
"Basa Sunda"
]
},
"sv": {
"code": "sv",
"name": [
"Swedish"
],
"native": [
"svenska"
]
},
"sw": {
"code": "sw",
"name": [
"Swahili"
],
"native": [
"Kiswahili"
]
},
"ta": {
"code": "ta",
"name": [
"Tamil"
],
"native": [
"தமிழ்"
]
},
"te": {
"code": "te",
"name": [
"Telugu"
],
"native": [
"తెలుగు"
]
},
"tg": {
"code": "tg",
"name": [
"Tajik"
],
"native": [
"тоҷикӣ",
"toçikī",
"تاجیکی"
]
},
"th": {
"code": "th",
"name": [
"Thai"
],
"native": [
"ไทย"
]
},
"ti": {
"code": "ti",
"name": [
"Tigrinya"
],
"native": [
"ትግርኛ"
]
},
"tk": {
"code": "tk",
"name": [
"Turkmen"
],
"native": [
"Türkmen",
"Түркмен"
]
},
"tl": {
"code": "tl",
"name": [
"Tagalog"
],
"native": [
"Wikang Tagalog"
]
},
"tn": {
"code": "tn",
"name": [
"Tswana"
],
"native": [
"Setswana"
]
},
"to": {
"code": "to",
"name": [
"Tonga",
"Tonga Islands"
],
"native": [
"faka Tonga"
]
},
"tr": {
"code": "tr",
"name": [
"Turkish"
],
"native": [
"Türkçe"
]
},
"ts": {
"code": "ts",
"name": [
"Tsonga"
],
"native": [
"Xitsonga"
]
},
"tt": {
"code": "tt",
"name": [
"Tatar"
],
"native": [
"татар теле",
"tatar tele"
]
},
"tw": {
"code": "tw",
"name": [
"Twi"
],
"native": [
"Twi"
]
},
"ty": {
"code": "ty",
"name": [
"Tahitian"
],
"native": [
"Reo Tahiti"
]
},
"ug": {
"code": "ug",
"name": [
"Uighur",
"Uyghur"
],
"native": [
"Uyghurche",
"ئۇيغۇرچە"
]
},
"uk": {
"code": "uk",
"name": [
"Ukrainian"
],
"native": [
"Українська"
]
},
"ur": {
"code": "ur",
"name": [
"Urdu"
],
"native": [
"اردو"
]
},
"uz": {
"code": "uz",
"name": [
"Uzbek"
],
"native": [
"Oʻzbek",
"Ўзбек",
"أۇزبېك"
]
},
"ve": {
"code": "ve",
"name": [
"Venda"
],
"native": [
"Tshivenḓa"
]
},
"vi": {
"code": "vi",
"name": [
"Vietnamese"
],
"native": [
"Tiếng Việt"
]
},
"vo": {
"code": "vo",
"name": [
"Volapük"
],
"native": [
"Volapük"
]
},
"wa": {
"code": "wa",
"name": [
"Walloon"
],
"native": [
"walon"
]
},
"wo": {
"code": "wo",
"name": [
"Wolof"
],
"native": [
"Wollof"
]
},
"xh": {
"code": "xh",
"name": [
"Xhosa"
],
"native": [
"isiXhosa"
]
},
"yi": {
"code": "yi",
"name": [
"Yiddish"
],
"native": [
"ייִדיש"
]
},
"yo": {
"code": "yo",
"name": [
"Yoruba"
],
"native": [
"Yorùbá"
]
},
"za": {
"code": "za",
"name": [
"Zhuang",
"Chuang"
],
"native": [
"Saɯ cueŋƅ",
"Saw cuengh"
]
},
"zh": {
"code": "zh",
"name": [
"Chinese"
],
"native": [
"中文",
"汉语",
"漢語"
]
},
"zu": {
"code": "zu",
"name": [
"Zulu"
],
"native": [
"isiZulu"
]
}
}
` | list.go | 0.514644 | 0.461684 | list.go | starcoder |
package blockchain
import (
"encoding/json"
"fmt"
)
// Data is an interface used to standardize methods for any type of Block data.
type Data interface {
	// GetData returns the underlying Data value.
	GetData() Data
	// ToString serialises the value (as JSON) for network communication.
	ToString() string
}
// =========== Transaction ===========

// Transaction is a type of Data representing a signed transfer of an
// amount between two parties.
type Transaction struct {
	From      string `json:"from"`
	To        string `json:"to"`
	Amount    int    `json:"amount"`
	Signature string `json:"signature"`
}

// GetData returns the Transaction itself as a Data value.
func (t Transaction) GetData() Data {
	return t
}

// ToString serialises the Transaction to its JSON representation; it
// returns "" (after printing the error) if marshalling fails.
func (t Transaction) ToString() string {
	encoded, err := json.Marshal(t)
	if err != nil {
		fmt.Println(err)
		return ""
	}
	return string(encoded)
}
// =========== Chain ===========

// Chain contains a slice, or chain, of blocks, representing a blockchain.
type Chain struct {
	ChainCopy []Block `json:"chainCopy"`
}

// GetData returns the Chain itself as a Data value.
func (c Chain) GetData() Data {
	return c
}

// ToString serialises the Chain to JSON; it returns "" (after printing
// the error) if marshalling fails.
func (c Chain) ToString() string {
	encoded, err := json.Marshal(c)
	if err != nil {
		fmt.Println(err)
		return ""
	}
	return string(encoded)
}
// =========== PeerChains ===========

// PeerChains is a list of all the copies of the blockchain on the network.
type PeerChains struct {
	List [][]Block `json:"list"`
}

// GetData returns the PeerChains itself as a Data value.
func (p PeerChains) GetData() Data {
	return p
}

// ToString serialises the PeerChains to JSON; it returns "" (after
// printing the error) if marshalling fails.
func (p PeerChains) ToString() string {
	encoded, err := json.Marshal(p)
	if err != nil {
		fmt.Println(err)
		return ""
	}
	return string(encoded)
}
// =========== LotteryEntry ===========

// LotteryEntry represents one entry in the proof of stake lottery.
type LotteryEntry struct {
	Stake int         `json:"stake"`
	Peer  PeerAddress `json:"peer"`
}

// GetData returns the LotteryEntry itself as a Data value.
func (l LotteryEntry) GetData() Data {
	return l
}

// ToString serialises the LotteryEntry to JSON; it returns "" (after
// printing the error) if marshalling fails.
func (l LotteryEntry) ToString() string {
	encoded, err := json.Marshal(l)
	if err != nil {
		fmt.Println(err)
		return ""
	}
	return string(encoded)
}
// =========== CandidateBlock ===========

// CandidateBlock represents a peer's mined block that must be validated.
type CandidateBlock struct {
	Block Block       `json:"block"`
	Miner PeerAddress `json:"miner"`
}

// GetData returns the CandidateBlock itself as a Data value.
func (c CandidateBlock) GetData() Data {
	return c
}

// ToString serialises the CandidateBlock to JSON; it returns "" (after
// printing the error) if marshalling fails.
func (c CandidateBlock) ToString() string {
	encoded, err := json.Marshal(c)
	if err != nil {
		fmt.Println(err)
		return ""
	}
	return string(encoded)
}
// =========== PublicKey ===========
// PublicKey represents a peer's mined block that must be validated
type PublicKey struct {
X string `json:"x"`
Y string `json:"y"`
}
// GetData is the interface method that is required to retrieve Data object
func (pk PublicKey) GetData() Data {
return pk
}
// ToString is the interface method that is required to transform the Data object into a string for communication
func (pk PublicKey) ToString() string {
b, err := json.Marshal(pk)
if err != nil {
fmt.Println(err)
return ""
}
return string(b)
} | src/blockchain/data.go | 0.554953 | 0.419232 | data.go | starcoder |
package rtc
import "math"
// Triangle returns a new TriangleT built from the three vertices p1, p2, p3.
// The edge vectors, face normal and bounding box are precomputed.
func Triangle(p1, p2, p3 Tuple) *TriangleT {
	edge1 := p2.Sub(p1)
	edge2 := p3.Sub(p1)

	box := Bounds()
	box.UpdateBounds(p1)
	box.UpdateBounds(p2)
	box.UpdateBounds(p3)

	return &TriangleT{
		Shape:  Shape{Transform: M4Identity(), Material: GetMaterial()},
		P1:     p1,
		P2:     p2,
		P3:     p3,
		E1:     edge1,
		E2:     edge2,
		Normal: edge2.Cross(edge1).Normalize(),
		bounds: box,
	}
}
// TriangleT represents a triangle object.
type TriangleT struct {
	Shape
	P1 Tuple // vertex 1
	P2 Tuple // vertex 2
	P3 Tuple // vertex 3
	N1 Tuple // per-vertex normals — unused in this file; presumably for smooth triangles (confirm)
	N2 Tuple
	N3 Tuple
	E1 Tuple // edge P2-P1, precomputed by Triangle
	E2 Tuple // edge P3-P1, precomputed by Triangle
	Normal Tuple // face normal (E2 x E1, normalized), precomputed by Triangle
	bounds *BoundsT // bounding box of the three vertices
}

// Compile-time check that *TriangleT satisfies Object.
var _ Object = &TriangleT{}
// SetTransform sets the object's transform 4x4 matrix.
// It returns the object itself so that calls can be chained.
func (t *TriangleT) SetTransform(m M4) Object {
	t.Transform = m
	return t
}

// SetMaterial sets the object's material.
// It returns the object itself so that calls can be chained.
func (t *TriangleT) SetMaterial(material MaterialT) Object {
	t.Material = material
	return t
}

// SetParent sets the object's parent object.
// It returns the object itself so that calls can be chained.
func (t *TriangleT) SetParent(parent Object) Object {
	t.Parent = parent
	return t
}

// Bounds returns the minimum bounding box of the object in object
// (untransformed) space. The box is precomputed by Triangle from the
// three vertices.
func (t *TriangleT) Bounds() *BoundsT {
	return t.bounds
}
// LocalIntersect returns a slice of IntersectionT values where the
// transformed (object space) ray intersects the object.
// It implements the Möller–Trumbore ray/triangle algorithm: the hit point
// is expressed in barycentric coordinates (u, v) and rejected as soon as it
// falls outside the triangle.
func (t *TriangleT) LocalIntersect(ray RayT) []IntersectionT {
	dirCrossE2 := ray.Direction.Cross(t.E2)
	det := t.E1.Dot(dirCrossE2)
	// |det| near zero means the ray is (nearly) parallel to the plane.
	if math.Abs(det) < epsilon {
		return nil
	}
	f := 1 / det
	p1ToOrigin := ray.Origin.Sub(t.P1)
	u := f * p1ToOrigin.Dot(dirCrossE2)
	// First barycentric coordinate out of [0, 1]: miss.
	if u < 0 || u > 1 {
		return nil
	}
	originCrossE1 := p1ToOrigin.Cross(t.E1)
	v := f * ray.Direction.Dot(originCrossE1)
	// Second coordinate negative or u+v > 1: hit lies outside the triangle.
	if v < 0 || u+v > 1 {
		return nil
	}
	// tv is the distance along the ray; u and v are kept on the intersection.
	tv := f * t.E2.Dot(originCrossE1)
	return Intersections(IntersectionWithUV(tv, t, u, v))
}
// LocalNormalAt returns the normal vector at the given point of intersection
// (transformed to object space) with the object.
// Flat shading: the precomputed face normal is returned regardless of
// objectPoint and hit.
func (t *TriangleT) LocalNormalAt(objectPoint Tuple, hit *IntersectionT) Tuple {
	return t.Normal
}

// Includes returns whether this object includes (or actually is) the
// other object. For a plain triangle this is pointer identity.
func (t *TriangleT) Includes(other Object) bool {
	return t == other
}
package test
import (
"fmt"
"reflect"
gomegaFormat "github.com/onsi/gomega/format"
gomegaMatchers "github.com/onsi/gomega/matchers"
gomegaTypes "github.com/onsi/gomega/types"
)
// MatchArray returns a gomega matcher that succeeds when the actual value is
// an array/slice whose elements match the given expected elements one-to-one,
// in order. See MatchArrayMatcher.Match for the matching rules.
func MatchArray(elements ...interface{}) gomegaTypes.GomegaMatcher {
	return &MatchArrayMatcher{
		Elements: elements,
	}
}

// MatchArrayMatcher implements gomegaTypes.GomegaMatcher for element-wise
// array/slice comparison.
type MatchArrayMatcher struct {
	Elements []interface{} // expected elements; each may itself be a matcher
}
// Match reports whether actual (which must be an array or slice) matches the
// expected elements one-to-one, in order. Expected elements that are not
// matchers are wrapped in gomega's EqualMatcher; lengths must be equal.
func (m *MatchArrayMatcher) Match(actual interface{}) (bool, error) {
	if !isArrayOrSlice(actual) {
		return false, fmt.Errorf("MatchArray matcher expects an array/slice. Got:\n%s", gomegaFormat.Object(actual, 1))
	}
	elements := m.Elements
	// A single array/slice argument is flattened into the element list, so
	// MatchArray([]int{1, 2}) behaves like MatchArray(1, 2).
	if len(elements) == 1 && isArrayOrSlice(elements[0]) {
		element := reflect.ValueOf(elements[0])
		elements = []interface{}{}
		for index := 0; index < element.Len(); index++ {
			elements = append(elements, element.Index(index).Interface())
		}
	}
	// Coerce every expected element into a matcher.
	matchers := []gomegaTypes.GomegaMatcher{}
	for _, element := range elements {
		matcher, isMatcher := element.(gomegaTypes.GomegaMatcher)
		if !isMatcher {
			matcher = &gomegaMatchers.EqualMatcher{Expected: element}
		}
		matchers = append(matchers, matcher)
	}
	// Extract the actual values via reflection.
	value := reflect.ValueOf(actual)
	values := []interface{}{}
	for index := 0; index < value.Len(); index++ {
		values = append(values, value.Index(index).Interface())
	}
	// Length mismatch is a plain failure, not an error.
	if len(values) != len(matchers) {
		return false, nil
	}
	// Element-wise matching; the first failure or error stops the loop.
	for index := 0; index < len(values); index++ {
		if success, err := matchers[index].Match(values[index]); err != nil || !success {
			return success, err
		}
	}
	return true, nil
}
// FailureMessage returns the gomega failure message for a positive expectation.
func (m *MatchArrayMatcher) FailureMessage(actual interface{}) (message string) {
	return gomegaFormat.Message(actual, "to match array", m.Elements)
}

// NegatedFailureMessage returns the gomega failure message for a negated expectation.
func (m *MatchArrayMatcher) NegatedFailureMessage(actual interface{}) (message string) {
	return gomegaFormat.Message(actual, "not to match array", m.Elements)
}
// isArrayOrSlice reports whether actual is a (non-nil) array or slice value.
func isArrayOrSlice(actual interface{}) bool {
	if actual == nil {
		return false
	}
	kind := reflect.TypeOf(actual).Kind()
	return kind == reflect.Array || kind == reflect.Slice
}
package bitset
// Dense is a standard bitset, represented as a sequence of bits. See Sparse in
// this package for a more memory-efficient storage scheme for sparse bitsets.
type Dense struct {
	sets []Set64 // 64-bit words; word i holds elements [64*i, 64*i+63]
}
// NewDense creates a set capable of representing values in the range
// [0, capacity), at least. The Cap method reports the exact capacity.
// NewDense panics if capacity is negative.
func NewDense(capacity int) *Dense {
	return &Dense{
		sets: setslice(capacity),
	}
}

// setslice returns a slice with enough 64-bit words to hold capacity bits
// (nil when capacity is 0). It panics if capacity is negative.
func setslice(capacity int) []Set64 {
	if capacity == 0 {
		return nil
	}
	if capacity < 0 {
		panic("negative capacity")
	}
	// ceil(capacity / 64) words.
	return make([]Set64, (capacity-1)/64+1)
}

// Cap returns the maximum number of elements the set can contain,
// which is one greater than the largest element it can contain.
func (s *Dense) Cap() int {
	return len(s.sets) * 64
}
// Len returns the number of elements in s.
func (s *Dense) Len() int {
	total := 0
	for _, set := range s.sets {
		total += set.Len()
	}
	return total
}
// Empty reports whether s has no elements.
func (s *Dense) Empty() bool {
	for _, set := range s.sets {
		if !set.Empty() {
			return false
		}
	}
	return true
}
// Copy returns a new Dense with the same elements and capacity as s.
func (s *Dense) Copy() *Dense {
	dup := make([]Set64, len(s.sets))
	copy(dup, s.sets)
	return &Dense{sets: dup}
}
// Add adds n to s. It panics (index out of range) if n >= s.Cap().
func (s *Dense) Add(n uint) {
	s.sets[n/64].Add(uint8(n % 64))
}

// Remove removes n from s. It panics (index out of range) if n >= s.Cap().
func (s *Dense) Remove(n uint) {
	s.sets[n/64].Remove(uint8(n % 64))
}

// Contains reports whether s contains n. It panics (index out of range)
// if n >= s.Cap().
func (s *Dense) Contains(n uint) bool {
	return s.sets[n/64].Contains(uint8(n % 64))
}
// Clear removes all elements from s, keeping its capacity.
func (s *Dense) Clear() {
	for i := range s.sets { // can't use _, t because it copies
		s.sets[i].Clear()
	}
}

// SetCap changes the capacity of s, preserving elements below the new
// capacity; elements at or above it are dropped.
func (s *Dense) SetCap(newCapacity int) {
	newSets := setslice(newCapacity)
	copy(newSets, s.sets)
	s.sets = newSets
}
// Equal reports whether s2 has the same elements as s1. It may have a different capacity.
func (s1 *Dense) Equal(s2 *Dense) bool {
	// Ensure s1 is the shorter of the two so a single pass suffices.
	if len(s1.sets) > len(s2.sets) {
		s1, s2 = s2, s1
	}
	// Here, len(s1.sets) <= len(s2.sets).
	for i, t1 := range s1.sets {
		if t1 != s2.sets[i] {
			return false
		}
	}
	// Any extra words in the longer set must be empty.
	for _, t2 := range s2.sets[len(s1.sets):] {
		if t2 != 0 {
			return false
		}
	}
	return true
}
// Complement replaces s with its complement (within its capacity).
func (s *Dense) Complement() {
	for i := range s.sets {
		s.sets[i].Complement()
	}
}
// AddIn adds all the elements in s2 to s1.
// It sets s1 to the union of s1 and s2, growing s1's capacity to s2's first
// if necessary.
func (s1 *Dense) AddIn(s2 *Dense) {
	if s1.Cap() < s2.Cap() {
		// TODO: Grow s1 less if it's not necessary, or panic.
		s1.SetCap(s2.Cap())
	}
	for i, t2 := range s2.sets {
		s1.sets[i].AddIn(t2)
	}
}
// RemoveIn removes from s1 all the elements that are in s2.
// It sets s1 to the set difference of s1 and s2. Elements of s2 beyond s1's
// capacity are ignored.
func (s1 *Dense) RemoveIn(s2 *Dense) {
	min := minSetLen(s1, s2)
	for i := 0; i < min; i++ {
		s1.sets[i].RemoveIn(s2.sets[i])
	}
}
// LenRemoveIn returns what s1.Len() would be after s1.RemoveIn(s2), without
// modifying s1.
func (s1 *Dense) LenRemoveIn(s2 *Dense) int {
	min := minSetLen(s1, s2)
	n := 0
	// t is a value copy of the word, so RemoveIn mutates only the copy.
	for i, t := range s1.sets[:min] {
		t.RemoveIn(s2.sets[i])
		n += t.Len()
	}
	// Words beyond s2's length are unaffected by the removal.
	for _, t := range s1.sets[min:] {
		n += t.Len()
	}
	return n
}
// RemoveNotIn removes from s1 all the elements that are not in s2.
// It sets s1 to the intersection of s1 and s2.
func (s1 *Dense) RemoveNotIn(s2 *Dense) {
	min := minSetLen(s1, s2)
	for i := 0; i < min; i++ {
		s1.sets[i].RemoveNotIn(s2.sets[i])
	}
	// Words beyond s2's capacity cannot intersect it; clear them.
	for i := min; i < len(s1.sets); i++ {
		s1.sets[i].Clear()
	}
}
// minSetLen returns the smaller of the two sets' word counts.
func minSetLen(s1, s2 *Dense) int {
	if len(s2.sets) < len(s1.sets) {
		return len(s2.sets)
	}
	return len(s1.sets)
}
// Elements calls f on successive slices of the set's elements, from lowest to
// highest. If f returns false, the iteration stops. The slice passed to f will
// be reused when f returns.
func (s *Dense) Elements(f func([]uint) bool) {
	var buf [64]uint
	for i, t := range s.sets {
		// populate fills buf with t's members (values 0-63) and returns the
		// count; adding 64*i converts them to set-wide element values.
		n := t.populate(&buf)
		offset := uint(64 * i)
		for j := range buf[:n] {
			buf[j] += offset
		}
		if !f(buf[:n]) {
			break
		}
	}
}
package mat
import (
"bufio"
"fmt"
"io"
"math"
"os"
"strconv"
"strings"
"github.com/pkg/errors"
)
// Vector is a list of float numbers.
type Vector []float64
// SparseVector is a map with index is a key and value is a value at that index.
type SparseVector map[int]float64
// SparseMatrix is a list of sparse vectors.
type SparseMatrix struct {
Vectors []SparseVector
}
// Matrix is a list of vector.
type Matrix struct {
Vectors []*Vector
}
// ReadLibsvmFileToSparseMatrix reads libsvm file into sparse matrix.
func ReadLibsvmFileToSparseMatrix(fileName string) (SparseMatrix, error) {
file, err := os.Open(fileName)
if err != nil {
return SparseMatrix{}, fmt.Errorf("unable to open %s: %s", fileName, err)
}
defer file.Close()
reader := bufio.NewReader(file)
sparseMatrix := SparseMatrix{Vectors: make([]SparseVector, 0)}
for {
line, err := reader.ReadString('\n')
if err != nil {
if err != io.EOF {
return SparseMatrix{}, err
}
break
}
line = strings.TrimSpace(line)
if line == "" {
break
}
tokens := strings.Split(line, " ")
if len(tokens) < 2 {
return SparseMatrix{}, fmt.Errorf("too few columns")
}
// first column is label so skip it.
vec := SparseVector{}
for c := 1; c < len(tokens); c++ {
if len(tokens[c]) == 0 {
return SparseMatrix{}, fmt.Errorf("corrupted data format please check for empty spaces")
}
pair := strings.Split(tokens[c], ":")
if len(pair) != 2 {
return SparseMatrix{}, fmt.Errorf("wrong data format %s", tokens[c])
}
colIdx, err := strconv.ParseUint(pair[0], 10, 32)
if err != nil {
return SparseMatrix{}, fmt.Errorf("cannot parse to int %s: %s", pair[0], err)
}
val, err := strconv.ParseFloat(pair[1], 64)
if err != nil {
return SparseMatrix{}, fmt.Errorf("cannot parse to float %s: %s", pair[1], err)
}
vec[int(colIdx)] = val
}
sparseMatrix.Vectors = append(sparseMatrix.Vectors, vec)
}
return sparseMatrix, nil
}
// ReadCSVFileToDenseMatrix reads a delimiter-separated file into a dense matrix.
// Empty cells are filled with defaultVal. Reading stops at the first blank line
// (or EOF). Every row must have the same number of columns as the first row.
func ReadCSVFileToDenseMatrix(fileName string, delimiter string, defaultVal float64) (Matrix, error) {
	f, err := os.Open(fileName)
	if err != nil {
		return Matrix{}, fmt.Errorf("unable to open %s: %s", fileName, err)
	}
	defer f.Close()
	r := bufio.NewReader(f)
	result := Matrix{Vectors: make([]*Vector, 0)}
	width := -1 // column count of the first row; -1 until the first row is seen
	rowIdx := 0
	for {
		raw, readErr := r.ReadString('\n')
		if readErr != nil && readErr != io.EOF {
			return Matrix{}, readErr
		}
		trimmed := strings.TrimSpace(raw)
		if trimmed == "" {
			break
		}
		cells := strings.Split(trimmed, delimiter)
		row := Vector{}
		for _, cell := range cells {
			if len(cell) == 0 {
				row = append(row, defaultVal)
				continue
			}
			parsed, parseErr := strconv.ParseFloat(cell, 64)
			if parseErr != nil {
				return Matrix{}, fmt.Errorf("cannot convert to float %s: %s", cell, parseErr)
			}
			row = append(row, parsed)
		}
		switch {
		case width == -1:
			width = len(row)
		case width != len(row):
			return Matrix{}, fmt.Errorf("row %d has different dimension: %d, please check your file",
				rowIdx, len(row))
		}
		result.Vectors = append(result.Vectors, &row)
		rowIdx++
	}
	return result, nil
}
// IsEqualVectors compares two vectors element-wise, allowing a per-element
// absolute difference of up to threshold. It returns nil when the vectors are
// equal within the threshold, otherwise an error describing the first mismatch.
func IsEqualVectors(v1, v2 *Vector, threshold float64) error {
	a, b := *v1, *v2
	if len(a) != len(b) {
		return fmt.Errorf("different vector length v1=%d, v2=%d", len(a), len(b))
	}
	for i, x := range a {
		if math.Abs(x-b[i]) > threshold {
			return fmt.Errorf("%d element mismatch: v1[%d]=%f, v2[%d]=%f", i, i, x, i, b[i])
		}
	}
	return nil
}
// GetVectorMaxIdx returns the index of the maximum value within a vector,
// or -1 and an error when the vector is empty. Ties keep the first index;
// the -Inf sentinel preserves the original behavior for NaN elements.
func GetVectorMaxIdx(v *Vector) (int, error) {
	vals := *v
	if len(vals) == 0 {
		return -1, fmt.Errorf("empty vector")
	}
	best, bestVal := 0, math.Inf(-1)
	for i, x := range vals {
		if x > bestVal {
			bestVal = x
			best = i
		}
	}
	return best, nil
}
// IsEqualMatrices compares 2 matrices with a threshold.
func IsEqualMatrices(m1, m2 *Matrix, threshold float64) error {
if len(m1.Vectors) != len(m2.Vectors) {
return fmt.Errorf("row matrix mismatch: m1 got %d rows, m2 got %d rows", len(m1.Vectors), len(m2.Vectors))
}
for i := range m1.Vectors {
err := IsEqualVectors(m1.Vectors[i], m2.Vectors[i], threshold)
if err != nil {
return errors.Wrap(err, fmt.Sprintf("matrix comparison at index %d", i))
}
}
return nil
} | mat/mat.go | 0.600657 | 0.555435 | mat.go | starcoder |
package consensus
import (
"github.com/axiom-org/axiom/util"
)
// NominationState holds the nomination phase state for the Stellar Consensus
// Protocol. See page 21 of:
// https://www.stellar.org/papers/stellar-consensus-protocol.pdf
type NominationState struct {
	// The values we have voted to nominate
	X []SlotValue
	// The values we have accepted as nominated
	Y []SlotValue
	// The values whose nomination we have confirmed
	Z []SlotValue
	// The last NominationMessage received from each node
	N map[string]*NominationMessage
	// Who we are
	publicKey util.PublicKey
	// Who we listen to for quorum
	D *QuorumSlice
	// The number of messages this state has processed
	received int
	// Which priority we think we are for creating a nomination.
	// 0 is the first priority.
	// Negative means we should never create a nomination.
	priority int
	// The value store we use to validate or combine values
	values ValueStore
}
// NewNominationState creates a fresh nomination state for one node, seeding
// the nomination priority from the value store's last value.
func NewNominationState(
	publicKey util.PublicKey, qs *QuorumSlice, vs ValueStore) *NominationState {
	seed := string(vs.Last())
	return &NominationState{
		X:         []SlotValue{},
		Y:         []SlotValue{},
		Z:         []SlotValue{},
		N:         map[string]*NominationMessage{},
		publicKey: publicKey,
		D:         qs,
		priority:  SeedPriority(seed, qs.Members, publicKey.String()),
		values:    vs,
	}
}
// Logf writes a formatted log line under the "NS" (nomination state) tag,
// labeled with this node's short name.
func (s *NominationState) Logf(format string, a ...interface{}) {
	util.Logf("NS", s.publicKey.ShortName(), format, a...)
}
// Show logs the current voted (X), accepted (Y), and confirmed (Z) value
// sets for debugging.
func (s *NominationState) Show() {
	s.Logf("nState:")
	s.Logf("X: %+v", s.X)
	s.Logf("Y: %+v", s.Y)
	s.Logf("Z: %+v", s.Z)
}
// HasNomination tells you whether this nomination state can currently send out
// a nominate message.
// If we have never received a nomination from a peer, and haven't had SetDefault
// called ourselves, then we won't have a nomination.
func (s *NominationState) HasNomination() bool {
	return len(s.X) != 0
}
// MaybeNominateNewValue nominates a fresh value when we have none, it is our
// turn per priority, and the value store can suggest one.
// Returns whether we nominated a new value.
func (s *NominationState) MaybeNominateNewValue() bool {
	if s.HasNomination() {
		// We already nominated a value
		return false
	}
	if s.priority < 0 {
		// Negative priority means we never nominate
		return false
	}
	if s.D.Threshold*s.priority > s.received {
		// We don't think it's our turn yet
		return false
	}
	value, ok := s.values.SuggestValue()
	if !ok {
		// We have nothing to nominate
		return false
	}
	s.Logf("nominating %s", util.Shorten(string(value)))
	s.NominateNewValue(value)
	return true
}
// WantsToNominateNewValue is a heuristic. If we already have some value, we don't
// want to nominate a new one. We also want to wait some time, according to our
// priority, before we are willing to make a nomination.
// A negative priority means this node should never nominate (see the priority
// field comment and the matching guard in MaybeNominateNewValue); previously a
// negative priority made this method return true unconditionally.
func (s *NominationState) WantsToNominateNewValue() bool {
	return s.priority >= 0 && s.D.Threshold*s.priority <= s.received
}
// NominateNewValue records v as our single voted-for value, unless we already
// have a nomination.
func (s *NominationState) NominateNewValue(v SlotValue) {
	if s.HasNomination() {
		// We already have something to nominate
		return
	}
	s.X = []SlotValue{v}
}
// PredictValue can predict the value iff HasNomination is true. If not, panic.
// Preference order is confirmed (Z), then accepted (Y), then voted (X).
func (s *NominationState) PredictValue() SlotValue {
	for _, set := range [][]SlotValue{s.Z, s.Y, s.X} {
		if len(set) > 0 {
			return s.values.Combine(set)
		}
	}
	panic("PredictValue was called when HasNomination was false")
}
// QuorumSlice returns the quorum slice for the given node: our own slice for
// ourselves, otherwise the slice carried by the node's last message. The bool
// result is false when we have never heard from the node.
func (s *NominationState) QuorumSlice(node string) (*QuorumSlice, bool) {
	if node == s.publicKey.String() {
		return s.D, true
	}
	if m, ok := s.N[node]; ok {
		return m.D, true
	}
	return nil, false
}
// PublicKey returns this node's public key.
func (s *NominationState) PublicKey() util.PublicKey {
	return s.publicKey
}
// AssertValid panics if any of the X, Y, or Z sets contains a duplicate value.
func (s *NominationState) AssertValid() {
	for _, set := range [][]SlotValue{s.X, s.Y, s.Z} {
		AssertNoDupes(set)
	}
}
// MaybeAdvance checks whether we should accept the nomination for this slot value,
// and adds it to our accepted list if appropriate.
// It also checks whether we should confirm the nomination.
// Returns whether we made any changes.
func (s *NominationState) MaybeAdvance(v SlotValue) bool {
	if HasSlotValue(s.Z, v) {
		// We already confirmed this, so we can't do anything more
		return false
	}
	changed := false
	// Tally which nodes have voted-or-accepted v and which have accepted it,
	// counting our own X/Y membership first.
	votedOrAccepted := []string{}
	accepted := []string{}
	if HasSlotValue(s.X, v) {
		votedOrAccepted = append(votedOrAccepted, s.publicKey.String())
	}
	if HasSlotValue(s.Y, v) {
		accepted = append(accepted, s.publicKey.String())
	}
	for node, m := range s.N {
		if HasSlotValue(m.Acc, v) {
			// An acceptance counts for both tallies.
			votedOrAccepted = append(votedOrAccepted, node)
			accepted = append(accepted, node)
			continue
		}
		if HasSlotValue(m.Nom, v) {
			votedOrAccepted = append(votedOrAccepted, node)
		}
	}
	// The rules for accepting are on page 13, section 5.3
	// Rule 1: if a quorum has either voted for the nomination or accepted the
	// nomination, we accept it.
	// Rule 2: if a blocking set for us accepts the nomination, we accept it.
	accept := MeetsQuorum(s, votedOrAccepted) || s.D.BlockedBy(accepted)
	if accept && !HasSlotValue(s.Y, v) {
		// Accept this value
		s.Logf("accepts the nomination of %s", util.Shorten(string(v)))
		changed = true
		AssertNoDupes(s.Y)
		s.Y = append(s.Y, v)
		// Our own acceptance may complete the quorum needed to confirm below.
		accepted = append(accepted, s.publicKey.String())
		AssertNoDupes(s.Y)
	}
	// We confirm once a quorum has accepted
	if MeetsQuorum(s, accepted) {
		s.Logf("confirms the nomination of %s", util.Shorten(string(v)))
		changed = true
		s.Z = append(s.Z, v)
	}
	return changed
}
// Handle processes an incoming nomination message from a peer node. Stale or
// duplicate messages (ones that carry no new Nom/Acc entries relative to the
// peer's last message) are ignored. New nominations we can validate are added
// to our own voted set, and every newly-seen value is run through MaybeAdvance.
func (s *NominationState) Handle(node string, m *NominationMessage) {
	s.received++
	// What nodes we have seen new information about
	touched := []SlotValue{}
	// Check if there's anything new
	old, ok := s.N[node]
	var oldLenNom, oldLenAcc int
	if ok {
		oldLenNom = len(old.Nom)
		oldLenAcc = len(old.Acc)
	}
	// Messages only ever grow their Nom/Acc lists, so a shorter list means
	// the message is older than what we already have.
	if len(m.Nom) < oldLenNom {
		s.Logf("%s sent a stale message: %v", node, m)
		return
	}
	if len(m.Acc) < oldLenAcc {
		s.Logf("%s sent a stale message: %v", node, m)
		return
	}
	if len(m.Nom) == oldLenNom && len(m.Acc) == oldLenAcc {
		// It's just a dupe
		return
	}
	// Update our most-recent-message
	// s.Logf("got message from %s: %s", util.Shorten(node), m)
	s.N[node] = m
	// Only the tail entries past the previously-seen lengths are new.
	for i := oldLenNom; i < len(m.Nom); i++ {
		value := m.Nom[i]
		if !HasSlotValue(touched, value) {
			touched = append(touched, value)
		}
		// If we don't have a candidate, and the value is valid,
		// we can support this new nomination
		if !HasSlotValue(s.X, value) && s.values.ValidateValue(value) {
			s.Logf("supports the nomination of %s", util.Shorten(string(value)))
			s.X = append(s.X, value)
		}
	}
	for i := oldLenAcc; i < len(m.Acc); i++ {
		if !HasSlotValue(touched, m.Acc[i]) {
			touched = append(touched, m.Acc[i])
		}
	}
	// Re-evaluate accept/confirm status for every value we saw new info about.
	for _, v := range touched {
		s.MaybeAdvance(v)
	}
}
func (s *NominationState) Message(slot int, qs *QuorumSlice) *NominationMessage {
return &NominationMessage{
I: slot,
Nom: s.X,
Acc: s.Y,
D: qs,
}
} | consensus/nomination_state.go | 0.73659 | 0.531696 | nomination_state.go | starcoder |
package trix
// NodeList represents a list of pointers to nodes.
type NodeList []*Node
// ConvertValues applies the conversion function to each of the NodeList's
// nodes that match one of the specified keys, and replaces its value with the
// one returned. With no keys given, every node is converted. The list itself
// is returned to allow chaining.
func (nodes NodeList) ConvertValues(conv func(*Node) Value, keys ...string) NodeList {
	if nodes == nil {
		return nodes
	}
	match := func(node *Node) bool {
		if len(keys) == 0 {
			return true
		}
		for _, key := range keys {
			if node.Key == key {
				return true
			}
		}
		return false
	}
	for _, node := range nodes {
		if match(node) {
			node.Value = conv(node)
		}
	}
	return nodes
}
// ValuesToString converts the values of the nodes matching keys to string.
func (nodes NodeList) ValuesToString(keys ...string) NodeList {
	toString := func(node *Node) Value { return node.GetString() }
	return nodes.ConvertValues(toString, keys...)
}
// ValuesToInt converts the values of the nodes matching keys to int.
func (nodes NodeList) ValuesToInt(keys ...string) NodeList {
	toInt := func(node *Node) Value { return node.GetInt() }
	return nodes.ConvertValues(toInt, keys...)
}
// ValuesToFloat converts the values of the nodes matching keys to float.
func (nodes NodeList) ValuesToFloat(keys ...string) NodeList {
	toFloat := func(node *Node) Value { return node.GetFloat() }
	return nodes.ConvertValues(toFloat, keys...)
}
// ValuesToBool converts the values of the nodes matching keys to bool.
func (nodes NodeList) ValuesToBool(keys ...string) NodeList {
	toBool := func(node *Node) Value { return node.GetBool() }
	return nodes.ConvertValues(toBool, keys...)
}
// ValuesToDuration converts the values of the nodes matching keys to Duration.
func (nodes NodeList) ValuesToDuration(keys ...string) NodeList {
	toDuration := func(node *Node) Value { return node.GetDuration() }
	return nodes.ConvertValues(toDuration, keys...)
}
// ForEach runs the specified callback on each node, in order, and returns the
// slice of results.
func (nodes NodeList) ForEach(cb func(node *Node) Value) []Value {
	out := make([]Value, 0, len(nodes))
	for _, node := range nodes {
		out = append(out, cb(node))
	}
	return out
}
// Filter returns the nodes for which the callback returns true, preserving order.
func (nodes NodeList) Filter(cb func(node *Node) bool) NodeList {
	kept := make(NodeList, 0, len(nodes))
	for i := range nodes {
		if cb(nodes[i]) {
			kept = append(kept, nodes[i])
		}
	}
	return kept
}
// FilterByValue returns the subset of the NodeList whose value equals the
// specified one.
func (nodes NodeList) FilterByValue(value Value) NodeList {
	hasValue := func(node *Node) bool { return node.Value == value }
	return nodes.Filter(hasValue)
}
// First returns the first node from the list, or nil if the list is empty.
func (nodes NodeList) First() *Node {
if len(nodes) == 0 {
return nil
}
return nodes[0]
} | nodelist.go | 0.824744 | 0.680122 | nodelist.go | starcoder |
package geom
import "math"
// PointEmptyCoordHex is the hex representation of a NaN that represents
// an empty coord in a shape.
const PointEmptyCoordHex = 0x7FF8000000000000
// PointEmptyCoord is the NaN float64 representation of the empty coordinate.
func PointEmptyCoord() float64 {
return math.Float64frombits(PointEmptyCoordHex)
}
// A Point represents a single point. It embeds Geom0 for the shared
// layout/stride/flat-coordinate storage and SRID handling.
type Point struct {
	Geom0
}
// NewPoint allocates a new Point with layout l and all coordinate values zero.
func NewPoint(l Layout) *Point {
	return NewPointFlat(l, make([]float64, l.Stride()))
}
// NewPointEmpty allocates a new Point with no coordinates.
func NewPointEmpty(l Layout) *Point {
	return NewPointFlat(l, nil)
}
// NewPointFlat allocates a new Point with layout l and flat coordinates FlatCoord.
func NewPointFlat(l Layout, FlatCoord []float64) *Point {
	g := &Point{}
	g.Lay = l
	g.Strd = l.Stride()
	g.FlatCoord = FlatCoord
	return g
}
// NewPointFlatMaybeEmpty returns a new point, treating the point as empty when
// every coordinate carries the empty-coordinate NaN bit pattern (or there are
// no coordinates at all).
func NewPointFlatMaybeEmpty(Lay Layout, FlatCoord []float64) *Point {
	for _, coord := range FlatCoord {
		if math.Float64bits(coord) != PointEmptyCoordHex {
			// At least one real coordinate: construct a normal point.
			return NewPointFlat(Lay, FlatCoord)
		}
	}
	return NewPointEmpty(Lay)
}
// Area returns g's area, i.e. zero (a point has no area).
func (g *Point) Area() float64 {
	return 0
}
// Clone returns a copy of g that does not alias g.
// The copy is produced by the generated deriveClonePoint helper.
func (g *Point) Clone() *Point {
	return deriveClonePoint(g)
}
// Length returns the length of g, i.e. zero (a point has no length).
func (g *Point) Length() float64 {
	return 0
}
// MustSetCoords is like SetCoords but panics on any error.
func (g *Point) MustSetCoords(coords Coord) *Point {
	Must(g.SetCoords(coords))
	return g
}
// SetCoords sets the coordinates of g, returning g for chaining or an error
// when the coordinates are invalid for g's layout.
func (g *Point) SetCoords(coords Coord) (*Point, error) {
	err := g.setCoords(coords)
	if err != nil {
		return nil, err
	}
	return g, nil
}
// SetSRID sets the SRID of g and returns g for chaining.
func (g *Point) SetSRID(Srid int) *Point {
	g.Srid = Srid
	return g
}
// Swap swaps the values of g and g2.
func (g *Point) Swap(g2 *Point) {
	tmp := *g
	*g = *g2
	*g2 = tmp
}
// X returns g's X-coordinate.
func (g *Point) X() float64 {
return g.FlatCoord[0]
}
// Y returns g's Y-coordinate.
func (g *Point) Y() float64 {
return g.FlatCoord[1]
}
// Z returns g's Z-coordinate, or zero if g has no Z-coordinate.
func (g *Point) Z() float64 {
zIndex := g.Lay.ZIndex()
if zIndex == -1 {
return 0
}
return g.FlatCoord[zIndex]
}
// M returns g's M-coordinate, or zero if g has no M-coordinate.
func (g *Point) M() float64 {
mIndex := g.Lay.MIndex()
if mIndex == -1 {
return 0
}
return g.FlatCoord[mIndex]
} | point.go | 0.859384 | 0.502808 | point.go | starcoder |
package aduket
import (
"encoding/json"
"encoding/xml"
"net/http"
"testing"
"github.com/stretchr/testify/assert"
)
// AssertStringBodyEqual asserts that the recorded raw request data equals
// expectedBody.
// NOTE(review): this compares against r.Data while the JSON/XML assertions
// below compare against r.Body — confirm the two fields are intentionally
// distinct on RequestRecorder.
func (r RequestRecorder) AssertStringBodyEqual(t *testing.T, expectedBody string) bool {
	return assert.Equal(t, expectedBody, string(r.Data))
}
// AssertJSONBodyEqual asserts that the recorded request body equals the JSON
// encoding of expectedBody. If expectedBody cannot be marshaled, the test is
// failed immediately and false is returned (previously the comparison still
// ran against nil bytes, producing a confusing secondary assertion failure).
func (r RequestRecorder) AssertJSONBodyEqual(t *testing.T, expectedBody interface{}) bool {
	expectedBodyBytes, err := json.Marshal(expectedBody)
	if err != nil {
		t.Error("expected body could not marshaled to json")
		return false
	}
	return assert.Equal(t, string(expectedBodyBytes), string(r.Body))
}
// AssertXMLBodyEqual asserts that the recorded request body equals the XML
// encoding of expectedXMLBody, with the same fail-fast marshal handling.
func (r RequestRecorder) AssertXMLBodyEqual(t *testing.T, expectedXMLBody interface{}) bool {
	expectedBodyBytes, err := xml.Marshal(expectedXMLBody)
	if err != nil {
		t.Error("expected body could not marshaled to xml")
		return false
	}
	return assert.Equal(t, string(expectedBodyBytes), string(r.Body))
}
// AssertParamEqual asserts that the recorded path parameter paramName has the
// value paramValue.
func (r RequestRecorder) AssertParamEqual(t *testing.T, paramName, paramValue string) bool {
	actual := r.Params[paramName]
	return assert.Equal(t, paramValue, actual)
}
// AssertQueryParamEqual asserts that the recorded query parameter has exactly
// the given values.
func (r RequestRecorder) AssertQueryParamEqual(t *testing.T, queryParamName string, queryParamValues []string) bool {
	actual := r.QueryParams[queryParamName]
	return assert.Equal(t, queryParamValues, actual)
}
// AssertFormParamEqual asserts that the recorded form parameter has exactly
// the given values.
func (r RequestRecorder) AssertFormParamEqual(t *testing.T, formParamName string, formValues []string) bool {
	actual := r.FormParams[formParamName]
	return assert.Equal(t, formValues, actual)
}
// AssertHeaderContains asserts that every expected header key/value pair is
// present in the recorded request headers.
func (r RequestRecorder) AssertHeaderContains(t *testing.T, expectedHeader http.Header) bool {
	contained := isHeaderContains(expectedHeader, r.Header)
	return assert.True(t, contained)
}
// AssertNoRequest asserts that no request was received by the recorder.
func (r RequestRecorder) AssertNoRequest(t *testing.T) bool {
	received := r.isRequestReceived
	return assert.False(t, received)
}
func isHeaderContains(expectedHeader, actualHeader http.Header) bool {
assertionResult := true
for key, value := range expectedHeader {
headerValue := actualHeader[key]
assertionResult = assertionResult && assert.ObjectsAreEqualValues(headerValue, value)
}
return assertionResult
}
func isJSONEqual(expectedBody interface{}, actualBody Body) (bool, error) {
expectedBytes, err := json.Marshal(expectedBody)
return assert.ObjectsAreEqualValues(expectedBytes, actualBody), err
}
func isXMLEqual(expectedBody interface{}, actualBody Body) (bool, error) {
expectedBytes, err := xml.Marshal(expectedBody)
return assert.ObjectsAreEqualValues(expectedBytes, actualBody), err
} | assert.go | 0.758868 | 0.604428 | assert.go | starcoder |
package constraint
import (
"fmt"
"reflect"
"github.com/jt0/gomer/flect"
"github.com/jt0/gomer/gomerr"
)
// Length determines whether the value's length is either between (inclusively)
// two provided values (a min and max) or equal to a single value (internally:
// min = max). This tests for min <= len(value) <= max. The value's type can be
// one of Array, Chan, Map, Slice, or String. A nil bound on one side of the
// two-value form leaves that side unconstrained. A single nil value, two nil
// values, or any other argument count yields a ConfigurationError constraint
// (previously a single nil value caused a nil-pointer panic).
func Length(values ...*uint64) Constraint {
	switch len(values) {
	case 1:
		if values[0] == nil {
			return ConfigurationError("'Length' constraint requires a non-nil input value")
		}
		return length("LengthEquals", *values[0], values[0], values[0])
	case 2:
		if values[0] != nil {
			if values[1] != nil {
				return length("LengthBetween", []interface{}{*values[0], *values[1]}, values[0], values[1])
			} else {
				return MinLength(values[0])
			}
		} else if values[1] != nil {
			return MaxLength(values[1])
		}
		fallthrough
	default:
		return ConfigurationError(fmt.Sprintf("'Length' constraint requires 1 or 2 non-nil input values, received %d", len(values)))
	}
}
// MinLength determines whether the value's length is greater than or equal to the min value provided.
// Stated explicitly, this tests for min <= len(value). The value's type can be one of Array, Chan,
// Map, Slice, or String. Any other type will result in a false value from the constraint.
// NOTE(review): unlike Length, the raw pointer (not the dereferenced value) is
// passed as the constraint's parameter payload — confirm that is intended.
func MinLength(min *uint64) Constraint {
	return length("LengthMin", min, min, nil)
}
// MaxLength determines whether the value's length is less than or equal to the max value provided.
// Stated explicitly, this tests for len(value) <= max. The value's type can be one of Array, Chan,
// Map, Slice, or String. Any other type will result in a false value from the constraint.
func MaxLength(max *uint64) Constraint {
	return length("LengthMax", max, nil, max)
}
var (
	uintZero = uint64(0)
	uintOne  = uint64(1)
	// Empty requires len(value) == 0; NonEmpty requires len(value) >= 1.
	Empty    = length("Empty", nil, nil, &uintZero)
	NonEmpty = length("NonEmpty", nil, &uintOne, nil)
	// zeroLength stands in for nil/unreadable test values so they are
	// treated as having length zero.
	zeroLength = reflect.ValueOf([]interface{}{})
)
// length builds the shared length constraint. A nil min or max leaves that
// bound unconstrained; a nil/unreadable test value is treated as length zero.
func length(lengthType string, lengthParams interface{}, min, max *uint64) Constraint {
	return New(lengthType, lengthParams, func(toTest interface{}) gomerr.Gomerr {
		ttv, ok := flect.ReadableIndirectValue(toTest)
		if !ok {
			ttv = zeroLength
		}
		if !lenable(ttv.Kind()) {
			return gomerr.Unprocessable("Test value must be one of Array, Chan, Map, Slice, or String (or pointer to one of these)", toTest)
		}
		ttLen := uint64(ttv.Len())
		tooShort := min != nil && ttLen < *min
		tooLong := max != nil && ttLen > *max
		if tooShort || tooLong {
			return NotSatisfied(toTest)
		}
		return nil
	})
}
func lenable(kind reflect.Kind) bool {
return kind == reflect.Array || kind == reflect.Chan || kind == reflect.Map || kind == reflect.Slice || kind == reflect.String
} | constraint/length.go | 0.744006 | 0.57681 | length.go | starcoder |
package problem9
import (
"math"
)
/**
* Special Pythagorean triplet
*
* https://projecteuler.net/problem=9
* A Pythagorean triplet is a set of three natural numbers, a < b < c, for which,
* a^2 + b^2 = c^2
* For example, 3^2 + 4^2 = 9 + 16 = 25 = 5^2.
* There exists exactly one Pythagorean triplet for which a + b + c = 1000.
* Find the product abc.
*
* http://odz.sakura.ne.jp/projecteuler/index.php?cmd=read&page=Problem%209
* ピタゴラス数(ピタゴラスの定理を満たす自然数)とは a < b < c で以下の式を満たす数の組である.
* a^2 + b^2 = c^2
* 例えば, 3^2 + 4^2 = 9 + 16 = 25 = 5^2 である.
* a + b + c = 1000 となるピタゴラスの三つ組が一つだけ存在する.
* これらの積 abc を計算しなさい.
*
* Contents of Project Euler are licenced under a Creative Commons Licence: Attribution-NonCommercial-ShareAlike 2.0 UK: England & Wales.
* http://creativecommons.org/licenses/by-nc-sa/2.0/uk/
*/
// Answer0 returns the product a*b*c of the Pythagorean triplet (a < b < c)
// with a + b + c == s, or 0 if no such triplet exists. Brute force over a and b.
// Fixes: c was computed as 1000-a-b instead of s-a-b (the perimeter parameter
// was ignored), and the loop bounds excluded valid triplets for some
// perimeters (e.g. s=12 with triplet 3,4,5).
func Answer0(s int64) int64 {
	// a < b < c together with a+b+c == s implies 3a < s and 2b < s-a.
	for a := int64(1); 3*a < s; a++ {
		for b := a + 1; 2*b < s-a; b++ {
			c := s - a - b
			if a*a+b*b == c*c {
				return a * b * c
			}
		}
	}
	return 0
}
// Answer1 returns the answer to this problem using Euclid's parametrization of
// Pythagorean triples instead of brute force:
// a = (m^2 - n^2)d, b = 2mnd, c = (m^2 + n^2)d with m > n >= 1, gcd(m,n)=1,
// so a + b + c = 2m(m + n)d = 2mkd where k = m + n, m < k < 2m, gcd(m,k) = 1.
func Answer1(s int64) int64 {
	// a = (m^1 - n^2)d, b = 2mnd, c = (m^2 + n^2)d
	// a + b + c = 2m(m + n)d = 2mkd
	// k = m + n; m < k < 2m and gcd(m,k) = 1
	s2 := s / 2
	// m*k divides s/2 and k > m, so m < sqrt(s/2).
	mmax := int64(math.Ceil(math.Sqrt(float64(s2)))) - 1
	for m := int64(2); m <= mmax; m++ {
		if s2%m == 0 {
			sm := s2 / m
			for sm&0x01 == 0 { // removing all factors 2
				sm >>= 1
			}
			// k must have opposite parity from m for a primitive triple,
			// so step k by 2 from the first valid odd/even candidate.
			var k int64
			if m&0x01 != 0 { //odd
				k = m + 2
			} else { //even
				k = m + 1
			}
			for ; k < 2*m && k <= sm; k += 2 {
				if sm%k == 0 && gcd(k, m) == 1 {
					d := s2 / (k * m)
					n := k - m
					a := d * (m*m - n*n)
					b := 2 * d * m * n
					c := d * (m*m + n*n)
					//fmt.Println(a, b, c, a*b*c)
					return a * b * c
				}
			}
		}
	}
	return 0
}
// gcd returns the greatest common divisor of a and b (Euclidean algorithm),
// written iteratively rather than recursively.
func gcd(a, b int64) int64 {
	if a < b {
		a, b = b, a
	}
	for b != 0 {
		a, b = b, a%b
	}
	return a
}
/* Copyright 2018 Spiegel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/ | problem-9/answer.go | 0.773559 | 0.431584 | answer.go | starcoder |
package plot
import (
"math"
"sort"
)
// Violin implements violin plot using cubic-pulse for kernel function.
type Violin struct {
	Style
	// Label names this element.
	Label string
	// Side scales the density profile; 0 draws a mirrored (two-sided) violin.
	Side float64
	// Kernel is the smoothing half-width in data units; NaN selects an
	// automatic width in Draw.
	Kernel Length
	// Normalized scales the profile so its widest point has unit width;
	// otherwise the profile is scaled by kernel/sample-count.
	Normalized bool
	// Data holds the sample values, kept sorted ascending by NewViolin.
	Data []float64 // sorted
}
// NewViolin creates a new violin element using the specified values.
// The values are copied and sorted; the caller's slice is left untouched.
func NewViolin(label string, values []float64) *Violin {
	sorted := make([]float64, len(values))
	copy(sorted, values)
	sort.Float64s(sorted)
	v := &Violin{
		Label:      label,
		Side:       1,
		Kernel:     math.NaN(),
		Normalized: true,
		Data:       sorted,
	}
	return v
}
// Stats calculates element statistics: the minimum, the middle element of the
// sorted data, and the maximum. All three are NaN when there is no data.
func (line *Violin) Stats() Stats {
	nan := math.NaN()
	lo, mid, hi := nan, nan, nan
	if n := len(line.Data); n > 0 {
		lo = line.Data[0]
		mid = line.Data[n/2]
		hi = line.Data[n-1]
	}
	return Stats{
		Min:    Point{-1, lo},
		Center: Point{0, mid}, // todo, figure out how to get the 50% of density plot
		Max:    Point{1, hi},
	}
}
// Draw draws the element to canvas. It sweeps the vertical axis in half-pixel
// steps, estimating sample density at each step with a cubic-pulse kernel over
// the sorted Data, then renders the resulting profile as a polygon — mirrored
// when Side == 0, one-sided and scaled by Side otherwise.
func (line *Violin) Draw(plot *Plot, canvas Canvas) {
	x, y := plot.X, plot.Y
	size := canvas.Bounds().Size()
	ymin, ymax := y.ToCanvas(y.Min, 0, size.Y), y.ToCanvas(y.Max, 0, size.Y)
	if ymin > ymax {
		ymin, ymax = ymax, ymin
	}
	kernel := line.Kernel
	if math.IsNaN(kernel) {
		// default to 4px wide kernel
		kernel = 4 * (y.Max - y.Min) / size.Y
	}
	invkernel := 1 / kernel
	points := []Point{}
	if line.Fill != nil || line.Side == 0 {
		// Anchor the polygon on the axis so the fill (or mirror) closes.
		points = append(points, Point{0, ymin})
	}
	index := 0
	previousLow := math.Inf(-1)
	maxx := 0.0
	for screenY := 0.0; screenY < size.Y; screenY += 0.5 {
		center := y.FromCanvas(screenY, 0, size.Y)
		low, high := center-kernel, center+kernel
		// Data is sorted, so the start index only moves forward as long as
		// the window's low bound is non-decreasing; otherwise re-search.
		if low < previousLow {
			index = sort.SearchFloat64s(line.Data, low)
		} else {
			for ; index < len(line.Data); index++ {
				if line.Data[index] >= low {
					break
				}
			}
		}
		previousLow = low
		// Sum kernel contributions of all samples inside [low, high].
		sample := 0.0
		for _, value := range line.Data[index:] {
			if value > high {
				break
			}
			sample += cubicPulse(center, kernel, invkernel, value)
		}
		maxx = math.Max(maxx, sample)
		points = append(points, Point{
			X: sample,
			Y: screenY,
		})
	}
	if line.Fill != nil || line.Side == 0 {
		points = append(points, Point{0, ymax})
	}
	// Normalized: widest point gets unit width; otherwise scale by
	// kernel width over sample count.
	scale := kernel / float64(len(line.Data))
	if line.Normalized {
		scale = 1 / maxx
	}
	if line.Side == 0 {
		// Mirror the profile across the axis, appending the reflected
		// points in reverse so the polygon stays a closed loop.
		otherSide := make([]Point, len(points))
		for i := range points {
			k := len(points) - i - 1
			otherSide[k] = points[i]
			points[i].X = x.ToCanvas(points[i].X*scale, 0, size.X)
			otherSide[k].X = x.ToCanvas(-otherSide[k].X*scale, 0, size.X)
		}
		points = append(points, otherSide...)
	} else {
		scale *= line.Side
		for i := range points {
			points[i].X = x.ToCanvas(points[i].X*scale, 0, size.X)
		}
	}
	if !line.Style.IsZero() {
		canvas.Poly(points, &line.Style)
	} else {
		canvas.Poly(points, &plot.Theme.Line)
	}
}
package address
// country holds the address-format metadata for one country, including its
// field requirements and localized administrative-area hierarchy.
type country struct {
	ID                         string
	Name                       string
	DefaultLanguage            string
	PostCodePrefix             string
	PostCodeRegex              postCodeRegex
	Format                     string
	LatinizedFormat            string
	AdministrativeAreaNameType FieldName
	LocalityNameType           FieldName
	DependentLocalityNameType  FieldName
	PostCodeNameType           FieldName
	AllowedFields              map[Field]struct{}
	RequiredFields             map[Field]struct{}
	// Upper lists fields rendered in upper case.
	Upper map[Field]struct{}
	// AdministrativeAreas is keyed by language code.
	AdministrativeAreas map[string][]administrativeArea
}
// postCodeRegex validates post codes, with optional per-subdivision refinements.
type postCodeRegex struct {
	regex            string
	subdivisionRegex map[string]postCodeRegex
}
// administrativeArea is a first-level subdivision (state, province, ...).
type administrativeArea struct {
	ID         string
	Name       string
	PostalKey  string
	Localities []locality
}
// locality is a second-level subdivision (city, ...).
type locality struct {
	ID                  string
	Name                string
	DependentLocalities []dependentLocality
}
// dependentLocality is a third-level subdivision (district, suburb, ...).
type dependentLocality struct {
	ID   string
	Name string
}
// data maps a country code to its country record.
type data map[string]country
// getCountry returns the record for countryCode, filling any unset fields
// from the "ZZ" (default) record.
// NOTE(review): the receiver is ignored; lookups always go to the
// package-level generated table — confirm this is intended.
func (d data) getCountry(countryCode string) country {
	c := generated[countryCode]
	defaults := generated["ZZ"]
	if c.Format == "" {
		c.Format = defaults.Format
	}
	if c.AdministrativeAreaNameType == 0 {
		c.AdministrativeAreaNameType = defaults.AdministrativeAreaNameType
	}
	if c.LocalityNameType == 0 {
		c.LocalityNameType = defaults.LocalityNameType
	}
	if c.DependentLocalityNameType == 0 {
		c.DependentLocalityNameType = defaults.DependentLocalityNameType
	}
	if c.PostCodeNameType == 0 {
		c.PostCodeNameType = defaults.PostCodeNameType
	}
	if len(c.AllowedFields) <= 0 {
		c.AllowedFields = defaults.AllowedFields
	}
	if len(c.RequiredFields) <= 0 {
		c.RequiredFields = defaults.RequiredFields
	}
	if len(c.Upper) <= 0 {
		c.Upper = defaults.Upper
	}
	return c
}
// hasCountry reports whether countryCode exists in the data set.
// NOTE(review): like getCountry, the receiver is ignored and the
// package-level generated table is consulted instead.
func (d data) hasCountry(countryCode string) bool {
	_, ok := generated[countryCode]
	return ok
}
// getAdministrativeAreaName returns the localized name of the given
// administrative area, or "" when it is unknown.
func (d data) getAdministrativeAreaName(countryCode, administrativeAreaID, language string) string {
	lang := d.normalizeLanguage(countryCode, language)
	for _, area := range d.getCountry(countryCode).AdministrativeAreas[lang] {
		if area.ID == administrativeAreaID {
			return area.Name
		}
	}
	return ""
}
// getAdministrativeAreaPostalKey returns the postal key of the given
// administrative area in the default language, or "" when it is unknown.
func (d data) getAdministrativeAreaPostalKey(countryCode, administrativeAreaID string) string {
	lang := d.normalizeLanguage(countryCode, "")
	for _, area := range d.getCountry(countryCode).AdministrativeAreas[lang] {
		if area.ID == administrativeAreaID {
			return area.PostalKey
		}
	}
	return ""
}
// getLocalityName returns the localized name of the given locality,
// or "" when it is unknown.
func (d data) getLocalityName(countryCode, administrativeAreaID, localityID, language string) string {
	lang := d.normalizeLanguage(countryCode, language)
	for _, area := range d.getCountry(countryCode).AdministrativeAreas[lang] {
		if area.ID != administrativeAreaID {
			continue
		}
		for _, loc := range area.Localities {
			if loc.ID == localityID {
				return loc.Name
			}
		}
	}
	return ""
}
// getDependentLocalityName returns the localized name of the given dependent
// locality, or "" when it is unknown.
func (d data) getDependentLocalityName(countryCode, administrativeAreaID, localityID, dependentLocalityID, language string) string {
	lang := d.normalizeLanguage(countryCode, language)
	for _, area := range d.getCountry(countryCode).AdministrativeAreas[lang] {
		if area.ID != administrativeAreaID {
			continue
		}
		for _, loc := range area.Localities {
			if loc.ID != localityID {
				continue
			}
			for _, dep := range loc.DependentLocalities {
				if dep.ID == dependentLocalityID {
					return dep.Name
				}
			}
		}
	}
	return ""
}
func (d data) normalizeLanguage(countryCode, language string) string {
country := d.getCountry(countryCode)
if _, ok := country.AdministrativeAreas[language]; ok {
return language
}
return country.DefaultLanguage
} | data.go | 0.51562 | 0.407805 | data.go | starcoder |
package tuple
import (
"fmt"
"golang.org/x/exp/constraints"
)
// T4 is a tuple type holding 4 generic values.
type T4[Ty1, Ty2, Ty3, Ty4 any] struct {
	V1 Ty1
	V2 Ty2
	V3 Ty3
	V4 Ty4
}
// Len returns the number of values held by the tuple (always 4).
func (t T4[Ty1, Ty2, Ty3, Ty4]) Len() int {
	return 4
}
// Values returns the values held by the tuple.
func (t T4[Ty1, Ty2, Ty3, Ty4]) Values() (Ty1, Ty2, Ty3, Ty4) {
	return t.V1, t.V2, t.V3, t.V4
}
// Array returns an array of the tuple values, boxed as any.
func (t T4[Ty1, Ty2, Ty3, Ty4]) Array() [4]any {
	return [4]any{
		t.V1,
		t.V2,
		t.V3,
		t.V4,
	}
}
// Slice returns a slice of the tuple values, boxed as any.
func (t T4[Ty1, Ty2, Ty3, Ty4]) Slice() []any {
	a := t.Array()
	return a[:]
}
// String returns the string representation of the tuple,
// formatted by the package's tupString helper.
func (t T4[Ty1, Ty2, Ty3, Ty4]) String() string {
	return tupString(t.Slice())
}
// GoString returns a Go-syntax representation of the tuple,
// formatted by the package's tupGoString helper.
func (t T4[Ty1, Ty2, Ty3, Ty4]) GoString() string {
	return tupGoString(t.Slice())
}
// New4 creates a new tuple holding 4 generic values.
func New4[Ty1, Ty2, Ty3, Ty4 any](v1 Ty1, v2 Ty2, v3 Ty3, v4 Ty4) T4[Ty1, Ty2, Ty3, Ty4] {
	var t T4[Ty1, Ty2, Ty3, Ty4]
	t.V1 = v1
	t.V2 = v2
	t.V3 = v3
	t.V4 = v4
	return t
}
// FromArray4 returns a tuple from an array of length 4.
// If any of the values can not be converted to the generic type, an error is returned.
func FromArray4[Ty1, Ty2, Ty3, Ty4 any](arr [4]any) (T4[Ty1, Ty2, Ty3, Ty4], error) {
	var zero T4[Ty1, Ty2, Ty3, Ty4]
	first, ok := arr[0].(Ty1)
	if !ok {
		return zero, fmt.Errorf("value at array index 0 expected to have type %s but has type %T", typeName[Ty1](), arr[0])
	}
	second, ok := arr[1].(Ty2)
	if !ok {
		return zero, fmt.Errorf("value at array index 1 expected to have type %s but has type %T", typeName[Ty2](), arr[1])
	}
	third, ok := arr[2].(Ty3)
	if !ok {
		return zero, fmt.Errorf("value at array index 2 expected to have type %s but has type %T", typeName[Ty3](), arr[2])
	}
	fourth, ok := arr[3].(Ty4)
	if !ok {
		return zero, fmt.Errorf("value at array index 3 expected to have type %s but has type %T", typeName[Ty4](), arr[3])
	}
	return New4(first, second, third, fourth), nil
}
// FromArray4X returns a tuple from an array of length 4.
// If any of the values can not be converted to the generic type, the function panics.
func FromArray4X[Ty1, Ty2, Ty3, Ty4 any](arr [4]any) T4[Ty1, Ty2, Ty3, Ty4] {
	return FromSlice4X[Ty1, Ty2, Ty3, Ty4](arr[:])
}
// FromSlice4 returns a tuple from a slice of length 4.
// If the length of the slice doesn't match, or any of the values can not be converted to the generic type, an error is returned.
func FromSlice4[Ty1, Ty2, Ty3, Ty4 any](values []any) (T4[Ty1, Ty2, Ty3, Ty4], error) {
	var zero T4[Ty1, Ty2, Ty3, Ty4]
	if len(values) != 4 {
		return zero, fmt.Errorf("slice length %d must match number of tuple values 4", len(values))
	}
	first, ok := values[0].(Ty1)
	if !ok {
		return zero, fmt.Errorf("value at slice index 0 expected to have type %s but has type %T", typeName[Ty1](), values[0])
	}
	second, ok := values[1].(Ty2)
	if !ok {
		return zero, fmt.Errorf("value at slice index 1 expected to have type %s but has type %T", typeName[Ty2](), values[1])
	}
	third, ok := values[2].(Ty3)
	if !ok {
		return zero, fmt.Errorf("value at slice index 2 expected to have type %s but has type %T", typeName[Ty3](), values[2])
	}
	fourth, ok := values[3].(Ty4)
	if !ok {
		return zero, fmt.Errorf("value at slice index 3 expected to have type %s but has type %T", typeName[Ty4](), values[3])
	}
	return New4(first, second, third, fourth), nil
}
// FromSlice4X returns a tuple from a slice of length 4.
// If the length of the slice doesn't match, or any of the values can not be converted to the generic type, the function panics.
func FromSlice4X[Ty1, Ty2, Ty3, Ty4 any](values []any) T4[Ty1, Ty2, Ty3, Ty4] {
	if len(values) != 4 {
		panic(fmt.Errorf("slice length %d must match number of tuple values 4", len(values)))
	}
	return New4(values[0].(Ty1), values[1].(Ty2), values[2].(Ty3), values[3].(Ty4))
}
// Equal4 returns whether the host tuple is equal to the other tuple.
// All tuple elements of the host and guest parameters must match the "comparable" built-in constraint.
// To test equality of tuples that hold custom Equalable values, use the Equal4E function.
// To test equality of tuples that hold custom Comparable values, use the Equal4C function.
// Otherwise, use Equal or reflect.DeepEqual to test tuples of any types.
func Equal4[Ty1, Ty2, Ty3, Ty4 comparable](host, guest T4[Ty1, Ty2, Ty3, Ty4]) bool {
	// Compare element-wise with early exit on the first mismatch.
	if host.V1 != guest.V1 {
		return false
	}
	if host.V2 != guest.V2 {
		return false
	}
	if host.V3 != guest.V3 {
		return false
	}
	return host.V4 == guest.V4
}
// Equal4E returns whether the host tuple is semantically equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the Equalable constraint.
// To test equality of tuples that hold built-in "comparable" values, use the Equal4 function.
// To test equality of tuples that hold custom Comparable values, use the Equal4C function.
// Otherwise, use Equal or reflect.DeepEqual to test tuples of any types.
func Equal4E[Ty1 Equalable[Ty1], Ty2 Equalable[Ty2], Ty3 Equalable[Ty3], Ty4 Equalable[Ty4]](host, guest T4[Ty1, Ty2, Ty3, Ty4]) bool {
	// Element-wise Equal with early exit on the first mismatch.
	switch {
	case !host.V1.Equal(guest.V1):
		return false
	case !host.V2.Equal(guest.V2):
		return false
	case !host.V3.Equal(guest.V3):
		return false
	default:
		return host.V4.Equal(guest.V4)
	}
}
// Equal4C returns whether the host tuple is semantically equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To test equality of tuples that hold built-in "comparable" values, use the Equal4 function.
// To test equality of tuples that hold custom Equalable values, use the Equal4E function.
// Otherwise, use Equal or reflect.DeepEqual to test tuples of any types.
func Equal4C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2], Ty3 Comparable[Ty3], Ty4 Comparable[Ty4]](host, guest T4[Ty1, Ty2, Ty3, Ty4]) bool {
	// Equality holds only when every element's CompareTo result is EQ.
	return host.V1.CompareTo(guest.V1).EQ() && host.V2.CompareTo(guest.V2).EQ() && host.V3.CompareTo(guest.V3).EQ() && host.V4.CompareTo(guest.V4).EQ()
}
// Compare4 returns whether the host tuple is semantically less than, equal to, or greater than the guest tuple.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the Compare4C function.
func Compare4[Ty1, Ty2, Ty3, Ty4 constraints.Ordered](host, guest T4[Ty1, Ty2, Ty3, Ty4]) OrderedComparisonResult {
	// Each closure compares one element pair; multiCompare evaluates them
	// lexicographically (lazy, in order).
	byV1 := func() OrderedComparisonResult { return compareOrdered(host.V1, guest.V1) }
	byV2 := func() OrderedComparisonResult { return compareOrdered(host.V2, guest.V2) }
	byV3 := func() OrderedComparisonResult { return compareOrdered(host.V3, guest.V3) }
	byV4 := func() OrderedComparisonResult { return compareOrdered(host.V4, guest.V4) }
	return multiCompare(byV1, byV2, byV3, byV4)
}
// Compare4C returns whether the host tuple is semantically less than, equal to, or greater than the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the Compare4 function.
func Compare4C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2], Ty3 Comparable[Ty3], Ty4 Comparable[Ty4]](host, guest T4[Ty1, Ty2, Ty3, Ty4]) OrderedComparisonResult {
	// Each closure compares one element pair; multiCompare evaluates them
	// lexicographically (lazy, in order).
	byV1 := func() OrderedComparisonResult { return host.V1.CompareTo(guest.V1) }
	byV2 := func() OrderedComparisonResult { return host.V2.CompareTo(guest.V2) }
	byV3 := func() OrderedComparisonResult { return host.V3.CompareTo(guest.V3) }
	byV4 := func() OrderedComparisonResult { return host.V4.CompareTo(guest.V4) }
	return multiCompare(byV1, byV2, byV3, byV4)
}
// LessThan4 returns whether the host tuple is semantically less than the guest tuple.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the LessThan4C function.
func LessThan4[Ty1, Ty2, Ty3, Ty4 constraints.Ordered](host, guest T4[Ty1, Ty2, Ty3, Ty4]) bool {
return Compare4(host, guest).LT()
}
// LessThan4C returns whether the host tuple is semantically less than the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the LessThan4 function.
func LessThan4C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2], Ty3 Comparable[Ty3], Ty4 Comparable[Ty4]](host, guest T4[Ty1, Ty2, Ty3, Ty4]) bool {
return Compare4C(host, guest).LT()
}
// LessOrEqual4 returns whether the host tuple is semantically less than or equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the LessOrEqual4C function.
func LessOrEqual4[Ty1, Ty2, Ty3, Ty4 constraints.Ordered](host, guest T4[Ty1, Ty2, Ty3, Ty4]) bool {
return Compare4(host, guest).LE()
}
// LessOrEqual4C returns whether the host tuple is semantically less than or equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the LessOrEqual4 function.
func LessOrEqual4C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2], Ty3 Comparable[Ty3], Ty4 Comparable[Ty4]](host, guest T4[Ty1, Ty2, Ty3, Ty4]) bool {
return Compare4C(host, guest).LE()
}
// GreaterThan4 returns whether the host tuple is semantically greater than the guest tuple.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the GreaterThan4C function.
func GreaterThan4[Ty1, Ty2, Ty3, Ty4 constraints.Ordered](host, guest T4[Ty1, Ty2, Ty3, Ty4]) bool {
return Compare4(host, guest).GT()
}
// GreaterThan4C returns whether the host tuple is semantically greater than the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the GreaterThan4 function.
func GreaterThan4C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2], Ty3 Comparable[Ty3], Ty4 Comparable[Ty4]](host, guest T4[Ty1, Ty2, Ty3, Ty4]) bool {
return Compare4C(host, guest).GT()
}
// GreaterOrEqual4 returns whether the host tuple is semantically greater than or equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the "Ordered" constraint.
// To compare tuples that hold custom comparable values, use the GreaterOrEqual4C function.
func GreaterOrEqual4[Ty1, Ty2, Ty3, Ty4 constraints.Ordered](host, guest T4[Ty1, Ty2, Ty3, Ty4]) bool {
return Compare4(host, guest).GE()
}
// GreaterOrEqual4C returns whether the host tuple is semantically greater than or equal to the guest tuple.
// All tuple elements of the host and guest parameters must match the Comparable constraint.
// To compare tuples that hold built-in "Ordered" values, use the GreaterOrEqual4 function.
func GreaterOrEqual4C[Ty1 Comparable[Ty1], Ty2 Comparable[Ty2], Ty3 Comparable[Ty3], Ty4 Comparable[Ty4]](host, guest T4[Ty1, Ty2, Ty3, Ty4]) bool {
return Compare4C(host, guest).GE()
} | tuple4.go | 0.801081 | 0.496826 | tuple4.go | starcoder |
package v1
import (
"encoding/json"
)
// IPAssignmentInput struct for IPAssignmentInput
type IPAssignmentInput struct {
	// Address is required by the API and is always serialized under the "address" key.
	Address string `json:"address"`
	// Manageable is optional; a nil pointer means "not set" and the field is omitted from JSON.
	Manageable *bool `json:"manageable,omitempty"`
	// Customdata carries arbitrary key/value metadata; nil means "not set" and the field is omitted from JSON.
	Customdata *map[string]interface{} `json:"customdata,omitempty"`
}
// NewIPAssignmentInput instantiates a new IPAssignmentInput object.
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed.
func NewIPAssignmentInput(address string) *IPAssignmentInput {
	return &IPAssignmentInput{Address: address}
}

// NewIPAssignmentInputWithDefaults instantiates a new IPAssignmentInput object.
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set.
func NewIPAssignmentInputWithDefaults() *IPAssignmentInput {
	return &IPAssignmentInput{}
}
// GetAddress returns the Address field value.
func (o *IPAssignmentInput) GetAddress() string {
	if o == nil {
		return ""
	}
	return o.Address
}

// GetAddressOk returns a tuple with the Address field value
// and a boolean to check if the value has been set.
func (o *IPAssignmentInput) GetAddressOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Address, true
}

// SetAddress sets field value.
func (o *IPAssignmentInput) SetAddress(v string) {
	o.Address = v
}
// GetManageable returns the Manageable field value if set, zero value otherwise.
func (o *IPAssignmentInput) GetManageable() bool {
	if v, ok := o.GetManageableOk(); ok {
		return *v
	}
	return false
}

// GetManageableOk returns a tuple with the Manageable field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *IPAssignmentInput) GetManageableOk() (*bool, bool) {
	if o != nil && o.Manageable != nil {
		return o.Manageable, true
	}
	return nil, false
}

// HasManageable returns a boolean if a field has been set.
func (o *IPAssignmentInput) HasManageable() bool {
	return o != nil && o.Manageable != nil
}

// SetManageable gets a reference to the given bool and assigns it to the Manageable field.
func (o *IPAssignmentInput) SetManageable(v bool) {
	o.Manageable = &v
}
// GetCustomdata returns the Customdata field value if set, zero value otherwise.
func (o *IPAssignmentInput) GetCustomdata() map[string]interface{} {
	if v, ok := o.GetCustomdataOk(); ok {
		return *v
	}
	var zero map[string]interface{}
	return zero
}

// GetCustomdataOk returns a tuple with the Customdata field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *IPAssignmentInput) GetCustomdataOk() (*map[string]interface{}, bool) {
	if o != nil && o.Customdata != nil {
		return o.Customdata, true
	}
	return nil, false
}

// HasCustomdata returns a boolean if a field has been set.
func (o *IPAssignmentInput) HasCustomdata() bool {
	return o != nil && o.Customdata != nil
}

// SetCustomdata gets a reference to the given map[string]interface{} and assigns it to the Customdata field.
func (o *IPAssignmentInput) SetCustomdata(v map[string]interface{}) {
	o.Customdata = &v
}
// MarshalJSON serializes the input: address always, manageable and
// customdata only when set.
func (o IPAssignmentInput) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	// address is required, so it is serialized unconditionally
	// (the generated `if true` guard was vacuous and has been removed).
	toSerialize["address"] = o.Address
	if o.Manageable != nil {
		toSerialize["manageable"] = o.Manageable
	}
	if o.Customdata != nil {
		toSerialize["customdata"] = o.Customdata
	}
	return json.Marshal(toSerialize)
}
// NullableIPAssignmentInput wraps an IPAssignmentInput pointer together with a
// flag recording whether the value has been explicitly set, so that "unset"
// can be distinguished from "set to null" when round-tripping JSON.
type NullableIPAssignmentInput struct {
	value *IPAssignmentInput // the wrapped value; may be nil
	isSet bool               // true once Set or UnmarshalJSON has been called
}

// Get returns the wrapped value (may be nil).
func (v NullableIPAssignmentInput) Get() *IPAssignmentInput {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableIPAssignmentInput) Set(val *IPAssignmentInput) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether a value has been assigned since creation or the last Unset.
func (v NullableIPAssignmentInput) IsSet() bool {
	return v.isSet
}

// Unset clears both the value and the set flag.
func (v *NullableIPAssignmentInput) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableIPAssignmentInput returns a wrapper around val, marked as set.
func NewNullableIPAssignmentInput(val *IPAssignmentInput) *NullableIPAssignmentInput {
	return &NullableIPAssignmentInput{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value; a nil value encodes as JSON null.
func (v NullableIPAssignmentInput) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes into the wrapped value and marks the wrapper as set.
func (v *NullableIPAssignmentInput) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package binutils
// Utility functions to translate native types into bytes sequence and vise versa.
import (
"encoding/binary"
"fmt"
)
// AllocateBytes creates a zero-filled byte slice of the required size.
func AllocateBytes(size int) []byte {
	buf := make([]byte, size)
	return buf
}
// Uint8 translates a single byte from the buffer into a uint8 value.
// Returns an error when the buffer is not exactly one byte long.
func Uint8(data []byte) (uint8, error) {
	if len(data) == Uint8size {
		return data[0], nil
	}
	return 0, ErrExpected1
}

// Int8 translates a single byte from the buffer into an int8 value.
// Returns an error when the buffer is not exactly one byte long.
func Int8(data []byte) (int8, error) {
	if len(data) == Int8size {
		return int8(data[0]), nil
	}
	return 0, ErrExpected1
}
// Uint16 translates 2 bytes from the buffer into a uint16 value using
// big-endian byte order. Returns an error when the buffer is not exactly
// 2 bytes long.
func Uint16(data []byte) (uint16, error) {
	if len(data) == Uint16size {
		return binary.BigEndian.Uint16(data), nil
	}
	return 0, ErrExpected2
}

// Int16 translates 2 bytes from the buffer into an int16 value using
// big-endian byte order. Returns an error when the buffer is not exactly
// 2 bytes long.
func Int16(data []byte) (int16, error) {
	// Reuse the unsigned decoder and reinterpret the bit pattern.
	v, err := Uint16(data)
	return int16(v), err
}
// Uint32 translates 4 bytes from the buffer into a uint32 value using
// big-endian byte order. Returns an error when the buffer is not exactly
// 4 bytes long.
func Uint32(data []byte) (uint32, error) {
	if len(data) == Uint32size {
		return binary.BigEndian.Uint32(data), nil
	}
	return 0, ErrExpected4
}

// Int32 translates 4 bytes from the buffer into an int32 value using
// big-endian byte order. Returns an error when the buffer is not exactly
// 4 bytes long.
func Int32(data []byte) (int32, error) {
	if len(data) != Int32size {
		return 0, ErrExpected4
	}
	bits := binary.BigEndian.Uint32(data)
	return int32(bits), nil
}
// Uint64 translates 8 bytes from the buffer into a uint64 value using
// big-endian byte order. Returns an error when the buffer is not exactly
// 8 bytes long.
func Uint64(data []byte) (uint64, error) {
	if len(data) == Uint64size {
		return binary.BigEndian.Uint64(data), nil
	}
	return 0, ErrExpected8
}

// Int64 translates 8 bytes from the buffer into an int64 value using
// big-endian byte order. Returns an error when the buffer is not exactly
// 8 bytes long.
func Int64(data []byte) (int64, error) {
	if len(data) != Int64size {
		return 0, ErrExpected8
	}
	bits := binary.BigEndian.Uint64(data)
	return int64(bits), nil
}
// Uint8bytes encodes a uint8 value as a one-byte slice.
func Uint8bytes(data uint8) []byte {
	buf := make([]byte, 1)
	buf[0] = data
	return buf
}

// Int8bytes encodes an int8 value as a one-byte slice.
func Int8bytes(data int8) []byte {
	buf := make([]byte, 1)
	buf[0] = uint8(data)
	return buf
}
// Uint16bytes encodes uint16 data using big-endian byte order.
func Uint16bytes(data uint16) []byte {
	// Allocate with Uint16size (was Int16size — a copy-paste inconsistency;
	// both constants denote 2 bytes, see the Uint16/Int16 decoders above).
	d := AllocateBytes(Uint16size)
	binary.BigEndian.PutUint16(d, data)
	return d
}
// Int16bytes adds int16 data to buffer using big-endian bytes order.
func Int16bytes(data int16) []byte {
d := AllocateBytes(Int16size)
binary.BigEndian.PutUint16(d, uint16(data))
return d
}
// Uint32bytes adds uint32 data to buffer using big-endian bytes order.
func Uint32bytes(data uint32) []byte {
d := AllocateBytes(Uint32size)
binary.BigEndian.PutUint32(d, data)
return d
}
// Int32bytes encodes int32 data using big-endian byte order.
func Int32bytes(data int32) []byte {
	// Allocate with Int32size (was Uint32size — a copy-paste inconsistency;
	// both constants denote 4 bytes, see the Int32 decoder above).
	d := AllocateBytes(Int32size)
	binary.BigEndian.PutUint32(d, uint32(data))
	return d
}
// Uint64bytes adds uint64 data to buffer using big-endian bytes order.
func Uint64bytes(data uint64) []byte {
d := AllocateBytes(Uint64size)
binary.BigEndian.PutUint64(d, data)
return d
}
// Int64bytes encodes int64 data using big-endian byte order.
// (Doc previously said "uint64" — copy-paste error.)
func Int64bytes(data int64) []byte {
	// Allocate with Int64size (was Uint64size — a copy-paste inconsistency;
	// both constants denote 8 bytes, see the Int64 decoder above).
	d := AllocateBytes(Int64size)
	binary.BigEndian.PutUint64(d, uint64(data))
	return d
}
// StringBytes makes a zero-terminated string []byte sequence.
func StringBytes(s string) []byte {
	buf := make([]byte, 0, len(s)+1)
	buf = append(buf, s...)
	return append(buf, 0)
}
// String reads a zero-terminated string from []byte sequence
// Returns error if last byte is not 0.
func String(data []byte) (string, error) {
switch {
case len(data) == 0:
return "", fmt.Errorf("0-terminated string: %w", ErrMinimum1)
case data[len(data)-1] != 0:
return "", ErrRequired0T
default:
return string(data[:len(data)-1]), nil
}
} | utils.go | 0.840488 | 0.494751 | utils.go | starcoder |
package threshold
import (
"context"
"time"
"github.com/geo-data/nicer/sample"
)
type (
	// Threshold combines a Sampler with a threshold value and arbitrary name.
	// It can then poll the Sampler and compare metrics against the threshold,
	// triggering alerts when a threshold is crossed.
	Threshold struct {
		Name      string         // The identifier for this threshold.
		Threshold float32        // The threshold value.
		Sampler   sample.Sampler // Sample source.
		Ascending bool           // Whether the threshold is based on increasing values.
	}

	// AlertHandler represents a function that is called whenever a threshold
	// value is crossed. exceeded is true when the threshold was just exceeded
	// and false when the metric dropped back across it.
	AlertHandler func(name string, value float32, exceeded bool)

	// ErrorHandler represents a function that is called whenever a Sampler
	// encounters an error.
	ErrorHandler func(name string, err error)
)
// New instantiates a Threshold with the given name, sample source,
// threshold value, and crossing direction.
func New(name string, sampler sample.Sampler, threshold float32, ascending bool) *Threshold {
	t := Threshold{
		Name:      name,
		Sampler:   sampler,
		Threshold: threshold,
		Ascending: ascending,
	}
	return &t
}
// Poll samples the Threshold Sampler every interval. If the threshold value is
// crossed, AlertHandler is called. If the sampler encounters an error,
// ErrorHandler is called. ctx is passed to the Sampler. Poll does not block.
func (t *Threshold) Poll(ctx context.Context, interval time.Duration, alert AlertHandler, eh ErrorHandler) {
var (
sendAlert func(float32)
lastValue float32 = t.Threshold // Last sample value
)
if t.Ascending {
sendAlert = func(metric float32) {
if lastValue < t.Threshold && metric >= t.Threshold {
alert(t.Name, metric, true)
} else if lastValue >= t.Threshold && metric < t.Threshold {
alert(t.Name, metric, false)
}
}
} else {
sendAlert = func(metric float32) {
if lastValue > t.Threshold && metric <= t.Threshold {
alert(t.Name, metric, true)
} else if lastValue <= t.Threshold && metric > t.Threshold {
alert(t.Name, metric, false)
}
}
}
handler := func(metric float32, err error) {
if err != nil {
eh(t.Name, err)
return
}
// Send an alert if required.
sendAlert(metric)
lastValue = metric
}
// Send an initial alert
alert(t.Name, t.Threshold, true)
go t.Sampler.Sample(ctx, interval, handler)
return
} | threshold/threshold.go | 0.778776 | 0.464719 | threshold.go | starcoder |
package screen
import (
"image/color"
"github.com/go-vgo/robotgo"
"github.com/haroflow/go-macros/automation"
)
// Commands returns the automation commands exposed by the screen module.
func Commands() []automation.Command {
	moduleName := "screen"
	return []automation.Command{
		{
			ModuleName:  moduleName,
			MethodName:  "capture",
			Parameters:  "",
			Description: "Captures and returns a screenshot of the entire screen.",
			Action:      Capture,
		},
		{
			ModuleName:  moduleName,
			MethodName:  "captureRect",
			Parameters:  "x: int, y: int, width: int, height: int",
			Description: "Captures and returns a screenshot of a rectangular area on the screen.",
			Action:      CaptureRect,
		},
		{
			ModuleName:  moduleName,
			MethodName:  "getPixelColor",
			Parameters:  "x: int, y: int",
			Description: "Returns the color from a point on screen.",
			Action:      GetPixelColor,
		},
		{
			ModuleName:  moduleName,
			MethodName:  "getWidth",
			Parameters:  "",
			Description: "Returns the screen width.",
			Action:      GetWidth,
		},
		{
			ModuleName:  moduleName,
			MethodName:  "getHeight",
			Parameters:  "",
			Description: "Returns the screen height.",
			Action:      GetHeight,
		},
		{
			ModuleName:  moduleName,
			MethodName:  "search",
			Parameters:  "img: image, tolerance: float from 0.0 to 1.0",
			Description: "Search for an image on the screen. The tolerance parameter should be in the range 0.0 to 1.0, denoting how closely the colors in the image have to match, with 0.0 being exact and 1.0 being any. Returns a point.",
			Action:      Search,
		},
		{
			ModuleName: moduleName,
			MethodName: "searchAll",
			// Was "": SearchAll takes the same img/tolerance parameters as Search.
			Parameters:  "img: image, tolerance: float from 0.0 to 1.0",
			Description: "Search for all instances of an image on the screen. The tolerance parameter should be in the range 0.0 to 1.0, denoting how closely the colors in the image have to match, with 0.0 being exact and 1.0 being any. Returns a list of points.",
			Action:      SearchAll,
		},
	}
}
// Capture takes a screenshot of the entire screen and returns it as a CBitmap.
// robotgo.FindBitmap does not work when converting an image.Image to a
// MMBitmapRef (reason unknown), so CBitmap + robotgo.CaptureScreen is used.
func Capture() robotgo.CBitmap {
	screenshot := robotgo.CaptureScreen()
	return robotgo.CBitmap(screenshot)
}
// CaptureRect takes a screenshot of the given rectangle and returns it as a
// CBitmap. A zero width or height falls back to capturing the whole screen.
// robotgo.FindBitmap does not work when converting an image.Image to a
// MMBitmapRef (reason unknown), so CBitmap + robotgo.CaptureScreen is used.
func CaptureRect(x, y, w, h int) robotgo.CBitmap {
	if w == 0 || h == 0 {
		// Degenerate rectangle: capture the entire screen instead.
		return robotgo.CBitmap(robotgo.CaptureScreen())
	}
	screenshot := robotgo.CaptureScreen(x, y, w, h)
	return robotgo.CBitmap(screenshot)
}
// GetPixelColor returns the color of the pixel at (x, y) on screen.
// The packed value from robotgo is decoded as 0xRRGGBB: low byte blue,
// next byte green, next byte red.
func GetPixelColor(x, y int) color.Color {
	c := robotgo.GetPxColor(x, y)
	return color.RGBA{
		B: uint8(uint(c)),
		G: uint8(uint(c) >> 8),
		R: uint8(uint(c) >> 16),
		// NOTE(review): alpha is left at 0, i.e. fully transparent — confirm
		// callers do not expect 255 (opaque) here.
		A: 0,
	}
}
// GetWidth returns the width of the screen in pixels.
func GetWidth() int {
	width, _ := robotgo.GetScreenSize()
	return width
}

// GetHeight returns the height of the screen in pixels.
func GetHeight() int {
	_, height := robotgo.GetScreenSize()
	return height
}
// Search looks for the first occurrence of img on the current screen and
// returns its position. tolerance ranges from 0.0 (exact color match) to
// 1.0 (any color). robotgo.FindBitmap does not work when converting an
// image.Image to a MMBitmapRef (reason unknown), so CBitmap is used.
func Search(img robotgo.CBitmap, tolerance float64) Point {
	needle := robotgo.ToMMBitmapRef(img)
	haystack := robotgo.CaptureScreen()
	x, y := robotgo.FindBitmap(needle, haystack, tolerance)
	return Point{X: x, Y: y}
}
func SearchAll(img robotgo.CBitmap, tolerance float64) []Point {
// Unfortunately, robotgo.FindBitmap is not working
// when converting an image.Image to a MMBitmapRef. I don't know why...
// So for now we'll use robotgo.CBitmap and robotgo.CaptureScreen.
c := robotgo.ToMMBitmapRef(img)
screen := robotgo.CaptureScreen()
points := robotgo.FindEveryBitmap(c, screen, tolerance)
ret := make([]Point, len(points))
for i, p := range points {
ret[i] = Point{X: p.X, Y: p.Y}
}
return ret
} | automation/screen/screen.go | 0.720762 | 0.583915 | screen.go | starcoder |
package midi
import (
"encoding/binary"
"io"
"github.com/pkg/errors"
)
// MIDI file format values written to the header's format field.
// (Names are exported API; the "Syncronous"/"Asyncronous" spellings are kept
// for backward compatibility.)
const (
	SingleTrack uint16 = iota // format 0: a single track chunk
	Syncronous                // format 1: multiple tracks, started together
	Asyncronous               // format 2: multiple independent tracks
)
// Encoder writes a MIDI file (header chunk plus track chunks) to a writer.
type Encoder struct {
	// we need a write seeker because we will update the size at the end
	// and need to back to the beginning of the file.
	w io.WriteSeeker

	// Format describes the tracks format:
	//
	//   0 - single-track: a header chunk followed by one track chunk. The
	//       most interchangeable representation; useful for simple players
	//       that just need to drive a synthesizer.
	//   1 - multiple tracks, synchronous: tracks are vertically synchronous —
	//       they all start at the same time and can represent different
	//       parts of one song.
	//   2 - multiple tracks, asynchronous: tracks do not necessarily start
	//       at the same time and can be completely asynchronous.
	Format uint16

	// NumTracks represents the number of tracks in the midi file.
	NumTracks uint16

	// TicksPerQuarterNote is the resolution used for delta timing.
	TicksPerQuarterNote uint16

	TimeFormat timeFormat

	Tracks []*Track

	// size of the encoded body.
	// NOTE(review): tracked but never written back into the header in the
	// code visible here — confirm intended use.
	size int
}
// NewEncoder returns an encoder writing to w with the specified track format
// and pulses-per-quarter-note resolution.
func NewEncoder(w io.WriteSeeker, format uint16, ppqn uint16) *Encoder {
	return &Encoder{
		w:                   w,
		Format:              format,
		TicksPerQuarterNote: ppqn,
	}
}
// NewTrack appends a new track to the encoder and returns it (not thread safe).
func (e *Encoder) NewTrack() *Track {
	track := &Track{ticksPerBeat: e.TicksPerQuarterNote}
	e.Tracks = append(e.Tracks, track)
	return track
}
// Write writes the binary representation (header chunk followed by each
// track chunk) to the encoder's writer.
func (e *Encoder) Write() error {
	if e == nil {
		return errors.New("Can't write a nil encoder")
	}
	if err := e.writeHeaders(); err != nil {
		return err
	}
	for _, t := range e.Tracks {
		if err := e.encodeTrack(t); err != nil {
			return err
		}
	}
	// go back and update body size in header
	// NOTE(review): the size rewrite described above is not implemented here,
	// even though the Encoder holds an io.WriteSeeker for that purpose —
	// confirm whether it is done elsewhere or still TODO.
	return nil
}
// writeHeaders writes the MIDI header chunk: chunk id, fixed size (6),
// format, track count, and the delta-time division.
func (e *Encoder) writeHeaders() error {
	// chunk id
	if _, err := e.w.Write(headerChunkID[:]); err != nil {
		return err
	}
	// Remaining header fields are plain big-endian integers.
	writeBE := func(v interface{}) error {
		return binary.Write(e.w, binary.BigEndian, v)
	}
	// header size
	if err := writeBE(uint32(6)); err != nil {
		return err
	}
	// format
	if err := writeBE(e.Format); err != nil {
		return err
	}
	// numtracks (not trusting the field value, but counting the actual tracks)
	if err := writeBE(uint16(len(e.Tracks))); err != nil {
		return err
	}
	// division (delta-time resolution)
	return writeBE(e.TicksPerQuarterNote)
}
func (e *Encoder) encodeTrack(t *Track) error {
// chunk id [4]
if _, err := e.w.Write(trackChunkID[:]); err != nil {
return err
}
data, err := t.ChunkData(true)
if err != nil {
return err
}
// chunk size
if err := binary.Write(e.w, binary.BigEndian, uint32(len(data))); err != nil {
return errors.Wrap(err, "106")
}
// chunk data
if _, err := e.w.Write(data); err != nil {
return err
}
return nil
} | encoder.go | 0.646906 | 0.428831 | encoder.go | starcoder |
package pt
import "math"
// NewSDFMesh samples sdf on a regular grid spanning box (cells at most
// `step` wide on each axis) and polygonizes the zero isosurface with the
// marching cubes algorithm, returning the resulting triangle mesh.
func NewSDFMesh(sdf SDF, box Box, step float64) *Mesh {
	min := box.Min
	size := box.Size()
	// Number of samples per axis, rounded up; the actual cell size is then
	// size/n so the grid fits the box exactly.
	nx := int(math.Ceil(size.X / step))
	ny := int(math.Ceil(size.Y / step))
	nz := int(math.Ceil(size.Z / step))
	sx := size.X / float64(nx)
	sy := size.Y / float64(ny)
	sz := size.Z / float64(nz)
	var triangles []*Triangle
	// NOTE(review): the loops visit n-1 cells per axis, so the slab adjacent
	// to box.Max is never polygonized — confirm the surface is expected to
	// lie strictly inside the box.
	for x := 0; x < nx-1; x++ {
		for y := 0; y < ny-1; y++ {
			for z := 0; z < nz-1; z++ {
				x0, y0, z0 := float64(x)*sx+min.X, float64(y)*sy+min.Y, float64(z)*sz+min.Z
				x1, y1, z1 := x0+sx, y0+sy, z0+sz
				// The eight corners of the current grid cell.
				p := [8]Vector{
					Vector{x0, y0, z0},
					Vector{x1, y0, z0},
					Vector{x1, y1, z0},
					Vector{x0, y1, z0},
					Vector{x0, y0, z1},
					Vector{x1, y0, z1},
					Vector{x1, y1, z1},
					Vector{x0, y1, z1},
				}
				// SDF values at each corner drive the marching cubes case.
				var v [8]float64
				for i := 0; i < 8; i++ {
					v[i] = sdf.Evaluate(p[i])
				}
				triangles = append(triangles, mcPolygonize(p, v, 0)...)
			}
		}
	}
	return NewMesh(triangles)
}
// mcPolygonize emits the marching-cubes triangles for one grid cell.
// p holds the cell's eight corner positions, v the scalar field values at
// those corners, and x the iso level to extract. Returns nil when the cell
// does not intersect the isosurface.
func mcPolygonize(p [8]Vector, v [8]float64, x float64) []*Triangle {
	// Build an 8-bit configuration index from the sign of each corner
	// relative to the iso level.
	index := 0
	for i := 0; i < 8; i++ {
		if v[i] < x {
			index |= 1 << uint(i)
		}
	}
	// edgeTable maps the configuration to a bitmask of intersected edges;
	// zero means the cell is entirely inside or outside the surface.
	if edgeTable[index] == 0 {
		return nil
	}
	// Interpolate the surface crossing point on every intersected edge.
	var points [12]Vector
	for i := 0; i < 12; i++ {
		bit := 1 << uint(i)
		if edgeTable[index]&bit != 0 {
			a := pairTable[i][0]
			b := pairTable[i][1]
			points[i] = mcInterpolate(p[a], p[b], v[a], v[b], x)
		}
	}
	// triangleTable lists, per configuration, triples of edge indices that
	// form the output triangles.
	table := triangleTable[index]
	count := len(table) / 3
	result := make([]*Triangle, count)
	for i := 0; i < count; i++ {
		triangle := Triangle{}
		// Vertices are assigned V3, V2, V1 from the table order (reversed);
		// FixNormals then recomputes the triangle's normals.
		triangle.V3 = points[table[i*3+0]]
		triangle.V2 = points[table[i*3+1]]
		triangle.V1 = points[table[i*3+2]]
		triangle.FixNormals()
		result[i] = &triangle
	}
	return result
}
// mcInterpolate returns the point on segment p1-p2 where the scalar field
// (v1 at p1, v2 at p2) crosses the iso level x. Endpoints are returned
// directly when the level coincides with (or the values are within EPS of)
// an endpoint value.
func mcInterpolate(p1, p2 Vector, v1, v2, x float64) Vector {
	switch {
	case math.Abs(x-v1) < EPS:
		return p1
	case math.Abs(x-v2) < EPS:
		return p2
	case math.Abs(v1-v2) < EPS:
		// Degenerate: nearly equal endpoint values; pick p1.
		return p1
	}
	// Linear interpolation parameter along the edge.
	t := (x - v1) / (v2 - v1)
	return Vector{
		X: p1.X + t*(p2.X-p1.X),
		Y: p1.Y + t*(p2.Y-p1.Y),
		Z: p1.Z + t*(p2.Z-p1.Z),
	}
}
// pairTable maps each of the 12 cube edges to the indices of the two cell
// corners it connects (same corner numbering as the p/v arrays passed to
// mcPolygonize).
var pairTable = [][]int{
	{0, 1},
	{1, 2},
	{2, 3},
	{3, 0},
	{4, 5},
	{5, 6},
	{6, 7},
	{7, 4},
	{0, 4},
	{1, 5},
	{2, 6},
	{3, 7},
}
// edgeTable is the standard marching cubes edge table: for each of the 256
// corner sign configurations it holds a 12-bit mask of the cube edges
// intersected by the isosurface (bit i corresponds to pairTable[i]).
var edgeTable = []int{
	0x0000, 0x0109, 0x0203, 0x030a, 0x0406, 0x050f, 0x0605, 0x070c,
	0x080c, 0x0905, 0x0a0f, 0x0b06, 0x0c0a, 0x0d03, 0x0e09, 0x0f00,
	0x0190, 0x0099, 0x0393, 0x029a, 0x0596, 0x049f, 0x0795, 0x069c,
	0x099c, 0x0895, 0x0b9f, 0x0a96, 0x0d9a, 0x0c93, 0x0f99, 0x0e90,
	0x0230, 0x0339, 0x0033, 0x013a, 0x0636, 0x073f, 0x0435, 0x053c,
	0x0a3c, 0x0b35, 0x083f, 0x0936, 0x0e3a, 0x0f33, 0x0c39, 0x0d30,
	0x03a0, 0x02a9, 0x01a3, 0x00aa, 0x07a6, 0x06af, 0x05a5, 0x04ac,
	0x0bac, 0x0aa5, 0x09af, 0x08a6, 0x0faa, 0x0ea3, 0x0da9, 0x0ca0,
	0x0460, 0x0569, 0x0663, 0x076a, 0x0066, 0x016f, 0x0265, 0x036c,
	0x0c6c, 0x0d65, 0x0e6f, 0x0f66, 0x086a, 0x0963, 0x0a69, 0x0b60,
	0x05f0, 0x04f9, 0x07f3, 0x06fa, 0x01f6, 0x00ff, 0x03f5, 0x02fc,
	0x0dfc, 0x0cf5, 0x0fff, 0x0ef6, 0x09fa, 0x08f3, 0x0bf9, 0x0af0,
	0x0650, 0x0759, 0x0453, 0x055a, 0x0256, 0x035f, 0x0055, 0x015c,
	0x0e5c, 0x0f55, 0x0c5f, 0x0d56, 0x0a5a, 0x0b53, 0x0859, 0x0950,
	0x07c0, 0x06c9, 0x05c3, 0x04ca, 0x03c6, 0x02cf, 0x01c5, 0x00cc,
	0x0fcc, 0x0ec5, 0x0dcf, 0x0cc6, 0x0bca, 0x0ac3, 0x09c9, 0x08c0,
	0x08c0, 0x09c9, 0x0ac3, 0x0bca, 0x0cc6, 0x0dcf, 0x0ec5, 0x0fcc,
	0x00cc, 0x01c5, 0x02cf, 0x03c6, 0x04ca, 0x05c3, 0x06c9, 0x07c0,
	0x0950, 0x0859, 0x0b53, 0x0a5a, 0x0d56, 0x0c5f, 0x0f55, 0x0e5c,
	0x015c, 0x0055, 0x035f, 0x0256, 0x055a, 0x0453, 0x0759, 0x0650,
	0x0af0, 0x0bf9, 0x08f3, 0x09fa, 0x0ef6, 0x0fff, 0x0cf5, 0x0dfc,
	0x02fc, 0x03f5, 0x00ff, 0x01f6, 0x06fa, 0x07f3, 0x04f9, 0x05f0,
	0x0b60, 0x0a69, 0x0963, 0x086a, 0x0f66, 0x0e6f, 0x0d65, 0x0c6c,
	0x036c, 0x0265, 0x016f, 0x0066, 0x076a, 0x0663, 0x0569, 0x0460,
	0x0ca0, 0x0da9, 0x0ea3, 0x0faa, 0x08a6, 0x09af, 0x0aa5, 0x0bac,
	0x04ac, 0x05a5, 0x06af, 0x07a6, 0x00aa, 0x01a3, 0x02a9, 0x03a0,
	0x0d30, 0x0c39, 0x0f33, 0x0e3a, 0x0936, 0x083f, 0x0b35, 0x0a3c,
	0x053c, 0x0435, 0x073f, 0x0636, 0x013a, 0x0033, 0x0339, 0x0230,
	0x0e90, 0x0f99, 0x0c93, 0x0d9a, 0x0a96, 0x0b9f, 0x0895, 0x099c,
	0x069c, 0x0795, 0x049f, 0x0596, 0x029a, 0x0393, 0x0099, 0x0190,
	0x0f00, 0x0e09, 0x0d03, 0x0c0a, 0x0b06, 0x0a0f, 0x0905, 0x080c,
	0x070c, 0x0605, 0x050f, 0x0406, 0x030a, 0x0203, 0x0109, 0x0000,
}
var triangleTable = [][]int{
{},
{0, 8, 3},
{0, 1, 9},
{1, 8, 3, 9, 8, 1},
{1, 2, 10},
{0, 8, 3, 1, 2, 10},
{9, 2, 10, 0, 2, 9},
{2, 8, 3, 2, 10, 8, 10, 9, 8},
{3, 11, 2},
{0, 11, 2, 8, 11, 0},
{1, 9, 0, 2, 3, 11},
{1, 11, 2, 1, 9, 11, 9, 8, 11},
{3, 10, 1, 11, 10, 3},
{0, 10, 1, 0, 8, 10, 8, 11, 10},
{3, 9, 0, 3, 11, 9, 11, 10, 9},
{9, 8, 10, 10, 8, 11},
{4, 7, 8},
{4, 3, 0, 7, 3, 4},
{0, 1, 9, 8, 4, 7},
{4, 1, 9, 4, 7, 1, 7, 3, 1},
{1, 2, 10, 8, 4, 7},
{3, 4, 7, 3, 0, 4, 1, 2, 10},
{9, 2, 10, 9, 0, 2, 8, 4, 7},
{2, 10, 9, 2, 9, 7, 2, 7, 3, 7, 9, 4},
{8, 4, 7, 3, 11, 2},
{11, 4, 7, 11, 2, 4, 2, 0, 4},
{9, 0, 1, 8, 4, 7, 2, 3, 11},
{4, 7, 11, 9, 4, 11, 9, 11, 2, 9, 2, 1},
{3, 10, 1, 3, 11, 10, 7, 8, 4},
{1, 11, 10, 1, 4, 11, 1, 0, 4, 7, 11, 4},
{4, 7, 8, 9, 0, 11, 9, 11, 10, 11, 0, 3},
{4, 7, 11, 4, 11, 9, 9, 11, 10},
{9, 5, 4},
{9, 5, 4, 0, 8, 3},
{0, 5, 4, 1, 5, 0},
{8, 5, 4, 8, 3, 5, 3, 1, 5},
{1, 2, 10, 9, 5, 4},
{3, 0, 8, 1, 2, 10, 4, 9, 5},
{5, 2, 10, 5, 4, 2, 4, 0, 2},
{2, 10, 5, 3, 2, 5, 3, 5, 4, 3, 4, 8},
{9, 5, 4, 2, 3, 11},
{0, 11, 2, 0, 8, 11, 4, 9, 5},
{0, 5, 4, 0, 1, 5, 2, 3, 11},
{2, 1, 5, 2, 5, 8, 2, 8, 11, 4, 8, 5},
{10, 3, 11, 10, 1, 3, 9, 5, 4},
{4, 9, 5, 0, 8, 1, 8, 10, 1, 8, 11, 10},
{5, 4, 0, 5, 0, 11, 5, 11, 10, 11, 0, 3},
{5, 4, 8, 5, 8, 10, 10, 8, 11},
{9, 7, 8, 5, 7, 9},
{9, 3, 0, 9, 5, 3, 5, 7, 3},
{0, 7, 8, 0, 1, 7, 1, 5, 7},
{1, 5, 3, 3, 5, 7},
{9, 7, 8, 9, 5, 7, 10, 1, 2},
{10, 1, 2, 9, 5, 0, 5, 3, 0, 5, 7, 3},
{8, 0, 2, 8, 2, 5, 8, 5, 7, 10, 5, 2},
{2, 10, 5, 2, 5, 3, 3, 5, 7},
{7, 9, 5, 7, 8, 9, 3, 11, 2},
{9, 5, 7, 9, 7, 2, 9, 2, 0, 2, 7, 11},
{2, 3, 11, 0, 1, 8, 1, 7, 8, 1, 5, 7},
{11, 2, 1, 11, 1, 7, 7, 1, 5},
{9, 5, 8, 8, 5, 7, 10, 1, 3, 10, 3, 11},
{5, 7, 0, 5, 0, 9, 7, 11, 0, 1, 0, 10, 11, 10, 0},
{11, 10, 0, 11, 0, 3, 10, 5, 0, 8, 0, 7, 5, 7, 0},
{11, 10, 5, 7, 11, 5},
{10, 6, 5},
{0, 8, 3, 5, 10, 6},
{9, 0, 1, 5, 10, 6},
{1, 8, 3, 1, 9, 8, 5, 10, 6},
{1, 6, 5, 2, 6, 1},
{1, 6, 5, 1, 2, 6, 3, 0, 8},
{9, 6, 5, 9, 0, 6, 0, 2, 6},
{5, 9, 8, 5, 8, 2, 5, 2, 6, 3, 2, 8},
{2, 3, 11, 10, 6, 5},
{11, 0, 8, 11, 2, 0, 10, 6, 5},
{0, 1, 9, 2, 3, 11, 5, 10, 6},
{5, 10, 6, 1, 9, 2, 9, 11, 2, 9, 8, 11},
{6, 3, 11, 6, 5, 3, 5, 1, 3},
{0, 8, 11, 0, 11, 5, 0, 5, 1, 5, 11, 6},
{3, 11, 6, 0, 3, 6, 0, 6, 5, 0, 5, 9},
{6, 5, 9, 6, 9, 11, 11, 9, 8},
{5, 10, 6, 4, 7, 8},
{4, 3, 0, 4, 7, 3, 6, 5, 10},
{1, 9, 0, 5, 10, 6, 8, 4, 7},
{10, 6, 5, 1, 9, 7, 1, 7, 3, 7, 9, 4},
{6, 1, 2, 6, 5, 1, 4, 7, 8},
{1, 2, 5, 5, 2, 6, 3, 0, 4, 3, 4, 7},
{8, 4, 7, 9, 0, 5, 0, 6, 5, 0, 2, 6},
{7, 3, 9, 7, 9, 4, 3, 2, 9, 5, 9, 6, 2, 6, 9},
{3, 11, 2, 7, 8, 4, 10, 6, 5},
{5, 10, 6, 4, 7, 2, 4, 2, 0, 2, 7, 11},
{0, 1, 9, 4, 7, 8, 2, 3, 11, 5, 10, 6},
{9, 2, 1, 9, 11, 2, 9, 4, 11, 7, 11, 4, 5, 10, 6},
{8, 4, 7, 3, 11, 5, 3, 5, 1, 5, 11, 6},
{5, 1, 11, 5, 11, 6, 1, 0, 11, 7, 11, 4, 0, 4, 11},
{0, 5, 9, 0, 6, 5, 0, 3, 6, 11, 6, 3, 8, 4, 7},
{6, 5, 9, 6, 9, 11, 4, 7, 9, 7, 11, 9},
{10, 4, 9, 6, 4, 10},
{4, 10, 6, 4, 9, 10, 0, 8, 3},
{10, 0, 1, 10, 6, 0, 6, 4, 0},
{8, 3, 1, 8, 1, 6, 8, 6, 4, 6, 1, 10},
{1, 4, 9, 1, 2, 4, 2, 6, 4},
{3, 0, 8, 1, 2, 9, 2, 4, 9, 2, 6, 4},
{0, 2, 4, 4, 2, 6},
{8, 3, 2, 8, 2, 4, 4, 2, 6},
{10, 4, 9, 10, 6, 4, 11, 2, 3},
{0, 8, 2, 2, 8, 11, 4, 9, 10, 4, 10, 6},
{3, 11, 2, 0, 1, 6, 0, 6, 4, 6, 1, 10},
{6, 4, 1, 6, 1, 10, 4, 8, 1, 2, 1, 11, 8, 11, 1},
{9, 6, 4, 9, 3, 6, 9, 1, 3, 11, 6, 3},
{8, 11, 1, 8, 1, 0, 11, 6, 1, 9, 1, 4, 6, 4, 1},
{3, 11, 6, 3, 6, 0, 0, 6, 4},
{6, 4, 8, 11, 6, 8},
{7, 10, 6, 7, 8, 10, 8, 9, 10},
{0, 7, 3, 0, 10, 7, 0, 9, 10, 6, 7, 10},
{10, 6, 7, 1, 10, 7, 1, 7, 8, 1, 8, 0},
{10, 6, 7, 10, 7, 1, 1, 7, 3},
{1, 2, 6, 1, 6, 8, 1, 8, 9, 8, 6, 7},
{2, 6, 9, 2, 9, 1, 6, 7, 9, 0, 9, 3, 7, 3, 9},
{7, 8, 0, 7, 0, 6, 6, 0, 2},
{7, 3, 2, 6, 7, 2},
{2, 3, 11, 10, 6, 8, 10, 8, 9, 8, 6, 7},
{2, 0, 7, 2, 7, 11, 0, 9, 7, 6, 7, 10, 9, 10, 7},
{1, 8, 0, 1, 7, 8, 1, 10, 7, 6, 7, 10, 2, 3, 11},
{11, 2, 1, 11, 1, 7, 10, 6, 1, 6, 7, 1},
{8, 9, 6, 8, 6, 7, 9, 1, 6, 11, 6, 3, 1, 3, 6},
{0, 9, 1, 11, 6, 7},
{7, 8, 0, 7, 0, 6, 3, 11, 0, 11, 6, 0},
{7, 11, 6},
{7, 6, 11},
{3, 0, 8, 11, 7, 6},
{0, 1, 9, 11, 7, 6},
{8, 1, 9, 8, 3, 1, 11, 7, 6},
{10, 1, 2, 6, 11, 7},
{1, 2, 10, 3, 0, 8, 6, 11, 7},
{2, 9, 0, 2, 10, 9, 6, 11, 7},
{6, 11, 7, 2, 10, 3, 10, 8, 3, 10, 9, 8},
{7, 2, 3, 6, 2, 7},
{7, 0, 8, 7, 6, 0, 6, 2, 0},
{2, 7, 6, 2, 3, 7, 0, 1, 9},
{1, 6, 2, 1, 8, 6, 1, 9, 8, 8, 7, 6},
{10, 7, 6, 10, 1, 7, 1, 3, 7},
{10, 7, 6, 1, 7, 10, 1, 8, 7, 1, 0, 8},
{0, 3, 7, 0, 7, 10, 0, 10, 9, 6, 10, 7},
{7, 6, 10, 7, 10, 8, 8, 10, 9},
{6, 8, 4, 11, 8, 6},
{3, 6, 11, 3, 0, 6, 0, 4, 6},
{8, 6, 11, 8, 4, 6, 9, 0, 1},
{9, 4, 6, 9, 6, 3, 9, 3, 1, 11, 3, 6},
{6, 8, 4, 6, 11, 8, 2, 10, 1},
{1, 2, 10, 3, 0, 11, 0, 6, 11, 0, 4, 6},
{4, 11, 8, 4, 6, 11, 0, 2, 9, 2, 10, 9},
{10, 9, 3, 10, 3, 2, 9, 4, 3, 11, 3, 6, 4, 6, 3},
{8, 2, 3, 8, 4, 2, 4, 6, 2},
{0, 4, 2, 4, 6, 2},
{1, 9, 0, 2, 3, 4, 2, 4, 6, 4, 3, 8},
{1, 9, 4, 1, 4, 2, 2, 4, 6},
{8, 1, 3, 8, 6, 1, 8, 4, 6, 6, 10, 1},
{10, 1, 0, 10, 0, 6, 6, 0, 4},
{4, 6, 3, 4, 3, 8, 6, 10, 3, 0, 3, 9, 10, 9, 3},
{10, 9, 4, 6, 10, 4},
{4, 9, 5, 7, 6, 11},
{0, 8, 3, 4, 9, 5, 11, 7, 6},
{5, 0, 1, 5, 4, 0, 7, 6, 11},
{11, 7, 6, 8, 3, 4, 3, 5, 4, 3, 1, 5},
{9, 5, 4, 10, 1, 2, 7, 6, 11},
{6, 11, 7, 1, 2, 10, 0, 8, 3, 4, 9, 5},
{7, 6, 11, 5, 4, 10, 4, 2, 10, 4, 0, 2},
{3, 4, 8, 3, 5, 4, 3, 2, 5, 10, 5, 2, 11, 7, 6},
{7, 2, 3, 7, 6, 2, 5, 4, 9},
{9, 5, 4, 0, 8, 6, 0, 6, 2, 6, 8, 7},
{3, 6, 2, 3, 7, 6, 1, 5, 0, 5, 4, 0},
{6, 2, 8, 6, 8, 7, 2, 1, 8, 4, 8, 5, 1, 5, 8},
{9, 5, 4, 10, 1, 6, 1, 7, 6, 1, 3, 7},
{1, 6, 10, 1, 7, 6, 1, 0, 7, 8, 7, 0, 9, 5, 4},
{4, 0, 10, 4, 10, 5, 0, 3, 10, 6, 10, 7, 3, 7, 10},
{7, 6, 10, 7, 10, 8, 5, 4, 10, 4, 8, 10},
{6, 9, 5, 6, 11, 9, 11, 8, 9},
{3, 6, 11, 0, 6, 3, 0, 5, 6, 0, 9, 5},
{0, 11, 8, 0, 5, 11, 0, 1, 5, 5, 6, 11},
{6, 11, 3, 6, 3, 5, 5, 3, 1},
{1, 2, 10, 9, 5, 11, 9, 11, 8, 11, 5, 6},
{0, 11, 3, 0, 6, 11, 0, 9, 6, 5, 6, 9, 1, 2, 10},
{11, 8, 5, 11, 5, 6, 8, 0, 5, 10, 5, 2, 0, 2, 5},
{6, 11, 3, 6, 3, 5, 2, 10, 3, 10, 5, 3},
{5, 8, 9, 5, 2, 8, 5, 6, 2, 3, 8, 2},
{9, 5, 6, 9, 6, 0, 0, 6, 2},
{1, 5, 8, 1, 8, 0, 5, 6, 8, 3, 8, 2, 6, 2, 8},
{1, 5, 6, 2, 1, 6},
{1, 3, 6, 1, 6, 10, 3, 8, 6, 5, 6, 9, 8, 9, 6},
{10, 1, 0, 10, 0, 6, 9, 5, 0, 5, 6, 0},
{0, 3, 8, 5, 6, 10},
{10, 5, 6},
{11, 5, 10, 7, 5, 11},
{11, 5, 10, 11, 7, 5, 8, 3, 0},
{5, 11, 7, 5, 10, 11, 1, 9, 0},
{10, 7, 5, 10, 11, 7, 9, 8, 1, 8, 3, 1},
{11, 1, 2, 11, 7, 1, 7, 5, 1},
{0, 8, 3, 1, 2, 7, 1, 7, 5, 7, 2, 11},
{9, 7, 5, 9, 2, 7, 9, 0, 2, 2, 11, 7},
{7, 5, 2, 7, 2, 11, 5, 9, 2, 3, 2, 8, 9, 8, 2},
{2, 5, 10, 2, 3, 5, 3, 7, 5},
{8, 2, 0, 8, 5, 2, 8, 7, 5, 10, 2, 5},
{9, 0, 1, 5, 10, 3, 5, 3, 7, 3, 10, 2},
{9, 8, 2, 9, 2, 1, 8, 7, 2, 10, 2, 5, 7, 5, 2},
{1, 3, 5, 3, 7, 5},
{0, 8, 7, 0, 7, 1, 1, 7, 5},
{9, 0, 3, 9, 3, 5, 5, 3, 7},
{9, 8, 7, 5, 9, 7},
{5, 8, 4, 5, 10, 8, 10, 11, 8},
{5, 0, 4, 5, 11, 0, 5, 10, 11, 11, 3, 0},
{0, 1, 9, 8, 4, 10, 8, 10, 11, 10, 4, 5},
{10, 11, 4, 10, 4, 5, 11, 3, 4, 9, 4, 1, 3, 1, 4},
{2, 5, 1, 2, 8, 5, 2, 11, 8, 4, 5, 8},
{0, 4, 11, 0, 11, 3, 4, 5, 11, 2, 11, 1, 5, 1, 11},
{0, 2, 5, 0, 5, 9, 2, 11, 5, 4, 5, 8, 11, 8, 5},
{9, 4, 5, 2, 11, 3},
{2, 5, 10, 3, 5, 2, 3, 4, 5, 3, 8, 4},
{5, 10, 2, 5, 2, 4, 4, 2, 0},
{3, 10, 2, 3, 5, 10, 3, 8, 5, 4, 5, 8, 0, 1, 9},
{5, 10, 2, 5, 2, 4, 1, 9, 2, 9, 4, 2},
{8, 4, 5, 8, 5, 3, 3, 5, 1},
{0, 4, 5, 1, 0, 5},
{8, 4, 5, 8, 5, 3, 9, 0, 5, 0, 3, 5},
{9, 4, 5},
{4, 11, 7, 4, 9, 11, 9, 10, 11},
{0, 8, 3, 4, 9, 7, 9, 11, 7, 9, 10, 11},
{1, 10, 11, 1, 11, 4, 1, 4, 0, 7, 4, 11},
{3, 1, 4, 3, 4, 8, 1, 10, 4, 7, 4, 11, 10, 11, 4},
{4, 11, 7, 9, 11, 4, 9, 2, 11, 9, 1, 2},
{9, 7, 4, 9, 11, 7, 9, 1, 11, 2, 11, 1, 0, 8, 3},
{11, 7, 4, 11, 4, 2, 2, 4, 0},
{11, 7, 4, 11, 4, 2, 8, 3, 4, 3, 2, 4},
{2, 9, 10, 2, 7, 9, 2, 3, 7, 7, 4, 9},
{9, 10, 7, 9, 7, 4, 10, 2, 7, 8, 7, 0, 2, 0, 7},
{3, 7, 10, 3, 10, 2, 7, 4, 10, 1, 10, 0, 4, 0, 10},
{1, 10, 2, 8, 7, 4},
{4, 9, 1, 4, 1, 7, 7, 1, 3},
{4, 9, 1, 4, 1, 7, 0, 8, 1, 8, 7, 1},
{4, 0, 3, 7, 4, 3},
{4, 8, 7},
{9, 10, 8, 10, 11, 8},
{3, 0, 9, 3, 9, 11, 11, 9, 10},
{0, 1, 10, 0, 10, 8, 8, 10, 11},
{3, 1, 10, 11, 3, 10},
{1, 2, 11, 1, 11, 9, 9, 11, 8},
{3, 0, 9, 3, 9, 11, 1, 2, 9, 2, 11, 9},
{0, 2, 11, 8, 0, 11},
{3, 2, 11},
{2, 3, 8, 2, 8, 10, 10, 8, 9},
{9, 10, 2, 0, 9, 2},
{2, 3, 8, 2, 8, 10, 0, 1, 8, 1, 10, 8},
{1, 10, 2},
{1, 3, 8, 9, 1, 8},
{0, 9, 1},
{0, 3, 8},
{},
}
package block
import (
"encoding/json"
"fmt"
"sort"
"github.com/filecoin-project/go-filecoin/internal/pkg/encoding"
"github.com/ipfs/go-cid"
"github.com/pkg/errors"
"github.com/polydawn/refmt/obj/atlas"
)
// init registers the CBOR transform so a TipSetKey round-trips through IPLD
// encoding as a plain array of CIDs rather than as a struct.
func init() {
	// A TipSetKey serializes as a sorted array of CIDs.
	// Deserialization will sort the CIDs, if they're not already.
	// NOTE(review): NewTipSetKeyFromUnique returns an error on duplicate
	// CIDs, so decoding a payload containing duplicates fails rather than
	// silently de-duplicating.
	encoding.RegisterIpldCborType(atlas.BuildEntry(TipSetKey{}).Transform().
		TransformMarshal(atlas.MakeMarshalTransformFunc(
			func(s TipSetKey) ([]cid.Cid, error) {
				// Marshal: expose the underlying (already sorted) slice.
				return s.cids, nil
			})).
		TransformUnmarshal(atlas.MakeUnmarshalTransformFunc(
			func(cids []cid.Cid) (TipSetKey, error) {
				// Unmarshal: rebuild via the validating constructor.
				return NewTipSetKeyFromUnique(cids...)
			})).
		Complete())
}
// TipSetKey is an immutable set of CIDs forming a unique key for a TipSet.
// Equal keys will have equivalent iteration order, but note that the CIDs are *not* maintained in
// the same order as the canonical iteration order of blocks in a tipset (which is by ticket).
// TipSetKey is a lightweight value type; passing by pointer is usually unnecessary.
type TipSetKey struct {
	// The slice is wrapped in a struct to enforce immutability.
	// Invariant: cids is sorted by cidLess and contains no duplicates
	// (established by NewTipSetKey via uniq); nil represents the empty key.
	cids []cid.Cid
}
// NewTipSetKey initialises a new TipSetKey.
// Duplicate CIDs are silently ignored.
func NewTipSetKey(ids ...cid.Cid) TipSetKey {
	if len(ids) == 0 {
		// Empty set is canonically represented by a nil slice rather than a
		// zero-length slice, so that a zero value exactly matches an empty one.
		return TipSetKey{}
	}
	// Work on a private copy so the caller's slice is never reordered.
	owned := append([]cid.Cid(nil), ids...)
	return TipSetKey{cids: uniq(owned)}
}
// NewTipSetKeyFromUnique initialises a set with CIDs that are expected to be unique.
// An error is returned when the input contains duplicates.
func NewTipSetKeyFromUnique(ids ...cid.Cid) (TipSetKey, error) {
	key := NewTipSetKey(ids...)
	if key.Len() == len(ids) {
		return key, nil
	}
	return TipSetKey{}, errors.Errorf("Duplicate CID in %s", ids)
}
// Empty checks whether the set is empty.
func (s TipSetKey) Empty() bool {
	return s.Len() == 0
}

// Has checks whether the set contains `id`.
// Runs a binary search over the sorted cids slice, so O(log n).
func (s TipSetKey) Has(id cid.Cid) bool {
	// Find index of the first CID not less than id.
	idx := sort.Search(len(s.cids), func(i int) bool {
		return !cidLess(s.cids[i], id)
	})
	return idx < len(s.cids) && s.cids[idx].Equals(id)
}

// Len returns the number of items in the set.
func (s TipSetKey) Len() int {
	return len(s.cids)
}

// ToSlice returns a slice listing the cids in the set.
// The result is a copy; mutating it does not affect the key.
func (s TipSetKey) ToSlice() []cid.Cid {
	out := make([]cid.Cid, len(s.cids))
	copy(out, s.cids)
	return out
}
// Iter returns an iterator that allows the caller to iterate the set in its sort order.
func (s TipSetKey) Iter() TipSetKeyIterator {
	return TipSetKeyIterator{
		s: s.cids,
		i: 0,
	}
}

// Equals checks whether the set contains exactly the same CIDs as another.
// Because both slices are sorted, element-wise comparison suffices.
func (s TipSetKey) Equals(other TipSetKey) bool {
	if len(s.cids) != len(other.cids) {
		return false
	}
	for i := 0; i < len(s.cids); i++ {
		if !s.cids[i].Equals(other.cids[i]) {
			return false
		}
	}
	return true
}

// ContainsAll checks if another set is a subset of this one.
// Runs a single merge-style pass over this set, so O(s.Len()).
func (s *TipSetKey) ContainsAll(other TipSetKey) bool {
	// Since the slices are sorted we can perform one pass over this set, advancing
	// the other index whenever the values match.
	otherIdx := 0
	for i := 0; i < s.Len() && otherIdx < other.Len(); i++ {
		if s.cids[i].Equals(other.cids[otherIdx]) {
			otherIdx++
		}
	}
	// otherIdx is advanced the full length only if every element was found in this set.
	return otherIdx == other.Len()
}
// String returns a string listing the cids in the set,
// formatted as "{ cid1 cid2 ... }" (or "{ }" when empty).
func (s TipSetKey) String() string {
	out := "{"
	for it := s.Iter(); !it.Complete(); it.Next() {
		out = fmt.Sprintf("%s %s", out, it.Value().String())
	}
	return out + " }"
}
// MarshalJSON serializes the key to JSON as a bare array of CIDs.
func (s TipSetKey) MarshalJSON() ([]byte, error) {
	return json.Marshal(s.cids)
}

// UnmarshalJSON parses JSON into the key.
// Note that this pattern technically violates the immutability.
func (s *TipSetKey) UnmarshalJSON(b []byte) error {
	var cids []cid.Cid
	if err := json.Unmarshal(b, &cids); err != nil {
		return err
	}
	// Rebuild via the validating constructor so the sorted/unique invariant
	// holds; input containing duplicate CIDs is rejected with an error.
	k, err := NewTipSetKeyFromUnique(cids...)
	if err != nil {
		return err
	}
	s.cids = k.cids
	return nil
}
// TipSetKeyIterator is a iterator over a sorted collection of CIDs.
type TipSetKeyIterator struct {
	s []cid.Cid // the sorted CIDs being iterated
	i int       // index of the current element; i == len(s) means exhausted
}

// Complete returns true if the iterator has reached the end of the set.
func (si *TipSetKeyIterator) Complete() bool {
	return si.i >= len(si.s)
}

// Next advances the iterator to the next item and returns true if there is such an item.
func (si *TipSetKeyIterator) Next() bool {
	switch {
	case si.i < len(si.s):
		si.i++
		return si.i < len(si.s)
	case si.i == len(si.s):
		// Already exhausted; do not advance past len(s).
		return false
	default:
		panic("unreached")
	}
}

// Value returns the current item for the iterator,
// or cid.Undef once the iterator is exhausted.
func (si TipSetKeyIterator) Value() cid.Cid {
	switch {
	case si.i < len(si.s):
		return si.s[si.i]
	case si.i == len(si.s):
		return cid.Undef
	default:
		panic("unreached")
	}
}
// uniq destructively sorts a slice of CIDs and removes any duplicates,
// returning the (possibly shortened) slice. The input's backing array is
// reused for the result.
func uniq(cids []cid.Cid) []cid.Cid {
	sort.Slice(cids, func(i, j int) bool {
		return cidLess(cids[i], cids[j])
	})
	if len(cids) < 2 {
		return cids
	}
	// Compact in place: append each element that differs from the last kept
	// one. The write index never overtakes the read index, so reusing the
	// backing array is safe.
	out := cids[:1]
	for _, c := range cids[1:] {
		if c != out[len(out)-1] {
			out = append(out, c)
		}
	}
	return out
}
func cidLess(c1, c2 cid.Cid) bool {
return c1.KeyString() < c2.KeyString()
} | internal/pkg/block/tipset_key.go | 0.698844 | 0.411939 | tipset_key.go | starcoder |
package window
import (
"math"
)
// Apply scales x in place by the window produced by windowFunction,
// which is invoked with len(x) to obtain the window coefficients.
func Apply(x []float64, windowFunction func(int) []float64) {
	for i, coeff := range windowFunction(len(x)) {
		x[i] *= coeff
	}
}
// Rectangular returns an L-point rectangular window (all values are 1).
func Rectangular(L int) []float64 {
	w := make([]float64, L)
	for i := 0; i < L; i++ {
		w[i] = 1
	}
	return w
}
// Hamming returns an L-point symmetric Hamming window.
// Reference: http://www.mathworks.com/help/signal/ref/hamming.html
func Hamming(L int) []float64 {
	w := make([]float64, L)
	if L == 1 {
		// Single-point window is defined as 1.
		w[0] = 1
		return w
	}
	N := L - 1
	coef := math.Pi * 2 / float64(N)
	for n := range w {
		w[n] = 0.54 - 0.46*math.Cos(coef*float64(n))
	}
	return w
}
// Hann returns an L-point Hann window.
// Reference: http://www.mathworks.com/help/signal/ref/hann.html
func Hann(L int) []float64 {
	w := make([]float64, L)
	if L == 1 {
		// Single-point window is defined as 1.
		w[0] = 1
		return w
	}
	coef := 2 * math.Pi / float64(L-1)
	for n := range w {
		w[n] = 0.5 * (1 - math.Cos(coef*float64(n)))
	}
	return w
}
// Bartlett returns an L-point Bartlett (triangular) window.
// Reference: http://www.mathworks.com/help/signal/ref/bartlett.html
func Bartlett(L int) []float64 {
	w := make([]float64, L)
	if L == 1 {
		// Single-point window is defined as 1.
		w[0] = 1
		return w
	}
	N := L - 1
	coef := 2 / float64(N)
	for n := 0; n <= N; n++ {
		if n <= N/2 {
			// Rising edge up to (and including) the midpoint.
			w[n] = coef * float64(n)
		} else {
			// Falling edge back down to zero.
			w[n] = 2 - coef*float64(n)
		}
	}
	return w
}
// FlatTop returns an L-point flat top window.
// Reference: http://www.mathworks.com/help/signal/ref/flattopwin.html
func FlatTop(L int) []float64 {
	const (
		alpha0 = float64(0.21557895)
		alpha1 = float64(0.41663158)
		alpha2 = float64(0.277263158)
		alpha3 = float64(0.083578947)
		alpha4 = float64(0.006947368)
	)
	w := make([]float64, L)
	if L == 1 {
		// Single-point window is defined as 1.
		w[0] = 1
		return w
	}
	coef := 2 * math.Pi / float64(L-1)
	for n := range w {
		factor := float64(n) * coef
		// Five-term cosine sum with alternating signs.
		w[n] = alpha0 - alpha1*math.Cos(factor) + alpha2*math.Cos(2*factor) -
			alpha3*math.Cos(3*factor) + alpha4*math.Cos(4*factor)
	}
	return w
}
// Blackman returns an L-point Blackman window
// Reference: http://www.mathworks.com/help/signal/ref/blackman.html
func Blackman(L int) []float64 {
	w := make([]float64, L)
	if L == 1 {
		// Single-point window is defined as 1.
		w[0] = 1
		return w
	}
	N := L - 1
	for n := 0; n <= N; n++ {
		// Classic three-term Blackman coefficients 0.42, 0.5, 0.08.
		w[n] = 0.42 -
			0.5*math.Cos(2*math.Pi*float64(n)/float64(N)) +
			0.08*math.Cos(4*math.Pi*float64(n)/float64(N))
	}
	return w
}
package stats
import (
"fmt"
"time"
"github.com/status-im/simulation/propagation"
)
// Stats represents stats data for given simulation log.
type Stats struct {
	NodeHits            map[int]int   // per-node count of appearances in the log
	NodeCoverage        Coverage      // nodes reached at least once vs. total
	LinkCoverage        Coverage      // links traversed at least once vs. total
	NodeHistogram       *Histogram    // distribution of nodes hit per log entry
	LinkHistogram       *Histogram    // distribution of links hit per log entry
	TimeToNodeHistogram *Histogram    // distribution of first-hit times per node (ms)
	Time                time.Duration // total simulation time (max log timestamp)
}
// PrintVerbose prints detailed terminal-friendly stats to
// the console. Output order mirrors the Stats field order.
func (s *Stats) PrintVerbose() {
	fmt.Println("Stats:")
	fmt.Println("Time elapsed:", s.Time)
	fmt.Println("Nodes coverage:", s.NodeCoverage)
	fmt.Println("Links coverage:", s.LinkCoverage)
	fmt.Println("Nodes histogram:", s.NodeHistogram)
	fmt.Println("Links histogram:", s.LinkHistogram)
	fmt.Println("TimeToNode histogram:", s.TimeToNodeHistogram)
}
// Analyze analyzes given propagation log and returns filled Stats object.
// nodeCount and linkCount are the totals used for coverage calculation.
func Analyze(plog *propagation.Log, nodeCount, linkCount int) *Stats {
	nodeHits, nodeHistogram := analyzeNodeHits(plog)
	linkCoverage, linkHistogram := analyzeLinkCoverage(plog, linkCount)
	return &Stats{
		NodeHits:            nodeHits,
		NodeCoverage:        analyzeNodeCoverage(nodeHits, nodeCount),
		LinkCoverage:        linkCoverage,
		NodeHistogram:       nodeHistogram,
		LinkHistogram:       linkHistogram,
		TimeToNodeHistogram: analyzeTimeToNode(plog),
		Time:                analyzeTiming(plog),
	}
}
// analyzeNodeHits counts how often each node appears in the log and builds
// a histogram of the number of nodes reached per log entry.
func analyzeNodeHits(plog *propagation.Log) (map[int]int, *Histogram) {
	hits := make(map[int]int)
	perEntry := make([]float64, 0, len(plog.Timestamps))
	for _, nodes := range plog.Nodes {
		perEntry = append(perEntry, float64(len(nodes)))
		for _, id := range nodes {
			hits[id]++
		}
	}
	return hits, NewHistogram(perEntry, 20)
}
// analyzeNodeCoverage computes the share of nodes hit at least once
// out of the given total.
func analyzeNodeCoverage(nodeHits map[int]int, total int) Coverage {
	return NewCoverage(len(nodeHits), total)
}
// analyzeLinkCoverage computes link coverage and a histogram of the number
// of links traversed per log entry.
func analyzeLinkCoverage(plog *propagation.Log, total int) (Coverage, *Histogram) {
	seen := make(map[int]struct{})
	perEntry := make([]float64, 0, len(plog.Timestamps))
	for _, links := range plog.Links {
		perEntry = append(perEntry, float64(len(links)))
		for _, id := range links {
			seen[id] = struct{}{}
		}
	}
	return NewCoverage(len(seen), total), NewHistogram(perEntry, 20)
}
// analyzeTiming returns the amount of time the simulation took.
func analyzeTiming(plog *propagation.Log) time.Duration {
	// Log timestamps are in milliseconds; the simulation time is the
	// largest one recorded.
	var latest int
	for _, ts := range plog.Timestamps {
		if ts > latest {
			latest = ts
		}
	}
	return time.Duration(latest) * time.Millisecond
}
func analyzeTimeToNode(plog *propagation.Log) *Histogram {
var hits = make(map[int]int)
for i, ts := range plog.Timestamps {
nodes := plog.Nodes[i]
for _, j := range nodes {
if _, ok := hits[j]; !ok {
hits[j] = ts
}
}
}
x := make([]float64, 0, len(plog.Nodes))
for _, ts := range hits {
x = append(x, float64(ts))
}
return NewHistogram(x, 20)
} | stats/stats.go | 0.659515 | 0.599427 | stats.go | starcoder |
package condition
import (
"fmt"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
)
//------------------------------------------------------------------------------
// init registers the And condition type with its constructor, docs, and a
// sanitiser that recursively sanitises each child condition config.
func init() {
	Constructors[TypeAnd] = TypeSpec{
		constructor: NewAnd,
		Description: `
And is a condition that returns the logical AND of its children conditions:
` + "``` yaml" + `
# True if message contains both 'foo' and 'bar'
and:
- text:
operator: contains
arg: foo
- text:
operator: contains
arg: bar
` + "```" + ``,
		sanitiseConfigFunc: func(conf Config) (interface{}, error) {
			// Sanitise every child config; fail fast on the first error.
			var err error
			condConfs := make([]interface{}, len(conf.And))
			for i, cConf := range conf.And {
				if condConfs[i], err = SanitiseConfig(cConf); err != nil {
					return nil, err
				}
			}
			return condConfs, nil
		},
	}
}
//------------------------------------------------------------------------------
// AndConfig is a configuration struct containing fields for the And condition.
// It is simply the list of child condition configs to be ANDed together.
type AndConfig []Config

// NewAndConfig returns a AndConfig with default values (no children).
func NewAndConfig() AndConfig {
	return AndConfig{}
}
//------------------------------------------------------------------------------
// And is a condition that returns the logical AND of all children.
type And struct {
	children []Type // child conditions; all must pass for Check to pass

	mCount metrics.StatCounter // total Check invocations
	mTrue  metrics.StatCounter // checks that returned true
	mFalse metrics.StatCounter // checks that returned false
}
// NewAnd returns an And condition built from conf.And, namespacing each
// child's logger and metrics by its index.
func NewAnd(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	children := make([]Type, 0, len(conf.And))
	for i, childConf := range conf.And {
		ns := fmt.Sprintf("%v", i)
		child, err := New(childConf, mgr, log.NewModule("."+ns), metrics.Namespaced(stats, ns))
		if err != nil {
			return nil, fmt.Errorf("failed to create child '%v': %v", childConf.Type, err)
		}
		children = append(children, child)
	}
	return &And{
		children: children,
		mCount:   stats.GetCounter("count"),
		mTrue:    stats.GetCounter("true"),
		mFalse:   stats.GetCounter("false"),
	}, nil
}
//------------------------------------------------------------------------------
// Check attempts to check a message part against a configured condition.
// It short-circuits on the first failing child and records the outcome in
// the true/false counters.
func (c *And) Check(msg types.Message) bool {
	c.mCount.Incr(1)
	passed := true
	for _, child := range c.children {
		if !child.Check(msg) {
			passed = false
			break
		}
	}
	if passed {
		c.mTrue.Incr(1)
	} else {
		c.mFalse.Incr(1)
	}
	return passed
}
//------------------------------------------------------------------------------
package ahrs
import (
"log"
"math"
"fmt"
"github.com/skelterjohn/go.matrix"
)
// Kalman1State implements an AHRS attitude estimator as a Kalman filter.
// It embeds the shared State (quaternions E/F, rotation rates H, gyro
// biases D, ...) and holds the filter's working matrices between iterations.
type Kalman1State struct {
	State
	f  *matrix.DenseMatrix // state-transition Jacobian
	z  *Measurement        // measurement predicted from the current state
	y  *matrix.DenseMatrix // residual: actual minus predicted measurement
	h  *matrix.DenseMatrix // measurement Jacobian
	ss *matrix.DenseMatrix // innovation covariance (h*M*h' + measurement noise)
	kk *matrix.DenseMatrix // Kalman gain
}
// NewKalman1AHRS allocates a Kalman1State with placeholder matrices and a
// neutral initial attitude; real initialization happens in init() on the
// first Compute call (needsInitialization is set here).
func NewKalman1AHRS() (s *Kalman1State) {
	s = new(Kalman1State)
	s.needsInitialization = true
	s.aNorm = 1
	s.E0 = 1 // Initial guess is East
	s.F0 = 1 // Initial guess is that it's oriented pointing forward and level
	s.normalize()
	s.M = matrix.Zeros(32, 32)
	s.N = matrix.Zeros(32, 32)
	s.f = matrix.Eye(32)
	s.z = NewMeasurement()
	s.y = matrix.Zeros(15, 1)
	s.h = matrix.Zeros(15, 32)
	s.ss = matrix.Zeros(32, 15)
	s.kk = matrix.Zeros(32, 15)
	s.logMap = make(map[string]interface{})
	s.updateLogMap(NewMeasurement(), s.logMap)
	s.gLoad = 1
	return
}
// init seeds the filter's state, timestamp and covariance matrices from the
// first measurement; called once by Compute.
func (s *Kalman1State) init(m *Measurement) {
	s.needsInitialization = false
	s.E0, s.E1, s.E2, s.E3 = 1, 0, 0, 0 // Initial guess is East
	//s.F0, s.F1, s.F2, s.F3 = 0, math.Sqrt(0.5), -math.Sqrt(0.5), 0
	//s.F0, s.F1, s.F2, s.F3 = 0, 0, 1, 0
	s.F0, s.F1, s.F2, s.F3 = 1, 0, 0, 0
	s.normalize()
	s.T = m.T
	// Diagonal matrix of initial state uncertainties, will be squared into covariance below
	// Specifics here aren't too important--it will change very quickly
	s.M = matrix.Diagonal([]float64{
		Big, Big, Big, // U*3
		Big, Big, Big, // Z*3
		1, 1, 1, 1, // E*4
		2, 2, 2, // H*3
		Big, Big, Big, // N*3
		Big, Big, Big, // V*3
		Big, Big, Big, // C*3
		Big, Big, Big, Big, // F*4
		2, 2, 2, // D*3
		Big, Big, Big, // L*3
	})
	s.M = matrix.Product(s.M, s.M)
	// Diagonal matrix of state process uncertainties per s, will be squared into covariance below
	// Tuning these is more important
	tt := math.Sqrt(60.0 * 60.0) // One-hour time constant for drift of biases V, C, F, D, L
	s.N = matrix.Diagonal([]float64{
		Big, Big, Big, // U*3
		Big, Big, Big, // Z*3
		0.05, 0.05, 0.05, 0.05, // E*4
		50, 50, 50, // H*3
		Big, Big, Big, // N*3
		Big, Big, Big, // V*3
		Big, Big, Big, // C*3
		Big, Big, Big, Big, // F*4
		0.1 / tt, 0.1 / tt, 0.1 / tt, // D*3
		Big, Big, Big, // L*3
	})
	s.N = matrix.Product(s.N, s.N)
	s.updateLogMap(m, s.logMap)
	log.Println("Kalman1 Initialized")
	return
}
// Compute runs first the prediction and then the update phases of the Kalman filter
func (s *Kalman1State) Compute(m *Measurement) {
	// Rotate the accelerometer (A) and gyro (B) readings by the F quaternion
	// before filtering.
	m.A1, m.A2, m.A3 = s.rotateByF(m.A1, m.A2, m.A3, false)
	m.B1, m.B2, m.B3 = s.rotateByF(m.B1, m.B2, m.B3, false)
	// The first measurement initializes the filter instead of running it.
	if s.needsInitialization {
		s.init(m)
		return
	}
	s.predict(m.T)
	s.update(m)
	s.updateLogMap(m, s.logMap)
}
// predict performs the prediction phase of the Kalman filter
func (s *Kalman1State) predict(t float64) {
	dt := t - s.T
	// State vectors H and D are unchanged; only E evolves.
	s.E0, s.E1, s.E2, s.E3 = QuaternionRotate(s.E0, s.E1, s.E2, s.E3, s.H1*dt*Deg, s.H2*dt*Deg, s.H3*dt*Deg)
	s.T = t
	// NOTE(review): s.T is updated *before* calcJacobianState(t), so the dt
	// computed inside that call is zero and the E/H coupling terms of s.f
	// vanish (f stays identity) — confirm this ordering is intentional.
	s.calcJacobianState(t)
	// Propagate covariance: M = f*M*f' + N*dt.
	s.M = matrix.Sum(matrix.Product(s.f, matrix.Product(s.M, s.f.Transpose())), matrix.Scaled(s.N, dt))
}
// predictMeasurement returns the measurement expected given the current state.
func (s *Kalman1State) predictMeasurement() (m *Measurement) {
	m = NewMeasurement()
	m.SValid = true
	// Expected accelerometer reading: the unit vertical (0,0,1) rotated by
	// the attitude quaternion E — presumably gravity in g units.
	m.A1, m.A2, m.A3 = s.rotateByE(0, 0, 1, true)
	// Expected gyro reading: rotation rate plus gyro bias.
	m.B1 = s.H1 + s.D1
	m.B2 = s.H2 + s.D2
	m.B3 = s.H3 + s.D3
	m.T = s.T
	return
}
// update applies the Kalman filter corrections given the measurements
func (s *Kalman1State) update(m *Measurement) {
	s.z = s.predictMeasurement()
	// Residuals: actual minus predicted accelerometer (rows 6-8) and
	// gyro (rows 9-11) readings.
	s.y.Set(6, 0, m.A1-s.z.A1)
	s.y.Set(7, 0, m.A2-s.z.A2)
	s.y.Set(8, 0, m.A3-s.z.A3)
	s.y.Set(9, 0, m.B1-s.z.B1)
	s.y.Set(10, 0, m.B2-s.z.B2)
	s.y.Set(11, 0, m.B3-s.z.B3)
	s.calcJacobianMeasurement()
	// The accumulators' third return value feeds the corresponding diagonal
	// entries of the measurement covariance matrix m.M.
	var v float64
	_, _, v = m.Accums[6](m.A1)
	m.M.Set(6, 6, v)
	_, _, v = m.Accums[7](m.A2)
	m.M.Set(7, 7, v)
	_, _, v = m.Accums[8](m.A3)
	m.M.Set(8, 8, v)
	_, _, v = m.Accums[9](m.B1)
	m.M.Set(9, 9, v)
	_, _, v = m.Accums[10](m.B2)
	m.M.Set(10, 10, v)
	_, _, v = m.Accums[11](m.B3)
	m.M.Set(11, 11, v)
	// Innovation covariance S = h*M*h' + R; skip the update if singular.
	s.ss = matrix.Sum(matrix.Product(s.h, matrix.Product(s.M, s.h.Transpose())), m.M)
	m2, err := s.ss.Inverse()
	if err != nil {
		log.Println("AHRS: Can't invert Kalman gain matrix")
		log.Printf("ss: %s\n", s.ss)
		return
	}
	// Kalman gain K = M*h'*S^-1; apply the state correction su = K*y.
	s.kk = matrix.Product(s.M, matrix.Product(s.h.Transpose(), m2))
	su := matrix.Product(s.kk, s.y)
	s.E0 += su.Get(6, 0)
	s.E1 += su.Get(7, 0)
	s.E2 += su.Get(8, 0)
	s.E3 += su.Get(9, 0)
	s.H1 += su.Get(10, 0)
	s.H2 += su.Get(11, 0)
	s.H3 += su.Get(12, 0)
	s.D1 += su.Get(26, 0)
	s.D2 += su.Get(27, 0)
	s.D3 += su.Get(28, 0)
	s.T = m.T
	// Covariance update M = (I - K*h)*M, then re-normalize the quaternions.
	s.M = matrix.Product(matrix.Difference(matrix.Eye(32), matrix.Product(s.kk, s.h)), s.M)
	s.normalize()
}
// calcJacobianState fills in s.f, the Jacobian of the state-transition
// function at time t. Only the attitude quaternion E depends on other state
// (on E itself and the rotation rate H); all remaining entries keep the
// identity set up at construction.
func (s *Kalman1State) calcJacobianState(t float64) {
	dt := t - s.T
	// U*3, Z*3, E*4, H*3, N*3,
	// V*3, C*3, F*4, D*3, L*3
	//s.E0 += 0.5*dt*(-s.E1*s.H1 - s.E2*s.H2 - s.E3*s.H3)*Deg
	s.f.Set(6, 7, -0.5*dt*s.H1*Deg)  // E0/E1
	s.f.Set(6, 8, -0.5*dt*s.H2*Deg)  // E0/E2
	s.f.Set(6, 9, -0.5*dt*s.H3*Deg)  // E0/E3
	s.f.Set(6, 10, -0.5*dt*s.E1*Deg) // E0/H1
	s.f.Set(6, 11, -0.5*dt*s.E2*Deg) // E0/H2
	s.f.Set(6, 12, -0.5*dt*s.E3*Deg) // E0/H3
	//s.E1 += 0.5*dt*(+s.E0*s.H1 - s.E3*s.H2 + s.E2*s.H3)*Deg
	s.f.Set(7, 6, +0.5*dt*s.H1*Deg)  // E1/E0
	s.f.Set(7, 8, +0.5*dt*s.H3*Deg)  // E1/E2
	s.f.Set(7, 9, -0.5*dt*s.H2*Deg)  // E1/E3
	s.f.Set(7, 10, +0.5*dt*s.E0*Deg) // E1/H1
	s.f.Set(7, 11, -0.5*dt*s.E3*Deg) // E1/H2
	s.f.Set(7, 12, +0.5*dt*s.E2*Deg) // E1/H3
	//s.E2 += 0.5*dt*(+s.E3*s.H1 + s.E0*s.H2 - s.E1*s.H3)*Deg
	s.f.Set(8, 6, +0.5*dt*s.H2*Deg)  // E2/E0
	s.f.Set(8, 7, -0.5*dt*s.H3*Deg)  // E2/E1
	s.f.Set(8, 9, +0.5*dt*s.H1*Deg)  // E2/E3
	s.f.Set(8, 10, +0.5*dt*s.E3*Deg) // E2/H1
	s.f.Set(8, 11, +0.5*dt*s.E0*Deg) // E2/H2
	s.f.Set(8, 12, -0.5*dt*s.E1*Deg) // E2/H3
	//s.E3 += 0.5*dt*(-s.E2*s.H1 + s.E1*s.H2 + s.E0*s.H3)*Deg
	s.f.Set(9, 6, +0.5*dt*s.H3*Deg)  // E3/E0
	s.f.Set(9, 7, +0.5*dt*s.H2*Deg)  // E3/E1
	s.f.Set(9, 8, -0.5*dt*s.H1*Deg)  // E3/E2
	s.f.Set(9, 10, -0.5*dt*s.E2*Deg) // E3/H1
	s.f.Set(9, 11, +0.5*dt*s.E1*Deg) // E3/H2
	s.f.Set(9, 12, +0.5*dt*s.E0*Deg) // E3/H3
	// H and D are constant.
	return
}
// calcJacobianMeasurement fills in s.h, the partial derivatives of the
// predicted measurement (accel A from quaternion E; gyro B from rate H
// plus bias D) with respect to the state vector.
func (s *Kalman1State) calcJacobianMeasurement() {
	// U*3, Z*3, E*4, H*3, N*3,
	// V*3, C*3, F*4, D*3, L*3
	// U*3, W*3, A*3, B*3, M*3
	// m.A1 = s.e31 = 2 * (-s.E0*s.E2 + s.E3*s.E1)
	s.h.Set(6, 6, -2*s.E2) // A1/E0
	s.h.Set(6, 7, +2*s.E3) // A1/E1
	s.h.Set(6, 8, -2*s.E0) // A1/E2
	s.h.Set(6, 9, +2*s.E1) // A1/E3
	// m.A2 = s.e32 = 2 * (+s.E0*s.E1 + s.E3*s.E2)
	s.h.Set(7, 6, +2*s.E1) // A2/E0
	s.h.Set(7, 7, +2*s.E0) // A2/E1
	s.h.Set(7, 8, +2*s.E3) // A2/E2
	s.h.Set(7, 9, +2*s.E2) // A2/E3
	// m.A3 = s.e33 = +s.E0*s.E0 - s.E1*s.E1 - s.E2*s.E2 + s.E3*s.E3
	s.h.Set(8, 6, +2*s.E0) // A3/E0
	s.h.Set(8, 7, -2*s.E1) // A3/E1
	s.h.Set(8, 8, -2*s.E2) // A3/E2
	s.h.Set(8, 9, +2*s.E3) // A3/E3
	// m.B1 = s.H1 + s.D1
	s.h.Set(9, 10, 1) // B1/H1
	s.h.Set(9, 26, 1) // B1/D1
	// m.B2 = s.H2 + s.D2
	s.h.Set(10, 11, 1) // B2/H2
	s.h.Set(10, 27, 1) // B2/D2
	// m.B3 = s.H3 + s.D3
	s.h.Set(11, 12, 1) // B3/H3
	s.h.Set(11, 28, 1) // B3/D3
	return
}
// SetCalibrations sets the AHRS accelerometer calibrations to c and gyro calibrations to d.
// NOTE(review): currently a no-op — both parameters are ignored.
func (s *Kalman1State) SetCalibrations(c, d *[3]float64) {
	return
}
// updateLogMap refreshes p (the shared log map) with the base State values,
// every entry of the filter's working matrices flattened to scalar keys like
// "M[00_01]", and the last predicted measurement — for logging/analysis.
func (s *Kalman1State) updateLogMap(m *Measurement, p map[string]interface{}) {
	s.State.updateLogMap(m, s.logMap)
	/*
		rv, pv, hv := s.State.RollPitchHeadingUncertainty()
		p["RollVar"] = rv / Deg
		p["PitchVar"] = pv / Deg
		p["HeadingVar"] = hv / Deg
	*/
	for k, v := range map[string]*matrix.DenseMatrix{
		"M":  s.M,  // M is the state uncertainty covariance matrix
		"N":  s.N,  // N is the process uncertainty covariance matrix
		"f":  s.f,  // f is the State Jacobian
		"y":  s.y,  // y is the correction between actual and predicted measurements
		"h":  s.h,  // h is the measurement Jacobian
		"ss": s.ss, // ss is the innovation covariance
		"kk": s.kk, // kk is the Kalman gain
	} {
		r, c := v.GetSize()
		for i := 0; i < r; i++ {
			for j := 0; j < c; j++ {
				p[fmt.Sprintf("%s[%02d_%02d]", k, i, j)] = v.Get(i, j)
			}
		}
	}
	// z is the predicted measurement
	p["zA1"] = s.z.A1
	p["zA2"] = s.z.A2
	p["zA3"] = s.z.A3
	p["zB1"] = s.z.B1
	p["zB2"] = s.z.B2
	p["zB3"] = s.z.B3
}
// Kalman1JSONConfig maps the filter's state and measurement variables to
// their "Actual" counterparts and default values, for logging/analysis.
// Fixes: the original string was invalid JSON (missing comma after the
// ["H3", ...] entry) and its closing line carried stray dataset metadata.
var Kalman1JSONConfig = `{
	"State": [
		["Roll", "RollActual", 0],
		["Pitch", "PitchActual", 0],
		["Heading", "HeadingActual", null],
		["T", null, null],
		["E0", "E0Actual", null],
		["E1", "E1Actual", null],
		["E2", "E2Actual", null],
		["E3", "E3Actual", null],
		["H1", "H1Actual", 0],
		["H2", "H2Actual", 0],
		["H3", "H3Actual", 0],
		["D1", "D1Actual", 0],
		["D2", "D2Actual", 0],
		["D3", "D3Actual", 0]
	],
	"Measurement": [
		["A1", null, 0],
		["A2", null, 0],
		["A3", null, 0],
		["B1", null, 0],
		["B2", null, 0],
		["B3", null, 0]
	]
}`
package judge
import (
"fmt"
"math"
"github.com/didi/nightingale/src/dataobj"
)
// Function evaluates a window of history data points and reports the
// computed left-hand value plus whether the configured alert condition fired.
type Function interface {
	Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool)
}
// MaxFunction triggers when the maximum of the most recent Limit points
// satisfies Operator against RightValue.
type MaxFunction struct {
	Function
	Limit      int
	Operator   string
	RightValue float64
}

// Compute returns the max of up to Limit points and whether it triggered.
// At least 70% of Limit points must be present; otherwise it returns the
// zero value untriggered.
func (f MaxFunction) Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool) {
	// Guard against an empty series (previously panicked on vs[0] when
	// Limit <= 0 let the 70% check pass).
	if len(vs) == 0 || float64(len(vs)) < float64(f.Limit)*0.7 {
		return
	}
	length := f.Limit
	if len(vs) < length {
		length = len(vs)
	}
	max := vs[0].Value
	for i := 1; i < length; i++ {
		if vs[i].Value > max {
			max = vs[i].Value
		}
	}
	leftValue = max
	isTriggered = checkIsTriggered(leftValue, f.Operator, f.RightValue)
	return
}
// MinFunction triggers when the minimum of the most recent Limit points
// satisfies Operator against RightValue.
type MinFunction struct {
	Function
	Limit      int
	Operator   string
	RightValue float64
}

// Compute returns the min of up to Limit points and whether it triggered.
// At least 70% of Limit points must be present; otherwise it returns the
// zero value untriggered.
func (f MinFunction) Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool) {
	// Guard against an empty series (previously panicked on vs[0] when
	// Limit <= 0 let the 70% check pass).
	if len(vs) == 0 || float64(len(vs)) < float64(f.Limit)*0.7 {
		return
	}
	length := f.Limit
	if len(vs) < length {
		length = len(vs)
	}
	min := vs[0].Value
	for i := 1; i < length; i++ {
		if vs[i].Value < min {
			min = vs[i].Value
		}
	}
	leftValue = min
	isTriggered = checkIsTriggered(leftValue, f.Operator, f.RightValue)
	return
}
// AllFunction triggers only when every one of the most recent Limit points
// satisfies Operator against RightValue.
type AllFunction struct {
	Function
	Limit      int
	Operator   string
	RightValue float64
}

// Compute checks every point in the window; leftValue is the newest value.
// At least 70% of Limit points must be present; otherwise it returns the
// zero value untriggered.
func (f AllFunction) Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool) {
	// Guard against an empty series (previously panicked on vs[0] when
	// Limit <= 0 let the 70% check pass).
	if len(vs) == 0 || float64(len(vs)) < float64(f.Limit)*0.7 {
		return
	}
	length := f.Limit
	if len(vs) < length {
		length = len(vs)
	}
	isTriggered = true
	for i := 0; i < length; i++ {
		isTriggered = checkIsTriggered(vs[i].Value, f.Operator, f.RightValue)
		if !isTriggered {
			break
		}
	}
	leftValue = vs[0].Value
	return
}
// SumFunction triggers when the sum of the first Limit points
// satisfies "sum Operator RightValue".
type SumFunction struct {
	Function
	Limit      int
	Operator   string
	RightValue float64
}

// Compute sums up to Limit points and compares the total against the
// threshold. No result is reported when fewer than 70% of the
// expected points arrived.
func (f SumFunction) Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool) {
	if float64(len(vs)) < float64(f.Limit)*0.7 {
		return
	}
	n := f.Limit
	if len(vs) < n {
		n = len(vs)
	}
	var total dataobj.JsonFloat
	for i := 0; i < n; i++ {
		total += vs[i].Value
	}
	leftValue = total
	isTriggered = checkIsTriggered(leftValue, f.Operator, f.RightValue)
	return
}

// AvgFunction triggers when the average of the first Limit points
// satisfies "avg Operator RightValue".
type AvgFunction struct {
	Function
	Limit      int
	Operator   string
	RightValue float64
}

// Compute averages up to Limit points and compares the mean against
// the threshold, under the same 70% completeness requirement.
func (f AvgFunction) Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool) {
	if float64(len(vs)) < float64(f.Limit)*0.7 {
		return
	}
	n := f.Limit
	if len(vs) < n {
		n = len(vs)
	}
	var total dataobj.JsonFloat
	for i := 0; i < n; i++ {
		total += vs[i].Value
	}
	leftValue = total / dataobj.JsonFloat(n)
	isTriggered = checkIsTriggered(leftValue, f.Operator, f.RightValue)
	return
}
// DiffFunction triggers as soon as the difference between the newest
// point and any single historical point in the window crosses the
// threshold.
type DiffFunction struct {
	Function
	Limit      int
	Operator   string
	RightValue float64
}

// Compute fires when any one point's diff triggers the threshold.
// The diff is "current value minus historical value". It bails out
// when fewer than 70% of the expected points are present, or when vs
// is empty (the empty check guards the vs[0] access, which previously
// panicked when Limit <= 0).
func (f DiffFunction) Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool) {
	if len(vs) == 0 || float64(len(vs)) < float64(f.Limit)*0.7 {
		return
	}
	first := vs[0].Value
	length := f.Limit
	if len(vs) < f.Limit {
		length = len(vs)
	}
	isTriggered = false
	for i := 1; i < length; i++ {
		// diff is the current value minus the historical value
		leftValue = first - vs[i].Value
		isTriggered = checkIsTriggered(leftValue, f.Operator, f.RightValue)
		if isTriggered {
			break
		}
	}
	return
}

// PDiffFunction is the percentage variant of DiffFunction:
// (current - historical) / historical * 100.
type PDiffFunction struct {
	Function
	Limit      int
	Operator   string
	RightValue float64
}

// Compute fires when any one point's percentage diff triggers the
// threshold; historical points with value 0 are skipped to avoid
// division by zero. Same completeness and empty-input guards as
// DiffFunction.
func (f PDiffFunction) Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool) {
	if len(vs) == 0 || float64(len(vs)) < float64(f.Limit)*0.7 {
		return
	}
	first := vs[0].Value
	length := f.Limit
	if len(vs) < f.Limit {
		length = len(vs)
	}
	isTriggered = false
	for i := 1; i < length; i++ {
		if vs[i].Value == 0 {
			continue
		}
		leftValue = (first - vs[i].Value) / vs[i].Value * 100.0
		isTriggered = checkIsTriggered(leftValue, f.Operator, f.RightValue)
		if isTriggered {
			break
		}
	}
	return
}
// HappenFunction triggers when at least Num of the supplied points
// satisfy "value Operator RightValue".
type HappenFunction struct {
	Function
	Num int
	Limit int
	Operator string
	RightValue float64
}

// Compute scans vs and fires as soon as Num triggering points have
// been seen; leftValue is the value of the Num-th triggering point.
// NOTE(review): unlike the other functions, f.Limit is never consulted
// here — every supplied point is scanned and there is no 70%
// completeness check; confirm that is intended.
func (f HappenFunction) Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool) {
	for n, i := 0, 0; i < len(vs); i++ {
		if checkIsTriggered(vs[i].Value, f.Operator, f.RightValue) {
			n++
			if n == f.Num {
				isTriggered = true
				leftValue = vs[i].Value
				return
			}
		}
	}
	return
}
// NodataFunction fires when every point in the series is NaN, i.e.
// no real data arrived at all.
type NodataFunction struct {
	Function
}

// Compute returns (value, false) for the first non-NaN point found;
// if none exists it reports (NaN, true), meaning "no data".
func (f NodataFunction) Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool) {
	for _, point := range vs {
		if !math.IsNaN(float64(point.Value)) {
			return point.Value, false
		}
	}
	return dataobj.JsonFloat(math.NaN()), true
}
// CAvgAbsFunction triggers on the absolute deviation of the window
// average from CompareValue: |avg - CompareValue|.
type CAvgAbsFunction struct {
	Function
	Limit        int
	Operator     string
	RightValue   float64
	CompareValue float64
}

// Compute averages exactly the first Limit points (requires a full
// window) and compares |avg - CompareValue| against RightValue.
func (f CAvgAbsFunction) Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool) {
	if len(vs) < f.Limit {
		return
	}
	var sum dataobj.JsonFloat
	for i := 0; i < f.Limit; i++ {
		sum += vs[i].Value
	}
	avg := sum / dataobj.JsonFloat(f.Limit)
	leftValue = dataobj.JsonFloat(math.Abs(float64(avg) - f.CompareValue))
	isTriggered = checkIsTriggered(leftValue, f.Operator, f.RightValue)
	return
}

// CAvgFunction triggers on the signed deviation of the window average
// from CompareValue: avg - CompareValue.
type CAvgFunction struct {
	Function
	Limit        int
	Operator     string
	RightValue   float64
	CompareValue float64
}

// Compute averages exactly the first Limit points (requires a full
// window) and compares avg - CompareValue against RightValue.
func (f CAvgFunction) Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool) {
	if len(vs) < f.Limit {
		return
	}
	var sum dataobj.JsonFloat
	for i := 0; i < f.Limit; i++ {
		sum += vs[i].Value
	}
	avg := sum / dataobj.JsonFloat(f.Limit)
	leftValue = avg - dataobj.JsonFloat(f.CompareValue)
	isTriggered = checkIsTriggered(leftValue, f.Operator, f.RightValue)
	return
}
// CAvgRateAbsFunction triggers on the absolute relative deviation of
// the window average from CompareValue:
//	|avg - CompareValue| / |CompareValue|
type CAvgRateAbsFunction struct {
	Function
	Limit        int
	Operator     string
	RightValue   float64
	CompareValue float64
}

// Compute averages exactly the first Limit points and compares the
// absolute rate of change against RightValue.
//
// BUG FIX: the previous body was a verbatim copy of
// CAvgAbsFunction.Compute and never divided by |CompareValue|, so the
// "rate" variant actually computed an absolute difference. It now
// uses the same formula as CAvgRateFunction, wrapped in math.Abs.
func (f CAvgRateAbsFunction) Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool) {
	if len(vs) < f.Limit {
		return
	}
	sum := dataobj.JsonFloat(0.0)
	for i := 0; i < f.Limit; i++ {
		sum += vs[i].Value
	}
	value := sum / dataobj.JsonFloat(f.Limit)
	leftValue = dataobj.JsonFloat(math.Abs(float64(value)-f.CompareValue) / math.Abs(f.CompareValue))
	isTriggered = checkIsTriggered(leftValue, f.Operator, f.RightValue)
	return
}
// CAvgRateFunction triggers on the signed relative deviation of the
// window average from CompareValue:
//	(avg - CompareValue) / |CompareValue|
type CAvgRateFunction struct {
	Function
	Limit        int
	Operator     string
	RightValue   float64
	CompareValue float64
}

// Compute averages exactly the first Limit points (requires a full
// window) and compares the signed rate of change against RightValue.
func (f CAvgRateFunction) Compute(vs []*dataobj.HistoryData) (leftValue dataobj.JsonFloat, isTriggered bool) {
	if len(vs) < f.Limit {
		return
	}
	var sum dataobj.JsonFloat
	for i := 0; i < f.Limit; i++ {
		sum += vs[i].Value
	}
	avg := sum / dataobj.JsonFloat(f.Limit)
	leftValue = (avg - dataobj.JsonFloat(f.CompareValue)) / dataobj.JsonFloat(math.Abs(f.CompareValue))
	isTriggered = checkIsTriggered(leftValue, f.Operator, f.RightValue)
	return
}
// ParseFuncFromString maps a strategy-function name onto its Function
// implementation.
//
// span carries the function parameters: span[0] is always the window
// size (limit); "happen" additionally needs span[1] as an int count,
// and the c_avg* family needs span[1] as a float64 compare value.
//
// Hardening: the previous version used unchecked type assertions
// (span[0].(int), span[1].(float64), ...), so a malformed strategy
// configuration panicked the judge; malformed span contents now
// return an error instead.
func ParseFuncFromString(str string, span []interface{}, operator string, rightValue float64) (fn Function, err error) {
	if str == "" {
		return nil, fmt.Errorf("func can not be null!")
	}
	if len(span) == 0 {
		return nil, fmt.Errorf("span can not be empty")
	}
	limit, ok := span[0].(int)
	if !ok {
		return nil, fmt.Errorf("span[0] must be an int, got %T", span[0])
	}

	// secondInt / secondFloat fetch span[1] for the functions that
	// take a second parameter, validating presence and type.
	secondInt := func() (int, error) {
		if len(span) < 2 {
			return 0, fmt.Errorf("func %s needs span[1]", str)
		}
		v, ok := span[1].(int)
		if !ok {
			return 0, fmt.Errorf("span[1] must be an int, got %T", span[1])
		}
		return v, nil
	}
	secondFloat := func() (float64, error) {
		if len(span) < 2 {
			return 0, fmt.Errorf("func %s needs span[1]", str)
		}
		v, ok := span[1].(float64)
		if !ok {
			return 0, fmt.Errorf("span[1] must be a float64, got %T", span[1])
		}
		return v, nil
	}

	switch str {
	case "max":
		fn = &MaxFunction{Limit: limit, Operator: operator, RightValue: rightValue}
	case "min":
		fn = &MinFunction{Limit: limit, Operator: operator, RightValue: rightValue}
	case "all":
		fn = &AllFunction{Limit: limit, Operator: operator, RightValue: rightValue}
	case "sum":
		fn = &SumFunction{Limit: limit, Operator: operator, RightValue: rightValue}
	case "avg":
		fn = &AvgFunction{Limit: limit, Operator: operator, RightValue: rightValue}
	case "diff":
		fn = &DiffFunction{Limit: limit, Operator: operator, RightValue: rightValue}
	case "pdiff":
		fn = &PDiffFunction{Limit: limit, Operator: operator, RightValue: rightValue}
	case "happen":
		num, e := secondInt()
		if e != nil {
			return nil, e
		}
		fn = &HappenFunction{Limit: limit, Num: num, Operator: operator, RightValue: rightValue}
	case "nodata":
		fn = &NodataFunction{}
	case "c_avg":
		cv, e := secondFloat()
		if e != nil {
			return nil, e
		}
		fn = &CAvgFunction{Limit: limit, CompareValue: cv, Operator: operator, RightValue: rightValue}
	case "c_avg_abs":
		cv, e := secondFloat()
		if e != nil {
			return nil, e
		}
		fn = &CAvgAbsFunction{Limit: limit, CompareValue: cv, Operator: operator, RightValue: rightValue}
	case "c_avg_rate":
		cv, e := secondFloat()
		if e != nil {
			return nil, e
		}
		fn = &CAvgRateFunction{Limit: limit, CompareValue: cv, Operator: operator, RightValue: rightValue}
	case "c_avg_rate_abs":
		cv, e := secondFloat()
		if e != nil {
			return nil, e
		}
		fn = &CAvgRateAbsFunction{Limit: limit, CompareValue: cv, Operator: operator, RightValue: rightValue}
	default:
		err = fmt.Errorf("not_supported_method")
	}
	return
}
func checkIsTriggered(leftValue dataobj.JsonFloat, operator string, rightValue float64) (isTriggered bool) {
switch operator {
case "=", "==":
isTriggered = math.Abs(float64(leftValue)-rightValue) < 0.0001
case "!=":
isTriggered = math.Abs(float64(leftValue)-rightValue) > 0.0001
case "<":
isTriggered = float64(leftValue) < rightValue
case "<=":
isTriggered = float64(leftValue) <= rightValue
case ">":
isTriggered = float64(leftValue) > rightValue
case ">=":
isTriggered = float64(leftValue) >= rightValue
}
return
} | src/modules/judge/judge/func.go | 0.548915 | 0.441974 | func.go | starcoder |
package main
import (
"fmt"
"math"
"github.com/golang-collections/go-datastructures/queue"
)
// Node is a single binary-tree node holding an int value and links to
// its left and right children.
type Node struct {
	value int
	left, right *Node
}

// Tree is a binary (search) tree identified by its root node; a nil
// root means the tree is empty.
type Tree struct {
	root *Node
}
// LevelOrderBinaryTree builds a complete binary tree from arr laid
// out in level order: the children of index i live at 2i+1 and 2i+2.
func LevelOrderBinaryTree(arr []int) *Tree {
	return &Tree{root: levelOrderBinaryTree(arr, 0, len(arr))}
}

// levelOrderBinaryTree materializes the node at position start and
// recursively attaches its subtrees while their indices stay in range.
func levelOrderBinaryTree(arr []int, start int, size int) *Node {
	node := &Node{value: arr[start]}
	if l := 2*start + 1; l < size {
		node.left = levelOrderBinaryTree(arr, l, size)
	}
	if r := 2*start + 2; r < size {
		node.right = levelOrderBinaryTree(arr, r, size)
	}
	return node
}
// Add inserts value into the BST; duplicates go to the right subtree.
func (t *Tree) Add(value int) {
	t.root = addUtil(t.root, value)
}

// addUtil descends to a free slot and hangs a new leaf there,
// returning the (possibly new) subtree root.
func addUtil(n *Node, value int) *Node {
	if n == nil {
		return &Node{value: value}
	}
	if value < n.value {
		n.left = addUtil(n.left, value)
	} else {
		n.right = addUtil(n.right, value)
	}
	return n
}
// PrintPreOrder prints the tree in root-left-right order, one space
// after each value, followed by a newline.
func (t *Tree) PrintPreOrder() {
	printPreOrder(t.root)
	fmt.Println()
}

func printPreOrder(n *Node) {
	if n == nil {
		return
	}
	fmt.Print(n.value, " ")
	printPreOrder(n.left)
	printPreOrder(n.right)
}

// PrintPostOrder prints the tree in left-right-root order.
func (t *Tree) PrintPostOrder() {
	printPostOrder(t.root)
	fmt.Println()
}

// CONSISTENCY FIX: printPostOrder and printInOrder previously printed
// values with no separator (fmt.Print(n.value)), so e.g. 1 followed
// by 2 was indistinguishable from 12. They now use the same
// "value + space" format as printPreOrder.
func printPostOrder(n *Node) {
	if n == nil {
		return
	}
	printPostOrder(n.left)
	printPostOrder(n.right)
	fmt.Print(n.value, " ")
}

// PrintInOrder prints the tree in sorted (left-root-right) order.
func (t *Tree) PrintInOrder() {
	printInOrder(t.root)
	fmt.Println()
}

func printInOrder(n *Node) {
	if n == nil {
		return
	}
	printInOrder(n.left)
	fmt.Print(n.value, " ")
	printInOrder(n.right)
}
// Sort sorts values in place.
// It builds a BST from the values and rewrites the slice with an
// in-order walk. appendValues starts from values[:0], which keeps the
// slice's backing array; since the tree holds exactly len(values)
// elements, append never reallocates and the original slice ends up
// overwritten in sorted order.
func Sort(values []int) {
	t := new(Tree)
	for _, v := range values {
		t.Add(v)
	}
	appendValues(values[:0], t.root)
}

// appendValues appends the elements of t to values in order
// and returns the resulting slice.
func appendValues(values []int, t *Node) []int {
	if t != nil {
		values = appendValues(values, t.left)
		values = append(values, t.value)
		values = appendValues(values, t.right)
	}
	return values
}
// PrintBredthFirst prints the tree level by level (breadth-first),
// using an external FIFO queue to hold pending nodes.
// NOTE(review): the name misspells "Breadth"; renaming would break
// callers, so it is kept as is.
func (t *Tree) PrintBredthFirst() {
	que := new(queue.Queue)
	var temp *Node
	if t.root != nil {
		que.Put(t.root)
	}
	for que.Empty() == false {
		// Get(1) returns a one-element []interface{}; the error is
		// ignored because the queue is known to be non-empty here.
		temp2, _ := que.Get(1)
		temp = temp2[0].(*Node)
		fmt.Print(temp.value, " ")
		if temp.left != nil {
			que.Put(temp.left)
		}
		if temp.right != nil {
			que.Put(temp.right)
		}
	}
	fmt.Println()
}
// NthPreOrder prints the value of the index-th node (1-based) visited
// in pre-order.
func (t *Tree) NthPreOrder(index int) {
	var seen int
	nthPreOrder(t.root, index, &seen)
}

func nthPreOrder(node *Node, index int, counter *int) {
	if node == nil {
		return
	}
	(*counter)++
	if *counter == index {
		fmt.Println(node.value)
	}
	nthPreOrder(node.left, index, counter)
	nthPreOrder(node.right, index, counter)
}

// NthPostOrder prints the value of the index-th node (1-based)
// visited in post-order.
func (t *Tree) NthPostOrder(index int) {
	var seen int
	nthPostOrder(t.root, index, &seen)
}

func nthPostOrder(node *Node, index int, counter *int) {
	if node == nil {
		return
	}
	nthPostOrder(node.left, index, counter)
	nthPostOrder(node.right, index, counter)
	(*counter)++
	if *counter == index {
		fmt.Println(node.value)
	}
}

// NthInOrder prints the value of the index-th node (1-based) visited
// in in-order, i.e. the index-th smallest value of a BST.
func (t *Tree) NthInOrder(index int) {
	var seen int
	nthInOrder(t.root, index, &seen)
}

func nthInOrder(node *Node, index int, counter *int) {
	if node == nil {
		return
	}
	nthInOrder(node.left, index, counter)
	(*counter)++
	if *counter == index {
		fmt.Println(node.value)
	}
	nthInOrder(node.right, index, counter)
}
// Find reports whether value is present, using iterative BST descent.
func (t *Tree) Find(value int) bool {
	for curr := t.root; curr != nil; {
		switch {
		case curr.value == value:
			return true
		case curr.value > value:
			curr = curr.left
		default:
			curr = curr.right
		}
	}
	return false
}
// FindMin returns the smallest value in the BST, or (0, false) for an
// empty tree (which also prints a diagnostic).
func (t *Tree) FindMin() (int, bool) {
	node := FindMin(t.root)
	if node == nil {
		return 0, false
	}
	return node.value, true
}

// FindMax returns the largest value in the BST, or (0, false) for an
// empty tree (which also prints a diagnostic).
func (t *Tree) FindMax() (int, bool) {
	node := FindMax(t.root)
	if node == nil {
		return 0, false
	}
	return node.value, true
}

// FindMaxNode returns the right-most (largest) node, or nil for an
// empty tree.
func (t *Tree) FindMaxNode() *Node {
	return FindMax(t.root)
}

// FindMinNode returns the left-most (smallest) node, or nil for an
// empty tree.
func (t *Tree) FindMinNode() *Node {
	return FindMin(t.root)
}

// FindMax returns the right-most node under curr; a nil curr prints
// "EmptyTreeException" and yields nil.
func FindMax(curr *Node) *Node {
	if curr == nil {
		fmt.Println("EmptyTreeException")
		return nil
	}
	for curr.right != nil {
		curr = curr.right
	}
	return curr
}

// FindMin returns the left-most node under curr; a nil curr prints
// "EmptyTreeException" and yields nil.
func FindMin(curr *Node) *Node {
	if curr == nil {
		fmt.Println("EmptyTreeException")
		return nil
	}
	for curr.left != nil {
		curr = curr.left
	}
	return curr
}
// Free discards the whole tree by dropping the root reference; the
// garbage collector reclaims the nodes.
func (t *Tree) Free() {
	t.root = nil
}
// DeleteNode removes one occurrence of value from the BST, if present.
func (t *Tree) DeleteNode(value int) {
	t.root = DeleteNode(t.root, value)
}

// DeleteNode removes value from the subtree rooted at node and
// returns the new subtree root. A node with two children is replaced
// by the maximum of its left subtree (the in-order predecessor),
// which is then deleted recursively.
func DeleteNode(node *Node, value int) *Node {
	var temp *Node = nil
	if node != nil {
		if node.value == value {
			// Leaf: simply drop it.
			if node.left == nil && node.right == nil {
				return nil
			}
			// One child: splice the child into this slot.
			if node.left == nil {
				temp = node.right
				return temp
			}
			if node.right == nil {
				temp = node.left
				return temp
			}
			// Two children: copy the predecessor's value here, then
			// delete that predecessor from the left subtree.
			maxNode := FindMax(node.left)
			maxValue := maxNode.value
			node.value = maxValue
			node.left = DeleteNode(node.left, maxValue)
		} else {
			if node.value > value {
				node.left = DeleteNode(node.left, value)
			} else {
				node.right = DeleteNode(node.right, value)
			}
		}
	}
	return node
}
// TreeDepth returns the number of nodes on the longest root-to-leaf
// path; an empty tree has depth 0.
func (t *Tree) TreeDepth() int {
	return treeDepth(t.root)
}

// treeDepth computes one plus the deeper of the two subtree depths.
func treeDepth(root *Node) int {
	if root == nil {
		return 0
	}
	l, r := treeDepth(root.left), treeDepth(root.right)
	if l >= r {
		return l + 1
	}
	return r + 1
}
// IsEqual reports whether two trees have identical shape and values.
func (t *Tree) IsEqual(t2 *Tree) bool {
	return isEqual(t.root, t2.root)
}

// isEqual compares two subtrees node by node; if either side is nil,
// they are equal only when both are nil.
func isEqual(a *Node, b *Node) bool {
	if a == nil || b == nil {
		return a == b
	}
	return a.value == b.value &&
		isEqual(a.left, b.left) &&
		isEqual(a.right, b.right)
}
// Ancestor returns the lowest common ancestor node of first and
// second in the BST, or nil when the tree is empty.
func (t *Tree) Ancestor(first int, second int) *Node {
	if first > second {
		first, second = second, first
	}
	return Ancestor(t.root, first, second)
}

// Ancestor descends while both keys lie strictly on the same side of
// curr; the first node that separates them (or equals one) is the LCA.
func Ancestor(curr *Node, first int, second int) *Node {
	for curr != nil {
		if first < curr.value && second < curr.value {
			curr = curr.left
			continue
		}
		if first > curr.value && second > curr.value {
			curr = curr.right
			continue
		}
		break
	}
	return curr
}
// CopyTree returns a deep copy of the tree.
func (t *Tree) CopyTree() *Tree {
	return &Tree{root: copyTree(t.root)}
}

func copyTree(curr *Node) *Node {
	if curr == nil {
		return nil
	}
	return &Node{
		value: curr.value,
		left:  copyTree(curr.left),
		right: copyTree(curr.right),
	}
}

// CopyMirrorTree returns a deep copy with left/right children swapped
// at every level (the mirror image).
func (t *Tree) CopyMirrorTree() *Tree {
	return &Tree{root: copyMirrorTree(t.root)}
}

func copyMirrorTree(curr *Node) *Node {
	if curr == nil {
		return nil
	}
	return &Node{
		value: curr.value,
		left:  copyMirrorTree(curr.right),
		right: copyMirrorTree(curr.left),
	}
}
// NumNodes returns the total number of nodes in the tree.
func (t *Tree) NumNodes() int {
	return numNodes(t.root)
}

func numNodes(curr *Node) int {
	if curr == nil {
		return 0
	}
	return 1 + numNodes(curr.left) + numNodes(curr.right)
}

// NumFullNodesBT counts the nodes that have both children.
func (t *Tree) NumFullNodesBT() int {
	return numFullNodesBT(t.root)
}

func numFullNodesBT(curr *Node) int {
	if curr == nil {
		return 0
	}
	count := numFullNodesBT(curr.left) + numFullNodesBT(curr.right)
	if curr.left != nil && curr.right != nil {
		count++
	}
	return count
}
// MaxLengthPathBT returns the diameter of the tree: the number of
// nodes on the longest path between any two nodes.
func (t *Tree) MaxLengthPathBT() int {
	return maxLengthPathBT(t.root)
}

// maxLengthPathBT computes the diameter of the subtree rooted at
// curr. The longest path either passes through curr (left depth +
// right depth + 1) or lies entirely inside one subtree.
// NOTE(review): treeDepth is recomputed at every node, making this
// O(n^2) in the worst case; acceptable for small trees.
func maxLengthPathBT(curr *Node) int {
	var max, leftPath, rightPath, leftMax, rightMax int
	if curr == nil {
		return 0
	}
	leftPath = treeDepth(curr.left)
	rightPath = treeDepth(curr.right)
	max = leftPath + rightPath + 1
	leftMax = maxLengthPathBT(curr.left)
	rightMax = maxLengthPathBT(curr.right)
	if leftMax > max {
		max = leftMax
	}
	if rightMax > max {
		max = rightMax
	}
	return max
}
// NumLeafNodes counts the nodes that have no children.
func (t *Tree) NumLeafNodes() int {
	return numLeafNodes(t.root)
}

func numLeafNodes(curr *Node) int {
	if curr == nil {
		return 0
	}
	if curr.left == nil && curr.right == nil {
		return 1
	}
	return numLeafNodes(curr.left) + numLeafNodes(curr.right)
}

// SumAllBT returns the sum of every value stored in the tree.
func (t *Tree) SumAllBT() int {
	return sumAllBT(t.root)
}

func sumAllBT(curr *Node) int {
	if curr == nil {
		return 0
	}
	return curr.value + sumAllBT(curr.left) + sumAllBT(curr.right)
}
// IsBST3 checks the BST property by comparing each node against the
// extrema of its subtrees (O(n^2) worst case). Duplicates must live
// in the left subtree: FindMin(right) <= root fails the check.
func IsBST3(root *Node) bool {
	if root == nil {
		return true
	}
	l, r := root.left, root.right
	if l != nil && FindMax(l).value > root.value {
		return false
	}
	if r != nil && FindMin(r).value <= root.value {
		return false
	}
	return IsBST3(l) && IsBST3(r)
}

// IsBST reports whether the tree satisfies the BST invariant using
// min/max range propagation (O(n)).
func (t *Tree) IsBST() bool {
	return IsBST(t.root, math.MinInt32, math.MaxInt32)
}

// IsBST verifies every value in the subtree lies within [min, max];
// the right branch is handled iteratively to limit recursion depth.
func IsBST(curr *Node, min int, max int) bool {
	for curr != nil {
		if curr.value < min || curr.value > max {
			return false
		}
		if !IsBST(curr.left, min, curr.value) {
			return false
		}
		min, curr = curr.value, curr.right
	}
	return true
}
// IsBST2 verifies the BST property with a single in-order sweep,
// checking that visited values never decrease.
//
// BUG FIX: the previous wrapper started the running maximum at 0 (the
// int zero value), so any tree containing negative values was
// incorrectly reported as not a BST. It now starts at math.MinInt32,
// matching the lower bound IsBST uses.
func (t *Tree) IsBST2() bool {
	c := math.MinInt32
	return IsBST2(t.root, &c)
}

// IsBST2 performs the in-order walk; *count carries the most recently
// visited value and must start below every value in the tree.
func IsBST2(root *Node, count *int) bool {
	if root == nil {
		return true
	}
	if !IsBST2(root.left, count) {
		return false
	}
	if *count > root.value {
		return false
	}
	*count = root.value
	return IsBST2(root.right, count)
}
// Stack is a minimal int stack backed by a slice.
type Stack struct {
	s []int
}

// Push appends value to the top of the stack.
func (s *Stack) Push(value int) {
	s.s = append(s.s, value)
}

// Pop removes and returns the top value.
// NOTE(review): popping an empty stack panics with an out-of-range
// index; callers must check IsEmpty first.
func (s *Stack) Pop() int {
	length := len(s.s)
	res := s.s[length-1]
	s.s = s.s[:length-1]
	return res
}

// IsEmpty reports whether the stack holds no elements.
func (s *Stack) IsEmpty() bool {
	length := len(s.s)
	return length == 0
}

// Length returns the number of stacked elements.
func (s *Stack) Length() int {
	length := len(s.s)
	return length
}

// Print writes the stack contents bottom-to-top on a single line.
func (s *Stack) Print() {
	length := len(s.s)
	for i := 0; i < length; i++ {
		fmt.Print(s.s[i], " ")
	}
	fmt.Println()
}
// PrintAllPath prints every root-to-leaf path, one path per line.
func (t *Tree) PrintAllPath() {
	stk := new(Stack)
	printAllPath(t.root, stk)
}

// printAllPath walks the tree keeping the current path on stk; when a
// leaf is reached the accumulated path is printed. Note the right
// subtree is explored before the left one.
func printAllPath(curr *Node, stk *Stack) {
	if curr == nil {
		return
	}
	stk.Push(curr.value)
	if curr.left == nil && curr.right == nil {
		stk.Print()
		stk.Pop()
		return
	}
	printAllPath(curr.right, stk)
	printAllPath(curr.left, stk)
	stk.Pop()
}
// LCA returns the lowest-common-ancestor value of first and second in
// an arbitrary binary tree, or (0, false) when no ancestor is found
// (printing a diagnostic in that case).
func (t *Tree) LCA(first int, second int) (int, bool) {
	if ans := LCAUtil(t.root, first, second); ans != nil {
		return ans.value, true
	}
	fmt.Println("NotFoundException")
	return 0, false
}

// LCAUtil returns the deepest node that either matches one of the
// targets or has the two targets split across its subtrees.
func LCAUtil(curr *Node, first int, second int) *Node {
	if curr == nil {
		return nil
	}
	if curr.value == first || curr.value == second {
		return curr
	}
	left := LCAUtil(curr.left, first, second)
	right := LCAUtil(curr.right, first, second)
	switch {
	case left != nil && right != nil:
		return curr
	case left != nil:
		return left
	default:
		return right
	}
}

// LcaBST finds the LCA using BST ordering: descend while both targets
// lie on the same side of the current node.
func (t *Tree) LcaBST(first int, second int) (int, bool) {
	return LcaBST(t.root, first, second)
}

func LcaBST(curr *Node, first int, second int) (int, bool) {
	for curr != nil {
		if curr.value > first && curr.value > second {
			curr = curr.left
			continue
		}
		if curr.value < first && curr.value < second {
			curr = curr.right
			continue
		}
		return curr.value, true
	}
	fmt.Println("NotFoundException")
	return 0, false
}
// TrimOutsidedataRange deletes every node whose value lies outside
// [min, max].
func (t *Tree) TrimOutsidedataRange(min int, max int) {
	t.root = trimOutsidedataRange(t.root, min, max)
}

// trimOutsidedataRange trims both children first, then replaces curr
// with the surviving subtree when curr itself is out of range (BST
// ordering guarantees the discarded side is entirely out of range).
func trimOutsidedataRange(curr *Node, min int, max int) *Node {
	if curr == nil {
		return nil
	}
	curr.left = trimOutsidedataRange(curr.left, min, max)
	curr.right = trimOutsidedataRange(curr.right, min, max)
	switch {
	case curr.value < min:
		return curr.right
	case curr.value > max:
		return curr.left
	default:
		return curr
	}
}

// PrintDataInRange prints, in sorted order, every value v with
// min <= v <= max.
func (t *Tree) PrintDataInRange(min int, max int) {
	printDataInRange(t.root, min, max)
}

func printDataInRange(root *Node, min int, max int) {
	if root == nil {
		return
	}
	printDataInRange(root.left, min, max)
	if min <= root.value && root.value <= max {
		fmt.Print(root.value, " ")
	}
	printDataInRange(root.right, min, max)
}
// FloorBST returns the largest stored value <= val, or math.MaxInt32
// when no such value exists.
func (t *Tree) FloorBST(val int) int {
	floor := math.MaxInt32
	for curr := t.root; curr != nil; {
		switch {
		case curr.value == val:
			return val
		case curr.value > val:
			curr = curr.left
		default:
			// curr.value < val: candidate floor, look for a closer
			// one on the right.
			floor = curr.value
			curr = curr.right
		}
	}
	return floor
}

// CeilBST returns the smallest stored value >= val, or math.MinInt32
// when no such value exists.
func (t *Tree) CeilBST(val int) int {
	ceil := math.MinInt32
	for curr := t.root; curr != nil; {
		switch {
		case curr.value == val:
			return val
		case curr.value > val:
			// curr.value > val: candidate ceil, look for a closer
			// one on the left.
			ceil = curr.value
			curr = curr.left
		default:
			curr = curr.right
		}
	}
	return ceil
}
// FindMaxBT returns the largest value in an arbitrary binary tree (no
// BST ordering assumed), or math.MinInt32 for an empty tree.
func (t *Tree) FindMaxBT() int {
	return findMaxBT(t.root)
}

func findMaxBT(curr *Node) int {
	if curr == nil {
		return math.MinInt32
	}
	max := curr.value
	if left := findMaxBT(curr.left); left > max {
		max = left
	}
	if right := findMaxBT(curr.right); right > max {
		max = right
	}
	return max
}

// SearchBT reports whether value occurs anywhere in the (unordered)
// binary tree rooted at root.
//
// BUG FIX: the original guard
//	if root == nil || root.value == value { return false }
// returned false when the value WAS found at the current node, so the
// function could never report true. A matching node now returns true.
func SearchBT(root *Node, value int) bool {
	if root == nil {
		return false
	}
	if root.value == value {
		return true
	}
	return SearchBT(root.left, value) || SearchBT(root.right, value)
}
// CreateBinaryTree builds a height-balanced BST from a sorted slice
// by recursively rooting each subtree at its middle element.
func CreateBinaryTree(arr []int) *Tree {
	return &Tree{root: createBinaryTreeUtil(arr, 0, len(arr)-1)}
}

// createBinaryTreeUtil converts arr[start..end] (inclusive) into a
// balanced subtree.
func createBinaryTreeUtil(arr []int, start int, end int) *Node {
	if start > end {
		return nil
	}
	mid := (start + end) / 2
	return &Node{
		value: arr[mid],
		left:  createBinaryTreeUtil(arr, start, mid-1),
		right: createBinaryTreeUtil(arr, mid+1, end),
	}
}
func main() {
arr := []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
t := CreateBinaryTree(arr)
}
//t2 := LevelOrderBinaryTree(arr)
//t2.PrintPreOrder()
//t := new(Tree)
//t.Add(2)
//t.Add(1)
//t.Add(3)
//t.Add(4)
//t.InOrder()
//t.PreOrder()
//t.PostOrder()
//lst := []int{2, 1, 3, 4}
//sort(lst)
//fmt.Println(lst)
//t.PrintBredthFirst()
//t.NthPreOrder(2)
//t.NthPostOrder(2)
//t.NthInOrder(2)
//fmt.Println(t.Find(10))
//fmt.Println(t.Find(3))
//fmt.Println(t.FindMax())
//fmt.Println(t.FindMaxNode())
//fmt.Println(t.FindMin())
//fmt.Println(t.FindMinNode())
//t.Free()
//t.InOrder()
//fmt.Println()
//t.PrintAllPath()
} | corpus/hermant.data-structure-algo/BinaryTree/Tree.go | 0.645008 | 0.418816 | Tree.go | starcoder |
package nn
import "github.com/MaxSlyugrov/cldr"
var currencies = []cldr.Currency{
{Currency: "ADP", DisplayName: "andorransk peseta", Symbol: ""},
{Currency: "AED", DisplayName: "UAE dirham", Symbol: ""},
{Currency: "AFA", DisplayName: "afghani (1927–2002)", Symbol: ""},
{Currency: "AFN", DisplayName: "afghani", Symbol: ""},
{Currency: "ALL", DisplayName: "albansk lek", Symbol: ""},
{Currency: "AMD", DisplayName: "armensk dram", Symbol: ""},
{Currency: "ANG", DisplayName: "nederlansk antillegylden", Symbol: ""},
{Currency: "AOA", DisplayName: "angolsk kwanza", Symbol: ""},
{Currency: "AOK", DisplayName: "angolsk kwanza (1977–1990)", Symbol: ""},
{Currency: "AON", DisplayName: "angolsk ny kwanza (1990–2000)", Symbol: ""},
{Currency: "AOR", DisplayName: "angolsk kwanza reajustado (1995–1999)", Symbol: ""},
{Currency: "ARA", DisplayName: "argentisk austral", Symbol: ""},
{Currency: "ARP", DisplayName: "argentinsk peso (1983–1985)", Symbol: ""},
{Currency: "ARS", DisplayName: "argentinsk peso", Symbol: ""},
{Currency: "ATS", DisplayName: "austerriksk schilling", Symbol: ""},
{Currency: "AUD", DisplayName: "australsk dollar", Symbol: ""},
{Currency: "AWG", DisplayName: "arubisk gylden", Symbol: ""},
{Currency: "AZM", DisplayName: "aserbaijansk manat", Symbol: ""},
{Currency: "AZN", DisplayName: "aserbajdsjansk manat", Symbol: ""},
{Currency: "BAD", DisplayName: "bosnisk-hercegovinsk dinar", Symbol: ""},
{Currency: "BAM", DisplayName: "bosnisk-hercegovinsk mark (konvertibel)", Symbol: ""},
{Currency: "BBD", DisplayName: "barbadisk dollar", Symbol: ""},
{Currency: "BDT", DisplayName: "bangladeshisk taka", Symbol: ""},
{Currency: "BEC", DisplayName: "belgisk franc (konvertibel)", Symbol: ""},
{Currency: "BEF", DisplayName: "belgisk franc", Symbol: ""},
{Currency: "BEL", DisplayName: "belgisk franc (finansiell)", Symbol: ""},
{Currency: "BGL", DisplayName: "bulgarsk hard lev", Symbol: ""},
{Currency: "BGN", DisplayName: "bulgarsk ny lev", Symbol: ""},
{Currency: "BHD", DisplayName: "bahrainsk dinar", Symbol: ""},
{Currency: "BIF", DisplayName: "burundisk franc", Symbol: ""},
{Currency: "BMD", DisplayName: "bermudisk dollar", Symbol: ""},
{Currency: "BND", DisplayName: "bruneisk dollar", Symbol: ""},
{Currency: "BOB", DisplayName: "boliviano", Symbol: ""},
{Currency: "BOP", DisplayName: "bolivisk peso", Symbol: ""},
{Currency: "BOV", DisplayName: "bolivisk mvdol", Symbol: ""},
{Currency: "BRB", DisplayName: "brasiliansk cruzeiro novo (1967–1986)", Symbol: ""},
{Currency: "BRC", DisplayName: "brasiliansk cruzado", Symbol: ""},
{Currency: "BRE", DisplayName: "brasiliansk cruzeiro (1990–1993)", Symbol: ""},
{Currency: "BRL", DisplayName: "brasiliansk real", Symbol: ""},
{Currency: "BRN", DisplayName: "brasiliansk cruzado novo", Symbol: ""},
{Currency: "BRR", DisplayName: "brasiliansk cruzeiro", Symbol: ""},
{Currency: "BSD", DisplayName: "bahamisk dollar", Symbol: ""},
{Currency: "BTN", DisplayName: "bhutansk ngultrum", Symbol: ""},
{Currency: "BUK", DisplayName: "burmesisk kyat", Symbol: ""},
{Currency: "BWP", DisplayName: "botswansk pula", Symbol: ""},
{Currency: "BYB", DisplayName: "kviterussisk ny rubel (1994–1999)", Symbol: ""},
{Currency: "BYR", DisplayName: "kviterussisk rubel", Symbol: ""},
{Currency: "BZD", DisplayName: "belizisk dollar", Symbol: ""},
{Currency: "CAD", DisplayName: "kanadisk dollar", Symbol: ""},
{Currency: "CDF", DisplayName: "kongolesisk franc", Symbol: ""},
{Currency: "CHE", DisplayName: "WIR euro", Symbol: ""},
{Currency: "CHF", DisplayName: "sveitsisk franc", Symbol: ""},
{Currency: "CHW", DisplayName: "WIR franc", Symbol: ""},
{Currency: "CLF", DisplayName: "chilensk unidades de fomento", Symbol: ""},
{Currency: "CLP", DisplayName: "chilensk peso", Symbol: ""},
{Currency: "CNY", DisplayName: "kinesisk yuan renminbi", Symbol: ""},
{Currency: "COP", DisplayName: "colombiansk peso", Symbol: ""},
{Currency: "COU", DisplayName: "unidad de valor real", Symbol: ""},
{Currency: "CRC", DisplayName: "costaricansk colon", Symbol: ""},
{Currency: "CSD", DisplayName: "gammal serbisk dinar", Symbol: ""},
{Currency: "CSK", DisplayName: "tsjekkoslovakisk koruna (hard)", Symbol: ""},
{Currency: "CUC", DisplayName: "kubansk peso (konvertibel)", Symbol: ""},
{Currency: "CUP", DisplayName: "kubansk peso", Symbol: ""},
{Currency: "CVE", DisplayName: "kappverdisk escudo", Symbol: ""},
{Currency: "CYP", DisplayName: "kypriotisk pund", Symbol: ""},
{Currency: "CZK", DisplayName: "tsjekkisk koruna", Symbol: ""},
{Currency: "DDM", DisplayName: "austtysk mark", Symbol: ""},
{Currency: "DEM", DisplayName: "tysk mark", Symbol: ""},
{Currency: "DJF", DisplayName: "djiboutisk franc", Symbol: ""},
{Currency: "DKK", DisplayName: "dansk krone", Symbol: ""},
{Currency: "DOP", DisplayName: "dominikansk peso", Symbol: ""},
{Currency: "DZD", DisplayName: "algerisk dinar", Symbol: ""},
{Currency: "ECS", DisplayName: "ecuadoriansk sucre", Symbol: ""},
{Currency: "ECV", DisplayName: "ecuadoriansk unidad de valor constante (UVC)", Symbol: ""},
{Currency: "EEK", DisplayName: "estisk kroon", Symbol: ""},
{Currency: "EGP", DisplayName: "egyptisk pund", Symbol: ""},
{Currency: "ERN", DisplayName: "eritreisk nakfa", Symbol: ""},
{Currency: "ESA", DisplayName: "spansk peseta (A–konto)", Symbol: ""},
{Currency: "ESB", DisplayName: "spansk peseta (konvertibel konto)", Symbol: ""},
{Currency: "ESP", DisplayName: "spansk peseta", Symbol: ""},
{Currency: "ETB", DisplayName: "etiopisk birr", Symbol: ""},
{Currency: "EUR", DisplayName: "euro", Symbol: ""},
{Currency: "FIM", DisplayName: "finsk mark", Symbol: ""},
{Currency: "FJD", DisplayName: "fijiansk dollar", Symbol: ""},
{Currency: "FKP", DisplayName: "Falkland-pund", Symbol: ""},
{Currency: "FRF", DisplayName: "fransk franc", Symbol: ""},
{Currency: "GBP", DisplayName: "britisk pund", Symbol: ""},
{Currency: "GEK", DisplayName: "georgisk kupon larit", Symbol: ""},
{Currency: "GEL", DisplayName: "georgisk lari", Symbol: ""},
{Currency: "GHC", DisplayName: "ghanesisk cedi (1979–2007)", Symbol: ""},
{Currency: "GHS", DisplayName: "ghanesisk cedi", Symbol: ""},
{Currency: "GIP", DisplayName: "gibraltarsk pund", Symbol: ""},
{Currency: "GMD", DisplayName: "gambisk dalasi", Symbol: ""},
{Currency: "GNF", DisplayName: "guineansk franc", Symbol: ""},
{Currency: "GNS", DisplayName: "guineansk syli", Symbol: ""},
{Currency: "GQE", DisplayName: "ekvatorialguineansk ekwele guineana", Symbol: ""},
{Currency: "GRD", DisplayName: "gresk drakme", Symbol: ""},
{Currency: "GTQ", DisplayName: "guatemalansk quetzal", Symbol: ""},
{Currency: "GWE", DisplayName: "portugisisk guinea escudo", Symbol: ""},
{Currency: "GWP", DisplayName: "Guinea-Bissau-peso", Symbol: ""},
{Currency: "GYD", DisplayName: "guyansk dollar", Symbol: ""},
{Currency: "HKD", DisplayName: "Hongkong-dollar", Symbol: ""},
{Currency: "HNL", DisplayName: "honduransk lempira", Symbol: ""},
{Currency: "HRD", DisplayName: "kroatisk dinar", Symbol: ""},
{Currency: "HRK", DisplayName: "kroatisk kuna", Symbol: ""},
{Currency: "HTG", DisplayName: "haitisk gourde", Symbol: ""},
{Currency: "HUF", DisplayName: "ungarsk forint", Symbol: ""},
{Currency: "IDR", DisplayName: "indonesisk rupi", Symbol: ""},
{Currency: "IEP", DisplayName: "irsk pund", Symbol: ""},
{Currency: "ILP", DisplayName: "israelsk pund", Symbol: ""},
{Currency: "ILS", DisplayName: "israelsk ny shekel", Symbol: ""},
{Currency: "INR", DisplayName: "indisk rupi", Symbol: ""},
{Currency: "IQD", DisplayName: "iraksk dinar", Symbol: ""},
{Currency: "IRR", DisplayName: "iransk rial", Symbol: ""},
{Currency: "ISK", DisplayName: "islandsk krone", Symbol: ""},
{Currency: "ITL", DisplayName: "italiensk lire", Symbol: ""},
{Currency: "JMD", DisplayName: "jamaikansk dollar", Symbol: ""},
{Currency: "JOD", DisplayName: "jordansk dinar", Symbol: ""},
{Currency: "JPY", DisplayName: "japansk yen", Symbol: ""},
{Currency: "KES", DisplayName: "kenyansk shilling", Symbol: ""},
{Currency: "KGS", DisplayName: "kirgisisk som", Symbol: ""},
{Currency: "KHR", DisplayName: "kambodsjansk riel", Symbol: ""},
{Currency: "KMF", DisplayName: "komorisk franc", Symbol: ""},
{Currency: "KPW", DisplayName: "nordkoreansk won", Symbol: ""},
{Currency: "KRW", DisplayName: "sørkoreansk won", Symbol: ""},
{Currency: "KWD", DisplayName: "kuwaitisk dinar", Symbol: ""},
{Currency: "KYD", DisplayName: "caymansk dollar", Symbol: ""},
{Currency: "KZT", DisplayName: "kasakhstansk tenge", Symbol: ""},
{Currency: "LAK", DisplayName: "laotisk kip", Symbol: ""},
{Currency: "LBP", DisplayName: "libanesisk pund", Symbol: ""},
{Currency: "LKR", DisplayName: "srilankisk rupi", Symbol: ""},
{Currency: "LRD", DisplayName: "liberisk dollar", Symbol: ""},
{Currency: "LSL", DisplayName: "lesothisk loti", Symbol: ""},
{Currency: "LTL", DisplayName: "litauisk lita", Symbol: ""},
{Currency: "LTT", DisplayName: "litauisk talona", Symbol: ""},
{Currency: "LUC", DisplayName: "luxemburgsk konvertibel franc", Symbol: ""},
{Currency: "LUF", DisplayName: "luxemburgsk franc", Symbol: ""},
{Currency: "LUL", DisplayName: "luxemburgsk finansiell franc", Symbol: ""},
{Currency: "LVL", DisplayName: "latvisk lat", Symbol: ""},
{Currency: "LVR", DisplayName: "latvisk rubel", Symbol: ""},
{Currency: "LYD", DisplayName: "libysk dinar", Symbol: ""},
{Currency: "MAD", DisplayName: "marokkansk dirham", Symbol: ""},
{Currency: "MAF", DisplayName: "marokkansk franc", Symbol: ""},
{Currency: "MDL", DisplayName: "moldovsk leu", Symbol: ""},
{Currency: "MGA", DisplayName: "madagassisk ariary", Symbol: ""},
{Currency: "MGF", DisplayName: "madagassisk franc", Symbol: ""},
{Currency: "MKD", DisplayName: "makedonsk denar", Symbol: ""},
{Currency: "MLF", DisplayName: "malisk franc", Symbol: ""},
{Currency: "MMK", DisplayName: "myanmarsk kyat", Symbol: ""},
{Currency: "MNT", DisplayName: "mongolsk tugrik", Symbol: ""},
{Currency: "MOP", DisplayName: "makaosk pataca", Symbol: ""},
{Currency: "MRO", DisplayName: "mauritansk ouguiya", Symbol: ""},
{Currency: "MTL", DisplayName: "maltesisk lira", Symbol: ""},
{Currency: "MTP", DisplayName: "maltesisk pund", Symbol: ""},
{Currency: "MUR", DisplayName: "mauritansk rupi", Symbol: ""},
{Currency: "MVR", DisplayName: "maldivisk rufiyaa", Symbol: ""},
{Currency: "MWK", DisplayName: "malawisk kwacha", Symbol: ""},
{Currency: "MXN", DisplayName: "meksikansk peso", Symbol: ""},
{Currency: "MXP", DisplayName: "meksikansk sølvpeso (1861–1992)", Symbol: ""},
{Currency: "MXV", DisplayName: "meksikansk unidad de inversion (UDI)", Symbol: ""},
{Currency: "MYR", DisplayName: "malaysisk ringgit", Symbol: ""},
{Currency: "MZE", DisplayName: "mosambikisk escudo", Symbol: ""},
{Currency: "MZM", DisplayName: "gammal mosambikisk metical", Symbol: ""},
{Currency: "MZN", DisplayName: "mosambikisk metical", Symbol: ""},
{Currency: "NAD", DisplayName: "namibisk dollar", Symbol: ""},
{Currency: "NGN", DisplayName: "nigeriansk naira", Symbol: ""},
{Currency: "NIC", DisplayName: "nicaraguansk cordoba", Symbol: ""},
{Currency: "NIO", DisplayName: "nicaraguansk cordoba oro", Symbol: ""},
{Currency: "NLG", DisplayName: "nederlandsk gylden", Symbol: ""},
{Currency: "NOK", DisplayName: "norsk krone", Symbol: "kr"},
{Currency: "NPR", DisplayName: "nepalsk rupi", Symbol: ""},
{Currency: "NZD", DisplayName: "new zealandsk dollar", Symbol: ""},
{Currency: "OMR", DisplayName: "omansk rial", Symbol: ""},
{Currency: "PAB", DisplayName: "panamansk balboa", Symbol: ""},
{Currency: "PEI", DisplayName: "peruansk inti", Symbol: ""},
{Currency: "PEN", DisplayName: "peruansk nuevo sol", Symbol: ""},
{Currency: "PES", DisplayName: "peruansk sol", Symbol: ""},
{Currency: "PGK", DisplayName: "papuansk kina", Symbol: ""},
{Currency: "PHP", DisplayName: "filippinsk peso", Symbol: ""},
{Currency: "PKR", DisplayName: "pakistansk rupi", Symbol: ""},
{Currency: "PLN", DisplayName: "polsk zloty", Symbol: ""},
{Currency: "PLZ", DisplayName: "polsk zloty (1950–1995)", Symbol: ""},
{Currency: "PTE", DisplayName: "portugisisk escudo", Symbol: ""},
{Currency: "PYG", DisplayName: "paraguayansk guarani", Symbol: ""},
{Currency: "QAR", DisplayName: "qatarsk rial", Symbol: ""},
{Currency: "RHD", DisplayName: "rhodesisk dollar", Symbol: ""},
{Currency: "ROL", DisplayName: "gammal rumensk leu", Symbol: ""},
{Currency: "RON", DisplayName: "rumensk leu", Symbol: ""},
{Currency: "RSD", DisplayName: "serbisk dinar", Symbol: ""},
{Currency: "RUB", DisplayName: "russisk rubel", Symbol: ""},
{Currency: "RUR", DisplayName: "russisk rubel (1991–1998)", Symbol: ""},
{Currency: "RWF", DisplayName: "rwandisk franc", Symbol: ""},
{Currency: "SAR", DisplayName: "saudiarabisk rial", Symbol: ""},
{Currency: "SBD", DisplayName: "salomonsk dollar", Symbol: ""},
{Currency: "SCR", DisplayName: "seychellisk rupi", Symbol: ""},
{Currency: "SDD", DisplayName: "gammal sudanesisk dinar", Symbol: ""},
{Currency: "SDG", DisplayName: "sudansk pund", Symbol: ""},
{Currency: "SDP", DisplayName: "gammalt sudanesisk pund", Symbol: ""},
{Currency: "SEK", DisplayName: "svensk krone", Symbol: ""},
{Currency: "SGD", DisplayName: "singaporsk dollar", Symbol: ""},
{Currency: "SHP", DisplayName: "sankthelensk pund", Symbol: ""},
{Currency: "SIT", DisplayName: "slovensk tolar", Symbol: ""},
{Currency: "SKK", DisplayName: "slovakisk koruna", Symbol: ""},
{Currency: "SLL", DisplayName: "sierraleonsk leone", Symbol: ""},
{Currency: "SOS", DisplayName: "somalisk shilling", Symbol: ""},
{Currency: "SRD", DisplayName: "surinamsk dollar", Symbol: ""},
{Currency: "SRG", DisplayName: "surinamsk gylden", Symbol: ""},
{Currency: "STD", DisplayName: "Sao Tome og Principe-dobra", Symbol: ""},
{Currency: "SUR", DisplayName: "sovjetisk rubel", Symbol: ""},
{Currency: "SVC", DisplayName: "salvadoransk colon", Symbol: ""},
{Currency: "SYP", DisplayName: "syrisk pund", Symbol: ""},
{Currency: "SZL", DisplayName: "swazilandsk lilangeni", Symbol: ""},
{Currency: "THB", DisplayName: "thailandsk baht", Symbol: ""},
{Currency: "TJR", DisplayName: "tadsjikisk rubel", Symbol: ""},
{Currency: "TJS", DisplayName: "tadsjikisk somoni", Symbol: ""},
{Currency: "TMM", DisplayName: "turkmensk manat", Symbol: ""},
{Currency: "TMT", DisplayName: "turkmenistansk manat", Symbol: ""},
{Currency: "TND", DisplayName: "tunisisk dinar", Symbol: ""},
{Currency: "TOP", DisplayName: "tongansk paʻanga", Symbol: ""},
{Currency: "TPE", DisplayName: "timoresisk escudo", Symbol: ""},
{Currency: "TRL", DisplayName: "gammal tyrkiske lire", Symbol: ""},
{Currency: "TRY", DisplayName: "tyrkisk lire", Symbol: ""},
{Currency: "TTD", DisplayName: "trinidadisk dollar", Symbol: ""},
{Currency: "TWD", DisplayName: "taiwansk ny dollar", Symbol: ""},
{Currency: "TZS", DisplayName: "tanzaniansk shilling", Symbol: ""},
{Currency: "UAH", DisplayName: "ukrainsk hryvnia", Symbol: ""},
{Currency: "UAK", DisplayName: "ukrainsk karbovanetz", Symbol: ""},
{Currency: "UGS", DisplayName: "ugandisk shilling (1966–1987)", Symbol: ""},
{Currency: "UGX", DisplayName: "ugandisk shilling", Symbol: ""},
{Currency: "USD", DisplayName: "amerikansk dollar", Symbol: ""},
{Currency: "USN", DisplayName: "amerikansk dollar (neste dag)", Symbol: ""},
{Currency: "USS", DisplayName: "amerikansk dollar (same dag)", Symbol: ""},
{Currency: "UYI", DisplayName: "uruguayansk peso en unidades indexadas", Symbol: ""},
{Currency: "UYP", DisplayName: "uruguayansk peso (1975–1993)", Symbol: ""},
{Currency: "UYU", DisplayName: "uruguayansk peso", Symbol: ""},
{Currency: "UZS", DisplayName: "usbekisk sum", Symbol: ""},
{Currency: "VEB", DisplayName: "venezuelansk bolivar (1871–2008)", Symbol: ""},
{Currency: "VEF", DisplayName: "venezuelansk bolivar", Symbol: ""},
{Currency: "VND", DisplayName: "vietnamesisk dong", Symbol: ""},
{Currency: "VUV", DisplayName: "vanuatuisk vatu", Symbol: ""},
{Currency: "WST", DisplayName: "vestsamoisk tala", Symbol: ""},
{Currency: "XAF", DisplayName: "CFA franc BEAC", Symbol: ""},
{Currency: "XAG", DisplayName: "sølv", Symbol: ""},
{Currency: "XAU", DisplayName: "gull", Symbol: ""},
{Currency: "XBA", DisplayName: "europeisk samansett eining", Symbol: ""},
{Currency: "XBB", DisplayName: "europeisk monetær eining", Symbol: ""},
{Currency: "XBC", DisplayName: "europeisk kontoeining (XBC)", Symbol: ""},
{Currency: "XBD", DisplayName: "europeisk kontoeining (XBD)", Symbol: ""},
{Currency: "XCD", DisplayName: "austkaribisk dollar", Symbol: ""},
{Currency: "XDR", DisplayName: "spesielle trekkrettar", Symbol: ""},
{Currency: "XEU", DisplayName: "europeisk valutaeining", Symbol: ""},
{Currency: "XFO", DisplayName: "fransk gullfranc", Symbol: ""},
{Currency: "XFU", DisplayName: "fransk UIC-franc", Symbol: ""},
{Currency: "XOF", DisplayName: "CFA franc BCEAO", Symbol: ""},
{Currency: "XPD", DisplayName: "palladium", Symbol: ""},
{Currency: "XPF", DisplayName: "CFP franc", Symbol: ""},
{Currency: "XPT", DisplayName: "platina", Symbol: ""},
{Currency: "XRE", DisplayName: "RINET-fond", Symbol: ""},
{Currency: "XTS", DisplayName: "testvalutakode", Symbol: ""},
{Currency: "XXX", DisplayName: "ukjend eller ugyldig valuta", Symbol: ""},
{Currency: "YDD", DisplayName: "jemenittisk dinar", Symbol: ""},
{Currency: "YER", DisplayName: "jemenittisk rial", Symbol: ""},
{Currency: "YUD", DisplayName: "jugoslavisk dinar (hard)", Symbol: ""},
{Currency: "YUM", DisplayName: "jugoslavisk noviy-dinar", Symbol: ""},
{Currency: "YUN", DisplayName: "jugoslavisk konvertibel dinar", Symbol: ""},
{Currency: "ZAL", DisplayName: "sørafrikansk rand (finansiell)", Symbol: ""},
{Currency: "ZAR", DisplayName: "sørafrikansk rand", Symbol: ""},
{Currency: "ZMK", DisplayName: "zambisk kwacha (1968–2012)", Symbol: ""},
{Currency: "ZMW", DisplayName: "zambisk kwacha", Symbol: ""},
{Currency: "ZRN", DisplayName: "zairisk ny zaire", Symbol: ""},
{Currency: "ZRZ", DisplayName: "zairisk zaire", Symbol: ""},
{Currency: "ZWD", DisplayName: "zimbabwisk dollar", Symbol: ""},
{Currency: "ZWL", DisplayName: "Zimbabwe-dollar (2009)", Symbol: ""},
}
package main
import (
"math"
"math/rand"
. "github.com/jakecoffman/cp"
"github.com/jakecoffman/cp/examples"
)
const (
	// bevel is the corner radius applied to the hexagon poly shapes;
	// the hexagon vertices built in main are shrunk by this amount so the
	// overall shape radius is unchanged after beveling.
	bevel = 1.0
)
// main builds the bouncy-hexagons demo: a static terrain outline made of
// perfectly elastic segments, filled with 500 beveled hexagon bodies that
// start at random positions and velocities, then hands the space to the
// shared example runner.
func main() {
	space := NewSpace()
	space.Iterations = 10

	// Shift the terrain so its coordinates are centered on the origin.
	terrainOffset := Vector{-320, -240}
	for i := 1; i < len(bouncy_terrain_verts); i++ {
		start := bouncy_terrain_verts[i-1].Add(terrainOffset)
		end := bouncy_terrain_verts[i].Add(terrainOffset)
		wall := space.AddShape(NewSegment(space.StaticBody, start, end, 0))
		wall.SetElasticity(1)
	}

	// Build a regular hexagon shrunk by the bevel radius; NewPolyShape adds
	// the bevel back, keeping the effective radius at `radius`.
	const sides = 6
	radius := 5.0
	hexagon := make([]Vector, 0, sides)
	for i := 0; i < sides; i++ {
		angle := -2.0 * math.Pi * float64(i) / float64(sides)
		hexagon = append(hexagon, Vector{math.Cos(angle), math.Sin(angle)}.Mult(radius-bevel))
	}

	for i := 0; i < 500; i++ {
		mass := radius * radius
		hex := space.AddBody(NewBody(mass, MomentForPoly(mass, sides, hexagon, Vector{}, 0)))
		hex.SetPosition(randUnitCircle().Mult(130).Add(Vector{}))
		hex.SetVelocityVector(randUnitCircle().Mult(50))
		shape := space.AddShape(NewPolyShape(hex, sides, hexagon, NewTransformIdentity(), bevel))
		shape.SetElasticity(1)
	}

	examples.Main(space, 1.0/60.0, update, examples.DefaultDraw)
}
// update advances the simulation by one fixed time step; it is the per-frame
// callback passed to examples.Main.
func update(space *Space, dt float64) {
	space.Step(dt)
}
// randUnitCircle returns a uniformly distributed random point strictly inside
// the unit circle, via rejection sampling: candidates are drawn uniformly from
// the [-1, 1] x [-1, 1] square and rejected until one lands inside the circle
// (acceptance probability pi/4 per draw). A loop is used instead of the
// previous self-recursion, which grew the stack on every rejection.
func randUnitCircle() Vector {
	for {
		v := Vector{rand.Float64()*2.0 - 1.0, rand.Float64()*2.0 - 1.0}
		if v.LengthSq() < 1.0 {
			return v
		}
	}
}
var bouncy_terrain_verts = []Vector{
{537.18, 23.00}, {520.50, 36.00}, {501.53, 63.00}, {496.14, 76.00}, {498.86, 86.00}, {504.00, 90.51}, {508.00, 91.36}, {508.77, 84.00}, {513.00, 77.73}, {519.00, 74.48}, {530.00, 74.67}, {545.00, 54.65},
{554.00, 48.77}, {562.00, 46.39}, {568.00, 45.94}, {568.61, 47.00}, {567.94, 55.00}, {571.27, 64.00}, {572.92, 80.00}, {572.00, 81.39}, {563.00, 79.93}, {556.00, 82.69}, {551.49, 88.00}, {549.00, 95.76},
{538.00, 93.40}, {530.00, 102.38}, {523.00, 104.00}, {517.00, 103.02}, {516.22, 109.00}, {518.96, 116.00}, {526.00, 121.15}, {534.00, 116.48}, {543.00, 116.77}, {549.28, 121.00}, {554.00, 130.17}, {564.00, 125.67},
{575.60, 129.00}, {573.31, 121.00}, {567.77, 111.00}, {575.00, 106.47}, {578.51, 102.00}, {580.25, 95.00}, {577.98, 87.00}, {582.00, 85.71}, {597.00, 89.46}, {604.80, 95.00}, {609.28, 104.00}, {610.55, 116.00},
{609.30, 125.00}, {600.80, 142.00}, {597.31, 155.00}, {584.00, 167.23}, {577.86, 175.00}, {583.52, 184.00}, {582.64, 195.00}, {591.00, 196.56}, {597.81, 201.00}, {607.45, 219.00}, {607.51, 246.00}, {600.00, 275.46},
{588.00, 267.81}, {579.00, 264.91}, {557.00, 264.41}, {552.98, 259.00}, {548.00, 246.18}, {558.00, 247.12}, {565.98, 244.00}, {571.10, 237.00}, {571.61, 229.00}, {568.25, 222.00}, {562.00, 217.67}, {544.00, 213.93},
{536.73, 214.00}, {535.60, 204.00}, {539.69, 181.00}, {542.84, 171.00}, {550.43, 161.00}, {540.00, 156.27}, {536.62, 152.00}, {534.70, 146.00}, {527.00, 141.88}, {518.59, 152.00}, {514.51, 160.00}, {510.33, 175.00},
{519.38, 183.00}, {520.52, 194.00}, {516.00, 201.27}, {505.25, 206.00}, {507.57, 223.00}, {519.90, 260.00}, {529.00, 260.48}, {534.00, 262.94}, {538.38, 268.00}, {540.00, 275.00}, {537.06, 284.00}, {530.00, 289.23},
{520.00, 289.23}, {513.00, 284.18}, {509.71, 286.00}, {501.69, 298.00}, {501.56, 305.00}, {504.30, 311.00}, {512.00, 316.43}, {521.00, 316.42}, {525.67, 314.00}, {535.00, 304.98}, {562.00, 294.80}, {573.00, 294.81},
{587.52, 304.00}, {600.89, 310.00}, {596.96, 322.00}, {603.28, 327.00}, {606.52, 333.00}, {605.38, 344.00}, {597.65, 352.00}, {606.36, 375.00}, {607.16, 384.00}, {603.40, 393.00}, {597.00, 398.14}, {577.00, 386.15},
{564.35, 373.00}, {565.21, 364.00}, {562.81, 350.00}, {553.00, 346.06}, {547.48, 338.00}, {547.48, 330.00}, {550.00, 323.30}, {544.00, 321.53}, {537.00, 322.70}, {532.00, 326.23}, {528.89, 331.00}, {527.83, 338.00},
{533.02, 356.00}, {542.00, 360.73}, {546.68, 369.00}, {545.38, 379.00}, {537.58, 386.00}, {537.63, 388.00}, {555.00, 407.47}, {563.00, 413.52}, {572.57, 418.00}, {582.72, 426.00}, {578.00, 431.12}, {563.21, 440.00},
{558.00, 449.27}, {549.00, 452.94}, {541.00, 451.38}, {536.73, 448.00}, {533.00, 441.87}, {520.00, 437.96}, {514.00, 429.69}, {490.00, 415.15}, {472.89, 399.00}, {472.03, 398.00}, {474.00, 396.71}, {486.00, 393.61},
{492.00, 385.85}, {492.00, 376.15}, {489.04, 371.00}, {485.00, 368.11}, {480.00, 376.27}, {472.00, 379.82}, {463.00, 378.38}, {455.08, 372.00}, {446.00, 377.69}, {439.00, 385.24}, {436.61, 391.00}, {437.52, 404.00},
{440.00, 409.53}, {463.53, 433.00}, {473.80, 441.00}, {455.00, 440.30}, {443.00, 436.18}, {436.00, 431.98}, {412.00, 440.92}, {397.00, 442.46}, {393.59, 431.00}, {393.71, 412.00}, {400.00, 395.10}, {407.32, 387.00},
{408.54, 380.00}, {407.42, 375.00}, {403.97, 370.00}, {399.00, 366.74}, {393.00, 365.68}, {391.23, 374.00}, {387.00, 380.27}, {381.00, 383.52}, {371.56, 384.00}, {364.98, 401.00}, {362.96, 412.00}, {363.63, 435.00},
{345.00, 433.55}, {344.52, 442.00}, {342.06, 447.00}, {337.00, 451.38}, {330.00, 453.00}, {325.00, 452.23}, {318.00, 448.17}, {298.00, 453.70}, {284.00, 451.49}, {278.62, 449.00}, {291.47, 408.00}, {291.77, 398.00},
{301.00, 393.83}, {305.00, 393.84}, {305.60, 403.00}, {310.00, 409.47}, {318.00, 413.07}, {325.00, 412.40}, {332.31, 407.00}, {335.07, 400.00}, {334.40, 393.00}, {329.00, 385.69}, {319.00, 382.79}, {301.00, 389.23},
{289.00, 389.97}, {265.00, 389.82}, {251.00, 385.85}, {245.00, 389.23}, {239.00, 389.94}, {233.00, 388.38}, {226.00, 382.04}, {206.00, 374.75}, {206.00, 394.00}, {204.27, 402.00}, {197.00, 401.79}, {191.00, 403.49},
{186.53, 407.00}, {183.60, 412.00}, {183.60, 422.00}, {189.00, 429.31}, {196.00, 432.07}, {203.00, 431.40}, {209.47, 427.00}, {213.00, 419.72}, {220.00, 420.21}, {227.00, 418.32}, {242.00, 408.41}, {258.98, 409.00},
{250.00, 435.43}, {239.00, 438.78}, {223.00, 448.19}, {209.00, 449.70}, {205.28, 456.00}, {199.00, 460.23}, {190.00, 460.52}, {182.73, 456.00}, {178.00, 446.27}, {160.00, 441.42}, {148.35, 435.00}, {149.79, 418.00},
{157.72, 401.00}, {161.00, 396.53}, {177.00, 385.00}, {180.14, 380.00}, {181.11, 374.00}, {180.00, 370.52}, {170.00, 371.68}, {162.72, 368.00}, {158.48, 361.00}, {159.56, 349.00}, {154.00, 342.53}, {146.00, 339.85},
{136.09, 343.00}, {130.64, 351.00}, {131.74, 362.00}, {140.61, 374.00}, {130.68, 387.00}, {120.75, 409.00}, {118.09, 421.00}, {117.92, 434.00}, {100.00, 432.40}, {87.00, 427.48}, {81.59, 423.00}, {73.64, 409.00},
{72.57, 398.00}, {74.62, 386.00}, {78.80, 378.00}, {88.00, 373.43}, {92.49, 367.00}, {93.32, 360.00}, {91.30, 353.00}, {103.00, 342.67}, {109.00, 343.10}, {116.00, 340.44}, {127.33, 330.00}, {143.00, 327.24},
{154.30, 322.00}, {145.00, 318.06}, {139.77, 311.00}, {139.48, 302.00}, {144.95, 293.00}, {143.00, 291.56}, {134.00, 298.21}, {118.00, 300.75}, {109.40, 305.00}, {94.67, 319.00}, {88.00, 318.93}, {81.00, 321.69},
{67.24, 333.00}, {56.68, 345.00}, {53.00, 351.40}, {47.34, 333.00}, {50.71, 314.00}, {56.57, 302.00}, {68.00, 287.96}, {91.00, 287.24}, {110.00, 282.36}, {133.80, 271.00}, {147.34, 256.00}, {156.47, 251.00},
{157.26, 250.00}, {154.18, 242.00}, {154.48, 236.00}, {158.72, 229.00}, {166.71, 224.00}, {170.15, 206.00}, {170.19, 196.00}, {167.24, 188.00}, {160.00, 182.67}, {150.00, 182.66}, {143.60, 187.00}, {139.96, 195.00},
{139.50, 207.00}, {136.45, 221.00}, {136.52, 232.00}, {133.28, 238.00}, {129.00, 241.38}, {119.00, 243.07}, {115.00, 246.55}, {101.00, 253.16}, {86.00, 257.32}, {63.00, 259.24}, {57.00, 257.31}, {50.54, 252.00},
{47.59, 247.00}, {46.30, 240.00}, {47.58, 226.00}, {50.00, 220.57}, {58.00, 226.41}, {69.00, 229.17}, {79.00, 229.08}, {94.50, 225.00}, {100.21, 231.00}, {107.00, 233.47}, {107.48, 224.00}, {109.94, 219.00},
{115.00, 214.62}, {122.57, 212.00}, {116.00, 201.49}, {104.00, 194.57}, {90.00, 194.04}, {79.00, 198.21}, {73.00, 198.87}, {62.68, 191.00}, {62.58, 184.00}, {64.42, 179.00}, {75.00, 167.70}, {80.39, 157.00},
{68.79, 140.00}, {61.67, 126.00}, {61.47, 117.00}, {64.43, 109.00}, {63.10, 96.00}, {56.48, 82.00}, {48.00, 73.88}, {43.81, 66.00}, {43.81, 56.00}, {50.11, 46.00}, {59.00, 41.55}, {71.00, 42.64},
{78.00, 36.77}, {83.00, 34.75}, {99.00, 34.32}, {117.00, 38.92}, {133.00, 55.15}, {142.00, 50.70}, {149.74, 51.00}, {143.55, 68.00}, {153.28, 74.00}, {156.23, 79.00}, {157.00, 84.00}, {156.23, 89.00},
{153.28, 94.00}, {144.58, 99.00}, {151.52, 112.00}, {151.51, 124.00}, {150.00, 126.36}, {133.00, 130.25}, {126.71, 125.00}, {122.00, 117.25}, {114.00, 116.23}, {107.73, 112.00}, {104.48, 106.00}, {104.32, 99.00},
{106.94, 93.00}, {111.24, 89.00}, {111.60, 85.00}, {107.24, 73.00}, {102.00, 67.57}, {99.79, 67.00}, {99.23, 76.00}, {95.00, 82.27}, {89.00, 85.52}, {79.84, 86.00}, {86.73, 114.00}, {98.00, 136.73},
{99.00, 137.61}, {109.00, 135.06}, {117.00, 137.94}, {122.52, 146.00}, {122.94, 151.00}, {121.00, 158.58}, {134.00, 160.97}, {153.00, 157.45}, {171.30, 150.00}, {169.06, 142.00}, {169.77, 136.00}, {174.00, 129.73},
{181.46, 126.00}, {182.22, 120.00}, {182.20, 111.00}, {180.06, 101.00}, {171.28, 85.00}, {171.75, 80.00}, {182.30, 53.00}, {189.47, 50.00}, {190.62, 38.00}, {194.00, 33.73}, {199.00, 30.77}, {208.00, 30.48},
{216.00, 34.94}, {224.00, 31.47}, {240.00, 30.37}, {247.00, 32.51}, {249.77, 35.00}, {234.75, 53.00}, {213.81, 93.00}, {212.08, 99.00}, {213.00, 101.77}, {220.00, 96.77}, {229.00, 96.48}, {236.28, 101.00},
{240.00, 107.96}, {245.08, 101.00}, {263.00, 65.32}, {277.47, 48.00}, {284.00, 47.03}, {286.94, 41.00}, {292.00, 36.62}, {298.00, 35.06}, {304.00, 35.77}, {314.00, 43.81}, {342.00, 32.56}, {359.00, 31.32},
{365.00, 32.57}, {371.00, 36.38}, {379.53, 48.00}, {379.70, 51.00}, {356.00, 52.19}, {347.00, 54.74}, {344.38, 66.00}, {341.00, 70.27}, {335.00, 73.52}, {324.00, 72.38}, {317.00, 65.75}, {313.00, 67.79},
{307.57, 76.00}, {315.00, 78.62}, {319.28, 82.00}, {322.23, 87.00}, {323.00, 94.41}, {334.00, 92.49}, {347.00, 87.47}, {349.62, 80.00}, {353.00, 75.73}, {359.00, 72.48}, {366.00, 72.32}, {372.00, 74.94},
{377.00, 81.34}, {382.00, 83.41}, {392.00, 83.40}, {399.00, 79.15}, {404.00, 85.74}, {411.00, 85.06}, {417.00, 86.62}, {423.38, 93.00}, {425.05, 104.00}, {438.00, 110.35}, {450.00, 112.17}, {452.62, 103.00},
{456.00, 98.73}, {462.00, 95.48}, {472.00, 95.79}, {471.28, 92.00}, {464.00, 84.62}, {445.00, 80.39}, {436.00, 75.33}, {428.00, 68.46}, {419.00, 68.52}, {413.00, 65.27}, {408.48, 58.00}, {409.87, 46.00},
{404.42, 39.00}, {408.00, 33.88}, {415.00, 29.31}, {429.00, 26.45}, {455.00, 28.77}, {470.00, 33.81}, {482.00, 42.16}, {494.00, 46.85}, {499.65, 36.00}, {513.00, 25.95}, {529.00, 22.42}, {537.18, 23.00},
} | examples/bouncyhexagons/bouncyhexagons.go | 0.602529 | 0.573917 | bouncyhexagons.go | starcoder |
package idx
import (
"github.com/galaxy-digital/lachesis-base/common/bigendian"
)
// Index types used throughout the package. Each is a thin wrapper around an
// unsigned integer so different kinds of indexes cannot be mixed up
// accidentally by the type checker.
type (
	// Epoch numeration.
	Epoch uint32

	// Event numeration.
	Event uint32

	// Block numeration.
	Block uint64

	// Lamport numeration (logical clock value).
	Lamport uint32

	// Frame numeration.
	Frame uint32

	// Pack numeration.
	Pack uint32

	// ValidatorID numeration.
	ValidatorID uint32
)
// Bytes returns the big-endian byte representation of the epoch index.
func (e Epoch) Bytes() []byte {
	return bigendian.Uint32ToBytes(uint32(e))
}

// Bytes returns the big-endian byte representation of the event index.
func (e Event) Bytes() []byte {
	return bigendian.Uint32ToBytes(uint32(e))
}

// Bytes returns the big-endian byte representation of the block index.
func (b Block) Bytes() []byte {
	return bigendian.Uint64ToBytes(uint64(b))
}

// Bytes returns the big-endian byte representation of the Lamport index.
func (l Lamport) Bytes() []byte {
	return bigendian.Uint32ToBytes(uint32(l))
}

// Bytes returns the big-endian byte representation of the pack index.
func (p Pack) Bytes() []byte {
	return bigendian.Uint32ToBytes(uint32(p))
}

// Bytes returns the big-endian byte representation of the validator ID.
func (s ValidatorID) Bytes() []byte {
	return bigendian.Uint32ToBytes(uint32(s))
}

// Bytes returns the big-endian byte representation of the frame index.
func (f Frame) Bytes() []byte {
	return bigendian.Uint32ToBytes(uint32(f))
}
// BytesToEpoch converts big-endian bytes to an epoch index.
func BytesToEpoch(b []byte) Epoch {
	return Epoch(bigendian.BytesToUint32(b))
}

// BytesToEvent converts big-endian bytes to an event index.
func BytesToEvent(b []byte) Event {
	return Event(bigendian.BytesToUint32(b))
}

// BytesToBlock converts big-endian bytes to a block index.
func BytesToBlock(b []byte) Block {
	return Block(bigendian.BytesToUint64(b))
}

// BytesToLamport converts big-endian bytes to a Lamport index.
func BytesToLamport(b []byte) Lamport {
	return Lamport(bigendian.BytesToUint32(b))
}

// BytesToFrame converts big-endian bytes to a frame index.
func BytesToFrame(b []byte) Frame {
	return Frame(bigendian.BytesToUint32(b))
}

// BytesToPack converts big-endian bytes to a pack index.
func BytesToPack(b []byte) Pack {
	return Pack(bigendian.BytesToUint32(b))
}

// BytesToValidatorID converts big-endian bytes to a validator ID.
func BytesToValidatorID(b []byte) ValidatorID {
	return ValidatorID(bigendian.BytesToUint32(b))
}
// MaxLamport return max value
func MaxLamport(x, y Lamport) Lamport {
if x > y {
return x
}
return y
} | inter/idx/index.go | 0.743168 | 0.572544 | index.go | starcoder |
package commands
// EventsShort is the one-line description of the `status events` command.
var EventsShort = `[Alpha] Poll the cluster until all provided resources have become Current and list the status change events.`

// EventsLong is the long-form help text for the `status events` command.
var EventsLong = `
[Alpha] Poll the cluster for the state of all the provided resources until either they have all become
Current or the timeout is reached. The output will be status change events.
The list of resources which should be polled are provided as manifests either on the filesystem or
on StdIn.
DIR:
Path to local directory. If not provided, input is expected on StdIn.
`

// EventsExamples lists example invocations of the `status events` command.
var EventsExamples = `
# Read resources from the filesystem and wait up to 1 minute for all of them to become Current
resource status events my-dir/
# Fetch all resources in the cluster and wait up to 5 minutes for all of them to become Current
kubectl get all --all-namespaces -oyaml | resource status events --timeout=5m`

// FetchShort is the one-line description of the `status fetch` command.
var FetchShort = `[Alpha] Fetch the state of the provided resources from the cluster and display status in a table.`

// FetchLong is the long-form help text for the `status fetch` command.
var FetchLong = `
[Alpha] Fetches the state of all provided resources from the cluster and displays the status in
a table.
The list of resources are provided as manifests either on the filesystem or on StdIn.
DIR:
Path to local directory.
`

// FetchExamples lists example invocations of the `status fetch` command.
var FetchExamples = `
# Read resources from the filesystem and wait up to 1 minute for all of them to become Current
resource status fetch my-dir/
# Fetch all resources in the cluster and wait up to 5 minutes for all of them to become Current
kubectl get all --all-namespaces -oyaml | resource status fetch`

// WaitShort is the one-line description of the `status wait` command.
var WaitShort = `[Alpha] Poll the cluster until all provided resources have become Current and display progress in a table. `

// WaitLong is the long-form help text for the `status wait` command.
var WaitLong = `
[Alpha] Poll the cluster for the state of all the provided resources until either they have all become
Current or the timeout is reached. The output will be presented as a table.
The list of resources which should be polled are provided as manifests either on the filesystem or
on StdIn.
DIR:
Path to local directory. If not provided, input is expected on StdIn.
`
// WaitExamples lists example invocations of the `status wait` command.
// The extraction junk that previously trailed the closing backtick (and broke
// compilation) is removed.
var WaitExamples = `
# Read resources from the filesystem and wait up to 1 minute for all of them to become Current
resource status wait my-dir/
# Fetch all resources in the cluster and wait up to 5 minutes for all of them to become Current
kubectl get all --all-namespaces -oyaml | resource status wait --timeout=5m`
package query
// JoinType enumerates the SQL join clauses supported by the query builder.
type JoinType int

const (
	// use “iota + 1” to be sure that the enum type is initialized.
	JoinTypeBase JoinType = iota + 1
	JoinTypeLeft
	JoinTypeLeftOuter
	JoinTypeRight
	JoinTypeRightOuter
	JoinTypeInner
	JoinTypeFull
	JoinTypeFullOuter
	JoinTypeNatural
	JoinTypeCross
	// endJoinType is a sentinel: the first value past the valid range.
	endJoinType
)

// types maps each valid JoinType to its SQL keyword.
var types = map[JoinType]string{
	JoinTypeBase:       "JOIN",
	JoinTypeLeft:       "LEFT JOIN",
	JoinTypeLeftOuter:  "LEFT OUTER JOIN",
	JoinTypeRight:      "RIGHT JOIN",
	JoinTypeRightOuter: "RIGHT OUTER JOIN",
	JoinTypeInner:      "INNER JOIN",
	JoinTypeFull:       "FULL JOIN",
	JoinTypeFullOuter:  "FULL OUTER JOIN",
	JoinTypeNatural:    "NATURAL JOIN",
	JoinTypeCross:      "CROSS JOIN",
}

// IsValid reports whether e is one of the declared join types.
func (e JoinType) IsValid() bool {
	return e >= JoinTypeBase && e < endJoinType
}

// Ordinal returns the numeric value of the join type.
func (e JoinType) Ordinal() int {
	return int(e)
}

// String returns the SQL keyword for the join type, or the empty string if e
// is not a valid JoinType.
func (e JoinType) String() string {
	if !e.IsValid() {
		// The previous code looked up types[endJoinType] — a key that is
		// never in the map — and returned "" only via the map's zero value.
		// Return "" explicitly instead.
		// TODO maybe return error
		return ""
	}
	return types[e]
}
// Operator enumerates the SQL comparison operators supported by the query
// builder.
type Operator int

const (
	// use “iota + 1” to be sure that the enum type is initialized.
	OperatorEq Operator = iota + 1
	OperatorNotEq
	OperatorGt
	OperatorGtEq
	OperatorLt
	OperatorLtEq
	// endOperator is a sentinel: the first value past the valid range.
	endOperator
)

// operators maps each valid Operator to its SQL symbol.
var operators = map[Operator]string{
	OperatorEq:    "=",
	OperatorNotEq: "<>",
	OperatorGt:    ">",
	OperatorGtEq:  ">=",
	OperatorLt:    "<",
	OperatorLtEq:  "<=",
}

// IsValid reports whether e is one of the declared operators.
func (e Operator) IsValid() bool {
	return e >= OperatorEq && e < endOperator
}

// Ordinal returns the numeric value of the operator.
func (e Operator) Ordinal() int {
	return int(e)
}

// String returns the SQL symbol for the operator, or the empty string if e is
// not a valid Operator.
func (e Operator) String() string {
	if !e.IsValid() {
		// The previous code looked up operators[endOperator] — a key that is
		// never in the map — and returned "" only via the map's zero value.
		// Return "" explicitly instead.
		// TODO maybe return error
		return ""
	}
	return operators[e]
}
// Combine enumerates the SQL boolean connectives used to chain conditions.
type Combine int

const (
	// use “iota + 1” to be sure that the enum type is initialized.
	CombineAnd Combine = iota + 1
	CombineOr
	// endCombine is a sentinel: the first value past the valid range.
	endCombine
)

// combines maps each valid Combine to its SQL keyword.
var combines = map[Combine]string{
	CombineAnd: "AND",
	CombineOr:  "OR",
}

// IsValid reports whether e is one of the declared connectives.
func (e Combine) IsValid() bool {
	return e >= CombineAnd && e < endCombine
}

// Ordinal returns the numeric value of the connective.
func (e Combine) Ordinal() int {
	return int(e)
}

// String returns the SQL keyword for the connective, or the empty string if e
// is not a valid Combine.
func (e Combine) String() string {
	if !e.IsValid() {
		// The previous code looked up combines[endCombine] — a key that is
		// never in the map — and returned "" only via the map's zero value.
		// Return "" explicitly instead.
		// TODO maybe return error
		return ""
	}
	return combines[e]
}
// Placeholder enumerates the SQL bind-parameter placeholder styles.
type Placeholder int

const (
	// use “iota + 1” to be sure that the enum type is initialized.
	PlaceholderQMark Placeholder = iota + 1
	PlaceholderDollar
	// endPlaceholder is a sentinel: the first value past the valid range.
	endPlaceholder
)

// placeholders maps each valid Placeholder to its symbol.
var placeholders = map[Placeholder]string{
	PlaceholderQMark:  "?",
	PlaceholderDollar: "$",
}

// IsValid reports whether e is one of the declared placeholder styles.
func (e Placeholder) IsValid() bool {
	return e >= PlaceholderQMark && e < endPlaceholder
}

// Ordinal returns the numeric value of the placeholder style.
func (e Placeholder) Ordinal() int {
	return int(e)
}

// String returns the symbol for the placeholder style, or the empty string if
// e is not a valid Placeholder.
func (e Placeholder) String() string {
	if !e.IsValid() {
		// The previous code looked up placeholders[endPlaceholder] — a key
		// that is never in the map — and returned "" only via the map's zero
		// value. Return "" explicitly instead.
		// TODO maybe return error
		return ""
	}
	return placeholders[e]
}
// Direction enumerates the SQL sort directions.
type Direction int

const (
	// use “iota + 1” to be sure that the enum type is initialized.
	DirectionAsc Direction = iota + 1
	DirectionDesc
	// endDirection is a sentinel: the first value past the valid range.
	endDirection
)

// directions maps each valid Direction to its SQL keyword.
var directions = map[Direction]string{
	DirectionAsc:  "ASC",
	DirectionDesc: "DESC",
}

// IsValid reports whether e is one of the declared sort directions.
func (e Direction) IsValid() bool {
	return e >= DirectionAsc && e < endDirection
}

// Ordinal returns the numeric value of the sort direction.
func (e Direction) Ordinal() int {
	return int(e)
}

// String returns the SQL keyword for the sort direction, or the empty string
// if e is not a valid Direction.
func (e Direction) String() string {
	if !e.IsValid() {
		// The previous code looked up directions[endDirection] — a key that
		// is never in the map — and returned "" only via the map's zero
		// value. Return "" explicitly instead.
		// TODO maybe return error
		return ""
	}
	return directions[e]
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.