code
stringlengths
114
1.05M
path
stringlengths
3
312
quality_prob
float64
0.5
0.99
learning_prob
float64
0.2
1
filename
stringlengths
3
168
kind
stringclasses
1 value
package convert

import (
	"database/sql"
	"fmt"
	"math/big"
	"reflect"
	"strconv"
)

// AsFloat64 converts src to a float64.
//
// Integer and float values convert directly; []byte and string values are
// parsed with strconv.ParseFloat; sql.Null* wrappers use their underlying
// value (Valid is not consulted, matching historical behavior). Other types
// fall back to reflection on the value's kind. An error is returned for
// unsupported types or unparsable strings.
func AsFloat64(src interface{}) (float64, error) {
	switch v := src.(type) {
	case int:
		return float64(v), nil
	case int8:
		return float64(v), nil
	case int16:
		return float64(v), nil
	case int32:
		return float64(v), nil
	case int64:
		return float64(v), nil
	case uint:
		return float64(v), nil
	case uint8:
		return float64(v), nil
	case uint16:
		return float64(v), nil
	case uint32:
		return float64(v), nil
	case uint64:
		return float64(v), nil
	case float32:
		// Direct cases for floats, consistent with the integer handling
		// above (previously only reachable via the reflect fallback).
		return float64(v), nil
	case float64:
		return v, nil
	case []byte:
		return strconv.ParseFloat(string(v), 64)
	case string:
		return strconv.ParseFloat(v, 64)
	case *sql.NullString:
		return strconv.ParseFloat(v.String, 64)
	case *sql.NullInt32:
		return float64(v.Int32), nil
	case *sql.NullInt64:
		return float64(v.Int64), nil
	case *sql.NullFloat64:
		return v.Float64, nil
	}
	// Fallback for named types whose underlying kind is numeric or string.
	rv := reflect.ValueOf(src)
	switch rv.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return float64(rv.Int()), nil
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		return float64(rv.Uint()), nil
	case reflect.Float32, reflect.Float64:
		return rv.Float(), nil
	case reflect.String:
		return strconv.ParseFloat(rv.String(), 64)
	}
	// BUG FIX: the message previously said "as int64".
	return 0, fmt.Errorf("unsupported value %T as float64", src)
}

// AsBigFloat converts src to a *big.Float.
//
// NULL sql.Null* values yield (nil, nil). String-like inputs that cannot be
// parsed now yield an error instead of silently returning zero (SetString's
// ok result was previously ignored).
func AsBigFloat(src interface{}) (*big.Float, error) {
	res := big.NewFloat(0)
	switch v := src.(type) {
	case int:
		res.SetInt64(int64(v))
		return res, nil
	case int8:
		res.SetInt64(int64(v))
		return res, nil
	case int16:
		res.SetInt64(int64(v))
		return res, nil
	case int32:
		res.SetInt64(int64(v))
		return res, nil
	case int64:
		res.SetInt64(v)
		return res, nil
	case uint:
		res.SetUint64(uint64(v))
		return res, nil
	case uint8:
		res.SetUint64(uint64(v))
		return res, nil
	case uint16:
		res.SetUint64(uint64(v))
		return res, nil
	case uint32:
		res.SetUint64(uint64(v))
		return res, nil
	case uint64:
		res.SetUint64(v)
		return res, nil
	case []byte:
		if _, ok := res.SetString(string(v)); !ok {
			return nil, fmt.Errorf("cannot parse %q as big.Float", string(v))
		}
		return res, nil
	case string:
		if _, ok := res.SetString(v); !ok {
			return nil, fmt.Errorf("cannot parse %q as big.Float", v)
		}
		return res, nil
	case *sql.NullString:
		if v.Valid {
			if _, ok := res.SetString(v.String); !ok {
				return nil, fmt.Errorf("cannot parse %q as big.Float", v.String)
			}
			return res, nil
		}
		return nil, nil
	case *sql.NullInt32:
		if v.Valid {
			res.SetInt64(int64(v.Int32))
			return res, nil
		}
		return nil, nil
	case *sql.NullInt64:
		if v.Valid {
			res.SetInt64(v.Int64)
			return res, nil
		}
		return nil, nil
	case *sql.NullFloat64:
		// Added for consistency with AsFloat64, which handles this type.
		if v.Valid {
			res.SetFloat64(v.Float64)
			return res, nil
		}
		return nil, nil
	}
	// Fallback for named types whose underlying kind is numeric or string.
	rv := reflect.ValueOf(src)
	switch rv.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		res.SetInt64(rv.Int())
		return res, nil
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
		res.SetUint64(rv.Uint())
		return res, nil
	case reflect.Float32, reflect.Float64:
		res.SetFloat64(rv.Float())
		return res, nil
	case reflect.String:
		if _, ok := res.SetString(rv.String()); !ok {
			return nil, fmt.Errorf("cannot parse %q as big.Float", rv.String())
		}
		return res, nil
	}
	return nil, fmt.Errorf("unsupported value %T as big.Float", src)
}
vendor/xorm.io/xorm/convert/float.go
0.567937
0.473962
float.go
starcoder
package main

import "fmt"

/*
Problem 29. Divide Two Integers (LeetCode).

Given two integers dividend and divisor, divide them without using the
multiplication, division or mod operators, truncating toward zero.

Example 1: dividend = 10, divisor = 3  -> 3
Example 2: dividend = 7,  divisor = -3 -> -2

Both operands are 32-bit signed integers and divisor is never 0. If the
quotient overflows the 32-bit signed range, return 2^31 - 1.
*/

// maxInt32 is the saturation value returned when the quotient overflows the
// 32-bit signed range (only possible for -2^31 / -1).
const maxInt32 = 1<<31 - 1

// divide computes dividend/divisor using only subtraction and bit shifts.
//
// Strategy: determine the result sign, work with non-negative magnitudes
// (Go's int is at least 64-bit here, so negating -2^31 cannot overflow),
// then repeatedly subtract the largest divisor<<shift that still fits while
// accumulating 1<<shift into the quotient. This needs O(log^2 n) steps
// instead of one subtraction per unit of the quotient.
//
// The previous version looped `for ; i >= 0; i--` on a uint, which is always
// true and wraps past zero, and left a debug fmt.Println in place; both are
// fixed here.
func divide(dividend int, divisor int) int {
	// The result is negative exactly when the operand signs differ.
	negative := (dividend < 0) != (divisor < 0)

	a, b := dividend, divisor
	if a < 0 {
		a = -a
	}
	if b < 0 {
		b = -b
	}

	quotient := 0
	for a >= b {
		// Find the largest shift such that b<<shift still fits in a.
		shift := 0
		for a >= b<<(shift+1) {
			shift++
		}
		quotient += 1 << shift
		a -= b << shift
	}

	if negative {
		quotient = -quotient
	}
	// Saturate on overflow per the problem statement; negative quotients
	// can never overflow (the most negative case is -2^31 / 1 = -2^31).
	if quotient > maxInt32 {
		return maxInt32
	}
	return quotient
}

func main() {
	fmt.Println(divide(-2147483648, -2147483648))
}
Programs/029Divide Two Integers/029Divide Two Integers.go
0.551574
0.548915
029Divide Two Integers.go
starcoder
package unitpacking import "github.com/EliCDavis/vector" // Quadrant2D represents a quadrant in a 2D space. type Quadrant2D int const ( TopLeft Quadrant2D = iota TopRight BottomLeft BottomRight ) // QuadRecurse recursively builds a quad tree based on the given Vector2. The // tree's depth is determined by the number of levels passed in. func QuadRecurse(in, min, max vector.Vector2, levels int) []Quadrant2D { if levels <= 0 { return nil } midX := ((max.X() + min.X()) / 2) midY := ((max.Y() + min.Y()) / 2) var dir Quadrant2D var newMinX float64 var newMinY float64 var newMaxX float64 var newMaxY float64 if in.X() < midX { newMinX = min.X() newMaxX = midX if in.Y() < midY { newMinY = min.Y() newMaxY = midY dir = BottomLeft } else { newMinY = midY newMaxY = max.Y() dir = TopLeft } } else { newMinX = midX newMaxX = max.X() if in.Y() < midY { newMinY = min.Y() newMaxY = midY dir = BottomRight } else { newMinY = midY newMaxY = max.Y() dir = TopRight } } return append( QuadRecurse( in, vector.NewVector2(newMinX, newMinY), vector.NewVector2(newMaxX, newMaxY), levels-1, ), dir, ) } // Vec2ToByteQuad creates a quadtree of depth 4 and encodes itself into a // single byte func Vec2ToByteQuad(v vector.Vector2) byte { results := QuadRecurse( v, vector.NewVector2(-1, -1), vector.NewVector2(1, 1), 4, ) return byte(results[0]) | (byte(results[1]) << 2) | (byte(results[2]) << 4) | (byte(results[3]) << 6) } // ByteToVec2 calculates a Vector2 based on the encoded quadtree inside the // byte. 
func ByteQuadToVec2(b byte) vector.Vector2 { directions := make([]Quadrant2D, 4) directions[3] = Quadrant2D(b & 0b11) directions[2] = Quadrant2D((b >> 2) & 0b11) directions[1] = Quadrant2D((b >> 4) & 0b11) directions[0] = Quadrant2D((b >> 6) & 0b11) return recalc(directions) } // Vec2ToTwoByteQuad creates a quadtree of depth 8 and encodes itself in two // bytes func Vec2ToTwoByteQuad(v vector.Vector2) []byte { results := QuadRecurse( v, vector.NewVector2(-1, -1), vector.NewVector2(1, 1), 8, ) return []byte{ byte(results[0]) | (byte(results[1]) << 2) | (byte(results[2]) << 4) | (byte(results[3]) << 6), byte(results[4]) | (byte(results[5]) << 2) | (byte(results[6]) << 4) | (byte(results[7]) << 6), } } // Vec2ToThreeByteQuad creates a quadtree of depth 12 and encodes itself in // three bytes func Vec2ToThreeByteQuad(v vector.Vector2) []byte { results := QuadRecurse( v, vector.NewVector2(-1, -1), vector.NewVector2(1, 1), 12, ) return []byte{ byte(results[0]) | (byte(results[1]) << 2) | (byte(results[2]) << 4) | (byte(results[3]) << 6), byte(results[4]) | (byte(results[5]) << 2) | (byte(results[6]) << 4) | (byte(results[7]) << 6), byte(results[8]) | (byte(results[9]) << 2) | (byte(results[10]) << 4) | (byte(results[11]) << 6), } } // ThreeByteQuadToVec2 calculates a Vector2 based on the encoded quadtree inside // the 3 bytes func ThreeByteQuadToVec2(b []byte) vector.Vector2 { directions := make([]Quadrant2D, 12) directions[11] = Quadrant2D(b[0] & 0b11) directions[10] = Quadrant2D((b[0] >> 2) & 0b11) directions[9] = Quadrant2D((b[0] >> 4) & 0b11) directions[8] = Quadrant2D((b[0] >> 6) & 0b11) directions[7] = Quadrant2D(b[1] & 0b11) directions[6] = Quadrant2D((b[1] >> 2) & 0b11) directions[5] = Quadrant2D((b[1] >> 4) & 0b11) directions[4] = Quadrant2D((b[1] >> 6) & 0b11) directions[3] = Quadrant2D(b[2] & 0b11) directions[2] = Quadrant2D((b[2] >> 2) & 0b11) directions[1] = Quadrant2D((b[2] >> 4) & 0b11) directions[0] = Quadrant2D((b[2] >> 6) & 0b11) return 
recalc(directions) } // TwoByteQuadToVec2 calculates a Vector2 based on the encoded quadtree inside // the 2 bytes func TwoByteQuadToVec2(b []byte) vector.Vector2 { directions := make([]Quadrant2D, 8) directions[7] = Quadrant2D(b[0] & 0b11) directions[6] = Quadrant2D((b[0] >> 2) & 0b11) directions[5] = Quadrant2D((b[0] >> 4) & 0b11) directions[4] = Quadrant2D((b[0] >> 6) & 0b11) directions[3] = Quadrant2D(b[1] & 0b11) directions[2] = Quadrant2D((b[1] >> 2) & 0b11) directions[1] = Quadrant2D((b[1] >> 4) & 0b11) directions[0] = Quadrant2D((b[1] >> 6) & 0b11) return recalc(directions) } // Vec2ToFourByteQuad creates a quadtree of depth 16 and encodes itself in // 4 bytes func Vec2ToFourByteQuad(v vector.Vector2) []byte { results := QuadRecurse( v, vector.NewVector2(-1, -1), vector.NewVector2(1, 1), 16, ) return []byte{ byte(results[0]) | (byte(results[1]) << 2) | (byte(results[2]) << 4) | (byte(results[3]) << 6), byte(results[4]) | (byte(results[5]) << 2) | (byte(results[6]) << 4) | (byte(results[7]) << 6), byte(results[8]) | (byte(results[9]) << 2) | (byte(results[10]) << 4) | (byte(results[11]) << 6), byte(results[12]) | (byte(results[13]) << 2) | (byte(results[14]) << 4) | (byte(results[15]) << 6), } } // FourByteQuadToVec2 calculates a Vector2 based on the encoded quadtree inside // the 4 bytes func FourByteQuadToVec2(b []byte) vector.Vector2 { directions := make([]Quadrant2D, 16) directions[15] = Quadrant2D(b[0] & 0b11) directions[14] = Quadrant2D((b[0] >> 2) & 0b11) directions[13] = Quadrant2D((b[0] >> 4) & 0b11) directions[12] = Quadrant2D((b[0] >> 6) & 0b11) directions[11] = Quadrant2D(b[1] & 0b11) directions[10] = Quadrant2D((b[1] >> 2) & 0b11) directions[9] = Quadrant2D((b[1] >> 4) & 0b11) directions[8] = Quadrant2D((b[1] >> 6) & 0b11) directions[7] = Quadrant2D(b[2] & 0b11) directions[6] = Quadrant2D((b[2] >> 2) & 0b11) directions[5] = Quadrant2D((b[2] >> 4) & 0b11) directions[4] = Quadrant2D((b[2] >> 6) & 0b11) directions[3] = Quadrant2D(b[3] & 0b11) 
directions[2] = Quadrant2D((b[3] >> 2) & 0b11) directions[1] = Quadrant2D((b[3] >> 4) & 0b11) directions[0] = Quadrant2D((b[3] >> 6) & 0b11) return recalc(directions) } func recalc(directions []Quadrant2D) vector.Vector2 { multiplyer := 0.5 outVec := vector.Vector2Zero() for _, v := range directions { switch v { case TopRight: outVec = outVec.Add(vector.NewVector2(multiplyer, multiplyer)) break case TopLeft: outVec = outVec.Add(vector.NewVector2(-multiplyer, multiplyer)) break case BottomLeft: outVec = outVec.Add(vector.NewVector2(-multiplyer, -multiplyer)) break case BottomRight: outVec = outVec.Add(vector.NewVector2(multiplyer, -multiplyer)) break } multiplyer /= 2.0 } return outVec }
unitpacking/quad.go
0.824037
0.693239
quad.go
starcoder
package console

import (
	"fmt"
	"io"

	"github.com/martinohmann/neat/style"
)

// Print formats using the default formats for its operands and writes to
// standard output. Spaces are added between operands when neither is a string.
// It returns the number of bytes written and any write error encountered.
func Print(args ...interface{}) (n int, err error) {
	return Fprint(style.Stdout, args...)
}

// Println formats using the default formats for its operands and writes to
// standard output. Spaces are always added between operands and a newline is
// appended. It returns the number of bytes written and any write error
// encountered.
func Println(args ...interface{}) (n int, err error) {
	return Fprintln(style.Stdout, args...)
}

// Printf formats according to a format specifier and writes to standard
// output. It returns the number of bytes written and any write error
// encountered.
func Printf(format string, args ...interface{}) (n int, err error) {
	return Fprintf(style.Stdout, format, args...)
}

// Fprint formats using the default formats for its operands and writes to w.
// Spaces are added between operands when neither is a string. It returns the
// number of bytes written and any write error encountered.
func Fprint(w io.Writer, args ...interface{}) (n int, err error) {
	return wrapWriter(w, func() (int, error) {
		return fmt.Fprint(w, styleArgs(args)...)
	})
}

// Fprintln formats using the default formats for its operands and writes to w.
// Spaces are always added between operands and a newline is appended. It
// returns the number of bytes written and any write error encountered.
func Fprintln(w io.Writer, args ...interface{}) (n int, err error) {
	return wrapWriter(w, func() (int, error) {
		return fmt.Fprintln(w, styleArgs(args)...)
	})
}

// Fprintf formats according to a format specifier and writes to w. It returns
// the number of bytes written and any write error encountered.
func Fprintf(w io.Writer, format string, args ...interface{}) (n int, err error) {
	return wrapWriter(w, func() (int, error) {
		return fmt.Fprintf(w, style.StyleString(format), styleArgs(args)...)
	})
}

// Sprint formats using the default formats for its operands and returns the
// resulting string. Spaces are added between operands when neither is a
// string.
func Sprint(args ...interface{}) string {
	return wrapString(func() string {
		return fmt.Sprint(styleArgs(args)...)
	})
}

// Sprintln formats using the default formats for its operands and returns the
// resulting string. Spaces are always added between operands and a newline is
// appended.
func Sprintln(args ...interface{}) string {
	return wrapString(func() string {
		return fmt.Sprintln(styleArgs(args)...)
	})
}

// Sprintf formats according to a format specifier and returns the resulting
// string.
func Sprintf(format string, args ...interface{}) string {
	return wrapString(func() string {
		return fmt.Sprintf(style.StyleString(format), styleArgs(args)...)
	})
}

// wrapWriter runs fn and, on success, appends the style-reset sequence to w
// so styling does not leak past this write. It returns the combined byte
// count and the first error encountered.
func wrapWriter(w io.Writer, fn func() (int, error)) (n int, err error) {
	n, err = fn()
	if err != nil {
		return
	}

	nn, err := style.ResetWriter(w)
	n += nn
	return
}

// wrapString runs fn and appends the style-reset sequence to its result.
func wrapString(fn func() string) string {
	return fn() + style.ResetString()
}

// styleArgs returns a copy of args with style attributes escaped and string
// operands style-expanded.
//
// BUG FIX: this previously rewrote the caller's slice in place, which could
// surprise callers that pass an existing slice with `...` and reuse it;
// operating on a copy avoids the visible mutation.
func styleArgs(args []interface{}) []interface{} {
	styled := make([]interface{}, len(args))
	copy(styled, args)

	for i, arg := range styled {
		switch v := arg.(type) {
		case style.Attribute:
			styled[i] = style.EscapeString(v)
		case string:
			styled[i] = style.StyleString(v)
		}
	}

	return styled
}

// Printer wraps an io.Writer for writing colorful strings to it.
type Printer struct {
	io.Writer
}

// NewPrinter returns a new *Printer which writes to w.
func NewPrinter(w io.Writer) *Printer {
	return &Printer{w}
}

// Print formats using the default formats for its operands and writes to the
// underlying io.Writer. Spaces are added between operands when neither is a
// string. It returns the number of bytes written and any write error
// encountered.
func (p *Printer) Print(args ...interface{}) (n int, err error) {
	return Fprint(p, args...)
}

// Println formats using the default formats for its operands and writes to
// the underlying io.Writer. Spaces are always added between operands and a
// newline is appended. It returns the number of bytes written and any write
// error encountered.
func (p *Printer) Println(args ...interface{}) (n int, err error) {
	return Fprintln(p, args...)
}

// Printf formats according to a format specifier and writes to the underlying
// io.Writer. It returns the number of bytes written and any write error
// encountered.
func (p *Printer) Printf(format string, args ...interface{}) (n int, err error) {
	return Fprintf(p, format, args...)
}
console/printer.go
0.64646
0.533094
printer.go
starcoder
package ga

import (
	"math"

	"wheal-investments-algorithm/funds"
)

// FundAllocation holds a raw (unnormalised) allocation weight for each of
// the 11 funds.
type FundAllocation [11]float64

// Chromosome is one candidate solution: a fund allocation plus its cached
// fitness score.
type Chromosome struct {
	FundAllocation FundAllocation
	Fitness        float64
}

// CalculateFitness scores the chromosome: the closer the actual fund
// parameters are to the desired ones, the higher the fitness.
func (chromosome *Chromosome) CalculateFitness() float64 {
	desiredFundParameters := funds.GetDesiredFundParameters()
	actualFundParameters := chromosome.GetActualFundParameters()

	// Sum of absolute differences between actual and desired parameters;
	// parameters whose desired value is zero are ignored.
	difference := 0.0
	for parameterIndex := 0; parameterIndex < len(desiredFundParameters); parameterIndex++ {
		if desiredFundParameters[parameterIndex] != 0 {
			difference += math.Abs(desiredFundParameters[parameterIndex] - actualFundParameters[parameterIndex])
		}
	}

	// Avoid a divide-by-zero (i.e. a fitness of infinity) on a perfect match.
	if difference == 0 {
		difference = 0.00000001
	}

	// Return the fitness (the bigger the better).
	return 1000000 / difference
}

// GetActualFundParameters computes the parameters of the combined fund
// produced by this chromosome's percentage allocation.
func (chromosome *Chromosome) GetActualFundParameters() funds.FundParameters {
	fundsTable := funds.GetFunds()
	desiredFundParameters := funds.GetDesiredFundParameters()

	// Scale every fund's parameters by its allocated percentage.
	var allocatedFundsTable funds.FundsTable
	percentageFundAllocation := chromosome.GetFundAllocationPercentage()
	for fundIndex := 0; fundIndex < len(fundsTable); fundIndex++ {
		for parameterIndex := 0; parameterIndex < len(desiredFundParameters); parameterIndex++ {
			allocatedFundsTable[fundIndex][parameterIndex] = fundsTable[fundIndex][parameterIndex] * percentageFundAllocation[fundIndex]
		}
	}

	// Sum the scaled parameters across all funds.
	var actualFundParameters funds.FundParameters
	for parameterIndex := 0; parameterIndex < len(desiredFundParameters); parameterIndex++ {
		for fundIndex := 0; fundIndex < len(fundsTable); fundIndex++ {
			actualFundParameters[parameterIndex] += allocatedFundsTable[fundIndex][parameterIndex]
		}
	}

	return actualFundParameters
}

// GetFundAllocationPercentage normalises the raw allocation so the weights
// sum to 1 (the raw values won't add up to 100% on their own).
// NOTE(review): assumes at least one allocation is non-zero; a zero total
// would divide by zero - confirm against GenerateRandomChromosome's output.
func (chromosome *Chromosome) GetFundAllocationPercentage() FundAllocation {
	var percentageAllocation FundAllocation

	total := 0.0
	for fundIndex := 0; fundIndex < len(chromosome.FundAllocation); fundIndex++ {
		total += chromosome.FundAllocation[fundIndex]
	}

	for index := 0; index < len(chromosome.FundAllocation); index++ {
		percentageAllocation[index] = chromosome.FundAllocation[index] / total
	}

	return percentageAllocation
}

// GenerateRandomChromosome creates a chromosome with a random allocation in
// [0, 1) for every fund.
func GenerateRandomChromosome() Chromosome {
	var fundAllocation FundAllocation
	for index := 0; index < len(fundAllocation); index++ {
		fundAllocation[index] = Random().Float64()
	}

	return Chromosome{
		FundAllocation: fundAllocation,
	}
}

// MutateIncrement mutates the chromosome by shifting 0.01 of allocation from
// one randomly chosen fund to another, provided both stay within [0, 1].
func (chromosome *Chromosome) MutateIncrement() {
	// BUG FIX: Intn(len-1) could never select the last fund (Intn's range
	// is half-open); use Intn(len) so every fund is reachable.
	fundToMutate := Random().Intn(len(chromosome.FundAllocation))
	fundToBalance := Random().Intn(len(chromosome.FundAllocation))

	// Only mutate if neither allocation would leave the [0, 1] range.
	if chromosome.FundAllocation[fundToMutate] >= 0.01 && chromosome.FundAllocation[fundToBalance] >= 0.01 {
		if chromosome.FundAllocation[fundToMutate] <= 0.99 && chromosome.FundAllocation[fundToBalance] <= 0.99 {
			chromosome.FundAllocation[fundToMutate] += 0.01
			chromosome.FundAllocation[fundToBalance] -= 0.01
		}
	}
}

// MutateSwap mutates the chromosome by swapping the allocations of two
// randomly chosen funds.
func (chromosome *Chromosome) MutateSwap() {
	// BUG FIX: Intn(len-1) could never select the last fund; use Intn(len).
	fundToMutate := Random().Intn(len(chromosome.FundAllocation))
	fundToSwap := Random().Intn(len(chromosome.FundAllocation))

	chromosome.FundAllocation[fundToMutate], chromosome.FundAllocation[fundToSwap] =
		chromosome.FundAllocation[fundToSwap], chromosome.FundAllocation[fundToMutate]
}

// SingleCrossover copies one randomly chosen fund allocation from parent2
// into a copy of parent1 and returns the resulting child.
func SingleCrossover(parent1 Chromosome, parent2 Chromosome) Chromosome {
	// BUG FIX: Intn(len-1) could never select the last fund; use Intn(len).
	fundToCrossover := Random().Intn(len(parent1.FundAllocation))

	child := parent1
	child.FundAllocation[fundToCrossover] = parent2.FundAllocation[fundToCrossover]

	return child
}

// MultipleCrossover returns a child that takes parent1's allocations before a
// random crossover point and parent2's allocations from that point onward.
func MultipleCrossover(parent1 Chromosome, parent2 Chromosome) Chromosome {
	// BUG FIX: Intn(len-1) excluded the last possible crossover point.
	crossoverPoint := Random().Intn(len(parent1.FundAllocation))

	child := parent1
	for index, value := range parent2.FundAllocation {
		if index >= crossoverPoint {
			child.FundAllocation[index] = value
		}
	}

	return child
}
ga/chromosome.go
0.697197
0.483526
chromosome.go
starcoder
package plausible

import "strconv"

// BreakdownQuery represents an API query for detailed information about a
// property over a period of time. In a breakdown query, the Property field
// and the Period field are mandatory; all the others are optional.
type BreakdownQuery struct {
	// Property is the property name the breakdown result will be about.
	// This field is mandatory.
	Property PropertyName
	// Period is the period of time to consider for the results.
	// This field is mandatory.
	Period TimePeriod
	// Metrics is a list of metrics to include in the results.
	// This field is optional.
	Metrics Metrics
	// Limit limits the number of results to be returned.
	// This field is optional.
	Limit int
	// Page indicates the page number for which to fetch the results.
	// Page numbers start at 1. This field is optional.
	Page int
	// Filters is a filter over properties to narrow down the breakdown results.
	// This field is optional.
	Filters Filter
}

// Validate tells whether the query is valid or not.
// If the query is not valid, a string explaining why the query is not valid
// will be returned.
func (bq *BreakdownQuery) Validate() (ok bool, invalidReason string) {
	if bq.Property.IsEmpty() {
		return false, "a property must be specified for a breakdown query"
	}
	if bq.Period.IsEmpty() {
		return false, "a period must be specified for a breakdown query"
	}
	return true, ""
}

// toQueryArgs flattens the query into request query arguments. The optional
// fields are omitted when they hold their zero value.
func (bq *BreakdownQuery) toQueryArgs() QueryArgs {
	queryArgs := QueryArgs{}

	queryArgs.Merge(bq.Property.toQueryArgs())
	queryArgs.Merge(bq.Period.toQueryArgs())

	if !bq.Metrics.IsEmpty() {
		queryArgs.Merge(bq.Metrics.toQueryArgs())
	}

	if bq.Limit != 0 {
		queryArgs.Add(QueryArg{Name: "limit", Value: strconv.Itoa(bq.Limit)})
	}

	if bq.Page != 0 {
		queryArgs.Add(QueryArg{Name: "page", Value: strconv.Itoa(bq.Page)})
	}

	if !bq.Filters.IsEmpty() {
		queryArgs.Merge(bq.Filters.toQueryArgs())
	}

	return queryArgs
}

// PropertyResult contains the value of a property for an entry in the
// breakdown query results. At any moment, only the field corresponding to
// the property indicated in the query must be used. All the other fields
// will be empty.
type PropertyResult struct {
	// Name contains a value of the event name property.
	// This value is only set if the breakdown query was for this property.
	Name string `json:"name"`

	// Page contains a value of the event page property.
	// This value is only set if the breakdown query was for this property.
	Page string `json:"page"`

	// Source contains a value of the visit source property.
	// This value is only set if the breakdown query was for this property.
	Source string `json:"source"`

	// Referrer contains a value of the visit referrer property.
	// This value is only set if the breakdown query was for this property.
	Referrer string `json:"referrer"`

	// UtmMedium contains a value of the utm medium property.
	// This value is only set if the breakdown query was for this property.
	UtmMedium string `json:"utm_medium"`

	// UtmSource contains a value of the utm source property.
	// This value is only set if the breakdown query was for this property.
	UtmSource string `json:"utm_source"`

	// UtmCampaign contains a value of the utm campaign property.
	// This value is only set if the breakdown query was for this property.
	UtmCampaign string `json:"utm_campaign"`

	// Device contains a value of the device property.
	// This value is only set if the breakdown query was for this property.
	Device string `json:"device"`

	// Browser contains a value of the browser property.
	// This value is only set if the breakdown query was for this property.
	Browser string `json:"browser"`

	// BrowserVersion contains a value of the browser version property.
	// This value is only set if the breakdown query was for this property.
	BrowserVersion string `json:"browser_version"`

	// OS contains a value of the operating system property.
	// This value is only set if the breakdown query was for this property.
	OS string `json:"os"`

	// OSVersion contains a value of the operating system version property.
	// This value is only set if the breakdown query was for this property.
	OSVersion string `json:"os_version"`

	// Country contains a value of the country property.
	// This value is only set if the breakdown query was for this property.
	Country string `json:"country"`
}

// BreakdownResultEntry represents an entry in a breakdown query result.
type BreakdownResultEntry struct {
	// PropertyResult contains the property value associated with this entry.
	PropertyResult
	// MetricsResult contains the metric information associated with this entry.
	MetricsResult
}

// rawBreakdownResponse mirrors the JSON envelope the API wraps breakdown
// results in.
type rawBreakdownResponse struct {
	Results []BreakdownResultEntry `json:"results"`
}

// BreakdownResult represents the full list of entries returned by a
// breakdown query.
type BreakdownResult []BreakdownResultEntry
plausible/breakdown_query.go
0.817028
0.493531
breakdown_query.go
starcoder
package xi

import "github.com/zephyrtronium/xirho"

// Mobius implements Mobius transformations over quaternions.
//
// The transform evaluates (A*q + B) * (C*q + D)^-1, where A, B, C and D are
// the quaternion parameters below and q is the input point embedded as a
// quaternion according to InZero.
type Mobius struct {
	// Each coefficient is split into a real (scalar) part and an (i, j, k)
	// vector part.
	Ar   xirho.Real `xirho:"A.scalar"`
	Avec xirho.Vec3 `xirho:"A.vector"`
	Br   xirho.Real `xirho:"B.scalar"`
	Bvec xirho.Vec3 `xirho:"B.vector"`
	Cr   xirho.Real `xirho:"C.scalar"`
	Cvec xirho.Vec3 `xirho:"C.vector"`
	Dr   xirho.Real `xirho:"D.scalar"`
	Dvec xirho.Vec3 `xirho:"D.vector"`

	// InZero selects which quaternion component of the input is left zero
	// when embedding the 3D point: 0=r, 1=i, 2=j, 3=k (per the field tag).
	InZero xirho.List `xirho:"input blank,r,i,j,k"`
}

// newMobius returns a Mobius initialized to the identity-like transform
// (A = D = 1) with the input's k component blank.
func newMobius() xirho.Func {
	return &Mobius{
		Ar:     1,
		Dr:     1,
		InZero: 3,
	}
}

// Calc applies the Mobius transformation to in. The rng parameter is unused.
//
// Each switch case hand-expands the Hamilton products n = A*q + B and
// d = C*q + D for one placement of the input's three coordinates into a
// quaternion whose InZero component is zero.
// NOTE(review): the four expansions were not independently re-derived here;
// confirm the sign patterns in cases 1-3 against the quaternion product
// before editing any of them.
func (v *Mobius) Calc(in xirho.Pt, rng *xirho.RNG) xirho.Pt {
	// May the compiler bless me with optimized quaternion operations.
	var nr, ni, nj, nk, dr, di, dj, dk float64
	switch v.InZero {
	case 0:
		// input is 0 + in.X*i + in.Y*j + in.Z*k
		nr = float64(v.Br) - v.Avec[0]*in.X - v.Avec[1]*in.Y - v.Avec[2]*in.Z
		ni = v.Bvec[0] + float64(v.Ar)*in.X + v.Avec[1]*in.Z - v.Avec[2]*in.Y
		nj = v.Bvec[1] + float64(v.Ar)*in.Y - v.Avec[0]*in.Z + v.Avec[2]*in.X
		nk = v.Bvec[2] + float64(v.Ar)*in.Z + v.Avec[0]*in.Y - v.Avec[1]*in.X
		dr = float64(v.Dr) - v.Cvec[0]*in.X - v.Cvec[1]*in.Y - v.Cvec[2]*in.Z
		di = v.Dvec[0] + float64(v.Cr)*in.X + v.Cvec[1]*in.Z - v.Cvec[2]*in.Y
		dj = v.Dvec[1] + float64(v.Cr)*in.Y - v.Cvec[0]*in.Z + v.Cvec[2]*in.X
		dk = v.Dvec[2] + float64(v.Cr)*in.Z + v.Cvec[0]*in.Y - v.Cvec[1]*in.X
	case 1:
		// input is in.X + 0*i + in.Y*j + in.Z*k
		nr = float64(v.Br) + float64(v.Ar)*in.X - v.Avec[1]*in.Y - v.Avec[2]*in.Z
		ni = v.Bvec[0] + v.Avec[0]*in.X + v.Avec[1]*in.Z - v.Avec[2]*in.Y
		nj = v.Bvec[1] + float64(v.Ar)*in.Y - v.Avec[0]*in.Z + v.Avec[1]*in.X
		nk = v.Bvec[2] + float64(v.Ar)*in.Z + v.Avec[0]*in.Y + v.Avec[2]*in.X
		dr = float64(v.Dr) + float64(v.Cr)*in.X - v.Cvec[1]*in.Y - v.Cvec[2]*in.Z
		di = v.Dvec[0] + v.Cvec[0]*in.X + v.Cvec[1]*in.Z - v.Cvec[2]*in.Y
		dj = v.Dvec[1] + float64(v.Cr)*in.Y - v.Cvec[0]*in.Z + v.Cvec[1]*in.X
		dk = v.Dvec[2] + float64(v.Cr)*in.Z + v.Cvec[0]*in.Y + v.Cvec[2]*in.X
	case 2:
		// input is in.X + in.Y*i + 0*j + in.Z*k
		nr = float64(v.Br) + float64(v.Ar)*in.X - v.Avec[0]*in.Y - v.Avec[2]*in.Z
		ni = v.Bvec[0] + float64(v.Ar)*in.Y + v.Avec[0]*in.X + v.Avec[1]*in.Z
		nj = v.Bvec[1] - v.Avec[0]*in.Z + v.Avec[1]*in.X + v.Avec[2]*in.Y
		nk = v.Bvec[2] + float64(v.Ar)*in.Z - v.Avec[1]*in.Y + v.Avec[2]*in.X
		dr = float64(v.Dr) + float64(v.Cr)*in.X - v.Cvec[0]*in.Y - v.Cvec[2]*in.Z
		di = v.Dvec[0] + float64(v.Cr)*in.Y + v.Cvec[0]*in.X + v.Cvec[1]*in.Z
		dj = v.Dvec[1] - v.Cvec[0]*in.Z + v.Cvec[1]*in.X + v.Cvec[2]*in.Y
		dk = v.Dvec[2] + float64(v.Cr)*in.Z - v.Cvec[1]*in.Y + v.Cvec[2]*in.X
	case 3:
		// input is in.X + in.Y*i + in.Z*j + 0*k
		nr = float64(v.Br) + float64(v.Ar)*in.X - v.Avec[0]*in.Y - v.Avec[1]*in.Z
		ni = v.Bvec[0] + float64(v.Ar)*in.Y + v.Avec[0]*in.X - v.Avec[2]*in.Z
		nj = v.Bvec[1] + float64(v.Ar)*in.Z + v.Avec[1]*in.X + v.Avec[2]*in.Y
		nk = v.Bvec[2] + v.Avec[0]*in.Z - v.Avec[1]*in.Y + v.Avec[2]*in.X
		dr = float64(v.Dr) + float64(v.Cr)*in.X - v.Cvec[0]*in.Y - v.Cvec[1]*in.Z
		di = v.Dvec[0] + float64(v.Cr)*in.Y + v.Cvec[0]*in.X - v.Cvec[2]*in.Z
		dj = v.Dvec[1] + float64(v.Cr)*in.Z + v.Cvec[1]*in.X + v.Cvec[2]*in.Y
		dk = v.Dvec[2] + v.Cvec[0]*in.Z - v.Cvec[1]*in.Y + v.Cvec[2]*in.X
	}
	// Invert d via d^-1 = conj(d)/|d|^2: rr is the squared norm, and the
	// negated divisors below fold the conjugation of i, j, k into the
	// division.
	rr := dr*dr + di*di + dj*dj + dk*dk
	dr /= rr
	di /= -rr
	dj /= -rr
	dk /= -rr
	// Multiply n * d^-1. The k component of the product is discarded
	// (see the commented outk line), since the output is a 3D point.
	in.X = nr*dr - ni*di - nj*dj - nk*dk
	in.Y = nr*di + ni*dr + nj*dk - nk*dj
	in.Z = nr*dj - ni*dk + nj*dr + nk*di
	// outk := nr*dk + ni*dj - nj*di + nk*dr
	return in
}

// Prep implements xirho.Func. Mobius needs no precomputation.
func (v *Mobius) Prep() {}

// init registers the function under both the "mobius" and "mobiq" names.
// must is presumably a package-level registration helper defined elsewhere
// in the package.
func init() {
	must("mobius", newMobius)
	must("mobiq", newMobius)
}
xi/mobius.go
0.613931
0.678513
mobius.go
starcoder
package geography import ( "database/sql/driver" "encoding/binary" "errors" "fmt" "strconv" "github.com/go-courier/geography/encoding/mvt" "github.com/go-courier/geography/encoding/wkb" "github.com/go-courier/geography/encoding/wkt" ) // Polygon is a closed area. // The first Polygon is the outer ring. // The others are the holes. // Each Polygon is expected to be closed // ie. the first point matches the last. type Polygon []LineString func (p Polygon) ToGeom() Geom { return p } func (p Polygon) Clip(b Bound) Geom { if len(p) == 0 { return nil } circle := ring(b, p[0]) if circle == nil { return nil } result := Polygon{circle} for i := 1; i < len(p); i++ { r := ring(b, p[i]) if r != nil { result = append(result, r) } } return result } func (p Polygon) Project(transform Transform) Geom { nextP := make(Polygon, len(p)) for i := range p { nextP[i] = p[i].Project(transform).(LineString) } return nextP } func (p Polygon) Bound() Bound { if len(p) == 0 { return emptyBound } return p[0].Bound() } func (p Polygon) Equal(g Geom) bool { switch polygon := g.(type) { case Polygon: if len(p) != len(polygon) { return false } for i := range p { if !p[i].Equal(polygon[i]) { return false } } return true } return false } func (Polygon) Type() string { return "Polygon" } func (p Polygon) MarshalWKT(w *wkt.WKTWriter) { MultiLineString(p).MarshalWKT(w) } func (p *Polygon) UnmarshalWKB(r *wkb.WKBReader, order binary.ByteOrder, tpe wkb.GeometryType) error { if tpe != wkb.PolygonType { return errors.New("not line polygon wkb") } var numOfLines uint32 if err := r.ReadBinary(order, &numOfLines); err != nil { return err } result := make(Polygon, 0, numOfLines) for i := 0; i < int(numOfLines); i++ { p := LineString{} if err := p.UnmarshalWKB(r, order, wkb.LineStringType); err != nil { return fmt.Errorf("error on %d of %s: %s", i, p.Type(), err) } result = append(result, p) } *p = result return nil } func (p Polygon) Cap() int { c := 0 for _, r := range p { c += 3 + 2*len(r) } return c } func 
(p Polygon) DrawFeature(w *mvt.FeatureWriter) { for _, ls := range p { ls.DrawFeature(w) if !ls.Closed() && ls.IsValid() { // force close path w.ClosePath() } } } func (p Polygon) Geometry() []uint32 { w := mvt.NewFeatureWriter(p.Cap()) p.DrawFeature(w) return w.Data() } func (Polygon) DataType(driverName string) string { if driverName == "mysql" { return "POLYGON" } return "geometry(POLYGON)" } func (Polygon) ValueEx() string { return "ST_GeomFromText(?," + strconv.FormatInt(SRS3857, 10) + ")" } func (p Polygon) Value() (driver.Value, error) { return wkt.MarshalWKT(p, SRS3857), nil } func (p *Polygon) Scan(src interface{}) error { return scan(src, p) }
geom_polygon.go
0.719778
0.466056
geom_polygon.go
starcoder
package lazyledger import ( "encoding/binary" ) // Message represents a namespaced message. type Message struct { namespace [namespaceSize]byte data []byte } // NewMessage returns a new message from its namespace and data. func NewMessage(namespace [namespaceSize]byte, data []byte) *Message { return &Message{ namespace: namespace, data: data, } } // UnmarshalMessage returns a message from its marshalled raw data. func UnmarshalMessage(marshalled []byte) *Message { var namespace [namespaceSize]byte copy(namespace[:], marshalled[:namespaceSize]) return NewMessage(namespace, marshalled[namespaceSize:]) } // UnmarshalPaddedMessage returns a message from its marshalled padded raw data. func UnmarshalPaddedMessage(marshalled []byte) *Message { marshalledSizeBytes := make([]byte, 2) marshalledSizeBytes[0] = marshalled[len(marshalled) - 2] marshalledSizeBytes[1] = marshalled[len(marshalled) - 1] marshalled = marshalled[:int(binary.LittleEndian.Uint16(marshalledSizeBytes))] var namespace [namespaceSize]byte copy(namespace[:], marshalled[:namespaceSize]) return NewMessage(namespace, marshalled[namespaceSize:]) } // Marshal converts a message to raw data. func (m *Message) Marshal() []byte { return append(m.namespace[:], m.data...) } // Marshal converts a message to padded raw data. func (m *Message) MarshalPadded(messageSize int) []byte { marshalled := append(m.namespace[:], m.data...) marshalledSizeBytes := make([]byte, 2) binary.LittleEndian.PutUint16(marshalledSizeBytes, uint16(len(marshalled))) padding := make([]byte, messageSize - len(marshalled)) for i, _ := range padding { padding[i] = 0x00 } marshalled = append(marshalled, padding...) marshalled[len(marshalled) - 2] = marshalledSizeBytes[0] marshalled[len(marshalled) - 1] = marshalledSizeBytes[1] return marshalled } // Namespace returns the namespace of a message. func (m *Message) Namespace() [namespaceSize]byte { return m.namespace; } // Data returns the data of a message. 
func (m *Message) Data() []byte { return m.data; }
message.go
0.780244
0.402099
message.go
starcoder
package instances // VerifiedStatus captures the verification status for each Instance type. type VerifiedStatus struct { // Attempted denotes whether a verification attempt has been made. Attempted bool // Verified denotes whether the instance type is verified to work for Reflow. Verified bool // ApproxETASeconds is the approximate ETA (in seconds) for Reflow to become available on this instance type. ApproxETASeconds int64 // MemoryBytes is memory bytes reported as available on this Instance type. MemoryBytes int64 } // ExpectedMemoryBytes is the amount of memory we can expect to be available based on verification. func (v VerifiedStatus) ExpectedMemoryBytes() int64 { // samplingErrorDiscount is used to discount the amount of memory to account for sampling variation. // Since we are modeling the expected available memory on an instance type based on // one sample (collected during verification), this provides a buffer. const samplingErrorDiscount = 0.02 // 2 percent return int64(float64(v.MemoryBytes) * (1 - samplingErrorDiscount)) } // VerifiedByRegion stores mapping of instance types to VerifiedStatus by AWS Region. 
var VerifiedByRegion = make(map[string]map[string]VerifiedStatus) func init() { VerifiedByRegion["us-west-2"] = map[string]VerifiedStatus{ "c3.2xlarge": {true, true, 67, 14938849894}, "c3.4xlarge": {true, true, 68, 29948344524}, "c3.8xlarge": {true, true, 67, 60119495270}, "c3.large": {true, true, 102, 3728617881}, "c3.xlarge": {true, true, 68, 7419958067}, "c4.2xlarge": {true, true, 73, 14942873395}, "c4.4xlarge": {true, true, 61, 29948441804}, "c4.8xlarge": {true, true, 65, 60119709286}, "c4.large": {true, true, 72, 3728625664}, "c4.xlarge": {true, true, 73, 7423954329}, "c5.12xlarge": {true, true, 48, 94061518438}, "c5.18xlarge": {true, true, 53, 140213629132}, "c5.24xlarge": {true, true, 49, 188406148915}, "c5.2xlarge": {true, true, 51, 15374598144}, "c5.4xlarge": {true, true, 49, 31191723008}, "c5.9xlarge": {true, true, 51, 69965258547}, "c5.large": {true, true, 48, 3679098470}, "c5.xlarge": {true, true, 52, 7577598361}, "c5a.12xlarge": {true, true, 57, 95175211008}, "c5a.16xlarge": {true, true, 41, 126966855884}, "c5a.24xlarge": {true, true, 45, 190458223820}, "c5a.2xlarge": {true, true, 48, 15763095552}, "c5a.4xlarge": {true, true, 46, 31636004659}, "c5a.8xlarge": {true, true, 48, 63381822873}, "c5a.large": {true, true, 50, 3858412748}, "c5a.xlarge": {true, true, 52, 7826642944}, "c5ad.12xlarge": {true, true, 75, 95175211008}, "c5ad.16xlarge": {true, true, 51, 126956894412}, "c5ad.24xlarge": {true, true, 51, 190442285465}, "c5ad.2xlarge": {true, true, 48, 15763087769}, "c5ad.4xlarge": {true, true, 40, 31636004659}, "c5ad.8xlarge": {true, true, 49, 63381822873}, "c5ad.large": {true, true, 49, 3858404966}, "c5ad.xlarge": {true, true, 66, 7826642944}, "c5d.12xlarge": {true, true, 48, 94061518438}, "c5d.18xlarge": {true, true, 52, 140213629132}, "c5d.24xlarge": {true, true, 52, 188406148915}, "c5d.2xlarge": {true, true, 50, 15374598144}, "c5d.4xlarge": {true, true, 49, 31191723008}, "c5d.9xlarge": {true, true, 51, 69965258547}, "c5d.large": {true, true, 49, 
3679098470}, "c5d.xlarge": {true, true, 59, 7577606144}, "c5n.18xlarge": {true, true, 52, 188411199692}, "c5n.2xlarge": {true, true, 49, 20379241676}, "c5n.4xlarge": {true, true, 46, 40977869209}, "c5n.9xlarge": {true, true, 49, 94064043827}, "c5n.large": {true, true, 56, 4930259353}, "c5n.xlarge": {true, true, 46, 10079920128}, "c6i.12xlarge": {true, true, 48, 94673152819}, "c6i.16xlarge": {true, true, 49, 126299437260}, "c6i.24xlarge": {true, true, 53, 189629421568}, "c6i.2xlarge": {true, true, 47, 15681411481}, "c6i.32xlarge": {true, true, 52, 252881963212}, "c6i.4xlarge": {true, true, 51, 31470644224}, "c6i.8xlarge": {true, true, 48, 63049113600}, "c6i.large": {true, true, 50, 3838478131}, "c6i.xlarge": {true, true, 46, 7786793164}, "d2.2xlarge": {true, true, 82, 61056500121}, "d2.4xlarge": {true, true, 61, 122283275264}, "d2.8xlarge": {true, true, 68, 244881017856}, "d2.xlarge": {true, true, 63, 30478773452}, "d3.2xlarge": {true, true, 52, 63277301350}, "d3.4xlarge": {true, true, 45, 126869630361}, "d3.8xlarge": {true, false, 1097, 0}, "d3.xlarge": {true, true, 59, 31528949964}, "d3en.12xlarge": {true, false, 1609, 0}, "d3en.2xlarge": {true, true, 46, 31528105574}, "d3en.4xlarge": {true, true, 55, 63275616460}, "d3en.6xlarge": {true, true, 47, 95072930816}, "d3en.8xlarge": {true, true, 50, 126866260582}, "d3en.xlarge": {true, true, 48, 15654363750}, "dl1.24xlarge": {true, true, 69, 762979577446}, "f1.16xlarge": {true, true, 187, 979889855897}, "f1.2xlarge": {true, true, 73, 122307568025}, "f1.4xlarge": {true, true, 90, 244685796352}, "g2.2xlarge": {true, true, 75, 14938846003}, "g2.8xlarge": {true, true, 71, 60119495270}, "g3.16xlarge": {true, true, 81, 489881304883}, "g3.4xlarge": {true, true, 70, 122279290675}, "g3.8xlarge": {true, true, 74, 244685796352}, "g3s.xlarge": {true, true, 77, 30478773452}, "g4ad.16xlarge": {true, true, 54, 254244521369}, "g4ad.2xlarge": {true, true, 50, 31237238374}, "g4ad.4xlarge": {true, true, 64, 63068425625}, "g4ad.8xlarge": 
{true, true, 54, 126826414694}, "g4ad.xlarge": {true, true, 50, 15321650585}, "g4dn.12xlarge": {true, true, 58, 190361017753}, "g4dn.16xlarge": {true, true, 51, 254013387980}, "g4dn.2xlarge": {true, true, 50, 31528113356}, "g4dn.4xlarge": {true, true, 49, 63275616460}, "g4dn.8xlarge": {true, true, 52, 126866260582}, "g4dn.xlarge": {true, true, 52, 15654355968}, "h1.16xlarge": {true, true, 80, 256926418124}, "h1.2xlarge": {true, true, 79, 31984948019}, "h1.4xlarge": {true, true, 68, 64040560230}, "h1.8xlarge": {true, true, 79, 128303977267}, "i2.2xlarge": {true, true, 83, 61090151219}, "i2.4xlarge": {true, true, 75, 122348565708}, "i2.8xlarge": {true, true, 62, 245019556249}, "i2.large": {true, false, 1, 0}, "i2.xlarge": {true, true, 60, 30498599116}, "i3.16xlarge": {true, true, 93, 489885289472}, "i3.2xlarge": {true, true, 77, 61056500121}, "i3.4xlarge": {true, true, 73, 122279290675}, "i3.8xlarge": {true, true, 80, 244685796352}, "i3.large": {true, true, 116, 15189917900}, "i3.xlarge": {true, true, 91, 30478773452}, "i3en.12xlarge": {true, true, 107, 381143036108}, "i3en.24xlarge": {true, true, 89, 762760440627}, "i3en.2xlarge": {true, true, 73, 63277301350}, "i3en.3xlarge": {true, true, 78, 95075456204}, "i3en.6xlarge": {true, true, 77, 190368313753}, "i3en.large": {true, true, 59, 15654776217}, "i3en.xlarge": {true, true, 89, 31528957747}, "inf1.24xlarge": {true, true, 63, 188406148915}, "inf1.2xlarge": {true, true, 56, 15486166630}, "inf1.6xlarge": {true, true, 53, 46875368038}, "inf1.xlarge": {true, true, 52, 7633382604}, "m3.2xlarge": {true, true, 92, 29976621875}, "m3.large": {true, true, 80, 7423942656}, "m3.medium": {true, true, 183, 3724606054}, "m3.xlarge": {true, true, 98, 14938849894}, "m4.10xlarge": {true, true, 70, 160531292569}, "m4.16xlarge": {true, true, 76, 256926418124}, "m4.2xlarge": {true, true, 73, 31988932608}, "m4.4xlarge": {true, true, 88, 64040560230}, "m4.large": {true, true, 119, 7926012518}, "m4.xlarge": {true, true, 79, 15946985881}, 
"m5.12xlarge": {true, true, 53, 188352784998}, "m5.16xlarge": {true, true, 51, 254013391872}, "m5.24xlarge": {true, true, 52, 377179938406}, "m5.2xlarge": {true, true, 47, 31193407897}, "m5.4xlarge": {true, true, 51, 62606213324}, "m5.8xlarge": {true, true, 52, 126866260582}, "m5.large": {true, true, 52, 7717487001}, "m5.xlarge": {true, true, 50, 15487011020}, "m5a.12xlarge": {true, true, 49, 189297078067}, "m5a.16xlarge": {true, true, 55, 252427474944}, "m5a.24xlarge": {true, true, 70, 378686011801}, "m5a.2xlarge": {true, true, 50, 31362752921}, "m5a.4xlarge": {true, true, 46, 62942903296}, "m5a.8xlarge": {true, true, 56, 126168930304}, "m5a.large": {true, true, 52, 7677633331}, "m5a.xlarge": {true, true, 48, 15572679680}, "m5ad.12xlarge": {true, true, 56, 189297081958}, "m5ad.16xlarge": {true, true, 56, 252427471052}, "m5ad.24xlarge": {true, true, 71, 378686011801}, "m5ad.2xlarge": {true, true, 54, 31362752921}, "m5ad.4xlarge": {true, true, 52, 62942911078}, "m5ad.8xlarge": {true, true, 53, 126168934195}, "m5ad.large": {true, true, 50, 7677641113}, "m5ad.xlarge": {true, true, 50, 15572679680}, "m5d.12xlarge": {true, true, 53, 190361017753}, "m5d.16xlarge": {true, true, 54, 254013391872}, "m5d.24xlarge": {true, true, 58, 381196400025}, "m5d.2xlarge": {true, true, 53, 31528113356}, "m5d.4xlarge": {true, true, 50, 63275616460}, "m5d.8xlarge": {true, true, 49, 126866260582}, "m5d.large": {true, true, 53, 7717487001}, "m5d.xlarge": {true, true, 51, 15487003238}, "m5dn.12xlarge": {true, true, 52, 190361017753}, "m5dn.16xlarge": {true, true, 52, 254013384089}, "m5dn.24xlarge": {true, true, 52, 381196400025}, "m5dn.2xlarge": {true, true, 55, 31528113356}, "m5dn.4xlarge": {true, true, 52, 63275616460}, "m5dn.8xlarge": {true, true, 45, 126866260582}, "m5dn.large": {true, true, 53, 7717487001}, "m5dn.xlarge": {true, true, 59, 15654355968}, "m5n.12xlarge": {true, true, 47, 190361017753}, "m5n.16xlarge": {true, true, 51, 254013387980}, "m5n.24xlarge": {true, true, 58, 
381196400025}, "m5n.2xlarge": {true, true, 46, 31528113356}, "m5n.4xlarge": {true, true, 46, 63275616460}, "m5n.8xlarge": {true, true, 47, 126866260582}, "m5n.large": {true, true, 49, 7717479219}, "m5n.xlarge": {true, true, 49, 15654363750}, "m5zn.12xlarge": {true, true, 50, 188418499584}, "m5zn.2xlarge": {true, true, 60, 31193407897}, "m5zn.3xlarge": {true, true, 46, 46877893427}, "m5zn.6xlarge": {true, true, 50, 94068814438}, "m5zn.large": {true, true, 45, 7633810636}, "m5zn.xlarge": {true, true, 47, 15487011020}, "m6i.12xlarge": {true, true, 50, 189466477568}, "m6i.16xlarge": {true, true, 50, 252722408448}, "m6i.24xlarge": {true, true, 63, 379407327436}, "m6i.2xlarge": {true, true, 49, 31472329113}, "m6i.32xlarge": {true, true, 54, 505919161958}, "m6i.4xlarge": {true, true, 46, 63052487270}, "m6i.8xlarge": {true, true, 46, 126308414259}, "m6i.large": {true, true, 48, 7787213414}, "m6i.xlarge": {true, true, 51, 15682251980}, "p2.8xlarge": {true, true, 92, 489690071859}, "p2.xlarge": {true, true, 77, 61060484710}, "p3.16xlarge": {true, true, 93, 489881304883}, "p3.2xlarge": {true, true, 70, 61056500121}, "p3.8xlarge": {true, true, 75, 244685796352}, "p3dn.24xlarge": {true, true, 56, 762760440627}, "p4d.24xlarge": {true, false, 353, 0}, "r3.2xlarge": {true, true, 69, 61090151219}, "r3.4xlarge": {true, true, 69, 122348565708}, "r3.8xlarge": {true, true, 67, 245015575552}, "r3.large": {true, true, 75, 15200832716}, "r3.xlarge": {true, true, 75, 30498599116}, "r4.16xlarge": {true, true, 80, 489881304883}, "r4.2xlarge": {true, true, 61, 61060484710}, "r4.4xlarge": {true, true, 85, 122279290675}, "r4.8xlarge": {true, true, 72, 244685796352}, "r4.large": {true, true, 104, 15193902489}, "r4.xlarge": {true, true, 95, 30478773452}, "r5.12xlarge": {true, true, 50, 381143036108}, "r5.16xlarge": {true, true, 52, 508389416345}, "r5.24xlarge": {true, true, 57, 762760436736}, "r5.2xlarge": {true, true, 46, 63277301350}, "r5.4xlarge": {true, true, 47, 126869630361}, "r5.8xlarge": 
{true, true, 53, 253958642688}, "r5.large": {true, true, 46, 15654784000}, "r5.xlarge": {true, true, 48, 31528957747}, "r5a.12xlarge": {true, true, 53, 382087329177}, "r5a.16xlarge": {true, true, 54, 509481143091}, "r5a.24xlarge": {true, true, 60, 764266510131}, "r5a.2xlarge": {true, true, 48, 63446646374}, "r5a.4xlarge": {true, true, 50, 127206328115}, "r5a.8xlarge": {true, true, 63, 254695768268}, "r5a.large": {true, true, 50, 15698614476}, "r5a.xlarge": {true, true, 53, 31614626406}, "r5ad.12xlarge": {true, true, 56, 382087329177}, "r5ad.16xlarge": {true, true, 59, 509481139200}, "r5ad.24xlarge": {true, true, 75, 764266514022}, "r5ad.2xlarge": {true, true, 54, 63446646374}, "r5ad.4xlarge": {true, true, 54, 127206328115}, "r5ad.8xlarge": {true, true, 53, 254695768268}, "r5ad.large": {true, true, 52, 15698614476}, "r5ad.xlarge": {true, true, 53, 31614626406}, "r5b.12xlarge": {true, true, 46, 381143020544}, "r5b.16xlarge": {true, true, 52, 508389400780}, "r5b.24xlarge": {true, true, 56, 762760425062}, "r5b.2xlarge": {true, true, 48, 63277301350}, "r5b.4xlarge": {true, true, 53, 126869630361}, "r5b.8xlarge": {true, true, 54, 253958642688}, "r5b.large": {true, true, 51, 15654772326}, "r5b.xlarge": {true, true, 51, 31528953856}, "r5d.12xlarge": {true, true, 52, 381143036108}, "r5d.16xlarge": {true, true, 54, 508389416345}, "r5d.24xlarge": {true, true, 54, 762760436736}, "r5d.2xlarge": {true, true, 50, 63277301350}, "r5d.4xlarge": {true, true, 52, 126869630361}, "r5d.8xlarge": {true, true, 53, 253958642688}, "r5d.large": {true, true, 55, 15654784000}, "r5d.xlarge": {true, true, 49, 31528957747}, "r5dn.12xlarge": {true, true, 51, 381143036108}, "r5dn.16xlarge": {true, true, 51, 508389412454}, "r5dn.24xlarge": {true, true, 68, 762760436736}, "r5dn.2xlarge": {true, true, 45, 63277301350}, "r5dn.4xlarge": {true, true, 51, 126869630361}, "r5dn.8xlarge": {true, true, 45, 253958642688}, "r5dn.large": {true, true, 48, 15654784000}, "r5dn.xlarge": {true, true, 51, 31528957747}, 
"r5n.12xlarge": {true, true, 54, 381143036108}, "r5n.16xlarge": {true, true, 53, 508389412454}, "r5n.24xlarge": {true, true, 53, 762760436736}, "r5n.2xlarge": {true, true, 47, 63277301350}, "r5n.4xlarge": {true, true, 51, 126869630361}, "r5n.8xlarge": {true, true, 51, 253958642688}, "r5n.large": {true, true, 48, 15654784000}, "r5n.xlarge": {true, true, 51, 31528957747}, "t3.2xlarge": {true, true, 55, 31637685657}, "t3.large": {true, true, 51, 7827059302}, "t3.medium": {true, true, 52, 3858408857}, "t3.micro": {true, true, 51, 946549964}, "t3.nano": {true, false, 271, 0}, "t3.small": {true, true, 52, 1939210649}, "t3.xlarge": {true, true, 53, 15763936051}, "t3a.2xlarge": {true, true, 70, 31723354316}, "t3a.large": {true, true, 80, 7848974540}, "t3a.medium": {true, true, 68, 3870354841}, "t3a.micro": {true, true, 50, 950534553}, "t3a.nano": {true, false, 268, 0}, "t3a.small": {true, true, 67, 1945187532}, "t3a.xlarge": {true, true, 50, 15807758745}, "x1.16xlarge": {true, true, 99, 979889855897}, "x1.32xlarge": {true, true, 152, 1959906899558}, "x1e.16xlarge": {true, true, 115, 1959906954035}, "x1e.2xlarge": {true, true, 77, 244714077593}, "x1e.32xlarge": {true, true, 206, 3919941107507}, "x1e.4xlarge": {true, true, 81, 489690071859}, "x1e.8xlarge": {true, true, 89, 979507366502}, "x1e.xlarge": {true, true, 83, 122307568025}, "z1d.12xlarge": {true, true, 53, 381208758476}, "z1d.2xlarge": {true, true, 50, 63277301350}, "z1d.3xlarge": {true, true, 45, 95075456204}, "z1d.6xlarge": {true, true, 57, 190368313753}, "z1d.large": {true, true, 51, 15654784000}, "z1d.xlarge": {true, true, 46, 31528957747}, } }
ec2cluster/instances/verified.go
0.795896
0.408513
verified.go
starcoder
package geom import "fmt" // The Margin type represent 2D margins of a rectangle area. type Margin struct { Top float64 Bottom float64 Left float64 Right float64 } // MakeMargin takes a numeric value as argument that defines the top, left, // right, and bottom components of the returned margin value. func MakeMargin(m float64) Margin { return Margin{ Top: m, Bottom: m, Left: m, Right: m, } } // TopLeft takes a point as argument that would be the top-left corner of a // rectangle, applies the margin and return the modified coordinates. func (m Margin) TopLeft(p Point) Point { return Point{ X: p.X + m.Left, Y: p.Y + m.Top, } } // BottomRight takes a point as argument that would be the bottom-right corner // of a rectangle, applies the margin and return the modified corrdinates. func (m Margin) BottomRight(p Point) Point { return Point{ X: p.X - m.Right, Y: p.Y - m.Bottom, } } // GrowRect grows the given rectangle by applying the margin and returns the // modified rectangle. func (m Margin) GrowRect(r Rect) Rect { return Rect{ X: r.X - m.Left, Y: r.Y - m.Top, W: r.W + m.Width(), H: r.H + m.Height(), } } // ShrinkRect shrinks the given rectangle by applying the margin and returns the // modified rectangle. func (m Margin) ShrinkRect(r Rect) Rect { s := Rect{ X: r.X + m.Left, Y: r.Y + m.Top, W: r.W - m.Width(), H: r.H - m.Height(), } if s.W < 0 { s.X = r.X + (r.W / 2) s.W = 0 } if s.H < 0 { s.Y = r.Y + (r.H / 2) s.H = 0 } return s } // Width returns the sum of the left and right values of the given margin. func (m Margin) Width() float64 { return m.Left + m.Right } // Height returns the sum of the top and bottom values of the given margin. func (m Margin) Height() float64 { return m.Top + m.Bottom } // Size returns the combined width and height of the margin as a size value. func (m Margin) Size() Size { return Size{ W: m.Width(), H: m.Height(), } } // The String method returns a string representation of the margin value. 
func (m Margin) String() string { return fmt.Sprintf("margin { top = %g, bottom = %g, left = %g, right = %g }", m.Top, m.Bottom, m.Left, m.Right) }
margin.go
0.933363
0.677727
margin.go
starcoder
package bayes import ( "math" "strings" "github.com/sboehler/knut/lib/journal" "github.com/sboehler/knut/lib/journal/ast" ) // Model is a model trained from a journal type Model struct { accounts int accountCounts map[*journal.Account]int tokenCounts map[string]map[*journal.Account]int } // NewModel creates a new model. func NewModel() *Model { return &Model{ accounts: 0, accountCounts: make(map[*journal.Account]int), tokenCounts: make(map[string]map[*journal.Account]int), } } // Update updates the model with the given transaction. func (m *Model) Update(t *ast.Transaction) { for _, p := range t.Postings { m.accounts++ m.accountCounts[p.Credit]++ for _, token := range tokenize(t, &p, p.Credit) { tc, ok := m.tokenCounts[token] if !ok { tc = make(map[*journal.Account]int) m.tokenCounts[token] = tc } tc[p.Credit]++ } m.accounts++ m.accountCounts[p.Debit]++ for _, token := range tokenize(t, &p, p.Debit) { tc, ok := m.tokenCounts[token] if !ok { tc = make(map[*journal.Account]int) m.tokenCounts[token] = tc } tc[p.Debit]++ } } } // Infer replaces the given account with an inferred account. 
func (m *Model) Infer(trx *ast.Transaction, tbd *journal.Account) { for i := range trx.Postings { var posting = &trx.Postings[i] var tokens []string if posting.Credit == tbd { tokens = tokenize(trx, posting, posting.Credit) } if posting.Debit == tbd { tokens = tokenize(trx, posting, posting.Debit) } var scores = make(map[*journal.Account]float64) for a, accountCount := range m.accountCounts { if a == tbd { continue } scores[a] = math.Log(float64(accountCount) / float64(m.accounts)) for token := range dedup(tokens) { if tokenCount, ok := m.tokenCounts[token][a]; ok { scores[a] += math.Log(float64(tokenCount) / float64(accountCount)) } else { // assign a low but positive default probability scores[a] += math.Log(1.0 / float64(m.accounts)) } } } var ( selected *journal.Account max = math.Inf(-1) ) for a, score := range scores { if score > max && a != posting.Credit && a != posting.Debit { selected = a max = score } } if selected != nil { if posting.Credit == tbd { posting.Credit = selected } if posting.Debit == tbd { posting.Debit = selected } } } } func dedup(ss []string) map[string]bool { var res = make(map[string]bool) for _, s := range ss { res[s] = true } return res } func tokenize(trx *ast.Transaction, posting *ast.Posting, account *journal.Account) []string { var tokens = append(strings.Fields(trx.Description), posting.Commodity.String(), posting.Amount.String()) if account == posting.Credit { tokens = append(tokens, "credit", posting.Debit.String()) } if account == posting.Debit { tokens = append(tokens, "debit", posting.Credit.String()) } var result = make([]string, 0, len(tokens)) for _, token := range tokens { result = append(result, strings.ToLower(token)) } return result }
lib/journal/ast/bayes/bayes.go
0.612657
0.415314
bayes.go
starcoder
package day18 type Map [][]string //uint8 ? func (m Map) ToString() string { result := "" for y, line := range m { if y > 0 { result += "\n" } for _, s := range line { result += s } } return result } func (m Map) ToStringModified(transform func(Point) *string) string { result := "" for y, line := range m { if y > 0 { result += "\n" } for x, s := range line { t := transform(Point{x, y}) if t != nil { result += *t } else { result += s } } } return result } func (m Map) Each(handler func(p Point, v string)) { for y := 0; y < len(m); y++ { for x := 0; x < len(m[y]); x++ { handler(Point{x, y}, m[y][x]) } } } func (m Map) EachColumnCell(x int, handler func(p Point, v string)) { for y := 0; y < len(m); y++ { handler(Point{x, y}, m[y][x]) } } func (m Map) EachRowColumn(y int, handler func(p Point, v string)) { for x := 0; x < len(m[y]); x++ { handler(Point{x, y}, m[y][x]) } } func (m Map) EachValue(handler func(v string)) { for y := 0; y < len(m); y++ { for x := 0; x < len(m[y]); x++ { handler(m[y][x]) } } } func (m Map) Filter(filter func(v string) bool) []Point { result := make([]Point, 0) for y := 0; y < len(m); y++ { for x := 0; x < len(m[y]); x++ { if filter(m[y][x]) { result = append(result, Point{x, y,}) } } } return result } func (m Map) Count(filter func(v string) bool) int { total := 0 m.EachValue(func(v string) { if filter(v) { total++ } }) return total } func (m Map) FindFirst(filter func(v string) bool) *Point { for y := 0; y < len(m); y++ { for x := 0; x < len(m[y]); x++ { if filter(m[y][x]) { return &Point{x, y} } } } return nil } func (m Map) Set(p Point, v string) *Map { m[p.Y][p.X] = v return &m } func (m Map) Contains(p Point) bool { if p.X < 0 || p.Y < 0 { return false } return len(m) > p.Y && len(m[p.Y]) > p.X } func (m Map) Get(p Point) *string { if m.Contains(p) { return &m[p.Y][p.X] } else { return nil } } func (m Map) Clone() *Map { clone := make(Map, len(m)) for y := 0; y < len(m); y++ { clone[y ] = make([]string, len(m[y])) for x := 0; x < 
len(m[y]); x++ { clone[y][x] = m[y][x] } } return &clone } func (m Map) Height() int { return len(m) } func (m Map) Width() int { return len(m[0]) } func NewMap(lines []string) Map { result := make(Map, 0) for _, line := range lines { r := make([]string, 0) for _, c := range line { r = append(r, string(c)) } result = append(result, r) } return result }
day18/lib/map.go
0.588061
0.451387
map.go
starcoder
package day3 func GetPowerConsumption(diagReport []uint16, resultBitMask uint16) uint64 { gamma := getMostCommonBits(diagReport) epsilon := uint16(^gamma) epsilon = epsilon & resultBitMask powerRating := uint64(gamma) * uint64(epsilon) return powerRating } func GetLifeSupportRating(diagReport []uint16, positionMasks []uint16) uint64 { oxygenRating := getOxygenRating(diagReport, positionMasks) scrubberRating := getScrubberRating(diagReport, positionMasks) liftSupportRating := uint64(oxygenRating) * uint64(scrubberRating) return liftSupportRating } func getMostCommonBits(diagReport []uint16) uint16 { masks := []uint16{ 0x800, 0x400, 0x200, 0x100, 0x80, 0x40, 0x20, 0x10, 0x8, 0x4, 0x2, 0x1, } numberOfEntries := len(diagReport) bitCounts := make([]int, 12) for _, report := range diagReport { for i, mask := range masks { if report&mask == mask { bitCounts[i]++ } } } mostCommonBits := uint16(0) for i, countOfOnes := range bitCounts { if countOfOnes > numberOfEntries-countOfOnes { mostCommonBits = mostCommonBits | masks[i] } } return mostCommonBits } func getMostCommonBitForPosition(diagReports []uint16, positionMask uint16) uint16 { numberOfEntries := len(diagReports) countOfSetBits := 0 for _, report := range diagReports { if report&positionMask == positionMask { countOfSetBits++ } } if countOfSetBits >= numberOfEntries-countOfSetBits { return positionMask } else { return 0 } } func getLeastCommonBitForPosition(diagReports []uint16, positionMask uint16) uint16 { numberOfEntries := len(diagReports) numberOfOnes := 0 for _, report := range diagReports { if report&positionMask == positionMask { numberOfOnes++ } } numberOfZeroes := numberOfEntries - numberOfOnes if numberOfOnes < numberOfZeroes { return positionMask } else { return 0 } } func getOxygenRating(diagReport []uint16, positionMasks []uint16) uint16 { searchSpace := diagReport for _, mask := range positionMasks { mostCommonBitInPosition := getMostCommonBitForPosition(searchSpace, mask) newSearchSpace := 
make([]uint16, 0, len(searchSpace)) for _, entry := range searchSpace { bitvalue := entry & mask shouldGrabReport := bitvalue^(mostCommonBitInPosition&mask) == 0 if shouldGrabReport { newSearchSpace = append(newSearchSpace, entry) } } searchSpace = newSearchSpace if len(searchSpace) == 1 { break } } return searchSpace[0] } func getScrubberRating(diagReport []uint16, positionMasks []uint16) uint16 { searchSpace := diagReport for _, mask := range positionMasks { leastCommonBitInPosition := getLeastCommonBitForPosition(searchSpace, mask) newSearchSpace := make([]uint16, 0, len(searchSpace)) for _, entry := range searchSpace { bitvalue := entry & mask shouldGrabReport := bitvalue^(leastCommonBitInPosition&mask) == 0 if shouldGrabReport { newSearchSpace = append(newSearchSpace, entry) } } searchSpace = newSearchSpace if len(searchSpace) == 1 { break } } return searchSpace[0] }
internal/pkg/day3/diagparser.go
0.697918
0.421076
diagparser.go
starcoder
package main

import (
	"math"
	"strconv"
	"strings"
)

// Position is an integer point in 3-D scanner space.
type Position struct {
	x, y, z int
}

// ParsePositions parses lines of the form "x,y,z" into Positions. The input
// is assumed well formed: a malformed coordinate silently parses as 0.
func ParsePositions(lines []string) []Position {
	reading := make([]Position, 0, len(lines))
	for _, line := range lines {
		parts := strings.Split(line, ",")
		x, _ := strconv.Atoi(parts[0])
		y, _ := strconv.Atoi(parts[1])
		z, _ := strconv.Atoi(parts[2])
		reading = append(reading, Position{x, y, z})
	}
	return reading
}

// Distances returns the set of Euclidean distances from p to every other
// position in positions; p itself (matched by value) is skipped.
func (p *Position) Distances(positions *[]Position) map[float64]bool {
	distances := make(map[float64]bool)
	for _, otherBeacon := range *positions {
		if *p == otherBeacon {
			continue
		}
		distances[distance(*p, otherBeacon)] = true
	}
	return distances
}

// cacheKey identifies an ordered pair of positions for the distance cache.
type cacheKey struct {
	from Position
	to   Position
}

// cache memoizes pairwise distances; lookups try both orientations because
// the distance is symmetric, but entries are stored under (from, to) only.
var cache = make(map[cacheKey]float64)

// distance returns the Euclidean distance between from and to, memoized.
func distance(from Position, to Position) float64 {
	if d, found := cache[cacheKey{from, to}]; found {
		return d
	}
	if d, found := cache[cacheKey{to, from}]; found {
		return d
	}
	// Direct multiplication squares exactly like math.Pow(x, 2) for these
	// integer-valued floats, without the function-call overhead.
	dx := float64(from.x - to.x)
	dy := float64(from.y - to.y)
	dz := float64(from.z - to.z)
	d := math.Sqrt(dx*dx + dy*dy + dz*dz)
	cache[cacheKey{from, to}] = d
	return d
}

// SameDistance checks whether ps2 is ps1 translated by one constant offset.
// On success it returns that offset and false. NOTE the inverted sense of the
// boolean: true means the offsets disagree ("not the same distance").
func SameDistance(ps1 []Position, ps2 []Position) (Position, bool) {
	dist := Position{
		ps1[0].x - ps2[0].x,
		ps1[0].y - ps2[0].y,
		ps1[0].z - ps2[0].z,
	}
	for i := 1; i < len(ps1); i++ {
		offset := Position{
			ps1[i].x - ps2[i].x,
			ps1[i].y - ps2[i].y,
			ps1[i].z - ps2[i].z,
		}
		if offset != dist {
			return Position{}, true
		}
	}
	return dist, false
}

// Variations returns the 48 axis-permutation x sign-flip images of reading:
// 6 coordinate orders times 8 sign combinations. Result index i holds every
// input position under the same transformation, in input order. (Only 24 of
// the 48 are proper rotations; the remaining 24 are reflections.)
func Variations(reading *[]Position) [][]Position {
	signs := [][]int{
		{1, 1, 1}, {1, 1, -1}, {1, -1, 1}, {1, -1, -1},
		{-1, 1, 1}, {-1, 1, -1}, {-1, -1, 1}, {-1, -1, -1},
	}
	rotations := make([][]Position, 48)
	for _, beacon := range *reading {
		position := 0
		for i := 0; i < 6; i++ {
			var rotation Position
			switch i {
			case 1:
				rotation = Position{beacon.z, beacon.x, beacon.y}
			case 2:
				rotation = Position{beacon.y, beacon.z, beacon.x}
			case 3:
				rotation = Position{beacon.x, beacon.z, beacon.y}
			case 4:
				rotation = Position{beacon.z, beacon.y, beacon.x}
			case 5:
				rotation = Position{beacon.y, beacon.x, beacon.z}
			default:
				rotation = beacon
			}
			for _, s := range signs {
				rotations[position] = append(rotations[position], Position{
					rotation.x * s[0],
					rotation.y * s[1],
					rotation.z * s[2],
				})
				position++
			}
		}
	}
	return rotations
}
day19/position.go
0.692434
0.426083
position.go
starcoder
package angular

import (
	"time"
)

// Units for Acceleration values. Always multiply with a unit when setting the
// initial value, like you would for time.Time. This prevents you from having
// to worry about the internal storage format. Each constant is the matching
// angular-velocity unit divided by time.Second (a nanosecond count), so the
// raw value encodes "angle per nanosecond squared" scaling.
const (
	MilliradianPerSecondSquared Acceleration = Acceleration(MilliradianPerSecond) / Acceleration(time.Second)
	RadianPerSecondSquared      Acceleration = Acceleration(RadianPerSecond) / Acceleration(time.Second)
	DegreePerSecondSquared      Acceleration = Acceleration(DegreePerSecond) / Acceleration(time.Second)
	GradianPerSecondSquared     Acceleration = Acceleration(GradianPerSecond) / Acceleration(time.Second)
)

// MilliradiansPerSecondSquared returns a as a floating point number of
// milliradians per second squared.
func (a Acceleration) MilliradiansPerSecondSquared() float64 {
	return float64(a / MilliradianPerSecondSquared)
}

// RadiansPerSecondSquared returns a as a floating point number of radians per
// second squared.
func (a Acceleration) RadiansPerSecondSquared() float64 {
	return float64(a / RadianPerSecondSquared)
}

// DegreeesPerSecondSquared returns a as a floating point number of degrees
// per second squared.
// NOTE(review): the doubled "ee" in the name is part of the generated public
// API; renaming it would break callers, so only the comment is corrected.
func (a Acceleration) DegreeesPerSecondSquared() float64 {
	return float64(a / DegreePerSecondSquared)
}

// GradiansPerSecondSquared returns a as a floating point number of gradians
// per second squared.
func (a Acceleration) GradiansPerSecondSquared() float64 {
	return float64(a / GradianPerSecondSquared)
}

// Abs returns the absolute value of a as a copy.
func (a Acceleration) Abs() Acceleration {
	if a < 0 {
		return -a
	}
	return a
}

// Mul returns the product of a * x as a new Acceleration.
func (a Acceleration) Mul(x float64) Acceleration {
	return a * Acceleration(x)
}

// Div returns the quotient of a / x as a new Acceleration.
func (a Acceleration) Div(x float64) Acceleration {
	return a / Acceleration(x)
}

// DivAcceleration returns the quotient of a / x as a floating point number.
func (a Acceleration) DivAcceleration(x Acceleration) float64 {
	return float64(a / x)
}

// MulDuration returns the product of a * t as a Velocity. Multiplying the raw
// value by the duration's nanosecond count cancels the time.Second divisor
// baked into the unit constants above, yielding a velocity in the same unit
// system.
func (a Acceleration) MulDuration(t time.Duration) Velocity {
	return Velocity(float64(a) * float64(t))
}
angular/acceleration_generated.go
0.931634
0.607605
acceleration_generated.go
starcoder
package main import ( "fmt" "strconv" "strings" "time" "github.com/spf13/cobra" "github.com/koalacxr/nodescan" ) const ConstNodeScanLogo = ` $$\ $$\ $$\ $$$$$$\ $$$\ $$ | $$ | $$ __$$\ $$$$\ $$ | $$$$$$\ $$$$$$$ | $$$$$$\ $$ / \__| $$$$$$$\ $$$$$$\ $$$$$$$\ $$ $$\$$ |$$ __$$\ $$ __$$ |$$ __$$\ \$$$$$$\ $$ _____|\____$$\ $$ __$$\ $$ \$$$$ |$$ / $$ |$$ / $$ |$$$$$$$$ | \____$$\ $$ / $$$$$$$ |$$ | $$ | $$ |\$$$ |$$ | $$ |$$ | $$ |$$ ____| $$\ $$ |$$ | $$ __$$ |$$ | $$ | $$ | \$$ |\$$$$$$ |\$$$$$$$ |\$$$$$$$\ \$$$$$$ |\$$$$$$$\\$$$$$$$ |$$ | $$ | \__| \__| \______/ \_______| \_______|$$$$$$\\______/ \_______|\_______|\__| \__| \______| ` func main() { newNodeScanner := nodescan.NewNodeScanner(nodescan.SetIsLocal(true)) LocalIPs := newNodeScanner.LocalIPs() rootCmd := &cobra.Command{ Use: "nodescan", Short: ConstNodeScanLogo + "local IP:" + strings.Join(LocalIPs, ", ") + "\n\n\n" + "nodescan is a quick scan of local or remote IP and ports. ", Version: "1.0", } rootCmd.AddCommand( ScanLocalIPs(), ScanLocalIpPorts(), ScanLocalNetwork(), ScanRemoteNetwork()) cobra.OnInitialize() if err := rootCmd.Execute(); err != nil { panic(err) } } func ScanLocalIPs() *cobra.Command { return &cobra.Command{ Use: "l", Aliases: []string{"localIPs"}, Short: "Use commands(l or localIPs) to scan the locally IPs ", RunE: func(cmd *cobra.Command, args []string) error { newNodeScanner := nodescan.NewNodeScanner(nodescan.SetIsLocal(true)) LocalIPs := newNodeScanner.LocalIPs() fmt.Println("Scanned the locally IPs as follows:") for _, localIp := range LocalIPs { fmt.Println(localIp) } return nil }, } } func ScanLocalIpPorts() *cobra.Command { return &cobra.Command{ Use: "p", Aliases: []string{"localPorts"}, Short: "Use commands(p or localPorts) to scan the locally IP ports, multiple port Numbers are spaced by ','(ex:80,443) ", RunE: func(cmd *cobra.Command, args []string) error { if len(args) == 0 { return nil } searchPorts := parsePortParm(args[0]) if len(searchPorts) == 0 { 
fmt.Println("params[port] is empty") return nil } newNodeScanner := nodescan.NewNodeScanner(nodescan.SetIsLocal(true), nodescan.SetPorts(searchPorts)) LocalIPs := newNodeScanner.LocalScan() fmt.Println("Scanned the locally available IP ports as follows:") for _, ipa := range LocalIPs.Values { fmt.Println(fmt.Sprintf("%v:%d", ipa.IP, ipa.Port)) } return nil }, } } func ScanLocalNetwork() *cobra.Command { return &cobra.Command{ Use: "lp", Aliases: []string{"lanPorts"}, Short: "Use commands(lp or lanPorts) to scan the local network IP ports, multiple port Numbers are spaced by ','(ex:80,443) ", RunE: func(cmd *cobra.Command, args []string) error { if len(args) == 0 { return nil } searchPorts := parsePortParm(args[0]) if len(searchPorts) == 0 { fmt.Println("params[port] is empty") return nil } newNodeScanner := nodescan.NewNodeScanner( nodescan.SetIsLocal(true), nodescan.SetTimeout(300*time.Millisecond), nodescan.SetPorts(searchPorts), nodescan.SetNetworkTypes([]nodescan.NetworkType{nodescan.NetWorkTCP4}), nodescan.SetMaxChannel(100)) LocalIPs := newNodeScanner.Scan() fmt.Println("Scanned the local network IP ports as follows:") for _, localIp := range LocalIPs.Values { fmt.Println(fmt.Sprintf("%v:%d", localIp.IP, localIp.Port)) } return nil }, } } func ScanRemoteNetwork() *cobra.Command { return &cobra.Command{ Use: "wp", Aliases: []string{"wanIpPorts"}, Short: "Use commands(wp or wanIpPorts) to scan the remote network IP ports, multiple port Numbers are spaced by ','(ex: 127.0.0.1,10.128.51.187:80,443) ", RunE: func(cmd *cobra.Command, args []string) error { if len(args) == 0 { fmt.Println("params[ip or port] invalid, example 127.0.0.1,10.128.11.187:80,443 ") return nil } searchIPs, searchPorts := parseIpPortParm(args[0]) if len(searchIPs) == 0 || len(searchPorts) == 0 { fmt.Println(fmt.Sprintf("params[ip or port] is empty, ips[%v], ports[%v]", searchIPs, searchPorts)) return nil } newNodeScanner := nodescan.NewNodeScanner( nodescan.SetIsLocal(false), 
nodescan.SetTimeout(300*time.Millisecond), nodescan.SetIps(searchIPs), nodescan.SetPorts(searchPorts), nodescan.SetNetworkTypes([]nodescan.NetworkType{nodescan.NetWorkTCP4}), nodescan.SetMaxChannel(100)) LocalIPs := newNodeScanner.Scan() fmt.Println("Scanned the remote network IP ports as follows:") for _, localIp := range LocalIPs.Values { fmt.Println(fmt.Sprintf("%v:%d", localIp.IP, localIp.Port)) } return nil }, } } func parseIpPortParm(value string) ([]string, []int) { ips := make([]string, 0) ports := make([]int, 0) if len(value) > 0 { param := strings.TrimSpace(value) ipPortParams := strings.Split(param, ":") if len(ipPortParams) == 2 { ipSplit := strings.Split(ipPortParams[0], ",") for _, itemIP := range ipSplit { ips = append(ips, itemIP) } portSplit := strings.Split(ipPortParams[1], ",") for _, itemPort := range portSplit { portNum, err := strconv.Atoi(strings.TrimSpace(itemPort)) if err != nil { continue } ports = append(ports, portNum) } } } return ips, ports } func parsePortParm(value string) []int { ports := make([]int, 0) if len(value) > 0 { portParam := strings.TrimSpace(value) portSplit := strings.Split(portParam, ",") for _, itemPort := range portSplit { portNum, err := strconv.Atoi(strings.TrimSpace(itemPort)) if err != nil { continue } ports = append(ports, portNum) } } return ports }
cmds/nodescan/main.go
0.503174
0.741171
main.go
starcoder
package uitheme

import (
	"image/color"

	"fyne.io/fyne"
	"fyne.io/fyne/theme"
)

// DarkBlueNormal is a custom fyne theme: dark background, blue accents, and
// fonts borrowed from the built-in light theme. ColorDark/ColorLight/
// ColorSecondary are declared elsewhere in this package.
type DarkBlueNormal struct{}

// NewDarkBlueNormal returns a new DarkBlueNormal theme instance.
func NewDarkBlueNormal() *DarkBlueNormal { return &DarkBlueNormal{} }

// BackgroundColor returns the window background colour.
func (DarkBlueNormal) BackgroundColor() color.Color { return ColorDark }

//func (DarkBlueNormal) ButtonColor() color.Color { return color.RGBA{R: 0x6f, G: 0x42, B: 0xc1, A: 0xff} } // purple
//func (DarkBlueNormal) ButtonColor() color.Color { return color.RGBA{R: 0x2e, G: 0x22, B: 0x8b, A: 0xff} } // 2E228BFF

// ButtonColor returns the default button fill colour (141414FF).
func (DarkBlueNormal) ButtonColor() color.Color { return color.RGBA{R: 0x14, G: 0x14, B: 0x14, A: 0xff} }

// DisabledButtonColor returns the fill colour for disabled buttons.
func (DarkBlueNormal) DisabledButtonColor() color.Color {
	return color.RGBA{R: 0xf, G: 0xf, B: 0x11, A: 0xff}
}

// TextColor returns the default text colour.
func (DarkBlueNormal) TextColor() color.Color { return ColorLight }

// DisabledTextColor returns the text colour for disabled widgets.
func (DarkBlueNormal) DisabledTextColor() color.Color {
	return color.RGBA{R: 0xc8, G: 0xc8, B: 0xc8, A: 0xff}
}

// IconColor returns the default icon tint.
func (DarkBlueNormal) IconColor() color.Color {
	return color.RGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff}
}

// DisabledIconColor returns the icon tint for disabled widgets.
func (DarkBlueNormal) DisabledIconColor() color.Color {
	return color.RGBA{R: 0xc8, G: 0xc8, B: 0xc8, A: 0xff}
}

// HyperlinkColor returns the hyperlink text colour.
func (DarkBlueNormal) HyperlinkColor() color.Color {
	return color.RGBA{R: 0x0, G: 0x7b, B: 0xff, A: 0xff}
}

// PlaceHolderColor returns the entry placeholder text colour.
func (DarkBlueNormal) PlaceHolderColor() color.Color {
	return color.RGBA{R: 0x6c, G: 0x75, B: 0x7d, A: 0xff}
}

// PrimaryColor returns the theme's primary accent (blue).
func (DarkBlueNormal) PrimaryColor() color.Color {
	return color.RGBA{R: 0x0, G: 0x7b, B: 0xff, A: 0xff}
}

// HoverColor returns the widget hover highlight colour.
func (DarkBlueNormal) HoverColor() color.Color { return ColorSecondary }

// FocusColor returns the focus ring colour (same blue as PrimaryColor).
func (DarkBlueNormal) FocusColor() color.Color {
	return color.RGBA{R: 0x0, G: 0x7b, B: 0xff, A: 0xff}
}

// ScrollBarColor returns the (mostly transparent) scroll bar colour.
func (DarkBlueNormal) ScrollBarColor() color.Color {
	return color.RGBA{R: 0x23, G: 0x23, B: 0x23, A: 0x8}
}

// ShadowColor returns the drop shadow colour.
func (DarkBlueNormal) ShadowColor() color.Color {
	return color.RGBA{R: 0x0, G: 0x0, B: 0x0, A: 0x40}
}

// TextSize returns the base font size in points.
func (DarkBlueNormal) TextSize() int { return 14 }

// TextFont returns the regular font, borrowed from the built-in light theme.
func (DarkBlueNormal) TextFont() fyne.Resource { return theme.LightTheme().TextFont() }

//func (DarkBlueNormal) TextFont() fyne.Resource { return resource.FontOfYaHeiMonacoHybrid }

// TextBoldFont returns the bold font.
func (DarkBlueNormal) TextBoldFont() fyne.Resource { return theme.LightTheme().TextBoldFont() }

// TextItalicFont returns the italic font.
func (DarkBlueNormal) TextItalicFont() fyne.Resource { return theme.LightTheme().TextItalicFont() }

// TextBoldItalicFont returns the bold-italic font.
func (DarkBlueNormal) TextBoldItalicFont() fyne.Resource {
	return theme.LightTheme().TextBoldItalicFont()
}

// TextMonospaceFont returns the monospace font.
func (DarkBlueNormal) TextMonospaceFont() fyne.Resource {
	return theme.LightTheme().TextMonospaceFont()
}

//func (DarkBlueNormal) TextMonospaceFont() fyne.Resource { return resource.FontOfYaHeiMonacoHybrid }

// Padding returns the standard widget padding in pixels.
func (DarkBlueNormal) Padding() int { return 4 }

// IconInlineSize returns the inline icon size in pixels.
func (DarkBlueNormal) IconInlineSize() int { return 12 }

// ScrollBarSize returns the expanded scroll bar width in pixels.
func (DarkBlueNormal) ScrollBarSize() int { return 12 }

// ScrollBarSmallSize returns the collapsed scroll bar width in pixels.
func (DarkBlueNormal) ScrollBarSmallSize() int { return 3 }
uitheme/darkBlueNormal.go
0.670716
0.429669
darkBlueNormal.go
starcoder
package qset import ( "errors" "regexp" "strconv" "sync" "time" "github.com/garyburd/redigo/redis" "github.com/kavehmz/lww" ) /*QSet structure defines the structure connects to Redis and needs two connections one for read and write and one for subscribing to channel. QSet can only store data which is acceptable both as map key in Go and key name in Redis. Marshal function needs to make sure if this based on user data. UnMarshal must be able to convert the stored data back to a format that is usable by user. */ type QSet struct { // ConnWrite is the redis connection to be used for write elements to redis. This can be for example one master server. ConnWrite redis.Conn // ConnWrite is the redis connection to be used for subscribing to element notificatinos. This can be for example the local redis replica. ConnSub redis.Conn // AddSet sets which key will be used in redis for the set. Change will be also published in the channel with the same name. SetKey string // Marshal function needs to convert the element to string. Redis can only store and retrieve string values. Marshal func(interface{}) string // UnMarshal function needs to be able to convert a Marshalled string back to a readable structure for consumer of library. UnMarshal func(string) interface{} lastState error set lww.Set sync.WaitGroup sync.RWMutex setChannel chan setData sync chan bool quit chan bool // QueueMax set the buffer size for set channel. Larger numbers will icnrease the risk of losing data in case of crash but will create larger buffers that normally improve set performance. // When buffer is full, speed of each set will equal to speed of saving data in Redis. 
QueueMax int setScript *redis.Script psc redis.PubSubConn } type setData struct { element interface{} ts time.Time } func roundToMicro(t time.Time) int64 { return t.Round(time.Microsecond).UnixNano() / 1000 } func (s *QSet) checkErr(err error) { s.Lock() if err != nil { s.lastState = err s.Unlock() return } s.Unlock() s.lastState = nil } // LastState is an error type that will return the error state of last executed redis command. Add redis connection are not shareable this can be used after each command to know the last state. func (s *QSet) LastState() error { s.Lock() st := s.lastState s.Unlock() return st } const updateToLatestAndPublishInRedis string = ` local c = tonumber(redis.call('ZSCORE', KEYS[1], ARGV[2])) if not c or tonumber(ARGV[1]) > c then redis.call('ZADD', KEYS[1], ARGV[1], ARGV[2]) redis.call('PUBLISH', KEYS[1], ARGV[1] .. ":" .. ARGV[2]) return tonumber(ARGV[2]) else return 0 end ` //Init will do a one time setup for underlying set. It will be called from WLL.Init func (s *QSet) Init() { if !s.checkInitParams() { return } s.set.Init() listening := make(chan bool) go s.listenLoop(listening) <-listening s.readMembers() if s.QueueMax == 0 { s.QueueMax = 100000 } s.setChannel = make(chan setData, s.QueueMax) s.sync = make(chan bool) s.quit = make(chan bool) //This Lua function will do a __atomic__ check and set of timestamp only in incremental way. 
s.setScript = redis.NewScript(1, updateToLatestAndPublishInRedis) go s.writeLoop() } func (s *QSet) checkInitParams() bool { if s.ConnWrite == nil { s.checkErr(errors.New("ConnWrite must be set")) return false } if s.ConnSub == nil { s.checkErr(errors.New("ConnSub must be set")) return false } if s.Marshal == nil { s.checkErr(errors.New("Marshal must be set")) return false } if s.UnMarshal == nil { s.checkErr(errors.New("UnMarshal must be set")) return false } if s.SetKey == "" { s.checkErr(errors.New("SetKey must be set")) return false } return true } func (s *QSet) listenLoop(listening chan bool) { s.psc = redis.PubSubConn{Conn: s.ConnSub} s.Lock() s.psc.Subscribe(s.SetKey) s.Unlock() listening <- true r := regexp.MustCompile(":") for { switch n := s.psc.Receive().(type) { case redis.Message: e := r.Split(string(n.Data), 2) tms, _ := strconv.Atoi(e[0]) s.set.Set(s.UnMarshal(e[1]), time.Unix(0, 0).Add(time.Duration(tms)*time.Microsecond)) case redis.Subscription: if n.Count == 0 { return } case error: s.checkErr(n) return } } } func (s *QSet) writeLoop() { for { select { case d := <-s.setChannel: s.setScript.Do(s.ConnWrite, s.SetKey, roundToMicro(d.ts), s.Marshal(d.element)) s.Done() case <-s.quit: return } } } func (s *QSet) readMembers() { zs, err := redis.Strings(s.ConnWrite.Do("ZRANGE", s.SetKey, 0, -1, "WITHSCORES")) s.checkErr(err) for i := 0; i < len(zs); i += 2 { n, _ := strconv.Atoi(zs[i+1]) s.set.Set(s.UnMarshal(zs[i]), time.Unix(0, 0).Add(time.Duration(n)*time.Microsecond)) } } //Quit will end the write loop (Goroutine). This exist to be call at the end of Qset life to close the Goroutine to avoid memory leakage. func (s *QSet) Quit() { s.quit <- true s.Lock() s.psc.Unsubscribe(s.SetKey) s.Unlock() } //Sync will block the call until redis queue is empty and all writes are done func (s *QSet) Sync() { s.Wait() } //Set adds an element to the set if it does not exists. If it exists Set will update the provided timestamp. 
It also publishes the change into redis at SetKey channel. func (s *QSet) Set(e interface{}, t time.Time) { s.set.Set(e, t.Round(time.Microsecond)) s.Add(1) s.setChannel <- setData{ts: t.Round(time.Microsecond), element: e} } //Len must return the number of members in the set func (s *QSet) Len() int { return s.set.Len() } //Get returns timestmap of the element in the set if it exists and true. Otherwise it will return an empty timestamp and false. func (s *QSet) Get(e interface{}) (time.Time, bool) { return s.set.Get(e) } //List returns list of all elements in the set func (s *QSet) List() []interface{} { var l []interface{} for _, v := range s.set.List() { l = append(l, v) } return l }
qset.go
0.54577
0.41401
qset.go
starcoder
package frontend import ( "fmt" "strings" "github.com/isaacev/Plaid/feedback" "github.com/isaacev/Plaid/source" ) type Type interface { Equals(Type) bool CastsTo(Type) bool AddMethod(*Method) HasMethod(string, Type) (bool, Type) String() string isType() } type Method struct { operator string root Type operand Type result Type } type AnyType struct { methods []*Method } func (*AnyType) Equals(t2 Type) bool { if _, ok := t2.(*AnyType); ok { return true } return false } func (any *AnyType) CastsTo(t2 Type) bool { // It's important to remember that all types can be used where `Any` is // accepted but `Any` can only be used where `Any` is accepted so `Any` can // only be automatically cast to `Any` hence the check for equality in this // method return any.Equals(t2) } func (any *AnyType) AddMethod(method *Method) { any.methods = append(any.methods, method) } func (any *AnyType) HasMethod(operator string, operand Type) (exists bool, returnType Type) { for _, method := range any.methods { // Handle unary methods with a `nil` operand if method.operator == operator && (method.operand == nil) && (operand == nil) { return true, method.result } // Handle binary methods with a well defined operand if method.operator == operator && method.operand.Equals(operand) { return true, method.result } } return false, nil } func (*AnyType) String() string { return "Any" } func (AnyType) isType() {} type TypeOperator struct { name string types []Type methods []*Method } func (op *TypeOperator) Equals(t2 Type) bool { switch v := t2.(type) { case *TypeOperator: // Check if the two types are pointers to the same address if op == v { return true } // Check if the operator names are the same if op.name != v.name { return false } // Check that their Type arguments are the same if len(op.types) == len(v.types) { for i, t := range op.types { if t.Equals(v.types[i]) == false { return false } } return true } } return false } func (op *TypeOperator) CastsTo(t2 Type) bool { switch v := t2.(type) { 
case *AnyType: return true case *TypeOperator: // Check if the two types are pointers to the same address if op == v { return true } // Check if the operator names are the same if op.name != v.name { return false } // Check that their Type arguments are the same if len(op.types) == len(v.types) { for i, t := range op.types { if t.CastsTo(v.types[i]) == false { return false } } return true } } return false } func (op *TypeOperator) AddMethod(method *Method) { op.methods = append(op.methods, method) } func (op TypeOperator) HasMethod(operator string, operand Type) (exists bool, returnType Type) { for _, method := range op.methods { // Handle unary methods with a `nil` operand if method.operator == operator && (method.operand == nil) && (operand == nil) { return true, method.result } // Handle binary methods with a well defined operand if method.operator == operator && method.operand.Equals(operand) { return true, method.result } } return false, nil } func (op TypeOperator) String() string { switch len(op.types) { case 0: return op.name case 2: return fmt.Sprintf("(%s %s %s)", op.types[0], op.name, op.types[1]) default: strungTypes := make([]string, len(op.types)) for i, t := range op.types { strungTypes[i] = t.String() } return fmt.Sprintf("%s %s", op.name, strings.Join(strungTypes, ", ")) } } func (TypeOperator) isType() {} type FuncType struct { params []Type returnType Type methods []*Method } func (fn *FuncType) Equals(t2 Type) bool { switch v := t2.(type) { case *FuncType: // Check if the two types are pointers to the same address if fn == v { return true } // Check that the return type is the same if fn.returnType.Equals(v.returnType) == false { return false } // Check that the arguments have the same types if len(fn.params) == len(v.params) { for i, t := range fn.params { if t.Equals(v.params[i]) == false { return false } } return true } else { return false } default: return false } } func (fn *FuncType) CastsTo(t2 Type) bool { switch v := t2.(type) { case 
*AnyType: return true case *FuncType: // Check if the two types are pointers to the same address if fn == v { return true } // Check that the return type is the same if fn.returnType.Equals(v.returnType) == false { return false } // Check that the arguments have the same types if len(fn.params) == len(v.params) { for i, t := range fn.params { if t.CastsTo(v.params[i]) == false { return false } } return true } else { return false } default: return false } } func (fn *FuncType) AddMethod(method *Method) { fn.methods = append(fn.methods, method) } func (fn FuncType) HasMethod(operator string, operand Type) (exists bool, returnType Type) { for _, method := range fn.methods { // Handle unary methods with a `nil` operand if method.operator == operator && (method.operand == nil) && (operand == nil) { return true, method.result } // Handle binary methods with a well defined operand if method.operator == operator && method.operand.Equals(operand) { return true, method.result } } return false, nil } func (fn FuncType) String() string { if len(fn.params) == 1 { return fmt.Sprintf("(%s => %s)", fn.params[0].String(), fn.returnType.String()) } return fmt.Sprintf("(%s => %s)", tupleToString(fn.params), fn.returnType.String()) } func (FuncType) isType() {} type ListType struct { elementType Type methods []*Method } func (lt *ListType) Equals(t2 Type) bool { if listT2, ok := t2.(*ListType); ok { return lt.elementType.Equals(listT2.elementType) } return false } func (lt *ListType) CastsTo(t2 Type) bool { switch v := t2.(type) { case *AnyType: return true case *ListType: return lt.elementType.CastsTo(v.elementType) } return false } func (lt *ListType) AddMethod(method *Method) { lt.methods = append(lt.methods, method) } func (lt *ListType) HasMethod(operator string, operand Type) (exists bool, returnType Type) { for _, method := range lt.methods { // Handle unary methods with a `nil` operand if method.operator == operator && (method.operand == nil) && (operand == nil) { return true, 
method.result } // Handle binary methods with a well defined operand if method.operator == operator && method.operand.Equals(operand) { return true, method.result } } return false, nil } func (lt *ListType) String() string { return fmt.Sprintf("[%s]", lt.elementType.String()) } func (ListType) isType() {} func tupleToString(tuple []Type) string { str := "(" for i, t := range tuple { str += t.String() if i < len(tuple)-1 { str += ", " } } str += ")" return str } func typeAnnotationToType(scope *Scope, annotation TypeAnnotation) (Type, feedback.Message) { switch a := annotation.(type) { case NamedTypeAnnotation: name := a.Name.Name newRef := &TypeOperator{ name: a.Name.Name, } if exists, typeRef := scope.types.getNamedType(name); exists && typeRef.CastsTo(newRef) { return typeRef, nil } return scope.types.builtin.Any, feedback.Error{ Classification: feedback.UndefinedTypeError, File: scope.File, What: feedback.Selection{ Description: fmt.Sprintf("Unknown type `%s`", name), Span: source.Span{a.Pos(), a.End()}, }, } case FuncTypeAnnotation: var params []Type for _, p := range a.Parameters { if paramType, err := typeAnnotationToType(scope, p); err != nil { return scope.types.builtin.Any, err } else { params = append(params, paramType) } } if returnType, err := typeAnnotationToType(scope, a.ReturnType); err != nil { return scope.types.builtin.Any, err } else { return &FuncType{ params: params, returnType: returnType, }, nil } case ListTypeAnnotation: if elementType, err := typeAnnotationToType(scope, a.ElementType); err != nil { return scope.types.builtin.Any, err } else { return &ListType{ elementType: elementType, }, nil } case nil: return scope.types.builtin.Any, nil default: panic(fmt.Sprintf("Unknown annotation: %T", annotation)) } }
frontend/types.go
0.697609
0.496826
types.go
starcoder
package day72 import ( "errors" ) var errInfiniteLoop = errors.New("loop detected") // AdjacencyMatrix represents a sparse adjacency matrix. type AdjacencyMatrix map[int]map[int]struct{} // Node is a character representation of a node in a graph. type Node rune // Edge represents a directed edge going 'From' and 'To' a different node. // Nodes are represented by integers from 0..N. type Edge struct { From, To int } // ErrInfiniteLoop returns the error if an infinite loop is detected. func ErrInfiniteLoop() error { return errInfiniteLoop } // LargestPathValue takes a slice of runes representing nodes. // Edges represents all the directed edges. // Returns the largest path value or an error if an infinite loop is detected. func LargestPathValue(nodes []Node, edges []Edge) (int, error) { graph := buildAdjacencyMatrix(edges) visited := make(map[int]struct{}, len(nodes)) freq := make(map[Node]int) var largest int for start := range graph { if count, err := dfsBacktracking(graph, nodes, start, visited, freq); err != nil { return 0, err } else if count > largest { largest = count } } return largest, nil } func dfsBacktracking(g AdjacencyMatrix, nodes []Node, n int, visited map[int]struct{}, freq map[Node]int) (int, error) { visited[n] = struct{}{} freq[nodes[n]]++ var max int var err error if len(g[n]) == 0 { for _, count := range freq { if count > max { max = count } } } else { for next := range g[n] { if _, seen := visited[next]; seen { return 0, ErrInfiniteLoop() } else if v, err := dfsBacktracking(g, nodes, next, visited, freq); err != nil { return 0, err } else if v > max { max = v } } } freq[nodes[n]]-- delete(visited, n) return max, err } func buildAdjacencyMatrix(edges []Edge) AdjacencyMatrix { graph := make(AdjacencyMatrix) for _, edge := range edges { if _, found := graph[edge.From]; !found { graph[edge.From] = make(map[int]struct{}) } graph[edge.From][edge.To] = struct{}{} } return graph }
day72/problem.go
0.790854
0.498901
problem.go
starcoder
package models

// Deployment provides declarative updates for Pods and ReplicaSets (the
// next-generation ReplicationController). It models the subset of a
// Kubernetes Deployment manifest used by this project; yaml tags are present
// where the Go field name does not lowercase cleanly to the manifest key.
type Deployment struct {
	APIVersion string `yaml:"apiVersion"`
	Kind       string
	Metadata   struct {
		Name      string
		Namespace string
		// Labels carries the version/date labels stamped on the deployment.
		Labels struct {
			Version string
			Date    string
		}
	}
	Spec struct {
		Replicas             int
		RevisionHistoryLimit int `yaml:"revisionHistoryLimit"`
		Strategy             struct {
			RollingUpdate `yaml:"rollingUpdate"`
			Type          string
		}
		// Template is the pod template instantiated by the deployment.
		Template struct {
			Metadata struct {
				Labels struct {
					App string
				}
			}
			Spec struct {
				Containers []Container
				Volumes    []struct {
					Name     string
					EmptyDir `yaml:"emptyDir"`
				}
			}
		}
	}
}

// Container represents the running container in a pod.
type Container struct {
	Name         string
	Image        string
	VolumeMounts `yaml:"volumeMounts"`
	// Env lists environment variables, each with either a literal Value or a
	// ValueFrom reference (secret key or downward-API field).
	Env []struct {
		Name      string
		ValueFrom `yaml:"valueFrom,omitempty"`
		Value     string `yaml:"value,omitempty"`
	}
	ImagePullPolicy string `yaml:"imagePullPolicy"`
	Ports           []struct {
		ContainerPort int `yaml:"containerPort"`
	}
	ReadinessProbe `yaml:"readinessProbe"`
}

// RollingUpdate represents the rolling update strategy.
type RollingUpdate struct {
	MaxUnavailable int `yaml:"maxUnavailable"`
}

// HTTPGet configures the readiness probes.
type HTTPGet struct {
	Path string
	Port int
}

// ReadinessProbe describes the configuration around readiness checks.
type ReadinessProbe struct {
	HTTPGet             `yaml:"httpGet"`
	InitialDelaySeconds int `yaml:"initialDelaySeconds"`
	TimeoutSeconds      int `yaml:"timeoutSeconds"`
	SuccessThreshold    int `yaml:"successThreshold"`
	FailureThreshold    int `yaml:"failureThreshold"`
}

// SecretKeyRef provides a description to how secrets are pulled.
type SecretKeyRef struct {
	Name string
	Key  string
}

// FieldRef is the configuration object of how values are pulled from the
// cluster (downward API).
type FieldRef struct {
	FieldPath string `yaml:"fieldPath,omitempty"`
}

// ValueFrom is the configuration object of how values are pulled.
type ValueFrom struct {
	SecretKeyRef `yaml:"secretKeyRef,omitempty"`
	FieldRef     `yaml:"fieldRef,omitempty"`
}

// VolumeMounts provides a configuration for additional pod volumes.
type VolumeMounts []struct {
	MountPath string `yaml:"mountPath"`
	Name      string
}

// EmptyDir provides a descriptor for volume configuration; it has no options,
// its presence alone selects an emptyDir volume.
type EmptyDir struct {
}
models/deployment.go
0.708918
0.41401
deployment.go
starcoder
package packet

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"github.com/sandertv/gophertunnel/minecraft/protocol"
	"image/color"
)

// Flags that may be OR-ed into the UpdateFlags field to indicate which parts
// of the map are present in the packet. Because iota starts at 0 and each
// constant is 1 << (iota + 1), the values are 2, 4 and 8 respectively.
const (
	MapUpdateFlagTexture = 1 << (iota + 1)
	MapUpdateFlagDecoration
	MapUpdateFlagInitialisation
)

// ClientBoundMapItemData is sent by the server to the client to update the data of a map shown to the client.
// It is sent with a combination of flags that specify what data is updated.
// The ClientBoundMapItemData packet may be used to update specific parts of the map only. It is not required
// to send the entire map each time when updating one part.
type ClientBoundMapItemData struct {
	// MapID is the unique identifier that represents the map that is updated over network. It remains
	// consistent across sessions.
	MapID int64
	// UpdateFlags is a combination of flags found above that indicate what parts of the map should be updated
	// client-side.
	UpdateFlags uint32
	// Dimension is the dimension of the map that should be updated, for example the overworld (0), the nether
	// (1) or the end (2).
	Dimension byte
	// LockedMap specifies if the map that was updated was a locked map, which may be done using a cartography
	// table.
	LockedMap bool
	// Scale is the scale of the map as it is shown in-game. It is written when any of the MapUpdateFlags are
	// set to the UpdateFlags field.
	Scale byte

	// The following fields apply only for the MapUpdateFlagInitialisation.

	// MapsIncludedIn holds an array of map IDs that the map updated is included in. This has to do with the
	// scale of the map: Each map holds its own map ID and all map IDs of maps that include this map and have
	// a bigger scale. This means that a scale 0 map will have 5 map IDs in this slice, whereas a scale 4 map
	// will have only 1 (its own).
	// The actual use of this field remains unknown.
	MapsIncludedIn []int64

	// The following fields apply only for the MapUpdateFlagDecoration.

	// TrackedObjects is a list of tracked objects on the map, which may either be entities or blocks. The
	// client makes sure these tracked objects are actually tracked. (position updated etc.)
	TrackedObjects []protocol.MapTrackedObject
	// Decorations is a list of fixed decorations located on the map. The decorations will not change
	// client-side, unless the server updates them.
	Decorations []protocol.MapDecoration

	// The following fields apply only for the MapUpdateFlagTexture update flag.

	// Height is the height of the texture area that was updated. The height may be a subset of the total
	// height of the map.
	Height int32
	// Width is the width of the texture area that was updated. The width may be a subset of the total width
	// of the map.
	Width int32
	// XOffset is the X offset in pixels at which the updated texture area starts. From this X, the updated
	// texture will extend exactly Width pixels to the right.
	XOffset int32
	// YOffset is the Y offset in pixels at which the updated texture area starts. From this Y, the updated
	// texture will extend exactly Height pixels up.
	YOffset int32
	// Pixels is a list of pixel colours for the new texture of the map. It is indexed as Pixels[y][x], with
	// the length of the outer slice having to be exactly Height long and the inner slices exactly Width long.
	Pixels [][]color.RGBA
}

// ID ...
func (*ClientBoundMapItemData) ID() uint32 {
	return IDClientBoundMapItemData
}

// Marshal ...
func (pk *ClientBoundMapItemData) Marshal(buf *bytes.Buffer) {
	// The map ID, update flags, dimension and lock state are always present,
	// regardless of which parts of the map are updated. Write errors are
	// deliberately discarded: writes to a bytes.Buffer do not fail.
	_ = protocol.WriteVarint64(buf, pk.MapID)
	_ = protocol.WriteVaruint32(buf, pk.UpdateFlags)
	_ = binary.Write(buf, binary.LittleEndian, pk.Dimension)
	_ = binary.Write(buf, binary.LittleEndian, pk.LockedMap)
	if pk.UpdateFlags&MapUpdateFlagInitialisation != 0 {
		// Initialisation data: the length-prefixed list of map IDs that the
		// updated map is included in.
		_ = protocol.WriteVaruint32(buf, uint32(len(pk.MapsIncludedIn)))
		for _, mapID := range pk.MapsIncludedIn {
			_ = protocol.WriteVarint64(buf, mapID)
		}
	}
	// The scale is written whenever any of the three update flags is set.
	if pk.UpdateFlags&(MapUpdateFlagInitialisation|MapUpdateFlagDecoration|MapUpdateFlagTexture) != 0 {
		_ = binary.Write(buf, binary.LittleEndian, pk.Scale)
	}
	if pk.UpdateFlags&MapUpdateFlagDecoration != 0 {
		// Decoration data: tracked objects followed by fixed decorations,
		// both length-prefixed.
		_ = protocol.WriteVaruint32(buf, uint32(len(pk.TrackedObjects)))
		for _, obj := range pk.TrackedObjects {
			_ = protocol.WriteMapTrackedObj(buf, obj)
		}
		_ = protocol.WriteVaruint32(buf, uint32(len(pk.Decorations)))
		for _, decoration := range pk.Decorations {
			_ = protocol.WriteMapDeco(buf, decoration)
		}
	}
	if pk.UpdateFlags&MapUpdateFlagTexture != 0 {
		// Some basic validation for the values passed into the packet.
		if pk.Width <= 0 || pk.Height <= 0 {
			panic("invalid map texture update: width and height must be at least 1")
		}
		_ = protocol.WriteVarint32(buf, pk.Width)
		_ = protocol.WriteVarint32(buf, pk.Height)
		_ = protocol.WriteVarint32(buf, pk.XOffset)
		_ = protocol.WriteVarint32(buf, pk.YOffset)
		// The total pixel count precedes the pixel data itself.
		_ = protocol.WriteVaruint32(buf, uint32(pk.Width*pk.Height))
		if len(pk.Pixels) != int(pk.Height) {
			panic("invalid map texture update: length of outer pixels array must be equal to height")
		}
		// Pixels are written row by row: Pixels[y][x].
		for y := int32(0); y < pk.Height; y++ {
			if len(pk.Pixels[y]) != int(pk.Width) {
				panic("invalid map texture update: length of inner pixels array must be equal to width")
			}
			for x := int32(0); x < pk.Width; x++ {
				_ = protocol.WriteVarRGBA(buf, pk.Pixels[y][x])
			}
		}
	}
}

// Unmarshal ...
func (pk *ClientBoundMapItemData) Unmarshal(buf *bytes.Buffer) error {
	// The header fields are always present; decode them first so that the
	// update flags can guide the rest of the decoding.
	if err := chainErr(
		protocol.Varint64(buf, &pk.MapID),
		protocol.Varuint32(buf, &pk.UpdateFlags),
		binary.Read(buf, binary.LittleEndian, &pk.Dimension),
		binary.Read(buf, binary.LittleEndian, &pk.LockedMap),
	); err != nil {
		return err
	}
	// count is reused for every length prefix decoded below.
	var count uint32
	if pk.UpdateFlags&MapUpdateFlagInitialisation != 0 {
		if err := protocol.Varuint32(buf, &count); err != nil {
			return err
		}
		pk.MapsIncludedIn = make([]int64, count)
		for i := uint32(0); i < count; i++ {
			if err := protocol.Varint64(buf, &pk.MapsIncludedIn[i]); err != nil {
				return err
			}
		}
	}
	// The scale is present whenever any of the three update flags is set,
	// mirroring Marshal.
	if pk.UpdateFlags&(MapUpdateFlagInitialisation|MapUpdateFlagDecoration|MapUpdateFlagTexture) != 0 {
		if err := binary.Read(buf, binary.LittleEndian, &pk.Scale); err != nil {
			return err
		}
	}
	if pk.UpdateFlags&MapUpdateFlagDecoration != 0 {
		if err := protocol.Varuint32(buf, &count); err != nil {
			return err
		}
		pk.TrackedObjects = make([]protocol.MapTrackedObject, count)
		for i := uint32(0); i < count; i++ {
			if err := protocol.MapTrackedObj(buf, &pk.TrackedObjects[i]); err != nil {
				return err
			}
		}
		if err := protocol.Varuint32(buf, &count); err != nil {
			return err
		}
		pk.Decorations = make([]protocol.MapDecoration, count)
		for i := uint32(0); i < count; i++ {
			if err := protocol.MapDeco(buf, &pk.Decorations[i]); err != nil {
				return err
			}
		}
	}
	if pk.UpdateFlags&MapUpdateFlagTexture != 0 {
		if err := chainErr(
			protocol.Varint32(buf, &pk.Width),
			protocol.Varint32(buf, &pk.Height),
			protocol.Varint32(buf, &pk.XOffset),
			protocol.Varint32(buf, &pk.YOffset),
			protocol.Varuint32(buf, &count),
		); err != nil {
			return err
		}
		// Make sure the values we decoded are correct: We do some basic sanity checks.
		if pk.Width <= 0 || pk.Height <= 0 {
			return fmt.Errorf("invalid map texture size: width or height is below 1")
		}
		// The advertised pixel count must agree with Width*Height, so a
		// malicious length prefix cannot force an oversized allocation below.
		if uint32(pk.Width*pk.Height) != count {
			return fmt.Errorf("invalid map pixel count: %v * %v = %v, not %v", pk.Width, pk.Height, pk.Width*pk.Height, count)
		}
		// Pixels are decoded row by row into Pixels[y][x].
		pk.Pixels = make([][]color.RGBA, pk.Height)
		for y := int32(0); y < pk.Height; y++ {
			pk.Pixels[y] = make([]color.RGBA, pk.Width)
			for x := int32(0); x < pk.Width; x++ {
				if err := protocol.VarRGBA(buf, &pk.Pixels[y][x]); err != nil {
					return err
				}
			}
		}
	}
	return nil
}
minecraft/protocol/packet/client_bound_map_item_data.go
0.608129
0.437463
client_bound_map_item_data.go
starcoder
package intrusive

import "unsafe"

// List presents a doubly-linked list.
// The list is circular around an embedded sentinel node (the field named
// nil): nil.next is the head and nil.prev is the tail, so an empty list has
// both pointing back at the sentinel. The zero value is not ready for use;
// call Init first.
type List struct {
	nil ListNode
}

// Init initializes the list and then returns the list.
func (l *List) Init() *List {
	l.nil = ListNode{&l.nil, &l.nil}
	return l
}

// AppendNode inserts the given node at the end of the list.
// The given node must be not null.
func (l *List) AppendNode(node *ListNode) {
	node.insert(l.Tail(), &l.nil)
}

// PrependNode inserts the given node at the beginning of the list.
// The given node must be not null.
func (l *List) PrependNode(node *ListNode) {
	node.insert(&l.nil, l.Head())
}

// AppendNodes removes all nodes of the given other list and then inserts
// the nodes at the end of the list.
func (l *List) AppendNodes(other *List) {
	if other.IsEmpty() {
		return
	}
	insertListSlice(other.Head(), other.Tail(), l.Tail(), &l.nil)
	// The spliced nodes now belong to l; reset other back to empty.
	other.Init()
}

// PrependNodes removes all nodes of the given other list and then inserts
// the nodes at the beginning of the list.
func (l *List) PrependNodes(other *List) {
	if other.IsEmpty() {
		return
	}
	insertListSlice(other.Head(), other.Tail(), &l.nil, l.Head())
	other.Init()
}

// AppendSlice inserts the given slice at the end of the list.
// The given slice must not contain null node.
func (l *List) AppendSlice(firstNode *ListNode, lastNode *ListNode) {
	insertListSlice(firstNode, lastNode, l.Tail(), &l.nil)
}

// PrependSlice inserts the given slice at the beginning of the list.
// The given slice must not contain null node.
func (l *List) PrependSlice(firstNode *ListNode, lastNode *ListNode) {
	insertListSlice(firstNode, lastNode, &l.nil, l.Head())
}

// Foreach returns an iterator over all nodes in the list in order.
func (l *List) Foreach() *ListIterator {
	return new(ListIterator).Init(l)
}

// ForeachReverse returns an iterator over all nodes in the list in
// reverse order.
func (l *List) ForeachReverse() *ListReverseIterator {
	return new(ListReverseIterator).Init(l)
}

// IsEmpty indicates whether the list is empty.
func (l *List) IsEmpty() bool {
	return l.Tail() == &l.nil
}

// Tail returns the last node of the list.
// The last node may be null (using *ListNode.IsNull to test)
// when the list is empty.
func (l *List) Tail() *ListNode {
	return l.nil.prev
}

// Head returns the first node of the list.
// The first node may be null (using *ListNode.IsNull to test)
// when the list is empty.
func (l *List) Head() *ListNode {
	return l.nil.next
}

// ListNode represents a node in a doubly-linked list.
type ListNode struct {
	prev, next *ListNode
}

// InsertBefore inserts the node before the given other node.
// Inserting the node before a null node is legal as if inserting
// at the end of a list.
func (ln *ListNode) InsertBefore(other *ListNode) {
	ln.insert(other.prev, other)
}

// InsertAfter inserts the node after the given other node.
// Inserting the node after a null node is legal as if inserting
// at the beginning of a list.
func (ln *ListNode) InsertAfter(other *ListNode) {
	ln.insert(other, other.next)
}

// Remove removes the node from a list.
// The node must be in a list.
// Note that the removed node's own prev/next pointers are left untouched;
// only its neighbours are relinked.
func (ln *ListNode) Remove() {
	ln.prev.setNext(ln.next)
}

// GetContainer returns a pointer to the container which contains
// the ListNode field about the node at the given offset.
// The node must be not null.
// The given offset is of the ListNode field in the container.
func (ln *ListNode) GetContainer(offset uintptr) unsafe.Pointer {
	return unsafe.Pointer(uintptr(unsafe.Pointer(ln)) - offset)
}

// IsNull indicates whether the node is null (the nil of the given list).
func (ln *ListNode) IsNull(l *List) bool {
	return ln == &l.nil
}

// IsReset indicates whether the node is reset (with a zero value).
func (ln *ListNode) IsReset() bool {
	return ln.prev == nil
}

// Prev returns the previous node to the node.
// Retrieving previous node to a null node is legal as if
// retrieving the tail of a list.
// The previous node may be null (using *ListNode.IsNull to test)
// when the node is at the beginning of a list.
func (ln *ListNode) Prev() *ListNode {
	return ln.prev
}

// Next returns the next node to the node.
// Retrieving the next node to a null node is legal as if
// retrieving the head of a list.
// The next node may be null (using *ListNode.IsNull to test)
// when the node is at the end of a list.
func (ln *ListNode) Next() *ListNode {
	return ln.next
}

// insert links ln between prev and next.
func (ln *ListNode) insert(prev *ListNode, next *ListNode) {
	ln.setPrev(prev)
	ln.setNext(next)
}

// setPrev links ln and prev to each other in both directions.
func (ln *ListNode) setPrev(prev *ListNode) {
	ln.prev = prev
	prev.next = ln
}

// setNext links ln and next to each other in both directions.
func (ln *ListNode) setNext(next *ListNode) {
	ln.next = next
	next.prev = ln
}

// ListIterator represents an iterator over all nodes in
// a doubly-linked list.
type ListIterator struct {
	listIteratorBase
}

// Init initializes the iterator and then returns the iterator.
func (li *ListIterator) Init(l *List) *ListIterator {
	li.l = l
	li.node = l.Head()
	li.nextNode = li.node.Next()
	return li
}

// Advance advances the iterator to the next node.
func (li *ListIterator) Advance() {
	li.advance(li.nextNode.Next())
}

// ListReverseIterator represents an iterator over all nodes in
// a doubly-linked list in reverse order.
type ListReverseIterator struct {
	listIteratorBase
}

// Init initializes the iterator and then returns the iterator.
func (lri *ListReverseIterator) Init(l *List) *ListReverseIterator {
	lri.l = l
	lri.node = l.Tail()
	lri.nextNode = lri.node.Prev()
	return lri
}

// Advance advances the iterator to the next node.
func (lri *ListReverseIterator) Advance() {
	lri.advance(lri.nextNode.Prev())
}

// InsertListSliceBefore inserts the given slice before given list node.
// Inserting the given slice before a null node is legal as if inserting
// at the end of a list.
// The given node must be in a list.
func InsertListSliceBefore(firstListNode *ListNode, lastListNode *ListNode, listNode *ListNode) {
	insertListSlice(firstListNode, lastListNode, listNode.prev, listNode)
}

// InsertListSliceAfter inserts the given slice after given list node.
// Inserting the given slice after a null node is legal as if inserting
// at the beginning of a list.
// The given node must be in a list.
func InsertListSliceAfter(firstListNode *ListNode, lastListNode *ListNode, listNode *ListNode) {
	insertListSlice(firstListNode, lastListNode, listNode, listNode.next)
}

// RemoveListSlice removes the given slice from a list.
// The given slice must be in a list.
func RemoveListSlice(firstListNode *ListNode, lastListNode *ListNode) {
	firstListNode.prev.setNext(lastListNode.next)
}

// listIteratorBase holds the state shared by both iterator directions.
type listIteratorBase struct {
	l              *List
	node, nextNode *ListNode
}

// IsAtEnd indicates whether the iteration has no more nodes.
func (lib *listIteratorBase) IsAtEnd() bool {
	return lib.node.IsNull(lib.l)
}

// Node returns the current node in the iteration.
// It's safe to erase the current node for the next node
// to advance to is pre-cached. That will be useful to
// destroy the entire list while iterating through the list.
func (lib *listIteratorBase) Node() *ListNode {
	return lib.node
}

// advance steps to the pre-cached node and caches the one after it.
func (lib *listIteratorBase) advance(nextNode *ListNode) {
	lib.node = lib.nextNode
	lib.nextNode = nextNode
}

// insertListSlice splices the slice [firstListNode, lastListNode] in between
// firstListNodePrev and lastListNodeNext.
func insertListSlice(firstListNode *ListNode, lastListNode *ListNode, firstListNodePrev *ListNode, lastListNodeNext *ListNode) {
	firstListNode.setPrev(firstListNodePrev)
	lastListNode.setNext(lastListNodeNext)
}
list.go
0.863392
0.504394
list.go
starcoder
// Package slice provides pop helpers for slices of every built-in type.
package slice

import "errors"

// PopBool removes and returns the last value of a bool slice together with
// the remaining slice. An error is returned for a nil or empty slice.
func PopBool(a []bool) (bool, []bool, error) {
	n := len(a)
	if n == 0 {
		return false, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopByte removes and returns the last value of a byte slice together with
// the remaining slice. An error is returned for a nil or empty slice.
func PopByte(a []byte) (byte, []byte, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopComplex128 removes and returns the last value of a complex128 slice
// together with the remaining slice. An error is returned for a nil or empty slice.
func PopComplex128(a []complex128) (complex128, []complex128, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopComplex64 removes and returns the last value of a complex64 slice
// together with the remaining slice. An error is returned for a nil or empty slice.
func PopComplex64(a []complex64) (complex64, []complex64, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopFloat32 removes and returns the last value of a float32 slice together
// with the remaining slice. An error is returned for a nil or empty slice.
func PopFloat32(a []float32) (float32, []float32, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopFloat64 removes and returns the last value of a float64 slice together
// with the remaining slice. An error is returned for a nil or empty slice.
func PopFloat64(a []float64) (float64, []float64, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopInt removes and returns the last value of an int slice together with
// the remaining slice. An error is returned for a nil or empty slice.
func PopInt(a []int) (int, []int, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopInt16 removes and returns the last value of an int16 slice together with
// the remaining slice. An error is returned for a nil or empty slice.
func PopInt16(a []int16) (int16, []int16, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopInt32 removes and returns the last value of an int32 slice together with
// the remaining slice. An error is returned for a nil or empty slice.
func PopInt32(a []int32) (int32, []int32, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopInt64 removes and returns the last value of an int64 slice together with
// the remaining slice. An error is returned for a nil or empty slice.
func PopInt64(a []int64) (int64, []int64, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopInt8 removes and returns the last value of an int8 slice together with
// the remaining slice. An error is returned for a nil or empty slice.
func PopInt8(a []int8) (int8, []int8, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopRune removes and returns the last value of a rune slice together with
// the remaining slice. An error is returned for a nil or empty slice.
func PopRune(a []rune) (rune, []rune, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopString removes and returns the last value of a string slice together
// with the remaining slice. An error is returned for a nil or empty slice.
func PopString(a []string) (string, []string, error) {
	n := len(a)
	if n == 0 {
		return "", nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopUint removes and returns the last value of a uint slice together with
// the remaining slice. An error is returned for a nil or empty slice.
func PopUint(a []uint) (uint, []uint, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopUint16 removes and returns the last value of a uint16 slice together
// with the remaining slice. An error is returned for a nil or empty slice.
func PopUint16(a []uint16) (uint16, []uint16, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopUint32 removes and returns the last value of a uint32 slice together
// with the remaining slice. An error is returned for a nil or empty slice.
func PopUint32(a []uint32) (uint32, []uint32, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopUint64 removes and returns the last value of a uint64 slice together
// with the remaining slice. An error is returned for a nil or empty slice.
func PopUint64(a []uint64) (uint64, []uint64, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopUint8 removes and returns the last value of a uint8 slice together with
// the remaining slice. An error is returned for a nil or empty slice.
func PopUint8(a []uint8) (uint8, []uint8, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}

// PopUintptr removes and returns the last value of a uintptr slice together
// with the remaining slice. An error is returned for a nil or empty slice.
func PopUintptr(a []uintptr) (uintptr, []uintptr, error) {
	n := len(a)
	if n == 0 {
		return 0, nil, errors.New("Cannot pop from a nil or empty slice")
	}
	return a[n-1], a[:n-1], nil
}
pop.go
0.837786
0.58163
pop.go
starcoder
// Package minecontract holds the JSON ABI definitions used to interact with
// the mine and lockup smart contracts.
package minecontract

// MineABI is the JSON ABI of the mine contract: miner activation, trust and
// deposit management, mining-reward accounting and withdrawal entry points,
// plus the Miner, OwnershipTransferred, Trust and Withdraw events.
// NOTE(review): the "signatrues" spelling in the deposit/inactives parameter
// names presumably mirrors the deployed contract's ABI — verify against the
// contract before "fixing" it here.
const MineABI = `[ { "inputs": [ { "internalType": "bytes32", "name": "", "type": "bytes32" } ], "name": "freezeOf", "outputs": [ { "internalType": "bool", "name": "", "type": "bool" } ], "stateMutability": "view", "type": "function" }, { "inputs": [ { "internalType": "bytes32", "name": "node", "type": "bytes32" } ], "name": "unfreeze", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "anonymous": false, "inputs": [ { "indexed": true, "internalType": "bytes32", "name": "node", "type": "bytes32" }, { "indexed": true, "internalType": "bool", "name": "deposit", "type": "bool" }, { "indexed": false, "internalType": "bool", "name": "active", "type": "bool" } ], "name": "Miner", "type": "event" }, { "anonymous": false, "inputs": [ { "indexed": true, "internalType": "address", "name": "previousOwner", "type": "address" }, { "indexed": true, "internalType": "address", "name": "newOwner", "type": "address" } ], "name": "OwnershipTransferred", "type": "event" }, { "anonymous": false, "inputs": [ { "indexed": true, "internalType": "bytes32", "name": "node", "type": "bytes32" }, { "indexed": true, "internalType": "bool", "name": "trust", "type": "bool" } ], "name": "Trust", "type": "event" }, { "anonymous": false, "inputs": [ { "indexed": true, "internalType": "bytes32", "name": "user", "type": "bytes32" }, { "indexed": false, "internalType": "uint256", "name": "reward", "type": "uint256" } ], "name": "Withdraw", "type": "event" }, { "inputs": [ { "internalType": "bytes32", "name": "node", "type": "bytes32" }, { "internalType": "uint256", "name": "cate", "type": "uint256" }, { "internalType": "uint256", "name": "deadline", "type": "uint256" }, { "internalType": "bytes", "name": "signatures", "type": "bytes" } ], "name": "active", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [], "name": "blockReward", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [ { "internalType": "bytes32", "name": "node", "type": "bytes32" } ], "name": "cashDeposit", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "name": "cycles", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [ { "internalType": "bytes32", "name": "node", "type": "bytes32" }, { "internalType": "uint256", "name": "cate", "type": "uint256" }, { "internalType": "uint256", "name": "price", "type": "uint256" }, { "internalType": "uint256", "name": "deadline", "type": "uint256" }, { "internalType": "bytes", "name": "signatrues", "type": "bytes" } ], "name": "deposit", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ { "internalType": "bytes32", "name": "node", "type": "bytes32" }, { "internalType": "uint256", "name": "deadline", "type": "uint256" }, { "internalType": "bytes", "name": "signatures", "type": "bytes" } ], "name": "dishonesty", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ { "internalType": "bytes32", "name": "node", "type": "bytes32" } ], "name": "expireOf", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [ { "internalType": "bytes32", "name": "node", "type": "bytes32" }, { "internalType": "uint256", "name": "deadline", "type": "uint256" }, { "internalType": "bytes", "name": "signatures", "type": "bytes" } ], "name": "inactive", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ { "internalType": "bytes32[]", "name": "nodes", "type": "bytes32[]" }, { "internalType": "uint256", "name": "deadline", "type": "uint256" }, { "internalType": "bytes", "name": "signatrues", "type": "bytes" } ], "name": "inactives", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ { "internalType": "address", "name": "_token", "type": "address" } ], "name": "init", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [], "name": "lastMintRewardPeriod", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [], "name": "lastUpdateBlock", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [], "name": "lockup", "outputs": [ { "internalType": "contract ILockup", "name": "", "type": "address" } ], "stateMutability": "view", "type": "function" }, { "inputs": [], "name": "minerNews", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [], "name": "minerNums", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [ { "internalType": "bytes32", "name": "", "type": "bytes32" } ], "name": "miners", "outputs": [ { "internalType": "bool", "name": "active", "type": "bool" }, { "internalType": "bool", "name": "trusted", "type": "bool" }, { "internalType": "bool", "name": "deposit", "type": "bool" }, { "internalType": "uint256", "name": "reward", "type": "uint256" }, { "internalType": "uint256", "name": "withdraw", "type": "uint256" }, { "internalType": "address", "name": "received", "type": "address" } ], "stateMutability": "view", "type": "function" }, { "inputs": [ { "internalType": "uint256", "name": "height", "type": "uint256" }, { "internalType": "uint256", "name": "rate", "type": "uint256" } ], "name": "notifyMine", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [], "name": "owner", "outputs": [ { "internalType": "address", "name": "", "type": "address" } ], "stateMutability": "view", "type": "function" }, { "inputs": [], "name": "periodFinish", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [], "name": "renounceOwnership", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ { "internalType": "bytes32", "name": "node", "type": "bytes32" } ], "name": "reward", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [], "name": "rewardOfMint", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" }, { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [], "name": "rewardOfMintStored", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [], "name": "startBlock", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [], "name": "token", "outputs": [ { "internalType": "contract IERC20", "name": "", "type": "address" } ], "stateMutability": "view", "type": "function" }, { "inputs": [], "name": "totalRewardStored", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [ { "internalType": "address", "name": "newOwner", "type": "address" } ], "name": "transferOwnership", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ { "internalType": "bytes32", "name": "node", "type": "bytes32" } ], "name": "trust", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [], "name": "trustNums", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [ { "internalType": "address", "name": "", "type": "address" } ], "name": "trusts", "outputs": [ { "internalType": "bool", "name": "", "type": "bool" } ], "stateMutability": "view", "type": "function" }, { "inputs": [ { "internalType": "bytes32", "name": "node", "type": "bytes32" } ], "name": "untrust", "outputs": [], "stateMutability": "nonpayable", "type": "function" }, { "inputs": [ { "internalType": "bytes32", "name": "", "type": "bytes32" } ], "name": "userRewardOfMintPaid", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [], "name": "validateTrusts", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "pure", "type": "function" }, { "inputs": [ { "internalType": "bytes32", "name": "node", "type": "bytes32" } ], "name": "withdraw", "outputs": [], "stateMutability": "nonpayable", "type": "function" } ]`

// LockupABI is the JSON ABI of the lockup contract, exposing the read-only
// depositOf and expireOf views keyed by node.
const LockupABI = `[ { "inputs": [ { "internalType": "bytes32", "name": "node", "type": "bytes32" } ], "name": "depositOf", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" }, { "inputs": [ { "internalType": "bytes32", "name": "node", "type": "bytes32" } ], "name": "expireOf", "outputs": [ { "internalType": "uint256", "name": "", "type": "uint256" } ], "stateMutability": "view", "type": "function" } ]`
pkg/mine/minecontract/contract_abi.go
0.540924
0.439026
contract_abi.go
starcoder
package grid import ( . "github.com/golangee/forms" ) const Path = "/demo/grid" type ContentView struct { *VStack } func NewContentView() *ContentView { view := &ContentView{} view.VStack = NewVStack().AddViews( NewText("Grid").Style(Font(Headline1)), NewText("A grid allows complex grid based layouts.").Style(Font(Body)), NewGrid(). SetAreas([][]string{ {"header", "header", "header"}, {"menu", "main", "main"}, {"menu", "footer", "footer"}, }). AddView(NewText("header").Style(BackgroundColor(Red50)), GridLayoutParams{Area: "header"}). AddView(NewText("menu").Style(BackgroundColor(Blue50)), GridLayoutParams{Area: "menu"}). AddView(NewText("main").Style(BackgroundColor(Yellow50)), GridLayoutParams{Area: "main"}). AddView(NewText("footer").Style(BackgroundColor(Green50)), GridLayoutParams{Area: "footer"}). Style(BackgroundColor(BlueGray50), Padding(), Repel()). SetGap(DefaultPadding), NewText("Example with automatic and fractional column width:").Style(Font(Body)), NewGrid().AddViews( NewText("left - wrap content (auto)"). Style(BackgroundColor(Red50)), NewText("mid - match parent (1fr)"). Style(BackgroundColor(Gray50)), NewText("right - wrap content (auto)"). Style(BackgroundColor(Brown50)), ).SetColumnWidths(Auto(), Fraction(1), Auto()). Style(BackgroundColor(BlueGray50), Padding()). SetGap(DefaultPadding), NewCode(GoSyntax, code), ) return view } func FromQuery(Query) View { return NewContentView() } const code = `package grid import ( . "github.com/golangee/forms" ) const Path = "/demo/grid" type ContentView struct { *VStack } func NewContentView() *ContentView { view := &ContentView{} view.VStack = NewVStack().AddViews( NewText("Grid").Style(Font(Headline1)), NewText("A grid allows complex grid based layouts.").Style(Font(Body)), NewGrid(). SetAreas([][]string{ {"header", "header", "header"}, {"menu", "main", "main"}, {"menu", "footer", "footer"}, }). AddView(NewText("header").Style(BackgroundColor(Red50)), GridLayoutParams{Area: "header"}). 
AddView(NewText("menu").Style(BackgroundColor(Blue50)), GridLayoutParams{Area: "menu"}). AddView(NewText("main").Style(BackgroundColor(Yellow50)), GridLayoutParams{Area: "main"}). AddView(NewText("footer").Style(BackgroundColor(Green50)), GridLayoutParams{Area: "footer"}). Style(BackgroundColor(BlueGray50), Padding(), Repel()). SetGap(DefaultPadding), NewText("Example with automatic and fractional column width:").Style(Font(Body)), NewGrid().AddViews( NewText("left - wrap content (auto)"). Style(BackgroundColor(Red50)), NewText("mid - match parent (1fr)"). Style(BackgroundColor(Blue50)), NewText("right - wrap content (auto)"). Style(BackgroundColor(Yellow50)), ).SetColumnWidths(Auto(), Fraction(1), Auto()). Style(BackgroundColor(BlueGray50), Padding()). SetGap(DefaultPadding), NewCode(GoSyntax, code), ) return view }`
demo/grid/component.go
0.707708
0.41561
component.go
starcoder
package quasigo import "strconv" func _() { // An "invalid array index" compiler error signifies that the constant values have changed. // Re-run the stringer command to generate them again. var x [1]struct{} _ = x[opInvalid-0] _ = x[opPop-1] _ = x[opDup-2] _ = x[opPushParam-3] _ = x[opPushIntParam-4] _ = x[opPushLocal-5] _ = x[opPushIntLocal-6] _ = x[opPushFalse-7] _ = x[opPushTrue-8] _ = x[opPushConst-9] _ = x[opPushIntConst-10] _ = x[opConvIntToIface-11] _ = x[opSetLocal-12] _ = x[opSetIntLocal-13] _ = x[opIncLocal-14] _ = x[opDecLocal-15] _ = x[opReturnTop-16] _ = x[opReturnIntTop-17] _ = x[opReturnFalse-18] _ = x[opReturnTrue-19] _ = x[opReturn-20] _ = x[opJump-21] _ = x[opJumpFalse-22] _ = x[opJumpTrue-23] _ = x[opSetVariadicLen-24] _ = x[opCallNative-25] _ = x[opIsNil-26] _ = x[opIsNotNil-27] _ = x[opNot-28] _ = x[opEqInt-29] _ = x[opNotEqInt-30] _ = x[opGtInt-31] _ = x[opGtEqInt-32] _ = x[opLtInt-33] _ = x[opLtEqInt-34] _ = x[opEqString-35] _ = x[opNotEqString-36] _ = x[opConcat-37] _ = x[opAdd-38] _ = x[opSub-39] _ = x[opStringSlice-40] _ = x[opStringSliceFrom-41] _ = x[opStringSliceTo-42] _ = x[opStringLen-43] } const _opcode_name = "InvalidPopDupPushParamPushIntParamPushLocalPushIntLocalPushFalsePushTruePushConstPushIntConstConvIntToIfaceSetLocalSetIntLocalIncLocalDecLocalReturnTopReturnIntTopReturnFalseReturnTrueReturnJumpJumpFalseJumpTrueSetVariadicLenCallNativeIsNilIsNotNilNotEqIntNotEqIntGtIntGtEqIntLtIntLtEqIntEqStringNotEqStringConcatAddSubStringSliceStringSliceFromStringSliceToStringLen" var _opcode_index = [...]uint16{0, 7, 10, 13, 22, 34, 43, 55, 64, 72, 81, 93, 107, 115, 126, 134, 142, 151, 163, 174, 184, 190, 194, 203, 211, 225, 235, 240, 248, 251, 256, 264, 269, 276, 281, 288, 296, 307, 313, 316, 319, 330, 345, 358, 367} func (i opcode) String() string { if i >= opcode(len(_opcode_index)-1) { return "opcode(" + strconv.FormatInt(int64(i), 10) + ")" } return _opcode_name[_opcode_index[i]:_opcode_index[i+1]] }
vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go
0.533884
0.422445
opcode_string.go
starcoder
package models import ( i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91 "github.com/microsoft/kiota-abstractions-go/serialization" ) // IdentityProtectionRoot type IdentityProtectionRoot struct { // Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. additionalData map[string]interface{} // Risk detection in Azure AD Identity Protection and the associated information about the detection. riskDetections []RiskDetectionable // Azure AD service principals that are at risk. riskyServicePrincipals []RiskyServicePrincipalable // Users that are flagged as at-risk by Azure AD Identity Protection. riskyUsers []RiskyUserable // Represents information about detected at-risk service principals in an Azure AD tenant. servicePrincipalRiskDetections []ServicePrincipalRiskDetectionable } // NewIdentityProtectionRoot instantiates a new IdentityProtectionRoot and sets the default values. func NewIdentityProtectionRoot()(*IdentityProtectionRoot) { m := &IdentityProtectionRoot{ } m.SetAdditionalData(make(map[string]interface{})); return m } // CreateIdentityProtectionRootFromDiscriminatorValue creates a new instance of the appropriate class based on discriminator value func CreateIdentityProtectionRootFromDiscriminatorValue(parseNode i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, error) { return NewIdentityProtectionRoot(), nil } // GetAdditionalData gets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. 
func (m *IdentityProtectionRoot) GetAdditionalData()(map[string]interface{}) { if m == nil { return nil } else { return m.additionalData } } // GetFieldDeserializers the deserialization information for the current model func (m *IdentityProtectionRoot) GetFieldDeserializers()(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) { res := make(map[string]func(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode)(error)) res["riskDetections"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateRiskDetectionFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]RiskDetectionable, len(val)) for i, v := range val { res[i] = v.(RiskDetectionable) } m.SetRiskDetections(res) } return nil } res["riskyServicePrincipals"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateRiskyServicePrincipalFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]RiskyServicePrincipalable, len(val)) for i, v := range val { res[i] = v.(RiskyServicePrincipalable) } m.SetRiskyServicePrincipals(res) } return nil } res["riskyUsers"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateRiskyUserFromDiscriminatorValue) if err != nil { return err } if val != nil { res := make([]RiskyUserable, len(val)) for i, v := range val { res[i] = v.(RiskyUserable) } m.SetRiskyUsers(res) } return nil } res["servicePrincipalRiskDetections"] = func (n i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.ParseNode) error { val, err := n.GetCollectionOfObjectValues(CreateServicePrincipalRiskDetectionFromDiscriminatorValue) if err != nil { return err } if val != nil { res := 
make([]ServicePrincipalRiskDetectionable, len(val)) for i, v := range val { res[i] = v.(ServicePrincipalRiskDetectionable) } m.SetServicePrincipalRiskDetections(res) } return nil } return res } // GetRiskDetections gets the riskDetections property value. Risk detection in Azure AD Identity Protection and the associated information about the detection. func (m *IdentityProtectionRoot) GetRiskDetections()([]RiskDetectionable) { if m == nil { return nil } else { return m.riskDetections } } // GetRiskyServicePrincipals gets the riskyServicePrincipals property value. Azure AD service principals that are at risk. func (m *IdentityProtectionRoot) GetRiskyServicePrincipals()([]RiskyServicePrincipalable) { if m == nil { return nil } else { return m.riskyServicePrincipals } } // GetRiskyUsers gets the riskyUsers property value. Users that are flagged as at-risk by Azure AD Identity Protection. func (m *IdentityProtectionRoot) GetRiskyUsers()([]RiskyUserable) { if m == nil { return nil } else { return m.riskyUsers } } // GetServicePrincipalRiskDetections gets the servicePrincipalRiskDetections property value. Represents information about detected at-risk service principals in an Azure AD tenant. 
func (m *IdentityProtectionRoot) GetServicePrincipalRiskDetections()([]ServicePrincipalRiskDetectionable) { if m == nil { return nil } else { return m.servicePrincipalRiskDetections } } // Serialize serializes information the current object func (m *IdentityProtectionRoot) Serialize(writer i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.SerializationWriter)(error) { if m.GetRiskDetections() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetRiskDetections())) for i, v := range m.GetRiskDetections() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err := writer.WriteCollectionOfObjectValues("riskDetections", cast) if err != nil { return err } } if m.GetRiskyServicePrincipals() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetRiskyServicePrincipals())) for i, v := range m.GetRiskyServicePrincipals() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err := writer.WriteCollectionOfObjectValues("riskyServicePrincipals", cast) if err != nil { return err } } if m.GetRiskyUsers() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetRiskyUsers())) for i, v := range m.GetRiskyUsers() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err := writer.WriteCollectionOfObjectValues("riskyUsers", cast) if err != nil { return err } } if m.GetServicePrincipalRiskDetections() != nil { cast := make([]i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable, len(m.GetServicePrincipalRiskDetections())) for i, v := range m.GetServicePrincipalRiskDetections() { cast[i] = v.(i878a80d2330e89d26896388a3f487eef27b0a0e6c010c493bf80be1452208f91.Parsable) } err := writer.WriteCollectionOfObjectValues("servicePrincipalRiskDetections", cast) if err != nil { return 
err } } { err := writer.WriteAdditionalData(m.GetAdditionalData()) if err != nil { return err } } return nil } // SetAdditionalData sets the additionalData property value. Stores additional data not described in the OpenAPI description found when deserializing. Can be used for serialization as well. func (m *IdentityProtectionRoot) SetAdditionalData(value map[string]interface{})() { if m != nil { m.additionalData = value } } // SetRiskDetections sets the riskDetections property value. Risk detection in Azure AD Identity Protection and the associated information about the detection. func (m *IdentityProtectionRoot) SetRiskDetections(value []RiskDetectionable)() { if m != nil { m.riskDetections = value } } // SetRiskyServicePrincipals sets the riskyServicePrincipals property value. Azure AD service principals that are at risk. func (m *IdentityProtectionRoot) SetRiskyServicePrincipals(value []RiskyServicePrincipalable)() { if m != nil { m.riskyServicePrincipals = value } } // SetRiskyUsers sets the riskyUsers property value. Users that are flagged as at-risk by Azure AD Identity Protection. func (m *IdentityProtectionRoot) SetRiskyUsers(value []RiskyUserable)() { if m != nil { m.riskyUsers = value } } // SetServicePrincipalRiskDetections sets the servicePrincipalRiskDetections property value. Represents information about detected at-risk service principals in an Azure AD tenant. func (m *IdentityProtectionRoot) SetServicePrincipalRiskDetections(value []ServicePrincipalRiskDetectionable)() { if m != nil { m.servicePrincipalRiskDetections = value } }
models/identity_protection_root.go
0.727685
0.401923
identity_protection_root.go
starcoder
package verb // init regular verbs // each entry is a verb that we conjugate func init() { for _, present := range regular { past := VBD.solveRegex(present) pastParticiple := VBN.solveRegex(present) present3rd := VBZ.solveRegex(present) gerund := VBG.solveRegex(present) entry := []string{ present, past, pastParticiple, present3rd, gerund, } vbpTable[present] = entry vbdTable[past] = entry vbnTable[pastParticiple] = entry vbzTable[present3rd] = entry vbgTable[gerund] = entry } } var regular = []string{ "abandon", "abase", "abash", "abate", "abbreviate", "abdicate", "abduct", "abjure", "abnegate", "abominate", "abound", "abrade", "abridge", "abscond", "absent", "absolve", "absorb", "abstain", "abstract", "abuse", "abut", "accede", "accelerate", "accent", "accentuate", "accept", "acclaim", "accommodate", "accompany", "accomplish", "accord", "accost", "account", "accouter", "accredit", "accrue", "accumulate", "accuse", "accustom", "acerbate", "ache", "achieve", "acidify", "acknowledge", "acquaint", "acquiesce", "acquire", "act", "action", "activate", "actualize", "actuate", "adapt", "add", "addict", "addle", "address", "adduce", "adhere", "adjoin", "adjourn", "adjudge", "adjudicate", "adjure", "adjust", "administer", "administrate", "admire", "admonish", "adopt", "adorn", "adulterate", "adumbrate", "advance", "adventure", "advert", "advertise", "advise", "advocate", "aerate", "affect", "affiance", "affiliate", "affirm", "afflict", "afford", "afforest", "affront", "age", "aggrandize", "aggravate", "aggregate", "aggrieve", "agitate", "agnise", "agnize", "agonise", "agonize", "agree", "aid", "ail", "aim", "air", "airlift", "alarm", "alert", "alien", "alienate", "align", "aliment", "allay", "allege", "alleviate", "allocate", "allot", "allow", "alloy", "allude", "allure", "alter", "alternate", "amalgamate", "amass", "amaze", "amble", "ambuscade", "ambush", "ameliorate", "amend", "amount", "amplify", "amuse", "analyse", "analyze", "anathematize", "anatomize", "anchor", 
"angle", "anguish", "animadvert", "animate", "anneal", "annihilate", "annotate", "announce", "annoy", "annul", "annunciate", "anoint", "answer", "antagonize", "antedate", "anticipate", "antiquate", "antique", "ape", "apologise", "apologize", "apostatise", "apostatize", "appal", "appall", "apparel", "appeal", "appear", "appease", "append", "appertain", "applaud", "apply", "appoint", "apportion", "appraise", "appreciate", "apprehend", "apprise", "approach", "appropriate", "approve", "approximate", "arbitrate", "arc", "arch", "argue", "arm", "armor", "arouse", "arrange", "array", "arrest", "arrive", "arrogate", "articulate", "ascend", "ascertain", "ascribe", "ask", "asphyxiate", "assail", "assault", "assay", "assemble", "assent", "assert", "assess", "asseverate", "assign", "assimilate", "assist", "associate", "assort", "assuage", "assume", "assure", "asterisk", "astonish", "astound", "atom-bomb", "atomize", "atone", "attach", "attack", "attain", "attempt", "attend", "attenuate", "attest", "attire", "attract", "attribute", "auction", "auctioneer", "audit", "augur", "authorize", "autograph", "avail", "avenge", "avert", "avoid", "avouch", "avow", "await", "awaken", "awaken", "award", "baa", "babble", "baby", "baby-sit", "back", "backfire", "background", "backlash", "backpack", "badger", "badmouth", "baffle", "bag", "bait", "bake", "balance", "balk", "balloon", "bamboozle", "ban", "band", "bandage", "bang", "banish", "bank", "bankrupt", "banquet", "banter", "baptise", "baptize", "bar", "barbarise", "barbarize", "bard", "barde", "bare", "barf", "bargain", "bark", "barrack", "barrage", "barricade", "base", "bash", "bask", "baste", "batten", "batter", "battle", "baulk", "bawl", "bay", "beak", "beam", "beatify", "beautify", "becalm", "beckon", "becloud", "bedamn", "bedaub", "bedaze", "bedazzle", "bedeck", "bedevil", "bedevil", "beef", "beep", "beetle", "befit", "befog", "befoul", "befuddle", "beg", "beggar", "begrime", "begrudge", "beguile", "behave", "bejewel", "belabor", 
"belch", "beleaguer", "belie", "believe", "belittle", "bellow", "bellyache", "belong", "bemire", "bemoan", "bemuse", "benumb", "bequeath", "berth", "beseem", "besiege", "besmear", "besmirch", "besot", "bespatter", "best", "bestir", "bestow", "betide", "betoken", "betoken", "betray", "betroth", "better", "bevel", "bevel", "bewail", "beware", "bewhisker", "bewilder", "bewitch", "bicker", "bicycle", "bike", "bilk", "bill", "billet", "billow", "binge", "bird", "birdlime", "birdwatch", "bitch", "blab", "blabber", "black", "blackball", "blacken", "blacken", "blackguard", "blackjack", "blackmail", "blame", "blanch", "blandish", "blare", "blarney", "blaspheme", "blast", "blather", "bleach", "blear", "bleat", "blemish", "blench", "blend", "bless", "blight", "blind", "blink", "blister", "blob", "block", "blockade", "bloom", "blossom", "blot", "blubber", "bludgeon", "blueprint", "blunder", "blunt", "blush", "bluster", "board", "boast", "bob", "bobble", "bode", "body", "boil", "bolster", "bolt", "bomb", "bombard", "bond", "bone", "boo", "book", "boom", "boost", "boot", "bop", "border", "borrow", "bosom", "boss", "botch", "bother", "bottom", "bounce", "bow", "bowdlerize", "bower", "brace", "brag", "braid", "branch", "brand", "brandish", "brave", "brawl", "bray", "breach", "breast", "breastfeed", "breathe", "bribe", "brighten", "brisk", "bristle", "broach", "broaden", "broil", "bronze", "brood", "brook", "broom", "brown", "browse", "bruise", "bruit", "brush", "bubble", "buck", "buckle", "budge", "buff", "buffer", "buffet", "bug", "bugger", "bulge", "bulk", "bully", "bullyrag", "bum", "bumble", "bump", "bunch", "bunco", "bundle", "bung", "bungle", "bunk", "bunt", "burble", "burden", "burlesque", "burnish", "burp", "burrow", "burthen", "burthen", "bury", "bushel", "bushwhack", "bustle", "busy", "butcher", "butt", "buttonhole", "buzz", "bypass", "cab", "cabal", "cabbage", "cable", "cache", "cadge", "cajole", "cake", "calculate", "calibrate", "call", "calm", "calumniate", "camp", 
"campaign", "canal", "canalise", "canalize", "cancel", "candy", "cane", "canonise", "canonize", "cant", "cantillate", "canton", "canvass", "cap", "caparison", "capsize", "capsulate", "capsule", "capsulise", "capsulize", "captivate", "capture", "card", "care", "careen", "caress", "caricature", "cark", "carouse", "carp", "carry", "cart", "carve", "case", "cash", "castigate", "castrate", "cat", "catalog", "catalogue", "catapult", "catechise", "catechize", "cater", "caterwaul", "catnap", "cause", "cauterise", "cauterize", "caution", "cave", "cavil", "cavort", "cease", "cede", "celebrate", "cense", "censor", "censure", "center", "centralise", "centralize", "centre", "cerebrate", "certify", "chafe", "chaff", "chaffer", "chagrin", "chair", "chairman", "challenge", "chamfer", "champ", "champion", "chance", "change", "channel", "channel-surf", "channelise", "channelize", "chant", "char", "characterise", "characterize", "charge", "charm", "chart", "charter", "chase", "chasse", "chasten", "chastise", "chat", "chatter", "cheapen", "cheapen", "cheat", "check", "checker", "checkmate", "cheep", "cheer", "chequer", "chevvy", "chevy", "chew", "chicane", "chill", "chink", "chip", "chirp", "chirrup", "chisel", "chitter", "chivvy", "chivy", "choir", "choke", "chomp", "chop", "chord", "chouse", "christen", "christianise", "christianize", "chrome", "chuck", "chuff", "chunk", "churn", "chute", "cinch", "cipher", "circle", "circularise", "circularize", "circulate", "circumnavigate", "circumscribe", "circumvent", "circumvolve", "cite", "civilise", "civilize", "clack", "claim", "clamber", "clamor", "clamour", "clang", "clangor", "clangour", "clap", "clarify", "clash", "clasp", "class", "classify", "clatter", "claxon", "clean", "cleanse", "clear", "clench", "clew", "click", "climb", "clinch", "clink", "clip", "cloak", "clobber", "clock", "clog", "clomp", "clop", "close", "closure", "clot", "cloture", "cloud", "clown", "cloy", "club", "cluck", "clue", "clump", "clunk", "cluster", "co-occur", 
"co-operate", "coach", "coagulate", "coal", "coalesce", "coat", "cock", "cocker", "cockle", "cod", "coddle", "code", "coerce", "coggle", "cogitate", "cognise", "cognize", "cohere", "coif", "coiffe", "coiffure", "coil", "coin", "coincide", "cold-shoulder", "coldcock", "collaborate", "collapse", "collar", "collect", "collide", "colligate", "collimate", "collocate", "collude", "colly", "colonise", "colonize", "color", "colorise", "colorize", "colour", "colourise", "colourize", "comb", "combat", "combine", "combust", "comfit", "comfort", "command", "commandeer", "commemorate", "commence", "commend", "comment", "commercialise", "commercialize", "comminate", "commingle", "comminute", "commiserate", "commit", "commove", "commune", "communicate", "commute", "compact", "companion", "company", "compare", "compass", "compel", "compensate", "compete", "compile", "complain", "complete", "complexify", "complicate", "compliment", "complot", "comply", "comport", "compose", "compound", "comprehend", "compress", "comprise", "compute", "con", "conceal", "concede", "conceive", "concentrate", "conceptualize", "concern", "conciliate", "conclude", "concur", "condemn", "condense", "condescend", "condition", "condone", "conduce", "conduct", "confab", "confabulate", "confection", "confess", "confide", "confine", "confirm", "confiscate", "conflate", "conflict", "conform", "confound", "confront", "confuse", "confute", "congeal", "congest", "conglomerate", "congratulate", "conjecture", "conjoin", "conjure", "conk", "connect", "connive", "connote", "conquer", "consecrate", "consent", "conserve", "consider", "consign", "consist", "console", "consort", "conspire", "constipate", "constitute", "constrain", "constrict", "construct", "construe", "consult", "consume", "contact", "contain", "contaminate", "contemn", "contemplate", "contend", "contest", "continue", "contort", "contract", "contradict", "contrast", "contravene", "contribute", "contrive", "control", "controvert", "contuse", "convalesce", 
"convene", "converge", "converse", "convert", "convey", "convict", "convince", "convoke", "convolute", "convolve", "convulse", "coo", "cooccur", "cook", "cool", "cooperate", "coordinate", "cop", "copulate", "copy", "corkscrew", "corner", "coronate", "corrade", "correct", "correspond", "corroborate", "corrode", "corrupt", "coruscate", "cosset", "couch", "cough", "counsel", "count", "countenance", "counter", "counteract", "counterbalance", "counterfeit", "countermand", "counterpoint", "counterpoise", "countervail", "counterweight", "couple", "course", "court", "cover", "cow", "cower", "cozen", "cozen", "crab", "crack", "crackle", "cram", "cramp", "cranch", "crank", "crap", "crape", "crash", "craunch", "crave", "crawfish", "crawl", "craze", "creak", "cream", "crease", "create", "credit", "crepe", "crepitate", "crest", "criminalise", "criminalize", "criminate", "crimp", "crimson", "cringe", "crinkle", "cripple", "crisp", "crispen", "crispen", "criticise", "criticize", "critique", "croak", "crochet", "cronk", "crook", "crop", "cross", "cross-file", "crouch", "crow", "crowd", "crown", "crucify", "crumble", "crump", "crumple", "crunch", "crusade", "crush", "cry", "crystallize", "cube", "cuddle", "cudgel", "cue", "cuff", "cull", "culminate", "cultivate", "cumber", "cumulate", "cup", "curb", "cure", "curl", "curry", "curse", "curtail", "curtsey", "curtsy", "curve", "cushion", "custom-make", "customise", "customize", "cycle", "cypher", "dab", "dabble", "dally", "damage", "damn", "damp", "dampen", "dampen", "dance", "dangle", "dapple", "dare", "dart", "dash", "daub", "daunt", "dawdle", "dawn", "daydream", "daze", "dazzle", "de-escalate", "deaden", "deaf", "deafen", "deafen", "debar", "debark", "debase", "debate", "debauch", "debilitate", "debone", "debunk", "decamp", "decant", "decay", "decease", "deceive", "decelerate", "decide", "decimate", "decipher", "deck", "declaim", "declare", "decline", "decode", "decompose", "decompress", "decorate", "decouple", "decrease", "decree", 
"decriminalize", "decry", "decrypt", "dedicate", "deduce", "deduct", "deem", "deepen", "deepen", "deface", "defalcate", "defame", "defeat", "defecate", "defect", "defend", "defer", "defile", "define", "deflect", "deflower", "deform", "defraud", "defy", "degenerate", "degrade", "dehydrate", "deign", "deject", "delay", "delegate", "delete", "deliberate", "delight", "delimit", "delimitate", "delineate", "deliver", "delude", "deluge", "delve", "demand", "demarcate", "demilitarize", "demo", "demolish", "demonstrate", "demoralise", "demoralize", "demote", "demur", "denigrate", "denominate", "denote", "denounce", "dent", "denudate", "denude", "deny", "depart", "depend", "depict", "deplete", "deplore", "depopulate", "deport", "depose", "deposit", "deprave", "deprecate", "depreciate", "depress", "depressurise", "depressurize", "deprive", "depute", "deputise", "deputize", "deracinate", "derail", "derange", "derive", "derogate", "desacralize", "descale", "descant", "descend", "describe", "descry", "desecrate", "desegregate", "desert", "deserve", "desexualise", "desexualize", "desiccate", "design", "designate", "desire", "desist", "desolate", "despatch", "despise", "despoil", "destine", "destroy", "destruct", "detach", "detain", "detect", "deter", "deteriorate", "determine", "detest", "dethaw", "detonate", "detusk", "devaluate", "devalue", "deviate", "devil", "devise", "devolve", "devote", "devour", "diagnose", "diagram", "dice", "dicker", "dictate", "didder", "diddle", "die", "differ", "differentiate", "diffuse", "digest", "dignify", "digress", "dike", "dilapidate", "dilate", "dilute", "diminish", "din", "dine", "ding", "dingdong", "dinge", "direct", "dirty", "disable", "disaccord", "disadvantage", "disaffect", "disagree", "disallow", "disappear", "disapprove", "disarm", "disarray", "disarticulate", "disassemble", "disassociate", "disband", "disbelieve", "disburden", "discard", "discase", "discern", "discerp", "discharge", "discipline", "disclose", "discolor", 
"discombobulate", "discomfit", "discommode", "discompose", "disconcert", "disconnect", "discontinue", "discord", "discount", "discourage", "discourse", "discover", "discredit", "discriminate", "disdain", "disembark", "disembarrass", "disembowel", "disembroil", "disenable", "disenchant", "disencumber", "disenfranchise", "disengage", "disentangle", "disesteem", "disfavor", "disfavour", "disfigure", "disfranchise", "disgorge", "disgrace", "disguise", "disgust", "dish", "dishevel", "dishonor", "dishonour", "disillusion", "disincline", "disinherit", "disintegrate", "disinter", "disinvest", "disinvolve", "disjoin", "disjoint", "disk", "dislocate", "dislodge", "dismantle", "dismay", "dismember", "dismiss", "dismount", "disobey", "disoblige", "disorder", "disorganise", "disorganize", "disown", "disparage", "dispatch", "dispel", "dispense", "disperse", "dispirit", "displace", "display", "displume", "disport", "dispose", "disprove", "dispute", "disqualify", "disquiet", "disregard", "disrespect", "disrobe", "disrupt", "diss", "dissect", "dissemble", "disseminate", "dissent", "dissever", "dissipate", "dissociate", "dissolve", "dissuade", "distance", "distend", "distil", "distill", "distinguish", "distort", "distract", "distress", "distribute", "district", "distrust", "disturb", "disunite", "disuse", "ditch", "dither", "divagate", "dive", "diverge", "diversify", "divert", "divest", "divide", "divorce", "divulge", "dj", "dock", "doctor", "dodder", "dodge", "dog", "dogmatise", "dogmatize", "dogsled", "domesticate", "domesticise", "domesticize", "domicile", "domiciliate", "dominate", "domineer", "don", "donate", "dong", "doom", "dose", "doss", "dot", "double", "douse", "dower", "down", "download", "downplay", "dowse", "doze", "draft", "drag", "dragoon", "drain", "dramatise", "dramatize", "drape", "draught", "dread", "dream", "dredge", "drench", "dress", "dribble", "drift", "drill", "drip", "drivel", "drizzle", "drool", "droop", "drop", "drown", "drowse", "drub", "drudge", "drug", 
"drum", "dry", "dub", "duck", "dulcify", "dulcorate", "dull", "dumbfound", "dump", "dun", "dunk", "dupe", "duplicate", "dust", "dwarf", "dyke", "e-mail", "earmark", "earn", "ease", "echo", "eclipse", "economise", "economize", "eddy", "edge", "edify", "edit", "educate", "educe", "edulcorate", "efface", "effect", "effectuate", "effervesce", "effloresce", "egest", "egress", "eject", "elaborate", "elapse", "elate", "electrify", "electrocute", "elevate", "elicit", "eliminate", "elongate", "elucidate", "elude", "emaciate", "email", "emanate", "emancipate", "emasculate", "embark", "embarrass", "embed", "embellish", "embezzle", "embitter", "emblazon", "embody", "embolden", "emboss", "embower", "embrace", "embrangle", "embrocate", "embroider", "embroil", "embrown", "emerge", "emit", "empale", "empanel", "empathise", "empathize", "emphasise", "emphasize", "employ", "empower", "empty", "empurple", "enact", "enamor", "enamour", "encamp", "encapsulate", "encase", "enchant", "encipher", "encircle", "enclose", "enclothe", "encompass", "encounter", "encourage", "encroach", "encrust", "encrypt", "encumber", "end", "endeavor", "endeavour", "endorse", "endow", "endue", "endure", "energise", "energize", "enervate", "enfeeble", "enfold", "enforce", "enfranchise", "engage", "engender", "engild", "engineer", "englut", "engorge", "engraft", "engrave", "engross", "engulf", "enhance", "enjoin", "enjoy", "enkindle", "enlace", "enlarge", "enlighten", "enlist", "enliven", "enliven", "enmesh", "ennoble", "enounce", "enquire", "enrapture", "enrol", "enroll", "ensconce", "enshrine", "enshroud", "ensnare", "ensnarl", "ensue", "ensure", "entail", "entangle", "enter", "entertain", "enthral", "enthrall", "enthrone", "entice", "entitle", "entomb", "entrance", "entrap", "entreat", "entrench", "entrust", "entwine", "enumerate", "enunciate", "envelop", "envenom", "environ", "envisage", "envision", "envy", "enwrap", "epitomise", "epitomize", "equal", "equalise", "equalize", "equate", "equilibrate", 
"equilibrise", "equilibrize", "equip", "eradicate", "erase", "erect", "erode", "err", "eruct", "erupt", "escalate", "escape", "eschew", "escort", "espouse", "espy", "essay", "establish", "esteem", "estimate", "estrange", "etch", "eulogize", "evacuate", "evade", "evaluate", "evaporate", "even", "even", "evidence", "evince", "eviscerate", "evoke", "evolve", "exacerbate", "exact", "exaggerate", "exalt", "examine", "exasperate", "excavate", "exceed", "excel", "except", "excerpt", "exchange", "excise", "excite", "exclaim", "exclude", "excogitate", "excommunicate", "excoriate", "excrete", "excruciate", "exculpate", "excuse", "execrate", "execute", "exemplify", "exempt", "exercise", "exert", "exhale", "exhaust", "exhibit", "exhilarate", "exhort", "exhume", "exile", "exist", "exit", "exonerate", "expand", "expatiate", "expatriate", "expect", "expedite", "expel", "expend", "experience", "expiate", "expire", "explain", "explicate", "explode", "exploit", "explore", "expose", "exposit", "expound", "express", "expunge", "expurgate", "extemporize", "extend", "extenuate", "exterminate", "externalize", "extinguish", "extirpate", "extort", "extract", "extradite", "extrapolate", "extricate", "exude", "exult", "eyeball", "fabricate", "face", "face-lift", "facilitate", "facsimile", "fade", "fag", "faggot", "fagot", "fail", "faint", "fake", "falsify", "falter", "familiarise", "familiarize", "famish", "fan", "fancify", "fancy", "fantasise", "fantasize", "fantasy", "farce", "fare", "farm", "farrow", "fascinate", "fashion", "fasten", "fat", "fate", "father", "fathom", "fatigue", "fatten", "fault", "favor", "favour", "fawn", "faze", "fear", "feast", "feather", "featherbed", "feature", "fecundate", "fee", "feign", "felicitate", "fellate", "fence", "fend", "ferment", "ferry", "fertilise", "fertilize", "fester", "festinate", "fetch", "fete", "fetter", "fictionalise", "fictionalize", "fiddle", "figure", "filch", "file", "filet", "fill", "fillet", "film", "filter", "filtrate", "finagle", 
"finalise", "finalize", "fine", "fine-tune", "finetune", "finger", "finish", "fink", "fire", "firm", "fish", "fit", "fixate", "fizz", "flag", "flagellate", "flail", "flame", "flap", "flare", "flash", "flatten", "flatter", "flaunt", "flavor", "flavour", "flaw", "fleck", "fledge", "fleece", "fleet", "flick", "flicker", "flight", "flinch", "flip", "flip-flop", "flirt", "float", "flock", "flog", "flood", "floor", "flop", "flounder", "flourish", "flout", "flow", "flower", "flub", "fluctuate", "fluff", "flunk", "flurry", "flush", "flutter", "foam", "fob", "fog", "foil", "fold", "foliate", "follow", "foment", "fondle", "fool", "foot", "footnote", "forage", "foray", "force", "forebode", "foreclose", "foreground", "foreshadow", "foreshorten", "forest", "forestall", "forewarn", "forfeit", "forfend", "forgather", "forge", "fork", "form", "formalize", "format", "formulate", "fortify", "foster", "foul", "found", "founder", "fraction", "fracture", "frame", "frank", "fraternize", "fray", "frazzle", "free", "free-base", "freewheel", "frequent", "freshen", "freshen", "fright", "frighten", "frisk", "fritter", "frizz", "frizzle", "front", "frost", "froth", "frown", "fructify", "frustrate", "fry", "fuddle", "fudge", "fuel", "fulfil", "fulfill", "full", "fulminate", "fumble", "fume", "fumigate", "function", "funk", "furbish", "furcate", "furnish", "furrow", "further", "fuse", "fustigate", "gab", "gabble", "gag", "gain", "gall", "gallop", "galvanise", "galvanize", "gamble", "gambol", "gangrene", "gaol", "gap", "gape", "garb", "garble", "gargle", "garment", "garner", "garnish", "garnishee", "garotte", "garrote", "garrotte", "gasconade", "gash", "gasify", "gasp", "gather", "gauge", "gawk", "gawp", "gaze", "gazump", "gear", "gel", "geld", "geminate", "generalise", "generalize", "generate", "gentle", "genuflect", "germinate", "gestate", "gesticulate", "gesture", "ghost", "ghostwrite", "gibber", "gibbet", "gibe", "gift", "gild", "gimp", "gird", "girdle", "girth", "gladden", "glamour", 
"glance", "glare", "glass", "glaze", "gleam", "glean", "glimmer", "glint", "glisten", "glitter", "gloat", "glom", "glorify", "gloss", "glow", "glower", "glue", "glut", "gnarl", "gnaw", "goad", "gobble", "goggle", "goldbrick", "goof-proof", "goofproof", "google", "gorge", "gormandise", "gormandize", "gouge", "gourmandize", "govern", "grab", "grace", "grade", "graduate", "graft", "grain", "grant", "granulate", "graph", "grapple", "grasp", "grass", "grate", "gratify", "grave", "gravel", "gray", "graze", "greet", "grey", "grieve", "grime", "grip", "gripe", "grizzle", "groan", "grok", "groom", "groove", "grope", "grouch", "group", "grouse", "grovel", "growl", "grub", "grudge", "grumble", "gruntle", "guarantee", "guard", "guess", "guggle", "guide", "gull", "gulp", "gum", "gurgle", "gush", "guttle", "guy", "gybe", "gyrate", "habilitate", "habituate", "hack", "haggle", "hail", "hale", "hallow", "halt", "halter", "ham", "hammer", "hamper", "hand", "hand-build", "handbuild", "handcuff", "handicap", "handle", "hanker", "hap", "happen", "harass", "harbinger", "harbor", "harbour", "harden", "hark", "harken", "harken", "harm", "harmonise", "harmonize", "harness", "harp", "harrow", "harry", "harvest", "hassle", "hasten", "hatch", "hate", "haul", "haunt", "hawk", "hazard", "head", "heal", "heap", "hearken", "hearken", "hearten", "heat", "hector", "hedge", "heed", "heel", "heft", "heighten", "help", "herald", "herd", "hero-worship", "hesitate", "hie", "higgle", "highjack", "highlight", "hijack", "hike", "hinder", "hint", "hire", "hiss", "hitch", "hitchhike", "hoard", "hobble", "hock", "hoist", "holiday", "holler", "hollo", "hollow", "hone", "honk", "honor", "honour", "hoodwink", "hoof", "hook", "hoover", "hop", "hope", "hopple", "horn", "hornswoggle", "horrify", "hotfoot", "hound", "house", "houseclean", "hover", "howl", "huckster", "huddle", "hue", "huff", "hug", "hulk", "hum", "humanise", "humanize", "humble", "humiliate", "humor", "humour", "hump", "hunch", "hunger", "hunker", 
"hunt", "hurl", "hurry", "hurtle", "husband", "hush", "husk", "hustle", "hybridise", "hybridize", "hydroplane", "hyperbolise", "hyperbolize", "hyphen", "hyphen", "hyphenate", "hypnotise", "hypnotize", "hypothecate", "hypothesise", "hypothesize", "ice", "idealize", "ideate", "identify", "idle", "idolise", "idolize", "ignite", "ignore", "ill-treat", "ill-use", "illegalise", "illegalize", "illume", "illuminate", "illumine", "illustrate", "image", "imagine", "imbed", "imbibe", "imbrue", "imbue", "imitate", "immerse", "immingle", "immobilise", "immobilize", "immolate", "immortalise", "immortalize", "immunise", "immunize", "immure", "impact", "impair", "impale", "impanel", "impart", "impeach", "impede", "impel", "impend", "imperil", "impersonate", "impinge", "implant", "implement", "implicate", "implode", "implore", "imply", "import", "importune", "impose", "impound", "impoverish", "imprecate", "impregnate", "impress", "imprint", "improve", "improvise", "improvize", "impute", "inaugurate", "incapacitate", "incarcerate", "incarnate", "incase", "incense", "inch", "incinerate", "incise", "incite", "incline", "inclose", "include", "incommode", "inconvenience", "incorporate", "increase", "incriminate", "incrust", "incubate", "inculcate", "inculpate", "incur", "indemnify", "indent", "indenture", "indicate", "indispose", "indite", "individualise", "individualize", "indorse", "induce", "induct", "indue", "indulge", "indurate", "inebriate", "infect", "infer", "infest", "infiltrate", "inflame", "inflate", "inflect", "inflict", "influence", "inform", "infract", "infringe", "infuriate", "infuse", "ingeminate", "ingest", "ingraft", "ingrain", "ingurgitate", "inhabit", "inhale", "inherit", "inhibit", "inhume", "initialise", "initialize", "initiate", "inject", "injure", "innovate", "inoculate", "inquire", "inscribe", "inseminate", "insert", "insinuate", "insist", "insolate", "inspect", "inspire", "inspirit", "inspissate", "instal", "install", "instance", "instigate", "instil", 
"instill", "institute", "institutionalise", "institutionalize", "instruct", "instrument", "instrumentate", "insulate", "insult", "insure", "integrate", "intend", "intensify", "inter", "intercede", "intercept", "interchange", "intercommunicate", "interdict", "interest", "interfere", "interject", "interlace", "interlard", "interlock", "intermediate", "intermingle", "intermit", "interpenetrate", "interpolate", "interpose", "interpret", "interrelate", "interrogate", "interrupt", "intersect", "intersperse", "intertwine", "intervene", "interview", "intimate", "intonate", "intone", "intoxicate", "intrench", "intrigue", "introduce", "intromit", "intrude", "intrust", "intumesce", "inunct", "inundate", "inure", "invade", "invalid", "invalidate", "inveigh", "inveigle", "invent", "inventory", "invert", "invest", "investigate", "invigilate", "invigorate", "invite", "invoke", "involve", "irk", "iron", "irradiate", "irrigate", "irritate", "irrupt", "isolate", "issue", "itch", "itemise", "itemize", "iterate", "jab", "jabber", "jack", "jacklight", "jactitate", "jade", "jail", "jam", "jampack", "jangle", "jar", "jaunt", "jaw", "jawbone", "jeer", "jell", "jellify", "jelly", "jeopardise", "jeopardize", "jerk", "jest", "jewel", "jib", "jibe", "jiggle", "jimmy", "jingle", "jingle-jangle", "job", "jockey", "jog", "joggle", "join", "joint", "joke", "jollify", "jolly", "jolt", "josh", "jostle", "jounce", "journey", "joy", "joyride", "jubilate", "judder", "judge", "jug", "juggle", "jumble", "jump", "jump-start", "jumpstart", "junk", "junket", "junketeer", "justify", "keel", "keen", "key", "kibosh", "kick", "kidnap", "kill", "kindle", "kink", "kip", "kiss", "knap", "knead", "knell", "knife", "knight", "knock", "knot", "kotow", "kowtow", "kvetch", "label", "labialise", "labialize", "labor", "labour", "lace", "lack", "lactate", "ladder", "ladle", "lag", "lallygag", "lam", "lambast", "lambaste", "lame", "lament", "lampoon", "land", "languish", "lap", "lapidate", "lapse", "lard", "lark", "larn", 
"larrup", "lash", "lasso", "last", "latch", "lather", "laud", "laugh", "launch", "launder", "lave", "lavish", "laze", "leach", "leaf", "leak", "lean", "leap", "learn", "lease", "leash", "leaven", "leaven", "lecture", "leech", "leer", "legalise", "legalize", "legislate", "legitimate", "legitimatise", "legitimatize", "legitimise", "legitimize", "lenify", "lessen", "lessen", "level", "lever", "levitate", "levy", "liaise", "liberate", "librate", "licence", "license", "lick", "lie", "lift", "lighten", "like", "liken", "liken", "lime", "limit", "limn", "limp", "line", "linger", "link", "lionise", "lionize", "liquefy", "liquidate", "liquidise", "liquidize", "liquify", "list", "listen", "litigate", "live", "liven", "liven", "load", "loaf", "loan", "loathe", "lobby", "localise", "localize", "locate", "lock", "locomote", "lodge", "log", "loiter", "loll", "lollygag", "long", "look", "loom", "loop", "loose", "loosen", "loosen", "loot", "lop", "lot", "lounge", "lour", "love", "low", "lowball", "lower", "lucubrate", "luff", "lug", "lull", "lumber", "lump", "lunge", "lurch", "lure", "lurk", "lust", "luxate", "luxuriate", "macerate", "machinate", "maculate", "madden", "magnetise", "magnetize", "magnify", "mail", "maintain", "maledict", "malign", "malinger", "maltreat", "malversate", "manacle", "manage", "manducate", "maneuver", "mangle", "manifest", "manifold", "manipulate", "manoeuver", "manoeuvre", "manufacture", "manumit", "manure", "map", "mar", "march", "mark", "market", "maroon", "marry", "marshal", "marvel", "mash", "mask", "massacre", "massage", "master", "mastermind", "masticate", "mat", "match", "mate", "materialise", "materialize", "matt-up", "matte", "matter", "maturate", "mature", "maul", "maunder", "meander", "measure", "meddle", "mediate", "medicate", "medicine", "meditate", "melanise", "melanize", "meld", "meliorate", "mellow", "melt", "memorialise", "memorialize", "memorise", "memorize", "menace", "mend", "menstruate", "mensurate", "mention", "merchandise", 
"merge", "merit", "mesh", "mesmerise", "mesmerize", "metamorphose", "mew", "mewl", "microwave", "micturate", "migrate", "mildew", "milk", "mime", "mimic", "mince", "mind", "mingle", "minify", "minimise", "minimize", "mint", "mire", "misaddress", "misadvise", "misapply", "misapprehend", "misappropriate", "misbehave", "miscalculate", "miscall", "miscarry", "misconceive", "misconduct", "misconstrue", "misdemean", "misdirect", "misestimate", "misguide", "mishandle", "misidentify", "misinform", "misinterpret", "mislearn", "mismanage", "misname", "misplace", "mispronounce", "misrepresent", "miss", "mist", "mistreat", "mistrust", "misuse", "mitigate", "mizzle", "moan", "mob", "mobilise", "mobilize", "mock", "model", "moderate", "modernise", "modernize", "modify", "modulate", "moil", "moisten", "mold", "mold", "molder", "molest", "mollify", "mollycoddle", "molt", "monger", "monish", "monitor", "monkey", "monopolise", "monopolize", "moo", "mooch", "moon", "moor", "moot", "mop", "moralise", "moralize", "mortify", "mosey", "mother", "motion", "motivate", "motley", "motor", "motorbike", "motorcycle", "mottle", "mould", "moulder", "moult", "mount", "mouse", "mousse", "mouth", "move", "muck", "mud", "muddle", "muff", "muffle", "mulct", "multiply", "mumble", "munch", "murder", "murmur", "mush", "mussitate", "muster", "mute", "mutilate", "mutter", "muzzle", "mystify", "nab", "nag", "nail", "name", "nap", "nark", "narrate", "narrow", "naturalise", "naturalize", "nauseate", "navigate", "near", "neaten", "necessitate", "necrose", "need", "needle", "negate", "negative", "neglect", "negociate", "negotiate", "neighbor", "neighbour", "nerve", "nest", "nestle", "netmail", "nett", "nettle", "neuter", "neutralise", "neutralize", "nibble", "nick", "nickname", "nictate", "nictitate", "niggle", "nip", "nock", "nod", "nominate", "normalise", "normalize", "nose", "nosh", "note", "notice", "notify", "nourish", "nudge", "nuke", "nullify", "numb", "number", "numerate", "nurse", "nurture", 
"nutrify", "nuzzle", "obey", "objectify", "objurgate", "obligate", "oblige", "obliterate", "obnubilate", "obscure", "observe", "obsess", "obstruct", "obtain", "obtrude", "obturate", "obviate", "occlude", "occult", "occupy", "occur", "odorize", "odourise", "off", "offend", "offer", "officiate", "offload", "oil", "oink", "omen", "omen", "omit", "ooze", "open", "operate", "opine", "oppose", "oppress", "oppugn", "opt", "optimise", "optimize", "orb", "orbit", "orchestrate", "ordain", "order", "ordinate", "organise", "organize", "orient", "orientate", "originate", "ornament", "oscillate", "osculate", "ostracize", "oust", "outbalance", "outcry", "outdistance", "outfit", "outflank", "outlast", "outlaw", "outline", "outlive", "outmaneuver", "outmatch", "outperform", "outrage", "outrank", "outshout", "outsmart", "outstrip", "outweigh", "over-correct", "overact", "overawe", "overbalance", "overcharge", "overcloud", "overcompensate", "overdress", "overestimate", "overflow", "overhaul", "overindulge", "overload", "overlook", "overmaster", "overplay", "overpower", "overprotect", "overrate", "overreach", "overrule", "oversew", "overshadow", "overspread", "overstate", "overstep", "overstretch", "oversupply", "overtop", "overturn", "overuse", "overvalue", "overwhelm", "overwinter", "overwork", "owe", "own", "pace", "pacify", "pack", "pack", "package", "pad", "paddle", "page", "paginate", "pain", "paint", "pair", "palaver", "pale", "palisade", "pall", "palliate", "palm", "palpate", "palpitate", "pamper", "pan", "pander", "panel", "pant", "pantomime", "paper", "parachute", "parade", "parallel", "paralyse", "paralyze", "paraphrase", "parboil", "parch", "pardon", "pare", "parent", "parody", "parry", "part", "participate", "particularise", "particularize", "partition", "pass", "paste", "pasture", "pat", "patch", "patrol", "patronage", "patronise", "patronize", "patter", "pattern", "pauperise", "pauperize", "pause", "pawn", "peal", "peck", "peculate", "pedal", "peddle", "pee", "peek", 
"peel", "peep", "pelt", "pen", "penalise", "penalize", "penetrate", "pepper", "perceive", "perch", "percolate", "perfect", "perforate", "perform", "perfume", "perfuse", "peril", "perish", "permeate", "permute", "perpetrate", "persecute", "persevere", "persist", "personalise", "personalize", "personate", "personify", "perspire", "persuade", "pertain", "perturb", "pervade", "pervert", "pester", "pettifog", "phlebotomise", "phlebotomize", "phonate", "phone", "photocopy", "photograph", "pick", "picture", "piddle", "piece", "pierce", "piffle", "pig", "pigeonhole", "pile", "pilfer", "pillage", "pillory", "pillow", "pilot", "pimp", "pin", "pinch", "pine", "ping", "pinion", "pink", "pinpoint", "pioneer", "pip", "pipe", "pique", "pirate", "piss", "pit", "pitch", "pitchfork", "pivot", "placard", "placate", "place", "plagiarise", "plagiarize", "plague", "plain", "plait", "plan", "plane", "plank", "plant", "plash", "plaster", "plat", "play", "playact", "pleach", "plead", "please", "pleat", "pledge", "plicate", "plight", "plod", "plonk", "plop", "plot", "plough", "plow", "pluck", "plug", "plume", "plummet", "plump", "plunder", "plunge", "plunk", "ply", "pock", "pocket", "poetise", "poetize", "point", "poise", "poison", "poke", "pole", "police", "polish", "poll", "pollard", "pollute", "pommel", "ponder", "pop", "popularise", "popularize", "populate", "pore", "portend", "portion", "portray", "pose", "posit", "position", "possess", "post", "postdate", "postmark", "postpone", "postulate", "posture", "pother", "potter", "pouch", "poultice", "pounce", "pound", "pour", "pout", "powder", "powderise", "powderize", "practice", "practice", "practise", "praise", "prance", "prate", "prattle", "pray", "preach", "preachify", "precede", "precipitate", "preclude", "predate", "predetermine", "predicate", "predict", "predominate", "preempt", "preen", "preface", "prefigure", "prehend", "prejudice", "premier", "premiere", "premise", "premiss", "prepare", "preponderate", "prepossess", "presage", 
"prescribe", "present", "preserve", "preside", "press", "pressure", "pressurise", "pressurize", "presume", "presuppose", "pretend", "pretermit", "prettify", "prevail", "prevent", "previse", "prey", "prick", "prickle", "pride", "prime", "primp", "prink", "print", "prise", "privilege", "prize", "probe", "proceed", "process", "proclaim", "procrastinate", "procreate", "proctor", "procure", "prod", "produce", "profane", "profess", "proffer", "prognosticate", "program", "programme", "progress", "prohibit", "project", "prolong", "promenade", "promise", "promote", "prompt", "promulgate", "pronounce", "proof", "propagate", "propel", "prophesy", "propitiate", "propose", "prorogue", "proscribe", "prosecute", "prosper", "protect", "protest", "protract", "protrude", "prove", "provide", "provision", "provoke", "prowl", "prune", "pry", "psychoanalyse", "psychoanalyze", "ptyalise", "ptyalize", "publicise", "publicize", "publish", "pucker", "puddle", "puff", "puke", "pule", "pull", "pullulate", "pulsate", "pulse", "pulverise", "pulverize", "pummel", "punch", "punctuate", "punish", "punt", "pup", "purchase", "puree", "purge", "purify", "purl", "purloin", "purple", "purport", "purpose", "purpurate", "purr", "purse", "pursue", "purvey", "push", "pussyfoot", "putter", "puzzle", "quaff", "quail", "quake", "qualify", "quantify", "quarrel", "quarter", "quash", "quaver", "queer", "quell", "quench", "query", "quest", "question", "quetch", "quibble", "quicken", "quiesce", "quiet", "quieten", "quip", "quiver", "quiz", "quote", "race", "rack", "racket", "radiate", "rag", "rage", "raid", "rail", "railroad", "raiment", "rain", "raise", "rake", "rally", "ram", "ramble", "ramify", "ramp", "range", "rank", "ransack", "ransom", "rant", "rap", "rape", "rarefy", "rarify", "rase", "rat", "rate", "ratify", "rationalise", "rationalize", "rattle", "ravage", "rave", "ravel", "raven", "raven", "ravish", "ray", "raze", "razz", "re-create", "re-emerge", "re-start", "reach", "react", "readapt", "readjust", 
"ready", "realise", "realize", "reanimate", "reap", "reappear", "rear", "reason", "reassert", "reassign", "reassure", "reave", "reboot", "rebuff", "rebut", "recall", "recant", "recap", "recapitulate", "recede", "receipt", "receive", "recess", "recharge", "recidivate", "recite", "reckon", "reclaim", "recline", "recognise", "recognize", "recoil", "recollect", "recommend", "recompense", "reconcile", "reconnoiter", "reconnoitre", "reconstruct", "record", "recount", "recoup", "recover", "recreate", "recrudesce", "recruit", "rectify", "recumb", "recuperate", "recur", "redact", "redden", "rede", "redeem", "redress", "reduce", "reduplicate", "reek", "reel", "reelect", "reenact", "reenforce", "reestablish", "reexamine", "refashion", "referee", "reference", "refill", "refine", "refit", "reflect", "reforge", "reform", "refrain", "refresh", "refreshen", "refreshen", "refund", "refurbish", "refuse", "refute", "regain", "regale", "regard", "regenerate", "register", "regorge", "regress", "regularise", "regularize", "regulate", "regurgitate", "rehash", "rehearse", "reheel", "reign", "reimburse", "rein", "reincarnate", "reinforce", "reinstate", "reinvigorate", "reissue", "reiterate", "reject", "rejoice", "rejoin", "rejuvenate", "relapse", "relate", "release", "relegate", "relent", "relieve", "relinquish", "relish", "reload", "rely", "remain", "remand", "remark", "remediate", "remedy", "remember", "remind", "remit", "remodel", "remold", "remould", "remove", "remunerate", "render", "renegade", "renew", "renormalise", "renormalize", "renounce", "renovate", "repair", "repeal", "repeat", "repel", "repent", "rephrase", "replace", "replenish", "replete", "replicate", "reply", "repoint", "report", "repose", "reposition", "repossess", "represent", "repress", "reprieve", "reprimand", "reprint", "reprise", "reprize", "reproach", "reprobate", "reproduce", "reproof", "reprove", "repudiate", "repugn", "repulse", "request", "require", "requite", "rescind", "rescue", "research", "resect", 
"resemble", "resent", "reserve", "resew", "reshape", "reside", "resign", "resile", "resist", "resole", "resolve", "resonate", "resort", "resound", "respect", "respire", "respite", "respond", "rest", "restart", "restate", "restitute", "restore", "restrain", "restrict", "result", "resume", "resurrect", "resuscitate", "retain", "retaliate", "retard", "retch", "retick", "retire", "retool", "retort", "retrace", "retract", "retrieve", "retrograde", "retrogress", "retrospect", "retrovert", "return", "reunify", "reunite", "reuse", "revalue", "revamp", "reveal", "revel", "revenge", "reverberate", "revere", "reverence", "reverse", "revert", "review", "revile", "revise", "revitalise", "revitalize", "revive", "revivify", "revoke", "revolt", "revolutionise", "revolutionize", "revolve", "reward", "reword", "rhyme", "rib", "rick", "ricochet", "riddle", "ridicule", "riff", "riffle", "rifle", "rig", "right", "rile", "rime", "rinse", "riot", "rip", "ripen", "ripen", "riposte", "ripple", "risk", "rival", "rive", "rivet", "roam", "roar", "roast", "rob", "robe", "rock", "rocket", "roil", "roister", "roleplay", "roll", "rollick", "romance", "romp", "rook", "room", "roost", "root", "rootle", "rope", "rot", "rotate", "round", "rouse", "rout", "rove", "rub", "rubber", "rubberise", "rubberize", "ruck", "ruckle", "rue", "ruff", "ruffle", "ruin", "rule", "rumble", "rumor", "rumour", "rumple", "rupture", "rush", "rust", "rustle", "rut", "sabotage", "saccharify", "sack", "sacrifice", "sadden", "saddle", "sag", "sail", "sailplane", "saint", "salivate", "sally", "salute", "salvage", "salve", "sample", "sanctify", "sanction", "sand", "sandbag", "sandpaper", "sap", "saponify", "sash", "sashay", "sass", "sate", "satiate", "satirise", "satirize", "satisfy", "saturate", "saunter", "savage", "save", "savor", "savour", "savvy", "scab", "scabble", "scald", "scale", "scam", "scamper", "scan", "scandalise", "scandalize", "scant", "scare", "scarify", "scarp", "scarper", "scat", "scatter", "scavenge", 
"scend", "scent", "scheme", "schmoose", "schmooze", "schnorr", "school", "scintillate", "scoff", "scold", "scoop", "scoot", "scorch", "score", "scorn", "scotch", "scour", "scourge", "scout", "scowl", "scrabble", "scrag", "scram", "scramble", "scranch", "scrap", "scrape", "scratch", "scraunch", "scrawl", "screak", "scream", "screech", "screen", "screw", "scribble", "scrimp", "scrounge", "scrub", "scrunch", "scrutinise", "scrutinize", "scud", "scuff", "scuffle", "sculpt", "sculpture", "scupper", "scurry", "scuttle", "seal", "seaplane", "sear", "search", "season", "seat", "secede", "secern", "secernate", "seclude", "second", "secrete", "section", "secularise", "secularize", "secure", "sedate", "seduce", "seed", "seem", "seep", "seesaw", "seethe", "segment", "seize", "select", "senesce", "sense", "sentence", "sentimentalise", "sentimentalize", "sentimentise", "sentimentize", "separate", "sequester", "sequestrate", "sermonise", "sermonize", "serve", "service", "settle", "sever", "shack", "shackle", "shade", "shadow", "shaft", "shallow", "sham", "shamble", "shame", "shanghai", "shape", "share", "sharpen", "sharpen", "sharpshoot", "shatter", "shave", "shear", "sheer", "shell", "shelve", "shew", "shield", "shift", "shimmy", "shin", "shinny", "ship", "shirk", "shiver", "shoal", "shock", "shoetree", "shop", "short", "short-circuit", "shorten", "shout", "shower", "shriek", "shrill", "shrine", "shrivel", "shroud", "shudder", "shuffle", "shun", "sibilate", "sic", "sick", "sicken", "sicken", "sidestep", "sidetrack", "sidle", "sieve", "sift", "sigh", "sight", "sign", "signal", "signalise", "signalize", "signify", "silence", "simulate", "sin", "singe", "single-foot", "sip", "sire", "site", "situate", "sketch", "skewer", "ski", "skim", "skimp", "skin", "skip", "skirt", "skitter", "skulk", "skunk", "sky", "skylark", "skyrocket", "slack", "slacken", "slacken", "slake", "slam", "slander", "slang", "slant", "slash", "slaughter", "slaver", "sledge", "sledgehammer", "sleek", 
"slenderize", "sleuth", "slice", "slick", "slight", "slim", "slip", "slither", "sliver", "slobber", "slog", "slop", "slope", "slosh", "slouch", "slough", "slow", "slue", "slug", "sluice", "slumber", "slump", "slush", "smack", "smart", "smash", "smear", "smile", "smirch", "smoke", "smolder", "smooch", "smooth", "smoothen", "smoothen", "smother", "smudge", "snack", "snap", "snare", "snarf", "snarl", "snatch", "sneeze", "sniff", "sniffle", "snip", "snipe", "snitch", "snivel", "snoop", "snooze", "snort", "snow", "snub", "snuff", "snuffle", "snuggle", "soak", "soap", "soar", "sob", "sock", "sodomise", "sodomize", "soften", "soil", "solace", "solarise", "solarize", "sole", "solemnise", "solemnize", "solicit", "solve", "soothe", "sop", "sophisticate", "sorrow", "sort", "sough", "sound", "sour", "souse", "sow", "span", "spang", "spank", "spare", "sparge", "spark", "sparkle", "spat", "spatter", "spawn", "spay", "specialise", "specialize", "speciate", "specify", "speckle", "speculate", "spell", "spellbind", "spelunk", "spew", "sphacelate", "spice", "spiel", "spike", "spill", "spirit", "spite", "splash", "splatter", "splay", "splice", "splinter", "splosh", "splurge", "splutter", "spoil", "sponge", "sponsor", "spoof", "spoon", "sport", "spot", "spotlight", "spout", "sprain", "sprawl", "spray", "spread-eagle", "spreadeagle", "sprinkle", "sprout", "spud", "spue", "spume", "spur", "spurn", "spurt", "sputter", "spy", "squabble", "squall", "squander", "square", "squash", "squat", "squawk", "squeak", "squeal", "squeeze", "squelch", "squinch", "squint", "squirm", "squirt", "squish", "stab", "stabilise", "stabilize", "stack", "stag", "stage", "stagger", "stagnate", "stain", "stake", "stalk", "stall", "stammer", "stamp", "stanch", "star", "stare", "stargaze", "start", "startle", "starve", "stash", "state", "station", "staunch", "stay", "steady", "steam", "steamer", "steel", "steep", "steer", "step", "stereotype", "sterilise", "sterilize", "stew", "stiffen", "stiffen", "stifle", 
"stigmatise", "stigmatize", "still", "stimulate", "stint", "stipple", "stipulate", "stir", "stitch", "stock", "stockpile", "stomach", "stomp", "stone", "stool", "stoop", "stopple", "store", "storm", "straddle", "straggle", "straighten", "strain", "straiten", "strand", "strangle", "strangulate", "strap", "straw", "stray", "streak", "stream", "strengthen", "strengthen", "stress", "stretch", "strickle", "strip", "stroll", "struggle", "strum", "strut", "stud", "study", "stuff", "stultify", "stumble", "stump", "stun", "stupefy", "stutter", "style", "stymie", "stymy", "sub", "subcontract", "subdue", "subject", "subjugate", "sublimate", "sublime", "submerge", "submerse", "subordinate", "subscribe", "subside", "subsidize", "subsist", "substantiate", "substitute", "subsume", "subtend", "subtract", "subvert", "succeed", "succor", "succumb", "suck", "suckle", "sue", "suffer", "suffice", "suffocate", "suffuse", "sugar", "sugarcoat", "suggest", "suit", "sulfur", "sulk", "sully", "sulphur", "sum", "summarise", "summarize", "summate", "summit", "sun", "sunbathe", "supercharge", "superintend", "supersede", "supervise", "supplant", "supplement", "supply", "support", "suppose", "suppress", "suppurate", "surcharge", "surf", "surface", "surfboard", "surfeit", "surge", "surmise", "surmount", "surpass", "surprise", "surrender", "surround", "surveil", "survey", "survive", "suspect", "suspend", "suspire", "sustain", "swab", "swaddle", "swag", "swage", "swagger", "swallow", "swamp", "swan", "swank", "swap", "swarm", "swash", "swat", "swathe", "sway", "sweet-talk", "sweeten", "swerve", "swig", "swill", "swindle", "swipe", "swirl", "swish", "switch", "swivel", "swoon", "swoop", "swoosh", "symbolize", "sympathize", "sync", "synchronise", "synchronize", "systematize", "systemise", "systemize", "table", "tabularise", "tabularize", "tabulate", "tack", "tackle", "tail", "tailor", "tailor-make", "taint", "talk", "tally", "tame", "tamper", "tan", "tangle", "tantalise", "tantalize", "tapdance", 
"tape", "taper", "target", "tarnish", "tarry", "task", "taste", "tat", "tattle", "taunt", "tauten", "taxi", "tease", "tee", "teem", "teeter", "teeter-totter", "teetertotter", "telegraph", "telephone", "temper", "tempt", "tend", "tender", "tenderise", "tenderize", "tense", "tent", "tergiversate", "terminate", "terrace", "terrasse", "terrify", "terrorise", "terrorize", "test", "testify", "thank", "thaw", "theorise", "theorize", "thicken", "thicken", "thieve", "thin", "thirst", "thrash", "thread", "threaten", "thresh", "thrill", "thrive", "throb", "throne", "throng", "throttle", "thrum", "thud", "thumb", "thump", "thunder", "thurify", "thwack", "thwart", "tick", "ticket", "tickle", "ticktack", "ticktock", "tide", "tidy", "tie", "tighten", "tiller", "tilt", "time", "tin", "tinct", "tincture", "tinge", "tingle", "tink", "tinker", "tinkle", "tint", "tipple", "tippytoe", "tiptoe", "tire", "tissue", "titillate", "title", "tittle-tattle", "tittup", "toady", "toast", "toddle", "toenail", "tog", "toil", "tolerate", "tone", "tool", "toot", "top", "tope", "topple", "torment", "torture", "toss", "tot", "total", "tote", "totter", "touch", "tousle", "tout", "tower", "toy", "trace", "track", "trade", "trademark", "traduce", "trail", "train", "trammel", "tramp", "trample", "trance", "tranquilize", "tranquillise", "tranquillize", "transcend", "transcribe", "transfigure", "transform", "transfuse", "transgress", "translate", "transliterate", "transmigrate", "transmit", "transmogrify", "transmute", "transplant", "transport", "transpose", "transubstantiate", "transude", "trap", "trash", "traumatise", "traumatize", "travail", "travel", "traverse", "treasure", "treat", "treble", "tree", "tremble", "tremor", "trench", "trend", "trespass", "trick", "trickle", "trifle", "trigger", "trill", "trim", "trip", "triple", "triumph", "troop", "trot", "trouble", "trounce", "truckle", "trudge", "trump", "trust", "try", "tuck", "tucker", "tug", "tumble", "tumefy", "tumesce", "tunnel", "turn", "turtle", 
"tusk", "tussle", "twaddle", "tweak", "tweedle", "tweet", "twiddle", "twin", "twine", "twinge", "twinkle", "twirl", "twirp", "twist", "twit", "twitch", "twitter", "type", "typify", "tyrannise", "tyrannize", "ululate", "umpire", "unarm", "unbalance", "unblock", "unbosom", "unbrace", "unburden", "uncase", "unchurch", "uncloak", "unclutter", "uncompress", "uncouple", "uncover", "undercoat", "underestimate", "underline", "undermine", "underpin", "underprice", "underrate", "underscore", "understate", "understudy", "undervalue", "undress", "undulate", "unearth", "unfit", "unfold", "unfurl", "unhallow", "unhinge", "unhorse", "unify", "unionise", "unionize", "unite", "unknot", "unlace", "unlade", "unleash", "unload", "unloose", "unloosen", "unloosen", "unmake", "unmask", "unnerve", "unpack", "unpick", "unplug", "unravel", "unroll", "unsay", "unscramble", "unsettle", "unsnarl", "unstrain", "untangle", "unthaw", "untie", "untune", "unwrap", "upbraid", "upchuck", "upgrade", "uplift", "upraise", "uprise", "uproot", "urge", "urinate", "urticate", "use", "usher", "usurp", "utilise", "utilize", "utter", "vacate", "vacation", "vaccinate", "vacillate", "vacuum", "vacuum-clean", "vagabond", "validate", "valuate", "value", "vamoose", "vamp", "vanish", "vanquish", "vaporise", "vaporize", "variegate", "varnish", "vary", "vaticinate", "vault", "vaunt", "veer", "veil", "vellicate", "vend", "venerate", "vent", "ventilate", "venture", "verbalise", "verbalize", "verify", "verse", "versify", "vesicate", "vest", "veto", "vibrate", "victimise", "victimize", "videotape", "vie", "view", "vilify", "vilipend", "vindicate", "violate", "visit", "visualise", "visualize", "vitalise", "vitalize", "vitiate", "vituperate", "vivify", "vocalise", "vocalize", "voice", "void", "volunteer", "vomit", "vouch", "vow", "voyage", "vulgarize", "wad", "waddle", "waffle", "wag", "wage", "wager", "waggle", "wail", "wait", "waitress", "waive", "waken", "waken", "walk", "wall", "wallop", "wallow", "wallpaper", "wamble", 
"wander", "wane", "wangle", "want", "wanton", "warble", "ward", "ware", "warn", "warp", "warrant", "wash", "wassail", "waste", "watch", "water", "waul", "wave", "waver", "wawl", "weaken", "weaken", "wean", "weary", "weather", "web", "wedge", "wee", "weigh", "weight", "weight-lift", "weightlift", "welch", "welcome", "well", "welsh", "welt", "welter", "whack", "wham", "whap", "wharf", "wheedle", "wheel", "whelm", "whelp", "whiff", "whimper", "whine", "whip", "whir", "whirl", "whirlpool", "whirr", "whisk", "whisker", "whisper", "whistle", "white", "whiten", "whittle", "whiz", "whizz", "whomp", "whoop", "whop", "widen", "wield", "wiggle", "wilt", "wince", "wing", "wink", "winkle", "winnow", "winter", "wire", "wiretap", "wish", "witch", "wither", "witness", "wobble", "wonder", "woo", "woolgather", "work", "worm", "worry", "worsen", "worsen", "worship", "worst", "wrangle", "wrap", "wrawl", "wreak", "wreathe", "wrench", "wrestle", "wrick", "wriggle", "wrinkle", "writhe", "yack", "yak", "yammer", "yank", "yap", "yarn-dye", "yaup", "yaw", "yawl", "yawn", "yawp", "yearn", "yell", "yelp", "yield", "yip", "yodel", "yoke", "yowl", "zap", "zest", "zigzag", "zone", "zoom", }
verb/regular.go
0.560253
0.418103
regular.go
starcoder
package common

import (
	"image/color"

	"github.com/EngoEngine/engo"
	"github.com/EngoEngine/gl"
)

// TriangleType is the type of triangle: Right or Isosceles.
type TriangleType uint8

const (
	// TriangleIsosceles indicates a Triangle where two sides have equal length
	TriangleIsosceles TriangleType = iota
	// TriangleRight indicates a Triangles where one angle is at 90 degrees
	TriangleRight
)

// Triangle is a basic triangular form; the "point" of the triangle is pointing to the top.
// The dimensions are controlled by the SpaceComponent.
type Triangle struct {
	TriangleType TriangleType

	BorderWidth float32
	BorderColor color.Color
}

// Texture always returns nil. Triangle is drawable without a Texture. This implements the Drawable interface.
func (Triangle) Texture() *gl.Texture { return nil }

// Width always returns 0. This implements the Drawable interface.
func (Triangle) Width() float32 { return 0 }

// Height always returns 0. This implements the Drawable interface.
func (Triangle) Height() float32 { return 0 }

// View always returns 0, 0, 1, 1. This implements the Drawable interface.
func (Triangle) View() (float32, float32, float32, float32) { return 0, 0, 1, 1 }

// Close does nothing, because there's no Texture on the GPU. This implements the Drawable interface.
func (Triangle) Close() {}

// Rectangle is a basic rectangular form; the dimensions are controlled via the `SpaceComponent`.
type Rectangle struct {
	BorderWidth float32
	BorderColor color.Color
}

// Texture always returns nil. Rectangle is drawable without a Texture. This implements the Drawable interface.
func (Rectangle) Texture() *gl.Texture { return nil }

// Width always returns 0. This implements the Drawable interface.
func (Rectangle) Width() float32 { return 0 }

// Height always returns 0. This implements the Drawable interface.
func (Rectangle) Height() float32 { return 0 }

// View always returns 0, 0, 1, 1. This implements the Drawable interface.
func (Rectangle) View() (float32, float32, float32, float32) { return 0, 0, 1, 1 }

// Close does nothing, because there's no Texture on the GPU. This implements the Drawable interface.
func (Rectangle) Close() {}

// Circle is a basic circular form; the dimensions / radius are controlled via the `SpaceComponent`.
// This was made possible by the shared knowledge of @hydroflame.
type Circle struct {
	BorderWidth float32
	BorderColor color.Color
	Arc         float32
}

// Texture always returns nil. Circle is drawable without a Texture. This implements the Drawable interface.
func (Circle) Texture() *gl.Texture { return nil }

// Width always returns 0. This implements the Drawable interface.
func (Circle) Width() float32 { return 0 }

// Height always returns 0. This implements the Drawable interface.
func (Circle) Height() float32 { return 0 }

// View always returns 0, 0, 1, 1. This implements the Drawable interface.
func (Circle) View() (float32, float32, float32, float32) { return 0, 0, 1, 1 }

// Close does nothing, because there's no Texture on the GPU. This implements the Drawable interface.
func (Circle) Close() {}

// Curve is a bezier curve defined by the points provided. These points are relative
// to the SpaceComponent provided, so moving that will move the Curve without
// adjusting the Points for the bezier curve. The first control point is the
// top-left corner of the space component, and the last control point is the
// bottom-right corner. To do third and fourth order, add points to Points.
type Curve struct {
	LineWidth float32
	Points    []engo.Point
}

// Texture always returns nil. Curve is drawable without a Texture. This implements the Drawable interface.
func (Curve) Texture() *gl.Texture { return nil }

// Width always returns 0. This implements the Drawable interface.
func (Curve) Width() float32 { return 0 }

// Height always returns 0. This implements the Drawable interface.
func (Curve) Height() float32 { return 0 }

// View always returns 0, 0, 1, 1. This implements the Drawable interface.
func (Curve) View() (float32, float32, float32, float32) { return 0, 0, 1, 1 }

// Close does nothing, because there's no Texture on the GPU. This implements the Drawable interface.
func (Curve) Close() {}

// ComplexTriangles is a complex form, made out of triangles.
type ComplexTriangles struct {
	// Points are the points the form is made of. They should be defined on a scale from 0 to 1, where (0, 0) starts
	// at the top-left of the area (as defined by the `SpaceComponent`.
	// You should use a multitude of 3 points, because each triangle is defined by defining 3 points.
	Points []engo.Point

	// BorderWidth indicates the width of the border, around EACH of the Triangles it is made out of
	BorderWidth float32
	// BorderColor indicates the color of the border, around EACH of the Triangles it is made out of
	BorderColor color.Color
}

// Texture always returns nil. ComplexTriangles is drawable without a Texture. This implements the Drawable interface.
func (ComplexTriangles) Texture() *gl.Texture { return nil }

// Width always returns 0. This implements the Drawable interface.
func (ComplexTriangles) Width() float32 { return 0 }

// Height always returns 0. This implements the Drawable interface.
func (ComplexTriangles) Height() float32 { return 0 }

// View always returns 0, 0, 1, 1. This implements the Drawable interface.
func (ComplexTriangles) View() (float32, float32, float32, float32) { return 0, 0, 1, 1 }

// Close does nothing, because there's no Texture on the GPU. This implements the Drawable interface.
func (ComplexTriangles) Close() {}
common/render_shapes.go
0.916489
0.667229
render_shapes.go
starcoder
package utils

import (
	crand "crypto/rand"
	"fmt"
	"math/rand"
	"net/url"
	"strings"
	"time"
	"unicode"
)

// IsStringAbsURL checks a string can be parsed as a URL and that is IsAbs and if it can't it returns an error
// describing why.
func IsStringAbsURL(input string) (err error) {
	parsedURL, err := url.Parse(input)
	if err != nil {
		return fmt.Errorf("could not parse '%s' as a URL", input)
	}

	if !parsedURL.IsAbs() {
		return fmt.Errorf("the url '%s' is not absolute because it doesn't start with a scheme like 'http://' or 'https://'", input)
	}

	return nil
}

// IsStringAlphaNumeric returns false if any rune in the string is not alpha-numeric.
func IsStringAlphaNumeric(input string) bool {
	for _, r := range input {
		if !unicode.IsLetter(r) && !unicode.IsNumber(r) {
			return false
		}
	}

	return true
}

// IsStringInSlice checks if a single string is in a slice of strings.
func IsStringInSlice(needle string, haystack []string) (inSlice bool) {
	for _, b := range haystack {
		if b == needle {
			return true
		}
	}

	return false
}

// IsStringInSliceSuffix checks if the needle string has one of the suffixes in the haystack.
func IsStringInSliceSuffix(needle string, haystack []string) (hasSuffix bool) {
	for _, straw := range haystack {
		if strings.HasSuffix(needle, straw) {
			return true
		}
	}

	return false
}

// IsStringInSliceFold checks if a single string is in a slice of strings but uses strings.EqualFold to compare them.
func IsStringInSliceFold(needle string, haystack []string) (inSlice bool) {
	for _, b := range haystack {
		if strings.EqualFold(b, needle) {
			return true
		}
	}

	return false
}

// IsStringInSliceContains checks if a single string is in an array of strings.
func IsStringInSliceContains(needle string, haystack []string) (inSlice bool) {
	for _, b := range haystack {
		if strings.Contains(needle, b) {
			return true
		}
	}

	return false
}

// SliceString splits a string s into a slice of strings where each item is at most d bytes long. The final item
// contains the remainder when len(s) is not a multiple of d. A d of less than 1 yields nil.
//
// Fix over the previous implementation: when len(s) < d the old quotient-based loop never executed, so the whole
// string was silently dropped and nil was returned; it also divided by zero when d == 0.
func SliceString(s string, d int) (array []string) {
	if d < 1 {
		return nil
	}

	// Walk the string in strides of d, clamping the final chunk to len(s).
	for i := 0; i < len(s); i += d {
		end := i + d
		if end > len(s) {
			end = len(s)
		}

		array = append(array, s[i:end])
	}

	return array
}

func isStringSlicesDifferent(a, b []string, method func(s string, b []string) bool) (different bool) {
	if len(a) != len(b) {
		return true
	}

	for _, s := range a {
		if !method(s, b) {
			return true
		}
	}

	return false
}

// IsStringSlicesDifferent checks two slices of strings and on the first occurrence of a string item not existing in the
// other slice returns true, otherwise returns false.
func IsStringSlicesDifferent(a, b []string) (different bool) {
	return isStringSlicesDifferent(a, b, IsStringInSlice)
}

// IsStringSlicesDifferentFold checks two slices of strings and on the first occurrence of a string item not existing in
// the other slice (case insensitive) returns true, otherwise returns false.
func IsStringSlicesDifferentFold(a, b []string) (different bool) {
	return isStringSlicesDifferent(a, b, IsStringInSliceFold)
}

// StringSlicesDelta takes a before and after []string and compares them returning a added and removed []string.
func StringSlicesDelta(before, after []string) (added, removed []string) {
	for _, s := range before {
		if !IsStringInSlice(s, after) {
			removed = append(removed, s)
		}
	}

	for _, s := range after {
		if !IsStringInSlice(s, before) {
			added = append(added, s)
		}
	}

	return added, removed
}

// RandomString returns a random string with a given length with values from the provided characters. When crypto is set
// to false we use math/rand and when it's set to true we use crypto/rand. The crypto option should always be set to true
// excluding when the task is time sensitive and would not benefit from extra randomness.
func RandomString(n int, characters string, crypto bool) (randomString string) {
	return string(RandomBytes(n, characters, crypto))
}

// RandomBytes returns a random []byte with a given length with values from the provided characters. When crypto is set
// to false we use math/rand and when it's set to true we use crypto/rand. The crypto option should always be set to true
// excluding when the task is time sensitive and would not benefit from extra randomness.
//
// NOTE(review): characters is assumed to be at most 256 bytes long — byte(len(characters)) truncates beyond that, and
// the modulo introduces a slight bias unless len(characters) divides 256 evenly. Confirm callers are fine with both.
func RandomBytes(n int, characters string, crypto bool) (bytes []byte) {
	bytes = make([]byte, n)

	if crypto {
		_, _ = crand.Read(bytes)
	} else {
		_, _ = rand.Read(bytes) //nolint:gosec // As this is an option when using this function it's not necessary to be concerned about this.
	}

	// Map each random byte onto the permitted character set.
	for i, b := range bytes {
		bytes[i] = characters[b%byte(len(characters))]
	}

	return bytes
}

// StringHTMLEscape escapes chars for a HTML body.
func StringHTMLEscape(input string) (output string) {
	return htmlEscaper.Replace(input)
}

func init() {
	// NOTE(review): rand.Seed is deprecated as of Go 1.20 (the global source is auto-seeded); kept for
	// compatibility with the toolchain this file currently targets — confirm before removing.
	rand.Seed(time.Now().UnixNano())
}
internal/utils/strings.go
0.767254
0.410756
strings.go
starcoder
package types import ( "regexp" ) const ( IntType Datatype = iota FloatType StringType ObjectType BooleanType ArrayType ) type Datatype int type Column struct { Name string items orderedMapType Dtype Datatype } // Returns a list of Items func (c *Column) Items() []interface{} { return c.items.ToSlice() } // Inserts a given value at the given index. // If the index is beyond the length of keys, // it fills the gap in both Items and keys with nil and "" respectively func (c *Column) insert(index int, value interface{}) { nextIndex := len(c.items) if nextIndex <= index { for i := nextIndex; i <= index; i++ { // FIXME: concurrency possible c.items[i] = nil } } c.items[index] = value } // Deletes many indices at once func (c *Column) deleteMany(indices []int) { for _, i := range indices { // FIXME: concurrency possible delete(c.items, i) } } // Returns an array of booleans corresponding in position to each item, // true if item is greater than operand or else false // The operand can reference a constant, or a Col func (c *Column) GreaterThan(operand float64) filterType { count := len(c.items) flags := make(filterType, count) for i, v := range c.items { // FIXME: concurrency possible switch v := v.(type) { case int: flags[i] = float64(v) > operand case int8: flags[i] = float64(v) > operand case int16: flags[i] = float64(v) > operand case int32: flags[i] = float64(v) > operand case int64: flags[i] = float64(v) > operand case float32: flags[i] = float64(v) > operand case float64: flags[i] = v > operand default: flags[i] = false } } return flags } // Returns an array of booleans corresponding in position to each item, // true if item is greater than or equal to the operand or else false // The operand can reference a constant, or a Col func (c *Column) GreaterOrEquals(operand float64) filterType { count := len(c.items) flags := make(filterType, count) for i, v := range c.items { // FIXME: concurrency possible switch v := v.(type) { case int: flags[i] = float64(v) >= 
operand case int8: flags[i] = float64(v) >= operand case int16: flags[i] = float64(v) >= operand case int32: flags[i] = float64(v) >= operand case int64: flags[i] = float64(v) >= operand case float32: flags[i] = float64(v) >= operand case float64: flags[i] = v >= operand default: flags[i] = false } } return flags } // Returns an array of booleans corresponding in position to each item, // true if item is less than operand or else false // The operand can reference a constant, or a Col func (c *Column) LessThan(operand float64) filterType { count := len(c.items) flags := make(filterType, count) for i, v := range c.items { // FIXME: concurrency possible switch v := v.(type) { case int: flags[i] = float64(v) < operand case int8: flags[i] = float64(v) < operand case int16: flags[i] = float64(v) < operand case int32: flags[i] = float64(v) < operand case int64: flags[i] = float64(v) < operand case float32: flags[i] = float64(v) < operand case float64: flags[i] = v < operand default: flags[i] = false } } return flags } // Returns an array of booleans corresponding in position to each item, // true if item is less than or equal to the operand or else false // The operand can reference a constant, or a Col func (c *Column) LessOrEquals(operand float64) filterType { count := len(c.items) flags := make(filterType, count) for i, v := range c.items { // FIXME: concurrency possible switch v := v.(type) { case int: flags[i] = float64(v) <= operand case int8: flags[i] = float64(v) <= operand case int16: flags[i] = float64(v) <= operand case int32: flags[i] = float64(v) <= operand case int64: flags[i] = float64(v) <= operand case float32: flags[i] = float64(v) <= operand case float64: flags[i] = v <= operand default: flags[i] = false } } return flags } // Returns an array of booleans corresponding in position to each item, // true if item is equal to operand or else false // The operand can reference a constant, or a Col func (c *Column) Equals(operand interface{}) filterType { 
count := len(c.items) flags := make(filterType, count) for i, v := range c.items { // FIXME: concurrency possible flags[i] = v == operand } return flags } // Returns an array of booleans corresponding in position to each item, // true if item is like the regex expression or else false func (c *Column) IsLike(pattern *regexp.Regexp) filterType { count := len(c.items) flags := make(filterType, count) for i, v := range c.items { // FIXME: concurrency possible switch v := v.(type) { case string: flags[i] = pattern.MatchString(v) case []byte: flags[i] = pattern.Match(v) default: flags[i] = false } } return flags } // Returns transformer method specific to this column to transform its values from one thing to another // It is passed a function expecting a value any type func (c *Column) Tx(op rowWiseFunc) transformation { return transformation{c.Name: op} } // Returns an aggregation function specific to this column to // merge its values into a single value. It works when GroupBy is used func (c *Column) Agg(aggFunc aggregateFunc) aggregation { return aggregation{c.Name: aggFunc} } // Returns a Sort Option that is attached to this column, for the given order func (c *Column) Order(option sortOrder) sortOption { return sortOption{c.Name: option} }
projects/dataframe/types/column.go
0.660829
0.557123
column.go
starcoder
package jwt import ( "crypto/rsa" "time" "gopkg.in/square/go-jose.v2" "gopkg.in/square/go-jose.v2/jwt" ) // Encoder is an interface which contains the methods to encode and decode JWT tokens // given the token claims. It allows to abstract from the details of cryptography, focusing // only in the token contents. type Encoder interface { // Signed encodes a new JWT token which is signed according to the JWS specification and // contains the given claims. // The algorithm and key used is left to implementations. Signed(claims interface{}) (string, error) // Encrypted encodes a new JWT token which is signed and encrypted according to the JWS and // JWE specifications, containing the given claims. // The algorithm and key used is left to implementations. Encrypted(claims interface{}) (string, error) // ValidateSigned decodes the given JWT signed token. It validates the signature, checks that // it has not expired and parses the claims into the given destination. // Tokens are expected with the same algorithm than the Signed method. ValidateSigned(token string, dest interface{}) error // ValidateEncrypted decodes the given JWT signed and encrypted token. It validates the signature, // checks that it has not expired and parses the claims into the given destination. // Tokens are expected with the same algorithm than the Encrypted method. ValidateEncrypted(token string, dest interface{}) error // JWKS obtains the JWK Set definition of the public key used by the encoder to sign tokens, // so other services can validate the token signature. JWKS() *jose.JSONWebKeySet } type jwtEncoder struct { PrivateKey *rsa.PrivateKey KeyID string encrypter jose.Encrypter signer jose.Signer } // NewEncoder creates a new Encoder which uses the given RSA private key to sign and encrypt // tokens, using the RS512 and RSA_OAEP algorithms. 
func NewEncoder(privateKey *rsa.PrivateKey, keyID string) (Encoder, error) { encoder := &jwtEncoder{ PrivateKey: privateKey, KeyID: keyID, } // Signer used to sign JWT tokens. Using RS512 algorithm so the Authorization Server // signs each token with its private key and Resource Servers can validate the token // signature using the Authorization Server's public key. signer, err := jose.NewSigner( jose.SigningKey{ Algorithm: jose.RS256, Key: encoder.privateKeyJWKSig(), }, (&jose.SignerOptions{}).WithType("JWT")) if err != nil { return nil, err } encoder.signer = signer // Encrypter used to encrypt secret JWT tokens. Using the Authorization Server's public // key to encrypt tokens, we ensure that those tokens can only be decrypted with the // private key, so only the Authorization Server can get the contents. encrypter, err := jose.NewEncrypter( jose.A128GCM, jose.Recipient{ Algorithm: jose.RSA_OAEP, Key: encoder.publicKeyJWKEnc(), }, (&jose.EncrypterOptions{}).WithType("JWT").WithContentType("JWT")) if err != nil { return nil, err } encoder.encrypter = encrypter return encoder, nil } // Signed creates a new JWT token which is signed according to the JWS specification and // contains the given claims. // It uses the RS512 algorithm with the configured RSA key. func (je *jwtEncoder) Signed(claims interface{}) (string, error) { raw, err := jwt.Signed(je.signer).Claims(claims).CompactSerialize() if err != nil { return "", err } return raw, nil } // Encrypted creates a new JWT token which is signed and encrypted according to the JWS and // JWE specifications, containing the given claims. // The algorithm used is RSA_OAEP with A128M encryption. func (je *jwtEncoder) Encrypted(claims interface{}) (string, error) { raw, err := jwt.SignedAndEncrypted(je.signer, je.encrypter).Claims(claims).CompactSerialize() if err != nil { return "", err } return raw, nil } // ValidateSigned decodes the given JWT signed token. 
It validates the signature, checks that // it has not expired and parses the claims into the given destination. // Tokens are expected with the same algorithm than the Signed method. func (je *jwtEncoder) ValidateSigned(token string, dest interface{}) error { decoded, err := jwt.ParseSigned(token) if err != nil { return err } if err := decoded.Claims(je.publicKeyJWKSig(), &dest); err != nil { return err } if claims, ok := dest.(jwt.Claims); ok { // Validate that token have not expired. return claims.Validate(jwt.Expected{Time: time.Now()}) } return nil } // ValidateEncrypted decodes the given JWT signed and encrypted token. It validates the signature, // checks that it has not expired and parses the claims into the given destination. // Tokens are expected with the same algorithm than the Encrypted method. func (je *jwtEncoder) ValidateEncrypted(token string, dest interface{}) error { encrypted, err := jwt.ParseSignedAndEncrypted(token) if err != nil { return err } decoded, err := encrypted.Decrypt(je.privateKeyJWKEnc()) if err != nil { return err } if err := decoded.Claims(je.publicKeyJWKSig(), &dest); err != nil { return err } if claims, ok := dest.(jwt.Claims); ok { // Validate that token have not expired. 
return claims.Validate(jwt.Expected{Time: time.Now()}) } return nil } func (je *jwtEncoder) privateKeyJWKSig() *jose.JSONWebKey { return &jose.JSONWebKey{ Key: je.PrivateKey, KeyID: je.KeyID, Algorithm: string(jose.RS256), Use: "sig", } } func (je *jwtEncoder) publicKeyJWKSig() *jose.JSONWebKey { return &jose.JSONWebKey{ Key: &je.PrivateKey.PublicKey, KeyID: je.KeyID, Algorithm: string(jose.RS256), Use: "sig", } } func (je *jwtEncoder) privateKeyJWKEnc() *jose.JSONWebKey { return &jose.JSONWebKey{ Key: je.PrivateKey, KeyID: je.KeyID, Algorithm: string(jose.RSA_OAEP), Use: "enc", } } func (je *jwtEncoder) publicKeyJWKEnc() *jose.JSONWebKey { return &jose.JSONWebKey{ Key: &je.PrivateKey.PublicKey, KeyID: je.KeyID, Algorithm: string(jose.RSA_OAEP), Use: "enc", } } // JWKS obtains the JWK Set definition of the public key used by the encoder to sign tokens, // so other services can validate the token signature. func (je *jwtEncoder) JWKS() *jose.JSONWebKeySet { return &jose.JSONWebKeySet{ Keys: []jose.JSONWebKey{*je.publicKeyJWKSig()}, } }
jwt/jwt.go
0.80329
0.535098
jwt.go
starcoder
package zap import ( "time" "github.com/liranbg/uberzap/zapcore" ) // Array constructs a field with the given key and ArrayMarshaler. It provides // a flexible, but still type-safe and efficient, way to add array-like types // to the logging context. The struct's MarshalLogArray method is called lazily. func Array(key string, val zapcore.ArrayMarshaler) Field { return Field{Key: key, Type: zapcore.ArrayMarshalerType, Interface: val} } // Bools constructs a field that carries a slice of bools. func Bools(key string, bs []bool) Field { return Array(key, bools(bs)) } // ByteStrings constructs a field that carries a slice of []byte, each of which // must be UTF-8 encoded text. func ByteStrings(key string, bss [][]byte) Field { return Array(key, byteStringsArray(bss)) } // Complex128s constructs a field that carries a slice of complex numbers. func Complex128s(key string, nums []complex128) Field { return Array(key, complex128s(nums)) } // Complex64s constructs a field that carries a slice of complex numbers. func Complex64s(key string, nums []complex64) Field { return Array(key, complex64s(nums)) } // Durations constructs a field that carries a slice of time.Durations. func Durations(key string, ds []time.Duration) Field { return Array(key, durations(ds)) } // Float64s constructs a field that carries a slice of floats. func Float64s(key string, nums []float64) Field { return Array(key, float64s(nums)) } // Float32s constructs a field that carries a slice of floats. func Float32s(key string, nums []float32) Field { return Array(key, float32s(nums)) } // Ints constructs a field that carries a slice of integers. func Ints(key string, nums []int) Field { return Array(key, ints(nums)) } // Int64s constructs a field that carries a slice of integers. func Int64s(key string, nums []int64) Field { return Array(key, int64s(nums)) } // Int32s constructs a field that carries a slice of integers. 
func Int32s(key string, nums []int32) Field { return Array(key, int32s(nums)) } // Int16s constructs a field that carries a slice of integers. func Int16s(key string, nums []int16) Field { return Array(key, int16s(nums)) } // Int8s constructs a field that carries a slice of integers. func Int8s(key string, nums []int8) Field { return Array(key, int8s(nums)) } // Strings constructs a field that carries a slice of strings. func Strings(key string, ss []string) Field { return Array(key, stringArray(ss)) } // Times constructs a field that carries a slice of time.Times. func Times(key string, ts []time.Time) Field { return Array(key, times(ts)) } // Uints constructs a field that carries a slice of unsigned integers. func Uints(key string, nums []uint) Field { return Array(key, uints(nums)) } // Uint64s constructs a field that carries a slice of unsigned integers. func Uint64s(key string, nums []uint64) Field { return Array(key, uint64s(nums)) } // Uint32s constructs a field that carries a slice of unsigned integers. func Uint32s(key string, nums []uint32) Field { return Array(key, uint32s(nums)) } // Uint16s constructs a field that carries a slice of unsigned integers. func Uint16s(key string, nums []uint16) Field { return Array(key, uint16s(nums)) } // Uint8s constructs a field that carries a slice of unsigned integers. func Uint8s(key string, nums []uint8) Field { return Array(key, uint8s(nums)) } // Uintptrs constructs a field that carries a slice of pointer addresses. func Uintptrs(key string, us []uintptr) Field { return Array(key, uintptrs(us)) } // Errors constructs a field that carries a slice of errors. 
func Errors(key string, errs []error) Field { return Array(key, errArray(errs)) } type bools []bool func (bs bools) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range bs { arr.AppendBool(bs[i]) } return nil } type byteStringsArray [][]byte func (bss byteStringsArray) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range bss { arr.AppendByteString(bss[i]) } return nil } type complex128s []complex128 func (nums complex128s) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendComplex128(nums[i]) } return nil } type complex64s []complex64 func (nums complex64s) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendComplex64(nums[i]) } return nil } type durations []time.Duration func (ds durations) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range ds { arr.AppendDuration(ds[i]) } return nil } type float64s []float64 func (nums float64s) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendFloat64(nums[i]) } return nil } type float32s []float32 func (nums float32s) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendFloat32(nums[i]) } return nil } type ints []int func (nums ints) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendInt(nums[i]) } return nil } type int64s []int64 func (nums int64s) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendInt64(nums[i]) } return nil } type int32s []int32 func (nums int32s) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendInt32(nums[i]) } return nil } type int16s []int16 func (nums int16s) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendInt16(nums[i]) } return nil } type int8s []int8 func (nums int8s) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendInt8(nums[i]) } return nil } type stringArray []string func (ss 
stringArray) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range ss { arr.AppendString(ss[i]) } return nil } type times []time.Time func (ts times) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range ts { arr.AppendTime(ts[i]) } return nil } type uints []uint func (nums uints) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendUint(nums[i]) } return nil } type uint64s []uint64 func (nums uint64s) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendUint64(nums[i]) } return nil } type uint32s []uint32 func (nums uint32s) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendUint32(nums[i]) } return nil } type uint16s []uint16 func (nums uint16s) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendUint16(nums[i]) } return nil } type uint8s []uint8 func (nums uint8s) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendUint8(nums[i]) } return nil } type uintptrs []uintptr func (nums uintptrs) MarshalLogArray(arr zapcore.ArrayEncoder) error { for i := range nums { arr.AppendUintptr(nums[i]) } return nil }
array.go
0.803752
0.543348
array.go
starcoder
package util import "time" var ( // Combinatorics is a namespace containing combinatoric functions. Combinatorics = combinatorics{} ) type combinatorics struct{} // CombinationsOfInt returns the "power set" of values less the empty set. // Use "combinations" when the order of the resulting sets do not matter. func (c combinatorics) CombinationsOfInt(values []int) [][]int { possibleValues := Math.PowOfInt(2, uint(len(values))) //less the empty entry output := make([][]int, possibleValues-1) for x := 0; x < possibleValues-1; x++ { row := []int{} for i := 0; i < len(values); i++ { y := 1 << uint(i) if y&x == 0 && y != x { row = append(row, values[i]) } } if len(row) > 0 { output[x] = row } } return output } // CombinationsOfFloat returns the "power set" of values less the empty set. // Use "combinations" when the order of the resulting sets do not matter. func (c combinatorics) CombinationsOfFloat(values []float64) [][]float64 { possibleValues := Math.PowOfInt(2, uint(len(values))) //less the empty entry output := make([][]float64, possibleValues-1) for x := 0; x < possibleValues-1; x++ { row := []float64{} for i := 0; i < len(values); i++ { y := 1 << uint(i) if y&x == 0 && y != x { row = append(row, values[i]) } } if len(row) > 0 { output[x] = row } } return output } // CombinationsOfString returns the "power set" of values less the empty set. // Use "combinations" when the order of the resulting sets do not matter. func (c combinatorics) CombinationsOfString(values []string) [][]string { possibleValues := Math.PowOfInt(2, uint(len(values))) //less the empty entry output := make([][]string, possibleValues-1) for x := 0; x < possibleValues-1; x++ { row := []string{} for i := 0; i < len(values); i++ { y := 1 << uint(i) if y&x == 0 && y != x { row = append(row, values[i]) } } if len(row) > 0 { output[x] = row } } return output } // PermutationsOfInt returns the possible orderings of the values array. // Use "permutations" when order matters. 
func (c combinatorics) PermutationsOfInt(values []int) [][]int { if len(values) == 1 { return [][]int{values} } output := [][]int{} for x := 0; x < len(values); x++ { workingValues := make([]int, len(values)) copy(workingValues, values) value := workingValues[x] pre := workingValues[0:x] post := workingValues[x+1 : len(values)] joined := append(pre, post...) for _, inner := range c.PermutationsOfInt(joined) { output = append(output, append([]int{value}, inner...)) } } return output } // PermutationsOfFloat returns the possible orderings of the values array. // Use "permutations" when order matters. func (c combinatorics) PermutationsOfFloat(values []float64) [][]float64 { if len(values) == 1 { return [][]float64{values} } output := [][]float64{} for x := 0; x < len(values); x++ { workingValues := make([]float64, len(values)) copy(workingValues, values) value := workingValues[x] pre := workingValues[0:x] post := workingValues[x+1 : len(values)] joined := append(pre, post...) for _, inner := range c.PermutationsOfFloat(joined) { output = append(output, append([]float64{value}, inner...)) } } return output } // PermutationsOfString returns the possible orderings of the values array. // Use "permutations" when order matters. func (c combinatorics) PermutationsOfString(values []string) [][]string { if len(values) == 1 { return [][]string{values} } output := [][]string{} for x := 0; x < len(values); x++ { workingValues := make([]string, len(values)) copy(workingValues, values) value := workingValues[x] pre := workingValues[0:x] post := workingValues[x+1 : len(values)] joined := append(pre, post...) for _, inner := range c.PermutationsOfString(joined) { output = append(output, append([]string{value}, inner...)) } } return output } // PermuteDistributions returns all the possible ways you can split a total among buckets completely. 
func (c combinatorics) PermuteDistributions(total, buckets int) [][]int { return c.PermuteDistributionsFromExisting(total, buckets, []int{}) } // PermuteDistributionsFromExisting returns all the possible ways you can split the total among additional buckets // given an existing distribution func (c combinatorics) PermuteDistributionsFromExisting(total, buckets int, existing []int) [][]int { output := [][]int{} existingLength := len(existing) existingSum := Math.SumOfInt(existing) remainder := total - existingSum if buckets == 1 { newExisting := make([]int, existingLength+1) copy(newExisting, existing) newExisting[existingLength] = remainder output = append(output, newExisting) return output } for x := 0; x <= remainder; x++ { newExisting := make([]int, existingLength+1) copy(newExisting, existing) newExisting[existingLength] = x results := c.PermuteDistributionsFromExisting(total, buckets-1, newExisting) output = append(output, results...) } return output } // RandomInt returns a random int from an array. func (c combinatorics) RandomInt(values []int) int { if len(values) == 0 { return 0 } if len(values) == 1 { return values[0] } return values[RandomProvider().Intn(len(values))] } // RandomFloat64 returns a random int from an array. func (c combinatorics) RandomFloat64(values []float64) float64 { if len(values) == 0 { return 0 } if len(values) == 1 { return values[0] } return values[RandomProvider().Intn(len(values))] } // RandomFloat64 returns a random int from an array. func (c combinatorics) RandomString(values []string) string { if len(values) == 0 { return "" } if len(values) == 1 { return values[0] } return values[RandomProvider().Intn(len(values))] } // RandomFloat64 returns a random int from an array. func (c combinatorics) RandomTime(values []time.Time) time.Time { if len(values) == 0 { return time.Time{} } if len(values) == 1 { return values[0] } return values[RandomProvider().Intn(len(values))] }
golang/vendor/github.com/blendlabs/go-util/combinatorics.go
0.683736
0.494934
combinatorics.go
starcoder
package lsap import "github.com/ryanjoneil/ap" // LSAP solves linear sum assignment problems. type LSAP struct { M int64 // A large cost to avoid using edges (default: math.Pow(1000, 3)) n int // n of assignment problem a [][]int64 // a[i][j] = cost of assigning row i to column j u []int64 // u[i] = dual price for row i v []int64 // v[j] = dual price for column j z int64 // Objective value f []int // f[i] = column assigned to row i, -1 if unassigned fBar []int // fBar[j] = row assigned to column j, -1 if unassigned p []int // First column of row i not yet examined in phase 2 c []int // c[j] = row preceding column j in current alternating path pi []int64 // pi[j] = min { a[i,j] - u[i] - v[j] | i in lr, i != fBar[j] } } // Assign returns an optimal assignment for an LSAP. func (a *LSAP) Assign() ap.Permutation { a.solve() p := make(ap.Permutation, a.n) copy(p, a.f) return p } // Cost returns the objective value of an LSAP. If Assign is called prior to // calling Cost, then the value is minimal. func (a *LSAP) Cost() int64 { return a.z } // DualPrices returns the dual prices associated with the assignment constraints // of the U and V sets. If Assign is called prior, then complementary slackness // conditions hold. func (a *LSAP) DualPrices() ap.Int64DualPrices { u := make([]int64, a.n) v := make([]int64, a.n) copy(u, a.u) copy(v, a.v) return ap.Int64DualPrices{U: u, V: v} } // ReducedCost returns the reduced cost of an edge. If Assign is called prior, // then the reduced cost of a nonbasic (not in the optimal assignment) edge is // the additional cost introducing that edge would incur. func (a *LSAP) ReducedCost(u, v int) int64 { return a.a[u][v] - a.u[u] - a.v[v] } // Remove takes an edge out of the solution. The edge's cost is set to the M // value of the LSAP struct to discourage its use in subsequent calls to Assign. func (a *LSAP) Remove(i, j int) { if a.f[i] == j { a.f[i] = -1 a.fBar[j] = -1 } a.a[i][j] = a.M }
lsap/lsap.go
0.671686
0.431285
lsap.go
starcoder
package capnp // An Address is an index inside a segment's data (in bytes). type Address uint32 // addSize returns the address a+sz. func (a Address) addSize(sz Size) Address { return a.element(1, sz) } // element returns the address a+i*sz. func (a Address) element(i int32, sz Size) Address { return a + Address(sz.times(i)) } // addOffset returns the address a+o. func (a Address) addOffset(o DataOffset) Address { return a + Address(o) } // A Size is a size (in bytes). type Size uint32 // wordSize is the number of bytes in a Cap'n Proto word. const wordSize Size = 8 // maxSize is the maximum representable size. const maxSize Size = 1<<32 - 1 // times returns the size sz*n. func (sz Size) times(n int32) Size { result := int64(sz) * int64(n) if result > int64(maxSize) { panic(errOverlarge) } return Size(result) } // padToWord adds padding to sz to make it divisible by wordSize. func (sz Size) padToWord() Size { n := Size(wordSize - 1) return (sz + n) &^ n } // DataOffset is an offset in bytes from the beginning of a struct's data section. type DataOffset uint32 // ObjectSize records section sizes for a struct or list. type ObjectSize struct { DataSize Size PointerCount uint16 } // isZero reports whether sz is the zero size. func (sz ObjectSize) isZero() bool { return sz.DataSize == 0 && sz.PointerCount == 0 } // isOneByte reports whether the object size is one byte (for Text/Data element sizes). func (sz ObjectSize) isOneByte() bool { return sz.DataSize == 1 && sz.PointerCount == 0 } // isValid reports whether sz's fields are in range. func (sz ObjectSize) isValid() bool { return sz.DataSize <= 0xffff*wordSize } // pointerSize returns the number of bytes the pointer section occupies. func (sz ObjectSize) pointerSize() Size { return wordSize.times(int32(sz.PointerCount)) } // totalSize returns the number of bytes that the object occupies. 
func (sz ObjectSize) totalSize() Size { return sz.DataSize + sz.pointerSize() } // dataWordCount returns the number of words in the data section. func (sz ObjectSize) dataWordCount() int32 { if sz.DataSize%wordSize != 0 { panic("data size not aligned by word") } return int32(sz.DataSize / wordSize) } // totalWordCount returns the number of words that the object occupies. func (sz ObjectSize) totalWordCount() int32 { return sz.dataWordCount() + int32(sz.PointerCount) } // BitOffset is an offset in bits from the beginning of a struct's data section. type BitOffset uint32 // offset returns the equivalent byte offset. func (bit BitOffset) offset() DataOffset { return DataOffset(bit / 8) } // mask returns the bitmask for the bit. func (bit BitOffset) mask() byte { return byte(1 << (bit % 8)) }
address.go
0.878393
0.430686
address.go
starcoder
package main

import "sort"

// dungeonPath is a pathfinding context over raw dungeon terrain.
// Walls are passable but cost wcost (if positive) or 4, so paths
// prefer free cells while still being able to tunnel.
type dungeonPath struct {
	dungeon   *dungeon
	neighbors [8]position // scratch buffer reused across Neighbors calls
	wcost     int
}

// Neighbors returns all valid adjacent positions (8-connected).
func (dp *dungeonPath) Neighbors(pos position) []position {
	nb := dp.neighbors[:0]
	return pos.Neighbors(nb, position.valid)
}

// Cost returns the step cost into "to": expensive for walls, 1 otherwise.
func (dp *dungeonPath) Cost(from, to position) int {
	if dp.dungeon.Cell(to).T == WallCell {
		if dp.wcost > 0 {
			return dp.wcost
		}
		return 4
	}
	return 1
}

// Estimation is the A* heuristic (grid distance).
func (dp *dungeonPath) Estimation(from, to position) int {
	return from.Distance(to)
}

// playerPath is the pathfinding context for player travel commands.
// It only walks through explored cells the player believes passable.
type playerPath struct {
	game      *game
	neighbors [8]position
}

// Neighbors returns adjacent cells the player may step into: explored,
// apparently-free (accounting for WrongWall illusions, or any cell when
// digging), and not covered by a visible fire cloud. Confusion restricts
// movement to the four cardinal directions.
func (pp *playerPath) Neighbors(pos position) []position {
	d := pp.game.Dungeon
	nb := pp.neighbors[:0]
	keep := func(npos position) bool {
		if cld, ok := pp.game.Clouds[npos]; ok && cld == CloudFire &&
			!(pp.game.WrongDoor[npos] || pp.game.WrongFoliage[npos]) {
			return false
		}
		return npos.valid() &&
			((d.Cell(npos).T == FreeCell && !pp.game.WrongWall[npos] ||
				d.Cell(npos).T == WallCell && pp.game.WrongWall[npos]) ||
				pp.game.Player.HasStatus(StatusDig)) &&
			d.Cell(npos).Explored
	}
	if pp.game.Player.HasStatus(StatusConfusion) {
		nb = pos.CardinalNeighbors(nb, keep)
	} else {
		nb = pos.Neighbors(nb, keep)
	}
	return nb
}

// Cost makes entering an excluded area from outside effectively
// impossible, while travel within an exclusion zone stays normal.
func (pp *playerPath) Cost(from, to position) int {
	if !pp.game.ExclusionsMap[from] && pp.game.ExclusionsMap[to] {
		return unreachable
	}
	return 1
}

// Estimation is the A* heuristic (grid distance).
func (pp *playerPath) Estimation(from, to position) int {
	return from.Distance(to)
}

// noisePath models sound propagation: any non-wall cell conducts noise.
type noisePath struct {
	game      *game
	neighbors [8]position
}

// Neighbors returns adjacent non-wall cells.
func (fp *noisePath) Neighbors(pos position) []position {
	nb := fp.neighbors[:0]
	d := fp.game.Dungeon
	keep := func(npos position) bool {
		return npos.valid() && d.Cell(npos).T != WallCell
	}
	return pos.Neighbors(nb, keep)
}

// Cost is uniform for noise propagation.
func (fp *noisePath) Cost(from, to position) int {
	return 1
}

// normalPath is ordinary walking over non-wall cells, honoring the
// player's confusion (cardinal moves only).
type normalPath struct {
	game      *game
	neighbors [8]position
}

// Neighbors returns adjacent non-wall cells, cardinal-only when confused.
func (np *normalPath) Neighbors(pos position) []position {
	nb := np.neighbors[:0]
	d := np.game.Dungeon
	keep := func(npos position) bool {
		return npos.valid() && d.Cell(npos).T != WallCell
	}
	if np.game.Player.HasStatus(StatusConfusion) {
		return pos.CardinalNeighbors(nb, keep)
	}
	return pos.Neighbors(nb, keep)
}

// Cost is uniform for normal movement.
func (np *normalPath) Cost(from, to position) int {
	return 1
}

// autoexplorePath is the context for auto-exploration: like playerPath
// but it refuses to enter excluded cells at all and does not require
// cells to be explored.
type autoexplorePath struct {
	game      *game
	neighbors [8]position
}

// Neighbors returns adjacent cells eligible for auto-explore travel.
// Starting inside an exclusion zone yields no neighbors (explore stops).
func (ap *autoexplorePath) Neighbors(pos position) []position {
	if ap.game.ExclusionsMap[pos] {
		return nil
	}
	d := ap.game.Dungeon
	nb := ap.neighbors[:0]
	keep := func(npos position) bool {
		if cld, ok := ap.game.Clouds[npos]; ok && cld == CloudFire &&
			!(ap.game.WrongDoor[npos] || ap.game.WrongFoliage[npos]) {
			// XXX little info leak
			return false
		}
		return npos.valid() &&
			(d.Cell(npos).T == FreeCell && !ap.game.WrongWall[npos] ||
				d.Cell(npos).T == WallCell && ap.game.WrongWall[npos]) &&
			!ap.game.ExclusionsMap[npos]
	}
	if ap.game.Player.HasStatus(StatusConfusion) {
		nb = pos.CardinalNeighbors(nb, keep)
	} else {
		nb = pos.Neighbors(nb, keep)
	}
	return nb
}

// Cost is uniform for auto-explore movement.
func (ap *autoexplorePath) Cost(from, to position) int {
	return 1
}

// monPath is the pathfinding context for a monster; wall is true for
// monsters that can burrow through walls (e.g. earth dragons).
type monPath struct {
	game      *game
	monster   *monster
	wall      bool
	neighbors [8]position
}

// Neighbors returns adjacent cells the monster can occupy, cardinal-only
// when the monster is confused.
func (mp *monPath) Neighbors(pos position) []position {
	nb := mp.neighbors[:0]
	d := mp.game.Dungeon
	keep := func(npos position) bool {
		return npos.valid() && (d.Cell(npos).T != WallCell || mp.wall)
	}
	if mp.monster.Status(MonsConfused) {
		return pos.CardinalNeighbors(nb, keep)
	}
	return pos.Neighbors(nb, keep)
}

// Cost penalizes stepping through walls when not hunting (6), squeezing
// past a lignified monster (8), or past any other monster (4); a plain
// step costs 1.
func (mp *monPath) Cost(from, to position) int {
	g := mp.game
	mons := g.MonsterAt(to)
	if !mons.Exists() {
		if mp.wall && g.Dungeon.Cell(to).T == WallCell && mp.monster.State != Hunting {
			return 6
		}
		return 1
	}
	if mons.Status(MonsLignified) {
		return 8
	}
	return 4
}

// Estimation is the A* heuristic (grid distance).
func (mp *monPath) Estimation(from, to position) int {
	return from.Distance(to)
}

// APath returns an A* path for monster m from "from" to "to",
// or nil if no path exists.
func (m *monster) APath(g *game, from, to position) []position {
	mp := &monPath{game: g, monster: m}
	if m.Kind == MonsEarthDragon {
		mp.wall = true
	}
	path, _, found := AstarPath(mp, from, to)
	if !found {
		return nil
	}
	return path
}

// PlayerPath returns an A* travel path for the player, or nil if
// no path exists.
func (g *game) PlayerPath(from, to position) []position {
	pp := &playerPath{game: g}
	path, _, found := AstarPath(pp, from, to)
	if !found {
		return nil
	}
	return path
}

// SortedNearestTo returns the reachable cells of "cells" sorted by
// increasing path cost to "to" (walls cost "unreachable" here, so the
// ordering reflects walkable distance).
func (g *game) SortedNearestTo(cells []position, to position) []position {
	ps := posSlice{}
	for _, pos := range cells {
		pp := &dungeonPath{dungeon: g.Dungeon, wcost: unreachable}
		_, cost, found := AstarPath(pp, pos, to)
		if found {
			ps = append(ps, posCost{pos, cost})
		}
	}
	sort.Sort(ps)
	sorted := []position{}
	for _, pc := range ps {
		sorted = append(sorted, pc.pos)
	}
	return sorted
}

// posCost pairs a position with its path cost for sorting.
type posCost struct {
	pos  position
	cost int
}

// posSlice implements sort.Interface ordering by ascending cost.
type posSlice []posCost

func (ps posSlice) Len() int           { return len(ps) }
func (ps posSlice) Swap(i, j int)      { ps[i], ps[j] = ps[j], ps[i] }
func (ps posSlice) Less(i, j int) bool { return ps[i].cost < ps[j].cost }
path.go
0.599368
0.511046
path.go
starcoder
package character

import (
	"fmt"
	"strconv"
	"strings"

	"github.com/ironarachne/world/pkg/measurement"
	"github.com/ironarachne/world/pkg/random"
	"github.com/ironarachne/world/pkg/words"
)

// Description is a description object of a character
type Description struct {
	Age               string
	Culture           string
	FirstName         string
	FullName          string
	GenderNoun        string
	Height            string
	Heraldry          string
	Hobby             string
	LastName          string
	Motivation        string
	NegativeTraits    string
	PositiveTraits    string
	PossessivePronoun string
	Profession        string
	Race              string
	SubjectPronoun    string
	Traits            string
	Weight            string
}

// allDescriptionTemplates returns the pool of text/template bodies used
// to render a character description. Each template consumes a Description.
func allDescriptionTemplates() []string {
	templates := []string{
		`{{.FullName}} is a {{.Age}}-year-old {{.Culture}} {{.Race}} {{.GenderNoun}}. {{caseStart .SubjectPronoun}} has {{.Traits}}. {{.FirstName}} is {{.Height}} tall and weighs {{.Weight}} lbs. {{caseStart .SubjectPronoun}} is motivated by {{.Motivation}}. While {{.SubjectPronoun}} is {{.PositiveTraits}}, {{.SubjectPronoun}} has also been described as {{.NegativeTraits}}. {{.FirstName}}'s hobby is {{.Hobby}} and {{.SubjectPronoun}} is {{pronoun .Profession}}. {{if .Heraldry}}{{caseStart .PossessivePronoun}} coat of arms is described "{{.Heraldry}}."{{end}} `,
		`{{.FullName}} is {{pronoun .Race}} {{.GenderNoun}} of {{.Age}} years. {{caseStart .SubjectPronoun}} is {{.Height}} and weighs {{.Weight}} pounds, with {{.Traits}}. Motivated by {{.Motivation}}, {{.FirstName}} is {{.PositiveTraits}}, as well as {{.NegativeTraits}}. {{caseStart .SubjectPronoun}} is {{pronoun .Profession}}{{if .Heraldry}} and has a coat of arms of "{{.Heraldry}}." {{else}}.{{end}}`,
		`The {{.PositiveTraits}} {{.FullName}} is {{.Age}} years old and {{pronoun .Race}} {{.GenderNoun}}. {{caseStart .SubjectPronoun}} is {{pronoun .Profession}}, and seeks {{.Motivation}}. {{caseStart .SubjectPronoun}} is {{.Height}} tall and weighs {{.Weight}} lbs. Despite a generally positive perception, some describe {{.FirstName}} as {{.NegativeTraits}}. {{caseStart .SubjectPronoun}} has {{.Traits}}.`,
	}
	return templates
}

// randomDescriptionTemplate picks one template body at random.
func randomDescriptionTemplate() (string, error) {
	all := allDescriptionTemplates()
	template, err := random.String(all)
	if err != nil {
		err = fmt.Errorf("Could not generate description template: %w", err)
		return "", err
	}
	return template, nil
}

// compileDescription assembles every textual fragment of the character
// into a Description ready to be fed to a template.
func (character Character) compileDescription() (Description, error) {
	description := Description{}

	description.Age = character.describeAge()
	description.Culture = character.describeCulture()
	description.FirstName = character.FirstName
	description.FullName = character.describeFullName()
	description.GenderNoun = character.describeGenderNoun()
	description.Height = character.describeHeight()
	description.Heraldry = character.describeHeraldry()
	description.Hobby = character.describeHobby()
	description.LastName = character.LastName
	description.Motivation = character.describeMotivation()
	description.NegativeTraits = character.describeNegativeTraits()
	description.PositiveTraits = character.describePositiveTraits()
	description.PossessivePronoun = character.Gender.PossessivePronoun
	description.Profession = character.describeProfession()
	description.Race = character.describeRace()
	description.SubjectPronoun = character.Gender.SubjectPronoun

	traits, err := character.describeTraits()
	if err != nil {
		// Fixed: previously wrapped with the copy-pasted message
		// "Could not generate description template", which misdescribed
		// this failure (it comes from describing physical traits).
		err = fmt.Errorf("Could not describe character traits: %w", err)
		return Description{}, err
	}
	description.Traits = traits
	description.Weight = character.describeWeight()

	return description, nil
}

// describeAge renders the age as a decimal string.
func (character Character) describeAge() string {
	return strconv.Itoa(character.Age)
}

// describeCulture returns the culture's adjectival form.
func (character Character) describeCulture() string {
	return character.Culture.Adjective
}

// describeFullName returns "[Title ]First Last".
func (character Character) describeFullName() string {
	description := ""
	if character.Title != "" {
		// strings.Title is deprecated but the replacement lives in
		// golang.org/x/text; kept to avoid a new dependency.
		description += strings.Title(character.Title) + " "
	}
	description += character.FirstName + " " + character.LastName
	return description
}

// describeGenderNoun picks the adolescent noun for children and infants,
// and the adult noun otherwise.
func (character Character) describeGenderNoun() string {
	if character.AgeCategory.Name == "child" || character.AgeCategory.Name == "infant" {
		return character.Gender.AdolescentNoun
	}
	return character.Gender.Noun
}

// describeHeight renders the height via the measurement package.
func (character Character) describeHeight() string {
	return measurement.ToString(character.Height)
}

// describeHeraldry returns the blazon text (empty if none).
func (character Character) describeHeraldry() string {
	return character.Heraldry.Blazon
}

// describeHobby returns the hobby's name.
func (character Character) describeHobby() string {
	return character.Hobby.Name
}

// describeMotivation returns the motivation text.
func (character Character) describeMotivation() string {
	return character.Motivation
}

// describeNegativeTraits joins the negative traits into one phrase.
func (character Character) describeNegativeTraits() string {
	return words.CombinePhrases(character.NegativeTraits)
}

// describePositiveTraits joins the positive traits into one phrase.
func (character Character) describePositiveTraits() string {
	return words.CombinePhrases(character.PositiveTraits)
}

// describeProfession returns the profession's name.
func (character Character) describeProfession() string {
	return character.Profession.Name
}

// describeRace returns the race's adjectival form.
func (character Character) describeRace() string {
	return character.Race.Adjective
}

// describeTraits renders each physical trait and joins them into a phrase.
func (character Character) describeTraits() (string, error) {
	traits := []string{}
	for _, i := range character.PhysicalTraits {
		t, err := i.ToString()
		if err != nil {
			err = fmt.Errorf("Failed to describe traits: %w", err)
			return "", err
		}
		traits = append(traits, t)
	}
	return words.CombinePhrases(traits), nil
}

// describeWeight renders the weight as a decimal string.
func (character Character) describeWeight() string {
	return strconv.Itoa(character.Weight)
}
pkg/character/description.go
0.776199
0.455622
description.go
starcoder
package condition

import (
	"fmt"

	"github.com/Jeffail/benthos/lib/log"
	"github.com/Jeffail/benthos/lib/metrics"
	"github.com/Jeffail/benthos/lib/types"
)

//------------------------------------------------------------------------------

// Register the resource condition constructor and its documentation with
// the package-level Constructors registry.
func init() {
	Constructors[TypeResource] = TypeSpec{
		constructor: NewResource,
		description: `
Resource is a condition type that runs a condition resource by its name. This condition allows you to run the same configured condition resource in multiple processors, or as a branch of another condition. For example, let's imagine we have two outputs, one of which only receives messages that satisfy a condition and the other receives the logical NOT of that same condition. In this example we can save ourselves the trouble of configuring the same condition twice by referring to it as a resource, like this: ` + "``` yaml" + ` output: type: broker broker: pattern: fan_out outputs: - type: foo foo: processors: - type: filter filter: type: resource resource: foobar - type: bar bar: processors: - type: filter filter: type: not not: type: resource resource: foobar resources: conditions: foobar: type: text text: operator: equals_cs part: 1 arg: filter me please ` + "```" + ``,
	}
}

//------------------------------------------------------------------------------

// Resource is a condition that returns the result of a condition resource.
// The resource is looked up by name on every Check call, so swapping the
// resource in the manager takes effect immediately.
type Resource struct {
	mgr  types.Manager
	name string
	log  log.Modular

	mCount       metrics.StatCounter // total Check calls
	mTrue        metrics.StatCounter // checks that returned true
	mFalse       metrics.StatCounter // checks that returned false
	mErr         metrics.StatCounter // any error during a check
	mErrNotFound metrics.StatCounter // resource missing at check time
}

// NewResource returns a resource condition.
// The named resource must already exist in the manager; construction fails
// fast rather than deferring the missing-resource error to Check time.
func NewResource(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	if _, err := mgr.GetCondition(conf.Resource); err != nil {
		return nil, fmt.Errorf("failed to obtain condition resource '%v': %v", conf.Resource, err)
	}
	return &Resource{
		mgr:  mgr,
		name: conf.Resource,
		log:  log,

		mCount:       stats.GetCounter("count"),
		mTrue:        stats.GetCounter("true"),
		mFalse:       stats.GetCounter("false"),
		mErrNotFound: stats.GetCounter("error_not_found"),
		mErr:         stats.GetCounter("error"),
	}, nil
}

//------------------------------------------------------------------------------

// Check attempts to check a message part against a configured condition.
// If the resource has since disappeared the condition fails "closed":
// it logs, bumps the error counters, and returns false.
func (c *Resource) Check(msg types.Message) bool {
	c.mCount.Incr(1)
	cond, err := c.mgr.GetCondition(c.name)
	if err != nil {
		c.log.Debugf("Failed to obtain condition resource '%v': %v", c.name, err)
		c.mErrNotFound.Incr(1)
		c.mErr.Incr(1)
		c.mFalse.Incr(1)
		return false
	}
	res := cond.Check(msg)
	if res {
		c.mTrue.Incr(1)
	} else {
		c.mFalse.Incr(1)
	}
	return res
}

//------------------------------------------------------------------------------
lib/processor/condition/resource.go
0.781497
0.682303
resource.go
starcoder
package go2d

import (
	"math"
	"math/rand"
	"time"
)

// TICK_DURATION marks a VelocityVector that should be applied fully on
// every tick rather than spread over a wall-clock duration.
const TICK_DURATION = time.Duration(0)

// VelocityVector is a displacement Vector to be covered over Duration.
type VelocityVector struct {
	Vector
	Duration time.Duration
}

// NewVelocityVector builds a VelocityVector from components and a duration.
func NewVelocityVector(x, y float64, d time.Duration) VelocityVector {
	return VelocityVector{
		Vector: Vector{
			X: x,
			Y: y,
		},
		Duration: d,
	}
}

// GetNextMovement returns the displacement to apply on the next frame.
// With TICK_DURATION the whole vector is applied at once; otherwise the
// vector is spread evenly over Duration, assuming a fixed 60 frames per
// second (TODO confirm against the engine's actual tick rate).
//
// Fixed: the previous implementation returned
// (60*time.Second) / (V/Duration), i.e. the reciprocal of the intended
// per-frame displacement with mismatched units.
func (this VelocityVector) GetNextMovement() Vector {
	if this.Duration == TICK_DURATION {
		return this.Vector
	}
	// Fraction of the total Duration that one 60 FPS frame covers.
	frameFraction := float64(time.Second/60) / float64(this.Duration)
	return Vector{
		X: this.Vector.X * frameFraction,
		Y: this.Vector.Y * frameFraction,
	}
}

// Vector is a 2d point or displacement.
type Vector struct {
	X float64
	Y float64
}

// NewVector builds a Vector from its components.
func NewVector(x, y float64) Vector {
	return Vector{
		X: x,
		Y: y,
	}
}

// NewRandomVector returns a random vector in [0,max.X) x [0,max.Y).
func NewRandomVector(max Vector) Vector {
	return NewRandomVectorWithin(
		NewZeroRect(max.X, max.Y),
	)
}

// NewRandomVectorWithin returns a uniformly random vector inside r.
// Fixed: the Y component previously used r.Width as its extent (copy-paste
// bug), biasing results for non-square rects. Also removed the per-call
// rand.Seed(time.Now().UnixNano()): reseeding the global generator on every
// call yields identical vectors within the same clock tick and is
// deprecated; the global source is seeded automatically.
func NewRandomVectorWithin(r Rect) Vector {
	return Vector{
		X: r.X + rand.Float64()*r.Width,
		Y: r.Y + rand.Float64()*r.Height,
	}
}

// NewZeroVector returns the zero vector.
func NewZeroVector() Vector {
	return Vector{}
}

// DirectionUp is the unit vector pointing up (negative Y).
func DirectionUp() Vector {
	return Vector{
		Y: -1,
	}
}

// DirectionDown is the unit vector pointing down (positive Y).
func DirectionDown() Vector {
	return Vector{
		Y: 1,
	}
}

// DirectionLeft is the unit vector pointing left (negative X).
func DirectionLeft() Vector {
	return Vector{
		X: -1,
	}
}

// DirectionRight is the unit vector pointing right (positive X).
func DirectionRight() Vector {
	return Vector{
		X: 1,
	}
}

// DirectionTo returns the unit vector pointing from this vector to other.
func (this *Vector) DirectionTo(other Vector) Vector {
	angle := this.AngleTo(other)
	return Vector{
		X: math.Cos(angle),
		Y: math.Sin(angle),
	}
}

// DistanceTo returns the Euclidean distance between the two vectors.
func (this *Vector) DistanceTo(other Vector) float64 {
	return math.Sqrt(math.Pow(this.X-other.X, 2) + math.Pow(this.Y-other.Y, 2))
}

// AngleTo returns the angle (radians) of the segment from this to other.
func (this *Vector) AngleTo(other Vector) float64 {
	return math.Atan2(other.Y-this.Y, other.X-this.X)
}

// ConstrainTo clamps the vector in place to lie inside r.
// Fixed: the lower-bound clamp on Y previously compared against
// r.Y + r.Width (copy-paste bug); it now uses r.Height.
func (this *Vector) ConstrainTo(r Rect) {
	if this.X < r.X {
		this.X = r.X
	}
	if this.Y < r.Y {
		this.Y = r.Y
	}
	if this.X >= r.X+r.Width {
		this.X = r.X + r.Width - 1
	}
	if this.Y >= r.Y+r.Height {
		this.Y = r.Y + r.Height - 1
	}
}

// Copy returns a copy of the vector.
func (this *Vector) Copy() Vector {
	return Vector{
		X: this.X,
		Y: this.Y,
	}
}

// Constrained returns a copy of the vector clamped inside r.
func (this Vector) Constrained(r Rect) Vector {
	copy := this.Copy()
	copy.ConstrainTo(r)
	return copy
}

// IsInsideOf reports whether the vector lies inside r.
func (this Vector) IsInsideOf(r Rect) bool {
	return r.Contains(this)
}

// IsLeftOf reports whether this vector is strictly left of v.
func (this Vector) IsLeftOf(v Vector) bool {
	return this.X < v.X
}

// IsRightOf reports whether this vector is strictly right of v.
func (this Vector) IsRightOf(v Vector) bool {
	return this.X > v.X
}

// IsAbove reports whether this vector is strictly above v (smaller Y).
func (this Vector) IsAbove(v Vector) bool {
	return this.Y < v.Y
}

// IsBelow reports whether this vector is strictly below v (larger Y).
func (this Vector) IsBelow(v Vector) bool {
	return this.Y > v.Y
}

// IsZero reports whether the vector is the zero vector.
func (this Vector) IsZero() bool {
	return this.Equals(NewZeroVector())
}

// Equals reports exact component equality.
func (this Vector) Equals(other Vector) bool {
	return this.X == other.X && this.Y == other.Y
}

// Inverted returns the vector with both components negated.
func (this Vector) Inverted() Vector {
	return Vector{
		X: -this.X,
		Y: -this.Y,
	}
}

// InvertedY returns the vector with only Y negated.
func (this Vector) InvertedY() Vector {
	return Vector{
		X: this.X,
		Y: -this.Y,
	}
}

// InvertedX returns the vector with only X negated.
func (this Vector) InvertedX() Vector {
	return Vector{
		X: -this.X,
		Y: this.Y,
	}
}
go2d/vector.go
0.888051
0.876582
vector.go
starcoder
package metrics

import (
	"context"

	"github.com/prometheus/client_golang/prometheus"
)

// NewHistogram returns a new Histogram and sets its namespace.
func NewHistogram(opts prometheus.HistogramOpts) prometheus.Histogram {
	opts.Namespace = Namespace
	return prometheus.NewHistogram(opts)
}

// MustRegisterHistogram is a convenience function for NewHistogram and MustRegister.
func MustRegisterHistogram(opts prometheus.HistogramOpts) prometheus.Histogram {
	metric := NewHistogram(opts)
	MustRegister(metric)
	return metric
}

// NewHistogramVec returns a new HistogramVec and sets its namespace.
func NewHistogramVec(opts prometheus.HistogramOpts, labelNames []string) *prometheus.HistogramVec {
	opts.Namespace = Namespace
	return prometheus.NewHistogramVec(opts, labelNames)
}

// MustRegisterHistogramVec is a convenience function for NewHistogramVec and MustRegister.
func MustRegisterHistogramVec(opts prometheus.HistogramOpts, labelNames []string) *prometheus.HistogramVec {
	metric := NewHistogramVec(opts, labelNames)
	MustRegister(metric)
	return metric
}

// ContextualHistogramVec wraps a HistogramVec in order to get labels from the context.
type ContextualHistogramVec struct {
	*prometheus.HistogramVec
}

// contextLabelsActive reports whether contextual labels are fully
// configured. Both conditions must hold: previously With checked only
// LabelsFromContext while WithLabelValues checked only ContextLabelNames,
// so a half-configured package either called a nil function (panic) or
// curried labels the underlying vec was never created with (panic).
func contextLabelsActive() bool {
	return LabelsFromContext != nil && len(ContextLabelNames) > 0
}

// With is the equivalent of HistogramVec.With, but with a context.
func (c ContextualHistogramVec) With(ctx context.Context, labels prometheus.Labels) prometheus.Observer {
	if !contextLabelsActive() {
		return c.HistogramVec.With(labels)
	}
	return c.HistogramVec.MustCurryWith(LabelsFromContext(ctx)).With(labels)
}

// WithLabelValues is the equivalent of HistogramVec.WithLabelValues, but with a context.
func (c ContextualHistogramVec) WithLabelValues(ctx context.Context, lvs ...string) prometheus.Observer {
	if !contextLabelsActive() {
		return c.HistogramVec.WithLabelValues(lvs...)
	}
	return c.HistogramVec.MustCurryWith(LabelsFromContext(ctx)).WithLabelValues(lvs...)
}

// NewContextualHistogramVec returns a new ContextualHistogramVec and sets its namespace.
// The context label names are prepended to the caller's label names.
func NewContextualHistogramVec(opts prometheus.HistogramOpts, labelNames []string) *ContextualHistogramVec {
	opts.Namespace = Namespace
	if len(ContextLabelNames) > 0 {
		// Build a fresh slice. The previous append(ContextLabelNames, ...)
		// appended into the package-level slice's backing array, which a
		// later call (or concurrent calls) could silently clobber.
		combined := make([]string, 0, len(ContextLabelNames)+len(labelNames))
		combined = append(combined, ContextLabelNames...)
		combined = append(combined, labelNames...)
		labelNames = combined
	}
	return &ContextualHistogramVec{prometheus.NewHistogramVec(opts, labelNames)}
}

// MustRegisterContextualHistogramVec is a convenience function for NewContextualHistogramVec and MustRegister.
func MustRegisterContextualHistogramVec(opts prometheus.HistogramOpts, labelNames []string) *ContextualHistogramVec {
	metric := NewContextualHistogramVec(opts, labelNames)
	MustRegister(metric)
	return metric
}
pkg/metrics/histogram.go
0.846324
0.407628
histogram.go
starcoder
2D Signed Distance Functions */
//-----------------------------------------------------------------------------

package sdf

import (
	"errors"
	"math"
)

//-----------------------------------------------------------------------------

// SDF2 is the interface to a 2d signed distance function object.
type SDF2 interface {
	Evaluate(p V2) float64
	BoundingBox() Box2
}

//-----------------------------------------------------------------------------
// SDF2 Evaluation Caching (experimental)

// sdf2Cache memoizes Evaluate results keyed by exact point.
// Plain map with no locking: not safe for concurrent use.
type sdf2Cache struct {
	cache map[V2]float64
	hits  uint
}

// lookup returns the cached distance for p, or an error on a miss.
func (c *sdf2Cache) lookup(p V2) (float64, error) {
	if d, ok := c.cache[p]; ok {
		c.hits++
		return d, nil
	}
	return 0, errors.New("not found")
}

// store records the distance d for point p.
func (c *sdf2Cache) store(p V2, d float64) {
	c.cache[p] = d
}

// newSdf2Cache returns an empty evaluation cache.
func newSdf2Cache() *sdf2Cache {
	c := sdf2Cache{}
	c.cache = make(map[V2]float64)
	return &c
}

//-----------------------------------------------------------------------------
// Basic SDF Functions

// sdfBox2d is the distance to an axis-aligned box of half-size s
// centered at the origin. Symmetry (Abs) folds p into the first quadrant;
// outside a corner the distance is the corner distance, otherwise the
// nearest-face distance is selected.
func sdfBox2d(p, s V2) float64 {
	p = p.Abs()
	d := p.Sub(s)
	k := s.Y - s.X
	if d.X > 0 && d.Y > 0 {
		return d.Length()
	}
	if p.Y-p.X > k {
		return d.Y
	}
	return d.X
}

//-----------------------------------------------------------------------------
// 2D Circle

// CircleSDF2 is the 2d signed distance object for a circle.
type CircleSDF2 struct {
	radius float64
	bb     Box2
}

// Circle2D returns the SDF2 for a 2d circle.
func Circle2D(radius float64) SDF2 {
	s := CircleSDF2{}
	s.radius = radius
	d := V2{radius, radius}
	s.bb = Box2{d.Neg(), d}
	return &s
}

// Evaluate returns the minimum distance to a 2d circle.
func (s *CircleSDF2) Evaluate(p V2) float64 {
	return p.Length() - s.radius
}

// BoundingBox returns the bounding box of a 2d circle.
func (s *CircleSDF2) BoundingBox() Box2 {
	return s.bb
}

//-----------------------------------------------------------------------------

// MultiCircleSDF2 is an SDF2 made from multiple circles (used for hole patterns).
type MultiCircleSDF2 struct {
	radius    float64
	positions V2Set
	bb        Box2
}

// MultiCircle2D returns an SDF2 for multiple circles.
func MultiCircle2D(radius float64, positions V2Set) SDF2 {
	s := MultiCircleSDF2{}
	s.radius = radius
	s.positions = positions
	// work out the bounding box
	pmin := positions.Min().Sub(V2{radius, radius})
	pmax := positions.Max().Add(V2{radius, radius})
	s.bb = Box2{pmin, pmax}
	return &s
}

// Evaluate returns the minimum distance to multiple circles.
// Linear scan over all circle centers; cost is O(len(positions)).
func (s *MultiCircleSDF2) Evaluate(p V2) float64 {
	d := math.MaxFloat64
	for _, posn := range s.positions {
		d = Min(d, p.Sub(posn).Length()-s.radius)
	}
	return d
}

// BoundingBox returns the bounding box for multiple circles.
func (s *MultiCircleSDF2) BoundingBox() Box2 {
	return s.bb
}

//-----------------------------------------------------------------------------
// 2D Box (rounded corners with round > 0)

// BoxSDF2 is the 2d signed distance object for a rectangular box.
type BoxSDF2 struct {
	size  V2
	round float64
	bb    Box2
}

// Box2D returns a 2d box.
func Box2D(size V2, round float64) SDF2 {
	size = size.MulScalar(0.5)
	s := BoxSDF2{}
	// shrink the core box by the rounding radius; Evaluate adds it back,
	// which produces the rounded corners
	s.size = size.SubScalar(round)
	s.round = round
	s.bb = Box2{size.Neg(), size}
	return &s
}

// Evaluate returns the minimum distance to a 2d box.
func (s *BoxSDF2) Evaluate(p V2) float64 {
	return sdfBox2d(p, s.size) - s.round
}

// BoundingBox returns the bounding box for a 2d box.
func (s *BoxSDF2) BoundingBox() Box2 {
	return s.bb
}

//-----------------------------------------------------------------------------
// 2D Line

// LineSDF2 is the 2d signed distance object for a line.
type LineSDF2 struct {
	l     float64 // line length
	round float64 // rounding
	bb    Box2    // bounding box
}

// Line2D returns a line from (-l/2,0) to (l/2,0).
func Line2D(l, round float64) SDF2 {
	s := LineSDF2{}
	s.l = l / 2
	s.round = round
	s.bb = Box2{V2{-s.l - round, -round}, V2{s.l + round, round}}
	return &s
}

// Evaluate returns the minimum distance to a 2d line.
// Symmetry folds p into the positive quadrant; within the segment span
// the distance is vertical, past the end it is the endpoint distance.
func (s *LineSDF2) Evaluate(p V2) float64 {
	p = p.Abs()
	if p.X <= s.l {
		return p.Y - s.round
	}
	return p.Sub(V2{s.l, 0}).Length() - s.round
}

// BoundingBox returns the bounding box for a 2d line.
func (s *LineSDF2) BoundingBox() Box2 {
	return s.bb
}

//-----------------------------------------------------------------------------

// OffsetSDF2 offsets the distance function of an existing SDF2.
type OffsetSDF2 struct {
	sdf    SDF2
	offset float64
	bb     Box2
}

// Offset2D returns an SDF2 that offsets the distance function of another SDF2.
func Offset2D(sdf SDF2, offset float64) SDF2 {
	s := OffsetSDF2{}
	s.sdf = sdf
	s.offset = offset
	// work out the bounding box
	bb := sdf.BoundingBox()
	s.bb = NewBox2(bb.Center(), bb.Size().AddScalar(2*offset))
	return &s
}

// Evaluate returns the minimum distance to an offset SDF2.
func (s *OffsetSDF2) Evaluate(p V2) float64 {
	return s.sdf.Evaluate(p) - s.offset
}

// BoundingBox returns the bounding box of an offset SDF2.
func (s *OffsetSDF2) BoundingBox() Box2 {
	return s.bb
}

//-----------------------------------------------------------------------------
// Cut an SDF2 along a line

// CutSDF2 is an SDF2 made by cutting across an existing SDF2.
type CutSDF2 struct {
	sdf SDF2
	a   V2   // point on line
	n   V2   // normal to line
	bb  Box2 // bounding box
}

// Cut2D cuts the SDF2 along a line from a in direction v.
// The SDF2 to the right of the line remains.
func Cut2D(sdf SDF2, a, v V2) SDF2 {
	s := CutSDF2{}
	s.sdf = sdf
	s.a = a
	v = v.Normalize()
	// left-hand normal of the (normalized) cut direction
	s.n = V2{-v.Y, v.X}
	// TODO - cut the bounding box
	s.bb = sdf.BoundingBox()
	return &s
}

// Evaluate returns the minimum distance to cut SDF2.
// Max() intersects the original SDF with the half-plane.
func (s *CutSDF2) Evaluate(p V2) float64 {
	return Max(p.Sub(s.a).Dot(s.n), s.sdf.Evaluate(p))
}

// BoundingBox returns the bounding box for the cut SDF2.
func (s *CutSDF2) BoundingBox() Box2 {
	return s.bb
}

//-----------------------------------------------------------------------------
// 2D Polygon

// PolySDF2 is an SDF2 made from a closed set of line segments.
type PolySDF2 struct {
	vertex []V2      // vertices
	vector []V2      // unit line vectors
	length []float64 // line lengths
	bb     Box2      // bounding box
}

// Polygon2D returns an SDF2 made from a closed set of line segments.
func Polygon2D(vertex []V2) SDF2 {
	s := PolySDF2{}
	n := len(vertex)
	if n < 3 {
		return nil
	}
	// Close the loop (if necessary)
	s.vertex = vertex
	if !vertex[0].Equals(vertex[n-1], tolerance) {
		s.vertex = append(s.vertex, vertex[0])
	}
	// allocate pre-calculated line segment info
	nsegs := len(s.vertex) - 1
	s.vector = make([]V2, nsegs)
	s.length = make([]float64, nsegs)
	vmin := s.vertex[0]
	vmax := s.vertex[0]
	for i := 0; i < nsegs; i++ {
		l := s.vertex[i+1].Sub(s.vertex[i])
		s.length[i] = l.Length()
		s.vector[i] = l.Normalize()
		vmin = vmin.Min(s.vertex[i])
		vmax = vmax.Max(s.vertex[i])
	}
	s.bb = Box2{vmin, vmax}
	return &s
}

// Evaluate returns the minimum distance for a 2d polygon.
// Tracks squared distances to defer the sqrt, and a winding number to
// decide the sign (inside is negative).
func (s *PolySDF2) Evaluate(p V2) float64 {
	dd := math.MaxFloat64 // d^2 to polygon (>0)
	wn := 0               // winding number (inside/outside)
	// iterate over the line segments
	nsegs := len(s.vertex) - 1
	pb := p.Sub(s.vertex[0])
	for i := 0; i < nsegs; i++ {
		a := s.vertex[i]
		b := s.vertex[i+1]
		pa := pb
		pb = p.Sub(b)
		t := pa.Dot(s.vector[i])                        // t-parameter of projection onto line
		dn := pa.Dot(V2{s.vector[i].Y, -s.vector[i].X}) // normal distance from p to line
		// Distance to line segment
		if t < 0 {
			dd = Min(dd, pa.Length2()) // distance to vertex[0] of line
		} else if t > s.length[i] {
			dd = Min(dd, pb.Length2()) // distance to vertex[1] of line
		} else {
			dd = Min(dd, dn*dn) // normal distance to line
		}
		// Is the point in the polygon?
		// See: http://geomalgorithms.com/a03-_inclusion.html
		if a.Y <= p.Y {
			if b.Y > p.Y { // upward crossing
				if dn < 0 { // p is to the left of the line segment
					wn++ // up intersect
				}
			}
		} else {
			if b.Y <= p.Y { // downward crossing
				if dn > 0 { // p is to the right of the line segment
					wn-- // down intersect
				}
			}
		}
	}
	// normalise d*d to d
	d := math.Sqrt(dd)
	if wn != 0 {
		// p is inside the polygon
		return -d
	}
	return d
}

// BoundingBox returns the bounding box of a 2d polygon.
func (s *PolySDF2) BoundingBox() Box2 {
	return s.bb
}

// Vertices returns the set of vertices for a 2d polygon.
func (s *PolySDF2) Vertices() []V2 {
	return s.vertex
}

//-----------------------------------------------------------------------------
// Transform SDF2 (rotation and translation are distance preserving)

// TransformSDF2 transorms an SDF2 with rotation, translation and scaling.
type TransformSDF2 struct {
	sdf  SDF2
	mInv M33 // inverse transform: maps world space back to model space
	bb   Box2
}

// Transform2D applies a transformation matrix to an SDF2.
// Distance is *not* preserved with scaling.
func Transform2D(sdf SDF2, m M33) SDF2 {
	s := TransformSDF2{}
	s.sdf = sdf
	s.mInv = m.Inverse()
	s.bb = m.MulBox(sdf.BoundingBox())
	return &s
}

// Evaluate returns the minimum distance to a transformed SDF2.
// Distance is *not* preserved with scaling.
func (s *TransformSDF2) Evaluate(p V2) float64 {
	q := s.mInv.MulPosition(p)
	return s.sdf.Evaluate(q)
}

// BoundingBox returns the bounding box of a transformed SDF2.
func (s *TransformSDF2) BoundingBox() Box2 {
	return s.bb
}

//-----------------------------------------------------------------------------
// Uniform XY Scaling of SDF2s (we can work out the distance)

// ScaleUniformSDF2 scales another SDF2 on each axis.
type ScaleUniformSDF2 struct {
	sdf     SDF2
	k, invk float64 // scale factor and its reciprocal (cached)
	bb      Box2
}

// ScaleUniform2D scales an SDF2 by k on each axis.
// Distance is correct with scaling.
func ScaleUniform2D(sdf SDF2, k float64) SDF2 {
	m := Scale2d(V2{k, k})
	return &ScaleUniformSDF2{
		sdf:  sdf,
		k:    k,
		invk: 1.0 / k,
		bb:   m.MulBox(sdf.BoundingBox()),
	}
}

// Evaluate returns the minimum distance to an SDF2 with uniform scaling.
// The child distance is rescaled by k, keeping the metric exact.
func (s *ScaleUniformSDF2) Evaluate(p V2) float64 {
	q := p.MulScalar(s.invk)
	return s.sdf.Evaluate(q) * s.k
}

// BoundingBox returns the bounding box of an SDF2 with uniform scaling.
func (s *ScaleUniformSDF2) BoundingBox() Box2 {
	return s.bb
}

//-----------------------------------------------------------------------------

// Center2D centers the origin of an SDF2 on it's bounding box.
func Center2D(s SDF2) SDF2 {
	ofs := s.BoundingBox().Center().Neg()
	return Transform2D(s, Translate2d(ofs))
}

// CenterAndScale2D centers the origin of an SDF2 on it's bounding box, and then scales it.
// Distance is correct with scaling.
func CenterAndScale2D(s SDF2, k float64) SDF2 {
	ofs := s.BoundingBox().Center().Neg()
	s = Transform2D(s, Translate2d(ofs))
	return ScaleUniform2D(s, k)
}

//-----------------------------------------------------------------------------
// ArraySDF2: Create an X by Y array of a given SDF2

// ArraySDF2 defines an XY grid array of an existing SDF2.
type ArraySDF2 struct {
	sdf  SDF2
	num  V2i // grid size
	step V2  // grid step size
	min  MinFunc
	bb   Box2
}

// Array2D returns an XY grid array of an existing SDF2.
func Array2D(sdf SDF2, num V2i, step V2) SDF2 {
	// check the number of steps
	if num[0] <= 0 || num[1] <= 0 {
		return nil
	}
	s := ArraySDF2{}
	s.sdf = sdf
	s.num = num
	s.step = step
	s.min = Min
	// work out the bounding box
	bb0 := sdf.BoundingBox()
	bb1 := bb0.Translate(step.Mul(num.SubScalar(1).ToV2()))
	s.bb = bb0.Extend(bb1)
	return &s
}

// SetMin sets the minimum function to control blending.
func (s *ArraySDF2) SetMin(min MinFunc) {
	s.min = min
}

// Evaluate returns the minimum distance to a grid array of SDF2s.
// Evaluates the child SDF once per grid cell: O(num[0]*num[1]) per call.
func (s *ArraySDF2) Evaluate(p V2) float64 {
	d := math.MaxFloat64
	for j := 0; j < s.num[0]; j++ {
		for k := 0; k < s.num[1]; k++ {
			x := p.Sub(V2{float64(j) * s.step.X, float64(k) * s.step.Y})
			d = s.min(d, s.sdf.Evaluate(x))
		}
	}
	return d
}

// BoundingBox returns the bounding box of a grid array of SDF2s.
func (s *ArraySDF2) BoundingBox() Box2 {
	return s.bb
}

//-----------------------------------------------------------------------------

// RotateUnionSDF2 defines a union of rotated SDF2s.
type RotateUnionSDF2 struct {
	sdf  SDF2
	num  int
	step M33 // inverse of the per-copy rotation step
	min  MinFunc
	bb   Box2
}

// RotateUnion2D returns a union of rotated SDF2s.
func RotateUnion2D(sdf SDF2, num int, step M33) SDF2 {
	// check the number of steps
	if num <= 0 {
		return nil
	}
	s := RotateUnionSDF2{}
	s.sdf = sdf
	s.num = num
	s.step = step.Inverse()
	s.min = Min
	// work out the bounding box
	v := sdf.BoundingBox().Vertices()
	bbMin := v[0]
	bbMax := v[0]
	for i := 0; i < s.num; i++ {
		bbMin = bbMin.Min(v.Min())
		bbMax = bbMax.Max(v.Max())
		v.MulVertices(step)
	}
	s.bb = Box2{bbMin, bbMax}
	return &s
}

// Evaluate returns the minimum distance to a union of rotated SDF2s.
// Applies the inverse rotation cumulatively, evaluating the child once
// per copy.
func (s *RotateUnionSDF2) Evaluate(p V2) float64 {
	d := math.MaxFloat64
	rot := Identity2d()
	for i := 0; i < s.num; i++ {
		x := rot.MulPosition(p)
		d = s.min(d, s.sdf.Evaluate(x))
		rot = rot.Mul(s.step)
	}
	return d
}

// SetMin sets the minimum function to control blending.
func (s *RotateUnionSDF2) SetMin(min MinFunc) {
	s.min = min
}

// BoundingBox returns the bounding box of a union of rotated SDF2s.
func (s *RotateUnionSDF2) BoundingBox() Box2 {
	return s.bb
}

//-----------------------------------------------------------------------------

// RotateCopySDF2 copies an SDF2 n times in a full circle.
type RotateCopySDF2 struct {
	sdf   SDF2
	theta float64 // angular width of one copy sector
	bb    Box2
}

// RotateCopy2D rotates and copies an SDF2 n times in a full circle.
func RotateCopy2D(sdf SDF2, n int) SDF2 {
	// check the number of steps
	if n <= 0 {
		return nil
	}
	s := RotateCopySDF2{}
	s.sdf = sdf
	s.theta = Tau / float64(n)
	// work out the bounding box
	bb := sdf.BoundingBox()
	rmax := 0.0
	// find the bounding box vertex with the greatest distance from the origin
	for _, v := range bb.Vertices() {
		l := v.Length()
		if l > rmax {
			rmax = l
		}
	}
	s.bb = Box2{V2{-rmax, -rmax}, V2{rmax, rmax}}
	return &s
}

// Evaluate returns the minimum distance to a rotate/copy SDF2.
func (s *RotateCopySDF2) Evaluate(p V2) float64 {
	// Map p to a point in the first copy sector.
	pnew := PolarToXY(p.Length(), SawTooth(math.Atan2(p.Y, p.X), s.theta))
	return s.sdf.Evaluate(pnew)
}

// BoundingBox returns the bounding box of a rotate/copy SDF2.
func (s *RotateCopySDF2) BoundingBox() Box2 {
	return s.bb
}

//-----------------------------------------------------------------------------

// SliceSDF2 creates an SDF2 from a planar slice through an SDF3.
type SliceSDF2 struct {
	sdf SDF3 // the sdf3 being sliced
	a   V3   // 3d point for 2d origin
	u   V3   // vector for the 2d x-axis
	v   V3   // vector for the 2d y-axis
	bb  Box2 // bounding box
}

// Slice2D returns an SDF2 created from a planar slice through an SDF3.
func Slice2D(
	sdf SDF3, // SDF3 to be sliced
	a V3, // point on slicing plane
	n V3, // normal to slicing plane
) SDF2 {
	s := SliceSDF2{}
	s.sdf = sdf
	s.a = a
	// work out the x/y vectors on the plane.
	if n.X == 0 {
		s.u = V3{1, 0, 0}
	} else if n.Y == 0 {
		s.u = V3{0, 1, 0}
	} else if n.Z == 0 {
		s.u = V3{0, 0, 1}
	} else {
		s.u = V3{n.Y, -n.X, 0}
	}
	s.v = n.Cross(s.u)
	s.u = s.u.Normalize()
	s.v = s.v.Normalize()
	// work out the bounding box
	// TODO: This is bigger than it needs to be. We could consider intersection
	// between the plane and the edges of the 3d bounding box for a smaller 2d
	// bounding box in some circumstances.
	v3 := sdf.BoundingBox().Vertices()
	v2 := make(V2Set, len(v3))
	n = n.Normalize()
	for i, v := range v3 {
		// project the 3d bounding box vertex onto the plane
		va := v.Sub(s.a)
		pa := va.Sub(n.MulScalar(n.Dot(va)))
		// work out the 3d point in terms of the 2d unit vectors
		v2[i] = V2{pa.Dot(s.u), pa.Dot(s.v)}
	}
	s.bb = Box2{v2.Min(), v2.Max()}
	return &s
}

// Evaluate returns the minimum distance to the sliced SDF2.
func (s *SliceSDF2) Evaluate(p V2) float64 {
	pnew := s.a.Add(s.u.MulScalar(p.X)).Add(s.v.MulScalar(p.Y))
	return s.sdf.Evaluate(pnew)
}

// BoundingBox returns the bounding box of the sliced SDF2.
func (s *SliceSDF2) BoundingBox() Box2 {
	return s.bb
}

//-----------------------------------------------------------------------------

// UnionSDF2 is a union of multiple SDF2 objects.
type UnionSDF2 struct {
	sdf []SDF2
	min MinFunc
	bb  Box2
}

// Union2D returns the union of multiple SDF2 objects.
func Union2D(sdf ...SDF2) SDF2 {
	if len(sdf) == 0 {
		return nil
	}
	s := UnionSDF2{}
	// strip out any nils
	s.sdf = make([]SDF2, 0, len(sdf))
	for _, x := range sdf {
		if x != nil {
			s.sdf = append(s.sdf, x)
		}
	}
	if len(s.sdf) == 0 {
		return nil
	}
	if len(s.sdf) == 1 {
		// only one sdf - not really a union
		return s.sdf[0]
	}
	// work out the bounding box
	bb := s.sdf[0].BoundingBox()
	for _, x := range s.sdf {
		bb = bb.Extend(x.BoundingBox())
	}
	s.bb = bb
	s.min = Min
	return &s
}

// Evaluate returns the minimum distance to the SDF2 union.
func (s *UnionSDF2) Evaluate(p V2) float64 { // work out the min/max distance for every bounding box vs := make([]V2, len(s.sdf)) minDist2 := -1.0 minIndex := 0 for i := range s.sdf { vs[i] = s.sdf[i].BoundingBox().MinMaxDist2(p) // as we go record the sdf with the minimum minimum d2 value if minDist2 < 0 || vs[i].X < minDist2 { minDist2 = vs[i].X minIndex = i } } var d float64 first := true for i := range s.sdf { // only an sdf whose min/max distances overlap // the minimum box are worthy of consideration if i == minIndex || vs[minIndex].Overlap(vs[i]) { x := s.sdf[i].Evaluate(p) if first { first = false d = x } else { d = s.min(d, x) } } } return d } // EvaluateSlow returns the minimum distance to the SDF2 union. func (s *UnionSDF2) EvaluateSlow(p V2) float64 { var d float64 for i := range s.sdf { x := s.sdf[i].Evaluate(p) if i == 0 { d = x } else { d = s.min(d, x) } } return d } // SetMin sets the minimum function to control SDF2 blending. func (s *UnionSDF2) SetMin(min MinFunc) { s.min = min } // BoundingBox returns the bounding box of an SDF2 union. func (s *UnionSDF2) BoundingBox() Box2 { return s.bb } //----------------------------------------------------------------------------- // DifferenceSDF2 is the difference of two SDF2s. type DifferenceSDF2 struct { s0 SDF2 s1 SDF2 max MaxFunc bb Box2 } // Difference2D returns the difference of two SDF2 objects, s0 - s1. func Difference2D(s0, s1 SDF2) SDF2 { if s1 == nil { return s0 } if s0 == nil { return nil } s := DifferenceSDF2{} s.s0 = s0 s.s1 = s1 s.max = Max s.bb = s0.BoundingBox() return &s } // Evaluate returns the minimum distance to the difference of two SDF2s. func (s *DifferenceSDF2) Evaluate(p V2) float64 { return s.max(s.s0.Evaluate(p), -s.s1.Evaluate(p)) } // SetMax sets the maximum function to control blending. func (s *DifferenceSDF2) SetMax(max MaxFunc) { s.max = max } // BoundingBox returns the bounding box of the difference of two SDF2s. 
func (s *DifferenceSDF2) BoundingBox() Box2 { return s.bb } //----------------------------------------------------------------------------- // ElongateSDF2 is the elongation of an SDF2. type ElongateSDF2 struct { sdf SDF2 // the sdf being elongated hp, hn V2 // positive/negative elongation vector bb Box2 // bounding box } // Elongate2D returns the elongation of an SDF2. func Elongate2D(sdf SDF2, h V2) SDF2 { h = h.Abs() s := ElongateSDF2{ sdf: sdf, hp: h.MulScalar(0.5), hn: h.MulScalar(-0.5), } // bounding box bb := sdf.BoundingBox() bb0 := bb.Translate(s.hp) bb1 := bb.Translate(s.hn) s.bb = bb0.Extend(bb1) return &s } // Evaluate returns the minimum distance to an elongated SDF2. func (s *ElongateSDF2) Evaluate(p V2) float64 { q := p.Sub(p.Clamp(s.hn, s.hp)) return s.sdf.Evaluate(q) } // BoundingBox returns the bounding box of an elongated SDF2. func (s *ElongateSDF2) BoundingBox() Box2 { return s.bb } //----------------------------------------------------------------------------- // GenerateMesh2D generates a set of internal mesh points for an SDF2. func GenerateMesh2D(s SDF2, grid V2i) (V2Set, error) { // create the grid mapping for the bounding box m, err := NewMap2(s.BoundingBox(), grid, false) if err != nil { return nil, err } // create the vertex set storage vset := make(V2Set, 0, grid[0]*grid[1]) // iterate across the grid and add the vertices if they are inside the SDF2 for i := 0; i < grid[0]; i++ { for j := 0; j < grid[1]; j++ { v := m.ToV2(V2i{i, j}) if s.Evaluate(v) <= 0 { vset = append(vset, v) } } } return vset, nil } //----------------------------------------------------------------------------- // LineOf2D returns a union of 2D objects positioned along a line from p0 to p1. 
func LineOf2D(s SDF2, p0, p1 V2, pattern string) SDF2 { var objects []SDF2 if pattern != "" { x := p0 dx := p1.Sub(p0).DivScalar(float64(len(pattern))) for _, c := range pattern { if c == 'x' { objects = append(objects, Transform2D(s, Translate2d(x))) } x = x.Add(dx) } } return Union2D(objects...) } //-----------------------------------------------------------------------------
sdf/sdf2.go
0.843605
0.547767
sdf2.go
starcoder
package gemini import ( "bufio" "fmt" "io" "strings" ) // Line represents a line of a Gemini text response. type Line interface { // String formats the line for use in a Gemini text response. String() string line() // private function to prevent other packages from implementing Line } // LineLink is a link line. type LineLink struct { URL string Name string } // LinePreformattingToggle is a preformatting toggle line. type LinePreformattingToggle string // LinePreformattedText is a preformatted text line. type LinePreformattedText string // LineHeading1 is a first-level heading line. type LineHeading1 string // LineHeading2 is a second-level heading line. type LineHeading2 string // LineHeading3 is a third-level heading line. type LineHeading3 string // LineListItem is an unordered list item line. type LineListItem string // LineQuote is a quote line. type LineQuote string // LineText is a text line. type LineText string func (l LineLink) String() string { if l.Name != "" { return fmt.Sprintf("=> %s %s", l.URL, l.Name) } return fmt.Sprintf("=> %s", l.URL) } func (l LinePreformattingToggle) String() string { return fmt.Sprintf("```%s", string(l)) } func (l LinePreformattedText) String() string { return string(l) } func (l LineHeading1) String() string { return fmt.Sprintf("# %s", string(l)) } func (l LineHeading2) String() string { return fmt.Sprintf("## %s", string(l)) } func (l LineHeading3) String() string { return fmt.Sprintf("### %s", string(l)) } func (l LineListItem) String() string { return fmt.Sprintf("* %s", string(l)) } func (l LineQuote) String() string { return fmt.Sprintf("> %s", string(l)) } func (l LineText) String() string { return string(l) } func (l LineLink) line() {} func (l LinePreformattingToggle) line() {} func (l LinePreformattedText) line() {} func (l LineHeading1) line() {} func (l LineHeading2) line() {} func (l LineHeading3) line() {} func (l LineListItem) line() {} func (l LineQuote) line() {} func (l LineText) line() {} // Text 
represents a Gemini text response. type Text []Line // ParseText parses Gemini text from the provided io.Reader. func ParseText(r io.Reader) (Text, error) { var t Text err := ParseLines(r, func(line Line) { t = append(t, line) }) return t, err } // ParseLines parses Gemini text from the provided io.Reader. // It calls handler with each line that it parses. func ParseLines(r io.Reader, handler func(Line)) error { const spacetab = " \t" var pre bool scanner := bufio.NewScanner(r) for scanner.Scan() { var line Line text := scanner.Text() if strings.HasPrefix(text, "```") { pre = !pre text = text[3:] line = LinePreformattingToggle(text) } else if pre { line = LinePreformattedText(text) } else if strings.HasPrefix(text, "=>") { text = text[2:] text = strings.TrimLeft(text, spacetab) split := strings.IndexAny(text, spacetab) if split == -1 { // text is a URL line = LineLink{URL: text} } else { url := text[:split] name := text[split:] name = strings.TrimLeft(name, spacetab) line = LineLink{url, name} } } else if strings.HasPrefix(text, "*") { text = text[1:] text = strings.TrimLeft(text, spacetab) line = LineListItem(text) } else if strings.HasPrefix(text, "###") { text = text[3:] text = strings.TrimLeft(text, spacetab) line = LineHeading3(text) } else if strings.HasPrefix(text, "##") { text = text[2:] text = strings.TrimLeft(text, spacetab) line = LineHeading2(text) } else if strings.HasPrefix(text, "#") { text = text[1:] text = strings.TrimLeft(text, spacetab) line = LineHeading1(text) } else if strings.HasPrefix(text, ">") { text = text[1:] text = strings.TrimLeft(text, spacetab) line = LineQuote(text) } else { line = LineText(text) } handler(line) } return scanner.Err() } // String writes the Gemini text response to a string and returns it. func (t Text) String() string { var b strings.Builder for _, l := range t { b.WriteString(l.String()) b.WriteByte('\n') } return b.String() }
text.go
0.657978
0.437042
text.go
starcoder
package histwriter import ( "fmt" "io" "os" "github.com/HdrHistogram/HdrHistogram" ) // WriteDistribution writes the percentile distribution of a Histogram in a // format plottable by http://hdrhistogram.github.io/HdrHistogram/plotFiles.html // to the given Writer. Percentiles is a list of percentiles to include, e.g. // 10.0, 50.0, 99.0, 99.99, etc. If percentiles is nil, it defaults to a // logarithmic percentile scale. The scaleFactor is used to scale values. func WriteDistribution(hist *hdrhistogram.Histogram, percentiles Percentiles, scaleFactor float64, writer io.Writer) error { if percentiles == nil { percentiles = Logarithmic } if _, err := writer.Write([]byte("Value Percentile TotalCount 1/(1-Percentile)\n\n")); err != nil { return err } totalCount := hist.TotalCount() for _, percentile := range percentiles { value := float64(hist.ValueAtQuantile(percentile)) * scaleFactor oneByPercentile := getOneByPercentile(percentile) countAtPercentile := int64(((percentile / 100) * float64(totalCount)) + 0.5) _, err := writer.Write([]byte(fmt.Sprintf("%f %f %d %f\n", value, percentile/100, countAtPercentile, oneByPercentile))) if err != nil { return err } } return nil } // WriteDistributionFile writes the percentile distribution of a Histogram in a // format plottable by http://hdrhistogram.github.io/HdrHistogram/plotFiles.html // to the given file. If the file doesn't exist, it's created. Percentiles is a // list of percentiles to include, e.g. 10.0, 50.0, 99.0, 99.99, etc. If // percentiles is nil, it defaults to a logarithmic percentile scale. The // scaleFactor is used to scale values. 
func WriteDistributionFile(hist *hdrhistogram.Histogram, percentiles Percentiles, scaleFactor float64, file string) error { f, err := os.OpenFile(file, os.O_WRONLY, 0644) if err != nil { f, err = os.Create(file) if err != nil { return err } } defer f.Close() return WriteDistribution(hist, percentiles, scaleFactor, f) } func getOneByPercentile(percentile float64) float64 { if percentile < 100 { return 1 / (1 - (percentile / 100)) } return float64(10000000) }
writer.go
0.730578
0.443239
writer.go
starcoder
package planparserv2 import ( "fmt" "github.com/milvus-io/milvus/internal/util/typeutil" "github.com/milvus-io/milvus/internal/proto/planpb" "github.com/milvus-io/milvus/internal/proto/schemapb" ) func IsBool(n *planpb.GenericValue) bool { switch n.GetVal().(type) { case *planpb.GenericValue_BoolVal: return true } return false } func IsInteger(n *planpb.GenericValue) bool { switch n.GetVal().(type) { case *planpb.GenericValue_Int64Val: return true } return false } func IsFloating(n *planpb.GenericValue) bool { switch n.GetVal().(type) { case *planpb.GenericValue_FloatVal: return true } return false } func IsNumber(n *planpb.GenericValue) bool { return IsInteger(n) || IsFloating(n) } func IsString(n *planpb.GenericValue) bool { switch n.GetVal().(type) { case *planpb.GenericValue_StringVal: return true } return false } func NewBool(value bool) *planpb.GenericValue { return &planpb.GenericValue{ Val: &planpb.GenericValue_BoolVal{ BoolVal: value, }, } } func NewInt(value int64) *planpb.GenericValue { return &planpb.GenericValue{ Val: &planpb.GenericValue_Int64Val{ Int64Val: value, }, } } func NewFloat(value float64) *planpb.GenericValue { return &planpb.GenericValue{ Val: &planpb.GenericValue_FloatVal{ FloatVal: value, }, } } func NewString(value string) *planpb.GenericValue { return &planpb.GenericValue{ Val: &planpb.GenericValue_StringVal{ StringVal: value, }, } } func toValueExpr(n *planpb.GenericValue) *ExprWithType { expr := &planpb.Expr{ Expr: &planpb.Expr_ValueExpr{ ValueExpr: &planpb.ValueExpr{ Value: n, }, }, } switch n.GetVal().(type) { case *planpb.GenericValue_BoolVal: return &ExprWithType{ expr: expr, dataType: schemapb.DataType_Bool, } case *planpb.GenericValue_Int64Val: return &ExprWithType{ expr: expr, dataType: schemapb.DataType_Int64, } case *planpb.GenericValue_FloatVal: return &ExprWithType{ expr: expr, dataType: schemapb.DataType_Double, } case *planpb.GenericValue_StringVal: return &ExprWithType{ expr: expr, dataType: schemapb.DataType_VarChar, } 
default: return nil } } func getSameType(a, b schemapb.DataType) (schemapb.DataType, error) { if typeutil.IsFloatingType(a) && typeutil.IsArithmetic(b) { return schemapb.DataType_Double, nil } if typeutil.IsIntegerType(a) && typeutil.IsIntegerType(b) { return schemapb.DataType_Int64, nil } return schemapb.DataType_None, fmt.Errorf("incompatible data type, %s, %s", a.String(), b.String()) } func calcDataType(left, right *ExprWithType, reverse bool) (schemapb.DataType, error) { if reverse { return getSameType(right.dataType, left.dataType) } return getSameType(left.dataType, right.dataType) } func reverseOrder(op planpb.OpType) (planpb.OpType, error) { switch op { case planpb.OpType_LessThan: return planpb.OpType_GreaterThan, nil case planpb.OpType_LessEqual: return planpb.OpType_GreaterEqual, nil case planpb.OpType_GreaterThan: return planpb.OpType_LessThan, nil case planpb.OpType_GreaterEqual: return planpb.OpType_LessEqual, nil case planpb.OpType_Equal: return planpb.OpType_Equal, nil case planpb.OpType_NotEqual: return planpb.OpType_NotEqual, nil default: return planpb.OpType_Invalid, fmt.Errorf("cannot reverse order: %s", op) } } func toColumnInfo(left *ExprWithType) *planpb.ColumnInfo { return left.expr.GetColumnExpr().GetInfo() } func castValue(dataType schemapb.DataType, value *planpb.GenericValue) (*planpb.GenericValue, error) { if typeutil.IsStringType(dataType) && IsString(value) { return value, nil } if typeutil.IsBoolType(dataType) && IsBool(value) { return value, nil } if typeutil.IsFloatingType(dataType) { if IsFloating(value) { return value, nil } if IsInteger(value) { return NewFloat(float64(value.GetInt64Val())), nil } } if typeutil.IsIntegerType(dataType) { if IsInteger(value) { return value, nil } } return nil, fmt.Errorf("cannot cast value to %s, value: %s", dataType.String(), value) } func combineBinaryArithExpr(op planpb.OpType, arithOp planpb.ArithOpType, columnInfo *planpb.ColumnInfo, operand *planpb.GenericValue, value *planpb.GenericValue) 
*planpb.Expr { castedValue, err := castValue(columnInfo.GetDataType(), operand) if err != nil { return nil } return &planpb.Expr{ Expr: &planpb.Expr_BinaryArithOpEvalRangeExpr{ BinaryArithOpEvalRangeExpr: &planpb.BinaryArithOpEvalRangeExpr{ ColumnInfo: columnInfo, ArithOp: arithOp, RightOperand: castedValue, Op: op, Value: value, }, }, } } func handleBinaryArithExpr(op planpb.OpType, arithExpr *planpb.BinaryArithExpr, valueExpr *planpb.ValueExpr) (*planpb.Expr, error) { switch op { case planpb.OpType_Equal, planpb.OpType_NotEqual: break default: // TODO: enable this after execution is ready. return nil, fmt.Errorf("%s is not supported in execution backend", op) } leftExpr, leftValue := arithExpr.Left.GetColumnExpr(), arithExpr.Left.GetValueExpr() rightExpr, rightValue := arithExpr.Right.GetColumnExpr(), arithExpr.Right.GetValueExpr() if leftExpr != nil && rightExpr != nil { // a + b == 3 return nil, fmt.Errorf("not supported to do arithmetic operations between multiple fields") } if leftValue != nil && rightValue != nil { // 2 + 1 == 3 return nil, fmt.Errorf("unexpected, should be optimized already") } if leftExpr != nil && rightValue != nil { // a + 2 == 3 // a - 2 == 3 // a * 2 == 3 // a / 2 == 3 // a % 2 == 3 return combineBinaryArithExpr(op, arithExpr.GetOp(), leftExpr.GetInfo(), rightValue.GetValue(), valueExpr.GetValue()), nil } else if rightExpr != nil && leftValue != nil { // 2 + a == 3 // 2 - a == 3 // 2 * a == 3 // 2 / a == 3 // 2 % a == 3 switch arithExpr.GetOp() { case planpb.ArithOpType_Add, planpb.ArithOpType_Mul: return combineBinaryArithExpr(op, arithExpr.GetOp(), rightExpr.GetInfo(), leftValue.GetValue(), valueExpr.GetValue()), nil default: return nil, fmt.Errorf("todo") } } else { // (a + b) / 2 == 3 return nil, fmt.Errorf("complicated arithmetic operations are not supported") } } func handleCompareRightValue(op planpb.OpType, left *ExprWithType, right *planpb.ValueExpr) (*planpb.Expr, error) { castedValue, err := castValue(left.dataType, 
right.GetValue()) if err != nil { return nil, err } if leftArithExpr := left.expr.GetBinaryArithExpr(); leftArithExpr != nil { return handleBinaryArithExpr(op, leftArithExpr, &planpb.ValueExpr{Value: castedValue}) } columnInfo := toColumnInfo(left) if columnInfo == nil { return nil, fmt.Errorf("not supported to combine multiple fields") } expr := &planpb.Expr{ Expr: &planpb.Expr_UnaryRangeExpr{ UnaryRangeExpr: &planpb.UnaryRangeExpr{ ColumnInfo: columnInfo, Op: op, Value: castedValue, }, }, } switch op { case planpb.OpType_Invalid: return nil, fmt.Errorf("unsupported op type: %s", op) default: return expr, nil } } func handleCompare(op planpb.OpType, left *ExprWithType, right *ExprWithType) (*planpb.Expr, error) { leftColumnInfo := toColumnInfo(left) rightColumnInfo := toColumnInfo(right) if leftColumnInfo == nil || rightColumnInfo == nil { return nil, fmt.Errorf("only comparison between two fields is supported") } expr := &planpb.Expr{ Expr: &planpb.Expr_CompareExpr{ CompareExpr: &planpb.CompareExpr{ LeftColumnInfo: leftColumnInfo, RightColumnInfo: rightColumnInfo, Op: op, }, }, } switch op { case planpb.OpType_Invalid: return nil, fmt.Errorf("unsupported op type: %s", op) default: return expr, nil } } func relationalCompatible(t1, t2 schemapb.DataType) bool { both := typeutil.IsStringType(t1) && typeutil.IsStringType(t2) neither := !typeutil.IsStringType(t1) && !typeutil.IsStringType(t2) return both || neither } func HandleCompare(op int, left, right *ExprWithType) (*planpb.Expr, error) { if !relationalCompatible(left.dataType, right.dataType) { return nil, fmt.Errorf("comparisons between string and non-string are not supported") } cmpOp := cmpOpMap[op] if valueExpr := left.expr.GetValueExpr(); valueExpr != nil { op, err := reverseOrder(cmpOp) if err != nil { return nil, err } return handleCompareRightValue(op, right, valueExpr) } else if valueExpr := right.expr.GetValueExpr(); valueExpr != nil { return handleCompareRightValue(cmpOp, left, valueExpr) } else { 
return handleCompare(cmpOp, left, right) } }
internal/parser/planparserv2/utils.go
0.565299
0.513425
utils.go
starcoder
package utils import ( //"fmt" "fmt" "math" "math/big" "math/rand" "time" ) type TupleInt struct { A int B int } type TupleFloat struct { A float64 B float64 } //Euclidean modulous func Mod(a, b int) int { ab := big.NewInt(int64(a)) bb := big.NewInt(int64(b)) return int(ab.Mod(ab, bb).Int64()) } //Dot product func DotInt(a, b []int) int { if len(a) != len(b) { panic("Params have differing lengths") } result := 0 for i := range a { result += a[i] * b[i] } return result } //Populates integer slice with index values func FillSliceWithIdxInt(values []int) { for i := range values { values[i] = i } } //Populates float64 slice with specified value func FillSliceInt(values []int, value int) { for i := range values { values[i] = value } } //Populates float64 slice with specified value func FillSliceFloat64(values []float64, value float64) { for i := range values { values[i] = value } } //Populates bool slice with specified value func FillSliceBool(values []bool, value bool) { for i := range values { values[i] = value } } //Populates bool slice with specified value func FillSliceRangeBool(values []bool, value bool, start, length int) { for i := 0; i < length; i++ { values[start+i] = value } } //Returns the subset of values specified by indices func SubsetSliceInt(values, indices []int) []int { result := make([]int, len(indices)) for i, val := range indices { result[i] = values[val] } return result } //Returns the subset of values specified by indices func SubsetSliceFloat64(values []float64, indices []int) []float64 { result := make([]float64, len(indices)) for i, val := range indices { result[i] = values[val] } return result } //returns a copy of specified indices func SubsetSliceBool(values []bool, indices []int) []bool { result := make([]bool, len(indices)) for i, val := range indices { result[i] = values[val] } return result } //sets the specified indexes of a bool slice to specified value func SetIdxBool(values []bool, indexes []int, value bool) { for _, val := range 
indexes { values[val] = value } } //Compares 2 bool slices for equality func BoolEq(a, b []bool) bool { if len(a) != len(b) { return false } for idx, val := range a { if val != b[idx] { return false } } return true } //Creates an integer slice with indices containing // the specified initial value func MakeSliceInt(size, initialValue int) []int { result := make([]int, size) if initialValue != 0 { for i, _ := range result { result[i] = initialValue } } return result } func MakeSliceFloat64(size int, initialValue float64) []float64 { result := make([]float64, size) if initialValue != 0 { for i, _ := range result { result[i] = initialValue } } return result } //Returns cartesian product of specified //2d arrayb func CartProductInt(values [][]int) [][]int { pos := make([]int, len(values)) var result [][]int for pos[0] < len(values[0]) { temp := make([]int, len(values)) for j := 0; j < len(values); j++ { temp[j] = values[j][pos[j]] } result = append(result, temp) pos[len(values)-1]++ for k := len(values) - 1; k >= 1; k-- { if pos[k] >= len(values[k]) { pos[k] = 0 pos[k-1]++ } else { break } } } return result } //Searches int slice for specified integer func ContainsInt(q int, vals []int) bool { for _, val := range vals { if val == q { return true } } return false } func ContainsFloat64(q float64, vals []float64) bool { for _, val := range vals { if val == q { return true } } return false } // type CompareInt func(int) bool // func CountInt(q CompareInt, vals []int) int { // count := 0 // for i := range vals { // if q(i) { // count++ // } // } // return count // } func RandFloatRange(min, max float64) float64 { return rand.Float64()*(max-min) + min } //returns max index wise comparison func MaxInt(a, b []int) []int { result := make([]int, len(a)) for i := 0; i < len(a); i++ { if a[i] > b[i] { result[i] = a[i] } else { result[i] = b[i] } } return result } //Returns max value from specified int slice func MaxSliceInt(values []int) int { max := 0 for i := 0; i < 
len(values); i++ { if values[i] > max { max = values[i] } } return max } //Returns max value from specified float slice func MaxSliceFloat64(values []float64) float64 { max := 0.0 for i := 0; i < len(values); i++ { if values[i] > max { max = values[i] } } return max } //Returns product of set of integers func ProdInt(vals []int) int { sum := 1 for x := 0; x < len(vals); x++ { sum *= vals[x] } if sum == 1 { return 0 } else { return sum } } //Returns cumulative product func CumProdInt(vals []int) []int { if len(vals) < 2 { return vals } result := make([]int, len(vals)) result[0] = vals[0] for x := 1; x < len(vals); x++ { result[x] = vals[x] * result[x-1] } return result } //Returns cumulative product starting from end func RevCumProdInt(vals []int) []int { if len(vals) < 2 { return vals } result := make([]int, len(vals)) result[len(vals)-1] = vals[len(vals)-1] for x := len(vals) - 2; x >= 0; x-- { result[x] = vals[x] * result[x+1] } return result } func RoundPrec(x float64, prec int) float64 { if math.IsNaN(x) || math.IsInf(x, 0) { return x } sign := 1.0 if x < 0 { sign = -1 x *= -1 } var rounder float64 pow := math.Pow(10, float64(prec)) intermed := x * pow _, frac := math.Modf(intermed) if frac >= 0.5 { rounder = math.Ceil(intermed) } else { rounder = math.Floor(intermed) } return rounder / pow * sign } //Helper for unit tests where int literals are easier // to read func Make2DBool(values [][]int) [][]bool { result := make([][]bool, len(values)) for i, val := range values { result[i] = make([]bool, len(val)) for j, col := range val { result[i][j] = col == 1 } } return result } func Make1DBool(values []int) []bool { result := make([]bool, len(values)) for i, val := range values { result[i] = val == 1 } return result } //Returns number of on bits func CountInt(values []int, value int) int { count := 0 for _, val := range values { if val == value { count++ } } return count } //Returns number of on bits func CountFloat64(values []float64, value float64) int { count := 
0 for _, val := range values { if val == value { count++ } } return count } //Returns number of on bits func CountTrue(values []bool) int { count := 0 for _, val := range values { if val { count++ } } return count } //Returns number of on bits func AnyTrue(values []bool) bool { for _, val := range values { if val { return true } } return false } //Or's 2 bool slices func OrBool(a, b []bool) []bool { result := make([]bool, len(a)) for i, val := range a { result[i] = val || b[i] } return result } //Returns random slice of floats of specified length func RandomSample(length int) []float64 { result := make([]float64, length) for i, _ := range result { result[i] = rand.Float64() } return result } func Bool2Int(s []bool) []int { result := make([]int, len(s)) for idx, val := range s { if val { result[idx] = 1 } else { result[idx] = 0 } } return result } func timeTrack(start time.Time, name string) { elapsed := time.Since(start) fmt.Printf("%s took %s \n", name, elapsed) } func SumSliceFloat64(values []float64) float64 { result := 0.0 for _, val := range values { result += val } return result } //Returns "on" indices func OnIndices(s []bool) []int { var result []int for idx, val := range s { if val { result = append(result, idx) } } return result } // Returns complement of s and t func Complement(s []int, t []int) []int { result := make([]int, 0, len(s)) for _, val := range s { found := false for _, v2 := range t { if v2 == val { found = true break } } if !found { result = append(result, val) } } return result } func Add(s []int, t []int) []int { result := make([]int, 0, len(s)+len(t)) result = append(result, s...) for _, val := range t { if !ContainsInt(val, s) { result = append(result, val) } } return result }
utils/utils.go
0.644896
0.458712
utils.go
starcoder
package common import ( integreatlyv1alpha1 "github.com/integr8ly/integreatly-operator/apis/v1alpha1" ) var ( rhmiProductOperatorVersions = map[integreatlyv1alpha1.StageName]map[integreatlyv1alpha1.ProductName]integreatlyv1alpha1.OperatorVersion{ integreatlyv1alpha1.AuthenticationStage: { integreatlyv1alpha1.ProductRHSSO: integreatlyv1alpha1.OperatorVersionRHSSO, }, integreatlyv1alpha1.ObservabilityStage: { integreatlyv1alpha1.ProductObservability: integreatlyv1alpha1.OperatorVersionObservability, }, integreatlyv1alpha1.CloudResourcesStage: { integreatlyv1alpha1.ProductCloudResources: integreatlyv1alpha1.OperatorVersionCloudResources, }, integreatlyv1alpha1.ProductsStage: { integreatlyv1alpha1.Product3Scale: integreatlyv1alpha1.OperatorVersion3Scale, integreatlyv1alpha1.ProductFuse: integreatlyv1alpha1.OperatorVersionFuse, integreatlyv1alpha1.ProductRHSSOUser: integreatlyv1alpha1.OperatorVersionRHSSOUser, integreatlyv1alpha1.ProductCodeReadyWorkspaces: integreatlyv1alpha1.OperatorVersionCodeReadyWorkspaces, integreatlyv1alpha1.ProductAMQOnline: integreatlyv1alpha1.OperatorVersionAMQOnline, integreatlyv1alpha1.ProductUps: integreatlyv1alpha1.OperatorVersionUPS, integreatlyv1alpha1.ProductApicurito: integreatlyv1alpha1.OperatorVersionApicurito, }, integreatlyv1alpha1.SolutionExplorerStage: { integreatlyv1alpha1.ProductSolutionExplorer: integreatlyv1alpha1.OperatorVersionSolutionExplorer, }, } managedApiProductOperatorVersions = map[integreatlyv1alpha1.StageName]map[integreatlyv1alpha1.ProductName]integreatlyv1alpha1.OperatorVersion{ integreatlyv1alpha1.AuthenticationStage: { integreatlyv1alpha1.ProductRHSSO: integreatlyv1alpha1.OperatorVersionRHSSO, }, integreatlyv1alpha1.ObservabilityStage: { integreatlyv1alpha1.ProductObservability: integreatlyv1alpha1.OperatorVersionObservability, }, integreatlyv1alpha1.CloudResourcesStage: { integreatlyv1alpha1.ProductCloudResources: integreatlyv1alpha1.OperatorVersionCloudResources, }, integreatlyv1alpha1.ProductsStage: { 
integreatlyv1alpha1.Product3Scale: integreatlyv1alpha1.OperatorVersion3Scale, integreatlyv1alpha1.ProductRHSSOUser: integreatlyv1alpha1.OperatorVersionRHSSOUser, }, } ) func TestProductOperatorVersions(t TestingTB, ctx *TestingContext) { rhmi, err := GetRHMI(ctx.Client, true) if err != nil { t.Fatalf("failed to get the RHMI: %s", err) } operatorVersions := getOperatorVersions(rhmi.Spec.Type) for stage := range operatorVersions { for productName, operatorVersion := range operatorVersions[stage] { clusterVersion := rhmi.Status.Stages[stage].Products[productName].OperatorVersion if clusterVersion != operatorVersion { t.Errorf("Error with version of %s operator deployed on cluster. Expected %s. Got %s", productName, operatorVersion, clusterVersion) } } } } func getOperatorVersions(installType string) map[integreatlyv1alpha1.StageName]map[integreatlyv1alpha1.ProductName]integreatlyv1alpha1.OperatorVersion { if integreatlyv1alpha1.IsRHOAM(integreatlyv1alpha1.InstallationType(installType)) { return managedApiProductOperatorVersions } else { return rhmiProductOperatorVersions } }
test/common/operator_versions.go
0.544559
0.402216
operator_versions.go
starcoder
package function import ( "image" "math" "sync" "gocv.io/x/gocv" ) // Etf is the main entry struct for the edge tangent flow computation. // It encompass the basic operational entities needed for the matrix operations. type Etf struct { flowField gocv.Mat gradientField gocv.Mat refinedEtf gocv.Mat gradientMag gocv.Mat wg sync.WaitGroup mu sync.RWMutex } // point is a basic struct for vector type operations type point struct { x int y int } // NewETF is a constructor method which initializes an Etf struct. func NewETF() *Etf { return &Etf{} } // Init initializes the ETF matrices. func (etf *Etf) Init(rows, cols int) { etf.flowField = gocv.NewMatWithSize(rows, cols, gocv.MatTypeCV32F+gocv.MatChannels3) etf.gradientField = gocv.NewMatWithSize(rows, cols, gocv.MatTypeCV32F+gocv.MatChannels3) etf.refinedEtf = gocv.NewMatWithSize(rows, cols, gocv.MatTypeCV32F+gocv.MatChannels3) etf.gradientMag = gocv.NewMatWithSize(rows, cols, gocv.MatTypeCV32F+gocv.MatChannels3) } // InitDefaultEtf computes the gradientField matrix by setting up // the pixel values from original image on which a sobel threshold has been applied. 
func (etf *Etf) InitDefaultEtf(file string, size image.Point) error { etf.resizeMat(size) src := gocv.IMRead(file, gocv.IMReadColor) src.ConvertTo(&src, gocv.MatTypeCV32F, 255) gocv.Normalize(src, &src, 0.0, 1.0, gocv.NormMinMax) // Generate gradX and gradY gradX := gocv.NewMatWithSize(src.Rows(), src.Cols(), gocv.MatTypeCV32F) gradY := gocv.NewMatWithSize(src.Rows(), src.Cols(), gocv.MatTypeCV32F) gocv.Sobel(src, &gradX, gocv.MatTypeCV32F, 1, 0, 5, 1, 0, gocv.BorderDefault) gocv.Sobel(src, &gradY, gocv.MatTypeCV32F, 0, 1, 5, 1, 0, gocv.BorderDefault) // Compute gradient gocv.Magnitude(gradX, gradY, &etf.gradientMag) gocv.Normalize(etf.gradientMag, &etf.gradientMag, 0.0, 1.0, gocv.NormMinMax) width, height := src.Cols(), src.Rows() etf.wg.Add(width * height) for y := 0; y < height; y++ { for x := 0; x < width; x++ { go func(y, x int) { etf.mu.RLock() defer etf.mu.RUnlock() u := gradX.GetVecfAt(y, x) v := gradY.GetVecfAt(y, x) etf.gradientField.SetVecfAt(y, x, gocv.Vecf{v[0], u[0], 0}) etf.wg.Done() }(y, x) } } etf.wg.Wait() etf.rotateFlow(&etf.gradientField, &etf.flowField, 90) return nil } // RefineEtf will compute the refined edge tangent flow // based on the formulas from the original paper. 
func (etf *Etf) RefineEtf(kernel int) { width, height := etf.flowField.Cols(), etf.flowField.Rows() etf.wg.Add(width * height) for y := 0; y < height; y++ { for x := 0; x < width; x++ { // Spawn computation into separate goroutines go func(y, x int) { etf.mu.Lock() etf.computeNewVector(x, y, kernel) etf.mu.Unlock() etf.wg.Done() }(y, x) } } etf.wg.Wait() etf.flowField = etf.refinedEtf.Clone() } // resizeMat resize all the matrices func (etf *Etf) resizeMat(size image.Point) { gocv.Resize(etf.gradientField, &etf.gradientField, size, 0, 0, gocv.InterpolationLinear) gocv.Resize(etf.flowField, &etf.flowField, size, 0, 0, gocv.InterpolationLinear) gocv.Resize(etf.refinedEtf, &etf.refinedEtf, size, 0, 0, gocv.InterpolationLinear) gocv.Resize(etf.gradientMag, &etf.gradientMag, size, 0, 0, gocv.InterpolationLinear) } // rotateFlow applies a rotation on the original gradient field and calculates the new angles. func (etf *Etf) rotateFlow(src, dst *gocv.Mat, theta float64) { theta = theta / 180.0 * math.Pi width, height := src.Cols(), src.Rows() etf.wg.Add(width * height) for y := 0; y < height; y++ { for x := 0; x < width; x++ { go func(y, x int) { etf.mu.Lock() defer etf.mu.Unlock() v := src.GetVecfAt(y, x) // Obtain the vector value and rotate it. rx := float64(v[0])*math.Cos(theta) - float64(v[1])*math.Sin(theta) ry := float64(v[0])*math.Sin(theta) + float64(v[1])*math.Cos(theta) dst.SetVecfAt(y, x, gocv.Vecf{float32(rx), float32(ry), 0}) etf.wg.Done() }(y, x) } } etf.wg.Wait() } // computeNewVector computes a new, normalized vector from the refined edge tangent flow matrix following the original paper Eq(1). func (etf *Etf) computeNewVector(x, y int, kernel int) { var tNew0, tNew1, tNew2 float32 tCurX := etf.flowField.GetVecfAt(y, x) for r := y - kernel; r <= y+kernel; r++ { for c := x - kernel; c <= x+kernel; c++ { // Checking for boundaries. 
if r < 0 || r >= etf.refinedEtf.Rows() || c < 0 || c >= etf.refinedEtf.Cols() { continue } tCurY := etf.flowField.GetVecfAt(r, c) phi := etf.computePhi(tCurX, tCurY) // Compute the euclidean distance of the current point and the neighborhood point. ws := etf.computeWeightSpatial(point{x, y}, point{c, r}, kernel) wm := etf.computeWeightMagnitude(etf.gradientMag.GetFloatAt(y, x), etf.gradientMag.GetFloatAt(r, c)) wd := etf.computeWeightDirection(tCurX, tCurY) tNew0 += phi * tCurY[0] * ws * wm * wd tNew1 += phi * tCurY[1] * ws * wm * wd tNew2 += phi * tCurY[2] * ws * wm * wd } } etf.refinedEtf.SetVecfAt(y, x, etf.normalize(tNew0, tNew1, tNew2)) } // computeWeightSpatial implementation of Paper's Eq(2) func (etf *Etf) computeWeightSpatial(p1, p2 point, r int) float32 { // Get the euclidean distance of two points. dx := p2.x - p1.x dy := p2.y - p1.y dist := math.Sqrt(float64(dx*dx) + float64(dy*dy)) if dist < float64(r) { return 1.0 } return 0.0 } // computeWeightMagnitude implementation of Paper's Eq(3) func (etf *Etf) computeWeightMagnitude(gradMagX, gradMagY float32) float32 { return (1.0 + float32(math.Tanh(float64(gradMagX-gradMagY)))) / 2.0 } // computeWeightDirection implementation of Paper's Eq(4) func (etf *Etf) computeWeightDirection(x, y gocv.Vecf) float32 { return float32(math.Abs(float64(etf.computeDot(x, y)))) } // computePhi implementation of Paper's Eq(5) func (etf *Etf) computePhi(x, y gocv.Vecf) float32 { dot := etf.computeDot(x, y) if dot > 0 { return 1.0 } return -1.0 } // computeDot computes the dot product of two vectors func (etf *Etf) computeDot(x, y gocv.Vecf) float32 { var s float32 ch := etf.flowField.Channels() for i := 0; i < ch; i++ { s += x[i] * y[i] } return s } // normalize returns a normalized vector func (etf *Etf) normalize(x, y, z float32) gocv.Vecf { nv := float32(math.Sqrt(float64(x*x) + float64(y*y) + float64(z*z))) if nv > 0.0 { return gocv.Vecf{x * 1.0 / nv, y * 1.0 / nv, z * 1.0 / nv} } return gocv.Vecf{0.0, 0.0, 0.0} }
colidr-openfaas/etf.go
0.728265
0.527195
etf.go
starcoder
package llrp /// Compliance requirement: Compliant Readers and Clients SHALL implement this parameter. func AccessSpecStopTrigger(AccessSpecStopTriggerType ,OperationCountValue uint) []interface{} { return commonSpec( P_AccessSpecStopTrigger, []interface{}{ uint8(AccessSpecStopTriggerType), uint16(OperationCountValue), }, ) } /* * This parameter defines the air protocol access-specific settings. It contains a TagSpec and an OpSpec Parameter. The TagSpec specifies the tag filters in terms of air protocol specific memory capabilities (e.g., memory banks, pointer and length). The OpSpec specifies all the details of the operations required for the air protocol specific access operation commands. * *Compliance requirement: Compliant Readers and Clients SHALL implement this parameter. * Notes - TagSpecParameter is the air protocol specific tag spec parameter. For C1G2, it is C1G2TagSpec Parameter. */ //func AccessCommand(TagSpec,OpSpec []interface{}) []interface{} { func AccessCommand(params ...[]interface{}) []interface{} { return commonSpec( P_AccessCommand, nil, params..., ) } /* * This parameter describes the target tag population on which certain operations have to be performed. This Parameter is similar to the selection C1G2Filter Parameter described earlier. However, because these tags are stored in the Reader's memory and ternary comparisons are to be allowed for, each bit i in the target tag is represented using 2 bits - bit i in mask, and bit i in tag pattern. If bit i in the mask is zero, then bit i of the target tag is a don’t care (X); if bit i in the mask is one, then bit i of the target tag is bit i of the tag pattern. For example, “all tags” is specified using a mask length of zero. */ func C1G2TagSpec(params ...[]interface{}) []interface{} { return commonSpec( P_C1G2TagSpec, nil, params..., ) } /* * * If Length is zero, this pattern will match all tags regardless of MB, pointer, mask and data. 
*/ func C1G2TargetTag(MB, Pointer, MaskBitCount uint, TagMask []uint8,TagData []uint, Match int) []interface{} { MB = MB << 1 MB += MaskBitCount MB = MB << 5 DataBitCount := len(TagMask) return commonSpec( P_C1G2TargetTag, []interface{}{ MB, uint16(Pointer), uint16(MaskBitCount), TagMask, uint16(DataBitCount), TagData, }, ) } /* * This parameter sets up the triggers for the Reader to send the access results to the Client. In addition, the Client can enable or disable reporting of ROSpec details in the access results. */ func AccessReportSpec(AccessReportTrigger uint) []interface{} { return commonSpec( P_AccessReportSpec, nil, []interface{}{ uint8(AccessReportTrigger), }, ) } /* * This parameter carries information of the Reader access operation. */ func AccessSpec(AntennaID,ProtocolID, uint,CurrentState int,ROSpecID uint, params ...[]interface{}) []interface{} { return commonSpec( P_AccessSpecID, []interface{} { uint16(AntennaID), uint8(ProtocolID), uint8(CurrentState), uint32(ROSpecID), }, params..., ) } /* This parameter carries the information of the Reader inventory and survey operation. 
*/ // params = ROBoundarySpec Parameter , SpecParameter (1-n) , ROReportSpec Parameter func ROSpec(ROSpecID, Priority, CurrentState int, params ...[]interface{}) []interface{} { return commonSpec( P_ROSpec, []interface{}{ uint32(ROSpecID), uint8(Priority), uint8(CurrentState), }, params..., ) } // params = AISpecStopTrigger , InventoryParameter Spec and Custom Parameter func AISpec(AntennaCount int, AntennaIDn []int, params ...[]interface{}) []interface{} { inf := []interface{}{ uint16(AntennaCount), } for _, k := range AntennaIDn { inf = append(inf, uint16(k)) } return commonSpec( P_AISpec, inf, params..., ) } const ( C_AISpecStopTrigger_NULL = iota C_AISpecStopTrigger_DURATION C_AISpecStopTrigger_GPI_WITH_TIMEOUT C_AISpecStopTrigger_TAG_OBSERVATION ) // params = GPITriggerValue Parameter , TagObservationTrigger Parameter func AISpecStopTrigger(AISpecStopTriggerType, DurationTrigger int, params ...[]interface{}) []interface{} { return commonSpec( P_AISpecStopTrigger, []interface{}{ uint8(AISpecStopTriggerType), uint32(DurationTrigger), }, params..., ) } /* TriggerType: Integer value : modulation 0 Upon seeing N tag observations, or timeout. The definition of an "observation" is vendor specific. 1 Upon seeing no more new tag observations for T ms, or timeout. The definition of an "observation" is vendor specific. 2 N attempts to see all tags in the FOV, or timeout. 3 Upon seeing N unique tag observations, or timeout. 4 Upon seeing no more new unique tag observations for T ms, or timeout. ----- NumberOfTags: Unsigned Short Integer. This field SHALL be ignored when TriggerType != 0 and TriggerType != 3. NumberOfAttempts; Unsigned Short Integer. This field SHALL be ignored when TriggerType != 2. T : Unsigned Short Integer. Idle time between tag responses in milliseconds. This field SHALL be ignored when TriggerType != 1 and TriggerType != 4. Timeout : Unsigned Integer; Trigger timeout value in milliseconds. If set to zero, it indicates that there is no timeout. 
*/ func TagObservationTrigger(TriggerType, NumberOfTags, NumberOfAttempts, T, Timeout int) []interface{} { return commonSpec( P_TagObservationTrigger, []interface{}{ uint8(TriggerType), uint16(NumberOfTags), uint16(T), uint32(Timeout), }, ) } // Operational parameters for an inventory using a single air protocol. // params = AntennaConfigurationParameter , Custom Parameter func InventoryParameterSpec(InventoryParameterSpecID, ProtocolID int, params ...[]interface{}) []interface{} { return commonSpec( P_InventoryParameterSpec, []interface{}{ uint16(InventoryParameterSpecID), uint8(ProtocolID), }, params..., ) } // Details of a RF Survey operation // params = RFSurveySpecStopTrigger , Custom Parameter func RFSurveySpec(AntennaID, StartFrequency, EndFrequency int, params ...[]interface{}) []interface{} { return commonSpec( P_RFSurveySpec, []interface{}{ uint16(AntennaID), uint32(StartFrequency), uint32(EndFrequency), }, params..., ) } // RFSurveySpecStopTrigger Type const ( C_RFSurveySpecStopTrigger_NULL = iota C_RFSurveySpecStopTrigger_Duration C_RFSurveySpecStopTrigger_N_Iteration ) /* Duration: Unsigned Integer; The maximum duration of the RFSurvey operation specified in milliseconds. This field SHALL be ignored when StopTriggerType != 1. When StopTriggerType = 1, the value SHALL be greater than zero. N: Unsigned Integer. The maximum number of iterations through the specified frequency range. This field SHALL be ignored when StopTriggerType != 2. When StopTriggerType = 2, the value SHALL be greater than zero. */ func RFSurveySpecStopTrigger(StopTriggerType, Duration, N int) []interface{} { return commonSpec( P_RFSurveySpecStopTrigger, []interface{}{ uint8(StopTriggerType), uint32(Duration), uint32(N), }, ) } // Instructs the Reader to execute the first Spec in the Set of Specs. 
// LoopCount: This value instructs the reader on the number of times to loop through the Set of Specs within the ROSpec func LoopSpec(LoopCount int) []interface{} { return commonSpec( P_LoopSpec, []interface{}{ uint32(LoopCount), }, ) } // ROSpecStartTrigger Parameter, ROSpecStopTrigger Parameter func ROBoundarySpec(params ...[]interface{}) []interface{} { return commonSpec( P_ROBoundarySpec, nil, params..., ) } // PeriodicTriggerValue Parameter , GPITriggerValue Parameter func ROSpecStartTrigger(ROSpecStartTriggerType int, params ...[]interface{}) []interface{} { return commonSpec( P_ROSpecStartTrigger, []interface{}{ uint8(ROSpecStartTriggerType), }, params..., ) } // UTCTimestamp Parameter func PeriodicTriggerValue(Offset, Period int, params ...interface{}) []interface{} { return commonSpec( P_PeriodicTriggerValue, []interface{}{ uint32(Offset), uint32(Period), }, params, ) } func GPITriggerValue(GPIPortNum int, GPIEvent bool, Timeout int) []interface{} { gp := 0 if GPIEvent { gp = 0x80 } return commonSpec( P_GPITriggerValue, []interface{}{ uint16(GPIPortNum), uint8(gp), uint32(Timeout), }, ) } // GPITriggerValue Parameter func ROSpecStopTrigger(ROSpecStopTriggerType int, DurationTriggerValue int, params ...[]interface{}) []interface{} { return commonSpec( P_ROSpecStopTrigger, []interface{}{ uint8(ROSpecStopTriggerType), uint32(DurationTriggerValue), }, params..., ) } /* This parameter defines the C1G2 inventory-specific settings to be used during a particular C1G2 inventory operation. This comprises of C1G2Filter Parameter, C1G2RF Parameter and C1G2Singulation Parameter. It is not necessary that the Filter, RF Control and Singulation Control Parameters be specified in each and every inventory command.They are optional parameters. If not specified, the default values in the Reader are used during the inventory operation. 
If multiple C1G2Filter parameters are encapsulated by the Client in the C1G2InventoryCommand parameter, the ordering of the filter parameters determine the order of C1G2 air-protocol commands (e.g., Select command) generated by the Reader. C1G2Filter parameters included in the C1G2InventoryCommand parameter */ func C1G2InventoryCommand(TagInventoryStateAware bool, params ...[]interface{}) []interface{} { return commonSpec( P_C1G2InventoryCommand, []interface{}{ convertBooleanUint8(TagInventoryStateAware), }, params..., ) } /* This Parameter carries the settings relevant to RF forward and reverse link control in the C1G2 air protocol. This is basically the C1G2 RF Mode and the Tari value to use for the inventory operation. --- ModeIndex: Unsigned Integer. This is an index into the UHFC1G2RFModeTable. Tari: Integer. Value of Tari to use for this mode specified in nsec. This is specified if the mode selected has a Tari range. If the selected mode has a range, and the Tari is set to zero, the Reader implementation picks up any Tari value within the range. If the selected mode has a range, and the specified Tari is out of that range and is not set to zero, an error message is generated. Possible Values: 0 or 6250-25000 nsec */ func C1G2RFControl(ModeIndex, Tari int) []interface{} { return commonSpec( P_C1G2RFControl, []interface{}{ uint16(ModeIndex), uint16(Tari), }, ) } /* This C1G2SingulationControl Parameter provides controls particular to the singulation process in the C1G2 air protocol. The singulation process is started using a Query command in the C1G2 protocol. The Query command describes the session number, tag state, the start Q value to use, and the RF link parameters. The RF link parameters are specified using the C1G2RFControl Parameter (see section 16.2.1.2.1.2). This Singulation Parameter specifies the session, tag state and description of the target singulation environment Session: Integer. Session number to use for the inventory operation. 
Possible Values: 0-3 Tag population: Unsigned Short Integer. An estimate of the tag population in view of the RF field of the antenna. Tag transit time: Unsigned Integer. An estimate of the time a tag will typically remain in the RF field of the antenna specified in milliseconds. TagInventoryStateAwareSingulationAction: <C1G2TagInventoryStateAwareSingulationAction Parameter> (optional) params = C1G2TagInventoryStateAwareSingulationAction */ func C1G2SingulationControl(Session, TagPopulation, TagTransitTime int, params ...[]interface{}) []interface{} { return commonSpec( P_C1G2SingulationControl, []interface{}{ uint8(Session), uint16(TagPopulation), uint32(TagTransitTime), }, params..., ) }
llrp/params_operations.go
0.726911
0.441372
params_operations.go
starcoder
// Package graph defines an abstraction of the SSA graph that facilitates rendering. package graph import ( "log" "golang.org/x/tools/go/ssa" ) type relationship int const ( // Referrer represents a node that is referred to as per ssa.Value.Referrers Referrer relationship = iota // Operand represents a node that is an operand of an instruction as per ssa.Instr.Operands Operand ) // Node represents a node in the SSA graph, along with its relationship to a parent node. type Node struct { // N is the ssa.Node wrapped by this type. N ssa.Node // R is the relationship between this node and its parent in the SSA graph. R relationship } // FuncGraph represents the SSA graph for an ssa.Function. type FuncGraph struct { // F is the function whose graph this is F *ssa.Function // Children is a mapping from each node to its children (referrers + operands) Children map[ssa.Node][]Node // visited is used while creating the graph to avoid needlessly revisiting nodes visited map[ssa.Node]bool // stack is used to perform a DFS on F's SSA graph s stack } // New returns a new Graph constructed from a given function. 
func New(f *ssa.Function) *FuncGraph { g := FuncGraph{ F: f, Children: map[ssa.Node][]Node{}, visited: map[ssa.Node]bool{}, } g.visitBlocks() return &g } func (g *FuncGraph) visitBlocks() { for _, b := range g.F.Blocks { g.visit(b) } } func (g *FuncGraph) visit(b *ssa.BasicBlock) { // according to the ssa package docs, this should not happen, // but we don't want a panic if len(b.Instrs) == 0 { return } n := b.Instrs[0].(ssa.Node) g.visited[n] = true g.s.push(n) for len(g.s) > 0 { current := g.s.pop() if current == nil { break } g.visitOperands(current) g.visitReferrers(current) } } func (g *FuncGraph) visitOperands(n ssa.Node) { operands := n.Operands(nil) if operands == nil { return } for _, o := range operands { on, ok := (*o).(ssa.Node) if !ok { continue } g.addOperand(n, on) g.visitNode(on) } } func (g *FuncGraph) visitReferrers(n ssa.Node) { if n.Referrers() == nil { return } for _, ref := range *n.Referrers() { rn := ref.(ssa.Node) g.addReferrer(n, rn) g.visitNode(rn) } } func (g *FuncGraph) visitNode(n ssa.Node) { if g.visited[n] { return } g.visited[n] = true g.s.push(n) } func (g *FuncGraph) addReferrer(current, referrer ssa.Node) { g.Children[current] = append(g.Children[current], Node{N: referrer, R: Referrer}) } func (g *FuncGraph) addOperand(current, operand ssa.Node) { g.Children[current] = append(g.Children[current], Node{N: operand, R: Operand}) } type stack []ssa.Node func (s *stack) pop() ssa.Node { if len(*s) == 0 { log.Println("tried to pop from empty stack") } popped := (*s)[len(*s)-1] *s = (*s)[:len(*s)-1] return popped } func (s *stack) push(n ssa.Node) { *s = append(*s, n) }
internal/pkg/debug/graph/graph.go
0.745676
0.499756
graph.go
starcoder
package block import ( "fmt" "sort" "strings" "time" "github.com/m3db/m3/src/query/models" ) // Metadata is metadata for a block, describing size and common tags across // constituent series. type Metadata struct { // Bounds represents the time bounds for all series in the block. Bounds models.Bounds // Tags contains any tags common across all series in the block. Tags models.Tags // ResultMetadata contains metadata from any database access operations during // fetching block details. ResultMetadata ResultMetadata } // Equals returns a boolean reporting whether the compared metadata has equal // fields. func (m Metadata) Equals(other Metadata) bool { return m.Tags.Equals(other.Tags) && m.Bounds.Equals(other.Bounds) } // String returns a string representation of metadata. func (m Metadata) String() string { return fmt.Sprintf("Bounds: %v, Tags: %v", m.Bounds, m.Tags) } // Warnings is a slice of warnings. type Warnings []Warning // ResultMetadata describes metadata common to each type of query results, // indicating any additional information about the result. type ResultMetadata struct { // LocalOnly indicates that this query was executed only on the local store. LocalOnly bool // Exhaustive indicates whether the underlying data set presents a full // collection of retrieved data. Exhaustive bool // Warnings is a list of warnings that indicate potetitally partial or // incomplete results. Warnings Warnings // Resolutions is a list of resolutions for series obtained by this query. Resolutions []time.Duration // KeepNaNs indicates if NaNs should be kept when returning results. KeepNaNs bool // WaitedIndex counts how many times index querying had to wait for permits. WaitedIndex int // WaitedSeriesRead counts how many times series being read had to wait for permits. WaitedSeriesRead int // FetchedSeriesCount is the total number of series that were fetched to compute // this result. FetchedSeriesCount int } // NewResultMetadata creates a new result metadata. 
func NewResultMetadata() ResultMetadata { return ResultMetadata{ LocalOnly: true, Exhaustive: true, } } func combineResolutions(a, b []time.Duration) []time.Duration { if len(a) == 0 { if len(b) != 0 { return b } } else { if len(b) == 0 { return a } combined := make([]time.Duration, 0, len(a)+len(b)) combined = append(combined, a...) combined = append(combined, b...) return combined } return nil } func combineWarnings(a, b Warnings) Warnings { if len(a) == 0 { if len(b) != 0 { return b } } else { if len(b) == 0 { return a } combinedWarnings := make(Warnings, 0, len(a)+len(b)) combinedWarnings = append(combinedWarnings, a...) return combinedWarnings.addWarnings(b...) } return nil } // Equals determines if two result metadatas are equal. func (m ResultMetadata) Equals(n ResultMetadata) bool { if m.Exhaustive && !n.Exhaustive || !m.Exhaustive && n.Exhaustive { return false } if m.LocalOnly && !n.LocalOnly || !m.LocalOnly && n.LocalOnly { return false } if len(m.Resolutions) != len(n.Resolutions) { return false } for i, mRes := range m.Resolutions { if n.Resolutions[i] != mRes { return false } } for i, mWarn := range m.Warnings { if !n.Warnings[i].equals(mWarn) { return false } } if m.WaitedIndex != n.WaitedIndex { return false } if m.WaitedSeriesRead != n.WaitedSeriesRead { return false } return m.FetchedSeriesCount == n.FetchedSeriesCount } // CombineMetadata combines two result metadatas. func (m ResultMetadata) CombineMetadata(other ResultMetadata) ResultMetadata { return ResultMetadata{ LocalOnly: m.LocalOnly && other.LocalOnly, Exhaustive: m.Exhaustive && other.Exhaustive, Warnings: combineWarnings(m.Warnings, other.Warnings), Resolutions: combineResolutions(m.Resolutions, other.Resolutions), WaitedIndex: m.WaitedIndex + other.WaitedIndex, WaitedSeriesRead: m.WaitedSeriesRead + other.WaitedSeriesRead, FetchedSeriesCount: m.FetchedSeriesCount + other.FetchedSeriesCount, } } // IsDefault returns true if this result metadata matches the unchanged default. 
func (m ResultMetadata) IsDefault() bool { return m.Exhaustive && m.LocalOnly && len(m.Warnings) == 0 } // VerifyTemporalRange will verify that each resolution seen is below the // given step size, adding warning headers if it is not. func (m *ResultMetadata) VerifyTemporalRange(step time.Duration) { // NB: this map is unlikely to have more than 2 elements in real execution, // since these correspond to namespace count. invalidResolutions := make(map[time.Duration]struct{}, 10) for _, res := range m.Resolutions { if res > step { invalidResolutions[res] = struct{}{} } } if len(invalidResolutions) > 0 { warnings := make([]string, 0, len(invalidResolutions)) for k := range invalidResolutions { warnings = append(warnings, fmt.Sprintf("%v", time.Duration(k))) } sort.Strings(warnings) warning := fmt.Sprintf("range: %v, resolutions: %s", step, strings.Join(warnings, ", ")) m.AddWarning("resolution larger than query range", warning) } } // AddWarning adds a warning to the result metadata. // NB: warnings are expected to be small in general, so it's better to iterate // over the array rather than introduce a map. func (m *ResultMetadata) AddWarning(name string, message string) { m.Warnings = m.Warnings.addWarnings(Warning{ Name: name, Message: message, }) } // NB: this is not a very efficient merge but this is extremely unlikely to be // merging more than 5 or 6 total warnings. func (w Warnings) addWarnings(warnings ...Warning) Warnings { for _, newWarning := range warnings { found := false for _, warning := range w { if warning.equals(newWarning) { found = true break } } if !found { w = append(w, newWarning) } } return w } // WarningStrings converts warnings to a slice of strings for presentation. 
func (m ResultMetadata) WarningStrings() []string { size := len(m.Warnings) if !m.Exhaustive { size++ } strs := make([]string, 0, size) for _, warn := range m.Warnings { strs = append(strs, warn.Header()) } if !m.Exhaustive { strs = append(strs, "m3db exceeded query limit: results not exhaustive") } return strs } // Warning is a message that indicates potential partial or incomplete results. type Warning struct { // Name is the name of the store originating the warning. Name string // Message is the content of the warning message. Message string } // Header formats the warning into a format to send in a response header. func (w Warning) Header() string { return fmt.Sprintf("%s_%s", w.Name, w.Message) } func (w Warning) equals(warning Warning) bool { return w.Name == warning.Name && w.Message == warning.Message }
src/query/block/meta.go
0.755907
0.459197
meta.go
starcoder
package simple import "k8s.io/kubernetes/third_party/forked/gonum/graph" // edgeHolder represents a set of edges, with no more than one edge to or from a particular neighbor node type edgeHolder interface { // Visit invokes visitor with each edge and the id of the neighbor node in the edge Visit(visitor func(neighbor int, edge graph.Edge)) // Delete removes edges to or from the specified neighbor Delete(neighbor int) edgeHolder // Set stores the edge to or from the specified neighbor Set(neighbor int, edge graph.Edge) edgeHolder // Get returns the edge to or from the specified neighbor Get(neighbor int) (graph.Edge, bool) // Len returns the number of edges Len() int } // sliceEdgeHolder holds a list of edges to or from self type sliceEdgeHolder struct { self int edges []graph.Edge } func (e *sliceEdgeHolder) Visit(visitor func(neighbor int, edge graph.Edge)) { for _, edge := range e.edges { if edge.From().ID() == e.self { visitor(edge.To().ID(), edge) } else { visitor(edge.From().ID(), edge) } } } func (e *sliceEdgeHolder) Delete(neighbor int) edgeHolder { edges := e.edges[:0] for i, edge := range e.edges { if edge.From().ID() == e.self { if edge.To().ID() == neighbor { continue } } else { if edge.From().ID() == neighbor { continue } } edges = append(edges, e.edges[i]) } e.edges = edges return e } func (e *sliceEdgeHolder) Set(neighbor int, newEdge graph.Edge) edgeHolder { for i, edge := range e.edges { if edge.From().ID() == e.self { if edge.To().ID() == neighbor { e.edges[i] = newEdge return e } } else { if edge.From().ID() == neighbor { e.edges[i] = newEdge return e } } } if len(e.edges) < 4 { e.edges = append(e.edges, newEdge) return e } h := mapEdgeHolder(make(map[int]graph.Edge, len(e.edges)+1)) for i, edge := range e.edges { if edge.From().ID() == e.self { h[edge.To().ID()] = e.edges[i] } else { h[edge.From().ID()] = e.edges[i] } } h[neighbor] = newEdge return h } func (e *sliceEdgeHolder) Get(neighbor int) (graph.Edge, bool) { for _, edge := range e.edges { 
if edge.From().ID() == e.self { if edge.To().ID() == neighbor { return edge, true } } else { if edge.From().ID() == neighbor { return edge, true } } } return nil, false } func (e *sliceEdgeHolder) Len() int { return len(e.edges) } // mapEdgeHolder holds a map of neighbors to edges type mapEdgeHolder map[int]graph.Edge func (e mapEdgeHolder) Visit(visitor func(neighbor int, edge graph.Edge)) { for neighbor, edge := range e { visitor(neighbor, edge) } } func (e mapEdgeHolder) Delete(neighbor int) edgeHolder { delete(e, neighbor) return e } func (e mapEdgeHolder) Set(neighbor int, edge graph.Edge) edgeHolder { e[neighbor] = edge return e } func (e mapEdgeHolder) Get(neighbor int) (graph.Edge, bool) { edge, ok := e[neighbor] return edge, ok } func (e mapEdgeHolder) Len() int { return len(e) }
third_party/forked/gonum/graph/simple/edgeholder.go
0.672654
0.760006
edgeholder.go
starcoder
package thumbnail import ( "fmt" "image" "math" "strconv" "strings" "github.com/pkg/errors" ) const ( _resolutionSeperator = "x" ) // ParseResolution returns an image.Rectangle representing the resolution given as a string func ParseResolution(s string) (image.Rectangle, error) { parts := strings.Split(s, _resolutionSeperator) if len(parts) != 2 { return image.Rectangle{}, fmt.Errorf("failed to parse resolution: %s. Expected format <width>x<height>", s) } width, err := strconv.Atoi(parts[0]) if err != nil { return image.Rectangle{}, fmt.Errorf("width: %s has an invalid value. Expected an integer", parts[0]) } height, err := strconv.Atoi(parts[1]) if err != nil { return image.Rectangle{}, fmt.Errorf("height: %s has an invalid value. Expected an integer", parts[1]) } return image.Rect(0, 0, width, height), nil } // Resolutions is a list of image.Rectangle representing resolutions. type Resolutions []image.Rectangle // ParseResolutions creates an instance of Resolutions from resolution strings. func ParseResolutions(strs []string) (Resolutions, error) { rs := make(Resolutions, 0, len(strs)) for _, s := range strs { r, err := ParseResolution(s) if err != nil { return nil, errors.Wrap(err, "could not parse resolutions") } rs = append(rs, r) } return rs, nil } // ClosestMatch returns the resolution which is closest to the provided resolution. // If there is no exact match the resolution will be the next higher one. // If the given resolution is bigger than all available resolutions the biggest available one is used. func (rs Resolutions) ClosestMatch(requested image.Rectangle, sourceSize image.Rectangle) image.Rectangle { isLandscape := sourceSize.Dx() > sourceSize.Dy() sourceLen := dimensionLength(sourceSize, isLandscape) requestedLen := dimensionLength(requested, isLandscape) isSourceSmaller := sourceLen < requestedLen // We don't want to scale images up. 
if isSourceSmaller { return sourceSize } if len(rs) == 0 { return requested } var match image.Rectangle // Since we want to search for the smallest difference we start with the highest possible number minDiff := math.MaxInt32 for _, current := range rs { cLen := dimensionLength(current, isLandscape) diff := requestedLen - cLen if diff > 0 { // current is smaller continue } // Convert diff to positive value // Multiplying by -1 is safe since we aren't getting postive numbers here // because of the check above absDiff := diff * -1 if absDiff < minDiff { minDiff = absDiff match = current } } if (match == image.Rectangle{}) { match = rs[len(rs)-1] } return match } func mapRatio(given image.Rectangle, other image.Rectangle) image.Rectangle { isLandscape := given.Dx() > given.Dy() ratio := float64(given.Dx()) / float64(given.Dy()) if isLandscape { return image.Rect(0, 0, other.Dx(), int(float64(other.Dx())/ratio)) } return image.Rect(0, 0, int(float64(other.Dy())*ratio), other.Dy()) } func dimensionLength(rect image.Rectangle, isLandscape bool) int { if isLandscape { return rect.Dx() } return rect.Dy() }
thumbnails/pkg/thumbnail/resolutions.go
0.842475
0.494141
resolutions.go
starcoder
package tango // An Axis is an input which is a spectrum of values. An example of this is the horizontal movement in a game, or how far a joystick is pressed. type Axis struct { // Name represents the name of the axis (Horizontal, Vertical) Name string // Pairs represents the axis pairs of this acis Pairs []AxisPair } // Value returns the value of an Axis. func (a Axis) Value() float32 { for _, pair := range a.Pairs { v := pair.Value() if v != AxisNeutral { return v } } return AxisNeutral } // An AxisPair is a set of Min/Max values which could possible be used by an Axis. type AxisPair interface { Value() float32 } // An AxisKeyPair is a set of Min/Max values used for detecting whether or not a key has been pressed. type AxisKeyPair struct { Min Key Max Key } // Value returns the value of a keypress. func (keys AxisKeyPair) Value() float32 { if Input.keys.Get(keys.Max).Down() { return AxisMax } else if Input.keys.Get(keys.Min).Down() { return AxisMin } return AxisNeutral } // AxisMouseDirection is the direction (X or Y) which the mouse is being tracked for. type AxisMouseDirection uint const ( // AxisMouseVert is vertical mouse axis AxisMouseVert AxisMouseDirection = 0 // AxisMouseHori is vertical mouse axis AxisMouseHori AxisMouseDirection = 1 ) // AxisMouse is an axis for a single x or y component of the Mouse. The value returned from it is // the delta movement, since the previous call and it is not constrained by the AxisMin and AxisMax values. type AxisMouse struct { // direction is the value storing either AxisMouseVert and AxisMouseHori. It determines which directional // component to operate on. direction AxisMouseDirection // old is the delta from the previous calling of Value. old float32 } // NewAxisMouse creates a new Mouse Axis in either direction AxisMouseVert or AxisMouseHori. 
func NewAxisMouse(d AxisMouseDirection) *AxisMouse { old := Input.Mouse.Y if d == AxisMouseHori { old = Input.Mouse.X } return &AxisMouse{ direction: d, old: old, } } // Value returns the delta of a mouse movement. func (am *AxisMouse) Value() float32 { var diff float32 if am.direction == AxisMouseHori { diff = Input.Mouse.X - am.old + (ResizeXOffset / (2 * GetGlobalScale().X * CanvasScale())) am.old = Input.Mouse.X + (ResizeXOffset / (2 * GetGlobalScale().X * CanvasScale())) } else { diff = Input.Mouse.Y - am.old + (ResizeYOffset / (2 * GetGlobalScale().Y * CanvasScale())) am.old = Input.Mouse.Y + (ResizeYOffset / (2 * GetGlobalScale().Y * CanvasScale())) } return diff }
axis.go
0.864625
0.607168
axis.go
starcoder
package cios import ( "encoding/json" ) // MultipleDataStoreDataLatest struct for MultipleDataStoreDataLatest type MultipleDataStoreDataLatest struct { Total *float32 `json:"total,omitempty"` Objects *[]PackerFormatJson `json:"objects,omitempty"` Errors *[]DataError `json:"errors,omitempty"` } // NewMultipleDataStoreDataLatest instantiates a new MultipleDataStoreDataLatest object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewMultipleDataStoreDataLatest() *MultipleDataStoreDataLatest { this := MultipleDataStoreDataLatest{} return &this } // NewMultipleDataStoreDataLatestWithDefaults instantiates a new MultipleDataStoreDataLatest object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewMultipleDataStoreDataLatestWithDefaults() *MultipleDataStoreDataLatest { this := MultipleDataStoreDataLatest{} return &this } // GetTotal returns the Total field value if set, zero value otherwise. func (o *MultipleDataStoreDataLatest) GetTotal() float32 { if o == nil || o.Total == nil { var ret float32 return ret } return *o.Total } // GetTotalOk returns a tuple with the Total field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *MultipleDataStoreDataLatest) GetTotalOk() (*float32, bool) { if o == nil || o.Total == nil { return nil, false } return o.Total, true } // HasTotal returns a boolean if a field has been set. func (o *MultipleDataStoreDataLatest) HasTotal() bool { if o != nil && o.Total != nil { return true } return false } // SetTotal gets a reference to the given float32 and assigns it to the Total field. 
func (o *MultipleDataStoreDataLatest) SetTotal(v float32) { o.Total = &v } // GetObjects returns the Objects field value if set, zero value otherwise. func (o *MultipleDataStoreDataLatest) GetObjects() []PackerFormatJson { if o == nil || o.Objects == nil { var ret []PackerFormatJson return ret } return *o.Objects } // GetObjectsOk returns a tuple with the Objects field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *MultipleDataStoreDataLatest) GetObjectsOk() (*[]PackerFormatJson, bool) { if o == nil || o.Objects == nil { return nil, false } return o.Objects, true } // HasObjects returns a boolean if a field has been set. func (o *MultipleDataStoreDataLatest) HasObjects() bool { if o != nil && o.Objects != nil { return true } return false } // SetObjects gets a reference to the given []PackerFormatJson and assigns it to the Objects field. func (o *MultipleDataStoreDataLatest) SetObjects(v []PackerFormatJson) { o.Objects = &v } // GetErrors returns the Errors field value if set, zero value otherwise. func (o *MultipleDataStoreDataLatest) GetErrors() []DataError { if o == nil || o.Errors == nil { var ret []DataError return ret } return *o.Errors } // GetErrorsOk returns a tuple with the Errors field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *MultipleDataStoreDataLatest) GetErrorsOk() (*[]DataError, bool) { if o == nil || o.Errors == nil { return nil, false } return o.Errors, true } // HasErrors returns a boolean if a field has been set. func (o *MultipleDataStoreDataLatest) HasErrors() bool { if o != nil && o.Errors != nil { return true } return false } // SetErrors gets a reference to the given []DataError and assigns it to the Errors field. 
func (o *MultipleDataStoreDataLatest) SetErrors(v []DataError) { o.Errors = &v } func (o MultipleDataStoreDataLatest) MarshalJSON() ([]byte, error) { toSerialize := map[string]interface{}{} if o.Total != nil { toSerialize["total"] = o.Total } if o.Objects != nil { toSerialize["objects"] = o.Objects } if o.Errors != nil { toSerialize["errors"] = o.Errors } return json.Marshal(toSerialize) } type NullableMultipleDataStoreDataLatest struct { value *MultipleDataStoreDataLatest isSet bool } func (v NullableMultipleDataStoreDataLatest) Get() *MultipleDataStoreDataLatest { return v.value } func (v *NullableMultipleDataStoreDataLatest) Set(val *MultipleDataStoreDataLatest) { v.value = val v.isSet = true } func (v NullableMultipleDataStoreDataLatest) IsSet() bool { return v.isSet } func (v *NullableMultipleDataStoreDataLatest) Unset() { v.value = nil v.isSet = false } func NewNullableMultipleDataStoreDataLatest(val *MultipleDataStoreDataLatest) *NullableMultipleDataStoreDataLatest { return &NullableMultipleDataStoreDataLatest{value: val, isSet: true} } func (v NullableMultipleDataStoreDataLatest) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableMultipleDataStoreDataLatest) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
cios/model_multiple_data_store_data_latest.go
0.78789
0.458409
model_multiple_data_store_data_latest.go
starcoder
// The heappermutations package implements primitives to generate all // possible permutations following Heap's algorithm on typed collection. package heappermutations // An internal type that defines the generic structure of permutable collections type heapInterface interface { // Len returns the number of elements in the collection Len() int // Swap swaps the elements with indexes i and j Swap(i, j int) // Copy copies the existing slice in a new slice Copy() heapInterface } // intSlice attaches the methods of heapInterface to []int. type intSlice []int func (p intSlice) Len() int { return len(p) } func (p intSlice) Swap(i, j int) { p[i], p[j] = p[j], p[i] } func (p intSlice) Copy() heapInterface { A := make(intSlice, p.Len()) copy(A, p) return A } // float64Slice attaches the methods of heapInterface to []int. type float64Slice []float64 func (p float64Slice) Len() int { return len(p) } func (p float64Slice) Swap(i, j int) { p[i], p[j] = p[j], p[i] } func (p float64Slice) Copy() heapInterface { A := make(float64Slice, p.Len()) copy(A, p) return A } // stringSlice attaches the methods of heapInterface to []int. type stringSlice []string func (p stringSlice) Len() int { return len(p) } func (p stringSlice) Swap(i, j int) { p[i], p[j] = p[j], p[i] } func (p stringSlice) Copy() heapInterface { A := make(stringSlice, p.Len()) copy(A, p) return A } // Ints returns all permutations of a slice of ints. func Ints(a []int) [][]int { interfaceSlice := heapPermutations(intSlice(a)) permutations := make([][]int, len(interfaceSlice)) // Assert and transfer each permuted item to our target-typed structure for i, d := range interfaceSlice { permutations[i] = d.(intSlice) } return permutations } // Strings returns all permutations of a slice of strings. 
func Strings(a []string) [][]string { interfaceSlice := heapPermutations(stringSlice(a)) permutations := make([][]string, len(interfaceSlice)) for i, d := range interfaceSlice { permutations[i] = d.(stringSlice) } return permutations } // Float64s returns all permutations of a slice of float64s. func Float64s(a []float64) [][]float64 { interfaceSlice := heapPermutations(float64Slice(a)) permutations := make([][]float64, len(interfaceSlice)) for i, d := range interfaceSlice { permutations[i] = d.(float64Slice) } return permutations } // An implementation of Heap's algorithm func heapPermutations(data heapInterface) []heapInterface { permutations := []heapInterface{} var generate func(int, heapInterface) generate = func(n int, arr heapInterface) { if n == 1 { A := arr.Copy() permutations = append(permutations, A) } else { for i := 0; i < n; i++ { generate(n-1, arr) if n%2 == 0 { arr.Swap(i, n-1) } else { arr.Swap(0, n-1) } } } } generate(data.Len(), data) return permutations }
heappermutations.go
0.711932
0.460895
heappermutations.go
starcoder
package scanutil import ( "errors" "strconv" "strings" "unicode" ) type Matcher struct { sc *Scanner } func (m *Matcher) Rune(ru rune) bool { return m.sc.RewindOnFalse(func() bool { return m.sc.ReadRune() == ru }) } func (m *Matcher) End() bool { return m.Rune(Eof) || m.Rune(readErr) } func (m *Matcher) Any(valid string) bool { return m.sc.RewindOnFalse(func() bool { return strings.ContainsRune(valid, m.sc.ReadRune()) }) } func (m *Matcher) Except(invalid string) bool { return m.sc.RewindOnFalse(func() bool { if m.End() { return false } return !strings.ContainsRune(invalid, m.sc.ReadRune()) }) } func (m *Matcher) Sequence(s string) bool { if s == "" { return false } if m.sc.Reverse { s = ReverseString(s) } return m.sc.RewindOnFalse(func() bool { for _, ru := range s { if m.sc.ReadRune() != ru { return false } } return true }) } func (m *Matcher) Fn(fn func(rune) bool) bool { return m.sc.RewindOnFalse(func() bool { if m.End() { return false } return fn(m.sc.ReadRune()) }) } // Returns true if at least one rune was read. func (m *Matcher) FnLoop(fn func(rune) bool) bool { v := false for { if m.Fn(fn) { v = true continue } break } return v } // Must all return true. 
func (m *Matcher) FnOrder(fns ...func() bool) bool { index := func(i int) int { if m.sc.Reverse { return len(fns) - 1 - i } return i } return m.sc.RewindOnFalse(func() bool { for i := 0; i < len(fns); i++ { fn := fns[index(i)] if !fn() { return false } } return true }) } //---------- func (m *Matcher) NRunes(n int) bool { return m.sc.RewindOnFalse(func() bool { c := 0 _ = m.FnLoop(func(ru rune) bool { if c >= n { return false // stop loop } c++ return true }) return c == n // result }) } func (m *Matcher) NPos(n int) bool { if m.sc.Reverse { if m.sc.Pos-n < m.sc.R.Min() { return false } m.sc.Pos -= n return true } if m.sc.Pos+n > m.sc.R.Max() { return false } m.sc.Pos += n return true } //---------- func (m *Matcher) Spaces() bool { return m.FnLoop(unicode.IsSpace) } func (m *Matcher) SpacesExceptNewline() bool { return m.FnLoop(func(ru rune) bool { if ru == '\n' { return false } return unicode.IsSpace(ru) }) } func (m *Matcher) ExceptUnescapedSpaces(escape rune) bool { pos := m.sc.Pos notSpace := func(ru rune) bool { return !unicode.IsSpace(ru) } for { if m.End() { break } if m.Escape(escape) { continue } if m.Fn(notSpace) { continue } break } return m.sc.Pos != pos } func (m *Matcher) ToNewlineOrEnd() { _ = m.FnLoop(func(ru rune) bool { return ru != '\n' }) } //---------- func (m *Matcher) Section(open, close string, escape rune, failOnNewline bool, maxLen int, eofClose bool) bool { return m.sc.RewindOnFalse(func() bool { start := m.sc.Pos if !m.Sequence(open) { return false } for { if escape != 0 && m.Escape(escape) { continue } if m.Sequence(close) { return true } ru := m.sc.ReadRune() // consume rune // extension: stop on eof if ru == Eof { return eofClose } // extension: newline if failOnNewline && ru == '\n' { return false } // extension: stop on maxlength if maxLen > 0 { d := m.sc.Pos - start if d < 0 { d = -d } if d >= maxLen { return false } } } }) } //---------- func (m *Matcher) GoQuotes(escape rune, maxLen, maxLenSingleQuote int) bool { if m.Quote('"', 
escape, true, maxLen) { return true } if m.Quote('`', escape, false, maxLen) { return true } if m.Quote('\'', escape, true, maxLenSingleQuote) { return true } return false } func (m *Matcher) Quote(quote rune, escape rune, failOnNewline bool, maxLen int) bool { q := string(quote) return m.Section(q, q, escape, failOnNewline, maxLen, false) } func (m *Matcher) Quoted(validQuotes string, escape rune, failOnNewline bool, maxLen int) bool { ru := m.sc.PeekRune() if strings.ContainsRune(validQuotes, ru) { if m.Quote(ru, escape, failOnNewline, maxLen) { return true } } return false } func (m *Matcher) DoubleQuoteStr() bool { q := string('"') return m.Section(q, q, '\\', true, 0, false) } func (m *Matcher) SingleQuoteStr() bool { q := string('\'') return m.Section(q, q, '\\', true, 0, false) } func (m *Matcher) MultiLineComment() bool { return m.Section("/*", "*/", 0, false, 0, false) } func (m *Matcher) LineComment() bool { return m.Section("//", "\n", 0, true, 0, false) } //---------- func (m *Matcher) Escape(escape rune) bool { if m.sc.Reverse { return m.reverseEscape(escape) } return m.sc.RewindOnFalse(func() bool { // needs rune to succeed, will fail on eos return m.Rune(escape) && m.NRunes(1) }) } func (m *Matcher) reverseEscape(escape rune) bool { return m.sc.RewindOnFalse(func() bool { if !m.NRunes(1) { return false } // need to read odd number of escapes to accept c := 0 epos := 0 for { if m.Rune(escape) { c++ if c == 1 { epos = m.sc.Pos } else if c > 10 { // max escapes to test return false } } else { if c%2 == 1 { // odd m.sc.Pos = epos // epos was set return true } return false } } }) } //---------- func (m *Matcher) Id() bool { if m.sc.Reverse { panic("can't parse in reverse") } if !(m.Any("_") || m.Fn(unicode.IsLetter)) { return false } for m.Any("_-") || m.FnLoop(unicode.IsLetter) || m.FnLoop(unicode.IsDigit) { } return true } func (m *Matcher) Int() bool { return m.FnOrder( func() bool { _ = m.Any("+-") return true }, func() bool { return 
m.FnLoop(unicode.IsDigit) }) } func (m *Matcher) Float() bool { if m.sc.Reverse { panic("can't parse in reverse") // TODO } return m.sc.RewindOnFalse(func() bool { ok := false _ = m.Any("+-") if m.FnLoop(unicode.IsDigit) { ok = true } if m.Any(".") { if m.FnLoop(unicode.IsDigit) { ok = true } } ok3 := m.sc.RewindOnFalse(func() bool { ok2 := false if m.Any("eE") { _ = m.Any("+-") if m.FnLoop(unicode.IsDigit) { ok2 = true } } return ok2 }) return ok || ok3 }) } //---------- func (m *Matcher) IntValue() (int, error) { if !m.Int() { return 0, errors.New("failed to parse int") } return strconv.Atoi(string(m.sc.Value())) } func (m *Matcher) FloatValue() (float64, error) { if !m.Float() { return 0, errors.New("failed to parse float") } return strconv.ParseFloat(string(m.sc.Value()), 64) } //---------- func (m *Matcher) IntValueAdvance() (int, error) { v, err := m.IntValue() if err != nil { return 0, err } m.sc.Advance() return v, nil } func (m *Matcher) FloatValueAdvance() (float64, error) { v, err := m.FloatValue() if err != nil { return 0, err } m.sc.Advance() return v, nil } //---------- func (m *Matcher) SpacesAdvance() error { if !m.Spaces() { return m.sc.Errorf("expecting space") } m.sc.Advance() return nil } //---------- func ReverseString(s string) string { r := []rune(s) for i, j := 0, len(r)-1; i < len(r)/2; i, j = i+1, j-1 { r[i], r[j] = r[j], r[i] } return string(r) }
util/scanutil/matcher.go
0.604516
0.448064
matcher.go
starcoder
package nitro import ( "image" "image/color" "image/draw" "math" ) // BUG: these drawing routines may be incorrect when src and dst overlap // DrawUnder aligns r.Min in dst with sp in src and replaces r with the result of (src over dst). func drawUnder(dst draw.Image, r image.Rectangle, src image.Image, sp image.Point) { if dst, ok := dst.(*image.Paletted); ok { if src, ok := src.(image.PalettedImage); ok { if samePalette(dst.Palette, src.ColorModel().(color.Palette)) { drawPalettedUnder(dst, r, src, sp) } } } // Slow, and broken for paletted images //draw.DrawMask(dst, r, src, sp, under{dst}, r.Min, draw.Over) // XXX Is drawGenericUnder actually faster? Does it matter? //rotate(dst, r, r.Min, src, sp, 1, 1, 0) drawGenericUnder(dst, r, src, sp) } func drawGenericUnder(dst draw.Image, r image.Rectangle, src image.Image, sp image.Point) { dp := r.Min r = clip(r, dst.Bounds(), dp, src.Bounds(), sp) for y := r.Min.Y; y < r.Max.Y; y++ { for x := r.Min.X; x < r.Max.X; x++ { dr, dg, db, da := dst.At(x, y).RGBA() if da == 0xFFFF { continue } sx := x + sp.X - dp.X sy := y + sp.Y - dp.Y if da == 0 { dst.Set(x, y, src.At(sx, sy)) continue } a := 0xFFFF - da sr, sg, sb, sa := src.At(sx, sy).RGBA() dst.Set(x, y, color.RGBA64{ uint16(dr + sr*a/0xFFFF), uint16(dg + sg*a/0xFFFF), uint16(db + sb*a/0xFFFF), uint16(da + sa*a/0xFFFF), }) } } } // n.b. assumes that index 0 is fully transparent and all other colors are fully opaque func drawPalettedUnder(dst *image.Paletted, r image.Rectangle, src image.PalettedImage, sp image.Point) { dp := r.Min r = clip(r, dst.Bounds(), dp, src.Bounds(), sp) sr := r.Add(sp.Sub(dp)) d0 := dst.PixOffset(r.Min.X, r.Min.Y) for y := sr.Min.Y; y < sr.Max.Y; y++ { for i, x := 0, sr.Min.X; x < sr.Max.X; i, x = i+1, x+1 { if dst.Pix[d0+i] != 0 { continue } dst.Pix[d0+i] = src.ColorIndexAt(x, y) } d0 += dst.Stride } } // Clip clips the rectangle r to the src and dst rectangles. 
func clip(r, dst image.Rectangle, dp image.Point, src image.Rectangle, sp image.Point) image.Rectangle { r = r.Intersect(dst) r = r.Intersect(src.Add(dp.Sub(sp))) return r } // SamePalette reports whether p and q have the same length and share the same backing array. func samePalette(p, q color.Palette) bool { return len(p) == len(q) && &p[0] == &q[0] } // Under represents an image.Image by the inverse of its alpha channel. type under struct{ m image.Image } func (u under) Bounds() image.Rectangle { return u.m.Bounds() } func (u under) ColorModel() color.Model { return color.Alpha16Model } func (u under) At(x, y int) color.Color { _, _, _, a := u.m.At(x, y).RGBA() return color.Alpha16{uint16(0xffff - a)} } // Rotate draws a image rotated clockwise around the point dp by deg degrees // and scaled by 1/scale. The point sp gives the corresponding center point in // the source image. func rotate(dst draw.Image, r image.Rectangle, dp image.Point, src image.Image, sp image.Point, scaleX, scaleY, deg float64) { if dstp, ok := dst.(*image.Paletted); ok { if srcp, ok := src.(*image.Paletted); ok { rotatePaletted(dstp, r, dp, srcp, sp, scaleX, scaleY, deg) return } } sin := -round(math.Sin(deg * (2 * math.Pi))) cos := round(math.Cos(deg * (2 * math.Pi))) sr := src.Bounds() r = r.Intersect(dst.Bounds()) for y := r.Min.Y; y < r.Max.Y; y++ { for x := r.Min.X; x < r.Max.X; x++ { if _, _, _, a := dst.At(x, y).RGBA(); a != 0 { continue } sx := sp.X + int((float64(x-dp.X)*cos-float64(y-dp.Y)*sin)*scaleX) sy := sp.Y + int((float64(x-dp.X)*sin+float64(y-dp.Y)*cos)*scaleY) if !image.Pt(sx, sy).In(sr) { continue } dst.Set(x, y, src.At(sx, sy)) } } } func rotatePaletted(dst *image.Paletted, r image.Rectangle, dp image.Point, src *image.Paletted, sp image.Point, scaleX, scaleY, deg float64) { sin := -round(math.Sin(deg * (2 * math.Pi))) cos := round(math.Cos(deg * (2 * math.Pi))) sr := src.Bounds() r = r.Intersect(dst.Bounds()) //pix := dst.Pix[dst.PixOffset(r.Min.X, r.Min.Y):] for y := 
r.Min.Y; y < r.Max.Y; y++ { for x := r.Min.X; x < r.Max.X; x++ { if dst.ColorIndexAt(x, y) != 0 { continue } sx := sp.X + int((float64(x-dp.X)*cos-float64(y-dp.Y)*sin)*scaleX) sy := sp.Y + int((float64(x-dp.X)*sin+float64(y-dp.Y)*cos)*scaleY) if !image.Pt(sx, sy).In(sr) { continue } si := src.ColorIndexAt(sx, sy) if si == 0 { continue } //fmt.Fprintln(os.Stderr, x, y, sx, sy) dst.SetColorIndex(x, y, si) } } } func round(x float64) float64 { return math.Trunc(x*4096 + 0.5)/4096 }
nitro/draw.go
0.556641
0.552359
draw.go
starcoder
package noarch import ( "math" ) // Signbitf ... func Signbitf(x float32) int { return BoolToInt(math.Signbit(float64(x))) } // Signbitd ... func Signbitd(x float64) int { return BoolToInt(math.Signbit(x)) } // Signbitl ... func Signbitl(x float64) int { return BoolToInt(math.Signbit(x)) } // IsNaN ... func IsNaN(x float64) int { return BoolToInt(math.IsNaN(x)) } // Fma returns x*y+z. func Fma(x, y, z float64) float64 { return x*y + z } // Fmaf returns x*y+z. func Fmaf(x, y, z float32) float32 { return x*y + z } // Fmin returns the smaller of its arguments: either x or y. func Fmin(x, y float64) float64 { if x < y { return x } return y } // Fminf returns the smaller of its arguments: either x or y. func Fminf(x, y float32) float32 { if x < y { return x } return y } // Fmax returns the larger of its arguments: either x or y. func Fmax(x, y float64) float64 { if x < y { return y } return x } // Fmaxf returns the larger of its arguments: either x or y. func Fmaxf(x, y float32) float32 { if x < y { return y } return x } // Expm1f returns e raised to the power x minus one: e^x-1 func Expm1f(x float32) float32 { return float32(math.Expm1(float64(x))) } // Exp2f Returns the base-2 exponential function of x, which is 2 raised // to the power x: 2^x func Exp2f(x float32) float32 { return float32(math.Exp2(float64(x))) } // Fdim returns the positive difference between x and y. func Fdim(x, y float64) float64 { if x > y { return x - y } return 0 } // Fdimf returns the positive difference between x and y. func Fdimf(x, y float32) float32 { if x > y { return x - y } return 0 } // Log2f returns the binary (base-2) logarithm of x. 
func Log2f(x float32) float32 { return float32(math.Log2(float64(x))) } // Sinhf compute hyperbolic sine func Sinhf(a float32) float32 { return float32(math.Sinh(float64(a))) } // Coshf compute hyperbolic cose func Coshf(a float32) float32 { return float32(math.Cosh(float64(a))) } // Tanhf compute hyperbolic tan func Tanhf(a float32) float32 { return float32(math.Tanh(float64(a))) } // Cbrt compute cube root func Cbrtf(x float32) float32 { return float32(math.Cbrt(float64(x))) } // Hypotf compute the square root of the sum of the squares of x and y func Hypotf(x, y float32) float32 { return float32(math.Hypot(float64(x), float64(y))) } // Log1pf compute ln(1+arg) func Log1pf(arg float32) float32 { return float32(math.Log1p(float64(arg))) } // Copysignf copies sign of y to absolute value of x func Copysignf(x float32, y float32) float32 { return float32(math.Copysign(float64(x), float64(y))) } // Expf : finds e^x func Expf(x float32) float32 { return float32(math.Exp(float64(x))) } // Erff : finds error function value of x func Erff(x float32) float32 { return float32(math.Erf(float64(x))) } // Erfcf : finds error function value of x func Erfcf(x float32) float32 { return float32(math.Erfc(float64(x))) }
noarch/math.go
0.905897
0.738127
math.go
starcoder
package querier import ( "sort" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/storage" "github.com/weaveworks/cortex/pkg/prom1/storage/metric" ) // errSeriesSet implements storage.SeriesSet, just returning an error. type errSeriesSet struct { err error } func (errSeriesSet) Next() bool { return false } func (errSeriesSet) At() storage.Series { return nil } func (e errSeriesSet) Err() error { return e.err } // concreteSeriesSet implements storage.SeriesSet. type concreteSeriesSet struct { cur int series []storage.Series } func newConcreteSeriesSet(series []storage.Series) storage.SeriesSet { sort.Sort(byLabels(series)) return &concreteSeriesSet{ cur: -1, series: series, } } func (c *concreteSeriesSet) Next() bool { c.cur++ return c.cur < len(c.series) } func (c *concreteSeriesSet) At() storage.Series { return c.series[c.cur] } func (c *concreteSeriesSet) Err() error { return nil } // concreteSeries implements storage.Series. type concreteSeries struct { labels labels.Labels samples []model.SamplePair } func (c *concreteSeries) Labels() labels.Labels { return c.labels } func (c *concreteSeries) Iterator() storage.SeriesIterator { return newConcreteSeriesIterator(c) } // concreteSeriesIterator implements storage.SeriesIterator. 
type concreteSeriesIterator struct { cur int series *concreteSeries } func newConcreteSeriesIterator(series *concreteSeries) storage.SeriesIterator { return &concreteSeriesIterator{ cur: -1, series: series, } } func (c *concreteSeriesIterator) Seek(t int64) bool { c.cur = sort.Search(len(c.series.samples), func(n int) bool { return c.series.samples[n].Timestamp >= model.Time(t) }) return c.cur < len(c.series.samples) } func (c *concreteSeriesIterator) At() (t int64, v float64) { s := c.series.samples[c.cur] return int64(s.Timestamp), float64(s.Value) } func (c *concreteSeriesIterator) Next() bool { c.cur++ return c.cur < len(c.series.samples) } func (c *concreteSeriesIterator) Err() error { return nil } func matrixToSeriesSet(m model.Matrix) storage.SeriesSet { series := make([]storage.Series, 0, len(m)) for _, ss := range m { series = append(series, &concreteSeries{ labels: metricToLabels(ss.Metric), samples: ss.Values, }) } return newConcreteSeriesSet(series) } func metricsToSeriesSet(ms []metric.Metric) storage.SeriesSet { series := make([]storage.Series, 0, len(ms)) for _, m := range ms { series = append(series, &concreteSeries{ labels: metricToLabels(m.Metric), samples: nil, }) } return newConcreteSeriesSet(series) } func metricToLabels(m model.Metric) labels.Labels { ls := make(labels.Labels, 0, len(m)) for k, v := range m { ls = append(ls, labels.Label{ Name: string(k), Value: string(v), }) } // PromQL expects all labels to be sorted! In general, anyone constructing // a labels.Labels list is responsible for sorting it during construction time. 
sort.Sort(ls) return ls } func labelsToMetric(ls labels.Labels) model.Metric { m := make(model.Metric, len(ls)) for _, l := range ls { m[model.LabelName(l.Name)] = model.LabelValue(l.Value) } return m } type byLabels []storage.Series func (b byLabels) Len() int { return len(b) } func (b byLabels) Swap(i, j int) { b[i], b[j] = b[j], b[i] } func (b byLabels) Less(i, j int) bool { return labels.Compare(b[i].Labels(), b[j].Labels()) < 0 }
pkg/querier/series_set.go
0.761804
0.522202
series_set.go
starcoder
package server type L2MeasCellInfo struct { // It indicates the packet discard rate in percentage of the downlink GBR traffic in a cell, as defined in ETSI TS 136 314 [i.11]. DlGbrPdrCell int32 `json:"dl_gbr_pdr_cell,omitempty"` // It indicates the PRB usage for downlink GBR traffic, as defined in ETSI TS 136 314 [i.11] and ETSI TS 136 423 [i.12]. DlGbrPrbUsageCell int32 `json:"dl_gbr_prb_usage_cell,omitempty"` // It indicates the packet discard rate in percentage of the downlink non-GBR traffic in a cell, as defined in ETSI TS 136 314 [i.11]. DlNongbrPdrCell int32 `json:"dl_nongbr_pdr_cell,omitempty"` // It indicates (in percentage) the PRB usage for downlink non-GBR traffic, as defined in ETSI TS 136 314 [i.11] and ETSI TS 136 423 [i.12]. DlNongbrPrbUsageCell int32 `json:"dl_nongbr_prb_usage_cell,omitempty"` // It indicates (in percentage) the PRB usage for total downlink traffic, as defined in ETSI TS 136 314 [i.11] and ETSI TS 136 423 [i.12]. DlTotalPrbUsageCell int32 `json:"dl_total_prb_usage_cell,omitempty"` Ecgi *Ecgi `json:"ecgi,omitempty"` // It indicates the number of active UEs with downlink GBR traffic, as defined in ETSI TS 136 314 [i.11]. NumberOfActiveUeDlGbrCell int32 `json:"number_of_active_ue_dl_gbr_cell,omitempty"` // It indicates the number of active UEs with downlink non-GBR traffic, as defined in ETSI TS 136 314 [i.11]. NumberOfActiveUeDlNongbrCell int32 `json:"number_of_active_ue_dl_nongbr_cell,omitempty"` // It indicates the number of active UEs with uplink GBR traffic, as defined in ETSI TS 136 314 [i.11]. NumberOfActiveUeUlGbrCell int32 `json:"number_of_active_ue_ul_gbr_cell,omitempty"` // It indicates the number of active UEs with uplink non-GBR traffic, as defined in ETSI TS 136 314 [i.11]. NumberOfActiveUeUlNongbrCell int32 `json:"number_of_active_ue_ul_nongbr_cell,omitempty"` // It indicates (in percentage) the received dedicated preamples, as defined in ETSI TS 136 314 [i.11]. 
ReceivedDedicatedPreamblesCell int32 `json:"received_dedicated_preambles_cell,omitempty"` // It indicates (in percentage) the received randomly selected preambles in the high range, as defined in ETSI TS 136 314 [i.11]. ReceivedRandomlySelectedPreamblesHighRangeCell int32 `json:"received_randomly_selected_preambles_high_range_cell,omitempty"` // It indicates (in percentage) the received randomly selected preambles in the low range, as defined in ETSI TS 136 314 [i.11]. ReceivedRandomlySelectedPreamblesLowRangeCell int32 `json:"received_randomly_selected_preambles_low_range_cell,omitempty"` // It indicates the packet discard rate in percentage of the uplink GBR traffic in a cell, as defined in ETSI TS 136 314 [i.11]. UlGbrPdrCell int32 `json:"ul_gbr_pdr_cell,omitempty"` // It indicates (in percentage) the PRB usage for uplink GBR traffic, as defined in ETSI TS 136 314 [i.11] and ETSI TS 136 423 [i.12]. UlGbrPrbUsageCell int32 `json:"ul_gbr_prb_usage_cell,omitempty"` // It indicates the packet discard rate in percentage of the uplink non-GBR traffic in a cell, as defined in ETSI TS 136 314 [i.11]. UlNongbrPdrCell int32 `json:"ul_nongbr_pdr_cell,omitempty"` // It indicates (in percentage) the PRB usage for uplink non-GBR traffic, as defined in ETSI TS 136 314 [i.11] and ETSI TS 136 423 [i.12]. UlNongbrPrbUsageCell int32 `json:"ul_nongbr_prb_usage_cell,omitempty"` // It indicates (in percentage) the PRB usage for total uplink traffic, as defined in ETSI TS 136 314 [i.11] and ETSI TS 136 423 [i.12]. UlTotalPrbUsageCell int32 `json:"ul_total_prb_usage_cell,omitempty"` }
go-apps/meep-rnis/server/model_l2_meas_cell_info.go
0.519034
0.560734
model_l2_meas_cell_info.go
starcoder
package entity type LabelMap map[string]Label func (m LabelMap) Get(name string) Label { if result, ok := m[name]; ok { return result } return *NewLabel(name, 0) } func (m LabelMap) Pointer(name string) *Label { if result, ok := m[name]; ok { return &result } return NewLabel(name, 0) } func (m LabelMap) PhotoLabel(photoId uint, labelName string, uncertainty int, source string) PhotoLabel { label := m.Get(labelName) photoLabel := NewPhotoLabel(photoId, label.ID, uncertainty, source) photoLabel.Label = &label return *photoLabel } var LabelFixtures = LabelMap{ "landscape": { ID: 1000000, LabelUID: "lt9k3pw1wowuy3c2", LabelSlug: "landscape", CustomSlug: "landscape", LabelName: "Landscape", LabelPriority: 0, LabelFavorite: true, LabelDescription: "", LabelNotes: "", PhotoCount: 1, LabelCategories: []*Label{}, CreatedAt: Timestamp(), UpdatedAt: Timestamp(), DeletedAt: nil, New: false, }, "flower": { ID: 1000001, LabelUID: "lt9k3pw1wowuy3c3", LabelSlug: "flower", CustomSlug: "flower", LabelName: "Flower", LabelPriority: 1, LabelFavorite: true, LabelDescription: "", LabelNotes: "", PhotoCount: 2, LabelCategories: []*Label{}, CreatedAt: Timestamp(), UpdatedAt: Timestamp(), DeletedAt: nil, New: false, }, "cake": { ID: 1000002, LabelUID: "lt9k3pw1wowuy3c4", LabelSlug: "cake", CustomSlug: "kuchen", LabelName: "Cake", LabelPriority: 5, LabelFavorite: false, LabelDescription: "", LabelNotes: "", PhotoCount: 3, LabelCategories: []*Label{}, CreatedAt: Timestamp(), UpdatedAt: Timestamp(), DeletedAt: nil, New: false, }, "cow": { ID: 1000003, LabelUID: "lt9k3pw1wowuy3c5", LabelSlug: "cow", CustomSlug: "kuh", LabelName: "COW", LabelPriority: -1, LabelFavorite: true, LabelDescription: "", LabelNotes: "", PhotoCount: 4, LabelCategories: []*Label{}, CreatedAt: Timestamp(), UpdatedAt: Timestamp(), DeletedAt: nil, New: false, }, "batchdelete": { ID: 1000004, LabelUID: "lt9k3pw1wowuy3c6", LabelSlug: "batchdelete", CustomSlug: "batchDelete", LabelName: "BatchDelete", LabelPriority: 1, 
LabelFavorite: true, LabelDescription: "", LabelNotes: "", PhotoCount: 5, LabelCategories: []*Label{}, CreatedAt: Timestamp(), UpdatedAt: Timestamp(), DeletedAt: nil, New: false, }, "updateLabel": { ID: 1000005, LabelUID: "lt9k3pw1wowuy3c7", LabelSlug: "updatelabel", CustomSlug: "updateLabel", LabelName: "updateLabel", LabelPriority: 2, LabelFavorite: false, LabelDescription: "", LabelNotes: "", PhotoCount: 1, LabelCategories: []*Label{}, CreatedAt: Timestamp(), UpdatedAt: Timestamp(), DeletedAt: nil, New: false, }, "updatePhotoLabel": { ID: 1000006, LabelUID: "lt9k3pw1wowuy3c8", LabelSlug: "updatephotolabel", CustomSlug: "updateLabelPhoto", LabelName: "updatePhotoLabel", LabelPriority: 2, LabelFavorite: false, LabelDescription: "", LabelNotes: "", PhotoCount: 1, LabelCategories: []*Label{}, CreatedAt: Timestamp(), UpdatedAt: Timestamp(), DeletedAt: nil, New: false, }, "likeLabel": { ID: 1000007, LabelUID: "lt9k3pw1wowuy3c9", LabelSlug: "likeLabel", CustomSlug: "likeLabel", LabelName: "likeLabel", LabelPriority: 3, LabelFavorite: false, LabelDescription: "", LabelNotes: "", PhotoCount: 1, LabelCategories: []*Label{}, CreatedAt: Timestamp(), UpdatedAt: Timestamp(), DeletedAt: nil, New: false, }, "no-jpeg": { ID: 1000008, LabelUID: "lt9k3aa1wowuy310", LabelSlug: "no-jpeg", CustomSlug: "no-jpeg", LabelName: "NO JPEG", LabelPriority: -1, LabelFavorite: false, LabelDescription: "", LabelNotes: "", PhotoCount: 4, LabelCategories: []*Label{}, CreatedAt: Timestamp(), UpdatedAt: Timestamp(), DeletedAt: nil, New: false, }, "apilikeLabel": { ID: 1000009, LabelUID: "lt9k3pw1wowuy311", LabelSlug: "apilikeLabel", CustomSlug: "apilikeLabel", LabelName: "apilikeLabel", LabelPriority: -1, LabelFavorite: false, LabelDescription: "", LabelNotes: "", PhotoCount: 1, LabelCategories: []*Label{}, CreatedAt: Timestamp(), UpdatedAt: Timestamp(), DeletedAt: nil, New: false, }, "apidislikeLabel": { ID: 1000010, LabelUID: "lt9k3pw1wowuy312", LabelSlug: "apidislikeLabel", CustomSlug: 
"apidislikeLabel", LabelName: "apidislikeLabel", LabelPriority: -2, LabelFavorite: true, LabelDescription: "", LabelNotes: "", PhotoCount: 1, LabelCategories: []*Label{}, CreatedAt: Timestamp(), UpdatedAt: Timestamp(), DeletedAt: nil, New: false, }, } // CreateLabelFixtures inserts known entities into the database for testing. func CreateLabelFixtures() { for _, entity := range LabelFixtures { Db().Create(&entity) } }
internal/entity/label_fixtures.go
0.625095
0.423756
label_fixtures.go
starcoder
package nlptools import ( "crypto/md5" "encoding/hex" "github.com/broosaction/gotext/tokenizers" stringUtils "github.com/broosaction/gotext/utils/strings" "math" ) /* TFIDF is a Term Frequency- Inverse Document Frequency model that is created from a trained NaiveBayes model (they are very similar so you can just train NaiveBayes and convert into TDIDF) This is not a probabalistic model, necessarily, and doesn't give classification. It can be used to determine the 'importance' of a word in a document, though, which is useful in, say, keyword tagging. Term frequency is basically just adjusted frequency of a word within a document/sentence: termFrequency(word, doc) = 0.5 * ( 0.5 * word.Count ) / max{ w.Count | w ∈ doc } Inverse document frequency is basically how little the term is mentioned within all of your documents: invDocumentFrequency(word, Docs) = log( len(Docs) ) - log( 1 + |{ d ∈ Docs | t ∈ d}| ) TFIDF is the multiplication of those two functions, giving you a term that is larger when the word is more important, and less when the word is less important */ // TFIDF tfidf model type TFIDF struct { // train document index in TermFreqs DocIndex map[string]int // term frequency for each train document TermFreqs []map[string]int // documents number for each term in train data TermDocs map[string]int // number of documents in train data N int // words to be filtered StopWords map[string]struct{} // tokenizer, space is used as default Tokenizer string } // New new model with default func NewTFIDF() *TFIDF { return &TFIDF{ DocIndex: make(map[string]int), TermFreqs: make([]map[string]int, 0), TermDocs: make(map[string]int), N: 0, Tokenizer: tokenizers.WordTokenizer{}.GetName(), } } // NewTokenizer new with specified tokenizer // works well in GOLD func NewTokenizer(tokenizer tokenizers.Tokenizer) *TFIDF { return &TFIDF{ DocIndex: make(map[string]int), TermFreqs: make([]map[string]int, 0), TermDocs: make(map[string]int), N: 0, Tokenizer: tokenizer.GetName(), } } func 
(f *TFIDF) initStopWords() { if f.StopWords == nil { f.StopWords = stringUtils.GetStopwords() } } // AddStopWords add stop words to be filtered func (f *TFIDF) AddStopWords(words ...string) { if f.StopWords == nil { f.StopWords = stringUtils.GetStopwords() } for _, word := range words { f.StopWords[word] = struct{}{} } } // AddStopWordsFile add stop words file to be filtered, with one word a line func (f *TFIDF) AddStopWordsFile(file string) (err error) { lines, err := tokenizers.ReadLines(file) if err != nil { return } f.AddStopWords(lines...) return } // AddDocs add train documents func (f *TFIDF) AddDocs(docs ...string) { for _, doc := range docs { h := f.hash(doc) if f.docHashPos(h) >= 0 { return } termFreq := f.termFreq(doc) if len(termFreq) == 0 { return } f.DocIndex[h] = f.N f.N++ f.TermFreqs = append(f.TermFreqs, termFreq) for term := range termFreq { f.TermDocs[term]++ } } } // Cal calculate tf-idf weight for specified document func (f *TFIDF) Cal(doc string) (weight map[string]float64) { weight = make(map[string]float64) var termFreq map[string]int docPos := f.docPos(doc) if docPos < 0 { termFreq = f.termFreq(doc) } else { termFreq = f.TermFreqs[docPos] } docTerms := 0 for _, freq := range termFreq { docTerms += freq } for term, freq := range termFreq { weight[term] = f.tfidf(freq, docTerms, f.TermDocs[term], f.N) } return weight } func (f *TFIDF) termFreq(doc string) (m map[string]int) { m = make(map[string]int) tokens := tokenizers.GetTokenizer(f.Tokenizer).Tokenize(doc) if len(tokens) == 0 { return } for _, term := range tokens { if _, ok := f.StopWords[term]; ok { continue } m[term]++ } return } func (f *TFIDF) docHashPos(hash string) int { if pos, ok := f.DocIndex[hash]; ok { return pos } return -1 } func (f *TFIDF) docPos(doc string) int { return f.docHashPos(f.hash(doc)) } func (f *TFIDF) hash(text string) string { h := md5.New() h.Write([]byte(text)) return hex.EncodeToString(h.Sum(nil)) } func (f *TFIDF) tfidf(termFreq, docTerms, termDocs, N int) 
float64 { tf := float64(termFreq) / float64(docTerms) idf := math.Log(float64(1+N) / (1 + float64(termDocs))) return tf * idf }
nlp/nlptools/tfidf.go
0.601945
0.439988
tfidf.go
starcoder
package shape import ( "fmt" "io" "math" "github.com/gregoryv/go-design/xy" ) func NewRect(title string) *Rect { return &Rect{ Title: title, Font: DefaultFont, Pad: DefaultTextPad, class: "rect", } } type Rect struct { X, Y int Title string Font Font Pad Padding class string } func (r *Rect) String() string { return fmt.Sprintf("R %q", r.Title) } func (r *Rect) Position() (int, int) { return r.X, r.Y } func (r *Rect) SetX(x int) { r.X = x } func (r *Rect) SetY(y int) { r.Y = y } func (r *Rect) Direction() Direction { return LR } func (r *Rect) SetClass(c string) { r.class = c } func (r *Rect) WriteSvg(out io.Writer) error { w, err := newTagPrinter(out) w.printf( `<rect class="%s" x="%v" y="%v" width="%v" height="%v"/>`, r.class, r.X, r.Y, r.Width(), r.Height()) w.printf("\n") r.title().WriteSvg(w) return *err } func (r *Rect) title() *Label { return &Label{ Pos: xy.Position{ r.X + r.Pad.Left, r.Y + r.Pad.Top/2, }, Font: r.Font, Text: r.Title, class: "record-title", } } func (r *Rect) SetFont(f Font) { r.Font = f } func (r *Rect) SetTextPad(pad Padding) { r.Pad = pad } func (r *Rect) Height() int { return boxHeight(r.Font, r.Pad, 1) } func (r *Rect) Width() int { return boxWidth(r.Font, r.Pad, r.Title) } // Edge returns intersecting position of a line starting at start and // pointing to the rect center. 
func (r *Rect) Edge(start xy.Position) xy.Position { return boxEdge(start, r) } type Box interface { Position() (int, int) Width() int Height() int } func boxEdge(start xy.Position, r Box) xy.Position { x, y := r.Position() center := xy.Position{ x + r.Width()/2, y + r.Height()/2, } l1 := xy.Line{start, center} var ( d float64 = math.MaxFloat64 pos xy.Position lowY = y + r.Height() rightX = x + r.Width() top = xy.NewLine(x, y, rightX, y) left = xy.NewLine(x, y, x, lowY) right = xy.NewLine(rightX, y, rightX, lowY) bottom = xy.NewLine(x, lowY, rightX, lowY) ) for _, side := range []*xy.Line{top, left, right, bottom} { p, err := l1.IntersectSegment(side) if err != nil { continue } dist := start.Distance(p) if dist < d { pos = p d = dist } } return pos }
shape/rect.go
0.708918
0.406037
rect.go
starcoder
package lyft // A non-guaranteed estimate of price type CostEstimate struct { RideType RideTypeEnum `json:"ride_type,omitempty"` // A human readable description of the ride type DisplayName string `json:"display_name,omitempty"` // The ISO 4217 currency code for the amount (e.g. 'USD') Currency string `json:"currency,omitempty"` // Estimated lower bound for trip cost, in minor units (cents). Estimates are not guaranteed, and only provide a reasonable range based on current conditions. EstimatedCostCentsMin int32 `json:"estimated_cost_cents_min,omitempty"` // Estimated upper bound for trip cost, in minor units (cents). Estimates are not guaranteed, and only provide a reasonable range based on current conditions. EstimatedCostCentsMax int32 `json:"estimated_cost_cents_max,omitempty"` // Estimated distance for this trip EstimatedDistanceMiles float64 `json:"estimated_distance_miles,omitempty"` // Estimated time to get from the start location to the end. EstimatedDurationSeconds int32 `json:"estimated_duration_seconds,omitempty"` // The validity of the cost estimate returned IsValidEstimate bool `json:"is_valid_estimate"` // Current Prime Time Percentage. Prime Time adds a percentage to ride costs, prior to other applicable fees. When ride requests greatly outnumber available drivers, our system will automatically turn on Prime Time. If Prime Time is inactive, the value returned will be '0%'. Note: The returned estimate already has Prime Time factored in. The value is returned here for reference and to allow users to confirm/accept Prime Time prior to initiating a ride. PrimetimePercentage string `json:"primetime_percentage,omitempty"` // This token is needed when requesting rides. (Deprecated) PrimetimeConfirmationToken string `json:"primetime_confirmation_token,omitempty"` // A token that confirms the user has accepted current Prime Time and/or fixed price charges. See 'Request a Lyft' for more details CostToken string `json:"cost_token,omitempty"` }
lyft/cost_estimate.go
0.800809
0.418935
cost_estimate.go
starcoder
package lookup import ( "encoding/binary" "errors" "fmt" ) // Epoch represents a time slot at a particular frequency level type Epoch struct { Time uint64 `json:"time"` // Time stores the time at which the update or lookup takes place Level uint8 `json:"level"` // Level indicates the frequency level as the exponent of a power of 2 } // EpochID is a unique identifier for an Epoch, based on its level and base time. type EpochID [8]byte // EpochLength stores the serialized binary length of an Epoch const EpochLength = 8 // MaxTime contains the highest possible time value an Epoch can handle const MaxTime uint64 = (1 << 56) - 1 // Base returns the base time of the Epoch func (e *Epoch) Base() uint64 { return getBaseTime(e.Time, e.Level) } // ID Returns the unique identifier of this epoch func (e *Epoch) ID() EpochID { base := e.Base() var id EpochID binary.LittleEndian.PutUint64(id[:], base) id[7] = e.Level return id } // MarshalBinary implements the encoding.BinaryMarshaller interface func (e *Epoch) MarshalBinary() (data []byte, err error) { b := make([]byte, 8) binary.LittleEndian.PutUint64(b[:], e.Time) b[7] = e.Level return b, nil } // UnmarshalBinary implements the encoding.BinaryUnmarshaller interface func (e *Epoch) UnmarshalBinary(data []byte) error { if len(data) != EpochLength { return errors.New("Invalid data unmarshalling Epoch") } b := make([]byte, 8) copy(b, data) e.Level = b[7] b[7] = 0 e.Time = binary.LittleEndian.Uint64(b) return nil } // After returns true if this epoch occurs later or exactly at the other epoch. func (e *Epoch) After(epoch Epoch) bool { if e.Time == epoch.Time { return e.Level < epoch.Level } return e.Time >= epoch.Time } // Equals compares two epochs and returns true if they refer to the same time period. func (e *Epoch) Equals(epoch Epoch) bool { return e.Level == epoch.Level && e.Base() == epoch.Base() } // String implements the Stringer interface. 
func (e *Epoch) String() string { return fmt.Sprintf("Epoch{Time:%d, Level:%d}", e.Time, e.Level) }
vendor/github.com/ethereum/go-ethereum/swarm/storage/feed/lookup/epoch.go
0.899998
0.428532
epoch.go
starcoder
package manifest import "sort" // LevelMetadata contains metadata for all of the files within // a level of the LSM. type LevelMetadata struct { files []*FileMetadata } // Empty indicates whether there are any files in the level. func (lm *LevelMetadata) Empty() bool { return lm.Len() == 0 } // Len returns the number of files within the level. func (lm *LevelMetadata) Len() int { return len(lm.files) } // Iter constructs a LevelIterator over the entire level. func (lm *LevelMetadata) Iter() LevelIterator { return LevelIterator{files: lm.files, end: len(lm.files)} } // Slice constructs a slice containing the entire level. func (lm *LevelMetadata) Slice() LevelSlice { return LevelSlice{files: lm.files, end: len(lm.files)} } // LevelFile holds a file's metadata along with its position // within a level of the LSM. type LevelFile struct { *FileMetadata slice LevelSlice } // Slice constructs a LevelSlice containing only this file. func (lf LevelFile) Slice() LevelSlice { return lf.slice } // NewLevelSlice constructs a LevelSlice over the provided files. This // function is expected to be a temporary adapter between interfaces. // TODO(jackson): Revisit once the conversion of Version.Files to a btree is // complete. func NewLevelSlice(files []*FileMetadata) LevelSlice { return LevelSlice{files: files, start: 0, end: len(files)} } // LevelSlice contains a slice of the files within a level of the LSM. type LevelSlice struct { files []*FileMetadata start int end int } // Each invokes fn for each element in the slice. func (ls LevelSlice) Each(fn func(*FileMetadata)) { iter := ls.Iter() for f := iter.First(); f != nil; f = iter.Next() { fn(f) } } // Empty indicates whether the slice contains any files. func (ls LevelSlice) Empty() bool { return ls.start >= ls.end } // Iter constructs a LevelIterator that iterates over the slice. 
func (ls LevelSlice) Iter() LevelIterator { return LevelIterator{ files: ls.files, start: ls.start, end: ls.end, } } // Len returns the number of files in the slice. func (ls LevelSlice) Len() int { return ls.end - ls.start } // SizeSum sums the size of all files in the slice. func (ls LevelSlice) SizeSum() uint64 { var sum uint64 for _, f := range ls.files[ls.start:ls.end] { sum += f.Size } return sum } // Reslice constructs a new slice backed by the same underlying level, with // new start and end positions. Reslice invokes the provided function, passing // two LevelIterators: one positioned to i's inclusive start and one // positioned to i's inclusive end. The resliceFunc may move either iterator // forward or backwards, including beyond the callee's original bounds to // capture additional files from the underlying level. Reslice constructs and // returns a new LevelSlice with the final bounds of the iterators after // calling resliceFunc. func (ls LevelSlice) Reslice(resliceFunc func(start, end *LevelIterator)) LevelSlice { start := LevelIterator{ files: ls.files, cur: ls.start, start: 0, end: len(ls.files), } end := LevelIterator{ files: ls.files, cur: ls.end - 1, start: 0, end: len(ls.files), } resliceFunc(&start, &end) return LevelSlice{ files: ls.files, start: start.cur, end: end.cur + 1, } } // LevelIterator iterates over a set of files' metadata. Its zero value is an // empty iterator. type LevelIterator struct { files []*FileMetadata cur int start int end int } // Clone copies the iterator, returning an independent iterator at the same // position. func (i *LevelIterator) Clone() LevelIterator { return *i } // Current returns the item at the current iterator position. func (i LevelIterator) Current() *FileMetadata { if i.cur < i.start || i.cur >= i.end { return nil } return i.files[i.cur] } // First seeks to the first file in the iterator and returns it. 
func (i *LevelIterator) First() *FileMetadata { i.cur = i.start if i.cur < i.start || i.cur >= i.end { return nil } return i.files[i.cur] } // Last seeks to the last file in the iterator and returns it. func (i *LevelIterator) Last() *FileMetadata { i.cur = i.end - 1 if i.cur < i.start || i.cur >= i.end { return nil } return i.files[i.cur] } // Next advances the iterator to the next file and returns it. func (i *LevelIterator) Next() *FileMetadata { i.cur++ if i.cur < i.start || i.cur >= i.end { return nil } return i.files[i.cur] } // Prev moves the iterator the previous file and returns it. func (i *LevelIterator) Prev() *FileMetadata { i.cur-- if i.cur < i.start || i.cur >= i.end { return nil } return i.files[i.cur] } // SeekGE seeks to the first file in the iterator's file set with a largest // user key less than or equal to the provided user key. The iterator must // have been constructed from L1+, because it requires the underlying files to // be sorted by user keys and non-overlapping. func (i *LevelIterator) SeekGE(cmp Compare, userKey []byte) *FileMetadata { files := i.files[i.start:i.end] i.cur = i.start + sort.Search(len(files), func(j int) bool { return cmp(userKey, files[j].Largest.UserKey) <= 0 }) if i.cur >= i.end { return nil } return i.files[i.cur] } // SeekLT seeks to the last file in the iterator's file set with a smallest // user key less than the provided user key. The iterator must have been // constructed from L1+, because it requries the underlying files to be sorted // by user keys and non-overlapping. func (i *LevelIterator) SeekLT(cmp Compare, userKey []byte) *FileMetadata { files := i.files[i.start:i.end] i.cur = i.start + sort.Search(len(files), func(j int) bool { return cmp(files[j].Smallest.UserKey, userKey) >= 0 }) if i.cur < i.start { return nil } return i.Prev() } // Take constructs a LevelFile containing the file at the iterator's current // position. Take panics if the iterator is not currently positioned over a // file. 
func (i LevelIterator) Take() LevelFile { m := i.Current() if m == nil { panic("Take called on invalid LevelIterator") } return LevelFile{ FileMetadata: m, slice: LevelSlice{ files: i.files, start: i.cur, end: i.cur + 1, }, } }
internal/manifest/level_metadata.go
0.652906
0.421254
level_metadata.go
starcoder
package maze import ( "bytes" "math/rand" "time" ) // Grid is an interface of grid. type Grid interface { Rows() int Cols() int Get(int, int) *Cell Random() *Cell Size() int EachRow() [][]*Cell EachCell() []*Cell String() string DeadEnds() []*Cell Braid(float64) } // NormalGrid is a grid containing all the cells. type NormalGrid struct { rows, cols int grid [][]*Cell r *rand.Rand } // NewNormalGrid returns a new NormalGrid whose size is rows by cols. func NewNormalGrid(rows, cols int) Grid { g := &NormalGrid{ rows: rows, cols: cols, grid: prepareGrid(rows, cols), r: rand.New(rand.NewSource(time.Now().UnixNano())), } g.configureCells() return g } // Rows returns the number of rows of `g`. func (g *NormalGrid) Rows() int { return g.rows } // Cols returns the number of columns of `g`. func (g *NormalGrid) Cols() int { return g.cols } // Get returns a cell on (row, col). func (g *NormalGrid) Get(row, col int) *Cell { if row < 0 || row > g.rows-1 { return nil } if col < 0 || col > g.cols-1 { return nil } return g.grid[row][col] } // Random returns a cell chosen randomly from a grid. func (g *NormalGrid) Random() *Cell { r := rand.New(rand.NewSource(time.Now().UnixNano())) row := r.Intn(g.rows) col := r.Intn(g.cols) return g.Get(row, col) } // Size returns a size of g. func (g *NormalGrid) Size() int { return g.rows * g.cols } // EachRow returns each row. func (g *NormalGrid) EachRow() [][]*Cell { return g.grid } // EachCell returns each cell. func (g *NormalGrid) EachCell() []*Cell { cells := make([]*Cell, 0, g.Size()) for _, row := range g.grid { for _, cell := range row { cells = append(cells, cell) } } return cells } // String draws a maze by an ASCII art. func (g *NormalGrid) String() string { return g.stringWithContentsFunc(g.contentsOf) } // stringContentsFunc draws a maze by an ASCII art using `f`. 
func (g *NormalGrid) stringWithContentsFunc(f func(*Cell) string) string { var ( space = " " wall = "|" corner = "+" line = "---" ) var output bytes.Buffer _, _ = output.WriteString(corner) for i := 0; i < g.cols; i++ { _, _ = output.WriteString(line + corner) } _, _ = output.WriteString("\n") mid := bytes.NewBuffer([]byte(wall)) btm := bytes.NewBuffer([]byte(corner)) for _, row := range g.EachRow() { // initialize all but the first character mid.Truncate(1) btm.Truncate(1) for _, cell := range row { if cell == nil { // dummy cell cell = NewCell(-1, -1) } body := " " + f(cell) + " " _, _ = mid.WriteString(body) if cell.IsLinked(cell.East) { _, _ = mid.WriteString(" ") } else { _, _ = mid.WriteString(wall) } if cell.IsLinked(cell.South) { _, _ = btm.WriteString(space) } else { _, _ = btm.WriteString(line) } _, _ = btm.WriteString(corner) } _, _ = output.Write(mid.Bytes()) _, _ = output.WriteString("\n") _, _ = output.Write(btm.Bytes()) _, _ = output.WriteString("\n") } return output.String() } func (g *NormalGrid) contentsOf(cell *Cell) string { return " " } func (g *NormalGrid) configureCells() { for _, cell := range g.EachCell() { row, col := cell.Row, cell.Col cell.North = g.Get(row-1, col) cell.South = g.Get(row+1, col) cell.East = g.Get(row, col+1) cell.West = g.Get(row, col-1) } } // DeadEnds returns all the dead-end cells. func (g *NormalGrid) DeadEnds() []*Cell { var list []*Cell for _, cell := range g.EachCell() { if len(cell.Links()) == 1 { list = append(list, cell) } } return list } // Braid rearranges g to "braid" one, that is, a maze without any dead ends. 
func (g *NormalGrid) Braid(p float64) { for _, cell := range Shuffle(g.DeadEnds()) { if len(cell.Links()) != 1 || g.r.Float64() > p { continue } var nbs, best []*Cell for _, nb := range cell.Neighbors() { if !nb.IsLinked(cell) { nbs = append(nbs, nb) } } for _, n := range nbs { if len(n.Links()) == 1 { best = append(best, n) } } if len(best) == 0 { best = nbs } idx := g.r.Intn(len(best)) cell.Link(best[idx]) } } // prepareGrid returns a rows-by-cols 2D Cell array. func prepareGrid(rows, cols int) [][]*Cell { grid := make([][]*Cell, rows) for i := 0; i < rows; i++ { grid[i] = make([]*Cell, cols) for j := 0; j < cols; j++ { grid[i][j] = NewCell(i, j) } } return grid }
go/maze/grid.go
0.860384
0.412412
grid.go
starcoder
package persist

import (
	"errors"
)

// LogType is used to describe different types of log entries.
type LogType uint8

// Different types of log entries.
const (
	LogUnknown LogType = iota
	// LogCommand is applied to the FSM.
	LogCommand
	// LogNoop is used to ensure the leadership for leader.
	LogNoop
	// LogMemberChange is used for member change in the cluster.
	LogMemberChange
	// LogBarrier is used to ensure all preceding operations have been
	// applied to the FSM. A loan from hashicorp-raft.
	LogBarrier
)

// Config represents the membership of the cluster.
//
// There are three possible combinations of Servers and NewServers:
//  1. Servers != nil, NewServers == nil
//     Not in a member change procedure. Servers contains all members
//     of the cluster.
//  2. Servers != nil, NewServers != nil
//     Member change procedure phase 1. Servers contains all old members
//     of the cluster, and NewServers the new members.
//  3. Servers == nil, NewServers != nil
//     Member change procedure phase 2. NewServers contains all the new
//     members of the cluster.
type Config struct {
	Servers    *ServerAddressSlice
	NewServers *ServerAddressSlice
}

// IsInMemeberChange reports whether the config is part of a member change
// procedure (combination 2 or 3 above). NOTE: the misspelling ("Memeber")
// is kept because it is part of the public API.
func (c *Config) IsInMemeberChange() bool {
	return c.NewServers != nil
}

// IsNormalConfig reports combination 1: stable membership.
func (c *Config) IsNormalConfig() bool {
	return (c.Servers != nil) && (c.NewServers == nil)
}

// IsOldNewConfig reports combination 2: member change phase 1.
func (c *Config) IsOldNewConfig() bool {
	return (c.Servers != nil) && (c.NewServers != nil)
}

// IsNewConfig reports combination 3: member change phase 2.
func (c *Config) IsNewConfig() bool {
	return (c.Servers == nil) && (c.NewServers != nil)
}

// ConfigEqual reports whether conf1 and conf2 describe the same membership.
//
// Fixed: the original compared conf1.Servers against itself when testing for
// a one-sided nil Servers field, so a (nil, non-nil) Servers pair was never
// rejected on that path.
func ConfigEqual(conf1 *Config, conf2 *Config) bool {
	// Both Servers fields must agree on nil-ness, and likewise NewServers.
	if (conf1.Servers == nil) != (conf2.Servers == nil) {
		return false
	}
	if (conf1.NewServers == nil) != (conf2.NewServers == nil) {
		return false
	}
	// Compare contents only for the fields that are present on both sides.
	if conf1.Servers != nil && !MultiAddrSliceEqual(conf1.Servers, conf2.Servers) {
		return false
	}
	if conf1.NewServers != nil && !MultiAddrSliceEqual(conf1.NewServers, conf2.NewServers) {
		return false
	}
	return true
}

// ConfigNotEqual is the negation of ConfigEqual.
func ConfigNotEqual(conf1 *Config, conf2 *Config) bool {
	return !ConfigEqual(conf1, conf2)
}

// ConfigCopy returns a copy of conf whose address slices do not share
// backing arrays with the original.
//
// Fixed: the original dereferenced conf.Servers and conf.NewServers
// unconditionally — panicking for the two legal configurations where one of
// them is nil — and its `Addresses[:]` re-slice still aliased the source
// array, so it was not actually a copy.
func ConfigCopy(conf *Config) *Config {
	copied := &Config{}
	if conf.Servers != nil {
		copied.Servers = &ServerAddressSlice{
			// append onto a zero-capacity slice forces a fresh allocation.
			Addresses: append(conf.Servers.Addresses[:0:0], conf.Servers.Addresses...),
		}
	}
	if conf.NewServers != nil {
		copied.NewServers = &ServerAddressSlice{
			Addresses: append(conf.NewServers.Addresses[:0:0], conf.NewServers.Addresses...),
		}
	}
	return copied
}

// LogEntry is the element of the replicated log in raft.
type LogEntry struct {
	Term  uint64
	Index uint64
	Type  LogType
	Data  []byte
	Conf  *Config
}

var (
	// ErrorLogEntryNotFound is returned when a requested log entry does not exist.
	ErrorLogEntryNotFound error = errors.New("Log entry not found")
)

// Log is the interface for the local durable log in raft.
// It provides functions to store and retrieve LogEntry values.
// Any implementation of this interface should ensure durability.
type Log interface {
	// FirstTerm returns the term of the first LogEntry written. 0 for no entry.
	FirstTerm() (uint64, error)

	// FirstIndex returns the index of the first LogEntry written. 0 for no entry.
	FirstIndex() (uint64, error)

	// FirstEntryInfo returns the term and index of the first LogEntry written.
	// Both term and index are 0 if there is no entry.
	FirstEntryInfo() (term uint64, index uint64, err error)

	// LastTerm returns the term of the last LogEntry written. 0 for no entry.
	LastTerm() (uint64, error)

	// LastIndex returns the index of the last LogEntry written. 0 for no entry.
	LastIndex() (uint64, error)

	// LastEntryInfo returns the term and index of the last LogEntry written.
	// Both term and index are 0 if there is no entry.
	LastEntryInfo() (term uint64, index uint64, err error)

	// CommittedIndex returns the index of the latest committed log entry.
	CommittedIndex() (uint64, error)

	// StoreCommittedIndex stores the index of the latest committed log entry.
	StoreCommittedIndex(index uint64) error

	// LastAppliedIndex returns the index of the log entry latest applied to
	// the state machine.
	LastAppliedIndex() (uint64, error)

	// StoreLastAppliedIndex stores the index of the log entry latest applied
	// to the state machine.
	StoreLastAppliedIndex(index uint64) error

	// GetLog gets a log entry at a given index.
	GetLog(index uint64) (*LogEntry, error)

	// GetLogInRange gets all log entries in range.
	GetLogInRange(fromIndex uint64, toIndex uint64) ([]*LogEntry, error)

	// StoreLog stores a single log entry.
	StoreLog(log *LogEntry) error

	// StoreLogs stores multiple log entries.
	StoreLogs(logs []*LogEntry) error

	// TruncateBefore deletes the log entries before and up to the given index,
	// including the log entry right at the index.
	TruncateBefore(index uint64) error

	// TruncateAfter deletes the log entries after the given index,
	// including the log entry right at index.
	TruncateAfter(index uint64) error
}
persist/log.go
0.666388
0.429788
log.go
starcoder
package clipper

import (
	"go/ast"
	"go/parser"
	"go/token"
	"strings"
)

// GoNewImportPositionData stores data collected during a selection of a new import position.
type GoNewImportPositionData struct {
	// ShouldAddNewLine is set when the insertion point follows the package
	// clause, so a blank line is needed before the new import.
	ShouldAddNewLine bool
	// OnlyURLNeeded is set when inserting into an existing grouped import
	// block, where only the bare URL line is required.
	OnlyURLNeeded bool
}

// GoBeforeFunctionReturnsPositionData stores data collected during a selection of the position before a return.
type GoBeforeFunctionReturnsPositionData struct {
	// HasReturn reports whether the function ends with an explicit return.
	HasReturn bool
}

// GoReturningFunctionCallNewArgumentPositionData stores data collected during a selection of the position for
// a new argument in a function call which is being returned.
type GoReturningFunctionCallNewArgumentPositionData struct {
	HasArguments     bool
	HasTrailingComma bool
}

// GoReturningCompositeNewArgumentPositionData stores data collected during a selection of the position for
// a new argument in a struct which is being returned.
type GoReturningCompositeNewArgumentPositionData struct {
	HasArguments     bool
	HasTrailingComma bool
}

// goPositionFinder tries to find a required position during a walk of the Golang AST.
type goPositionFinder func(result *PositionSelectorResult, options SelectOptions, code string) goVisitor

// goVisitor visits each node in the Golang AST tree until it returns true.
type goVisitor func(node ast.Node) bool

// Visit implements the Visitor interface to walk the Golang AST.
// Returning nil stops descent into the current node's children.
func (f goVisitor) Visit(node ast.Node) ast.Visitor {
	if f(node) {
		return f
	}
	return nil
}

// wrapGoFinder creates a selector out of each finder. It parses the given
// code, walks the AST with the finder's visitor, and converts the recorded
// 1-indexed token.Pos into a 0-indexed byte offset.
// NOTE(review): positionSelectorID is a package-level counter declared
// elsewhere in this package; this increment is not synchronized — confirm
// selectors are only built at init time.
func wrapGoFinder(finder goPositionFinder) *PositionSelector {
	positionSelectorID += 1
	return &PositionSelector{
		id: positionSelectorID,
		call: func(path, code string, options SelectOptions) (*PositionSelectorResult, error) {
			parsedAST, err := parser.ParseFile(token.NewFileSet(), path, []byte(code), 0)
			if err != nil {
				return nil, err
			}
			if options == nil {
				options = SelectOptions{}
			}
			result := &PositionSelectorResult{
				OffsetPosition: NoOffsetPosition,
			}
			ast.Walk(finder(result, options, code), parsedAST)
			if result.OffsetPosition != NoOffsetPosition {
				// The offset position coming from the finder is 1-indexed
				// (token.Pos with a fresh FileSet starts at 1), so make it
				// 0-indexed.
				result.OffsetPosition -= 1
			}
			return result, nil
		},
	}
}

// GoSelectNewImportPosition selects a position where a new import can be added.
var GoSelectNewImportPosition = wrapGoFinder(
	func(result *PositionSelectorResult, options SelectOptions, _ string) goVisitor {
		return func(node ast.Node) bool {
			switch n := node.(type) {
			case *ast.File:
				// Adds new import after package declaration: `package name`.
				// This is the fallback; a later *ast.GenDecl import visit
				// overwrites it.
				result.OffsetPosition = OffsetPosition(n.Name.End())
				result.Data = GoNewImportPositionData{
					ShouldAddNewLine: true,
				}
			case *ast.GenDecl:
				if n.Tok == token.IMPORT {
					// Adds new import after the last import URL.
					result.OffsetPosition = OffsetPosition(n.Specs[len(n.Specs)-1].End())
					result.Data = GoNewImportPositionData{}
					// If this is a group import, only URL is needed for the new one.
					if n.Lparen != token.NoPos {
						result.Data = GoNewImportPositionData{
							OnlyURLNeeded: true,
						}
					}
				}
			}
			return true
		}
	},
)

// GoSelectNewGlobalPosition selects a position where a new variable
// declaration, function or anything global can be added.
var GoSelectNewGlobalPosition = wrapGoFinder(
	func(result *PositionSelectorResult, options SelectOptions, _ string) goVisitor {
		return func(node ast.Node) bool {
			// Select a position after the package declaration or all the imports.
			switch n := node.(type) {
			case *ast.File:
				result.OffsetPosition = OffsetPosition(n.Name.End())
			case *ast.GenDecl:
				if n.Tok == token.IMPORT {
					result.OffsetPosition = OffsetPosition(n.End())
				}
			}
			return true
		}
	},
)

// GoSelectBeforeFunctionReturnsPosition selects a position just before the last function return (implicit or explicit).
// Only the return that is the final statement of the function body is considered.
// Requires options["functionName"].
var GoSelectBeforeFunctionReturnsPosition = wrapGoFinder(
	func(result *PositionSelectorResult, options SelectOptions, _ string) goVisitor {
		functionName := options["functionName"]
		return func(node ast.Node) bool {
			if n, ok := node.(*ast.FuncDecl); ok && n.Name.Name == functionName {
				// NOTE(review): panics on a function with an empty body —
				// presumably callers only target non-empty functions.
				lastItem := n.Body.List[len(n.Body.List)-1]
				switch l := lastItem.(type) {
				case *ast.ReturnStmt:
					// If there is a return, select a position before it.
					result.OffsetPosition = OffsetPosition(l.Pos())
					result.Data = GoBeforeFunctionReturnsPositionData{
						HasReturn: true,
					}
				default:
					// Select the last position as there is no return here.
					result.OffsetPosition = OffsetPosition(lastItem.End())
					result.Data = GoBeforeFunctionReturnsPositionData{
						HasReturn: false,
					}
				}
			}
			return true
		}
	},
)

// GoSelectStartOfFunctionPosition selects a position just after the function block starts.
// Requires options["functionName"].
var GoSelectStartOfFunctionPosition = wrapGoFinder(
	func(result *PositionSelectorResult, options SelectOptions, _ string) goVisitor {
		functionName := options["functionName"]
		return func(node ast.Node) bool {
			if n, ok := node.(*ast.FuncDecl); ok && n.Name.Name == functionName {
				// Select the position after the left brace.
				result.OffsetPosition = OffsetPosition(n.Body.Lbrace + 1)
			}
			return true
		}
	},
)

// GoSelectReturningFunctionCallNewArgumentPosition selects a position for a new argument in a function call that is
// returning a value. The function call must be the single result of the final return statement.
// Requires options["functionName"].
var GoSelectReturningFunctionCallNewArgumentPosition = wrapGoFinder(
	func(result *PositionSelectorResult, options SelectOptions, code string) goVisitor {
		functionName := options["functionName"]
		return func(node ast.Node) bool {
			if n, ok := node.(*ast.FuncDecl); ok && n.Name.Name == functionName {
				lastItem := n.Body.List[len(n.Body.List)-1]
				if l, ok := lastItem.(*ast.ReturnStmt); ok && len(l.Results) == 1 {
					ret := l.Results[0]
					if r, ok := ret.(*ast.CallExpr); ok {
						result.OffsetPosition = OffsetPosition(r.Rparen)
						data := GoReturningFunctionCallNewArgumentPositionData{
							HasArguments: len(r.Args) != 0,
						}
						// Check if the closing parenthesis is preceded by a comma.
						// r.Rparen-1 is the byte offset of ')' since Pos is 1-indexed.
						leftPart := []rune(strings.TrimSpace(code[:r.Rparen-1]))
						// TODO: This won't work if there is a comment after the comma.
						if leftPart[len(leftPart)-1] == ',' {
							data.HasTrailingComma = true
						}
						result.Data = data
					}
				}
			}
			return true
		}
	},
)

// GoSelectReturningCompositeNewArgumentPosition selects a position for a new element in a struct/map composite
// literal that is being returned (optionally behind a & reference). The literal must be the single result of the
// final return statement. Requires options["functionName"].
var GoSelectReturningCompositeNewArgumentPosition = wrapGoFinder(
	func(result *PositionSelectorResult, options SelectOptions, code string) goVisitor {
		functionName := options["functionName"]
		return func(node ast.Node) bool {
			if n, ok := node.(*ast.FuncDecl); ok && n.Name.Name == functionName {
				lastItem := n.Body.List[len(n.Body.List)-1]
				if l, ok := lastItem.(*ast.ReturnStmt); ok && len(l.Results) == 1 {
					ret := l.Results[0]
					// If the returned value is a reference, cut the reference symbol off.
					if r, ok := ret.(*ast.UnaryExpr); ok && r.Op == token.AND {
						ret = r.X
					}
					if r, ok := ret.(*ast.CompositeLit); ok {
						result.OffsetPosition = OffsetPosition(r.Rbrace)
						data := GoReturningCompositeNewArgumentPositionData{
							HasArguments: len(r.Elts) != 0,
						}
						// Check if the closing brace is preceded by a comma.
						leftPart := []rune(strings.TrimSpace(code[:r.Rbrace-1]))
						// TODO: This won't work if there is a comment after the comma.
						if leftPart[len(leftPart)-1] == ',' {
							data.HasTrailingComma = true
						}
						result.Data = data
					}
				}
			}
			return true
		}
	},
)

// GoSelectStructNewFieldPosition selects a position for a new field in a struct definition.
// Requires options["structName"].
var GoSelectStructNewFieldPosition = wrapGoFinder(
	func(result *PositionSelectorResult, options SelectOptions, code string) goVisitor {
		structName := options["structName"]
		return func(node ast.Node) bool {
			if n, ok := node.(*ast.TypeSpec); ok && n.Name.Name == structName {
				if s, ok := n.Type.(*ast.StructType); ok {
					// Insert just before the struct's closing brace.
					result.OffsetPosition = OffsetPosition(s.Fields.Closing)
				}
			}
			return true
		}
	},
)
starport/pkg/clipper/goselection.go
0.673836
0.618348
goselection.go
starcoder
package dlog

import (
	"errors"
	"fmt"
	"strconv"

	"github.com/spf13/cobra"

	"github.com/timebertt/grypto/modular"
)

// NewCommand builds the `dlog` cobra command, which calculates the discrete
// logarithm of x to a given base and modulus by enumeration.
func NewCommand() *cobra.Command {
	var x, base, mod int32

	cmd := &cobra.Command{
		Use:   "dlog [x] [base] [modulus]",
		Short: "Calculate the discrete logarithm of x to the given base mod modulus",
		Long: `dlog calculates the discrete logarithm of x to the given base and modulus by enumeration for int32 numbers.

The discrete logarithm of a number x to the base of b modulo m is defined as the smallest number y, so that b^y ≡ x mod m.
dlog is the inverse operation to exp.

Enumeration is a very simple approach to calculate dlog. It calculates b^i for i=0,1,...,m until b^i=x.
While being simple, the algorithm can take up to order(b) steps in the worst case, so it is very impractical for bases with large order.

Calculating the discrete logarithm is thought to be hard, so currently there is no known algorithm for solving it efficiently.
The security of some cryptographic algorithms (e.g. Diffie-Hellman, ElGamal and others) is based on exactly this assumption, that DLog is hard.

See https://en.wikipedia.org/wiki/Discrete_logarithm.`,
		Args: cobra.ExactArgs(3),
		PreRunE: func(cmd *cobra.Command, args []string) error {
			// ParseInt with bitSize 32 rejects values outside the int32
			// range in both directions. The previous Atoi-based code only
			// checked the upper bound (and its modulus error message
			// mistakenly printed the base).
			var err error
			if x, err = parseInt32(args[0], "first argument (x)"); err != nil {
				return err
			}
			if base, err = parseInt32(args[1], "second argument (base)"); err != nil {
				return err
			}
			if mod, err = parseInt32(args[2], "third argument (modulus)"); err != nil {
				return err
			}

			// Arguments are valid: from here on, errors are calculation
			// errors and should not trigger cobra's usage output.
			cmd.SilenceErrors = true
			cmd.SilenceUsage = true
			return nil
		},
		RunE: func(cmd *cobra.Command, args []string) error {
			return runDLog32(x, base, mod)
		},
	}

	return cmd
}

// parseInt32 parses s as a base-10 integer that must fit into an int32.
// what names the argument for the error message.
func parseInt32(s, what string) (int32, error) {
	v, err := strconv.ParseInt(s, 10, 32)
	if err != nil {
		return 0, fmt.Errorf("%s is not a valid int32: %w", what, err)
	}
	return int32(v), nil
}

// runDLog32 computes the discrete logarithm and prints the result.
// modular.DLog32 may panic on invalid input — TODO confirm — so panics are
// converted into returned errors here.
func runDLog32(x, base, mod int32) (err error) {
	defer func() {
		if p := recover(); p != nil {
			switch e := p.(type) {
			case error:
				err = e
			case string:
				err = errors.New(e)
			}
		}
	}()

	dlog, exists := modular.DLog32(x, base, mod)
	if !exists {
		return fmt.Errorf("dlog(%d) to the base %d mod %d does not exist", x, base, mod)
	}

	fmt.Printf("dlog(%d) to the base %d mod %d = %d\n", x, base, mod, dlog)
	return nil
}
grypto/cmd/dlog/dlog.go
0.801042
0.484136
dlog.go
starcoder
package dilithium import ( "github.com/openziti/dilithium/util" "github.com/pkg/errors" ) /* * ACK Encoding Format * * If the high-bit of the first byte of the ACK region is low, then this is a single int32 containing a sequence number. * * If the high-bit of the first byte of the ACK region is high, then we know we're dealing with multiple ACKs (or ACK * ranges) encoded in series. The remaining 7 bits contain the number of ACKs (or ACK ranges) encoded in the series. * * When decoding an ACK from a series, we use the high bit of the 4-byte int32 to determine if this is an ACK range. * If the high-bit is set, then we know to expect that there are actually 2 int32s in a row, definining the lower and * upper bounds of the range. If the high-bit is low, then we know this is a single sequence number. */ type Ack struct { Start int32 End int32 } const ackSeriesMarker = uint8(1 << 7) const sequenceRangeMarker = uint32(1 << 31) const sequenceRangeInvert = 0xFFFFFFFF ^ sequenceRangeMarker func EncodeAcks(acks []Ack, data []byte) (n uint32, err error) { if len(acks) < 1 { return 0, nil } if len(acks) > 127 { return 0, errors.Errorf("ack series too large [%d > 127]", len(acks)) } dataSz := uint32(len(data)) if len(acks) == 1 && acks[0].Start == acks[0].End { if dataSz < 4 { return 0, errors.Errorf("insufficient buffer to encode ack [%d < 4]", dataSz) } util.WriteInt32(data, int32(uint32(acks[0].Start)&sequenceRangeInvert)) return 4, nil } i := uint32(0) if (i + 1) > dataSz { return i, errors.Errorf("insufficient buffer to encode ack series [%d < %d]", dataSz, i+1) } data[i] = ackSeriesMarker + uint8(len(acks)) i++ for _, a := range acks { if a.Start == a.End { if (i + 4) > dataSz { return i, errors.Errorf("insufficient buffer to encode ack series [%d < %d]", dataSz, i) } util.WriteInt32(data[i:i+4], int32(uint32(a.Start)&sequenceRangeInvert)) i += 4 } else { if (i + 4) > dataSz { return i, errors.Errorf("insufficient buffer to encode ack series [%d < %d]", dataSz, i) } 
util.WriteInt32(data[i:i+4], int32(uint32(a.Start)|sequenceRangeMarker)) i += 4 if (i + 4) > dataSz { return i, errors.Errorf("insufficient buffer to encode ack series [%d < %d]", dataSz, i) } util.WriteInt32(data[i:i+4], int32(uint32(a.End)&sequenceRangeInvert)) i += 4 } } return i, nil } func DecodeAcks(data []byte) (acks []Ack, sz uint32, err error) { dataSz := uint32(len(data)) if dataSz < 4 { return nil, 0, errors.Errorf("short ack buffer [%d < 4]", dataSz) } if data[0]&ackSeriesMarker == 0 { seq := util.ReadInt32(data[0:4]) acks = append(acks, Ack{seq, seq}) return acks, 4, nil } else { seriesSz := int(data[0] ^ ackSeriesMarker) sz = 1 for i := 0; i < seriesSz; i++ { first := util.ReadInt32(data[sz : sz+4]) if uint32(first)&sequenceRangeMarker == sequenceRangeMarker { sz += 4 second := util.ReadInt32(data[sz : sz+4]) acks = append(acks, Ack{int32(uint32(first) & sequenceRangeInvert), int32(uint32(second) & sequenceRangeInvert)}) } else { acks = append(acks, Ack{int32(uint32(first) & sequenceRangeInvert), int32(uint32(first) & sequenceRangeInvert)}) } sz += 4 } } return }
ack.go
0.640973
0.736211
ack.go
starcoder
package regression

import (
	"math"
)

// Regression accumulates running sums for an ordinary least-squares fit of
// y = slope*x + intercept over all points added so far. Use New() to
// initialize.
//
// NOTE(review): the xDelta field is never read or written — nothing is ever
// evicted, so the fit covers every point ever added, not a trailing window.
// The field is kept only to preserve the struct layout.
type Regression struct {
	xSum, ySum, xxSum, xySum, yySum, xDelta float64

	// Cached results of the most recent calculation; valid while
	// lastCalcFresh is true (i.e. until the next Add).
	lastSlopeCalc, lastInterceptCalc, lastStdErrorCalc float64

	// N is the number of points added.
	N int

	// lastCalcFresh is here so multiple calc calls per Add call won't hurt
	// performance.
	lastCalcFresh bool
}

// New returns a zero-valued Regression ready for use.
func New() Regression {
	return Regression{}
}

// Calculate returns the slope and intercept of a best-fit line to the added
// points. Returns a cached value if called between adds. Deprecated in favor
// of CalculateWithStdError.
func (r *Regression) Calculate() (slope, intercept float64) {
	slope, intercept, _ = r.CalculateWithStdError()
	return
}

// CalculateWithStdError returns the slope, intercept and standard error of a
// best-fit line to the added points. Returns cached values if called between
// adds. With fewer than two points the slope and intercept are NaN, and with
// fewer than three the standard error is (the n-2 degrees-of-freedom
// denominator is non-positive).
func (r *Regression) CalculateWithStdError() (slope, intercept, stdError float64) {
	if r.lastCalcFresh {
		return r.lastSlopeCalc, r.lastInterceptCalc, r.lastStdErrorCalc
	}

	n := float64(r.N)

	// Linear regression formula:
	//   slope     = (n*SUM(xy) - SUM(x)*SUM(y)) / (n*SUM(xx) - SUM(x)^2)
	//   intercept = (SUM(y) - slope*SUM(x)) / n
	// computed with numerator and denominator divided through by n.
	xSumOverN := r.xSum / n // computed once for performance
	slope = (r.xySum - xSumOverN*r.ySum) / (r.xxSum - xSumOverN*r.xSum)
	intercept = (r.ySum - slope*r.xSum) / n

	// Standard error = sqrt(SUM((yActual - yPredicted)^2) / (n - 2)); the
	// n-2 reflects the two degrees of freedom consumed by the fit.
	//
	// Expansion of the sum in terms of the running totals:
	//   SUM((yA - yP)^2)
	//   = SUM(y*y) - 2*m*SUM(x*y) - 2*b*SUM(y) + m*m*SUM(x*x) + 2*b*m*SUM(x) + n*b*b
	twoTimesB := 2 * intercept
	stdError = math.Sqrt((r.yySum - 2*slope*r.xySum - twoTimesB*r.ySum +
		slope*slope*r.xxSum + twoTimesB*slope*r.xSum +
		n*intercept*intercept) / (n - 2))

	r.lastSlopeCalc = slope
	r.lastInterceptCalc = intercept
	r.lastStdErrorCalc = stdError
	r.lastCalcFresh = true
	return
}

// Add adds the point (x, y) to the running sums and invalidates the cache.
// Contrary to the original documentation, no ordering check is performed on
// x and this method never panics.
func (r *Regression) Add(x, y float64) {
	r.lastCalcFresh = false
	r.N++
	r.xSum += x
	r.ySum += y
	r.xxSum += x * x
	r.xySum += x * y
	r.yySum += y * y
}
v1/regression.go
0.842507
0.650287
regression.go
starcoder
// Command player is a platformer demo for the cp (Chipmunk) physics engine
// rendered with ebiten: a box-shaped player that can run, jump with a
// variable-height boost, and push crates.
package main

import (
	"github.com/hajimehoshi/ebiten/v2"
	"github.com/jakecoffman/cp"
	"github.com/jakecoffman/cpebiten"
	"log"
	"math"
)

const (
	screenWidth  = 600
	screenHeight = 480
)

// Tuning constants. Gravity is positive, so the Y axis points down and
// jumping applies a negative Y velocity.
const (
	PlayerVelocity = 500.0

	PlayerGroundAccelTime = 0.1
	PlayerGroundAccel     = PlayerVelocity / PlayerGroundAccelTime

	PlayerAirAccelTime = 0.25
	PlayerAirAccel     = PlayerVelocity / PlayerAirAccelTime

	JumpHeight      = 50.0
	JumpBoostHeight = 55.0
	FallVelocity    = 900.0
	Gravity         = 2000.0
)

// Package-level state shared between the physics callback and Update.
var playerBody *cp.Body
var playerShape *cp.Shape

// remainingBoost is the seconds of reduced-gravity "boost" left while the
// jump key is held; grounded/lastJumpState track contact and key-edge state.
var remainingBoost float64
var grounded, lastJumpState bool

// playerUpdateVelocity is the custom velocity-update callback installed on
// the player body. It implements ground/air movement, jump boost (gravity is
// suppressed while boosting), and a terminal fall velocity.
func playerUpdateVelocity(body *cp.Body, gravity cp.Vector, damping, dt float64) {
	jumpState := ebiten.IsKeyPressed(ebiten.KeyW) || ebiten.IsKeyPressed(ebiten.KeyUp)

	// Grab the grounding normal from last frame
	groundNormal := cp.Vector{}
	playerBody.EachArbiter(func(arb *cp.Arbiter) {
		n := arb.Normal().Neg()
		if n.Y < groundNormal.Y {
			groundNormal = n
		}
	})

	// With Y pointing down, a negative normal Y means we are standing on
	// something below us.
	grounded = groundNormal.Y < 0
	if groundNormal.Y > 0 {
		// Hit a ceiling: cancel any remaining jump boost.
		remainingBoost = 0
	}

	// Do a normal-ish update: skip gravity entirely while boosting.
	boost := jumpState && remainingBoost > 0
	var g cp.Vector
	if !boost {
		g = gravity
	}
	body.UpdateVelocity(g, damping, dt)

	// Target horizontal speed for air/ground control
	var targetVx float64
	if ebiten.IsKeyPressed(ebiten.KeyA) || ebiten.IsKeyPressed(ebiten.KeyLeft) {
		targetVx -= PlayerVelocity
	}
	if ebiten.IsKeyPressed(ebiten.KeyD) || ebiten.IsKeyPressed(ebiten.KeyRight) {
		targetVx += PlayerVelocity
	}

	// Update the surface velocity and friction
	// Note that the "feet" move in the opposite direction of the player.
	surfaceV := cp.Vector{-targetVx, 0}
	playerShape.SetSurfaceV(surfaceV)
	if grounded {
		// Friction against the ground drives the body toward targetVx
		// ("conveyor belt" trick); the ratio yields PlayerGroundAccel.
		playerShape.SetFriction(PlayerGroundAccel / Gravity)
	} else {
		playerShape.SetFriction(0)
	}

	// Apply air control if not grounded
	if !grounded {
		v := playerBody.Velocity()
		// Move toward the target horizontal speed, limited to PlayerAirAccel.
		playerBody.SetVelocity(cp.LerpConst(v.X, targetVx, PlayerAirAccel*dt), v.Y)
	}

	// Clamp downward speed to the terminal fall velocity.
	v := body.Velocity()
	body.SetVelocity(v.X, cp.Clamp(v.Y, -FallVelocity, cp.INFINITY))
}

// Game wraps the shared cpebiten game loop.
type Game struct {
	*cpebiten.Game
}

// NewGame builds the demo scene: four boundary walls, the player body with
// its custom velocity callback, and a 6x3 stack of pushable crates.
func NewGame() *Game {
	space := cp.NewSpace()
	space.Iterations = 10
	space.SetGravity(cp.Vector{0, Gravity})

	// Wall endpoints, consumed in pairs (left, right, top, bottom edges).
	walls := []cp.Vector{
		{0, 0}, {0, screenHeight},
		{screenWidth, 0}, {screenWidth, screenHeight},
		{0, 0}, {screenWidth, 0},
		{0, screenHeight}, {screenWidth, screenHeight},
	}
	for i := 0; i < len(walls)-1; i += 2 {
		shape := space.AddShape(cp.NewSegment(space.StaticBody, walls[i], walls[i+1], 0))
		shape.SetElasticity(1)
		shape.SetFriction(1)
		shape.SetFilter(cpebiten.NotGrabbable)
	}

	// player: infinite moment so the box never rotates.
	playerBody = space.AddBody(cp.NewBody(1, cp.INFINITY))
	playerBody.SetPosition(cp.Vector{100, 200})
	playerBody.SetVelocityUpdateFunc(playerUpdateVelocity)

	playerShape = space.AddShape(cp.NewBox2(playerBody, cp.BB{-15, -27.5, 15, 27.5}, 10))
	playerShape.SetElasticity(0)
	playerShape.SetFriction(0)
	playerShape.SetCollisionType(1)

	// A grid of pushable crates.
	for i := 0; i < 6; i++ {
		for j := 0; j < 3; j++ {
			body := space.AddBody(cp.NewBody(4, cp.INFINITY))
			body.SetPosition(cp.Vector{float64(400 + j*60), float64(200 + i*60)})

			shape := space.AddShape(cp.NewBox(body, 50, 50, 0))
			shape.SetElasticity(0)
			shape.SetFriction(0.7)
		}
	}

	return &Game{
		Game: cpebiten.NewGame(space, 180),
	}
}

// Update handles the edge-triggered jump, steps the physics, and decays the
// jump-boost timer.
func (g *Game) Update() error {
	jumpState := ebiten.IsKeyPressed(ebiten.KeyW) || ebiten.IsKeyPressed(ebiten.KeyUp)

	// If the jump key was just pressed this frame, jump!
	if jumpState && !lastJumpState && grounded {
		// Initial speed for a ballistic jump of height JumpHeight: v = sqrt(2*h*g).
		jumpV := math.Sqrt(2.0 * JumpHeight * Gravity)
		playerBody.SetVelocityVector(playerBody.Velocity().Add(cp.Vector{0, -jumpV}))
		// Boost time that would extend the jump to JumpBoostHeight.
		remainingBoost = JumpBoostHeight / jumpV
	}

	if err := g.Game.Update(); err != nil {
		return err
	}

	// Assumes a fixed 60 Hz update rate — TODO confirm this matches the
	// cpebiten tick rate configured in NewGame.
	remainingBoost -= 1. / 60.
	lastJumpState = jumpState
	return nil
}

func main() {
	ebiten.SetWindowSize(screenWidth, screenHeight)
	ebiten.SetWindowTitle("Player")
	if err := ebiten.RunGame(NewGame()); err != nil {
		log.Fatal(err)
	}
}
player/player.go
0.598195
0.443781
player.go
starcoder
package swapi // TestCharacters is a list of sample Character objects, taken mostly from // https://swapi.dev/ and https://starwars.fandom.com/ var TestCharacters = []Character{ { Name: "<NAME>", Height: 172, Mass: 77, HairColor: "blond", Gender: "male", ForceSensitive: true, Ghost: true, Tags: []string{"human", "jedi", "rebel", "starfighter-pilot", "shuttle-pilot", "green-lightsaber"}, }, { Name: "C-3PO", Height: 167, Mass: 75, HairColor: "n/a", Gender: "n/a", ForceSensitive: false, Ghost: false, Tags: []string{"droid"}, }, { Name: "R2-D2", Height: 96, Mass: 32, HairColor: "n/a", Gender: "n/a", ForceSensitive: false, Ghost: false, Tags: []string{"droid"}, }, { Name: "<NAME>", Height: 202, Mass: 136, HairColor: "none", Gender: "male", ForceSensitive: true, Ghost: true, Tags: []string{"human", "sith", "empire", "tie-pilot", "crimson-lightsaber"}, }, { Name: "<NAME>", Height: 150, Mass: 49, HairColor: "brown", Gender: "female", ForceSensitive: true, Ghost: false, Tags: []string{"human", "general", "rebel"}, }, { Name: "<NAME>", Height: 182, Mass: 77, HairColor: "white", Gender: "male", ForceSensitive: true, Ghost: true, Tags: []string{"human", "jedi", "starfighter-pilot", "medium-blue-lightsaber"}, }, { Name: "Chewbacca", Height: 228, Mass: 112, HairColor: "brown", Gender: "male", ForceSensitive: false, Ghost: false, Tags: []string{"wookie", "smuggler", "freighter-pilot"}, }, { Name: "<NAME>", Height: 180, Mass: 80, HairColor: "brown", Gender: "male", ForceSensitive: false, Ghost: true, Tags: []string{"human", "smuggler", "freighter-pilot"}, }, { Name: "Yoda", Height: 66, Mass: 17, HairColor: "white", Gender: "male", ForceSensitive: true, Ghost: true, Tags: []string{"jedi", "green-lightsaber"}, }, }
cli-experiments/cobra-k8s-selectors/pkg/swapi/data.go
0.561215
0.446012
data.go
starcoder
package matrix

import "errors"

// Matrix is a square matrix.
// When built by ConvertToMatrix or Copy, its underlying [][]int slice is
// sliced from one linear backing array, which allows some copying
// optimizations. A value of -1 marks a disabled ("cut") element.
type Matrix [][]int

// ConvertToMatrix converts a raw [][]int slice to a square matrix backed by
// a single contiguous array.
func ConvertToMatrix(slice [][]int) Matrix {
	size := len(slice)
	backingArray := make([]int, size*size)
	var matrix Matrix = make([][]int, size)
	for i, row := range slice {
		matrix[i] = backingArray[i*size : (i+1)*size]
		copy(matrix[i], row)
	}
	return matrix
}

// Copy copies the matrix to a new one with a single contiguous copy.
// It relies on m itself being contiguous (built by this package).
func (m Matrix) Copy() Matrix {
	size := len(m)
	backingArray := make([]int, size*size)
	if size != 0 {
		// m[0][:cap(m[0])] spans the whole contiguous backing array.
		copy(backingArray, m[0][:cap(m[0])])
	}
	var matrix Matrix = make([][]int, size)
	for i := range m {
		matrix[i] = backingArray[i*size : (i+1)*size]
	}
	return matrix
}

// LoadFrom copies matrix data from another matrix of the same size.
//
// Fixed: the original did a single copy over cap(m[0]), which silently
// copied only the first row (or stale data) whenever source was not backed
// by one contiguous array. Copying row by row is correct for any source.
func (m Matrix) LoadFrom(source Matrix) error {
	if len(m) != len(source) {
		return errors.New("Matrix size mismatch")
	}
	for i := range m {
		copy(m[i], source[i])
	}
	return nil
}

// reduceLine finds the minimum non-disabled value of one line of m (row
// `index` when byRow is true, otherwise column `index`) and, if that minimum
// is positive, subtracts it from every non-disabled element of the line.
// It returns the amount subtracted (0 when the line contains a zero, or only
// disabled elements).
func (m Matrix) reduceLine(index int, byRow bool) int {
	size := len(m)
	at := func(k int) int {
		if byRow {
			return m[index][k]
		}
		return m[k][index]
	}
	set := func(k, v int) {
		if byRow {
			m[index][k] = v
		} else {
			m[k][index] = v
		}
	}

	// First, calculate the minimum; -1 entries are disabled and skipped.
	min := -1
	for k := 0; k < size; k++ {
		v := at(k)
		if v == -1 {
			continue
		}
		if v == 0 {
			min = 0
			break
		}
		if (min == -1) || (min > v) {
			min = v
		}
	}
	if min <= 0 {
		return 0
	}

	// And now, subtract it.
	for k := 0; k < size; k++ {
		if v := at(k); v != -1 {
			set(k, v-min)
		}
	}
	return min
}

// Normalize subtracts the minimal value of each row from that row, then does
// the same for each column, and returns the sum of all subtracted minima.
// Disabled (-1) elements are ignored throughout.
func (m Matrix) Normalize() int {
	norm := 0
	for i := range m {
		norm += m.reduceLine(i, true)
	}
	for j := range m {
		norm += m.reduceLine(j, false)
	}
	return norm
}

// CutNode cuts elements from the matrix (sets them to -1) according
// to the processed path from source to destination nodes.
func (m Matrix) CutNode(source, dest int, lastNode bool) {
	size := len(m)

	// Disabling paths from the source - we have already passed it
	for i := 0; i < size; i++ {
		m[source][i] = -1
	}

	// Disabling paths to the destination - we are already in it
	for i := 0; i < size; i++ {
		m[i][dest] = -1
	}

	if !lastNode && (size != 0) {
		// Disabling path to the root node (index 0).
		// There are more nodes to visit, and we will be coming to
		// root from one of them.
		m[dest][0] = -1
	}
}
solver/matrix/matrix.go
0.782579
0.567158
matrix.go
starcoder
package heldiamgo

import (
	"image"
	"image/color"
)

// Drawing masks for image composition.
// Circle: see https://blog.golang.org/go-imagedraw-package

// Circle is an alpha mask in the shape of a filled circle.
type Circle struct {
	p image.Point // center point
	r int         // radius
}

// NewCircle returns a circle mask centered at p with radius r.
func NewCircle(p image.Point, r int) *Circle {
	return &Circle{
		p: p,
		r: r,
	}
}

func (c *Circle) ColorModel() color.Model {
	return color.AlphaModel
}

func (c *Circle) Bounds() image.Rectangle {
	return image.Rect(c.p.X-c.r, c.p.Y-c.r, c.p.X+c.r, c.p.Y+c.r)
}

// At reports full alpha inside the circle and zero alpha outside.
// The +0.5 samples the center of the pixel.
func (c *Circle) At(x, y int) color.Color {
	xx, yy, rr := float64(x-c.p.X)+0.5, float64(y-c.p.Y)+0.5, float64(c.r)
	if xx*xx+yy*yy < rr*rr {
		return color.Alpha{A: 255}
	}
	return color.Alpha{A: 0}
}

// Ellipse is an alpha mask in the shape of a rounded rectangle: a rectangle
// whose four corners are clipped to quarter circles of the configured
// radius. (The name is kept for backward compatibility.)
type Ellipse struct {
	rect   image.Rectangle
	radius int
	width  int
	height int
}

// SetBounds sets the rectangle covered by the mask.
func (c *Ellipse) SetBounds(rect image.Rectangle) {
	c.rect = rect
	c.width = rect.Size().X
	c.height = rect.Size().Y
}

// SetRadius sets the corner radius.
func (c *Ellipse) SetRadius(radius int) {
	c.radius = radius
}

func (c *Ellipse) ColorModel() color.Model {
	return color.AlphaModel
}

func (c *Ellipse) Bounds() image.Rectangle {
	return c.rect
}

// insideCorner reports whether pixel (x, y) lies within the quarter circle
// of radius r centered at (cx, cy); +0.5 samples the pixel center.
func insideCorner(x, y, cx, cy, r int) bool {
	xx := float64(x-cx) + 0.5
	yy := float64(y-cy) + 0.5
	rr := float64(r)
	return xx*xx+yy*yy < rr*rr
}

// At reports full alpha inside the rounded rectangle and zero alpha in the
// clipped corner regions.
//
// Fixed: coordinates are now translated by rect.Min before comparison, so
// the mask is correct even when the bounds are not anchored at the origin
// (the original compared absolute coordinates against radius/width/height
// directly). Behavior for origin-anchored rectangles is unchanged.
func (c *Ellipse) At(x, y int) color.Color {
	x -= c.rect.Min.X
	y -= c.rect.Min.Y
	r := c.radius

	inside := true
	switch {
	case x < r && y < r: // top-left corner
		inside = insideCorner(x, y, r, r, r)
	case x > c.width-r && y < r: // top-right corner
		inside = insideCorner(x, y, c.width-r, r, r)
	case x > c.width-r && y > c.height-r: // bottom-right corner
		inside = insideCorner(x, y, c.width-r, c.height-r, r)
	case x < r && y > c.height-r: // bottom-left corner
		inside = insideCorner(x, y, r, c.height-r, r)
	}
	if inside {
		return color.Alpha{A: 255}
	}
	return color.Alpha{A: 0}
}
image.go
0.646349
0.412234
image.go
starcoder
package output import ( "github.com/Jeffail/benthos/v3/internal/component/output" "github.com/Jeffail/benthos/v3/internal/docs" "github.com/Jeffail/benthos/v3/lib/log" "github.com/Jeffail/benthos/v3/lib/message/batch" "github.com/Jeffail/benthos/v3/lib/metrics" "github.com/Jeffail/benthos/v3/lib/output/writer" "github.com/Jeffail/benthos/v3/lib/types" "github.com/Jeffail/benthos/v3/lib/util/aws/session" "github.com/Jeffail/benthos/v3/lib/util/retries" ) //------------------------------------------------------------------------------ func init() { Constructors[TypeAWSSQS] = TypeSpec{ constructor: fromSimpleConstructor(NewAWSSQS), Version: "3.36.0", Summary: ` Sends messages to an SQS queue.`, Description: ` Metadata values are sent along with the payload as attributes with the data type String. If the number of metadata values in a message exceeds the message attribute limit (10) then the top ten keys ordered alphabetically will be selected. The fields ` + "`message_group_id` and `message_deduplication_id`" + ` can be set dynamically using [function interpolations](/docs/configuration/interpolation#bloblang-queries), which are resolved individually for each message of a batch. ### Credentials By default Benthos will use a shared credentials file when connecting to AWS services. It's also possible to set them explicitly at the component level, allowing you to transfer data across accounts. You can find out more [in this document](/docs/guides/aws).`, Async: true, Batches: true, FieldSpecs: docs.FieldSpecs{ docs.FieldCommon("url", "The URL of the target SQS queue."), docs.FieldCommon("message_group_id", "An optional group ID to set for messages.").IsInterpolated(), docs.FieldCommon("message_deduplication_id", "An optional deduplication ID to set for messages.").IsInterpolated(), docs.FieldCommon("max_in_flight", "The maximum number of messages to have in flight at a given time. 
Increase this to improve throughput."), docs.FieldCommon("metadata", "Specify criteria for which metadata values are sent as headers.").WithChildren(output.MetadataFields()...), batch.FieldSpec(), }.Merge(session.FieldSpecs()).Merge(retries.FieldSpecs()), Categories: []Category{ CategoryServices, CategoryAWS, }, } Constructors[TypeSQS] = TypeSpec{ constructor: fromSimpleConstructor(NewAmazonSQS), Status: docs.StatusDeprecated, Summary: ` Sends messages to an SQS queue.`, Description: ` ## Alternatives This output has been renamed to ` + "[`aws_sqs`](/docs/components/outputs/aws_sqs)" + `. Metadata values are sent along with the payload as attributes with the data type String. If the number of metadata values in a message exceeds the message attribute limit (10) then the top ten keys ordered alphabetically will be selected. The fields ` + "`message_group_id` and `message_deduplication_id`" + ` can be set dynamically using [function interpolations](/docs/configuration/interpolation#bloblang-queries), which are resolved individually for each message of a batch. ### Credentials By default Benthos will use a shared credentials file when connecting to AWS services. It's also possible to set them explicitly at the component level, allowing you to transfer data across accounts. You can find out more [in this document](/docs/guides/aws).`, Async: true, Batches: true, FieldSpecs: docs.FieldSpecs{ docs.FieldCommon("url", "The URL of the target SQS queue."), docs.FieldCommon("message_group_id", "An optional group ID to set for messages.").IsInterpolated(), docs.FieldCommon("message_deduplication_id", "An optional deduplication ID to set for messages.").IsInterpolated(), docs.FieldCommon("max_in_flight", "The maximum number of messages to have in flight at a given time. 
Increase this to improve throughput."), docs.FieldCommon("metadata", "Specify criteria for which metadata values are sent as headers.").WithChildren(output.MetadataFields()...), batch.FieldSpec(), }.Merge(session.FieldSpecs()).Merge(retries.FieldSpecs()), Categories: []Category{ CategoryServices, CategoryAWS, }, } } //------------------------------------------------------------------------------ // NewAWSSQS creates a new AmazonSQS output type. func NewAWSSQS(conf Config, mgr types.Manager, log log.Modular, stats metrics.Type) (Type, error) { return newAmazonSQS(TypeAWSSQS, conf.AWSSQS, mgr, log, stats) } // NewAmazonSQS creates a new AmazonSQS output type. func NewAmazonSQS(conf Config, mgr types.Manager, log log.Modular, stats metrics.Type) (Type, error) { return newAmazonSQS(TypeSQS, conf.SQS, mgr, log, stats) } func newAmazonSQS(name string, conf writer.AmazonSQSConfig, mgr types.Manager, log log.Modular, stats metrics.Type) (Type, error) { s, err := writer.NewAmazonSQS(conf, log, stats) if err != nil { return nil, err } var w Type if conf.MaxInFlight == 1 { w, err = NewWriter(name, s, log, stats) } else { w, err = NewAsyncWriter(name, conf.MaxInFlight, s, log, stats) } if err != nil { return w, err } return NewBatcherFromConfig(conf.Batching, w, mgr, log, stats) } //------------------------------------------------------------------------------
lib/output/aws_sqs.go
0.782081
0.457985
aws_sqs.go
starcoder
// Package core contains a set of primitives, including but not limited to various // elliptic curves, hashes, and commitment schemes. These primitives are used internally // and can also be used independently on their own externally. package core import ( crand "crypto/rand" "crypto/subtle" "fmt" "math/big" "github.com/dB2510/kryptology/internal" ) var ( // Zero is additive identity in the set of integers Zero = big.NewInt(0) // One is the multiplicative identity in the set of integers One = big.NewInt(1) // Two is the odd prime Two = big.NewInt(2) ) // ConstantTimeEqByte determines if a, b have identical byte serialization // and signs. It uses the crypto/subtle package to get a constant time comparison // over byte representations. Return value is a byte which may be // useful in bitwise operations. Returns 0x1 if the two values have the // identical sign and byte representation; 0x0 otherwise. func ConstantTimeEqByte(a, b *big.Int) byte { if a == nil && a == b { return 1 } if a == nil || b == nil { return 0 } // Determine if the byte representations are the same var sameBytes byte if subtle.ConstantTimeCompare(a.Bytes(), b.Bytes()) == 1 { sameBytes = 1 } else { sameBytes = 0 } // Determine if the signs are the same var sameSign byte if a.Sign() == b.Sign() { sameSign = 1 } else { sameSign = 0 } // Report the conjunction return sameBytes & sameSign } // ConstantTimeEq determines if a, b have identical byte serialization // and uses the crypto/subtle package to get a constant time comparison // over byte representations. 
func ConstantTimeEq(a, b *big.Int) bool { return ConstantTimeEqByte(a, b) == 1 } // In determines ring membership before modular reduction: x ∈ Z_m // returns nil if 0 ≤ x < m func In(x, m *big.Int) error { if AnyNil(x, m) { return internal.ErrNilArguments } // subtle doesn't support constant time big.Int compare // just use big.Cmp for now // x ∈ Z_m ⇔ 0 ≤ x < m if x.Cmp(Zero) != -1 && x.Cmp(m) == -1 { return nil } return internal.ErrZmMembership } // Add (modular addition): z = x+y (modulo m) func Add(x, y, m *big.Int) (*big.Int, error) { if AnyNil(x, y) { return nil, internal.ErrNilArguments } z := new(big.Int).Add(x, y) // Compute the residue if one is specified, otherwise // we leave the value as an unbound integer if m != nil { z.Mod(z, m) } return z, nil } // Mul (modular multiplication): z = x*y (modulo m) func Mul(x, y, m *big.Int) (*big.Int, error) { if AnyNil(x, y) { return nil, internal.ErrNilArguments } z := new(big.Int).Mul(x, y) // Compute the residue if one is specified, otherwise // we leave the value as an unbound integer if m != nil { z.Mod(z, m) } return z, nil } // Exp (modular exponentiation): z = x^y (modulo m) func Exp(x, y, m *big.Int) (*big.Int, error) { if AnyNil(x, y) { return nil, internal.ErrNilArguments } // This wrapper looks silly, but it makes the calling code read more consistently. return new(big.Int).Exp(x, y, m), nil } // Neg (modular negation): z = -x (modulo m) func Neg(x, m *big.Int) (*big.Int, error) { if AnyNil(x, m) { return nil, internal.ErrNilArguments } z := new(big.Int).Neg(x) z.Mod(z, m) return z, nil } // Inv (modular inverse): returns y such that xy = 1 (modulo m). func Inv(x, m *big.Int) (*big.Int, error) { if AnyNil(x, m) { return nil, internal.ErrNilArguments } z := new(big.Int).ModInverse(x, m) if z == nil { return nil, fmt.Errorf("cannot compute the multiplicative inverse") } return z, nil } // Rand generates a cryptographically secure random integer in the range: 1 < r < m. 
func Rand(m *big.Int) (*big.Int, error) { if m == nil { return nil, internal.ErrNilArguments } // Select a random element, but not zero or one // The reason is the random element may be used as a Scalar or an exponent. // An exponent of 1 is generally acceptable because the generator can't be // 1. If a Scalar is combined with another Scalar like in fiat-shamir, it // offers no hiding properties when multiplied. for { result, err := crand.Int(crand.Reader, m) if err != nil { return nil, err } if result.Cmp(One) == 1 { // result > 1 return result, nil } } } // AnyNil determines if any of values are nil func AnyNil(values ...*big.Int) bool { for _, x := range values { if x == nil { return true } } return false }
pkg/core/mod.go
0.852951
0.549036
mod.go
starcoder
package metric

import (
	"math"

	"github.com/emer/etable/etensor"
)

// closestRowScan32 scans rows of csz contiguous values in vals, returning the
// index and metric value of the row closest to probe under mfun (smaller =
// closer, per the Increasing property). Returns -1 when rows == 0.
func closestRowScan32(probe, vals []float32, rows, csz int, mfun Func32) (int, float32) {
	ci := -1
	minv := float32(math.MaxFloat32)
	for ri := 0; ri < rows; ri++ {
		st := ri * csz
		v := mfun(probe, vals[st:st+csz])
		if v < minv {
			ci = ri
			minv = v
		}
	}
	return ci, minv
}

// closestRowScan64 is the float64 counterpart of closestRowScan32.
func closestRowScan64(probe, vals []float64, rows, csz int, mfun Func64) (int, float64) {
	ci := -1
	minv := math.MaxFloat64
	for ri := 0; ri < rows; ri++ {
		st := ri * csz
		v := mfun(probe, vals[st:st+csz])
		if v < minv {
			ci = ri
			minv = v
		}
	}
	return ci, minv
}

// ClosestRow32 returns the closest fit between probe pattern and patterns in
// an etensor.Float32 where the outer-most dimension is assumed to be a row
// (e.g., as a column in an etable), using the given metric function,
// *which must have the Increasing property* -- i.e., larger = further.
// returns the row and metric value for that row.
// Col cell sizes must match size of probe (panics if not).
func ClosestRow32(probe *etensor.Float32, col *etensor.Float32, mfun Func32) (int, float32) {
	rows := col.Dim(0)
	csz := col.Len() / rows
	if csz != probe.Len() {
		panic("metric.ClosestRow32: probe size != cell size of tensor column!\n")
	}
	return closestRowScan32(probe.Values, col.Values, rows, csz, mfun)
}

// ClosestRow64 returns the closest fit between probe pattern and patterns in
// an etensor.Tensor where the outer-most dimension is assumed to be a row
// (e.g., as a column in an etable), using the given metric function,
// *which must have the Increasing property* -- i.e., larger = further.
// returns the row and metric value for that row.
// Col cell sizes must match size of probe (panics if not).
// Optimized for etensor.Float64 but works for any tensor.
func ClosestRow64(probe etensor.Tensor, col etensor.Tensor, mfun Func64) (int, float64) {
	rows := col.Dim(0)
	csz := col.Len() / rows
	if csz != probe.Len() {
		panic("metric.ClosestRow64: probe size != cell size of tensor column!\n")
	}
	if fc, cok := col.(*etensor.Float64); cok {
		// Fast path: the column already holds float64 values.
		if fp, pok := probe.(*etensor.Float64); pok {
			return closestRowScan64(fp.Values, fc.Values, rows, csz, mfun)
		}
		// Only the probe needs converting.
		var fpv []float64
		probe.Floats(&fpv)
		return closestRowScan64(fpv, fc.Values, rows, csz, mfun)
	}
	// Generic path: copy both tensors out to float64 slices.
	var fpv, fcv []float64
	probe.Floats(&fpv)
	col.Floats(&fcv)
	return closestRowScan64(fpv, fcv, rows, csz, mfun)
}

// ClosestRow32Py returns the closest fit between probe pattern and patterns in
// an etensor.Float32 where the outer-most dimension is assumed to be a row
// (e.g., as a column in an etable), using the given metric function,
// *which must have the Increasing property* -- i.e., larger = further.
// returns the row and metric value for that row.
// Col cell sizes must match size of probe (panics if not).
// Py version is for Python, returns a slice with row, cor, takes std metric
func ClosestRow32Py(probe *etensor.Float32, col *etensor.Float32, std StdMetrics) []float32 {
	row, cor := ClosestRow32(probe, col, StdFunc32(std))
	return []float32{float32(row), cor}
}

// ClosestRow64Py returns the closest fit between probe pattern and patterns in
// an etensor.Tensor where the outer-most dimension is assumed to be a row
// (e.g., as a column in an etable), using the given metric function,
// *which must have the Increasing property* -- i.e., larger = further.
// returns the row and metric value for that row.
// Col cell sizes must match size of probe (panics if not).
// Optimized for etensor.Float64 but works for any tensor.
// Py version is for Python, returns a slice with row, cor, takes std metric
func ClosestRow64Py(probe etensor.Tensor, col etensor.Tensor, std StdMetrics) []float64 {
	row, cor := ClosestRow64(probe, col, StdFunc64(std))
	return []float64{float64(row), cor}
}
metric/tensor.go
0.867036
0.621541
tensor.go
starcoder
package raylib

//#include "raylib.h"
import "C"
import "unsafe"

/*
Rectangle Structure
author: Lachee
source: https://github.com/raysan5/raylib/blob/master/src/raylib.h
*/

// Rectangle is an axis-aligned rectangle defined by its top-left corner and size.
type Rectangle struct {
	X      float32
	Y      float32
	Width  float32
	Height float32
}

// NewRectangle creates a new rect
func NewRectangle(x, y, width, height float32) Rectangle {
	return Rectangle{X: x, Y: y, Width: width, Height: height}
}

// NewRectangleFromVector4 creates a rectangle out of a vector4
func NewRectangleFromVector4(vector Vector4) Rectangle {
	return NewRectangle(vector.X, vector.Y, vector.Z, vector.W)
}

// NewRectangleFromPositionSize creates a rectangle out of a position and size
func NewRectangleFromPositionSize(position, size Vector2) Rectangle {
	return NewRectangle(position.X, position.Y, size.X, size.Y)
}

// ToVector4 creates a Vector4 out of the rectangle components
func (r Rectangle) ToVector4() Vector4 {
	return NewVector4(r.X, r.Y, r.Width, r.Height)
}

// Position gets the position of the rectangle. Alias of MinPosition().
func (r Rectangle) Position() Vector2 {
	return NewVector2(r.X, r.Y)
}

// SetPosition sets the position of the rectangle, returning the new value.
func (r Rectangle) SetPosition(v Vector2) Rectangle {
	return NewRectangle(v.X, v.Y, r.Width, r.Height)
}

// Size gets the size of the rectangle
func (r Rectangle) Size() Vector2 {
	return NewVector2(r.Width, r.Height)
}

// SetSize sets the size of the rectangle, returning the new value.
func (r Rectangle) SetSize(v Vector2) Rectangle {
	return NewRectangle(r.X, r.Y, v.X, v.Y)
}

// MinPosition gets the smallest position the rectangle can be. Alias of Position().
func (r Rectangle) MinPosition() Vector2 {
	return NewVector2(r.X, r.Y)
}

// Center gets the center position of the rectangle
func (r Rectangle) Center() Vector2 {
	return NewVector2(r.X+r.Width/2, r.Y+r.Height/2)
}

// MaxPosition gets the maximum position within the bounds
func (r Rectangle) MaxPosition() Vector2 {
	return NewVector2(r.X+r.Width, r.Y+r.Height)
}

// MoveV shifts the rectangle by a vector
func (r Rectangle) MoveV(v Vector2) Rectangle {
	return NewRectangle(r.X+v.X, r.Y+v.Y, r.Width, r.Height)
}

// Move shifts the rectangle by a value
func (r Rectangle) Move(x, y float32) Rectangle {
	return NewRectangle(r.X+x, r.Y+y, r.Width, r.Height)
}

// GrowV adds the vector to the width and height of the rectangle.
func (r Rectangle) GrowV(v Vector2) Rectangle {
	return NewRectangle(r.X, r.Y, v.X+r.Width, v.Y+r.Height)
}

// Grow adds the values to the width and height of the rectangle.
func (r Rectangle) Grow(x, y float32) Rectangle {
	return NewRectangle(r.X, r.Y, x+r.Width, y+r.Height)
}

// Scale the width and height of the rectangle
func (r Rectangle) Scale(scale float32) Rectangle {
	return NewRectangle(r.X, r.Y, r.Width*scale, r.Height*scale)
}

// Lerp a rectangle to a target rectangle, interpolating every component.
func (r Rectangle) Lerp(target Rectangle, amount float32) Rectangle {
	return Rectangle{
		X:      r.X + amount*(target.X-r.X),
		Y:      r.Y + amount*(target.Y-r.Y),
		Width:  r.Width + amount*(target.Width-r.Width),
		Height: r.Height + amount*(target.Height-r.Height),
	}
}

// LerpPosition a rectangle to a target position; size is left unchanged.
func (r Rectangle) LerpPosition(pos Vector2, amount float32) Rectangle {
	return Rectangle{
		X:      r.X + amount*(pos.X-r.X),
		Y:      r.Y + amount*(pos.Y-r.Y),
		Width:  r.Width,
		Height: r.Height,
	}
}

// BoundingBox is an axis-aligned 3D box defined by its two extreme corners.
type BoundingBox struct {
	Min Vector3
	Max Vector3
}

// NewBoundingBox - Returns new BoundingBox
func NewBoundingBox(min, max Vector3) BoundingBox {
	return BoundingBox{min, max}
}

// Size gets the size of the bounding box
func (bb BoundingBox) Size() Vector3 {
	return bb.Max.Subtract(bb.Min)
}

// Center gets the center position of the bounding box
func (bb BoundingBox) Center() Vector3 {
	return bb.Min.Add(bb.Max.Subtract(bb.Min).Divide(2))
}

// newBoundingBoxFromPointer reinterprets a C BoundingBox pointer as the Go value.
func newBoundingBoxFromPointer(ptr unsafe.Pointer) BoundingBox {
	return *(*BoundingBox)(ptr)
}

// cptr returns the C pointer view of the bounding box for cgo calls.
func (bb *BoundingBox) cptr() *C.BoundingBox {
	return (*C.BoundingBox)(unsafe.Pointer(bb))
}
raylib/rectangle.go
0.94281
0.789802
rectangle.go
starcoder
package characters var ( RaceTraitArmored = &RaceTrait{ Name: "Armored", Description: "The species possess thick fur, scales, a bony exoskeleton or other natural protection that gives it one point of natural armour. This works in the same way as normal armour.", } RaceTraitAquatic = &RaceTrait{ Name: "Aquatic", Description: "The species is adapted to life underwater. It can breathe underwater, or hold its breath for a long period (Endurance 10 minutes on average). If amphibious, its Dexterity is halved on land. If the species is not amphibious, then it cannot operate out of water without mechanical aid or telepresence.", } RaceTraitAtmosphericRequirements = &RaceTrait{ Name: "Atmospheric Requirements", Description: "The species requires an unusual combination of gasses to breathe, and cannot survive in most atmospheres without artificial aid. Creatures with this trait usually come from homeworlds with an Exotic atmosphere.", } RaceTraitEngineered = &RaceTrait{ Name: "Engineered", Description: "The species has been altered by some external factor to adapt to changed circumstances or a different environment. Medical treatment of Engineered species by a facility of a lower Technology Level than that at which the species was created receives a negative DM equal to the difference. Some strains of humans have been engineered to tolerate unusual atmospheres, to live on water worlds, or to cope with the stresses of space travel.", } RaceTraitFastMetabolism = &RaceTrait{ Name: "Fast Metabolism", Description: "Creatures with a fast metabolism require more food than most species, and their life support costs are doubled. In combat, fast-metabolism creatures gain a +2 initiative bonus. Fast-metabolism creatures halve their Endurance for the purposes of determining fatigue.", } RaceTraitFeral = &RaceTrait{ Name: "Feral", Description: "Feral species are uncivilised, regardless of their technological knowledge. 
Often, such species have acquired their technology from other races, or from Ancient ruins. Feral species are much less likely to accept the laws of more civilised societies. Feral species roll Education on 1d6 only.", } RaceTraitFlyer = &RaceTrait{ Name: "Flyer", Description: "The species can fly using wings, glider membranes, gasbags or other means. Characters of this species gain the Athletics (flying) skill at Level 0 and can travel at a speed noted in their description. Flying creatures who are aloft must spend one minor action every round on movement or stall and fall out of the air.\n• Winged flight is tiring and can only be sustained for a number of rounds equal to the creature’s Endurance before requiring a like amount of rest. Some specialised avians can increase this to minutes or even hours equal to Endurance.\n• Species with glider membranes cannot gain altitude while flying. They descend one meter every time they move forwards and cannot use more than one minor action for flying movement in a round.\n• Species that float using gasbags or some other method do not need to move to remain aloft. They are typically slower than other fliers, though.", } RaceTraitLarge = &RaceTrait{ Name: "Large", Description: "The species is considerably larger than the average for sophonts. Large creatures generally have a Strength and Endurance of 3d6 or even 4d6, and a Dexterity of 1d6. Life support requirements for Large creatures are doubled and they often have trouble operating in buildings and spacecraft designed for smaller creatures.\n• Some Large creatures are described as Huge. Attacks against Huge creatures receive a +1 DM to hit.", } RaceTraitNaturalWeapon = &RaceTrait{ Name: "Natural Weapon", Description: "The species has a natural weapon, such as claws, a strong bite or a poisonous stinger. Such weapons are usable at Personal range and deal 1 damage. 
The creature gains Melee (natural weapons) at level 0.", } RaceTraitNoFineManipulators = &RaceTrait{ Name: "No Fine Manipulators", Description: "The species has no fingers or other prehensile appendages, preventing them from easily picking things up, pushing small buttons, reaching into tight spaces, and so on. The species will need special equipment to function in most civilised settings.", } RaceTraitNotableStrength = &RaceTrait{ Name: "Notable Strength", Description: "Some species are notably dextrous, intelligent, tough or strong. Characters from such races have a positive Dice Modifier when rolling for that characteristic (+2 unless otherwise specified), and their racial maximum for that characteristic is increased by the same amount. Any characteristic can be Notable.", } RaceTraitNotableDexterity = &RaceTrait{ Name: "Notable Dexterity", Description: "Some species are notably dextrous, intelligent, tough or strong. Characters from such races have a positive Dice Modifier when rolling for that characteristic (+2 unless otherwise specified), and their racial maximum for that characteristic is increased by the same amount. Any characteristic can be Notable.", } RaceTraitNotableEndurance = &RaceTrait{ Name: "Notable Endurance", Description: "Some species are notably dextrous, intelligent, tough or strong. Characters from such races have a positive Dice Modifier when rolling for that characteristic (+2 unless otherwise specified), and their racial maximum for that characteristic is increased by the same amount. Any characteristic can be Notable.", } RaceTraitNotableIntelligence = &RaceTrait{ Name: "Notable Intelligence", Description: "Some species are notably dextrous, intelligent, tough or strong. Characters from such races have a positive Dice Modifier when rolling for that characteristic (+2 unless otherwise specified), and their racial maximum for that characteristic is increased by the same amount. 
Any characteristic can be Notable.", } RaceTraitNotableEducation = &RaceTrait{ Name: "Notable Education", Description: "Some species are notably dextrous, intelligent, tough or strong. Characters from such races have a positive Dice Modifier when rolling for that characteristic (+2 unless otherwise specified), and their racial maximum for that characteristic is increased by the same amount. Any characteristic can be Notable.", } RaceTraitNotableSocialStanding = &RaceTrait{ Name: "Notable Social Standing", Description: "Some species are notably dextrous, intelligent, tough or strong. Characters from such races have a positive Dice Modifier when rolling for that characteristic (+2 unless otherwise specified), and their racial maximum for that characteristic is increased by the same amount. Any characteristic can be Notable.", } RaceTraitPsionic = &RaceTrait{ Name: "Psionic", Description: "All members of the species are Psionic, and may determine their Psionic Strength and talents at the start of character generation.", } RaceTraitSmall = &RaceTrait{ Name: "Small", Description: "Small species generally have a Strength and Endurance of only 1d6, and a Dexterity of 3d6. The minimum size for a sophont is about half that of a human, as smaller creatures lack the cranial capacity for sophont-level intelligence. This assumes that the species has a brain structure comparable to humans. Species with a more distributed neural structure, hive intelligences, or artificial intelligences can be even smaller.\n• Some Small creatures are described as Tiny. Attacks against Tiny creatures receive a –1 DM to hit.", } RaceTraitSlowMetabolism = &RaceTrait{ Name: "Slow Metabolism", Description: "Creatures with a slow metabolism require less food than most species, and their life supportcostsarehalved. 
In combat, slow-metabolism creatures suffer a –2 initiative penalty.", } RaceTraitUplifted = &RaceTrait{ Name: "Uplifted", Description: "This species was originally non-sentient, but has been raised to a higher intelligence by another species. Uplifted races generally become client species of their patron. Two common uplifted animals are apes and dolphins:\n• Uplifted apes have Notable Strength and Endurance (+2) but all other characteristics are Weak (–2). They have the Uplifted trait.\n• Uplifted dolphins have Notable Strength (+4) and Notable Endurance (+2) but Weak Intelligence, Education and Social Standing (–2). They have the Uplifted, Aquatic (fully aquatic, air-breathers) and No Fine Manipulators traits", } RaceTraitWeakStrength = &RaceTrait{ Name: "Weak Strength", Description: "The opposite of Notable (Characteristic), some species are weaker, less resilient or less well educated than others. Characters from such races have a negative Dice Modifier when rolling for that characteristic (–2 unless otherwise specified), and their racial maximum for that characteristic is decreased by the same amount. Any characteristic can be Weak.", } RaceTraitWeakDexterity = &RaceTrait{ Name: "Weak Dexterity", Description: "The opposite of Notable (Characteristic), some species are weaker, less resilient or less well educated than others. Characters from such races have a negative Dice Modifier when rolling for that characteristic (–2 unless otherwise specified), and their racial maximum for that characteristic is decreased by the same amount. Any characteristic can be Weak.", } RaceTraitWeakEndurance = &RaceTrait{ Name: "Weak Endurance", Description: "The opposite of Notable (Characteristic), some species are weaker, less resilient or less well educated than others. Characters from such races have a negative Dice Modifier when rolling for that characteristic (–2 unless otherwise specified), and their racial maximum for that characteristic is decreased by the same amount. 
Any characteristic can be Weak.", } RaceTraitWeakIntelligence = &RaceTrait{ Name: "Weak Intelligence", Description: "The opposite of Notable (Characteristic), some species are weaker, less resilient or less well educated than others. Characters from such races have a negative Dice Modifier when rolling for that characteristic (–2 unless otherwise specified), and their racial maximum for that characteristic is decreased by the same amount. Any characteristic can be Weak.", } RaceTraitWeakEducation = &RaceTrait{ Name: "Weak Education", Description: "The opposite of Notable (Characteristic), some species are weaker, less resilient or less well educated than others. Characters from such races have a negative Dice Modifier when rolling for that characteristic (–2 unless otherwise specified), and their racial maximum for that characteristic is decreased by the same amount. Any characteristic can be Weak.", } RaceTraitWeakSocialStanding = &RaceTrait{ Name: "Weak Social Standing", Description: "The opposite of Notable (Characteristic), some species are weaker, less resilient or less well educated than others. Characters from such races have a negative Dice Modifier when rolling for that characteristic (–2 unless otherwise specified), and their racial maximum for that characteristic is decreased by the same amount. Any characteristic can be Weak.", } ) type RaceTrait struct { Name string Description string }
pkg/game/characters/racetraits.go
0.574753
0.656982
racetraits.go
starcoder
package ast

import (
	"bytes"

	"github.com/cespare/xxhash/v2"

	"github.com/jensneuse/graphql-go-tools/internal/pkg/unsafebytes"
)

// Index is a struct to easily look up objects in a document, e.g. find Nodes (type/interface/union definitions) by name
type Index struct {
	// QueryTypeName is the name of the query type on the schema Node
	// schema { query: Query }
	QueryTypeName ByteSlice
	// MutationTypeName is the name of the mutation type on the schema Node
	// schema { mutation: Mutation }
	MutationTypeName ByteSlice
	// SubscriptionTypeName is the name of the subscription type on the schema Node
	// schema { subscription: Subscription }
	SubscriptionTypeName ByteSlice
	// nodes is a list of all root nodes in a schema definition
	// The map key is the result of the xxhash algorithm from the Node name.
	nodes map[uint64][]Node
	// ReplacedFragmentSpreads is a list of references (slice indices) of all FragmentSpreads that got replaced during normalization.
	ReplacedFragmentSpreads []int
	// MergedTypeExtensions is a list of Nodes (Node kind + reference) that got merged during type extension merging.
	MergedTypeExtensions []Node
}

// Reset empties the Index so it can be reused without reallocating.
// Slice capacities and map buckets are retained.
func (i *Index) Reset() {
	i.QueryTypeName = i.QueryTypeName[:0]
	i.MutationTypeName = i.MutationTypeName[:0]
	i.SubscriptionTypeName = i.SubscriptionTypeName[:0]
	i.ReplacedFragmentSpreads = i.ReplacedFragmentSpreads[:0]
	i.MergedTypeExtensions = i.MergedTypeExtensions[:0]
	for j := range i.nodes {
		delete(i.nodes, j)
	}
}

// AddNodeStr indexes node under the xxhash of name.
// Multiple nodes may share the same name (e.g. a type and its extensions).
func (i *Index) AddNodeStr(name string, node Node) {
	hash := xxhash.Sum64String(name)
	if i.nodes == nil {
		// Lazily initialize so a zero-value Index does not panic on first add.
		i.nodes = make(map[uint64][]Node)
	}
	// Appending to a nil slice allocates, so no exists-check is needed.
	i.nodes[hash] = append(i.nodes[hash], node)
}

// AddNodeBytes indexes node under the xxhash of name.
// It is the []byte counterpart of AddNodeStr.
func (i *Index) AddNodeBytes(name []byte, node Node) {
	hash := xxhash.Sum64(name)
	if i.nodes == nil {
		// Lazily initialize so a zero-value Index does not panic on first add.
		i.nodes = make(map[uint64][]Node)
	}
	i.nodes[hash] = append(i.nodes[hash], node)
}

// NodesByNameStr returns all nodes indexed under name and whether any exist.
func (i *Index) NodesByNameStr(name string) ([]Node, bool) {
	hash := xxhash.Sum64String(name)
	node, exists := i.nodes[hash]
	return node, exists
}

// FirstNodeByNameStr returns the first node indexed under name, if any.
func (i *Index) FirstNodeByNameStr(name string) (Node, bool) {
	hash := xxhash.Sum64String(name)
	node, exists := i.nodes[hash]
	if !exists || len(node) == 0 {
		return Node{}, false
	}
	return node[0], true
}

// NodesByNameBytes returns all nodes indexed under name and whether any exist.
func (i *Index) NodesByNameBytes(name []byte) ([]Node, bool) {
	hash := xxhash.Sum64(name)
	node, exists := i.nodes[hash]
	return node, exists
}

// FirstNodeByNameBytes returns the first node indexed under name, if any.
func (i *Index) FirstNodeByNameBytes(name []byte) (Node, bool) {
	hash := xxhash.Sum64(name)
	node, exists := i.nodes[hash]
	if !exists || len(node) == 0 {
		return Node{}, false
	}
	return node[0], true
}

// FirstNonExtensionNodeByNameBytes returns the first node indexed under name
// whose kind is not a type extension.
func (i *Index) FirstNonExtensionNodeByNameBytes(name []byte) (Node, bool) {
	hash := xxhash.Sum64(name)
	nodes, exists := i.nodes[hash]
	if !exists || len(nodes) == 0 {
		return Node{}, false
	}
	for j := range nodes {
		if nodes[j].IsExtensionKind() {
			continue
		}
		return nodes[j], true
	}
	return Node{}, false
}

// RemoveNodeByName removes all nodes indexed under name and clears the
// corresponding root operation type name if it matches.
func (i *Index) RemoveNodeByName(name []byte) {
	hash := xxhash.Sum64(name)
	delete(i.nodes, hash)
	if bytes.Equal(i.QueryTypeName, name) {
		i.QueryTypeName = nil
	}
	if bytes.Equal(i.MutationTypeName, name) {
		i.MutationTypeName = nil
	}
	if bytes.Equal(i.SubscriptionTypeName, name) {
		i.SubscriptionTypeName = nil
	}
}

// ReplaceNode swaps every occurrence of oldNode indexed under name with newNode.
func (i *Index) ReplaceNode(name []byte, oldNode Node, newNode Node) {
	nodes, ok := i.nodes[xxhash.Sum64(name)]
	if !ok {
		return
	}
	// NOTE: the loop variable was previously named "i", shadowing the receiver.
	for j := range nodes {
		if nodes[j].Kind != oldNode.Kind || nodes[j].Ref != oldNode.Ref {
			continue
		}
		nodes[j].Kind = newNode.Kind
		nodes[j].Ref = newNode.Ref
	}
}

// IsRootOperationTypeNameBytes reports whether typeName is one of the
// schema's query, mutation or subscription type names.
func (i *Index) IsRootOperationTypeNameBytes(typeName []byte) bool {
	if len(typeName) == 0 {
		return false
	}
	if bytes.Equal(i.QueryTypeName, typeName) {
		return true
	}
	if bytes.Equal(i.MutationTypeName, typeName) {
		return true
	}
	if bytes.Equal(i.SubscriptionTypeName, typeName) {
		return true
	}
	return false
}

// IsRootOperationTypeNameString is the string counterpart of
// IsRootOperationTypeNameBytes; it avoids allocating via unsafebytes.
func (i *Index) IsRootOperationTypeNameString(typeName string) bool {
	if typeName == "" {
		return false
	}
	if unsafebytes.BytesToString(i.QueryTypeName) == typeName {
		return true
	}
	if unsafebytes.BytesToString(i.MutationTypeName) == typeName {
		return true
	}
	if unsafebytes.BytesToString(i.SubscriptionTypeName) == typeName {
		return true
	}
	return false
}
pkg/ast/index.go
0.563378
0.405508
index.go
starcoder
package trie

import (
	"fmt"
	"io"
	"strings"
)

// treeNode provides an interface for node implementations
type treeNode interface {
	// Child returns the child node for the given rune or nil if else
	Child(r rune) treeNode
	// AddChild adds a new child to the node
	AddChild(r rune, n treeNode)
	// Remove removes an entry if it exists
	Remove(s string)
	// Value returns the value of the node or nil
	Value() string
	// SetValue sets the value of the node
	SetValue(s string)
	// PrettyPrint writes the tree to the given Writer
	PrettyPrint(w io.Writer, indent string)
}

// newMapNode creates a new mapNode
func newMapNode() treeNode {
	return &mapNode{children: make(map[rune]*mapNode)}
}

// mapNode uses a map to store its children
type mapNode struct {
	children map[rune]*mapNode
	value    string
}

// Child returns the child node for the given rune, or nil if none exists.
func (mn *mapNode) Child(r rune) treeNode {
	value, ok := mn.children[r]
	if !ok {
		return nil
	}
	return value
}

// Value returns the value stored at this node ("" if unset).
func (mn *mapNode) Value() string {
	return mn.value
}

// AddChild adds n as the child for r. An existing child is never replaced,
// and only *mapNode children are supported.
func (mn *mapNode) AddChild(r rune, n treeNode) {
	switch typedN := n.(type) {
	case *mapNode:
		if _, ok := mn.children[r]; !ok {
			// Prevent replacement of old child
			mn.children[r] = typedN
		}
	default:
		// Unsupported treeNode type
	}
}

// SetValue sets the value stored at this node.
func (mn *mapNode) SetValue(s string) {
	mn.value = s
}

// Remove removes the entry for s, pruning now-empty nodes along the path.
func (mn *mapNode) Remove(s string) {
	runes := []rune(s)
	if len(runes) == 0 {
		// Nothing is stored under the empty string; previously this
		// indexed runes[0] and panicked.
		return
	}
	value, ok := mn.children[runes[0]]
	if ok {
		if value.remove(mn, runes, 1) {
			delete(mn.children, runes[0])
		}
	}
}

// PrettyPrint writes the tree to the given Writer, using indent (default ". ")
// for each level of nesting.
func (mn *mapNode) PrettyPrint(w io.Writer, indent string) {
	if indent == "" {
		indent = ". "
	}
	mn.prettyPrint(w, "", 0, indent)
}

// prettyPrint recursively writes one line per node, labeled by the rune that
// leads to it ("root" for the top node).
func (mn *mapNode) prettyPrint(w io.Writer, key string, indentation int, indent string) {
	if key == "" {
		key = "root"
	}
	fmt.Fprintf(w, strings.Repeat(indent, indentation)+"%v\n", key)
	for r, child := range mn.children {
		child.prettyPrint(w, string(r), indentation+1, indent)
	}
}

// remove tries to remove entries recursively. It returns true when the
// receiver node should be deleted by its parent.
// previous is currently unused but kept for signature compatibility.
func (mn *mapNode) remove(previous *mapNode, runes []rune, position int) bool {
	if mn.value == string(runes) {
		// Found last node
		mn.value = ""
		return len(mn.children) == 0 // Remove me if i have no children
	}
	if position >= len(runes) {
		// The search string is exhausted without matching a stored value
		// (s is a proper prefix of a stored key); nothing was removed.
		// Previously this indexed runes[position] and panicked.
		return false
	}
	curKey := runes[position]
	value, ok := mn.children[curKey]
	position++
	if ok {
		if value.remove(mn, runes, position) { // Can i remove my child?
			delete(mn.children, curKey)
		}
	}
	// Only prune this node if it has neither children nor a value of its
	// own; previously a childless node holding a value could be deleted.
	return len(mn.children) == 0 && mn.value == ""
}
node.go
0.725162
0.496643
node.go
starcoder
package ast

// Visitor is an interface for the structs which are used for traversing the AST.
type Visitor interface {
	// Visit defines the process applied when a node is visited.
	// The returned Visitor is the next visitor to use for the node's children.
	// When wanting to stop visiting, return nil.
	Visit(n Node) Visitor
}

// Visit visits the tree rooted at n in depth-first, pre-order fashion.
// v.Visit is called on n first; if it returns nil the subtree is skipped,
// otherwise the returned visitor is applied recursively to every child node.
// Nil-able child fields (parameter types, return types, do-blocks) are only
// visited when non-nil.
func Visit(v Visitor, n Node) {
	if v = v.Visit(n); v == nil {
		return
	}
	switch n := n.(type) {
	// Toplevel
	case *Program:
		for _, t := range n.Toplevels {
			Visit(v, t)
		}
	case *Typedef:
		Visit(v, n.Type)
	case *EnumTypedef:
		for _, c := range n.Cases {
			Visit(v, c.Child)
		}
	case *Function:
		// Parameter types are optional (inferred when nil).
		for _, p := range n.Params {
			if p.Type != nil {
				Visit(v, p.Type)
			}
		}
		if n.RetType != nil {
			Visit(v, n.RetType)
		}
		for _, s := range n.Body {
			Visit(v, s)
		}
	// Type
	case *TypeInstantiate:
		for _, a := range n.Args {
			Visit(v, a)
		}
	case *RecordType:
		for _, f := range n.Fields {
			if f.Type != nil {
				Visit(v, f.Type)
			}
		}
	case *TupleType:
		for _, e := range n.Elems {
			if e != nil {
				Visit(v, e)
			}
		}
	case *FunctionType:
		for _, p := range n.ParamTypes {
			Visit(v, p)
		}
		Visit(v, n.RetType)
	case *TypeofType:
		Visit(v, n.Expr)
	case *ArrayType:
		Visit(v, n.Elem)
	case *DictType:
		Visit(v, n.Key)
		Visit(v, n.Value)
	// Pattern
	case *RecordPattern:
		for _, f := range n.Fields {
			Visit(v, f.Pattern)
		}
	case *ArrayPattern:
		for _, e := range n.Elems {
			Visit(v, e)
		}
	// Destructuring
	case *RecordDestructuring:
		for _, f := range n.Fields {
			Visit(v, f.Child)
		}
	// Statement
	case *VarDecl:
		for _, d := range n.Decls {
			Visit(v, d)
		}
		for _, e := range n.RHSExprs {
			Visit(v, e)
		}
	case *VarAssign:
		for _, a := range n.RHSExprs {
			Visit(v, a)
		}
	case *IndexAssign:
		Visit(v, n.Assignee)
		Visit(v, n.Index)
		Visit(v, n.RHS)
	case *RetStmt:
		for _, e := range n.Exprs {
			Visit(v, e)
		}
	case *IfStmt:
		Visit(v, n.Cond)
		for _, s := range n.Then {
			Visit(v, s)
		}
		for _, s := range n.Else {
			Visit(v, s)
		}
	case *SwitchStmt:
		for _, c := range n.Cases {
			Visit(v, c.Cond)
			for _, s := range c.Stmts {
				Visit(v, s)
			}
		}
		for _, s := range n.Else {
			Visit(v, s)
		}
	case *MatchStmt:
		Visit(v, n.Matched)
		for _, c := range n.Arms {
			Visit(v, c.Pattern)
			for _, s := range c.Stmts {
				Visit(v, s)
			}
		}
		for _, s := range n.Else {
			Visit(v, s)
		}
	case *ForEachStmt:
		Visit(v, n.Iterator)
		Visit(v, n.Range)
		for _, s := range n.Body {
			Visit(v, s)
		}
	case *WhileStmt:
		Visit(v, n.Cond)
		for _, s := range n.Body {
			Visit(v, s)
		}
	case *ExprStmt:
		Visit(v, n.Expr)
	// Expressions
	case *ArrayLiteral:
		for _, e := range n.Elems {
			Visit(v, e)
		}
	case *DictLiteral:
		for _, kv := range n.Elems {
			Visit(v, kv.Key)
			Visit(v, kv.Value)
		}
	case *UnaryExpr:
		Visit(v, n.Child)
	case *BinaryExpr:
		Visit(v, n.LHS)
		Visit(v, n.RHS)
	case *SeqExpr:
		for _, s := range n.Stmts {
			Visit(v, s)
		}
		Visit(v, n.LastExpr)
	case *IfExpr:
		Visit(v, n.Cond)
		Visit(v, n.Then)
		Visit(v, n.Else)
	case *SwitchExpr:
		for _, c := range n.Cases {
			Visit(v, c.Cond)
			Visit(v, c.Body)
		}
		Visit(v, n.Else)
	case *MatchExpr:
		Visit(v, n.Matched)
		for _, c := range n.Arms {
			Visit(v, c.Pattern)
			Visit(v, c.Body)
		}
		Visit(v, n.Else)
	case *CoerceExpr:
		Visit(v, n.Expr)
		Visit(v, n.Type)
	case *IndexAccess:
		Visit(v, n.Child)
		Visit(v, n.Index)
	case *FieldAccess:
		Visit(v, n.Child)
	case *RecordLiteral:
		for _, f := range n.Fields {
			Visit(v, f.Expr)
		}
	case *TupleLiteral:
		for _, e := range n.Elems {
			Visit(v, e)
		}
	case *FuncCall:
		Visit(v, n.Callee)
		for _, a := range n.Args {
			Visit(v, a)
		}
		if n.DoBlock != nil {
			Visit(v, n.DoBlock)
		}
	case *FuncCallNamed:
		Visit(v, n.Callee)
		for _, a := range n.Args {
			Visit(v, a.Expr)
		}
		if n.DoBlock != nil {
			Visit(v, n.DoBlock)
		}
	case *Lambda:
		for _, p := range n.Params {
			if p.Type != nil {
				Visit(v, p.Type)
			}
		}
		Visit(v, n.BodyExpr)
	}
	// Node kinds with no child nodes (literals, identifiers, ...) fall
	// through the switch and require no further traversal.
}
next/compiler/ast/visitor.go
0.540196
0.520679
visitor.go
starcoder
package gander

import (
	"fmt"
	"math"
	"sort"
)

// A Series represents a column of data in a DataFrame.
type Series struct {
	Name   string
	Values []float64
	// categoricalLabels maps the internal float code back to the original
	// string label; non-nil only for categorical series.
	categoricalLabels map[float64]string
	// categoricalValues maps a string label to its internal float code.
	categoricalValues map[string]float64
}

// NewSeries creates a new Series with the specified name and values.
// The values are copied, so the caller's slice is not aliased.
func NewSeries(name string, values []float64) *Series {
	s := Series{Name: name}
	s.Values = make([]float64, len(values))
	copy(s.Values, values)
	return &s
}

// NewCategoricalSeries create a new Series to contain categorical
// data. The data is passed in as a slice of strings. Internally
// the string values are converted to float64 and a map is maintained
// so that the original values can always be retrieved. No statistical
// operations can be carried out on a categorical series.
func NewCategoricalSeries(name string, values []string) *Series {
	categoryNumber := 0.0
	s := Series{Name: name}
	s.categoricalLabels = make(map[float64]string)
	s.categoricalValues = make(map[string]float64)
	s.Values = make([]float64, 0, len(values))
	for _, v := range values {
		if i, ok := s.categoricalValues[v]; ok {
			s.Values = append(s.Values, i)
		} else {
			// First time this label is seen: assign it the next code.
			s.Values = append(s.Values, categoryNumber)
			s.categoricalValues[v] = categoryNumber
			s.categoricalLabels[categoryNumber] = v
			categoryNumber++
		}
	}
	return &s
}

// Standardize scales the values in the Series to standard form
// (zero mean, unit standard deviation), in place.
func (s *Series) Standardize() {
	mu := s.Mean()
	sigma := s.StdDev()
	for i, v := range s.Values {
		s.Values[i] = (v - mu) / sigma
	}
}

// Sum adds together all the values in the Series.
func (s *Series) Sum() float64 {
	return sum(s.Values)
}

// Mean finds the mean of all the values in the Series.
// Returns NaN for an empty Series.
func (s *Series) Mean() float64 {
	return s.Sum() / float64(len(s.Values))
}

// Median finds the median of all the values in the Series.
// NOTE(review): panics on an empty Series.
func (s *Series) Median() float64 {
	v := s.Sorted()
	if len(v)%2 == 0 {
		return (v[(len(v)/2)-1] + v[len(v)/2]) / 2
	}
	return v[(len(v) / 2)]
}

// Mode finds the mode of all the values in the Series. This returns
// a slice of float64 because a Series could have more than one mode.
// The result is sorted ascending so the output is deterministic
// (map iteration order is random).
func (s *Series) Mode() []float64 {
	m := []float64{}
	c := count(s.Values)
	var maxCount int
	for _, v := range c {
		if v > maxCount {
			maxCount = v
		}
	}
	for k := range c {
		if c[k] == maxCount {
			m = append(m, k)
		}
	}
	sort.Float64s(m)
	return m
}

// Variance finds the (population) variance of the values in the Series.
func (s *Series) Variance() float64 {
	mu := s.Mean()
	sumOfSquares := sum(
		s.Apply(
			func(x float64) float64 {
				return math.Pow(x-mu, 2)
			}))
	return sumOfSquares / float64(len(s.Values))
}

// StdDev finds the standard deviation of the values in the Series.
func (s *Series) StdDev() float64 {
	return math.Sqrt(s.Variance())
}

// IsCategorical reports whether the Series holds categorical data.
func (s *Series) IsCategorical() bool {
	return s.categoricalLabels != nil
}

// Max returns the maximum value in the Series.
// NOTE(review): panics on an empty Series.
func (s *Series) Max() float64 {
	v := s.Sorted()
	return v[len(s.Values)-1]
}

// Min returns the minimum value in the Series.
// NOTE(review): panics on an empty Series.
func (s *Series) Min() float64 {
	v := s.Sorted()
	return v[0]
}

// Range returns the minimum and maximum values in the Series.
func (s *Series) Range() (float64, float64) {
	return s.Min(), s.Max()
}

// Apply applies a function to all values of the Series.
// It returns a new slice and does not affect the Series values.
func (s *Series) Apply(fn func(float64) float64) []float64 {
	r := make([]float64, 0, len(s.Values))
	for _, v := range s.Values {
		r = append(r, fn(v))
	}
	return r
}

// Transform applies a function to all values of the Series,
// changing them in place.
func (s *Series) Transform(fn func(float64) float64) {
	for i, v := range s.Values {
		s.Values[i] = fn(v)
	}
}

// Sorted returns a slice of the sorted values in a Series.
// It does not change the values of the Series itself.
func (s *Series) Sorted() []float64 {
	r := make([]float64, len(s.Values))
	copy(r, s.Values)
	sort.Float64s(r)
	return r
}

// Hist returns a map of values to counts for categorical data.
// It returns an error if the Series does not contain categorical data.
func (s *Series) Hist() (map[string]int, error) {
	if !s.IsCategorical() {
		return nil, fmt.Errorf("Series %s is not categorical", s.Name)
	}
	r := make(map[string]int)
	for _, v := range s.Values {
		// Zero value of int makes the exists-check unnecessary.
		r[s.categoricalLabels[v]]++
	}
	return r, nil
}

// Describe returns a summary of the statistical properties
// of all the Series.
func (s *Series) Describe() Summary {
	r := Summary{Name: s.Name}
	r.Mean = s.Mean()
	r.Median = s.Median()
	r.Mode = s.Mode()
	r.Min = s.Min()
	r.Max = s.Max()
	r.StdDev = s.StdDev()
	r.Variance = s.Variance()
	return r
}

// sum returns the total of all values in r.
func sum(r []float64) float64 {
	t := 0.0
	for _, v := range r {
		t += v
	}
	return t
}

// count returns a map from each distinct value in r to its frequency.
func count(r []float64) map[float64]int {
	m := map[float64]int{}
	for _, v := range r {
		m[v]++
	}
	return m
}

// dropRow removes the value at index r, shifting later values down.
func (s *Series) dropRow(r int) {
	s.Values = append(s.Values[:r], s.Values[r+1:]...)
}
0.791781
0.45181
series.go
starcoder
package sim

import (
	"fmt"

	"gonum.org/v1/gonum/mat"
)

// InitCond implements filter.InitCond
type InitCond struct {
	// state is a private copy of the initial state vector.
	state *mat.VecDense
	// cov is a private copy of the initial covariance matrix.
	cov *mat.SymDense
}

// NewInitCond creates new InitCond and returns it.
// Both state and cov are deep-copied, so the caller's values are not aliased.
func NewInitCond(state mat.Vector, cov mat.Symmetric) *InitCond {
	s := &mat.VecDense{}
	s.CloneFromVec(state)
	c := mat.NewSymDense(cov.Symmetric(), nil)
	c.CopySym(cov)
	return &InitCond{
		state: s,
		cov:   c,
	}
}

// State returns a copy of the initial state.
func (c *InitCond) State() mat.Vector {
	state := mat.NewVecDense(c.state.Len(), nil)
	state.CloneFromVec(c.state)
	return state
}

// Cov returns a copy of the initial covariance.
func (c *InitCond) Cov() mat.Symmetric {
	cov := mat.NewSymDense(c.cov.Symmetric(), nil)
	cov.CopySym(c.cov)
	return cov
}

// BaseModel is a basic model of a dynamical system
type BaseModel struct {
	// A is internal state matrix
	A *mat.Dense
	// B is control matrix
	B *mat.Dense
	// C is output state matrix
	C *mat.Dense
	// D is output control matrix
	D *mat.Dense
	// E is Disturbance matrix
	E *mat.Dense
}

// NewBaseModel creates a new BaseModel from the given system matrices and
// returns it. B, D and E may be nil when the system has no control input,
// feed-through or disturbance term, respectively.
// (The previous comment wrongly described this as a "falling ball" model.)
func NewBaseModel(A, B, C, D, E *mat.Dense) (*BaseModel, error) {
	return &BaseModel{A: A, B: B, C: C, D: D, E: E}, nil
}

// Propagate propagates internal state x of the system to the next step
// given an input vector u and process noise wd.
// (wd is process noise; a separate disturbance input z via E is not implemented yet.)
func (b *BaseModel) Propagate(x, u, wd mat.Vector) (mat.Vector, error) {
	nx, nu, _, _ := b.SystemDims()
	if u != nil && u.Len() != nu {
		return nil, fmt.Errorf("invalid input vector")
	}
	if x.Len() != nx {
		return nil, fmt.Errorf("invalid state vector")
	}
	out := new(mat.Dense)
	out.Mul(b.A, x)
	if u != nil && b.B != nil {
		outU := new(mat.Dense)
		outU.Mul(b.B, u)
		out.Add(out, outU)
	}
	if wd != nil && wd.Len() == nx {
		// TODO change _nx to _nz when switching to z and disturbance matrix implementation
		// outZ := new(mat.Dense)
		// TODO add E disturbance matrix
		// outZ.Mul(b.E, z)
		// out.Add(out, outZ)
		out.Add(out, wd)
	}
	return out.ColView(0), nil
}

// Observe observes the external (output) state of the system given internal
// state x and input u. wn is added to the output as measurement noise.
func (b *BaseModel) Observe(x, u, wn mat.Vector) (mat.Vector, error) {
	nx, nu, ny, _ := b.SystemDims()
	if u != nil && u.Len() != nu {
		return nil, fmt.Errorf("invalid input vector")
	}
	if x.Len() != nx {
		return nil, fmt.Errorf("invalid state vector")
	}
	out := new(mat.Dense)
	out.Mul(b.C, x)
	if u != nil && b.D != nil {
		outU := new(mat.Dense)
		outU.Mul(b.D, u)
		out.Add(out, outU)
	}
	if wn != nil && wn.Len() == ny {
		out.Add(out, wn)
	}
	return out.ColView(0), nil
}

// SystemDims returns internal state length (nx), input vector length (nu),
// external/observable/output state length (ny) and disturbance vector length (nz).
// nu and nz are zero when B or E respectively is nil.
func (b *BaseModel) SystemDims() (nx, nu, ny, nz int) {
	nx, _ = b.A.Dims()
	if b.B != nil {
		_, nu = b.B.Dims()
	}
	ny, _ = b.C.Dims()
	if b.E != nil {
		_, nz = b.E.Dims()
	}
	return nx, nu, ny, nz
}

// SystemMatrix returns a copy of the state propagation matrix A.
func (b *BaseModel) SystemMatrix() mat.Matrix {
	m := &mat.Dense{}
	m.CloneFrom(b.A)
	return m
}

// ControlMatrix returns a copy of the state propagation control matrix B
// (an empty matrix when B is nil).
func (b *BaseModel) ControlMatrix() mat.Matrix {
	m := &mat.Dense{}
	if b.B != nil {
		m.CloneFrom(b.B)
	}
	return m
}

// OutputMatrix returns a copy of the observation matrix C.
func (b *BaseModel) OutputMatrix() mat.Matrix {
	m := &mat.Dense{}
	m.CloneFrom(b.C)
	return m
}

// FeedForwardMatrix returns a copy of the observation control matrix D
// (an empty matrix when D is nil).
func (b *BaseModel) FeedForwardMatrix() mat.Matrix {
	m := &mat.Dense{}
	if b.D != nil {
		m.CloneFrom(b.D)
	}
	return m
}
sim/model.go
0.78789
0.483892
model.go
starcoder
package srcobj

import (
	"io"
	"strings"
)

// hardToAccessBinaryOperator renders "operand1 <operator> operand2".
type hardToAccessBinaryOperator struct {
	operator string
	operand1 Source
	operand2 Source
}

// Dump writes the binary expression to w. Trailing newlines produced by the
// left operand are stripped so the operator stays on the same line.
func (h hardToAccessBinaryOperator) Dump(w io.Writer) error {
	data := String(h.operand1)
	data = strings.TrimRight(data, "\n")
	if _, err := io.WriteString(w, data); err != nil {
		return err
	}
	if _, err := io.WriteString(w, h.operator); err != nil {
		return err
	}
	if err := h.operand2.Dump(w); err != nil {
		return err
	}
	return nil
}

// OperatorGE generates a greater-or-equal (>=) comparison.
func OperatorGE(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: ">=",
	}
}

// OperatorGT generates a greater-than (>) comparison.
func OperatorGT(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: ">",
	}
}

// OperatorLT generates a less-than (<) comparison.
func OperatorLT(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: "<",
	}
}

// OperatorLE generates a less-or-equal (<=) comparison.
func OperatorLE(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: "<=",
	}
}

// OperatorAssign generates an assignment (=).
func OperatorAssign(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: "=",
	}
}

// OperatorEq generates an equality (==) check.
func OperatorEq(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: "==",
	}
}

// OperatorNEq generates an inequality (!=) check.
func OperatorNEq(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: "!=",
	}
}

// OperatorAnd generates a logical AND (&&).
func OperatorAnd(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: "&&",
	}
}

// OperatorOr generates a logical OR (||).
func OperatorOr(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: "||",
	}
}

// OperatorBitAnd generates a bitwise AND (&).
func OperatorBitAnd(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: "&",
	}
}

// OperatorColon joins the operands with a colon (:).
func OperatorColon(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: ":",
	}
}

// OperatorAdd generates an addition (+).
func OperatorAdd(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: "+",
	}
}

// OperatorSub generates a subtraction (-).
func OperatorSub(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: "-",
	}
}

// OperatorDot joins the operands with a dot (.), e.g. a field access.
func OperatorDot(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: ".",
	}
}

// OperatorSemicolon joins the operands with a semicolon (;).
func OperatorSemicolon(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: ";",
	}
}

// OperatorInc generates an increment-assignment (+=).
func OperatorInc(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: "+=",
	}
}

// OperatorComma joins the operands with a comma (,).
func OperatorComma(op1, op2 Source) Source {
	return hardToAccessBinaryOperator{
		operand1: op1,
		operand2: op2,
		operator: ",",
	}
}

// hardToAccessUnaryOperator renders "<operator>operand".
type hardToAccessUnaryOperator struct {
	operator string
	operand  Source
}

// Dump writes the prefix operator followed by its operand to w.
func (uo hardToAccessUnaryOperator) Dump(w io.Writer) error {
	if _, err := io.WriteString(w, uo.operator); err != nil {
		return err
	}
	if err := uo.operand.Dump(w); err != nil {
		return err
	}
	return nil
}

// unaryAccess constructs a prefix-operator Source.
func unaryAccess(operator string, operand Source) Source {
	return hardToAccessUnaryOperator{
		operator: operator,
		operand:  operand,
	}
}

// Ref generates an address-of (&) expression.
func Ref(operand Source) Source {
	return unaryAccess("&", operand)
}

// Deref generates a dereference (*) expression.
func Deref(operand Source) Source {
	return unaryAccess("*", operand)
}

// OperatorNot generates a logical negation (!) expression.
func OperatorNot(operand Source) Source {
	return unaryAccess("!", operand)
}
internal/generator/gogen/internal/srcobj/operators.go
0.558327
0.411554
operators.go
starcoder
package datetime

import (
	"errors"
	"time"
)

// A Week is a main package data type. It holds the seven UTC dates of one
// ISO-8601 week together with its year and week number.
type Week struct {
	Days   []time.Time
	Year   int
	Number int
}

// NewWeek constructs new Week entity from given parameters (year and
// ISO-8601-compatible week number).
func NewWeek(params ...int) (*Week, error) {
	if len(params) < 2 {
		return &Week{}, errors.New("NewWeek(): too few arguments, specify year and number of week")
	}
	if params[0] < 0 {
		return &Week{}, errors.New("NewWeek(): year can't be less than zero")
	}
	if params[1] < 1 || params[1] > 53 {
		return &Week{}, errors.New("NewWeek(): number of week can't be less than 1 or greater than 53")
	}

	week := initWeek(params...)

	// Scan forward from January 1st until we land inside ISO week 1 of
	// the requested year.
	day := 1
	cur := time.Date(week.Year, 1, day, 0, 0, 0, 0, time.UTC)
	isoYear, isoWeek := cur.ISOWeek()
	for isoYear != week.Year && isoWeek > 1 {
		day++
		cur = time.Date(week.Year, 1, day, 0, 0, 0, 0, time.UTC)
		isoYear, isoWeek = cur.ISOWeek()
	}

	// Step back to the Monday that opens ISO week 1.
	for cur.Weekday() > time.Monday {
		day--
		cur = time.Date(week.Year, 1, day, 0, 0, 0, 0, time.UTC)
	}

	// Jump to the first day of the requested week, backing up if the jump
	// overshot into a later calendar year.
	cur = cur.AddDate(0, 0, (week.Number-1)*7)
	for cur.Year() > isoYear {
		cur = cur.AddDate(0, 0, -7)
	}

	// Collect all seven dates of the week (UTC, so adding 24h per day is
	// safe from DST shifts).
	for i := 0; i < 7; i++ {
		week.Days = append(week.Days, cur.Add(time.Duration(i)*24*time.Hour))
	}
	return &week, nil
}

// Next calculates and returns information (year, week number and dates)
// about the next week.
func (week *Week) Next() (*Week, error) {
	year, number := week.Year, week.Number+1
	if number > 53 {
		year, number = week.Year+1, 1
	}
	return NewWeek(year, number)
}

// Previous calculates and returns information (year, week number and dates)
// about the previous week.
func (week *Week) Previous() (*Week, error) {
	year, number := week.Year, week.Number-1
	if number < 1 {
		year, number = week.Year-1, 53
	}
	return NewWeek(year, number)
}

// initWeek builds a Week holding just the year and week number.
func initWeek(params ...int) Week {
	return Week{
		Year:   params[0],
		Number: params[1],
	}
}
utils/datetime/week.go
0.646572
0.48749
week.go
starcoder
package detest

import (
	"fmt"
	"reflect"
	"unsafe"
)

// StructComparer implements comparison of struct values.
type StructComparer struct {
	with func(*StructTester)
}

// Struct takes a function which will be called to do further comparisons of
// the struct's contents. Note that you must pass a struct _pointer_ to this
// method if you want to access private fields with the StructComparer.Field()
// method.
func (d *D) Struct(with func(*StructTester)) StructComparer {
	return StructComparer{with}
}

// StructTester is the struct that will be passed to the test function passed
// to detest.Struct. This struct implements the struct-specific testing methods
// such as Idx() and AllValues().
type StructTester struct {
	d *D
}

// Compare compares the struct value in d.Actual() by calling the function
// passed to `Struct()`, which is in turn expected to further tests of the
// struct's content. Both plain structs and non-nil struct pointers are
// accepted; anything else records a failure.
func (sc StructComparer) Compare(d *D) {
	v := reflect.ValueOf(d.Actual())
	d.PushPath(d.NewPath(describeTypeOfReflectValue(v), 1, "detest.(*D).Struct"))
	defer d.PopPath()

	// BUG FIX: the old condition rejected every pointer (Kind() != Struct
	// is true for Ptr), contradicting the documented contract that a
	// struct pointer must be passed for private-field access.
	isStruct := v.IsValid() &&
		(v.Kind() == reflect.Struct ||
			(v.Kind() == reflect.Ptr && v.Elem().Kind() == reflect.Struct))
	if !isStruct {
		d.AddResult(result{
			actual: newValue(d.Actual()),
			pass:   false,
			where:  inDataStructure,
			op:     ".",
			description: fmt.Sprintf(
				"Called detest.Struct() but the value being tested isn't a struct, it's %s",
				articleize(describeTypeOfReflectValue(v)),
			),
		})
		return
	}

	st := &StructTester{d: d}
	sc.with(st)
}

// Field takes a field name and an expected value for that field. If the field
// does not exist, this is considered a failure.
func (st *StructTester) Field(field string, expect interface{}) {
	v := reflect.ValueOf(st.d.Actual())
	st.d.PushPath(st.d.NewPath(fmt.Sprintf(".%v", field), 0, ""))
	defer st.d.PopPath()

	v2 := v
	if v.Kind() == reflect.Ptr {
		// A dereferenced pointer is already addressable, which is what
		// the unsafe private-field access below requires.
		v2 = v.Elem()
	} else if v.Kind() == reflect.Struct {
		// This is a hack to be able to get private fields from plain
		// structs: copy the original Value into an addressable Value.
		v2 = reflect.New(v.Type()).Elem()
		v2.Set(v)
	}

	f := v2.FieldByName(field)
	if !f.IsValid() {
		st.d.AddResult(result{
			actual:      newValue(st.d.Actual()),
			pass:        false,
			where:       inDataStructure,
			op:          fmt.Sprintf(".%s", field),
			description: "Attempted to get a struct field that does not exist",
		})
		return
	}

	// Re-create the value at the same address to bypass the read
	// restriction on unexported fields.
	f = reflect.NewAt(f.Type(), unsafe.Pointer(f.UnsafeAddr())).Elem()

	st.d.PushActual(f.Interface())
	defer st.d.PopActual()

	if c, ok := expect.(Comparer); ok {
		c.Compare(st.d)
	} else {
		st.d.Equal(expect).Compare(st.d)
	}
}
pkg/detest/struct.go
0.76921
0.501526
struct.go
starcoder
package commands const initializeConfirmationText = ` You are about to initialize a major-version upgrade of Greenplum. This should be done only during a downtime window. gpupgrade initialize will perform a series of steps, including: - Check disk space - Create the target cluster - Run pg_upgrade consistency checks gpupgrade log files can be found on all hosts in %s gpupgrade initialize will use these values from %s source_gphome: %s target_gphome: %s mode: %s disk_free_ratio: %.1f use_hba_hostnames: %t source_master_port: %d temp_port_range: %s hub_port: %d agent_port: %d You will still have the opportunity to revert the cluster to its original state after this step. WARNING: Do not perform operations on the cluster until gpupgrade is finalized or reverted. Before proceeding, ensure the following have occurred: - Take a backup of the source Greenplum cluster - Generate and execute the data migration "pre-initialize" scripts - Run gpcheckcat to ensure the source catalog has no inconsistencies - Run gpstate -e to ensure the source cluster's segments are up and in preferred roles To suppress this summary, use the --automatic | -a flag. ` const executeConfirmationText = ` You are about to run the "execute" command for a major-version upgrade of Greenplum. This should be done only during a downtime window. gpupgrade execute will perform a series of steps, including: - Upgrade master - Upgrade primary segments gpupgrade log files can be found on all hosts in %s You will still have the opportunity to revert the cluster to its original state after this step. WARNING: Do not perform operations on the source cluster until gpupgrade is finalized or reverted. ` const finalizeConfirmationText = ` You are about to finalize a major-version upgrade of Greenplum. This should be done only during a downtime window. 
gpupgrade finalize will perform a series of steps, including: - Update target master catalog - Update data directories - Update target master configuration files - Upgrade standby master - Upgrade mirror segments gpupgrade log files can be found on all hosts in %s WARNING: You will not be able to revert the cluster to its original state after this step. WARNING: Do not perform operations on the source and target clusters until gpupgrade is finalized or reverted. ` const revertConfirmationText = ` You are about to revert this upgrade. This should be done only during a downtime window. gpupgrade revert will perform a series of steps, including: - Delete target cluster data directories - Delete state directories on the segments - Delete master state directory - Archive log directories - Restore source cluster - Start source cluster gpupgrade log files can be found on all hosts in %s WARNING: Do not perform operations on the source and target clusters until gpupgrade revert has completed. `
cli/commands/confirmation_text.go
0.612078
0.555435
confirmation_text.go
starcoder
package cgo

// This file implements a parser of a subset of the C language, just enough to
// parse common #define statements to Go constant expressions.

import (
	"fmt"
	"go/ast"
	"go/scanner"
	"go/token"
	"strings"
)

// parseConst parses the given string as a C constant.
// pos is the position of the first character of value inside fset, used for
// error reporting.
func parseConst(pos token.Pos, fset *token.FileSet, value string) (ast.Expr, *scanner.Error) {
	t := newTokenizer(pos, fset, value)
	expr, err := parseConstExpr(t)
	// NOTE(review): when trailing tokens remain, the "unexpected token"
	// error takes precedence over any error from parseConstExpr.
	if t.token != token.EOF {
		return nil, &scanner.Error{
			Pos: t.fset.Position(t.pos),
			Msg: "unexpected token " + t.token.String(),
		}
	}
	return expr, err
}

// parseConstExpr parses a stream of C tokens to a Go expression.
// Supported forms are parenthesized expressions and single literals
// (int, float, string, char); anything else is an error.
func parseConstExpr(t *tokenizer) (ast.Expr, *scanner.Error) {
	switch t.token {
	case token.LPAREN:
		lparen := t.pos
		t.Next()
		x, err := parseConstExpr(t)
		if err != nil {
			return nil, err
		}
		if t.token != token.RPAREN {
			return nil, unexpectedToken(t, token.RPAREN)
		}
		expr := &ast.ParenExpr{
			Lparen: lparen,
			X:      x,
			Rparen: t.pos,
		}
		t.Next()
		return expr, nil
	case token.INT, token.FLOAT, token.STRING, token.CHAR:
		expr := &ast.BasicLit{
			ValuePos: t.pos,
			Kind:     t.token,
			Value:    t.value,
		}
		t.Next()
		return expr, nil
	case token.EOF:
		return nil, &scanner.Error{
			Pos: t.fset.Position(t.pos),
			Msg: "empty constant",
		}
	default:
		return nil, &scanner.Error{
			Pos: t.fset.Position(t.pos),
			Msg: fmt.Sprintf("unexpected token %s", t.token),
		}
	}
}

// unexpectedToken returns an error of the form "unexpected token FOO, expected
// BAR".
func unexpectedToken(t *tokenizer, expected token.Token) *scanner.Error {
	return &scanner.Error{
		Pos: t.fset.Position(t.pos),
		Msg: fmt.Sprintf("unexpected token %s, expected %s", t.token, expected),
	}
}

// tokenizer reads C source code and converts it to Go tokens.
type tokenizer struct {
	// pos is the position of the current token within fset.
	pos  token.Pos
	fset *token.FileSet
	// token and value describe the current token; buf holds the not-yet-
	// consumed remainder of the input.
	token token.Token
	value string
	buf   string
}

// newTokenizer initializes a new tokenizer, positioned at the first token in
// the string.
func newTokenizer(start token.Pos, fset *token.FileSet, buf string) *tokenizer {
	t := &tokenizer{
		pos:   start,
		fset:  fset,
		buf:   buf,
		token: token.ILLEGAL,
	}
	t.Next() // Parse the first token.
	return t
}

// Next consumes the next token in the stream. There is no return value, read
// the next token from the pos, token and value properties.
func (t *tokenizer) Next() {
	// Advance pos past the token consumed on the previous call.
	t.pos += token.Pos(len(t.value))
	for {
		if len(t.buf) == 0 {
			t.token = token.EOF
			return
		}
		c := t.buf[0]
		switch {
		case c == ' ' || c == '\f' || c == '\n' || c == '\r' || c == '\t' || c == '\v':
			// Skip whitespace.
			// Based on this source, not sure whether it represents C whitespace:
			// https://en.cppreference.com/w/cpp/string/byte/isspace
			t.pos++
			t.buf = t.buf[1:]
		case c == '(' || c == ')':
			// Single-character tokens.
			switch c {
			case '(':
				t.token = token.LPAREN
			case ')':
				t.token = token.RPAREN
			}
			t.value = t.buf[:1]
			t.buf = t.buf[1:]
			return
		case c >= '0' && c <= '9':
			// Numeric constant (int, float, etc.).
			// Find the last non-numeric character.
			tokenLen := len(t.buf)
			hasDot := false
			for i, c := range t.buf {
				if c == '.' {
					hasDot = true
				}
				if (c >= '0' && c <= '9') || c == '.' || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') {
					tokenLen = i + 1
				}
			}
			t.value = t.buf[:tokenLen]
			t.buf = t.buf[tokenLen:]
			if hasDot {
				// Integer constants are more complicated than this but this is
				// a close approximation.
				// https://en.cppreference.com/w/cpp/language/integer_literal
				t.token = token.FLOAT
				t.value = strings.TrimRight(t.value, "f")
			} else {
				t.token = token.INT
				t.value = strings.TrimRight(t.value, "uUlL")
			}
			return
		case c == '"':
			// String constant. Find the first '"' character that is not
			// preceded by a backslash.
			escape := false
			tokenLen := len(t.buf)
			for i, c := range t.buf {
				if i != 0 && c == '"' && !escape {
					tokenLen = i + 1
					break
				}
				if !escape {
					escape = c == '\\'
				}
			}
			t.token = token.STRING
			t.value = t.buf[:tokenLen]
			t.buf = t.buf[tokenLen:]
			return
		case c == '\'':
			// Char (rune) constant. Find the first '\'' character that is not
			// preceded by a backslash.
			escape := false
			tokenLen := len(t.buf)
			for i, c := range t.buf {
				if i != 0 && c == '\'' && !escape {
					tokenLen = i + 1
					break
				}
				if !escape {
					escape = c == '\\'
				}
			}
			t.token = token.CHAR
			t.value = t.buf[:tokenLen]
			t.buf = t.buf[tokenLen:]
			return
		default:
			// NOTE(review): t.value is deliberately left untouched here and
			// t.buf is not consumed, so callers must stop on ILLEGAL (as
			// parseConstExpr does) or they would loop forever.
			t.token = token.ILLEGAL
			return
		}
	}
}
cgo/const.go
0.693784
0.527256
const.go
starcoder
package tree import "fmt" var limit = 3 // maximum amount of childs for each nodes type Tree struct { root *Node } type Node struct { parent *Node child []*Node data Data } type Data struct { Id int } type Result struct { Parent int Child []int Data Data } var treeIndex map[int]*Node func addNode (parent *Node, data Data) *Node { return &Node{parent, make([]*Node, limit), data} } func (tree *Tree) Insert(data Data) (node *Node) { treeIndex = make(map[int]*Node) if tree.root == nil { node = addNode(nil, data) treeIndex[data.Id] = node tree.root = node return } return } func (node *Node) SetAllNodes(results []*Result) { var newNode, currentNode *Node for _, result := range results { if result.Parent != 0 { currentNode = node.Search(result.Parent) } else { currentNode = node } newNode = currentNode.Insert(result.Data) treeIndex[result.Data.Id] = newNode } } func (node *Node) findFreeNode() *Node { stack := make([]*Node, 0, 32) stack = append(stack, node) // push root to stack var n *Node = nil for stack != nil { n, stack = stack[0], stack[1:] //pop first node for index := 0; index < len(n.child); index++ { if (n.child[index] == nil) { return n } stack = append(stack, n.child[index]) // push } } return nil } func (node *Node) Insert(data Data) (newNode *Node) { for i := 0; i< len(node.child); i++ { if (node.child[i] == nil) { newNode = addNode(node, data) treeIndex[data.Id] = newNode node.child[i] = newNode return } } freeNode := node.findFreeNode() newNode = freeNode.Insert(data) return } func (node *Node) Print() { if node == nil { return } fmt.Print(node.data) fmt.Print("\n") for i := 0; i < len(node.child); i++ { node.child[i].Print() } } func (node *Node) Search(id int) (n *Node) { n = treeIndex[id] if n == nil { return nil } return } func (node *Node) GetData() (result *Result) { var childs []int = make([]int, 0) for _, value := range node.child { if value != nil { childs = append(childs, value.data.Id) } else { childs = append(childs, 0) } } var parentNode int = 
0 if node.parent != nil { parentNode = node.parent.data.Id } return &Result{ Parent: parentNode, Child: childs, Data: node.data, } } func getLevel(nodes []*Node) (results []*Node) { for _, value := range nodes { for index := 0; index < len(value.child); index++ { if (value.child[index] == nil) { continue } results = append(results, value.child[index]) } } return } func (node *Node) GetNodes(level int) (results []*Result) { nodes := make([]*Node, 0) nodes = append(nodes, node) results = append(results, node.GetData()) for index := 0; index < level; index++ { nodes = getLevel(nodes) for _, value := range nodes { results = append(results, value.GetData()) } } return }
tree.go
0.55254
0.505005
tree.go
starcoder
package reflection import ( "reflect" ) // NewValue creates a reflection wrapper around given value. func NewValue(v interface{}) Value { return Value{ RVal: reflect.ValueOf(v), } } // Value represents every value in parens. type Value struct { RVal reflect.Value } // To converts the value to requested kind if possible. func (val *Value) To(kind reflect.Kind) (interface{}, error) { switch kind { case reflect.Int, reflect.Int64: return val.ToInt64() case reflect.Float64: return val.ToFloat64() case reflect.String: return val.ToString() case reflect.Bool: return val.ToBool() case reflect.Interface: return val.RVal.Interface(), nil default: return nil, ErrConversionImpossible } } // ToInt64 attempts converting the value to int64. func (val *Value) ToInt64() (int64, error) { if val.isInt() { return val.RVal.Int(), nil } else if val.isFloat() { return int64(val.RVal.Float()), nil } return 0, ErrConversionImpossible } // ToFloat64 attempts converting the value to float64. func (val *Value) ToFloat64() (float64, error) { if val.isFloat() { return val.RVal.Float(), nil } else if val.isInt() { return float64(val.RVal.Int()), nil } return 0, ErrConversionImpossible } // ToBool attempts converting the value to bool. func (val *Value) ToBool() (bool, error) { if isKind(val.RVal, reflect.Bool) { return val.RVal.Bool(), nil } return false, ErrConversionImpossible } // ToString attempts converting the value to bool. func (val *Value) ToString() (string, error) { if isKind(val.RVal, reflect.String) { return val.RVal.String(), nil } return "", ErrConversionImpossible } func (val *Value) isInt() bool { return isKind(val.RVal, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64) } func (val *Value) isFloat() bool { return isKind(val.RVal, reflect.Float32, reflect.Float64) } func isKind(rval reflect.Value, kinds ...reflect.Kind) bool { for _, kind := range kinds { if rval.Kind() == kind { return true } } return false }
reflection/value.go
0.776665
0.458652
value.go
starcoder
package iso20022 // Details about tax paid, or to be paid, to the government in accordance with the law, including pre-defined parameters such as thresholds and type of account. type TaxInformation4 struct { // Party on the credit side of the transaction to which the tax applies. Creditor *TaxParty1 `xml:"Cdtr,omitempty"` // Identifies the party on the debit side of the transaction to which the tax applies. Debtor *TaxParty2 `xml:"Dbtr,omitempty"` // Ultimate party that owes an amount of money to the (ultimate) creditor, in this case, to the taxing authority. UltimateDebtor *TaxParty2 `xml:"UltmtDbtr,omitempty"` // Territorial part of a country to which the tax payment is related. AdministrationZone *Max35Text `xml:"AdmstnZone,omitempty"` // Tax reference information that is specific to a taxing agency. ReferenceNumber *Max140Text `xml:"RefNb,omitempty"` // Method used to indicate the underlying business or how the tax is paid. Method *Max35Text `xml:"Mtd,omitempty"` // Total amount of money on which the tax is based. TotalTaxableBaseAmount *ActiveOrHistoricCurrencyAndAmount `xml:"TtlTaxblBaseAmt,omitempty"` // Total amount of money as result of the calculation of the tax. TotalTaxAmount *ActiveOrHistoricCurrencyAndAmount `xml:"TtlTaxAmt,omitempty"` // Date by which tax is due. Date *ISODate `xml:"Dt,omitempty"` // Sequential number of the tax report. SequenceNumber *Number `xml:"SeqNb,omitempty"` // Record of tax details. 
Record []*TaxRecord1 `xml:"Rcrd,omitempty"` } func (t *TaxInformation4) AddCreditor() *TaxParty1 { t.Creditor = new(TaxParty1) return t.Creditor } func (t *TaxInformation4) AddDebtor() *TaxParty2 { t.Debtor = new(TaxParty2) return t.Debtor } func (t *TaxInformation4) AddUltimateDebtor() *TaxParty2 { t.UltimateDebtor = new(TaxParty2) return t.UltimateDebtor } func (t *TaxInformation4) SetAdministrationZone(value string) { t.AdministrationZone = (*Max35Text)(&value) } func (t *TaxInformation4) SetReferenceNumber(value string) { t.ReferenceNumber = (*Max140Text)(&value) } func (t *TaxInformation4) SetMethod(value string) { t.Method = (*Max35Text)(&value) } func (t *TaxInformation4) SetTotalTaxableBaseAmount(value, currency string) { t.TotalTaxableBaseAmount = NewActiveOrHistoricCurrencyAndAmount(value, currency) } func (t *TaxInformation4) SetTotalTaxAmount(value, currency string) { t.TotalTaxAmount = NewActiveOrHistoricCurrencyAndAmount(value, currency) } func (t *TaxInformation4) SetDate(value string) { t.Date = (*ISODate)(&value) } func (t *TaxInformation4) SetSequenceNumber(value string) { t.SequenceNumber = (*Number)(&value) } func (t *TaxInformation4) AddRecord() *TaxRecord1 { newValue := new (TaxRecord1) t.Record = append(t.Record, newValue) return newValue }
TaxInformation4.go
0.764012
0.53959
TaxInformation4.go
starcoder
package modelapi import "github.com/heustis/tsp-solver-go/graph" // PointGraph is the API representation of a single point in a graph. // It references its neighbors by name, in an array, to avoid circular references and have consistent field names in its JSON representation. type PointGraph struct { Id string `json:"id" validate:"required,min=1"` // Validator/v10 does not support `unique` with nil values in the array, see validate_test.go, so the array does not use pointers. // Once that is supported Neighbors can be converted to []*PointGraphNeighbor. Neighbors []PointGraphNeighbor `json:"neighbors" validate:"required,min=1,unique=Id,dive,required"` } // PointGraphNeighbor is a neighboring point to a PointGraph point. // Its id must correspond to the id of a point in the request's array of PointGraphs. // The distance is the distance from the PointGraph point to the point with the id, this may be asymmetrical. type PointGraphNeighbor struct { Id string `json:"id" validate:"required,min=1"` Distance float64 `json:"distance" validate:"required,min=0"` } // ToGraph converts an API request into a graph. func (api *TspRequest) ToGraph() *graph.Graph { vertices := []*graph.GraphVertex{} // This map deduplicates vertices (by ID), and prevents repeat processing of vertices. vertexMap := make(map[string]*graph.GraphVertex) for _, vApi := range api.PointsGraph { var v *graph.GraphVertex // Ensure each vertex is created only once; re-use the vertex if it was created while processing adjacent vertices of an earlier vertex. if existing, okay := vertexMap[vApi.Id]; okay { v = existing } else { v = graph.NewGraphVertex(vApi.Id) vertexMap[vApi.Id] = v } vertices = append(vertices, v) // Create one vertex for each adjacent vertex, unless that vertex already exists, in which case re-use it. 
for _, n := range vApi.Neighbors { if adj, okay := vertexMap[n.Id]; okay { v.AddAdjacentVertex(adj, n.Distance) } else { adj = graph.NewGraphVertex(n.Id) vertexMap[n.Id] = adj v.AddAdjacentVertex(adj, n.Distance) } } } for k := range vertexMap { delete(vertexMap, k) } return graph.NewGraph(vertices) } // ToApiFromGraph converts a graph into an API response. func ToApiFromGraph(g *graph.Graph) *TspRequest { api := &TspRequest{ PointsGraph: []*PointGraph{}, } for _, v := range g.GetVertices() { vApi := &PointGraph{ Id: v.GetId(), Neighbors: make([]PointGraphNeighbor, 0, len(v.GetAdjacentVertices())), } for adj, distance := range v.GetAdjacentVertices() { vApi.Neighbors = append(vApi.Neighbors, PointGraphNeighbor{ Id: adj.GetId(), Distance: distance, }) } api.PointsGraph = append(api.PointsGraph, vApi) } return api }
modelapi/pointgraph.go
0.846356
0.510496
pointgraph.go
starcoder
package automaton import ( "fmt" "github.com/gzg1984/golucene/core/util" "sort" "unicode" ) // util/automaton/Automaton.java /* Represents an automaton and all its states and transitions. States are integers and must be created using {@link #createState}. Mark a state as an accept state using {@link #setAccept}. Add transitions using {@link #addTransition}. Each state must have all of its transitions added at once; if this is too restrictive then use {@link Automaton.Builder} instead. State 0 is always the initial state. Once a state is finished, either because you've starting adding transitions to another state or you call {@link #finishState}, then that states transitions are sorted (first by min, then max, then dest) and reduced (transitions with adjacent labels going to the same dest are combined). */ type Automaton struct { curState int states []int // 2x transitions []int // 3x isAccept *util.OpenBitSet deterministic bool } func newEmptyAutomaton() *Automaton { return &Automaton{ deterministic: true, curState: -1, isAccept: util.NewOpenBitSet(), } } func (a *Automaton) String() string { return fmt.Sprintf("{curState=%v,states=%v,transitions=%v,isAccept=%v,%v}", a.curState, a.states, a.transitions, a.isAccept, a.deterministic) } /* Create a new state. */ func (a *Automaton) createState() int { state := len(a.states) / 2 a.states = append(a.states, -1, 0) return state } /* Set or clear this state as an accept state. */ func (a *Automaton) setAccept(state int, accept bool) { assert2(state < a.numStates(), "state=%v is out of bounds (numStates=%v)", state, a.numStates()) if accept { a.isAccept.Set(int64(state)) } else { a.isAccept.Clear(int64(state)) } } /* Sugar to get all transitions for all states. This is object-heavy; it's better to iterate state by state instead. 
*/ func (a *Automaton) sortedTransitions() [][]*Transition { numStates := a.numStates() transitions := make([][]*Transition, numStates) for s := 0; s < numStates; s++ { numTransitions := a.numTransitions(s) transitions[s] = make([]*Transition, numTransitions) for t := 0; t < numTransitions; t++ { transition := newTransition() a.transition(s, t, transition) transitions[s][t] = transition } } return transitions } /* Returns true if this state is an accept state. */ func (a *Automaton) IsAccept(state int) bool { return a.isAccept.Get(int64(state)) } /* Add a new transition with min = max = label. */ func (a *Automaton) addTransition(source, dest, label int) { a.addTransitionRange(source, dest, label, label) } /* Add a new transition with the specified source, dest, min, max. */ func (a *Automaton) addTransitionRange(source, dest, min, max int) { assert(len(a.transitions)%3 == 0) assert2(source < a.numStates(), "source=%v is out of bounds (maxState is %v)", source, a.numStates()-1) assert2(dest < a.numStates(), "dest=%v is out of bounds (maxState is %v)", dest, a.numStates()-1) if a.curState != source { if a.curState != -1 { a.finishCurrentState() } // move to next source: a.curState = source assert2(a.states[2*a.curState] == -1, "from state (%v) already had transitions added", source) assert(a.states[2*a.curState+1] == 0) a.states[2*a.curState] = len(a.transitions) } a.transitions = append(a.transitions, dest, min, max) // increment transition count for this state a.states[2*a.curState+1]++ } /* Add a [virtual] epsilon transition between source and dest. Dest state must already have all transitions added because this method simply copies those same transitions over to source. 
*/ func (a *Automaton) addEpsilon(source, dest int) { t := newTransition() count := a.initTransition(dest, t) for i := 0; i < count; i++ { a.nextTransition(t) a.addTransitionRange(source, t.dest, t.min, t.max) } if a.IsAccept(dest) { a.setAccept(source, true) } } /* Copies over all state/transition from other. The state numbers are sequentially assigned (appended). */ func (a *Automaton) copy(other *Automaton) { // bulk copy and then fixup the state pointers stateOffset := a.numStates() a.states = append(a.states, other.states...) for i := 0; i < len(other.states); i += 2 { if a.states[stateOffset*2+i] != -1 { a.states[stateOffset*2+i] += len(a.transitions) } } otherAcceptState := other.isAccept for state := otherAcceptState.NextSetBit(0); state != -1; state = otherAcceptState.NextSetBit(state + 1) { a.setAccept(stateOffset+int(state), true) } // bulk copy and then fixup dest for each transition transOffset := len(a.transitions) a.transitions = append(a.transitions, other.transitions...) for i := 0; i < len(other.transitions); i += 3 { a.transitions[transOffset+i] += stateOffset } if !other.deterministic { a.deterministic = false } } /* Freezes the last state, sorting and reducing the transitions. 
*/ func (a *Automaton) finishCurrentState() { numTransitions := a.states[2*a.curState+1] assert(numTransitions > 0) offset := a.states[2*a.curState] start := offset / 3 util.NewInPlaceMergeSorter(destMinMaxSorter(a.transitions)).Sort(start, start+numTransitions) // reduce any "adjacent" transitions: upto, min, max, dest := 0, -1, -1, -1 for i := 0; i < numTransitions; i++ { tDest := a.transitions[offset+3*i] tMin := a.transitions[offset+3*i+1] tMax := a.transitions[offset+3*i+2] if dest == tDest { if tMin <= max+1 { if tMax > max { max = tMax } } else { if dest != -1 { a.transitions[offset+3*upto] = dest a.transitions[offset+3*upto+1] = min a.transitions[offset+3*upto+2] = max upto++ } min, max = tMin, tMax } } else { if dest != -1 { a.transitions[offset+3*upto] = dest a.transitions[offset+3*upto+1] = min a.transitions[offset+3*upto+2] = max upto++ } dest, min, max = tDest, tMin, tMax } } if dest != -1 { // last transition a.transitions[offset+3*upto] = dest a.transitions[offset+3*upto+1] = min a.transitions[offset+3*upto+2] = max upto++ } a.transitions = a.transitions[:len(a.transitions)-(numTransitions-upto)*3] a.states[2*a.curState+1] = upto // sort transitions by min/max/dest: util.NewInPlaceMergeSorter(minMaxDestSorter(a.transitions)).Sort(start, start+upto) if a.deterministic && upto > 1 { lastMax := a.transitions[offset+2] for i := 1; i < upto; i++ { min = a.transitions[offset+3*i+1] if min <= lastMax { a.deterministic = false break } lastMax = a.transitions[offset+3*i+2] } } } /* Finishes the current state; call this once you are done adding transitions for a state. This is automatically called if you start adding transitions to a new source state, but for the last state you add, you need to call this method yourself. */ func (a *Automaton) finishState() { if a.curState != -1 { a.finishCurrentState() a.curState = -1 } } /* How many states this automaton has. 
*/ func (a *Automaton) numStates() int { return len(a.states) / 2 } /* How many transitions this state has. */ func (a *Automaton) numTransitions(state int) int { if count := a.states[2*state+1]; count != -1 { return count } return 0 } type destMinMaxSorter []int func (s destMinMaxSorter) Len() int { panic("niy") } func (s destMinMaxSorter) Swap(i, j int) { iStart, jStart := 3*i, 3*j for n := 0; n < 3; n++ { s[iStart+n], s[jStart+n] = s[jStart+n], s[iStart+n] } } func (s destMinMaxSorter) Less(i, j int) bool { iStart := 3 * i jStart := 3 * j // first dest: iDest := s[iStart] jDest := s[jStart] if iDest < jDest { return true } else if iDest > jDest { return false } // then min: iMin := s[iStart+1] jMin := s[jStart+1] if iMin < jMin { return true } else if iMin > jMin { return false } // then max: iMax := s[iStart+2] jMax := s[jStart+2] return iMax < jMax } type minMaxDestSorter []int func (s minMaxDestSorter) Len() int { panic("niy") } func (s minMaxDestSorter) Swap(i, j int) { iStart, jStart := 3*i, 3*j for n := 0; n < 3; n++ { s[iStart+n], s[jStart+n] = s[jStart+n], s[iStart+n] } } func (s minMaxDestSorter) Less(i, j int) bool { iStart, jStart := 3*i, 3*j iMin, jMin := s[iStart+1], s[jStart+1] if iMin < jMin { return true } else if iMin > jMin { return false } iMax, jMax := s[iStart+2], s[jStart+2] if iMax < jMax { return true } else if iMax > jMax { return false } iDest, jDest := s[iStart], s[jStart] return iDest < jDest } /* Initialize the provided Transition to iterate through all transitions leaving the specified state. You must call nextTransition() to get each transition. Returns the number of transitions leaving this tate. 
*/ func (a *Automaton) initTransition(state int, t *Transition) int { assert2(state < a.numStates(), "state=%v nextState=%v", state, a.numStates()) t.source = state t.transitionUpto = a.states[2*state] return a.numTransitions(state) } /* Iterate to the next transition after the provided one */ func (a *Automaton) nextTransition(t *Transition) { // make sure there is still a transition left assert((t.transitionUpto + 3 - a.states[2*t.source]) <= 3*a.states[2*t.source+1]) t.dest = a.transitions[t.transitionUpto] t.min = a.transitions[t.transitionUpto+1] t.max = a.transitions[t.transitionUpto+2] t.transitionUpto += 3 } /* Fill the provided Transition with the index'th transition leaving the specified state. */ func (a *Automaton) transition(state, index int, t *Transition) { i := a.states[2*state] + 3*index t.source = state t.dest = a.transitions[i] t.min = a.transitions[i+1] t.max = a.transitions[i+2] } // L563 /* Returns sorted array of all interval start points. */ func (a *Automaton) startPoints() []int { pointset := make(map[int]bool) pointset[MIN_CODE_POINT] = true // fmt.Println("getStartPoints") for s := 0; s < len(a.states); s += 2 { trans := a.states[s] limit := trans + 3*a.states[s+1] // fmt.Printf(" state=%v trans=%v limit=%v\n", s/2, trans, limit) for trans < limit { min, max := a.transitions[trans+1], a.transitions[trans+2] // fmt.Printf(" min=%v\n", min) pointset[min] = true if max < unicode.MaxRune { pointset[max+1] = true } trans += 3 } } var points []int for m, _ := range pointset { points = append(points, m) } sort.Ints(points) return points } /* Performs lookup in transitions, assuming determinism. 
*/ func (a *Automaton) step(state, label int) int { assert(state >= 0) assert(label >= 0) if 2*state >= len(a.states) { return -1 // invalid state } trans := a.states[2*state] limit := trans + 3*a.states[2*state+1] // TODO binary search for trans < limit { dest, min, max := a.transitions[trans], a.transitions[trans+1], a.transitions[trans+2] if min <= label && label <= max { return dest } trans += 3 } return -1 } // Go doesn't have unicode.MinRune which should be 0 const MIN_CODE_POINT = 0 type AutomatonBuilder struct { transitions []int a *Automaton } func newAutomatonBuilder() *AutomatonBuilder { return &AutomatonBuilder{ a: newEmptyAutomaton(), } } func (b *AutomatonBuilder) addTransitionRange(source, dest, min, max int) { b.transitions = append(b.transitions, source, dest, min, max) } type srcMinMaxDestSorter []int func (s srcMinMaxDestSorter) Len() int { panic("niy") } func (s srcMinMaxDestSorter) Swap(i, j int) { iStart, jStart := 4*i, 4*j for n := 0; n < 4; n++ { s[iStart+n], s[jStart+n] = s[jStart+n], s[iStart+n] } } func (s srcMinMaxDestSorter) Less(i, j int) bool { iStart, jStart := 4*i, 4*j iSrc, jSrc := s[iStart], s[jStart] if iSrc < jSrc { return true } else if iSrc > jSrc { return false } iMin, jMin := s[iStart+2], s[jStart+2] if iMin < jMin { return true } else if iMin > jMin { return false } iMax, jMax := s[iStart+3], s[jStart+3] if iMax < jMax { return true } else if iMax > jMax { return false } iDest, jDest := s[iStart+1], s[jStart+1] return iDest < jDest } /* Compiles all added states and transitions into a new Automaton and returns it. 
*/ func (b *AutomatonBuilder) finish() *Automaton { // fmt.Printf("LA.Builder.finish: count=%v\n", len(b.transitions)/4) // fmt.Println("finish pending") util.NewInPlaceMergeSorter(srcMinMaxDestSorter(b.transitions)).Sort(0, len(b.transitions)/4) for upto := 0; upto < len(b.transitions); upto += 4 { b.a.addTransitionRange( b.transitions[upto], b.transitions[upto+1], b.transitions[upto+2], b.transitions[upto+3], ) } b.a.finishState() return b.a } func (b *AutomatonBuilder) createState() int { return b.a.createState() } func (b *AutomatonBuilder) setAccept(state int, accept bool) { b.a.setAccept(state, accept) } func (b *AutomatonBuilder) isAccept(state int) bool { return b.a.IsAccept(state) } func (b *AutomatonBuilder) copy(other *Automaton) { offset := b.a.numStates() otherNumStates := other.numStates() for s := 0; s < otherNumStates; s++ { newState := b.createState() b.setAccept(newState, other.IsAccept(s)) } t := newTransition() for s := 0; s < otherNumStates; s++ { count := other.initTransition(s, t) for i := 0; i < count; i++ { other.nextTransition(t) b.addTransitionRange(offset+s, offset+t.dest, t.min, t.max) } } }
core/util/automaton/automaton.go
0.711732
0.455744
automaton.go
starcoder
package histogram // Bin represents a histogram bin type Bin struct { Key int Count int } // Bin is a slice of bins type Bins []Bin // IsFull returns true if the bin is filled to capacity func (b Bin) IsFull(h Histogram) bool { return b.Count == h.BinCapacity } // Histogram is a structure composed of bins type Histogram struct { Bins Bins BinCapacity int } // Merge combines a second histogram and returns a MergedHistogram func (h1 Histogram) Merge(h2 Histogram) MergedHistogram { m := make(PairedBinsMap) m.InsertMasterBinCounts(h1.Bins) m.UpsertSlaveBinCounts(h2.Bins) return MergedHistogram{ BinPairs: m, BinCapacity: h1.BinCapacity, } } // PairedBin describes the counts of records from two different data sources for the same range type PairedBin struct { Key int CountFromMaster int CountFromSlave int } // IsFull returns true if the bin is filled to capacity func (p PairedBin) IsFull(capacity int) bool { return p.CountFromMaster == capacity && p.CountFromSlave == capacity } // DiffCount returns the difference between the slave source and the master source func (p PairedBin) DiffCount() int { return p.CountFromMaster - p.CountFromSlave } // PairedBinsMap describes a map of paired bins where the keys are the bin keys type PairedBinsMap map[int]*PairedBin // InsertMasterBinCounts adds all counts from the bins in the master source func (m PairedBinsMap) InsertMasterBinCounts(b Bins) []int { keys := make([]int, len(b)) for i, bin := range b { keys[i] = bin.Key m[bin.Key] = &PairedBin{ Key: bin.Key, CountFromMaster: bin.Count, CountFromSlave: 0, } } return keys } // UpsertSlaveBinCounts merges the slave counts with the master counts func (m PairedBinsMap) UpsertSlaveBinCounts(b Bins) []int { appended := 0 keys := make([]int, len(b)) for _, bin := range b { if p, ok := m[bin.Key]; ok { p.CountFromSlave = bin.Count } else { keys[appended] = bin.Key appended += appended m[bin.Key] = &PairedBin{ Key: bin.Key, CountFromMaster: 0, CountFromSlave: bin.Count, } } } return keys } 
// MergedHistogram is a structure composed of PairedBinsMap and the bin capacity type MergedHistogram struct { BinPairs PairedBinsMap BinCapacity int } // UnresolvedPairs returns the pairs that are not filled to capacity func (h MergedHistogram) UnresolvedPairs() []PairedBin { var s []PairedBin for _, b := range h.BinPairs { if !b.IsFull(h.BinCapacity) { s = append(s, *b) } } return s }
histogram/histogram.go
0.866175
0.694905
histogram.go
starcoder