code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package gofakeit
import (
"math"
rand "math/rand"
"strconv"
"strings"
"time"
"github.com/brianvoe/gofakeit/v6/data"
)
// CurrencyInfo is a struct of currency information
type CurrencyInfo struct {
	// Short is the abbreviated code, e.g. "USD" (see data set).
	Short string `json:"short" xml:"short"`
	// Long is the full currency name, e.g. "United States Dollar".
	Long string `json:"long" xml:"long"`
}
// Currency will generate a struct with random currency information
func Currency() *CurrencyInfo { return currency(globalFaker.Rand) }

// Currency will generate a struct with random currency information
func (f *Faker) Currency() *CurrencyInfo { return currency(f.Rand) }

// currency picks one index into the currency data set and returns the
// short/long pair stored at that index.
func currency(r *rand.Rand) *CurrencyInfo {
	currencies := data.Data["currency"]
	i := r.Intn(len(currencies["short"]))
	return &CurrencyInfo{
		Short: currencies["short"][i],
		Long:  currencies["long"][i],
	}
}
// CurrencyShort will generate a random short currency value
func CurrencyShort() string { return currencyShort(globalFaker.Rand) }

// CurrencyShort will generate a random short currency value
func (f *Faker) CurrencyShort() string { return currencyShort(f.Rand) }

// currencyShort pulls a random entry from the currency/short data set.
func currencyShort(r *rand.Rand) string { return getRandValue(r, []string{"currency", "short"}) }

// CurrencyLong will generate a random long currency name
func CurrencyLong() string { return currencyLong(globalFaker.Rand) }

// CurrencyLong will generate a random long currency name
func (f *Faker) CurrencyLong() string { return currencyLong(f.Rand) }

// currencyLong pulls a random entry from the currency/long data set.
func currencyLong(r *rand.Rand) string { return getRandValue(r, []string{"currency", "long"}) }

// Price will take in a min and max value and return a formatted price
func Price(min, max float64) float64 { return price(globalFaker.Rand, min, max) }

// Price will take in a min and max value and return a formatted price
func (f *Faker) Price(min, max float64) float64 { return price(f.Rand, min, max) }

// price draws a float between min and max (range semantics per
// float64Range) and floors it to two decimal places.
func price(r *rand.Rand, min, max float64) float64 {
	return math.Floor(float64Range(r, min, max)*100) / 100
}
// CreditCardInfo is a struct containing credit variables
type CreditCardInfo struct {
	Type   string `json:"type" xml:"type"`     // display name, e.g. "Visa"
	Number string `json:"number" xml:"number"` // card number digits as a string
	Exp    string `json:"exp" xml:"exp"`       // expiration in MM/YY form
	Cvv    string `json:"cvv" xml:"cvv"`       // security code digits
}
// CreditCard will generate a struct full of credit card information
func CreditCard() *CreditCardInfo { return creditCard(globalFaker.Rand) }

// CreditCard will generate a struct full of credit card information
func (f *Faker) CreditCard() *CreditCardInfo { return creditCard(f.Rand) }

// creditCard picks a single random card type and derives the display name,
// number and CVV length from that one type.
//
// Fixed: previously the type was re-rolled independently for Type, Number
// and Cvv, so the display name, number format and CVV length could all
// come from different card types.
func creditCard(r *rand.Rand) *CreditCardInfo {
	ccType := randomString(r, data.CreditCardTypes)
	info := data.CreditCards[ccType]
	return &CreditCardInfo{
		Type:   info.Display,
		Number: creditCardNumber(r, &CreditCardOptions{Types: []string{ccType}}),
		Exp:    creditCardExp(r),
		Cvv:    generate(r, strings.Repeat("#", int(info.Code.Size))),
	}
}
// CreditCardType will generate a random credit card type string
func CreditCardType() string { return creditCardType(globalFaker.Rand) }

// CreditCardType will generate a random credit card type string
func (f *Faker) CreditCardType() string { return creditCardType(f.Rand) }

// creditCardType returns the display name of a randomly chosen card type.
func creditCardType(r *rand.Rand) string {
	return data.CreditCards[randomString(r, data.CreditCardTypes)].Display
}

// CreditCardOptions is the options for credit card number
type CreditCardOptions struct {
	// Types restricts generation to these card type keys; empty means all.
	Types []string `json:"types"`
	Bins  []string `json:"bins"` // optional parameter of prepended numbers
	// Gaps inserts spaces into the number at the card type's gap positions.
	Gaps bool `json:"gaps"`
}

// CreditCardNumber will generate a random luhn credit card number
func CreditCardNumber(cco *CreditCardOptions) string { return creditCardNumber(globalFaker.Rand, cco) }

// CreditCardNumber will generate a random luhn credit card number
func (f *Faker) CreditCardNumber(cco *CreditCardOptions) string { return creditCardNumber(f.Rand, cco) }
// creditCardNumber builds a Luhn-valid card number for one of the
// requested types, optionally prefixed with a caller-supplied bin and
// optionally gapped with spaces.
func creditCardNumber(r *rand.Rand, cco *CreditCardOptions) string {
	if cco == nil {
		cco = &CreditCardOptions{}
	}
	if len(cco.Types) == 0 { // len() covers nil; explicit nil check was redundant
		cco.Types = data.CreditCardTypes
	}
	ccType := randomString(r, cco.Types)

	// Look up card info, falling back to a random known type when the
	// requested type is not recognized.
	cardInfo, ok := data.CreditCards[ccType]
	if !ok {
		ccType = randomString(r, data.CreditCardTypes)
		cardInfo = data.CreditCards[ccType]
	}

	// Pick a target length and a starting prefix (caller bin or type pattern).
	length := randomUint(r, cardInfo.Lengths)
	numStr := ""
	if len(cco.Bins) >= 1 {
		numStr = randomString(r, cco.Bins)
	} else {
		numStr = strconv.FormatUint(uint64(randomUint(r, cardInfo.Patterns)), 10)
	}
	// Fixed: guard the pad count — a bin longer than the card length made
	// strings.Repeat panic on a negative count.
	if pad := int(length) - len(numStr); pad > 0 {
		numStr += strings.Repeat("#", pad)
	}
	numStr = numerify(r, numStr)
	ui, _ := strconv.ParseUint(numStr, 10, 64)

	// Increment until the number passes the Luhn check (at most 10 steps).
	for !isLuhn(strconv.FormatUint(ui, 10)) {
		ui++
	}
	numStr = strconv.FormatUint(ui, 10)

	// Add gaps to number at the card type's display positions.
	if cco.Gaps {
		for i, spot := range cardInfo.Gaps {
			numStr = numStr[:(int(spot)+i)] + " " + numStr[(int(spot)+i):]
		}
	}
	return numStr
}
// CreditCardExp will generate a random credit card expiration date string
// Exp date will always be a future date
func CreditCardExp() string { return creditCardExp(globalFaker.Rand) }

// CreditCardExp will generate a random credit card expiration date string
// Exp date will always be a future date
func (f *Faker) CreditCardExp() string { return creditCardExp(f.Rand) }

// creditCardExp builds an "MM/YY" string: a zero-padded month and a
// two-digit year between next year and roughly nine years out
// (assumes randIntRange is inclusive of both bounds — TODO confirm).
func creditCardExp(r *rand.Rand) string {
	month := strconv.Itoa(randIntRange(r, 1, 12))
	if len(month) == 1 {
		month = "0" + month
	}
	// Two-digit year relative to the year 2000.
	var currentYear = time.Now().Year() - 2000
	return month + "/" + strconv.Itoa(randIntRange(r, currentYear+1, currentYear+10))
}

// CreditCardCvv will generate a random CVV number
// Its a string because you could have 017 as a cvv
func CreditCardCvv() string { return creditCardCvv(globalFaker.Rand) }

// CreditCardCvv will generate a random CVV number
// Its a string because you could have 017 as a cvv
func (f *Faker) CreditCardCvv() string { return creditCardCvv(f.Rand) }

// creditCardCvv returns three random digits.
func creditCardCvv(r *rand.Rand) string { return numerify(r, "###") }
// isLuhn check is used for checking if credit card is a valid luhn card.
// Any non-digit character makes the string invalid; an empty string sums
// to zero and is therefore reported valid.
func isLuhn(s string) bool {
	// Precomputed value of each digit after Luhn doubling (2d, minus 9
	// when the product exceeds 9).
	doubled := [...]int{0, 2, 4, 6, 8, 1, 3, 5, 7, 9}
	parity := len(s) % 2
	total := 0
	for i := 0; i < len(s); i++ {
		c := s[i]
		if c < '0' || c > '9' {
			return false
		}
		d := int(c - '0')
		// Every second digit, counted from the right, is doubled.
		if i%2 == parity {
			total += doubled[d]
		} else {
			total += d
		}
	}
	return total%10 == 0
}
// AchRouting will generate a 9 digit routing number
func AchRouting() string { return achRouting(globalFaker.Rand) }

// AchRouting will generate a 9 digit routing number
func (f *Faker) AchRouting() string { return achRouting(f.Rand) }

// achRouting returns nine uniform random digits.
// NOTE(review): no ABA checksum is applied, so the result is not
// necessarily a real-world-valid routing number.
func achRouting(r *rand.Rand) string { return numerify(r, "#########") }

// AchAccount will generate a 12 digit account number
func AchAccount() string { return achAccount(globalFaker.Rand) }

// AchAccount will generate a 12 digit account number
func (f *Faker) AchAccount() string { return achAccount(f.Rand) }

// achAccount returns twelve uniform random digits.
func achAccount(r *rand.Rand) string { return numerify(r, "############") }

// BitcoinAddress will generate a random bitcoin address consisting of numbers, upper and lower characters
func BitcoinAddress() string { return bitcoinAddress(globalFaker.Rand) }

// BitcoinAddress will generate a random bitcoin address consisting of numbers, upper and lower characters
func (f *Faker) BitcoinAddress() string { return bitcoinAddress(f.Rand) }

// bitcoinAddress prefixes "1" or "3" and appends 25-34 random characters
// generated by the password helper.
// NOTE(review): characters are presumably alphanumeric but not restricted
// to the base58 alphabet, so the address is not checksum-valid — confirm
// this is acceptable for fake data.
func bitcoinAddress(r *rand.Rand) string {
	return randomString(r, []string{"1", "3"}) + password(r, true, true, true, false, false, number(r, 25, 34))
}

// BitcoinPrivateKey will generate a random bitcoin private key base58 consisting of numbers, upper and lower characters
func BitcoinPrivateKey() string { return bitcoinPrivateKey(globalFaker.Rand) }

// BitcoinPrivateKey will generate a random bitcoin private key base58 consisting of numbers, upper and lower characters
func (f *Faker) BitcoinPrivateKey() string { return bitcoinPrivateKey(f.Rand) }

// bitcoinPrivateKey returns a 51-character WIF-style string: "5", then one
// of H/J/K, then 49 characters drawn from the base58 alphabet.
func bitcoinPrivateKey(r *rand.Rand) string {
	var b strings.Builder
	for i := 0; i < 49; i++ {
		b.WriteString(randCharacter(r, base58))
	}
	return "5" + randomString(r, []string{"H", "J", "K"}) + b.String()
}
func addPaymentLookup() {
AddFuncLookup("currency", Info{
Display: "Currency",
Category: "payment",
Description: "Random currency data set",
Example: `{short: "USD", long: "United States Dollar"}`,
Output: "map[string]string",
Generate: func(r *rand.Rand, m *MapParams, info *Info) (interface{}, error) {
return currencyShort(r), nil
},
})
AddFuncLookup("currencyshort", Info{
Display: "Currency Short",
Category: "payment",
Description: "Random currency abbreviated",
Example: "USD",
Output: "string",
Generate: func(r *rand.Rand, m *MapParams, info *Info) (interface{}, error) {
return currencyShort(r), nil
},
})
AddFuncLookup("currencylong", Info{
Display: "Currency Long",
Category: "payment",
Description: "Random currency",
Example: "United States Dollar",
Output: "string",
Generate: func(r *rand.Rand, m *MapParams, info *Info) (interface{}, error) {
return currencyLong(r), nil
},
})
AddFuncLookup("price", Info{
Display: "Price",
Category: "payment",
Description: "Random monitary price",
Example: "92.26",
Output: "float64",
Params: []Param{
{Field: "min", Display: "Min", Type: "float", Default: "0", Description: "Minimum price value"},
{Field: "max", Display: "Max", Type: "float", Default: "1000", Description: "Maximum price value"},
},
Generate: func(r *rand.Rand, m *MapParams, info *Info) (interface{}, error) {
min, err := info.GetFloat64(m, "min")
if err != nil {
return nil, err
}
max, err := info.GetFloat64(m, "max")
if err != nil {
return nil, err
}
return price(r, min, max), nil
},
})
AddFuncLookup("creditcard", Info{
Display: "Credit Card",
Category: "payment",
Description: "Random credit card data set",
Example: `{type: "Visa", number: "4136459948995369", exp: "01/21", cvv: "513"}`,
Output: "map[string]interface",
Generate: func(r *rand.Rand, m *MapParams, info *Info) (interface{}, error) {
return creditCard(r), nil
},
})
AddFuncLookup("creditcardtype", Info{
Display: "Credit Card Type",
Category: "payment",
Description: "Random credit card type",
Example: "Visa",
Output: "string",
Generate: func(r *rand.Rand, m *MapParams, info *Info) (interface{}, error) {
return creditCardType(r), nil
},
})
AddFuncLookup("creditcardnumber", Info{
Display: "Credit Card Number",
Category: "payment",
Description: "Random credit card number",
Example: "4136459948995369",
Output: "int",
Params: []Param{
{
Field: "types", Display: "Types", Type: "[]string", Default: "all",
Options: []string{"visa", "mastercard", "american-express", "diners-club", "discover", "jcb", "unionpay", "maestro", "elo", "hiper", "hipercard"},
Description: "A select number of types you want to use when generating a credit card number",
},
{Field: "bins", Display: "Bins", Type: "[]string", Optional: true, Description: "Optional list of prepended bin numbers to pick from"},
{Field: "gaps", Display: "Gaps", Type: "bool", Default: "false", Description: "Whether or not to have gaps in number"},
},
Generate: func(r *rand.Rand, m *MapParams, info *Info) (interface{}, error) {
types, err := info.GetStringArray(m, "types")
if err != nil {
return nil, err
}
if len(types) == 1 && types[0] == "all" {
types = []string{}
}
bins, _ := info.GetStringArray(m, "bins")
gaps, err := info.GetBool(m, "gaps")
if err != nil {
return nil, err
}
options := CreditCardOptions{
Types: types,
Gaps: gaps,
}
if len(bins) >= 1 {
options.Bins = bins
}
return creditCardNumber(r, &options), nil
},
})
AddFuncLookup("creditcardexp", Info{
Display: "Credit Card Exp",
Category: "payment",
Description: "Random credit card expiraction date",
Example: "01/21",
Output: "string",
Generate: func(r *rand.Rand, m *MapParams, info *Info) (interface{}, error) {
return creditCardExp(r), nil
},
})
AddFuncLookup("creditcardcvv", Info{
Display: "Credit Card CVV",
Category: "payment",
Description: "Random credit card number",
Example: "513",
Output: "string",
Generate: func(r *rand.Rand, m *MapParams, info *Info) (interface{}, error) {
return creditCardCvv(r), nil
},
})
AddFuncLookup("achrouting", Info{
Display: "ACH Routing Number",
Category: "payment",
Description: "Random 9 digit ach routing number",
Example: "513715684",
Output: "string",
Generate: func(r *rand.Rand, m *MapParams, info *Info) (interface{}, error) {
return achRouting(r), nil
},
})
AddFuncLookup("achaccount", Info{
Display: "ACH Account Number",
Category: "payment",
Description: "Random 12 digit ach account number",
Example: "491527954328",
Output: "string",
Generate: func(r *rand.Rand, m *MapParams, info *Info) (interface{}, error) {
return achAccount(r), nil
},
})
AddFuncLookup("bitcoinaddress", Info{
Display: "Bitcoin Address",
Category: "payment",
Description: "Random 26-35 characters representing a bitcoin address",
Example: "<KEY>",
Output: "string",
Generate: func(r *rand.Rand, m *MapParams, info *Info) (interface{}, error) {
return bitcoinAddress(r), nil
},
})
AddFuncLookup("bitcoinprivatekey", Info{
Display: "Bitcoin Private Key",
Category: "payment",
Description: "Random 51 characters representing a bitcoin private key",
Example: "<KEY>",
Output: "string",
Generate: func(r *rand.Rand, m *MapParams, info *Info) (interface{}, error) {
return bitcoinPrivateKey(r), nil
},
})
} | vendor/github.com/brianvoe/gofakeit/v6/payment.go | 0.7797 | 0.401043 | payment.go | starcoder |
package main
import (
"image"
"image/color"
"math"
"sort"
)
// byteQuad is an RGBA color with 8 bits per channel ([r, g, b, a]).
type byteQuad [4]uint8

// byteQuadPalette is a list of byteQuad colors searched as a palette.
type byteQuadPalette []byteQuad
// byte2dword widens an 8-bit channel value to the 16-bit range used by
// color.Color by replicating it into both bytes (0xAB -> 0xABAB).
func byte2dword(b uint8) uint32 {
	// d | d<<8 == d * 0x101 for any 8-bit d.
	return uint32(b) * 0x101
}
// RGBA implements color.Color by widening each 8-bit channel to 16 bits.
func (bq byteQuad) RGBA() (r, g, b, a uint32) {
	return byte2dword(bq[0]), byte2dword(bq[1]), byte2dword(bq[2]), byte2dword(bq[3])
}
// squareDiff returns the squared difference of two channel values.
func squareDiff(b1, b2 uint8) int {
	d := int(b1) - int(b2)
	return d * d
}
// candidaes (sic: "candidates" — name is used throughout the package, so
// it is kept) caches, per 15-bit RGB block (top 5 bits of each channel),
// the palette indices that can possibly be the nearest color for any
// color in that block. nil means "not computed yet".
var candidaes [32768][]int

// invalidateCandidates drops every cached candidate list; must be called
// whenever the palette changes.
func invalidateCandidates() {
	for i := range candidaes {
		candidaes[i] = nil
	}
}
// max returns the larger of x and y.
func max(x, y int) int {
	if y > x {
		return y
	}
	return x
}
// calcMaxDistance returns the largest possible absolute distance between
// channel value x and any value in the closed interval [y[0], y[1]].
func calcMaxDistance(x byte, y [2]byte) int {
	lo, hi := int(y[0]), int(y[1])
	v := int(x)
	switch {
	case v < lo:
		return hi - v // farthest point is the upper bound
	case v > hi:
		return v - lo // farthest point is the lower bound
	default:
		return max(hi-v, v-lo) // inside: farther of the two ends
	}
}
// calcMinDistance returns the smallest possible absolute distance between
// channel value x and any value in the closed interval [y[0], y[1]];
// zero when x lies inside the interval.
func calcMinDistance(x byte, y [2]byte) int {
	lo, hi := int(y[0]), int(y[1])
	v := int(x)
	switch {
	case v < lo:
		return lo - v
	case v > hi:
		return v - hi
	default:
		return 0
	}
}
// calculateCandidates returns the palette indices that could be the
// nearest entry for some color in c's 5-bit-per-channel block.
//
// Pass 1 finds minMaxError: the smallest worst-case squared distance from
// the block to any palette entry. Pass 2 keeps every entry whose
// best-case squared distance beats that bound.
func calculateCandidates(p byteQuadPalette, c byteQuad) []int {
	var result []int
	// Inclusive per-channel bounds of the block (top 5 bits fixed).
	bounds := [3][2]byte{{c[0] & 0xf8, c[0]&0xf8 + 7}, {c[1] & 0xf8, c[1]&0xf8 + 7}, {c[2] & 0xf8, c[2]&0xf8 + 7}}
	minMaxError := math.MaxInt64
	for _, pe := range p {
		dr := calcMaxDistance(pe[0], bounds[0])
		e := dr * dr
		if e > minMaxError {
			continue // already worse on the red channel alone
		}
		dg := calcMaxDistance(pe[1], bounds[1])
		db := calcMaxDistance(pe[2], bounds[2])
		e += dg*dg + db*db
		if e < minMaxError {
			minMaxError = e
		}
	}
	for i := range p {
		dr := calcMinDistance(p[i][0], bounds[0])
		e := dr * dr
		if e > minMaxError {
			continue
		}
		dg := calcMinDistance(p[i][1], bounds[1])
		db := calcMinDistance(p[i][2], bounds[2])
		e += dg*dg + db*db
		// NOTE(review): the strict < could, in a degenerate tie where an
		// entry's best case equals minMaxError, exclude every entry and
		// return an empty list — confirm <= is not required here.
		if e < minMaxError {
			result = append(result, i)
		}
	}
	return result
}
// fastIndex returns the index of the palette entry nearest to c by
// squared RGB distance, consulting the per-block candidate cache.
func (p byteQuadPalette) fastIndex(c byteQuad) int {
	// 15-bit cache key built from the top 5 bits of each channel.
	block := (int(c[0])&0xf8)<<7 + (int(c[1])&0xf8)<<2 + int(c[2])>>3
	if len(candidaes[block]) == 0 {
		candidaes[block] = calculateCandidates(p, c)
	}
	bestDiff := math.MaxInt64
	bestIndex := -1 // NOTE(review): stays -1 if the candidate list is empty — callers do not check
	for _, i := range candidaes[block] {
		t := p[i]
		diff := squareDiff(c[0], t[0])
		if diff > bestDiff {
			continue // red alone already exceeds the best distance
		}
		diff += squareDiff(c[1], t[1]) + squareDiff(c[2], t[2])
		if diff < bestDiff {
			bestDiff = diff
			bestIndex = i
		}
	}
	return bestIndex
}
// index adapts fastIndex to color.Color by truncating each 16-bit channel
// back to 8 bits; alpha is ignored.
func (p byteQuadPalette) index(c color.Color) int {
	var bq byteQuad
	r, g, b, _ := c.RGBA()
	bq[0] = uint8(r >> 8)
	bq[1] = uint8(g >> 8)
	bq[2] = uint8(b >> 8)
	return p.fastIndex(bq)
}
// histogramElement is one distinct color with its pixel count and the
// precomputed per-channel weight (channel value * quantity) used when
// averaging clusters.
type histogramElement struct {
	color    byteQuad
	quantity int64
	weight   [3]int64
}

// newHistogramElement builds a histogramElement for color (r,g,b) seen
// quantity times; alpha is fixed at 255.
func newHistogramElement(r, g, b int, quantity int64) (he histogramElement) {
	he.color[0] = uint8(r)
	he.color[1] = uint8(g)
	he.color[2] = uint8(b)
	he.color[3] = 255
	he.quantity = quantity
	for i := 0; i < 3; i++ {
		he.weight[i] = int64(he.color[i]) * he.quantity
	}
	return he
}

var (
	// colors counts the distinct RGB values seen so far.
	colors int64
	// histogram holds pixel counts per exact 8-bit RGB value.
	histogram [256][256][256]int64
	// histogramElements is the flattened non-zero histogram.
	histogramElements []histogramElement
)

// collectHistogram accumulates the pixel counts of i into the
// package-level histogram, tracking the distinct-color count.
// Alpha is ignored; channels are truncated to 8 bits.
func collectHistogram(i image.Image) {
	for y := i.Bounds().Min.Y; y < i.Bounds().Max.Y; y++ {
		for x := i.Bounds().Min.X; x < i.Bounds().Max.X; x++ {
			r, g, b, _ := i.At(x, y).RGBA()
			r >>= 8
			g >>= 8
			b >>= 8
			if histogram[r][g][b] == 0 {
				colors++
			}
			histogram[r][g][b]++
		}
	}
}
// calcCentroid returns the quantity-weighted average color of the cluster.
// NOTE(review): divides by the summed quantity — panics on an empty
// cluster; callers must never pass one.
func calcCentroid(cluster []histogramElement) byteQuad {
	var (
		weightSum   [3]int64
		quantitySum int64
		c           byteQuad
	)
	for _, he := range cluster {
		for i := 0; i < 3; i++ {
			weightSum[i] += he.weight[i]
		}
		quantitySum += he.quantity
	}
	for i := 0; i < 3; i++ {
		c[i] = uint8(weightSum[i] / quantitySum)
	}
	c[3] = 255
	return c
}

// optimizePalette runs one k-means-style refinement step: assign every
// histogram element to its nearest palette entry, then replace each entry
// by its cluster centroid, dropping entries whose cluster is empty.
// Returns the new palette and the clusters aligned with it by index.
func optimizePalette(p byteQuadPalette) ([]byteQuad, [][]histogramElement) {
	clusters := make([][]histogramElement, len(p))
	for _, he := range histogramElements {
		i := p.fastIndex(he.color)
		clusters[i] = append(clusters[i], he)
	}
	newPalette := make([]byteQuad, 0, len(p))
	newCluster := make([][]histogramElement, 0, len(p))
	for _, cluster := range clusters {
		if len(cluster) == 0 {
			continue
		}
		newPalette = append(newPalette, calcCentroid(cluster))
		newCluster = append(newCluster, cluster)
	}
	// The palette changed, so the cached candidate lists are stale.
	invalidateCandidates()
	return newPalette, newCluster
}

// divideCluster splits the cluster at color's value on the given channel
// index and returns the centroids of the two halves.
// NOTE(review): if every element falls on one side, calcCentroid panics on
// the empty half — confirm this cannot happen for the worst cluster.
func divideCluster(cluster []histogramElement, color byteQuad, index int) (byteQuad, byteQuad) {
	var c0, c1 []histogramElement
	for _, he := range cluster {
		if color[index] < he.color[index] {
			c0 = append(c0, he)
		} else {
			c1 = append(c1, he)
		}
	}
	return calcCentroid(c0), calcCentroid(c1)
}
// calcWorstColorIndex returns the channel (0=R, 1=G, 2=B) with the
// largest accumulated error; the first wins on ties.
func calcWorstColorIndex(clusterError *[3]int64) int {
	worst := 0
	for i := 1; i < 3; i++ {
		if clusterError[i] > clusterError[worst] {
			worst = i
		}
	}
	return worst
}
// calcWorstCluster returns the index of the cluster with the largest
// quantity-weighted squared error against its palette entry, and which
// RGB channel contributes most to that error.
func calcWorstCluster(p byteQuadPalette, clusters [][]histogramElement) (int, int) {
	worstError := int64(-1)
	worstClusterIndex := -1
	worstColorIndex := 0
	for i := range clusters {
		var clusterError [3]int64
		pe := p[i]
		for _, he := range clusters[i] {
			for j := 0; j < 3; j++ {
				clusterError[j] += int64(squareDiff(pe[j], he.color[j])) * he.quantity
			}
		}
		errSum := clusterError[0] + clusterError[1] + clusterError[2]
		if errSum > worstError {
			worstError = errSum
			worstClusterIndex = i
			worstColorIndex = calcWorstColorIndex(&clusterError)
		}
	}
	return worstClusterIndex, worstColorIndex
}

// populatePalette grows the palette by one entry: after a refinement pass
// it splits the worst cluster along its worst channel and replaces that
// entry with the two halves' centroids.
func populatePalette(p byteQuadPalette) []byteQuad {
	p, clusters := optimizePalette(p)
	worstClusterIndex, worstColorIndex := calcWorstCluster(p, clusters)
	c1, c2 := divideCluster(clusters[worstClusterIndex], p[worstClusterIndex], worstColorIndex)
	// The palette is about to change again, so drop the candidate cache.
	invalidateCandidates()
	p[worstClusterIndex] = c1
	return append(p, c2)
}
// calcBrightness returns the Rec. 601 luma of the color, used to order
// palette entries from darkest to brightest.
func calcBrightness(c byteQuad) float64 {
	const wr, wg, wb = 0.299, 0.587, 0.114
	return wr*float64(c[0]) + wg*float64(c[1]) + wb*float64(c[2])
}
// sortPalette orders palette entries from darkest to brightest luma.
func sortPalette(p []byteQuad) {
	sort.Slice(p, func(i, j int) bool { return calcBrightness(p[i]) < calcBrightness(p[j]) })
}

// createHistogramElements flattens the non-zero histogram entries into a
// slice preallocated with the known distinct-color count.
func createHistogramElements() []histogramElement {
	result := make([]histogramElement, 0, colors)
	for r := 0; r < 256; r++ {
		for g := 0; g < 256; g++ {
			for b := 0; b < 256; b++ {
				if histogram[r][g][b] != 0 {
					result = append(result, newHistogramElement(r, g, b, histogram[r][g][b]))
				}
			}
		}
	}
	return result
}

// getSeedPalette returns 64 colors spread over RGB space (4 levels per
// channel: 255, 63, 15, 3) as the starting point for optimization.
func getSeedPalette() byteQuadPalette {
	var result [64]byteQuad
	var r, g, b uint
	for r = 0; r < 4; r++ {
		for g = 0; g < 4; g++ {
			for b = 0; b < 4; b++ {
				index := r*16 + g*4 + b
				result[index][0] = uint8(255 >> (r * 2))
				result[index][1] = uint8(255 >> (g * 2))
				result[index][2] = uint8(255 >> (b * 2))
				result[index][3] = 255
			}
		}
	}
	return result[:]
}

// generatePerfectPalette returns every distinct color in the histogram;
// only used when there are few enough to fit the palette exactly.
func generatePerfectPalette() []byteQuad {
	result := make([]byteQuad, 0, colors)
	for r := 0; r < 256; r++ {
		for g := 0; g < 256; g++ {
			for b := 0; b < 256; b++ {
				if histogram[r][g][b] != 0 {
					result = append(result, byteQuad{uint8(r), uint8(g), uint8(b), 255})
				}
			}
		}
	}
	return result
}
// generateOptimizedPalette reduces the collected histogram to a 255-color
// palette: refine a coarse seed palette, grow it one entry at a time by
// splitting the worst cluster, then run final refinement passes.
func generateOptimizedPalette() []byteQuad {
	histogramElements = createHistogramElements()
	p := getSeedPalette()
	// Two refinement passes over the 64-color seed.
	p, _ = optimizePalette(p)
	p, _ = optimizePalette(p)
	// Grow until the palette holds exactly 255 entries (index 0 of the
	// final image palette is reserved separately).
	for len(p) != 255 {
		p = populatePalette(p)
	}
	// Final refinement passes.
	for i := 0; i < 3; i++ {
		p, _ = optimizePalette(p)
	}
	return p
}
// generatePalette builds the final palette: the exact colors when the
// image has fewer than 256 distinct colors, otherwise a 255-color
// optimized palette; either way sorted by brightness.
func generatePalette() []byteQuad {
	var result []byteQuad
	if colors < 256 {
		result = generatePerfectPalette()
	} else {
		result = generateOptimizedPalette()
	}
	sortPalette(result)
	return result
}

// lookupCacheElement memoizes one color -> palette index mapping.
type lookupCacheElement struct {
	c     color.Color
	index int
}

// cache is a direct-mapped color-lookup cache keyed by a 15-bit hash.
var cache [32768]lookupCacheElement

// invalidateCache marks every cache slot unused (index -1).
// NOTE(review): the zero value of a slot has index 0, which reads as a
// valid hit — confirm invalidateCache runs before the first cachedIndex
// call and after every palette change.
func invalidateCache() {
	for i := range cache {
		cache[i].index = -1
	}
}

// cachedIndex is p.index with a small direct-mapped cache, keyed by the
// low five bits of each 16-bit channel.
func cachedIndex(p byteQuadPalette, c color.Color) int {
	r, g, b, _ := c.RGBA()
	ci := (r&31)<<10 + (g&31)<<5 + b&31
	if (cache[ci].index != -1) && (cache[ci].c == c) {
		return cache[ci].index
	}
	cache[ci].index = p.index(c)
	cache[ci].c = c
	return cache[ci].index
}

// newPalette converts to color.Palette, reserving index 0 for the zero
// (fully transparent) color; the supplied entries start at index 1.
func newPalette(p []byteQuad) color.Palette {
	result := make([]color.Color, len(p)+1)
	for i := range p {
		result[i+1] = p[i]
	}
	var c byteQuad
	result[0] = c
	return result
}
func generatePalettedImage(i image.Image, p []byteQuad) *image.Paletted {
result := image.NewPaletted(image.Rect(0, 0, i.Bounds().Max.X-i.Bounds().Min.X, i.Bounds().Max.Y-i.Bounds().Min.Y), newPalette(p))
for y := i.Bounds().Min.Y; y < i.Bounds().Max.Y; y++ {
bi := result.Stride * y
for x := i.Bounds().Min.X; x < i.Bounds().Max.X; x++ {
result.Pix[bi+x] = uint8(cachedIndex(p, i.At(x, y)) + 1)
}
}
return result
} | quantize.go | 0.558568 | 0.469399 | quantize.go | starcoder |
package series
import (
"fmt"
"math"
"strings"
)
// boolElement is a Series element holding a bool with an explicit
// NaN (missing value) flag.
type boolElement struct {
	e   bool // the stored value; meaningless when nan is true
	nan bool // true when the element is missing (NaN)
}
// Set assigns e from an arbitrary value, interpreting strings
// ("true"/"t"/"1", "false"/"f"/"0", case-insensitive), the ints/floats
// 0 and 1, bools, and other Elements. Anything unrecognized — including
// the literal string "NaN" — marks the element as missing.
//
// Fixed idiom: uses `switch v := value.(type)` instead of repeated
// type assertions on value in every case.
func (e *boolElement) Set(value interface{}) {
	e.nan = false
	switch v := value.(type) {
	case string:
		if v == "NaN" {
			e.nan = true
			return
		}
		switch strings.ToLower(v) {
		case "true", "t", "1":
			e.e = true
		case "false", "f", "0":
			e.e = false
		default:
			e.nan = true
		}
	case int:
		switch v {
		case 1:
			e.e = true
		case 0:
			e.e = false
		default:
			e.nan = true
		}
	case float64:
		switch v {
		case 1:
			e.e = true
		case 0:
			e.e = false
		default:
			e.nan = true
		}
	case bool:
		e.e = v
	case Element:
		b, err := v.Bool()
		if err != nil {
			e.nan = true
			return
		}
		e.e = b
	default:
		e.nan = true
	}
}
// Copy returns a fresh Element carrying the same value/NaN state.
func (e boolElement) Copy() Element {
	if e.IsNA() {
		return &boolElement{nan: true}
	}
	return &boolElement{e: e.e}
}
// IsNA reports whether the element is missing (NaN).
// Fixed idiom: returns the flag directly instead of
// `if cond { return true }; return false`.
func (e boolElement) IsNA() bool {
	return e.nan
}
// Type returns the series type of this element (always Bool).
func (e boolElement) Type() Type {
	return Bool
}
// Val returns the element's value boxed as an ElementValue, or nil when
// the element is missing. (Removed the redundant bool(e.e) conversion —
// e.e is already a bool.)
func (e boolElement) Val() ElementValue {
	if e.IsNA() {
		return nil
	}
	return e.e
}
// String renders the value as "true"/"false", or "NaN" when missing.
func (e boolElement) String() string {
	if e.IsNA() {
		return "NaN"
	}
	if e.e {
		return "true"
	}
	return "false"
}
// Int converts the element to 1 (true) or 0 (false); a missing element
// returns an error. (Fixed idiom: `if e.e` instead of `if e.e == true`.)
func (e boolElement) Int() (int, error) {
	if e.IsNA() {
		return 0, fmt.Errorf("can't convert NaN to int")
	}
	if e.e {
		return 1, nil
	}
	return 0, nil
}
// Float converts the element to 1.0 (true) or 0.0 (false); a missing
// element becomes math.NaN().
func (e boolElement) Float() float64 {
	if e.IsNA() {
		return math.NaN()
	}
	if e.e {
		return 1.0
	}
	return 0.0
}
// Bool returns the stored bool; a missing element returns an error.
// (Removed the redundant bool(e.e) conversion.)
func (e boolElement) Bool() (bool, error) {
	if e.IsNA() {
		return false, fmt.Errorf("can't convert NaN to bool")
	}
	return e.e, nil
}
func (e boolElement) Eq(elem Element) bool {
b, err := elem.Bool()
if err != nil || e.IsNA() {
return false
}
return e.e == b
}
func (e boolElement) Neq(elem Element) bool {
b, err := elem.Bool()
if err != nil || e.IsNA() {
return false
}
return e.e != b
}
func (e boolElement) Less(elem Element) bool {
b, err := elem.Bool()
if err != nil || e.IsNA() {
return false
}
return !e.e && b
}
func (e boolElement) LessEq(elem Element) bool {
b, err := elem.Bool()
if err != nil || e.IsNA() {
return false
}
return !e.e || b
}
func (e boolElement) Greater(elem Element) bool {
b, err := elem.Bool()
if err != nil || e.IsNA() {
return false
}
return e.e && !b
}
func (e boolElement) GreaterEq(elem Element) bool {
b, err := elem.Bool()
if err != nil || e.IsNA() {
return false
}
return e.e || !b
} | vendor/github.com/kniren/gota/series/type-bool.go | 0.575349 | 0.519704 | type-bool.go | starcoder |
// verify is a simple example that shows how a verifiable map can be used to
// demonstrate inclusion.
package main
import (
"bytes"
"crypto"
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"math/big"
"path/filepath"
"github.com/golang/glog"
"github.com/google/trillian/experimental/batchmap"
"github.com/google/trillian/merkle"
"github.com/google/trillian/merkle/coniks"
)
const hash = crypto.SHA512_256
// Command-line flags controlling which map, key and tile layout to verify.
var (
	mapDir       = flag.String("map_dir", "", "Directory containing map tiles.")
	treeID       = flag.Int64("tree_id", 12345, "The ID of the tree. Used as a salt in hashing.")
	valueSalt    = flag.String("value_salt", "v1", "Some string that will be smooshed in with the generated value before hashing. Allows generated values to be deterministic but variable.")
	key          = flag.Int64("key", 0, "This is the seed for the key that will be looked up.")
	prefixStrata = flag.Int("prefix_strata", 1, "The number of strata of 8-bit strata before the final strata.")
)
// main derives the map key from the -key flag, loads the tiles on the
// path from leaf to root, and verifies the chain of commitments: the
// expected value must appear in the leaf tile, and each tile's computed
// root must match the leaf value stored in the tile above it.
func main() {
	flag.Parse()
	mapDir := filepath.Clean(*mapDir)
	if mapDir == "" {
		glog.Fatal("No output provided")
	}

	// Determine the key/value we expect to find.
	// Note that the map tiles do not contain raw values, but commitments to the values.
	// If the map needs to return the values to clients then it is recommended that the
	// map operator uses a Content Addressable Store to store these values.
	h := hash.New()
	h.Write([]byte(fmt.Sprintf("%d", *key)))
	keyPath := h.Sum(nil)

	expectedString := fmt.Sprintf("[%s]%d", *valueSalt, *key)
	expectedValueHash := coniks.Default.HashLeaf(*treeID, keyPath, []byte(expectedString))

	// Read the tiles required for this check from disk.
	tiles, err := getTilesForKey(mapDir, keyPath)
	if err != nil {
		glog.Exitf("couldn't load tiles: %v", err)
	}

	// Perform the verification.
	// 1) Start at the leaf tile and check the key/value.
	// 2) Compute the merkle root of the leaf tile
	// 3) Check the computed root matches that reported in the tile
	// 4) Check this root value is the key/value of the tile above.
	// 5) Rinse and repeat until we reach the tree root.
	hs2 := merkle.NewHStar2(*treeID, coniks.Default)
	needPath, needValue := keyPath, expectedValueHash
	for i := *prefixStrata; i >= 0; i-- {
		tile := tiles[i]
		// Check the prefix of what we are looking for matches the tile's path.
		if got, want := tile.Path, needPath[:len(tile.Path)]; !bytes.Equal(got, want) {
			glog.Fatalf("wrong tile found at index %d: got %x, want %x", i, got, want)
		}
		// Leaf paths within a tile are within the scope of the tile, so we can
		// drop the prefix from the expected path now we have verified it.
		needLeafPath := needPath[len(tile.Path):]

		// Identify the leaf we need, and convert all leaves to the format needed for hashing.
		var leaf *batchmap.TileLeaf
		hs2Leaves := make([]*merkle.HStar2LeafHash, len(tile.Leaves))
		for j, l := range tile.Leaves {
			if bytes.Equal(l.Path, needLeafPath) {
				leaf = l
			}
			hs2Leaves[j] = toHStar2(tile.Path, l)
		}

		// Confirm we found the leaf we needed, and that it had the value we expected.
		if leaf == nil {
			glog.Fatalf("couldn't find expected leaf %x in tile %x", needLeafPath, tile.Path)
		}
		if !bytes.Equal(leaf.Hash, needValue) {
			glog.Fatalf("wrong leaf value in tile %x, leaf %x: got %x, want %x", tile.Path, leaf.Path, leaf.Hash, needValue)
		}

		// Hash this tile given its leaf values, and confirm that the value we compute
		// matches the value reported in the tile.
		root, err := hs2.HStar2Nodes(tile.Path, 8*len(leaf.Path), hs2Leaves, nil, nil)
		if err != nil {
			glog.Fatalf("failed to hash tile %x: %v", tile.Path, err)
		}
		if !bytes.Equal(root, tile.RootHash) {
			glog.Fatalf("wrong root hash for tile %x: got %x, calculated %x", tile.Path, tile.RootHash, root)
		}

		// Make the next iteration of the loop check that the tile above this has the
		// root value of this tile stored as the value at the expected leaf index.
		needPath, needValue = tile.Path, root
	}

	// If we get here then we have proved that the value was correct and that the map
	// root commits to this value. Any other user with the same map root must see the
	// same value under the same key we have checked.
	glog.Infof("key %d found at path %x, with value '%s' (%x) committed to by map root %x", *key, keyPath, expectedString, expectedValueHash, needValue)
}
// getTilesForKey loads the tiles on the path from the root to the given
// leaf key, ordered root-first (tiles[0] is the root tile).
//
// Improvements: builds the tile file path with filepath.Join (the package
// already imports path/filepath) instead of string concatenation, and
// wraps underlying errors with %w so callers can unwrap them.
func getTilesForKey(mapDir string, key []byte) ([]*batchmap.Tile, error) {
	tiles := make([]*batchmap.Tile, *prefixStrata+1)
	for i := 0; i <= *prefixStrata; i++ {
		// Tile i is addressed by the first i bytes of the key.
		tilePath := key[:i]
		tileFile := filepath.Join(mapDir, fmt.Sprintf("path_%x", tilePath))
		in, err := ioutil.ReadFile(tileFile)
		if err != nil {
			return nil, fmt.Errorf("failed to read file %s: %w", tileFile, err)
		}
		tile := &batchmap.Tile{}
		if err := json.Unmarshal(in, tile); err != nil {
			return nil, fmt.Errorf("failed to parse tile in %s: %w", tileFile, err)
		}
		tiles[i] = tile
	}
	return tiles, nil
}
// toHStar2 converts a TileLeaf into the equivalent structure for HStar2.
func toHStar2(prefix []byte, l *batchmap.TileLeaf) *merkle.HStar2LeafHash {
// In hstar2 all paths need to be 256 bit (32 bytes)
leafIndexBs := make([]byte, 32)
copy(leafIndexBs, prefix)
copy(leafIndexBs[len(prefix):], l.Path)
return &merkle.HStar2LeafHash{
Index: new(big.Int).SetBytes(leafIndexBs),
LeafHash: l.Hash,
}
} | experimental/batchmap/cmd/verify/verify.go | 0.640636 | 0.501038 | verify.go | starcoder |
package sudogo
// A constraint is an added rule for solving the puzzle. This enables more complex puzzles with fewer givens.
// The following basic constraints are supported
// - A collection of cells sum up to a value
// - Diagonal lines: https://www.youtube.com/watch?v=Vc-FYo_nur4
// - Groups/cages: https://www.youtube.com/watch?v=2v6Lf3Q5AEo&t=885s
// - X/V: https://www.youtube.com/watch?v=9ATC_uBF8ow
// - Squares: https://www.youtube.com/watch?v=u6Le6f9d0KU&t=602s
// - Knights move: https://www.youtube.com/watch?v=3FMNh-_FNlk
// - Magic square: https://www.youtube.com/watch?v=La7Yg_rav24
// - A collection of cells sum up to the value of another cell
// - Path: https://www.youtube.com/watch?v=Vc-FYo_nur4
// - A collection of cells go from increasing to decreasing order (known direction or not)
// - Thermo(meter): https://www.youtube.com/watch?v=KTth49YrQVU
// - Circled ends: https://www.youtube.com/watch?v=Tpk3ga2T9Ps&t=159s
// - A collection of cells even & odd digits sum to same value
// - A cell cannot have the same value as a collection of cells
// - Kings move
// - Knights move: https://www.youtube.com/watch?v=hAyZ9K2EBF0
// - No repeats in group/age: https://www.youtube.com/watch?v=u6Le6f9d0KU&t=602s, https://www.youtube.com/watch?v=hAyZ9K2EBF0
// - Or constraint (multiple constraints can dictate which candidates are available)
// - Sum is square: https://www.youtube.com/watch?v=u6Le6f9d0KU&t=602s
// - And constraint
// - Circled ends & no duplicates: https://www.youtube.com/watch?v=Tpk3ga2T9Ps&t=159s
type Constraint interface {
	// Affects reports whether this constraint applies to the given cell.
	Affects(cell *Cell) bool
	// RemoveCandidates clears from remove any candidate values the
	// constraint rules out for cell given the current puzzle state.
	RemoveCandidates(cell *Cell, puzzle *Puzzle, remove *Candidates)
}

// A cell position on a puzzle.
type Position struct {
	Col int // zero-based column
	Row int // zero-based row
}
// ==================================================
// Constraint: Sum Value
// ==================================================

// A function which returns a value other cells should sum to given
// the cell being constrained and the puzzle. If there is no sum
// to constrain then 0 should be returned.
type ConstraintSumProvider func(cell *Cell, puzzle *Puzzle) int

// A constraint on a set of cells that states that set or relative cells
// should sum to a value.
type ConstraintSum struct {
	// The function which returns a value the cells need to sum to.
	Sum ConstraintSumProvider
	// The list of cells that are constrained to some sum. If this is nil then
	// all cells will be a part of the constraint.
	Cells *[]Position
	// Relative positions, taken as offsets from the constrained cell.
	// NOTE(review): exact interplay with Cells is decided by getCells,
	// which is defined elsewhere — confirm before relying on it.
	Relative *[]Position
}
// A sum provider which returns a constant value regardless of the cell
// or puzzle state.
func SumConstant(value int) ConstraintSumProvider {
	return func(cell *Cell, puzzle *Puzzle) int {
		return value
	}
}
// SumCell is a sum provider which returns the value (or largest remaining
// candidate) of the single cell at the given position; when relative is
// true the position is an offset from the constrained cell. An
// out-of-bounds position yields 0.
//
// Consistency: delegates to SumCells with a one-element position list —
// the previous hand-rolled body duplicated the same lookup logic.
func SumCell(pos Position, relative bool) ConstraintSumProvider {
	return SumCells([]Position{pos}, relative)
}
// A sum provider which returns the value (or largest candidate) of a cell
// at the given position
func SumCells(positions []Position, relative bool) ConstraintSumProvider {
return func(cell *Cell, puzzle *Puzzle) int {
value := 0
for _, pos := range positions {
col := pos.Col
row := pos.Row
if relative {
col += cell.Col
row += cell.Row
}
if puzzle.Contains(col, row) {
other := puzzle.Get(col, row)
value += other.MaxValue()
}
}
return value
}
}
// Affects reports whether cell is one of the positions in c.Cells
// (or any cell when c.Cells is nil).
func (c *ConstraintSum) Affects(cell *Cell) bool {
    return containsCell(cell, c.Cells, nil)
}

// RemoveCandidates keeps only those candidates of cell that can still
// complete the constrained group to the target sum, given the values and
// candidates of the other cells in the group.
func (c *ConstraintSum) RemoveCandidates(cell *Cell, puzzle *Puzzle, remove *Candidates) {
    target := c.Sum(cell, puzzle)
    // A target of 0 means "no sum to enforce" (see ConstraintSumProvider).
    if target == 0 {
        return
    }
    cells := getCells(puzzle, cell, c.Cells, c.Relative)
    sum := 0
    combos := combinations[int]{}
    for _, other := range cells {
        if other.HasValue() {
            sum += other.Value
        } else if other.Id != cell.Id {
            // Each undecided peer contributes its candidate list to the
            // cartesian product searched below.
            combos.add(other.Candidates())
        }
    }
    // Amount this cell plus its undecided peers must still provide.
    sumEmpty := target - sum
    if combos.empty() {
        // cell is the only undecided member: it must take the remainder.
        // NOTE(review): if the remainder is not currently a candidate the
        // set is left untouched — presumably detected as a contradiction
        // elsewhere; confirm.
        chosen := sumEmpty
        if remove.Has(chosen) {
            remove.Clear()
            remove.Set(chosen, true)
        }
        return
    }
    // For each candidate of cell, look for a combination of pairwise-distinct
    // peer values covering the rest of the sum; drop candidates with none.
    candidates := remove.ToSlice()
    values := combos.start()
    for _, candidate := range candidates {
        comboSum := sumEmpty - candidate
        foundSum := false
        for combos.next(values) {
            if intsUnique(values) && intsSum(values) == comboSum {
                foundSum = true
                break
            }
        }
        combos.reset()
        if !foundSum {
            remove.Set(candidate, false)
        }
    }
}
// ==================================================
// Constraint: Uniqueness
// ==================================================

// A constraint on a set of cells where they can't have the same value OR
// (when Same is true) they all need to have the same value.
type ConstraintUnique struct {
    Cells    *[]Position
    Relative *[]Position
    // Same inverts the constraint: instead of forbidding duplicates, every
    // affected cell must share a single value.
    Same bool
}

// Affects reports whether cell is covered by c.Cells (or all cells when nil).
func (c *ConstraintUnique) Affects(cell *Cell) bool {
    return containsCell(cell, c.Cells, nil)
}

// RemoveCandidates removes (or, when Same is set, forces) the values already
// placed in the constrained group.
func (c *ConstraintUnique) RemoveCandidates(cell *Cell, puzzle *Puzzle, remove *Candidates) {
    traverseCells(puzzle, cell, c.Cells, c.Relative, func(other *Cell, index int) {
        if other.HasValue() {
            if c.Same {
                // All cells must match: collapse to the observed value.
                remove.Clear()
                remove.Set(other.Value, true)
            } else {
                // No duplicates: the observed value is no longer available.
                remove.Set(other.Value, false)
            }
        }
    })
}
// ==================================================
// Constraint: Order
// ==================================================

// ConstraintOrder requires the listed cells to form a strictly monotonic
// sequence. Direction +1 means values increase along the list, -1 means they
// decrease, and 0 means the direction is unknown and is inferred from the
// first two placed values.
type ConstraintOrder struct {
    Cells     *[]Position
    Relative  *[]Position
    Direction int
}

// Affects reports whether cell is covered by c.Cells (or all cells when nil).
func (c *ConstraintOrder) Affects(cell *Cell) bool {
    return containsCell(cell, c.Cells, nil)
}

// RemoveCandidates narrows cell's candidates so that a strictly monotonic
// sequence over the constrained cells remains possible.
func (c *ConstraintOrder) RemoveCandidates(cell *Cell, puzzle *Puzzle, remove *Candidates) {
    cells := getCells(puzzle, cell, c.Cells, c.Relative)
    // Locate cell inside the ordered list.
    i := -1
    for k, other := range cells {
        if other.Id == cell.Id {
            i = k
            break
        }
    }
    if i == -1 {
        return
    }
    var firstValue *Cell
    dir := c.Direction
    if dir == 0 {
        // Infer the direction from the first two cells holding values.
        for _, other := range cells {
            if other.HasValue() {
                if firstValue == nil {
                    firstValue = other
                } else if firstValue.Value < other.Value {
                    dir = 1
                    break
                } else {
                    dir = -1
                    break
                }
            }
        }
    }
    if dir != 0 {
        puzzleMin := 1
        puzzleMax := puzzle.Kind.Digits()
        min := puzzleMin
        max := puzzleMax
        // Every other cell bounds our value by its distance in the sequence:
        // d > 0 means it precedes us in the chosen direction (raising our
        // minimum); d < 0 means it follows us (lowering our maximum).
        for k, other := range cells {
            if k == i {
                continue
            }
            d := (i - k) * dir
            if d > 0 {
                otherMin := other.MinValue() + d
                min = Max(min, otherMin)
            }
            if d < 0 {
                otherMax := other.MaxValue() + d
                max = Min(max, otherMax)
            }
        }
        // Drop everything outside [min, max].
        for i := puzzleMin; i < min; i++ {
            remove.Set(i, false)
        }
        for i := max + 1; i <= puzzleMax; i++ {
            remove.Set(i, false)
        }
    } else if firstValue != nil {
        // Direction still unknown but one value is placed: all we know is
        // that this cell cannot repeat that value.
        remove.Set(firstValue.Value, false)
    }
}
// ==================================================
// Constraint: Magic Square
// ==================================================

// ConstraintMagic constrains the 3x3 area around Center to be a magic
// square: every row and column sums to 15 and the center is 5.
type ConstraintMagic struct {
    Center Position
}

// Affects reports whether cell lies in the 3x3 block centered on Center.
func (c *ConstraintMagic) Affects(cell *Cell) bool {
    dx := cell.Col - c.Center.Col
    dy := cell.Row - c.Center.Row
    return dx <= 1 && dx >= -1 && dy <= 1 && dy >= -1
}

// RemoveCandidates applies standard 3x3 magic-square facts: the center is
// always 5, edge (orthogonally adjacent) cells cannot be even, corner cells
// cannot be odd, and any in-square row/column with two known values forces
// the third to complete the sum of 15.
func (c *ConstraintMagic) RemoveCandidates(cell *Cell, puzzle *Puzzle, remove *Candidates) {
    dx := cell.Col - c.Center.Col
    dy := cell.Row - c.Center.Row
    if dx == 0 && dy == 0 {
        // The center of a 3x3 magic square is always 5.
        remove.Clear()
        remove.Set(5, true)
    } else if dx*dy == 0 {
        // Edge cell (shares a row or column with the center): odd digits only.
        remove.Set(2, false)
        remove.Set(4, false)
        remove.Set(6, false)
        remove.Set(8, false)
    } else {
        // Corner cell: even digits only.
        remove.Set(1, false)
        remove.Set(3, false)
        remove.Set(7, false)
        remove.Set(9, false)
    }
    if remove.Count > 1 {
        // Scan this cell's column within the square; two known values force
        // the third. NOTE(review): the scan includes this cell's own
        // position — assumed unvalued when this constraint runs; confirm.
        verSum := 0
        verCount := 0
        for ry := 0; ry < 3; ry++ {
            other := puzzle.Get(cell.Col, cell.Row-(dy+1)+ry)
            if other.HasValue() {
                verCount++
                verSum += other.Value
            }
        }
        if verCount == 2 {
            remove.Clear()
            remove.Set(15-verSum, true)
            return
        }
        // Same check for this cell's row within the square.
        horSum := 0
        horCount := 0
        for rx := 0; rx < 3; rx++ {
            other := puzzle.Get(cell.Col-(dx+1)+rx, cell.Row)
            if other.HasValue() {
                horCount++
                horSum += other.Value
            }
        }
        if horCount == 2 {
            remove.Clear()
            remove.Set(15-horSum, true)
            return
        }
    }
}
// ==================================================
// Constraint: Scale
// ==================================================

// ConstraintScalePair links two cells so that one value must be the other
// multiplied by Scale (in either direction).
type ConstraintScalePair struct {
    Scale  int
    First  Position
    Second Position
}

// Affects reports whether cell is one of the pair's two positions.
func (c *ConstraintScalePair) Affects(cell *Cell) bool {
    return isSame(cell, c.First) || isSame(cell, c.Second)
}

// RemoveCandidates drops every candidate of cell that has neither a valid
// multiple (candidate*Scale) nor a valid exact divisor (candidate/Scale)
// among the partner cell's possibilities.
func (c *ConstraintScalePair) RemoveCandidates(cell *Cell, puzzle *Puzzle, remove *Candidates) {
    var other *Cell
    if isSame(cell, c.First) {
        other = getAbsoluteCell(puzzle, c.Second)
    } else if isSame(cell, c.Second) {
        other = getAbsoluteCell(puzzle, c.First)
    }
    if other == nil {
        return
    }
    // NOTE(review): these copies assume Candidates is a plain value type; if
    // it ever gains reference fields, Clear() below would mutate the partner
    // cell's live candidate set — confirm.
    possible := other.candidates
    if other.HasValue() {
        possible.Clear()
        possible.Set(other.Value, true)
    }
    // Iterate a copy while mutating the original set.
    available := *remove
    for available.Count > 0 {
        candidate := available.First()
        available.Set(candidate, false)
        up := candidate * c.Scale
        upValid := puzzle.IsCandidate(up) && possible.Has(up)
        down := candidate / c.Scale
        // Integer division: only exact divisors count.
        downValid := down*c.Scale == candidate && puzzle.IsCandidate(down) && possible.Has(down)
        if !upValid && !downValid {
            remove.Set(candidate, false)
        }
    }
}

// ConstraintScalePairs builds one ConstraintScalePair per position pair,
// all sharing the same scale factor.
func ConstraintScalePairs(scale int, pairs [][2]Position) []ConstraintScalePair {
    constraints := make([]ConstraintScalePair, len(pairs))
    for pairIndex, pair := range pairs {
        constraints[pairIndex].Scale = scale
        constraints[pairIndex].First = pair[0]
        constraints[pairIndex].Second = pair[1]
    }
    return constraints
}
// ==================================================
// Constraint: Difference
// ==================================================

type ConstraintDifference struct {
    // The minimum difference that should exist between the given cells.
    // If the cells are in the same groups then the minimum is already technically 1
    // since the same value can't exist in the same group. A value of 2 means all
    // cells will need to be at least 2 apart.
    Min int
    // The maximum difference that should exist between the given cells.
    // For example if the Max is 4 and one of the cells is 2 then the other cells
    // are constrained to 1, 2, 3, 4, 5, and 6.
    Max int
    // The cells which are affected by this constraint. If nil all cells in the puzzle
    // are affected (minus what's given in Exclude).
    Cells *[]Position
    // The cells which are looked at by the constraint. If nil the cells involved in
    // the logic are the Cells given.
    Relative *[]Position
    // The cells to exclude from being constrained when Cells is nil
    // (meaning all cells in the puzzle are constrained).
    Exclude *[]Position
}

// Affects reports whether cell is constrained, honoring the Exclude list.
func (c *ConstraintDifference) Affects(cell *Cell) bool {
    return containsCell(cell, c.Cells, c.Exclude)
}

// RemoveCandidates enforces the min/max difference against both the placed
// values and the remaining candidates of the related cells.
func (c *ConstraintDifference) RemoveCandidates(cell *Cell, puzzle *Puzzle, remove *Candidates) {
    // Collect placed values and, separately, the union of candidates of the
    // still-empty related cells.
    surrounding := Candidates{}
    surroundingValues := Candidates{}
    traverseCells(puzzle, cell, c.Cells, c.Relative, func(other *Cell, index int) {
        if other.HasValue() {
            surroundingValues.Set(other.Value, true)
        } else {
            surrounding.Or(other.candidates)
        }
    })
    candidateMin := puzzle.MinCandidate()
    candidateMax := puzzle.MaxCandidate()
    // Placed values prune this cell's candidates directly.
    for surroundingValues.Count > 0 {
        candidate := surroundingValues.First()
        surroundingValues.Set(candidate, false)
        doMinMaxDifference(candidate, c.Min, c.Max, candidateMin, candidateMax, remove, false)
    }
    if surrounding.Count > 0 {
        // Unplaced neighbors only prune where every one of their possible
        // values agrees: intersect the per-candidate exclusion sets.
        common := Candidates{}
        common.Fill(candidateMax)
        for surrounding.Count > 0 {
            candidate := surrounding.First()
            surrounding.Set(candidate, false)
            unique := Candidates{}
            doMinMaxDifference(candidate, c.Min, c.Max, candidateMin, candidateMax, &unique, true)
            common.And(unique)
        }
        if common.Count > 0 {
            remove.Remove(common)
        }
    }
}

// doMinMaxDifference writes (set=true) or clears (set=false) in out every
// value excluded relative to candidate: values closer than min (including
// candidate itself) and values farther than max, clamped to the puzzle's
// [candidateMin, candidateMax] range.
func doMinMaxDifference(candidate int, min int, max int, candidateMin int, candidateMax int, out *Candidates, set bool) {
    if min > 1 {
        minMin := Max(candidate-min+1, candidateMin)
        minMax := Min(candidate+min-1, candidateMax)
        for c := minMin; c <= minMax; c++ {
            out.Set(c, set)
        }
    }
    if max > 0 {
        maxMin := candidate - max
        maxMax := candidate + max
        for c := candidateMin; c < maxMin; c++ {
            out.Set(c, set)
        }
        for c := maxMax + 1; c <= candidateMax; c++ {
            out.Set(c, set)
        }
    }
}
// ==================================================
// Constraint: Divisible
// ==================================================

// ConstraintDivisible restricts cells to values v with v % By == Remainder
// (e.g. only even or only odd digits).
type ConstraintDivisible struct {
    By        int
    Remainder int
    Cells     []Position
}

// Affects reports whether cell is one of the constrained positions.
func (c *ConstraintDivisible) Affects(cell *Cell) bool {
    return containsCell(cell, &c.Cells, nil)
}
// RemoveCandidates drops every candidate of cell whose remainder modulo
// c.By differs from c.Remainder.
//
// Bug fix: iterate over a snapshot COPY of the candidate set. The original
// code wrote `cand := remove`, which copies the pointer, so the popping
// loop (cand.Set(candidate, false)) cleared every candidate from `remove`
// itself — wiping matching values as well. Copying the value (as the other
// constraints do, e.g. `available := *remove` in ConstraintScalePair)
// leaves `remove` holding exactly the candidates that satisfy the rule.
func (c *ConstraintDivisible) RemoveCandidates(cell *Cell, puzzle *Puzzle, remove *Candidates) {
    cand := *remove
    for cand.Count > 0 {
        candidate := cand.First()
        cand.Set(candidate, false)
        if candidate%c.By != c.Remainder {
            remove.Set(candidate, false)
        }
    }
}
// ConstraintEven restricts the given cells to even values.
func ConstraintEven(cells []Position) ConstraintDivisible {
    return ConstraintDivisible{
        By:        2,
        Remainder: 0,
        Cells:     cells,
    }
}

// ConstraintOdd restricts the given cells to odd values.
func ConstraintOdd(cells []Position) ConstraintDivisible {
    return ConstraintDivisible{
        By:        2,
        Remainder: 1,
        Cells:     cells,
    }
}
// Functions

// traverseCells invokes traverse for each constrained cell. Relative
// positions (offsets from cell) take precedence over absolute ones;
// relative positions falling outside the puzzle are skipped.
func traverseCells(puzzle *Puzzle, cell *Cell, absolute *[]Position, relative *[]Position, traverse func(other *Cell, index int)) {
    if relative != nil {
        for i := range *relative {
            pos := (*relative)[i]
            cell := getRelativeCell(puzzle, pos, cell)
            if cell != nil {
                traverse(cell, i)
            }
        }
    } else if absolute != nil {
        for i := range *absolute {
            pos := (*absolute)[i]
            cell := getAbsoluteCell(puzzle, pos)
            traverse(cell, i)
        }
    }
}

// isSame reports whether cell sits at the given position.
func isSame(cell *Cell, pos Position) bool {
    return cell.Col == pos.Col && cell.Row == pos.Row
}

// getAbsoluteCell fetches the cell at an absolute position.
func getAbsoluteCell(puzzle *Puzzle, pos Position) *Cell {
    return puzzle.Get(pos.Col, pos.Row)
}

// getRelativeCell fetches the cell at pos offset from relative, or nil when
// the offset leaves the puzzle.
func getRelativeCell(puzzle *Puzzle, pos Position, relative *Cell) *Cell {
    col := relative.Col + pos.Col
    row := relative.Row + pos.Row
    if !puzzle.Contains(col, row) {
        return nil
    }
    return puzzle.Get(col, row)
}

// getCells materializes the constrained cells as a slice.
// NOTE(review): out-of-bounds relative positions leave nil entries in the
// result, and callers (e.g. ConstraintSum.RemoveCandidates) dereference the
// entries — confirm relative lists always stay in bounds.
func getCells(puzzle *Puzzle, cell *Cell, absolute *[]Position, relative *[]Position) []*Cell {
    n := 0
    if relative != nil {
        n = len(*relative)
    } else if absolute != nil {
        n = len(*absolute)
    }
    cells := make([]*Cell, n)
    traverseCells(puzzle, cell, absolute, relative, func(other *Cell, index int) {
        cells[index] = other
    })
    return cells
}

// containsCell reports whether cell is constrained: a nil cells list means
// "all cells", minus any listed in exclude.
func containsCell(cell *Cell, cells *[]Position, exclude *[]Position) bool {
    if cells == nil {
        if exclude == nil {
            return true
        }
        return !cellExists(cell, *exclude)
    }
    return cellExists(cell, *cells)
}

// cellExists reports whether cell's position appears in the list.
func cellExists(cell *Cell, cells []Position) bool {
    for _, p := range cells {
        if isSame(cell, p) {
            return true
        }
    }
    return false
}
// intsSum returns the total of all values in the slice (0 for empty/nil).
func intsSum(values []int) int {
    total := 0
    for i := range values {
        total += values[i]
    }
    return total
}
// intsUnique reports whether the slice contains no duplicate values, using
// a Candidates set as the seen-marker.
func intsUnique(values []int) bool {
    cand := Candidates{}
    for _, v := range values {
        if cand.Has(v) {
            return false
        }
        cand.Set(v, true)
    }
    return true
}
// combinations enumerates the cartesian product of several groups of items;
// each added group contributes one element per produced combination.
type combinations[T any] struct {
    groups  [][]T // the item groups, in the order they were added
    current []int // per-group cursor into groups[i]
}

// add appends a group of items to the product.
func (c *combinations[T]) add(item []T) {
    c.groups = append(c.groups, item)
    c.current = append(c.current, 0)
}

// empty reports whether no groups have been added.
func (c *combinations[T]) empty() bool {
    return len(c.groups) == 0
}

// reset rewinds the enumeration back to the first combination.
func (c *combinations[T]) reset() {
    for i := range c.current {
        c.current[i] = 0
    }
}

// done reports whether every combination has been produced; the cursor of
// the final group doubles as the overflow marker.
func (c *combinations[T]) done() bool {
    last := len(c.current) - 1
    return c.current[last] == len(c.groups[last])
}

// increment advances the cursors odometer-style, carrying into the next
// group on overflow. It returns false once the last group overflows.
func (c *combinations[T]) increment(k int) bool {
    for {
        c.current[k]++
        if c.current[k] < len(c.groups[k]) {
            return true
        }
        if k == len(c.current)-1 {
            return false
        }
        c.current[k] = 0
        k++
    }
}

// start allocates a scratch slice sized to hold one combination.
func (c *combinations[T]) start() []T {
    return make([]T, len(c.groups))
}

// next writes the current combination into out and advances the cursors,
// returning false when the enumeration is exhausted.
func (c *combinations[T]) next(out []T) bool {
    if c.done() {
        return false
    }
    for i, g := range c.groups {
        out[i] = g[c.current[i]]
    }
    c.increment(0)
    return true
}
package mapping
import (
"bytes"
"errors"
"fmt"
"math"
enc "github.com/bahlo/sketches-go/ddsketch/encoding"
)
// A fast IndexMapping that approximates the memory-optimal LogarithmicMapping by extracting the floor value
// of the logarithm to the base 2 from the binary representations of floating-point values and linearly
// interpolating the logarithm in-between.
type LinearlyInterpolatedMapping struct {
    relativeAccuracy      float64 // guaranteed relative accuracy of mapped values
    multiplier            float64 // 1 / log2(gamma); scales the approximate log to an index
    normalizedIndexOffset float64 // shift applied to every index (used when decoding)
}
// NewLinearlyInterpolatedMapping creates a mapping with the given relative
// accuracy, which must lie strictly between 0 and 1.
func NewLinearlyInterpolatedMapping(relativeAccuracy float64) (*LinearlyInterpolatedMapping, error) {
    if relativeAccuracy <= 0 || relativeAccuracy >= 1 {
        // Error strings are lowercase and unpunctuated per Go convention
        // (staticcheck ST1005); the original capitalized full sentences.
        return nil, errors.New("the relative accuracy must be between 0 and 1")
    }
    return &LinearlyInterpolatedMapping{
        relativeAccuracy: relativeAccuracy,
        multiplier:       1.0 / math.Log1p(2*relativeAccuracy/(1-relativeAccuracy)),
    }, nil
}

// NewLinearlyInterpolatedMappingWithGamma creates a mapping directly from a
// gamma (> 1) and an index offset, e.g. when decoding a serialized sketch.
func NewLinearlyInterpolatedMappingWithGamma(gamma, indexOffset float64) (*LinearlyInterpolatedMapping, error) {
    if gamma <= 1 {
        return nil, errors.New("gamma must be greater than 1")
    }
    m := LinearlyInterpolatedMapping{
        relativeAccuracy: 1 - 2/(1+math.Exp(math.Log2(gamma))),
        multiplier:       1 / math.Log2(gamma),
    }
    m.normalizedIndexOffset = indexOffset - m.approximateLog(1)*m.multiplier
    return &m, nil
}
// Equals reports whether other is a LinearlyInterpolatedMapping with the
// same multiplier and index offset, within a fixed tolerance.
func (m *LinearlyInterpolatedMapping) Equals(other IndexMapping) bool {
    o, ok := other.(*LinearlyInterpolatedMapping)
    if !ok {
        return false
    }
    tol := 1e-12
    return (withinTolerance(m.multiplier, o.multiplier, tol) && withinTolerance(m.normalizedIndexOffset, o.normalizedIndexOffset, tol))
}

// Index returns the bucket index for value, flooring the scaled approximate
// logarithm.
func (m *LinearlyInterpolatedMapping) Index(value float64) int {
    index := m.approximateLog(value)*m.multiplier + m.normalizedIndexOffset
    if index >= 0 {
        return int(index)
    } else {
        // NOTE(review): for exactly-integral negative indices this yields
        // int(index)-1 rather than the mathematical floor — presumably
        // intentional bucket-boundary behavior; confirm against upstream.
        return int(index) - 1
    }
}

// Value returns a representative value for the bucket at index: its lower
// bound inflated by the relative accuracy.
func (m *LinearlyInterpolatedMapping) Value(index int) float64 {
    return m.LowerBound(index) * (1 + m.relativeAccuracy)
}

// LowerBound returns the smallest value that maps to the given index.
func (m *LinearlyInterpolatedMapping) LowerBound(index int) float64 {
    return m.approximateInverseLog((float64(index) - m.normalizedIndexOffset) / m.multiplier)
}
// Return an approximation of log(1) + Math.log(x) / Math.log(2)}
// (piecewise-linear: the binary exponent plus the significand in [1, 2)).
func (m *LinearlyInterpolatedMapping) approximateLog(x float64) float64 {
    bits := math.Float64bits(x)
    return getExponent(bits) + getSignificandPlusOne(bits)
}

// The exact inverse of approximateLog.
func (m *LinearlyInterpolatedMapping) approximateInverseLog(x float64) float64 {
    exponent := math.Floor(x - 1)
    significandPlusOne := x - exponent
    return buildFloat64(int(exponent), significandPlusOne)
}

// MinIndexableValue returns the smallest value the mapping can index without
// underflowing the int32 index range or the normal float64 range.
func (m *LinearlyInterpolatedMapping) MinIndexableValue() float64 {
    return math.Max(
        math.Exp2((math.MinInt32-m.normalizedIndexOffset)/m.multiplier-m.approximateLog(1)+1), // so that index >= MinInt32
        minNormalFloat64*(1+m.relativeAccuracy)/(1-m.relativeAccuracy),
    )
}

// MaxIndexableValue returns the largest value the mapping can index without
// overflowing the int32 index range or math.Exp.
func (m *LinearlyInterpolatedMapping) MaxIndexableValue() float64 {
    return math.Min(
        math.Exp2((math.MaxInt32-m.normalizedIndexOffset)/m.multiplier-m.approximateLog(float64(1))-1), // so that index <= MaxInt32
        math.Exp(expOverflow)/(1+m.relativeAccuracy), // so that math.Exp does not overflow
    )
}

// RelativeAccuracy returns the guaranteed relative accuracy of the mapping.
func (m *LinearlyInterpolatedMapping) RelativeAccuracy() float64 {
    return m.relativeAccuracy
}

// gamma reconstructs the bucket growth factor from the multiplier.
func (m *LinearlyInterpolatedMapping) gamma() float64 {
    return math.Exp2(1 / m.multiplier)
}
// Encode appends the serialized mapping (flag, gamma, index offset) to b.
func (m *LinearlyInterpolatedMapping) Encode(b *[]byte) {
    enc.EncodeFlag(b, enc.FlagIndexMappingBaseLinear)
    enc.EncodeFloat64LE(b, m.gamma())
    enc.EncodeFloat64LE(b, m.normalizedIndexOffset+m.approximateLog(1)*m.multiplier)
}

// string renders the mapping's parameters for debugging.
func (m *LinearlyInterpolatedMapping) string() string {
    var buffer bytes.Buffer
    buffer.WriteString(fmt.Sprintf("relativeAccuracy: %v, multiplier: %v, normalizedIndexOffset: %v\n", m.relativeAccuracy, m.multiplier, m.normalizedIndexOffset))
    return buffer.String()
}
// withinTolerance reports whether x and y agree within tolerance:
// absolutely when either value is zero, relatively otherwise.
func withinTolerance(x, y, tolerance float64) bool {
    if x == 0 || y == 0 {
        return math.Abs(x) <= tolerance && math.Abs(y) <= tolerance
    }
    larger := math.Max(math.Abs(x), math.Abs(y))
    return math.Abs(x-y) <= tolerance*larger
}
var _ IndexMapping = (*LinearlyInterpolatedMapping)(nil) | ddsketch/mapping/linearly_interpolated_mapping.go | 0.902332 | 0.551755 | linearly_interpolated_mapping.go | starcoder |
package main
import (
"fmt"
"math"
"math/rand"
)
// getSigmoid applies the logistic function 1 / (1 + e^-value).
func getSigmoid(value float64) float64 {
    return 1.0 / (1.0 + math.Exp(-value))
}

// getActivation computes the sigmoid of the dot product of weights and x.
func getActivation(weights []float64, x []float64) float64 {
    dot := 0.0
    for i := range x {
        dot += weights[i] * x[i]
    }
    return getSigmoid(dot)
}
// getLoss returns the negative log-likelihood loss and its gradient for a
// single labeled example (y must be 0 or 1).
func getLoss(weights []float64, x []float64, y int) (float64, []float64) {
    activation := getActivation(weights, x)
    // Negative log-likelihood of the observed label.
    loss := 0.0
    if y == 0 {
        loss = -math.Log(1 - activation)
    } else {
        loss = -math.Log(activation)
    }
    // Gradient of the loss with respect to each weight.
    gradients := make([]float64, len(weights))
    for i := range gradients {
        gradients[i] = x[i] * (activation - float64(y))
    }
    return loss, gradients
}
// getBatchLoss averages the per-example losses and gradients over a batch.
func getBatchLoss(weights []float64, x [][]float64, y []int) (float64, []float64) {
    n := float64(len(x))
    batchLoss := 0.0
    batchGradients := make([]float64, len(weights))
    for i := range x {
        // Loss and gradients for this example, averaged into the batch.
        loss, gradients := getLoss(weights, x[i], y[i])
        batchLoss += loss / n
        for j := range gradients {
            batchGradients[j] += gradients[j] / n
        }
    }
    return batchLoss, batchGradients
}
func main() {
var x = [][]float64 {
{ 1.0, 0.6 },
{ 1.0, 0.2 },
{ 1.0, 1.0 },
{ 1.0, 0.0 },
{ 1.0, 0.8 },
{ 1.0, 0.49 },
{ 1.0, 0.51 }}
var y = []int { 1, 0, 1, 0, 1, 0, 1 }
// Configures the hyperparameters.
const learningRate = 1.0
const maxIterations = 1000000
const epsilon = 1e-3
// Generates random weights.
var weights = []float64 { rand.Float64(), rand.Float64() }
// Trains the classifier.
for epoch := 0; epoch < maxIterations; epoch++ {
var loss, gradients = getBatchLoss(weights, x, y)
if loss <= epsilon {
fmt.Printf("Converged after %d iterations.\n", epoch)
break
}
for i, _ := range weights {
weights[i] -= learningRate * gradients[i]
}
}
var loss, _ = getBatchLoss(weights, x, y)
fmt.Printf("Loss: %f.\n", loss)
} | LogisticRegression/logisticregression.go | 0.881417 | 0.518973 | logisticregression.go | starcoder |
package main
import (
"bufio"
"fmt"
"os"
"strconv"
"strings"
"github.com/olekukonko/tablewriter"
)
func main() {
    // When our program starts let's build the universe. We need a board.
    board := newBoard()
    // Game loop: render, then let the current player move. There is no win
    // detection yet, so the loop runs forever.
    for {
        // Everytime we iterate we will render the board.
        board.Render()
        // Ask the player for a move; invalid input prints an error and
        // simply re-prompts on the next iteration.
        err := board.Turn()
        if err != nil {
            fmt.Printf("Error: %s\n", err)
            continue
        }
    }
}
// cellStatus is a custom type to help us avoid programming errors. We will
// declare a few different known states for a cell below and reference them
// when we update the board and render it.
type cellStatus int

const (
    Available cellStatus = iota // nobody occupies the cell
    Player1Occupied             // player one currently sits here
    Player2Occupied             // player two currently sits here
    Block                       // a previously-occupied, now burnt cell
)

// Cell represents a single point or position on the board. Every cell
// can maintain a state. In this iteration of the program we can only
// track the Status of a cell.
type Cell struct {
    Status cellStatus
}

// Render converts the internal state of a cell into a UI representation.
// An unknown status is a programmer error and panics.
func (c *Cell) Render() string {
    switch c.Status {
    case Available:
        return ""
    case Player1Occupied:
        return "X"
    case Player2Occupied:
        return "O"
    case Block:
        return "~"
    default:
        panic("unknown cell status")
    }
}
// The board is gridSize x gridSize cells. Row 0 and column 0 are reserved
// for the header labels (letters and numbers), so the playable area is
// (gridSize-1) x (gridSize-1).
const gridSize = 9

// newBoard builds a fresh board with every cell Available; player one
// moves first.
func newBoard() Board {
    rows := make([][]Cell, 0, gridSize)
    for i := 0; i < gridSize; i++ {
        rows = append(rows, newRow())
    }
    return Board{
        Rows:          rows,
        playerOneTurn: true,
    }
}

// newRow builds one row of Available cells.
func newRow() []Cell {
    columns := make([]Cell, 0, gridSize)
    for i := 0; i < gridSize; i++ {
        columns = append(columns, newCell())
    }
    return columns
}

// newCell returns a cell in its initial Available state.
func newCell() Cell {
    return Cell{
        Status: Available,
    }
}

// Board represents our main state.
type Board struct {
    // Rows manages the state of each individual cell.
    Rows [][]Cell
    // playerOneTurn is true when it is player one's move.
    playerOneTurn bool
    // Last known position of each player. Zero means "not yet placed"
    // (row/column 0 are header cells and never playable).
    playerOneRow int
    playerOneCol int
    playerTwoRow int
    playerTwoCol int
}
// Turn allows a player to take a turn: read input, validate it, apply the
// move, then hand control to the other player. A validation error leaves
// the turn with the same player.
func (b *Board) Turn() error {
    input := b.captureInput()
    row, column, err := b.validateInput(strings.TrimSpace(input))
    if err != nil {
        return err
    }
    b.move(row, column)
    b.playerOneTurn = !b.playerOneTurn // update state for the next player
    return nil
}

// captureInput prompts the current player on stdin and returns the raw
// line (including the trailing newline).
func (b *Board) captureInput() string {
    reader := bufio.NewReader(os.Stdin)
    playerTurn := "Player One"
    if !b.playerOneTurn {
        playerTurn = "Player Two"
    }
    fmt.Printf("[%s] Where would you like to move to?: ", playerTurn)
    s, _ := reader.ReadString('\n')
    return s
}
// validateInput confirms the user provided input looks ok.
func (b *Board) validateInput(text string) (int, int, error) {
if len(text) != 2 {
return -1, -1, fmt.Errorf("... ummm.... that is not a valid position. Try something like A7")
}
letter := text[0]
row, err := strconv.Atoi(string(text[1]))
if err != nil {
return -1, -1, fmt.Errorf("... ummm.... are you taking this serious? Enter a position like A7", row)
}
if row > gridSize || row < 1 {
return -1, -1, fmt.Errorf("... ummm.... row %d does not exist on the board", row)
}
column, ok := columnMapInverted[string(letter)]
if !ok {
return -1, -1, fmt.Errorf("... ummm.... column %s does not exist on the board", string(letter))
}
return row, column, nil
}
// move updates the internal state of the board based on the player's
// validated move. The player's previous cell (if any) becomes a Block.
func (b *Board) move(moveToRow, moveToColumn int) {
    if b.playerOneTurn {
        // A stored row of 0 means the player has not moved yet (row 0 is
        // the header row and never playable), so there is nothing to burn.
        if b.playerOneRow > 0 {
            b.Rows[b.playerOneRow][b.playerOneCol] = Cell{Status: Block}
        }
        b.Rows[moveToRow][moveToColumn] = Cell{Status: Player1Occupied}
        b.playerOneRow = moveToRow
        b.playerOneCol = moveToColumn
        return
    }
    // Same logic for player two.
    if b.playerTwoRow > 0 {
        b.Rows[b.playerTwoRow][b.playerTwoCol] = Cell{Status: Block}
    }
    b.Rows[moveToRow][moveToColumn] = Cell{Status: Player2Occupied}
    b.playerTwoRow = moveToRow
    b.playerTwoCol = moveToColumn
    return
}
// Render prints the entire board to stdout as an ASCII table, with column
// letters across the top and row numbers down the left edge.
func (b *Board) Render() {
    table := tablewriter.NewWriter(os.Stdout)
    for i, row := range b.Rows {
        renderedRow := make([]string, 0, gridSize)
        for j, cell := range row {
            // Top-left corner cell stays blank.
            if i == 0 && j == 0 {
                renderedRow = append(renderedRow, "")
                continue
            }
            // Header row: column letters.
            if i == 0 {
                renderedRow = append(renderedRow, columnMap[j-1])
                continue
            }
            // First column: row numbers.
            if j == 0 {
                renderedRow = append(renderedRow, strconv.Itoa(i))
                continue
            }
            // Interior cell: render its status.
            renderedRow = append(renderedRow, cell.Render())
        }
        table.Append(renderedRow)
    }
    table.Render()
}
var columnMap = make(map[int]string)
var columnMapInverted = make(map[string]int)
func init() {
for i := 0; i < gridSize; i++ {
letter := string(rune(65 + i))
columnMap[i] = letter
columnMapInverted[letter] = i + 1
}
} | main.go | 0.60871 | 0.470007 | main.go | starcoder |
package gconf
import "time"
// The functions below are package-level conveniences that delegate to the
// global Conf instance.

// GetAllOpts is equal to Conf.GetAllOpts().
func GetAllOpts() []Opt { return Conf.GetAllOpts() }

// RegisterOpts is equal to Conf.RegisterOpts(opts...).
func RegisterOpts(opts ...Opt) { Conf.RegisterOpts(opts...) }

// UnregisterOpts is equal to Conf.UnregisterOpts(optNames...).
func UnregisterOpts(optNames ...string) { Conf.UnregisterOpts(optNames...) }

// SetVersion is equal to Conf.SetVersion(version).
func SetVersion(version string) { Conf.SetVersion(version) }

// LoadBackupFile is equal to Conf.LoadBackupFile().
func LoadBackupFile(filename string) error {
    return Conf.LoadBackupFile(filename)
}

// Snapshot is equal to Conf.Snapshot().
func Snapshot() (generation uint64, snap map[string]interface{}) {
    return Conf.Snapshot()
}
// LoadMap is equal to Conf.LoadMap(options, force...).
//
// Bug fix: the original forwarded only `options`, silently dropping the
// variadic `force` flag despite the documented contract.
func LoadMap(options map[string]interface{}, force ...bool) error {
    return Conf.LoadMap(options, force...)
}
// Set is equal to Conf.Set(name, value).
func Set(name string, value interface{}) error { return Conf.Set(name, value) }

// Get is equal to Conf.Get(name).
func Get(name string) interface{} { return Conf.Get(name) }

// Must is equal to Conf.Must(name); unlike Get it is the "must succeed"
// variant of the lookup.
func Must(name string) interface{} { return Conf.Must(name) }

// Observe is equal to Conf.Observe(observers...).
func Observe(observers ...Observer) { Conf.Observe(observers...) }

// GetGroupSep is equal to Conf.GetGroupSep().
func GetGroupSep() (sep string) { return Conf.GetGroupSep() }

// Typed getters, each equal to the corresponding Conf.GetXxx(name).

// GetBool is equal to Conf.GetBool(name).
func GetBool(name string) bool { return Conf.GetBool(name) }

// GetInt is equal to Conf.GetInt(name).
func GetInt(name string) int { return Conf.GetInt(name) }

// GetInt32 is equal to Conf.GetInt32(name).
func GetInt32(name string) int32 { return Conf.GetInt32(name) }

// GetInt64 is equal to Conf.GetInt64(name).
func GetInt64(name string) int64 { return Conf.GetInt64(name) }

// GetUint is equal to Conf.GetUint(name).
func GetUint(name string) uint { return Conf.GetUint(name) }

// GetUint32 is equal to Conf.GetUint32(name).
func GetUint32(name string) uint32 { return Conf.GetUint32(name) }

// GetUint64 is equal to Conf.GetUint64(name).
func GetUint64(name string) uint64 { return Conf.GetUint64(name) }

// GetFloat64 is equal to Conf.GetFloat64(name).
func GetFloat64(name string) float64 { return Conf.GetFloat64(name) }

// GetString is equal to Conf.GetString(name).
func GetString(name string) string { return Conf.GetString(name) }

// GetDuration is equal to Conf.GetDuration(name).
func GetDuration(name string) time.Duration { return Conf.GetDuration(name) }

// GetTime is equal to Conf.GetTime(name).
func GetTime(name string) time.Time { return Conf.GetTime(name) }

// GetIntSlice is equal to Conf.GetIntSlice(name).
func GetIntSlice(name string) []int { return Conf.GetIntSlice(name) }

// GetUintSlice is equal to Conf.GetUintSlice(name).
func GetUintSlice(name string) []uint { return Conf.GetUintSlice(name) }

// GetFloat64Slice is equal to Conf.GetFloat64Slice(name).
func GetFloat64Slice(name string) []float64 { return Conf.GetFloat64Slice(name) }

// GetStringSlice is equal to Conf.GetStringSlice(name).
func GetStringSlice(name string) []string { return Conf.GetStringSlice(name) }
// GetDurationSlice is equal to Conf.GetDurationSlice(name).
func GetDurationSlice(name string) []time.Duration { return Conf.GetDurationSlice(name) } | global.go | 0.64232 | 0.412234 | global.go | starcoder |
package level
// SlopeFactors is a list of multipliers for each direction of a tile.
type SlopeFactors [8]float32
// Negated returns the factors multiplied by -1.
func (factors SlopeFactors) Negated() SlopeFactors {
return SlopeFactors{
factors[0] * -1, factors[1] * -1, factors[2] * -1, factors[3] * -1,
factors[4] * -1, factors[5] * -1, factors[6] * -1, factors[7] * -1,
}
}
// TileTypeInfo is the meta information about a tile type.
type TileTypeInfo struct {
// Name is the textual representation of the tile type.
Name string
// SolidSides is a bitfield of cardinal directions describing solid walls.
SolidSides DirectionMask
// SlopeFloorFactors defines how a slope affects the floor in each direction of a tile [0.0 .. 1.0].
SlopeFloorFactors SlopeFactors
// SlopeInvertedType is the type that inverts the slope to form a solid tile if merged (e.g. floor & ceiling).
// Types that have no slope invert themselves.
SlopeInvertedType TileType
}
var tileTypeInfoList = []TileTypeInfo{
{"Solid", DirNorth.Plus(DirEast).Plus(DirSouth).Plus(DirWest), SlopeFactors{0, 0, 0, 0, 0, 0, 0, 0}, TileTypeSolid},
{"Open", 0, SlopeFactors{0, 0, 0, 0, 0, 0, 0, 0}, TileTypeOpen},
{"DiagonalOpenSouthEast", DirNorth.Plus(DirWest), SlopeFactors{0, 0, 0, 0, 0, 0, 0, 0}, TileTypeDiagonalOpenSouthEast},
{"DiagonalOpenSouthWest", DirNorth.Plus(DirEast), SlopeFactors{0, 0, 0, 0, 0, 0, 0, 0}, TileTypeDiagonalOpenSouthWest},
{"DiagonalOpenNorthWest", DirSouth.Plus(DirEast), SlopeFactors{0, 0, 0, 0, 0, 0, 0, 0}, TileTypeDiagonalOpenNorthWest},
{"DiagonalOpenNorthEast", DirSouth.Plus(DirWest), SlopeFactors{0, 0, 0, 0, 0, 0, 0, 0}, TileTypeDiagonalOpenNorthEast},
{"SlopeSouthToNorth", 0, SlopeFactors{1.0, 1.0, 0.5, 0.0, 0.0, 0.0, 0.5, 1.0}, TileTypeSlopeNorthToSouth},
{"SlopeWestToEast", 0, SlopeFactors{0.5, 1.0, 1.0, 1.0, 0.5, 0.0, 0.0, 0.0}, TileTypeSlopeEastToWest},
{"SlopeNorthToSouth", 0, SlopeFactors{0.0, 0.0, 0.5, 1.0, 1.0, 1.0, 0.5, 0.0}, TileTypeSlopeSouthToNorth},
{"SlopeEastToWest", 0, SlopeFactors{0.5, 0.0, 0.0, 0.0, 0.5, 1.0, 1.0, 1.0}, TileTypeSlopeWestToEast},
{"ValleySouthEastToNorthWest", 0, SlopeFactors{1.0, 1.0, 0.5, 0.0, 0.5, 1.0, 1.0, 1.0}, TileTypeRidgeNorthWestToSouthEast},
{"ValleySouthWestToNorthEast", 0, SlopeFactors{1.0, 1.0, 1.0, 1.0, 0.5, 0.0, 0.5, 1.0}, TileTypeRidgeNorthEastToSouthWest},
{"ValleyNorthWestToSouthEast", 0, SlopeFactors{0.5, 1.0, 1.0, 1.0, 1.0, 1.0, 0.5, 0.0}, TileTypeRidgeSouthEastToNorthWest},
{"ValleyNorthEastToSouthWest", 0, SlopeFactors{0.5, 0.0, 0.5, 1.0, 1.0, 1.0, 1.0, 1.0}, TileTypeRidgeSouthWestToNorthEast},
{"RidgeNorthWestToSouthEast", 0, SlopeFactors{0.0, 0.0, 0.5, 1.0, 0.5, 0.0, 0.0, 0.0}, TileTypeValleySouthEastToNorthWest},
{"RidgeNorthEastToSouthWest", 0, SlopeFactors{0.0, 0.0, 0.0, 0.0, 0.5, 1.0, 0.5, 0.0}, TileTypeValleySouthWestToNorthEast},
{"RidgeSouthEastToNorthWest", 0, SlopeFactors{0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5, 1.0}, TileTypeValleyNorthWestToSouthEast},
{"RidgeSouthWestToNorthEast", 0, SlopeFactors{0.5, 1.0, 0.5, 0.0, 0.0, 0.0, 0.0, 0.0}, TileTypeValleyNorthEastToSouthWest},
} | ss1/content/archive/level/TileTypeInfo.go | 0.802633 | 0.863737 | TileTypeInfo.go | starcoder |
package core
import (
"math"
"regexp"
"strconv"
"strings"
"github.com/wingify/vwo-go-sdk/pkg/constants"
"github.com/wingify/vwo-go-sdk/pkg/utils"
)
// SegmentEvaluator function evaluates segments to get the keys and values and perform appropriate functions
// SegmentEvaluator recursively evaluates a segmentation tree against the
// user's custom variables.
/*
    Args:
        segments: segments from campaign or variation
        customVariables: the CustomVariables supplied in the options
    Returns:
        bool: whether the custom variables satisfy the segment criteria
*/
func SegmentEvaluator(segments map[string]interface{}, customVariables map[string]interface{}) bool {
    operator, subSegments := utils.GetKeyValue(segments)
    switch operator {
    case constants.OperatorTypeNot:
        // NOT inverts the evaluation of its single child.
        return !SegmentEvaluator(subSegments.(map[string]interface{}), customVariables)
    case constants.OperatorTypeAnd, constants.OperatorTypeOr:
        // Evaluate every child, then fold the results with AND/OR.
        var results []bool
        for _, sub := range subSegments.([]interface{}) {
            results = append(results, SegmentEvaluator(sub.(map[string]interface{}), customVariables))
        }
        return evaluate(operator, results)
    case constants.OperandTypesCustomVariable:
        return evaluateCustomVariables(subSegments.(map[string]interface{}), customVariables)
    case constants.OperandTypesUser:
        return operandUserParser(subSegments.(string), customVariables)
    }
    // Unknown nodes do not restrict the user.
    return true
}
// evaluate combines a slice of boolean results using the given operator:
// AND yields true only when every element is true, OR yields true when any
// element is true. An unknown operator yields false.
func evaluate(operator string, res []bool) bool {
	switch operator {
	case constants.OperatorTypeAnd:
		for _, v := range res {
			if !v {
				return false
			}
		}
		return true
	case constants.OperatorTypeOr:
		for _, v := range res {
			if v {
				return true
			}
		}
		return false
	}
	return false
}
// evaluateCustomVariables evaluates a single custom_variable operand against
// the custom variables supplied in the options.
//
// Args:
//	custom: map with exactly one entry, operand key -> operand definition
//	customVariables: CustomVariables from the options
//
// Returns:
//	bool: true when the user's value for the operand key matches the operand
//	      definition; false when the key is missing or does not match
func evaluateCustomVariables(custom map[string]interface{}, customVariables map[string]interface{}) bool {
	operandKey, operand := utils.GetKeyValue(custom)
	// Single map lookup instead of an existence check followed by a re-read.
	tag, okCustomVar := customVariables[operandKey]
	if !okCustomVar {
		return false
	}
	operandType, operandValue := preProcessOperandValue(operand)
	tagValue := processCustomVariablesValue(tag)
	// Normalize both sides (e.g. "123.0" -> "123") before comparing.
	// Renamed from the misleading `processedValues`: this is the operand value.
	processedOperandValue, processedTagValue := processValues(operandValue, tagValue)
	return extractResult(operandType, processedOperandValue, processedTagValue)
}
// extractResult compares the operand value and tag value according to the
// operand type (equality, case-insensitive equality, contains, prefix,
// suffix or regex match).
//
// Args:
//	operandType: comparison kind decided by preProcessOperandValue
//	operandValue: value of the operand from the segment definition
//	tagValue: value from CustomVariables in the options
//
// Returns:
//	bool: result of comparing operandValue and tagValue
func extractResult(operandType int, operandValue, tagValue string) bool {
	result := false
	switch operandType {
	case constants.LowerValue:
		if tagValue != "" {
			// Case-insensitive comparison without allocating lowered copies.
			result = strings.EqualFold(operandValue, tagValue)
		}
	case constants.StartingEndingStarValue:
		// "*x*" means the tag must contain the operand.
		if tagValue != "" {
			result = strings.Contains(tagValue, operandValue)
		}
	case constants.StartingStarValue:
		// "*x" means the tag must end with the operand.
		if tagValue != "" {
			result = strings.HasSuffix(tagValue, operandValue)
		}
	case constants.EndingStarValue:
		// "x*" means the tag must start with the operand.
		if tagValue != "" {
			result = strings.HasPrefix(tagValue, operandValue)
		}
	case constants.RegexValue:
		result = matchWithRegex(tagValue, operandValue)
	default:
		result = tagValue == operandValue
	}
	return result
}
// operandUserParser reports whether the current VWO user appears in the
// comma-separated user list of the segment.
//
// Args:
//	operand: comma-separated list of user IDs
//	customVariables: options map; the current user is read from "_vwo_user_id"
//
// Returns:
//	bool: true if the user is in the list, else false
func operandUserParser(operand string, customVariables map[string]interface{}) bool {
	currentUser := customVariables["_vwo_user_id"]
	for _, candidate := range strings.Split(operand, ",") {
		if strings.TrimSpace(candidate) == currentUser {
			return true
		}
	}
	return false
}
// processCustomVariablesValue converts a customVariables value to its string
// representation for comparison.
//
// Args:
//	value: value to be converted
//
// Returns:
//	string: the converted value; "" for nil or unsupported types
func processCustomVariablesValue(value interface{}) string {
	switch v := value.(type) {
	case bool:
		return strconv.FormatBool(v)
	case int:
		return strconv.Itoa(v)
	case float64:
		return strconv.FormatFloat(v, 'f', -1, 64)
	case string:
		return v
	}
	// nil and any unsupported type (e.g. int64, map): the previous code
	// panicked on `value.(string)` for unhandled non-string types; treat
	// them as empty instead.
	return ""
}
// preProcessOperandValue classifies the raw operand string and strips its
// wrapper syntax, e.g. lower(...), wildcard(...), regex(...).
//
// Args:
//	operand: raw operand from the segment definition (must be a string;
//	         a non-string value panics on the type assertion)
//
// Returns:
//	operandType: comparison kind (constants.LowerValue, one of the
//	             constants.*StarValue kinds, constants.RegexValue or
//	             constants.EqualValue)
//	operandValue: operand with the wrapper and any leading/trailing star
//	              markers removed
func preProcessOperandValue(operand interface{}) (operandType int, operandValue string) {
	if matchWithRegex(operand.(string), constants.LowerMatch) {
		operandType = constants.LowerValue
		operandValue = extractOperandValue(operand.(string), constants.LowerMatch)
	} else if matchWithRegex(operand.(string), constants.WildcardMatch) {
		operandValue = extractOperandValue(operand.(string), constants.WildcardMatch)
		startingStar := matchWithRegex(operandValue, constants.StartingStar)
		endingStar := matchWithRegex(operandValue, constants.EndingStar)
		// In case of wildcard, the operand type is further divided into contains, startswith and endswith
		if startingStar && endingStar {
			operandType = constants.StartingEndingStarValue
		} else if startingStar {
			operandType = constants.StartingStarValue
		} else if endingStar {
			operandType = constants.EndingStarValue
		}
		// Strip the star markers so only the literal text is compared.
		remStartStar := regexp.MustCompile(constants.StartingStar)
		remEndingStar := regexp.MustCompile(constants.EndingStar)
		operandValue = remStartStar.ReplaceAllString(operandValue, "")
		operandValue = remEndingStar.ReplaceAllString(operandValue, "")
	} else if matchWithRegex(operand.(string), constants.RegexMatch) {
		operandType = constants.RegexValue
		operandValue = extractOperandValue(operand.(string), constants.RegexMatch)
	} else {
		// No wrapper: plain equality against the raw operand.
		operandType = constants.EqualValue
		operandValue = operand.(string)
	}
	return
}
// processValues normalizes the operand and tag values for comparison: when
// both parse as numbers, integral values are rendered without a fractional
// part (e.g. "123.0" -> "123"); otherwise both are returned unchanged.
//
// Args:
//	operandValue: value of the operand from the segment definition
//	tagValue: value from CustomVariables in the options (must be a string)
//
// Returns:
//	newProcessedOperandValue: normalized operand value
//	newProcessedTagValue: normalized tag value
func processValues(operandValue string, tagValue interface{}) (newProcessedOperandValue string, newProcessedTagValue string) {
	tagString := tagValue.(string)
	operandNumber, operandErr := strconv.ParseFloat(operandValue, 64)
	if operandErr != nil {
		// Operand is not numeric: compare the raw strings.
		return operandValue, tagString
	}
	tagNumber, tagErr := strconv.ParseFloat(tagString, 64)
	if tagErr != nil {
		// Tag is not numeric: compare the raw strings.
		return operandValue, tagString
	}
	// Both values are numbers; render integral values as plain integers.
	if operandNumber == math.Floor(operandNumber) {
		newProcessedOperandValue = strconv.Itoa(int(operandNumber))
	} else {
		newProcessedOperandValue = strconv.FormatFloat(operandNumber, 'f', -1, 64)
		// Strip trailing zeros from the fractional part, as before.
		newProcessedOperandValue = regexp.MustCompile("0*$").ReplaceAllString(newProcessedOperandValue, "")
	}
	if tagNumber == math.Floor(tagNumber) {
		newProcessedTagValue = strconv.Itoa(int(tagNumber))
	} else {
		newProcessedTagValue = strconv.FormatFloat(tagNumber, 'f', -1, 64)
	}
	return
}
// matchWithRegex reports whether operand contains any match of the regular
// expression pattern regex; an invalid pattern yields false.
func matchWithRegex(operand, regex string) bool {
	matched, err := regexp.MatchString(regex, operand)
	return err == nil && matched
}
// extractOperandValue returns the first capture group of the leftmost match
// of the regular expression in operand, or "" when there is no match or the
// pattern has no capture group.
func extractOperandValue(operand, regex string) string {
	re := regexp.MustCompile(regex)
	submatches := re.FindStringSubmatch(operand)
	// submatches[0] is the full match; index 1 is the first capture group.
	// Guard with > 1: a pattern without a capture group previously caused
	// an index-out-of-range panic here.
	if len(submatches) > 1 {
		return submatches[1]
	}
	return ""
}
// Package tensorflow provides implementation of Go API for extract data to vector
package tensorflow
import (
tf "github.com/tensorflow/tensorflow/tensorflow/go"
"github.com/vdaas/vald/internal/errors"
)
// SessionOptions is an alias for the TensorFlow session options type.
type SessionOptions = tf.SessionOptions

// Operation is an alias for a node in the TensorFlow graph.
type Operation = tf.Operation

// TF is the interface for extracting vectors from a saved TensorFlow model.
type TF interface {
	// GetVector runs the model with the given feeds/fetches and returns the
	// fetched values as float64 matrices.
	GetVector(feeds []Feed, fetches []Fetch, targets ...*Operation) (values [][][]float64, err error)
	// Close releases the underlying TensorFlow session.
	Close() error
}

// tensorflow is the default TF implementation backed by a saved model.
type tensorflow struct {
	exportDir     string          // directory containing the saved model
	tags          []string        // tags selecting the meta graph to load
	operations    []*Operation    // default target operations for Session.Run
	sessionTarget string          // optional session target (see tf.SessionOptions)
	sessionConfig []byte          // optional serialized session config
	options       *SessionOptions // explicit session options; built in New when nil
	graph         *tf.Graph       // loaded model graph
	session       *tf.Session     // live session for the loaded model
}

// Feed identifies one model input: the operation output to feed and the raw
// bytes to feed into it.
type Feed struct {
	InputBytes    []byte
	OperationName string
	OutputIndex   int
}

// Fetch identifies one model output to read after running the session.
type Fetch struct {
	OperationName string
	OutputIndex   int
}
// New applies the given options on top of the defaults, loads the saved
// TensorFlow model from the configured export directory and returns a TF
// backed by it.
func New(opts ...Option) (TF, error) {
	t := new(tensorflow)
	for _, apply := range append(defaultOpts, opts...) {
		apply(t)
	}
	// Build session options from target/config unless explicitly provided.
	if t.options == nil && (len(t.sessionTarget) != 0 || t.sessionConfig != nil) {
		t.options = &tf.SessionOptions{
			Target: t.sessionTarget,
			Config: t.sessionConfig,
		}
	}
	saved, err := tf.LoadSavedModel(t.exportDir, t.tags, t.options)
	if err != nil {
		return nil, err
	}
	t.graph, t.session = saved.Graph, saved.Session
	return t, nil
}
// Close releases the resources held by the underlying TensorFlow session.
func (t *tensorflow) Close() error {
	return t.session.Close()
}
// GetVector runs the loaded model and returns the fetched tensors as float64
// matrices.
//
// Each Feed becomes a single-element string tensor built from its InputBytes
// and is bound to the named operation output; each Fetch names an operation
// output to read back. When no explicit targets are given, the instance's
// configured operations are used. Every fetched tensor must be convertible
// to [][]float64, otherwise ErrFailedToCastTF is returned.
func (t *tensorflow) GetVector(feeds []Feed, fetches []Fetch, targets ...*Operation) (values [][][]float64, err error) {
	input := make(map[tf.Output]*tf.Tensor, len(feeds))
	for _, feed := range feeds {
		// Each input is fed as a 1-element string tensor of the raw bytes.
		inputTensor, err := tf.NewTensor([]string{string(feed.InputBytes)})
		if err != nil {
			return nil, err
		}
		input[t.graph.Operation(feed.OperationName).Output(feed.OutputIndex)] = inputTensor
	}
	output := make([]tf.Output, 0, len(fetches))
	for _, fetch := range fetches {
		output = append(output, t.graph.Operation(fetch.OperationName).Output(fetch.OutputIndex))
	}
	if targets == nil {
		// Fall back to the operations configured at construction time.
		targets = t.operations
	}
	results, err := t.session.Run(input, output, targets)
	if err != nil {
		return nil, err
	}
	values = make([][][]float64, 0, len(results))
	for _, result := range results {
		value, ok := result.Value().([][]float64)
		if ok {
			values = append(values, value)
		} else {
			return nil, errors.ErrFailedToCastTF(result.Value())
		}
	}
	return values, nil
}
package util
import "github.com/go-gl/mathgl/mgl32"
// GenerateTangents generates tangents for vertex data
func GenerateTangents(points []float32, normals []float32, texCoords []float32) (tangents []float32) {
//const vector<vec3> & points,
//const vector<vec3> & normals,
//const vector<int> & faces,
//const vector<vec2> & texCoords,
// vector<vec4> & tangents)
//{
//vector<vec3> tan1Accum;
tan1Accum := make([]float32, len(points))
//vector<vec3> tan2Accum;
tan2Accum := make([]float32, len(points))
tangents = make([]float32, len(points)+(len(points)/3))
//for( uint i = 0; i < points.size(); i++ ) {
//tan1Accum.push_back(vec3(0.0f));
//tan2Accum.push_back(vec3(0.0f));
//tangents.push_back(vec4(0.0f));
//}
// Compute the tangent vector
for i := uint(0); i < uint(len(points))-9; i += 9 {
rootIdx := i / 3
p1 := mgl32.Vec3{points[i], points[i+1], points[i+2]}
p2 := mgl32.Vec3{points[i+3], points[i+4], points[i+5]}
p3 := mgl32.Vec3{points[i+6], points[i+7], points[i+8]}
//const vec3 &p1 = points[faces[i]];
//const vec3 &p2 = points[faces[i+1]];
//const vec3 &p3 = points[faces[i+2]];
uvIdx := rootIdx * 2
tc1 := mgl32.Vec2{texCoords[uvIdx], texCoords[uvIdx+1]}
tc2 := mgl32.Vec2{texCoords[uvIdx+2], texCoords[uvIdx+3]}
tc3 := mgl32.Vec2{texCoords[uvIdx+4], texCoords[uvIdx+5]}
//const vec2 &tc1 = texCoords[faces[i]];
//const vec2 &tc2 = texCoords[faces[i+1]];
//const vec2 &tc3 = texCoords[faces[i+2]];
q1 := p2.Sub(p1)
q2 := p3.Sub(p1)
//vec3 q1 = p2 - p1;
//vec3 q2 = p3 - p1;
s1 := tc2.X() - tc1.X()
s2 := tc3.X() - tc1.X()
t1 := tc2.Y() - tc1.Y()
t2 := tc3.Y() - tc1.Y()
//float s1 = tc2.x - tc1.x, s2 = tc3.x - tc1.x;
//float t1 = tc2.y - tc1.y, t2 = tc3.y - tc1.y;
r := 1.0 / (s1*t2 - s2*t1)
//float r = 1.0f / (s1 * t2 - s2 * t1);
tan1 := mgl32.Vec3{
(t2*q1.X() - t1*q2.X()) * r,
(t2*q1.Y() - t1*q2.Y()) * r,
(t2*q1.Z() - t1*q2.Z()) * r,
}
//vec3 tan1( (t2*q1.x - t1*q2.x) * r,
//(t2*q1.y - t1*q2.y) * r,
//(t2*q1.z - t1*q2.z) * r);
tan2 := mgl32.Vec3{
(s1*q2.X() - s2*q1.X()) * r,
(s1*q2.Y() - s2*q1.Y()) * r,
(s1*q2.Z() - s2*q1.Z()) * r,
}
//vec3 tan2( (s1*q2.x - s2*q1.x) * r,
//(s1*q2.y - s2*q1.y) * r,
//(s1*q2.z - s2*q1.z) * r);
tan1Accum[i] += tan1.X()
tan1Accum[i+1] += tan1.Y()
tan1Accum[i+2] += tan1.Z()
tan2Accum[i] += tan2.X()
tan2Accum[i+1] += tan2.Y()
tan2Accum[i+2] += tan2.Z()
//tan1Accum[faces[i]] += tan1;
//tan1Accum[faces[i+1]] += tan1;
//tan1Accum[faces[i+2]] += tan1;
//tan2Accum[faces[i]] += tan2;
//tan2Accum[faces[i+1]] += tan2;
//tan2Accum[faces[i+2]] += tan2;
}
for i := uint(0); i < uint(len(points))-2; i++ {
n := mgl32.Vec3{
normals[i],
normals[i+1],
normals[i+2],
}
t1 := mgl32.Vec3{
tan1Accum[i],
tan1Accum[i+1],
tan1Accum[i+2],
}
t2 := mgl32.Vec3{
tan2Accum[i],
tan2Accum[i+1],
tan2Accum[i+2],
}
//const vec3 &n = normals[i];
//vec3 &t1 = tan1Accum[i];
//vec3 &t2 = tan2Accum[i];
// Gram-Schmidt orthogonalize
//tangents[i] = vec4(glm::normalize( t1 - (glm::dot(n,t1) * n) ), 0.0f);
res := t1.Sub(n.Mul(n.Dot(t1))).Normalize()
tangents[i] = res.X()
tangents[i+1] = res.Y()
tangents[i+2] = res.Z()
// Store handedness in w
w := float32(1.0)
if n.Cross(t1).Dot(t2) < 0 {
w = -1.0
}
tangents[i+3] = w
//tangents[i] = (glm::dot( glm::cross(n,t1), t2 ) < 0.0f) ? -1.0f : 1.0f;
}
//tan1Accum.clear();
//tan2Accum.clear();
return tangents
} | mesh/util/tangents.go | 0.589953 | 0.472014 | tangents.go | starcoder |
package manifest
import (
"fmt"
)
// Planner represents an execution planner, returning actions to transition from a src to a target state.
type Planner interface {
	// Plan computes the Actions required to move from src to target.
	Plan(src, target *Plan) ([]Action, error)
}

// Action is one operation on the home automation system.
type Action interface {
	// Perform executes the operation against the given home automation API.
	Perform(a aha) error
}

// TargetBasedPlanner creates a Planner that only focuses on target state. Devices in the source state that are not
// referenced in the target will be left untouched.
func TargetBasedPlanner(scf switchCommandFactory, tcf thermostatCommandFactory) Planner {
	return &targetBasedPlanner{switchCommandFactory: scf, thermostatCommandFactory: tcf}
}

// targetBasedPlanner implements Planner using per-device-type command factories.
type targetBasedPlanner struct {
	switchCommandFactory     switchCommandFactory
	thermostatCommandFactory thermostatCommandFactory
}

// switchCommandFactory builds the Action that transitions a switch from its
// before state to its after state.
type switchCommandFactory func(before, after Switch) Action

// thermostatCommandFactory builds the Action that transitions a thermostat
// from its before state to its after state.
type thermostatCommandFactory func(before, after Thermostat) Action
// Plan creates an execution plan (a slice of Actions) to be applied in order
// to reach the target state: first the switch actions, then the thermostat
// actions.
func (d *targetBasedPlanner) Plan(src, target *Plan) ([]Action, error) {
	switchActions, err := d.PlanSwitches(src, target)
	if err != nil {
		return []Action{}, err
	}
	thermostatActions, err := d.PlanThermostats(src, target)
	if err != nil {
		return []Action{}, err
	}
	return append(switchActions, thermostatActions...), nil
}
// PlanSwitches creates the partial execution plan covering every switch
// referenced in the target state. It fails when a target switch has no
// counterpart in the source state.
func (d *targetBasedPlanner) PlanSwitches(src, target *Plan) ([]Action, error) {
	var actions []Action
	for _, desired := range target.Switches {
		current, ok := src.switchNamed(desired.Name)
		if !ok {
			return []Action{}, fmt.Errorf("unable to find device (switch): '%s'", desired.Name)
		}
		actions = append(actions, d.switchCommandFactory(current, desired))
	}
	return actions, nil
}
// PlanThermostats creates a partial execution plan (a slice of Actions) which shall be applied to the thermostats.
func (d *targetBasedPlanner) PlanThermostats(src, target *Plan) ([]Action, error) {
var switchActions []Action
for _, t := range target.Thermostats {
before, ok := src.thermostatNamed(t.Name)
if !ok {
return []Action{}, fmt.Errorf("unable to find device (thermostat): '%s'", t.Name)
}
switchActions = append(switchActions, d.thermostatCommandFactory(before, t))
}
return switchActions, nil
} | manifest/planner.go | 0.761627 | 0.402862 | planner.go | starcoder |
package packet
import (
"errors"
)
// Encode4b6b returns the 4b/6b encoding of the given data.
//
// Each 4-bit input nibble maps to a 6-bit line symbol via encode4b, so a
// pair of input bytes (four nibbles) packs exactly into three output bytes.
func Encode4b6b(src []byte) []byte {
	// 2 input bytes produce 3 output bytes.
	// Odd final input byte, if any, produces 2 output bytes.
	n := len(src)
	dst := make([]byte, 3*(n/2)+2*(n%2))
	for i, j := 0, 0; i < n; i, j = i+2, j+3 {
		x := src[i]
		a := encode4b[hi(4, x)]
		b := encode4b[lo(4, x)]
		// Output bit layout across the 3 bytes: aaaaaabb bbbbcccc ccdddddd
		dst[j] = a<<2 | hi(4, b)
		if i+1 < n {
			y := src[i+1]
			c := encode4b[hi(4, y)]
			d := encode4b[lo(4, y)]
			dst[j+1] = lo(4, b)<<4 | hi(6, c)
			dst[j+2] = lo(2, c)<<6 | d
		} else {
			// Fill final nibble with 5 to match pump behavior.
			dst[j+1] = lo(4, b)<<4 | 0x5
		}
	}
	return dst
}
// ErrDecoding indicates a 6b/4b decoding failure.
var ErrDecoding = errors.New("6b/4b decoding failure")

// Decode6b4b returns the 6b/4b decoding of the given data.
//
// It is the inverse of Encode4b6b: each 6-bit line symbol maps back to a
// nibble via decode6b. An impossible length (n % 3 == 1) or a symbol with
// no 4-bit preimage yields ErrDecoding.
func Decode6b4b(src []byte) ([]byte, error) {
	n := len(src)
	// Check for valid packet length.
	if n%3 == 1 {
		return nil, ErrDecoding
	}
	// 3 input bytes produce 2 output bytes.
	// Final 2 input bytes, if any, produce 1 output byte.
	dst := make([]byte, 2*(n/3)+(n%3)/2)
	for i, j := 0, 0; i < n; i, j = i+3, j+2 {
		if i+1 >= n {
			return dst, ErrDecoding // shouldn't happen
		}
		x := src[i]
		y := src[i+1]
		// Input bit layout across the 3 bytes: aaaaaabb bbbbcccc ccdddddd
		a := decode6b[hi(6, x)]
		b := decode6b[lo(2, x)<<4|hi(4, y)]
		if a == 0xFF || b == 0xFF {
			// 0xFF marks a 6-bit pattern that is not a valid symbol.
			return dst, ErrDecoding
		}
		dst[j] = a<<4 | b
		if i+2 < n {
			z := src[i+2]
			c := decode6b[lo(4, y)<<2|hi(2, z)]
			d := decode6b[lo(6, z)]
			if c == 0xFF || d == 0xFF {
				return dst, ErrDecoding
			}
			dst[j+1] = c<<4 | d
		}
	}
	return dst, nil
}
// hi returns the high n bits of x, shifted down into the low bits.
func hi(n, x byte) byte {
	shift := 8 - n
	return x >> shift
}

// lo returns the low n bits of x.
func lo(n, x byte) byte {
	mask := byte(1)<<n - 1
	return x & mask
}
var (
	// encode4b maps each 4-bit nibble (0x0..0xF) to its 6-bit line symbol.
	encode4b = []byte{
		0x00: 0x15, 0x01: 0x31, 0x02: 0x32, 0x03: 0x23,
		0x04: 0x34, 0x05: 0x25, 0x06: 0x26, 0x07: 0x16,
		0x08: 0x1A, 0x09: 0x19, 0x0A: 0x2A, 0x0B: 0x0B,
		0x0C: 0x2C, 0x0D: 0x0D, 0x0E: 0x0E, 0x0F: 0x1C,
	}

	// Inverse of encode4b table, with 0xFF indicating an undefined value
	// (a 6-bit pattern that is not a valid line symbol).
	decode6b = []byte{
		0x00: 0xFF, 0x01: 0xFF, 0x02: 0xFF, 0x03: 0xFF,
		0x04: 0xFF, 0x05: 0xFF, 0x06: 0xFF, 0x07: 0xFF,
		0x08: 0xFF, 0x09: 0xFF, 0x0A: 0xFF, 0x0B: 0x0B,
		0x0C: 0xFF, 0x0D: 0x0D, 0x0E: 0x0E, 0x0F: 0xFF,
		0x10: 0xFF, 0x11: 0xFF, 0x12: 0xFF, 0x13: 0xFF,
		0x14: 0xFF, 0x15: 0x00, 0x16: 0x07, 0x17: 0xFF,
		0x18: 0xFF, 0x19: 0x09, 0x1A: 0x08, 0x1B: 0xFF,
		0x1C: 0x0F, 0x1D: 0xFF, 0x1E: 0xFF, 0x1F: 0xFF,
		0x20: 0xFF, 0x21: 0xFF, 0x22: 0xFF, 0x23: 0x03,
		0x24: 0xFF, 0x25: 0x05, 0x26: 0x06, 0x27: 0xFF,
		0x28: 0xFF, 0x29: 0xFF, 0x2A: 0x0A, 0x2B: 0xFF,
		0x2C: 0x0C, 0x2D: 0xFF, 0x2E: 0xFF, 0x2F: 0xFF,
		0x30: 0xFF, 0x31: 0x01, 0x32: 0x02, 0x33: 0xFF,
		0x34: 0x04, 0x35: 0xFF, 0x36: 0xFF, 0x37: 0xFF,
		0x38: 0xFF, 0x39: 0xFF, 0x3A: 0xFF, 0x3B: 0xFF,
		0x3C: 0xFF, 0x3D: 0xFF, 0x3E: 0xFF, 0x3F: 0xFF,
	}
)
package neuralnetwork
import (
"math"
"math/rand"
"time"
"github.com/timothy102/matrix"
)
// Layer interface given these 5 functions which every layer must have.
type Layer interface {
	// Call runs the layer's forward pass and returns its outputs.
	Call() []float64
	// GetWeights returns the layer's weight matrix.
	GetWeights() matrix.Matrix
	// GetBiases returns the layer's bias vector.
	GetBiases() matrix.Vector
	// Name returns the layer's name.
	Name() string
	// TrainableParameters returns the number of trainable parameters.
	TrainableParameters() int
}

// DenseLayer defines a fully connected layer.
type DenseLayer struct {
	units             int       // number of output units
	inputs, outputs   []float64 // forward-pass input and output values
	weights           Weights
	biases            Biases
	trainable         bool
	name              string
	kernelRegularizer func([]float64) []float64
	biasRegularizer   func([]float64) []float64
	Activation        func(float64) float64 // element-wise activation function
	KernelInit        func(float64) float64 // weight initializer
	BiasInit          func(float64) float64 // bias initializer
}

// Weights struct with the actual kernels and the kernel initializer function.
type Weights struct {
	kernels    matrix.Matrix
	KernelInit func(float64) float64
}

// Biases struct with the actual biases and the bias initializer function.
type Biases struct {
	bs       matrix.Vector
	BiasInit func(float64) float64
}

// shape describes a layer's input shape.
type shape struct {
	inputShape []float64
}
// WeightInit builds an a-by-b weight matrix whose random entries are mapped
// through kernelInit. Already invoked at dense-layer initialization.
func WeightInit(a, b int, kernelInit func(float64) float64) Weights {
	kernels := matrix.RandomValuedMatrix(a, b).MapFunc(kernelInit)
	return Weights{kernels: kernels, KernelInit: kernelInit}
}

// BiasInit builds a length-a bias vector whose random entries are mapped
// through biasInit. Already invoked at dense-layer initialization.
func BiasInit(a int, biasInit func(float64) float64) Biases {
	vec := matrix.RandomVector(a).Map(biasInit)
	return Biases{bs: vec, BiasInit: biasInit}
}
// Dense constructs a fully connected layer over the given inputs, with
// He-uniform-initialized weights and zero-initialized biases.
func Dense(units int, inputs []float64, activation func(float64) float64) DenseLayer {
	return DenseLayer{
		units:      units,
		inputs:     inputs,
		Activation: activation,
		weights:    WeightInit(units, len(inputs), HeUniform),
		biases:     BiasInit(units, ZeroInitializer),
	}
}
// Call of the dense layer. Outputs the next tensors: activation(W·x + b).
func (d DenseLayer) Call() []float64 {
	vec := matrix.NewVector(d.inputs).ApplyMatrix(d.weights.kernels).Add(d.biases.bs)
	return vec.Map(d.Activation).Slice()
}
// Name of the dense layer.
func (d DenseLayer) Name() string {
	return d.name
}

// GetWeights returns the layer's weights.
func (d DenseLayer) GetWeights() matrix.Matrix {
	return d.weights.kernels
}

// GetBiases returns the layer's biases.
func (d DenseLayer) GetBiases() matrix.Vector {
	return d.biases.bs
}

// TrainableParameters returns the count of trainable parameters
// (weight elements plus bias elements).
func (d DenseLayer) TrainableParameters() int {
	return d.weights.kernels.NumberOfElements() + d.biases.bs.NumberOfElements()
}

// SetWeights is used for manually defining the weight matrix.
func (d *DenseLayer) SetWeights(kernels matrix.Matrix) {
	d.weights.kernels = kernels
}

// SetBiases is used for manually defining the bias vector.
func (d *DenseLayer) SetBiases(bs matrix.Vector) {
	d.biases.bs = bs
}
// InputLayer layer, much like the keras one.
type InputLayer struct {
	inputs, outputs []float64
	weights         Weights
	biases          Biases
	trainable       bool
	name            string
}

// Input constructs the input layer with He-uniform weights (len(inputs) x 1)
// and zero-initialized biases sized to the inputs.
func Input(inputs []float64) InputLayer {
	weights := WeightInit(len(inputs), 1, HeUniform)
	biases := BiasInit(len(inputs), ZeroInitializer)
	return InputLayer{
		inputs:  inputs,
		weights: weights,
		biases:  biases,
	}
}

// Call of the input layer: applies the layer's weights and biases to the
// inputs and caches the result in outputs.
func (i *InputLayer) Call() []float64 {
	vec := matrix.NewVector(i.inputs).ApplyMatrix(i.weights.kernels).Add(i.biases.bs)
	i.outputs = vec.Slice()
	return vec.Slice()
}
// BatchNormLayer layer. alpha scales and beta shifts the normalized values;
// epsilon stabilizes the variance denominator (see Call). All three default
// to zero when constructed via BatchNorm.
type BatchNormLayer struct {
	inputs, outputs      []float64
	beta, epsilon, alpha float64
	trainable            bool
	name                 string
}

// BatchNorm constructs a batch normalization layer over the given inputs.
func BatchNorm(inputs []float64) BatchNormLayer {
	return BatchNormLayer{inputs: inputs}
}
// Call for the batch normalization layer: normalizes each input to zero
// mean/unit variance and applies the scale (alpha) and shift (beta).
//
// Bug fix: the output slice was previously created with
// make([]float64, len(bn.inputs)) and then appended to, which produced a
// slice of twice the expected length with a leading block of zeros.
func (bn *BatchNormLayer) Call() []float64 {
	outputs := make([]float64, 0, len(bn.inputs))
	variance := Variance(bn.inputs)
	mean := meanValue(bn.inputs)
	for _, x := range bn.inputs {
		normalized := (x - mean) / math.Sqrt(variance+bn.epsilon)
		outputs = append(outputs, bn.alpha*normalized+bn.beta)
	}
	bn.outputs = outputs
	return outputs
}
//Variance returns the variance
func Variance(fls []float64) float64 {
var sum float64
for _, f := range fls {
sum += math.Pow(f-meanValue(fls), 2)
}
return sum / float64(len(fls))
}
func meanValue(fls []float64) float64 {
mean := sum(fls) / float64(len(fls))
return mean
}
// DropoutLayer layer: zeroes a fraction (rate) of the inputs (see Call).
type DropoutLayer struct {
	inputs []float64
	rate   float64 // fraction of inputs to drop, expected in [0, 1]
}

// Dropout constructs a dropout layer with the given drop rate.
func Dropout(inputs []float64, rate float64) DropoutLayer {
	return DropoutLayer{inputs: inputs, rate: rate}
}
// Call for the dropout layer: zeroes out rate*len(inputs) values in place
// and returns the inputs slice.
//
// Bug fix: the previous loop gated each assignment on the loop-invariant
// condition len(inputs) % int(weightCount) == 0, so it either zeroed an
// arbitrary prefix (skipping index 0) or did nothing at all, depending only
// on divisibility.
//
// NOTE(review): this drops a deterministic prefix; canonical dropout zeroes
// a random subset — confirm which behavior callers expect.
func (dr *DropoutLayer) Call() []float64 {
	dropCount := int(dr.rate * float64(len(dr.inputs)))
	if dropCount > len(dr.inputs) {
		dropCount = len(dr.inputs)
	}
	for i := 0; i < dropCount; i++ {
		dr.inputs[i] = 0
	}
	return dr.inputs
}
// SoftmaxLayer layer: converts raw scores into a probability distribution
// over the first `classes` entries (see Call).
type SoftmaxLayer struct {
	inputs, outputs []float64
	classes         int // number of leading outputs to return
}

// Softmax returns the softmax layer based on values.
func Softmax(inputs []float64, classes int) SoftmaxLayer {
	return SoftmaxLayer{inputs: inputs, classes: classes}
}
// Call of the softmax: returns exp(x - max) / Σ exp(x - max) for the first
// s.classes inputs.
//
// Bug fix: the previous code accumulated with `preds[i] -= exp(...)`,
// negating both the numerators and the denominator; the two sign errors
// cancelled in the final division, but the intermediates were wrong-signed
// and the code misleading.
func (s *SoftmaxLayer) Call() []float64 {
	maxVal := findMax(s.inputs)
	preds := make([]float64, len(s.inputs))
	sum := 0.0
	for i, n := range s.inputs {
		// Subtract the max for numerical stability before exponentiating.
		preds[i] = math.Exp(n - maxVal)
		sum += preds[i]
	}
	for k := range preds {
		preds[k] /= sum
	}
	outputs := preds[:s.classes]
	s.outputs = outputs
	return outputs
}
// FlattenLayer layer: exposes a matrix flattened into a single slice.
type FlattenLayer struct {
	inputs, outputs []float64
	name            string
	trainable       bool
}

// Call of the FlattenLayer: returns the values precomputed by Flatten.
func (f *FlattenLayer) Call() []float64 {
	return f.outputs
}

// Flatten constructs a FlattenLayer from a matrix via the matrix package's
// ToArray. NOTE(review): element order assumed row-major from the method
// name — confirm against the matrix package.
func Flatten(m matrix.Matrix) FlattenLayer {
	return FlattenLayer{outputs: m.ToArray()}
}
// HeUniform stands for He Initialization or the glorot_uniform for
// kernel_initialization: it draws a uniform random value from the
// [x-0.4, x+0.4] band around x.
func HeUniform(x float64) float64 {
	// NOTE(review): reseeding the global source on every call is legacy
	// behavior kept for compatibility.
	rand.Seed(time.Now().UnixNano())
	lower := x - 0.4
	upper := x + 0.4
	return lower + rand.Float64()*(upper-lower)
}
// ZeroInitializer returns the zeros initializer for the bias initialization:
// every entry becomes 0 regardless of x.
func ZeroInitializer(x float64) float64 {
	return 0
}

// OnesInitializer returns the ones initializer for the bias initialization:
// every entry becomes 1 regardless of x.
func OnesInitializer(x float64) float64 {
	return 1
}
package mesh
import (
"errors"
"github.com/EliCDavis/vector"
)
// Line2D represents a line segment
type Line2D struct {
	p1 vector.Vector2 // start point
	p2 vector.Vector2 // end point
}

// ErrNoIntersection is thrown when Intersection() contains no intersection
var ErrNoIntersection = errors.New("No Intersection")

// NewLine2D create a new line from its two end points
func NewLine2D(p1, p2 vector.Vector2) Line2D {
	return Line2D{p1, p2}
}
// GetStartPoint returns the starting point of the line segment
func (l Line2D) GetStartPoint() vector.Vector2 {
	return l.p1
}

// GetEndPoint returns the end point of the line segment
func (l Line2D) GetEndPoint() vector.Vector2 {
	return l.p2
}

// Dir is end point - starting point (direction with magnitude).
func (l Line2D) Dir() vector.Vector2 {
	return l.p2.Sub(l.p1)
}
// ScaleOutwards multiplies the current length of the line by extending it
// outwards in both directions around its midpoint.
func (l Line2D) ScaleOutwards(amount float64) Line2D {
	halfDir := l.p2.Sub(l.p1).DivByConstant(2.0)
	mid := halfDir.Add(l.p1)
	offset := halfDir.MultByConstant(amount)
	return NewLine2D(
		mid.Add(offset),
		mid.Sub(offset),
	)
}
// Intersection finds where two line segments intersect, returning
// ErrNoIntersection when they do not cross within both segments.
// https://stackoverflow.com/questions/563198/how-do-you-detect-where-two-line-segments-intersect
func (l Line2D) Intersection(other Line2D) (vector.Vector2, error) {
	// Direction components of both segments (underscore locals replaced
	// with idiomatic Go camelCase names).
	d1x := l.p2.X() - l.p1.X()
	d1y := l.p2.Y() - l.p1.Y()
	d2x := other.p2.X() - other.p1.X()
	d2y := other.p2.Y() - other.p1.Y()

	denom := -d2x*d1y + d1x*d2y
	// For parallel/collinear segments denom is 0, so s and t become Inf/NaN
	// and the range checks below fail — the error path, as before.
	s := (-d1y*(l.p1.X()-other.p1.X()) + d1x*(l.p1.Y()-other.p1.Y())) / denom
	t := (d2x*(l.p1.Y()-other.p1.Y()) - d2y*(l.p1.X()-other.p1.X())) / denom

	if s >= 0 && s <= 1 && t >= 0 && t <= 1 {
		return vector.NewVector2(l.p1.X()+(t*d1x), l.p1.Y()+(t*d1y)), nil
	}
	return vector.Vector2{}, ErrNoIntersection
}
// Intersects determines whether two lines intersect eachother
func (l Line2D) Intersects(other Line2D) bool {
// Find the four orientations needed for general and
// special cases
o1 := calculateOrientation(l.p1, l.p2, other.p1)
o2 := calculateOrientation(l.p1, l.p2, other.p2)
o3 := calculateOrientation(other.p1, other.p2, l.p1)
o4 := calculateOrientation(other.p1, other.p2, l.p2)
// General case
if o1 != o2 && o3 != o4 {
return true
}
// Special Cases
// l.p1, l.p2 and other.p1 are colinear and other.p1 lies on segment l.p1l.p2
if o1 == Colinear && onSegment(l.p1, other.p1, l.p2) {
return true
}
// l.p1, l.p2 and other.p2 are colinear and other.p2 lies on segment l.p1l.p2
if o2 == Colinear && onSegment(l.p1, other.p2, l.p2) {
return true
}
// p2, other.p2 and l.p1 are colinear and l.p1 lies on segment p2other.p2
if o3 == Colinear && onSegment(other.p1, l.p1, other.p2) {
return true
}
// p2, other.p2 and l.p2 are colinear and l.p2 lies on segment p2other.p2
if o4 == 0 && onSegment(other.p1, l.p2, other.p2) {
return true
}
return false // Doesn't fall in any of the above cases
} | line2D.go | 0.80765 | 0.662831 | line2D.go | starcoder |
package geometry
import (
"time"
"github.com/kasworld/h4o/_examples/app"
"github.com/kasworld/h4o/geometry"
"github.com/kasworld/h4o/graphic"
"github.com/kasworld/h4o/light"
"github.com/kasworld/h4o/material"
"github.com/kasworld/h4o/math32"
"github.com/kasworld/h4o/util/helper"
)
// init registers this demo under the "geometry.plane" key.
func init() {
	app.DemoMap["geometry.plane"] = &Plane{}
}

// Plane demonstrates plane geometries rendered with different material side
// modes.
type Plane struct {
	plane1 *graphic.Mesh // square plane, double sided, at the left
	plane2 *graphic.Mesh // rectangular plane, front side only, at the center
	plane3 *graphic.Mesh // rectangular plane, back side only, at the right
}
// Start is called once at the start of the demo. It adds three directional
// lights (red from +X, green from +Y, blue from +Z), an axes helper, and
// three planes differing only in size, material side mode and X position.
func (t *Plane) Start(a *app.App) {
	// Adds directional RED light from +X
	l1 := light.NewDirectional(&math32.Color{1, 0, 0}, 1)
	l1.SetPosition(1, 0, 0)
	a.Scene().Add(l1)

	// Adds directional GREEN light from +Y
	l2 := light.NewDirectional(&math32.Color{0, 1, 0}, 1)
	l2.SetPosition(0, 1, 0)
	a.Scene().Add(l2)

	// Adds directional BLUE light from +Z
	l3 := light.NewDirectional(&math32.Color{0, 0, 1}, 1)
	l3.SetPosition(0, 0, 1)
	a.Scene().Add(l3)

	// Create axes helper
	a.Scene().Add(helper.NewAxes(1))

	// The three demo planes share all construction logic, so build them via
	// a single helper instead of three copy-pasted blocks.
	t.plane1 = newDemoPlane(1, 1, material.SideDouble, -1) // square, double sided, left
	t.plane2 = newDemoPlane(0.5, 1, material.SideFront, 0) // rectangular, front side, center
	t.plane3 = newDemoPlane(0.5, 1, material.SideBack, 1)  // rectangular, back side, right
	a.Scene().Add(t.plane1)
	a.Scene().Add(t.plane2)
	a.Scene().Add(t.plane3)
}

// newDemoPlane builds a white, non-wireframe plane mesh of the given size
// and material side mode, positioned at x on the X axis.
func newDemoPlane(width, height float32, side material.Side, x float32) *graphic.Mesh {
	geom := geometry.NewPlane(width, height)
	mat := material.NewStandard(&math32.Color{1, 1, 1})
	mat.SetWireframe(false)
	mat.SetSide(side)
	mesh := graphic.NewMesh(geom, mat)
	// The original left the center plane at its default position; setting
	// X explicitly to 0 is equivalent.
	mesh.SetPositionX(x)
	return mesh
}
// Update is called every frame.
func (t *Plane) Update(a *app.App, deltaTime time.Duration) {
	// Rotate at 1 rotation each 5 seconds
	delta := float32(deltaTime.Seconds()) * 2 * math32.Pi / 5
	t.plane1.RotateY(delta)
	t.plane2.RotateX(delta)
	t.plane3.RotateX(-delta)
}

// Cleanup is called once at the end of the demo. Nothing to release here.
func (t *Plane) Cleanup(a *app.App) {}
package govalid
import (
"fmt"
"reflect"
"strconv"
)
// -----------------------------------------------------------------------------
// NumberOpt is a number validation function: it receives the value converted
// to float64 and returns nil when valid.
type NumberOpt func(float64) error

// -----------------------------------------------------------------------------

// Number constructs a number validator using the specified validation functions.
// Validates data type to be either any numerical type or a pointer to such
// (except a complex number).
func Number(opts ...NumberOpt) Validator {
	return &numberValidator{opts}
}
// -----------------------------------------------------------------------------
// NumMin returns a number validator function that requires the value to be
// at least min.
func NumMin(min float64) NumberOpt {
	return func(f float64) error {
		if f < min {
			current, bound := shortFloatStr(f, min)
			return fmt.Errorf("number %s should >= %s", current, bound)
		}
		return nil
	}
}
// -----------------------------------------------------------------------------
// NumMax returns a number validator function that requires the value to be
// at most max.
func NumMax(max float64) NumberOpt {
	return func(f float64) error {
		if f > max {
			current, bound := shortFloatStr(f, max)
			return fmt.Errorf("number %s should <= %s", current, bound)
		}
		return nil
	}
}
// -----------------------------------------------------------------------------
// NumIs returns a number validator function that requires the value to equal
// expected.
func NumIs(expected float64) NumberOpt {
	return func(f float64) error {
		if f != expected {
			want, got := shortFloatStr(expected, f)
			return fmt.Errorf("expected %s, got %s", want, got)
		}
		return nil
	}
}
// -----------------------------------------------------------------------------
// numberValidator is the Validator implementation for a number.
type numberValidator struct {
	opts []NumberOpt
}

// -----------------------------------------------------------------------------

// float64type is the reflection target every numeric input is converted to.
var float64type = reflect.TypeOf(float64(0))

// Validate is the actual workhorse for the number validator: it dereferences
// an optional non-nil pointer, converts any numeric kind to float64 and then
// applies every configured NumberOpt. It returns ("Number", error) on the
// first failure and ("", nil) on success.
func (r *numberValidator) Validate(data interface{}) (string, error) {
	value := reflect.ValueOf(data)
	switch value.Kind() {
	case reflect.Invalid:
		// data was an untyped nil.
		return "Number", fmt.Errorf("expected (*)data convertible to float64, got <nil>")
	case reflect.Ptr:
		if value.IsNil() {
			return "Number", fmt.Errorf("expected (*)data convertible to float64, got <nil> %s", value.Type())
		}
		value = value.Elem()
	}
	if value.Type().ConvertibleTo(float64type) {
		value = value.Convert(float64type)
	} else {
		return "Number", fmt.Errorf("expected (*)data convertible to float64, got %s", value.Type())
	}
	v := value.Interface().(float64)
	for _, o := range r.opts {
		if err := o(v); err != nil {
			return "Number", err
		}
	}
	return "", nil
}
// -----------------------------------------------------------------------------
// shortFloatStr renders both floats using the shortest exact decimal
// representation (strconv 'f' format with precision -1), suitable for
// human-readable validation messages.
func shortFloatStr(a, b float64) (string, string) {
	short := func(v float64) string {
		return strconv.FormatFloat(v, 'f', -1, 64)
	}
	return short(a), short(b)
}
// -----------------------------------------------------------------------------
package iso20022
// SecuritiesFinancing10 holds the details of the closing of a securities
// financing transaction (ISO 20022 message component).
type SecuritiesFinancing10 struct {
	// Date/Time at which rate change has taken place.
	RateChangeDate *ISODateTime `xml:"RateChngDt,omitempty"`
	// Specifies whether the rate is fixed or variable.
	RateType *RateType19Choice `xml:"RateTp,omitempty"`
	// Specifies whether the collateral position should be subject to automatic revaluation by the account servicer.
	Revaluation *Revaluation2Choice `xml:"Rvaltn,omitempty"`
	// Legal framework of the transaction.
	LegalFramework *LegalFramework1Code `xml:"LglFrmwk,omitempty"`
	// Identifies the computation method of accrued interest of the related financial instrument.
	InterestComputationMethod *InterestComputationMethod2Choice `xml:"IntrstCmptnMtd,omitempty"`
	// Index or support rate used together with the spread to calculate the repurchase rate.
	VariableRateSupport *RateName1 `xml:"VarblRateSpprt,omitempty"`
	// Repurchase rate used to calculate the repurchase amount.
	RepurchaseRate *Rate2 `xml:"RpRate,omitempty"`
	// Percentage mark-up on a loan consideration used to reflect the lender's risk.
	StockLoanMargin *Rate2 `xml:"StockLnMrgn,omitempty"`
	// Haircut or valuation factor on the security expressed as a percentage.
	SecuritiesHaircut *Rate2 `xml:"SctiesHrcut,omitempty"`
	// Interest rate to be paid on the transaction amount, as agreed between the counterparties.
	PricingRate *RateOrName1Choice `xml:"PricgRate,omitempty"`
	// Margin over or under an index that determines the repurchase rate, expressed as a rate or an amount.
	SpreadRate *SpreadRate1 `xml:"SprdRate,omitempty"`
	// Indicates whether or not the trade is callable.
	CallableTradeIndicator *YesNoIndicator `xml:"CllblTradInd,omitempty"`
	// Minimum number of days' notice a counterparty needs for terminating the transaction.
	TransactionCallDelay *Max3NumericText `xml:"TxCallDely,omitempty"`
	// Interest amount that has accrued in between two periods, for example, in between interest payment periods.
	AccruedInterestAmount *AmountAndDirection5 `xml:"AcrdIntrstAmt,omitempty"`
	// Interest rate that has been accrued in between coupon payment periods.
	AccruedInterestPercentage *PercentageRate `xml:"AcrdIntrstPctg,omitempty"`
	// Fixed amount of money that has to be paid (instead of interest) in the case of a recall or at the closing date.
	ForfeitAmount *AmountAndDirection5 `xml:"FrftAmt,omitempty"`
	// Difference between the amount of money of the first leg and the amount of the second leg of the transaction.
	PremiumAmount *AmountAndDirection5 `xml:"PrmAmt,omitempty"`
	// Amount of money to be settled per piece of collateral to close the transaction.
	ClosingAmountPerPiecesOfCollateral *AmountAndDirection5 `xml:"ClsgAmtPerPcsOfColl,omitempty"`
	// Indicates the total number of collateral instructions involved in the transaction.
	TotalNumberOfCollateralInstructions *Max3NumericText `xml:"TtlNbOfCollInstrs,omitempty"`
	// Provides details for the securities financing transaction.
	FinancingAgreement *Agreement3 `xml:"FincgAgrmt,omitempty"`
	// Method applied to a lending transaction.
	LendingTransactionMethod *LendingTransactionMethod1Choice `xml:"LndgTxMtd,omitempty"`
	// Indicates if the contract is with or without an exchange of collateral.
	LendingWithCollateral *YesNoIndicator `xml:"LndgWthColl,omitempty"`
	// Identifies the underlying reason for the borrowing, for instance, sale on my behalf or on behalf of a third party.
	BorrowingReason *BorrowingReason1Choice `xml:"BrrwgRsn,omitempty"`
	// Indicates the type of collateral, for instance, security, bond, etc.
	CollateralType *CollateralType1Choice `xml:"CollTp,omitempty"`
	// Indicates whether or not the contract terms changed.
	ContractTermsModificationChanged *YesNoIndicator `xml:"CtrctTermsModChngd,omitempty"`
	// Interest rate to be paid as agreed between the counterparties.
	InterestRate *Rate2 `xml:"IntrstRate,omitempty"`
	// Rate to be paid by the Borrower to the Lender for the securities borrowed.
	BorrowingRate *Rate2 `xml:"BrrwgRate,omitempty"`
	// Method used to calculate the standard collateral amount.
	StandardCollateralRatio *Rate2 `xml:"StdCollRatio,omitempty"`
	// Percentage of earnings paid to shareholders in dividends.
	DividendRatio *Rate2 `xml:"DvddRatio,omitempty"`
	// Number of days the securities are lent or borrowed where the contract has an agreed closing date.
	NumberOfDaysLendingBorrowing *Number21Choice `xml:"NbOfDaysLndgBrrwg,omitempty"`
	// Specifies the standard collateral amount.
	StandardCollateralAmount *AmountAndDirection5 `xml:"StdCollAmt,omitempty"`
	// Interest rate tax that has been accrued in between coupon payment periods.
	AccruedInterestTax *YesNoIndicator `xml:"AcrdIntrstTax,omitempty"`
	// Number of days accrued at the instant of closing trade.
	EndNumberOfDaysAccrued *Max3Number `xml:"EndNbOfDaysAcrd,omitempty"`
	// End ratio of principal outstanding to the original balance.
	EndFactor *BaseOneRate `xml:"EndFctr,omitempty"`
	// Type of securities lending.
	SecuritiesLendingType *SecuritiesLendingType1Choice `xml:"SctiesLndgTp,omitempty"`
	// Indicates the possibility to terminate the securities lending contract either by the borrower or lender before the expiration date.
	Reversible *Reversible1Choice `xml:"Rvsbl,omitempty"`
	// This is the minimum date at which the Borrower is allowed to give back the securities.
	MinimumDateForCallBack *ISODate `xml:"MinDtForCallBck,omitempty"`
	// Indicates that the contract can be rolled over.
	RollOver *YesNoIndicator `xml:"RollOver,omitempty"`
	// Indicates whether the securities lending fees can be paid periodically or at the end of the contract.
	PeriodicPayment *YesNoIndicator `xml:"PrdcPmt,omitempty"`
	// Indicates whether the trade is executed ex coupon.
	ExCoupon *YesNoIndicator `xml:"ExCpn,omitempty"`
}
// The methods below are machine-generated builder helpers: each Set*
// method converts a plain string into the field's pointer type, and each
// Add* method allocates the nested component and returns it so the
// caller can populate it further.
func (s *SecuritiesFinancing10) SetRateChangeDate(value string) {
	s.RateChangeDate = (*ISODateTime)(&value)
}
func (s *SecuritiesFinancing10) AddRateType() *RateType19Choice {
	s.RateType = new(RateType19Choice)
	return s.RateType
}
func (s *SecuritiesFinancing10) AddRevaluation() *Revaluation2Choice {
	s.Revaluation = new(Revaluation2Choice)
	return s.Revaluation
}
func (s *SecuritiesFinancing10) SetLegalFramework(value string) {
	s.LegalFramework = (*LegalFramework1Code)(&value)
}
func (s *SecuritiesFinancing10) AddInterestComputationMethod() *InterestComputationMethod2Choice {
	s.InterestComputationMethod = new(InterestComputationMethod2Choice)
	return s.InterestComputationMethod
}
func (s *SecuritiesFinancing10) AddVariableRateSupport() *RateName1 {
	s.VariableRateSupport = new(RateName1)
	return s.VariableRateSupport
}
func (s *SecuritiesFinancing10) AddRepurchaseRate() *Rate2 {
	s.RepurchaseRate = new(Rate2)
	return s.RepurchaseRate
}
func (s *SecuritiesFinancing10) AddStockLoanMargin() *Rate2 {
	s.StockLoanMargin = new(Rate2)
	return s.StockLoanMargin
}
func (s *SecuritiesFinancing10) AddSecuritiesHaircut() *Rate2 {
	s.SecuritiesHaircut = new(Rate2)
	return s.SecuritiesHaircut
}
func (s *SecuritiesFinancing10) AddPricingRate() *RateOrName1Choice {
	s.PricingRate = new(RateOrName1Choice)
	return s.PricingRate
}
func (s *SecuritiesFinancing10) AddSpreadRate() *SpreadRate1 {
	s.SpreadRate = new(SpreadRate1)
	return s.SpreadRate
}
func (s *SecuritiesFinancing10) SetCallableTradeIndicator(value string) {
	s.CallableTradeIndicator = (*YesNoIndicator)(&value)
}
func (s *SecuritiesFinancing10) SetTransactionCallDelay(value string) {
	s.TransactionCallDelay = (*Max3NumericText)(&value)
}
func (s *SecuritiesFinancing10) AddAccruedInterestAmount() *AmountAndDirection5 {
	s.AccruedInterestAmount = new(AmountAndDirection5)
	return s.AccruedInterestAmount
}
func (s *SecuritiesFinancing10) SetAccruedInterestPercentage(value string) {
	s.AccruedInterestPercentage = (*PercentageRate)(&value)
}
func (s *SecuritiesFinancing10) AddForfeitAmount() *AmountAndDirection5 {
	s.ForfeitAmount = new(AmountAndDirection5)
	return s.ForfeitAmount
}
func (s *SecuritiesFinancing10) AddPremiumAmount() *AmountAndDirection5 {
	s.PremiumAmount = new(AmountAndDirection5)
	return s.PremiumAmount
}
func (s *SecuritiesFinancing10) AddClosingAmountPerPiecesOfCollateral() *AmountAndDirection5 {
	s.ClosingAmountPerPiecesOfCollateral = new(AmountAndDirection5)
	return s.ClosingAmountPerPiecesOfCollateral
}
func (s *SecuritiesFinancing10) SetTotalNumberOfCollateralInstructions(value string) {
	s.TotalNumberOfCollateralInstructions = (*Max3NumericText)(&value)
}
func (s *SecuritiesFinancing10) AddFinancingAgreement() *Agreement3 {
	s.FinancingAgreement = new(Agreement3)
	return s.FinancingAgreement
}
func (s *SecuritiesFinancing10) AddLendingTransactionMethod() *LendingTransactionMethod1Choice {
	s.LendingTransactionMethod = new(LendingTransactionMethod1Choice)
	return s.LendingTransactionMethod
}
func (s *SecuritiesFinancing10) SetLendingWithCollateral(value string) {
	s.LendingWithCollateral = (*YesNoIndicator)(&value)
}
func (s *SecuritiesFinancing10) AddBorrowingReason() *BorrowingReason1Choice {
	s.BorrowingReason = new(BorrowingReason1Choice)
	return s.BorrowingReason
}
func (s *SecuritiesFinancing10) AddCollateralType() *CollateralType1Choice {
	s.CollateralType = new(CollateralType1Choice)
	return s.CollateralType
}
func (s *SecuritiesFinancing10) SetContractTermsModificationChanged(value string) {
	s.ContractTermsModificationChanged = (*YesNoIndicator)(&value)
}
func (s *SecuritiesFinancing10) AddInterestRate() *Rate2 {
	s.InterestRate = new(Rate2)
	return s.InterestRate
}
func (s *SecuritiesFinancing10) AddBorrowingRate() *Rate2 {
	s.BorrowingRate = new(Rate2)
	return s.BorrowingRate
}
func (s *SecuritiesFinancing10) AddStandardCollateralRatio() *Rate2 {
	s.StandardCollateralRatio = new(Rate2)
	return s.StandardCollateralRatio
}
func (s *SecuritiesFinancing10) AddDividendRatio() *Rate2 {
	s.DividendRatio = new(Rate2)
	return s.DividendRatio
}
func (s *SecuritiesFinancing10) AddNumberOfDaysLendingBorrowing() *Number21Choice {
	s.NumberOfDaysLendingBorrowing = new(Number21Choice)
	return s.NumberOfDaysLendingBorrowing
}
func (s *SecuritiesFinancing10) AddStandardCollateralAmount() *AmountAndDirection5 {
	s.StandardCollateralAmount = new(AmountAndDirection5)
	return s.StandardCollateralAmount
}
func (s *SecuritiesFinancing10) SetAccruedInterestTax(value string) {
	s.AccruedInterestTax = (*YesNoIndicator)(&value)
}
func (s *SecuritiesFinancing10) SetEndNumberOfDaysAccrued(value string) {
	s.EndNumberOfDaysAccrued = (*Max3Number)(&value)
}
func (s *SecuritiesFinancing10) SetEndFactor(value string) {
	s.EndFactor = (*BaseOneRate)(&value)
}
func (s *SecuritiesFinancing10) AddSecuritiesLendingType() *SecuritiesLendingType1Choice {
	s.SecuritiesLendingType = new(SecuritiesLendingType1Choice)
	return s.SecuritiesLendingType
}
func (s *SecuritiesFinancing10) AddReversible() *Reversible1Choice {
	s.Reversible = new(Reversible1Choice)
	return s.Reversible
}
func (s *SecuritiesFinancing10) SetMinimumDateForCallBack(value string) {
	s.MinimumDateForCallBack = (*ISODate)(&value)
}
func (s *SecuritiesFinancing10) SetRollOver(value string) {
	s.RollOver = (*YesNoIndicator)(&value)
}
func (s *SecuritiesFinancing10) SetPeriodicPayment(value string) {
	s.PeriodicPayment = (*YesNoIndicator)(&value)
}
func (s *SecuritiesFinancing10) SetExCoupon(value string) {
s.ExCoupon = (*YesNoIndicator)(&value)
} | SecuritiesFinancing10.go | 0.855791 | 0.478407 | SecuritiesFinancing10.go | starcoder |
package geom
import (
"fmt"
)
//const tolerance = 0.000001
// tolerance is the maximum absolute difference at which two float64
// coordinates are considered equal (used via cmpFloat in pointEqual).
const tolerance = 0.000000001
/*
// Float64 compares two floats to see if they are within the given tolerance.
func cmpFloat(f1, f2 float64) bool {
if math.IsInf(f1, 1) {
return math.IsInf(f2, 1)
}
if math.IsInf(f2, 1) {
return math.IsInf(f1, 1)
}
if math.IsInf(f1, -1) {
return math.IsInf(f2, -1)
}
if math.IsInf(f2, -1) {
return math.IsInf(f1, -1)
}
return math.Abs(f1-f2) < tolerance
}
*/
func pointEqual(p1, p2 [2]float64) bool { return cmpFloat(p1[0], p2[0]) && cmpFloat(p1[1], p2[1]) }
// Triangle is an array representation of a geometry triangle:
// three vertices, each an (x, y) pair.
type Triangle [3][2]float64
// Center returns the centroid of the triangle, i.e. the average of its
// three vertices.
func (t Triangle) Center() (pt [2]float64) {
	for _, v := range t {
		pt[0] += v[0]
		pt[1] += v[1]
	}
	pt[0] /= 3
	pt[1] /= 3
	return pt
}
// LinearRings returns the coordinates of the linear rings: a single ring
// holding the triangle's three vertices.
func (t Triangle) LinearRings() [][][2]float64 {
	return [][][2]float64{t[:]}
}
// ThirdPoint takes 2 points and checks which point is the 3rd in the Triangle.
// If p1 and p2 do not both match vertices of the triangle, t[0] is returned.
func (t Triangle) ThirdPoint(p1, p2 [2]float64) [2]float64 {
	switch {
	case (pointEqual(t[0], p1) && pointEqual(t[1], p2)) ||
		(pointEqual(t[1], p1) && pointEqual(t[0], p2)):
		return t[2]
	case (pointEqual(t[0], p1) && pointEqual(t[2], p2)) ||
		(pointEqual(t[2], p1) && pointEqual(t[0], p2)):
		return t[1]
	default:
		return t[0]
	}
}
// NewTriangleFromPolygon takes the first three points from the outer ring of a
// polygon to create a triangle. In debug builds it panics when the polygon
// does not look like a triangle (one ring with at least three points).
func NewTriangleFromPolygon(py [][][2]float64) Triangle {
	// Assume we are getting triangles from the function.
	if debug && len(py) != 1 {
		panic(fmt.Sprintf("Step 3 : assumption invalid for triangle. %v", py))
	}
	if debug && len(py[0]) < 3 {
		panic(fmt.Sprintf("Step 3 : assumption invalid for triangle. %v", py))
	}
	t := Triangle{py[0][0], py[0][1], py[0][2]}
	return t
}
// Area returns twice the area of the oriented triangle (a,b,c), i.e.
// the area is positive if the triangle is oriented counterclockwise.
func (t Triangle) Area() float64 {
	a, b, c := t[0], t[1], t[2]
	return (b[0]-a[0])*(c[1]-a[1]) - (b[1]-a[1])*(c[0]-a[0])
}
// NewTriangleContaining returns a triangle that is large enough to contain
// the given points. It panics if the containing extent cannot be built.
func NewTriangleContaining(pts ...Point) Triangle {
	const buff = 10
	ext := NewExtentFromPoints(pts...)
	tri, err := NewTriangleForExtent(ext, buff)
	if err != nil {
		panic(err)
	}
	return tri
}
// NewTriangleContainingPoints is like NewTriangleContaining but takes raw
// [2]float64 coordinates instead of Points.
func NewTriangleContainingPoints(pts ...[2]float64) Triangle {
	const buff = 10
	ext := NewExtent(pts...)
	tri, err := NewTriangleForExtent(ext, buff)
	if err != nil {
		panic(err)
	}
	return tri
}
func NewTriangleForExtent(ext *Extent, buff float64) (Triangle, error) {
if ext == nil {
return Triangle{EmptyPoint, EmptyPoint, EmptyPoint},
fmt.Errorf("extent is nil")
}
xlen := ext[2] - ext[0]
ylen := ext[3] - ext[1]
x2len := xlen / 2
nx := ext[0] - (x2len * buff) - buff
cx := ext[0] + x2len
xx := ext[2] + (x2len * buff) + buff
ny := ext[1] - (ylen * buff) - buff
xy := ext[3] + (2 * ylen * buff) + buff
return Triangle{
{nx, ny},
{cx, xy},
{xx, ny},
}, nil
} | triangle.go | 0.793386 | 0.68925 | triangle.go | starcoder |
package netpbm
import (
"bufio"
"errors"
"fmt"
"image"
"image/color"
"io"
"strings"
"unicode"
)
// A BW is simply an alias for an image.Paletted. However, it is intended to
// represent images containing only white and black in their color palette.
type BW struct{ *image.Paletted }
// MaxValue returns the maximum index value allowed (1: the palette has
// exactly the entries 0=white and 1=black).
func (p *BW) MaxValue() uint16 {
	return 1
}
// Format identifies the image as a PBM image.
func (p *BW) Format() Format {
	return PBM
}
// HasAlpha indicates that there is no alpha channel.
func (p *BW) HasAlpha() bool {
	return false
}
// NewBW returns a new black-and-white image with the given bounds.
// Palette index 0 is white and index 1 is black, per the PBM convention.
func NewBW(r image.Rectangle) *BW {
	colorMap := make(color.Palette, 2)
	colorMap[0] = color.RGBA{255, 255, 255, 255}
	colorMap[1] = color.RGBA{0, 0, 0, 255}
	return &BW{image.NewPaletted(r, colorMap)}
}
// PromoteToGrayM generates an 8-bit grayscale image that looks identical to
// the given black-and-white image. It takes as input a maximum channel value.
func (p *BW) PromoteToGrayM(m uint8) *GrayM {
	gray := NewGrayM(p.Bounds(), m)
	for i, bw := range p.Pix {
		gray.Pix[i] = (1 - bw) * m // PBM defines 0=white, 1=black.
	}
	return gray
}
// PromoteToGrayM32 generates a 16-bit grayscale image that looks identical to
// the given black-and-white image. It takes as input a maximum channel value.
func (p *BW) PromoteToGrayM32(m uint16) *GrayM32 {
	gray := NewGrayM32(p.Bounds(), m)
	for i, bw := range p.Pix {
		g := uint16(1-bw) * m // PBM defines 0=white, 1=black.
		// Store the 16-bit sample big-endian (high byte first).
		gray.Pix[i*2+0] = uint8(g >> 8)
		gray.Pix[i*2+1] = uint8(g)
	}
	return gray
}
// decodeConfigPBMWithComments reads and parses a PBM header, either "raw"
// (binary) or "plain" (ASCII). Unlike decodeConfigPBM, it also returns any
// comments appearing in the file.
func decodeConfigPBMWithComments(r io.Reader) (image.Config, []string, error) {
	// We really want a bufio.Reader. If we were given one, use it. If
	// not, create a new one.
	br, ok := r.(*bufio.Reader)
	if !ok {
		br = bufio.NewReader(r)
	}
	nr := newNetpbmReader(br)
	// Parse the PBM header.
	header, ok := nr.GetNetpbmHeader()
	if !ok {
		err := nr.Err()
		if err == nil {
			err = errors.New("Invalid PBM header")
		}
		return image.Config{}, nil, err
	}
	// Store the image configuration.
	var cfg image.Config
	cfg.Width = header.Width
	cfg.Height = header.Height
	// A PBM file's color map is 0=white, 1=black.
	colorMap := make(color.Palette, 2)
	colorMap[0] = color.RGBA{255, 255, 255, 255}
	colorMap[1] = color.RGBA{0, 0, 0, 255}
	cfg.ColorModel = colorMap
	return cfg, header.Comments, nil
}
// decodeConfigPBM reads and parses a PBM header, either "raw"
// (binary) or "plain" (ASCII), discarding any comments.
func decodeConfigPBM(r io.Reader) (image.Config, error) {
	img, _, err := decodeConfigPBMWithComments(r)
	return img, err
}
// decodePBMWithComments reads a complete "raw" (binary) PBM image. Unlike
// decodePBM, it also returns any comments appearing in the file.
func decodePBMWithComments(r io.Reader) (image.Image, []string, error) {
	// Read the image header, and use it to prepare a B&W image.
	br := bufio.NewReader(r)
	config, comments, err := decodeConfigPBMWithComments(br)
	if err != nil {
		return nil, nil, err
	}
	img := NewBW(image.Rect(0, 0, config.Width, config.Height))
	// Read bits until no more remain.
	nr := newNetpbmReader(br)
	buf := make([]byte, 1<<20) // Arbitrary, large, buffer size
	bitsRemaining := config.Width * config.Height
	bitNum := 0
ReadLoop:
	for {
		var nRead int
		nRead, err = nr.Read(buf)
		if nRead == 0 && err != nil {
			return nil, nil, err
		}
		for _, oneByte := range buf[:nRead] {
			// Unpack each byte most-significant bit first.
			for i := 7; i >= 0; i-- {
				img.Pix[bitNum] = uint8((oneByte >> uint8(i)) & 1)
				bitNum++
				bitsRemaining--
				if bitsRemaining == 0 {
					// We've read the entire image.
					break ReadLoop
				}
				if bitNum%config.Width == 0 {
					// Ignore row padding.
					break
				}
			}
		}
	}
	return img, comments, nil
}
// decodePBM reads a complete "raw" (binary) PBM image, discarding comments.
func decodePBM(r io.Reader) (image.Image, error) {
	img, _, err := decodePBMWithComments(r)
	return img, err
}
// decodePBMPlainWithComments reads a complete "plain" (ASCII) PBM image.
// Unlike decodePBMPlain, it also returns any comments appearing in the file.
func decodePBMPlainWithComments(r io.Reader) (image.Image, []string, error) {
	// Read the image header, and use it to prepare a B&W image.
	br := bufio.NewReader(r)
	config, comments, err := decodeConfigPBMWithComments(br)
	if err != nil {
		return nil, nil, err
	}
	img := NewBW(image.Rect(0, 0, config.Width, config.Height))
	// Define a simple error handler.
	nr := newNetpbmReader(br)
	badness := func() (image.Image, []string, error) {
		// Something went wrong. Either we have an error code to
		// explain what or we make up a generic error message.
		err := nr.Err()
		if err == nil {
			err = errors.New("Failed to parse ASCII PBM data")
		}
		return img, nil, err
	}
	// Read bits (ASCII "0" or "1") until no more remain.
	totalBits := config.Width * config.Height
	for i := 0; i < totalBits; {
		ch := nr.GetNextByteAsRune()
		switch {
		case nr.Err() != nil:
			return badness()
		case unicode.IsSpace(ch):
			// Whitespace between samples is ignored.
			continue
		case ch == '0' || ch == '1':
			img.Pix[i] = uint8(ch - '0')
			i++
		default:
			return badness()
		}
	}
	return img, comments, nil
}
// decodePBMPlain reads a complete "plain" (ASCII) PBM image, discarding comments.
func decodePBMPlain(r io.Reader) (image.Image, error) {
	img, _, err := decodePBMPlainWithComments(r)
	return img, err
}
// Indicate that we can decode both raw (P4) and plain (P1) PBM files.
func init() {
	image.RegisterFormat("pbm", "P4", decodePBM, decodeConfigPBM)
	image.RegisterFormat("pbm", "P1", decodePBMPlain, decodeConfigPBM)
}
// encodePBM writes an arbitrary image in PBM format.
func encodePBM(w io.Writer, img image.Image, opts *EncodeOptions) error {
	// Write the PBM header: "P1" for plain (ASCII), "P4" for raw (binary).
	if opts.Plain {
		fmt.Fprintln(w, "P1")
	} else {
		fmt.Fprintln(w, "P4")
	}
	// PBM comments may not span lines, so fold newlines into spaces.
	for _, cmt := range opts.Comments {
		cmt = strings.Replace(cmt, "\n", " ", -1)
		cmt = strings.Replace(cmt, "\r", " ", -1)
		fmt.Fprintf(w, "# %s\n", cmt)
	}
	rect := img.Bounds()
	width := rect.Max.X - rect.Min.X
	height := rect.Max.Y - rect.Min.Y
	fmt.Fprintf(w, "%d %d\n", width, height)
	// Write the PBM data.
	return encodeBWData(w, img, opts)
}
// encodeBWData writes image data as 1-bit samples.
func encodeBWData(w io.Writer, img image.Image, opts *EncodeOptions) error {
// In the background, write each index value into a channel.
rect := img.Bounds()
width := rect.Max.X - rect.Min.X
samples := make(chan uint16, width)
go func() {
bwImage := NewBW(image.ZR)
cm := bwImage.ColorModel().(color.Palette)
for y := rect.Min.Y; y < rect.Max.Y; y++ {
for x := rect.Min.X; x < rect.Max.X; x++ {
samples <- uint16(cm.Index(img.At(x, y)))
}
}
close(samples)
}()
// In the foreground, consume index values (either 0 or 1) and
// write them to the image file as individual bits. Pack 8
// bits to a byte, pad each row, and output.
if opts.Plain {
return writePlainData(w, samples)
}
wb, ok := w.(*bufio.Writer)
if !ok {
wb = bufio.NewWriter(w)
}
var b byte // Next byte to write
var bLen uint // Valid bits in b
var rowBits int // Bits written to the current row
for s := range samples {
b = b<<1 | byte(s)
bLen++
rowBits++
if rowBits == width {
// Pad the last byte in the row.
b <<= 8 - bLen
bLen = 8
rowBits = 0
}
if bLen == 8 {
// Write a full byte to the output.
if err := wb.WriteByte(b); err != nil {
return err
}
b = 0
bLen = 0
}
}
wb.Flush()
return nil
} | pbm.go | 0.750187 | 0.40539 | pbm.go | starcoder |
package main
import (
rl "github.com/chunqian/go-raylib/raylib"
"runtime"
)
func init() {
	// The raylib window and its GL context must stay on the main OS thread.
	runtime.LockOSThread()
}
func main() {
screenWidth := int32(800)
screenHeight := int32(450)
rl.InitWindow(screenWidth, screenHeight, "raylib [models] example - first person maze")
defer rl.CloseWindow()
camera := rl.NewCamera(
rl.NewVector3(0.2, 0.4, 0.2),
rl.NewVector3(0, 0, 0),
rl.NewVector3(0, 1.0, 0),
45.0,
int32(rl.CAMERA_PERSPECTIVE),
)
imMap := rl.LoadImage("../models/resources/cubicmap.png")
cubicmap := rl.LoadTextureFromImage(imMap)
defer rl.UnloadTexture(cubicmap)
mesh := rl.GenMeshCubicmap(imMap, rl.NewVector3(1.0, 1.0, 1.0))
model := rl.LoadModelFromMesh(mesh)
defer rl.UnloadModel(model)
texture := rl.LoadTexture("../models/resources/cubicmap_atlas.png")
defer rl.UnloadTexture(texture)
model.Materialser(0).Mapser(rl.MAP_DIFFUSE).Texture = rl.Texture(texture)
// mapPixels := rl.GetImageData(imMap)
mapPixels := rl.LoadImageColors(imMap)
defer rl.UnloadColors(mapPixels)
rl.UnloadImage(imMap)
mapPosition := rl.NewVector3(-16, 0, -8)
// playerPosition := camera.Position
rl.SetCameraMode(camera, int32(rl.CAMERA_FIRST_PERSON))
rl.SetTargetFPS(60)
for !rl.WindowShouldClose() {
oldCamPos := camera.Position
rl.UpdateCamera(&camera)
playerPos := rl.NewVector2(camera.Position.X, camera.Position.Z)
playerRadius := float32(0.1)
playerCellX := int32(playerPos.X - mapPosition.X + 0.5)
playerCellY := int32(playerPos.Y - mapPosition.Z + 0.5)
if playerCellX < 0 {
playerCellX = 0
} else if playerCellX >= cubicmap.Width {
playerCellX = cubicmap.Width - 1
}
if playerCellY < 0 {
playerCellY = 0
} else if playerCellY >= cubicmap.Height {
playerCellY = cubicmap.Height - 1
}
for y := int32(0); y < cubicmap.Height; y++ {
for x := int32(0); x < cubicmap.Width; x++ {
if mapPixels.Index(y*cubicmap.Width+x).R == 255 &&
rl.CheckCollisionCircleRec(
playerPos,
playerRadius,
rl.NewRectangle(
mapPosition.X-0.5+float32(x)*1.0,
mapPosition.Z-0.5+float32(y)*1.0,
1.0,
1.0,
),
) {
camera.Position = oldCamPos
}
}
}
rl.BeginDrawing()
rl.ClearBackground(rl.RayWhite)
rl.BeginMode3D(rl.Camera3D(camera))
rl.DrawModel(model, mapPosition, 1.0, rl.White)
rl.EndMode3D()
rl.DrawTextureEx(
cubicmap,
rl.NewVector2(
float32(rl.GetScreenWidth()-cubicmap.Width*4-20),
20,
),
0,
4,
rl.White,
)
rl.DrawRectangleLines(
rl.GetScreenWidth()-cubicmap.Width*4-20,
20,
cubicmap.Width*4,
cubicmap.Height*4,
rl.Green,
)
rl.DrawRectangle(
rl.GetScreenWidth()-cubicmap.Width*4-20+playerCellX*4,
20+playerCellY*4,
4,
4,
rl.Red,
)
rl.DrawFPS(10, 10)
rl.EndDrawing()
}
} | examples/models/first_person_maze/first_person_maze.go | 0.506347 | 0.421135 | first_person_maze.go | starcoder |
package storage
import (
"math"
"time"
"github.com/m3db/m3/src/query/block"
"github.com/m3db/m3/src/query/ts"
)
// FetchResultToBlockResult converts a fetch result into coordinator blocks.
// The series are first aligned to the query's start/end/interval, then
// wrapped in a single multiSeriesBlock.
func FetchResultToBlockResult(result *FetchResult, query *FetchQuery) (block.Result, error) {
	alignedSeriesList, err := result.SeriesList.Align(query.Start, query.End, query.Interval)
	if err != nil {
		return block.Result{}, err
	}
	multiBlock, err := newMultiSeriesBlock(alignedSeriesList, query)
	if err != nil {
		return block.Result{}, err
	}
	return block.Result{
		Blocks: []block.Block{multiBlock},
	}, nil
}
// multiSeriesBlock adapts an aligned ts.SeriesList to the block.Block
// interface.
type multiSeriesBlock struct {
	seriesList ts.SeriesList
	meta block.Metadata
}
// newMultiSeriesBlock builds a multiSeriesBlock whose bounds cover the
// query window at the series list's resolution.
func newMultiSeriesBlock(seriesList ts.SeriesList, query *FetchQuery) (multiSeriesBlock, error) {
	resolution, err := seriesList.Resolution()
	if err != nil {
		return multiSeriesBlock{}, err
	}
	meta := block.Metadata{
		Bounds: block.Bounds{
			Start: query.Start,
			Duration: query.End.Sub(query.Start),
			StepSize: resolution,
		},
	}
	return multiSeriesBlock{seriesList: seriesList, meta: meta}, nil
}
// Meta returns the block's metadata.
func (m multiSeriesBlock) Meta() block.Metadata {
	return m.meta
}
// StepCount returns the number of steps covered by the block's bounds.
func (m multiSeriesBlock) StepCount() int {
	// If series has fewer points then it should return NaNs
	return m.meta.Bounds.Steps()
}
// StepIter returns a step (column-wise) iterator over the block,
// positioned before the first step.
func (m multiSeriesBlock) StepIter() (block.StepIter, error) {
	return &multiSeriesBlockStepIter{block: m, index: -1}, nil
}
// SeriesIter returns a series (row-wise) iterator over the block.
func (m multiSeriesBlock) SeriesIter() (block.SeriesIter, error) {
	return newMultiSeriesBlockSeriesIter(m), nil
}
// SeriesMeta returns the per-series metadata (tags and name).
func (m multiSeriesBlock) SeriesMeta() []block.SeriesMeta {
	metas := make([]block.SeriesMeta, len(m.seriesList))
	for i, s := range m.seriesList {
		metas[i].Tags = s.Tags
		metas[i].Name = s.Name()
	}
	return metas
}
// Close is currently a no-op.
// TODO: Actually free up resources
func (m multiSeriesBlock) Close() error {
	return nil
}
// multiSeriesBlockStepIter iterates over the block one time step at a
// time; index is the current step and starts before the first step at -1.
type multiSeriesBlockStepIter struct {
	block multiSeriesBlock
	index int
}
// SeriesMeta returns the per-series metadata of the underlying block.
func (m *multiSeriesBlockStepIter) SeriesMeta() []block.SeriesMeta {
	return m.block.SeriesMeta()
}
// Meta returns the underlying block's metadata.
func (m *multiSeriesBlockStepIter) Meta() block.Metadata {
	return m.block.Meta()
}
// Next advances the iterator and reports whether a step remains.
func (m *multiSeriesBlockStepIter) Next() bool {
	if len(m.block.seriesList) == 0 {
		return false
	}
	m.index++
	return m.index < m.block.StepCount()
}
// Current returns the column of values for the current step; series
// shorter than the current index contribute NaN.
// NOTE(review): only the first series' length is consulted for every
// series — assumes Align produced equal-length series; confirm.
func (m *multiSeriesBlockStepIter) Current() (block.Step, error) {
	values := make([]float64, len(m.block.seriesList))
	seriesLen := m.block.seriesList[0].Len()
	for i, s := range m.block.seriesList {
		if m.index < seriesLen {
			values[i] = s.Values().ValueAt(m.index)
		} else {
			values[i] = math.NaN()
		}
	}
	bounds := m.block.meta.Bounds
	t := bounds.Start.Add(time.Duration(m.index) * bounds.StepSize)
	return block.NewColStep(t, values), nil
}
// StepCount returns the number of steps of the underlying block.
func (m *multiSeriesBlockStepIter) StepCount() int {
	// If series has fewer points then it should return NaNs
	return m.block.StepCount()
}
// Close is currently a no-op.
// TODO: Actually free up resources
func (m *multiSeriesBlockStepIter) Close() {
}
// multiSeriesBlockSeriesIter iterates over the block one series at a
// time; index starts before the first series at -1.
type multiSeriesBlockSeriesIter struct {
	block multiSeriesBlock
	index int
}
// Meta returns the underlying block's metadata.
func (m *multiSeriesBlockSeriesIter) Meta() block.Metadata {
	return m.block.Meta()
}
// SeriesMeta returns the per-series metadata of the underlying block.
func (m *multiSeriesBlockSeriesIter) SeriesMeta() []block.SeriesMeta {
	return m.block.SeriesMeta()
}
// newMultiSeriesBlockSeriesIter returns a series iterator positioned
// before the first series.
func newMultiSeriesBlockSeriesIter(block multiSeriesBlock) block.SeriesIter {
	return &multiSeriesBlockSeriesIter{block: block, index: -1}
}
// SeriesCount returns the number of series in the block.
func (m *multiSeriesBlockSeriesIter) SeriesCount() int {
	return len(m.block.seriesList)
}
// Next advances the iterator and reports whether a series remains.
func (m *multiSeriesBlockSeriesIter) Next() bool {
	m.index++
	return m.index < m.SeriesCount()
}
// Current returns the current series, padded with NaNs up to the block's
// step count when the series is shorter.
func (m *multiSeriesBlockSeriesIter) Current() (block.Series, error) {
	s := m.block.seriesList[m.index]
	seriesLen := s.Values().Len()
	values := make([]float64, m.block.StepCount())
	seriesValues := s.Values()
	for i := 0; i < m.block.StepCount(); i++ {
		if i < seriesLen {
			values[i] = seriesValues.ValueAt(i)
		} else {
			values[i] = math.NaN()
		}
	}
	return block.NewSeries(values, block.SeriesMeta{
		Tags: s.Tags,
		Name: s.Name(),
	}), nil
}
func (m *multiSeriesBlockSeriesIter) Close() {
} | src/query/storage/block.go | 0.626581 | 0.411761 | block.go | starcoder |
package bezier
import (
"github.com/adamcolton/geom/d2"
"github.com/adamcolton/geom/d2/curve/line"
"github.com/adamcolton/geom/d2/curve/poly"
)
// Blossom computes the blossom point of the curve's control points at the
// given parameter values.
func (b Bezier) Blossom(fs ...float64) d2.Pt {
	// https://en.wikipedia.org/wiki/Blossom_(functional)
	return b.newBuf(nil, fs).blossom()
}
// BlossomBuf computes the Blossom point for the control points of a bezier
// curve using the provided buffer. Reusing a buffer can increase performance.
func (b Bezier) BlossomBuf(ptBuf []d2.Pt, fs ...float64) d2.Pt {
	return b.newBuf(ptBuf, fs).blossom()
}
// Segment returns a bezier curve whose start and end are relative to the
// base curve. So calling b.Segment(0.2, 0.7) will return a curve that
// exactly matches b from 0.2 to 0.7.
func (b Bezier) Segment(start, end float64) Bezier {
	return b.newBuf(nil, nil).segment(start, end).Bezier
}
// SegmentBuf returns a bezier curve whose start and end are relative to the
// base curve. Providing a buf reduces the overhead.
func (b Bezier) SegmentBuf(start, end float64, ptBuf []d2.Pt, floatBuf []float64) Bezier {
	return b.newBuf(ptBuf, floatBuf).segment(start, end).Bezier
}
// LineIntersections fulfills line.LineIntersector returning the intersection
// points relative to the line.
func (b Bezier) LineIntersections(l line.Line, buf []float64) []float64 {
	return poly.NewBezier(b).LineIntersections(l, buf)
}
// BezierIntersections returns the intersection points relative to the Bezier
// curve.
func (b Bezier) BezierIntersections(l line.Line) []float64 {
	return poly.NewBezier(b).PolyLineIntersections(l, nil)
}
// buf bundles a Bezier curve with reusable scratch buffers: pts holds
// intermediate interpolation points and fs the blossom parameters.
type buf struct {
	fs []float64
	pts []d2.Pt
	Bezier
}
// newBuf prepares a buf for curve b, resizing (or allocating) the point
// buffer to exactly len(b) entries and the parameter buffer to len(b)-1
// entries so callers may pass nil or oversized buffers.
func (b Bezier) newBuf(pts []d2.Pt, fs []float64) buf {
	ln := len(b)
	if ptsLn := len(pts); ptsLn < ln {
		pts = make([]d2.Pt, ln)
	} else if ptsLn > ln {
		pts = pts[:ln]
	}
	ln--
	if fsLn := len(fs); fsLn > ln {
		fs = fs[:ln]
	} else if fsLn < ln {
		fs = make([]float64, ln)
	}
	return buf{
		pts: pts,
		fs: fs,
		Bezier: b,
	}
}
// blossom evaluates the blossom of the control points at the parameters
// in b.fs by repeated linear interpolation (de Casteljau-style), writing
// intermediate results into b.pts and returning the final point.
func (b buf) blossom() d2.Pt {
	ln := len(b.pts)
	copy(b.pts, b.Bezier)
	for _, f := range b.fs {
		ln--
		for i, pt := range b.pts[:ln] {
			b.pts[i] = pt.Add(b.pts[i+1].Subtract(pt).Multiply(f))
		}
	}
	return b.pts[0]
}
func (b buf) segment(start, end float64) buf {
ln := len(b.Bezier)
out := make(Bezier, ln)
for j := range b.fs {
b.fs[j] = start
}
out[0] = b.blossom()
for i := range b.fs {
b.fs[i] = end
out[i+1] = b.blossom()
}
return buf{
fs: b.fs,
pts: b.pts,
Bezier: out,
}
} | d2/curve/bezier/intersection.go | 0.883066 | 0.655405 | intersection.go | starcoder |
package tflite
import (
flatbuffers "github.com/google/flatbuffers/go"
)
// SparsityParametersT is the native-Go (object API) representation of the
// flatbuffers SparsityParameters table.
type SparsityParametersT struct {
	TraversalOrder []int32
	BlockMap []int32
	DimMetadata []*DimensionMetadataT
}
func (t *SparsityParametersT) Pack(builder *flatbuffers.Builder) flatbuffers.UOffsetT {
if t == nil { return 0 }
traversalOrderOffset := flatbuffers.UOffsetT(0)
if t.TraversalOrder != nil {
traversalOrderLength := len(t.TraversalOrder)
SparsityParametersStartTraversalOrderVector(builder, traversalOrderLength)
for j := traversalOrderLength - 1; j >= 0; j-- {
builder.PrependInt32(t.TraversalOrder[j])
}
traversalOrderOffset = builder.EndVector(traversalOrderLength)
}
blockMapOffset := flatbuffers.UOffsetT(0)
if t.BlockMap != nil {
blockMapLength := len(t.BlockMap)
SparsityParametersStartBlockMapVector(builder, blockMapLength)
for j := blockMapLength - 1; j >= 0; j-- {
builder.PrependInt32(t.BlockMap[j])
}
blockMapOffset = builder.EndVector(blockMapLength)
}
dimMetadataOffset := flatbuffers.UOffsetT(0)
if t.DimMetadata != nil {
dimMetadataLength := len(t.DimMetadata)
dimMetadataOffsets := make([]flatbuffers.UOffsetT, dimMetadataLength)
for j := 0; j < dimMetadataLength; j++ {
dimMetadataOffsets[j] = t.DimMetadata[j].Pack(builder)
}
SparsityParametersStartDimMetadataVector(builder, dimMetadataLength)
for j := dimMetadataLength - 1; j >= 0; j-- {
builder.PrependUOffsetT(dimMetadataOffsets[j])
}
dimMetadataOffset = builder.EndVector(dimMetadataLength)
}
SparsityParametersStart(builder)
SparsityParametersAddTraversalOrder(builder, traversalOrderOffset)
SparsityParametersAddBlockMap(builder, blockMapOffset)
SparsityParametersAddDimMetadata(builder, dimMetadataOffset)
return SparsityParametersEnd(builder)
}
func (rcv *SparsityParameters) UnPackTo(t *SparsityParametersT) {
traversalOrderLength := rcv.TraversalOrderLength()
t.TraversalOrder = make([]int32, traversalOrderLength)
for j := 0; j < traversalOrderLength; j++ {
t.TraversalOrder[j] = rcv.TraversalOrder(j)
}
blockMapLength := rcv.BlockMapLength()
t.BlockMap = make([]int32, blockMapLength)
for j := 0; j < blockMapLength; j++ {
t.BlockMap[j] = rcv.BlockMap(j)
}
dimMetadataLength := rcv.DimMetadataLength()
t.DimMetadata = make([]*DimensionMetadataT, dimMetadataLength)
for j := 0; j < dimMetadataLength; j++ {
x := DimensionMetadata{}
rcv.DimMetadata(&x, j)
t.DimMetadata[j] = x.UnPack()
}
}
func (rcv *SparsityParameters) UnPack() *SparsityParametersT {
if rcv == nil { return nil }
t := &SparsityParametersT{}
rcv.UnPackTo(t)
return t
}
// SparsityParameters is the read-only flatbuffers accessor for the table.
type SparsityParameters struct {
	_tab flatbuffers.Table
}

// GetRootAsSparsityParameters reads the root table from buf starting at offset.
func GetRootAsSparsityParameters(buf []byte, offset flatbuffers.UOffsetT) *SparsityParameters {
	n := flatbuffers.GetUOffsetT(buf[offset:])
	x := &SparsityParameters{}
	x.Init(buf, n+offset)
	return x
}

// Init points the accessor at table position i within buf.
func (rcv *SparsityParameters) Init(buf []byte, i flatbuffers.UOffsetT) {
	rcv._tab.Bytes = buf
	rcv._tab.Pos = i
}

// Table exposes the underlying flatbuffers table.
func (rcv *SparsityParameters) Table() flatbuffers.Table {
	return rcv._tab
}

// TraversalOrder returns element j of the traversal_order vector (vtable
// slot 4), or 0 when the field is absent.
func (rcv *SparsityParameters) TraversalOrder(j int) int32 {
	o := flatbuffers.UOffsetT(rcv._tab.Offset(4))
	if o != 0 {
		a := rcv._tab.Vector(o)
		return rcv._tab.GetInt32(a + flatbuffers.UOffsetT(j*4))
	}
	return 0
}

// TraversalOrderLength returns the length of the traversal_order vector.
func (rcv *SparsityParameters) TraversalOrderLength() int {
	o := flatbuffers.UOffsetT(rcv._tab.Offset(4))
	if o != 0 {
		return rcv._tab.VectorLen(o)
	}
	return 0
}

// MutateTraversalOrder overwrites element j in place; it reports false when
// the field is absent.
func (rcv *SparsityParameters) MutateTraversalOrder(j int, n int32) bool {
	o := flatbuffers.UOffsetT(rcv._tab.Offset(4))
	if o != 0 {
		a := rcv._tab.Vector(o)
		return rcv._tab.MutateInt32(a+flatbuffers.UOffsetT(j*4), n)
	}
	return false
}

// BlockMap returns element j of the block_map vector (vtable slot 6), or 0
// when the field is absent.
func (rcv *SparsityParameters) BlockMap(j int) int32 {
	o := flatbuffers.UOffsetT(rcv._tab.Offset(6))
	if o != 0 {
		a := rcv._tab.Vector(o)
		return rcv._tab.GetInt32(a + flatbuffers.UOffsetT(j*4))
	}
	return 0
}

// BlockMapLength returns the length of the block_map vector.
func (rcv *SparsityParameters) BlockMapLength() int {
	o := flatbuffers.UOffsetT(rcv._tab.Offset(6))
	if o != 0 {
		return rcv._tab.VectorLen(o)
	}
	return 0
}

// MutateBlockMap overwrites element j in place; it reports false when the
// field is absent.
func (rcv *SparsityParameters) MutateBlockMap(j int, n int32) bool {
	o := flatbuffers.UOffsetT(rcv._tab.Offset(6))
	if o != 0 {
		a := rcv._tab.Vector(o)
		return rcv._tab.MutateInt32(a+flatbuffers.UOffsetT(j*4), n)
	}
	return false
}

// DimMetadata initialises obj with element j of the dim_metadata vector of
// tables (vtable slot 8); it reports false when the field is absent.
func (rcv *SparsityParameters) DimMetadata(obj *DimensionMetadata, j int) bool {
	o := flatbuffers.UOffsetT(rcv._tab.Offset(8))
	if o != 0 {
		x := rcv._tab.Vector(o)
		x += flatbuffers.UOffsetT(j) * 4
		x = rcv._tab.Indirect(x)
		obj.Init(rcv._tab.Bytes, x)
		return true
	}
	return false
}

// DimMetadataLength returns the length of the dim_metadata vector.
func (rcv *SparsityParameters) DimMetadataLength() int {
	o := flatbuffers.UOffsetT(rcv._tab.Offset(8))
	if o != 0 {
		return rcv._tab.VectorLen(o)
	}
	return 0
}

// SparsityParametersStart begins a table with 3 vtable slots.
func SparsityParametersStart(builder *flatbuffers.Builder) {
	builder.StartObject(3)
}
func SparsityParametersAddTraversalOrder(builder *flatbuffers.Builder, traversalOrder flatbuffers.UOffsetT) {
	builder.PrependUOffsetTSlot(0, flatbuffers.UOffsetT(traversalOrder), 0)
}
func SparsityParametersStartTraversalOrderVector(builder *flatbuffers.Builder, numElems int) flatbuffers.UOffsetT {
	return builder.StartVector(4, numElems, 4)
}
func SparsityParametersAddBlockMap(builder *flatbuffers.Builder, blockMap flatbuffers.UOffsetT) {
	builder.PrependUOffsetTSlot(1, flatbuffers.UOffsetT(blockMap), 0)
}
func SparsityParametersStartBlockMapVector(builder *flatbuffers.Builder, numElems int) flatbuffers.UOffsetT {
	return builder.StartVector(4, numElems, 4)
}
func SparsityParametersAddDimMetadata(builder *flatbuffers.Builder, dimMetadata flatbuffers.UOffsetT) {
	builder.PrependUOffsetTSlot(2, flatbuffers.UOffsetT(dimMetadata), 0)
}
func SparsityParametersStartDimMetadataVector(builder *flatbuffers.Builder, numElems int) flatbuffers.UOffsetT {
	return builder.StartVector(4, numElems, 4)
}
// SparsityParametersEnd finishes the table and returns its offset.
func SparsityParametersEnd(builder *flatbuffers.Builder) flatbuffers.UOffsetT {
	return builder.EndObject()
} | SparsityParameters.go | 0.742515 | 0.403802 | SparsityParameters.go | starcoder
package bls
import (
"crypto/cipher"
"encoding/hex"
"io"
"github.com/drand/kyber"
"github.com/drand/kyber/group/mod"
)
// domainG1 is the domain-separation tag used when hashing messages onto G1.
var domainG1 = [8]byte{1, 1, 1, 1, 1, 1, 1, 1}

// KyberG1 is a kyber.Point holding a G1 point on BLS12-381 curve
type KyberG1 struct {
	p *PointG1
}

// nullKyberG1 returns a wrapper around a zero-valued PointG1.
func nullKyberG1() *KyberG1 {
	var p PointG1
	return newKyberG1(&p)
}

// newKyberG1 wraps an existing PointG1.
func newKyberG1(p *PointG1) *KyberG1 {
	return &KyberG1{p: p}
}

// Equal reports whether k and k2 hold the same G1 point.
func (k *KyberG1) Equal(k2 kyber.Point) bool {
	return NewG1(nil).Equal(k.p, k2.(*KyberG1).p)
}

// Null returns a new point set to the identity (zero) element of G1.
func (k *KyberG1) Null() kyber.Point {
	return newKyberG1(NewG1(nil).Zero())
}

// Base returns a new point set to the G1 generator.
func (k *KyberG1) Base() kyber.Point {
	return newKyberG1(NewG1(nil).One())
}

// Pick sets k to a point derived by hashing 32 bytes drawn from rand onto
// the curve.
func (k *KyberG1) Pick(rand cipher.Stream) kyber.Point {
	//panic("not implemented")
	var dst, src [32]byte
	rand.XORKeyStream(dst[:], src[:])
	return k.Hash(dst[:])
}

// Set copies q's point into k and returns k.
func (k *KyberG1) Set(q kyber.Point) kyber.Point {
	k.p.Set(q.(*KyberG1).p)
	return k
}

// Clone returns an independent deep copy of k.
func (k *KyberG1) Clone() kyber.Point {
	var p PointG1
	p.Set(k.p)
	return newKyberG1(&p)
}

// EmbedLen is not supported for BLS12-381 points.
func (k *KyberG1) EmbedLen() int {
	panic("bls12-381: unsupported operation")
}

// Embed is not supported for BLS12-381 points.
func (k *KyberG1) Embed(data []byte, rand cipher.Stream) kyber.Point {
	panic("bls12-381: unsupported operation")
}

// Data is not supported for BLS12-381 points.
func (k *KyberG1) Data() ([]byte, error) {
	panic("bls12-381: unsupported operation")
}
// Add sets k to the point sum a + b and returns k.
func (k *KyberG1) Add(a, b kyber.Point) kyber.Point {
	aa := a.(*KyberG1)
	bb := b.(*KyberG1)
	NewG1(nil).Add(k.p, aa.p, bb.p)
	return k
}

// Sub sets k to a - b and returns k.
func (k *KyberG1) Sub(a, b kyber.Point) kyber.Point {
	aa := a.(*KyberG1)
	bb := b.(*KyberG1)
	NewG1(nil).Sub(k.p, aa.p, bb.p)
	return k
}

// Neg sets k to the negation of a and returns k.
func (k *KyberG1) Neg(a kyber.Point) kyber.Point {
	aa := a.(*KyberG1)
	NewG1(nil).Neg(k.p, aa.p)
	return k
}

// Mul sets k to the scalar multiple s * q and returns k. A nil q means the
// base point.
func (k *KyberG1) Mul(s kyber.Scalar, q kyber.Point) kyber.Point {
	if q == nil {
		q = nullKyberG1().Base()
	}
	NewG1(nil).MulScalar(k.p, q.(*KyberG1).p, &s.(*mod.Int).V)
	return k
}

// MarshalBinary returns the compressed encoding of the point (MarshalSize
// bytes).
func (k *KyberG1) MarshalBinary() ([]byte, error) {
	return NewG1(nil).ToCompressed(k.p), nil
}

// UnmarshalBinary parses a compressed point encoding into k.
func (k *KyberG1) UnmarshalBinary(buff []byte) error {
	var err error
	k.p, err = NewG1(nil).FromCompressed(buff)
	return err
}

// MarshalTo writes the compressed encoding of k to w.
func (k *KyberG1) MarshalTo(w io.Writer) (int, error) {
	buf, err := k.MarshalBinary()
	if err != nil {
		return 0, err
	}
	return w.Write(buf)
}

// UnmarshalFrom reads exactly MarshalSize bytes from r and parses them into k.
func (k *KyberG1) UnmarshalFrom(r io.Reader) (int, error) {
	buf := make([]byte, k.MarshalSize())
	n, err := io.ReadFull(r, buf)
	if err != nil {
		return n, err
	}
	return n, k.UnmarshalBinary(buf)
}

// MarshalSize returns the byte length of the compressed encoding (48 for G1).
func (k *KyberG1) MarshalSize() int {
	return 48
}

// String returns a hex dump of the compressed point, for debugging.
func (k *KyberG1) String() string {
	b, _ := k.MarshalBinary()
	return "bls12-381.G1: " + hex.EncodeToString(b)
}
// Hash maps an arbitrary message onto a point of G1. Inputs that are not
// exactly 32 bytes are first reduced with SHA-256 so the hash-to-curve
// routine always receives a 32-byte seed.
func (k *KyberG1) Hash(m []byte) kyber.Point {
	if len(m) != 32 {
		m = sha256Hash(m)
	}
	var s [32]byte
	copy(s[:], m)
	// Fix: use the G1 domain-separation tag. The previous code passed
	// domainG2 here, hashing G1 points under the G2 domain while the
	// domainG1 constant declared at the top of this file went unused.
	pg1 := hashWithDomainG1(NewG1(nil), s, domainG1)
	k.p = pg1
	return k
} | vendor/github.com/drand/bls12-381/kyber_g1.go | 0.660501 | 0.489503 | kyber_g1.go | starcoder
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package choices provides a library for simple a/b and multivariate
testing.
choices uses hashing to uniquely assign users to experiments and
decide the values the user is assigned. This allows us to quickly
assign a user to an experiment without having to look up what they
were assigned previously. Most of the ideas in this package are based
off of Facebook's Planout.
In choices there are three main concepts: Namespaces, Experiments, and
Params. Namespaces split traffic between the experiments they
contain. Experiments are the thing you are testing. Experiments are
made up of one or more Params. Params have names and values. The
values can be either a uniform, equal weights between choices, or
weighted, user-specified weights for each choice.
In most cases you will want to create one namespace per experiment. If
you have experiments that might have interactions you can use
namespaces to split the traffic between them. For example, if you are
running a test on a banner and another test that takes over the whole
page, you will want to split the traffic between these two tests.
Another example, is if you want to run a test on a small percent of
traffic. The namespace will ensure that experiments won't overlap when
you evaluate the experiment.
Experiments contain the Params for a running experiment. When a caller
queries Experiments, they will first be hashed into a segment. We then
check if the segment is contained in that Experiment. If the segment
is contained in the experiment then the experiment will be evaluated.
An experiment will in turn evaluate each of its Params.
Params are key-value pairs. Their options for value are Uniform choice
or Weighted choice. Uniform choices will be selected in a uniform
fashion. Weighted choices will be selected based on the proportions
supplied in the weights.
*/
package choices | doc.go | 0.879703 | 0.545528 | doc.go | starcoder |
package temporal
import (
"fmt"
"math"
"time"
"github.com/m3db/m3/src/query/executor/transform"
"github.com/m3db/m3/src/query/ts"
)
const (
// AvgType calculates the average of all values in the specified interval.
AvgType = "avg_over_time"
// CountType calculates count of all values in the specified interval.
CountType = "count_over_time"
// MinType calculates the minimum of all values in the specified interval.
MinType = "min_over_time"
// MaxType calculates the maximum of all values in the specified interval.
MaxType = "max_over_time"
// SumType calculates the sum of all values in the specified interval.
SumType = "sum_over_time"
// StdDevType calculates the standard deviation of all values in the specified interval.
StdDevType = "stddev_over_time"
// StdVarType calculates the standard variance of all values in the specified interval.
StdVarType = "stdvar_over_time"
)
// aggFunc computes a single aggregate value from a window of samples.
type aggFunc func([]float64) float64

var (
	// aggFuncs maps each *_over_time operator name to its implementation.
	aggFuncs = map[string]aggFunc{
		AvgType:    avgOverTime,
		CountType:  countOverTime,
		MinType:    minOverTime,
		MaxType:    maxOverTime,
		SumType:    sumOverTime,
		StdDevType: stddevOverTime,
		StdVarType: stdvarOverTime,
	}
)

// aggProcessor creates aggNode processors bound to one aggregation function.
type aggProcessor struct {
	aggFunc aggFunc
}

// Init returns a Processor that applies a.aggFunc for the given op.
func (a aggProcessor) Init(op baseOp, controller *transform.Controller, opts transform.Options) Processor {
	return &aggNode{
		controller: controller,
		op:         op,
		aggFunc:    a.aggFunc,
	}
}

// NewAggOp creates a new base temporal transform with a specified node.
// It errors when optype is not one of the registered aggregation names.
func NewAggOp(args []interface{}, optype string) (transform.Params, error) {
	if aggregationFunc, ok := aggFuncs[optype]; ok {
		a := aggProcessor{
			aggFunc: aggregationFunc,
		}
		return newBaseOp(args, optype, a)
	}
	return nil, fmt.Errorf("unknown aggregation type: %s", optype)
}

// aggNode applies an aggregation function over each window of datapoints.
type aggNode struct {
	op         baseOp
	controller *transform.Controller
	aggFunc    func([]float64) float64
}

// Process aggregates the window's values; the time argument is unused.
func (a *aggNode) Process(datapoints ts.Datapoints, _ time.Time) float64 {
	return a.aggFunc(datapoints.Values())
}
// avgOverTime returns the mean of the non-NaN values, or NaN when there are
// none.
func avgOverTime(values []float64) float64 {
	var total, n float64
	for _, v := range values {
		if math.IsNaN(v) {
			continue
		}
		total += v
		n++
	}
	if n == 0 {
		return math.NaN()
	}
	return total / n
}

// countOverTime returns how many non-NaN values the window holds, or NaN
// when there are none.
func countOverTime(values []float64) float64 {
	n := 0.0
	for _, v := range values {
		if math.IsNaN(v) {
			continue
		}
		n++
	}
	if n == 0 {
		return math.NaN()
	}
	return n
}

// minOverTime returns the smallest non-NaN value, or NaN when there are none.
func minOverTime(values []float64) float64 {
	found := false
	low := math.Inf(1)
	for _, v := range values {
		if math.IsNaN(v) {
			continue
		}
		found = true
		low = math.Min(low, v)
	}
	if !found {
		return math.NaN()
	}
	return low
}

// maxOverTime returns the largest non-NaN value, or NaN when there are none.
func maxOverTime(values []float64) float64 {
	found := false
	high := math.Inf(-1)
	for _, v := range values {
		if math.IsNaN(v) {
			continue
		}
		found = true
		high = math.Max(high, v)
	}
	if !found {
		return math.NaN()
	}
	return high
}

// sumOverTime returns the sum of the non-NaN values, or NaN when there are
// none.
func sumOverTime(values []float64) float64 {
	var total float64
	seen := false
	for _, v := range values {
		if math.IsNaN(v) {
			continue
		}
		total += v
		seen = true
	}
	if !seen {
		return math.NaN()
	}
	return total
}

// stddevOverTime returns the population standard deviation of the non-NaN
// values: the square root of stdvarOverTime.
func stddevOverTime(values []float64) float64 {
	variance := stdvarOverTime(values)
	return math.Sqrt(variance)
}

// stdvarOverTime returns the population variance of the non-NaN values using
// Welford's single-pass algorithm, or NaN when there are none.
func stdvarOverTime(values []float64) float64 {
	var m2, n, mean float64
	for _, v := range values {
		if math.IsNaN(v) {
			continue
		}
		n++
		delta := v - mean
		mean += delta / n
		m2 += delta * (v - mean)
	}
	if n == 0 {
		return math.NaN()
	}
	return m2 / n
}
// sumAndCount returns the sum and count of the non-NaN values. When every
// value is NaN (or the slice is empty) it returns (NaN, 0) so callers can
// propagate "no data" as NaN.
func sumAndCount(values []float64) (float64, float64) {
	sum := 0.0
	count := 0.0
	for _, v := range values {
		if !math.IsNaN(v) {
			sum += v
			count++
		}
	}
	if count == 0 {
		return math.NaN(), 0
	}
	return sum, count
} | src/query/functions/temporal/aggregation.go | 0.768646 | 0.518729 | aggregation.go | starcoder
package samples
func init() {
sampleDataProposalCreateOperation[44] = `{
"expiration_time": "2016-08-20T14:37:51",
"extensions": [],
"fee": {
"amount": 2318977,
"asset_id": "1.3.0"
},
"fee_paying_account": "1.2.116522",
"proposed_ops": [
{
"op": [
6,
{
"account": "1.2.116524",
"active": {
"account_auths": [
[
"1.2.159",
1
],
[
"1.2.285",
1
],
[
"1.2.10156",
1
],
[
"1.2.98957",
1
],
[
"1.2.100730",
1
],
[
"1.2.101925",
1
],
[
"1.2.114989",
1
],
[
"1.2.116522",
5
]
],
"address_auths": [],
"key_auths": [],
"weight_threshold": 7
},
"extensions": {},
"fee": {
"amount": 29122,
"asset_id": "1.3.0"
},
"new_options": {
"extensions": [],
"memo_key": "<KEY>",
"num_committee": 0,
"num_witness": 0,
"votes": [],
"voting_account": "1.2.5"
},
"owner": {
"account_auths": [
[
"1.2.159",
1
],
[
"1.2.285",
1
],
[
"1.2.10156",
1
],
[
"1.2.98957",
1
],
[
"1.2.100730",
1
],
[
"1.2.101925",
1
],
[
"1.2.114989",
1
],
[
"1.2.116522",
5
]
],
"address_auths": [],
"key_auths": [],
"weight_threshold": 5
}
}
]
}
]
}`
}
//end of file | gen/samples/proposalcreateoperation_44.go | 0.545286 | 0.471953 | proposalcreateoperation_44.go | starcoder |
package calendar
import (
"strings"
// standard libs only above!
"github.com/litesoft-go/utils/enums"
"github.com/litesoft-go/utils/strs"
)
// Weekday is an enum value for a day of the week; see the package-level
// variables Monday..Sunday (numbered 1..7).
type Weekday struct {
	enums.Enum
	weekdayData
}

// GetDayNumber returns the day number (Monday == 1 .. Sunday == 7), or 0 for
// a nil receiver (the unpopulated default Weekday).
func (wdd *weekdayData) GetDayNumber() int {
	if wdd == nil {
		return 0
	}
	return wdd.dayNumber
}

// IsWeekend reports whether the day is Saturday (6) or Sunday (7).
func (wdd *weekdayData) IsWeekend() bool {
	dayNumber := wdd.GetDayNumber()
	return (dayNumber == 6) || (dayNumber == 7)
}

// GetAbbreviation3 returns the three-letter abbreviation (e.g. "Mon"), or ""
// for a nil receiver.
func (wdd *weekdayData) GetAbbreviation3() string {
	return wdd.getAbbr(func() string { return wdd.abbreviation3 })
}

// GetAbbreviation2 returns the two-letter abbreviation (e.g. "Mo"), or ""
// for a nil receiver.
func (wdd *weekdayData) GetAbbreviation2() string {
	return wdd.getAbbr(func() string { return wdd.abbreviation2 })
}

// GetAbbreviation1 returns the one-letter abbreviation, or "" for a nil
// receiver. Thursday is "R" and Sunday is "U" so every letter is unique.
func (wdd *weekdayData) GetAbbreviation1() string {
	return wdd.getAbbr(func() string { return wdd.abbreviation1 })
}
// The seven canonical weekdays, numbered 1 (Monday) through 7 (Sunday),
// each carrying three-, two- and one-letter abbreviations.
var (
	Monday    = Weekday{Enum: enums.New("Monday"), weekdayData: wddBuilder(1).abbrs("Mon", "Mo", "M").build()}
	Tuesday   = Weekday{Enum: enums.New("Tuesday"), weekdayData: wddBuilder(2).abbrs("Tue", "Tu", "T").build()}
	Wednesday = Weekday{Enum: enums.New("Wednesday"), weekdayData: wddBuilder(3).abbrs("Wed", "We", "W").build()}
	Thursday  = Weekday{Enum: enums.New("Thursday"), weekdayData: wddBuilder(4).abbrs("Thu", "Th", "R").build()}
	Friday    = Weekday{Enum: enums.New("Friday"), weekdayData: wddBuilder(5).abbrs("Fri", "Fr", "F").build()}
	Saturday  = Weekday{Enum: enums.New("Saturday"), weekdayData: wddBuilder(6).abbrs("Sat", "Sa", "S").build()}
	Sunday    = Weekday{Enum: enums.New("Sunday"), weekdayData: wddBuilder(7).abbrs("Sun", "Su", "U").build()}
	// defaultWeekday is the zero value registered as the lookup default.
	defaultWeekday = Weekday{}
)

// init registers the default (with a lowercasing transformer) and each
// weekday with its abbreviations as lookup aliases.
func init() {
	enums.AddDefaultWithTransformer(&defaultWeekday, strings.ToLower)
	enums.AddWithAliases(&Monday, Monday.getAbbrs()...)
	enums.AddWithAliases(&Tuesday, Tuesday.getAbbrs()...)
	enums.AddWithAliases(&Wednesday, Wednesday.getAbbrs()...)
	enums.AddWithAliases(&Thursday, Thursday.getAbbrs()...)
	enums.AddWithAliases(&Friday, Friday.getAbbrs()...)
	enums.AddWithAliases(&Saturday, Saturday.getAbbrs()...)
	enums.AddWithAliases(&Sunday, Sunday.getAbbrs()...)
}
// UpdateFrom copies the weekday payload from a looked-up enum into wd.
func (wd *Weekday) UpdateFrom(found enums.IEnum) {
	src := found.(*Weekday)
	wd.weekdayData = src.weekdayData // wd is Dst
}

// UnmarshalJSON delegates JSON decoding to the enums registry.
func (wd *Weekday) UnmarshalJSON(data []byte) error {
	return enums.UnmarshalJSON(wd, data) // wd is Dst
}

// weekdayData holds the per-day payload shared by all Weekday values.
type weekdayData struct {
	dayNumber     int // 1 (Monday) .. 7 (Sunday); 0 when unset
	abbreviation3 string
	abbreviation2 string
	abbreviation1 string
}

// wddBuilder starts building a weekdayData with the given day number.
func wddBuilder(dayNumber int) *weekdayData {
	return &weekdayData{dayNumber: dayNumber}
}

// build returns the value form of the builder.
func (wdd *weekdayData) build() weekdayData {
	return *wdd
}

// abbrs sets the three abbreviation forms and returns the builder.
func (wdd *weekdayData) abbrs(abbreviation3, abbreviation2, abbreviation1 string) *weekdayData {
	wdd.abbreviation3, wdd.abbreviation2, wdd.abbreviation1 = abbreviation3, abbreviation2, abbreviation1
	return wdd
}

// getAbbr guards f against a nil receiver, returning "" in that case.
func (wdd *weekdayData) getAbbr(f func() string) string {
	if wdd == nil {
		return ""
	}
	return f()
}

// getAbbrs collects the non-empty abbreviations, shortest first.
func (wdd *weekdayData) getAbbrs() (abbrs []string) {
	return strs.AppendNonEmpty(strs.AppendNonEmpty(strs.AppendNonEmpty(abbrs,
		wdd.GetAbbreviation1()), wdd.GetAbbreviation2()), wdd.GetAbbreviation3())
} | calendar/weekdays.go | 0.595257 | 0.437343 | weekdays.go | starcoder
package MySQLProtocol
// BuildFixedLengthInteger1 encodes value as a 1-byte fixed-length integer.
func BuildFixedLengthInteger1(value uint8) (data []byte) {
	data = make([]byte, 1)
	data[0] = byte(value >> 0 & 0xFF)
	return data
}

// GetFixedLengthInteger1 reads one byte at the current offset and advances
// the offset by 1.
func (proto *Proto) GetFixedLengthInteger1() (value uint8) {
	value |= uint8(proto.data[proto.offset] & 0xFF)
	proto.offset += 1
	return value
}
// BuildFixedLengthInteger2 encodes value as a 2-byte little-endian
// fixed-length integer (least-significant byte first).
func BuildFixedLengthInteger2(value uint16) (data []byte) {
	data = []byte{
		byte(value & 0xFF),
		byte(value >> 8 & 0xFF),
	}
	return data
}
// GetFixedLengthInteger2 reads a 2-byte little-endian integer at the current
// offset and advances the offset by 2.
func (proto *Proto) GetFixedLengthInteger2() (value uint16) {
	value |= uint16(proto.data[proto.offset+1]&0xFF) << 8
	value |= uint16(proto.data[proto.offset] & 0xFF)
	proto.offset += 2
	return value
}
// BuildFixedLengthInteger3 encodes the low 3 bytes of value as a 3-byte
// little-endian fixed-length integer.
func BuildFixedLengthInteger3(value uint32) (data []byte) {
	data = make([]byte, 3)
	data[0] = byte(value >> 0 & 0xFF)
	data[1] = byte(value >> 8 & 0xFF)
	data[2] = byte(value >> 16 & 0xFF)
	return data
}

// GetFixedLengthInteger3 reads a 3-byte little-endian integer at the current
// offset and advances the offset by 3.
func (proto *Proto) GetFixedLengthInteger3() (value uint32) {
	value |= uint32(proto.data[proto.offset+2]&0xFF) << 16
	value |= uint32(proto.data[proto.offset+1]&0xFF) << 8
	value |= uint32(proto.data[proto.offset] & 0xFF)
	proto.offset += 3
	return value
}
// BuildFixedLengthInteger4 encodes value as a 4-byte little-endian
// fixed-length integer (least-significant byte first).
func BuildFixedLengthInteger4(value uint32) (data []byte) {
	data = make([]byte, 4)
	for i := range data {
		data[i] = byte(value >> (8 * uint(i)) & 0xFF)
	}
	return data
}
// GetFixedLengthInteger4 reads a 4-byte little-endian integer at the current
// offset and advances the offset by 4.
func (proto *Proto) GetFixedLengthInteger4() (value uint32) {
	value |= uint32(proto.data[proto.offset+3]&0xFF) << 24
	value |= uint32(proto.data[proto.offset+2]&0xFF) << 16
	value |= uint32(proto.data[proto.offset+1]&0xFF) << 8
	value |= uint32(proto.data[proto.offset] & 0xFF)
	proto.offset += 4
	return value
}
// BuildFixedLengthInteger6 encodes the low 6 bytes of value as a 6-byte
// little-endian fixed-length integer.
func BuildFixedLengthInteger6(value uint64) (data []byte) {
	data = make([]byte, 6)
	data[0] = byte(value >> 0 & 0xFF)
	data[1] = byte(value >> 8 & 0xFF)
	data[2] = byte(value >> 16 & 0xFF)
	data[3] = byte(value >> 24 & 0xFF)
	data[4] = byte(value >> 32 & 0xFF)
	data[5] = byte(value >> 40 & 0xFF)
	return data
}

// GetFixedLengthInteger6 reads a 6-byte little-endian integer at the current
// offset and advances the offset by 6.
func (proto *Proto) GetFixedLengthInteger6() (value uint64) {
	value |= uint64(proto.data[proto.offset+5]&0xFF) << 40
	value |= uint64(proto.data[proto.offset+4]&0xFF) << 32
	value |= uint64(proto.data[proto.offset+3]&0xFF) << 24
	value |= uint64(proto.data[proto.offset+2]&0xFF) << 16
	value |= uint64(proto.data[proto.offset+1]&0xFF) << 8
	value |= uint64(proto.data[proto.offset] & 0xFF)
	proto.offset += 6
	return value
}
// BuildFixedLengthInteger8 encodes value as an 8-byte little-endian
// fixed-length integer (least-significant byte first).
func BuildFixedLengthInteger8(value uint64) (data []byte) {
	data = make([]byte, 8)
	for i := range data {
		data[i] = byte(value >> (8 * uint(i)) & 0xFF)
	}
	return data
}
// GetFixedLengthInteger8 reads an 8-byte little-endian integer at the current
// offset and advances the offset by 8.
func (proto *Proto) GetFixedLengthInteger8() (value uint64) {
	value |= uint64(proto.data[proto.offset+7]&0xFF) << 56
	value |= uint64(proto.data[proto.offset+6]&0xFF) << 48
	value |= uint64(proto.data[proto.offset+5]&0xFF) << 40
	value |= uint64(proto.data[proto.offset+4]&0xFF) << 32
	value |= uint64(proto.data[proto.offset+3]&0xFF) << 24
	value |= uint64(proto.data[proto.offset+2]&0xFF) << 16
	value |= uint64(proto.data[proto.offset+1]&0xFF) << 8
	value |= uint64(proto.data[proto.offset] & 0xFF)
	proto.offset += 8
	return value
} | MySQLProtocol/FixedLengthInteger.go | 0.583797 | 0.591281 | FixedLengthInteger.go | starcoder
package main
import (
"math/rand"
. "github.com/jakecoffman/cp"
"github.com/jakecoffman/cp/examples"
)
const (
imageWidth = 188
imageHeight = 35
imageRowLength = 24
)
// main builds a physics space containing one small ball per set pixel of the
// packed logo bitmap, then fires a very heavy fast body at it from the left.
func main() {
	space := NewSpace()
	space.Iterations = 1
	// The space will contain a very large number of similarly sized objects.
	// This is the perfect candidate for using the spatial hash.
	// Generally you will never need to do this.
	space.UseSpatialHash(2.0, 10000)
	bodyCount = 0
	var body *Body
	var shape *Shape
	// One ball per set pixel, jittered slightly so the stack does not start
	// in a perfectly regular grid.
	for y := 0; y < imageHeight; y++ {
		for x := 0; x < imageWidth; x++ {
			if getPixel(uint(x), uint(y)) == 0 {
				continue
			}
			xJitter := 0.05 * rand.Float64()
			yJitter := 0.05 * rand.Float64()
			// Scale by 2 and center the image around the origin (y flipped).
			shape = makeBall(2.0*(float64(x)-imageWidth/2+xJitter), 2*(imageHeight/2.0-float64(y)+yJitter))
			space.AddBody(shape.Body())
			space.AddShape(shape)
			bodyCount++
		}
	}
	// The "bullet": mass 1e9, launched from far left toward the logo.
	body = space.AddBody(NewBody(1e9, INFINITY))
	body.SetPosition(Vector{-1000, -10})
	body.SetVelocity(400, 0)
	shape = space.AddShape(NewCircle(body, 8, Vector{}))
	shape.SetElasticity(0)
	shape.SetFriction(0)
	bodyCount++
	examples.Main(space, 1.0/60.0, update, draw)
}

// getPixel returns the bit for pixel (x, y) of the packed 1-bit-per-pixel
// bitmap: 8 pixels per byte, imageRowLength bytes per row, most significant
// bit first (^x & 0x7 equals 7 - x%8, the bit index within the byte).
func getPixel(x, y uint) int {
	return (imageBitmap[(x>>3)+y*imageRowLength] >> (^x & 0x7)) & 1
}

// bodyCount tracks how many bodies have been added to the space.
var bodyCount = 0

// update advances the simulation by dt.
func update(space *Space, dt float64) {
	space.Step(dt)
}

// draw renders each body as a small light-grey dot.
func draw(space *Space) {
	space.EachBody(func(body *Body) {
		examples.DrawDot(3.0, body.Position(), FColor{200.0 / 255.0, 210.0 / 255.0, 230.0 / 255.0, 1.0})
	})
}

// makeBall creates a radius-0.95, mass-1 ball at (x, y) with no bounce and
// no friction; the caller adds the body and shape to the space.
func makeBall(x, y float64) *Shape {
	body := NewBody(1.0, INFINITY)
	body.SetPosition(Vector{x, y})
	shape := NewCircle(body, 0.95, Vector{})
	shape.SetElasticity(0)
	shape.SetFriction(0)
	return shape
}
var imageBitmap = []int{
15, -16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -64, 15, 63, -32, -2, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, -64, 15, 127, -125, -1, -128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 127, -64, 15, 127, 15, -1, -64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, -64, 15, -2,
31, -1, -64, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, -64, 0, -4, 63, -1, -32, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -1, -64, 15, -8, 127, -1, -32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, -1, -64, 0, -8, -15, -1, -32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, -31, -1, -64, 15, -8, -32,
-1, -32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, -15, -1, -64, 9, -15, -32, -1, -32, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, -15, -1, -64, 0, -15, -32, -1, -32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 63, -7, -1, -64, 9, -29, -32, 127, -61, -16, 63, 15, -61, -1, -8, 31, -16, 15, -8, 126, 7, -31,
-8, 31, -65, -7, -1, -64, 9, -29, -32, 0, 7, -8, 127, -97, -25, -1, -2, 63, -8, 31, -4, -1, 15, -13,
-4, 63, -1, -3, -1, -64, 9, -29, -32, 0, 7, -8, 127, -97, -25, -1, -2, 63, -8, 31, -4, -1, 15, -13,
-2, 63, -1, -3, -1, -64, 9, -29, -32, 0, 7, -8, 127, -97, -25, -1, -1, 63, -4, 63, -4, -1, 15, -13,
-2, 63, -33, -1, -1, -32, 9, -25, -32, 0, 7, -8, 127, -97, -25, -1, -1, 63, -4, 63, -4, -1, 15, -13,
-1, 63, -33, -1, -1, -16, 9, -25, -32, 0, 7, -8, 127, -97, -25, -1, -1, 63, -4, 63, -4, -1, 15, -13,
-1, 63, -49, -1, -1, -8, 9, -57, -32, 0, 7, -8, 127, -97, -25, -8, -1, 63, -2, 127, -4, -1, 15, -13,
-1, -65, -49, -1, -1, -4, 9, -57, -32, 0, 7, -8, 127, -97, -25, -8, -1, 63, -2, 127, -4, -1, 15, -13,
-1, -65, -57, -1, -1, -2, 9, -57, -32, 0, 7, -8, 127, -97, -25, -8, -1, 63, -2, 127, -4, -1, 15, -13,
-1, -1, -57, -1, -1, -1, 9, -57, -32, 0, 7, -1, -1, -97, -25, -8, -1, 63, -1, -1, -4, -1, 15, -13, -1,
-1, -61, -1, -1, -1, -119, -57, -32, 0, 7, -1, -1, -97, -25, -8, -1, 63, -1, -1, -4, -1, 15, -13, -1,
-1, -61, -1, -1, -1, -55, -49, -32, 0, 7, -1, -1, -97, -25, -8, -1, 63, -1, -1, -4, -1, 15, -13, -1,
-1, -63, -1, -1, -1, -23, -49, -32, 127, -57, -1, -1, -97, -25, -1, -1, 63, -1, -1, -4, -1, 15, -13,
-1, -1, -63, -1, -1, -1, -16, -49, -32, -1, -25, -1, -1, -97, -25, -1, -1, 63, -33, -5, -4, -1, 15,
-13, -1, -1, -64, -1, -9, -1, -7, -49, -32, -1, -25, -8, 127, -97, -25, -1, -1, 63, -33, -5, -4, -1,
15, -13, -1, -1, -64, -1, -13, -1, -32, -49, -32, -1, -25, -8, 127, -97, -25, -1, -2, 63, -49, -13,
-4, -1, 15, -13, -1, -1, -64, 127, -7, -1, -119, -17, -15, -1, -25, -8, 127, -97, -25, -1, -2, 63,
-49, -13, -4, -1, 15, -13, -3, -1, -64, 127, -8, -2, 15, -17, -1, -1, -25, -8, 127, -97, -25, -1,
-8, 63, -49, -13, -4, -1, 15, -13, -3, -1, -64, 63, -4, 120, 0, -17, -1, -1, -25, -8, 127, -97, -25,
-8, 0, 63, -57, -29, -4, -1, 15, -13, -4, -1, -64, 63, -4, 0, 15, -17, -1, -1, -25, -8, 127, -97,
-25, -8, 0, 63, -57, -29, -4, -1, -1, -13, -4, -1, -64, 31, -2, 0, 0, 103, -1, -1, -57, -8, 127, -97,
-25, -8, 0, 63, -57, -29, -4, -1, -1, -13, -4, 127, -64, 31, -2, 0, 15, 103, -1, -1, -57, -8, 127,
-97, -25, -8, 0, 63, -61, -61, -4, 127, -1, -29, -4, 127, -64, 15, -8, 0, 0, 55, -1, -1, -121, -8,
127, -97, -25, -8, 0, 63, -61, -61, -4, 127, -1, -29, -4, 63, -64, 15, -32, 0, 0, 23, -1, -2, 3, -16,
63, 15, -61, -16, 0, 31, -127, -127, -8, 31, -1, -127, -8, 31, -128, 7, -128, 0, 0,
} | examples/logosmash/logosmash.go | 0.584983 | 0.502747 | logosmash.go | starcoder |
package processor
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"regexp"
"time"
"github.com/Jeffail/benthos/lib/log"
"github.com/Jeffail/benthos/lib/metrics"
"github.com/Jeffail/benthos/lib/types"
"github.com/Jeffail/gabs"
"github.com/benhoyt/goawk/interp"
"github.com/benhoyt/goawk/parser"
"github.com/opentracing/opentracing-go"
)
//------------------------------------------------------------------------------
var varInvalidRegexp *regexp.Regexp
func init() {
varInvalidRegexp = regexp.MustCompile(`[^a-zA-Z0-9_]`)
Constructors[TypeAWK] = TypeSpec{
constructor: NewAWK,
description: `
Executes an AWK program on messages by feeding contents as the input based on a
codec and replaces the contents with the result. If the result is empty (nothing
is printed by the program) then the original message contents remain unchanged.
Comes with a wide range of [custom functions](./awk_functions.md) for accessing
message metadata, json fields, printing logs, etc. These functions can be
overridden by functions within the program.
### Codecs
A codec can be specified that determines how the contents of the message are fed
into the program. This does not change the custom functions.
` + "`none`" + `
An empty string is fed into the program. Functions can still be used in order to
extract and mutate metadata and message contents. This is useful for when your
program only uses functions and doesn't need the full text of the message to be
parsed by the program.
` + "`text`" + `
The full contents of the message are fed into the program as a string, allowing
you to reference tokenised segments of the message with variables ($0, $1, etc).
Custom functions can still be used with this codec.
This is the default codec as it behaves most similar to typical usage of the awk
command line tool.
` + "`json`" + `
No contents are fed into the program. Instead, variables are extracted from the
message by walking the flattened JSON structure. Each value is converted into a
variable by taking its full path, e.g. the object:
` + "``` json" + `
{
"foo": {
"bar": {
"value": 10
},
"created_at": "2018-12-18T11:57:32"
}
}
` + "```" + `
Would result in the following variable declarations:
` + "```" + `
foo_bar_value = 10
foo_created_at = "2018-12-18T11:57:32"
` + "```" + `
Custom functions can also still be used with this codec.`,
}
}
//------------------------------------------------------------------------------
// AWKConfig contains configuration fields for the AWK processor.
type AWKConfig struct {
	// Parts lists the message part indexes this processor applies to.
	Parts []int `json:"parts" yaml:"parts"`
	// Codec determines how message contents are fed into the program:
	// "none", "text" or "json" (validated in NewAWK).
	Codec string `json:"codec" yaml:"codec"`
	// Program is the AWK source compiled at construction time.
	Program string `json:"program" yaml:"program"`
}

// NewAWKConfig returns an AWKConfig with default values.
func NewAWKConfig() AWKConfig {
	return AWKConfig{
		Parts:   []int{},
		Codec:   "text",
		Program: "BEGIN { x = 0 } { print $0, x; x++ }",
	}
}
//------------------------------------------------------------------------------
// AWK is a processor that executes AWK programs on a message part and replaces
// the contents with the result.
type AWK struct {
	parts   []int
	program *parser.Program
	conf    AWKConfig
	log     log.Modular
	stats   metrics.Type
	// functions is the per-processor function table: awkFunctionsMap with
	// print_log rebound to this processor's logger (see NewAWK).
	functions map[string]interface{}

	mCount     metrics.StatCounter
	mErr       metrics.StatCounter
	mSent      metrics.StatCounter
	mBatchSent metrics.StatCounter
}
// NewAWK returns an AWK processor. The program is compiled and the codec
// validated up front so configuration errors surface at construction time
// rather than per message.
func NewAWK(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	program, err := parser.ParseProgram([]byte(conf.AWK.Program), &parser.ParserConfig{
		Funcs: awkFunctionsMap,
	})
	if err != nil {
		return nil, fmt.Errorf("failed to compile AWK program: %v", err)
	}
	switch conf.AWK.Codec {
	case "none":
	case "text":
	case "json":
	default:
		return nil, fmt.Errorf("unrecognised codec: %v", conf.AWK.Codec)
	}
	// Copy the shared stub map, then swap in a print_log implementation that
	// routes to this processor's logger at the requested level (defaulting
	// to INFO for empty or unknown levels).
	functionOverrides := make(map[string]interface{}, len(awkFunctionsMap))
	for k, v := range awkFunctionsMap {
		functionOverrides[k] = v
	}
	functionOverrides["print_log"] = func(value, level string) {
		switch level {
		default:
			fallthrough
		case "":
			fallthrough
		case "INFO":
			log.Infoln(value)
		case "TRACE":
			log.Traceln(value)
		case "DEBUG":
			log.Debugln(value)
		case "WARN":
			log.Warnln(value)
		case "ERROR":
			log.Errorln(value)
		case "FATAL":
			log.Fatalln(value)
		}
	}
	a := &AWK{
		parts:     conf.AWK.Parts,
		program:   program,
		conf:      conf.AWK,
		log:       log,
		stats:     stats,
		functions: functionOverrides,

		mCount:     stats.GetCounter("count"),
		mErr:       stats.GetCounter("error"),
		mSent:      stats.GetCounter("sent"),
		mBatchSent: stats.GetCounter("batch.sent"),
	}
	return a, nil
}
//------------------------------------------------------------------------------
// getTime converts dateStr into a time.Time. An empty dateStr yields the
// current time. When format is non-empty it is used directly as the Go
// reference layout; otherwise a fixed list of common layouts is tried in
// order and the first successful parse wins.
func getTime(dateStr string, format string) (time.Time, error) {
	if dateStr == "" {
		return time.Now(), nil
	}
	if format != "" {
		return time.Parse(format, dateStr)
	}
	// Detection order matters: the first layout that parses is used.
	layouts := []string{
		time.RubyDate,
		time.RFC1123Z,
		time.RFC1123,
		time.RFC3339,
		time.RFC822,
		time.RFC822Z,
		"Mon, 2 Jan 2006 15:04:05 -0700",
		"2006-01-02T15:04:05MST",
		"2006-01-02T15:04:05",
		"2006-01-02 15:04:05",
		"2006-01-02T15:04:05Z0700",
		"2006-01-02",
	}
	for _, layout := range layouts {
		if parsed, err := time.Parse(layout, dateStr); err == nil {
			return parsed, nil
		}
	}
	return time.Time{}, fmt.Errorf("failed to detect datetime format of: %v", dateStr)
}
// awkFunctionsMap declares the custom functions exposed to AWK programs.
// Some entries are functional (timestamp and JSON construction helpers);
// the rest are compile-time placeholders whose real, message-scoped
// implementations are swapped in per part inside ProcessMessage.
var awkFunctionsMap = map[string]interface{}{
	"timestamp_unix": func(dateStr string, format string) (int64, error) {
		parsed, err := getTime(dateStr, format)
		if err != nil {
			return 0, err
		}
		return parsed.Unix(), nil
	},
	"timestamp_unix_nano": func(dateStr string, format string) (int64, error) {
		parsed, err := getTime(dateStr, format)
		if err != nil {
			return 0, err
		}
		return parsed.UnixNano(), nil
	},
	"timestamp_format": func(unix int64, formatArg string) string {
		// Default to RFC3339 unless the program supplies a layout.
		format := time.RFC3339
		if formatArg != "" {
			format = formatArg
		}
		return time.Unix(unix, 0).In(time.UTC).Format(format)
	},
	"timestamp_format_nano": func(unixNano int64, formatArg string) string {
		format := time.RFC3339
		if formatArg != "" {
			format = formatArg
		}
		// time.Unix normalises an out-of-range nanosecond argument, so the
		// full nanosecond timestamp can be passed directly.
		return time.Unix(0, unixNano).In(time.UTC).Format(format)
	},
	"metadata_get": func(key string) string {
		// Placeholder for compilation only; replaced per message part.
		return ""
	},
	"metadata_set": func(key, value string) {
		// Placeholder for compilation only; replaced per message part.
	},
	"json_get": func(path string) (string, error) {
		// Placeholder for compilation only; replaced per message part.
		return "", errors.New("not implemented")
	},
	"json_set": func(path, value string) (int, error) {
		// Placeholder for compilation only; replaced per message part.
		return 0, errors.New("not implemented")
	},
	"create_json_object": func(vals ...string) string {
		// Pair up arguments as key/value; a trailing unpaired value is
		// silently dropped.
		obj := map[string]string{}
		for i := 0; i+1 < len(vals); i += 2 {
			obj[vals[i]] = vals[i+1]
		}
		encoded, _ := json.Marshal(obj)
		if len(encoded) == 0 {
			return "{}"
		}
		return string(encoded)
	},
	"create_json_array": func(vals ...string) string {
		encoded, _ := json.Marshal(vals)
		if len(encoded) == 0 {
			return "[]"
		}
		return string(encoded)
	},
	"print_log": func(value, level string) {
		// Placeholder for compilation only; replaced per processor instance.
	},
}
//------------------------------------------------------------------------------
// flattenForAWK converts an arbitrarily nested JSON-like structure into a
// flat map of dot-delimited paths to stringified scalar values. Array
// elements share their parent path, so later elements overwrite earlier
// ones at the same key.
func flattenForAWK(path string, data interface{}) map[string]string {
	out := map[string]string{}
	switch v := data.(type) {
	case map[string]interface{}:
		for key, val := range v {
			childPath := key
			if path != "" {
				childPath = path + "." + key
			}
			for fk, fv := range flattenForAWK(childPath, val) {
				out[fk] = fv
			}
		}
	case []interface{}:
		for _, item := range v {
			for fk, fv := range flattenForAWK(path, item) {
				out[fk] = fv
			}
		}
	default:
		out[path] = fmt.Sprintf("%v", v)
	}
	return out
}
//------------------------------------------------------------------------------
// ProcessMessage applies the processor to a message, either creating >0
// resulting messages or a response to be sent back to the message source.
//
// For each targeted part it injects message-scoped implementations of the
// metadata/json AWK functions, feeds the part to the compiled AWK program
// according to the configured codec, and replaces the part's content with
// the program's stdout (minus a single trailing newline).
//
// NOTE(review): a.functions is mutated per part below; this assumes
// IteratePartsWithSpan invokes proc sequentially and that the processor is
// not shared across goroutines — confirm before parallelizing.
func (a *AWK) ProcessMessage(msg types.Message) ([]types.Message, types.Response) {
	a.mCount.Incr(1)
	newMsg := msg.Copy()

	proc := func(i int, span opentracing.Span, part types.Part) error {
		var outBuf, errBuf bytes.Buffer

		// Function overrides: bind the placeholder AWK functions to this
		// specific message part.
		a.functions["metadata_get"] = func(k string) string {
			return part.Metadata().Get(k)
		}
		a.functions["metadata_set"] = func(k, v string) {
			part.Metadata().Set(k, v)
		}
		a.functions["json_get"] = func(path string) (string, error) {
			var gPart *gabs.Container
			jsonPart, err := part.JSON()
			if err == nil {
				gPart, err = gabs.Consume(jsonPart)
			}
			if err != nil {
				return "", fmt.Errorf("failed to parse message into json: %v", err)
			}
			gTarget := gPart.Path(path)
			if gTarget.Data() == nil {
				return "null", nil
			}
			// Return raw strings unquoted; everything else as JSON text.
			if str, isString := gTarget.Data().(string); isString {
				return str, nil
			}
			return gTarget.String(), nil
		}
		a.functions["json_set"] = func(path, v string) (int, error) {
			var gPart *gabs.Container
			jsonPart, err := part.JSON()
			if err == nil {
				gPart, err = gabs.Consume(jsonPart)
			}
			if err != nil {
				return 0, fmt.Errorf("failed to parse message into json: %v", err)
			}
			gPart.SetP(v, path)
			part.SetJSON(gPart.Data())
			return 0, nil
		}

		config := &interp.Config{
			Output: &outBuf,
			Error:  &errBuf,
			Funcs:  a.functions,
		}

		// Feed the part to the program according to the codec: "json"
		// flattens the document into AWK variables, "text" streams the raw
		// bytes on stdin, and "none" provides neither (a single space keeps
		// the interpreter's stdin non-empty).
		if a.conf.Codec == "json" {
			jsonPart, err := part.JSON()
			if err != nil {
				a.mErr.Incr(1)
				a.log.Errorf("Failed to parse part into json: %v\n", err)
				return err
			}

			for k, v := range flattenForAWK("", jsonPart) {
				config.Vars = append(config.Vars, varInvalidRegexp.ReplaceAllString(k, "_"), v)
			}
			config.Stdin = bytes.NewReader([]byte(" "))
		} else if a.conf.Codec == "text" {
			config.Stdin = bytes.NewReader(part.Get())
		} else {
			config.Stdin = bytes.NewReader([]byte(" "))
		}

		// Metadata is exposed as AWK variables for all codecs except "none";
		// invalid variable-name characters are replaced with underscores.
		if a.conf.Codec != "none" {
			part.Metadata().Iter(func(k, v string) error {
				config.Vars = append(config.Vars, varInvalidRegexp.ReplaceAllString(k, "_"), v)
				return nil
			})
		}

		if exitStatus, err := interp.ExecProgram(a.program, config); err != nil {
			a.mErr.Incr(1)
			a.log.Errorf("Non-fatal execution error: %v\n", err)
			return err
		} else if exitStatus != 0 {
			a.mErr.Incr(1)
			err = fmt.Errorf(
				"non-fatal execution error: awk interpreter returned non-zero exit code: %d", exitStatus,
			)
			a.log.Errorf("AWK: %v\n", err)
			return err
		}

		// Anything written to the program's stderr is treated as a failure.
		if errMsg, err := ioutil.ReadAll(&errBuf); err != nil {
			a.log.Errorf("Read err error: %v\n", err)
		} else if len(errMsg) > 0 {
			a.mErr.Incr(1)
			a.log.Errorf("Execution error: %s\n", errMsg)
			return errors.New(string(errMsg))
		}

		resMsg, err := ioutil.ReadAll(&outBuf)
		if err != nil {
			a.mErr.Incr(1)
			a.log.Errorf("Read output error: %v\n", err)
			return err
		}

		if len(resMsg) > 0 {
			// Remove trailing line break
			if resMsg[len(resMsg)-1] == '\n' {
				resMsg = resMsg[:len(resMsg)-1]
			}
			part.Set(resMsg)
		}

		return nil
	}

	IteratePartsWithSpan(TypeAWK, a.parts, newMsg, proc)

	msgs := [1]types.Message{newMsg}

	a.mBatchSent.Incr(1)
	a.mSent.Incr(int64(newMsg.Len()))
	return msgs[:], nil
}
// CloseAsync shuts down the processor and stops processing requests.
// The AWK processor holds no background resources, so this is a no-op.
func (a *AWK) CloseAsync() {
}
// WaitForClose blocks until the processor has closed down.
// Since CloseAsync is a no-op there is nothing to wait for; the timeout is
// ignored and nil is always returned.
func (a *AWK) WaitForClose(timeout time.Duration) error {
	return nil
}
//------------------------------------------------------------------------------ | lib/processor/awk.go | 0.673836 | 0.630273 | awk.go | starcoder |
package values
import (
"fmt"
"math"
"github.com/influxdata/flux/ast"
"github.com/influxdata/flux/semantic"
)
// BinaryFunction evaluates a binary expression over two runtime values and
// returns the resulting value.
type BinaryFunction func(l, r Value) Value

// BinaryFuncSignature is the lookup key for a binary function: an operator
// combined with the static types of its left and right operands.
type BinaryFuncSignature struct {
	Operator    ast.OperatorKind
	Left, Right semantic.Type
}
// LookupBinaryFunction returns an appropriate binary function that evaluates two values and returns another value.
// If the two types are not compatible with the given operation, this returns an error.
func LookupBinaryFunction(sig BinaryFuncSignature) (BinaryFunction, error) {
	if fn, ok := binaryFuncLookup[sig]; ok {
		return fn, nil
	}
	return nil, fmt.Errorf("unsupported binary expression %v %v %v", sig.Left, sig.Operator, sig.Right)
}
var binaryFuncLookup = map[BinaryFuncSignature]BinaryFunction{
//---------------
// Math Operators
//---------------
{Operator: ast.AdditionOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewInt(l + r)
},
{Operator: ast.AdditionOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewUInt(l + r)
},
{Operator: ast.AdditionOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewFloat(l + r)
},
{Operator: ast.SubtractionOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewInt(l - r)
},
{Operator: ast.SubtractionOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewUInt(l - r)
},
{Operator: ast.SubtractionOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewFloat(l - r)
},
{Operator: ast.MultiplicationOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewInt(l * r)
},
{Operator: ast.MultiplicationOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewUInt(l * r)
},
{Operator: ast.MultiplicationOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewFloat(l * r)
},
{Operator: ast.DivisionOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
if r == 0 {
// TODO(#38): reject divisions with a constant 0 divisor.
return NewInt(0)
}
return NewInt(l / r)
},
{Operator: ast.DivisionOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
if r == 0 {
// TODO(#38): reject divisions with a constant 0 divisor.
return NewUInt(0)
}
return NewUInt(l / r)
},
{Operator: ast.DivisionOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
if r == 0 {
// TODO(#38): reject divisions with a constant 0 divisor.
return NewFloat(math.NaN())
}
return NewFloat(l / r)
},
//---------------------
// Comparison Operators
//---------------------
// LessThanEqualOperator
{Operator: ast.LessThanEqualOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewBool(l <= r)
},
{Operator: ast.LessThanEqualOperator, Left: semantic.Int, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.UInt()
if l < 0 {
return NewBool(true)
}
return NewBool(uint64(l) <= r)
},
{Operator: ast.LessThanEqualOperator, Left: semantic.Int, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Float()
return NewBool(float64(l) <= r)
},
{Operator: ast.LessThanEqualOperator, Left: semantic.UInt, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Int()
if r < 0 {
return NewBool(false)
}
return NewBool(l <= uint64(r))
},
{Operator: ast.LessThanEqualOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewBool(l <= r)
},
{Operator: ast.LessThanEqualOperator, Left: semantic.UInt, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Float()
return NewBool(float64(l) <= r)
},
{Operator: ast.LessThanEqualOperator, Left: semantic.Float, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Int()
return NewBool(l <= float64(r))
},
{Operator: ast.LessThanEqualOperator, Left: semantic.Float, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.UInt()
return NewBool(l <= float64(r))
},
{Operator: ast.LessThanEqualOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewBool(l <= r)
},
{Operator: ast.LessThanEqualOperator, Left: semantic.String, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Str()
return NewBool(l <= r)
},
// LessThanOperator
{Operator: ast.LessThanOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewBool(l < r)
},
{Operator: ast.LessThanOperator, Left: semantic.Int, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.UInt()
if l < 0 {
return NewBool(true)
}
return NewBool(uint64(l) < r)
},
{Operator: ast.LessThanOperator, Left: semantic.Int, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Float()
return NewBool(float64(l) < r)
},
{Operator: ast.LessThanOperator, Left: semantic.UInt, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Int()
if r < 0 {
return NewBool(false)
}
return NewBool(l < uint64(r))
},
{Operator: ast.LessThanOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewBool(l < r)
},
{Operator: ast.LessThanOperator, Left: semantic.UInt, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Float()
return NewBool(float64(l) < r)
},
{Operator: ast.LessThanOperator, Left: semantic.Float, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Int()
return NewBool(l < float64(r))
},
{Operator: ast.LessThanOperator, Left: semantic.Float, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.UInt()
return NewBool(l < float64(r))
},
{Operator: ast.LessThanOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewBool(l < r)
},
{Operator: ast.LessThanOperator, Left: semantic.String, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Str()
return NewBool(l < r)
},
// GreaterThanEqualOperator
{Operator: ast.GreaterThanEqualOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewBool(l >= r)
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.Int, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.UInt()
if l < 0 {
return NewBool(true)
}
return NewBool(uint64(l) >= r)
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.Int, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Float()
return NewBool(float64(l) >= r)
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.UInt, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Int()
if r < 0 {
return NewBool(false)
}
return NewBool(l >= uint64(r))
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewBool(l >= r)
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.UInt, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Float()
return NewBool(float64(l) >= r)
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.Float, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Int()
return NewBool(l >= float64(r))
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.Float, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.UInt()
return NewBool(l >= float64(r))
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewBool(l >= r)
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.String, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Str()
return NewBool(l >= r)
},
// GreaterThanOperator
{Operator: ast.GreaterThanOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewBool(l > r)
},
{Operator: ast.GreaterThanOperator, Left: semantic.Int, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.UInt()
if l < 0 {
return NewBool(true)
}
return NewBool(uint64(l) > r)
},
{Operator: ast.GreaterThanOperator, Left: semantic.Int, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Float()
return NewBool(float64(l) > r)
},
{Operator: ast.GreaterThanOperator, Left: semantic.UInt, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Int()
if r < 0 {
return NewBool(false)
}
return NewBool(l > uint64(r))
},
{Operator: ast.GreaterThanOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewBool(l > r)
},
{Operator: ast.GreaterThanOperator, Left: semantic.UInt, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Float()
return NewBool(float64(l) > r)
},
{Operator: ast.GreaterThanOperator, Left: semantic.Float, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Int()
return NewBool(l > float64(r))
},
{Operator: ast.GreaterThanOperator, Left: semantic.Float, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.UInt()
return NewBool(l > float64(r))
},
{Operator: ast.GreaterThanOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewBool(l > r)
},
{Operator: ast.GreaterThanOperator, Left: semantic.String, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Str()
return NewBool(l > r)
},
// EqualOperator
{Operator: ast.EqualOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewBool(l == r)
},
{Operator: ast.EqualOperator, Left: semantic.Int, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.UInt()
if l < 0 {
return NewBool(false)
}
return NewBool(uint64(l) == r)
},
{Operator: ast.EqualOperator, Left: semantic.Int, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Float()
return NewBool(float64(l) == r)
},
{Operator: ast.EqualOperator, Left: semantic.UInt, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Int()
if r < 0 {
return NewBool(false)
}
return NewBool(l == uint64(r))
},
{Operator: ast.EqualOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewBool(l == r)
},
{Operator: ast.EqualOperator, Left: semantic.UInt, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Float()
return NewBool(float64(l) == r)
},
{Operator: ast.EqualOperator, Left: semantic.Float, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Int()
return NewBool(l == float64(r))
},
{Operator: ast.EqualOperator, Left: semantic.Float, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.UInt()
return NewBool(l == float64(r))
},
{Operator: ast.EqualOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewBool(l == r)
},
{Operator: ast.EqualOperator, Left: semantic.String, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Str()
return NewBool(l == r)
},
// NotEqualOperator
{Operator: ast.NotEqualOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewBool(l != r)
},
{Operator: ast.NotEqualOperator, Left: semantic.Int, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.UInt()
if l < 0 {
return NewBool(true)
}
return NewBool(uint64(l) != r)
},
{Operator: ast.NotEqualOperator, Left: semantic.Int, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Float()
return NewBool(float64(l) != r)
},
{Operator: ast.NotEqualOperator, Left: semantic.UInt, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Int()
if r < 0 {
return NewBool(true)
}
return NewBool(l != uint64(r))
},
{Operator: ast.NotEqualOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewBool(l != r)
},
{Operator: ast.NotEqualOperator, Left: semantic.UInt, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Float()
return NewBool(float64(l) != r)
},
{Operator: ast.NotEqualOperator, Left: semantic.Float, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Int()
return NewBool(l != float64(r))
},
{Operator: ast.NotEqualOperator, Left: semantic.Float, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.UInt()
return NewBool(l != float64(r))
},
{Operator: ast.NotEqualOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewBool(l != r)
},
{Operator: ast.NotEqualOperator, Left: semantic.String, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Str()
return NewBool(l != r)
},
{Operator: ast.RegexpMatchOperator, Left: semantic.String, Right: semantic.Regexp}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Regexp()
return NewBool(r.MatchString(l))
},
{Operator: ast.RegexpMatchOperator, Left: semantic.Regexp, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Regexp()
r := rv.Str()
return NewBool(l.MatchString(r))
},
{Operator: ast.NotRegexpMatchOperator, Left: semantic.String, Right: semantic.Regexp}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Regexp()
return NewBool(!r.MatchString(l))
},
{Operator: ast.NotRegexpMatchOperator, Left: semantic.Regexp, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Regexp()
r := rv.Str()
return NewBool(!l.MatchString(r))
},
{Operator: ast.AdditionOperator, Left: semantic.String, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Str()
return NewString(l + r)
},
} | values/binary.go | 0.648466 | 0.550909 | binary.go | starcoder |
package analysis
import (
"math"
"gonum.org/v1/gonum/stat/distuv"
"gonum.org/v1/gonum/stat"
"github.com/eseymour/cryptopals/pkg/crypto/xor"
)
// BreakXOREncryptByteKey finds the single-byte XOR key whose decryption best
// matches the byte-value distribution of English text, using a chi-squared
// goodness-of-fit statistic. pValue is the probability of observing a fit at
// least this poor if the plaintext really followed the English distribution.
func BreakXOREncryptByteKey(ciphertext []byte) (key byte, pValue float64) {
	// Scale the reference distribution to expected counts for this length.
	var expectedFreqs [256]float64
	for b, p := range englishDist {
		expectedFreqs[b] = p * float64(len(ciphertext))
	}

	bestChi2 := math.Inf(1)
	decrypted := make([]byte, len(ciphertext))
	for candidate := 0; candidate <= math.MaxUint8; candidate++ {
		xor.EncryptByteKey(decrypted, ciphertext, byte(candidate))

		var observed [256]float64
		for _, b := range decrypted {
			observed[b]++
		}

		// Lower chi-squared means a closer fit to English.
		if chi2 := stat.ChiSquare(observed[:], expectedFreqs[:]); chi2 < bestChi2 {
			bestChi2 = chi2
			key = byte(candidate)
		}
	}

	return key, 1 - englishChi2Dist.CDF(bestChi2)
}
// A distuv.ChiSquared distribution used to calculate a CDF for calculating
// p-values for a Chi Squared test. K = 255 degrees of freedom corresponds to
// 256 byte-value categories minus one.
var englishChi2Dist = distuv.ChiSquared{K: 255}
// English language byte distribution from local fortune files using the program
// github.com/eseymour/goutils/cmd/fortuneStats. Generated using the Homebrew
// version of fortune.
//
// Indexed by byte value 0-255; entries sum to ~1. The repeated minimum value
// (4.45e-07) presumably acts as a smoothing floor for bytes never observed in
// the corpus — confirm against the generator if this matters.
var englishDist = [256]float64{
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 2.4490410223276844e-05, 0.0001384821378079836,
	0.009587772962319946, 0.021125427858598604, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 0.15991436371465198,
	0.0012445581195283414, 0.004958640149934745, 0.0002849793189617669,
	5.076194119006473e-05, 3.5622414870220865e-05, 5.9667544907619945e-05,
	0.004365081662159689, 0.0007525235141334157, 0.0007623196782227265,
	0.0002956660434228332, 2.5380970595032366e-05, 0.010131460069276692,
	0.008456316010004555, 0.013214580076294308, 0.000258707787994979,
	0.0005650605558788785, 0.0008660699615322448, 0.0004377104227178389,
	0.00036958255427854144, 0.00026449643041138993, 0.00031525837160145464,
	0.00021061752792018084, 0.00022442121368239143, 0.00030145468583924406,
	0.00038071455892548547, 0.001619484036037416, 0.0005281023004510243,
	3.250545356907654e-05, 0.0002431229814892574, 6.233922602288651e-05,
	0.0010548687603444153, 4.007521672899847e-05, 0.0035373057966129316,
	0.0018919955097946057, 0.0019449838519140592, 0.0017290229617633452,
	0.002008658918494579, 0.0011964678594535432, 0.0012481203610153636,
	0.0017112117543282347, 0.0048370786591901156, 0.0007235803020513613,
	0.000545468227700257, 0.0017285776815774674, 0.0019445385717281813,
	0.0017000797496812907, 0.0016956269478225132, 0.0014680887728389773,
	0.00024802106353391274, 0.0016074614710187166, 0.0028698307979821685,
	0.004423858646695553, 0.0006884031673670182, 0.0004105483313792955,
	0.002190333234332705, 0.00015184054338431642, 0.0010250349878906054,
	8.104099382975246e-05, 0.0001807837554663709, 0.00014338021985263897,
	0.00018033847528049312, 2.5826250780910127e-05, 0.0005481399088155235,
	7.925987308624141e-05, 0.05652653847643822, 0.010843017806309353,
	0.018777465438465174, 0.025425943893806018, 0.08930183183815668,
	0.014704932858427172, 0.015090545499397314, 0.03650896772030349,
	0.04881740261833655, 0.0008843264491532329, 0.006789186994078219,
	0.031481754421743564, 0.018190140873292408, 0.05105449027218642,
	0.059071314738729626, 0.013006634229489392, 0.0006416487478498533,
	0.04392733561702698, 0.04495237060491759, 0.06295549380014133,
	0.02260242223515514, 0.007701120814755873, 0.013918568050167047,
	0.0014115381892325018, 0.016621418778445054, 0.0006683655590025189,
	7.124482974044172e-06, 5.476946286296458e-05, 5.343362230533129e-06,
	2.0482888550376995e-05, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	2.0482888550376995e-05, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	2.0482888550376995e-05, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	2.0482888550376995e-05, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	2.0482888550376995e-05, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	2.0482888550376995e-05, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	2.0482888550376995e-05, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	2.0482888550376995e-05, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	2.0482888550376995e-05, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07, 4.4528018587776077e-07, 4.4528018587776077e-07,
	4.4528018587776077e-07,
}
package tezos
import (
"fmt"
)
// OpStatus represents the result status of an applied operation as reported
// by the node. The zero value is reserved for invalid/unknown statuses.
type OpStatus byte

const (
	OpStatusInvalid     OpStatus = iota // 0 — unknown or unparseable status
	OpStatusApplied                     // 1 (success)
	OpStatusFailed                      // operation failed during application
	OpStatusSkipped                     // operation was skipped
	OpStatusBacktracked                 // operation was backtracked
)
// IsValid reports whether t is a recognized operation status (i.e. not the
// invalid zero value).
func (t OpStatus) IsValid() bool {
	return t != OpStatusInvalid
}
// IsSuccess reports whether the operation was successfully applied.
func (t OpStatus) IsSuccess() bool {
	return t == OpStatusApplied
}
// UnmarshalText implements the encoding.TextUnmarshaler interface. It rejects
// any string that does not map to a known operation status.
func (t *OpStatus) UnmarshalText(data []byte) error {
	v := ParseOpStatus(string(data))
	if !v.IsValid() {
		return fmt.Errorf("invalid operation status '%s'", string(data))
	}
	*t = v
	return nil
}
// MarshalText implements the encoding.TextMarshaler interface.
//
// Fix: the receiver is now a value rather than a pointer so that
// non-addressable OpStatus values (e.g. map values or fields reached through
// interfaces during encoding/json marshaling) also satisfy the interface.
// This is backward-compatible: *OpStatus retains the method in its method set.
func (t OpStatus) MarshalText() ([]byte, error) {
	return []byte(t.String()), nil
}
// ParseOpStatus converts the node's status string into an OpStatus.
// Unrecognized strings yield OpStatusInvalid.
func ParseOpStatus(s string) OpStatus {
	switch s {
	case "applied":
		return OpStatusApplied
	case "failed":
		return OpStatusFailed
	case "skipped":
		return OpStatusSkipped
	case "backtracked":
		return OpStatusBacktracked
	default:
		return OpStatusInvalid
	}
}
// String returns the node's string representation of the status; the empty
// string is returned for OpStatusInvalid (the inverse of ParseOpStatus).
func (t OpStatus) String() string {
	switch t {
	case OpStatusApplied:
		return "applied"
	case OpStatusFailed:
		return "failed"
	case OpStatusSkipped:
		return "skipped"
	case OpStatusBacktracked:
		return "backtracked"
	default:
		return ""
	}
}
// OpType enumerates all operation kinds handled by this package, including
// synthetic, indexer-only types that do not exist on-chain.
type OpType byte

const (
	OpTypeBake                      OpType = iota // 0
	OpTypeActivateAccount                         // 1
	OpTypeDoubleBakingEvidence                    // 2
	OpTypeDoubleEndorsementEvidence               // 3
	OpTypeSeedNonceRevelation                     // 4
	OpTypeTransaction                             // 5
	OpTypeOrigination                             // 6
	OpTypeDelegation                              // 7
	OpTypeReveal                                  // 8
	OpTypeEndorsement                             // 9
	OpTypeProposals                               // 10
	OpTypeBallot                                  // 11
	OpTypeUnfreeze                                // 12 indexer only
	OpTypeInvoice                                 // 13 indexer only
	OpTypeAirdrop                                 // 14 indexer only
	OpTypeSeedSlash                               // 15 indexer only
	OpTypeMigration                               // 16 indexer only
	OpTypeFailingNoop                             // 17 v009
	OpTypeBatch                     = 254         // indexer only, output-only
	OpTypeInvalid                   = 255
)
// IsValid reports whether t is a recognized operation type.
func (t OpType) IsValid() bool {
	return t != OpTypeInvalid
}
// UnmarshalText implements the encoding.TextUnmarshaler interface. It rejects
// any string that does not map to a known operation type.
func (t *OpType) UnmarshalText(data []byte) error {
	v := ParseOpType(string(data))
	if !v.IsValid() {
		return fmt.Errorf("invalid operation type '%s'", string(data))
	}
	*t = v
	return nil
}
// MarshalText implements the encoding.TextMarshaler interface.
//
// Fix: the receiver is now a value rather than a pointer so that
// non-addressable OpType values (e.g. map values or fields reached through
// interfaces during encoding/json marshaling) also satisfy the interface.
// This is backward-compatible: *OpType retains the method in its method set.
func (t OpType) MarshalText() ([]byte, error) {
	return []byte(t.String()), nil
}
// ParseOpType converts an operation-type string into an OpType. Both
// "endorsement" and "endorsement_with_slot" map to OpTypeEndorsement.
// Unrecognized strings yield OpTypeInvalid.
func ParseOpType(s string) OpType {
	switch s {
	case "bake":
		return OpTypeBake
	case "activate_account":
		return OpTypeActivateAccount
	case "double_baking_evidence":
		return OpTypeDoubleBakingEvidence
	case "double_endorsement_evidence":
		return OpTypeDoubleEndorsementEvidence
	case "seed_nonce_revelation":
		return OpTypeSeedNonceRevelation
	case "transaction":
		return OpTypeTransaction
	case "origination":
		return OpTypeOrigination
	case "delegation":
		return OpTypeDelegation
	case "reveal":
		return OpTypeReveal
	case "endorsement", "endorsement_with_slot":
		return OpTypeEndorsement
	case "proposals":
		return OpTypeProposals
	case "ballot":
		return OpTypeBallot
	case "unfreeze":
		return OpTypeUnfreeze
	case "invoice":
		return OpTypeInvoice
	case "airdrop":
		return OpTypeAirdrop
	case "seed_slash":
		return OpTypeSeedSlash
	case "migration":
		return OpTypeMigration
	case "batch":
		return OpTypeBatch
	case "failing_noop":
		return OpTypeFailingNoop
	default:
		return OpTypeInvalid
	}
}
// String returns the canonical string representation of the operation type;
// the empty string is returned for OpTypeInvalid.
//
// NOTE(review): this is not a strict inverse of ParseOpType —
// "endorsement_with_slot" parses to OpTypeEndorsement, which stringifies
// back as "endorsement".
func (t OpType) String() string {
	switch t {
	case OpTypeBake:
		return "bake"
	case OpTypeActivateAccount:
		return "activate_account"
	case OpTypeDoubleBakingEvidence:
		return "double_baking_evidence"
	case OpTypeDoubleEndorsementEvidence:
		return "double_endorsement_evidence"
	case OpTypeSeedNonceRevelation:
		return "seed_nonce_revelation"
	case OpTypeTransaction:
		return "transaction"
	case OpTypeOrigination:
		return "origination"
	case OpTypeDelegation:
		return "delegation"
	case OpTypeReveal:
		return "reveal"
	case OpTypeEndorsement:
		return "endorsement"
	case OpTypeProposals:
		return "proposals"
	case OpTypeBallot:
		return "ballot"
	case OpTypeUnfreeze:
		return "unfreeze"
	case OpTypeInvoice:
		return "invoice"
	case OpTypeAirdrop:
		return "airdrop"
	case OpTypeSeedSlash:
		return "seed_slash"
	case OpTypeMigration:
		return "migration"
	case OpTypeBatch:
		return "batch"
	case OpTypeFailingNoop:
		return "failing_noop"
	default:
		return ""
	}
}
// Binary operation tags per tag-scheme version. V1 is the scheme used before
// Babylon; V2 applies from Babylon (v005) onwards, where manager operations
// moved to the 107-110 range and failing_noop (17) was added in v009.
var (
	// before babylon
	opTagV1 = map[OpType]byte{
		OpTypeEndorsement:               0,
		OpTypeSeedNonceRevelation:       1,
		OpTypeDoubleEndorsementEvidence: 2,
		OpTypeDoubleBakingEvidence:      3,
		OpTypeActivateAccount:           4,
		OpTypeProposals:                 5,
		OpTypeBallot:                    6,
		OpTypeReveal:                    7,
		OpTypeTransaction:               8,
		OpTypeOrigination:               9,
		OpTypeDelegation:                10,
	}
	// Babylon v005 and up
	opTagV2 = map[OpType]byte{
		OpTypeEndorsement:               0,
		OpTypeSeedNonceRevelation:       1,
		OpTypeDoubleEndorsementEvidence: 2,
		OpTypeDoubleBakingEvidence:      3,
		OpTypeActivateAccount:           4,
		OpTypeProposals:                 5,
		OpTypeBallot:                    6,
		OpTypeReveal:                    107, // v005
		OpTypeTransaction:               108, // v005
		OpTypeOrigination:               109, // v005
		OpTypeDelegation:                110, // v005
		OpTypeFailingNoop:               17,  // v009
	}
)
// Tag returns the binary operation tag for t under the tag scheme
// selected by p.OperationTagsVersion. A nil p selects version 0
// (pre-Babylon); version 1 and any unknown later version use the v2
// table. Types without a binary encoding return 255.
func (t OpType) Tag(p *Params) byte {
	version := 0
	if p != nil {
		version = p.OperationTagsVersion
	}
	// Only version 0 uses the legacy table; every other (including
	// unknown future) version falls back to the v2 table, matching the
	// previous switch whose case 1 and default branches were identical.
	tags := opTagV2
	if version == 0 {
		tags = opTagV1
	}
	tag, ok := tags[t]
	if !ok {
		return 255
	}
	return tag
}
// ListId returns the block operation-list index this op type belongs
// to: 0 endorsements, 1 voting ops, 2 anonymous ops, 3 manager ops.
// Negative values are indexer-internal pseudo-lists with no on-chain
// counterpart.
func (t OpType) ListId() int {
	switch t {
	case OpTypeEndorsement:
		return 0
	case OpTypeProposals, OpTypeBallot:
		return 1
	case OpTypeActivateAccount,
		OpTypeDoubleBakingEvidence,
		OpTypeDoubleEndorsementEvidence,
		OpTypeSeedNonceRevelation:
		return 2
	case OpTypeTransaction, // generic user operations
		OpTypeOrigination,
		OpTypeDelegation,
		OpTypeReveal,
		OpTypeBatch: // custom, indexer only
		return 3
	case OpTypeBake, OpTypeUnfreeze, OpTypeSeedSlash:
		return -1 // block level ops
	case OpTypeInvoice, OpTypeAirdrop, OpTypeMigration:
		return -2 // migration ops
	default:
		return -255 // invalid
	}
}
func ParseOpTag(t byte) OpType {
switch t {
case 0:
return OpTypeEndorsement
case 1:
return OpTypeSeedNonceRevelation
case 2:
return OpTypeDoubleEndorsementEvidence
case 3:
return OpTypeDoubleBakingEvidence
case 4:
return OpTypeActivateAccount
case 5:
return OpTypeProposals
case 6:
return OpTypeBallot
case 7, 107:
return OpTypeReveal
case 8, 108:
return OpTypeTransaction
case 9, 109:
return OpTypeOrigination
case 10, 110:
return OpTypeDelegation
case 17:
return OpTypeFailingNoop
default:
return OpTypeInvalid
}
} | tezos/op.go | 0.507812 | 0.412767 | op.go | starcoder |
package gt
import (
"database/sql/driver"
"encoding/json"
)
/*
Variant of `string` where zero value is considered empty in text, and null in
JSON and SQL. Use this for fields where an empty string is not allowed, such as
enums or text foreign keys.
Unlike `string`, encoding/decoding is not always reversible:
JSON "" → Go "" → JSON null
SQL '' → Go "" → SQL null
Differences from `"database/sql".NullString`:
* Much easier to use.
* Supports text.
* Supports JSON.
* Fewer states: null and empty string are one.
In your data model, text fields should be either:
* Non-nullable, zero value = empty string -> use `string`.
* Nullable, zero value = `null`, empty string is not allowed -> use `gt.NullString`.
Avoid `*string` or `sql.NullString`.
*/
// NullString is a nullable string; see the package commentary above.
// The zero value (the empty string) encodes as null in JSON and SQL.
type NullString string

// Compile-time checks that NullString satisfies the package's
// Encodable and Decodable interface sets.
var (
	_ = Encodable(NullString(``))
	_ = Decodable((*NullString)(nil))
)
// IsZero implements `gt.Zeroable`. Equivalent to `reflect.ValueOf(self).IsZero()`.
func (self NullString) IsZero() bool { return self == `` }
// IsNull implements `gt.Nullable`. True if zero (the empty string).
func (self NullString) IsNull() bool { return self.IsZero() }
// GetPtr implements `gt.PtrGetter`, returning the receiver as `*string`.
func (self *NullString) GetPtr() interface{} { return (*string)(self) }
// Get implements `gt.Getter`. Returns `nil` for the zero value,
// otherwise the content as a plain `string`.
func (self NullString) Get() interface{} {
	if !self.IsNull() {
		return string(self)
	}
	return nil
}
// Set implements `gt.Setter` via `.Scan`; panics if scanning fails.
func (self *NullString) Set(src interface{}) { try(self.Scan(src)) }
// Zero implements `gt.Zeroer`, resetting the receiver to the empty
// string. A nil receiver is a safe no-op.
func (self *NullString) Zero() {
	if self != nil {
		*self = ``
	}
}
// String implements `fmt.Stringer`, returning the string unchanged.
func (self NullString) String() string {
	return string(self)
}
// Parse implements `gt.Parser`, assigning the input string as-is.
// Never fails.
func (self *NullString) Parse(src string) error {
	*self = NullString(src)
	return nil
}
// Append implements `gt.Appender`, appending the raw string bytes to buf.
func (self NullString) Append(buf []byte) []byte {
	return append(buf, self...)
}
/*
MarshalText implements `encoding.TextMarshaler`. If zero, returns nil
(which callers treat as absent/null). Otherwise returns the raw string
bytes.
*/
func (self NullString) MarshalText() ([]byte, error) {
	if self.IsNull() {
		return nil, nil
	}
	return self.Append(nil), nil
}
// UnmarshalText implements `encoding.TextUnmarshaler`, assigning the
// input bytes as-is.
func (self *NullString) UnmarshalText(src []byte) error {
	*self = NullString(src)
	return nil
}
/*
MarshalJSON implements `json.Marshaler`. The zero value encodes as JSON
`null`; any other value uses the default `json.Marshal` behavior for
`string`.
*/
func (self NullString) MarshalJSON() ([]byte, error) {
	if self.IsNull() {
		return bytesNull, nil
	}
	return json.Marshal(self.Get())
}
/*
UnmarshalJSON implements `json.Unmarshaler`. Empty input or JSON `null`
zeroes the receiver; anything else uses the default `json.Unmarshal`
behavior for `*string`.
*/
func (self *NullString) UnmarshalJSON(src []byte) error {
	if isJsonEmpty(src) {
		self.Zero()
		return nil
	}
	return json.Unmarshal(src, self.GetPtr())
}
// Value implements `driver.Valuer` via `.Get` (nil for the zero value).
func (self NullString) Value() (driver.Value, error) { return self.Get(), nil }
/*
Scan implements `sql.Scanner`, converting an arbitrary input to
`gt.NullString` and modifying the receiver. Acceptable inputs:

	* `nil`        -> use `.Zero`
	* `string`     -> use `.Parse`
	* `[]byte`     -> use `.UnmarshalText`
	* `NullString` -> assign
	* `gt.Getter`  -> scan underlying value
*/
func (self *NullString) Scan(src interface{}) error {
	switch src := src.(type) {
	case nil:
		self.Zero()
		return nil
	case string:
		return self.Parse(src)
	case []byte:
		return self.UnmarshalText(src)
	case NullString:
		*self = src
		return nil
	default:
		// Unknown wrapper types: unwrap once via gt.Getter and retry,
		// otherwise report an unsupported source type.
		val, ok := get(src)
		if ok {
			return self.Scan(val)
		}
		return errScanType(self, src)
	}
}
// Same as `len(self)`. Sometimes handy when embedding `gt.NullString` in
// single-valued structs.
func (self NullString) Len() int { return len(self) } | gt_null_string.go | 0.776284 | 0.584805 | gt_null_string.go | starcoder |
package benchmark
import (
"reflect"
"testing"
)
// The is*ToUintFuncCalibrated helpers report whether the calibration
// table holds an entry for the (input kind, Uint) pair keyed by the
// supplier's code pointer. Each benchmark below refuses to run against
// an uncalibrated supplier.
func isBoolToUintFuncCalibrated(supplier func() bool) bool {
	return isCalibrated(reflect.Bool, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func isIntToUintFuncCalibrated(supplier func() int) bool {
	return isCalibrated(reflect.Int, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func isInt8ToUintFuncCalibrated(supplier func() int8) bool {
	return isCalibrated(reflect.Int8, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func isInt16ToUintFuncCalibrated(supplier func() int16) bool {
	return isCalibrated(reflect.Int16, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func isInt32ToUintFuncCalibrated(supplier func() int32) bool {
	return isCalibrated(reflect.Int32, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func isInt64ToUintFuncCalibrated(supplier func() int64) bool {
	return isCalibrated(reflect.Int64, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func isUintToUintFuncCalibrated(supplier func() uint) bool {
	return isCalibrated(reflect.Uint, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func isUint8ToUintFuncCalibrated(supplier func() uint8) bool {
	return isCalibrated(reflect.Uint8, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func isUint16ToUintFuncCalibrated(supplier func() uint16) bool {
	return isCalibrated(reflect.Uint16, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func isUint32ToUintFuncCalibrated(supplier func() uint32) bool {
	return isCalibrated(reflect.Uint32, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func isUint64ToUintFuncCalibrated(supplier func() uint64) bool {
	return isCalibrated(reflect.Uint64, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
// The set*ToUintFuncCalibrated helpers record in the calibration table
// that the given supplier has been calibrated for benchmarks that
// convert the corresponding input kind to uint.
func setBoolToUintFuncCalibrated(supplier func() bool) {
	setCalibrated(reflect.Bool, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func setIntToUintFuncCalibrated(supplier func() int) {
	setCalibrated(reflect.Int, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func setInt8ToUintFuncCalibrated(supplier func() int8) {
	setCalibrated(reflect.Int8, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func setInt16ToUintFuncCalibrated(supplier func() int16) {
	setCalibrated(reflect.Int16, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func setInt32ToUintFuncCalibrated(supplier func() int32) {
	setCalibrated(reflect.Int32, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func setInt64ToUintFuncCalibrated(supplier func() int64) {
	setCalibrated(reflect.Int64, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func setUintToUintFuncCalibrated(supplier func() uint) {
	setCalibrated(reflect.Uint, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func setUint8ToUintFuncCalibrated(supplier func() uint8) {
	setCalibrated(reflect.Uint8, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func setUint16ToUintFuncCalibrated(supplier func() uint16) {
	setCalibrated(reflect.Uint16, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func setUint32ToUintFuncCalibrated(supplier func() uint32) {
	setCalibrated(reflect.Uint32, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
func setUint64ToUintFuncCalibrated(supplier func() uint64) {
	setCalibrated(reflect.Uint64, reflect.Uint, reflect.ValueOf(supplier).Pointer())
}
// BoolToUintFunc benchmarks a function with the signature:
//	func(bool) uint
// Both the supplier and the (supplier, conversion) pair must have been
// calibrated beforehand; otherwise the benchmark panics.
// ID: B-7-1
func BoolToUintFunc(b *testing.B, supplier func() bool, toUintFunc func(bool) uint) {
	if !isBoolSupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isBoolToUintFuncCalibrated(supplier) {
		panic("BoolToUintFunc not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toUintFunc(supplier())
	}
}
// IntToUintFunc benchmarks a function with the signature:
//	func(int) uint
// ID: B-7-2
func IntToUintFunc(b *testing.B, supplier func() int, toUintFunc func(int) uint) {
	if !isIntSupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isIntToUintFuncCalibrated(supplier) {
		panic("IntToUintFunc not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toUintFunc(supplier())
	}
}
// Int8ToUintFunc benchmarks a function with the signature:
//	func(int8) uint
// ID: B-7-3
func Int8ToUintFunc(b *testing.B, supplier func() int8, toUintFunc func(int8) uint) {
	if !isInt8SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isInt8ToUintFuncCalibrated(supplier) {
		panic("Int8ToUintFunc not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toUintFunc(supplier())
	}
}
// Int16ToUintFunc benchmarks a function with the signature:
//	func(int16) uint
// ID: B-7-4
func Int16ToUintFunc(b *testing.B, supplier func() int16, toUintFunc func(int16) uint) {
	if !isInt16SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isInt16ToUintFuncCalibrated(supplier) {
		panic("Int16ToUintFunc not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toUintFunc(supplier())
	}
}
// Int32ToUintFunc benchmarks a function with the signature:
//	func(int32) uint
// ID: B-7-5
func Int32ToUintFunc(b *testing.B, supplier func() int32, toUintFunc func(int32) uint) {
	if !isInt32SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isInt32ToUintFuncCalibrated(supplier) {
		panic("Int32ToUintFunc not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toUintFunc(supplier())
	}
}
// Int64ToUintFunc benchmarks a function with the signature:
//	func(int64) uint
// ID: B-7-6
func Int64ToUintFunc(b *testing.B, supplier func() int64, toUintFunc func(int64) uint) {
	if !isInt64SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isInt64ToUintFuncCalibrated(supplier) {
		panic("Int64ToUintFunc not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toUintFunc(supplier())
	}
}
// UintToUintFunc benchmarks a function with the signature:
//	func(uint) uint
// ID: B-7-7
func UintToUintFunc(b *testing.B, supplier func() uint, toUintFunc func(uint) uint) {
	if !isUintSupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isUintToUintFuncCalibrated(supplier) {
		panic("UintToUintFunc not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toUintFunc(supplier())
	}
}
// Uint8ToUintFunc benchmarks a function with the signature:
//	func(uint8) uint
// ID: B-7-8
func Uint8ToUintFunc(b *testing.B, supplier func() uint8, toUintFunc func(uint8) uint) {
	if !isUint8SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isUint8ToUintFuncCalibrated(supplier) {
		panic("Uint8ToUintFunc not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toUintFunc(supplier())
	}
}
// Uint16ToUintFunc benchmarks a function with the signature:
//	func(uint16) uint
// ID: B-7-9
func Uint16ToUintFunc(b *testing.B, supplier func() uint16, toUintFunc func(uint16) uint) {
	if !isUint16SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isUint16ToUintFuncCalibrated(supplier) {
		panic("Uint16ToUintFunc not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toUintFunc(supplier())
	}
}
// Uint32ToUintFunc benchmarks a function with the signature:
//	func(uint32) uint
// ID: B-7-10
func Uint32ToUintFunc(b *testing.B, supplier func() uint32, toUintFunc func(uint32) uint) {
	if !isUint32SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isUint32ToUintFuncCalibrated(supplier) {
		panic("Uint32ToUintFunc not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toUintFunc(supplier())
	}
}
// Uint64ToUintFunc benchmarks a function with the signature:
//	func(uint64) uint
// ID: B-7-11
func Uint64ToUintFunc(b *testing.B, supplier func() uint64, toUintFunc func(uint64) uint) {
	if !isUint64SupplierCalibrated(supplier) {
		panic("supplier function not calibrated")
	}
	if !isUint64ToUintFuncCalibrated(supplier) {
		panic("Uint64ToUintFunc not calibrated with this supplier")
	}
	for i, count := 0, b.N; i < count; i++ {
		toUintFunc(supplier())
	}
}
package game
import (
"errors"
"fmt"
"reflect"
)
// Grid is a square sliding-puzzle board; EMPTY_VALUE marks the hole.
type Grid [][]byte
// Coords addresses a tile by row (Y) and column (X).
type Coords struct {
	Y byte
	X byte
}
// Player/solver actions. The MOVE_* directions describe where the
// empty tile effectively travels.
const ACTION_NONE byte = 0
const ACTION_QUIT byte = 1
const ACTION_MOVE_TOP byte = 2
const ACTION_MOVE_RIGHT byte = 3
const ACTION_MOVE_BOTTOM byte = 4
const ACTION_MOVE_LEFT byte = 5
const ACTION_SHUFFLE byte = 6
const ACTION_HELP byte = 7
// EMPTY_VALUE is the tile value representing the hole.
const EMPTY_VALUE byte = 0
// BuildGrid creates a solved size×size board: tiles 1..size²-1 in
// row-major order, with EMPTY_VALUE in the last cell.
func BuildGrid(size byte) Grid {
	grid := make(Grid, size)
	for y := byte(0); y < size; y++ {
		row := make([]byte, size)
		for x := byte(0); x < size; x++ {
			v := y*size + x + 1
			if v == size*size {
				row[x] = EMPTY_VALUE
			} else {
				row[x] = v
			}
		}
		grid[y] = row
	}
	return grid
}
// DeepCopyGrid returns an independent copy of grid: every row slice is
// duplicated so mutations of the copy never touch the original.
func DeepCopyGrid(grid Grid) Grid {
	clone := make(Grid, 0, len(grid))
	for _, row := range grid {
		dup := make([]byte, len(row))
		copy(dup, row)
		clone = append(clone, dup)
	}
	return clone
}
// FindTileByValue scans the grid row by row and returns the coordinates
// of the first cell holding value, or an error if no cell matches.
func FindTileByValue(grid Grid, value byte) (Coords, error) {
	for y := range grid {
		for x := range grid[y] {
			if grid[y][x] == value {
				return Coords{Y: byte(y), X: byte(x)}, nil
			}
		}
	}
	return Coords{}, errors.New("The grid does not contain this tile")
}
// FindEmptyTile returns the coordinates of the hole (EMPTY_VALUE).
func FindEmptyTile(grid Grid) (Coords, error) {
	return FindTileByValue(grid, EMPTY_VALUE)
}
// ListMovableTiles returns the coordinates of every tile orthogonally
// adjacent to the hole, in the fixed order: above, right, below, left.
func ListMovableTiles(grid Grid) ([]Coords, error) {
	empty, err := FindEmptyTile(grid)
	if err != nil {
		return nil, err
	}
	size := byte(len(grid))
	var movable []Coords
	if empty.Y > 0 {
		movable = append(movable, Coords{Y: empty.Y - 1, X: empty.X})
	}
	if empty.X+1 < size {
		movable = append(movable, Coords{Y: empty.Y, X: empty.X + 1})
	}
	if empty.Y+1 < size {
		movable = append(movable, Coords{Y: empty.Y + 1, X: empty.X})
	}
	if empty.X > 0 {
		movable = append(movable, Coords{Y: empty.Y, X: empty.X - 1})
	}
	return movable, nil
}
// ListMovableTilesWithoutGoingBack returns the movable tiles, excluding
// the tile that was moved on the previous turn (so a solver does not
// immediately undo its own move).
func ListMovableTilesWithoutGoingBack(grid Grid, previousMovedTile byte) ([]Coords, error) {
	movableTiles, err := ListMovableTiles(grid)
	if err != nil {
		return movableTiles, err
	}
	for i, coords := range movableTiles {
		if grid[coords.Y][coords.X] == previousMovedTile {
			// Remove only the matching tile. (The previous version did
			// movableTiles[:i], which wrongly discarded every movable
			// tile after the match as well.)
			movableTiles = append(movableTiles[:i], movableTiles[i+1:]...)
			break
		}
	}
	return movableTiles, nil
}
// CoordsFromDirection translates a move direction into the coordinates
// of the tile that must slide into the hole. Returns an error when the
// hole sits on the edge that makes the move impossible. An unknown
// direction returns the zero Coords with a nil error (unchanged from
// the previous behavior).
func CoordsFromDirection(grid Grid, dir byte) (Coords, error) {
	empty, err := FindEmptyTile(grid)
	if err != nil {
		return empty, err
	}
	size := byte(len(grid))
	// The old code tested byte underflow with `X-1 != 255`; the
	// explicit `> 0` / `+1 < size` bounds checks below are equivalent
	// and far clearer.
	switch dir {
	case ACTION_MOVE_TOP:
		// The tile below the hole slides up.
		if empty.Y+1 < size {
			return Coords{Y: empty.Y + 1, X: empty.X}, nil
		}
		return Coords{}, errors.New("It's not possible to move 'top'")
	case ACTION_MOVE_RIGHT:
		// The tile left of the hole slides right.
		if empty.X > 0 {
			return Coords{Y: empty.Y, X: empty.X - 1}, nil
		}
		return Coords{}, errors.New("It's not possible to move 'right'")
	case ACTION_MOVE_BOTTOM:
		// The tile above the hole slides down.
		if empty.Y > 0 {
			return Coords{Y: empty.Y - 1, X: empty.X}, nil
		}
		return Coords{}, errors.New("It's not possible to move 'bottom'")
	case ACTION_MOVE_LEFT:
		// The tile right of the hole slides left.
		if empty.X+1 < size {
			return Coords{Y: empty.Y, X: empty.X + 1}, nil
		}
		return Coords{}, errors.New("It's not possible to move 'left'")
	}
	return Coords{}, nil
}
// DirectionFromCoords is the inverse of CoordsFromDirection: given the
// coordinates of a tile, it returns the direction that slides that tile
// into the hole, or an error if the tile is the hole itself.
func DirectionFromCoords(grid Grid, coords Coords) (byte, error) {
	empty, err := FindEmptyTile(grid)
	if err != nil {
		return ACTION_NONE, err
	}
	// Signed deltas from the hole to the tile; byte values fit exactly
	// in int, so this matches the original float64 arithmetic.
	dy := int(coords.Y) - int(empty.Y)
	dx := int(coords.X) - int(empty.X)
	switch {
	case dy > 0:
		return ACTION_MOVE_TOP, nil
	case dy < 0:
		return ACTION_MOVE_BOTTOM, nil
	case dx > 0:
		return ACTION_MOVE_LEFT, nil
	case dx < 0:
		return ACTION_MOVE_RIGHT, nil
	default:
		return ACTION_NONE, errors.New("The tile cannot move")
	}
}
// isTileInMovableTiles reports whether the given coordinates appear in
// the current list of movable tiles.
func isTileInMovableTiles(grid Grid, coordsTileToMove Coords) (bool, error) {
	movableTiles, err := ListMovableTiles(grid)
	if err != nil {
		return false, err
	}
	for i := range movableTiles {
		// Coords is a comparable struct, so == would suffice; DeepEqual
		// is kept to preserve the file's existing reflect usage.
		if reflect.DeepEqual(movableTiles[i], coordsTileToMove) {
			return true, nil
		}
	}
	return false, nil
}
func Move(grid Grid, coordsTileToMove Coords) (Grid, error) {
isTileMovable, err := isTileInMovableTiles(grid, coordsTileToMove)
if err != nil {
return grid, err
}
if !isTileMovable {
return grid, errors.New(fmt.Sprintf("The tile at coords (%d, %d) is not movable", coordsTileToMove.Y, coordsTileToMove.X))
}
emptyCoords, err := FindEmptyTile(grid)
if err != nil {
return grid, err
}
newCoords, err := FindTileByValue(grid, grid[coordsTileToMove.Y][coordsTileToMove.X])
if err != nil {
return grid, err
}
newGrid := DeepCopyGrid(grid)
newGrid[emptyCoords.Y][emptyCoords.X], newGrid[newCoords.Y][newCoords.X] = grid[newCoords.Y][newCoords.X], grid[emptyCoords.Y][emptyCoords.X]
return newGrid, nil
} | src/game/game.go | 0.645679 | 0.473536 | game.go | starcoder |
package main
import (
"encoding/json"
"flag"
"fmt"
"math"
"math/rand"
"gonum.org/v1/plot"
"gonum.org/v1/plot/plotter"
"gonum.org/v1/plot/plotutil"
"gonum.org/v1/plot/vg"
"github.com/pointlander/anomaly"
)
const (
	// VectorSize is the size of the JSON document vector
	VectorSize = 1024
	// Samples is the number of JSON documents to generate per trial
	Samples = 1000
	// Trials is the number of trials
	Trials = 100
	// Parallelization is how many trials to perform in parallel
	Parallelization = 10
	// Cutoff is the number of initial (warm-up) samples excluded from
	// the surprise statistics
	Cutoff = 100
)
// Tests are two structurally identical JSON probe documents: the first
// has distinct leaf values, the second repeats the same values — the
// first is expected to score as more surprising (see IsCorrect).
var Tests = []string{`{
	"alfa": [
		{"alfa": "1"},
		{"bravo": "2"}
	],
	"bravo": [
		{"alfa": "3"},
		{"bravo": "4"}
	]
}`, `{
	"a": [
		{"a": "aa"},
		{"b": "bb"}
	],
	"b": [
		{"a": "aa"},
		{"b": "bb"}
	]
}`}
// tanh32 computes tanh(x) in float32. It delegates to math.Tanh, which
// saturates correctly at ±1 for large |x|; the previous hand-rolled
// (e^x - e^-x)/(e^x + e^-x) returned NaN once exp overflowed to +Inf
// (|x| greater than roughly 709), since Inf/Inf is NaN.
func tanh32(x float32) float32 {
	return float32(math.Tanh(float64(x)))
}
// dtanh32 is the derivative of tanh expressed in terms of the tanh
// OUTPUT y: d/dx tanh(x) = 1 - tanh(x)². The caller must pass the
// activation value, not the pre-activation input.
func dtanh32(x float32) float32 {
	return 1 - x*x
}
// TestResult is the score for a single probe document: Raw is the raw
// surprise, Surprise its absolute z-score against the trial's
// statistics, and Uncertainty the network's reported uncertainty
// (0 when the network does not provide one).
type TestResult struct {
	Surprise    float64
	Uncertainty float64
	Raw         float64
}
// TestResults bundles the outcome of one Anomaly trial: the per-sample
// surprise/uncertainty series (after the Cutoff warm-up), the series'
// mean and standard deviation, and the scores for the probe documents.
type TestResults struct {
	Name                 string
	Seed                 int
	Surprise             plotter.Values
	Uncertainty          plotter.Values
	Average, STDDEV      float64
	Results              []TestResult
}
// statistics returns the mean and population standard deviation of
// values. An empty input yields (0, 0) instead of NaN, and the variance
// is clamped at zero to guard against the slight negative values the
// sum-of-squares formula can produce through floating-point rounding.
func statistics(values plotter.Values) (average, stddev float64) {
	length := float64(len(values))
	if length == 0 {
		return 0, 0
	}
	sum, sumSquared := 0.0, 0.0
	for _, v := range values {
		value := float64(v)
		sum += value
		sumSquared += value * value
	}
	average = sum / length
	variance := sumSquared/length - average*average
	if variance < 0 {
		variance = 0 // FP rounding can push E[x²]-E[x]² just below zero
	}
	stddev = math.Sqrt(variance)
	return
}
// Anomaly runs one trial: it trains the network produced by factory on
// Samples random JSON documents, computes surprise statistics over the
// post-warm-up samples, then scores the two fixed Tests documents as
// absolute z-scores against those statistics. The seed makes both
// document generation and network initialization reproducible.
func Anomaly(seed int, factory anomaly.NetworkFactory, name string) *TestResults {
	// Separate RNGs keep document generation and network init
	// independent while both remain derived from the seed.
	rndGenerator := rand.New(rand.NewSource(int64(seed)))
	rndNetwork := rand.New(rand.NewSource(int64(seed)))
	vectorizer := anomaly.NewVectorizer(VectorSize, true, anomaly.NewLFSR32Source)
	network := factory(rndNetwork, vectorizer)
	surprise, uncertainty := make(plotter.Values, Samples), make(plotter.Values, Samples)
	hasUncertainty := false
	for i := 0; i < Samples; i++ {
		object := anomaly.GenerateRandomJSON(rndGenerator)
		input, err := json.Marshal(object)
		if err != nil {
			panic(err)
		}
		s, u := network.Train(input)
		if u > 0 {
			hasUncertainty = true
		}
		surprise[i], uncertainty[i] = float64(s), float64(u)
	}
	// Drop the warm-up samples before deriving statistics.
	surprise = surprise[Cutoff:]
	average, stddev := statistics(surprise)
	results := make([]TestResult, len(Tests))
	for i, test := range Tests {
		// Round-trip through Unmarshal/Marshal normalizes the probe
		// document the same way the training documents were produced.
		var object map[string]interface{}
		err := json.Unmarshal([]byte(test), &object)
		if err != nil {
			panic(err)
		}
		input, err := json.Marshal(object)
		if err != nil {
			panic(err)
		}
		s, u := network.Train([]byte(input))
		results[i].Raw = float64(s)
		results[i].Surprise = math.Abs((float64(s) - average) / stddev)
		results[i].Uncertainty = float64(u)
	}
	testResults := &TestResults{
		Name:     name,
		Seed:     seed,
		Surprise: surprise,
		Average:  average,
		STDDEV:   stddev,
		Results:  results,
	}
	// Only networks that actually report uncertainty get the series
	// attached (used later to draw error bars).
	if hasUncertainty {
		testResults.Uncertainty = uncertainty
	}
	return testResults
}
// IsCorrect reports whether the trial ranked the probes as expected:
// the first (distinct-valued) document must be more surprising than
// the second (repetitive) one.
func (t *TestResults) IsCorrect() bool {
	return t.Results[0].Surprise > t.Results[1].Surprise
}
// Print writes the trial's probe scores to stdout, including the
// uncertainty terms only when both probes reported one.
func (t *TestResults) Print() {
	results := t.Results
	if results[0].Uncertainty != 0 && results[1].Uncertainty != 0 {
		fmt.Printf("%v %v %.6f (%.6f+-%.6f) %.6f (%.6f+-%.6f)\n", t.Seed, t.Name,
			results[0].Surprise, results[0].Raw, results[0].Uncertainty,
			results[1].Surprise, results[1].Raw, results[1].Uncertainty)
		return
	}
	fmt.Printf("%v %v %.6f (%.6f) %.6f (%.6f)\n", t.Seed, t.Name,
		results[0].Surprise, results[0].Raw,
		results[1].Surprise, results[1].Raw)
}
// full switches on the Trials-sized comparison benchmark at the end of
// main; without it only single-seed runs and plots are produced.
var full = flag.Bool("full", false, "run full bench")
func main() {
	flag.Parse()
	// graph numbers the output files sequentially across all plots.
	graph := 1
	// histogram renders the surprise distribution of one trial.
	histogram := func(title, name string, values *TestResults) {
		p, err := plot.New()
		if err != nil {
			panic(err)
		}
		p.Title.Text = title
		h, err := plotter.NewHist(values.Surprise, 20)
		if err != nil {
			panic(err)
		}
		p.Add(h)
		err = p.Save(8*vg.Inch, 8*vg.Inch, fmt.Sprintf("graph_%v_%v", graph, name))
		if err != nil {
			panic(err)
		}
		graph++
	}
	// scatterPlot plots yy against xx (or against time when xx is nil),
	// annotates the title with the Pearson correlation, and draws error
	// bars when yy carries an uncertainty series.
	scatterPlot := func(xTitle, yTitle, name string, xx, yy *TestResults) {
		xys := make(plotter.XYs, len(yy.Surprise))
		if xx == nil {
			for i, v := range yy.Surprise {
				xys[i].X = float64(i)
				xys[i].Y = v
			}
		} else {
			for i, v := range yy.Surprise {
				xys[i].X = xx.Surprise[i]
				xys[i].Y = v
			}
		}
		// Pearson correlation via single-pass sums.
		x, y, x2, y2, xy, n := 0.0, 0.0, 0.0, 0.0, 0.0, float64(len(xys))
		for i := range xys {
			x += xys[i].X
			y += xys[i].Y
			x2 += xys[i].X * xys[i].X
			y2 += xys[i].Y * xys[i].Y
			xy += xys[i].X * xys[i].Y
		}
		corr := (n*xy - x*y) / (math.Sqrt(n*x2-x*x) * math.Sqrt(n*y2-y*y))
		p, err := plot.New()
		if err != nil {
			panic(err)
		}
		p.Title.Text = fmt.Sprintf("%v vs %v corr=%v", yTitle, xTitle, corr)
		p.X.Label.Text = xTitle
		p.Y.Label.Text = yTitle
		s, err := plotter.NewScatter(xys)
		if err != nil {
			panic(err)
		}
		p.Add(s)
		if uncertainty := yy.Uncertainty; uncertainty != nil {
			errors := make(plotter.YErrors, len(uncertainty))
			for k, v := range uncertainty {
				errors[k].High = v
				errors[k].Low = v
			}
			y := &struct {
				plotter.XYs
				plotter.YErrors
			}{
				XYs:     xys,
				YErrors: errors,
			}
			bar, err1 := plotter.NewYErrorBars(y)
			if err1 != nil {
				panic(err1)
			}
			err1 = plotutil.AddErrorBars(p, bar)
			if err1 != nil {
				panic(err1)
			}
		}
		err = p.Save(8*vg.Inch, 8*vg.Inch, fmt.Sprintf("graph_%v_%v", graph, name))
		if err != nil {
			panic(err)
		}
		graph++
	}
	// One seed-1 trial per detector, each plotted as a distribution,
	// a time series, and (where useful) against the baseline detector.
	averageSimilarity := Anomaly(1, anomaly.NewAverageSimilarity, "average similarity")
	histogram("Average Similarity Distribution", "average_similarity_distribution.png", averageSimilarity)
	scatterPlot("Time", "Average Similarity", "average_similarity.png", nil, averageSimilarity)
	averageSimilarity.Print()
	neuron := Anomaly(1, anomaly.NewNeuron, "neuron")
	histogram("Neuron Distribution", "neuron_distribution.png", neuron)
	scatterPlot("Time", "Neuron", "neuron.png", nil, neuron)
	scatterPlot("Average Similarity", "Neuron", "neuron_vs_average_similarity.png",
		averageSimilarity, neuron)
	neuron.Print()
	autoencoderError := Anomaly(1, anomaly.NewAutoencoder, "autoencoder")
	histogram("Autoencoder Error Distribution", "autoencoder_error_distribution.png", autoencoderError)
	scatterPlot("Time", "Autoencoder Error", "autoencoder_error.png", nil, autoencoderError)
	scatterPlot("Average Similarity", "Autoencoder Error", "autoencoder_error_vs_average_similarity.png",
		averageSimilarity, autoencoderError)
	autoencoderError.Print()
	lstmError := Anomaly(1, anomaly.NewLSTM, "lstm")
	histogram("LSTM Distribution", "lstm_distribution.png", lstmError)
	scatterPlot("Time", "LSTM", "lstm.png", nil, lstmError)
	scatterPlot("Average Similarity", "LSTM", "lstm_vs_average_similarity.png",
		averageSimilarity, lstmError)
	lstmError.Print()
	gruError := Anomaly(1, anomaly.NewGRU, "gru")
	histogram("GRU Distribution", "gru_distribution.png", gruError)
	scatterPlot("Time", "GRU", "gru.png", nil, gruError)
	scatterPlot("GRU", "LSTM", "lstm_vs_gru.png", gruError, lstmError)
	gruError.Print()
	complexityError := Anomaly(1, anomaly.NewComplexity, "complexity")
	histogram("Complexity Distribution", "complexity_distribution.png", complexityError)
	scatterPlot("Time", "Complexity", "complexity.png", nil, complexityError)
	complexityError.Print()
	metaError := Anomaly(1, anomaly.NewMeta, "meta")
	scatterPlot("Time", "Meta", "meta.png", nil, metaError)
	metaError.Print()
	if !*full {
		return
	}
	// test runs Trials seeded trials for one detector, keeping at most
	// Parallelization of them in flight, and counts how many rank the
	// probe documents correctly.
	test := func(factory anomaly.NetworkFactory, name string) int {
		count, total, results, j := 0, 0, make(chan *TestResults, Parallelization), 1
		process := func() {
			result := <-results
			result.Print()
			if result.IsCorrect() {
				count++
			}
			total++
		}
		// Prime the pipeline with the first batch of trials.
		for i := 0; i < Parallelization; i++ {
			go func(j int) {
				results <- Anomaly(j, factory, name)
			}(j)
			j++
		}
		// Consume one result, launch one replacement, until all seeds
		// are dispatched; then drain the remainder.
		for j <= Trials {
			process()
			go func(j int) {
				results <- Anomaly(j, factory, name)
			}(j)
			j++
		}
		for total < Trials {
			process()
		}
		return count
	}
	averageSimilarityCount := test(anomaly.NewAverageSimilarity, "average similarity")
	neuronCount := test(anomaly.NewNeuron, "neuron")
	autoencoderCount := test(anomaly.NewAutoencoder, "autoencoder")
	fmt.Printf("average similarity: %v / %v\n", averageSimilarityCount, Trials)
	fmt.Printf("neuron: %v / %v\n", neuronCount, Trials)
	fmt.Printf("autoencoder: %v / %v\n", autoencoderCount, Trials)
}
package arts
import (
"fmt"
"image/color"
"log"
"github.com/andrewwatson/generativeart"
"github.com/andrewwatson/generativeart/common"
)
// ColorMapping maps some parameters to color space.
type ColorMapping func(float64, float64, float64) color.RGBA
type domainWrap struct {
noise *common.PerlinNoise
scale float64
scale2 float64
xOffset, yOffset float64
fn ColorMapping
// How many images would be created in this generation.
numImages int
// Use these parameters to create images with time lapse.
xOffsetStep, yOffsetStep float64
// The imagPath for generative images.
imgPath string
}
// NewDomainWrap builds a domainWrap renderer with the given noise
// scales, starting offsets, and color mapping.
func NewDomainWrap(scale, scale2, xOffset, yOffset float64, cmap ColorMapping) *domainWrap {
	d := &domainWrap{
		noise:   common.NewPerlinNoise(),
		fn:      cmap,
		scale:   scale,
		scale2:  scale2,
		xOffset: xOffset,
		yOffset: yOffset,
	}
	return d
}
// SetDynamicParameter configures time-lapse generation: per-frame
// offset steps, the number of frames n, and the output directory path.
func (d *domainWrap) SetDynamicParameter(xstep, ystep float64, n int, path string) {
	d.xOffsetStep = xstep
	d.yOffsetStep = ystep
	d.numImages = n
	d.imgPath = path
}
// Generative draws a domain warp image. With no dynamic parameters set
// it renders a single frame onto the canvas; otherwise it writes
// numImages PNG frames, advancing the offsets by one step per frame.
// Reference: https://www.iquilezles.org/www/articles/warp/warp.htm
func (d *domainWrap) Generative(c *generativeart.Canva) string {
	if d.numImages == 0 && len(d.imgPath) == 0 {
		d.generative(c)
		return ""
	}
	if d.numImages > 0 && len(d.imgPath) == 0 {
		log.Fatal("Missing the parameters numImages or imgPath")
	}
	for i := 0; i < d.numImages; i++ {
		imgfile := fmt.Sprintf("%v/domainwrap%03d.PNG", d.imgPath, i)
		// Advance by exactly one step per frame so the offsets grow
		// linearly. (The previous code added step*i on top of the
		// running offset each iteration, making the drift quadratic.)
		if i > 0 {
			d.xOffset += d.xOffsetStep
			d.yOffset += d.yOffsetStep
		}
		d.generative(c)
		c.ToPNG(imgfile)
	}
	return ""
}
// generative renders one frame: every pixel is mapped through the
// warp pattern at the current offsets and colored via d.fn.
func (d *domainWrap) generative(c *generativeart.Canva) {
	for h := 0.0; h < float64(c.Height()); h += 1.0 {
		for w := 0.0; w < float64(c.Width()); w += 1.0 {
			r, m1, m2 := d.pattern(w*d.scale, h*d.scale, d.xOffset, d.yOffset)
			rgb := d.fn(r, m1, m2)
			c.Img().Set(int(w), int(h), rgb)
		}
	}
}
// pattern evaluates the two-level domain warp f(p + s*r(p + s*q(p)))
// from Iñigo Quilez's article and returns the final noise value along
// with the magnitudes of the q and r warp vectors.
func (d *domainWrap) pattern(x, y, xOffest, yOffset float64) (float64, float64, float64) {
	qx := d.fbm(x+xOffest, y+yOffset)
	qy := d.fbm(x+xOffest+5.2, y+yOffset+1.3)
	rx := d.fbm(x+d.scale2*qx+1.7, y+d.scale2*qy+9.2)
	ry := d.fbm(x+d.scale2*qx+8.3, y+d.scale2*qy+2.8)
	return d.fbm(qx+d.scale2*rx, qy+d.scale2*ry), common.Magnitude(qx, qy), common.Magnitude(rx, ry)
}
func (d *domainWrap) fbm(x, y float64) float64 {
return d.noise.Noise2D(x, y)
} | arts/domainwrap.go | 0.788502 | 0.474753 | domainwrap.go | starcoder |
package timeslice
import (
"errors"
"sort"
"time"
)
// TimeSlice is a sortable slice of times, e.g.
//	sort.Sort(sort.Reverse(timeSlice))
//	sort.Sort(timeSlice)
type TimeSlice []time.Time
// sort.Interface implementation plus ascending/descending shortcuts.
func (ts TimeSlice) Len() int { return len(ts) }
func (ts TimeSlice) Less(i, j int) bool { return ts[i].Before(ts[j]) }
func (ts TimeSlice) Swap(i, j int) { ts[i], ts[j] = ts[j], ts[i] }
func (ts TimeSlice) Sort() { sort.Sort(ts) }
func (ts TimeSlice) SortReverse() { sort.Sort(sort.Reverse(ts)) }
// Dedupe returns a new TimeSlice keeping only the first occurrence of
// each exact time.Time value, preserving input order.
func (ts TimeSlice) Dedupe() TimeSlice {
	seen := map[time.Time]int{}
	deduped := TimeSlice{}
	for _, dt := range ts {
		if _, dup := seen[dt]; dup {
			continue
		}
		seen[dt] = 1
		deduped = append(deduped, dt)
	}
	return deduped
}
// Equal reports whether both slices have the same length and pairwise
// equal times (using time.Time.Equal, which ignores location).
func (ts TimeSlice) Equal(compare TimeSlice) bool {
	if len(ts) != len(compare) {
		return false
	}
	for i := range ts {
		if !ts[i].Equal(compare[i]) {
			return false
		}
	}
	return true
}
// Format renders every time in the slice with the given layout.
func (ts TimeSlice) Format(layout string) []string {
	out := make([]string, len(ts))
	for i, dt := range ts {
		out[i] = dt.Format(layout)
	}
	return out
}
// Duplicate returns a shallow copy with its own backing array.
func (ts TimeSlice) Duplicate() TimeSlice {
	dup := make(TimeSlice, len(ts))
	copy(dup, ts)
	return dup
}
// Sentinel errors returned by the Range* lookups below.
var (
	ErrEmptyTimeSlice   = errors.New("empty time slice")
	ErrOutOfBounds      = errors.New("out of bounds")
	ErrOutOfBoundsLower = errors.New("out of bounds lower")
	ErrOutOfBoundsUpper = errors.New("out of bounds upper")
)
// RangeLower returns the TimeSlice time value for the range
// lower than or equal to the supplied time. When inclusive is false an
// exact match resolves to the next lower boundary instead. Returns
// ErrEmptyTimeSlice for an empty slice and ErrOutOfBoundsLower when t
// precedes every boundary.
func (ts TimeSlice) RangeLower(t time.Time, inclusive bool) (time.Time, error) {
	if len(ts) == 0 {
		return t, ErrEmptyTimeSlice
	}
	// Work on a sorted, deduped copy; the receiver is not mutated.
	sortedTS := ts.Dedupe()
	sort.Sort(sortedTS)
	if sortedTS[0].After(t) {
		return t, ErrOutOfBoundsLower
	}
	curRangeLower := sortedTS[0]
	for _, nextRangeLower := range sortedTS {
		if t.Before(nextRangeLower) {
			return curRangeLower, nil
		} else if inclusive && t.Equal(nextRangeLower) {
			return nextRangeLower, nil
		}
		curRangeLower = nextRangeLower
	}
	// t is at or beyond the last boundary.
	return sortedTS[len(sortedTS)-1], nil
}
// RangeUpper returns the TimeSlice boundary at or above the supplied
// time t (the previous doc comment was copy-pasted from RangeLower).
// The slice is deduped and sorted before the scan, which runs in
// reverse. It returns ErrEmptyTimeSlice for an empty slice and
// ErrOutOfBoundsUpper when t is after every boundary.
func (ts TimeSlice) RangeUpper(t time.Time, inclusive bool) (time.Time, error) {
	if len(ts) == 0 {
		return t, ErrEmptyTimeSlice
	}
	sortedTS := ts.Dedupe()
	sort.Sort(sortedTS)
	if sortedTS[len(sortedTS)-1].Before(t) {
		// t falls after the latest boundary.
		return t, ErrOutOfBoundsUpper
	}
	curRangeUpper := sortedTS[len(sortedTS)-1]
	for i := range sortedTS {
		// check times in reverse order
		nextRangeUpper := sortedTS[len(sortedTS)-1-i]
		if t.After(nextRangeUpper) {
			// Scanned past t (going down): the previous boundary wins.
			return curRangeUpper, nil
		} else if inclusive && t.Equal(nextRangeUpper) {
			return nextRangeUpper, nil
		}
		// NOTE(review): as in RangeLower, a non-inclusive exact match
		// still advances onto the equal boundary — confirm intended.
		curRangeUpper = nextRangeUpper
	}
	return sortedTS[0], nil
}
func ParseTimeSlice(format string, times []string) (TimeSlice, error) {
ts := TimeSlice{}
for _, raw := range times {
dt, err := time.Parse(format, raw)
if err != nil {
return ts, err
}
ts = append(ts, dt)
}
return ts, nil
} | time/timeslice/timeslice.go | 0.738669 | 0.487124 | timeslice.go | starcoder |
package pipescript
// LtTransform implements the "lt" pipeline transform: it emits true
// when the first numeric argument is strictly less than the second.
var LtTransform = &Transform{
	Name: "lt",
	Description: "returns true if the data of the incoming stream is less than the value of the first arg",
	Args: []TransformArg{
		TransformArg{
			Description: "Value to check against data",
			Type: TransformArgType,
		},
		TransformArg{
			Description: "Value to check against data",
			Type: TransformArgType,
		},
	},
	// Both arguments are coerced to float64; a coercion failure is
	// propagated as the returned error.
	Constructor: NewArgBasic(func(args []*Datapoint, consts []interface{}, pipes []*Pipe, out *Datapoint) (*Datapoint, error) {
		f1, err := args[0].Float()
		if err == nil {
			var f2 float64
			f2, err = args[1].Float()
			out.Data = f1 < f2
		}
		return out, err
	}),
}

// GtTransform implements the "gt" pipeline transform: it emits true
// when the first numeric argument is strictly greater than the second.
var GtTransform = &Transform{
	Name: "gt",
	Description: "returns true if the data of the incoming stream is greater than the value of the first arg",
	Args: []TransformArg{
		TransformArg{
			Description: "Value to check against data",
			Type: TransformArgType,
		},
		TransformArg{
			Description: "Value to check against data",
			Type: TransformArgType,
		},
	},
	Constructor: NewArgBasic(func(args []*Datapoint, consts []interface{}, pipes []*Pipe, out *Datapoint) (*Datapoint, error) {
		f1, err := args[0].Float()
		if err == nil {
			var f2 float64
			f2, err = args[1].Float()
			out.Data = f1 > f2
		}
		return out, err
	}),
}

// LteTransform implements the "lte" pipeline transform: it emits true
// when the first numeric argument is less than or equal to the second.
var LteTransform = &Transform{
	Name: "lte",
	Description: "returns true if the data of the incoming stream is less than or equal to the value of the first arg",
	Args: []TransformArg{
		TransformArg{
			Description: "Value to check against data",
			Type: TransformArgType,
		},
		TransformArg{
			Description: "Value to check against data",
			Type: TransformArgType,
		},
	},
	Constructor: NewArgBasic(func(args []*Datapoint, consts []interface{}, pipes []*Pipe, out *Datapoint) (*Datapoint, error) {
		f1, err := args[0].Float()
		if err == nil {
			var f2 float64
			f2, err = args[1].Float()
			out.Data = f1 <= f2
		}
		return out, err
	}),
}

// GteTransform implements the "gte" pipeline transform: it emits true
// when the first numeric argument is greater than or equal to the second.
var GteTransform = &Transform{
	Name: "gte",
	Description: "returns true if the data of the incoming stream is greater than or equal to the value of the first arg",
	Args: []TransformArg{
		TransformArg{
			Description: "Value to check against data",
			Type: TransformArgType,
		},
		TransformArg{
			Description: "Value to check against data",
			Type: TransformArgType,
		},
	},
	Constructor: NewArgBasic(func(args []*Datapoint, consts []interface{}, pipes []*Pipe, out *Datapoint) (*Datapoint, error) {
		f1, err := args[0].Float()
		if err == nil {
			var f2 float64
			f2, err = args[1].Float()
			out.Data = f1 >= f2
		}
		return out, err
	}),
}
// EqTransform implements the "eq" pipeline transform: it emits true
// when its two argument values are equal (as defined by Equal).
var EqTransform = &Transform{
	Name: "eq",
	// Fixed: the previous description was copy-pasted from "ne" and
	// wrongly said "is not equal"; eq returns true on equality.
	Description: "returns true if the data of the incoming stream is equal to the value of the first arg",
	Args: []TransformArg{
		TransformArg{
			Description: "Value to check against data",
			Type: TransformArgType,
		},
		TransformArg{
			Description: "Value to check against data",
			Type: TransformArgType,
		},
	},
	Constructor: NewArgBasic(func(args []*Datapoint, consts []interface{}, pipes []*Pipe, out *Datapoint) (*Datapoint, error) {
		// Deep equality of the two argument values; never errors.
		out.Data = Equal(args[0].Data, args[1].Data)
		return out, nil
	}),
}
// NeTransform implements the "ne" pipeline transform: it emits true
// when its two argument values differ (negation of Equal).
var NeTransform = &Transform{
	Name: "ne",
	Description: "returns true if the data of the incoming stream is not equal to the value of the first arg",
	Args: []TransformArg{
		TransformArg{
			Description: "Value to check against data",
			Type: TransformArgType,
		},
		TransformArg{
			Description: "Value to check against data",
			Type: TransformArgType,
		},
	},
	Constructor: NewArgBasic(func(args []*Datapoint, consts []interface{}, pipes []*Pipe, out *Datapoint) (*Datapoint, error) {
		// Negated deep equality; never errors.
		out.Data = !Equal(args[0].Data, args[1].Data)
		return out, nil
	}),
}
package gofinancial
import (
"math"
"github.com/razorpay/go-financial/enums/paymentperiod"
)
/*
Pmt compute the fixed payment(principal + interest) against a loan amount ( fv = 0).
It can also be used to calculate the recurring payments needed to achieve a certain future value
given an initial deposit, a fixed periodically compounded interest rate, and the total number of periods.
It is obtained by solving the following equation:
fv + pv*(1 + rate)**nper + pmt*(1 + rate*when)/rate*((1 + rate)**nper - 1) == 0
Params:
rate : rate of interest compounded once per period
nper : total number of periods to be compounded for
pv : present value (e.g., an amount borrowed)
fv : future value (e.g., 0)
when : specification of whether payment is made
at the beginning (when = 1) or the end
(when = 0) of each period
References:
[WRW] <NAME>., <NAME>, and <NAME> (Eds.) (2009, May).
Open Document Format for Office Applications (OpenDocument)v1.2,
Part 2: Recalculated Formula (OpenFormula) Format - Annotated Version,
Pre-Draft 12. Organization for the Advancement of Structured Information
Standards (OASIS). Billerica, MA, USA. [ODT Document].
Available:
http://www.oasis-open.org/committees/documents.php?wg_abbrev=office-formula
OpenDocument-formula-20090508.odt
*/
func Pmt(rate float64, nper int64, pv float64, fv float64, when paymentperiod.Type) float64 {
	// With rate == 0 the annuity equation in the comment above
	// degenerates to fv + pv + pmt*nper == 0; the general formula
	// below would divide by zero (this matches numpy-financial's pmt).
	if rate == 0 {
		return -(pv + fv) / float64(nper)
	}
	factor := math.Pow(1.0+rate, float64(nper)) // (1+rate)**nper; rate is already float64
	secondFactor := (factor - 1) * (1 + rate*when.Value()) / rate
	return -(pv*factor + fv) / secondFactor
}
/*
IPmt computes interest payment for a loan under a given period.
Params:
rate : rate of interest compounded once per period
per : period under consideration
nper : total number of periods to be compounded for
pv : present value (e.g., an amount borrowed)
fv : future value (e.g., 0)
when : specification of whether payment is made
at the beginning (when = 1) or the end
(when = 0) of each period
References:
[WRW] Wheeler, <NAME>., <NAME>, and <NAME> (Eds.) (2009, May).
Open Document Format for Office Applications (OpenDocument)v1.2,
Part 2: Recalculated Formula (OpenFormula) Format - Annotated Version,
Pre-Draft 12. Organization for the Advancement of Structured Information
Standards (OASIS). Billerica, MA, USA. [ODT Document].
Available:
http://www.oasis-open.org/committees/documents.php?wg_abbrev=office-formula
OpenDocument-formula-20090508.odt
*/
func IPmt(rate float64, per int64, nper int64, pv float64, fv float64, when paymentperiod.Type) float64 {
	// Periods are 1-based; anything lower is undefined.
	if per < 1 {
		return math.NaN()
	}
	totalPmt := Pmt(rate, nper, pv, fv, when)
	ipmt := rbl(rate, per, totalPmt, pv, when) * rate
	if when == paymentperiod.BEGINNING {
		if per == 1 {
			// Paying up front: no interest has accrued yet.
			return 0
		}
		// Payments at the start of a period are discounted by one period.
		return ipmt / (1 + rate)
	}
	return ipmt
}
/*
PPmt computes principal payment for a loan under a given period.
Params:
rate : rate of interest compounded once per period
per : period under consideration
nper : total number of periods to be compounded for
pv : present value (e.g., an amount borrowed)
fv : future value (e.g., 0)
when : specification of whether payment is made
at the beginning (when = 1) or the end
(when = 0) of each period
References:
[WRW] Wheeler, <NAME>., <NAME>, and <NAME> (Eds.) (2009, May).
Open Document Format for Office Applications (OpenDocument)v1.2,
Part 2: Recalculated Formula (OpenFormula) Format - Annotated Version,
Pre-Draft 12. Organization for the Advancement of Structured Information
Standards (OASIS). Billerica, MA, USA. [ODT Document].
Available:
http://www.oasis-open.org/committees/documents.php?wg_abbrev=office-formula
OpenDocument-formula-20090508.odt
*/
func PPmt(rate float64, per int64, nper int64, pv float64, fv float64, when paymentperiod.Type, round bool) float64 {
	// Principal is the total payment minus the interest portion.
	total := Pmt(rate, nper, pv, fv, when)
	interest := IPmt(rate, per, nper, pv, fv, when)
	if round {
		// Round each component separately so the rounded pieces add up.
		return math.Round(total) - math.Round(interest)
	}
	return total - interest
}
// rbl computes the remaining loan balance just before period `per`, by
// rolling the loan's present value and payments forward per-1 periods.
func rbl(rate float64, per int64, pmt float64, pv float64, when paymentperiod.Type) float64 {
	return Fv(rate, (per - 1), pmt, pv, when)
}
/*
Fv computes future value at the end of some periods(nper) by solving the following equation:
fv +
pv*(1+rate)**nper +
pmt*(1 + rate*when)/rate*((1 + rate)**nper - 1) == 0
Params:
pv : a present value
rate : an interest rate compounded once per period
nper : total number of periods
pmt : a (fixed) payment, paid either
at the beginning (when = 1) or the end (when = 0) of each period
when : specification of whether payment is made
at the beginning (when = 1) or the end
(when = 0) of each period
References:
[WRW] <NAME>., <NAME>, and <NAME> (Eds.) (2009, May).
Open Document Format for Office Applications (OpenDocument)v1.2,
Part 2: Recalculated Formula (OpenFormula) Format - Annotated Version,
Pre-Draft 12. Organization for the Advancement of Structured Information
Standards (OASIS). Billerica, MA, USA. [ODT Document].
Available:
http://www.oasis-open.org/committees/documents.php?wg_abbrev=office-formula
OpenDocument-formula-20090508.odt
*/
func Fv(rate float64, nper int64, pmt float64, pv float64, when paymentperiod.Type) float64 {
factor := math.Pow(1.0+float64(rate), float64(nper))
secondFactor := (1 + rate*when.Value()) * (factor - 1) / rate
return -pv*factor - pmt*secondFactor
} | reducing_utils.go | 0.788176 | 0.722723 | reducing_utils.go | starcoder |
package nfa
import (
"fmt"
"strconv"
)
// args carries auxiliary values for error-message formatting in the
// state-validation helpers.
type args struct {
	str string
}

// State is the name of a single NFA state.
type State string

// Symbol is one input symbol of the NFA's alphabet.
type Symbol rune

// Delta is the transition function: state -> symbol -> set of
// successor states encoded as a bit map over the states slice.
type Delta map[State]map[Symbol]StatesBitMap

// StatesBitMap encodes a set of states as bits: bit i corresponds to
// the i-th entry of the nfa states slice (so at most 64 states).
type StatesBitMap uint64

// nfa is a nondeterministic finite automaton.
type nfa struct {
	states []State
	alphabet []Symbol
	delta Delta
	startingStates StatesBitMap
	acceptingStates StatesBitMap
}
/*
Creates an empty NFA with every field at its zero value.
*/
func initializeNFA() nfa {
	// The zero value of nfa already has nil slices, a nil map, and
	// zeroed bit maps; the previous explicit per-field conversions
	// (e.g. []State([]State(nil))) were redundant.
	return nfa{}
}
/*
NewNFA creates an NFA and validates it.
If the NFA fails validation, an empty NFA is returned together with
the validation error.
*/
func NewNFA(states []State, alphabet []Symbol, delta Delta, startingStates StatesBitMap, acceptingStates StatesBitMap) (nfa, error) {
	nfa := nfa{states, alphabet, delta, startingStates, acceptingStates}
	if err := nfa.validate(); err != nil {
		// BUG FIX: the validation error was previously discarded
		// (returned as nil), so callers could never detect failure.
		return initializeNFA(), err
	}
	return nfa, nil
}
/*
Solve validates the NFA and then runs it over str using parallel
bit-map simulation: currentStates is the bit set of active states, and
each input symbol maps it to the union of the delta transitions of all
set bits. It returns the final state bit map and whether any accepting
state is active. If validation fails, or str contains a symbol outside
the alphabet, the states reached so far and false are returned with an
error.
*/
func (nfa *nfa) Solve(str string) (StatesBitMap, bool, error) {
	currentStates := nfa.startingStates
	err := nfa.validate()
	if err != nil {
		return currentStates, false, err
	}
	for _, symbol := range str {
		err := nfa.validateSymbol(Symbol(symbol))
		if err != nil {
			return currentStates, false, err
		}
		nextStates := StatesBitMap(0)
		// Walk the set bits of currentStates; i indexes nfa.states.
		// The walk consumes currentStates, which is then replaced by
		// the freshly computed successor set.
		for i := 0; currentStates != 0; i++ {
			if currentStates%2 == 1 {
				// State i is active: union in its successors.
				nextStates |= nfa.delta[nfa.states[i]][Symbol(symbol)]
			}
			currentStates >>= 1
		}
		currentStates = nextStates
	}
	// Accept if any active state intersects the accepting set.
	isAccepting := currentStates&nfa.acceptingStates != 0
	return currentStates, isAccepting, nil
}
/*
validate runs every structural check on the NFA and returns the first
failure encountered, or nil when the NFA is well-formed.
*/
func (nfa *nfa) validate() error {
	if err := nfa.validateDelta(); err != nil {
		return err
	}
	if err := nfa.validateStartingState(); err != nil {
		return err
	}
	return nfa.validateAcceptingStates()
}
/*
validateDelta checks that the transition map covers no more states
than declared, that every declared state has a transition for every
alphabet symbol, and that every transition targets a valid state bit
map.
*/
func (nfa *nfa) validateDelta() error {
	// The last error catches if delta has less states and pinpoints it
	if len(nfa.delta) > len(nfa.states) {
		return fmt.Errorf("delta contains too many states")
	}
	i := 0
	for _, state := range nfa.states {
		// NOTE(review): i always equals the range index here, so
		// nfa.states[i] == state and this check can never fail; Go
		// maps are unordered, so an "order" check against delta is not
		// expressible this way. Confirm the intended invariant.
		if nfa.states[i] != state {
			return fmt.Errorf("the states in the states array and delta map must be in the same order, states %v does not match delta %v", nfa.states[i], state)
		}
		if _, ok := nfa.delta[state]; ok {
			// The last error catches if delta has less transitions and pinpoints it
			if len(nfa.delta[state]) > len(nfa.alphabet) {
				return fmt.Errorf("delta contains too many transitions for the state '%v'", state)
			}
		}
		for _, symbol := range nfa.alphabet {
			if newState, ok := nfa.delta[state][symbol]; ok {
				// Target bit map must not reference undeclared states.
				err := checkStates(nfa.states, newState, args{str: "new"})
				if err != nil {
					return err
				}
			} else {
				return fmt.Errorf("delta is not defined for the state '%v' and the symbol '%v'", state, string(symbol))
			}
		}
		i++
	}
	return nil
}
/*
validateStartingState checks that the starting-state bit map only
references declared states.
*/
func (nfa *nfa) validateStartingState() error {
	return checkStates(nfa.states, nfa.startingStates, args{str: "starting"})
}

/*
validateAcceptingStates checks that the accepting-state bit map only
references declared states.
*/
func (nfa *nfa) validateAcceptingStates() error {
	return checkStates(nfa.states, nfa.acceptingStates, args{str: "accepting"})
}
/*
validateSymbol checks that the given symbol is part of the NFA's
alphabet, returning an error when it is not.
*/
func (nfa *nfa) validateSymbol(symbol Symbol) error {
	for _, accepted := range nfa.alphabet {
		if symbol == accepted {
			return nil
		}
	}
	return fmt.Errorf("the symbol '%v' is not within the alphabet", string(symbol))
}
/*
checkStates verifies that the bit map does not reference more states
than exist: the position of its highest set bit (measured as the
length of the bit map's binary string) must not exceed len(states).
*/
func checkStates(states []State, statesBitMap StatesBitMap, args args) error {
	// len of the binary representation == index of highest set bit + 1.
	// NOTE(review): a zero bit map formats as the 1-character string
	// "0", so it passes whenever at least one state exists — confirm
	// an empty set is meant to be valid here.
	if len(states) >= len(strconv.FormatUint(uint64(statesBitMap), 2)) {
		return nil
	}
	return fmt.Errorf("the %v states bit map '%v' is too long", args.str, statesBitMap)
}
package interpreter
import (
"fmt"
"github.com/smackem/ylang/internal/parser"
"reflect"
"strings"
)
// Function is a ylang function value: named parameters, a statement
// body, and the scopes captured when the function was defined.
type Function struct {
	ParameterNames []string
	Body []parser.Statement
	closure []scope
}

// Compare implements the Value interface; functions are unordered, so
// the result is nil with no error.
func (f Function) Compare(other Value) (Value, error) {
	return nil, nil
}

// Add implements the Value interface; arithmetic on functions is not
// supported.
func (f Function) Add(other Value) (Value, error) {
	return nil, fmt.Errorf("type mismatch: function + %s Not supported", reflect.TypeOf(other))
}

// Sub implements the Value interface; always an error for functions.
func (f Function) Sub(other Value) (Value, error) {
	return nil, fmt.Errorf("type mismatch: function - %s Not supported", reflect.TypeOf(other))
}

// Mul implements the Value interface; always an error for functions.
func (f Function) Mul(other Value) (Value, error) {
	return nil, fmt.Errorf("type mismatch: function * %s Not supported", reflect.TypeOf(other))
}

// Div implements the Value interface; always an error for functions.
func (f Function) Div(other Value) (Value, error) {
	return nil, fmt.Errorf("type mismatch: function / %s Not supported", reflect.TypeOf(other))
}

// Mod implements the Value interface; always an error for functions.
func (f Function) Mod(other Value) (Value, error) {
	return nil, fmt.Errorf("type mismatch: function %% %s Not supported", reflect.TypeOf(other))
}

// In implements the Value interface; membership tests on functions are
// not supported.
func (f Function) In(other Value) (Value, error) {
	return nil, fmt.Errorf("type mismatch: function In %s Not supported", reflect.TypeOf(other))
}

// Neg implements the Value interface; unary minus is not supported.
func (f Function) Neg() (Value, error) {
	return nil, fmt.Errorf("type mismatch: -function Not supported")
}

// Not implements the Value interface; logical negation is not supported.
func (f Function) Not() (Value, error) {
	return nil, fmt.Errorf("type mismatch: Not function Not supported")
}

// At implements the Value interface; the @ (bitmap access) operator is
// not supported on functions.
func (f Function) At(bitmap BitmapContext) (Value, error) {
	return nil, fmt.Errorf("type mismatch: @function Not supported")
}

// Property implements the Value interface by delegating to the shared
// baseProperty lookup.
func (f Function) Property(ident string) (Value, error) {
	return baseProperty(f, ident)
}

// PrintStr renders the function for display as "fn(a, b, ...)" using
// its parameter names.
func (f Function) PrintStr() string {
	return fmt.Sprintf("fn(%s)", strings.Join(f.ParameterNames, ", "))
}

// Iterate implements the Value interface; functions are not iterable.
func (f Function) Iterate(visit func(Value) error) error {
	return fmt.Errorf("cannot Iterate over function")
}

// Index implements the Value interface; indexing is not supported.
func (f Function) Index(index Value) (Value, error) {
	return nil, fmt.Errorf("type mismatch: function[Index] Not supported")
}

// IndexRange implements the Value interface; slicing is not supported.
func (f Function) IndexRange(lower, upper Value) (Value, error) {
	return nil, fmt.Errorf("type mismatch: function[lower..upper] Not supported")
}

// IndexAssign implements the Value interface; index assignment is not
// supported.
func (f Function) IndexAssign(index Value, val Value) error {
	return fmt.Errorf("type mismatch: function[%s] Not supported", reflect.TypeOf(index))
}

// RuntimeTypeName returns the ylang type name of this value.
func (f Function) RuntimeTypeName() string {
	return "function"
}

// Concat implements the Value interface; concatenation is not supported.
func (f Function) Concat(val Value) (Value, error) {
	return nil, fmt.Errorf("type mismatch: function :: [%s] Not supported", reflect.TypeOf(val))
}
package eZmaxApi
import (
"encoding/json"
)
// EzsignsignerRequest An Ezsignsigner Object
//
// NOTE: OpenAPI-generated model; the field documentation below mirrors
// the API specification.
type EzsignsignerRequest struct {
	// The unique ID of the Taxassignment. Valid values: |Value|Description| |-|-| |1|No tax| |2|GST| |3|HST (ON)| |4|HST (NB)| |5|HST (NS)| |6|HST (NL)| |7|HST (PE)| |8|GST + QST (QC)| |9|GST + QST (QC) Non-Recoverable| |10|GST + PST (BC)| |11|GST + PST (SK)| |12|GST + RST (MB)| |13|GST + PST (BC) Non-Recoverable| |14|GST + PST (SK) Non-Recoverable| |15|GST + RST (MB) Non-Recoverable|
	FkiTaxassignmentID int32 `json:"fkiTaxassignmentID"`
	// The unique ID of the Secretquestion. Valid values: |Value|Description| |-|-| |1|The name of the hospital in which you were born| |2|The name of your grade school| |3|The last name of your favorite teacher| |4|Your favorite sports team| |5|Your favorite TV show| |6|Your favorite movie| |7|The name of the street on which you grew up| |8|The name of your first employer| |9|Your first car| |10|Your favorite food| |11|The name of your first pet| |12|Favorite musician/band| |13|What instrument you play| |14|Your father's middle name| |15|Your mother's maiden name| |16|Name of your eldest child| |17|Your spouse's middle name| |18|Favorite restaurant| |19|Childhood nickname| |20|Favorite vacation destination| |21|Your boat's name| |22|Date of Birth (YYYY-MM-DD)|
	FkiSecretquestionID *int32 `json:"fkiSecretquestionID,omitempty"`
	// The method the Ezsignsigner will authenticate to the signing platform. 1. **Password** means the Ezsignsigner will receive a secure link by email. 2. **PasswordPhone** means the Ezsignsigner will receive a secure link by email and will need to authenticate using SMS or Phone call. **Additional fee applies**. 3. **PasswordQuestion** means the Ezsignsigner will receive a secure link by email and will need to authenticate using a predefined question and answer. 4. **InPersonPhone** means the Ezsignsigner will only be able to sign \"In-Person\" and will need to authenticate using SMS or Phone call. No email will be sent for invitation to sign. **Additional fee applies**. 5. **InPerson** means the Ezsignsigner will only be able to sign \"In-Person\" and there won't be any authentication. No email will be sent for invitation to sign. Make sure you evaluate the risk of signature denial and at minimum, we recommend you use a handwritten signature type.
	EEzsignsignerLogintype string `json:"eEzsignsignerLogintype"`
	// The predefined answer to the secret question the Ezsignsigner will need to provide to successfully authenticate.
	SEzsignsignerSecretanswer *string `json:"sEzsignsignerSecretanswer,omitempty"`
}
// NewEzsignsignerRequest instantiates a new EzsignsignerRequest object
// with the two fields the API requires.
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewEzsignsignerRequest(fkiTaxassignmentID int32, eEzsignsignerLogintype string) *EzsignsignerRequest {
	this := EzsignsignerRequest{}
	this.FkiTaxassignmentID = fkiTaxassignmentID
	this.EEzsignsignerLogintype = eEzsignsignerLogintype
	return &this
}

// NewEzsignsignerRequestWithDefaults instantiates a new EzsignsignerRequest object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewEzsignsignerRequestWithDefaults() *EzsignsignerRequest {
	this := EzsignsignerRequest{}
	return &this
}
// GetFkiTaxassignmentID returns the FkiTaxassignmentID field value
// (generated accessor; nil receivers return the zero value).
func (o *EzsignsignerRequest) GetFkiTaxassignmentID() int32 {
	if o == nil {
		var ret int32
		return ret
	}
	return o.FkiTaxassignmentID
}

// GetFkiTaxassignmentIDOk returns a tuple with the FkiTaxassignmentID field value
// and a boolean to check if the value has been set.
func (o *EzsignsignerRequest) GetFkiTaxassignmentIDOk() (*int32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.FkiTaxassignmentID, true
}

// SetFkiTaxassignmentID sets field value
func (o *EzsignsignerRequest) SetFkiTaxassignmentID(v int32) {
	o.FkiTaxassignmentID = v
}

// GetFkiSecretquestionID returns the FkiSecretquestionID field value if set, zero value otherwise.
func (o *EzsignsignerRequest) GetFkiSecretquestionID() int32 {
	if o == nil || o.FkiSecretquestionID == nil {
		var ret int32
		return ret
	}
	return *o.FkiSecretquestionID
}

// GetFkiSecretquestionIDOk returns a tuple with the FkiSecretquestionID field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *EzsignsignerRequest) GetFkiSecretquestionIDOk() (*int32, bool) {
	if o == nil || o.FkiSecretquestionID == nil {
		return nil, false
	}
	return o.FkiSecretquestionID, true
}

// HasFkiSecretquestionID returns a boolean if a field has been set.
func (o *EzsignsignerRequest) HasFkiSecretquestionID() bool {
	if o != nil && o.FkiSecretquestionID != nil {
		return true
	}
	return false
}

// SetFkiSecretquestionID gets a reference to the given int32 and assigns it to the FkiSecretquestionID field.
func (o *EzsignsignerRequest) SetFkiSecretquestionID(v int32) {
	o.FkiSecretquestionID = &v
}

// GetEEzsignsignerLogintype returns the EEzsignsignerLogintype field value
func (o *EzsignsignerRequest) GetEEzsignsignerLogintype() string {
	if o == nil {
		var ret string
		return ret
	}
	return o.EEzsignsignerLogintype
}

// GetEEzsignsignerLogintypeOk returns a tuple with the EEzsignsignerLogintype field value
// and a boolean to check if the value has been set.
func (o *EzsignsignerRequest) GetEEzsignsignerLogintypeOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.EEzsignsignerLogintype, true
}

// SetEEzsignsignerLogintype sets field value
func (o *EzsignsignerRequest) SetEEzsignsignerLogintype(v string) {
	o.EEzsignsignerLogintype = v
}

// GetSEzsignsignerSecretanswer returns the SEzsignsignerSecretanswer field value if set, zero value otherwise.
func (o *EzsignsignerRequest) GetSEzsignsignerSecretanswer() string {
	if o == nil || o.SEzsignsignerSecretanswer == nil {
		var ret string
		return ret
	}
	return *o.SEzsignsignerSecretanswer
}

// GetSEzsignsignerSecretanswerOk returns a tuple with the SEzsignsignerSecretanswer field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *EzsignsignerRequest) GetSEzsignsignerSecretanswerOk() (*string, bool) {
	if o == nil || o.SEzsignsignerSecretanswer == nil {
		return nil, false
	}
	return o.SEzsignsignerSecretanswer, true
}

// HasSEzsignsignerSecretanswer returns a boolean if a field has been set.
func (o *EzsignsignerRequest) HasSEzsignsignerSecretanswer() bool {
	if o != nil && o.SEzsignsignerSecretanswer != nil {
		return true
	}
	return false
}

// SetSEzsignsignerSecretanswer gets a reference to the given string and assigns it to the SEzsignsignerSecretanswer field.
func (o *EzsignsignerRequest) SetSEzsignsignerSecretanswer(v string) {
	o.SEzsignsignerSecretanswer = &v
}
// MarshalJSON serializes the request; optional (pointer) fields are
// emitted only when set. The `if true` guards are an artifact of the
// OpenAPI generator for required fields.
func (o EzsignsignerRequest) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	if true {
		toSerialize["fkiTaxassignmentID"] = o.FkiTaxassignmentID
	}
	if o.FkiSecretquestionID != nil {
		toSerialize["fkiSecretquestionID"] = o.FkiSecretquestionID
	}
	if true {
		toSerialize["eEzsignsignerLogintype"] = o.EEzsignsignerLogintype
	}
	if o.SEzsignsignerSecretanswer != nil {
		toSerialize["sEzsignsignerSecretanswer"] = o.SEzsignsignerSecretanswer
	}
	return json.Marshal(toSerialize)
}
// NullableEzsignsignerRequest wraps an EzsignsignerRequest with an
// explicit "set" flag so JSON null can be distinguished from absent
// (generated helper type).
type NullableEzsignsignerRequest struct {
	value *EzsignsignerRequest
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableEzsignsignerRequest) Get() *EzsignsignerRequest {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableEzsignsignerRequest) Set(val *EzsignsignerRequest) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set has been called (even with nil).
func (v NullableEzsignsignerRequest) IsSet() bool {
	return v.isSet
}

// Unset clears the value and the set flag.
func (v *NullableEzsignsignerRequest) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableEzsignsignerRequest returns a wrapper already marked set.
func NewNullableEzsignsignerRequest(val *EzsignsignerRequest) *NullableEzsignsignerRequest {
	return &NullableEzsignsignerRequest{value: val, isSet: true}
}

// MarshalJSON emits the wrapped value (null when the value is nil).
func (v NullableEzsignsignerRequest) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes into the wrapped value and marks it set.
func (v *NullableEzsignsignerRequest) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
package gocqlmock
import (
"fmt"
"github.com/gocql/gocql"
"reflect"
"regexp"
)
// Argument lets callers match a query argument in a custom way: an
// expected argument implementing this interface is checked via Match
// instead of by direct value comparison.
type Argument interface {
	Match(interface{}) error
}

// expectIface is the compatibility interface every expectation kind
// implements.
type expectIface interface {
	fulfilled() bool
	setError([]interface{})
	getError() error
	scan(...interface{}) error
	rowsLen() int
}

// expectationsIface manages the collection of expectations.
type expectationsIface interface {
	next() expectIface
	push(expectIface)
}

// expect is the common struct embedded by all expectation kinds to
// satisfy expectIface.
type expect struct {
	triggered bool
	err error
	args interface{}
	rows *rows
}

// Rows manages a collection of mock data rows.
type Rows interface {
	AddRow(...interface{}) Rows
}

// rows keeps the mock data served to Query.Scan and Iter.Scan, plus
// the cursor position of the next row to serve.
type rows struct {
	data []gocql.RowData
	rowpos int
}

// expectations is the collection of expectation structs; it implements
// expectationsIface.
type expectations []expectIface
// next returns the first expectation that has not yet been fulfilled,
// or nil when all expectations have been consumed.
func (s *expectations) next() expectIface {
	for _, exp := range *s {
		if !exp.fulfilled() {
			return exp
		}
	}
	return nil
}
// push appends a new expectation to the collection.
func (s *expectations) push(e expectIface) {
	*s = append(*s, e)
}

// fulfilled reports whether this expectation has been triggered.
func (e expect) fulfilled() bool {
	return e.triggered
}
// scan assigns the values of the current mock row to the destination
// arguments passed to the gocql Scan call, then advances the row
// cursor. gocql.ErrNotFound is returned when no rows remain.
// NOTE(review): err is overwritten on every column, so only the LAST
// assignment's error is reported — confirm dropping earlier per-column
// errors is intentional. Also assumes len(dest) >= number of values in
// the row; fewer destinations would panic on the index.
func (e *expect) scan(dest ...interface{}) (err error) {
	var r *rows
	if r = e.rows; r == nil || len(r.data) <= r.rowpos {
		return gocql.ErrNotFound
	}
	data := r.data[r.rowpos]
	r.rowpos++
	for i, v := range data.Values {
		err = assignValue(dest[i], v)
	}
	return
}
// setError stores an error on the expectation to be returned later.
// The first element of args selects the form: a format string (with
// the remaining elements as fmt arguments) or a ready-made error. An
// empty args, or any other first-element type, is silently ignored.
func (e *expect) setError(args []interface{}) {
	if len(args) < 1 {
		return
	}
	switch args[0].(type) {
	case string:
		str := args[0].(string)
		args = args[1:]
		e.err = fmt.Errorf(str, args...)
	case error:
		e.err = args[0].(error)
	}
}

// getError returns the stored error, if any.
func (e expect) getError() error {
	return e.err
}
// rowsLen returns the number of rows in the mock data cache, or 0
// when no rows have been attached.
func (e expect) rowsLen() int {
	if e.rows == nil {
		return 0
	}
	return len(e.rows.data)
}
// NewRows returns a new, empty mock storage for row data.
func NewRows() Rows {
	return &rows{}
}

// AddRow appends one row of values to the mock storage and returns the
// receiver so calls can be chained.
func (r *rows) AddRow(values ...interface{}) Rows {
	r.data = append(r.data, gocql.RowData{
		Values: values,
	})
	return r
}
// expectQuery is the expectation kind carrying a compiled regular
// expression to match against the query string.
type expectQuery struct {
	expect
	sqlRegex *regexp.Regexp
}

// expectScan is the expectation kind for Scan calls.
type expectScan struct {
	expect
}

// expectIter is the expectation kind for Iter calls.
type expectIter struct {
	expect
}

// expectExec is the expectation kind for Exec calls.
type expectExec struct {
	expect
}
// assignValue copies src into the location pointed to by dst using
// reflection. dst must be a pointer; scalar kinds are set directly,
// structs/slices/arrays are assigned when types are compatible
// (recursing through pointers), and nil source pointers zero the
// destination.
func assignValue(dst, src interface{}) error {
	si := reflect.ValueOf(src)
	di := reflect.ValueOf(dst)
	if k := di.Kind(); k != reflect.Ptr {
		return fmt.Errorf("expected destination argument as pointer, but got %s", k)
	}
	// Dereference so we write through the pointer.
	di = reflect.Indirect(di)
	switch si.Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
		reflect.Float32, reflect.Float64,
		reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
		reflect.String:
		di.Set(si)
	case reflect.Struct, reflect.Slice, reflect.Array:
		if si.IsValid() {
			if di.Kind() == reflect.Ptr {
				// Destination is a pointer field: allocate and recurse.
				di.Set(reflect.New(di.Type().Elem()))
				return assignValue(di.Interface(), si.Interface())
			} else if si.Type().AssignableTo(di.Type()) {
				switch src.(type) {
				case []byte:
					di.SetBytes(si.Bytes())
				default:
					di.Set(si)
				}
			} else {
				return fmt.Errorf("can't set destination argument type %s with row data value type %s", di.Kind(), si.Kind())
			}
		} else {
			return fmt.Errorf("can't set destination argument type %s with invalid row data %v", di.Kind(), si)
		}
	case reflect.Ptr:
		if si.IsNil() {
			// nil source pointer zeroes the destination.
			di.Set(reflect.Zero(di.Type()))
			return nil
		}
		di.Set(reflect.New(di.Type().Elem()))
		return assignValue(di.Interface(), reflect.Indirect(si).Interface())
	default:
		return fmt.Errorf("can't set destination argument type %s with row data value type %s", di.Kind(), si.Kind())
	}
	return nil
}
func argsMatch(qargs, eargs interface{}) error {
a := reflect.ValueOf(qargs)
e := reflect.ValueOf(eargs)
if !e.IsValid() || e.IsNil() {
return nil
}
if al, el := a.Len(), e.Len(); al != el {
return fmt.Errorf("expected %d query arguments, but got %d", el, al)
}
errStr := "argument at %d expected %v, but got %v"
for i := 0; i < e.Len(); i++ {
vi := e.Index(i).Elem()
ei := a.Index(i).Elem()
if m, ok := vi.Interface().(Argument); ok {
if err := m.Match(ei.Interface()); err != nil {
return err
}
continue
}
switch vi.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
if vi.Int() != ei.Int() {
return fmt.Errorf(errStr, i, ei.Int(), vi.Int())
}
case reflect.Float32, reflect.Float64:
if vi.Float() != ei.Float() {
return fmt.Errorf(errStr, i, ei.Float(), vi.Float())
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
if vi.Uint() != ei.Uint() {
return fmt.Errorf(errStr, i, ei.Uint(), vi.Uint())
}
case reflect.String:
if vi.String() != ei.String() {
return fmt.Errorf(errStr, i, ei.String(), vi.String())
}
default:
// compare types like time.Time based on type only
if vi.Kind() != ei.Kind() {
return fmt.Errorf(errStr, i, ei.Kind(), vi.Kind())
}
if reflect.TypeOf(gocql.UUID{}).Name() == vi.Type().Name() {
rv := vi.MethodByName("Timestamp").Call([]reflect.Value{})
re := ei.MethodByName("Timestamp").Call([]reflect.Value{})
if lv, le := len(rv), len(re); lv != le && lv != 1 {
return fmt.Errorf(errStr, i, ei, vi)
} else {
if rv[0].Int() != re[0].Int() {
return fmt.Errorf(errStr, i, ei, vi)
}
}
}
}
}
return nil
} | expectations.go | 0.735167 | 0.402774 | expectations.go | starcoder |
package simplify
import (
"github.com/go-spatial/tegola"
"github.com/go-spatial/tegola/basic"
"github.com/go-spatial/tegola/maths"
"github.com/go-spatial/tegola/maths/points"
)
// SimplifyGeometry applies the DouglasPeucker simplification routine
// to the supplied geometry. Polygons and lines are simplified per
// sub-geometry; sub-geometries that simplify away entirely are
// dropped, and nil is returned when nothing survives. Geometry types
// other than (Multi)Polygon and (Multi)LineString are returned
// unchanged.
func SimplifyGeometry(g tegola.Geometry, tolerance float64) tegola.Geometry {
	switch gg := g.(type) {
	case tegola.Polygon:
		return simplifyPolygon(gg, tolerance)
	case tegola.MultiPolygon:
		var newMP basic.MultiPolygon
		for _, p := range gg.Polygons() {
			sp := simplifyPolygon(p, tolerance)
			if sp == nil {
				// Polygon collapsed below tolerance; drop it.
				continue
			}
			newMP = append(newMP, sp)
		}
		if len(newMP) == 0 {
			return nil
		}
		return newMP
	case tegola.LineString:
		return simplifyLineString(gg, tolerance)
	case tegola.MultiLine:
		var newML basic.MultiLine
		for _, l := range gg.Lines() {
			sl := simplifyLineString(l, tolerance)
			if sl == nil {
				// Line collapsed entirely; drop it.
				continue
			}
			newML = append(newML, sl)
		}
		if len(newML) == 0 {
			return nil
		}
		return newML
	}
	return g
}
// simplifyLineString runs DouglasPeucker on a single line. Lines with
// at most 4 points, or whose total length is below tolerance, are
// returned unsimplified; a line that simplifies to no points yields
// nil.
func simplifyLineString(g tegola.LineString, tolerance float64) basic.Line {
	line := basic.CloneLine(g)
	if len(line) <= 4 || maths.DistOfLine(g) < tolerance {
		// Too short to be worth simplifying.
		return line
	}
	pts := line.AsPts()
	pts = DouglasPeucker(pts, tolerance)
	if len(pts) == 0 {
		return nil
	}
	return basic.NewLineTruncatedFromPt(pts...)
}
// simplifyPolygon simplifies each subline (ring) of the polygon. Rings
// whose area is below tolerance² are kept unsimplified — or, for the
// first (exterior) ring, cause the whole original polygon to be
// returned; rings reduced to 2 or fewer points are dropped (nil for
// the exterior ring).
// NOTE(review): DouglasPeucker is called here with the SQUARED
// tolerance while simplifyLineString passes the raw tolerance —
// confirm which epsilon the routine expects.
func simplifyPolygon(g tegola.Polygon, tolerance float64) basic.Polygon {
	lines := g.Sublines()
	if len(lines) <= 0 {
		return nil
	}
	var poly basic.Polygon
	sqTolerance := tolerance * tolerance
	// First lets look the first line, then we will simplify the other lines.
	for i := range lines {
		area := maths.AreaOfPolygonLineString(lines[i])
		l := basic.CloneLine(lines[i])
		if area < sqTolerance {
			if i == 0 {
				// Exterior ring below tolerance: keep the original polygon.
				return basic.ClonePolygon(g)
			}
			// don't simplify the internal line
			poly = append(poly, l)
			continue
		}
		pts := l.AsPts()
		if len(pts) <= 2 {
			// Degenerate ring: drop (or give up if it is the exterior).
			if i == 0 {
				return nil
			}
			continue
		}
		pts = normalizePoints(pts)
		// Normalization (closing-point / collinear removal) may leave a
		// ring too small to simplify further; keep it as-is.
		if len(pts) <= 4 {
			if i == 0 {
				return basic.ClonePolygon(g)
			}
			poly = append(poly, l)
			continue
		}
		pts = DouglasPeucker(pts, sqTolerance)
		if len(pts) <= 2 {
			if i == 0 {
				return nil
			}
			//log.Println("\t Skipping polygon subline.")
			continue
		}
		poly = append(poly, basic.NewLineTruncatedFromPt(pts...))
	}
	if len(poly) == 0 {
		return nil
	}
	return poly
}
// normalizePoints prepares a ring for simplification: it drops a duplicated
// closing point and removes points that are collinear with their neighbors.
func normalizePoints(pts []maths.Pt) (pnts []maths.Pt) {
	// If the ring is explicitly closed (last point equals first), drop the
	// first point so it is not represented twice.
	if pts[0] == pts[len(pts)-1] {
		pts = pts[1:]
	}
	// Rings this small cannot lose any more points.
	if len(pts) <= 4 {
		return pts
	}
	// NOTE(review): lpt is never advanced, so both slopes below are always
	// measured from pts[0] rather than from the previously kept point —
	// confirm whether that is intentional.
	lpt := 0
	pnts = append(pnts, pts[0])
	for i := 1; i < len(pts); i++ {
		// ni wraps to 0 so the segment that closes the ring is considered.
		ni := i + 1
		if ni >= len(pts) {
			ni = 0
		}
		// Keep pts[i] only when the two segments anchored at pts[lpt] differ
		// in slope, or in whether the slope is defined (vertical lines).
		m1, _, sdef1 := points.SlopeIntercept(pts[lpt], pts[i])
		m2, _, sdef2 := points.SlopeIntercept(pts[lpt], pts[ni])
		if m1 != m2 || sdef1 != sdef2 {
			pnts = append(pnts, pts[i])
		}
	}
	return pnts
}
package delivery
/* Conversion code inspired by https://github.com/icza/dyno/blob/master/dyno.go */
import "fmt"
// Converter converts loosely typed values (typically the result of decoding
// JSON/YAML into interface{}) into concrete Go types.
type Converter struct {
}

// ToString converts the given value to a string. Only an actual string is
// accepted; any other type yields an error.
func (c *Converter) ToString(v interface{}) (string, error) {
	if s, ok := v.(string); ok {
		return s, nil
	}
	return "", fmt.Errorf("expected string value, got: %T", v)
}

// ToInt64 converts the given value to an int64. Any integer or floating
// point type is accepted (floats are truncated), as are numeric strings and
// values exposing an Int64() method.
func (c *Converter) ToInt64(v interface{}) (int64, error) {
	switch val := v.(type) {
	case int64:
		return val, nil
	case int32:
		return int64(val), nil
	case int16:
		return int64(val), nil
	case int8:
		return int64(val), nil
	case int:
		return int64(val), nil
	case uint64:
		return int64(val), nil
	case uint32:
		return int64(val), nil
	case uint16:
		return int64(val), nil
	case uint8:
		return int64(val), nil
	case uint:
		return int64(val), nil
	case float64:
		return int64(val), nil
	case float32:
		return int64(val), nil
	case string:
		var parsed int64
		_, err := fmt.Sscan(val, &parsed)
		return parsed, err
	case interface {
		Int64() (int64, error)
	}:
		return val.Int64()
	default:
		return 0, fmt.Errorf("expected some form of integer number, got: %T", v)
	}
}

// ToFloat64 converts the given value to a float64. Any numeric type is
// accepted, as are numeric strings and values exposing a Float64() method.
func (c *Converter) ToFloat64(v interface{}) (float64, error) {
	switch val := v.(type) {
	case float64:
		return val, nil
	case float32:
		return float64(val), nil
	case int64:
		return float64(val), nil
	case int32:
		return float64(val), nil
	case int16:
		return float64(val), nil
	case int8:
		return float64(val), nil
	case int:
		return float64(val), nil
	case uint64:
		return float64(val), nil
	case uint32:
		return float64(val), nil
	case uint16:
		return float64(val), nil
	case uint8:
		return float64(val), nil
	case uint:
		return float64(val), nil
	case string:
		var parsed float64
		_, err := fmt.Sscan(val, &parsed)
		return parsed, err
	case interface {
		Float64() (float64, error)
	}:
		return val.Float64()
	default:
		return 0, fmt.Errorf("expected some form of floating point number, got: %T", v)
	}
}

// ToBool converts the given value to a bool. Booleans are returned as-is,
// numbers map to value != 0, strings are parsed ("true"/"false"), and values
// exposing a Float64() method are compared against zero.
func (c *Converter) ToBool(v interface{}) (bool, error) {
	switch val := v.(type) {
	case bool:
		return val, nil
	case int64:
		return val != 0, nil
	case int32:
		return val != 0, nil
	case int16:
		return val != 0, nil
	case int8:
		return val != 0, nil
	case int:
		return val != 0, nil
	case uint64:
		return val != 0, nil
	case uint32:
		return val != 0, nil
	case uint16:
		return val != 0, nil
	case uint8:
		return val != 0, nil
	case uint:
		return val != 0, nil
	case float64:
		return val != 0, nil
	case float32:
		return val != 0, nil
	case string:
		var parsed bool
		_, err := fmt.Sscan(val, &parsed)
		return parsed, err
	case interface {
		Float64() (float64, error)
	}:
		f, err := val.Float64()
		if err != nil {
			return false, err
		}
		return f != 0, err
	default:
		return false, fmt.Errorf("expected bool, got: %T", v)
	}
}

// ToInt converts the given value to an int. Unlike ToInt64, only a value
// that already is an int is accepted.
func (c *Converter) ToInt(v interface{}) (int, error) {
	n, ok := v.(int)
	if !ok {
		return 0, fmt.Errorf("expected int value, got: %T", v)
	}
	return n, nil
}

// ToSlice converts the given value to a []interface{}.
func (c *Converter) ToSlice(v interface{}) ([]interface{}, error) {
	if s, ok := v.([]interface{}); ok {
		return s, nil
	}
	return nil, fmt.Errorf("expected slice node, got: %T", v)
}

// ToMap converts the given value to a map[string]interface{}.
func (c *Converter) ToMap(v interface{}) (map[string]interface{}, error) {
	if m, ok := v.(map[string]interface{}); ok {
		return m, nil
	}
	return nil, fmt.Errorf("expected map with string keys node, got: %T", v)
}
package docs
import (
"bytes"
"encoding/json"
"strings"
"text/template"
"github.com/swaggo/swag"
)
var doc = `{
"schemes": {{ marshal .Schemes }},
"swagger": "2.0",
"info": {
"description": "{{escape .Description}}",
"title": "{{.Title}}",
"contact": {
"name": "API Support",
"url": "https://deridex-dev.ml/support",
"email": "<EMAIL>"
},
"license": {
"name": "Apache 2.0",
"url": "http://www.apache.org/licenses/LICENSE-2.0.html"
},
"version": "{{.Version}}"
},
"host": "{{.Host}}",
"basePath": "{{.BasePath}}",
"paths": {
"/markets": {
"get": {
"description": "Returns list of market. Response contains universal datastructure. 'data' field contains response value, in case of errors - 'status' and 'desc' contain description of the error, and the 'data' value is empty",
"produces": [
"application/json"
],
"tags": [
"Markets"
],
"summary": "Get list of markets",
"responses": {
"200": {
"description": "Array of existing markets",
"schema": {
"allOf": [
{
"$ref": "#/definitions/api.Response"
},
{
"type": "object",
"properties": {
"data": {
"type": "array",
"items": {
"$ref": "#/definitions/handlers.Market"
}
}
}
}
]
}
}
}
}
},
"/markets/{market_id}/candles": {
"get": {
"description": "Returns trading view candles of the market. Response contains universal datastructure. 'data' field contains response value, in case of errors - 'status' and 'desc' contain description of the error, and the 'data' value is empty",
"produces": [
"application/json"
],
"tags": [
"Markets"
],
"summary": "Get trading view candles of the market by market id",
"parameters": [
{
"type": "integer",
"description": "Market ID",
"name": "market_id",
"in": "path",
"required": true
},
{
"type": "integer",
"description": "Period start",
"name": "from",
"in": "query",
"required": true
},
{
"type": "integer",
"description": "Period end",
"name": "to",
"in": "query",
"required": true
},
{
"type": "integer",
"description": "Data granularity in seconds",
"name": "granularity",
"in": "query",
"required": true
}
],
"responses": {
"200": {
"description": "Candle values are inside data field",
"schema": {
"allOf": [
{
"$ref": "#/definitions/api.Response"
},
{
"type": "object",
"properties": {
"data": {
"$ref": "#/definitions/handlers.CandlesResp"
}
}
}
]
}
}
}
}
},
"/markets/{market_id}/orderbook": {
"get": {
"description": "Returns market order book. Response contains universal data structure. 'data' field contains response value, in case of errors - 'status' and 'desc' contain description of the error, and the 'data' value is empty",
"produces": [
"application/json"
],
"tags": [
"Markets"
],
"summary": "Get order book of the market by market id",
"parameters": [
{
"type": "integer",
"description": "Market ID",
"name": "market_id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Market orderbook is inside data field",
"schema": {
"allOf": [
{
"$ref": "#/definitions/api.Response"
},
{
"type": "object",
"properties": {
"data": {
"$ref": "#/definitions/handlers.Snapshot"
}
}
}
]
}
}
}
}
},
"/markets/{market_id}/trades": {
"get": {
"description": "Returns all market trades. Response contains universal datastructure. 'data' field contains response value, in case of errors - 'status' and 'desc' contain description of the error, and the 'data' value is empty",
"produces": [
"application/json"
],
"tags": [
"Markets"
],
"summary": "Get market trades by market id",
"parameters": [
{
"type": "integer",
"description": "Market ID",
"name": "market_id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Market trades are inside data field",
"schema": {
"allOf": [
{
"$ref": "#/definitions/api.Response"
},
{
"type": "object",
"properties": {
"data": {
"$ref": "#/definitions/handlers.TradesResp"
}
}
}
]
}
}
}
}
},
"/markets/{market_id}/trades/mine": {
"get": {
"description": "Returns all account trades in the certain market. Response contains universal datastructure. 'data' field contains response value, in case of errors - 'status' and 'desc' contain description of the error, and the 'data' value is empty. This request requires authentication",
"produces": [
"application/json"
],
"tags": [
"Markets"
],
"summary": "Get account trades of the certain market by market id",
"parameters": [
{
"type": "integer",
"description": "Market ID",
"name": "market_id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Account trades are inside data field",
"schema": {
"allOf": [
{
"$ref": "#/definitions/api.Response"
},
{
"type": "object",
"properties": {
"data": {
"$ref": "#/definitions/handlers.TradesResp"
}
}
}
]
}
}
}
}
},
"/orders": {
"get": {
"description": "Returns all account orders. Response contains universal data structure. 'data' field contains response value, in case of errors - 'status' and 'desc' contain description of the error, and the 'data' value is empty. This request requires authentication",
"produces": [
"application/json"
],
"tags": [
"Orders"
],
"summary": "Get all account orders",
"parameters": [
{
"type": "string",
"description": "Market ID",
"name": "market_id",
"in": "query",
"required": true
},
{
"type": "string",
"description": "Status or statuses separated by comma",
"name": "status",
"in": "query"
},
{
"type": "integer",
"description": "Page",
"name": "page",
"in": "query"
},
{
"type": "integer",
"description": "Rows per page",
"name": "perPage",
"in": "query"
}
],
"responses": {
"200": {
"description": "Account orders are inside data field",
"schema": {
"allOf": [
{
"$ref": "#/definitions/api.Response"
},
{
"type": "object",
"properties": {
"data": {
"$ref": "#/definitions/handlers.OrdersResp"
}
}
}
]
}
}
}
},
"post": {
"description": "Places existing order on the market. Response contains universal datastructure. 'data' field contains response value, in case of errors - 'status' and 'desc' contain description of the error, and the 'data' value is empty. This request requires authentication",
"produces": [
"application/json"
],
"tags": [
"Orders"
],
"summary": "Place existing order",
"parameters": [
{
"description": "Contains parameters of existing order",
"name": "order_params",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/handlers.PlaceOrderReq"
}
}
],
"responses": {
"200": {
"description": "Contains result of order placing",
"schema": {
"$ref": "#/definitions/api.Response"
}
}
}
}
},
"/orders/build/limit": {
"post": {
"description": "Builds and returns new limit order. Response contains universal data structure. 'data' field contains response value, in case of errors - 'status' and 'desc' contain description of the error, and the 'data' value is empty. This request requires authentication",
"produces": [
"application/json"
],
"tags": [
"Orders"
],
"summary": "Build a new limit order",
"parameters": [
{
"description": "Contains parameters of new order",
"name": "order_params",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/handlers.BuildLimitOrderReq"
}
}
],
"responses": {
"200": {
"description": "Order data is inside data field",
"schema": {
"allOf": [
{
"$ref": "#/definitions/api.Response"
},
{
"type": "object",
"properties": {
"data": {
"$ref": "#/definitions/handlers.BuildOrderResp"
}
}
}
]
}
}
}
}
},
"/orders/build/market": {
"post": {
"description": "Builds and returns new market order. Response contains universal datastructure. 'data' field contains response value, in case of errors - 'status' and 'desc' contain description of the error, and the 'data' value is empty. This request requires authentication",
"produces": [
"application/json"
],
"tags": [
"Orders"
],
"summary": "Build a new market order",
"parameters": [
{
"description": "Contains parameters of new order",
"name": "order_params",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/handlers.BuildMarketOrderReq"
}
}
],
"responses": {
"200": {
"description": "Order data is inside data field",
"schema": {
"allOf": [
{
"$ref": "#/definitions/api.Response"
},
{
"type": "object",
"properties": {
"data": {
"$ref": "#/definitions/handlers.BuildOrderResp"
}
}
}
]
}
}
}
}
},
"/orders/info": {
"get": {
"description": "Returns all account orders info. Response contains universal datastructure. 'data' field contains response value, in case of errors - 'status' and 'desc' contain description of the error, and the 'data' value is empty. This request requires authentication",
"produces": [
"application/json"
],
"tags": [
"Orders"
],
"summary": "Get all account orders info",
"parameters": [
{
"type": "string",
"description": "Market ID",
"name": "market_id",
"in": "query",
"required": true
}
],
"responses": {
"200": {
"description": "Account orders info are inside data field",
"schema": {
"allOf": [
{
"$ref": "#/definitions/api.Response"
},
{
"type": "object",
"properties": {
"data": {
"$ref": "#/definitions/handlers.GetOrdersInfoResp"
}
}
}
]
}
}
}
}
},
"/orders/{order_id}": {
"get": {
"description": "Returns single order by order id. Response contains universal data structure. 'data' field contains response value, in case of errors - 'status' and 'desc' contain description of the error, and the 'data' value is empty. This request requires authentication",
"produces": [
"application/json"
],
"tags": [
"Orders"
],
"summary": "Get single order by order id",
"parameters": [
{
"type": "integer",
"description": "Order ID",
"name": "order_id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Order data is inside data field",
"schema": {
"allOf": [
{
"$ref": "#/definitions/api.Response"
},
{
"type": "object",
"properties": {
"data": {
"$ref": "#/definitions/models.Order"
}
}
}
]
}
}
}
},
"delete": {
"description": "Cancels order placed on the market. Response contains universal datastructure. 'data' field contains response value, in case of errors - 'status' and 'desc' contain description of the error, and the 'data' value is empty. This request requires authentication",
"produces": [
"application/json"
],
"tags": [
"Orders"
],
"summary": "Cancel placed order",
"parameters": [
{
"type": "integer",
"description": "Order ID",
"name": "order_id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Contains result of order cancellation",
"schema": {
"$ref": "#/definitions/api.Response"
}
}
}
}
}
},
"definitions": {
"api.Response": {
"type": "object",
"properties": {
"data": {
"type": "object"
},
"desc": {
"type": "string"
}
}
},
"handlers.Bar": {
"type": "object",
"properties": {
"close": {
"type": "number"
},
"high": {
"type": "number"
},
"low": {
"type": "number"
},
"open": {
"type": "number"
},
"time": {
"type": "integer"
},
"volume": {
"type": "number"
}
}
},
"handlers.BuildLimitOrderReq": {
"type": "object",
"required": [
"amount",
"market_id",
"order_type",
"price",
"side"
],
"properties": {
"amount": {
"type": "string"
},
"expires": {
"type": "integer"
},
"market_id": {
"type": "string"
},
"order_type": {
"type": "string"
},
"price": {
"type": "string"
},
"side": {
"type": "string"
}
}
},
"handlers.BuildMarketOrderReq": {
"type": "object",
"required": [
"amount",
"market_id",
"side"
],
"properties": {
"amount": {
"type": "string"
},
"expires": {
"type": "integer"
},
"market_id": {
"type": "string"
},
"side": {
"type": "string"
}
}
},
"handlers.BuildOrderResp": {
"type": "object",
"properties": {
"order_id": {
"type": "string"
}
}
},
"handlers.CandlesResp": {
"type": "object",
"properties": {
"candles": {
"type": "array",
"items": {
"$ref": "#/definitions/handlers.Bar"
}
}
}
},
"handlers.GetOrdersInfoResp": {
"type": "object",
"properties": {
"unrealized_pnl": {
"type": "number"
}
}
},
"handlers.Market": {
"type": "object",
"properties": {
"amount24h": {
"type": "number"
},
"amount_decimals": {
"type": "integer"
},
"base_token": {
"type": "string"
},
"base_token_address": {
"type": "string"
},
"base_token_decimals": {
"type": "integer"
},
"base_token_name": {
"type": "string"
},
"id": {
"type": "string"
},
"last_price": {
"type": "number"
},
"min_order_size": {
"type": "number"
},
"price24h": {
"type": "number"
},
"price_decimals": {
"type": "integer"
},
"quote_token": {
"type": "string"
},
"quote_token_address": {
"type": "string"
},
"quote_token_decimals": {
"type": "integer"
},
"quote_token_name": {
"type": "string"
},
"quote_token_volume24h": {
"type": "number"
},
"supported_order_types": {
"type": "array",
"items": {
"type": "string"
}
}
}
},
"handlers.OrdersResp": {
"type": "object",
"properties": {
"count": {
"type": "integer"
},
"orders": {
"type": "array",
"items": {
"$ref": "#/definitions/models.Order"
}
}
}
},
"handlers.PlaceOrderReq": {
"type": "object",
"required": [
"order_id",
"signature"
],
"properties": {
"order_id": {
"type": "string"
},
"signature": {
"type": "string"
}
}
},
"handlers.Snapshot": {
"type": "object",
"properties": {
"asks": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
}
},
"bids": {
"type": "array",
"items": {
"type": "array",
"items": {
"type": "string"
}
}
},
"sequence": {
"type": "integer"
}
}
},
"handlers.TradesResp": {
"type": "object",
"properties": {
"count": {
"type": "integer"
},
"trades": {
"type": "array",
"items": {
"$ref": "#/definitions/models.Trade"
}
}
}
},
"models.Order": {
"type": "object",
"properties": {
"amount": {
"type": "number"
},
"available_amount": {
"type": "number"
},
"canceled_amount": {
"type": "number"
},
"confirmed_amount": {
"type": "number"
},
"created_at": {
"type": "string"
},
"flags": {
"type": "string"
},
"id": {
"type": "string"
},
"market_id": {
"type": "string"
},
"pending_amount": {
"type": "number"
},
"price": {
"type": "number"
},
"side": {
"type": "string"
},
"signature": {
"type": "string"
},
"status": {
"type": "string"
},
"trader_address": {
"type": "string"
},
"type": {
"type": "string"
},
"updated_at": {
"type": "string"
}
}
},
"models.Trade": {
"type": "object",
"properties": {
"amount": {
"type": "number"
},
"created_at": {
"type": "string"
},
"executed_at": {
"type": "string"
},
"id": {
"type": "integer"
},
"maker": {
"type": "string"
},
"maker_order_id": {
"type": "string"
},
"market_id": {
"type": "string"
},
"price": {
"type": "number"
},
"status": {
"type": "string"
},
"taker": {
"type": "string"
},
"taker_order_id": {
"type": "string"
},
"taker_side": {
"type": "string"
},
"transaction_hash": {
"type": "string"
},
"transaction_id": {
"type": "integer"
},
"updated_at": {
"type": "string"
}
}
}
}
}`
// swaggerInfo holds the values templated into the generated swagger document
// (see the doc template and ReadDoc below).
type swaggerInfo struct {
	Version     string
	Host        string
	BasePath    string
	Schemes     []string
	Title       string
	Description string
}

// SwaggerInfo holds exported Swagger Info so clients can modify it
var SwaggerInfo = swaggerInfo{
	Version:     "1.0.0",
	Host:        "deridex-dev.ml",
	BasePath:    "/api/v1",
	Schemes:     []string{},
	Title:       "Deridex Backend API",
	Description: "This is a documentation for the Deridex market backend API.",
}
// s implements swag.ReadDoc for this generated documentation package.
type s struct{}

// ReadDoc renders the swagger template with the current SwaggerInfo values
// and returns the resulting JSON document. On any template error the raw,
// un-rendered template is returned instead.
func (s *s) ReadDoc() string {
	// Work on a copy so the exported SwaggerInfo is never mutated.
	sInfo := SwaggerInfo
	sInfo.Description = strings.Replace(sInfo.Description, "\n", "\\n", -1)
	t, err := template.New("swagger_info").Funcs(template.FuncMap{
		// marshal renders a value as JSON (used for the schemes list).
		"marshal": func(v interface{}) string {
			a, _ := json.Marshal(v)
			return string(a)
		},
		// escape makes a string safe to embed inside a JSON string literal.
		"escape": func(v interface{}) string {
			// escape tabs
			str := strings.Replace(v.(string), "\t", "\\t", -1)
			// replace " with \", and if that results in \\", replace that with \\\"
			str = strings.Replace(str, "\"", "\\\"", -1)
			return strings.Replace(str, "\\\\\"", "\\\\\\\"", -1)
		},
	}).Parse(doc)
	if err != nil {
		return doc
	}
	var tpl bytes.Buffer
	if err := t.Execute(&tpl, sInfo); err != nil {
		return doc
	}
	return tpl.String()
}

// init registers this document with swag so the swagger UI can serve it.
func init() {
	swag.Register(swag.Name, &s{})
}
package n_queen
/*
51. N皇后
https://leetcode-cn.com/problems/n-queens
n 皇后问题研究的是如何将 n 个皇后放置在 n×n 的棋盘上,并且使皇后彼此之间不能相互攻击。
给定一个整数 n,返回所有不同的 n 皇后问题的解决方案。
每一种解法包含一个明确的 n 皇后问题的棋子放置方案,该方案中 'Q' 和 '.' 分别代表了皇后和空位。
示例:
输入: 4
输出: [
[".Q..", // 解法 1
"...Q",
"Q...",
"..Q."],
["..Q.", // 解法 2
"Q...",
"...Q",
".Q.."]
]
解释: 4 皇后问题存在两个不同的解法。
*/
/*
Plain backtracking: queens are placed one row at a time; every column of the
current row is tried, pruning positions attacked by earlier queens.
Time complexity O(n!); space complexity O(n).
*/
func solveNQueens(n int) [][]string {
	var solutions [][]string
	grid := makeBoard(n)
	var place func(row int)
	// place tries every legal column in row, then recurses into the next row.
	place = func(row int) {
		if row == len(grid) {
			solutions = append(solutions, parse(grid))
			return
		}
		for col := 0; col < len(grid); col++ {
			if !canSetQueen(grid, row, col) {
				continue
			}
			grid[row][col] = 'Q'
			place(row + 1)
			grid[row][col] = '.' // undo before trying the next column
		}
	}
	place(0)
	return solutions
}

// makeBoard builds an n x n board with every cell set to '.'.
func makeBoard(n int) [][]byte {
	board := make([][]byte, n)
	for i := range board {
		row := make([]byte, n)
		for j := range row {
			row[j] = '.'
		}
		board[i] = row
	}
	return board
}

// parse renders the board as one string per row.
func parse(board [][]byte) []string {
	rows := make([]string, len(board))
	for i, row := range board {
		rows[i] = string(row)
	}
	return rows
}

// canSetQueen reports whether a queen at (r, c) is safe from the queens
// already placed in rows 0..r-1. Only the column and the two upward
// diagonals are checked, because rows below r are still empty.
func canSetQueen(board [][]byte, r, c int) bool {
	for i := 0; i < r; i++ { // same column
		if board[i][c] == 'Q' {
			return false
		}
	}
	for i, j := r-1, c-1; i >= 0 && j >= 0; i, j = i-1, j-1 { // upper-left diagonal
		if board[i][j] == 'Q' {
			return false
		}
	}
	for i, j := r-1, c+1; i >= 0 && j < len(board); i, j = i-1, j+1 { // upper-right diagonal
		if board[i][j] == 'Q' {
			return false
		}
	}
	return true
}
/*
Optimized version: instead of rescanning the board for each candidate cell,
keep boolean occupancy tables so a placement check is O(1):
  - usedCol[c]          — column c already holds a queen
  - usedDiag1[r-c+n-1]  — "\" diagonal (row-col is constant along it), range [1-n, n-1]
  - usedDiag2[r+c]      — "/" diagonal (row+col is constant along it), range [0, 2n-2]
Only the column of the queen in each row is recorded (queenCol); the final
boards are rendered from that. Same asymptotic complexity as the plain
version, but with far cheaper placement checks.
*/
func solveNQueens1(n int) [][]string {
	var res [][]string
	queenCol := make([]int, n)
	usedCol := make([]bool, n)
	usedDiag1 := make([]bool, 2*n-1)
	usedDiag2 := make([]bool, 2*n-1)
	var place func(row int)
	place = func(row int) {
		if row == n {
			res = append(res, parseMemo(queenCol))
			return
		}
		for col := 0; col < n; col++ {
			if usedCol[col] || usedDiag1[row-col+n-1] || usedDiag2[row+col] {
				continue
			}
			queenCol[row] = col // no undo needed: overwritten on backtrack
			usedCol[col], usedDiag1[row-col+n-1], usedDiag2[row+col] = true, true, true
			place(row + 1)
			usedCol[col], usedDiag1[row-col+n-1], usedDiag2[row+col] = false, false, false
		}
	}
	place(0)
	return res
}

// parseMemo renders a solution, given the queen's column for every row.
func parseMemo(memo []int) []string {
	n := len(memo)
	boards := make([]string, n)
	for r, c := range memo {
		line := make([]byte, n)
		for j := range line {
			line[j] = '.'
		}
		line[c] = 'Q'
		boards[r] = string(line)
	}
	return boards
}
/*
52. N-Queens II
https://leetcode-cn.com/problems/n-queens-ii
Like problem 51 but simpler: only the number of distinct solutions is needed.
Uses the same O(1) occupancy tables as solveNQueens1.
*/
func totalNQueens(n int) int {
	count := 0
	usedCol := make([]bool, n)
	diagA := make([]bool, 2*n-1) // "\" diagonals, indexed by row-col+n-1
	diagB := make([]bool, 2*n-1) // "/" diagonals, indexed by row+col
	var place func(row int)
	place = func(row int) {
		if row == n {
			count++
			return
		}
		for col := 0; col < n; col++ {
			if usedCol[col] || diagA[row-col+n-1] || diagB[row+col] {
				continue
			}
			usedCol[col], diagA[row-col+n-1], diagB[row+col] = true, true, true
			place(row + 1)
			usedCol[col], diagA[row-col+n-1], diagB[row+col] = false, false, false
		}
	}
	place(0)
	return count
}
package leetcode0622
// MyCircularQueue is a fixed-capacity FIFO queue backed by a ring buffer.
// head and tail are inclusive indexes into queue; both are -1 while the
// queue is empty.
type MyCircularQueue struct {
	head, tail, size int
	queue            []int
}

// Constructor initializes the data structure with capacity k.
func Constructor(k int) MyCircularQueue {
	return MyCircularQueue{
		head:  -1,
		tail:  -1,
		size:  k,
		queue: make([]int, k),
	}
}

// nextPosition returns index advanced by one, wrapping around the buffer.
func (q *MyCircularQueue) nextPosition(index int) int {
	next := index + 1
	if next == q.size {
		return 0
	}
	return next
}

// EnQueue inserts an element into the circular queue. It returns false when
// the queue is full.
func (q *MyCircularQueue) EnQueue(value int) bool {
	if q.IsFull() {
		return false
	}
	if q.head == -1 {
		q.head = 0 // first element: head now points at it
	}
	q.tail = q.nextPosition(q.tail)
	q.queue[q.tail] = value
	return true
}

// DeQueue removes the front element from the circular queue. It returns
// false when the queue is empty.
func (q *MyCircularQueue) DeQueue() bool {
	if q.IsEmpty() {
		return false
	}
	if q.head == q.tail {
		// Removing the only element: reset to the empty state.
		q.head = -1
		q.tail = -1
	} else {
		q.head = q.nextPosition(q.head)
	}
	return true
}

// Front returns the front item, or -1 when the queue is empty.
func (q *MyCircularQueue) Front() int {
	if q.IsEmpty() {
		return -1
	}
	return q.queue[q.head]
}

// Rear returns the last item, or -1 when the queue is empty.
func (q *MyCircularQueue) Rear() int {
	if q.IsEmpty() {
		return -1
	}
	return q.queue[q.tail]
}

// IsEmpty reports whether the circular queue holds no elements.
func (q *MyCircularQueue) IsEmpty() bool {
	return q.head == -1
}

// IsFull reports whether the circular queue is at capacity. A zero-capacity
// queue is always full (previously EnQueue panicked on an empty buffer).
func (q *MyCircularQueue) IsFull() bool {
	return q.size == 0 || q.nextPosition(q.tail) == q.head
}
/**
* Your MyCircularQueue object will be instantiated and called as such:
* obj := Constructor(k);
* param_1 := obj.EnQueue(value);
* param_2 := obj.DeQueue();
* param_3 := obj.Front();
* param_4 := obj.Rear();
* param_5 := obj.IsEmpty();
* param_6 := obj.IsFull();
*/ | leetcode.0622.design-circular-queue/design_circular_queue.go | 0.730963 | 0.44089 | design_circular_queue.go | starcoder |
package canvas
import (
"image"
"golang.org/x/image/draw"
"golang.org/x/image/math/f64"
"golang.org/x/image/vector"
)
// Rasterizer renders vector primitives into a raster image at a fixed
// resolution.
type Rasterizer struct {
	img draw.Image // destination image
	dpm float64    // resolution in dots-per-millimeter
}

// NewRasterizer creates a renderer that draws to a rasterized image.
func NewRasterizer(img draw.Image, dpm float64) *Rasterizer {
	return &Rasterizer{
		img: img,
		dpm: dpm,
	}
}

// Size returns the canvas size in millimeters, derived from the image's
// pixel dimensions and the dots-per-millimeter resolution.
func (r *Rasterizer) Size() (float64, float64) {
	size := r.img.Bounds().Size()
	return float64(size.X) / r.dpm, float64(size.Y) / r.dpm
}
// RenderPath rasterizes a single path with the given style, transformed by m.
// The affected pixel rectangle is computed (stroke width included), clipped
// to the image, and the path is rendered into it: fill first, then the
// (optionally dashed) stroke.
func (r *Rasterizer) RenderPath(path *Path, style Style, m Matrix) {
	// TODO: use fill rule (EvenOdd, NonZero) for rasterizer
	path = path.Transform(m)
	// A stroke enlarges the path's bounding box; account for its width when
	// computing the affected pixel rectangle below.
	strokeWidth := 0.0
	if style.StrokeColor.A != 0 && 0.0 < style.StrokeWidth {
		strokeWidth = style.StrokeWidth
	}
	size := r.img.Bounds().Size()
	bounds := path.Bounds()
	// (x, y, w, h) is the pixel rectangle touched by the path; (dx, dy) is
	// the offset into it when clipped by the left/bottom image edge.
	dx, dy := 0, 0
	x := int((bounds.X - strokeWidth) * r.dpm)
	y := int((bounds.Y - strokeWidth) * r.dpm)
	w := int((bounds.W+2*strokeWidth)*r.dpm) + 1
	h := int((bounds.H+2*strokeWidth)*r.dpm) + 1
	// NOTE(review): the && means rendering is skipped only when the
	// rectangle is off-canvas in BOTH axes; off-canvas in one axis falls
	// through to the clipping below — confirm intended.
	if (x+w <= 0 || size.X <= x) && (y+h <= 0 || size.Y <= y) {
		return // outside canvas
	}
	// Clip the rectangle to the image bounds.
	if x < 0 {
		dx = -x
		x = 0
	}
	if y < 0 {
		dy = -y
		y = 0
	}
	if size.X <= x+w {
		w = size.X - x
	}
	if size.Y <= y+h {
		h = size.Y - y
	}
	if w <= 0 || h <= 0 {
		return // has no size
	}
	// Move the path into the local coordinates of the clipped rectangle.
	path = path.Translate(-float64(x)/r.dpm, -float64(y)/r.dpm)
	if style.FillColor.A != 0 {
		ras := vector.NewRasterizer(w, h)
		path.ToRasterizer(ras, r.dpm)
		// size.Y-y converts between the canvas's bottom-left origin and the
		// image's top-left origin.
		ras.Draw(r.img, image.Rect(x, size.Y-y, x+w, size.Y-y-h), image.NewUniform(style.FillColor), image.Point{dx, dy})
	}
	if style.StrokeColor.A != 0 && 0.0 < style.StrokeWidth {
		// Dashing must be applied before the stroke is expanded to an outline.
		if 0 < len(style.Dashes) {
			path = path.Dash(style.DashOffset, style.Dashes...)
		}
		path = path.Stroke(style.StrokeWidth, style.StrokeCapper, style.StrokeJoiner)
		ras := vector.NewRasterizer(w, h)
		path.ToRasterizer(ras, r.dpm)
		ras.Draw(r.img, image.Rect(x, size.Y-y, x+w, size.Y-y-h), image.NewUniform(style.StrokeColor), image.Point{dx, dy})
	}
}
// RenderText converts the text to filled paths and renders each path with
// its associated color, transformed by m.
func (r *Rasterizer) RenderText(text *Text, m Matrix) {
	paths, colors := text.ToPaths()
	for i := range paths {
		style := DefaultStyle
		style.FillColor = colors[i]
		r.RenderPath(paths[i], style, m)
	}
}
// RenderImage draws img onto the destination image, transformed by m.
func (r *Rasterizer) RenderImage(img image.Image, m Matrix) {
	// origin is the canvas position (in dots) of the image's bottom-left corner.
	origin := m.Dot(Point{0, float64(img.Bounds().Size().Y)}).Mul(r.dpm)
	m = m.Scale(r.dpm, r.dpm)
	// The affine matrix negates the Y terms to flip between the canvas's
	// bottom-left origin and the image's top-left origin.
	h := float64(r.img.Bounds().Size().Y)
	aff3 := f64.Aff3{m[0][0], -m[0][1], origin.X, -m[1][0], m[1][1], h - origin.Y}
	// add transparent margin to image for smooth borders when rotating
	margin := 4
	size := img.Bounds().Size()
	img2 := image.NewRGBA(image.Rect(0, 0, size.X+margin*2, size.Y+margin*2))
	// NOTE(review): the destination rectangle ends at (size.X, size.Y), not
	// (size.X+margin, size.Y+margin), so the right/bottom edge of the source
	// appears to be cropped by the margin — confirm.
	draw.Draw(img2, image.Rect(margin, margin, size.X, size.Y), img, image.Point{}, draw.Over)
	draw.CatmullRom.Transform(r.img, aff3, img2, img2.Bounds(), draw.Over, nil)
}
package sudoku
import "fmt"
/*
Since golang uses a list of lists to support matrices while making operations on the table, we must remember
the row column notation as the first for loop will extract the row and the second essentially extracts the
element from the row so a Sudoku is [row][column]. Basically, we are using the (row, column) fomrat and not the
(i,j) format
In this code the row and columns are indexed from 0
Zerospaces in our Sudoku are treated as empty spaces
*/
// SudokuT represents a Sudoku board as a 9x9 grid of ints, addressed as
// [row][column] with both indexes starting at 0. A value of 0 marks an
// empty cell.
// Note: Technically a sudoku has only 9 values so using int may be quite wasteful
type SudokuT [9][9]int

// GetSudoku returns a new, empty Sudoku.
func GetSudoku() *SudokuT {
	return new(SudokuT)
}

// Print writes the Sudoku to stdout in a human-readable, tab-separated form.
func (s *SudokuT) Print() {
	for _, row := range s {
		for _, cell := range row {
			fmt.Printf("%d\t", cell)
		}
		fmt.Println()
	}
}

// GetElement returns the value stored at (row, column). It errors when
// either index lies outside [0, 8].
func (s *SudokuT) GetElement(row, column int) (int, error) {
	if row < 0 || row > 8 || column < 0 || column > 8 {
		return 0, fmt.Errorf("Invalid Inputs")
	}
	return s[row][column], nil
}

// SetElement stores n at (row, column). It errors when either index lies
// outside [0, 8] or n lies outside [0, 9] (0 means empty).
func (s *SudokuT) SetElement(row, column, n int) error {
	if row < 0 || row > 8 || column < 0 || column > 8 || n < 0 || n > 9 {
		return fmt.Errorf("Invalid Inputs")
	}
	s[row][column] = n
	return nil
}

// SetFromArray fills the Sudoku row by row from an array of length 81:
// Sudoku [r1;r2;...;r9] = [r1,r2,...,r9].
func (s *SudokuT) SetFromArray(l []int) error {
	if len(l) != 81 {
		return fmt.Errorf("Array length not 81")
	}
	index := 0
	for r := 0; r < 9; r++ {
		for c := 0; c < 9; c++ {
			if err := s.SetElement(r, c, l[index]); err != nil {
				return err
			}
			index++
		}
	}
	// Defensive sanity check; index is always 81 after the loops above.
	if index != 81 {
		return fmt.Errorf("Something Went Wrong setting the sudoku from array. index = %v", index)
	}
	return nil
}

// ArrayFormat represents the elements of the Sudoku as an array of length
// 81, row by row: Sudoku [r1,r2,...,r9] = [r1;r2;...;r9].
func (s *SudokuT) ArrayFormat() ([]int, error) {
	a := make([]int, 0, 81)
	for r := 0; r < 9; r++ {
		for c := 0; c < 9; c++ {
			e, err := s.GetElement(r, c)
			if err != nil {
				return []int{}, err
			}
			a = append(a, e)
		}
	}
	// Defensive sanity check; a always holds 81 elements here.
	if len(a) != 81 {
		return a, fmt.Errorf("Lenth of the array not 81 len = %v", len(a))
	}
	return a, nil
}
package tdigest
import (
"math"
"sort"
)
// TDigest is a data structure for accurate on-line accumulation of
// rank-based statistics such as quantiles and trimmed means.
type TDigest struct {
	compression float64 // accuracy/size trade-off set at construction
	maxProcessed int // size threshold that forces a re-compression pass
	maxUnprocessed int // buffered adds allowed before a re-compression pass
	processed []centroid // compressed centroids, sorted by mean after process()
	unprocessed []centroid // recently added centroids awaiting a process() pass
	cumulative []float64 // NOTE(review): maintained outside this chunk — presumably cumulative weights for quantile lookup
	processedWeight float64 // total weight held in processed
	unprocessedWeight float64 // total weight buffered in unprocessed
	min float64 // smallest processed centroid mean seen so far
	max float64 // largest processed centroid mean seen so far
	sum float64 // running sum of mean*weight over all accepted samples
	sumSquares float64 // running sum of mean^2*weight over all accepted samples
}
// New initializes a new distribution with a default compression.
func New() *TDigest {
	return NewWithCompression(1000)
}

// NewWithCompression initializes a new distribution with custom compression.
// The processed/unprocessed capacity limits are derived from the compression
// value by processedSize/unprocessedSize (defined elsewhere in this package).
func NewWithCompression(c int) *TDigest {
	t := &TDigest{compression: float64(c)}
	t.maxProcessed = processedSize(0, c)
	t.maxUnprocessed = unprocessedSize(0, c)
	t.processed = make([]centroid, 0, t.maxProcessed)
	// One extra slot: add() appends before checking the limit.
	t.unprocessed = make([]centroid, 0, t.maxUnprocessed+1)
	t.Reset()
	return t
}
// Reset resets the distribution to its initial state, keeping the allocated
// centroid storage for reuse.
func (t *TDigest) Reset() {
	t.processed = t.processed[:0]
	t.unprocessed = t.unprocessed[:0]
	t.cumulative = t.cumulative[:0]
	t.processedWeight = 0
	t.unprocessedWeight = 0
	// Sentinels so the first processed centroid always updates min/max.
	t.min = math.MaxFloat64
	t.max = -math.MaxFloat64
	t.sum = 0
	t.sumSquares = 0
}

// Add adds a value x with a weight w to the distribution.
func (t *TDigest) Add(x, w float64) {
	t.add(centroid{Mean: x, Weight: w})
}

// add buffers a centroid and triggers a re-compression pass once either
// buffer limit is exceeded. Samples with a NaN mean, or a weight that is not
// strictly positive and finite, are silently dropped.
func (t *TDigest) add(c centroid) {
	if math.IsNaN(c.Mean) || c.Weight <= 0 || math.IsNaN(c.Weight) || math.IsInf(c.Weight, 1) {
		return
	}
	t.unprocessed = append(t.unprocessed, c)
	t.unprocessedWeight += c.Weight
	// The moment sums are updated immediately; they are independent of the
	// centroid merging performed later by process().
	val := c.Mean * c.Weight
	t.sum += val
	t.sumSquares += val * c.Mean
	if len(t.processed) > t.maxProcessed || len(t.unprocessed) > t.maxUnprocessed {
		t.process()
	}
}
// Merge merges the supplied digest into this digest. Functionally equivalent to
// calling t.AddCentroidList(t2.Centroids(nil)), but avoids making an extra
// copy of the CentroidList.
func (t *TDigest) Merge(t2 *TDigest) {
	// Flush t2's buffer first so all of its weight is in t2.processed.
	t2.process()
	for _, c := range t2.processed {
		t.add(c)
	}
}
// process merges the buffered unprocessed centroids into the processed
// list, compressing adjacent centroids under the t-digest size bound so
// the total centroid count stays proportional to the compression.
func (t *TDigest) process() {
	if len(t.unprocessed) > 0 || len(t.processed) > t.maxProcessed {
		// Append all processed centroids to the unprocessed list and sort
		t.unprocessed = append(t.unprocessed, t.processed...)
		sort.Sort(centroids(t.unprocessed))
		// Reset processed list with first centroid
		t.processed = t.processed[:0]
		t.processed = append(t.processed, t.unprocessed[0])
		t.processedWeight += t.unprocessedWeight
		t.unprocessedWeight = 0
		soFar := t.unprocessed[0].Weight
		// limit is the cumulative weight at which the current output
		// centroid must be closed (the k-size criterion).
		limit := t.processedWeight * t.integratedQ(1.0)
		for _, centroid := range t.unprocessed[1:] {
			projected := soFar + centroid.Weight
			if projected <= limit {
				// Still within the size bound: fold into the last output centroid.
				soFar = projected
				t.processed[len(t.processed)-1].merge(centroid)
			} else {
				// Bound exceeded: start a new output centroid and recompute
				// the limit from the current quantile position.
				k1 := t.integratedLocation(soFar / t.processedWeight)
				limit = t.processedWeight * t.integratedQ(k1+1.0)
				soFar += centroid.Weight
				t.processed = append(t.processed, centroid)
			}
		}
		// processed is sorted by mean, so its ends bound min/max.
		t.min = math.Min(t.min, t.processed[0].Mean)
		t.max = math.Max(t.max, t.processed[len(t.processed)-1].Mean)
		t.unprocessed = t.unprocessed[:0]
	}
}
// NumCentroids returns the number of centroids in the digest.
// It flushes the unprocessed buffer first so the count is final.
func (t *TDigest) NumCentroids() int {
	t.process()
	return len(t.processed)
}

// Centroid returns the mean and weight of centroid at the given index.
// The index must be >=0 and <NumCentroids(), otherwise this method will panic.
func (t *TDigest) Centroid(index int) (mean, weight float64) {
	t.process()
	c := t.processed[index]
	return c.Mean, c.Weight
}

// Count returns the total weight observed.
func (t *TDigest) Count() float64 {
	return t.unprocessedWeight + t.processedWeight
}

// Sum returns the weighted sum of all observed values.
func (t *TDigest) Sum() float64 {
	return t.sum
}

// Mean returns a mean average. Returns NaN when nothing has been observed.
func (t *TDigest) Mean() float64 {
	if t.Count() == 0 {
		return math.NaN()
	}
	return t.sum / t.Count()
}

// SampleVariance is the sample variance (Bessel-corrected, divides by n-1).
// Returns NaN when the total weight is <= 1.
func (t *TDigest) SampleVariance() float64 {
	weight := t.Count()
	if weight <= 1 {
		return math.NaN()
	}
	return (t.sumSquares - (t.sum * t.sum / weight)) / (weight - 1)
}

// PopulationVariance is the population variance (divides by n).
// Returns NaN when the total weight is <= 0.
func (t *TDigest) PopulationVariance() float64 {
	weight := t.Count()
	if weight <= 0 {
		return math.NaN()
	}
	return (t.sumSquares - (t.sum * t.sum / weight)) / weight
}
// updateCumulative rebuilds the cumulative weight table used by Quantile
// and CDF: entry i holds the cumulative weight at the midpoint of
// centroid i, and the final entry holds the total processed weight.
func (t *TDigest) updateCumulative() {
	// Weight can only increase, so the final cumulative value will always be
	// either equal to, or less than, the total weight. If they are the same,
	// then nothing has changed since the last update.
	if len(t.cumulative) > 0 && t.cumulative[len(t.cumulative)-1] == t.processedWeight {
		return
	}
	// Reuse the existing backing array when it is large enough.
	if n := len(t.processed) + 1; n <= cap(t.cumulative) {
		t.cumulative = t.cumulative[:n]
	} else {
		t.cumulative = make([]float64, n)
	}
	prev := 0.0
	for i, centroid := range t.processed {
		cur := centroid.Weight
		// Midpoint of this centroid's weight span.
		t.cumulative[i] = prev + cur/2.0
		prev = prev + cur
	}
	t.cumulative[len(t.processed)] = prev
}
// Min returns the minimum observed value.
// Flushes the unprocessed buffer first so buffered samples are included.
func (t *TDigest) Min() float64 {
	t.process()
	return t.min
}

// Max returns the maximum observed value.
// Flushes the unprocessed buffer first so buffered samples are included.
func (t *TDigest) Max() float64 {
	t.process()
	return t.max
}
// Quantile returns the (approximate) quantile of
// the distribution. Accepted values for q are between 0.0 and 1.0.
// Returns NaN if Count is zero or bad inputs.
func (t *TDigest) Quantile(q float64) float64 {
	t.process()
	t.updateCumulative()
	if q < 0 || q > 1 || len(t.processed) == 0 {
		return math.NaN()
	}
	if len(t.processed) == 1 {
		return t.processed[0].Mean
	}
	// index is the target cumulative weight for quantile q.
	index := q * t.processedWeight
	// Left tail: interpolate linearly between min and the first mean.
	if index <= t.processed[0].Weight/2.0 {
		return t.min + 2.0*index/t.processed[0].Weight*(t.processed[0].Mean-t.min)
	}
	// Find the first cumulative midpoint at or above the target.
	lower := sort.Search(len(t.cumulative), func(i int) bool {
		return t.cumulative[i] >= index
	})
	if lower+1 != len(t.cumulative) {
		// Interior case: interpolate between the two bracketing centroids.
		z1 := index - t.cumulative[lower-1]
		z2 := t.cumulative[lower] - index
		return weightedAverage(t.processed[lower-1].Mean, z2, t.processed[lower].Mean, z1)
	}
	// Right tail: interpolate between the last mean and max.
	// NOTE(review): z1 = index - processedWeight - weight/2 is non-positive
	// here; this matches the upstream implementation — confirm intent
	// before changing.
	z1 := index - t.processedWeight - t.processed[lower-1].Weight/2.0
	z2 := (t.processed[lower-1].Weight / 2.0) - z1
	return weightedAverage(t.processed[len(t.processed)-1].Mean, z1, t.max, z2)
}
// CDF returns the cumulative distribution function for a given value x,
// i.e. the (approximate) fraction of observed weight at or below x.
func (t *TDigest) CDF(x float64) float64 {
	t.process()
	t.updateCumulative()
	switch len(t.processed) {
	case 0:
		return 0.0
	case 1:
		width := t.max - t.min
		if x <= t.min {
			return 0.0
		}
		if x >= t.max {
			return 1.0
		}
		// NOTE(review): for min < x < max, x-min <= width always holds, so
		// this branch always returns 0.5 and the interpolation below is
		// unreachable. This mirrors the upstream code; left unchanged.
		if (x - t.min) <= width {
			// min and max are too close together to do any viable interpolation
			return 0.5
		}
		return (x - t.min) / width
	}
	if x <= t.min {
		return 0.0
	}
	if x >= t.max {
		return 1.0
	}
	m0 := t.processed[0].Mean
	// Left Tail: interpolate between min and the first centroid's mean.
	if x <= m0 {
		if m0-t.min > 0 {
			return (x - t.min) / (m0 - t.min) * t.processed[0].Weight / t.processedWeight / 2.0
		}
		return 0.0
	}
	// Right Tail: interpolate between the last centroid's mean and max.
	mn := t.processed[len(t.processed)-1].Mean
	if x >= mn {
		if t.max-mn > 0.0 {
			return 1.0 - (t.max-x)/(t.max-mn)*t.processed[len(t.processed)-1].Weight/t.processedWeight/2.0
		}
		return 1.0
	}
	// Interior: find the first centroid with mean above x and interpolate
	// between the cumulative midpoints of it and its predecessor.
	upper := sort.Search(len(t.processed), func(i int) bool {
		return t.processed[i].Mean > x
	})
	z1 := x - t.processed[upper-1].Mean
	z2 := t.processed[upper].Mean - x
	return weightedAverage(t.cumulative[upper-1], z2, t.cumulative[upper], z1) / t.processedWeight
}
// integratedQ maps a k-index back to a quantile via the inverse of the
// t-digest scale function (sine-based), clamping k to the compression.
func (t *TDigest) integratedQ(k float64) float64 {
	return (math.Sin(math.Min(k, t.compression)*math.Pi/t.compression-math.Pi/2.0) + 1.0) / 2.0
}

// integratedLocation maps a quantile q in [0,1] to its k-index under the
// t-digest scale function (arcsine-based).
func (t *TDigest) integratedLocation(q float64) float64 {
	return t.compression * (math.Asin(2.0*q-1.0) + math.Pi/2.0) / math.Pi
}
// weightedAverage returns the weighted mean of the two points, clamped
// to the interval they span. The points may be given in either order.
func weightedAverage(x1, w1, x2, w2 float64) float64 {
	if x1 > x2 {
		// Normalise argument order so the clamp bounds are sorted.
		x1, w1, x2, w2 = x2, w2, x1, w1
	}
	return weightedAverageSorted(x1, w1, x2, w2)
}

// weightedAverageSorted returns the weighted mean of two points with
// x1 <= x2, clamped into [x1, x2].
func weightedAverageSorted(x1, w1, x2, w2 float64) float64 {
	mean := (x1*w1 + x2*w2) / (w1 + w2)
	return math.Max(x1, math.Min(mean, x2))
}
// processedSize returns the processed-buffer limit: the explicit size
// when given, otherwise twice the compression.
func processedSize(size, compression int) int {
	if size != 0 {
		return size
	}
	return 2 * compression
}

// unprocessedSize returns the unprocessed-buffer limit: the explicit
// size when given, otherwise eight times the compression.
func unprocessedSize(size, compression int) int {
	if size != 0 {
		return size
	}
	return 8 * compression
}
// centroid is a weighted point: the mean of the samples it summarises
// and their total weight.
type centroid struct {
	Mean   float64
	Weight float64
}

// merge folds r into c, updating the weight and the weighted mean
// incrementally.
func (c *centroid) merge(r centroid) {
	c.Weight += r.Weight
	c.Mean += r.Weight * (r.Mean - c.Mean) / c.Weight
}

// centroids implements sort.Interface, ordering by mean ascending.
type centroids []centroid

func (l centroids) Len() int           { return len(l) }
func (l centroids) Less(i, j int) bool { return l[i].Mean < l[j].Mean }
func (l centroids) Swap(i, j int)      { l[i], l[j] = l[j], l[i] }
package env
import (
"os"
"time"
"github.com/kubecost/cost-model/pkg/util/mapper"
)
//--------------------------------------------------------------------------
// EnvVar mapper.Map Implementation
//--------------------------------------------------------------------------
// envMap contains Getter and Setter implementations for environment variables
type envMap struct{}
// Get returns the value for the provided environment variable
func (em *envMap) Get(key string) string {
return os.Getenv(key)
}
// Set sets the value for the provided key and returns true if successful. Otherwise,
// false is returned.
func (em *envMap) Set(key string, value string) error {
return os.Setenv(key, value)
}
// This PrimitiveMapper implementation leverages os.Getenv() and os.Setenv() to get/set
// primitive go values as environment variables.
var envMapper mapper.PrimitiveMap = mapper.NewMapper(&envMap{})
//--------------------------------------------------------------------------
// Package Funcs
//--------------------------------------------------------------------------
// Get parses an string from the environment variable key parameter. If the environment
// variable is empty, the defaultValue parameter is returned.
func Get(key string, defaultValue string) string {
return envMapper.Get(key, defaultValue)
}
// GetInt parses an int from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetInt(key string, defaultValue int) int {
return envMapper.GetInt(key, defaultValue)
}
// GetInt8 parses an int8 from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetInt8(key string, defaultValue int8) int8 {
return envMapper.GetInt8(key, defaultValue)
}
// GetInt16 parses an int16 from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetInt16(key string, defaultValue int16) int16 {
return envMapper.GetInt16(key, defaultValue)
}
// GetInt32 parses an int32 from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetInt32(key string, defaultValue int32) int32 {
return envMapper.GetInt32(key, defaultValue)
}
// GetInt64 parses an int64 from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetInt64(key string, defaultValue int64) int64 {
return envMapper.GetInt64(key, defaultValue)
}
// GetUInt parses a uint from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetUInt(key string, defaultValue uint) uint {
return envMapper.GetUInt(key, defaultValue)
}
// GetUInt8 parses a uint8 from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetUInt8(key string, defaultValue uint8) uint8 {
return envMapper.GetUInt8(key, defaultValue)
}
// GetUInt16 parses a uint16 from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetUInt16(key string, defaultValue uint16) uint16 {
return envMapper.GetUInt16(key, defaultValue)
}
// GetUInt32 parses a uint32 from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetUInt32(key string, defaultValue uint32) uint32 {
return envMapper.GetUInt32(key, defaultValue)
}
// GetUInt64 parses a uint64 from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetUInt64(key string, defaultValue uint64) uint64 {
return envMapper.GetUInt64(key, defaultValue)
}
// GetFloat32 parses a float32 from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetFloat32(key string, defaultValue float32) float32 {
return envMapper.GetFloat32(key, defaultValue)
}
// GetFloat64 parses a float64 from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetFloat64(key string, defaultValue float64) float64 {
return envMapper.GetFloat64(key, defaultValue)
}
// GetBool parses a bool from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetBool(key string, defaultValue bool) bool {
return envMapper.GetBool(key, defaultValue)
}
// GetDuration parses a time.Duration from the environment variable key parameter. If the environment
// variable is empty or fails to parse, the defaultValue parameter is returned.
func GetDuration(key string, defaultValue time.Duration) time.Duration {
return envMapper.GetDuration(key, defaultValue)
}
//--------------------------------------------------------------------------
// Setters: each helper formats the value as a string and writes it to the
// named environment variable, returning any error from os.Setenv.
//--------------------------------------------------------------------------

// Set sets the environment variable for the key provided using the value provided.
func Set(key string, value string) error {
	return envMapper.Set(key, value)
}

// SetInt sets the environment variable to a string formatted int value.
func SetInt(key string, value int) error {
	return envMapper.SetInt(key, value)
}

// SetInt8 sets the environment variable to a string formatted int8 value.
func SetInt8(key string, value int8) error {
	return envMapper.SetInt8(key, value)
}

// SetInt16 sets the environment variable to a string formatted int16 value.
func SetInt16(key string, value int16) error {
	return envMapper.SetInt16(key, value)
}

// SetInt32 sets the environment variable to a string formatted int32 value.
func SetInt32(key string, value int32) error {
	return envMapper.SetInt32(key, value)
}

// SetInt64 sets the environment variable to a string formatted int64 value.
func SetInt64(key string, value int64) error {
	return envMapper.SetInt64(key, value)
}

// SetUInt sets the environment variable to a string formatted uint value.
func SetUInt(key string, value uint) error {
	return envMapper.SetUInt(key, value)
}

// SetUInt8 sets the environment variable to a string formatted uint8 value.
func SetUInt8(key string, value uint8) error {
	return envMapper.SetUInt8(key, value)
}

// SetUInt16 sets the environment variable to a string formatted uint16 value.
func SetUInt16(key string, value uint16) error {
	return envMapper.SetUInt16(key, value)
}

// SetUInt32 sets the environment variable to a string formatted uint32 value.
func SetUInt32(key string, value uint32) error {
	return envMapper.SetUInt32(key, value)
}

// SetUInt64 sets the environment variable to a string formatted uint64 value.
func SetUInt64(key string, value uint64) error {
	return envMapper.SetUInt64(key, value)
}

// SetBool sets the environment variable to a string formatted bool value.
func SetBool(key string, value bool) error {
	return envMapper.SetBool(key, value)
}
// SetDuration sets the environment variable to a string formatted time.Duration
func SetDuration(key string, value time.Duration) error {
return envMapper.SetDuration(key, value)
} | pkg/env/env.go | 0.795777 | 0.437463 | env.go | starcoder |
package main
/*
Given a string containing just the characters '(', ')', '{', '}', '[' and ']', determine if the input string is valid.
An input string is valid if:
Open brackets must be closed by the same type of brackets.
Open brackets must be closed in the correct order.
Note that an empty string is also considered valid.
Example 1:
Input: "()"
Output: true
Example 2:
Input: "()[]{}"
Output: true
Example 3:
Input: "(]"
Output: false
Example 4:
Input: "([)]"
Output: false
Example 5:
Input: "{[]}"
Output: true
给定一个只包括 '(',')','{','}','[',']' 的字符串,判断字符串是否有效。
有效字符串需满足:
左括号必须用相同类型的右括号闭合。
左括号必须以正确的顺序闭合。
注意空字符串可被认为是有效字符串。
示例 1:
输入: "()"
输出: true
示例 2:
输入: "()[]{}"
输出: true
示例 3:
输入: "(]"
输出: false
示例 4:
输入: "([)]"
输出: false
示例 5:
输入: "{[]}"
输出: true
*/
import "fmt"
// 解法一使用压栈的方法,产生顺序符合并且匹配的两个括号就弹出,最后如果栈是空的说明所有括号都被匹配了。
// 时间复杂度 O(n),空间复杂度O(n)
/*Runtime: 0 ms, faster than 100.00% of Go online submissions for Valid Parentheses.
Memory Usage: 1.9 MB, less than 100.00% of Go online submissions for Valid Parentheses.*/
/*func bracketMatch(left, right byte) bool {
if (left == '(' && right == ')') || (left == '[' && right == ']') || (left == '{' && right == '}') {
return true
}
return false
}
func isValid(s string) bool {
sLen := len(s)
if sLen%2 != 0 {
return false
}
if sLen == 0 {
return true
}
stack := make([]byte, len(s))
stack[0] = s[0]
top := 1
for i := 1; i < sLen; i++ {
// 如果没有达到栈底进行这个判断,判断是否匹配
if top >= 1 && bracketMatch(stack[top-1], s[i]) {
top--
} else {
stack[top] = s[i]
top++
}
fmt.Println(stack)
}
if top == 0 {
return true
}
return false
}*/
// 解法一优化
// 判断是否匹配的一部写了几个||的逻辑,实际上可以用map代替,只需判断一次即可
/*时间复杂度:O(n)O(n),因为我们一次只遍历给定的字符串中的一个字符并在栈上进行 O(1)O(1) 的推入和弹出操作。
空间复杂度:O(n)O(n),当我们将所有的开括号都推到栈上时以及在最糟糕的情况下,我们最终要把所有括号推到栈上。例如 ((((((((((*/
/*
执行用时 : 0 ms, 在Valid Parentheses的Go提交中击败了100.00% 的用户
内存消耗 : 2 MB, 在Valid Parentheses的Go提交中击败了17.00% 的用户
*/
// bracketMatch reports whether left and right form a matching
// open/close bracket pair, e.g. '(' with ')'.
// A switch avoids allocating a map on every call, which the previous
// implementation did.
func bracketMatch(left, right byte) bool {
	switch left {
	case '(':
		return right == ')'
	case '[':
		return right == ']'
	case '{':
		return right == '}'
	}
	return false
}
// isValid reports whether s consists of correctly matched and correctly
// ordered brackets drawn from "()[]{}".
// The empty string is considered valid.
func isValid(s string) bool {
	// An empty input is trivially valid.
	if len(s) == 0 {
		return true
	}
	// An odd number of characters can never pair up completely.
	if len(s)%2 != 0 {
		return false
	}
	// Maps each opening bracket to its required closing bracket.
	pairs := map[byte]byte{
		'{': '}',
		'[': ']',
		'(': ')',
	}
	// Stack of pending characters. A closer that fails to match is pushed
	// too; it can never be popped, so it forces a non-empty stack at the
	// end and therefore a false result.
	stack := make([]byte, 0, len(s))
	for i := 0; i < len(s); i++ {
		ch := s[i]
		popped := false
		if n := len(stack); n > 0 {
			if closer, ok := pairs[stack[n-1]]; ok && closer == ch {
				// Top of the stack opens the pair that ch closes: pop it.
				stack = stack[:n-1]
				popped = true
			}
		}
		if !popped {
			stack = append(stack, ch)
		}
	}
	// Fully matched input leaves the stack empty.
	return len(stack) == 0
}
func main() {
// input1:="{}"
// input2 := "()[]{}"
input3 := "{{)}"
fmt.Println(isValid(input3))
} | Programs/020Valid Parentheses/020Valid Parentheses.go | 0.524638 | 0.401746 | 020Valid Parentheses.go | starcoder |
package boomer
import (
"errors"
"math"
"strconv"
"strings"
"sync/atomic"
"time"
)
// rateLimiter puts limits on task executions. start() must be called
// before acquire(); stop() ends the background refill goroutine.
type rateLimiter interface {
	start()
	acquire() bool
	stop()
}

// stableRateLimiter uses the token bucket algorithm.
// the bucket is refilled according to the refill period, no burst is allowed.
// NOTE(review): "Peroid" is a long-standing misspelling of "Period"; the
// fields are internal but renaming touches every method, so it is only
// flagged here.
type stableRateLimiter struct {
	threshold int64 // tokens granted per refill period
	currentThreshold int64 // tokens remaining this period; decremented atomically
	refillPeroid time.Duration // how often the bucket is refilled
	broadcastChannel chan bool // closed on each refill to wake blocked acquirers
	quitChannel chan bool // closed by stop() to terminate the refill goroutine
}
// newStableRateLimiter builds a limiter that grants up to threshold
// acquisitions per refillPeroid. Call start() before using it.
func newStableRateLimiter(threshold int64, refillPeroid time.Duration) (rateLimiter *stableRateLimiter) {
	rateLimiter = &stableRateLimiter{
		threshold:        threshold,
		currentThreshold: threshold,
		refillPeroid:     refillPeroid,
		broadcastChannel: make(chan bool),
	}
	return rateLimiter
}

// start launches the background goroutine that refills the bucket every
// refillPeroid and wakes any goroutines blocked in acquire().
// NOTE(review): broadcastChannel is replaced here without synchronization
// while acquire() may be reading it concurrently — a data race; verify
// with -race before relying on this in new code.
func (limiter *stableRateLimiter) start() {
	limiter.quitChannel = make(chan bool)
	quitChannel := limiter.quitChannel
	go func() {
		for {
			select {
			case <-quitChannel:
				return
			default:
				// Refill the bucket, wait out the period, then broadcast
				// by closing (and re-creating) the channel.
				atomic.StoreInt64(&limiter.currentThreshold, limiter.threshold)
				time.Sleep(limiter.refillPeroid)
				close(limiter.broadcastChannel)
				limiter.broadcastChannel = make(chan bool)
			}
		}
	}()
}
// acquire takes one token, blocking until the next refill when the
// bucket is empty. The return value reports whether the caller blocked.
func (limiter *stableRateLimiter) acquire() (blocked bool) {
	permit := atomic.AddInt64(&limiter.currentThreshold, -1)
	if permit < 0 {
		blocked = true
		// block until the bucket is refilled
		<-limiter.broadcastChannel
	} else {
		blocked = false
	}
	return blocked
}

// stop terminates the refill goroutine started by start().
func (limiter *stableRateLimiter) stop() {
	close(limiter.quitChannel)
}
// ErrParsingWarmUpRate is the error returned if the format of warmUpRate is invalid.
var ErrParsingWarmUpRate = errors.New("ratelimiter: invalid format of warmUpRate, try \"1\" or \"1/1s\"")

// warmUpRateLimiter uses the token bucket algorithm.
// the threshold is updated according to the warm up rate.
// the bucket is refilled according to the refill period, no burst is allowed.
type warmUpRateLimiter struct {
	maxThreshold int64 // ceiling the warm-up ramp converges to
	nextThreshold int64 // threshold to apply on the next refill; grows during warm-up
	currentThreshold int64 // tokens remaining this period; decremented atomically
	refillPeroid time.Duration // how often the bucket is refilled
	warmUpRate string // raw rate spec, e.g. "1" or "1/1s"
	warmUpStep int64 // parsed increment applied every warmUpPeroid
	warmUpPeroid time.Duration // parsed interval between increments
	broadcastChannel chan bool // closed on each refill to wake blocked acquirers
	warmUpChannel chan bool // NOTE(review): declared but never used in the visible code
	quitChannel chan bool // closed by stop() to terminate background goroutines
}
// newWarmUpRateLimiter builds a limiter whose threshold ramps from 0 up
// to maxThreshold according to warmUpRate ("N" or "N/duration").
// Returns ErrParsingWarmUpRate when the rate spec is malformed.
func newWarmUpRateLimiter(maxThreshold int64, warmUpRate string, refillPeroid time.Duration) (rateLimiter *warmUpRateLimiter, err error) {
	rateLimiter = &warmUpRateLimiter{
		maxThreshold:     maxThreshold,
		nextThreshold:    0,
		currentThreshold: 0,
		warmUpRate:       warmUpRate,
		refillPeroid:     refillPeroid,
		broadcastChannel: make(chan bool),
	}
	rateLimiter.warmUpStep, rateLimiter.warmUpPeroid, err = rateLimiter.parseWarmUpRate(rateLimiter.warmUpRate)
	if err != nil {
		return nil, err
	}
	return rateLimiter, nil
}
// parseWarmUpRate parses a warm-up rate spec. Two forms are accepted:
// a bare integer ("5"), meaning a step of 5 per second, or "N/duration"
// ("5/2s"), meaning a step of N per the given duration. Any malformed
// input yields ErrParsingWarmUpRate.
func (limiter *warmUpRateLimiter) parseWarmUpRate(warmUpRate string) (warmUpStep int64, warmUpPeroid time.Duration, err error) {
	if !strings.Contains(warmUpRate, "/") {
		// Bare integer form: step per second.
		warmUpStep, err = strconv.ParseInt(warmUpRate, 10, 64)
		if err != nil {
			return warmUpStep, warmUpPeroid, ErrParsingWarmUpRate
		}
		return warmUpStep, time.Second, nil
	}
	parts := strings.Split(warmUpRate, "/")
	if len(parts) != 2 {
		return warmUpStep, warmUpPeroid, ErrParsingWarmUpRate
	}
	warmUpStep, err = strconv.ParseInt(parts[0], 10, 64)
	if err != nil {
		return warmUpStep, warmUpPeroid, ErrParsingWarmUpRate
	}
	warmUpPeroid, err = time.ParseDuration(parts[1])
	if err != nil {
		return warmUpStep, warmUpPeroid, ErrParsingWarmUpRate
	}
	return warmUpStep, warmUpPeroid, nil
}
// start launches two background goroutines: one refills the bucket each
// refillPeroid, the other ramps nextThreshold up by warmUpStep every
// warmUpPeroid until it reaches maxThreshold.
// NOTE(review): nextThreshold and broadcastChannel are read and written
// by different goroutines without synchronization — data races; verify
// with -race before extending this code.
func (limiter *warmUpRateLimiter) start() {
	limiter.quitChannel = make(chan bool)
	quitChannel := limiter.quitChannel
	// bucket updater
	go func() {
		for {
			select {
			case <-quitChannel:
				return
			default:
				// Apply the current warm-up threshold, wait out the period,
				// then wake blocked acquirers by closing the channel.
				atomic.StoreInt64(&limiter.currentThreshold, limiter.nextThreshold)
				time.Sleep(limiter.refillPeroid)
				close(limiter.broadcastChannel)
				limiter.broadcastChannel = make(chan bool)
			}
		}
	}()
	// threshold updater
	go func() {
		for {
			select {
			case <-quitChannel:
				return
			default:
				limiter.nextThreshold = limiter.nextThreshold + limiter.warmUpStep
				if limiter.nextThreshold < 0 {
					// int64 overflow
					limiter.nextThreshold = int64(math.MaxInt64)
				}
				// Cap the ramp at the configured maximum.
				if limiter.nextThreshold > limiter.maxThreshold {
					limiter.nextThreshold = limiter.maxThreshold
				}
				time.Sleep(limiter.warmUpPeroid)
			}
		}
	}()
}
// acquire takes one token, blocking until the next refill when the
// bucket is empty. The return value reports whether the caller blocked.
func (limiter *warmUpRateLimiter) acquire() (blocked bool) {
	permit := atomic.AddInt64(&limiter.currentThreshold, -1)
	if permit < 0 {
		blocked = true
		// block until the bucket is refilled
		<-limiter.broadcastChannel
	} else {
		blocked = false
	}
	return blocked
}
func (limiter *warmUpRateLimiter) stop() {
limiter.nextThreshold = 0
close(limiter.quitChannel)
} | ratelimiter.go | 0.579638 | 0.428652 | ratelimiter.go | starcoder |
package positionhash
import (
"fmt"
"github.com/emilyselwood/orbcalc/orbcore"
"strings"
"time"
)
/*
Hasher defines a way to create spacial temporal hashes.
Inspired by geohash, this extends to 4 dimensions.
*/
type Hasher interface {
	Hash(pos *orbcore.Position) (string, error)
	// NOTE(review): HexHasher.Box returns (orbcore.BoundingBox, error),
	// not a pointer, so HexHasher does not actually satisfy this
	// interface — one of the two signatures needs to change.
	Box(hash string) (*orbcore.BoundingBox, error)
}

/*
HexHash is a Hasher that uses 16 buckets per level. This splits each dimension in half every go. Ending up with a binary
tree across four dimensions.
The idea here is like a geohash but across more dimensions so we can define a box of space and time and easily match
positions that are in the box or not.
An instance of HexHasher is *not* thread safe.
*/
type HexHasher struct {
	Space *orbcore.BoundingBox // full extent every hashed position must lie within
	Depth int // number of hash characters to generate
	boxBuffer [16]orbcore.BoundingBox // scratch reused by splitBox; makes the hasher non thread safe
	sb strings.Builder // accumulates hash characters during Hash; also non thread safe
}
// Hash produces a Depth-character hex hash for pos by recursively
// subdividing Space. Returns an error when pos lies outside Space.
func (hh *HexHasher) Hash(pos *orbcore.Position) (string, error) {
	if !hh.Space.Contains(pos) {
		return "", fmt.Errorf("position is not valid for this hasher")
	}
	// Reuse the builder between calls; size it for the full hash up front.
	hh.sb.Reset()
	hh.sb.Grow(hh.Depth)
	err := hh.generateHexHash(pos, *(hh.Space))
	if err != nil {
		return "", err
	}
	return hh.sb.String(), nil
}

// Box returns the bounding box that the given hash prefix denotes,
// descending from this hasher's Space one hex digit at a time.
func (hh *HexHasher) Box(hash string) (orbcore.BoundingBox, error) {
	return hh.findBox(hash, *(hh.Space))
}
// generateHexHash appends one hex digit per recursion level to hh.sb,
// descending into whichever of the 16 sub-boxes contains pos, until the
// configured depth is reached.
func (hh *HexHasher) generateHexHash(pos *orbcore.Position, box orbcore.BoundingBox) error {
	// Stop once the hash has reached the configured depth.
	if hh.sb.Len() == hh.Depth {
		return nil
	}
	const hexValues string = "0123456789ABCDEF"
	splits := splitBox(box, hh.boxBuffer)
	for i := 0; i < 16; i++ {
		b := splits[i]
		if b.Contains(pos) {
			hh.sb.WriteByte(hexValues[i])
			return hh.generateHexHash(pos, b)
		}
	}
	return fmt.Errorf("could not find sub bounding box to select from %v for point %v", box, pos)
}
// findBox recursively narrows parent down one hex digit at a time until
// the hash is consumed, returning the final sub-box.
// Only uppercase hex digits 0-9A-F are accepted; anything else
// (including lowercase hex) is rejected via the range check below.
func (hh *HexHasher) findBox(hash string, parent orbcore.BoundingBox) (orbcore.BoundingBox, error) {
	if hash == "" {
		return parent, nil
	}
	index := int(hexToDec(hash[0]))
	// NOTE(review): hexToDec returns a uint8, so index < 0 can never be
	// true; only the >= 16 half of this check is live.
	if index < 0 || index >= 16 {
		return parent, fmt.Errorf("unknown character in hash, 0-9A-F are valid")
	}
	splits := splitBox(parent, hh.boxBuffer)
	return hh.findBox(hash[1:], splits[index])
}
/*
splitBox cuts a bounding box in half along all four of its dimensions,
producing 16 sub-boxes. Bit i of the result index selects which half is
used per dimension: bit 0 = X, bit 1 = Y, bit 2 = Z, bit 3 = time
(0 = lower half, 1 = upper half).

This replaces a hand-unrolled version that contained copy-paste bugs:
result[1] covered the full X range with a degenerate (midY, midY) Y
range, and result[9] was similarly wrong, so points falling in those
sub-regions could not be assigned to any sub-box.
*/
func splitBox(box orbcore.BoundingBox, result [16]orbcore.BoundingBox) [16]orbcore.BoundingBox {
	minX, midX, maxX := splitFloat64(box.MinX, box.MaxX)
	minY, midY, maxY := splitFloat64(box.MinY, box.MaxY)
	minZ, midZ, maxZ := splitFloat64(box.MinZ, box.MaxZ)
	minTime, midTime, maxTime := splitTime(box.MinTime, box.MaxTime)
	for i := 0; i < 16; i++ {
		// One bit per dimension picks the lower or upper half.
		if i&1 == 0 {
			result[i].MinX, result[i].MaxX = minX, midX
		} else {
			result[i].MinX, result[i].MaxX = midX, maxX
		}
		if i&2 == 0 {
			result[i].MinY, result[i].MaxY = minY, midY
		} else {
			result[i].MinY, result[i].MaxY = midY, maxY
		}
		if i&4 == 0 {
			result[i].MinZ, result[i].MaxZ = minZ, midZ
		} else {
			result[i].MinZ, result[i].MaxZ = midZ, maxZ
		}
		if i&8 == 0 {
			result[i].MinTime, result[i].MaxTime = minTime, midTime
		} else {
			result[i].MinTime, result[i].MaxTime = midTime, maxTime
		}
	}
	return result
}
/*
splitFloat64 finds the mid point between two floats
*/
func splitFloat64(min, max float64) (float64, float64, float64) {
return min, ((max - min) / 2) + min, max
}
func splitTime(min, max time.Time) (time.Time, time.Time, time.Time) {
return min, min.Add(time.Duration(int64(max.Sub(min)) / 2)), max
}
// hexToDec converts a single uppercase hexadecimal digit ('0'-'9',
// 'A'-'F') to its numeric value. Any other input (including lowercase
// hex) produces an out-of-range value that callers must reject — see
// the range check in findBox.
func hexToDec(c uint8) uint8 {
	if c >= '0' && c <= '9' {
		return c - '0'
	}
	return (c - 'A') + 10
}
package osmfile
type DataKind int
const (
DataKindNodes DataKind = 0
DataKindWays DataKind = 1
DataKindRelations DataKind = 2
)
func (k DataKind) String() string {
switch k {
case DataKindNodes:
return "nodes"
case DataKindWays:
return "ways"
case DataKindRelations:
return "relations"
default:
return "unknown"
}
}
// blockNode is the compact in-block representation of an OSM node:
// its id, coordinates, and the slice of the block's node-string table
// holding its tag strings.
type blockNode struct {
	id   int64
	lat  float64
	lon  float64
	sset uint32 // position of first string
	send uint32 // position of last string plus one
}

// Node pairs a blockNode with the Block that owns its string table.
type Node struct {
	blockNode
	block Block
}

// ID returns the OSM node id.
func (n Node) ID() int64 {
	return n.id
}

// Lat returns the node's latitude.
func (n Node) Lat() float64 {
	return n.lat
}

// Lon returns the node's longitude.
func (n Node) Lon() float64 {
	return n.lon
}

// NumStrings returns how many strings are attached to this node.
func (n Node) NumStrings() int {
	return int(n.send - n.sset)
}

// StringAt resolves the node's index-th string via the block's
// node-string index and shared string table.
func (n Node) StringAt(index int) string {
	return n.block.StringAt(int(n.block.nodeStrings[n.sset:n.send][index]))
}
// blockRelation is the compact in-block representation of an OSM
// relation: its id plus slices into the block's relation string and
// member tables.
type blockRelation struct {
	id   int64
	sset uint32 // position of first string
	send uint32 // position of last string plus one
	mset uint32 // position of first member ref
	mend uint32 // position of last member ref plus one
}

// Relation pairs a blockRelation with the Block that owns its tables.
type Relation struct {
	blockRelation
	block Block
}

// ID returns the OSM relation id.
func (r Relation) ID() int64 {
	return r.id
}

// NumStrings returns how many strings are attached to this relation.
func (r Relation) NumStrings() int {
	return int(r.send - r.sset)
}

// StringAt resolves the relation's index-th string via the block's
// relation-string index and shared string table.
func (r Relation) StringAt(index int) string {
	return r.block.StringAt(int(r.block.relationStrings[r.sset:r.send][index]))
}

// NumMembers returns how many members the relation has.
func (r Relation) NumMembers() int {
	return int(r.mend - r.mset)
}

// MemberAt returns the index-th member's type byte, referenced element
// id, and role string (resolved through the shared string table).
func (r Relation) MemberAt(index int) (typ byte, ref int64, role string) {
	typ = r.block.relationMemberTypes[r.mset:r.mend][index]
	ref = r.block.relationMemberRefs[r.mset:r.mend][index]
	role = r.block.StringAt(int(
		r.block.relationMemberRoles[r.mset:r.mend][index],
	))
	return
}
// blockWay is the compact in-block representation of an OSM way: its
// id plus slices into the block's way string and node-ref tables.
type blockWay struct {
	id   int64
	sset uint32 // position of first string
	send uint32 // position of last string plus one
	rset uint32 // position of first ref
	rend uint32 // position of last ref
}

// Way pairs a blockWay with the Block that owns its tables.
type Way struct {
	blockWay
	block Block
}

// ID returns the OSM way id.
func (w Way) ID() int64 {
	return w.id
}

// NumRefs returns how many node references the way has.
func (w Way) NumRefs() int {
	return int(w.rend - w.rset)
}

// RefAt returns the index-th referenced node id.
func (w Way) RefAt(index int) int64 {
	return w.block.wayRefs[w.rset:w.rend][index]
}

// NumStrings returns how many strings are attached to this way.
func (w Way) NumStrings() int {
	return int(w.send - w.sset)
}

// StringAt resolves the way's index-th string via the block's
// way-string index and shared string table.
func (w Way) StringAt(index int) string {
	return w.block.StringAt(int(w.block.wayStrings[w.sset:w.send][index]))
}
// Block is a column-oriented batch of decoded OSM elements. A block
// carries exactly one element kind (nodes, ways, or relations — see
// dataKind) plus the shared string table the elements index into.
type Block struct {
	// skip bool
	granularity     int64
	latOffset       int64
	lonOffset       int64
	dateGranularity int64
	// shared
	// num int
	dataKind     int // 0 = nodes, 1 = ways, 2 = relations
	stringsCount int
	stringsOne   string
	strings      []string
	// nodes
	nodes       []blockNode
	nodeStrings []uint32
	// ways
	ways       []blockWay
	wayStrings []uint32
	wayRefs    []int64
	// relations
	relations           []blockRelation
	relationStrings     []uint32
	relationMemberRoles []uint32
	relationMemberRefs  []int64
	relationMemberTypes []byte
}

// // Weight ...
// func (b Block) Weight() uint64 {
// 	return uint64(0 +
// 		int(unsafe.Sizeof(Block{})) +
// 		len(b.stringsOne) + len(b.strings)*int(unsafe.Sizeof("")) +
// 		cap(b.nodes)*int(unsafe.Sizeof(blockNode{})) + cap(b.nodeStrings)*4 +
// 		cap(b.ways)*int(unsafe.Sizeof(blockWay{})) + cap(b.wayStrings)*4 +
// 		/*   */ cap(b.wayRefs)*8 +
// 		cap(b.relations)*int(unsafe.Sizeof(blockRelation{})) +
// 		/*   */ cap(b.relationStrings)*4 +
// 		0,
// 	)
// }

// DataKind reports which element type this block carries.
func (b Block) DataKind() DataKind {
	return DataKind(b.dataKind)
}
// // Index ...
// func (b Block) Index() int {
// 	return b.num
// }

// NumStrings returns the size of the block's shared string table.
func (b Block) NumStrings() int {
	return b.stringsCount
}

// StringAt returns the string at the given index of the shared table.
func (b Block) StringAt(index int) string {
	return b.strings[index]
}

// NumNodes returns how many nodes the block carries.
func (b Block) NumNodes() int {
	return len(b.nodes)
}

// NodeAt returns the node at the given index within the block.
func (b Block) NodeAt(index int) Node {
	return Node{blockNode: b.nodes[index], block: b}
}

// NumWays returns how many ways the block carries.
func (b Block) NumWays() int {
	return len(b.ways)
}

// WayAt returns the way at the given index within the block.
func (b Block) WayAt(index int) Way {
	return Way{blockWay: b.ways[index], block: b}
}

// NumRelations returns how many relations the block carries.
func (b Block) NumRelations() int {
	return len(b.relations)
}
// RelationAt ...
func (b Block) RelationAt(index int) Relation {
return Relation{blockRelation: b.relations[index], block: b}
} | block.go | 0.509276 | 0.513059 | block.go | starcoder |
package kubernetes
import (
"regexp"
"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
)
// affinityFields returns the schema fields for a pod's affinity block:
// node affinity, inter-pod affinity and inter-pod anti-affinity.
func affinityFields() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"node_affinity": {
			Type:        schema.TypeList,
			Description: "Node affinity scheduling rules for the pod.",
			Optional:    true,
			MaxItems:    1,
			Elem: &schema.Resource{
				Schema: nodeAffinityFields(),
			},
		},
		"pod_affinity": {
			Type:        schema.TypeList,
			Description: "Inter-pod topological affinity. rules that specify that certain pods should be placed in the same topological domain (e.g. same node, same rack, same zone, same power domain, etc.)",
			Optional:    true,
			MaxItems:    1,
			Elem: &schema.Resource{
				Schema: podAffinityFields(),
			},
		},
		"pod_anti_affinity": {
			Type: schema.TypeList,
			// The previous description was copy-pasted from pod_affinity;
			// anti-affinity keeps pods OUT of the same topological domain.
			Description: "Inter-pod topological anti-affinity. rules that specify that certain pods should not be placed in the same topological domain (e.g. same node, same rack, same zone, same power domain, etc.)",
			Optional:    true,
			MaxItems:    1,
			Elem: &schema.Resource{
				Schema: podAffinityFields(),
			},
		},
	}
}
// nodeAffinityFields returns the schema for a node affinity block: hard
// requirements (required_during_scheduling_ignored_during_execution) and
// soft preferences (preferred_during_scheduling_ignored_during_execution).
func nodeAffinityFields() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"required_during_scheduling_ignored_during_execution": {
			Type: schema.TypeList,
			Description: "If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a node label update), the system may or may not try to eventually evict the pod from its node.",
			Optional: true,
			MaxItems: 1,
			Elem: &schema.Resource{
				Schema: nodeSelectorFields(),
			},
		},
		"preferred_during_scheduling_ignored_during_execution": {
			Type: schema.TypeList,
			Description: "The scheduler will prefer to schedule pods to nodes that satisfy the affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, RequiredDuringScheduling affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding 'weight' to the sum if the node matches the corresponding MatchExpressions; the node(s) with the highest sum are the most preferred.",
			Optional: true,
			Elem: &schema.Resource{
				Schema: preferredSchedulingTermFields(),
			},
		},
	}
}
// nodeSelectorFields returns the schema wrapping a list of node selector
// terms; per the description, a node matches if ANY term matches (ORed).
func nodeSelectorFields() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"node_selector_term": {
			Type: schema.TypeList,
			Description: "List of node selector terms. The terms are ORed.",
			Optional: true,
			Elem: &schema.Resource{
				Schema: nodeSelectorRequirementsFields(),
			},
		},
	}
}
// preferredSchedulingTermFields returns the schema for a weighted node
// selector term used by preferred (soft) node affinity rules.
func preferredSchedulingTermFields() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"weight": {
			Type:        schema.TypeInt,
			Description: "weight is in the range 1-100",
			Required:    true,
			// Enforce the documented Kubernetes API range at plan time
			// instead of letting invalid weights through to the API server.
			ValidateFunc: validation.IntBetween(1, 100),
		},
		"preference": {
			Type:        schema.TypeList,
			Description: "A node selector term, associated with the corresponding weight.",
			Required:    true,
			MaxItems:    1,
			Elem: &schema.Resource{
				Schema: nodeSelectorRequirementsFields(),
			},
		},
	}
}
// nodeSelectorRequirementsFields returns the schema for a node selector
// term: a list of label-key requirements that are ANDed together.
func nodeSelectorRequirementsFields() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"match_expressions": {
			Type:        schema.TypeList,
			Description: "List of node selector requirements. The requirements are ANDed.",
			Optional:    true,
			Elem: &schema.Resource{
				Schema: map[string]*schema.Schema{
					"key": {
						Type:        schema.TypeString,
						Description: "The label key that the selector applies to.",
						Optional:    true,
					},
					"operator": {
						Type: schema.TypeString,
						// Fixed punctuation typo ("DoesNotExist. Gt") in the description.
						Description:  "Operator represents a key's relationship to a set of values. Valid operators are In, NotIn, Exists, DoesNotExist, Gt, and Lt.",
						Optional:     true,
						ValidateFunc: validation.StringInSlice([]string{"In", "NotIn", "Exists", "DoesNotExist", "Gt", "Lt"}, false),
					},
					"values": {
						Type:        schema.TypeSet,
						Description: "Values is an array of string values. If the operator is In or NotIn, the values array must be non-empty. If the operator is Exists or DoesNotExist, the values array must be empty. If the operator is Gt or Lt, the values array must have a single element, which will be interpreted as an integer. This array is replaced during a strategic merge patch.",
						Optional:    true,
						Elem:        &schema.Schema{Type: schema.TypeString},
						Set:         schema.HashString,
					},
				},
			},
		},
	}
}
// podAffinityFields returns the schema shared by the pod_affinity and
// pod_anti_affinity blocks (the field shapes are identical; only the
// placement semantics differ).
func podAffinityFields() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"required_during_scheduling_ignored_during_execution": {
			Type: schema.TypeList,
			Description: "If the affinity requirements specified by this field are not met at scheduling time, the pod will not be scheduled onto the node. If the affinity requirements specified by this field cease to be met at some point during pod execution (e.g. due to a pod label update), the system may or may not try to eventually evict the pod from its node. When there are multiple elements, the lists of nodes corresponding to each PodAffinityTerm are intersected, i.e. all terms must be satisfied.",
			Optional: true,
			Elem: &schema.Resource{
				Schema: podAffinityTermFields(),
			},
		},
		"preferred_during_scheduling_ignored_during_execution": {
			Type: schema.TypeList,
			// NOTE(review): the description says "anti-affinity" because this
			// field set is reused for pod_anti_affinity; confirm whether a
			// dedicated description is wanted for the affinity case.
			Description: "The scheduler will prefer to schedule pods to nodes that satisfy the anti-affinity expressions specified by this field, but it may choose a node that violates one or more of the expressions. The node that is most preferred is the one with the greatest sum of weights, i.e. for each node that meets all of the scheduling requirements (resource request, RequiredDuringScheduling anti-affinity expressions, etc.), compute a sum by iterating through the elements of this field and adding 'weight' to the sum if the node matches the corresponding MatchExpressions; the node(s) with the highest sum are the most preferred.",
			Optional: true,
			Elem: &schema.Resource{
				Schema: weightedPodAffinityTermFields(),
			},
		},
	}
}
// podAffinityTermFields returns the schema for a single pod affinity
// term: a pod label selector, the namespaces it applies to, and the
// topology key defining the co-location domain.
func podAffinityTermFields() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"label_selector": {
			Type: schema.TypeList,
			Description: "A label query over a set of resources, in this case pods.",
			Optional: true,
			Elem: &schema.Resource{
				Schema: labelSelectorFields(true),
			},
		},
		"namespaces": {
			Type: schema.TypeSet,
			Description: "namespaces specifies which namespaces the labelSelector applies to (matches against); null or empty list means 'this pod's namespace'",
			Optional: true,
			Elem: &schema.Schema{Type: schema.TypeString},
			Set: schema.HashString,
		},
		"topology_key": {
			Type: schema.TypeString,
			Description: "empty topology key is interpreted by the scheduler as 'all topologies'",
			Optional: true,
			// Optional in the schema, but an empty string is still rejected.
			ValidateFunc: validation.StringMatch(regexp.MustCompile(`^.+$`), "value cannot be empty"),
		},
	}
}
func weightedPodAffinityTermFields() map[string]*schema.Schema {
return map[string]*schema.Schema{
"weight": {
Type: schema.TypeInt,
Description: "weight associated with matching the corresponding podAffinityTerm, in the range 1-100",
Required: true,
},
"pod_affinity_term": {
Type: schema.TypeList,
Description: "A pod affinity term, associated with the corresponding weight",
Required: true,
MaxItems: 1,
Elem: &schema.Resource{
Schema: podAffinityTermFields(),
},
},
}
} | vendor/github.com/hashicorp/terraform-provider-kubernetes/kubernetes/schema_affinity_spec.go | 0.664214 | 0.477615 | schema_affinity_spec.go | starcoder |
package pgsql
import (
"database/sql"
"database/sql/driver"
"encoding/hex"
)
// ByteaArrayFromByteSliceSlice returns a driver.Valuer that produces a PostgreSQL bytea[] from the given Go [][]byte.
// A nil slice is emitted as SQL NULL; an empty slice as the empty array '{}'.
func ByteaArrayFromByteSliceSlice(val [][]byte) driver.Valuer {
	return byteaArrayFromByteSliceSlice{val: val}
}
// ByteaArrayToByteSliceSlice returns an sql.Scanner that converts a PostgreSQL bytea[] into a Go [][]byte and sets it to val.
// A NULL array sets *val to nil.
func ByteaArrayToByteSliceSlice(val *[][]byte) sql.Scanner {
	return byteaArrayToByteSliceSlice{val: val}
}
// ByteaArrayFromStringSlice returns a driver.Valuer that produces a PostgreSQL bytea[] from the given Go []string.
// A nil slice is emitted as SQL NULL; an empty slice as the empty array '{}'.
func ByteaArrayFromStringSlice(val []string) driver.Valuer {
	return byteaArrayFromStringSlice{val: val}
}
// ByteaArrayToStringSlice returns an sql.Scanner that converts a PostgreSQL bytea[] into a Go []string and sets it to val.
// A NULL array sets *val to nil.
func ByteaArrayToStringSlice(val *[]string) sql.Scanner {
	return byteaArrayToStringSlice{val: val}
}
// byteaArrayFromByteSliceSlice adapts a [][]byte to driver.Valuer.
type byteaArrayFromByteSliceSlice struct {
	val [][]byte
}

// Value encodes the wrapped [][]byte as a PostgreSQL bytea[] literal.
// A nil slice maps to SQL NULL; an empty slice to '{}'. Each element is
// rendered in the hex-escape form "\\x<hex>".
func (v byteaArrayFromByteSliceSlice) Value() (driver.Value, error) {
	if v.val == nil {
		return nil, nil
	}
	if len(v.val) == 0 {
		return []byte{'{', '}'}, nil
	}
	var buf []byte
	buf = append(buf, '{')
	for _, elem := range v.val {
		buf = append(buf, `"\\x`...)
		buf = append(buf, hex.EncodeToString(elem)...)
		buf = append(buf, '"', ',')
	}
	// Replace the trailing comma with the closing brace.
	buf[len(buf)-1] = '}'
	return buf, nil
}
// byteaArrayToByteSliceSlice adapts a *[][]byte to sql.Scanner.
type byteaArrayToByteSliceSlice struct {
	val *[][]byte
}

// Scan parses a PostgreSQL bytea[] literal and stores the decoded
// elements in *v.val; a NULL array sets *v.val to nil.
func (v byteaArrayToByteSliceSlice) Scan(src interface{}) error {
	raw, err := srcbytes(src)
	if err != nil {
		return err
	}
	if raw == nil {
		*v.val = nil
		return nil
	}
	items := pgParseCommaArray(raw)
	out := make([][]byte, len(items))
	for i, item := range items {
		// Each element looks like "\\x<hex>"; strip the leading "\\x
		// (4 bytes) and the trailing quote before hex-decoding.
		encoded := item[4 : len(item)-1]
		decoded := make([]byte, hex.DecodedLen(len(encoded)))
		if _, err := hex.Decode(decoded, encoded); err != nil {
			return err
		}
		out[i] = decoded
	}
	*v.val = out
	return nil
}
// byteaArrayFromStringSlice adapts a []string to driver.Valuer.
type byteaArrayFromStringSlice struct {
	val []string
}

// Value encodes the wrapped []string as a PostgreSQL bytea[] literal.
// A nil slice maps to SQL NULL; an empty slice to '{}'. Each string's
// bytes are rendered in the hex-escape form "\\x<hex>".
func (v byteaArrayFromStringSlice) Value() (driver.Value, error) {
	if v.val == nil {
		return nil, nil
	}
	if len(v.val) == 0 {
		return []byte{'{', '}'}, nil
	}
	var buf []byte
	buf = append(buf, '{')
	for _, elem := range v.val {
		buf = append(buf, `"\\x`...)
		buf = append(buf, hex.EncodeToString([]byte(elem))...)
		buf = append(buf, '"', ',')
	}
	// Replace the trailing comma with the closing brace.
	buf[len(buf)-1] = '}'
	return buf, nil
}
type byteaArrayToStringSlice struct {
val *[]string
}
func (v byteaArrayToStringSlice) Scan(src interface{}) error {
arr, err := srcbytes(src)
if err != nil {
return err
} else if arr == nil {
*v.val = nil
return nil
}
elems := pgParseCommaArray(arr)
out := make([]string, len(elems))
for i := 0; i < len(elems); i++ {
src := elems[i]
// drop the initial "\\x and the last "
src = src[4 : len(src)-1]
dst := make([]byte, hex.DecodedLen(len(src)))
if _, err := hex.Decode(dst, src); err != nil {
return err
}
out[i] = string(dst)
}
*v.val = out
return nil
} | pgsql/byteaarr.go | 0.716615 | 0.466177 | byteaarr.go | starcoder |
package gt
import (
"database/sql/driver"
"encoding/json"
"fmt"
"time"
)
// NullTimeIn is the `gt.NullTime` version of `time.Date`.
func NullTimeIn(year int, month time.Month, day, hour, min, sec, nsec int, loc *time.Location) NullTime {
	return NullTime(time.Date(year, month, day, hour, min, sec, nsec, loc))
}
// NullDateIn is a shortcut for `gt.NullTimeIn` with the time fixed to `T00:00:00`.
func NullDateIn(year int, month time.Month, day int, loc *time.Location) NullTime {
	return NullTime(time.Date(year, month, day, 0, 0, 0, 0, loc))
}
/*
NullTimeUTC is a shortcut for `gt.NullTimeIn` in UTC.

Note: due to peculiarities of civil time, `gt.NullTimeUTC(1, 1, 1, 0, 0, 0, 0)`
returns a zero value, while `gt.NullTimeUTC(0, 0, 0, 0, 0, 0, 0)` returns
a "negative" time.
*/
func NullTimeUTC(year int, month time.Month, day, hour, min, sec, nsec int) NullTime {
	return NullTimeIn(year, month, day, hour, min, sec, nsec, time.UTC)
}
/*
NullDateUTC is a shortcut for `gt.NullDateIn` in UTC.

Note: due to peculiarities of civil time, `gt.NullDateUTC(1, 1, 1)` returns a
zero value, while `gt.NullDateUTC(0, 0, 0)` returns a "negative" time.
*/
func NullDateUTC(year int, month time.Month, day int) NullTime {
	return NullDateIn(year, month, day, time.UTC)
}
// NullTimeNow is the `gt.NullTime` version of `time.Now`.
func NullTimeNow() NullTime { return NullTime(time.Now()) }
// NullTimeSince is the `gt.NullTime` version of `time.Since`.
func NullTimeSince(val NullTime) time.Duration { return NullTimeNow().Sub(val) }
/*
ParseNullTime is a shortcut: it parses successfully or panics. Should be used
only in root scope. When error handling is relevant, use `.Parse`.
*/
func ParseNullTime(src string) (val NullTime) {
	try(val.Parse(src))
	return
}
/*
NullTime is a variant of `time.Time` where the zero value is considered empty
in text, and null in JSON and SQL. Prevents you from accidentally inserting
nonsense times like 0001-01-01 or 1970-01-01 into date/time columns, without
the hassle of pointers such as `*time.Time` or unusable types such as
`sql.NullTime`.

Differences from `time.Time`:

	* Zero value is "" in text.
	* Zero value is `null` in JSON.
	* Zero value is `null` in SQL.
	* Default text encoding is RFC3339.
	* Text encoding/decoding is automatically reversible.

Differences from `"database/sql".NullTime`:

	* Much easier to use.
	* Supports text; zero value is "".
	* Supports JSON; zero value is `null`.
	* Fewer states: null and zero are one.

In your data model, `time.Time` is often the wrong choice, because the zero
value of `time.Time` is considered "non-empty". It leads to accidentally
inserting junk data. `*time.Time` is a better choice, but it introduces nil
pointer hazards without eliminating the invalid state `&time.Time{}`.
`sql.NullTime` is unusable due to lack of support for text and JSON encoding.
`gt.NullTime` avoids all of those issues.

For civil dates without time, use `gt.NullDate`.
*/
type NullTime time.Time

// Compile-time checks that NullTime satisfies the package's codec interfaces.
var (
	_ = Encodable(NullTime{})
	_ = Decodable((*NullTime)(nil))
)
/*
IsZero implements `gt.Zeroable`. Same as `self.Time().IsZero()`. Unlike most
implementations of `gt.Zeroable` in this package, this is NOT equivalent to
`reflect.ValueOf(self).IsZero()`, but rather a superset of it.
*/
func (self NullTime) IsZero() bool { return self.Time().IsZero() }
// IsNull implements `gt.Nullable`. True if zero.
func (self NullTime) IsNull() bool { return self.IsZero() }
// GetPtr implements `gt.PtrGetter`, returning `*time.Time`.
func (self *NullTime) GetPtr() interface{} { return self.TimePtr() }
// Get implements `gt.Getter`. If zero, returns `nil`, otherwise returns `time.Time`.
func (self NullTime) Get() interface{} {
	if self.IsNull() {
		return nil
	}
	return self.Time()
}
// Set implements `gt.Setter`, using `.Scan`. Panics on error.
func (self *NullTime) Set(src interface{}) { try(self.Scan(src)) }
// Zero implements `gt.Zeroer`, zeroing the receiver (nil receiver is a no-op).
func (self *NullTime) Zero() {
	if self != nil {
		*self = NullTime{}
	}
}
// String implements `fmt.Stringer`. The zero value renders as an empty
// string; any other value renders as an RFC3339 timestamp.
func (self NullTime) String() string {
	if self.IsNull() {
		return ``
	}
	return self.Format(timeFormat)
}

// Parse implements `gt.Parser`. An empty input zeroes the receiver;
// anything else must be an RFC3339 timestamp (Go's default time format).
// On a parse error the receiver is left unmodified.
func (self *NullTime) Parse(src string) error {
	if src == `` {
		self.Zero()
		return nil
	}
	parsed, err := time.Parse(timeFormat, src)
	if err == nil {
		*self = NullTime(parsed)
	}
	return err
}

// Append implements `gt.Appender`, appending the `.String`
// representation to the buffer; a zero value appends nothing.
func (self NullTime) Append(buf []byte) []byte {
	if self.IsNull() {
		return buf
	}
	return self.AppendFormat(buf, timeFormat)
}
/*
MarshalText implements `encoding.TextMarhaler`. If zero, returns nil.
Otherwise returns the same representation as `.String`.
*/
func (self NullTime) MarshalText() ([]byte, error) {
	if self.IsNull() {
		return nil, nil
	}
	return self.Append(nil), nil
}
// UnmarshalText implements `encoding.TextUnmarshaler`, using the same
// algorithm as `.Parse`.
func (self *NullTime) UnmarshalText(src []byte) error {
	return self.Parse(bytesString(src))
}
/*
MarshalJSON implements `json.Marshaler`. If zero, returns bytes representing
`null`. Otherwise uses the default `json.Marshal` behavior for `time.Time`.
*/
func (self NullTime) MarshalJSON() ([]byte, error) {
	if self.IsNull() {
		return bytesNull, nil
	}
	return json.Marshal(self.Get())
}
/*
UnmarshalJSON implements `json.Unmarshaler`. If the input is empty or
represents JSON `null`, zeroes the receiver. Otherwise uses the default
`json.Unmarshal` behavior for `*time.Time`.
*/
func (self *NullTime) UnmarshalJSON(src []byte) error {
	if isJsonEmpty(src) {
		self.Zero()
		return nil
	}
	return json.Unmarshal(src, self.GetPtr())
}
// Value implements `driver.Valuer`, using `.Get` (SQL NULL when zero).
func (self NullTime) Value() (driver.Value, error) {
	return self.Get(), nil
}
/*
Scan implements `sql.Scanner`, converting an arbitrary input to `gt.NullTime`
and modifying the receiver. Acceptable inputs:

	* `nil`         -> use `.Zero`
	* `string`      -> use `.Parse`
	* `[]byte`      -> use `.UnmarshalText`
	* `time.Time`   -> assign
	* `*time.Time`  -> use `.Zero` or assign
	* `gt.NullTime` -> assign
	* `gt.NullDate` -> assume UTC, convert, assign
	* `gt.Getter`   -> scan underlying value
*/
func (self *NullTime) Scan(src interface{}) error {
	switch src := src.(type) {
	case nil:
		self.Zero()
		return nil
	case string:
		return self.Parse(src)
	case []byte:
		return self.UnmarshalText(src)
	case time.Time:
		*self = NullTime(src)
		return nil
	case *time.Time:
		if src == nil {
			self.Zero()
		} else {
			*self = NullTime(*src)
		}
		return nil
	case NullTime:
		*self = src
		return nil
	case NullDate:
		*self = src.NullTimeUTC()
		return nil
	default:
		// Fall back to unwrapping a `gt.Getter` and scanning its value.
		val, ok := get(src)
		if ok {
			return self.Scan(val)
		}
		return errScanType(self, src)
	}
}
/*
GoString implements `fmt.GoStringer`, returning Go code that constructs this
value. For UTC, the resulting code is valid. For non-UTC, the resulting code
is invalid, because `*time.Location` doesn't implement `fmt.GoStringer`.
*/
func (self NullTime) GoString() string {
	year, month, day := self.Date()
	hour, min, sec, nsec := self.Hour(), self.Minute(), self.Second(), self.Nanosecond()
	loc := self.Location()
	// A midnight value is rendered with the shorter date constructors.
	if hour == 0 && min == 0 && sec == 0 && nsec == 0 {
		if loc == time.UTC {
			return fmt.Sprintf(`gt.NullDateUTC(%v, %v, %v)`, year, int(month), day)
		}
		return fmt.Sprintf(`gt.NullDateIn(%v, %v, %v, %q)`, year, int(month), day, loc)
	}
	if loc == time.UTC {
		return fmt.Sprintf(`gt.NullTimeUTC(%v, %v, %v, %v, %v, %v, %v)`, year, int(month), day, hour, min, sec, nsec)
	}
	return fmt.Sprintf(`gt.NullTimeIn(%v, %v, %v, %v, %v, %v, %v, %q)`, year, int(month), day, hour, min, sec, nsec, loc)
}
// Time is a free cast to `time.Time`.
func (self NullTime) Time() time.Time { return time.Time(self) }
// TimePtr is a free cast to `*time.Time`.
func (self *NullTime) TimePtr() *time.Time { return (*time.Time)(self) }
// MaybeTime returns nil if zero; otherwise a non-nil `*time.Time`.
func (self NullTime) MaybeTime() *time.Time {
	if self.IsNull() {
		return nil
	}
	return self.TimePtr()
}
// NullDate is a shortcut for `gt.NullDateFrom(self.Date())`.
func (self NullTime) NullDate() NullDate {
	return NullDateFrom(self.Date())
}
/*
AddInterval adds the interval to the time, returning the modified time. If the
interval is a zero value, the resulting time should be identical to the source.
*/
func (self NullTime) AddInterval(val Interval) NullTime {
	return self.AddDate(val.Date()).Add(val.OnlyTime().Duration())
}
// AddNullInterval is the same as `gt.NullTime.AddInterval` but for `gt.NullInterval`.
func (self NullTime) AddNullInterval(val NullInterval) NullTime {
	return self.AddInterval(Interval(val))
}
/*
After is similar to `time.Time.After`, but variadic. Reports whether the
receiver and ALL given times are non-null and strictly descending:
self > vals[0] > vals[1] > ...

Note that while `time.Time{}` is considered to be the start of the first day
of the first month of the first year, `gt.NullTime{}` is considered
empty/null. A null timestamp anywhere (including the receiver) yields false.
*/
func (self NullTime) After(vals ...NullTime) bool {
	if self.IsNull() {
		return false
	}
	prev := self
	for _, next := range vals {
		if next.IsNull() || !prev.Time().After(next.Time()) {
			return false
		}
		prev = next
	}
	return true
}

/*
Before is similar to `time.Time.Before`, but variadic. Reports whether the
receiver and ALL given times are non-null and strictly ascending:
self < vals[0] < vals[1] < ...

Note that while `time.Time{}` is considered to be the start of the first day
of the first month of the first year, `gt.NullTime{}` is considered
empty/null. A null timestamp anywhere (including the receiver) yields false.
*/
func (self NullTime) Before(vals ...NullTime) bool {
	if self.IsNull() {
		return false
	}
	prev := self
	for _, next := range vals {
		if next.IsNull() || !prev.Time().Before(next.Time()) {
			return false
		}
		prev = next
	}
	return true
}
// Equal is the `gt.NullTime` version of `time.Time.Equal`.
func (self NullTime) Equal(val NullTime) bool { return self.Time().Equal(val.Time()) }
// Date is the `gt.NullTime` version of `time.Time.Date`.
func (self NullTime) Date() (int, time.Month, int) { return self.Time().Date() }
// Year is the `gt.NullTime` version of `time.Time.Year`.
func (self NullTime) Year() int { return self.Time().Year() }
// Month is the `gt.NullTime` version of `time.Time.Month`.
func (self NullTime) Month() time.Month { return self.Time().Month() }
// Day is the `gt.NullTime` version of `time.Time.Day`.
func (self NullTime) Day() int { return self.Time().Day() }
// Weekday is the `gt.NullTime` version of `time.Time.Weekday`.
func (self NullTime) Weekday() time.Weekday { return self.Time().Weekday() }
// ISOWeek is the `gt.NullTime` version of `time.Time.ISOWeek`.
func (self NullTime) ISOWeek() (int, int) { return self.Time().ISOWeek() }
// Clock is the `gt.NullTime` version of `time.Time.Clock`.
func (self NullTime) Clock() (int, int, int) { return self.Time().Clock() }
// Hour is the `gt.NullTime` version of `time.Time.Hour`.
func (self NullTime) Hour() int { return self.Time().Hour() }
// Minute is the `gt.NullTime` version of `time.Time.Minute`.
func (self NullTime) Minute() int { return self.Time().Minute() }
// Second is the `gt.NullTime` version of `time.Time.Second`.
func (self NullTime) Second() int { return self.Time().Second() }
// Nanosecond is the `gt.NullTime` version of `time.Time.Nanosecond`.
func (self NullTime) Nanosecond() int { return self.Time().Nanosecond() }
// YearDay is the `gt.NullTime` version of `time.Time.YearDay`.
func (self NullTime) YearDay() int { return self.Time().YearDay() }
// Add is the `gt.NullTime` version of `time.Time.Add`.
func (self NullTime) Add(val time.Duration) NullTime { return NullTime(self.Time().Add(val)) }
// Sub is the `gt.NullTime` version of `time.Time.Sub`.
func (self NullTime) Sub(val NullTime) time.Duration { return self.Time().Sub(val.Time()) }
// AddDate is the `gt.NullTime` version of `time.Time.AddDate`.
func (self NullTime) AddDate(y, m, d int) NullTime { return NullTime(self.Time().AddDate(y, m, d)) }
// UTC is the `gt.NullTime` version of `time.Time.UTC`.
func (self NullTime) UTC() NullTime { return NullTime(self.Time().UTC()) }
// Local is the `gt.NullTime` version of `time.Time.Local`.
func (self NullTime) Local() NullTime { return NullTime(self.Time().Local()) }
// In is the `gt.NullTime` version of `time.Time.In`.
func (self NullTime) In(loc *time.Location) NullTime { return NullTime(self.Time().In(loc)) }
// Location is the `gt.NullTime` version of `time.Time.Location`.
func (self NullTime) Location() *time.Location { return self.Time().Location() }
// Zone is the `gt.NullTime` version of `time.Time.Zone`.
func (self NullTime) Zone() (string, int) { return self.Time().Zone() }
// Unix is the `gt.NullTime` version of `time.Time.Unix`.
func (self NullTime) Unix() int64 { return self.Time().Unix() }
// UnixMilli is the `gt.NullTime` version of `time.Time.UnixMilli`.
func (self NullTime) UnixMilli() int64 { return self.Time().UnixMilli() }
// UnixMicro is the `gt.NullTime` version of `time.Time.UnixMicro`.
func (self NullTime) UnixMicro() int64 { return self.Time().UnixMicro() }
// UnixNano is the `gt.NullTime` version of `time.Time.UnixNano`.
func (self NullTime) UnixNano() int64 { return self.Time().UnixNano() }
// IsDST is the `gt.NullTime` version of `time.Time.IsDST`.
func (self NullTime) IsDST() bool { return self.Time().IsDST() }
// Truncate is the `gt.NullTime` version of `time.Time.Truncate`.
func (self NullTime) Truncate(val time.Duration) NullTime { return NullTime(self.Time().Truncate(val)) }
// Round is the `gt.NullTime` version of `time.Time.Round`.
func (self NullTime) Round(val time.Duration) NullTime { return NullTime(self.Time().Round(val)) }
// Format is the `gt.NullTime` version of `time.Time.Format`.
func (self NullTime) Format(layout string) string { return self.Time().Format(layout) }
// AppendFormat is the `gt.NullTime` version of `time.Time.AppendFormat`.
func (self NullTime) AppendFormat(a []byte, b string) []byte { return self.Time().AppendFormat(a, b) }
package square
import (
"fmt"
"image/color"
log "github.com/Sirupsen/logrus"
"github.com/Willyfrog/peano/drawing"
"github.com/Willyfrog/peano/point"
"github.com/Willyfrog/peano/utils"
)
// Square is one cell of a grid: (X, Y) are the cell's integer grid
// coordinates, Width is its side length in drawing units, and Points
// holds the points assigned to the cell.
type Square struct {
	X int
	Y int
	Width float32
	Points []*point.Point
}
// fitsIn reports whether the given point lies within the square's
// drawing-space bounds (boundary handling per utils.Between — confirm
// whether the edges are inclusive).
func (sq Square) fitsIn(pnt point.Point) bool {
	xo, yo := sq.Origin()
	xe, ye := sq.End()
	return utils.Between(pnt.X, xo, xe) && utils.Between(pnt.Y, yo, ye)
}
// Origin returns the drawing-space coordinates of the square's (0,0)
// corner: the grid position scaled by the square's width.
func (sq Square) Origin() (x, y float32) {
	x = (float32(sq.X)) * sq.Width
	y = (float32(sq.Y)) * sq.Width
	return
}
// TopRight returns the drawing-space equivalent of the square's (0,1)
// corner (same x as Origin, y advanced by one cell).
func (sq Square) TopRight() (x, y float32) {
	x = (float32(sq.X)) * sq.Width
	y = (float32(sq.Y + 1)) * sq.Width
	return
}
// BottomLeft returns the drawing-space equivalent of the square's (1,0)
// corner (x advanced by one cell, same y as Origin).
func (sq Square) BottomLeft() (x, y float32) {
	x = (float32(sq.X + 1)) * sq.Width
	y = (float32(sq.Y)) * sq.Width
	return
}
// End returns the drawing-space equivalent of the square's (1,1) corner,
// diagonally opposite Origin.
func (sq Square) End() (x, y float32) {
	x = (float32(sq.X + 1)) * sq.Width
	y = (float32(sq.Y + 1)) * sq.Width
	return
}
// Empty reports whether the square contains no points.
func (sq Square) Empty() bool {
	return len(sq.Points) == 0
}
// Partition splits the square into a 2x2 grid of half-width sub-squares
// and distributes sq's points into whichever sub-square each one falls
// in. The sub-squares' grid coordinates are derived from sq's (doubled,
// plus the 0/1 offset of the row/column). If any point fails to land in
// a sub-square, the program is terminated.
func (sq Square) Partition() [2][2]Square {
	width := sq.Width / 2.0
	sub := [2][2]Square{
		{
			Square{Width: width, Points: make([]*point.Point, 0)},
			Square{Width: width, Points: make([]*point.Point, 0)},
		},
		{
			Square{Width: width, Points: make([]*point.Point, 0)},
			Square{Width: width, Points: make([]*point.Point, 0)},
		},
	}
	pointsAssigned := 0
	for i := range sub {
		for j := range sub[i] {
			// Work through a pointer so mutations land in sub directly,
			// instead of mutating a loop copy and writing it back.
			subsq := &sub[i][j]
			subsq.X = sq.X*2 + j
			subsq.Y = sq.Y*2 + i
			for _, p := range sq.Points {
				if subsq.fitsIn(*p) {
					subsq.Points = append(subsq.Points, p)
				}
			}
			pointsAssigned += len(subsq.Points)
		}
	}
	if pointsAssigned != len(sq.Points) {
		// A lost point means fitsIn and the sub-square geometry disagree;
		// fail loudly rather than finding out later.
		message := fmt.Sprintf("We missed some points while subdividing %d!=%d", pointsAssigned, len(sq.Points))
		log.Fatal(message) // logrus Fatal exits, so the panic below is a safety net only
		panic(message)
	}
	return sub
}
// Draw renders the square's outline, its points, and a polyline
// connecting consecutive points in their current order.
// Remember that the square should be ordered previously to make sure it
// prints in the intended order. Take a look at
// `github.com/Willyfrog/peano/matrix.Strategy` Interface
func (sq *Square) Draw(canvas *drawing.Canvas) {
	path := canvas.GetContext()
	if log.StandardLogger().Level == log.DebugLevel {
		// In debug mode, color the first two squares to make the drawing
		// orientation visible.
		if sq.X == 0 && sq.Y == 0 {
			path.SetStrokeColor(color.RGBA{0x44, 0xff, 0x44, 0xff})
		} else if sq.X == 0 && sq.Y == 1 {
			path.SetStrokeColor(color.RGBA{0xff, 0x44, 0x44, 0xff})
		} else {
			path.SetStrokeColor(color.RGBA{0xcc, 0xcc, 0xcc, 0xff})
		}
	} else {
		path.SetStrokeColor(color.RGBA{0xcc, 0xcc, 0xcc, 0xff})
	}
	path.SetLineWidth(1)
	xo, yo := sq.Origin()
	xe, ye := sq.End()
	drawing.DrawSquare(xo, yo, xe, ye, path)
	path.Stroke()
	//path.FillStroke()
	//log.Debug(fmt.Sprintf("Result: %v", sq.Points))
	var origin *point.Point
	for _, pt := range sq.Points {
		pt.Draw(canvas)
		if origin != nil {
			linepath := canvas.GetContext()
			// BUG FIX: the stroke color/width were previously applied to the
			// square's context (path) instead of the line's own context, so
			// the connecting line was drawn with linepath's defaults.
			linepath.SetStrokeColor(color.RGBA{0x44, 0x44, 0x88, 0xff})
			linepath.SetLineWidth(5)
			drawing.DrawLine(origin.X, origin.Y, pt.X, pt.Y, linepath)
			linepath.Stroke()
		}
		origin = pt
	}
}
// Connect builds a polyline from the square's points using the
// point.SortXY ordering (see point.PointList.Polyline).
func (sq *Square) Connect() point.PointList {
	return point.PointList(sq.Points).Polyline(point.SortXY)
}
// String returns a printable representation of the square: its grid
// coordinates followed by its point list, for debugging output.
func (sq *Square) String() string {
	pl := point.PointList(sq.Points)
	return fmt.Sprintf("[%d, %d]:\n%s", sq.X, sq.Y, (&pl).String())
}
package genericcomparator
import (
"bytes"
"fmt"
"strings"
)
// Type defines the type of the generic Comparator that compares two values and returns -1 if a is smaller than b, 1 if
// a is bigger than b and 0 if both values are equal.
// Both arguments are expected to hold the same dynamic type.
type Type func(a interface{}, b interface{}) int
// Comparator implements a function that compares the builtin basic types of Go and that returns -1 if a is smaller than
// b, 1 if a is bigger than b and 0 if both values are equal.
//
// Both arguments must hold the same dynamic type; a mismatched b panics via
// the type assertion, and an unsupported type of a panics explicitly.
func Comparator(a, b interface{}) int {
	switch aCasted := a.(type) {
	case string:
		return strings.Compare(aCasted, b.(string))
	case []byte:
		return bytes.Compare(aCasted, b.([]byte))
	case int:
		return cmpInt64(int64(aCasted), int64(b.(int)))
	case uint:
		return cmpUint64(uint64(aCasted), uint64(b.(uint)))
	case int8:
		return cmpInt64(int64(aCasted), int64(b.(int8)))
	case uint8:
		return cmpUint64(uint64(aCasted), uint64(b.(uint8)))
	case int16:
		return cmpInt64(int64(aCasted), int64(b.(int16)))
	case uint16:
		return cmpUint64(uint64(aCasted), uint64(b.(uint16)))
	case int32:
		return cmpInt64(int64(aCasted), int64(b.(int32)))
	case uint32:
		return cmpUint64(uint64(aCasted), uint64(b.(uint32)))
	case int64:
		return cmpInt64(aCasted, b.(int64))
	case uint64:
		return cmpUint64(aCasted, b.(uint64))
	case float32:
		// float32 -> float64 conversion is exact, so ordering is preserved.
		return cmpFloat64(float64(aCasted), float64(b.(float32)))
	case float64:
		return cmpFloat64(aCasted, b.(float64))
	default:
		panic(fmt.Sprintf("unsupported key type: %v", a))
	}
}

// cmpInt64 returns -1/0/1 for a<b, a==b, a>b.
func cmpInt64(a, b int64) int {
	switch {
	case a < b:
		return -1
	case a > b:
		return 1
	}
	return 0
}

// cmpUint64 returns -1/0/1 for a<b, a==b, a>b.
func cmpUint64(a, b uint64) int {
	switch {
	case a < b:
		return -1
	case a > b:
		return 1
	}
	return 0
}

// cmpFloat64 returns -1/0/1 for a<b, a==b, a>b.
func cmpFloat64(a, b float64) int {
	switch {
	case a < b:
		return -1
	case a > b:
		return 1
	}
	return 0
}
package year2021
import (
"sort"
"github.com/lanphiergm/adventofcodego/internal/utils"
)
// SmokeBasinPart1 sums the risk level (height + 1) of every low point in
// the heightmap: every cell strictly lower than all of its orthogonal
// neighbors.
func SmokeBasinPart1(filename string) interface{} {
	grid := utils.ReadStrings(filename)
	total := 0
	for r, line := range grid {
		for c, ch := range line {
			height := utils.Rtoi(ch)
			// A cell is disqualified as soon as any in-bounds neighbor is
			// not strictly higher.
			switch {
			case r > 0 && height >= utils.Btoi(grid[r-1][c]): // up
			case r < len(grid)-1 && height >= utils.Btoi(grid[r+1][c]): // down
			case c > 0 && height >= utils.Btoi(line[c-1]): // left
			case c < len(line)-1 && height >= utils.Btoi(line[c+1]): // right
			default:
				total += height + 1
			}
		}
	}
	return total
}
// SmokeBasinPart2 finds the basin around every low point and returns the
// product of the sizes of the three largest basins.
func SmokeBasinPart2(filename string) interface{} {
	grid := utils.ReadStrings(filename)
	var sizes []int
	for r, line := range grid {
		for c, ch := range line {
			height := utils.Rtoi(ch)
			// Same low-point detection as part 1.
			switch {
			case r > 0 && height >= utils.Btoi(grid[r-1][c]): // up
			case r < len(grid)-1 && height >= utils.Btoi(grid[r+1][c]): // down
			case c > 0 && height >= utils.Btoi(line[c-1]): // left
			case c < len(line)-1 && height >= utils.Btoi(line[c+1]): // right
			default:
				seen := map[utils.Coord]struct{}{}
				findBasinCoords(grid, seen, r, c)
				sizes = append(sizes, len(seen))
			}
		}
	}
	sort.Sort(sort.Reverse(sort.IntSlice(sizes)))
	return sizes[0] * sizes[1] * sizes[2]
}
func findBasinCoords(data []string, coords map[utils.Coord]struct{}, initialI int, initialJ int) {
if utils.ContainsCoord(coords, utils.Coord{X: initialI, Y: initialJ}) {
return
}
coords[utils.Coord{X: initialI, Y: initialJ}] = struct{}{}
for i := initialI - 1; i >= 0; i-- { // up
v := utils.Btoi(data[i][initialJ])
if v == 9 {
break
}
findBasinCoords(data, coords, i, initialJ)
}
for i := initialI + 1; i < len(data); i++ { // down
v := utils.Btoi(data[i][initialJ])
if v == 9 {
break
}
findBasinCoords(data, coords, i, initialJ)
}
for j := initialJ - 1; j >= 0; j-- { // left
v := utils.Btoi(data[initialI][j])
if v == 9 {
break
}
findBasinCoords(data, coords, initialI, j)
}
for j := initialJ + 1; j < len(data[initialI]); j++ { // right
v := utils.Btoi(data[initialI][j])
if v == 9 {
break
}
findBasinCoords(data, coords, initialI, j)
}
} | internal/puzzles/year2021/day_09_smoke_basin.go | 0.540196 | 0.467757 | day_09_smoke_basin.go | starcoder |
package internal
import (
"fmt"
"math"
"gopkg.in/yaml.v3"
"github.com/lyraproj/dgo/util"
"github.com/lyraproj/dgo/dgo"
)
type (
	// floatVal is a float64 that implements the dgo.Value interface
	floatVal float64
	// floatType is the unconstrained float type; see DefaultFloatType.
	floatType int
	// exactFloatType is a float type matched only by one exact value.
	exactFloatType float64
	// floatRangeType is a float type constrained to [min, max]; the upper
	// bound is exclusive when inclusive is false.
	floatRangeType struct {
		min float64
		max float64
		inclusive bool
	}
)
// DefaultFloatType is the unconstrained floatVal type
const DefaultFloatType = floatType(0)
// FloatRangeType returns a dgo.FloatRangeType that is limited to the inclusive range given by min and max
// If inclusive is true, then the range has an inclusive end. Arguments given
// in the wrong order are normalized; a degenerate inclusive range collapses
// to an exact type and the full float64 range to DefaultFloatType.
func FloatRangeType(min, max float64, inclusive bool) dgo.FloatRangeType {
	if min == max {
		if !inclusive {
			panic(fmt.Errorf(`non inclusive range cannot have equal min and max`))
		}
		// a degenerate range matches exactly one value
		return exactFloatType(min)
	}
	if max < min {
		// idiomatic tuple swap instead of the manual three-statement swap
		min, max = max, min
	}
	if min == -math.MaxFloat64 && max == math.MaxFloat64 {
		// the full float64 range is just the unconstrained float type
		return DefaultFloatType
	}
	return &floatRangeType{min: min, max: max, inclusive: inclusive}
}
// Assignable reports whether every value matched by other is also matched by
// this range type.
func (t *floatRangeType) Assignable(other dgo.Type) bool {
	switch ot := other.(type) {
	case exactFloatType:
		// an exact type is assignable when its single value lies in the range
		return t.IsInstance(float64(ot))
	case *floatRangeType:
		if t.min > ot.min {
			return false
		}
		// with an inclusive upper bound (or matching inclusiveness) the other
		// range's max may touch this max; otherwise it must stay strictly below
		if t.inclusive || t.inclusive == ot.inclusive {
			return t.max >= ot.max
		}
		return t.max > ot.max
	}
	// fall back to the generic assignability check for other type kinds
	return CheckAssignableTo(nil, other, t)
}
// Equals reports whether other is a floatRangeType with identical bounds and
// inclusiveness.
func (t *floatRangeType) Equals(other interface{}) bool {
	if ot, ok := other.(*floatRangeType); ok {
		return *t == *ot
	}
	return false
}
// HashCode returns a hash that is consistent with Equals.
func (t *floatRangeType) HashCode() int {
	h := int(dgo.TiFloatRange)
	if t.min > 0 {
		h = h*31 + int(t.min)
	}
	// NOTE(review): the guard compares a float64 max against math.MaxInt64,
	// presumably to keep the int conversion below in a representable range —
	// confirm intent.
	if t.max < math.MaxInt64 {
		h = h*31 + int(t.max)
	}
	if t.inclusive {
		h *= 3
	}
	return h
}
// Instance reports whether value is a float within this range.
func (t *floatRangeType) Instance(value interface{}) bool {
	f, ok := ToFloat(value)
	return ok && t.IsInstance(f)
}
// IsInstance reports whether value lies within this range, honoring the
// inclusiveness of the upper bound.
func (t *floatRangeType) IsInstance(value float64) bool {
	if t.min <= value {
		if t.inclusive {
			return value <= t.max
		}
		return value < t.max
	}
	return false
}
// Max returns the upper bound of the range.
func (t *floatRangeType) Max() float64 {
	return t.max
}
// Inclusive reports whether the upper bound is part of the range.
func (t *floatRangeType) Inclusive() bool {
	return t.inclusive
}
// Min returns the lower bound of the range.
func (t *floatRangeType) Min() float64 {
	return t.min
}
// String returns the string representation of this type.
func (t *floatRangeType) String() string {
	return TypeString(t)
}
// Type returns the meta type of this type.
func (t *floatRangeType) Type() dgo.Type {
	return &metaType{t}
}
// TypeIdentifier returns the identifier for float range types.
func (t *floatRangeType) TypeIdentifier() dgo.TypeIdentifier {
	return dgo.TiFloatRange
}
// Assignable reports whether other is the identical exact float type.
func (t exactFloatType) Assignable(other dgo.Type) bool {
	if ot, ok := other.(exactFloatType); ok {
		return t == ot
	}
	// fall back to the generic assignability check for other type kinds
	return CheckAssignableTo(nil, other, t)
}
// Equals reports whether other is equal to this exact type.
func (t exactFloatType) Equals(other interface{}) bool {
	return t == other
}
// HashCode derives the hash from the underlying value's hash.
func (t exactFloatType) HashCode() int {
	return floatVal(t).HashCode() * 3
}
// Inclusive always reports true for an exact (degenerate) range.
func (t exactFloatType) Inclusive() bool {
	return true
}
// Instance reports whether value is a float equal to this type's value.
func (t exactFloatType) Instance(value interface{}) bool {
	f, ok := ToFloat(value)
	return ok && float64(t) == f
}
// IsInstance reports whether value equals this type's single value.
func (t exactFloatType) IsInstance(value float64) bool {
	return float64(t) == value
}
// Max returns the single value of this degenerate range.
func (t exactFloatType) Max() float64 {
	return float64(t)
}
// Min returns the single value of this degenerate range.
func (t exactFloatType) Min() float64 {
	return float64(t)
}
// Type returns the meta type of this type.
func (t exactFloatType) Type() dgo.Type {
	return &metaType{t}
}
// String returns the string representation of this type.
func (t exactFloatType) String() string {
	return TypeString(t)
}
// TypeIdentifier returns the identifier for exact float types.
func (t exactFloatType) TypeIdentifier() dgo.TypeIdentifier {
	return dgo.TiFloatExact
}
// Value returns the float value that this exact type represents.
func (t exactFloatType) Value() dgo.Value {
	v := (floatVal)(t)
	return v
}
// Assignable reports whether other is any kind of float type.
func (t floatType) Assignable(other dgo.Type) bool {
	switch other.(type) {
	case floatType, exactFloatType, *floatRangeType:
		return true
	}
	return false
}
// Equals reports whether other is also the unconstrained float type.
func (t floatType) Equals(other interface{}) bool {
	_, ok := other.(floatType)
	return ok
}
// HashCode returns a constant hash for the unconstrained float type.
func (t floatType) HashCode() int {
	return int(dgo.TiFloat)
}
// Inclusive always reports true for the unconstrained range.
func (t floatType) Inclusive() bool {
	return true
}
// Instance reports whether value is a float of any kind.
func (t floatType) Instance(value interface{}) bool {
	_, ok := ToFloat(value)
	return ok
}
// IsInstance always reports true: every float64 is an instance.
func (t floatType) IsInstance(value float64) bool {
	return true
}
// Max returns the largest representable float64.
func (t floatType) Max() float64 {
	return math.MaxFloat64
}
// Min returns the most negative representable float64.
func (t floatType) Min() float64 {
	return -math.MaxFloat64
}
// String returns the string representation of this type.
func (t floatType) String() string {
	return TypeString(t)
}
// Type returns the meta type of this type.
func (t floatType) Type() dgo.Type {
	return &metaType{t}
}
// TypeIdentifier returns the identifier for the float type.
func (t floatType) TypeIdentifier() dgo.TypeIdentifier {
	return dgo.TiFloat
}
// Float returns the dgo.Float for the given float64
func Float(f float64) dgo.Float {
	return floatVal(f)
}
// Type returns the exact type matched only by this value.
func (v floatVal) Type() dgo.Type {
	return exactFloatType(v)
}
// compareFloat64 returns 1, -1 or 0 depending on whether a is greater than,
// less than, or equal to b.
func compareFloat64(a, b float64) int {
	switch {
	case a > b:
		return 1
	case a < b:
		return -1
	}
	return 0
}

// CompareTo compares this value to other. ok is false when other is neither
// a float, an int, nor nil; otherwise r is -1, 0, or 1.
// The previously duplicated comparison switches are folded into one helper.
func (v floatVal) CompareTo(other interface{}) (r int, ok bool) {
	ok = true
	if ov, isFloat := ToFloat(other); isFloat {
		r = compareFloat64(float64(v), ov)
		return
	}
	if oi, isInt := ToInt(other); isInt {
		// ints are compared in the float64 domain
		r = compareFloat64(float64(v), float64(oi))
		return
	}
	// nil sorts before every float
	if other == Nil || other == nil {
		r = 1
	} else {
		ok = false
	}
	return
}
// Equals reports whether other is a float with the same value.
func (v floatVal) Equals(other interface{}) bool {
	f, ok := ToFloat(other)
	return ok && float64(v) == f
}
// HashCode hashes by truncating to int; floats sharing an integer part
// collide, which the hash contract permits.
func (v floatVal) HashCode() int {
	return int(v)
}
// MarshalYAML encodes the value as a YAML !!float scalar node.
func (v floatVal) MarshalYAML() (interface{}, error) {
	return &yaml.Node{Kind: yaml.ScalarNode, Tag: `!!float`, Value: v.String()}, nil
}
// String formats the value using the shared float formatter.
func (v floatVal) String() string {
	return util.Ftoa(float64(v))
}
// ToFloat returns the value as a float64.
func (v floatVal) ToFloat() float64 {
	return float64(v)
}
// ToInt returns the value truncated towards zero to an int64.
func (v floatVal) ToInt() int64 {
	return int64(v)
}
// GoFloat returns the native Go representation of the value.
func (v floatVal) GoFloat() float64 {
	return float64(v)
}
// ToFloat returns the given value as a float64 if, and only if, the value is a float32 or float64. An
// additional boolean is returned to indicate if that was the case or not.
func ToFloat(value interface{}) (v float64, ok bool) {
ok = true
switch value := value.(type) {
case floatVal:
v = float64(value)
case float64:
v = value
case float32:
v = float64(value)
default:
ok = false
}
return
} | internal/float.go | 0.82994 | 0.531513 | float.go | starcoder |
package util
import (
"fmt"
"math"
"sort"
)
// BezierCurve stores the control-point weights of a Bezier curve and of its
// first, second and third derivative curves.
type BezierCurve struct {
	Weights    [][]float64 // control points of the curve itself
	WeightsDt  [][]float64 // first derivative
	WeightsDt2 [][]float64 // second derivative
	WeightsDt3 [][]float64 // third derivative
}
// NewBezierCurve creates a new BezierCurve with the weights of the first, second
// and third order derivatives of the supplied curve precomputed via
// CalcDerivativeWeights.
func NewBezierCurve(weights [][]float64) *BezierCurve {
	bc := BezierCurve{}
	bc.Weights = weights
	bc.WeightsDt = CalcDerivativeWeights(bc.Weights)
	bc.WeightsDt2 = CalcDerivativeWeights(bc.WeightsDt)
	bc.WeightsDt3 = CalcDerivativeWeights(bc.WeightsDt2)
	return &bc
}
// CurveX returns the X value for the curve at t (index 0 of the De Casteljau
// evaluation).
func (bc *BezierCurve) CurveX(t float64) float64 {
	return DeCasteljau(bc.Weights, t)[0]
}
// CurveY returns the Y value for the curve at t.
func (bc *BezierCurve) CurveY(t float64) float64 {
	return DeCasteljau(bc.Weights, t)[1]
}
// CurveDtX returns the X value for the derivative of the curve at t.
func (bc *BezierCurve) CurveDtX(t float64) float64 {
	return DeCasteljau(bc.WeightsDt, t)[0]
}
// CurveDtY returns the Y value for the derivative of the curve at t.
func (bc *BezierCurve) CurveDtY(t float64) float64 {
	return DeCasteljau(bc.WeightsDt, t)[1]
}
// CurveDt2X returns the X value for the second order derivative of the curve at t.
func (bc *BezierCurve) CurveDt2X(t float64) float64 {
	return DeCasteljau(bc.WeightsDt2, t)[0]
}
// CurveDt2Y returns the Y value for the second order derivative of the curve at t.
func (bc *BezierCurve) CurveDt2Y(t float64) float64 {
	return DeCasteljau(bc.WeightsDt2, t)[1]
}
// CurveDt3X returns the X value for the third order derivative of the curve at t.
func (bc *BezierCurve) CurveDt3X(t float64) float64 {
	return DeCasteljau(bc.WeightsDt3, t)[0]
}
// CurveDt3Y returns the Y value for the third order derivative of the curve at t.
func (bc *BezierCurve) CurveDt3Y(t float64) float64 {
	return DeCasteljau(bc.WeightsDt3, t)[1]
}
// Kappa calculates the signed curvature at t using the standard formula
// κ = (x'y'' − y'x'') / (x'² + y'²)^(3/2).
// Radius of curvature at t is 1/kappa(t)
func (bc *BezierCurve) Kappa(t float64) float64 {
	dpt := DeCasteljau(bc.WeightsDt, t)
	d2pt := DeCasteljau(bc.WeightsDt2, t)
	return (dpt[0]*d2pt[1] - d2pt[0]*dpt[1]) / math.Pow(dpt[0]*dpt[0]+dpt[1]*dpt[1], 1.5)
}
// CalcExtremities finds the extremes of a curve in terms of t. The endpoints
// t=0 and t=1 are always included; roots of the first derivative add local
// minima/maxima, and for cubics and above roots of the second derivative add
// inflection points. Roots are deduplicated via their "%.4f" string form, and
// the result is returned sorted ascending.
func CalcExtremities(points [][]float64) []float64 {
	n := len(points)
	if n == 2 {
		// a straight segment has no interior extremes
		return []float64{0, 1}
	}
	bc := NewBezierCurve(points)
	fmap := make(map[string]bool) // Use "%.4f"
	fmap["0.0000"], fmap["1.0000"] = true, true
	// Find local minima and maxima with Dt and Dt2
	tmap := make(map[string]bool)
	calcRoots(bc.CurveDtX, bc.CurveDt2X, tmap)
	addMap(fmap, tmap)
	tmap = make(map[string]bool)
	calcRoots(bc.CurveDtY, bc.CurveDt2Y, tmap)
	addMap(fmap, tmap)
	if n > 3 {
		// Find inflection points with Dt2 and Dt3
		tmap = make(map[string]bool)
		calcRoots(bc.CurveDt2X, bc.CurveDt3X, tmap)
		addMap(fmap, tmap)
		tmap = make(map[string]bool)
		calcRoots(bc.CurveDt2Y, bc.CurveDt3Y, tmap)
		addMap(fmap, tmap)
	}
	// Convert t values back to float64
	res := make([]float64, len(fmap))
	i := 0
	for k := range fmap {
		fmt.Sscanf(k, "%f", &res[i])
		i++
	}
	sort.Float64s(res)
	return res
}
// addMap merges every entry of map2 into map1, overwriting existing keys.
func addMap(map1, map2 map[string]bool) {
	for key := range map2 {
		map1[key] = map2[key]
	}
}
// calcRoots seeds Newton-Raphson (NRM) from 100 evenly spaced starting points
// in [0,1] and records every root found in tmap, keyed by its "%.4f" form so
// near-identical roots collapse to one entry. Failed searches are skipped.
// NOTE(review): t accumulates in floating point, so the last start value may
// drift slightly around 1 — confirm the t=1 seed is not relied upon.
func calcRoots(f, df func(float64) float64, tmap map[string]bool) {
	// Find roots in range [0,1] via brute force
	dt := 1.0 / 100
	for t := 0.0; t <= 1; t += dt {
		r, e := NRM(t, f, df)
		if e != nil {
			continue
		}
		tmap[fmt.Sprintf("%.4f", r)] = true
	}
}
// NRM is a modified Newton-Raphson root search that bails if t falls outside
// of the range [0,1] since the curve isn't defined there.
func NRM(start float64, f, df func(float64) float64) (float64, error) {
t := start
for i := 0; i < 100; i++ {
d := df(t)
if Equals(d, 0) {
return 0, fmt.Errorf("zero derivative at %f", t)
}
dt := f(t) / d
if Equals(dt, 0) {
return t, nil
}
t = t - dt
if t < 0 || t > 1 {
return 0, fmt.Errorf("t %f outside of [0,1]", t)
}
}
return 0, fmt.Errorf("failed to converge on root")
} | util/bcroots.go | 0.832781 | 0.678473 | bcroots.go | starcoder |
package schema
/*
Okay, so. There are several fun considerations for a "validate" method.
---
There's two radically different approaches to "validate"/"reify":
- Option 1: Look at the schema.Type info and check if a data node seems
to match it -- recursing on the type info.
- Option 2: Use the schema.Type{}.RepresentationNodeBuilder() to feed data
into it -- recursing on what the nodebuilder already expresses.
(Option 2 also need to take a `memStorage ipld.NodeBuilder` param, btw,
for handling all the cases where we *aren't* doing codegen.)
Option 1 provides a little more opportunity for returning multiple errors.
Option 2 will generally have a hard time with that (nodebuilers are not
necessarily in a valid state after their first error encounter).
As a result of having these two options at all, we may indeed end up with
at least two very different functions -- despite seeming to do similar
things, their interior will radically diverge.
---
We may also need to consider distinct reification paths: we may want one
that returns a new node tree which is eagerly converted to typed.Node
recursively; and another that returns a lazyNode which wraps things
with their typed node constraints only as they're requested.
(Note that the latter would have interesting implications for any code
which has expectations about pointer equality consistency.)
---
A further fun issue which needs consideration: well, I'll just save a snip
of prospective docs I wrote while trying to iterate on these functions:
// Note that using Validate on a node that's already a typed.Node is likely
// to be nonsensical. In many schemas, the typed.Node tree is actually a
// different depth than its representational tree (e.g. unions can cause this),
... and that's ... that's a fairly sizable issue that needs resolving.
There's a couple of different ways to handle some of the behaviors around
unions, and some of them make the tradeoff described above, and I'm really
unsure if all the implications have been sussed out yet. We should defer
writing code that depends on this issue until gathering some more info.
---
One more note: about returning multiple errors from a Validate function:
there's an upper bound of the utility of the thing. Going farther than the
first parse error is nice, but it will still hit limits: for example,
upon encountering a union and failing to match it, we can't generally
produce further errors from anywhere deeper in the tree without them being
combinatorial "if previous juncture X was type Y, then..." nonsense.
(This applies to all recursive kinds to some degree, but it's especially
rough with unions. For most of the others, it's flatly a missing field,
or an excessive field, or a leaf error; with unions it can be hard to tell.)
---
And finally: both "Validate" and "Reify" methods might actually belong
in the typed.Node package -- if they make *any* reference to `typed.Node`,
then they have no choice (otherwise, cyclic imports would occur).
If we make a "Validate" that works purely on the schema.Type info, and
returns *only* errors: only then we can have it in the schema package.
*/ | vendor/github.com/ipld/go-ipld-prime/schema/validate.go | 0.606265 | 0.775095 | validate.go | starcoder |
// +build ignore
package main
import (
"math"
"github.com/cpmech/gosl/io"
"github.com/cpmech/gosl/la"
"github.com/cpmech/gosl/utl"
)
// Generator holds data for one generator
type Generator struct {
	a, b, c       float64 // fuel cost coefficients (quadratic in power output)
	α, β, γ, ζ, λ float64 // emission coefficients (quadratic plus exponential term)
	Pmin, Pmax    float64 // capacity constraints (lower/upper power bounds)
}
// System holds generators and other data
type System struct {
	Pdemand  float64     // total power demand
	Lossless bool        // when true, transmission losses are treated as zero
	B00      float64     // constant term of the loss formula
	B0       []float64   // linear loss coefficients
	B        [][]float64 // quadratic loss coefficient matrix
	G        []Generator // the generator fleet
}
// Init initialises system: it stores the demand and loss mode, fills in the
// six-generator data set together with the transmission-loss coefficients,
// and, when check is true, prints cost/emission values for reference
// solutions (expected values shown in parentheses) so the tables can be
// verified by eye.
func (o *System) Init(Pdemand float64, lossless, check bool) {
	o.Pdemand = Pdemand
	o.Lossless = lossless
	// units:
	// a [$ / (hMW²)]
	// b [$ / (hMW)]
	// c [$ / h]
	// α [tons / (hMW²)]
	// β [tons / (hMW)]
	// γ [tons / h]
	// ζ [tons / h]
	// λ [MW⁻¹]
	// Pmin [MW / 100]
	// Pmax [MW / 100]
	o.G = []Generator{
		{a: 10, b: 200, c: 100, α: 4.091e-2, β: -5.554e-2, γ: 6.490e-2, ζ: 2.0e-4, λ: 2.857, Pmin: 0.05, Pmax: 0.5},
		{a: 10, b: 150, c: 120, α: 2.543e-2, β: -6.047e-2, γ: 5.638e-2, ζ: 5.0e-4, λ: 3.333, Pmin: 0.05, Pmax: 0.6},
		{a: 20, b: 180, c: 40., α: 4.258e-2, β: -5.094e-2, γ: 4.586e-2, ζ: 1.0e-6, λ: 8.000, Pmin: 0.05, Pmax: 1.0},
		{a: 10, b: 100, c: 60., α: 5.326e-2, β: -3.550e-2, γ: 3.380e-2, ζ: 2.0e-3, λ: 2.000, Pmin: 0.05, Pmax: 1.2},
		{a: 20, b: 180, c: 40., α: 4.258e-2, β: -5.094e-2, γ: 4.586e-2, ζ: 1.0e-6, λ: 8.000, Pmin: 0.05, Pmax: 1.0},
		{a: 10, b: 150, c: 100, α: 6.131e-2, β: -5.555e-2, γ: 5.151e-2, ζ: 1.0e-5, λ: 6.667, Pmin: 0.05, Pmax: 0.6},
	}
	// transmission-loss coefficients used by Ploss
	o.B00 = 0.00098573
	o.B0 = []float64{-0.0107, +0.0060, -0.0017, +0.0009, +0.0002, +0.0030}
	o.B = [][]float64{
		{+0.1382, -0.0299, +0.0044, -0.0022, -0.0010, -0.0008},
		{-0.0299, +0.0487, -0.0025, +0.0004, +0.0016, +0.0041},
		{+0.0044, -0.0025, +0.0182, -0.0070, -0.0066, -0.0066},
		{-0.0022, +0.0004, -0.0070, +0.0137, +0.0050, +0.0033},
		{-0.0010, +0.0016, -0.0066, +0.0050, +0.0109, +0.0005},
		{-0.0008, +0.0041, -0.0066, +0.0033, +0.0005, +0.0244},
	}
	if check {
		// lossless and unsecured: cost only
		P_best_cost := []float64{0.10954, 0.29967, 0.52447, 1.01601, 0.52469, 0.35963}
		c := o.FuelCost(P_best_cost)
		e := o.Emission(P_best_cost)
		io.Pf("lossless and unsecured: cost only\n")
		io.Pforan("c = %.3f (600.114)\n", c)
		io.Pforan("e = %.5f (0.22214)\n", e)
		P_best_cost = []float64{0.1265, 0.2843, 0.5643, 1.0468, 0.5278, 0.2801}
		c = o.FuelCost(P_best_cost)
		io.Pfgreen("c = %.3f\n", c)
		Pdemand := 2.834
		o.PrintConstraints(P_best_cost, Pdemand, true)
		// lossless and unsecured: emission only
		P_best_emission := []float64{0.40584, 0.45915, 0.53797, 0.38300, 0.53791, 0.51012}
		c = o.FuelCost(P_best_emission)
		e = o.Emission(P_best_emission)
		io.Pf("\nlossless and unsecured: emission only\n")
		io.Pforan("c = %.3f (638.260)\n", c)
		io.Pforan("e = %.5f (0.19420)\n", e)
		P_best_cost = []float64{0.1500, 0.3000, 0.5500, 1.0500, 0.4600, 0.3500}
		c = o.FuelCost(P_best_cost)
		e = o.Emission(P_best_cost)
		io.Pforan("\nc = %.3f (606.314)\n", c)
		io.Pforan("e = %.5f (0.22330)\n", e)
		P_best_emission = []float64{0.4000, 0.4500, 0.5500, 0.4000, 0.5500, 0.5000}
		c = o.FuelCost(P_best_emission)
		e = o.Emission(P_best_emission)
		io.Pforan("\nc = %.3f (639.600)\n", c)
		io.Pforan("e = %.5f (0.19424)\n", e)
	}
	return
}
// FuelCost computes the total $/h fuel cost for the given power outputs P[i],
// summing the quadratic cost a + b·P + c·P² of every generator.
func (o System) FuelCost(P []float64) (C float64) {
	for i := range o.G {
		gen := o.G[i]
		C += gen.a + gen.b*P[i] + gen.c*P[i]*P[i]
	}
	return
}
// Emission computes the total ton/h emission of atmospheric pollutants,
// summing α + β·P + γ·P² + ζ·exp(λ·P) over every generator.
func (o System) Emission(P []float64) (E float64) {
	for i, g := range o.G {
		E += g.α + g.β*P[i] + g.γ*P[i]*P[i] + g.ζ*math.Exp(g.λ*P[i])
	}
	return
}
// Ploss computes P_{loss} via the quadratic loss formula
// B00 + Σ B0[i]·P[i] + Σ Σ P[i]·B[i][j]·P[j]; it is zero in lossless mode.
func (o System) Ploss(P []float64) (res float64) {
	if o.Lossless {
		return
	}
	res = o.B00
	ngs := len(o.G)
	for i := 0; i < ngs; i++ {
		res += o.B0[i] * P[i]
		for j := 0; j < ngs; j++ {
			res += P[i] * o.B[i][j] * P[j]
		}
	}
	return
}
// Balance computes the absolute load-balance error:
// |Σ P[i] − Pdemand − Ploss(P)|.
func (o System) Balance(P []float64) float64 {
	return math.Abs(la.VecAccum(P) - o.Pdemand - o.Ploss(P))
}
// PrintConstraints prints violated or not constraints
func (o System) PrintConstraints(P []float64, Pdemand float64, full bool) {
sumP := 0.0
for i, g := range o.G {
if full {
io.Pfyel("P%d range error = %v\n", i, utl.GtePenalty(P[i], g.Pmin, 1)+utl.GtePenalty(g.Pmax, P[i], 1))
}
sumP += P[i]
}
Ploss := 0.0
io.Pf("balance error = %v\n", math.Abs(sumP-Pdemand-Ploss))
} | examples/07-eed/generators.go | 0.529993 | 0.446977 | generators.go | starcoder |
package conditional
// Function executes left when v is true, otherwise right.
func Function(v bool, left, right func()) {
	if !v {
		right()
		return
	}
	left()
}
// String returns left when v is true, otherwise right.
func String(v bool, left, right string) string {
	if !v {
		return right
	}
	return left
}
// Bool returns left if v, else right.
func Bool(v bool, left, right bool) bool {
	if v {
		return left
	}
	return right
}
// Int64 returns left if v, else right.
func Int64(v bool, left, right int64) int64 {
	if v {
		return left
	}
	return right
}
// Int32 returns left if v, else right.
func Int32(v bool, left, right int32) int32 {
	if v {
		return left
	}
	return right
}
// Int16 returns left if v, else right.
func Int16(v bool, left, right int16) int16 {
	if v {
		return left
	}
	return right
}
// Int8 returns left if v, else right.
func Int8(v bool, left, right int8) int8 {
	if v {
		return left
	}
	return right
}
// Int returns left if v, else right.
func Int(v bool, left, right int) int {
	if v {
		return left
	}
	return right
}
// Uint64 returns left if v, else right.
func Uint64(v bool, left, right uint64) uint64 {
	if v {
		return left
	}
	return right
}
// Uint32 returns left if v, else right.
func Uint32(v bool, left, right uint32) uint32 {
	if v {
		return left
	}
	return right
}
// Uint16 returns left if v, else right.
func Uint16(v bool, left, right uint16) uint16 {
	if v {
		return left
	}
	return right
}
// Uint8 returns left if v, else right.
func Uint8(v bool, left, right uint8) uint8 {
	if v {
		return left
	}
	return right
}
// Uint returns left if v, else right.
func Uint(v bool, left, right uint) uint {
	if v {
		return left
	}
	return right
}
// Float32 returns left if v, else right.
func Float32(v bool, left, right float32) float32 {
	if v {
		return left
	}
	return right
}
// Float64 returns left if v, else right.
func Float64(v bool, left, right float64) float64 {
	if v {
		return left
	}
	return right
}
// Complex64 returns left if v, else right.
func Complex64(v bool, left, right complex64) complex64 {
	if v {
		return left
	}
	return right
}
// Complex128 returns left if v, else right.
func Complex128(v bool, left, right complex128) complex128 {
	if v {
		return left
	}
	return right
}
// Uintptr returns left if v, else right.
func Uintptr(v bool, left, right uintptr) uintptr {
	if v {
		return left
	}
	return right
}
// Any returns left when v is true, otherwise right.
func Any(v bool, left, right interface{}) interface{} {
	if !v {
		return right
	}
	return left
}
package gorgonia
/*
This file holds code for ndarray related reduction Ops.
What this means is we take a ndarray, and reduce the dimensions down - typically to 1.
For example, summing all the values in a matrix, or finding the max value.
There is an additional field in each of these Ops - the 'along' field. This is because it's not always we want to reduce a ndarray down to a single scalar number
*/
import (
"encoding/binary"
"fmt"
"hash"
"hash/fnv"
"github.com/chewxy/gorgonia/tensor"
tf32 "github.com/chewxy/gorgonia/tensor/f32"
tf64 "github.com/chewxy/gorgonia/tensor/f64"
"github.com/chewxy/gorgonia/tensor/types"
"github.com/pkg/errors"
)
// maxOp reduces a tensor by taking the maximum along the given axes.
type maxOp struct {
	along axes // axes to reduce over
	d     int  // dimensionality of the input
}
// newMaxOp creates a max-reduction op over the given axes for an input of
// dimensionality dim.
func newMaxOp(along axes, dim int) *maxOp {
	return &maxOp{
		along: along,
		d:     dim,
	}
}
// Type returns the op's type: a function from a d-dimensional tensor of a
// summable type to either a scalar (when the reduction collapses every
// dimension) or a tensor of one fewer dimension.
// The dead retType bookkeeping and the else-after-return of the original
// version are removed; behavior is unchanged.
func (op maxOp) Type() Type {
	a := newTypeVariable("a", withTVConstraints(summable))
	t := newTensorType(op.d, a)
	// a vector input, no axes, or all axes: the result reduces to a scalar
	if op.d == 1 || len(op.along) == 0 || len(op.along) == op.d {
		return newFunctionType(t, a)
	}
	return newFunctionType(t, newTensorType(op.d-1, a))
}
// inferShape returns the output shape.
// TODO, THIS IS INCORRECT: reducing along a subset of axes does not
// necessarily yield a scalar.
func (op maxOp) inferShape(Type, ...*Node) (types.Shape, error) { return scalarShape, nil }
// DiffWRT reports that the single input is differentiable.
func (op maxOp) DiffWRT(i int) []bool { return []bool{true} }
// SymDiff computes the symbolic gradient of max: the incoming gradient is
// broadcast-multiplied with an equality mask so it flows only to the
// positions that attained the maximum.
func (op maxOp) SymDiff(inputs Nodes, output, gradNode *Node) (retVal Nodes, err error) {
	if len(inputs) != 1 {
		err = NewError(GraphError, "Expect at least 1 input. Got %d instead", len(inputs))
		return
	}
	t := inputs[0]
	opDim := len(t.Shape())
	// collect the reduced axes as broadcast dimensions
	// (note: axes are narrowed to byte, which assumes axis indices < 256)
	var leftAxes []byte
	for i := 0; i < opDim; i++ {
		for _, ax := range op.along {
			if i == ax {
				leftAxes = append(leftAxes, byte(i))
				break
			}
		}
	}
	var eq *Node
	bcpat := NewBroadcastPattern(leftAxes, nil)
	if eq, err = Broadcast(eqOpType, output, t, bcpat); err != nil {
		err = errors.Wrap(err, operationError)
		return
	}
	// BUG FIX: retVal was indexed without being allocated, which panicked on
	// a nil slice; allocate the single-element result first (as sumOp does).
	retVal = make(Nodes, 1)
	retVal[0], err = Broadcast(mulOpType, gradNode, eq, bcpat)
	return
}
// Do would compute the max over the single input; it is not yet implemented
// and always returns a NotYetImplemented error (after arity checking).
func (op maxOp) Do(inputs ...Value) (retVal Value, err error) {
	if len(inputs) != 1 {
		err = NewError(GraphError, "Expected only one input for maxop. Got %d instead", len(inputs))
		return
	}
	return nil, NewError(NotYetImplemented, "maxOp")
}
// returnsPtr reports whether the op returns a pointer to its result.
func (op maxOp) returnsPtr() bool { return true }
// overwriteInput returns the index of the input that may be overwritten.
func (op maxOp) overwriteInput() int { return 0 }
// callsExtern reports whether the op calls out to an external device.
func (op maxOp) callsExtern() bool { return false }
// WriteHash writes a canonical representation of the op into h so that
// structurally identical ops hash identically.
func (op maxOp) WriteHash(h hash.Hash) {
	h.Write([]byte("max"))
	if err := binary.Write(h, binary.LittleEndian, byte(op.d)); err != nil {
		panic(err)
	}
	// BUG FIX: the format string had one verb for two operands
	// (fmt.Fprintf(h, "%v", op.d, op.along)), which go vet flags and which
	// wrote a %!(EXTRA ...) artifact into the hash input.
	fmt.Fprintf(h, "%v%v", op.d, op.along)
}
// Hashcode returns an FNV-1a hash of the op derived from WriteHash.
func (op maxOp) Hashcode() uint32 {
	h := fnv.New32a()
	op.WriteHash(h)
	return h.Sum32()
}
// String implements fmt.Stringer.
func (op maxOp) String() string { return fmt.Sprintf("MaxAlong%v", op.along) }
// isUnary reports that the op takes exactly one input.
func (op maxOp) isUnary() bool { return true }
/* ARGMAX OP */
// type argmaxOp struct {
// along int // axis
// }
// func (op argmaxOp) Type() Type {
// a := newTypeVariable("a")
// }
/* SUM OP */
// sumOp reduces a tensor by summing along the given axes.
type sumOp struct {
	along      axes        // axes to reduce over
	d          int         // dimensionality of the input
	inputShape types.Shape // shape of the input tensor
}
// newSumOp creates a sum-reduction op over the given axes for an input of
// shape s and dimensionality d.
func newSumOp(along axes, s types.Shape, d int) sumOp {
	return sumOp{
		along:      along,
		d:          d,
		inputShape: s,
	}
}
// sumOp is a function with this type:
// sumOp :: (Summable a) ⇒ Tensor d a → Tensor d-1 a
// Type returns that function type; when the reduction collapses every
// dimension the result type is the scalar type variable itself.
// The dead retType bookkeeping and the else-after-return of the original
// version are removed; behavior is unchanged.
func (op sumOp) Type() Type {
	a := newTypeVariable("a", withTVConstraints(summable))
	t := newTensorType(op.d, a)
	// a vector input, no axes, or all axes: the result reduces to a scalar
	if op.d == 1 || len(op.along) == 0 || len(op.along) == op.d {
		return newFunctionType(t, a)
	}
	return newFunctionType(t, newTensorType(op.d-1, a))
}
// inferShape derives the output shape of the sum from the single input node:
// scalars and (non row/col) vectors reduce to a scalar, while higher-rank
// inputs keep their shape with each reduced axis set to 1 (collapsing to a
// scalar when everything is reduced).
func (op sumOp) inferShape(t Type, inputs ...*Node) (shape types.Shape, err error) {
	if len(inputs) != 1 {
		err = NewError(GraphError, "sumOp requires only one input")
		return
	}
	in := inputs[0]
	shapeLogf("Infering... Type: %v", t)
	shapeLogf("input shape: %v", in.shape)
	switch {
	case in.IsScalar():
		shape = scalarShape
	case in.IsVector() && !in.IsRowVec() && !in.IsColVec():
		// a plain vector can only be reduced along axis 0
		if len(op.along) > 1 || (len(op.along) == 1 && op.along[0] != 0) {
			err = NewError(ShapeError, "Shape mismatch: along is %v. Shape is %v", op.along, in.shape)
			return
		}
		shape = scalarShape
	default:
		shape = in.Shape().Clone()
		if len(op.along) > len(shape) {
			err = NewError(ShapeError, "Shape mismatch: %v and %v", shape, op.along)
			return
		}
		// axes 0..n-1 in order means a full reduction: scalar result
		if monotonic, incr1 := types.IsMonotonicInts(op.along); monotonic && incr1 && len(op.along) == len(shape) {
			shape = scalarShape
			return
		}
		for _, a := range op.along {
			if a >= len(shape) {
				err = NewError(ShapeError, "Axis %d is greater or equal to the length of the shape %v", a, shape)
				return
			}
			shape[a] = 1
		}
		// a (1,1) result is treated as a scalar
		if oneone.Eq(shape) {
			shape = scalarShape
		}
	}
	return
}
// DiffWRT reports that the single input is differentiable.
func (op sumOp) DiffWRT(i int) []bool { return []bool{true} }
// SymDiff computes the symbolic gradient of sum: the incoming gradient is
// repeated back along each reduced axis (sized via SizeOf) so it matches the
// input's shape.
func (op sumOp) SymDiff(inputs Nodes, output, gradNode *Node) (retVal Nodes, err error) {
	if len(inputs) != 1 {
		err = NewError(GraphError, "Requires only one input to differentiate sumop")
		return
	}
	// children = [gradNode, size(axis0), size(axis1), ...]
	children := make(Nodes, len(op.along)+1)
	children[0] = gradNode
	for i, a := range op.along {
		var n *Node
		if n, err = SizeOf(a, inputs[0]); err != nil {
			err = errors.Wrap(err, operationError)
			return
		}
		WithGroupName(gradClust)(n)
		children[i+1] = n
	}
	retVal = make(Nodes, 1)
	repeat := newRepeatOp(op.along, children)
	symdiffLogf("repeat: %v", repeat.Type())
	symdiffLogf("children %#Y", children)
	symdiffLogf("children: %v", children)
	retVal[0], err = applyOp(repeat, children...)
	retVal[0].setGroup(gradClust)
	return
}
// DoDiff performs the concrete (value-level) backward pass of sum: the output
// derivative is materialized as a tensor, repeated along every reduced axis
// until it matches the input's shape, and then accumulated into the input's
// derivative in place.
func (op sumOp) DoDiff(inputs Nodes, output *Node) (err error) {
	if len(inputs) != 1 {
		err = NewError(GraphError, "Requires only one input to differentiate sumop")
		return
	}
	xdv := inputs[0].boundTo.(*dualValue)
	ydv := output.boundTo.(*dualValue)
	xShape := xdv.Value.Shape()
	// lift the output derivative into a tensor of the matching float width
	var T types.Tensor
	switch ydvd := ydv.d.(type) {
	case Scalar:
		switch ydvd.t {
		case Float64:
			f := ydvd.v.(float64)
			T = tf64.NewTensor(tf64.AsScalar(f))
		case Float32:
			f := ydvd.v.(float32)
			T = tf32.NewTensor(tf32.AsScalar(f))
		default:
			err = nyi("sumOp.DoDiff", ydvd.t)
			return
		}
	case Tensor:
		T = ydvd.Tensor
	}
	var val Value
	if !T.Shape().Eq(xdv.d.Shape()) {
		// TO DO: Optimize: figure out a way to bunch it all up so you can repeat in one call
		for _, a := range op.along {
			if xShape[a] == 1 {
				continue // don't need to repeat
			}
			if T, err = tensor.Repeat(T, a, xShape[a]); err != nil {
				err = errors.Wrapf(err, repFail, a, xShape[a])
				return
			}
		}
		val = FromTensor(T)
	} else {
		val = ydv.d
	}
	// then just add the two
	add := newEBOByType(addOpType, xdv.d.Type(), val.Type())
	var d Value
	if d, err = add.UnsafeDo(xdv.d, val); err != nil {
		err = errors.Wrapf(err, unsafeDoFail, add)
		return
	}
	// check if xdv.d is scalar
	if xdv.d.Type().isScalar() {
		return xdv.SetDeriv(d)
	}
	// non-scalar case: UnsafeDo accumulated in place — presumably xdv.d
	// already holds the result here; nothing more to do.
	return
}
// Do sums the single tensor input along the op's axes, wrapping a scalar
// result as a scalar value and anything else as a tensor value.
// NOTE(review): the arity error message contains typos ("GOt", missing verb
// value); left untouched here since it is runtime output.
func (op sumOp) Do(inputs ...Value) (retVal Value, err error) {
	if len(inputs) != 1 {
		err = NewError(GraphError, "Expect only one input for sumOp. GOt %v instead", len(inputs))
		return
	}
	a := inputs[0]
	at := a.(Tensor)
	switch t := at.Tensor.(type) {
	case *tf64.Tensor:
		var ret *tf64.Tensor
		if ret, err = t.Sum(op.along...); err == nil {
			if ret.IsScalar() {
				retVal = NewScalarValue(ret.ScalarValue())
			} else {
				retVal = FromTensor(ret)
			}
		}
	case *tf32.Tensor:
		var ret *tf32.Tensor
		if ret, err = t.Sum(op.along...); err == nil {
			if ret.IsScalar() {
				retVal = NewScalarValue(ret.ScalarValue())
			} else {
				retVal = FromTensor(ret)
			}
		}
	default:
		err = nyi("sumOp.Do", at.Tensor)
	}
	return
}
// returnsPtr reports whether the op returns a pointer to its result.
func (op sumOp) returnsPtr() bool { return true }
// overwriteInput returns the index of the input that may be overwritten.
func (op sumOp) overwriteInput() int { return 0 }
// callsExtern reports whether the op calls out to an external device.
func (op sumOp) callsExtern() bool { return false }
// WriteHash writes a canonical representation of the op into h.
func (op sumOp) WriteHash(h hash.Hash) {
	h.Write([]byte("sum"))
	fmt.Fprintf(h, "%v->%v", op.along, op.inputShape)
}
// Hashcode returns an FNV-1a hash of the op derived from WriteHash.
func (op sumOp) Hashcode() uint32 {
	h := fnv.New32a()
	op.WriteHash(h)
	return h.Sum32()
}
// String implements fmt.Stringer.
func (op sumOp) String() string { return fmt.Sprintf("Σ%v", op.along) }
func (op sumOp) isUnary() bool { return true } | op_reduction.go | 0.644449 | 0.646014 | op_reduction.go | starcoder |
package rainsd
import (
"fmt"
"sort"
"sync"
log "github.com/inconshreveable/log15"
"github.com/netsec-ethz/rains/internal/pkg/section"
)
//isAssertionConsistent checks if the incoming assertion is consistent with the elements in the cache.
//If not, every element of this zone and context is dropped and it returns false
func isAssertionConsistent(assertion *section.Assertion, consistCache consistencyCache,
	assertionsCache assertionCache, negAssertionCache negativeAssertionCache) bool {
	negAssertions := consistCache.Get(assertion.Context, assertion.SubjectZone, assertion)
	for _, negAssertion := range negAssertions {
		switch negAssertion := negAssertion.(type) {
		case *section.Assertion:
			//TODO CFE do nothing??? (assertions are not checked against each other)
		case *section.Shard:
			// a shard valid at the same time as the assertion must contain it
			if togetherValid(assertion, negAssertion) && !shardContainsAssertion(assertion, negAssertion) {
				log.Warn("Inconsistency encountered between assertion and shard. Drop all sections for given context and zone.", "assertion", assertion, "shard", negAssertion)
				dropAllWithContextZone(assertion.Context, assertion.SubjectZone, assertionsCache, negAssertionCache)
				return false
			}
		case *section.Zone:
			// a zone valid at the same time as the assertion must contain it
			if togetherValid(assertion, negAssertion) && !zoneContainsAssertion(assertion, negAssertion) {
				dropAllWithContextZone(assertion.Context, assertion.SubjectZone, assertionsCache, negAssertionCache)
				return false
			}
		default:
			log.Warn(fmt.Sprintf("Not supported type. Expected *Shard or *Zone. Got=%T", negAssertion))
		}
	}
	return true
}
//isShardConsistent checks if the incoming shard is consistent with the elements in the cache.
//If not every element of this zone is dropped and it returns false
func isShardConsistent(shard *section.Shard, consistCache consistencyCache,
	assertionsCache assertionCache, negAssertionCache negativeAssertionCache) bool {
	secs := consistCache.Get(shard.Context, shard.SubjectZone, shard)
	for _, v := range secs {
		// each cached section that is valid at the same time as the shard
		// must agree with it; the first conflict drops the whole zone
		switch v := v.(type) {
		case *section.Assertion:
			if togetherValid(shard, v) && !shardContainsAssertion(v, shard) {
				dropAllWithContextZone(shard.Context, shard.SubjectZone, assertionsCache, negAssertionCache)
				return false
			}
		case *section.Shard:
			if togetherValid(shard, v) && !isShardConsistentWithShard(shard, v) {
				dropAllWithContextZone(shard.Context, shard.SubjectZone, assertionsCache, negAssertionCache)
				return false
			}
		case *section.Zone:
			if togetherValid(shard, v) && !isShardConsistentWithZone(shard, v) {
				dropAllWithContextZone(shard.Context, shard.SubjectZone, assertionsCache, negAssertionCache)
				return false
			}
		default:
			log.Warn(fmt.Sprintf("Not supported type. Expected *Shard or *Zone. Got=%T", v))
		}
	}
	return true
}
//isZoneConsistent checks if the incoming zone is consistent with the elements in the cache.
//If not every element of this zone is dropped and it returns false
func isZoneConsistent(zone *section.Zone, assertionsCache assertionCache,
	negAssertionCache negativeAssertionCache) bool {
	secs, ok := negAssertionCache.Get(zone.Context, zone.SubjectZone, zone)
	if ok {
		for _, v := range secs {
			// each cached section that is valid at the same time as the zone
			// must agree with it; the first conflict drops the whole zone
			switch v := v.(type) {
			case *section.Assertion:
				if togetherValid(zone, v) && !zoneContainsAssertion(v, zone) {
					dropAllWithContextZone(zone.Context, zone.SubjectZone, assertionsCache, negAssertionCache)
					return false
				}
			case *section.Shard:
				if togetherValid(zone, v) && !isShardConsistentWithZone(v, zone) {
					dropAllWithContextZone(zone.Context, zone.SubjectZone, assertionsCache, negAssertionCache)
					return false
				}
			case *section.Zone:
				if togetherValid(zone, v) && !isZoneConsistentWithZone(v, zone) {
					dropAllWithContextZone(zone.Context, zone.SubjectZone, assertionsCache, negAssertionCache)
					return false
				}
			default:
				log.Warn(fmt.Sprintf("Not supported type. Expected *Shard or *Zone. Got=%T", v))
			}
		}
	}
	return true
}
//isAddressAssertionConsistent checks if the incoming address assertion is consistent with the
//elements in the cache. If not, every element of this zone and context is dropped and it
//returns false.
//NOTE: not yet implemented — currently always reports the assertion as inconsistent.
func isAddressAssertionConsistent(assertion *section.AddrAssertion) bool {
	//TODO CFE implement
	return false
}
//togetherValid returns true if the validity windows of the two sections overlap, i.e. there
//is some point in time at which both sections are valid.
func togetherValid(s1, s2 section.WithSig) bool {
	if s1.ValidUntil() < s2.ValidSince() {
		return false
	}
	return s1.ValidSince() <= s2.ValidUntil()
}
//dropAllWithContextZone deletes all assertions, shards and zones in the cache with the given
//context and zone.
//NOTE(review): the context parameter is currently ignored — RemoveZone drops entries for the
//zone across all contexts; confirm whether context-scoped removal is intended.
func dropAllWithContextZone(context, zone string, assertionsCache assertionCache,
	negAssertionCache negativeAssertionCache) {
	assertionsCache.RemoveZone(zone)
	negAssertionCache.RemoveZone(zone)
}
//shardContainsAssertion returns true if the given shard contains the given assertion.
//Containment is decided by EqualContextZoneName; a warning is logged when the assertion is
//missing, because both sections being simultaneously valid then indicates an inconsistency.
func shardContainsAssertion(a *section.Assertion, s *section.Shard) bool {
	for _, assertion := range s.Content {
		if a.EqualContextZoneName(assertion) {
			return true
		}
	}
	log.Warn("Encountered valid assertion together with a valid shard that does not contain it.", "assertion", *a, "shard", *s)
	return false
}
//zoneContainsAssertion returns true if the given zone contains the given assertion and all
//contained shards in range of the assertion contain the assertion.
//BUGFIX: the "does not contain it" warning used to be logged on every loop iteration, even
//when the assertion was found; it is now logged once, only when containment actually fails
//(matching the behavior of shardContainsAssertion).
func zoneContainsAssertion(a *section.Assertion, z *section.Zone) bool {
	isContained := false //checks that zone contains given assertion
	for _, v := range z.Content {
		switch v := v.(type) {
		case *section.Assertion:
			if a.EqualContextZoneName(v) {
				isContained = true
			}
		case *section.Shard:
			if v.RangeFrom < a.SubjectName && v.RangeTo > a.SubjectName {
				if shardContainsAssertion(a, v) { //checks that all shards in range contain the assertion
					isContained = true
				} else {
					return false
				}
			}
		default:
			log.Warn(fmt.Sprintf("Not supported type. Expected *Shard or *Assertion. Got=%T", v))
			return false
		}
	}
	if !isContained {
		log.Warn("Encountered valid assertion together with a valid zone that does not contain it.", "assertion", *a, "zone", *z)
	}
	return isContained
}
//isShardConsistentWithShard returns true if both shards are consistent with each other.
//This is the case when all assertions in the intersecting interval are present in both shards:
//each shard's assertions falling into the other shard's range are collected into a sorted list
//and the two lists must be equal.
func isShardConsistentWithShard(s1, s2 *section.Shard) bool {
	v1 := &sortedAssertions{assertions: []*section.Assertion{}}
	v2 := &sortedAssertions{assertions: []*section.Assertion{}}
	addAssertionsinRangeToList(s1, s2, v1)
	addAssertionsinRangeToList(s2, s1, v2)
	return v1.Equal(v2)
}
//isShardConsistentWithZone returns true if the shard is consistent with the zone: every
//zone element in the shard's (exclusive) range must appear in the shard, and every assertion
//of the shard must appear in the zone.
func isShardConsistentWithZone(s *section.Shard, z *section.Zone) bool {
	assertionsInZone := &sortedAssertions{assertions: []*section.Assertion{}}
	//check that all elements of the zone in the range of the shard are also contained in the shard
	for _, v := range z.Content {
		switch v := v.(type) {
		case *section.Assertion:
			//exclusive range check: the shard's RangeFrom/RangeTo themselves are not covered
			if v.SubjectName > s.RangeFrom && v.SubjectName < s.RangeTo {
				if !shardContainsAssertion(v, s) {
					log.Warn("Shard is not consistent with zone. Zone contains assertion in range of shard which is missing in shard")
					return false
				}
			}
			assertionsInZone.Add(v)
		case *section.Shard:
			if !isShardConsistentWithShard(v, s) {
				log.Warn("Shard is not consistent with zone. Zone contains shard in range of another shard which are not consistent")
				return false
			}
			//TotalInterval covers everything, so this collects all of v's assertions
			addAssertionsinRangeToList(v, section.TotalInterval{}, assertionsInZone)
		default:
			log.Warn(fmt.Sprintf("Not supported type. Expected *Shard or *Assertion. Got=%T", v))
		}
	}
	//check that all elements of the shard are also contained in the zone.
	for _, a := range s.Content {
		_, ok := assertionsInZone.Get(a)
		if !ok {
			assertions, _ := assertionsInZone.Get(section.TotalInterval{}) //only used for logging
			log.Warn("Shard is not consistent with zone. Shard contains an assertion which is not contained in the zone", "zone", z,
				"assertionInZone", assertions, "shard", s)
			return false
		}
	}
	return true
}
//isZoneConsistentWithZone returns true if both zones are consistent with each other: every
//assertion of z1 must be contained in z2, every shard of z1 must agree with z2's content, and
//the full assertion sets derived from both zones must be equal.
//BUGFIX: the inner default case previously logged the outer variable v (the *section.Shard)
//instead of the unexpected inner element val, producing a misleading type in the message.
func isZoneConsistentWithZone(z1, z2 *section.Zone) bool {
	assertionsInZone1 := &sortedAssertions{assertions: []*section.Assertion{}}
	assertionsInZone2 := &sortedAssertions{assertions: []*section.Assertion{}}
	for _, v := range z1.Content {
		switch v := v.(type) {
		case *section.Assertion:
			if !zoneContainsAssertion(v, z2) {
				return false
			}
			assertionsInZone1.Add(v)
		case *section.Shard:
			for _, val := range z2.Content {
				switch val := val.(type) {
				case *section.Assertion:
					if !shardContainsAssertion(val, v) {
						return false
					}
				case *section.Shard:
					if !isShardConsistentWithShard(val, v) {
						return false
					}
				default:
					log.Warn(fmt.Sprintf("Not supported type. Expected *Shard or *Assertion. Got=%T", val))
				}
			}
			addAssertionsinRangeToList(v, section.TotalInterval{}, assertionsInZone1)
		default:
			log.Warn(fmt.Sprintf("Not supported type. Expected *Shard or *Assertion. Got=%T", v))
		}
	}
	//check that there is no assertion in z2 which is missing in z1.
	for _, v := range z2.Content {
		switch v := v.(type) {
		case *section.Assertion:
			assertionsInZone2.Add(v)
		case *section.Shard:
			addAssertionsinRangeToList(v, section.TotalInterval{}, assertionsInZone2)
		default:
			log.Warn(fmt.Sprintf("Not supported type. Expected *Shard or *Assertion. Got=%T", v))
		}
	}
	return assertionsInZone1.Equal(assertionsInZone2)
}
//containedShardsAreConsistent checks that all contained shards are mutually consistent and
//also consistent with the contained assertions. Each pair of elements is compared once by
//iterating over z.Content[i+1:].
//BUGFIX: both inner default cases previously logged the outer variable v instead of the
//unexpected inner element val, producing a misleading type in the message.
func containedShardsAreConsistent(z *section.Zone) bool {
	for i, v := range z.Content {
		switch v := v.(type) {
		case *section.Assertion:
			for _, val := range z.Content[i+1:] {
				switch val := val.(type) {
				case *section.Assertion:
					//assertion is always consistent with another assertion
				case *section.Shard:
					//exclusive range check against the later shard
					if val.RangeFrom < v.SubjectName && val.RangeTo > v.SubjectName && !shardContainsAssertion(v, val) {
						log.Info("zone is internally not consistent. Zone contains an assertion which is not present in a shard in the range",
							"assertion", *v, "shard", *val)
						return false
					}
				default:
					log.Warn(fmt.Sprintf("Not supported type. Expected *Shard or *Assertion. Got=%T", val))
				}
			}
		case *section.Shard:
			for _, val := range z.Content[i+1:] {
				switch val := val.(type) {
				case *section.Assertion:
					if v.RangeFrom < val.SubjectName && v.RangeTo > val.SubjectName && !shardContainsAssertion(val, v) {
						log.Info("zone is internally not consistent. Zone contains an assertion which is not present in a shard in the range",
							"assertion", *val, "shard", *v)
						return false
					}
				case *section.Shard:
					//overlapping ranges must contain identical assertions
					if val.RangeFrom < v.RangeTo && val.RangeTo > v.RangeFrom && !isShardConsistentWithShard(v, val) {
						log.Info("zone is internally not consistent. Zone contains a shard which is not consistent with another shard")
						return false
					}
				default:
					log.Warn(fmt.Sprintf("Not supported type. Expected *Shard or *Assertion. Got=%T", val))
				}
			}
		default:
			log.Warn(fmt.Sprintf("Not supported type. Expected *Shard or *Assertion. Got=%T", v))
		}
	}
	return true
}
//addAssertionsinRangeToList adds all assertions from s whose SubjectName lies strictly inside
//the given interval (exclusive bounds) to the provided sortedAssertions list.
func addAssertionsinRangeToList(s *section.Shard, interval section.Interval, list *sortedAssertions) {
	for _, a := range s.Content {
		if a.SubjectName > interval.Begin() && a.SubjectName < interval.End() {
			list.Add(a)
		}
	}
}
//sortedAssertions is a thread-safe list of assertions kept sorted by SubjectName.
type sortedAssertions struct {
	assertions     []*section.Assertion //sorted ascending by SubjectName
	assertionsLock sync.RWMutex         //guards assertions
}
//Add adds the assertion to the sorted list at the correct position.
//It returns true if it added a and false if a is already contained.
//Note: the duplicate check only compares the single entry at the insertion point found by
//binary search on SubjectName.
func (s *sortedAssertions) Add(a *section.Assertion) bool {
	s.assertionsLock.Lock()
	defer s.assertionsLock.Unlock()
	//smallest index whose SubjectName is not smaller than a's
	i := sort.Search(len(s.assertions), func(i int) bool {
		return s.assertions[i].SubjectName >= a.SubjectName
	})
	if i != len(s.assertions) && s.assertions[i].EqualContextZoneName(a) {
		return false
	}
	//insert a at position i, shifting the tail to the right
	s.assertions = append(s.assertions[:i], append([]*section.Assertion{a}, s.assertions[i:]...)...)
	return true
}
//Delete removes the assertion from the sorted list.
//Returns true if the element was successfully deleted from the list; if a is not part of the
//list it returns false.
//BUGFIX: sort.Search returns len(s.assertions) when a sorts after every entry; indexing with
//that value panicked. The bounds check below prevents it.
func (s *sortedAssertions) Delete(a *section.Assertion) bool {
	s.assertionsLock.Lock()
	defer s.assertionsLock.Unlock()
	i := sort.Search(len(s.assertions), func(i int) bool {
		return s.assertions[i].SubjectName >= a.SubjectName
	})
	if i == len(s.assertions) || !s.assertions[i].EqualContextZoneName(a) {
		return false
	}
	s.assertions = append(s.assertions[:i], s.assertions[i+1:]...)
	return true
}
//Len returns the number of assertions currently stored in this sorted slice.
func (s *sortedAssertions) Len() int {
	s.assertionsLock.RLock()
	n := len(s.assertions)
	s.assertionsLock.RUnlock()
	return n
}
//Get returns all assertions whose SubjectName lies in the given interval and true if there is
//at least one such assertion.
//CLEANUP: the former check `s.assertions[i].SubjectName < interval.Begin()` after the search
//was unreachable — sort.Search guarantees the element at i satisfies the predicate — and has
//been removed.
func (s *sortedAssertions) Get(interval section.Interval) ([]*section.Assertion, bool) {
	s.assertionsLock.RLock()
	defer s.assertionsLock.RUnlock()
	elements := []*section.Assertion{}
	//smallest index whose SubjectName is not below the interval's begin
	i := sort.Search(len(s.assertions), func(i int) bool {
		return s.assertions[i].SubjectName >= interval.Begin()
	})
	if i == len(s.assertions) {
		return nil, false
	}
	//collect entries until the first one beyond the interval's end
	for ; i < len(s.assertions); i++ {
		if s.assertions[i].SubjectName > interval.End() {
			break
		}
		elements = append(elements, s.assertions[i])
	}
	return elements, len(elements) > 0
}
//Equal returns true if both list contain the same assertions where the EqualContextZoneName method on assertions is used to compare them.
func (s *sortedAssertions) Equal(s2 *sortedAssertions) bool {
s.assertionsLock.RLock()
s2.assertionsLock.RLock()
defer s.assertionsLock.RUnlock()
defer s2.assertionsLock.RUnlock()
for i := 0; i < len(s.assertions); i++ {
if !s.assertions[i].EqualContextZoneName(s2.assertions[i]) {
return false
}
}
return true
} | internal/pkg/rainsd/consistencyChecks.go | 0.504394 | 0.522507 | consistencyChecks.go | starcoder |
package kernel
import (
"math"
"github.com/joaowiciuk/matrix"
)
// Laplacian generates the 3x3 laplacian kernel, commonly used for edge detection.
func Laplacian() *matrix.Matrix {
	kernel := matrix.Matrix{
		{1, 1, 1},
		{1, -8, 1},
		{1, 1, 1},
	}
	return &kernel
}
// Sharpen generates the 3x3 sharpen kernel, used for image enhancement.
func Sharpen() *matrix.Matrix {
	kernel := matrix.Matrix{
		{0, -1, 0},
		{-1, 5, -1},
		{0, -1, 0},
	}
	return &kernel
}
// Line180 generates the 180 degrees (horizontal) line detector kernel.
func Line180() *matrix.Matrix {
	kernel := matrix.Matrix{
		{-1, -1, -1},
		{2, 2, 2},
		{-1, -1, -1},
	}
	return &kernel
}
// Line90 generates the 90 degrees (vertical) line detector kernel.
func Line90() *matrix.Matrix {
	kernel := matrix.Matrix{
		{-1, 2, -1},
		{-1, 2, -1},
		{-1, 2, -1},
	}
	return &kernel
}
// Line45 generates the 45 degrees diagonal line detector kernel.
func Line45() *matrix.Matrix {
	kernel := matrix.Matrix{
		{2, -1, -1},
		{-1, 2, -1},
		{-1, -1, 2},
	}
	return &kernel
}
// Line135 generates the 135 degrees diagonal line detector kernel.
func Line135() *matrix.Matrix {
	kernel := matrix.Matrix{
		{-1, -1, 2},
		{-1, 2, -1},
		{2, -1, -1},
	}
	return &kernel
}
// LoG generates the 5x5 laplacian-of-gaussian kernel, commonly used for edge detection.
func LoG() *matrix.Matrix {
	kernel := matrix.Matrix{
		{0, 0, -1, 0, 0},
		{0, -1, -2, -1, 0},
		{-1, -2, 16, -2, -1},
		{0, -1, -2, -1, 0},
		{0, 0, -1, 0, 0},
	}
	return &kernel
}
// Box generates the 3x3 box blur kernel, used for image blurring.
func Box() *matrix.Matrix {
	ones := &matrix.Matrix{
		{1, 1, 1},
		{1, 1, 1},
		{1, 1, 1},
	}
	// normalize so the kernel entries sum to one
	return ones.Scal(1. / 9.)
}
// Unsharp55 generates the 5x5 unsharp masking kernel.
func Unsharp55() *matrix.Matrix {
	base := &matrix.Matrix{
		{1, 4, 6, 4, 1},
		{4, 16, 24, 16, 4},
		{6, 24, -476, 24, 6},
		{4, 16, 24, 16, 4},
		{1, 4, 6, 4, 1},
	}
	// scale by -1/256 as in the standard unsharp mask formulation
	return base.Scal(-1. / 256.)
}
// SobelX generates the n-by-n horizontal sobel edge detector.
// n is clamped to the smallest odd value >= 3.
func SobelX(n int) *matrix.Matrix {
	if n < 3 {
		n = 3
	}
	if n%2 == 0 {
		n++
	}
	kernel := matrix.New(n, n)
	center := n / 2
	kernel.Law(func(r, c int) float64 {
		dx := float64(c - center)
		dy := float64(r - center)
		v := dx / (math.Pow(dx, 2) + math.Pow(dy, 2))
		// the center cell divides zero by zero; map the resulting NaN to 0
		if math.IsNaN(v) {
			return 0
		}
		return v
	})
	return kernel
}
// SobelY generates the n-by-n vertical sobel edge detector.
// n is clamped to the smallest odd value >= 3.
func SobelY(n int) *matrix.Matrix {
	if n < 3 {
		n = 3
	}
	if n%2 == 0 {
		n++
	}
	kernel := matrix.New(n, n)
	center := n / 2
	kernel.Law(func(r, c int) float64 {
		dx := float64(c - center)
		dy := float64(r - center)
		v := dy / (math.Pow(dx, 2) + math.Pow(dy, 2))
		// the center cell divides zero by zero; map the resulting NaN to 0
		if math.IsNaN(v) {
			return 0
		}
		return v
	})
	return kernel
}
// Gaussian generates the n-by-n gaussian kernel with standard deviation σ,
// commonly used for image blurring. n is clamped to the smallest odd value >= 3
// and the kernel is normalized so its entries sum to one.
func Gaussian(n int, σ float64) *matrix.Matrix {
	if n < 3 {
		n = 3
	}
	if n%2 == 0 {
		n++
	}
	kernel := matrix.New(n, n)
	u := float64(n / 2)
	kernel.Law(func(r, c int) float64 {
		dx := float64(c) - u
		dy := float64(r) - u
		return math.Exp(-(math.Pow(dx, 2) + math.Pow(dy, 2)) / (2. * σ * σ))
	})
	total := kernel.Sum()
	return kernel.ForEach(func(v float64) float64 { return v / total })
}
// Gaussian1D generates the 1D gaussian kernel with standard deviation σ, used for separable
// convolution. It builds the normalized 2D kernel and returns its middle row.
// NOTE(review): the returned row is scaled by the 2D normalization, so its entries do not sum
// to 1 — confirm that callers account for this when applying it separably.
// NOTE(review): the named result A is never assigned; the value comes from X.Row(n / 2).
func Gaussian1D(n int, σ float64) (A *matrix.Matrix) {
	if n < 3 {
		n = 3
	}
	if n%2 == 0 {
		n++
	}
	X := matrix.New(n, n)
	u := float64(n / 2)
	X.Law(func(r, c int) float64 {
		x := float64(c)
		y := float64(r)
		return math.Exp(-(math.Pow(x-u, 2) + math.Pow(y-u, 2)) / (2. * σ * σ))
	})
	s := X.Sum()
	X = X.ForEach(func(x float64) float64 { return x / s })
	return X.Row(n / 2)
}
package geos
// #cgo LDFLAGS: -lgeos_c
// #include "geos.h"
import "C"
import (
"fmt"
"sort"
"strings"
"unsafe"
"github.com/brendan-ward/arrowtiler/tiles"
)
// GeometryArray holds GEOS Geometry pointers (to CGO objects) and a tree (STRtree)
// that is created lazily as part of Query() calls.
// GeometryArray owns C memory and must be manually freed using Release().
type GeometryArray struct {
	geometries []GEOSGeometry // C-owned GEOS geometry pointers
	tree       STRtree        // lazily built spatial index; nil until first Query
}
// Release frees the GEOS Geometry objects and the spatial index, then resets the
// struct so stale C pointers cannot be reused. Safe to call when the tree was
// never created.
func (g *GeometryArray) Release() {
	freeGeometries(g.geometries)
	if g.tree != nil {
		C.destroy_tree(g.tree)
	}
	// clear out previous references
	*g = GeometryArray{}
}
// freeGeometries destroys every GEOS geometry in the slice via C; a no-op for an
// empty slice (which also avoids indexing geometries[0]).
func freeGeometries(geometries []GEOSGeometry) {
	if len(geometries) > 0 {
		C.free_geometry_array((**C.GEOSGeometry)(&(geometries[0])), (C.size_t)(len(geometries)))
	}
}
// newGeometryArrayFromGEOS creates a new GeometryArray from a pointer to a C
// array of GEOS geometries and a size. Null entries in the C array become nil
// entries in the Go slice.
// Caller must free the C array (the contained geometries are adopted, not copied).
func newGeometryArrayFromGEOS(ptr *GEOSGeometry, size int) *GeometryArray {
	// copy from C array to Go slice (C array must be freed by caller)
	cArr := unsafe.Slice((**C.GEOSGeometry)(ptr), size)
	geometries := make([]GEOSGeometry, size)
	var p unsafe.Pointer
	for i := 0; i < size; i++ {
		p = unsafe.Pointer(cArr[i])
		if p != nil {
			geometries[i] = GEOSGeometry(p)
		}
	}
	g := &GeometryArray{
		geometries: geometries,
	}
	return g
}
// NewGeometryArrayFromWKT creates a new GeometryArray from a slice of geometry
// Well-Known Text strings. The GeometryArray must be freed manually by calling
// Release().
// BUGFIX: an empty input slice previously panicked on &(buffer[0]); it now
// returns an empty GeometryArray.
func NewGeometryArrayFromWKT(wkts []string) (*GeometryArray, error) {
	size := len(wkts)
	if size == 0 {
		return &GeometryArray{}, nil
	}
	// copy from Go strings into C char arrays
	buffer := make([](*C.char), size)
	for i := 0; i < size; i++ {
		buffer[i] = C.CString(wkts[i])
	}
	// char arrays must be deallocated
	defer func() {
		for i := 0; i < size; i++ {
			C.free(unsafe.Pointer(buffer[i]))
		}
	}()
	var ptr *GEOSGeometry = (*GEOSGeometry)(C.from_wkt((**C.char)(&(buffer[0])), (C.size_t)(size)))
	if ptr == nil {
		// TODO: check GEOS error
		return nil, fmt.Errorf("could not parse WKTs")
	}
	defer C.free(unsafe.Pointer(ptr))
	return newGeometryArrayFromGEOS(ptr, size), nil
}
// NewGeometryArrayFromWKB creates a new GeometryArray from a slice of geometry
// Well-Known Binary byte slices. The GeometryArray must be freed manually by
// calling Release().
// BUGFIX: an empty input slice previously panicked on &(buffer[0]); it now
// returns an empty GeometryArray. The error message also said "WKTs" for WKB
// input, and size is now used consistently instead of re-evaluating len(wkbs).
func NewGeometryArrayFromWKB(wkbs [][]byte) (*GeometryArray, error) {
	size := len(wkbs)
	if size == 0 {
		return &GeometryArray{}, nil
	}
	counts := make([](C.size_t), size)
	// copy from Go byte slices into C uchar arrays
	buffer := make([](*C.uchar), size)
	for i := 0; i < size; i++ {
		buffer[i] = (*C.uchar)(C.CBytes(wkbs[i]))
		counts[i] = C.size_t(len(wkbs[i]))
	}
	// uchar arrays must be deallocated
	defer func() {
		for i := 0; i < size; i++ {
			C.free(unsafe.Pointer(buffer[i]))
		}
	}()
	var ptr *GEOSGeometry = (*GEOSGeometry)(C.from_wkb((**C.uchar)(&(buffer[0])), (*C.size_t)(&(counts[0])), C.size_t(size)))
	if ptr == nil {
		// TODO: check GEOS error
		return nil, fmt.Errorf("could not parse WKBs")
	}
	defer C.free(unsafe.Pointer(ptr))
	return newGeometryArrayFromGEOS(ptr, size), nil
}
// ToWKT writes the GEOS Geometries using Well-Known Text, according to the
// specified decimal precision. Returns nil, nil for an empty array.
// BUGFIX: the containing C array returned by C.to_wkt was never freed (only its
// element strings were), leaking one array per call; it is now freed as well,
// matching the "caller must free the C array" contract used elsewhere.
func (g *GeometryArray) ToWKT(precision int) ([]string, error) {
	size := len(g.geometries)
	if size == 0 {
		return nil, nil
	}
	ptr := C.to_wkt((**C.GEOSGeometry)(&(g.geometries[0])), C.size_t(size), (C.int)(precision))
	if ptr == nil {
		// TODO: check GEOS error
		return nil, fmt.Errorf("could not write to WKT")
	}
	defer C.free(unsafe.Pointer(ptr))
	cArr := unsafe.Slice((**C.char)(ptr), size)
	var p unsafe.Pointer
	// free every element string after it has been copied into a Go string
	defer func() {
		for i := 0; i < size; i++ {
			p = unsafe.Pointer(cArr[i])
			if p != nil {
				C.free(p)
			}
		}
	}()
	out := make([]string, size)
	for i := 0; i < size; i++ {
		out[i] = C.GoString(cArr[i])
	}
	return out, nil
}
// Size reports the number of geometries held by the array; a nil receiver has
// size zero.
func (g *GeometryArray) Size() int {
	if g != nil {
		return len(g.geometries)
	}
	return 0
}
// String renders the array as a bracketed list of WKT previews, truncating each
// geometry's WKT to at most 60 characters for readability.
// BUGFIX: the separator condition `i < len(wkts)` was always true, so ", " was
// also appended after the last element; it is now only written between elements.
func (g *GeometryArray) String() string {
	if len(g.geometries) == 0 {
		return ""
	}
	truncate := 60
	wkts, err := g.ToWKT(2)
	if err != nil {
		panic(err)
	}
	var b strings.Builder
	b.WriteString("[")
	for i := 0; i < len(wkts); i++ {
		b.WriteString("<")
		if len(wkts[i]) > truncate {
			b.WriteString(wkts[i][:truncate-3] + "...")
		} else {
			b.WriteString(wkts[i])
		}
		b.WriteString(">")
		if i < len(wkts)-1 {
			b.WriteString(", ")
		}
	}
	b.WriteString("]")
	return b.String()
}
// TotalBounds computes the outer bounds (xmin, ymin, xmax, ymax) of all
// geometries in the array. Panics on a nil receiver.
// BUGFIX: an empty array previously panicked on g.geometries[0]; it now returns
// an error instead.
func (g *GeometryArray) TotalBounds() ([4]float64, error) {
	if g == nil {
		panic("GeometryArray not initialized")
	}
	bounds := [4]float64{}
	if len(g.geometries) == 0 {
		return bounds, fmt.Errorf("cannot calculate outer bounds of an empty GeometryArray")
	}
	if C.get_total_bounds((**C.GEOSGeometry)(&(g.geometries[0])),
		(C.size_t)(len(g.geometries)), (*C.double)(&bounds[0]), (*C.double)(&bounds[1]), (*C.double)(&bounds[2]), (*C.double)(&bounds[3])) == 0 {
		return bounds, fmt.Errorf("could not calculate outer bounds of GeometryArray")
	}
	return bounds, nil
}
// createTree builds the STRtree spatial index over all geometries and panics if
// C tree construction fails.
// NOTE(review): indexing g.geometries[0] panics for an empty array — callers
// must ensure the array is non-empty before calling.
func (g *GeometryArray) createTree() {
	g.tree = C.create_tree((**C.GEOSGeometry)(&(g.geometries[0])), C.size_t(len(g.geometries)))
	if g.tree == nil {
		panic("could not create tree for GeometryArray")
	}
}
// Query returns a slice of integer indexes into GeometryArray that overlap with
// the bounds defined by xmin, ymin, xmax, ymax.
// Will return nil if there are no results. Builds the spatial index lazily on
// first use and panics on a nil receiver.
func (g *GeometryArray) Query(xmin, ymin, xmax, ymax float64) ([]int, error) {
	if g == nil {
		panic("GeometryArray not initialized")
	}
	if g.tree == nil {
		g.createTree()
	}
	var cArr *C.uint32_t
	var cSize C.size_t
	// C fills cArr with matching indexes and cSize with the match count
	ret := int(C.query_tree(g.tree, C.double(xmin), C.double(ymin), C.double(xmax), C.double(ymax), (**C.uint32_t)(&cArr), (*C.size_t)(&cSize)))
	if ret != 1 {
		return nil, fmt.Errorf("failed during query of tree")
	}
	defer C.free(unsafe.Pointer(cArr))
	size := int(cSize)
	values := unsafe.Slice((*C.uint32_t)(cArr), size)
	// copy values from uint32_t to int
	indexes := make([]int, size)
	for i := 0; i < size; i++ {
		indexes[i] = (int)(values[i])
	}
	// results are in tree-traversal order; put them into incremental order
	sort.Ints(indexes)
	return indexes, nil
}
// ToMercator returns a new GeometryArray with coordinates projected to Mercator.
// GeometryCollections are not supported. Geometries may be null if outside
// Mercator world bounds.
// BUGFIX: an empty array previously panicked on g.geometries[0]; it now returns
// an empty GeometryArray.
func (g *GeometryArray) ToMercator() (*GeometryArray, error) {
	if len(g.geometries) == 0 {
		return &GeometryArray{}, nil
	}
	var ptr *GEOSGeometry = (*GEOSGeometry)(C.project_to_mercator((**C.GEOSGeometry)(&(g.geometries[0])), C.size_t(len(g.geometries))))
	if ptr == nil {
		// TODO: check GEOS error
		return nil, fmt.Errorf("could not project to Mercator")
	}
	defer C.free(unsafe.Pointer(ptr))
	return newGeometryArrayFromGEOS(ptr, len(g.geometries)), nil
}
// ToMercatorInPlace projects the coordinates to Mercator and replaces the
// receiver's contents with the projected array, releasing the previous C memory.
func (g *GeometryArray) ToMercatorInPlace() error {
	projected, err := g.ToMercator()
	if err != nil {
		return err
	}
	g.Release()
	*g = *projected
	return nil
}
// Take creates a new GeometryArray by taking geometries from the GeometryArray
// specified by integer indexes. Out of bounds indexes will cause a panic.
// The new GeometryArray points to the same underlying geometries (aliases, no
// copies, and no tree).
// TODO: this may fail badly if the master array from which these are taken is released first!
// Should we clone geometries instead!
func (g *GeometryArray) Take(indexes []int) *GeometryArray {
	geometries := make([]GEOSGeometry, len(indexes))
	for i, index := range indexes {
		geometries[i] = g.geometries[index]
	}
	return &GeometryArray{
		geometries: geometries,
		tree:       nil,
	}
}
// ToTile returns integer indexes into the original array for geometries encoded
// into the tile, the MVT geometry type per geometry, and the geometries encoded
// to MVT uint32 commands and coordinates.
// The input GeometryArray must already be in Mercator coordinates. Returns all
// nils when the array is empty or nothing intersects the tile.
func (g *GeometryArray) ToTile(t *tiles.TileID, config *tiles.EncodingConfig) ([]int, []byte, [][]uint32, error) {
	if g == nil {
		panic("GeometryArray not initialized")
	}
	if len(g.geometries) == 0 {
		return nil, nil, nil, nil
	}
	// first figure out if there are any geometries in tile
	if g.tree == nil {
		g.createTree()
	}
	xmin, ymin, xmax, ymax := t.MercatorBounds()
	hits, err := g.Query(xmin, ymin, xmax, ymax)
	if err != nil {
		return nil, nil, nil, err
	}
	size := len(hits)
	if size == 0 {
		return nil, nil, nil, nil
	}
	// simplification is disabled at and beyond the configured max zoom
	simplification := config.Simplification
	if simplification > 0 && t.Zoom >= config.SimplificationMaxZoom {
		simplification = 0
	}
	// gather the candidate geometries that intersect the tile bounds
	inGeoms := make([]GEOSGeometry, size)
	for i := 0; i < size; i++ {
		inGeoms[i] = g.geometries[hits[i]]
	}
	// clip to the tile, project into tile-local integer space, and simplify in C
	var ptr *GEOSGeometry = (*GEOSGeometry)(C.clip_project_to_tile((**C.GEOSGeometry)(&(inGeoms[0])), C.size_t(size), C.double(xmin), C.double(ymin), C.double(xmax), C.double(ymax), C.uint16_t(config.Extent), C.uint16_t(config.Buffer), C.uchar(config.Precision), C.uchar(simplification)))
	if ptr == nil {
		return nil, nil, nil, fmt.Errorf("could not extract GeometryArray in tile %v", t)
	}
	defer C.free(unsafe.Pointer(ptr))
	// copy from C array to Go slice, dropping geometries clipped away (null)
	indexes := make([]int, 0, size+1)
	geomsToEncode := make([]GEOSGeometry, 0, size+1)
	cArr := unsafe.Slice((**C.GEOSGeometry)(ptr), size)
	var p unsafe.Pointer
	for i := 0; i < size; i++ {
		p = unsafe.Pointer(cArr[i])
		if p != nil {
			indexes = append(indexes, hits[i])
			geomsToEncode = append(geomsToEncode, GEOSGeometry(p))
		}
	}
	// make sure that geometries are released
	defer freeGeometries(geomsToEncode)
	if len(geomsToEncode) == 0 {
		return nil, nil, nil, nil
	}
	// TODO: additional simplification steps would occur here
	// encode to MVT geometries
	types, encodedGeoms, err := encodeMVTGeometries(geomsToEncode)
	if err != nil {
		return nil, nil, nil, err
	}
	return indexes, types, encodedGeoms, nil
}
// encodeMVTGeometries encodes each GEOS geometry into its MVT geometry type byte
// and a slice of MVT uint32 commands/coordinates. A geometry the C encoder
// returns as null yields a nil buffer (and a zero type byte) at that position.
func encodeMVTGeometries(geometries []GEOSGeometry) ([]byte, [][]uint32, error) {
	if len(geometries) == 0 {
		return nil, nil, fmt.Errorf("cannot encode empty array of geometries")
	}
	size := len(geometries)
	var cTypesArrPtr *C.uchar
	var cArrPtr **C.uint32_t
	var cSizes *C.size_t
	ret := (int)(C.encode_geometries((**C.GEOSGeometry)(&(geometries[0])), (C.size_t)(size), (**C.uchar)(&cTypesArrPtr), (***C.uint32_t)(&cArrPtr), (**C.size_t)(&cSizes)))
	if ret != 1 {
		return nil, nil, fmt.Errorf("encode geometries failed")
	}
	// free subarrays and containing arrays
	defer C.free_uint32_subarrays((**C.uint32_t)(cArrPtr), (*C.size_t)(cSizes), (C.size_t)(size))
	defer C.free(unsafe.Pointer(cTypesArrPtr))
	cTypes := unsafe.Slice((*C.uchar)(cTypesArrPtr), size)
	cArrs := unsafe.Slice((**C.uint32_t)(cArrPtr), size)
	sizes := unsafe.Slice((*C.size_t)(cSizes), size)
	types := make([]byte, size)
	buffers := make([][]uint32, size)
	var p unsafe.Pointer
	for i := 0; i < size; i++ {
		p = unsafe.Pointer(cArrs[i])
		if p == nil {
			buffers[i] = nil
			continue
		}
		types[i] = byte(cTypes[i])
		buffers[i] = make([]uint32, int(sizes[i]))
		// copy the C uint32 buffer into Go-owned memory before it is freed
		cValues := unsafe.Slice((*C.uint32_t)(p), (int)(sizes[i]))
		for j, v := range cValues {
			buffers[i][j] = uint32(v)
		}
	}
	return types, buffers, nil
}
package utils
// endpoint is a single registry endpoint entry: a region/host key and its URL
// or hostname value.
type endpoint struct {
	Key   string `json:"key"`
	Value string `json:"value"`
}
// EndpointPattern groups the known registry endpoints for one provider.
type EndpointPattern struct {
	Endpoints []*endpoint `json:"endpoints"`
}
// ACRRegion returns the Alibaba Cloud Container Registry endpoints per region.
// BUGFIX: four keys were corrupted placeholders ("<KEY>"); they are restored
// from the region encoded in the corresponding registry URL.
func ACRRegion() *EndpointPattern {
	ep := &EndpointPattern{
		Endpoints: []*endpoint{
			{Key: "cn-hangzhou", Value: "https://registry.cn-hangzhou.aliyuncs.com"},
			{Key: "cn-shanghai", Value: "https://registry.cn-shanghai.aliyuncs.com"},
			{Key: "cn-qingdao", Value: "https://registry.cn-qingdao.aliyuncs.com"},
			{Key: "cn-beijing", Value: "https://registry.cn-beijing.aliyuncs.com"},
			{Key: "cn-zhangjiakou", Value: "https://registry.cn-zhangjiakou.aliyuncs.com"},
			{Key: "cn-huhehaote", Value: "https://registry.cn-huhehaote.aliyuncs.com"},
			{Key: "cn-shenzhen", Value: "https://registry.cn-shenzhen.aliyuncs.com"},
			{Key: "cn-chengdu", Value: "https://registry.cn-chengdu.aliyuncs.com"},
			{Key: "cn-hongkong", Value: "https://registry.cn-hongkong.aliyuncs.com"},
			{Key: "ap-southeast-1", Value: "https://registry.ap-southeast-1.aliyuncs.com"},
			{Key: "ap-southeast-2", Value: "https://registry.ap-southeast-2.aliyuncs.com"},
			{Key: "ap-southeast-3", Value: "https://registry.ap-southeast-3.aliyuncs.com"},
			{Key: "ap-southeast-5", Value: "https://registry.ap-southeast-5.aliyuncs.com"},
			{Key: "ap-northeast-1", Value: "https://registry.ap-northeast-1.aliyuncs.com"},
			{Key: "ap-south-1", Value: "https://registry.ap-south-1.aliyuncs.com"},
			{Key: "eu-central-1", Value: "https://registry.eu-central-1.aliyuncs.com"},
			{Key: "eu-west-1", Value: "https://registry.eu-west-1.aliyuncs.com"},
			{Key: "us-west-1", Value: "https://registry.us-west-1.aliyuncs.com"},
			{Key: "us-east-1", Value: "https://registry.us-east-1.aliyuncs.com"},
			{Key: "me-east-1", Value: "https://registry.me-east-1.aliyuncs.com"},
		},
	}
	return ep
}
// SWRRegion returns the Huawei Cloud SWR registry endpoints per region. Every
// endpoint value is the region key followed by ".myhuaweicloud.com".
func SWRRegion() *EndpointPattern {
	regions := []string{
		"af-south-1",
		"ap-southeast-1",
		"ap-southeast-2",
		"ap-southeast-3",
		"cn-east-2",
		"cn-east-3",
		"cn-north-1",
		"cn-north-2",
		"cn-north-4",
		"cn-south-1",
		"cn-south-2",
		"cn-southwest-2",
		"ru-northwest-2",
	}
	endpoints := make([]*endpoint, 0, len(regions))
	for _, region := range regions {
		endpoints = append(endpoints, &endpoint{Key: region, Value: region + ".myhuaweicloud.com"})
	}
	return &EndpointPattern{Endpoints: endpoints}
}
// AWSRegion returns the AWS ECR registry endpoints per region. Every endpoint
// value follows the pattern "dkr.ecr.<region>.amazonaws.com".
func AWSRegion() *EndpointPattern {
	regions := []string{
		"us-east-1",
		"us-east-2",
		"us-west-1",
		"us-west-2",
		"af-south-1",
		"ap-east-1",
		"ap-south-1",
		"ap-northeast-3",
		"ap-northeast-2",
		"ap-southeast-1",
		"ap-southeast-2",
		"ap-northeast-1",
		"ca-central-1",
		"eu-central-1",
		"eu-west-1",
		"eu-west-2",
		"eu-south-1",
		"eu-west-3",
		"eu-north-1",
		"me-south-1",
		"sa-east-1",
	}
	endpoints := make([]*endpoint, 0, len(regions))
	for _, region := range regions {
		endpoints = append(endpoints, &endpoint{
			Key:   region,
			Value: "dkr.ecr." + region + ".amazonaws.com",
		})
	}
	return &EndpointPattern{Endpoints: endpoints}
}
// DockerHubRegion returns the single Docker Hub registry endpoint.
func DockerHubRegion() *EndpointPattern {
	return &EndpointPattern{
		Endpoints: []*endpoint{
			{Key: "hub.docker.com", Value: "https://hub.docker.com"},
		},
	}
}
func GoogleRegion() *EndpointPattern {
ep := &EndpointPattern{
Endpoints: []*endpoint{
{
Key: "gcr.io",
Value: "https://gcr.io",
},
{
Key: "us.gcr.io",
Value: "https://us.gcr.io",
},
{
Key: "eu.gcr.io",
Value: "https://eu.gcr.io",
},
{
Key: "asia.gcr.io",
Value: "https://asia.gcr.io",
},
},
}
return ep
} | utils/typeinfos.go | 0.619586 | 0.406037 | typeinfos.go | starcoder |
package stargen
import (
"math"
"github.com/dayaftereh/discover/server/mathf"
"github.com/dayaftereh/discover/server/game/persistence/types"
)
// HabitableZoneMode selects which habitable-zone boundary model is used by
// habitableZoneDistanceHelper.
type HabitableZoneMode string

// Habitable zone boundary modes, ordered roughly from the inner (hot) edge to
// the outer (cold) edge.
const (
	RecentVenus               HabitableZoneMode = "recent-venus"
	RunawayGreenhouse         HabitableZoneMode = "runaway-greenhouse"
	MoistGreenhouse           HabitableZoneMode = "moist-greenhouse"
	EarthLike                 HabitableZoneMode = "earth-like"
	FirstCO2CondensationLimit HabitableZoneMode = "first-co2"
	MaximumGreenhouse         HabitableZoneMode = "max-greenhouse"
	EarlyMars                 HabitableZoneMode = "early-mars"
	TwoAUCloudLimit           HabitableZoneMode = "two-au"
)
// quadfix fits a quadratic a*t^2 + b*t + c through the three points (x, y),
// (w, z) and (p, q) and returns the coefficients a, b, c.
func quadfix(x, y, w, z, p, q float64) (float64, float64, float64) {
	// all three coefficients share the same denominator
	denom := (p - w) * (p - x) * (w - x)
	a := ((q * (w - x)) - (w * y) + (p * (y - z)) + (x * z)) / denom
	b := ((q * (math.Pow(x, 2.0) - math.Pow(w, 2.0))) + (math.Pow(w, 2.0) * y) - (math.Pow(x, 2.0) * z) + (math.Pow(p, 2.0) * (z - y))) / denom
	c := ((q * w * x * (w - x)) + (p * ((p * w * y) - (math.Pow(w, 2) * y) - (p * x * z) + (math.Pow(x, 2) * z)))) / denom
	return a, b, c
}
// quadTrend evaluates the quadratic a*x^2 + b*x + c at x.
func quadTrend(a, b, c, x float64) float64 {
	x2 := math.Pow(x, 2.0)
	return (a * x2) + (b * x) + c
}
// planetRadiusHelper interpolates a radius for planetMass from three
// (mass, radius) sample points using a quadratic fitted through them.
func planetRadiusHelper(planetMass, mass1, radius1, mass2, radius2, mass3, radius3 float64) float64 {
	a, b, c := quadfix(mass1, radius1, mass2, radius2, mass3, radius3)
	return quadTrend(a, b, c, planetMass)
}
// logFix fits a logarithmic trend a + b*ln(t) through the two points (x, y)
// and (w, z) and returns the coefficients a, b.
func logFix(x, y, w, z float64) (float64, float64) {
	logW := math.Log(w)
	logX := math.Log(x)
	a := ((y * logW) - (z * logX)) / (logW - logX)
	b := (z - y) / (logW - logX)
	return a, b
}
// lnTrend evaluates the logarithmic trend a + b*ln(x).
func lnTrend(a, b, x float64) float64 {
	scaled := b * math.Log(x)
	return a + scaled
}
// eTrend evaluates the exponential trend a + b*e^x.
func eTrend(a, b, x float64) float64 {
	scaled := b * math.Exp(x)
	return a + scaled
}
// eFix fits an exponential trend a + b*e^t through the two points (x, y) and
// (w, z) and returns the coefficients a, b.
func eFix(x, y, w, z float64) (float64, float64) {
	ex := math.Exp(x)
	ew := math.Exp(w)
	a := ((ex * z) - (ew * y)) / (ex - ew)
	b := (y - z) / (ex - ew)
	return a, b
}
// planetRadiusHelper2 interpolates a radius for planetMass from two
// (mass, radius) sample points using a fitted logarithmic trend.
func planetRadiusHelper2(planetMass, mass1, radius1, mass2, radius2 float64) float64 {
	a, b := logFix(mass1, radius1, mass2, radius2)
	return lnTrend(a, b, planetMass)
}
// calculateStellarFlux evaluates the effective stellar flux polynomial
// seff + a*t + b*t^2 + c*t^3 + d*t^4 with t = starTemp - 5780 K.
// NOTE(review): starLuminosity is accepted but never used — confirm intent.
func calculateStellarFlux(a, b, c, d, seff, starTemp, starLuminosity float64) float64 {
	t := starTemp - 5780.0
	t2 := math.Pow(t, 2.0)
	t3 := math.Pow(t, 3.0)
	t4 := math.Pow(t, 4.0)
	return seff + (a * t) + (b * t2) + (c * t3) + (d * t4)
}
// runawayGreenhouseCoefficients returns the flux-polynomial coefficients
// (a, b, c, d, seff) for the runaway-greenhouse boundary, interpolated for
// the given planetary mass. Below 1.0 a quadratic fit through the 0.1, 1.0
// and 5.0 mass samples is used; at or above 1.0 a logarithmic fit through
// the 1.0 and 5.0 samples is used.
func runawayGreenhouseCoefficients(mass float64) (a, b, c, d, seff float64) {
	if mass < 1.0 {
		a = planetRadiusHelper(mass, 0.1, 1.209E-4, 1.0, 1.332E-4, 5.0, 1.433E-4)
		b = planetRadiusHelper(mass, 0.1, 1.404E-8, 1.0, 1.58E-8, 5.0, 1.707E-8)
		c = planetRadiusHelper(mass, 0.1, -7.418E-12, 1.0, -8.308E-12, 5.0, -8.968E-12)
		d = planetRadiusHelper(mass, 0.1, -1.713E-15, 1.0, -1.931E-15, 5.0, -2.084E-15)
		seff = planetRadiusHelper(mass, 0.1, 0.99, 1.0, 1.107, 5.0, 1.188)
		return a, b, c, d, seff
	}
	a = planetRadiusHelper2(mass, 1.0, 1.332E-4, 5.0, 1.433E-4)
	b = planetRadiusHelper2(mass, 1.0, 1.58E-8, 5.0, 1.707E-8)
	c = planetRadiusHelper2(mass, 1.0, -8.308E-12, 5.0, -8.968E-12)
	d = planetRadiusHelper2(mass, 1.0, -1.931E-15, 5.0, -2.084E-15)
	seff = planetRadiusHelper2(mass, 1.0, 1.107, 5.0, 1.188)
	return a, b, c, d, seff
}

// scaledBoundaryFlux computes a boundary flux located at the same relative
// position between the runaway-greenhouse and maximum-greenhouse limits
// (for the mass-adjusted coefficients) as referenceFlux is for the
// Earth-mass reference coefficients. Used by the MoistGreenhouse and
// EarthLike modes, which previously duplicated this entire computation.
func scaledBoundaryFlux(effectiveTemperature, luminosity, referenceFlux, mass float64) float64 {
	stellarFluxGreen1 := calculateStellarFlux(1.332E-4, 1.58E-8, -8.308E-12, -1.931E-15, 1.107, effectiveTemperature, luminosity)
	stellarFluxMax1 := calculateStellarFlux(5.8942E-5, 1.6558E-9, -3.0045E-12, -5.2983E-16, 0.3438, effectiveTemperature, luminosity)
	percent := (stellarFluxGreen1 - referenceFlux) / (stellarFluxGreen1 - stellarFluxMax1)
	a, b, c, d, seff := runawayGreenhouseCoefficients(mass)
	stellarFluxGreen2 := calculateStellarFlux(a, b, c, d, seff, effectiveTemperature, luminosity)
	stellarFluxMax2 := calculateStellarFlux(6.171E-5, 1.698E-9, -3.198E-12, -5.575E-16, 0.356, effectiveTemperature, luminosity)
	diff := stellarFluxGreen2 - stellarFluxMax2
	temp := diff * percent
	// NOTE(review): "temp * diff" multiplies by the flux gap twice, making
	// the offset quadratic in the gap, which looks dimensionally suspect —
	// a plain linear interpolation would be "stellarFluxGreen2 - temp".
	// Original behavior preserved pending confirmation.
	return stellarFluxGreen2 - (temp * diff)
}

// habitableZoneDistanceHelper returns the stellar flux at the requested
// habitable-zone boundary for a star of the given effective temperature and
// luminosity, adjusted for planetary mass (clamped to [0.1, 10]).
// An unrecognized mode yields 0.
func habitableZoneDistanceHelper(effectiveTemperature, luminosity float64, mode HabitableZoneMode, mass float64) float64 {
	mass = mathf.Clamp(mass, 0.1, 10.0)
	switch mode {
	case RecentVenus:
		return calculateStellarFlux(2.136E-4, 2.533E-8, -1.332E-11, -3.097E-15, 1.776, effectiveTemperature, luminosity)
	case RunawayGreenhouse:
		a, b, c, d, seff := runawayGreenhouseCoefficients(mass)
		return calculateStellarFlux(a, b, c, d, seff, effectiveTemperature, luminosity)
	case MoistGreenhouse:
		referenceFlux := calculateStellarFlux(8.1774E-5, 1.7063E-9, -4.3241E-12, -6.6462E-16, 1.0140, effectiveTemperature, luminosity)
		return scaledBoundaryFlux(effectiveTemperature, luminosity, referenceFlux, mass)
	case EarthLike:
		referenceFlux := calculateStellarFlux(8.3104E-5, 1.7677E-9, -4.39E-12, -6.79E-16, 1.0, effectiveTemperature, luminosity)
		return scaledBoundaryFlux(effectiveTemperature, luminosity, referenceFlux, mass)
	case FirstCO2CondensationLimit:
		return calculateStellarFlux(4.4499e-5, 1.4065e-10, 2.2750e-12, -3.3509e-16, 0.5408, effectiveTemperature, luminosity)
	case MaximumGreenhouse:
		return calculateStellarFlux(6.171E-5, 1.698E-9, -3.198E-12, -5.575E-16, 0.356, effectiveTemperature, luminosity)
	case EarlyMars:
		return calculateStellarFlux(5.547E-5, 1.526E-9, -2.874E-12, -5.011E-16, 0.32, effectiveTemperature, luminosity)
	case TwoAUCloudLimit:
		return calculateStellarFlux(4.2588e-5, 1.1963e-9, -2.1709e-12, -3.8282e-16, 0.2484, effectiveTemperature, luminosity)
	}
	return 0.0
}
func habitableZoneDistance(sun *types.Sun, mode HabitableZoneMode, mass float64) float64 {
if sun.EffectiveTemperature >= 2600 && sun.EffectiveTemperature <= 7200 {
stellarFlux := habitableZoneDistanceHelper(sun.EffectiveTemperature, sun.Luminosity, mode, mass)
return math.Sqrt(sun.Luminosity / stellarFlux)
}
if mode == RecentVenus || mode == RunawayGreenhouse || mode == MoistGreenhouse {
return math.Sqrt(sun.Luminosity / 1.51)
}
if mode == EarthLike {
return math.Sqrt(sun.Luminosity)
}
if mode == FirstCO2CondensationLimit || mode == MaximumGreenhouse || mode == EarlyMars || mode == TwoAUCloudLimit {
return math.Sqrt(sun.Luminosity / 0.48)
}
return 0.0
} | server/game/universe/generator/stargen/habitable.go | 0.773644 | 0.422147 | habitable.go | starcoder |
package main
import (
	"fmt"
	"sort"
)
// main runs the self-checks in testCases; any mismatch panics via assert.
func main() {
	testCases()
}
// findMedianSortedArrays returns the median of the combined elements of
// nums1 and nums2, or 0 when both slices are empty.
//
// The previous implementation routed the values through a binary search
// tree (degrading to O(n^2) on ordered input) and contained a
// mojibake-corrupted token ("¤tIndex" in place of "&currentIndex") that did
// not compile. Merging and sorting is simpler and O(n log n) in all cases,
// and also handles unsorted inputs exactly as the tree walk did.
func findMedianSortedArrays(nums1 []int, nums2 []int) float64 {
	merged := make([]int, 0, len(nums1)+len(nums2))
	merged = append(merged, nums1...)
	merged = append(merged, nums2...)
	sort.Ints(merged)
	n := len(merged)
	if n == 0 {
		// Matches the old behavior: the result's zero value was returned.
		return 0
	}
	if n%2 == 1 {
		return float64(merged[n/2])
	}
	return (float64(merged[n/2-1]) + float64(merged[n/2])) / 2
}
// testCases exercises findMedianSortedArrays against known medians,
// panicking (via assert) on any mismatch.
func testCases() {
	// Unsorted inputs with duplicates and negatives.
	x1 := []int{-106, -106, 0, -11}
	x2 := []int{1, -40, 106, 106}
	r := findMedianSortedArrays(x1, x2)
	assert(-5.5, r)
	// Odd combined length: the single middle element.
	a1 := []int{1, 3}
	a2 := []int{2}
	result := findMedianSortedArrays(a1, a2)
	assert(2, result)
	// Even combined length: mean of the two middle elements.
	b1 := []int{1, 2}
	b2 := []int{3, 4}
	result = findMedianSortedArrays(b1, b2)
	assert(2.5, result)
	// All-equal values.
	c1 := []int{0, 0}
	c2 := []int{0, 0}
	result = findMedianSortedArrays(c1, c2)
	assert(0, result)
	// One empty side, either way around.
	d1 := []int{}
	d2 := []int{1}
	result = findMedianSortedArrays(d1, d2)
	assert(1, result)
	e1 := []int{2}
	e2 := []int{}
	result = findMedianSortedArrays(e1, e2)
	assert(2, result)
}
// assert panics when actual differs from expected; it is the minimal
// self-check harness used by testCases.
func assert(expected float64, actual float64) {
	if expected == actual {
		return
	}
	panic(fmt.Sprintf("expected: %f, actual: %f\n", expected, actual))
}
// node is a binary-search-tree node holding one input value.
type node struct {
	value int
	left  *node
	right *node
}

// result accumulates state for the in-order median walk.
type result struct {
	isEven       bool     // true when the combined input length is even
	startIndex   int      // in-order index of the first median element
	currentIndex *int     // shared counter of elements visited so far
	isReady      bool     // set once the median is fully determined
	value        float64  // the (possibly partial) median value
}
// add inserts newNode into the subtree rooted at refNode, descending left
// for strictly smaller values and right otherwise (duplicates go right).
func (refNode *node) add(newNode *node) {
	cur := refNode
	for {
		if newNode.value < cur.value {
			if cur.left == nil {
				cur.left = newNode
				return
			}
			cur = cur.left
		} else {
			if cur.right == nil {
				cur.right = newNode
				return
			}
			cur = cur.right
		}
	}
}
// walk performs an in-order traversal (ascending order, given how add
// places values), feeding each node to calculateMedian and short-circuiting
// once the median is ready.
func (refNode *node) walk(res *result) {
	if refNode == nil || res.isReady {
		return
	}
	refNode.left.walk(res)
	refNode.calculateMedian(res)
	refNode.right.walk(res)
}

// calculateMedian records the median as the shared in-order position
// counter passes the middle of the sequence. For odd lengths the single
// middle value is taken; for even lengths the two middle values are
// averaged across two consecutive calls.
func (refNode *node) calculateMedian(res *result) {
	if *res.currentIndex == res.startIndex {
		if !res.isEven {
			res.isReady = true
		}
		// For even lengths this stores the first middle value; the second
		// is folded in on the next call.
		res.value = float64(refNode.value)
	}
	if res.isEven && (*res.currentIndex == (res.startIndex + 1)) {
		res.isReady = true
		res.value = (float64(refNode.value) + res.value) / 2
	}
	*res.currentIndex = *res.currentIndex + 1
}
func addArray(nums []int, refNode *node) *node {
if len(nums) == 0 {
return refNode
}
var numbers []int = nums
if refNode == nil {
refNode = &node{value: nums[0], left: nil, right: nil}
numbers = nums[1:]
}
for _, i := range numbers {
binaryTreeNode := &node{value: i, left: nil, right: nil}
refNode.add(binaryTreeNode)
}
return refNode
} | leetcode/median-two-sorted-arrays/task.go | 0.609292 | 0.478894 | task.go | starcoder |
package limiter
/*
TokenLimiter is the interface that wraps the AcquireToken and ReleaseToken methods, representing the use of a token mechanism to enforce concurrency limits.

AcquireToken blocks until a token can be acquired from the limiter's supply. The token must be held for the duration of the activity which needs to be limited, and then it must be passed to the ReleaseToken method without modification.

ReleaseToken notifies the limiter that the provided token (pointer and value) can be used by another goroutine. The caller must not modify the value of the token at any time, but if the token implementation is known by the caller then unmarshaling of its value is not discouraged.

The token type is a pointer to a 16-byte array (128 bits) to give limiter implementations many options with a fixed type, like 128-bit binary time.Time or UUID values.
  - time.Time objects can be converted to/from []byte using the Time::MarshalBinary() and Time::UnmarshalBinary(data []byte) methods.
  - Hexadecimal string UUIDs can be converted to/from []byte using the "encoding/hex" package.
  - Numeric values can be converted to/from []byte using the "encoding/binary" package.
*/
type TokenLimiter interface {
	// AcquireToken blocks until a token is available and returns it.
	AcquireToken() (token *[16]byte)
	// ReleaseToken returns a previously acquired token to the supply.
	ReleaseToken(token *[16]byte)
}

/*
RateLimiter is the interface that wraps the CheckWait method, representing the use of a delay mechanism to enforce a rate limit.

CheckWait should be called at the beginning of the caller's action. It blocks if the limiter needs to restrict execution, otherwise it returns immediately. Restriction is typically based on consumption of a fixed rate budget, but may also be controlled by other factors.
*/
type RateLimiter interface {
	// CheckWait blocks until the caller is permitted to proceed.
	CheckWait()
}

/*
FailLimiter is the interface that wraps the CheckWait and Report methods, representing the use of a delay mechanism to enforce a rate limit and a feedback method to control it.

CheckWait should be called at the beginning of the caller's action. It blocks if the limiter needs to restrict execution, otherwise it returns immediately. Restriction is typically based on the last received status, but may also be controlled by other factors.

Report should be called at the end of the caller's action, providing the limiter with the success/fail status of the action. Failure statuses should be expected to incur rate throttling on subsequent calls to CheckWait.
*/
type FailLimiter interface {
	// CheckWait blocks until the caller is permitted to proceed.
	CheckWait()
	// Report feeds the action's outcome back into the limiter.
	Report(success bool)
}

/*
TokenAndFailLimiter is the interface that simplifies the combination of a TokenLimiter and a FailLimiter, wrapping a ReleaseTokenAndReport method instead of ReleaseToken.

AcquireToken blocks until a token can be acquired from the limiter's supply, and also blocks if the limiter needs to restrict execution. The token must be held for the duration of the action which needs to be limited, and then it must be passed to the ReleaseTokenAndReport method without modification.

ReleaseTokenAndReport should be called at the end of the caller's action, notifying the limiter that the provided token (pointer and value) can be used by another goroutine and providing the limiter with the success/fail status of the action. The caller must not modify the value of the token at any time, but if the token implementation is known by the caller then unmarshaling of its value is not discouraged.

Report can be called outside the context of a rate-limited action to notify the limiter that an error has occurred and that the allowed execution rate should be throttled.
*/
type TokenAndFailLimiter interface {
	// AcquireToken blocks for both token availability and rate restriction.
	AcquireToken() (token *[16]byte)
	// ReleaseTokenAndReport returns the token and reports the outcome.
	ReleaseTokenAndReport(token *[16]byte, success bool)
	// Report feeds an out-of-band outcome into the limiter.
	Report(success bool)
}
/*
InvocationLimiter is the interface that wraps the Invoke method.

Invoke enforces the limiter's limits around the invocation of the passed function. The error returned by the function invocation is returned to the caller without modification, and its existence may be used by the limiter to delay the current return or subsequent invocations.
*/
// (The previous closing line carried fused dataset-metadata text that made
// the file uncompilable; it has been removed.)
type InvocationLimiter interface {
	Invoke(f func() error) error
}
package goop2
import (
"fmt"
log "github.com/sirupsen/logrus"
)
// Sum returns a single expression equal to the sum of exprs, built by
// accumulating each expression onto a fresh zero expression.
func Sum(exprs ...Expr) Expr {
	total := NewExpr(0)
	for i := range exprs {
		total.Plus(exprs[i])
	}
	return total
}

// SumVars returns a single expression equal to the sum of the given
// variables, built by accumulating each onto a fresh zero expression.
func SumVars(vs ...*Var) Expr {
	total := NewExpr(0)
	for i := range vs {
		total.Plus(vs[i])
	}
	return total
}
// SumRow returns the sum of the variables in one row of the variable
// matrix vs, covering columns 0..len(vs[0])-1.
func SumRow(vs [][]*Var, row int) Expr {
	total := NewExpr(0)
	for col := range vs[0] {
		total.Plus(vs[row][col])
	}
	return total
}

// SumCol returns the sum of the variables in one column of the variable
// matrix vs, taking one entry per row.
func SumCol(vs [][]*Var, col int) Expr {
	total := NewExpr(0)
	for row := range vs {
		total.Plus(vs[row][col])
	}
	return total
}
// Dot returns the dot product of a vector of variables and slice of floats.
// A length mismatch between vs and coeffs is fatal (log.Panic).
func Dot(vs []*Var, coeffs []float64) Expr {
	if len(vs) != len(coeffs) {
		log.WithFields(log.Fields{
			"num_vars":   len(vs),
			"num_coeffs": len(coeffs),
		}).Panic("Number of vars and coeffs mismatch")
	}
	newExpr := NewExpr(0)
	for i := range vs {
		// Accumulate coeffs[i] * vs[i] into the running expression.
		newExpr.Plus(vs[i].Mult(coeffs[i]))
	}
	return newExpr
}
/*
FindInSlice

Description:

	Identifies the position of xIn within the slice sliceIn. Supported
	element types are string, int and uint64; sliceIn must be a slice with
	the same element type as xIn (a mismatched slice type panics on the
	type assertion, as before).

	If xIn occurs in sliceIn, the index of its LAST occurrence is returned
	with a nil error (preserving the historical scan-to-end behavior).
	If it does not occur, -1 and a nil error are returned. For any other
	element type, -1 and a non-nil error are returned.

	Note: earlier documentation claimed a boolean false was returned for
	missing elements; the function has always returned (int, error).
*/
func FindInSlice(xIn interface{}, sliceIn interface{}) (int, error) {
	foundAt := -1
	// Bind the concrete value in the type switch instead of re-asserting it
	// inside each case.
	switch x := xIn.(type) {
	case string:
		for i, v := range sliceIn.([]string) {
			if v == x {
				foundAt = i
			}
		}
	case int:
		for i, v := range sliceIn.([]int) {
			if v == x {
				foundAt = i
			}
		}
	case uint64:
		for i, v := range sliceIn.([]uint64) {
			if v == x {
				foundAt = i
			}
		}
	default:
		// The old message wrongly claimed only strings were supported and
		// ended in a dangling colon.
		return -1, fmt.Errorf("FindInSlice is only defined for element types string, int and uint64, not %T", xIn)
	}
	return foundAt, nil
}
package config
import (
"github.com/Azure/azure-service-operator/v2/tools/generator/internal/astmodel"
)
// configurationVisitor is used to facilitate easy walking of the ObjectModelConfiguration hierarchy, abstracting
// away traversal logic so that new uses of the hierarchy can concentrate on their specific functionality.
// By default will traverse the entire configuration but may optionally be constrained to just a specific type by
// construction with a typeName, or to a property by also providing the name of the property.
// Only one handler should be present, as we don't do any traversal below an invoked handler (but a handler is free to
// do independent visiting with a different instance if it chooses).
// A nil handler at a level simply causes traversal to continue to the level below.
type configurationVisitor struct {
	ref            astmodel.PackageReference                         // Optional Package reference used to constrain the walk
	typeName       string                                            // Optional TypeName used to constrain the walk
	property       *astmodel.PropertyName                            // Optional PropertyName used to constrain the walk
	handleGroup    func(groupConfig *GroupConfiguration) error       // Optional handler for visiting a group
	handleVersion  func(versionConfig *VersionConfiguration) error   // Optional handler for visiting a version
	handleType     func(typeConfig *TypeConfiguration) error         // Optional handler for visiting a Type
	handleProperty func(propertyConfig *PropertyConfiguration) error // Optional handler for visiting a property
}
// newSinglePropertyConfigurationVisitor creates a ConfigurationVisitor to apply an action to the property specified.
// typeName is the fully qualified name of the type expected to contain the property.
// property is the name of the property to visit.
// action is the action to apply to that property.
// The action runs (and its outcome is surfaced) when Visit is invoked on the returned visitor.
// NOTE(review): the historical "(true, nil)/(false, nil)" description did not match the error-only
// signature of Visit — the found/not-found flag is not returned directly.
func newSinglePropertyConfigurationVisitor(
	typeName astmodel.TypeName,
	property astmodel.PropertyName,
	action func(configuration *PropertyConfiguration) error) *configurationVisitor {
	return &configurationVisitor{
		ref:            typeName.PackageReference,
		typeName:       typeName.Name(),
		property:       &property,
		handleProperty: action,
	}
}

// newEveryPropertyConfigurationVisitor creates a ConfigurationVisitor to apply an action to every property
// configuration we have.
// action is the action to apply to each property.
// When Visit is invoked, it returns nil if every call to action was successful; otherwise it returns the
// error(s) produced.
func newEveryPropertyConfigurationVisitor(
	action func(configuration *PropertyConfiguration) error) *configurationVisitor {
	return &configurationVisitor{
		handleProperty: action,
	}
}
// newSingleTypeConfigurationVisitor creates a ConfigurationVisitor to apply an action to the type specified.
// typeName is the fully qualified name of the type expected.
// action is the action to apply to that type.
// The action runs (and its outcome is surfaced) when Visit is invoked on the returned visitor.
// NOTE(review): the historical "(true, nil)/(false, nil)" description did not match the error-only
// signature of Visit — the found/not-found flag is not returned directly.
func newSingleTypeConfigurationVisitor(
	typeName astmodel.TypeName,
	action func(configuration *TypeConfiguration) error) *configurationVisitor {
	return &configurationVisitor{
		ref:        typeName.PackageReference,
		typeName:   typeName.Name(),
		handleType: action,
	}
}

// newEveryTypeConfigurationVisitor creates a ConfigurationVisitor to apply an action to every type configuration
// specified.
// action is the action to apply to each type.
// When Visit is invoked, it returns nil if every call to action returned nil; otherwise it returns the
// error(s) produced.
func newEveryTypeConfigurationVisitor(
	action func(configuration *TypeConfiguration) error) *configurationVisitor {
	return &configurationVisitor{
		handleType: action,
	}
}
// newSingleVersionConfigurationVisitor creates a ConfigurationVisitor to apply an action to the version identified
// by the given package reference.
// ref identifies the group/version whose configuration should be visited.
// action is the action to apply to that version's configuration; it runs when Visit is invoked on the
// returned visitor.
// (The previous comment was copy-pasted from the type-visitor constructor and described a typeName
// parameter and return values this function never had.)
func newSingleVersionConfigurationVisitor(
	ref astmodel.PackageReference,
	action func(configuration *VersionConfiguration) error) *configurationVisitor {
	return &configurationVisitor{
		ref:           ref,
		handleVersion: action,
	}
}
// Visit visits the specified ObjectModelConfiguration.
// When a package reference constraint is present only that group is walked;
// otherwise every group is walked.
func (v *configurationVisitor) Visit(omc *ObjectModelConfiguration) error {
	if v.ref != nil {
		return omc.visitGroup(v.ref, v)
	}
	return omc.visitGroups(v)
}

// visitGroup visits the specified group configuration.
// If a group handler is present, it's called. Otherwise, if we're interested in precisely one nested version, we visit
// that. Otherwise, we visit all nested versions.
func (v *configurationVisitor) visitGroup(groupConfig *GroupConfiguration) error {
	if v.handleGroup != nil {
		return v.handleGroup(groupConfig)
	}
	if v.ref != nil {
		return groupConfig.visitVersion(v.ref, v)
	}
	return groupConfig.visitVersions(v)
}
// visitVersion visits the specified version configuration.
// If a version handler is present, it's called. Otherwise, if we're interested in precisely one nested type, we visit
// that. Otherwise, we visit all nested types.
func (v *configurationVisitor) visitVersion(versionConfig *VersionConfiguration) error {
	if v.handleVersion != nil {
		return v.handleVersion(versionConfig)
	}
	if v.typeName != "" {
		return versionConfig.visitType(v.typeName, v)
	}
	return versionConfig.visitTypes(v)
}

// visitType visits the specified type configuration.
// If a type handler is present, it's called. Otherwise, if we're interested in precisely one property, we visit that.
// Otherwise, we visit all nested properties.
func (v *configurationVisitor) visitType(typeConfig *TypeConfiguration) error {
	if v.handleType != nil {
		return v.handleType(typeConfig)
	}
	if v.property != nil {
		return typeConfig.visitProperty(*v.property, v)
	}
	return typeConfig.visitProperties(v)
}
// visitProperty visits the specified property configuration. If a property handler is present, it's called.
func (v *configurationVisitor) visitProperty(propertyConfig *PropertyConfiguration) error {
if v.handleProperty != nil {
return v.handleProperty(propertyConfig)
}
return nil
} | v2/tools/generator/internal/config/configuration_visitor.go | 0.850577 | 0.427516 | configuration_visitor.go | starcoder |
package continuous
import (
"github.com/jtejido/stats"
"github.com/jtejido/stats/err"
"math"
"math/rand"
)
// ArcsineBounded is an arcsine distribution supported on [min, max].
// https://en.wikipedia.org/wiki/Arcsine_distribution
type ArcsineBounded struct {
	Arcsine
	min, max float64
}

// NewArcsineBounded constructs an arcsine distribution on [min, max] using
// the global random source.
func NewArcsineBounded(min, max float64) (*ArcsineBounded, error) {
	return NewArcsineBoundedWithSource(min, max, nil)
}

// NewArcsineBoundedWithSource constructs an arcsine distribution on
// [min, max] drawing randomness from src (the global source when src is
// nil). It returns an error when max <= min.
func NewArcsineBoundedWithSource(min, max float64, src rand.Source) (*ArcsineBounded, error) {
	if max <= min {
		return nil, err.Invalid()
	}
	r := new(ArcsineBounded)
	r.min = min
	r.max = max
	r.src = src
	return r, nil
}
// String returns a human-readable description of the distribution.
func (asb *ArcsineBounded) String() string {
	return "ArcsineBounded: Parameters - " + asb.Parameters().String() + ", Support(x) - " + asb.Support().String()
}

// Parameters returns the valid parameter ranges:
// a ∈ (-∞,∞)
// b ∈ (a,∞)
func (asb *ArcsineBounded) Parameters() stats.Limits {
	return stats.Limits{
		"A": stats.Interval{math.Inf(-1), math.Inf(1), true, true},
		"B": stats.Interval{asb.min, math.Inf(1), true, true},
	}
}

// Support returns the support interval, x ∈ [a,b].
func (asb *ArcsineBounded) Support() stats.Interval {
	return stats.Interval{asb.min, asb.max, false, false}
}

// Probability returns the density f(x) = 1/(π·sqrt((x-a)(b-x))) for x
// inside the support and 0 elsewhere.
// NOTE(review): the density is unbounded at the endpoints; whether x == min
// or x == max reaches the division below depends on
// Interval.IsWithinInterval's endpoint handling — confirm it cannot yield
// a division by zero here.
func (asb *ArcsineBounded) Probability(x float64) float64 {
	if asb.Support().IsWithinInterval(x) {
		return 1 / (math.Pi * math.Sqrt((x-asb.min)*(asb.max-x)))
	}
	return 0
}
// Distribution returns the CDF F(x) = (2/π)·asin(sqrt((x-a)/(b-a))) for x
// inside the support and 0 elsewhere.
// NOTE(review): returning 0 for x ABOVE the support looks wrong for a CDF
// (it should be 1 there) — confirm the intended Interval semantics before
// relying on tail values.
func (asb *ArcsineBounded) Distribution(x float64) float64 {
	if asb.Support().IsWithinInterval(x) {
		return (2. / math.Pi) * math.Asin(math.Sqrt((x-asb.min)/(asb.max-asb.min)))
	}
	return 0
}

// Inverse returns the quantile function: the x with F(x) = p, clamped to
// the support endpoints for p outside (0, 1).
func (asb *ArcsineBounded) Inverse(p float64) float64 {
	if p <= 0 {
		return asb.min
	}
	if p >= 1 {
		return asb.max
	}
	return asb.min + (asb.max-asb.min)*math.Pow(math.Sin((math.Pi*p)/2), 2)
}
// Mean of the distribution: (a+b)/2.
func (asb *ArcsineBounded) Mean() float64 {
	return (asb.min + asb.max) / 2.
}

// Median of the distribution: (a+b)/2 (the distribution is symmetric).
func (asb *ArcsineBounded) Median() float64 {
	return (asb.min + asb.max) / 2.
}

// Mode reports the midpoint (a+b)/2.
// NOTE(review): the arcsine density is bimodal with modes at the endpoints
// {a, b}, and is at its MINIMUM at the midpoint — confirm this is the
// intended value.
func (asb *ArcsineBounded) Mode() float64 {
	return (asb.min + asb.max) / 2.
}

// Variance of the distribution: (b-a)²/8.
func (asb *ArcsineBounded) Variance() float64 {
	return (1. / 8) * math.Pow(asb.max-asb.min, 2.)
}

// ExKurtosis is the excess kurtosis, the constant -3/2.
func (asb *ArcsineBounded) ExKurtosis() float64 {
	return -(3. / 2.)
}

// Entropy is ln(π/4) + ln(b-a); the leading constant is ln(π/4).
func (asb *ArcsineBounded) Entropy() float64 {
	return -0.24156447527049044469 + math.Log(asb.max-asb.min)
}
func (asb *ArcsineBounded) Rand() float64 {
var rnd float64
if asb.src != nil {
rnd = rand.New(asb.src).Float64()
} else {
rnd = rand.Float64()
}
return asb.Inverse(rnd)
} | dist/continuous/arcsine_bounded.go | 0.824744 | 0.424651 | arcsine_bounded.go | starcoder |
package ent
import (
"fmt"
"strings"
"entgo.io/ent/dialect/sql"
"github.com/nint8835/entgql-bug-repro/ent/othertest"
)
// OtherTest is the model entity for the OtherTest schema.
// NOTE(review): this file is ent-generated-style code; keep edits in the
// schema definition rather than here.
type OtherTest struct {
	config `json:"-"`
	// ID of the ent.
	ID int `json:"id,omitempty"`
	// Test holds the value of the "test" field.
	Test othertest.Test `json:"test,omitempty"`
}

// scanValues returns the types for scanning values from sql.Rows.
// One nullable destination is allocated per requested column; an unknown
// column is an error so schema drift is caught early.
func (*OtherTest) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case othertest.FieldID:
			values[i] = new(sql.NullInt64)
		case othertest.FieldTest:
			values[i] = new(sql.NullString)
		default:
			return nil, fmt.Errorf("unexpected column %q for type OtherTest", columns[i])
		}
	}
	return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the OtherTest fields. values must be at least as long as columns and
// positionally aligned with scanValues' output.
func (ot *OtherTest) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case othertest.FieldID:
			value, ok := values[i].(*sql.NullInt64)
			if !ok {
				return fmt.Errorf("unexpected type %T for field id", value)
			}
			ot.ID = int(value.Int64)
		case othertest.FieldTest:
			// Unlike the ID case above, only valid (non-NULL) values are
			// assigned here; NULL leaves the zero value in place.
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field test", values[i])
			} else if value.Valid {
				ot.Test = othertest.Test(value.String)
			}
		}
	}
	return nil
}
// Update returns a builder for updating this OtherTest.
// Note that you need to call OtherTest.Unwrap() before calling this method if this OtherTest
// was returned from a transaction, and the transaction was committed or rolled back.
func (ot *OtherTest) Update() *OtherTestUpdateOne {
	return (&OtherTestClient{config: ot.config}).UpdateOne(ot)
}

// Unwrap unwraps the OtherTest entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
// It panics when the entity was not obtained from a transaction.
func (ot *OtherTest) Unwrap() *OtherTest {
	tx, ok := ot.config.driver.(*txDriver)
	if !ok {
		panic("ent: OtherTest is not a transactional entity")
	}
	ot.config.driver = tx.drv
	return ot
}
// String implements the fmt.Stringer.
// The output has the shape: OtherTest(id=..., test=...).
func (ot *OtherTest) String() string {
	var builder strings.Builder
	builder.WriteString("OtherTest(")
	builder.WriteString(fmt.Sprintf("id=%v", ot.ID))
	builder.WriteString(", test=")
	builder.WriteString(fmt.Sprintf("%v", ot.Test))
	builder.WriteByte(')')
	return builder.String()
}
// OtherTests is a parsable slice of OtherTest.
type OtherTests []*OtherTest
func (ot OtherTests) config(cfg config) {
for _i := range ot {
ot[_i].config = cfg
}
} | ent/othertest.go | 0.691185 | 0.441733 | othertest.go | starcoder |
package layer
import tf "github.com/galeone/tensorflow/tensorflow/go"
// LZeroPadding1D accumulates the configuration of a Keras ZeroPadding1D
// layer for later serialization via GetKerasLayerConfig.
type LZeroPadding1D struct {
	dtype        DataType
	inputs       []Layer
	name         string
	padding      float64
	shape        tf.Shape
	trainable    bool
	layerWeights []*tf.Tensor
}

// ZeroPadding1D constructs a layer with defaults: float32 dtype, padding of
// 1, trainable, and a unique generated name.
func ZeroPadding1D() *LZeroPadding1D {
	return &LZeroPadding1D{
		dtype:     Float32,
		name:      UniqueName("zero_padding1d"),
		padding:   1,
		trainable: true,
	}
}
// SetDtype sets the layer dtype and returns the layer for chaining.
func (l *LZeroPadding1D) SetDtype(dtype DataType) *LZeroPadding1D {
	l.dtype = dtype
	return l
}

// SetName sets the layer name and returns the layer for chaining.
func (l *LZeroPadding1D) SetName(name string) *LZeroPadding1D {
	l.name = name
	return l
}

// SetPadding sets the padding amount and returns the layer for chaining.
func (l *LZeroPadding1D) SetPadding(padding float64) *LZeroPadding1D {
	l.padding = padding
	return l
}

// SetShape sets the tensor shape and returns the layer for chaining.
func (l *LZeroPadding1D) SetShape(shape tf.Shape) *LZeroPadding1D {
	l.shape = shape
	return l
}

// SetTrainable sets the trainable flag and returns the layer for chaining.
func (l *LZeroPadding1D) SetTrainable(trainable bool) *LZeroPadding1D {
	l.trainable = trainable
	return l
}

// SetLayerWeights sets the layer weights and returns the layer for chaining.
func (l *LZeroPadding1D) SetLayerWeights(layerWeights []*tf.Tensor) *LZeroPadding1D {
	l.layerWeights = layerWeights
	return l
}

// GetShape returns the configured tensor shape.
func (l *LZeroPadding1D) GetShape() tf.Shape {
	return l.shape
}

// GetDtype returns the configured dtype.
func (l *LZeroPadding1D) GetDtype() DataType {
	return l.dtype
}

// SetInputs wires the inbound layers. Unlike the other setters it returns
// the Layer interface — presumably to satisfy the Layer interface; confirm.
func (l *LZeroPadding1D) SetInputs(inputs ...Layer) Layer {
	l.inputs = inputs
	return l
}

// GetInputs returns the inbound layers.
func (l *LZeroPadding1D) GetInputs() []Layer {
	return l.inputs
}

// GetName returns the layer name.
func (l *LZeroPadding1D) GetName() string {
	return l.name
}

// GetLayerWeights returns the configured layer weights.
func (l *LZeroPadding1D) GetLayerWeights() []*tf.Tensor {
	return l.layerWeights
}

// jsonConfigLZeroPadding1D is the JSON shape Keras expects for this layer
// in a functional-API model config.
type jsonConfigLZeroPadding1D struct {
	ClassName    string                 `json:"class_name"`
	Name         string                 `json:"name"`
	Config       map[string]interface{} `json:"config"`
	InboundNodes [][][]interface{}      `json:"inbound_nodes"`
}
// GetKerasLayerConfig builds the Keras functional-API JSON config for this
// layer, including one inbound-node reference per configured input.
func (l *LZeroPadding1D) GetKerasLayerConfig() interface{} {
	inboundNodes := [][][]interface{}{
		{},
	}
	for _, input := range l.inputs {
		// Each inbound node entry is [layerName, nodeIndex, tensorIndex, kwargs].
		inboundNodes[0] = append(inboundNodes[0], []interface{}{
			input.GetName(),
			0,
			0,
			map[string]bool{},
		})
	}
	return jsonConfigLZeroPadding1D{
		ClassName: "ZeroPadding1D",
		Name:      l.name,
		Config: map[string]interface{}{
			"dtype":     l.dtype.String(),
			"name":      l.name,
			"padding":   l.padding,
			"trainable": l.trainable,
		},
		InboundNodes: inboundNodes,
	}
}
func (l *LZeroPadding1D) GetCustomLayerDefinition() string {
return ``
} | layer/ZeroPadding1D.go | 0.731634 | 0.443179 | ZeroPadding1D.go | starcoder |
package parser
import (
"fmt"
"strings"
)
// NodeType discriminates the kinds of Node in a parsed path.
type NodeType string

const (
	// ListNode is an array element of a path.
	ListNode NodeType = "List"
	// ObjectNode is the final Node in a path, what is being referenced.
	ObjectNode NodeType = "Object"
)

// Node is a single element of a parsed path specification.
type Node interface {
	// Type reports whether this node is a List or an Object node.
	Type() NodeType
	// DeepCopyNode returns an independent copy of the node.
	DeepCopyNode() Node
	// String converts the Node into an equivalent String representation.
	// Calling Parse on the result yields an equivalent Node, but may differ in
	// structure if the Node is a Path containing Path Nodes.
	String() string
}
// Path represents an entire parsed path specification.
type Path struct {
	Nodes []Node
}

// DeepCopy returns a Path whose Nodes are independent copies of r's Nodes.
func (r Path) DeepCopy() Path {
	out := Path{
		Nodes: make([]Node, len(r.Nodes)),
	}
	for i := 0; i < len(r.Nodes); i++ {
		out.Nodes[i] = r.Nodes[i].DeepCopyNode()
	}
	return out
}

// String renders the path with "." separators before Object nodes (other
// than the first node); List nodes attach directly with no separator.
func (r Path) String() string {
	result := strings.Builder{}
	for i, n := range r.Nodes {
		nStr := n.String()
		if n.Type() == ObjectNode && i > 0 {
			// No leading separator, and no separators before List Nodes.
			result.WriteString(".")
		}
		result.WriteString(nStr)
	}
	return result.String()
}
// Object is a terminal path element naming the field being referenced.
type Object struct {
	Reference string
}

var _ Node = Object{}

// Type reports ObjectNode.
func (o Object) Type() NodeType {
	return ObjectNode
}

// DeepCopyNode returns an independent copy of o as a Node.
func (o Object) DeepCopyNode() Node {
	oOut := o.DeepCopy()
	return &oOut
}

// DeepCopy returns a value copy of o.
func (o Object) DeepCopy() Object {
	return Object{
		Reference: o.Reference,
	}
}

// String renders the reference, quoted when quoting is required.
func (o Object) String() string {
	return quote(o.Reference)
}
// List is a path element selecting entries of an array by key field:
// either a single key value, or all entries when Glob is set.
type List struct {
	KeyField string
	KeyValue interface{} // one of: string, int/int64, or nil (unspecified)
	Glob     bool
}

var _ Node = List{}

// Type reports ListNode.
func (l List) Type() NodeType {
	return ListNode
}

// DeepCopyNode returns an independent copy of l as a Node.
func (l List) DeepCopyNode() Node {
	lout := l.DeepCopy()
	return &lout
}

// DeepCopy returns a value copy of l.
func (l List) DeepCopy() List {
	out := List{}
	out.KeyField = l.KeyField
	out.Glob = l.Glob
	// KeyValue (interface{}) will be one of: [string, int, nil]
	out.KeyValue = l.KeyValue
	return out
}

// String renders the selector as "[key: value]", "[key: *]" for globs, or
// "[key: ]" when the key value is missing or of an unsupported type.
func (l List) String() string {
	key := quote(l.KeyField)
	if l.Glob {
		return fmt.Sprintf("[%s: *]", key)
	}
	switch v := l.KeyValue.(type) {
	case string:
		q := quote(v)
		return fmt.Sprintf("[%s: %s]", key, q)
	case int, int64:
		return fmt.Sprintf("[%s: %d]", key, v)
	case nil:
	default:
	}
	// Represents an improperly specified List node.
	return fmt.Sprintf("[%s: ]", key)
}
// quote optionally adds double quotes around the passed string if needed.
// Quotes are needed for:
//   - Strings containing whitespace, quotes, or other "ambiguous" characters
//     that would be tokenized as non-strings and need escaping.
//   - Strings starting with a digit, which would otherwise be tokenized as
//     an integer.
//   - Empty strings.
//
// (The previous closing line carried fused dataset-metadata text that made
// the file uncompilable; it has been removed.)
func quote(s string) string {
	if len(s) == 0 {
		return `""`
	}
	switch {
	case strings.ContainsAny(s, "'\"\t\n \\*[]:."),
		strings.ContainsAny(s[0:1], "0123456789"):
		// Using fmt.Sprintf with %q converts whitespace to escape sequences,
		// and we don't want that.
		s = strings.ReplaceAll(s, `\`, `\\`)
		s = strings.ReplaceAll(s, `"`, `\"`)
		return `"` + s + `"`
	}
	return s
}
package schema
import (
"strings"
)
// MySQL returns a schema Dialect targeting the MySQL database engine,
// using query to execute SQL and load schema information.
func MySQL(query Query) Dialect {
	return mysql{
		query: query,
	}
}

// PostgreSQL returns a schema Dialect targeting the PostgreSQL database
// engine, using query to execute SQL and load schema information.
func PostgreSQL(query Query) Dialect {
	return postgreSQL{
		query: query,
	}
}

// SQLite3 returns a schema Dialect targeting the SQLite3 database engine,
// using query to execute SQL and load schema information.
func SQLite3(query Query) Dialect {
	return sqlite3{
		query: query,
	}
}

// Query executes sql with args and scans the result into dest.
// NOTE(review): the returned int is presumably a row count — confirm against
// the implementations supplied by callers.
type Query func(sql string, dest interface{}, args ...interface{}) (int, error)
// Dialect abstracts the SQL-generation and schema-inspection differences
// between database engines (MySQL, PostgreSQL, SQLite3). Methods marked with
// an engine name in brackets are only meaningful for that engine.
type Dialect interface {
	// Create the column definition for a char type.
	TypeChar(column Column) (string, error)
	// Create the column definition for a string type.
	TypeString(column Column) (string, error)
	// Create the column definition for a text type.
	TypeText(column Column) (string, error)
	// Create the column definition for a medium text type.
	TypeMediumText(column Column) (string, error)
	// Create the column definition for a long text type.
	TypeLongText(column Column) (string, error)
	// Create the column definition for a big integer type.
	TypeBigInteger(column Column) (string, error)
	// Create the column definition for an integer type.
	TypeInteger(column Column) (string, error)
	// Create the column definition for a medium integer type.
	TypeMediumInteger(column Column) (string, error)
	// Create the column definition for a tiny integer type.
	TypeTinyInteger(column Column) (string, error)
	// Create the column definition for a tiny blob type.
	TypeTinyBlob(column Column) (string, error)
	// Create the column definition for an blob type.
	TypeBlob(column Column) (string, error)
	// Create the column definition for a medium blob type.
	TypeMediumBlob(column Column) (string, error)
	// Create the column definition for a long blob type.
	TypeLongBlob(column Column) (string, error)
	// Create the column definition for a small integer type.
	TypeSmallInteger(column Column) (string, error)
	// Create the column definition for a float type.
	TypeFloat(column Column) (string, error)
	// Create the column definition for a double type.
	TypeDouble(column Column) (string, error)
	// Create the column definition for a decimal type.
	TypeDecimal(column Column) (string, error)
	// Create the column definition for a boolean type.
	TypeBoolean(column Column) (string, error)
	// Create the column definition for an enumeration type.
	TypeEnum(column Column) (string, error)
	// Create the column definition for a set enumeration type.
	TypeSet(column Column) (string, error)
	// Create the column definition for a json type.
	TypeJson(column Column) (string, error)
	// Create the column definition for a jsonb type.
	TypeJsonb(column Column) (string, error)
	// Create the column definition for a date type.
	TypeDate(column Column) (string, error)
	// Create the column definition for a date-time type.
	TypeDateTime(column Column) (string, error)
	// Create the column definition for a date-time (with time zone) type.
	TypeDateTimeTz(column Column) (string, error)
	// Create the column definition for a time type.
	TypeTime(column Column) (string, error)
	// Create the column definition for a time (with time zone) type.
	TypeTimeTz(column Column) (string, error)
	// Create the column definition for a timestamp type.
	TypeTimestamp(column Column) (string, error)
	// Create the column definition for a timestamp (with time zone) type.
	TypeTimestampTz(column Column) (string, error)
	// Create the column definition for a year type.
	TypeYear(column Column) (string, error)
	// Create the column definition for a binary type.
	TypeBinary(column Column) (string, error)
	// Create the column definition for a uuid type.
	TypeUuid(column Column) (string, error)
	// Create the column definition for an IP address type.
	TypeIpAddress(column Column) (string, error)
	// Create the column definition for a MAC address type.
	TypeMacAddress(column Column) (string, error)
	// Create the column definition for a spatial Geometry type.
	TypeGeometry(column Column) (string, error)
	// Create the column definition for a spatial Point type.
	TypePoint(column Column) (string, error)
	// Create the column definition for a spatial LineString type.
	TypeLineString(column Column) (string, error)
	// Create the column definition for a spatial Polygon type.
	TypePolygon(column Column) (string, error)
	// Create the column definition for a spatial GeometryCollection type.
	TypeGeometryCollection(column Column) (string, error)
	// Create the column definition for a spatial MultiPoint type.
	TypeMultiPoint(column Column) (string, error)
	// Create the column definition for a spatial MultiLineString type.
	TypeMultiLineString(column Column) (string, error)
	// Create the column definition for a spatial MultiPolygon type.
	TypeMultiPolygon(column Column) (string, error)
	// Modify the column
	ModifyColumn(column Column) string
	// Compile the query that checks whether the table exists.
	CompileTableExists(tableName string, tableSchema ...string) (string, error)
	// Compile the query to determine the list of columns.
	CompileColumnListing(tableName string, tableSchema ...string) (string, error)
	// Compile a create table command.
	CompileCreate(table Table) (string, error)
	// Compile a modify columns command.
	CompileModifyColumns(table Table) (string, error)
	// Compile a modify column command.
	CompileModifyColumn(table Table, columnName string) (string, error)
	// Compile add columns.
	CompileAddColumns(table Table) (string, error)
	// Compile an add column command for a single column.
	CompileAddColumn(table Table, columnName string) (string, error)
	// Compile a primary key command.
	CompilePrimaryKey(table Table, columnNames ...string) (string, error)
	// Compile a drop primary key command.
	CompileDropPrimaryKey(table Table) (string, error)
	// Compile an index creation command.
	CompileIndex(table Table, index Index) (string, error)
	// Compile a drop index command.
	CompileDropIndex(table Table, indexName string) (string, error)
	// Compile a drop unique index command.
	CompileDropUnique(table Table, indexName string) (string, error)
	// Compile a drop spatial index command.
	CompileDropSpatialIndex(table Table, indexName string) (string, error)
	// Compile a drop foreign index command.
	CompileDropForeign(table Table, indexName string) (string, error)
	// Compile a drop table command.
	CompileDrop(tableName string) (string, error)
	// Compile a drop table (if exists) command.
	CompileDropIfExists(tableName string) (string, error)
	// Compile a drop column command.
	CompileDropColumn(table Table, columnNames []string) (string, error)
	// Compile a rename table command.
	CompileRenameTable(table Table, toName string) (string, error)
	// Compile a rename index command.
	CompileRenameIndex(table Table, from string, to string) (string, error)
	// Compile the SQL needed to drop all tables. [PostgreSQL]
	CompileDropAllTables(tableNames ...string) (string, error)
	// Compile the SQL needed to drop all views.
	CompileDropAllViews(viewNames ...string) (string, error)
	// Compile the SQL needed to drop all types.
	CompileDropAllTypes(schemaNames ...string) (string, error)
	// Compile the SQL needed to retrieve all table names.
	CompileGetAllTables(schemaNames ...string) (string, error)
	// Compile the SQL needed to retrieve all view names.
	CompileGetAllViews(schemaNames ...string) (string, error)
	// Compile the SQL needed to retrieve all type names. [PostgreSQL]
	CompileGetAllTypes() (string, error)
	// Compile the SQL needed to rebuild the database. [SQLite]
	CompileRebuild() (string, error)
	// Compile the command to enable foreign key constraints.
	CompileEnableForeignKeyConstraints() (string, error)
	// Compile the command to disable foreign key constraints.
	CompileDisableForeignKeyConstraints() (string, error)
	// Load table columns from the database
	LoadColumns(tableName string, tableSchema ...string) ([]Column, error)
	// Load table from the database
	LoadTable(tableName string, tableSchema ...string) (Table, error)
	// Load table constraints from the database
	LoadTableConstraints(tableName string, tableSchema ...string) (TableConstraints, error)
}
// wrap table name if prefix exists
func wrapTableName(table Table, quoteIdent func(s string) string) string {
var b strings.Builder
if table.Schema() != "" {
b.WriteString(quoteIdent(table.Schema()))
b.WriteString(".")
}
if table.Prefix() == "" {
b.WriteString(quoteIdent(table.Name()))
} else {
b.WriteString(quoteIdent(table.Prefix() + table.Name()))
}
return b.String()
} | schema/schema.go | 0.522933 | 0.617138 | schema.go | starcoder |
package llrp
import (
"time"
)
// LLRPConfigurationStateValue builds an LLRPConfigurationStateValue
// parameter.
//
// The value is a 32-bit number representing the Reader's entire LLRP
// configuration state: LLRP configuration parameters, vendor extension
// configuration parameters, ROSpecs, and AccessSpecs. Per the LLRP
// specification a Reader SHALL change this value only upon successful
// execution of ADD_ROSPEC, DELETE_ROSPEC, ADD_ACCESSSPEC, DELETE_ACCESSSPEC,
// SET_READER_CONFIG, any CUSTOM_MESSAGE that alters the reader's internal
// configuration, or upon an automatically deleted AccessSpec (after
// OperationCountValue operations). It does not change when the CurrentState
// of a ROSpec or AccessSpec changes. A Client can therefore save the value
// and, if a later request returns the same value, assume the configuration
// has not changed.
func LLRPConfigurationStateValue(llrpConfigurationStateValue int) []interface{} {
	payload := []interface{}{uint32(llrpConfigurationStateValue)}
	return commonSpec(P_LLRPConfigurationStateValue, payload)
}
// IDType values used by the Identification parameter.
const (
	C_Identification_IDType_MAC = iota
	C_Identification_IDType_EPC
)

// Identification builds an Identification parameter carrying the Reader ID
// as a byte array. If IDType is C_Identification_IDType_MAC, the MAC address
// SHALL be encoded as EUI-64. [EUI64]
func Identification(IDType int, ReaderId string) []interface{} {
	raw := []uint8(ReaderId)
	payload := make([]interface{}, 0, 2+len(raw))
	payload = append(payload, uint8(IDType), uint16(len(raw)))
	for _, octet := range raw {
		payload = append(payload, octet)
	}
	return commonSpec(P_Identification, payload)
}
// GPOWriteDataFunc builds a GPOWriteData parameter: the data needed to write
// to a general purpose output port.
//
// GPOPortNumber: unsigned short integer; 0 is invalid.
// GPOData: the state to output on the specified GPO port (encoded in the
// high bit of the data byte).
func GPOWriteDataFunc(GPOPortNumber int, GPOData bool) []interface{} {
	var state uint8
	if GPOData {
		state = 0x80
	}
	return commonSpec(P_GPOWriteData, []interface{}{
		uint16(GPOPortNumber),
		state,
	})
}
// KeepaliveSpec builds a KeepaliveSpec parameter, which controls keepalive
// message generation by the Reader.
//
// PeriodicTriggerValue is the keepalive interval; a positive value selects
// the periodic trigger type (1), zero or less disables it (type 0, interval
// field ignored by the reader). The wire value is in milliseconds.
func KeepaliveSpec(PeriodicTriggerValue time.Duration) []interface{} {
	var triggerType uint8
	if PeriodicTriggerValue > 0 {
		triggerType = 1
	}
	millis := uint32(PeriodicTriggerValue / time.Millisecond)
	return commonSpec(P_KeepaliveSpec, []interface{}{
		triggerType,
		millis,
	})
}
// AntennaProperties builds an AntennaProperties parameter for a single
// antenna: its connectivity status (high bit of the first byte) and its
// composite gain. The gain includes the loss of the cable from the Reader to
// the antenna and is represented in dBi*100 to allow fractional dBi values.
func AntennaProperties(AntennaID, AntennaGain int, AntennaConnected bool) []interface{} {
	var connected uint8
	if AntennaConnected {
		connected = 0x80
	}
	return commonSpec(P_AntennaProperties, []interface{}{
		connected,
		uint16(AntennaID),
		uint16(AntennaGain),
	})
}
// AntennaConfiguration builds an AntennaConfiguration parameter carrying a
// single antenna's configuration. The values given here become the antenna's
// defaults, used whenever an operation on this antenna leaves the
// corresponding parameter unspecified.
//
// params: RFReceiver, RFTransmitter, AirProtocolInventoryCommandSettings,
// and/or Custom parameters.
func AntennaConfiguration(AntennaID int, params ...[]interface{}) []interface{} {
	header := []interface{}{uint16(AntennaID)}
	return commonSpec(P_AntennaConfiguration, header, params...)
}
// RFReceiver builds an RFReceiver parameter.
//
// NOTE(review): both the original doc comment and the payload here mirror
// RFTransmitter (HopTableID, ChannelIndex, TransmitPower as three uint16
// fields), whereas the LLRP specification defines the RFReceiver parameter
// as carrying only a receiver sensitivity index — confirm the intended wire
// format against the spec before relying on this encoding.
// Also note the argument order (TransmitPower first) differs from the
// payload order (TransmitPower last).
func RFReceiver(TransmitPower, HopTableID, ChannelIndex int) []interface{} {
	return commonSpec(
		P_RFReceiver,
		[]interface{}{
			uint16(HopTableID),
			uint16(ChannelIndex),
			uint16(TransmitPower),
		},
	)
}
// GPIPortCurrentState builds a GPIPortCurrentState parameter carrying the
// configuration and state of a single GPI port. In a SET_READER_CONFIG
// message the GPIConfig flag enables or disables the port (GPIState is
// ignored by the reader); in a GET_READER_CONFIG message the parameter
// reports both configuration and state. GPIs used as ROSpec/AISpec start or
// stop triggers must be enabled via SET_READER_CONFIG for the triggers to
// function.
func GPIPortCurrentState(GPIPortNum, GPIState int, GPIConfig bool) []interface{} {
	var enabled uint8
	if GPIConfig {
		enabled = 0x80
	}
	return commonSpec(P_GPIPortCurrentState, []interface{}{
		uint16(GPIPortNum),
		enabled,
		uint8(GPIState),
	})
}
/*
This parameter controls the behavior of the Reader when a new LLRP connection is established. In a SET_READER_CONFIG message, this parameter is used to enable or
disable the holding of events and reports upon connection using the HoldEventsAndReportsUponReconnect field. In a GET_READER_CONFIG message,this parameter reports the current configuration. If the ldEventsAndReportsUponReconnect is true, the reader will not deliver any reports or events (except the ConnectionAttemptEvent) to the Client until the Client issues an ENABLE_EVENTS_AND_REPORTS message. Once the ENABLE_EVENTS_AND_REPORTS message is received the reader ceases its hold on events and reports for the duration of the connection.
*/
func EventsAndReports(HoldEventsAndReportsUponReconnect bool) []interface{} {
return commonSpec(
P_EventsAndReports,
[]interface{}{
convertBooleanUint8(HoldEventsAndReportsUponReconnect),
},
)
}
/*
This Parameter carries the RF transmitter information. The Transmit Power defines the transmit power for the antenna expressed as an index into the TransmitPowerTable (section 10.2.4.1.1). The HopTableID is the index of the frequency hop table to be used
by the Reader (section 10.2.4.1.2.1) and is used when operating in frequency-hopping regulatory regions. This field is ignored in non-frequency-hopping regulatory regions. The ChannelIndex is the one-based channel index in the FixedFrequencyTable to use during transmission (section 10.2.4.1.2.2) and is used when operating in non-frequency-hopping regulatory regions. This field is ignored in frequency-hopping regulatory regions.
*/
func RFTransmitter(HopTableID, ChannelIndex, TransmitPower int) []interface{} {
return commonSpec(
P_RFTransmitter,
[]interface{}{
uint16(HopTableID),
uint16(ChannelIndex),
uint16(TransmitPower),
},
)
} | llrp/params_configuration.go | 0.767864 | 0.522811 | params_configuration.go | starcoder |
package movie
import "strings"
// Movie parses an IMDB movie record text blob, extracting all metadata about
// a movie title.
// The core details are extracted from the MOVI entry, but also all the other
// entry types; ADPT, NOVL, CRIT, SCRP, etc.
type Movie struct {
	Title string
	Year int
	Month string // as Roman Numerals
	TV bool
	Adaptations []Adaptation
	Books []Book
	Critiques []Critique
	Essays []Essay
	Interviews []Interview
	Novels []Novel
	Others []Other
	ProductionProtocols []ProductionProtocol
	Screenplays []Screenplay
}

// Adaptation parses an ADPT (adapted literary source) record entry.
// It carries the same fields as Book.
type Adaptation struct {
	Book
}

// Book parses a BOOK (monographic book) record entry.
type Book struct {
	Title string
	Author string
	Publisher Publisher
	Date Date
	PageCount int
	ISBN string
	FirstPublished int
	Note string
}

// Novel parses a NOVL (original literary source) record entry.
// It carries the same fields as Book.
type Novel struct {
	Book
}

// Critique parses a CRIT (printed media reviews) record entry.
type Critique struct {
	publication
}

// Essay parses an ESSY (printed essay) record entry.
type Essay struct {
	publication
}

// Interview parses an IVIW (interview with cast or crew) record entry.
type Interview struct {
	publication
}

// Other parses an OTHR (other literature) record entry.
type Other struct {
	publication
}

// ProductionProtocol parses a PROT (production protocol) record entry.
type ProductionProtocol struct {
	publication
}

// Screenplay parses a SCRP (published screenplay) record entry.
type Screenplay struct {
	publication
}

// publication is a base type used by non-book entries such as CRIT, ESSY,
// etc., generally representing a magazine.
type publication struct {
	// core publication details.
	Name string
	Publisher Publisher
	Date Date
	Volume string
	Issue string
	ISSN string // sometime contains an ISBN
	// details related specifically to the IMDB entries.
	ArticleAuthor string
	ArticleTitle string
	ArticlePages string // e.g. `1-17`, `56`, `23, 24, 66`
	// The interview subject, only used by IVIW.
	ArticleInterviewee string
}

// Publisher metadata is used by all books and publications for the publisher details.
type Publisher struct {
	Name string
	City string
	Country string
}

// Date is a generic type for storing dates without having to parse into time.Time objects.
// A zero field means that component was absent from the record.
type Date struct {
	Year int
	Month int
	Day int
}
// Unmarshall processes all record entry types from data into movie:
// the MOVI title details plus every literature entry (ADPT, BOOK, NOVL,
// CRIT, ESSY, IVIW, OTHR, PROT, SCRP).
// (The "Unmarshall" spelling is part of the public API.)
func Unmarshall(data string, movie *Movie) {
	entry := extractEntryDataTypes(data)
	entry.movieTitleDetails(movie)
	entry.adaptations(movie)
	entry.books(movie)
	entry.novels(movie)
	entry.critiques(movie)
	entry.essays(movie)
	entry.interviews(movie)
	entry.others(movie)
	entry.productionProtocols(movie)
	entry.screenplays(movie)
}
// UnmarshallBooks processes only the record entry types that are books:
// the MOVI title details plus ADPT, BOOK, and NOVL entries.
func UnmarshallBooks(data string, movie *Movie) {
	entry := extractEntryDataTypes(data)
	entry.movieTitleDetails(movie)
	entry.adaptations(movie)
	entry.books(movie)
	entry.novels(movie)
}
// IsAdaptation checks all book types (ADPT, BOOK, NOVL) and reports whether
// any of them matches the given title and author.
func (m Movie) IsAdaptation(title, author string) bool {
	matches := func(srcTitle, srcAuthor string) bool {
		return m.titleMatches(srcTitle, title) && m.authorMatches(srcAuthor, author)
	}
	for _, a := range m.Adaptations {
		if matches(a.Title, a.Author) {
			return true
		}
	}
	for _, b := range m.Books {
		if matches(b.Title, b.Author) {
			return true
		}
	}
	for _, n := range m.Novels {
		if matches(n.Title, n.Author) {
			return true
		}
	}
	return false
}
// titleMatches reports whether testTitle is contained in srcTitle,
// case-insensitively and ignoring every occurrence of "the ".
// NOTE(review): ReplaceAll removes "the " anywhere in the string, including
// inside words followed by a space (e.g. "soothe away" -> "sooaway") —
// confirm whether stripping only a leading article was intended.
func (m Movie) titleMatches(srcTitle, testTitle string) bool {
	title := strings.ToLower(srcTitle)
	testable := strings.ToLower(testTitle)
	title = strings.ReplaceAll(title, "the ", "")
	testable = strings.ReplaceAll(testable, "the ", "")
	return strings.Contains(title, testable)
}
func (m Movie) authorMatches(srcAuthor, testAuthor string) bool {
author := strings.ToLower(srcAuthor)
testable := strings.ToLower(testAuthor)
author = strings.ReplaceAll(author, ",", "")
testable = strings.ReplaceAll(testable, ",", "")
// perhaps splitting the name on spaces and checking each part is present
// would be a reasonable approach:
matching := true
names := strings.Fields(testable)
for _, name := range names {
if !strings.Contains(author, name) {
matching = false
}
}
return matching
} | movie/movie.go | 0.61451 | 0.439386 | movie.go | starcoder |
package cast
import (
"errors"
"strconv"
)
// Float32 converts value to a float32.
// Booleans map to 0/1, numeric types are converted directly, strings and
// []byte are parsed as decimal numbers, and nil yields 0. Any other type
// returns an error.
func Float32(value interface{}) (float32, error) {
	switch v := value.(type) {
	case bool:
		if v {
			return 1, nil
		}
		return 0, nil
	case float32:
		return v, nil
	case float64:
		return float32(v), nil
	case int:
		return float32(v), nil
	case int8:
		return float32(v), nil
	case int16:
		return float32(v), nil
	case int32:
		return float32(v), nil
	case int64:
		return float32(v), nil
	case uint:
		return float32(v), nil
	case uint8:
		return float32(v), nil
	case uint16:
		return float32(v), nil
	case uint32:
		return float32(v), nil
	case uint64:
		return float32(v), nil
	case string:
		// Parse with bitSize 32 so out-of-range values are reported as a
		// range error instead of silently overflowing on conversion
		// (previously parsed with bitSize 64, unlike the []byte case).
		f, err := strconv.ParseFloat(v, 32)
		return float32(f), err
	case []byte:
		f, err := strconv.ParseFloat(string(v), 32)
		return float32(f), err
	case nil:
		return 0, nil
	default:
		return 0, errors.New("Unknown type")
	}
}
// MustFloat32 converts value to a float32, panicking if the conversion
// fails.
func MustFloat32(value interface{}) float32 {
	result, err := Float32(value)
	if err != nil {
		panic(err)
	}
	return result
}
// Float64 converts value to a float64.
// Booleans map to 0/1, numeric types are converted directly, strings and
// []byte are parsed as decimal numbers, and nil yields 0. Any other type
// returns an error.
func Float64(value interface{}) (float64, error) {
	switch v := value.(type) {
	case bool:
		if v {
			return 1, nil
		}
		return 0, nil
	case float32:
		return float64(v), nil
	case float64:
		return v, nil
	case int:
		return float64(v), nil
	case int8:
		return float64(v), nil
	case int16:
		return float64(v), nil
	case int32:
		return float64(v), nil
	case int64:
		return float64(v), nil
	case uint:
		return float64(v), nil
	case uint8:
		return float64(v), nil
	case uint16:
		return float64(v), nil
	case uint32:
		return float64(v), nil
	case uint64:
		return float64(v), nil
	case string:
		return strconv.ParseFloat(v, 64)
	case []byte:
		// Parse with bitSize 64: the previous bitSize of 32 silently rounded
		// []byte input to float32 precision.
		return strconv.ParseFloat(string(v), 64)
	case nil:
		return 0, nil
	default:
		return 0, errors.New("Unknown type")
	}
}
// MustFloat64 converts argument to float64 or panic if an error occurred.
func MustFloat64(value interface{}) float64 {
v, err := Float64(value)
if err != nil {
panic(err)
}
return v
} | vendor/github.com/frozzare/go/cast/float.go | 0.785514 | 0.476032 | float.go | starcoder |
package ewkb
import (
"database/sql/driver"
"fmt"
"strings"
"github.com/twpayne/go-geom"
"github.com/twpayne/go-geom/encoding/wkbcommon"
)
// ErrExpectedByteSlice is returned by the Scan methods when the database
// driver supplies something other than a []byte.
type ErrExpectedByteSlice struct {
	Value interface{}
}

// Error implements the error interface.
func (e ErrExpectedByteSlice) Error() string {
	return fmt.Sprintf("wkb: want []byte, got %T", e.Value)
}

// A Point is a EWKB-encoded Point that implements the sql.Scanner and
// driver.Value interfaces.
type Point struct {
	*geom.Point
}

// A LineString is a EWKB-encoded LineString that implements the
// sql.Scanner and driver.Value interfaces.
type LineString struct {
	*geom.LineString
}

// A Polygon is a EWKB-encoded Polygon that implements the sql.Scanner and
// driver.Value interfaces.
type Polygon struct {
	*geom.Polygon
}

// A MultiPoint is a EWKB-encoded MultiPoint that implements the
// sql.Scanner and driver.Value interfaces.
type MultiPoint struct {
	*geom.MultiPoint
}

// A MultiLineString is a EWKB-encoded MultiLineString that implements the
// sql.Scanner and driver.Value interfaces.
type MultiLineString struct {
	*geom.MultiLineString
}

// A MultiPolygon is a EWKB-encoded MultiPolygon that implements the
// sql.Scanner and driver.Value interfaces.
type MultiPolygon struct {
	*geom.MultiPolygon
}

// A GeometryCollection is a EWKB-encoded GeometryCollection that implements
// the sql.Scanner and driver.Value interfaces.
type GeometryCollection struct {
	*geom.GeometryCollection
}
// Scan scans a EWKB-encoded Point from a []byte. A nil src clears p.
func (p *Point) Scan(src interface{}) error {
	if src == nil {
		p.Point = nil
		return nil
	}
	b, ok := src.([]byte)
	if !ok {
		return ErrExpectedByteSlice{Value: src}
	}
	got, err := Unmarshal(b)
	if err != nil {
		return err
	}
	p1, ok := got.(*geom.Point)
	if !ok {
		// Report the geometry actually decoded; p1 is nil after a failed
		// assertion and would make the error claim the wrong type.
		return wkbcommon.ErrUnexpectedType{Got: got, Want: p}
	}
	p.Point = p1
	return nil
}

// Valid returns true if p has a value.
func (p *Point) Valid() bool {
	return p != nil && p.Point != nil
}

// Value returns the EWKB encoding of p, or nil when p holds no geometry.
func (p *Point) Value() (driver.Value, error) {
	if p.Point == nil {
		return nil, nil
	}
	return value(p.Point)
}
// Scan scans a EWKB-encoded LineString from a []byte. A nil src clears ls.
func (ls *LineString) Scan(src interface{}) error {
	if src == nil {
		ls.LineString = nil
		return nil
	}
	b, ok := src.([]byte)
	if !ok {
		return ErrExpectedByteSlice{Value: src}
	}
	got, err := Unmarshal(b)
	if err != nil {
		return err
	}
	ls1, ok := got.(*geom.LineString)
	if !ok {
		// Report the geometry actually decoded; ls1 is nil after a failed
		// assertion and would make the error claim the wrong type.
		return wkbcommon.ErrUnexpectedType{Got: got, Want: ls}
	}
	ls.LineString = ls1
	return nil
}

// Valid returns true if ls has a value.
func (ls *LineString) Valid() bool {
	return ls != nil && ls.LineString != nil
}

// Value returns the EWKB encoding of ls, or nil when ls holds no geometry.
func (ls *LineString) Value() (driver.Value, error) {
	if ls.LineString == nil {
		return nil, nil
	}
	return value(ls.LineString)
}
// Scan scans a EWKB-encoded Polygon from a []byte. A nil src clears p.
func (p *Polygon) Scan(src interface{}) error {
	if src == nil {
		p.Polygon = nil
		return nil
	}
	b, ok := src.([]byte)
	if !ok {
		return ErrExpectedByteSlice{Value: src}
	}
	got, err := Unmarshal(b)
	if err != nil {
		return err
	}
	p1, ok := got.(*geom.Polygon)
	if !ok {
		// Report the geometry actually decoded; p1 is nil after a failed
		// assertion and would make the error claim the wrong type.
		return wkbcommon.ErrUnexpectedType{Got: got, Want: p}
	}
	p.Polygon = p1
	return nil
}

// Valid returns true if p has a value.
func (p *Polygon) Valid() bool {
	return p != nil && p.Polygon != nil
}

// Value returns the EWKB encoding of p, or nil when p holds no geometry.
func (p *Polygon) Value() (driver.Value, error) {
	if p.Polygon == nil {
		return nil, nil
	}
	return value(p.Polygon)
}
// Scan scans a EWKB-encoded MultiPoint from a []byte. A nil src clears mp.
func (mp *MultiPoint) Scan(src interface{}) error {
	if src == nil {
		mp.MultiPoint = nil
		return nil
	}
	b, ok := src.([]byte)
	if !ok {
		return ErrExpectedByteSlice{Value: src}
	}
	got, err := Unmarshal(b)
	if err != nil {
		return err
	}
	mp1, ok := got.(*geom.MultiPoint)
	if !ok {
		// Report the geometry actually decoded; mp1 is nil after a failed
		// assertion and would make the error claim the wrong type.
		return wkbcommon.ErrUnexpectedType{Got: got, Want: mp}
	}
	mp.MultiPoint = mp1
	return nil
}

// Valid returns true if mp has a value.
func (mp *MultiPoint) Valid() bool {
	return mp != nil && mp.MultiPoint != nil
}

// Value returns the EWKB encoding of mp, or nil when mp holds no geometry.
func (mp *MultiPoint) Value() (driver.Value, error) {
	if mp.MultiPoint == nil {
		return nil, nil
	}
	return value(mp.MultiPoint)
}
// Scan scans a EWKB-encoded MultiLineString from a []byte. A nil src clears
// mls.
func (mls *MultiLineString) Scan(src interface{}) error {
	if src == nil {
		mls.MultiLineString = nil
		return nil
	}
	b, ok := src.([]byte)
	if !ok {
		return ErrExpectedByteSlice{Value: src}
	}
	got, err := Unmarshal(b)
	if err != nil {
		return err
	}
	mls1, ok := got.(*geom.MultiLineString)
	if !ok {
		// Report the geometry actually decoded; mls1 is nil after a failed
		// assertion and would make the error claim the wrong type.
		return wkbcommon.ErrUnexpectedType{Got: got, Want: mls}
	}
	mls.MultiLineString = mls1
	return nil
}

// Valid returns true if mls has a value.
func (mls *MultiLineString) Valid() bool {
	return mls != nil && mls.MultiLineString != nil
}

// Value returns the EWKB encoding of mls, or nil when mls holds no geometry.
func (mls *MultiLineString) Value() (driver.Value, error) {
	if mls.MultiLineString == nil {
		return nil, nil
	}
	return value(mls.MultiLineString)
}
// Scan scans a EWKB-encoded MultiPolygon from a []byte. A nil src clears mp.
func (mp *MultiPolygon) Scan(src interface{}) error {
	if src == nil {
		mp.MultiPolygon = nil
		return nil
	}
	b, ok := src.([]byte)
	if !ok {
		return ErrExpectedByteSlice{Value: src}
	}
	got, err := Unmarshal(b)
	if err != nil {
		return err
	}
	mp1, ok := got.(*geom.MultiPolygon)
	if !ok {
		// Report the geometry actually decoded; mp1 is nil after a failed
		// assertion and would make the error claim the wrong type.
		return wkbcommon.ErrUnexpectedType{Got: got, Want: mp}
	}
	mp.MultiPolygon = mp1
	return nil
}

// Valid returns true if mp has a value.
func (mp *MultiPolygon) Valid() bool {
	return mp != nil && mp.MultiPolygon != nil
}

// Value returns the EWKB encoding of mp, or nil when mp holds no geometry.
func (mp *MultiPolygon) Value() (driver.Value, error) {
	if mp.MultiPolygon == nil {
		return nil, nil
	}
	return value(mp.MultiPolygon)
}
// Scan scans a EWKB-encoded GeometryCollection from a []byte. A nil src
// clears gc.
func (gc *GeometryCollection) Scan(src interface{}) error {
	if src == nil {
		gc.GeometryCollection = nil
		return nil
	}
	b, ok := src.([]byte)
	if !ok {
		return ErrExpectedByteSlice{Value: src}
	}
	got, err := Unmarshal(b)
	if err != nil {
		return err
	}
	gc1, ok := got.(*geom.GeometryCollection)
	if !ok {
		// Report the geometry actually decoded; gc1 is nil after a failed
		// assertion and would make the error claim the wrong type.
		return wkbcommon.ErrUnexpectedType{Got: got, Want: gc}
	}
	gc.GeometryCollection = gc1
	return nil
}

// Valid returns true if gc has a value.
func (gc *GeometryCollection) Valid() bool {
	return gc != nil && gc.GeometryCollection != nil
}

// Value returns the EWKB encoding of gc, or nil when gc holds no geometry.
func (gc *GeometryCollection) Value() (driver.Value, error) {
	if gc.GeometryCollection == nil {
		return nil, nil
	}
	return value(gc.GeometryCollection)
}
// value encodes g as EWKB (little-endian, NDR) and returns the bytes as a
// driver.Value.
// NOTE(review): strings.Builder -> String() -> []byte copies the encoded
// payload an extra time; a bytes.Buffer with Bytes() would avoid one copy.
func value(g geom.T) (driver.Value, error) {
	sb := &strings.Builder{}
	if err := Write(sb, NDR, g); err != nil {
		return nil, err
	}
	return []byte(sb.String()), nil
}
package rename
import (
"github.com/ozontech/file.d/cfg"
"github.com/ozontech/file.d/fd"
"github.com/ozontech/file.d/pipeline"
)
/*{ introduction
It renames the fields of the event. You can provide an unlimited number of config parameters. Each parameter handled as `cfg.FieldSelector`:`string`.
When `override` is set to `false`, the field won't be renamed in the case of field name collision.
Sequence of rename operations isn't guaranteed. Use different actions for prioritization.
**Example:**
```yaml
pipelines:
example_pipeline:
...
actions:
- type: rename
override: false
my_object.field.subfield: new_sub_field
...
```
The resulting event could look like:
```yaml
{
  "my_object": {
    "field": {
      "new_sub_field": "value"
    }
  }
}
```
}*/
// Plugin renames event fields according to the configured
// selector -> new-name map.
type Plugin struct {
	paths [][]string // parsed field selectors to rename, parallel to names
	names []string // target field names, parallel to paths
	preserveFields bool // when true, skip a rename if the target name already exists
}

// Config is a free-form map: the special "override" key plus
// field-selector -> new-name pairs.
type Config map[string]interface{}

// init registers the action plugin under the "rename" type.
func init() {
	fd.DefaultPluginRegistry.RegisterAction(&pipeline.PluginStaticInfo{
		Type: "rename",
		Factory: factory,
	})
}

// factory creates a fresh plugin instance together with its config.
func factory() (pipeline.AnyPlugin, pipeline.AnyConfig) {
	return &Plugin{}, &Config{}
}
// Start reads the plugin configuration: the optional "override" flag and a
// map of field-selector -> new-name rename rules. The selectors are parsed
// up front and stored, together with the target names, in parallel slices.
func (p *Plugin) Start(config pipeline.AnyConfig, _ *pipeline.ActionPluginParams) {
	sharedConfig := *config.(*Config)
	localConfig := make(map[string]interface{}) // clone shared config to be able to modify it
	for k, v := range sharedConfig {
		localConfig[k] = v
	}
	// preserveFields is the inverse of "override": when override is absent or
	// false, existing target fields are preserved.
	// NOTE(review): a non-bool "override" value panics on the type assertion
	// here — confirm whether config validation upstream guarantees a bool.
	p.preserveFields = localConfig["override"] == nil || !localConfig["override"].(bool)
	delete(localConfig, "override")
	m := cfg.UnescapeMap(localConfig)
	for path, name := range m {
		selector := cfg.ParseFieldSelector(path)
		p.paths = append(p.paths, selector)
		p.names = append(p.names, name)
	}
}
// Stop is a no-op; the plugin holds no resources to release.
func (p *Plugin) Stop() {
}
func (p *Plugin) Do(event *pipeline.Event) pipeline.ActionResult {
for index, path := range p.paths {
if p.preserveFields {
if event.Root.Dig(p.names[index]) != nil {
continue
}
}
node := event.Root.Dig(path...)
if node == nil {
continue
}
node.Suicide()
event.Root.AddFieldNoAlloc(event.Root, p.names[index]).MutateToNode(node)
}
return pipeline.ActionPass
} | plugin/action/rename/rename.go | 0.76999 | 0.497192 | rename.go | starcoder |
package collectors
import (
"encoding/json"
"fmt"
"strconv"
"strings"
"time"
"bosun.org/metadata"
"bosun.org/opentsdb"
"bosun.org/slog"
"bosun.org/util"
)
// init registers all Dell OpenManage (omreport) hardware collectors, each
// polled every five minutes.
func init() {
	const interval = time.Minute * 5
	collectors = append(collectors,
		&IntervalCollector{F: c_omreport_chassis, Interval: interval},
		&IntervalCollector{F: c_omreport_fans, Interval: interval},
		&IntervalCollector{F: c_omreport_memory, Interval: interval},
		&IntervalCollector{F: c_omreport_processors, Interval: interval},
		&IntervalCollector{F: c_omreport_ps, Interval: interval},
		&IntervalCollector{F: c_omreport_ps_amps_sysboard_pwr, Interval: interval},
		&IntervalCollector{F: c_omreport_storage_battery, Interval: interval},
		&IntervalCollector{F: c_omreport_storage_controller, Interval: interval},
		&IntervalCollector{F: c_omreport_storage_enclosure, Interval: interval},
		&IntervalCollector{F: c_omreport_storage_vdisk, Interval: interval},
		&IntervalCollector{F: c_omreport_system, Interval: interval},
		&IntervalCollector{F: c_omreport_temps, Interval: interval},
		&IntervalCollector{F: c_omreport_volts, Interval: interval},
	)
}
// c_omreport_chassis reports the overall health of each chassis component.
func c_omreport_chassis() (opentsdb.MultiDataPoint, error) {
	var md opentsdb.MultiDataPoint
	readOmreport(func(fields []string) {
		// Skip the header row and any line that is not exactly two columns.
		if len(fields) != 2 || fields[0] == "SEVERITY" {
			return
		}
		name := strings.Replace(fields[1], " ", "_", -1)
		Add(&md, "hw.chassis", severity(fields[0]), opentsdb.TagSet{"component": name}, metadata.Gauge, metadata.Ok, descDellHWChassis)
	}, "chassis")
	return md, nil
}
// c_omreport_system reports the overall health of each system component.
func c_omreport_system() (opentsdb.MultiDataPoint, error) {
	var md opentsdb.MultiDataPoint
	readOmreport(func(fields []string) {
		// Skip the header row and any line that is not exactly two columns.
		if len(fields) != 2 || fields[0] == "SEVERITY" {
			return
		}
		name := strings.Replace(fields[1], " ", "_", -1)
		Add(&md, "hw.system", severity(fields[0]), opentsdb.TagSet{"component": name}, metadata.Gauge, metadata.Ok, descDellHWSystem)
	}, "system")
	return md, nil
}
// c_omreport_storage_enclosure reports the overall status of each storage
// enclosure, tagged by a sanitized enclosure ID.
func c_omreport_storage_enclosure() (opentsdb.MultiDataPoint, error) {
	var md opentsdb.MultiDataPoint
	readOmreport(func(fields []string) {
		// Skip the header row and malformed lines.
		if len(fields) < 3 || fields[0] == "ID" {
			return
		}
		encID := strings.Replace(fields[0], ":", "_", -1)
		Add(&md, "hw.storage.enclosure", severity(fields[1]), opentsdb.TagSet{"id": encID}, metadata.Gauge, metadata.Ok, descDellHWStorageEnc)
	}, "storage", "enclosure")
	return md, nil
}
// c_omreport_storage_vdisk reports the overall status of each virtual disk,
// tagged by a sanitized virtual-disk ID.
func c_omreport_storage_vdisk() (opentsdb.MultiDataPoint, error) {
	var md opentsdb.MultiDataPoint
	readOmreport(func(fields []string) {
		// Skip the header row and malformed lines.
		if len(fields) < 3 || fields[0] == "ID" {
			return
		}
		vdiskID := strings.Replace(fields[0], ":", "_", -1)
		Add(&md, "hw.storage.vdisk", severity(fields[1]), opentsdb.TagSet{"id": vdiskID}, metadata.Gauge, metadata.Ok, descDellHWVDisk)
	}, "storage", "vdisk")
	return md, nil
}
// c_omreport_ps reports power-supply status, tagged by a sanitized supply
// index, and attaches rated input/output wattage as metadata when present.
func c_omreport_ps() (opentsdb.MultiDataPoint, error) {
	var md opentsdb.MultiDataPoint
	readOmreport(func(fields []string) {
		// Skip the header row ("Index;...") and malformed lines.
		if len(fields) < 3 || fields[0] == "Index" {
			return
		}
		id := strings.Replace(fields[0], ":", "_", -1)
		ts := opentsdb.TagSet{"id": id}
		Add(&md, "hw.ps", severity(fields[1]), ts, metadata.Gauge, metadata.Ok, descDellHWPS)
		pm := &metadata.HWPowerSupply{}
		// Wattage columns (fields 4 and 5) can be absent; bail out before
		// indexing past the end of the row.
		if len(fields) < 6 {
			return
		}
		if fields[4] != "" {
			pm.RatedInputWattage = fields[4]
		}
		if fields[5] != "" {
			pm.RatedOutputWattage = fields[5]
		}
		if j, err := json.Marshal(&pm); err == nil {
			metadata.AddMeta("", ts, "psMeta", string(j), true)
		} else {
			slog.Error(err)
		}
	}, "chassis", "pwrsupplies")
	return md, nil
}
// c_omreport_ps_amps_sysboard_pwr collects per-power-supply amperage and
// system-board power readings (current value plus the configured warning and
// failure thresholds) from "omreport chassis pwrmonitoring".
func c_omreport_ps_amps_sysboard_pwr() (opentsdb.MultiDataPoint, error) {
	var md opentsdb.MultiDataPoint
	readOmreport(func(fields []string) {
		if len(fields) == 2 && strings.Contains(fields[0], "Current") {
			i_fields := strings.Split(fields[0], "Current")
			v_fields := strings.Fields(fields[1])
			// Bug fix: the original guard used &&, which only skipped the
			// line when BOTH slices were short — a row with a well-formed
			// label but an empty/short value column reached v_fields[0]
			// and panicked. Require both to be well-formed.
			if len(i_fields) < 2 || len(v_fields) < 2 {
				return
			}
			id := strings.Replace(i_fields[0], " ", "", -1)
			Add(&md, "hw.chassis.current.reading", v_fields[0], opentsdb.TagSet{"id": id}, metadata.Gauge, metadata.A, descDellHWCurrent)
		} else if len(fields) == 6 && (fields[2] == "System Board Pwr Consumption" || fields[2] == "System Board System Level") {
			v_fields := strings.Fields(fields[3])
			warn_fields := strings.Fields(fields[4])
			fail_fields := strings.Fields(fields[5])
			// Each reading is expected as "<value> <unit>".
			if len(v_fields) < 2 || len(warn_fields) < 2 || len(fail_fields) < 2 {
				return
			}
			Add(&md, "hw.chassis.power.reading", v_fields[0], nil, metadata.Gauge, metadata.Watt, descDellHWPower)
			Add(&md, "hw.chassis.power.warn_level", warn_fields[0], nil, metadata.Gauge, metadata.Watt, descDellHWPowerThreshold)
			Add(&md, "hw.chassis.power.fail_level", fail_fields[0], nil, metadata.Gauge, metadata.Watt, descDellHWPowerThreshold)
		}
	}, "chassis", "pwrmonitoring")
	return md, nil
}
// c_omreport_storage_battery reports the status of storage-controller
// backup batteries, tagged by a sanitized battery ID.
func c_omreport_storage_battery() (opentsdb.MultiDataPoint, error) {
	var md opentsdb.MultiDataPoint
	readOmreport(func(fields []string) {
		// Skip the header row and malformed lines.
		if len(fields) < 3 || fields[0] == "ID" {
			return
		}
		batteryID := strings.Replace(fields[0], ":", "_", -1)
		Add(&md, "hw.storage.battery", severity(fields[1]), opentsdb.TagSet{"id": batteryID}, metadata.Gauge, metadata.Ok, descDellHWStorageBattery)
	}, "storage", "battery")
	return md, nil
}
// c_omreport_storage_controller reports storage-controller status, triggers
// collection of the physical disks behind each controller, and attaches
// controller metadata (name, slot, state, firmware and driver versions).
func c_omreport_storage_controller() (opentsdb.MultiDataPoint, error) {
	var md opentsdb.MultiDataPoint
	readOmreport(func(fields []string) {
		// Skip the header row and malformed lines.
		if len(fields) < 3 || fields[0] == "ID" {
			return
		}
		// The pdisk query needs the raw (unsanitized) controller ID.
		c_omreport_storage_pdisk(fields[0], &md)
		id := strings.Replace(fields[0], ":", "_", -1)
		ts := opentsdb.TagSet{"id": id}
		Add(&md, "hw.storage.controller", severity(fields[1]), ts, metadata.Gauge, metadata.Ok, descDellHWStorageCtl)
		cm := &metadata.HWControllerMeta{}
		// Metadata columns may be missing; bail out before indexing past
		// the end of the row.
		if len(fields) < 8 {
			return
		}
		if fields[2] != "" {
			cm.Name = fields[2]
		}
		if fields[3] != "" {
			cm.SlotId = fields[3]
		}
		if fields[4] != "" {
			cm.State = fields[4]
		}
		if fields[5] != "" {
			cm.FirmwareVersion = fields[5]
		}
		if fields[7] != "" {
			cm.DriverVersion = fields[7]
		}
		if j, err := json.Marshal(&cm); err == nil {
			metadata.AddMeta("", ts, "controllerMeta", string(j), true)
		} else {
			slog.Error(err)
		}
	}, "storage", "controller")
	return md, nil
}
// c_omreport_storage_pdisk is called from the controller func, since it needs the encapsulating id.
// It reports per-physical-disk status (tagged by a sanitized disk ID) and,
// when the row carries enough columns, attaches disk metadata: media type,
// capacity, vendor/product IDs, serial, part number, negotiated/capable
// speeds and sector size.
func c_omreport_storage_pdisk(id string, md *opentsdb.MultiDataPoint) {
	readOmreport(func(fields []string) {
		// Skip the header row and malformed lines.
		if len(fields) < 3 || fields[0] == "ID" {
			return
		}
		//Need to find out what the various ID formats might be
		id := strings.Replace(fields[0], ":", "_", -1)
		ts := opentsdb.TagSet{"id": id}
		Add(md, "hw.storage.pdisk", severity(fields[1]), ts, metadata.Gauge, metadata.Ok, descDellHWPDisk)
		// The metadata columns below only exist on full-width rows.
		if len(fields) < 32 {
			return
		}
		dm := &metadata.HWDiskMeta{}
		if fields[2] != "" {
			dm.Name = fields[2]
		}
		if fields[6] != "" {
			dm.Media = fields[6]
		}
		if fields[19] != "" {
			dm.Capacity = fields[19]
		}
		if fields[23] != "" {
			dm.VendorId = fields[23]
		}
		if fields[24] != "" {
			dm.ProductId = fields[24]
		}
		if fields[25] != "" {
			dm.Serial = fields[25]
		}
		if fields[26] != "" {
			dm.Part = fields[26]
		}
		if fields[27] != "" {
			dm.NegotatiedSpeed = fields[27]
		}
		if fields[28] != "" {
			dm.CapableSpeed = fields[28]
		}
		if fields[31] != "" {
			dm.SectorSize = fields[31]
		}
		if j, err := json.Marshal(&dm); err == nil {
			metadata.AddMeta("", ts, "physicalDiskMeta", string(j), true)
		} else {
			slog.Error(err)
		}
	}, "storage", "pdisk", "controller="+id)
}
// c_omreport_processors reports CPU status and attaches a metadata entry
// with the processor description.
func c_omreport_processors() (opentsdb.MultiDataPoint, error) {
	var md opentsdb.MultiDataPoint
	readOmreport(func(fields []string) {
		if len(fields) != 8 {
			return
		}
		// Data rows start with a numeric index; anything else is a header.
		if _, err := strconv.Atoi(fields[0]); err != nil {
			return
		}
		tags := opentsdb.TagSet{"name": replace(fields[2])}
		Add(&md, "hw.chassis.processor", severity(fields[1]), tags, metadata.Gauge, metadata.Ok, descDellHWCPU)
		metadata.AddMeta("", tags, "processor", clean(fields[3], fields[4]), true)
	}, "chassis", "processors")
	return md, nil
}
// c_omreport_fans reports fan status and, when a reading in RPM is present,
// the fan speed itself.
func c_omreport_fans() (opentsdb.MultiDataPoint, error) {
	var md opentsdb.MultiDataPoint
	readOmreport(func(fields []string) {
		if len(fields) != 8 {
			return
		}
		// Data rows start with a numeric index; anything else is a header.
		if _, err := strconv.Atoi(fields[0]); err != nil {
			return
		}
		tags := opentsdb.TagSet{"name": replace(fields[2])}
		Add(&md, "hw.chassis.fan", severity(fields[1]), tags, metadata.Gauge, metadata.Ok, descDellHWFan)
		// The reading column looks like "4320 RPM".
		reading := strings.Fields(fields[3])
		if len(reading) == 2 && reading[1] == "RPM" {
			if rpm, err := strconv.Atoi(reading[0]); err == nil {
				Add(&md, "hw.chassis.fan.reading", rpm, tags, metadata.Gauge, metadata.RPM, descDellHWFanSpeed)
			}
		}
	}, "chassis", "fans")
	return md, nil
}
// c_omreport_memory reports per-DIMM status and attaches a metadata entry
// with the module description.
func c_omreport_memory() (opentsdb.MultiDataPoint, error) {
	var md opentsdb.MultiDataPoint
	readOmreport(func(fields []string) {
		if len(fields) != 5 {
			return
		}
		// Data rows start with a numeric index; anything else is a header.
		if _, err := strconv.Atoi(fields[0]); err != nil {
			return
		}
		tags := opentsdb.TagSet{"name": replace(fields[2])}
		Add(&md, "hw.chassis.memory", severity(fields[1]), tags, metadata.Gauge, metadata.Ok, descDellHWMemory)
		metadata.AddMeta("", tags, "memory", clean(fields[4]), true)
	}, "chassis", "memory")
	return md, nil
}
// c_omreport_temps reports temperature-probe status and, when a reading in
// Celsius is present, the temperature itself.
func c_omreport_temps() (opentsdb.MultiDataPoint, error) {
	var md opentsdb.MultiDataPoint
	readOmreport(func(fields []string) {
		if len(fields) != 8 {
			return
		}
		// Data rows start with a numeric index; anything else is a header.
		if _, err := strconv.Atoi(fields[0]); err != nil {
			return
		}
		tags := opentsdb.TagSet{"name": replace(fields[2])}
		Add(&md, "hw.chassis.temps", severity(fields[1]), tags, metadata.Gauge, metadata.Ok, descDellHWTemp)
		// The reading column looks like "27.0 C".
		reading := strings.Fields(fields[3])
		if len(reading) == 2 && reading[1] == "C" {
			if degrees, err := strconv.ParseFloat(reading[0], 64); err == nil {
				Add(&md, "hw.chassis.temps.reading", degrees, tags, metadata.Gauge, metadata.C, descDellHWTempReadings)
			}
		}
	}, "chassis", "temps")
	return md, nil
}
// c_omreport_volts reports voltage-probe status and, when a reading with a
// "V" suffix parses, the voltage itself.
func c_omreport_volts() (opentsdb.MultiDataPoint, error) {
	var md opentsdb.MultiDataPoint
	readOmreport(func(fields []string) {
		if len(fields) != 8 {
			return
		}
		// Data rows start with a numeric index; anything else is a header.
		if _, err := strconv.Atoi(fields[0]); err != nil {
			return
		}
		tags := opentsdb.TagSet{"name": replace(fields[2])}
		Add(&md, "hw.chassis.volts", severity(fields[1]), tags, metadata.Gauge, metadata.Ok, descDellHWVolt)
		if volts, err := extract(fields[3], "V"); err == nil {
			Add(&md, "hw.chassis.volts.reading", volts, tags, metadata.Gauge, metadata.V, descDellHWVoltReadings)
		}
	}, "chassis", "volts")
	return md, nil
}
// extract tries to return a parsed number from s with the given suffix. A
// space may be present between the number and the suffix.
func extract(s, suffix string) (float64, error) {
	if !strings.HasSuffix(s, suffix) {
		return 0, fmt.Errorf("extract: suffix not found")
	}
	trimmed := strings.TrimSpace(strings.TrimSuffix(s, suffix))
	return strconv.ParseFloat(trimmed, 64)
}
// severity returns 0 if s is not "Ok" or "Non-Critical", else 1.
func severity(s string) int {
if s != "Ok" && s != "Non-Critical" {
return 1
}
return 0
}
// readOmreport runs omreport with the given arguments (forcing
// semicolon-separated output) and feeds each cleaned, split line to f.
func readOmreport(f func([]string), args ...string) {
	args = append(args, "-fmt", "ssv")
	// Errors are deliberately ignored: a missing tool or failed invocation
	// simply produces no data points.
	_ = util.ReadCommand(func(line string) error {
		parts := strings.Split(line, ";")
		for i := range parts {
			parts[i] = clean(parts[i])
		}
		f(parts)
		return nil
	}, "omreport", args...)
}
// clean joins its arguments with single spaces and collapses any run of
// whitespace down to one space, trimming the ends.
func clean(ss ...string) string {
	return strings.Join(strings.Fields(strings.Join(ss, " ")), " ")
}
// replace sanitizes name into a valid OpenTSDB tag value, substituting "_"
// for disallowed characters. Replacement errors are intentionally ignored.
func replace(name string) string {
	r, _ := opentsdb.Replace(name, "_")
	return r
}
// Metric descriptions registered alongside each data point.
// (Fix: the closing paren of this block had non-Go dataset residue fused
// onto it, which broke compilation; it has been removed.)
const (
	descDellHWChassis        = "Overall status of chassis components."
	descDellHWSystem         = "Overall status of system components."
	descDellHWStorageEnc     = "Overall status of storage enclosures."
	descDellHWVDisk          = "Overall status of virtual disks."
	descDellHWPS             = "Overall status of power supplies."
	descDellHWCurrent        = "Amps used per power supply."
	descDellHWPower          = "System board power usage."
	descDellHWPowerThreshold = "The warning and failure levels set on the device for system board power usage."
	descDellHWStorageBattery = "Status of storage controller backup batteries."
	descDellHWStorageCtl     = "Overall status of storage controllers."
	descDellHWPDisk          = "Overall status of physical disks."
	descDellHWCPU            = "Overall status of CPUs."
	descDellHWFan            = "Overall status of system fans."
	descDellHWFanSpeed       = "System fan speed."
	descDellHWMemory         = "System RAM DIMM status."
	descDellHWTemp           = "Overall status of system temperature readings."
	descDellHWTempReadings   = "System temperature readings."
	descDellHWVolt           = "Overall status of power supply volt readings."
	descDellHWVoltReadings   = "Volts used per power supply."
)
package theory
import (
"fmt"
"sort"
"strings"
"github.com/go-audio/midi"
)
// Chords is a slice of chords; it carries helpers for serialization,
// de-duplication, sorting and scale analysis.
type Chords []*Chord
// ToBytes compresses a slice of chords where each chord is represented by a
// byte. A dictionary is also returned so a byte can be converted back to a
// chord (but the octave and repeated notes within the chord will be lost).
func (chords Chords) ToBytes() (data []byte, dict map[byte]string) {
	// Build forward and reverse dictionaries over the unique chord names,
	// assigning bytes starting at 1.
	uniqueNames := strings.Split(chords.Uniques().String(), ",")
	nameToByte := make(map[string]byte, len(uniqueNames))
	dict = make(map[byte]string, len(uniqueNames))
	next := byte(1)
	for _, name := range uniqueNames {
		dict[next] = name
		nameToByte[name] = next
		next++
	}
	// Encode the full sequence, one byte per chord.
	for _, name := range strings.Split(chords.String(), ",") {
		data = append(data, nameToByte[name])
	}
	return data, dict
}
// ChordsFromBytes converts an encoded slice of bytes back to a slice of
// chords using the passed dictionary. Note that this is a lossy conversion:
// the octave of the original chords is not recoverable.
func ChordsFromBytes(data []byte, dict map[byte]string) Chords {
	out := make(Chords, len(data))
	for i, encoded := range data {
		out[i] = NewChordFromAbbrev(dict[encoded])
	}
	return out
}
// String converts the sequence of chords into a comma-delimited string such
// as Bmin,Dmaj,F#min,Emaj. Nil chords and chords with fewer than two keys
// are skipped.
//
// Fixes over the previous implementation: a skipped final chord no longer
// leaves a trailing comma, and a nil chord in the middle of the slice no
// longer dereferences a nil pointer (the old code only nil-checked the
// single-element case).
func (chords Chords) String() string {
	names := make([]string, 0, len(chords))
	for _, c := range chords {
		if c == nil || len(c.Keys) < 2 {
			continue
		}
		names = append(names, c.AbbrevName())
	}
	return strings.Join(names, ",")
}
// RootNotes returns the root note of each chord, as an int, in order.
func (chords Chords) RootNotes() []int {
	roots := make([]int, len(chords))
	for i, chord := range chords {
		roots[i] = chord.Def().RootInt()
	}
	return roots
}
// UniqueNotes returns an octave-less (mod 12) list of the distinct notes
// used across all chords. The order of the result is unspecified (it
// follows map iteration order).
//
// Fix: the previous version initialized notes with a throwaway empty slice
// (`notes := []int{}`) that was immediately overwritten by make.
func (chords Chords) UniqueNotes() []int {
	seen := map[int]bool{}
	for _, c := range chords {
		for _, k := range c.Keys {
			seen[k%12] = true
		}
	}
	notes := make([]int, 0, len(seen))
	for n := range seen {
		notes = append(notes, n)
	}
	return notes
}
// EligibleScales returns a list of potentially matching scales.
// This can be used to calculate the chord progression within a scale.
// The candidate set is derived from the unique chords' root notes, sorted
// in ascending order.
func (chords Chords) EligibleScales() Scales {
	notes := chords.Uniques().SortedOnRoots().RootNotes()
	return EligibleScalesForNotes(notes)
}
// Uniques returns the distinct chords (keyed by abbreviated name) in order
// of first appearance.
//
// Fix: the previous version collected results by iterating a map, so the
// order of the returned chords changed from run to run; that made the
// byte dictionary produced by ToBytes nondeterministic. Appending during
// the first pass keeps a stable, first-seen order.
func (chords Chords) Uniques() Chords {
	uChords := Chords{}
	seen := map[string]bool{}
	for _, c := range chords {
		name := c.AbbrevName()
		if seen[name] {
			continue
		}
		seen[name] = true
		uChords = append(uChords, c)
	}
	return uChords
}
// SortedOnRoots sorts the chords in place by ascending root note and
// returns the receiver so calls can be chained.
func (chords Chords) SortedOnRoots() Chords {
	sort.Slice(chords, func(i, j int) bool {
		return chords[i].Def().RootInt() < chords[j].Def().RootInt()
	})
	return chords
}
// ProgressionDesc returns the roman numerals describing the figured chords.
func (chords Chords) ProgressionDesc() string {
var out string
notes := []int{}
for _, c := range chords {
if len(c.Keys) < 1 {
continue
}
notes = append(notes, midi.NotesToInt[c.Def().Root])
out += fmt.Sprintf("%s ", c.Def().Root)
}
out += "\n"
//get the possible scales for the root notes
scales := EligibleScalesForNotes(notes).Popular()
for _, scale := range scales {
scaleNotes, _ := ScaleNotes(midi.Notes[scale.Root%12], scale.Def.Name)
romanScale := RomanNumerals[scale.Def.Name]
for _, note := range notes {
idx := sliceIndex(len(scaleNotes), func(i int) bool { return scaleNotes[i] == note })
if len(romanScale) > 0 {
out += fmt.Sprintf("%s ", romanScale[idx])
} else {
out += fmt.Sprintf("%d ", idx)
}
}
out = fmt.Sprintf("%s in %s\n", out, scale.String())
}
return out
} | theory/chords.go | 0.713931 | 0.409693 | chords.go | starcoder |
package compactor
import (
"time"
"github.com/cockroachdb/cockroach/pkg/settings"
"github.com/pkg/errors"
)
// validateFraction returns nil when v lies in [0, 1]. NaN and +-Inf fail
// the comparison pair and therefore produce an error.
func validateFraction(v float64) error {
	if !(v >= 0 && v <= 1) { // handles +-Inf, NaN
		return errors.Errorf("value %v not between zero and one", v)
	}
	return nil
}
var enabled = settings.RegisterBoolSetting(
"compactor.enabled",
"when false, the system will reclaim space occupied by deleted data less aggressively",
true,
)
// minInterval indicates the minimum period of
// time to wait before any compaction activity is considered, after
// suggestions are made. The intent is to allow sufficient time for
// all ranges to be cleared when a big table is dropped, so the
// compactor can determine contiguous stretches and efficient delete
// sstable files.
var minInterval = func() *settings.DurationSetting {
	s := settings.RegisterDurationSetting(
		"compactor.min_interval",
		"minimum time interval to wait before compacting",
		15*time.Second,
	)
	// NOTE(review): SetSensitive presumably restricts how this value is
	// reported/changed — confirm against the settings package.
	s.SetSensitive()
	return s
}()
// thresholdBytes is the threshold in bytes of suggested
// reclamation, after which the compactor will begin processing
// (taking compactor min interval into account). Note that we want
// to target roughly the target size of an L6 SSTable (128MB) but
// these are logical bytes (as in, from MVCCStats) which can't be
// translated into SSTable-bytes. As a result, we conservatively set
// a higher threshold.
var thresholdBytes = func() *settings.ByteSizeSetting {
	s := settings.RegisterByteSizeSetting(
		"compactor.threshold_bytes",
		"minimum expected logical space reclamation required before considering an aggregated suggestion",
		256<<20, // more than 256MiB will trigger
	)
	s.SetSensitive()
	return s
}()
// thresholdBytesUsedFraction is the fraction of total logical
// bytes used which are up for suggested reclamation, after which
// the compactor will begin processing (taking compactor min
// interval into account). Note that this threshold handles the case
// where a table is dropped which is a significant fraction of the
// total space in the database, but does not exceed the absolute
// defaultThresholdBytes threshold.
var thresholdBytesUsedFraction = func() *settings.FloatSetting {
	s := settings.RegisterValidatedFloatSetting(
		"compactor.threshold_used_fraction",
		"consider suggestions for at least the given percentage of the used logical space (zero to disable)",
		0.10, // more than 10% of space will trigger
		validateFraction,
	)
	s.SetSensitive()
	return s
}()
// thresholdBytesAvailableFraction is the fraction of remaining
// available space on a disk, which, if exceeded by the size of a suggested
// compaction, should trigger the processing of said compaction. This
// threshold is meant to make compaction more aggressive when a store is
// nearly full, since reclaiming space is much more important in such
// scenarios.
var thresholdBytesAvailableFraction = func() *settings.FloatSetting {
	s := settings.RegisterValidatedFloatSetting(
		"compactor.threshold_available_fraction",
		"consider suggestions for at least the given percentage of the available logical space (zero to disable)",
		0.10, // more than 10% of space will trigger
		validateFraction,
	)
	s.SetSensitive()
	return s
}()
// maxSuggestedCompactionRecordAge is the maximum age of a
// suggested compaction record. If not processed within this time
// interval since the compaction was suggested, it will be deleted.
var maxSuggestedCompactionRecordAge = func() *settings.DurationSetting {
s := settings.RegisterNonNegativeDurationSetting(
"compactor.max_record_age",
"discard suggestions not processed within this duration",
24*time.Hour,
)
s.SetSensitive()
return s
}() | pkg/storage/compactor/settings.go | 0.656878 | 0.475727 | settings.go | starcoder |
package canvas
import (
"image/color"
"github.com/jesseduffield/fyne"
)
// Declare conformity with CanvasObject interface
var _ fyne.CanvasObject = (*Circle)(nil)

// Circle describes a colored circle primitive in a Fyne canvas. It is
// defined by the top-left and bottom-right corners of its bounding box
// rather than by a center point and radius.
type Circle struct {
	Position1 fyne.Position // The current top-left position of the Circle
	Position2 fyne.Position // The current bottom-right position of the Circle
	Hidden bool // Is this circle currently hidden
	FillColor color.Color // The circle fill color
	StrokeColor color.Color // The circle stroke color
	StrokeWidth float32 // The stroke width of the circle
}
// Size returns the current size of the bounding box for this circle object,
// derived from its two corner positions.
func (l *Circle) Size() fyne.Size {
	width := l.Position2.X - l.Position1.X
	height := l.Position2.Y - l.Position1.Y
	return fyne.NewSize(width, height)
}
// Resize sets a new bottom-right position for the circle object; the
// top-left corner (Position1) is left unchanged.
func (l *Circle) Resize(size fyne.Size) {
	l.Position2 = fyne.NewPos(l.Position1.X+size.Width, l.Position1.Y+size.Height)
}
// Position gets the current top-left position of this circle object,
// relative to its parent / canvas.
func (l *Circle) Position() fyne.Position {
	return l.Position1
}
// Move the circle object to a new position, relative to its parent /
// canvas. The circle's size is preserved: Position2 is recomputed from the
// new top-left corner.
func (l *Circle) Move(pos fyne.Position) {
	// Capture the size before mutating Position1, since Size derives from
	// both corners.
	sz := l.Size()
	l.Position1 = pos
	l.Position2 = fyne.NewPos(pos.X+sz.Width, pos.Y+sz.Height)
}
// MinSize for a Circle simply returns Size{1, 1} as there is no
// explicit content.
func (l *Circle) MinSize() fyne.Size {
	return fyne.NewSize(1, 1)
}
// Visible returns true if this circle is visible, false otherwise.
func (l *Circle) Visible() bool {
	return !l.Hidden
}
// Show will set this circle to be visible and trigger a redraw.
func (l *Circle) Show() {
	l.Hidden = false
	l.Refresh()
}
// Hide will set this circle to not be visible and trigger a redraw.
func (l *Circle) Hide() {
	l.Hidden = true
	l.Refresh()
}
// Refresh causes this object to be redrawn in its current state.
func (l *Circle) Refresh() {
	Refresh(l)
}
// NewCircle returns a new Circle instance
func NewCircle(color color.Color) *Circle {
return &Circle{
FillColor: color,
}
} | canvas/circle.go | 0.859162 | 0.400075 | circle.go | starcoder |
package google_graph
import (
"fmt"
"github.com/keep94/toolbox/http_util"
"net/url"
"strings"
)
const (
	// kGoogleAlphabet is the Google Chart API "simple encoding" alphabet:
	// 62 characters covering data values 0..61 (see scaleInt64For61, which
	// scales values into exactly that range). The previous placeholder
	// value ("<KEY>") was too short and would index out of range.
	kGoogleAlphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
)
// GraphData represents a one-dimensional dataset to be graphed: a titled
// sequence of labeled integer values addressed by 0-based index.
type GraphData interface {
	// The number of data points.
	Len() int
	// The title
	Title() string
	// The label of the 0-based idx data point.
	Label(idx int) string
	// The value of the 0-based idx data point.
	Value(idx int) int64
}
// GraphData2D represents a 2D dataset to be graphed: a grid of integer
// values with labeled X and Y axes, addressed by 0-based indexes.
type GraphData2D interface {
	// The number of X data points
	XLen() int
	// The number of Y data points
	YLen() int
	// Return 0-based label for X axis
	XLabel(x int) string
	// Return 0-based label for Y axis
	YLabel(y int) string
	// Return value at (x, y)
	Value(x, y int) int64
}
// Grapher returns the URL for a graph of a one-dimensional dataset.
type Grapher interface {
	GraphURL(gd GraphData) *url.URL
}
// Grapher2D returns the URL for a graph of a 2D dataset.
type Grapher2D interface {
	GraphURL2D(gd GraphData2D) *url.URL
}
// BarGraph builds a link to a google bar graph.
type BarGraph struct {
	// Palette consists of the RGB colors to use in the bar graph,
	// cycled through when there are more series than colors.
	// e.g []String{"FF0000", "00FF00", "0000FF"}
	Palette []string
	// a value of 10^Scale is one unit on bar graph.
	Scale int
}
// GraphURL returns a link to a bar graph displaying particular graph data.
// GraphURL returns nil if given graph data of length 0. It adapts the 1D
// data to a single-series 2D dataset and delegates to GraphURL2D.
func (b *BarGraph) GraphURL(gd GraphData) *url.URL {
	return b.GraphURL2D(to2D{gd})
}
// GraphURL2D returns a link to a bar graph displaying particular graph data.
// GraphURL2D returns nil if given graph data of length 0 in either dimension.
func (b *BarGraph) GraphURL2D(gd GraphData2D) *url.URL {
	xlength := gd.XLen()
	ylength := gd.YLen()
	if xlength <= 0 || ylength <= 0 {
		return nil
	}
	// One series (and one legend title) per Y value; each series holds one
	// bar per X value.
	labels := make([]string, xlength)
	titles := make([]string, ylength)
	values := make([][]int64, ylength)
	// The legend parameter (chdl) is only emitted when at least one series
	// has a non-empty title.
	var includeChdl bool
	for y := range values {
		titles[y] = gd.YLabel(y)
		if titles[y] != "" {
			includeChdl = true
		}
		values[y] = make([]int64, xlength)
	}
	for x := range labels {
		labels[x] = gd.XLabel(x)
		for y := range values {
			values[y][x] = gd.Value(x, y)
		}
	}
	max := maxInt64(values...)
	if max == 0 {
		max = 1
	}
	// Round the maximum up to a whole number of 10^Scale units...
	for i := 0; i < b.Scale; i++ {
		max = (max + 9) / 10
	}
	// ...keep the scaled value for the Y-axis range label (chxr)...
	actualMax := max
	// ...then scale back so the data encoding uses the raw value range.
	for i := 0; i < b.Scale; i++ {
		max *= 10
	}
	encoded := encodeInt64(max, values...)
	url, _ := url.Parse("http://chart.apis.google.com/chart")
	urlParams := []string{
		"chs", "500x250",
		"cht", "bvg",
		"chco", encodeColors(len(values), b.Palette, ","),
		"chd", encoded,
		"chxt", "x,y",
		"chbh", "a",
		"chxr", fmt.Sprintf("1,0,%d", actualMax),
		"chxl", fmt.Sprintf("0:|%s", strings.Join(labels, "|")),
		"chdl", strings.Join(titles, "|")}
	// If we aren't including chdl parameter, chop it off of end of parameter
	// list
	if !includeChdl {
		urlParams = urlParams[:len(urlParams)-2]
	}
	return http_util.AppendParams(url, urlParams...)
}
// PieGraph builds a link to a google pie graph.
type PieGraph struct {
	// Palette consists of the RGB colors to use in the pie graph,
	// cycled through when there are more slices than colors.
	// e.g []String{"FF0000", "00FF00", "0000FF"}
	Palette []string
}
// GraphURL returns a link to a pie graph displaying particular graph data.
// GraphURL returns nil if given graph data of length 0.
func (p *PieGraph) GraphURL(gd GraphData) *url.URL {
	length := gd.Len()
	if length <= 0 {
		return nil
	}
	// One labeled slice per data point.
	labels := make([]string, length)
	values := make([]int64, length)
	for idx := range labels {
		labels[idx] = gd.Label(idx)
		values[idx] = gd.Value(idx)
	}
	// Slices are encoded relative to the largest value.
	encoded := encodeInt64(maxInt64(values), values)
	url, _ := url.Parse("http://chart.apis.google.com/chart")
	return http_util.AppendParams(
		url,
		"chs", "500x250",
		"cht", "p3",
		"chco", encodeColors(len(values), p.Palette, "|"),
		"chd", encoded,
		"chdl", strings.Join(labels, "|"))
}
// to2D adapts a one-dimensional GraphData into the GraphData2D interface as
// a single Y series whose title becomes the series label.
type to2D struct {
	GraphData
}

// XLen maps the 1D length onto the X axis.
func (t to2D) XLen() int {
	return t.Len()
}

// YLen is always 1: the adapted data forms a single series.
func (t to2D) YLen() int {
	return 1
}

// XLabel forwards to the 1D data-point label.
func (t to2D) XLabel(x int) string {
	return t.Label(x)
}

// YLabel returns the dataset title for the single series.
func (t to2D) YLabel(x int) string {
	return t.Title()
}

// Value ignores y (there is only one series) and forwards to the 1D value.
func (t to2D) Value(x, y int) int64 {
	return t.GraphData.Value(x)
}
// encodeInt64 simple-encodes every dataset relative to max and joins them
// into a single "s:"-prefixed chart data parameter.
func encodeInt64(max int64, datasets ...[]int64) string {
	parts := make([]string, len(datasets))
	for i, ds := range datasets {
		parts[i] = _encodeInt64(ds, max)
	}
	return "s:" + strings.Join(parts, ",")
}
// _encodeInt64 maps each value onto one character of the simple-encoding
// alphabet, scaled so that max corresponds to the final character.
func _encodeInt64(data []int64, max int64) string {
	var b strings.Builder
	b.Grow(len(data))
	for _, v := range data {
		b.WriteByte(kGoogleAlphabet[scaleInt64For61(v, max)])
	}
	return b.String()
}
// scaleInt64For61 maps amount proportionally into 0..61 (relative to max),
// rounding to nearest. Non-positive amounts map to 0.
func scaleInt64For61(amount, max int64) int64 {
	switch {
	case amount <= 0:
		return 0
	default:
		return (amount*61 + max/2) / max
	}
}
// encodeColors produces a separator-joined list of count colors, cycling
// through the palette as needed.
func encodeColors(count int, palette []string, separator string) string {
	out := make([]string, count)
	for i := range out {
		out[i] = palette[i%len(palette)]
	}
	return strings.Join(out, separator)
}
// maxInt64 returns the largest value found across all of the given slices,
// or 0 when there is no data (or every value is negative).
func maxInt64(data ...[]int64) int64 {
	var best int64
	for _, row := range data {
		for _, v := range row {
			if v > best {
				best = v
			}
		}
	}
	return best
}
package camera
import (
"github.com/eriklupander/pathtracer-ocl/internal/app/geom"
"math"
)
// Camera describes a pinhole (or, with a non-zero Aperture, thin-lens)
// camera used to generate view rays for the renderer.
type Camera struct {
	Width int // viewport width in pixels
	Height int // viewport height in pixels
	Fov float64 // field of view in radians
	Transform geom.Mat4x4 // world-to-camera view transform
	Inverse geom.Mat4x4 // cached inverse of Transform
	PixelSize float64 // world-space size of one pixel on the view plane
	HalfWidth float64 // half the view-plane width
	HalfHeight float64 // half the view-plane height
	Aperture float64 // lens aperture; 0 means a pinhole camera
	FocalLength float64 // focal distance for depth of field
}
// NewCamera builds a Camera for the given viewport size and field of view
// (in radians), positioned at from and looking at lookAt with a fixed
// (0,1,0) up vector. The view transform and its inverse are precomputed,
// and the aperture defaults to 0 (pinhole).
func NewCamera(width int, height int, fov float64, from geom.Tuple4, lookAt geom.Tuple4) Camera {
	// Get the length of half the opposite part of the triangle
	halfView := math.Tan(fov / 2)
	aspect := float64(width) / float64(height)
	// Fit the view plane to the aspect ratio: the larger dimension spans
	// the full half-view.
	var halfWidth, halfHeight float64
	if aspect >= 1.0 {
		halfWidth = halfView
		halfHeight = halfView / aspect
	} else {
		halfWidth = halfView * aspect
		halfHeight = halfView
	}
	// Pixels are square, so one size works for both axes.
	pixelSize := (halfWidth * 2) / float64(width)
	transform := ViewTransform(from, lookAt, geom.NewVector(0, 1, 0))
	inverse := geom.Inverse(transform)
	return Camera{
		Width: width,
		Height: height,
		Fov: fov,
		Transform: transform,
		Inverse: inverse,
		PixelSize: pixelSize,
		HalfWidth: halfWidth,
		HalfHeight: halfHeight,
		Aperture: 0.0, // default, pinhole
	}
}
func ViewTransform(from, to, up geom.Tuple4) geom.Mat4x4 {
// Create a new matrix from the identity matrix.
vt := geom.IdentityMatrix //Mat4x4{Elems: make([]float64, 16)}
// Sub creates the initial vector between the eye and what we're looking at.
forward := geom.Normalize(geom.Sub(to, from))
// Normalize the up vector
upN := geom.Normalize(up)
// Use the cross product to get the "third" axis (in this case, not the forward or up one)
left := geom.Cross(forward, upN)
// Again, use cross product between the just computed left and forward to get the "true" up.
trueUp := geom.Cross(left, forward)
// copy each axis into the matrix
vt[0] = left.Get(0)
vt[1] = left.Get(1)
vt[2] = left.Get(2)
vt[4] = trueUp.Get(0)
vt[5] = trueUp.Get(1)
vt[6] = trueUp.Get(2)
vt[8] = -forward.Get(0)
vt[9] = -forward.Get(1)
vt[10] = -forward.Get(2)
// finally, move the view matrix opposite the camera position to emulate that the camera has moved.
return geom.Multiply(vt, geom.Translate(-from.Get(0), -from.Get(1), -from.Get(2)))
} | internal/app/camera/camera.go | 0.817392 | 0.564519 | camera.go | starcoder |
package unit
import (
"errors"
"io"
"reflect"
"strconv"
"strings"
"github.com/coreos/go-systemd/unit"
)
// Definition of a unit matching the fields found in unit-file
type Definition struct {
Unit struct {
Description string
Documentation string
Wants, Requires, Conflicts, Before, After []string
}
Install struct {
WantedBy, RequiredBy []string
}
}
// Description returns the [Unit] Description string as found in Definition.
func (def Definition) Description() string {
	return def.Unit.Description
}

// Documentation returns the [Unit] Documentation string as found in Definition.
func (def Definition) Documentation() string {
	return def.Unit.Documentation
}

// Wants returns the [Unit] Wants unit names as found in Definition.
func (def Definition) Wants() []string {
	return def.Unit.Wants
}

// Requires returns the [Unit] Requires unit names as found in Definition.
func (def Definition) Requires() []string {
	return def.Unit.Requires
}

// Conflicts returns the [Unit] Conflicts unit names as found in Definition.
func (def Definition) Conflicts() []string {
	return def.Unit.Conflicts
}

// After returns the [Unit] After unit names as found in Definition.
func (def Definition) After() []string {
	return def.Unit.After
}

// Before returns the [Unit] Before unit names as found in Definition.
func (def Definition) Before() []string {
	return def.Unit.Before
}

// RequiredBy returns the [Install] RequiredBy unit names as found in Definition.
func (def Definition) RequiredBy() []string {
	return def.Install.RequiredBy
}

// WantedBy returns the [Install] WantedBy unit names as found in Definition.
func (def Definition) WantedBy() []string {
	return def.Install.WantedBy
}
// ParseDefinition parses the data in Systemd unit-file format and stores the result in value pointed by Definition
func ParseDefinition(r io.Reader, v interface{}) (err error) {
// Access the underlying value of the pointer
def := reflect.ValueOf(v).Elem()
if !def.IsValid() || !def.CanSet() {
return ErrWrongVal
}
// Deserialized options
var opts []*unit.UnitOption
if opts, err = unit.Deserialize(r); err != nil {
return
}
// Loop over deserialized options trying to match them to the ones as found in Definition
for _, opt := range opts {
if v := def.FieldByName(opt.Section); v.IsValid() && v.CanSet() {
if v := v.FieldByName(opt.Name); v.IsValid() && v.CanSet() {
// reflect.Kind of field in Definition
switch v.Kind() {
case reflect.String:
v.SetString(opt.Value)
case reflect.Bool:
if opt.Value == "yes" {
v.SetBool(true)
} else if opt.Value != "no" {
return ParseErr(opt.Name, errors.New(`Value should be "yes" or "no"`))
}
case reflect.Slice:
if _, ok := v.Interface().([]string); ok { // []string
v.Set(reflect.ValueOf(strings.Fields(opt.Value)))
} else if _, ok := v.Interface().([]int); ok { // []int
ints := []int{}
for _, val := range strings.Fields(opt.Value) {
if converted, err := strconv.Atoi(val); err == nil {
ints = append(ints, converted)
} else {
return ParseErr(opt.Name, err)
}
}
v.Set(reflect.ValueOf(ints))
}
default:
return ParseErr(opt.Name, ErrUnknownType)
}
} else {
return ParseErr(opt.Name, ErrNotExist)
}
} else {
return ParseErr(opt.Name, ErrNotExist)
}
}
return
} | unit/definition.go | 0.699049 | 0.426142 | definition.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.