code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package validator
import (
"github.com/benchlab/asteroid/typing"
"github.com/benchlab/asteroid/ast"
)
// validateStatement dispatches a statement node to its specific validator.
// Import and package statements return early so that v.finishedImports is
// only set once a non-import/non-package statement has been seen; any other
// statement (including unrecognised node types) ends the import section.
// The redundant `break` statements were removed: Go switch cases do not
// fall through by default.
func (v *Validator) validateStatement(node ast.Node) {
	switch n := node.(type) {
	case *ast.AssignmentStatementNode:
		v.validateAssignment(n)
	case *ast.ForStatementNode:
		v.validateForStatement(n)
	case *ast.IfStatementNode:
		v.validateIfStatement(n)
	case *ast.ReturnStatementNode:
		v.validateReturnStatement(n)
	case *ast.SwitchStatementNode:
		v.validateSwitchStatement(n)
	case *ast.ForEachStatementNode:
		v.validateForEachStatement(n)
	case *ast.ImportStatementNode:
		v.validateImportStatement(n)
		return
	case *ast.PackageStatementNode:
		v.validatePackageStatement(n)
		return
	}
	v.finishedImports = true
}
// validateAssignment type-checks an assignment statement and declares any
// previously unknown identifiers appearing on the left-hand side.
// Two shapes are handled:
//   - one right-hand value distributed over several targets
//     (len(left) > len(right) == 1), and
//   - a one-to-one pairing of left and right slot types, where a single
//     right-hand expression may resolve to a tuple covering several slots.
// Identifiers named "_" are type-checked but never declared.
func (v *Validator) validateAssignment(node *ast.AssignmentStatementNode) {
	for _, l := range node.Left {
		if l == nil {
			v.addError(node.Start(), errAsteroidUnknown)
			return
		} else {
			// These expression kinds can never be assigned to.
			switch l.Type() {
			case ast.CallExpression, ast.Literal, ast.MapLiteral,
				ast.ArrayLiteral, ast.SliceExpression, ast.FuncLiteral:
				v.addError(l.Start(), errAsteroidNonValidExpressionLeft)
			}
		}
	}
	leftTuple := v.ExpressionTuple(node.Left)
	rightTuple := v.ExpressionTuple(node.Right)
	if len(leftTuple.Types) > len(rightTuple.Types) && len(rightTuple.Types) == 1 {
		// Case 1: a single right-hand value assigned to every target.
		right := rightTuple.Types[0]
		for _, left := range leftTuple.Types {
			if !v.bvm.Assignable(v, left, right, node.Right[0]) {
				v.addError(node.Left[0].Start(), errAsteroidAssignmentInvalid, typing.WriteType(left), typing.WriteType(right))
			}
		}
		// Declare identifiers whose type was not previously known.
		for i, left := range node.Left {
			if leftTuple.Types[i] == typing.Unknown() {
				if id, ok := left.(*ast.IdentifierNode); ok {
					ty := rightTuple.Types[0]
					id.Resolved = ty
					id.Resolved.SetModifiers(nil)
					ignored := "_"
					if id.Name != ignored {
						v.declareVar(id.Start(), id.Name, id.Resolved)
					}
				}
			}
		}
	} else {
		if len(leftTuple.Types) == len(rightTuple.Types) {
			// count indexes the right-hand expression currently being consumed;
			// remaining counts how many tuple slots of that expression are left.
			count := 0
			remaining := 0
			for i, left := range leftTuple.Types {
				right := rightTuple.Types[i]
				if !v.bvm.Assignable(v, left, right, node.Right[count]) {
					v.addError(node.Start(), errAsteroidAssignmentInvalid, typing.WriteType(leftTuple), typing.WriteType(rightTuple))
					break
				}
				if remaining == 0 {
					if node.Right[count] == nil {
						count++
					} else {
						// A tuple-valued expression spans several left-hand slots.
						switch a := node.Right[count].ResolvedType().(type) {
						case *typing.Tuple:
							remaining = len(a.Types) - 1
							break
						default:
							count++
						}
					}
				} else {
					remaining--
				}
			}
		} else {
			v.addError(node.Start(), errAsteroidAssignmentInvalid, typing.WriteType(leftTuple), typing.WriteType(rightTuple))
		}
		// Declare unknown identifiers when arities line up one-to-one.
		if len(node.Left) == len(rightTuple.Types) {
			for i, left := range node.Left {
				if leftTuple.Types[i] == typing.Unknown() {
					if id, ok := left.(*ast.IdentifierNode); ok {
						id.Resolved = rightTuple.Types[i]
						if id.Name != "_" {
							v.declareVar(id.Start(), id.Name, id.Resolved)
						}
					}
				}
			}
		}
	}
}
// validateIfStatement validates an if statement inside its own scope: the
// optional init assignment, each condition (which must resolve to boolean)
// with its body, and the optional else block.
func (v *Validator) validateIfStatement(node *ast.IfStatementNode) {
	v.openScope(nil, nil)
	defer v.closeScope()
	if node.Init != nil {
		v.validateAssignment(node.Init.(*ast.AssignmentStatementNode))
	}
	for _, cond := range node.Conditions {
		v.requireType(cond.Condition.Start(), typing.Boolean(), v.resolveExpression(cond.Condition))
		v.validateScope(node, cond.Body)
	}
	if node.Else != nil {
		v.validateScope(node, node.Else)
	}
}
// validateSwitchStatement validates a switch statement. The target type is
// resolved from the target expression when present, otherwise a bare switch
// is checked against boolean; every case clause is then validated against it.
func (v *Validator) validateSwitchStatement(node *ast.SwitchStatementNode) {
	switchType := typing.Boolean()
	if node.Target != nil {
		switchType = v.resolveExpression(node.Target)
	}
	for _, stmt := range node.Cases.Sequence {
		if stmt.Type() != ast.CaseStatement {
			continue
		}
		v.validateCaseStatement(switchType, stmt.(*ast.CaseStatementNode))
	}
}
// validateCaseStatement checks that every case expression is assignable to
// the switch target type, then validates the case body in its own scope.
func (v *Validator) validateCaseStatement(switchType typing.Type, clause *ast.CaseStatementNode) {
	for _, expr := range clause.Expressions {
		exprType := v.resolveExpression(expr)
		if !v.bvm.Assignable(v, switchType, exprType, expr) {
			v.addError(clause.Start(), errAsteroidSwitchTargetInvalid, typing.WriteType(switchType), typing.WriteType(exprType))
		}
	}
	v.validateScope(clause, clause.Block)
}
// validateReturnStatement checks a return statement against the result types
// of the innermost enclosing function declaration or function literal,
// walking the scope chain outwards. Returning values from a void function
// and returning a tuple not assignable to the declared results are both
// reported. A return with no enclosing function is an error.
// NOTE: the declaration and literal branches are intentionally parallel;
// they differ only in the name used in the error message.
func (v *Validator) validateReturnStatement(node *ast.ReturnStatementNode) {
	for c := v.scope; c != nil; c = c.parent {
		if c.context != nil {
			switch a := c.context.(type) {
			case *ast.FuncDeclarationNode:
				results := a.Resolved.(*typing.Func).Results
				returned := v.ExpressionTuple(node.Results)
				// Void function: any returned value is an error.
				if (results == nil || len(results.Types) == 0) && len(returned.Types) > 0 {
					v.addError(node.Start(), errAsteroidReturnInvalidFromVoid, typing.WriteType(returned), a.Signature.Identifier)
					return
				}
				if !typing.AssignableTo(results, returned, false) {
					v.addError(node.Start(), errAsteroidReturnInvalid, typing.WriteType(returned), a.Signature.Identifier, typing.WriteType(results))
				}
				return
			case *ast.FuncLiteralNode:
				results := a.Resolved.(*typing.Func).Results
				returned := v.ExpressionTuple(node.Results)
				if (results == nil || len(results.Types) == 0) && len(returned.Types) > 0 {
					v.addError(node.Start(), errAsteroidReturnInvalidFromVoid, typing.WriteType(returned), "literal")
					return
				}
				if !typing.AssignableTo(results, returned, false) {
					v.addError(node.Start(), errAsteroidReturnInvalid, typing.WriteType(returned), "literal", typing.WriteType(results))
				}
				return
			}
		}
	}
	// No enclosing function found anywhere in the scope chain.
	v.addError(node.Start(), errAsteroidReturnStatementInvalidOutsideFunction)
}
// validateForEachStatement validates a "for each" loop inside its own scope.
// The producer must resolve to a map (binding key, value) or an array
// (binding index, value); exactly two loop variables are required and are
// declared in the loop scope. Redundant `break`s were removed (Go switch
// cases do not fall through), and the map-arity error now reports at
// node.Start() for consistency with every other error in this function
// (previously node.Begin was used only there).
func (v *Validator) validateForEachStatement(node *ast.ForEachStatementNode) {
	v.openScope(nil, nil)
	gen := v.resolveExpression(node.Producer)
	const req = 2
	switch a := gen.(type) {
	case *typing.Map:
		// Map iteration binds key and value.
		if len(node.Variables) != req {
			v.addError(node.Start(), errAsteroidForEachVarInvalid, len(node.Variables), req)
		} else {
			v.declareVar(node.Start(), node.Variables[0], a.Key)
			v.declareVar(node.Start(), node.Variables[1], a.Value)
		}
	case *typing.Array:
		// Array iteration binds an index (largest numeric type) and value.
		if len(node.Variables) != req {
			v.addError(node.Start(), errAsteroidForEachVarInvalid, len(node.Variables), req)
		} else {
			v.declareVar(node.Start(), node.Variables[0], v.LargestNumericType(false))
			v.declareVar(node.Start(), node.Variables[1], a.Value)
		}
	default:
		v.addError(node.Start(), errAsteroidForEachTypeInvalid, typing.WriteType(gen))
	}
	v.validateScope(node, node.Block)
	v.closeScope()
}
// validateForStatement validates a classic for loop inside its own scope:
// the optional init assignment, the condition (must be boolean; note the
// condition is dereferenced without a nil check, unlike Init and Post),
// the optional post statement, and the loop body.
func (v *Validator) validateForStatement(node *ast.ForStatementNode) {
	v.openScope(nil, nil)
	defer v.closeScope()
	if node.Init != nil {
		v.validateAssignment(node.Init)
	}
	v.requireType(node.Cond.Start(), typing.Boolean(), v.resolveExpression(node.Cond))
	if node.Post != nil {
		v.validateStatement(node.Post)
	}
	v.validateScope(node, node.Block)
}
// createPackageType validates the package at the given path and wraps the
// resulting scope's variables and types in a typing.Package.
// Validation errors are accumulated onto the validator rather than aborting.
// NOTE(review): scope is dereferenced even when errs is non-nil — confirm
// ValidatePackage never returns a nil scope alongside errors.
func (v *Validator) createPackageType(path string) *typing.Package {
	scope, errs := ValidatePackage(v.bvm, path)
	if errs != nil {
		v.errs = append(v.errs, errs...)
	}
	pkg := new(typing.Package)
	pkg.Variables = scope.variables
	pkg.Types = scope.types
	return pkg
}
// trimPath returns the last element of a slash-separated path,
// e.g. "a/b/pkg" -> "pkg". Fix: the previous implementation returned
// n[lastSlash:], which kept the final '/' in the result ("/pkg") and so
// produced an unusable declared type name for aliasless imports; paths
// without any slash were already returned unchanged, proving the slash
// inclusion was accidental.
func trimPath(n string) string {
	lastSlash := -1
	for i := 0; i < len(n); i++ {
		if n[i] == '/' {
			lastSlash = i
		}
	}
	return n[lastSlash+1:]
}
// validateImportStatement registers an imported package as a named type.
// Imports are only legal before any other statement kind has been seen.
// The declared name is the explicit alias when present, otherwise the
// trimmed final element of the import path.
func (v *Validator) validateImportStatement(node *ast.ImportStatementNode) {
	if v.finishedImports {
		v.addError(node.Start(), errAsteroidImportsFinished)
	}
	name := node.Alias
	if name == "" {
		name = trimPath(node.Path)
	}
	v.declareType(node.Start(), name, v.createPackageType(node.Path))
}
func (v *Validator) validatePackageStatement(node *ast.PackageStatementNode) {
if node.Name == "" {
v.addError(node.Start(), errAsteroidPackageNameInvalid, node.Name)
return
}
if v.packageName == "" {
v.packageName = node.Name
} else {
if v.packageName != node.Name {
v.addError(node.Start(), errAsteroidDuplicatePackageName, node.Name, v.packageName)
}
}
} | validator/statements.go | 0.553023 | 0.423816 | statements.go | starcoder |
package fastconvert
import "encoding/binary"
// region 16 bit
// region int16 Converters
// ReadByteArrayToInt16LEArray decodes little-endian int16 values from data
// into out. The number of decoded items is len(data)/2 or len(out),
// whichever is lower; surplus bytes or slots are left untouched.
func ReadByteArrayToInt16LEArray(data []byte, out []int16) {
	count := len(data) / 2
	if len(out) < count {
		count = len(out)
	}
	for i := 0; i < count; i++ {
		out[i] = int16(binary.LittleEndian.Uint16(data[2*i : 2*i+2]))
	}
}

// ByteArrayToInt16LEArray decodes the entire buffer as little-endian int16s.
func ByteArrayToInt16LEArray(data []byte) []int16 {
	result := make([]int16, len(data)/2)
	ReadByteArrayToInt16LEArray(data, result)
	return result
}
// ReadByteArrayToInt16BEArray decodes big-endian int16 values from data
// into out. The number of decoded items is len(data)/2 or len(out),
// whichever is lower; surplus bytes or slots are left untouched.
func ReadByteArrayToInt16BEArray(data []byte, out []int16) {
	count := len(data) / 2
	if len(out) < count {
		count = len(out)
	}
	for i := 0; i < count; i++ {
		out[i] = int16(binary.BigEndian.Uint16(data[2*i : 2*i+2]))
	}
}

// ByteArrayToInt16BEArray decodes the entire buffer as big-endian int16s.
func ByteArrayToInt16BEArray(data []byte) []int16 {
	result := make([]int16, len(data)/2)
	ReadByteArrayToInt16BEArray(data, result)
	return result
}
// endregion
// region uint16 Converters
// ReadByteArrayToUInt16LEArray reads a uint16 array from the specified byte buffer in Little Endian format.
// The number of items is len(data) / 2 or len(out), whichever is lower.
func ReadByteArrayToUInt16LEArray(data []byte, out []uint16) {
	var pos = 0
	var itemsOnBuffer = len(data) / 2
	var itemsToRead = itemsOnBuffer
	if len(out) < itemsToRead {
		itemsToRead = len(out)
	}
	for idx := 0; idx < itemsToRead; idx++ {
		out[idx] = binary.LittleEndian.Uint16(data[pos : pos+2])
		pos += 2
	}
}

// ByteArrayToUInt16LEArray reads a uint16 array covering the whole buffer in Little Endian format.
func ByteArrayToUInt16LEArray(data []byte) []uint16 {
	var out = make([]uint16, len(data)/2)
	ReadByteArrayToUInt16LEArray(data, out)
	return out
}
// ReadByteArrayToUInt16BEArray reads a uint16 array from the specified byte buffer in Big Endian format.
// The number of items is len(data) / 2 or len(out), whichever is lower.
func ReadByteArrayToUInt16BEArray(data []byte, out []uint16) {
	var pos = 0
	var itemsOnBuffer = len(data) / 2
	var itemsToRead = itemsOnBuffer
	if len(out) < itemsToRead {
		itemsToRead = len(out)
	}
	for idx := 0; idx < itemsToRead; idx++ {
		out[idx] = binary.BigEndian.Uint16(data[pos : pos+2])
		pos += 2
	}
}

// ByteArrayToUInt16BEArray reads a uint16 array covering the whole buffer in Big Endian format.
func ByteArrayToUInt16BEArray(data []byte) []uint16 {
	var out = make([]uint16, len(data)/2)
	ReadByteArrayToUInt16BEArray(data, out)
	return out
}
// endregion
// endregion
// region 32 bit
// region int32 Converters
// ReadByteArrayToInt32LEArray reads an int32 array from the specified byte buffer in Little Endian format.
// The number of items is len(data) / 4 or len(out), whichever is lower.
func ReadByteArrayToInt32LEArray(data []byte, out []int32) {
	var pos = 0
	var itemsOnBuffer = len(data) / 4
	var itemsToRead = itemsOnBuffer
	if len(out) < itemsToRead {
		itemsToRead = len(out)
	}
	for idx := 0; idx < itemsToRead; idx++ {
		out[idx] = int32(binary.LittleEndian.Uint32(data[pos : pos+4]))
		pos += 4
	}
}

// ByteArrayToInt32LEArray reads an int32 array covering the whole buffer in Little Endian format.
func ByteArrayToInt32LEArray(data []byte) []int32 {
	var out = make([]int32, len(data)/4)
	ReadByteArrayToInt32LEArray(data, out)
	return out
}
// ReadByteArrayToInt32BEArray reads an int32 array from the specified byte buffer in Big Endian format.
// The number of items is len(data) / 4 or len(out), whichever is lower.
// (The previous comment said len(data) / 2, a copy-paste error from the 16-bit variant.)
func ReadByteArrayToInt32BEArray(data []byte, out []int32) {
	var pos = 0
	var itemsOnBuffer = len(data) / 4
	var itemsToRead = itemsOnBuffer
	if len(out) < itemsToRead {
		itemsToRead = len(out)
	}
	for idx := 0; idx < itemsToRead; idx++ {
		out[idx] = int32(binary.BigEndian.Uint32(data[pos : pos+4]))
		pos += 4
	}
}

// ByteArrayToInt32BEArray reads an int32 array covering the whole buffer in Big Endian format.
func ByteArrayToInt32BEArray(data []byte) []int32 {
	var out = make([]int32, len(data)/4)
	ReadByteArrayToInt32BEArray(data, out)
	return out
}
// endregion
// region uint32 Converters
// ReadByteArrayToUInt32LEArray reads a uint32 array from the specified byte buffer in Little Endian format.
// The number of items is len(data) / 4 or len(out), whichever is lower.
func ReadByteArrayToUInt32LEArray(data []byte, out []uint32) {
	var pos = 0
	var itemsOnBuffer = len(data) / 4
	var itemsToRead = itemsOnBuffer
	if len(out) < itemsToRead {
		itemsToRead = len(out)
	}
	for idx := 0; idx < itemsToRead; idx++ {
		out[idx] = binary.LittleEndian.Uint32(data[pos : pos+4])
		pos += 4
	}
}

// ByteArrayToUInt32LEArray reads a uint32 array covering the whole buffer in Little Endian format.
func ByteArrayToUInt32LEArray(data []byte) []uint32 {
	var out = make([]uint32, len(data)/4)
	ReadByteArrayToUInt32LEArray(data, out)
	return out
}
// ReadByteArrayToUInt32BEArray reads a uint32 array from the specified byte buffer in Big Endian format.
// The number of items is len(data) / 4 or len(out), whichever is lower.
// (The previous comment said len(data) / 2, a copy-paste error from the 16-bit variant.)
func ReadByteArrayToUInt32BEArray(data []byte, out []uint32) {
	var pos = 0
	var itemsOnBuffer = len(data) / 4
	var itemsToRead = itemsOnBuffer
	if len(out) < itemsToRead {
		itemsToRead = len(out)
	}
	for idx := 0; idx < itemsToRead; idx++ {
		out[idx] = binary.BigEndian.Uint32(data[pos : pos+4])
		pos += 4
	}
}

// ByteArrayToUInt32BEArray reads a uint32 array covering the whole buffer in Big Endian format.
func ByteArrayToUInt32BEArray(data []byte) []uint32 {
	var out = make([]uint32, len(data)/4)
	ReadByteArrayToUInt32BEArray(data, out)
	return out
}
// endregion
// endregion
// region 64 bit
// region int64 Converters
// ReadByteArrayToInt64LEArray reads an int64 array from the specified byte buffer in Little Endian format.
// The number of items is len(data) / 8 or len(out), whichever is lower.
func ReadByteArrayToInt64LEArray(data []byte, out []int64) {
	var pos = 0
	var itemsOnBuffer = len(data) / 8
	var itemsToRead = itemsOnBuffer
	if len(out) < itemsToRead {
		itemsToRead = len(out)
	}
	for idx := 0; idx < itemsToRead; idx++ {
		out[idx] = int64(binary.LittleEndian.Uint64(data[pos : pos+8]))
		pos += 8
	}
}

// ByteArrayToInt64LEArray reads an int64 array covering the whole buffer in Little Endian format.
func ByteArrayToInt64LEArray(data []byte) []int64 {
	var out = make([]int64, len(data)/8)
	ReadByteArrayToInt64LEArray(data, out)
	return out
}
// ReadByteArrayToInt64BEArray reads an int64 array from the specified byte buffer in Big Endian format.
// The number of items is len(data) / 8 or len(out), whichever is lower.
func ReadByteArrayToInt64BEArray(data []byte, out []int64) {
	var pos = 0
	var itemsOnBuffer = len(data) / 8
	var itemsToRead = itemsOnBuffer
	if len(out) < itemsToRead {
		itemsToRead = len(out)
	}
	for idx := 0; idx < itemsToRead; idx++ {
		out[idx] = int64(binary.BigEndian.Uint64(data[pos : pos+8]))
		pos += 8
	}
}

// ByteArrayToInt64BEArray reads an int64 array covering the whole buffer in Big Endian format.
func ByteArrayToInt64BEArray(data []byte) []int64 {
	var out = make([]int64, len(data)/8)
	ReadByteArrayToInt64BEArray(data, out)
	return out
}
// endregion
// region uint64 Converters
// ReadByteArrayToUInt64LEArray reads a uint64 array from the specified byte buffer in Little Endian format.
// The number of items is len(data) / 8 or len(out), whichever is lower.
func ReadByteArrayToUInt64LEArray(data []byte, out []uint64) {
	var pos = 0
	var itemsOnBuffer = len(data) / 8
	var itemsToRead = itemsOnBuffer
	if len(out) < itemsToRead {
		itemsToRead = len(out)
	}
	for idx := 0; idx < itemsToRead; idx++ {
		out[idx] = binary.LittleEndian.Uint64(data[pos : pos+8])
		pos += 8
	}
}

// ByteArrayToUInt64LEArray reads a uint64 array covering the whole buffer in Little Endian format.
func ByteArrayToUInt64LEArray(data []byte) []uint64 {
	var out = make([]uint64, len(data)/8)
	ReadByteArrayToUInt64LEArray(data, out)
	return out
}
// ReadByteArrayToUInt64BEArray reads a uint64 array from the specified byte buffer in Big Endian format.
// The number of items is len(data) / 8 or len(out), whichever is lower.
func ReadByteArrayToUInt64BEArray(data []byte, out []uint64) {
	var pos = 0
	var itemsOnBuffer = len(data) / 8
	var itemsToRead = itemsOnBuffer
	if len(out) < itemsToRead {
		itemsToRead = len(out)
	}
	for idx := 0; idx < itemsToRead; idx++ {
		out[idx] = binary.BigEndian.Uint64(data[pos : pos+8])
		pos += 8
	}
}

// ByteArrayToUInt64BEArray reads a uint64 array covering the whole buffer in Big Endian format.
func ByteArrayToUInt64BEArray(data []byte) []uint64 {
	var out = make([]uint64, len(data)/8)
	ReadByteArrayToUInt64BEArray(data, out)
	return out
}
// endregion
// endregion
// Package rnd implements random numbers generators (wrapping the standard functions or the Mersenne
// Twister library). It also implements probability distribution functions.
package rnd
import (
"math/rand"
"time"
"gosl/utl"
)
// Init initialises the package-level random number generator.
// Input:
//   seed -- seed value; use seed <= 0 to seed from the current Unix time
// Note: seeding is global (math/rand default source), so concurrent callers
// share one generator.
func Init(seed int) {
	if seed <= 0 {
		seed = int(time.Now().Unix())
	}
	rand.Seed(int64(seed))
}
// Int generates pseudo random integer between low and high.
// Input:
// low -- lower limit
// high -- upper limit
// Output:
// random integer
func Int(low, high int) int {
return rand.Int()%(high-low+1) + low
}
// Ints generates pseudo random integers between low and high.
// Input:
// low -- lower limit
// high -- upper limit
// Output:
// values -- slice to be filled with len(values) numbers
func Ints(values []int, low, high int) {
if len(values) < 1 {
return
}
for i := 0; i < len(values); i++ {
values[i] = Int(low, high)
}
}
// Float64 generates a pseudo random real number between low and high; i.e. in [low, right)
// Input:
// low -- lower limit (closed)
// high -- upper limit (open)
// Output:
// random float64
func Float64(low, high float64) float64 {
return low + (high-low)*rand.Float64()
}
// Float64s generates pseudo random real numbers between low and high; i.e. in [low, right)
// Input:
// low -- lower limit (closed)
// high -- upper limit (open)
// Output:
// values -- slice to be filled with len(values) numbers
func Float64s(values []float64, low, high float64) {
for i := 0; i < len(values); i++ {
values[i] = low + (high-low)*rand.Float64()
}
}
// FlipCoin generates a Bernoulli variable; throw a coin with probability p
func FlipCoin(p float64) bool {
if p == 1.0 {
return true
}
if p == 0.0 {
return false
}
if rand.Float64() <= p {
return true
}
return false
}
// IntGetUnique randomly selects n items from values avoiding duplicates,
// using the 'reservoir sampling' method; see Wikipedia:
// https://en.wikipedia.org/wiki/Reservoir_sampling
// Requesting n < 1 returns nil; requesting n >= len(values) returns a
// shuffled copy of the whole list. The input slice is never mutated.
func IntGetUnique(values []int, n int) (selected []int) {
	if n < 1 {
		return
	}
	if n >= len(values) {
		return IntGetShuffled(values)
	}
	// Fill the reservoir with the first n items.
	selected = make([]int, n)
	for i := 0; i < n; i++ {
		selected[i] = values[i]
	}
	// Each later item replaces a reservoir slot with probability n/(i+1).
	var j int
	for i := n; i < len(values); i++ {
		j = rand.Intn(i + 1)
		if j < n {
			selected[j] = values[i]
		}
	}
	return
}
// IntGetUniqueN randomly selects n integers from the range [start, endp1)
// avoiding duplicates, using the 'reservoir sampling' method; see Wikipedia:
// https://en.wikipedia.org/wiki/Reservoir_sampling
// Requesting n < 1 returns nil; requesting n >= (endp1 - start) returns the
// whole range, shuffled.
func IntGetUniqueN(start, endp1, n int) (selected []int) {
	if n < 1 {
		return
	}
	size := endp1 - start
	if n >= size {
		selected = utl.IntRange2(start, endp1)
		IntShuffle(selected)
		return
	}
	// Fill the reservoir with the first n values of the range.
	selected = make([]int, n)
	for i := 0; i < n; i++ {
		selected[i] = start + i
	}
	// Each later value replaces a reservoir slot with probability n/(i+1).
	var j int
	for i := n; i < size; i++ {
		j = rand.Intn(i + 1)
		if j < n {
			selected[j] = start + i
		}
	}
	return
}
// IntShuffle shuffles a slice of integers
func IntShuffle(values []int) {
var j, tmp int
for i := len(values) - 1; i > 0; i-- {
j = rand.Int() % i
tmp = values[j]
values[j] = values[i]
values[i] = tmp
}
}
// IntGetShuffled returns a shufled slice of integers
func IntGetShuffled(values []int) (shuffled []int) {
shuffled = make([]int, len(values))
copy(shuffled, values)
IntShuffle(shuffled)
return
}
// Shuffle shuffles a slice of float point numbers
func Shuffle(values []float64) {
var tmp float64
var j int
for i := len(values) - 1; i > 0; i-- {
j = rand.Int() % i
tmp = values[j]
values[j] = values[i]
values[i] = tmp
}
}
// IntGetGroups randomly selects indices from pool separating them in groups
// Input:
// pool -- all ints.
// Output:
// groups -- [ngroups][size_of_group] pre-allocated slices
func IntGetGroups(groups [][]int, pool []int) {
ngroups := len(groups)
sizeg := len(groups[0])
indices := IntGetShuffled(pool)
var k int
for i := 0; i < ngroups; i++ {
for j := 0; j < sizeg; j++ {
groups[i][j] = indices[k]
k++
}
}
} | rnd/random.go | 0.690768 | 0.574096 | random.go | starcoder |
package commons
import (
"errors"
"github.com/yuyenews/Beerus/commons/util"
"strconv"
"strings"
"time"
)
// BeeSession Session Management, Based on the aes algorithm
// The basic principle of creating a token is to convert the data into a json string, then splice a timeout to the end, then perform aes encryption, and then convert the encrypted cipher text to base64 output
// To restore the token, reverse the creation process, first convert the base64 string to cipher text, then decrypt the cipher text with aes, after decryption, get a string with a timeout at the end, split the string into json and timeout, and determine whether the timeout is over, if not, convert the json string to the specified type of data.
type BeeSession struct {
	Timeout int64 // token lifetime in milliseconds; <= 0 means the 24h default
	Secret string // AES secret key; must be set before use
	InitializationVector string // AES initialization vector; must be set before use
}
// CreateToken Create a token based on the parameters passed in.
// Parameters must be of type struct. The data is serialised to JSON, an
// absolute expiry timestamp (now + Timeout ms; default 24h when Timeout <= 0)
// is appended after a CarriageReturn separator, and the result is
// AES-encrypted and base64-encoded.
func (bs BeeSession) CreateToken(data interface{}) (string, error) {
	err := bs.validVariables()
	if err != nil {
		return "", err
	}
	// Value receiver: this default only applies to the local copy.
	if bs.Timeout <= 0 {
		bs.Timeout = 86400000
	}
	// Converting data to json strings
	jsonStr, err := util.ToJSONString(data)
	if err != nil {
		return "", err
	}
	// Splice the timeout at the end of the json string
	timeOut := bs.Timeout + time.Now().UnixMilli()
	jsonStr = jsonStr + CarriageReturn + strconv.FormatInt(timeOut, 10)
	// AES encryption and conversion to base64 return
	dat, err := util.EncryptionToString(jsonStr, bs.InitializationVector, bs.Secret)
	if err != nil {
		return "", err
	}
	return dat, nil
}
// RestoreToken Restore the token to the original data.
// The second parameter must be a pointer of type struct. The token is
// base64-decoded and AES-decrypted, split at the last CarriageReturn into
// the JSON payload and the expiry timestamp; an expired or malformed token
// is rejected, otherwise the JSON is unmarshalled into dst.
func (bs BeeSession) RestoreToken(token string, dst interface{}) error {
	if token == "" {
		return errors.New("token is incorrect")
	}
	err := bs.validVariables()
	if err != nil {
		return err
	}
	// Restore the base64 and decrypt it to the original data by AES (json spliced with timeout)
	dstStr, err := util.DecryptionForString(token, bs.InitializationVector, bs.Secret)
	if err != nil {
		return err
	}
	// Splitting data into json and timeout
	index := strings.LastIndex(dstStr, CarriageReturn)
	if index < 0 {
		return errors.New("token is incorrect")
	}
	jsonStr := dstStr[:index]
	timeOutStr := dstStr[(index + len(CarriageReturn)):]
	timeOut, errMsg := strconv.ParseInt(timeOutStr, 10, 64)
	if errMsg != nil {
		return errors.New("token is incorrect" + errMsg.Error())
	}
	// If the timeout expires, the user is prompted
	if time.Now().UnixMilli() > timeOut {
		return errors.New("token is no longer valid")
	}
	// If it doesn't time out, the json string is converted to the specified struct
	err = util.ParseStruct(jsonStr, dst)
	if err != nil {
		return err
	}
	return nil
}
// validVariables Verify that the secret key and initialization vector are empty
func (bs BeeSession) validVariables() error {
if bs.Secret == "" {
return errors.New("you need to set a secret key first before you can use BeeSession")
}
if bs.InitializationVector == "" {
return errors.New("you need to set a initialization vector first before you can use BeeSession")
}
return nil
} | network/http/commons/BeeSession.go | 0.643889 | 0.400544 | BeeSession.go | starcoder |
package temporal
import (
"fmt"
"math"
"time"
"github.com/m3db/m3/src/query/executor/transform"
)
const (
	// HoltWintersType produces a smoothed value for time series based on the specified interval.
	// The algorithm used comes from https://en.wikipedia.org/wiki/Exponential_smoothing#Double_exponential_smoothing.
	// Holt-Winters should only be used with gauges.
	// This string names the operator when constructed via NewHoltWintersOp.
	HoltWintersType = "holt_winters"
)
// NewHoltWintersOp creates a new base Holt-Winters transform with a specified node.
// Expects exactly three args: the window duration, the smoothing factor sf,
// and the trend factor tf; both factors must lie strictly between 0 and 1.
func NewHoltWintersOp(args []interface{}) (transform.Params, error) {
	// todo(braskin): move this logic to the parser.
	if len(args) != 3 {
		return emptyOp, fmt.Errorf("invalid number of args for %s: %d", HoltWintersType, len(args))
	}
	duration, ok := args[0].(time.Duration)
	if !ok {
		return emptyOp, fmt.Errorf("unable to cast to scalar argument: %v for %s", args[0], HoltWintersType)
	}
	sf, ok := args[1].(float64)
	if !ok {
		return emptyOp, fmt.Errorf("unable to cast to scalar argument: %v for %s", args[1], HoltWintersType)
	}
	tf, ok := args[2].(float64)
	if !ok {
		return emptyOp, fmt.Errorf("unable to cast to scalar argument: %v for %s", args[2], HoltWintersType)
	}
	// Sanity check the input.
	if sf <= 0 || sf >= 1 {
		return emptyOp, fmt.Errorf("invalid smoothing factor. Expected: 0 < sf < 1, got: %f", sf)
	}
	if tf <= 0 || tf >= 1 {
		return emptyOp, fmt.Errorf("invalid trend factor. Expected: 0 < tf < 1, got: %f", tf)
	}
	aggregationFunc := makeHoltWintersFn(sf, tf)
	a := aggProcessor{
		aggFunc: aggregationFunc,
	}
	return newBaseOp(duration, HoltWintersType, a)
}
// makeHoltWintersFn builds the aggregation closure implementing double
// exponential smoothing with smoothing factor sf and trend factor tf.
// NaN samples are skipped. At least two non-NaN values are required,
// otherwise NaN is returned.
func makeHoltWintersFn(sf, tf float64) aggFunc {
	return func(vals []float64) float64 {
		var (
			foundFirst, foundSecond bool
			secondVal               float64
			trendVal                float64
			scaledSmoothVal, scaledTrendVal float64
			prev, curr float64
			idx int
		)
		for _, val := range vals {
			if math.IsNaN(val) {
				continue
			}
			// The first value seeds the smoothed series.
			if !foundFirst {
				foundFirst = true
				curr = val
				idx++
				continue
			}
			// The second value seeds the trend (difference of the first two).
			if !foundSecond {
				foundSecond = true
				secondVal = val
				trendVal = secondVal - curr
			}
			// scale the raw value against the smoothing factor.
			scaledSmoothVal = sf * val
			// scale the last smoothed value with the trend at this point.
			trendVal = calcTrendValue(idx-1, sf, tf, prev, curr, trendVal)
			scaledTrendVal = (1 - sf) * (curr + trendVal)
			prev, curr = curr, scaledSmoothVal+scaledTrendVal
			idx++
		}
		// need at least two values to apply a smoothing operation.
		if !foundSecond {
			return math.NaN()
		}
		return curr
	}
}
// calcTrendValue computes the smoothed trend component at index i.
// For the first sample (i == 0) the seed trend b is returned unchanged;
// afterwards the new trend blends the change in the smoothed values
// (s1 - s0, weighted by the trend factor tf) with the previous trend b
// (weighted by 1 - tf). The smoothing factor sf is accepted for call-site
// symmetry but is not used in the calculation.
func calcTrendValue(i int, sf, tf, s0, s1, b float64) float64 {
	if i == 0 {
		return b
	}
	return tf*(s1-s0) + (1-tf)*b
}
package vec
import (
"bytes"
"fmt"
"math"
"sort"
)
// Vector is a slice of float64 with vector-arithmetic and statistics helpers.
type Vector []float64

// MakeVec builds a Vector of the given length, filling element i with source(i).
// Note: the parameter `len` shadows the builtin inside this function.
func MakeVec(len int, source func(i int) float64) Vector {
	var vector = make(Vector, 0, len)
	for i := 0; i < len; i++ {
		vector = append(vector, source(i))
	}
	return vector
}
// New returns an empty Vector whose capacity matches the receiver's length.
func (vector Vector) New() Vector {
	return make(Vector, 0, len(vector))
}

// Len returns the number of elements.
func (vector Vector) Len() int {
	return len(vector)
}

// Copy returns a new Vector containing the same elements.
func (vector Vector) Copy() Vector {
	return append(vector.New(), vector...)
}

// AddVector returns the element-wise sum of the receiver and xx.
// Panics when the lengths differ.
func (vector Vector) AddVector(xx Vector) Vector {
	if vector.Len() != xx.Len() {
		panic("add expects vectors with equal lengths")
	}
	var result = vector.Copy()
	for i, x := range xx {
		result[i] += x
	}
	return result
}

// AddScalar returns a copy of the receiver with k added to every element.
func (vector Vector) AddScalar(k float64) Vector {
	var result = vector.Copy()
	for i := range vector {
		result[i] += k
	}
	return result
}
// MulVector returns the element-wise product of the receiver and xx.
// Panics when the lengths differ.
func (vector Vector) MulVector(xx Vector) Vector {
	if vector.Len() != xx.Len() {
		panic("mul expect vectors with equal lengths")
	}
	var result = vector.Copy()
	for i, x := range xx {
		result[i] *= x
	}
	return result
}

// MulScalar returns a copy of the receiver with every element multiplied by k.
func (vector Vector) MulScalar(k float64) Vector {
	var result = vector.Copy()
	for i := range vector {
		result[i] *= k
	}
	return result
}

// MulDot returns the dot product of the receiver and another.
// Panics when the lengths differ. (Functionally equivalent to Dot,
// which panics with a different message.)
func (vector Vector) MulDot(another Vector) float64 {
	if vector.Len() != another.Len() {
		panic("mul expect vectors with equal lengths")
	}
	var result float64
	for i, x := range another {
		result += vector[i] * x
	}
	return result
}

// Append returns a new Vector holding the receiver's elements followed by xx.
func (vector Vector) Append(xx Vector) Vector {
	return append(vector.Copy(), xx...)
}
// Sum returns the sum of all elements (0 for an empty vector).
func (vector Vector) Sum() float64 {
	var accum float64
	for _, x := range vector {
		accum += x
	}
	return accum
}

// Map returns a copy with op applied to every element.
func (vector Vector) Map(op func(x float64) float64) Vector {
	var result = vector.Copy()
	for i, x := range vector {
		result[i] = op(x)
	}
	return result
}

// ReduceLeft folds the elements from first to last, starting from start.
func (vector Vector) ReduceLeft(start float64, op func(x, y float64) float64) float64 {
	var accum = start
	for _, x := range vector {
		accum = op(accum, x)
	}
	return accum
}

// ReduceRight folds the elements from last to first, starting from start.
func (vector Vector) ReduceRight(start float64, op func(x, y float64) float64) float64 {
	var accum = start
	var vLen = vector.Len()
	for i := vLen - 1; i >= 0; i-- {
		var x = vector[i]
		accum = op(accum, x)
	}
	return accum
}
// Filter returns a new Vector holding the elements for which pred is true.
func (vector Vector) Filter(pred func(x float64) bool) Vector {
	var filtered = vector.New()
	for _, x := range vector {
		if pred(x) {
			filtered = append(filtered, x)
		}
	}
	return filtered
}

// Mean returns the arithmetic mean (NaN for an empty vector: 0/0).
func (vector Vector) Mean() float64 {
	return vector.Sum() / float64(vector.Len())
}

// SumOfSquares returns the sum of the squared elements.
func (vector Vector) SumOfSquares() float64 {
	return vector.Map(Square).Sum()
}

// XY returns (i, vector[i]) — useful as a plotting point source.
func (vector Vector) XY(i int) (x, y float64) {
	return float64(i), vector[i]
}

// Value returns the element at index i.
func (vector Vector) Value(i int) float64 {
	return vector[i]
}
// Max returns the largest element, or 0 for an empty vector.
func (vector Vector) Max() float64 {
	if vector.Len() == 0 {
		return 0
	}
	return vector.ReduceLeft(vector[0], math.Max)
}

// Min returns the smallest element, or 0 for an empty vector.
func (vector Vector) Min() float64 {
	if vector.Len() == 0 {
		return 0
	}
	return vector.ReduceLeft(vector[0], math.Min)
}

// Less reports whether element i is smaller than element j.
// NOTE(review): together with Len this is part of sort.Interface, but no
// Swap method is visible here — confirm sorting relies on Sorted() instead.
func (vector Vector) Less(i, j int) bool {
	return vector[i] < vector[j]
}

// Sorted returns a sorted copy; the receiver is left unchanged.
func (vector Vector) Sorted() Vector {
	var sorted = vector.Copy()
	sort.Float64s(sorted)
	return sorted
}

// IsSorted reports whether the elements are in ascending order.
func (vector Vector) IsSorted() bool {
	return sort.Float64sAreSorted(vector)
}
// Median returns the median value: 0 for an empty vector, the element for a
// single value, and the mean of the two middle elements for even lengths.
// The receiver is not mutated (a sorted copy is used).
func (vector Vector) Median() float64 {
	var vLen = vector.Len()
	switch vLen {
	case 0:
		return 0
	case 1:
		return vector[0]
	case 2:
		return (vector[0] + vector[1]) / 2
	default:
		var sorted = vector.Sorted()
		var midpoint = vLen / 2
		if vLen%2 != 0 {
			return sorted[midpoint]
		}
		return (sorted[midpoint] + sorted[midpoint-1]) / 2
	}
}
// Quantile returns the value below which p percent of the observations
// fall; p must be in [0, 100]. Fix: the previous implementation indexed
// the sorted vector at len*p/100, which is out of range when p == 100
// (and always panicked on an empty vector); the index is now clamped to
// the last element, and an empty vector returns 0 (matching Max/Min).
func (vector Vector) Quantile(p int) float64 {
	if p < 0 || p > 100 {
		panic(fmt.Sprintf("[vec.Quantile] expected p in range 0..100, got %d", p))
	}
	if vector.Len() == 0 {
		return 0
	}
	idx := vector.Len() * p / 100
	if idx >= vector.Len() {
		idx = vector.Len() - 1
	}
	return vector.Sorted()[idx]
}
// Dot returns the dot product of the receiver and xx.
// Panics when the lengths differ.
func (vector Vector) Dot(xx Vector) float64 {
	if vector.Len() != xx.Len() {
		panic(fmt.Sprintf("[vec.Dot] expected vectors of equal length, got %d and %d",
			vector.Len(), xx.Len()))
	}
	var result float64
	for i, x := range xx {
		result += vector[i] * x
	}
	return result
}

// DeMean returns a copy with the mean subtracted from every element.
func (vector Vector) DeMean() Vector {
	var mean = vector.Mean()
	return vector.Map(func(x float64) float64 {
		return x - mean
	})
}
// Indexes returns a Vector of the receiver's indexes: 0, 1, ..., Len()-1.
func (vector Vector) Indexes() Vector {
	var indexes = vector.New()
	for index := range vector {
		indexes = append(indexes, float64(index))
	}
	return indexes
}

// Mode returns the most frequent value(s).
// NOTE(review): delegates to Count(), which is defined elsewhere in this
// package — its exact tie-breaking behaviour is not visible here.
func (vector Vector) Mode() Vector {
	return vector.Count().Mode()
}
// Variance returns the sample variance (divides by n-1).
// NOTE(review): for a vector of length <= 1 the divisor is <= 0, yielding
// Inf/NaN — callers should guard against short inputs.
func (vector Vector) Variance() float64 {
	var vLenSubOne = float64(vector.Len() - 1)
	var mean = vector.Mean()
	return vector.Map(func(x float64) float64 {
		return Square(mean - x)
	}).Sum() / vLenSubOne
}

// StdDeviation returns the sample standard deviation (sqrt of Variance).
func (vector Vector) StdDeviation() float64 {
	return math.Sqrt(vector.Variance())
}

// InterquartileRange returns the spread between the 75th and 25th percentiles.
func (vector Vector) InterquartileRange() float64 {
	return vector.Quantile(75) - vector.Quantile(25)
}

// Covariance returns the sample covariance of the receiver and xx.
// Panics (via PanicF, defined elsewhere) when the lengths differ.
func (vector Vector) Covariance(xx Vector) float64 {
	if vector.Len() != xx.Len() {
		PanicF("[vec.Covariance] expected vector of length %d, got %d",
			vector.Len(), xx.Len())
	}
	var vLenSubOne = float64(vector.Len() - 1)
	return vector.DeMean().Dot(xx.DeMean()) / vLenSubOne
}
// Correlation returns the Pearson correlation coefficient of the receiver
// and xx, or 0 when either standard deviation is not strictly positive.
func (vector Vector) Correlation(xx Vector) float64 {
	var devV = vector.StdDeviation()
	var devX = xx.StdDeviation()
	if devX > 0 && devV > 0 {
		return vector.Covariance(xx) / (devX * devV)
	}
	return 0
}

// WithLabels pairs the vector with labels, producing a LabeledVec
// (via VecWithLabels, defined elsewhere in this package).
func (vec Vector) WithLabels(labels Labels) LabeledVec {
	return VecWithLabels(vec, labels)
}
// Stats computes the max, min and sum of the vector in a single pass.
// An empty vector yields the zero Stats value.
func (vector Vector) Stats() Stats {
	var stats Stats
	if vector.Len() == 0 {
		return stats
	}
	// Seed with the first element, then fold in the rest.
	stats.Sum = vector[0]
	stats.Max = vector[0]
	stats.Min = vector[0]
	for _, x := range vector[1:] {
		if x > stats.Max {
			stats.Max = x
		}
		if x < stats.Min {
			stats.Min = x
		}
		stats.Sum += x
	}
	return stats
}

// Stats aggregates the maximum, minimum and sum of a Vector.
type Stats struct {
	Max float64
	Min float64
	Sum float64
}

// String renders the stats as a three-line human-readable summary.
func (stats Stats) String() string {
	return fmt.Sprintf(
		"max: %v\n"+
			"min: %v\n"+
			"sum: %v",
		stats.Max,
		stats.Min,
		stats.Sum)
}

// Range returns the spread between the maximum and minimum.
func (stats Stats) Range() float64 {
	return stats.Max - stats.Min
}
func (vector Vector) CSVLine(delim string) string {
var buf = bytes.NewBuffer(make([]byte, 0, 10*vector.Len()))
var vLen = vector.Len()
for i, x := range vector {
if i == vLen-1 {
delim = ""
}
fmt.Fprintf(buf, "%g%s", x, delim)
}
return buf.String()
} | vec/vector.go | 0.802362 | 0.575916 | vector.go | starcoder |
package core
import (
"fmt"
"math/bits"
"time"
)
const Int64MostSigBitSet = 0x8000000000000000
// Convert a board coordinate as a string - such as f6 or b2 -
// into a position into a 64-length array (rank*8 + file).
func CoordinateToPos(coordinate string) int {
	file := int(coordinate[0] - 'a')
	rank := int(coordinate[1]-'0') - 1
	return rank*8 + file
}
// Convert a position from a 64-length array, into a board
// coordinate as a string such as "f6" or "b2".
func PosToCoordinate(pos int) string {
	fileRune := rune('a' + pos%8)
	rankRune := rune('1' + pos/8)
	return string(fileRune) + string(rankRune)
}
// charToDigit converts an ASCII digit byte ('0'..'9') to its numeric value.
func charToDigit(r byte) int {
	return int(r - '0')
}
// A helper function for pretty-printing a bitstring as a 2d board.
// Prints 8 rows of 8 squares ('.' for clear bits), each prefixed with a
// rank label, followed by a separator line and the file letters a-h.
func Print2dBitboard(bitboard uint64) {
	bitstring := fmt.Sprintf("%064b\n", bitboard)
	fmt.Println()
	for rankStartPos := 56; rankStartPos >= 0; rankStartPos -= 8 {
		fmt.Printf("%v | ", (rankStartPos/8)+1)
		for index := rankStartPos; index < rankStartPos+8; index++ {
			squareChar := bitstring[index]
			if squareChar == '0' {
				// Render clear bits as dots so set bits stand out.
				squareChar = '.'
			}
			fmt.Printf("%c ", squareChar)
		}
		fmt.Println()
	}
	fmt.Print(" ")
	for fileNo := 0; fileNo < 8; fileNo++ {
		fmt.Print("--")
	}
	fmt.Print("\n ")
	for _, file := range "abcdefgh" {
		fmt.Printf("%c ", file)
	}
	fmt.Println()
}
// Set the bit of a 64-bit integer at the position given,
// where index 0 = the most significant bit. Works in place.
func setBit(integer *uint64, pos int) {
	*integer |= (Int64MostSigBitSet >> pos)
}

// Set a single bit in a 64-bit integer and return the
// given value. This is contrasted to setBit, which
// does its work in-place.
func setSingleBit(pos int) uint64 {
	return Int64MostSigBitSet >> pos
}

// Clear the bit of a 64-bit integer at the position given,
// where index 0 = the most significant bit.
func clearBit(integer *uint64, pos int) {
	var mask uint64 = ^(Int64MostSigBitSet >> pos)
	*integer &= mask
}

// Determine if the bit at the given position is set in the
// 64-bit integer given, where index 0 = the most significant bit.
func hasBitSet(integer uint64, pos int) bool {
	return (integer & (Int64MostSigBitSet >> pos)) > 0
}
// Get the position of the least significant bit, where
// index 0 = the most significant bit.
// NOTE(review): for integer == 0, bits.TrailingZeros64 returns 64 and the
// result is -1 — callers must guard against an empty bitboard.
func getLSBPos(integer uint64) int {
	return 63 - bits.TrailingZeros64(integer)
}

// Get the position of the most significant bit, where
// index 0 = the most significant bit.
func getMSBPos(integer uint64) int {
	return bits.LeadingZeros64(integer)
}

// Get the position of the next least significant bit,
// (where index 0 = the most significant bit), and the
// bitboard for it, and finally clear the bit from the
// 64-bit integer given. This is a useful helper function
// when generating moves from a bitboard.
func popLSB(integer *uint64) (int, uint64) {
	pos := getLSBPos(*integer)
	// x & -x isolates the lowest set bit; x &= x - 1 then clears it.
	bbWithPosSet := *integer & -*integer
	*integer &= *integer - 1
	return pos, bbWithPosSet
}
// Get the absolute value of an integer. (Rewritten without the redundant
// else branch after a terminating return, per Go idiom.)
// NOTE(review): negating math.MinInt overflows back to itself; the board
// deltas used here are small, so this is fine in practice.
func abs(integer int) int {
	if integer < 0 {
		return -integer
	}
	return integer
}
// Get the maximum between two numbers.
func max(a, b int) int {
	if b > a {
		return b
	}
	return a
}
// Get the minimum between two numbers.
func min(a, b int) int {
	if b < a {
		return b
	}
	return a
}
// A convenience function to measure the execution time of a function;
// intended to be used as `defer timeit(time.Now())`. Prints whole
// milliseconds (the sub-millisecond remainder is truncated).
func timeit(start time.Time) {
	elapsed := time.Since(start)
	fmt.Printf("ms: %vms\n", int64(elapsed/time.Millisecond))
}
// Convert an internal move for blunder into a UCI formatted move string,
// e.g. "e2e4" or "e7e8q" for a queen promotion.
func ConvertMoveToLongAlgebraicNotation(move uint16) string {
	from, to, moveType := GetMoveInfo(move)
	coords := PosToCoordinate(from) + PosToCoordinate(to)
	switch moveType {
	case KnightPromotion:
		return coords + "n"
	case BishopPromotion:
		return coords + "b"
	case RookPromotion:
		return coords + "r"
	case QueenPromotion:
		return coords + "q"
	default:
		return coords
	}
}
// Convert a move in UCI format to an interal move for Blunder
func ConvertLongAlgebraicNotationToMove(board *Board, moveAsString string) uint16 {
fromPos := CoordinateToPos(moveAsString[0:2])
toPos := CoordinateToPos(moveAsString[2:4])
movePieceType := GetPieceType(board.Pieces[fromPos])
var moveType int
moveAsStringLen := len(moveAsString)
if moveAsStringLen == 5 {
if moveAsString[moveAsStringLen-1] == 'n' {
moveType = KnightPromotion
} else if moveAsString[moveAsStringLen-1] == 'b' {
moveType = BishopPromotion
} else if moveAsString[moveAsStringLen-1] == 'r' {
moveType = RookPromotion
} else if moveAsString[moveAsStringLen-1] == 'q' {
moveType = QueenPromotion
}
} else if moveAsString == "e1g1" && movePieceType == KingBB {
moveType = CastleWKS
} else if moveAsString == "e1c1" && movePieceType == KingBB {
moveType = CastleWQS
} else if moveAsString == "e8g8" && movePieceType == KingBB {
moveType = CastleBKS
} else if moveAsString == "e8c8" && movePieceType == KingBB {
moveType = CastleBQS
} else if toPos == board.EPSquare {
moveType = AttackEP
} else {
capturePiece := board.Pieces[toPos]
if capturePiece == NoPiece {
moveType = Quiet
} else {
moveType = Attack
}
}
return MakeMove(fromPos, toPos, moveType)
} | core/utils.go | 0.71113 | 0.450541 | utils.go | starcoder |
package main
import (
	"encoding/json"
	"fmt"
	"math"
	"strconv"
	"strings"
	"time"
)
// Date wraps time.Time together with a pre-computed, JSON-serializable
// breakdown of that instant (see MarshalJSON).
type Date struct {
	time.Time
	definition DateDefinition // calendar fields derived once in NewDate
}

// DateDefinition is the flattened calendar view of a date that is emitted
// as JSON. "Week" counts weeks starting on Sunday (see sundayWeek), while
// the Iso* fields follow ISO 8601 week numbering.
type DateDefinition struct {
	Date string `json:"date"`
	Year int `json:"year"`
	Month int `json:"month"`
	Week int `json:"week"`
	YearWeek string `json:"yearWeek"`
	YearMonth string `json:"yearMonth"`
	Day int `json:"day"`
	IsoYear int `json:"isoYear"`
	IsoWeek int `json:"isoWeek"`
	IsoYearIsoWeek string `json:"isoYearIsoWeek"`
	Weekday int `json:"weekday"`
	Yearday int `json:"yearday"`
}
// NewDate builds a Date for t, pre-computing every calendar field of the
// JSON-facing DateDefinition.
func NewDate(t time.Time) Date {
	isoYear, isoWeek := t.ISOWeek()
	week := sundayWeek(t)
	def := DateDefinition{
		Date:           t.Format(dateFormat),
		Year:           t.Year(),
		Month:          int(t.Month()),
		Week:           week,
		YearWeek:       fmt.Sprintf("%d%02d", t.Year(), week),
		YearMonth:      fmt.Sprintf("%d%02d", t.Year(), t.Month()),
		IsoWeek:        isoWeek,
		Day:            t.Day(),
		IsoYear:        isoYear,
		IsoYearIsoWeek: fmt.Sprintf("%d%02d", isoYear, isoWeek),
		Weekday:        int(t.Weekday()),
		Yearday:        t.YearDay(),
	}
	return Date{Time: t, definition: def}
}
const dateFormat = "2006-01-02"
// sundayWeek returns the week count based on every week starting on sunday.
// The formula offsets the day-of-year by the weekday of January 1st so the
// first (possibly partial) week counts as week 1.
// NOTE(review): confirm the behavior for the last days of December (the
// formula can yield week 53/54) matches what downstream grouping expects.
func sundayWeek(t time.Time) int {
	y, _, _ := t.Date()
	beginningOfYear := time.Date(y, time.January, 1, 0, 0, 0, 0, t.Location())
	yearDay := float64(t.YearDay())
	weekDayOfFirstDayOfYear := float64(beginningOfYear.Weekday())
	ic := yearDay - 7 + weekDayOfFirstDayOfYear
	return int(math.Ceil(ic/7) + 1)
}
// MarshalJSON serializes the pre-computed definition rather than the
// embedded time.Time, so JSON consumers see the calendar breakdown.
func (t Date) MarshalJSON() ([]byte, error) {
	return json.Marshal(t.definition)
}
// calculateRange returns one Date per day from start through end, inclusive.
func calculateRange(start time.Time, end time.Time) []Date {
	var dates []Date
	for day := start; !day.After(end); day = day.AddDate(0, 0, 1) {
		dates = append(dates, NewDate(day))
	}
	return dates
}
// uniqueDatesByKey keeps the first Date seen for each distinct value of
// key, preserving the input order.
func uniqueDatesByKey(dates []Date, key string) []Date {
	unique := make([]Date, 0, len(dates))
	seen := make(map[string]bool)
	for _, d := range dates {
		id := d.definition.getUniqueValueByKey(key)
		if seen[id] {
			continue
		}
		seen[id] = true
		unique = append(unique, d)
	}
	return unique
}
func (d DateDefinition) getUniqueValueByKey(key string) string {
builder := strings.Builder{}
switch key {
case "date":
builder.WriteString(string(d.Date))
case "year":
builder.WriteString(string(d.Year))
case "month":
builder.WriteString(string(d.Year))
builder.WriteString(string(d.Month))
case "week":
builder.WriteString(string(d.Year))
builder.WriteString(string(d.Week))
case "yearWeek":
builder.WriteString(string(d.Year))
builder.WriteString(string(d.Week))
case "yearMonth":
builder.WriteString(string(d.Year))
builder.WriteString(string(d.Month))
case "day":
builder.WriteString(string(d.Year))
builder.WriteString(string(d.Month))
builder.WriteString(string(d.Day))
case "isoYear":
builder.WriteString(string(d.IsoYear))
case "isoWeek":
builder.WriteString(string(d.IsoYear))
builder.WriteString(string(d.IsoWeek))
case "isoYearIsoWeek":
builder.WriteString(string(d.IsoYearIsoWeek))
case "weekday":
builder.WriteString(string(d.Year))
builder.WriteString(string(d.Week))
builder.WriteString(string(d.Day))
case "yearday":
builder.WriteString(string(d.Year))
builder.WriteString(string(d.Yearday))
}
return builder.String()
} | date.go | 0.71113 | 0.433562 | date.go | starcoder |
package schnorr
import (
"math/big"
"github.com/xlab-si/emmy/crypto/common"
)
// ProveEquality demonstrates how prover can prove the knowledge of
// log_g1(t1), log_g2(t2) and that log_g1(t1) = log_g2(t2), by running a
// full prover/verifier round trip and returning the verifier's verdict.
func ProveEquality(secret, g1, g2, t1, t2 *big.Int, group *Group) bool {
	prover := NewEqualityProver(group)
	verifier := NewEqualityVerifier(group)
	proofX1, proofX2 := prover.GetProofRandomData(secret, g1, g2)
	challenge := verifier.GetChallenge(g1, g2, t1, t2, proofX1, proofX2)
	proofZ := prover.GetProofData(challenge)
	return verifier.Verify(proofZ)
}
// EqualityProver holds the prover's state for a proof that two discrete
// logarithms are equal: it knows secret such that t1 = g1^secret and
// t2 = g2^secret.
type EqualityProver struct {
	Group *Group
	r *big.Int // random commitment exponent, fresh per proof
	secret *big.Int
	g1 *big.Int
	g2 *big.Int
}

// NewEqualityProver returns a prover bound to the given group.
func NewEqualityProver(group *Group) *EqualityProver {
	prover := EqualityProver{
		Group: group,
	}
	return &prover
}

// GetProofRandomData records the witness and both bases, then returns the
// commitments x1 = g1^r, x2 = g2^r for a single shared random r.
func (p *EqualityProver) GetProofRandomData(secret, g1, g2 *big.Int) (*big.Int, *big.Int) {
	// Sets the values that are needed before the protocol can be run.
	// The protocol proves the knowledge of log_g1(t1), log_g2(t2) and
	// that log_g1(t1) = log_g2(t2).
	p.secret = secret
	p.g1 = g1
	p.g2 = g2
	r := common.GetRandomInt(p.Group.Q)
	p.r = r
	x1 := p.Group.Exp(p.g1, r)
	x2 := p.Group.Exp(p.g2, r)
	return x1, x2
}

// GetProofData returns the response z = r + challenge * secret mod q.
func (p *EqualityProver) GetProofData(challenge *big.Int) *big.Int {
	// z = r + challenge * secret
	z := new(big.Int)
	z.Mul(challenge, p.secret)
	z.Add(z, p.r)
	z.Mod(z, p.Group.Q)
	return z
}
// EqualityVerifier holds the verifier's state for checking that
// log_g1(t1) = log_g2(t2).
type EqualityVerifier struct {
	Group *Group
	challenge *big.Int
	g1 *big.Int
	g2 *big.Int
	x1 *big.Int // prover's commitment g1^r
	x2 *big.Int // prover's commitment g2^r
	t1 *big.Int
	t2 *big.Int
}

// NewEqualityVerifier returns a verifier bound to the given group.
func NewEqualityVerifier(group *Group) *EqualityVerifier {
	verifier := EqualityVerifier{
		Group: group,
	}
	return &verifier
}

// GetChallenge records the public values and the prover's commitments,
// then draws and returns a random challenge from [0, q).
func (v *EqualityVerifier) GetChallenge(g1, g2, t1, t2, x1, x2 *big.Int) *big.Int {
	// Set the values that are needed before the protocol can be run.
	// The protocol proves the knowledge of log_g1(t1), log_g2(t2) and
	// that log_g1(t1) = log_g2(t2).
	v.g1 = g1
	v.g2 = g2
	v.t1 = t1
	v.t2 = t2
	// Sets the values g1^r1 and g2^r2.
	v.x1 = x1
	v.x2 = x2
	challenge := common.GetRandomInt(v.Group.Q)
	v.challenge = challenge
	return challenge
}
// It receives z = r + secret * challenge.
//It returns true if g1^z = g1^r * (g1^secret) ^ challenge and g2^z = g2^r * (g2^secret) ^ challenge.
func (v *EqualityVerifier) Verify(z *big.Int) bool {
left1 := v.Group.Exp(v.g1, z)
left2 := v.Group.Exp(v.g2, z)
r11 := v.Group.Exp(v.t1, v.challenge)
r12 := v.Group.Exp(v.t2, v.challenge)
right1 := v.Group.Mul(r11, v.x1)
right2 := v.Group.Mul(r12, v.x2)
return left1.Cmp(right1) == 0 && left2.Cmp(right2) == 0
} | crypto/schnorr/dlog_equality.go | 0.721449 | 0.474449 | dlog_equality.go | starcoder |
package model
import (
"log"
"time"
"gonum.org/v1/gonum/mat"
)
// Perceptron for input classification.
// There is one weight vector and one bias per output unit; input and
// output hold the data from the most recent FeedForward pass.
type Perceptron struct {
	inputSize int
	outputSize int
	input mat.Vector
	output mat.Vector
	weights []mat.Vector
	biases mat.Vector
}
// CreatePerceptron with the given parameters: one weight vector of length
// nInputs per output unit, plus a bias vector of length nOutputs.
func CreatePerceptron(nInputs, nOutputs int, weights [][]float64, biases []float64) *Perceptron {
	weightVecs := make([]mat.Vector, nOutputs)
	for out := 0; out < nOutputs; out++ {
		weightVecs[out] = mat.NewVecDense(nInputs, weights[out])
	}
	return &Perceptron{
		inputSize:  nInputs,
		outputSize: nOutputs,
		weights:    weightVecs,
		biases:     mat.NewVecDense(nOutputs, biases),
	}
}
// FeedForward an input through the perceptron.
// Stores the pattern and computes, for every output unit i, the value
// activation(w_i . x + b_i), keeping the result in p.output.
func (p *Perceptron) FeedForward(pattern mat.Vector) {
	p.input = pattern
	sumVec := mat.NewVecDense(p.inputSize, nil)
	y := mat.NewVecDense(p.outputSize, nil)
	for i := 0; i < p.outputSize; i++ {
		// Dot product via element-wise multiply followed by a sum.
		sumVec.MulElemVec(p.input, p.weights[i])
		z := mat.Sum(sumVec)
		z += p.biases.AtVec(i)
		y.SetVec(i, activation(z))
	}
	p.output = y
}
// activation is the Heaviside step function: 1 for z >= 0, else 0.
// (Rewritten without the redundant else branch, per Go idiom.)
func activation(z float64) float64 {
	if z >= 0 {
		return 1.0
	}
	return 0.0
}
// UpdateWeights applies the perceptron learning rule for target class t:
// the target unit that failed to fire gets the input added to its weights
// (bias +1), any other unit that wrongly fired gets the input subtracted
// (bias -1), and correctly classified units are left untouched.
//
// BUG FIX: the original rebuilt the bias vector from zeros and only wrote
// entries for the units it adjusted, silently resetting the bias of every
// correctly classified unit to 0 on each call. Biases of untouched units
// are now carried over.
func (p *Perceptron) UpdateWeights(t int) {
	newWeights := mat.NewVecDense(p.inputSize, nil)
	newBiases := mat.NewVecDense(p.outputSize, nil)
	for i := 0; i < p.outputSize; i++ {
		if i == t && p.output.AtVec(i) != 1.0 {
			newWeights.AddVec(p.weights[i], p.input)
			newBiases.SetVec(i, p.biases.AtVec(i)+1.0)
		} else if i != t && p.output.AtVec(i) != 0.0 {
			newWeights.SubVec(p.weights[i], p.input)
			newBiases.SetVec(i, p.biases.AtVec(i)-1.0)
		} else {
			// Correct output: preserve the existing bias (and weights).
			newBiases.SetVec(i, p.biases.AtVec(i))
			continue
		}
		p.weights[i] = mat.VecDenseCopyOf(newWeights)
	}
	p.biases = newBiases
}
// Train the network with the given patterns.
// Runs the configured number of epochs; each epoch feeds every pattern
// forward, applies the perceptron update rule for its label, and then
// logs the failure rate via Test.
func (p *Perceptron) Train(patterns []mat.Vector, labels []int, epochs int) {
	log.Println(">> Training started")
	start := time.Now()
	for i := 0; i < epochs; i++ {
		log.Printf("> Starting epoch %d", i)
		for j, pattern := range patterns {
			p.FeedForward(pattern)
			p.UpdateWeights(labels[j])
		}
		p.Test(patterns, labels)
	}
	elapsed := time.Since(start)
	log.Printf(">> Training finished | Time elapsed: %f", elapsed.Seconds())
}
// Test the perceptron with the given patterns.
func (p *Perceptron) Test(patterns []mat.Vector, labels []int) {
failures := 0
for i, pattern := range patterns {
p.FeedForward(pattern)
if p.output.AtVec(labels[i]) != 1.0 {
failures++
}
}
failureRate := (float64(failures) / float64(len(patterns))) * float64(100)
log.Println(">>>> Test finished")
log.Printf("Number of patterns: %d", len(patterns))
log.Printf("Number of failures: %d", failures)
log.Printf("Falure rate: %f", failureRate)
} | internal/model/perceptron.go | 0.770206 | 0.495606 | perceptron.go | starcoder |
package sliceutils
import (
"sort"
)
// sortableFloat32Slice implements sort.Interface for []float32 in
// increasing order.
// NOTE(review): Less uses a plain < comparison, so the resulting order is
// unspecified when the slice contains NaN values.
type sortableFloat32Slice []float32
func (s sortableFloat32Slice) Len() int {
	return len(s)
}
func (s sortableFloat32Slice) Less(i, j int) bool {
	return s[i] < s[j]
}
func (s sortableFloat32Slice) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}
// Float32Sort sorts the given slice.
func Float32Sort(slice []float32) {
	sort.Sort(sortableFloat32Slice(slice))
}
// sortableFloat64Slice implements sort.Interface for []float64 in
// increasing order. Retained in case other files in the package use it.
type sortableFloat64Slice []float64

func (s sortableFloat64Slice) Len() int { return len(s) }

func (s sortableFloat64Slice) Less(i, j int) bool { return s[i] < s[j] }

func (s sortableFloat64Slice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }

// Float64Sort sorts the given slice in increasing order using the
// standard library's sort.Float64s. Unlike the previous plain-<
// comparator (which gave an unspecified order for NaN input), NaN values
// are ordered before all other values.
func Float64Sort(slice []float64) {
	sort.Float64s(slice)
}
// sortableIntSlice implements sort.Interface for []int in increasing
// order. Retained in case other files in the package use it.
type sortableIntSlice []int

func (s sortableIntSlice) Len() int { return len(s) }

func (s sortableIntSlice) Less(i, j int) bool { return s[i] < s[j] }

func (s sortableIntSlice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }

// IntSort sorts the given slice in increasing order. Uses the standard
// library's dedicated sort.Ints instead of a hand-rolled sort.Interface.
func IntSort(slice []int) {
	sort.Ints(slice)
}
// sortableInt16Slice adapts []int16 to sort.Interface (ascending order).
type sortableInt16Slice []int16

func (v sortableInt16Slice) Len() int { return len(v) }

func (v sortableInt16Slice) Less(i, j int) bool { return v[i] < v[j] }

func (v sortableInt16Slice) Swap(i, j int) { v[i], v[j] = v[j], v[i] }

// Int16Sort sorts the given slice in increasing order.
func Int16Sort(slice []int16) {
	sort.Sort(sortableInt16Slice(slice))
}
// sortableInt32Slice adapts []int32 to sort.Interface (ascending order).
type sortableInt32Slice []int32

func (v sortableInt32Slice) Len() int { return len(v) }

func (v sortableInt32Slice) Less(i, j int) bool { return v[i] < v[j] }

func (v sortableInt32Slice) Swap(i, j int) { v[i], v[j] = v[j], v[i] }

// Int32Sort sorts the given slice in increasing order.
func Int32Sort(slice []int32) {
	sort.Sort(sortableInt32Slice(slice))
}
// sortableInt64Slice adapts []int64 to sort.Interface (ascending order).
type sortableInt64Slice []int64

func (v sortableInt64Slice) Len() int { return len(v) }

func (v sortableInt64Slice) Less(i, j int) bool { return v[i] < v[j] }

func (v sortableInt64Slice) Swap(i, j int) { v[i], v[j] = v[j], v[i] }

// Int64Sort sorts the given slice in increasing order.
func Int64Sort(slice []int64) {
	sort.Sort(sortableInt64Slice(slice))
}
// sortableInt8Slice adapts []int8 to sort.Interface (ascending order).
type sortableInt8Slice []int8

func (v sortableInt8Slice) Len() int { return len(v) }

func (v sortableInt8Slice) Less(i, j int) bool { return v[i] < v[j] }

func (v sortableInt8Slice) Swap(i, j int) { v[i], v[j] = v[j], v[i] }

// Int8Sort sorts the given slice in increasing order.
func Int8Sort(slice []int8) {
	sort.Sort(sortableInt8Slice(slice))
}
// sortableUintSlice adapts []uint to sort.Interface (ascending order).
type sortableUintSlice []uint

func (v sortableUintSlice) Len() int { return len(v) }

func (v sortableUintSlice) Less(i, j int) bool { return v[i] < v[j] }

func (v sortableUintSlice) Swap(i, j int) { v[i], v[j] = v[j], v[i] }

// UintSort sorts the given slice in increasing order.
func UintSort(slice []uint) {
	sort.Sort(sortableUintSlice(slice))
}
// sortableUint16Slice adapts []uint16 to sort.Interface (ascending order).
type sortableUint16Slice []uint16

func (v sortableUint16Slice) Len() int { return len(v) }

func (v sortableUint16Slice) Less(i, j int) bool { return v[i] < v[j] }

func (v sortableUint16Slice) Swap(i, j int) { v[i], v[j] = v[j], v[i] }

// Uint16Sort sorts the given slice in increasing order.
func Uint16Sort(slice []uint16) {
	sort.Sort(sortableUint16Slice(slice))
}
// sortableUint32Slice adapts []uint32 to sort.Interface (ascending order).
type sortableUint32Slice []uint32

func (v sortableUint32Slice) Len() int { return len(v) }

func (v sortableUint32Slice) Less(i, j int) bool { return v[i] < v[j] }

func (v sortableUint32Slice) Swap(i, j int) { v[i], v[j] = v[j], v[i] }

// Uint32Sort sorts the given slice in increasing order.
func Uint32Sort(slice []uint32) {
	sort.Sort(sortableUint32Slice(slice))
}
// sortableUint64Slice adapts []uint64 to sort.Interface (ascending order).
type sortableUint64Slice []uint64

func (v sortableUint64Slice) Len() int { return len(v) }

func (v sortableUint64Slice) Less(i, j int) bool { return v[i] < v[j] }

func (v sortableUint64Slice) Swap(i, j int) { v[i], v[j] = v[j], v[i] }

// Uint64Sort sorts the given slice in increasing order.
func Uint64Sort(slice []uint64) {
	sort.Sort(sortableUint64Slice(slice))
}
// sortableUint8Slice adapts []uint8 to sort.Interface (ascending order).
type sortableUint8Slice []uint8

func (v sortableUint8Slice) Len() int { return len(v) }

func (v sortableUint8Slice) Less(i, j int) bool { return v[i] < v[j] }

func (v sortableUint8Slice) Swap(i, j int) { v[i], v[j] = v[j], v[i] }

// Uint8Sort sorts the given slice in increasing order.
func Uint8Sort(slice []uint8) {
	sort.Sort(sortableUint8Slice(slice))
}
// sortableStringSlice implements sort.Interface for []string in
// increasing order. Retained in case other files in the package use it.
type sortableStringSlice []string

func (s sortableStringSlice) Len() int { return len(s) }

func (s sortableStringSlice) Less(i, j int) bool { return s[i] < s[j] }

func (s sortableStringSlice) Swap(i, j int) { s[i], s[j] = s[j], s[i] }

// StringSort sorts the given slice in increasing order. Uses the standard
// library's dedicated sort.Strings instead of a hand-rolled sort.Interface.
func StringSort(slice []string) {
	sort.Strings(slice)
}
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package compiler
import (
"github.com/open2b/scriggo/ast"
)
// isTypeGuard reports whether node is a switch type guard, as x.(type) and
// v := x.(type).
func isTypeGuard(node ast.Node) bool {
	switch n := node.(type) {
	case *ast.TypeAssertion:
		return n.Type == nil
	case *ast.Assignment:
		if len(n.Rhs) != 1 {
			return false
		}
		ta, ok := n.Rhs[0].(*ast.TypeAssertion)
		return ok && ta.Type == nil
	}
	return false
}
// parseSwitch parses a switch statement and returns an Switch or TypeSwitch
// node. Panics on error.
//
// The header may contain an optional init statement before a semicolon and
// an optional expression or type guard after it. The three top-level cases
// below branch on the first token that follows the parsed expression list:
// the terminator itself, a semicolon, or an assignment token.
func (p *parsing) parseSwitch(tok token, end tokenTyp) ast.Node {
	pos := tok.pos
	var assignment *ast.Assignment
	// "{%" "switch" [ beforeSemicolon ";" ] afterSemicolon "%}"
	var beforeSemicolon, afterSemicolon ast.Node
	expressions, tok := p.parseExprList(p.next(), true, false, true)
	want := tokenLeftBrace
	if end == tokenEndStatement {
		want = tokenEndStatement
	}
	switch {
	case tok.typ == want:
		// No semicolon: everything parsed so far is the (optional) guard.
		switch len(expressions) {
		case 0:
			// switch {
		case 1:
			// switch x {
			// switch x + 2 {
			// switch f(2) {
			// switch x.(type) {
			afterSemicolon = expressions[0]
		default:
			// switch x + 2, y + 1 {
			panic(syntaxError(tok.pos, "unexpected %s, expecting := or = or comma", want))
		}
	case tok.typ == tokenSemicolon:
		// An init statement precedes the semicolon.
		switch len(expressions) { // # of expressions before ;
		case 0:
			// switch ; x + 2 {
			// switch ; {
			// switch ; x := a.(type) {
			// switch ; a.(type) {
		case 1:
			// switch f(3); x {
			beforeSemicolon = expressions[0]
		default:
			// switch f(), g(); x + 2 {
			// switch f(), g(); {
			panic(syntaxError(tok.pos, "unexpected semicolon, expecting := or = or comma"))
		}
		if isTypeGuard(beforeSemicolon) {
			// TODO (Gianluca): use type assertion node position instead of last read token position
			// TODO (Gianluca): move to type-checker:
			panic(syntaxError(tok.pos, "use of .(type) outside type switch"))
		}
		expressions, tok = p.parseExprList(p.next(), true, false, true)
		switch len(expressions) { // # of expressions after ;
		case 0:
			// switch ; {
			// switch f3(); {
		case 1:
			// switch f(3); x {
			// switch ; x + 2 {
			// switch x + 3; x.(type) {
			// switch ; x.(type) {
			afterSemicolon = expressions[0]
		default:
			// switch x; a, b {
			// switch ; a, b {
			// switch ; a, b, c {
			panic(syntaxError(tok.pos, "unexpected %s, expecting := or = or comma", want))
		}
	case isAssignmentToken(tok):
		// The header starts with an assignment (init statement or guard).
		// switch x := 3; x {
		// switch x := 3; x + y {
		// switch x = y.(type) {
		// switch x := 2; x = y.(type) {
		assignment, tok = p.parseAssignment(expressions, tok, false, true, true)
		switch tok.typ {
		case tokenSemicolon:
			if isTypeGuard(assignment) {
				// TODO (Gianluca): use type assertion node position instead of last read token position
				panic(syntaxError(tok.pos, "use of .(type) outside type switch"))
			}
			beforeSemicolon = assignment
			// switch x := 2; {
			// switch x := 3; x {
			// switch x := 3; x + y {
			// switch x := 2; x = y.(type) {
			expressions, tok = p.parseExprList(p.next(), true, false, true)
			if isAssignmentToken(tok) {
				// This is the only valid case where there is an assignment
				// before and after the semicolon token:
				// switch x := 2; x = y.(type) {
				assignment, tok = p.parseAssignment(expressions, tok, false, true, true)
				ta, ok := assignment.Rhs[0].(*ast.TypeAssertion)
				// TODO (Gianluca): should error contain the position of the
				// expression which caused the error instead of the token (as Go
				// does)?
				if !ok || ta.Type != nil || len(assignment.Lhs) != 1 {
					panic(cannotUseAsValueError(tok.pos, assignment))
				}
				afterSemicolon = assignment
			} else {
				switch len(expressions) {
				case 0:
					// switch x := 2; {
				case 1:
					// switch x := 3; x {
					// switch x := 3; x + y {
					// switch x := 2; y.(type) {
					afterSemicolon = expressions[0]
				default:
					// switch x := 2; x + y, y + z {
					panic(syntaxError(tok.pos, "unexpected %s, expecting := or = or comma", want))
				}
			}
		case want:
			// switch x = y.(type) {
			// switch x := y.(type) {
			if len(assignment.Rhs) != 1 {
				panic(syntaxError(tok.pos, "unexpected %s, expecting expression", want))
			}
			ta, ok := assignment.Rhs[0].(*ast.TypeAssertion)
			if !ok || ta.Type != nil || len(assignment.Lhs) != 1 {
				panic(cannotUseAsValueError(tok.pos, assignment))
			}
			afterSemicolon = assignment
		}
	}
	if tok.typ != want {
		panic(syntaxError(tok.pos, "unexpected %s, expecting %s", tok, want))
	}
	pos.End = tok.pos.End
	var node ast.Node
	// A type guard after the semicolon makes this a type switch; a bare
	// "x.(type)" guard is normalized to the assignment "_ = x.(type)".
	if isTypeGuard(afterSemicolon) {
		if a, ok := afterSemicolon.(*ast.TypeAssertion); ok {
			afterSemicolon = ast.NewAssignment(
				a.Pos(), []ast.Expression{ast.NewIdentifier(a.Pos(), "_")}, ast.AssignmentSimple, []ast.Expression{a},
			)
		}
		node = ast.NewTypeSwitch(pos, beforeSemicolon, afterSemicolon.(*ast.Assignment), nil, nil)
	} else {
		if afterSemicolon != nil {
			node = ast.NewSwitch(pos, beforeSemicolon, afterSemicolon.(ast.Expression), nil, nil)
		} else {
			node = ast.NewSwitch(pos, beforeSemicolon, nil, nil, nil)
		}
	}
	return node
} | vendor/github.com/open2b/scriggo/internal/compiler/parser_switch.go | 0.520253 | 0.442817 | parser_switch.go | starcoder
// Data structures and helpers that describe Wikidata signature
// resources that we want to work with.
package mappings
import (
"encoding/json"
"fmt"
)
// WikidataMapping provides a way to persist Wikidata resources in
// memory.
// NOTE(review): the key format is not visible here — presumably the
// record's Wikidata ID (e.g. "Q12345"); confirm at the call sites that
// populate the map.
var WikidataMapping = make(map[string]Wikidata)
// Wikidata stores information about something which constitutes a
// format resource in Wikidata. I.e. Anything which has a URI and
// describes a file-format. See DisableSignatures/SignaturesDisabled for
// the per-record signature kill-switch.
type Wikidata struct {
	ID string // Wikidata short name, e.g. Q12345 can be appended to a URI to be dereferenced.
	Name string // Name of the format as described in Wikidata.
	URI string // URI is the absolute URL in Wikidata terms that can be dereferenced.
	PRONOM []string // 1:1 mapping to PRONOM wherever possible.
	Extension []string // Extension returned by Wikidata.
	Mimetype []string // Mimetype as recorded by Wikidata.
	Signatures []Signature // Signature associated with a record which we will convert to a new Type.
	disableSignatures bool // If a bad heuristic was found we can't reliably add signatures to the record.
}
// Signature describes a complete signature resource, i.e. a way to
// identify a file format using Wikidata information.
type Signature struct {
	ByteSequences []ByteSequence // A signature is made up of multiple byte sequences that encode a position and a pattern, e.g. BOF and EOF.
	Provenance string // Provenance of the signature.
	Date string // Date the signature was submitted.
}

// ByteSequence describes a sequence that goes into a signature, where
// a signature is made up of 1..* sequences. Usually up to three.
type ByteSequence struct {
	Signature string // Signature byte sequence.
	Offset int // Offset used by the signature.
	Encoding int // Signature encoding, e.g. Hexadecimal, ASCII, PRONOM.
	Relativity string // Position relative to beginning or end of file, or elsewhere.
}
// String serializes the signature component of our record to indented
// JSON for debugging; it returns the empty string when marshalling fails.
// FIX: fmt.Sprintf("%s", b) on a []byte is a needless round trip through
// fmt (staticcheck S1025) — a direct string conversion is equivalent.
func (signature Signature) String() string {
	report, err := json.MarshalIndent(signature, "", " ")
	if err != nil {
		return ""
	}
	return string(report)
}
// String serializes the byte sequence component of our record to indented
// JSON for debugging; it returns the empty string when marshalling fails.
// FIX: fmt.Sprintf("%s", b) on a []byte is a needless round trip through
// fmt (staticcheck S1025) — a direct string conversion is equivalent.
func (byteSequence ByteSequence) String() string {
	report, err := json.MarshalIndent(byteSequence, "", " ")
	if err != nil {
		return ""
	}
	return string(report)
}
// DisableSignatures is used when processing Wikidata records when a
// critical error is discovered with a record that needs to be looked
// into beyond what Roy can do for us. Pointer receiver: mutates the
// record in place.
func (wikidata *Wikidata) DisableSignatures() {
	wikidata.disableSignatures = true
}

// SignaturesDisabled tells us whether the signatures are disabled for
// a given record.
func (wikidata Wikidata) SignaturesDisabled() bool {
	return wikidata.disableSignatures
}
// PUIDs enables the Wikidata format records to be mapped to existing
// PRONOM records when run in PRONOM mode, i.e. not just with Wikidata
// signatures.
func (wikidata Wikidata) PUIDs() []string {
var puids []string
for _, puid := range wikidata.PRONOM {
puids = append(puids, puid)
}
return puids
} | pkg/wikidata/internal/mappings/wikidata_mapping_structs.go | 0.598782 | 0.440048 | wikidata_mapping_structs.go | starcoder |
package countingsort
// IntsStable sorts integers slice using stable counting sort algorithm
// but it allocates more memory and works slower:
// it builds a sorted copy via GetSortedInts and copies it back in place.
func IntsStable(items []int, max int) {
	sorted := GetSortedInts(items, max)
	copy(items, sorted)
}
// Ints sorts integers slice in place using counting sort: count each
// value's occurrences, then write the values back in ascending order.
// All elements must lie in [0, max].
func Ints(items []int, max int) {
	counts := make([]int, max+1)
	for _, v := range items {
		counts[v]++
	}
	next := 0
	for value, count := range counts {
		for ; count > 0; count-- {
			items[next] = value
			next++
		}
	}
}
// Ints64Stable sorts int64 slice using stable counting sort algorithm,
// but it allocates more memory and works slower:
// it builds a sorted copy via GetSortedInts64 and copies it back in place.
func Ints64Stable(items []int64, max int64) {
	sorted := GetSortedInts64(items, max)
	copy(items, sorted)
}
// Ints64 sorts an int64 slice in place using counting sort: count each
// value's occurrences, then write the values back in ascending order.
// All elements must lie in [0, max].
func Ints64(items []int64, max int64) {
	counts := make([]int64, max+1)
	for _, v := range items {
		counts[v]++
	}
	next := 0
	for value := int64(0); value <= max; value++ {
		for n := int64(0); n < counts[value]; n++ {
			items[next] = value
			next++
		}
	}
}
// GetSortedInts returns a sorted copy of a using stable counting sort:
// count occurrences, turn counts into prefix sums, then place elements
// from the end so equal values keep their relative order.
func GetSortedInts(a []int, max int) []int {
	counts := make([]int, max+1)
	for _, v := range a {
		counts[v]++
	}
	for i := 1; i <= max; i++ {
		counts[i] += counts[i-1]
	}
	out := make([]int, len(a))
	for i := len(a) - 1; i >= 0; i-- {
		counts[a[i]]--
		out[counts[a[i]]] = a[i]
	}
	return out
}
// GetSortedInts64 returns sorted int64 slice
// (stable counting sort: count occurrences, build prefix sums, then place
// elements from the end so equal values keep their relative order).
func GetSortedInts64(a []int64, max int64) []int64 {
	b := make([]int64, len(a))
	c := make([]int64, max+1)
	for _, item := range a {
		c[item]++
	}
	for i := 1; i < len(c); i++ {
		c[i] += c[i-1]
	}
	for i := len(a) - 1; i >= 0; i-- {
		c[a[i]]--
		b[c[a[i]]] = a[i]
	}
	return b
}
// Int64Slice attaches the methods of Interface to []int64, sorting in increasing order.
type Int64Slice []int64
func (p Int64Slice) Len() int { return len(p) }
func (p Int64Slice) Less(i, j int) bool { return p[i] < p[j] }
func (p Int64Slice) Swap(i, j int) { p[i], p[j] = p[j], p[i] } | countingsort/countingsort.go | 0.85738 | 0.480113 | countingsort.go | starcoder |
// Memory Tracing gives us a general idea if our software is healthy as related to the GC and
// memory in the heap that we are working with.
// We are using a special environmental variable called GODEBUG. It gives us the ability to do
// a memory trace and a scheduler trace. Below is a sample program that causes memory leak that we
// can use GODEBUG to understand what's going on.
// Here are the steps to build and run:
// Build the program by: go build memory_tracing.go
// Run the binary: GODEBUG=gctrace=1 ./memory_tracing
// Setting the GODEBUG=gctrace=1 causes the garbage collector to emit a single line to standard
// error at each collection, summarizing the amount of memory collected and the length of the pause.
// What we are gonna see are bad traces followed by this pattern:
/*
gc {0} @{1}s {2}%: {3}+...+{4} ms clock, {5}+...+{6} ms cpu, {7}->{8}->{9} MB, {10} MB goal, {11} P
where:
{0} : The number of times gc run
{1} : The amount of time the program has been running.
{2} : The percentage of CPU the gc is taking away from us.
{3} : Stop of wall clock time - a measure of the real time including time that passes due to programmed
delays or waiting for resources to become available.
{4} : Stop of wall clock. This is normally a more important number to look at.
{5} : CPU clock
{6} : CPU clock
{7} : The size of the heap prior to the gc starting.
{8} : The size of the heap after the gc run.
{9} : The size of the live heap.
{10}: The goal of the gc, pacing algorithm.
{11}: The number of processes.
*/
// For example:
/*
gc 1 @0.007s 0%: 0.010+0.13+0.030 ms clock, 0.080+0/0.058/0.15+0.24 ms cpu, 5->5->3 MB, 6 MB goal, 8 P
gc 2 @0.013s 0%: 0.003+0.21+0.034 ms clock, 0.031+0/0.030/0.22+0.27 ms cpu, 9->9->7 MB, 10 MB goal, 8 P
gc 3 @0.029s 0%: 0.003+0.23+0.030 ms clock, 0.029+0.050/0.016/0.25+0.24 ms cpu, 18->18->15 MB, 19 MB goal, 8 P
gc 4 @0.062s 0%: 0.003+0.40+0.040 ms clock, 0.030+0/0.28/0.11+0.32 ms cpu, 36->36->30 MB, 37 MB goal, 8 P
gc 5 @0.135s 0%: 0.003+0.63+0.045 ms clock, 0.027+0/0.026/0.64+0.36 ms cpu, 72->72->60 MB, 73 MB goal, 8 P
gc 6 @0.302s 0%: 0.003+0.98+0.043 ms clock, 0.031+0.078/0.016/0.88+0.34 ms cpu, 65->66->42 MB, 120 MB goal, 8 P
gc 7 @0.317s 0%: 0.003+1.2+0.080 ms clock, 0.026+0/1.1/0.13+0.64 ms cpu, 120->121->120 MB, 121 MB goal, 8 P
gc 8 @0.685s 0%: 0.004+1.6+0.041 ms clock, 0.032+0/1.5/0.72+0.33 ms cpu, 288->288->241 MB, 289 MB goal, 8 P
gc 9 @1.424s 0%: 0.004+4.0+0.081 ms clock, 0.033+0.027/3.8/0.53+0.65 ms cpu, 577->577->482 MB, 578 MB goal, 8 P
gc 10 @2.592s 0%: 0.003+11+0.045 ms clock, 0.031+0/5.9/5.2+0.36 ms cpu, 499->499->317 MB, 964 MB goal, 8 P
*/
// It goes really fast in the beginning and then starts to slow down. This is bad.
// The size of the heap is increasing every time the gc run. It shows that there is a memory leak.
package main
import (
"os"
"os/signal"
)
func main() {
	// Deliberately leak memory: this goroutine grows a map forever so the
	// GC trace shows an ever-increasing live heap.
	go func() {
		leak := make(map[int]int)
		for n := 0; ; n++ {
			leak[n] = n
		}
	}()
	// Block until any signal (e.g. Ctrl-C) arrives, then exit.
	interrupt := make(chan os.Signal, 1)
	signal.Notify(interrupt)
	<-interrupt
}
package animation
import "github.com/taylorza/go-gfx/pkg/gfx"
// rect describes a rectangular frame region within the source texture.
type rect struct {
	x, y, w, h int // top-left corner (x, y) plus width and height, in texture pixels
}
// Animation represents an animation sequence made up of 1 or more frames.
type Animation struct {
	t *gfx.Texture // source texture the frames are cut from
	frames []rect // per-frame regions within t, in playback order
	frameTime float64 // time each frame is shown; Fps sets this to 1/fps
	bidi bool // ping-pong playback: switch direction at either end (see Bidi)
	reverse bool // play the sequence in reverse (see Reverse)
}
// Option is the signature of a configuration function for an animation.
// Options are applied in order by New after default initialization.
type Option func(a *Animation)
// New creates an instance of an animation whose frames are extracted from
// the given texture. The supplied options are applied in order after the
// defaults (10 fps equivalent frame time, no bidirectional playback).
// It panics if t is nil, since an animation needs a source texture.
func New(t *gfx.Texture, opts ...Option) *Animation {
	if t == nil {
		panic("texture cannot be nil for animation")
	}
	anim := &Animation{t: t, frameTime: 0.1, bidi: false}
	for _, configure := range opts {
		configure(anim)
	}
	return anim
}
// FrameSlicer is an Option function that slices the texture into individual
// frames for the animation. Frames are sequenced left to right, top to bottom.
//
//	offsetX, offsetY        - starting point for slicing frames
//	frameWidth, frameHeight - size of each frame
//	colCount                - number of horizontal frames to extract
//	rowCount                - number of vertical frames to extract
func FrameSlicer(offsetX, offsetY, frameWidth, frameHeight, colCount, rowCount int) Option {
	return func(anim *Animation) {
		for r := 0; r < rowCount; r++ {
			top := offsetY + r*frameHeight
			for c := 0; c < colCount; c++ {
				anim.frames = append(anim.frames, rect{
					x: offsetX + c*frameWidth,
					y: top,
					w: frameWidth,
					h: frameHeight,
				})
			}
		}
	}
}
// Frame is an Option function that appends a single frame, given as an
// explicit rectangle within the texture.
func Frame(x, y, w, h int) Option {
	return func(anim *Animation) {
		anim.frames = append(anim.frames, rect{x: x, y: y, w: w, h: h})
	}
}
// Bidi is an Option function that makes the animation cycle back and forth,
// switching direction when it reaches either end of the frame sequence.
func Bidi() Option {
	return func(anim *Animation) {
		anim.bidi = true
	}
}
// Fps is an Option function that sets the frame rate the animation runs at,
// by storing the per-frame display time as 1/fps.
// NOTE(review): assumes fps > 0; fps == 0 would yield an infinite frame
// time — confirm callers never pass zero.
func Fps(fps int) Option {
	return func(anim *Animation) {
		anim.frameTime = 1 / float64(fps)
	}
}
// Reverse is an Option function that sets the animation to run in reverse
func Reverse() Option {
return func(a *Animation) {
a.reverse = true
}
} | pkg/gfx/animation/animation.go | 0.821796 | 0.407098 | animation.go | starcoder |
package check
import (
"fmt"
)
var ErrLimitReached = fmt.Errorf("limit reached")
// StatusSeries is a fixed-capacity, append-only buffer of Status values.
type StatusSeries struct {
	index int // next free slot; equals len(series) when the series is full
	series []Status // backing storage, sized once at construction
}
// NewStatusSeries allocates an empty StatusSeries with room for size statuses.
func NewStatusSeries(size int) *StatusSeries {
	ss := &StatusSeries{series: make([]Status, size)}
	return ss
}
// Add appends a status to the series. It returns ErrLimitReached once the
// series has been filled to its fixed capacity.
func (ss *StatusSeries) Add(status Status) error {
	if ss.index >= len(ss.series) {
		return ErrLimitReached
	}
	ss.series[ss.index] = status
	ss.index++
	return nil
}
// Merge updates the current series with the source series according to the
// merging strategy: least non-zero status rule, i.e. Down < Up < Unknown.
// Both series must have the same capacity.
func (ss *StatusSeries) Merge(src *StatusSeries) error {
	if ss.size() != src.size() {
		return fmt.Errorf("the capacity of status series must be equal, got %d and %d", ss.size(), src.size())
	}
	for i := range ss.series {
		ss.series[i] = mergeStrategy(ss.series[i], src.series[i])
	}
	return nil
}
// Stats tallies the series into Up/Down/Unknown counters. Expected carries
// the total capacity of the series, not just the slots filled so far.
func (ss *StatusSeries) Stats() Stats {
	result := Stats{Expected: ss.size()}
	for _, st := range ss.series {
		switch st {
		case Up:
			result.Up++
		case Down:
			result.Down++
		case Unknown:
			result.Unknown++
		}
	}
	return result
}
// Clean resets the series to its initial empty state, replacing the
// backing storage so previous data is discarded.
func (ss *StatusSeries) Clean() {
	ss.series = make([]Status, len(ss.series))
	ss.index = 0
}
// size returns the fixed capacity of the series.
func (ss *StatusSeries) size() int {
	return len(ss.series)
}
// mergeStrategy prefers new information when it is more valuable:
// Down more than Up, Up more than Unknown, anything more than nodata
// (which is just a zero). Since Down < Up < Unknown numerically, the
// smaller non-zero status wins.
func mergeStrategy(dst, src Status) Status {
	switch {
	case src == nodata:
		return dst
	case dst == nodata:
		return src
	case src < dst:
		return src
	default:
		return dst
	}
}
func MergeStatusSeries(size int, byId map[string]*StatusSeries, ids []string) (*StatusSeries, error) {
acc := NewStatusSeries(size)
for _, id := range ids {
series, ok := byId[id]
if !ok {
// no data means no data
return NewStatusSeries(size), nil
}
err := acc.Merge(series)
if err != nil {
return nil, fmt.Errorf("cannot merge status series: %v", err)
}
}
return acc, nil
} | modules/500-upmeter/images/upmeter/pkg/check/series.go | 0.681197 | 0.521532 | series.go | starcoder |
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package quat
import "math"
// Sin returns the sine of q.
//
// For q = w + v (scalar w, vector part v with magnitude |v|):
//
//	sin(q) = sin(w)cosh(|v|) + cos(w)sinh(|v|) * v/|v|
func Sin(q Quat) Quat {
	scalar, vec := split(q)
	if vec == zero {
		// Purely real input: fall back to the real sine.
		return lift(math.Sin(scalar))
	}
	mag := Abs(vec)
	sin, cos := math.Sincos(scalar)
	sinh, cosh := sinhcosh(mag)
	return join(sin*cosh, Scale(cos*sinh/mag, vec))
}
// Sinh returns the hyperbolic sine of q.
//
// For q = w + v: sinh(q) = sinh(w)cos(|v|) + cosh(w)sin(|v|) * v/|v|.
func Sinh(q Quat) Quat {
	scalar, vec := split(q)
	if vec == zero {
		// Purely real input: fall back to the real hyperbolic sine.
		return lift(math.Sinh(scalar))
	}
	mag := Abs(vec)
	sin, cos := math.Sincos(mag)
	sinh, cosh := sinhcosh(scalar)
	return join(cos*sinh, Scale(sin*cosh/mag, vec))
}
// Cos returns the cosine of q.
//
// For q = w + v: cos(q) = cos(w)cosh(|v|) - sin(w)sinh(|v|) * v/|v|.
func Cos(q Quat) Quat {
	scalar, vec := split(q)
	if vec == zero {
		// Purely real input: fall back to the real cosine.
		return lift(math.Cos(scalar))
	}
	mag := Abs(vec)
	sin, cos := math.Sincos(scalar)
	sinh, cosh := sinhcosh(mag)
	return join(cos*cosh, Scale(-sin*sinh/mag, vec))
}
// Cosh returns the hyperbolic cosine of q.
//
// For q = w + v: cosh(q) = cosh(w)cos(|v|) + sinh(w)sin(|v|) * v/|v|.
func Cosh(q Quat) Quat {
	scalar, vec := split(q)
	if vec == zero {
		// Purely real input: fall back to the real hyperbolic cosine.
		return lift(math.Cosh(scalar))
	}
	mag := Abs(vec)
	sin, cos := math.Sincos(mag)
	sinh, cosh := sinhcosh(scalar)
	return join(cos*cosh, Scale(sin*sinh/mag, vec))
}
// Tan returns the tangent of q, computed as sin(q)/cos(q).
// When cos(q) is the zero quaternion the result is Inf().
func Tan(q Quat) Quat {
	cos := Cos(q)
	if cos == zero {
		return Inf()
	}
	return Mul(Sin(q), Inv(cos))
}
// Tanh returns the hyperbolic tangent of q, computed as sinh(q)/cosh(q).
// When cosh(q) is the zero quaternion the result is Inf().
func Tanh(q Quat) Quat {
	cosh := Cosh(q)
	if cosh == zero {
		return Inf()
	}
	return Mul(Sinh(q), Inv(cosh))
}
// Asin returns the inverse sine of q.
//
// For non-real q it evaluates -u*log(u*q + sqrt(1 - q²)), where u is the
// unit vector of q's vector part.
func Asin(q Quat) Quat {
	_, uv := split(q)
	if uv == zero {
		// Purely real input: fall back to the real arcsine.
		return lift(math.Asin(q.Real))
	}
	u := unit(uv)
	return Mul(Scale(-1, u), Log(Add(Mul(u, q), Sqrt(Sub(Quat{Real: 1}, Mul(q, q))))))
}
// Asinh returns the inverse hyperbolic sine of q, evaluated as
// log(q + sqrt(1 + q²)).
func Asinh(q Quat) Quat {
	sq := Mul(q, q)
	root := Sqrt(Add(Quat{Real: 1}, sq))
	return Log(Add(q, root))
}
// Acos returns the inverse cosine of q, computed from the identity
// acos(q) = π/2 - asin(q) (the vector part of asin(q) is negated).
func Acos(q Quat) Quat {
	re, im := split(Asin(q))
	return join(math.Pi/2-re, Scale(-1, im))
}
// Acosh returns the inverse hyperbolic cosine of q.
//
// It is derived from Acos: the result is acos(q) multiplied by the unit
// vector of its own vector part, with the sign fixed so the real part of
// the result is non-negative (principal value).
func Acosh(q Quat) Quat {
	w := Acos(q)
	_, uv := split(w)
	if uv == zero {
		// acos(q) is purely real; it is its own hyperbolic counterpart here.
		return w
	}
	w = Mul(w, unit(uv))
	if w.Real < 0 {
		// Keep the principal value: real part must be non-negative.
		w = Scale(-1, w)
	}
	return w
}
// Atan returns the inverse tangent of q.
//
// For non-real q it uses atan(q) = (u/2) * log((u+q)/(u-q)), where u is
// the unit vector of q's vector part.
func Atan(q Quat) Quat {
	w, uv := split(q)
	if uv == zero {
		// Purely real input: fall back to the real arctangent.
		return lift(math.Atan(w))
	}
	u := unit(uv)
	return Mul(Mul(lift(0.5), u), Log(Mul(Add(u, q), Inv(Sub(u, q)))))
}
// Atanh returns the inverse hyperbolic tangent of q.
//
// For non-real q it uses atanh(q) = -u * atan(u*q), where u is the unit
// vector of q's vector part.
func Atanh(q Quat) Quat {
	scalar, vec := split(q)
	if vec == zero {
		// Purely real input: fall back to the real inverse hyperbolic tangent.
		return lift(math.Atanh(scalar))
	}
	u := unit(vec)
	return Mul(Scale(-1, u), Atan(Mul(u, q)))
}
// sinhcosh returns sinh(x) and cosh(x) in a single call. For small |x| it
// defers to the library routines; otherwise both values are derived from
// one Exp evaluation: sinh = e^x/2 - e^-x/2, cosh = e^x/2 + e^-x/2.
// The halving is an exact power-of-two scaling, so results match the
// original two-step computation bit for bit.
func sinhcosh(x float64) (sh, ch float64) {
	if math.Abs(x) <= 0.5 {
		return math.Sinh(x), math.Cosh(x)
	}
	half := 0.5 * math.Exp(x)
	halfInv := 0.25 / half // exactly 0.5 / e^x
	return half - halfInv, half + halfInv
}
package canvas
import (
"image"
"image/color"
"math"
)
// LinearGradient defines a Gradient travelling straight at a given angle.
// Generate supports the eight angles 0/45/90/135/180/225/270/315; any
// other value falls back to vertical (same as 0).
type LinearGradient struct {
	baseObject
	StartColor color.Color // The beginning RGBA color of the gradient
	EndColor color.Color // The end RGBA color of the gradient
	Angle float64 // The angle of the gradient (0/180 for vertical; 90/270 for horizontal)
}
// Generate calculates an image of the gradient with the specified width
// and height. The shading function maps a pixel-centre coordinate to a
// blend factor in [0, 1], which computeGradient uses to interpolate
// between StartColor and EndColor. Unrecognized angles render vertically.
func (g *LinearGradient) Generate(iw, ih int) image.Image {
	w, h := float64(iw), float64(ih)
	var shade func(x, y float64) float64
	switch g.Angle {
	case 90: // horizontal flipped
		shade = func(x, _ float64) float64 { return (w - x) / w }
	case 270: // horizontal
		shade = func(x, _ float64) float64 { return x / w }
	case 45: // diagonal negative flipped
		shade = func(x, y float64) float64 { return math.Abs((w+h)-(x+h-y)) / math.Abs(w+h) }
	case 225: // diagonal negative
		shade = func(x, y float64) float64 { return math.Abs(x+h-y) / math.Abs(w+h) }
	case 135: // diagonal positive flipped
		shade = func(x, y float64) float64 { return math.Abs((w+h)-(x+y)) / math.Abs(w+h) }
	case 315: // diagonal positive
		shade = func(x, y float64) float64 { return math.Abs(x+y) / math.Abs(w+h) }
	case 180: // vertical flipped
		shade = func(_, y float64) float64 { return (h - y) / h }
	default: // vertical
		shade = func(_, y float64) float64 { return y / h }
	}
	return computeGradient(shade, iw, ih, g.StartColor, g.EndColor)
}
// Refresh causes this object to be redrawn in its current state.
func (g *LinearGradient) Refresh() {
	Refresh(g)
}
// RadialGradient defines a Gradient travelling radially from a center point outward.
type RadialGradient struct {
	baseObject
	StartColor color.Color // The beginning RGBA color of the gradient
	EndColor color.Color // The end RGBA color of the gradient
	// The offset of the center for generation of the gradient.
	// This is not a DP measure but relates to the width/height.
	// A value of 0.5 would move the center by the half width/height.
	CenterOffsetX, CenterOffsetY float64
}
// Generate calculates an image of the gradient with the specified width
// and height. The gradient radiates from the (possibly offset) centre; the
// semi-axes a and b are chosen from the far edge for negative offsets so
// the gradient still spans the image.
func (g *RadialGradient) Generate(iw, ih int) image.Image {
	w, h := float64(iw), float64(ih)
	// Centre of the gradient, shifted by the fractional offsets.
	centerX := w/2 + w*g.CenterOffsetX
	centerY := h/2 + h*g.CenterOffsetY
	// Semi-axes of the bounding ellipse; negative offsets measure from the
	// opposite edge.
	var a, b float64
	if g.CenterOffsetX < 0 {
		a = w - centerX
	} else {
		a = centerX
	}
	if g.CenterOffsetY < 0 {
		b = h - centerY
	} else {
		b = centerY
	}
	shade := func(x, y float64) float64 {
		// Elliptical distance from the centre drives the blend factor.
		dx, dy := centerX-x, centerY-y
		da := math.Sqrt(dx*dx + dy*dy*a*a/b/b)
		if da > a {
			return 1 // outside the ellipse: fully the end color
		}
		return da / a
	}
	return computeGradient(shade, iw, ih, g.StartColor, g.EndColor)
}
// Refresh causes this object to be redrawn in its current state.
func (g *RadialGradient) Refresh() {
	Refresh(g)
}
// calculatePixel linearly interpolates between startColor and endColor at
// position d (expected in [0, 1]) and returns the blended color with
// 16-bit channels, as produced by color.Color.RGBA().
func calculatePixel(d float64, startColor, endColor color.Color) *color.RGBA64 {
	aR, aG, aB, aA := startColor.RGBA()
	bR, bG, bB, bA := endColor.RGBA()
	// Per-channel linear blend: a + d*(b-a).
	lerp := func(a, b uint32) uint16 {
		return uint16(float64(a) + d*(float64(b)-float64(a)))
	}
	return &color.RGBA64{
		R: lerp(aR, bR),
		G: lerp(aG, bG),
		B: lerp(aB, bB),
		A: lerp(aA, bA),
	}
}
// computeGradient renders a w×h image by sampling generator at every pixel
// centre and blending startColor into endColor with the returned factor.
// A nil start or end color is treated as transparent; when both are nil
// the untouched (fully transparent) image is returned immediately.
func computeGradient(generator func(x, y float64) float64, w, h int, startColor, endColor color.Color) image.Image {
	img := image.NewRGBA(image.Rect(0, 0, w, h))
	switch {
	case startColor == nil && endColor == nil:
		return img
	case startColor == nil:
		startColor = color.Transparent
	case endColor == nil:
		endColor = color.Transparent
	}
	for x := 0; x < w; x++ {
		for y := 0; y < h; y++ {
			// Sample at the pixel centre (+0.5) for symmetric gradients.
			d := generator(float64(x)+0.5, float64(y)+0.5)
			img.Set(x, y, calculatePixel(d, startColor, endColor))
		}
	}
	return img
}
// NewHorizontalGradient creates a new horizontally travelling linear
// gradient: the start color sits at the left edge and the end color at the
// right (angle 270).
func NewHorizontalGradient(start, end color.Color) *LinearGradient {
	return &LinearGradient{StartColor: start, EndColor: end, Angle: 270}
}
// NewLinearGradient creates a linear gradient at the specified angle.
// The angle parameter is the degree angle along which the gradient is
// calculated; NewHorizontalGradient uses 270 degrees and
// NewVerticalGradient uses 0 degrees.
func NewLinearGradient(start, end color.Color, angle float64) *LinearGradient {
	return &LinearGradient{StartColor: start, EndColor: end, Angle: angle}
}
// NewRadialGradient creates a new radial gradient with its centre at the
// middle of the image (zero offsets).
func NewRadialGradient(start, end color.Color) *RadialGradient {
	g := &RadialGradient{StartColor: start, EndColor: end}
	return g
}
// NewVerticalGradient creates a new vertically travelling linear gradient.
// The start color will be at the top of the gradient and the end color will be at the bottom.
func NewVerticalGradient(start color.Color, end color.Color) *LinearGradient {
return &LinearGradient{StartColor: start, EndColor: end}
} | canvas/gradient.go | 0.909733 | 0.706621 | gradient.go | starcoder |
package transform
import "github.com/frictionlessdata/tableschema-go/schema"
// Schema extends `schema.Schema` adding a `Headers()` method.
// It is a defined type (not an alias), so schema.Schema values must be
// converted explicitly before the method can be used.
type Schema schema.Schema
// Headers returns the names of the schema's fields, in declaration order.
// For a schema with no fields the result is nil, matching the original
// append-onto-nil behavior.
func (s *Schema) Headers() []string {
	var names []string
	for _, field := range s.Fields {
		names = append(names, field.Name)
	}
	return names
}
// CNAESchema follows Frictionless Data's table schema.
// Each row links a company (cnpj, 14 digits) to one economic-activity
// code (cnae); both columns are required.
var CNAESchema = Schema{
	Fields: []schema.Field{
		{
			Name: "cnpj",
			Type: schema.StringType,
			Constraints: schema.Constraints{Required: true, Pattern: "\\d{14}"},
		},
		{
			Name: "cnae",
			Type: schema.IntegerType,
			Constraints: schema.Constraints{Required: true},
		},
	},
}
// PartnerSchema follows Frictionless Data's table schema.
// Each row describes one partner (socio) record, keyed to a company by
// its 14-digit cnpj.
var PartnerSchema = Schema{
	Fields: []schema.Field{
		{
			Name: "cnpj",
			Type: schema.StringType,
			Constraints: schema.Constraints{Required: true, Pattern: "\\d{14}"},
		},
		{
			Name: "identificador_de_socio",
			Type: schema.IntegerType,
			Constraints: schema.Constraints{Enum: []interface{}{1, 2, 3}},
		},
		{
			Name: "nome_socio",
			Type: schema.StringType,
		},
		{
			Name: "cnpj_cpf_do_socio",
			Type: schema.StringType,
			// "000" is treated as missing data for this column.
			MissingValues: map[string]struct{}{"000": struct{}{}},
		},
		{
			Name: "codigo_qualificacao_socio",
			Type: schema.IntegerType,
		},
		{
			Name: "percentual_capital_social",
			Type: schema.NumberType,
		},
		{
			Name: "data_entrada_sociedade",
			Type: schema.DateType,
		},
		{
			Name: "cpf_representante_legal",
			Type: schema.StringType,
			// The empty string is treated as missing data for this column.
			MissingValues: map[string]struct{}{"": struct{}{}},
		},
		{
			Name: "nome_representante_legal",
			Type: schema.StringType,
		},
		{
			Name: "codigo_qualificacao_representante_legal",
			Type: schema.StringType,
		},
	},
}
// CompanySchema follows Frictionless Data's table schema.
// It describes one company record; cnpj (14 digits) is the primary key.
var CompanySchema = Schema{
	Fields: []schema.Field{
		{
			Name: "cnpj",
			Type: schema.StringType,
			Constraints: schema.Constraints{Required: true, Pattern: "\\d{14}"},
		},
		{
			Name: "identificador_matriz_filial",
			Type: schema.IntegerType,
			Constraints: schema.Constraints{Enum: []interface{}{1, 2}},
		},
		{
			Name: "razao_social",
			Type: schema.StringType,
		},
		{
			Name: "nome_fantasia",
			Type: schema.StringType,
		},
		{
			Name: "situacao_cadastral",
			Type: schema.IntegerType,
			Constraints: schema.Constraints{Enum: []interface{}{1, 2, 3, 4, 8}},
		},
		{
			Name: "data_situacao_cadastral",
			Type: schema.DateType,
		},
		{
			Name: "motivo_situacao_cadastral",
			Type: schema.StringType,
		},
		{
			Name: "nome_cidade_exterior",
			Type: schema.StringType,
		},
		{
			Name: "codigo_natureza_juridica",
			Type: schema.IntegerType,
		},
		{
			Name: "data_inicio_atividade",
			Type: schema.DateType,
		},
		{
			Name: "cnae_fiscal",
			Type: schema.IntegerType,
		},
		{
			Name: "descricao_tipo_logradouro",
			Type: schema.StringType,
		},
		{
			Name: "logradouro",
			Type: schema.StringType,
		},
		{
			Name: "numero",
			Type: schema.StringType,
		},
		{
			Name: "complemento",
			Type: schema.StringType,
		},
		{
			Name: "bairro",
			Type: schema.StringType,
		},
		{
			Name: "cep",
			Type: schema.StringType,
		},
		{
			Name: "uf",
			Type: schema.StringType,
			Constraints: schema.Constraints{MinLength: 2, MaxLength: 2}},
		{
			Name: "codigo_municipio",
			Type: schema.IntegerType,
		},
		{
			Name: "municipio",
			Type: schema.StringType,
		},
		{
			Name: "ddd_telefone1",
			Type: schema.StringType,
		},
		{
			Name: "ddd_telefone2",
			Type: schema.StringType,
		},
		{
			Name: "ddd_fax",
			Type: schema.StringType,
		},
		{
			Name: "qualificacao_do_responsavel",
			Type: schema.IntegerType,
		},
		{
			Name: "capital_social",
			Type: schema.NumberType,
		},
		{
			Name: "porte",
			Type: schema.IntegerType,
			Constraints: schema.Constraints{Enum: []interface{}{0, 1, 3, 5}},
		},
		{
			// Raw codes 5/7 mean "opted in"; "", 0, 6 and 8 mean "opted out".
			Name: "opcao_pelo_simples",
			Type: schema.BooleanType,
			TrueValues: []string{"5", "7"},
			FalseValues: []string{"", "0", "6", "8"},
			Constraints: schema.Constraints{Enum: []interface{}{"", 0, 5, 6, 7, 8}},
		},
		{
			Name: "data_opcao_pelo_simples",
			Type: schema.DateType,
		},
		{
			Name: "data_exclusao_do_simples",
			Type: schema.DateType,
		},
		{
			// Raw "S"/"N" flags parsed as booleans; empty string allowed.
			Name: "opcao_pelo_mei",
			Type: schema.BooleanType,
			TrueValues: []string{"S"},
			FalseValues: []string{"N"},
			Constraints: schema.Constraints{Enum: []interface{}{"S", "N", ""}},
		},
		{
			Name: "situacao_especial",
			Type: schema.StringType,
		},
		{
			Name: "data_situacao_especial",
			Type: schema.StringType,
		},
	},
	PrimaryKeys: []string{"cnpj"},
} | transform/descriptors.go | 0.674265 | 0.508788 | descriptors.go | starcoder |
package asciitree
import (
"fmt"
"reflect"
"strings"
)
// Caches the label and children field indices for a specific reflection type.
// Oh, and properties, and ... roots (to unify things slightly).
// An index of -1 means the type has no field with that asciitree role.
type fieldCacheItem struct {
	labelIndex int
	propertiesIndex int
	childrenIndex int
	rootsIndex int
}
// Cache for quickly looking up the label, children field indices for a given
// reflection type, so repeated trees of the same type are scanned only once.
// NOTE(review): access is not synchronized — confirm trees are only rendered
// from a single goroutine.
var fieldCache = make(map[reflect.Type]*fieldCacheItem)
// structInfo returns the asciitree-tagged field indices for the struct held
// in node, or nil if node is not a struct. Results are cached per
// reflect.Type so repeated lookups of mono-type struct trees are cheap.
// It panics when a tag value is invalid or a role is tagged twice.
//
// Fix: the panic messages previously used %T on a reflect.StructField or
// reflect.Value, which always printed "reflect.StructField"/"reflect.Value"
// instead of the offending type, and referred to an "ascii" tag although
// the struct-tag key is "asciitree".
//
// NOTE(review): for anonymously embedded structs, the embedded type's own
// field indices are copied verbatim into the outer type's cache entry; they
// are only meaningful if callers resolve them against the embedded value —
// confirm against the tree renderer.
func structInfo(node reflect.Value) (sinfo *fieldCacheItem) {
	if node.Kind() != reflect.Struct {
		return nil
	}
	// Try to look up this (struct) type from the cache, if already known.
	typ := node.Type()
	sinfo, found := fieldCache[typ]
	if found {
		return
	}
	// This struct type is yet unknown, so scan the type's fields for
	// asciitree tags, and if found and valid, then learn the field indices.
	sinfo = &fieldCacheItem{
		labelIndex:      -1,
		propertiesIndex: -1,
		childrenIndex:   -1,
		rootsIndex:      -1,
	}
	for idx := 0; idx < typ.NumField(); idx++ {
		field := typ.Field(idx)
		if field.Type.Kind() == reflect.Struct && field.Anonymous {
			// Oops, it's an anonymous embedded struct, so we need to check that too!
			anon := structInfo(node.Field(idx))
			if anon.labelIndex >= 0 {
				if sinfo.labelIndex >= 0 {
					panic(fmt.Sprintf("double asciitree:\"label\" tag for anonymously embedded type %s", field.Type))
				}
				sinfo.labelIndex = anon.labelIndex
			}
			if anon.propertiesIndex >= 0 {
				if sinfo.propertiesIndex >= 0 {
					panic(fmt.Sprintf("double asciitree:\"properties\" tag for anonymously embedded type %s", field.Type))
				}
				sinfo.propertiesIndex = anon.propertiesIndex
			}
			if anon.childrenIndex >= 0 {
				if sinfo.childrenIndex >= 0 {
					panic(fmt.Sprintf("double asciitree:\"children\" tag for anonymously embedded type %s", field.Type))
				}
				sinfo.childrenIndex = anon.childrenIndex
			}
			if anon.rootsIndex >= 0 {
				if sinfo.rootsIndex >= 0 {
					panic(fmt.Sprintf("double asciitree:\"roots\" tag for anonymously embedded type %s", field.Type))
				}
				sinfo.rootsIndex = anon.rootsIndex
			}
		}
		tags, ok := asciitreeTagValues(field.Tag.Get("asciitree"))
		if !ok {
			panic(fmt.Sprintf("invalid asciitree tag(s) %v", tags))
		}
		for _, tag := range tags {
			switch tag {
			case "label":
				if sinfo.labelIndex >= 0 {
					panic(fmt.Sprintf("double asciitree:\"label\" tag for type %s", typ))
				}
				sinfo.labelIndex = idx
			case "properties":
				if sinfo.propertiesIndex >= 0 {
					panic(fmt.Sprintf("double asciitree:\"properties\" tag for type %s", typ))
				}
				sinfo.propertiesIndex = idx
			case "children":
				if sinfo.childrenIndex >= 0 {
					panic(fmt.Sprintf("double asciitree:\"children\" tag for type %s", typ))
				}
				sinfo.childrenIndex = idx
			case "roots":
				if sinfo.rootsIndex >= 0 {
					panic(fmt.Sprintf("double asciitree:\"roots\" tag for type %s", typ))
				}
				sinfo.rootsIndex = idx
			}
		}
	}
	fieldCache[typ] = sinfo // cache it.
	return
}
// asciitreeTagValues splits a comma-separated asciitree tag value and
// validates each element. On success it returns the recognized values and
// true; otherwise it returns only the unrecognized values and false.
// Empty elements (and surrounding whitespace) are ignored.
func asciitreeTagValues(tagval string) ([]string, bool) {
	parts := strings.Split(tagval, ",")
	invalid := []string{}
	valid := make([]string, 0, len(parts))
	for _, part := range parts {
		part = strings.TrimSpace(part)
		if part == "" {
			continue
		}
		switch part {
		case "roots", "label", "properties", "children":
			valid = append(valid, part)
		default:
			invalid = append(invalid, part)
		}
	}
	if len(invalid) > 0 {
		return invalid, false
	}
	return valid, true
}
package ent
import (
"fmt"
"opencensus/core/ent/infectedrecord"
"strings"
"time"
"entgo.io/ent/dialect/sql"
)
// InfectedRecord is the model entity for the InfectedRecord schema.
// The layout (config embedding, Edges field) follows ent's generated-code
// conventions; prefer regenerating over editing by hand.
type InfectedRecord struct {
	config `json:"-"`
	// ID of the ent.
	ID int `json:"id,omitempty"`
	// ReportedDate holds the value of the "reportedDate" field.
	ReportedDate time.Time `json:"reportedDate,omitempty"`
	// CollectedDate holds the value of the "collectedDate" field.
	CollectedDate time.Time `json:"collectedDate,omitempty"`
	// PcrTotalTests holds the value of the "pcrTotalTests" field.
	PcrTotalTests int `json:"pcrTotalTests,omitempty"`
	// PrTotalTests holds the value of the "prTotalTests" field.
	PrTotalTests int `json:"prTotalTests,omitempty"`
	// AgTotalTests holds the value of the "agTotalTests" field.
	AgTotalTests int `json:"agTotalTests,omitempty"`
	// PcrPositiveTests holds the value of the "pcrPositiveTests" field.
	PcrPositiveTests int `json:"pcrPositiveTests,omitempty"`
	// PrPositiveTests holds the value of the "prPositiveTests" field.
	PrPositiveTests int `json:"prPositiveTests,omitempty"`
	// AgPositiveTests holds the value of the "agPositiveTests" field.
	AgPositiveTests int `json:"agPositiveTests,omitempty"`
	// Edges holds the relations/edges for other nodes in the graph.
	// The values are being populated by the InfectedRecordQuery when eager-loading is set.
	Edges InfectedRecordEdges `json:"edges"`
}
// InfectedRecordEdges holds the relations/edges for other nodes in the graph.
type InfectedRecordEdges struct {
	// Places holds the value of the places edge.
	Places []*Place `json:"places,omitempty"`
	// loadedTypes holds the information for reporting if a
	// type was loaded (or requested) in eager-loading or not.
	loadedTypes [1]bool
}
// PlacesOrErr returns the Places value or an error if the edge
// was not loaded in eager-loading. The error distinguishes "never loaded"
// from "loaded but empty".
func (e InfectedRecordEdges) PlacesOrErr() ([]*Place, error) {
	if e.loadedTypes[0] {
		return e.Places, nil
	}
	return nil, &NotLoadedError{edge: "places"}
}
// scanValues returns the types for scanning values from sql.Rows.
// Each column gets a nullable scan target matching its Go type, so NULLs
// coming back from the database do not abort the scan.
func (*InfectedRecord) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case infectedrecord.FieldID, infectedrecord.FieldPcrTotalTests, infectedrecord.FieldPrTotalTests, infectedrecord.FieldAgTotalTests, infectedrecord.FieldPcrPositiveTests, infectedrecord.FieldPrPositiveTests, infectedrecord.FieldAgPositiveTests:
			values[i] = &sql.NullInt64{}
		case infectedrecord.FieldReportedDate, infectedrecord.FieldCollectedDate:
			values[i] = &sql.NullTime{}
		default:
			return nil, fmt.Errorf("unexpected column %q for type InfectedRecord", columns[i])
		}
	}
	return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the InfectedRecord fields. Only Valid (non-NULL) values are copied;
// NULL columns leave the corresponding field at its zero value.
func (ir *InfectedRecord) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case infectedrecord.FieldID:
			value, ok := values[i].(*sql.NullInt64)
			if !ok {
				return fmt.Errorf("unexpected type %T for field id", value)
			}
			ir.ID = int(value.Int64)
		case infectedrecord.FieldReportedDate:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field reportedDate", values[i])
			} else if value.Valid {
				ir.ReportedDate = value.Time
			}
		case infectedrecord.FieldCollectedDate:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field collectedDate", values[i])
			} else if value.Valid {
				ir.CollectedDate = value.Time
			}
		case infectedrecord.FieldPcrTotalTests:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field pcrTotalTests", values[i])
			} else if value.Valid {
				ir.PcrTotalTests = int(value.Int64)
			}
		case infectedrecord.FieldPrTotalTests:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field prTotalTests", values[i])
			} else if value.Valid {
				ir.PrTotalTests = int(value.Int64)
			}
		case infectedrecord.FieldAgTotalTests:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field agTotalTests", values[i])
			} else if value.Valid {
				ir.AgTotalTests = int(value.Int64)
			}
		case infectedrecord.FieldPcrPositiveTests:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field pcrPositiveTests", values[i])
			} else if value.Valid {
				ir.PcrPositiveTests = int(value.Int64)
			}
		case infectedrecord.FieldPrPositiveTests:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field prPositiveTests", values[i])
			} else if value.Valid {
				ir.PrPositiveTests = int(value.Int64)
			}
		case infectedrecord.FieldAgPositiveTests:
			if value, ok := values[i].(*sql.NullInt64); !ok {
				return fmt.Errorf("unexpected type %T for field agPositiveTests", values[i])
			} else if value.Valid {
				ir.AgPositiveTests = int(value.Int64)
			}
		}
	}
	return nil
}
// QueryPlaces queries the "places" edge of the InfectedRecord entity.
func (ir *InfectedRecord) QueryPlaces() *PlaceQuery {
	return (&InfectedRecordClient{config: ir.config}).QueryPlaces(ir)
}
// Update returns a builder for updating this InfectedRecord.
// Note that you need to call InfectedRecord.Unwrap() before calling this method if this InfectedRecord
// was returned from a transaction, and the transaction was committed or rolled back.
func (ir *InfectedRecord) Update() *InfectedRecordUpdateOne {
	return (&InfectedRecordClient{config: ir.config}).UpdateOne(ir)
}
// Unwrap unwraps the InfectedRecord entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
func (ir *InfectedRecord) Unwrap() *InfectedRecord {
	// Only entities loaded inside a transaction carry a *txDriver.
	tx, ok := ir.config.driver.(*txDriver)
	if !ok {
		panic("ent: InfectedRecord is not a transactional entity")
	}
	ir.config.driver = tx.drv
	return ir
}
// String implements the fmt.Stringer.
// Every field is listed in schema order, for debugging and logging.
func (ir *InfectedRecord) String() string {
	var builder strings.Builder
	builder.WriteString("InfectedRecord(")
	builder.WriteString(fmt.Sprintf("id=%v", ir.ID))
	builder.WriteString(", reportedDate=")
	builder.WriteString(ir.ReportedDate.Format(time.ANSIC))
	builder.WriteString(", collectedDate=")
	builder.WriteString(ir.CollectedDate.Format(time.ANSIC))
	builder.WriteString(", pcrTotalTests=")
	builder.WriteString(fmt.Sprintf("%v", ir.PcrTotalTests))
	builder.WriteString(", prTotalTests=")
	builder.WriteString(fmt.Sprintf("%v", ir.PrTotalTests))
	builder.WriteString(", agTotalTests=")
	builder.WriteString(fmt.Sprintf("%v", ir.AgTotalTests))
	builder.WriteString(", pcrPositiveTests=")
	builder.WriteString(fmt.Sprintf("%v", ir.PcrPositiveTests))
	builder.WriteString(", prPositiveTests=")
	builder.WriteString(fmt.Sprintf("%v", ir.PrPositiveTests))
	builder.WriteString(", agPositiveTests=")
	builder.WriteString(fmt.Sprintf("%v", ir.AgPositiveTests))
	builder.WriteByte(')')
	return builder.String()
}
// InfectedRecords is a parsable slice of InfectedRecord.
type InfectedRecords []*InfectedRecord
// config propagates the client configuration to every entity in the slice.
func (ir InfectedRecords) config(cfg config) {
	for _i := range ir {
		ir[_i].config = cfg
	}
} | ent/infectedrecord.go | 0.682256 | 0.468365 | infectedrecord.go | starcoder |
package csr
import "math/rand"
// MatrixGenerator defines a matrix generator. It builds a random sparse
// matrix in coordinate (COO) scratch form and later emits it as CSR.
type MatrixGenerator struct {
	numNode, numConnection uint32 // matrix dimension and number of non-zero entries
	xCoords, yCoords []uint32 // column (x) and row (y) coordinate of each entry
	values []float32 // the non-zero values, parallel to xCoords/yCoords
	positionOccupied map[uint32]bool // set of taken cells, keyed by y*numNode+x
	xCoordIndex, yCoordIndex map[uint32][]uint32 // entry indices grouped by column resp. row
}
// MakeMatrixGenerator returns a MatrixGenerator for a numNode×numNode
// matrix with numConnection non-zero entries. Scratch buffers are
// allocated lazily by init when GenerateMatrix runs.
func MakeMatrixGenerator(numNode, numConnection uint32) MatrixGenerator {
	g := MatrixGenerator{numNode: numNode, numConnection: numConnection}
	return g
}
// GenerateMatrix builds a random, column-normalized sparse matrix and
// returns it in CSR form. The value receiver is deliberate: init and the
// other pointer-receiver helpers mutate the local (addressable) copy, so
// each call gets its own scratch state.
func (g MatrixGenerator) GenerateMatrix() Matrix {
	g.init()
	g.generateConnections()
	g.normalize()
	m := g.outputCSRFormat()
	return m
}
// init allocates the scratch buffers used while generating one matrix:
// the coordinate/value arrays (sized for numConnection entries) and the
// occupancy/index maps.
func (g *MatrixGenerator) init() {
	g.xCoords = make([]uint32, 0, g.numConnection)
	g.yCoords = make([]uint32, 0, g.numConnection)
	g.values = make([]float32, 0, g.numConnection)
	g.positionOccupied = make(map[uint32]bool)
	g.xCoordIndex = make(map[uint32][]uint32)
	g.yCoordIndex = make(map[uint32][]uint32)
}
// generateConnections populates the matrix with numConnection random entries.
func (g *MatrixGenerator) generateConnections() {
	for count := uint32(0); count < g.numConnection; count++ {
		g.generateOneConnection()
	}
}
// normalize scales every column so its entries sum to 1. Columns with no
// entries (sum zero) are left untouched to avoid division by zero.
func (g *MatrixGenerator) normalize() {
	for col := uint32(0); col < g.numNode; col++ {
		total := g.sumColumn(col)
		if total == 0 {
			continue
		}
		for _, idx := range g.xCoordIndex[col] {
			g.values[idx] /= total
		}
	}
}
// outputCSRFormat assembles the scratch coordinate data into a Matrix in
// compressed sparse row (CSR) layout: RowOffsets[i] is the index of row
// i's first entry in ColumnNumbers/Values, and a final offset equal to the
// total entry count terminates the array. Columns within a row are sorted.
func (g MatrixGenerator) outputCSRFormat() Matrix {
	m := Matrix{}
	rowOffset := uint32(0)
	for i := uint32(0); i < g.numNode; i++ {
		cols, values := g.selectRowData(i)
		g.sortRowData(cols, values)
		m.RowOffsets = append(m.RowOffsets, rowOffset)
		m.ColumnNumbers = append(m.ColumnNumbers, cols...)
		m.Values = append(m.Values, values...)
		rowOffset += uint32(len(cols))
	}
	m.RowOffsets = append(m.RowOffsets, rowOffset)
	return m
}
// selectRowData collects the column numbers and values of every entry
// stored in the given row, using the per-row index.
func (g MatrixGenerator) selectRowData(
	row uint32,
) (
	cols []uint32,
	values []float32,
) {
	for _, idx := range g.yCoordIndex[row] {
		cols = append(cols, g.xCoords[idx])
		values = append(values, g.values[idx])
	}
	return
}
// sortRowData sorts cols ascending in place, keeping values paired with
// their column numbers. The quadratic scan is acceptable here since rows
// are short relative to the whole matrix.
func (g MatrixGenerator) sortRowData(cols []uint32, values []float32) {
	for i := range cols {
		for j := i + 1; j < len(cols); j++ {
			if cols[j] <= cols[i] {
				cols[i], cols[j] = cols[j], cols[i]
				values[i], values[j] = values[j], values[i]
			}
		}
	}
}
// sumColumn returns the sum of all stored values in column i.
func (g MatrixGenerator) sumColumn(i uint32) float32 {
	var total float32
	for _, idx := range g.xCoordIndex[i] {
		total += g.values[idx]
	}
	return total
}
// generateOneConnection picks a random unoccupied (x, y) cell, stores a
// random value there, and indexes the new entry by column (x) and row (y)
// for fast lookups during normalization and CSR output.
//
// Cleanup: the explicit pre-initialization of missing index buckets was
// removed — appending to a missing map key yields a fresh slice, so the
// resulting map contents are identical. The order of rand calls
// (coordinates first, then the value) is unchanged.
func (g *MatrixGenerator) generateOneConnection() {
	x, y := g.generateUnoccupiedPosition()
	g.xCoords = append(g.xCoords, x)
	g.yCoords = append(g.yCoords, y)
	g.values = append(g.values, rand.Float32())
	// Index of the entry just appended.
	idx := uint32(len(g.values) - 1)
	g.xCoordIndex[x] = append(g.xCoordIndex[x], idx)
	g.yCoordIndex[y] = append(g.yCoordIndex[y], idx)
}
// generateUnoccupiedPosition draws random coordinates until it finds a
// cell that is still free, marks it occupied, and returns it.
// NOTE(review): this loops forever if every cell is already taken, i.e.
// when numConnection > numNode*numNode — confirm callers guarantee the
// requested connection count fits the matrix.
func (g MatrixGenerator) generateUnoccupiedPosition() (x, y uint32) {
	for {
		x = uint32(rand.Int()) % g.numNode
		y = uint32(rand.Int()) % g.numNode
		if !g.isPositionOccupied(x, y) {
			g.markPositionOccupied(x, y)
			return
		}
	}
}
// isPositionOccupied reports whether cell (x, y) already holds a value.
// Cells are keyed in the occupancy set as y*numNode+x.
func (g MatrixGenerator) isPositionOccupied(x, y uint32) bool {
	_, taken := g.positionOccupied[y*g.numNode+x]
	return taken
}
func (g MatrixGenerator) markPositionOccupied(x, y uint32) {
g.positionOccupied[y*g.numNode+x] = true
} | benchmarks/matrix/csr/matrixgenerator.go | 0.712032 | 0.53868 | matrixgenerator.go | starcoder |
package isbn
// Identifier represents ISBN identifier group and its prefix.
type Identifier struct {
	// GroupName is the identifying group name, which can be a national or language name.
	GroupName string
	// Abbreviation is the shorthand name of the GroupName.
	Abbreviation string
	// Prefix is the ISBN prefix assigned to the group (e.g. "978" or "979").
	Prefix string
	// Identifier is the ISBN registration-group number assigned within the prefix.
	Identifier string
}
// Identifiers is list of ISBN identifer groups
var Identifiers = []*Identifier{
{
GroupName: "English",
Abbreviation: "en",
Prefix: "978",
Identifier: "0",
},
{
GroupName: "English2",
Abbreviation: "en2",
Prefix: "978",
Identifier: "1",
},
{
GroupName: "French",
Abbreviation: "fr",
Prefix: "978",
Identifier: "2",
},
{
GroupName: "German",
Abbreviation: "de",
Prefix: "978",
Identifier: "3",
},
{
GroupName: "Japan",
Abbreviation: "jp",
Prefix: "978",
Identifier: "4",
},
{
GroupName: "Russia",
Abbreviation: "ru",
Prefix: "978",
Identifier: "5",
},
{
GroupName: "China",
Abbreviation: "cn",
Prefix: "978",
Identifier: "7",
},
{
GroupName: "Brazil",
Abbreviation: "br",
Prefix: "978",
Identifier: "65",
},
{
GroupName: "Czech",
Abbreviation: "cz",
Prefix: "978",
Identifier: "80",
},
{
GroupName: "India",
Abbreviation: "in",
Prefix: "978",
Identifier: "81",
},
{
GroupName: "Norge",
Abbreviation: "no",
Prefix: "978",
Identifier: "82",
},
{
GroupName: "Poland",
Abbreviation: "pl",
Prefix: "978",
Identifier: "83",
},
{
GroupName: "Spain",
Abbreviation: "es",
Prefix: "978",
Identifier: "84",
},
{
GroupName: "Brazil2",
Abbreviation: "br2",
Prefix: "978",
Identifier: "85",
},
{
GroupName: "Serbia",
Abbreviation: "rs",
Prefix: "978",
Identifier: "86",
},
{
GroupName: "Denmark",
Abbreviation: "dk",
Prefix: "978",
Identifier: "87",
},
{
GroupName: "Italy",
Abbreviation: "it",
Prefix: "978",
Identifier: "88",
},
{
GroupName: "SouthKorea",
Abbreviation: "kr",
Prefix: "978",
Identifier: "89",
},
{
GroupName: "Netherlands",
Abbreviation: "nl",
Prefix: "978",
Identifier: "90",
},
{
GroupName: "Sweden",
Abbreviation: "se",
Prefix: "978",
Identifier: "91",
},
{
GroupName: "NGO",
Abbreviation: "ngo",
Prefix: "978",
Identifier: "92",
},
{
GroupName: "India2",
Abbreviation: "in2",
Prefix: "978",
Identifier: "93",
},
{
GroupName: "Netherlands2",
Abbreviation: "nl2",
Prefix: "978",
Identifier: "94",
},
{
GroupName: "French2",
Abbreviation: "fr2",
Prefix: "979",
Identifier: "10",
},
{
GroupName: "SouthKorea2",
Abbreviation: "kr2",
Prefix: "979",
Identifier: "11",
},
{
GroupName: "Italy2",
Abbreviation: "it2",
Prefix: "979",
Identifier: "12",
},
}
// SearchIdentifier search Identifer from Identifiers by given name which is supposed to be
// Identifier's GroupName or Abbreviation
func SearchIdentifier(name string) *Identifier {
for _, v := range Identifiers {
if name == v.GroupName {
return v
}
if name == v.Abbreviation {
return v
}
}
return nil
} | isbn/identifier_group.go | 0.527317 | 0.523542 | identifier_group.go | starcoder |
package pb
const (
	// swagger is the embedded Swagger/OpenAPI 2.0 specification, as a raw JSON
	// string, describing the HTTP endpoints generated from pb/profile.proto.
	// The JSON body is generated output and must not be edited by hand.
	swagger = `{
  "swagger": "2.0",
  "info": {
    "title": "pb/profile.proto",
    "version": "version not set"
  },
  "schemes": [
    "http",
    "https"
  ],
  "consumes": [
    "application/json"
  ],
  "produces": [
    "application/json"
  ],
  "paths": {
    "/api/v1/create": {
      "post": {
        "summary": "Create attempts to create a new profile.",
        "operationId": "Create",
        "responses": {
          "200": {
            "description": "",
            "schema": {
              "$ref": "#/definitions/profileProfileResponse"
            }
          }
        },
        "parameters": [
          {
            "name": "body",
            "in": "body",
            "required": true,
            "schema": {
              "$ref": "#/definitions/profileProfileCreationRequest"
            }
          }
        ],
        "tags": [
          "Profile"
        ]
      }
    },
    "/api/v1/hard_delete": {
      "post": {
        "summary": "HardDelete attempts to delete an existing profile physically.",
        "operationId": "HardDelete",
        "responses": {
          "200": {
            "description": "",
            "schema": {
              "$ref": "#/definitions/profileProfileResponseLight"
            }
          }
        },
        "parameters": [
          {
            "name": "body",
            "in": "body",
            "required": true,
            "schema": {
              "$ref": "#/definitions/profileProfileRequest"
            }
          }
        ],
        "tags": [
          "Profile"
        ]
      }
    },
    "/api/v1/list": {
      "post": {
        "summary": "List returns a list of profiles matching a set of criteria.",
        "operationId": "List",
        "responses": {
          "200": {
            "description": "",
            "schema": {
              "$ref": "#/definitions/profileProfileList"
            }
          }
        },
        "parameters": [
          {
            "name": "body",
            "in": "body",
            "required": true,
            "schema": {
              "$ref": "#/definitions/profileProfileListRequest"
            }
          }
        ],
        "tags": [
          "Profile"
        ]
      }
    },
    "/api/v1/read": {
      "post": {
        "summary": "Read returns information about an existing profile.",
        "operationId": "Read",
        "responses": {
          "200": {
            "description": "",
            "schema": {
              "$ref": "#/definitions/profileProfileInfo"
            }
          }
        },
        "parameters": [
          {
            "name": "body",
            "in": "body",
            "required": true,
            "schema": {
              "$ref": "#/definitions/profileProfileRequest"
            }
          }
        ],
        "tags": [
          "Profile"
        ]
      }
    },
    "/api/v1/services/status": {
      "get": {
        "summary": "ServicesStatus method receives no paramaters and returns all services status message",
        "operationId": "ServicesStatus",
        "responses": {
          "200": {
            "description": "",
            "schema": {
              "$ref": "#/definitions/profileServicesStatusList"
            }
          }
        },
        "tags": [
          "Profile"
        ]
      }
    },
    "/api/v1/soft_delete": {
      "post": {
        "summary": "SoftDelete attempts to delete an existing profile logically.",
        "operationId": "SoftDelete",
        "responses": {
          "200": {
            "description": "",
            "schema": {
              "$ref": "#/definitions/profileProfileResponse"
            }
          }
        },
        "parameters": [
          {
            "name": "body",
            "in": "body",
            "required": true,
            "schema": {
              "$ref": "#/definitions/profileProfileRequest"
            }
          }
        ],
        "tags": [
          "Profile"
        ]
      }
    },
    "/api/v1/update": {
      "post": {
        "summary": "Update attempts to update an existing profile.",
        "operationId": "Update",
        "responses": {
          "200": {
            "description": "",
            "schema": {
              "$ref": "#/definitions/profileProfileResponse"
            }
          }
        },
        "parameters": [
          {
            "name": "body",
            "in": "body",
            "required": true,
            "schema": {
              "$ref": "#/definitions/profileProfileInfo"
            }
          }
        ],
        "tags": [
          "Profile"
        ]
      }
    },
    "/api/v1/version": {
      "get": {
        "summary": "Version method receives no paramaters and returns a version message.",
        "operationId": "Version",
        "responses": {
          "200": {
            "description": "",
            "schema": {
              "$ref": "#/definitions/profileVersionResponse"
            }
          }
        },
        "tags": [
          "Profile"
        ]
      }
    }
  },
  "definitions": {
    "ServiceStatusStatus": {
      "type": "string",
      "enum": [
        "OK",
        "UNAVAILABLE"
      ],
      "default": "OK"
    },
    "profileGenders": {
      "type": "string",
      "enum": [
        "UNKNOW",
        "MALE",
        "FEMALE"
      ],
      "default": "UNKNOW"
    },
    "profileProfileCreationRequest": {
      "type": "object",
      "properties": {
        "gender": {
          "$ref": "#/definitions/profileGenders"
        },
        "email": {
          "type": "string"
        },
        "name": {
          "type": "string"
        },
        "birthday": {
          "type": "string"
        }
      },
      "description": "ProfileCreationRequest encodes a profile creation request."
    },
    "profileProfileInfo": {
      "type": "object",
      "properties": {
        "uuid": {
          "type": "string"
        },
        "gender": {
          "$ref": "#/definitions/profileGenders"
        },
        "email": {
          "type": "string"
        },
        "name": {
          "type": "string"
        },
        "birthday": {
          "type": "string"
        },
        "created_at": {
          "type": "string"
        },
        "updated_at": {
          "type": "string"
        },
        "deleted_at": {
          "type": "string"
        }
      },
      "description": "ProfileInfo encodes information about a profile."
    },
    "profileProfileList": {
      "type": "object",
      "properties": {
        "result_set_size": {
          "type": "integer",
          "format": "int64"
        },
        "has_more": {
          "type": "boolean",
          "format": "boolean"
        },
        "profiles": {
          "type": "array",
          "items": {
            "$ref": "#/definitions/profileProfileInfo"
          }
        }
      },
      "description": "ProfileList encodes the result of a ProfileListRequest."
    },
    "profileProfileListRequest": {
      "type": "object",
      "properties": {
        "page_number": {
          "type": "integer",
          "format": "int64"
        },
        "page_size": {
          "type": "integer",
          "format": "int64"
        },
        "order": {
          "type": "string"
        },
        "exclude_soft_deleted": {
          "type": "boolean",
          "format": "boolean"
        },
        "soft_deleted_only": {
          "type": "boolean",
          "format": "boolean"
        },
        "gender": {
          "$ref": "#/definitions/profileGenders"
        }
      },
      "description": "ProfileListRequest encodes a set of criteria for the retrieval of a list of profiles."
    },
    "profileProfileRequest": {
      "type": "object",
      "properties": {
        "uuid": {
          "type": "string"
        }
      },
      "description": "ProfileRequest encodes a profile identifier."
    },
    "profileProfileResponse": {
      "type": "object",
      "properties": {
        "ok": {
          "type": "boolean",
          "format": "boolean"
        },
        "info": {
          "$ref": "#/definitions/profileProfileInfo"
        }
      },
      "description": "ProfileResponse encodes the result of a profile operation."
    },
    "profileProfileResponseLight": {
      "type": "object",
      "properties": {
        "ok": {
          "type": "boolean",
          "format": "boolean"
        }
      },
      "description": "ProfileResponseLight encodes the result of a profile operation."
    },
    "profileServiceStatus": {
      "type": "object",
      "properties": {
        "name": {
          "type": "string"
        },
        "version": {
          "type": "string"
        },
        "status": {
          "$ref": "#/definitions/ServiceStatusStatus"
        },
        "e_msg": {
          "type": "string"
        }
      },
      "title": "SeviceStatus represents a sub services status message"
    },
    "profileServicesStatusList": {
      "type": "object",
      "properties": {
        "services": {
          "type": "array",
          "items": {
            "$ref": "#/definitions/profileServiceStatus"
          }
        }
      },
      "title": "ServicesStatusList is the sub services status list"
    },
    "profileVersionResponse": {
      "type": "object",
      "properties": {
        "name": {
          "type": "string",
          "description": "Id represents the message identifier."
        },
        "version": {
          "type": "string"
        }
      },
      "title": "VersionMessage represents a version message"
    }
  }
}
`
)
package container
import (
"encoding/json"
"golang.org/x/exp/constraints"
)
// List2D is a 2D slice with some helper methods. It is NOT thread safe.
type List2D[T any] struct {
	data []T // row-major backing storage; element (x, y) lives at data[x+y*width]
	width int // number of columns
	height int // number of rows
}
// min returns the smaller of the two ordered operands (the second on ties).
func min[T constraints.Ordered](x T, y T) T {
	if x < y {
		return x
	}
	return y
}
// Resize reallocates the list to width x height, preserving whatever part of
// the previous contents still fits inside the new bounds.
func (l *List2D[T]) Resize(width, height int) {
	old := l.Copy()
	l.data = make([]T, width*height)
	l.width = width
	l.height = height
	maxX := min(width, old.Width())
	maxY := min(height, old.Height())
	for y := 0; y < maxY; y++ {
		for x := 0; x < maxX; x++ {
			l.Set(x, y, old.Get(x, y))
		}
	}
}
// Set stores v at grid position (x, y).
func (l *List2D[T]) Set(x, y int, v T) {
	idx := x + y*l.width
	l.data[idx] = v
}

// Get returns the element at grid position (x, y).
func (l *List2D[T]) Get(x, y int) T {
	idx := x + y*l.width
	return l.data[idx]
}

// GetRef returns a pointer to the element at grid position (x, y).
func (l *List2D[T]) GetRef(x, y int) *T {
	idx := x + y*l.width
	return &l.data[idx]
}
// GetW is the same as Get, but wraps around if (x, y) is out of bounds. It
// returns the zero value if the width or height is zero.
func (l *List2D[T]) GetW(x, y int) T {
	if l.width < 1 || l.height < 1 {
		var zv T
		return zv
	}
	// Normalize with a double modulo instead of the original repeated +=/-=
	// loops: Go's % can yield a negative remainder, so add the modulus once
	// and reduce again. This is O(1) for arbitrarily distant coordinates,
	// where the loop version was O(|x|/width + |y|/height).
	x = ((x % l.width) + l.width) % l.width
	y = ((y % l.height) + l.height) % l.height
	return l.data[x+y*l.width]
}
// Copy returns a deep copy of the list (the backing slice is duplicated).
func (l *List2D[T]) Copy() *List2D[T] {
	dup := make([]T, len(l.data))
	copy(dup, l.data)
	return &List2D[T]{
		data:   dup,
		width:  l.width,
		height: l.height,
	}
}
// CopyRect returns a copy of the sub-rectangle of the list with origin (x, y)
// and the given dimensions. It panics if the rectangle reaches outside the
// backing storage. (The original doc comment mis-named this function "Copy".)
func (l *List2D[T]) CopyRect(x, y, width, height int) *List2D[T] {
	out := &List2D[T]{
		data:   make([]T, width*height),
		width:  width,
		height: height,
	}
	// Copy row by row: each source row of the rectangle is a contiguous run
	// in the backing slice, so one copy per row replaces the per-element
	// inner loop of the original.
	for row := 0; row < height; row++ {
		srcStart := (y+row)*l.width + x
		copy(out.data[row*width:(row+1)*width], l.data[srcStart:srcStart+width])
	}
	return out
}
// Shift moves every element in the x and y dimensions by the given offsets,
// wrapping around the edges (elements pushed off one side reappear on the
// opposite side). A list with a zero dimension is left untouched.
// (The original doc comment mis-named this function "ShiftX".)
func (l *List2D[T]) Shift(xoffset, yoffset int) {
	if l.width < 1 || l.height < 1 {
		return
	}
	src := l.Copy()
	for x := 0; x < l.width; x++ {
		for y := 0; y < l.height; y++ {
			// Cell (x, y) receives the value that used to live at
			// (x-xoffset, y-yoffset); GetW handles the wrap-around.
			l.Set(x, y, src.GetW(x-xoffset, y-yoffset))
		}
	}
}
// Width returns the number of columns in the list.
func (l *List2D[T]) Width() int {
	return l.width
}

// Height returns the number of rows in the list.
func (l *List2D[T]) Height() int {
	return l.height
}
// NewList2D allocates a zero-filled list with the given dimensions.
func NewList2D[T any](width, height int) *List2D[T] {
	return &List2D[T]{
		data:   make([]T, width*height),
		width:  width,
		height: height,
	}
}
// NewList2DFrom2DSlice creates a new list from a [y][x] slice. The width is
// taken from the first row; longer rows are truncated and shorter (ragged)
// rows are zero-padded instead of panicking.
func NewList2DFrom2DSlice[T any](src [][]T) *List2D[T] {
	if len(src) < 1 {
		return NewList2D[T](0, 0)
	}
	h := len(src)
	if len(src[0]) < 1 {
		return NewList2D[T](0, h)
	}
	w := len(src[0])
	l := NewList2D[T](w, h)
	for y, row := range src {
		// copy stops at the shorter of source and destination, so a row
		// shorter than w no longer panics (the original sliced row[:w]
		// unconditionally); the destination window caps longer rows at w.
		copy(l.data[y*w:(y+1)*w], row)
	}
	return l
}
// list2dj is the JSON/text wire representation of a List2D: explicit width,
// height, and the row-major data slice.
type list2dj[T any] struct {
	W int `json:"w"`
	H int `json:"h"`
	D []T `json:"d"`
}
// MarshalText implements encoding.TextMarshaler. The text form is identical
// to the JSON form, so it delegates to MarshalJSON instead of duplicating
// the encoding logic (the original repeated it verbatim).
func (l List2D[T]) MarshalText() (text []byte, err error) {
	return l.MarshalJSON()
}
// UnmarshalText implements encoding.TextUnmarshaler. The text form is
// identical to the JSON form, so it delegates to UnmarshalJSON instead of
// duplicating the decoding logic (the original repeated it verbatim).
func (l *List2D[T]) UnmarshalText(text []byte) error {
	return l.UnmarshalJSON(text)
}
// MarshalJSON implements json.Marshaler, encoding the list as
// {"w":..,"h":..,"d":[...]}.
func (l List2D[T]) MarshalJSON() ([]byte, error) {
	return json.Marshal(list2dj[T]{W: l.width, H: l.height, D: l.data})
}
func (l *List2D[T]) UnmarshalJSON(text []byte) error {
j := new(list2dj[T])
err := json.Unmarshal(text, j)
if err != nil {
return err
}
l.width = j.W
l.height = j.H
l.data = j.D
return nil
} | list2d.go | 0.781539 | 0.500916 | list2d.go | starcoder |
package marker
import (
"errors"
"fmt"
"reflect"
"strconv"
)
// ArgumentType identifies the concrete kind of a marker argument value.
type ArgumentType int

// String returns the printable name of the argument type, or the empty
// string for values not present in argumentTypeText.
func (argumentType ArgumentType) String() string {
	return argumentTypeText[argumentType]
}

// Supported argument kinds. InvalidType marks a type that could not be
// determined; AnyType means the concrete type must be inferred from input.
const (
	InvalidType ArgumentType = iota
	RawType
	AnyType
	BoolType
	IntegerType
	StringType
	SliceType
	MapType
)

// argumentTypeText maps each ArgumentType to the display name used by String.
var argumentTypeText = map[ArgumentType]string{
	InvalidType: "InvalidType",
	RawType:     "RawType",
	AnyType:     "AnyType",
	BoolType:    "BoolType",
	IntegerType: "IntegerType",
	StringType:  "StringType",
	SliceType:   "SliceType",
	MapType:     "MapType",
}

// Sentinel reflect.Types used to recognize interface{} and []byte targets.
var (
	interfaceType = reflect.TypeOf((*interface{})(nil)).Elem()
	rawType = reflect.TypeOf((*[]byte)(nil)).Elem()
)

// ArgumentTypeInfo describes a (possibly nested) argument type: the concrete
// kind plus, for slices and maps, the element type.
type ArgumentTypeInfo struct {
	ActualType ArgumentType
	ItemType *ArgumentTypeInfo
}
// GetArgumentTypeInfo maps a reflect.Type to its ArgumentTypeInfo
// description. Pointer types are unwrapped one level; []byte maps to RawType
// and interface{} to AnyType before the kind-based dispatch. Slice and map
// element types are resolved recursively, and map keys must be strings.
func GetArgumentTypeInfo(typ reflect.Type) (ArgumentTypeInfo, error) {
	typeInfo := &ArgumentTypeInfo{}

	if typ.Kind() == reflect.Ptr {
		typ = typ.Elem()
	}

	// Check the sentinel types before the generic kind switch: []byte would
	// otherwise be classified as a plain slice, interface{} has no useful kind.
	if typ == rawType {
		typeInfo.ActualType = RawType
		return *typeInfo, nil
	}

	if typ == interfaceType {
		typeInfo.ActualType = AnyType
		return *typeInfo, nil
	}

	switch typ.Kind() {
	case reflect.String:
		typeInfo.ActualType = StringType
	case reflect.Uint8, reflect.Uint16, reflect.Uint, reflect.Uint32, reflect.Uint64:
		typeInfo.ActualType = IntegerType
	case reflect.Int8, reflect.Int16, reflect.Int, reflect.Int32, reflect.Int64:
		typeInfo.ActualType = IntegerType
	case reflect.Bool:
		typeInfo.ActualType = BoolType
	case reflect.Slice:
		typeInfo.ActualType = SliceType
		itemType, err := GetArgumentTypeInfo(typ.Elem())

		if err != nil {
			return ArgumentTypeInfo{}, fmt.Errorf("bad slice item type: %w", err)
		}

		typeInfo.ItemType = &itemType
	case reflect.Map:
		if typ.Key().Kind() != reflect.String {
			return ArgumentTypeInfo{}, fmt.Errorf("map key must be string")
		}

		typeInfo.ActualType = MapType
		itemType, err := GetArgumentTypeInfo(typ.Elem())

		if err != nil {
			return ArgumentTypeInfo{}, fmt.Errorf("bad map item type: %w", err)
		}

		typeInfo.ItemType = &itemType
	default:
		return ArgumentTypeInfo{}, fmt.Errorf("type has unsupported kind %s", typ.Kind())
	}

	return *typeInfo, nil
}
// Parse reads the next value from the scanner into out, dispatching on the
// declared ActualType. For AnyType the concrete type is first inferred from
// the upcoming tokens, a scratch value of that type is parsed, and the result
// is then assigned into out. Unhandled ActualTypes are silently no-ops.
func (typeInfo ArgumentTypeInfo) Parse(scanner *Scanner, out reflect.Value) error {
	switch typeInfo.ActualType {
	case BoolType:
		return typeInfo.parseBoolean(scanner, out)
	case IntegerType:
		return typeInfo.parseInteger(scanner, out)
	case StringType:
		return typeInfo.parseString(scanner, out)
	case SliceType:
		return typeInfo.parseSlice(scanner, out)
	case MapType:
		return typeInfo.parseMap(scanner, out)
	case AnyType:
		// NOTE(review): inferType's error is discarded here — presumably a
		// failed inference yields InvalidType and falls through harmlessly;
		// confirm.
		inferredType, _ := typeInfo.inferType(scanner, out, false)

		// For container types, build a fresh addressable value of the
		// inferred concrete type to parse into; scalars can reuse out.
		newOut := out

		switch inferredType.ActualType {
		case SliceType:
			newType, err := inferredType.makeSliceType()
			if err != nil {
				return err
			}
			newOut = reflect.Indirect(reflect.New(newType))
		case MapType:
			newType, err := inferredType.makeMapType()
			if err != nil {
				return err
			}
			newOut = reflect.Indirect(reflect.New(newType))
		}

		if newOut.Kind() == reflect.Ptr {
			newOut = newOut.Elem()
		}

		// A non-settable destination cannot receive the value; bail out
		// without consuming input.
		if !newOut.CanSet() {
			return nil
		}

		err := inferredType.Parse(scanner, newOut)

		if err != nil {
			return err
		}

		inferredType.setValue(out, newOut)
	}

	return nil
}
// setValue assigns value into out, dereferencing a pointer destination and
// converting the value when the (possibly pointed-to) destination type
// differs from the value's own type.
func (typeInfo ArgumentTypeInfo) setValue(out, value reflect.Value) {
	outType := out.Type()

	if outType.Kind() == reflect.Ptr {
		outType = outType.Elem()
		out = out.Elem()
	}

	if outType != value.Type() {
		value = value.Convert(outType)
	}

	out.Set(value)
}
// parseBoolean parses a boolean literal ("true" or "false") from the scanner
// into out, and reports an error for any other identifier.
func (typeInfo ArgumentTypeInfo) parseBoolean(scanner *Scanner, out reflect.Value) error {
	if scanner == nil {
		return errors.New("scanner cannot be nil")
	}

	if !scanner.Expect(Identifier, "Boolean (true or false)") {
		return nil
	}

	switch scanner.Token() {
	case "false":
		typeInfo.setValue(out, reflect.ValueOf(false))
		return nil
	case "true":
		typeInfo.setValue(out, reflect.ValueOf(true))
		return nil
	}

	// BUG FIX: the original fell through to this error unconditionally, so
	// even a successfully matched "true"/"false" was reported as a failure.
	return fmt.Errorf("expected true or false, got %q", scanner.Token())
}
// parseInteger parses a (possibly negative) decimal integer literal from the
// scanner into out. The leading '-' is scanned as its own token and
// re-attached to the digits before conversion.
func (typeInfo ArgumentTypeInfo) parseInteger(scanner *Scanner, out reflect.Value) error {
	if scanner == nil {
		return errors.New("scanner cannot be nil")
	}

	isNegative := scanner.Peek() == '-'
	if isNegative {
		scanner.Scan()
	}

	if !scanner.Expect(Integer, "Integer") {
		return nil
	}

	text := scanner.Token()
	if isNegative {
		text = "-" + text
	}

	intValue, err := strconv.Atoi(text)
	if err != nil {
		return fmt.Errorf("unable to parse integer: %v", err)
	}

	// Only write the destination once parsing succeeded; the original
	// stored the (zero) result into out even on the error path.
	typeInfo.setValue(out, reflect.ValueOf(intValue))
	return nil
}
// parseString parses a string into out. A quoted string token is unquoted
// via strconv; otherwise the raw source is consumed up to (but not
// including) the next ',', ';', ':', '}' or EOF and used verbatim.
func (typeInfo ArgumentTypeInfo) parseString(scanner *Scanner, out reflect.Value) error {
	if scanner == nil {
		return errors.New("scanner cannot be nil")
	}

	startPosition := scanner.searchIndex
	token := scanner.Scan()

	if token == String {
		value, err := strconv.Unquote(scanner.Token())
		if err != nil {
			return err
		}
		typeInfo.setValue(out, reflect.ValueOf(value))
		return nil
	}

	// Unquoted fallback: advance until a delimiter, then take the raw source
	// between the first token's start and the current scan position.
	for character := scanner.SkipWhitespaces(); character != ',' && character != ';' && character != ':' && character != '}' && character != EOF; character = scanner.SkipWhitespaces() {
		scanner.Scan()
	}

	endPosition := scanner.searchIndex
	value := string(scanner.source[startPosition:endPosition])
	typeInfo.setValue(out, reflect.ValueOf(value))
	return nil
}
// parseSlice parses a slice literal into out, in either the braced form
// "{a, b, c}" or the legacy semicolon-separated form "a; b; c". Each element
// is parsed as ItemType and appended to a fresh slice assigned into out.
func (typeInfo ArgumentTypeInfo) parseSlice(scanner *Scanner, out reflect.Value) error {
	if scanner == nil {
		return errors.New("scanner cannot be nil")
	}

	// sliceType accumulates the result; sliceItemType is a reusable scratch
	// value for parsing one element at a time.
	sliceType := reflect.Zero(out.Type())
	sliceItemType := reflect.Indirect(reflect.New(out.Type().Elem()))

	// Braced form: "{elem, elem, ...}".
	if scanner.SkipWhitespaces() == '{' {
		scanner.Scan()

		for character := scanner.SkipWhitespaces(); character != '}' && character != EOF; character = scanner.SkipWhitespaces() {
			err := typeInfo.ItemType.Parse(scanner, sliceItemType)

			if err != nil {
				return err
			}

			sliceType = reflect.Append(sliceType, sliceItemType)

			token := scanner.SkipWhitespaces()

			if token == '}' {
				break
			}

			if !scanner.Expect(',', "Comma ','") {
				return nil
			}
		}

		if !scanner.Expect('}', "Right Curly Bracket '}'") {
			return nil
		}

		typeInfo.setValue(out, sliceType)
		return nil
	}

	// Legacy form: elements separated by ';', terminated by ',', '}' or EOF.
	for character := scanner.SkipWhitespaces(); character != ',' && character != '}' && character != EOF; character = scanner.SkipWhitespaces() {
		err := typeInfo.ItemType.Parse(scanner, sliceItemType)

		if err != nil {
			return err
		}

		sliceType = reflect.Append(sliceType, sliceItemType)

		token := scanner.SkipWhitespaces()

		if token == ',' || token == '}' || token == EOF {
			break
		}

		// NOTE(review): this consumes the separator and then stops unless it
		// was ';' — any other rune silently ends the slice rather than being
		// reported as an error; confirm that is intended.
		scanner.Scan()

		if token != ';' {
			return nil
		}
	}

	typeInfo.setValue(out, sliceType)
	return nil
}
// parseMap parses a braced map literal "{key: value, ...}" into out. Keys
// are parsed as strings, values as the declared ItemType.
func (typeInfo ArgumentTypeInfo) parseMap(scanner *Scanner, out reflect.Value) error {
	if scanner == nil {
		return errors.New("scanner cannot be nil")
	}

	// mapType accumulates the result; key and value are reusable scratch
	// values for parsing one entry at a time.
	mapType := reflect.MakeMap(out.Type())
	key := reflect.Indirect(reflect.New(out.Type().Key()))
	value := reflect.Indirect(reflect.New(out.Type().Elem()))

	if !scanner.Expect('{', "Left Curly Bracket") {
		return nil
	}

	for character := scanner.SkipWhitespaces(); character != '}' && character != EOF; character = scanner.SkipWhitespaces() {
		err := typeInfo.parseString(scanner, key)

		if err != nil {
			return err
		}

		if !scanner.Expect(':', "Colon ':'") {
			return nil
		}

		err = typeInfo.ItemType.Parse(scanner, value)

		if err != nil {
			return err
		}

		mapType.SetMapIndex(key, value)

		if scanner.SkipWhitespaces() == '}' {
			break
		}

		if !scanner.Expect(',', "Comma ','") {
			return nil
		}
	}

	if !scanner.Expect('}', "Right Curly Bracket '}'") {
		return nil
	}

	typeInfo.setValue(out, mapType)
	return nil
}
// inferType guesses the concrete ArgumentTypeInfo of the upcoming input
// without consuming it: the scanner position is restored before returning.
// When ignoreLegacySlice is false, it first scans ahead for a ';' to detect
// the legacy semicolon-separated slice syntax, then re-infers the element
// type with ignoreLegacySlice set.
func (typeInfo ArgumentTypeInfo) inferType(scanner *Scanner, out reflect.Value, ignoreLegacySlice bool) (ArgumentTypeInfo, error) {
	character := scanner.SkipWhitespaces()
	searchIndex := scanner.searchIndex

	if !ignoreLegacySlice {
		itemType, _ := typeInfo.inferType(scanner, out, true)

		// Scan ahead: a ';' before the next ','/EOF means legacy slice syntax.
		var token rune
		for token = scanner.Scan(); token != ',' && token != EOF && token != ';'; token = scanner.Scan() {
		}

		scanner.SetSearchIndex(searchIndex)

		if token == ';' {
			return ArgumentTypeInfo{
				ActualType: SliceType,
				ItemType:   &itemType,
			}, nil
		}

		return itemType, nil
	}

	// A quote character can only start a string literal.
	switch character {
	case '"', '\'', '`':
		return ArgumentTypeInfo{
			ActualType: StringType,
		}, nil
	}

	// '{' starts either a map (first element is "key:") or a slice literal.
	if character == '{' {
		scanner.Scan()

		elementType, _ := typeInfo.inferType(scanner, out, true)

		// skip left curly bracket character
		scanner.SetSearchIndex(searchIndex + 1)

		if elementType.ActualType == StringType {
			var keyString string
			// NOTE(review): the parse error is deliberately discarded — a
			// failed string parse just means this is not a map key; confirm.
			(ArgumentTypeInfo{ActualType: StringType}).parseString(scanner, reflect.Indirect(reflect.ValueOf(&keyString)))

			if scanner.Scan() == ':' {
				scanner.SetSearchIndex(searchIndex)
				return ArgumentTypeInfo{
					ActualType: MapType,
					ItemType: &ArgumentTypeInfo{
						ActualType: AnyType,
					},
				}, nil
			}
		}

		scanner.SetSearchIndex(searchIndex)
		return ArgumentTypeInfo{
			ActualType: SliceType,
			ItemType:   &elementType,
		}, nil
	}

	// 't'/'f' may begin the literals "true"/"false" — or a bare string.
	canBeString := false

	if character == 't' || character == 'f' {
		if token := scanner.Scan(); token == Identifier {
			switch scanner.Token() {
			case "true", "false":
				scanner.SetSearchIndex(searchIndex)
				return ArgumentTypeInfo{
					ActualType: BoolType,
				}, nil
			}
			canBeString = true
		} else {
			return ArgumentTypeInfo{
				ActualType: InvalidType,
			}, nil
		}
	}

	// Try an integer literal (with optional leading '-') before falling back
	// to treating the input as an unquoted string.
	if !canBeString {
		token := scanner.Scan()

		if token == '-' {
			token = scanner.Scan()
		}

		if token == Integer {
			return ArgumentTypeInfo{
				ActualType: IntegerType,
			}, nil
		}
	}

	return ArgumentTypeInfo{
		ActualType: StringType,
	}, nil
}
// makeSliceType builds the reflect.Type "[]item" corresponding to this
// SliceType description, resolving nested slice/map element types
// recursively.
func (typeInfo ArgumentTypeInfo) makeSliceType() (reflect.Type, error) {
	if typeInfo.ActualType != SliceType {
		return nil, errors.New("this is not slice type")
	}

	if typeInfo.ItemType == nil {
		return nil, errors.New("item type cannot be nil for slice type")
	}

	var itemType reflect.Type

	switch typeInfo.ItemType.ActualType {
	case IntegerType:
		itemType = reflect.TypeOf(int(0))
	case BoolType:
		itemType = reflect.TypeOf(false)
	case StringType:
		itemType = reflect.TypeOf("")
	case SliceType:
		subItemType, err := typeInfo.ItemType.makeSliceType()

		if err != nil {
			return nil, err
		}

		itemType = subItemType
	case MapType:
		subItemType, err := typeInfo.ItemType.makeMapType()

		if err != nil {
			return nil, err
		}

		itemType = subItemType
	case AnyType:
		// Consistency fix: mirror makeMapType, which already maps an
		// untyped element to interface{} — the original returned an
		// "invalid type" error here.
		itemType = interfaceType
	default:
		return nil, fmt.Errorf("invalid type: %v", typeInfo.ItemType.ActualType)
	}

	return reflect.SliceOf(itemType), nil
}
func (typeInfo ArgumentTypeInfo) makeMapType() (reflect.Type, error) {
if typeInfo.ActualType != MapType {
return nil, errors.New("this is not map type")
}
if typeInfo.ItemType == nil {
return nil, errors.New("item type cannot be nil for map type")
}
var itemType reflect.Type
switch typeInfo.ItemType.ActualType {
case IntegerType:
itemType = reflect.TypeOf(int(0))
case BoolType:
itemType = reflect.TypeOf(false)
case StringType:
itemType = reflect.TypeOf("")
case SliceType:
subItemType, err := typeInfo.ItemType.makeSliceType()
if err != nil {
return nil, err
}
itemType = subItemType
case MapType:
subItemType, err := typeInfo.ItemType.makeMapType()
if err != nil {
return nil, err
}
itemType = subItemType
case AnyType:
itemType = interfaceType
default:
return nil, fmt.Errorf("invalid type: %v", typeInfo.ItemType.ActualType)
}
return reflect.MapOf(reflect.TypeOf(""), itemType), nil
} | type.go | 0.706899 | 0.457379 | type.go | starcoder |
package input
import (
"github.com/lolopinto/ent/internal/schema/change"
"github.com/lolopinto/ent/internal/tsimport"
)
// NodeEqual reports whether two node schema definitions are identical. Each
// conjunct compares one schema-relevant property; keep this list in sync
// with the Node struct when properties are added.
func NodeEqual(existing, node *Node) bool {
	return existing.TableName == node.TableName &&
		fieldsEqual(existing.Fields, node.Fields) &&
		assocEdgesEqual(existing.AssocEdges, node.AssocEdges) &&
		actionsEqual(existing.Actions, node.Actions) &&
		existing.EnumTable == node.EnumTable &&
		change.MapListEqual(existing.DBRows, node.DBRows) &&
		constraintsEqual(existing.Constraints, node.Constraints) &&
		indicesEqual(existing.Indices, node.Indices) &&
		existing.HideFromGraphQL == node.HideFromGraphQL &&
		existing.EdgeConstName == node.EdgeConstName &&
		existing.PatternName == node.PatternName
}
// PatternEqual reports whether two input patterns are identical: same name,
// same fields, same association edges.
func PatternEqual(existing, pattern *Pattern) bool {
	if existing.Name != pattern.Name {
		return false
	}
	if !fieldsEqual(existing.Fields, pattern.Fields) {
		return false
	}
	return assocEdgesEqual(existing.AssocEdges, pattern.AssocEdges)
}
// fieldsEqual reports whether both field slices have the same length and
// pairwise-equal elements.
func fieldsEqual(existing, fields []*Field) bool {
	if len(existing) != len(fields) {
		return false
	}
	for i, f := range existing {
		if !fieldEqual(f, fields[i]) {
			return false
		}
	}
	return true
}
// fieldEqual reports whether two field definitions agree on every
// schema-relevant property, including nested type, edge, foreign-key,
// polymorphic, and derived-field information. Keep the conjunct list in sync
// with the Field struct when properties are added.
func fieldEqual(existingField, field *Field) bool {
	return existingField.Name == field.Name &&
		fieldTypeEqual(existingField.Type, field.Type) &&
		existingField.Nullable == field.Nullable &&
		existingField.StorageKey == field.StorageKey &&
		existingField.Unique == field.Unique &&
		existingField.HideFromGraphQL == field.HideFromGraphQL &&
		existingField.Private == field.Private &&
		existingField.GraphQLName == field.GraphQLName &&
		existingField.Index == field.Index &&
		existingField.PrimaryKey == field.PrimaryKey &&
		existingField.DefaultToViewerOnCreate == field.DefaultToViewerOnCreate &&
		fieldEdgeEqual(existingField.FieldEdge, field.FieldEdge) &&
		foreignKeyEqual(existingField.ForeignKey, field.ForeignKey) &&
		existingField.ServerDefault == field.ServerDefault &&
		existingField.DisableUserEditable == field.DisableUserEditable &&
		existingField.HasDefaultValueOnCreate == field.HasDefaultValueOnCreate &&
		existingField.HasDefaultValueOnEdit == field.HasDefaultValueOnEdit &&
		PolymorphicOptionsEqual(existingField.Polymorphic, field.Polymorphic) &&
		existingField.DerivedWhenEmbedded == field.DerivedWhenEmbedded &&
		fieldsEqual(existingField.DerivedFields, field.DerivedFields) &&
		existingField.PatternName == field.PatternName
}
// fieldTypeEqual reports whether two (possibly nil) field types match,
// including list element types, enum values/maps, and import paths.
// CompareNilVals settles the comparison when at least one side is nil.
func fieldTypeEqual(existing, fieldType *FieldType) bool {
	ret := change.CompareNilVals(existing == nil, fieldType == nil)
	if ret != nil {
		return *ret
	}

	return existing.DBType == fieldType.DBType &&
		fieldTypeEqual(existing.ListElemType, fieldType.ListElemType) &&
		change.StringListEqual(existing.Values, fieldType.Values) &&
		change.StringMapEqual(existing.EnumMap, fieldType.EnumMap) &&
		existing.Type == fieldType.Type &&
		existing.GraphQLType == fieldType.GraphQLType &&
		existing.CustomType == fieldType.CustomType &&
		tsimport.ImportPathEqual(existing.ImportType, fieldType.ImportType)
}
// fieldEdgeEqual reports whether two (possibly nil) field edges match on
// schema, builder-type flag, and inverse edge.
func fieldEdgeEqual(existing, fieldEdge *FieldEdge) bool {
	if ret := change.CompareNilVals(existing == nil, fieldEdge == nil); ret != nil {
		return *ret
	}
	if existing.Schema != fieldEdge.Schema {
		return false
	}
	if existing.DisableBuilderType != fieldEdge.DisableBuilderType {
		return false
	}
	return InverseFieldEdgeEqual(existing.InverseEdge, fieldEdge.InverseEdge)
}
// InverseFieldEdgeEqual reports whether two (possibly nil) inverse field
// edges match on name, table name, GraphQL visibility, and constant name.
func InverseFieldEdgeEqual(existing, inverseFieldEdge *InverseFieldEdge) bool {
	ret := change.CompareNilVals(existing == nil, inverseFieldEdge == nil)
	if ret != nil {
		return *ret
	}

	// here is why we want derived field info instead of this...
	// HideFromGraphQL should alter only graphql
	return existing.Name == inverseFieldEdge.Name &&
		existing.TableName == inverseFieldEdge.TableName &&
		existing.HideFromGraphQL == inverseFieldEdge.HideFromGraphQL &&
		existing.EdgeConstName == inverseFieldEdge.EdgeConstName
}
// foreignKeyEqual reports whether two (possibly nil) foreign keys match on
// every property.
func foreignKeyEqual(existing, fkey *ForeignKey) bool {
	if ret := change.CompareNilVals(existing == nil, fkey == nil); ret != nil {
		return *ret
	}
	if existing.Schema != fkey.Schema || existing.Column != fkey.Column {
		return false
	}
	if existing.Name != fkey.Name {
		return false
	}
	if existing.DisableIndex != fkey.DisableIndex {
		return false
	}
	return existing.DisableBuilderType == fkey.DisableBuilderType
}
// PolymorphicOptionsEqual reports whether two (possibly nil) polymorphic
// option sets match on types and flags.
func PolymorphicOptionsEqual(existing, p *PolymorphicOptions) bool {
	if ret := change.CompareNilVals(existing == nil, p == nil); ret != nil {
		return *ret
	}
	if !change.StringListEqual(existing.Types, p.Types) {
		return false
	}
	if existing.HideFromInverseGraphQL != p.HideFromInverseGraphQL {
		return false
	}
	return existing.DisableBuilderType == p.DisableBuilderType
}
// assocEdgesEqual reports whether both association-edge slices have the same
// length and pairwise-equal elements.
func assocEdgesEqual(existing, edges []*AssocEdge) bool {
	if len(existing) != len(edges) {
		return false
	}
	for i, e := range existing {
		if !assocEdgeEqual(e, edges[i]) {
			return false
		}
	}
	return true
}
// assocEdgeEqual reports whether two association edges agree on every
// schema-relevant property, including the inverse edge and edge actions.
func assocEdgeEqual(existing, edge *AssocEdge) bool {
	return existing.Name == edge.Name &&
		existing.SchemaName == edge.SchemaName &&
		existing.Symmetric == edge.Symmetric &&
		existing.Unique == edge.Unique &&
		existing.TableName == edge.TableName &&
		inverseAssocEdgeEqual(existing.InverseEdge, edge.InverseEdge) &&
		edgeActionsEqual(existing.EdgeActions, edge.EdgeActions) &&
		existing.HideFromGraphQL == edge.HideFromGraphQL &&
		existing.EdgeConstName == edge.EdgeConstName &&
		existing.PatternName == edge.PatternName
}
// inverseAssocEdgeEqual reports whether two (possibly nil) inverse
// association edges match on name and constant name.
func inverseAssocEdgeEqual(existing, edge *InverseAssocEdge) bool {
	if ret := change.CompareNilVals(existing == nil, edge == nil); ret != nil {
		return *ret
	}
	if existing.Name != edge.Name {
		return false
	}
	return existing.EdgeConstName == edge.EdgeConstName
}
// edgeActionsEqual reports whether both edge-action slices have the same
// length and pairwise-equal elements.
func edgeActionsEqual(existing, actions []*EdgeAction) bool {
	if len(existing) != len(actions) {
		return false
	}
	for i, a := range existing {
		if !edgeActionEqual(a, actions[i]) {
			return false
		}
	}
	return true
}
// edgeActionEqual reports whether two (possibly nil) edge actions match on
// operation, custom names, GraphQL visibility, and action-only fields.
func edgeActionEqual(existing, action *EdgeAction) bool {
	ret := change.CompareNilVals(existing == nil, action == nil)
	if ret != nil {
		return *ret
	}

	return existing.Operation == action.Operation &&
		existing.CustomActionName == action.CustomActionName &&
		existing.CustomGraphQLName == action.CustomGraphQLName &&
		existing.CustomInputName == action.CustomInputName &&
		existing.HideFromGraphQL == action.HideFromGraphQL &&
		actionOnlyFieldsEqual(existing.ActionOnlyFields, action.ActionOnlyFields)
}
// actionOnlyFieldsEqual reports whether both action-field slices have the
// same length and pairwise-equal elements.
func actionOnlyFieldsEqual(existing, actions []*ActionField) bool {
	if len(existing) != len(actions) {
		return false
	}
	for i, a := range existing {
		if !actionOnlyFieldEqual(a, actions[i]) {
			return false
		}
	}
	return true
}
// actionOnlyFieldEqual reports whether two (possibly nil) action fields
// match, including the unexported list/nullable-contents flags.
func actionOnlyFieldEqual(existing, af *ActionField) bool {
	ret := change.CompareNilVals(existing == nil, af == nil)
	if ret != nil {
		return *ret
	}

	return existing.Name == af.Name &&
		existing.Type == af.Type &&
		existing.Nullable == af.Nullable &&
		existing.list == af.list &&
		existing.nullableContents == af.nullableContents &&
		existing.ActionName == af.ActionName &&
		change.StringListEqual(existing.ExcludedFields, af.ExcludedFields)
}
// assocEdgeGroupEqual reports whether two association-edge groups agree on
// every property: names, contained edges, actions, and status/null-state
// configuration.
func assocEdgeGroupEqual(existing, group *AssocEdgeGroup) bool {
	return existing.Name == group.Name &&
		existing.GroupStatusName == group.GroupStatusName &&
		existing.TableName == group.TableName &&
		assocEdgesEqual(existing.AssocEdges, group.AssocEdges) &&
		edgeActionsEqual(existing.EdgeActions, group.EdgeActions) &&
		change.StringListEqual(existing.StatusEnums, group.StatusEnums) &&
		existing.NullStateFn == group.NullStateFn &&
		change.StringListEqual(existing.NullStates, group.NullStates) &&
		edgeActionEqual(existing.EdgeAction, group.EdgeAction)
}
// actionsEqual reports whether both action slices have the same length and
// pairwise-equal elements.
func actionsEqual(existing, actions []*Action) bool {
	if len(existing) != len(actions) {
		return false
	}
	for i, a := range existing {
		if !actionEqual(a, actions[i]) {
			return false
		}
	}
	return true
}
// actionEqual reports whether two actions agree on operation, all field
// lists, custom names, GraphQL visibility, and action-only fields.
func actionEqual(existing, action *Action) bool {
	return existing.Operation == action.Operation &&
		change.StringListEqual(existing.Fields, action.Fields) &&
		change.StringListEqual(existing.ExcludedFields, action.ExcludedFields) &&
		change.StringListEqual(existing.OptionalFields, action.OptionalFields) &&
		change.StringListEqual(existing.RequiredFields, action.RequiredFields) &&
		existing.NoFields == action.NoFields &&
		existing.CustomActionName == action.CustomActionName &&
		existing.CustomInputName == action.CustomInputName &&
		existing.HideFromGraphQL == action.HideFromGraphQL &&
		actionOnlyFieldsEqual(existing.ActionOnlyFields, action.ActionOnlyFields)
}
// foreignKeyInfoEqual reports whether two (possibly nil) foreign-key infos
// match on table, columns, and on-delete behavior.
func foreignKeyInfoEqual(existing, fkey *ForeignKeyInfo) bool {
	if ret := change.CompareNilVals(existing == nil, fkey == nil); ret != nil {
		return *ret
	}
	if existing.TableName != fkey.TableName {
		return false
	}
	if !change.StringListEqual(existing.Columns, fkey.Columns) {
		return false
	}
	return existing.OnDelete == fkey.OnDelete
}
// constraintsEqual reports whether both constraint slices have the same
// length and pairwise-equal elements.
func constraintsEqual(existing, constraints []*Constraint) bool {
	if len(existing) != len(constraints) {
		return false
	}
	for i, c := range existing {
		if !constraintEqual(c, constraints[i]) {
			return false
		}
	}
	return true
}
// constraintEqual reports whether two constraints match on name, type,
// columns, foreign key info, and condition.
// Nil handling is delegated to change.CompareNilVals: when it returns a
// non-nil result (i.e. at least one side is nil), that result is final.
func constraintEqual(existing, constraint *Constraint) bool {
	ret := change.CompareNilVals(existing == nil, constraint == nil)
	if ret != nil {
		return *ret
	}
	return existing.Name == constraint.Name &&
		existing.Type == constraint.Type &&
		change.StringListEqual(existing.Columns, constraint.Columns) &&
		foreignKeyInfoEqual(existing.ForeignKey, constraint.ForeignKey) &&
		existing.Condition == constraint.Condition
}
// indicesEqual reports whether both index slices have the same length and
// pairwise-equal elements (compared via indexEqual).
func indicesEqual(existing, indices []*Index) bool {
	if len(existing) != len(indices) {
		return false
	}
	for i, idx := range existing {
		if !indexEqual(idx, indices[i]) {
			return false
		}
	}
	return true
}
func indexEqual(existing, index *Index) bool {
return existing.Name == index.Name &&
change.StringListEqual(existing.Columns, index.Columns) &&
existing.Unique == index.Unique
} | internal/schema/input/compare.go | 0.632957 | 0.462352 | compare.go | starcoder |
package hashing
import (
"bytes"
"math"
"math/big"
"github.com/ethereum/go-ethereum/common"
solsha3 "github.com/miguelmota/go-solidity-sha3"
"github.com/offchainlabs/arbitrum/packages/arb-util/protocol"
"github.com/offchainlabs/arbitrum/packages/arb-util/value"
"github.com/offchainlabs/arbitrum/packages/arb-validator/valmessage"
)
// CreateVMHash computes the solidity-compatible 32-byte digest identifying a
// VM creation request. It packs the VM config (grace period, escrow amount
// and currency, step limit, initial state, challenge manager, owner, and
// assert keys) in the exact order the hash expects.
func CreateVMHash(data *valmessage.CreateVMValidatorRequest) [32]byte {
	// Convert the raw key buffers into fixed-size addresses.
	keys := make([]common.Address, 0, len(data.Config.AssertKeys))
	for _, key := range data.Config.AssertKeys {
		var address common.Address
		copy(address[:], key.Value)
		keys = append(keys, address)
	}
	var owner common.Address
	copy(owner[:], data.Config.Owner.Value)
	var escrowCurrency common.Address
	copy(escrowCurrency[:], data.Config.EscrowCurrency.Value)
	createHash := solsha3.SoliditySHA3(
		solsha3.Uint32(uint32(data.Config.GracePeriod)),
		solsha3.Uint128(value.NewBigIntFromBuf(data.Config.EscrowRequired)),
		solsha3.Address(escrowCurrency),
		solsha3.Uint32(data.Config.MaxExecutionStepCount),
		solsha3.Bytes32(value.NewHashFromBuf(data.VmState)),
		solsha3.Uint16(data.ChallengeManagerNum),
		solsha3.Address(owner),
		solsha3.AddressArray(keys),
	)
	// Fix: removed the dead store `createHash = nil` that followed the copy;
	// it had no effect on the returned value.
	var ret [32]byte
	copy(ret[:], createHash)
	return ret
}
// SplitMessages flattens outgoing messages into the parallel slices expected
// by the on-chain encoding: per-message token index, amount, and destination,
// plus the token-type list derived from the balance tracker.
func SplitMessages(
	outMsgs []protocol.Message,
) ([]uint16, []*big.Int, [][32]byte, [][21]byte) {
	balance := protocol.NewBalanceTrackerFromMessages(outMsgs)
	tokenNums := make([]uint16, 0, len(outMsgs))
	amounts := make([]*big.Int, 0, len(outMsgs))
	destinations := make([][32]byte, 0, len(outMsgs))
	for _, msg := range outMsgs {
		// Token index is relative to the tracker's canonical type ordering.
		tokenNums = append(tokenNums,
			uint16(balance.TokenIndex(msg.TokenType, msg.Currency)))
		amounts = append(amounts, msg.Currency)
		destinations = append(destinations, msg.Destination)
	}
	// Amounts from the tracker are not needed here, only the type list.
	tokenTypes, _ := balance.GetTypesAndAmounts()
	return tokenNums, amounts, destinations, tokenTypes
}
// UnanimousAssertPartialPartialHash hashes the assertion fields common to
// every unanimous assertion proposal: the new inbox hash, the post-state
// hash, the marshalled message data, and the destination list.
// NOTE(review): messageData is passed by value; only Bytes() is read, so this
// appears harmless, but *bytes.Buffer would be the conventional choice —
// confirm no caller relies on value semantics before changing.
func UnanimousAssertPartialPartialHash(
	newInboxHash [32]byte,
	assertion *protocol.Assertion,
	messageData bytes.Buffer,
	destinations [][32]byte,
) []byte {
	return solsha3.SoliditySHA3(
		solsha3.Bytes32(newInboxHash),
		solsha3.Bytes32(assertion.AfterHash),
		messageData.Bytes(),
		value.Bytes32ArrayEncoded(destinations),
	)
}
// UnanimousAssertPartialHash hashes the fields of a unanimous assertion
// proposal. A sequenceNum of math.MaxUint64 marks a "final" assertion and is
// omitted from the hash; any other value is appended as a trailing uint64.
// Returns an error if any outgoing message value fails to marshal.
func UnanimousAssertPartialHash(
	sequenceNum uint64,
	beforeHash [32]byte,
	newInboxHash [32]byte,
	originalInboxHash [32]byte,
	assertion *protocol.Assertion,
) ([32]byte, error) {
	tokenNums, amounts, destinations, tokenTypes := SplitMessages(assertion.OutMsgs)

	var messageData bytes.Buffer
	for _, msg := range assertion.OutMsgs {
		if err := value.MarshalValue(msg.Data, &messageData); err != nil {
			return [32]byte{}, err
		}
	}

	// Both branches previously duplicated the entire argument list; only the
	// trailing sequence number is conditional, so build the arguments once
	// and append it when the assertion is not final.
	args := []interface{}{
		UnanimousAssertPartialPartialHash(
			newInboxHash,
			assertion,
			messageData,
			destinations,
		),
		solsha3.Bytes32(beforeHash),
		solsha3.Bytes32(originalInboxHash),
		protocol.TokenTypeArrayEncoded(tokenTypes),
		solsha3.Uint16Array(tokenNums),
		solsha3.Uint256Array(amounts),
	}
	if sequenceNum != math.MaxUint64 {
		args = append(args, solsha3.Uint64(sequenceNum))
	}

	var ret [32]byte
	copy(ret[:], solsha3.SoliditySHA3(args...))
	return ret, nil
}
func UnanimousAssertHash(
vmID [32]byte,
sequenceNum uint64,
beforeHash [32]byte,
newInboxHash [32]byte,
originalInboxHash [32]byte,
assertion *protocol.Assertion,
) ([32]byte, error) {
partialHash, err := UnanimousAssertPartialHash(
sequenceNum,
beforeHash,
newInboxHash,
originalInboxHash,
assertion,
)
if err != nil {
return [32]byte{}, nil
}
var hash [32]byte
copy(hash[:], solsha3.SoliditySHA3(
solsha3.Bytes32(vmID),
solsha3.Bytes32(partialHash),
solsha3.Bytes32(assertion.LogsHash()),
))
return hash, nil
} | packages/arb-validator/hashing/hashing.go | 0.598782 | 0.403214 | hashing.go | starcoder |
package analyze
import (
"fmt"
"io"
"math"
"sort"
"github.com/valyala/histogram"
)
// Bucket has the information for a collection of requests associated to the same memory size
type Bucket struct {
	Size                  int64           // configured memory size (MB) shared by all requests in the bucket
	Count                 int             // total number of requests recorded via update
	DurationHist          *histogram.Fast // distribution of request durations (ms)
	MemoryHist            *histogram.Fast // distribution of used memory (MB)
	CountByBilledDuration map[int64]int64 // request count keyed by billed duration (ms)
}
// newBucket creates an empty Bucket for the given memory size with fresh
// histograms and an empty billed-duration tally.
func newBucket(size int64) *Bucket {
	return &Bucket{
		Size:                  size,
		DurationHist:          histogram.NewFast(),
		MemoryHist:            histogram.NewFast(),
		CountByBilledDuration: map[int64]int64{},
	}
}
// update records one request's duration (ms), used memory (MB), and billed
// duration (ms) into the bucket's running statistics.
func (b *Bucket) update(duration float64, memoryUsed float64, billedDuration int64) {
	b.Count++
	b.DurationHist.Update(duration)
	b.MemoryHist.Update(memoryUsed)
	b.CountByBilledDuration[billedDuration]++
}
// CalculateSuggestedMemory returns a suggestion for optimizing the given percentile
func (b *Bucket) CalculateSuggestedMemory(percentile float64) int {
	duration := b.DurationHist.Quantile(percentile)
	targetDurationPos, _ := findBilledDuration(duration)
	currentMemoryPos, _ := findMemoryIndex(int(b.Size)) // memory matters because it's usually proportional to current duration if the task is cpu bounded
	// Blend the duration-derived position with half the current memory
	// position, then clamp to the largest configured memory bucket.
	avgPos := targetDurationPos + (currentMemoryPos / 2)
	if avgPos >= len(memoryBuckets) {
		avgPos = len(memoryBuckets) - 1
	}
	return memoryBuckets[avgPos]
}
// Print writes a human-readable report for the bucket to output: the
// billed-duration distribution, an estimated cost per million requests,
// duration and memory percentiles, and suggested memory sizes.
func (b *Bucket) Print(output io.Writer) {
	percentiles := []float64{
		0.01, 0.25, 0.50, 0.75, 0.99,
	}
	fmt.Fprintln(output, ">> Analyzing stats for memory bucket:", b.Size, "MB (total requests:", b.Count, ")")
	fmt.Fprintln(output, "> Top requests per billed duration")
	billedDurations := make([][]int64, 0, len(b.CountByBilledDuration))
	maxCount := int64(0)
	for billedDuration, count := range b.CountByBilledDuration {
		billedDurations = append(billedDurations, []int64{billedDuration, count})
		// BUG FIX: the maximum was previously computed inside the sort.Slice
		// comparator, which mutates state in a Less function and is not
		// guaranteed to visit every element; compute it while collecting.
		if count > maxCount {
			maxCount = count
		}
	}
	sort.Slice(billedDurations, func(i, j int) bool {
		return billedDurations[i][0] < billedDurations[j][0]
	})
	estimatedCostPerMillion := 0.20 // base per-request cost component per million invocations
	for _, billedDuration := range billedDurations {
		percent := (float64(billedDuration[1]) / float64(b.Count))
		// Only print rows that account for more than 10% of the busiest row.
		if billedDuration[1] > int64(float64(maxCount)*0.1) {
			fmt.Fprintf(output, "%d ms: %d (%0.2f%%)\n", billedDuration[0], billedDuration[1], percent*100)
		}
		totalRequests := 1_000_000.0 * percent
		units := billedDuration[0] / 100 // billing is per 100ms unit
		estimatedCostPerMillion += costMapping[b.Size] * float64(totalRequests) * float64(units)
	}
	fmt.Fprintf(output, "Estimated cost per million requests: %0.2f$\n", estimatedCostPerMillion)
	fmt.Fprintln(output, "")
	fmt.Fprintln(output, "> Distribution for durations")
	for _, percentile := range percentiles {
		pDuration := b.DurationHist.Quantile(percentile)
		printDurationPercentile(output, fmt.Sprintf("%dth percentile", int(percentile*100)), pDuration)
	}
	fmt.Fprintln(output, "")
	fmt.Fprintln(output, "> Distribution for used memory")
	for _, percentile := range percentiles {
		pMemory := b.MemoryHist.Quantile(percentile)
		fmt.Fprintf(output, "%dth percentile: %0.1f MB\n", int(percentile*100), pMemory)
	}
	fmt.Fprintln(output, "")
	fmt.Fprintln(output, "> Suggested memory based on your usage")
	for _, percentile := range percentiles {
		fmt.Fprintf(output, "Suggestion for %dth percentile: %d MB\n", int(percentile*100), b.CalculateSuggestedMemory(percentile))
	}
	fmt.Fprintln(output, "")
}
// printDurationPercentile writes one "label value ms billed: X ms" line,
// translating the raw duration into its billed tier via findBilledDuration.
func printDurationPercentile(output io.Writer, label string, value float64) {
	_, billed := findBilledDuration(value)
	fmt.Fprintln(output, label, value, "ms", "billed:", billed, "ms")
}
// findMemoryIndex scans the configured memory buckets for the first one that
// can hold usedMemory and returns a position plus that bucket's size in MB.
// NOTE(review): the returned position is i+1 (one past the matching bucket)
// while the returned size is memoryBuckets[i]; confirm callers expect this
// off-by-one pairing.
func findMemoryIndex(usedMemory int) (int, int) {
	for i, memory := range memoryBuckets {
		if memory >= usedMemory {
			return i + 1, memory
		}
	}
	// Fallback when usedMemory exceeds every configured bucket.
	return 0, 128
}
func findBilledDuration(duration float64) (int, int) {
incSize := 100
durI := int(math.Ceil(duration))
index := 0
for dur := incSize; dur <= 900000; dur += incSize {
if dur > durI {
return index, dur
}
index++
}
return 0, incSize
} | analyze/bucket.go | 0.70069 | 0.439807 | bucket.go | starcoder |
package cpu
import (
. "github.com/retroenv/nesgo/pkg/addressing"
)
// Opcode is a NES CPU opcode that contains the instruction info and used
// addressing mode.
type Opcode struct {
	Instruction    *Instruction // instruction implementation and metadata
	Addressing     Mode         // addressing mode selected by this opcode byte
	Timing         byte         // base execution time in CPU cycles
	PageCrossCycle bool         // whether crossing a page boundary costs an extra cycle
}
// Opcodes maps first opcode bytes to NES CPU instruction information.
// https://www.masswerk.at/6502/6502_instruction_set.html
var Opcodes = map[byte]Opcode{
	// Official instructions
	0x00: {Instruction: brk, Addressing: ImpliedAddressing, Timing: 7},
	0x01: {Instruction: ora, Addressing: IndirectXAddressing, Timing: 6},
	0x05: {Instruction: ora, Addressing: ZeroPageAddressing, Timing: 3},
	0x06: {Instruction: asl, Addressing: ZeroPageAddressing, Timing: 5},
	0x08: {Instruction: php, Addressing: ImpliedAddressing, Timing: 3},
	0x09: {Instruction: ora, Addressing: ImmediateAddressing, Timing: 2},
	0x0a: {Instruction: asl, Addressing: AccumulatorAddressing, Timing: 2},
	0x0d: {Instruction: ora, Addressing: AbsoluteAddressing, Timing: 4},
	0x0e: {Instruction: asl, Addressing: AbsoluteAddressing, Timing: 6},
	0x10: {Instruction: bpl, Addressing: RelativeAddressing, Timing: 2},
	0x11: {Instruction: ora, Addressing: IndirectYAddressing, Timing: 5, PageCrossCycle: true},
	0x15: {Instruction: ora, Addressing: ZeroPageXAddressing, Timing: 4},
	0x16: {Instruction: asl, Addressing: ZeroPageXAddressing, Timing: 6},
	0x18: {Instruction: clc, Addressing: ImpliedAddressing, Timing: 2},
	0x19: {Instruction: ora, Addressing: AbsoluteYAddressing, Timing: 4, PageCrossCycle: true},
	0x1d: {Instruction: ora, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	0x1e: {Instruction: asl, Addressing: AbsoluteXAddressing, Timing: 7},
	0x20: {Instruction: jsr, Addressing: AbsoluteAddressing, Timing: 6},
	0x21: {Instruction: and, Addressing: IndirectXAddressing, Timing: 6},
	0x24: {Instruction: bit, Addressing: ZeroPageAddressing, Timing: 3},
	0x25: {Instruction: and, Addressing: ZeroPageAddressing, Timing: 3},
	0x26: {Instruction: rol, Addressing: ZeroPageAddressing, Timing: 5},
	0x28: {Instruction: plp, Addressing: ImpliedAddressing, Timing: 4},
	0x29: {Instruction: and, Addressing: ImmediateAddressing, Timing: 2},
	0x2a: {Instruction: rol, Addressing: AccumulatorAddressing, Timing: 2},
	0x2c: {Instruction: bit, Addressing: AbsoluteAddressing, Timing: 4},
	0x2d: {Instruction: and, Addressing: AbsoluteAddressing, Timing: 4},
	0x2e: {Instruction: rol, Addressing: AbsoluteAddressing, Timing: 6},
	0x30: {Instruction: bmi, Addressing: RelativeAddressing, Timing: 2},
	0x31: {Instruction: and, Addressing: IndirectYAddressing, Timing: 5, PageCrossCycle: true},
	0x35: {Instruction: and, Addressing: ZeroPageXAddressing, Timing: 4},
	0x36: {Instruction: rol, Addressing: ZeroPageXAddressing, Timing: 6},
	0x38: {Instruction: sec, Addressing: ImpliedAddressing, Timing: 2},
	0x39: {Instruction: and, Addressing: AbsoluteYAddressing, Timing: 4, PageCrossCycle: true},
	0x3d: {Instruction: and, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	0x3e: {Instruction: rol, Addressing: AbsoluteXAddressing, Timing: 7},
	0x40: {Instruction: rti, Addressing: ImpliedAddressing, Timing: 6},
	0x41: {Instruction: eor, Addressing: IndirectXAddressing, Timing: 6},
	0x45: {Instruction: eor, Addressing: ZeroPageAddressing, Timing: 3},
	0x46: {Instruction: lsr, Addressing: ZeroPageAddressing, Timing: 5},
	0x48: {Instruction: pha, Addressing: ImpliedAddressing, Timing: 3},
	0x49: {Instruction: eor, Addressing: ImmediateAddressing, Timing: 2},
	0x4a: {Instruction: lsr, Addressing: AccumulatorAddressing, Timing: 2},
	0x4c: {Instruction: jmp, Addressing: AbsoluteAddressing, Timing: 3},
	0x4d: {Instruction: eor, Addressing: AbsoluteAddressing, Timing: 4},
	0x4e: {Instruction: lsr, Addressing: AbsoluteAddressing, Timing: 6},
	0x50: {Instruction: bvc, Addressing: RelativeAddressing, Timing: 2},
	0x51: {Instruction: eor, Addressing: IndirectYAddressing, Timing: 5, PageCrossCycle: true},
	0x55: {Instruction: eor, Addressing: ZeroPageXAddressing, Timing: 4},
	0x56: {Instruction: lsr, Addressing: ZeroPageXAddressing, Timing: 6},
	0x58: {Instruction: cli, Addressing: ImpliedAddressing, Timing: 2},
	0x59: {Instruction: eor, Addressing: AbsoluteYAddressing, Timing: 4, PageCrossCycle: true},
	0x5d: {Instruction: eor, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	// NOTE(review): the sibling abs,X read-modify-write shifts (0x1e asl,
	// 0x3e rol, 0x7e ror) use a fixed 7 cycles with no PageCrossCycle;
	// confirm the flag on 0x5e is intentional.
	0x5e: {Instruction: lsr, Addressing: AbsoluteXAddressing, Timing: 7, PageCrossCycle: true},
	0x60: {Instruction: rts, Addressing: ImpliedAddressing, Timing: 6},
	0x61: {Instruction: adc, Addressing: IndirectXAddressing, Timing: 6},
	0x65: {Instruction: adc, Addressing: ZeroPageAddressing, Timing: 3},
	0x66: {Instruction: ror, Addressing: ZeroPageAddressing, Timing: 5},
	0x68: {Instruction: pla, Addressing: ImpliedAddressing, Timing: 4},
	0x69: {Instruction: adc, Addressing: ImmediateAddressing, Timing: 2},
	0x6a: {Instruction: ror, Addressing: AccumulatorAddressing, Timing: 2},
	0x6c: {Instruction: jmp, Addressing: IndirectAddressing, Timing: 5},
	0x6d: {Instruction: adc, Addressing: AbsoluteAddressing, Timing: 4},
	0x6e: {Instruction: ror, Addressing: AbsoluteAddressing, Timing: 6},
	0x70: {Instruction: bvs, Addressing: RelativeAddressing, Timing: 2},
	0x71: {Instruction: adc, Addressing: IndirectYAddressing, Timing: 5, PageCrossCycle: true},
	0x75: {Instruction: adc, Addressing: ZeroPageXAddressing, Timing: 4},
	0x76: {Instruction: ror, Addressing: ZeroPageXAddressing, Timing: 6},
	0x78: {Instruction: sei, Addressing: ImpliedAddressing, Timing: 2},
	0x79: {Instruction: adc, Addressing: AbsoluteYAddressing, Timing: 4, PageCrossCycle: true},
	0x7d: {Instruction: adc, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	0x7e: {Instruction: ror, Addressing: AbsoluteXAddressing, Timing: 7},
	0x81: {Instruction: sta, Addressing: IndirectXAddressing, Timing: 6},
	0x84: {Instruction: sty, Addressing: ZeroPageAddressing, Timing: 3},
	0x85: {Instruction: sta, Addressing: ZeroPageAddressing, Timing: 3},
	0x86: {Instruction: stx, Addressing: ZeroPageAddressing, Timing: 3},
	0x88: {Instruction: dey, Addressing: ImpliedAddressing, Timing: 2},
	0x8a: {Instruction: txa, Addressing: ImpliedAddressing, Timing: 2},
	0x8c: {Instruction: sty, Addressing: AbsoluteAddressing, Timing: 4},
	0x8d: {Instruction: sta, Addressing: AbsoluteAddressing, Timing: 4},
	0x8e: {Instruction: stx, Addressing: AbsoluteAddressing, Timing: 4},
	0x90: {Instruction: bcc, Addressing: RelativeAddressing, Timing: 2},
	0x91: {Instruction: sta, Addressing: IndirectYAddressing, Timing: 6},
	0x94: {Instruction: sty, Addressing: ZeroPageXAddressing, Timing: 4},
	0x95: {Instruction: sta, Addressing: ZeroPageXAddressing, Timing: 4},
	0x96: {Instruction: stx, Addressing: ZeroPageYAddressing, Timing: 4},
	0x98: {Instruction: tya, Addressing: ImpliedAddressing, Timing: 2},
	0x99: {Instruction: sta, Addressing: AbsoluteYAddressing, Timing: 5},
	0x9a: {Instruction: txs, Addressing: ImpliedAddressing, Timing: 2},
	0x9d: {Instruction: sta, Addressing: AbsoluteXAddressing, Timing: 5},
	0xa0: {Instruction: ldy, Addressing: ImmediateAddressing, Timing: 2},
	0xa1: {Instruction: lda, Addressing: IndirectXAddressing, Timing: 6},
	0xa2: {Instruction: ldx, Addressing: ImmediateAddressing, Timing: 2},
	0xa4: {Instruction: ldy, Addressing: ZeroPageAddressing, Timing: 3},
	0xa5: {Instruction: lda, Addressing: ZeroPageAddressing, Timing: 3},
	0xa6: {Instruction: ldx, Addressing: ZeroPageAddressing, Timing: 3},
	0xa8: {Instruction: tay, Addressing: ImpliedAddressing, Timing: 2},
	0xa9: {Instruction: lda, Addressing: ImmediateAddressing, Timing: 2},
	0xaa: {Instruction: tax, Addressing: ImpliedAddressing, Timing: 2},
	0xac: {Instruction: ldy, Addressing: AbsoluteAddressing, Timing: 4},
	0xad: {Instruction: lda, Addressing: AbsoluteAddressing, Timing: 4},
	0xae: {Instruction: ldx, Addressing: AbsoluteAddressing, Timing: 4},
	0xb0: {Instruction: bcs, Addressing: RelativeAddressing, Timing: 2},
	0xb1: {Instruction: lda, Addressing: IndirectYAddressing, Timing: 5, PageCrossCycle: true},
	0xb4: {Instruction: ldy, Addressing: ZeroPageXAddressing, Timing: 4},
	0xb5: {Instruction: lda, Addressing: ZeroPageXAddressing, Timing: 4},
	0xb6: {Instruction: ldx, Addressing: ZeroPageYAddressing, Timing: 4},
	0xb8: {Instruction: clv, Addressing: ImpliedAddressing, Timing: 2},
	0xb9: {Instruction: lda, Addressing: AbsoluteYAddressing, Timing: 4, PageCrossCycle: true},
	0xba: {Instruction: tsx, Addressing: ImpliedAddressing, Timing: 2},
	0xbc: {Instruction: ldy, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	0xbd: {Instruction: lda, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	0xbe: {Instruction: ldx, Addressing: AbsoluteYAddressing, Timing: 4, PageCrossCycle: true},
	0xc0: {Instruction: cpy, Addressing: ImmediateAddressing, Timing: 2},
	0xc1: {Instruction: cmp, Addressing: IndirectXAddressing, Timing: 6},
	0xc4: {Instruction: cpy, Addressing: ZeroPageAddressing, Timing: 3},
	0xc5: {Instruction: cmp, Addressing: ZeroPageAddressing, Timing: 3},
	0xc6: {Instruction: dec, Addressing: ZeroPageAddressing, Timing: 5},
	0xc8: {Instruction: iny, Addressing: ImpliedAddressing, Timing: 2},
	0xc9: {Instruction: cmp, Addressing: ImmediateAddressing, Timing: 2},
	0xca: {Instruction: dex, Addressing: ImpliedAddressing, Timing: 2},
	0xcc: {Instruction: cpy, Addressing: AbsoluteAddressing, Timing: 4},
	0xcd: {Instruction: cmp, Addressing: AbsoluteAddressing, Timing: 4},
	0xce: {Instruction: dec, Addressing: AbsoluteAddressing, Timing: 6},
	0xd0: {Instruction: bne, Addressing: RelativeAddressing, Timing: 2},
	0xd1: {Instruction: cmp, Addressing: IndirectYAddressing, Timing: 5, PageCrossCycle: true},
	0xd5: {Instruction: cmp, Addressing: ZeroPageXAddressing, Timing: 4},
	0xd6: {Instruction: dec, Addressing: ZeroPageXAddressing, Timing: 6},
	0xd8: {Instruction: cld, Addressing: ImpliedAddressing, Timing: 2},
	0xd9: {Instruction: cmp, Addressing: AbsoluteYAddressing, Timing: 4, PageCrossCycle: true},
	0xdd: {Instruction: cmp, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	0xde: {Instruction: dec, Addressing: AbsoluteXAddressing, Timing: 7},
	0xe0: {Instruction: cpx, Addressing: ImmediateAddressing, Timing: 2},
	0xe1: {Instruction: sbc, Addressing: IndirectXAddressing, Timing: 6},
	0xe4: {Instruction: cpx, Addressing: ZeroPageAddressing, Timing: 3},
	0xe5: {Instruction: sbc, Addressing: ZeroPageAddressing, Timing: 3},
	0xe6: {Instruction: inc, Addressing: ZeroPageAddressing, Timing: 5},
	0xe8: {Instruction: inx, Addressing: ImpliedAddressing, Timing: 2},
	0xe9: {Instruction: sbc, Addressing: ImmediateAddressing, Timing: 2},
	0xea: {Instruction: nop, Addressing: ImpliedAddressing, Timing: 2},
	0xec: {Instruction: cpx, Addressing: AbsoluteAddressing, Timing: 4},
	0xed: {Instruction: sbc, Addressing: AbsoluteAddressing, Timing: 4},
	0xee: {Instruction: inc, Addressing: AbsoluteAddressing, Timing: 6},
	0xf0: {Instruction: beq, Addressing: RelativeAddressing, Timing: 2},
	0xf1: {Instruction: sbc, Addressing: IndirectYAddressing, Timing: 5, PageCrossCycle: true},
	0xf5: {Instruction: sbc, Addressing: ZeroPageXAddressing, Timing: 4},
	0xf6: {Instruction: inc, Addressing: ZeroPageXAddressing, Timing: 6},
	0xf8: {Instruction: sed, Addressing: ImpliedAddressing, Timing: 2},
	0xf9: {Instruction: sbc, Addressing: AbsoluteYAddressing, Timing: 4, PageCrossCycle: true},
	0xfd: {Instruction: sbc, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	// NOTE(review): 0xde (dec abs,X) uses a fixed 7 cycles with no
	// PageCrossCycle; confirm the flag on 0xfe (inc abs,X) is intentional.
	0xfe: {Instruction: inc, Addressing: AbsoluteXAddressing, Timing: 7, PageCrossCycle: true},
	// Unofficial instructions
	0x03: {Instruction: unofficialSlo, Addressing: IndirectXAddressing, Timing: 8},
	0x04: {Instruction: unofficialNop, Addressing: ZeroPageAddressing, Timing: 3},
	0x07: {Instruction: unofficialSlo, Addressing: ZeroPageAddressing, Timing: 5},
	0x0c: {Instruction: unofficialNop, Addressing: AbsoluteAddressing, Timing: 4},
	0x0f: {Instruction: unofficialSlo, Addressing: AbsoluteAddressing, Timing: 6},
	0x13: {Instruction: unofficialSlo, Addressing: IndirectYAddressing, Timing: 8},
	0x14: {Instruction: unofficialNop, Addressing: ZeroPageXAddressing, Timing: 4},
	0x17: {Instruction: unofficialSlo, Addressing: ZeroPageXAddressing, Timing: 6},
	0x1a: {Instruction: unofficialNop, Addressing: ImpliedAddressing, Timing: 2},
	0x1b: {Instruction: unofficialSlo, Addressing: AbsoluteYAddressing, Timing: 7},
	0x1c: {Instruction: unofficialNop, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	0x1f: {Instruction: unofficialSlo, Addressing: AbsoluteXAddressing, Timing: 7},
	0x23: {Instruction: unofficialRla, Addressing: IndirectXAddressing, Timing: 8},
	0x27: {Instruction: unofficialRla, Addressing: ZeroPageAddressing, Timing: 5},
	0x2f: {Instruction: unofficialRla, Addressing: AbsoluteAddressing, Timing: 6},
	0x33: {Instruction: unofficialRla, Addressing: IndirectYAddressing, Timing: 8},
	0x34: {Instruction: unofficialNop, Addressing: ZeroPageXAddressing, Timing: 4},
	0x37: {Instruction: unofficialRla, Addressing: ZeroPageXAddressing, Timing: 6},
	0x3a: {Instruction: unofficialNop, Addressing: ImpliedAddressing, Timing: 2},
	0x3b: {Instruction: unofficialRla, Addressing: AbsoluteYAddressing, Timing: 7},
	0x3c: {Instruction: unofficialNop, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	0x3f: {Instruction: unofficialRla, Addressing: AbsoluteXAddressing, Timing: 7},
	0x43: {Instruction: unofficialSre, Addressing: IndirectXAddressing, Timing: 8},
	0x44: {Instruction: unofficialNop, Addressing: ZeroPageAddressing, Timing: 3},
	0x47: {Instruction: unofficialSre, Addressing: ZeroPageAddressing, Timing: 5},
	0x4f: {Instruction: unofficialSre, Addressing: AbsoluteAddressing, Timing: 6},
	0x53: {Instruction: unofficialSre, Addressing: IndirectYAddressing, Timing: 8},
	0x54: {Instruction: unofficialNop, Addressing: ZeroPageXAddressing, Timing: 4},
	0x57: {Instruction: unofficialSre, Addressing: ZeroPageXAddressing, Timing: 6},
	0x5a: {Instruction: unofficialNop, Addressing: ImpliedAddressing, Timing: 2},
	0x5b: {Instruction: unofficialSre, Addressing: AbsoluteYAddressing, Timing: 7},
	0x5c: {Instruction: unofficialNop, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	0x5f: {Instruction: unofficialSre, Addressing: AbsoluteXAddressing, Timing: 7},
	0x63: {Instruction: unofficialRra, Addressing: IndirectXAddressing, Timing: 8},
	0x64: {Instruction: unofficialNop, Addressing: ZeroPageAddressing, Timing: 3},
	0x67: {Instruction: unofficialRra, Addressing: ZeroPageAddressing, Timing: 5},
	0x6f: {Instruction: unofficialRra, Addressing: AbsoluteAddressing, Timing: 6},
	0x73: {Instruction: unofficialRra, Addressing: IndirectYAddressing, Timing: 8},
	0x74: {Instruction: unofficialNop, Addressing: ZeroPageXAddressing, Timing: 4},
	0x77: {Instruction: unofficialRra, Addressing: ZeroPageXAddressing, Timing: 6},
	0x7a: {Instruction: unofficialNop, Addressing: ImpliedAddressing, Timing: 2},
	0x7b: {Instruction: unofficialRra, Addressing: AbsoluteYAddressing, Timing: 7},
	0x7c: {Instruction: unofficialNop, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	0x7f: {Instruction: unofficialRra, Addressing: AbsoluteXAddressing, Timing: 7},
	0x80: {Instruction: unofficialNop, Addressing: ImmediateAddressing, Timing: 2},
	0x82: {Instruction: unofficialNop, Addressing: ImmediateAddressing, Timing: 2},
	0x83: {Instruction: unofficialSax, Addressing: IndirectXAddressing, Timing: 6},
	0x87: {Instruction: unofficialSax, Addressing: ZeroPageAddressing, Timing: 3},
	0x89: {Instruction: unofficialNop, Addressing: ImmediateAddressing, Timing: 2},
	0x8f: {Instruction: unofficialSax, Addressing: AbsoluteAddressing, Timing: 4},
	0x97: {Instruction: unofficialSax, Addressing: ZeroPageYAddressing, Timing: 4},
	0xa3: {Instruction: unofficialLax, Addressing: IndirectXAddressing, Timing: 6},
	0xa7: {Instruction: unofficialLax, Addressing: ZeroPageAddressing, Timing: 3},
	0xaf: {Instruction: unofficialLax, Addressing: AbsoluteAddressing, Timing: 4},
	0xb3: {Instruction: unofficialLax, Addressing: IndirectYAddressing, Timing: 5, PageCrossCycle: true},
	0xb7: {Instruction: unofficialLax, Addressing: ZeroPageYAddressing, Timing: 4},
	// NOTE(review): 0xbe (ldx abs,Y) sets PageCrossCycle but this lax abs,Y
	// entry does not; confirm the omission is intentional.
	0xbf: {Instruction: unofficialLax, Addressing: AbsoluteYAddressing, Timing: 4},
	0xc2: {Instruction: unofficialNop, Addressing: ImmediateAddressing, Timing: 2},
	0xc3: {Instruction: unofficialDcp, Addressing: IndirectXAddressing, Timing: 8},
	0xc7: {Instruction: unofficialDcp, Addressing: ZeroPageAddressing, Timing: 5},
	0xcf: {Instruction: unofficialDcp, Addressing: AbsoluteAddressing, Timing: 6},
	0xd3: {Instruction: unofficialDcp, Addressing: IndirectYAddressing, Timing: 8},
	0xd4: {Instruction: unofficialNop, Addressing: ZeroPageXAddressing, Timing: 4},
	0xd7: {Instruction: unofficialDcp, Addressing: ZeroPageXAddressing, Timing: 6},
	0xda: {Instruction: unofficialNop, Addressing: ImpliedAddressing, Timing: 2},
	0xdb: {Instruction: unofficialDcp, Addressing: AbsoluteYAddressing, Timing: 7},
	0xdc: {Instruction: unofficialNop, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	0xdf: {Instruction: unofficialDcp, Addressing: AbsoluteXAddressing, Timing: 7},
	0xe2: {Instruction: unofficialNop, Addressing: ImmediateAddressing, Timing: 2},
	0xe3: {Instruction: unofficialIsb, Addressing: IndirectXAddressing, Timing: 8},
	0xe7: {Instruction: unofficialIsb, Addressing: ZeroPageAddressing, Timing: 5},
	0xeb: {Instruction: unofficialSbc, Addressing: ImmediateAddressing, Timing: 2},
	0xef: {Instruction: unofficialIsb, Addressing: AbsoluteAddressing, Timing: 6},
	0xf3: {Instruction: unofficialIsb, Addressing: IndirectYAddressing, Timing: 8},
	0xf4: {Instruction: unofficialNop, Addressing: ZeroPageXAddressing, Timing: 4},
	0xf7: {Instruction: unofficialIsb, Addressing: ZeroPageXAddressing, Timing: 6},
	0xfa: {Instruction: unofficialNop, Addressing: ImpliedAddressing, Timing: 2},
	0xfb: {Instruction: unofficialIsb, Addressing: AbsoluteYAddressing, Timing: 7},
	0xfc: {Instruction: unofficialNop, Addressing: AbsoluteXAddressing, Timing: 4, PageCrossCycle: true},
	0xff: {Instruction: unofficialIsb, Addressing: AbsoluteXAddressing, Timing: 7},
}
package lm
import (
"fmt"
"github.com/barnex/fmath"
)
// Vec3 is a 3-component float32 vector.
type Vec3 [3]float32

// Pointer returns a pointer to the underlying array (for GL-style APIs).
func (v *Vec3) Pointer() *[3]float32 { return (*[3]float32)(v) }

// Slice returns the components as a mutable slice view of the vector.
func (v *Vec3) Slice() []float32 { return v[:] }

// Single-component accessors.
func (v Vec3) X() float32 { return v[0] }
func (v Vec3) Y() float32 { return v[1] }
func (v Vec3) Z() float32 { return v[2] }

// Pairwise component accessors returning two scalars in the named order.
func (v Vec3) XY() (float32, float32) { return v[0], v[1] }
func (v Vec3) XZ() (float32, float32) { return v[0], v[2] }
func (v Vec3) YX() (float32, float32) { return v[1], v[0] }
func (v Vec3) YZ() (float32, float32) { return v[1], v[2] }
func (v Vec3) ZX() (float32, float32) { return v[2], v[0] }
func (v Vec3) ZY() (float32, float32) { return v[2], v[1] }

// XYZ returns all three components as scalars.
func (v Vec3) XYZ() (float32, float32, float32) { return v[0], v[1], v[2] }

// Swizzle constructors returning a Vec2 built from two components.
func (v Vec3) XYVec() Vec2 { return Vec2{v[0], v[1]} }
func (v Vec3) XZVec() Vec2 { return Vec2{v[0], v[2]} }
func (v Vec3) YXVec() Vec2 { return Vec2{v[1], v[0]} }
func (v Vec3) YZVec() Vec2 { return Vec2{v[1], v[2]} }
func (v Vec3) ZXVec() Vec2 { return Vec2{v[2], v[0]} }
func (v Vec3) ZYVec() Vec2 { return Vec2{v[2], v[1]} }

// Single-component setters (mutate in place).
func (v *Vec3) SetX(x float32) { v[0] = x }
func (v *Vec3) SetY(y float32) { v[1] = y }
func (v *Vec3) SetZ(z float32) { v[2] = z }
// SetXYZ sets all three components at once (mutates in place).
// Fix: removed the redundant bare `return` at the end of this void function.
func (v *Vec3) SetXYZ(x, y, z float32) {
	v[0] = x
	v[1] = y
	v[2] = z
}
// String implements fmt.Stringer, formatting the vector as "[x,y,z]".
func (v Vec3) String() string {
	return fmt.Sprintf("[%f,%f,%f]", v[0], v[1], v[2])
}
// Add returns the component-wise sum v1 + v2.
func (v1 Vec3) Add(v2 Vec3) Vec3 {
	return Vec3{v1[0] + v2[0], v1[1] + v2[1], v1[2] + v2[2]}
}

// Sub returns the component-wise difference v1 - v2.
func (v1 Vec3) Sub(v2 Vec3) Vec3 {
	return Vec3{v1[0] - v2[0], v1[1] - v2[1], v1[2] - v2[2]}
}

// Mul returns the component-wise (Hadamard) product of v1 and v2.
func (v1 Vec3) Mul(v2 Vec3) Vec3 {
	return Vec3{v1[0] * v2[0], v1[1] * v2[1], v1[2] * v2[2]}
}

// MulF returns v scaled by the scalar f.
func (v Vec3) MulF(f float32) Vec3 {
	return Vec3{v[0] * f, v[1] * f, v[2] * f}
}
// MulMat4x4 transforms v by m as a homogeneous point (w = 1) and applies the
// perspective divide to the result.
func (v Vec3) MulMat4x4(m Mat4x4) Vec3 {
	v, w := v.MulMat4x4W(m)
	return v.DivF(w)
}

// MulMat4x4W transforms v by m and returns the transformed vector together
// with the resulting w component (no divide performed).
// NOTE(review): the indexing (m[0], m[4], m[8], m[12] feeding x) implies
// column-major matrix storage — confirm against Mat4x4's layout.
func (v Vec3) MulMat4x4W(m Mat4x4) (Vec3, float32) {
	return Vec3{
			m[0]*v[0] + m[4]*v[1] + m[8]*v[2] + m[12],
			m[1]*v[0] + m[5]*v[1] + m[9]*v[2] + m[13],
			m[2]*v[0] + m[6]*v[1] + m[10]*v[2] + m[14]},
		m[3]*v[0] + m[7]*v[1] + m[11]*v[2] + m[15]
}
// MulQuat rotates v by the quaternion q, computed as q * (v, 0) * conj(q).
func (v Vec3) MulQuat(q Quat) Vec3 {
	return q.MulQuat(Quat{v[0], v[1], v[2], 0}).MulQuat(q.Conjugate()).XYZVec()
}
// Div returns the component-wise quotient v1 / v2.
func (v1 Vec3) Div(v2 Vec3) Vec3 {
	return Vec3{v1[0] / v2[0], v1[1] / v2[1], v1[2] / v2[2]}
}

// DivF returns v with every component divided by the scalar f.
func (v Vec3) DivF(f float32) Vec3 {
	return Vec3{v[0] / f, v[1] / f, v[2] / f}
}
// Dot returns the dot (inner) product of v1 and v2.
func (v1 Vec3) Dot(v2 Vec3) float32 {
	return v1[0]*v2[0] + v1[1]*v2[1] + v1[2]*v2[2]
}

// Cross returns the cross product v1 x v2.
func (v1 Vec3) Cross(v2 Vec3) Vec3 {
	return Vec3{
		v1[1]*v2[2] - v1[2]*v2[1],
		v1[2]*v2[0] - v1[0]*v2[2],
		v1[0]*v2[1] - v1[1]*v2[0]}
}
// Len returns the Euclidean length of v.
func (v Vec3) Len() float32 {
	return fmath.Sqrt(v[0]*v[0] + v[1]*v[1] + v[2]*v[2])
}

// LenSqr returns the squared length of v (avoids the sqrt of Len).
func (v Vec3) LenSqr() float32 {
	return v[0]*v[0] + v[1]*v[1] + v[2]*v[2]
}

// Norm returns v scaled to unit length.
// NOTE(review): no zero-length guard — a zero vector divides by zero and
// yields non-finite components; confirm callers never pass one.
func (v Vec3) Norm() Vec3 {
	return v.MulF(1.0 / v.Len())
}
func (v Vec3) AngleTo(v2 Vec3) float32 {
return fmath.Acos(v.Norm().Dot(v2.Norm()))
} | vec3.go | 0.769254 | 0.585249 | vec3.go | starcoder |
package render3d
import (
"math"
"github.com/unixpickle/essentials"
"github.com/unixpickle/model3d/model3d"
)
// A rayRenderer renders objects using any algorithm that
// can render pixels given an outgoing ray.
type rayRenderer struct {
RayColor func(g *goInfo, obj Object, ray *model3d.Ray) Color
Camera *Camera
NumSamples int
MinSamples int
MaxStddev float64
OversaturatedStddevs float64
Convergence func(mean, stddev Color) bool
Antialias float64
LogFunc func(frac float64, sampleRate float64)
}
func (r *rayRenderer) Render(img *Image, obj Object) {
if r.NumSamples == 0 {
panic("must set NumSamples to non-zero for rayRenderer")
}
maxX := float64(img.Width) - 1
maxY := float64(img.Height) - 1
caster := r.Camera.Caster(maxX, maxY)
progressCh := make(chan int, 1)
go func() {
mapCoordinates(img.Width, img.Height, func(g *goInfo, x, y, idx int) {
color, numSamples := r.estimateColor(g, obj, float64(x), float64(y), caster)
img.Data[idx] = color
progressCh <- numSamples
})
close(progressCh)
}()
updateInterval := essentials.MaxInt(1, img.Width*img.Height/1000)
var pixelsComplete int
var samplesTaken int
for n := range progressCh {
if r.LogFunc != nil {
pixelsComplete++
samplesTaken += n
if pixelsComplete%updateInterval == 0 {
r.LogFunc(float64(pixelsComplete)/float64(img.Width*img.Height),
float64(samplesTaken)/float64(pixelsComplete))
}
}
}
}
func (r *rayRenderer) RenderVariance(img *Image, obj Object, numSamples int) {
maxX := float64(img.Width) - 1
maxY := float64(img.Height) - 1
caster := r.Camera.Caster(maxX, maxY)
mapCoordinates(img.Width, img.Height, func(g *goInfo, x, y, idx int) {
img.Data[idx] = r.estimateVariance(g, obj, float64(x), float64(y), caster,
numSamples)
})
}
func (r *rayRenderer) RayVariance(obj Object, width, height, samples int) float64 {
if samples < 2 {
panic("need to take at least two samples")
}
img := NewImage(width, height)
// No anti-aliasing for backwards-compatibility.
r1 := *r
r1.Antialias = 0
r1.RenderVariance(img, obj, samples)
var totalVariance float64
for _, c := range img.Data {
totalVariance += c.Sum()
}
return totalVariance / float64(3*width*height)
}
// estimateVariance traces numSamples rays through pixel (x, y) and returns
// the per-channel sample variance of the observed colors (Bessel-corrected),
// clamped so no channel is negative.
func (r *rayRenderer) estimateVariance(g *goInfo, obj Object, x, y float64,
	caster func(x, y float64) model3d.Coord3D, numSamples int) Color {
	ray := model3d.Ray{Origin: r.Camera.Origin}
	ray.Direction = caster(x, y)
	var colorSum Color
	var colorSqSum Color
	for i := 0; i < numSamples; i++ {
		// Jitter within the pixel when anti-aliasing is enabled.
		if r.Antialias != 0 {
			dx := r.Antialias * (g.Gen.Float64() - 0.5)
			dy := r.Antialias * (g.Gen.Float64() - 0.5)
			ray.Direction = caster(x+dx, y+dy)
		}
		sampleColor := r.RayColor(g, obj, &ray)
		colorSum = colorSum.Add(sampleColor)
		colorSqSum = colorSqSum.Add(sampleColor.Mul(sampleColor))
	}
	// Var = E[X^2] - E[X]^2 over the collected samples.
	mean := colorSum.Scale(1 / float64(numSamples))
	variance := colorSqSum.Scale(1 / float64(numSamples)).Sub(mean.Mul(mean))
	// Bessel's correction.
	variance = variance.Scale(float64(numSamples) / float64(numSamples-1))
	// Clamp tiny negative values caused by floating-point cancellation.
	return variance.Max(Color{})
}
// estimateColor adaptively samples pixel (x, y): it traces up to
// r.NumSamples rays and, when a convergence check is configured (see
// HasConvergenceCheck), stops early once the running mean and standard-error
// estimate satisfy Converged. It returns the mean color and the number of
// loop iterations performed.
//
// NOTE(review): inside the loop the mean/stddev divide by numSamples even
// though colorSum already holds numSamples+1 samples at that point, and the
// final division uses the iteration count at break time — presumably a
// deliberately conservative bias; confirm before relying on exact values.
func (r *rayRenderer) estimateColor(g *goInfo, obj Object, x, y float64,
	caster func(x, y float64) model3d.Coord3D) (sampleMean Color, numSamples int) {
	ray := model3d.Ray{Origin: r.Camera.Origin}
	ray.Direction = caster(x, y)
	var colorSum Color
	var colorSqSum Color
	for numSamples = 0; numSamples < r.NumSamples; numSamples++ {
		// Jitter within the pixel when anti-aliasing is enabled.
		if r.Antialias != 0 {
			dx := r.Antialias * (g.Gen.Float64() - 0.5)
			dy := r.Antialias * (g.Gen.Float64() - 0.5)
			ray.Direction = caster(x+dx, y+dy)
		}
		sampleColor := r.RayColor(g, obj, &ray)
		colorSum = colorSum.Add(sampleColor)
		// Without a convergence check there is no need to track second
		// moments; always run the full NumSamples.
		if !r.HasConvergenceCheck() {
			continue
		}
		colorSqSum = colorSqSum.Add(sampleColor.Mul(sampleColor))
		// Require the minimum sample count (and at least 3 samples) before
		// testing convergence.
		if numSamples < r.MinSamples || numSamples < 2 {
			continue
		}
		mean := colorSum.Scale(1 / float64(numSamples))
		variance := colorSqSum.Scale(1 / float64(numSamples)).Sub(mean.Mul(mean))
		variance = variance.Max(Color{})
		// Standard error of the mean, per channel.
		stddev := Color{
			X: math.Sqrt(variance.X),
			Y: math.Sqrt(variance.Y),
			Z: math.Sqrt(variance.Z),
		}.Scale(math.Sqrt(float64(numSamples)) / float64(numSamples-1))
		if r.Converged(mean, stddev) {
			break
		}
	}
	return colorSum.Scale(1 / float64(numSamples)), numSamples
}
// HasConvergenceCheck reports whether adaptive sampling is enabled:
// MinSamples must be set, together with either a MaxStddev threshold or a
// custom Convergence function.
func (r *rayRenderer) HasConvergenceCheck() bool {
	if r.MinSamples == 0 {
		return false
	}
	return r.MaxStddev != 0 || r.Convergence != nil
}
func (r *rayRenderer) Converged(mean, stddev Color) bool {
if r.Convergence != nil {
return r.Convergence(mean, stddev)
}
meanArr := mean.Array()
for i, stddev := range stddev.Array() {
switch true {
case stddev < r.MaxStddev:
case r.OversaturatedStddevs != 0 && meanArr[i]-r.OversaturatedStddevs*stddev > 1:
default:
return false
}
}
return true
} | render3d/ray_renderer.go | 0.70791 | 0.47658 | ray_renderer.go | starcoder |
package iso20022
// CreditTransfer6 is a payment instrument between a debtor and a creditor,
// which flows through one or more financial institutions or systems.
type CreditTransfer6 struct {
	// Information supplied to enable the matching of an entry with the items that the transfer is intended to settle, such as commercial invoices in an accounts' receivable system.
	Reference *Max35Text `xml:"Ref,omitempty"`
	// Party that owes an amount of money to the (ultimate) creditor. In the context of the payment model, the debtor is also the debit account owner.
	Debtor *PartyIdentification2Choice `xml:"Dbtr,omitempty"`
	// Unambiguous identification of the account of the debtor to which a debit entry will be made as a result of the transaction.
	DebtorAccount *AccountIdentificationAndName3 `xml:"DbtrAcct,omitempty"`
	// Financial institution servicing an account for the debtor.
	DebtorAgent *FinancialInstitutionIdentification3Choice `xml:"DbtrAgt,omitempty"`
	// Identifies the account of the debtor's agent.
	DebtorAgentAccount *AccountIdentificationAndName3 `xml:"DbtrAgtAcct,omitempty"`
	// Agent between the debtor's agent and the creditor's agent.
	IntermediaryAgent1 *FinancialInstitutionIdentification3Choice `xml:"IntrmyAgt1,omitempty"`
	// Unambiguous identification of the account of the intermediary agent 1 at its servicing agent in the payment chain.
	IntermediaryAgent1Account *AccountIdentificationAndName3 `xml:"IntrmyAgt1Acct,omitempty"`
	// Agent between the debtor's agent and the creditor's agent.
	IntermediaryAgent2 *FinancialInstitutionIdentification3Choice `xml:"IntrmyAgt2,omitempty"`
	// Unambiguous identification of the account of the intermediary agent 2 at its servicing agent in the payment chain.
	IntermediaryAgent2Account *AccountIdentificationAndName3 `xml:"IntrmyAgt2Acct,omitempty"`
	// Financial institution servicing an account for the creditor.
	CreditorAgent *FinancialInstitutionIdentification3Choice `xml:"CdtrAgt"`
	// Unambiguous identification of the account of the creditor agent at its servicing agent to which a credit entry will be made as a result of the payment transaction.
	CreditorAgentAccount *AccountIdentificationAndName3 `xml:"CdtrAgtAcct,omitempty"`
	// Party that receives an amount of money from the debtor. In the context of the payment model, the creditor is also the credit account owner.
	Creditor *PartyIdentification2Choice `xml:"Cdtr,omitempty"`
	// Unambiguous identification of the account of the creditor to which a credit entry will be posted as a result of the payment transaction.
	CreditorAccount *AccountIdentificationAndName3 `xml:"CdtrAcct"`
}
// SetReference stores value as the transfer's Reference.
func (c *CreditTransfer6) SetReference(value string) {
	c.Reference = (*Max35Text)(&value)
}
// AddDebtor allocates the Debtor element and returns it for population.
func (c *CreditTransfer6) AddDebtor() *PartyIdentification2Choice {
	c.Debtor = new(PartyIdentification2Choice)
	return c.Debtor
}
// AddDebtorAccount allocates the DebtorAccount element and returns it.
func (c *CreditTransfer6) AddDebtorAccount() *AccountIdentificationAndName3 {
	c.DebtorAccount = new(AccountIdentificationAndName3)
	return c.DebtorAccount
}
// AddDebtorAgent allocates the DebtorAgent element and returns it.
func (c *CreditTransfer6) AddDebtorAgent() *FinancialInstitutionIdentification3Choice {
	c.DebtorAgent = new(FinancialInstitutionIdentification3Choice)
	return c.DebtorAgent
}
// AddDebtorAgentAccount allocates the DebtorAgentAccount element and returns it.
func (c *CreditTransfer6) AddDebtorAgentAccount() *AccountIdentificationAndName3 {
	c.DebtorAgentAccount = new(AccountIdentificationAndName3)
	return c.DebtorAgentAccount
}
// AddIntermediaryAgent1 allocates the IntermediaryAgent1 element and returns it.
func (c *CreditTransfer6) AddIntermediaryAgent1() *FinancialInstitutionIdentification3Choice {
	c.IntermediaryAgent1 = new(FinancialInstitutionIdentification3Choice)
	return c.IntermediaryAgent1
}
// AddIntermediaryAgent1Account allocates the IntermediaryAgent1Account element and returns it.
func (c *CreditTransfer6) AddIntermediaryAgent1Account() *AccountIdentificationAndName3 {
	c.IntermediaryAgent1Account = new(AccountIdentificationAndName3)
	return c.IntermediaryAgent1Account
}
// AddIntermediaryAgent2 allocates the IntermediaryAgent2 element and returns it.
func (c *CreditTransfer6) AddIntermediaryAgent2() *FinancialInstitutionIdentification3Choice {
	c.IntermediaryAgent2 = new(FinancialInstitutionIdentification3Choice)
	return c.IntermediaryAgent2
}
// AddIntermediaryAgent2Account allocates the IntermediaryAgent2Account element and returns it.
func (c *CreditTransfer6) AddIntermediaryAgent2Account() *AccountIdentificationAndName3 {
	c.IntermediaryAgent2Account = new(AccountIdentificationAndName3)
	return c.IntermediaryAgent2Account
}
// AddCreditorAgent allocates the CreditorAgent element and returns it.
func (c *CreditTransfer6) AddCreditorAgent() *FinancialInstitutionIdentification3Choice {
	c.CreditorAgent = new(FinancialInstitutionIdentification3Choice)
	return c.CreditorAgent
}
// AddCreditorAgentAccount allocates the CreditorAgentAccount element and returns it.
func (c *CreditTransfer6) AddCreditorAgentAccount() *AccountIdentificationAndName3 {
	c.CreditorAgentAccount = new(AccountIdentificationAndName3)
	return c.CreditorAgentAccount
}
// AddCreditor allocates the Creditor element and returns it.
func (c *CreditTransfer6) AddCreditor() *PartyIdentification2Choice {
	c.Creditor = new(PartyIdentification2Choice)
	return c.Creditor
}
func (c *CreditTransfer6) AddCreditorAccount() *AccountIdentificationAndName3 {
c.CreditorAccount = new(AccountIdentificationAndName3)
return c.CreditorAccount
} | CreditTransfer6.go | 0.630799 | 0.621053 | CreditTransfer6.go | starcoder |
package csvquery
import "strings"
// LogicalOperator is the string form of a boolean connective (AND/OR).
type LogicalOperator string
const (
	// AndOperator is the AND logical operator.
	AndOperator LogicalOperator = "AND"
	// OrOperator is the OR logical operator.
	OrOperator LogicalOperator = "OR"
)
// ComparisonOperator is the string form of a value-comparison operator.
type ComparisonOperator string
const (
	// EqualOperator describes equal operator.
	EqualOperator ComparisonOperator = "="
	// NotEqualOperator describes "not equal" operator.
	NotEqualOperator ComparisonOperator = "!="
	// LessOperator describes less operator
	LessOperator ComparisonOperator = "<"
	// LessOrEqualOperator describes less or equal operator.
	LessOrEqualOperator ComparisonOperator = "<="
	// GreaterOperator describes greater operator.
	GreaterOperator ComparisonOperator = ">"
	// GreaterOrEqualOperator describes greater or equal operator.
	GreaterOrEqualOperator ComparisonOperator = ">="
)
// ComparisonOperators lists every supported comparison operator.
var ComparisonOperators = []ComparisonOperator{
	EqualOperator,
	NotEqualOperator,
	LessOperator,
	LessOrEqualOperator,
	GreaterOperator,
	GreaterOrEqualOperator,
}
// opPriority maps each logical operator to its numeric precedence; AND
// carries the higher value (see GetPriority).
var opPriority = map[LogicalOperator]int{
	OrOperator:  1,
	AndOperator: 2,
}
// IsOperator returns true if operator is one of the logical operators.
func IsOperator(operator string) bool {
return strings.EqualFold(operator, string(AndOperator)) || strings.EqualFold(operator, string(OrOperator))
}
// IsSameOperator reports whether strOp denotes the same operator as op.
// Unlike IsOperator, the comparison is case-sensitive.
func IsSameOperator(strOp string, op LogicalOperator) bool {
	return strOp == string(op)
}
// Calc evaluates the binary logical statement "left op right".
// Any operator other than AND is treated as OR.
func Calc(left, right bool, op LogicalOperator) bool {
	switch op {
	case AndOperator:
		return left && right
	default:
		return left || right
	}
}
// GetPriority returns priority of the binary operator.
func GetPriority(operator LogicalOperator) int {
if priority, ok := opPriority[operator]; ok {
return priority
}
return -1
} | internal/csvquery/operators.go | 0.7478 | 0.405272 | operators.go | starcoder |
package _514_Freedom_Trail
/*
https://leetcode.com/problems/freedom-trail/description/
In the video game Fallout 4, the quest "Road to Freedom" requires players to reach a metal dial called the "Freedom Trail Ring", and use the dial to spell a specific keyword in order to open the door.
Given a string ring, which represents the code engraved on the outer ring and another string key, which represents the keyword needs to be spelled. You need to find the minimum number of steps in order to spell all the characters in the keyword.
Initially, the first character of the ring is aligned at 12:00 direction. You need to spell all the characters in the string key one by one by rotating the ring clockwise or anticlockwise to make each character of the string key aligned at 12:00 direction and then by pressing the center button.
At the stage of rotating the ring to spell the key character key[i]:
You can rotate the ring clockwise or anticlockwise one place, which counts as 1 step. The final purpose of the rotation is to align one of the string ring's characters at the 12:00 direction, where this character must equal to the character key[i].
If the character key[i] has been aligned at the 12:00 direction, you need to press the center button to spell, which also counts as 1 step. After the pressing, you could begin to spell the next character in the key (next stage), otherwise, you've finished all the spelling.
https://leetcode.com/static/images/problemset/ring.jpg
Example:
Input: ring = "godding", key = "gd"
Output: 4
Explanation:
For the first key character 'g', since it is already in place, we just need 1 step to spell this character.
For the second key character 'd', we need to rotate the ring "godding" anticlockwise by two steps to make it become "ddinggo".
Also, we need 1 more step for spelling.
So the final output is 4.
Note:
Length of both ring and key will be in range 1 to 100.
There are only lowercase letters in both strings and there might be some duplicate characters in both strings.
It's guaranteed that string key could always be spelled by rotating the string ring.
*/
import "fmt"
// findRotateSteps returns the minimum number of steps needed to spell key on
// the circular dial engraved with ring: rotating the dial one place (either
// direction) is one step, and pressing the button for each key character is
// one step. The input guarantees key can always be spelled from ring.
//
// Dynamic programming: current[j] is the minimal rotation cost to have the
// j-th occurrence of the current key character aligned at 12 o'clock.
//
// Fixes over the original: the unconditional fmt debug output is now gated
// behind a disabled const (library functions should not write to stdout; the
// guard keeps the file-level "fmt" import referenced), and the magic
// sentinel 1005000 is replaced by a named, safely large constant.
func findRotateSteps(ring string, key string) int {
	const debugTrace = false    // flip on to restore the author's diagnostics
	const unreachable = 1 << 30 // exceeds any achievable rotation count

	// positions[c] lists every index of ring at which character c appears.
	positions := map[rune][]int{}
	for i, c := range ring {
		positions[c] = append(positions[c], i)
	}

	// ringDist is the minimal rotation between two dial indices, taking the
	// shorter of the two directions around the ring.
	ringDist := func(a, b int) int {
		d := a - b
		if d < 0 {
			d = -d
		}
		if wrap := len(ring) - d; wrap < d {
			return wrap
		}
		return d
	}

	current := []int{0} // before any key character, the dial rests at index 0
	for ki, c := range key {
		prev := current
		current = make([]int, len(positions[c]))
		for j, pos := range positions[c] {
			current[j] = unreachable
			// Match each placement of this character against every
			// placement of the previous one.
			for pi, cost := range prev {
				from := 0
				if ki > 0 {
					from = positions[rune(key[ki-1])][pi]
				}
				if total := cost + ringDist(pos, from); total < current[j] {
					current[j] = total
				}
			}
		}
	}

	best := unreachable
	for _, cost := range current {
		if cost < best {
			best = cost
		}
	}
	if debugTrace {
		fmt.Printf("possibilities: %v result: %d\n", current, best)
	}
	// One extra step per key character for pressing the button.
	return best + len(key)
}
// min returns the minimal number of single-place rotations needed to move
// between dial positions current and newVal on a ring of ringSize
// characters, rotating in whichever direction is shorter.
func min(current, newVal, ringSize int) int {
	// Distance going straight between the two indices.
	direct := current - newVal
	if direct < 0 {
		direct = -direct
	}
	// The other way around the ring may be shorter.
	if wrapped := ringSize - direct; wrapped < direct {
		return wrapped
	}
	return direct
}
package search
import (
"container/heap"
"math"
"github.com/gonum/graph"
"github.com/gonum/graph/concrete"
)
// inf is positive infinity, shared by the search code in this package.
var inf = math.Inf(1)
// searchFuncs bundles the graph accessors and cost functions used by the
// search algorithms, resolved once up front by setupFuncs so the algorithms
// need not type-switch on the graph for every call.
type searchFuncs struct {
	// successors, predecessors and neighbors enumerate adjacent nodes.
	successors, predecessors, neighbors func(graph.Node) []graph.Node
	// isSuccessor, isPredecessor and isNeighbor test adjacency of two nodes.
	isSuccessor, isPredecessor, isNeighbor func(graph.Node, graph.Node) bool
	// cost weighs an edge; heuristicCost estimates remaining distance.
	cost graph.CostFunc
	heuristicCost graph.HeuristicCostFunc
	// edgeTo and edgeBetween fetch the concrete edge joining two nodes.
	edgeTo, edgeBetween func(graph.Node, graph.Node) graph.Edge
}
// genIsSuccessor builds a predicate reporting whether the second node is a
// direct successor of the first in g.
func genIsSuccessor(g graph.DirectedGraph) func(graph.Node, graph.Node) bool {
	return func(from, to graph.Node) bool {
		return g.EdgeTo(from, to) != nil
	}
}
// genIsPredecessor builds a predicate reporting whether the second node is a
// direct predecessor of the first in g.
func genIsPredecessor(g graph.DirectedGraph) func(graph.Node, graph.Node) bool {
	return func(node, pred graph.Node) bool {
		return g.EdgeTo(pred, node) != nil
	}
}
// genIsNeighbor builds a predicate reporting whether two nodes share an edge
// in g, ignoring direction.
func genIsNeighbor(g graph.Graph) func(graph.Node, graph.Node) bool {
	return func(node, other graph.Node) bool {
		return g.EdgeBetween(other, node) != nil
	}
}
// setupFuncs resolves the successor/predecessor/neighbor accessors and the
// cost functions for g exactly once, so the search routines avoid a type
// switch on every graph operation. It almost always does more work than is
// strictly necessary, but it runs once per search, so the overhead is
// negligible. Explicit cost/heuristicCost arguments take precedence;
// otherwise the graph's own implementations are used when available, with
// UniformCost and NullHeuristic as fallbacks.
func setupFuncs(g graph.Graph, cost graph.CostFunc, heuristicCost graph.HeuristicCostFunc) searchFuncs {
	var sf searchFuncs
	if dg, ok := g.(graph.DirectedGraph); ok {
		sf.successors = dg.Successors
		sf.predecessors = dg.Predecessors
		sf.neighbors = dg.Neighbors
		sf.isSuccessor = genIsSuccessor(dg)
		sf.isPredecessor = genIsPredecessor(dg)
		sf.isNeighbor = genIsNeighbor(dg)
		sf.edgeBetween = dg.EdgeBetween
		sf.edgeTo = dg.EdgeTo
	} else {
		// Undirected: every relation collapses to adjacency.
		sf.successors = g.Neighbors
		sf.predecessors = g.Neighbors
		sf.neighbors = g.Neighbors
		adjacent := genIsNeighbor(g)
		sf.isSuccessor = adjacent
		sf.isPredecessor = adjacent
		sf.isNeighbor = adjacent
		sf.edgeBetween = g.EdgeBetween
		sf.edgeTo = g.EdgeBetween
	}
	sf.heuristicCost = heuristicCost
	if sf.heuristicCost == nil {
		if hc, ok := g.(graph.HeuristicCoster); ok {
			sf.heuristicCost = hc.HeuristicCost
		} else {
			sf.heuristicCost = NullHeuristic
		}
	}
	sf.cost = cost
	if sf.cost == nil {
		if c, ok := g.(graph.Coster); ok {
			sf.cost = c.Cost
		} else {
			sf.cost = UniformCost
		}
	}
	return sf
}
// edgeSorter sorts weighted edges by ascending cost, agnostic to repeated
// edges as well as direction. It implements sort.Interface.
type edgeSorter []concrete.WeightedEdge
// Len returns the number of edges.
func (e edgeSorter) Len() int {
	return len(e)
}
// Less reports whether edge i costs less than edge j.
func (e edgeSorter) Less(i, j int) bool {
	return e[i].Cost < e[j].Cost
}
// Swap exchanges edges i and j.
func (e edgeSorter) Swap(i, j int) {
	e[i], e[j] = e[j], e[i]
}
// internalNode decorates a graph node with its gscore and fscore so the
// scores can be used in the A* priority queue below.
type internalNode struct {
	graph.Node
	gscore, fscore float64
}
// aStarPriorityQueue is the open set for A*: a min-heap of internalNodes
// ordered by fscore, with indexList mapping a node's ID to its heap slot so
// queued scores can be updated in place. It implements heap.Interface.
type aStarPriorityQueue struct {
	indexList map[int]int
	nodes     []internalNode
}
// Less orders entries by ascending fscore, making the heap a min-heap.
func (pq *aStarPriorityQueue) Less(i, j int) bool {
	return pq.nodes[i].fscore < pq.nodes[j].fscore
}
// Swap exchanges two heap entries and keeps indexList in sync.
func (pq *aStarPriorityQueue) Swap(i, j int) {
	pq.indexList[pq.nodes[i].ID()] = j
	pq.indexList[pq.nodes[j].ID()] = i
	pq.nodes[i], pq.nodes[j] = pq.nodes[j], pq.nodes[i]
}
// Len returns the number of queued nodes.
func (pq *aStarPriorityQueue) Len() int {
	return len(pq.nodes)
}
// Push appends x (an internalNode) and records its heap position.
func (pq *aStarPriorityQueue) Push(x interface{}) {
	node := x.(internalNode)
	pq.nodes = append(pq.nodes, node)
	pq.indexList[node.ID()] = len(pq.nodes) - 1
}
// Pop removes and returns the final element, forgetting its index; the heap
// package arranges for this to be the minimum-fscore node.
func (pq *aStarPriorityQueue) Pop() interface{} {
	x := pq.nodes[len(pq.nodes)-1]
	pq.nodes = pq.nodes[:len(pq.nodes)-1]
	delete(pq.indexList, x.ID())
	return x
}
// Fix updates the scores of the queued node with the given ID (a no-op when
// absent) and restores the heap invariant around it.
func (pq *aStarPriorityQueue) Fix(id int, newGScore, newFScore float64) {
	if i, ok := pq.indexList[id]; ok {
		pq.nodes[i].gscore = newGScore
		pq.nodes[i].fscore = newFScore
		heap.Fix(pq, i)
	}
}
// Find returns the queued entry for id and whether it is present.
func (pq *aStarPriorityQueue) Find(id int) (internalNode, bool) {
	loc, ok := pq.indexList[id]
	if ok {
		return pq.nodes[loc], true
	} else {
		return internalNode{}, false
	}
}
// Exists reports whether the node with the given ID is queued.
func (pq *aStarPriorityQueue) Exists(id int) bool {
	_, ok := pq.indexList[id]
	return ok
}
// denseNodeSorter orders graph nodes by ascending ID; it implements
// sort.Interface.
type denseNodeSorter []graph.Node
// Less reports whether node i has the smaller ID.
func (dns denseNodeSorter) Less(i, j int) bool {
	return dns[i].ID() < dns[j].ID()
}
// Swap exchanges nodes i and j.
func (dns denseNodeSorter) Swap(i, j int) {
	dns[i], dns[j] = dns[j], dns[i]
}
// Len returns the number of nodes.
func (dns denseNodeSorter) Len() int {
	return len(dns)
}
// General utility funcs
// rebuildPath walks the predecessor map backwards from goal and returns the
// resulting start-to-goal path. internalNode wrappers are unwrapped to their
// underlying graph.Node.
func rebuildPath(predecessors map[int]graph.Node, goal graph.Node) []graph.Node {
	unwrap := func(n graph.Node) graph.Node {
		if in, ok := n.(internalNode); ok {
			return in.Node
		}
		return n
	}
	curr := unwrap(goal)
	path := []graph.Node{curr}
	for {
		prev, ok := predecessors[curr.ID()]
		if !ok {
			break
		}
		curr = unwrap(prev)
		path = append(path, curr)
	}
	// The path was accumulated goal-first; flip it in place.
	for i, j := 0, len(path)-1; i < j; i, j = i+1, j-1 {
		path[i], path[j] = path[j], path[i]
	}
	return path
}
// nodeStack is a simple LIFO stack of graph nodes.
type nodeStack []graph.Node
// len returns the number of nodes on the stack.
func (s *nodeStack) len() int { return len(*s) }
// pop removes and returns the most recently pushed node.
func (s *nodeStack) pop() graph.Node {
	v := *s
	v, n := v[:len(v)-1], v[len(v)-1]
	*s = v
	return n
}
// push places n on top of the stack.
func (s *nodeStack) push(n graph.Node) { *s = append(*s, n) }
// min returns the smaller of a and b.
func min(a, b int) int {
	if b < a {
		return b
	}
	return a
}
package goOctree
import (
"strconv"
)
// Node is a single cell of the octree: it either stores a point directly or
// has been subdivided into eight children.
type Node struct {
	// Uid identifies the node: the parent's Uid with this node's octant
	// index (0-7) appended (see MakeChildren).
	Uid string
	// Center is the geometric center of the cell.
	Center *Vector3
	// Size is the cell's edge length.
	Size float32
	// [0] = -X -Y -Z //left low back
	// [1] = -X -Y +Z //left low front
	// [2] = -X +Y -Z //left high back
	// [3] = -X +Y +Z //left high front
	// [4] = +X -Y -Z //right low back
	// [5] = +X -Y +Z //right low front
	// [6] = +X +Y -Z //right high back
	// [7] = +X +Y +Z //right high front
	Children [8]*Node
	// Point is the payload stored at this node, if any.
	Point *Vector3
	// MaxDepth records the subtree depth below this node, maintained via
	// RaiseMaxDepth (see the review note there).
	MaxDepth uint8
	// Parent is the enclosing cell; nil at the root.
	Parent *Node
}
// MakeChildren subdivides this node into eight half-size children, one per
// octant, in the order of the octant table on Node.Children (x, then y, then
// z, each from negative to positive). Every child's center is offset from
// the parent's by a quarter of the parent's size along each axis, and its
// Uid is the parent's Uid with the octant index appended.
func (n *Node) MakeChildren() {
	counter := 0
	for x := float32(-1); x <= 1; x += 2 {
		for y := float32(-1); y <= 1; y += 2 {
			for z := float32(-1); z <= 1; z += 2 {
				// Offset by a quarter of this cell's size in the octant's
				// direction to reach the child's center.
				nudge := &Vector3{
					X: n.Size * 0.25 * x,
					Y: n.Size * 0.25 * y,
					Z: n.Size * 0.25 * z,
				}
				newCenter := n.Center.Add(nudge)
				newNode := Node{
					Uid:      n.Uid + strconv.Itoa(counter),
					Center:   newCenter,
					Size:     n.Size * 0.5,
					Children: [8]*Node{},
					Point:    nil,
					MaxDepth: 0,
					Parent:   n,
				}
				n.Children[counter] = &newNode
				counter++
			}
		}
	}
	// NOTE(review): seeding RaiseMaxDepth with n.MaxDepth+1, combined with
	// the additional +1 inside RaiseMaxDepth, advances MaxDepth by two per
	// subdivision — confirm the intended depth bookkeeping.
	n.RaiseMaxDepth(n.MaxDepth + 1)
}
// RaiseMaxDepth propagates a grown subtree depth towards the root: given a
// depth reported from below, it bumps this node's MaxDepth and recurses into
// the parent whenever the recorded depth increased.
// NOTE(review): the guard compares childDepth+1 against MaxDepth and then
// stores childDepth+1, while MakeChildren seeds the call with n.MaxDepth+1 —
// the accounting looks off by one; confirm the intended invariant before
// relying on MaxDepth.
func (n *Node) RaiseMaxDepth(childDepth uint8) {
	if childDepth+1 > n.MaxDepth {
		n.MaxDepth = childDepth + 1
		if n.Parent != nil {
			n.Parent.RaiseMaxDepth(n.MaxDepth)
		}
	}
}
// inside reports whether val lies in the closed interval [low, high].
func inside(low float32, high float32, val float32) bool {
	if val < low {
		return false
	}
	return val <= high
}
// PointFits reports whether point lies within this cell's axis-aligned
// bounds, inclusive on every face.
func (n *Node) PointFits(point *Vector3) bool {
	half := 0.5 * n.Size
	return inside(n.Center.X-half, n.Center.X+half, point.X) &&
		inside(n.Center.Y-half, n.Center.Y+half, point.Y) &&
		inside(n.Center.Z-half, n.Center.Z+half, point.Z)
}
// HasChildren reports whether this node has been subdivided.
//
// Bug fix: the original returned n.Children[0] == nil, i.e. true exactly
// when the node had NO children. Its callers read it with the documented
// meaning — IsFree wants "no children and no point", and HasFreeChild only
// recurses into children when the node is subdivided — so the inverted
// result made an empty leaf non-free and sent leaves into nil-child
// recursion. MakeChildren always fills all eight slots, so testing slot 0
// is a sufficient subdivision check.
func (n *Node) HasChildren() bool {
	return n.Children[0] != nil
}
// HasFreeChild reports whether this subtree can still accept a point: a
// leaf is free when it holds no point, and a subdivided node is free when
// any child's subtree is.
// NOTE(review): this reads n.HasChildren() as "node is subdivided"; see the
// inverted comparison flagged on HasChildren itself — as originally written,
// a leaf takes the child-recursion branch and dereferences nil children.
func (n *Node) HasFreeChild() bool {
	if !n.HasChildren() {
		if n.Point == nil {
			return true
		} else {
			return false
		}
	} else {
		for _, child := range n.Children {
			if child.HasFreeChild() {
				return true
			}
		}
	}
	return false
}
func (n *Node) IsFree() bool {
return !n.HasChildren() && n.Point == nil
} | Node.go | 0.548674 | 0.432483 | Node.go | starcoder |
package adapters
import (
"database/sql"
"strings"
"time"
v1 "github.com/Ruscigno/ruscigno-gosdk/ticker-beats/v1"
model "github.com/Ruscigno/ticker-heart/internal/transaction/tradetransaction"
"github.com/Ruscigno/ticker-heart/internal/utils"
)
// ProtoToTradeTransaction transforms a TradeTransaction proto message into a
// TradeTransaction model, attaching accountID and creationOrder. A nil proto
// yields a sentinel model with AccountID == -1. Optional fields become NULL
// database values when the proto carries its zero value, and timestamps are
// shifted by the proto's GMT offset.
func ProtoToTradeTransaction(accountID, creationOrder int64, a *v1.TradeTransaction) *model.TradeTransaction {
	if a == nil {
		return &model.TradeTransaction{AccountID: -1}
	}
	return &model.TradeTransaction{
		InternalID:     a.InternalId,
		AccountID:      accountID,
		OrderID:        a.OrderId,
		CreationOrder:  creationOrder,
		DealID:         sql.NullInt64{Int64: a.DealId, Valid: a.DealId > 0},
		Symbol:         sql.NullString{String: a.Symbol, Valid: strings.TrimSpace(a.Symbol) != ""},
		TradeType:      v1.TradeTransactionType_name[int32(a.TradeType)],
		OrderType:      v1.OrderType_name[int32(a.OrderType)],
		OrderState:     v1.OrderState_name[int32(a.OrderState)],
		DealType:       v1.DealType_name[int32(a.DealType)],
		TimeType:       v1.OrderTypeTime_name[int32(a.TimeType)],
		TimeExpiration: sql.NullTime{Time: time.Unix(a.TimeExpiration, 0).Add(time.Millisecond * time.Duration(a.TimeGMTOffset)), Valid: a.TimeExpiration > utils.MT5_MIN_DATE},
		Price:          sql.NullFloat64{Float64: a.Price, Valid: a.Price != 0},
		PriceTrigger:   sql.NullFloat64{Float64: a.PriceTrigger, Valid: a.PriceTrigger != 0},
		PriceStopLoss:  sql.NullFloat64{Float64: a.PriceStopLoss, Valid: a.PriceStopLoss != 0},
		// Fix: PriceTakeProfit previously copied a.PriceStopLoss (copy/paste
		// slip); it now maps the proto's take-profit value.
		PriceTakeProfit: sql.NullFloat64{Float64: a.PriceTakeProfit, Valid: a.PriceTakeProfit != 0},
		Volume:          sql.NullFloat64{Float64: a.Volume, Valid: a.Volume != 0},
		PositionID:      sql.NullInt64{Int64: a.PositionId, Valid: a.PositionId > 0},
		PositionBy:      sql.NullInt64{Int64: a.PositionBy, Valid: a.PositionBy != 0},
		// NOTE(review): Created/Updated/Deleted multiply the proto value by
		// time.Millisecond (1e6) and pass it as *seconds* to time.Unix,
		// unlike TimeExpiration above — confirm the wire unit of these
		// fields before changing.
		Created: time.Unix(a.Created*int64(time.Millisecond), 0).Add(time.Millisecond * time.Duration(a.TimeGMTOffset)),
		Updated: time.Unix(a.Updated*int64(time.Millisecond), 0).Add(time.Millisecond * time.Duration(a.TimeGMTOffset)),
		Deleted: sql.NullTime{Time: time.Unix(a.Deleted*int64(time.Millisecond), 0).Add(time.Millisecond * time.Duration(a.TimeGMTOffset)), Valid: a.Deleted > utils.MT5_MIN_DATE},
	}
}
// ProtoToTradeResult transforms a TradeResult proto message into a
// TradeResult model, attaching accountID and creationOrder. A nil proto
// yields a sentinel model with AccountID == -1; optional fields become NULL
// database values when the proto carries a non-positive value.
func ProtoToTradeResult(accountID, creationOrder int64, a *v1.TradeResult) *model.TradeResult {
	if a == nil {
		return &model.TradeResult{AccountID: -1}
	}
	return &model.TradeResult{
		AccountID:       accountID,
		OrderID:         a.OrderId,
		CreationOrder:   creationOrder,
		RetCode:         a.Retcode,
		DealID:          sql.NullInt64{Int64: a.DealId, Valid: a.DealId > 0},
		Volume:          sql.NullFloat64{Float64: a.Volume, Valid: a.Volume > 0},
		Price:           sql.NullFloat64{Float64: a.Price, Valid: a.Price > 0},
		Bid:             sql.NullFloat64{Float64: a.Bid, Valid: a.Bid > 0},
		Ask:             sql.NullFloat64{Float64: a.Ask, Valid: a.Ask > 0},
		Comment:         a.Comment,
		RequestID:       a.RequestId,
		RetcodeExternal: sql.NullInt64{Int64: int64(a.RetcodeExternal), Valid: a.RetcodeExternal > 0},
	}
}
// ProtoToTradeRequest transforms a TradeRequest proto message into a
// TradeRequest model, attaching accountID and creationOrder. A nil proto
// yields a sentinel model with AccountID == -1. Optional fields become NULL
// database values when the proto carries its zero value, and timestamps are
// shifted by the proto's GMT offset.
func ProtoToTradeRequest(accountID, creationOrder int64, a *v1.TradeRequest) *model.TradeRequest {
	if a == nil {
		return &model.TradeRequest{AccountID: -1}
	}
	return &model.TradeRequest{
		AccountID:      accountID,
		OrderID:        a.OrderId,
		CreationOrder:  creationOrder,
		Action:         v1.TradeRequestActions_name[int32(a.Action)],
		Magic:          sql.NullInt64{Int64: a.Magic, Valid: a.Magic != 0},
		Symbol:         sql.NullString{String: a.Symbol, Valid: strings.TrimSpace(a.Symbol) != ""},
		Volume:         a.Volume,
		Price:          sql.NullFloat64{Float64: a.Price, Valid: a.Price != 0},
		StopLimit:      sql.NullFloat64{Float64: a.StopLimit, Valid: a.StopLimit != 0},
		StopLoss:       sql.NullFloat64{Float64: a.StopLoss, Valid: a.StopLoss != 0},
		TakeProfit:     sql.NullFloat64{Float64: a.TakeProfit, Valid: a.TakeProfit != 0},
		Deviation:      sql.NullInt64{Int64: a.Deviation, Valid: a.Deviation != 0},
		OrderType:      v1.OrderType_name[int32(a.OrderType)],
		TypeFilling:    v1.OrderFillingType_name[int32(a.TypeFilling)],
		TypeTime:       v1.OrderTypeTime_name[int32(a.TypeTime)],
		TimeExpiration: sql.NullTime{Time: time.Unix(a.TimeExpiration, 0).Add(time.Millisecond * time.Duration(a.TimeGMTOffset)), Valid: a.TimeExpiration > utils.MT5_MIN_DATE},
		Comment:        a.Comment,
		PositionID:     sql.NullInt64{Int64: a.PositionId, Valid: a.PositionId > 0},
		PositionBy:     sql.NullInt64{Int64: a.PositionBy, Valid: a.PositionBy > 0},
		// NOTE(review): Created/Updated/Deleted multiply the proto value by
		// time.Millisecond (1e6) and pass it as *seconds* to time.Unix,
		// unlike TimeExpiration above — confirm the wire unit of these
		// fields.
		Created: time.Unix(a.Created*int64(time.Millisecond), 0).Add(time.Millisecond * time.Duration(a.TimeGMTOffset)),
		Updated: time.Unix(a.Updated*int64(time.Millisecond), 0).Add(time.Millisecond * time.Duration(a.TimeGMTOffset)),
		Deleted: sql.NullTime{Time: time.Unix(a.Deleted*int64(time.Millisecond), 0).Add(time.Millisecond * time.Duration(a.TimeGMTOffset)), Valid: a.Deleted > utils.MT5_MIN_DATE},
	}
}
//ProtoToTradeTransaction transform an proto to an TradeTransaction model
func TradeRequestToProto(a *model.TradeRequest) *v1.TradeRequest {
deleted := a.Deleted.Time.UTC().Unix()
if !a.Deleted.Valid {
deleted = 0
}
price := a.Price.Float64
if !a.Price.Valid {
price = 0
}
stopLimit := a.StopLimit.Float64
if !a.StopLimit.Valid {
stopLimit = 0
}
stopLoss := a.StopLoss.Float64
if !a.StopLoss.Valid {
stopLoss = 0
}
takeProfit := a.TakeProfit.Float64
if !a.TakeProfit.Valid {
takeProfit = 0
}
deviation := a.Deviation.Int64
if !a.Deviation.Valid {
deviation = 0
}
positionId := a.PositionID.Int64
if !a.PositionID.Valid {
positionId = 0
}
positionBy := a.PositionBy.Int64
if !a.PositionBy.Valid {
positionBy = 0
}
magic := a.Magic.Int64
if !a.Magic.Valid {
magic = 0
}
Symbol := a.Symbol.String
if !a.Symbol.Valid {
Symbol = ""
}
return &v1.TradeRequest{
AccountId: a.AccountID,
OrderId: a.OrderID,
CreationOrder: a.CreationOrder,
Action: v1.TradeRequestActions(v1.TradeRequestActions_value[a.Action]),
Magic: magic,
Symbol: Symbol,
Volume: a.Volume,
Price: price,
StopLimit: stopLimit,
StopLoss: stopLoss,
TakeProfit: takeProfit,
Deviation: deviation,
OrderType: v1.OrderType(v1.OrderType_value[a.OrderType]),
TypeFilling: v1.OrderFillingType(v1.OrderFillingType_value[a.TypeFilling]),
TypeTime: v1.OrderTypeTime(v1.OrderTypeTime_value[a.TypeTime]),
TimeExpiration: a.TimeExpiration.Time.UTC().Unix(),
Comment: a.Comment,
PositionId: positionId,
PositionBy: positionBy,
Created: a.Created.UTC().Unix(),
Updated: a.Updated.UTC().Unix(),
Deleted: deleted,
Entry: v1.DealEntry(v1.DealEntry_value[a.Entry]),
}
} | internal/api/adapters/tradetransaction.go | 0.612657 | 0.449272 | tradetransaction.go | starcoder |
package slices
type (
	// FilterFunc is a predicate: it reports whether the given element
	// should be kept.
	FilterFunc[Elem any] func(Elem) bool
	// MapFunc transforms an element into a new value, possibly of a
	// different type.
	MapFunc[Elem, NewElem any] func(Elem) NewElem
	// ReduceFunc is a reducer: it combines the previously accumulated value
	// with the current element and returns the new accumulator.
	ReduceFunc[Accumulator, Elem any] func(Accumulator, Elem) Accumulator
)
// Map applies f to every element of src and collects the results into a new
// slice of the same length. src is not modified.
func Map[Elem, NewElem any](src []Elem, f MapFunc[Elem, NewElem]) []NewElem {
	mapped := make([]NewElem, len(src))
	for i := range src {
		mapped[i] = f(src[i])
	}
	return mapped
}
// Filter returns a new slice holding, in order, the elements of src for
// which f reports true. src is not modified; the result is non-nil even
// when empty.
func Filter[T any](src []T, f FilterFunc[T]) []T {
	kept := []T{}
	for _, v := range src {
		if !f(v) {
			continue
		}
		kept = append(kept, v)
	}
	return kept
}
// Reduce folds src into a single value: starting from ini, the reducer f is
// applied to the running accumulator and each element in order, and the
// final accumulator is returned.
func Reduce[Elem, Accumulator any](
	src []Elem,
	f ReduceFunc[Accumulator, Elem],
	ini Accumulator,
) Accumulator {
	acc := ini
	for _, v := range src {
		acc = f(acc, v)
	}
	return acc
}
// Apply calls f(index, element) for every element of src, in order.
func Apply[Elem any](src []Elem, f func(i int, v Elem)) {
	for i := range src {
		f(i, src[i])
	}
}
// ApplyUntil calls f(index, element) for each element of src in order,
// stopping early as soon as f returns false.
func ApplyUntil[Elem any](src []Elem, f func(i int, v Elem) bool) {
	for i, v := range src {
		if keepGoing := f(i, v); !keepGoing {
			return
		}
	}
}
// KeysOf returns src's keys as a slice, in unspecified order.
func KeysOf[Key comparable, Val any](src map[Key]Val) []Key {
	keys := make([]Key, len(src))
	next := 0
	for k := range src {
		keys[next] = k
		next++
	}
	return keys
}
// ValuesOf returns src's values as a slice, in unspecified order.
func ValuesOf[Key comparable, Val any](src map[Key]Val) []Val {
	values := make([]Val, len(src))
	next := 0
	for _, v := range src {
		values[next] = v
		next++
	}
	return values
}
// AsAny converts src to a new []any of the same length whose elements box
// the original values.
func AsAny[Elem any](src []Elem) []any {
	boxed := make([]any, len(src))
	for i := range src {
		boxed[i] = src[i]
	}
	return boxed
}
package proj
import (
"fmt"
"math"
)
func msfnz(eccent, sinphi, cosphi float64) float64 {
var con = eccent * sinphi
return cosphi / (math.Sqrt(1 - con*con))
}
// sign returns -1 for negative x and +1 otherwise (including zero).
func sign(x float64) float64 {
	switch {
	case x < 0:
		return -1
	default:
		return 1
	}
}
const (
	// twoPi is a full turn in radians.
	twoPi = math.Pi * 2
	// sPi is slightly greater than math.Pi, so values that exceed the
	// -180..180 degree range by a tiny amount don't get wrapped. This
	// prevents points that have drifted from their original location along
	// the 180th meridian (due to floating point error) from changing their
	// sign.
	sPi = 3.14159265359
	// halfPi is a quarter turn (90 degrees) in radians.
	halfPi = math.Pi / 2
)
// adjust_lon wraps a longitude (radians) back towards [-pi, pi]. Values
// whose magnitude is within sPi are returned untouched; sPi is slightly
// larger than pi so coordinates marginally past the 180th meridian keep
// their sign.
func adjust_lon(x float64) float64 {
	if math.Abs(x) > sPi {
		return x - sign(x)*twoPi
	}
	return x
}
// adjust_lat wraps a latitude (radians) back towards [-pi/2, pi/2] by
// subtracting a signed pi once the magnitude reaches halfPi.
func adjust_lat(x float64) float64 {
	if math.Abs(x) >= halfPi {
		return x - sign(x)*math.Pi
	}
	return x
}
// tsfnz returns tan(pi/4 - phi/2) / ((1 - e*sin(phi)) / (1 + e*sin(phi)))^(e/2)
// for eccentricity e, latitude phi, and sinphi = sin(phi).
func tsfnz(eccent, phi, sinphi float64) float64 {
	esin := eccent * sinphi
	denom := math.Pow((1-esin)/(1+esin), 0.5*eccent)
	return math.Tan(0.5*(halfPi-phi)) / denom
}
// phi2z inverts the tsfnz relation: it iterates (up to 16 passes) to find
// the latitude phi whose t-value equals ts, returning an error when the
// fixed-point iteration fails to converge.
func phi2z(eccent, ts float64) (float64, error) {
	halfE := 0.5 * eccent
	phi := halfPi - 2*math.Atan(ts)
	for i := 0; i <= 15; i++ {
		esin := eccent * math.Sin(phi)
		next := halfPi - 2*math.Atan(ts*math.Pow((1-esin)/(1+esin), halfE))
		delta := next - phi
		phi = next
		if math.Abs(delta) <= 0.0000000001 {
			return phi, nil
		}
	}
	return math.NaN(), fmt.Errorf("phi2z has no convergence")
}
// e0fn returns the e0 series coefficient used by mlfn, for x = e^2.
func e0fn(x float64) float64 {
	return 1 - 0.25*x*(1+x/16*(3+1.25*x))
}
// e1fn returns the e1 series coefficient used by mlfn, for x = e^2.
func e1fn(x float64) float64 {
	return 0.375 * x * (1 + 0.25*x*(1+0.46875*x))
}
// e2fn returns the e2 series coefficient used by mlfn, for x = e^2.
func e2fn(x float64) float64 {
	return 0.05859375 * x * x * (1 + 0.75*x)
}
// e3fn returns the e3 series coefficient used by mlfn, for x = e^2.
//
// Bug fix: the original computed x*x*x*(35/3072). In Go, 35/3072 is an
// untyped *integer* constant division, which truncates to 0, so e3fn always
// returned 0. Writing the ratio with floating-point constants restores the
// intended x^3 * 35/3072 term (the other eNfn helpers already use float
// constants).
func e3fn(x float64) float64 {
	return x * x * x * (35.0 / 3072.0)
}
func mlfn(e0, e1, e2, e3, phi float64) float64 {
return (e0*phi - e1*math.Sin(2*phi) + e2*math.Sin(4*phi) - e3*math.Sin(6*phi))
}
func asinz(x float64) float64 {
if math.Abs(x) > 1 {
if x > 1 {
x = 1
} else {
x = -1
}
}
return math.Asin(x)
}
func qsfnz(eccent, sinphi float64) float64 {
var con float64
if eccent > 1.0e-7 {
con = eccent * sinphi
return ((1 - eccent*eccent) * (sinphi/(1-con*con) - (0.5/eccent)*math.Log((1-con)/(1+con))))
} else {
return (2 * sinphi)
}
}
// imlfn inverts mlfn by Newton iteration: it returns the latitude phi whose
// series value equals ml, or an error when 15 iterations do not converge.
func imlfn(ml, e0, e1, e2, e3 float64) (float64, error) {
	phi := ml / e0
	for i := 0; i < 15; i++ {
		f := e0*phi - e1*math.Sin(2*phi) + e2*math.Sin(4*phi) - e3*math.Sin(6*phi)
		df := e0 - 2*e1*math.Cos(2*phi) + 4*e2*math.Cos(4*phi) - 6*e3*math.Cos(6*phi)
		dphi := (ml - f) / df
		phi += dphi
		if math.Abs(dphi) <= 0.0000000001 {
			return phi, nil
		}
	}
	return math.NaN(), fmt.Errorf("proj: imlfn: Latitude failed to converge after 15 iterations")
}
package psetter
import (
"errors"
"fmt"
"github.com/nickwells/golem/check"
"github.com/nickwells/golem/param"
"strconv"
)
// Float64Setter allows you to specify a parameter that can be used to set an
// float64 value. You can also supply a check function that will validate
// the Value. There are some helper functions given below (called
// Float64Check...) which will return functions that can perform a few
// common checks. For instance you can ensure that the value is positive by
// setting one of the Checks to the value returned by
// Float64CheckGT(0)
type Float64Setter struct {
	// Value points at the float64 to be set; it must be non-nil.
	Value *float64
	// Checks holds optional validators; every non-nil check must pass
	// before Value is updated.
	Checks []check.Float64
}
// ValueReq returns param.Mandatory indicating that some value must follow
// the parameter
func (s Float64Setter) ValueReq() param.ValueReq { return param.Mandatory }
// Set (called when there is no following value) returns an error
// since a Float64Setter always requires a following "=num" value.
func (s Float64Setter) Set(_ string) error {
	return errors.New("no number given (it should be followed by '=num')")
}
// SetWithVal (called when a value follows the parameter) checks that the value
// can be parsed to a float, if it cannot be parsed successfully it returns an
// error. If there is a check and the check is violated it returns an
// error. Only if the value is parsed successfully and the check is not
// violated is the Value set.
func (s Float64Setter) SetWithVal(_ string, paramVal string) error {
	v, err := strconv.ParseFloat(paramVal, 64)
	if err != nil {
		return fmt.Errorf("could not parse '%s' as a float value: %s",
			paramVal, err)
	}
	// The loop variable is deliberately not called "check": the original
	// name shadowed the imported check package. Ranging over a nil or empty
	// slice is a no-op, so no explicit length guard is needed.
	for _, chk := range s.Checks {
		if chk == nil {
			continue
		}
		if err := chk(v); err != nil {
			return err
		}
	}
	*s.Value = v
	return nil
}
// AllowedValues returns a string describing the allowed values
func (s Float64Setter) AllowedValues() string {
rval := "any value that can be read as a number with a decimal place"
if len(s.Checks) != 0 {
rval += " subject to checks"
}
return rval
}
// CurrentValue returns the current setting of the parameter value
func (s Float64Setter) CurrentValue() string {
return fmt.Sprintf("%v", *s.Value)
}
// CheckSetter panics if the setter has not been properly created - if the
// Value is nil.
func (s Float64Setter) CheckSetter(name string) {
if s.Value == nil {
panic(name + ": Float64Setter Check failed: the Value to be set is nil")
}
} | param/psetter/float64Setter.go | 0.786746 | 0.426083 | float64Setter.go | starcoder |
package vm
import (
"encoding/binary"
"encoding/hex"
"math/big"
"xfsgo/common"
)
// Fixed-width scalar types used by the VM's ABI encoding. Each wraps a raw
// byte array (or a byte slice for strings) and marshals to/from a hex
// string. The multi-byte integer types CTypeUint16/32/64 are little-endian
// (see their getters below).
type CTypeUint8 [1]byte
type CTypeBool [1]byte
type CTypeUint16 [2]byte
type CTypeUint32 [4]byte
type CTypeUint64 [8]byte
// CTypeUint256 stores a 256-bit unsigned integer as 32 big-endian bytes
// (BigInt decodes it with big.Int.SetBytes).
type CTypeUint256 [32]byte
type CTypeString []byte
// CTypeAddress holds a raw 25-byte account address.
type CTypeAddress [25]byte
// uint8 returns the scalar value held by the 1-byte word.
func (t CTypeUint8) uint8() uint8 {
	return t[0]
}

// MarshalText implements encoding.TextMarshaler, encoding the raw bytes as hex.
func (t CTypeUint8) MarshalText() (d []byte, err error) {
	return []byte(hex.EncodeToString(t[:])), nil
}

// UnmarshalText implements encoding.TextUnmarshaler.
// Bug fix applied to every UnmarshalText below: on a malformed hex string
// the original copied the partially-decoded bytes into the receiver before
// returning the error; now the receiver is only written on success.
func (t *CTypeUint8) UnmarshalText(text []byte) error {
	bs, err := hex.DecodeString(string(text))
	if err != nil {
		return err
	}
	copy(t[:], bs)
	return nil
}

// Uint16 decodes the stored bytes as a little-endian uint16.
func (t CTypeUint16) Uint16() uint16 {
	return binary.LittleEndian.Uint16(t[:])
}

// MarshalText implements encoding.TextMarshaler.
func (t CTypeUint16) MarshalText() (d []byte, err error) {
	return []byte(hex.EncodeToString(t[:])), nil
}

// UnmarshalText implements encoding.TextUnmarshaler.
func (t *CTypeUint16) UnmarshalText(text []byte) error {
	bs, err := hex.DecodeString(string(text))
	if err != nil {
		return err
	}
	copy(t[:], bs)
	return nil
}

// Uint32 decodes the stored bytes as a little-endian uint32.
func (t CTypeUint32) Uint32() uint32 {
	return binary.LittleEndian.Uint32(t[:])
}

// MarshalText implements encoding.TextMarshaler.
func (t CTypeUint32) MarshalText() (d []byte, err error) {
	return []byte(hex.EncodeToString(t[:])), nil
}

// UnmarshalText implements encoding.TextUnmarshaler.
func (t *CTypeUint32) UnmarshalText(text []byte) error {
	bs, err := hex.DecodeString(string(text))
	if err != nil {
		return err
	}
	copy(t[:], bs)
	return nil
}

// Uint64 decodes the stored bytes as a little-endian uint64.
func (t CTypeUint64) Uint64() uint64 {
	return binary.LittleEndian.Uint64(t[:])
}

// MarshalText implements encoding.TextMarshaler.
func (t CTypeUint64) MarshalText() (d []byte, err error) {
	return []byte(hex.EncodeToString(t[:])), nil
}

// UnmarshalText implements encoding.TextUnmarshaler.
func (t *CTypeUint64) UnmarshalText(text []byte) error {
	bs, err := hex.DecodeString(string(text))
	if err != nil {
		return err
	}
	copy(t[:], bs)
	return nil
}

// BigInt decodes the stored 32 bytes as a big-endian unsigned integer.
func (t CTypeUint256) BigInt() *big.Int {
	return new(big.Int).SetBytes(t[:])
}

// MarshalText implements encoding.TextMarshaler.
func (t CTypeUint256) MarshalText() (d []byte, err error) {
	return []byte(hex.EncodeToString(t[:])), nil
}

// UnmarshalText implements encoding.TextUnmarshaler.
func (t *CTypeUint256) UnmarshalText(text []byte) error {
	bs, err := hex.DecodeString(string(text))
	if err != nil {
		return err
	}
	copy(t[:], bs)
	return nil
}

// String returns the raw bytes interpreted as a string.
func (t CTypeString) String() string {
	return string(t[:])
}

// MarshalText implements encoding.TextMarshaler.
func (t CTypeString) MarshalText() (d []byte, err error) {
	return []byte(hex.EncodeToString(t[:])), nil
}

// UnmarshalText implements encoding.TextUnmarshaler; the backing slice is
// only (re)allocated once the hex input has decoded successfully.
func (t *CTypeString) UnmarshalText(text []byte) error {
	bs, err := hex.DecodeString(string(text))
	if err != nil {
		return err
	}
	*t = make([]byte, len(bs))
	copy(*t, bs)
	return nil
}

// MarshalText implements encoding.TextMarshaler.
func (t CTypeAddress) MarshalText() (d []byte, err error) {
	return []byte(hex.EncodeToString(t[:])), nil
}

// UnmarshalText implements encoding.TextUnmarshaler.
func (t *CTypeAddress) UnmarshalText(text []byte) error {
	bs, err := hex.DecodeString(string(text))
	if err != nil {
		return err
	}
	copy(t[:], bs)
	return nil
}

// Address converts the raw bytes to a common.Address.
func (t CTypeAddress) Address() common.Address {
	return common.Bytes2Address(t[:])
}

// Bool reports whether the stored byte equals 1.
func (t CTypeBool) Bool() bool {
	return t[0] == 1
}
// NewUint8 wraps n in a CTypeUint8.
func NewUint8(n uint8) CTypeUint8 {
	return CTypeUint8{n}
}
// NewUint16 encodes n as a little-endian CTypeUint16.
func NewUint16(n uint16) CTypeUint16 {
	var word CTypeUint16
	binary.LittleEndian.PutUint16(word[:], n)
	return word
}
// NewUint32 encodes n as a little-endian CTypeUint32.
func NewUint32(n uint32) CTypeUint32 {
	var word CTypeUint32
	binary.LittleEndian.PutUint32(word[:], n)
	return word
}
// NewUint64 encodes n as a little-endian CTypeUint64.
func NewUint64(n uint64) CTypeUint64 {
	var word CTypeUint64
	binary.LittleEndian.PutUint64(word[:], n)
	return word
}
func NewUint256(n *big.Int) (m CTypeUint256) {
bs := n.Bytes()
copy(m[:], bs)
return
} | vm/types.go | 0.566738 | 0.430985 | types.go | starcoder |
package bank
import (
"encoding/json"
"github.com/hyperledger/fabric/core/chaincode/shim"
sc "github.com/hyperledger/fabric/protos/peer"
"github.com/shopspring/decimal"
)
//BankChaincode is the struct that all chaincode methods are associated with
//The bank chaincode provides a simple representation of a bank. It allows for the creation of bank
//accounts and the transfer of funds between bank accounts. There are several functions:
// init - initialize the chaincode
// invoke - called upon invocation and calls other functions
// createAccount - create a bank account
// deposit - deposit funds into a bank account
// transfer - transfer funds between accounts, either interbank or intrabank
type BankChaincode struct {
}
// bank is a struct that represents a bank, it is stored on the ledeger under the key "bank"
//Name string - the name of the bank
//ID string - the ID of the bank, used to route between banks, analogous to an IBAN or SWIFT code
//ForexContract - the name of a ForexChaincode, deployed to the same peer as the bank, use to provide intrabank currency exchange
//InterbankContract - the name of the InterbankChaincode, used to transfer funds between banks
// A bank contract must be initalized with and name and ID. The two contracts are optional but required to do interbank transfers
//and interbank currency exchange - without them these will produce an error.
type bank struct {
	Name string `json:"name"`
	ID string `json:"bankID"`
	ForexContract string `json:"forexContract"`
	InterbankContract string `json:"interbankContract"`
}
// account represents a single customer account held at this bank: a display
// name, an account number, and a decimal balance in the given currency.
type account struct {
	Name string `json:"name"`
	AccNumber string `json:"id"`
	Balance decimal.Decimal `json:"balance"`
	Currency string `json:"currency"`
}
// forexPair pairs a currency-pair symbol with its exchange rate.
type forexPair struct {
	Pair string `json:"pair"`
	Rate float64 `json:"rate"`
}
//Init method is run on chaincode installation and upgrade
//Args:
// Name string The Name of the Bank
// ID string The institution ID of the bank, (e.g. IBAN, SWIFT or other routing code)
// ForexContract string The name of the contract that provides Forex services to this bank
// InterbankContract string The name of the contrat providing interbank transfer to this bank
func (s *BankChaincode) Init(stub shim.ChaincodeStubInterface) sc.Response {
	args := stub.GetStringArgs()
	if len(args) < 2 {
		return shim.Error("Incorrect arguments. Expecting a bank name, ID. Optionally and the name of the ForexContract and the name of the InterBank contract")
	}
	name := args[0]
	id := args[1]
	// The two contract names are optional trailing arguments.
	forexContract := ""
	interbankContract := ""
	if len(args) > 2 {
		forexContract = args[2]
	}
	if len(args) > 3 {
		interbankContract = args[3]
	}
	// The local is named b rather than bank to avoid shadowing the bank
	// type, and the Marshal error (previously discarded) is now surfaced
	// instead of writing a nil payload to the ledger.
	b := bank{Name: name, ID: id, ForexContract: forexContract, InterbankContract: interbankContract}
	bankBytes, err := json.Marshal(b)
	if err != nil {
		return shim.Error(err.Error())
	}
	if err := stub.PutState("bank", bankBytes); err != nil {
		return shim.Error(err.Error())
	}
	return shim.Success(nil)
}
//Invoke is called when external applications invoke the smart contract
func (s *BankChaincode) Invoke(stub shim.ChaincodeStubInterface) sc.Response {
function, args := stub.GetFunctionAndParameters()
if function == "createAccount" {
return s.createAccount(stub, args)
} else if function == "queryAccount" {
return s.queryAccount(stub, args)
} else if function == "transfer" {
return s.transfer(stub, args)
} else if function == "deposit" {
return s.deposit(stub, args)
} else if function == "getTransactionHistory" {
return s.getTransactionHistory(stub, args)
}
return shim.Error("Invalid function")
} | chaincode/src/bank/bank.go | 0.565299 | 0.406509 | bank.go | starcoder |
package continuous
import (
"github.com/jtejido/stats"
"github.com/jtejido/stats/err"
smath "github.com/jtejido/stats/math"
"math"
"math/rand"
)
// Q-Exponential distribution
// https://en.wikipedia.org/wiki/Q-exponential_distribution
type QExponential struct {
	// rate is λ > 0 and q < 2 is the deformation parameter; both are
	// validated in NewQExponentialWithSource.
	rate, q float64 // λ, q
	// src is an optional deterministic random source used by Rand; when
	// nil, the global math/rand source is used.
	src rand.Source
}
// NewQExponential constructs a Q-Exponential distribution with rate λ and
// deformation parameter q, drawing from the global random source.
func NewQExponential(rate, q float64) (*QExponential, error) {
	return NewQExponentialWithSource(rate, q, nil)
}
// NewQExponentialWithSource is like NewQExponential but uses src (when
// non-nil) for random variates. It rejects rate <= 0 and q >= 2.
func NewQExponentialWithSource(rate, q float64, src rand.Source) (*QExponential, error) {
	if rate <= 0 || q >= 2 {
		return nil, err.Invalid()
	}
	return &QExponential{rate: rate, q: q, src: src}, nil
}
// String describes the distribution, its parameter domain and its support.
func (q *QExponential) String() string {
	return "QExponential: Parameters - " + q.Parameters().String() + ", Support(x) - " + q.Support().String()
}
// Parameters reports the valid parameter domain:
// λ ∈ (0,∞)
// q ∈ (-∞,2)
func (q *QExponential) Parameters() stats.Limits {
	return stats.Limits{
		"λ": stats.Interval{0, math.Inf(1), true, true},
		"q": stats.Interval{math.Inf(-1), 2, true, true},
	}
}
// Support reports the domain of the density:
// x ∈ [0,∞) for q >= 1
// x ∈ [0,1/(λ(1-q))] otherwise
func (q *QExponential) Support() stats.Interval {
	if q.q < 1 {
		return stats.Interval{0, 1 / (q.rate * (1 - q.q)), false, true}
	}
	return stats.Interval{0, math.Inf(1), false, true}
}
// Probability evaluates the density at x; it is zero outside the support.
func (q *QExponential) Probability(x float64) float64 {
	if !q.Support().IsWithinInterval(x) {
		return 0
	}
	return (2 - q.q) * q.rate * smath.Expq(-q.rate*x, q.q)
}
// Distribution evaluates the CDF at x via the dual parameter q' = 1/(2-q).
func (q *QExponential) Distribution(x float64) float64 {
	dual := 1 / (2 - q.q)
	return 1 - smath.Expq(-(q.rate*x)/dual, dual)
}
// Inverse is the quantile function; out-of-range probabilities clamp to the
// edges of the support.
func (q *QExponential) Inverse(p float64) float64 {
	switch {
	case p <= 0:
		return 0
	case p >= 1:
		if q.q >= 1 {
			return math.Inf(1)
		}
		return 1 / (q.rate * (1 - q.q))
	}
	dual := 1 / (2 - q.q)
	return (-dual * smath.Logq(p, dual)) / q.rate
}
func (q *QExponential) Mean() float64 {
if q.q < 3./2 {
return 1 / (q.rate * (3 - 2*q.q))
}
return math.NaN()
}
func (q *QExponential) Median() float64 {
qp := 1 / (2 - q.q)
return (-qp * smath.Logq(.5, qp)) / q.rate
}
func (q *QExponential) Mode() float64 {
return 0
}
func (q *QExponential) Variance() float64 {
if q.q < 4./3 {
return (q.q - 2) / (math.Pow(2*q.q-3, 2) * (3*q.q - 4) * (q.rate * q.rate))
}
return math.NaN()
}
func (q *QExponential) Skewness() float64 {
if q.q < 5./4 {
return (2 / (5 - 4*q.q)) * math.Sqrt((3*q.q-4)/(q.q-2))
}
return math.NaN()
}
func (q *QExponential) ExKurtosis() float64 {
if q.q < 6./5 {
return 6 * ((-4*(q.q*q.q*q.q) + 17*(q.q*q.q) - 20*q.q + 6) / ((q.q - 2) * (4*q.q - 5) * (5*q.q - 6)))
}
return math.NaN()
}
func (q *QExponential) Rand() float64 {
var rnd func() float64
if q.src != nil {
rnd = rand.New(q.src).Float64
} else {
rnd = rand.Float64
}
return q.Inverse(rnd())
} | dist/continuous/q_exponential.go | 0.81538 | 0.51379 | q_exponential.go | starcoder |
package plot
import "image/color"
// Grid implements faceted background.
// Drawing paints the plot background fill plus a line for every X and Y
// tick, styled by GridTheme (falling back to the plot's theme when zero).
type Grid struct {
	GridTheme
}
// NewGrid creates a new grid plot.
func NewGrid() *Grid {
	return &Grid{}
}
// Draw renders the background fill and a grid line per axis tick, using
// the minor style for minor ticks and the major style for the rest.
func (grid *Grid) Draw(plot *Plot, canvas Canvas) {
	xAxis, yAxis := plot.X, plot.Y
	size := canvas.Bounds().Size()
	left, right := 0.0, size.X
	bottom, top := 0.0, size.Y
	theme := &grid.GridTheme
	if theme.IsZero() {
		theme = &plot.Theme.Grid
	}
	canvas.Rect(canvas.Bounds(), &Style{
		Fill:  theme.Fill,
		Class: "grid-fill",
	})
	majorStyle := &Style{
		Size:   1,
		Stroke: theme.Major,
		Class:  "grid-major",
	}
	minorStyle := &Style{
		Size:   1,
		Stroke: theme.Minor,
		Class:  "grid-minor",
	}
	for _, tick := range xAxis.Ticks.Ticks(xAxis) {
		at := xAxis.ToCanvas(tick.Value, 0, size.X)
		style := majorStyle
		if tick.Minor {
			style = minorStyle
		}
		canvas.Poly(Ps(at, bottom, at, top), style)
	}
	for _, tick := range yAxis.Ticks.Ticks(yAxis) {
		at := yAxis.ToCanvas(tick.Value, 0, size.Y)
		style := majorStyle
		if tick.Minor {
			style = minorStyle
		}
		canvas.Poly(Ps(left, at, right, at), style)
	}
}
// Gizmo implements drawing X and Y axis.
type Gizmo struct {
	// Center is the data-space point where the two axis lines cross.
	Center Point
}
// NewGizmo creates a new gizmo element.
func NewGizmo() *Gizmo {
	return &Gizmo{}
}
// Draw draws the element to canvas.
func (gizmo *Gizmo) Draw(plot *Plot, canvas Canvas) {
x, y := plot.X, plot.Y
size := canvas.Bounds().Size()
// x0, xmin, xmax := x.ToCanvas(gizmo.Center.X, 0, size.X), x.ToCanvas(x.Min, 0, size.X), x.ToCanvas(x.Max, 0, size.X)
// y0, ymin, ymax := y.ToCanvas(gizmo.Center.Y, 0, size.Y), y.ToCanvas(y.Min, 0, size.Y), y.ToCanvas(y.Max, 0, size.Y)
x0, xmin, xmax := x.ToCanvas(gizmo.Center.X, 0, size.X), 0.0, size.X
y0, ymin, ymax := y.ToCanvas(gizmo.Center.Y, 0, size.Y), 0.0, size.Y
if xmin < x0 && x0 < xmax {
canvas.Poly(Ps(x0, ymin, x0, ymax), &Style{
Stroke: color.NRGBA{30, 0, 0, 100},
})
}
if ymin < y0 && y0 < ymax {
canvas.Poly(Ps(xmin, y0, xmax, y0), &Style{
Stroke: color.NRGBA{0, 30, 0, 100},
})
}
} | grid.go | 0.868046 | 0.595669 | grid.go | starcoder |
package bezout
import (
"math/big"
)
// BBPair is a Bachet-Bézout pair. The number X we are looking for
// verifies X == Remainder % Divisor
// (i.e. X mod Divisor equals Remainder; Solve assumes the Divisors of the
// supplied pairs are pairwise coprime).
type BBPair struct {
	Divisor big.Int
	Remainder big.Int
}
// Solve a Chinese Remainder Theorem and returns the smallest positive value that matches
// (i.e. the smallest positive X with X mod c[i].Divisor == c[i].Remainder
// for every pair; the divisors are assumed pairwise coprime).
func Solve(c []BBPair) big.Int {
	switch len(c) {
	case 0:
		// unlikely, but you never know...
		return *big.NewInt(0)
	case 1:
		// shouldn't happen, but this is a valid answer
		return c[0].Remainder
	case 2:
		// this is the end of the task, usually
		// get the Bézout coefficients
		x, y := extendedEuclideanAlgorithm(c[0].Divisor, c[1].Divisor)
		// CRT combination: res = r0*y*d1 + r1*x*d0 satisfies both congruences.
		res := add(mul(mul(y, c[0].Remainder), c[1].Divisor), mul(mul(x, c[1].Remainder), c[0].Divisor))
		// let's try and make it the smallest positive result - we can add or subtract c[0].Divisor * c[1].Divisor at will
		c0c1 := mul(c[0].Divisor, c[1].Divisor)
		// at this point, res could be negative, so let's take the smallest positive answer
		if res.Sign() == -1 {
			q := quo(sub(*big.NewInt(0), res), c0c1)
			res = add(res, mul(add(q, *big.NewInt(1)), c0c1))
		}
		return rem(res, c0c1)
	default:
		// we'll solve this complex problem recursively: solve the first two equations,
		// and then use that solution with the remaining equations.
		// merge the first two equations
		mergedRemainder := Solve(c[:2])
		mergedPair := BBPair{Divisor: mul(c[0].Divisor, c[1].Divisor), Remainder: mergedRemainder}
		// this new problem has a complexity (length of input) reduced by 1 when compared to the current problem.
		return Solve(append(c[2:], mergedPair))
	}
}
// extendedEuclideanAlgorithm returns the (x,y) Bézout coefficients that satisfy a*x + b*y = 1, given a and b are coprime
func extendedEuclideanAlgorithm(a, b big.Int) (x, y big.Int) {
	// graciously taken from Wikipedia
	oldr, r := a, b
	olds, s := *big.NewInt(1), *big.NewInt(0)
	oldt, t := *big.NewInt(0), *big.NewInt(1)
	// check if r is 0
	// Invariant of the loop: oldr == a*olds + b*oldt and r == a*s + b*t.
	for r.Sign() != 0 {
		quotient := quo(oldr, r) // q = oldr / r
		oldr, r = r, sub(oldr, mul(quotient, r)) // oldr, r = r, oldr - q*r
		olds, s = s, sub(olds, mul(quotient, s)) // olds, s = s, olds - q*s
		oldt, t = t, sub(oldt, mul(quotient, t)) // oldt, t = t, oldt - q*t
	}
	return olds, oldt
}
// I didn't like big.Int's operations.
// It won't let you big.Mul(a, b big.Int) big.Int, for instance. For some reason, you already need a big.Int
// Hopefully, generics will solve this...
// mul returns the product a*b
func mul(a, b big.Int) big.Int {
return *big.NewInt(1).Mul(&a, &b)
}
// add returns the sum a+b
func add(a, b big.Int) big.Int {
return *big.NewInt(1).Add(&a, &b)
}
// sub returns the difference a-b
func sub(a, b big.Int) big.Int {
return *big.NewInt(1).Sub(&a, &b)
}
// quo returns the integer part of the quotient a/b
func quo(a, b big.Int) big.Int {
return *big.NewInt(1).Quo(&a, &b)
}
// rem is the remainder of a/b, it's equal to a%b
func rem(a, b big.Int) big.Int {
// it's me in the corner
return *big.NewInt(1).Rem(&a, &b)
} | internal/2020/dec13/bezout/bezout.go | 0.701304 | 0.48121 | bezout.go | starcoder |
package hplot
import (
"image/color"
"math"
"gonum.org/v1/plot"
"gonum.org/v1/plot/plotter"
"gonum.org/v1/plot/vg/draw"
)
// Band implements the plot.Plotter interface, drawing a colored band made of
// two lines.
type Band struct {
	// top and bottom are the two polylines bounding the band; both are
	// copied from the XYers given to NewBand.
	top plotter.XYs
	bottom plotter.XYs
	// LineStyle is the style of the line contouring the band.
	// Use zero width to disable.
	draw.LineStyle
	// FillColor is the color to fill the area between
	// the top and bottom data points.
	// Use nil to disable the filling.
	FillColor color.Color
}
// NewBand builds a Band bounded by the given top and bottom point sets,
// filled with the given color. Both point sets are copied.
func NewBand(fill color.Color, top, bottom plotter.XYer) *Band {
	copyXYs := func(src plotter.XYer) plotter.XYs {
		dst := make(plotter.XYs, src.Len())
		for i := range dst {
			dst[i].X, dst[i].Y = src.XY(i)
		}
		return dst
	}
	return &Band{
		top:       copyXYs(top),
		bottom:    copyXYs(bottom),
		FillColor: fill,
	}
}
// Plot draws the band as one closed polygon: the bottom points in order
// followed by the top points reversed. Bands with fewer than two points on
// either edge are skipped.
func (band *Band) Plot(c draw.Canvas, plt *plot.Plot) {
	if len(band.top) <= 1 || len(band.bottom) <= 1 {
		return
	}
	outline := make(plotter.XYs, 0, len(band.top)+len(band.bottom))
	outline = append(outline, band.bottom...)
	for i := len(band.top) - 1; i >= 0; i-- {
		outline = append(outline, band.top[i])
	}
	poly := plotter.Polygon{
		XYs:       []plotter.XYs{outline},
		LineStyle: band.LineStyle,
		Color:     band.FillColor,
	}
	poly.Plot(c, plt)
}
// DataRange returns the minimum and maximum
// x and y values, implementing the plot.DataRanger interface.
func (band *Band) DataRange() (xmin, xmax, ymin, ymax float64) {
	txmin, txmax, tymin, tymax := plotter.XYRange(band.top)
	bxmin, bxmax, bymin, bymax := plotter.XYRange(band.bottom)
	return math.Min(txmin, bxmin), math.Max(txmax, bxmax),
		math.Min(tymin, bymin), math.Max(tymax, bymax)
}
// Compile-time interface checks: Band must satisfy both plot.Plotter and
// plot.DataRanger; the line markers only need plot.Plotter.
var (
	_ plot.Plotter = (*VertLine)(nil)
	_ plot.Plotter = (*HorizLine)(nil)
	_ plot.Plotter = (*Band)(nil)
	_ plot.DataRanger = (*Band)(nil)
)
package segments
import (
"fmt"
"math"
"github.com/pzduniak/unipdf/common"
"github.com/pzduniak/unipdf/internal/jbig2/bitmap"
"github.com/pzduniak/unipdf/internal/jbig2/reader"
)
// HalftoneRegion is the model for the jbig2 halftone region segment implementation - 7.4.5.1.
type HalftoneRegion struct {
	// r is the stream the segment is decoded from.
	r reader.StreamReader
	// h is the segment header this region belongs to.
	h *Header
	// Byte-accurate layout of the segment within r: the header's offset
	// and length, and the encoded data's offset and length.
	DataHeaderOffset int64
	DataHeaderLength int64
	DataOffset int64
	DataLength int64
	// Region segment information field, 7.4.1.
	RegionSegment *RegionSegment
	// Halftone segment information field, 7.4.5.1.1.
	HDefaultPixel int8
	CombinationOperator bitmap.CombinationOperator
	HSkipEnabled bool
	HTemplate byte
	IsMMREncoded bool
	// Halftone grid position and size, 7.4.5.1.2
	// Width of the gray-scale image, 7.4.5.1.2.1
	HGridWidth uint32
	// Height of the gray-scale image, 7.4.5.1.2.2
	HGridHeight uint32
	// Horizontal offset of the grid, 7.4.5.1.2.3
	HGridX int32
	// Vertical offset of the grid, 7.4.5.1.2.4
	HGridY int32
	// Halftone grid vector, 7.4.5.1.3
	// Horizontal coordinate of the halftone grid vector, 7.4.5.1.3.1
	HRegionX uint16
	// Vertical coordinate of the halftone grod vector, 7.4.5.1.3.2
	HRegionY uint16
	// Decoded data, cached by GetRegionBitmap.
	HalftoneRegionBitmap *bitmap.Bitmap
	// Previously decoded data from other regions or dictionaries, stored to use as patterns in this region.
	Patterns []*bitmap.Bitmap
}
// Init implements Segmenter interface: it binds the segment header and
// reader, then parses the region and halftone-specific header fields.
func (h *HalftoneRegion) Init(hd *Header, r reader.StreamReader) error {
	h.h = hd
	h.r = r
	h.RegionSegment = NewRegionSegment(r)
	return h.parseHeader()
}
// GetRegionBitmap implements Regioner interface.
// The decoded bitmap is cached on the receiver, so repeated calls return
// the same *bitmap.Bitmap without re-decoding. Decoding follows 6.6.5.
func (h *HalftoneRegion) GetRegionBitmap() (*bitmap.Bitmap, error) {
	if h.HalftoneRegionBitmap != nil {
		return h.HalftoneRegionBitmap, nil
	}
	var err error
	// 6.6.5 1)
	h.HalftoneRegionBitmap = bitmap.New(int(h.RegionSegment.BitmapWidth), int(h.RegionSegment.BitmapHeight))
	// Lazily load the pattern dictionaries this region refers to.
	if h.Patterns == nil || len(h.Patterns) == 0 {
		h.Patterns, err = h.GetPatterns()
		if err != nil {
			return nil, err
		}
	}
	// Fill with the default pixel value before compositing patterns.
	if h.HDefaultPixel == 1 {
		h.HalftoneRegionBitmap.SetDefaultPixel()
	}
	// 3) Number of bit planes needed to index every pattern.
	bitsPerValueF := math.Ceil(math.Log(float64(len(h.Patterns))) / math.Log(2))
	bitsPerValue := int(bitsPerValueF)
	// 4) Decode the gray-scale image: one value per halftone grid cell.
	var grayScaleValues [][]int
	grayScaleValues, err = h.grayScaleDecoding(bitsPerValue)
	if err != nil {
		return nil, err
	}
	// Composite one pattern per grid cell into the region bitmap.
	if err = h.renderPattern(grayScaleValues); err != nil {
		return nil, err
	}
	return h.HalftoneRegionBitmap, nil
}
// GetRegionInfo implements Regioner interface.
func (h *HalftoneRegion) GetRegionInfo() *RegionSegment {
	return h.RegionSegment
}
// GetPatterns collects the pattern bitmaps from every referred-to pattern
// dictionary segment, concatenated in referral order.
func (h *HalftoneRegion) GetPatterns() ([]*bitmap.Bitmap, error) {
	var collected []*bitmap.Bitmap
	for _, segment := range h.h.RTSegments {
		segData, err := segment.GetSegmentData()
		if err != nil {
			common.Log.Debug("GetSegmentData failed: %v", err)
			return nil, err
		}
		dict, ok := segData.(*PatternDictionary)
		if !ok {
			err = fmt.Errorf("related segment not a pattern dictionary: %T", segData)
			return nil, err
		}
		patterns, err := dict.GetDictionary()
		if err != nil {
			common.Log.Debug("pattern GetDictionary failed: %v", err)
			return nil, err
		}
		collected = append(collected, patterns...)
	}
	return collected, nil
}
// checkInput validates the header-field combinations restricted for
// MMR-encoded regions; violations are only logged, never fatal.
func (h *HalftoneRegion) checkInput() error {
	if !h.IsMMREncoded {
		return nil
	}
	if h.HTemplate != 0 {
		common.Log.Debug("HTemplate = %d should contain the value 0", h.HTemplate)
	}
	if h.HSkipEnabled {
		// Fixed garbled log text: previously "HSkipEnabled 0 %v".
		common.Log.Debug("HSkipEnabled = %v (should contain the value false)", h.HSkipEnabled)
	}
	return nil
}
// combineGrayscalePlanes XORs every byte of bit plane j+1 into bit plane j.
// This is the plane-combination step of the gray-scale decoding procedure
// driven by grayScaleDecoding.
func (h *HalftoneRegion) combineGrayscalePlanes(grayScalePlanes []*bitmap.Bitmap, j int) error {
	byteIndex := 0
	for y := 0; y < grayScalePlanes[j].Height; y++ {
		// Each byte holds 8 horizontally adjacent pixels.
		for x := 0; x < grayScalePlanes[j].Width; x += 8 {
			newValue, err := grayScalePlanes[j+1].GetByte(byteIndex)
			if err != nil {
				return err
			}
			oldValue, err := grayScalePlanes[j].GetByte(byteIndex)
			if err != nil {
				return err
			}
			err = grayScalePlanes[j].SetByte(byteIndex, bitmap.CombineBytes(oldValue, newValue, bitmap.CmbOpXor))
			if err != nil {
				return err
			}
			byteIndex++
		}
	}
	return nil
}
// computeGrayScalePlanes assembles the final gray-scale value of every grid
// cell by summing bit j of each plane weighted with 2^j.
func (h *HalftoneRegion) computeGrayScalePlanes(grayScalePlanes []*bitmap.Bitmap, bitsPerValue int) ([][]int, error) {
	grayScaleValues := make([][]int, h.HGridHeight)
	for i := 0; i < len(grayScaleValues); i++ {
		grayScaleValues[i] = make([]int, h.HGridWidth)
	}
	for y := 0; y < int(h.HGridHeight); y++ {
		// Walk each row one byte (up to 8 pixels) at a time.
		for x := 0; x < int(h.HGridWidth); x += 8 {
			var minorWidth int
			if d := int(h.HGridWidth) - x; d > 8 {
				minorWidth = 8
			} else {
				minorWidth = d
			}
			byteIndex := grayScalePlanes[0].GetByteIndex(x, y)
			for minorX := 0; minorX < minorWidth; minorX++ {
				i := minorX + x
				grayScaleValues[y][i] = 0
				// Extract bit i (within its byte) from each plane and add
				// its weight 2^j.
				for j := 0; j < bitsPerValue; j++ {
					bv, err := grayScalePlanes[j].GetByte(byteIndex)
					if err != nil {
						return nil, err
					}
					shifted := (bv >> uint(7-i&7))
					and1 := shifted & 1
					multiplier := 1 << uint(j)
					v := int(and1) * multiplier
					grayScaleValues[y][i] += v
				}
			}
		}
	}
	return grayScaleValues, nil
}
// computeSegmentDataStructure records where the encoded region data begins
// and how long the header/data portions are, relative to the reader.
func (h *HalftoneRegion) computeSegmentDataStructure() error {
	offset := h.r.StreamPosition()
	h.DataOffset = offset
	h.DataHeaderLength = offset - h.DataHeaderOffset
	h.DataLength = int64(h.r.Length()) - h.DataHeaderLength
	return nil
}
// computeX maps grid cell (m,n) to its x pixel coordinate (6.6.5.2).
func (h *HalftoneRegion) computeX(m, n int) int {
	return h.shiftAndFill(int(h.HGridX) + m*int(h.HRegionY) + n*int(h.HRegionX))
}
// computeY maps grid cell (m,n) to its y pixel coordinate (6.6.5.2).
func (h *HalftoneRegion) computeY(m, n int) int {
	return h.shiftAndFill(int(h.HGridY) + m*int(h.HRegionX) - n*int(h.HRegionY))
}
// grayScaleDecoding decodes the gray-scale image: one generic region is
// decoded per bit plane, most significant plane first, and the planes are
// combined (combineGrayscalePlanes) and summed (computeGrayScalePlanes)
// into one integer value per halftone grid cell.
func (h *HalftoneRegion) grayScaleDecoding(bitsPerValue int) ([][]int, error) {
	var (
		gbAtX []int8
		gbAtY []int8
	)
	// Adaptive template pixel positions, only used for arithmetic coding.
	if !h.IsMMREncoded {
		gbAtX = make([]int8, 4)
		gbAtY = make([]int8, 4)
		if h.HTemplate <= 1 {
			gbAtX[0] = 3
		} else if h.HTemplate >= 2 {
			gbAtX[0] = 2
		}
		gbAtY[0] = -1
		gbAtX[1] = -3
		gbAtY[1] = -1
		gbAtX[2] = 2
		gbAtY[2] = -2
		gbAtX[3] = -2
		gbAtY[3] = -2
	}
	grayScalePlanes := make([]*bitmap.Bitmap, bitsPerValue)
	// 1) One generic-region decoder is reused for every plane.
	genericRegion := NewGenericRegion(h.r)
	genericRegion.setParametersMMR(h.IsMMREncoded, h.DataOffset, h.DataLength, h.HGridHeight, h.HGridWidth, h.HTemplate, false, h.HSkipEnabled, gbAtX, gbAtY)
	// 2) Decode the most significant plane first.
	j := bitsPerValue - 1
	var err error
	grayScalePlanes[j], err = genericRegion.GetRegionBitmap()
	if err != nil {
		return nil, err
	}
	// Decode the remaining planes, combining each into its successor.
	for j > 0 {
		j--
		genericRegion.Bitmap = nil
		grayScalePlanes[j], err = genericRegion.GetRegionBitmap()
		if err != nil {
			return nil, err
		}
		if err = h.combineGrayscalePlanes(grayScalePlanes, j); err != nil {
			return nil, err
		}
	}
	return h.computeGrayScalePlanes(grayScalePlanes, bitsPerValue)
}
// parseHeader reads the halftone segment header fields (7.4.5.1) that
// follow the generic region segment information field, then records the
// data layout and validates the field combinations.
func (h *HalftoneRegion) parseHeader() error {
	if err := h.RegionSegment.parseHeader(); err != nil {
		return err
	}
	// Bit 7: the region's default pixel value.
	b, err := h.r.ReadBit()
	if err != nil {
		return err
	}
	h.HDefaultPixel = int8(b)
	// Bit 4-6: how the region is combined onto the page.
	temp, err := h.r.ReadBits(3)
	if err != nil {
		return err
	}
	h.CombinationOperator = bitmap.CombinationOperator(temp & 0xf)
	// Bit 3: whether skip pixels are enabled.
	b, err = h.r.ReadBit()
	if err != nil {
		return err
	}
	if b == 1 {
		h.HSkipEnabled = true
	}
	// Bit 1 - 2: the generic-region template number.
	temp, err = h.r.ReadBits(2)
	if err != nil {
		return err
	}
	h.HTemplate = byte(temp & 0xf)
	// Bit 0: whether the planes are MMR-coded (vs arithmetic).
	b, err = h.r.ReadBit()
	if err != nil {
		return err
	}
	if b == 1 {
		h.IsMMREncoded = true
	}
	// Width of the gray-scale image (7.4.5.1.2.1).
	temp, err = h.r.ReadBits(32)
	if err != nil {
		return err
	}
	h.HGridWidth = uint32(temp & math.MaxUint32)
	// Height of the gray-scale image (7.4.5.1.2.2).
	temp, err = h.r.ReadBits(32)
	if err != nil {
		return err
	}
	h.HGridHeight = uint32(temp & math.MaxUint32)
	// Signed grid offsets (7.4.5.1.2.3 / .4).
	temp, err = h.r.ReadBits(32)
	if err != nil {
		return err
	}
	h.HGridX = int32(temp & math.MaxInt32)
	temp, err = h.r.ReadBits(32)
	if err != nil {
		return err
	}
	h.HGridY = int32(temp & math.MaxInt32)
	// The halftone grid vector (7.4.5.1.3).
	temp, err = h.r.ReadBits(16)
	if err != nil {
		return err
	}
	h.HRegionX = uint16(temp & math.MaxUint16)
	temp, err = h.r.ReadBits(16)
	if err != nil {
		return err
	}
	h.HRegionY = uint16(temp & math.MaxUint16)
	if err = h.computeSegmentDataStructure(); err != nil {
		return err
	}
	return h.checkInput()
}
// renderPattern draws the pattern into the region bitmap, as described in 6.6.5.2.
// Each grid cell's gray-scale value indexes the pattern to composite at
// that cell's pixel position (offset by the grid origin).
func (h *HalftoneRegion) renderPattern(grayScaleValues [][]int) error {
	for m := 0; m < int(h.HGridHeight); m++ {
		for n := 0; n < int(h.HGridWidth); n++ {
			x := h.computeX(m, n) + int(h.HGridX)
			y := h.computeY(m, n) + int(h.HGridY)
			cell := h.Patterns[grayScaleValues[m][n]]
			if err := bitmap.Blit(cell, h.HalftoneRegionBitmap, x, y, h.CombinationOperator); err != nil {
				return err
			}
		}
	}
	return nil
}
// newHalftoneRegion constructs a HalftoneRegion bound to r.
func newHalftoneRegion(r *reader.Reader) *HalftoneRegion {
	return &HalftoneRegion{r: r, RegionSegment: NewRegionSegment(r)}
}
// findMSB returns the highest power of two not exceeding n (0 for n == 0):
// it smears the top set bit down through the low 32 bits, then isolates it.
func findMSB(n int) int {
	if n == 0 {
		return 0
	}
	for shift := 1; shift <= 16; shift <<= 1 {
		n |= n >> shift
	}
	return (n + 1) >> 1
}
// shiftAndFill divides value by 256 (arithmetic shift right by 8) and, for
// negative results, force-sets the high-order bits.
// NOTE(review): for a negative value findMSB returns 0, so math.Log(0) is
// -Inf and the float->int conversion of -Inf is implementation-specific;
// the negative branch looks fragile — verify against the reference decoder.
func (h *HalftoneRegion) shiftAndFill(value int) int {
	value >>= 8
	if value < 0 {
		bitPosition := int(math.Log(float64(findMSB(value))) / math.Log(2))
		l := 31 - bitPosition
		for i := 1; i < l; i++ {
			value |= (1 << uint(31-i))
		}
	}
	return value
}
package aerospike
// OperationType determines operation type
type OperationType *struct{ op byte }
// Valid OperationType values that can be used to create custom Operations.
// The names are self-explanatory.
// Note: CDT_READ/MAP_READ and CDT_MODIFY/MAP_MODIFY share the same wire
// values (3 and 4 respectively).
var (
	READ OperationType = &struct{ op byte }{1}
	// READ_HEADER *OperationType = &struct{op: 1 }
	WRITE OperationType = &struct{ op byte }{2}
	CDT_READ OperationType = &struct{ op byte }{3}
	CDT_MODIFY OperationType = &struct{ op byte }{4}
	MAP_READ OperationType = &struct{ op byte }{3}
	MAP_MODIFY OperationType = &struct{ op byte }{4}
	ADD OperationType = &struct{ op byte }{5}
	APPEND OperationType = &struct{ op byte }{9}
	PREPEND OperationType = &struct{ op byte }{10}
	TOUCH OperationType = &struct{ op byte }{11}
)
// Operation contains operation definition.
// This struct is used in client's operate() method.
type Operation struct {
	// OpType determines type of operation.
	OpType OperationType
	// BinName (Optional) determines the name of bin used in operation.
	BinName string
	// BinValue (Optional) determines bin value used in operation.
	BinValue Value
	// headerOnly will be true ONLY for GetHeader() operation
	headerOnly bool
}
// GetOpForBin creates read bin database operation.
func GetOpForBin(binName string) *Operation {
return &Operation{OpType: READ, BinName: binName, BinValue: NewNullValue()}
}
// GetOp creates read all record bins database operation.
func GetOp() *Operation {
return &Operation{OpType: READ, BinValue: NewNullValue()}
}
// GetHeaderOp creates read record header database operation.
func GetHeaderOp() *Operation {
return &Operation{OpType: READ, headerOnly: true, BinValue: NewNullValue()}
}
// PutOp creates set database operation.
func PutOp(bin *Bin) *Operation {
return &Operation{OpType: WRITE, BinName: bin.Name, BinValue: bin.Value}
}
// AppendOp creates string append database operation.
func AppendOp(bin *Bin) *Operation {
return &Operation{OpType: APPEND, BinName: bin.Name, BinValue: bin.Value}
}
// PrependOp creates string prepend database operation.
func PrependOp(bin *Bin) *Operation {
return &Operation{OpType: PREPEND, BinName: bin.Name, BinValue: bin.Value}
}
// AddOp creates integer add database operation.
func AddOp(bin *Bin) *Operation {
return &Operation{OpType: ADD, BinName: bin.Name, BinValue: bin.Value}
}
// TouchOp creates touch database operation.
func TouchOp() *Operation {
return &Operation{OpType: TOUCH, BinValue: NewNullValue()}
} | operation.go | 0.657978 | 0.401394 | operation.go | starcoder |
package siastats
import (
"encoding/json"
)
// ComparisonMatrix struct for ComparisonMatrix.
// All fields are optional pointers (generated accessor style): a nil field
// means "not set" and is omitted from the JSON encoding.
type ComparisonMatrix struct {
	Stored *int32 `json:"stored,omitempty"`
	Price *float32 `json:"price,omitempty"`
	Collateral *float32 `json:"collateral,omitempty"`
	Up *float32 `json:"up,omitempty"`
	Down *float32 `json:"down,omitempty"`
	ContractPrice *float32 `json:"contractPrice,omitempty"`
}
// NewComparisonMatrix instantiates a new ComparisonMatrix object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewComparisonMatrix() *ComparisonMatrix {
	return &ComparisonMatrix{}
}
// NewComparisonMatrixWithDefaults instantiates a new ComparisonMatrix object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewComparisonMatrixWithDefaults() *ComparisonMatrix {
	return &ComparisonMatrix{}
}
// GetStored returns the Stored field value if set, zero value otherwise.
func (o *ComparisonMatrix) GetStored() int32 {
	if o == nil || o.Stored == nil {
		return 0
	}
	return *o.Stored
}
// GetStoredOk returns a tuple with the Stored field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ComparisonMatrix) GetStoredOk() (*int32, bool) {
	if o == nil || o.Stored == nil {
		return nil, false
	}
	return o.Stored, true
}
// HasStored returns a boolean if a field has been set.
func (o *ComparisonMatrix) HasStored() bool {
	return o != nil && o.Stored != nil
}
// SetStored gets a reference to the given int32 and assigns it to the Stored field.
func (o *ComparisonMatrix) SetStored(v int32) {
	o.Stored = &v
}
// GetPrice returns the Price field value if set, zero value otherwise.
func (o *ComparisonMatrix) GetPrice() float32 {
	if o == nil || o.Price == nil {
		return 0
	}
	return *o.Price
}
// GetPriceOk returns a tuple with the Price field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ComparisonMatrix) GetPriceOk() (*float32, bool) {
	if o == nil || o.Price == nil {
		return nil, false
	}
	return o.Price, true
}
// HasPrice returns a boolean if a field has been set.
func (o *ComparisonMatrix) HasPrice() bool {
	return o != nil && o.Price != nil
}
// SetPrice gets a reference to the given float32 and assigns it to the Price field.
func (o *ComparisonMatrix) SetPrice(v float32) {
	o.Price = &v
}
// GetCollateral returns the Collateral field value if set, zero value otherwise.
func (o *ComparisonMatrix) GetCollateral() float32 {
	if o == nil || o.Collateral == nil {
		return 0
	}
	return *o.Collateral
}
// GetCollateralOk returns a tuple with the Collateral field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ComparisonMatrix) GetCollateralOk() (*float32, bool) {
	if o == nil || o.Collateral == nil {
		return nil, false
	}
	return o.Collateral, true
}
// HasCollateral returns a boolean if a field has been set.
func (o *ComparisonMatrix) HasCollateral() bool {
	return o != nil && o.Collateral != nil
}
// SetCollateral gets a reference to the given float32 and assigns it to the Collateral field.
func (o *ComparisonMatrix) SetCollateral(v float32) {
	o.Collateral = &v
}
// GetUp returns the Up field value if set, zero value otherwise.
func (o *ComparisonMatrix) GetUp() float32 {
	if o == nil || o.Up == nil {
		return 0
	}
	return *o.Up
}
// GetUpOk returns a tuple with the Up field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ComparisonMatrix) GetUpOk() (*float32, bool) {
if o == nil || o.Up == nil {
return nil, false
}
return o.Up, true
}
// HasUp returns a boolean if a field has been set.
func (o *ComparisonMatrix) HasUp() bool {
if o != nil && o.Up != nil {
return true
}
return false
}
// SetUp gets a reference to the given float32 and assigns it to the Up field.
func (o *ComparisonMatrix) SetUp(v float32) {
o.Up = &v
}
// GetDown returns the Down field value if set, zero value otherwise.
func (o *ComparisonMatrix) GetDown() float32 {
if o == nil || o.Down == nil {
var ret float32
return ret
}
return *o.Down
}
// GetDownOk returns a tuple with the Down field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ComparisonMatrix) GetDownOk() (*float32, bool) {
if o == nil || o.Down == nil {
return nil, false
}
return o.Down, true
}
// HasDown returns a boolean if a field has been set.
func (o *ComparisonMatrix) HasDown() bool {
if o != nil && o.Down != nil {
return true
}
return false
}
// SetDown gets a reference to the given float32 and assigns it to the Down field.
func (o *ComparisonMatrix) SetDown(v float32) {
o.Down = &v
}
// GetContractPrice returns the ContractPrice field value if set, zero value otherwise.
func (o *ComparisonMatrix) GetContractPrice() float32 {
if o == nil || o.ContractPrice == nil {
var ret float32
return ret
}
return *o.ContractPrice
}
// GetContractPriceOk returns a tuple with the ContractPrice field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *ComparisonMatrix) GetContractPriceOk() (*float32, bool) {
if o == nil || o.ContractPrice == nil {
return nil, false
}
return o.ContractPrice, true
}
// HasContractPrice returns a boolean if a field has been set.
func (o *ComparisonMatrix) HasContractPrice() bool {
if o != nil && o.ContractPrice != nil {
return true
}
return false
}
// SetContractPrice gets a reference to the given float32 and assigns it to the ContractPrice field.
func (o *ComparisonMatrix) SetContractPrice(v float32) {
o.ContractPrice = &v
}
// MarshalJSON serializes only the fields that have been set; unset (nil)
// fields are left out of the resulting object entirely.
func (o ComparisonMatrix) MarshalJSON() ([]byte, error) {
	payload := map[string]interface{}{}
	// Each pointer is checked before insertion so that a nil field never
	// appears in the output (matching the struct's omitempty contract).
	if o.Stored != nil {
		payload["stored"] = o.Stored
	}
	if o.Price != nil {
		payload["price"] = o.Price
	}
	if o.Collateral != nil {
		payload["collateral"] = o.Collateral
	}
	if o.Up != nil {
		payload["up"] = o.Up
	}
	if o.Down != nil {
		payload["down"] = o.Down
	}
	if o.ContractPrice != nil {
		payload["contractPrice"] = o.ContractPrice
	}
	return json.Marshal(payload)
}
// NullableComparisonMatrix wraps a ComparisonMatrix pointer together with an
// explicit "set" flag, letting callers distinguish an unset value from one
// that was deliberately set to nil/null.
type NullableComparisonMatrix struct {
	value *ComparisonMatrix
	isSet bool
}

// Get returns the wrapped value (which may be nil).
func (v NullableComparisonMatrix) Get() *ComparisonMatrix {
	return v.value
}

// Set stores val and flags the wrapper as set.
func (v *NullableComparisonMatrix) Set(val *ComparisonMatrix) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether Set has been called (or the wrapper was built via the
// constructor).
func (v NullableComparisonMatrix) IsSet() bool {
	return v.isSet
}

// Unset clears both the value and the set flag.
func (v *NullableComparisonMatrix) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableComparisonMatrix wraps val in an already-set nullable container.
func NewNullableComparisonMatrix(val *ComparisonMatrix) *NullableComparisonMatrix {
	return &NullableComparisonMatrix{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value; a nil value encodes as JSON null.
func (v NullableComparisonMatrix) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}
func (v *NullableComparisonMatrix) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
}
package utils
import (
"bytes"
"io"
"strconv"
"strings"
)
// An Indenter helps building strings where all newlines are supposed to be followed by
// a sequence of zero or many spaces that reflect an indent level.
type Indenter struct {
	b *bytes.Buffer // output buffer; shared with child Indenters created by Indent()
	i int           // current indent level; NewLine emits one indent unit per level
}

// An Indentable can create build a string representation of itself using an Indenter
type Indentable interface {
	// AppendTo appends a string representation of the Node to the Indenter
	AppendTo(w *Indenter)
}
// IndentedString renders ia through a fresh zero-level Indenter and returns
// the resulting text.
func IndentedString(ia Indentable) string {
	ind := NewIndenter()
	ia.AppendTo(ind)
	return ind.String()
}

// NewIndenter creates a new Indenter for indent level zero.
func NewIndenter() *Indenter {
	return NewIndenterWithLevel(0)
}

// NewIndenterWithLevel creates a new Indenter starting at the given level.
func NewIndenterWithLevel(level int) *Indenter {
	return &Indenter{b: new(bytes.Buffer), i: level}
}

// Len reports how many bytes have been appended to the indenter so far.
func (i *Indenter) Len() int {
	return i.b.Len()
}

// Level reports the indenter's current indent level.
func (i *Indenter) Level() int {
	return i.i
}

// Reset clears the buffered text; the indent level is left untouched.
func (i *Indenter) Reset() {
	i.b.Reset()
}
// String returns the current string that has been built using the indenter. Trailing whitespaces
// are deleted from all lines.
// NOTE(review): this reads the internal buffer destructively via ReadRune, so
// a second call to String() will not return the same text — confirm callers
// only render once (IndentedString does).
func (i *Indenter) String() string {
	// Rebuild into a fresh buffer sized like the source; spaces and tabs are
	// parked in wb and only flushed once a non-whitespace rune proves they
	// are not trailing.
	n := bytes.NewBuffer(make([]byte, 0, i.b.Len()))
	wb := &bytes.Buffer{}
	for {
		r, _, err := i.b.ReadRune()
		if err == io.EOF {
			break
		}
		if r == ' ' || r == '\t' {
			// Defer whitespace output
			wb.WriteByte(byte(r))
			continue
		}
		if r == '\n' {
			// Truncate trailing space
			wb.Reset()
		} else {
			if wb.Len() > 0 {
				n.Write(wb.Bytes())
				wb.Reset()
			}
		}
		n.WriteRune(r)
	}
	// Whitespace still parked in wb at EOF is dropped, trimming the final line too.
	return n.String()
}
// WriteString appends s verbatim to the internal buffer; no newline handling
// is performed.
func (i *Indenter) WriteString(s string) (n int, err error) {
	return i.b.WriteString(s)
}

// Write appends p verbatim to the internal buffer; no newline handling is
// performed. Implements io.Writer.
func (i *Indenter) Write(p []byte) (n int, err error) {
	return i.b.Write(p)
}

// AppendRune appends a single rune verbatim to the internal buffer.
func (i *Indenter) AppendRune(r rune) {
	i.b.WriteRune(r)
}

// Append appends s verbatim via the package-level WriteString helper.
func (i *Indenter) Append(s string) {
	WriteString(i.b, s)
}
// AppendIndented is like Append but replaces every newline in s with an
// indented newline (NewLine), so multi-line text keeps the current level.
func (i *Indenter) AppendIndented(s string) {
	for len(s) > 0 {
		nl := strings.IndexByte(s, '\n')
		if nl < 0 {
			// No more newlines: emit the remainder as-is.
			WriteString(i.b, s)
			return
		}
		if nl > 0 {
			WriteString(i.b, s[:nl])
		}
		i.NewLine()
		s = s[nl+1:]
	}
}
// AppendBool writes the literal "true" or "false" to the internal buffer.
func (i *Indenter) AppendBool(b bool) {
	text := `false`
	if b {
		text = `true`
	}
	WriteString(i.b, text)
}

// AppendInt writes the decimal representation of b to the internal buffer.
func (i *Indenter) AppendInt(b int) {
	WriteString(i.b, strconv.Itoa(b))
}
// Indent returns a sibling Indenter that shares this one's buffer but writes
// one indent level deeper.
func (i *Indenter) Indent() *Indenter {
	return &Indenter{b: i.b, i: i.i + 1}
}

// Printf formats according to a format specifier and writes to the internal
// buffer (via the package-level Fprintf helper).
func (i *Indenter) Printf(s string, args ...interface{}) {
	Fprintf(i.b, s, args...)
}
// NewLine writes a newline followed by the current indent after trimming trailing whitespaces
func (i *Indenter) NewLine() {
i.b.WriteByte('\n')
for n := 0; n < i.i; n++ {
WriteString(i.b, ` `)
}
}
package iso20022
// Set of elements providing the total sum of entries per bank transaction code.
// Generated ISO 20022 message component; every scalar field is an optional
// pointer whose concrete type is a defined string type from this package.
type NumberAndSumOfTransactionsPerBankTransactionCode1 struct {
	// Number of individual entries contained in the report.
	NumberOfEntries *Max15NumericText `xml:"NbOfNtries,omitempty"`
	// Total of all individual entries included in the report.
	Sum *DecimalNumber `xml:"Sum,omitempty"`
	// Resulting amount of the netted amounts for all debit and credit entries per bank transaction code.
	TotalNetEntryAmount *DecimalNumber `xml:"TtlNetNtryAmt,omitempty"`
	// Indicates whether the total net entry amount is a credit or a debit amount.
	CreditDebitIndicator *CreditDebitCode `xml:"CdtDbtInd,omitempty"`
	// Set of elements to fully identify the type of underlying transaction resulting in an entry.
	BankTransactionCode *BankTransactionCodeStructure1 `xml:"BkTxCd"`
	// Set of elements used to indicate when the booked amount of money will become available, ie can be accessed and start generating interest.
	Availability []*CashBalanceAvailability1 `xml:"Avlbty,omitempty"`
}
// SetNumberOfEntries stores value as the NumberOfEntries field.
func (n *NumberAndSumOfTransactionsPerBankTransactionCode1) SetNumberOfEntries(value string) {
	converted := Max15NumericText(value)
	n.NumberOfEntries = &converted
}

// SetSum stores value as the Sum field.
func (n *NumberAndSumOfTransactionsPerBankTransactionCode1) SetSum(value string) {
	converted := DecimalNumber(value)
	n.Sum = &converted
}

// SetTotalNetEntryAmount stores value as the TotalNetEntryAmount field.
func (n *NumberAndSumOfTransactionsPerBankTransactionCode1) SetTotalNetEntryAmount(value string) {
	converted := DecimalNumber(value)
	n.TotalNetEntryAmount = &converted
}

// SetCreditDebitIndicator stores value as the CreditDebitIndicator field.
func (n *NumberAndSumOfTransactionsPerBankTransactionCode1) SetCreditDebitIndicator(value string) {
	converted := CreditDebitCode(value)
	n.CreditDebitIndicator = &converted
}

// AddBankTransactionCode allocates, attaches and returns a fresh
// BankTransactionCodeStructure1 for the caller to populate.
func (n *NumberAndSumOfTransactionsPerBankTransactionCode1) AddBankTransactionCode() *BankTransactionCodeStructure1 {
	code := new(BankTransactionCodeStructure1)
	n.BankTransactionCode = code
	return code
}
func (n *NumberAndSumOfTransactionsPerBankTransactionCode1) AddAvailability() *CashBalanceAvailability1 {
newValue := new (CashBalanceAvailability1)
n.Availability = append(n.Availability, newValue)
return newValue
}
package czml
// Cartesian2 is two-dimensional Cartesian value specified as [X, Y]. If the array has two elements,
// the value is constant. If it has three or more elements, they are time-tagged samples arranged as
// [Time, X, Y, Time, X, Y, ...], where Time is an ISO 8601 date and time string or seconds since
// epoch.
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/Cartesian2Value
type Cartesian2Value []float64

// Cartesian2ListValue is a list of two-dimensional Cartesian values specified as
// [X, Y, X, Y, ...].
// NOTE(review): unlike the other list aliases below this one is a *pointer*
// to a slice — confirm the indirection is intentional.
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/Cartesian2ListValue
type Cartesian2ListValue *[]float64

// Cartesian3Value is the position specified as a three-dimensional
// Cartesian value [X, Y, Z] in meters relative to the referenceFrame
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/Cartesian3Value
type Cartesian3Value []float64

// Cartesian3VelocityValue is a three-dimensional Cartesian value and its derivative specified as
// [X, Y, Z, dX, dY, dZ]. If the array has six elements, the value is constant. If it has seven or
// more elements, they are time-tagged samples arranged as [Time, X, Y, Z, dX, dY, dZ, Time, X,...],
// where Time is an ISO 8601 date and time string or seconds since epoch.
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/Cartesian3VelocityValue
type Cartesian3VelocityValue []float64

// UnitCartesian3Value is a three-dimensional unit magnitude Cartesian value specified as [X, Y, Z].
// If the array has three elements, the value is constant. If it has four or more elements, they are
// time-tagged samples arranged as [Time, X, Y, Z, Time, X, Y, Z, ...], where Time is an ISO 8601
// date and time string or seconds since epoch.
// NOTE(review): this and UnitCartesian3ListValue use []interface{} while the
// other aliases use []float64 — presumably to allow mixed time strings and
// numbers; confirm the inconsistency is deliberate.
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/UnitCartesian3Value
type UnitCartesian3Value []interface{}

// UnitCartesian3ListValue is a list of three-dimensional unit magnitude Cartesian values,
// specified as [X, Y, Z, X, Y, Z, ...].
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/UnitCartesian3ListValue
type UnitCartesian3ListValue []interface{}

// Cartesian3ListValue is a list of three-dimensional Cartesian values specified as
// [X, Y, Z, X, Y, Z, ...].
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/Cartesian3ListValue
type Cartesian3ListValue []float64
// Cartesian3ListOfListsValue is a list of lists of three-dimensional Cartesian values specified
// as [X, Y, Z, X, Y, Z, ...].
// https://github.com/AnalyticalGraphicsInc/czml-writer/wiki/Cartesian3ListOfListsValue
type Cartesian3ListOfListsValue []Cartesian3Value
package image_generator
import (
"image/color"
"strings"
"github.com/minio/minio/pkg/env"
)
// image dimensions and margins
// NOTE(review): height is 1920 and width is 1080, i.e. a portrait canvas —
// confirm the two names are not swapped relative to how the drawing code
// consumes them.
var (
	height = float64(1920)
	width = float64(1080)
	marginTop = float64(250)
	marginBottom = float64(250)
	marginLeft = float64(100)
	marginRight = float64(100)
	fontSize = float64(96) // title font size in points
)
// colors, font-type, etc ...
// Each palette pairs a font color with a background color. All alphas are
// 0xff (fully opaque); entries whose fontColor sets only A render black text.
// Channel values mix hex and decimal notation but are plain uint8s either way.
var palettes = []palette{
	{
		fontColor: color.RGBA{A: 0xff},
		backgroundColor: color.RGBA{R: 0x4C, G: 0xF1, B: 0xE1, A: 0xff},
	},
	{
		fontColor: color.RGBA{A: 0xff},
		backgroundColor: color.RGBA{R: 0xDB, G: 0xCF, B: 0xB0, A: 0xff},
	},
	{
		fontColor: color.RGBA{A: 0xff},
		backgroundColor: color.RGBA{R: 0xBF, G: 0xC8, B: 0xAD, A: 0xff},
	},
	{
		fontColor: color.RGBA{A: 0xff},
		backgroundColor: color.RGBA{R: 0x90, G: 0xB4, B: 0x94, A: 0xff},
	},
	{
		fontColor: color.RGBA{R: 255, G: 255, B: 255, A: 0xff},
		backgroundColor: color.RGBA{R: 0x71, G: 0x8F, B: 0x94, A: 0xff},
	},
	{
		fontColor: color.RGBA{R: 255, G: 255, B: 255, A: 0xff},
		backgroundColor: color.RGBA{R: 0x54, G: 0x57, B: 0x75, A: 0xff},
	},
	{
		fontColor: color.RGBA{R: 255, G: 255, B: 255, A: 0xff},
		backgroundColor: color.RGBA{R: 232, G: 30, B: 99, A: 0xff},
	},
	{
		fontColor: color.RGBA{R: 255, G: 255, B: 255, A: 0xff},
		backgroundColor: color.RGBA{R: 156, G: 39, B: 175, A: 0xff},
	},
	{
		fontColor: color.RGBA{R: 255, G: 255, B: 255, A: 0xff},
		backgroundColor: color.RGBA{R: 33, G: 150, B: 242, A: 0xff},
	},
	{
		fontColor: color.RGBA{R: 255, G: 255, B: 255, A: 0xff},
		backgroundColor: color.RGBA{R: 0x4C, G: 0xAE, B: 0x50, A: 0xff},
	},
	{
		fontColor: color.RGBA{A: 0xff},
		backgroundColor: color.RGBA{R: 0xCC, G: 0xDB, B: 0x39, A: 0xff},
	},
	{
		fontColor: color.RGBA{A: 0xff},
		backgroundColor: color.RGBA{R: 0xFE, G: 0x98, B: 0x00, A: 0xff},
	},
	{
		fontColor: color.RGBA{A: 0xff},
		backgroundColor: color.RGBA{R: 0xFE, G: 0xEA, B: 0x3B, A: 0xff},
	},
	{
		fontColor: color.RGBA{R: 255, G: 255, B: 255, A: 0xff},
		backgroundColor: color.RGBA{R: 0xc2, G: 0x18, B: 0x5b, A: 0xff},
	},
	{
		fontColor: color.RGBA{R: 255, G: 255, B: 255, A: 0xff},
		backgroundColor: color.RGBA{R: 0x21, G: 0x21, B: 0x21, A: 0xff},
	},
	{
		fontColor: color.RGBA{A: 0xff},
		backgroundColor: color.RGBA{R: 0xff, G: 0x80, B: 0xab, A: 0xff},
	},
	{
		fontColor: color.RGBA{A: 0xff},
		backgroundColor: color.RGBA{R: 255, G: 255, B: 255, A: 0xff},
	},
	{
		fontColor: color.RGBA{R: 255, G: 255, B: 255, A: 0xff},
		backgroundColor: color.RGBA{R: 0xab, G: 0x47, B: 0xbc, A: 0xff},
	},
}
// fonts lists the TTF files used for the headline text.
var fonts = []string{
	"assets/fonts/Roboto-Black.ttf",
}

// publicationsMock is sample data used while developing/testing the
// image generator; titles are real headlines but most URLs point at the
// same article.
var publicationsMock = []publication{
	{
		title: "New Framework Released to Protect Machine Learning Systems From Adversarial Attacks",
		url: "https://thehackernews.com/2020/10/adversarial-ml-threat-matrix.html",
		source: "The Hacker News",
	},
	{
		title: "The RIAA is coming for the YouTube downloaders",
		url: "https://thehackernews.com/2020/10/adversarial-ml-threat-matrix.html",
		source: "TechCrunch",
	},
	{
		title: "Daily Crunch: Uber and Lyft defeated again in court",
		url: "https://thehackernews.com/2020/10/adversarial-ml-threat-matrix.html",
		source: "TechCrunch",
	},
	{
		title: "U.S. Levies Sanctions Against Russian Research Institution Linked to Triton Malware",
		url: "https://thehackernews.com/2020/10/adversarial-ml-threat-matrix.html",
		source: "Threatpost",
	},
	{
		title: "Cybercriminals Could be Coming After Your Coffee",
		url: "https://thehackernews.com/2020/10/adversarial-ml-threat-matrix.html",
		source: "Dark Reading",
	},
	{
		title: "Iran-Linked Seedworm APT target orgs in the Middle East",
		url: "https://thehackernews.com/2020/10/adversarial-ml-threat-matrix.html",
		source: "Security Affairs",
	},
}
// assets constants
const (
	avatarPath = "assets/images/avatar.png" // author avatar drawn on the card
	lightFont = "assets/fonts/Rajdhani-Regular.ttf" // lighter face used for secondary text
)

// ENV variables for additional configuration
const (
	// Name of the environment variable holding the author bio (comma-separated lines).
	authorBioDescription = "AUTHOR_BIO_DESCRIPTION"
)
// author description, it's a long text separated by comma
func getAuthorBioDescription() []string {
description := strings.TrimSpace(env.Get(authorBioDescription, "If you want to view paradise, Simply look around and view it, Follow me @alevskey"))
return strings.Split(description, ",")
}
package tempconv
// CToF converts a Celsius temperature to Fahrenheit (°F = °C·9/5 + 32).
func CToF (c Celsius) Fahrenheit {
	return Fahrenheit(c*9/5 + 32)
}

// CToK converts a Celsius temperature to Kelvin (K = °C + 273.15).
func CToK (c Celsius) Kelvin {
	return Kelvin(c + 273.15)
}

// CToRa converts a Celsius temperature to Rankine (°Ra = °C·1.8 + 491.67).
func CToRa (c Celsius) Rankine {
	return Rankine(c * 1.8 + 491.67)
}

// CToRé converts a Celsius temperature to Réaumur (°Ré = °C·0.8).
func CToRé (c Celsius) Réaumur {
	return Réaumur(c * 0.8)
}

// CToDe converts a Celsius temperature to Delisle (°De = (100 − °C)·1.5).
func CToDe (c Celsius) Delisle {
	return Delisle((100 - c) * 1.5)
}

// CToN converts a Celsius temperature to Newton (°N = °C·0.33).
func CToN (c Celsius) Newton {
	return Newton(c * 0.33)
}

// CToRø converts a Celsius temperature to Rømer (°Rø = °C·21/40 + 7.5).
func CToRø (c Celsius) Rømer {
	return Rømer(c * 21/40 + 7.5)
}
// FToC converts a Fahrenheit temperature to Celsius (°C = (°F − 32)·5/9).
func FToC (f Fahrenheit) Celsius {
	return Celsius((f - 32) * 5 / 9)
}

// FToK converts a Fahrenheit temperature to Kelvin (K = (°F − 32)·5/9 + 273.15).
func FToK (f Fahrenheit) Kelvin {
	return Kelvin(((f - 32) * 5 / 9) + 273.15)
}

// FToRa converts a Fahrenheit temperature to Rankine (°Ra = °F + 459.67).
func FToRa (f Fahrenheit) Rankine {
	return Rankine(f + 459.67)
}

// FToRé converts a Fahrenheit temperature to Réaumur (°Ré = (°F − 32)·4/9).
func FToRé (f Fahrenheit) Réaumur {
	return Réaumur((f -32) * 4/9)
}

// FToDe converts a Fahrenheit temperature to Delisle (°De = (212 − °F)·5/6).
func FToDe (f Fahrenheit) Delisle {
	return Delisle((212 - f) * 5/6)
}

// FToN converts a Fahrenheit temperature to Newton (°N = (°F − 32)·11/60).
func FToN (f Fahrenheit) Newton {
	return Newton((f - 32) * 11/60)
}

// FToRø converts a Fahrenheit temperature to Rømer (°Rø = (°F − 32)·7/24 + 7.5).
func FToRø (f Fahrenheit) Rømer {
	return Rømer((f - 32) * 7/24 + 7.5)
}
// KToC converts a Kelvin temperature to Celsius (°C = K − 273.15).
func KToC (k Kelvin) Celsius {
	return Celsius(k - 273.15)
}

// KToF converts a Kelvin temperature to Fahrenheit (°F = (K − 273.15)·1.8 + 32).
func KToF (k Kelvin) Fahrenheit {
	return Fahrenheit((k - 273.15) * 1.8 + 32)
}

// KToRa converts a Kelvin temperature to Rankine (°Ra = K·9/5).
func KToRa (k Kelvin) Rankine {
	return Rankine(k * 9/5)
}

// KToRé converts a Kelvin temperature to Réaumur (°Ré = (K − 273.15)·0.8).
func KToRé (k Kelvin) Réaumur {
	return Réaumur((k - 273.15) * 0.8)
}

// KToDe converts a Kelvin temperature to Delisle (°De = (373.15 − K)·1.5).
func KToDe (k Kelvin) Delisle {
	return Delisle((373.15 - k) * 1.5)
}

// KToN converts a Kelvin temperature to Newton (°N = (K − 273.15)·0.33).
func KToN (k Kelvin) Newton {
	return Newton((k - 273.15) * 0.33)
}

// KToRø converts a Kelvin temperature to Rømer (°Rø = (K − 273.15)·21/40 + 7.5).
func KToRø (k Kelvin) Rømer {
	return Rømer((k - 273.15) * 21/40 + 7.5)
}
// RéToC converts a Réaumur temperature to Celsius (°C = °Ré·1.25).
func RéToC (ré Réaumur) Celsius {
	return Celsius(ré * 1.25)
}

// RéToF converts a Réaumur temperature to Fahrenheit (°F = °Ré·2.25 + 32).
func RéToF (ré Réaumur) Fahrenheit {
	return Fahrenheit(ré * 2.25 + 32)
}

// RéToRa converts a Réaumur temperature to Rankine (°Ra = °Ré·2.25 + 491.67).
func RéToRa (ré Réaumur) Rankine {
	return Rankine(ré * 2.25 + 491.67)
}
// RéToK converts a Réaumur temperature to Kelvin (K = °Ré·1.25 + 273.15).
// Fix: this function was mistakenly declared as RéToF, colliding with the
// Réaumur-to-Fahrenheit conversion above and breaking compilation; the name
// now matches its documented purpose and the package-wide XToY convention.
func RéToK (ré Réaumur) Kelvin {
	return Kelvin(ré * 1.25 + 273.15)
}
// RéToDe converts a Réaumur temperature to Delisle (°De = (80 − °Ré)·1.875).
func RéToDe (ré Réaumur) Delisle {
	return Delisle((80 - ré) * 1.875)
}

// RéToN converts a Réaumur temperature to Newton (°N = °Ré·33/80).
func RéToN (ré Réaumur) Newton {
	return Newton(ré * 33/80)
}

// RéToRø converts a Réaumur temperature to Rømer (°Rø = °Ré·21/32 + 7.5).
func RéToRø (ré Réaumur) Rømer {
	return Rømer(ré * 21/32 + 7.5)
}
// RaToC converts a Rankine temperature to Celsius (°C = °Ra·5/9 − 273.15).
// Fix: the result was wrapped in Rankine(...) although the function is
// declared to return Celsius, which failed to compile; the formula itself
// is unchanged.
func RaToC (ra Rankine) Celsius {
	return Celsius(ra * 5/9 - 273.15)
}
// RaToF converts a Rankine temperature to Fahrenheit (°F = °Ra − 459.67).
func RaToF (ra Rankine) Fahrenheit {
	return Fahrenheit(ra - 459.67)
}

// RaToK converts a Rankine temperature to Kelvin (K = °Ra·5/9).
func RaToK (ra Rankine) Kelvin {
	return Kelvin(ra * 5/9)
}

// RaToRé converts a Rankine temperature to Réaumur (°Ré = °Ra·4/9 − 218.52).
func RaToRé (ra Rankine) Réaumur {
	return Réaumur(ra * 4/9 - 218.52)
}

// RaToDe converts a Rankine temperature to Delisle (°De = (671.67 − °Ra)·5/6).
func RaToDe (ra Rankine) Delisle {
	return Delisle((671.67 - ra) * 5/6)
}

// RaToN converts a Rankine temperature to Newton (°N = (°Ra − 491.67)·11/60).
func RaToN (ra Rankine) Newton {
	return Newton((ra - 491.67) * 11/60)
}

// RaToRø converts a Rankine temperature to Rømer (°Rø = (°Ra − 491.67)·7/24 + 7.5).
func RaToRø (ra Rankine) Rømer {
	return Rømer((ra - 491.67) * 7/24 + 7.5)
}
// NToC converts a Newton temperature to Celsius (°C = °N·100/33).
func NToC (n Newton) Celsius {
	return Celsius(n * 100/33)
}

// NToF converts a Newton temperature to Fahrenheit (°F = °N·60/11 + 32).
func NToF (n Newton) Fahrenheit {
	return Fahrenheit(n * 60/11 + 32)
}

// NToK converts a Newton temperature to Kelvin (K = °N·100/33 + 273.15).
func NToK (n Newton) Kelvin {
	return Kelvin(n * 100/33 + 273.15)
}

// NToRa converts a Newton temperature to Rankine (°Ra = °N·60/11 + 491.67).
func NToRa (n Newton) Rankine {
	return Rankine(n * 60/11 + 491.67)
}

// NToRé converts a Newton temperature to Réaumur (°Ré = °N·80/33).
func NToRé (n Newton) Réaumur {
	return Réaumur(n * 80/33)
}

// NToDe converts a Newton temperature to Delisle (°De = (33 − °N)·50/11).
func NToDe (n Newton) Delisle {
	return Delisle((33 - n) * 50/11)
}
// NToRø converts a Newton temperature to Rømer.
func NToRø (n Newton) Rømer {
return Rømer(n * 35/22 + 7.5)
}
package elref
import (
"reflect"
)
// IsNil reports whether v is nil: either the untyped nil interface, or an
// interface holding a nil chan, func, map, pointer, slice or interface
// (which an ordinary v == nil comparison would miss).
func IsNil(v interface{}) bool {
	if v == nil {
		return true
	}
	rv := reflect.ValueOf(v)
	switch rv.Kind() {
	case reflect.Chan, reflect.Func, reflect.Map,
		reflect.Ptr, reflect.Slice, reflect.Interface:
		return rv.IsNil()
	}
	return false
}
// IsEmpty reports whether v is its type's empty value: nil, "", zero
// numbers, false, empty/nil containers, or (for any other kind, such as
// structs) deep-equal to the type's zero value.
func IsEmpty(v interface{}) bool {
	if v == nil {
		return true
	}
	rv := reflect.ValueOf(v)
	switch rv.Kind() {
	case reflect.String, reflect.Array:
		return rv.Len() == 0
	case reflect.Map, reflect.Slice:
		return rv.IsNil() || rv.Len() == 0
	case reflect.Bool:
		return !rv.Bool()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return rv.Int() == 0
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return rv.Uint() == 0
	case reflect.Float32, reflect.Float64:
		return rv.Float() == 0
	case reflect.Interface, reflect.Ptr:
		return rv.IsNil()
	}
	// Fallback for structs, channels, funcs, complex numbers, etc.
	return reflect.DeepEqual(v, reflect.Zero(rv.Type()).Interface())
}
// IsIn reports whether needle occurs in haystack. For arrays/slices it
// compares against elements, for maps against values, and for structs
// against field *names*. Any other haystack kind panics.
func IsIn(needle interface{}, haystack interface{}) bool {
	hv := reflect.ValueOf(haystack)
	switch hv.Kind() {
	case reflect.Array, reflect.Slice:
		length := hv.Len()
		for i := 0; i < length; i++ {
			if reflect.DeepEqual(needle, hv.Index(i).Interface()) {
				return true
			}
		}
		return false
	case reflect.Map:
		for _, key := range hv.MapKeys() {
			if reflect.DeepEqual(needle, hv.MapIndex(key).Interface()) {
				return true
			}
		}
		return false
	case reflect.Struct:
		ht := hv.Type()
		for i := 0; i < ht.NumField(); i++ {
			if reflect.DeepEqual(needle, ht.Field(i).Name) {
				return true
			}
		}
		return false
	}
	panic("[IsIn]haystack type must be array, slice, struct or map")
}
// Type returns the string form of v's dynamic type (e.g. "int", "[]string").
func Type(v interface{}) string {
	dynamic := reflect.TypeOf(v)
	return dynamic.String()
}
// GetStructFields returns the names of v's struct fields in declaration
// order; it panics when v is not a struct (as judged by IsStruct).
func GetStructFields(v interface{}) []string {
	if !IsStruct(v) {
		panic("[GetStructFields] v must be a struct")
	}
	structType := reflect.ValueOf(v).Type()
	names := make([]string, structType.NumField())
	for i := range names {
		names[i] = structType.Field(i).Name
	}
	return names
}
// Get values in struct.
func GetStructValues(v interface{}) []interface{} {
if !IsStruct(v) {
panic("[GetStructValues] v must be a struct")
}
r := reflect.ValueOf(v)
t := r.Type()
res := make([]interface{}, t.NumField())
for i := 0; i < t.NumField(); i++ {
res[i] = r.Field(i).Interface()
}
return res
}
package reflect
import (
"reflect"
)
func IndirectType(reflectType reflect.Type) reflect.Type {
kind := reflectType.Kind()
if kind == reflect.Ptr {
return reflectType.Elem()
}
return reflectType
}
func KindElemType(reflectType reflect.Type) reflect.Kind {
return IndirectType(reflectType).Kind()
}
type CallInParameterFunc func(i int, param reflect.Type) interface{}
// Call in func params
// It panics if the type's Kind is not Func.
func CallInParameterType(reflectType reflect.Type, paramFunc CallInParameterFunc) []interface{} {
// make size = reflectType.NumIn()
results := make([]interface{}, reflectType.NumIn())
for i := 0; i < reflectType.NumIn(); i++ {
values := paramFunc(i, reflectType.In(i))
results[i] = values
//results = append(results, values)
}
return results
}
// CallFieldFunc maps a struct field's position and descriptor to an arbitrary result.
type CallFieldFunc func(i int, field reflect.StructField) interface{}

// CallFieldType invokes fieldFunc for every field of the (pointer-unwrapped)
// struct type and returns the results keyed by field name.
// It panics if the type's Kind is not Struct.
func CallFieldType(reflectType reflect.Type, fieldFunc CallFieldFunc) map[string]interface{} {
	reflectType = IndirectType(reflectType)
	results := map[string]interface{}{}
	for i := 0; i < reflectType.NumField(); i++ {
		field := reflectType.Field(i)
		results[field.Name] = fieldFunc(i, field)
	}
	return results
}
type CallMethodFunc func(i int, method reflect.Method) interface{}
func CallMethodType(reflectType reflect.Type, methodFunc CallMethodFunc) map[string]interface{} {
results := make(map[string]interface{}, 0)
for i := 0; i < reflectType.NumMethod(); i++ {
reflectMethod := reflectType.Method(i)
results[reflectMethod.Name] = methodFunc(i, reflectMethod)
}
return results
}
func Methods(reflectType reflect.Type) map[string]reflect.Method {
methods := make(map[string]reflect.Method, 0)
for i := 0; i < reflectType.NumMethod(); i++ {
method := reflectType.Method(i)
methods[method.Name] = method
}
return methods
}
func MethodExists(reflectType reflect.Type, name string) bool {
_, ok := reflectType.MethodByName(name)
return ok
}
// It panics if the type's Kind is not Struct.
func StructFields(reflectType reflect.Type) map[string]reflect.StructField {
reflectType = IndirectType(reflectType)
fields := make(map[string]reflect.StructField, 0)
for i := 0; i < reflectType.NumField(); i++ {
reflectField := reflectType.Field(i)
fields[reflectField.Name] = reflectField
}
return fields
}
package functions
import (
"strconv"
"strings"
"time"
"github.com/robjporter/go-functions/as"
"github.com/robjporter/go-functions/now"
)
// CurrentMonthName returns the English name of the current month (e.g. "March").
func CurrentMonthName() string {
	month := time.Now().Month()
	return month.String()
}
// CurrentYear returns the current calendar year as a string
// (formatting delegated to the third-party as.ToString helper).
func CurrentYear() string {
	return as.ToString(time.Now().Year())
}
// IsYear returns input when it is a numeric year inside the supported
// 2000-2030 window; otherwise it falls back to the current year.
func IsYear(input string) string {
	if isNumber(input) && isValidYear(input) {
		return input
	}
	return CurrentYear()
}
// isValidYear reports whether input parses as an integer year in the
// supported window 2000-2030 (exclusive bounds 1999 and 2031).
func isValidYear(input string) bool {
	year, err := strconv.ParseInt(input, 10, 64)
	if err != nil {
		return false
	}
	return year > 1999 && year < 2031
}
// isNumber reports whether input parses as a base-10 64-bit integer.
func isNumber(input string) bool {
	_, err := strconv.ParseInt(input, 10, 64)
	return err == nil
}
// IsMonth resolves input — a full month name, a three-letter abbreviation,
// or any prefix in between, case-insensitive — to the canonical capitalized
// month name. It returns "" when nothing matches.
func IsMonth(input string) string {
	candidates := []struct{ short, long, canonical string }{
		{"jan", "january", "January"},
		{"feb", "february", "February"},
		{"mar", "march", "March"},
		{"apr", "april", "April"},
		{"may", "may", "May"},
		{"jun", "june", "June"},
		{"jul", "july", "July"},
		{"aug", "august", "August"},
		{"sep", "september", "September"},
		{"oct", "october", "October"},
		{"nov", "november", "November"},
		{"dec", "december", "December"},
	}
	// Checked in calendar order, matching the original if-chain exactly.
	for _, c := range candidates {
		if monthContains(input, c.short, c.long) {
			return c.canonical
		}
	}
	return ""
}
// monthContains reports whether input (case-insensitive) equals start or
// end, or is a prefix of end at least as long as start — i.e. an
// unambiguous abbreviation of the month name end.
func monthContains(input string, start string, end string) bool {
	candidate := strings.ToLower(input)
	if candidate == start || candidate == end {
		return true
	}
	if len(candidate) < len(start) || len(candidate) > len(end) {
		return false
	}
	return candidate == end[:len(candidate)]
}
// GetTimestampStartOfMonth returns the Unix timestamp (local time zone) of
// the first instant of the given month/year, or 0 when either the month
// name or the year (2000-2030) is invalid.
func GetTimestampStartOfMonth(month string, year int) int64 {
	if getMonthPos(month) > 0 {
		if isValidYear(as.ToString(year)) {
			t := time.Date(year, time.Month(getMonthPos(month)), 1, 0, 0, 0, 0, time.Now().Location())
			return now.New(t).BeginningOfMonth().Unix()
		}
	}
	return 0
}

// GetTimestampEndOfMonth returns the Unix timestamp (local time zone) of
// the last instant of the given month/year, or 0 on invalid input.
func GetTimestampEndOfMonth(month string, year int) int64 {
	if getMonthPos(month) > 0 {
		if isValidYear(as.ToString(year)) {
			t := time.Date(year, time.Month(getMonthPos(month)), 1, 0, 0, 0, 0, time.Now().Location())
			return now.New(t).EndOfMonth().Unix()
		}
	}
	return 0
}

// GetStartOfMonth returns the string form of the first instant of the given
// month/year (as produced by the now package), or "" on invalid input.
func GetStartOfMonth(month string, year int) string {
	if getMonthPos(month) > 0 {
		if isValidYear(as.ToString(year)) {
			t := time.Date(year, time.Month(getMonthPos(month)), 1, 0, 0, 0, 0, time.Now().Location())
			return now.New(t).BeginningOfMonth().String()
		}
	}
	return ""
}

// GetEndOfMonth returns the string form of the last instant of the given
// month/year (as produced by the now package), or "" on invalid input.
func GetEndOfMonth(month string, year int) string {
	if getMonthPos(month) > 0 {
		if isValidYear(as.ToString(year)) {
			t := time.Date(year, time.Month(getMonthPos(month)), 1, 0, 0, 0, 0, time.Now().Location())
			return now.New(t).EndOfMonth().String()
		}
	}
	return ""
}
func getMonthPos(month string) int {
months := []string{"january", "february", "march", "april", "may", "june", "july", "august", "september", "october", "november", "december"}
for i := 0; i < len(months); i++ {
if strings.ToLower(month) == months[i] {
return i + 1
}
}
return 0
}
package onshape
import (
"encoding/json"
)
// BTPTopLevelUserTypeDeclaration288 struct for BTPTopLevelUserTypeDeclaration288
// Generated Onshape API model. It embeds BTPTopLevelTypeDeclaration287 and
// adds the btType discriminator plus a Typecheck name — presumably the user
// type's typecheck predicate; confirm against the Onshape FeatureScript docs.
type BTPTopLevelUserTypeDeclaration288 struct {
	BTPTopLevelTypeDeclaration287
	BtType *string `json:"btType,omitempty"`
	Typecheck *BTPName261 `json:"typecheck,omitempty"`
}
// NewBTPTopLevelUserTypeDeclaration288 returns a freshly allocated, empty
// model object; optional fields start out unset (nil).
func NewBTPTopLevelUserTypeDeclaration288() *BTPTopLevelUserTypeDeclaration288 {
	return &BTPTopLevelUserTypeDeclaration288{}
}

// NewBTPTopLevelUserTypeDeclaration288WithDefaults behaves exactly like
// NewBTPTopLevelUserTypeDeclaration288: the model declares no defaulted
// properties, so both constructors yield an empty object.
func NewBTPTopLevelUserTypeDeclaration288WithDefaults() *BTPTopLevelUserTypeDeclaration288 {
	return &BTPTopLevelUserTypeDeclaration288{}
}
// GetBtType returns the BtType field value if set, zero value otherwise.
func (o *BTPTopLevelUserTypeDeclaration288) GetBtType() string {
if o == nil || o.BtType == nil {
var ret string
return ret
}
return *o.BtType
}
// GetBtTypeOk returns a tuple with the BtType field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPTopLevelUserTypeDeclaration288) GetBtTypeOk() (*string, bool) {
if o == nil || o.BtType == nil {
return nil, false
}
return o.BtType, true
}
// HasBtType returns a boolean if a field has been set.
func (o *BTPTopLevelUserTypeDeclaration288) HasBtType() bool {
if o != nil && o.BtType != nil {
return true
}
return false
}
// SetBtType gets a reference to the given string and assigns it to the BtType field.
func (o *BTPTopLevelUserTypeDeclaration288) SetBtType(v string) {
o.BtType = &v
}
// GetTypecheck returns the Typecheck field value if set, zero value otherwise.
func (o *BTPTopLevelUserTypeDeclaration288) GetTypecheck() BTPName261 {
if o == nil || o.Typecheck == nil {
var ret BTPName261
return ret
}
return *o.Typecheck
}
// GetTypecheckOk returns a tuple with the Typecheck field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPTopLevelUserTypeDeclaration288) GetTypecheckOk() (*BTPName261, bool) {
if o == nil || o.Typecheck == nil {
return nil, false
}
return o.Typecheck, true
}
// HasTypecheck returns a boolean if a field has been set.
func (o *BTPTopLevelUserTypeDeclaration288) HasTypecheck() bool {
if o != nil && o.Typecheck != nil {
return true
}
return false
}
// SetTypecheck gets a reference to the given BTPName261 and assigns it to the Typecheck field.
func (o *BTPTopLevelUserTypeDeclaration288) SetTypecheck(v BTPName261) {
o.Typecheck = &v
}
func (o BTPTopLevelUserTypeDeclaration288) MarshalJSON() ([]byte, error) {
toSerialize := map[string]interface{}{}
serializedBTPTopLevelTypeDeclaration287, errBTPTopLevelTypeDeclaration287 := json.Marshal(o.BTPTopLevelTypeDeclaration287)
if errBTPTopLevelTypeDeclaration287 != nil {
return []byte{}, errBTPTopLevelTypeDeclaration287
}
errBTPTopLevelTypeDeclaration287 = json.Unmarshal([]byte(serializedBTPTopLevelTypeDeclaration287), &toSerialize)
if errBTPTopLevelTypeDeclaration287 != nil {
return []byte{}, errBTPTopLevelTypeDeclaration287
}
if o.BtType != nil {
toSerialize["btType"] = o.BtType
}
if o.Typecheck != nil {
toSerialize["typecheck"] = o.Typecheck
}
return json.Marshal(toSerialize)
}
type NullableBTPTopLevelUserTypeDeclaration288 struct {
value *BTPTopLevelUserTypeDeclaration288
isSet bool
}
func (v NullableBTPTopLevelUserTypeDeclaration288) Get() *BTPTopLevelUserTypeDeclaration288 {
return v.value
}
func (v *NullableBTPTopLevelUserTypeDeclaration288) Set(val *BTPTopLevelUserTypeDeclaration288) {
v.value = val
v.isSet = true
}
func (v NullableBTPTopLevelUserTypeDeclaration288) IsSet() bool {
return v.isSet
}
func (v *NullableBTPTopLevelUserTypeDeclaration288) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableBTPTopLevelUserTypeDeclaration288(val *BTPTopLevelUserTypeDeclaration288) *NullableBTPTopLevelUserTypeDeclaration288 {
return &NullableBTPTopLevelUserTypeDeclaration288{value: val, isSet: true}
}
func (v NullableBTPTopLevelUserTypeDeclaration288) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableBTPTopLevelUserTypeDeclaration288) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | onshape/model_btp_top_level_user_type_declaration_288.go | 0.675658 | 0.439507 | model_btp_top_level_user_type_declaration_288.go | starcoder |
package cryptypes
import "database/sql/driver"
// EncryptedInt32 supports encrypting Int32 data
type EncryptedInt32 struct {
Field
Raw int32
}
// Scan converts the value from the DB into a usable EncryptedInt32 value
func (s *EncryptedInt32) Scan(value interface{}) error {
return decrypt(value.([]byte), &s.Raw)
}
// Value converts an initialized EncryptedInt32 value into a value that can safely be stored in the DB
func (s EncryptedInt32) Value() (driver.Value, error) {
return encrypt(s.Raw)
}
// NullEncryptedInt32 supports encrypting nullable Int32 data
type NullEncryptedInt32 struct {
Field
Raw int32
Empty bool
}
// Scan converts the value from the DB into a usable NullEncryptedInt32 value
func (s *NullEncryptedInt32) Scan(value interface{}) error {
if value == nil {
s.Raw = 0
s.Empty = true
return nil
}
return decrypt(value.([]byte), &s.Raw)
}
// Value converts an initialized NullEncryptedInt32 value into a value that can safely be stored in the DB
func (s NullEncryptedInt32) Value() (driver.Value, error) {
if s.Empty {
return nil, nil
}
return encrypt(s.Raw)
}
// SignedInt32 supports signing Int32 data
type SignedInt32 struct {
Field
Raw int32
Valid bool
}
// Scan converts the value from the DB into a usable SignedInt32 value
func (s *SignedInt32) Scan(value interface{}) (err error) {
s.Valid, err = verify(value.([]byte), &s.Raw)
return
}
// Value converts an initialized SignedInt32 value into a value that can safely be stored in the DB
func (s SignedInt32) Value() (driver.Value, error) {
return sign(s.Raw)
}
// NullSignedInt32 supports signing nullable Int32 data
type NullSignedInt32 struct {
Field
Raw int32
Empty bool
Valid bool
}
// Scan converts the value from the DB into a usable NullSignedInt32 value
func (s *NullSignedInt32) Scan(value interface{}) (err error) {
if value == nil {
s.Raw = 0
s.Empty = true
s.Valid = true
return nil
}
s.Valid, err = verify(value.([]byte), &s.Raw)
return
}
// Value converts an initialized NullSignedInt32 value into a value that can safely be stored in the DB
func (s NullSignedInt32) Value() (driver.Value, error) {
if s.Empty {
return nil, nil
}
return sign(s.Raw)
}
// SignedEncryptedInt32 supports signing and encrypting Int32 data
type SignedEncryptedInt32 struct {
Field
Raw int32
Valid bool
}
// Scan converts the value from the DB into a usable SignedEncryptedInt32 value
func (s *SignedEncryptedInt32) Scan(value interface{}) (err error) {
s.Valid, err = decryptVerify(value.([]byte), &s.Raw)
return
}
// Value converts an initialized SignedEncryptedInt32 value into a value that can safely be stored in the DB
func (s SignedEncryptedInt32) Value() (driver.Value, error) {
return encryptSign(s.Raw)
}
// NullSignedEncryptedInt32 supports signing and encrypting nullable Int32 data
type NullSignedEncryptedInt32 struct {
Field
Raw int32
Empty bool
Valid bool
}
// Scan converts the value from the DB into a usable NullSignedEncryptedInt32 value
func (s *NullSignedEncryptedInt32) Scan(value interface{}) (err error) {
if value == nil {
s.Raw = 0
s.Empty = true
s.Valid = true
return nil
}
s.Valid, err = decryptVerify(value.([]byte), &s.Raw)
return
}
// Value converts an initialized NullSignedEncryptedInt32 value into a value that can safely be stored in the DB
func (s NullSignedEncryptedInt32) Value() (driver.Value, error) {
if s.Empty {
return nil, nil
}
return encryptSign(s.Raw)
} | cryptypes/type_int32.go | 0.799521 | 0.566198 | type_int32.go | starcoder |
package main
import (
"fmt"
"math"
"sort"
"github.com/kissgyorgy/adventofcode2019/point"
)
const (
// mapFile = "day10-example3.txt"
mapFile = "day10-input.txt"
)
var (
// this comes from solution of part1
selectedPoint = point.Point{26, 36}
// selectedPoint = point.Point{3, 4} // example1
// selectedPoint = point.Point{11, 13} // example3
yAxisVector = point.Point{0, -1}
)
type pointProps struct {
point point.Point // the original point
relativePoint point.Point // relative to the Point we subtracted
angleToY float64
relativeLength float64
}
// This will calculate the full degrees between the starting point and the relative point
// so return value will be between 0 and 2π
func calculateFullAngle(start, p2 point.Point) float64 {
// 0 <= angle <= π, see: https://en.wikipedia.org/wiki/Inverse_trigonometric_functions#arccos
angle := float64(point.Angle(start, p2))
fmt.Printf("P: %v Angle: %v\n", p2, angle*180/math.Pi)
// this is needed so we can have a number between 0 and 2π
// because rotation starts upside, everything to the left is π+x rad
// so it will come later when sorted.
if start.X > p2.X {
angle = 2*math.Pi - angle
}
// we need this, because otherwise the results wouldn't match with example3
return math.Round(angle*100000) / 100000
}
func main() {
asteroidMap := loadMap(mapFile)
printMap(asteroidMap)
asteroidCoords := convertMapToPoints(asteroidMap)
asteroidProps := make([]pointProps, 0, len(asteroidCoords)-1)
for _, p := range asteroidCoords {
if p == selectedPoint {
continue
}
relativePoint := p.Minus(selectedPoint)
props := pointProps{
point: p,
relativePoint: relativePoint,
angleToY: calculateFullAngle(yAxisVector, relativePoint),
relativeLength: relativePoint.Length(),
}
asteroidProps = append(asteroidProps, props)
}
sort.Slice(asteroidProps, func(i, j int) bool {
pi, pj := asteroidProps[i], asteroidProps[j]
if pi.angleToY == pj.angleToY {
return pi.relativeLength < pj.relativeLength
}
return pi.angleToY < pj.angleToY
})
fmt.Println("Sorted:")
for _, p := range asteroidProps {
fmt.Println(p)
}
fmt.Println("Result:")
nth := 1
for len(asteroidProps) > 0 {
var prevAngle float64 = 999
for i := 0; i < len(asteroidProps); i++ {
props := asteroidProps[i]
if prevAngle == props.angleToY {
continue
}
fmt.Printf("%d. %v\n", nth, props)
nth++
if i < len(asteroidProps)-1 {
asteroidProps = append(asteroidProps[:i], asteroidProps[i+1:]...)
i--
} else {
asteroidProps = asteroidProps[:len(asteroidProps)-1]
}
prevAngle = props.angleToY
}
}
} | day10/day10part2.go | 0.608129 | 0.457985 | day10part2.go | starcoder |
package solutions
import (
"fmt"
"github.com/encero/advent-of-code-2021/helpers"
)
func Day11DumboOcto() error {
var input = [][]DumboOctopus{
{{PowerLevel: 6}, {PowerLevel: 6}, {PowerLevel: 1}, {PowerLevel: 7}, {PowerLevel: 1}, {PowerLevel: 1}, {PowerLevel: 3}, {PowerLevel: 5}, {PowerLevel: 8}, {PowerLevel: 4}},
{{PowerLevel: 6}, {PowerLevel: 5}, {PowerLevel: 4}, {PowerLevel: 4}, {PowerLevel: 2}, {PowerLevel: 1}, {PowerLevel: 8}, {PowerLevel: 6}, {PowerLevel: 3}, {PowerLevel: 8}},
{{PowerLevel: 5}, {PowerLevel: 4}, {PowerLevel: 5}, {PowerLevel: 7}, {PowerLevel: 3}, {PowerLevel: 3}, {PowerLevel: 1}, {PowerLevel: 4}, {PowerLevel: 8}, {PowerLevel: 8}},
{{PowerLevel: 1}, {PowerLevel: 1}, {PowerLevel: 3}, {PowerLevel: 5}, {PowerLevel: 6}, {PowerLevel: 7}, {PowerLevel: 5}, {PowerLevel: 5}, {PowerLevel: 8}, {PowerLevel: 7}},
{{PowerLevel: 1}, {PowerLevel: 2}, {PowerLevel: 2}, {PowerLevel: 1}, {PowerLevel: 3}, {PowerLevel: 5}, {PowerLevel: 3}, {PowerLevel: 2}, {PowerLevel: 1}, {PowerLevel: 6}},
{{PowerLevel: 1}, {PowerLevel: 8}, {PowerLevel: 1}, {PowerLevel: 1}, {PowerLevel: 1}, {PowerLevel: 2}, {PowerLevel: 4}, {PowerLevel: 3}, {PowerLevel: 7}, {PowerLevel: 8}},
{{PowerLevel: 1}, {PowerLevel: 3}, {PowerLevel: 8}, {PowerLevel: 7}, {PowerLevel: 8}, {PowerLevel: 6}, {PowerLevel: 4}, {PowerLevel: 3}, {PowerLevel: 6}, {PowerLevel: 8}},
{{PowerLevel: 4}, {PowerLevel: 4}, {PowerLevel: 2}, {PowerLevel: 7}, {PowerLevel: 6}, {PowerLevel: 3}, {PowerLevel: 7}, {PowerLevel: 2}, {PowerLevel: 6}, {PowerLevel: 2}},
{{PowerLevel: 6}, {PowerLevel: 7}, {PowerLevel: 7}, {PowerLevel: 8}, {PowerLevel: 6}, {PowerLevel: 4}, {PowerLevel: 5}, {PowerLevel: 4}, {PowerLevel: 8}, {PowerLevel: 6}},
{{PowerLevel: 3}, {PowerLevel: 6}, {PowerLevel: 8}, {PowerLevel: 2}, {PowerLevel: 1}, {PowerLevel: 4}, {PowerLevel: 6}, {PowerLevel: 7}, {PowerLevel: 4}, {PowerLevel: 5}},
}
simulator := NewDumboSimulator(input)
for count := 0; count < 100; count++ {
simulator.Step()
}
fmt.Println("Day 11 Part1: ", simulator.Flashes())
for {
simulator.Step()
if simulator.LastStepFlashes() == 100 {
break
}
}
fmt.Println("Day 11 Part 2: ", simulator.StepCount())
return nil
}
type DumboOctopus struct {
PowerLevel int
RecentlyFlashed bool
}
type DumboSimulator struct {
grid [][]DumboOctopus
flashCount int
simulationSteps int
lastFlashCount int
}
func NewDumboSimulator(grid [][]DumboOctopus) *DumboSimulator {
return &DumboSimulator{
grid: grid,
flashCount: 0,
}
}
func (d *DumboSimulator) Step() {
d.lastFlashCount = 0
d.simulationSteps++
flashy := d.raisePowerLevels()
if len(flashy) == 0 {
return
}
d.flashTheOctos(flashy)
}
func (d *DumboSimulator) Flashes() int {
return d.flashCount
}
func (d DumboSimulator) LastStepFlashes() int {
return d.lastFlashCount
}
func (d DumboSimulator) StepCount() int {
return d.simulationSteps
}
func (d *DumboSimulator) raisePowerLevels() []Point {
var flashy []Point
for x := 0; x < len(d.grid); x++ {
for y := 0; y < len(d.grid[x]); y++ {
d.grid[x][y].PowerLevel += 1
d.grid[x][y].RecentlyFlashed = false
if d.grid[x][y].PowerLevel > 9 {
flashy = append(flashy, Point{x, y})
}
}
}
return flashy
}
func (d *DumboSimulator) flashTheOctos(flashy []Point) {
for len(flashy) > 0 {
nei := d.flashOne(flashy[0])
flashy = append(flashy, nei...)
flashy = flashy[1:]
}
}
func (d *DumboSimulator) flashOne(point Point) []Point {
theOne := d.grid[point.X][point.Y]
if theOne.RecentlyFlashed {
return []Point{}
}
if theOne.PowerLevel <= 9 {
panic("Flashy octo is not charged up!")
}
theOne.RecentlyFlashed = true
theOne.PowerLevel = 0
d.flashCount++
d.lastFlashCount++
d.grid[point.X][point.Y] = theOne
var nextFlashies = []Point{}
for _, direction := range helpers.EightDirections {
neiX := point.X + direction.X
neiY := point.Y + direction.Y
if neiX < 0 || neiY < 0 || neiX >= len(d.grid) || neiY >= len(d.grid[neiX]) {
continue
}
neighbour := d.grid[neiX][neiY]
if neighbour.RecentlyFlashed {
continue
}
neighbour.PowerLevel += 1
if neighbour.PowerLevel > 9 {
nextFlashies = append(nextFlashies, Point{neiX, neiY})
}
d.grid[neiX][neiY] = neighbour
}
return nextFlashies
}
func (d DumboSimulator) Dump() {
for x := 0; x < len(d.grid); x++ {
for y := 0; y < len(d.grid[x]); y++ {
fmt.Print(d.grid[x][y].PowerLevel)
}
fmt.Println()
}
fmt.Println("==========================")
} | solutions/day11_dumbo.go | 0.500732 | 0.6971 | day11_dumbo.go | starcoder |
package dmap
import (
"ditto/dfs"
"fmt"
"github.com/pterm/pterm"
)
type Md5Hash string
// Dmap structure will hold our file duplication data.
// It is the primary data structure that will house the results
// that will eventually be returned to the user.
type Dmap struct {
filesMap map[Md5Hash][]string
fileCount uint
}
// NewDmap returns a new Dmap structure.
func NewDmap() (*Dmap, error) {
dmap := &Dmap{
fileCount: 0,
}
// Initialize our map.
dmap.filesMap = make(map[Md5Hash][]string)
return dmap, nil
}
// Add will take a dfile and add it the map.
func (d *Dmap) Add(dfile *dfs.Dfile) {
d.filesMap[Md5Hash(dfile.Hash())] = append(d.filesMap[Md5Hash(dfile.Hash())], dfile.FileName())
d.fileCount++
}
// PrintDmap will print entries currently stored in map.
func (d *Dmap) PrintDmap() {
for k, v := range d.filesMap {
if len(v) < 2 {
continue
}
fmt.Printf("Hash: %s Files: \n", k)
for i, f := range v {
fmt.Printf("\t%d: %s \n", i, f)
}
fmt.Println("--------------------------")
}
}
// ShowResults will display duplicates held in our Dmap as
// a pretty tree.
func (d *Dmap) ShowResults() {
// Banner
var leveledList pterm.LeveledList
for hash, files := range d.filesMap {
// Only show files that have at least one other duplicate.
if len(files) < 2 {
continue
}
// Our hash value will be our level 0 item from which all duplicate files
// will be subitems.
listItem := pterm.LeveledListItem{Level: 0, Text: string(hash)}
leveledList = append(leveledList, listItem)
for _, f := range files {
listItem = pterm.LeveledListItem{Level: 1, Text: f}
leveledList = append(leveledList, listItem)
}
}
root := pterm.NewTreeFromLeveledList(leveledList)
pterm.DefaultTree.WithRoot(root).Render()
}
// MapSize returns number of entries in the map.
func (d *Dmap) MapSize() int {
return len(d.filesMap)
}
// FileCount will return the number of files our map currently
// references.
func (d *Dmap) FileCount() uint {
return d.fileCount
}
// Get will get slice of files associated with hash.
func (d *Dmap) Get(hash Md5Hash) (files []string, err error) {
res, ok := d.filesMap[hash]
if !ok {
return []string{}, err
}
return res, nil
} | dmap/dmap.go | 0.585931 | 0.462716 | dmap.go | starcoder |
package blurhash
import (
"math"
"strings"
"github.com/sergeisadov/blurhash/internal/base83"
"github.com/sergeisadov/blurhash/pkg/utils"
)
const bytesPerPixel = 4
const (
minComponent = 1
maxComponent = 9
)
// Encode method takes custom img params and returns blurhash string as result
// components must be in interval from 1 to 9 to pass method's validation
func Encode(img *utils.Img) (string, error) {
if img.ComponentX < minComponent || img.ComponentX > maxComponent || img.ComponentY < minComponent || img.ComponentY > maxComponent {
return "", ErrIncorrectComponents
}
if img.Width*img.Height*4 != len(img.Pixels) {
return "", ErrWrongSize
}
factors := make([][]float64, 0, img.ComponentX)
for i := range factors {
factors[i] = make([]float64, 0, 3)
}
hashSB := strings.Builder{}
for y := 0; y < img.ComponentY; y++ {
for x := 0; x < img.ComponentX; x++ {
normalization := 2
if x == 0 && y == 0 {
normalization = 1
}
factor := multiplyBasisFunction(img.Pixels, img.Width, img.Height, func(i, j int) float64 {
return float64(normalization) *
math.Cos(math.Pi*float64(x)*float64(i)/float64(img.Width)) *
math.Cos(math.Pi*float64(y)*float64(j)/float64(img.Height))
})
factors = append(factors, factor)
}
}
dc := factors[0]
ac := factors[1:]
hashSB.Grow(1 + 1 + 4 + 2*(len(factors)-1)*9)
sizeFlag := img.ComponentX - 1 + (img.ComponentY-1)*9
hashSB.WriteString(base83.Encode83(float64(sizeFlag), 1))
var maximumValue float64
if len(ac) > 0 {
actualMaximumValue := max(ac)
quantisedMaximumValue := math.Floor(math.Max(0, math.Min(82, math.Floor(actualMaximumValue*166-0.5))))
maximumValue = (quantisedMaximumValue + 1) / 166
hashSB.WriteString(base83.Encode83(math.Round(quantisedMaximumValue), 1))
} else {
maximumValue = 1
hashSB.WriteString(base83.Encode83(0, 1))
}
hashSB.WriteString(base83.Encode83(float64(encodeDC(dc)), 4))
for i := range ac {
hashSB.WriteString(base83.Encode83(encodeAC(ac[i], maximumValue), 2))
}
return hashSB.String(), nil
}
func sRGBToLinear(value float64) float64 {
v := value / 255
if v <= 0.04045 {
return v / 12.92
}
return math.Pow((v+0.055)/1.055, 2.4)
}
func linearTosRGB(value float64) float64 {
v := math.Max(0, math.Min(1, value))
if v <= 0.0031308 {
return math.Round(v*12.92*255 + 0.5)
}
return math.Round((1.055*math.Pow(v, 1/2.4)-0.055)*255 + 0.5)
}
func sign(n float64) float64 {
if n < 0 {
return -1
}
return 1
}
func signPow(val, exp float64) float64 {
return sign(val) * math.Pow(math.Abs(val), exp)
}
func multiplyBasisFunction(pixels []uint8, width, height int, basisFunc func(i, j int) float64) []float64 {
r := float64(0)
g := float64(0)
b := float64(0)
bytesPerRow := width * bytesPerPixel
for x := 0; x < width; x++ {
for y := 0; y < height; y++ {
basis := basisFunc(x, y)
r += basis * sRGBToLinear(float64(pixels[bytesPerPixel*x+0+y*bytesPerRow]))
g += basis * sRGBToLinear(float64(pixels[bytesPerPixel*x+1+y*bytesPerRow]))
b += basis * sRGBToLinear(float64(pixels[bytesPerPixel*x+2+y*bytesPerRow]))
}
}
scale := float64(1) / (float64(width) * float64(height))
return []float64{r * scale, g * scale, b * scale}
}
func max(input [][]float64) float64 {
var max float64
for i := range input {
for j := range input[i] {
if max < input[i][j] {
max = input[i][j]
}
}
}
return max
}
func encodeDC(val []float64) int {
roundedR := int(linearTosRGB(val[0]))
roundedG := int(linearTosRGB(val[1]))
roundedB := int(linearTosRGB(val[2]))
return (roundedR << 16) + (roundedG << 8) + roundedB
}
func encodeAC(value []float64, maximumValue float64) float64 {
exponent := 0.5
minX := float64(18)
quantR := math.Floor(math.Max(0,
math.Min(minX, math.Floor(signPow(value[0]/maximumValue, exponent)*9+9.5))))
quantG := math.Floor(math.Max(0,
math.Min(minX, math.Floor(signPow(value[1]/maximumValue, exponent)*9+9.5))))
quantB := math.Floor(math.Max(0,
math.Min(minX, math.Floor(signPow(value[2]/maximumValue, exponent)*9+9.5))))
return quantR*19*19 + quantG*19 + quantB
} | pkg/blurhash/encode.go | 0.781289 | 0.416322 | encode.go | starcoder |
package objects
//Binary (0.0.0.0/4): Binary protocols
//This is a superclass for classes that are generally unreadable in their plain form and require translation.
const PONumBinary = 0
const PODFMaskBinary = `0.0.0.0/4`
const PODFBinary = `0.0.0.0`
const POMaskBinary = 4
//Text (192.168.127.12/4): Human readable text
//This is a superclass for classes that are moderately understandable if they are read directly in their binary form. Generally these are protocols that were designed specifically to be human readable.
const PONumText = 1073741824
const PODFMaskText = `192.168.127.12/4`
const PODFText = `192.168.127.12`
const POMaskText = 4
//Blob (1.0.0.0/8): Blob
//This is a class for schemas that do not use a public encoding format. In general it should be avoided. Schemas below this should include the key "readme" with a url to a description of the schema that is sufficiently detailed to allow for a developer to reverse engineer the protocol if required.
const PONumBlob = 16777216
const PODFMaskBlob = `1.0.0.0/8`
const PODFBlob = `1.0.0.0`
const POMaskBlob = 8
//MsgPack (2.0.0.0/8): MsgPack
//This class is for schemas that are represented in MsgPack
const PONumMsgPack = 33554432
const PODFMaskMsgPack = `2.0.0.0/8`
const PODFMsgPack = `2.0.0.0`
const POMaskMsgPack = 8
//CapnP (3.0.0.0/8): Captain Proto
//This class is for captain proto interfaces. Schemas below this should include the key "schema" with a url to their .capnp file
const PONumCapnP = 50331648
const PODFMaskCapnP = `3.0.0.0/8`
const PODFCapnP = `3.0.0.0`
const POMaskCapnP = 8
//JSON (172.16.31.10/8): JSON
//This class is for schemas that are represented in JSON
const PONumJSON = 1090519040
const PODFMaskJSON = `172.16.31.10/8`
const PODFJSON = `172.16.31.10`
const POMaskJSON = 8
//XML (192.168.127.12/8): XML
//This class is for schemas that are represented in XML
const PONumXML = 1107296256
const PODFMaskXML = `192.168.127.12/8`
const PODFXML = `192.168.127.12`
const POMaskXML = 8
//YAML (192.168.3.11/8): YAML
//This class is for schemas that are represented in YAML
const PONumYAML = 1124073472
const PODFMaskYAML = `192.168.3.11/8`
const PODFYAML = `192.168.3.11`
const POMaskYAML = 8
//BWRoutingObject (0.0.0.0/24): Bosswave Routing Object
//This class and schema block is reserved for bosswave routing objects represented using the full PID.
const PONumBWRoutingObject = 0
const PODFMaskBWRoutingObject = `0.0.0.0/24`
const PODFBWRoutingObject = `0.0.0.0`
const POMaskBWRoutingObject = 24
//LogDict (2.0.1.0/24): LogDict
//This class is for log messages encoded in msgpack
const PONumLogDict = 33554688
const PODFMaskLogDict = `2.0.1.0/24`
const PODFLogDict = `2.0.1.0`
const POMaskLogDict = 24
//TSTaggedMP (2.0.3.0/24): TSTaggedMP
//This superclass describes "ts"->int64 tagged msgpack objects. The timestamp is used for merging entries and determining which is later and should be the final value.
const PONumTSTaggedMP = 33555200
const PODFMaskTSTaggedMP = `2.0.3.0/24`
const PODFTSTaggedMP = `2.0.3.0`
const POMaskTSTaggedMP = 24
//HamiltonBase (2.0.4.0/24): Hamilton Messages
//This is the base class for messages used with the Hamilton motes. The only key guaranteed is "#" that contains a uint16 representation of the serial of the mote the message is destined for or originated from.
const PONumHamiltonBase = 33555456
const PODFMaskHamiltonBase = `2.0.4.0/24`
const PODFHamiltonBase = `2.0.4.0`
const POMaskHamiltonBase = 24
//HamiltonTelemetry (2.0.4.64/26): Hamilton Telemetry
//This object contains a "#" field for the serial number, as well as possibly containing an "A" field with a list of X, Y, and Z accelerometer values. A "T" field containing the temperature as an integer in degrees C multiplied by 10000, and an "L" field containing the illumination in Lux.
const PONumHamiltonTelemetry = 33555520
const PODFMaskHamiltonTelemetry = `2.0.4.64/26`
const PODFHamiltonTelemetry = `2.0.4.64`
const POMaskHamiltonTelemetry = 26
//ROAccessDChainHash (0.0.0.1/32): Access DChain hash
//An access dchain hash
const PONumROAccessDChainHash = 1
const PODFMaskROAccessDChainHash = `0.0.0.1/32`
const PODFROAccessDChainHash = `0.0.0.1`
const POMaskROAccessDChainHash = 32
//ROAccessDChain (0.0.0.2/32): Access DChain
//An access dchain
const PONumROAccessDChain = 2
const PODFMaskROAccessDChain = `0.0.0.2/32`
const PODFROAccessDChain = `0.0.0.2`
const POMaskROAccessDChain = 32
//ROPermissionDChainHash (0.0.0.17/32): Permission DChain hash
//A permission dchain hash
const PONumROPermissionDChainHash = 17
const PODFMaskROPermissionDChainHash = `0.0.0.17/32`
const PODFROPermissionDChainHash = `0.0.0.17`
const POMaskROPermissionDChainHash = 32
//ROPermissionDChain (0.0.0.18/32): Permission DChain
//A permission dchain
const PONumROPermissionDChain = 18
const PODFMaskROPermissionDChain = `0.0.0.18/32`
const PODFROPermissionDChain = `0.0.0.18`
const POMaskROPermissionDChain = 32
//ROAccessDOT (0.0.0.32/32): Access DOT
//An access DOT
const PONumROAccessDOT = 32
const PODFMaskROAccessDOT = `0.0.0.32/32`
const PODFROAccessDOT = `0.0.0.32`
const POMaskROAccessDOT = 32
//ROPermissionDOT (0.0.0.33/32): Permission DOT
//A permission DOT
const PONumROPermissionDOT = 33
const PODFMaskROPermissionDOT = `0.0.0.33/32`
const PODFROPermissionDOT = `0.0.0.33`
const POMaskROPermissionDOT = 32
//ROEntity (0.0.0.48/32): Entity
//An entity
const PONumROEntity = 48
const PODFMaskROEntity = `0.0.0.48/32`
const PODFROEntity = `0.0.0.48`
const POMaskROEntity = 32
//ROOriginVK (0.0.0.49/32): Origin verifying key
//The origin VK of a message that does not contain a PAC
const PONumROOriginVK = 49
const PODFMaskROOriginVK = `0.0.0.49/32`
const PODFROOriginVK = `0.0.0.49`
const POMaskROOriginVK = 32
//ROEntityWKey (0.0.0.50/32): Entity with signing key
//An entity with signing key
const PONumROEntityWKey = 50
const PODFMaskROEntityWKey = `0.0.0.50/32`
const PODFROEntityWKey = `0.0.0.50`
const POMaskROEntityWKey = 32
//RODRVK (0.0.0.51/32): Designated router verifying key
//a 32 byte designated router verifying key
const PONumRODRVK = 51
const PODFMaskRODRVK = `0.0.0.51/32`
const PODFRODRVK = `0.0.0.51`
const POMaskRODRVK = 32
//ROExpiry (0.0.0.64/32): Expiry
//Sets an expiry for the message
const PONumROExpiry = 64
const PODFMaskROExpiry = `0.0.0.64/32`
const PODFROExpiry = `0.0.0.64`
const POMaskROExpiry = 32
//RORevocation (0.0.0.80/32): Revocation
//A revocation for an Entity or a DOT
const PONumRORevocation = 80
const PODFMaskRORevocation = `0.0.0.80/32`
const PODFRORevocation = `0.0.0.80`
const POMaskRORevocation = 32
//BinaryActuation (1.0.1.0/32): Binary actuation
//This payload object is one byte long, 0x00 for off, 0x01 for on.
const PONumBinaryActuation = 16777472
const PODFMaskBinaryActuation = `1.0.1.0/32`
const PODFBinaryActuation = `1.0.1.0`
const POMaskBinaryActuation = 32
//BWMessage (1.0.1.1/32): Packed Bosswave Message
//This object contains an entire signed and encoded bosswave message
const PONumBWMessage = 16777473
const PODFMaskBWMessage = `1.0.1.1/32`
const PODFBWMessage = `1.0.1.1`
const POMaskBWMessage = 32
//Double (1.0.2.0/32): Double
//This payload is an 8 byte long IEEE 754 double floating point value encoded in little endian. This should only be used if the semantic meaning is obvious in the context, otherwise a PID with a more specific semantic meaning should be used.
const PONumDouble = 16777728
const PODFMaskDouble = `1.0.2.0/32`
const PODFDouble = `1.0.2.0`
const POMaskDouble = 32
//Wavelet (1.0.6.1/32): Wavelet binary
//This object contains a BOSSWAVE Wavelet
const PONumWavelet = 16778753
const PODFMaskWavelet = `1.0.6.1/32`
const PODFWavelet = `1.0.6.1`
const POMaskWavelet = 32
//SpawnpointLog (2.0.2.0/32): Spawnpoint stdout
//This contains stdout data from a spawnpoint container. It is a msgpacked dictionary that contains a "service" key, a "time" key (unix nano timestamp) and a "contents" key and a "spalias" key.
const PONumSpawnpointLog = 33554944
const PODFMaskSpawnpointLog = `2.0.2.0/32`
const PODFSpawnpointLog = `2.0.2.0`
const POMaskSpawnpointLog = 32
//SMetadata (2.0.3.1/32): Simple Metadata entry
//This contains a simple "val" string and "ts" int64 metadata entry. The key is determined by the URI. Other information MAY be present in the msgpacked object. The timestamp is used for merging metadata entries.
const PONumSMetadata = 33555201
const PODFMaskSMetadata = `2.0.3.1/32`
const PODFSMetadata = `2.0.3.1`
const POMaskSMetadata = 32
//HSBLightMessage (2.0.5.1/32): HSBLight Message
//This object may contain "hue", "saturation", "brightness" fields with a float from 0 to 1. It may also contain an "on" key with a boolean. Omitting fields leaves them at their previous state.
const PONumHSBLightMessage = 33555713
const PODFMaskHSBLightMessage = `2.0.5.1/32`
const PODFHSBLightMessage = `2.0.5.1`
const POMaskHSBLightMessage = 32
//InterfaceDescriptor (2.0.6.1/32): InterfaceDescriptor
//This object is used to describe an interface. It contains "uri", "iface","svc","namespace" "prefix" and "metadata" keys.
const PONumInterfaceDescriptor = 33555969
const PODFMaskInterfaceDescriptor = `2.0.6.1/32`
const PODFInterfaceDescriptor = `2.0.6.1`
const POMaskInterfaceDescriptor = 32
//String (192.168.127.12/32): String
//A plain string with no rigid semantic meaning. This can be thought of as a print statement. Anything that has semantic meaning like a process log should use a different schema.
const PONumString = 1073742080
const PODFMaskString = `192.168.127.12/32`
const PODFString = `192.168.127.12`
const POMaskString = 32
//FMDIntentString (172.16.58.3/32): FMD Intent String
//A plain string used as an intent for the follow-me display service.
const PONumFMDIntentString = 1073742081
const PODFMaskFMDIntentString = `172.16.58.3/32`
const PODFFMDIntentString = `172.16.58.3`
const POMaskFMDIntentString = 32
//AccountBalance (192.168.127.12/32): Account balance
//A comma seperated representation of an account and its balance as addr,decimal,human_readable. For example 0x49b1d037c33fdaad75d2532cd373fb5db87cc94c,57203431159181996982272,57203.4311 Ether . Be careful in that the decimal representation will frequently be bigger than an int64.
const PONumAccountBalance = 1073742082
const PODFMaskAccountBalance = `192.168.127.12/32`
const PODFAccountBalance = `192.168.127.12`
const POMaskAccountBalance = 32
//SpawnpointConfig (172.16.58.3/32): SpawnPoint config
//A configuration file for SpawnPoint (github.com/immesys/spawnpoint)
const PONumSpawnpointConfig = 1124073984
const PODFMaskSpawnpointConfig = `172.16.58.3/32`
const PODFSpawnpointConfig = `172.16.58.3`
const POMaskSpawnpointConfig = 32
//SpawnpointHeartbeat (192.168.3.11/32): SpawnPoint heartbeat
//A heartbeat message from spawnpoint
const PONumSpawnpointHeartbeat = 1124073985
const PODFMaskSpawnpointHeartbeat = `192.168.3.11/32`
const PODFSpawnpointHeartbeat = `192.168.3.11`
const POMaskSpawnpointHeartbeat = 32 | vendor/github.com/immesys/bw2/objects/poSymNames.go | 0.679604 | 0.486819 | poSymNames.go | starcoder |
package main
/*
You are given two strings s and p where p is a subsequence of s.
You are also given a distinct 0-indexed integer array removable containing a subset of indices of s (s is also 0-indexed).
You want to choose an integer k (0 <= k <= removable.length) such that,
after removing k characters from s using the first k indices in removable, p is still a subsequence of s.
More formally, you will mark the character at s[removable[i]] for each 0 <= i < k,
then remove all marked characters and check if p is still a subsequence.
Return the maximum k you can choose such that p is still a subsequence of s after the removals.
A subsequence of a string is a new string generated from the original string with
some characters (can be none) deleted without changing the relative order of the remaining characters.
Example 1:
Input: s = "abcacb", p = "ab", removable = [3,1,0]
Output: 2
Explanation: After removing the characters at indices 3 and 1, "abcacb" becomes "accb".
"ab" is a subsequence of "accb".
If we remove the characters at indices 3, 1, and 0, "abcacb" becomes "ccb", and "ab" is no longer a subsequence.
Hence, the maximum k is 2.
Example 2:
Input: s = "abcbddddd", p = "abcd", removable = [3,2,1,4,5,6]
Output: 1
Explanation: After removing the character at index 3, "abcbddddd" becomes "abcddddd".
"abcd" is a subsequence of "abcddddd".
Example 3:
Input: s = "abcab", p = "abc", removable = [0,1,2,3,4]
Output: 0
Explanation: If you remove the first index in the array removable, "abc" is no longer a subsequence.
Constraints:
1 <= p.length <= s.length <= 105
0 <= removable.length < s.length
0 <= removable[i] < s.length
p is a subsequence of s.
s and p both consist of lowercase English letters.
The elements in removable are distinct.
*/
// isSubSequence reports whether p is a subsequence of s once every index
// flagged true in removedLetters is treated as deleted from s.
func isSubSequence(s string, p string, removedLetters map[int]bool) bool {
	matched := 0
	for pos := 0; pos < len(s) && matched < len(p); pos++ {
		if removedLetters[pos] {
			// This character has been removed; skip it entirely.
			continue
		}
		if s[pos] == p[matched] {
			matched++
		}
	}
	return matched == len(p)
}
// maximumRemovals returns the largest k such that p is still a subsequence of
// s after deleting the characters at the first k indices of removable.
//
// Removing more characters can only make the subsequence check harder, so
// feasibility is monotone in k and binary search applies. This replaces the
// original linear scan, which re-ran the O(n) check for every k (O(n*k); up
// to 1e10 operations at the stated constraints).
//
// time O(n log k), space O(n), where n is len(s) and k is len(removable).
func maximumRemovals(s string, p string, removable []int) int {
	// removalOrder[i] is the step at which s[i] is removed;
	// len(removable) means "never removed".
	removalOrder := make([]int, len(s))
	for i := range removalOrder {
		removalOrder[i] = len(removable)
	}
	for step, idx := range removable {
		removalOrder[idx] = step
	}
	// survives reports whether p is a subsequence of s after the first k removals.
	survives := func(k int) bool {
		j := 0
		for i := 0; i < len(s) && j < len(p); i++ {
			if removalOrder[i] >= k && s[i] == p[j] {
				j++
			}
		}
		return j == len(p)
	}
	// Binary search for the largest feasible k in [0, len(removable)].
	lo, hi := 0, len(removable)
	for lo < hi {
		mid := (lo + hi + 1) / 2
		if survives(mid) {
			lo = mid
		} else {
			hi = mid - 1
		}
	}
	return lo
}
// main is intentionally empty; the solution functions above are exercised via tests.
func main() {
} | golang/algorithms/others/maximum_number_of_removable_characters/main.go | 0.84124 | 0.596727 | main.go | starcoder |
package data
import (
"time"
)
// Condition defines parameters to look for in a sample: it identifies a
// point (by node, point type/ID/index) and the comparison applied to its
// value before the condition is considered active.
// NOTE(review): the original comment referenced SampleType/SampleID fields
// that do not exist on this struct — presumably it predates a rename; verify.
type Condition struct {
	ID             string // unique identifier for this condition
	Description    string
	NodeID         string // node whose points are inspected
	PointType      string
	PointID        string
	PointIndex     int // -1 when unset (see NodeToRule)
	PointValueType string
	Operator       string // comparison operator applied to the point value
	PointValue     float64
	PointTextValue string
	MinTimeActive  float64 // minimum time the condition must hold — units not shown here; confirm
	Active         bool    // whether the condition is currently satisfied
}
// Action defines actions that can be taken if a rule is active.
// Template can optionally be used to customize the message that is sent and
// uses Io Type or IDs to fill in the values. Example might be:
// JamMonitoring: Alert: {{ description }} is in ALARM state with tank level of {{ tankLevel }}.
// NOTE(review): no Template field exists on this struct — the templating
// described above must be implemented elsewhere; confirm the comment's intent.
type Action struct {
	ID             string // unique identifier for this action
	Description    string
	Action         string // the action type to perform
	NodeID         string // node the action targets
	PointType      string
	PointValueType string
	PointValue     float64
	PointTextValue string
}
// RuleConfig contains parts of the rule that a user changes.
// NOTE(review): currently empty — config fields appear to live directly on Rule.
type RuleConfig struct {
}

// RuleState contains parts of a rule that the system changes.
type RuleState struct {
	Active     bool      `json:"active"`     // whether the rule is currently firing
	LastAction time.Time `json:"lastAction"` // when an action last ran for this rule
}

// Rule defines conditions and actions that are run if the conditions are true.
// Global indicates the rule applies to all Devices. The rule config and state
// are separated so we can make updates to the Rule without config affecting
// state, and state affecting config, as these are typically done by two
// different entities.
// NOTE(review): no Global field is present on this struct; see comment above.
type Rule struct {
	ID          string
	Description string
	Active      bool
	Conditions  []Condition
	Actions     []Action
}
// NodeToRule assembles a Rule from the rule node itself plus its child
// condition and action nodes, decoding each node's points into struct fields.
// The error return is currently always nil.
func NodeToRule(ruleNode NodeEdge, conditionNodes, actionNodes []NodeEdge) (*Rule, error) {
	ret := &Rule{}
	ret.ID = ruleNode.ID
	// Top-level rule points carry the description and active flag.
	for _, p := range ruleNode.Points {
		switch p.Type {
		case PointTypeDescription:
			ret.Description = p.Text
		case PointTypeActive:
			ret.Active = FloatToBool(p.Value)
		}
	}
	// Decode each condition node's points into a Condition.
	for _, cond := range conditionNodes {
		var newCond Condition
		newCond.ID = cond.ID
		// Default until a PointTypePointIndex point is seen.
		newCond.PointIndex = -1
		for _, p := range cond.Points {
			switch p.Type {
			case PointTypeDescription:
				newCond.Description = p.Text
			case PointTypeID:
				newCond.NodeID = p.Text
			case PointTypePointType:
				newCond.PointType = p.Text
			case PointTypePointID:
				newCond.PointID = p.Text
			case PointTypePointIndex:
				newCond.PointIndex = int(p.Value)
			case PointTypeValueType:
				newCond.PointValueType = p.Text
			case PointTypeOperator:
				newCond.Operator = p.Text
			case PointTypeValue:
				newCond.PointValue = p.Value
			case PointTypeMinActive:
				newCond.MinTimeActive = p.Value
			case PointTypeActive:
				newCond.Active = FloatToBool(p.Value)
			}
		}
		ret.Conditions = append(ret.Conditions, newCond)
	}
	// Decode each action node's points into an Action.
	for _, act := range actionNodes {
		var newAct Action
		newAct.ID = act.ID
		for _, p := range act.Points {
			switch p.Type {
			case PointTypeDescription:
				newAct.Description = p.Text
			case PointTypeActionType:
				newAct.Action = p.Text
			case PointTypeID:
				newAct.NodeID = p.Text
			case PointTypePointType:
				newAct.PointType = p.Text
			case PointTypeValueType:
				newAct.PointValueType = p.Text
			case PointTypeValue:
				// A value point carries both numeric and text payloads.
				newAct.PointValue = p.Value
				newAct.PointTextValue = p.Text
			}
		}
		ret.Actions = append(ret.Actions, newAct)
	}
	return ret, nil
} | data/rule.go | 0.681197 | 0.53206 | rule.go | starcoder |
package pnn
import (
"container/heap"
"github.com/fiwippi/go-quantise/pkg/colours"
"image"
"image/color"
"math"
"sort"
)
// PNNMode selects the colour space used for distance calculations during a
// PNN quantisation run; a single mode applies to the whole operation.
type PNNMode uint8

// Which colour mode to use when calculating the distances between colours.
const (
	RGB PNNMode = iota // distances measured with the RGB VectorCost
	LAB                // distances measured in CIELAB space
)
// QuantiseColour reduces the colours of img to a palette of M entries that
// best represents it, using pairwise-nearest-neighbour merging.
func (mode PNNMode) QuantiseColour(img image.Image, M int) color.Palette {
	// Build the colour histogram of the image.
	hist := CreatePNNHistogram(img)
	// Build the linked list of colour nodes and the merge-cost heap.
	head, h := mode.initialiseColours(hist)
	remaining := h.Len() + 1
	step := 0
	// Merge the cheapest pair until only M colours remain.
	for remaining != M {
		node := mode.recalculateNeighbours(h, step)
		mode.updateColourStructs(node, node.NN, h, step)
		remaining--
		step++
	}
	// Walk the surviving list and emit the palette.
	palette := make(color.Palette, 0, M)
	for cur := head; cur != nil; cur = cur.Next {
		palette = append(palette, color.RGBA{uint8(cur.R), uint8(cur.G), uint8(cur.B), uint8(cur.A)})
	}
	return palette
}
// recalculateNeighbours pops stale entries off the top of the heap, refreshing
// each node's nearest neighbour until the cheapest entry is up to date, and
// returns that node.
func (mode PNNMode) recalculateNeighbours(h *Heap, count int) *Node {
	for {
		node := h.Front().(*Node)
		if node.UpdateCount >= node.MergeCount && node.UpdateCount >= node.NN.MergeCount {
			// Neither this node nor its neighbour merged since the last
			// refresh, so the cached cost is still valid.
			return node
		}
		mode.nearestNeighbour(node)
		heap.Fix(h, node.Index)
		node.UpdateCount = count
	}
}
// initialiseColours builds the sorted, doubly linked list of colour nodes
// from the histogram, plus the heap of merge candidates used by the PNN
// algorithm. It returns the head of the list and the heap.
//
// Improvements over the original: idiomatic `for k := range`, preallocated
// key slice, removal of a redundant nil assignment and leftover debug output.
//
// NOTE(review): an empty histogram would panic on keys[0]; callers are
// assumed to supply at least one colour bin.
func (mode PNNMode) initialiseColours(hist Histogram) (*Node, *Heap) {
	// Sort the histogram keys so the list is in ascending colour-key order.
	keys := make([]int, 0, len(hist))
	for k := range hist {
		keys = append(keys, int(k))
	}
	sort.Ints(keys)

	// Link the nodes together, averaging each bin's accumulated channels.
	head := hist[uint32(keys[0])]
	var previousNode *Node
	for _, i := range keys {
		currentNode := hist[uint32(i)]
		currentNode.A /= currentNode.N
		currentNode.R /= currentNode.N
		currentNode.G /= currentNode.N
		currentNode.B /= currentNode.N
		currentNode.Prev = previousNode
		if previousNode != nil {
			previousNode.Next = currentNode
		}
		previousNode = currentNode
	}

	// Initialise nearest neighbour for each node and build the heap; the
	// final node has no successor to merge with, so it stays off the heap.
	h := make(Heap, 0)
	heap.Init(&h)
	for n := head; n != nil; n = n.Next {
		mode.nearestNeighbour(n)
		if n.Next != nil {
			heap.Push(&h, n)
		}
	}
	return head, &h
}
// nearestNeighbour scans the list after node for the entry with the smallest
// merge cost and records that neighbour and cost on node.NN and node.D.
func (mode PNNMode) nearestNeighbour(node *Node) {
	best := math.MaxFloat64
	var nearest *Node
	if mode == LAB {
		ref := node.LAB()
		for cand := node.Next; cand != nil; cand = cand.Next {
			if d := colours.LABDistance(ref, cand.LAB()); d < best {
				best = d
				nearest = cand
			}
		}
	} else {
		// Any mode other than LAB falls back to the RGB vector cost.
		for cand := node.Next; cand != nil; cand = cand.Next {
			if d := VectorCost(node, cand); d < best {
				best = d
				nearest = cand
			}
		}
	}
	node.NN = nearest
	node.D = best
}
// updateColourStructs merges node b into node a (weighted by pixel counts),
// unchains b from the linked list, and removes the appropriate entries from
// the heap, shrinking the candidate palette by one colour.
func (mode PNNMode) updateColourStructs(a, b *Node, h *Heap, count int) {
	// Weighted average of the two bins' channels; a absorbs b's pixels.
	Nq := a.N + b.N
	a.A = (a.N*a.A + b.N*b.A) / Nq
	a.R = (a.N*a.R + b.N*b.R) / Nq
	a.G = (a.N*a.G + b.N*b.G) / Nq
	a.B = (a.N*a.B + b.N*b.B) / Nq
	a.N = Nq
	// Unchain the nearest neighbour bin
	if b.Next != nil {
		b.Next.Prev = b.Prev
	}
	if b.Prev != nil {
		//fmt.Printf("Removing %p %+v\n", b, b)
		b.Prev.Next = b.Next
	}
	// Remove the neighbour from the bin
	if b.Index >= 0 && b.Index < h.Len() {
		_ = heap.Remove(h, b.Index)
	}
	// Remove element from heap if its at the
	// end of the list and not already removed
	if a.Next == nil && a.Index != -1 {
		_ = heap.Remove(h, a.Index)
	}
	// Record the merge step; b is retired with a sentinel merge count.
	a.MergeCount = count + 1
	b.MergeCount = math.MaxInt32
} | internal/quantisers/pnn/colour.go | 0.649467 | 0.424173 | colour.go | starcoder |
package bsc
import (
"github.com/jesand/stats"
"github.com/jesand/stats/channel"
"github.com/jesand/stats/dist"
"github.com/jesand/stats/factor"
"github.com/jesand/stats/variable"
"math/rand"
)
// NewBSCPair constructs a two-layer binary symmetric channel with the given
// per-layer noise rates. It panics if either rate lies outside [0, 1].
func NewBSCPair(noiseRate1, noiseRate2 float64) *BSCPair {
	// Validate rate1 first, then rate2, matching the panic order callers see.
	for _, rate := range []float64{noiseRate1, noiseRate2} {
		if rate < 0 || rate > 1 {
			panic(stats.ErrfInvalidProb(rate))
		}
	}
	ch := &BSCPair{
		NoiseRate1: variable.NewContinuousRV(noiseRate1, dist.UnitIntervalSpace),
		NoiseRate2: variable.NewContinuousRV(noiseRate2, dist.UnitIntervalSpace),
	}
	// Wire the embedded sampler back to this channel instance.
	ch.DefChannelSampleN.Channel = ch
	return ch
}
// A two-layer binary symmetric channel. Given a Bernoulli random variable X,
// it emits a Bernoulli random variable Y where each layer independently flips
// the bit with its own noise rate. The flips cancel when both layers fire, so
// Y = !X exactly when one layer (but not both) flips (see Sample).
type BSCPair struct {
	// The probability of flipping the input for each layer of the channel
	NoiseRate1, NoiseRate2 *variable.ContinuousRV
	channel.DefChannelSampleN
}
// Sample pushes input through both noise layers and returns the resulting
// Bernoulli variable. The output bit is inverted exactly when one layer
// (but not both) flips it; two flips cancel out.
func (ch BSCPair) Sample(input variable.RandomVariable) variable.RandomVariable {
	rv := input.(*variable.DiscreteRV)
	space := dist.BooleanSpace
	x := space.BoolValue(rv.Outcome())
	// Draw the two layer flips in a fixed order (layer 1, then layer 2).
	flip1 := rand.Float64() <= ch.NoiseRate1.Val()
	flip2 := rand.Float64() <= ch.NoiseRate2.Val()
	if flip1 == flip2 {
		// Neither flipped, or both did and cancelled: output equals input.
		return variable.NewDiscreteRV(rv.Outcome(), space)
	}
	return variable.NewDiscreteRV(space.BoolOutcome(!x), space)
}
// Factor builds a factor tying one input variable to one output variable,
// sharing this channel's noise-rate variables.
func (ch BSCPair) Factor(input variable.RandomVariable, output variable.RandomVariable) factor.Factor {
	f := &BSCPairFactor{
		Input:      input.(*variable.DiscreteRV),
		Output:     output.(*variable.DiscreteRV),
		NoiseRate1: ch.NoiseRate1,
		NoiseRate2: ch.NoiseRate2,
	}
	return f
}
// Factors builds one factor per output variable, each relating the same
// input to one of the outputs. Returns nil for an empty outputs slice
// (preserving the original's nil result).
func (ch BSCPair) Factors(input variable.RandomVariable, outputs []variable.RandomVariable) []factor.Factor {
	if len(outputs) == 0 {
		return nil
	}
	// Preallocate: the result length is known up front.
	fs := make([]factor.Factor, 0, len(outputs))
	for _, rv := range outputs {
		fs = append(fs, ch.Factor(input, rv))
	}
	return fs
}
// A factor connecting an input variable to its output, as perturbed by the
// two constant Bernoulli noise rates of a BSCPair channel.
type BSCPairFactor struct {
	Input, Output          *variable.DiscreteRV   // the channel endpoints this factor relates
	NoiseRate1, NoiseRate2 *variable.ContinuousRV // per-layer flip probabilities, shared with the channel
}
// OutputMatchesInput reports whether the input and output variables
// currently hold equal values.
func (factor BSCPairFactor) OutputMatchesInput() bool {
	return factor.Input.Equals(factor.Output)
}
// Adjacent returns the random variables this factor touches: the output,
// the input, and both noise-rate parameters, in that order.
func (factor BSCPairFactor) Adjacent() []variable.RandomVariable {
	vars := make([]variable.RandomVariable, 0, 4)
	vars = append(vars, factor.Output, factor.Input, factor.NoiseRate1, factor.NoiseRate2)
	return vars
}
// The factor's current score, based on the values of adjacent variables.
// Matching input/output scores the probability the two layers agree (both
// flip or neither flips); a mismatch scores the probability exactly one flips.
func (factor BSCPairFactor) Score() float64 {
	var n1, n2 = factor.NoiseRate1.Val(), factor.NoiseRate2.Val()
	if factor.OutputMatchesInput() {
		return (n1 * n2) + ((1 - n1) * (1 - n2))
	} else {
		return ((1 - n1) * n2) + (n1 * (1 - n2))
	}
} | channel/bsc/bsc_pair.go | 0.794903 | 0.631324 | bsc_pair.go | starcoder |
// Package vga provides the VGA 256-color default palette, famously used in
// video mode 13h.
// See also https://en.wikipedia.org/wiki/Video_Graphics_Array#Color_palette
package vga
import "image/color"
// DefaultPalette is the VGA 256-color default palette. It can be used as a
// color.Palette from the standard library, e.g to create an image.Paletted.
var DefaultPalette = []color.Color{
color.RGBA{R: 0x00, G: 0x00, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0x00, B: 0xaa, A: 0xff},
color.RGBA{R: 0x00, G: 0xaa, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0xaa, B: 0xaa, A: 0xff},
color.RGBA{R: 0xaa, G: 0x00, B: 0x00, A: 0xff},
color.RGBA{R: 0xaa, G: 0x00, B: 0xaa, A: 0xff},
color.RGBA{R: 0xaa, G: 0x55, B: 0x00, A: 0xff},
color.RGBA{R: 0xaa, G: 0xaa, B: 0xaa, A: 0xff},
color.RGBA{R: 0x55, G: 0x55, B: 0x55, A: 0xff},
color.RGBA{R: 0x55, G: 0x55, B: 0xff, A: 0xff},
color.RGBA{R: 0x55, G: 0xff, B: 0x55, A: 0xff},
color.RGBA{R: 0x55, G: 0xff, B: 0xff, A: 0xff},
color.RGBA{R: 0xff, G: 0x55, B: 0x55, A: 0xff},
color.RGBA{R: 0xff, G: 0x55, B: 0xff, A: 0xff},
color.RGBA{R: 0xff, G: 0xff, B: 0x55, A: 0xff},
color.RGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff},
color.RGBA{R: 0x00, G: 0x00, B: 0x00, A: 0xff},
color.RGBA{R: 0x14, G: 0x14, B: 0x14, A: 0xff},
color.RGBA{R: 0x20, G: 0x20, B: 0x20, A: 0xff},
color.RGBA{R: 0x2c, G: 0x2c, B: 0x2c, A: 0xff},
color.RGBA{R: 0x38, G: 0x38, B: 0x38, A: 0xff},
color.RGBA{R: 0x45, G: 0x45, B: 0x45, A: 0xff},
color.RGBA{R: 0x51, G: 0x51, B: 0x51, A: 0xff},
color.RGBA{R: 0x61, G: 0x61, B: 0x61, A: 0xff},
color.RGBA{R: 0x71, G: 0x71, B: 0x71, A: 0xff},
color.RGBA{R: 0x82, G: 0x82, B: 0x82, A: 0xff},
color.RGBA{R: 0x92, G: 0x92, B: 0x92, A: 0xff},
color.RGBA{R: 0xa2, G: 0xa2, B: 0xa2, A: 0xff},
color.RGBA{R: 0xb6, G: 0xb6, B: 0xb6, A: 0xff},
color.RGBA{R: 0xcb, G: 0xcb, B: 0xcb, A: 0xff},
color.RGBA{R: 0xe3, G: 0xe3, B: 0xe3, A: 0xff},
color.RGBA{R: 0xff, G: 0xff, B: 0xff, A: 0xff},
color.RGBA{R: 0x00, G: 0x00, B: 0xff, A: 0xff},
color.RGBA{R: 0x41, G: 0x00, B: 0xff, A: 0xff},
color.RGBA{R: 0x7d, G: 0x00, B: 0xff, A: 0xff},
color.RGBA{R: 0xbe, G: 0x00, B: 0xff, A: 0xff},
color.RGBA{R: 0xff, G: 0x00, B: 0xff, A: 0xff},
color.RGBA{R: 0xff, G: 0x00, B: 0xbe, A: 0xff},
color.RGBA{R: 0xff, G: 0x00, B: 0x7d, A: 0xff},
color.RGBA{R: 0xff, G: 0x00, B: 0x41, A: 0xff},
color.RGBA{R: 0xff, G: 0x00, B: 0x00, A: 0xff},
color.RGBA{R: 0xff, G: 0x41, B: 0x00, A: 0xff},
color.RGBA{R: 0xff, G: 0x7d, B: 0x00, A: 0xff},
color.RGBA{R: 0xff, G: 0xbe, B: 0x00, A: 0xff},
color.RGBA{R: 0xff, G: 0xff, B: 0x00, A: 0xff},
color.RGBA{R: 0xbe, G: 0xff, B: 0x00, A: 0xff},
color.RGBA{R: 0x7d, G: 0xff, B: 0x00, A: 0xff},
color.RGBA{R: 0x41, G: 0xff, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0xff, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0xff, B: 0x41, A: 0xff},
color.RGBA{R: 0x00, G: 0xff, B: 0x7d, A: 0xff},
color.RGBA{R: 0x00, G: 0xff, B: 0xbe, A: 0xff},
color.RGBA{R: 0x00, G: 0xff, B: 0xff, A: 0xff},
color.RGBA{R: 0x00, G: 0xbe, B: 0xff, A: 0xff},
color.RGBA{R: 0x00, G: 0x7d, B: 0xff, A: 0xff},
color.RGBA{R: 0x00, G: 0x41, B: 0xff, A: 0xff},
color.RGBA{R: 0x7d, G: 0x7d, B: 0xff, A: 0xff},
color.RGBA{R: 0x9e, G: 0x7d, B: 0xff, A: 0xff},
color.RGBA{R: 0xbe, G: 0x7d, B: 0xff, A: 0xff},
color.RGBA{R: 0xdf, G: 0x7d, B: 0xff, A: 0xff},
color.RGBA{R: 0xff, G: 0x7d, B: 0xff, A: 0xff},
color.RGBA{R: 0xff, G: 0x7d, B: 0xdf, A: 0xff},
color.RGBA{R: 0xff, G: 0x7d, B: 0xbe, A: 0xff},
color.RGBA{R: 0xff, G: 0x7d, B: 0x9e, A: 0xff},
color.RGBA{R: 0xff, G: 0x7d, B: 0x7d, A: 0xff},
color.RGBA{R: 0xff, G: 0x9e, B: 0x7d, A: 0xff},
color.RGBA{R: 0xff, G: 0xbe, B: 0x7d, A: 0xff},
color.RGBA{R: 0xff, G: 0xdf, B: 0x7d, A: 0xff},
color.RGBA{R: 0xff, G: 0xff, B: 0x7d, A: 0xff},
color.RGBA{R: 0xdf, G: 0xff, B: 0x7d, A: 0xff},
color.RGBA{R: 0xbe, G: 0xff, B: 0x7d, A: 0xff},
color.RGBA{R: 0x9e, G: 0xff, B: 0x7d, A: 0xff},
color.RGBA{R: 0x7d, G: 0xff, B: 0x7d, A: 0xff},
color.RGBA{R: 0x7d, G: 0xff, B: 0x9e, A: 0xff},
color.RGBA{R: 0x7d, G: 0xff, B: 0xbe, A: 0xff},
color.RGBA{R: 0x7d, G: 0xff, B: 0xdf, A: 0xff},
color.RGBA{R: 0x7d, G: 0xff, B: 0xff, A: 0xff},
color.RGBA{R: 0x7d, G: 0xdf, B: 0xff, A: 0xff},
color.RGBA{R: 0x7d, G: 0xbe, B: 0xff, A: 0xff},
color.RGBA{R: 0x7d, G: 0x9e, B: 0xff, A: 0xff},
color.RGBA{R: 0xb6, G: 0xb6, B: 0xff, A: 0xff},
color.RGBA{R: 0xc7, G: 0xb6, B: 0xff, A: 0xff},
color.RGBA{R: 0xdb, G: 0xb6, B: 0xff, A: 0xff},
color.RGBA{R: 0xeb, G: 0xb6, B: 0xff, A: 0xff},
color.RGBA{R: 0xff, G: 0xb6, B: 0xff, A: 0xff},
color.RGBA{R: 0xff, G: 0xb6, B: 0xeb, A: 0xff},
color.RGBA{R: 0xff, G: 0xb6, B: 0xdb, A: 0xff},
color.RGBA{R: 0xff, G: 0xb6, B: 0xc7, A: 0xff},
color.RGBA{R: 0xff, G: 0xb6, B: 0xb6, A: 0xff},
color.RGBA{R: 0xff, G: 0xc7, B: 0xb6, A: 0xff},
color.RGBA{R: 0xff, G: 0xdb, B: 0xb6, A: 0xff},
color.RGBA{R: 0xff, G: 0xeb, B: 0xb6, A: 0xff},
color.RGBA{R: 0xff, G: 0xff, B: 0xb6, A: 0xff},
color.RGBA{R: 0xeb, G: 0xff, B: 0xb6, A: 0xff},
color.RGBA{R: 0xdb, G: 0xff, B: 0xb6, A: 0xff},
color.RGBA{R: 0xc7, G: 0xff, B: 0xb6, A: 0xff},
color.RGBA{R: 0xb6, G: 0xff, B: 0xb6, A: 0xff},
color.RGBA{R: 0xb6, G: 0xff, B: 0xc7, A: 0xff},
color.RGBA{R: 0xb6, G: 0xff, B: 0xdb, A: 0xff},
color.RGBA{R: 0xb6, G: 0xff, B: 0xeb, A: 0xff},
color.RGBA{R: 0xb6, G: 0xff, B: 0xff, A: 0xff},
color.RGBA{R: 0xb6, G: 0xeb, B: 0xff, A: 0xff},
color.RGBA{R: 0xb6, G: 0xdb, B: 0xff, A: 0xff},
color.RGBA{R: 0xb6, G: 0xc7, B: 0xff, A: 0xff},
color.RGBA{R: 0x00, G: 0x00, B: 0x71, A: 0xff},
color.RGBA{R: 0x1c, G: 0x00, B: 0x71, A: 0xff},
color.RGBA{R: 0x38, G: 0x00, B: 0x71, A: 0xff},
color.RGBA{R: 0x55, G: 0x00, B: 0x71, A: 0xff},
color.RGBA{R: 0x71, G: 0x00, B: 0x71, A: 0xff},
color.RGBA{R: 0x71, G: 0x00, B: 0x55, A: 0xff},
color.RGBA{R: 0x71, G: 0x00, B: 0x38, A: 0xff},
color.RGBA{R: 0x71, G: 0x00, B: 0x1c, A: 0xff},
color.RGBA{R: 0x71, G: 0x00, B: 0x00, A: 0xff},
color.RGBA{R: 0x71, G: 0x1c, B: 0x00, A: 0xff},
color.RGBA{R: 0x71, G: 0x38, B: 0x00, A: 0xff},
color.RGBA{R: 0x71, G: 0x55, B: 0x00, A: 0xff},
color.RGBA{R: 0x71, G: 0x71, B: 0x00, A: 0xff},
color.RGBA{R: 0x55, G: 0x71, B: 0x00, A: 0xff},
color.RGBA{R: 0x38, G: 0x71, B: 0x00, A: 0xff},
color.RGBA{R: 0x1c, G: 0x71, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0x71, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0x71, B: 0x1c, A: 0xff},
color.RGBA{R: 0x00, G: 0x71, B: 0x38, A: 0xff},
color.RGBA{R: 0x00, G: 0x71, B: 0x55, A: 0xff},
color.RGBA{R: 0x00, G: 0x71, B: 0x71, A: 0xff},
color.RGBA{R: 0x00, G: 0x55, B: 0x71, A: 0xff},
color.RGBA{R: 0x00, G: 0x38, B: 0x71, A: 0xff},
color.RGBA{R: 0x00, G: 0x1c, B: 0x71, A: 0xff},
color.RGBA{R: 0x38, G: 0x38, B: 0x71, A: 0xff},
color.RGBA{R: 0x45, G: 0x38, B: 0x71, A: 0xff},
color.RGBA{R: 0x55, G: 0x38, B: 0x71, A: 0xff},
color.RGBA{R: 0x61, G: 0x38, B: 0x71, A: 0xff},
color.RGBA{R: 0x71, G: 0x38, B: 0x71, A: 0xff},
color.RGBA{R: 0x71, G: 0x38, B: 0x61, A: 0xff},
color.RGBA{R: 0x71, G: 0x38, B: 0x55, A: 0xff},
color.RGBA{R: 0x71, G: 0x38, B: 0x45, A: 0xff},
color.RGBA{R: 0x71, G: 0x38, B: 0x38, A: 0xff},
color.RGBA{R: 0x71, G: 0x45, B: 0x38, A: 0xff},
color.RGBA{R: 0x71, G: 0x55, B: 0x38, A: 0xff},
color.RGBA{R: 0x71, G: 0x61, B: 0x38, A: 0xff},
color.RGBA{R: 0x71, G: 0x71, B: 0x38, A: 0xff},
color.RGBA{R: 0x61, G: 0x71, B: 0x38, A: 0xff},
color.RGBA{R: 0x55, G: 0x71, B: 0x38, A: 0xff},
color.RGBA{R: 0x45, G: 0x71, B: 0x38, A: 0xff},
color.RGBA{R: 0x38, G: 0x71, B: 0x38, A: 0xff},
color.RGBA{R: 0x38, G: 0x71, B: 0x45, A: 0xff},
color.RGBA{R: 0x38, G: 0x71, B: 0x55, A: 0xff},
color.RGBA{R: 0x38, G: 0x71, B: 0x61, A: 0xff},
color.RGBA{R: 0x38, G: 0x71, B: 0x71, A: 0xff},
color.RGBA{R: 0x38, G: 0x61, B: 0x71, A: 0xff},
color.RGBA{R: 0x38, G: 0x55, B: 0x71, A: 0xff},
color.RGBA{R: 0x38, G: 0x45, B: 0x71, A: 0xff},
color.RGBA{R: 0x51, G: 0x51, B: 0x71, A: 0xff},
color.RGBA{R: 0x59, G: 0x51, B: 0x71, A: 0xff},
color.RGBA{R: 0x61, G: 0x51, B: 0x71, A: 0xff},
color.RGBA{R: 0x69, G: 0x51, B: 0x71, A: 0xff},
color.RGBA{R: 0x71, G: 0x51, B: 0x71, A: 0xff},
color.RGBA{R: 0x71, G: 0x51, B: 0x69, A: 0xff},
color.RGBA{R: 0x71, G: 0x51, B: 0x61, A: 0xff},
color.RGBA{R: 0x71, G: 0x51, B: 0x59, A: 0xff},
color.RGBA{R: 0x71, G: 0x51, B: 0x51, A: 0xff},
color.RGBA{R: 0x71, G: 0x59, B: 0x51, A: 0xff},
color.RGBA{R: 0x71, G: 0x61, B: 0x51, A: 0xff},
color.RGBA{R: 0x71, G: 0x69, B: 0x51, A: 0xff},
color.RGBA{R: 0x71, G: 0x71, B: 0x51, A: 0xff},
color.RGBA{R: 0x69, G: 0x71, B: 0x51, A: 0xff},
color.RGBA{R: 0x61, G: 0x71, B: 0x51, A: 0xff},
color.RGBA{R: 0x59, G: 0x71, B: 0x51, A: 0xff},
color.RGBA{R: 0x51, G: 0x71, B: 0x51, A: 0xff},
color.RGBA{R: 0x51, G: 0x71, B: 0x59, A: 0xff},
color.RGBA{R: 0x51, G: 0x71, B: 0x61, A: 0xff},
color.RGBA{R: 0x51, G: 0x71, B: 0x69, A: 0xff},
color.RGBA{R: 0x51, G: 0x71, B: 0x71, A: 0xff},
color.RGBA{R: 0x51, G: 0x69, B: 0x71, A: 0xff},
color.RGBA{R: 0x51, G: 0x61, B: 0x71, A: 0xff},
color.RGBA{R: 0x51, G: 0x59, B: 0x71, A: 0xff},
color.RGBA{R: 0x00, G: 0x00, B: 0x41, A: 0xff},
color.RGBA{R: 0x10, G: 0x00, B: 0x41, A: 0xff},
color.RGBA{R: 0x20, G: 0x00, B: 0x41, A: 0xff},
color.RGBA{R: 0x30, G: 0x00, B: 0x41, A: 0xff},
color.RGBA{R: 0x41, G: 0x00, B: 0x41, A: 0xff},
color.RGBA{R: 0x41, G: 0x00, B: 0x30, A: 0xff},
color.RGBA{R: 0x41, G: 0x00, B: 0x20, A: 0xff},
color.RGBA{R: 0x41, G: 0x00, B: 0x10, A: 0xff},
color.RGBA{R: 0x41, G: 0x00, B: 0x00, A: 0xff},
color.RGBA{R: 0x41, G: 0x10, B: 0x00, A: 0xff},
color.RGBA{R: 0x41, G: 0x20, B: 0x00, A: 0xff},
color.RGBA{R: 0x41, G: 0x30, B: 0x00, A: 0xff},
color.RGBA{R: 0x41, G: 0x41, B: 0x00, A: 0xff},
color.RGBA{R: 0x30, G: 0x41, B: 0x00, A: 0xff},
color.RGBA{R: 0x20, G: 0x41, B: 0x00, A: 0xff},
color.RGBA{R: 0x10, G: 0x41, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0x41, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0x41, B: 0x10, A: 0xff},
color.RGBA{R: 0x00, G: 0x41, B: 0x20, A: 0xff},
color.RGBA{R: 0x00, G: 0x41, B: 0x30, A: 0xff},
color.RGBA{R: 0x00, G: 0x41, B: 0x41, A: 0xff},
color.RGBA{R: 0x00, G: 0x30, B: 0x41, A: 0xff},
color.RGBA{R: 0x00, G: 0x20, B: 0x41, A: 0xff},
color.RGBA{R: 0x00, G: 0x10, B: 0x41, A: 0xff},
color.RGBA{R: 0x20, G: 0x20, B: 0x41, A: 0xff},
color.RGBA{R: 0x28, G: 0x20, B: 0x41, A: 0xff},
color.RGBA{R: 0x30, G: 0x20, B: 0x41, A: 0xff},
color.RGBA{R: 0x38, G: 0x20, B: 0x41, A: 0xff},
color.RGBA{R: 0x41, G: 0x20, B: 0x41, A: 0xff},
color.RGBA{R: 0x41, G: 0x20, B: 0x38, A: 0xff},
color.RGBA{R: 0x41, G: 0x20, B: 0x30, A: 0xff},
color.RGBA{R: 0x41, G: 0x20, B: 0x28, A: 0xff},
color.RGBA{R: 0x41, G: 0x20, B: 0x20, A: 0xff},
color.RGBA{R: 0x41, G: 0x28, B: 0x20, A: 0xff},
color.RGBA{R: 0x41, G: 0x30, B: 0x20, A: 0xff},
color.RGBA{R: 0x41, G: 0x38, B: 0x20, A: 0xff},
color.RGBA{R: 0x41, G: 0x41, B: 0x20, A: 0xff},
color.RGBA{R: 0x38, G: 0x41, B: 0x20, A: 0xff},
color.RGBA{R: 0x30, G: 0x41, B: 0x20, A: 0xff},
color.RGBA{R: 0x28, G: 0x41, B: 0x20, A: 0xff},
color.RGBA{R: 0x20, G: 0x41, B: 0x20, A: 0xff},
color.RGBA{R: 0x20, G: 0x41, B: 0x28, A: 0xff},
color.RGBA{R: 0x20, G: 0x41, B: 0x30, A: 0xff},
color.RGBA{R: 0x20, G: 0x41, B: 0x38, A: 0xff},
color.RGBA{R: 0x20, G: 0x41, B: 0x41, A: 0xff},
color.RGBA{R: 0x20, G: 0x38, B: 0x41, A: 0xff},
color.RGBA{R: 0x20, G: 0x30, B: 0x41, A: 0xff},
color.RGBA{R: 0x20, G: 0x28, B: 0x41, A: 0xff},
color.RGBA{R: 0x2c, G: 0x2c, B: 0x41, A: 0xff},
color.RGBA{R: 0x30, G: 0x2c, B: 0x41, A: 0xff},
color.RGBA{R: 0x34, G: 0x2c, B: 0x41, A: 0xff},
color.RGBA{R: 0x3c, G: 0x2c, B: 0x41, A: 0xff},
color.RGBA{R: 0x41, G: 0x2c, B: 0x41, A: 0xff},
color.RGBA{R: 0x41, G: 0x2c, B: 0x3c, A: 0xff},
color.RGBA{R: 0x41, G: 0x2c, B: 0x34, A: 0xff},
color.RGBA{R: 0x41, G: 0x2c, B: 0x30, A: 0xff},
color.RGBA{R: 0x41, G: 0x2c, B: 0x2c, A: 0xff},
color.RGBA{R: 0x41, G: 0x30, B: 0x2c, A: 0xff},
color.RGBA{R: 0x41, G: 0x34, B: 0x2c, A: 0xff},
color.RGBA{R: 0x41, G: 0x3c, B: 0x2c, A: 0xff},
color.RGBA{R: 0x41, G: 0x41, B: 0x2c, A: 0xff},
color.RGBA{R: 0x3c, G: 0x41, B: 0x2c, A: 0xff},
color.RGBA{R: 0x34, G: 0x41, B: 0x2c, A: 0xff},
color.RGBA{R: 0x30, G: 0x41, B: 0x2c, A: 0xff},
color.RGBA{R: 0x2c, G: 0x41, B: 0x2c, A: 0xff},
color.RGBA{R: 0x2c, G: 0x41, B: 0x30, A: 0xff},
color.RGBA{R: 0x2c, G: 0x41, B: 0x34, A: 0xff},
color.RGBA{R: 0x2c, G: 0x41, B: 0x3c, A: 0xff},
color.RGBA{R: 0x2c, G: 0x41, B: 0x41, A: 0xff},
color.RGBA{R: 0x2c, G: 0x3c, B: 0x41, A: 0xff},
color.RGBA{R: 0x2c, G: 0x34, B: 0x41, A: 0xff},
color.RGBA{R: 0x2c, G: 0x30, B: 0x41, A: 0xff},
color.RGBA{R: 0x00, G: 0x00, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0x00, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0x00, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0x00, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0x00, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0x00, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0x00, B: 0x00, A: 0xff},
color.RGBA{R: 0x00, G: 0x00, B: 0x00, A: 0xff},
} | palette.go | 0.755907 | 0.712145 | palette.go | starcoder |
package bow
import (
"fmt"
"github.com/apache/arrow/go/arrow/array"
"github.com/apache/arrow/go/arrow/bitutil"
)
// NewBuffer allocates a Buffer of the given length for the given column
// type, with an all-clear (all-null) validity bitmap. It panics on an
// unsupported type.
func NewBuffer(size int, typ Type) Buffer {
	// One validity bit per slot, rounded up to whole bytes.
	bitmap := make([]byte, bitutil.CeilByte(size)/8)
	switch typ {
	case Int64:
		return Buffer{Data: make([]int64, size), nullBitmapBytes: bitmap}
	case Float64:
		return Buffer{Data: make([]float64, size), nullBitmapBytes: bitmap}
	case Boolean:
		return Buffer{Data: make([]bool, size), nullBitmapBytes: bitmap}
	case String:
		return Buffer{Data: make([]string, size), nullBitmapBytes: bitmap}
	default:
		panic(fmt.Errorf("unsupported type %s", typ))
	}
}
// NewBufferFromData wraps an existing typed slice in a Buffer, building a
// validity bitmap sized to the slice's length. It panics on an unsupported
// slice type.
//
// Bug fix: the original declared l but never assigned it (the switch cases
// were empty), so the null bitmap was always built for length 0 regardless
// of the data's actual length.
func NewBufferFromData(data interface{}) Buffer {
	var l int
	switch d := data.(type) {
	case []int64:
		l = len(d)
	case []float64:
		l = len(d)
	case []bool:
		l = len(d)
	case []string:
		l = len(d)
	default:
		panic(fmt.Errorf("unhandled type %T", data))
	}
	return Buffer{
		Data:            data,
		nullBitmapBytes: buildNullBitmapBytes(l, nil),
	}
}
// Len returns the number of elements in the buffer's backing slice.
// It panics on an unsupported element type.
func (b Buffer) Len() int {
	var n int
	switch d := b.Data.(type) {
	case []int64:
		n = len(d)
	case []float64:
		n = len(d)
	case []bool:
		n = len(d)
	case []string:
		n = len(d)
	default:
		panic(fmt.Errorf("unsupported type '%T'", b.Data))
	}
	return n
}
// SetOrDrop stores value at index i after converting it to the buffer's
// element type. A successful conversion marks slot i valid in the bitmap;
// a failed conversion clears it, i.e. the slot is "dropped" to null.
func (b *Buffer) SetOrDrop(i int, value interface{}) {
	valid := false
	switch data := b.Data.(type) {
	case []int64:
		data[i], valid = Int64.Convert(value).(int64)
	case []float64:
		data[i], valid = Float64.Convert(value).(float64)
	case []bool:
		data[i], valid = Boolean.Convert(value).(bool)
	case []string:
		data[i], valid = String.Convert(value).(string)
	default:
		panic(fmt.Errorf("unsupported type %T", data))
	}
	if !valid {
		bitutil.ClearBit(b.nullBitmapBytes, i)
		return
	}
	bitutil.SetBit(b.nullBitmapBytes, i)
}
// SetOrDropStrict stores value at index i without conversion: the value must
// already have the buffer's exact element type. On a type match the slot is
// marked valid; otherwise it is cleared to null.
func (b *Buffer) SetOrDropStrict(i int, value interface{}) {
	valid := false
	switch data := b.Data.(type) {
	case []int64:
		data[i], valid = value.(int64)
	case []float64:
		data[i], valid = value.(float64)
	case []bool:
		data[i], valid = value.(bool)
	case []string:
		data[i], valid = value.(string)
	default:
		panic(fmt.Errorf("unsupported type %T", data))
	}
	if !valid {
		bitutil.ClearBit(b.nullBitmapBytes, i)
		return
	}
	bitutil.SetBit(b.nullBitmapBytes, i)
}
// GetValue returns the element at index i, or nil when the validity bitmap
// marks the slot as null. It panics on an unsupported element type.
func (b *Buffer) GetValue(i int) interface{} {
	if bitutil.BitIsNotSet(b.nullBitmapBytes, i) {
		return nil
	}
	switch data := b.Data.(type) {
	case []int64:
		return data[i]
	case []float64:
		return data[i]
	case []bool:
		return data[i]
	case []string:
		return data[i]
	}
	panic(fmt.Errorf("unsupported type %T", b.Data))
}
// Less reports whether element i orders before element j. Numeric and string
// types compare with <; booleans order false before true.
func (b Buffer) Less(i, j int) bool {
	switch data := b.Data.(type) {
	case []int64:
		return data[i] < data[j]
	case []float64:
		return data[i] < data[j]
	case []string:
		return data[i] < data[j]
	case []bool:
		// false < true: only true when data[i] is false and data[j] is true.
		return !data[i] && data[j]
	default:
		panic(fmt.Errorf("unsupported type %T", data))
	}
}
// NewBufferFromCol extracts column colIndex from the underlying Arrow record
// into a Buffer, copying the column's null bitmap (truncated to the bytes
// actually covering the column length) so the Buffer owns its validity data.
// Panics on an unsupported column type.
func (b *bow) NewBufferFromCol(colIndex int) Buffer {
	data := b.Column(colIndex).Data()
	switch b.ColumnType(colIndex) {
	case Int64:
		arr := array.NewInt64Data(data)
		// Slice the bitmap to the bytes covering the column, then copy it.
		nullBitmapBytes := arr.NullBitmapBytes()[:bitutil.CeilByte(arr.Data().Len())/8]
		nullBitmapBytesCopy := make([]byte, len(nullBitmapBytes))
		copy(nullBitmapBytesCopy, nullBitmapBytes)
		return Buffer{
			Data:            Int64Values(arr),
			nullBitmapBytes: nullBitmapBytesCopy,
		}
	case Float64:
		arr := array.NewFloat64Data(data)
		nullBitmapBytes := arr.NullBitmapBytes()[:bitutil.CeilByte(arr.Data().Len())/8]
		nullBitmapBytesCopy := make([]byte, len(nullBitmapBytes))
		copy(nullBitmapBytesCopy, nullBitmapBytes)
		return Buffer{
			Data:            Float64Values(arr),
			nullBitmapBytes: nullBitmapBytesCopy,
		}
	case Boolean:
		arr := array.NewBooleanData(data)
		nullBitmapBytes := arr.NullBitmapBytes()[:bitutil.CeilByte(arr.Data().Len())/8]
		nullBitmapBytesCopy := make([]byte, len(nullBitmapBytes))
		copy(nullBitmapBytesCopy, nullBitmapBytes)
		return Buffer{
			Data:            BooleanValues(arr),
			nullBitmapBytes: nullBitmapBytesCopy,
		}
	case String:
		arr := array.NewStringData(data)
		nullBitmapBytes := arr.NullBitmapBytes()[:bitutil.CeilByte(arr.Data().Len())/8]
		nullBitmapBytesCopy := make([]byte, len(nullBitmapBytes))
		copy(nullBitmapBytesCopy, nullBitmapBytes)
		return Buffer{
			Data:            StringValues(arr),
			nullBitmapBytes: nullBitmapBytesCopy,
		}
	default:
		panic(fmt.Errorf(
			"unsupported type %+v", b.ColumnType(colIndex)))
	}
} | bowbuffer.gen.go | 0.508056 | 0.424114 | bowbuffer.gen.go | starcoder |
package lib
import (
"encoding"
"encoding/base64"
"fmt"
"gopkg.in/dedis/crypto.v0/abstract"
"gopkg.in/dedis/crypto.v0/random"
"gopkg.in/dedis/onet.v1/log"
"gopkg.in/dedis/onet.v1/network"
"strings"
"sync"
)
// MaxHomomorphicInt is the upper bound for integers used in messages; a
// failed decryption will return this value.
const MaxHomomorphicInt int64 = 100000

// PointToInt creates a map between EC points and integers.
var PointToInt = make(map[string]int64, MaxHomomorphicInt)

// currentGreatestM / currentGreatestInt appear to track how far the
// PointToInt table has been filled (largest mapped point and its integer).
// NOTE(review): inferred from names; the maintaining code is outside this view.
var currentGreatestM abstract.Point
var currentGreatestInt int64

// suite is the cryptographic group used for all operations in this package.
var suite = network.Suite
// CipherText is an ElGamal encrypted point.
type CipherText struct {
	// K is the ephemeral Diffie-Hellman public key; C is the message point
	// blinded with the shared secret (see encryptPoint).
	K, C abstract.Point
}

// CipherVector is a slice of ElGamal encrypted points.
type CipherVector []CipherText

// DeterministCipherText is a deterministic encryption of a point.
type DeterministCipherText struct {
	Point abstract.Point
}

// DeterministCipherVector is a slice of deterministic encrypted points.
type DeterministCipherVector []DeterministCipherText
// Constructors
//______________________________________________________________________________________________________________________
// NewCipherText creates a ciphertext whose components are both the group's
// neutral (null) element.
func NewCipherText() *CipherText {
	ct := CipherText{K: suite.Point().Null(), C: suite.Point().Null()}
	return &ct
}
// NewCipherTextFromBase64 creates a ciphertext initialised to null elements
// and fills it by deserialising the given base64-encoded string.
func NewCipherTextFromBase64(b64Encoded string) *CipherText {
	ct := &CipherText{K: suite.Point().Null(), C: suite.Point().Null()}
	ct.Deserialize(b64Encoded)
	return ct
}
// NewCipherVector creates a ciphervector of the given length with every
// entry set to the null-element ciphertext.
func NewCipherVector(length int) *CipherVector {
	cv := make(CipherVector, length)
	for i := range cv {
		cv[i] = CipherText{suite.Point().Null(), suite.Point().Null()}
	}
	return &cv
}
// NewDeterministicCipherText creates a deterministic ciphertext holding the
// null element.
func NewDeterministicCipherText() *DeterministCipherText {
	return &DeterministCipherText{suite.Point().Null()}
}
// NewDeterministicCipherVector creates a vector of deterministic ciphertexts
// of the given length, each initialised to the null element.
func NewDeterministicCipherVector(length int) *DeterministCipherVector {
	dcv := make(DeterministCipherVector, length)
	for i := range dcv {
		dcv[i] = DeterministCipherText{suite.Point().Null()}
	}
	return &dcv
}
// Key Pairs (mostly used in tests)
//----------------------------------------------------------------------------------------------------------------------

// GenKey generates one ElGamal key pair: a random private scalar and the
// matching public point pubKey = secKey * B.
func GenKey() (secKey abstract.Scalar, pubKey abstract.Point) {
	secKey = suite.Scalar().Pick(random.Stream)
	pubKey = suite.Point().Mul(suite.Point().Base(), secKey)
	return
}

// GenKeys generates n ElGamal key pairs and the collective public key
// (the sum of all individual public keys), returned first.
func GenKeys(n int) (abstract.Point, []abstract.Scalar, []abstract.Point) {
	priv := make([]abstract.Scalar, n)
	pub := make([]abstract.Point, n)
	group := suite.Point().Null()
	for i := 0; i < n; i++ {
		priv[i], pub[i] = GenKey()
		// Accumulate the collective key in place.
		group.Add(group, pub[i])
	}
	return group, priv, pub
}
// Encryption
//______________________________________________________________________________________________________________________

// encryptPoint ElGamal-encrypts the point M under pubkey, producing the
// ciphertext (K, C) = (k*B, k*pubkey + M) for a fresh ephemeral scalar k.
func encryptPoint(pubkey abstract.Point, M abstract.Point) *CipherText {
	B := suite.Point().Base()
	k := suite.Scalar().Pick(random.Stream) // ephemeral private key
	// ElGamal-encrypt the point to produce ciphertext (K,C).
	K := suite.Point().Mul(B, k)      // ephemeral DH public key
	S := suite.Point().Mul(pubkey, k) // ephemeral DH shared secret
	C := S.Add(S, M)                  // message blinded with secret
	return &CipherText{K, C}
}
// IntToPoint maps an integer to the curve point integer*B, where B is
// the group's base point (the exponential encoding that makes the
// scheme additively homomorphic).
func IntToPoint(integer int64) abstract.Point {
	scalar := suite.Scalar().SetInt64(integer)
	return suite.Point().Mul(suite.Point().Base(), scalar)
}
// PointToCipherText wraps a plain point into a CipherText with a null
// ephemeral component (a trivially "encrypted" value).
func PointToCipherText(point abstract.Point) CipherText {
	return CipherText{K: suite.Point().Null(), C: point}
}

// IntToCipherText encodes an integer as a point and wraps it into a
// trivially encrypted CipherText.
func IntToCipherText(integer int64) CipherText {
	return PointToCipherText(IntToPoint(integer))
}

// IntArrayToCipherVector converts a slice of integers into a
// CipherVector of trivially encrypted encodings.
func IntArrayToCipherVector(integers []int64) CipherVector {
	result := make(CipherVector, len(integers))
	for i, v := range integers {
		result[i] = PointToCipherText(IntToPoint(v))
	}
	return result
}

// EncryptInt encodes integer as integer*B, ElGamal-encrypts it under
// pubkey and returns a pointer to the resulting CipherText.
func EncryptInt(pubkey abstract.Point, integer int64) *CipherText {
	return encryptPoint(pubkey, IntToPoint(integer))
}
// EncryptIntVector encrypts a []int64 into a CipherVector and returns a
// pointer to it. When PARALLELIZE is set, the input is processed in
// chunks of VPARALLELIZE entries, each by its own goroutine; chunks
// write to disjoint index ranges of cv, so no locking is needed.
func EncryptIntVector(pubkey abstract.Point, intArray []int64) *CipherVector {
	cv := make(CipherVector, len(intArray))
	if PARALLELIZE {
		var wg sync.WaitGroup
		for i := 0; i < len(intArray); i += VPARALLELIZE {
			wg.Add(1)
			go func(i int) {
				// Register Done first so Wait cannot hang if an
				// iteration panics (originally deferred last).
				defer wg.Done()
				for j := 0; j < VPARALLELIZE && (j+i < len(intArray)); j++ {
					cv[j+i] = *EncryptInt(pubkey, intArray[j+i])
				}
			}(i)
		}
		wg.Wait()
	} else {
		for i, n := range intArray {
			cv[i] = *EncryptInt(pubkey, n)
		}
	}
	return &cv
}
// NullCipherVector encrypts a 0-filled slice of the given length under
// the given public key (fresh encryptions of zero, not null points).
func NullCipherVector(length int, pubkey abstract.Point) *CipherVector {
	return EncryptIntVector(pubkey, make([]int64, length))
}
// Decryption
//______________________________________________________________________________________________________________________

// decryptPoint reverses encryptPoint: it recomputes the DH shared secret
// from the ephemeral key and the private scalar, then un-blinds C.
func decryptPoint(prikey abstract.Scalar, c CipherText) abstract.Point {
	S := suite.Point().Mul(c.K, prikey) // regenerate shared secret
	M := suite.Point().Sub(c.C, S)      // use to un-blind the message
	return M
}

// DecryptInt decrypts a ciphertext and brute-forces the discrete log of
// the resulting point to recover the encoded integer.
func DecryptInt(prikey abstract.Scalar, cipher CipherText) int64 {
	M := decryptPoint(prikey, cipher)
	return discreteLog(M)
}

// DecryptIntVector decrypts every element of a CipherVector into its
// integer encoding.
func DecryptIntVector(prikey abstract.Scalar, cipherVector *CipherVector) []int64 {
	result := make([]int64, len(*cipherVector))
	for i, c := range *cipherVector {
		result[i] = DecryptInt(prikey, c)
	}
	return result
}
// discreteLog brute-forces the discrete log of P (base B) to decode the
// integer it represents. Every intermediate point is memoized in
// PointToInt, and the walk resumes from the largest value computed so
// far (currentGreatestM / currentGreatestInt), so repeated calls get
// progressively cheaper. Returns 0 when the search exhausts
// MaxHomomorphicInt without a match.
// NOTE(review): reads and writes package-level state without locking —
// not safe for concurrent callers; confirm decoding is single-threaded.
func discreteLog(P abstract.Point) int64 {
	B := suite.Point().Base()
	var Bi abstract.Point
	var m int64
	var ok bool
	// Fast path: point already decoded once.
	if m, ok = PointToInt[P.String()]; ok {
		return m
	}
	// Lazy init: the first call starts walking from the identity (0*B).
	if currentGreatestInt == 0 {
		currentGreatestM = suite.Point().Null()
	}
	// Walk Bi = m*B upward from the last cached position, caching every
	// point seen, until P is found or the homomorphic bound is reached.
	for Bi, m = currentGreatestM, currentGreatestInt; !Bi.Equal(P) && m < MaxHomomorphicInt; Bi, m = Bi.Add(Bi, B), m+1 {
		PointToInt[Bi.String()] = m
	}
	currentGreatestM = Bi
	PointToInt[Bi.String()] = m
	currentGreatestInt = m
	//no negative responses
	if m == MaxHomomorphicInt {
		return 0
	}
	return m
}
// DeterministicTagging applies one server's step of the distributed
// deterministic tagging to a single ciphertext: this server's ElGamal
// contribution (private*K) is removed from C, and both components are
// then multiplied by the server's secret contribution.
func (c *CipherText) DeterministicTagging(gc *CipherText, private, secretContrib abstract.Scalar) {
	c.K = suite.Point().Mul(gc.K, secretContrib)
	contrib := suite.Point().Mul(gc.K, private)
	c.C = suite.Point().Sub(gc.C, contrib)
	c.C = suite.Point().Mul(c.C, secretContrib)
}
// DeterministicTagging performs one step of the distributed
// deterministic tagging process on a vector, storing the result in the
// receiver. When PARALLELIZE is set, the input is processed in chunks of
// VPARALLELIZE entries by separate goroutines; each goroutine writes to
// disjoint indices of *cv, so no locking is needed.
func (cv *CipherVector) DeterministicTagging(cipher *CipherVector, private, secretContrib abstract.Scalar) {
	if PARALLELIZE {
		var wg sync.WaitGroup
		for i := 0; i < len(*cipher); i += VPARALLELIZE {
			wg.Add(1)
			go func(i int) {
				// Register Done first so Wait cannot hang if an
				// iteration panics (originally deferred last).
				defer wg.Done()
				for j := 0; j < VPARALLELIZE && (j+i < len(*cipher)); j++ {
					(*cv)[i+j].DeterministicTagging(&(*cipher)[i+j], private, secretContrib)
				}
			}(i)
		}
		wg.Wait()
	} else {
		for i := range *cipher {
			(*cv)[i].DeterministicTagging(&(*cipher)[i], private, secretContrib)
		}
	}
}
// TaggingDet performs one step in the distributed deterministic tagging
// process on the receiver in place, optionally creating the
// corresponding proof.
// NOTE(review): the proof is constructed and then discarded
// (`_ = publishedProof`); presumably publication happens elsewhere or is
// still TODO — confirm before relying on proofs here.
func (cv *CipherVector) TaggingDet(privKey, secretContrib abstract.Scalar, pubKey abstract.Point, proofs bool) {
	switchedVect := NewCipherVector(len(*cv))
	switchedVect.DeterministicTagging(cv, privKey, secretContrib)
	if proofs {
		p1 := VectorDeterministicTagProofCreation(*cv, *switchedVect, secretContrib, privKey)
		//proof publication
		commitSecret := suite.Point().Mul(suite.Point().Base(), secretContrib)
		publishedProof := PublishedDeterministicTaggingProof{Dhp: p1, VectBefore: *cv, VectAfter: *switchedVect, K: pubKey, SB: commitSecret}
		_ = publishedProof
	}
	*cv = *switchedVect
}
// ReplaceContribution computes the new C component with the old mask
// contribution replaced by the new one, saving the result in the
// receiver. Only C is updated; the caller handles K.
func (c *CipherText) ReplaceContribution(cipher CipherText, old, new abstract.Point) {
	c.C.Sub(cipher.C, old)
	c.C.Add(c.C, new)
}

// KeySwitching performs one step of the key-switching process: the mask
// under the original ephemeral key is swapped for a mask under newKey
// using a fresh random scalar r. r is returned (needed downstream, for
// example for proof creation).
func (c *CipherText) KeySwitching(cipher CipherText, originalEphemeralKey, newKey abstract.Point, private abstract.Scalar) abstract.Scalar {
	r := suite.Scalar().Pick(random.Stream)
	oldContrib := suite.Point().Mul(originalEphemeralKey, private)
	newContrib := suite.Point().Mul(newKey, r)
	ephemContrib := suite.Point().Mul(suite.Point().Base(), r)
	c.ReplaceContribution(cipher, oldContrib, newContrib)
	c.K.Add(cipher.K, ephemContrib)
	return r
}
// KeySwitching performs one key-switching step on every element of
// cipher, storing the results in the receiver and returning the fresh
// ephemeral scalar used per element. When PARALLELIZE is set, elements
// are processed in chunks of VPARALLELIZE entries by separate
// goroutines; each goroutine writes to disjoint indices of r and *cv,
// so no locking is needed.
func (cv *CipherVector) KeySwitching(cipher CipherVector, originalEphemeralKeys []abstract.Point, newKey abstract.Point, private abstract.Scalar) []abstract.Scalar {
	r := make([]abstract.Scalar, len(*cv))
	if PARALLELIZE {
		var wg sync.WaitGroup
		for i := 0; i < len(cipher); i += VPARALLELIZE {
			wg.Add(1)
			go func(i int) {
				// Register Done first so Wait cannot hang if an
				// iteration panics (originally deferred last).
				defer wg.Done()
				for j := 0; j < VPARALLELIZE && (j+i < len(cipher)); j++ {
					r[i+j] = (*cv)[i+j].KeySwitching(cipher[i+j], originalEphemeralKeys[i+j], newKey, private)
				}
			}(i)
		}
		wg.Wait()
	} else {
		for i, c := range cipher {
			r[i] = (*cv)[i].KeySwitching(c, originalEphemeralKeys[i], newKey, private)
		}
	}
	return r
}
// Homomorphic operations
//______________________________________________________________________________________________________________________

// Add stores the homomorphic sum of two ciphertexts in the receiver
// (component-wise point addition).
func (c *CipherText) Add(c1, c2 CipherText) {
	c.C.Add(c1.C, c2.C)
	c.K.Add(c1.K, c2.K)
}

// MulCipherTextbyScalar multiplies both components of a ciphertext by a
// scalar (homomorphic multiplication of the plaintext by a).
func (c *CipherText) MulCipherTextbyScalar(cMul CipherText, a abstract.Scalar) {
	c.C = suite.Point().Mul(cMul.C, a)
	c.K = suite.Point().Mul(cMul.K, a)
}
// Add stores the element-wise homomorphic sum of two ciphervectors in
// the receiver. When PARALLELIZE is set, the work is split into chunks
// of VPARALLELIZE entries handled by separate goroutines writing to
// disjoint indices; otherwise a plain loop is used.
func (cv *CipherVector) Add(cv1, cv2 CipherVector) {
	if PARALLELIZE {
		var wg sync.WaitGroup
		for i := 0; i < len(cv1); i += VPARALLELIZE {
			wg.Add(1)
			go func(i int) {
				// Register Done first so Wait cannot hang if an
				// iteration panics (originally deferred last).
				defer wg.Done()
				for j := 0; j < VPARALLELIZE && (j+i < len(cv1)); j++ {
					(*cv)[i+j].Add(cv1[i+j], cv2[i+j])
				}
			}(i)
		}
		// Wait inside the same branch; the original re-tested
		// PARALLELIZE in a second, redundant if-block.
		wg.Wait()
	} else {
		for i := range cv1 {
			(*cv)[i].Add(cv1[i], cv2[i])
		}
	}
}
// Rerandomize rerandomizes the element at position j of cv1 into the
// receiver, following the Neff shuffling algorithm. If ciphert carries
// no precomputed value (C is nil), fresh blinding terms g*a and h*b are
// computed; otherwise ciphert's components are used directly.
func (cv *CipherVector) Rerandomize(cv1 CipherVector, a, b abstract.Scalar, ciphert CipherText, g, h abstract.Point, j int) {
	var tmp1, tmp2 abstract.Point
	if ciphert.C == nil {
		//no precomputed value
		tmp1 = suite.Point().Mul(g, a)
		tmp2 = suite.Point().Mul(h, b)
	} else {
		tmp1 = ciphert.K
		tmp2 = ciphert.C
	}
	(*cv)[j].K.Add(cv1[j].K, tmp1)
	(*cv)[j].C.Add(cv1[j].C, tmp2)
}
// Sub stores the homomorphic difference of two ciphertexts in the
// receiver (component-wise point subtraction).
func (c *CipherText) Sub(c1, c2 CipherText) {
	c.C.Sub(c1.C, c2.C)
	c.K.Sub(c1.K, c2.K)
}

// Sub subtracts cv2 from cv1 element-wise and stores the result in the
// receiver. Unlike Add, this operation is not parallelized.
func (cv *CipherVector) Sub(cv1, cv2 CipherVector) {
	for i := range cv1 {
		(*cv)[i].Sub(cv1[i], cv2[i])
	}
}
// Representation
//______________________________________________________________________________________________________________________

// CipherVectorToDeterministicTag creates a tag (grouping key) from a
// cipher vector by deterministically tagging it and keying on the
// resulting C components.
// NOTE(review): TaggingDet mutates cipherVect's elements in place even
// though the slice header is passed by value — the caller's vector is
// modified; confirm this is intended.
func CipherVectorToDeterministicTag(cipherVect CipherVector, privKey, secContrib abstract.Scalar, pubKey abstract.Point, proofs bool) GroupingKey {
	cipherVect.TaggingDet(privKey, secContrib, pubKey, proofs)
	deterministicGroupAttributes := make(DeterministCipherVector, len(cipherVect))
	for j, c := range cipherVect {
		deterministicGroupAttributes[j] = DeterministCipherText{Point: c.C}
	}
	return deterministicGroupAttributes.Key()
}
// Key builds a map-friendly representation of the vector by
// concatenating the string form of every deterministic ciphertext.
func (dcv *DeterministCipherVector) Key() GroupingKey {
	var sb strings.Builder
	for _, ct := range *dcv {
		sb.WriteString(ct.String())
	}
	return GroupingKey(sb.String())
}
// Equal checks equality between deterministic ciphervectors. Two nil
// vectors are equal; a nil and a non-nil vector are not. Vectors of
// different lengths are never equal.
func (dcv *DeterministCipherVector) Equal(dcv2 *DeterministCipherVector) bool {
	if dcv == nil || dcv2 == nil {
		return dcv == dcv2
	}
	// The original iterated dcv2 and indexed dcv without comparing
	// lengths: a shorter receiver panicked with index-out-of-range and a
	// longer one compared equal despite extra elements.
	if len(*dcv) != len(*dcv2) {
		return false
	}
	for i := range *dcv2 {
		if !(*dcv)[i].Equal(&(*dcv2)[i]) {
			return false
		}
	}
	return true
}
// Equal checks equality between deterministic ciphertexts.
// NOTE(review): panics if either operand or its Point is nil; callers
// are expected to pass fully initialised values.
func (dc *DeterministCipherText) Equal(dc2 *DeterministCipherText) bool {
	return dc2.Point.Equal(dc.Point)
}
// String returns the string form of the underlying point, or "<nil>"
// when the point is unset.
func (dc *DeterministCipherText) String() string {
	// The original wrapped the result in fmt.Sprintf("%s", cstr), which
	// is a no-op on a string (staticcheck S1025); return it directly.
	if dc.Point != nil {
		return dc.Point.String()
	}
	return "<nil>"
}
// String returns a short human-readable representation of a ciphertext,
// showing characters 1..6 of each component's string form — enough to
// tell values apart in logs without flooding them.
// NOTE(review): the [1:7] slice assumes each point's String() has at
// least 7 characters — confirm for the suite in use.
func (c *CipherText) String() string {
	cstr := "nil"
	kstr := cstr
	if (*c).C != nil {
		cstr = (*c).C.String()[1:7]
	}
	if (*c).K != nil {
		kstr = (*c).K.String()[1:7]
	}
	return fmt.Sprintf("CipherText{%s,%s}", kstr, cstr)
}
// RandomScalarSlice creates a slice of k scalars drawn from the suite's
// random cipher stream.
func RandomScalarSlice(k int) []abstract.Scalar {
	stream := suite.Cipher(abstract.RandomKey)
	scalars := make([]abstract.Scalar, k)
	for i := range scalars {
		scalars[i] = suite.Scalar().Pick(stream)
	}
	return scalars
}
// RandomPermutation returns a uniformly random permutation of the
// integers 0..k-1, produced by a Fisher-Yates shuffle driven by the
// suite's random cipher stream.
func RandomPermutation(k int) []int {
	stream := suite.Cipher(abstract.RandomKey)
	// Start from the identity permutation.
	pi := make([]int, k)
	for i := range pi {
		pi[i] = i
	}
	// Shuffle by random swaps, walking from the back.
	for i := k - 1; i > 0; i-- {
		j := int(random.Uint64(stream) % uint64(i+1))
		pi[i], pi[j] = pi[j], pi[i]
	}
	return pi
}
// Conversion
//______________________________________________________________________________________________________________________

// ToBytes serializes the vector as the concatenation of each element's
// 64-byte encoding; the element count is returned alongside because
// FromBytes needs it to decode.
func (cv *CipherVector) ToBytes() ([]byte, int) {
	b := make([]byte, 0)
	for _, el := range *cv {
		b = append(b, el.ToBytes()...)
	}
	return b, len(*cv)
}

// FromBytes decodes length ciphertexts of 64 bytes each from data,
// overwriting the receiver. Note that you need to create the (empty)
// object beforehand.
func (cv *CipherVector) FromBytes(data []byte, length int) {
	(*cv) = make(CipherVector, length)
	for i, pos := 0, 0; i < length*64; i, pos = i+64, pos+1 {
		ct := CipherText{}
		ct.FromBytes(data[i : i+64])
		(*cv)[pos] = ct
	}
}
// ToBytes serializes a CipherText as K's binary marshaling followed by
// C's. Marshaling failures are fatal (process exits).
func (c *CipherText) ToBytes() []byte {
	k, errK := (*c).K.MarshalBinary()
	if errK != nil {
		log.Fatal(errK)
	}
	cP, errC := (*c).C.MarshalBinary()
	if errC != nil {
		log.Fatal(errC)
	}
	b := append(k, cP...)
	return b
}

// FromBytes rebuilds a CipherText from a 64-byte encoding: K from the
// first 32 bytes, C from the rest. Note that you need to create the
// (empty) object beforehand.
// NOTE(review): UnmarshalBinary errors are silently discarded here, so
// malformed input yields undefined points; confirm callers validate
// input upstream (Deserialize only checks the base64 layer).
func (c *CipherText) FromBytes(data []byte) {
	(*c).K = suite.Point()
	(*c).C = suite.Point()
	(*c).K.UnmarshalBinary(data[:32])
	(*c).C.UnmarshalBinary(data[32:])
}
// Serialize encodes a CipherText in a base64 string.
func (c *CipherText) Serialize() string {
	return base64.StdEncoding.EncodeToString((*c).ToBytes())
}

// Deserialize decodes a CipherText from a base64 string, logging and
// returning any base64 decoding error. On success the receiver is
// overwritten with the decoded components.
func (c *CipherText) Deserialize(b64Encoded string) error {
	decoded, err := base64.StdEncoding.DecodeString(b64Encoded)
	if err != nil {
		log.Error("Invalid CipherText (decoding failed).", err)
		return err
	}
	(*c).FromBytes(decoded)
	return nil
}
// SerializeElement serializes any BinaryMarshaler-compatible element
// (e.g. abstract.Point or abstract.Scalar) using base64 encoding.
func SerializeElement(el encoding.BinaryMarshaler) (string, error) {
	bytes, err := el.MarshalBinary()
	if err != nil {
		log.Error("Error marshalling element.", err)
		return "", err
	}
	return base64.StdEncoding.EncodeToString(bytes), nil
}

// SerializePoint serializes a point; thin wrapper over SerializeElement.
func SerializePoint(point abstract.Point) (string, error) {
	return SerializeElement(point)
}

// SerializeScalar serializes a scalar; thin wrapper over
// SerializeElement.
func SerializeScalar(scalar encoding.BinaryMarshaler) (string, error) {
	return SerializeElement(scalar)
}
// DeserializePoint decodes a base64-encoded string back into a curve
// point. Both the base64 decoding error and the unmarshaling error are
// logged and returned.
func DeserializePoint(encodedPoint string) (abstract.Point, error) {
	raw, err := base64.StdEncoding.DecodeString(encodedPoint)
	if err != nil {
		log.Error("Error decoding point.", err)
		return nil, err
	}
	point := network.Suite.Point()
	if err := point.UnmarshalBinary(raw); err != nil {
		log.Error("Error unmarshalling point.", err)
		return nil, err
	}
	return point, nil
}
// DeserializeScalar decodes a base64-encoded string back into a scalar,
// logging and returning any decode or unmarshal error.
func DeserializeScalar(encodedScalar string) (abstract.Scalar, error) {
	decoded, errD := base64.StdEncoding.DecodeString(encodedScalar)
	if errD != nil {
		log.Error("Error decoding scalar.", errD)
		return nil, errD
	}
	scalar := network.Suite.Scalar()
	errM := scalar.UnmarshalBinary(decoded)
	if errM != nil {
		log.Error("Error unmarshalling scalar.", errM)
		return nil, errM
	}
	return scalar, nil
}
// AbstractPointsToBytes concatenates the binary marshaling of each
// point into a single byte slice. Marshaling failures are fatal.
func AbstractPointsToBytes(aps []abstract.Point) []byte {
	result := make([]byte, 0)
	for _, ap := range aps {
		chunk, err := ap.MarshalBinary()
		if err != nil {
			log.Fatal(err)
		}
		result = append(result, chunk...)
	}
	return result
}
// BytesToAbstractPoints converts a byte array to an array of abstract.Point
func BytesToAbstractPoints(target []byte) []abstract.Point {
var err error
aps := make([]abstract.Point, 0)
for i := 0; i < len(target); i += 32 {
ap := network.Suite.Point()
if err = ap.UnmarshalBinary(target[i : i+32]); err != nil {
log.Fatal(err)
}
aps = append(aps, ap)
}
return aps
} | lib/crypto.go | 0.620507 | 0.449513 | crypto.go | starcoder |
package restaurants_us
// PAGE_SIZE_LIMIT caps the number of rows requested per page for this
// table.
const PAGE_SIZE_LIMIT = 50

// ROW_LIMIT caps the total number of rows retrievable per query.
const ROW_LIMIT = 500
// restuarantsUSData describes one row of the Factual "restaurants-us"
// table. The string literal after each field is a struct tag used here
// purely as inline documentation of the column's meaning.
// NOTE(review): the type name contains a typo ("restuarants" for
// "restaurants") — left unchanged in case other files in this package
// reference it; consider renaming in a coordinated change.
// NOTE(review): the tags are not conventional `key:"value"` tags, so
// encoding packages (json, xml, ...) will ignore them; confirm this
// struct is not expected to drive (de)serialization.
type restuarantsUSData struct {
	factual_id string "The Factual ID"
	name string "Business/POI name"
	address string "Address number and street name"
	address_extended string "Additional address, incl. suite numbers"
	po_box string "PO Box. As they do not represent the physical location of a brick-and-mortar store, PO Boxes are often excluded from mobile use cases."
	locality string "City, town or equivalent"
	neighborhood string "The neighborhood(s) or other informal geography in which this entity is found."
	region string "State, province, territory, or equivalent"
	postcode string "Postcode or equivalent (zipcode in US)"
	country string
	latitude float64 "Latitude in decimal degrees (WGS84 datum). Value will not exceed 6 decimal places (0.111m)"
	longitude float64 "Longitude in decimal degrees (WGS84 datum). Value will not exceed 6 decimal places (0.111m)"
	tel string "Telephone number with local formatting"
	fax string "Fax number in local formatting"
	chain_id string "Indicates which chain (brand or franchise) this entity is a member of. See documentation for more information on Factual Chains."
	chain_name string "Label indicating which chain (brand or franchise) this entity is a member of. See documentation for more information on Factual Chains."
	category_ids int "Category IDs that classify this entity."
	category_label string "Category labels that describe the category branch or 'breadcrumb'."
	website string "Authority page (official website)"
	email string "Primary contact email address of organization"
	cuisine string "0-n string values describing, very loosely, the food served. A JSON enumerated list is available for download."
	price int "A price metric between one and five; we've used the following scale as an extremely coarse guide: 1: -$15 2: $15-30 3: $30-50 4: $50-75 5: $75+ Usually represented by dollar symbols or the local currency equivalent. Values are available as formatted JSON."
	rating float64 "A rating between 1 and 5, rounded to nearest half; usually represented graphically by stars. Calculated as the mean of multiple rating rollups across various sites and partners."
	payment_cashonly bool "Only accepts cash"
	reservations bool "Accepts reservations"
	hours string "JSON representation of hours of operation"
	hours_display string "Structured JSON representation of opening hours"
	open_24hrs bool "Open 24x7"
	founded string "year founded"
	attire string "A single value from an enumerated list: streetwear, casual,business casual,smart casual,formal This wordlist is available as downloadable JSON."
	attire_required string "Gotta have this on to get in. One or more strings from the following list:shirt with collar,shirt,shoes,jacket,tie,This wordlist is available as downloadable JSON."
	attire_prohibited string "Can't get in if you are sporting this. One or more strings from the following list:denim,cuttoffs,tank tops,sneakers,flipflops,sandals,hats,athletic wear,sports wear,boots This wordlist is available as JSON."
	parking bool "Some kind of parking is advertised; this will be true when any other parking attributes are true"
	parking_valet bool "valet parking is available"
	parking_garage bool "Garage parking is available"
	parking_street bool "Parking on-street"
	parking_lot bool "Parking lot adjacent, not necessarily dedicated to this place"
	parking_validated bool "Validated parking is available"
	parking_free bool "Free parking is available. This is common in most civilized places, but unknown in LA"
	smoking bool "This place allows smoking somewhere"
	meal_breakfast bool "serves breakfast"
	meal_lunch bool "serves lunch"
	meal_dinner bool "serves dinner"
	meal_takeout bool "Provides takeout/takeaway"
	meal_cater bool "provides catering"
	alcohol bool "Alcohol is served or can be consumed on the premesis; this will be true when any other alcohol attributes are true"
	alcohol_bar bool "Has a full bar"
	alcohol_beer_wine bool "Serves beer and wine only"
	alcohol_byob bool "bring your own bottle"
	kids_goodfor bool "noted as being good for kids"
	kids_menu bool "Had a kids menu"
	groups_goodfor bool "Noted as being good for groups"
	accessible_wheelchair bool "Premesis are noted explictly as being accessible by wheelchair"
	seating_outdoor bool "Outdoor seating is available"
	wifi bool "Wifi is provided by the establishment"
	owner string "Owner name(s)"
	room_private bool "Private dining room is available"
	options_vegetarian bool "vegetarian options noted"
	options_vegan bool "Vegan options noted"
	options_glutenfree bool "Gluten free items noted"
	options_lowfat bool "Lowfat options noted"
	options_organic bool "Organic options noted"
	options_healthy bool "Healthy dishes are explicitly available"
	admin_region string "Additional sub-division. Usually, but not always, a country sub-division"
	post_town string "Town/place employed in postal addressing. May not reflect the formal geographic location of a place."
	status string "Is the business a going concern: closed (0) or open (1). We are aware that this will prove confusing to electrical engineers. Deprecated, as we now expose only open businesses."
}
package iso20022
// Safekeeping or investment account. A safekeeping account is an account on which a securities entry is made. An investment account is an account between an investor(s) and a fund manager or a fund. The account can contain holdings in any investment fund or investment fund class managed (or distributed) by the fund manager, within the same fund family.
// All fields except Identification are optional (pointer-typed with
// `omitempty`), matching the ISO 20022 message definition; the xml tags
// carry the ISO element names.
type SafekeepingAccount2 struct {

	// Unique and unambiguous identification for the account between the account owner and the account servicer.
	Identification *AccountIdentificationFormatChoice `xml:"Id"`

	// Indicates whether the securities in the account are fungible, ie, interchangeable.
	FungibleIndicator *YesNoIndicator `xml:"FngbInd"`

	// Name of the account. It provides an additional means of identification, and is designated by the account servicer in agreement with the account owner.
	Name *Max35Text `xml:"Nm,omitempty"`

	// Supplementary registration information applying to a specific block of units for dealing and reporting purposes. The supplementary registration information may be used when all the units are registered, for example, to a funds supermarket, but holdings for each investor have to reconciled individually.
	Designation *Max35Text `xml:"Dsgnt,omitempty"`

	// Party that provides services relating to financial products to investors, eg, advice on products and placement of orders for the investment fund.
	IntermediaryInformation []*Intermediary11 `xml:"IntrmyInf,omitempty"`

	// Party that legally owns the account.
	AccountOwner *PartyIdentification2Choice `xml:"AcctOwnr,omitempty"`

	// Party that manages the account on behalf of the account owner, that is manages the registration and booking of entries on the account, calculates balances on the account and provides information about the account.
	AccountServicer *PartyIdentification2Choice `xml:"AcctSvcr,omitempty"`
}
// AddIdentification initialises the Identification choice and returns
// it for the caller to populate.
func (s *SafekeepingAccount2) AddIdentification() *AccountIdentificationFormatChoice {
	s.Identification = new(AccountIdentificationFormatChoice)
	return s.Identification
}

// SetFungibleIndicator sets the fungibility flag from its string form.
func (s *SafekeepingAccount2) SetFungibleIndicator(value string) {
	s.FungibleIndicator = (*YesNoIndicator)(&value)
}

// SetName sets the account name.
func (s *SafekeepingAccount2) SetName(value string) {
	s.Name = (*Max35Text)(&value)
}

// SetDesignation sets the supplementary registration designation.
func (s *SafekeepingAccount2) SetDesignation(value string) {
	s.Designation = (*Max35Text)(&value)
}

// AddIntermediaryInformation appends a new intermediary entry and
// returns it for the caller to populate.
func (s *SafekeepingAccount2) AddIntermediaryInformation() *Intermediary11 {
	newValue := new(Intermediary11)
	s.IntermediaryInformation = append(s.IntermediaryInformation, newValue)
	return newValue
}

// AddAccountOwner initialises the account-owner choice and returns it.
func (s *SafekeepingAccount2) AddAccountOwner() *PartyIdentification2Choice {
	s.AccountOwner = new(PartyIdentification2Choice)
	return s.AccountOwner
}

// AddAccountServicer initialises the account-servicer choice and
// returns it.
func (s *SafekeepingAccount2) AddAccountServicer() *PartyIdentification2Choice {
	s.AccountServicer = new(PartyIdentification2Choice)
	return s.AccountServicer
}
package iso20022
// Specifies rates related to a corporate action option.
// Every field is optional (pointer/slice-typed with `omitempty`),
// matching the ISO 20022 message definition; the xml tags carry the ISO
// element names.
type CorporateActionRate79 struct {

	// Rate used for additional tax that cannot be categorised.
	AdditionalTax *RateAndAmountFormat46Choice `xml:"AddtlTax,omitempty"`

	// Cash dividend amount per equity before deductions or allowances have been made.
	GrossDividendRate []*GrossDividendRateFormat25Choice `xml:"GrssDvddRate,omitempty"`

	// Cash dividend amount per equity after deductions or allowances have been made.
	NetDividendRate []*NetDividendRateFormat27Choice `xml:"NetDvddRate,omitempty"`

	// Public index rate applied to the amount paid to adjust it to inflation.
	IndexFactor *RateAndAmountFormat46Choice `xml:"IndxFctr,omitempty"`

	// The actual interest rate used for the payment of the interest for the specified interest period.
	// Usage guideline: It is used to provide the applicable rate for the current payment, after all calculations have been performed, that is, application of period and method of interest computation.
	InterestRateUsedForPayment []*InterestRateUsedForPaymentFormat10Choice `xml:"IntrstRateUsdForPmt,omitempty"`

	// Maximum percentage of shares available through the over subscription privilege, usually a percentage of the basic subscription shares, for example, an account owner subscribing to 100 shares may over subscribe to a maximum of 50 additional shares when the over subscription maximum is 50 percent.
	MaximumAllowedOversubscriptionRate *RateFormat3Choice `xml:"MaxAllwdOvrsbcptRate,omitempty"`

	// Proportionate allocation used for the offer.
	ProrationRate *RateFormat3Choice `xml:"PrratnRate,omitempty"`

	// Percentage of a cash distribution that will be withheld by the tax authorities of the jurisdiction of the issuer, for which a relief at source and/or reclaim may be possible.
	WithholdingTaxRate []*RateAndAmountFormat47Choice `xml:"WhldgTaxRate,omitempty"`

	// Rate at which the income will be withheld by a jurisdiction other than the jurisdiction of the issuer’s country of tax incorporation, for which a relief at source and/or reclaim may be possible. It is levied in complement or offset of the withholding tax rate (TAXR) levied by the jurisdiction of the issuer’s tax domicile.
	SecondLevelTax []*RateAndAmountFormat47Choice `xml:"ScndLvlTax,omitempty"`

	// Amount included in the dividend/NAV that is identified as gains directly or indirectly derived from interest payments, for example, in the context of the EU Savings directive.
	TaxableIncomePerDividendShare []*RateTypeAndAmountAndStatus33 `xml:"TaxblIncmPerDvddShr,omitempty"`

	// Exchange rate (provided by the issuer) between the dividend or interest rate in the paid currency and the declared dividend or interest rate.
	IssuerDeclaredExchangeRate *ForeignExchangeTerms19 `xml:"IssrDclrdXchgRate,omitempty"`

	// Overall tax withheld at source by fund managers prior to considering the tax obligation of each unit holder.
	TaxOnIncome *RateAndAmountFormat46Choice `xml:"TaxOnIncm,omitempty"`
}
// AddAdditionalTax initialises the additional-tax choice and returns it
// for the caller to populate.
func (c *CorporateActionRate79) AddAdditionalTax() *RateAndAmountFormat46Choice {
	c.AdditionalTax = new(RateAndAmountFormat46Choice)
	return c.AdditionalTax
}

// AddGrossDividendRate appends a new gross dividend rate entry and
// returns it.
func (c *CorporateActionRate79) AddGrossDividendRate() *GrossDividendRateFormat25Choice {
	newValue := new(GrossDividendRateFormat25Choice)
	c.GrossDividendRate = append(c.GrossDividendRate, newValue)
	return newValue
}

// AddNetDividendRate appends a new net dividend rate entry and returns
// it.
func (c *CorporateActionRate79) AddNetDividendRate() *NetDividendRateFormat27Choice {
	newValue := new(NetDividendRateFormat27Choice)
	c.NetDividendRate = append(c.NetDividendRate, newValue)
	return newValue
}

// AddIndexFactor initialises the index-factor choice and returns it.
func (c *CorporateActionRate79) AddIndexFactor() *RateAndAmountFormat46Choice {
	c.IndexFactor = new(RateAndAmountFormat46Choice)
	return c.IndexFactor
}

// AddInterestRateUsedForPayment appends a new interest-rate entry and
// returns it.
func (c *CorporateActionRate79) AddInterestRateUsedForPayment() *InterestRateUsedForPaymentFormat10Choice {
	newValue := new(InterestRateUsedForPaymentFormat10Choice)
	c.InterestRateUsedForPayment = append(c.InterestRateUsedForPayment, newValue)
	return newValue
}

// AddMaximumAllowedOversubscriptionRate initialises the
// oversubscription-rate choice and returns it.
func (c *CorporateActionRate79) AddMaximumAllowedOversubscriptionRate() *RateFormat3Choice {
	c.MaximumAllowedOversubscriptionRate = new(RateFormat3Choice)
	return c.MaximumAllowedOversubscriptionRate
}

// AddProrationRate initialises the proration-rate choice and returns it.
func (c *CorporateActionRate79) AddProrationRate() *RateFormat3Choice {
	c.ProrationRate = new(RateFormat3Choice)
	return c.ProrationRate
}

// AddWithholdingTaxRate appends a new withholding-tax entry and returns
// it.
func (c *CorporateActionRate79) AddWithholdingTaxRate() *RateAndAmountFormat47Choice {
	newValue := new(RateAndAmountFormat47Choice)
	c.WithholdingTaxRate = append(c.WithholdingTaxRate, newValue)
	return newValue
}

// AddSecondLevelTax appends a new second-level-tax entry and returns it.
func (c *CorporateActionRate79) AddSecondLevelTax() *RateAndAmountFormat47Choice {
	newValue := new(RateAndAmountFormat47Choice)
	c.SecondLevelTax = append(c.SecondLevelTax, newValue)
	return newValue
}

// AddTaxableIncomePerDividendShare appends a new taxable-income entry
// and returns it.
func (c *CorporateActionRate79) AddTaxableIncomePerDividendShare() *RateTypeAndAmountAndStatus33 {
	newValue := new(RateTypeAndAmountAndStatus33)
	c.TaxableIncomePerDividendShare = append(c.TaxableIncomePerDividendShare, newValue)
	return newValue
}

// AddIssuerDeclaredExchangeRate initialises the declared-exchange-rate
// element and returns it.
func (c *CorporateActionRate79) AddIssuerDeclaredExchangeRate() *ForeignExchangeTerms19 {
	c.IssuerDeclaredExchangeRate = new(ForeignExchangeTerms19)
	return c.IssuerDeclaredExchangeRate
}

// AddTaxOnIncome initialises the tax-on-income choice and returns it.
func (c *CorporateActionRate79) AddTaxOnIncome() *RateAndAmountFormat46Choice {
	c.TaxOnIncome = new(RateAndAmountFormat46Choice)
	return c.TaxOnIncome
}
package gomfa
// Epb converts a Julian Date to a Besselian Epoch.
//
// Given:
//    dj1,dj2   float64   Julian Date (see note)
//
// Returned (function value):
//              float64   Besselian Epoch.
//
// Note:
//    The Julian Date is supplied in two pieces, in the usual ERFA
//    manner, which is designed to preserve time resolution. The
//    Julian Date is available as a single number by adding dj1 and
//    dj2. The maximum resolution is achieved if dj1 is 2451545.0
//    (J2000.0).
//
// Reference:
//    Lieske, J.H., 1979. Astron.Astrophys., 73, 282.
//
// This revision: 2021 May 11
func Epb(dj1 float64, dj2 float64) float64 {
	// J2000.0-B1900.0 (2415019.81352) in days.
	const d1900 = 36524.68648

	// Accumulate days since J2000.0 piecewise to preserve precision,
	// rebase to B1900.0, then scale by the tropical year length (DTY).
	daysSinceB1900 := (dj1 - DJ00) + (dj2 + d1900)
	return 1900.0 + daysSinceB1900/DTY
}
/*----------------------------------------------------------------------
**
**
** Copyright (C) 2021, <NAME>
** All rights reserved.
**
** This library is derived, with permission, from the International
** Astronomical Union's "Standards of Fundamental Astronomy" library,
** available from http://www.iausofa.org.
**
** The GOMFA version is intended to retain identical functionality to
** the SOFA library, but made distinct through different namespaces and
** file names, as set out in the SOFA license conditions. The SOFA
** original has a role as a reference standard for the IAU and IERS,
** and consequently redistribution is permitted only in its unaltered
** state. The GOMFA version is not subject to this restriction and
** therefore can be included in distributions which do not support the
** concept of "read only" software.
**
** Although the intent is to replicate the SOFA API (other than
** replacement of prefix names) and results (with the exception of
** bugs; any that are discovered will be fixed), SOFA is not
** responsible for any errors found in this version of the library.
**
** If you wish to acknowledge the SOFA heritage, please acknowledge
** that you are using a library derived from SOFA, rather than SOFA
** itself.
**
**
** TERMS AND CONDITIONS
**
** Redistribution and use in source and binary forms, with or without
** modification, are permitted provided that the following conditions
** are met:
**
** 1 Redistributions of source code must retain the above copyright
** notice, this list of conditions and the following disclaimer.
**
** 2 Redistributions in binary form must reproduce the above copyright
** notice, this list of conditions and the following disclaimer in
** the documentation and/or other materials provided with the
** distribution.
**
** 3 Neither the name of the Standards Of Fundamental Astronomy Board,
** the International Astronomical Union nor the names of its
** contributors may be used to endorse or promote products derived
** from this software without specific prior written permission.
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
** FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
** COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
** INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
** BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
** LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
** CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
** LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
** ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
** POSSIBILITY OF SUCH DAMAGE.
**
*/
package finance
import "github.com/hyperjiang/php"
// Loan is a loan, default method is EqualPayment
type Loan struct {
	Amount     float64 // total borrowed principal
	Periods    int     // number of repayment periods (monthly: AnnualRate is divided by 12 below)
	AnnualRate float64 // annual interest rate, e.g. 0.05 for 5%
	Method     int     // repayment method; EqualPrincipal or (default) equal payment
}
// Installment is an installment
type Installment struct {
	Period          int     // 1-based period number
	Payment         float64 // total paid this period (principal + interest), rounded
	Principal       float64 // principal portion of the payment, rounded
	Interest        float64 // interest portion of the payment, rounded
	RemainingAmount float64 // outstanding balance after this payment; forced to 0 on the last period
}
// CalculatePayment calculates payment in given period
func (loan Loan) CalculatePayment(period int) float64 {
	principal := loan.CalculatePrincipal(period)
	interest := loan.CalculateInterest(period)
	return principal + interest
}
// CalculatePrincipal calculates principal in given period
func (loan Loan) CalculatePrincipal(period int) float64 {
	switch loan.Method {
	case EqualPrincipal:
		// Straight-line: the principal is spread evenly across all periods.
		return loan.Amount / float64(loan.Periods)
	default:
		// Equal-payment (annuity): delegate to the PPMT financial function.
		return PPMT(loan.AnnualRate/12, period, loan.Periods, -loan.Amount, 0, 0)
	}
}
// CalculateInterest calculates interest in given period
func (loan Loan) CalculateInterest(period int) float64 {
	rate := loan.AnnualRate / 12
	if loan.Method != EqualPrincipal {
		// Equal-payment (annuity): delegate to the IPMT financial function.
		return IPMT(rate, period, loan.Periods, -loan.Amount, 0, 0)
	}
	// Equal-principal: interest accrues on the outstanding balance only.
	outstanding := loan.Amount * float64(loan.Periods-period+1) / float64(loan.Periods)
	return outstanding * rate
}
// CalculateTotalPayment calculates total payment
func (loan Loan) CalculateTotalPayment() float64 {
	rate := loan.AnnualRate / 12
	var total float64
	if loan.Method == EqualPrincipal {
		// Closed form: principal plus interest on the linearly declining balance.
		total = loan.Amount * (1 + rate*float64(1+loan.Periods)/2)
	} else {
		// Annuity: the constant payment times the number of periods.
		total = PMT(rate, loan.Periods, -loan.Amount, 0, 0) * float64(loan.Periods)
	}
	return php.Round(total, Precision)
}
// CalculateTotalInterest calculates total interest
func (loan Loan) CalculateTotalInterest() float64 {
	interest := loan.CalculateTotalPayment() - loan.Amount
	return php.Round(interest, Precision)
}
// CalculateInstallments calculates installments
func (loan Loan) CalculateInstallments() []Installment {
var installments []Installment
remainingAmount := loan.Amount
for p := 1; p <= loan.Periods; p++ {
var installment Installment
installment.Period = p
installment.Payment = php.Round(loan.CalculatePayment(p), Precision)
installment.Principal = php.Round(loan.CalculatePrincipal(p), Precision)
installment.Interest = php.Round(loan.CalculateInterest(p), Precision)
remainingAmount = php.Round(remainingAmount-installment.Principal, Precision)
installment.RemainingAmount = remainingAmount
if p == loan.Periods {
installment.RemainingAmount = 0
}
installments = append(installments, installment)
}
return installments
} | installment.go | 0.687 | 0.598254 | installment.go | starcoder |
package led
import (
"bytes"
"github.com/pkg/term"
)
// StartTerm starts a terminal.
func StartTerm(ts ...Iterm) *Term {
	// Use the supplied Iterm (for tests) or fall back to the real tty.
	var tty Iterm
	if len(ts) == 0 {
		tty = &termWrap{}
	} else {
		tty = ts[0]
	}
	tty.Start()
	return &Term{tty: tty}
}
// Term represents a terminal
type Term struct {
	tty Iterm // underlying tty implementation (real terminal or test double)
	pos int   // NOTE(review): not read or written by any method in this file — confirm before removing
}
// Read returns a channel for reading keys from the terminal. See keys.Read.
func (t *Term) Read() chan Key {
	keys := Read(t.tty)
	return keys
}
// Write writes the given chars to the terminal.
func (t *Term) Write(b []byte) {
	_, _ = t.tty.Write(b)
}
// Del writes the given number of delete chars (`\x7F`) to the terminal
// (defaults to 1), deleting the char after the cursor position.
func (t *Term) Del(i ...int) {
	n := 1
	if len(i) > 0 {
		n = i[0]
	}
	t.tty.Write(bytes.Repeat(chars(Del), n))
}
// Newline writes a newline char to the terminal.
func (t *Term) Newline() {
	seq := chars(Newline)
	t.Write(seq)
}
// Return writes a carriage return char to the terminal.
func (t *Term) Return() {
	seq := chars(Cr)
	t.Write(seq)
}
// ClearLine clears the current line.
func (t *Term) ClearLine() {
	t.Return() // move the cursor to column 0
	t.Clear()  // then wipe to the end of the line
}
// Clear clears from the current cursor position to the end of the line.
func (t *Term) Clear() {
	seq := chars(Clear)
	t.Write(seq)
}
// ShowCursor shows the cursor.
func (t *Term) ShowCursor() {
	seq := chars(ShowCursor)
	t.Write(seq)
}
// HideCursor hides the cursor.
func (t *Term) HideCursor() {
	seq := chars(HideCursor)
	t.Write(seq)
}
// SetCursor moves the cursor to the given horizontal position.
func (t *Term) SetCursor(pos int) {
	seq := SetCursor(pos)
	t.Write(seq)
}
// MoveCursor moves the cursor by the given number of chars in the given
// direction.
func (t *Term) MoveCursor(i int, dir int) {
	seq := MoveCursor(i, dir)
	t.Write(seq)
}
// Pause pauses the terminal, restoring the previous mode and settings.
func (t *Term) Pause() {
	_ = t.tty.Restore()
}
// Resume resumes the terminal, setting the terminal in raw mode.
func (t *Term) Resume() {
	_ = t.tty.RawMode()
}
// Stop stops the terminal, restoring the previous mode and settings, and
// closing the tty.
func (t *Term) Stop() {
	_ = t.tty.Restore()
	_ = t.tty.Close()
}
// Iterm represents a subset of the tty implemented in github.com/pkg/term.
type Iterm interface {
	// Start opens the terminal device and puts it into raw mode.
	Start()
	// Read and Write transfer bytes to and from the tty.
	Read(b []byte) (int, error)
	Write(b []byte) (int, error)
	// Restore returns the tty to its previous mode and settings.
	Restore() error
	// RawMode puts the tty into raw mode.
	RawMode() error
	// Close closes the tty.
	Close() error
}
// termWrap adapts *term.Term (github.com/pkg/term) to the Iterm interface.
type termWrap struct {
	tty *term.Term // nil until Start is called
}
// Start opens /dev/tty and puts it into raw mode.
func (t *termWrap) Start() {
	// NOTE(review): the error from term.Open is discarded; if opening
	// /dev/tty fails, t.tty stays nil and subsequent calls will panic —
	// confirm whether that is acceptable here.
	tty, _ := term.Open("/dev/tty")
	t.tty = tty
	t.RawMode()
}
// Read reads from the underlying tty into b.
func (t *termWrap) Read(b []byte) (int, error) {
	n, err := t.tty.Read(b)
	return n, err
}
// Write writes b to the underlying tty.
func (t *termWrap) Write(b []byte) (int, error) {
	n, err := t.tty.Write(b)
	return n, err
}
// Restore returns the underlying tty to its previous mode and settings.
func (t *termWrap) Restore() error {
	err := t.tty.Restore()
	return err
}
// Close closes the underlying tty device.
func (t *termWrap) Close() error {
	err := t.tty.Close()
	return err
}
// RawMode puts the underlying tty into raw mode.
func (t *termWrap) RawMode() error {
	err := term.RawMode(t.tty)
	return err
}
func chars(c int) []byte {
return Ansi(c)
} | term.go | 0.698946 | 0.439988 | term.go | starcoder |
// Package function implements some functions for control the function execution and some is for functional programming.
package function
import (
"reflect"
"time"
)
// After creates a function that invokes func once it's called n or more times
func After(n int, fn any) func(args ...any) []reflect.Value {
	// Catch programming error while constructing the closure
	mustBeFunction(fn)
	remaining := n
	return func(args ...any) []reflect.Value {
		remaining--
		if remaining >= 1 {
			// Not enough calls yet; do nothing.
			return nil
		}
		return unsafeInvokeFunc(fn, args...)
	}
}
// Before creates a function that invokes func once it's called less than n times
func Before(n int, fn any) func(args ...any) []reflect.Value {
	// Catch programming error while constructing the closure
	mustBeFunction(fn)
	var last []reflect.Value
	return func(args ...any) []reflect.Value {
		if n > 0 {
			last = unsafeInvokeFunc(fn, args...)
		}
		if n <= 0 {
			// Drop the reference so fn can be collected; later calls
			// keep returning the last computed result.
			fn = nil
		}
		n--
		return last
	}
}
// Fn is for curry function which is func(...any) any
type Fn func(...any) any

// Curry make a curry function
func (f Fn) Curry(i any) func(...any) any {
	return func(values ...any) any {
		// Prepend the bound argument i to the call's own arguments.
		args := make([]any, 0, len(values)+1)
		args = append(args, i)
		args = append(args, values...)
		return f(args...)
	}
}
// Compose compose the functions from right to left
func Compose(fnList ...func(...any) any) func(...any) any {
	return func(args ...any) any {
		head := fnList[0]
		tail := fnList[1:]
		// Base case: a single function is applied directly.
		if len(fnList) == 1 {
			return head(args...)
		}
		// Recurse: apply the rest first, then the head.
		return head(Compose(tail...)(args...))
	}
}
// Delay make the function execution after delayed time
func Delay(delay time.Duration, fn any, args ...any) {
	// Catch programming error while constructing the closure
	mustBeFunction(fn)
	// Blocks the calling goroutine for the full delay, then invokes
	// fn synchronously with args.
	time.Sleep(delay)
	invokeFunc(fn, args...)
}
// Debounced creates a debounced function that delays invoking fn until after wait duration have elapsed since the last time the debounced function was invoked.
func Debounced(fn func(), duration time.Duration) func() {
	// Catch programming error while constructing the closure
	mustBeFunction(fn)
	timer := time.NewTimer(duration)
	timer.Stop() // armed only when the returned function is called
	// Watcher: each time the timer fires, run fn asynchronously.
	// NOTE(review): this goroutine has no stop signal and lives for the
	// lifetime of the process — confirm that is acceptable for callers.
	go func() {
		for range timer.C {
			go fn()
		}
	}()
	return func() { timer.Reset(duration) }
}
// Schedule invoke function every duration time, util close the returned bool chan
func Schedule(d time.Duration, fn any, args ...any) chan bool {
// Catch programming error while constructing the closure
mustBeFunction(fn)
quit := make(chan bool)
go func() {
for {
unsafeInvokeFunc(fn, args...)
select {
case <-time.After(d):
case <-quit:
return
}
}
}()
return quit
} | function/function.go | 0.639961 | 0.414366 | function.go | starcoder |
package triangulate
import (
"fmt"
)
// cross returns the z component of the 2D cross product of
// vectors (ax, ay) and (bx, by); its sign gives their orientation.
func cross(ax, ay, bx, by float32) float32 {
	return ax*by - ay*bx
}
// triangleCross returns the cross product of the edge vectors
// (b-a) and (c-b); its sign gives the turn direction at b.
func triangleCross(a, b, c Point) float32 {
	return cross(b.X-a.X, b.Y-a.Y, c.X-b.X, c.Y-b.Y)
}
// adjacentIndices returns the elements before, at, and after position idx,
// treating the slice as circular.
func adjacentIndices(indices []uint16, idx int) (uint16, uint16, uint16) {
	n := len(indices)
	prev := indices[(idx+n-1)%n]
	cur := indices[idx]
	next := indices[(idx+1)%n]
	return prev, cur, next
}
// InTriangle reports whether pt lies inside the triangle (pt0, pt1, pt2).
// Points on the triangle's axis-aligned bounding box are treated as outside.
func InTriangle(pt, pt0, pt1, pt2 Point) bool {
	// Cheap rejection: pt must be strictly inside the bounding box.
	onMinX := pt.X <= pt0.X && pt.X <= pt1.X && pt.X <= pt2.X
	onMaxX := pt.X >= pt0.X && pt.X >= pt1.X && pt.X >= pt2.X
	onMinY := pt.Y <= pt0.Y && pt.Y <= pt1.Y && pt.Y <= pt2.Y
	onMaxY := pt.Y >= pt0.Y && pt.Y >= pt1.Y && pt.Y >= pt2.Y
	if onMinX || onMaxX || onMinY || onMaxY {
		return false
	}
	// pt is inside iff it is on the same side of all three edges,
	// i.e. the three cross products share a sign (zero allowed).
	c0 := cross(pt.X-pt0.X, pt.Y-pt0.Y, pt1.X-pt0.X, pt1.Y-pt0.Y)
	c1 := cross(pt.X-pt1.X, pt.Y-pt1.Y, pt2.X-pt1.X, pt2.Y-pt1.Y)
	c2 := cross(pt.X-pt2.X, pt.Y-pt2.Y, pt0.X-pt2.X, pt0.Y-pt2.Y)
	allNonPos := c0 <= 0 && c1 <= 0 && c2 <= 0
	allNonNeg := c0 >= 0 && c1 >= 0 && c2 >= 0
	return allNonPos || allNonNeg
}
// Triangulate triangulates the region surrounded by the points pts and returnes the point indices.
func Triangulate(pts []Point) []uint16 {
	// A polygon needs at least three vertices.
	if len(pts) < 3 {
		return nil
	}
	var currentIndices []uint16
	// Split pts into the two point groups if there are the same points.
	// A repeated point means the polygon pinches into two sub-polygons;
	// triangulate each half recursively and remap their local indices
	// back into pts's index space.
	for i := range pts {
		for j := 0; j < i; j++ {
			if pts[i] == pts[j] {
				is0 := Triangulate(pts[j:i])
				for idx := range is0 {
					is0[idx] += uint16(j)
				}
				is1 := Triangulate(append(pts[i:], pts[:j]...))
				for idx := range is1 {
					is1[idx] = uint16((int(is1[idx]) + i) % len(pts))
				}
				return append(is0, is1...)
			}
		}
		currentIndices = append(currentIndices, uint16(i))
	}
	var indices []uint16
	// Triangulation by Ear Clipping.
	// https://www.geometrictools.com/Documentation/TriangulationByEarClipping.pdf
	// TODO: Adopt a more efficient algorithm.
	for len(currentIndices) >= 3 {
		// Calculate cross-products and remove unneeded vertices.
		// A zero cross product means the three vertices are collinear,
		// so the middle one contributes nothing and is dropped.
		cs := make([]float32, len(currentIndices))
		idxToRemove := -1
		// Determine the direction of the polygon from the upper-left point.
		var upperLeft int
		for i := range currentIndices {
			i0, i1, i2 := adjacentIndices(currentIndices, i)
			pt0 := pts[i0]
			pt1 := pts[i1]
			pt2 := pts[i2]
			c := triangleCross(pt0, pt1, pt2)
			if c == 0 {
				idxToRemove = i
				break
			}
			cs[i] = c
			if pts[currentIndices[upperLeft]].X > pts[currentIndices[i]].X {
				upperLeft = i
			} else if pts[currentIndices[upperLeft]].X == pts[currentIndices[i]].X &&
				pts[currentIndices[upperLeft]].Y > pts[currentIndices[i]].Y {
				upperLeft = i
			}
		}
		if idxToRemove != -1 {
			currentIndices = append(currentIndices[:idxToRemove], currentIndices[idxToRemove+1:]...)
			continue
		}
		// The winding at a convex extreme vertex (the upper-left one)
		// gives the orientation of the whole polygon.
		clockwise := cs[upperLeft] < 0
		idx := -1
	index:
		for i := range currentIndices {
			c := cs[i]
			if c == 0 {
				// Collinear vertices were removed above, so this is unreachable.
				panic("math: cross value must not be 0")
			}
			if c < 0 && !clockwise || c > 0 && clockwise {
				// The angle is more than 180 degrees. This is not an ear.
				continue
			}
			i0, i1, i2 := adjacentIndices(currentIndices, i)
			pt0 := pts[i0]
			pt1 := pts[i1]
			pt2 := pts[i2]
			for _, j := range currentIndices {
				if j == i0 || j == i1 || j == i2 {
					continue
				}
				if InTriangle(pts[j], pt0, pt1, pt2) {
					// If the triangle includes another point, the triangle is not an ear.
					continue index
				}
			}
			// The angle is less than 180 degrees. This is an ear.
			idx = i
			break
		}
		if idx < 0 {
			// TODO: This happens when there is self-crossing.
			panic(fmt.Sprintf("math: there is no ear in the polygon: %v", pts))
		}
		// Emit the ear as one triangle and clip its tip from the polygon.
		i0, i1, i2 := adjacentIndices(currentIndices, idx)
		indices = append(indices, i0, i1, i2)
		currentIndices = append(currentIndices[:idx], currentIndices[idx+1:]...)
	}
	return indices
}
package value
import (
"math/big"
)
// Binary operators.
// To avoid initialization cycles when we refer to the ops from inside
// themselves, we use an init function to initialize the ops.
// binaryArithType returns the maximum of the two types,
// so the smaller value is appropriately up-converted.
func binaryArithType(t1, t2 valueType) valueType {
	if t2 > t1 {
		return t2
	}
	return t1
}
// divType is like binaryArithType but never returns smaller than BigInt,
// because the only implementation of exponentiation we have is in big.Int.
func divType(t1, t2 valueType) valueType {
	lifted := t1
	if lifted == intType {
		lifted = bigIntType
	}
	return binaryArithType(lifted, t2)
}
// rationalType promotes scalars to rationals so we can do rational division.
func rationalType(t1, t2 valueType) valueType {
	lifted := t1
	if lifted < bigRatType {
		lifted = bigRatType
	}
	return binaryArithType(lifted, t2)
}
// atLeastVectorType promotes both arguments to at least vectors.
func atLeastVectorType(t1, t2 valueType) valueType {
	// If either side is already a matrix (or higher), the result is a matrix.
	if t1 >= matrixType || t2 >= matrixType {
		return matrixType
	}
	return vectorType
}
// shiftCount converts x to an unsigned integer.
// Any value that is negative, too large, or not an integer type is
// rejected via Errorf (which, given the trailing panic guard, presumably
// does not return normally — confirm against its definition).
func shiftCount(x Value) uint {
	switch count := x.(type) {
	case Int:
		if count < 0 || count >= maxInt {
			Errorf("illegal shift count %d", count)
		}
		return uint(count)
	case BigInt:
		// Must be small enough for an int; that will happen if
		// the LHS is a BigInt because the RHS will have been lifted.
		reduced := count.shrink()
		if _, ok := reduced.(Int); ok {
			return shiftCount(reduced)
		}
	}
	Errorf("illegal shift count type")
	panic("not reached")
}
// binaryBigIntOp applies the three-operand *big.Int operation op to the
// BigInt values u and v and returns the (possibly shrunk) result.
func binaryBigIntOp(u Value, op func(*big.Int, *big.Int, *big.Int) *big.Int, v Value) Value {
	a, b := u.(BigInt), v.(BigInt)
	result := bigInt64(0)
	op(result.Int, a.Int, b.Int)
	return result.shrink()
}
// binaryBigRatOp applies the three-operand *big.Rat operation op to the
// BigRat values u and v and returns the (possibly shrunk) result.
func binaryBigRatOp(u Value, op func(*big.Rat, *big.Rat, *big.Rat) *big.Rat, v Value) Value {
	a, b := u.(BigRat), v.(BigRat)
	result := bigRatInt64(0)
	op(result.Rat, a.Rat, b.Rat)
	return result.shrink()
}
// binaryBigFloatOp applies the three-operand *big.Float operation op to the
// BigFloat values u and v, using c's configuration for the result precision.
func binaryBigFloatOp(c Context, u Value, op func(*big.Float, *big.Float, *big.Float) *big.Float, v Value) Value {
	a, b := u.(BigFloat), v.(BigFloat)
	result := bigFloatInt64(c.Config(), 0)
	op(result.Float, a.Float, b.Float)
	return result.shrink()
}
// bigIntExp is the "op" for exp on *big.Int. Different signature for Exp means we can't use *big.Exp directly.
// Also we need a context (really a config); see the bigIntExpOp function below.
// We know this is not 0**negative.
// It computes i = j**k, with fast paths for bases 0, 1, -1 and 2,
// and rejects exponents whose result would be excessively large.
func bigIntExp(c Context, i, j, k *big.Int) *big.Int {
	// 0**k and 1**k are the base itself.
	if j.Cmp(bigOne.Int) == 0 || j.Sign() == 0 {
		return i.Set(j)
	}
	// -1ⁿ is just parity.
	if j.Cmp(bigMinusOne.Int) == 0 {
		if k.And(k, bigOne.Int).Int64() == 0 {
			return i.Neg(j)
		}
		return i.Set(j)
	}
	// Large exponents can be very expensive.
	// First, it must fit in an int64.
	if k.BitLen() > 63 {
		Errorf("%s**%s: exponent too large", j, k)
	}
	exp := k.Int64()
	if exp < 0 {
		exp = -exp
	}
	// "2" is just shift. math/big should do this, really.
	if j.Cmp(bigTwo.Int) == 0 && exp >= 0 {
		return i.Lsh(big.NewInt(1), uint(exp))
	}
	// Guard against results too big for the configured size limit.
	mustFit(c.Config(), int64(j.BitLen())*exp)
	i.Exp(j, k, nil)
	return i
}
// bigIntExpOp wraps bigIntExp with a Context and returns the closure as an op.
func bigIntExpOp(c Context) func(i, j, k *big.Int) *big.Int {
	return func(dst, base, exp *big.Int) *big.Int {
		return bigIntExp(c, dst, base, exp)
	}
}
// toInt turns the boolean into an Int 0 or 1.
func toInt(t bool) Value {
	if !t {
		return zero
	}
	return one
}
// toBool turns the Value into a Go bool.
// A numeric value is true iff it is nonzero; any other Value type is an error.
func toBool(t Value) bool {
	switch t := t.(type) {
	case Int:
		return t != 0
	case Char:
		return t != 0
	case BigInt:
		return t.Sign() != 0
	case BigRat:
		return t.Sign() != 0
	case BigFloat:
		return t.Sign() != 0
	}
	Errorf("cannot convert %T to bool", t)
	panic("not reached")
}
// Frequently used small constants, built once at package init.
var (
	zero = Int(0)
	one = Int(1)
	minusOne = Int(-1)
	bigZero = bigInt64(0)
	bigOne = bigInt64(1)
	bigTwo = bigInt64(2)
	bigMinusOne = bigInt64(-1)
)

// BinaryOps maps an operator name to its implementation; populated in init.
var BinaryOps = make(map[string]BinaryOp)
func init() {
var ops = []*binaryOp{
{
name: "+",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return (u.(Int) + v.(Int)).maybeBig()
},
bigIntType: func(c Context, u, v Value) Value {
mustFit(c.Config(), u.(BigInt).BitLen()+1)
mustFit(c.Config(), v.(BigInt).BitLen()+1)
return binaryBigIntOp(u, (*big.Int).Add, v)
},
bigRatType: func(c Context, u, v Value) Value {
return binaryBigRatOp(u, (*big.Rat).Add, v)
},
bigFloatType: func(c Context, u, v Value) Value {
return binaryBigFloatOp(c, u, (*big.Float).Add, v)
},
},
},
{
name: "-",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return (u.(Int) - v.(Int)).maybeBig()
},
bigIntType: func(c Context, u, v Value) Value {
mustFit(c.Config(), u.(BigInt).BitLen()+1)
mustFit(c.Config(), v.(BigInt).BitLen()+1)
return binaryBigIntOp(u, (*big.Int).Sub, v)
},
bigRatType: func(c Context, u, v Value) Value {
return binaryBigRatOp(u, (*big.Rat).Sub, v)
},
bigFloatType: func(c Context, u, v Value) Value {
return binaryBigFloatOp(c, u, (*big.Float).Sub, v)
},
},
},
{
name: "*",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return (u.(Int) * v.(Int)).maybeBig()
},
bigIntType: func(c Context, u, v Value) Value {
mustFit(c.Config(), u.(BigInt).BitLen()+v.(BigInt).BitLen())
return binaryBigIntOp(u, (*big.Int).Mul, v)
},
bigRatType: func(c Context, u, v Value) Value {
return binaryBigRatOp(u, (*big.Rat).Mul, v)
},
bigFloatType: func(c Context, u, v Value) Value {
return binaryBigFloatOp(c, u, (*big.Float).Mul, v)
},
},
},
{ // Rational division.
name: "/",
elementwise: true,
whichType: rationalType, // Use BigRats to avoid the analysis here.
fn: [numType]binaryFn{
bigRatType: func(c Context, u, v Value) Value {
if v.(BigRat).Sign() == 0 {
Errorf("division by zero")
}
return binaryBigRatOp(u, (*big.Rat).Quo, v) // True division.
},
bigFloatType: func(c Context, u, v Value) Value {
return binaryBigFloatOp(c, u, (*big.Float).Quo, v)
},
},
},
{
name: "idiv", // Go integer division.
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
if v.(Int) == 0 {
Errorf("division by zero")
}
return u.(Int) / v.(Int)
},
bigIntType: func(c Context, u, v Value) Value {
if v.(BigInt).Sign() == 0 {
Errorf("division by zero")
}
return binaryBigIntOp(u, (*big.Int).Quo, v) // Go-like division.
},
bigRatType: nil, // Not defined for rationals. Use div.
bigFloatType: nil,
},
},
{
name: "imod",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
if v.(Int) == 0 {
Errorf("modulo by zero")
}
return u.(Int) % v.(Int)
},
bigIntType: func(c Context, u, v Value) Value {
if v.(BigInt).Sign() == 0 {
Errorf("modulo by zero")
}
return binaryBigIntOp(u, (*big.Int).Rem, v) // Go-like modulo.
},
bigRatType: nil, // Not defined for rationals. Use mod.
bigFloatType: nil,
},
},
{ // Euclidean integer division.
name: "div",
elementwise: true,
whichType: divType, // Use BigInts to avoid the analysis here.
fn: [numType]binaryFn{
bigIntType: func(c Context, u, v Value) Value {
if v.(BigInt).Sign() == 0 {
Errorf("division by zero")
}
return binaryBigIntOp(u, (*big.Int).Div, v) // Euclidean division.
},
bigRatType: nil, // Not defined for rationals. Use div.
bigFloatType: nil,
},
},
{ // Euclidean integer modulus.
name: "mod",
elementwise: true,
whichType: divType, // Use BigInts to avoid the analysis here.
fn: [numType]binaryFn{
bigIntType: func(c Context, u, v Value) Value {
if v.(BigInt).Sign() == 0 {
Errorf("modulo by zero")
}
return binaryBigIntOp(u, (*big.Int).Mod, v) // Euclidan modulo.
},
bigRatType: nil, // Not defined for rationals. Use mod.
bigFloatType: nil,
},
},
{
name: "**",
elementwise: true,
whichType: divType,
fn: [numType]binaryFn{
bigIntType: func(c Context, u, v Value) Value {
switch v.(BigInt).Sign() {
case 0:
return one
case -1:
if u.(BigInt).Sign() == 0 {
Errorf("negative exponent of zero")
}
v = c.EvalUnary("abs", v).toType(c.Config(), bigIntType)
return c.EvalUnary("/", binaryBigIntOp(u, bigIntExpOp(c), v))
}
x := u.(BigInt).Int
if x.Cmp(bigOne.Int) == 0 || x.Sign() == 0 {
return u
}
return binaryBigIntOp(u, bigIntExpOp(c), v)
},
bigRatType: func(c Context, u, v Value) Value {
// (n/d)**2 is n**2/d**2.
rexp := v.(BigRat)
positive := true
switch rexp.Sign() {
case 0:
return one
case -1:
if u.(BigRat).Sign() == 0 {
Errorf("negative exponent of zero")
}
positive = false
rexp = c.EvalUnary("-", v).toType(c.Config(), bigRatType).(BigRat)
}
if !rexp.IsInt() {
// Lift to float.
return c.EvalBinary(floatSelf(c, u), "**", floatSelf(c, v))
}
exp := rexp.Num()
rat := u.(BigRat)
num := new(big.Int).Set(rat.Num())
den := new(big.Int).Set(rat.Denom())
bigIntExp(c, num, num, exp)
bigIntExp(c, den, den, exp)
z := bigRatInt64(0)
if positive {
z.SetFrac(num, den)
} else {
z.SetFrac(den, num)
}
return z.shrink()
},
bigFloatType: func(c Context, u, v Value) Value { return power(c, u, v) },
},
},
{
name: "log",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: logBaseU,
bigIntType: logBaseU,
bigRatType: logBaseU,
bigFloatType: logBaseU,
},
},
{
name: "!",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
a := int64(u.(Int))
b := int64(v.(Int))
if a == 0 || b == 0 || a == b {
return bigOne
}
if a < 0 || b < 0 || a > b {
return bigZero
}
aFac := factorial(a)
bFac := factorial(b)
bMinusAFac := factorial(b - a)
bFac.Div(bFac, aFac)
bFac.Div(bFac, bMinusAFac)
return BigInt{bFac}.shrink()
},
},
},
{
name: "&",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return u.(Int) & v.(Int)
},
bigIntType: func(c Context, u, v Value) Value {
return binaryBigIntOp(u, (*big.Int).And, v)
},
},
},
{
name: "|",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return u.(Int) | v.(Int)
},
bigIntType: func(c Context, u, v Value) Value {
return binaryBigIntOp(u, (*big.Int).Or, v)
},
},
},
{
name: "^",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return u.(Int) ^ v.(Int)
},
bigIntType: func(c Context, u, v Value) Value {
return binaryBigIntOp(u, (*big.Int).Xor, v)
},
},
},
{
name: "<<",
elementwise: true,
whichType: divType, // Shifts are like exp: let BigInt do the work.
fn: [numType]binaryFn{
bigIntType: func(c Context, u, v Value) Value {
i, j := u.(BigInt), v.(BigInt)
z := bigInt64(0)
z.Lsh(i.Int, shiftCount(j))
return z.shrink()
},
// TODO: lsh for bigfloat
},
},
{
name: ">>",
elementwise: true,
whichType: divType, // Shifts are like exp: let BigInt do the work.
fn: [numType]binaryFn{
bigIntType: func(c Context, u, v Value) Value {
i, j := u.(BigInt), v.(BigInt)
z := bigInt64(0)
z.Rsh(i.Int, shiftCount(j))
return z.shrink()
},
// TODO: rsh for bigfloat
},
},
{
name: "==",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return toInt(u.(Int) == v.(Int))
},
charType: func(c Context, u, v Value) Value {
return toInt(u.(Char) == v.(Char))
},
bigIntType: func(c Context, u, v Value) Value {
i, j := u.(BigInt), v.(BigInt)
return toInt(i.Cmp(j.Int) == 0)
},
bigRatType: func(c Context, u, v Value) Value {
i, j := u.(BigRat), v.(BigRat)
return toInt(i.Cmp(j.Rat) == 0)
},
bigFloatType: func(c Context, u, v Value) Value {
i, j := u.(BigFloat), v.(BigFloat)
return toInt(i.Cmp(j.Float) == 0)
},
},
},
{
name: "!=",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return toInt(u.(Int) != v.(Int))
},
charType: func(c Context, u, v Value) Value {
return toInt(u.(Char) != v.(Char))
},
bigIntType: func(c Context, u, v Value) Value {
i, j := u.(BigInt), v.(BigInt)
return toInt(i.Cmp(j.Int) != 0)
},
bigRatType: func(c Context, u, v Value) Value {
i, j := u.(BigRat), v.(BigRat)
return toInt(i.Cmp(j.Rat) != 0)
},
bigFloatType: func(c Context, u, v Value) Value {
i, j := u.(BigFloat), v.(BigFloat)
return toInt(i.Cmp(j.Float) != 0)
},
},
},
{
name: "<",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return toInt(u.(Int) < v.(Int))
},
charType: func(c Context, u, v Value) Value {
return toInt(u.(Char) < v.(Char))
},
bigIntType: func(c Context, u, v Value) Value {
i, j := u.(BigInt), v.(BigInt)
return toInt(i.Cmp(j.Int) < 0)
},
bigRatType: func(c Context, u, v Value) Value {
i, j := u.(BigRat), v.(BigRat)
return toInt(i.Cmp(j.Rat) < 0)
},
bigFloatType: func(c Context, u, v Value) Value {
i, j := u.(BigFloat), v.(BigFloat)
return toInt(i.Cmp(j.Float) < 0)
},
},
},
{
name: "<=",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return toInt(u.(Int) <= v.(Int))
},
charType: func(c Context, u, v Value) Value {
return toInt(u.(Char) <= v.(Char))
},
bigIntType: func(c Context, u, v Value) Value {
i, j := u.(BigInt), v.(BigInt)
return toInt(i.Cmp(j.Int) <= 0)
},
bigRatType: func(c Context, u, v Value) Value {
i, j := u.(BigRat), v.(BigRat)
return toInt(i.Cmp(j.Rat) <= 0)
},
bigFloatType: func(c Context, u, v Value) Value {
i, j := u.(BigFloat), v.(BigFloat)
return toInt(i.Cmp(j.Float) <= 0)
},
},
},
{
name: ">",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return toInt(u.(Int) > v.(Int))
},
charType: func(c Context, u, v Value) Value {
return toInt(u.(Char) > v.(Char))
},
bigIntType: func(c Context, u, v Value) Value {
i, j := u.(BigInt), v.(BigInt)
return toInt(i.Cmp(j.Int) > 0)
},
bigRatType: func(c Context, u, v Value) Value {
i, j := u.(BigRat), v.(BigRat)
return toInt(i.Cmp(j.Rat) > 0)
},
bigFloatType: func(c Context, u, v Value) Value {
i, j := u.(BigFloat), v.(BigFloat)
return toInt(i.Cmp(j.Float) > 0)
},
},
},
{
name: ">=",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return toInt(u.(Int) >= v.(Int))
},
charType: func(c Context, u, v Value) Value {
return toInt(u.(Char) >= v.(Char))
},
bigIntType: func(c Context, u, v Value) Value {
i, j := u.(BigInt), v.(BigInt)
return toInt(i.Cmp(j.Int) >= 0)
},
bigRatType: func(c Context, u, v Value) Value {
i, j := u.(BigRat), v.(BigRat)
return toInt(i.Cmp(j.Rat) >= 0)
},
bigFloatType: func(c Context, u, v Value) Value {
i, j := u.(BigFloat), v.(BigFloat)
return toInt(i.Cmp(j.Float) >= 0)
},
},
},
{
name: "and",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return toInt(toBool(u) && toBool(v))
},
charType: func(c Context, u, v Value) Value {
return toInt(toBool(u) && toBool(v))
},
bigIntType: func(c Context, u, v Value) Value {
return toInt(toBool(u) && toBool(v))
},
bigRatType: func(c Context, u, v Value) Value {
return toInt(toBool(u) && toBool(v))
},
bigFloatType: func(c Context, u, v Value) Value {
return toInt(toBool(u) && toBool(v))
},
},
},
{
name: "or",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return toInt(toBool(u) || toBool(v))
},
charType: func(c Context, u, v Value) Value {
return toInt(toBool(u) || toBool(v))
},
bigIntType: func(c Context, u, v Value) Value {
return toInt(toBool(u) || toBool(v))
},
bigRatType: func(c Context, u, v Value) Value {
return toInt(toBool(u) || toBool(v))
},
bigFloatType: func(c Context, u, v Value) Value {
return toInt(toBool(u) || toBool(v))
},
},
},
{
name: "xor",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return toInt(toBool(u) != toBool(v))
},
charType: func(c Context, u, v Value) Value {
return toInt(toBool(u) != toBool(v))
},
bigIntType: func(c Context, u, v Value) Value {
return toInt(toBool(u) != toBool(v))
},
bigRatType: func(c Context, u, v Value) Value {
return toInt(toBool(u) != toBool(v))
},
bigFloatType: func(c Context, u, v Value) Value {
return toInt(toBool(u) != toBool(v))
},
},
},
{
name: "nand",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return toInt(!(toBool(u) && toBool(v)))
},
charType: func(c Context, u, v Value) Value {
return toInt(!(toBool(u) && toBool(v)))
},
bigIntType: func(c Context, u, v Value) Value {
return toInt(!(toBool(u) && toBool(v)))
},
bigRatType: func(c Context, u, v Value) Value {
return toInt(!(toBool(u) && toBool(v)))
},
bigFloatType: func(c Context, u, v Value) Value {
return toInt(!(toBool(u) && toBool(v)))
},
},
},
{
name: "nor",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
return toInt(!(toBool(u) || toBool(v)))
},
charType: func(c Context, u, v Value) Value {
return toInt(!(toBool(u) || toBool(v)))
},
bigIntType: func(c Context, u, v Value) Value {
return toInt(!(toBool(u) || toBool(v)))
},
bigRatType: func(c Context, u, v Value) Value {
return toInt(!(toBool(u) || toBool(v)))
},
bigFloatType: func(c Context, u, v Value) Value {
return toInt(!(toBool(u) || toBool(v)))
},
},
},
{
name: "?",
elementwise: false,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
A := u.(Int)
B := v.(Int)
if uint64(A) > maxInt || uint64(B) > maxInt {
Errorf("negative or too-large operand in %d?%d", A, B)
}
if A > B {
Errorf("left operand larger than right in %d?%d", A, B)
}
ints := c.Config().Random().Perm(int(B))
origin := c.Config().Origin()
res := make([]Value, A)
for i := range res {
res[i] = Int(ints[i] + origin)
}
return NewVector(res)
},
},
},
{
name: "decode",
whichType: atLeastVectorType,
fn: [numType]binaryFn{
vectorType: func(c Context, u, v Value) Value {
// A decode B is the result of polyomial B at x=A.
// If A is a vector, the elements of A align with B.
A, B := u.(Vector), v.(Vector)
if len(A) == 0 || len(B) == 0 {
return Int(0)
}
if len(A) == 1 || len(B) == 1 || len(A) == len(B) {
result := Value(Int(0))
prod := Value(Int(1))
get := func(v Vector, i int) Value {
if len(v) == 1 {
return v[0]
}
return v[i]
}
n := len(A)
if len(B) > n {
n = len(B)
}
for i := n - 1; i >= 0; i-- {
result = c.EvalBinary(result, "+", c.EvalBinary(prod, "*", get(B, i)))
prod = c.EvalBinary(prod, "*", get(A, i))
}
return result
}
if len(A) != len(B) {
Errorf("decode of unequal lengths")
}
return nil
},
},
},
{
name: "encode",
whichType: atLeastVectorType,
fn: [numType]binaryFn{
vectorType: func(c Context, u, v Value) Value {
// A encode B is a matrix of len(A) rows and len(B) columns.
// Each entry is the residue base A[i] of B[j].
// Thus 2 encode 3 is just the low bit of 3, 2 2 encode 3 is the low 2 bits,
// and 2 2 encode 1 2 3 has 3 columns encoding 1 2 3 downwards:
// 0 1 1
// 1 0 1
// If they are negative the answers disagree with APL because
// of how modulo arithmetic works.
mod := func(b, a Value) Value {
if z, ok := a.(Int); ok && z == 0 {
return b
}
return c.EvalBinary(b, "mod", a)
}
div := func(b, a Value) Value {
if z, ok := a.(Int); ok && z == 0 {
return b
}
return c.EvalBinary(b, "div", a)
}
A, B := u.(Vector), v.(Vector)
// Scalar.
if len(A) == 1 && len(B) == 1 {
return mod(B[0], A[0])
}
// Vector.
if len(B) == 1 {
// 2 2 2 2 encode 11 is 1 0 1 1.
elems := make([]Value, len(A))
b := B[0]
for i := len(A) - 1; i >= 0; i-- {
a := A[i]
elems[i] = mod(b, a)
b = div(b, a)
}
return NewVector(elems)
}
if len(A) == 1 {
// 3 encode 1 2 3 4 is 1 2 0 1
elems := make([]Value, len(B))
a := A[0]
for i := range B {
elems[i] = mod(B[i], a)
}
return NewVector(elems)
}
// Matrix.
// 2 2 encode 1 2 3 has 3 columns encoding 1 2 3 downwards:
// 0 1 1
// 1 0 1
elems := make([]Value, len(A)*len(B))
shape := []int{len(A), len(B)}
for j := range B {
b := B[j]
for i := len(A) - 1; i >= 0; i-- {
a := A[i]
elems[j+i*len(B)] = mod(b, a)
b = div(b, a)
}
}
return NewMatrix(shape, elems)
},
},
},
{
name: "in",
// A in B: Membership: 0 or 1 according to which elements of A present in B.
whichType: atLeastVectorType,
fn: [numType]binaryFn{
vectorType: func(c Context, u, v Value) Value {
return membership(c, u.(Vector), v.(Vector))
},
matrixType: func(c Context, u, v Value) Value {
return membership(c, u.(*Matrix).data, v.(*Matrix).data)
},
},
},
{
name: "[]",
whichType: binaryArithType,
fn: [numType]binaryFn{
vectorType: func(c Context, u, v Value) Value {
// A[B]: The successive elements of A with indexes elements of B.
A, B := u.(Vector), v.(Vector)
values := make([]Value, len(B))
origin := Int(c.Config().Origin())
for i, b := range B {
x, ok := b.(Int)
if !ok {
Errorf("index must be integer")
}
x -= origin
if x < 0 || Int(len(A)) <= x {
Errorf("index %d out of range", x+origin)
}
values[i] = A[x]
}
if len(values) == 1 {
return values[0]
}
return NewVector(values).shrink()
},
matrixType: func(c Context, u, v Value) Value {
// A[B]: The successive elements of A with indexes given by elements of B.
A, mB := u.(*Matrix), v.(*Matrix)
B := mB.data
origin := Int(c.Config().Origin())
switch mB.Rank() {
case 1:
case 0:
Errorf("bad index rank %d", mB.Rank())
default:
if A.Rank() != 1 {
Errorf("bad index rank %d", mB.Rank())
}
// We are indexing a vector by an interesting shape.
// Special generalization not in APL: If the LHS is vector-like,
// the return value has the shape of the RHS. We could
// generalize more, but that's hard.
values := make(Vector, len(B))
for i, b := range B {
x, ok := b.(Int)
if !ok {
Errorf("index must be integer")
}
x -= origin
if x < 0 || Int(A.shape[0]) <= x {
Errorf("index %d out of range (shape %s)", x+origin, NewIntVector(A.shape))
}
values[i] = A.data[x]
}
newShape := make([]int, mB.Rank())
copy(newShape, mB.shape)
return NewMatrix(newShape, values)
}
ElemSize := Int(A.ElemSize())
values := make(Vector, 0, ElemSize*Int(len(B)))
for _, b := range B {
x, ok := b.(Int)
if !ok {
Errorf("index must be integer")
}
x -= origin
if x < 0 || Int(A.shape[0]) <= x {
Errorf("index %d out of range (shape %s)", x+origin, NewIntVector(A.shape))
}
start := ElemSize * x
values = append(values, A.data[start:start+ElemSize]...)
}
if len(B) == 1 {
// Special considerations. The result might need type reduction.
// TODO: Should this be Matrix.shrink?
// TODO: In some cases, can get a scalar.
// Is the result a vector?
if A.Rank() == 2 {
return values
}
// Matrix of one less degree.
newShape := make([]int, A.Rank()-1)
copy(newShape, A.shape[1:])
return NewMatrix(newShape, values)
}
newShape := make([]int, A.Rank())
copy(newShape, A.shape)
newShape[0] = len(B)
return NewMatrix(newShape, values)
},
},
},
{
name: "iota",
whichType: atLeastVectorType,
fn: [numType]binaryFn{
vectorType: func(c Context, u, v Value) Value {
// A⍳B: The location (index) of B in A; 0 if not found. (APL does 1+⌈/⍳⍴A)
A, B := u.(Vector), v.(Vector)
indices := make([]Value, len(B))
// TODO: This is n^2.
origin := c.Config().Origin()
Outer:
for i, b := range B {
for j, a := range A {
if toBool(c.EvalBinary(a, "==", b)) {
indices[i] = Int(j + origin)
continue Outer
}
}
indices[i] = zero
}
return NewVector(indices)
},
},
},
{
name: "min",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
if u.(Int) < v.(Int) {
return u
}
return v
},
charType: func(c Context, u, v Value) Value {
if u.(Char) < v.(Char) {
return u
}
return v
},
bigIntType: func(c Context, u, v Value) Value {
i, j := u.(BigInt), v.(BigInt)
if i.Cmp(j.Int) < 0 {
return i.shrink()
}
return j.shrink()
},
bigRatType: func(c Context, u, v Value) Value {
i, j := u.(BigRat), v.(BigRat)
if i.Cmp(j.Rat) < 0 {
return i.shrink()
}
return j.shrink()
},
bigFloatType: func(c Context, u, v Value) Value {
i, j := u.(BigFloat), v.(BigFloat)
if i.Cmp(j.Float) < 0 {
return i.shrink()
}
return j.shrink()
},
},
},
{
name: "max",
elementwise: true,
whichType: binaryArithType,
fn: [numType]binaryFn{
intType: func(c Context, u, v Value) Value {
if u.(Int) > v.(Int) {
return u
}
return v
},
charType: func(c Context, u, v Value) Value {
if u.(Char) > v.(Char) {
return u
}
return v
},
bigIntType: func(c Context, u, v Value) Value {
i, j := u.(BigInt), v.(BigInt)
if i.Cmp(j.Int) > 0 {
return u
}
return v
},
bigRatType: func(c Context, u, v Value) Value {
i, j := u.(BigRat), v.(BigRat)
if i.Cmp(j.Rat) > 0 {
return i.shrink()
}
return j.shrink()
},
bigFloatType: func(c Context, u, v Value) Value {
i, j := u.(BigFloat), v.(BigFloat)
if i.Cmp(j.Float) > 0 {
return i.shrink()
}
return j.shrink()
},
},
},
{
name: "rho",
whichType: atLeastVectorType,
fn: [numType]binaryFn{
vectorType: func(c Context, u, v Value) Value {
return reshape(u.(Vector), v.(Vector))
},
matrixType: func(c Context, u, v Value) Value {
// LHS must be a vector underneath.
A, B := u.(*Matrix), v.(*Matrix)
if A.Rank() != 1 {
Errorf("lhs of rho cannot be matrix")
}
return reshape(A.data, B.data)
},
},
},
{
name: ",",
whichType: atLeastVectorType,
fn: [numType]binaryFn{
vectorType: func(c Context, u, v Value) Value {
return append(u.(Vector), v.(Vector)...)
},
matrixType: func(c Context, u, v Value) Value {
A := u.(*Matrix)
B := v.(*Matrix)
if A.Rank() == 0 || B.Rank() == 0 {
Errorf("empty matrix for ,")
}
if A.Rank() != B.Rank()+1 || A.ElemSize() != B.Size() {
Errorf("catenate rank mismatch: %s != %s", NewIntVector(A.shape[1:]), NewIntVector(B.shape))
}
ElemSize := A.ElemSize()
newShape := make([]int, A.Rank())
copy(newShape, A.shape)
newData := make(Vector, len(A.data), int64(len(A.data))+ElemSize)
copy(newData, A.data)
newData = append(newData, B.data...)
newShape[0] = newShape[0] + 1
return NewMatrix(newShape, newData)
},
},
},
{
name: "take",
whichType: atLeastVectorType,
fn: [numType]binaryFn{
vectorType: func(c Context, u, v Value) Value {
const bad = Error("bad count for take")
i := v.(Vector)
nv, ok := u.(Vector)
if !ok || len(nv) != 1 {
panic(bad)
}
n, ok := nv[0].(Int)
if !ok {
panic(bad)
}
len := Int(len(i))
switch {
case n < 0:
if -n > len {
panic(bad)
}
i = i[len+n : len]
case n == 0:
return NewVector(nil)
case n > 0:
if n > len {
panic(bad)
}
i = i[0:n]
}
return i
},
},
},
{
name: "drop",
whichType: atLeastVectorType,
fn: [numType]binaryFn{
vectorType: func(c Context, u, v Value) Value {
const bad = Error("bad count for drop")
i := v.(Vector)
nv, ok := u.(Vector)
if !ok || len(nv) != 1 {
panic(bad)
}
n, ok := nv[0].(Int)
if !ok {
panic(bad)
}
len := Int(len(i))
switch {
case n < 0:
if -n > len {
panic(bad)
}
i = i[0 : len+n]
case n == 0:
case n > 0:
if n > len {
panic(bad)
}
i = i[n:]
}
return i
},
},
},
{
name: "rot",
whichType: atLeastVectorType,
fn: [numType]binaryFn{
vectorType: func(c Context, u, v Value) Value {
countVec := u.(Vector)
count, ok := countVec[0].(Int)
if !ok {
Errorf("rot: count must be small integer")
}
return v.(Vector).rotate(int(count))
},
matrixType: func(c Context, u, v Value) Value {
countMat := u.(*Matrix)
if countMat.Rank() != 1 || len(countMat.data) != 1 {
Errorf("rot: count must be small integer")
}
count, ok := countMat.data[0].(Int)
if !ok {
Errorf("rot: count must be small integer")
}
return v.(*Matrix).rotate(int(count))
},
},
},
{
name: "flip",
whichType: atLeastVectorType,
fn: [numType]binaryFn{
vectorType: func(c Context, u, v Value) Value {
countVec := u.(Vector)
if len(countVec) != 1 {
Errorf("flip: count must be small integer")
}
count, ok := countVec[0].(Int)
if !ok {
Errorf("flip: count must be small integer")
}
return v.(Vector).rotate(int(count))
},
matrixType: func(c Context, u, v Value) Value {
countMat := u.(*Matrix)
if countMat.Rank() != 1 || len(countMat.data) != 1 {
Errorf("flip: count must be small integer")
}
count, ok := countMat.data[0].(Int)
if !ok {
Errorf("flip: count must be small integer")
}
return v.(*Matrix).vrotate(int(count))
},
},
},
{
name: "fill",
whichType: atLeastVectorType,
fn: [numType]binaryFn{
vectorType: func(c Context, u, v Value) Value {
i := u.(Vector)
j := v.(Vector)
if len(i) == 0 {
return NewVector(nil)
}
// All lhs values must be small integers.
var count int64
numLeft := 0
for _, x := range i {
y, ok := x.(Int)
if !ok {
Errorf("left operand of fill must be small integers")
}
switch {
case y == 0:
count++
case y < 0:
count -= int64(y)
default:
numLeft++
count += int64(y)
}
}
if numLeft != len(j) {
Errorf("fill: count > 0 on left (%d) must equal length of right (%d)", numLeft, len(j))
}
if count > 1e8 {
Errorf("fill: result too large: %d elements", count)
}
result := make([]Value, 0, count)
jx := 0
var zero Value
if j.AllChars() {
zero = Char(' ')
} else {
zero = Int(0)
}
for _, x := range i {
y := x.(Int)
switch {
case y == 0:
result = append(result, zero)
case y < 0:
for y = -y; y > 0; y-- {
result = append(result, zero)
}
default:
for ; y > 0; y-- {
result = append(result, j[jx])
}
jx++
}
}
return NewVector(result)
},
},
},
{
name: "sel",
whichType: atLeastVectorType,
fn: [numType]binaryFn{
vectorType: func(c Context, u, v Value) Value {
i := u.(Vector)
j := v.(Vector)
if len(i) == 0 {
return NewVector(nil)
}
// All lhs values must be small integers.
var count int64
for _, x := range i {
y, ok := x.(Int)
if !ok {
Errorf("left operand of sel must be small integers")
}
if y < 0 {
count -= int64(y)
} else {
count += int64(y)
}
}
if count > 1e8 {
Errorf("sel: result too large: %d elements", count)
}
result := make([]Value, 0, count)
add := func(howMany, what Value) {
hm := int(howMany.(Int))
if hm < 0 {
hm = -hm
what = Int(0)
}
for ; hm > 0; hm-- {
result = append(result, what)
}
}
if len(i) == 1 {
for _, y := range j {
add(i[0], y)
}
} else {
if len(i) != len(j) {
Errorf("sel: unequal lengths %d != %d", len(i), len(j))
}
for x, y := range j {
add(i[x], y)
}
}
return NewVector(result)
},
},
},
{
// Special case, handled in EvalBinary: don't modify types.
name: "text",
elementwise: true,
whichType: nil,
fn: [numType]binaryFn{
0: fmtText,
},
},
}
for _, op := range ops {
BinaryOps[op.name] = op
}
} | vendor/robpike.io/ivy/value/binary.go | 0.794584 | 0.644868 | binary.go | starcoder |
Package cache implements data structures used by the kubelet plugin manager to
keep track of registered plugins.
*/
package cache
import (
"fmt"
"sync"
"time"
"k8s.io/klog"
)
// ActualStateOfWorld defines a set of thread-safe operations for the kubelet
// plugin manager's actual state of the world cache.
// This cache contains a map of socket file path to plugin information of
// all plugins attached to this node.
type ActualStateOfWorld interface {
	// GetRegisteredPlugins generates and returns a list of plugins
	// that are successfully registered plugins in the current actual state of world.
	GetRegisteredPlugins() []PluginInfo
	// AddPlugin adds the given plugin to the cache.
	// An error will be returned if socketPath of the PluginInfo object is empty.
	// Note that this is different from desired world cache's AddOrUpdatePlugin
	// because for the actual state of world cache, there won't be a scenario where
	// we need to update an existing plugin if the timestamps don't match. This is
	// because the plugin should have been unregistered in the reconciler and therefore
	// removed from the actual state of world cache first before adding it back into
	// the actual state of world cache again with the new timestamp
	AddPlugin(pluginInfo PluginInfo) error
	// RemovePlugin deletes the plugin with the given socket path from the actual
	// state of world.
	// If a plugin does not exist with the given socket path, this is a no-op.
	RemovePlugin(socketPath string)
	// PluginExistsWithCorrectTimestamp checks if the given plugin exists in the
	// current actual state of world cache AND its timestamp matches the given one.
	PluginExistsWithCorrectTimestamp(pluginInfo PluginInfo) bool
}
// NewActualStateOfWorld returns a new, empty instance of ActualStateOfWorld.
func NewActualStateOfWorld() ActualStateOfWorld {
	asw := &actualStateOfWorld{
		socketFileToInfo: map[string]PluginInfo{},
	}
	return asw
}
// actualStateOfWorld is the default implementation of the ActualStateOfWorld
// interface. The embedded RWMutex guards socketFileToInfo; every method
// acquires it before touching the map.
type actualStateOfWorld struct {
	// socketFileToInfo is a map containing the set of successfully registered plugins
	// The keys are plugin socket file paths. The values are PluginInfo objects
	socketFileToInfo map[string]PluginInfo
	sync.RWMutex
}

// Compile-time assertion that actualStateOfWorld satisfies ActualStateOfWorld.
var _ ActualStateOfWorld = &actualStateOfWorld{}
// PluginInfo holds information of a plugin
type PluginInfo struct {
	// SocketPath is the plugin's unix socket path; it is the cache map key.
	SocketPath string
	// FoundInDeprecatedDir presumably records whether the socket was found in
	// a deprecated plugin directory — confirm against the discovery code.
	FoundInDeprecatedDir bool
	// Timestamp of the plugin registration; compared by
	// PluginExistsWithCorrectTimestamp to detect re-registered plugins.
	Timestamp time.Time
}
// AddPlugin adds the given plugin to the cache, overwriting any existing
// entry for the same socket path (see the interface comment for why an
// in-place overwrite is acceptable here).
// Returns an error if the SocketPath of the PluginInfo is empty.
func (asw *actualStateOfWorld) AddPlugin(pluginInfo PluginInfo) error {
	asw.Lock()
	defer asw.Unlock()

	if pluginInfo.SocketPath == "" {
		// Lowercase, unpunctuated error string per Go convention.
		return fmt.Errorf("socket path is empty")
	}
	if _, ok := asw.socketFileToInfo[pluginInfo.SocketPath]; ok {
		// Log the duplicate; the entry is still overwritten below.
		klog.V(2).Infof("Plugin (Path %s) exists in actual state cache", pluginInfo.SocketPath)
	}
	asw.socketFileToInfo[pluginInfo.SocketPath] = pluginInfo
	return nil
}
// RemovePlugin deletes the plugin with the given socket path from the actual
// state of world. Deleting a missing key is a defined no-op in Go, so no
// existence check is required.
func (asw *actualStateOfWorld) RemovePlugin(socketPath string) {
	asw.Lock()
	defer asw.Unlock()

	delete(asw.socketFileToInfo, socketPath)
}
// GetRegisteredPlugins returns a snapshot of all plugins currently recorded
// as successfully registered. The result is always non-nil.
func (asw *actualStateOfWorld) GetRegisteredPlugins() []PluginInfo {
	asw.RLock()
	defer asw.RUnlock()

	plugins := make([]PluginInfo, 0, len(asw.socketFileToInfo))
	for _, info := range asw.socketFileToInfo {
		plugins = append(plugins, info)
	}
	return plugins
}
func (asw *actualStateOfWorld) PluginExistsWithCorrectTimestamp(pluginInfo PluginInfo) bool {
asw.RLock()
defer asw.RUnlock()
// We need to check both if the socket file path exists, and the timestamp
// matches the given plugin (from the desired state cache) timestamp
actualStatePlugin, exists := asw.socketFileToInfo[pluginInfo.SocketPath]
return exists && (actualStatePlugin.Timestamp == pluginInfo.Timestamp)
} | pkg/kubelet/pluginmanager/cache/actual_state_of_world.go | 0.678647 | 0.468973 | actual_state_of_world.go | starcoder |
Package stats implements statistics collection and reporting.
It is used by server to report internal statistics, such as number of
requests and responses.
*/
package stats
import (
"fmt"
"strings"
"sync"
ptp "github.com/facebook/time/ptp/protocol"
)
// Stats is a metric collection interface for reporters that track the
// server's per-message-type and per-worker counters.
type Stats interface {
	// Start starts a stat reporter listening on monitoringport.
	// Use this for passive reporters
	Start(monitoringport int)
	// Snapshot the values so they can be reported atomically
	Snapshot()
	// Reset atomically sets all the counters to 0
	Reset()
	// IncSubscription atomically adds 1 to the counter
	IncSubscription(t ptp.MessageType)
	// IncRX atomically adds 1 to the counter
	IncRX(t ptp.MessageType)
	// IncTX atomically adds 1 to the counter
	IncTX(t ptp.MessageType)
	// IncRXSignaling atomically adds 1 to the counter
	IncRXSignaling(t ptp.MessageType)
	// IncTXSignaling atomically adds 1 to the counter
	IncTXSignaling(t ptp.MessageType)
	// IncWorkerSubs atomically adds 1 to the given worker's counter
	IncWorkerSubs(workerid int)
	// DecSubscription atomically removes 1 from the counter
	DecSubscription(t ptp.MessageType)
	// DecRX atomically removes 1 from the counter
	DecRX(t ptp.MessageType)
	// DecTX atomically removes 1 from the counter
	DecTX(t ptp.MessageType)
	// DecRXSignaling atomically removes 1 from the counter
	DecRXSignaling(t ptp.MessageType)
	// DecTXSignaling atomically removes 1 from the counter
	DecTXSignaling(t ptp.MessageType)
	// DecWorkerSubs atomically removes 1 from the given worker's counter
	DecWorkerSubs(workerid int)
	// SetMaxWorkerQueue atomically sets worker queue len
	SetMaxWorkerQueue(workerid int, queue int64)
	// SetMaxTXTSAttempts atomically sets number of retries for get latest TX timestamp
	SetMaxTXTSAttempts(workerid int, retries int64)
	// SetUTCOffset atomically sets the utcoffset
	SetUTCOffset(utcoffset int64)
}
// syncMapInt64 is a mutex-protected map from int keys (PTP message types or
// worker ids) to int64 counters. The zero value is not usable: call init
// before any other method. Must not be copied after first use (embeds a Mutex).
type syncMapInt64 struct {
	sync.Mutex
	m map[int]int64
}

// init initializes the underlying map
func (s *syncMapInt64) init() {
	s.m = make(map[int]int64)
}

// keys returns a snapshot of the keys of the underlying map.
func (s *syncMapInt64) keys() []int {
	s.Lock()
	defer s.Unlock()
	// Size the slice while holding the lock: reading len(s.m) before locking
	// (as the previous version did) is a data race with concurrent writers.
	keys := make([]int, 0, len(s.m))
	for k := range s.m {
		keys = append(keys, k)
	}
	return keys
}

// load gets the value by the key (zero for an absent key)
func (s *syncMapInt64) load(key int) int64 {
	s.Lock()
	defer s.Unlock()
	return s.m[key]
}

// inc increments the counter for the given key
func (s *syncMapInt64) inc(key int) {
	s.Lock()
	s.m[key]++
	s.Unlock()
}

// dec decrements the counter for the given key
func (s *syncMapInt64) dec(key int) {
	s.Lock()
	s.m[key]--
	s.Unlock()
}

// store saves the value with the key
func (s *syncMapInt64) store(key int, value int64) {
	s.Lock()
	s.m[key] = value
	s.Unlock()
}

// copy copies all key-values from s into dst. Each key is transferred under
// separate lock acquisitions, so the copy is not atomic as a whole.
func (s *syncMapInt64) copy(dst *syncMapInt64) {
	for _, t := range s.keys() {
		dst.store(t, s.load(t))
	}
}

// reset sets every existing counter to 0 (keys are retained)
func (s *syncMapInt64) reset() {
	s.Lock()
	for t := range s.m {
		s.m[t] = 0
	}
	s.Unlock()
}
// counters holds all metric state: one mutex-protected map per metric
// family, plus the scalar UTC offset.
type counters struct {
	// Keyed by PTP message type (see toMap, which lowercases the type name).
	rx syncMapInt64
	rxSignaling syncMapInt64
	subscriptions syncMapInt64
	tx syncMapInt64
	txSignaling syncMapInt64
	// Keyed by worker id (exported as "worker.<id>.*" in toMap).
	txtsattempts syncMapInt64
	workerQueue syncMapInt64
	workerSubs syncMapInt64
	// utcoffset is read/written as a plain int64 here — NOTE(review): confirm
	// that SetUTCOffset/Snapshot callers synchronize access.
	utcoffset int64
}
// init initializes every per-family counter map. Must be called before use.
func (c *counters) init() {
	for _, m := range []*syncMapInt64{
		&c.subscriptions, &c.rx, &c.tx,
		&c.rxSignaling, &c.txSignaling,
		&c.workerQueue, &c.workerSubs, &c.txtsattempts,
	} {
		m.init()
	}
}

// reset zeroes every counter, including the UTC offset.
func (c *counters) reset() {
	for _, m := range []*syncMapInt64{
		&c.subscriptions, &c.rx, &c.tx,
		&c.rxSignaling, &c.txSignaling,
		&c.workerQueue, &c.workerSubs, &c.txtsattempts,
	} {
		m.reset()
	}
	c.utcoffset = 0
}
// toMap converts counters to a map
func (c *counters) toMap() (export map[string]int64) {
res := make(map[string]int64)
for _, t := range c.subscriptions.keys() {
c := c.subscriptions.load(t)
mt := strings.ToLower(ptp.MessageType(t).String())
res[fmt.Sprintf("subscriptions.%s", mt)] = c
}
for _, t := range c.rx.keys() {
c := c.rx.load(t)
mt := strings.ToLower(ptp.MessageType(t).String())
res[fmt.Sprintf("rx.%s", mt)] = c
}
for _, t := range c.tx.keys() {
c := c.tx.load(t)
mt := strings.ToLower(ptp.MessageType(t).String())
res[fmt.Sprintf("tx.%s", mt)] = c
}
for _, t := range c.rxSignaling.keys() {
c := c.rxSignaling.load(t)
mt := strings.ToLower(ptp.MessageType(t).String())
res[fmt.Sprintf("rx.signaling.%s", mt)] = c
}
for _, t := range c.txSignaling.keys() {
c := c.txSignaling.load(t)
mt := strings.ToLower(ptp.MessageType(t).String())
res[fmt.Sprintf("tx.signaling.%s", mt)] = c
}
for _, t := range c.workerQueue.keys() {
c := c.workerQueue.load(t)
res[fmt.Sprintf("worker.%d.queue", t)] = c
}
for _, t := range c.workerSubs.keys() {
c := c.workerSubs.load(t)
res[fmt.Sprintf("worker.%d.subscriptions", t)] = c
}
for _, t := range c.txtsattempts.keys() {
c := c.txtsattempts.load(t)
res[fmt.Sprintf("worker.%d.txtsattempts", t)] = c
}
res["utcoffset"] = c.utcoffset
return res
} | ptp/ptp4u/stats/stats.go | 0.612541 | 0.457258 | stats.go | starcoder |
package types
import (
"encoding/json"
"math"
"testing"
"math/big"
"math/rand"
)
// newIntegerFromString parses s into a big.Int; passing base 0 to SetString
// makes the base auto-detected from the prefix ("0x", "0b", "0", ...).
func newIntegerFromString(s string) (*big.Int, bool) {
	return new(big.Int).SetString(s, 0)
}

// Thin wrappers over math/big so the arithmetic below reads declaratively.
// Each arithmetic helper returns a freshly allocated big.Int and never
// mutates its arguments.
func equal(i *big.Int, i2 *big.Int) bool { return i.Cmp(i2) == 0 }
func gt(i *big.Int, i2 *big.Int) bool { return i.Cmp(i2) == 1 }
func lt(i *big.Int, i2 *big.Int) bool { return i.Cmp(i2) == -1 }
func add(i *big.Int, i2 *big.Int) *big.Int { return new(big.Int).Add(i, i2) }
func sub(i *big.Int, i2 *big.Int) *big.Int { return new(big.Int).Sub(i, i2) }
func mul(i *big.Int, i2 *big.Int) *big.Int { return new(big.Int).Mul(i, i2) }
// div delegates to big.Int.Div: Euclidean division, which differs from Go's
// truncated "/" when operands are negative.
func div(i *big.Int, i2 *big.Int) *big.Int { return new(big.Int).Div(i, i2) }
// mod delegates to big.Int.Mod: Euclidean modulus, always in [0, |i2|).
func mod(i *big.Int, i2 *big.Int) *big.Int { return new(big.Int).Mod(i, i2) }
func neg(i *big.Int) *big.Int { return new(big.Int).Neg(i) }
// random returns a pseudo-random value in [0, i); test helper only,
// not cryptographically secure.
func random(i *big.Int) *big.Int { return new(big.Int).Rand(rand.New(rand.NewSource(rand.Int63())), i) }

// min returns a copy of the smaller of i and i2.
func min(i *big.Int, i2 *big.Int) *big.Int {
	if i.Cmp(i2) == 1 {
		return new(big.Int).Set(i2)
	}
	return new(big.Int).Set(i)
}
// MarshalAmino for custom encoding scheme
func marshalAmino(i *big.Int) (string, error) {
bz, err := i.MarshalText()
return string(bz), err
}
// UnmarshalAmino for custom decoding scheme
func unmarshalAmino(i *big.Int, text string) (err error) {
return i.UnmarshalText([]byte(text))
}
// MarshalJSON for custom encoding scheme
// Must be encoded as a string for JSON precision
func marshalJSON(i *big.Int) ([]byte, error) {
text, err := i.MarshalText()
if err != nil {
return nil, err
}
return json.Marshal(string(text))
}
// UnmarshalJSON for custom decoding scheme
// Must be encoded as a string for JSON precision
func unmarshalJSON(i *big.Int, bz []byte) error {
var text string
err := json.Unmarshal(bz, &text)
if err != nil {
return err
}
return i.UnmarshalText([]byte(text))
}
// Int wraps integer with 256 bit range bound
// Checks overflow, underflow and division by zero
// Exists in range from -(2^255-1) to 2^255-1
type Int struct {
	i *big.Int
}

// BigInt converts Int to big.Int, returning a defensive copy.
func (i Int) BigInt() *big.Int {
	return new(big.Int).Set(i.i)
}

// NewInt constructs Int from int64
func NewInt(n int64) Int {
	return Int{big.NewInt(n)}
}

// NewIntFromBigInt constructs Int from big.Int.
// Panics if i requires more than 255 bits.
// NOTE(review): the argument is stored without copying, so later mutation of
// i by the caller aliases into the Int (BigInt() copies on the way out, but
// this constructor does not copy on the way in) — confirm this is intended.
func NewIntFromBigInt(i *big.Int) Int {
	if i.BitLen() > 255 {
		panic("NewIntFromBigInt() out of bound")
	}
	return Int{i}
}

// NewIntFromString constructs Int from string; the base is auto-detected
// from the prefix. ok is false on parse failure or overflow (>255 bits).
func NewIntFromString(s string) (res Int, ok bool) {
	i, ok := newIntegerFromString(s)
	if !ok {
		return
	}
	// Check overflow
	if i.BitLen() > 255 {
		ok = false
		return
	}
	return Int{i}, true
}
// NewIntWithDecimal constructs an Int whose value is n*10^dec.
// Panics if dec is negative or the result exceeds the 255-bit bound.
func NewIntWithDecimal(n int64, dec int) Int {
	if dec < 0 {
		panic("NewIntWithDecimal() decimal is negative")
	}
	shifted := new(big.Int).Mul(
		big.NewInt(n),
		new(big.Int).Exp(big.NewInt(10), big.NewInt(int64(dec)), nil),
	)
	// Check overflow
	if shifted.BitLen() > 255 {
		panic("NewIntWithDecimal() out of bound")
	}
	return Int{shifted}
}
// ZeroInt returns Int value with zero
func ZeroInt() Int { return Int{big.NewInt(0)} }

// OneInt returns Int value with one
func OneInt() Int { return Int{big.NewInt(1)} }

// Int64 converts Int to int64
// Panics if the value is out of range
func (i Int) Int64() int64 {
	if !i.i.IsInt64() {
		panic("Int64() out of bound")
	}
	return i.i.Int64()
}

// IsInt64 reports whether Int64() would succeed without panicking
func (i Int) IsInt64() bool {
	return i.i.IsInt64()
}

// IsZero returns true if Int is zero
func (i Int) IsZero() bool {
	return i.i.Sign() == 0
}

// Sign returns the sign of Int: -1, 0 or +1
func (i Int) Sign() int {
	return i.i.Sign()
}

// Equal compares two Ints for equality
func (i Int) Equal(i2 Int) bool {
	return equal(i.i, i2.i)
}

// GT returns true if first Int is greater than second
func (i Int) GT(i2 Int) bool {
	return gt(i.i, i2.i)
}

// LT returns true if first Int is lesser than second
func (i Int) LT(i2 Int) bool {
	return lt(i.i, i2.i)
}
// Add adds Int from another
// Panics if the result exceeds the 255-bit bound.
func (i Int) Add(i2 Int) (res Int) {
	res = Int{add(i.i, i2.i)}
	// Check overflow
	if res.i.BitLen() > 255 {
		panic("Int overflow")
	}
	return
}

// AddRaw adds int64 to Int
func (i Int) AddRaw(i2 int64) Int {
	return i.Add(NewInt(i2))
}

// Sub subtracts Int from another
// Panics if the result exceeds the 255-bit bound.
func (i Int) Sub(i2 Int) (res Int) {
	res = Int{sub(i.i, i2.i)}
	// Check overflow
	if res.i.BitLen() > 255 {
		panic("Int overflow")
	}
	return
}

// SubRaw subtracts int64 from Int
func (i Int) SubRaw(i2 int64) Int {
	return i.Sub(NewInt(i2))
}
// Mul multiples two Ints
// Panics if the result exceeds the 255-bit bound.
func (i Int) Mul(i2 Int) (res Int) {
	// Cheap pre-check: the product of an m-bit and an n-bit number has at
	// least m+n-1 bits, so reject when that already provably overflows.
	if i.i.BitLen()+i2.i.BitLen()-1 > 255 {
		panic("Int overflow")
	}
	res = Int{mul(i.i, i2.i)}
	// Re-check the actual product's bit length for the borderline case the
	// pre-check admits (the product may be m+n-1 or m+n bits wide).
	if res.i.BitLen() > 255 {
		panic("Int overflow")
	}
	return
}

// MulRaw multipies Int and int64
func (i Int) MulRaw(i2 int64) Int {
	return i.Mul(NewInt(i2))
}
// Div divides Int with Int, panicking on division by zero.
// NOTE(review): this delegates to big.Int.Div, which is Euclidean division;
// for negative operands the result differs from Go's truncated "/" operator.
// Confirm this rounding is the intended semantics.
func (i Int) Div(i2 Int) (res Int) {
	// Check division-by-zero
	if i2.i.Sign() == 0 {
		panic("Division by zero")
	}
	return Int{div(i.i, i2.i)}
}

// DivRaw divides Int with int64
func (i Int) DivRaw(i2 int64) Int {
	return i.Div(NewInt(i2))
}

// Mod returns the remainder after dividing with Int; this is the Euclidean
// modulus, always in [0, |i2|).
func (i Int) Mod(i2 Int) Int {
	if i2.Sign() == 0 {
		panic("division-by-zero")
	}
	return Int{mod(i.i, i2.i)}
}

// ModRaw returns the remainder after dividing with int64
func (i Int) ModRaw(i2 int64) Int {
	return i.Mod(NewInt(i2))
}
// Neg negates Int. The range is symmetric, so negation cannot overflow.
func (i Int) Neg() (res Int) {
	return Int{neg(i.i)}
}

// MinInt returns the minimum of the two Ints.
func MinInt(i1, i2 Int) Int {
	return Int{min(i1.BigInt(), i2.BigInt())}
}

// String implements fmt.Stringer, returning the decimal representation.
func (i Int) String() string {
	return i.i.String()
}

// randomInt returns a pseudo-random Int in [0, i); for testing only.
func randomInt(i Int) Int {
	return NewIntFromBigInt(random(i.BigInt()))
}
// MarshalAmino defines custom encoding scheme
func (i Int) MarshalAmino() (string, error) {
	if i.i == nil { // Necessary since default Int initialization has i.i as nil
		i.i = new(big.Int)
	}
	return marshalAmino(i.i)
}

// UnmarshalAmino defines custom decoding scheme
func (i *Int) UnmarshalAmino(text string) error {
	if i.i == nil { // Necessary since default Int initialization has i.i as nil
		i.i = new(big.Int)
	}
	return unmarshalAmino(i.i, text)
}

// MarshalJSON defines custom encoding scheme.
// The value is encoded as a JSON string to preserve full precision.
func (i Int) MarshalJSON() ([]byte, error) {
	if i.i == nil { // Necessary since default Int initialization has i.i as nil
		i.i = new(big.Int)
	}
	return marshalJSON(i.i)
}

// UnmarshalJSON defines custom decoding scheme.
// The value must be encoded as a JSON string.
func (i *Int) UnmarshalJSON(bz []byte) error {
	if i.i == nil { // Necessary since default Int initialization has i.i as nil
		i.i = new(big.Int)
	}
	return unmarshalJSON(i.i, bz)
}
// Uint wraps integer with 256 bit range bound
// Checks overflow, underflow and division by zero
// Exists in range from 0 to 2^256-1
type Uint struct {
	i *big.Int
}

// BigInt converts Uint to big.Int, returning a defensive copy.
func (i Uint) BigInt() *big.Int {
	return new(big.Int).Set(i.i)
}

// NewUint constructs Uint from uint64
func NewUint(n uint64) Uint {
	i := new(big.Int)
	i.SetUint64(n)
	return Uint{i}
}

// NewUintFromBigInt constructs Uint from big.Int.
// Panics if i is negative or exceeds 256 bits.
// NOTE(review): stores i without copying — same aliasing caveat as
// NewIntFromBigInt; confirm intended.
func NewUintFromBigInt(i *big.Int) Uint {
	res := Uint{i}
	if UintOverflow(res) {
		panic("Uint overflow")
	}
	return res
}
// NewUintFromString constructs Uint from string; the base is auto-detected.
// ok is false on parse failure, a negative value, or overflow past 256 bits.
func NewUintFromString(s string) (res Uint, ok bool) {
	i, ok := newIntegerFromString(s)
	if !ok {
		return
	}
	// Check overflow. Precedence note: && binds tighter than ||, so this
	// rejects negatives, and positives wider than 256 bits — the same
	// predicate as UintOverflow.
	if i.Sign() == -1 || i.Sign() == 1 && i.BitLen() > 256 {
		ok = false
		return
	}
	return Uint{i}, true
}

// NewUintWithDecimal constructs Uint with decimal
// Result value is n*10^dec; panics if dec is negative or the result
// exceeds 256 bits.
func NewUintWithDecimal(n uint64, dec int) Uint {
	if dec < 0 {
		panic("NewUintWithDecimal() decimal is negative")
	}
	exp := new(big.Int).Exp(big.NewInt(10), big.NewInt(int64(dec)), nil)
	i := new(big.Int)
	i.Mul(new(big.Int).SetUint64(n), exp)
	res := Uint{i}
	if UintOverflow(res) {
		panic("NewUintWithDecimal() out of bound")
	}
	return res
}
// ZeroUint returns Uint value with zero
func ZeroUint() Uint { return Uint{big.NewInt(0)} }

// OneUint returns Uint value with one
func OneUint() Uint { return Uint{big.NewInt(1)} }

// Uint64 converts Uint to uint64
// Panics if the value is out of range
func (i Uint) Uint64() uint64 {
	if !i.i.IsUint64() {
		panic("Uint64() out of bound")
	}
	return i.i.Uint64()
}

// IsUint64 reports whether Uint64() would succeed without panicking
func (i Uint) IsUint64() bool {
	return i.i.IsUint64()
}

// IsZero returns true if Uint is zero
func (i Uint) IsZero() bool {
	return i.i.Sign() == 0
}

// Sign returns the sign of Uint: 0 or +1 for any valid Uint
func (i Uint) Sign() int {
	return i.i.Sign()
}

// Equal compares two Uints for equality
func (i Uint) Equal(i2 Uint) bool {
	return equal(i.i, i2.i)
}

// GT returns true if first Uint is greater than second
func (i Uint) GT(i2 Uint) bool {
	return gt(i.i, i2.i)
}

// LT returns true if first Uint is lesser than second
func (i Uint) LT(i2 Uint) bool {
	return lt(i.i, i2.i)
}
// Add adds Uint from another
// Panics if the result exceeds 256 bits.
func (i Uint) Add(i2 Uint) (res Uint) {
	res = Uint{add(i.i, i2.i)}
	if UintOverflow(res) {
		panic("Uint overflow")
	}
	return
}

// AddRaw adds uint64 to Uint
func (i Uint) AddRaw(i2 uint64) Uint {
	return i.Add(NewUint(i2))
}

// Sub subtracts Uint from another
// Panics on underflow (i2 > i), since the difference would be negative.
func (i Uint) Sub(i2 Uint) (res Uint) {
	res = Uint{sub(i.i, i2.i)}
	if UintOverflow(res) {
		panic("Uint overflow")
	}
	return
}

// SafeSub attempts to subtract one Uint from another. A boolean is also returned
// indicating if the result contains integer overflow (i.e. the subtraction
// underflowed). Unlike Sub, SafeSub never panics.
func (i Uint) SafeSub(i2 Uint) (Uint, bool) {
	res := Uint{sub(i.i, i2.i)}
	if UintOverflow(res) {
		return res, true
	}
	return res, false
}

// SubRaw subtracts uint64 from Uint
func (i Uint) SubRaw(i2 uint64) Uint {
	return i.Sub(NewUint(i2))
}
// Mul multiples two Uints
// Panics if the result exceeds 256 bits.
func (i Uint) Mul(i2 Uint) (res Uint) {
	// Cheap pre-check: the product of an m-bit and an n-bit number has at
	// least m+n-1 bits.
	if i.i.BitLen()+i2.i.BitLen()-1 > 256 {
		panic("Uint overflow")
	}
	res = Uint{mul(i.i, i2.i)}
	// Re-check the actual product for the borderline case admitted above.
	if UintOverflow(res) {
		panic("Uint overflow")
	}
	return
}

// MulRaw multipies Uint and uint64
func (i Uint) MulRaw(i2 uint64) Uint {
	return i.Mul(NewUint(i2))
}

// Div divides Uint with Uint, panicking on division by zero.
func (i Uint) Div(i2 Uint) (res Uint) {
	// Check division-by-zero
	if i2.Sign() == 0 {
		panic("division-by-zero")
	}
	return Uint{div(i.i, i2.i)}
}

// DivRaw divides Uint with uint64
func (i Uint) DivRaw(i2 uint64) Uint {
	return i.Div(NewUint(i2))
}

// Mod returns the remainder after dividing with Uint
func (i Uint) Mod(i2 Uint) Uint {
	if i2.Sign() == 0 {
		panic("division-by-zero")
	}
	return Uint{mod(i.i, i2.i)}
}

// ModRaw returns the remainder after dividing with uint64
func (i Uint) ModRaw(i2 uint64) Uint {
	return i.Mod(NewUint(i2))
}

// MinUint returns the minimum of the two Uints
func MinUint(i1, i2 Uint) Uint {
	return Uint{min(i1.BigInt(), i2.BigInt())}
}

// String implements fmt.Stringer, returning the decimal representation
func (i Uint) String() string {
	return i.i.String()
}
// randomUint returns a pseudo-random Uint in [0, i); for testing only
func randomUint(i Uint) Uint {
	return NewUintFromBigInt(random(i.BigInt()))
}

// MarshalAmino defines custom encoding scheme
func (i Uint) MarshalAmino() (string, error) {
	if i.i == nil { // Necessary since default Uint initialization has i.i as nil
		i.i = new(big.Int)
	}
	return marshalAmino(i.i)
}

// UnmarshalAmino defines custom decoding scheme
func (i *Uint) UnmarshalAmino(text string) error {
	if i.i == nil { // Necessary since default Uint initialization has i.i as nil
		i.i = new(big.Int)
	}
	return unmarshalAmino(i.i, text)
}

// MarshalJSON defines custom encoding scheme.
// The value is encoded as a JSON string to preserve full 256-bit precision.
func (i Uint) MarshalJSON() ([]byte, error) {
	if i.i == nil { // Necessary since default Uint initialization has i.i as nil
		i.i = new(big.Int)
	}
	return marshalJSON(i.i)
}

// UnmarshalJSON defines custom decoding scheme.
// The value must be encoded as a JSON string.
func (i *Uint) UnmarshalJSON(bz []byte) error {
	if i.i == nil { // Necessary since default Uint initialization has i.i as nil
		i.i = new(big.Int)
	}
	return unmarshalJSON(i.i, bz)
}
//__________________________________________________________________________
// UintOverflow returns true if a given unsigned integer overflows and false
// otherwise.
func UintOverflow(x Uint) bool {
return x.i.Sign() == -1 || x.i.Sign() == 1 && x.i.BitLen() > 256
}
// AddUint64Overflow performs the addition operation on two uint64 integers and
// returns a boolean on whether or not the result overflows.
func AddUint64Overflow(a, b uint64) (uint64, bool) {
if math.MaxUint64-a < b {
return 0, true
}
return a + b, false
}
// intended to be used with require/assert: require.True(IntEq(...))
func IntEq(t *testing.T, exp, got Int) (*testing.T, bool, string, string, string) {
return t, exp.Equal(got), "expected:\t%v\ngot:\t\t%v", exp.String(), got.String()
} | types/int.go | 0.713132 | 0.422981 | int.go | starcoder |
package ir
import (
"fmt"
"reflect"
"strings"
)
// Operand is an interface for instruction operands.
type Operand interface {
	fmt.Stringer
	operand() // sealed
}

// Register is a machine word operand.
type Register string

func (r Register) String() string { return string(r) }
func (Register) operand() {}

// Discard is a special register to ignore an output of an instruction.
var Discard = Register("_")

// SelectRegisters selects the registers from the list of operands.
// Discard and non-register operands are skipped; nil is returned when
// no registers remain.
func SelectRegisters(ops []Operand) []Register {
	var regs []Register
	for _, operand := range ops {
		r, ok := operand.(Register)
		if !ok || r == Discard {
			continue
		}
		regs = append(regs, r)
	}
	return regs
}
// Constant is a constant operand. It prints in hexadecimal ("$0x2a").
type Constant uint64

func (c Constant) String() string { return fmt.Sprintf("$%#x", uint64(c)) }
func (Constant) operand() {}

// Zero is the zero constant.
var Zero = Constant(0)

// Flag is a single-bit constant operand. Unlike Constant it prints in
// decimal ("$0"/"$1").
type Flag uint64

func (f Flag) String() string { return fmt.Sprintf("$%d", uint64(f)) }
func (Flag) operand() {}
// Program is a sequence of instructions.
type Program struct {
Instructions []Instruction
}
func (p *Program) String() string {
s := ""
for _, i := range p.Instructions {
s += FormatInstruction(i) + "\n"
}
return s
}
// Instruction in the intermediate representation.
type Instruction interface {
Operands() []Operand
instruction() // sealed
}
// FormatInstruction returns a string representation of the instruction:
// the type name as mnemonic, a tab, then the comma-separated operands.
func FormatInstruction(i Instruction) string {
	operands := i.Operands()
	strs := make([]string, len(operands))
	for idx, op := range operands {
		strs[idx] = op.String()
	}
	return reflect.TypeOf(i).Name() + "\t" + strings.Join(strs, ", ")
}
// MOV is a move instruction.
type MOV struct {
	Source      Operand
	Destination Register
}

// Operands returns the source and destination operands.
func (i MOV) Operands() []Operand {
	return []Operand{i.Source, i.Destination}
}

func (MOV) instruction() {}
// CMOV is a conditional move: the move happens when Flag equals the
// constant Equals.
type CMOV struct {
	Source      Operand
	Destination Register
	Flag        Operand
	Equals      Flag
}

// Operands returns all operands of the instruction, including the
// constant Equals.
func (i CMOV) Operands() []Operand {
	return []Operand{i.Source, i.Destination, i.Flag, i.Equals}
}

func (CMOV) instruction() {}
// ADD is an add with carry instruction: Sum, CarryOut = X + Y + CarryIn.
type ADD struct {
	X        Operand
	Y        Operand
	CarryIn  Operand
	Sum      Register
	CarryOut Register
}

// Operands returns all input and output operands.
func (i ADD) Operands() []Operand {
	return []Operand{i.X, i.Y, i.CarryIn, i.Sum, i.CarryOut}
}

func (ADD) instruction() {}
// SUB is a subtract with borrow instruction: Diff, BorrowOut = X - Y - BorrowIn.
type SUB struct {
	X         Operand
	Y         Operand
	BorrowIn  Operand
	Diff      Register
	BorrowOut Register
}

// Operands returns all input and output operands.
func (i SUB) Operands() []Operand {
	return []Operand{i.X, i.Y, i.BorrowIn, i.Diff, i.BorrowOut}
}

func (SUB) instruction() {}
// MUL is a multiply instruction providing lower and upper parts of the result.
type MUL struct {
	X    Operand
	Y    Operand
	High Register
	Low  Register
}

// Operands returns all input and output operands.
func (i MUL) Operands() []Operand {
	return []Operand{i.X, i.Y, i.High, i.Low}
}

func (MUL) instruction() {}
// SHL is a shift left instruction.
type SHL struct {
	X      Operand
	Shift  Constant
	Result Register
}

// Operands returns all operands, including the immediate shift amount.
// NOTE(review): Shift was previously omitted from Operands, unlike CMOV
// which reports its constant operand; it is included here so formatting
// shows the shift amount. Constants are not Registers, so register
// selection is unaffected — confirm no pass depends on the old arity.
func (i SHL) Operands() []Operand {
	return []Operand{i.X, i.Shift, i.Result}
}

func (SHL) instruction() {}
// SHR is a shift right instruction.
type SHR struct {
X Operand
Shift Constant
Result Register
}
func (i SHR) Operands() []Operand {
return []Operand{i.X, i.Result}
}
func (SHR) instruction() {} | arith/ir/isa.go | 0.709623 | 0.45048 | isa.go | starcoder |
package expression
import (
"fmt"
"github.com/dolthub/go-mysql-server/sql"
)
// InTuple is an expression that checks an expression is inside a list of expressions.
type InTuple struct {
	BinaryExpression
}

// We implement Comparer because we have a Left() and a Right(), but we can't be Compare()d
var _ Comparer = (*InTuple)(nil)

// Compare satisfies Comparer but is unsupported for IN; it always panics.
func (in *InTuple) Compare(ctx *sql.Context, row sql.Row) (int, error) {
	panic("Compare not implemented for InTuple")
}

// Type implements the Expression interface; IN always yields a boolean.
func (in *InTuple) Type() sql.Type {
	return sql.Boolean
}

// Left returns the expression tested for membership.
func (in *InTuple) Left() sql.Expression {
	return in.BinaryExpression.Left
}

// Right returns the tuple of candidate expressions.
func (in *InTuple) Right() sql.Expression {
	return in.BinaryExpression.Right
}

// NewInTuple creates an InTuple expression.
func NewInTuple(left sql.Expression, right sql.Expression) *InTuple {
	return &InTuple{BinaryExpression{left, right}}
}
// Eval implements the Expression interface. It returns true when the left
// expression's value equals any element of the right-hand tuple, false when
// no element matches, and NULL (nil) per the SQL rules noted below.
func (in *InTuple) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	// All comparisons are performed in the promoted type of the left side.
	typ := in.Left().Type().Promote()
	leftElems := sql.NumColumns(typ)
	left, err := in.Left().Eval(ctx, row)
	if err != nil {
		return nil, err
	}
	// NULL IN (...) is always NULL.
	if left == nil {
		return nil, nil
	}
	// The NULL handling for IN expressions is tricky. According to
	// https://dev.mysql.com/doc/refman/8.0/en/comparison-operators.html#operator_in:
	// To comply with the SQL standard, IN() returns NULL not only if the expression on the left hand side is NULL, but
	// also if no match is found in the list and one of the expressions in the list is NULL.
	rightNull := false
	left, err = typ.Convert(left)
	if err != nil {
		return nil, err
	}
	switch right := in.Right().(type) {
	case Tuple:
		// Validate column counts for every element before evaluating any.
		for _, el := range right {
			if sql.NumColumns(el.Type()) != leftElems {
				return nil, sql.ErrInvalidOperandColumns.New(leftElems, sql.NumColumns(el.Type()))
			}
		}
		for _, el := range right {
			right, err := el.Eval(ctx, row)
			if err != nil {
				return nil, err
			}
			// Remember NULL elements; they only matter if nothing matches.
			if !rightNull && right == nil {
				rightNull = true
				continue
			}
			right, err = typ.Convert(right)
			if err != nil {
				return nil, err
			}
			cmp, err := typ.Compare(left, right)
			if err != nil {
				return nil, err
			}
			if cmp == 0 {
				return true, nil
			}
		}
		// No match: NULL if any candidate was NULL, otherwise false.
		if rightNull {
			return nil, nil
		}
		return false, nil
	default:
		return nil, ErrUnsupportedInOperand.New(right)
	}
}
// WithChildren implements the Expression interface, rebuilding the IN
// expression from exactly two children (left operand and candidate tuple).
func (in *InTuple) WithChildren(children ...sql.Expression) (sql.Expression, error) {
	if len(children) != 2 {
		return nil, sql.ErrInvalidChildrenNumber.New(in, len(children), 2)
	}
	return NewInTuple(children[0], children[1]), nil
}
// String implements fmt.Stringer.
func (in *InTuple) String() string {
	return fmt.Sprintf("(%s IN %s)", in.Left(), in.Right())
}

// DebugString renders the expression using the debug form of both operands.
func (in *InTuple) DebugString() string {
	return fmt.Sprintf("(%s IN %s)", sql.DebugString(in.Left()), sql.DebugString(in.Right()))
}

// Children implements the Expression interface.
func (in *InTuple) Children() []sql.Expression {
	return []sql.Expression{in.Left(), in.Right()}
}
// NewNotInTuple creates a new NotInTuple expression.
func NewNotInTuple(left sql.Expression, right sql.Expression) sql.Expression {
return NewNot(NewInTuple(left, right))
} | vendor/github.com/dolthub/go-mysql-server/sql/expression/in.go | 0.801819 | 0.472744 | in.go | starcoder |
package conf
import "strings"
// queryStringValue adapts a string map to the flag value interface, using
// URL query-string syntax ("k1=v1&k2=v2") as its textual form.
type queryStringValue map[string]string

// newQueryStringValue stores the default val in *p and returns *p wrapped
// as a *queryStringValue so it can be registered with the flag set.
func newQueryStringValue(val map[string]string, p *map[string]string) *queryStringValue {
	*p = val
	return (*queryStringValue)(p)
}
// Set parses val as a query string ("k1=v1&k2=v2") and replaces the map's
// current contents with the parsed pairs. Segments without an '=' are
// silently skipped. It always returns nil.
func (p queryStringValue) Set(val string) error {
	// Drop any default entries before filling in the parsed ones.
	for key := range p {
		delete(p, key)
	}
	for _, pair := range strings.Split(val, "&") {
		if kv := strings.SplitN(pair, "=", 2); len(kv) == 2 {
			p[kv[0]] = kv[1]
		}
	}
	return nil
}
// Type returns the value's type name, shown in flag usage output.
func (queryStringValue) Type() string {
	return "queryString"
}
// String renders the map back into "key=value" pairs joined by '&'.
// Map iteration order is unspecified, so pair order may vary between calls.
func (p queryStringValue) String() string {
	pairs := make([]string, 0, len(p))
	for key, value := range p {
		pairs = append(pairs, key+"="+value)
	}
	return strings.Join(pairs, "&")
}
// QueryStringVar defines a query string flag and environment variable with specified name, default value, and usage string.
// The argument p points to a query string (string map) variable in which to store the value of the flag and/or environment variable.
// Values use URL query-string syntax, e.g. "k1=v1&k2=v2".
func (c *Configurator) QueryStringVar(p *map[string]string, name string, value map[string]string, usage string) {
	c.env().QueryStringVar(p, name, value, usage)
	c.flag().Var(newQueryStringValue(value, p), name, usage)
}

// QueryString defines a query string flag and environment variable with specified name, default value, and usage string.
// The return value is the address of a query string (string map) variable that stores the value of the flag and/or environment variable.
func (c *Configurator) QueryString(name string, value map[string]string, usage string) *map[string]string {
	p := new(map[string]string)
	c.QueryStringVar(p, name, value, usage)
	return p
}

// QueryStringVarE defines a query string environment variable with specified name, default value, and usage string.
// The argument p points to a query string (string map) variable in which to store the value of the environment variable.
func (c *Configurator) QueryStringVarE(p *map[string]string, name string, value map[string]string, usage string) {
	c.env().QueryStringVar(p, name, value, usage)
}

// QueryStringE defines a query string environment variable with specified name, default value, and usage string.
// The return value is the address of a query string (string map) variable that stores the value of the environment variable.
func (c *Configurator) QueryStringE(name string, value map[string]string, usage string) *map[string]string {
	p := new(map[string]string)
	c.QueryStringVarE(p, name, value, usage)
	return p
}

// QueryStringVarF defines a query string flag with specified name, default value, and usage string.
// The argument p points to a query string (string map) variable in which to store the value of the flag.
func (c *Configurator) QueryStringVarF(p *map[string]string, name string, value map[string]string, usage string) {
	c.flag().Var(newQueryStringValue(value, p), name, usage)
}

// QueryStringF defines a query string flag with specified name, default value, and usage string.
// The return value is the address of a query string (string map) variable that stores the value of the flag.
func (c *Configurator) QueryStringF(name string, value map[string]string, usage string) *map[string]string {
	p := new(map[string]string)
	c.QueryStringVarF(p, name, value, usage)
	return p
}
// QueryStringVar defines a query string flag and environment variable with specified name, default value, and usage string.
// The argument p points to a query string (string map) variable in which to store the value of the flag and/or environment variable.
// It delegates to the package-level Global configurator.
func QueryStringVar(p *map[string]string, name string, value map[string]string, usage string) {
	Global.QueryStringVar(p, name, value, usage)
}

// QueryString defines a query string flag and environment variable with specified name, default value, and usage string.
// The return value is the address of a query string (string map) variable that stores the value of the flag and/or environment variable.
func QueryString(name string, value map[string]string, usage string) *map[string]string {
	return Global.QueryString(name, value, usage)
}

// QueryStringVarE defines a query string environment variable with specified name, default value, and usage string.
// The argument p points to a query string (string map) variable in which to store the value of the environment variable.
func QueryStringVarE(p *map[string]string, name string, value map[string]string, usage string) {
	Global.QueryStringVarE(p, name, value, usage)
}

// QueryStringE defines a query string environment variable with specified name, default value, and usage string.
// The return value is the address of a query string (string map) variable that stores the value of the environment variable.
func QueryStringE(name string, value map[string]string, usage string) *map[string]string {
	return Global.QueryStringE(name, value, usage)
}

// QueryStringVarF defines a query string flag with specified name, default value, and usage string.
// The argument p points to a query string (string map) variable in which to store the value of the flag.
func QueryStringVarF(p *map[string]string, name string, value map[string]string, usage string) {
	Global.QueryStringVarF(p, name, value, usage)
}
// QueryStringF defines a query string flag with specified name, default value, and usage string.
// The return value is the address of a query string (string map) variable that stores the value of the flag.
func QueryStringF(name string, value map[string]string, usage string) *map[string]string {
return Global.QueryStringF(name, value, usage)
} | value_query_string.go | 0.788461 | 0.48688 | value_query_string.go | starcoder |
package animation
import (
"math"
"time"
"github.com/go-game/go-game/gfx"
)
// NewTween returns a new Tween for two Params, where start and end are the two
// Params to be interpolated. duration is the total interpolation time.
// offset indicates the duration after which the interpolation starts. looping
// indicates if the interpolation should go back to the beginning when the end
// is reached.
func NewTween(start, end *gfx.Params, duration time.Duration, offset time.Duration, looping bool) *Tween {
	return &Tween{
		start:    start,
		end:      end,
		duration: duration,
		offset:   offset,
		looping:  looping,
		tweened:  &gfx.Params{},
		// Start negative so the tween idles for `offset` before animating.
		progress: -offset,
	}
}
// Tween interpolates two Params over time. Use NewTween() to create a new Tween.
type Tween struct {
	start    *gfx.Params   // values at progress == 0
	end      *gfx.Params   // values at progress == duration
	duration time.Duration // total animation time
	offset   time.Duration // delay before the animation starts
	progress time.Duration // elapsed time; negative while still inside the offset
	looping  bool          // restart from the beginning when the end is reached
	tweened  *gfx.Params   // most recently computed interpolated values
}
// GetParams returns the interpolated Params as computed by the last Update call.
func (t *Tween) GetParams() *gfx.Params {
	return t.tweened
}
// Finished indicates if the animation is at the end.
// A looping tween never finishes.
func (t *Tween) Finished() bool {
	return !t.looping && t.progress >= t.duration
}
// Update updates the tweened Params.
func (t *Tween) Update(delta time.Duration) {
t.progress += delta
if t.looping {
t.progress %= t.duration
} else {
if t.progress > t.duration {
t.progress = t.duration
}
}
f1 := 1 - math.Max(0.0, float64(t.progress))/float64(t.duration)
f2 := 1 - f1
t.tweened.X = t.start.X*f1 + t.end.X*f2
t.tweened.Y = t.start.Y*f1 + t.end.Y*f2
t.tweened.R = t.start.R*f1 + t.end.R*f2
t.tweened.G = t.start.G*f1 + t.end.G*f2
t.tweened.B = t.start.B*f1 + t.end.B*f2
t.tweened.A = t.start.A*f1 + t.end.A*f2
t.tweened.Rot.Angle = t.start.Rot.Angle*f1 + t.end.Rot.Angle*f2
t.tweened.Rot.X = t.start.Rot.X*f1 + t.end.Rot.X*f2
t.tweened.Rot.Y = t.start.Rot.Y*f1 + t.end.Rot.Y*f2
t.tweened.Scale.Factor = t.start.Scale.Factor*f1 + t.end.Scale.Factor*f2
t.tweened.Scale.X = t.start.Scale.X*f1 + t.end.Scale.X*f2
t.tweened.Scale.Y = t.start.Scale.Y*f1 + t.end.Scale.Y*f2
} | gfx/animation/tween.go | 0.781831 | 0.436682 | tween.go | starcoder |
package pgsql
import (
"database/sql"
"database/sql/driver"
"time"
)
// TstzRangeArrayFromTimeArray2Slice returns a driver.Valuer that produces a
// PostgreSQL tstzrange[] from the given Go [][2]time.Time; each pair is
// encoded as the [lower, upper) bounds of one range.
func TstzRangeArrayFromTimeArray2Slice(val [][2]time.Time) driver.Valuer {
	return tstzRangeArrayFromTimeArray2Slice{val: val}
}

// TstzRangeArrayToTimeArray2Slice returns an sql.Scanner that converts a
// PostgreSQL tstzrange[] into a Go [][2]time.Time and sets it to val.
func TstzRangeArrayToTimeArray2Slice(val *[][2]time.Time) sql.Scanner {
	return tstzRangeArrayToTimeArray2Slice{val: val}
}
// tstzRangeArrayFromTimeArray2Slice implements driver.Valuer for encoding a
// [][2]time.Time as a PostgreSQL tstzrange[] array literal.
type tstzRangeArrayFromTimeArray2Slice struct {
	val [][2]time.Time
}

// Value encodes v.val as a tstzrange[] literal. A nil slice maps to SQL
// NULL; an empty slice maps to "{}". Each pair becomes a half-open range
// element of the form "[\"<lower>\",\"<upper>\")".
func (v tstzRangeArrayFromTimeArray2Slice) Value() (driver.Value, error) {
	if v.val == nil {
		return nil, nil
	} else if len(v.val) == 0 {
		return []byte{'{', '}'}, nil
	}

	// Pre-size the buffer from the fixed per-element encoded length.
	size := (len(v.val) * 57) + // len(`"[\"yyyy-mm-dd hh:mm:ss-hh\",\"yyyy-mm-dd hh:mm:ss-hh\")"`) == 57
		(len(v.val) - 1) + // number of commas between array elements
		2 // surrounding curly braces

	out := make([]byte, 1, size)
	out[0] = '{'

	for _, a := range v.val {
		out = append(out, '"', '[', '\\', '"')
		out = append(out, a[0].Format(timestamptzLayout)...)
		out = append(out, '\\', '"', ',', '\\', '"')
		out = append(out, a[1].Format(timestamptzLayout)...)
		out = append(out, '\\', '"', ')', '"', ',')
	}

	out[len(out)-1] = '}' // replace last "," with "}"
	return out, nil
}
type tstzRangeArrayToTimeArray2Slice struct {
val *[][2]time.Time
}
func (v tstzRangeArrayToTimeArray2Slice) Scan(src interface{}) error {
data, err := srcbytes(src)
if err != nil {
return err
} else if data == nil {
*v.val = nil
return nil
}
elems := pgParseQuotedStringArray(data)
slice := make([][2]time.Time, len(elems))
for i := 0; i < len(elems); i++ {
a := pgParseRange(elems[i])
// drop surrounding escaped double quotes
a[0] = a[0][2 : len(a[0])-2]
a[1] = a[1][2 : len(a[1])-2]
var t0, t1 time.Time
t0, err = time.ParseInLocation(timestamptzLayout, string(a[0]), noZone)
if err != nil {
return err
}
t1, err = time.ParseInLocation(timestamptzLayout, string(a[1]), noZone)
if err != nil {
return err
}
slice[i][0] = t0
slice[i][1] = t1
}
*v.val = slice
return nil
} | pgsql/tstzrangearr.go | 0.668664 | 0.468791 | tstzrangearr.go | starcoder |
package storage
import (
"encoding/json"
"fmt"
"testing"
"github.com/ingrammicro/concerto/api/types"
"github.com/ingrammicro/concerto/utils"
"github.com/stretchr/testify/assert"
)
// TODO exclude from release compile
// GetVolumeListMocked test mocked function: mocks a successful GET of
// /storage/volumes and checks that GetVolumeList returns the given volumes.
func GetVolumeListMocked(t *testing.T, volumesIn []*types.Volume) []*types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// to json
	dIn, err := json.Marshal(volumesIn)
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Get", "/storage/volumes").Return(dIn, 200, nil)
	volumesOut, err := ds.GetVolumeList("")
	assert.Nil(err, "Error getting volume list")
	assert.Equal(volumesIn, volumesOut, "GetVolumeList returned different volumes")
	return volumesOut
}
// GetVolumeListMockedFilteredByServer test mocked function: mocks a successful
// GET of /cloud/servers/{id}/volumes and checks GetVolumeList filtered by the
// first volume's attached server.
// NOTE(review): assumes volumesIn is non-empty (indexes volumesIn[0]).
func GetVolumeListMockedFilteredByServer(t *testing.T, volumesIn []*types.Volume) []*types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// to json
	dIn, err := json.Marshal(volumesIn)
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Get", fmt.Sprintf("/cloud/servers/%s/volumes", volumesIn[0].AttachedServerID)).Return(dIn, 200, nil)
	volumesOut, err := ds.GetVolumeList(volumesIn[0].AttachedServerID)
	assert.Nil(err, "Error getting volume list filtered by server")
	assert.Equal(volumesIn, volumesOut, "GetVolumeList returned different volumes")
	return volumesOut
}
// GetVolumeListFailErrMocked test mocked function: the mocked service returns
// a transport error, which GetVolumeList must propagate.
func GetVolumeListFailErrMocked(t *testing.T, volumesIn []*types.Volume) []*types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// to json
	dIn, err := json.Marshal(volumesIn)
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Get", "/storage/volumes").Return(dIn, 200, fmt.Errorf("mocked error"))
	volumesOut, err := ds.GetVolumeList("")
	assert.NotNil(err, "We are expecting an error")
	assert.Nil(volumesOut, "Expecting nil output")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
	return volumesOut
}

// GetVolumeListFailStatusMocked test mocked function: the mocked service
// returns a non-2xx status code, which must surface as an error.
func GetVolumeListFailStatusMocked(t *testing.T, volumesIn []*types.Volume) []*types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// to json
	dIn, err := json.Marshal(volumesIn)
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Get", "/storage/volumes").Return(dIn, 499, nil)
	volumesOut, err := ds.GetVolumeList("")
	assert.NotNil(err, "We are expecting an status code error")
	assert.Nil(volumesOut, "Expecting nil output")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")
	return volumesOut
}

// GetVolumeListFailJSONMocked test mocked function: the mocked service returns
// a malformed JSON body, which must surface as an unmarshalling error.
func GetVolumeListFailJSONMocked(t *testing.T, volumesIn []*types.Volume) []*types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// wrong json
	dIn := []byte{10, 20, 30}
	// call service
	cs.On("Get", "/storage/volumes").Return(dIn, 200, nil)
	volumesOut, err := ds.GetVolumeList("")
	assert.NotNil(err, "We are expecting a marshalling error")
	assert.Nil(volumesOut, "Expecting nil output")
	assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")
	return volumesOut
}
// GetVolumeMocked test mocked function: mocks a successful GET of
// /storage/volumes/{id} and checks GetVolume returns the given volume.
func GetVolumeMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Get", fmt.Sprintf("/storage/volumes/%s", volumeIn.ID)).Return(dIn, 200, nil)
	volumeOut, err := ds.GetVolume(volumeIn.ID)
	assert.Nil(err, "Error getting volume")
	assert.Equal(*volumeIn, *volumeOut, "GetVolume returned different volumes")
	return volumeOut
}

// GetVolumeFailErrMocked test mocked function: transport error must propagate.
func GetVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Get", fmt.Sprintf("/storage/volumes/%s", volumeIn.ID)).Return(dIn, 200, fmt.Errorf("mocked error"))
	volumeOut, err := ds.GetVolume(volumeIn.ID)
	assert.NotNil(err, "We are expecting an error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
	return volumeOut
}

// GetVolumeFailStatusMocked test mocked function: non-2xx status must surface
// as an error.
func GetVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// to json
	dIn, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Get", fmt.Sprintf("/storage/volumes/%s", volumeIn.ID)).Return(dIn, 499, nil)
	volumeOut, err := ds.GetVolume(volumeIn.ID)
	assert.NotNil(err, "We are expecting an status code error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")
	return volumeOut
}

// GetVolumeFailJSONMocked test mocked function: malformed JSON must surface as
// an unmarshalling error.
func GetVolumeFailJSONMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// wrong json
	dIn := []byte{10, 20, 30}
	// call service
	cs.On("Get", fmt.Sprintf("/storage/volumes/%s", volumeIn.ID)).Return(dIn, 200, nil)
	volumeOut, err := ds.GetVolume(volumeIn.ID)
	assert.NotNil(err, "We are expecting a marshalling error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")
	return volumeOut
}
// CreateVolumeMocked test mocked function: mocks a successful POST of
// /storage/volumes/ and checks CreateVolume returns the created volume.
func CreateVolumeMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// to json
	dOut, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Post", "/storage/volumes/", mapIn).Return(dOut, 200, nil)
	volumeOut, err := ds.CreateVolume(mapIn)
	assert.Nil(err, "Error creating volume list")
	assert.Equal(volumeIn, volumeOut, "CreateVolume returned different volumes")
	return volumeOut
}

// CreateVolumeFailErrMocked test mocked function: transport error must propagate.
func CreateVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// to json
	dOut, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Post", "/storage/volumes/", mapIn).Return(dOut, 200, fmt.Errorf("mocked error"))
	volumeOut, err := ds.CreateVolume(mapIn)
	assert.NotNil(err, "We are expecting an error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
	return volumeOut
}

// CreateVolumeFailStatusMocked test mocked function: non-2xx status must
// surface as an error.
func CreateVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// to json
	dOut, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Post", "/storage/volumes/", mapIn).Return(dOut, 499, nil)
	volumeOut, err := ds.CreateVolume(mapIn)
	assert.NotNil(err, "We are expecting an status code error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")
	return volumeOut
}

// CreateVolumeFailJSONMocked test mocked function: malformed JSON must surface
// as an unmarshalling error.
func CreateVolumeFailJSONMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// wrong json
	dIn := []byte{10, 20, 30}
	// call service
	cs.On("Post", "/storage/volumes/", mapIn).Return(dIn, 200, nil)
	volumeOut, err := ds.CreateVolume(mapIn)
	assert.NotNil(err, "We are expecting a marshalling error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")
	return volumeOut
}
// UpdateVolumeMocked test mocked function: mocks a successful PUT of
// /storage/volumes/{id} and checks UpdateVolume returns the updated volume.
func UpdateVolumeMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// to json
	dOut, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Put", fmt.Sprintf("/storage/volumes/%s", volumeIn.ID), mapIn).Return(dOut, 200, nil)
	volumeOut, err := ds.UpdateVolume(mapIn, volumeIn.ID)
	assert.Nil(err, "Error updating volume list")
	assert.Equal(volumeIn, volumeOut, "UpdateVolume returned different volumes")
	return volumeOut
}

// UpdateVolumeFailErrMocked test mocked function: transport error must propagate.
func UpdateVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// to json
	dOut, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Put", fmt.Sprintf("/storage/volumes/%s", volumeIn.ID), mapIn).Return(dOut, 200, fmt.Errorf("mocked error"))
	volumeOut, err := ds.UpdateVolume(mapIn, volumeIn.ID)
	assert.NotNil(err, "We are expecting an error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
	return volumeOut
}

// UpdateVolumeFailStatusMocked test mocked function: non-2xx status must
// surface as an error.
func UpdateVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// to json
	dOut, err := json.Marshal(volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Put", fmt.Sprintf("/storage/volumes/%s", volumeIn.ID), mapIn).Return(dOut, 499, nil)
	volumeOut, err := ds.UpdateVolume(mapIn, volumeIn.ID)
	assert.NotNil(err, "We are expecting an status code error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")
	return volumeOut
}

// UpdateVolumeFailJSONMocked test mocked function: malformed JSON must surface
// as an unmarshalling error.
func UpdateVolumeFailJSONMocked(t *testing.T, volumeIn *types.Volume) *types.Volume {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// wrong json
	dIn := []byte{10, 20, 30}
	// call service
	cs.On("Put", fmt.Sprintf("/storage/volumes/%s", volumeIn.ID), mapIn).Return(dIn, 200, nil)
	volumeOut, err := ds.UpdateVolume(mapIn, volumeIn.ID)
	assert.NotNil(err, "We are expecting a marshalling error")
	assert.Nil(volumeOut, "Expecting nil output")
	assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")
	return volumeOut
}
// AttachVolumeMocked test mocked function: mocks a successful POST of
// /storage/volumes/{id}/attached_server and checks the returned server ID.
func AttachVolumeMocked(t *testing.T, volumeIn *types.Volume) *types.Server {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// to json
	dOut, err := json.Marshal(types.Server{ID: volumeIn.AttachedServerID})
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Post", fmt.Sprintf("/storage/volumes/%s/attached_server", volumeIn.ID), mapIn).Return(dOut, 200, nil)
	serverOut, err := ds.AttachVolume(mapIn, volumeIn.ID)
	assert.Nil(err, "Error attaching volume")
	assert.Equal(volumeIn.AttachedServerID, serverOut.ID, "AttachVolume returned invalid values")
	return serverOut
}

// AttachVolumeFailErrMocked test mocked function: transport error must propagate.
func AttachVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) *types.Server {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// to json
	dOut, err := json.Marshal(types.Server{ID: volumeIn.AttachedServerID})
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Post", fmt.Sprintf("/storage/volumes/%s/attached_server", volumeIn.ID), mapIn).Return(dOut, 200, fmt.Errorf("mocked error"))
	serverOut, err := ds.AttachVolume(mapIn, volumeIn.ID)
	assert.NotNil(err, "We are expecting an error")
	assert.Nil(serverOut, "Expecting nil output")
	assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
	return serverOut
}

// AttachVolumeFailStatusMocked test mocked function: non-2xx status must
// surface as an error.
func AttachVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) *types.Server {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// to json
	dOut, err := json.Marshal(types.Server{ID: volumeIn.AttachedServerID})
	assert.Nil(err, "Volume test data corrupted")
	// call service
	cs.On("Post", fmt.Sprintf("/storage/volumes/%s/attached_server", volumeIn.ID), mapIn).Return(dOut, 499, nil)
	serverOut, err := ds.AttachVolume(mapIn, volumeIn.ID)
	assert.NotNil(err, "We are expecting an status code error")
	assert.Nil(serverOut, "Expecting nil output")
	assert.Contains(err.Error(), "499", "Error should contain http code 499")
	return serverOut
}

// AttachVolumeFailJSONMocked test mocked function: malformed JSON must surface
// as an unmarshalling error.
func AttachVolumeFailJSONMocked(t *testing.T, volumeIn *types.Volume) *types.Server {
	assert := assert.New(t)
	// wire up
	cs := &utils.MockConcertoService{}
	ds, err := NewVolumeService(cs)
	assert.Nil(err, "Couldn't load volume service")
	assert.NotNil(ds, "Volume service not instanced")
	// convertMap
	mapIn, err := utils.ItemConvertParams(*volumeIn)
	assert.Nil(err, "Volume test data corrupted")
	// wrong json
	dIn := []byte{10, 20, 30}
	// call service
	cs.On("Post", fmt.Sprintf("/storage/volumes/%s/attached_server", volumeIn.ID), mapIn).Return(dIn, 200, nil)
	serverOut, err := ds.AttachVolume(mapIn, volumeIn.ID)
	assert.NotNil(err, "We are expecting a marshalling error")
	assert.Nil(serverOut, "Expecting nil output")
	assert.Contains(err.Error(), "invalid character", "Error message should include the string 'invalid character'")
	return serverOut
}
// DetachVolumeMocked test mocked function
func DetachVolumeMocked(t *testing.T, volumeIn *types.Volume) {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewVolumeService(cs)
assert.Nil(err, "Couldn't load volume service")
assert.NotNil(ds, "Volume service not instanced")
// to json
dIn, err := json.Marshal(volumeIn)
assert.Nil(err, "Volume test data corrupted")
// call service
cs.On("Delete", fmt.Sprintf("/storage/volumes/%s/attached_server", volumeIn.ID)).Return(dIn, 200, nil)
err = ds.DetachVolume(volumeIn.ID)
assert.Nil(err, "Error detaching volume")
}
// DetachVolumeFailErrMocked test mocked function
func DetachVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewVolumeService(cs)
assert.Nil(err, "Couldn't load volume service")
assert.NotNil(ds, "Volume service not instanced")
// to json
dIn, err := json.Marshal(volumeIn)
assert.Nil(err, "Volume test data corrupted")
// call service
cs.On("Delete", fmt.Sprintf("/storage/volumes/%s/attached_server", volumeIn.ID)).Return(dIn, 200, fmt.Errorf("mocked error"))
err = ds.DetachVolume(volumeIn.ID)
assert.NotNil(err, "We are expecting an error")
assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
}
// DetachVolumeFailStatusMocked test mocked function
func DetachVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewVolumeService(cs)
assert.Nil(err, "Couldn't load volume service")
assert.NotNil(ds, "Volume service not instanced")
// to json
dIn, err := json.Marshal(volumeIn)
assert.Nil(err, "Volume test data corrupted")
// call service
cs.On("Delete", fmt.Sprintf("/storage/volumes/%s/attached_server", volumeIn.ID)).Return(dIn, 499, nil)
err = ds.DetachVolume(volumeIn.ID)
assert.NotNil(err, "We are expecting an status code error")
assert.Contains(err.Error(), "499", "Error should contain http code 499")
}
// DeleteVolumeMocked test mocked function
func DeleteVolumeMocked(t *testing.T, volumeIn *types.Volume) {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewVolumeService(cs)
assert.Nil(err, "Couldn't load volume service")
assert.NotNil(ds, "Volume service not instanced")
// to json
dIn, err := json.Marshal(volumeIn)
assert.Nil(err, "Volume test data corrupted")
// call service
cs.On("Delete", fmt.Sprintf("/storage/volumes/%s", volumeIn.ID)).Return(dIn, 200, nil)
err = ds.DeleteVolume(volumeIn.ID)
assert.Nil(err, "Error deleting volume")
}
// DeleteVolumeFailErrMocked test mocked function
func DeleteVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewVolumeService(cs)
assert.Nil(err, "Couldn't load volume service")
assert.NotNil(ds, "Volume service not instanced")
// to json
dIn, err := json.Marshal(volumeIn)
assert.Nil(err, "Volume test data corrupted")
// call service
cs.On("Delete", fmt.Sprintf("/storage/volumes/%s", volumeIn.ID)).Return(dIn, 200, fmt.Errorf("mocked error"))
err = ds.DeleteVolume(volumeIn.ID)
assert.NotNil(err, "We are expecting an error")
assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
}
// DeleteVolumeFailStatusMocked test mocked function
func DeleteVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewVolumeService(cs)
assert.Nil(err, "Couldn't load volume service")
assert.NotNil(ds, "Volume service not instanced")
// to json
dIn, err := json.Marshal(volumeIn)
assert.Nil(err, "Volume test data corrupted")
// call service
cs.On("Delete", fmt.Sprintf("/storage/volumes/%s", volumeIn.ID)).Return(dIn, 499, nil)
err = ds.DeleteVolume(volumeIn.ID)
assert.NotNil(err, "We are expecting an status code error")
assert.Contains(err.Error(), "499", "Error should contain http code 499")
}
// DiscardVolumeMocked test mocked function
func DiscardVolumeMocked(t *testing.T, volumeIn *types.Volume) {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewVolumeService(cs)
assert.Nil(err, "Couldn't load volume service")
assert.NotNil(ds, "Volume service not instanced")
// to json
dIn, err := json.Marshal(volumeIn)
assert.Nil(err, "Volume test data corrupted")
// call service
cs.On("Delete", fmt.Sprintf("/storage/volumes/%s/discard", volumeIn.ID)).Return(dIn, 200, nil)
err = ds.DiscardVolume(volumeIn.ID)
assert.Nil(err, "Error discarding volume")
}
// DiscardVolumeFailErrMocked test mocked function
func DiscardVolumeFailErrMocked(t *testing.T, volumeIn *types.Volume) {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewVolumeService(cs)
assert.Nil(err, "Couldn't load volume service")
assert.NotNil(ds, "Volume service not instanced")
// to json
dIn, err := json.Marshal(volumeIn)
assert.Nil(err, "Volume test data corrupted")
// call service
cs.On("Delete", fmt.Sprintf("/storage/volumes/%s/discard", volumeIn.ID)).Return(dIn, 200, fmt.Errorf("mocked error"))
err = ds.DiscardVolume(volumeIn.ID)
assert.NotNil(err, "We are expecting an error")
assert.Equal(err.Error(), "mocked error", "Error should be 'mocked error'")
}
// DiscardVolumeFailStatusMocked test mocked function
func DiscardVolumeFailStatusMocked(t *testing.T, volumeIn *types.Volume) {
assert := assert.New(t)
// wire up
cs := &utils.MockConcertoService{}
ds, err := NewVolumeService(cs)
assert.Nil(err, "Couldn't load volume service")
assert.NotNil(ds, "Volume service not instanced")
// to json
dIn, err := json.Marshal(volumeIn)
assert.Nil(err, "Volume test data corrupted")
// call service
cs.On("Delete", fmt.Sprintf("/storage/volumes/%s/discard", volumeIn.ID)).Return(dIn, 499, nil)
err = ds.DiscardVolume(volumeIn.ID)
assert.NotNil(err, "We are expecting an status code error")
assert.Contains(err.Error(), "499", "Error should contain http code 499")
} | api/storage/volumes_api_mocked.go | 0.506591 | 0.465327 | volumes_api_mocked.go | starcoder |
package yologo
import (
"fmt"
"image"
"sort"
"gorgonia.org/tensor"
)
// DetectionRectangle Representation of detection
type DetectionRectangle struct {
conf float32
rect image.Rectangle
class string
score float32
}
func (dr *DetectionRectangle) String() string {
return fmt.Sprintf("Detection:\n\tClass = %s\n\tScore = %f\n\tConfidence = %f\n\tCoordinates: [RightTopX = %d, RightTopY = %d, LeftBottomX = %d, LeftBottomY = %d]",
dr.class, dr.score, dr.conf, dr.rect.Min.X, dr.rect.Min.Y, dr.rect.Max.X, dr.rect.Max.Y,
)
}
// GetClass Returns class of object
func (dr *DetectionRectangle) GetClass() string {
return dr.class
}
// Detections Detection rectangles
type Detections []*DetectionRectangle
/* Methods to match sort.Interface interface */
func (detections Detections) Len() int { return len(detections) }
func (detections Detections) Swap(i, j int) {
detections[i], detections[j] = detections[j], detections[i]
}
func (detections Detections) Less(i, j int) bool { return detections[i].conf < detections[j].conf }
// DetectionsOrder Ordering for X-axis
type DetectionsOrder []*DetectionRectangle
/* Methods to match sort.Interface interface */
func (detections DetectionsOrder) Len() int { return len(detections) }
func (detections DetectionsOrder) Swap(i, j int) {
detections[i], detections[j] = detections[j], detections[i]
}
func (detections DetectionsOrder) Less(i, j int) bool {
return detections[i].rect.Min.X < detections[j].rect.Min.X
}
// ProcessOutput Returns postprocessed detections
func (net *YOLOv3) ProcessOutput(classes []string, scoreTreshold, iouTreshold float32) (Detections, error) {
if len(classes) != net.classesNum {
return nil, fmt.Errorf("length of provided slice of classes is not equal to YOLO network 'classesNum' field")
}
preparedDetections := make(Detections, 0)
out := net.GetOutput()
for i := range out {
nodeValue := out[i].Value()
var tensorValue tensor.Tensor
switch nodeValue.(type) {
case tensor.Tensor:
tensorValue = nodeValue.(tensor.Tensor)
break
default:
fmt.Printf("Warning: YOLO output node #%d should be type of tensor.Tensor", i)
break
}
dataValue := tensorValue.Data()
dataF32 := make([]float32, 0)
switch dataValue.(type) {
case []float32:
dataF32 = dataValue.([]float32)
break
default:
fmt.Printf("Warning: YOLO output tensor #%d should be type of []float32", i)
break
}
detections := prepareDetections(dataF32, scoreTreshold, net.netSize, classes)
preparedDetections = append(preparedDetections, detections...)
}
finalDetections := nonMaxSupr(preparedDetections, iouTreshold)
sort.Sort(DetectionsOrder(finalDetections))
return finalDetections, nil
}
// prepareDetections Filter detections
func prepareDetections(data []float32, scoreTreshold float32, netSize int, classes []string) Detections {
detections := make(Detections, 0)
for i := 0; i < len(data); i += (len(classes) + 5) {
class := 0
maxProbability := float32(0.0)
for j := 5; j < 5+len(classes); j++ {
if data[i+j] > maxProbability {
maxProbability = data[i+j]
class = (j - 5) % len(classes)
}
}
if maxProbability*data[i+4] > scoreTreshold {
box := &DetectionRectangle{
conf: data[i+4],
rect: Rectify(int(data[i]), int(data[i+1]), int(data[i+2]), int(data[i+3]), netSize, netSize),
class: classes[class],
score: maxProbability,
}
detections = append(detections, box)
}
}
return detections
}
// nonMaxSupr Sorts boxes by confidence
func nonMaxSupr(detections Detections, iouTreshold float32) Detections {
sort.Sort(detections)
nms := make(Detections, 0)
if len(detections) == 0 {
return nms
}
nms = append(nms, detections[0])
for i := 1; i < len(detections); i++ {
tocheck, del := len(nms), false
for j := 0; j < tocheck; j++ {
currIOU := IOUFloat32(detections[i].rect, nms[j].rect)
if currIOU > iouTreshold && detections[i].class == nms[j].class {
del = true
break
}
}
if !del {
nms = append(nms, detections[i])
}
}
return nms
} | detections.go | 0.706899 | 0.477189 | detections.go | starcoder |
package quicktime
import "errors"
import "fmt"
// A STBLAtom stores the three Atoms requires to look up the file offset of
// an individual frame ("sample") in the file: STSZ (sample size),
// STCO or CO64 (chunk offset) and STSC (samples per chunk)
type STBLAtom struct {
Stsz STSZAtom
Stco CO64Atom
Stsc STSCAtom
}
// ParseSTBL converts a generic Atom containing a "stbl" to a STBLAtom.
func ParseSTBL(atom *Atom) (STBLAtom, error) {
if atom.Type != "stbl" {
return STBLAtom{}, errors.New("Not an STBL atom")
}
if !atom.HasData() {
return STBLAtom{}, errors.New("STBL Atom doesn't have data")
}
stbl := STBLAtom{}
// Having data implies all children have data as well
for _, child := range atom.Children {
switch child.Type {
case "stsc":
stbl.Stsc, _ = ParseSTSC(child)
case "stco", "co64":
stbl.Stco, _ = ParseSTCO(child)
case "stsz":
stbl.Stsz, _ = ParseSTSZ(child)
}
}
return stbl, nil
}
// SampleOffset calculates the byte offset of a given sample within the
// quicktime file
func (stbl STBLAtom) SampleOffset(sample int) (int64, error) {
if uint64(sample) > stbl.NumFrames() {
return 0, fmt.Errorf("Requested sample %d in file %d samples long", sample, stbl.NumFrames())
}
// Use STCO to determine which chunk it's in
chunk, chunkStart, _, err := stbl.Stsc.SampleChunk(sample)
if chunk < 0 {
panic(fmt.Sprintln("Couldn't determine which chunk", sample, "is in"))
} else if err != nil {
panic(fmt.Sprintln("Error determining chunk: %s", err.Error()))
}
//fmt.Printf("STBL: Believe sample %d is number %d in chunk %d which starts with sample %d\n", sample, remainder, chunk, chunkStart)
offset := stbl.Stco.ChunkOffset(chunk)
for i := chunkStart; i < sample; i++ {
offset += int64(stbl.Stsz.SampleSize(i))
}
return offset, nil
}
// NumFrames returns the number of samples (frames) in the stbl
func (stbl STBLAtom) NumFrames() uint64 {
// What if there's only one sample?
return stbl.Stsz.NumSamples()
}
// SampleOffsetSize calculates both the offset and size of a sample
func (stbl STBLAtom) SampleOffsetSize(sample int) (int64, int, error) {
offset, err := stbl.SampleOffset(sample)
sz := stbl.Stsz.SampleSize(sample)
return offset, sz, err
} | stbl_atom.go | 0.657648 | 0.51879 | stbl_atom.go | starcoder |
package processor
import (
"bytes"
"encoding/json"
"fmt"
"time"
"github.com/Jeffail/benthos/v3/internal/docs"
"github.com/Jeffail/benthos/v3/internal/tracing"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/message"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/response"
"github.com/Jeffail/benthos/v3/lib/types"
"github.com/itchyny/gojq"
)
func init() {
Constructors[TypeJQ] = TypeSpec{
constructor: NewJQ,
Status: docs.StatusStable,
Categories: []Category{
CategoryMapping,
},
Summary: `
Transforms and filters messages using jq queries.`,
Description: `
:::note Try out Bloblang
For better performance and improved capabilities try out native Benthos mapping with the [bloblang processor](/docs/components/processors/bloblang).
:::
The provided query is executed on each message, targeting either the contents
as a structured JSON value or as a raw string using the field ` + "`raw`" + `,
and the message is replaced with the query result.
Message metadata is also accessible within the query from the variable
` + "`$metadata`" + `.
This processor uses the [gojq library][gojq], and therefore does not require
jq to be installed as a dependency. However, this also means there are some
differences in how these queries are executed versus the jq cli which you can
[read about here][gojq-difference].
If the query does not emit any value then the message is filtered, if the query
returns multiple values then the resulting message will be an array containing
all values.
The full query syntax is described in [jq's documentation][jq-docs].
## Error Handling
Queries can fail, in which case the message remains unchanged, errors are
logged, and the message is flagged as having failed, allowing you to use
[standard processor error handling patterns](/docs/configuration/error_handling).`,
Footnotes: `
[gojq]: https://github.com/itchyny/gojq
[gojq-difference]: https://github.com/itchyny/gojq#difference-to-jq
[jq-docs]: https://stedolan.github.io/jq/manual/`,
Examples: []docs.AnnotatedExample{
{
Title: "Mapping",
Summary: `
When receiving JSON documents of the form:
` + "```json" + `
{
"locations": [
{"name": "Seattle", "state": "WA"},
{"name": "New York", "state": "NY"},
{"name": "Bellevue", "state": "WA"},
{"name": "Olympia", "state": "WA"}
]
}
` + "```" + `
We could collapse the location names from the state of Washington into a field ` + "`Cities`" + `:
` + "```json" + `
{"Cities": "Bellevue, Olympia, Seattle"}
` + "```" + `
With the following config:`,
Config: `
pipeline:
processors:
- jq:
query: '{Cities: .locations | map(select(.state == "WA").name) | sort | join(", ") }'
`,
},
},
FieldSpecs: docs.FieldSpecs{
docs.FieldCommon("query", "The jq query to filter and transform messages with."),
docs.FieldAdvanced("raw", "Whether to process the input as a raw string instead of as JSON."),
docs.FieldAdvanced("output_raw", "Whether to output raw text (unquoted) instead of JSON strings when the emitted values are string types."),
},
}
}
//------------------------------------------------------------------------------
// JQConfig contains configuration fields for the JQ processor.
type JQConfig struct {
Query string `json:"query" yaml:"query"`
Raw bool `json:"raw" yaml:"raw"`
OutputRaw bool `json:"output_raw" yaml:"output_raw"`
}
// NewJQConfig returns a JQConfig with default values.
func NewJQConfig() JQConfig {
return JQConfig{
Query: ".",
}
}
//------------------------------------------------------------------------------
var jqCompileOptions = []gojq.CompilerOption{
gojq.WithVariables([]string{"$metadata"}),
}
// JQ is a processor that passes messages through gojq.
type JQ struct {
conf JQConfig
log log.Modular
stats metrics.Type
code *gojq.Code
mCount metrics.StatCounter
mCountParts metrics.StatCounter
mSent metrics.StatCounter
mBatchSent metrics.StatCounter
mDropped metrics.StatCounter
mDroppedParts metrics.StatCounter
mErr metrics.StatCounter
mErrJSONParse metrics.StatCounter
mErrJSONSet metrics.StatCounter
mErrQuery metrics.StatCounter
}
// NewJQ returns a JQ processor.
func NewJQ(
conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
j := &JQ{
conf: conf.JQ,
stats: stats,
log: log,
mCount: stats.GetCounter("count"),
mCountParts: stats.GetCounter("count_parts"),
mSent: stats.GetCounter("sent"),
mBatchSent: stats.GetCounter("batch.count"),
mDropped: stats.GetCounter("dropped"),
mDroppedParts: stats.GetCounter("dropped_num_parts"),
mErr: stats.GetCounter("error"),
mErrJSONParse: stats.GetCounter("error.json_parse"),
mErrJSONSet: stats.GetCounter("error.json_set"),
mErrQuery: stats.GetCounter("error.query"),
}
query, err := gojq.Parse(j.conf.Query)
if err != nil {
return nil, fmt.Errorf("error parsing jq query: %w", err)
}
j.code, err = gojq.Compile(query, jqCompileOptions...)
if err != nil {
return nil, fmt.Errorf("error compiling jq query: %w", err)
}
return j, nil
}
//------------------------------------------------------------------------------
func (j *JQ) getPartMetadata(part types.Part) map[string]interface{} {
metadata := map[string]interface{}{}
part.Metadata().Iter(func(k, v string) error {
metadata[k] = v
return nil
})
return metadata
}
func (j *JQ) getPartValue(part types.Part, raw bool) (obj interface{}, err error) {
if raw {
return string(part.Get()), nil
}
obj, err = part.JSON()
if err == nil {
obj, err = message.CopyJSON(obj)
}
if err != nil {
j.mErrJSONParse.Incr(1)
j.log.Debugf("Failed to parse part into json: %v\n", err)
return nil, err
}
return obj, nil
}
// ProcessMessage applies the processor to a message, either creating >0
// resulting messages or a response to be sent back to the message source.
func (j *JQ) ProcessMessage(msg types.Message) ([]types.Message, types.Response) {
j.mCount.Incr(1)
newMsg := msg.Copy()
iteratePartsFilterableWithSpan(TypeJQ, nil, newMsg, func(index int, span *tracing.Span, part types.Part) (bool, error) {
in, err := j.getPartValue(part, j.conf.Raw)
if err != nil {
j.mErr.Incr(1)
return false, err
}
metadata := j.getPartMetadata(part)
var emitted []interface{}
iter := j.code.Run(in, metadata)
for {
out, ok := iter.Next()
if !ok {
break
}
if err, ok := out.(error); ok {
j.log.Debugf(err.Error())
j.mErr.Incr(1)
j.mErrQuery.Incr(1)
return false, err
}
j.mSent.Incr(1)
emitted = append(emitted, out)
}
if j.conf.OutputRaw {
raw, err := j.marshalRaw(emitted)
if err != nil {
j.log.Debugf("Failed to marshal raw text: %s", err)
j.mErr.Incr(1)
return false, err
}
// Sometimes the query result is an empty string. Example:
// echo '{ "foo": "" }' | jq .foo
// In that case we want pass on the empty string instead of treating it as
// an empty message and dropping it
if len(raw) == 0 && len(emitted) == 0 {
j.mDroppedParts.Incr(1)
return false, nil
}
part.Set(raw)
return true, nil
} else if len(emitted) > 1 {
if err = part.SetJSON(emitted); err != nil {
j.log.Debugf("Failed to set part JSON: %v\n", err)
j.mErr.Incr(1)
j.mErrJSONSet.Incr(1)
return false, err
}
} else if len(emitted) == 1 {
if err = part.SetJSON(emitted[0]); err != nil {
j.log.Debugf("Failed to set part JSON: %v\n", err)
j.mErr.Incr(1)
j.mErrJSONSet.Incr(1)
return false, err
}
} else {
j.mDroppedParts.Incr(1)
return false, nil
}
return true, nil
})
if newMsg.Len() == 0 {
j.mDropped.Incr(1)
return nil, response.NewAck()
}
j.mBatchSent.Incr(1)
j.mSent.Incr(int64(newMsg.Len()))
return []types.Message{newMsg}, nil
}
// CloseAsync shuts down the processor and stops processing requests.
func (*JQ) CloseAsync() {
}
// WaitForClose blocks until the processor has closed down.
func (*JQ) WaitForClose(timeout time.Duration) error {
return nil
}
func (j *JQ) marshalRaw(values []interface{}) ([]byte, error) {
buf := bytes.NewBufferString("")
for index, el := range values {
var rawResult []byte
val, isString := el.(string)
if isString {
rawResult = []byte(val)
} else {
marshalled, err := json.Marshal(el)
if err != nil {
return nil, fmt.Errorf("failed marshal JQ result at index %d: %w", index, err)
}
rawResult = marshalled
}
if _, err := buf.Write(rawResult); err != nil {
return nil, fmt.Errorf("failed to write JQ result at index %d: %w", index, err)
}
}
bs := buf.Bytes()
return bs, nil
} | lib/processor/jq.go | 0.771155 | 0.719882 | jq.go | starcoder |
package gl
import (
"image/color"
"fyne.io/fyne"
"fyne.io/fyne/canvas"
"fyne.io/fyne/internal/cache"
"fyne.io/fyne/internal/painter"
)
func rectInnerCoords(size fyne.Size, pos fyne.Position, fill canvas.ImageFill, aspect float32) (fyne.Size, fyne.Position) {
if fill == canvas.ImageFillContain || fill == canvas.ImageFillOriginal {
// change pos and size accordingly
viewAspect := float32(size.Width) / float32(size.Height)
newWidth, newHeight := size.Width, size.Height
widthPad, heightPad := 0, 0
if viewAspect > aspect {
newWidth = int(float32(size.Height) * aspect)
widthPad = (size.Width - newWidth) / 2
} else if viewAspect < aspect {
newHeight = int(float32(size.Width) / aspect)
heightPad = (size.Height - newHeight) / 2
}
return fyne.NewSize(newWidth, newHeight), fyne.NewPos(pos.X+widthPad, pos.Y+heightPad)
}
return size, pos
}
// rectCoords calculates the openGL coordinate space of a rectangle
func (p *glPainter) rectCoords(size fyne.Size, pos fyne.Position, frame fyne.Size,
fill canvas.ImageFill, aspect float32, pad int) []float32 {
size, pos = rectInnerCoords(size, pos, fill, aspect)
xPos := float32(pos.X-pad) / float32(frame.Width)
x1 := -1 + xPos*2
x2Pos := float32(pos.X+size.Width+pad) / float32(frame.Width)
x2 := -1 + x2Pos*2
yPos := float32(pos.Y-pad) / float32(frame.Height)
y1 := 1 - yPos*2
y2Pos := float32(pos.Y+size.Height+pad) / float32(frame.Height)
y2 := 1 - y2Pos*2
return []float32{
// coord x, y, z texture x, y
x1, y2, 0, 0.0, 1.0, // top left
x1, y1, 0, 0.0, 0.0, // bottom left
x2, y2, 0, 1.0, 1.0, // top right
x2, y1, 0, 1.0, 0.0, // bottom right
}
}
func (p *glPainter) drawTextureWithDetails(o fyne.CanvasObject, creator func(canvasObject fyne.CanvasObject) Texture,
pos fyne.Position, size, frame fyne.Size, fill canvas.ImageFill, alpha, aspect float32, pad int) {
texture := getTexture(o, creator)
if texture == NoTexture {
return
}
points := p.rectCoords(size, pos, frame, fill, aspect, pad)
vbo := p.glCreateBuffer(points)
p.glDrawTexture(texture, alpha)
p.glFreeBuffer(vbo)
}
func (p *glPainter) drawWidget(wid fyne.Widget, pos fyne.Position, frame fyne.Size) {
if cache.Renderer(wid).BackgroundColor() == color.Transparent {
return
}
p.drawTextureWithDetails(wid, p.newGlRectTexture, pos, wid.Size(), frame, canvas.ImageFillStretch, 1.0, 0.0, 0)
}
func (p *glPainter) drawCircle(circle *canvas.Circle, pos fyne.Position, frame fyne.Size) {
p.drawTextureWithDetails(circle, p.newGlCircleTexture, pos, circle.Size(), frame, canvas.ImageFillStretch,
1.0, 0.0, painter.VectorPad(circle))
}
func (p *glPainter) drawLine(line *canvas.Line, pos fyne.Position, frame fyne.Size) {
p.drawTextureWithDetails(line, p.newGlLineTexture, pos, line.Size(), frame, canvas.ImageFillStretch,
1.0, 0.0, painter.VectorPad(line))
}
func (p *glPainter) drawImage(img *canvas.Image, pos fyne.Position, frame fyne.Size) {
aspect := painter.GetAspect(img)
if aspect == 0 {
aspect = 1
}
p.drawTextureWithDetails(img, p.newGlImageTexture, pos, img.Size(), frame, img.FillMode, float32(img.Alpha()), aspect, 0)
}
func (p *glPainter) drawRaster(img *canvas.Raster, pos fyne.Position, frame fyne.Size) {
p.drawTextureWithDetails(img, p.newGlRasterTexture, pos, img.Size(), frame, canvas.ImageFillStretch, float32(img.Alpha()), 0.0, 0)
}
func (p *glPainter) drawGradient(o fyne.CanvasObject, texCreator func(fyne.CanvasObject) Texture, pos fyne.Position, frame fyne.Size) {
p.drawTextureWithDetails(o, texCreator, pos, o.Size(), frame, canvas.ImageFillStretch, 1.0, 0.0, 0)
}
func (p *glPainter) drawRectangle(rect *canvas.Rectangle, pos fyne.Position, frame fyne.Size) {
p.drawTextureWithDetails(rect, p.newGlRectTexture, pos, rect.Size(), frame, canvas.ImageFillStretch,
1.0, 0.0, painter.VectorPad(rect))
}
func (p *glPainter) drawText(text *canvas.Text, pos fyne.Position, frame fyne.Size) {
if text.Text == "" {
return
}
size := text.MinSize()
containerSize := text.Size()
switch text.Alignment {
case fyne.TextAlignTrailing:
pos = fyne.NewPos(pos.X+containerSize.Width-size.Width, pos.Y)
case fyne.TextAlignCenter:
pos = fyne.NewPos(pos.X+(containerSize.Width-size.Width)/2, pos.Y)
}
if text.Size().Height > text.MinSize().Height {
pos = fyne.NewPos(pos.X, pos.Y+(text.Size().Height-text.MinSize().Height)/2)
}
p.drawTextureWithDetails(text, p.newGlTextTexture, pos, size, frame, canvas.ImageFillStretch, 1.0, 0.0, 0)
}
func (p *glPainter) drawObject(o fyne.CanvasObject, pos fyne.Position, frame fyne.Size) {
if !o.Visible() {
return
}
switch obj := o.(type) {
case *canvas.Circle:
p.drawCircle(obj, pos, frame)
case *canvas.Line:
p.drawLine(obj, pos, frame)
case *canvas.Image:
p.drawImage(obj, pos, frame)
case *canvas.Raster:
p.drawRaster(obj, pos, frame)
case *canvas.Rectangle:
p.drawRectangle(obj, pos, frame)
case *canvas.Text:
p.drawText(obj, pos, frame)
case *canvas.LinearGradient:
p.drawGradient(obj, p.newGlLinearGradientTexture, pos, frame)
case *canvas.RadialGradient:
p.drawGradient(obj, p.newGlRadialGradientTexture, pos, frame)
case fyne.Widget:
p.drawWidget(obj, pos, frame)
}
} | internal/painter/gl/draw.go | 0.706494 | 0.438485 | draw.go | starcoder |
package qrcode
import (
"github.com/yeqown/go-qrcode/matrix"
)
// maskPatternModulo ...
// mask Pattern ref to: https://www.thonky.com/qr-code-tutorial/mask-patterns
type maskPatternModulo uint32
const (
// modulo0 (x+y) mod 2 == 0
modulo0 maskPatternModulo = iota
// modulo1 (x) mod 2 == 0
modulo1
// modulo2 (y) mod 3 == 0
modulo2
// modulo3 (x+y) mod 3 == 0
modulo3
// modulo4 (floor (x/ 2) + floor (y/ 3) mod 2 == 0
modulo4
// modulo5 (x * y) mod 2) + (x * y) mod 3) == 0
modulo5
// modulo6 (x * y) mod 2) + (x * y) mod 3) mod 2 == 0
modulo6
// modulo7 (x + y) mod 2) + (x * y) mod 3) mod 2 == 0
modulo7
)
var (
// 1011101 0000
statePattern1 = []matrix.State{matrix.StateTrue, matrix.StateFalse, matrix.StateTrue, matrix.StateTrue, matrix.StateTrue, matrix.StateFalse, matrix.StateTrue,
matrix.StateFalse, matrix.StateFalse, matrix.StateFalse, matrix.StateFalse}
// 0000 1011101
statePattern2 = []matrix.State{matrix.StateFalse, matrix.StateFalse, matrix.StateFalse, matrix.StateFalse,
matrix.StateTrue, matrix.StateFalse, matrix.StateTrue, matrix.StateTrue, matrix.StateTrue, matrix.StateFalse, matrix.StateTrue}
)
// calculateScore calculate the maskScore of masking result ...
func calculateScore(mat *matrix.Matrix) int {
debugLogf("calculate maskScore starting")
score1 := rule1(mat.Copy())
score2 := rule2(mat.Copy())
score3 := rule3(mat.Copy())
score4 := rule4(mat.Copy())
debugLogf("maskScore: %d", score1+score2+score3+score4)
return score1 + score2 + score3 + score4
}
// 第一条规则为一行(或列)中的每组五个或更多相同颜色的模块提供QR代码。
func rule1(mat *matrix.Matrix) int {
// Row socre
var (
score int
rowCurState matrix.State
rowCurColorCnt int
colCurState matrix.State
colCurColorCnt int
)
mat.Iterate(matrix.ROW, func(x, y int, value matrix.State) {
if x == 0 {
rowCurColorCnt = 0
rowCurState = value
return
}
if value == rowCurState {
rowCurColorCnt++
} else {
rowCurState = value
}
if rowCurColorCnt == 5 {
score += 3
} else if rowCurColorCnt > 5 {
score++
}
})
// column
mat.Iterate(matrix.COLUMN, func(x, y int, value matrix.State) {
if x == 0 {
colCurColorCnt = 0
colCurState = value
return
}
if value == colCurState {
colCurColorCnt++
} else {
colCurState = value
}
if colCurColorCnt == 5 {
score += 3
} else if colCurColorCnt > 5 {
score++
}
})
return score
}
// 第二个规则给出了QR码对矩阵中相同颜色模块的每个2x2区域的惩罚。
func rule2(mat *matrix.Matrix) int {
var (
score int
s0, s1, s2, s3 matrix.State
)
for x := 0; x < mat.Width()-1; x++ {
for y := 0; y < mat.Height()-1; y++ {
s0, _ = mat.Get(x, y)
s1, _ = mat.Get(x+1, y)
s2, _ = mat.Get(x, y+1)
s3, _ = mat.Get(x+1, y+1)
if s0 == s1 && s2 == s3 && s1 == s2 {
score += 3
}
}
}
return score
}
// 如果存在看起来类似于取景器模式的模式,则第三规则给QR码一个大的惩罚
// dark-light-dark-dark-dark-light-dark // 1011101 0000 or 0000 1011101
func rule3(mat *matrix.Matrix) int {
var (
score int
stateSlice []matrix.State
)
for y := 0; y < mat.Height(); y++ {
for x := 0; x < mat.Width()-11; x++ {
for i := 0; i < 11; i++ {
s, _ := mat.Get(x+i, y)
stateSlice = append(stateSlice, s)
}
if matrix.StateSliceMatched(statePattern1, stateSlice) {
score += 40
}
if matrix.StateSliceMatched(statePattern2, stateSlice) {
score += 40
}
}
}
for x := 0; x < mat.Width(); x++ {
for y := 0; y < mat.Height()-11; y++ {
// stateSlice =
for i := 0; i < 11; i++ {
s, _ := mat.Get(x, y+i)
stateSlice = append(stateSlice, s)
}
if matrix.StateSliceMatched(statePattern1, stateSlice) {
score += 40
}
if matrix.StateSliceMatched(statePattern2, stateSlice) {
score += 40
}
}
}
return score
}
// 如果超过一半的模块是暗的或轻的,则第四规则给QR码一个惩罚,对较大的差异有较大的惩罚
func rule4(mat *matrix.Matrix) int {
var (
totalCnt = mat.Width() * mat.Height()
darkCnt, darkPercent int
)
mat.Iterate(matrix.ROW, func(x, y int, s matrix.State) {
if s == matrix.StateTrue {
darkCnt++
}
})
darkPercent = (darkCnt * 100) / totalCnt
x := 0
if darkPercent%5 == 0 {
x = 1
}
last5Times := abs(((darkPercent/5)-x)*5 - 50)
next5Times := abs(((darkPercent/5)+1)*5 - 50)
// get the min maskScore
if last5Times > next5Times {
// scoreC <- next5Times / 5 * 10
return next5Times * 2
} else {
return last5Times * 2
}
}
func abs(x int) int {
if x < 0 {
return -x
}
return x
}
type mask struct {
mat *matrix.Matrix // matrix
mode maskPatternModulo // mode
}
// newMask ...
func newMask(m *matrix.Matrix, mode maskPatternModulo) *mask {
mask := &mask{
mat: m.Copy(),
mode: mode,
}
mask.init()
return mask
}
// moduloFunc to define what's modulo func
type moduloFunc func(int, int) bool
// init generate maks by mode
func (m *mask) init() {
var f moduloFunc
switch m.mode {
case modulo0:
f = modulo0Func
case modulo1:
f = modulo1Func
case modulo2:
f = modulo2Func
case modulo3:
f = modulo3Func
case modulo4:
f = modulo4Func
case modulo5:
f = modulo5Func
case modulo6:
f = modulo6Func
case modulo7:
f = modulo7Func
}
m.mat.Iterate(matrix.ROW, func(x, y int, s matrix.State) {
// skip the function modules
if state, _ := m.mat.Get(x, y); state != matrix.StateInit {
_ = m.mat.Set(x, y, matrix.StateInit)
return
}
if f(x, y) {
_ = m.mat.Set(x, y, matrix.StateTrue)
} else {
_ = m.mat.Set(x, y, matrix.StateFalse)
}
})
}
// modulo0Func for maskPattern function
// modulo0 (x+y) mod 2 == 0
func modulo0Func(x, y int) bool {
return (x+y)%2 == 0
}
// modulo1Func for maskPattern function
// modulo1 (y) mod 2 == 0
func modulo1Func(x, y int) bool {
return y%2 == 0
}
// modulo2Func for maskPattern function
// modulo2 (x) mod 3 == 0
func modulo2Func(x, y int) bool {
return x%3 == 0
}
// modulo3Func for maskPattern function
// modulo3 (x+y) mod 3 == 0
func modulo3Func(x, y int) bool {
return (x+y)%3 == 0
}
// modulo4Func for maskPattern function
// modulo4 (floor (x/ 2) + floor (y/ 3) mod 2 == 0
func modulo4Func(x, y int) bool {
return (x/3+y/2)%2 == 0
}
// modulo5Func is mask pattern 5: (x*y) mod 2 + (x*y) mod 3 == 0.
func modulo5Func(x, y int) bool {
	p := x * y
	return p%2+p%3 == 0
}

// modulo6Func is mask pattern 6: ((x*y) mod 2 + (x*y) mod 3) mod 2 == 0.
func modulo6Func(x, y int) bool {
	p := x * y
	return (p%2+p%3)%2 == 0
}

// modulo7Func is mask pattern 7: ((x+y) mod 2 + (x*y) mod 3) mod 2 == 0.
func modulo7Func(x, y int) bool {
	a := (x + y) % 2
	b := (x * y) % 3
	return (a+b)%2 == 0
}
package main
import (
"fmt"
)
// Feature is the minimum contract for a metric that accumulates int64
// observations and can serialize itself.
type Feature interface {
	Add(int64)      // Add folds a new observation into the feature.
	Export() string // Export renders the feature's contents as a string.
	Get() int64     // Get returns a representative scalar for the feature.
	Set(int64)      // Set resets the feature to a particular value.
}
// BinFeature counts values into fixed-width bins covering [min, max].
// Values below the range land in the first bin; values above it land in
// the last bin.
type BinFeature struct {
	num_bins int   // highest valid bin index (number of bins - 1)
	bin_sep  int   // width of the value range covered by each bin (>= 1)
	bins     []int // observation count per bin
}

// Init configures the feature with num_bins bins spanning [min, max].
// Anything below min is counted in the lowest bin, and anything above max
// is put in the last bin. num_bins should be at least 2 and max > min;
// degenerate arguments are clamped so that Add never divides by zero.
func (f *BinFeature) Init(min int, max int, num_bins int) {
	f.num_bins = num_bins - 1
	if f.num_bins < 0 {
		f.num_bins = 0
	}
	// Guard against a zero (or negative) separator — e.g. num_bins == 1 or
	// max-min < num_bins — which previously caused a divide-by-zero panic
	// in Init or Add.
	f.bin_sep = 1
	if f.num_bins > 0 {
		if sep := (max - min) / f.num_bins; sep > 0 {
			f.bin_sep = sep
		}
	}
	// make already zero-fills the slice; the old explicit zeroing loop was
	// redundant.
	f.bins = make([]int, num_bins)
}

// Add counts val into the bin covering it, clamping to the last bin.
// (Equivalent to the previous MinInt-based clamp, inlined.)
func (f *BinFeature) Add(val int64) {
	bin := int(val) / f.bin_sep
	if bin > f.num_bins {
		bin = f.num_bins
	}
	f.bins[bin]++
}

// Export renders the bin counts as a comma-separated list.
func (f *BinFeature) Export() string {
	ret := ""
	for i, c := range f.bins {
		if i > 0 {
			ret += ","
		}
		ret += fmt.Sprintf("%d", c)
	}
	return ret
}

// Get returns the count in the first bin.
// NOTE(review): returning only bins[0] looks arbitrary for a binned
// feature — confirm callers expect this.
func (f *BinFeature) Get() int64 {
	return int64(f.bins[0])
}

// Set overwrites every bin count with val.
func (f *BinFeature) Set(val int64) {
	for i := range f.bins {
		f.bins[i] = int(val)
	}
}
// DistributionFeature tracks running summary statistics (sum, sum of
// squares, count, min, max) so that mean and standard deviation can be
// derived at export time.
type DistributionFeature struct {
	sum   int64 // running sum of observations
	sumsq int64 // running sum of squared observations
	count int64 // number of observations (but see the NOTE on Set)
	min   int64 // smallest observation seen; 0 doubles as "unset" (see Add)
	max   int64 // largest observation seen
}

// Init seeds the feature with val by delegating to Set.
func (f *DistributionFeature) Init(val int64) {
	f.Set(val)
}
// Add folds val into the running statistics: sum, sum of squares, count,
// minimum and maximum. A stored minimum of zero is treated as "unset" and
// is replaced by val.
func (f *DistributionFeature) Add(val int64) {
	f.count++
	f.sum += val
	f.sumsq += val * val
	switch {
	case f.min == 0, val < f.min:
		f.min = val
	}
	if f.max < val {
		f.max = val
	}
}
// Export renders the feature as "min,mean,max,stddev". With no recorded
// observations the derived mean and standard deviation are reported as 0.
// NOTE(review): relies on a package-level stddev helper (not shown here);
// presumably stddev(sumsq, sum, count) computes the standard deviation
// from the running sums — confirm against its definition.
func (f *DistributionFeature) Export() string {
	var (
		stdDev int64 = 0
		mean   int64 = 0
	)
	if f.count > 0 {
		stdDev = int64(stddev(float64(f.sumsq), float64(f.sum), f.count))
		mean = f.sum / f.count // integer division: mean is truncated
	}
	return fmt.Sprintf("%d,%d,%d,%d", f.min, mean, f.max, stdDev)
}
// Get returns the number of observations recorded so far.
func (f *DistributionFeature) Get() int64 {
	return f.count
}

// Set the DistributionFeature to include val as the single value in the Feature.
// NOTE(review): count is set to val rather than to 1, which contradicts the
// "single value" contract whenever val != 1 (Get and Export's mean then use
// val as the observation count) — confirm whether this is intentional.
func (f *DistributionFeature) Set(val int64) {
	f.sum = val
	f.sumsq = val * val
	f.count = val
	f.min = val
	f.max = val
}
// ValueFeature is the simplest Feature: a single running int64 total.
type ValueFeature struct {
	value int64
}

// Init seeds the feature with val.
func (f *ValueFeature) Init(val int64) {
	f.Set(val)
}

// Add accumulates val onto the running total.
func (f *ValueFeature) Add(val int64) {
	f.value += val
}

// Export renders the current total as a decimal string.
func (f *ValueFeature) Export() string {
	return fmt.Sprintf("%d", f.value)
}

// Get reports the current total.
func (f *ValueFeature) Get() int64 {
	return f.value
}

// Set replaces the current total with val.
func (f *ValueFeature) Set(val int64) {
	f.value = val
}
package llvmutil
// This file contains utility functions to pack and unpack sets of values. It
// can take in a list of values and tries to store it efficiently in the pointer
// itself if possible and legal.
import (
"tinygo.org/x/go-llvm"
)
// EmitPointerPack packs the list of values into a single pointer value using
// bitcasts, or else allocates a value on the heap if it cannot be packed in the
// pointer value directly. It returns the pointer with the packed data.
// If the values are all constants, they are stored in a constant global and
// deduplicated.
//
// Strategy, in order of preference:
//  1. zero-size payload        -> null pointer
//  2. a single pointer value   -> bitcast it directly
//  3. fits in a pointer's bits -> smuggle the bits inside the pointer itself
//  4. all-constant payload     -> private constant global (mergeable)
//  5. otherwise                -> runtime.alloc on the heap
func EmitPointerPack(builder llvm.Builder, mod llvm.Module, needsStackObjects bool, values []llvm.Value) llvm.Value {
	ctx := mod.Context()
	targetData := llvm.NewTargetData(mod.DataLayout())
	i8ptrType := llvm.PointerType(mod.Context().Int8Type(), 0)
	uintptrType := ctx.IntType(llvm.NewTargetData(mod.DataLayout()).PointerSize() * 8)
	valueTypes := make([]llvm.Type, len(values))
	for i, value := range values {
		valueTypes[i] = value.Type()
	}
	// The payload is modeled as an anonymous struct of all the value types.
	packedType := ctx.StructType(valueTypes, false)
	// Allocate memory for the packed data.
	size := targetData.TypeAllocSize(packedType)
	if size == 0 {
		// Nothing to store: a null pointer carries all the information.
		return llvm.ConstPointerNull(i8ptrType)
	} else if len(values) == 1 && values[0].Type().TypeKind() == llvm.PointerTypeKind {
		// A single pointer can be passed through unchanged (modulo bitcast).
		return builder.CreateBitCast(values[0], i8ptrType, "pack.ptr")
	} else if size <= targetData.TypeAllocSize(i8ptrType) {
		// Packed data fits in a pointer, so store it directly inside the
		// pointer.
		if len(values) == 1 && values[0].Type().TypeKind() == llvm.IntegerTypeKind {
			// Try to keep this cast in SSA form.
			return builder.CreateIntToPtr(values[0], i8ptrType, "pack.int")
		}
		// Because packedType is a struct and we have to cast it to a *i8, store
		// it in a *i8 alloca first and load the *i8 value from there. This is
		// effectively a bitcast.
		packedAlloc, _, _ := CreateTemporaryAlloca(builder, mod, i8ptrType, "")
		if size < targetData.TypeAllocSize(i8ptrType) {
			// The alloca is bigger than the value that will be stored in it.
			// To avoid having some bits undefined, zero the alloca first.
			// Hopefully this will get optimized away.
			builder.CreateStore(llvm.ConstNull(i8ptrType), packedAlloc)
		}
		// Store all values in the alloca, one struct field at a time.
		packedAllocCast := builder.CreateBitCast(packedAlloc, llvm.PointerType(packedType, 0), "")
		for i, value := range values {
			indices := []llvm.Value{
				llvm.ConstInt(ctx.Int32Type(), 0, false),
				llvm.ConstInt(ctx.Int32Type(), uint64(i), false),
			}
			gep := builder.CreateInBoundsGEP(packedAllocCast, indices, "")
			builder.CreateStore(value, gep)
		}
		// Load value (the *i8) from the alloca.
		result := builder.CreateLoad(packedAlloc, "")
		// End the lifetime of the alloca, to help the optimizer.
		packedPtr := builder.CreateBitCast(packedAlloc, i8ptrType, "")
		packedSize := llvm.ConstInt(ctx.Int64Type(), targetData.TypeAllocSize(packedAlloc.Type()), false)
		EmitLifetimeEnd(builder, mod, packedPtr, packedSize)
		return result
	} else {
		// Check if the values are all constants.
		constant := true
		for _, v := range values {
			if !v.IsConstant() {
				constant = false
				break
			}
		}
		if constant {
			// The data is known at compile time, so store it in a constant global.
			// The global address is marked as unnamed, which allows LLVM to merge duplicates.
			funcName := builder.GetInsertBlock().Parent().Name()
			global := llvm.AddGlobal(mod, packedType, funcName+"$pack")
			global.SetInitializer(ctx.ConstStruct(values, false))
			global.SetGlobalConstant(true)
			global.SetUnnamedAddr(true)
			global.SetLinkage(llvm.PrivateLinkage)
			return llvm.ConstBitCast(global, i8ptrType)
		}
		// Packed data is bigger than a pointer, so allocate it on the heap.
		sizeValue := llvm.ConstInt(uintptrType, size, false)
		alloc := mod.NamedFunction("runtime.alloc")
		packedHeapAlloc := builder.CreateCall(alloc, []llvm.Value{
			sizeValue,
			llvm.Undef(i8ptrType),            // unused context parameter
			llvm.ConstPointerNull(i8ptrType), // coroutine handle
		}, "")
		if needsStackObjects {
			// Register the fresh allocation with the GC's stack tracking.
			trackPointer := mod.NamedFunction("runtime.trackPointer")
			builder.CreateCall(trackPointer, []llvm.Value{
				packedHeapAlloc,
				llvm.Undef(i8ptrType),            // unused context parameter
				llvm.ConstPointerNull(i8ptrType), // coroutine handle
			}, "")
		}
		packedAlloc := builder.CreateBitCast(packedHeapAlloc, llvm.PointerType(packedType, 0), "")
		// Store all values in the heap pointer.
		for i, value := range values {
			indices := []llvm.Value{
				llvm.ConstInt(ctx.Int32Type(), 0, false),
				llvm.ConstInt(ctx.Int32Type(), uint64(i), false),
			}
			gep := builder.CreateInBoundsGEP(packedAlloc, indices, "")
			builder.CreateStore(value, gep)
		}
		// Return the original heap allocation pointer, which already is an *i8.
		return packedHeapAlloc
	}
}
// EmitPointerUnpack extracts a list of values packed using EmitPointerPack.
// valueTypes must match the types originally passed to EmitPointerPack; the
// decoding strategy (null / direct bitcast / in-pointer / heap) is re-derived
// from the combined size, mirroring the packing logic.
func EmitPointerUnpack(builder llvm.Builder, mod llvm.Module, ptr llvm.Value, valueTypes []llvm.Type) []llvm.Value {
	ctx := mod.Context()
	targetData := llvm.NewTargetData(mod.DataLayout())
	i8ptrType := llvm.PointerType(mod.Context().Int8Type(), 0)
	uintptrType := ctx.IntType(llvm.NewTargetData(mod.DataLayout()).PointerSize() * 8)
	packedType := ctx.StructType(valueTypes, false)
	// Get a correctly-typed pointer to the packed data.
	var packedAlloc, packedRawAlloc llvm.Value
	size := targetData.TypeAllocSize(packedType)
	if size == 0 {
		// No data to unpack.
	} else if len(valueTypes) == 1 && valueTypes[0].TypeKind() == llvm.PointerTypeKind {
		// A single pointer is always stored directly.
		return []llvm.Value{builder.CreateBitCast(ptr, valueTypes[0], "unpack.ptr")}
	} else if size <= targetData.TypeAllocSize(i8ptrType) {
		// Packed data stored directly in pointer.
		if len(valueTypes) == 1 && valueTypes[0].TypeKind() == llvm.IntegerTypeKind {
			// Keep this cast in SSA form.
			return []llvm.Value{builder.CreatePtrToInt(ptr, valueTypes[0], "unpack.int")}
		}
		// Fallback: load it using an alloca (the reverse of the store-based
		// bitcast done when packing).
		packedRawAlloc, _, _ = CreateTemporaryAlloca(builder, mod, llvm.PointerType(i8ptrType, 0), "unpack.raw.alloc")
		packedRawValue := builder.CreateBitCast(ptr, llvm.PointerType(i8ptrType, 0), "unpack.raw.value")
		builder.CreateStore(packedRawValue, packedRawAlloc)
		packedAlloc = builder.CreateBitCast(packedRawAlloc, llvm.PointerType(packedType, 0), "unpack.alloc")
	} else {
		// Packed data stored on the heap. Bitcast the passed-in pointer to the
		// correct pointer type.
		packedAlloc = builder.CreateBitCast(ptr, llvm.PointerType(packedType, 0), "unpack.raw.ptr")
	}
	// Load each value from the packed data.
	values := make([]llvm.Value, len(valueTypes))
	for i, valueType := range valueTypes {
		if targetData.TypeAllocSize(valueType) == 0 {
			// This value has length zero, so there's nothing to load.
			values[i] = llvm.ConstNull(valueType)
			continue
		}
		indices := []llvm.Value{
			llvm.ConstInt(ctx.Int32Type(), 0, false),
			llvm.ConstInt(ctx.Int32Type(), uint64(i), false),
		}
		gep := builder.CreateInBoundsGEP(packedAlloc, indices, "")
		values[i] = builder.CreateLoad(gep, "")
	}
	// If a temporary alloca was used, end its lifetime to help the optimizer.
	if !packedRawAlloc.IsNil() {
		allocPtr := builder.CreateBitCast(packedRawAlloc, i8ptrType, "")
		allocSize := llvm.ConstInt(ctx.Int64Type(), targetData.TypeAllocSize(uintptrType), false)
		EmitLifetimeEnd(builder, mod, allocPtr, allocSize)
	}
	return values
}
package mutable
import (
"github.com/pkg/errors"
"github.com/chris-tomich/immutability-benchmarking"
)
// Matrix is an integer matrix with in-place (mutating) operations.
// Its dimensions are fixed at compile time by the benchmark package's
// MatrixHeight and MatrixWidth constants.
type Matrix struct {
	matrix [immutabilitybenchmarking.MatrixHeight][immutabilitybenchmarking.MatrixWidth]int
}
// New creates a new matrix initialized with the given element values.
func New(matrix [immutabilitybenchmarking.MatrixHeight][immutabilitybenchmarking.MatrixWidth]int) *Matrix {
	return &Matrix{matrix: matrix}
}
// NewEmpty creates a new zero-valued matrix after validating the requested
// dimensions.
// NOTE(review): width and height are only validated, never applied — the
// backing array is fixed at MatrixHeight x MatrixWidth by the type, so the
// caller cannot actually choose dimensions here. Confirm this is intended.
func NewEmpty(width int, height int) (*Matrix, error) {
	if width == 0 || height == 0 {
		return nil, errors.New("width and height must both be non-zero")
	}
	m := &Matrix{}
	return m, nil
}
// Width returns the number of columns in the matrix.
func (m *Matrix) Width() int {
	return len(m.matrix[0])
}

// Height returns the number of rows in the matrix.
func (m *Matrix) Height() int {
	return len(m.matrix)
}

// Get returns the element at the given row and column (no bounds checking
// beyond the underlying array's own).
func (m *Matrix) Get(row int, col int) int {
	return m.matrix[row][col]
}
// Equals reports whether m2 has the same dimensions and element values
// as this matrix.
func (m *Matrix) Equals(m2 immutabilitybenchmarking.Matrix) bool {
	if m.Height() != m2.Height() || m.Width() != m2.Width() {
		return false
	}
	for r := range m.matrix {
		for c := range m.matrix[r] {
			if m.matrix[r][c] != m2.Get(r, c) {
				return false
			}
		}
	}
	return true
}
// Add adds the values of m2 element-wise into this matrix, mutating and
// returning the receiver. Both matrices must have identical dimensions.
func (m *Matrix) Add(m2 immutabilitybenchmarking.Matrix) (immutabilitybenchmarking.Matrix, error) {
	// Fixed: the height/width error messages were swapped (a height
	// mismatch reported "width ... not the same" and vice versa).
	if m.Height() != m2.Height() {
		return nil, errors.New("height of both matrices are not the same")
	}
	if m.Width() != m2.Width() {
		return nil, errors.New("width of both matrices are not the same")
	}
	for r := 0; r < len(m.matrix); r++ {
		for c := 0; c < len(m.matrix[r]); c++ {
			m.matrix[r][c] = m.matrix[r][c] + m2.Get(r, c)
		}
	}
	return m, nil
}
// Subtract subtracts the values of m2 element-wise from this matrix,
// mutating and returning the receiver. Both matrices must have identical
// dimensions.
func (m *Matrix) Subtract(m2 immutabilitybenchmarking.Matrix) (immutabilitybenchmarking.Matrix, error) {
	// Fixed: the height/width error messages were swapped (a height
	// mismatch reported "width ... not the same" and vice versa).
	if m.Height() != m2.Height() {
		return nil, errors.New("height of both matrices are not the same")
	}
	if m.Width() != m2.Width() {
		return nil, errors.New("width of both matrices are not the same")
	}
	for r := 0; r < len(m.matrix); r++ {
		for c := 0; c < len(m.matrix[r]); c++ {
			m.matrix[r][c] = m.matrix[r][c] - m2.Get(r, c)
		}
	}
	return m, nil
}
// ScalarMultiply scales every element of the matrix by s, in place, and
// returns the receiver.
func (m *Matrix) ScalarMultiply(s int) immutabilitybenchmarking.Matrix {
	for r := range m.matrix {
		for c := range m.matrix[r] {
			m.matrix[r][c] *= s
		}
	}
	return m
}
// Transpose transposes this matrix in place and returns the receiver.
// NOTE(review): both loop bounds are height-sized (len(t) and
// len(m.matrix)), and the result is written back into the same fixed-shape
// array — the indexing is only in-bounds and correct when
// MatrixHeight == MatrixWidth. Confirm the benchmark always uses square
// matrices.
func (m *Matrix) Transpose() immutabilitybenchmarking.Matrix {
	t := [immutabilitybenchmarking.MatrixHeight][immutabilitybenchmarking.MatrixWidth]int{}
	for rt := 0; rt < len(t); rt++ {
		for ct := 0; ct < len(m.matrix); ct++ {
			t[rt][ct] = m.matrix[ct][rt]
		}
	}
	m.matrix = t
	return m
}
// MatrixMultiply will multiple the given matrix against this matrix.
func (m *Matrix) MatrixMultiply(m2 immutabilitybenchmarking.Matrix) (immutabilitybenchmarking.Matrix, error) {
if m.Width() != m2.Height() {
return nil, errors.New("the dimensions of the matrices are incompatible, try transposing one first")
}
n := [immutabilitybenchmarking.MatrixHeight][immutabilitybenchmarking.MatrixWidth]int{}
for rm := 0; rm < m.Height(); rm++ {
for cm2 := 0; cm2 < m2.Width(); cm2++ {
product := 0
for cm := 0; cm < m.Width(); cm++ {
product = product + m.matrix[rm][cm]*m2.Get(cm, cm2)
}
n[rm][cm2] = product
}
}
m.matrix = n
return m, nil
} | array/mutable/matrix.go | 0.858526 | 0.726207 | matrix.go | starcoder |
package sedpf
import (
"fmt"
"math"
)
// Gaussian is a normal distribution parameterized by its mean and variance.
type Gaussian struct {
	Mean     float64 // distribution mean
	Variance float64 // distribution variance (sigma squared)
}
// sum returns the total of all values in elems (0 for an empty slice).
func sum(elems []float64) float64 {
	var total float64
	for _, v := range elems {
		total += v
	}
	return total
}
// NewGaussian constructs a Gaussian with the given mean and variance.
func NewGaussian(mean, variance float64) Gaussian {
	return Gaussian{Mean: mean, Variance: variance}
}
// NewGaussianFromPrecision builds a Gaussian from its natural (canonical)
// parameters: precision (1/variance) and precision-adjusted mean
// (mean * precision).
// Adapted from https://github.com/chobie/go-gaussian/blob/master/gaussian.go
func NewGaussianFromPrecision(precision, precisionMean float64) Gaussian {
	return NewGaussian(precisionMean/precision, 1.0/precision)
}
// NewGaussianFromSeries estimates a Gaussian from series using the sample
// mean and the population (1/N) variance.
// Fixed: an empty series previously produced NaN parameters (0/0); it now
// yields the zero Gaussian.
func NewGaussianFromSeries(series []float64) Gaussian {
	n := float64(len(series))
	if n == 0 {
		return Gaussian{}
	}
	mean := sum(series) / n
	var variance float64
	for _, x := range series {
		variance += math.Pow(x-mean, 2)
	}
	variance /= n
	return NewGaussian(mean, variance)
}
// Pdf is the standard normal probability density function phi(x).
func Pdf(x float64) float64 {
	return math.Exp(-0.5*x*x) / math.Sqrt(2*math.Pi)
}
// Cdf is the standard normal cumulative distribution function Phi(x),
// computed via the complementary error function.
func Cdf(x float64) float64 {
	return 0.5 * math.Erfc(-x/math.Sqrt2)
}
// CoVariance estimates Cov(u, v) as E[uv] - E[u]E[v].
// NOTE(review): Mul is the precision-weighted (Bayesian) product of the
// two Gaussians, so Mul(v).Mean is not E[uv] of independent variables —
// confirm this matches the intended derivation.
func CoVariance(u, v Gaussian) float64 {
	return u.Mul(v).Mean - u.Mean*v.Mean
}

// CorrelationCoefficient normalizes CoVariance by both standard
// deviations (Pearson's rho).
func CorrelationCoefficient(u, v Gaussian) float64 {
	return CoVariance(u, v) / (u.StdDev() * v.StdDev())
}
// StdDev returns the standard deviation (square root of the variance).
func (u Gaussian) StdDev() float64 {
	return math.Sqrt(u.Variance)
}

// Add returns the distribution of the sum of two independent Gaussians:
// means add and variances add.
func (u Gaussian) Add(v Gaussian) Gaussian {
	return NewGaussian(u.Mean+v.Mean, u.Variance+v.Variance)
}

// Addf shifts the distribution right by the constant v (variance unchanged).
func (u Gaussian) Addf(v float64) Gaussian {
	return NewGaussian(u.Mean+v, u.Variance)
}

// Subf shifts the distribution left by the constant v (variance unchanged).
func (u Gaussian) Subf(v float64) Gaussian {
	return NewGaussian(u.Mean-v, u.Variance)
}

// Sub subtracts both the means and the variances.
// NOTE(review): for the difference of independent Gaussians the variances
// should add, not subtract (and this expression can go negative) —
// confirm this is a deliberate parameter-space operation rather than a
// distribution difference.
func (u Gaussian) Sub(v Gaussian) Gaussian {
	return NewGaussian(u.Mean-v.Mean, u.Variance-v.Variance)
}
// Mul returns the precision-weighted (Bayesian) product of two Gaussians:
// precisions add, and the mean is the precision-weighted average of both
// means.
// Adapted from https://github.com/chobie/go-gaussian/blob/master/gaussian.go
func (u Gaussian) Mul(v Gaussian) Gaussian {
	precision := 1.0 / u.Variance
	dprecision := 1.0 / v.Variance
	return NewGaussianFromPrecision(precision+dprecision, precision*u.Mean+dprecision*v.Mean)
}
// Max is intended to approximate the distribution of max(U, V) for
// uncorrelated Gaussians U and V.
// http://www1.up.poznan.pl/cb48/prezentacje/Oliveira.pdf
// NOTE(review): the moments computed below (mean = E[u]E[v]; variance of
// a product of independents) are those of the PRODUCT U*V, not of the
// maximum — the Clark max approximation requires Phi/phi correction
// terms. Confirm whether this substitution is intentional.
func Max(u, v Gaussian) Gaussian {
	// XXX assume uncorrelated distributions
	// this need not be correct
	mean := u.Mean * v.Mean
	variance := u.Mean * u.Mean * v.Variance
	variance += v.Mean * v.Mean * u.Variance
	variance += u.Variance * v.Variance
	return NewGaussian(mean, variance)
}
// MaxPartitionMBT reduces queue to a single Gaussian by repeatedly
// combining pairs with Max, forming a balanced binary reduction tree.
// It panics on an empty queue; a single-element queue is returned as-is.
//
// Fixed: the previous implementation popped the head, then looped only
// while more than two elements remained, so up to two queue entries were
// silently dropped from the reduction (e.g. with three inputs it returned
// the first one untouched).
func MaxPartitionMBT(queue []Gaussian) Gaussian {
	for len(queue) > 1 {
		combined := Max(queue[0], queue[1])
		queue = append(queue[2:], combined)
	}
	return queue[0]
}
// ToString formats the Gaussian as "Mean: <m>, Variance: <v>" for display.
func (g Gaussian) ToString() string {
	return fmt.Sprintf("Mean: %f, Variance: %f", g.Mean, g.Variance)
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.