code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package sweetiebot
import (
"encoding/json"
"reflect"
"strings"
"github.com/bwmarrin/discordgo"
)
// SetConfigCommand implements the !setconfig chat command, which writes a
// single configuration value (scalar, list, map, or maplist entry) on a guild.
type SetConfigCommand struct {
}

// Name returns the command's canonical (display) name.
func (c *SetConfigCommand) Name() string {
	return "SetConfig"
}
// Process validates the arguments, delegates to GuildInfo.SetConfig, and then
// persists the configuration. It returns a code-fenced status message; the
// bool result is always false (no special handling requested of the caller).
func (c *SetConfigCommand) Process(args []string, msg *discordgo.Message, info *GuildInfo) (string, bool) {
	if len(args) < 1 {
		return "```No configuration parameter to look for!```", false
	}
	if len(args) < 2 {
		return "```No value to set!```", false
	}
	// args[0] is the parameter name, args[1] the (first) value; any remaining
	// args are extra values for list/map/maplist parameters.
	n, ok := info.SetConfig(args[0], args[1], args[2:]...)
	// NOTE(review): the config is saved even when SetConfig reports failure —
	// confirm SetConfig leaves the config untouched on failure before relying
	// on this.
	info.SaveConfig()
	if ok {
		return "```Successfully set " + args[0] + " to " + n + ".```", false
	}
	return "```" + n + "```", false
}
// Usage returns the full formatted help text for !setconfig, including the
// list/map/maplist invocation forms.
func (c *SetConfigCommand) Usage(info *GuildInfo) string {
	return info.FormatUsage(c, "[parameter] [value]\n!SetConfig [list parameter] [value] [value 2] [etc...]\n!SetConfig [map parameter] [key] [value]", "Attempts to set the configuration value matching [parameter] (not case-sensitive) to [value]. Will only save the new configuration if it succeeds, and returns the new value upon success. If the parameter is a list, it will accept multiple new values. To set a value with a space in it, surround it with quotes, \"like so\". If the parameter is a map, it will accept two values: the first is the key, and the second is the value of that key. If the parameter is a maplist, the first value is the key, and all other values make up the list of values that key is set to. For more information, see: https://github.com/blackhole12/sweetiebot#configuration")
}
// UsageShort returns the one-line summary shown in the command list.
func (c *SetConfigCommand) UsageShort() string {
	return "Sets a config value and saves the new configuration."
}
// GetConfigCommand implements the !getconfig chat command, which displays one
// configuration field (or lists all field names when called with no argument).
type GetConfigCommand struct {
}

// Name returns the command's canonical (display) name.
func (c *GetConfigCommand) Name() string {
	return "GetConfig"
}
// Process lists all config field names (lowercased, annotated with their
// container kind) when called with no arguments, or JSON-marshals the value
// of the named field. Matching is done against the lowercased field name, so
// the caller is expected to pass a lowercase parameter name.
func (c *GetConfigCommand) Process(args []string, msg *discordgo.Message, info *GuildInfo) (string, bool) {
	t := reflect.ValueOf(&info.config).Elem()
	n := t.NumField()
	if len(args) < 1 {
		s := make([]string, 0, n)
		for i := 0; i < n; i++ {
			str := strings.ToLower(t.Type().Field(i).Name)
			switch t.Field(i).Interface().(type) {
			case []uint64:
				str += " [list]"
			case map[string]string, map[string]int64:
				str += " [map]"
			case map[string]bool:
				// map[string]bool is presented as a list because it is used
				// as a set of values rather than a key/value mapping.
				str += " [list]"
			case map[string]map[string]bool:
				str += " [maplist]"
			}
			s = append(s, str)
		}
		return "```Choose a config option to display:\n" + strings.Join(s, "\n") + "```", false
	}
	arg := args[0]
	for i := 0; i < n; i++ {
		if strings.ToLower(t.Type().Field(i).Name) != arg {
			continue
		}
		data, err := json.Marshal(t.Field(i).Interface())
		if err != nil {
			// Check the marshal error before doing any work with the output.
			info.log.Log("JSON error: ", err.Error())
			return "```Failed to marshal JSON :C```", false
		}
		// Neutralize markdown code fences and linkable URLs so the output
		// cannot break out of the code block or trigger embeds/pings.
		s := string(data)
		s = strings.Replace(s, "`", "", -1)
		s = strings.Replace(s, "[](/", "[\u200B](/", -1)
		s = strings.Replace(s, "http://", "http\u200B://", -1)
		s = strings.Replace(s, "https://", "https\u200B://", -1)
		return "```" + s + "```", false
	}
	return "```That's not a recognized config option! Type !getconfig without any arguments to list all possible config options```", false
}
// Usage returns the full formatted help text for !getconfig.
func (c *GetConfigCommand) Usage(info *GuildInfo) string {
	return info.FormatUsage(c, "", "Returns the current configuration as a JSON string.")
}
// UsageShort returns the one-line summary shown in the command list.
func (c *GetConfigCommand) UsageShort() string { return "Returns the current configuration." }
// QuickConfigCommand implements the !quickconfig chat command, which performs
// a one-shot baseline configuration of a server (channels, roles, command
// restrictions) for the guild owner.
type QuickConfigCommand struct {
}

// Name returns the command's canonical (display) name.
func (c *QuickConfigCommand) Name() string {
	return "QuickConfig"
}
// Process performs baseline server configuration: it resolves the six pinged
// channels/roles, restricts a fixed list of sensitive commands to the
// moderator role, configures or disables the "bored" module, saves the
// config, and returns a summary plus permission warnings for the bot's own
// role. Only the guild owner may run it.
func (c *QuickConfigCommand) Process(args []string, msg *discordgo.Message, info *GuildInfo) (string, bool) {
	if msg.Author.ID != info.Guild.OwnerID {
		return "```Only the owner of this server can use this command!```", false
	}
	if len(args) < 6 {
		return "```You must provide all 6 parameters to this function. Use !help quickconfig and carefully review each one to make sure it is accurate.```", false
	}
	// Each argument is expected to be a ping (<#id> / <@&id>); StripPing
	// reduces it to the raw snowflake string.
	log := StripPing(args[0])
	mod := StripPing(args[1])
	modchannel := StripPing(args[2])
	free := StripPing(args[3])
	silent := StripPing(args[4])
	boredchannel := StripPing(args[5])
	info.config.LogChannel = SBatoi(log)
	info.config.AlertRole = SBatoi(mod)
	info.config.ModChannel = SBatoi(modchannel)
	info.config.SilentRole = SBatoi(silent)
	info.config.FreeChannels = make(map[string]bool)
	// Round-trip through SBatoi/SBitoa normalizes the channel id string.
	info.config.FreeChannels[SBitoa(SBatoi(free))] = true
	// NOTE(review): Aliases, Command_roles and Module_channels are written
	// without a make() here — presumably they are initialized elsewhere
	// (e.g. when the guild config is created); confirm they cannot be nil.
	info.config.Aliases["cute"] = "pick cute"
	info.config.Aliases["calc"] = "roll"
	info.config.Aliases["calculate"] = "roll"
	// Commands that only the moderator role may invoke after quick setup.
	sensitive := []string{"add", "addgroup", "addwit", "ban", "disable", "dumptables", "echo", "enable", "getconfig", "purgegroup", "remove", "removewit", "setconfig", "setstatus", "update", "announce", "collections", "addevent", "addbirthday", "autosilence", "silence", "unsilence", "wipewelcome", "new", "addquote", "removequote", "removealias", "delete", "createpoll", "deletepoll", "addoption"}
	modint := SBitoa(info.config.AlertRole)
	for _, v := range sensitive {
		info.config.Command_roles[v] = make(map[string]bool)
		info.config.Command_roles[v][modint] = true
	}
	info.config.Command_disabled = make(map[string]bool)
	info.config.Module_disabled = make(map[string]bool)
	boredint := SBatoi(boredchannel)
	// A zero bored channel means "disable the bored module" rather than
	// letting it post in arbitrary channels.
	if boredint > 0 {
		info.config.Module_channels["bored"] = map[string]bool{SBitoa(boredint): true}
	} else {
		info.config.Module_disabled["bored"] = true
	}
	info.SaveConfig()
	// Build permission warnings by prepending each applicable message; the
	// bitmasks below are Discord permission flags (administrator, mention
	// everyone, ban members, manage roles, manage messages).
	warning := "```"
	perms, _ := getAllPerms(info, sb.SelfID)
	if perms&0x00000008 != 0 {
		warning = "\nWARNING: You have given sweetiebot the Administrator role, which implicitely gives her all roles! Sweetie Bot only needs Ban Members, Manage Roles and Manage Messages in order to function correctly." + warning
	}
	if perms&0x00020000 != 0 {
		warning = "\nWARNING: You have given sweetiebot the Mention Everyone role, which means users will be able to abuse her to ping everyone on the server! Sweetie Bot does NOT attempt to filter @\u200Beveryone from her messages!" + warning
	}
	if perms&0x00000004 == 0 {
		warning = "\nWARNING: Sweetiebot cannot ban members spamming the welcome channel without the Ban Members role! (If you do not use this feature, it is safe to ignore this warning)." + warning
	}
	if perms&0x10000000 == 0 {
		warning = "\nWARNING: Sweetiebot cannot silence members or give birthday roles without the Manage Roles role! (If you do not use these features, it is safe to ignore this warning)." + warning
	}
	if perms&0x00002000 == 0 {
		warning = "\nWARNING: Sweetiebot cannot delete messages without the Manage Messages role!" + warning
	}
	return "```Server configured! \nLog Channel: " + log + "\nModerator Role: " + mod + "\nMod Channel: " + modchannel + "\nFree Channel: " + free + "\nSilent Role: " + silent + "\nBored Channel: " + boredchannel + warning, false
}
// Usage returns the full formatted help text for !quickconfig.
func (c *QuickConfigCommand) Usage(info *GuildInfo) string {
	return info.FormatUsage(c, "[Log Channel] [Moderator Role] [Mod Channel] [Free Channel] [Silent Role] [Bored Channel]", "Quickly performs basic configuration on the server and restricts all sensitive commands to [Moderator Role], then enables all commands and all modules. If [bored channel] is not zero, it restricts the bored module to that channel. Otherwise it disables the bored module to prevent the bot from spamming inactive channels. You must ping each role and channel, you cannot simply input the name of a role or channel.")
}
func (c *QuickConfigCommand) UsageShort() string { return "Quickly performs basic configuration." } | sweetiebot/config_command.go | 0.596668 | 0.664445 | config_command.go | starcoder |
package bat
import (
"fmt"
"math"
"strconv"
"github.com/robert-zaremba/errstack"
)
// I64toa converts an int64 value to its base-10 string representation.
func I64toa(x int64) string {
	return strconv.FormatInt(x, 10)
}

// I64tox converts an int64 value to its base-16 (hexadecimal) string
// representation, without any "0x" prefix.
func I64tox(x int64) string {
	return strconv.FormatInt(x, 16)
}
// F64toa converts a float64 value to a base-10 string.
// The optional precision argument is interpreted as in strconv.FormatFloat;
// when omitted, the smallest representation that round-trips exactly is used.
func F64toa(x float64, precision ...int) string {
	prec := -1
	if len(precision) != 0 {
		prec = precision[0]
	}
	return strconv.FormatFloat(x, 'f', prec, 64)
}
// Atoi64 converts a base-10 string into an int64 value.
func Atoi64(s string) (int64, error) {
	return strconv.ParseInt(s, 10, 64)
}

// Atoi64Errp converts a base-10 string into an int64 value, recording any
// parse error in the putter. On error the zero value is returned.
func Atoi64Errp(s string, errp errstack.Putter) int64 {
	v, err := Atoi64(s)
	if err != nil {
		errp.Put(err)
	}
	return v
}

// Atoui64 converts a base-10 string into a uint64 value.
func Atoui64(s string) (uint64, error) {
	return strconv.ParseUint(s, 10, 64)
}

// Atoui64Errp converts a base-10 string into a uint64 value, recording any
// parse error in the putter. On error the zero value is returned.
func Atoui64Errp(s string, errp errstack.Putter) uint64 {
	v, err := Atoui64(s)
	if err != nil {
		errp.Put(err)
	}
	return v
}

// Atof64 converts a base-10 string into a float64 value.
func Atof64(s string) (float64, error) {
	return strconv.ParseFloat(s, 64)
}

// Atof64Errp converts a base-10 string into a float64 value, recording any
// parse error in the putter. On error the zero value is returned.
func Atof64Errp(s string, errp errstack.Putter) float64 {
	v, err := Atof64(s)
	if err != nil {
		errp.Put(err)
	}
	return v
}
// HumanizeInt produces a human readable representation of an SI size,
// e.g. 1500 -> "1.5 kB". Values below 10 are printed as plain bytes.
func HumanizeInt(n uint64) string {
	if n < 10 {
		return fmt.Sprintf("%d B", n)
	}
	const base = 1000
	units := []string{"B", "kB", "MB", "GB", "TB", "PB", "EB"}
	// The magnitude of n in powers of 1000 selects the unit.
	exp := math.Floor(math.Log(float64(n)) / math.Log(base))
	// Scale into the chosen unit and round to one decimal place.
	scaled := math.Floor(float64(n)/math.Pow(base, exp)*10+0.5) / 10
	format := "%.1f %s"
	if scaled >= 10 {
		format = "%.0f %s"
	}
	return fmt.Sprintf(format, scaled, units[int(exp)])
}
// CmpInt64Pairs compares pairs of int64 for sorting. It scans the pairs in
// order and, at the first pair whose two elements differ, returns a negative
// value if the first element is smaller than the second and a positive value
// otherwise. It returns 0 if every pair holds two equal values.
func CmpInt64Pairs(ls [][2]int64) int {
	for i := range ls {
		// Compare directly instead of subtracting: a-b can overflow int64,
		// and the old int(a-b) conversion could also truncate on 32-bit
		// platforms — both of which can flip the sign of the result.
		if ls[i][0] < ls[i][1] {
			return -1
		}
		if ls[i][0] > ls[i][1] {
			return 1
		}
	}
	return 0
}
// Min3Int returns the minimum of 3 integers.
func Min3Int(a, b, c int) int {
	m := a
	if b < m {
		m = b
	}
	if c < m {
		m = c
	}
	return m
}
package jet
// TimestampExpression is an Expression that additionally supports the SQL
// comparison operators for timestamp values. Each method returns a
// BoolExpression representing the corresponding SQL comparison.
type TimestampExpression interface {
	Expression

	EQ(rhs TimestampExpression) BoolExpression
	NOT_EQ(rhs TimestampExpression) BoolExpression
	IS_DISTINCT_FROM(rhs TimestampExpression) BoolExpression
	IS_NOT_DISTINCT_FROM(rhs TimestampExpression) BoolExpression

	LT(rhs TimestampExpression) BoolExpression
	LT_EQ(rhs TimestampExpression) BoolExpression
	GT(rhs TimestampExpression) BoolExpression
	GT_EQ(rhs TimestampExpression) BoolExpression
}
// timestampInterfaceImpl provides the TimestampExpression comparison methods
// for an embedding type; parent is the expression used as the left-hand side
// of every generated comparison.
type timestampInterfaceImpl struct {
	parent TimestampExpression
}

// EQ generates an "=" comparison with rhs.
func (t *timestampInterfaceImpl) EQ(rhs TimestampExpression) BoolExpression {
	return eq(t.parent, rhs)
}

// NOT_EQ generates a "!=" comparison with rhs.
func (t *timestampInterfaceImpl) NOT_EQ(rhs TimestampExpression) BoolExpression {
	return notEq(t.parent, rhs)
}

// IS_DISTINCT_FROM generates an "IS DISTINCT FROM" comparison with rhs.
func (t *timestampInterfaceImpl) IS_DISTINCT_FROM(rhs TimestampExpression) BoolExpression {
	return isDistinctFrom(t.parent, rhs)
}

// IS_NOT_DISTINCT_FROM generates an "IS NOT DISTINCT FROM" comparison with rhs.
func (t *timestampInterfaceImpl) IS_NOT_DISTINCT_FROM(rhs TimestampExpression) BoolExpression {
	return isNotDistinctFrom(t.parent, rhs)
}

// LT generates a "<" comparison with rhs.
func (t *timestampInterfaceImpl) LT(rhs TimestampExpression) BoolExpression {
	return lt(t.parent, rhs)
}

// LT_EQ generates a "<=" comparison with rhs.
func (t *timestampInterfaceImpl) LT_EQ(rhs TimestampExpression) BoolExpression {
	return ltEq(t.parent, rhs)
}

// GT generates a ">" comparison with rhs.
func (t *timestampInterfaceImpl) GT(rhs TimestampExpression) BoolExpression {
	return gt(t.parent, rhs)
}

// GT_EQ generates a ">=" comparison with rhs.
func (t *timestampInterfaceImpl) GT_EQ(rhs TimestampExpression) BoolExpression {
	return gtEq(t.parent, rhs)
}
//-------------------------------------------------
type timestampExpressionWrapper struct {
timestampInterfaceImpl
Expression
}
func newTimestampExpressionWrap(expression Expression) TimestampExpression {
timestampExpressionWrap := timestampExpressionWrapper{Expression: expression}
timestampExpressionWrap.timestampInterfaceImpl.parent = ×tampExpressionWrap
return ×tampExpressionWrap
}
// TimestampExp is timestamp expression wrapper around arbitrary expression.
// Allows go compiler to see any expression as timestamp expression.
// Does not add sql cast to generated sql builder output.
func TimestampExp(expression Expression) TimestampExpression {
return newTimestampExpressionWrap(expression)
} | internal/jet/timestamp_expression.go | 0.797793 | 0.494263 | timestamp_expression.go | starcoder |
// Package lex provides all the lexing functions that transform text into
// lexical tokens, using token types defined in the pi/token package.
// It also has the basic file source and position / region management
// functionality.
package lex
import (
"fmt"
"sort"
"github.com/goki/ki/nptime"
"github.com/goki/pi/token"
)
// Lex represents a single lexical element, with a token, and start and end rune positions
// within a line of a file. Critically it also contains the nesting depth computed from
// all the parens, brackets, braces. Todo: also support XML < > </ > tag depth.
type Lex struct {
Tok token.KeyToken `desc:"token, includes cache of keyword for keyword types, and also has nesting depth: starting at 0 at start of file and going up for every increment in bracket / paren / start tag and down for every decrement. Is computed once and used extensively in parsing."`
St int `desc:"start rune index within original source line for this token"`
Ed int `desc:"end rune index within original source line for this token (exclusive -- ends one before this)"`
Time nptime.Time `desc:"time when region was set -- used for updating locations in the text based on time stamp (using efficient non-pointer time)"`
}
// NewLex returns a Lex covering runes [st, ed) with the given token info.
func NewLex(tok token.KeyToken, st, ed int) Lex {
	return Lex{Tok: tok, St: st, Ed: ed}
}
// String satisfies the fmt.Stringer interface, rendering the element as
// [+depth:start:end:token].
func (lx Lex) String() string {
	return fmt.Sprintf("[+%d:%v:%v:%v]", lx.Tok.Depth, lx.St, lx.Ed, lx.Tok.String())
}
// ContainsPos returns true if the Lex element contains given character position
// (St inclusive, Ed exclusive).
func (lx *Lex) ContainsPos(pos int) bool {
	return pos >= lx.St && pos < lx.Ed
}
// OverlapsReg returns true if the two regions overlap: either region's start
// falls inside the other, or either region's end falls inside the other.
func (lx *Lex) OverlapsReg(or Lex) bool {
	// start overlaps: one region begins inside the other
	if (lx.St >= or.St && lx.St < or.Ed) || (or.St >= lx.St && or.St < lx.Ed) {
		return true
	}
	// end overlaps: one region ends inside the other (Ed is exclusive,
	// hence the <= on the right-hand bound)
	if (lx.Ed > or.St && lx.Ed <= or.Ed) || (or.Ed > lx.St && or.Ed <= lx.Ed) {
		return true
	}
	return false
}
// Region returns the region for this lexical element, placed on given line.
func (lx *Lex) Region(ln int) Reg {
	return Reg{St: Pos{Ln: ln, Ch: lx.St}, Ed: Pos{Ln: ln, Ch: lx.Ed}}
}
// Line is one line of Lex'd text.
type Line []Lex

// Add adds one element to the end of the lex line (just append).
func (ll *Line) Add(lx Lex) {
	*ll = append(*ll, lx)
}
// AddLex adds one element to the lex line with given params, returning a
// pointer to the newly appended lex. NOTE: the pointer aliases the slice's
// backing array and is invalidated by any subsequent append that reallocates.
func (ll *Line) AddLex(tok token.KeyToken, st, ed int) *Lex {
	lx := NewLex(tok, st, ed)
	li := len(*ll)
	ll.Add(lx)
	return &(*ll)[li]
}
// Insert inserts one element into the lex line at given index; if idx is at
// or beyond the current length, the element simply ends up appended.
func (ll *Line) Insert(idx int, lx Lex) {
	sz := len(*ll)
	// grow by one, then shift the tail right to open a slot at idx
	*ll = append(*ll, lx)
	if idx < sz {
		copy((*ll)[idx+1:], (*ll)[idx:sz])
		(*ll)[idx] = lx
	}
}
// Clone returns a new copy of the line, or nil if the line is empty.
func (ll *Line) Clone() Line {
	if len(*ll) == 0 {
		return nil
	}
	// copy is the idiomatic (memmove-backed) way to duplicate a slice,
	// replacing the previous element-by-element loop.
	cp := make(Line, len(*ll))
	copy(cp, *ll)
	return cp
}
// AddSort adds a new lex element in sorted order to list, sorted by start
// position, and if at the same start position, then sorted by end position
func (ll *Line) AddSort(lx Lex) {
	for i, t := range *ll {
		if t.St < lx.St {
			continue
		}
		// same start: keep scanning past entries with smaller-or-equal end,
		// so equal-start entries stay ordered by ascending Ed
		if t.St == lx.St && lx.Ed >= t.Ed {
			continue
		}
		// insert before element i: grow, shift tail right, place lx
		*ll = append(*ll, lx)
		copy((*ll)[i+1:], (*ll)[i:])
		(*ll)[i] = lx
		return
	}
	// lx sorts after all existing elements
	*ll = append(*ll, lx)
}
// Sort sorts the lex elements by starting pos, and ending pos if a tie
func (ll *Line) Sort() {
	sort.Slice((*ll), func(i, j int) bool {
		return (*ll)[i].St < (*ll)[j].St || ((*ll)[i].St == (*ll)[j].St && (*ll)[i].Ed < (*ll)[j].Ed)
	})
}
// MergeLines merges the two lines of lex regions into a combined list
// properly ordered by sequence of tags within the line. If either input is
// empty the other is returned as-is (shared, not copied).
func MergeLines(t1, t2 Line) Line {
	if len(t1) == 0 {
		return t2
	}
	if len(t2) == 0 {
		return t1
	}
	// Bulk-append t1 (assumed already ordered), then insert each element of
	// t2 at its sorted position.
	tl := make(Line, 0, len(t1)+len(t2))
	tl = append(tl, t1...)
	for i := range t2 {
		tl.AddSort(t2[i])
	}
	return tl
}
// String satisfies the fmt.Stringer interface, joining the elements'
// representations with trailing spaces.
func (ll *Line) String() string {
	str := ""
	for _, t := range *ll {
		str += t.String() + " "
	}
	return str
}
// TagSrc returns the token-tagged source
func (ll *Line) TagSrc(src []rune) string {
str := ""
for _, t := range *ll {
s := src[t.St:t.Ed]
str += t.String() + `"` + string(s) + `"` + " "
}
return str
} | lex/lex.go | 0.779867 | 0.521349 | lex.go | starcoder |
package sortmap
import (
"fmt"
"reflect"
"sort"
"time"
)
// Item is a key-value pair representing element in the map
type Item struct {
	Key, Value interface{}
}

// Less compares two map elements and returns true if x < y
type Less func(x, y Item) bool

// flatmap is a flattened map with a comparator, implementing sort.Interface.
type flatmap struct {
	items []Item
	less  Less
}
// newFlatMap flattens map m (accessed via reflection; m must be a map) into
// key/value Items paired with comparator less, ready for sort.Sort.
func newFlatMap(m interface{}, less Less) *flatmap {
	mv := reflect.ValueOf(m)
	keys := mv.MapKeys()
	fm := &flatmap{items: make([]Item, len(keys)), less: less}
	for n := range keys {
		fm.items[n] = Item{keys[n].Interface(), mv.MapIndex(keys[n]).Interface()}
	}
	return fm
}
// Len implements sort.Interface.
func (m *flatmap) Len() int {
	return len(m.items)
}

// Less implements sort.Interface using the stored comparator.
func (m *flatmap) Less(i, j int) bool {
	return m.less(m.items[i], m.items[j])
}

// Swap implements sort.Interface.
func (m *flatmap) Swap(i, j int) {
	m.items[i], m.items[j] = m.items[j], m.items[i]
}
// Items is a slice of map elements (key-value pairs)
type Items []Item

// Top returns slice of up to n leading elements. The result shares the
// receiver's backing array (it is a view, not a copy).
func (r Items) Top(n int) Items {
	if n > len(r) {
		n = len(r)
	}
	return r[:n]
}
// ByFunc sorts map m using a provided comparator and returns the sorted
// key/value pairs.
func ByFunc(m interface{}, c Less) Items {
	fm := newFlatMap(m, c)
	sort.Sort(fm)
	return fm.items
}
// ByKey sorts map by keys in the ascending order
func ByKey(m interface{}) Items {
	ls := getLess(reflect.ValueOf(m).Type().Key())
	return ByFunc(m, func(x, y Item) bool { return ls(x.Key, y.Key) })
}

// ByKeyDesc sorts map by keys in the descending order (arguments to the
// ascending comparator are swapped)
func ByKeyDesc(m interface{}) Items {
	ls := getLess(reflect.ValueOf(m).Type().Key())
	return ByFunc(m, func(x, y Item) bool { return ls(y.Key, x.Key) })
}

// ByValue sorts map by values in the ascending order
func ByValue(m interface{}) Items {
	ls := getLess(reflect.ValueOf(m).Type().Elem())
	return ByFunc(m, func(x, y Item) bool { return ls(x.Value, y.Value) })
}

// ByValueDesc sorts map by values in the descending order
func ByValueDesc(m interface{}) Items {
	ls := getLess(reflect.ValueOf(m).Type().Elem())
	return ByFunc(m, func(x, y Item) bool { return ls(y.Value, x.Value) })
}
// getLess returns default comparator for a type
func getLess(t reflect.Type) (f func(x, y interface{}) bool) {
switch t.Kind() {
case reflect.Bool:
f = func(x, y interface{}) bool { return !x.(bool) && y.(bool) }
case reflect.Int:
f = func(x, y interface{}) bool { return x.(int) < y.(int) }
case reflect.Int8:
f = func(x, y interface{}) bool { return x.(int8) < y.(int8) }
case reflect.Int16:
f = func(x, y interface{}) bool { return x.(int16) < y.(int16) }
case reflect.Int32:
f = func(x, y interface{}) bool { return x.(int32) < y.(int32) }
case reflect.Int64:
f = func(x, y interface{}) bool { return x.(int64) < y.(int64) }
case reflect.Uint:
f = func(x, y interface{}) bool { return x.(uint) < y.(uint) }
case reflect.Uint8:
f = func(x, y interface{}) bool { return x.(uint8) < y.(uint8) }
case reflect.Uint16:
f = func(x, y interface{}) bool { return x.(uint16) < y.(uint16) }
case reflect.Uint32:
f = func(x, y interface{}) bool { return x.(uint32) < y.(uint32) }
case reflect.Uint64:
f = func(x, y interface{}) bool { return x.(uint64) < y.(uint64) }
case reflect.Float32:
f = func(x, y interface{}) bool { return x.(float32) < y.(float32) }
case reflect.Float64:
f = func(x, y interface{}) bool { return x.(float64) < y.(float64) }
case reflect.String:
f = func(x, y interface{}) bool { return x.(string) < y.(string) }
case reflect.TypeOf(time.Time{}).Kind():
f = func(x, y interface{}) bool { return x.(time.Time).Before(y.(time.Time)) }
default:
panic(fmt.Sprintf("sortmap: unsupported type: %s", t))
}
return
} | vendor/github.com/tg/gosortmap/sortmap.go | 0.688049 | 0.455683 | sortmap.go | starcoder |
package objects
// DWDMModuleState is the read-only operational state of a DWDM module:
// MSA state, environmental readings and vendor/firmware inventory data.
type DWDMModuleState struct {
	baseObj
	ModuleId               uint8   `SNAPROUTE: "KEY", CATEGORY:"Optical", ACCESS:"r", MULTIPLICITY: "*", DESCRIPTION: "DWDM Module identifier"`
	ModuleState            string  `DESCRIPTION: "Current MSA state of dwdm module"`
	ModuleVoltage          float64 `DESCRIPTION: "Module power supply voltage in Volts"`
	ModuleTemp             float64 `DESCRIPTION: "Module temperature in deg Celsius"`
	Populated              bool    `DESCRIPTION: "Is module populated"`
	VendorName             string  `DESCRIPTION: "Vendor name of dwdm module"`
	VendorPartNum          string  `DESCRIPTION: "Vendor assigned part number of dwdm module"`
	VendorSerialNum        string  `DESCRIPTION: "Vendor assigned serial number of dwdm module "`
	VendorDateCode         string  `DESCRIPTION: "Device manufacture data code of dwdm module"`
	ModuleHWVersion        string  `DESCRIPTION: "HW version of dwdm module"`
	ModuleActiveFWVersion  string  `DESCRIPTION: "Firmware version of active partition of dwdm module"`
	ModuleStandByFWVersion string  `DESCRIPTION: "Firmware version of standby partition of dwdm module"`
	ModuleActiveFWStatus   string  `DESCRIPTION: "Firmware image status of active partition of dwdm module"`
	ModuleStandByFWStatus  string  `DESCRIPTION: "Firmware image status of standby partition of dwdm module"`
}
// DWDMModule is the writable configuration of a DWDM module: admin state,
// lane mode and performance-monitoring tick settings.
type DWDMModule struct {
	baseObj
	ModuleId            uint8  `SNAPROUTE: "KEY", CATEGORY:"Optical", ACCESS:"rw", MULTIPLICITY: "*", AUTODISCOVER: "true", DESCRIPTION: "DWDM Module identifier"`
	AdminState          string `DESCRIPTION: "Reset state of this dwdm module (false (Reset deasserted), true (Reset asserted))", SELECTION: "UP"/"DOWN", DEFAULT:"DOWN"`
	IndependentLaneMode bool   `DESCRIPTION: "Network lane configuration for the DWDM Module. true-Independent lanes, false-Coupled lanes, DEFAULT:true"`
	PMInterval          uint8  `DESCRIPTION: "Performance monitoring interval, i.e. time interval between successive PM ticks in seconds", DEFAULT:1`
	EnableExtPMTickSrc  bool   `DESCRIPTION:"Enable/Disable external tick source for performance monitoring", DEFAULT:false`
}
// DWDMModuleNwIntf is the writable configuration of one network-side
// (line-side) interface of a DWDM module: modulation, power, channel, FEC,
// PRBS test settings and client-tributary mapping.
type DWDMModuleNwIntf struct {
	baseObj
	ModuleId                  uint8   `SNAPROUTE: "KEY", CATEGORY:"Optical", ACCESS:"rw", MULTIPLICITY: "*", AUTODISCOVER: "true", DESCRIPTION: "DWDM Module identifier"`
	NwIntfId                  uint8   `SNAPROUTE: "KEY", CATEGORY:"Optical", DESCRIPTION: "DWDM Module network interface identifier"`
	ModulationFmt             string  `DESCRIPTION: "Modulation format to use for this network interface", SELECTION: "QPSK"/"8QAM/"16QAM", DEFAULT:"16QAM"`
	TxPower                   float64 `DESCRIPTION: "Transmit output power for this network interface in dBm, MIN:0, MAX:4294967295", DEFAULT:0, UNIT:dBm`
	ChannelNumber             uint8   `DESCRIPTION: "TX Channel number to use for this network interface", MIN:1, MAX:100, DEFAULT:48`
	FECMode                   string  `DESCRIPTION: "DWDM Module network interface FEC mode", SELECTION: "15%SDFEC"/"15%OvrHeadSDFEC"/"25%OvrHeadSDFEC", DEFAULT:"15%SDFEC"`
	DiffEncoding              bool    `DESCRIPTION: "Control to enable/disable DWDM Module network interface encoding type", DEFAULT: true`
	TxPulseShapeFltrType      string  `DESCRIPTION: "TX pulse shaping filter type", SELECTION: "RootRaisedCos"/"RaisedCos", DEFAULT:RootRaisedCos"`
	TxPulseShapeFltrRollOff   float64 `DESCRIPTION: "TX pulse shape filter roll off factor, MIN:0.004, MAX:0.8", DEFAULT:0.301`
	AdminState                string  `DESCRIPTION: "Administrative state of this network interface", SELECTION: "UP"/"DOWN", DEFAULT: "UP"`
	EnableTxPRBS              bool    `DESCRIPTION: "Enable TX PRBS generation on this network interface", DEFAULT: false`
	TxPRBSPattern             string  `DESCRIPTION: "Pattern to use for TX PRBS generation", SELECTION:"2^7"/"2^15"/"2^31", DEFAULT:"2^31"`
	TxPRBSInvertPattern       bool    `DESCRIPTION: "Generate inverted PRBS polynomial pattern", DEFAULT:true`
	EnableRxPRBSChecker       bool    `DESCRIPTION: "Enable RX PRBS checker", DEFAULT: false`
	RxPRBSPattern             string  `DESCRIPTION: "PRBS pattern to use for checker", SELECTION:"2^7"/"2^15"/"2^31", DEFAULT:"2^31"`
	RxPRBSInvertPattern       bool    `DESCRIPTION: "Check against inverted PRBS polynomial pattern", DEFAULT:true`
	ClntIntfIdToTributary0Map uint8   `DESCRIPTION: "Client interface ID to map to network interface tributary 0", MIN:0, MAX:3`
	ClntIntfIdToTributary1Map uint8   `DESCRIPTION: "Client interface ID to map to network interface tributary 1", MIN:0, MAX:3`
	TxPowerRampdBmPerSec      float64 `DESCRIPTION: "Rate of change of tx power on this network interface", UNIT: dBm/s, DEFAULT:1`
}
// DWDMModuleNwIntfState is the read-only operational state of one network
// interface: BER/FEC statistics, optical power and chromatic dispersion.
type DWDMModuleNwIntfState struct {
	baseObj
	ModuleId                        uint8   `SNAPROUTE: "KEY", CATEGORY:"Optical", ACCESS:"r", MULTIPLICITY: "*", DESCRIPTION: "DWDM Module identifier"`
	NwIntfId                        uint8   `SNAPROUTE: "KEY", CATEGORY:"Optical", DESCRIPTION: "DWDM Module network interface identifier"`
	TxChanGridSpacing               string  `DESCRIPTION: "The channel grid spacing used for this network interface in GHz, UNIT: GHz"`
	CurrentBER                      float64 `DESCRIPTION: "Current value of BER on the DWDM module network interface"`
	MinBEROverPMInterval            float64 `DESCRIPTION: "Minimum value of BER over the last PM interval for the DWDM module network interface"`
	AvgBEROverPMInterval            float64 `DESCRIPTION: "Average value of BER over the last PM interval for the DWDM module network interface"`
	MaxBEROverPMInterval            float64 `DESCRIPTION: "Maximum value of BER over the last PM interval for the DWDM module network interface"`
	CurrUncorrectableFECBlkCnt      float64 `DESCRIPTION: "Current value of uncorrectable FEC code block count"`
	UncorrectableFECBlkCntOverPMInt float64 `DESCRIPTION: "Average value of uncorrectable FEC code block count over the last PM interval"`
	PRBSRxErrCnt                    float64 `DESCRIPTION: "RX PRBS error count for network interface"`
	RxPower                         float64 `DESCRIPTION: "Current RX power for the DWDM module network interface", UNIT: dBm`
	ChanFrequency                   float64 `DESCRIPTION: "Channel frequency corresponding to selected channel number for the DWDM module network interface", UNIT: GHz`
	CurrChromDisp                   int32   `DESCRIPTION: "Current RX chromatic dispersion for the DWDM module network interface", UNIT:ps/nm`
	AvgChromDispOverPMInt           int32   `DESCRIPTION: "Average RX chromatic dispersion over the last PM interval for the DWDM module network interface", UNIT:ps/nm`
	MinChromDispOverPMInt           int32   `DESCRIPTION: "Current RX chromatic dispersion over the last PM interval for the DWDM module network interface", UNIT:ps/nm`
	MaxChromDispOverPMInt           int32   `DESCRIPTION: "Current RX chromatic dispersion over the last PM interval for the DWDM module network interface", UNIT:ps/nm`
}
// DWDMModuleClntIntf is the writable configuration of one client-side (host)
// interface: FEC decoders, serializer/deserializer equalization, PRBS test
// controls, loopbacks and the network-lane mapping.
type DWDMModuleClntIntf struct {
	baseObj
	ModuleId                     uint8  `SNAPROUTE: "KEY", CATEGORY:"Optical", ACCESS:"rw", MULTIPLICITY: "*", AUTODISCOVER: "true", DESCRIPTION: "DWDM Module identifier"`
	ClntIntfId                   uint8  `SNAPROUTE: "KEY", CATEGORY:"Optical", DESCRIPTION: "DWDM Module client interface identifier"`
	TXFECDecDisable              bool   `DESCRIPTION: "802.3bj FEC decoder enable/disable state for traffic from Host to DWDM Module", DEFAULT: false`
	RXFECDecDisable              bool   `DESCRIPTION: "802.3bj FEC decoder enable/disable state for traffic from DWDM module to Host", DEFAULT: false`
	HostTxEqLfCtle               uint8  `DESCRIPTION: "Host interface TX deserializer equalization. LELPZRC LF-CTLE LFPZ gain code.", MIN:0, MAX:8, DEFAULT:0`
	HostTxEqCtle                 uint8  `DESCRIPTION: "Host interface TX deserializer equalization. LELRC CTLE LE gain code.", MIN:0, MAX:20, DEFAULT:18`
	HostTxEqDfe                  uint8  `DESCRIPTION: "Host interface TX deserializer equalization. s-DFE, DFE tap coefficient", MIN:0, MAX:63, DEFAULT:0`
	HostRxSerializerTap0Gain     uint8  `DESCRIPTION: "Host RX Serializer tap 0 control, gain for equalization filter tap", DEFAULT:7, MIN:0, MAX:7`
	HostRxSerializerTap0Delay    uint8  `DESCRIPTION: "Host RX Serializer tap 0 control, delay for equalization filter tap", DEFAULT:7, MIN:0, MAX:7`
	HostRxSerializerTap1Gain     uint8  `DESCRIPTION: "Host RX Serializer tap 1 control, gain for equalization filter tap", DEFAULT:7, MIN:0, MAX:7`
	HostRxSerializerTap2Gain     uint8  `DESCRIPTION: "Host RX Serializer tap 2 control, gain for equalization filter tap", DEFAULT:15, MIN:0, MAX:15`
	HostRxSerializerTap2Delay    uint8  `DESCRIPTION: "Host RX Serializer tap 2 control, delay for equalization filter tap", DEFAULT:5, MIN:0, MAX:7`
	AdminState                   string `DESCRIPTION: "Administrative state of this client interface", SELECTION: "UP"/"DOWN", DEFAULT: "UP"`
	EnableTxPRBSChecker          bool   `DESCRIPTION: "Enable/Disable TX PRBS checker for all lanes of this client interface", DEFAULT:false`
	TxPRBSPattern                string `DESCRIPTION: "PRBS pattern to use for checker", SELECTION:"2^7"/"2^15"/"2^31", DEFAULT:"2^31"`
	EnableRxPRBS                 bool   `DESCRIPTION: "Enable/Disable RX PRBS generation for all lanes of this client interface", DEFAULT:false`
	RxPRBSPattern                string `DESCRIPTION: "RX PRBS generator pattern", SELECTION:"2^7"/"2^15"/"2^31", DEFAULT:"2^31"`
	EnableIntSerdesNWLoopback    bool   `DESCRIPTION: "Enable/Disable serdes internal loopback, N/W RX is looped back to N/W TX", DEFAULT: false`
	EnableHostLoopback           bool   `DESCRIPTION: "Enable/Disable loopback on all host lanes of this client interface", DEFAULT: false`
	NwLaneTributaryToClntIntfMap uint8  `DESCRIPTION: "Network lane/tributary id to map to client interface", MIN:0, MAX:3`
}
// DWDMModuleClntIntfState is the read-only operational state of one client
// interface: per-host-lane PRBS TX error counts.
type DWDMModuleClntIntfState struct {
	baseObj
	ModuleId          uint8   `SNAPROUTE: "KEY", CATEGORY:"Optical", ACCESS:"r", MULTIPLICITY: "*", DESCRIPTION: "DWDM Module identifier"`
	ClntIntfId        uint8   `SNAPROUTE: "KEY", CATEGORY:"Optical", DESCRIPTION: "DWDM Module client interface identifier"`
	PRBSTxErrCntLane0 float64 `DESCRIPTION: "Client interface host lane 0 PRBS TX Error count"`
	PRBSTxErrCntLane1 float64 `DESCRIPTION: "Client interface host lane 1 PRBS TX Error count"`
	PRBSTxErrCntLane2 float64 `DESCRIPTION: "Client interface host lane 2 PRBS TX Error count"`
	PRBSTxErrCntLane3 float64 `DESCRIPTION: "Client interface host lane 3 PRBS TX Error count"`
}
// DWDMModulePMData is a single timestamped performance-monitoring sample.
type DWDMModulePMData struct {
	TimeStamp string  `DESCRIPTION: "Timestamp at which data is collected"`
	Value     float64 `DESCRIPTION: "PM Data Value"`
}
// DWDMModuleNwIntfPMState is the read-only performance-monitoring series for
// one resource/class/type combination on a network interface.
type DWDMModuleNwIntfPMState struct {
	baseObj
	ModuleId uint8  `SNAPROUTE: "KEY", CATEGORY:"Performance", ACCESS:"r", MULTIPLICITY: "*", DESCRIPTION: "DWDM Module identifier"`
	NwIntfId uint8  `SNAPROUTE: "KEY", CATEGORY:"Performance", DESCRIPTION: "DWDM Module network interface identifier"`
	Resource string `SNAPROUTE: "KEY", CATEGORY:"Performance", DESCRIPTION: "Opticd resource name for which PM Data is required"`
	Type     string `SNAPROUTE: "KEY", CATEGORY:"Performance", DESCRIPTION: "Min/Max/Avg"`
	Class    string `SNAPROUTE: "KEY", CATEGORY:"Performance", DESCRIPTION: "Class of PM Data", SELECTION: CLASS-A/CLASS-B/CLASS-B, DEFAULT: CLASS-A`
	// Data holds the collected PM samples for this key.
	Data []DWDMModulePMData
}
package view
import (
"fmt"
"github.com/protolambda/ztyp/codec"
. "github.com/protolambda/ztyp/tree"
)
// BitVectorTypeDef describes a fixed-length bitvector SSZ type: BitLength
// bits packed into (BitLength+7)/8 bytes.
type BitVectorTypeDef struct {
	BitLength uint64
	ComplexTypeBase
}
// BitVectorType creates the type definition for a bitvector of the given
// bit length. The serialized size is fixed at ceil(length/8) bytes.
func BitVectorType(length uint64) *BitVectorTypeDef {
	byteSize := (length + 7) / 8
	return &BitVectorTypeDef{
		BitLength: length,
		ComplexTypeBase: ComplexTypeBase{
			MinSize:     byteSize,
			MaxSize:     byteSize,
			Size:        byteSize,
			IsFixedSize: true,
		},
	}
}
// FromBits builds a BitVectorView from a bool slice, which must have exactly
// BitLength elements. The bits are packed into bytes and laid out as a
// binary tree of the type's canonical depth.
func (td *BitVectorTypeDef) FromBits(bits []bool) (*BitVectorView, error) {
	if uint64(len(bits)) != td.BitLength {
		return nil, fmt.Errorf("got %d bits, expected %d bits", len(bits), td.BitLength)
	}
	contents := bitsToBytes(bits)
	bottomNodes, err := BytesIntoNodes(contents)
	if err != nil {
		return nil, err
	}
	depth := CoverDepth(td.BottomNodeLength())
	rootNode, _ := SubtreeFillToContents(bottomNodes, depth)
	view, _ := td.ViewFromBacking(rootNode, nil)
	return view.(*BitVectorView), nil
}
// Length returns the fixed number of bits in this bitvector type.
func (td *BitVectorTypeDef) Length() uint64 {
	return td.BitLength
}

// DefaultNode returns the backing tree for an all-zero bitvector.
func (td *BitVectorTypeDef) DefaultNode() Node {
	depth := CoverDepth(td.BottomNodeLength())
	return SubtreeFillToDepth(&ZeroHashes[0], depth)
}
// ViewFromBacking wraps an existing backing tree node in a BitVectorView.
// The hook, if non-nil, is notified when the backing changes. The node is
// not validated here; callers are expected to supply a well-formed tree.
func (td *BitVectorTypeDef) ViewFromBacking(node Node, hook BackingHook) (View, error) {
	depth := CoverDepth(td.BottomNodeLength())
	return &BitVectorView{
		SubtreeView: SubtreeView{
			BackedView: BackedView{
				ViewBase: ViewBase{
					TypeDef: td,
				},
				Hook: hook,
				BackingNode: node,
			},
			depth: depth,
		},
		BitVectorTypeDef: td,
	}, nil
}
// BottomNodeLength returns how many 32-byte bottom (chunk) nodes are needed
// to store BitLength bits: 256 bits fit per chunk, rounded up.
func (td *BitVectorTypeDef) BottomNodeLength() uint64 {
	return (td.BitLength + 255) / 256
}
// Default returns an all-zero bitvector view with the given hook attached.
func (td *BitVectorTypeDef) Default(hook BackingHook) View {
	v, _ := td.ViewFromBacking(td.DefaultNode(), hook)
	return v
}
// New returns a fresh all-zero bitvector view without a backing hook.
func (td *BitVectorTypeDef) New() *BitVectorView {
	return td.Default(nil).(*BitVectorView)
}
// Deserialize reads exactly Size bytes from the reader and builds a view.
// It rejects input whose length does not match the fixed size, and input
// with any bit set past BitLength in the final byte.
func (td *BitVectorTypeDef) Deserialize(dr *codec.DecodingReader) (View, error) {
	scope := dr.Scope()
	if td.Size != scope {
		return nil, fmt.Errorf("expected size %d does not match scope %d", td.Size, scope)
	}
	contents := make([]byte, scope, scope)
	if _, err := dr.Read(contents); err != nil {
		return nil, err
	}
	// If BitLength is not a multiple of 8, the top (8 - BitLength%8) bits of
	// the last byte are padding and must be zero.
	if scope != 0 && td.BitLength&7 != 0 {
		last := contents[scope-1]
		if last&byte((uint16(1)<<(td.BitLength&7))-1) != last {
			return nil, fmt.Errorf("last bitvector byte %d has out of bounds bits set", last)
		}
	}
	bottomNodes, err := BytesIntoNodes(contents)
	if err != nil {
		return nil, err
	}
	depth := CoverDepth(td.BottomNodeLength())
	rootNode, _ := SubtreeFillToContents(bottomNodes, depth)
	view, _ := td.ViewFromBacking(rootNode, nil)
	return view.(*BitVectorView), nil
}
// String returns the SSZ-style type name, e.g. "Bitvector[64]".
func (td *BitVectorTypeDef) String() string {
	return fmt.Sprintf("Bitvector[%d]", td.BitLength)
}
// BitVectorView is a tree-backed view of a Bitvector[N] value.
type BitVectorView struct {
	SubtreeView
	*BitVectorTypeDef
}
// AsBitVector casts a generic View (plus an optional pending error) to a
// *BitVectorView. It is a convenience for chaining lookups.
func AsBitVector(v View, err error) (*BitVectorView, error) {
	if err != nil {
		return nil, err
	}
	bv, ok := v.(*BitVectorView)
	if !ok {
		return nil, fmt.Errorf("view is not a bitvector: %v", v)
	}
	return bv, nil
}
// subviewNode locates the bottom chunk node holding bit i. Each bottom node
// stores 256 bits, so i>>8 selects the chunk and the low 8 bits of i select
// the bit position within it.
func (tv *BitVectorView) subviewNode(i uint64) (r *Root, bottomIndex uint64, subIndex uint8, err error) {
	bottomIndex, subIndex = i>>8, uint8(i)
	v, err := tv.SubtreeView.GetNode(bottomIndex)
	if err != nil {
		return nil, 0, 0, err
	}
	r, ok := v.(*Root)
	if !ok {
		return nil, 0, 0, fmt.Errorf("bitvector bottom node is not a root, at index %d", i)
	}
	return r, bottomIndex, subIndex, nil
}
// Get returns the bit at index i, or an error if i is out of range or the
// backing tree is malformed.
func (tv *BitVectorView) Get(i uint64) (BoolView, error) {
	if i >= tv.BitLength {
		return false, fmt.Errorf("bitvector has bit length %d, cannot get bit index %d", tv.BitLength, i)
	}
	r, _, subIndex, err := tv.subviewNode(i)
	if err != nil {
		return false, err
	}
	return BoolType.BoolViewFromBitfieldBacking(r, subIndex)
}
// Set writes bit v at index i by replacing the affected bottom chunk node.
func (tv *BitVectorView) Set(i uint64, v BoolView) error {
	if i >= tv.BitLength {
		return fmt.Errorf("cannot set item at element index %d, bitvector only has %d bits", i, tv.BitLength)
	}
	r, bottomIndex, subIndex, err := tv.subviewNode(i)
	if err != nil {
		return err
	}
	return tv.SubtreeView.SetNode(bottomIndex, v.BackingFromBitfieldBase(r, subIndex))
}
// Copy returns a detached shallow copy: the immutable backing tree is
// shared, but the hook is dropped so mutations no longer propagate.
func (tv *BitVectorView) Copy() (View, error) {
	tvCopy := *tv
	tvCopy.Hook = nil
	return &tvCopy, nil
}
// Iter returns an iterator over all BitLength bits, fetching each one
// through Get. After the last bit it reports ok == false.
func (tv *BitVectorView) Iter() BitIter {
	next := uint64(0)
	return BitIterFn(func() (elem bool, ok bool, err error) {
		if next >= tv.BitLength {
			return false, false, nil
		}
		var item BoolView
		item, err = tv.Get(next)
		next++
		return bool(item), true, err
	})
}
// ReadonlyIter returns a faster iterator that walks the backing tree
// directly instead of going through Get.
func (tv *BitVectorView) ReadonlyIter() BitIter {
	return bitReadonlyIter(tv.BackingNode, tv.BitLength, tv.depth)
}
// ValueByteLength returns the fixed serialized size in bytes.
func (tv *BitVectorView) ValueByteLength() (uint64, error) {
	return tv.Size, nil
}
// Serialize writes the packed bit contents (Size bytes) to the writer.
func (tv *BitVectorView) Serialize(w *codec.EncodingWriter) error {
	contents := make([]byte, tv.Size, tv.Size)
	if err := SubtreeIntoBytes(tv.BackingNode, tv.depth, tv.BottomNodeLength(), contents); err != nil {
		return err
	}
	return w.Write(contents)
} | view/bitvector.go | 0.54698 | 0.471588 | bitvector.go | starcoder |
package dectype
import (
"fmt"
"github.com/swamp/compiler/src/decorated/dtype"
)
// TypeReferenceScopedOrNormal is implemented by both scoped and unscoped
// type references; it exposes the named definition the reference points at.
type TypeReferenceScopedOrNormal interface {
	dtype.Type
	NameReference() *NamedDefinitionTypeReference
}
// compareAtoms checks that two resolved atoms are equal. A nil atom on
// either side is an internal error; the Any atom matches everything.
func compareAtoms(pureExpected dtype.Atom, pureActual dtype.Atom) error {
	// Guard against nil before using the atoms at all. The original code ran
	// IsAtomAny first, which could mask (or trip over) a nil argument.
	if pureExpected == nil || pureActual == nil {
		return fmt.Errorf("can not have nil stuff here")
	}
	expectedIsAny := IsAtomAny(pureExpected)
	actualIsAny := IsAtomAny(pureActual)
	if expectedIsAny || actualIsAny {
		// Any is compatible with every atom.
		return nil
	}
	equalErr := pureExpected.IsEqual(pureActual)
	if equalErr != nil {
		return fmt.Errorf("*** NOT EQUAL:\n %v\nvs\n %v\n %w", pureExpected.AtomName(), pureActual.AtomName(), equalErr)
	}
	return nil
}
// CompatibleTypes reports (as an error) whether actualType can be used where
// expectedType is required. A custom type matches any of its own variants,
// the Any atom matches everything, and otherwise the resolved atoms must be
// equal. Nil arguments indicate a compiler bug and panic.
func CompatibleTypes(expectedType dtype.Type, actualType dtype.Type) error {
	if expectedType == nil {
		panic(fmt.Sprintf("shouldn't happen. expected is nil, actualType is %v", actualType))
	}
	if actualType == nil {
		panic(fmt.Sprintf("shouldn't happen. actualType is nil, expectedType is %v", expectedType))
	}
	customType, wasCustomType := expectedType.(*CustomTypeAtom)
	if wasCustomType {
		// A variant value is compatible with the custom type it belongs to.
		otherVariant, wasVariant := actualType.(*CustomTypeVariant)
		if wasVariant {
			return customType.IsVariantEqual(otherVariant)
		}
	}
	// Check each Resolve error before using its result: the original code
	// called IsAtomAny on the resolved atoms before looking at the errors,
	// potentially using an invalid atom after a failed resolve.
	pureExpected, expectedErr := expectedType.Resolve()
	if expectedErr != nil {
		return expectedErr
	}
	pureActual, actualErr := actualType.Resolve()
	if actualErr != nil {
		return actualErr
	}
	if IsAtomAny(pureActual) || IsAtomAny(pureExpected) {
		return nil
	}
	return compareAtoms(pureExpected, pureActual)
}
// ResolveToRecordType resolves a type down to its atom and asserts that the
// atom is a record, returning it or a descriptive error.
func ResolveToRecordType(expectedRecord dtype.Type) (*RecordAtom, error) {
	atom, atomErr := expectedRecord.Resolve()
	if atomErr != nil {
		return nil, fmt.Errorf("couldn't resolve to record %w", atomErr)
	}
	recordAtom, wasRecord := atom.(*RecordAtom)
	if !wasRecord {
		return nil, fmt.Errorf("resolved to something else than a record %v", atom)
	}
	return recordAtom, nil
} | src/decorated/types/type_lookup.go | 0.669637 | 0.472136 | type_lookup.go | starcoder |
package adsb
import (
"errors"
)
// decodeAlt13 converts a 13-bit altitude code field to an integer
// altitude value in feet. The highest three bits of the uint16
// argument passed must be zero.
func decodeAlt13(a uint16) (int64, error) {
	if a&0xE000 != 0 { // data is not properly aligned
		return 0, errors.New("invalid data length")
	}
	if a == 0 { // altitude is 0 or invalid
		return 0, nil
	}
	if a&0x40 != 0 { // M bit designates feet vs meters
		return 0, errors.New("metric altitude not supported")
	}
	if a&0x10 == 0 { // Q bit designates 100 ft vs 25 ft increments
		// Gillham encoding: the C bits form a Gray-coded 100 ft digit and
		// the A/B/D bits form a Gray-coded 500 ft count.
		// trailing 3 bits is 100 ft increments
		h := grayDecode(uint64(((a >> 10) & 0x04) |
			((a >> 9) & 0x02) | ((a >> 8) & 0x01))) // C1(20) C2(22) C4(24)
		// 5 and 6 never occur in valid Gillham data; 7 aliases to 5.
		if h == 5 || h == 6 {
			return 0, errors.New("invalid altitude value")
		}
		if h == 7 {
			h = 5
		}
		// first 8 bits is 500 ft increments
		f := grayDecode(uint64(((a << 5) & 0x80) | ((a << 6) & 0x40) | // D2(30) D4(32)
			((a >> 6) & 0x20) | ((a >> 5) & 0x10) | ((a >> 4) & 0x08) | // A1(21) A2(23) A4(25)
			((a >> 3) & 0x04) | ((a >> 2) & 0x02) | ((a >> 1) & 0x01))) // B1(27) B2(29) B4(31)
		// In odd 500 ft bands the 100 ft digit counts downward.
		if f%2 == 1 {
			h = 6 - h
		}
		// Altitudes are offset by -1300 ft from the encoded value.
		return int64((f*500)+(h*100)) - 1300, nil
	}
	// must be an 11 bit altitude: Q=1, 25 ft increments from -1000 ft.
	return (int64((a&0x0F)|((a&0x20)>>1)|((a&0x1F80)>>2)) * 25) - 1000, nil
}
// decodeAlt12 converts a 12-bit extended squitter altitude field to an
// integer altitude value in feet. The highest four bits of the uint16
// argument passed must be zero.
func decodeAlt12(a uint16) (int64, error) {
	switch {
	case a&0xF000 != 0:
		// Data is not properly aligned in the low 12 bits.
		return 0, errors.New("invalid data length")
	case a == 0:
		// Altitude is 0 or invalid.
		return 0, nil
	}
	// Widen to the 13-bit AC format by inserting a zero M bit between the
	// upper six and lower six bits, then reuse the 13-bit decoder.
	return decodeAlt13(((a & 0x0FC0) << 1) | (a & 0x3F))
}
// grayDecode converts a value in "reflected binary code" aka "Gray
// code" to the standard decimal value
func grayDecode(b uint64) uint64 {
	// XOR-folding with halving shifts (32,16,8,4,2,1) undoes the Gray
	// encoding for values up to 64 bits wide.
	for z := uint(32); z >= 1; z /= 2 {
		b ^= (b >> z)
	}
	return b
} | adsb/altitude.go | 0.734881 | 0.561335 | altitude.go | starcoder |
package examples
import (
"math"
"runtime"
"github.com/go-gl/gl/v2.1/gl"
. "github.com/jakecoffman/cp"
"fmt"
)
// DrawPointLineScale scales point/line thickness for the renderer.
const DrawPointLineScale = 1
// program is the compiled GL shader program used by FlushRenderer.
var program uint32
// v2f is a packed 2D float32 vector matching the vertex layout. 8 bytes.
type v2f struct {
	x, y float32
}
// V2f narrows a float64 physics vector to the GL float32 vertex format.
func V2f(v Vector) v2f {
	return v2f{float32(v.X), float32(v.Y)}
}
// v2f0 returns the zero vector.
func v2f0() v2f {
	return v2f{0, 0}
}
// Vertex is one renderer vertex: position, antialiasing coordinate, and
// fill/outline colors. 8*2 + 16*2 bytes = 48 bytes
type Vertex struct {
	vertex, aa_coord v2f
	fill_color, outline_color FColor
}
// Triangle is three vertices queued for a single draw call.
type Triangle struct {
	a, b, c Vertex
}
// vao/vbo are the GL vertex array/buffer objects; triangleStack accumulates
// geometry between ClearRenderer and FlushRenderer.
var vao uint32 = 0
var vbo uint32 = 0
var triangleStack []Triangle
// DrawCircle queues a circle as a quad (two triangles) whose corner
// aa_coords let the shader round it off, plus a radius line showing angle.
func DrawCircle(pos Vector, angle, radius float64, outline, fill FColor) {
	// Expand by one scaled pixel so the antialiased edge is not clipped.
	r := radius + 1/DrawPointLineScale
	a := Vertex{
		v2f{float32(pos.X - r), float32(pos.Y - r)},
		v2f{-1, -1},
		fill,
		outline,
	}
	b := Vertex{
		v2f{float32(pos.X - r), float32(pos.Y + r)},
		v2f{-1, 1},
		fill,
		outline,
	}
	c := Vertex{
		v2f{float32(pos.X + r), float32(pos.Y + r)},
		v2f{1, 1},
		fill,
		outline,
	}
	d := Vertex{
		v2f{float32(pos.X + r), float32(pos.Y - r)},
		v2f{1, -1},
		fill,
		outline,
	}
	t0 := Triangle{a, b, c}
	t1 := Triangle{a, c, d}
	triangleStack = append(triangleStack, t0)
	triangleStack = append(triangleStack, t1)
	// Orientation indicator from the center toward the facing angle.
	DrawFatSegment(pos, pos.Add(ForAngle(angle).Mult(radius-DrawPointLineScale*0.5)), 0, outline, fill)
}
// DrawSegment queues a hairline segment between a and b.
func DrawSegment(a, b Vector, fill FColor) {
	DrawFatSegment(a, b, 0, fill, fill)
}
// DrawFatSegment queues a capsule-shaped segment from a to b with the given
// radius, built from six triangles with aa_coords for shader antialiasing.
func DrawFatSegment(a, b Vector, radius float64, outline, fill FColor) {
	// n is the unit normal of the segment, t its unit tangent.
	n := b.Sub(a).ReversePerp().Normalize()
	t := n.ReversePerp()
	const half = 1.0 / DrawPointLineScale
	r := radius + half
	// Degenerate (hairline) segments are drawn entirely in the outline color.
	if r <= half {
		r = half
		fill = outline
	}
	nw := n.Mult(r)
	tw := t.Mult(r)
	// Eight corners: four around b (v0..v3) and four around a (v4..v7),
	// extended along the tangent to form the end caps.
	v0 := V2f(b.Sub(nw.Add(tw)))
	v1 := V2f(b.Add(nw.Sub(tw)))
	v2 := V2f(b.Sub(nw))
	v3 := V2f(b.Add(nw))
	v4 := V2f(a.Sub(nw))
	v5 := V2f(a.Add(nw))
	v6 := V2f(a.Sub(nw.Sub(tw)))
	v7 := V2f(a.Add(nw.Add(tw)))
	t0 := Triangle{
		Vertex{v0, v2f{1, -1}, fill, outline},
		Vertex{v1, v2f{1, 1}, fill, outline},
		Vertex{v2, v2f{0, -1}, fill, outline},
	}
	t1 := Triangle{
		Vertex{v3, v2f{0, 1}, fill, outline},
		Vertex{v1, v2f{1, 1}, fill, outline},
		Vertex{v2, v2f{0, -1}, fill, outline},
	}
	t2 := Triangle{
		Vertex{v3, v2f{0, 1}, fill, outline},
		Vertex{v4, v2f{0, -1}, fill, outline},
		Vertex{v2, v2f{0, -1}, fill, outline},
	}
	t3 := Triangle{
		Vertex{v3, v2f{0, 1}, fill, outline},
		Vertex{v4, v2f{0, -1}, fill, outline},
		Vertex{v5, v2f{0, 1}, fill, outline},
	}
	t4 := Triangle{
		Vertex{v6, v2f{-1, -1}, fill, outline},
		Vertex{v4, v2f{0, -1}, fill, outline},
		Vertex{v5, v2f{0, 1}, fill, outline},
	}
	t5 := Triangle{
		Vertex{v6, v2f{-1, -1}, fill, outline},
		Vertex{v7, v2f{-1, 1}, fill, outline},
		Vertex{v5, v2f{0, 1}, fill, outline},
	}
	triangleStack = append(triangleStack, t0)
	triangleStack = append(triangleStack, t1)
	triangleStack = append(triangleStack, t2)
	triangleStack = append(triangleStack, t3)
	triangleStack = append(triangleStack, t4)
	triangleStack = append(triangleStack, t5)
}
// DrawPolygon queues a convex polygon with rounded corners of the given
// radius: a triangle fan for the interior plus an extruded antialiased rim.
func DrawPolygon(count int, verts []Vector, radius float64, outline, fill FColor) {
	type ExtrudeVerts struct {
		offset, n Vector
	}
	// For each vertex compute the edge normal (n) and the miter offset
	// direction (offset) that bisects the two adjacent edge normals.
	extrude := make([]ExtrudeVerts, count)
	for i := 0; i < count; i++ {
		v0 := verts[(i-1+count)%count]
		v1 := verts[i]
		v2 := verts[(i+1)%count]
		n1 := v1.Sub(v0).ReversePerp().Normalize()
		n2 := v2.Sub(v1).ReversePerp().Normalize()
		offset := n1.Add(n2).Mult(1.0 / (n1.Dot(n2) + 1.0))
		extrude[i] = ExtrudeVerts{offset, n2}
	}
	// Fan-fill the (inset) interior of the polygon.
	inset := -math.Max(0, 1.0/DrawPointLineScale-radius)
	for i := 0; i < count-2; i++ {
		v0 := V2f(verts[0].Add(extrude[0].offset.Mult(inset)))
		v1 := V2f(verts[i+1].Add(extrude[i+1].offset.Mult(inset)))
		v2 := V2f(verts[i+2].Add(extrude[i+2].offset.Mult(inset)))
		triangleStack = append(triangleStack, Triangle{
			Vertex{v0, v2f0(), fill, fill},
			Vertex{v1, v2f0(), fill, fill},
			Vertex{v2, v2f0(), fill, fill},
		})
	}
	// Walk each edge (j is the previous vertex index) and emit the four
	// rim triangles between the inner and the outset outline.
	outset := 1.0/DrawPointLineScale + radius - inset
	j := count - 1
	for i := 0; i < count; {
		vA := verts[i]
		vB := verts[j]
		nA := extrude[i].n
		nB := extrude[j].n
		offsetA := extrude[i].offset
		offsetB := extrude[j].offset
		innerA := vA.Add(offsetA.Mult(inset))
		innerB := vB.Add(offsetB.Mult(inset))
		inner0 := V2f(innerA)
		inner1 := V2f(innerB)
		outer0 := V2f(innerA.Add(nB.Mult(outset)))
		outer1 := V2f(innerB.Add(nB.Mult(outset)))
		outer2 := V2f(innerA.Add(offsetA.Mult(outset)))
		outer3 := V2f(innerA.Add(nA.Mult(outset)))
		n0 := V2f(nA)
		n1 := V2f(nB)
		offset0 := V2f(offsetA)
		triangleStack = append(triangleStack, Triangle{
			Vertex{inner0, v2f0(), fill, outline},
			Vertex{inner1, v2f0(), fill, outline},
			Vertex{outer1, n1, fill, outline},
		})
		triangleStack = append(triangleStack, Triangle{
			Vertex{inner0, v2f0(), fill, outline},
			Vertex{outer0, n1, fill, outline},
			Vertex{outer1, n1, fill, outline},
		})
		triangleStack = append(triangleStack, Triangle{
			Vertex{inner0, v2f0(), fill, outline},
			Vertex{outer0, n1, fill, outline},
			Vertex{outer2, offset0, fill, outline},
		})
		triangleStack = append(triangleStack, Triangle{
			Vertex{inner0, v2f0(), fill, outline},
			Vertex{outer2, offset0, fill, outline},
			Vertex{outer3, n0, fill, outline},
		})
		j = i
		i++
	}
}
// DrawDot queues a screen-scaled square (two triangles) centered on pos;
// the corner aa_coords let the shader render it as a round dot.
func DrawDot(size float64, pos Vector, fill FColor) {
	r := size * 0.5 / DrawPointLineScale
	bl := Vertex{v2f{float32(pos.X - r), float32(pos.Y - r)}, v2f{-1, -1}, fill, fill}
	tl := Vertex{v2f{float32(pos.X - r), float32(pos.Y + r)}, v2f{-1, 1}, fill, fill}
	tr := Vertex{v2f{float32(pos.X + r), float32(pos.Y + r)}, v2f{1, 1}, fill, fill}
	br := Vertex{v2f{float32(pos.X + r), float32(pos.Y - r)}, v2f{1, -1}, fill, fill}
	triangleStack = append(triangleStack, Triangle{bl, tl, tr}, Triangle{bl, tr, br})
}
// DrawBB queues the outline of a bounding box as an unfilled polygon.
func DrawBB(bb BB, outline FColor) {
	verts := []Vector{
		{bb.R, bb.B},
		{bb.R, bb.T},
		{bb.L, bb.T},
		{bb.L, bb.B},
	}
	DrawPolygon(4, verts, 0, outline, FColor{})
}
// DrawInstructions draws the static help text and the FPS counter.
func DrawInstructions() {
	DrawString(Vector{-300, 220}, fmt.Sprintf(`Press Q to quit, V to toggle Vsync
Use the mouse to drag objects
FPS: %d`, fps))
}
// DrawInfo draws the space's debug statistics.
func DrawInfo(space *Space) {
	DrawString(Vector{0, 220}, DebugInfo(space))
}
// FlushRenderer uploads the queued triangles to the GPU and issues one draw
// call. Call ClearRenderer afterwards to start the next frame's batch.
func FlushRenderer() {
	gl.BindBuffer(gl.ARRAY_BUFFER, vbo)
	// 48 bytes per Vertex, 3 vertices per Triangle.
	gl.BufferData(gl.ARRAY_BUFFER, len(triangleStack)*(48*3), gl.Ptr(triangleStack), gl.STREAM_DRAW)
	gl.UseProgram(program)
	gl.Uniform1f(gl.GetUniformLocation(program, gl.Str("u_outline_coef\x00")), DrawPointLineScale)
	// macOS uses the APPLE vertex array extension entry point.
	if runtime.GOOS == "darwin" {
		gl.BindVertexArrayAPPLE(vao)
	} else {
		gl.BindVertexArray(vao)
	}
	gl.DrawArrays(gl.TRIANGLES, 0, int32(len(triangleStack)*3))
	CheckGLErrors()
}
// ClearRenderer empties the triangle batch while keeping its capacity.
func ClearRenderer() {
	triangleStack = triangleStack[:0]
} | examples/drawing.go | 0.67971 | 0.528655 | drawing.go | starcoder |
package helper
import (
"reflect"
"strings"
)
// Empty mirrors PHP's empty(): nil, false, zero numbers, empty
// strings/arrays/slices/maps and nil pointers/interfaces all count as
// empty. Anything else is compared against its type's zero value.
func Empty(val interface{}) bool {
	if val == nil {
		return true
	}
	rv := reflect.ValueOf(val)
	switch rv.Kind() {
	case reflect.String, reflect.Array, reflect.Map, reflect.Slice:
		// Len of a nil map/slice is 0, so one length check covers both the
		// nil and the zero-length cases.
		return rv.Len() == 0
	case reflect.Bool:
		return !rv.Bool()
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		return rv.Int() == 0
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
		return rv.Uint() == 0
	case reflect.Float32, reflect.Float64:
		return rv.Float() == 0
	case reflect.Interface, reflect.Ptr:
		return rv.IsNil()
	}
	return reflect.DeepEqual(val, reflect.Zero(rv.Type()).Interface())
}
// IsNumeric is_numeric()
// Numeric strings consist of optional sign, any number of digits, optional decimal part and optional exponential part.
// Thus +0123.45e6 is a valid numeric value.
// In PHP hexadecimal (e.g. 0xf4c3b00c) is not supported, but IsNumeric is supported.
//
// Fixes over the previous version: whitespace-only strings no longer panic
// (TrimSpace ran after the empty check), signed exponents like "1e-5" are
// accepted, and malformed strings such as "..5" or ".e5" are rejected
// (the old index sentinels failed for a point/exponent at position 0).
func IsNumeric(val interface{}) bool {
	switch val.(type) {
	case int, int8, int16, int32, int64, uint, uint8, uint16, uint32, uint64:
		return true
	case float32, float64, complex64, complex128:
		return true
	case string:
		// Trim whitespace first, then test for emptiness, so " " is safely
		// rejected instead of indexing into an empty string.
		str := strings.TrimSpace(val.(string))
		if str == "" {
			return false
		}
		if str[0] == '-' || str[0] == '+' {
			str = str[1:]
			if str == "" {
				return false
			}
		}
		// Hexadecimal: 0x / 0X followed by at least one hex digit.
		if len(str) > 2 && str[0] == '0' && (str[1] == 'x' || str[1] == 'X') {
			for _, h := range str[2:] {
				if !((h >= '0' && h <= '9') || (h >= 'a' && h <= 'f') || (h >= 'A' && h <= 'F')) {
					return false
				}
			}
			return true
		}
		// Decimal: digits with an optional single point (at least one digit
		// in the mantissa) and an optional exponent with its own sign and
		// at least one digit.
		i, l := 0, len(str)
		digits := 0
		for i < l && str[i] >= '0' && str[i] <= '9' {
			digits++
			i++
		}
		if i < l && str[i] == '.' {
			i++
			for i < l && str[i] >= '0' && str[i] <= '9' {
				digits++
				i++
			}
		}
		if digits == 0 {
			return false
		}
		if i < l && (str[i] == 'e' || str[i] == 'E') {
			i++
			if i < l && (str[i] == '+' || str[i] == '-') {
				i++
			}
			expDigits := 0
			for i < l && str[i] >= '0' && str[i] <= '9' {
				expDigits++
				i++
			}
			if expDigits == 0 {
				return false
			}
		}
		// Valid only if the whole string was consumed.
		return i == l
	}
	return false
}
// isStringNumeric reports whether x looks like a plain decimal number:
// optional leading '-', digits, and at most one '.'.
// NOTE(review): it returns true for "", "-", and "." alone — confirm
// whether callers rely on that before tightening.
func isStringNumeric(x string) bool {
	hasPeriod := false
	for i, c := range x {
		switch c {
		case '-':
			// A minus sign is only valid in the first position.
			if i != 0 {
				return false
			}
		case '.':
			// Only a single decimal point is allowed.
			if hasPeriod {
				return false
			}
			hasPeriod = true
		case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
			//Nothing here.
		default:
			return false
		}
	}
	return true
} | variable.go | 0.550124 | 0.426441 | variable.go | starcoder |
// Package bsontype is a utility package that contains types for each BSON type and the
// a stringifier for the Type to enable easier debugging when working with BSON.
package bsontype
// These constants uniquely refer to each BSON type.
const (
	Double Type = 0x01
	String Type = 0x02
	EmbeddedDocument Type = 0x03
	Array Type = 0x04
	Binary Type = 0x05
	Undefined Type = 0x06
	ObjectID Type = 0x07
	Boolean Type = 0x08
	DateTime Type = 0x09
	Null Type = 0x0A
	Regex Type = 0x0B
	DBPointer Type = 0x0C
	JavaScript Type = 0x0D
	Symbol Type = 0x0E
	CodeWithScope Type = 0x0F
	Int32 Type = 0x10
	Timestamp Type = 0x11
	Int64 Type = 0x12
	Decimal128 Type = 0x13
	MinKey Type = 0xFF
	MaxKey Type = 0x7F
	// Binary subtype bytes, stored alongside TBinary payloads.
	BinaryGeneric byte = 0x00
	BinaryFunction byte = 0x01
	BinaryBinaryOld byte = 0x02
	BinaryUUIDOld byte = 0x03
	BinaryUUID byte = 0x04
	BinaryMD5 byte = 0x05
	BinaryUserDefined byte = 0x80
)
// Type represents a BSON type.
type Type byte
// String returns the string representation of the BSON type's name.
// The cases use byte literals matching the Type constants above; any
// value outside the known set reports "invalid".
func (bt Type) String() string {
	switch bt {
	case '\x01':
		return "double"
	case '\x02':
		return "string"
	case '\x03':
		return "embedded document"
	case '\x04':
		return "array"
	case '\x05':
		return "binary"
	case '\x06':
		return "undefined"
	case '\x07':
		return "objectID"
	case '\x08':
		return "boolean"
	case '\x09':
		return "UTC datetime"
	case '\x0A':
		return "null"
	case '\x0B':
		return "regex"
	case '\x0C':
		return "dbPointer"
	case '\x0D':
		return "javascript"
	case '\x0E':
		return "symbol"
	case '\x0F':
		return "code with scope"
	case '\x10':
		return "32-bit integer"
	case '\x11':
		return "timestamp"
	case '\x12':
		return "64-bit integer"
	case '\x13':
		return "128-bit decimal"
	case '\xFF':
		return "min key"
	case '\x7F':
		return "max key"
	default:
		return "invalid"
	}
} | bsontype/bsontype.go | 0.580352 | 0.572723 | bsontype.go | starcoder |
package wire
import (
"bytes"
"errors"
"fmt"
)
// errNotEquals is a sentinel error used while iterating through ValueLists to
// indicate that two values did not match.
var errNotEquals = errors.New("values are not equal")
// ValuesAreEqual checks if two values are equal.
// Values of different wire types are never equal; collections are compared
// element-wise via the type-specific helpers below.
func ValuesAreEqual(left, right Value) bool {
	if left.typ != right.typ {
		return false
	}
	switch left.typ {
	case TBool:
		return left.GetBool() == right.GetBool()
	case TI8:
		return left.GetI8() == right.GetI8()
	case TDouble:
		return left.GetDouble() == right.GetDouble()
	case TI16:
		return left.GetI16() == right.GetI16()
	case TI32:
		return left.GetI32() == right.GetI32()
	case TI64:
		return left.GetI64() == right.GetI64()
	case TBinary:
		return bytes.Equal(left.tbinary, right.tbinary)
	case TStruct:
		return StructsAreEqual(left.tstruct, right.tstruct)
	case TMap:
		return MapsAreEqual(left.tcoll.(MapItemList), right.tcoll.(MapItemList))
	case TSet:
		return SetsAreEqual(left.tcoll.(ValueList), right.tcoll.(ValueList))
	case TList:
		return ListsAreEqual(left.tcoll.(ValueList), right.tcoll.(ValueList))
	default:
		return false
	}
}
// StructsAreEqual checks if two structs are equal.
// Fields are matched by ID, not position, since field order is unordered.
// NOTE(review): assumes field IDs are unique within each struct; duplicate
// IDs would be collapsed by fieldMap — confirm that invariant upstream.
func StructsAreEqual(left, right Struct) bool {
	if len(left.Fields) != len(right.Fields) {
		return false
	}
	// Fields are unordered so we need to build a map to actually compare
	// them.
	leftFields := left.fieldMap()
	rightFields := right.fieldMap()
	for i, lvalue := range leftFields {
		if rvalue, ok := rightFields[i]; !ok {
			return false
		} else if !ValuesAreEqual(lvalue, rvalue) {
			return false
		}
	}
	return true
}
// SetsAreEqual checks if two sets are equal.
// It dispatches to a hash-based O(n) comparison when the element type can
// be used as a Go map key, and an O(n^2) fallback otherwise.
func SetsAreEqual(left, right ValueList) bool {
	if left.ValueType() != right.ValueType() {
		return false
	}
	if left.Size() != right.Size() {
		return false
	}
	if isHashable(left.ValueType()) {
		return setsArEqualHashable(left.Size(), left, right)
	}
	return setsAreEqualUnhashable(left.Size(), left, right)
}
// setsArEqualHashable checks if two unordered ValueLists are equal, provided
// that they contain items that are hashable -- that is, the items can be used
// as keys in a map.
// (NOTE(review): function name is missing an 'e' — "setsArEqual" — but it is
// package-private and referenced above, so renaming must be coordinated.)
func setsArEqualHashable(size int, l, r ValueList) bool {
	m := make(map[interface{}]bool, size)
	// explicitly ignoring since we know there will not be an error
	_ = l.ForEach(func(v Value) error {
		m[toHashable(v)] = true
		return nil
	})
	// Equal sizes plus right ⊆ left implies equality for duplicate-free sets.
	return errNotEquals != r.ForEach(func(v Value) error {
		if _, ok := m[toHashable(v)]; !ok {
			return errNotEquals
		}
		return nil
	})
}
// setsAreEqualUnhashable checks if two unordered ValueLists are equal for
// types that are not hashable. Note that this is O(n^2) in time complexity.
func setsAreEqualUnhashable(size int, l, r ValueList) bool {
	lItems := ValueListToSlice(l)
	// For every right element, linearly scan the left side for a match.
	return errNotEquals != r.ForEach(func(rItem Value) error {
		matched := false
		for _, lItem := range lItems {
			if ValuesAreEqual(lItem, rItem) {
				matched = true
				break
			}
		}
		if !matched {
			return errNotEquals
		}
		return nil
	})
}
// MapsAreEqual checks if two maps are equal.
// Key and value types must match, sizes must match, and every key/value
// pair on the left must appear on the right.
func MapsAreEqual(left, right MapItemList) bool {
	if left.KeyType() != right.KeyType() {
		return false
	}
	if left.ValueType() != right.ValueType() {
		return false
	}
	if left.Size() != right.Size() {
		return false
	}
	if isHashable(left.KeyType()) {
		return mapsAreEqualHashable(left.Size(), left, right)
	}
	return mapsAreEqualUnhashable(left.Size(), left, right)
}
// mapsAreEqualHashable compares two maps in O(n) by indexing the left map's
// entries under hashable key representations.
func mapsAreEqualHashable(size int, l, r MapItemList) bool {
	m := make(map[interface{}]Value, size)
	// explicitly ignoring since we know there will not be an error
	_ = l.ForEach(func(item MapItem) error {
		m[toHashable(item.Key)] = item.Value
		return nil
	})
	return errNotEquals != r.ForEach(func(item MapItem) error {
		lValue, ok := m[toHashable(item.Key)]
		if !ok {
			return errNotEquals
		}
		if !ValuesAreEqual(lValue, item.Value) {
			return errNotEquals
		}
		return nil
	})
}
// mapsAreEqualUnhashable compares two maps whose keys cannot be used as Go
// map keys; it is O(n^2): for every right entry it linearly scans the left
// entries for a matching key/value pair. The size parameter is unused but
// kept for signature symmetry with mapsAreEqualHashable.
func mapsAreEqualUnhashable(size int, l, r MapItemList) bool {
	lItems := MapItemListToSlice(l)
	return errNotEquals != r.ForEach(func(rItem MapItem) error {
		matched := false
		for _, lItem := range lItems {
			if !ValuesAreEqual(lItem.Key, rItem.Key) {
				continue
			}
			if !ValuesAreEqual(lItem.Value, rItem.Value) {
				continue
			}
			matched = true
			// Stop scanning once a match is found; the original kept
			// iterating over the remaining left items for no benefit.
			break
		}
		if !matched {
			return errNotEquals
		}
		return nil
	})
}
// isHashable reports whether values of the given wire type can be converted
// into Go map keys by toHashable (primitives and binary only).
func isHashable(t Type) bool {
	switch t {
	case TBool, TI8, TDouble, TI16, TI32, TI64, TBinary:
		return true
	default:
		return false
	}
}
// toHashable converts a primitive Value into a comparable Go value usable
// as a map key; binary becomes a string copy. Panics on non-hashable types,
// so callers must check isHashable first.
func toHashable(v Value) interface{} {
	switch v.Type() {
	case TBool, TI8, TDouble, TI16, TI32, TI64:
		return v.Get()
	case TBinary:
		return string(v.GetBinary())
	default:
		panic(fmt.Sprintf("value is not hashable: %v", v))
	}
}
// ListsAreEqual checks if two lists are equal.
// Unlike sets and maps, lists are ordered, so elements are compared
// position by position.
func ListsAreEqual(left, right ValueList) bool {
	if left.ValueType() != right.ValueType() {
		return false
	}
	if left.Size() != right.Size() {
		return false
	}
	leftItems := ValueListToSlice(left)
	rightItems := ValueListToSlice(right)
	for i, lv := range leftItems {
		rv := rightItems[i]
		if !ValuesAreEqual(lv, rv) {
			return false
		}
	}
	return true
} | wire/value_equals.go | 0.733929 | 0.580501 | value_equals.go | starcoder |
package models
import (
"errors"
"fmt"
"reflect"
"strings"
"time"
"github.com/astaxie/beego/orm"
)
// CarInsurances is the beego ORM model for the car_insurances table: a car
// insurance quote request with vehicle, driver and claim-history details.
type CarInsurances struct {
	Id int `orm:"column(id);auto"`
	Make string `orm:"column(make);size(32)"`
	Model string `orm:"column(model);size(100)"`
	Year int `orm:"column(year)"`
	BodyType string `orm:"column(body_type);size(32)"`
	Transmission string `orm:"column(transmission);size(16)"`
	Colour int `orm:"column(colour)"`
	PlanStartInsurance int `orm:"column(plan_start_insurance)"`
	Dob time.Time `orm:"column(dob);type(date)"`
	Gender string `orm:"column(gender);size(32)"`
	AgeDriversLicenceObtained int `orm:"column(age_drivers_licence_obtained)"`
	DriverOwner int8 `orm:"column(driver_owner)"`
	OwnAnotherVehicle int8 `orm:"column(own_another_vehicle)"`
	AnyAccidents int8 `orm:"column(any_accidents)"`
	AccidentYear int `orm:"column(accident_year)"`
	ClaimType string `orm:"column(claim_type);size(32)"`
	EngineSizeMin int `orm:"column(engine_size_min)"`
	EngineSizeMax int `orm:"column(engine_size_max)"`
	Cylinders int `orm:"column(cylinders)"`
	EngineType string `orm:"column(engine_type);size(32)"`
	InductionTurbo string `orm:"column(induction_turbo);size(50)"`
	PowerMin int `orm:"column(power_min)"`
	PowerMax int `orm:"column(power_max)"`
	TowBrakedMin int `orm:"column(tow_braked_min)"`
	TowBrakedMax int `orm:"column(tow_braked_max)"`
	AncapSafetyRating string `orm:"column(ancap_safety_rating);size(32)"`
	GreenStarRating string `orm:"column(green_star_rating);size(32)"`
	PPlateApproved string `orm:"column(p_plate_approved);size(16)"`
	DealerPostcodes string `orm:"column(dealer_postcodes);size(100)"`
	DealerDistance int `orm:"column(dealer_distance)"`
	Status int `orm:"column(status)"`
	Open int `orm:"column(open)"`
	UserId int `orm:"column(user_id)"`
	CreatedAt time.Time `orm:"column(created_at);type(timestamp)"`
	UpdatedAt time.Time `orm:"column(updated_at);type(timestamp)"`
}
// TableName tells beego ORM which database table backs this model.
func (t *CarInsurances) TableName() string {
	return "car_insurances"
}
// NOTE(review): model registration is commented out here — presumably the
// model is registered elsewhere; confirm before re-enabling.
func init() {
	//orm.RegisterModel(new(CarInsurances))
}
// AddCarInsurances insert a new CarInsurances into database and returns
// last inserted Id on success.
func AddCarInsurances(m *CarInsurances) (id int64, err error) {
	o := orm.NewOrm()
	id, err = o.Insert(m)
	return
}
// GetCarInsurancesById retrieves CarInsurances by Id. Returns error if
// Id doesn't exist
func GetCarInsurancesById(id int) (v *CarInsurances, err error) {
	o := orm.NewOrm()
	v = &CarInsurances{Id: id}
	if err = o.Read(v); err == nil {
		return v, nil
	}
	return nil, err
}
// GetAllCarInsurances retrieves all CarInsurances matches certain condition. Returns empty list if
// no records exist
// (bee-generated): query maps column filters, sortby/order control ordering,
// fields trims the returned columns, offset/limit paginate.
func GetAllCarInsurances(query map[string]string, fields []string, sortby []string, order []string,
	offset int64, limit int64) (ml []interface{}, err error) {
	o := orm.NewOrm()
	qs := o.QueryTable(new(CarInsurances))
	// query k=v
	for k, v := range query {
		// rewrite dot-notation to Object__Attribute
		k = strings.Replace(k, ".", "__", -1)
		qs = qs.Filter(k, v)
	}
	// order by:
	var sortFields []string
	if len(sortby) != 0 {
		if len(sortby) == len(order) {
			// 1) for each sort field, there is an associated order
			for i, v := range sortby {
				orderby := ""
				if order[i] == "desc" {
					orderby = "-" + v
				} else if order[i] == "asc" {
					orderby = v
				} else {
					return nil, errors.New("Error: Invalid order. Must be either [asc|desc]")
				}
				sortFields = append(sortFields, orderby)
			}
			// NOTE(review): OrderBy is applied here and again below; beego's
			// OrderBy appears to replace rather than accumulate, so the
			// duplicate is presumably harmless — confirm.
			qs = qs.OrderBy(sortFields...)
		} else if len(sortby) != len(order) && len(order) == 1 {
			// 2) there is exactly one order, all the sorted fields will be sorted by this order
			for _, v := range sortby {
				orderby := ""
				if order[0] == "desc" {
					orderby = "-" + v
				} else if order[0] == "asc" {
					orderby = v
				} else {
					return nil, errors.New("Error: Invalid order. Must be either [asc|desc]")
				}
				sortFields = append(sortFields, orderby)
			}
		} else if len(sortby) != len(order) && len(order) != 1 {
			return nil, errors.New("Error: 'sortby', 'order' sizes mismatch or 'order' size is not 1")
		}
	} else {
		if len(order) != 0 {
			return nil, errors.New("Error: unused 'order' fields")
		}
	}
	var l []CarInsurances
	qs = qs.OrderBy(sortFields...)
	if _, err = qs.Limit(limit, offset).All(&l, fields...); err == nil {
		if len(fields) == 0 {
			for _, v := range l {
				ml = append(ml, v)
			}
		} else {
			// trim unused fields
			for _, v := range l {
				m := make(map[string]interface{})
				val := reflect.ValueOf(v)
				for _, fname := range fields {
					m[fname] = val.FieldByName(fname).Interface()
				}
				ml = append(ml, m)
			}
		}
		return ml, nil
	}
	return nil, err
}
// UpdateCarInsurances updates CarInsurances by Id and returns error if
// the record to be updated doesn't exist
func UpdateCarInsurancesById(m *CarInsurances) (err error) {
	o := orm.NewOrm()
	v := CarInsurances{Id: m.Id}
	// ascertain id exists in the database
	if err = o.Read(&v); err == nil {
		var num int64
		if num, err = o.Update(m); err == nil {
			fmt.Println("Number of records updated in database:", num)
		}
	}
	return
}
// DeleteCarInsurances deletes CarInsurances by Id and returns error if
// the record to be deleted doesn't exist
func DeleteCarInsurances(id int) (err error) {
	o := orm.NewOrm()
	v := CarInsurances{Id: id}
	// ascertain id exists in the database
	if err = o.Read(&v); err == nil {
		var num int64
		if num, err = o.Delete(&CarInsurances{Id: id}); err == nil {
			fmt.Println("Number of records deleted in database:", num)
		}
	}
	return
} | models/car_insurances.go | 0.609989 | 0.442938 | car_insurances.go | starcoder |
package template
import (
"fmt"
"io"
"regexp"
)
// Node is a type for hierarchical data.
// A node carries a label, an ordered list of regexp replacements applied to
// echoed template text, and labeled children iterated by for/if statements.
type Node struct {
	label string
	replacements []replacement
	children []*Node
}
// replacement pairs a compiled pattern with its replacement text.
type replacement struct {
	expr *regexp.Regexp
	repl string
}
// NewRoot returns a new root node.
func NewRoot() *Node {
	return &Node{}
}
// AddReplacement adds a regexp replacement to the given node. The replacement
// value is formatted with a %s conversion.
// The expression must be a valid regexp; MustCompile panics otherwise.
func (node *Node) AddReplacement(expr string, repl interface{}) {
	node.replacements = append(node.replacements, replacement{regexp.MustCompile(expr), fmt.Sprintf("%s", repl)})
}
// AddReplacementf adds a regexp replacement to the given node. The replacement
// format and arguments are formatted with fmt.Sprintf.
func (node *Node) AddReplacementf(expr string, replFormat string, a ...interface{}) {
	node.AddReplacement(expr, fmt.Sprintf(replFormat, a...))
}
// NewChild adds a child with the given label to the given node.
func (node *Node) NewChild(label string) *Node {
	child := &Node{label: label}
	node.children = append(node.children, child)
	return child
}
// Instantiate executes the given template on the given input.
// The root node's own replacements seed the replacement stack.
func (tmpl *Template) Instantiate(w io.Writer, node *Node) error {
	return tmpl.instantiate(w, node, append([]replacement(nil), node.replacements...))
}
// instantiate executes a single template statement against node, with
// replacements accumulated from the node and its ancestors in the walk.
func (tmpl *Template) instantiate(w io.Writer, node *Node, replacements []replacement) error {
	switch tmpl.typ {
	case blockStatement:
		return tmpl.instantiateChildren(w, node, replacements)
	case echoStatement:
		// Apply replacements most-recently-added first, so inner (more
		// specific) replacements take precedence over outer ones.
		text := tmpl.arg
		for j := range replacements {
			r := replacements[len(replacements)-1-j]
			text = r.expr.ReplaceAllString(text, r.repl)
		}
		_, err := fmt.Fprintln(w, text)
		return err
	case forStatement:
		// Execute the body once per child whose label matches, pushing that
		// child's replacements for the duration of its iteration.
		for _, nodeChild := range node.children {
			if nodeChild.label != tmpl.arg {
				continue
			}
			if err := tmpl.instantiateChildren(w, nodeChild, append(replacements, nodeChild.replacements...)); err != nil {
				return err
			}
		}
		return nil
	case ifStatement:
		// Execute the body once if any child with the label exists.
		// NOTE(review): the body runs against the current node (not the
		// matching child) — presumably intentional, since "if" is a guard.
		for _, nodeChild := range node.children {
			if nodeChild.label != tmpl.arg {
				continue
			}
			return tmpl.instantiateChildren(w, node, replacements)
		}
		return nil
	default:
		panic(fmt.Sprintf("case %d is not handled", tmpl.typ))
	}
}
// instantiateChildren executes each child statement of the template in
// order, stopping at the first error.
func (tmpl *Template) instantiateChildren(
	w io.Writer, node *Node, replacements []replacement,
) error {
	for _, child := range tmpl.children {
		if err := child.instantiate(w, node, replacements); err != nil {
			return err
		}
	}
	return nil
} | pkg/sql/ir/irgen/template/instantiate.go | 0.703753 | 0.407628 | instantiate.go | starcoder |
package main
import "fmt"
// Number is the constraint used by SumNumbers. Despite the name it also
// admits string, since string supports the += operator used for summing.
type Number interface {
	int64 | float64 | string
}
// main demonstrates summing map values four ways: per-type functions,
// a generic function with explicit type arguments, the same with inferred
// type arguments, and a generic function using a named constraint.
func main() {
	// Initialize a map for the integer values
	ints := map[string]int64{
		"first": 34,
		"second": 12,
	}
	// Initialize a map for the float values
	floats := map[string]float64{
		"first": 35.98,
		"second": 26.99,
	}
	// Initialize a map for the string values
	strings := map[string]string{
		"first": "a",
		"second": "b",
	}
	// non generic way
	fmt.Printf("Non-Generic Sums: %v and %v and %v\n",
		SumInts(ints),
		SumFloats(floats),
		SumStrings(strings))
	// generics, defining the type
	fmt.Printf("Generic Sums: %v and %v and %+v\n",
		SumIntsOrFloatsOrStrings[string, int64](ints),
		SumIntsOrFloatsOrStrings[string, float64](floats),
		SumIntsOrFloatsOrStrings[string, string](strings))
	// generics, without defining the type
	fmt.Printf("Generic Sums, type parameters inferred: %v and %v and %v\n",
		SumIntsOrFloatsOrStrings(ints),
		SumIntsOrFloatsOrStrings(floats),
		SumIntsOrFloatsOrStrings(strings))
	// generics with a defined multi type
	fmt.Printf("Generic Sums with Constraint: %v and %v and %v\n",
		SumNumbers(ints),
		SumNumbers(floats),
		SumNumbers(strings))
}
// SumInts adds together the values of m.
func SumInts(m map[string]int64) int64 {
	var total int64
	for _, value := range m {
		total += value
	}
	return total
}
// SumFloats adds together the values of m.
func SumFloats(m map[string]float64) float64 {
	var total float64
	for _, value := range m {
		total += value
	}
	return total
}
// SumStrings concatenates the values of m. Because map iteration order
// is unspecified, the result for multi-entry maps varies between runs.
func SumStrings(m map[string]string) string {
	var total string
	for _, value := range m {
		total += value
	}
	return total
}
// SumIntsOrFloatsOrStrings sums the values of map m. It supports floats
// and integers and strings as map values; "+" concatenates for strings.
func SumIntsOrFloatsOrStrings[K comparable, V int64 | float64 | string](m map[K]V) V {
	var total V
	for _, value := range m {
		total += value
	}
	return total
}
// SumNumbers sums the values of map m. Its supports integers
// and floats and strings as map values.
func SumNumbers[K comparable, V Number](m map[K]V) V {
var s V
for _, v := range m {
s += v
}
return s
} | 81_generics/2_sum generic/main.go | 0.70304 | 0.487002 | main.go | starcoder |
package lua
import (
"context"
"fmt"
"os"
)
// LValueType identifies the dynamic type of an LValue.
type LValueType int

const (
	LTNil LValueType = iota
	LTBool
	LTNumber
	LTString
	LTFunction
	LTUserData
	LTThread
	LTTable
	LTChannel
)

// lValueNames maps each LValueType constant (by ordinal) to its Lua type
// name; the order must stay in sync with the constant block above.
var lValueNames = [9]string{"nil", "boolean", "number", "string", "function", "userdata", "thread", "table", "channel"}

// String returns the Lua name of the type (e.g. "number", "table").
// It panics (index out of range) for values outside the defined range.
func (vt LValueType) String() string {
	return lValueNames[int(vt)]
}
// LValue is the interface implemented by every Lua value. The
// unexported assert* methods provide cheap type checks for the VM.
type LValue interface {
	String() string
	Type() LValueType
	// To reduce `runtime.assertI2T2` costs, this method should be used instead
	// of the type assertion in heavy paths (typically inside the VM).
	assertFloat64() (float64, bool)
	// To reduce `runtime.assertI2T2` costs, this method should be used instead
	// of the type assertion in heavy paths (typically inside the VM).
	assertString() (string, bool)
	// To reduce `runtime.assertI2T2` costs, this method should be used instead
	// of the type assertion in heavy paths (typically inside the VM).
	assertFunction() (*LFunction, bool)
}
// LVIsFalse returns true if a given LValue is "falsy" in the Lua sense,
// i.e. it is nil or false; every other value is truthy.
func LVIsFalse(v LValue) bool { return v == LNil || v == LFalse }

// LVAsBool returns the Lua truthiness of v: false for nil and false,
// true for everything else.
func LVAsBool(v LValue) bool { return v != LNil && v != LFalse }
// LVAsString returns the string representation of v if v is a string or
// a number; any other type yields the empty string.
func LVAsString(v LValue) string {
	if s, ok := v.(LString); ok {
		return s.String()
	}
	if n, ok := v.(LNumber); ok {
		return n.String()
	}
	return ""
}
// LVCanConvToString reports whether v is a string or a number, i.e.
// whether LVAsString would yield a meaningful result.
func LVCanConvToString(v LValue) bool {
	if _, ok := v.(LString); ok {
		return true
	}
	_, ok := v.(LNumber)
	return ok
}
// LVAsNumber tries to convert a given LValue to a number: numbers are
// returned as-is and strings are parsed with parseNumber. Any other
// type, or an unparseable string, yields LNumber(0).
func LVAsNumber(v LValue) LNumber {
	switch lv := v.(type) {
	case LNumber:
		return lv
	case LString:
		if num, err := parseNumber(string(lv)); err == nil {
			return num
		}
	}
	return LNumber(0)
}
// LNilType is the type of the Lua nil value; LNil is its only instance.
type LNilType struct{}

func (nl *LNilType) String() string { return "nil" }
func (nl *LNilType) Type() LValueType { return LTNil }
func (nl *LNilType) assertFloat64() (float64, bool) { return 0, false }
func (nl *LNilType) assertString() (string, bool) { return "", false }
func (nl *LNilType) assertFunction() (*LFunction, bool) { return nil, false }

// LNil is the shared Lua nil value.
var LNil = LValue(&LNilType{})
// LBool is a Lua boolean value; LTrue and LFalse are the two instances.
type LBool bool

// String renders the boolean as the Lua literal "true" or "false".
func (bl LBool) String() string {
	if bool(bl) {
		return "true"
	}
	return "false"
}
func (bl LBool) Type() LValueType { return LTBool }
func (bl LBool) assertFloat64() (float64, bool) { return 0, false }
func (bl LBool) assertString() (string, bool) { return "", false }
func (bl LBool) assertFunction() (*LFunction, bool) { return nil, false }

var LTrue = LBool(true)
var LFalse = LBool(false)
// LString is a Lua string value.
type LString string

func (st LString) String() string { return string(st) }
func (st LString) Type() LValueType { return LTString }
func (st LString) assertFloat64() (float64, bool) { return 0, false }
func (st LString) assertString() (string, bool) { return string(st), true }
func (st LString) assertFunction() (*LFunction, bool) { return nil, false }
// Format implements fmt.Formatter. For the integer verbs %d and %i the
// string is parsed as a number first; on success the parsed number is
// printed with %d, otherwise the original string falls back to %s.
// All other verbs format the raw string directly.
//
// NOTE(review): the previous code had the error check inverted — it
// printed the zero-valued parse result when parsing FAILED and the raw
// string when it succeeded. Fixed here; confirm against the VM tests.
func (st LString) Format(f fmt.State, c rune) {
	switch c {
	case 'd', 'i':
		if nm, err := parseNumber(string(st)); err == nil {
			defaultFormat(nm, f, 'd')
		} else {
			defaultFormat(string(st), f, 's')
		}
	default:
		defaultFormat(string(st), f, c)
	}
}
// String renders the number the way Lua prints it: values passing the
// isInteger check print without a decimal part, others as floats.
func (nm LNumber) String() string {
	if isInteger(nm) {
		return fmt.Sprint(int64(nm))
	}
	return fmt.Sprint(float64(nm))
}
func (nm LNumber) Type() LValueType { return LTNumber }
func (nm LNumber) assertFloat64() (float64, bool) { return float64(nm), true }
func (nm LNumber) assertString() (string, bool) { return "", false }
func (nm LNumber) assertFunction() (*LFunction, bool) { return nil, false }

// Format implements fmt.Formatter, routing printf verbs onto the
// underlying integer or float representation ('i' is treated as 'd';
// unknown verbs pick int or float based on the value itself).
func (nm LNumber) Format(f fmt.State, c rune) {
	switch c {
	case 'q', 's':
		defaultFormat(nm.String(), f, c)
	case 'b', 'c', 'd', 'o', 'x', 'X', 'U':
		defaultFormat(int64(nm), f, c)
	case 'e', 'E', 'f', 'F', 'g', 'G':
		defaultFormat(float64(nm), f, c)
	case 'i':
		defaultFormat(int64(nm), f, 'd')
	default:
		if isInteger(nm) {
			defaultFormat(int64(nm), f, c)
		} else {
			defaultFormat(float64(nm), f, c)
		}
	}
}
// LTable is a Lua table. It holds an array part for sequential integer
// keys, a generic map, and a dedicated map for string keys.
// NOTE(review): keys/k2i appear to track key insertion order for
// iteration — confirm against the table iteration code.
type LTable struct {
	Metatable LValue
	array []LValue
	dict map[LValue]LValue
	strdict map[string]LValue
	keys []LValue
	k2i map[LValue]int
}

func (tb *LTable) String() string { return fmt.Sprintf("table: %p", tb) }
func (tb *LTable) Type() LValueType { return LTTable }
func (tb *LTable) assertFloat64() (float64, bool) { return 0, false }
func (tb *LTable) assertString() (string, bool) { return "", false }
func (tb *LTable) assertFunction() (*LFunction, bool) { return nil, false }
// LFunction is a Lua function value. IsG distinguishes a Go-implemented
// function (GFunction) from a compiled Lua prototype (Proto).
type LFunction struct {
	IsG bool
	Env *LTable
	Proto *FunctionProto
	GFunction LGFunction
	Upvalues []*Upvalue
}

// LGFunction is the signature of Go functions callable from Lua.
// The int result presumably counts the values pushed for the caller —
// confirm against the VM call convention.
type LGFunction func(*LState) int

func (fn *LFunction) String() string { return fmt.Sprintf("function: %p", fn) }
func (fn *LFunction) Type() LValueType { return LTFunction }
func (fn *LFunction) assertFloat64() (float64, bool) { return 0, false }
func (fn *LFunction) assertString() (string, bool) { return "", false }
func (fn *LFunction) assertFunction() (*LFunction, bool) { return fn, true }
// Global holds state shared by every LState of a program: the main and
// currently running threads, the registry and globals tables, cached
// built-in metatables, temporary files, and a GC counter.
type Global struct {
	MainThread *LState
	CurrentThread *LState
	Registry *LTable
	Global *LTable
	builtinMts map[int]LValue
	tempFiles []*os.File
	gccount int32
}
// LState is a Lua execution thread (the VM state). It also implements
// LValue so coroutines can be handled as Lua values of type thread.
type LState struct {
	G *Global
	Parent *LState
	Env *LTable
	Panic func(*LState)
	Dead bool
	Options Options
	stop int32
	reg *registry
	stack callFrameStack
	alloc *allocator
	currentFrame *callFrame
	wrapped bool
	uvcache *Upvalue
	hasErrorFunc bool
	mainLoop func(*LState, *callFrame)
	ctx context.Context
}

func (ls *LState) String() string { return fmt.Sprintf("thread: %p", ls) }
func (ls *LState) Type() LValueType { return LTThread }
func (ls *LState) assertFloat64() (float64, bool) { return 0, false }
func (ls *LState) assertString() (string, bool) { return "", false }
func (ls *LState) assertFunction() (*LFunction, bool) { return nil, false }
// LUserData wraps an arbitrary Go value so it can be passed through Lua.
type LUserData struct {
	value interface{}
	Env *LTable
	Metatable LValue
}

func (ud *LUserData) String() string { return fmt.Sprintf("userdata: %p", ud) }
func (ud *LUserData) Type() LValueType { return LTUserData }
func (ud *LUserData) assertFloat64() (float64, bool) { return 0, false }
func (ud *LUserData) assertString() (string, bool) { return "", false }
func (ud *LUserData) assertFunction() (*LFunction, bool) { return nil, false }

// Value returns the wrapped Go value.
func (ud *LUserData) Value() interface{} { return ud.value }
type LChannel chan LValue
func (ch LChannel) String() string { return fmt.Sprintf("channel: %p", ch) }
func (ch LChannel) Type() LValueType { return LTChannel }
func (ch LChannel) assertFloat64() (float64, bool) { return 0, false }
func (ch LChannel) assertString() (string, bool) { return "", false }
func (ch LChannel) assertFunction() (*LFunction, bool) { return nil, false } | value.go | 0.662906 | 0.48377 | value.go | starcoder |
package immutableList
import "fmt"
// node is the internal interface shared by the three tree-node kinds
// (leafNode, emptyNode, branchNode) that back the immutable list.
// Every "mutating" operation returns a new node; receivers are never
// modified in place.
type node interface {
	size() int
	get(index int) Object
	getFirst() Object
	getLast() Object
	append(value Object) node
	prepend(value Object) node
	appendNode(n node) node
	prependNode(n node) node
	insert(index int, value Object) node
	delete(index int) node
	set(index int, value Object) node
	head(index int) node
	tail(index int) node
	pop() (Object, node)
	depth() int
	forEach(proc Processor)
	visit(base int, start int, limit int, v Visitor)
	checkInvariants(report reporter, isRoot bool)
	rotateLeft(parentLeft node) node
	rotateRight(parentRight node) node
	next(state *iteratorState) (*iteratorState, Object)
	left() node
	right() node
}
// iteratorState is one frame of the iterator's explicit traversal
// stack; next points at the parent frame.
type iteratorState struct {
	next *iteratorState
	currentNode node
	currentIndex int
}

// iteratorImpl walks the tree using a linked stack of iteratorState
// frames; value caches the element produced by the last Next call.
type iteratorImpl struct {
	state *iteratorState
	value Object
}
// createIterator builds an Iterator positioned before the first element
// of n; an empty tree yields an already-exhausted iterator.
func createIterator(n node) Iterator {
	impl := &iteratorImpl{}
	if n.size() > 0 {
		impl.state = &iteratorState{currentNode: n}
	}
	return impl
}
// Next advances the iterator and reports whether another element is
// available; it must be called before the first Get.
// (Receiver renamed from the non-idiomatic "this".)
func (it *iteratorImpl) Next() bool {
	if it.state == nil {
		return false
	}
	it.state, it.value = it.state.currentNode.next(it.state)
	return true
}

// Get returns the element the iterator advanced to on the last
// successful Next call.
func (it *iteratorImpl) Get() Object {
	return it.value
}
const (
	// maxValuesPerLeaf caps how many values a single leaf holds before
	// an insert or append splits it into a branch.
	maxValuesPerLeaf = 32
)

// leafNode stores up to maxValuesPerLeaf values in a flat slice.
type leafNode struct {
	values []Object
}
// createSingleValueLeafNode wraps a single value in a leaf node.
func createSingleValueLeafNode(value Object) node {
	return createMultiValueLeafNode([]Object{value})
}

// createMultiValueLeafNode wraps the given slice (taken over, not
// copied) in a leaf node.
func createMultiValueLeafNode(values []Object) node {
	return &leafNode{values: values}
}
// get returns the value at index (bounds checked by the slice access).
func (a *leafNode) get(index int) Object {
	return a.values[index]
}

// getFirst returns the first value; panics on an empty slice.
func (a *leafNode) getFirst() Object {
	return a.values[0]
}

// getLast returns the last value; panics on an empty slice.
func (a *leafNode) getLast() Object {
	return a.values[len(a.values)-1]
}

// pop returns the first value together with a leaf lacking it.
func (a *leafNode) pop() (Object, node) {
	return a.values[0], a.delete(0)
}
// set returns a copy of the leaf with the value at index replaced.
// It panics when index is out of range.
func (a *leafNode) set(index int, value Object) node {
	if index < 0 || index >= len(a.values) {
		panic(fmt.Sprintf("invalid index for leaf node: %d", index))
	}
	updated := append([]Object(nil), a.values...)
	updated[index] = value
	return createMultiValueLeafNode(updated)
}
// insert returns a new tree with value placed at index. Index 0 and
// len(values) delegate to prepend/append; when the leaf is full it
// splits into a branch of two leaves at the insertion point.
func (a *leafNode) insert(index int, value Object) node {
	currentSize := len(a.values)
	if index < 0 || index > currentSize {
		panic(fmt.Sprintf("invalid index for leaf node: %d", index))
	}
	if index == 0 {
		return a.prepend(value)
	} else if index == currentSize {
		return a.append(value)
	} else if currentSize < maxValuesPerLeaf {
		// Room left: build one bigger leaf with value spliced in.
		values := make([]Object, currentSize+1)
		copy(values[0:], a.values[0:index])
		values[index] = value
		copy(values[(index+1):], a.values[index:])
		return createMultiValueLeafNode(values)
	} else {
		// Leaf is full: split around the insertion point.
		left := make([]Object, index)
		copy(left[0:], a.values[0:index])
		right := make([]Object, currentSize+1-index)
		right[0] = value
		copy(right[1:], a.values[index:])
		return createBranchNode(createMultiValueLeafNode(left), createMultiValueLeafNode(right))
	}
}
// delete returns a new leaf without the value at index, or the shared
// empty node when the last value is removed. Panics on out-of-range.
func (a *leafNode) delete(index int) node {
	currentSize := len(a.values)
	if index < 0 || index >= currentSize {
		panic(fmt.Sprintf("invalid index for leaf node: %d", index))
	}
	if len(a.values) == 1 {
		return createEmptyLeafNode()
	}
	values := make([]Object, currentSize-1)
	if index == 0 {
		copy(values[0:], a.values[1:])
	} else if index == currentSize-1 {
		copy(values[0:], a.values[0:(currentSize-1)])
	} else {
		copy(values[0:], a.values[0:index])
		copy(values[index:], a.values[(index+1):])
	}
	return createMultiValueLeafNode(values)
}
// append returns a leaf with value added at the end, or — when this
// leaf is already full — a branch holding this leaf and a new
// single-value leaf.
func (a *leafNode) append(value Object) node {
	if len(a.values) < maxValuesPerLeaf {
		combined := make([]Object, 0, len(a.values)+1)
		combined = append(combined, a.values...)
		combined = append(combined, value)
		return createMultiValueLeafNode(combined)
	}
	return createBranchNode(a, createSingleValueLeafNode(value))
}

// prepend returns a leaf with value added at the front, or — when this
// leaf is already full — a branch with a new single-value leaf on the
// left.
func (a *leafNode) prepend(value Object) node {
	if len(a.values) < maxValuesPerLeaf {
		combined := make([]Object, 0, len(a.values)+1)
		combined = append(combined, value)
		combined = append(combined, a.values...)
		return createMultiValueLeafNode(combined)
	}
	return createBranchNode(createSingleValueLeafNode(value), a)
}
// forEach applies proc to every value in order.
func (a *leafNode) forEach(proc Processor) {
	for _, value := range a.values {
		proc(value)
	}
}

// visit calls v with the absolute position (base+i) for each value in
// the half-open local range [start, limit), clamped to the leaf size.
func (a *leafNode) visit(base int, start int, limit int, v Visitor) {
	size := len(a.values)
	if limit > size {
		limit = size
	}
	for i := start; i < limit; i++ {
		v(base+i, a.values[i])
	}
}
// head returns a node holding the first index values (empty node for 0,
// the receiver itself for the full length). Panics on out-of-range.
func (a *leafNode) head(index int) node {
	currentSize := len(a.values)
	if index < 0 || index > currentSize {
		panic(fmt.Sprintf("invalid index for leaf node: %d", index))
	}
	if index == 0 {
		return createEmptyLeafNode()
	} else if index == currentSize {
		return a
	} else {
		values := make([]Object, index)
		copy(values[0:], a.values[0:index])
		return createMultiValueLeafNode(values)
	}
}

// tail returns a node holding the values from index onward (the
// receiver for 0, empty node for the full length). Panics on
// out-of-range.
func (a *leafNode) tail(index int) node {
	currentSize := len(a.values)
	if index < 0 || index > currentSize {
		panic(fmt.Sprintf("invalid index for leaf node: %d", index))
	}
	if index == 0 {
		return a
	} else if index == currentSize {
		return createEmptyLeafNode()
	} else {
		values := make([]Object, currentSize-index)
		copy(values[0:], a.values[index:])
		return createMultiValueLeafNode(values)
	}
}
// left/right are only meaningful for branch nodes.
func (a *leafNode) left() node {
	panic("not implemented for leaf nodes")
}
func (a *leafNode) right() node {
	panic("not implemented for leaf nodes")
}

// depth is 0 for leaves; branch depth is counted upward from here.
func (a *leafNode) depth() int {
	return 0
}

// size is the number of stored values.
func (a *leafNode) size() int {
	return len(a.values)
}
// appendNode concatenates n after this leaf, merging two small leaves
// into one when their combined size still fits in a single leaf.
func (a *leafNode) appendNode(n node) node {
	if n.size() == 0 {
		return a
	}
	if o, matches := n.(*leafNode); matches {
		combinedSize := a.size() + o.size()
		if combinedSize <= maxValuesPerLeaf {
			return appendLeafNodeValues(combinedSize, a, o)
		}
	}
	return createBranchNode(a, n)
}

// prependNode concatenates n before this leaf (mirror of appendNode).
func (a *leafNode) prependNode(n node) node {
	if n.size() == 0 {
		return a
	}
	if o, matches := n.(*leafNode); matches {
		combinedSize := o.size() + a.size()
		if combinedSize <= maxValuesPerLeaf {
			return appendLeafNodeValues(combinedSize, o, a)
		}
	}
	return createBranchNode(n, a)
}
func (a *leafNode) next(state *iteratorState) (*iteratorState, Object) {
if state == nil || state.currentNode != a {
state = &iteratorState{currentNode: a, next: state}
}
value := a.values[state.currentIndex]
state.currentIndex++
if state.currentIndex == len(a.values) {
return state.next, value
} else {
return state, value
}
}
func appendLeafNodeValues(combinedSize int, a *leafNode, b *leafNode) node {
values := make([]Object, combinedSize)
copy(values[0:], a.values)
copy(values[a.size():], b.values)
return createMultiValueLeafNode(values)
}
// checkInvariants verifies the leaf holds between 1 and
// maxValuesPerLeaf values (empty lists use emptyNode instead).
func (a *leafNode) checkInvariants(report reporter, isRoot bool) {
	currentSize := len(a.values)
	if currentSize < 1 || currentSize > maxValuesPerLeaf {
		report(fmt.Sprintf("incorrect size: currentSize=%d", currentSize))
	}
}

// Rotations only apply to branch nodes.
func (a *leafNode) rotateLeft(parentLeft node) node {
	panic("not implemented for leaf node")
}
func (a *leafNode) rotateRight(parentRight node) node {
	panic("not implemented for leaf node")
}
// emptyNode represents a list with no elements; a single shared
// instance is used for every empty list.
type emptyNode struct {
}

// sharedEmptyNode is the singleton returned by createEmptyLeafNode.
var sharedEmptyNode node = &emptyNode{}

// createEmptyLeafNode returns the shared empty node.
func createEmptyLeafNode() node {
	return sharedEmptyNode
}
// The emptyNode methods below mostly panic (there is nothing to read)
// or return the receiver/argument unchanged. Receiver names are
// unified to "e" (they previously alternated between "e" and "b"), and
// the rotate panic messages now correctly say "empty nodes" instead of
// "leaf nodes". Behavior is otherwise unchanged.

// get panics: an empty node holds no values.
func (e *emptyNode) get(index int) Object {
	panic("not implemented for empty nodes")
}
func (e *emptyNode) getFirst() Object {
	panic("not implemented for empty nodes")
}
func (e *emptyNode) getLast() Object {
	panic("not implemented for empty nodes")
}
func (e *emptyNode) pop() (Object, node) {
	panic("not implemented for empty nodes")
}
func (e *emptyNode) set(index int, value Object) node {
	panic("not implemented for empty nodes")
}

// insert only accepts index 0, producing a single-value leaf.
func (e *emptyNode) insert(index int, value Object) node {
	if index == 0 {
		return createSingleValueLeafNode(value)
	} else {
		panic(fmt.Sprintf("invalid index for empty node: %d", index))
	}
}
func (e *emptyNode) delete(index int) node {
	panic("not implemented for empty nodes")
}

// head/tail of an empty node are only defined at index 0.
func (e *emptyNode) head(index int) node {
	if index == 0 {
		return e
	} else {
		panic(fmt.Sprintf("invalid index for empty node: %d", index))
	}
}
func (e *emptyNode) tail(index int) node {
	if index == 0 {
		return e
	} else {
		panic(fmt.Sprintf("invalid index for empty node: %d", index))
	}
}

// append/prepend on an empty node both yield a single-value leaf.
func (e *emptyNode) append(value Object) node {
	return createSingleValueLeafNode(value)
}
func (e *emptyNode) prepend(value Object) node {
	return createSingleValueLeafNode(value)
}

// Iteration over an empty node does nothing.
func (e *emptyNode) forEach(proc Processor) {
}
func (e *emptyNode) visit(base int, start int, limit int, v Visitor) {
}
func (e *emptyNode) left() node {
	panic("not implemented for empty nodes")
}
func (e *emptyNode) right() node {
	panic("not implemented for empty nodes")
}
func (e *emptyNode) depth() int {
	return 0
}
func (e *emptyNode) size() int {
	return 0
}

// checkInvariants: an empty node is only legal as the root.
func (e *emptyNode) checkInvariants(report reporter, isRoot bool) {
	if !isRoot {
		report("emptyNode: should not exist below root")
	}
}
func (e *emptyNode) rotateLeft(parentLeft node) node {
	panic("not implemented for empty nodes")
}
func (e *emptyNode) rotateRight(parentRight node) node {
	panic("not implemented for empty nodes")
}

// appendNode/prependNode: concatenating anything with an empty node
// yields the other node; only depth-0 nodes are legal here.
func (e *emptyNode) appendNode(n node) node {
	if n.depth() != 0 {
		panic("appending branch to leaf")
	}
	return n
}
func (e *emptyNode) prependNode(n node) node {
	if n.depth() != 0 {
		panic("prepending branch to leaf")
	}
	return n
}

// next: an empty node immediately exhausts the iterator.
func (e *emptyNode) next(state *iteratorState) (*iteratorState, Object) {
	return nil, nil
}
// branchNode is an interior node; size and depth are cached at
// construction so queries are O(1).
type branchNode struct {
	leftChild node
	rightChild node
	mySize int
	myDepth int
}

// createBranchNode joins two children without rebalancing.
func createBranchNode(leftChild node, rightChild node) node {
	return &branchNode{
		leftChild: leftChild,
		rightChild: rightChild,
		mySize: leftChild.size() + rightChild.size(),
		myDepth: 1 + maxDepth(leftChild, rightChild),
	}
}
// createBalancedBranchNode joins two children, rotating when their
// depths differ by more than one (AVL-style rebalancing).
func createBalancedBranchNode(left node, right node) node {
	diff := left.depth() - right.depth()
	if diff > 1 {
		return left.rotateRight(right)
	} else if diff < -1 {
		return right.rotateLeft(left)
	} else {
		return createBranchNode(left, right)
	}
}
// append adds value at the end of the right subtree, rebalancing.
func (b *branchNode) append(value Object) node {
	return createBalancedBranchNode(b.leftChild, b.rightChild.append(value))
}

// prepend adds value at the front of the left subtree, rebalancing.
func (b *branchNode) prepend(value Object) node {
	return createBalancedBranchNode(b.leftChild.prepend(value), b.rightChild)
}

// forEach visits the left subtree, then the right.
func (b *branchNode) forEach(proc Processor) {
	b.leftChild.forEach(proc)
	b.rightChild.forEach(proc)
}

// visit dispatches the [start, limit) window to whichever children it
// overlaps, shifting by the left child's size for the right subtree.
func (b *branchNode) visit(base int, start int, limit int, v Visitor) {
	visitNode(b.leftChild, 0, base, start, limit, v)
	visitNode(b.rightChild, b.leftChild.size(), base, start, limit, v)
}
// visitNode shifts the (base, start, limit) window by offset, clamps it
// to the node's bounds, and visits the node only when the resulting
// window is non-empty.
func visitNode(n node, offset int, base int, start int, limit int, v Visitor) {
	shiftedBase := base + offset
	shiftedStart := start - offset
	shiftedLimit := limit - offset
	if shiftedStart < 0 {
		shiftedStart = 0
	}
	if size := n.size(); shiftedLimit > size {
		shiftedLimit = size
	}
	if shiftedLimit > shiftedStart {
		n.visit(shiftedBase, shiftedStart, shiftedLimit, v)
	}
}
// maxDepth reports the larger of the two children's depths.
func maxDepth(leftChild node, rightChild node) int {
	l, r := leftChild.depth(), rightChild.depth()
	if l >= r {
		return l
	}
	return r
}

// depthDiff reports the absolute difference of the children's depths.
func depthDiff(leftChild node, rightChild node) int {
	d := leftChild.depth() - rightChild.depth()
	if d < 0 {
		return -d
	}
	return d
}
// get routes the lookup to whichever child covers index.
func (b *branchNode) get(index int) Object {
	leftSize := b.leftChild.size()
	if index < leftSize {
		return b.leftChild.get(index)
	} else {
		return b.rightChild.get(index - leftSize)
	}
}

func (b *branchNode) getFirst() Object {
	return b.leftChild.getFirst()
}

func (b *branchNode) getLast() Object {
	return b.rightChild.getLast()
}

// pop removes and returns the first element, collapsing to the right
// child when the left side becomes empty.
func (b *branchNode) pop() (Object, node) {
	value, newLeft := b.leftChild.pop()
	if newLeft.size() == 0 {
		return value, b.rightChild
	} else {
		return value, createBalancedBranchNode(newLeft, b.rightChild)
	}
}

// set replaces the value at index; depths are unchanged so no
// rebalancing is needed.
func (b *branchNode) set(index int, value Object) node {
	leftSize := b.leftChild.size()
	if index < leftSize {
		return createBranchNode(b.leftChild.set(index, value), b.rightChild)
	} else {
		return createBranchNode(b.leftChild, b.rightChild.set(index-leftSize, value))
	}
}
// rotateLeft makes this (right-heavy) node's sibling parentLeft part of
// a new left subtree, choosing a single or double rotation based on
// which of this node's children is deeper.
// NOTE(review): rotateLeft uses a strict > while rotateRight uses >= —
// presumably intentional tie-breaking; confirm with the balance tests.
func (b *branchNode) rotateLeft(parentLeft node) node {
	if b.leftChild.depth() > b.rightChild.depth() {
		return createBranchNode(createBranchNode(parentLeft, b.leftChild.left()), createBranchNode(b.leftChild.right(), b.rightChild))
	} else {
		return createBranchNode(createBranchNode(parentLeft, b.leftChild), b.rightChild)
	}
}

// rotateRight is the mirror image of rotateLeft for left-heavy trees.
func (b *branchNode) rotateRight(parentRight node) node {
	if b.leftChild.depth() >= b.rightChild.depth() {
		return createBranchNode(b.leftChild, createBranchNode(b.rightChild, parentRight))
	} else {
		return createBranchNode(createBranchNode(b.leftChild, b.rightChild.left()), createBranchNode(b.rightChild.right(), parentRight))
	}
}
// insert delegates to the child covering index and rebalances the
// result.
func (b *branchNode) insert(index int, value Object) node {
	var newLeft node
	var newRight node
	leftSize := b.leftChild.size()
	if index < leftSize {
		newLeft = b.leftChild.insert(index, value)
		newRight = b.rightChild
	} else {
		newLeft = b.leftChild
		newRight = b.rightChild.insert(index-leftSize, value)
	}
	return createBalancedBranchNode(newLeft, newRight)
}

// delete removes the element at index, collapsing to the sibling when a
// child becomes empty, otherwise rebalancing the pair.
func (b *branchNode) delete(index int) node {
	var newLeft, newRight node
	leftSize := b.leftChild.size()
	if index < leftSize {
		newLeft = b.leftChild.delete(index)
		newRight = b.rightChild
		if newLeft.size() == 0 {
			return newRight
		}
	} else {
		newLeft = b.leftChild
		newRight = b.rightChild.delete(index - leftSize)
		if newRight.size() == 0 {
			return newLeft
		}
	}
	return createBalancedBranchNode(newLeft, newRight)
}
// head returns a tree holding the first index elements, concatenating
// the left child with the relevant prefix of the right child.
func (b *branchNode) head(index int) node {
	leftSize := b.leftChild.size()
	if index < leftSize {
		return b.leftChild.head(index)
	} else {
		newRight := b.rightChild.head(index - leftSize)
		return appendNodes(b.leftChild, newRight)
	}
}

// tail returns a tree holding the elements from index onward.
func (b *branchNode) tail(index int) node {
	leftSize := b.leftChild.size()
	if index < leftSize {
		newLeft := b.leftChild.tail(index)
		return appendNodes(newLeft, b.rightChild)
	} else {
		return b.rightChild.tail(index - leftSize)
	}
}

func (b *branchNode) left() node {
	return b.leftChild
}

func (b *branchNode) right() node {
	return b.rightChild
}
// depth and size return the values cached at construction time.
func (b *branchNode) depth() int {
	return b.myDepth
}

func (b *branchNode) size() int {
	return b.mySize
}

// appendNode attaches a shallower-or-equal tree n after this one,
// descending the right spine until the depths are within one of each
// other, then rebalancing on the way out.
func (b *branchNode) appendNode(n node) node {
	if n.depth() > b.depth() {
		panic("appending larger node to smaller node")
	}
	if depthDiff(n, b) <= 1 {
		return createBranchNode(b, n)
	} else {
		return createBalancedBranchNode(b.leftChild, b.rightChild.appendNode(n))
	}
}

// prependNode mirrors appendNode along the left spine.
func (b *branchNode) prependNode(n node) node {
	if n.depth() > b.depth() {
		panic("prepending larger node to smaller node")
	}
	if depthDiff(n, b) <= 1 {
		return createBranchNode(n, b)
	} else {
		return createBalancedBranchNode(b.leftChild.prependNode(n), b.rightChild)
	}
}
// appendNodes concatenates a and b, attaching the shallower tree to the
// deeper one so the result stays balanced; empty operands are elided.
func appendNodes(a node, b node) node {
	switch {
	case a.size() == 0:
		return b
	case b.size() == 0:
		return a
	case a.depth() < b.depth():
		return b.prependNode(a)
	default:
		return a.appendNode(b)
	}
}
// checkInvariants verifies the cached depth and size agree with the
// children and that the tree is balanced, then recurses.
func (b *branchNode) checkInvariants(report reporter, isRoot bool) {
	if b.depth() != maxDepth(b.leftChild, b.rightChild)+1 {
		report(fmt.Sprintf("incorrect depth: depth=%d leftDepth=%d rightDepth=%d", b.depth(), b.leftChild.depth(), b.rightChild.depth()))
	}
	if depthDiff(b.leftChild, b.rightChild) > 1 {
		report(fmt.Sprintf("invalid child depths: leftDepth=%d rightDepth=%d", b.leftChild.depth(), b.rightChild.depth()))
	}
	if b.size() != b.leftChild.size()+b.rightChild.size() {
		report(fmt.Sprintf("incorrect size: size=%d leftSize=%d rightSize=%d", b.size(), b.leftChild.size(), b.rightChild.size()))
	}
	b.leftChild.checkInvariants(report, false)
	b.rightChild.checkInvariants(report, false)
}
func (b *branchNode) next(state *iteratorState) (*iteratorState, Object) {
if state == nil || state.currentNode != b {
state = &iteratorState{currentNode: b, next: state}
}
switch state.currentIndex {
case 0:
state.currentIndex = 1
return b.leftChild.next(state)
case 1:
state.currentIndex = 2
return b.rightChild.next(state.next)
default:
panic("invalid index in iterator state")
}
} | node.go | 0.570092 | 0.521471 | node.go | starcoder |
package missing_identity_provider_isolation
import (
"github.com/threagile/threagile/model"
)
// Category returns the static metadata describing the
// "missing-identity-provider-isolation" risk rule.
func Category() model.RiskCategory {
	return model.RiskCategory{
		Id: "missing-identity-provider-isolation",
		Title: "Missing Identity Provider Isolation",
		Description: "Highly sensitive identity provider assets and their identity datastores should be isolated from other assets " +
			"by their own network segmentation trust-boundary (" + model.ExecutionEnvironment.String() + " boundaries do not count as network isolation).",
		Impact: "If this risk is unmitigated, attackers successfully attacking other components of the system might have an easy path towards " +
			"highly sensitive identity provider assets and their identity datastores, as they are not separated by network segmentation.",
		ASVS: "V1 - Architecture, Design and Threat Modeling Requirements",
		CheatSheet: "https://cheatsheetseries.owasp.org/cheatsheets/Attack_Surface_Analysis_Cheat_Sheet.html",
		Action: "Network Segmentation",
		Mitigation: "Apply a network segmentation trust-boundary around the highly sensitive identity provider assets and their identity datastores.",
		Check: "Are recommendations from the linked cheat sheet and referenced ASVS chapter applied?",
		Function: model.Operations,
		STRIDE: model.ElevationOfPrivilege,
		DetectionLogic: "In-scope identity provider assets and their identity datastores " +
			"when surrounded by other (not identity-related) assets (without a network trust-boundary in-between). " +
			"This risk is especially prevalent when other non-identity related assets are within the same execution environment (i.e. same database or same application server).",
		RiskAssessment: "Default is " + model.HighImpact.String() + " impact. The impact is increased to " + model.VeryHighImpact.String() + " when the asset missing the " +
			"trust-boundary protection is rated as " + model.StrictlyConfidential.String() + " or " + model.MissionCritical.String() + ".",
		FalsePositives: "When all assets within the network segmentation trust-boundary are hardened and protected to the same extend as if all were " +
			"identity providers with data of highest sensitivity.",
		ModelFailurePossibleReason: false,
		CWE: 1008,
	}
}
// SupportedTags reports that this rule does not use any model tags.
func SupportedTags() []string {
	return make([]string, 0)
}
// GenerateRisks flags every in-scope identity-related technical asset
// that shares its network trust boundary or execution environment with
// a non-identity asset (one not tolerated near high-value targets).
func GenerateRisks() []model.Risk {
	risks := make([]model.Risk, 0)
	for _, technicalAsset := range model.ParsedModelRoot.TechnicalAssets {
		if technicalAsset.OutOfScope || !technicalAsset.Technology.IsIdentityRelated() {
			continue
		}
		// Impact is raised when the identity asset itself is highly sensitive.
		moreImpact := technicalAsset.Confidentiality == model.StrictlyConfidential ||
			technicalAsset.Integrity == model.MissionCritical ||
			technicalAsset.Availability == model.MissionCritical
		sameExecutionEnv := false
		createRiskEntry := false
		// Inner loop over all assets: look for non-identity neighbors in the
		// same execution environment or the same network-only trust boundary.
		// (Previously this re-looked-up the candidate by id inside the loop
		// and used a redundant blank identifier in the range clause.)
		for sparringAssetCandidateId, sparringAssetCandidate := range model.ParsedModelRoot.TechnicalAssets {
			if technicalAsset.Id == sparringAssetCandidateId {
				continue
			}
			if sparringAssetCandidate.Technology.IsIdentityRelated() || sparringAssetCandidate.Technology.IsCloseToHighValueTargetsTolerated() {
				continue
			}
			if technicalAsset.IsSameExecutionEnvironment(sparringAssetCandidateId) {
				createRiskEntry = true
				sameExecutionEnv = true
			} else if technicalAsset.IsSameTrustBoundaryNetworkOnly(sparringAssetCandidateId) {
				createRiskEntry = true
			}
		}
		if createRiskEntry {
			risks = append(risks, createRisk(technicalAsset, moreImpact, sameExecutionEnv))
		}
	}
	return risks
}
func createRisk(techAsset model.TechnicalAsset, moreImpact bool, sameExecutionEnv bool) model.Risk {
impact := model.HighImpact
likelihood := model.Unlikely
others := "<b>in the same network segment</b>"
if moreImpact {
impact = model.VeryHighImpact
}
if sameExecutionEnv {
likelihood = model.Likely
others = "<b>in the same execution environment</b>"
}
risk := model.Risk{
Category: Category(),
Severity: model.CalculateSeverity(likelihood, impact),
ExploitationLikelihood: likelihood,
ExploitationImpact: impact,
Title: "<b>Missing Identity Provider Isolation</b> to further encapsulate and protect identity-related asset <b>" + techAsset.Title + "</b> against unrelated " +
"lower protected assets " + others + ", which might be easier to compromise by attackers",
MostRelevantTechnicalAssetId: techAsset.Id,
DataBreachProbability: model.Improbable,
DataBreachTechnicalAssetIDs: []string{techAsset.Id},
}
risk.SyntheticId = risk.Category.Id + "@" + techAsset.Id
return risk
} | risks/built-in/missing-identity-provider-isolation/missing-identity-provider-isolation-rule.go | 0.690976 | 0.478651 | missing-identity-provider-isolation-rule.go | starcoder |
package ion
import (
"bytes"
)
// This file contains the container-like types: List, SExp, and Struct.
// Text representations of the typed null for each container type.
const (
	textNullList = "null.list"
	textNullSExp = "null.sexp"
	textNullStruct = "null.struct"
)
// List is an ordered collections of Values. The contents of the list are
// heterogeneous, each element can have a different type. Homogeneous lists
// may be imposed by schema validation tools.
type List struct {
	annotations []Symbol
	values []Value
}

// Value returns the values that this list holds.
func (lst List) Value() []Value {
	return lst.values
}

// Annotations satisfies Value.
func (lst List) Annotations() []Symbol {
	return lst.annotations
}

// Binary satisfies Value. It currently always returns nil.
func (lst List) Binary() []byte {
	// TODO: Figure out how we want to do binary serialization of containers.
	return nil
}

// Text satisfies Value. A nil value slice renders as "null.list";
// otherwise the elements' text forms are joined with commas.
// NOTE(review): the enclosing '[' ']' delimiters are not emitted here —
// presumably the caller adds them; confirm.
func (lst List) Text() []byte {
	if lst.values == nil {
		return []byte(textNullList)
	}
	parts := make([][]byte, len(lst.values))
	for index, value := range lst.values {
		parts[index] = value.Text()
	}
	return bytes.Join(parts, []byte(","))
}

// IsNull satisfies Value. Only a nil values slice is null; an empty
// non-nil slice is an empty (non-null) list.
func (lst List) IsNull() bool {
	return lst.values == nil
}

// Type satisfies Value.
func (List) Type() Type {
	return TypeList
}
// SExp (S-Expression) is an ordered collection of values with application-defined
// semantics. The contents of the list are
// heterogeneous, each element can have a different type. Homogeneous lists
// may be imposed by schema validation tools.
type SExp struct {
	annotations []Symbol
	values []Value
}

// Value returns the values held within the s-expression.
func (s SExp) Value() []Value {
	return s.values
}

// Annotations satisfies Value.
func (s SExp) Annotations() []Symbol {
	return s.annotations
}

// Binary satisfies Value. It currently always returns nil.
func (s SExp) Binary() []byte {
	// TODO: Figure out how we want to do binary serialization of containers.
	return nil
}

// Text satisfies Value. A nil value slice renders as "null.sexp";
// otherwise the elements' text forms are joined with spaces.
// NOTE(review): the enclosing '(' ')' delimiters are not emitted here —
// presumably the caller adds them; confirm.
func (s SExp) Text() []byte {
	if s.values == nil {
		return []byte(textNullSExp)
	}
	parts := make([][]byte, len(s.values))
	for index, value := range s.values {
		parts[index] = value.Text()
	}
	return bytes.Join(parts, []byte(" "))
}

// IsNull satisfies Value. Only a nil values slice is null.
func (s SExp) IsNull() bool {
	return s.values == nil
}

// Type satisfies Value.
func (SExp) Type() Type {
	return TypeSExp
}
// StructField represents the field of a Struct.
type StructField struct {
	Symbol Symbol
	Value Value
}

// Struct is an unordered collection of tagged values.
// When two fields in the same struct have the same name we say there
// are “repeated names” or “repeated fields”. All such fields must be
// preserved, any StructField that has a repeated name must not be discarded.
type Struct struct {
	annotations []Symbol
	fields []StructField
}

// Value returns the fields that this struct holds.
func (s Struct) Value() []StructField {
	return s.fields
}

// Annotations satisfies Value.
func (s Struct) Annotations() []Symbol {
	return s.annotations
}

// Binary satisfies Value. It currently always returns nil.
func (s Struct) Binary() []byte {
	// TODO: Figure out how we want to do binary serialization of containers.
	return nil
}

// Text satisfies Value. A nil field slice renders as "null.struct";
// otherwise each field renders as "symbol:value", joined with commas.
// NOTE(review): the enclosing '{' '}' delimiters are not emitted here —
// presumably the caller adds them; confirm.
func (s Struct) Text() []byte {
	if s.fields == nil {
		return []byte(textNullStruct)
	}
	parts := make([][]byte, len(s.fields))
	for index, fld := range s.fields {
		line := append(fld.Symbol.Text(), ':')
		parts[index] = append(line, fld.Value.Text()...)
	}
	return bytes.Join(parts, []byte(","))
}
// IsNull satisfies Value. Only a nil field slice is null (null.struct);
// an empty non-nil slice is an empty struct.
func (s Struct) IsNull() bool {
	return s.fields == nil
}
// Type satisfies Value.
func (Struct) Type() Type {
return TypeStruct
} | ion/types_container.go | 0.626467 | 0.476397 | types_container.go | starcoder |
package keyfilter
import "github.com/CyCoreSystems/ari"
// Kind filters a list of keys by a particular Kind
func Kind(kind string, in []*ari.Key) (out []*ari.Key) {
	for _, key := range in {
		if key.Kind != kind {
			continue
		}
		out = append(out, key)
	}
	return out
}
// Applications returns the Application keys from the given list of keys.
func Applications(in []*ari.Key) (out []*ari.Key) {
	return Kind(ari.ApplicationKey, in)
}

// Bridges returns the Bridge keys from the given list of keys.
func Bridges(in []*ari.Key) (out []*ari.Key) {
	return Kind(ari.BridgeKey, in)
}

// Channels returns the Channel keys from the given list of keys.
func Channels(in []*ari.Key) (out []*ari.Key) {
	return Kind(ari.ChannelKey, in)
}

// DeviceStates returns the DeviceState keys from the given list of keys.
func DeviceStates(in []*ari.Key) (out []*ari.Key) {
	return Kind(ari.DeviceStateKey, in)
}

// Endpoints returns the Endpoint keys from the given list of keys.
func Endpoints(in []*ari.Key) (out []*ari.Key) {
	return Kind(ari.EndpointKey, in)
}

// LiveRecordings returns the LiveRecording keys from the given list of keys.
func LiveRecordings(in []*ari.Key) (out []*ari.Key) {
	return Kind(ari.LiveRecordingKey, in)
}

// Loggings returns the Logging keys from the given list of keys.
func Loggings(in []*ari.Key) (out []*ari.Key) {
	return Kind(ari.LoggingKey, in)
}

// Mailboxes returns the Mailbox keys from the given list of keys.
func Mailboxes(in []*ari.Key) (out []*ari.Key) {
	return Kind(ari.MailboxKey, in)
}

// Modules returns the Module keys from the given list of keys.
func Modules(in []*ari.Key) (out []*ari.Key) {
	return Kind(ari.ModuleKey, in)
}

// Playbacks returns the Playback keys from the given list of keys.
func Playbacks(in []*ari.Key) (out []*ari.Key) {
	return Kind(ari.PlaybackKey, in)
}

// Sounds returns the Sound keys from the given list of keys.
func Sounds(in []*ari.Key) (out []*ari.Key) {
	return Kind(ari.SoundKey, in)
}

// StoredRecordings returns the StoredRecording keys from the given list of keys.
func StoredRecordings(in []*ari.Key) (out []*ari.Key) {
	return Kind(ari.StoredRecordingKey, in)
}

// Variables returns the Variable keys from the given list of keys.
func Variables(in []*ari.Key) (out []*ari.Key) {
	return Kind(ari.VariableKey, in)
} | ext/keyfilter/keyfilter.go | 0.745676 | 0.4206 | keyfilter.go | starcoder |
package graph
import (
"sort"
"github.com/Tom-Johnston/mamba/ints"
"github.com/Tom-Johnston/mamba/sortints"
)
//SparseGraph is a data structure for representing a simple undirected graph. *SparseGraph implements the graph interface.
//SparseGraph stores the number of vertices, the number of edges, the degree sequence of the graph and the neighbourhood of each vertex.
//Most modifications are slow as the edges are stored as SortedInts and not a data structure with log(n) modifications but sparse graphs take up relatively little space. Checking if an individual edge is present is logarithmic in the degree of the vertices but returning the neighbours is quick so most algorithms run fairly quickly.
//Invariant: DegreeSequence[v] == len(Neighbourhoods[v]) for every vertex v, and NumberOfEdges is half the sum of the degrees.
//TODO Do we want to switch the neighbourhoods to using some kind of heap with quicker insertions and deletions?
type SparseGraph struct {
	NumberOfVertices int
	NumberOfEdges    int
	Neighbourhoods   []sortints.SortedInts
	DegreeSequence   []int
}
//NewSparse creates the graph on n vertices with the specified neighbourhoods. If neighbourhoods is nil, the empty graph is created.
//Panics if len(neighbourhoods) != n. Each input neighbourhood is deduplicated and re-sorted defensively via NewSortedInts.
//TODO Check that this is a graph. Check SortedInts are in fact sorted.
func NewSparse(n int, neighbourhoods []sortints.SortedInts) *SparseGraph {
	if neighbourhoods == nil {
		// Build the empty graph: every vertex gets an empty neighbourhood.
		neighbourhoods = make([]sortints.SortedInts, n)
		for i := range neighbourhoods {
			neighbourhoods[i] = []int{}
		}
	}
	if len(neighbourhoods) != n {
		panic("Length of neighbourhoods doesn't match the number of vertices.")
	}
	// Copy the input so the caller's slices are never aliased by the graph.
	tmpNeighbourhoods := make([]sortints.SortedInts, n)
	for i := range neighbourhoods {
		tmpNeighbourhoods[i] = sortints.NewSortedInts(neighbourhoods[i]...)
	}
	degreeSequence := make([]int, n)
	for i := range tmpNeighbourhoods {
		degreeSequence[i] = len(tmpNeighbourhoods[i])
	}
	// Each edge is counted once from each endpoint, hence the division by 2.
	return &SparseGraph{NumberOfVertices: n, NumberOfEdges: ints.Sum(degreeSequence) / 2, Neighbourhoods: tmpNeighbourhoods, DegreeSequence: degreeSequence}
}
//N returns the number of vertices in g.
func (g SparseGraph) N() int {
	return g.NumberOfVertices
}

//M returns the number of edges in g.
func (g SparseGraph) M() int {
	return g.NumberOfEdges
}

//IsEdge returns if there is an edge between i and j in the graph.
//The lookup is a binary search in one endpoint's neighbourhood.
//NOTE(review): this searches the neighbourhood of the HIGHER-degree endpoint;
//searching the smaller list would be marginally cheaper — confirm intent.
func (g SparseGraph) IsEdge(i, j int) bool {
	if g.DegreeSequence[i] > g.DegreeSequence[j] {
		return sortints.ContainsSingle(g.Neighbourhoods[i], j)
	}
	return sortints.ContainsSingle(g.Neighbourhoods[j], i)
}

//Neighbours returns the neighbours of the given vertex.
//A defensive copy is returned so callers cannot mutate the graph's internal state.
func (g SparseGraph) Neighbours(v int) []int {
	tmpNeighbours := make([]int, len(g.Neighbourhoods[v]))
	copy(tmpNeighbours, g.Neighbourhoods[v])
	return tmpNeighbours
}

//Degrees returns a copy of the degree sequence of the graph.
func (g SparseGraph) Degrees() []int {
	tmpDegreeSequence := make([]int, len(g.DegreeSequence))
	copy(tmpDegreeSequence, g.DegreeSequence)
	return tmpDegreeSequence
}
//AddVertex adds a vertex to the graph with the specified neighbours.
//The new vertex receives the next available Id (the previous vertex count).
//Duplicate entries in neighbours are ignored.
func (g *SparseGraph) AddVertex(neighbours []int) {
	g.NumberOfVertices++
	// Deduplicate and sort the requested neighbours.
	tmp := sortints.NewSortedInts(neighbours...)
	g.NumberOfEdges += len(tmp)
	for _, v := range tmp {
		// The new vertex has the largest Id, so appending keeps the list sorted.
		g.Neighbourhoods[v] = append(g.Neighbourhoods[v], g.NumberOfVertices-1)
		g.DegreeSequence[v]++
	}
	g.Neighbourhoods = append(g.Neighbourhoods, tmp)
	// Fix: use len(tmp), not len(neighbours) — duplicate input entries must
	// not inflate the degree (NumberOfEdges above is already based on tmp).
	g.DegreeSequence = append(g.DegreeSequence, len(tmp))
}
//RemoveVertex removes the specified vertex. The index of a vertex u > v becomes u - 1 while the index of u < v is unchanged.
//The degree sequence is kept in sync: each neighbour's degree is decremented
//and the removed vertex's entry is deleted.
func (g *SparseGraph) RemoveVertex(i int) {
	g.NumberOfVertices--
	g.NumberOfEdges -= g.DegreeSequence[i]
	// Remove i from every neighbour's list and fix their degrees.
	// Fix: the degree sequence was previously left untouched here, leaving it
	// stale and misaligned with Neighbourhoods after a removal.
	for _, v := range g.Neighbourhoods[i] {
		g.Neighbourhoods[v].Remove(i)
		g.DegreeSequence[v]--
	}
	// Delete row i from both parallel slices.
	g.Neighbourhoods = g.Neighbourhoods[:i+copy(g.Neighbourhoods[i:], g.Neighbourhoods[i+1:])]
	g.DegreeSequence = g.DegreeSequence[:i+copy(g.DegreeSequence[i:], g.DegreeSequence[i+1:])]
	// Relabel: every stored index greater than i shifts down by one.
	for j := range g.Neighbourhoods {
		startIndex := sort.SearchInts(g.Neighbourhoods[j], i)
		for k := startIndex; k < len(g.Neighbourhoods[j]); k++ {
			g.Neighbourhoods[j][k]--
		}
	}
}
//AddEdge modifies the graph by adding the edge (i, j) if it is not already present.
//If the edge is already present (or i == j), this does nothing, so loops can never be created.
func (g *SparseGraph) AddEdge(i, j int) {
	if i == j || g.IsEdge(i, j) {
		return
	}
	g.Neighbourhoods[i].Add(j)
	g.Neighbourhoods[j].Add(i)
	g.NumberOfEdges++
	g.DegreeSequence[i]++
	g.DegreeSequence[j]++
}

//RemoveEdge modifies the graph by removing the edge (i, j) if it is present.
//If the edge is not already present (or i == j), this does nothing.
func (g *SparseGraph) RemoveEdge(i, j int) {
	if i == j || !g.IsEdge(i, j) {
		return
	}
	g.Neighbourhoods[i].Remove(j)
	g.Neighbourhoods[j].Remove(i)
	g.NumberOfEdges--
	g.DegreeSequence[i]--
	g.DegreeSequence[j]--
}
//InducedSubgraph returns a deep copy of the induced subgraph of g with vertices given in order by V.
//This can also be used to return relabellings of the graph if len(V) = g.N().
//Vertex i of the result corresponds to vertex V[i] of g.
func (g SparseGraph) InducedSubgraph(V []int) EditableGraph {
	n := len(V)
	// values/indices map original vertex ids back to their position in V.
	values, indices := intsSort(V)
	tmpDegreeSequence := make([]int, n)
	tmpNeighbourhoods := make([]sortints.SortedInts, n)
	m := 0
	for i, v := range V {
		// Keep only the neighbours of v that are themselves in V, relabelled.
		tmpNeighbourhoods[i] = intersectionByIndex(g.Neighbours(v), values, indices)
		tmpDegreeSequence[i] = len(tmpNeighbourhoods[i])
		m += tmpDegreeSequence[i]
	}
	// m counted each edge from both endpoints, hence the division by 2.
	h := &SparseGraph{NumberOfVertices: n, NumberOfEdges: m / 2, Neighbourhoods: tmpNeighbourhoods, DegreeSequence: tmpDegreeSequence}
	return h
}
//Copy returns a deep copy of the graph g.
//All neighbourhood slices and the degree sequence are duplicated, so mutating
//the copy never affects the original.
func (g SparseGraph) Copy() EditableGraph {
	tmpNeighbourhoods := make([]sortints.SortedInts, len(g.Neighbourhoods))
	for i := range g.Neighbourhoods {
		tmpNeighbourhoods[i] = make(sortints.SortedInts, len(g.Neighbourhoods[i]))
		copy(tmpNeighbourhoods[i], g.Neighbourhoods[i])
	}
	tmpDegreeSequence := make([]int, len(g.DegreeSequence))
	copy(tmpDegreeSequence, g.DegreeSequence)
	return &SparseGraph{NumberOfVertices: g.NumberOfVertices, NumberOfEdges: g.NumberOfEdges, Neighbourhoods: tmpNeighbourhoods, DegreeSequence: tmpDegreeSequence}
} | graph/graph_sparse.go | 0.527073 | 0.708566 | graph_sparse.go | starcoder |
package dfl
import (
"strings"
"github.com/pkg/errors"
"github.com/spatialcurrent/go-adaptive-functions/pkg/af"
"github.com/spatialcurrent/go-reader-writer/pkg/io"
)
// IIn is a BinaryOperator that evaluates to true if the left value is in the right value.
// Unlike "in", it is case insensitive.
// If the right value is an array/slice, then it evaluates to true if the left value is in the array/slice.
// Otherwise, it evaluates to true if the left string is contained by the right string.
type IIn struct {
	*BinaryOperator
}
// Dfl returns the DFL expression string for this node.
func (i IIn) Dfl(quotes []string, pretty bool, tabs int) string {
	return i.BinaryOperator.Dfl("iin", quotes, pretty, tabs)
}

// Sql returns the SQL representation of this node.
// iin has no SQL equivalent, so an empty string is returned.
func (i IIn) Sql(pretty bool, tabs int) string {
	return ""
}

// Map returns a map representation of this node.
func (i IIn) Map() map[string]interface{} {
	return i.BinaryOperator.Map("iin", i.Left, i.Right)
}

// Compile returns a copy of this node with both children compiled.
func (i IIn) Compile() Node {
	left := i.Left.Compile()
	right := i.Right.Compile()
	return &IIn{&BinaryOperator{Left: left, Right: right}}
}
// Evaluate evaluates both children and returns whether the left value is
// contained (case-insensitively) in the right value.
// When the right value is a reader, its whole content is read and the left
// string/[]byte is searched for within it; otherwise the comparison is
// delegated to the adaptive IIn function.
func (i IIn) Evaluate(vars map[string]interface{}, ctx interface{}, funcs FunctionMap, quotes []string) (map[string]interface{}, interface{}, error) {
	vars, lv, err := i.Left.Evaluate(vars, ctx, funcs, quotes)
	if err != nil {
		return vars, false, errors.Wrap(err, "Error evaluating left value for "+i.Dfl(quotes, false, 0))
	}
	vars, rv, err := i.Right.Evaluate(vars, ctx, funcs, quotes)
	if err != nil {
		return vars, false, errors.Wrap(err, "Error evaluating right value for "+i.Dfl(quotes, false, 0))
	}
	if rvr, ok := rv.(io.ByteReadCloser); ok {
		if lvb, ok := lv.([]byte); ok {
			lvs := string(lvb)
			rvb, err := rvr.ReadAll()
			if err != nil {
				return vars, false, errors.Wrap(err, "error reading all byte for right value in expression "+i.Dfl(quotes, false, 0))
			}
			rvs := string(rvb)
			if len(lvs) == len(rvs) && len(lvs) == 0 {
				return vars, true, nil
			}
			// Fix: lower-case both sides so the []byte branch matches the
			// string branch below — iin is documented as case-insensitive.
			return vars, strings.Contains(strings.ToLower(rvs), strings.ToLower(lvs)), nil
		}
		if lvs, ok := lv.(string); ok {
			rvb, err := rvr.ReadAll()
			if err != nil {
				return vars, false, errors.Wrap(err, "error reading all byte for right value in expression "+i.Dfl(quotes, false, 0))
			}
			rvs := string(rvb)
			if len(lvs) == len(rvs) && len(lvs) == 0 {
				return vars, true, nil
			}
			return vars, strings.Contains(strings.ToLower(rvs), strings.ToLower(lvs)), nil
		}
	}
	// Fall back to the adaptive case-insensitive "in" for all other types.
	value, err := af.IIn.ValidateRun(lv, rv)
	if err != nil {
		return vars, false, errors.Wrap(err, ErrorEvaluate{Node: i, Quotes: quotes}.Error())
	}
	return vars, value, nil
} | pkg/dfl/IIn.go | 0.723993 | 0.420302 | IIn.go | starcoder |
package function
import (
"fmt"
"strings"
"gopkg.in/src-d/go-errors.v1"
"github.com/dolthub/go-mysql-server/sql"
"github.com/dolthub/go-mysql-server/sql/expression"
)
// SRID is a function that returns SRID of Geometry object or returns a new object with altered SRID.
type SRID struct {
	expression.NaryExpression
}

// Compile-time check that *SRID implements sql.FunctionExpression.
var _ sql.FunctionExpression = (*SRID)(nil)

// ErrInvalidSRID is returned when the requested SRID is not a supported
// spatial reference identifier.
var ErrInvalidSRID = errors.NewKind("There's no spatial reference with SRID %d")

// Supported spatial reference identifiers: 0 is Cartesian, 4326 is WGS 84.
const (
	CartesianSRID  = 0
	GeoSpatialSRID = 4326
)
// NewSRID creates a new ST_SRID expression.
// It accepts either one argument (read the SRID) or two (return a copy of
// the geometry with the given SRID); any other arity is an error.
func NewSRID(args ...sql.Expression) (sql.Expression, error) {
	if len(args) != 1 && len(args) != 2 {
		return nil, sql.ErrInvalidArgumentNumber.New("ST_SRID", "1 or 2", len(args))
	}
	return &SRID{expression.NaryExpression{ChildExpressions: args}}, nil
}
// FunctionName implements sql.FunctionExpression
func (s *SRID) FunctionName() string {
	return "st_srid"
}

// Description implements sql.FunctionExpression
func (s *SRID) Description() string {
	return "returns the SRID value of given geometry object. If given a second argument, returns a new geometry object with second argument as SRID value."
}

// Type implements the sql.Expression interface.
// With one argument the result is the SRID (an int32); with two arguments the
// result has the same type as the input geometry.
func (s *SRID) Type() sql.Type {
	if len(s.ChildExpressions) == 1 {
		return sql.Int32
	} else {
		return s.ChildExpressions[0].Type()
	}
}

// String returns the SQL text form of the expression.
func (s *SRID) String() string {
	var args = make([]string, len(s.ChildExpressions))
	for i, arg := range s.ChildExpressions {
		args[i] = arg.String()
	}
	return fmt.Sprintf("ST_SRID(%s)", strings.Join(args, ","))
}

// WithChildren implements the Expression interface.
func (s *SRID) WithChildren(children ...sql.Expression) (sql.Expression, error) {
	return NewSRID(children...)
}
// PointWithSRID creates a deep copy of point object with given SRID
func PointWithSRID(p sql.Point, srid uint32) sql.Point {
	return sql.Point{SRID: srid, X: p.X, Y: p.Y}
}

// LineWithSRID creates a deep copy of linestring object with given SRID;
// every contained point also receives the new SRID.
func LineWithSRID(l sql.Linestring, srid uint32) sql.Linestring {
	points := make([]sql.Point, len(l.Points))
	for i, p := range l.Points {
		points[i] = PointWithSRID(p, srid)
	}
	return sql.Linestring{SRID: srid, Points: points}
}

// PolyWithSRID creates a deep copy of polygon object with given SRID;
// every contained linestring (and its points) also receives the new SRID.
func PolyWithSRID(p sql.Polygon, srid uint32) sql.Polygon {
	lines := make([]sql.Linestring, len(p.Lines))
	for i, l := range p.Lines {
		lines[i] = LineWithSRID(l, srid)
	}
	return sql.Polygon{SRID: srid, Lines: lines}
}
// Eval implements the sql.Expression interface.
// One argument: returns the geometry's SRID. Two arguments: returns a deep
// copy of the geometry with the second argument as its SRID (only 0 and
// 4326 are accepted). NULL inputs propagate to a NULL result.
func (s *SRID) Eval(ctx *sql.Context, row sql.Row) (interface{}, error) {
	// Evaluate geometry type
	g, err := s.ChildExpressions[0].Eval(ctx, row)
	if err != nil {
		return nil, err
	}
	// Return nil if geometry object is nil
	if g == nil {
		return nil, nil
	}
	// If just one argument, return SRID
	if len(s.ChildExpressions) == 1 {
		// Check that it is a geometry type
		switch g := g.(type) {
		case sql.Point:
			return g.SRID, nil
		case sql.Linestring:
			return g.SRID, nil
		case sql.Polygon:
			return g.SRID, nil
		default:
			return nil, sql.ErrIllegalGISValue.New(g)
		}
	}
	// Evaluate second argument
	srid, err := s.ChildExpressions[1].Eval(ctx, row)
	if err != nil {
		return nil, err
	}
	// Return null if second argument is null
	if srid == nil {
		return nil, nil
	}
	// Convert to uint32 (note: sql.Uint32, despite the historical comment
	// elsewhere claiming int32)
	srid, err = sql.Uint32.Convert(srid)
	if err != nil {
		return nil, err
	}
	// Type assertion
	_srid := srid.(uint32)
	// Must be either 0 or 4326
	if _srid != CartesianSRID && _srid != GeoSpatialSRID {
		return nil, ErrInvalidSRID.New(_srid)
	}
	// Create new geometry object with matching SRID
	switch g := g.(type) {
	case sql.Point:
		return PointWithSRID(g, _srid), nil
	case sql.Linestring:
		return LineWithSRID(g, _srid), nil
	case sql.Polygon:
		return PolyWithSRID(g, _srid), nil
	default:
		return nil, sql.ErrIllegalGISValue.New(g)
	}
} | sql/expression/function/srid.go | 0.715623 | 0.417865 | srid.go | starcoder |
package trie
import "strings"
// data represents information that a tree edge holds.
type data struct {
	// label is an arbitrary string associated with an edge.
	label string
	// count is the number of times the label was seen when inserting to a tree.
	count uint
}

// edge of a tree. An edge carries a label fragment and points at the node
// reached by following it; a nil target marks a leaf edge.
type edge struct {
	// target points to the node that is connected to this edge.
	target *node
	data
}

// node of a tree. A node is simply the set of edges leaving it.
type node struct {
	// edges connected to this node.
	edges []*edge
}

// Trie implements a radix tree.
// For information about radix trees, see https://en.wikipedia.org/wiki/Radix_tree.
type Trie struct {
	root *node
}

// WalkFn is used when walking a tree. It receives the accumulated prefix of
// labels from the root and the insertion count stored on the final edge.
type WalkFn func(prefix string, count uint)
// New returns an empty Trie ready for use.
func New() *Trie {
	root := &node{edges: []*edge{}}
	return &Trie{root: root}
}
// shortestLen returns the length of the shorter of the two strings.
func shortestLen(a, b string) int {
	if len(a) <= len(b) {
		return len(a)
	}
	return len(b)
}
// longestCommonPrefixLen returns length of the longest common prefix of two strings.
// Note, that this function compares bytes, rather than UTF-8-encoded runes.
// Therefore, it may yield unexpected result for strings encoded in UTF-8.
func longestCommonPrefixLen(a, b string) (l int) {
	for l < len(a) && l < len(b) && a[l] == b[l] {
		l++
	}
	return
}
// appendEdge appends an edge to the node's outgoing edge list.
func (n *node) appendEdge(e *edge) {
	n.edges = append(n.edges, e)
}
// walk the node recursively calling the specified function for every edge.
// prefixes accumulates the edge labels from the root down to this node.
func (n node) walk(fn WalkFn, prefixes []string) {
	for _, edge := range n.edges {
		// Appending to the shared prefixes slice is safe here: each joined
		// prefix string is fully built (copied) before a sibling iteration
		// can overwrite the same backing-array slot.
		newPrefixes := append(prefixes, edge.label)
		fn(strings.Join(newPrefixes, ""), edge.count)
		if edge.target != nil {
			edge.target.walk(fn, newPrefixes)
		}
	}
}
// split the edge into two edges.
// For example, splitting an edge labeled "test" with index of 2 results in a new edge labeled "st"
// and the original edge labeled "te".
// The new child edge inherits the original target and count-1 (the caller has
// already incremented the original edge's count for the in-flight insertion).
func (e *edge) split(index int) {
	oldLabel := e.label
	oldTarget := e.target
	e.label = oldLabel[:index]
	// The original edge now points at a fresh intermediate node.
	e.target = new(node)
	e.target.appendEdge(&edge{
		data: data{
			label: oldLabel[index:],
			count: e.count - 1,
		},
		target: oldTarget,
	})
}
// Insert adds a string value to the tree, incrementing the count of every
// edge along its path and splitting edges where the value diverges from an
// existing label.
func (tr *Trie) Insert(value string) {
	nextEdge := new(edge)
	traverseNode := tr.root
	// bytesFound tracks how much of value has been matched so far.
	bytesFound := 0
	for traverseNode != nil {
		nextEdge = nil
		for _, edge := range traverseNode.edges {
			prefixLen := longestCommonPrefixLen(value[bytesFound:], edge.label)
			if prefixLen == 0 {
				continue
			}
			// This edge shares a prefix with the remainder of value: count it
			// and descend.
			edge.count++
			bytesFound += prefixLen
			nextEdge = edge
			if prefixLen < len(edge.label) {
				// Only part of the label matched — split the edge at the
				// divergence point before continuing below it.
				edge.split(prefixLen)
			}
			break
		}
		if nextEdge == nil {
			break
		}
		traverseNode = nextEdge.target
	}
	if bytesFound < len(value) {
		if traverseNode == nil {
			// We stopped on a leaf edge; grow a node under it to attach the
			// remaining suffix.
			traverseNode = new(node)
			nextEdge.target = traverseNode
		}
		traverseNode.appendEdge(&edge{
			data: data{
				label: value[bytesFound:],
				count: 1,
			},
		})
	}
}
// Walk the tree recursively calling the specified function for every edge,
// starting from the root with an empty prefix.
func (tree Trie) Walk(fn WalkFn) {
	tree.root.walk(fn, []string{})
} | internal/trie/trie.go | 0.809841 | 0.661834 | trie.go | starcoder |
// Copied and modified from https://github.com/issue9/identicon/ (MIT License)
// Generate pseudo-random avatars by IP, E-mail, etc.
package identicon
import (
"crypto/sha256"
"fmt"
"image"
"image/color"
)
const minImageSize = 16
// Identicon is used to generate pseudo-random avatars
type Identicon struct {
foreColors []color.Color
backColor color.Color
size int
rect image.Rectangle
}
// New returns an Identicon struct with the correct settings
// size image size
// back background color
// fore all possible foreground colors. only one foreground color will be picked randomly for one image
func New(size int, back color.Color, fore ...color.Color) (*Identicon, error) {
if len(fore) == 0 {
return nil, fmt.Errorf("foreground is not set")
}
if size < minImageSize {
return nil, fmt.Errorf("size %d is smaller than min size %d", size, minImageSize)
}
return &Identicon{
foreColors: fore,
backColor: back,
size: size,
rect: image.Rect(0, 0, size, size),
}, nil
}
// Make generates an avatar image from the given data.
// A SHA-256 digest of data deterministically selects the center block style,
// the two surrounding block styles, their rotation angles, and the
// foreground color — so equal inputs always produce identical avatars.
func (i *Identicon) Make(data []byte) image.Image {
	h := sha256.New()
	h.Write(data)
	sum := h.Sum(nil)
	// Derive each rendering parameter from disjoint-ish bytes of the digest.
	b1 := int(sum[0]+sum[1]+sum[2]) % len(blocks)
	b2 := int(sum[3]+sum[4]+sum[5]) % len(blocks)
	c := int(sum[6]+sum[7]+sum[8]) % len(centerBlocks)
	b1Angle := int(sum[9]+sum[10]) % 4
	b2Angle := int(sum[11]+sum[12]) % 4
	foreColor := int(sum[11]+sum[12]+sum[15]) % len(i.foreColors)
	return i.render(c, b1, b2, b1Angle, b2Angle, foreColor)
}
// render draws the avatar onto a two-color palette (background at index 0,
// the chosen foreground at index 1) using the selected block styles/angles.
func (i *Identicon) render(c, b1, b2, b1Angle, b2Angle, foreColor int) image.Image {
	p := image.NewPaletted(i.rect, []color.Color{i.backColor, i.foreColors[foreColor]})
	drawBlocks(p, i.size, centerBlocks[c], blocks[b1], blocks[b2], b1Angle, b2Angle)
	return p
}
/*
# Algorithm
Origin: An image is split into 9 areas
```
-------------
| 1 | 2 | 3 |
-------------
| 4 | 5 | 6 |
-------------
| 7 | 8 | 9 |
-------------
```
Area 1/3/9/7 use a 90-degree rotating pattern.
Area 2/6/8/4 use another 90-degree rotating pattern.
Area 5 uses a random pattern.
The Patched Fix: make the image left-right mirrored to get rid of something like "swastika"
*/
// drawBlocks draws the avatar blocks onto the paletted image.
// c: the block drawer for the center block
// b1,b2: the block drawers for other blocks (around the center block)
// b1Angle,b2Angle: the angle for the rotation of b1/b2
// Only the left half and center column are drawn explicitly; the right half
// is produced by mirroring, which also rules out asymmetric shapes such as
// a swastika.
func drawBlocks(p *image.Paletted, size int, c, b1, b2 blockFunc, b1Angle, b2Angle int) {
	nextAngle := func(a int) int {
		return (a + 1) % 4
	}

	padding := (size % 3) / 2 // in case the size can not be aligned by 3 blocks.

	blockSize := size / 3
	twoBlockSize := 2 * blockSize

	// center
	c(p, blockSize+padding, blockSize+padding, blockSize, 0)

	// left top (1)
	b1(p, 0+padding, 0+padding, blockSize, b1Angle)
	// center top (2)
	b2(p, blockSize+padding, 0+padding, blockSize, b2Angle)

	// The angles still advance past the skipped right-hand blocks (3/6/9) so
	// the drawn left-hand blocks keep their original rotations.
	b1Angle = nextAngle(b1Angle)
	b2Angle = nextAngle(b2Angle)
	// right top (3)
	// b1(p, twoBlockSize+padding, 0+padding, blockSize, b1Angle)
	// right middle (6)
	// b2(p, twoBlockSize+padding, blockSize+padding, blockSize, b2Angle)

	b1Angle = nextAngle(b1Angle)
	b2Angle = nextAngle(b2Angle)
	// right bottom (9)
	// b1(p, twoBlockSize+padding, twoBlockSize+padding, blockSize, b1Angle)
	// center bottom (8)
	b2(p, blockSize+padding, twoBlockSize+padding, blockSize, b2Angle)

	b1Angle = nextAngle(b1Angle)
	b2Angle = nextAngle(b2Angle)
	// lef bottom (7)
	b1(p, 0+padding, twoBlockSize+padding, blockSize, b1Angle)
	// left middle (4)
	b2(p, 0+padding, blockSize+padding, blockSize, b2Angle)

	// Mirror the left half onto the right half.
	// Fix: the mirror of column x is column size-1-x. The previous size-x
	// wrote out of bounds for x=0 (silently dropped by SetColorIndex) and
	// left column size/2 unmirrored.
	for x := 0; x < size/2; x++ {
		for y := 0; y < size; y++ {
			p.SetColorIndex(size-1-x, y, p.ColorIndexAt(x, y))
		}
	}
} | modules/avatar/identicon/identicon.go | 0.600071 | 0.609205 | identicon.go | starcoder |
package main
import (
"fmt"
"reflect"
"time"
"github.com/sanderploegsma/advent-of-code/2019/utils"
)
// input holds the four moons' initial state: x, y, z position followed by
// x, y, z velocity (all velocities start at zero).
var input = [][]int{
	{3, 2, -6, 0, 0, 0},
	{-13, 18, 10, 0, 0, 0},
	{-8, -1, 13, 0, 0, 0},
	{5, 10, 4, 0, 0, 0},
}

// main solves both puzzle parts and prints each answer with its runtime.
func main() {
	start := time.Now()
	ans := TotalEnergy(SimulateN(input, 1000))
	fmt.Println(1, ans, time.Since(start))

	start = time.Now()
	ans = CalculateCycle(input)
	fmt.Println(2, ans, time.Since(start))
}
// CalculateCycle finds the number of steps needed to bring the given moons back to their original position.
// Because the movement on each axis is not influenced by the the other axes, we can determine the cycle for each axis
// individually, and then use the Least Common Multiplier to calculate the combined cycle.
func CalculateCycle(moons [][]int) int {
	// repeats[axis] = number of steps until that axis returns to its start.
	repeats := make(map[int]int)
	for i := 0; i < 3; i++ {
		steps := 0
		sim := clone(moons)
		for {
			steps++
			sim = SimulateAxis(sim, i)
			// Positions AND velocities must both match the initial state.
			if reflect.DeepEqual(sim, moons) {
				repeats[i] = steps
				break
			}
		}
	}

	return utils.LCM(repeats[0], repeats[1], repeats[2])
}
// SimulateN simulates the movement of the given moons for n steps.
// The input is never mutated; a fresh state is returned.
func SimulateN(moons [][]int, n int) [][]int {
	state := clone(moons)
	for step := 0; step < n; step++ {
		state = Simulate(state)
	}
	return state
}
// Simulate simulates a single movement step for all given moons:
// first apply pairwise gravity to the velocities, then apply each velocity
// to its position. The input is not mutated.
func Simulate(moons [][]int) [][]int {
	sim := clone(moons)
	// Gravity: every unordered pair of moons pulls on each other per axis.
	// Indices 0-2 are positions, 3-5 the corresponding velocities.
	for i := 0; i < len(moons); i++ {
		for j := i + 1; j < len(moons); j++ {
			for axis := 0; axis < 3; axis++ {
				dx1, dx2 := gravity(moons[i][axis], moons[j][axis])
				sim[i][axis+3] += dx1
				sim[j][axis+3] += dx2
			}
		}
	}

	// Velocity: move every moon by its (updated) velocity.
	for i := range sim {
		for axis := 0; axis < 3; axis++ {
			sim[i][axis] += sim[i][axis+3]
		}
	}

	return sim
}
// SimulateAxis simulates a single movement step for a single given axis of all moons,
// leaving the other two axes untouched. The input is not mutated.
func SimulateAxis(moons [][]int, axis int) [][]int {
	sim := clone(moons)
	// Apply pairwise gravity on this axis only (index axis+3 is the velocity).
	for i := 0; i < len(moons); i++ {
		for j := i + 1; j < len(moons); j++ {
			di, dj := gravity(moons[i][axis], moons[j][axis])
			sim[i][axis+3] += di
			sim[j][axis+3] += dj
		}
	}

	// Move each moon along this axis by its updated velocity.
	for i := range sim {
		sim[i][axis] += sim[i][axis+3]
	}

	return sim
}
// TotalEnergy calculates the total amount of energy of all given moons:
// the sum over moons of potential energy (sum of |position| components)
// times kinetic energy (sum of |velocity| components).
func TotalEnergy(moons [][]int) int {
	energy := 0
	for _, m := range moons {
		energy += (utils.Abs(m[0]) + utils.Abs(m[1]) + utils.Abs(m[2])) * (utils.Abs(m[3]) + utils.Abs(m[4]) + utils.Abs(m[5]))
	}
	return energy
}
// gravity returns the velocity deltas for two coordinates: each is pulled
// one unit toward the other, or not at all when they are equal.
func gravity(a, b int) (int, int) {
	switch {
	case a < b:
		return 1, -1
	case a > b:
		return -1, 1
	default:
		return 0, 0
	}
}
// clone returns a deep copy of the moons state so simulations never mutate
// their input.
func clone(moons [][]int) [][]int {
	c := make([][]int, len(moons))
	for i := range moons {
		c[i] = make([]int, len(moons[i]))
		copy(c[i], moons[i])
	}
	return c
} | 2019/go/12/main.go | 0.601594 | 0.402921 | main.go | starcoder |
package core
import (
"fmt"
"reflect"
"strconv"
"strings"
"time"
"github.com/mattn/anko/vm"
)
// ImportToX adds all the toX conversion helpers to the given script env.
// Each helper converts an arbitrary script value to a specific Go type,
// falling back to the type's zero value when no conversion applies.
func ImportToX(env *vm.Env) {
	// toBool: direct conversion, then positive-number, then string forms
	// ("y"/"yes" or anything strconv.ParseBool accepts). Defaults to false.
	env.Define("toBool", func(v interface{}) bool {
		nt := reflect.TypeOf(true)
		rv := reflect.ValueOf(v)
		if rv.Type().ConvertibleTo(nt) {
			return rv.Convert(nt).Bool()
		}
		if rv.Type().ConvertibleTo(reflect.TypeOf(1.0)) && rv.Convert(reflect.TypeOf(1.0)).Float() > 0.0 {
			return true
		}
		if rv.Kind() == reflect.String {
			s := strings.ToLower(v.(string))
			if s == "y" || s == "yes" {
				return true
			}
			b, err := strconv.ParseBool(s)
			if err == nil {
				return b
			}
		}
		return false
	})

	// toString: []byte is converted directly, everything else via fmt.Sprint.
	env.Define("toString", func(v interface{}) string {
		if b, ok := v.([]byte); ok {
			return string(b)
		}
		return fmt.Sprint(v)
	})

	// toInt: direct conversion, then string parsing (integer first, then
	// float truncated), then bool (true -> 1). Defaults to 0.
	env.Define("toInt", func(v interface{}) int64 {
		nt := reflect.TypeOf(1)
		rv := reflect.ValueOf(v)
		if rv.Type().ConvertibleTo(nt) {
			return rv.Convert(nt).Int()
		}
		if rv.Kind() == reflect.String {
			i, err := strconv.ParseInt(v.(string), 10, 64)
			if err == nil {
				return i
			}
			f, err := strconv.ParseFloat(v.(string), 64)
			if err == nil {
				return int64(f)
			}
		}
		if rv.Kind() == reflect.Bool {
			if v.(bool) {
				return 1
			}
		}
		return 0
	})

	// toFloat: direct conversion, then string parsing, then bool
	// (true -> 1.0). Defaults to 0.0.
	env.Define("toFloat", func(v interface{}) float64 {
		nt := reflect.TypeOf(1.0)
		rv := reflect.ValueOf(v)
		if rv.Type().ConvertibleTo(nt) {
			return rv.Convert(nt).Float()
		}
		if rv.Kind() == reflect.String {
			f, err := strconv.ParseFloat(v.(string), 64)
			if err == nil {
				return f
			}
		}
		if rv.Kind() == reflect.Bool {
			if v.(bool) {
				return 1.0
			}
		}
		return 0.0
	})

	// toChar: a rune rendered as a one-character string.
	env.Define("toChar", func(s rune) string {
		return string(s)
	})

	// toRune: the first rune of the string, or 0 for an empty string.
	env.Define("toRune", func(s string) rune {
		if len(s) == 0 {
			return 0
		}
		return []rune(s)[0]
	})

	// Typed-slice converters: copy a []interface{} into a concrete slice.
	env.Define("toBoolSlice", func(v []interface{}) []bool {
		var result []bool
		toSlice(v, &result)
		return result
	})

	env.Define("toStringSlice", func(v []interface{}) []string {
		var result []string
		toSlice(v, &result)
		return result
	})

	env.Define("toIntSlice", func(v []interface{}) []int64 {
		var result []int64
		toSlice(v, &result)
		return result
	})

	env.Define("toFloatSlice", func(v []interface{}) []float64 {
		var result []float64
		toSlice(v, &result)
		return result
	})

	env.Define("toByteSlice", func(s string) []byte {
		return []byte(s)
	})

	env.Define("toRuneSlice", func(s string) []rune {
		return []rune(s)
	})

	// toDuration: reinterpret an int64 as a time.Duration (nanoseconds).
	env.Define("toDuration", func(v int64) time.Duration {
		return time.Duration(v)
	})
}
// toSlice takes in a "generic" slice and converts and copies
// it's elements into the typed slice pointed at by ptr.
// Note that this is a costly operation.
// Panics (via reflect) if an element is not assignable to the target
// element type.
func toSlice(from []interface{}, ptr interface{}) {
	// Value of the pointer to the target
	obj := reflect.Indirect(reflect.ValueOf(ptr))
	// We can't just convert from interface{} to whatever the target is (diff memory layout),
	// so we need to create a New slice of the proper type and copy the values individually
	t := reflect.TypeOf(ptr).Elem()
	slice := reflect.MakeSlice(t, len(from), len(from))
	// Copying the data, val is an addressable Pointer of the actual target type
	// NOTE(review): val appears redundant — it is written via val.Set(v) but
	// never read; slice.Index(i).Set(v) alone does the copy. Confirm and
	// consider removing.
	val := reflect.Indirect(reflect.New(t.Elem()))
	for i := 0; i < len(from); i++ {
		v := reflect.ValueOf(from[i])
		val.Set(v)
		slice.Index(i).Set(v)
	}
	// Ok now assign our slice to the target pointer
	obj.Set(slice)
} | core/toX.go | 0.507324 | 0.425009 | toX.go | starcoder |
package genomeGraph
// SortGraph will reorder nodes in a graph such that the order and Ids of the output graph are topologically sorted
func SortGraph(g *GenomeGraph) *GenomeGraph {
	answer := &GenomeGraph{}
	answer.Nodes = make([]Node, len(g.Nodes))
	order := GetSortOrder(g)
	// order[sortedIdx] = originalIdx; copy each node into its sorted slot and
	// renumber it to match.
	for sortedIdx, originalIdx := range order {
		answer.Nodes[sortedIdx] = g.Nodes[originalIdx]
		answer.Nodes[sortedIdx].Id = uint32(sortedIdx)
	}
	return answer
}

// GetSortOrder will perform a breadth first search (BFS) on a graph and return an output slice where output[sortedIdx] = originalIdx
func GetSortOrder(g *GenomeGraph) []uint32 {
	return breadthFirstSearch(g.Nodes)
}
// TODO: design function to get start positions only
// breadthFirstSearch performs a breadth first search on a graph and returns a slice correlating the sort order to the original order.
// Nodes are processed in Kahn-style topological order: nodes whose in-degree
// drops to zero are appended to a queue and visited in FIFO order, one
// contiguous subgraph at a time.
func breadthFirstSearch(nodes []Node) []uint32 {
	answer := make([]uint32, 0)
	var inDegree int
	var nodeId uint32
	inDegreeTable := make(map[uint32]int)

	// Updated nodes is going to keep track of each node
	// which has had a change to it's inDegree
	// We will use this to loop through the graph
	// visiting each group of connected nodes in order
	// and by searching within each group with a
	// breadth-first approach
	updatedNodes := make([]*Node, 0)
	subGraphs := BreakNonContiguousGraph(nodes)

	// loop through each contiguous subGraph
	for _, nodeSet := range subGraphs {
		updatedNodes = nil
		inDegreeTable = make(map[uint32]int)
		for i := 0; i < len(nodeSet); i++ {
			inDegreeTable[nodeSet[i].Id] = len(nodeSet[i].Prev)
		}

		// Find all nodes that start with inDegree zero and add to updatedNodes
		for nodeId, inDegree = range inDegreeTable {
			if inDegree == 0 {
				updatedNodes = append(updatedNodes, &nodes[nodeId])
			}
		}

		// Process the queue: record each node, then release its successors.
		for k := 0; k < len(updatedNodes); k++ {
			answer = append(answer, updatedNodes[k].Id)
			delete(inDegreeTable, updatedNodes[k].Id)
			updateTable(inDegreeTable, updatedNodes[k], &updatedNodes)
		}
	}
	return answer
}
// updateTable decrements the in-degree of every successor of node and appends
// any successor whose in-degree reaches zero to updatedNodes.
func updateTable(inDegreeTable map[uint32]int, node *Node, updatedNodes *[]*Node) {
	for i := range node.Next {
		dest := node.Next[i].Dest
		inDegreeTable[dest.Id]--
		if inDegreeTable[dest.Id] == 0 {
			*updatedNodes = append(*updatedNodes, dest)
		}
	}
}
// TODO: possible to order nodes while breaking discontiguous graphs???
// BreakNonContiguousGraph will return a slice of graphs ([]*Node) such that each graph in the slice is contiguous.
// Each connected component is discovered by tracing from a node with
// in-degree zero; visited tracks membership across components.
func BreakNonContiguousGraph(g []Node) [][]*Node {
	answer := make([][]*Node, 0)
	var contiguousGraph []*Node
	inDegreeTable := make(map[uint32]int)
	visited := make([]bool, len(g))
	var inDegree int
	var nodeId uint32

	for i := 0; i < len(g); i++ {
		inDegreeTable[g[i].Id] = len(g[i].Prev)
	}

	// Start a trace from every unvisited in-degree-zero node; traceGraph then
	// collects everything reachable through Next or Prev edges.
	for nodeId, inDegree = range inDegreeTable {
		if inDegree == 0 && !visited[nodeId] {
			contiguousGraph = make([]*Node, 1)
			contiguousGraph[0] = &g[nodeId]
			visited[nodeId] = true
			traceGraph(&g[nodeId], visited, &contiguousGraph)
			answer = append(answer, contiguousGraph)
		}
	}
	return answer
}
// traceGraph is a helper function that traverses the graph (depth-first,
// following both Next and Prev edges) and keeps track of which nodes have
// been visited, appending each newly reached node to answer.
// NOTE(review): the recursion depth is bounded by the component size —
// confirm that expected graphs are small enough to avoid deep stacks.
func traceGraph(startNode *Node, visited []bool, answer *[]*Node) {
	var i int = 0
	for i = 0; i < len(startNode.Next); i++ {
		if !visited[startNode.Next[i].Dest.Id] {
			*answer = append(*answer, startNode.Next[i].Dest)
			visited[startNode.Next[i].Dest.Id] = true
			traceGraph(startNode.Next[i].Dest, visited, answer)
		}
	}

	for i = 0; i < len(startNode.Prev); i++ {
		if !visited[startNode.Prev[i].Dest.Id] {
			*answer = append(*answer, startNode.Prev[i].Dest)
			visited[startNode.Prev[i].Dest.Id] = true
			traceGraph(startNode.Prev[i].Dest, visited, answer)
		}
	}
} | genomeGraph/sort.go | 0.567577 | 0.69894 | sort.go | starcoder |
package specs
import (
"testing"
"github.com/go-rel/rel"
"github.com/go-rel/rel/where"
"github.com/stretchr/testify/assert"
)
// Update tests specification for updating a record.
// It inserts a user, mutates several fields, updates it, and checks both the
// in-memory struct and a fresh query agree.
func Update(t *testing.T, repo rel.Repository) {
	var (
		note = "s<PASSWORD>"
		user = User{
			Name: "update",
		}
	)

	repo.MustInsert(ctx, &user)

	user.Name = "update"
	user.Gender = "male"
	user.Age = 23
	user.Note = &note

	err := repo.Update(ctx, &user)
	assert.Nil(t, err)
	assert.NotZero(t, user.ID)
	assert.Equal(t, "update", user.Name)
	assert.Equal(t, "male", user.Gender)
	assert.Equal(t, 23, user.Age)
	assert.Equal(t, &note, user.Note)

	// update unchanged: a no-op update must also succeed
	assert.Nil(t, repo.Update(ctx, &user))

	var (
		queried User
	)

	// addresses are not preloaded by Find, so clear them before comparing.
	user.Addresses = nil
	err = repo.Find(ctx, &queried, where.Eq("id", user.ID))
	assert.Nil(t, err)
	assert.Equal(t, user, queried)
}
// UpdateNotFound tests specification for updating a not found record.
// Updating a record with a zero primary key must fail with NotFoundError.
func UpdateNotFound(t *testing.T, repo rel.Repository) {
	var (
		user = User{
			ID:   0,
			Name: "update",
		}
	)

	// update unchanged
	assert.Equal(t, rel.NotFoundError{}, repo.Update(ctx, &user))
}
// UpdateHasManyInsert tests specification for updating a record and inserting has many association.
func UpdateHasManyInsert(t *testing.T, repo rel.Repository) {
	var (
		result User
		// FIX: the source contained an anonymization placeholder ("<NAME>");
		// "update init" matches the sibling specs, and the name is
		// overwritten below anyway.
		user = User{
			Name: "update init",
		}
	)

	repo.MustInsert(ctx, &user)

	user.Name = "update insert has many"
	user.Addresses = []Address{
		{Name: "primary"},
		{Name: "work"},
	}

	err := repo.Update(ctx, &user)
	assert.Nil(t, err)
	assert.NotZero(t, user.ID)
	assert.Equal(t, "update insert has many", user.Name)

	// Both addresses must be inserted and linked back to the user.
	assert.Len(t, user.Addresses, 2)
	assert.NotZero(t, user.Addresses[0].ID)
	assert.NotZero(t, user.Addresses[1].ID)
	assert.Equal(t, user.ID, *user.Addresses[0].UserID)
	assert.Equal(t, user.ID, *user.Addresses[1].UserID)
	assert.Equal(t, "primary", user.Addresses[0].Name)
	assert.Equal(t, "work", user.Addresses[1].Name)

	// Round-trip: fetching with preload must yield an identical struct.
	repo.MustFind(ctx, &result, where.Eq("id", user.ID))
	repo.MustPreload(ctx, &result, "addresses")
	assert.Equal(t, result, user)
}
// UpdateHasManyUpdate tests specification for updating a record and updating has many association.
func UpdateHasManyUpdate(t *testing.T, repo rel.Repository) {
	var (
		user = User{
			Name: "update init",
			Addresses: []Address{
				{Name: "old address"},
			},
		}
		result User
	)

	repo.MustInsert(ctx, &user)
	// The association row must have been inserted alongside the parent.
	assert.NotZero(t, user.Addresses[0].ID)

	// Mutate the existing association in place (same ID, new name).
	user.Name = "update insert has many"
	user.Addresses[0].Name = "new address"

	assert.Nil(t, repo.Update(ctx, &user))
	assert.NotZero(t, user.ID)
	assert.Equal(t, "update insert has many", user.Name)
	assert.Len(t, user.Addresses, 1)
	assert.NotZero(t, user.Addresses[0].ID)
	assert.Equal(t, user.ID, *user.Addresses[0].UserID)
	assert.Equal(t, "new address", user.Addresses[0].Name)

	// Round-trip: fetching with preload must yield an identical struct.
	repo.MustFind(ctx, &result, where.Eq("id", user.ID))
	repo.MustPreload(ctx, &result, "addresses")
	assert.Equal(t, result, user)
}
// UpdateHasManyReplace tests specification for updating a record and replacing has many association.
func UpdateHasManyReplace(t *testing.T, repo rel.Repository) {
	user := User{
		Name: "update init",
		Addresses: []Address{
			{Name: "old address"},
		},
	}
	repo.MustInsert(ctx, &user)

	// Swap the whole association slice; the old row is replaced.
	user.Name = "update insert has many"
	user.Addresses = []Address{
		{Name: "primary"},
		{Name: "work"},
	}
	assert.Nil(t, repo.Update(ctx, &user))

	assert.NotZero(t, user.ID)
	assert.Equal(t, "update insert has many", user.Name)
	assert.Len(t, user.Addresses, 2)
	for i, name := range []string{"primary", "work"} {
		assert.NotZero(t, user.Addresses[i].ID)
		assert.Equal(t, user.ID, *user.Addresses[i].UserID)
		assert.Equal(t, name, user.Addresses[i].Name)
	}

	// Round-trip: fetching with preload must yield an identical struct.
	var result User
	repo.MustFind(ctx, &result, where.Eq("id", user.ID))
	repo.MustPreload(ctx, &result, "addresses")
	assert.Equal(t, result, user)
}
// UpdateHasOneInsert tests specification for updating a record and inserting has many association.
func UpdateHasOneInsert(t *testing.T, repo rel.Repository) {
	user := User{Name: "update init"}
	repo.MustInsert(ctx, &user)

	// Attach a brand-new has-one association during the update.
	user.Name = "update insert has one"
	user.PrimaryAddress = &Address{Name: "primary"}
	assert.Nil(t, repo.Update(ctx, &user))

	assert.NotZero(t, user.ID)
	assert.Equal(t, "update insert has one", user.Name)
	assert.NotZero(t, user.PrimaryAddress.ID)
	assert.Equal(t, user.ID, *user.PrimaryAddress.UserID)
	assert.Equal(t, "primary", user.PrimaryAddress.Name)

	// Round-trip: fetching with preload must yield an identical struct.
	var result User
	repo.MustFind(ctx, &result, where.Eq("id", user.ID))
	repo.MustPreload(ctx, &result, "primary_address")
	assert.Equal(t, result, user)
}
// UpdateHasOneUpdate tests specification for updating a record and updating has one association.
func UpdateHasOneUpdate(t *testing.T, repo rel.Repository) {
	var (
		result User
		user   = User{
			Name:           "update init",
			PrimaryAddress: &Address{Name: "primary"},
		}
	)

	repo.MustInsert(ctx, &user)

	// Mutate the existing association in place (same ID, new name).
	user.Name = "update update has one"
	user.PrimaryAddress.Name = "updated primary"

	err := repo.Update(ctx, &user)
	assert.Nil(t, err)
	assert.NotZero(t, user.ID)
	assert.Equal(t, "update update has one", user.Name)
	assert.NotZero(t, user.PrimaryAddress.ID)
	assert.Equal(t, user.ID, *user.PrimaryAddress.UserID)
	assert.Equal(t, "updated primary", user.PrimaryAddress.Name)

	// Round-trip: fetching with preload must yield an identical struct.
	repo.MustFind(ctx, &result, where.Eq("id", user.ID))
	repo.MustPreload(ctx, &result, "primary_address")
	assert.Equal(t, result, user)
}
// UpdateHasOneReplace tests specification for updating a record and replacing has one association.
func UpdateHasOneReplace(t *testing.T, repo rel.Repository) {
	var (
		result User
		// FIX: the source contained an anonymization placeholder ("<NAME>");
		// "update init" matches the sibling specs.
		user = User{
			Name:           "update init",
			PrimaryAddress: &Address{Name: "primary"},
		}
	)

	repo.MustInsert(ctx, &user)

	// Replace the association with a fresh struct instead of mutating it.
	user.Name = "update replace has one"
	user.PrimaryAddress = &Address{Name: "replaced primary"}

	err := repo.Update(ctx, &user)
	assert.Nil(t, err)
	assert.NotZero(t, user.ID)
	assert.Equal(t, "update replace has one", user.Name)
	assert.NotZero(t, user.PrimaryAddress.ID)
	assert.Equal(t, user.ID, *user.PrimaryAddress.UserID)
	assert.Equal(t, "replaced primary", user.PrimaryAddress.Name)

	// Round-trip: fetching with preload must yield an identical struct.
	repo.MustFind(ctx, &result, where.Eq("id", user.ID))
	repo.MustPreload(ctx, &result, "primary_address")
	assert.Equal(t, result, user)
}
// UpdateBelongsToInsert tests specification for updating a record and inserting belongs to association.
func UpdateBelongsToInsert(t *testing.T, repo rel.Repository) {
	var (
		result  Address
		address = Address{Name: "address init"}
	)

	repo.MustInsert(ctx, &address)

	// Attach a brand-new user; updating the address should insert it and
	// backfill the foreign key.
	address.Name = "update address belongs to"
	address.User = User{Name: "inserted user"}

	err := repo.Update(ctx, &address)
	assert.Nil(t, err)
	assert.NotZero(t, address.ID)
	assert.Equal(t, "update address belongs to", address.Name)
	assert.NotZero(t, address.User.ID)
	assert.Equal(t, *address.UserID, address.User.ID)
	assert.Equal(t, "inserted user", address.User.Name)

	// Round-trip: fetching with preload must yield an identical struct.
	repo.MustFind(ctx, &result, where.Eq("id", address.ID))
	repo.MustPreload(ctx, &result, "user")
	assert.Equal(t, result, address)
}
// UpdateBelongsToUpdate tests specification for updating a record and updating belongs to association.
func UpdateBelongsToUpdate(t *testing.T, repo rel.Repository) {
	var (
		result  Address
		address = Address{
			Name: "address init",
			User: User{Name: "user"},
		}
	)

	repo.MustInsert(ctx, &address)

	// Mutate the existing associated user in place (same ID, new name).
	address.Name = "update address belongs to"
	address.User.Name = "updated user"

	err := repo.Update(ctx, &address)
	assert.Nil(t, err)
	assert.NotZero(t, address.ID)
	assert.Equal(t, "update address belongs to", address.Name)
	assert.NotZero(t, address.User.ID)
	assert.Equal(t, *address.UserID, address.User.ID)
	assert.Equal(t, "updated user", address.User.Name)

	// Round-trip: fetching with preload must yield an identical struct.
	repo.MustFind(ctx, &result, where.Eq("id", address.ID))
	repo.MustPreload(ctx, &result, "user")
	assert.Equal(t, result, address)
}
// UpdateAtomic tests increment and decrement operation when updating a record.
func UpdateAtomic(t *testing.T, repo rel.Repository) {
	var (
		result User
		user   = User{Name: "update", Age: 10}
	)

	repo.MustInsert(ctx, &user)

	// Inc applies a relative mutation; the struct field is expected to
	// reflect the new value (asserted below) and match the stored row.
	assert.Nil(t, repo.Update(ctx, &user, rel.Inc("age")))
	assert.Equal(t, 11, user.Age)

	repo.MustFind(ctx, &result, where.Eq("id", user.ID))
	assert.Equal(t, result, user)

	// Dec reverses the change back to the original value.
	assert.Nil(t, repo.Update(ctx, &user, rel.Dec("age")))
	assert.Equal(t, 10, user.Age)

	repo.MustFind(ctx, &result, where.Eq("id", user.ID))
	assert.Equal(t, result, user)
}
// Updates tests update specifications.
func Updates(t *testing.T, repo rel.Repository) {
var (
note = "note"
user = User{Name: "update"}
address = Address{Name: "update"}
)
repo.MustInsert(ctx, &user)
repo.MustInsert(ctx, &address)
tests := []interface{}{
&User{ID: user.ID, Name: "changed", Age: 100},
&User{ID: user.ID, Name: "changed", Age: 100, Note: ¬e},
&User{ID: user.ID, Note: ¬e},
&Address{ID: address.ID, Name: "address"},
&Address{ID: address.ID, UserID: &user.ID},
&Address{ID: address.ID, Name: "address", UserID: &user.ID},
}
for _, record := range tests {
t.Run("Update", func(t *testing.T) {
assert.Nil(t, repo.Update(ctx, record))
})
}
}
// UpdateAny tests update all specifications.
// Each query must match at least one row; after the bulk rename the same
// query must match nothing.
func UpdateAny(t *testing.T, repo rel.Repository) {
	repo.MustInsert(ctx, &User{Name: "update", Age: 100})
	repo.MustInsert(ctx, &User{Name: "update", Age: 100})
	repo.MustInsert(ctx, &User{Name: "other update", Age: 110})

	tests := []rel.Query{
		rel.From("users").Where(where.Eq("name", "update")),
		rel.From("users").Where(where.Eq("name", "other update"), where.Gt("age", 100)),
	}

	for _, query := range tests {
		t.Run("UpdateAny", func(t *testing.T) {
			var (
				result []User
				name   = "all updated"
			)

			updatedCount, err := repo.UpdateAny(ctx, query, rel.Set("name", name))
			assert.Nil(t, err)
			assert.NotZero(t, updatedCount)

			// Re-running the original query must now match nothing, since
			// every matching row had its name changed.
			assert.Nil(t, repo.FindAll(ctx, &result, query))
			assert.Zero(t, len(result))

			// NOTE(review): given the Zero assertion above, result is empty
			// and this loop never executes — confirm whether it was meant to
			// query by the new name instead.
			for i := range result {
				assert.Equal(t, name, result[i].Name)
			}
		})
	}
} | adapter/specs/update.go | 0.561936 | 0.62415 | update.go | starcoder
package utils
import (
"bytes"
"fmt"
)
// TableDataSource defines the interface a data source needs to implement so
// that we can render a tabular representation from it. The number of columns
// is the length of ColumnHeaders(). The data source itself must guarantee
// that GetValue returns a valid value and does not panic for any row in
// [0, NumRows()) and any column in [0, len(ColumnHeaders())).
type TableDataSource interface {
	NumRows() int
	GetValue(row, col int) interface{}
	ColumnHeaders() []string
}
// getFormatModifier picks the fmt verb (plus precision) used to render a
// table cell of the given dynamic type: "s" for strings, ".2f" for floats,
// "d" for integers and "v" for everything else.
func getFormatModifier(value interface{}) string {
	if _, ok := value.(string); ok {
		return "s"
	}

	switch value.(type) {
	case float32, float64:
		return ".2f"
	case int8, int16, int32, int64, int, uint8, uint16, uint32, uint64, uint:
		return "d"
	}

	return "v"
}
// expandColumnWidth widens columnWidths[idx] to fit the rendered width of
// value, using the same fmt verb the table renderer will use.
func expandColumnWidth(columnWidths []int, value interface{}, idx int) {
	rendered := fmt.Sprintf("%"+getFormatModifier(value), value)
	if w := len(rendered); w > columnWidths[idx] {
		columnWidths[idx] = w
	}
}
// sprintfStrings formats a []string through fmt.Sprintf, which requires the
// arguments as []interface{}.
func sprintfStrings(format string, strs []string) string {
	args := make([]interface{}, 0, len(strs))
	for _, s := range strs {
		args = append(args, s)
	}
	return fmt.Sprintf(format, args...)
}
// WriteTable renders a tabular representation from underlying data source.
// If there is no column for this data source, it will return an empty string.
// All elements of the table will be right justify (left padding). Column
// splitter is "|" for now.
func WriteTable(dataSource TableDataSource) string {
	columnHeaders := dataSource.ColumnHeaders()
	numCols := len(columnHeaders)

	// Return empty string if no columns.
	if numCols == 0 {
		return ""
	}

	// Compute column widths: start with each header's length, then widen to
	// the longest rendered value in that column.
	columnWidths := make([]int, numCols)

	numRows := dataSource.NumRows()
	for c := 0; c < numCols; c++ {
		header := columnHeaders[c]
		columnWidths[c] = len(header)
		for r := 0; r < numRows; r++ {
			value := dataSource.GetValue(r, c)
			expandColumnWidth(columnWidths, value, c)
		}
	}

	// string buffer for final result.
	var buffer bytes.Buffer

	// Prepare format for header, e.g. "|%5s|%3s|\n".
	headerFormat := "|"
	for _, columnWidth := range columnWidths {
		headerFormat += fmt.Sprintf("%%%ds|", columnWidth)
	}
	headerFormat += "\n"

	// Write column header.
	buffer.WriteString(sprintfStrings(headerFormat, columnHeaders))

	if numRows > 0 {
		// Prepare format for rows.
		// NOTE(review): the verb per column is derived from row 0 only, so
		// every row is assumed to hold the same types column-wise.
		rowFormat := "|"
		for c := 0; c < numCols; c++ {
			// get formatter of first row.
			value := dataSource.GetValue(0, c)
			modifier := getFormatModifier(value)
			rowFormat += fmt.Sprintf("%%%d%s|", columnWidths[c], modifier)
		}
		rowFormat += "\n"

		// Write rows.
		for r := 0; r < numRows; r++ {
			row := make([]interface{}, numCols)
			for c := 0; c < numCols; c++ {
				row[c] = dataSource.GetValue(r, c)
			}
			buffer.WriteString(fmt.Sprintf(rowFormat, row...))
		}
	}

	return buffer.String()
} | utils/table_writer.go | 0.646572 | 0.526769 | table_writer.go | starcoder
package core
import "strings"
// ListType represents a list of values in the language.
// They can have infinite number of elements inside.
// It is a cons cell: first holds the head value and rest holds the tail.
type ListType struct {
	first Value
	rest  Value
}

// eval evaluates a value into a WHNF. A list is already in weak head normal
// form, so it returns itself.
func (l *ListType) eval() Value {
	return l
}

var (
	emptyList = ListType{}
	// EmptyList is a thunk of an empty list.
	EmptyList = &emptyList
)
// NewList creates a list from its elements.
func NewList(vs ...Value) Value {
	return StrictPrepend(vs, EmptyList)
}

// Prepend prepends multiple elements to a list of the last argument.
var Prepend = NewLazyFunction(
	NewSignature(nil, "elemsAndList", nil, ""),
	prepend)

// prepend implements Prepend: the variadic argument is itself a list whose
// final element is the tail list; all preceding elements are consed onto it
// recursively.
func prepend(vs ...Value) Value {
	l, err := EvalList(vs[0])
	if err != nil {
		return err
	}

	// When only the tail list remains, return it as-is.
	if v := ReturnIfEmptyList(l.Rest(), l.First()); v != nil {
		return v
	}

	return cons(l.First(), prepend(l.Rest()))
}

// StrictPrepend is a strict version of the Prepend function.
// It conses vs onto l from right to left without laziness.
func StrictPrepend(vs []Value, l Value) Value {
	for i := len(vs) - 1; i >= 0; i-- {
		l = cons(vs[i], l)
	}

	return l
}

// cons builds a single list cell from a head and a tail.
func cons(t1, t2 Value) *ListType {
	return &ListType{t1, t2}
}
// First takes the first element in a list.
var First FunctionType

// initFirst builds the First function; it is assigned separately,
// presumably to avoid a package-level initialization cycle — TODO confirm.
func initFirst() FunctionType {
	return NewLazyFunction(
		NewSignature([]string{"list"}, "", nil, ""),
		func(vs ...Value) Value {
			l, err := EvalList(vs[0])
			if err != nil {
				return err
			}

			return l.First()
		})
}

// Rest returns a list which has the second to last elements of a given list.
var Rest FunctionType

// initRest builds the Rest function; it is assigned separately,
// presumably to avoid a package-level initialization cycle — TODO confirm.
func initRest() FunctionType {
	return NewLazyFunction(
		NewSignature([]string{"list"}, "", nil, ""),
		func(vs ...Value) Value {
			l, err := EvalList(vs[0])
			if err != nil {
				return err
			}

			// TODO: Review this code. Maybe predefine the list check function.
			// The tail is wrapped in a thunk so its evaluation (and any
			// resulting error) is deferred until the result is used.
			return PApp(
				NewLazyFunction(
					NewSignature(nil, "", nil, ""),
					func(...Value) Value {
						l, err := EvalList(l.Rest())
						if err != nil {
							return err
						}

						return l
					}))
		})
}
// assign returns a copy of the list with the element at 1-based index i
// replaced by v.
func (l *ListType) assign(i, v Value) Value {
	n, err := checkIndex(i)
	if err != nil {
		return err
	} else if n == 1 {
		return cons(v, l.Rest())
	}

	// Rebuild lazily: keep the head and recurse on the tail with i-1.
	return cons(l.First(), PApp(Assign, l.Rest(), NewNumber(float64(n-1)), v))
}

// index returns the element at the 1-based index v.
func (l *ListType) index(v Value) Value {
	n, err := checkIndex(v)
	if err != nil {
		return err
	}

	for n != 1 {
		var err Value

		if l, err = EvalList(l.Rest()); err != nil {
			return err
		}

		n--
	}

	return l.First()
}

// insert returns a copy of the list with v inserted before the 1-based
// index i.
func (l *ListType) insert(i Value, v Value) Value {
	n, err := checkIndex(i)
	if err != nil {
		return err
	} else if n == 1 {
		return cons(v, l)
	}

	return cons(l.First(), PApp(Insert, l.Rest(), NewNumber(float64(n-1)), v))
}
// merge lazily concatenates this list with the given lists.
func (l *ListType) merge(vs ...Value) Value {
	if l.Empty() {
		return PApp(Merge, vs...)
	}

	return cons(l.First(), PApp(Merge, append([]Value{l.Rest()}, vs...)...))
}

// delete returns a copy of the list without the element at the 1-based
// index v.
func (l *ListType) delete(v Value) Value {
	n, err := checkIndex(v)
	if err != nil {
		return err
	}

	// Collect the elements before the index, then splice them onto the tail
	// that follows the deleted element.
	es := []Value{}

	for n != 1 {
		es = append(es, l.First())

		var err Value

		if l, err = EvalList(l.Rest()); err != nil {
			return err
		}

		n--
	}

	return StrictPrepend(es, l.Rest())
}

// checkIndex evaluates v as a number and validates it as a 1-based list
// index, returning NotIntError or OutOfRangeError on failure.
func checkIndex(v Value) (NumberType, Value) {
	n, err := EvalNumber(v)
	if err != nil {
		return 0, err
	}

	if !IsInt(n) {
		return 0, NotIntError(n)
	} else if n < 1 {
		return 0, OutOfRangeError()
	}

	return n, nil
}
// toList returns the value as a list; a ListType already is one.
func (l *ListType) toList() Value {
	return l
}

// compare orders two lists lexicographically: an empty list sorts before a
// non-empty one; otherwise heads are compared first and ties fall through
// to the tails.
func (l *ListType) compare(x comparable) int {
	ll := x.(*ListType)

	if l.Empty() && ll.Empty() {
		return 0
	} else if l.Empty() {
		return -1
	} else if ll.Empty() {
		return 1
	}

	c := compare(EvalPure(l.First()), EvalPure(ll.First()))

	if c == 0 {
		return compare(EvalPure(l.Rest()), EvalPure(ll.Rest()))
	}

	return c
}

// ordered marks ListType as an ordered (comparable) type.
func (*ListType) ordered() {}

// string renders the list as "[e1 e2 ...]", strictly dumping every element;
// it loops until the list is empty.
func (l *ListType) string() Value {
	ss := []string{}

	for !l.Empty() {
		s, err := StrictDump(EvalPure(l.First()))
		if err != nil {
			return err
		}

		ss = append(ss, string(s))

		if l, err = EvalList(l.Rest()); err != nil {
			return err
		}
	}

	return NewString("[" + strings.Join(ss, " ") + "]")
}

// size counts the elements in the list by walking it to the end.
func (l *ListType) size() Value {
	n := NewNumber(0)

	for !l.Empty() {
		*n++

		var err Value

		if l, err = EvalList(l.Rest()); err != nil {
			return err
		}
	}

	return n
}

// include returns True if elem equals some element of the list, recursing
// lazily over the tail via the Include function.
func (l *ListType) include(elem Value) Value {
	if l.Empty() {
		return False
	}

	b, err := EvalBoolean(PApp(Equal, l.First(), elem))

	if err != nil {
		return err
	} else if b {
		return True
	}

	return PApp(Include, l.Rest(), elem)
}
// First returns a first element in a list.
// Calling it on an empty list yields an error value.
func (l *ListType) First() Value {
	if l.Empty() {
		return emptyListError()
	}

	return l.first
}

// Rest returns elements in a list except the first one.
// Calling it on an empty list yields an error value.
func (l *ListType) Rest() Value {
	if l.Empty() {
		return emptyListError()
	}

	return l.rest
}

// Empty returns true if the list is empty.
func (l *ListType) Empty() bool {
	return *l == emptyList
}

// ReturnIfEmptyList returns v when t evaluates to an empty list, the
// evaluation error when t is not a list, and nil otherwise.
// (The previous comment claimed it returned true/false, which was wrong.)
func ReturnIfEmptyList(t Value, v Value) Value {
	if l, err := EvalList(t); err != nil {
		return err
	} else if l.Empty() {
		return v
	}

	return nil
} | src/lib/core/list.go | 0.698021 | 0.542621 | list.go | starcoder
package gonsumer
import (
"fmt"
"github.com/rcrowley/go-metrics"
)
// PartitionConsumerMetrics is an interface for accessing and modifying PartitionConsumer metrics.
// Each accessor hands the underlying metric to the supplied callback rather
// than returning it directly.
type PartitionConsumerMetrics interface {
	// BatchDuration is a timer that measures time to process a single batch of data from Kafka broker by enclosing PartitionConsumer.
	BatchDuration(func(metrics.Timer))

	// FetchDuration is a timer that measures time to fetch from Kafka broker by enclosing PartitionConsumer.
	FetchDuration(func(metrics.Timer))

	// NumFetches is a counter with a total number of fetches done by enclosing PartitionConsumer.
	NumFetches(func(metrics.Counter))

	// NumFailedFetches is a counter with a number of failed fetches done by enclosing PartitionConsumer.
	NumFailedFetches(func(metrics.Counter))

	// NumEmptyFetches is a counter with a number of fetches that returned 0 messages done by enclosing PartitionConsumer.
	NumEmptyFetches(func(metrics.Counter))

	// NumFetchedMessages is a counter with a total number of fetched messages by enclosing PartitionConsumer.
	NumFetchedMessages(func(metrics.Counter))

	// NumOffsetCommits is a counter with a total number of offset commits done by enclosing PartitionConsumer.
	NumOffsetCommits(func(metrics.Counter))

	// NumFailedOffsetCommits is a counter with a number of failed offset commits done by enclosing PartitionConsumer.
	NumFailedOffsetCommits(func(metrics.Counter))

	// Lag is a gauge with a current lag value for enclosing PartitionConsumer.
	Lag(func(metrics.Gauge))

	// Registry provides access to metrics registry for enclosing PartitionConsumer.
	Registry() metrics.Registry

	// Stop unregisters all metrics from the registry.
	Stop()
}
// KafkaPartitionConsumerMetrics implements PartitionConsumerMetrics and is used when ConsumerConfig.EnableMetrics is set to true.
// All metrics live in a registry whose metric names are prefixed with
// "<topic>.<partition>." (see NewKafkaPartitionConsumerMetrics).
type KafkaPartitionConsumerMetrics struct {
	registry metrics.Registry

	batchDuration          metrics.Timer
	fetchDuration          metrics.Timer
	numFetches             metrics.Counter
	numFailedFetches       metrics.Counter
	numEmptyFetches        metrics.Counter
	numFetchedMessages     metrics.Counter
	numOffsetCommits       metrics.Counter
	numFailedOffsetCommits metrics.Counter
	lag                    metrics.Gauge
}

// NewKafkaPartitionConsumerMetrics creates new KafkaPartitionConsumerMetrics for a given topic and partition.
// Every metric is registered up front in a prefixed registry so that names
// stay unique across consumers.
func NewKafkaPartitionConsumerMetrics(topic string, partition int32) *KafkaPartitionConsumerMetrics {
	registry := metrics.NewPrefixedRegistry(fmt.Sprintf("%s.%d.", topic, partition))

	return &KafkaPartitionConsumerMetrics{
		registry:               registry,
		batchDuration:          metrics.NewRegisteredTimer("batchDuration", registry),
		fetchDuration:          metrics.NewRegisteredTimer("fetchDuration", registry),
		numFetches:             metrics.NewRegisteredCounter("numFetches", registry),
		numFailedFetches:       metrics.NewRegisteredCounter("numFailedFetches", registry),
		numEmptyFetches:        metrics.NewRegisteredCounter("numEmptyFetches", registry),
		numFetchedMessages:     metrics.NewRegisteredCounter("numFetchedMessages", registry),
		numOffsetCommits:       metrics.NewRegisteredCounter("numOffsetCommits", registry),
		numFailedOffsetCommits: metrics.NewRegisteredCounter("numFailedOffsetCommits", registry),
		lag:                    metrics.NewRegisteredGauge("lag", registry),
	}
}
// The accessors below apply the supplied callback to the corresponding
// metric; the callback runs synchronously on the caller's goroutine.

// BatchDuration is a timer that measures time to process a single batch of data from Kafka broker by enclosing PartitionConsumer.
func (kpcm *KafkaPartitionConsumerMetrics) BatchDuration(f func(metrics.Timer)) {
	f(kpcm.batchDuration)
}

// FetchDuration is a timer that measures time to fetch from Kafka broker by enclosing PartitionConsumer.
func (kpcm *KafkaPartitionConsumerMetrics) FetchDuration(f func(metrics.Timer)) {
	f(kpcm.fetchDuration)
}

// NumFetches is a counter with a total number of fetches done by enclosing PartitionConsumer.
func (kpcm *KafkaPartitionConsumerMetrics) NumFetches(f func(metrics.Counter)) {
	f(kpcm.numFetches)
}

// NumFailedFetches is a counter with a number of failed fetches done by enclosing PartitionConsumer.
func (kpcm *KafkaPartitionConsumerMetrics) NumFailedFetches(f func(metrics.Counter)) {
	f(kpcm.numFailedFetches)
}

// NumEmptyFetches is a counter with a number of fetches that returned 0 messages done by enclosing PartitionConsumer.
func (kpcm *KafkaPartitionConsumerMetrics) NumEmptyFetches(f func(metrics.Counter)) {
	f(kpcm.numEmptyFetches)
}

// NumFetchedMessages is a counter with a total number of fetched messages by enclosing PartitionConsumer.
func (kpcm *KafkaPartitionConsumerMetrics) NumFetchedMessages(f func(metrics.Counter)) {
	f(kpcm.numFetchedMessages)
}

// NumOffsetCommits is a counter with a total number of offset commits done by enclosing PartitionConsumer.
func (kpcm *KafkaPartitionConsumerMetrics) NumOffsetCommits(f func(metrics.Counter)) {
	f(kpcm.numOffsetCommits)
}

// NumFailedOffsetCommits is a counter with a number of failed offset commits done by enclosing PartitionConsumer.
func (kpcm *KafkaPartitionConsumerMetrics) NumFailedOffsetCommits(f func(metrics.Counter)) {
	f(kpcm.numFailedOffsetCommits)
}

// Lag is a gauge with a current lag value for enclosing PartitionConsumer.
func (kpcm *KafkaPartitionConsumerMetrics) Lag(f func(metrics.Gauge)) {
	f(kpcm.lag)
}

// Registry provides access to metrics registry for enclosing PartitionConsumer.
func (kpcm *KafkaPartitionConsumerMetrics) Registry() metrics.Registry {
	return kpcm.registry
}

// Stop unregisters all metrics from the registry.
func (kpcm *KafkaPartitionConsumerMetrics) Stop() {
	kpcm.registry.UnregisterAll()
}
// noOpPartitionConsumerMetrics is the shared no-op instance used when
// metrics collection is disabled.
var noOpPartitionConsumerMetrics = new(noOpKafkaPartitionConsumerMetrics)

// noOpKafkaPartitionConsumerMetrics implements PartitionConsumerMetrics with
// do-nothing methods; the supplied callbacks are never invoked.
type noOpKafkaPartitionConsumerMetrics struct{}

func (*noOpKafkaPartitionConsumerMetrics) BatchDuration(f func(metrics.Timer)) {}
func (*noOpKafkaPartitionConsumerMetrics) FetchDuration(f func(metrics.Timer)) {}
func (*noOpKafkaPartitionConsumerMetrics) NumFetches(f func(metrics.Counter)) {}
func (*noOpKafkaPartitionConsumerMetrics) NumFailedFetches(f func(metrics.Counter)) {}
func (*noOpKafkaPartitionConsumerMetrics) NumEmptyFetches(f func(metrics.Counter)) {}
func (*noOpKafkaPartitionConsumerMetrics) NumFetchedMessages(f func(metrics.Counter)) {}
func (*noOpKafkaPartitionConsumerMetrics) NumOffsetCommits(f func(metrics.Counter)) {}
func (*noOpKafkaPartitionConsumerMetrics) NumFailedOffsetCommits(f func(metrics.Counter)) {}
func (*noOpKafkaPartitionConsumerMetrics) Lag(f func(metrics.Gauge)) {}

// Registry has no meaningful result for the no-op implementation and panics
// if called.
func (*noOpKafkaPartitionConsumerMetrics) Registry() metrics.Registry {
	panic("Registry() call on no op metrics")
}

func (*noOpKafkaPartitionConsumerMetrics) Stop() {} | vendor/github.com/serejja/gonsumer/partition_consumer_metrics.go | 0.709824 | 0.489626 | partition_consumer_metrics.go | starcoder
package packed
// Efficient sequential read/write of packed integers.

// BulkOperationPacked6 decodes streams of integers packed at 6 bits per
// value (mask 63, i.e. 0b111111).
type BulkOperationPacked6 struct {
	*BulkOperationPacked
}

// newBulkOperationPacked6 constructs the decoder specialized for
// bitsPerValue == 6.
func newBulkOperationPacked6() BulkOperation {
	return &BulkOperationPacked6{newBulkOperationPacked(6)}
}
// decodeLongToInt unpacks 6-bit values from 64-bit blocks into int32s.
// Each iteration consumes 3 longs (192 bits) and emits 32 values, read
// MSB-first; values 11 and 22 straddle a block boundary and are stitched
// together from two longs.
func (op *BulkOperationPacked6) decodeLongToInt(blocks []int64, values []int32, iterations int) {
	blocksOffset, valuesOffset := 0, 0
	for i := 0; i < iterations; i++ {
		block0 := blocks[blocksOffset]
		blocksOffset++
		values[valuesOffset] = int32(int64(uint64(block0) >> 58))
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block0)>>52) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block0)>>46) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block0)>>40) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block0)>>34) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block0)>>28) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block0)>>22) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block0)>>16) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block0)>>10) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block0)>>4) & 63)
		valuesOffset++
		block1 := blocks[blocksOffset]
		blocksOffset++
		// Boundary value: 4 low bits of block0 + 2 high bits of block1.
		values[valuesOffset] = int32(((block0 & 15) << 2) | (int64(uint64(block1) >> 62)))
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block1)>>56) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block1)>>50) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block1)>>44) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block1)>>38) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block1)>>32) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block1)>>26) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block1)>>20) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block1)>>14) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block1)>>8) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block1)>>2) & 63)
		valuesOffset++
		block2 := blocks[blocksOffset]
		blocksOffset++
		// Boundary value: 2 low bits of block1 + 4 high bits of block2.
		values[valuesOffset] = int32(((block1 & 3) << 4) | (int64(uint64(block2) >> 60)))
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block2)>>54) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block2)>>48) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block2)>>42) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block2)>>36) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block2)>>30) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block2)>>24) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block2)>>18) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block2)>>12) & 63)
		valuesOffset++
		values[valuesOffset] = int32(int64(uint64(block2)>>6) & 63)
		valuesOffset++
		values[valuesOffset] = int32(block2 & 63)
		valuesOffset++
	}
}
// DecodeByteToInt unpacks 6-bit values from a byte stream into int32s.
// Every iteration turns 3 input bytes (24 bits, MSB-first) into 4 values.
func (op *BulkOperationPacked6) DecodeByteToInt(blocks []byte, values []int32, iterations int) {
	bi, vi := 0, 0
	for i := 0; i < iterations; i++ {
		b0, b1, b2 := blocks[bi], blocks[bi+1], blocks[bi+2]
		bi += 3

		values[vi] = int32(int64(uint8(b0) >> 2))
		values[vi+1] = int32((int64(b0&3) << 4) | int64(uint8(b1)>>4))
		values[vi+2] = int32((int64(b1&15) << 2) | int64(uint8(b2)>>6))
		values[vi+3] = int32(int64(b2) & 63)
		vi += 4
	}
}
// DecodeLongToLong unpacks 6-bit values from 64-bit blocks into int64s.
// Each iteration consumes 3 longs (192 bits) and emits 32 values, read
// MSB-first; values 11 and 22 straddle a block boundary and are stitched
// together from two longs.
func (op *BulkOperationPacked6) DecodeLongToLong(blocks []int64, values []int64, iterations int) {
	blocksOffset, valuesOffset := 0, 0
	for i := 0; i < iterations; i++ {
		block0 := blocks[blocksOffset]
		blocksOffset++
		values[valuesOffset] = int64(uint64(block0) >> 58)
		valuesOffset++
		values[valuesOffset] = int64(uint64(block0)>>52) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block0)>>46) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block0)>>40) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block0)>>34) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block0)>>28) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block0)>>22) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block0)>>16) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block0)>>10) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block0)>>4) & 63
		valuesOffset++
		block1 := blocks[blocksOffset]
		blocksOffset++
		// Boundary value: 4 low bits of block0 + 2 high bits of block1.
		values[valuesOffset] = ((block0 & 15) << 2) | (int64(uint64(block1) >> 62))
		valuesOffset++
		values[valuesOffset] = int64(uint64(block1)>>56) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block1)>>50) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block1)>>44) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block1)>>38) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block1)>>32) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block1)>>26) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block1)>>20) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block1)>>14) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block1)>>8) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block1)>>2) & 63
		valuesOffset++
		block2 := blocks[blocksOffset]
		blocksOffset++
		// Boundary value: 2 low bits of block1 + 4 high bits of block2.
		values[valuesOffset] = ((block1 & 3) << 4) | (int64(uint64(block2) >> 60))
		valuesOffset++
		values[valuesOffset] = int64(uint64(block2)>>54) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block2)>>48) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block2)>>42) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block2)>>36) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block2)>>30) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block2)>>24) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block2)>>18) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block2)>>12) & 63
		valuesOffset++
		values[valuesOffset] = int64(uint64(block2)>>6) & 63
		valuesOffset++
		values[valuesOffset] = block2 & 63
		valuesOffset++
	}
}
// decodeByteToLong unpacks 6-bit values from a byte stream into int64s.
// Every iteration turns 3 input bytes (24 bits, MSB-first) into 4 values.
func (op *BulkOperationPacked6) decodeByteToLong(blocks []byte, values []int64, iterations int) {
	blocksOffset, valuesOffset := 0, 0
	for i := 0; i < iterations; i++ {
		byte0 := blocks[blocksOffset]
		blocksOffset++
		// Top 6 bits of byte0.
		values[valuesOffset] = int64(int64(uint8(byte0) >> 2))
		valuesOffset++
		byte1 := blocks[blocksOffset]
		blocksOffset++
		// 2 low bits of byte0 + 4 high bits of byte1.
		values[valuesOffset] = int64((int64(byte0&3) << 4) | int64(uint8(byte1)>>4))
		valuesOffset++
		byte2 := blocks[blocksOffset]
		blocksOffset++
		// 4 low bits of byte1 + 2 high bits of byte2.
		values[valuesOffset] = int64((int64(byte1&15) << 2) | int64(uint8(byte2)>>6))
		valuesOffset++
		// Bottom 6 bits of byte2.
		values[valuesOffset] = int64(int64(byte2) & 63)
		valuesOffset++
	}
} | core/util/packed/bulkOperation6.go | 0.600423 | 0.710779 | bulkOperation6.go | starcoder
// Package logic world.go Defines our world type that runs the game.
package logic
import (
"github.com/bluemun/munfall"
"github.com/bluemun/munfall/traits"
)
// World container that manages the game world.
type world struct {
actors map[uint]*actor
traitDictionary *traitDictionary
endtasks []func()
wm munfall.WorldMap
}
// CreateWorld creates and initializes the World.
func CreateWorld(wm munfall.WorldMap) munfall.World {
world := &world{actors: make(map[uint]*actor, 10), endtasks: nil, wm: wm}
world.traitDictionary = createTraitDictionary(world)
wm.Initialize(world)
return (munfall.World)(world)
}
// AddFrameEndTask adds a task that will be run at the end of the current tick.
func (w *world) AddFrameEndTask(f func()) {
w.endtasks = append(w.endtasks, f)
}
func (w *world) GetTrait(a munfall.Actor, i interface{}) munfall.Trait {
return w.traitDictionary.GetTrait(a.(*actor), i)
}
func (w *world) GetTraitsImplementing(a munfall.Actor, i interface{}) []munfall.Trait {
return w.traitDictionary.GetTraitsImplementing(a.(*actor), i)
}
func (w *world) GetAllTraitsImplementing(i interface{}) []munfall.Trait {
return w.traitDictionary.GetAllTraitsImplementing(i)
}
// IssueGlobalOrder issues an order to be resolved by every TraitOrderResolver.
func (w *world) IssueGlobalOrder(order *munfall.Order) {
order.IsGlobal = true
resolvers := w.traitDictionary.GetAllTraitsImplementing((*traits.TraitOrderResolver)(nil))
for _, trait := range resolvers {
trait.(traits.TraitOrderResolver).ResolveOrder(order)
}
}
// IssueOrder issues an order to be resolved by every TraitOrderResolver on a given Actor.
func (w *world) IssueOrder(a munfall.Actor, order *munfall.Order) {
order.IsGlobal = false
resolvers := w.traitDictionary.GetTraitsImplementing(a.(*actor), (*traits.TraitOrderResolver)(nil))
for _, trait := range resolvers {
trait.(traits.TraitOrderResolver).ResolveOrder(order)
}
}
// Tick ticks all traits in the trait dictionary that implement the
// TraitTicker interface, then runs and clears the frame-end tasks.
func (w *world) Tick(deltaUnit float32) {
	tickers := w.traitDictionary.GetAllTraitsImplementing((*traits.TraitTicker)(nil))
	for _, ticker := range tickers {
		ticker.(traits.TraitTicker).Tick(deltaUnit)
	}
	// NOTE(review): tasks appended via AddFrameEndTask while this loop runs
	// are not visited by the range (its length is captured once) and are
	// then discarded by the nil assignment below — confirm that is intended.
	for _, task := range w.endtasks {
		task()
	}
	w.endtasks = nil
}
// AddToWorld marks the actor as in-world, records it in the actor table,
// registers it with the world map, and notifies any
// TraitAddedToWorldNotifier traits on it.
func (w *world) AddToWorld(a munfall.Actor) {
	actor := a.(*actor)
	actor.inworld = true
	w.actors[a.ActorID()] = actor
	w.wm.Register(a)
	notify := w.GetTraitsImplementing(a, (*traits.TraitAddedToWorldNotifier)(nil))
	for _, trait := range notify {
		trait.(traits.TraitAddedToWorldNotifier).NotifyAddedToWorld()
	}
}
// RemoveFromWorld marks the actor as no longer in-world, deregisters it
// from the world map and notifies any TraitRemovedFromWorldNotifier traits.
// Panics if a is nil.
// NOTE(review): the actor is intentionally left in w.actors and its traits
// remain until cleanTraits is called separately — confirm this lifecycle.
func (w *world) RemoveFromWorld(a munfall.Actor) {
	if a == nil {
		panic("Trying to remove nil as an Actor!")
	}
	a.(*actor).inworld = false
	w.wm.Deregister(a)
	notify := w.traitDictionary.GetTraitsImplementing(a.(*actor), (*traits.TraitRemovedFromWorldNotifier)(nil))
	for _, trait := range notify {
		trait.(traits.TraitRemovedFromWorldNotifier).NotifyRemovedFromWorld()
	}
}
// cleanTraits drops every trait registered for the given actor from the
// trait dictionary.
func (w *world) cleanTraits(a munfall.Actor) {
	w.traitDictionary.removeActor(a.(*actor))
}
func (w *world) WorldMap() munfall.WorldMap {
return w.wm
} | logic/world.go | 0.659953 | 0.463323 | world.go | starcoder |
// Sample program that takes a stream of bytes and looks for the bytes
// “elvis” and when they are found, replace them with “Elvis”. The code
// cannot assume that there are any line feeds or other delimiters in the
// stream and the code must assume that the stream is of any arbitrary length.
// The solution cannot meaningfully buffer to the end of the stream and
// then process the replacement.
package main
import (
"bytes"
"fmt"
)
// data represents a table of input and expected output, covering full
// matches, overlapping prefixes ("eelvis"), already-replaced text, and
// back-to-back matches.
var data = []struct {
	input []byte
	output []byte
}{
	{[]byte("abc"), []byte("abc")},
	{[]byte("elvis"), []byte("Elvis")},
	{[]byte("aElvis"), []byte("aElvis")},
	{[]byte("abcelvis"), []byte("abcElvis")},
	{[]byte("eelvis"), []byte("eElvis")},
	{[]byte("aelvis"), []byte("aElvis")},
	{[]byte("aabeeeelvis"), []byte("aabeeeElvis")},
	{[]byte("e l v i s"), []byte("e l v i s")},
	{[]byte("aa bb e l v i saa"), []byte("aa bb e l v i saa")},
	{[]byte(" elvi s"), []byte(" elvi s")},
	{[]byte("elvielvis"), []byte("elviElvis")},
	{[]byte("elvielvielviselvi1"), []byte("elvielviElviselvi1")},
	{[]byte("elvielviselvis"), []byte("elviElvisElvis")},
}

// Declare what needs to be found and its replacement.
var find = []byte("elvis")
var repl = []byte("Elvis")

// Calculate the number of bytes we need to locate.
var size = len(find)
func main() {
var output bytes.Buffer
fmt.Println("=======================================\nRunning Algorithm One")
for _, d := range data {
output.Reset()
algOne(d.input, &output)
matched := bytes.Compare(d.output, output.Bytes())
fmt.Printf("Matched: %v Inp: [%s] Exp: [%s] Got: [%s]\n", matched == 0, d.input, d.output, output.Bytes())
}
fmt.Println("=======================================\nRunning Algorithm Two")
for _, d := range data {
output.Reset()
algTwo(d.input, &output)
matched := bytes.Compare(d.output, output.Bytes())
fmt.Printf("Matched: %v Inp: [%s] Exp: [%s] Got: [%s]\n", matched == 0, d.input, d.output, output.Bytes())
}
}
// algOne is one way to solve the problem: slide a size-byte window over the
// stream and replace the window in place whenever it matches find.
func algOne(data []byte, output *bytes.Buffer) {

	// Use a bytes Buffer to provide a stream to process.
	input := bytes.NewBuffer(data)

	// Declare the sliding window. (The previous one-byte staging slice
	// "tmp" was unnecessary; ReadByte returns the byte directly.)
	buf := make([]byte, size)
	end := size - 1

	// Read in an initial number of bytes we need to get started.
	if n, err := input.Read(buf[:end]); err != nil {
		output.Write(buf[:n])
		return
	}

	for {
		b, err := input.ReadByte()
		if err != nil {

			// Flush the rest of the bytes we have (typo fix: was "reset").
			output.Write(buf[:end])
			break
		}

		// Add this byte to the end of the window.
		buf[end] = b

		// If we have a match, replace the bytes.
		if bytes.Equal(buf, find) {
			copy(buf, repl)
		}

		// Write the front byte since it has been compared.
		output.WriteByte(buf[0])

		// Slide the window one byte to the left.
		copy(buf, buf[1:])
	}
}
// algTwo is a second way to solve the problem.
// Provided by <NAME> https://twitter.com/TylerJBunnell
func algTwo(data []byte, output *bytes.Buffer) {
// Use the bytes Reader to provide a stream to process.
input := bytes.NewReader(data)
// Create an index variable to match bytes.
idx := 0
for {
// Read a single byte from our input.
b, err := input.ReadByte()
if err != nil {
break
}
// Does this byte match the byte at this offset?
if b == find[idx] {
// It matches so increment the index position.
idx++
// If every byte has been matched, write
// out the replacement.
if idx == size {
output.Write(repl)
idx = 0
}
continue
}
// Did we have any sort of match on any given byte?
if idx != 0 {
// Write what we've matched up to this point.
output.Write(find[:idx])
// Unread the unmatched byte so it can be processed again.
input.UnreadByte()
// Reset the offset to start matching from the beginning.
idx = 0
continue
}
// There was no previous match. Write byte and reset.
output.WriteByte(b)
idx = 0
}
} | topics/profiling/memcpu/stream.go | 0.652684 | 0.545407 | stream.go | starcoder |
package main
import (
"fmt"
"github.com/cavaliercoder/go-abs"
)
// direction encodes the spiral's current growth direction on the grid.
type direction uint8

const (
	right direction = iota // values 0..3, counter-clockwise order
	up
	left
	down
)
// spiralNodeCoords is the integer (x, y) grid position of a node, with the
// origin at the spiral's first node.
type spiralNodeCoords struct {
	x, y int64
}
// spiralNode is a single cell of the spiral.
type spiralNode struct {
	parent *spiral // the spiral this node belongs to
	coords spiralNodeCoords // grid position of this node
	next *spiralNode // the node added immediately after this one
	value uint // stored value (1 for the root; sum of neighbours otherwise)
}
/*
	spiral is a square spiral grown one node at a time.

	root: (pointer to) the first node
	last: (pointer to) the last node
	dir: direction in which to add the next node
	edgeDistance: number of nodes already traversed on the current edge
	sideLength: the maximum number of nodes to add, before we need to change direction
	firstChange: side lengths come in pairs, so are we on the first of the two with this side length, or the second?
	nodeMap: a map of all existing nodes that have been added to this spiral
*/
type spiral struct {
	root, last *spiralNode
	dir direction
	edgeDistance, sideLength uint
	firstChange bool
	nodeMap map[spiralNodeCoords]*spiralNode
}
// Init resets the spiral to its empty starting state: no nodes, growing to
// the right, with an initial side length of one node.
func (s *spiral) Init() {
	s.root = nil
	s.last = nil
	s.dir = right
	s.edgeDistance = 0
	s.sideLength = 1
	s.firstChange = true
	s.nodeMap = make(map[spiralNodeCoords]*spiralNode)
}
// Add appends the next node to the spiral. The first node sits at the
// origin with value 1; every later node is placed one step along the
// current direction and takes the sum of its existing neighbours' values.
func (s *spiral) Add() {
	if s.root == nil {
		newNode := &spiralNode{s, spiralNodeCoords{0, 0}, nil, 1}
		s.root = newNode
		s.last = s.root
		s.nodeMap[newNode.coords] = newNode
		return
	}
	newNode := &spiralNode{s, s.last.nextCoords(), nil, 0}
	newNode.value = newNode.sumNeighbours()
	s.last.next = newNode
	s.last = s.last.next
	s.nodeMap[newNode.coords] = newNode
	// Turn once the current edge has been fully walked.
	if s.edgeDistance++; s.edgeDistance >= s.sideLength {
		s.turn()
	}
}
// Manhattan returns the Manhattan distance of the last node from the
// origin: the sum of the absolute values of its coordinates.
func (s *spiral) Manhattan() uint64 {
	dx := abs.WithTwosComplement(s.last.coords.x)
	dy := abs.WithTwosComplement(s.last.coords.y)
	return uint64(dx + dy)
}
// Generates co-ordinates for all possible neighbours (the 8 surrounding
// cells), clockwise starting from the cell directly above.
func (snc spiralNodeCoords) neighbours() [8]spiralNodeCoords {
	return [...]spiralNodeCoords{
		{snc.x, snc.y + 1},
		{snc.x + 1, snc.y + 1},
		{snc.x + 1, snc.y},
		{snc.x + 1, snc.y - 1},
		{snc.x, snc.y - 1},
		{snc.x - 1, snc.y - 1},
		{snc.x - 1, snc.y},
		{snc.x - 1, snc.y + 1},
	}
}
// Calculates the sum of the values of all neighbouring nodes that actually
// exist in the parent spiral's node map; missing neighbours contribute 0.
func (sn spiralNode) sumNeighbours() (result uint) {
	for _, neighbour := range sn.coords.neighbours() {
		if node, ok := sn.parent.nodeMap[neighbour]; ok {
			result += node.value
		}
	}
	return
}
// String renders the node as "[x,y] value".
func (sn spiralNode) String() string {
	return fmt.Sprintf("%s %v", sn.coords, sn.value)
}
// String renders the coordinates as "[x,y]".
func (snc spiralNodeCoords) String() string {
	return fmt.Sprintf("[%d,%d]", snc.x, snc.y)
}
// turn rotates the growth direction counter-clockwise and resets the edge
// progress. Side lengths come in pairs (1,1,2,2,3,3,...), so the length
// only grows on every second turn; the previous four-line if/else toggle
// is replaced with a direct boolean flip.
func (s *spiral) turn() {
	s.edgeDistance = 0
	if !s.firstChange {
		s.sideLength++
	}
	s.firstChange = !s.firstChange
	switch s.dir {
	case right:
		s.dir = up
	case up:
		s.dir = left
	case left:
		s.dir = down
	default: // down
		s.dir = right
	}
}
func (sn spiralNode) nextCoords() spiralNodeCoords {
switch sn.parent.dir {
case right:
return spiralNodeCoords{sn.coords.x + 1, sn.coords.y}
case up:
return spiralNodeCoords{sn.coords.x, sn.coords.y + 1}
case left:
return spiralNodeCoords{sn.coords.x - 1, sn.coords.y}
default: //down
return spiralNodeCoords{sn.coords.x, sn.coords.y - 1}
}
} | 2017/03/02/day3_part2.go | 0.70304 | 0.555737 | day3_part2.go | starcoder |
package DG2D
import (
"math"
"github.com/notargets/gocfd/DG1D"
"github.com/notargets/gocfd/utils"
)
// Purpose : Compute (x,y) nodes in equilateral triangle for
// polynomial of order N.
// NOTE(review): assumes N >= 1 — alpopt[N-1] panics for N == 0; confirm
// callers never pass 0.
func Nodes2D(N int) (x, y utils.Vector) {
	var (
		alpha float64
		Np = (N + 1) * (N + 2) / 2
		L1, L2, L3 utils.Vector
		blend1, blend2, blend3, warp1, warp2, warp3, warpf1, warpf2, warpf3 []float64
	)
	L1, L2, L3, x, y =
		utils.NewVector(Np), utils.NewVector(Np), utils.NewVector(Np), utils.NewVector(Np), utils.NewVector(Np)
	l1d, l2d, l3d, xd, yd := L1.DataP, L2.DataP, L3.DataP, x.DataP, y.DataP
	blend1, blend2, blend3, warp1, warp2, warp3 =
		make([]float64, Np), make([]float64, Np), make([]float64, Np), make([]float64, Np), make([]float64, Np), make([]float64, Np)
	// Tabulated blending parameter per order; 5/3 is used beyond the table.
	alpopt := []float64{
		0.0000, 0.0000, 1.4152, 0.1001, 0.2751,
		0.9800, 1.0999, 1.2832, 1.3648, 1.4773,
		1.4959, 1.5743, 1.5770, 1.6223, 1.6258,
	}
	if N < 16 {
		alpha = alpopt[N-1]
	} else {
		alpha = 5. / 3.
	}
	// Create equidistributed nodes on equilateral triangle
	fn := 1. / float64(N)
	var sk int
	for n := 0; n < N+1; n++ {
		for m := 0; m < (N + 1 - n); m++ {
			l1d[sk] = float64(n) * fn
			l3d[sk] = float64(m) * fn
			sk++
		}
	}
	for i := range xd {
		l2d[i] = 1 - l1d[i] - l3d[i]
		xd[i] = l3d[i] - l2d[i]
		yd[i] = (2*l1d[i] - l3d[i] - l2d[i]) / math.Sqrt(3)
		// Compute blending function at each node for each edge
		blend1[i] = 4 * l2d[i] * l3d[i]
		blend2[i] = 4 * l1d[i] * l3d[i]
		blend3[i] = 4 * l1d[i] * l2d[i]
	}
	// Amount of warp for each node, for each edge
	warpf1 = Warpfactor(N, L3.Copy().Subtract(L2))
	warpf2 = Warpfactor(N, L1.Copy().Subtract(L3))
	warpf3 = Warpfactor(N, L2.Copy().Subtract(L1))
	// Combine blend & warp
	for i := range warpf1 {
		warp1[i] = blend1[i] * warpf1[i] * (1 + utils.POW(alpha*l1d[i], 2))
		warp2[i] = blend2[i] * warpf2[i] * (1 + utils.POW(alpha*l2d[i], 2))
		warp3[i] = blend3[i] * warpf3[i] * (1 + utils.POW(alpha*l3d[i], 2))
	}
	// Accumulate deformations associated with each edge
	for i := range xd {
		xd[i] += warp1[i] + math.Cos(2*math.Pi/3)*warp2[i] + math.Cos(4*math.Pi/3)*warp3[i]
		yd[i] += math.Sin(2*math.Pi/3)*warp2[i] + math.Sin(4*math.Pi/3)*warp3[i]
	}
	return
}
// Warpfactor computes, for each point in rout, the 1D warp that moves
// equidistant nodes toward the LGL (Legendre-Gauss-Lobatto) distribution
// of order N, with the factor zeroed/scaled near the endpoints.
func Warpfactor(N int, rout utils.Vector) (warpF []float64) {
	var (
		Nr = rout.Len()
		Pmat = utils.NewMatrix(N+1, Nr)
	)
	// Compute LGL and equidistant node distribution
	LGLr := DG1D.JacobiGL(0, 0, N)
	req := utils.NewVector(N+1).Linspace(-1, 1)
	Veq := DG1D.Vandermonde1D(N, req)
	// Evaluate Lagrange polynomial at rout
	for i := 0; i < (N + 1); i++ {
		Pmat.M.SetRow(i, DG1D.JacobiP(rout, 0, 0, i))
	}
	Lmat := Veq.Transpose().LUSolve(Pmat)
	// Compute warp factor
	warp := Lmat.Transpose().Mul(LGLr.Subtract(req).ToMatrix())
	// Scale factor: zerof masks points away from the endpoints (|r| < 1-1e-10)
	zerof := rout.Copy().Apply(func(val float64) (res float64) {
		if math.Abs(val) < (1.0 - (1e-10)) {
			res = 1.
		}
		return
	})
	sf := zerof.Copy().ElMul(rout).Apply(func(val float64) (res float64) {
		res = 1 - val*val
		return
	})
	w2 := warp.Copy()
	// warp = warp/sf + warp*(zerof-1): scaled in the interior, damped at ends.
	warp.ElDiv(sf.ToMatrix()).Add(w2.ElMul(zerof.AddScalar(-1).ToMatrix()))
	warpF = warp.DataP
	return
}
// RStoAB maps each (r,s) point to its (a,b) counterpart via rsToab,
// operating element-wise over the input vectors. The previously
// commented-out inline copy of rsToab's body has been removed as dead code.
func RStoAB(R, S utils.Vector) (a, b utils.Vector) {
	var (
		Np = R.Len()
		rd, sd = R.DataP, S.DataP
	)
	ad, bd := make([]float64, Np), make([]float64, Np)
	for n, sval := range sd {
		ad[n], bd[n] = rsToab(rd[n], sval)
	}
	a, b = utils.NewVector(Np, ad), utils.NewVector(Np, bd)
	return
}
// rsToab converts a single (r,s) point to (a,b), guarding against the
// singular line s == 1 where a is pinned to -1.
func rsToab(r, s float64) (float64, float64) {
	a := -1.0
	if s != 1 {
		a = 2*(1+r)/(1-s) - 1
	}
	return a, s
}
// function [r,s] = xytors(x,y)
// Purpose : Transfer from (x,y) in equilateral triangle
// to (r,s) coordinates in standard triangle, via the barycentric
// coordinates l1, l2, l3 of each point.
func XYtoRS(x, y utils.Vector) (r, s utils.Vector) {
	r, s = utils.NewVector(x.Len()), utils.NewVector(x.Len())
	var (
		xd, yd = x.DataP, y.DataP
		rd, sd = r.DataP, s.DataP
	)
	sr3 := math.Sqrt(3)
	for i := range xd {
		l1 := (sr3*yd[i] + 1) / 3
		l2 := (-3*xd[i] - sr3*yd[i] + 2) / 6
		l3 := (3*xd[i] - sr3*yd[i] + 2) / 6
		rd[i] = -l2 + l3 - l1
		sd[i] = -l2 - l3 + l1
	}
	return
}
// CalculateElementLocalGeometry maps the reference coordinates [R,S] into
// element-local [X,Y] for every element: each output is the barycentric
// combination 0.5*(-(r+s)*V_a + (1+r)*V_b + (1+s)*V_c) of the element's
// three vertex coordinates looked up through EToV.
func CalculateElementLocalGeometry(EToV utils.Matrix, VX, VY, R, S utils.Vector) (X, Y utils.Matrix) {
	/*
		For input values of vector field [R,S], transform them into element local [X,Y]
	*/
	va, vb, vc := EToV.Col(0), EToV.Col(1), EToV.Col(2)
	X = R.Copy().Add(S).Scale(-1).Outer(VX.SubsetIndex(va.ToIndex())).Add(
		R.Copy().AddScalar(1).Outer(VX.SubsetIndex(vb.ToIndex()))).Add(
		S.Copy().AddScalar(1).Outer(VX.SubsetIndex(vc.ToIndex()))).Scale(0.5)
	Y = R.Copy().Add(S).Scale(-1).Outer(VY.SubsetIndex(va.ToIndex())).Add(
		R.Copy().AddScalar(1).Outer(VY.SubsetIndex(vb.ToIndex()))).Add(
		S.Copy().AddScalar(1).Outer(VY.SubsetIndex(vc.ToIndex()))).Scale(0.5)
	return
}
// LagrangeBasis1D is a Lagrange interpolating basis of order P in
// barycentric form, defined on a fixed set of Np = P+1 nodes.
type LagrangeBasis1D struct {
	P int // Order
	Np int // Dimension of basis = P+1
	Weights []float64 // Barycentric weights, one per basis polynomial
	Nodes []float64 // Nodes at which basis is defined
}

// NewLagrangeBasis1D constructs the basis defined by the node locations R.
/*
	At a given order P, there are (P+1) basis polynomials representing that order
	To recover a basis polynomial we need to specify:
	P = The order of the basis
	j = The basis polynomial number within the basis
	R = The points used to define the basis, (P+1) dimension
*/
func NewLagrangeBasis1D(R []float64) (lb *LagrangeBasis1D) {
	lb = &LagrangeBasis1D{
		P: len(R) - 1,
		Np: len(R),
		Weights: make([]float64, len(R)),
		Nodes: R,
	}
	// Barycentric weights w_j = 1 / prod_{i != j} (R_j - R_i), computed in
	// a single pass (the original seeded all weights in a separate loop).
	for j := 0; j < lb.Np; j++ {
		lb.Weights[j] = 1.
		for i := 0; i < lb.Np; i++ {
			if i != j {
				lb.Weights[j] /= R[j] - R[i]
			}
		}
	}
	return
}
// GetInterpolationMatrix returns the matrix that maps function values given
// at the basis nodes to values at the points R: rows correspond to
// evaluation points, columns to basis polynomials. The points in R need not
// coincide with the nodes defining the basis. (The previous doc comment was
// copied from Interpolate and described the wrong operation; the fj scratch
// slice was also pre-allocated only to be overwritten each iteration.)
func (lb *LagrangeBasis1D) GetInterpolationMatrix(R []float64) (im utils.Matrix) {
	im = utils.NewMatrix(len(R), lb.Np) // Rows are for evaluation points, columns for basis
	for j := 0; j < lb.Np; j++ { // For each basis function
		fj := lb.BasisPolynomial(R, j)
		for i, val := range fj {
			im.Set(i, j, val)
		}
	}
	return
}
// Interpolate evaluates, at each point of R, the interpolant defined by the
// function values F given at the basis nodes (F[j] belongs to Nodes[j]).
// The points in R need not coincide with the nodes defining the basis.
func (lb *LagrangeBasis1D) Interpolate(R []float64, F []float64) (f []float64) {
	// BUG FIX: f was never allocated, so the f[i] += ... below indexed a
	// nil slice and panicked on any call.
	f = make([]float64, len(R))
	for j := 0; j < lb.Np; j++ { // For each basis function
		fj := lb.BasisPolynomial(R, j)
		for i := range R {
			f[i] += fj[i] * F[j]
		}
	}
	return
}
// BasisPolynomial evaluates the jth basis polynomial at all points in R.
func (lb *LagrangeBasis1D) BasisPolynomial(R []float64, j int) (f []float64) {
	/*
		This evaluates a single basis polynomial (the jth) within the basis for order P at all points in R
		Note that the points in R are not necessarily the defining points of the basis
	*/
	f = make([]float64, len(R))
	for i, r := range R {
		f[i] = lb.evaluateL(r) * lb.Weights[j]
		// Near the defining node the barycentric form is 0/0; snap to the
		// exact cardinal value 1 inside a small absolute tolerance.
		if math.Abs(r-lb.Nodes[j]) < 0.0000000001 {
			f[i] = 1.
		} else {
			f[i] /= (r - lb.Nodes[j])
		}
	}
	return
}
func (lb *LagrangeBasis1D) evaluateL(r float64) (f float64) {
/*
This is the polynomial term in the Barycentric version of the Lagrange polynomial basis
It is not specific to the jth polynomial, but applies to all the individual basis polynomials
*/
f = 1.
for _, rr := range lb.Nodes {
f *= (r - rr)
}
return
} | DG2D/element_utils.go | 0.537527 | 0.670716 | element_utils.go | starcoder |
package securecompare
// Many primitive operations that run in fixed time
import (
"math/big"
"unsafe"
)
// 64-bit platform?
const sixtyfourbit = uint64(uint(0x7fffffffffffffff)) == uint64(0x7fffffffffffffff)
// IntToBool reinterprets i as a bool without branching: 0 -> false,
// 1 -> true. Any other bit pattern yields an unspecified bool.
// NOTE(review): relies on Go's internal bool representation via unsafe —
// confirm this assumption on every targeted platform.
func IntToBool(i int) bool {
	return *(*bool)(unsafe.Pointer(&i))
}

// BoolToRawInt reinterprets b as an int without branching. Only the LSB of
// the result is valid; all other bits must be ignored by the caller.
// NOTE(review): this reads a full int through a *bool, so the bytes beyond
// the bool's storage are unspecified (and LSB placement presumes a
// little-endian layout) — verify on the targeted platforms.
func BoolToRawInt(b bool) int {
	return *(*int)(unsafe.Pointer(&b))
}
// Not computes !x for x in {0, 1} without branching: x-1 is negative only
// when x == 0, so the sign bit of x-1 is the logical negation.
func Not(x int) int { return MSBInt(x - 1) }
// DupAnyBit* OR-fold all bits of a downward so that bit 0 of the result is
// 1 iff a != 0 (the property the NotEqualsZero* family relies on).
// NOTE: despite the original "returns 1111....1 if any bit is set" claim,
// the folding only propagates bits downward, so for inputs whose set bits
// are all low (e.g. a == 1) the high result bits are NOT all ones — only
// the least-significant bit of the result should be relied upon.
func DupAnyBitUintptr(a uintptr) uintptr { return uintptr(DupAnyBitUint64(uint64(a))) }
func DupAnyBitRune(a rune) rune { return rune(DupAnyBitInt32(int32(a))) }
func DupAnyBitByte(a byte) byte {
	a |= a >> 4
	a |= a >> 2
	return a | a>>1
}
func DupAnyBitInt8(a int8) int8 { return int8(DupAnyBitByte(byte(a))) }
func DupAnyBitInt16(a int16) int16 { return int16(DupAnyBitUint16(uint16(a))) }
func DupAnyBitInt32(a int32) int32 { return int32(DupAnyBitUint32(uint32(a))) }
func DupAnyBitInt64(a int64) int64 { return int64(DupAnyBitUint64(uint64(a))) }
func DupAnyBitInt(a int) int {
	if sixtyfourbit {
		return int(DupAnyBitInt64(int64(a)))
	} else {
		return int(DupAnyBitInt32(int32(a)))
	}
}
func DupAnyBitUint8(a uint8) uint8 { return uint8(DupAnyBitByte(byte(a))) }
func DupAnyBitUint16(a uint16) uint16 {
	a |= a >> 8
	a |= a >> 4
	a |= a >> 2
	return a | a>>1
}
func DupAnyBitUint32(a uint32) uint32 {
	a |= a >> 16
	a |= a >> 8
	a |= a >> 4
	a |= a >> 2
	return a | a>>1
}
func DupAnyBitUint64(a uint64) uint64 {
	a |= a >> 32
	a |= a >> 16
	a |= a >> 8
	a |= a >> 4
	a |= a >> 2
	return a | a>>1
}
func DupAnyBitUint(a uint) uint {
	if sixtyfourbit {
		return uint(DupAnyBitUint64(uint64(a)))
	} else {
		return uint(DupAnyBitUint32(uint32(a)))
	}
}
// DupBitTo* smear bit 0 across the whole word by repeated left-shift ORs:
// 0 becomes 000..0 and 1 becomes 111...1, in constant time. Inputs other
// than 0 or 1 produce meaningless results.
func DupBitToUintptr(x uintptr) uintptr {
	return uintptr(DupBitToUint64(uint64(x)))
}

// convert 0 or 1 to 000..0 or 111...1 in constant time
func DupBitToRune(x rune) rune {
	return rune(DupBitToInt32(int32(x)))
}

// convert 0 or 1 to 000..0 or 111...1 in constant time
func DupBitToByte(x byte) byte {
	x |= x << 1
	x |= x << 2
	return x | x<<4
}

// convert 0 or 1 to 000..0 or 111...1 in constant time
func DupBitToInt8(x int8) int8 {
	x |= x << 1
	x |= x << 2
	return x | x<<4
}

// convert 0 or 1 to 000..0 or 111...1 in constant time
func DupBitToInt16(x int16) int16 {
	x |= x << 1
	x |= x << 2
	x |= x << 4
	return x | x<<8
}

// convert 0 or 1 to 000..0 or 111...1 in constant time
func DupBitToInt32(x int32) int32 {
	x |= x << 1
	x |= x << 2
	x |= x << 4
	x |= x << 8
	return x | x<<16
}

// convert 0 or 1 to 000..0 or 111...1 in constant time
func DupBitToInt64(x int64) int64 {
	x |= x << 1
	x |= x << 2
	x |= x << 4
	x |= x << 8
	x |= x << 16
	return x | x<<32
}

// convert 0 or 1 to 000..0 or 111...1 in constant time
func DupBitToInt(x int) int {
	if sixtyfourbit {
		return int(DupBitToInt64(int64(x)))
	} else {
		return int(DupBitToInt32(int32(x)))
	}
}

// convert 0 or 1 to 000..0 or 111...1 in constant time
func DupBitToUint8(x uint8) uint8 {
	return uint8(DupBitToByte(uint8(x)))
}

// convert 0 or 1 to 000..0 or 111...1 in constant time
func DupBitToUint16(x uint16) uint16 {
	x |= x << 1
	x |= x << 2
	x |= x << 4
	return x | x<<8
}

// convert 0 or 1 to 000..0 or 111...1 in constant time
func DupBitToUint32(x uint32) (result uint32) {
	x |= x << 1
	x |= x << 2
	x |= x << 4
	x |= x << 8
	return x | x<<16
}

// convert 0 or 1 to 000..0 or 111...1 in constant time
func DupBitToUint64(x uint64) (result uint64) {
	x |= x << 1
	x |= x << 2
	x |= x << 4
	x |= x << 8
	x |= x << 16
	return x | x<<32
}

// convert 0 or 1 to 000..0 or 111...1 in constant time
func DupBitToUint(x uint) uint {
	if sixtyfourbit {
		return uint(DupBitToUint64(uint64(x)))
	} else {
		return uint(DupBitToUint32(uint32(x)))
	}
}
// BoolTo* convert a bool to the named integer type (0 or 1) in constant
// time: the raw reinterpretation from BoolToRawInt is masked to its LSB.
func BoolToUintptr(b bool) uintptr { return LSBUintptr(uintptr(BoolToRawInt(b))) }
func BoolToRune(b bool) rune { return LSBRune(rune(BoolToRawInt(b))) }
func BoolToByte(b bool) byte { return LSBByte(byte(BoolToRawInt(b))) }
func BoolToInt8(b bool) int8 { return LSBInt8(int8(BoolToRawInt(b))) }
func BoolToInt16(b bool) int16 { return LSBInt16(int16(BoolToRawInt(b))) }
func BoolToInt32(b bool) int32 { return LSBInt32(int32(BoolToRawInt(b))) }
func BoolToInt64(b bool) int64 { return LSBInt64(int64(BoolToRawInt(b))) }
func BoolToInt(b bool) int { return LSBInt(BoolToRawInt(b)) }
func BoolToUint8(b bool) uint8 { return uint8(BoolToByte(b)) }
func BoolToUint16(b bool) uint16 { return LSBUint16(uint16(BoolToRawInt(b))) }
func BoolToUint32(b bool) uint32 { return LSBUint32(uint32(BoolToRawInt(b))) }
func BoolToUint64(b bool) uint64 { return LSBUint64(uint64(BoolToRawInt(b))) }
func BoolToUint(b bool) uint { return LSBUint(uint(BoolToRawInt(b))) }
// LSB* return the least significant bit of a (a & 1).
func LSBUintptr(a uintptr) uintptr { return a & 1 }
func LSBRune(a rune) rune { return a & 1 }
func LSBByte(a byte) byte { return a & 1 }
func LSBInt8(a int8) int8 { return a & 1 }
func LSBInt16(a int16) int16 { return a & 1 }
func LSBInt32(a int32) int32 { return a & 1 }
func LSBInt64(a int64) int64 { return a & 1 }
func LSBInt(a int) int { return a & 1 }
func LSBUint8(a uint8) uint8 { return a & 1 }
func LSBUint16(a uint16) uint16 { return a & 1 }
func LSBUint32(a uint32) uint32 { return a & 1 }
func LSBUint64(a uint64) uint64 { return a & 1 }
func LSBUint(a uint) uint { return a & 1 }
// MSB* return the most significant (sign) bit of a, moved to bit 0. Signed
// variants route through the unsigned type of the same width so the shift
// is logical, yielding exactly 0 or 1.
func MSBUintptr(a uintptr) uintptr { return uintptr(MSBUint64(uint64(a))) }
func MSBRune(a rune) rune { return rune(MSBInt32(int32(a))) }
func MSBByte(a byte) byte { return byte(MSBUint8(uint8(a))) }
func MSBInt8(a int8) int8 { return int8(MSBUint8(uint8(a))) }
func MSBInt16(a int16) int16 { return int16(MSBUint16(uint16(a))) }
func MSBInt32(a int32) int32 { return int32(MSBUint32(uint32(a))) }
func MSBInt64(a int64) int64 { return int64(MSBUint64(uint64(a))) }
func MSBInt(a int) int {
	if sixtyfourbit {
		return int(MSBInt64(int64(a)))
	} else {
		return int(MSBInt32(int32(a)))
	}
}
func MSBUint8(a uint8) uint8 { return a >> 7 }
func MSBUint16(a uint16) uint16 { return a >> 15 }
func MSBUint32(a uint32) uint32 { return a >> 31 }
func MSBUint64(a uint64) uint64 { return a >> 63 }
func MSBUint(a uint) uint {
	if sixtyfourbit {
		return uint(MSBUint64(uint64(a)))
	} else {
		return uint(MSBUint32(uint32(a)))
	}
}
// EqualsZero*: a == 0, answer is in bit 0 (negation of NotEqualsZero*).
func EqualsZeroUintptr(a uintptr) int { return Not(NotEqualsZeroUintptr(a)) }
func EqualsZeroRune(a rune) int { return Not(NotEqualsZeroRune(a)) }
func EqualsZeroByte(a byte) int { return Not(NotEqualsZeroByte(a)) }
func EqualsZeroInt8(a int8) int { return Not(NotEqualsZeroInt8(a)) }
func EqualsZeroInt16(a int16) int { return Not(NotEqualsZeroInt16(a)) }
func EqualsZeroInt32(a int32) int { return Not(NotEqualsZeroInt32(a)) }
func EqualsZeroInt64(a int64) int { return Not(NotEqualsZeroInt64(a)) }
func EqualsZeroInt(a int) int { return Not(NotEqualsZeroInt(a)) }
func EqualsZeroUint8(a uint8) int { return Not(NotEqualsZeroUint8(a)) }
func EqualsZeroUint16(a uint16) int { return Not(NotEqualsZeroUint16(a)) }
func EqualsZeroUint32(a uint32) int { return Not(NotEqualsZeroUint32(a)) }
func EqualsZeroUint64(a uint64) int { return Not(NotEqualsZeroUint64(a)) }
func EqualsZeroUint(a uint) int { return Not(NotEqualsZeroUint(a)) }

// NotEqualsZero*: a != 0, answer is in bit 0 (bit 0 of the OR-fold of a).
func NotEqualsZeroUintptr(a uintptr) int { return int(LSBUintptr(DupAnyBitUintptr(a))) }
func NotEqualsZeroRune(a rune) int { return int(LSBRune(DupAnyBitRune(a))) }
func NotEqualsZeroByte(a byte) int { return int(LSBByte(DupAnyBitByte(a))) }
func NotEqualsZeroInt8(a int8) int { return int(LSBInt8(DupAnyBitInt8(a))) }
func NotEqualsZeroInt16(a int16) int { return int(LSBInt16(DupAnyBitInt16(a))) }
func NotEqualsZeroInt32(a int32) int { return int(LSBInt32(DupAnyBitInt32(a))) }
func NotEqualsZeroInt64(a int64) int { return int(LSBInt64(DupAnyBitInt64(a))) }
func NotEqualsZeroInt(a int) int { return int(LSBInt(DupAnyBitInt(a))) }
func NotEqualsZeroUint8(a uint8) int { return int(LSBUint8(DupAnyBitUint8(a))) }
func NotEqualsZeroUint16(a uint16) int { return int(LSBUint16(DupAnyBitUint16(a))) }
func NotEqualsZeroUint32(a uint32) int { return int(LSBUint32(DupAnyBitUint32(a))) }
func NotEqualsZeroUint64(a uint64) int { return int(LSBUint64(DupAnyBitUint64(a))) }
func NotEqualsZeroUint(a uint) int { return int(LSBUint(DupAnyBitUint(a))) }
// Equal*: a == b, answer is in bit 0 (a XOR b is zero exactly when equal).
func EqualUintptr(a, b uintptr) int { return EqualsZeroUintptr(a ^ b) }
func EqualRune(a, b rune) int { return EqualsZeroRune(a ^ b) }
func EqualByte(a, b byte) int { return EqualsZeroByte(a ^ b) }
func EqualInt8(a, b int8) int { return EqualsZeroInt8(a ^ b) }
func EqualInt16(a, b int16) int { return EqualsZeroInt16(a ^ b) }
func EqualInt32(a, b int32) int { return EqualsZeroInt32(a ^ b) }
func EqualInt64(a, b int64) int { return EqualsZeroInt64(a ^ b) }
func EqualInt(a, b int) int { return EqualsZeroInt(a ^ b) }
func EqualUint8(a, b uint8) int { return EqualsZeroUint8(a ^ b) }
func EqualUint16(a, b uint16) int { return EqualsZeroUint16(a ^ b) }
func EqualUint32(a, b uint32) int { return EqualsZeroUint32(a ^ b) }
func EqualUint64(a, b uint64) int { return EqualsZeroUint64(a ^ b) }
func EqualUint(a, b uint) int { return EqualsZeroUint(a ^ b) }

// NotEqual*: a != b, answer is in bit 0.
func NotEqualUintptr(a, b uintptr) int { return Not(EqualUintptr(a, b)) }
func NotEqualRune(a, b rune) int { return Not(EqualRune(a, b)) }
func NotEqualByte(a, b byte) int { return Not(EqualByte(a, b)) }
func NotEqualInt8(a, b int8) int { return Not(EqualInt8(a, b)) }
func NotEqualInt16(a, b int16) int { return Not(EqualInt16(a, b)) }
func NotEqualInt32(a, b int32) int { return Not(EqualInt32(a, b)) }
func NotEqualInt64(a, b int64) int { return Not(EqualInt64(a, b)) }
func NotEqualInt(a, b int) int { return Not(EqualInt(a, b)) }
func NotEqualUint8(a, b uint8) int { return Not(EqualUint8(a, b)) }
func NotEqualUint16(a, b uint16) int { return Not(EqualUint16(a, b)) }
func NotEqualUint32(a, b uint32) int { return Not(EqualUint32(a, b)) }
func NotEqualUint64(a, b uint64) int { return Not(EqualUint64(a, b)) }
func NotEqualUint(a, b uint) int { return Not(EqualUint(a, b)) }
// a < b ==> (a - b) == 0 ==> (a-b) >> (bits(a)-1)
func LessThanUintptr(a, b uintptr) int { return int(MSBUintptr(a - b)) }
func LessThanRune(a, b rune) int { return int(MSBInt32(int32(a) - int32(b))) }
func LessThanByte(a, b byte) int { return int(MSBInt32(int32(a) - int32(b))) }
func LessThanInt8(a, b int8) int { return int(MSBInt32(int32(a) - int32(b))) }
func LessThanInt16(a, b int16) int { return int(MSBInt32(int32(a) - int32(b))) }
func LessThanInt32(a, b int32) int { return int(MSBInt32(int32(a) - int32(b))) }
func LessThanInt64(a, b int64) int {
x := int64ToBigInt(a)
x.Sub(x, int64ToBigInt(b))
return int(x.Bit(63))
}
func LessThanInt(a, b int) int {
if sixtyfourbit {
x := int64ToBigInt(int64(a))
x.Sub(x, int64ToBigInt(int64(b)))
return int(x.Bit(63))
} else {
return int(MSBInt32(int32(a) - int32(b)))
}
}
func LessThanUint8(a, b uint8) int { return int(MSBInt32(int32(a) - int32(b))) }
func LessThanUint16(a, b uint16) int { return int(MSBInt32(int32(a) - int32(b))) }
func LessThanUint32(a, b uint32) int { return int(MSBInt64(int64(a) - int64(b))) }
func LessThanUint64(a, b uint64) int {
x := uint64ToBigInt(a)
x.Sub(x, uint64ToBigInt(b))
return int(x.Bit(63))
}
func LessThanUint(a, b uint) int {
if sixtyfourbit {
return LessThanUint64(uint64(a), uint64(b))
} else {
return LessThanUint32(uint32(a), uint32(b))
}
}
// a <= b ==> a < (b + 1) ==> a - b - 1 >> (bits(a)-1)
func LessThanOrEqualUintptr(a, b uintptr) int { return int(MSBUintptr(a - b - 1)) }
func LessThanOrEqualRune(a, b rune) int { return int(MSBInt32(int32(a) - int32(b) - 1)) }
func LessThanOrEqualByte(a, b byte) int { return int(MSBInt32(int32(a) - int32(b) - 1)) }
func LessThanOrEqualInt8(a, b int8) int { return int(MSBInt32(int32(a) - int32(b) - 1)) }
func LessThanOrEqualInt16(a, b int16) int { return int(MSBInt32(int32(a) - int32(b) - 1)) }
func LessThanOrEqualInt32(a, b int32) int { return int(MSBInt32(a - b - 1)) }
func LessThanOrEqualInt64(a, b int64) int {
x := int64ToBigInt(a)
x.Sub(x, int64ToBigInt(b))
x.Sub(x, int64ToBigInt(int64(1)))
return int(x.Bit(63))
}
func LessThanOrEqualInt(a, b int) int {
if sixtyfourbit {
return LessThanOrEqualInt64(int64(a), int64(b))
} else {
return LessThanOrEqualInt32(int32(a), int32(b))
}
}
func LessThanOrEqualUint8(a, b uint8) int { return int(MSBInt32(int32(a) - int32(b) - 1)) }
func LessThanOrEqualUint16(a, b uint16) int { return int(MSBInt32(int32(a) - int32(b) - 1)) }
func LessThanOrEqualUint32(a, b uint32) int { return int(MSBInt64(int64(a) - int64(b) - 1)) }
func LessThanOrEqualUint64(a, b uint64) int {
x := uint64ToBigInt(a)
x.Sub(x, uint64ToBigInt(b))
x.Sub(x, uint64ToBigInt(uint64(1)))
return int(x.Bit(63))
}
func LessThanOrEqualUint(a, b uint) int {
if sixtyfourbit {
return LessThanOrEqualUint64(uint64(a), uint64(b))
} else {
return LessThanOrEqualUint32(uint32(a), uint32(b))
}
}
// GreaterThan*: a > b == !(a <= b), answer is in bit 0.
func GreaterThanUintptr(a, b uintptr) int { return Not(LessThanOrEqualUintptr(a, b)) }
func GreaterThanRune(a, b rune) int { return Not(LessThanOrEqualRune(a, b)) }
func GreaterThanByte(a, b byte) int { return Not(LessThanOrEqualByte(a, b)) }
func GreaterThanInt8(a, b int8) int { return Not(LessThanOrEqualInt8(a, b)) }
func GreaterThanInt16(a, b int16) int { return Not(LessThanOrEqualInt16(a, b)) }
func GreaterThanInt32(a, b int32) int { return Not(LessThanOrEqualInt32(a, b)) }
func GreaterThanInt64(a, b int64) int { return Not(LessThanOrEqualInt64(a, b)) }
func GreaterThanInt(a, b int) int { return Not(LessThanOrEqualInt(a, b)) }
func GreaterThanUint8(a, b uint8) int { return Not(LessThanOrEqualUint8(a, b)) }
func GreaterThanUint16(a, b uint16) int { return Not(LessThanOrEqualUint16(a, b)) }
func GreaterThanUint32(a, b uint32) int { return Not(LessThanOrEqualUint32(a, b)) }
func GreaterThanUint64(a, b uint64) int { return Not(LessThanOrEqualUint64(a, b)) }
func GreaterThanUint(a, b uint) int { return Not(LessThanOrEqualUint(a, b)) }

// GreaterThanOrEqual*: a >= b == !(a < b), answer is in bit 0.
func GreaterThanOrEqualUintptr(a, b uintptr) int { return Not(LessThanUintptr(a, b)) }
func GreaterThanOrEqualRune(a, b rune) int { return Not(LessThanRune(a, b)) }
func GreaterThanOrEqualByte(a, b byte) int { return Not(LessThanByte(a, b)) }
func GreaterThanOrEqualInt8(a, b int8) int { return Not(LessThanInt8(a, b)) }
func GreaterThanOrEqualInt16(a, b int16) int { return Not(LessThanInt16(a, b)) }
func GreaterThanOrEqualInt32(a, b int32) int { return Not(LessThanInt32(a, b)) }
func GreaterThanOrEqualInt64(a, b int64) int { return Not(LessThanInt64(a, b)) }
func GreaterThanOrEqualInt(a, b int) int { return Not(LessThanInt(a, b)) }
func GreaterThanOrEqualUint8(a, b uint8) int { return Not(LessThanUint8(a, b)) }
func GreaterThanOrEqualUint16(a, b uint16) int { return Not(LessThanUint16(a, b)) }
func GreaterThanOrEqualUint32(a, b uint32) int { return Not(LessThanUint32(a, b)) }
func GreaterThanOrEqualUint64(a, b uint64) int { return Not(LessThanUint64(a, b)) }
func GreaterThanOrEqualUint(a, b uint) int { return Not(LessThanUint(a, b)) }
// Compare*: three-way comparison a <=> b — returns -1 when a < b, 0 when
// a == b, and 1 when a > b, built by OR-ing the greater-than bit with a
// constant-time select of -1 for the less-than case.
func CompareUintptr(a, b uintptr) int {
	return GreaterThanUintptr(a, b) | ChooseInt(LessThanUintptr(a, b), -1, 0)
}
func CompareRune(a, b rune) int {
	return GreaterThanRune(a, b) | ChooseInt(LessThanRune(a, b), -1, 0)
}
func CompareByte(a, b byte) int {
	return GreaterThanByte(a, b) | ChooseInt(LessThanByte(a, b), -1, 0)
}
func CompareInt8(a, b int8) int {
	return GreaterThanInt8(a, b) | ChooseInt(LessThanInt8(a, b), -1, 0)
}
func CompareInt16(a, b int16) int {
	return GreaterThanInt16(a, b) | ChooseInt(LessThanInt16(a, b), -1, 0)
}
func CompareInt32(a, b int32) int {
	return GreaterThanInt32(a, b) | ChooseInt(LessThanInt32(a, b), -1, 0)
}
func CompareInt64(a, b int64) int {
	return GreaterThanInt64(a, b) | ChooseInt(LessThanInt64(a, b), -1, 0)
}
func CompareInt(a, b int) int { return GreaterThanInt(a, b) | ChooseInt(LessThanInt(a, b), -1, 0) }
func CompareUint8(a, b uint8) int {
	return GreaterThanUint8(a, b) | ChooseInt(LessThanUint8(a, b), -1, 0)
}
func CompareUint16(a, b uint16) int {
	return GreaterThanUint16(a, b) | ChooseInt(LessThanUint16(a, b), -1, 0)
}
func CompareUint32(a, b uint32) int {
	return GreaterThanUint32(a, b) | ChooseInt(LessThanUint32(a, b), -1, 0)
}
func CompareUint64(a, b uint64) int {
	return GreaterThanUint64(a, b) | ChooseInt(LessThanUint64(a, b), -1, 0)
}
func CompareUint(a, b uint) int {
	return GreaterThanUint(a, b) | ChooseInt(LessThanUint(a, b), -1, 0)
}
// uint64ToBytesBE encodes x as 8 bytes in big-endian (most significant
// byte first) order.
//
// Bug fix: the original shifted left (byte(x << 56), ...), which discards
// the very bytes it is trying to extract; big-endian extraction requires
// right shifts.
func uint64ToBytesBE(x uint64) []byte {
	return []byte{
		byte(x >> 56), byte(x >> 48),
		byte(x >> 40), byte(x >> 32),
		byte(x >> 24), byte(x >> 16),
		byte(x >> 8), byte(x),
	}
}
// uint64ToBigInt returns x as a non-negative *big.Int.
//
// Fix: the original round-tripped through the (broken) uint64ToBytesBE
// helper; SetUint64 expresses the same conversion directly and correctly.
func uint64ToBigInt(x uint64) *big.Int {
	return new(big.Int).SetUint64(x)
}

// int64ToBigInt returns the two's-complement reinterpretation of x as an
// unsigned 64-bit value: negative inputs map to their unsigned bit
// pattern, matching the original big-endian-bytes encoding intent.
func int64ToBigInt(x int64) *big.Int {
	return new(big.Int).SetUint64(uint64(x))
}
// constant time version of cond ? t : f
// cond must be exactly 0 or 1: cond-1 is then 0 (the complement mask
// selects t) or all-ones (the plain mask selects f).
func ChooseUintptr(cond int, t, f uintptr) uintptr {
	return (t & ^uintptr(cond-1)) | (f & uintptr(cond-1))
}
func ChooseRune(cond int, t, f rune) rune { return (t & ^rune(cond-1)) | (f & rune(cond-1)) }
func ChooseByte(cond int, t, f byte) byte { return (t & ^byte(cond-1)) | (f & byte(cond-1)) }
func ChooseInt8(cond int, t, f int8) int8 { return (t & ^int8(cond-1)) | (f & int8(cond-1)) }
func ChooseInt16(cond int, t, f int16) int16 { return (t & ^int16(cond-1)) | (f & int16(cond-1)) }
func ChooseInt32(cond int, t, f int32) int32 { return (t & ^int32(cond-1)) | (f & int32(cond-1)) }
func ChooseInt64(cond int, t, f int64) int64 { return (t & ^int64(cond-1)) | (f & int64(cond-1)) }
func ChooseInt(cond int, t, f int) int { return (t & ^(cond - 1)) | (f & (cond - 1)) }
func ChooseUint8(cond int, t, f uint8) uint8 { return (t & ^uint8(cond-1)) | (f & uint8(cond-1)) }
func ChooseUint16(cond int, t, f uint16) uint16 { return (t & ^uint16(cond-1)) | (f & uint16(cond-1)) }
func ChooseUint32(cond int, t, f uint32) uint32 { return (t & ^uint32(cond-1)) | (f & uint32(cond-1)) }
func ChooseUint64(cond int, t, f uint64) uint64 { return (t & ^uint64(cond-1)) | (f & uint64(cond-1)) }
func ChooseUint(cond int, t, f uint) uint { return (t & ^uint(cond-1)) | (f & uint(cond-1)) }
// only copies to dst if cond == 1
// src and dst must be the same length
// CopyBytes performs the same masked merge for both cond values so timing
// does not reveal whether the copy happened; dst is returned for chaining.
func CopyBytes(cond int, dst, src []byte) []byte {
	// 0  1   cond
	dstmask := byte(cond - 1)  // ff 00
	srcmask := ^byte(cond - 1) // 00 ff
	for i := range dst {
		dst[i] = dst[i]&dstmask | src[i]&srcmask
	}
	return dst
}
// ChooseBytesPointer selects between two slice headers in constant time by
// choosing between their addresses (t when cond == 1, f when cond == 0).
// NOTE(review): this assumes uint is at least pointer-sized and
// round-trips pointers through integers, which the garbage collector does
// not track — confirm both assumptions hold for the target platforms.
func ChooseBytesPointer(cond int, t, f *[]byte) *[]byte {
	return (*[]byte)(unsafe.Pointer(uintptr(ChooseUint(cond, uint(uintptr(unsafe.Pointer(t))), uint(uintptr(unsafe.Pointer(f)))))))
}
// constant time version of cond ? t : f
//
// ChooseBytes returns a copy of t when cond == 1 and of f when cond == 0,
// always touching both inputs. Empty inputs are substituted with a
// one-byte scratch slice so the modular indexing never divides by zero.
//
// Bug fix: the masks were inverted — every other Choose* helper (and the
// length selection below) treats cond == 1 as "select t", but the original
// tmask/fmask selected f's bytes with t's length, corrupting Compare().
func ChooseBytes(cond int, t, f []byte) []byte {
	tlen := uint64(len(t))
	flen := uint64(len(f))
	tmask := ^byte(cond - 1) // cond == 1 -> 0xff (select t)
	fmask := byte(cond - 1)  // cond == 1 -> 0x00
	l := ChooseUint64(cond, tlen, flen)
	result := make([]byte, l)
	tz := EqualsZeroUint64(tlen)
	fz := EqualsZeroUint64(flen)
	tmod := ChooseUint64(tz, 1, tlen)
	fmod := ChooseUint64(fz, 1, flen)
	tp := ChooseBytesPointer(tz, &[]byte{0}, &t)
	fp := ChooseBytesPointer(fz, &[]byte{0}, &f)
	for i := uint64(0); i < l; i++ {
		result[i] = (*tp)[i%tmod]&tmask | (*fp)[i%fmod]&fmask
	}
	return result
}
// constant-time version of bytes.Compare()
// 1000x slower than Equal
// Compare returns -1, 0, or 1 as a is lexicographically less than, equal
// to, or greater than b, visiting every byte of the longer input so timing
// leaks neither contents nor the position of the first difference.
// NOTE(review): correctness depends on ChooseBytes/ChooseInt selecting
// their first operand when cond == 1 — verify alongside any change there.
func Compare(a, b []byte) int {
	alen := len(a)
	blen := len(b)
	// swapped == 1 when a is the longer input; normalize to (shorter, longer).
	swapped := GreaterThanInt(alen, blen)
	shorter := ChooseBytes(swapped, b, a)
	longer := ChooseBytes(swapped, a, b)
	shorterlen := ChooseInt(swapped, blen, alen)
	longerlen := ChooseInt(swapped, alen, blen)
	result := 0
	// ever becomes 1 once a differing byte has been seen; later bytes
	// cannot change result after that.
	ever := 0
	shorterlenzero := EqualsZeroInt(shorterlen)
	// avoid index error in the loop when shorter in length 0
	shorter = ChooseBytes(shorterlenzero, []byte{0}, shorter)
	shorterlenmod := ChooseInt(shorterlenzero, 1, shorterlen)
	for i := 0; i < longerlen; i++ {
		// Positions past the end of the shorter input compare as 0.
		short := ChooseInt(LessThanInt(i, shorterlen), int(shorter[i%shorterlenmod]), 0)
		cur := int(longer[i]) - short
		gt := BoolToInt(cur > 0) // 0 or 1
		lt := BoolToInt(cur < 0) // 0 or 1
		result |= (gt | -lt) & ^ever // 0 , 1 or -1
		ever |= gt | lt
	}
	// different length strings are indeed different (1 or -1 after swap)
	result |= NotEqualInt(shorterlen, longerlen)
	// reverse the swap
	return ChooseInt(swapped, result, -result)
}
// Contains is a constant-time version of bytes.Contains: it reports
// whether subslice occurs anywhere within b, delegating the scan to Index.
func Contains(b, subslice []byte) bool {
	return Index(b, subslice) >= 0
}
// Equal is a constant-time version of bytes.Equal: it reports whether a
// and b have identical length and contents, accumulating differences over
// every byte so timing does not depend on where the inputs diverge.
func Equal(a, b []byte) bool {
	if len(a) != len(b) {
		return false
	}
	var diff byte
	for i := 0; i < len(a); i++ {
		diff |= a[i] ^ b[i]
	}
	return diff == 0
}
// constant-time version of bytes.Index()
//
// Index returns the index of the first occurrence of sep in s, or -1 if
// sep is not present; an empty sep matches at 0. Every candidate window is
// always examined in full.
//
// Bug fixes relative to the original:
//   - m was set to len(sep) instead of len(s), so the haystack length was
//     never consulted;
//   - the inner loop compared s[i] (a fixed byte) instead of s[i+j];
//   - result was overwritten with i on every not-yet-found iteration, so a
//     miss returned last-1 instead of -1. The update now mirrors the
//     IndexByte pattern below.
func Index(s, sep []byte) int {
	n := len(sep)
	if n == 0 {
		return 0
	}
	m := len(s)
	if n > m {
		return -1
	}
	last := m - n + 1
	result := -1
	found := 0
	for i := 0; i < last; i++ {
		match := 1
		for j := 0; j < n; j++ {
			match &= EqualByte(s[i+j], sep[j])
		}
		result = ChooseInt(found, result, ChooseInt(match, i, -1))
		found |= match
	}
	return result
}
// constant-time version of bytes.IndexByte()
func IndexByte(s []byte, c byte) int {
result := -1
matched := 0
for i := range s {
found := EqualByte(s[i], c)
result = ChooseInt(matched, result, ChooseInt(found, i, -1))
matched |= found
}
return result
} | securecompare.go | 0.687945 | 0.543469 | securecompare.go | starcoder |
package cryptospecials
import (
"crypto/elliptic"
"crypto/rand"
"errors"
"fmt"
"hash"
"math/big"
)
//OPRF is an exportable struct
//
// OPRF bundles state for an elliptic-curve oblivious PRF (EC-OPRF)
// exchange. NOTE(review): RSecret/RSecretInv appear to be serialized forms
// of the blinding scalar r and its modular inverse — confirm against the
// callers that populate them.
type OPRF struct {
	RSecret []byte
	RSecretInv []byte
	elliptic.Curve
}
//Mask is an exportable method
/*
 * OPRF.Mask() represents EC-OPRF sec. 3.1 Steps (1) and (2) with hashing into an elliptic
 * curve via the try-and-increment method.
 * Sec. 3.1:
 *   eq. (1) G_i = H(w_i)
 *   eq. (2) M_i = m_i * G_i
 *
 * Returns the blinded point M = r*H(data) and rInv = r^-1 (mod N).
 * Fixes: the Hash2curve error was ignored; the zero-coordinate check
 * compared *big.Int pointers (mask.X == zero), which is always false; the
 * ModInverse result is now checked for non-invertible scalars.
 */
func (rep OPRF) Mask(data string, h hash.Hash, ec elliptic.Curve, verbose bool) (mask ECPoint, rInv *big.Int, err error) {
	// Allocate r and rInv up front (SetBytes/ModInverse need non-nil receivers).
	r := new(big.Int)
	rInv = new(big.Int)
	// Read a random byte slice of size just under size(P) from OS random
	// (usually /dev/urandom).
	rng := rand.Reader
	rByte := make([]byte, (ec.Params().BitSize+8)/8-1)
	numRead, err := rng.Read(rByte)
	if err != nil {
		return ECPoint{}, nil, err
	}
	// Hash the data with the caller's hash (SHA-256 recommended) and map
	// the digest onto ec. Currently only Weierstrass curves are supported.
	if _, err = h.Write([]byte(data)); err != nil {
		return ECPoint{}, nil, err
	}
	hData := h.Sum(nil)
	h.Reset()
	pt, err := Hash2curve(hData, h, ec.Params(), 1, verbose)
	if err != nil {
		return ECPoint{}, nil, err
	}
	// Determine r (mod N) and rInv (mod N) such that r*rInv = 1 (mod N);
	// N is the order of the elliptic curve subgroup.
	r.SetBytes(rByte)
	r.Mod(r, ec.Params().N)
	if rInv.ModInverse(r, ec.Params().N) == nil {
		return ECPoint{}, nil, errors.New("Error: random scalar has no inverse modulo N")
	}
	mask.X, mask.Y = ec.ScalarMult(pt.X, pt.Y, r.Bytes())
	if mask.X.Sign() == 0 || mask.Y.Sign() == 0 {
		return ECPoint{}, nil, errors.New("Error: The resulting point r*H(data) = (x1, y1) contains zeros")
	}
	if verbose {
		fmt.Println("Number of random bytes read:", numRead)
		fmt.Println("Size of H(data) :", len(hData))
		fmt.Println("SECRET x-coordinate:", pt.X)
		fmt.Println("SECRET y-coordinate:", pt.Y)
		fmt.Println("SECRET r :", r)
		fmt.Println("SECRET r-inv :", rInv)
		fmt.Println("Masked x-coordinate:", mask.X)
		fmt.Println("Masked y-coordinate:", mask.Y)
		fmt.Println("Is Masked (x,y) on the curve:", ec.IsOnCurve(mask.X, mask.Y))
	}
	// (x1,y1) = r*(x,y) : x, y <-- H(data) into ec
	return mask, rInv, nil
}
//Salt is an exportable method
/*
 * OPRF.Salt() represents EC-OPRF sec. 3.1 Step (3)
 * Sec. 3.1:
 *   eq. (3) S_i = s_i * M_i = s * (xMask, yMask) = s * r * (x, y)
 *
 * Fixes: s == zero compared *big.Int pointers and effectively never fired
 * (Sign() tests the value); the rand.Reader.Read error was ignored; the
 * freshly generated secret was printed unconditionally and is now only
 * revealed in verbose (debug) mode.
 */
func (rep OPRF) Salt(mask ECPoint, s *big.Int, ec elliptic.Curve, verbose bool) (salt ECPoint, sOut *big.Int, err error) {
	/*
	 * Ensure s is usable; if nil or zero, generate a random scalar. Note:
	 * this does not check that s (mod N) == s (as given). If s > s (mod N),
	 * results will not be as anticipated.
	 */
	if s == nil || s.Sign() == 0 {
		s = new(big.Int)
		randBytes := make([]byte, (ec.Params().BitSize+8)/8-1)
		if _, err = rand.Reader.Read(randBytes); err != nil {
			return ECPoint{}, nil, err
		}
		s.SetBytes(randBytes)
		s.Mod(s, ec.Params().N)
		if verbose {
			fmt.Println("SECRET - s (new) :", s)
		}
	}
	salt.X, salt.Y = ec.ScalarMult(mask.X, mask.Y, s.Bytes())
	if verbose {
		fmt.Println("SECRET - s (used) :", s)
		fmt.Println("Salted x-coordinate:", salt.X)
		fmt.Println("Salted y-coordinate:", salt.Y)
		fmt.Println("Is Salted (x, y) on the curve:", ec.IsOnCurve(salt.X, salt.Y))
	}
	return salt, s, nil
}
//Unmask is an exportable method
/*
 * OPRF.Unmask() represents EC-OPRF sec. 3.1 Step (4)
 * Sec. 3.1:
 *   eq. (4) U_i = r_inv * S_i = r_inv * s * (xMask, yMask) = r_inv * s * r * (x, y) = s * (x, y)
 *
 * Multiplying the salted point by r_inv cancels the client's blinding
 * factor, leaving s*H(data).
 */
func (rep OPRF) Unmask(salt ECPoint, rInv *big.Int, ec elliptic.Curve, verbose bool) (unmask ECPoint, err error) {
	x, y := ec.ScalarMult(salt.X, salt.Y, rInv.Bytes())
	unmask.X, unmask.Y = x, y
	if verbose {
		fmt.Println("Unmasked x-coordinate:", unmask.X)
		fmt.Println("Unmasked y-coordinate:", unmask.Y)
		fmt.Println("Is Unmasked (x, y) on the curve:", ec.IsOnCurve(unmask.X, unmask.Y))
	}
	return unmask, nil
}
/*
* OPRF.unsalt is not exportable and is for testing only. This method will remove s from U_i
* resulting in s_inv * s * U_i = s_inv * s * (x, y) = (x, y) = H(data). This operation is not
* in the OPRF paper.
*/
func (rep OPRF) unsalt(unmask ECPoint, s *big.Int, ec elliptic.Curve, verbose bool) (unsalt ECPoint, err error) {
var (
sInv *big.Int
)
// Calculated s_inv (mod N) such that s_inv * s = 1 (mod N)
sInv = new(big.Int)
sInv.ModInverse(s, ec.Params().N)
unsalt.X, unsalt.Y = ec.ScalarMult(unmask.X, unmask.Y, sInv.Bytes())
if verbose {
fmt.Println("Is unsalted (x, y) on the curve:", ec.IsOnCurve(unsalt.X, unsalt.Y))
}
return unsalt, nil
} | cryptospecials/eccoprf.go | 0.634204 | 0.441613 | eccoprf.go | starcoder |
package rgb16
// Conversion of natively non-D50 RGB colorspaces with D50 illuminator to CIE XYZ and back.
// Bradford adaptation was used to calculate D50 matrices from colorspaces' native illuminators.
// RGB values must be linear and in the nominal range [0, 255].
// XYZ values are usually in [0, 255] but may be greater
// To get quick and dirty XYZ approximations, divide by 255, otherwise use the float64 version of these functions.
// Ref.: [24]
// The sixteen conversion functions below all share the same fixed-point
// structure: a 3x3 matrix multiply with forward (RGB->XYZ) coefficients
// scaled by 1e7 and inverse (XYZ->RGB) coefficients sharing the divisor
// 152587890625, plus clamping of inverse results to [0, 65535]. The
// previously copy-pasted bodies are factored through three helpers; the
// arithmetic (order, scaling, truncation) is unchanged. Trailing
// dataset-extraction junk fused onto the final closing brace was removed.

// clampChan16 clamps an intermediate channel value to the uint16 range.
func clampChan16(v int) uint16 {
	if v < 0 {
		return 0
	}
	if v > 65535 {
		return 65535
	}
	return uint16(v)
}

// rgbToXYZD50 applies forward matrix m (row-major, scaled by 1e7) to
// linear RGB, yielding CIE XYZ. Results may be negative or exceed the
// input range, matching the original unclamped behavior.
func rgbToXYZD50(r, g, b uint16, m [9]int64) (x, y, z int) {
	rr, gg, bb := int64(r), int64(g), int64(b)
	x = int((m[0]*rr + m[1]*gg + m[2]*bb) / 1e7)
	y = int((m[3]*rr + m[4]*gg + m[5]*bb) / 1e7)
	z = int((m[6]*rr + m[7]*gg + m[8]*bb) / 1e7)
	return
}

// xyzToRGBD50 applies inverse matrix m (row-major) to CIE XYZ and clamps
// each channel into the uint16 range.
func xyzToRGBD50(x, y, z int, m [9]int64) (r, g, b uint16) {
	xx, yy, zz := int64(x), int64(y), int64(z)
	r = clampChan16(int((m[0]*xx + m[1]*yy + m[2]*zz) / 152587890625))
	g = clampChan16(int((m[3]*xx + m[4]*yy + m[5]*zz) / 152587890625))
	b = clampChan16(int((m[6]*xx + m[7]*yy + m[8]*zz) / 152587890625))
	return
}

// AdobeToXYZ_D50 converts from Adobe RGB 1998 with D50 illuminator to CIE XYZ.
func AdobeToXYZ_D50(r, g, b uint16) (x, y, z int) {
	return rgbToXYZD50(r, g, b, [9]int64{
		93042786297, 31317631799, 22770122834,
		47474509803, 95468986037, 9646707865,
		2972625314, 9291248950, 113655100327})
}

// XYZToAdobe_D50 converts from CIE XYZ to Adobe RGB 1998 with D50 illuminator.
func XYZToAdobe_D50(x, y, z int) (r, g, b uint16) {
	return xyzToRGBD50(x, y, z, [9]int64{
		19624573, -6105343, -3413404,
		-9787684, 19161707, 334545,
		286873, -1406752, 13487860})
}

// AppleToXYZ_D50 converts from Apple RGB with D50 illuminator to CIE XYZ.
func AppleToXYZ_D50(r, g, b uint16) (x, y, z int) {
	return rgbToXYZD50(r, g, b, [9]int64{
		72566994735, 51830655374, 22732890821,
		38938155184, 102627496757, 11024582283,
		2818295566, 17300236514, 105800442511})
}

// XYZToApple_D50 converts from CIE XYZ to Apple RGB with D50 illuminator.
func XYZToApple_D50(x, y, z int) (r, g, b uint16) {
	return xyzToRGBD50(x, y, z, [9]int64{
		28511130, -13605261, -4708281,
		-10927680, 20349181, 227601,
		1027418, -2964984, 14510880})
}

// BruceToXYZ_D50 converts from Bruce RGB with D50 illuminator to CIE XYZ.
func BruceToXYZ_D50(r, g, b uint16) (x, y, z int) {
	return rgbToXYZD50(r, g, b, [9]int64{
		75407278552, 48902632180, 22820630197,
		38476096741, 104446005950, 9668116273,
		2409185930, 9602563515, 113907209887})
}

// XYZToBruce_D50 converts from CIE XYZ to Bruce RGB with D50 illuminator.
func XYZToBruce_D50(x, y, z int) (r, g, b uint16) {
	return xyzToRGBD50(x, y, z, [9]int64{
		26503260, -12014485, -4289936,
		-9787684, 19161707, 334545,
		264574, -1361228, 13458747})
}

// CIEToXYZ_D50 converts from CIE RGB with D50 illuminator to CIE XYZ.
func CIEToXYZ_D50(r, g, b uint16) (x, y, z int) {
	return rgbToXYZD50(r, g, b, [9]int64{
		74294193941, 46738139924, 26098222323,
		26651148241, 125849408712, 89677271,
		-191696167, 2591470206, 123519203478})
}

// XYZToCIE_D50 converts from CIE XYZ to CIE RGB with D50 illuminator.
func XYZToCIE_D50(x, y, z int) (r, g, b uint16) {
	return xyzToRGBD50(x, y, z, [9]int64{
		23638441, -8676030, -4988161,
		-5005940, 13962581, 1047576,
		141714, -306400, 12324030})
}

// NTSCToXYZ_D50 converts from NTSC RGB with D50 illuminator to CIE XYZ.
func NTSCToXYZ_D50(r, g, b uint16) (x, y, z int) {
	return rgbToXYZD50(r, g, b, [9]int64{
		96798748759, 28262821392, 22068970778,
		47447867551, 90272129396, 14870222018,
		-180313111, 8476661325, 117622629128})
}

// XYZToNTSC_D50 converts from CIE XYZ to NTSC RGB with D50 illuminator.
func XYZToNTSC_D50(x, y, z int) (r, g, b uint16) {
	return xyzToRGBD50(x, y, z, [9]int64{
		18465162, -5521299, -2766458,
		-9826630, 20045060, -690397,
		736488, -1453020, 13018574})
}

// PALToXYZ_D50 converts from PAL/SECAM RGB with D50 illuminator to CIE XYZ.
func PALToXYZ_D50(r, g, b uint16) (x, y, z int) {
	return rgbToXYZD50(r, g, b, [9]int64{
		69470862897, 56084534981, 21575127793,
		35447089340, 108002685587, 9140444037,
		2219531547, 16009063858, 107690379185})
}

// XYZToPAL_D50 converts from CIE XYZ to PAL/SECAM RGB with D50 illuminator.
func XYZToPAL_D50(x, y, z int) (r, g, b uint16) {
	return xyzToRGBD50(x, y, z, [9]int64{
		29604395, -14678520, -4685105,
		-9787684, 19161707, 334545,
		844886, -2545974, 14216390})
}

// SMPTE_CToXYZ_D50 converts from SMPTE-C RGB with D50 illuminator to CIE XYZ.
func SMPTE_CToXYZ_D50(r, g, b uint16) (x, y, z int) {
	return rgbToXYZD50(r, g, b, [9]int64{
		63527733272, 59990295261, 23612512397,
		33829236285, 107309819179, 11451163499,
		2084016174, 13940703440, 109894254978})
}

// XYZToSMPTE_C_D50 converts from CIE XYZ to SMPTE-C RGB with D50 illuminator.
func XYZToSMPTE_C_D50(x, y, z int) (r, g, b uint16) {
	return xyzToRGBD50(x, y, z, [9]int64{
		33922457, -18264027, -5385522,
		-10770996, 20214283, 207992,
		723084, -2217902, 13961145})
}

// SRGBToXYZ_D50 converts from sRGB with D50 illuminator to CIE XYZ.
func SRGBToXYZ_D50(r, g, b uint16) (x, y, z int) {
	return rgbToXYZD50(r, g, b, [9]int64{
		66540733958, 58757137406, 21832669565,
		33952010375, 109388662546, 9249546043,
		2125917447, 14817196917, 108975860226})
}

// XYZToSRGB_D50 converts from CIE XYZ to sRGB with D50 illuminator.
func XYZToSRGB_D50(x, y, z int) (r, g, b uint16) {
	return xyzToRGBD50(x, y, z, [9]int64{
		31339039, -16168668, -4906146,
		-9787684, 19161707, 334545,
		719463, -2289914, 14052641})
}
package orderbook
import (
"errors"
"fmt"
"sort"
math "github.com/thrasher-corp/gocryptotrader/common/math"
"github.com/thrasher-corp/gocryptotrader/log"
)
// WhaleBombResult returns the whale bomb result
type WhaleBombResult struct {
	Amount float64 // currency spent walking the book toward the target
	MinimumPrice float64 // lowest price among the consumed orders
	MaximumPrice float64 // highest price among the consumed orders
	PercentageGainOrLoss float64
	Orders orderSummary // the orders that would be consumed
	Status string // human-readable summary of the simulated move
}
// WhaleBomb finds the amount required to target a price. When buy is true
// the ask side is walked (price rises toward the target); otherwise the
// bid side is walked (price falls). The returned result carries the orders
// that would be consumed; a non-nil error indicates the book lacked the
// liquidity to reach the target.
//
// Refactor: the buy and sell branches previously duplicated the error and
// result construction end to end; only the direction-specific pieces
// remain branched. Output strings and semantics are unchanged.
func (b *Base) WhaleBomb(priceTarget float64, buy bool) (*WhaleBombResult, error) {
	if priceTarget < 0 {
		return nil, errors.New("price target is invalid")
	}
	amount, orders := b.findAmount(priceTarget, buy)
	min, max := orders.MinimumPrice(false), orders.MaximumPrice(true)
	var err error
	var status string
	if buy {
		if max < priceTarget {
			err = errors.New("unable to hit price target due to insufficient orderbook items")
		}
		status = fmt.Sprintf("Buying %.2f %v worth of %v will send the price from %v to %v [%.2f%%] and take %v orders.",
			amount, b.Pair.Quote.String(), b.Pair.Base.String(), min, max,
			math.CalculatePercentageGainOrLoss(max, min), len(orders))
	} else {
		if min > priceTarget {
			err = errors.New("unable to hit price target due to insufficient orderbook items")
		}
		status = fmt.Sprintf("Selling %.2f %v worth of %v will send the price from %v to %v [%.2f%%] and take %v orders.",
			amount, b.Pair.Base.String(), b.Pair.Quote.String(), max, min,
			math.CalculatePercentageGainOrLoss(min, max), len(orders))
	}
	return &WhaleBombResult{
		Amount:       amount,
		Orders:       orders,
		MinimumPrice: min,
		MaximumPrice: max,
		Status:       status,
	}, err
}
// OrderSimulationResult returns the order simulation result
type OrderSimulationResult WhaleBombResult

// SimulateOrder simulates the market impact of spending `amount` of quote
// currency buying (buy == true) or `amount` of base currency selling
// against the current book.
//
// Refactor: the two branches previously duplicated the whole body and
// disagreed on amount formatting (%.2f for buys, %f for sells); both now
// use %.2f, and only the direction-specific values are branched.
func (b *Base) SimulateOrder(amount float64, buy bool) *OrderSimulationResult {
	var (
		orders                orderSummary
		amt                   float64
		verb, spent, received string
	)
	if buy {
		orders, amt = b.buy(amount)
		verb, spent, received = "Buying", b.Pair.Quote.String(), b.Pair.Base.String()
	} else {
		orders, amt = b.sell(amount)
		verb, spent, received = "Selling", b.Pair.Base.String(), b.Pair.Quote.String()
	}
	min, max := orders.MinimumPrice(false), orders.MaximumPrice(true)
	// Selling walks the price down (max -> min); buying walks it up.
	from, to := max, min
	pct := math.CalculatePercentageGainOrLoss(min, max)
	if buy {
		from, to = min, max
		pct = math.CalculatePercentageGainOrLoss(max, min)
	}
	status := fmt.Sprintf("%s %.2f %v worth of %v will send the price from %v to %v [%.2f%%] and take %v orders.",
		verb, amount, spent, received, from, to, pct, len(orders))
	return &OrderSimulationResult{
		Orders:               orders,
		Amount:               amt,
		MinimumPrice:         min,
		MaximumPrice:         max,
		PercentageGainOrLoss: pct,
		Status:               status,
	}
}
// orderSummary is the list of orderbook entries consumed by a simulation.
type orderSummary []Item
// Print logs each consumed order at debug level.
func (o orderSummary) Print() {
	for x := range o {
		log.Debugf(log.OrderBook, "Order: Price: %f Amount: %f", o[x].Price, o[x].Amount)
	}
}
// MinimumPrice sorts the summary in the given direction and returns the
// first element's price — the minimum when reverse is false.
func (o orderSummary) MinimumPrice(reverse bool) float64 {
	if len(o) != 0 {
		sortOrdersByPrice(&o, reverse)
		return o[0].Price
	}
	return 0
}
// MaximumPrice sorts the summary in the given direction and returns the
// first element's price — the maximum only when reverse is true.
// NOTE(review): this body is identical to MinimumPrice; both simply return
// the head after sorting, so callers must pass the correct direction flag.
func (o orderSummary) MaximumPrice(reverse bool) float64 {
	if len(o) != 0 {
		sortOrdersByPrice(&o, reverse)
		return o[0].Price
	}
	return 0
}
// ByPrice implements sort.Interface over orderSummary, ordering ascending
// by price.
type ByPrice orderSummary

func (p ByPrice) Len() int           { return len(p) }
func (p ByPrice) Less(i, j int) bool { return p[i].Price < p[j].Price }
func (p ByPrice) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }

// sortOrdersByPrice sorts o in place: ascending by default, descending
// when reverse is true.
func sortOrdersByPrice(o *orderSummary, reverse bool) {
	data := sort.Interface(ByPrice(*o))
	if reverse {
		data = sort.Reverse(data)
	}
	sort.Sort(data)
}
// findAmount walks one side of the book until the target price is reached
// and reports the cumulative amount plus the orders traversed. For buys
// the ask side is walked and the amount is denominated in quote currency
// (price*amount per level, including the full level that crosses the
// target); for sells the bid side is walked and the amount is in base
// currency. NOTE(review): the two sides intentionally differ in
// denomination and in whether the crossing level is included via return
// vs break — confirm against WhaleBomb's expectations before changing.
func (b *Base) findAmount(price float64, buy bool) (float64, orderSummary) {
	orders := make(orderSummary, 0)
	var amt float64
	if buy {
		for x := range b.Asks {
			if b.Asks[x].Price >= price {
				// Target reached: include this whole level, then stop.
				amt += b.Asks[x].Price * b.Asks[x].Amount
				orders = append(orders, Item{
					Price: b.Asks[x].Price,
					Amount: b.Asks[x].Amount,
				})
				return amt, orders
			}
			orders = append(orders, Item{
				Price: b.Asks[x].Price,
				Amount: b.Asks[x].Amount,
			})
			amt += b.Asks[x].Price * b.Asks[x].Amount
		}
		return amt, orders
	}
	for x := range b.Bids {
		if b.Bids[x].Price <= price {
			// Target reached: include this whole level, then stop.
			amt += b.Bids[x].Amount
			orders = append(orders, Item{
				Price: b.Bids[x].Price,
				Amount: b.Bids[x].Amount,
			})
			break
		}
		orders = append(orders, Item{
			Price: b.Bids[x].Price,
			Amount: b.Bids[x].Amount,
		})
		amt += b.Bids[x].Amount
	}
	return amt, orders
}
// buy walks the ask side spending up to `amount` of quote currency and
// returns the orders consumed plus the base currency acquired. The final
// level is taken partially so exactly `amount` is spent.
func (b *Base) buy(amount float64) (orders orderSummary, baseAmount float64) {
	var spent float64
	for i := range b.Asks {
		levelCost := b.Asks[i].Price * b.Asks[i].Amount
		if spent+levelCost >= amount {
			remaining := amount - spent
			partial := remaining / b.Asks[i].Price
			orders = append(orders, Item{
				Price:  b.Asks[i].Price,
				Amount: partial,
			})
			baseAmount += partial
			break
		}
		spent += levelCost
		baseAmount += b.Asks[i].Amount
		orders = append(orders, Item{
			Price:  b.Asks[i].Price,
			Amount: b.Asks[i].Amount,
		})
	}
	return
}
// sell walks the bid side disposing of up to `amount` of base currency and
// returns the orders consumed plus the quote currency received. The final
// level is taken partially so exactly `amount` is sold.
func (b *Base) sell(amount float64) (orders orderSummary, quoteAmount float64) {
	var sold float64
	for i := range b.Bids {
		if sold+b.Bids[i].Amount >= amount {
			remaining := amount - sold
			orders = append(orders, Item{
				Price:  b.Bids[i].Price,
				Amount: remaining,
			})
			quoteAmount += remaining * b.Bids[i].Price
			break
		}
		sold += b.Bids[i].Amount
		quoteAmount += b.Bids[i].Amount * b.Bids[i].Price
		orders = append(orders, Item{
			Price:  b.Bids[i].Price,
			Amount: b.Bids[i].Amount,
		})
	}
	return
}
// GetAveragePrice finds the average buy or sell price of a specified amount.
// It finds the nominal amount spent on the total purchase or sell and uses it
// to find the average price for an individual unit bought or sold.
//
// Fix: the insufficient-liquidity error said "a buy amount" even when the
// caller was selling; the message is now side-neutral.
func (b *Base) GetAveragePrice(buy bool, amount float64) (float64, error) {
	if amount <= 0 {
		return 0, errAmountInvalid
	}
	side := b.Bids
	if buy {
		side = b.Asks
	}
	aggNominalAmount, remainingAmount := side.FindNominalAmount(amount)
	if remainingAmount != 0 {
		return 0, fmt.Errorf("%w for %v on exchange %v to support an amount of %v", errNotEnoughLiquidity, b.Pair, b.Exchange, amount)
	}
	return aggNominalAmount / amount, nil
}
// FindNominalAmount finds the nominal amount spent in terms of the quote
// If the orderbook doesn't have enough liquidity it returns a non zero
// remaining amount value
func (elem Items) FindNominalAmount(amount float64) (aggNominalAmount, remainingAmount float64) {
remainingAmount = amount
for x := range elem {
if remainingAmount <= elem[x].Amount {
aggNominalAmount += elem[x].Price * remainingAmount
remainingAmount = 0
break
} else {
aggNominalAmount += elem[x].Price * elem[x].Amount
remainingAmount -= elem[x].Amount
}
}
return aggNominalAmount, remainingAmount
} | exchanges/orderbook/calculator.go | 0.678966 | 0.440289 | calculator.go | starcoder |
package values
import (
"fmt"
"github.com/influxdata/platform/query/ast"
"github.com/influxdata/platform/query/semantic"
)
// BinaryFunction evaluates a binary operator applied to two runtime values.
type BinaryFunction func(l, r Value) Value
// BinaryFuncSignature identifies one operator/operand-type combination and
// keys the evaluator lookup table.
type BinaryFuncSignature struct {
	Operator ast.OperatorKind
	Left, Right semantic.Type
}
// LookupBinaryFunction returns the evaluator registered for sig, or an
// error when the operator/type combination is unsupported.
func LookupBinaryFunction(sig BinaryFuncSignature) (BinaryFunction, error) {
	if f, ok := binaryFuncLookup[sig]; ok {
		return f, nil
	}
	return nil, fmt.Errorf("unsupported binary expression %v %v %v", sig.Left, sig.Operator, sig.Right)
}
var binaryFuncLookup = map[BinaryFuncSignature]BinaryFunction{
//---------------
// Math Operators
//---------------
{Operator: ast.AdditionOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewIntValue(l + r)
},
{Operator: ast.AdditionOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewUIntValue(l + r)
},
{Operator: ast.AdditionOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewFloatValue(l + r)
},
{Operator: ast.SubtractionOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewIntValue(l - r)
},
{Operator: ast.SubtractionOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewUIntValue(l - r)
},
{Operator: ast.SubtractionOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewFloatValue(l - r)
},
{Operator: ast.MultiplicationOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewIntValue(l * r)
},
{Operator: ast.MultiplicationOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewUIntValue(l * r)
},
{Operator: ast.MultiplicationOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewFloatValue(l * r)
},
{Operator: ast.DivisionOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewIntValue(l / r)
},
{Operator: ast.DivisionOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewUIntValue(l / r)
},
{Operator: ast.DivisionOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewFloatValue(l / r)
},
//---------------------
// Comparison Operators
//---------------------
// LessThanEqualOperator
{Operator: ast.LessThanEqualOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewBoolValue(l <= r)
},
{Operator: ast.LessThanEqualOperator, Left: semantic.Int, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.UInt()
if l < 0 {
return NewBoolValue(true)
}
return NewBoolValue(uint64(l) <= r)
},
{Operator: ast.LessThanEqualOperator, Left: semantic.Int, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Float()
return NewBoolValue(float64(l) <= r)
},
{Operator: ast.LessThanEqualOperator, Left: semantic.UInt, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Int()
if r < 0 {
return NewBoolValue(false)
}
return NewBoolValue(l <= uint64(r))
},
{Operator: ast.LessThanEqualOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewBoolValue(l <= r)
},
{Operator: ast.LessThanEqualOperator, Left: semantic.UInt, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Float()
return NewBoolValue(float64(l) <= r)
},
{Operator: ast.LessThanEqualOperator, Left: semantic.Float, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Int()
return NewBoolValue(l <= float64(r))
},
{Operator: ast.LessThanEqualOperator, Left: semantic.Float, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.UInt()
return NewBoolValue(l <= float64(r))
},
{Operator: ast.LessThanEqualOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewBoolValue(l <= r)
},
// LessThanOperator
{Operator: ast.LessThanOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewBoolValue(l < r)
},
{Operator: ast.LessThanOperator, Left: semantic.Int, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.UInt()
if l < 0 {
return NewBoolValue(true)
}
return NewBoolValue(uint64(l) < r)
},
{Operator: ast.LessThanOperator, Left: semantic.Int, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Float()
return NewBoolValue(float64(l) < r)
},
{Operator: ast.LessThanOperator, Left: semantic.UInt, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Int()
if r < 0 {
return NewBoolValue(false)
}
return NewBoolValue(l < uint64(r))
},
{Operator: ast.LessThanOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewBoolValue(l < r)
},
{Operator: ast.LessThanOperator, Left: semantic.UInt, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Float()
return NewBoolValue(float64(l) < r)
},
{Operator: ast.LessThanOperator, Left: semantic.Float, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Int()
return NewBoolValue(l < float64(r))
},
{Operator: ast.LessThanOperator, Left: semantic.Float, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.UInt()
return NewBoolValue(l < float64(r))
},
{Operator: ast.LessThanOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewBoolValue(l < r)
},
// GreaterThanEqualOperator
{Operator: ast.GreaterThanEqualOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewBoolValue(l >= r)
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.Int, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.UInt()
if l < 0 {
return NewBoolValue(true)
}
return NewBoolValue(uint64(l) >= r)
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.Int, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Float()
return NewBoolValue(float64(l) >= r)
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.UInt, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Int()
if r < 0 {
return NewBoolValue(false)
}
return NewBoolValue(l >= uint64(r))
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewBoolValue(l >= r)
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.UInt, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Float()
return NewBoolValue(float64(l) >= r)
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.Float, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Int()
return NewBoolValue(l >= float64(r))
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.Float, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.UInt()
return NewBoolValue(l >= float64(r))
},
{Operator: ast.GreaterThanEqualOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewBoolValue(l >= r)
},
// GreaterThanOperator
{Operator: ast.GreaterThanOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewBoolValue(l > r)
},
{Operator: ast.GreaterThanOperator, Left: semantic.Int, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.UInt()
if l < 0 {
return NewBoolValue(true)
}
return NewBoolValue(uint64(l) > r)
},
{Operator: ast.GreaterThanOperator, Left: semantic.Int, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Float()
return NewBoolValue(float64(l) > r)
},
{Operator: ast.GreaterThanOperator, Left: semantic.UInt, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Int()
if r < 0 {
return NewBoolValue(false)
}
return NewBoolValue(l > uint64(r))
},
{Operator: ast.GreaterThanOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewBoolValue(l > r)
},
{Operator: ast.GreaterThanOperator, Left: semantic.UInt, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Float()
return NewBoolValue(float64(l) > r)
},
{Operator: ast.GreaterThanOperator, Left: semantic.Float, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Int()
return NewBoolValue(l > float64(r))
},
{Operator: ast.GreaterThanOperator, Left: semantic.Float, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.UInt()
return NewBoolValue(l > float64(r))
},
{Operator: ast.GreaterThanOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewBoolValue(l > r)
},
// EqualOperator
{Operator: ast.EqualOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewBoolValue(l == r)
},
{Operator: ast.EqualOperator, Left: semantic.Int, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.UInt()
if l < 0 {
return NewBoolValue(false)
}
return NewBoolValue(uint64(l) == r)
},
{Operator: ast.EqualOperator, Left: semantic.Int, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Float()
return NewBoolValue(float64(l) == r)
},
{Operator: ast.EqualOperator, Left: semantic.UInt, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Int()
if r < 0 {
return NewBoolValue(false)
}
return NewBoolValue(l == uint64(r))
},
{Operator: ast.EqualOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewBoolValue(l == r)
},
{Operator: ast.EqualOperator, Left: semantic.UInt, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Float()
return NewBoolValue(float64(l) == r)
},
{Operator: ast.EqualOperator, Left: semantic.Float, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Int()
return NewBoolValue(l == float64(r))
},
{Operator: ast.EqualOperator, Left: semantic.Float, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.UInt()
return NewBoolValue(l == float64(r))
},
{Operator: ast.EqualOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewBoolValue(l == r)
},
{Operator: ast.EqualOperator, Left: semantic.String, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Str()
return NewBoolValue(l == r)
},
// NotEqualOperator
{Operator: ast.NotEqualOperator, Left: semantic.Int, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Int()
return NewBoolValue(l != r)
},
{Operator: ast.NotEqualOperator, Left: semantic.Int, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.UInt()
if l < 0 {
return NewBoolValue(true)
}
return NewBoolValue(uint64(l) != r)
},
{Operator: ast.NotEqualOperator, Left: semantic.Int, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Int()
r := rv.Float()
return NewBoolValue(float64(l) != r)
},
{Operator: ast.NotEqualOperator, Left: semantic.UInt, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Int()
if r < 0 {
return NewBoolValue(true)
}
return NewBoolValue(l != uint64(r))
},
{Operator: ast.NotEqualOperator, Left: semantic.UInt, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.UInt()
return NewBoolValue(l != r)
},
{Operator: ast.NotEqualOperator, Left: semantic.UInt, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.UInt()
r := rv.Float()
return NewBoolValue(float64(l) != r)
},
{Operator: ast.NotEqualOperator, Left: semantic.Float, Right: semantic.Int}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Int()
return NewBoolValue(l != float64(r))
},
{Operator: ast.NotEqualOperator, Left: semantic.Float, Right: semantic.UInt}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.UInt()
return NewBoolValue(l != float64(r))
},
{Operator: ast.NotEqualOperator, Left: semantic.Float, Right: semantic.Float}: func(lv, rv Value) Value {
l := lv.Float()
r := rv.Float()
return NewBoolValue(l != r)
},
{Operator: ast.NotEqualOperator, Left: semantic.String, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Str()
return NewBoolValue(l != r)
},
{Operator: ast.RegexpMatchOperator, Left: semantic.String, Right: semantic.Regexp}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Regexp()
return NewBoolValue(r.MatchString(l))
},
{Operator: ast.RegexpMatchOperator, Left: semantic.Regexp, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Regexp()
r := rv.Str()
return NewBoolValue(l.MatchString(r))
},
{Operator: ast.NotRegexpMatchOperator, Left: semantic.String, Right: semantic.Regexp}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Regexp()
return NewBoolValue(!r.MatchString(l))
},
{Operator: ast.NotRegexpMatchOperator, Left: semantic.Regexp, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Regexp()
r := rv.Str()
return NewBoolValue(!l.MatchString(r))
},
{Operator: ast.AdditionOperator, Left: semantic.String, Right: semantic.String}: func(lv, rv Value) Value {
l := lv.Str()
r := rv.Str()
return NewStringValue(l + r)
},
} | query/values/binary.go | 0.688468 | 0.50769 | binary.go | starcoder |
package blinkt
import (
"fmt"
"log"
"os"
"os/signal"
"time"
"github.com/alexellis/rpi"
)
// DAT and CLK are the GPIO numbers of the Blinkt's data and clock lines;
// call sites translate them to WiringPi pins via rpi.GpioToPin.
const DAT int = 23
const CLK int = 24

// Indices into each pixel's [4]int element.
const redIndex int = 0
const greenIndex int = 1
const blueIndex int = 2
const brightnessIndex int = 3

// default raw brightness on the 0-31 hardware scale. Not to be used user-side
const defaultBrightnessInt int = 15

//upper and lower bounds for user specified brightness
const minBrightness float64 = 0.0
const maxBrightness float64 = 1.0
// pulse sends a pulse through the DAT/CLK pins: it holds DAT low and
// toggles CLK high then low the given number of times.
func pulse(pulses int) {
	rpi.DigitalWrite(rpi.GpioToPin(DAT), 0)
	for i := 0; i < pulses; i++ {
		rpi.DigitalWrite(rpi.GpioToPin(CLK), 1)
		rpi.DigitalWrite(rpi.GpioToPin(CLK), 0)
	}
}

// eof end of file or signal, from Python library (36 clock pulses)
func eof() {
	pulse(36)
}

// sof start of file (name from Python library; 32 clock pulses)
func sof() {
	pulse(32)
}

// writeByte clocks the 8 bits of val out on DAT, most significant bit first.
func writeByte(val int) {
	for i := 0; i < 8; i++ {
		// 0b10000000 = 128; masks the current MSB, producing 0 or 128.
		// NOTE(review): this assumes DigitalWrite treats any non-zero value
		// as logic high — confirm against the rpi library.
		rpi.DigitalWrite(rpi.GpioToPin(DAT), val&128)
		rpi.DigitalWrite(rpi.GpioToPin(CLK), 1)
		val = val << 1
		rpi.DigitalWrite(rpi.GpioToPin(CLK), 0)
	}
}
// convertBrightnessToInt maps a user-facing brightness in [0.0, 1.0] to the
// raw 0-31 hardware range. An out-of-range value aborts the process via
// log.Fatalf.
func convertBrightnessToInt(brightness float64) int {
	if inRangeFloat(minBrightness, brightness, maxBrightness) {
		return int(brightness * 31.0)
	}
	log.Fatalf("Supplied brightness was %#v - value should be between: %#v and %#v", brightness, minBrightness, maxBrightness)
	return 0 // unreachable: log.Fatalf exits
}
// inRangeFloat reports whether testVal lies within the closed interval
// [minVal, maxVal].
func inRangeFloat(minVal float64, testVal float64, maxVal float64) bool {
	if testVal < minVal {
		return false
	}
	return testVal <= maxVal
}
// SetClearOnExit turns all pixels off on Control + C / os.Interrupt signal.
// It installs a signal handler in a background goroutine that clears the
// LEDs, pushes the cleared state to the hardware, and terminates the
// process with exit code 1.
func (bl *Blinkt) SetClearOnExit(clearOnExit bool) {
	if clearOnExit {
		signalChan := make(chan os.Signal, 1)
		signal.Notify(signalChan, os.Interrupt)
		fmt.Println("Press Control + C to stop")
		go func() {
			for range signalChan {
				bl.Clear()
				bl.Show()
				os.Exit(1)
			}
		}()
	}
}

// Delay maps to time.Sleep, for ms milliseconds
func Delay(ms int) {
	time.Sleep(time.Duration(ms) * time.Millisecond)
}
// Clear sets all the pixels to off, you still have to call Show.
func (bl *Blinkt) Clear() {
	r := 0
	g := 0
	b := 0
	bl.SetAll(r, g, b)
}

// Show updates the LEDs with the values from SetPixel/Clear.
// It streams a start-of-frame marker, then one 4-byte frame per pixel
// (brightness header byte followed by blue, green, red — note the BGR
// order), then an end-of-frame marker.
func (bl *Blinkt) Show() {
	sof()
	for p, _ := range bl.pixels {
		brightness := bl.pixels[p][brightnessIndex]
		r := bl.pixels[p][redIndex]
		g := bl.pixels[p][greenIndex]
		b := bl.pixels[p][blueIndex]
		// 0b11100000 (224): the top three bits of the brightness byte are
		// always set; the low five bits carry the 0-31 brightness value.
		bitwise := 224
		writeByte(bitwise | brightness)
		writeByte(b)
		writeByte(g)
		writeByte(r)
	}
	eof()
}
// SetAll sets all pixels to specified r, g, b colour. Show must be called to
// update the LEDs.
func (bl *Blinkt) SetAll(r int, g int, b int) *Blinkt {
	for i := range bl.pixels {
		bl.SetPixel(i, r, g, b)
	}
	return bl
}

// SetPixel sets an individual pixel to specified r, g, b colour. Show must
// be called to update the LEDs.
func (bl *Blinkt) SetPixel(p int, r int, g int, b int) *Blinkt {
	px := &bl.pixels[p]
	px[redIndex], px[greenIndex], px[blueIndex] = r, g, b
	return bl
}

// SetBrightness sets the brightness of all pixels. Brightness supplied
// should be between: 0.0 to 1.0
func (bl *Blinkt) SetBrightness(brightness float64) *Blinkt {
	raw := convertBrightnessToInt(brightness)
	for i := range bl.pixels {
		bl.pixels[i][brightnessIndex] = raw
	}
	return bl
}

// SetPixelBrightness sets the brightness of pixel p. Brightness supplied
// should be between: 0.0 to 1.0
func (bl *Blinkt) SetPixelBrightness(p int, brightness float64) *Blinkt {
	bl.pixels[p][brightnessIndex] = convertBrightnessToInt(brightness)
	return bl
}
func initPixels(brightness int) [8][4]int {
var pixels [8][4]int
for p, _ := range pixels {
pixels[p][redIndex] = 0
pixels[p][greenIndex] = 0
pixels[p][blueIndex] = 0
pixels[p][brightnessIndex] = brightness
}
return pixels
}
// Setup initializes GPIO via WiringPi base library.
// It must be called once before Show; it configures the data and clock
// GPIOs as outputs.
func (bl *Blinkt) Setup() {
	rpi.WiringPiSetup()
	rpi.PinMode(rpi.GpioToPin(DAT), rpi.OUTPUT)
	rpi.PinMode(rpi.GpioToPin(CLK), rpi.OUTPUT)
}

// NewBlinkt creates a Blinkt to interact with. You must call "Setup()" immediately afterwards.
// The optional brightness argument (0.0-1.0) overrides the default raw
// brightness; only the first value is used.
func NewBlinkt(brightness ...float64) Blinkt {
	//brightness is optional so set the default
	brightnessInt := defaultBrightnessInt
	//over-ride the default if the user has supplied a brightness value
	if len(brightness) > 0 {
		brightnessInt = convertBrightnessToInt(brightness[0])
	}
	return Blinkt{
		pixels: initPixels(brightnessInt),
	}
}

// Blinkt use the NewBlinkt function to initialize the pixels property.
// pixels holds one [red, green, blue, brightness] quadruple per LED.
type Blinkt struct {
	pixels [8][4]int
}

// init performs no package-level setup.
func init() {
}
package models
import (
"reflect"
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
// consistencyCheckable a type that can be tested for database consistency
// (its denormalized counter columns match the actual row counts).
type consistencyCheckable interface {
	checkForConsistency(t *testing.T)
}

// CheckConsistencyForAll test that the entire database is consistent,
// running the consistency check over every supported model.
func CheckConsistencyForAll(t *testing.T) {
	CheckConsistencyFor(t,
		&User{},
		&Repository{},
		&Issue{},
		&PullRequest{},
		&Milestone{},
		&Label{},
		&Team{},
		&Action{})
}
// CheckConsistencyFor test that all matching database entries are consistent.
// For each bean it builds a *[]T via reflection so the ORM can load every
// row of that bean's table, then runs checkForConsistency on each entity.
func CheckConsistencyFor(t *testing.T, beansToCheck ...interface{}) {
	for _, bean := range beansToCheck {
		// Construct a pointer to a slice of the bean's type for Find.
		sliceType := reflect.SliceOf(reflect.TypeOf(bean))
		sliceValue := reflect.MakeSlice(sliceType, 0, 10)
		ptrToSliceValue := reflect.New(sliceType)
		ptrToSliceValue.Elem().Set(sliceValue)
		assert.NoError(t, x.Table(bean).Find(ptrToSliceValue.Interface()))
		sliceValue = ptrToSliceValue.Elem()
		for i := 0; i < sliceValue.Len(); i++ {
			entity := sliceValue.Index(i).Interface()
			checkable, ok := entity.(consistencyCheckable)
			if !ok {
				t.Errorf("Expected %+v (of type %T) to be checkable for consistency",
					entity, entity)
			} else {
				checkable.checkForConsistency(t)
			}
		}
	}
}
// getCount returns the number of database rows matching bean via the given
// engine, failing the test on a database error.
func getCount(t *testing.T, e Engine, bean interface{}) int64 {
	total, err := e.Count(bean)
	assert.NoError(t, err)
	return total
}

// assertCount asserts that exactly expected database rows match bean.
func assertCount(t *testing.T, bean interface{}, expected int) {
	actual := getCount(t, x, bean)
	assert.EqualValues(t, expected, actual,
		"Failed consistency test, the counted bean (of type %T) was %+v", bean, bean)
}
// checkForConsistency verifies that a user's denormalized counters (repos,
// stars, members, teams, following/followers) match the actual row counts,
// and that non-organization users carry no member/team counts.
func (user *User) checkForConsistency(t *testing.T) {
	assertCount(t, &Repository{OwnerID: user.ID}, user.NumRepos)
	assertCount(t, &Star{UID: user.ID}, user.NumStars)
	assertCount(t, &OrgUser{OrgID: user.ID}, user.NumMembers)
	assertCount(t, &Team{OrgID: user.ID}, user.NumTeams)
	assertCount(t, &Follow{UserID: user.ID}, user.NumFollowing)
	assertCount(t, &Follow{FollowID: user.ID}, user.NumFollowers)
	if user.Type != UserTypeOrganization {
		assert.EqualValues(t, 0, user.NumMembers)
		assert.EqualValues(t, 0, user.NumTeams)
	}
}

// checkForConsistency verifies a repository's cached lower-case name and its
// star/milestone/fork/watch/issue/pull counters against the database.
func (repo *Repository) checkForConsistency(t *testing.T) {
	assert.Equal(t, repo.LowerName, strings.ToLower(repo.Name), "repo: %+v", repo)
	assertCount(t, &Star{RepoID: repo.ID}, repo.NumStars)
	assertCount(t, &Milestone{RepoID: repo.ID}, repo.NumMilestones)
	assertCount(t, &Repository{ForkID: repo.ID}, repo.NumForks)
	if repo.IsFork {
		AssertExistsAndLoadBean(t, &Repository{ID: repo.ForkID})
	}
	// Watches in "don't watch" mode must not be counted.
	actual := getCount(t, x.Where("Mode<>?", RepoWatchModeDont), &Watch{RepoID: repo.ID})
	assert.EqualValues(t, repo.NumWatches, actual,
		"Unexpected number of watches for repo %+v", repo)
	actual = getCount(t, x.Where("is_pull=?", false), &Issue{RepoID: repo.ID})
	assert.EqualValues(t, repo.NumIssues, actual,
		"Unexpected number of issues for repo %+v", repo)
	actual = getCount(t, x.Where("is_pull=? AND is_closed=?", false, true), &Issue{RepoID: repo.ID})
	assert.EqualValues(t, repo.NumClosedIssues, actual,
		"Unexpected number of closed issues for repo %+v", repo)
	actual = getCount(t, x.Where("is_pull=?", true), &Issue{RepoID: repo.ID})
	assert.EqualValues(t, repo.NumPulls, actual,
		"Unexpected number of pulls for repo %+v", repo)
	actual = getCount(t, x.Where("is_pull=? AND is_closed=?", true, true), &Issue{RepoID: repo.ID})
	assert.EqualValues(t, repo.NumClosedPulls, actual,
		"Unexpected number of closed pulls for repo %+v", repo)
	actual = getCount(t, x.Where("is_closed=?", true), &Milestone{RepoID: repo.ID})
	assert.EqualValues(t, repo.NumClosedMilestones, actual,
		"Unexpected number of closed milestones for repo %+v", repo)
}

// checkForConsistency verifies an issue's comment counter and, for pull
// requests, that the PR row shares the issue's index.
func (issue *Issue) checkForConsistency(t *testing.T) {
	actual := getCount(t, x.Where("type=?", CommentTypeComment), &Comment{IssueID: issue.ID})
	assert.EqualValues(t, issue.NumComments, actual,
		"Unexpected number of comments for issue %+v", issue)
	if issue.IsPull {
		pr := AssertExistsAndLoadBean(t, &PullRequest{IssueID: issue.ID}).(*PullRequest)
		assert.EqualValues(t, pr.Index, issue.Index)
	}
}

// checkForConsistency verifies a pull request's backing issue exists, is
// flagged as a pull, and shares the same index.
func (pr *PullRequest) checkForConsistency(t *testing.T) {
	issue := AssertExistsAndLoadBean(t, &Issue{ID: pr.IssueID}).(*Issue)
	assert.True(t, issue.IsPull)
	assert.EqualValues(t, issue.Index, pr.Index)
}

// checkForConsistency verifies a milestone's open/closed issue counters.
func (milestone *Milestone) checkForConsistency(t *testing.T) {
	assertCount(t, &Issue{MilestoneID: milestone.ID}, milestone.NumIssues)
	actual := getCount(t, x.Where("is_closed=?", true), &Issue{MilestoneID: milestone.ID})
	assert.EqualValues(t, milestone.NumClosedIssues, actual,
		"Unexpected number of closed issues for milestone %+v", milestone)
}

// checkForConsistency verifies a label's issue counters by loading its
// issue-label links and counting how many of those issues are closed.
func (label *Label) checkForConsistency(t *testing.T) {
	issueLabels := make([]*IssueLabel, 0, 10)
	assert.NoError(t, x.Find(&issueLabels, &IssueLabel{LabelID: label.ID}))
	assert.EqualValues(t, label.NumIssues, len(issueLabels),
		"Unexpected number of issue for label %+v", label)
	issueIDs := make([]int64, len(issueLabels))
	for i, issueLabel := range issueLabels {
		issueIDs[i] = issueLabel.IssueID
	}
	// An empty IN clause would be invalid SQL, so only query when IDs exist.
	expected := int64(0)
	if len(issueIDs) > 0 {
		expected = getCount(t, x.In("id", issueIDs).Where("is_closed=?", true), &Issue{})
	}
	assert.EqualValues(t, expected, label.NumClosedIssues,
		"Unexpected number of closed issues for label %+v", label)
}

// checkForConsistency verifies a team's member and repository counters.
func (team *Team) checkForConsistency(t *testing.T) {
	assertCount(t, &TeamUser{TeamID: team.ID}, team.NumMembers)
	assertCount(t, &TeamRepo{TeamID: team.ID}, team.NumRepos)
}

// checkForConsistency verifies an action's privacy flag matches its repository's.
func (action *Action) checkForConsistency(t *testing.T) {
	repo := AssertExistsAndLoadBean(t, &Repository{ID: action.RepoID}).(*Repository)
	assert.Equal(t, repo.IsPrivate, action.IsPrivate, "action: %+v", action)
}
package curve
import (
"errors"
"fmt"
"math/big"
GF "github.com/armfazh/tozan-ecc/field"
)
// weCurve is a Weierstrass curve y^2 = x^3 + Ax + B over the field F.
type weCurve struct{ *params }

// W is a short alias for a pointer to a Weierstrass curve.
type W = *weCurve

func (e *weCurve) String() string { return "y^2=x^3+Ax+B\n" + e.params.String() }

// New returns the curve itself when its parameters are valid and panics otherwise.
func (e *weCurve) New() EllCurve {
	if e.IsValid() {
		return e
	}
	panic(errors.New("can't instantiate a Weierstrass curve"))
}

// NewPoint returns the affine point (x, y) on the curve; it panics when
// (x, y) does not satisfy the curve equation.
func (e *weCurve) NewPoint(x, y GF.Elt) (P Point) {
	if P = (&ptWe{e, &afPoint{x: x, y: y}}); e.IsOnCurve(P) {
		return P
	}
	panic(fmt.Errorf("p=%v not on curve", P))
}

// IsValid reports whether the curve is non-singular, i.e. whether the
// discriminant -16(4A^3 + 27B^2) is non-zero.
func (e *weCurve) IsValid() bool {
	F := e.F
	t0 := F.Sqr(e.A)          // A^2
	t0 = F.Mul(t0, e.A)       // A^3
	t0 = F.Add(t0, t0)        // 2A^3
	t0 = F.Add(t0, t0)        // 4A^3
	t1 := F.Sqr(e.B)          // B^2
	t1 = F.Mul(t1, F.Elt(27)) // 27B^2
	t0 = F.Add(t0, t1)        // 4A^3+27B^2
	t0 = F.Add(t0, t0)        // 2(4A^3+27B^2)
	t0 = F.Add(t0, t0)        // 4(4A^3+27B^2)
	t0 = F.Add(t0, t0)        // 8(4A^3+27B^2)
	t0 = F.Add(t0, t0)        // 16(4A^3+27B^2)
	t0 = F.Neg(t0)            // -16(4A^3+27B^2)
	return !F.IsZero(t0)      // -16(4A^3+27B^2) != 0
}
// IsEqual reports whether both curves share the same field and the same
// A, B coefficients.
func (e *weCurve) IsEqual(ec EllCurve) bool {
	e0 := ec.(*weCurve)
	return e.F.IsEqual(e0.F) && e.F.AreEqual(e.A, e0.A) && e.F.AreEqual(e.B, e0.B)
}

// IsOnCurve reports whether p satisfies y^2 = x^3 + Ax + B; the point at
// infinity is always on the curve.
func (e *weCurve) IsOnCurve(p Point) bool {
	if _, isZero := p.(*infPoint); isZero {
		return isZero
	}
	P := p.(*ptWe)
	F := e.F
	t0 := e.EvalRHS(P.x)
	t1 := F.Sqr(P.y) // y^2
	return F.AreEqual(t0, t1)
}

// EvalRHS evaluates the right-hand side of the curve equation,
// x^3 + Ax + B, using Horner's rule.
func (e *weCurve) EvalRHS(x GF.Elt) GF.Elt {
	F := e.F
	t0 := F.Sqr(x)        // x^2
	t0 = F.Add(t0, e.A)   // x^2+A
	t0 = F.Mul(t0, x)     // (x^2+A)x
	return F.Add(t0, e.B) // (x^2+A)x+B
}

// Identity returns the point at infinity (the group identity).
func (e *weCurve) Identity() Point { return &infPoint{} }

// Add returns p+q, dispatching to the identity, inverse, doubling or
// generic chord cases as appropriate.
func (e *weCurve) Add(p, q Point) Point {
	if p.IsIdentity() {
		return q.Copy()
	} else if q.IsIdentity() {
		return p.Copy()
	} else if p.IsEqual(e.Neg(q)) {
		return e.Identity()
	} else if p.IsEqual(q) {
		return e.Double(p)
	} else {
		return e.add(p, q)
	}
}

// Neg returns -p, i.e. (x, -y); the identity negates to itself.
func (e *weCurve) Neg(p Point) Point {
	if _, isZero := p.(*infPoint); isZero {
		return e.Identity()
	}
	P := p.(*ptWe)
	return &ptWe{e, &afPoint{x: P.x.Copy(), y: e.F.Neg(P.y)}}
}
// add implements the generic chord rule for two distinct, non-inverse
// affine points; Add must have filtered out the special cases first, so
// equal x-coordinates here indicate a caller bug.
func (e *weCurve) add(p, q Point) Point {
	P := p.(*ptWe)
	Q := q.(*ptWe)
	F := e.F
	if F.AreEqual(P.x, Q.x) {
		panic("wrong inputs")
	}
	var t0, t1, ll GF.Elt
	t0 = F.Sub(Q.y, P.y) // (y2-y1)
	t1 = F.Sub(Q.x, P.x) // (x2-x1)
	t1 = F.Inv(t1)       // 1/(x2-x1)
	ll = F.Mul(t0, t1)   // l = (y2-y1)/(x2-x1)
	t0 = F.Sqr(ll)       // l^2
	t0 = F.Sub(t0, P.x)  // l^2-x1
	x := F.Sub(t0, Q.x)  // x' = l^2-x1-x2
	t0 = F.Sub(P.x, x)   // x1-x3
	t0 = F.Mul(t0, ll)   // l(x1-x3)
	y := F.Sub(t0, P.y)  // y3 = l(x1-x3)-y1
	return &ptWe{e, &afPoint{x: x, y: y}}
}

// Double returns 2p using the tangent rule; the identity and two-torsion
// points (y == 0, vertical tangent) double to the identity.
func (e *weCurve) Double(p Point) Point {
	if _, ok := p.(*infPoint); ok {
		return e.Identity()
	}
	P := p.(*ptWe)
	if P.IsTwoTorsion() {
		return e.Identity()
	}
	F := e.F
	var t0, t1, ll GF.Elt
	t0 = F.Sqr(P.x)          // x^2
	t0 = F.Mul(t0, F.Elt(3)) // 3x^2
	t0 = F.Add(t0, e.A)      // 3x^2+A
	t1 = F.Add(P.y, P.y)     // 2y
	t1 = F.Inv(t1)           // 1/2y
	ll = F.Mul(t0, t1)       // l = (3x^2+A)/(2y)
	t0 = F.Sqr(ll)           // l^2
	t0 = F.Sub(t0, P.x)      // l^2-x
	x := F.Sub(t0, P.x)      // x' = l^2-2x
	t0 = F.Sub(P.x, x)       // x-x'
	t0 = F.Mul(t0, ll)       // l(x-x')
	y := F.Sub(t0, P.y)      // y3 = l(x-x')-y1
	return &ptWe{e, &afPoint{x: x, y: y}}
}
// ClearCofactor multiplies p by the curve's cofactor H.
func (e *weCurve) ClearCofactor(p Point) Point { return e.ScalarMult(p, e.H) }

// ScalarMult returns k*p using the shared scalar-multiplication routine.
func (e *weCurve) ScalarMult(p Point, k *big.Int) Point { return e.params.scalarMult(e, p, k) }

// ptWe is an affine point on a weCurve curve.
type ptWe struct {
	*weCurve
	*afPoint
}

func (p *ptWe) String() string { return p.afPoint.String() }

// Copy returns an independent copy of the point (the curve is shared).
func (p *ptWe) Copy() Point { return &ptWe{p.weCurve, p.copy()} }

// IsEqual reports whether q is the same point on the same curve.
func (p *ptWe) IsEqual(q Point) bool {
	qq := q.(*ptWe)
	return p.weCurve.IsEqual(qq.weCurve) && p.isEqual(p.F, qq.afPoint)
}

// IsIdentity is always false: an affine point never represents the point at infinity.
func (p *ptWe) IsIdentity() bool { return false }

// IsTwoTorsion reports whether p has order two (y == 0).
func (p *ptWe) IsTwoTorsion() bool { return p.F.IsZero(p.y) }
package phomath
import "math"
// Vector2Like is something that has an XY method that returns x and y coordinate values.
type Vector2Like interface {
	XY() (float64, float64)
}

// static check that Vector2 is Vector2Like
var _ Vector2Like = &Vector2{}

// NewVector2 creates a new Vector2 with the given coordinates.
func NewVector2(x, y float64) *Vector2 {
	return &Vector2{
		X: x,
		Y: y,
	}
}

// Vector2 is a representation of a vector in 2D space.
// Mutating methods modify the receiver in place and return it for chaining.
type Vector2 struct {
	X, Y float64
}

// XY returns the x and y values.
func (v *Vector2) XY() (x, y float64) {
	return v.X, v.Y
}

// Clone makes a clone of this Vector2.
func (v *Vector2) Clone() *Vector2 {
	return NewVector2(v.X, v.Y)
}

// Copy overwrites this Vector2 with source's components and returns it.
func (v *Vector2) Copy(source *Vector2) *Vector2 {
	return v.Set(source.X, source.Y)
}

// SetFromVectorLike sets the x, y values of this Vector from a given Vector2Like object.
func (v *Vector2) SetFromVectorLike(l Vector2Like) *Vector2 {
	return v.Set(l.XY())
}

// Set the `x` and `y` components of the this Vector to the given `x` and `y` values.
func (v *Vector2) Set(x, y float64) *Vector2 {
	v.X, v.Y = x, y
	return v
}

// SetTo is an alias for `Vector2.Set`.
func (v *Vector2) SetTo(x, y float64) *Vector2 {
	return v.Set(x, y)
}

// SetToPolar sets the `x` and `y` values of this object from a given polar
// coordinate (azimuth in radians, radius as the length).
func (v *Vector2) SetToPolar(azimuth, radius float64) *Vector2 {
	return v.Set(math.Cos(azimuth)*radius, math.Sin(azimuth)*radius)
}

// Equals checks whether this Vector is equal to a given Vector, comparing
// each component to within the package Epsilon.
func (v *Vector2) Equals(other *Vector2) bool {
	return math.Abs(v.X-other.X) < Epsilon && math.Abs(v.Y-other.Y) < Epsilon
}
// Angle calculates the angle between this Vector and the positive x-axis,
// in radians, normalized to the range [0, 2π).
func (v *Vector2) Angle() float64 {
	// math.Atan2 takes (y, x) and yields a value in (-π, π].
	angle := math.Atan2(v.Y, v.X)
	if angle < 0 {
		// Wrap negative angles round into [0, 2π) instead of clamping them
		// all to a single value. PI2 is the package's 2π constant.
		angle += PI2
	}
	return angle
}

// SetAngle sets the angle of this Vector, preserving its length.
func (v *Vector2) SetAngle(angle float64) *Vector2 {
	return v.SetToPolar(angle, v.Length())
}
// Add the given Vector to this Vector.
// Addition is component-wise and mutates the vector.
func (v *Vector2) Add(source *Vector2) *Vector2 {
	v.X += source.X
	v.Y += source.Y
	return v
}

// Subtract the given Vector from this Vector.
// Subtraction is component-wise and mutates the vector.
func (v *Vector2) Subtract(source *Vector2) *Vector2 {
	v.X -= source.X
	v.Y -= source.Y
	return v
}

// Multiply this vector with the given Vector.
// Multiplication is component-wise and mutates the vector.
func (v *Vector2) Multiply(source *Vector2) *Vector2 {
	v.X *= source.X
	v.Y *= source.Y
	return v
}

// Scale this Vector with a scalar.
// Multiplication is component-wise and mutates the vector.
func (v *Vector2) Scale(scalar float64) *Vector2 {
	v.X *= scalar
	v.Y *= scalar
	return v
}

// Divide this vector by the given Vector.
// Division is component-wise and mutates the vector.
func (v *Vector2) Divide(source *Vector2) *Vector2 {
	v.X /= source.X
	v.Y /= source.Y
	return v
}

// Negate the x/y values of this vector.
func (v *Vector2) Negate() *Vector2 {
	v.X, v.Y = -v.X, -v.Y
	return v
}
// Distance calculate the distance between this Vector and the given Vector.
func (v *Vector2) Distance(source *Vector2) float64 {
	return math.Sqrt(v.DistanceSquared(source))
}

// DistanceSquared calculate the distance squared between this Vector and the given Vector.
func (v *Vector2) DistanceSquared(source *Vector2) float64 {
	dx, dy := source.X-v.X, source.Y-v.Y
	return dx*dx + dy*dy
}

// Length calculates the length (or magnitude) of this Vector.
func (v *Vector2) Length() float64 {
	return math.Sqrt(v.LengthSquared())
}

// LengthSquared calculates the length squared of this Vector.
func (v *Vector2) LengthSquared() float64 {
	return v.X*v.X + v.Y*v.Y
}

// SetLength sets the length (or magnitude) of this Vector, keeping its direction.
func (v *Vector2) SetLength(l float64) *Vector2 {
	return v.Normalize().Scale(l)
}
// Normalize scales this Vector to length 1 (a unit vector). The zero vector
// is left unchanged.
func (v *Vector2) Normalize() *Vector2 {
	// Work from the squared length: Length() already applies a square root,
	// so dividing by Sqrt(Length()) would scale by 1/len^0.5, not 1/len,
	// producing a vector of length sqrt(len) instead of 1.
	lsq := v.LengthSquared()
	if lsq > 0 {
		v.Scale(1 / math.Sqrt(lsq))
	}
	return v
}

// NormalizeRightHand rotates this Vector to its perpendicular, in the positive direction.
func (v *Vector2) NormalizeRightHand() *Vector2 {
	return v.Set(-v.Y, v.X)
}

// NormalizeLeftHand rotates this Vector to its perpendicular, in the negative direction.
func (v *Vector2) NormalizeLeftHand() *Vector2 {
	return v.Set(v.Y, -v.X)
}
// Dot calculates the dot product of this Vector and the given Vector
// (x1*x2 + y1*y2). The original mistakenly added v.Y instead of
// multiplying the y components.
func (v *Vector2) Dot(other *Vector2) float64 {
	return v.X*other.X + v.Y*other.Y
}

// Cross calculates the scalar (z-component) cross product of this Vector
// and the given Vector: x1*y2 - y1*x2.
func (v *Vector2) Cross(other *Vector2) float64 {
	return v.X*other.Y - v.Y*other.X
}
// Lerp linearly interpolates between this Vector and the given Vector by t
// (t=0 keeps this Vector, t=1 reaches other), mutating this Vector.
func (v *Vector2) Lerp(other *Vector2, t float64) *Vector2 {
	return v.Set(v.X+t*(other.X-v.X), v.Y+t*(other.Y-v.Y))
}

// TransformMat3 transforms this Vector with the given Matrix3.
// The matrix values are indexed in column-major order.
func (v *Vector2) TransformMat3(m3 *Matrix3) *Vector2 {
	m := m3.Values
	return v.Set(m[0]*v.X+m[3]*v.Y+m[6], m[1]*v.X+m[4]*v.Y+m[7])
}

// TransformMat4 transforms this Vector with the given Matrix4.
// Only the 2D-affine part of the matrix is applied.
func (v *Vector2) TransformMat4(m4 *Matrix4) *Vector2 {
	m := m4.Values
	return v.Set(m[0]*v.X+m[4]*v.Y+m[12], m[1]*v.X+m[5]*v.Y+m[13])
}

// Reset makes this Vector the zero vector (0, 0).
func (v *Vector2) Reset() *Vector2 {
	return v.Set(0, 0)
}

// Limit the length (or magnitude) of this Vector to at most l.
func (v *Vector2) Limit(l float64) *Vector2 {
	if v.Length() > l {
		v.SetLength(l)
	}
	return v
}
// Reflect this Vector off a line defined by a normal, mutating this Vector.
// The given normal is normalized first, so it need not be unit length.
func (v *Vector2) Reflect(other *Vector2) *Vector2 {
	normal := other.Clone().Normalize()
	return v.Subtract(normal.Scale(2 * v.Dot(normal)))
}

// Mirror reflects this Vector across another.
func (v *Vector2) Mirror(axis *Vector2) *Vector2 {
	return v.Reflect(axis).Negate()
}

// Rotate this Vector by an angle amount (radians, counter-clockwise).
func (v *Vector2) Rotate(radians float64) *Vector2 {
	c, s := math.Cos(radians), math.Sin(radians)
	return v.Set(c*v.X-s*v.Y, s*v.X+c*v.Y)
}
package testza
import (
"fmt"
"math"
"math/rand"
"testing"
"github.com/MarvinJWendt/testza/internal"
)
// MockInputsFloats64Helper contains float64 test sets.
// Use testza.Use.Mock.Inputs.Floats64.
type MockInputsFloats64Helper struct{}

// Full returns a test set of 100 random float64 values: one positive and one
// negative per iteration, with bound magnitudes growing with the iteration
// index. For i == 0 the bounds degenerate to ±math.MaxFloat64 (see
// GenerateRandomPositive / GenerateRandomNegative).
func (h MockInputsFloats64Helper) Full() (floats []float64) {
	for i := 0; i < 50; i++ {
		floats = append(floats,
			h.GenerateRandomPositive(1, float64(i*1000))[0],
			h.GenerateRandomNegative(1, float64(i*1000*-1))[0],
		)
	}
	return
}
// GenerateRandomRange generates count random float64 values in the
// half-open interval [min, max).
func (h MockInputsFloats64Helper) GenerateRandomRange(count int, min, max float64) (floats []float64) {
	for i := 0; i < count; i++ {
		floats = append(floats, min+rand.Float64()*(max-min))
	}
	return
}

// GenerateRandomPositive generates count random positive float64 values
// with a maximum of max.
// If the maximum is 0, or below, the maximum will be set to math.MaxFloat64.
func (h MockInputsFloats64Helper) GenerateRandomPositive(count int, max float64) (floats []float64) {
	if max <= 0 {
		max = math.MaxFloat64
	}
	floats = append(floats, h.GenerateRandomRange(count, 0, max)...)
	return
}

// GenerateRandomNegative generates count random negative float64 values
// with a minimum of min.
// If the minimum is positive, it will be converted to a negative number.
// If it is set to 0, the minimum becomes -math.MaxFloat64 (no practical limit).
func (h MockInputsFloats64Helper) GenerateRandomNegative(count int, min float64) (floats []float64) {
	if min > 0 {
		min *= -1
	} else if min == 0 {
		min = math.MaxFloat64 * -1
	}
	floats = append(floats, h.GenerateRandomRange(count, min, 0)...)
	return
}
// RunTests runs a subtest for every value in the test set, passing the index
// and value to testFunc.
// You can use the value as input parameter for your functions, to sanity
// test against many different cases. This ensures that your functions have
// correct error handling and enables you to test against hundreds of cases
// easily. Each value runs as its own named subtest; it errors out when t is
// not the builtin *testing.T.
func (h MockInputsFloats64Helper) RunTests(t testRunner, testSet []float64, testFunc func(t *testing.T, index int, f float64)) {
	// Receiver renamed from "s" to "h" for consistency with the other
	// MockInputsFloats64Helper methods.
	if test, ok := t.(helper); ok {
		test.Helper()
	}

	test := internal.GetTest(t)
	if test == nil {
		t.Error(internal.ErrCanNotRunIfNotBuiltinTesting)
		return
	}

	for i, v := range testSet {
		test.Run(fmt.Sprint(v), func(t *testing.T) {
			t.Helper()

			testFunc(t, i, v)
		})
	}
}
// Modify returns a modified version of a test set.
func (h MockInputsFloats64Helper) Modify(inputSlice []float64, f func(index int, value float64) float64) (floats []float64) {
for i, input := range inputSlice {
floats = append(floats, f(i, input))
}
return
} | mock-floats64.go | 0.801159 | 0.489015 | mock-floats64.go | starcoder |
package main
import (
	"image"
	"image/color"
	"image/png"
	"io"
	"log"
	"math"
	"math/rand"
	"os"
)
// tolerance below which the quadratic coefficient in minroot is treated as zero.
const tolerance = 0.000000001

// world holds every sphere in the scene; defSphere appends to it.
var world []sphere

// sq returns x squared.
func sq(x float64) float64 {
	return x * x
}

// minroot returns the smaller real root of a*x^2 + b*x + c = 0.
// When |a| < tolerance the equation is treated as linear (b*x + c = 0) and
// its single root -c/b is returned (the previous code returned -b/c, the
// reciprocal). If the discriminant is negative the result is NaN.
func minroot(a, b, c float64) float64 {
	if math.Abs(a) < tolerance {
		// Linear case: b*x + c = 0  =>  x = -c/b.
		return -c / b
	}
	discrt := math.Sqrt(sq(b) - 4*a*c)
	return math.Min((-b+discrt)/(2*a), (-b-discrt)/(2*a))
}

// mag returns the Euclidean length of v.
func mag(v vector) float64 {
	return math.Sqrt(magsq(v))
}

// magsq returns the squared Euclidean length of v.
func magsq(v vector) float64 {
	return sq(v.x) + sq(v.y) + sq(v.z)
}

// vector is a point or direction in 3-D space.
type vector struct {
	x, y, z float64
}

// eye is the camera position rays are cast from.
var eye vector = vector{0, 0, 800}

// sphere is a scene object: a coloured ball with a radius and a centre.
type sphere struct {
	color  color.Color
	radius float64
	center vector
}
// lambert returns the Lambertian shading coefficient for the ray direction v
// hitting sphere s at the given intersection point.
func lambert(s sphere, intersection vector, v vector) float64 {
	d := dot(normal(s, intersection), v)
	return math.Max(0.0, math.Abs(d))
}

// dot returns the dot product of a and b.
func dot(a, b vector) float64 {
	sum := a.x * b.x
	sum += a.y * b.y
	sum += a.z * b.z
	return sum
}

// normal returns the unit surface normal of sphere s at the intersection point.
func normal(s sphere, intersection vector) vector {
	toCenter := diff(s.center, intersection)
	return unit(toCenter)
}
// scale returns v scaled by the factor s.
func scale(v vector, s float64) vector {
	return vector{
		x: v.x * s,
		y: v.y * s,
		z: v.z * s,
	}
}

// defSphere creates a sphere at (x, y, z) with radius r and colour c,
// registers it in the global world slice, and returns it.
func defSphere(x, y, z, r float64, c color.Color) sphere {
	s := sphere{
		color:  c,
		radius: r,
		center: vector{x: x, y: y, z: z},
	}
	world = append(world, s)
	return s
}

// scaleColor multiplies the R, G and B channels of c by scale, leaving the
// alpha channel untouched. Channels are the 16-bit values from Color.RGBA.
func scaleColor(c color.Color, scale float64) color.Color {
	r, g, b, a := c.RGBA()
	return color.RGBA64{
		R: uint16(float64(r) * scale),
		G: uint16(float64(g) * scale),
		B: uint16(float64(b) * scale),
		A: uint16(a),
	}
}

// unit returns the unit vector pointing in the direction of a.
func unit(a vector) vector {
	d := mag(a)
	return vector{a.x / d, a.y / d, a.z / d}
}

// diff returns the vector from `to` to `from` (component-wise from - to).
func diff(from, to vector) vector {
	return vector{from.x - to.x, from.y - to.y, from.z - to.z}
}
// sendRay traces a ray from `from` in direction `dir`; if it hits a sphere
// that sphere's colour is returned scaled by the Lambert factor, otherwise
// a fully transparent black is returned.
func sendRay(from, dir vector) color.Color {
	if s, hit, ok := firstHit(from, dir); ok {
		return scaleColor(s.color, lambert(s, hit, dir))
	}
	return color.RGBA{0, 0, 0, 0}
}

// firstHit returns the sphere in world closest to `from` along `dir`,
// together with the intersection point; ok is false when nothing is hit.
func firstHit(from, dir vector) (sphere, vector, bool) {
	var hit vector
	var dist float64 = -1.0
	var sp sphere
	found := false
	for _, s := range world {
		if h, ok := intersect(s, from, dir); ok {
			d := mag(diff(h, from))
			// dist < 0 means no hit has been recorded yet.
			if dist < 0.0 || d < dist {
				sp, dist, hit, found = s, d, h, true
			}
		}
	}
	return sp, hit, found
}

// intersect solves the ray/sphere quadratic for sphere s; it returns the
// intersection point at the smaller root, and false when no real
// intersection exists (minroot yields NaN).
func intersect(s sphere, pos, dir vector) (vector, bool) {
	n := minroot(magsq(dir),
		2*dot(diff(pos, s.center), dir),
		magsq(diff(pos, s.center))-sq(s.radius))
	if math.IsNaN(n) {
		return vector{}, false
	}
	return diff(pos, scale(dir, -n)), true
}

// colorAt traces the ray through image-plane coordinate (x, y) from the eye.
func colorAt(x, y int) color.Color {
	return sendRay(eye, unit(diff(vector{float64(x), float64(y), 0}, eye)))
}
// tracer renders the scene in world to out as an 800x600 PNG, casting one
// ray per pixel from the global eye position.
func tracer(out io.Writer) {
	const (
		width  = 800
		height = 600
		res    = 1
	)
	rect := image.Rect(0, 0, width, height)
	img := image.NewRGBA(rect)
	for i := 0; i < width; i++ {
		for j := 0; j < height; j++ {
			// Centre the image plane on the origin before tracing.
			img.Set(i, j, colorAt(i-int(width/2/float64(res)), j-int(height/2/float64(res))))
		}
	}
	// Report encoding failures instead of silently discarding the error.
	if err := png.Encode(out, img); err != nil {
		log.Printf("encoding PNG: %v", err)
	}
}
// main populates the scene with three large spheres and a back wall of
// small randomly coloured ones, then renders the image to stdout as PNG.
// NOTE(review): math/rand is never seeded here, so on Go versions before
// 1.20 the "random" colour pattern is identical on every run — confirm
// whether that determinism is intended.
func main() {
	defSphere(0, -300, -1200, 200, color.RGBA{0, 255, 0, 255})
	defSphere(-80, -150, -1200, 200, color.RGBA{0, 0, 255, 255})
	defSphere(70, -100, -1200, 200, color.RGBA64{65535, 0, 0, 65535})
	for i := -10; i < 11; i++ {
		for k := 2; k < 20; k++ {
			defSphere(float64(200*i), 700.0, float64(-400*k), 40, color.RGBA{uint8(rand.Int31n(256)), uint8(rand.Int31n(256)), uint8(rand.Int31n(256)), 255})
		}
	}
	tracer(os.Stdout)
}
package gs
import (
"fmt"
"github.com/dairaga/gs/funcs"
)
// Try is simplified Scala Try. Try like Either is either Success or Failure, and Failure contains error value.
type Try[T any] interface {
fmt.Stringer
// Fetch returns successful value and nil error if this is a Success, or v is zero value and err is from Failure.
Fetch() (v T, err error)
// Get returns successful value if Try is a Success, or panic.
Get() T
// IsSuccess returns true if this is a Success.
IsSuccess() bool
// Success returns successful value if this is a Sucess, or panic.
Success() T
// IsFailure returns true if this is a Failure.
IsFailure() bool
// Failed returns error from Failure, or returns ErrUnsupported.
Failed() error
// Exists returns true if this is a Success and satisfies given function p.
Exists(p funcs.Predict[T]) bool
// Forall returns true if this is a Failure, or value from Success satisfies given function p.
Forall(p funcs.Predict[T]) bool
// Foreach only applies given function op to value from Success.
Foreach(op func(T))
// Filter returns this if this is a Failure or value from Success satisfies given function p, otherwise returns Failure with ErrUnsatisfied.
Filter(p funcs.Predict[T]) Try[T]
// FilterNot returns this if this is a Failure or value from Succes does not satisfy given function p, otherwise returns Failure with ErrUnsatisfied.
FilterNot(funcs.Predict[T]) Try[T]
// GetOrElse returns value from Success, or returns given z.
GetOrElse(z T) T
// OrElse returns this if this is a Success, or return given z.
OrElse(z Try[T]) Try[T]
// Recover applies given function r if this is a Failure, or returns this if this is a Success.
Recover(r funcs.Func[error, T]) Try[T]
// RecoverWith applies given function r if this is a Failure, or returns this if this is a Success.
RecoverWith(r funcs.Func[error, Try[T]]) Try[T]
// Either returns a Either with error type in Left side.
Either() Either[error, T]
// Option returns Some if this is a Success, or returns None.
Option() Option[T]
}
type try[T any] struct {
*either[error, T]
}
var _ Try[int] = &try[int]{}
func (t *try[T]) String() string {
if t.ok {
return fmt.Sprintf(`Success(%v)`, t.right)
}
return fmt.Sprintf(`Failure(%s)`, t.left.Error())
}
func (t *try[T]) Fetch() (T, error) {
return t.right, t.left
}
func (t *try[T]) IsSuccess() bool {
return t.ok
}
func (t *try[T]) Success() T {
return t.Get()
}
func (t *try[T]) IsFailure() bool {
return !t.ok
}
func (t *try[T]) Failed() error {
if t.IsFailure() {
return t.left
}
return ErrUnsupported
}
func (t *try[T]) Filter(p funcs.Predict[T]) Try[T] {
if t.Forall(p) {
return t
}
return failure[T](ErrUnsatisfied)
}
func (t *try[T]) FilterNot(p funcs.Predict[T]) Try[T] {
return t.Filter(func(v T) bool { return !p(v) })
}
func (t *try[T]) OrElse(z Try[T]) Try[T] {
return funcs.Cond(t.ok, Try[T](t), z)
}
func (t *try[T]) Recover(r funcs.Func[error, T]) Try[T] {
if t.ok {
return t
}
return success(r(t.left))
}
func (t *try[T]) RecoverWith(r funcs.Func[error, Try[T]]) Try[T] {
if t.ok {
return t
}
return r(t.left)
}
func (t *try[T]) Either() Either[error, T] {
return t.either
}
func (t *try[T]) Option() Option[T] {
if t.ok {
return some(t.right)
}
return none[T]()
}
func success[T any](v T) *try[T] {
return &try[T]{
either: right[error](v),
}
}
func failure[T any](err error) *try[T] {
return &try[T]{
either: left[error, T](err),
}
}
func Success[T any](v T) Try[T] {
return success(v)
}
func Failure[T any](err error) Try[T] {
return failure[T](err)
} | try.go | 0.700588 | 0.419232 | try.go | starcoder |
package calendar
import (
"reflect"
"time"
"github.com/jinzhu/now"
)
// Week have time.Time data to represent week.
type Week []time.Time
// Next returns time.Time collection to represent next week.
func (week Week) Next() (nextWeek Week) {
for _, t := range week {
nextWeek = append(nextWeek, t.AddDate(0, 0, 7))
}
return
}
// Previous returns time.Time collection to represent previous week.
func (week Week) Previous() (previousWeek Week) {
for _, t := range week {
previousWeek = append(previousWeek, t.AddDate(0, 0, -7))
}
return
}
// Month have Week data to represent month.
type Month []Week
// Year have Month data to represent year.
type Year [12]Month
// Calendar have now.Now data.
type Calendar struct {
Now *now.Now
}
// Next sets new *now.Now for next month.
func (calendar *Calendar) Next() {
newDate := calendar.Now.BeginningOfMonth().AddDate(0, 1, 0)
calendar.Now = now.New(newDate)
}
// Previous sets new *now.Now for previous month.
func (calendar *Calendar) Previous() {
newDate := calendar.Now.BeginningOfMonth().AddDate(0, -1, 0)
calendar.Now = now.New(newDate)
}
// NextCalendar returns next Calendar.
func (calendar *Calendar) NextCalendar() (nextCalendar *Calendar) {
newDate := calendar.Now.BeginningOfMonth().AddDate(0, 1, 0)
nextCalendar = &Calendar{
Now: now.New(newDate),
}
return
}
// PreviousCalendar returns previous Calendar.
func (calendar *Calendar) PreviousCalendar() (previousCalendar *Calendar) {
newDate := calendar.Now.BeginningOfMonth().AddDate(0, -1, 0)
previousCalendar = &Calendar{
Now: now.New(newDate),
}
return
}
// Week returns Week regarding current date.
func (calendar *Calendar) Week() (week Week) {
beginningOfWeek := calendar.Now.BeginningOfWeek()
for i := 0; i < 7; i++ {
week = append(week, beginningOfWeek)
beginningOfWeek = beginningOfWeek.AddDate(0, 0, 1)
}
return
}
// NextWeek returns next Week regarding current date.
// It doesn't have side effect.
func (calendar *Calendar) NextWeek() (week Week) {
newDate := calendar.Now.AddDate(0, 0, 7)
calendar.Now = now.New(newDate)
defer func() {
calendar.Now = now.New(newDate.AddDate(0, 0, -7))
}()
week = calendar.Week()
return
}
// PreviousWeek returns previous Week regarding current date.
// It doesn't have side effect.
func (calendar *Calendar) PreviousWeek() (week Week) {
newDate := calendar.Now.AddDate(0, 0, -7)
calendar.Now = now.New(newDate)
defer func() {
calendar.Now = now.New(newDate.AddDate(0, 0, 7))
}()
week = calendar.Week()
return
}
// Month returns Month regarding current date.
func (calendar *Calendar) Month() (month Month) {
beginningOfMonth := calendar.Now.BeginningOfMonth()
endOfMonth := calendar.Now.EndOfMonth()
week := New(beginningOfMonth).Week()
lastWeek := New(endOfMonth).Week()
for !reflect.DeepEqual(lastWeek, week) {
month = append(month, week)
week = week.Next()
}
month = append(month, lastWeek)
return
}
// NextMonth returns next Month regarding current date.
func (calendar *Calendar) NextMonth() (month Month) {
month = calendar.NextCalendar().Month()
return
}
// PreviousMonth returns previous Month regarding current date.
func (calendar *Calendar) PreviousMonth() (month Month) {
month = calendar.PreviousCalendar().Month()
return
}
// Year returns Year regarding current date.
func (calendar *Calendar) Year() (year Year) {
var days [12]*Calendar
day := calendar.Now.BeginningOfYear()
for i := 0; i < 12; i++ {
days[i] = &Calendar{
Now: now.New(day),
}
day = day.AddDate(0, 1, 0)
}
for i, cal := range days {
year[i] = cal.Month()
}
return
} | calendar.go | 0.818229 | 0.459379 | calendar.go | starcoder |
package spider
import (
"math"
)
type LegPosition uint8
type Joint uint8
// Leg positions.
const (
FrontRight LegPosition = iota
FrontLeft
BackRight
BackLeft
)
// Servo connection order, within a leg.
const (
BodyCoxa Joint = iota
CoxaFemur
FemurTibia
)
const (
CoxaLength = 23.5
FemurLength = 38.0
TibiaLength = 81.0
)
// Represents a 3D point in space.
// X is towards the right of the robot.
// Y is towards the front of the robot.
// Z is towards the top of the robot.
// Distances are expressed in 1/32 of a millimeter.
type Point3D struct {
X, Y, Z float64
}
type Leg struct {
hipPt Point3D
toePt Point3D
}
func (l *Leg) init(pos LegPosition) {
// The canonical zero position of the toe is with the coxa at "45 degrees", the femur horizontal, and the tibia vertical.
// Therefore the hip joint is displaced by (coxa+femur)/sqrt(2), using Pythagoras' theorem.
hipOffset := (CoxaLength + FemurLength) / math.Sqrt(2)
var hipX, hipY float64
switch pos {
case FrontRight:
hipX = -hipOffset
hipY = -hipOffset
case FrontLeft:
hipX = hipOffset
hipY = -hipOffset
case BackRight:
hipX = -hipOffset
hipY = hipOffset
case BackLeft:
hipX = hipOffset
hipY = hipOffset
}
l.hipPt = Point3D{
X: hipX,
Y: hipY,
Z: TibiaLength,
}
}
func (l *Leg) SetToePoint(pt Point3D) {
l.toePt = pt
}
func (l *Leg) JointAngles() (float64, float64, float64) {
// Hip angle is measured counter-clockwise from a line projecting out from the side of the spider, so FrontLeft/BackRight angles are negative.
bodyCoxaAngle := math.Atan2(l.toePt.Y-l.hipPt.Y, l.toePt.X-l.hipPt.X)
// Total horizontal distance from hip to toe.
horizReach := math.Sqrt((l.toePt.X-l.hipPt.X)*(l.toePt.X-l.hipPt.X) + (l.toePt.Y-l.hipPt.Y)*(l.toePt.Y-l.hipPt.Y))
// Femur+tibia horizontal reach.
ftHorizReach := horizReach - CoxaLength
// Femur+tibia reach in 3D space.
// This gives us a triangle with sides (FemurLength, TibiaLength, ftReach).
ftReach := math.Sqrt(ftHorizReach*ftHorizReach + (l.toePt.Z-l.hipPt.Z)*(l.toePt.Z-l.hipPt.Z))
// Solve for angles, using the law of cosines.
// c^2 = a^2 + b^2 - 2*a*b*cos(C)
// 2*a*b*cos(C) = a^2 + b^2 - c^2
// cos(C) = (a^2 + b^2 - c^2) / (2*a*b)
// Or in coding terms:
// cosNum = a*a + b*b - c*c
// cosDenom = 2*a*b
// angleC = math.Acos(cosNum / cosDenom)
var cosNum, cosDenom float64
// Coxa-Femur angle is measured counter-clockwise from horizontal, so up is positive and down is negative.
// First, find the angle between the femur and the imaginary line from the coxa-femur joint down to the toe.
cosNum = ftReach*ftReach + FemurLength*FemurLength - TibiaLength*TibiaLength
cosDenom = 2.0 * ftReach * FemurLength
femurReachAngle := math.Acos(cosNum / cosDenom)
// Second, find the angle between horizontal and the imaginary line from the coxa-femur joint down to the toe.
reachAngle := math.Atan2(l.toePt.Z-l.hipPt.Z, ftHorizReach)
coxaFemurAngle := femurReachAngle + reachAngle
// Femur-Tibia angle is measured counter-clockwise from the femur, so it will always be positive, and bigger numbers represent a further reach.
cosNum = FemurLength*FemurLength + TibiaLength*TibiaLength - ftReach*ftReach
cosDenom = 2.0 * FemurLength * TibiaLength
femurTibiaAngle := math.Acos(cosNum / cosDenom)
return bodyCoxaAngle, coxaFemurAngle, femurTibiaAngle
} | tinygo/pkg/spider/leg.go | 0.667364 | 0.475666 | leg.go | starcoder |
package pm
import (
"math/big"
ethcommon "github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/crypto"
)
// Constants for byte sizes of Solidity types
const (
addressSize = 20
uint256Size = 32
bytes32Size = 32
)
// SignedTicket is a wrapper around a Ticket with the sender's signature over the ticket and
// the recipient recipientRand
type SignedTicket struct {
// Ticket contains ticket fields that are directly
// accessible on SignedTicket since it is embedded
*Ticket
// Sig is the sender's signature over the ticket
Sig []byte
// RecipientRand is the recipient's random value that should be
// the preimage for the ticket's recipientRandHash
RecipientRand *big.Int
}
// TicketParams represents the parameters defined by a receiver that a sender must adhere to when
// sending tickets to receiver.
type TicketParams struct {
Recipient ethcommon.Address
FaceValue *big.Int
WinProb *big.Int
RecipientRandHash ethcommon.Hash
Seed *big.Int
ExpirationBlock *big.Int
PricePerPixel *big.Rat
ExpirationParams *TicketExpirationParams
}
// WinProbRat returns the ticket WinProb as a percentage represented as a big.Rat
func (p *TicketParams) WinProbRat() *big.Rat {
return winProbRat(p.WinProb)
}
// TicketExpirationParams indicates when/how a ticket expires
type TicketExpirationParams struct {
CreationRound int64
CreationRoundBlockHash ethcommon.Hash
}
// TicketSenderParams identifies a unique ticket based on a sender's nonce and signature over a ticket hash
type TicketSenderParams struct {
SenderNonce uint32
Sig []byte
}
// TicketBatch is a group of tickets that share the same TicketParams, TicketExpirationParams and Sender
// Each ticket in a batch is identified by a unique TicketSenderParams
type TicketBatch struct {
*TicketParams
*TicketExpirationParams
Sender ethcommon.Address
SenderParams []*TicketSenderParams
}
// Tickets returns the tickets in the batch
func (b *TicketBatch) Tickets() []*Ticket {
var tickets []*Ticket
for i := 0; i < len(b.SenderParams); i++ {
ticket := &Ticket{
Recipient: b.Recipient,
Sender: b.Sender,
FaceValue: b.FaceValue,
WinProb: b.WinProb,
SenderNonce: b.SenderParams[i].SenderNonce,
RecipientRandHash: b.RecipientRandHash,
CreationRound: b.CreationRound,
CreationRoundBlockHash: b.CreationRoundBlockHash,
}
tickets = append(tickets, ticket)
}
return tickets
}
// Ticket is lottery ticket payment in a probabilistic micropayment protocol
// The expected value of the ticket constitutes the payment and can be
// calculated using the ticket's face value and winning probability
type Ticket struct {
// Recipient is the ETH address of recipient
Recipient ethcommon.Address
// Sender is the ETH address of sender
Sender ethcommon.Address
// FaceValue represents the pay out to
// the recipient if the ticket wins
FaceValue *big.Int
// WinProb represents how likely a ticket will win
WinProb *big.Int
// SenderNonce is the monotonically increasing counter that makes
// each ticket unique given a particular recipientRand value
SenderNonce uint32
// RecipientRandHash is the 32 byte keccak-256 hash commitment to a random number
// provided by the recipient. In order for the recipient to redeem
// a winning ticket, it must reveal the preimage to this hash
RecipientRandHash ethcommon.Hash
// CreationRound is the round during which the ticket is created
CreationRound int64
// CreationRoundBlockHash is the block hash associated with CreationRound
CreationRoundBlockHash ethcommon.Hash
// ParamsExpirationBlock is the block number at which the ticket parameters used
// to create the ticket will no longer be valid
ParamsExpirationBlock *big.Int
PricePerPixel *big.Rat
}
// NewTicket creates a Ticket instance
func NewTicket(params *TicketParams, expirationParams *TicketExpirationParams, sender ethcommon.Address, senderNonce uint32) *Ticket {
return &Ticket{
Recipient: params.Recipient,
Sender: sender,
FaceValue: params.FaceValue,
WinProb: params.WinProb,
SenderNonce: senderNonce,
RecipientRandHash: params.RecipientRandHash,
CreationRound: expirationParams.CreationRound,
CreationRoundBlockHash: expirationParams.CreationRoundBlockHash,
ParamsExpirationBlock: params.ExpirationBlock,
PricePerPixel: params.PricePerPixel,
}
}
// EV returns the expected value of a ticket
func (t *Ticket) EV() *big.Rat {
return ticketEV(t.FaceValue, t.WinProb)
}
// WinProbRat returns the ticket WinProb as a percentage represented as a big.Rat
func (t *Ticket) WinProbRat() *big.Rat {
return winProbRat(t.WinProb)
}
// Hash returns the keccak-256 hash of the ticket's fields as tightly packed
// arguments as described in the Solidity documentation
// See: https://solidity.readthedocs.io/en/v0.4.25/units-and-global-variables.html#mathematical-and-cryptographic-functions
func (t *Ticket) Hash() ethcommon.Hash {
return crypto.Keccak256Hash(t.flatten())
}
// AuxData returns the ticket's CreationRound and CreationRoundBlockHash encoded into a byte array:
// [0:31] = CreationRound (left padded with zero bytes)
// [32..63] = CreationRoundBlockHash
// See: https://github.com/livepeer/protocol/blob/pm/contracts/pm/mixins/MixinTicketProcessor.sol#L94
func (t *Ticket) AuxData() []byte {
return (&TicketExpirationParams{
CreationRound: t.CreationRound,
CreationRoundBlockHash: t.CreationRoundBlockHash,
}).AuxData()
}
func (t *Ticket) expirationParams() *TicketExpirationParams {
return &TicketExpirationParams{
t.CreationRound,
t.CreationRoundBlockHash,
}
}
func (t *Ticket) flatten() []byte {
auxData := t.AuxData()
buf := make([]byte, addressSize+addressSize+uint256Size+uint256Size+uint256Size+bytes32Size+len(auxData))
i := copy(buf[0:], t.Recipient.Bytes())
i += copy(buf[i:], t.Sender.Bytes())
i += copy(buf[i:], ethcommon.LeftPadBytes(t.FaceValue.Bytes(), uint256Size))
i += copy(buf[i:], ethcommon.LeftPadBytes(t.WinProb.Bytes(), uint256Size))
i += copy(buf[i:], ethcommon.LeftPadBytes(new(big.Int).SetUint64(uint64(t.SenderNonce)).Bytes(), uint256Size))
i += copy(buf[i:], t.RecipientRandHash.Bytes())
if len(auxData) > 0 {
copy(buf[i:], auxData)
}
return buf
}
func (e *TicketExpirationParams) AuxData() []byte {
if e.CreationRound == 0 && (e.CreationRoundBlockHash == ethcommon.Hash{}) {
// Return empty byte array if both values are 0
return []byte{}
}
return append(
ethcommon.LeftPadBytes(big.NewInt(e.CreationRound).Bytes(), uint256Size),
e.CreationRoundBlockHash.Bytes()...,
)
}
func ticketEV(faceValue *big.Int, winProb *big.Int) *big.Rat {
return new(big.Rat).Mul(new(big.Rat).SetInt(faceValue), new(big.Rat).SetFrac(winProb, maxWinProb))
}
func winProbRat(winProb *big.Int) *big.Rat {
return new(big.Rat).SetFrac(winProb, maxWinProb)
} | pm/ticket.go | 0.826922 | 0.426202 | ticket.go | starcoder |
package indexset
import (
"errors"
)
/*
___ ____
-------
-------
_______
__
_______
_______
________
__
*/
type indexRangeOverlap struct {
a, b indexRange
rightIsNewRange bool
}
func makeIndexRangeOverlap(a, b indexRange) indexRangeOverlap {
relation := indexRangeOverlap{
a: a,
b: b,
}
if a.left > b.left || (a.left == b.left && a.right < b.right) {
relation.a = b
relation.b = a
}
relation.rightIsNewRange = b.right > a.right
return relation
}
func (r indexRangeOverlap) splitFunc() indexRangeSplitFunc {
if r.a.right < r.b.left {
return indexRangeSplitFuncUnknown
}
if r.a.right == r.b.right {
if r.a.left < r.b.left {
return indexRangeSplitFuncRightInside
} else if r.a.left == r.b.left {
return indexRangeSplitFuncEqual
}
}
if r.a.left == r.b.left {
if r.a.right > r.b.left {
return indexRangeSplitFuncLeftInside
} else {
return indexRangeSplitFuncUnknown
}
}
if r.a.right > r.b.right {
return indexRangeSplitFuncInside
} else if r.a.right < r.b.right {
return indexRangeSplitFuncRightOutside
}
return indexRangeSplitFuncUnknown
}
type indexRangeSplitFunc func(indexRangeOverlap) (replacement []indexRange, carryover indexRange, err error)
var (
indexRangeSplitFuncUnknown = func(_ indexRangeOverlap) ([]indexRange, indexRange, error) {
return nil, indexRangeZero, errors.New("failed to combine nodes: unknown overlap")
}
indexRangeSplitFuncEqual = func(r indexRangeOverlap) ([]indexRange, indexRange, error) {
replacement, err := MakeRanges([3]int64{r.a.left, r.a.right, r.a.weight + r.b.weight})
return replacement, indexRangeZero, err
}
indexRangeSplitFuncInside = func(r indexRangeOverlap) ([]indexRange, indexRange, error) {
replacement, err := MakeRanges(
[3]int64{r.a.left, r.b.left - 1, r.a.weight},
[3]int64{r.b.left, r.b.right, r.a.weight + r.b.weight},
[3]int64{r.b.right + 1, r.a.right, r.a.weight},
)
if r.rightIsNewRange {
return replacement[0:2], replacement[2], err
} else {
return replacement, indexRangeZero, err
}
}
indexRangeSplitFuncLeftInside = func(r indexRangeOverlap) ([]indexRange, indexRange, error) {
replacement, err := MakeRanges(
[3]int64{r.a.left, r.b.right, r.a.weight + r.b.weight},
[3]int64{r.b.right + 1, r.a.right, r.a.weight},
)
if r.rightIsNewRange {
return replacement[0:1], replacement[1], err
} else {
return replacement, indexRangeZero, err
}
}
indexRangeSplitFuncRightInside = func(r indexRangeOverlap) ([]indexRange, indexRange, error) {
replacement, err := MakeRanges(
[3]int64{r.a.left, r.b.left - 1, r.a.weight},
[3]int64{r.b.left, r.b.right, r.a.weight + r.b.weight},
)
return replacement, indexRangeZero, err
}
indexRangeSplitFuncRightOutside = func(r indexRangeOverlap) ([]indexRange, indexRange, error) {
rightRange := [3]int64{r.a.right, r.b.right, r.b.weight}
if r.rightIsNewRange {
replacement, err := MakeRanges(
[3]int64{r.a.left, r.b.left - 1, r.a.weight},
[3]int64{r.b.left, r.a.right, r.a.weight + r.b.weight},
rightRange,
)
return replacement[0:2], replacement[2], err
} else {
replacement, err := MakeRanges(
[3]int64{r.a.left, r.b.left - 1, r.a.weight},
[3]int64{r.b.left, r.a.right, r.a.weight + r.b.weight},
rightRange,
)
return replacement, indexRangeZero, err
}
}
) | index_range_overlap.go | 0.604282 | 0.609553 | index_range_overlap.go | starcoder |
package eval
import (
"strconv"
"github.com/alecthomas/participle/lexer"
"github.com/alecthomas/repr"
)
// Evaluatable abstracts part of an expression that can be evaluated for an instance
type Evaluatable interface {
Evaluate(instance Instance) (interface{}, error)
}
// Function describes a function callable for an instance
type Function func(instance Instance, args ...interface{}) (interface{}, error)
// FunctionMap describes a map of functions
type FunctionMap map[string]Function
// VarMap describes a map of variables
type VarMap map[string]interface{}
// RegoInputMap describes a datastructure suitable to be passed to Rego as an input
type RegoInputMap map[string]interface{}
// Instance for evaluation
type Instance interface {
Var(name string) (interface{}, bool)
Vars() VarMap
Function(name string) (Function, bool)
Functions() FunctionMap
RegoInput() RegoInputMap
}
// instance for evaluation
type instance struct {
// Instance functions
functions FunctionMap
// Vars defined during evaluation.
vars VarMap
// Rego input
regoInput RegoInputMap
}
func (i *instance) Vars() VarMap {
if i == nil || i.vars == nil {
return VarMap{}
}
return i.vars
}
func (i *instance) Functions() FunctionMap {
if i == nil || i.functions == nil {
return FunctionMap{}
}
return i.functions
}
func (i *instance) Var(name string) (interface{}, bool) {
if i == nil || i.vars == nil {
return nil, false
}
value, ok := i.vars[name]
return value, ok
}
func (i *instance) Function(name string) (Function, bool) {
if i == nil || i.functions == nil {
return nil, false
}
function, ok := i.functions[name]
return function, ok
}
func (i *instance) RegoInput() RegoInputMap {
return i.regoInput
}
// NewInstance instantiates a new evaluation instance
func NewInstance(vars VarMap, functions FunctionMap, regoInput RegoInputMap) Instance {
return &instance{
vars: vars,
functions: functions,
regoInput: regoInput,
}
}
// Iterator abstracts iteration over a set of instances for expression evaluation
type Iterator interface {
Next() (Instance, error)
Done() bool
}
// InstanceResult captures an Instance along with the passed or failed status for the result
type InstanceResult struct {
Instance Instance
Passed bool
}
const (
allFn = "all"
noneFn = "none"
countFn = "count"
)
var (
builtInVars = VarMap{
"_": true,
}
)
// EvaluateIterator evaluates an iterable expression for an iterator
func (e *IterableExpression) EvaluateIterator(it Iterator, global Instance) ([]*InstanceResult, error) {
if e.IterableComparison == nil {
return e.iterate(
it,
e.Expression,
nil,
)
}
if e.IterableComparison.Fn == nil {
return nil, lexer.Errorf(e.Pos, "expecting function for iterable comparison")
}
var (
totalCount int64
passedCount int64
)
_, err := e.iterate(
it,
e.IterableComparison.Expression, func(instance Instance, passed bool) bool {
totalCount++
if passed {
passedCount++
}
return passed
},
)
if err != nil {
return nil, err
}
passed, err := e.evaluatePassed(global, passedCount, totalCount)
if err != nil {
return nil, err
}
return []*InstanceResult{
{
Passed: passed,
},
}, nil
}
func (e *IterableExpression) evaluatePassed(instance Instance, passedCount, totalCount int64) (bool, error) {
fn := *e.IterableComparison.Fn
switch fn {
case allFn:
return passedCount == totalCount, nil
case noneFn:
return passedCount == 0, nil
case countFn:
comparison := e.IterableComparison.ScalarComparison
if comparison == nil {
return false, lexer.Errorf(e.Pos, `expecting rhs of iterable comparison using "%s()"`, fn)
}
if comparison.Op == nil {
return false, lexer.Errorf(e.Pos, `expecting operator for iterable comparison using "%s()"`, fn)
}
rhs, err := comparison.Next.Evaluate(instance)
if err != nil {
return false, err
}
switch expectedCount := rhs.(type) {
case int64:
return intCompare(*comparison.Op, passedCount, expectedCount, e.Pos)
case uint64:
return intCompare(*comparison.Op, passedCount, int64(expectedCount), e.Pos)
default:
return false, lexer.Errorf(e.Pos, `expecting an integer rhs for iterable comparison using "%s()"`, fn)
}
default:
return false, lexer.Errorf(e.Pos, `unexpected function "%s()" for iterable comparison`, *e.IterableComparison.Fn)
}
}
func (e *IterableExpression) iterate(it Iterator, expression *Expression, checkResult func(instance Instance, passed bool) bool) ([]*InstanceResult, error) {
var (
instance Instance
results []*InstanceResult
err error
passed bool
)
if it.Done() {
return []*InstanceResult{
{
Passed: false,
},
}, nil
}
for !it.Done() {
instance, err = it.Next()
if err != nil {
return nil, err
}
passed, err = e.evaluateSubExpression(instance, expression)
if err != nil {
return nil, err
}
// iterable comparison, then returning only matching instance as the
// real check will be done in the evaluatePassed function
if checkResult != nil {
passed = checkResult(instance, passed)
}
results = append(results, &InstanceResult{
Instance: instance,
Passed: passed,
})
}
return results, nil
}
func (e *IterableExpression) evaluateSubExpression(instance Instance, expression *Expression) (bool, error) {
v, err := expression.Evaluate(instance)
if err != nil {
return false, err
}
passed, ok := v.(bool)
if !ok {
return false, lexer.Errorf(e.Pos, "expression in iteration must evaluate to a boolean")
}
return passed, nil
}
// Evaluate evaluates an iterable expression for a single instance
func (e *IterableExpression) Evaluate(instance Instance) (bool, error) {
if e.IterableComparison == nil {
return e.evaluateSubExpression(instance, e.Expression)
}
passed, err := e.evaluateSubExpression(instance, e.IterableComparison.Expression)
if err != nil {
return false, err
}
var (
passedCount int64
totalCount = int64(1)
)
if passed {
passedCount = 1
}
return e.evaluatePassed(instance, passedCount, totalCount)
}
// Evaluate evaluates a path expression for an instance
func (e *PathExpression) Evaluate(instance Instance) (interface{}, error) {
if e.Path != nil {
return *e.Path, nil
}
return e.Expression.Evaluate(instance)
}
// Evaluate evaluates an expression for an instance
func (e *Expression) Evaluate(instance Instance) (interface{}, error) {
lhs, err := e.Comparison.Evaluate(instance)
if err != nil {
return nil, err
}
if e.Next == nil {
return lhs, nil
}
left, ok := lhs.(bool)
if !ok {
return nil, lexer.Errorf(e.Pos, "type mismatch, expected bool in lhs of boolean expression")
}
rhs, err := e.Next.Evaluate(instance)
if err != nil {
return nil, err
}
right, ok := rhs.(bool)
if !ok {
return nil, lexer.Errorf(e.Pos, "type mismatch, expected bool in rhs of boolean expression")
}
switch *e.Op {
case "&&":
return left && right, nil
case "||":
return left || right, nil
default:
return nil, lexer.Errorf(e.Pos, "unsupported operator %q in boolean expression", *e.Op)
}
}
// BoolEvaluate evaluates an expression for an instance as a boolean value
func (e *Expression) BoolEvaluate(instance Instance) (bool, error) {
v, err := e.Evaluate(instance)
if err != nil {
return false, err
}
passed, ok := v.(bool)
if !ok {
return false, lexer.Errorf(e.Pos, "expression must evaluate to a boolean")
}
return passed, nil
}
// Evaluate implements Evaluatable interface
func (c *Comparison) Evaluate(instance Instance) (interface{}, error) {
lhs, err := c.Term.Evaluate(instance)
if err != nil {
return nil, err
}
switch {
case c.ArrayComparison != nil:
if c.ArrayComparison.Array == nil {
return nil, lexer.Errorf(c.Pos, "missing rhs of array operation %q", *c.ArrayComparison.Op)
}
rhs, err := c.ArrayComparison.Array.Evaluate(instance)
if err != nil {
return nil, err
}
array, ok := rhs.([]interface{})
if !ok {
return nil, lexer.Errorf(c.Pos, "rhs of %q array operation must be an array", *c.ArrayComparison.Op)
}
switch *c.ArrayComparison.Op {
case "in":
return inArray(lhs, array), nil
case "notin":
return notInArray(lhs, array), nil
default:
return nil, lexer.Errorf(c.Pos, "unsupported array operation %q", *c.ArrayComparison.Op)
}
case c.ScalarComparison != nil:
if c.ScalarComparison.Next == nil {
return nil, lexer.Errorf(c.Pos, "missing rhs of %q", *c.ScalarComparison.Op)
}
rhs, err := c.ScalarComparison.Next.Evaluate(instance)
if err != nil {
return nil, err
}
return c.compare(lhs, rhs, *c.ScalarComparison.Op)
default:
return lhs, nil
}
}
func (c *Comparison) compare(lhs, rhs interface{}, op string) (interface{}, error) {
switch lhs := lhs.(type) {
case uint64:
switch rhs := rhs.(type) {
case uint64:
return uintCompare(op, lhs, rhs, c.Pos)
case int64:
return uintCompare(op, lhs, uint64(rhs), c.Pos)
default:
return nil, lexer.Errorf(c.Pos, "rhs of %q must be an integer", op)
}
case int64:
switch rhs := rhs.(type) {
case int64:
return intCompare(op, lhs, rhs, c.Pos)
case uint64:
return intCompare(op, lhs, int64(rhs), c.Pos)
default:
return nil, lexer.Errorf(c.Pos, "rhs of %q must be an integer", op)
}
case string:
rhs, ok := rhs.(string)
if !ok {
return nil, lexer.Errorf(c.Pos, "rhs of %q must be a string", op)
}
return stringCompare(op, lhs, rhs, c.Pos)
default:
return nil, lexer.Errorf(c.Pos, "lhs of %q must be an integer or string", op)
}
}
// Evaluate implements Evaluatable interface
func (t *Term) Evaluate(instance Instance) (interface{}, error) {
lhs, err := t.Unary.Evaluate(instance)
if err != nil {
return nil, err
}
if t.Op == nil {
return lhs, nil
}
if t.Next == nil {
return nil, lexer.Errorf(t.Pos, "expected rhs in binary bit operation")
}
rhs, err := t.Next.Evaluate(instance)
if err != nil {
return nil, err
}
op := *t.Op
switch lhs := lhs.(type) {
case uint64:
switch rhs := rhs.(type) {
case uint64:
return uintBinaryOp(op, lhs, rhs, t.Pos)
case int64:
return uintBinaryOp(op, lhs, uint64(rhs), t.Pos)
default:
return nil, lexer.Errorf(t.Pos, `rhs of %q must be an integer`, op)
}
case int64:
switch rhs := rhs.(type) {
case int64:
return intBinaryOp(op, lhs, rhs, t.Pos)
case uint64:
return intBinaryOp(op, lhs, int64(rhs), t.Pos)
default:
return nil, lexer.Errorf(t.Pos, `rhs of %q must be an integer`, op)
}
case string:
switch rhs := rhs.(type) {
case string:
return stringBinaryOp(op, lhs, rhs, t.Pos)
default:
return nil, lexer.Errorf(t.Pos, "rhs of %q must be a string", op)
}
default:
return nil, lexer.Errorf(t.Pos, "binary bit operation not supported for this type")
}
}
// Evaluate implements Evaluatable interface
func (u *Unary) Evaluate(instance Instance) (interface{}, error) {
if u.Value != nil {
return u.Value.Evaluate(instance)
}
if u.Unary == nil || u.Op == nil {
return nil, lexer.Errorf(u.Pos, "invalid unary operation")
}
rhs, err := u.Unary.Evaluate(instance)
if err != nil {
return nil, err
}
switch *u.Op {
case "!":
rhs, ok := rhs.(bool)
if !ok {
return nil, lexer.Errorf(u.Pos, "rhs of %q must be a boolean", *u.Op)
}
return !rhs, nil
case "-":
switch rhs := rhs.(type) {
case int64:
return -rhs, nil
case uint64:
return -int64(rhs), nil
default:
return nil, lexer.Errorf(u.Pos, "rhs of %q must be an integer", *u.Op)
}
case "^":
switch rhs := rhs.(type) {
case int64:
return ^rhs, nil
case uint64:
return ^rhs, nil
default:
return nil, lexer.Errorf(u.Pos, "rhs of %q must be an integer", *u.Op)
}
default:
return nil, lexer.Errorf(u.Pos, "unsupported unary operator %q", *u.Op)
}
}
// Evaluate implements Evaluatable interface
func (v *Value) Evaluate(instance Instance) (interface{}, error) {
switch {
case v.Hex != nil:
return strconv.ParseUint(*v.Hex, 0, 64)
case v.Octal != nil:
return strconv.ParseUint(*v.Octal, 8, 64)
case v.Decimal != nil:
return *v.Decimal, nil
case v.String != nil:
return *v.String, nil
case v.Variable != nil:
value, ok := instance.Var(*v.Variable)
if !ok {
value, ok = builtInVars[*v.Variable]
}
if !ok {
return nil, lexer.Errorf(v.Pos, `unknown variable %q`, *v.Variable)
}
return coerceIntegers(value), nil
case v.Subexpression != nil:
return v.Subexpression.Evaluate(instance)
case v.Call != nil:
return v.Call.Evaluate(instance)
}
return nil, lexer.Errorf(v.Pos, `unsupported value type %q`, repr.String(v))
}
// Evaluate implements the Evaluatable interface for array expressions.
// An Array is either a variable referring to an array, or a literal list
// of element expressions evaluated in order.
func (a *Array) Evaluate(instance Instance) (interface{}, error) {
	if a.Ident != nil {
		value, ok := instance.Var(*a.Ident)
		if !ok {
			return nil, lexer.Errorf(a.Pos, `unknown variable %q used as array`, *a.Ident)
		}
		return coerceArrays(value), nil
	}
	var elems []interface{}
	for _, expr := range a.Values {
		ev, err := expr.Evaluate(instance)
		if err != nil {
			return nil, err
		}
		elems = append(elems, ev)
	}
	return elems, nil
}
// Evaluate implements Evaluatable interface
func (c *Call) Evaluate(instance Instance) (interface{}, error) {
fn, ok := instance.Function(c.Name)
if !ok {
return nil, lexer.Errorf(c.Pos, `unknown function "%s()"`, c.Name)
}
args := []interface{}{}
for _, arg := range c.Args {
value, err := arg.Evaluate(instance)
if err != nil {
return nil, err
}
args = append(args, value)
}
value, err := fn(instance, args...)
if err != nil {
return nil, lexer.Errorf(c.Pos, `call to "%s()" failed: %v`, c.Name, err)
}
return coerceValues(value), nil
} | pkg/compliance/eval/eval.go | 0.811303 | 0.51379 | eval.go | starcoder |
package mesh
/*
Package for making meshes, outputs should be json Object
structs from geometry which can be exported as json
*/
import (
geometry "basic-ray/pkg/geometry"
)
// ICO_EDGE_LENGTH relates an icosahedron's edge length to the radius of its
// circumscribing sphere.
const ICO_EDGE_LENGTH = 0.9510565163 // sin(2*pi/5)

// Sphere describes a sphere by its radius and center; a triangle mesh is
// generated from it by projecting a refined icosahedron onto its surface.
type Sphere struct {
	Radius float64
	Origin geometry.Point
}
// CreateMesh builds a triangle mesh approximating the sphere by subdividing
// a base icosahedron `refinement` times. When smoothShading is set, a
// per-vertex normal (pointing away from the sphere origin) is attached so
// shading can be interpolated across faces; flat per-face normals are
// always computed.
func (sphere *Sphere) CreateMesh(refinement int, smoothShading bool) *Mesh {
	mesh := sphere.createBaseIcosahedron()
	for i := 0; i < refinement; i++ {
		mesh = RefineMesh(sphere, mesh)
	}
	if smoothShading { // idiomatic: no comparison against true
		vertexNormals := make([]geometry.Vector, len(mesh.Vertexes))
		for i, vertex := range mesh.Vertexes {
			vertexNormals[i] = geometry.Normalize(geometry.CreateVector(vertex, sphere.Origin))
		}
		mesh.VertexNormals = vertexNormals
	}
	// Face normal = normalized cross product of two edges of the triangle.
	normals := make([]geometry.Vector, len(mesh.Faces))
	for i, face := range mesh.Faces {
		edge1 := geometry.CreateVector(mesh.Vertexes[face[0]], mesh.Vertexes[face[1]])
		edge2 := geometry.CreateVector(mesh.Vertexes[face[0]], mesh.Vertexes[face[2]])
		normals[i] = geometry.Normalize(geometry.CrossProduct(edge1, edge2))
	}
	mesh.Normals = normals
	return mesh
}
// GetMidPoint returns the index of the vertex halfway between the two given
// vertex indexes, projected onto the sphere surface. Midpoints are memoized
// in cache so an edge shared by two faces is split only once per pass.
func GetMidPoint(sphere *Sphere, mesh *Mesh, faceIndexA, faceIndexB int, cache *map[[2]int]int) int {
	// Normalize the key (smaller index first) so both edge orientations hit
	// the same cache entry; the original did two lookups and stored only one
	// orientation.
	key := [2]int{faceIndexA, faceIndexB}
	if faceIndexB < faceIndexA {
		key = [2]int{faceIndexB, faceIndexA}
	}
	if vertexIndex, present := (*cache)[key]; present {
		return vertexIndex
	}
	// Midpoint of the chord between the two vertexes...
	vertexA := mesh.Vertexes[faceIndexA]
	vertexB := mesh.Vertexes[faceIndexB]
	translate := geometry.ScalarProduct(geometry.CreateVector(vertexB, vertexA), .5)
	linePoint := geometry.Translate(vertexA, translate)
	// ...pushed radially out onto the sphere surface.
	vector := geometry.ScalarProduct(geometry.Normalize(geometry.CreateVector(linePoint, sphere.Origin)), sphere.Radius)
	midVertex := geometry.Translate(sphere.Origin, vector)
	// Record the new vertex in both the mesh and the cache.
	mesh.Vertexes = append(mesh.Vertexes, midVertex)
	vertexIndex := len(mesh.Vertexes) - 1
	(*cache)[key] = vertexIndex
	return vertexIndex
}
// RefineMesh subdivides every triangular face of the mesh into four smaller
// triangles, projecting the new edge-midpoint vertexes onto the sphere.
func RefineMesh(sphere *Sphere, mesh *Mesh) *Mesh {
	refinedMesh := Mesh{Vertexes: mesh.Vertexes}
	midPointCache := make(map[[2]int]int)
	// Each input face yields exactly four output faces, so preallocate.
	faces := make([][]int, 0, len(mesh.Faces)*4)
	for _, face := range mesh.Faces {
		// Split each of the three edges at its (projected) midpoint.
		newVertexes := make([]int, 3)
		for edge := 0; edge < 3; edge++ {
			newVertexes[edge] = GetMidPoint(sphere, &refinedMesh, face[edge], face[(edge+1)%3], &midPointCache)
		}
		// Three corner triangles plus the central one.
		faces = append(faces, []int{face[0], newVertexes[0], newVertexes[2]})
		faces = append(faces, []int{face[1], newVertexes[1], newVertexes[0]})
		faces = append(faces, []int{face[2], newVertexes[2], newVertexes[1]})
		faces = append(faces, []int{newVertexes[0], newVertexes[1], newVertexes[2]})
	}
	refinedMesh.Faces = faces
	return &refinedMesh
}
// createBaseIcosahedron builds the 12-vertex, 20-face regular icosahedron
// used as the starting point for sphere refinement. Vertexes are laid out
// on three mutually orthogonal golden rectangles, then projected onto the
// sphere surface.
func (sphere *Sphere) createBaseIcosahedron() *Mesh {
	sideLength := sphere.Radius * ICO_EDGE_LENGTH / 2.0
	// Shift the canonical (origin-centered) coordinates to the sphere origin.
	translate := geometry.CreateVector(sphere.Origin, geometry.Point{0, 0, 0})
	vertexes := []geometry.Point{
		geometry.Translate(geometry.Point{2 * sideLength, sideLength, 0}, translate),
		geometry.Translate(geometry.Point{-2 * sideLength, sideLength, 0}, translate),
		geometry.Translate(geometry.Point{2 * sideLength, -1 * sideLength, 0}, translate),
		geometry.Translate(geometry.Point{-2 * sideLength, -1 * sideLength, 0}, translate),
		geometry.Translate(geometry.Point{0, 2 * sideLength, sideLength}, translate),
		geometry.Translate(geometry.Point{0, -2 * sideLength, sideLength}, translate),
		geometry.Translate(geometry.Point{0, 2 * sideLength, -1 * sideLength}, translate),
		geometry.Translate(geometry.Point{0, -2 * sideLength, -1 * sideLength}, translate),
		geometry.Translate(geometry.Point{sideLength, 0, 2 * sideLength}, translate),
		geometry.Translate(geometry.Point{sideLength, 0, -2 * sideLength}, translate),
		geometry.Translate(geometry.Point{-1 * sideLength, 0, 2 * sideLength}, translate),
		geometry.Translate(geometry.Point{-1 * sideLength, 0, -2 * sideLength}, translate),
	}
	// Snap every vertex exactly onto the sphere surface.
	for i, vertex := range vertexes {
		vector := geometry.ScalarProduct(geometry.Normalize(geometry.CreateVector(vertex, sphere.Origin)), sphere.Radius)
		vertexes[i] = geometry.Translate(sphere.Origin, vector)
	}
	// The 20 triangular faces, as indexes into the vertex table above.
	faces := [][]int{
		[]int{0, 6, 4},
		[]int{0, 8, 2},
		[]int{0, 2, 9},
		[]int{0, 9, 6},
		[]int{0, 4, 8},
		[]int{6, 1, 4},
		[]int{4, 10, 8},
		[]int{8, 5, 2},
		[]int{9, 2, 7},
		[]int{0, 11, 6},
		[]int{3, 10, 1},
		[]int{3, 1, 11},
		[]int{3, 5, 10},
		[]int{3, 7, 5},
		[]int{3, 11, 7},
		[]int{1, 10, 4},
		[]int{10, 5, 8},
		[]int{5, 7, 2},
		[]int{7, 11, 9},
		[]int{11, 1, 6},
	}
	return &Mesh{Vertexes: vertexes, Faces: faces}
}
package pt
import (
"image"
"math"
)
// Channel selects which per-pixel statistic Buffer.Image renders.
type Channel int

const (
	// Typed constants (previously untyped ints) so vet and callers can
	// distinguish Channel values from arbitrary integers.
	ColorChannel Channel = iota
	VarianceChannel
	StandardDeviationChannel
	SamplesChannel
)
// Pixel accumulates color samples with Welford's online algorithm:
// M is the running mean, V the running aggregate of squared deviations
// (divide by Samples-1 for the sample variance).
type Pixel struct {
	Samples int
	M, V Color
}
// AddSample folds one color sample into the running mean (M) and the
// aggregate of squared deviations (V) using Welford's online algorithm.
// The statement order below matters: V's update needs both the old and
// the new mean.
func (p *Pixel) AddSample(sample Color) {
	p.Samples++
	if p.Samples == 1 {
		p.M = sample
		return
	}
	m := p.M // previous mean, needed for the V update below
	p.M = p.M.Add(sample.Sub(p.M).DivScalar(float64(p.Samples)))
	p.V = p.V.Add(sample.Sub(m).Mul(sample.Sub(p.M)))
}
// Color returns the mean of all samples accumulated so far.
func (p *Pixel) Color() Color {
	return p.M
}
// Variance returns the per-channel sample variance (Bessel-corrected,
// dividing by Samples-1); it is Black until at least two samples exist.
func (p *Pixel) Variance() Color {
	if p.Samples < 2 {
		return Black
	}
	return p.V.DivScalar(float64(p.Samples - 1))
}
// StandardDeviation returns the per-channel square root of the variance.
func (p *Pixel) StandardDeviation() Color {
	return p.Variance().Pow(0.5)
}
// Buffer is a W x H grid of sample-accumulating pixels, stored row-major.
type Buffer struct {
	W, H int
	Pixels []Pixel
}
// NewBuffer returns an empty w x h buffer with zeroed pixels.
func NewBuffer(w, h int) *Buffer {
	pixels := make([]Pixel, w*h)
	return &Buffer{w, h, pixels}
}
// Copy returns a deep copy of the buffer (pixel slice included).
func (b *Buffer) Copy() *Buffer {
	pixels := make([]Pixel, b.W*b.H)
	copy(pixels, b.Pixels)
	return &Buffer{b.W, b.H, pixels}
}
// AddSample accumulates one color sample into the pixel at (x, y).
func (b *Buffer) AddSample(x, y int, sample Color) {
	b.Pixels[y*b.W+x].AddSample(sample)
}
// Samples reports how many samples the pixel at (x, y) has accumulated.
func (b *Buffer) Samples(x, y int) int {
	return b.Pixels[y*b.W+x].Samples
}
// Color returns the mean color of the pixel at (x, y).
func (b *Buffer) Color(x, y int) Color {
	return b.Pixels[y*b.W+x].Color()
}
// Variance returns the sample variance of the pixel at (x, y).
func (b *Buffer) Variance(x, y int) Color {
	return b.Pixels[y*b.W+x].Variance()
}
// StandardDeviation returns the standard deviation of the pixel at (x, y).
func (b *Buffer) StandardDeviation(x, y int) Color {
	return b.Pixels[y*b.W+x].StandardDeviation()
}
func (b *Buffer) Image(channel Channel) image.Image {
result := image.NewRGBA64(image.Rect(0, 0, b.W, b.H))
var maxSamples float64
if channel == SamplesChannel {
for _, pixel := range b.Pixels {
maxSamples = math.Max(maxSamples, float64(pixel.Samples))
}
}
for y := 0; y < b.H; y++ {
for x := 0; x < b.W; x++ {
var c Color
switch channel {
case ColorChannel:
c = b.Pixels[y*b.W+x].Color().Pow(1 / 2.2)
case VarianceChannel:
c = b.Pixels[y*b.W+x].Variance()
case StandardDeviationChannel:
c = b.Pixels[y*b.W+x].StandardDeviation()
case SamplesChannel:
p := float64(b.Pixels[y*b.W+x].Samples) / maxSamples
c = Color{p, p, p}
}
result.SetRGBA64(x, y, c.RGBA64())
}
}
return result
} | pt/buffer.go | 0.774626 | 0.40439 | buffer.go | starcoder |
package date
import (
"bytes"
"encoding/binary"
"encoding/json"
"time"
)
// unixEpoch is the moment in time that should be treated as timestamp 0.
var unixEpoch = time.Date(1970, time.January, 1, 0, 0, 0, 0, time.UTC)

// UnixTime marshals and unmarshals a time represented as the number of
// seconds (ignoring leap seconds) since the Unix Epoch.
type UnixTime time.Time

// Duration returns the time as a Duration since the UnixEpoch.
func (t UnixTime) Duration() time.Duration {
	return time.Time(t).Sub(unixEpoch)
}

// NewUnixTimeFromSeconds creates a UnixTime from a (possibly fractional)
// number of seconds since the UnixEpoch.
func NewUnixTimeFromSeconds(seconds float64) UnixTime {
	dur := time.Duration(seconds * float64(time.Second))
	return NewUnixTimeFromDuration(dur)
}

// NewUnixTimeFromNanoseconds creates a UnixTime from a number of
// nanoseconds since the UnixEpoch.
func NewUnixTimeFromNanoseconds(nanoseconds int64) UnixTime {
	dur := time.Duration(nanoseconds)
	return NewUnixTimeFromDuration(dur)
}

// NewUnixTimeFromDuration creates a UnixTime offset from the UnixEpoch by dur.
func NewUnixTimeFromDuration(dur time.Duration) UnixTime {
	return UnixTime(unixEpoch.Add(dur))
}

// UnixEpoch retrieves the moment considered the Unix Epoch, i.e. the time
// represented by '0'.
func UnixEpoch() time.Time {
	return unixEpoch
}
// MarshalJSON preserves the UnixTime as a JSON number conforming to Unix
// Timestamp requirements (i.e. the number of seconds since midnight
// January 1st, 1970, not considering leap seconds).
func (t UnixTime) MarshalJSON() ([]byte, error) {
	// json.Marshal reports encoding errors (the previous
	// json.Encoder.Encode call silently discarded them) and does not
	// append the trailing newline an Encoder emits.
	return json.Marshal(float64(time.Time(t).UnixNano()) / 1e9)
}
// UnmarshalJSON reconstitutes a UnixTime saved as a JSON number of seconds
// since midnight January 1st, 1970.
func (t *UnixTime) UnmarshalJSON(text []byte) error {
	var secondsSinceEpoch float64
	dec := json.NewDecoder(bytes.NewReader(text))
	if err := dec.Decode(&secondsSinceEpoch); err != nil {
		return err
	}
	*t = NewUnixTimeFromSeconds(secondsSinceEpoch)
	return nil
}
// MarshalText implements encoding.TextMarshaler by delegating to
// time.Time.MarshalText (RFC 3339 text form) — NOT the floating-point
// seconds representation the previous comment claimed; that form is only
// used by MarshalJSON.
func (t UnixTime) MarshalText() ([]byte, error) {
	cast := time.Time(t)
	return cast.MarshalText()
}
// UnmarshalText implements encoding.TextUnmarshaler by delegating to
// time.Time.UnmarshalText (RFC 3339 text form).
func (t *UnixTime) UnmarshalText(raw []byte) error {
	var unmarshaled time.Time
	if err := unmarshaled.UnmarshalText(raw); err != nil {
		return err
	}
	*t = UnixTime(unmarshaled)
	return nil
}
// MarshalBinary encodes the UnixTime as a binary.LittleEndian int64 of
// nanoseconds since the epoch. (The payload is an int64, not the float64
// the previous comment claimed.)
func (t UnixTime) MarshalBinary() ([]byte, error) {
	buf := &bytes.Buffer{}
	payload := int64(t.Duration())
	if err := binary.Write(buf, binary.LittleEndian, &payload); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// UnmarshalBinary decodes a binary.LittleEndian int64 of nanoseconds since
// the epoch into a UnixTime.
func (t *UnixTime) UnmarshalBinary(raw []byte) error {
	var nanosecondsSinceEpoch int64
	if err := binary.Read(bytes.NewReader(raw), binary.LittleEndian, &nanosecondsSinceEpoch); err != nil {
		return err
	}
	*t = NewUnixTimeFromNanoseconds(nanosecondsSinceEpoch)
	return nil
}
package ebpf
import "fmt"
// Compile-time interface checks.
var (
	_ Instruction = (*JumpEqual)(nil)
	_ Jumper = (*JumpEqual)(nil)
	_ Valuer = (*JumpEqual)(nil)
)
// JumpEqual is the 64-bit conditional jump BPF_JEQ|BPF_K: if Dest equals the
// immediate Value, execution jumps Offset instructions (relative; may be
// negative).
type JumpEqual struct {
	Dest Register
	Offset int16
	Value int32
}
// Raw encodes the instruction as a single raw eBPF instruction.
func (a *JumpEqual) Raw() ([]RawInstruction, error) {
	return []RawInstruction{
		{Op: BPF_JEQ | BPF_K | BPF_JMP, Reg: NewReg(0, a.Dest), Off: a.Offset, Imm: a.Value},
	}, nil
}
// String renders the instruction in a human-readable, assembly-like form.
func (a *JumpEqual) String() string {
	return fmt.Sprintf("if r%s == %d goto %+d", a.Dest, a.Value, a.Offset)
}
// SetJumpTarget sets the relative jump offset (Jumper interface).
func (a *JumpEqual) SetJumpTarget(relAddr int16) {
	a.Offset = relAddr
}
// SetValue sets the immediate comparison value (Valuer interface).
func (a *JumpEqual) SetValue(value int32) {
	a.Value = value
}
// Compile-time interface checks.
var (
	_ Instruction = (*JumpEqual32)(nil)
	_ Jumper = (*JumpEqual32)(nil)
	_ Valuer = (*JumpEqual32)(nil)
)
// JumpEqual32 is the 32-bit variant of JumpEqual (BPF_JMP32 class): the
// comparison uses the register's lower 32 bits, rendered with the "w"
// register prefix.
type JumpEqual32 struct {
	Dest Register
	Offset int16
	Value int32
}
// Raw encodes the instruction as a single raw eBPF instruction.
func (a *JumpEqual32) Raw() ([]RawInstruction, error) {
	return []RawInstruction{
		{Op: BPF_JEQ | BPF_K | BPF_JMP32, Reg: NewReg(0, a.Dest), Off: a.Offset, Imm: a.Value},
	}, nil
}
// String renders the instruction in a human-readable, assembly-like form.
func (a *JumpEqual32) String() string {
	return fmt.Sprintf("if w%s == %d goto %+d", a.Dest, a.Value, a.Offset)
}
// SetJumpTarget sets the relative jump offset (Jumper interface).
func (a *JumpEqual32) SetJumpTarget(relAddr int16) {
	a.Offset = relAddr
}
// SetValue sets the immediate comparison value (Valuer interface).
func (a *JumpEqual32) SetValue(value int32) {
	a.Value = value
}
// Compile-time interface checks (no Valuer: the comparison operand is a
// register, not an immediate).
var (
	_ Instruction = (*JumpEqualRegister)(nil)
	_ Jumper = (*JumpEqualRegister)(nil)
)
// JumpEqualRegister is the 64-bit conditional jump BPF_JEQ|BPF_X: if Dest
// equals Src, execution jumps Offset instructions.
type JumpEqualRegister struct {
	Dest Register
	Src Register
	Offset int16
}
// Raw encodes the instruction as a single raw eBPF instruction.
func (a *JumpEqualRegister) Raw() ([]RawInstruction, error) {
	return []RawInstruction{
		{Op: BPF_JEQ | BPF_X | BPF_JMP, Reg: NewReg(a.Src, a.Dest), Off: a.Offset},
	}, nil
}
// String renders the instruction in a human-readable, assembly-like form.
func (a *JumpEqualRegister) String() string {
	return fmt.Sprintf("if r%s == r%s goto %+d", a.Dest, a.Src, a.Offset)
}
// SetJumpTarget sets the relative jump offset (Jumper interface).
func (a *JumpEqualRegister) SetJumpTarget(relAddr int16) {
	a.Offset = relAddr
}
// Compile-time interface checks.
var (
	_ Instruction = (*JumpEqualRegister32)(nil)
	_ Jumper = (*JumpEqualRegister32)(nil)
)
// JumpEqualRegister32 is the 32-bit variant of JumpEqualRegister
// (BPF_JMP32 class): the comparison uses the registers' lower 32 bits.
type JumpEqualRegister32 struct {
	Dest Register
	Src Register
	Offset int16
}
// Raw encodes the instruction as a single raw eBPF instruction.
func (a *JumpEqualRegister32) Raw() ([]RawInstruction, error) {
	return []RawInstruction{
		{Op: BPF_JEQ | BPF_X | BPF_JMP32, Reg: NewReg(a.Src, a.Dest), Off: a.Offset},
	}, nil
}
// String renders the instruction in a human-readable, assembly-like form.
func (a *JumpEqualRegister32) String() string {
	return fmt.Sprintf("if w%s == w%s goto %+d", a.Dest, a.Src, a.Offset)
}
// SetJumpTarget sets the relative jump offset (Jumper interface).
func (a *JumpEqualRegister32) SetJumpTarget(relAddr int16) {
	a.Offset = relAddr
}
package visitor
import (
"github.com/Bartosz-D3V/grafik/common"
"github.com/vektah/gqlparser/ast"
)
// parseOpTypes walks every GraphQL operation, parsing its selection set and
// its variable definitions.
func (v *visitor) parseOpTypes(opList ast.OperationList) {
	for _, operation := range opList {
		v.parseSelectionSet(operation.SelectionSet, make([]string, 0), false)
		v.parseVariables(operation.VariableDefinitions)
	}
}
// parseSelectionSet parses each selection based on its type (Field/FragmentSpread/Inline Fragment)
// It returns the fields that the selection uses from GraphQL schema.
// When registerType is set, the selected fields are also recorded on the
// visitor's custom-type registry.
func (v *visitor) parseSelectionSet(selectionSet ast.SelectionSet, fields []string, registerType bool) []string {
	for _, selection := range selectionSet {
		switch selectionType := selection.(type) {
		case *ast.Field:
			// Record the field name, then recurse into its own selections
			// (with registration enabled and a fresh accumulator).
			fields = append(fields, selectionType.Name)
			v.parseSelectionSet(selectionType.SelectionSet, make([]string, 0), true)
			if registerType {
				// Register both the enclosing object type and the field's own type.
				v.registerTypeByName(selectionType.ObjectDefinition.Name, fields)
				v.registerType(selectionType.Definition.Type, make([]string, 0))
			}
		case *ast.InlineFragment:
			// Inline fragments restart the accumulator (fresh slice passed in).
			fields = v.parseInlineFragment(selectionType, make([]string, 0), false)
			if registerType {
				v.registerTypeByName(selectionType.ObjectDefinition.Name, fields)
			}
		case *ast.FragmentSpread:
			// Fragment spreads extend the current accumulator.
			fields = v.parseFragmentSpread(selectionType, fields, true)
		}
	}
	return fields
}
// parseFragmentSpread parses a GraphQL fragment spread by walking the
// selection set of its definition. It returns the accumulated fields.
func (v *visitor) parseFragmentSpread(fragmentSpread *ast.FragmentSpread, fields []string, registerType bool) []string {
	return v.parseSelectionSet(fragmentSpread.Definition.SelectionSet, fields, registerType)
}

// parseInlineFragment parses a GraphQL inline fragment by walking its
// selection set. It returns the accumulated fields.
func (v *visitor) parseInlineFragment(parsedType *ast.InlineFragment, fields []string, registerType bool) []string {
	return v.parseSelectionSet(parsedType.SelectionSet, fields, registerType)
}
// parseVariables parses the type of every variable declared by a GraphQL
// operation.
func (v *visitor) parseVariables(variableDefinitionList ast.VariableDefinitionList) {
	for _, definition := range variableDefinitionList {
		v.parseType(definition.Type)
	}
}
// parseType parses generic GraphQL Type: it unwraps list wrappers, and for
// non-built-in types registers the type with ALL of its schema fields,
// recursing into each field's type so nested objects get registered too.
func (v *visitor) parseType(astType *ast.Type) {
	leafType := v.findLeafType(astType)
	leafTypeDef := v.schema.Types[leafType.NamedType]
	// If the type is not built-in to the GraphQL specification, register it with all fields selected in the GraphQL query.
	if leafTypeDef != nil && !leafTypeDef.BuiltIn {
		fields := make([]string, len(leafTypeDef.Fields))
		for i, field := range leafTypeDef.Fields {
			fields[i] = field.Name
			v.parseType(field.Type)
		}
		v.registerType(astType, fields)
	}
}
// registerType records the leaf (list-unwrapped) GraphQL type behind astType
// together with the selected fields. Built-in types are skipped.
func (v *visitor) registerType(astType *ast.Type, fields []string) {
	leafType := v.findLeafType(astType)
	if leafType == nil {
		return
	}
	// Delegate: the name-based variant already skips built-ins and merges
	// fields, so the previous duplicated if/else is gone.
	v.registerTypeByName(leafType.NamedType, fields)
}

// registerTypeByName records fields for the named GraphQL type, appending to
// any fields previously recorded for the same type. Built-in types are
// skipped.
func (v *visitor) registerTypeByName(astTypeName string, fields []string) {
	if v.schema.Types[astTypeName].BuiltIn {
		return
	}
	// append on the map entry covers both the "already present" and the
	// "first occurrence" branches the original spelled out separately, and
	// no longer stores the caller's slice by reference on first insert.
	v.customTypes[astTypeName] = append(v.customTypes[astTypeName], fields...)
}
// findLeafType unwraps the type of array.
// If the type is a list (i.e. [[Character!]]) then return leafType (in this example Character).
func (v *visitor) findLeafType(astType *ast.Type) *ast.Type {
if common.IsList(astType) {
return v.findLeafType(astType.Elem)
}
return astType
} | visitor/helper.go | 0.639961 | 0.471406 | helper.go | starcoder |
package main
import "C"
import (
"fmt"
"reflect"
"runtime/cgo"
"testing"
"github.com/gwos/tcg/sdk/transit"
"github.com/stretchr/testify/assert"
)
// test_SetCategory verifies SetCategory writes the given string into the
// Category field of every supported target type, resolved via a cgo handle.
// NOTE(review): the lowercase "test_" prefix keeps these functions out of the
// default `go test` discovery — presumably they are invoked from an exported
// wrapper elsewhere; confirm.
func test_SetCategory(t *testing.T) {
	value := "test-test"
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "InventoryResource",
		target: new(transit.InventoryResource),
		field: "Category",
	}, {
		name: "InventoryService",
		target: new(transit.InventoryService),
		field: "Category",
	}, {
		name: "MonitoredResource",
		target: new(transit.MonitoredResource),
		field: "Category",
	}, {
		name: "MonitoredService",
		target: new(transit.MonitoredService),
		field: "Category",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetCategory(C.ulong(h), C.CString(value))
			h.Delete()
			// Read the field back reflectively to confirm the C setter wrote it.
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, value, f.String())
		})
	}
}
// test_SetContextTimestamp verifies SetContextTimestamp stores the given
// epoch seconds/nanos pair as the tracer context timestamp (millisecond
// string form).
func test_SetContextTimestamp(t *testing.T) {
	v, v1, v2 := "1609372800000", 1609372800, 0
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "InventoryRequest",
		target: new(transit.InventoryRequest),
		field: "Context",
	}, {
		name: "InventoryService",
		target: new(transit.ResourcesWithServicesRequest),
		field: "Context",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetContextTimestamp(C.ulong(h), C.longlong(v1), C.longlong(v2))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, v, f.Interface().(*transit.TracerContext).TimeStamp.String())
		})
	}
}
// test_SetContextToken verifies SetContextToken stores the trace token on the
// request's tracer context.
func test_SetContextToken(t *testing.T) {
	value := "test-test"
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "InventoryRequest",
		target: new(transit.InventoryRequest),
		field: "Context",
	}, {
		name: "InventoryService",
		target: new(transit.ResourcesWithServicesRequest),
		field: "Context",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetContextToken(C.ulong(h), C.CString(value))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, value, f.Interface().(*transit.TracerContext).TraceToken)
		})
	}
}
// test_SetDescription verifies SetDescription writes the Description field of
// every supported target type.
func test_SetDescription(t *testing.T) {
	value := "test-test"
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "InventoryResource",
		target: new(transit.InventoryResource),
		field: "Description",
	}, {
		name: "InventoryService",
		target: new(transit.InventoryService),
		field: "Description",
	}, {
		name: "MonitoredResource",
		target: new(transit.MonitoredResource),
		field: "Description",
	}, {
		name: "MonitoredService",
		target: new(transit.MonitoredService),
		field: "Description",
	}, {
		name: "ResourceGroup",
		target: new(transit.ResourceGroup),
		field: "Description",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetDescription(C.ulong(h), C.CString(value))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, value, f.String())
		})
	}
}
// test_SetDevice verifies SetDevice writes the Device field of resource types.
func test_SetDevice(t *testing.T) {
	value := "test-test"
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "InventoryResource",
		target: new(transit.InventoryResource),
		field: "Device",
	}, {
		name: "MonitoredResource",
		target: new(transit.MonitoredResource),
		field: "Device",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetDevice(C.ulong(h), C.CString(value))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, value, f.String())
		})
	}
}
// test_SetIntervalEnd verifies SetIntervalEnd stores the epoch pair as the
// time series interval end.
func test_SetIntervalEnd(t *testing.T) {
	v, v1, v2 := "1609372800000", 1609372800, 0
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "TimeSeries",
		target: new(transit.TimeSeries),
		field: "Interval",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetIntervalEnd(C.ulong(h), C.longlong(v1), C.longlong(v2))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, v, f.Interface().(*transit.TimeInterval).EndTime.String())
		})
	}
}
// test_SetIntervalStart verifies SetIntervalStart stores the epoch pair as
// the time series interval start.
func test_SetIntervalStart(t *testing.T) {
	v, v1, v2 := "1609372800000", 1609372800, 0
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "TimeSeries",
		target: new(transit.TimeSeries),
		field: "Interval",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetIntervalStart(C.ulong(h), C.longlong(v1), C.longlong(v2))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, v, f.Interface().(*transit.TimeInterval).StartTime.String())
		})
	}
}
// test_SetLastPluginOutput verifies SetLastPluginOutput writes the
// LastPluginOutput field of monitored types.
func test_SetLastPluginOutput(t *testing.T) {
	value := "test-test"
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "MonitoredResource",
		target: new(transit.MonitoredResource),
		field: "LastPluginOutput",
	}, {
		name: "MonitoredService",
		target: new(transit.MonitoredService),
		field: "LastPluginOutput",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetLastPluginOutput(C.ulong(h), C.CString(value))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, value, f.String())
		})
	}
}
// test_SetLastCheckTime verifies SetLastCheckTime stores the epoch pair as
// the LastCheckTime timestamp.
func test_SetLastCheckTime(t *testing.T) {
	v, v1, v2 := "1609372800000", 1609372800, 0
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "MonitoredResource",
		target: new(transit.MonitoredResource),
		field: "LastCheckTime",
	}, {
		name: "MonitoredService",
		target: new(transit.MonitoredService),
		field: "LastCheckTime",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetLastCheckTime(C.ulong(h), C.longlong(v1), C.longlong(v2))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, v, f.Interface().(*transit.Timestamp).String())
		})
	}
}
// test_SetNextCheckTime verifies SetNextCheckTime stores the epoch pair as
// the NextCheckTime timestamp.
func test_SetNextCheckTime(t *testing.T) {
	v, v1, v2 := "1609372800000", 1609372800, 0
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "MonitoredResource",
		target: new(transit.MonitoredResource),
		field: "NextCheckTime",
	}, {
		name: "MonitoredService",
		target: new(transit.MonitoredService),
		field: "NextCheckTime",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetNextCheckTime(C.ulong(h), C.longlong(v1), C.longlong(v2))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, v, f.Interface().(*transit.Timestamp).String())
		})
	}
}
// test_SetName verifies SetName writes the name field of every supported
// target type; note the field name differs per type (Name / GroupName /
// MetricName).
func test_SetName(t *testing.T) {
	value := "test-test"
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "InventoryResource",
		target: new(transit.InventoryResource),
		field: "Name",
	}, {
		name: "InventoryService",
		target: new(transit.InventoryService),
		field: "Name",
	}, {
		name: "MonitoredResource",
		target: new(transit.MonitoredResource),
		field: "Name",
	}, {
		name: "MonitoredService",
		target: new(transit.MonitoredService),
		field: "Name",
	}, {
		name: "ResourceGroup",
		target: new(transit.ResourceGroup),
		field: "GroupName",
	}, {
		name: "TimeSeries",
		target: new(transit.TimeSeries),
		field: "MetricName",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetName(C.ulong(h), C.CString(value))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, value, f.String())
		})
	}
}
// test_SetOwner verifies SetOwner writes the Owner field of every supported
// target type.
func test_SetOwner(t *testing.T) {
	value := "test-test"
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "InventoryResource",
		target: new(transit.InventoryResource),
		field: "Owner",
	}, {
		name: "InventoryService",
		target: new(transit.InventoryService),
		field: "Owner",
	}, {
		name: "MonitoredResource",
		target: new(transit.MonitoredResource),
		field: "Owner",
	}, {
		name: "MonitoredService",
		target: new(transit.MonitoredService),
		field: "Owner",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetOwner(C.ulong(h), C.CString(value))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, value, f.String())
		})
	}
}
// test_SetPropertyBool verifies SetPropertyBool stores a typed boolean
// property; the value is then flipped to exercise overwriting an existing
// property key.
func test_SetPropertyBool(t *testing.T) {
	key, value := "test-test", false
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "InventoryResource",
		target: new(transit.InventoryResource),
		field: "Properties",
	}, {
		name: "InventoryService",
		target: new(transit.InventoryService),
		field: "Properties",
	}, {
		name: "MonitoredResource",
		target: new(transit.MonitoredResource),
		field: "Properties",
	}, {
		name: "MonitoredService",
		target: new(transit.MonitoredService),
		field: "Properties",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetPropertyBool(C.ulong(h), C.CString(key), C._Bool(value))
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, value, *f.Interface().(map[string]transit.TypedValue)[key].BoolValue)
			// Overwrite the same key with the negated value; the handle is
			// still live, so the same target is updated.
			SetPropertyBool(C.ulong(h), C.CString(key), C._Bool(!value))
			f = reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, transit.BooleanType, f.Interface().(map[string]transit.TypedValue)[key].ValueType)
			assert.Equal(t, !value, *f.Interface().(map[string]transit.TypedValue)[key].BoolValue)
			h.Delete()
		})
	}
}
// test_SetPropertyDouble verifies SetPropertyDouble stores a typed double
// property on every supported target type.
func test_SetPropertyDouble(t *testing.T) {
	key, value := "test-test", -1.1
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "InventoryResource",
		target: new(transit.InventoryResource),
		field: "Properties",
	}, {
		name: "InventoryService",
		target: new(transit.InventoryService),
		field: "Properties",
	}, {
		name: "MonitoredResource",
		target: new(transit.MonitoredResource),
		field: "Properties",
	}, {
		name: "MonitoredService",
		target: new(transit.MonitoredService),
		field: "Properties",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetPropertyDouble(C.ulong(h), C.CString(key), C.double(value))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, transit.DoubleType, f.Interface().(map[string]transit.TypedValue)[key].ValueType)
			assert.Equal(t, value, *f.Interface().(map[string]transit.TypedValue)[key].DoubleValue)
		})
	}
}
// test_SetPropertyInt verifies SetPropertyInt stores a typed integer property
// on every supported target type.
func test_SetPropertyInt(t *testing.T) {
	key, value := "test-test", int64(42)
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "InventoryResource",
		target: new(transit.InventoryResource),
		field: "Properties",
	}, {
		name: "InventoryService",
		target: new(transit.InventoryService),
		field: "Properties",
	}, {
		name: "MonitoredResource",
		target: new(transit.MonitoredResource),
		field: "Properties",
	}, {
		name: "MonitoredService",
		target: new(transit.MonitoredService),
		field: "Properties",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetPropertyInt(C.ulong(h), C.CString(key), C.longlong(value))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, transit.IntegerType, f.Interface().(map[string]transit.TypedValue)[key].ValueType)
			assert.Equal(t, value, *f.Interface().(map[string]transit.TypedValue)[key].IntegerValue)
		})
	}
}
// test_SetPropertyStr verifies SetPropertyStr stores a typed string property
// on every supported target type.
func test_SetPropertyStr(t *testing.T) {
	key, value := "test-test", "foo-bar"
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "InventoryResource",
		target: new(transit.InventoryResource),
		field: "Properties",
	}, {
		name: "InventoryService",
		target: new(transit.InventoryService),
		field: "Properties",
	}, {
		name: "MonitoredResource",
		target: new(transit.MonitoredResource),
		field: "Properties",
	}, {
		name: "MonitoredService",
		target: new(transit.MonitoredService),
		field: "Properties",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetPropertyStr(C.ulong(h), C.CString(key), C.CString(value))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, transit.StringType, f.Interface().(map[string]transit.TypedValue)[key].ValueType)
			assert.Equal(t, value, *f.Interface().(map[string]transit.TypedValue)[key].StringValue)
		})
	}
}
// test_SetPropertyTime verifies SetPropertyTime stores a typed time property
// (epoch seconds/nanos pair) on every supported target type.
func test_SetPropertyTime(t *testing.T) {
	key, v, v1, v2 := "test-test", "1609372800000", 1609372800, 0
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "InventoryResource",
		target: new(transit.InventoryResource),
		field: "Properties",
	}, {
		name: "InventoryService",
		target: new(transit.InventoryService),
		field: "Properties",
	}, {
		name: "MonitoredResource",
		target: new(transit.MonitoredResource),
		field: "Properties",
	}, {
		name: "MonitoredService",
		target: new(transit.MonitoredService),
		field: "Properties",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetPropertyTime(C.ulong(h), C.CString(key), C.longlong(v1), C.longlong(v2))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, transit.TimeType, f.Interface().(map[string]transit.TypedValue)[key].ValueType)
			assert.Equal(t, v, f.Interface().(map[string]transit.TypedValue)[key].TimeValue.String())
		})
	}
}
// test_SetSampleType verifies SetSampleType writes the time series sample
// type from its string form.
func test_SetSampleType(t *testing.T) {
	value := transit.Value
	tests := []struct {
		name string
		target interface{}
		field string
	}{{
		name: "TimeSeries",
		target: new(transit.TimeSeries),
		field: "SampleType",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetSampleType(C.ulong(h), C.CString(string(value)))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, string(value), f.String())
		})
	}
}
func test_SetStatus(t *testing.T) {
tests := []struct {
name string
target interface{}
field string
value interface{}
}{{
name: "MonitoredResource",
target: new(transit.MonitoredResource),
field: "Status",
value: transit.HostUp,
}, {
name: "MonitoredService",
target: new(transit.MonitoredService),
field: "Status",
value: transit.ServiceOk,
}}
for _, it := range tests {
t.Run(it.name, func(t *testing.T) {
h := cgo.NewHandle(it.target)
SetStatus(C.ulong(h), C.CString(fmt.Sprint(it.value)))
h.Delete()
r := reflect.ValueOf(it.target)
f := reflect.Indirect(r).FieldByName(it.field)
assert.Equal(t, fmt.Sprint(it.value), f.String())
})
}
}
// test_SetTag verifies that the C bridge's SetTag inserts a key/value pair
// into the Tags map of the target, addressed through a cgo handle.
func test_SetTag(t *testing.T) {
	key, value := "test-test", "foo-bar"
	tests := []struct {
		name   string
		target interface{}
		field  string
	}{{
		name:   "TimeSeries",
		target: new(transit.TimeSeries),
		field:  "Tags",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetTag(C.ulong(h), C.CString(key), C.CString(value))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			// The Tags field is a plain map[string]string.
			assert.Equal(t, value, f.Interface().(map[string]string)[key])
		})
	}
}
// test_SetType verifies that the C bridge's SetType writes the given type
// constant (rendered as a string) into the Type field of every supported
// target kind: inventory/monitored resources and services, and groups.
func test_SetType(t *testing.T) {
	tests := []struct {
		name   string
		target interface{}
		field  string
		value  interface{}
	}{{
		name:   "InventoryResource",
		target: new(transit.InventoryResource),
		field:  "Type",
		value:  transit.ResourceTypeHost,
	}, {
		name:   "InventoryService",
		target: new(transit.InventoryService),
		field:  "Type",
		value:  transit.ResourceTypeService,
	}, {
		name:   "MonitoredResource",
		target: new(transit.MonitoredResource),
		field:  "Type",
		value:  transit.ResourceTypeHypervisor,
	}, {
		name:   "MonitoredService",
		target: new(transit.MonitoredService),
		field:  "Type",
		value:  transit.ResourceTypeInstance,
	}, {
		name:   "ResourceGroup",
		target: new(transit.ResourceGroup),
		field:  "Type",
		value:  transit.HostGroup,
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetType(C.ulong(h), C.CString(fmt.Sprint(it.value)))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, fmt.Sprint(it.value), f.String())
		})
	}
}
// test_SetUnit verifies that the C bridge's SetUnit writes the given unit
// string into the Unit field of a TimeSeries through a cgo handle.
func test_SetUnit(t *testing.T) {
	value := transit.UnitCounter
	tests := []struct {
		name   string
		target interface{}
		field  string
	}{{
		name:   "TimeSeries",
		target: new(transit.TimeSeries),
		field:  "Unit",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetUnit(C.ulong(h), C.CString(string(value)))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, string(value), f.String())
		})
	}
}
// test_SetValueBool verifies that the C bridge's SetValueBool stores a
// boolean TypedValue in the Value field, and that a second call through the
// same handle overwrites the value and sets ValueType to BooleanType.
// Unlike the sibling tests, the handle is kept alive across both calls and
// deleted only at the end.
func test_SetValueBool(t *testing.T) {
	value := false
	tests := []struct {
		name   string
		target interface{}
		field  string
	}{{
		name:   "ThresholdValue",
		target: new(transit.ThresholdValue),
		field:  "Value",
	}, {
		name:   "TimeSeries",
		target: new(transit.TimeSeries),
		field:  "Value",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetValueBool(C.ulong(h), C._Bool(value))
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, value, *f.Interface().(*transit.TypedValue).BoolValue)
			// Second call exercises the update path on an existing TypedValue.
			SetValueBool(C.ulong(h), C._Bool(!value))
			f = reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, transit.BooleanType, f.Interface().(*transit.TypedValue).ValueType)
			assert.Equal(t, !value, *f.Interface().(*transit.TypedValue).BoolValue)
			h.Delete()
		})
	}
}
// test_SetValueDouble verifies that the C bridge's SetValueDouble stores a
// DoubleType TypedValue with the given float in the Value field.
func test_SetValueDouble(t *testing.T) {
	value := -1.1
	tests := []struct {
		name   string
		target interface{}
		field  string
	}{{
		name:   "ThresholdValue",
		target: new(transit.ThresholdValue),
		field:  "Value",
	}, {
		name:   "TimeSeries",
		target: new(transit.TimeSeries),
		field:  "Value",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetValueDouble(C.ulong(h), C.double(value))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, transit.DoubleType, f.Interface().(*transit.TypedValue).ValueType)
			assert.Equal(t, value, *f.Interface().(*transit.TypedValue).DoubleValue)
		})
	}
}
// test_SetValueInt verifies that the C bridge's SetValueInt stores an
// IntegerType TypedValue with the given int64 in the Value field.
func test_SetValueInt(t *testing.T) {
	value := int64(42)
	tests := []struct {
		name   string
		target interface{}
		field  string
	}{{
		name:   "ThresholdValue",
		target: new(transit.ThresholdValue),
		field:  "Value",
	}, {
		name:   "TimeSeries",
		target: new(transit.TimeSeries),
		field:  "Value",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetValueInt(C.ulong(h), C.longlong(value))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, transit.IntegerType, f.Interface().(*transit.TypedValue).ValueType)
			assert.Equal(t, value, *f.Interface().(*transit.TypedValue).IntegerValue)
		})
	}
}
// test_SetValueStr verifies that the C bridge's SetValueStr stores a
// StringType TypedValue with the given string in the Value field.
func test_SetValueStr(t *testing.T) {
	value := "foo-bar"
	tests := []struct {
		name   string
		target interface{}
		field  string
	}{{
		name:   "ThresholdValue",
		target: new(transit.ThresholdValue),
		field:  "Value",
	}, {
		name:   "TimeSeries",
		target: new(transit.TimeSeries),
		field:  "Value",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetValueStr(C.ulong(h), C.CString(value))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, transit.StringType, f.Interface().(*transit.TypedValue).ValueType)
			assert.Equal(t, value, *f.Interface().(*transit.TypedValue).StringValue)
		})
	}
}
// test_SetValueTime verifies that the C bridge's SetValueTime composes its
// two int64 arguments (v1, v2) into a TimeType TypedValue whose TimeValue
// renders as "1609372800000".
// NOTE(review): v1 looks like epoch seconds and the rendered string like
// epoch milliseconds; the role of v2 (here 0) is presumably a sub-second
// component — confirm against SetValueTime's implementation.
func test_SetValueTime(t *testing.T) {
	v, v1, v2 := "1609372800000", 1609372800, 0
	tests := []struct {
		name   string
		target interface{}
		field  string
	}{{
		name:   "ThresholdValue",
		target: new(transit.ThresholdValue),
		field:  "Value",
	}, {
		name:   "TimeSeries",
		target: new(transit.TimeSeries),
		field:  "Value",
	}}
	for _, it := range tests {
		t.Run(it.name, func(t *testing.T) {
			h := cgo.NewHandle(it.target)
			SetValueTime(C.ulong(h), C.longlong(v1), C.longlong(v2))
			h.Delete()
			r := reflect.ValueOf(it.target)
			f := reflect.Indirect(r).FieldByName(it.field)
			assert.Equal(t, transit.TimeType, f.Interface().(*transit.TypedValue).ValueType)
			assert.Equal(t, v, f.Interface().(*transit.TypedValue).TimeValue.String())
		})
	}
}
// Package strings adds additional string utility functions.
package strings
import (
"strings"
)
// Compare reports the lexical ordering of a and b: -1 if a is lower,
// 1 if greater, 0 if equal. Case sensitive.
func Compare(a, b string) int {
	// strings.Compare has exactly the -1/0/+1 contract required here.
	return strings.Compare(a, b)
}
// CompareFold reports the lexical ordering of a and b after lower-casing
// both: -1 if a is lower, 1 if greater, 0 if equal. Case-insensitive.
func CompareFold(a, b string) int {
	// Lower-case each input once instead of up to twice per comparison.
	return strings.Compare(strings.ToLower(a), strings.ToLower(b))
}
// FetchLeft returns everything in s to the left of the first occurrence of
// sep, or "" when sep does not occur in s.
func FetchLeft(s, sep string) string {
	if before, _, ok := strings.Cut(s, sep); ok {
		return before
	}
	return ""
}
// FetchLeftFold returns everything in s to the left of the first
// case-insensitive occurrence of sep, or "" when sep does not occur.
// (The previous version sliced with index -1 and panicked when sep was
// absent.)
// NOTE(review): matching is done on ToLower'd copies; byte offsets match
// the original for ASCII but can drift for some Unicode case mappings.
func FetchLeftFold(s, sep string) string {
	i := strings.Index(strings.ToLower(s), strings.ToLower(sep))
	if i < 0 {
		return ""
	}
	return s[:i]
}
// FetchRight returns everything in s after the first occurrence of sep up
// to the end of s, or "" when sep does not occur in s.
// (The previous Split-based version returned only the text between the
// first and second occurrences when sep appeared more than once.)
func FetchRight(s, sep string) string {
	if _, after, ok := strings.Cut(s, sep); ok {
		return after
	}
	return ""
}
// FetchRightFold returns everything in s after the first case-insensitive
// occurrence of sep up to the end of s, or "" when sep does not occur.
// (The previous version used the -1 "not found" index in slice arithmetic
// and returned a nonsense substring in that case.)
// NOTE(review): matching is done on ToLower'd copies; byte offsets match
// the original for ASCII but can drift for some Unicode case mappings.
func FetchRightFold(s, sep string) string {
	i := strings.Index(strings.ToLower(s), strings.ToLower(sep))
	if i < 0 {
		return ""
	}
	return s[i+len(sep):]
}
// HasPrefixFold reports whether s begins with prefix, comparing both in
// lower case (case-insensitive).
func HasPrefixFold(s, prefix string) bool {
	ls, lp := strings.ToLower(s), strings.ToLower(prefix)
	return strings.HasPrefix(ls, lp)
}
// HasSuffixFold reports whether s ends with suffix, comparing both in
// lower case (case-insensitive).
func HasSuffixFold(s, suffix string) bool {
	ls, lsuf := strings.ToLower(s), strings.ToLower(suffix)
	return strings.HasSuffix(ls, lsuf)
}
// primeRK is the prime base used in Rabin-Karp algorithm.
const primeRK = 16777619
// hashstr returns the hash and the appropriate multiplicative
// factor for use in Rabin-Karp algorithm.
// The first return value is the polynomial hash of sep; the second is
// primeRK**len(sep), computed by square-and-multiply, used to remove the
// leading character when the search window slides.
func hashstr(sep string) (uint32, uint32) {
	hash := uint32(0)
	for i := 0; i < len(sep); i++ {
		hash = hash*primeRK + uint32(sep[i])
	}
	// Square-and-multiply exponentiation: pow = primeRK**len(sep)
	// (modulo 2**32 by uint32 wraparound, which is what the rolling
	// hash in Indexes relies on).
	var pow, sq uint32 = 1, primeRK
	for i := len(sep); i > 0; i >>= 1 {
		if i&1 != 0 {
			pow *= sq
		}
		sq *= sq
	}
	return hash, pow
}
// Indexes returns a slice of all indexes of "sep" starting byte positions in
// "s", or an empty slice if none are present in "s".
// Matches may overlap (e.g. "aa" in "aaa" yields [0 1]). The search uses a
// Rabin-Karp rolling hash, mirroring the stdlib's strings.Index internals.
func Indexes(s, sep string) (r []int) {
	n := len(sep)
	switch {
	case n == 0:
		// Empty separator: no positions reported.
		return
	case n == 1:
		c := sep[0]
		// special case worth making fast
		for i := 0; i < len(s); i++ {
			if s[i] == c {
				r = append(r, i)
			}
		}
		return
	case n == len(s):
		if sep == s {
			r = append(r, 0)
			return
		}
		// Equal lengths but unequal strings fall through to the hash
		// path below, which finds nothing.
	case n > len(s):
		// Separator longer than the haystack: no match possible.
		return
	}
	// Hash sep.
	hashsep, pow := hashstr(sep)
	// h is the rolling hash of the current n-byte window of s.
	var h uint32
	for i := 0; i < n; i++ {
		h = h*primeRK + uint32(s[i])
	}
	// Check the initial window, then slide one byte at a time; a hash hit
	// is confirmed with a direct string comparison to rule out collisions.
	if h == hashsep && s[:n] == sep {
		r = append(r, 0)
		return
	}
	for i := n; i < len(s); {
		h *= primeRK
		h += uint32(s[i])
		h -= pow * uint32(s[i-n])
		i++
		if h == hashsep && s[i-n:i] == sep {
			r = append(r, i-n)
		}
	}
	return r
}
// IndexesFold returns the starting byte positions of every occurrence of
// sep in s, matching case-insensitively, or an empty slice if there are
// none. It lower-cases both arguments and delegates to Indexes.
func IndexesFold(s, sep string) []int {
	ls := strings.ToLower(s)
	lsep := strings.ToLower(sep)
	return Indexes(ls, lsep)
}
// MatchesWildcard reports whether text matches pattern, case-insensitively.
// '*' matches any run of characters and '?' matches exactly one character.
// Empty text or empty pattern never match.
// NOTE(review): when the pattern is exhausted before the text and contains
// no trailing '*', leftover text is ignored (e.g. text "ab" matches
// pattern "a") — preserved from the original implementation; confirm that
// callers rely on this prefix-like behavior.
func MatchesWildcard(text, pattern string) bool {
	if text == "" || pattern == "" {
		return false
	}
	// Direct rune conversions replace the original manual append loops.
	t := []rune(text)
	w := []rune(pattern)
	it := 0
	iw := 0
	// Consume literal/'?' characters until the first '*' (or exhaustion).
	for it < len(t) && iw < len(w) {
		if w[iw] == '*' {
			break
		}
		if w[iw] != '?' && !strings.EqualFold(string(t[it]), string(w[iw])) {
			return false
		}
		it++
		iw++
	}
	// Backtracking phase: sw/st remember the pattern/text positions just
	// after the most recent '*', so a failed literal match can retry the
	// star against one more character of text.
	sw := 0
	st := -1
	for it < len(t) && iw < len(w) {
		if w[iw] == '*' {
			iw++
			if iw >= len(w) {
				// A trailing '*' swallows the rest of the text.
				return true
			}
			sw = iw
			st = it
		} else {
			if w[iw] == '?' || strings.EqualFold(string(t[it]), string(w[iw])) {
				it++
				iw++
			} else {
				it = st
				st++
				iw = sw
			}
		}
	}
	// Any leftover pattern characters must all be '*'.
	for iw < len(w) && w[iw] == '*' {
		iw++
	}
	return iw == len(w)
}
package predicate
// turning movement count (TMC) queries
import (
"github.com/mitroadmaps/gomapinfer/common"
)
// init registers the named turning-movement-count predicates. The numeric
// coordinates are pixel-space polygons; the values suggest 1920x1080 video
// frames — confirm against the camera footage they were traced from.
func init() {
	// uav: trips starting in the left/lower region, ending upper-right.
	predicates["uav"] = StartEndPredicate(
		common.Rect(362, 446, 706, 1080).ToPolygon(),
		common.Rect(784, 176, 1920, 642).ToPolygon(),
	)
	// warsawlr: left-to-right movement through two bottom-edge regions.
	predicates["warsawlr"] = WaypointPredicate([]common.Polygon{
		common.Rect(0, 610, 930, 1080).ToPolygon(),
		common.Rect(1190, 700, 1920, 1080).ToPolygon(),
	})
	// warsawtb: top-to-bottom movement (hand-traced polygons).
	predicates["warsawtb"] = WaypointPredicate([]common.Polygon{
		{
			common.Point{978, 337},
			common.Point{1150, 680},
			common.Point{1450, 680},
			common.Point{1580, 590},
			common.Point{1107, 325},
		},
		{
			common.Point{1920, 685},
			common.Point{1645, 669},
			common.Point{1400, 780},
			common.Point{1573, 1080},
			common.Point{1920, 1080},
		},
	})
	// warsawhw: highway movement across the upper half of the frame.
	predicates["warsawhw"] = WaypointPredicate([]common.Polygon{
		{
			common.Point{1314, 403},
			common.Point{901, 253},
			common.Point{978, 202},
			common.Point{1390, 333},
		},
		{
			common.Point{1558, 393},
			common.Point{1491, 466},
			common.Point{1920, 680},
			common.Point{1920, 550},
		},
	})
	// warsaw: union of the three Warsaw movements.
	predicates["warsaw"] = Or(predicates["warsawlr"], predicates["warsawtb"], predicates["warsawhw"])
	// Entry/exit regions of the Shibuya intersection, keyed by screen side.
	shibuyaPolys := map[string]common.Polygon{
		"right": {
			common.Point{1332, 0},
			common.Point{1332, 440},
			common.Point{1614, 550},
			common.Point{1920, 550},
			common.Point{1920, 0},
		},
		"left": {
			common.Point{0, 525},
			common.Point{500, 525},
			common.Point{800, 1080},
			common.Point{0, 1080},
		},
		"top": {
			common.Point{0, 525},
			common.Point{550, 525},
			common.Point{1200, 420},
			common.Point{1200, 0},
			common.Point{0, 0},
		},
		"bottom": {
			common.Point{1920, 630},
			common.Point{1640, 630},
			common.Point{1040, 1080},
			common.Point{1920, 1080},
		},
	}
	// Each pairwise predicate tracks one approach/exit combination.
	predicates["shibuyabt"] = StartEndPredicate(shibuyaPolys["bottom"], shibuyaPolys["top"])
	predicates["shibuyabl"] = StartEndPredicate(shibuyaPolys["bottom"], shibuyaPolys["left"])
	predicates["shibuyarl"] = StartEndPredicate(shibuyaPolys["right"], shibuyaPolys["left"])
	predicates["shibuyart"] = StartEndPredicate(shibuyaPolys["right"], shibuyaPolys["top"])
	predicates["shibuyarb"] = StartEndPredicate(shibuyaPolys["right"], shibuyaPolys["bottom"])
	predicates["shibuya"] = Or(predicates["shibuyabt"], predicates["shibuyabl"], predicates["shibuyarl"], predicates["shibuyart"], predicates["shibuyarb"])
}
package iso20022
// Calculation of the net asset value for an investment fund/fund class.
// All fields are pointers or slices and remain nil until populated via the
// Set*/Add* helpers below; XML tags follow the ISO 20022 element names.
type PriceValuation2 struct {
	// Unique technical identifier for an instance of a price valuation within a price report, as assigned by the issuer of the report.
	Identification *Max35Text `xml:"Id"`
	// Date and time of the price valuation for the investment fund/fund class.
	ValuationDateTime *DateAndDateTimeChoice `xml:"ValtnDtTm,omitempty"`
	// Date and time at which a price is applied, according to the terms stated in the prospectus.
	TradeDateTime *DateAndDateTimeChoice `xml:"TradDtTm"`
	// Investment fund class for which the net asset value is calculated.
	FinancialInstrumentDetails *FinancialInstrument5 `xml:"FinInstrmDtls"`
	// Value of all the holdings, less the fund's liabilities, attributable to a specific investment fund class.
	TotalNAV []*ActiveOrHistoricCurrencyAndAmount `xml:"TtlNAV,omitempty"`
	// Total number of investment fund class units that have been issued.
	TotalUnitsNumber *FinancialInstrumentQuantity1 `xml:"TtlUnitsNb,omitempty"`
	// Date and time of the next price valuation for the investment fund/fund class.
	NextValuationDateTime *DateAndDateTimeChoice `xml:"NxtValtnDtTm,omitempty"`
	// Date and time of the previous price valuation for the investment fund/fund class.
	PreviousValuationDateTime *DateAndDateTimeChoice `xml:"PrvsValtnDtTm,omitempty"`
	// Specifies how the valuation is done, based on the schedule stated in the prospectus.
	ValuationCycle *ValuationTiming1Code `xml:"ValtnCycl"`
	// Indicates whether the valuation of the investment fund class is suspended.
	SuspendedIndicator *YesNoIndicator `xml:"SspdInd"`
	// Amount of money for which goods or services are offered, sold, or bought.
	PriceDetails []*UnitPrice6 `xml:"PricDtls,omitempty"`
	// Information related to the price variations of an investment fund class.
	ValuationStatistics []*ValuationStatistics2 `xml:"ValtnSttstcs,omitempty"`
}
// SetIdentification stores value as the valuation's technical identifier.
func (p *PriceValuation2) SetIdentification(value string) {
	p.Identification = (*Max35Text)(&value)
}
// AddValuationDateTime allocates the ValuationDateTime choice and returns
// it so the caller can populate it.
func (p *PriceValuation2) AddValuationDateTime() *DateAndDateTimeChoice {
	p.ValuationDateTime = new(DateAndDateTimeChoice)
	return p.ValuationDateTime
}
// AddTradeDateTime allocates the TradeDateTime choice and returns it so the
// caller can populate it.
func (p *PriceValuation2) AddTradeDateTime() *DateAndDateTimeChoice {
	p.TradeDateTime = new(DateAndDateTimeChoice)
	return p.TradeDateTime
}
// AddFinancialInstrumentDetails allocates the FinancialInstrumentDetails
// element and returns it so the caller can populate it.
func (p *PriceValuation2) AddFinancialInstrumentDetails() *FinancialInstrument5 {
	p.FinancialInstrumentDetails = new(FinancialInstrument5)
	return p.FinancialInstrumentDetails
}
// AddTotalNAV appends a currency-and-amount entry to the TotalNAV list.
func (p *PriceValuation2) AddTotalNAV(value, currency string) {
	p.TotalNAV = append(p.TotalNAV, NewActiveOrHistoricCurrencyAndAmount(value, currency))
}
// AddTotalUnitsNumber allocates the TotalUnitsNumber element and returns it
// so the caller can populate it.
func (p *PriceValuation2) AddTotalUnitsNumber() *FinancialInstrumentQuantity1 {
	p.TotalUnitsNumber = new(FinancialInstrumentQuantity1)
	return p.TotalUnitsNumber
}
// AddNextValuationDateTime allocates the NextValuationDateTime choice and
// returns it so the caller can populate it.
func (p *PriceValuation2) AddNextValuationDateTime() *DateAndDateTimeChoice {
	p.NextValuationDateTime = new(DateAndDateTimeChoice)
	return p.NextValuationDateTime
}
// AddPreviousValuationDateTime allocates the PreviousValuationDateTime
// choice and returns it so the caller can populate it.
func (p *PriceValuation2) AddPreviousValuationDateTime() *DateAndDateTimeChoice {
	p.PreviousValuationDateTime = new(DateAndDateTimeChoice)
	return p.PreviousValuationDateTime
}
// SetValuationCycle stores value as the valuation-timing code.
func (p *PriceValuation2) SetValuationCycle(value string) {
	p.ValuationCycle = (*ValuationTiming1Code)(&value)
}
// SetSuspendedIndicator stores value as the yes/no suspension flag.
func (p *PriceValuation2) SetSuspendedIndicator(value string) {
	p.SuspendedIndicator = (*YesNoIndicator)(&value)
}
// AddPriceDetails appends a new UnitPrice6 to PriceDetails and returns it
// so the caller can populate it.
func (p *PriceValuation2) AddPriceDetails() *UnitPrice6 {
	newValue := new(UnitPrice6)
	p.PriceDetails = append(p.PriceDetails, newValue)
	return newValue
}
// AddValuationStatistics appends a new ValuationStatistics2 to
// ValuationStatistics and returns it so the caller can populate it.
func (p *PriceValuation2) AddValuationStatistics() *ValuationStatistics2 {
	newValue := new(ValuationStatistics2)
	p.ValuationStatistics = append(p.ValuationStatistics, newValue)
	return newValue
}
package main
import (
"github.com/MattSwanson/raylib-go/physics"
"github.com/MattSwanson/raylib-go/raylib"
)
const (
	// NOTE(review): velocity is declared but not referenced anywhere in
	// this file — possibly left over from another physics example.
	velocity = 0.5
)
// main runs the Physac "restitution" demo: three circles with restitution
// 0, 0.5 and 1 are dropped onto a static floor; pressing R resets them.
// The per-frame ordering (physics.Update, input, then immediate-mode
// drawing) is significant and must not be rearranged.
func main() {
	screenWidth := float32(800)
	screenHeight := float32(450)
	rl.SetConfigFlags(rl.FlagMsaa4xHint)
	rl.InitWindow(int32(screenWidth), int32(screenHeight), "Physac [raylib] - physics restitution")
	// Physac logo drawing position
	logoX := int32(screenWidth) - rl.MeasureText("Physac", 30) - 10
	logoY := int32(15)
	// Initialize physics and default physics bodies
	physics.Init()
	// Create floor rectangle physics body
	floor := physics.NewBodyRectangle(rl.NewVector2(screenWidth/2, screenHeight), screenWidth, 100, 10)
	floor.Enabled = false // Disable body state to convert it to static (no dynamics, but collisions)
	floor.Restitution = 1
	// Create circles physics body; each circle demonstrates one
	// restitution value (0 = no bounce, 1 = perfectly elastic).
	circleA := physics.NewBodyCircle(rl.NewVector2(screenWidth*0.25, screenHeight/2), 30, 10)
	circleA.Restitution = 0
	circleB := physics.NewBodyCircle(rl.NewVector2(screenWidth*0.5, screenHeight/2), 30, 10)
	circleB.Restitution = 0.5
	circleC := physics.NewBodyCircle(rl.NewVector2(screenWidth*0.75, screenHeight/2), 30, 10)
	circleC.Restitution = 1
	rl.SetTargetFPS(60)
	for !rl.WindowShouldClose() {
		// Update created physics objects
		physics.Update()
		if rl.IsKeyPressed(rl.KeyR) { // Reset physics input
			// Reset circles physics bodies position and velocity
			circleA.Position = rl.NewVector2(screenWidth*0.25, screenHeight/2)
			circleA.Velocity = rl.NewVector2(0, 0)
			circleB.Position = rl.NewVector2(screenWidth*0.5, screenHeight/2)
			circleB.Velocity = rl.NewVector2(0, 0)
			circleC.Position = rl.NewVector2(screenWidth*0.75, screenHeight/2)
			circleC.Velocity = rl.NewVector2(0, 0)
		}
		rl.BeginDrawing()
		rl.ClearBackground(rl.Black)
		rl.DrawFPS(int32(screenWidth)-90, int32(screenHeight)-30)
		// Draw created physics bodies as wireframes.
		for i, body := range physics.GetBodies() {
			vertexCount := physics.GetShapeVerticesCount(i)
			for j := 0; j < vertexCount; j++ {
				// Get physics bodies shape vertices to draw lines
				// NOTE: GetShapeVertex() already calculates rotation transformations
				vertexA := body.GetShapeVertex(j)
				jj := 0
				if j+1 < vertexCount { // Get next vertex or first to close the shape
					jj = j + 1
				}
				vertexB := body.GetShapeVertex(jj)
				rl.DrawLineV(vertexA, vertexB, rl.Green) // Draw a line between two vertex positions
			}
		}
		rl.DrawText("Restitution amount", (int32(screenWidth)-rl.MeasureText("Restitution amount", 30))/2, 75, 30, rl.White)
		rl.DrawText("0", int32(circleA.Position.X)-rl.MeasureText("0", 20)/2, int32(circleA.Position.Y)-7, 20, rl.White)
		rl.DrawText("0.5", int32(circleB.Position.X)-rl.MeasureText("0.5", 20)/2, int32(circleB.Position.Y)-7, 20, rl.White)
		rl.DrawText("1", int32(circleC.Position.X)-rl.MeasureText("1", 20)/2, int32(circleC.Position.Y)-7, 20, rl.White)
		rl.DrawText("Press 'R' to reset example", 10, 10, 10, rl.White)
		rl.DrawText("Physac", logoX, logoY, 30, rl.White)
		rl.DrawText("Powered by", logoX+50, logoY-7, 10, rl.White)
		rl.EndDrawing()
	}
	physics.Close() // Unitialize physics
	rl.CloseWindow()
}
package checkers
import (
	"fmt"
	"strings"
)
const (
	//ROWS is the number of rows in a checkers board
	ROWS = 8
	//COLS is the number of cols in a checkers board
	//this variable is represented as half the amount of the columns
	//on a typical checkers board as half of the slots on the board are
	//unused. This implementation takes use of that to save on memory
	COLS = 4
)
// board stores the playable squares only: each cell holds a piece byte
// ('x'/'o' for men, 'X'/'O' for kings) or '_' for an empty square.
type board [ROWS][COLS]byte
// String renders the board one row per line. Odd rows interleave pieces
// before blanks ("p|_|..."), even rows after ("_|p|..."), reproducing the
// staggered layout of a real checkers board.
// Uses a strings.Builder instead of the previous quadratic string
// concatenation and end-of-row slice trimming.
func (b board) String() string {
	var sb strings.Builder
	for i := 0; i < ROWS; i++ {
		sb.WriteByte('[')
		for j := 0; j < COLS; j++ {
			if i%2 == 1 {
				fmt.Fprintf(&sb, "%c|_", b[i][j])
			} else {
				fmt.Fprintf(&sb, "_|%c", b[i][j])
			}
			// A '|' separates cells; the last cell is followed by ']'.
			if j < COLS-1 {
				sb.WriteByte('|')
			}
		}
		sb.WriteString("]\n")
	}
	return sb.String()
}
// isOppositePlayer reports whether the square at p holds a piece (man or
// king) belonging to the opponent of player. Out-of-bounds positions are
// never the opponent's.
func (b board) isOppositePlayer(p position, player byte) bool {
	if !p.inBounds() {
		return false
	}
	cell := b[p.i][p.j]
	switch player {
	case 'x', 'X':
		return cell == 'o' || cell == 'O'
	default:
		return cell == 'x' || cell == 'X'
	}
}
// isVacant reports whether p is on the board and the square there is empty.
// Uses the position method p.inBounds() for consistency with
// isOppositePlayer (previously called the free function inBounds(i, j)).
func (b board) isVacant(p position) bool {
	if !p.inBounds() {
		return false
	}
	return b[p.i][p.j] == '_'
}
// canCaptureLeft reports whether the piece at p can jump diagonally left
// (vertical direction vert, column parity offset horiz): the adjacent
// square must hold an opposing piece and the landing square must be empty.
func (b board) canCaptureLeft(p position, vert, horiz int, player byte) bool {
	adjacent := position{p.i + vert, p.j - horiz}
	landing := position{p.i + vert + vert, p.j - 1}
	return b.isOppositePlayer(adjacent, player) && b.isVacant(landing)
}
// canCaptureRight reports whether the piece at p can jump diagonally right
// (vertical direction vert, column parity offset horiz): the adjacent
// square must hold an opposing piece and the landing square must be empty.
func (b board) canCaptureRight(p position, vert, horiz int, player byte) bool {
	adjacent := position{p.i + vert, p.j + (1 - horiz)}
	landing := position{p.i + vert + vert, p.j + 1}
	return b.isOppositePlayer(adjacent, player) && b.isVacant(landing)
}
// canCaptureFromPos reports whether the piece at p has at least one legal
// capture in any of its allowed vertical directions.
func (b board) canCaptureFromPos(p position) bool {
	player := b[p.i][p.j]
	// horiz encodes the column offset induced by the half-width board:
	// rows with odd parity use offset 0, the rest use 1.
	horiz := 1
	if rowParity(p.i) {
		horiz = 0
	}
	for _, vert := range b.getVertMovesFromPos(p) {
		if b.canCaptureLeft(p, vert, horiz, player) || b.canCaptureRight(p, vert, horiz, player) {
			return true
		}
	}
	return false
}
// captureCheck appends to moves every legal capture available to player's
// piece at p, trying, for each allowed vertical direction, the left jump
// first and then the right jump.
func (b board) captureCheck(p position, vertMoves []int, player byte, moves *[]Move) {
	horiz := 1
	if rowParity(p.i) {
		horiz = 0
	}
	for _, vert := range vertMoves {
		// Each entry holds {adjacent-column offset, landing-column offset}:
		// first the left diagonal, then the right one.
		for _, d := range [2][2]int{{-horiz, -1}, {1 - horiz, 1}} {
			adjacent := position{p.i + vert, p.j + d[0]}
			landing := position{p.i + vert + vert, p.j + d[1]}
			if b.isOppositePlayer(adjacent, player) && b.isVacant(landing) {
				*moves = append(*moves, Move{
					start:         p,
					end:           landing,
					capturedPiece: adjacent,
				})
			}
		}
	}
}
// checkForAdjacentVacantSpots appends to moves every simple (non-capturing)
// move from p: for each allowed vertical direction it tries the left
// diagonal first, then the right.
func (b board) checkForAdjacentVacantSpots(p position, verticalMoves []int, moves *[]Move) {
	horiz := 1
	if rowParity(p.i) {
		horiz = 0
	}
	for _, vert := range verticalMoves {
		// Column offsets for the left and right diagonals, in that order.
		for _, dj := range [2]int{-horiz, 1 - horiz} {
			dest := position{p.i + vert, p.j + dj}
			if b.isVacant(dest) {
				*moves = append(*moves, Move{
					start: p,
					end:   dest,
				})
			}
		}
	}
}
// getVertMovesFromPos returns the vertical directions the piece at p may
// move in: kings ('X'/'O') both ways, 'x' men only downward (+1), 'o' men
// only upward (-1). An empty square yields nil.
func (b board) getVertMovesFromPos(p position) []int {
	switch b[p.i][p.j] {
	case 'X', 'O':
		return []int{1, -1}
	case 'x':
		return []int{1}
	case 'o':
		return []int{-1}
	}
	return nil
}
// canMoveFromPos reports whether the piece at p has any legal move at all:
// either a simple step onto an adjacent vacant diagonal, or a capture over
// an opposing piece onto a vacant landing square.
func (b board) canMoveFromPos(p position) bool {
	i, j := p.i, p.j
	// horiz encodes the column offset induced by the half-width board
	// representation: odd-parity rows use 0, the rest use 1.
	horiz := 1
	if rowParity(i) {
		horiz = 0
	}
	for _, vert := range b.getVertMovesFromPos(p) {
		// Left diagonal: step if vacant, otherwise try a capture over it.
		left := position{i + vert, j - horiz}
		if b.isVacant(left) {
			return true
		} else if b.isOppositePlayer(left, b[i][j]) {
			posAfterMove := position{i + vert + vert, j - 1}
			if b.isVacant(posAfterMove) {
				return true
			}
		}
		// Right diagonal: same check mirrored.
		right := position{i + vert, j + (1 - horiz)}
		if b.isVacant(right) {
			return true
		} else if b.isOppositePlayer(right, b[i][j]) {
			posAfterMove := position{i + vert + vert, j + 1}
			if b.isVacant(posAfterMove) {
				return true
			}
		}
	}
	return false
}
package helper
import (
"encoding/hex"
"encoding/json"
"github.com/joeqian10/neo3-gogogo/io"
"strings"
)
const UINT256SIZE = 32
var UInt256Zero = NewUInt256()
/// This class stores a 256 bit unsigned int, represented as a 32-byte little-endian byte array
/// Composed by ulong(64) + ulong(64) + ulong(64) + ulong(64) = UInt256(256)
type UInt256 struct {
Value1 uint64
Value2 uint64
Value3 uint64
Value4 uint64
}
func NewUInt256() *UInt256 {
return &UInt256{}
}
// UInt256FromBytes attempts to decode the given bytes (in LE representation) into an UInt256.
func UInt256FromBytes(b []byte) *UInt256 {
var r []byte
if b == nil {
r = make([]byte, UINT256SIZE)
} else if len(b) < UINT256SIZE {
r = PadRight(b, UINT256SIZE, false)
} else {
r = b[:UINT256SIZE]
}
return &UInt256{
Value1: BytesToUInt64(r[:UINT64SIZE]),
Value2: BytesToUInt64(r[UINT64SIZE:UINT64SIZE*2]),
Value3: BytesToUInt64(r[UINT64SIZE*2:UINT64SIZE*3]),
Value4: BytesToUInt64(r[UINT64SIZE*3:]),
}
}
// UInt256FromString attempts to decode the given string (in BE representation) into an UInt256.
func UInt256FromString(s string) (u *UInt256, err error) {
s = strings.TrimPrefix(s, "0x")
b, err := hex.DecodeString(s)
if err != nil {
return nil, err
}
return UInt256FromBytes(ReverseBytes(b)), nil
}
/// Method CompareTo returns 1 if this UInt256 is bigger than other UInt256; -1 if it's smaller; 0 if it's equals
/// Example: assume this is 01ff00ff00ff00ff00ff00ff00ff00ff00ff00ff00ff00ff00ff00ff00ff00a4, this.CompareTo(02ff00ff00ff00ff00ff00ff00ff00ff00ff00ff00ff00ff00ff00ff00ff00a3) returns 1
func (u *UInt256) CompareTo(other *UInt256) int {
r := CompareTo(u.Value4, other.Value4)
if r != 0 {return r}
r = CompareTo(u.Value3, other.Value3)
if r != 0 {
return r
}
r = CompareTo(u.Value2, other.Value2)
if r != 0 {
return r
}
r = CompareTo(u.Value1, other.Value1)
return r
}
// Equals returns true if both UInt256 values are the same.
func (u *UInt256) Equals(other *UInt256) bool {
if other == nil {
return false
}
return u.CompareTo(other) == 0
}
func (u *UInt256) Less(other *UInt256) bool {
return u.CompareTo(other) == -1
}
func (u *UInt256) Deserialize(br *io.BinaryReader) {
br.ReadLE(&u.Value1)
br.ReadLE(&u.Value2)
br.ReadLE(&u.Value3)
br.ReadLE(&u.Value4)
}
func (u *UInt256) Serialize(bw *io.BinaryWriter) {
bw.WriteLE(u.Value1)
bw.WriteLE(u.Value2)
bw.WriteLE(u.Value3)
bw.WriteLE(u.Value4)
}
// String implements the stringer interface. Return big endian hex string.
func (u UInt256) String() string {
return hex.EncodeToString(ReverseBytes(u.ToByteArray()))
}
// ToByteArray returns a byte slice representation of u.
func (u *UInt256) ToByteArray() []byte {
b, e := io.ToArray(u)
if e != nil {
return nil
}
return b
}
// Size returns the size of a UInt256 object in byte
func (u *UInt256) Size() int {
return UINT256SIZE
}
// UnmarshalJSON implements the json unmarshaller interface.
func (u *UInt256) UnmarshalJSON(data []byte) (err error) {
var js string
if err = json.Unmarshal(data, &js); err != nil {
return err
}
js = strings.TrimPrefix(js, "0x")
v, err := UInt256FromString(js)
*u = *v
return err
}
// MarshalJSON implements the json marshaller interface.
func (u UInt256) MarshalJSON() ([]byte, error) {
return []byte(`"0x` + u.String() + `"`), nil
}
// ExistsIn checks if u exists in list
func (u UInt256) ExistsIn(list []UInt256) bool {
for _, a := range list {
if (&u).Equals(&a) {
return true
}
}
return false
}
type UInt256Slice []UInt256
func (us UInt256Slice) Len() int {
return len(us)
}
func (us UInt256Slice) Less(i int, j int) bool {
return (&us[i]).Less(&us[j])
}
func (us UInt256Slice) Swap(i, j int) {
t := us[i]
us[i] = us[j]
us[j] = t
}
func (us UInt256Slice) GetVarSize() int {
return GetVarSize(len(us)) + len(us)*UINT256SIZE
} | helper/uint256.go | 0.746046 | 0.420183 | uint256.go | starcoder |
package imagelib
import (
"image"
"image/color"
"math"
)
// Pick out only the red colors from an image
func Red(m image.Image) image.Image {
var (
rect = m.Bounds()
c color.Color
cr color.RGBA
newImage = image.NewRGBA(image.Rect(0, 0, rect.Max.X-rect.Min.X, rect.Max.Y-rect.Min.Y))
)
for y := rect.Min.Y; y < rect.Max.Y; y++ {
for x := rect.Min.X; x < rect.Max.X; x++ {
c = m.At(x, y) // c is RGBAColor, which implements Color
cr = c.(color.RGBA) // this is needed
c = color.RGBA{cr.R, 0, 0, cr.A}
newImage.Set(x-rect.Min.X, y-rect.Min.Y, c)
}
}
return newImage
}
// Pick out only the green colors from an image
func Green(m image.Image) image.Image {
var (
rect = m.Bounds()
c color.Color
cr color.RGBA
newImage = image.NewRGBA(image.Rect(0, 0, rect.Max.X-rect.Min.X, rect.Max.Y-rect.Min.Y))
)
for y := rect.Min.Y; y < rect.Max.Y; y++ {
for x := rect.Min.X; x < rect.Max.X; x++ {
c = m.At(x, y) // c is RGBAColor, which implements Color
cr = c.(color.RGBA) // this is needed
c = color.RGBA{0, cr.G, 0, cr.A}
newImage.Set(x-rect.Min.X, y-rect.Min.Y, c)
}
}
return newImage
}
// Pick out only the blue colors from an image
func Blue(m image.Image) image.Image {
var (
rect = m.Bounds()
c color.Color
cr color.RGBA
newImage = image.NewRGBA(image.Rect(0, 0, rect.Max.X-rect.Min.X, rect.Max.Y-rect.Min.Y))
)
for y := rect.Min.Y; y < rect.Max.Y; y++ {
for x := rect.Min.X; x < rect.Max.X; x++ {
c = m.At(x, y) // c is RGBAColor, which implements Color
cr = c.(color.RGBA) // this is needed
c = color.RGBA{0, 0, cr.B, cr.A}
newImage.Set(x-rect.Min.X, y-rect.Min.Y, c)
}
}
return newImage
}
// Pick out only the colors close to the given color,
// within a given threshold. Each channel is kept (at the target's value)
// independently when its distance to the target channel is under thresh.
// NOTE(review): the int8 conversions wrap for channel values > 127, so the
// per-channel distance is wrong for large differences — confirm whether
// this is intentional before relying on the threshold semantics.
// NOTE(review): the direct c.(color.RGBA) assertion panics for non-RGBA
// images (cf. Separate, which converts via color.RGBAModel).
func CloseTo1(m image.Image, target color.RGBA, thresh uint8) image.Image {
	var (
		rect = m.Bounds()
		c color.Color
		cr color.RGBA
		r, g, b, a uint8
		newImage = image.NewRGBA(image.Rect(0, 0, rect.Max.X-rect.Min.X, rect.Max.Y-rect.Min.Y))
	)
	for y := rect.Min.Y; y < rect.Max.Y; y++ {
		for x := rect.Min.X; x < rect.Max.X; x++ {
			c = m.At(x, y) // c is RGBAColor, which implements Color
			cr = c.(color.RGBA) // this is needed
			// Channels default to 0 and are replaced per-channel below.
			r = 0
			g = 0
			b = 0
			a = cr.A
			if abs(int8(target.R)-int8(cr.R)) < thresh {
				r = target.R
			}
			if abs(int8(target.G)-int8(cr.G)) < thresh {
				g = target.G
			}
			if abs(int8(target.B)-int8(cr.B)) < thresh {
				b = target.B
			}
			c = color.RGBA{r, g, b, a}
			newImage.Set(x-rect.Min.X, y-rect.Min.Y, c)
		}
	}
	return newImage
}
// Pick out only the colors close to the given color,
// within a given threshold. Make it uniform.
// Zero alpha to unused pixels in returned image.
// Unlike CloseTo1, a pixel is kept (set to exactly target) when ANY single
// channel is within thresh of the target's channel; otherwise it becomes
// fully transparent black.
// NOTE(review): same caveats as CloseTo1 — int8 wraparound in the distance
// check and a panic-prone c.(color.RGBA) assertion for non-RGBA images.
func CloseTo2(m image.Image, target color.RGBA, thresh uint8) image.Image {
	var (
		rect = m.Bounds()
		c color.Color
		cr color.RGBA
		r, g, b, a uint8
		newImage = image.NewRGBA(image.Rect(0, 0, rect.Max.X-rect.Min.X, rect.Max.Y-rect.Min.Y))
	)
	for y := rect.Min.Y; y < rect.Max.Y; y++ {
		for x := rect.Min.X; x < rect.Max.X; x++ {
			c = m.At(x, y) // c is RGBAColor, which implements Color
			cr = c.(color.RGBA) // this is needed
			// Default: transparent black, replaced wholesale on a match.
			r = 0
			g = 0
			b = 0
			a = 0
			if abs(int8(target.R)-int8(cr.R)) < thresh || abs(int8(target.G)-int8(cr.G)) < thresh || abs(int8(target.B)-int8(cr.B)) < thresh {
				r = target.R
				g = target.G
				b = target.B
				a = cr.A
			}
			c = color.RGBA{r, g, b, a}
			newImage.Set(x-rect.Min.X, y-rect.Min.Y, c)
		}
	}
	return newImage
}
// Take orig, add the nontransparent colors from addimage, as addascolor
func AddToAs(orig image.Image, addimage image.Image, addcolor color.RGBA) image.Image {
var (
rect = addimage.Bounds()
c color.Color
cr, or color.RGBA
r, g, b, a uint8
newImage = image.NewRGBA(image.Rect(0, 0, rect.Max.X-rect.Min.X, rect.Max.Y-rect.Min.Y))
)
for y := rect.Min.Y; y < rect.Max.Y; y++ {
for x := rect.Min.X; x < rect.Max.X; x++ {
cr = addimage.At(x, y).(color.RGBA)
or = orig.At(x, y).(color.RGBA)
r = or.R
g = or.G
b = or.B
a = or.A
if cr.A > 0 {
r = addcolor.R
g = addcolor.G
b = addcolor.B
a = addcolor.A
}
c = color.RGBA{r, g, b, a}
newImage.Set(x-rect.Min.X, y-rect.Min.Y, c)
}
}
return newImage
}
// Convert RGB to hue
func Hue(cr color.RGBA) float64 {
r := float64(cr.R) / 255.0
g := float64(cr.G) / 255.0
b := float64(cr.B) / 255.0
var h float64
RGBmax := r
if g > RGBmax {
RGBmax = g
}
if b > RGBmax {
RGBmax = b
}
if RGBmax == r {
h = 60 * (g - b)
if h < 0 {
h += 360
}
} else if RGBmax == g {
h = 120 + 60*(b-r)
} else /* RGBmax == rgb.b */ {
h = 240 + 60*(r-g)
}
return h
}
// Convert RGB to HSV
func HSV(cr color.RGBA) (uint8, uint8, uint8) {
var hue, sat, val uint8
RGBmin := min(cr.R, cr.G, cr.B)
RGBmax := max(cr.R, cr.G, cr.B)
val = RGBmax
if val == 0 {
hue = 0
sat = 0
return hue, sat, val
}
sat = 255 * (RGBmax - RGBmin) / val
if sat == 0 {
hue = 0
return hue, sat, val
}
span := (RGBmax - RGBmin)
if RGBmax == cr.R {
hue = 43 * (cr.G - cr.B) / span
} else if RGBmax == cr.G {
hue = 85 + 43*(cr.B-cr.R)/span
} else { /* RGBmax == cr.B */
hue = 171 + 43*(cr.R-cr.G)/span
}
return hue, sat, val
}
// Separate an image into three colors, with a given threshold.
// Each input pixel is assigned to whichever of color1/color2/color3 is
// nearest in hue or saturation (HLS space); if it is also within thresh of
// that color on any RGB channel, the pixel is painted (as the exact target
// color) into the corresponding output image. Returns the three images in
// argument order.
// NOTE(review): the parameter t is never used — confirm whether it was
// meant as an additional tolerance.
// NOTE(review): the abs(int8(...)) distance checks wrap for channel values
// > 127, same caveat as CloseTo1/CloseTo2.
func Separate(inImage image.Image, color1, color2, color3 color.RGBA, thresh uint8, t float64) []image.Image {
	var (
		rect = inImage.Bounds()
		cr color.RGBA
		r, g, b, a uint8
		h, s float64
		images = make([]image.Image, 3) // 3 is the number of images
		newImage1 = image.NewRGBA(image.Rect(0, 0, rect.Max.X-rect.Min.X, rect.Max.Y-rect.Min.Y))
		newImage2 = image.NewRGBA(image.Rect(0, 0, rect.Max.X-rect.Min.X, rect.Max.Y-rect.Min.Y))
		newImage3 = image.NewRGBA(image.Rect(0, 0, rect.Max.X-rect.Min.X, rect.Max.Y-rect.Min.Y))
	)
	// Precompute hue and saturation of the three target colors.
	hue1, _, s1 := HLS(float64(color1.R)/255.0, float64(color1.G)/255.0, float64(color1.B)/255.0)
	hue2, _, s2 := HLS(float64(color2.R)/255.0, float64(color2.G)/255.0, float64(color2.B)/255.0)
	hue3, _, s3 := HLS(float64(color3.R)/255.0, float64(color3.G)/255.0, float64(color3.B)/255.0)
	for y := rect.Min.Y; y < rect.Max.Y; y++ {
		for x := rect.Min.X; x < rect.Max.X; x++ {
			// get the rgba color
			// cr = inImage.At(x, y).(image.RGBAColor)
			cr = color.RGBAModel.Convert(inImage.At(x, y)).(color.RGBA)
			r = 0
			g = 0
			b = 0
			a = 255
			h, _, s = HLS(float64(cr.R)/255.0, float64(cr.G)/255.0, float64(cr.B)/255.0)
			// Find the closest color of the three, measured in hue and saturation
			if ((fabs(h-hue1) < fabs(h-hue2)) && (fabs(h-hue1) < fabs(h-hue3))) ||
				((fabs(s-s1) < fabs(s-s2)) && (fabs(s-s1) < fabs(s-s3))) {
				// Only add if the color is close enough
				if abs(int8(color1.R)-int8(cr.R)) < thresh || abs(int8(color1.G)-int8(cr.G)) < thresh || abs(int8(color1.B)-int8(cr.B)) < thresh {
					r = color1.R
					g = color1.G
					b = color1.B
					newImage1.Set(x-rect.Min.X, y-rect.Min.Y, color.RGBA{r, g, b, a})
				}
			} else if ((fabs(h-hue2) < fabs(h-hue1)) && (fabs(h-hue2) < fabs(h-hue3))) ||
				((fabs(s-s2) < fabs(s-s1)) && (fabs(s-s2) < fabs(s-s3))) {
				// Only add if the color is close enough
				if abs(int8(color2.R)-int8(cr.R)) < thresh || abs(int8(color2.G)-int8(cr.G)) < thresh || abs(int8(color2.B)-int8(cr.B)) < thresh {
					r = color2.R
					g = color2.G
					b = color2.B
					newImage2.Set(x-rect.Min.X, y-rect.Min.Y, color.RGBA{r, g, b, a})
				}
			} else if ((fabs(h-hue3) < fabs(h-hue1)) && (fabs(h-hue3) < fabs(h-hue2))) ||
				((fabs(s-s3) < fabs(s-s1)) && (fabs(s-s3) < fabs(s-s2))) {
				if abs(int8(color3.R)-int8(cr.R)) < thresh || abs(int8(color3.G)-int8(cr.G)) < thresh || abs(int8(color3.B)-int8(cr.B)) < thresh {
					r = color3.R
					g = color3.G
					b = color3.B
					newImage3.Set(x-rect.Min.X, y-rect.Min.Y, color.RGBA{r, g, b, a})
				}
			}
		}
	}
	images[0] = newImage1
	images[1] = newImage2
	images[2] = newImage3
	return images
}
// HLS converts r, g, b channel values in [0, 1] to hue, lightness and
// saturation, each in [0, 1]. Ported from Python's colorsys.rgb_to_hls.
//
// Fix: Python's % operator always yields a non-negative result, while Go's
// math.Mod keeps the dividend's sign, so the hue of colors in the
// "negative" red sector (e.g. magenta/pink tones) came out negative. The
// result is now normalized into [0, 1). The fmax/fmin helpers were also
// replaced with the standard library's math.Max/math.Min.
func HLS(r, g, b float64) (float64, float64, float64) {
	maxc := math.Max(r, math.Max(g, b))
	minc := math.Min(r, math.Min(g, b))
	l := (minc + maxc) / 2.0
	if minc == maxc {
		// Achromatic: hue and saturation are 0 by convention.
		return 0.0, l, 0.0
	}
	span := maxc - minc
	var s float64
	if l <= 0.5 {
		s = span / (maxc + minc)
	} else {
		s = span / (2.0 - maxc - minc)
	}
	rc := (maxc - r) / span
	gc := (maxc - g) / span
	bc := (maxc - b) / span
	var h float64
	switch {
	case r == maxc:
		h = bc - gc
	case g == maxc:
		h = 2.0 + rc - bc
	default:
		h = 4.0 + gc - rc
	}
	h = math.Mod(h/6.0, 1.0)
	if h < 0 {
		h += 1.0
	}
	return h, l, s
}
// _v interpolates one RGB channel from the HLS intermediate values m1, m2
// and a hue offset. Ported from Python's colorsys._v.
//
// Fix: Python's % yields a non-negative remainder while Go's math.Mod keeps
// the dividend's sign, so a negative hue offset (h - 1/3 for the blue
// channel) fell into the wrong branch and extrapolated out of range. The
// remainder is now shifted into [0, 1) first.
func _v(m1, m2, hue float64) float64 {
	const (
		oneSixth = 1.0 / 6.0
		twoThird = 2.0 / 3.0
	)
	hue = math.Mod(hue, 1.0)
	if hue < 0 {
		hue += 1.0
	}
	if hue < oneSixth {
		return m1 + (m2-m1)*hue*6.0
	}
	if hue < 0.5 {
		return m2
	}
	if hue < twoThird {
		return m1 + (m2-m1)*(twoThird-hue)*6.0
	}
	return m1
}
// HLStoRGB converts hue, lightness and saturation (each in [0, 1]) back to
// r, g, b channel values in [0, 1].
// Ported from Python's colorsys.hls_to_rgb; the channel values are obtained
// by sampling the helper _v at the hue shifted by +1/3, 0 and -1/3.
func HLStoRGB(h, l, s float64) (float64, float64, float64) {
	if s == 0.0 {
		// No saturation means a gray tone: every channel equals the lightness.
		return l, l, l
	}
	var m2 float64
	if l <= 0.5 {
		m2 = l * (1.0 + s)
	} else {
		m2 = l + s - l*s
	}
	m1 := 2.0*l - m2
	const oneThird = 1.0 / 3.0
	r := _v(m1, m2, h+oneThird)
	g := _v(m1, m2, h)
	b := _v(m1, m2, h-oneThird)
	return r, g, b
}
// Mix two RGB colors, a bit like how paint mixes.
// The colors are converted to HLS; lightness and saturation are averaged,
// while the hues are averaged as angles on the color circle (via the
// vector sum of their unit vectors) so that, e.g., mixing across the
// red/magenta boundary behaves sensibly. If the two hues cancel exactly
// (opposite directions), the result is made achromatic (s = 0).
func PaintMix(c1, c2 color.RGBA) color.RGBA {
	// Thanks to <NAME> via stackoverflow
	// The less pi-presition, the greener the mix between blue and yellow
	// Using math.Pi gives a completely different result, for some reason
	//pi := math.Pi
	//pi := 3.141592653589793
	// NOTE(review): the result is deliberately sensitive to this truncated
	// pi constant (see the comments above) — do not "fix" it to math.Pi
	// without re-checking the blue+yellow mix.
	pi := 3.141592653
	//pi := 3.1415
	h1, l1, s1 := HLS(float64(c1.R)/255.0, float64(c1.G)/255.0, float64(c1.B)/255.0)
	h2, l2, s2 := HLS(float64(c2.R)/255.0, float64(c2.G)/255.0, float64(c2.B)/255.0)
	h := 0.0
	s := 0.5 * (s1 + s2)
	l := 0.5 * (l1 + l2)
	// Sum the two hue unit vectors; atan2 of the sum is the circular mean.
	x := math.Cos(2.0*pi*h1) + math.Cos(2.0*pi*h2)
	y := math.Sin(2.0*pi*h1) + math.Sin(2.0*pi*h2)
	if (x != 0.0) || (y != 0.0) {
		h = math.Atan2(y, x) / (2.0 * pi)
	} else {
		// The hues cancel out: fall back to an unsaturated (gray) mix.
		s = 0.0
	}
	r, g, b := HLStoRGB(h, l, s)
	return color.RGBA{uint8(r * 255.0), uint8(g * 255.0), uint8(b * 255.0), 255}
}
package directdebitpayment
import (
"context"
"github.com/xendit/xendit-go"
)
// CreateDirectDebitPayment creates a new direct debit payment using a
// background context.
func CreateDirectDebitPayment(data *CreateDirectDebitPaymentParams) (*xendit.DirectDebitPayment, *xendit.Error) {
	return CreateDirectDebitPaymentWithContext(context.Background(), data)
}

// CreateDirectDebitPaymentWithContext creates a new direct debit payment
// with the given context.
func CreateDirectDebitPaymentWithContext(ctx context.Context, data *CreateDirectDebitPaymentParams) (*xendit.DirectDebitPayment, *xendit.Error) {
	c, cerr := getClient()
	if cerr != nil {
		return nil, cerr
	}
	return c.CreateDirectDebitPaymentWithContext(ctx, data)
}

// ValidateOTPForDirectDebitPayment validates the OTP for a direct debit
// payment using a background context.
func ValidateOTPForDirectDebitPayment(data *ValidateOTPForDirectDebitPaymentParams) (*xendit.DirectDebitPayment, *xendit.Error) {
	return ValidateOTPForDirectDebitPaymentWithContext(context.Background(), data)
}

// ValidateOTPForDirectDebitPaymentWithContext validates the OTP for a direct
// debit payment with the given context.
func ValidateOTPForDirectDebitPaymentWithContext(ctx context.Context, data *ValidateOTPForDirectDebitPaymentParams) (*xendit.DirectDebitPayment, *xendit.Error) {
	c, cerr := getClient()
	if cerr != nil {
		return nil, cerr
	}
	return c.ValidateOTPForDirectDebitPaymentWithContext(ctx, data)
}

// GetDirectDebitPaymentStatusByID gets a direct debit payment's status by ID
// using a background context.
func GetDirectDebitPaymentStatusByID(data *GetDirectDebitPaymentStatusByIDParams) (*xendit.DirectDebitPayment, *xendit.Error) {
	return GetDirectDebitPaymentStatusByIDWithContext(context.Background(), data)
}

// GetDirectDebitPaymentStatusByIDWithContext gets a direct debit payment's
// status by ID with the given context.
func GetDirectDebitPaymentStatusByIDWithContext(ctx context.Context, data *GetDirectDebitPaymentStatusByIDParams) (*xendit.DirectDebitPayment, *xendit.Error) {
	c, cerr := getClient()
	if cerr != nil {
		return nil, cerr
	}
	return c.GetDirectDebitPaymentStatusByIDWithContext(ctx, data)
}

// GetDirectDebitPaymentStatusByReferenceID gets direct debit payment
// statuses by reference ID using a background context.
func GetDirectDebitPaymentStatusByReferenceID(data *GetDirectDebitPaymentStatusByReferenceIDParams) ([]xendit.DirectDebitPayment, *xendit.Error) {
	return GetDirectDebitPaymentStatusByReferenceIDWithContext(context.Background(), data)
}

// GetDirectDebitPaymentStatusByReferenceIDWithContext gets direct debit
// payment statuses by reference ID with the given context.
func GetDirectDebitPaymentStatusByReferenceIDWithContext(ctx context.Context, data *GetDirectDebitPaymentStatusByReferenceIDParams) ([]xendit.DirectDebitPayment, *xendit.Error) {
	c, cerr := getClient()
	if cerr != nil {
		return nil, cerr
	}
	return c.GetDirectDebitPaymentStatusByReferenceIDWithContext(ctx, data)
}
func getClient() (*Client, *xendit.Error) {
return &Client{
Opt: &xendit.Opt,
APIRequester: xendit.GetAPIRequester(),
}, nil
} | directdebit/directdebitpayment/directdebitpayment.go | 0.607314 | 0.475605 | directdebitpayment.go | starcoder |
package ameda
import (
"reflect"
"unsafe"
)
// UnsafeBytesToString convert []byte type to string type.
// The returned string aliases the slice's backing array (no copy is made),
// so the caller must not modify b afterwards — doing so would mutate the
// "immutable" string.
func UnsafeBytesToString(b []byte) string {
	return *(*string)(unsafe.Pointer(&b))
}
// UnsafeStringToBytes convert string type to []byte type.
// The returned slice aliases the string's backing memory (no copy is made).
// NOTE:
//  panic if modify the member value of the []byte.
// NOTE(review): this relies on the reflect.StringHeader/SliceHeader layout
// and does not explicitly keep s alive between reading the header and
// building the slice — confirm against the current unsafe.Pointer rules.
func UnsafeStringToBytes(s string) []byte {
	sh := (*reflect.StringHeader)(unsafe.Pointer(&s))
	bh := reflect.SliceHeader{Data: sh.Data, Len: sh.Len, Cap: sh.Len}
	return *(*[]byte)(unsafe.Pointer(&bh))
}
// IndirectValue gets the indirect value.
func IndirectValue(v reflect.Value) reflect.Value {
if !v.IsValid() {
return v
}
if v.Kind() != reflect.Ptr {
// Avoid creating a reflect.Value if it's not a pointer.
return v
}
for v.Kind() == reflect.Ptr && !v.IsNil() {
v = v.Elem()
}
return v
}
// DereferenceType dereference, get the underlying non-pointer type.
func DereferenceType(t reflect.Type) reflect.Type {
for t.Kind() == reflect.Ptr {
t = t.Elem()
}
return t
}
// DereferenceValue dereference and unpack interface,
// get the underlying non-pointer and non-interface value.
func DereferenceValue(v reflect.Value) reflect.Value {
for v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface {
v = v.Elem()
}
return v
}
// DereferencePtrValue returns the underlying non-pointer type value.
func DereferencePtrValue(v reflect.Value) reflect.Value {
for v.Kind() == reflect.Ptr {
v = v.Elem()
}
return v
}
// DereferenceInterfaceValue returns the value of the underlying type that implements the interface v.
func DereferenceInterfaceValue(v reflect.Value) reflect.Value {
for v.Kind() == reflect.Interface {
v = v.Elem()
}
return v
}
// DereferenceImplementType returns the underlying type of the value that implements the interface v.
// Interface layers are unwrapped first, then any pointer indirections are
// stripped from the resulting concrete type.
func DereferenceImplementType(v reflect.Value) reflect.Type {
	return DereferenceType(DereferenceInterfaceValue(v).Type())
}
// DereferenceSlice convert []*T to []T.
// Each element is fully dereferenced via DereferenceValue. An empty input
// yields a new, empty slice of the dereferenced element type.
// NOTE(review): for a non-empty input the result's element type is taken
// from the first dereferenced element, so all elements are assumed to share
// one concrete type — confirm callers guarantee this.
func DereferenceSlice(v reflect.Value) reflect.Value {
	m := v.Len() - 1
	if m < 0 {
		return reflect.New(reflect.SliceOf(DereferenceType(v.Type().Elem()))).Elem()
	}
	s := make([]reflect.Value, m+1)
	for ; m >= 0; m-- {
		s[m] = DereferenceValue(v.Index(m))
	}
	v = reflect.New(reflect.SliceOf(s[0].Type())).Elem()
	v = reflect.Append(v, s...)
	return v
}
// ReferenceSlice convert []T to []*T, the ptrDepth is the count of '*'.
// Each element is wrapped in ptrDepth pointer levels via ReferenceValue.
// A non-positive depth returns v unchanged; an empty input yields a new,
// empty slice of the referenced element type.
func ReferenceSlice(v reflect.Value, ptrDepth int) reflect.Value {
	if ptrDepth <= 0 {
		return v
	}
	m := v.Len() - 1
	if m < 0 {
		return reflect.New(reflect.SliceOf(ReferenceType(v.Type().Elem(), ptrDepth))).Elem()
	}
	s := make([]reflect.Value, m+1)
	for ; m >= 0; m-- {
		s[m] = ReferenceValue(v.Index(m), ptrDepth)
	}
	v = reflect.New(reflect.SliceOf(s[0].Type())).Elem()
	v = reflect.Append(v, s...)
	return v
}
// ReferenceType convert T to *T, the ptrDepth is the count of '*'.
func ReferenceType(t reflect.Type, ptrDepth int) reflect.Type {
switch {
case ptrDepth > 0:
for ; ptrDepth > 0; ptrDepth-- {
t = reflect.PtrTo(t)
}
case ptrDepth < 0:
for ; ptrDepth < 0 && t.Kind() == reflect.Ptr; ptrDepth++ {
t = t.Elem()
}
}
return t
}
// ReferenceValue convert T to *T, the ptrDepth is the count of '*'.
func ReferenceValue(v reflect.Value, ptrDepth int) reflect.Value {
switch {
case ptrDepth > 0:
for ; ptrDepth > 0; ptrDepth-- {
vv := reflect.New(v.Type())
vv.Elem().Set(v)
v = vv
}
case ptrDepth < 0:
for ; ptrDepth < 0 && v.Kind() == reflect.Ptr; ptrDepth++ {
v = v.Elem()
}
}
return v
} | vendor/github.com/henrylee2cn/ameda/typconv.go | 0.675444 | 0.40589 | typconv.go | starcoder |
package graph
import (
"fmt"
"strconv"
"strings"
)
// FIXME: Add tests!
// Node represents a generic Graph node.
// Implementations only need to be printable; node identity is compared
// with ==, so implementations must be comparable types.
type Node interface {
	String() string
}

// Nodes consists of a list of Node
type Nodes []Node

// NodeMap consists of a map of Node
type NodeMap map[Node]bool

// Graph represents a directed, weighted graph implementation
type Graph struct {
	Nodes        NodeMap         // set of all nodes in the graph
	Source, Sink Nodes           // in-degree-0 / out-degree-0 nodes, filled by ComputeSourceAndSink
	Edges        map[Node]Nodes  // adjacency list: node -> outgoing neighbors
	Weights      map[Node]intMap // edge weights: src -> dst -> weight
	InDegree     intMap          // incoming edge count per node
	OutDegree    intMap          // outgoing edge count per node
}

// IntNode represents an int Graph node
type IntNode int

// intMap maps nodes to integer counts or weights.
type intMap map[Node]int
// GraphNew initializes a new Graph structure and returns a *Graph
// with every map and slice field ready for use.
func GraphNew() *Graph {
	return &Graph{
		Nodes:     NodeMap{},
		Edges:     map[Node]Nodes{},
		Weights:   map[Node]intMap{},
		InDegree:  intMap{},
		OutDegree: intMap{},
		Source:    Nodes{},
		Sink:      Nodes{},
	}
}
// AddEdge adds a directed edge between src and dst, optionally with a weight.
// The weight defaults to 1 if not given. Both endpoints are registered as
// graph nodes and the degree counters are updated.
func (g *Graph) AddEdge(src, dst Node, opts ...int) {
	weight := 1
	if len(opts) > 0 {
		weight = opts[0]
	}
	g.Nodes[src], g.Nodes[dst] = true, true
	g.InDegree[dst]++
	g.OutDegree[src]++
	g.Edges[src] = append(g.Edges[src], dst)
	weights, ok := g.Weights[src]
	if !ok {
		weights = intMap{}
		g.Weights[src] = weights
	}
	weights[dst] = weight
}
// RemoveEdge deletes the edge between src and dst
// and decrements the corresponding degree counters. When src has no
// remaining outgoing edges its adjacency entry is dropped entirely.
func (g *Graph) RemoveEdge(src, dst Node) {
	remaining := g.Edges[src].Remove(dst)
	if len(remaining) > 0 {
		g.Edges[src] = remaining
	} else {
		delete(g.Edges, src)
	}
	g.InDegree[dst]--
	g.OutDegree[src]--
}
// HasEdge determines if there is an edge between src and dst
// by scanning src's adjacency list.
func (g *Graph) HasEdge(src, dst Node) bool {
	for _, candidate := range g.Edges[src] {
		if candidate != dst {
			continue
		}
		return true
	}
	return false
}
// Clone clones an entire Graph.
// All maps and slices are deep-copied, so mutations to the clone do not
// affect the original (the Node values themselves are shared).
func (g *Graph) Clone() *Graph {
	h := GraphNew()
	h.Nodes = g.NodesClone()
	h.Edges = g.EdgesClone()
	h.Weights = g.WeightsClone()
	h.InDegree = g.InDegreeClone()
	h.OutDegree = g.OutDegreeClone()
	h.Source = g.SourceClone()
	h.Sink = g.SinkClone()
	return h
}
// NodesClone clones the nodes of a Graph
func (g *Graph) NodesClone() NodeMap {
	clone := NodeMap{}
	for node, present := range g.Nodes {
		clone[node] = present
	}
	return clone
}

// EdgesClone clones the edges of a Graph
func (g *Graph) EdgesClone() map[Node]Nodes {
	clone := map[Node]Nodes{}
	for node, targets := range g.Edges {
		clone[node] = targets.Clone()
	}
	return clone
}

// WeightsClone clones the weights of a Graph
func (g *Graph) WeightsClone() map[Node]intMap {
	clone := map[Node]intMap{}
	for node, weights := range g.Weights {
		clone[node] = weights.Clone()
	}
	return clone
}

// InDegreeClone clones the indegrees of a Graph
func (g *Graph) InDegreeClone() intMap {
	clone := intMap{}
	for node, degree := range g.InDegree {
		clone[node] = degree
	}
	return clone
}

// OutDegreeClone clones the outdegrees of a Graph
func (g *Graph) OutDegreeClone() intMap {
	clone := intMap{}
	for node, degree := range g.OutDegree {
		clone[node] = degree
	}
	return clone
}

// SourceClone clones the source(s) of a Graph
func (g *Graph) SourceClone() Nodes {
	return g.Source.Clone()
}

// SinkClone clones the sink(s) of a Graph
func (g *Graph) SinkClone() Nodes {
	return g.Sink.Clone()
}
// ComputeSourceAndSink computes the source(s) and sink(s) of a Graph:
// nodes with in-degree zero become sources, nodes with out-degree zero
// become sinks. The previous Source/Sink slices are replaced.
func (g *Graph) ComputeSourceAndSink() {
	sources, sinks := Nodes{}, Nodes{}
	for node := range g.Nodes {
		if g.InDegree[node] == 0 {
			sources = append(sources, node)
		}
		if g.OutDegree[node] == 0 {
			sinks = append(sinks, node)
		}
	}
	g.Source, g.Sink = sources, sinks
}
// TopoSort performs a topological sort of a Graph using Kahn's algorithm.
// The graph itself is not modified (edge removal happens on a clone), but
// ComputeSourceAndSink is re-run on the receiver as a side effect.
// It panics if the graph contains a cycle (i.e. is not a DAG).
//
// Fix: the cycle case previously printed the remaining edges to stdout with
// a debug fmt.Println before panicking; the edges are now included in the
// panic message instead.
func (g *Graph) TopoSort() (ts Nodes) {
	ts = Nodes{}
	h := g.Clone()
	g.ComputeSourceAndSink()
	s := g.SourceClone()
	for len(s) > 0 {
		// Pop the next ready node and append it to the order.
		n := s[0]
		s = s[1:]
		ts = append(ts, n)
		for _, m := range g.Edges[n] {
			h.RemoveEdge(n, m)
			// m becomes ready once all of its incoming edges are consumed.
			if h.InDegree[m] == 0 {
				s = append(s, m)
			}
		}
	}
	// Any edges left in the working copy belong to a cycle.
	if len(h.Edges) > 0 {
		panic(fmt.Sprintf("graph is not a DAG; remaining edges: %v", h.Edges))
	}
	return
}
// LongestPath computes the longest path between src and dst in a Graph.
// It returns the total weight of that path and the node sequence from src
// to dst. The graph must be a DAG (TopoSort panics otherwise).
// NOTE(review): path weights are accumulated from 0 and only improved when
// strictly greater, so the algorithm assumes non-negative edge weights —
// confirm callers never use negative weights.
func (g *Graph) LongestPath(src, dst Node) (l int, path Nodes) {
	path = Nodes{}
	ts := g.TopoSort()
	// Topo sort between src and dst inclusive.
	for k, v := range ts {
		if v == src {
			ts = ts[k:]
			break
		}
	}
	for k, v := range ts {
		if v == dst {
			ts = ts[:k+1]
			break
		}
	}
	// dist[w] is the best known path weight from src to w;
	// back[w] is w's predecessor on that best path.
	dist := intMap{}
	back := map[Node]Node{}
	for i := 1; i < len(ts); i++ {
		max := 0
		for j := 0; j < i; j++ {
			v, w := ts[j], ts[i]
			if !g.HasEdge(v, w) {
				continue
			}
			curr := dist[v] + g.Weights[v][w]
			if curr > max {
				max = curr
				back[w] = v
			}
		}
		dist[ts[i]] = max
	}
	l = dist[dst]
	// Walk the back-pointers from dst to src, prepending as we go.
	// NOTE(review): if the chain is broken, back[curr] yields a nil Node
	// which is still prepended before the loop stops — confirm intended.
	curr := dst
	path = append(path, curr)
	for len(back) > 0 {
		prev := back[curr]
		path = append(Nodes{prev}, path...)
		if prev == nil || prev == src {
			break
		}
		curr = prev
	}
	return
}
// anyKey returns an arbitrary key of the map (Go map iteration order is
// unspecified), or a nil Node if the map is empty.
func (nm NodeMap) anyKey() (n Node) {
	for n = range nm {
		return
	}
	return
}
// Clone clones a Nodes list, returning an independent shallow copy
// (the Node values themselves are shared).
func (n Nodes) Clone() Nodes {
	clone := make(Nodes, len(n))
	copy(clone, n)
	return clone
}
// Remove returns a copy of n with the first occurrence of node removed,
// preserving the order of the remaining elements. The receiver itself is
// not modified.
//
// Fix: the previous implementation unconditionally dropped the first
// element, so removing a node that was not present silently lost an
// unrelated node, and calling it on an empty list panicked on out[1:].
// A missing node now simply yields an unchanged copy.
func (n Nodes) Remove(node Node) (out Nodes) {
	for i, v := range n {
		if v == node {
			out = make(Nodes, 0, len(n)-1)
			out = append(out, n[:i]...)
			out = append(out, n[i+1:]...)
			return out
		}
	}
	// Node not found: return an unchanged copy.
	out = make(Nodes, len(n))
	copy(out, n)
	return out
}
// String joins the nodes' string forms with a separator, "->" by default.
// An optional first argument overrides the separator.
func (n Nodes) String(opts ...string) string {
	sep := "->"
	if len(opts) > 0 {
		sep = opts[0]
	}
	parts := make([]string, 0, len(n))
	for _, node := range n {
		parts = append(parts, node.String())
	}
	return strings.Join(parts, sep)
}
// String returns the decimal representation of the node's value,
// satisfying the Node interface.
func (i IntNode) String() string {
	return strconv.Itoa(int(i))
}
// Clone returns an independent copy of the map.
func (wm intMap) Clone() (wo intMap) {
	wo = intMap{}
	for k, v := range wm {
		wo[k] = v
	}
	return
}
package expect
import (
"reflect"
"testing"
"github.com/google/go-cmp/cmp"
)
// Equal asserts that two values are identical. If the values are not identical
// then their differences are printed and the current test is marked to have
// failed. Comparison options (e.g. cmpopts) may be supplied via opts.
// NOTE(review): arguments are passed to cmp.Diff as (got, expected), so the
// logged diff reads from got to expected — the reverse of cmp's usual
// (want, got) convention.
func Equal(t *testing.T, got, expected interface{}, msg string, opts ...cmp.Option) {
	if diff := cmp.Diff(got, expected, opts...); diff != "" {
		t.Error(msg)
		t.Logf("\n%s", diff)
	}
}
// NotEqual asserts that two values are not identical. It the values are
// identical then the test is marked to have failed (no diff is logged,
// since an empty diff carries no information).
func NotEqual(t *testing.T, got, expected interface{}, msg string, opts ...cmp.Option) {
	if diff := cmp.Diff(got, expected, opts...); diff == "" {
		t.Error(msg)
	}
}
// Decoder is the interface implemented by types that can decode a data stream
// into a structured value, e.g. *json.Decoder or *gob.Decoder.
type Decoder interface {
	Decode(interface{}) error
}
// StreamEqual does a deep comparison of a data stream against an expected
// value. This function uses reflection to determine the type of the expected
// value in order to know how to properly decode the stream.
// expected must be a non-nil pointer; the decoded value is compared via
// Equal with the given message and options.
func StreamEqual(t *testing.T, d Decoder, expected interface{}, msg string, opts ...cmp.Option) {
	got := decodeStream(t, d, expected)
	Equal(t, got, expected, msg, opts...)
}

// StreamNotEqual does a deep comparison of a data stream against an expected
// value. This function uses reflection to determine the type of the expected
// value in order to know how to properly decode the stream.
// expected must be a non-nil pointer; the test fails if the decoded value
// equals expected.
func StreamNotEqual(t *testing.T, d Decoder, expected interface{}, msg string, opts ...cmp.Option) {
	got := decodeStream(t, d, expected)
	NotEqual(t, got, expected, msg, opts...)
}
// decodeStream decodes d into a freshly allocated value of the same type as
// expected and returns it (as a pointer, like expected). The test is failed
// immediately if expected is not a non-nil pointer, if d is nil, or if
// decoding fails.
func decodeStream(t *testing.T, d Decoder, expected interface{}) interface{} {
	dpv := reflect.ValueOf(expected)
	if dpv.Kind() != reflect.Ptr || dpv.IsNil() {
		t.Fatal("expected must be a non-nil pointer")
	}
	if d == nil {
		t.Fatal("decoder must be a non-nil type")
	}
	// Use the type of the expected value to create a new zero value of the same
	// type. This will be used to unmarshal the body of the response into.
	expectedType := reflect.TypeOf(expected)
	v := reflect.New(expectedType.Elem())
	got := v.Interface()
	if err := d.Decode(got); err != nil {
		t.Fatalf("failed to decode stream: %v", err)
	}
	return got
}
package main
/*
Programming question 2 from Stanford Datastructures and Algorithms
The file, quicksort.txt, contains all of the integers between 1 and
10,000 (inclusive, with no repeats) in unsorted order. The integer in
the i-th row of the file gives you the i-th entry of an input array.
Your task is to compute the total number of comparisons used to sort
the given input file by QuickSort. As you know, the number of
comparisons depends on which elements are chosen as pivots, so we'll
ask you to explore three different pivoting rules.
You should not count comparisons one-by-one. Rather, when there is a
recursive call on a subarray of length m, you should simply add m-1 to
your running total of comparisons. (This is because the pivot element
is compared to each of the other m - 1 elements in the subarray in this
recursive call.)
WARNING: The Partition subroutine can be implemented in several
different ways, and different implementations can give you differing
numbers of comparisons. For this problem, you should implement the
Partition subroutine exactly as it is described in the video lectures
(otherwise you might get the wrong answer).
Question 1
For the first part of the programming assignment, you should always use
the first element of the array as the pivot element.
Question 2
Compute the number of comparisons (as in Problem 1), always using the
final element of the given array as the pivot element. Again, be sure
to implement the Partition subroutine exactly as it is described in
the video lectures.
Question 3
Compute the number of comparisons (as in Problem 1), using the
"median-of-three" pivot rule. [The primary motivation behind this rule
is to do a little bit of extra work to get much better performance on
input arrays that are nearly sorted or reverse sorted.] In more
detail, you should choose the pivot as follows. Consider the first,
middle, and final elements of the given array. (If the array has odd
length it should be clear what the "middle" element is; for an array
with even length 2k, use the k-th element as the "middle" element. So
for the array 4 5 6 7, the "middle" element is the second one ---- 5
and not 6!) Identify which of these three elements is the median
(i.e., the one whose value is in between the other two), and use this
as your pivot. As discussed in the first and second parts of this
programming assignment, be sure to implement Partition exactly as
described in the video lectures (including exchanging the pivot
element with the first element just before the main Partition
subroutine).
EXAMPLE: For the input array 8 2 4 5 7 1 you would consider the first
(8), middle (4), and last (1) elements; since 4 is the median of the
set {1,4,8}, you would use 4 as your pivot element.
SUBTLE POINT: A careful analysis would keep track of the comparisons
made in identifying the median of the three candidate elements. You
should NOT do this. That is, as in the previous two problems, you
should simply add m - 1 to your running total of comparisons every
time you recurse on a subarray with length m.
*/
import (
"bufio"
"fmt"
"math/rand"
"os"
"strconv"
"time"
)
// main reads the assignment input, then counts quicksort comparisons for
// each of the three pivot-selection rules and prints the totals.
func main() {
	fmt.Println("Setting up.")
	initialSlice := setup()

	pivotMethods := []string{"first", "last", "median"}
	comparisons := make([]int, len(pivotMethods))
	for i, method := range pivotMethods {
		fmt.Printf("Using %v pivot method to calculate comparisons.\n", method)
		// Sort a fresh copy each time so every pivot rule sees the same
		// (unsorted) input rather than the previously sorted slice.
		work := make([]int, len(initialSlice))
		copy(work, initialSlice)
		_, comparisons[i] = quicksort(work, 0, len(work)-1, method)
	}
	fmt.Println("Calculations complete.")
	for i, method := range pivotMethods {
		fmt.Printf("There are %d comparisons using the %v element as the pivot element.\n", comparisons[i], method)
	}
}
// setup reads QuickSort.txt (one integer per line) and returns the integers
// in file order. On failure to open the file it reports the error and
// returns the placeholder slice {0, 1}, matching the original fallback.
//
// Fixes: the file is now closed only after a successful open (deferring
// Close on a nil *os.File was pointless), malformed lines are reported and
// skipped instead of silently appended as 0 (strconv.Atoi's error was
// ignored), and scanner.Err() is checked after the read loop.
func setup() []int {
	file, err := os.Open("QuickSort.txt")
	if err != nil {
		fmt.Println(err)
		return []int{0, 1}
	}
	defer file.Close()

	var xint []int
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		value, err := strconv.Atoi(scanner.Text())
		if err != nil {
			// Report malformed lines instead of silently inserting zeros.
			fmt.Println(err)
			continue
		}
		xint = append(xint, value)
	}
	if err := scanner.Err(); err != nil {
		fmt.Println(err)
	}
	return xint
}
// quicksort sorts xint[leftIndex..rightIndex] in place using the lecture's
// partition scheme and returns the slice together with the total number of
// comparisons, counted as m-1 for every recursive call on a subarray of
// length m (as required by the assignment).
func quicksort(xint []int, leftIndex, rightIndex int, pivotMethod string) ([]int, int) {
	// Quicksort algorithm
	// boundedListLength = m - 1
	boundedListLength := rightIndex - leftIndex
	// Base case: A difference less than 1 between the left and right
	// indices means the element is sorted and no comparisons are made.
	if boundedListLength < 1 {
		return xint, 0
	}
	pivotIndex := selectPivot(xint, leftIndex, rightIndex, pivotMethod)
	pivotValue := xint[pivotIndex]
	// Swap the pivot with the element in the leftmost index position.
	if pivotIndex != leftIndex {
		xint = swap(xint, leftIndex, pivotIndex)
	}
	// Set pointers for partitions
	// i is the pointer for the index where all elements in positions
	// less than index i are less than the pivot
	// j is the pointer to the index where all elements in positions
	// greater than index j have not yet been compared to the pivot.
	i := leftIndex + 1 // The pivot element is in the first position.
	j := i
	for j <= rightIndex {
		if xint[j] <= pivotValue {
			xint = swap(xint, i, j)
			i++
		}
		j++
	}
	// Swap the pivot element into its rightful position.
	xint = swap(xint, leftIndex, i-1)
	// Each call on a subarray of length m contributes m-1 comparisons.
	comparisons := boundedListLength
	var lowComparisons, highComparisons int
	// Sort the elements less than the pivot
	xint, lowComparisons = quicksort(xint, leftIndex, i-2, pivotMethod)
	// Sort the element greater than the pivot
	xint, highComparisons = quicksort(xint, i, j-1, pivotMethod)
	// Accumulate the number of comparisons
	comparisons += lowComparisons + highComparisons
	return xint, comparisons
}
// selectPivot returns the index of the pivot element in
// xint[leftIndex..rightIndex] according to pivotMethod: "first", "last",
// "median" (median-of-three over the first, middle and last elements), or
// any other string for a uniformly random index.
func selectPivot(xint []int, leftIndex, rightIndex int, pivotMethod string) int {
	// Selects which quicksort pivot method to use.
	var pivotIndex int
	switch pivotMethod {
	case "first":
		pivotIndex = leftIndex
	case "last":
		pivotIndex = rightIndex
	case "median":
		// pivotIndex = "median of three"
		firstElement := xint[leftIndex]
		lastElement := xint[rightIndex]
		// Determine the median value of the bounded slice.
		// For an even-length subarray this picks the lower-middle element,
		// as required by the assignment.
		boundedSliceLength := rightIndex - leftIndex
		medianIndex := boundedSliceLength/2 + leftIndex
		medianElement := xint[medianIndex]
		potentialMedianList := []int{firstElement, medianElement, lastElement}
		potentialMedianList = selectionSort(potentialMedianList)
		medianOfThree := potentialMedianList[1]
		if firstElement == medianOfThree {
			pivotIndex = leftIndex
		} else if medianElement == medianOfThree {
			pivotIndex = medianIndex
		} else {
			pivotIndex = rightIndex
		}
	default:
		// pivotIndex = randomly chosen index position
		// NOTE(review): a new time-seeded source is created on every call;
		// consider a package-level source if this path is used heavily.
		r := rand.New(rand.NewSource(time.Now().UnixNano()))
		pivotIndex = rightIndex - r.Intn(rightIndex-leftIndex+1)
	}
	return pivotIndex
}
// swap exchanges the elements at positions i and j of xint in place and
// returns the (same) slice for call-chaining convenience.
func swap(xint []int, i, j int) []int {
	xint[i], xint[j] = xint[j], xint[i]
	return xint
}
// selectionSort sorts xint in place in ascending order using selection sort
// and returns the slice. It is used by selectPivot to find the median of
// three candidate values.
//
// Improvement: the exchange now uses Go's idiomatic tuple assignment
// instead of a helper-function call, and only the minimum's index is
// tracked (the value can always be read through it).
func selectionSort(xint []int) []int {
	for i := range xint {
		minIndex := i
		for j := i + 1; j < len(xint); j++ {
			if xint[j] < xint[minIndex] {
				minIndex = j
			}
		}
		xint[i], xint[minIndex] = xint[minIndex], xint[i]
	}
	return xint
}
package contexttest
import (
"context"
"sync"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// WithCancel can be tested to have the behavior of context.WithCancel.
// Implementations receive a parent context and must return a derived
// context plus the function that cancels it.
type WithCancel func(ctx context.Context) (context.Context, context.CancelFunc)
// TestWithCancel tests the behavior of a context.WithCancel function.
// It returns a parallel test function that runs one subtest per property:
// basic cancelation, propagation through a context tree, deriving from an
// already-canceled parent, and concurrent access.
func TestWithCancel(wc WithCancel) func(t *testing.T) {
	return func(t *testing.T) {
		t.Parallel()
		t.Run("Basic", wc.testBasic)
		t.Run("Tree", wc.testTree)
		t.Run("ParentAlreadyCanceled", wc.testParentAlreadyCanceled)
		t.Run("Concurrency", wc.testConcurrency)
	}
}
// Basic functionality test: a freshly derived context is not canceled,
// and becomes canceled once its cancel function is called.
func (withCancel WithCancel) testBasic(t *testing.T) {
	t.Parallel()
	ctx, cancel := withCancel(context.Background())
	require.NotNil(t, ctx)
	require.NotNil(t, cancel)
	assertNotCanceled(t, ctx)
	cancel()
	assertCanceled(t, ctx)
}
// Context cancellation propagates only to contexts in the descendants tree,
// and to all of them.
//
// Fix: the cancel function for ctx000 was misleadingly named cancel001; it
// is now cancel000 to match its context. Behavior is unchanged.
func (withCancel WithCancel) testTree(t *testing.T) {
	t.Parallel()
	// Tree: ctx0 is the root; ctx00 and ctx01 are its children; ctx000 is
	// a child of ctx00.
	ctx0, cancel0 := withCancel(context.Background())
	ctx00, cancel00 := withCancel(ctx0)
	ctx01, cancel01 := withCancel(ctx0)
	ctx000, cancel000 := withCancel(ctx00)

	// Canceling ctx00 affects only ctx00 and its descendant ctx000.
	cancel00()
	assertNotCanceled(t, ctx0)
	assertCanceled(t, ctx00)
	assertNotCanceled(t, ctx01)
	assertCanceled(t, ctx000)

	// Canceling the root cancels the entire tree.
	cancel0()
	assertCanceled(t, ctx0)
	assertCanceled(t, ctx00)
	assertCanceled(t, ctx01)
	assertCanceled(t, ctx000)

	// Canceling already-canceled contexts must be a harmless no-op.
	cancel01()
	cancel000()
	assertCanceled(t, ctx0)
	assertCanceled(t, ctx00)
	assertCanceled(t, ctx01)
	assertCanceled(t, ctx000)
}
// Allow a child context to be cancelled after its parent was canceled.
// A context derived from an already-canceled parent must itself start out
// canceled, and calling its cancel function must still be safe.
func (withCancel WithCancel) testParentAlreadyCanceled(t *testing.T) {
	t.Parallel()
	ctx, cancel := withCancel(context.Background())
	cancel()
	assertCanceled(t, ctx)
	ctx, cancel = withCancel(ctx)
	assertCanceled(t, ctx)
	cancel()
}
// Function can be accessed from multiple goroutines without a race
// condition. Two goroutines hammer ctx.Err(), one derives child contexts,
// and the main goroutine derives and cancels contexts in a chain; the test
// is expected to be run under the race detector.
func (withCancel WithCancel) testConcurrency(t *testing.T) {
	t.Parallel()
	const n = 1000
	ctx, cancel := withCancel(context.Background())
	var wg sync.WaitGroup
	wg.Add(3)
	// Two readers polling the context's error state concurrently.
	go func(ctx context.Context) {
		for i := 0; i < n; i++ {
			ctx.Err()
		}
		wg.Done()
	}(ctx)
	go func(ctx context.Context) {
		for i := 0; i < n; i++ {
			ctx.Err()
		}
		wg.Done()
	}(ctx)
	// One writer deriving children from the shared context.
	go func(ctx context.Context) {
		for i := 0; i < n; i++ {
			withCancel(ctx)
		}
		wg.Done()
	}(ctx)
	// Meanwhile the main goroutine builds and cancels a derivation chain.
	for i := 0; i < n; i++ {
		ctx, cancel = withCancel(ctx)
		cancel()
	}
	wg.Wait()
}
// assertCanceled fails the test unless ctx reports context.Canceled and its
// Done channel is closed.
func assertCanceled(t *testing.T, ctx context.Context) {
	t.Helper()
	assert.Equal(t, context.Canceled, ctx.Err())
	select {
	case <-ctx.Done():
	default:
		t.Error("context not done")
	}
}

// assertNotCanceled fails the test if ctx reports any error or its Done
// channel is already closed.
func assertNotCanceled(t *testing.T, ctx context.Context) {
	t.Helper()
	assert.Nil(t, ctx.Err())
	select {
	case <-ctx.Done():
		t.Error("context done")
	default:
	}
}
package engine
import (
"image"
"image/color"
"math"
"math/rand"
)
// Tilemap contains tile data for
// IsoRenderer to use.
type Tilemap struct {
	// TileWidth is the tiles width in pixels.
	TileWidth int
	// Data contains values representing tiles, indexed as
	// Data[layer][y][x] with two layers (see GetTileValue); BuildCache
	// treats layer 0 as floors and layer 1 as walls.
	Data [2][][]int
	// Mapper maps data values to tile images.
	Mapper map[int]Image
	// OverlapEvent is for each tile, allowing custom
	// overlap behavior (Alpha transitions, events, etc).
	OverlapEvent TileOverlapEvent
	// bounds is the tile-index rectangle derived from Data's dimensions.
	bounds image.Rectangle
	// cache maps tile values to the positions holding them (see BuildCache).
	cache map[int][]image.Point
}

// TileOverlapEvent updates renderer state in the case of a tile overlap.
// A bool indicates whether the tile is currently overlapping or not. The tile's
// Image is passed, along with arbitrary data returned from the previous call for state.
type TileOverlapEvent func(bool, Image, interface{}) interface{}
// NewTilemap returns an instantiated *Tilemap.
// All parameters are required except for overlapEvent.
// The map bounds are derived from the dimensions of the first layer, so
// data[0] must contain at least one non-empty row.
func NewTilemap(
	tileWidth int,
	data [2][][]int,
	mapper map[int]Image,
	overlapEvent TileOverlapEvent,
) *Tilemap {
	return &Tilemap{
		TileWidth:    tileWidth,
		Data:         data,
		Mapper:       mapper,
		OverlapEvent: overlapEvent,
		bounds:       image.Rect(0, 0, len(data[0][0]), len(data[0])),
	}
}
// IsoToIndex converts isometric coordinates to a tile index.
// It inverts IndexToIso's projection, where a tile spans TileWidth/2
// horizontally and TileWidth/4 vertically in isometric space.
func (t *Tilemap) IsoToIndex(x, y float64) (int, int) {
	ix := int(math.Ceil((x/float64(t.TileWidth/2) + y/float64(t.TileWidth/4)) / 2))
	iy := int(math.Ceil((y/float64(t.TileWidth/4) - x/float64(t.TileWidth/2)) / 2))
	return ix, iy
}
// IndexToIso converts a tile index to isometric coordinates:
// x grows with i-j in half-tile steps, y with i+j in quarter-tile steps.
func (t *Tilemap) IndexToIso(i, j int) (float64, float64) {
	halfWidth, quarterWidth := t.TileWidth/2, t.TileWidth/4
	return float64((i - j) * halfWidth), float64((i + j) * quarterWidth)
}
// GetTileValue returns the value associated with a tile, or 0 when the
// layer z is not 0 or 1 or the position lies outside the map bounds.
func (t *Tilemap) GetTileValue(x, y, z int) int {
	if z != 0 && z != 1 {
		return 0
	}
	if !t.InBounds(image.Pt(x, y), 1) {
		return 0
	}
	return t.Data[z][y][x]
}
// ndirs lists the four axis-aligned unit offsets used to visit a tile's
// orthogonal neighbors.
var ndirs = [4]image.Point{
	image.Pt(1, 0),
	image.Pt(0, 1),
	image.Pt(-1, 0),
	image.Pt(0, -1),
}
// Neighbors returns points adjacent to point p
// that are fully within the tilemap.
// The size parameter can be used as a virtual tile scale: each candidate is
// offset by size tiles and must fit entirely in bounds.
// A size less than 1 causes a panic. Returns nil when no neighbor fits.
func (t *Tilemap) Neighbors(p image.Point, size int) (c []image.Point) {
	if size < 1 {
		panic("invalid size")
	}
	for _, dir := range ndirs {
		candidate := p.Add(dir.Mul(size))
		if t.InBounds(candidate, size) {
			c = append(c, candidate)
		}
	}
	return
}
// WallsAround returns positions of wall tiles (non-zero values on layer 1)
// around a given position within a given range.
// NOTE(review): the loops cover the half-open square [p-dist, p+dist) on
// both axes, so the row and column at exactly +dist are excluded while
// those at -dist are included — confirm this asymmetry is intended.
func (t *Tilemap) WallsAround(p image.Point, dist int) (c []image.Point) {
	for x := p.X - dist; x < p.X+dist; x++ {
		for y := p.Y - dist; y < p.Y+dist; y++ {
			tile := t.GetTileValue(x, y, 1)
			if tile != 0 {
				c = append(c, image.Pt(x, y))
			}
		}
	}
	return
}
// InBounds indicates if a point with a given size is within the tilemap:
// both p and the far corner of the size×size square anchored at p must lie
// inside the bounds. A size less than 1 causes a panic.
func (t *Tilemap) InBounds(p image.Point, size int) bool {
	if size < 1 {
		panic("invalid size")
	}
	farCorner := p.Add(image.Pt(size-1, size-1))
	return p.In(t.bounds) && farCorner.In(t.bounds)
}
// IsClear indicates whether a point of a given size
// contains only empty tiles.
// It is shorthand for ContainsAll with tile value 0 on layer z.
func (t *Tilemap) IsClear(p image.Point, z, size int) bool {
	return t.ContainsAll(0, p, z, size)
}
// ContainsAll indicates whether a size×size region anchored at p consists
// solely of the specified tile on layer z. Out-of-bounds regions report
// false; a size less than 1 causes a panic.
func (t *Tilemap) ContainsAll(tile int, p image.Point, z, size int) bool {
	if size < 1 {
		panic("invalid size")
	}
	if !t.InBounds(p, size) {
		return false
	}
	layer := t.Data[z]
	for y := p.Y; y < p.Y+size; y++ {
		for x := p.X; x < p.X+size; x++ {
			if layer[y][x] != tile {
				return false
			}
		}
	}
	return true
}
// ContainsAny indicates whether at least one cell of the size x size
// square anchored at p on layer z holds the specified tile value.
// It panics when size < 1 and returns false when the square is out of
// bounds.
func (t *Tilemap) ContainsAny(tile int, p image.Point, z, size int) bool {
	if size < 1 {
		panic("invalid size")
	}
	if !t.InBounds(p, size) {
		return false
	}
	for x := p.X; x < p.X+size; x++ {
		for y := p.Y; y < p.Y+size; y++ {
			if t.Data[z][y][x] == tile {
				return true
			}
		}
	}
	return false
}
// Fill writes tile into every in-bounds cell of the size x size square
// anchored at p on layer z, and returns the points that were actually
// written. Out-of-bounds cells are silently skipped. It panics when
// size < 1.
func (t *Tilemap) Fill(tile int, p image.Point, z, size int) []image.Point {
	if size < 1 {
		panic("invalid size")
	}
	filled := make([]image.Point, 0, size*size)
	for x := p.X; x < p.X+size; x++ {
		for y := p.Y; y < p.Y+size; y++ {
			pt := image.Pt(x, y)
			if !t.InBounds(pt, 1) {
				continue
			}
			t.Data[z][y][x] = tile
			filled = append(filled, pt)
		}
	}
	return filled
}
// Image takes a tile-to-color map and renders the tilemap as an image.
// The wall layer (1) is drawn after the floor layer (0), so wall colors
// overwrite floor colors at the same pixel. Tiles with no mapped color
// are left untouched.
func (t *Tilemap) Image(colors map[int]color.Color) image.Image {
	img := image.NewRGBA(t.bounds)
	for y := 0; y < t.bounds.Dy(); y++ {
		for x := 0; x < t.bounds.Dx(); x++ {
			if c, ok := colors[t.Data[0][y][x]]; ok {
				img.Set(x, y, c)
			}
			if c, ok := colors[t.Data[1][y][x]]; ok {
				img.Set(x, y, c)
			}
		}
	}
	return img
}
// BuildCache rebuilds the tile lookup cache, mapping each tile value to
// every grid position at which it appears on either layer.
//
// Fixed: a position whose floor and wall layers held the same value was
// previously appended twice under that key; it is now recorded once.
func (t *Tilemap) BuildCache() {
	t.cache = map[int][]image.Point{}
	for x := 0; x < t.bounds.Dx(); x++ {
		for y := 0; y < t.bounds.Dy(); y++ {
			pt := image.Pt(x, y)
			floor := t.Data[0][y][x]
			wall := t.Data[1][y][x]
			t.cache[floor] = append(t.cache[floor], pt)
			if wall != floor {
				t.cache[wall] = append(t.cache[wall], pt)
			}
		}
	}
}
// RandomPos returns a random position in isometric space
// contained within a given tile.
//
// NOTE(review): this panics (rand.Intn(0)) if no tile with that value is
// cached, including when BuildCache has not been called — confirm callers
// guarantee a non-empty cache entry. The 128/32 jitter constants are
// presumably the tile's pixel extents; verify against the renderer.
func (t *Tilemap) RandomPos(tile int) (float64, float64) {
	tiles := t.cache[tile]
	tp := tiles[rand.Intn(len(tiles))]
	// Anchor one tile up-left, then jitter within the tile's pixel area.
	x, y := t.IndexToIso(tp.X-1, tp.Y-1)
	x += rand.Float64() * 128
	y -= rand.Float64() * 32
	return x, y
}
package ta
import (
"math"
)
// directionalMovement computes Wilder's directional movement for a single
// bar transition and returns (minusDM, plusDM). At most one of the two is
// non-zero: the dominant strictly-positive move wins, and a tie (or no
// positive move) yields (0, 0).
func directionalMovement(curHigh float64, curLow float64, prevHigh float64, prevLow float64) (float64, float64) {
	up := curHigh - prevHigh  // upward move of the high (+DM candidate)
	down := prevLow - curLow  // downward move of the low (-DM candidate)
	switch {
	case down > 0 && down > up:
		return down, 0.0
	case up > 0 && up > down:
		return 0.0, up
	default:
		return 0.0, 0.0
	}
}
// Adx calculates the Average directional movement index of an array of HighLowClose structs given a period.
//
// It follows Wilder's recipe: accumulate raw +DM/-DM and true range over
// the first period-1 bars, Wilder-smooth them while summing DX over the
// next period bars, average that sum into the first ADX, then keep
// smoothing DX into ADX for the remaining bars. One ADX value is emitted
// per bar from index 2*period-1 onward.
//
// NOTE(review): values must contain at least 2*period entries; shorter
// input indexes past the end of the slice — confirm callers guarantee it.
func Adx(values []HighLowClose, period int) []float64 {
	// Bars consumed before the first ADX can be produced.
	lookbackTotal := (2 * period) - 1
	startIdx := lookbackTotal
	today := startIdx
	prevMinusDM := 0.0
	prevPlusDM := 0.0
	prevTR := 0.0
	// Rewind to the first bar (startIdx - lookbackTotal == 0).
	today = startIdx - lookbackTotal
	prevHigh := values[today].High
	prevLow := values[today].Low
	prevClose := values[today].Close
	// Phase 1: raw accumulation of DM and TR over the first period-1 bars.
	for i := 0; i < period-1; i++ {
		/* Calculate the prevMinusDM and prevPlusDM */
		today++
		curHigh := values[today].High
		curLow := values[today].Low
		diffM, diffP := directionalMovement(curHigh, curLow, prevHigh, prevLow)
		prevMinusDM += diffM
		prevPlusDM += diffP
		prevHigh = curHigh
		prevLow = curLow
		// prevHigh/prevLow were just updated to the current bar, so this
		// is the true range of the current bar against the prior close.
		truerange := TrueRange(prevHigh,prevLow,prevClose)
		prevTR += truerange
		prevClose = values[today].Close
	}
	/* Add up all the initial DX. */
	// Phase 2: Wilder smoothing while summing DX over the next period bars.
	sumDX := 0.0
	for i := 0; i < period; i++ {
		/* Calculate the prevMinusDM and prevPlusDM */
		today++
		prevMinusDM -= prevMinusDM/float64(period)
		prevPlusDM -= prevPlusDM/float64(period)
		curHigh := values[today].High
		curLow := values[today].Low
		diffM, diffP := directionalMovement(curHigh, curLow, prevHigh, prevLow)
		prevMinusDM += diffM
		prevPlusDM += diffP
		prevHigh = curHigh
		prevLow = curLow
		/* Calculate the prevTR */
		truerange := TrueRange(prevHigh,prevLow,prevClose)
		prevTR = prevTR - (prevTR/float64(period)) + truerange
		prevClose = values[today].Close
		/* Calculate the DX. The value is rounded (see Wilder book). */
		if prevTR != 0.0 {
			minusDI := 100.0*(prevMinusDM/prevTR)
			plusDI := 100.0*(prevPlusDM/prevTR)
			/* This loop is just to accumulate the initial DX */
			sumDI := minusDI+plusDI
			if sumDI != 0.0 {
				sumDX += (100.0 * (math.Abs(minusDI-plusDI)/sumDI))
			}
		}
	}
	/* Calculate the first ADX */
	prevADX := sumDX / float64(period)
	/* Output the first ADX */
	var results []float64
	results = append(results, prevADX)
	/* Calculate and output subsequent ADX */
	// Phase 3: smooth each new DX into the running ADX.
	for ; today < len(values)-1; {
		/* Calculate the prevMinusDM and prevPlusDM */
		today++
		prevMinusDM -= prevMinusDM/float64(period)
		prevPlusDM -= prevPlusDM/float64(period)
		curHigh := values[today].High
		curLow := values[today].Low
		diffM, diffP := directionalMovement(curHigh, curLow, prevHigh, prevLow)
		prevMinusDM += diffM
		prevPlusDM += diffP
		prevHigh = curHigh
		prevLow = curLow
		/* Calculate the prevTR */
		truerange := TrueRange(prevHigh,prevLow,prevClose)
		prevTR = prevTR - (prevTR/float64(period)) + truerange
		prevClose = values[today].Close
		if prevTR != 0.0 {
			/* Calculate the DX. The value is rounded (see Wilder book). */
			minusDI := 100.0*(prevMinusDM/prevTR)
			plusDI := 100.0*(prevPlusDM/prevTR)
			sumDI := minusDI+plusDI
			if sumDI!=0.0 {
				dx := 100.0*(math.Abs(minusDI-plusDI)/sumDI)
				/* Calculate the ADX */
				// When DX is undefined the previous ADX is carried forward.
				prevADX = ((prevADX*(float64(period)-1))+dx)/float64(period)
			}
		}
		/* Output the ADX */
		results = append(results, prevADX)
	}
	return results
}
package af
import "github.com/rannoch/cldr"
// calendar holds the CLDR date/time formatting data for the Afrikaans
// ("af") locale: the date/time/datetime skeleton patterns plus the
// localized month, weekday and day-period names at each CLDR width.
var calendar = cldr.Calendar{
	Formats: cldr.CalendarFormats{
		Date: cldr.CalendarDateFormat{Full: "EEEE, dd MMMM y", Long: "dd MMMM y", Medium: "dd MMM y", Short: "y-MM-dd"},
		Time: cldr.CalendarDateFormat{Full: "h:mm:ss a zzzz", Long: "h:mm:ss a z", Medium: "h:mm:ss a", Short: "h:mm a"},
		DateTime: cldr.CalendarDateFormat{Full: "{1} {0}", Long: "{1} {0}", Medium: "{1} {0}", Short: "{1} {0}"},
	},
	FormatNames: cldr.CalendarFormatNames{
		Months: cldr.CalendarMonthFormatNames{
			Abbreviated: cldr.CalendarMonthFormatNameValue{Jan: "Jan.", Feb: "Feb.", Mar: "Mrt.", Apr: "Apr", May: "Mei", Jun: "Jun", Jul: "Jul", Aug: "Aug", Sep: "Sep", Oct: "Okt", Nov: "Nov", Dec: "Des"},
			Narrow: cldr.CalendarMonthFormatNameValue{Jan: "J", Feb: "F", Mar: "M", Apr: "A", May: "M", Jun: "J", Jul: "J", Aug: "A", Sep: "S", Oct: "O", Nov: "N", Dec: "D"},
			Short: cldr.CalendarMonthFormatNameValue{},
			Wide: cldr.CalendarMonthFormatNameValue{Jan: "Januarie", Feb: "Februarie", Mar: "Maart", Apr: "April", May: "Mei", Jun: "Junie", Jul: "Julie", Aug: "Augustus", Sep: "September", Oct: "Oktober", Nov: "November", Dec: "Desember"},
		},
		Days: cldr.CalendarDayFormatNames{
			Abbreviated: cldr.CalendarDayFormatNameValue{Sun: "So", Mon: "Ma", Tue: "Di", Wed: "Wo", Thu: "Do", Fri: "Vr", Sat: "Sa"},
			Narrow: cldr.CalendarDayFormatNameValue{Sun: "S", Mon: "M", Tue: "D", Wed: "W", Thu: "D", Fri: "V", Sat: "S"},
			Short: cldr.CalendarDayFormatNameValue{Sun: "So.", Mon: "Ma.", Tue: "Di.", Wed: "Wo.", Thu: "Do.", Fri: "Vr.", Sat: "Sa."},
			Wide: cldr.CalendarDayFormatNameValue{Sun: "Sondag", Mon: "Maandag", Tue: "Dinsdag", Wed: "Woensdag", Thu: "Donderdag", Fri: "Vrydag", Sat: "Saterdag"},
		},
		Periods: cldr.CalendarPeriodFormatNames{
			Abbreviated: cldr.CalendarPeriodFormatNameValue{},
			Narrow: cldr.CalendarPeriodFormatNameValue{AM: "v", PM: "n"},
			Short: cldr.CalendarPeriodFormatNameValue{},
			Wide: cldr.CalendarPeriodFormatNameValue{AM: "vm.", PM: "nm."},
		},
	},
}
package main
import (
	"fmt"
	"os"
	"os/exec"
	"strings"
	"time"
)
// Grid is a 2D cell field stored as a flat slice in row-major order
// (index = x + y*Width). A cell value > 0 means the cell is alive.
type Grid struct {
	Width  int   // number of columns
	Height int   // number of rows
	Cells  []int // row-major cell values, length Width*Height
}
// NewGrid creates a Grid of the given dimensions with every cell dead (0).
//
// Improved: the value is built and returned directly instead of
// allocating a pointer and dereferencing it, and the redundant capacity
// argument to make is dropped.
func NewGrid(width int, height int) Grid {
	return Grid{
		Width:  width,
		Height: height,
		Cells:  make([]int, width*height),
	}
}
// Get returns the content of the cell at position x and y.
// Coordinates wrap toroidally via Idx.
func (grid Grid) Get(x int, y int) int {
	return grid.Cells[grid.Idx(x, y)]
}
// Set sets a value at the position x and y.
// Coordinates wrap toroidally via Idx.
func (grid *Grid) Set(x int, y int, value int) {
	grid.Cells[grid.Idx(x, y)] = value
}
// Idx returns the index into Cells for position x and y, wrapping both
// coordinates toroidally so negative and out-of-range positions map back
// onto the grid.
//
// Improved: ((v % n) + n) % n normalizes any integer (including large
// negatives) into [0, n) directly, replacing the previous unbounded
// recursion which needed one call per grid-width of negativity.
func (grid Grid) Idx(x int, y int) int {
	tx := ((x % grid.Width) + grid.Width) % grid.Width
	ty := ((y % grid.Height) + grid.Height) % grid.Height
	return tx + ty*grid.Width
}
// String implements fmt.Stringer, rendering each row as "y: " followed by
// '*' for live cells and ' ' for dead ones.
//
// Improved: accumulates into a strings.Builder instead of quadratic
// string concatenation with +=.
func (grid Grid) String() string {
	var b strings.Builder
	for y := 0; y < grid.Height; y++ {
		fmt.Fprintf(&b, "%d: ", y)
		for x := 0; x < grid.Width; x++ {
			if grid.Get(x, y) == 0 {
				b.WriteByte(' ')
			} else {
				b.WriteByte('*')
			}
		}
		b.WriteByte('\n')
	}
	return b.String()
}
// IsAlive reports whether the cell at position x and y holds a value > 0.
//
// Improved: returns the boolean expression directly instead of the
// if-true/return-false pattern.
func (grid Grid) IsAlive(x int, y int) bool {
	return grid.Cells[grid.Idx(x, y)] > 0
}
// GetNeighbors counts the live neighbors (value > 0) in the 8-cell Moore
// neighborhood of (x, y); the cell itself is excluded. Neighbors wrap
// around the grid edges via IsAlive/Idx.
func (grid Grid) GetNeighbors(x int, y int) int {
	count := 0
	for dy := -1; dy <= 1; dy++ {
		for dx := -1; dx <= 1; dx++ {
			if dx == 0 && dy == 0 {
				continue
			}
			if grid.IsAlive(x+dx, y+dy) {
				count++
			}
		}
	}
	return count
}
// Game lets us play Conway's Game of Life on a single Grid.
type Game struct {
	Grid Grid // current generation of cells
}
// NewGame creates a game with an all-dead board of size width x height.
//
// Improved: the value is built and returned directly instead of
// allocating a pointer and dereferencing it.
func NewGame(width int, height int) Game {
	return Game{Grid: NewGrid(width, height)}
}
// Step advances the game by one generation, computing the next state into
// a scratch grid and then swapping its cells in.
func (game *Game) Step() {
	next := NewGrid(game.Grid.Width, game.Grid.Height)
	for y := 0; y < game.Grid.Height; y++ {
		for x := 0; x < game.Grid.Width; x++ {
			neighbors := game.Grid.GetNeighbors(x, y)
			alive := game.Grid.IsAlive(x, y)
			// Conway's rules: a live cell survives with 2 or 3 live
			// neighbors; a dead cell becomes alive with exactly 3.
			if (alive && (neighbors == 2 || neighbors == 3)) || (!alive && neighbors == 3) {
				next.Set(x, y, 1)
			}
		}
	}
	game.Grid.Cells = next.Cells
}
// play runs an endless animated Game of Life on an 8x8 board seeded with
// a glider, redrawing the terminal roughly ten times per second. It never
// returns.
//
// Improved: uses the idiomatic bare `for` instead of `for true`.
func play() {
	game := NewGame(8, 8)
	// set a glider
	game.Grid.Set(0, 0, 1)
	game.Grid.Set(1, 1, 1)
	game.Grid.Set(2, 1, 1)
	game.Grid.Set(0, 2, 1)
	game.Grid.Set(1, 2, 1)
	// play conway
	for {
		// Best-effort screen clear; the error is deliberately ignored
		// since a failed clear only affects cosmetics.
		c := exec.Command("clear")
		c.Stdout = os.Stdout
		c.Run()
		fmt.Printf("Conway's Game on a %dx%d board:\n\n", game.Grid.Width, game.Grid.Height)
		fmt.Println(game.Grid)
		game.Step()
		time.Sleep(100 * time.Millisecond)
	}
}
// benchmark runs the given number of generations on a 100x100 board
// seeded with a glider, without rendering anything.
func benchmark(loops int) {
	game := NewGame(100, 100)
	// seed a glider
	for _, p := range [][2]int{{0, 0}, {1, 1}, {2, 1}, {0, 2}, {1, 2}} {
		game.Grid.Set(p[0], p[1], 1)
	}
	for n := 0; n < loops; n++ {
		game.Step()
	}
}
func main() {
// play()
benchmark(1000)
} | go/conway1.go | 0.765856 | 0.584923 | conway1.go | starcoder |
package shapes
import (
"math"
"github.com/factorion/graytracer/pkg/primitives"
)
// Triangle Represents a triangle.
// Point1..Point3 are the vertices; Edge1/Edge2 (vertex1->vertex2 and
// vertex1->vertex3) and the flat face normal Norm are precomputed by
// MakeTriangle for use in intersection and shading.
type Triangle struct {
	ShapeBase
	Point1, Point2, Point3, Edge1, Edge2, Norm primitives.PV
}
// MakeTriangle Create a triangle from three points.
// The two edge vectors have W forced to 0 so they act as directions, and
// the face normal is computed once since a triangle is flat.
// NOTE(review): the normal is edge2 x edge1, so vertex winding determines
// which side it faces — confirm this matches the renderer's convention.
func MakeTriangle(point1, point2, point3 primitives.PV) *Triangle {
	edge1 := point2.Subtract(point1)
	edge1.W = 0
	edge2 := point3.Subtract(point1)
	edge2.W = 0
	normal := edge2.CrossProduct(edge1).Normalize()
	return &Triangle{MakeShapeBase(), point1, point2, point3, edge1, edge2, normal}
}
// GetBounds returns an axis-aligned bounding box for the triangle: the
// per-axis min/max over the three vertices, transformed into world space.
//
// Improved: local names follow Go's MixedCaps convention instead of
// snake_case.
func (t *Triangle) GetBounds() *Bounds {
	xMin, xMax := MinMax([]float64{t.Point1.X, t.Point2.X, t.Point3.X})
	yMin, yMax := MinMax([]float64{t.Point1.Y, t.Point2.Y, t.Point3.Y})
	zMin, zMax := MinMax([]float64{t.Point1.Z, t.Point2.Z, t.Point3.Z})
	bounds := &Bounds{Min: primitives.MakePoint(xMin, yMin, zMin), Max: primitives.MakePoint(xMax, yMax, zMax)}
	return bounds.Transform(t.transform)
}
// Intersect Check if a ray intersects.
// This is the Möller–Trumbore algorithm: the ray is transformed into
// object space, then the barycentric coordinates (u, v) of the hit are
// computed; a hit is recorded only when u >= 0, v >= 0 and u+v <= 1.
// Rays (near-)parallel to the triangle plane return no hits.
func (t *Triangle) Intersect(ray primitives.Ray) Intersections {
	hits := Intersections{}
	// convert ray to object space
	oray := ray.Transform(t.inverse)
	dce2 := oray.Direction.CrossProduct(t.Edge2) // Direction crossed with edge 2
	det := t.Edge1.DotProduct(dce2)
	// |det| below EPSILON means the ray is parallel to the plane.
	if math.Abs(det) < primitives.EPSILON {
		return hits
	}
	f := 1.0 / det
	p1too := oray.Origin.Subtract(t.Point1) // origin to point 1
	u := f * p1too.DotProduct(dce2)
	if (u < 0) || (u > 1) {
		return hits
	}
	oce1 := p1too.CrossProduct(t.Edge1) // Cross product of origin and edge
	v := f * oray.Direction.DotProduct(oce1)
	if (v < 0) || ((u + v) > 1) {
		return hits
	}
	// Inside the triangle: record the distance along the ray.
	hits = append(hits, Intersection{Distance:(f * t.Edge2.DotProduct(oce1)), Obj:t})
	return hits
}
// Normal returns the triangle's surface normal in world space.
// (The previous comment said "sphere" — a copy/paste leftover.) The
// worldPoint parameter is unused because a triangle is flat: the normal
// is identical everywhere on the face.
func (t *Triangle) Normal(worldPoint primitives.PV) primitives.PV {
	worldNormal := t.ObjectToWorldPV(t.Norm)
	// Force W to 0 so the result is a direction vector before normalizing.
	worldNormal.W = 0.0
	return worldNormal.Normalize()
}
// UVMapping Return the 2D coordinates of an intersection point.
// NOTE(review): this simply drops the Z coordinate rather than projecting
// the point into the triangle's plane — presumably a placeholder that is
// only correct for XY-plane triangles; confirm before relying on textured
// triangles.
func (t *Triangle) UVMapping(point primitives.PV) primitives.PV {
	return primitives.MakePoint(point.X, point.Y, 0)
}
package proto
import (
"fmt"
"math/big"
"github.com/xlab-si/emmy/crypto/cl"
"github.com/xlab-si/emmy/crypto/common"
"github.com/xlab-si/emmy/crypto/df"
"github.com/xlab-si/emmy/crypto/ec"
"github.com/xlab-si/emmy/crypto/qr"
"github.com/xlab-si/emmy/crypto/schnorr"
)
// PbConvertibleType is intended for protobuf message wrappers that can be
// converted back to their native crypto-layer representation.
// NOTE(review): every GetNativeType method in this file returns a
// concrete type (and some return extra values), not interface{}, so none
// of them actually satisfies this interface — confirm it is still used.
type PbConvertibleType interface {
	GetNativeType() interface{}
}
// GetNativeType converts the protobuf EC group element back into its
// native ec.GroupElement, interpreting X and Y as big-endian big.Ints.
func (el *ECGroupElement) GetNativeType() *ec.GroupElement {
	return &ec.GroupElement{
		X: new(big.Int).SetBytes(el.X),
		Y: new(big.Int).SetBytes(el.Y),
	}
}
// ToPbECGroupElement converts a native ec.GroupElement into its protobuf
// form, serializing the coordinates as big-endian bytes.
//
// Improved: the composite literal is returned directly, matching the
// style of ToPbPair, instead of going through a misleadingly named
// temporary (`x`) and taking its address.
func ToPbECGroupElement(el *ec.GroupElement) *ECGroupElement {
	return &ECGroupElement{X: el.X.Bytes(), Y: el.Y.Bytes()}
}
// GetNativeType converts the protobuf pair back into its native
// common.Pair, interpreting A and B as big-endian big.Ints.
func (el *Pair) GetNativeType() *common.Pair {
	return &common.Pair{
		A: new(big.Int).SetBytes(el.A),
		B: new(big.Int).SetBytes(el.B),
	}
}
// ToPbPair converts a native common.Pair into its protobuf form,
// serializing both components as big-endian bytes.
func ToPbPair(el *common.Pair) *Pair {
	return &Pair{
		A: el.A.Bytes(),
		B: el.B.Bytes(),
	}
}
// ToPbCredRequest converts a native cl.CredRequest into its protobuf wire
// form. big.Int values are serialized as big-endian bytes, except the qr
// representation proof data (UProof), which is encoded as base-10 strings
// — presumably because those values can be negative, which Bytes() cannot
// represent; CLCredReq.GetNativeType parses them back with SetString.
func ToPbCredRequest(r *cl.CredRequest) *CLCredReq {
	knownAttrs := make([][]byte, len(r.KnownAttrs))
	for i, a := range r.KnownAttrs {
		knownAttrs[i] = a.Bytes()
	}
	commitmentsOfAttrs := make([][]byte, len(r.CommitmentsOfAttrs))
	for i, a := range r.CommitmentsOfAttrs {
		commitmentsOfAttrs[i] = a.Bytes()
	}
	// Schnorr proof of knowledge for the nym.
	pData := make([][]byte, len(r.NymProof.ProofData))
	for i, p := range r.NymProof.ProofData {
		pData[i] = p.Bytes()
	}
	nymProof := &FiatShamir{
		ProofRandomData: r.NymProof.ProofRandomData.Bytes(),
		Challenge: r.NymProof.Challenge.Bytes(),
		ProofData: pData,
	}
	// qr representation proof for U; values go out as decimal strings.
	uData := make([]string, len(r.UProof.ProofData))
	for i, p := range r.UProof.ProofData {
		uData[i] = p.String()
	}
	UProof := &FiatShamirAlsoNeg{
		ProofRandomData: r.UProof.ProofRandomData.Bytes(),
		Challenge: r.UProof.Challenge.Bytes(),
		ProofData: uData,
	}
	// Damgård-Fujisaki opening proofs, one per attribute commitment; each
	// carries exactly two proof-data values.
	proofs := make([]*FiatShamir, len(r.CommitmentsOfAttrsProofs))
	for i, proof := range r.CommitmentsOfAttrsProofs {
		pData = make([][]byte, 2)
		pData[0] = proof.ProofData1.Bytes()
		pData[1] = proof.ProofData2.Bytes()
		fs := &FiatShamir{
			ProofRandomData: proof.ProofRandomData.Bytes(),
			Challenge: proof.Challenge.Bytes(),
			ProofData: pData,
		}
		proofs[i] = fs
	}
	return &CLCredReq{
		Nym: r.Nym.Bytes(),
		KnownAttrs: knownAttrs,
		CommitmentsOfAttrs: commitmentsOfAttrs,
		NymProof: nymProof,
		U: r.U.Bytes(),
		UProof: UProof,
		CommitmentsOfAttrsProofs: proofs,
		Nonce: r.Nonce.Bytes(),
	}
}
// GetNativeType converts the protobuf credential request back into a
// native cl.CredRequest, mirroring ToPbCredRequest. It returns an error
// when any base-10 UProof data string fails to parse as a big.Int.
func (r *CLCredReq) GetNativeType() (*cl.CredRequest, error) {
	nym := new(big.Int).SetBytes(r.Nym)
	knownAttrs := make([]*big.Int, len(r.KnownAttrs))
	for i, a := range r.KnownAttrs {
		knownAttrs[i] = new(big.Int).SetBytes(a)
	}
	commitmentsOfAttrs := make([]*big.Int, len(r.CommitmentsOfAttrs))
	for i, a := range r.CommitmentsOfAttrs {
		commitmentsOfAttrs[i] = new(big.Int).SetBytes(a)
	}
	pData := make([]*big.Int, len(r.NymProof.ProofData))
	for i, p := range r.NymProof.ProofData {
		pData[i] = new(big.Int).SetBytes(p)
	}
	nymProof := schnorr.NewProof(new(big.Int).SetBytes(r.NymProof.ProofRandomData),
		new(big.Int).SetBytes(r.NymProof.Challenge), pData)
	U := new(big.Int).SetBytes(r.U)
	// UProof data travels as decimal strings (possibly negative), so it
	// must be parsed rather than byte-decoded.
	pData = make([]*big.Int, len(r.UProof.ProofData))
	for i, p := range r.UProof.ProofData {
		si, success := new(big.Int).SetString(p, 10)
		if !success {
			return nil, fmt.Errorf("error when initializing big.Int from string")
		}
		pData[i] = si
	}
	UProof := qr.NewRepresentationProof(new(big.Int).SetBytes(r.UProof.ProofRandomData),
		new(big.Int).SetBytes(r.UProof.Challenge), pData)
	// Rebuild the per-commitment Damgård-Fujisaki opening proofs; each
	// FiatShamir entry carries exactly two proof-data values.
	commitmentsOfAttrsProofs := make([]*df.OpeningProof, len(r.CommitmentsOfAttrsProofs))
	for i, proof := range r.CommitmentsOfAttrsProofs {
		openingProof := df.NewOpeningProof(new(big.Int).SetBytes(proof.ProofRandomData),
			new(big.Int).SetBytes(proof.Challenge), new(big.Int).SetBytes(proof.ProofData[0]),
			new(big.Int).SetBytes(proof.ProofData[1]))
		commitmentsOfAttrsProofs[i] = openingProof
	}
	return cl.NewCredRequest(nym, knownAttrs, commitmentsOfAttrs, nymProof, U, UProof,
		commitmentsOfAttrsProofs, new(big.Int).SetBytes(r.Nonce)), nil
}
// ToPbCLCredential packs a native CL credential together with the
// issuer's proof of correctness (AProof) into a CLCredential message.
// The single AProof data value is encoded as a base-10 string —
// presumably because it can be negative; see the SetString parse in
// CLCredential.GetNativeType.
func ToPbCLCredential(c *cl.Cred, AProof *qr.RepresentationProof) *CLCredential {
	AProofFS := &FiatShamirAlsoNeg{
		ProofRandomData: AProof.ProofRandomData.Bytes(),
		Challenge: AProof.Challenge.Bytes(),
		ProofData: []string{AProof.ProofData[0].String()},
	}
	return &CLCredential{
		A: c.A.Bytes(),
		E: c.E.Bytes(),
		V11: c.V11.Bytes(),
		AProof: AProofFS,
	}
}
// GetNativeType unpacks the protobuf credential into the native cl.Cred
// plus the issuer's correctness proof. It returns an error when the
// base-10 AProof data string cannot be parsed into a big.Int.
func (c *CLCredential) GetNativeType() (*cl.Cred, *qr.RepresentationProof, error) {
	si, success := new(big.Int).SetString(c.AProof.ProofData[0], 10)
	if !success {
		return nil, nil, fmt.Errorf("error when initializing big.Int from string")
	}
	AProof := qr.NewRepresentationProof(new(big.Int).SetBytes(c.AProof.ProofRandomData),
		new(big.Int).SetBytes(c.AProof.Challenge), []*big.Int{si})
	return cl.NewCred(new(big.Int).SetBytes(c.A), new(big.Int).SetBytes(c.E),
		new(big.Int).SetBytes(c.V11)), AProof, nil
}
// ToPbUpdateCLCredential packs the nym, nonce and the new known attribute
// values into an UpdateCLCredential message, serializing each big.Int as
// big-endian bytes.
func ToPbUpdateCLCredential(nym, nonce *big.Int, newKnownAttrs []*big.Int) *UpdateCLCredential {
	attrBytes := make([][]byte, len(newKnownAttrs))
	for i, attr := range newKnownAttrs {
		attrBytes[i] = attr.Bytes()
	}
	return &UpdateCLCredential{
		Nym:           nym.Bytes(),
		Nonce:         nonce.Bytes(),
		NewKnownAttrs: attrBytes,
	}
}
// GetNativeType unpacks the update message, returning the nym, the nonce
// and the new known attribute values as big.Ints.
func (u *UpdateCLCredential) GetNativeType() (*big.Int, *big.Int, []*big.Int) {
	attrs := make([]*big.Int, len(u.NewKnownAttrs))
	for i, a := range u.NewKnownAttrs {
		attrs[i] = new(big.Int).SetBytes(a)
	}
	return new(big.Int).SetBytes(u.Nym), new(big.Int).SetBytes(u.Nonce), attrs
}
// ToPbProveCLCredential packs a credential-possession proof into a
// ProveCLCredential message: the randomized credential A, its
// representation proof (data as base-10 strings, since the values may be
// negative per the SetString parse on the receiving side), the attribute
// values/commitments, and the indices of the attributes being revealed.
func ToPbProveCLCredential(A *big.Int, proof *qr.RepresentationProof,
	knownAttrs, commitmentsOfAttrs []*big.Int,
	revealedKnownAttrsIndices, revealedCommitmentsOfAttrsIndices []int) *ProveCLCredential {
	pData := make([]string, len(proof.ProofData))
	for i, p := range proof.ProofData {
		pData[i] = p.String()
	}
	proofFS := &FiatShamirAlsoNeg{
		ProofRandomData: proof.ProofRandomData.Bytes(),
		Challenge: proof.Challenge.Bytes(),
		ProofData: pData,
	}
	kAttrs := make([][]byte, len(knownAttrs))
	for i, a := range knownAttrs {
		kAttrs[i] = a.Bytes()
	}
	cAttrs := make([][]byte, len(commitmentsOfAttrs))
	for i, a := range commitmentsOfAttrs {
		cAttrs[i] = a.Bytes()
	}
	// Index slices are widened to int32 for the protobuf wire format.
	revealedKnownAttrs := make([]int32, len(revealedKnownAttrsIndices))
	for i, a := range revealedKnownAttrsIndices {
		revealedKnownAttrs[i] = int32(a)
	}
	revealedCommitmentsOfAttrs := make([]int32, len(revealedCommitmentsOfAttrsIndices))
	for i, a := range revealedCommitmentsOfAttrsIndices {
		revealedCommitmentsOfAttrs[i] = int32(a)
	}
	return &ProveCLCredential{
		A: A.Bytes(),
		Proof: proofFS,
		KnownAttrs: kAttrs,
		CommitmentsOfAttrs: cAttrs,
		RevealedKnownAttrs: revealedKnownAttrs,
		RevealedCommitmentsOfAttrs: revealedCommitmentsOfAttrs,
	}
}
// GetNativeType unpacks the proof message back into native values,
// mirroring ToPbProveCLCredential. It returns an error when any base-10
// proof-data string fails to parse as a big.Int.
func (p *ProveCLCredential) GetNativeType() (*big.Int, *qr.RepresentationProof, []*big.Int,
	[]*big.Int, []int, []int, error) {
	attrs := make([]*big.Int, len(p.KnownAttrs))
	for i, a := range p.KnownAttrs {
		attrs[i] = new(big.Int).SetBytes(a)
	}
	cAttrs := make([]*big.Int, len(p.CommitmentsOfAttrs))
	for i, a := range p.CommitmentsOfAttrs {
		cAttrs[i] = new(big.Int).SetBytes(a)
	}
	pData := make([]*big.Int, len(p.Proof.ProofData))
	// Note: the loop variables below shadow the receiver p; the range
	// expression is resolved in the enclosing scope first, so this is
	// correct, if confusing to read.
	for i, p := range p.Proof.ProofData {
		si, success := new(big.Int).SetString(p, 10)
		if !success {
			return nil, nil, nil, nil, nil, nil, fmt.Errorf("error when initializing big.Int from string")
		}
		pData[i] = si
	}
	proof := qr.NewRepresentationProof(new(big.Int).SetBytes(p.Proof.ProofRandomData),
		new(big.Int).SetBytes(p.Proof.Challenge), pData)
	revealedKnownAttrsIndices := make([]int, len(p.RevealedKnownAttrs))
	for i, a := range p.RevealedKnownAttrs {
		revealedKnownAttrsIndices[i] = int(a)
	}
	revealedCommitmentsOfAttrsIndices := make([]int, len(p.RevealedCommitmentsOfAttrs))
	for i, a := range p.RevealedCommitmentsOfAttrs {
		revealedCommitmentsOfAttrsIndices[i] = int(a)
	}
	return new(big.Int).SetBytes(p.A), proof, attrs, cAttrs, revealedKnownAttrsIndices,
		revealedCommitmentsOfAttrsIndices, nil
}
package internal
import (
"fmt"
"reflect"
"github.com/onsi/gomega/types"
)
// Assertion holds one or more actual values under test together with the
// bookkeeping needed to run a matcher against the target value and report
// failures at the caller's location.
type Assertion struct {
	actuals []interface{} // actual value plus all extra values
	actualIndex int // value to pass to the matcher
	vet vetinari // the vet to call before calling Gomega matcher
	offset int // extra stack frames to skip when reporting a failure
	g *Gomega
}
// ...obligatory discworld reference, as "vetineer" doesn't sound ... quite right.
// A vetinari validates the assertion's extra values before the matcher runs,
// reporting a failure and returning false when validation fails.
type vetinari func(assertion *Assertion, optionalDescription ...interface{}) bool
// NewAssertion wraps actualInput (plus any extra return values) in an
// Assertion that vets all values and matches against the first one.
// offset is the number of extra caller stack frames to skip when a
// failure is reported.
func NewAssertion(actualInput interface{}, g *Gomega, offset int, extra ...interface{}) *Assertion {
	return &Assertion{
		actuals: append([]interface{}{actualInput}, extra...),
		actualIndex: 0,
		vet: (*Assertion).vetActuals,
		offset: offset,
		g: g,
	}
}
// WithOffset sets the stack-frame offset used when reporting failures and
// returns the assertion itself for chaining.
func (assertion *Assertion) WithOffset(offset int) types.Assertion {
	assertion.offset = offset
	return assertion
}
// Error returns a derived assertion that targets the last actual value
// (the trailing error return, per Go convention) and vets the remaining
// values only when that error is non-nil.
func (assertion *Assertion) Error() types.Assertion {
	return &Assertion{
		actuals: assertion.actuals,
		actualIndex: len(assertion.actuals) - 1,
		vet: (*Assertion).vetError,
		offset: assertion.offset,
		g: assertion.g,
	}
}
// Should asserts that the actual value satisfies the given matcher.
//
// NOTE(review): Should/To and ShouldNot/ToNot/NotTo are deliberately left
// as duplicated one-liners rather than delegating through a shared
// helper: match reports failures with a frame count of 2+offset, so an
// intermediate call would presumably shift the reported caller location —
// confirm against Gomega's Fail contract before refactoring.
func (assertion *Assertion) Should(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool {
	assertion.g.THelper()
	return assertion.vet(assertion, optionalDescription...) && assertion.match(matcher, true, optionalDescription...)
}
// ShouldNot asserts that the actual value does not satisfy the matcher.
func (assertion *Assertion) ShouldNot(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool {
	assertion.g.THelper()
	return assertion.vet(assertion, optionalDescription...) && assertion.match(matcher, false, optionalDescription...)
}
// To is a synonym of Should.
func (assertion *Assertion) To(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool {
	assertion.g.THelper()
	return assertion.vet(assertion, optionalDescription...) && assertion.match(matcher, true, optionalDescription...)
}
// ToNot is a synonym of ShouldNot.
func (assertion *Assertion) ToNot(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool {
	assertion.g.THelper()
	return assertion.vet(assertion, optionalDescription...) && assertion.match(matcher, false, optionalDescription...)
}
// NotTo is a synonym of ShouldNot.
func (assertion *Assertion) NotTo(matcher types.GomegaMatcher, optionalDescription ...interface{}) bool {
	assertion.g.THelper()
	return assertion.vet(assertion, optionalDescription...) && assertion.match(matcher, false, optionalDescription...)
}
// buildDescription renders the optional failure description: no arguments
// yields "", a single func() string is invoked lazily, and otherwise the
// first argument is used as a fmt format string with the rest as its
// operands. A trailing newline is appended in the non-empty cases.
// Note: a lone argument that is neither func() string nor string falls
// through to the type assertion below and panics.
func (assertion *Assertion) buildDescription(optionalDescription ...interface{}) string {
	switch len(optionalDescription) {
	case 0:
		return ""
	case 1:
		if describe, ok := optionalDescription[0].(func() string); ok {
			return describe() + "\n"
		}
	}
	return fmt.Sprintf(optionalDescription[0].(string), optionalDescription[1:]...) + "\n"
}
// match runs the matcher against the target actual value and fails the
// assertion (via g.Fail) when the outcome differs from desiredMatch or
// when the matcher itself errors. It returns true only when the
// assertion held.
func (assertion *Assertion) match(matcher types.GomegaMatcher, desiredMatch bool, optionalDescription ...interface{}) bool {
	actualInput := assertion.actuals[assertion.actualIndex]
	matches, err := matcher.Match(actualInput)
	assertion.g.THelper()
	if err != nil {
		// A matcher error (e.g. unsupported actual type) is reported as
		// a failure using the matcher's own error text.
		description := assertion.buildDescription(optionalDescription...)
		assertion.g.Fail(description+err.Error(), 2+assertion.offset)
		return false
	}
	if matches != desiredMatch {
		var message string
		if desiredMatch {
			message = matcher.FailureMessage(actualInput)
		} else {
			message = matcher.NegatedFailureMessage(actualInput)
		}
		description := assertion.buildDescription(optionalDescription...)
		// 2+offset presumably skips match and its caller so the failure
		// points at the user's assertion site — confirm before changing
		// the call depth through this function.
		assertion.g.Fail(description+message, 2+assertion.offset)
		return false
	}
	return true
}
// vetActuals vets the actual values, with the (optional) exception of a
// specific value, such as the first value in case non-error assertions, or the
// last value in case of Error()-based assertions.
// On failure it reports through g.Fail (prefixed with any optional
// description) and returns false.
func (assertion *Assertion) vetActuals(optionalDescription ...interface{}) bool {
	success, message := vetActuals(assertion.actuals, assertion.actualIndex)
	if success {
		return true
	}
	description := assertion.buildDescription(optionalDescription...)
	assertion.g.THelper()
	assertion.g.Fail(description+message, 2+assertion.offset)
	return false
}
// vetError vets the actual values, except for the final error value, in case
// the final error value is non-zero. Otherwise, it doesn't vet the actual
// values, as these are allowed to take on any values unless there is a non-zero
// error value.
func (assertion *Assertion) vetError(optionalDescription ...interface{}) bool {
	if err := assertion.actuals[assertion.actualIndex]; err != nil {
		// Go error result idiom: all other actual values must be zero values.
		return assertion.vetActuals(optionalDescription...)
	}
	return true
}
// vetActuals checks a slice of actual values, optionally skipping the
// element at skipIndex (such as the first or last value slice element).
// It reports failure, with a descriptive message, for the first remaining
// element that is neither nil nor its type's zero value.
func vetActuals(actuals []interface{}, skipIndex int) (bool, string) {
	for i, actual := range actuals {
		if i == skipIndex || actual == nil {
			continue
		}
		zero := reflect.Zero(reflect.TypeOf(actual)).Interface()
		if reflect.DeepEqual(zero, actual) {
			continue
		}
		return false, fmt.Sprintf("Unexpected non-nil/non-zero argument at index %d:\n\t<%T>: %#v", i, actual, actual)
	}
	return true, ""
}
package mailbox
import (
"crypto/rand"
"runtime/debug"
"github.com/kkdai/bstream"
"github.com/lightningnetwork/lnd/aezeed"
"golang.org/x/crypto/scrypt"
)
const (
	// NumPassphraseWords is the number of words we use for the pairing
	// phrase.
	NumPassphraseWords = 10
	// NumPassphraseEntropyBytes is the number of bytes we use for the
	// pairing phrase. This must be:
	// ceil( (NumPassphraseWords * aezeed.BitsPerWord) / 8 )
	NumPassphraseEntropyBytes = 14
	// scryptKeyLen is the amount of bytes we'll generate from the scrpt
	// invocation. Using the passphrase entropy as the passphraseEntropy and the
	// salt.
	scryptKeyLen = 32
)
var (
	// Below are the default scrypt parameters that are tied to the initial
	// version 0 of the noise handshake. Changing any of them changes the
	// derived key, so they must stay fixed for that version.
	scryptN = 1 << 16 // 65536
	scryptR = 8
	scryptP = 1
)
// NewPassphraseEntropy generates a new one-time-use passphrase, represented as
// a set of mnemonic words and the raw entropy itself.
// The returned words and bytes are guaranteed to agree: the entropy is
// round-tripped through the mnemonic encoding so the unused trailing bits
// of the last byte are zeroed.
func NewPassphraseEntropy() ([NumPassphraseWords]string,
	[NumPassphraseEntropyBytes]byte, error) {
	var (
		passphraseEntropy [NumPassphraseEntropyBytes]byte
		passphrase [NumPassphraseWords]string
		err error
	)
	// crypto/rand supplies the initial high-entropy bytes.
	if _, err = rand.Read(passphraseEntropy[:]); err != nil {
		return passphrase, passphraseEntropy, err
	}
	// Turn the raw bytes into words. Since we read full bytes above but
	// might only use some bits of the last byte the words won't contain
	// the full data.
	passphrase, err = PassphraseEntropyToMnemonic(passphraseEntropy)
	if err != nil {
		return passphrase, passphraseEntropy, err
	}
	// To make sure the words and raw bytes match, we convert the words
	// back into raw bytes, effectively setting the last, unused bits to
	// zero.
	passphraseEntropy = PassphraseMnemonicToEntropy(passphrase)
	return passphrase, passphraseEntropy, nil
}
// PassphraseEntropyToMnemonic turns the raw bytes of the passphrase entropy
// into human-readable mnemonic words by reading aezeed.BitsPerWord bits at
// a time and indexing into the aezeed default word list.
// NOTE: This will only use NumPassphraseWords * aezeed.BitsPerWord bits of the
// provided entropy.
func PassphraseEntropyToMnemonic(
	entropy [NumPassphraseEntropyBytes]byte) ([NumPassphraseWords]string,
	error) {
	var words [NumPassphraseWords]string
	bitReader := bstream.NewBStreamReader(entropy[:])
	for i := range words {
		wordIndex, err := bitReader.ReadBits(aezeed.BitsPerWord)
		if err != nil {
			return words, err
		}
		words[i] = aezeed.DefaultWordList[wordIndex]
	}
	return words, nil
}
// PassphraseMnemonicToEntropy reverses the mnemonic word encoding and returns
// the raw passphrase entropy bytes.
// NOTE: This will only set the first NumPassphraseWords * aezeed.BitsPerWord
// bits of the entropy. The remaining bits will be set to zero.
func PassphraseMnemonicToEntropy(
	passphrase [NumPassphraseWords]string) [NumPassphraseEntropyBytes]byte {
	var passphraseEntropy [NumPassphraseEntropyBytes]byte
	cipherBits := bstream.NewBStreamWriter(NumPassphraseEntropyBytes)
	for _, word := range passphrase {
		// ReverseWordMap maps a word back to its index in the word list.
		index := uint64(aezeed.ReverseWordMap[word])
		cipherBits.WriteBits(index, aezeed.BitsPerWord)
	}
	copy(passphraseEntropy[:], cipherBits.Bytes())
	return passphraseEntropy
}
// stretchPassphrase takes a randomly generated passphrase entropy and runs it
// through scrypt with our specified parameters, returning scryptKeyLen
// bytes of derived key material.
func stretchPassphrase(passphraseEntropy []byte) ([]byte, error) {
	// Note that we use the passphrase entropy again as the salt itself, as
	// we always generate the pairing phrase from a high entropy source.
	rawPairingBytes, err := scrypt.Key(
		passphraseEntropy, passphraseEntropy, scryptN, scryptR, scryptP,
		scryptKeyLen,
	)
	if err != nil {
		return nil, err
	}
	// This ends up generating a lot of memory, so we'll actually force a
	// manual GC collection here to keep down the memory constraints.
	debug.FreeOSMemory()
	return rawPairingBytes, nil
}
package fsm
import (
"errors"
)
// transitionFunc is a callback invoked when a state is entered or exited.
// A non-nil error aborts the operation in progress.
type transitionFunc func() error

// FSM is an implementation of a finite state machine.
type FSM struct {
	// currentState is the current state that the FSM is in.
	currentState string
	// transitionMap maps a single state to N valid transition states.
	transitionMap map[string][]string
	// enterFuncs maps a state to the function called when it is entered.
	enterFuncs map[string]transitionFunc
	// exitFuncs maps a state to the function called when it is exited.
	exitFuncs map[string]transitionFunc
}

// NewFSM creates a new finite state machine. The starting state is empty and
// you must start the FSM by calling Start() after adding all transitions and
// the proper entrance functions.
//
// Improved: drops the non-idiomatic zero size hints previously passed to
// make, and relies on the zero value for currentState.
func NewFSM() *FSM {
	return &FSM{
		transitionMap: make(map[string][]string),
		enterFuncs:    make(map[string]transitionFunc),
		exitFuncs:     make(map[string]transitionFunc),
	}
}

// AddTransition adds a transition from stateA to stateB. States that
// transition to themselves are valid. Adding the same transition twice is a
// no-op.
func (f *FSM) AddTransition(stateA string, stateB string) {
	// Ranging over a missing key yields a nil slice, and append handles
	// nil, so no explicit initialization is required.
	for _, existing := range f.transitionMap[stateA] {
		if existing == stateB {
			return
		}
	}
	f.transitionMap[stateA] = append(f.transitionMap[stateA], stateB)
}

// OnEnter registers fun to be called whenever state is entered. A later
// call for the same state replaces the earlier function. It always
// returns nil (the error return is kept for interface stability).
func (f *FSM) OnEnter(state string, fun transitionFunc) error {
	f.enterFuncs[state] = fun
	return nil
}

// OnExit registers fun to be called whenever state is exited. A later
// call for the same state replaces the earlier function. It always
// returns nil (the error return is kept for interface stability).
func (f *FSM) OnExit(state string, fun transitionFunc) error {
	f.exitFuncs[state] = fun
	return nil
}

// Start initializes the finite state machine with the start state. The start
// state's OnEnter func, if any, runs first; if it fails, the FSM remains
// unstarted and the error is returned.
func (f *FSM) Start(startState string) error {
	if enter, ok := f.enterFuncs[startState]; ok {
		if err := enter(); err != nil {
			return err
		}
	}
	f.currentState = startState
	return nil
}

// Transition moves the FSM from the current state to nextState. The current
// state's OnExit func and the next state's OnEnter func are called, in that
// order; an error from either aborts the transition and leaves currentState
// unchanged (note that a successful OnExit will already have run if OnEnter
// then fails). An error is returned for invalid transitions.
//
// Fixed: the error for a current state with no outgoing transitions
// previously claimed that nextState "does not exist", which misidentified
// both the state and the problem.
func (f *FSM) Transition(nextState string) error {
	transitions, ok := f.transitionMap[f.currentState]
	if !ok {
		return errors.New("could not transition to " + nextState + ": state " + f.currentState + " has no outgoing transitions")
	}
	valid := false
	for _, transition := range transitions {
		if transition == nextState {
			valid = true
			break
		}
	}
	if !valid {
		return errors.New("Invalid transition from " + f.currentState + " to " + nextState)
	}
	if exit, ok := f.exitFuncs[f.currentState]; ok {
		if err := exit(); err != nil {
			return err
		}
	}
	if enter, ok := f.enterFuncs[nextState]; ok {
		if err := enter(); err != nil {
			return err
		}
	}
	f.currentState = nextState
	return nil
}
package table
import (
"bufio"
"bytes"
"fmt"
"io"
"regexp"
"strings"
"unicode/utf8"
)
// Alignment represents the supported cell content alignment modes.
type Alignment uint8

const (
	// AlignLeft pads content on the right.
	AlignLeft Alignment = iota
	// AlignCenter pads content evenly on both sides.
	AlignCenter
	// AlignRight pads content on the left.
	AlignRight
)
// CharacterFilter defines the character filter modes supported by the table writer.
type CharacterFilter uint8

const (
	// PreserveAnsi keeps ANSI escape sequences in the output (they are still
	// excluded from width calculations; see measure).
	PreserveAnsi CharacterFilter = iota
	// StripAnsi removes ANSI escape sequences before rendering.
	StripAnsi
)
var (
	// ansiEscapeRegex matches ANSI SGR (color) escape sequences, e.g. "\x1b[31m".
	ansiEscapeRegex = regexp.MustCompile(`\x1b\[[0-9;]*m`)
	// tableColSplitRegex splits comma-separated column lists.
	// NOTE(review): not referenced in this file — confirm it is used elsewhere.
	tableColSplitRegex = regexp.MustCompile(`\s*,\s*`)
)
// Header groups are used to define headers that span multiple columns.
type headerGroup struct {
	header    string    // group title rendered above the spanned columns
	alignment Alignment // how the title is aligned within the group
	colSpan   int       // number of consecutive columns the group covers
}
// A table that can be rendered in a terminal.
type Table struct {
	headers      []string      // one title per column
	headerGroups []headerGroup // optional super-headers spanning columns
	rows         [][]string    // cell contents; each row must have len(headers) cells
	alignments   []Alignment   // per-column alignment
	padding      int           // spaces added on each side of cell content
}
// Create a new empty table with the specified number of columns.
// Padding defaults to 0 and all columns default to AlignLeft.
func New(columns int) *Table {
	t := &Table{
		headers:      make([]string, columns),
		headerGroups: []headerGroup{},
		rows:         [][]string{},
		alignments:   make([]Alignment, columns),
	}
	return t
}
// Set cell padding for cell contents. If a negative padding is specified, a
// padding value of 0 will be forced.
func (t *Table) SetPadding(padding int) {
	if padding >= 0 {
		t.padding = padding
	} else {
		t.padding = 0
	}
}
// Set header title and column alignment settings. Column indices are 0-based.
// Returns an error for an out-of-range column index.
func (t *Table) SetHeader(col int, title string, alignment Alignment) error {
	if col < 0 || col >= len(t.headers) {
		return fmt.Errorf("index out of range while attempting to set table header for column %d", col)
	}
	t.headers[col] = title
	t.alignments[col] = alignment
	return nil
}
// Add a super-group for a set of header columns. If the requested colSpan exceeds
// the number of available un-grouped header columns this method returns an error.
// Groups always cover consecutive columns, starting after the last group added.
func (t *Table) AddHeaderGroup(colSpan int, title string, alignment Alignment) error {
	// Count how many columns previous groups already claim.
	alreadyGrouped := 0
	for _, existing := range t.headerGroups {
		alreadyGrouped += existing.colSpan
	}
	total := len(t.headers)
	if alreadyGrouped+colSpan > total {
		return fmt.Errorf("requested header group colspan %d exceeds the available columns for grouping %d/%d", colSpan, alreadyGrouped, total)
	}
	t.headerGroups = append(t.headerGroups, headerGroup{
		header:    title,
		colSpan:   colSpan,
		alignment: alignment,
	})
	return nil
}
// Append one or more rows to the table.
// All rows are validated against the column count before anything is appended,
// so a failed call leaves the table unmodified.
func (t *Table) Append(rows ...[]string) error {
	colCount := len(t.headers)
	for rowIndex, row := range rows {
		if len(row) != colCount {
			// Bug fix: corrected "colums" -> "columns" in the error message.
			return fmt.Errorf("inconsistent number of columns for row %d; expected %d but got %d", rowIndex, colCount, len(row))
		}
	}
	t.rows = append(t.rows, rows...)
	return nil
}
// Render table to an io.Writer. The charFilter parameter can be used to
// either preserve or strip ANSI characters from the output.
func (t *Table) Write(to io.Writer, charFilter CharacterFilter) {
	stripAnsiChars := charFilter == StripAnsi
	w := bufio.NewWriter(to)
	padding := strings.Repeat(" ", t.padding)
	// Calculate col widths and use them to calculate group heading widths
	colWidths := t.colWidths()
	// Render header groups if defined
	if len(t.headerGroups) > 0 {
		var groupWidths []int
		// groupWidths may widen some columns so long group titles still fit.
		groupWidths, colWidths = t.groupWidths(colWidths)
		hLine := t.hLine(groupWidths)
		w.WriteString(hLine)
		w.WriteByte('|')
		for hgIndex, hg := range t.headerGroups {
			w.WriteString(padding)
			w.WriteString(t.align(hg.header, hg.alignment, groupWidths[hgIndex], stripAnsiChars))
			w.WriteString(padding)
			w.WriteByte('|')
		}
		w.WriteString("\n")
		w.WriteString(hLine)
	}
	// Render headers
	hLine := t.hLine(colWidths)
	if len(t.headerGroups) == 0 {
		// Without groups, the header row carries the top border itself.
		w.WriteString(hLine)
	}
	w.WriteByte('|')
	for colIndex, h := range t.headers {
		w.WriteString(padding)
		w.WriteString(t.align(h, t.alignments[colIndex], colWidths[colIndex], stripAnsiChars))
		w.WriteString(padding)
		w.WriteByte('|')
	}
	w.WriteString("\n")
	w.WriteString(hLine)
	// Render rows
	for _, row := range t.rows {
		w.WriteByte('|')
		for colIndex, c := range row {
			w.WriteString(padding)
			w.WriteString(t.align(c, t.alignments[colIndex], colWidths[colIndex], stripAnsiChars))
			w.WriteString(padding)
			w.WriteByte('|')
		}
		w.WriteString("\n")
	}
	// Render footer line if the table is not empty
	if len(t.rows) > 0 {
		w.WriteString(hLine)
	}
	// Flush buffered output; any underlying write error is discarded here.
	w.Flush()
}
// Generate horizontal line, e.g. "+----+--+\n", sized to the given column
// widths plus cell padding on both sides.
func (t *Table) hLine(colWidths []int) string {
	var line strings.Builder
	line.WriteByte('+')
	pad := 2 * t.padding
	for _, w := range colWidths {
		line.WriteString(strings.Repeat("-", w+pad))
		line.WriteByte('+')
	}
	line.WriteByte('\n')
	return line.String()
}
// Pad and align input string to maxWidth. ANSI escape codes are excluded from
// the width calculation (and removed entirely when stripAnsiChars is set).
func (t *Table) align(val string, align Alignment, maxWidth int, stripAnsiChars bool) string {
	var width int
	if stripAnsiChars {
		val = ansiEscapeRegex.ReplaceAllString(val, "")
		width = utf8.RuneCountInString(val)
	} else {
		width = measure(val)
	}
	gap := maxWidth - width
	if align == AlignLeft {
		return val + strings.Repeat(" ", gap)
	}
	if align == AlignRight {
		return strings.Repeat(" ", gap) + val
	}
	// AlignCenter (and any other value): split the slack, extra space on the right.
	left := gap / 2
	return strings.Repeat(" ", left) + val + strings.Repeat(" ", gap-left)
}
// Calculate max width for each column, considering both the header and every
// row's cell (ANSI escapes excluded from cell measurement).
func (t *Table) colWidths() []int {
	widths := make([]int, len(t.headers))
	for col := range t.headers {
		widest := utf8.RuneCountInString(t.headers[col])
		for _, row := range t.rows {
			if w := measure(row[col]); w > widest {
				widest = w
			}
		}
		widths[col] = widest
	}
	return widths
}
// Calculate max width for each header group. If a group header's width exceeds
// the total width of the grouped columns, they will be automatically expanded
// to preserve alignment with the group header.
// Returns the per-group widths and a (possibly widened) copy of colWidths.
func (t *Table) groupWidths(colWidths []int) (groupWidths []int, adjustedColWidths []int) {
	// Work on a copy so the caller's slice is never mutated.
	adjustedColWidths = append([]int{}, colWidths...)
	groupWidths = make([]int, len(t.headerGroups))
	groupStartCol := 0
	for groupIndex, group := range t.headerGroups {
		// Calculate group width based on the grouped columns
		groupWidth := 0
		for ci := groupStartCol; ci < groupStartCol+group.colSpan; ci++ {
			groupWidth += colWidths[ci]
		}
		// Include separators and padding for inner columns to width
		// (each inner column boundary contributes one '|' and padding on both sides).
		if group.colSpan > 1 {
			groupWidth += (group.colSpan - 1) * (1 + 2*t.padding)
		}
		// Calculate group width based on padding and group title. If its
		// greater than the calculated groupWidth, append the extra space to the last group col
		contentWidth := 2*t.padding + utf8.RuneCountInString(group.header)
		if contentWidth > groupWidth {
			adjustedColWidths[groupStartCol+group.colSpan-1] += contentWidth - groupWidth
			groupWidth = contentWidth
		}
		groupWidths[groupIndex] = groupWidth
		groupStartCol += group.colSpan
	}
	return groupWidths, adjustedColWidths
}
// Measure string length excluding any Ansi color escape codes.
func measure(val string) int {
return utf8.RuneCountInString(ansiEscapeRegex.ReplaceAllString(val, ""))
} | vendor/github.com/geckoboard/cli-table/table.go | 0.708313 | 0.405566 | table.go | starcoder |
package days
import (
"fmt"
"strconv"
"strings"
"joshatron.io/aoc2021/input"
)
// Day08Puzzle1 counts output digits that use a unique number of segments
// (1, 4, 7 and 8 use 2, 4, 3 and 7 segments respectively).
func Day08Puzzle1() string {
	displays := getDisplays(input.SplitIntoLines(input.ReadDayInput("08")))
	uniques := 0
	for _, d := range displays {
		for _, o := range d.output {
			switch len(o) {
			case 2, 3, 4, 7:
				uniques++
			}
		}
	}
	return fmt.Sprint(uniques)
}
// Display is one puzzle entry: the observed signal patterns (replaced by
// digit-indexed patterns after unscramble) and the output digit strings.
type Display struct {
	patterns, output []string
}
// getDisplays parses each input line of the form "patterns | output" into a
// Display and unscrambles its patterns immediately.
func getDisplays(lines []string) []Display {
	result := make([]Display, 0, len(lines))
	for _, line := range lines {
		halves := strings.Split(line, " | ")
		display := Display{strings.Fields(halves[0]), strings.Fields(halves[1])}
		display.unscramble()
		result = append(result, display)
	}
	return result
}
// Day08Puzzle2 sums the decoded four-digit output value of every display.
func Day08Puzzle2() string {
	sum := 0
	for _, d := range getDisplays(input.SplitIntoLines(input.ReadDayInput("08"))) {
		sum += d.parseOutput()
	}
	return fmt.Sprint(sum)
}
//0- length 6, shares 2 letters with 1 and 3 letters with 4
//1- length 2
//2- length 5, shares 1 letters with 1 and 2 letters with 4
//3- length 5, shares 2 letters with 1 and 3 letters with 4
//4- length 4
//5- length 5, shares 1 letters with 1 and 3 letters with 4
//6- length 6, shares 1 letters with 1 and 3 letters with 4
//7- length 3
//8- length 7
//9- length 6, shares 2 letters with 1 and 4 letters with 4
// unscramble replaces d.patterns with a digit-indexed slice: position i holds
// the segment pattern for digit i. Digits 1, 4, 7 and 8 are identified by
// their unique segment counts; the rest by how many segments they share with
// the known patterns for 1 and 4 (see the table above).
func (d *Display) unscramble() {
	decoded := make([]string, 10)
	// First pass: digits identifiable by segment count alone.
	for _, pattern := range d.patterns {
		switch len(pattern) {
		case 2:
			decoded[1] = pattern
		case 4:
			decoded[4] = pattern
		case 3:
			decoded[7] = pattern
		case 7:
			decoded[8] = pattern
		}
	}
	// Second pass: disambiguate 5- and 6-segment digits via overlap with 1 and 4.
	for _, pattern := range d.patterns {
		withOne := charsShared(pattern, decoded[1])
		withFour := charsShared(pattern, decoded[4])
		switch len(pattern) {
		case 5:
			switch {
			case withOne == 1 && withFour == 2:
				decoded[2] = pattern
			case withOne == 2 && withFour == 3:
				decoded[3] = pattern
			case withOne == 1 && withFour == 3:
				decoded[5] = pattern
			}
		case 6:
			switch {
			case withOne == 2 && withFour == 3:
				decoded[0] = pattern
			case withOne == 1 && withFour == 3:
				decoded[6] = pattern
			case withOne == 2 && withFour == 4:
				decoded[9] = pattern
			}
		}
	}
	d.patterns = decoded
}
// charsShared counts how many runes of first also appear somewhere in second.
// Repeated runes in first are each counted once per occurrence.
func charsShared(first, second string) int {
	count := 0
	for _, c := range first {
		if strings.ContainsRune(second, c) {
			count++
		}
	}
	return count
}
// parseOutput decodes the four output patterns into digits, concatenates
// them, and returns the resulting integer (conversion errors are ignored).
func (d *Display) parseOutput() int {
	var digits strings.Builder
	for _, o := range d.output {
		digits.WriteString(d.getNum(o))
	}
	value, _ := strconv.Atoi(digits.String())
	return value
}
func (d *Display) getNum(num string) string {
for i, pattern := range d.patterns {
if len(num) == len(pattern) && len(num) == charsShared(num, pattern) {
return fmt.Sprint(i)
}
}
return ""
} | days/day08.go | 0.549641 | 0.411406 | day08.go | starcoder |
package vwap
import (
"sync"
"github.com/shopspring/decimal"
"golang.org/x/xerrors"
)
// defaultMaxSize is the window size used when NewList is called with maxSize == 0.
const defaultMaxSize = 200

// DataPoint represents a single data point from coinbase.
type DataPoint struct {
	Price     decimal.Decimal // trade price
	Volume    decimal.Decimal // trade size
	ProductID string          // product (trading pair) the point belongs to
}
// List represents a queue of DataPoints.
// It keeps running per-product sums so the VWAP can be updated incrementally
// as points enter and leave the sliding window (see Push).
type List struct {
	mu sync.Mutex
	// DataPoints is the sliding window of points, oldest first.
	DataPoints []DataPoint
	// SumVolumeWeighted is the running sum of price*volume per product.
	SumVolumeWeighted map[string]decimal.Decimal
	// SumVolume is the running sum of volume per product.
	SumVolume map[string]decimal.Decimal
	// VWAP is the current volume-weighted average price per product.
	VWAP map[string]decimal.Decimal
	// MaxSize caps the window length; Push evicts the oldest point beyond it.
	MaxSize uint
}
// NewList creates a new queue. A maxSize of 0 selects defaultMaxSize; an
// error is returned when the initial data exceeds the window size.
func NewList(dataPoint []DataPoint, maxSize uint) (List, error) {
	if maxSize == 0 {
		maxSize = defaultMaxSize
	}
	if uint(len(dataPoint)) > maxSize {
		return List{}, xerrors.New("initial datapoints exceeds maxSize")
	}
	list := List{
		DataPoints:        dataPoint,
		MaxSize:           maxSize,
		SumVolumeWeighted: map[string]decimal.Decimal{},
		SumVolume:         map[string]decimal.Decimal{},
		VWAP:              map[string]decimal.Decimal{},
	}
	return list, nil
}
// Len returns the length of the queue.
// Fix: take the list's mutex so Len is safe to call concurrently with Push,
// which mutates DataPoints under the same lock.
func (q *List) Len() int {
	q.mu.Lock()
	defer q.mu.Unlock()
	return len(q.DataPoints)
}
// Push pushes an element onto the queue, drops the first one when MaxSize is reached.
// The running sums (and hence the VWAP) are maintained incrementally: an
// evicted point's contribution is subtracted before the new point's is added.
func (q *List) Push(d DataPoint) {
	q.mu.Lock()
	defer q.mu.Unlock()
	if len(q.DataPoints) == int(q.MaxSize) {
		// Window is full: evict the oldest point (d shadows the parameter here).
		d := q.DataPoints[0]
		q.DataPoints = q.DataPoints[1:]
		// Subtract the datapoint values from the VWAP calculation.
		q.SumVolumeWeighted[d.ProductID] = q.SumVolumeWeighted[d.ProductID].Sub(d.Price.Mul(d.Volume))
		q.SumVolume[d.ProductID] = q.SumVolume[d.ProductID].Sub(d.Volume)
		// Guard against division by zero when all volume for the product left the window.
		if !q.SumVolume[d.ProductID].IsZero() {
			q.VWAP[d.ProductID] = q.SumVolumeWeighted[d.ProductID].Div(q.SumVolume[d.ProductID])
		}
	}
	if _, ok := q.VWAP[d.ProductID]; ok {
		// Known product: fold the new point into the running sums.
		q.SumVolumeWeighted[d.ProductID] = q.SumVolumeWeighted[d.ProductID].Add(d.Price.Mul(d.Volume))
		q.SumVolume[d.ProductID] = q.SumVolume[d.ProductID].Add(d.Volume)
		q.VWAP[d.ProductID] = q.SumVolumeWeighted[d.ProductID].Div(q.SumVolume[d.ProductID])
	} else {
		// First point for this product: initialize the sums.
		initialVW := d.Price.Mul(d.Volume)
		q.SumVolumeWeighted[d.ProductID] = initialVW
		q.SumVolume[d.ProductID] = d.Volume
		q.VWAP[d.ProductID] = initialVW.Div(d.Volume)
	}
	q.DataPoints = append(q.DataPoints, d)
}
package svg
import (
"github.com/goki/gi/gi"
"github.com/goki/ki/ki"
"github.com/goki/ki/kit"
"github.com/goki/mat32"
)
// Rect is a SVG rectangle, optionally with rounded corners
type Rect struct {
	NodeBase
	// Pos is the top-left corner of the rectangle.
	Pos mat32.Vec2 `xml:"{x,y}" desc:"position of the top-left of the rectangle"`
	// Size is the width and height of the rectangle.
	Size mat32.Vec2 `xml:"{width,height}" desc:"size of the rectangle"`
	// Radius holds the corner radii; only Radius.X is currently honored when
	// drawing (see the todo in Render2D).
	Radius mat32.Vec2 `xml:"{rx,ry}" desc:"radii for curved corners, as a proportion of width, height"`
}
var KiT_Rect = kit.Types.AddType(&Rect{}, ki.Props{"EnumType:Flag": gi.KiT_NodeFlags})
// AddNewRect adds a new rectangle to given parent node, with given name, pos
// (x, y) and size (sx, sy).
func AddNewRect(parent ki.Ki, name string, x, y, sx, sy float32) *Rect {
	rect := parent.AddNewChild(KiT_Rect, name).(*Rect)
	rect.Pos.Set(x, y)
	rect.Size.Set(sx, sy)
	return rect
}
func (g *Rect) SVGName() string { return "rect" }
// CopyFieldsFrom copies base-node fields and the rectangle geometry from
// another Rect (frm must be a *Rect; the assertion panics otherwise).
func (g *Rect) CopyFieldsFrom(frm interface{}) {
	src := frm.(*Rect)
	g.NodeBase.CopyFieldsFrom(&src.NodeBase)
	g.Pos = src.Pos
	g.Size = src.Size
	g.Radius = src.Radius
}
// SetPos sets the top-left position of the rectangle.
func (g *Rect) SetPos(pos mat32.Vec2) {
	g.Pos = pos
}
// SetSize sets the width and height of the rectangle.
func (g *Rect) SetSize(sz mat32.Vec2) {
	g.Size = sz
}
// SVGLocalBBox returns the local bounding box, expanded by half the line
// width on every side so the stroke is fully contained.
func (g *Rect) SVGLocalBBox() mat32.Box2 {
	halfStroke := 0.5 * g.LocalLineWidth()
	var bb mat32.Box2
	bb.Min = g.Pos.SubScalar(halfStroke)
	bb.Max = g.Pos.Add(g.Size).AddScalar(halfStroke)
	return bb
}
// Render2D draws the rectangle (rounded when Radius is non-zero), updates its
// bounding box, and renders children, pushing/popping the node transform.
func (g *Rect) Render2D() {
	// PushXForm reports whether the node is visible; skip all work when hidden.
	vis, rs := g.PushXForm()
	if !vis {
		return
	}
	pc := &g.Pnt
	if g.Radius.X == 0 && g.Radius.Y == 0 {
		pc.DrawRectangle(rs, g.Pos.X, g.Pos.Y, g.Size.X, g.Size.Y)
	} else {
		// todo: only supports 1 radius right now -- easy to add another
		pc.DrawRoundedRectangle(rs, g.Pos.X, g.Pos.Y, g.Size.X, g.Size.Y, g.Radius.X)
	}
	pc.FillStrokeClear(rs)
	g.ComputeBBoxSVG()
	g.Render2DChildren()
	rs.PopXFormLock()
}
// ApplyXForm applies the given 2D transform to the geometry of this node
// each node must define this for itself
func (g *Rect) ApplyXForm(xf mat32.Mat2) {
	rot := xf.ExtractRot()
	// A rotation (or an already non-identity node transform) cannot be
	// expressed via Pos/Size alone, so compose into the node's transform.
	if rot != 0 || !g.Pnt.XForm.IsIdentity() {
		g.Pnt.XForm = g.Pnt.XForm.Mul(xf)
		g.SetProp("transform", g.Pnt.XForm.String())
		g.GradientApplyXForm(xf)
	} else {
		// Pure translate/scale: bake directly into the geometry.
		g.Pos = xf.MulVec2AsPt(g.Pos)
		g.Size = xf.MulVec2AsVec(g.Size)
		g.GradientApplyXForm(xf)
	}
}
// ApplyDeltaXForm applies the given 2D delta transforms to the geometry of this node
// relative to given point. Trans translation and point are in top-level coordinates,
// so must be transformed into local coords first.
// Point is upper left corner of selection box that anchors the translation and scaling,
// and for rotation it is the center point around which to rotate
func (g *Rect) ApplyDeltaXForm(trans mat32.Vec2, scale mat32.Vec2, rot float32, pt mat32.Vec2) {
	if rot != 0 {
		// Rotation cannot be baked into Pos/Size; compose into the node transform.
		xf, lpt := g.DeltaXForm(trans, scale, rot, pt, false) // exclude self
		g.Pnt.XForm = g.Pnt.XForm.MulCtr(xf, lpt)
		g.SetProp("transform", g.Pnt.XForm.String())
		g.GradientApplyXFormPt(xf, lpt)
	} else {
		// Pure translate/scale: apply directly to the geometry.
		xf, lpt := g.DeltaXForm(trans, scale, rot, pt, true) // include self
		g.Pos = xf.MulVec2AsPtCtr(g.Pos, lpt)
		g.Size = xf.MulVec2AsVec(g.Size)
		g.GradientApplyXFormPt(xf, lpt)
	}
}
// WriteGeom writes the geometry of the node to a slice of floating point numbers
// the length and ordering of which is specific to each node type.
// Slice must be passed and will be resized if not the correct length.
// Layout: [pos.x, pos.y, size.x, size.y, xform x 6].
func (g *Rect) WriteGeom(dat *[]float32) {
	SetFloat32SliceLen(dat, 4+6)
	d := *dat
	d[0] = g.Pos.X
	d[1] = g.Pos.Y
	d[2] = g.Size.X
	d[3] = g.Size.Y
	g.WriteXForm(d, 4)
	g.GradientWritePts(dat)
}
// ReadGeom reads the geometry of the node from a slice of floating point numbers
// the length and ordering of which is specific to each node type.
func (g *Rect) ReadGeom(dat []float32) {
g.Pos.X = dat[0]
g.Pos.Y = dat[1]
g.Size.X = dat[2]
g.Size.Y = dat[3]
g.ReadXForm(dat, 4)
g.GradientReadPts(dat)
} | svg/rect.go | 0.70477 | 0.506713 | rect.go | starcoder |
package aimdcloser
import (
"math"
"time"
"golang.org/x/time/rate"
)
// RateLimiter is any object that can dynamically alter its reservation rate to allow more or less requests over time.
// Implementations are not required to be safe for concurrent use (AIMD, for
// example, is not).
type RateLimiter interface {
	// OnFailure is triggered each time we should lower our request rate.
	OnFailure(now time.Time)
	// OnSuccess is triggered each time we should increase our request rate.
	OnSuccess(now time.Time)
	// AttemptReserve is called when the application wants to ask if it should allow a request.
	AttemptReserve(now time.Time) bool
	// Reset the internal configuration of the rate limiter back to defaults.
	Reset(now time.Time)
}
// AIMD is https://en.wikipedia.org/wiki/Additive_increase/multiplicative_decrease
// It is *NOT* thread safe
type AIMD struct {
	// How many requests / sec are allowed in addition when a success happens. A default of zero
	// does not increase the rate.
	AdditiveIncrease float64
	// What % (0.0, 1.0) of requests to allow fewer of on a failure. A default of zero
	// does not decrease the rate.
	MultiplicativeDecrease float64
	// The initial rate of requests / sec to set an AIMD at when reset.
	// Default of zero means infinite bursts per second. However, with a burst of zero it is zero
	InitialRate float64
	// Allow Burst limits in the period
	// Default 0 turns off AIMD entirely.
	Burst int
	// TODO: We may want to implement some of this ourselves. Use and optimize later
	// l is the underlying token-bucket limiter, created lazily by init/Reset.
	l *rate.Limiter
}
// AIMDConstructor constructs rate limiters according to the given parameters. See documentation for AIMD for
// what each parameter means.
func AIMDConstructor(additiveIncrease float64, multiplicativeDecrease float64, initialRate float64, burst int) func() RateLimiter {
	return func() RateLimiter {
		limiter := &AIMD{}
		limiter.AdditiveIncrease = additiveIncrease
		limiter.MultiplicativeDecrease = multiplicativeDecrease
		limiter.InitialRate = initialRate
		limiter.Burst = burst
		return limiter
	}
}
// Reset the RateLimiter back to the initial rate and burst
// (the now parameter is currently unused).
func (a *AIMD) Reset(now time.Time) {
	a.l = rate.NewLimiter(rate.Limit(a.InitialRate), a.Burst)
}
// init lazily creates the underlying limiter on first use, so the zero value
// of AIMD is usable without an explicit Reset.
func (a *AIMD) init(now time.Time) {
	if a.l == nil {
		a.Reset(now)
	}
}
// OnFailure changes the limiter to decrease the current limit by MultiplicativeDecrease
// (the new limit is current * MultiplicativeDecrease, effective at now).
func (a *AIMD) OnFailure(now time.Time) {
	a.init(now)
	a.l.SetLimitAt(now, rate.Limit(float64(a.l.Limit())*a.MultiplicativeDecrease))
}
// AttemptReserve tries to reserve a request inside the current time window. Returns if the rate limiter allows
// you to reserve a request (one token is consumed when allowed).
func (a *AIMD) AttemptReserve(now time.Time) bool {
	a.init(now)
	return a.l.AllowN(now, 1)
}
// Rate returns the current rate.
// Before the limiter is initialized it reports the configured InitialRate,
// or +Inf when that is zero (zero means "infinite" per the AIMD docs).
func (a *AIMD) Rate() float64 {
	if a.l != nil {
		return float64(a.l.Limit())
	}
	if a.InitialRate != 0 {
		return a.InitialRate
	}
	return math.Inf(1)
}
// OnSuccess increases the reserved limit for this period
// (the new limit is current + AdditiveIncrease, effective at now).
func (a *AIMD) OnSuccess(now time.Time) {
	a.init(now)
	a.l.SetLimitAt(now, rate.Limit(float64(a.l.Limit())+a.AdditiveIncrease))
}
var _ RateLimiter = &AIMD{} | aimd.go | 0.543833 | 0.476519 | aimd.go | starcoder |
package calendar
import (
"fmt"
"time"
"github.com/QuestScreen/api/modules"
"github.com/QuestScreen/api/render"
"github.com/QuestScreen/api/server"
shared "github.com/QuestScreen/plugin-tutorial"
)
/*title: Module Renderer
This file contains the code that renders the module to the screen.
*/
// calendarRenderer implements the rendering of the module's state with SDL.
type calendarRenderer struct {
	// config is the merged configuration (default/base/system/group/scene levels).
	config *calendarConfig
	// curTex is the pre-rendered image of the current date; oldTex holds the
	// previous date's image while the tear-off transition is animating.
	curTex, oldTex render.Image
	// cur is the current date, kept so the module can re-render on config
	// changes that arrive without new state data.
	cur shared.UniversityDate
	// oldPos is the downward offset and oldAlpha the opacity of the old
	// image during the transition animation.
	oldPos   int32
	oldAlpha uint8
}
/*
This is our renderer, implementing `api.ModuleRenderer`.
Let's discuss the data we put in here:
* `config` is the current configuration, merged from the several configuration levels (default, base, system, group and scene).
* `curTex` contains the rendered image of the current date.
Whenever the date changes, we will render it once into a texture and can then use that texture every time we need to paint the module.
We do this since we want calls to `Render` be as fast as possible.
* `oldTex` contains the previous date while we'll use to animate the transition to the new date.
* `mask` contains a mask tile for the background.
As you can see with the HeroList and Title modules, the user can configure a secondary color together with a texture that blends it with the primary color.
We'll use this mask to do that blending, details will be explained later.
* `cur` is the current date.
We need to store it because it is possible that the configuration changes without any data change.
When this happens, we will need to re-render the module, but will not get data sent from the ModuleState.
In that case, we'll use the data in `cur`.
* `oldPos` is the position of the old image we'll use during animation.
* `oldAlpha` is the alpha value of the old image we'll use during animation.
*/
// newRenderer constructs the module renderer. All real initialization
// happens later in Rebuild, so nothing is set up here and no error can occur.
func newRenderer(ctx render.Renderer,
	ms server.MessageSender) (modules.Renderer, error) {
	return &calendarRenderer{}, nil
}
/*
These are trivial funcs we need to implement.
`newRenderer` initializes the Renderer; we don't need to do anything here since everything will be initialized in `Rebuild`.
Consult the diagram in the [Documentation](/plugins/documentation/#The%20Render%20Loop) for details on the order in which ModuleRenderer funcs are called.
*/
// createDateSheet renders the given date as "day month year" onto a canvas
// with the configured font and background, bordered on all sides except the
// top (the sheet hangs from the top screen edge), and returns the image.
func (cr *calendarRenderer) createDateSheet(ctx render.Renderer,
	d shared.UniversityDate) render.Image {
	str := fmt.Sprintf("%d %s %d", d.DayOfMonth(), d.Month(), d.Year())
	strTexture := ctx.RenderText(str, cr.config.Font.Font)
	// The intermediate text texture is only needed until it is drawn onto the canvas.
	defer ctx.FreeImage(&strTexture)
	canvas, frame := ctx.CreateCanvas(strTexture.Width+2*ctx.Unit(),
		strTexture.Height+2*ctx.Unit(), cr.config.Background.Background,
		render.East|render.South|render.West)
	// Center the text within the canvas frame.
	frame = frame.Position(strTexture.Width, strTexture.Height, render.Center,
		render.Middle)
	strTexture.Draw(ctx, frame, 255)
	return canvas.Finish()
}
/*
This is a helper function that renders an image of our date.
Of course, we format the date like a sane person would do: *day month year*.
We query the currently selected font face from the context and render our date string to a texture.
This creates a texture `strTexture` that contains our text printed in the given color with transparent background.
Then, we create a *canvas* based on the dimensions of the rendered text.
A canvas redirects all rendering to a texture, which can later be queried with `canvas.Finish()`.
We calculate the inner dimensions with `ctx.Unit()`, which exists to accomodate for different display sizes and dimensions.
This means that our rendered data will occupy the same percentage of width on a FullHD screen as it will on a 4k screen.
`CreateCanvas` optionally renders a background on it, which may use a texture.
We can just give the configured background to the renderer.
A canvas can have borders, which will extend the canvas' size from the dimensions we give.
The borders are specified via flags.
Since we will anchor our date at the top edge of the screen, we create borders for the other three directions.
`CreateCanvas` also returns a frame which just describes the dimensions of the canvas.
Being a `Rectangle`, it provides several functions to position sub-rectangles in it.
One of those is `Position`, which we use to center a rectangle of the size of our text horizontally and vertically centered in the canvas.
Now we draw the rendered text into the remaining frame inside the canvas.
We have the possibility to use blending by setting an alpha value lower than 255, but we do not need this for our purposes.
Finally, we finish the canvas, which creates the texture containing our rendered text on the user-chosen background.
This is what we return.
*/
// Rebuild rebuilds the state from the given config and optionally data.
// configVal is always the merged *calendarConfig; data, when non-nil, is the
// shared.UniversityDate produced by the module state's CreateRendererData.
func (cr *calendarRenderer) Rebuild(
	ctx render.Renderer, data interface{}, configVal interface{}) {
	cr.config = configVal.(*calendarConfig)
	if data != nil {
		cr.cur = data.(shared.UniversityDate)
	}
	// Textures are not garbage-collected; free the old one before re-rendering.
	ctx.FreeImage(&cr.curTex)
	cr.curTex = cr.createDateSheet(ctx, cr.cur)
}
/*
In this function, we need to update the current image based on given configuration value and optionally state data.
The Configuration value will always be given, but state data may be `nil`.
If `data` is not `nil`, it has been generated by `ModuleState`'s `CreateRendererData`.
Since our images are OpenGL textures, they are not automatically garbage-collected.
We need to be careful to always destroy them with `FreeImage` before creating a new image.
We can call this without checking if the image actually exists because it does nothing on empty images.
---
Before we implement the animation now, let's discuss how it should look like:
When a new date is set, we want to rip off the old date like we'd do with a calendar sheet.
This means that we'll render the new date at its final position, and over it the old date that falls down and fades away.
This is a pretty easy animation; we only need the images of the old and new date, and update the position and transparency of the old image with each step.
*/
// InitTransition starts transitioning after user input
// changed the state. The old date image is retained for the tear-off
// animation and the new date sheet is rendered; returns the animation
// duration (half a second).
func (cr *calendarRenderer) InitTransition(
	ctx render.Renderer, data interface{}) time.Duration {
	cr.oldTex = cr.curTex
	cr.cur = data.(shared.UniversityDate)
	cr.curTex = cr.createDateSheet(ctx, cr.cur)
	cr.oldPos = 0
	return time.Second / 2
}
/*
This func receives the data returned by our endpoint.
We move the old image to `oldTex` since we still need it for the animation.
Then we draw the new image.
The initial animation state will be the old texture being completely visible and at the original position.
We return the time span used for the animation.
*/
// TransitionStep advances the transitioning animation.
// A cubic transition curve (slow start/end) drives both the fade-out and the
// downward movement; the old sheet falls up to three times its own height.
func (cr *calendarRenderer) TransitionStep(
	ctx render.Renderer, elapsed time.Duration) {
	pos := render.TransitionCurve{Duration: time.Second / 2}.Cubic(elapsed)
	cr.oldAlpha = uint8((1.0 - pos) * 255)
	cr.oldPos = int32(pos * float32(cr.oldTex.Height) * 3)
}
/*
When advancing the animation, we use a `TransitionCurve`, which implements a function going from `0.0` at the beginning to `1.0` at the end of the animation.
Generally, a linear progression looks very artificial.
The `Cubic` curve we use starts slow, speeds up, and decelerates at the end.
We set `oldAlpha` to facilitate fading, and `oldPos` defines how far down the old image is.
We use the image's height for defining how far it moves.
*/
// FinishTransition finalizes the transitioning animation by freeing the old
// texture (textures are not garbage-collected) and zeroing its alpha.
func (cr *calendarRenderer) FinishTransition(ctx render.Renderer) {
	ctx.FreeImage(&cr.oldTex)
	cr.oldAlpha = 0
}
/*
At the end of the animation, we destroy the old texture.
We do not need to reset `oldPos` since that is not used outside of animation.
It will be re-initialized when a new animation starts.
*/
// Render renders the current state / animation frame.
// The current date sheet is anchored top-right, 5 units from the right edge;
// during a transition the old sheet is additionally drawn, offset downward
// and fading out.
func (cr *calendarRenderer) Render(ctx render.Renderer) {
	frame := ctx.OutputSize()
	// Reserve a 5-unit margin on the right edge.
	_, frame = frame.Carve(render.East, 5*ctx.Unit())
	cr.curTex.Draw(ctx,
		frame.Position(cr.curTex.Width, cr.curTex.Height, render.Right, render.Top),
		255-cr.oldAlpha)
	if !cr.oldTex.IsEmpty() {
		// Offset the old sheet downward by the current animation position.
		_, frame = frame.Carve(render.North, cr.oldPos)
		cr.oldTex.Draw(ctx,
			frame.Position(cr.oldTex.Width, cr.oldTex.Height, render.Right, render.Top),
			cr.oldAlpha)
	}
}
/*
Finally, rendering.
We render the calender to the upper right corner, with a distance of 5 units from the right edge.
This is done by first carving out the 5 units from the right and then positioning the text's rectangle at the top right of the remaining frame.
If `cr.oldTex` contains an image, we're currently animating so we need to render the old date as well.
For this, we use the value `cr.oldPos` calculated in our `TransitionStep` to offset the old texture from the screen top.
This wraps up the code for rendering.
*/ | calendar/renderer.go | 0.822046 | 0.548915 | renderer.go | starcoder |
package cases
import (
"sort"
"testing"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/stretchr/testify/require"
)
// SortedLabelsTest exports a single, constant metric with labels in the wrong order
// and checks that we receive the metrics with sorted labels.
func SortedLabelsTest() Test {
	return Test{
		Name: "SortedLabels",
		// The exposition deliberately lists label b before a.
		Metrics: staticHandler([]byte(`
# HELP test A gauge
# TYPE test gauge
test{b="2",a="1"} 1.0
`)),
		Expected: func(t *testing.T, bs []Batch) {
			// Every received sample must carry its label names in sorted order.
			forAllSamples(bs, func(s sample) {
				names := []string{}
				for i := range s.l {
					names = append(names, s.l[i].Name)
				}
				require.True(t, sort.IsSorted(sort.StringSlice(names)), "'%s' is not sorted", s.l.String())
			})
			// The metric itself must arrive with the expected (sorted) labels and value.
			tests := countMetricWithValue(t, bs, labels.FromStrings("__name__", "test", "a", "1", "b", "2"), 1.0)
			require.True(t, tests > 0, `found zero samples for test{a="1",b="2"}`)
		},
	}
}
// RepeatedLabelsTest exports a single, constant metric with repeated labels
// and checks that we don't receive metrics with repeated labels (and get up=0 instead).
func RepeatedLabelsTest() Test {
	return Test{
		Name: "RepeatedLabels",
		// The label "a" appears twice, which is invalid exposition.
		Metrics: staticHandler([]byte(`
# HELP test A gauge
# TYPE test gauge
test{a="1",a="1"} 1.0
`)),
		Expected: func(t *testing.T, bs []Batch) {
			// No received sample may carry the same label name twice.
			forAllSamples(bs, func(s sample) {
				counts := map[string]int{}
				for i := range s.l {
					counts[s.l[i].Name]++
				}
				for name, count := range counts {
					require.Equal(t, 1, count, "label '%s' is repeated %d times", name, count)
				}
			})
			// The failed scrape must surface as up{job="test"} == 0.
			ups := countMetricWithValue(t, bs, labels.FromStrings("__name__", "up", "job", "test"), 0.0)
			require.True(t, ups > 0, `found zero samples for up{job="test"} = 0`)
		},
	}
}
// EmptyLabelsTest exports a single, constant metric with an empty label
// and checks that we receive the metrics without said label.
// (Doc comment fixed to match the function name.)
func EmptyLabelsTest() Test {
	return Test{
		Name: "EmptyLabels",
		// Label "a" has an empty value and should be dropped on receipt.
		Metrics: staticHandler([]byte(`
# HELP test A gauge
# TYPE test gauge
test{a=""} 1.0
`)),
		Expected: func(t *testing.T, bs []Batch) {
			// No received sample may carry an empty-valued label.
			forAllSamples(bs, func(s sample) {
				for i := range s.l {
					require.NotEmpty(t, s.l[i].Value, "'%s' contains empty labels", s.l.String())
				}
			})
			// The metric must still arrive, just without the empty label.
			tests := countMetricWithValue(t, bs, labels.FromStrings("__name__", "test"), 1.0)
			require.True(t, tests > 0, `found zero samples for {"__name__"="test"}`)
		},
	}
}
// NameLabelTest exports a single, constant metric with no name label
// and checks that we don't receive metrics without a name label (and get up=0 instead).
// (Doc comment fixed to match the function name.)
func NameLabelTest() Test {
	return Test{
		Name: "NameLabel",
		// The sample has labels but no metric name, which is invalid exposition.
		Metrics: staticHandler([]byte(`
# HELP test A gauge
# TYPE test gauge
{label="value"} 1.0
`)),
		Expected: func(t *testing.T, bs []Batch) {
			// Every received sample must include a __name__ label; the
			// unconditional require.True(false) fires only when it is missing.
			forAllSamples(bs, func(s sample) {
				for i := range s.l {
					if s.l[i].Name == "__name__" {
						return
					}
				}
				require.True(t, false, "metric '%s' is missing name label", s.l.String())
			})
			// The failed scrape must surface as up{job="test"} == 0.
			ups := countMetricWithValue(t, bs, labels.FromStrings("__name__", "up", "job", "test"), 0.0)
			require.True(t, ups > 0, `found zero samples for up{job="test"} = 0`)
		},
	}
}
package main
import (
"fmt"
"math"
)
//math funcs (start)
// Basic integer arithmetic helpers. divideNum uses integer division and will
// panic when a2 == 0 (no guard here).
func addNum(a1 int, a2 int) int      { return a1 + a2 }
func subtractNum(a1 int, a2 int) int { return a1 - a2 }
func multiplyNum(a1 int, a2 int) int { return a1 * a2 }
func divideNum(a1 int, a2 int) int   { return a1 / a2 }

// Trigonometric helpers; inputs are in radians (math.Sin/Cos operate on radians).
func sinNum(a1 float64) float64 { return math.Sin(a1) }
func cosNum(a1 float64) float64 { return math.Cos(a1) }

// NOTE(review): despite its name, sqrtNum returns the SQUARE of a1, not the
// square root. findRATrHypotenuse relies on this squaring behavior.
func sqrtNum(a1 int) int { return a1 * a1 }
func cubeNum(a1 int) int { return a1 * a1 * a1 }
func powerNum(a1 int, power int) float64 { return math.Pow(3, 4) }
func tanNum(a1 float64) float64 { return math.Tan(a1) }
// rectangle describes an axis-aligned rectangle by its side lengths.
type rectangle struct{ height, length int }

// findRecPerimeter returns the perimeter of r (twice the sum of the sides).
func findRecPerimeter(r rectangle) int { return 2*r.height + 2*r.length }

// findRecArea returns the area of r.
func findRecArea(r rectangle) int { return r.height * r.length }
// isosceles_triangle describes an isosceles triangle: equal legs A and B,
// base C, and the height onto the base.
type isosceles_triangle struct{ A_side, B_side, C_side, height int }

// findIsTrPerimeter returns the perimeter of t, warning (but still computing)
// when the sides don't describe the expected isosceles shape.
// Bug fix: the base check previously compared C_side against the bitwise OR
// (A_side | B_side); it now compares against each leg individually. The
// "Even thought" typo in the warning was also corrected.
func findIsTrPerimeter(t isosceles_triangle) int {
	if t.A_side != t.B_side {
		fmt.Println("[error] your variables are wrong. there are 2 equal sides in isosceles triangle. if you have 2 equal numbers but mentioned them as c or b, you can modify this code or just rearrange them. Even though it is wrong, we still solved it for you.")
	} else if t.C_side < t.A_side || t.C_side < t.B_side {
		fmt.Println("[error] base of an isosceles triangle can't be smaller than the a and b. But we will still solve it for you.")
	}
	return t.A_side + t.B_side + t.C_side
}

// findIsTrArea returns the area of t using the base (C_side) and height.
func findIsTrArea(t isosceles_triangle) int {
	return (t.C_side * t.height) / 2 // C_side is the base.
}
// right_angle_triangle models a right triangle by its two legs and hypotenuse.
type right_angle_triangle struct{ A_leg, B_leg, hypotenuse int }

// findRATrPerimeter returns the perimeter of t (the sum of all three sides),
// printing a warning when either leg is longer than the hypotenuse.
// Bug fix: the check compared the bitwise OR of the legs to the hypotenuse
// ((A|B) > hyp), which falsely flagged valid triangles such as 3-4-5
// (3|4 == 7 > 5); each leg is now compared individually.
func findRATrPerimeter(t right_angle_triangle) int {
	if t.A_leg > t.hypotenuse || t.B_leg > t.hypotenuse {
		fmt.Println("[error] your variables are wrong. A leg and B leg can't be bigger than the hypotenuse. But we will still solve it for you.")
	}
	return t.A_leg + t.B_leg + t.hypotenuse
}

// findRATrHypotenuse returns the hypotenuse length sqrt(a² + b²) computed
// from the two legs (the t.hypotenuse field itself is ignored).
// The squares are computed inline instead of via the misleadingly named
// sqrtNum helper, and the sanity check's bitwise OR (int(result) < A|B,
// which falsely warned for e.g. legs 4 and 3) was replaced with per-leg
// comparisons.
func findRATrHypotenuse(t right_angle_triangle) float64 {
	result := math.Sqrt(float64(t.A_leg*t.A_leg + t.B_leg*t.B_leg))
	if int(result) < t.A_leg || int(result) < t.B_leg {
		fmt.Println("[error] hypotenuse is the longest side of a right angle triangle. a leg or b leg can't be bigger than hypotenuse. but we will still solve the problem for you.")
	}
	return result
}
//math funcs (end)
// consoleOutput prints the given answer with a standard prefix.
func consoleOutput(answer int) {
	fmt.Println("Your answer is: ", answer)
}
// main exercises the calculator and geometry helpers above and prints the
// results. Fixes two typos in the printed strings ("triange" -> "triangle",
// "trialing" -> "triangle"); all other output text is unchanged.
func main() {
	consoleOutput(addNum(5, 15)) //adding numbers
	consoleOutput(int(sinNum(50))) // rounded sine; for the exact value use the line below
	fmt.Println("Your answer is ", sinNum(50)) // this will provide you your sine result with exact number.
	fmt.Println("Perimeter of rectangle with height: 5 length: 7 is ", findRecPerimeter(rectangle{5, 7}))
	fmt.Println("Area of rectangle with height: 5 length: 7 is ", findRecArea(rectangle{5, 7}))
	fmt.Println("Perimeter of triangle with a: 3, b: 3, c: 5 is ", findIsTrPerimeter(isosceles_triangle{3, 3, 5, 0})) // 0 = height
	fmt.Println("Area of a triangle with a: 5, b: 5, c (base): 7, height: 4 is ", findIsTrArea(isosceles_triangle{0, 0, 7, 4})) //base 7, height 4
	fmt.Println("Perimeter of a right angled triangle with a: 3 b: 4 hypotenuse: 5 is ", findRATrPerimeter(right_angle_triangle{3, 4, 5}))
	fmt.Println("Hypotenuse of a right angled triangle with a: 4 b: 3 is ", findRATrHypotenuse(right_angle_triangle{4, 3, 0}))
} | main.go | 0.716913 | 0.580084 | main.go | starcoder |
package good
import (
"bufio"
"bytes"
"fmt"
"image"
"image/color"
"image/draw"
"image/jpeg"
"golang.org/x/image/colornames"
"golang.org/x/image/font"
"golang.org/x/image/font/basicfont"
"golang.org/x/image/math/fixed"
)
func HLine(image *image.RGBA, y, x1, x2 int, c color.Color) {
for i := x1; i < x2; i++ {
image.Set(i, y, c)
}
}
func VLine(image *image.RGBA, x, y1, y2 int, c color.Color) {
for i := y1; i < y2; i++ {
image.Set(x, i, c)
}
}
// rect draws a one-pixel outline of the rectangle spanned by (x1,y1)-(x2,y2)
// onto img in color c, using the HLine/VLine helpers.
// The parameter was renamed from `image` to `img` — the old name shadowed
// the image package. Note that HLine/VLine treat their second bound as
// exclusive, so the pixel at (x2, y2) itself is never painted.
func rect(img *image.RGBA, x1, x2, y1, y2 int, c color.Color) {
	HLine(img, y1, x1, x2, c)
	HLine(img, y2, x1, x2, c)
	VLine(img, x1, y1, y2, c)
	VLine(img, x2, y1, y2, c)
}
// Rectangle draws a rectangle outline with the given stroke width by
// drawing `width` nested single-pixel outlines.
// The parameter was renamed from `image` to `img` — the old name shadowed
// the image package.
// NOTE(review): each pass insets only the top-left corner (x1+w, y1+w)
// while x2/y2 stay fixed, so the stroke thickens inward from the top and
// left edges only — confirm this asymmetry is intended.
func Rectangle(img *image.RGBA, x1, x2, y1, y2, width int, c color.Color) {
	for w := 0; w < width; w++ {
		rect(img, x1+w, x2, y1+w, y2, c)
	}
}
// renderLabel draws a filled color bar (keyed to the class index `label`)
// behind the text position and then renders renderstr in black on top of it.
// (x, y) is the text baseline origin in pixels.
func renderLabel(img *image.RGBA, x, y, label int, renderstr string) {
	// Deterministic per-class color from the SVG 1.1 color name table.
	col := colornames.Map[colornames.Names[label]]
	// Baseline point in 26.6 fixed-point coordinates (1 px = 64 units).
	point := fixed.Point26_6{fixed.Int26_6(x * 64), fixed.Int26_6(y * 64)}
	d := &font.Drawer{
		Dst:  img,
		Src:  image.NewUniform(colornames.Black),
		Face: basicfont.Face7x13,
		Dot:  point,
	}
	// Background bar: width assumes the fixed 7 px glyph advance of
	// Face7x13, and the y-13 / y-6 offsets assume its 13 px height.
	// NOTE(review): these constants must change if the font face changes.
	// The bar is drawn first so the text lands on top of it.
	Rectangle(img, x, (x + len(renderstr)*7), y-13, y-6, 7, col)
	d.DrawString(renderstr)
}
// RenderObject decodes an image from inputBytes, draws a bounding box and a
// label for every detected object, and returns the result re-encoded as JPEG.
// Bug fix: the bufio.Writer was never flushed, so the tail of the encoded
// JPEG could remain in the buffer and output.Bytes() would be truncated.
func RenderObject(inputBytes []byte, objectList []*Object) ([]byte, error) {
	var output bytes.Buffer
	outputWriter := bufio.NewWriter(&output)
	//decode image from bytes
	img, err := DecodeImage(inputBytes)
	if err != nil {
		return nil, fmt.Errorf("Decode image error: %v", err)
	}
	// Copy the decoded image into a mutable RGBA canvas.
	bounds := img.Bounds()
	imgRGBA := image.NewRGBA(bounds)
	draw.Draw(imgRGBA, bounds, img, bounds.Min, draw.Src)
	// Render each detection. Box holds normalized coordinates, scaled here
	// by the image dimensions: Box[0]/Box[2] are y-fractions, Box[1]/Box[3]
	// are x-fractions.
	for _, item := range objectList {
		x1 := int(float32(bounds.Max.X) * item.Box[1])
		y1 := int(float32(bounds.Max.Y) * item.Box[0])
		x2 := int(float32(bounds.Max.X) * item.Box[3])
		y2 := int(float32(bounds.Max.Y) * item.Box[2])
		Rectangle(imgRGBA, x1, x2, y1, y2, 3, colornames.Map[colornames.Names[int(item.Label)]])
		labelinRender := fmt.Sprintf("%s (%2.0f%%)", item.LabelStr, item.Prob*100.0)
		renderLabel(imgRGBA, x1, y1, item.Label, labelinRender)
	}
	if err = jpeg.Encode(outputWriter, imgRGBA, &jpeg.Options{Quality: 75}); err != nil {
		return nil, err
	}
	// Flush the buffered writer so output holds the complete JPEG stream.
	if err = outputWriter.Flush(); err != nil {
		return nil, err
	}
	return output.Bytes(), nil
} | render.go | 0.513181 | 0.403244 | render.go | starcoder |
package openapi
import (
"encoding/json"
)
// NumaDistance struct for NumaDistance.
// Both fields are required by the API schema.
type NumaDistance struct {
	Destination int32 `json:"destination"`
	Distance    int32 `json:"distance"`
}

// NewNumaDistance instantiates a new NumaDistance object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewNumaDistance(destination int32, distance int32) *NumaDistance {
	this := NumaDistance{}
	this.Destination = destination
	this.Distance = distance
	return &this
}

// NewNumaDistanceWithDefaults instantiates a new NumaDistance object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewNumaDistanceWithDefaults() *NumaDistance {
	this := NumaDistance{}
	return &this
}

// GetDestination returns the Destination field value
// (the zero value when the receiver is nil).
func (o *NumaDistance) GetDestination() int32 {
	if o == nil {
		var ret int32
		return ret
	}
	return o.Destination
}

// GetDestinationOk returns a tuple with the Destination field value
// and a boolean to check if the value has been set.
func (o *NumaDistance) GetDestinationOk() (*int32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Destination, true
}

// SetDestination sets field value
func (o *NumaDistance) SetDestination(v int32) {
	o.Destination = v
}

// GetDistance returns the Distance field value
// (the zero value when the receiver is nil).
func (o *NumaDistance) GetDistance() int32 {
	if o == nil {
		var ret int32
		return ret
	}
	return o.Distance
}

// GetDistanceOk returns a tuple with the Distance field value
// and a boolean to check if the value has been set.
func (o *NumaDistance) GetDistanceOk() (*int32, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Distance, true
}

// SetDistance sets field value
func (o *NumaDistance) SetDistance(v int32) {
	o.Distance = v
}

// MarshalJSON serializes both (required) fields.
// The generated `if true { ... }` guards around each assignment were dead
// conditionals and have been removed; the output is unchanged.
func (o NumaDistance) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	toSerialize["destination"] = o.Destination
	toSerialize["distance"] = o.Distance
	return json.Marshal(toSerialize)
}
// NullableNumaDistance carries a NumaDistance pointer plus an explicit
// "set" flag, letting callers distinguish an absent value from JSON null.
type NullableNumaDistance struct {
	value *NumaDistance
	isSet bool
}

// Get returns the wrapped value, which may be nil.
func (v NullableNumaDistance) Get() *NumaDistance {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableNumaDistance) Set(val *NumaDistance) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether a value has been stored (via Set or UnmarshalJSON).
func (v NullableNumaDistance) IsSet() bool {
	return v.isSet
}

// Unset clears both the value and the set flag.
func (v *NullableNumaDistance) Unset() {
	*v = NullableNumaDistance{}
}

// NewNullableNumaDistance returns a wrapper around val, already marked set.
func NewNullableNumaDistance(val *NumaDistance) *NullableNumaDistance {
	return &NullableNumaDistance{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value (a nil value encodes as JSON null).
func (v NullableNumaDistance) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes src into the wrapped value and marks it as set.
func (v *NullableNumaDistance) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
} | src/runtime/virtcontainers/pkg/cloud-hypervisor/client/model_numa_distance.go | 0.823151 | 0.42925 | model_numa_distance.go | starcoder |
package algorithms
import (
"line-simplify/tracks"
"math"
"sort"
"time"
)
// Line describes a line between two vectors:
// the segment from track point V1 to track point V2.
type Line struct {
	V1 tracks.Datum
	V2 tracks.Datum
}
// Diff returns the component-wise difference v2 - v1 as a new Datum.
func Diff(v1, v2 tracks.Datum) tracks.Datum {
	return tracks.Datum{
		Lon: v2.Lon - v1.Lon,
		Lat: v2.Lat - v1.Lat,
	}
}
// Abs returns the Euclidean length of the vector from v1 to v2, i.e. the
// straight-line distance between the two points in lon/lat space.
// The difference vector is now computed once instead of calling Diff twice,
// and math.Pow(x, 2) is replaced by x*x (bit-identical for exponent 2).
func Abs(v1, v2 tracks.Datum) float64 {
	d := Diff(v1, v2)
	return math.Sqrt(d.Lon*d.Lon + d.Lat*d.Lat)
}
// PerpendicularDistance returns the shortest (perpendicular) distance from
// point v to the line through line.V1 and line.V2. Degenerate inputs —
// v coinciding with an endpoint, or the endpoints coinciding — yield 0.
func PerpendicularDistance(v tracks.Datum, line Line) float64 {
	if v == line.V1 || v == line.V2 || line.V1 == line.V2 {
		return 0.0
	}
	// Cross-product form of the point-to-line distance:
	// |(y2-y1)*x - (x2-x1)*y + x2*y1 - y2*x1| / |V2 - V1|
	numerator := (line.V2.Lat-line.V1.Lat)*v.Lon - (line.V2.Lon-line.V1.Lon)*v.Lat + line.V2.Lon*line.V1.Lat - line.V2.Lat*line.V1.Lon
	denominator := Abs(line.V2, line.V1)
	if denominator == 0 {
		return 0
	}
	return math.Abs(numerator) / denominator
}
// DPByTolerance : simple Douglas-Peucker algorithm with tolerance e.
// Points farther than e from the chord data[0]–data[end] cause a recursive
// split at the farthest point; otherwise only the endpoints are kept.
//
// NOTE(review): several spots look off-by-one compared to canonical DP and
// should be verified against the test suite:
//   - the right recursion uses data[idx:end], dropping the final point
//     (canonical DP uses data[idx : end+1]);
//   - the two halves do not share data[idx], yet the last element of the
//     left result is still removed when concatenating;
//   - the base case returns data[end-1] rather than data[end], so a
//     2-point input yields data[0] twice;
//   - the split requires idx > 1, so a farthest point at index 1 never
//     becomes a split point.
func DPByTolerance(data []tracks.Datum, e float64) []tracks.Datum {
	defer timeTrack(time.Now(), "DPByTolerance")
	// Find the point with the maximum distance
	dMax := 0.0
	idx := 0
	end := len(data) - 1
	for i := 0; i <= end; i++ {
		// line is loop-invariant (always data[0]–data[end]) but is rebuilt
		// on every iteration here.
		line := Line{
			V1: data[0],
			V2: data[end],
		}
		d := PerpendicularDistance(data[i], line)
		if d > dMax {
			idx = i
			dMax = d
		}
	}
	var Res []tracks.Datum
	if dMax > e && idx > 1 {
		// Recursive call
		recR1 := DPByTolerance(data[0:idx], e)
		recR2 := DPByTolerance(data[idx:end], e)
		// Build the result list: left result minus its last element,
		// then the right result. Note append may write into recR1's
		// backing array.
		Res = append(recR1[0:len(recR1)-1], recR2[0:len(recR2)]...)
	} else {
		if len(data) > 1 {
			Res = []tracks.Datum{data[0], data[end-1]}
		} else {
			// 0 or 1 points: return the input unchanged (empty input is safe:
			// the loop above does not run when end == -1).
			Res = data
		}
	}
	return Res
}
// DPByCount implements Douglas Peucker but with a given pointscount for the result set
func DPByCount(data []tracks.Datum, count int) []tracks.Datum {
defer timeTrack(time.Now(), "DPByCount")
len := len(data)
weights := make([]float64, len)
var dP func(int, int)
dP = func(start, end int) {
if end <= start+1 {
return
}
line := Line{
V1: data[start],
V2: data[end],
}
dMax := -1.0
idx := 0
d := 0.0
for i := start + 1; i < end; i++ {
d = PerpendicularDistance(data[i], line)
if d > dMax {
dMax = d
idx = i
}
}
weights[idx] = dMax
dP(start, idx)
dP(idx, end)
}
dP(0, len-1)
// make sure first and last point always included
weights[0] = math.MaxFloat64
weights[len-1] = math.MaxFloat64
// sort []weights descending, to calculate maxT max tolerance
weightsDesc := make([]float64, len)
copy(weightsDesc, weights)
sort.Slice(weightsDesc, func(i, j int) bool {
return weightsDesc[i] > weightsDesc[j]
})
maxT := weightsDesc[count-1]
// filter correct highest-weighted points into []dataOut
n := 0
dataOut := make([]tracks.Datum, count)
for i, x := range data {
if weights[i] >= maxT {
dataOut[n] = x
n++
}
}
return dataOut
} | algorithms/douglasPeucker.go | 0.813016 | 0.481759 | douglasPeucker.go | starcoder |
package beartol
import (
"fmt"
)
// FagTolerancesInteractor implements TolerancesInteractor.
type FagTolerancesInteractor struct{}
// GetInnerDiameterTolerance returns the two tabulated deviations
// (ToleranceA, ToleranceB — presumably upper/lower limits in µm, confirm
// against the FAG catalogue) for the bore diameter of rb, selected by
// bearing type, accuracy class and diameter.
// The receiver was renamed from `thiz` to `f` per Go convention.
func (f *FagTolerancesInteractor) GetInnerDiameterTolerance(rb RollingBearing) (int, int, error) {
	id, err := f.toToleranceId(rb.Type)
	if err != nil {
		return 0, 0, err
	}
	tolerances := gDiameterTolerances[id][rb.ClassTochn].Inner
	if len(tolerances) == 0 {
		return 0, 0, fmt.Errorf("unsupp Type %d or ClassTochn %s", rb.Type, rb.ClassTochn)
	}
	return f.findTolerance(tolerances, rb.InnerDiameter)
}
// GetOuterDiameterTolerance returns the two tabulated deviations
// (ToleranceA, ToleranceB — presumably upper/lower limits in µm, confirm
// against the FAG catalogue) for the outside diameter of rb, selected by
// bearing type, accuracy class and diameter.
// The receiver was renamed from `thiz` to `f` per Go convention.
func (f *FagTolerancesInteractor) GetOuterDiameterTolerance(rb RollingBearing) (int, int, error) {
	id, err := f.toToleranceId(rb.Type)
	if err != nil {
		return 0, 0, err
	}
	tolerances := gDiameterTolerances[id][rb.ClassTochn].Outer
	if len(tolerances) == 0 {
		return 0, 0, fmt.Errorf("unsupp Type %d or ClassTochn %s", rb.Type, rb.ClassTochn)
	}
	return f.findTolerance(tolerances, rb.OuterDiameter)
}
// toToleranceId maps a bearing type to the index of its diameter-tolerance
// table in gDiameterTolerances:
//
//	0 – radial bearings (except taper roller bearings)
//	1 – taper roller bearings
//	2 – thrust bearings
//
// The long fallthrough chains were collapsed into multi-value case lists
// (behavior unchanged), the receiver was renamed from `thiz` to `f`, and
// the Russian comments were translated.
func (f *FagTolerancesInteractor) toToleranceId(rbType RollingBearingType) (int, error) {
	switch rbType {
	case RollingBearingType_BallRadial,
		// For 72B and 73B the bore tolerances of all accuracy classes
		// follow P5 (without an extra designation).
		RollingBearingType_BallRadialUpor,
		RollingBearingType_Shpindel,
		RollingBearingType_4ptContact,
		RollingBearingType_BallSphere_Cylynder,
		RollingBearingType_BallSphere_Kone,
		RollingBearingType_RollerCylynder,
		// Separate table on p. 366 for T41A.
		RollingBearingType_RollerSphere_Cylynder,
		// Restriction applies only to the outer diameter.
		RollingBearingType_RollerSphere_Kone,
		RollingBearingType_RollerSphere_Bochka_Cylynder,
		RollingBearingType_RollerSphere_Bochka_Kone:
		return 0, nil
	case RollingBearingType_RollerKone:
		// default P6X for 320X, 329, 330, 331, 332 and d < 200 mm
		return 1, nil
	case RollingBearingType_BallUpor,
		// Separate table on p. 469 for 7602, 7603.
		RollingBearingType_BallRadialUpor2,
		RollingBearingType_RollerCylynderUpor,
		RollingBearingType_RollerSphereUpor:
		return 2, nil
	}
	return 0, fmt.Errorf("unsupp bearing type %d", rbType)
}
// findTolerance returns the (ToleranceA, ToleranceB) pair of the first table
// row whose half-open diameter range (DiameterLow, DiameterHigh] contains
// diameter, or an error when no row matches.
// The receiver was renamed from `thiz` to `f` per Go convention.
func (f *FagTolerancesInteractor) findTolerance(tolerances []diameterTolerance, diameter int) (int, int, error) {
	for _, row := range tolerances {
		if row.DiameterLow < diameter && diameter <= row.DiameterHigh {
			return row.ToleranceA, row.ToleranceB, nil
		}
	}
	return 0, 0, fmt.Errorf("unsupp diameter %d", diameter)
}
// GetClearance returns the (min, max) internal clearance for rb, looked up
// by bearing type/sub-type, bore diameter and clearance group.
// The receiver was renamed from `thiz` to `f`, and the local variable
// `clearance` — which shadowed the type of the same name — to `byGroup`.
// NOTE(review): an unknown ClearanceGroup silently yields the zero range
// {0, 0}; confirm whether that should be an error instead.
func (f *FagTolerancesInteractor) GetClearance(rb RollingBearing) (int, int, error) {
	id, err := f.toClearanceId(rb.Type, rb.TypeSpecial)
	if err != nil {
		return 0, 0, err
	}
	rows := gClearances[id]
	byGroup, err := f.findClearance(rows, rb.InnerDiameter)
	if err != nil {
		return 0, 0, err
	}
	clearanceRange := byGroup[rb.ClearanceGroup]
	return clearanceRange.Min, clearanceRange.Max, nil
}
// toClearanceId maps a bearing type (and sub-type) to the index of its
// clearance table in gClearances (see the table index legend below), or
// returns an error for types that have no tabulated clearance.
// The trailing fallthrough chain was collapsed into one multi-value case,
// the inner if/else-if over sub-types became a nested switch, and the
// receiver was renamed from `thiz` to `f`; behavior is unchanged.
func (f *FagTolerancesInteractor) toClearanceId(rbType RollingBearingType, rbTypeSpecial RollingBearingTypeSpecial) (int, error) {
	switch rbType {
	case RollingBearingType_BallRadial:
		return 0, nil
	case RollingBearingType_BallRadialUpor:
		switch rbTypeSpecial {
		case RollingBearingTypeSpecial_72B_73B:
			return 0, fmt.Errorf("no clearance for bearing type %d (%d)", rbType, rbTypeSpecial)
		case RollingBearingTypeSpecial_33DA:
			return 4, nil
		default:
			return 3, nil
		}
	case RollingBearingType_4ptContact:
		return 5, nil
	case RollingBearingType_BallSphere_Cylynder:
		return 1, nil
	case RollingBearingType_BallSphere_Kone:
		return 2, nil
	case RollingBearingType_RollerCylynder:
		if rbTypeSpecial == RollingBearingTypeSpecial_NN30ASK {
			// default C1NA for NN30ASK
			return 7, nil
		}
		return 6, nil
	case RollingBearingType_RollerSphere_Cylynder:
		// default C4 for T41A
		return 8, nil
	case RollingBearingType_RollerSphere_Kone:
		return 9, nil
	case RollingBearingType_RollerSphere_Bochka_Cylynder:
		return 10, nil
	case RollingBearingType_RollerSphere_Bochka_Kone:
		return 11, nil
	case RollingBearingType_Shpindel,
		RollingBearingType_RollerKone,
		RollingBearingType_BallUpor,
		// Separate table on p. 469 for 7602, 7603 (not wired up here).
		RollingBearingType_BallRadialUpor2,
		RollingBearingType_RollerCylynderUpor,
		RollingBearingType_RollerSphereUpor:
		return 0, fmt.Errorf("no clearance for bearing type %d (%d)", rbType, rbTypeSpecial)
	}
	return 0, fmt.Errorf("unsupp bearing type %d (%d)", rbType, rbTypeSpecial)
}
// findClearance returns the per-group clearance map of the first table row
// whose half-open diameter range (DiameterLow, DiameterHigh] contains
// diameter, or an error when no row matches.
// The receiver was renamed from `thiz` to `f` per Go convention.
func (f *FagTolerancesInteractor) findClearance(clearances []clearance, diameter int) (map[string]clearanceRange, error) {
	for _, row := range clearances {
		if row.DiameterLow < diameter && diameter <= row.DiameterHigh {
			return row.Clearance, nil
		}
	}
	return nil, fmt.Errorf("unsupp diameter %d", diameter)
}
// diameterTolerance is one row of a diameter-tolerance table: the two
// deviations ToleranceA/ToleranceB (presumably in µm — confirm against the
// FAG catalogue) apply to diameters in the half-open range
// (DiameterLow, DiameterHigh], as implemented by findTolerance.
type diameterTolerance struct {
	DiameterLow  int
	DiameterHigh int
	ToleranceA   int
	ToleranceB   int
}
/*
0 допуски радиальных подшипников (кроме конических роликоподшипников)
P4S шпиндельные
SP, UP двухрядные роликоподшипники с цилиндрическими роликами
1 допуски конических роликоподшипников
2 допуски упорных подшипников
SP радиально-упорные шарикоподшипники, серии 2344 и 2347
*/
var gDiameterTolerances = map[int]map[string]struct {
Inner []diameterTolerance
Outer []diameterTolerance
}{
0: { // допуски радиальных подшипников (кроме конических роликоподшипников)
"PN": {
[]diameterTolerance{
{2 /*2.5*/, 10, 0, -8},
{10, 18, 0, -8},
{18, 30, 0, -10},
{30, 50, 0, -12},
{50, 80, 0, -15},
{80, 120, 0, -20},
{120, 180, 0, -25},
{180, 250, 0, -30},
{250, 315, 0, -35},
{315, 400, 0, -40},
{400, 500, 0, -45},
{500, 630, 0, -50},
{630, 800, 0, -75},
{800, 1000, 0, -100},
{1000, 1250, 0, -125},
{1250, 1600, 0, -160},
{1600, 2000, 0, -200},
},
[]diameterTolerance{
{6, 18, 0, -8},
{18, 30, 0, -9},
{30, 50, 0, -11},
{50, 80, 0, -13},
{80, 120, 0, -15},
{120, 150, 0, -18},
{150, 180, 0, -25},
{180, 250, 0, -30},
{250, 315, 0, -35},
{315, 400, 0, -40},
{400, 500, 0, -45},
{500, 630, 0, -50},
{630, 800, 0, -75},
{800, 1000, 0, -100},
{1000, 1250, 0, -125},
{1250, 1600, 0, -160},
{1600, 2000, 0, -200},
{2000, 2500, 0, -250},
},
},
"P6": {
[]diameterTolerance{
{2 /*2.5*/, 10, 0, -7},
{10, 18, 0, -7},
{18, 30, 0, -8},
{30, 50, 0, -10},
{50, 80, 0, -12},
{80, 120, 0, -15},
{120, 180, 0, -18},
{180, 250, 0, -22},
{250, 315, 0, -25},
{315, 400, 0, -30},
{400, 500, 0, -35},
{500, 630, 0, -40},
{630, 800, 0, -50},
{800, 1000, 0, -65},
{1000, 1250, 0, -80},
{1250, 1600, 0, -100},
{1600, 2000, 0, -130},
},
[]diameterTolerance{
{6, 18, 0, -7},
{18, 30, 0, -8},
{30, 50, 0, -9},
{50, 80, 0, -11},
{80, 120, 0, -13},
{120, 150, 0, -15},
{150, 180, 0, -18},
{180, 250, 0, -20},
{250, 315, 0, -25},
{315, 400, 0, -28},
{400, 500, 0, -33},
{500, 630, 0, -38},
{630, 800, 0, -45},
{800, 1000, 0, -60},
{1000, 1250, 0, -80},
{1250, 1600, 0, -100},
{1600, 2000, 0, -140},
{2000, 2500, 0, -180},
},
},
"P5": {
[]diameterTolerance{
{2 /*2.5*/, 10, 0, -5},
{10, 18, 0, -5},
{18, 30, 0, -6},
{30, 50, 0, -8},
{50, 80, 0, -9},
{80, 120, 0, -10},
{120, 180, 0, -13},
{180, 250, 0, -15},
{250, 315, 0, -18},
{315, 400, 0, -23},
{400, 500, 0, -27},
{500, 630, 0, -33},
{630, 800, 0, -40},
},
[]diameterTolerance{
{6, 18, 0, -5},
{18, 30, 0, -6},
{30, 50, 0, -7},
{50, 80, 0, -9},
{80, 120, 0, -10},
{120, 150, 0, -11},
{150, 180, 0, -13},
{180, 250, 0, -15},
{250, 315, 0, -18},
{315, 400, 0, -20},
{400, 500, 0, -23},
{500, 630, 0, -28},
{630, 800, 0, -35},
{800, 1000, 0, -40},
{1000, 1250, 0, -50},
{1250, 1600, 0, -65},
},
},
"P4": {
[]diameterTolerance{
{2 /*2.5*/, 10, 0, -4},
{10, 18, 0, -4},
{18, 30, 0, -5},
{30, 50, 0, -6},
{50, 80, 0, -7},
{80, 120, 0, -8},
{120, 180, 0, -10},
{180, 250, 0, -12},
{250, 315, 0, -15},
{315, 400, 0, -19},
{400, 500, 0, -23},
{500, 630, 0, -26},
{630, 800, 0, -34},
},
[]diameterTolerance{
{6, 18, 0, 0}, // nop
{18, 30, 0, -5},
{30, 50, 0, -6},
{50, 80, 0, -7},
{80, 120, 0, -8},
{120, 150, 0, -9},
{150, 180, 0, -10},
{180, 250, 0, -11},
{250, 315, 0, -13},
{315, 400, 0, -15},
{400, 500, 0, -20},
{500, 630, 0, -25},
{630, 800, 0, -28},
{800, 1000, 0, -35},
{1000, 1250, 0, -40},
{1250, 1600, 0, -55},
},
},
"P4S": { // шпиндельные
[]diameterTolerance{
{0, 10, 0, -4},
{10, 18, 0, -4},
{18, 30, 0, -5},
{30, 50, 0, -6},
{50, 80, 0, -7},
{80, 120, 0, -8},
{120, 150, 0, -10},
{150, 180, 0, -10},
{180, 250, 0, -12},
},
[]diameterTolerance{
{18, 30, 0, -5},
{30, 50, 0, -6},
{50, 80, 0, -7},
{80, 120, 0, -8},
{120, 150, 0, -9},
{150, 180, 0, -10},
{180, 250, 0, -11},
{250, 315, 0, -13},
{315, 400, 0, -15},
},
},
"SP": { // двухрядные роликоподшипники с цилиндрическими роликами
[]diameterTolerance{
{18, 30, 0, -6},
{30, 50, 0, -8},
{50, 80, 0, -9},
{80, 120, 0, -10},
{120, 180, 0, -13},
{180, 250, 0, -15},
{250, 315, 0, -18},
{315, 400, 0, -23},
{400, 500, 0, -27},
{500, 630, 0, -30},
{630, 800, 0, -40},
{800, 1000, 0, -50},
{1000, 1250, 0, -65},
},
[]diameterTolerance{
{30, 50, 0, -7},
{50, 80, 0, -9},
{80, 120, 0, -10},
{120, 150, 0, -11},
{150, 180, 0, -13},
{180, 250, 0, -15},
{250, 315, 0, -18},
{315, 400, 0, -20},
{400, 500, 0, -23},
{500, 630, 0, -28},
{630, 800, 0, -35},
{800, 1000, 0, -40},
{1000, 1250, 0, -50},
{1250, 1600, 0, -65},
},
},
"UP": { // двухрядные роликоподшипники с цилиндрическими роликами
[]diameterTolerance{
{18, 30, 0, -5},
{30, 50, 0, -6},
{50, 80, 0, -7},
{80, 120, 0, -8},
{120, 180, 0, -10},
{180, 250, 0, -12},
{250, 315, 0, -15},
{315, 400, 0, -19},
{400, 500, 0, -23},
{500, 630, 0, -26},
{630, 800, 0, -34},
{800, 1000, 0, -40},
{1000, 1250, 0, -55},
},
[]diameterTolerance{
{30, 50, 0, -5},
{50, 80, 0, -6},
{80, 120, 0, -7},
{120, 150, 0, -8},
{150, 180, 0, -9},
{180, 250, 0, -10},
{250, 315, 0, -12},
{315, 400, 0, -14},
{400, 500, 0, -17},
{500, 630, 0, -20},
{630, 800, 0, -25},
{800, 1000, 0, -30},
{1000, 1250, 0, -36},
{1250, 1600, 0, -48},
},
},
},
1: { // допуски конических роликоподшипников
"PN": {},
"P6X": {},
"P5": {},
"P4": {},
},
2: { // допуски упорных подшипников
"PN": {},
"P6": {},
"P5": {},
"P4": {},
"SP": { // радиально-упорные шарикоподшипники, серии 2344 и 2347
[]diameterTolerance{
{0, 18, 0, 0}, // nop
{18, 30, 0, -8},
{30, 50, 0, -10},
{50, 80, 0, -12},
{80, 120, 0, -15},
{120, 180, 0, -18},
{180, 250, 0, -22},
{250, 315, 0, -25},
{315, 400, 0, -30},
// >400 nop
},
[]diameterTolerance{
{18, 30, 0, 0}, // nop
{30, 50, 0, 0}, // nop
{50, 80, -24, -43},
{80, 120, -28, -50},
{120, 180, -33, -58},
{180, 250, -37, -66},
{250, 315, -41, -73},
{315, 400, -46, -82},
{400, 500, -50, -90},
{500, 630, -55, -99},
// >630 nop
},
},
},
}
// clearanceRange is a (Min, Max) internal-clearance interval
// (presumably in µm — confirm against the FAG catalogue).
type clearanceRange struct {
	Min int
	Max int
}

// clearance is one row of a clearance table: for bore diameters in the
// half-open range (DiameterLow, DiameterHigh] (see findClearance),
// Clearance maps a clearance-group designation (e.g. "C2", "CN", "C3",
// "C4") to its range.
type clearance struct {
	DiameterLow  int
	DiameterHigh int
	Clearance    map[string]clearanceRange
}
/*
0 радиальный зазор шарикоподшипников с цилиндрическим отверстием
1 радиальный зазор сферических шарикоподшипников с цилиндрическим отверстием
2 -|- с коническим отверстием
3 осевой зазор двухрядных радиально-упорных шарикоподшипников серий 32, 32B, 33B
4 -|- серии 33DA
5 осевой зазор подшипников с четырёхточечным контактом
6 радиальный зазор однорядных и двухрядных роликоподшипников с цилиндрическими роликами с цилиндрическим отверстием
7 -|- с коническим отверстием
8 радиальный зазор сферических роликоподшипников с цилиндрическим отверстием
9 -|- с коническим отверстием
10 радиальный зазор сферических подшипников с бочкообразными роликами с цилиндрическим отверстием
11 -|- с коническим отверстием
*/
var gClearances = map[int][]clearance{
0: { // радиальный зазор шарикоподшипников с цилиндрическим отверстием
{2 /*2.5*/, 6, map[string]clearanceRange{
"C2": {0, 7},
"CN": {2, 13},
"C3": {8, 23},
"C4": {}, // nop
}},
{6, 10, map[string]clearanceRange{
"C2": {0, 7},
"CN": {2, 13},
"C3": {8, 23},
"C4": {14, 29},
}},
{10, 18, map[string]clearanceRange{
"C2": {0, 9},
"CN": {3, 18},
"C3": {11, 25},
"C4": {18, 33},
}},
{18, 24, map[string]clearanceRange{
"C2": {0, 10},
"CN": {5, 20},
"C3": {13, 28},
"C4": {20, 36},
}},
{24, 30, map[string]clearanceRange{
"C2": {1, 11},
"CN": {5, 20},
"C3": {13, 28},
"C4": {23, 41},
}},
{30, 40, map[string]clearanceRange{
"C2": {1, 11},
"CN": {6, 20},
"C3": {15, 33},
"C4": {28, 46},
}},
{40, 50, map[string]clearanceRange{
"C2": {1, 11},
"CN": {6, 23},
"C3": {18, 36},
"C4": {30, 51},
}},
{50, 65, map[string]clearanceRange{
"C2": {1, 15},
"CN": {8, 28},
"C3": {23, 43},
"C4": {38, 61},
}},
{65, 80, map[string]clearanceRange{
"C2": {1, 15},
"CN": {10, 30},
"C3": {25, 51},
"C4": {46, 71},
}},
{80, 100, map[string]clearanceRange{
"C2": {1, 18},
"CN": {12, 36},
"C3": {30, 58},
"C4": {53, 84},
}},
{100, 120, map[string]clearanceRange{
"C2": {2, 20},
"CN": {15, 41},
"C3": {36, 66},
"C4": {61, 97},
}},
{120, 140, map[string]clearanceRange{
"C2": {2, 23},
"CN": {18, 48},
"C3": {41, 81},
"C4": {71, 114},
}},
{140, 160, map[string]clearanceRange{
"C2": {2, 23},
"CN": {18, 53},
"C3": {46, 91},
"C4": {81, 130},
}},
{160, 180, map[string]clearanceRange{
"C2": {2, 25},
"CN": {20, 61},
"C3": {53, 102},
"C4": {91, 147},
}},
{180, 200, map[string]clearanceRange{
"C2": {2, 30},
"CN": {25, 71},
"C3": {63, 117},
"C4": {107, 163},
}},
{200, 225, map[string]clearanceRange{
"C2": {4, 32},
"CN": {28, 82},
"C3": {73, 132},
"C4": {120, 187},
}},
{225, 250, map[string]clearanceRange{
"C2": {4, 36},
"CN": {31, 92},
"C3": {87, 152},
"C4": {140, 217},
}},
{250, 280, map[string]clearanceRange{
"C2": {4, 39},
"CN": {36, 97},
"C3": {97, 162},
"C4": {152, 237},
}},
{280, 315, map[string]clearanceRange{
"C2": {8, 45},
"CN": {42, 110},
"C3": {110, 180},
"C4": {175, 260},
}},
{315, 355, map[string]clearanceRange{
"C2": {8, 50},
"CN": {50, 120},
"C3": {120, 200},
"C4": {200, 290},
}},
{355, 400, map[string]clearanceRange{
"C2": {8, 60},
"CN": {60, 140},
"C3": {140, 230},
"C4": {230, 330},
}},
{400, 450, map[string]clearanceRange{
"C2": {10, 70},
"CN": {70, 160},
"C3": {160, 260},
"C4": {260, 370},
}},
{450, 500, map[string]clearanceRange{
"C2": {10, 80},
"CN": {80, 180},
"C3": {180, 290},
"C4": {290, 410},
}},
{500, 560, map[string]clearanceRange{
"C2": {20, 90},
"CN": {90, 200},
"C3": {200, 320},
"C4": {320, 460},
}},
{560, 630, map[string]clearanceRange{
"C2": {20, 100},
"CN": {100, 220},
"C3": {220, 350},
"C4": {350, 510},
}},
{630, 710, map[string]clearanceRange{
"C2": {30, 120},
"CN": {120, 250},
"C3": {250, 390},
"C4": {390, 560},
}},
{710, 800, map[string]clearanceRange{
"C2": {30, 130},
"CN": {130, 280},
"C3": {280, 440},
"C4": {440, 620},
}},
{800, 900, map[string]clearanceRange{
"C2": {30, 150},
"CN": {150, 310},
"C3": {310, 490},
"C4": {490, 690},
}},
{900, 1000, map[string]clearanceRange{
"C2": {40, 160},
"CN": {160, 340},
"C3": {340, 540},
"C4": {540, 760},
}},
{1000, 1120, map[string]clearanceRange{
"C2": {40, 170},
"CN": {170, 370},
"C3": {370, 590},
"C4": {590, 840},
}},
{1120, 1250, map[string]clearanceRange{
"C2": {40, 180},
"CN": {180, 400},
"C3": {400, 640},
"C4": {640, 910},
}},
{1250, 1400, map[string]clearanceRange{
"C2": {60, 210},
"CN": {210, 440},
"C3": {440, 700},
"C4": {700, 1000},
}},
{1400, 1600, map[string]clearanceRange{
"C2": {60, 230},
"CN": {230, 480},
"C3": {480, 770},
"C4": {770, 1100},
}},
},
1: { // радиальный зазор сферических шарикоподшипников с цилиндрическим отверстием
{0, 6, map[string]clearanceRange{
"C2": {1, 8},
"CN": {5, 15},
"C3": {10, 20},
"C4": {15, 25},
}},
{6, 10, map[string]clearanceRange{
"C2": {2, 9},
"CN": {6, 17},
"C3": {12, 25},
"C4": {19, 33},
}},
{10, 14, map[string]clearanceRange{
"C2": {2, 10},
"CN": {6, 19},
"C3": {13, 26},
"C4": {21, 35},
}},
{14, 18, map[string]clearanceRange{
"C2": {3, 12},
"CN": {8, 21},
"C3": {15, 28},
"C4": {23, 37},
}},
{18, 24, map[string]clearanceRange{
"C2": {4, 14},
"CN": {10, 23},
"C3": {17, 30},
"C4": {25, 39},
}},
{24, 30, map[string]clearanceRange{
"C2": {5, 16},
"CN": {11, 24},
"C3": {19, 35},
"C4": {29, 46},
}},
{30, 40, map[string]clearanceRange{
"C2": {6, 18},
"CN": {13, 29},
"C3": {23, 40},
"C4": {34, 53},
}},
{40, 50, map[string]clearanceRange{
"C2": {6, 19},
"CN": {14, 31},
"C3": {25, 44},
"C4": {37, 57},
}},
{50, 65, map[string]clearanceRange{
"C2": {7, 21},
"CN": {16, 36},
"C3": {30, 50},
"C4": {45, 69},
}},
{65, 80, map[string]clearanceRange{
"C2": {8, 24},
"CN": {18, 40},
"C3": {35, 60},
"C4": {54, 83},
}},
{80, 100, map[string]clearanceRange{
"C2": {9, 27},
"CN": {22, 48},
"C3": {42, 70},
"C4": {64, 96},
}},
{100, 120, map[string]clearanceRange{
"C2": {10, 31},
"CN": {25, 56},
"C3": {50, 83},
"C4": {75, 114},
}},
{120, 140, map[string]clearanceRange{
"C2": {10, 38},
"CN": {30, 68},
"C3": {60, 100},
"C4": {90, 135},
}},
{140, 160, map[string]clearanceRange{
"C2": {15, 44},
"CN": {35, 80},
"C3": {70, 120},
"C4": {110, 161},
}},
},
2: { // радиальный зазор сферических шарикоподшипников с коническим отверстием
{0, 6, map[string]clearanceRange{}}, // nop
{6, 10, map[string]clearanceRange{}}, // nop
{10, 14, map[string]clearanceRange{}}, // nop
{14, 18, map[string]clearanceRange{}}, // nop
{18, 24, map[string]clearanceRange{
"C2": {7, 17},
"CN": {13, 26},
"C3": {20, 33},
"C4": {28, 42},
}},
{24, 30, map[string]clearanceRange{
"C2": {9, 20},
"CN": {15, 28},
"C3": {23, 39},
"C4": {33, 50},
}},
{30, 40, map[string]clearanceRange{
"C2": {12, 24},
"CN": {19, 35},
"C3": {29, 46},
"C4": {40, 59},
}},
{40, 50, map[string]clearanceRange{
"C2": {14, 27},
"CN": {22, 39},
"C3": {33, 52},
"C4": {45, 65},
}},
{50, 65, map[string]clearanceRange{
"C2": {18, 32},
"CN": {27, 47},
"C3": {41, 61},
"C4": {56, 80},
}},
{65, 80, map[string]clearanceRange{
"C2": {23, 39},
"CN": {35, 57},
"C3": {50, 75},
"C4": {69, 98},
}},
{80, 100, map[string]clearanceRange{
"C2": {29, 47},
"CN": {42, 68},
"C3": {62, 90},
"C4": {84, 116},
}},
{100, 120, map[string]clearanceRange{
"C2": {35, 56},
"CN": {50, 81},
"C3": {75, 108},
"C4": {100, 139},
}},
{120, 140, map[string]clearanceRange{
"C2": {40, 68},
"CN": {60, 98},
"C3": {90, 130},
"C4": {120, 165},
}},
{140, 160, map[string]clearanceRange{
"C2": {45, 74},
"CN": {65, 110},
"C3": {100, 150},
"C4": {140, 191},
}},
},
3: { // осевой зазор двухрядных радиально-упорных шарикоподшипников серий 32, 32B, 33B
{6, 10, map[string]clearanceRange{
"C2": {1, 11},
"CN": {5, 21},
"C3": {12, 28},
"C4": {25, 45},
}},
{10, 18, map[string]clearanceRange{
"C2": {1, 12},
"CN": {6, 23},
"C3": {13, 31},
"C4": {27, 47},
}},
{18, 24, map[string]clearanceRange{
"C2": {2, 14},
"CN": {7, 25},
"C3": {16, 34},
"C4": {28, 48},
}},
{24, 30, map[string]clearanceRange{
"C2": {2, 15},
"CN": {8, 27},
"C3": {18, 37},
"C4": {30, 50},
}},
{30, 40, map[string]clearanceRange{
"C2": {2, 16},
"CN": {9, 29},
"C3": {21, 40},
"C4": {33, 54},
}},
{40, 50, map[string]clearanceRange{
"C2": {2, 18},
"CN": {11, 33},
"C3": {23, 44},
"C4": {36, 58},
}},
{50, 65, map[string]clearanceRange{
"C2": {3, 22},
"CN": {13, 36},
"C3": {26, 48},
"C4": {40, 63},
}},
{65, 80, map[string]clearanceRange{
"C2": {3, 24},
"CN": {15, 40},
"C3": {30, 54},
"C4": {46, 71},
}},
{80, 100, map[string]clearanceRange{
"C2": {3, 26},
"CN": {18, 46},
"C3": {35, 63},
"C4": {55, 83},
}},
{100, 120, map[string]clearanceRange{
"C2": {4, 30},
"CN": {22, 53},
"C3": {42, 73},
"C4": {65, 96},
}},
{120, 140, map[string]clearanceRange{
"C2": {4, 34},
"CN": {25, 59},
"C3": {48, 82},
"C4": {74, 108},
}},
},
4: { // осевой зазор двухрядных радиально-упорных шарикоподшипников серии 33DA
{6, 10, map[string]clearanceRange{
"C2": {5, 22},
"CN": {11, 28},
"C3": {20, 37},
}},
{10, 18, map[string]clearanceRange{
"C2": {6, 24},
"CN": {13, 31},
"C3": {23, 41},
}},
{18, 24, map[string]clearanceRange{
"C2": {7, 25},
"CN": {14, 32},
"C3": {24, 42},
}},
{24, 30, map[string]clearanceRange{
"C2": {8, 27},
"CN": {16, 35},
"C3": {27, 46},
}},
{30, 40, map[string]clearanceRange{
"C2": {9, 29},
"CN": {18, 38},
"C3": {30, 50},
}},
{40, 50, map[string]clearanceRange{
"C2": {11, 33},
"CN": {22, 44},
"C3": {36, 58},
}},
{50, 65, map[string]clearanceRange{
"C2": {13, 36},
"CN": {25, 48},
"C3": {40, 63},
}},
{65, 80, map[string]clearanceRange{
"C2": {15, 40},
"CN": {29, 54},
"C3": {46, 71},
}},
{80, 100, map[string]clearanceRange{
"C2": {18, 46},
"CN": {35, 63},
"C3": {55, 83},
}},
{100, 120, map[string]clearanceRange{
"C2": {22, 53},
"CN": {42, 73},
"C3": {65, 96},
}},
{120, 140, map[string]clearanceRange{
"C2": {25, 59},
"CN": {48, 82},
"C3": {74, 108},
}},
},
5: { // осевой зазор подшипников с четырёхточечным контактом
{0, 18, map[string]clearanceRange{
"C2": {20, 60},
"CN": {50, 90},
"C3": {80, 120},
}},
{18, 40, map[string]clearanceRange{
"C2": {30, 70},
"CN": {60, 110},
"C3": {100, 150},
}},
{40, 60, map[string]clearanceRange{
"C2": {40, 90},
"CN": {80, 130},
"C3": {120, 170},
}},
{60, 80, map[string]clearanceRange{
"C2": {50, 100},
"CN": {90, 140},
"C3": {130, 180},
}},
{80, 100, map[string]clearanceRange{
"C2": {60, 120},
"CN": {100, 160},
"C3": {140, 200},
}},
{100, 140, map[string]clearanceRange{
"C2": {70, 140},
"CN": {120, 180},
"C3": {160, 220},
}},
{140, 180, map[string]clearanceRange{
"C2": {80, 160},
"CN": {140, 200},
"C3": {180, 240},
}},
{180, 220, map[string]clearanceRange{
"C2": {100, 180},
"CN": {160, 220},
"C3": {200, 260},
}},
{220, 260, map[string]clearanceRange{
"C2": {120, 200},
"CN": {180, 240},
"C3": {220, 300},
}},
{260, 300, map[string]clearanceRange{
"C2": {140, 220},
"CN": {200, 280},
"C3": {260, 340},
}},
{300, 355, map[string]clearanceRange{
"C2": {160, 240},
"CN": {220, 300},
"C3": {280, 360},
}},
{355, 400, map[string]clearanceRange{
"C2": {180, 270},
"CN": {250, 330},
"C3": {310, 390},
}},
{400, 450, map[string]clearanceRange{
"C2": {200, 290},
"CN": {270, 360},
"C3": {340, 430},
}},
{450, 500, map[string]clearanceRange{
"C2": {220, 310},
"CN": {290, 390},
"C3": {370, 470},
}},
{500, 560, map[string]clearanceRange{
"C2": {240, 330},
"CN": {310, 420},
"C3": {400, 510},
}},
{560, 630, map[string]clearanceRange{
"C2": {260, 360},
"CN": {340, 450},
"C3": {430, 550},
}},
{630, 710, map[string]clearanceRange{
"C2": {280, 390},
"CN": {370, 490},
"C3": {470, 590},
}},
{710, 800, map[string]clearanceRange{
"C2": {300, 420},
"CN": {400, 540},
"C3": {520, 660},
}},
{800, 900, map[string]clearanceRange{
"C2": {330, 460},
"CN": {440, 590},
"C3": {570, 730},
}},
{900, 1000, map[string]clearanceRange{
"C2": {360, 500},
"CN": {480, 630},
"C3": {620, 780},
}},
},
6: { // радиальный зазор однорядных и двухрядных роликоподшипников с цилиндрическими роликами с цилиндрическим отверстием
{0, 24, map[string]clearanceRange{
"C1NA": {5, 15},
"C2": {0, 25},
"CN": {20, 45},
"C3": {35, 60},
"C4": {50, 75},
}},
{24, 30, map[string]clearanceRange{
"C1NA": {5, 15},
"C2": {0, 25},
"CN": {20, 45},
"C3": {35, 60},
"C4": {50, 75},
}},
{30, 40, map[string]clearanceRange{
"C1NA": {5, 15},
"C2": {5, 30},
"CN": {25, 50},
"C3": {45, 70},
"C4": {60, 85},
}},
{40, 50, map[string]clearanceRange{
"C1NA": {5, 18},
"C2": {5, 35},
"CN": {30, 60},
"C3": {50, 80},
"C4": {70, 100},
}},
{50, 65, map[string]clearanceRange{
"C1NA": {5, 20},
"C2": {10, 40},
"CN": {40, 70},
"C3": {60, 90},
"C4": {80, 110},
}},
{65, 80, map[string]clearanceRange{
"C1NA": {10, 25},
"C2": {10, 45},
"CN": {40, 75},
"C3": {65, 100},
"C4": {90, 125},
}},
{80, 100, map[string]clearanceRange{
"C1NA": {10, 30},
"C2": {15, 50},
"CN": {50, 85},
"C3": {75, 110},
"C4": {105, 140},
}},
{100, 120, map[string]clearanceRange{
"C1NA": {10, 30},
"C2": {15, 55},
"CN": {50, 90},
"C3": {75, 125},
"C4": {125, 165},
}},
{120, 140, map[string]clearanceRange{
"C1NA": {10, 35},
"C2": {15, 60},
"CN": {60, 105},
"C3": {100, 145},
"C4": {145, 190},
}},
{140, 160, map[string]clearanceRange{
"C1NA": {10, 35},
"C2": {20, 70},
"CN": {70, 120},
"C3": {115, 165},
"C4": {165, 215},
}},
{160, 180, map[string]clearanceRange{
"C1NA": {10, 40},
"C2": {25, 75},
"CN": {75, 125},
"C3": {120, 170},
"C4": {170, 220},
}},
{180, 200, map[string]clearanceRange{
"C1NA": {15, 45},
"C2": {35, 90},
"CN": {90, 145},
"C3": {140, 195},
"C4": {195, 250},
}},
{200, 225, map[string]clearanceRange{
"C1NA": {15, 50},
"C2": {45, 105},
"CN": {105, 165},
"C3": {160, 220},
"C4": {220, 280},
}},
{225, 250, map[string]clearanceRange{
"C1NA": {15, 50},
"C2": {45, 110},
"CN": {110, 175},
"C3": {170, 235},
"C4": {235, 300},
}},
{250, 280, map[string]clearanceRange{
"C1NA": {20, 55},
"C2": {55, 125},
"CN": {125, 195},
"C3": {190, 260},
"C4": {260, 330},
}},
{280, 315, map[string]clearanceRange{
"C1NA": {20, 60},
"C2": {55, 130},
"CN": {130, 205},
"C3": {200, 275},
"C4": {275, 350},
}},
{315, 355, map[string]clearanceRange{
"C1NA": {20, 65},
"C2": {65, 145},
"CN": {145, 225},
"C3": {225, 305},
"C4": {305, 385},
}},
{355, 400, map[string]clearanceRange{
"C1NA": {25, 75},
"C2": {100, 190},
"CN": {190, 280},
"C3": {280, 370},
"C4": {370, 460},
}},
{400, 450, map[string]clearanceRange{
"C1NA": {25, 85},
"C2": {110, 210},
"CN": {210, 310},
"C3": {310, 410},
"C4": {410, 510},
}},
{450, 500, map[string]clearanceRange{
"C1NA": {25, 95},
"C2": {110, 220},
"CN": {220, 330},
"C3": {330, 440},
"C4": {440, 550},
}},
{500, 560, map[string]clearanceRange{
"C1NA": {25, 100},
"C2": {120, 240},
"CN": {240, 360},
"C3": {360, 480},
"C4": {480, 600},
}},
{560, 630, map[string]clearanceRange{
"C1NA": {30, 110},
"C2": {140, 260},
"CN": {260, 380},
"C3": {380, 500},
"C4": {500, 620},
}},
{630, 710, map[string]clearanceRange{
"C1NA": {30, 130},
"C2": {145, 285},
"CN": {285, 425},
"C3": {425, 565},
"C4": {565, 705},
}},
{710, 800, map[string]clearanceRange{
"C1NA": {35, 140},
"C2": {150, 310},
"CN": {310, 470},
"C3": {470, 630},
"C4": {630, 790},
}},
{800, 900, map[string]clearanceRange{
"C1NA": {35, 160},
"C2": {180, 350},
"CN": {350, 520},
"C3": {520, 690},
"C4": {690, 860},
}},
{900, 1000, map[string]clearanceRange{
"C1NA": {35, 180},
"C2": {200, 390},
"CN": {390, 580},
"C3": {580, 770},
"C4": {770, 960},
}},
{1000, 1120, map[string]clearanceRange{
"C1NA": {50, 200},
"C2": {220, 430},
"CN": {430, 640},
"C3": {640, 850},
"C4": {850, 1060},
}},
{1120, 1250, map[string]clearanceRange{
"C1NA": {60, 220},
"C2": {230, 470},
"CN": {470, 710},
"C3": {710, 950},
"C4": {950, 1190},
}},
{1250, 1400, map[string]clearanceRange{
"C1NA": {60, 240},
"C2": {270, 530},
"CN": {530, 790},
"C3": {790, 1050},
"C4": {1050, 1310},
}},
{1400, 1600, map[string]clearanceRange{
"C1NA": {70, 270},
"C2": {330, 610},
"CN": {610, 890},
"C3": {890, 1170},
"C4": {1170, 1450},
}},
{1600, 1800, map[string]clearanceRange{
"C1NA": {80, 300},
"C2": {380, 700},
"CN": {700, 1020},
"C3": {1020, 1340},
"C4": {1340, 1660},
}},
{1800, 2000, map[string]clearanceRange{
"C1NA": {100, 320},
"C2": {400, 760},
"CN": {760, 1120},
"C3": {1120, 1480},
"C4": {1480, 1840},
}},
},
7: { // радиальный зазор однорядных и двухрядных роликоподшипников с цилиндрическими роликами с коническим отверстием
{0, 24, map[string]clearanceRange{
"C1NA": {10, 20},
"C2": {15, 40},
"CN": {30, 55},
"C3": {40, 65},
"C4": {50, 75},
}},
{24, 30, map[string]clearanceRange{
"C1NA": {15, 25},
"C2": {20, 45},
"CN": {35, 60},
"C3": {45, 70},
"C4": {55, 80},
}},
{30, 40, map[string]clearanceRange{
"C1NA": {15, 25},
"C2": {20, 45},
"CN": {40, 65},
"C3": {55, 80},
"C4": {70, 95},
}},
{40, 50, map[string]clearanceRange{
"C1NA": {17, 30},
"C2": {25, 55},
"CN": {45, 75},
"C3": {60, 90},
"C4": {75, 105},
}},
{50, 65, map[string]clearanceRange{
"C1NA": {20, 35},
"C2": {30, 60},
"CN": {50, 80},
"C3": {70, 100},
"C4": {90, 120},
}},
{65, 80, map[string]clearanceRange{
"C1NA": {25, 40},
"C2": {35, 70},
"CN": {60, 95},
"C3": {85, 120},
"C4": {110, 145},
}},
{80, 100, map[string]clearanceRange{
"C1NA": {35, 55},
"C2": {40, 75},
"CN": {70, 105},
"C3": {95, 130},
"C4": {120, 155},
}},
{100, 120, map[string]clearanceRange{
"C1NA": {40, 60},
"C2": {50, 90},
"CN": {90, 130},
"C3": {115, 155},
"C4": {140, 180},
}},
{120, 140, map[string]clearanceRange{
"C1NA": {45, 70},
"C2": {55, 100},
"CN": {100, 145},
"C3": {130, 175},
"C4": {160, 205},
}},
{140, 160, map[string]clearanceRange{
"C1NA": {50, 75},
"C2": {60, 110},
"CN": {110, 160},
"C3": {145, 195},
"C4": {180, 230},
}},
{160, 180, map[string]clearanceRange{
"C1NA": {55, 85},
"C2": {75, 125},
"CN": {125, 175},
"C3": {160, 210},
"C4": {195, 245},
}},
{180, 200, map[string]clearanceRange{
"C1NA": {60, 90},
"C2": {85, 140},
"CN": {140, 195},
"C3": {180, 235},
"C4": {220, 275},
}},
{200, 225, map[string]clearanceRange{
"C1NA": {60, 95},
"C2": {95, 155},
"CN": {155, 215},
"C3": {200, 260},
"C4": {245, 305},
}},
{225, 250, map[string]clearanceRange{
"C1NA": {65, 100},
"C2": {105, 170},
"CN": {170, 235},
"C3": {220, 285},
"C4": {270, 335},
}},
{250, 280, map[string]clearanceRange{
"C1NA": {75, 110},
"C2": {115, 185},
"CN": {185, 255},
"C3": {240, 310},
"C4": {295, 365},
}},
{280, 315, map[string]clearanceRange{
"C1NA": {80, 120},
"C2": {130, 205},
"CN": {205, 280},
"C3": {265, 340},
"C4": {325, 400},
}},
{315, 355, map[string]clearanceRange{
"C1NA": {90, 135},
"C2": {145, 225},
"CN": {225, 305},
"C3": {290, 370},
"C4": {355, 435},
}},
{355, 400, map[string]clearanceRange{
"C1NA": {100, 150},
"C2": {165, 255},
"CN": {255, 345},
"C3": {330, 420},
"C4": {405, 495},
}},
{400, 450, map[string]clearanceRange{
"C1NA": {110, 170},
"C2": {185, 285},
"CN": {285, 385},
"C3": {370, 470},
"C4": {455, 555},
}},
{450, 500, map[string]clearanceRange{
"C1NA": {120, 190},
"C2": {205, 315},
"CN": {315, 425},
"C3": {410, 520},
"C4": {505, 615},
}},
{500, 560, map[string]clearanceRange{
"C1NA": {130, 210},
"C2": {230, 350},
"CN": {350, 470},
"C3": {455, 575},
"C4": {560, 680},
}},
{560, 630, map[string]clearanceRange{
"C1NA": {140, 230},
"C2": {260, 380},
"CN": {380, 500},
"C3": {500, 620},
"C4": {620, 740},
}},
{630, 710, map[string]clearanceRange{
"C1NA": {160, 260},
"C2": {295, 435},
"CN": {435, 575},
"C3": {565, 705},
"C4": {695, 835},
}},
{710, 800, map[string]clearanceRange{
"C1NA": {170, 290},
"C2": {325, 485},
"CN": {485, 645},
"C3": {630, 790},
"C4": {775, 935},
}},
{800, 900, map[string]clearanceRange{
"C1NA": {190, 330},
"C2": {370, 540},
"CN": {540, 710},
"C3": {700, 870},
"C4": {860, 1030},
}},
{900, 1000, map[string]clearanceRange{
"C1NA": {210, 360},
"C2": {410, 600},
"CN": {600, 790},
"C3": {780, 970},
"C4": {960, 1150},
}},
{1000, 1120, map[string]clearanceRange{
"C1NA": {230, 400},
"C2": {455, 665},
"CN": {665, 875},
"C3": {865, 1075},
"C4": {1065, 1275},
}},
{1120, 1250, map[string]clearanceRange{
"C1NA": {250, 440},
"C2": {490, 730},
"CN": {730, 970},
"C3": {960, 1200},
"C4": {1200, 1440},
}},
{1250, 1400, map[string]clearanceRange{
"C1NA": {270, 460},
"C2": {550, 810},
"CN": {810, 1070},
"C3": {1070, 1330},
"C4": {1330, 1590},
}},
{1400, 1600, map[string]clearanceRange{
"C1NA": {300, 500},
"C2": {640, 920},
"CN": {920, 1200},
"C3": {1200, 1480},
"C4": {1480, 1760},
}},
{1600, 1800, map[string]clearanceRange{
"C1NA": {320, 530},
"C2": {700, 1020},
"CN": {1020, 1340},
"C3": {1340, 1660},
"C4": {1660, 1980},
}},
{1800, 2000, map[string]clearanceRange{
"C1NA": {340, 560},
"C2": {760, 1120},
"CN": {1120, 1480},
"C3": {1480, 1840},
"C4": {1840, 2200},
}},
},
8: { // радиальный зазор сферических роликоподшипников с цилиндрическим отверстием
{18, 24, map[string]clearanceRange{
"C2": {10, 20},
"CN": {20, 35},
"C3": {35, 45},
"C4": {45, 60},
}},
{24, 30, map[string]clearanceRange{
"C2": {15, 25},
"CN": {25, 40},
"C3": {40, 55},
"C4": {55, 75},
}},
{30, 40, map[string]clearanceRange{
"C2": {15, 30},
"CN": {30, 45},
"C3": {45, 60},
"C4": {60, 80},
}},
{40, 50, map[string]clearanceRange{
"C2": {20, 35},
"CN": {35, 55},
"C3": {55, 75},
"C4": {75, 100},
}},
{50, 65, map[string]clearanceRange{
"C2": {20, 40},
"CN": {40, 65},
"C3": {65, 90},
"C4": {90, 120},
}},
{65, 80, map[string]clearanceRange{
"C2": {30, 50},
"CN": {50, 80},
"C3": {80, 110},
"C4": {110, 145},
}},
{80, 100, map[string]clearanceRange{
"C2": {35, 60},
"CN": {60, 100},
"C3": {100, 135},
"C4": {135, 180},
}},
{100, 120, map[string]clearanceRange{
"C2": {40, 75},
"CN": {75, 120},
"C3": {120, 160},
"C4": {160, 210},
}},
{120, 140, map[string]clearanceRange{
"C2": {50, 95},
"CN": {95, 145},
"C3": {145, 190},
"C4": {190, 240},
}},
{140, 160, map[string]clearanceRange{
"C2": {60, 110},
"CN": {110, 170},
"C3": {170, 220},
"C4": {220, 280},
}},
{160, 180, map[string]clearanceRange{
"C2": {65, 120},
"CN": {120, 180},
"C3": {180, 240},
"C4": {240, 310},
}},
{180, 200, map[string]clearanceRange{
"C2": {70, 130},
"CN": {130, 200},
"C3": {200, 260},
"C4": {260, 340},
}},
{200, 225, map[string]clearanceRange{
"C2": {80, 140},
"CN": {140, 220},
"C3": {220, 290},
"C4": {290, 380},
}},
{225, 250, map[string]clearanceRange{
"C2": {90, 150},
"CN": {150, 240},
"C3": {240, 320},
"C4": {320, 420},
}},
{250, 280, map[string]clearanceRange{
"C2": {100, 170},
"CN": {170, 260},
"C3": {260, 350},
"C4": {350, 460},
}},
{280, 315, map[string]clearanceRange{
"C2": {110, 190},
"CN": {190, 280},
"C3": {280, 370},
"C4": {370, 500},
}},
{315, 355, map[string]clearanceRange{
"C2": {120, 200},
"CN": {200, 310},
"C3": {310, 410},
"C4": {410, 550},
}},
{355, 400, map[string]clearanceRange{
"C2": {130, 220},
"CN": {220, 340},
"C3": {340, 450},
"C4": {450, 600},
}},
{400, 450, map[string]clearanceRange{
"C2": {140, 240},
"CN": {240, 370},
"C3": {370, 500},
"C4": {500, 660},
}},
{450, 500, map[string]clearanceRange{
"C2": {140, 260},
"CN": {260, 410},
"C3": {410, 550},
"C4": {550, 720},
}},
{500, 560, map[string]clearanceRange{
"C2": {150, 280},
"CN": {280, 440},
"C3": {440, 600},
"C4": {600, 780},
}},
{560, 630, map[string]clearanceRange{
"C2": {170, 310},
"CN": {310, 480},
"C3": {480, 650},
"C4": {650, 850},
}},
{630, 710, map[string]clearanceRange{
"C2": {190, 350},
"CN": {350, 530},
"C3": {530, 700},
"C4": {700, 920},
}},
{710, 800, map[string]clearanceRange{
"C2": {210, 390},
"CN": {390, 580},
"C3": {580, 770},
"C4": {770, 1010},
}},
{800, 900, map[string]clearanceRange{
"C2": {230, 430},
"CN": {430, 650},
"C3": {650, 860},
"C4": {860, 1120},
}},
{900, 1000, map[string]clearanceRange{
"C2": {260, 480},
"CN": {480, 710},
"C3": {710, 930},
"C4": {930, 1220},
}},
{1000, 1120, map[string]clearanceRange{
"C2": {290, 530},
"CN": {530, 770},
"C3": {770, 1050},
"C4": {1050, 1430},
}},
{1120, 1250, map[string]clearanceRange{
"C2": {320, 580},
"CN": {580, 840},
"C3": {840, 1140},
"C4": {1140, 1560},
}},
{1250, 1400, map[string]clearanceRange{
"C2": {350, 630},
"CN": {630, 910},
"C3": {910, 1240},
"C4": {1240, 1700},
}},
{1400, 1600, map[string]clearanceRange{
"C2": {380, 700},
"CN": {700, 1020},
"C3": {1020, 1390},
"C4": {1390, 1890},
}},
},
9: { // радиальный зазор сферических роликоподшипников с коническим отверстием
{18, 24, map[string]clearanceRange{
"C2": {15, 25},
"CN": {25, 35},
"C3": {35, 45},
"C4": {45, 60},
}},
{24, 30, map[string]clearanceRange{
"C2": {20, 30},
"CN": {30, 40},
"C3": {40, 55},
"C4": {55, 75},
}},
{30, 40, map[string]clearanceRange{
"C2": {25, 35},
"CN": {35, 50},
"C3": {50, 65},
"C4": {65, 85},
}},
{40, 50, map[string]clearanceRange{
"C2": {30, 45},
"CN": {45, 60},
"C3": {60, 80},
"C4": {80, 100},
}},
{50, 65, map[string]clearanceRange{
"C2": {40, 55},
"CN": {55, 75},
"C3": {75, 95},
"C4": {95, 120},
}},
{65, 80, map[string]clearanceRange{
"C2": {50, 70},
"CN": {70, 95},
"C3": {95, 120},
"C4": {120, 150},
}},
{80, 100, map[string]clearanceRange{
"C2": {55, 80},
"CN": {80, 110},
"C3": {110, 140},
"C4": {140, 180},
}},
{100, 120, map[string]clearanceRange{
"C2": {65, 100},
"CN": {100, 135},
"C3": {135, 170},
"C4": {170, 220},
}},
{120, 140, map[string]clearanceRange{
"C2": {80, 120},
"CN": {120, 160},
"C3": {160, 200},
"C4": {200, 260},
}},
{140, 160, map[string]clearanceRange{
"C2": {90, 130},
"CN": {130, 180},
"C3": {180, 230},
"C4": {230, 300},
}},
{160, 180, map[string]clearanceRange{
"C2": {100, 140},
"CN": {140, 200},
"C3": {200, 260},
"C4": {260, 340},
}},
{180, 200, map[string]clearanceRange{
"C2": {110, 160},
"CN": {160, 220},
"C3": {220, 290},
"C4": {290, 370},
}},
{200, 225, map[string]clearanceRange{
"C2": {120, 180},
"CN": {180, 250},
"C3": {250, 320},
"C4": {320, 410},
}},
{225, 250, map[string]clearanceRange{
"C2": {140, 200},
"CN": {200, 270},
"C3": {270, 350},
"C4": {350, 450},
}},
{250, 280, map[string]clearanceRange{
"C2": {150, 220},
"CN": {220, 300},
"C3": {300, 390},
"C4": {390, 490},
}},
{280, 315, map[string]clearanceRange{
"C2": {170, 240},
"CN": {240, 330},
"C3": {330, 430},
"C4": {430, 540},
}},
{315, 355, map[string]clearanceRange{
"C2": {190, 270},
"CN": {270, 360},
"C3": {360, 470},
"C4": {470, 590},
}},
{355, 400, map[string]clearanceRange{
"C2": {210, 300},
"CN": {300, 400},
"C3": {400, 520},
"C4": {520, 650},
}},
{400, 450, map[string]clearanceRange{
"C2": {230, 330},
"CN": {330, 440},
"C3": {440, 570},
"C4": {570, 720},
}},
{450, 500, map[string]clearanceRange{
"C2": {260, 370},
"CN": {370, 490},
"C3": {490, 630},
"C4": {630, 790},
}},
{500, 560, map[string]clearanceRange{
"C2": {290, 410},
"CN": {410, 540},
"C3": {540, 680},
"C4": {680, 870},
}},
{560, 630, map[string]clearanceRange{
"C2": {320, 460},
"CN": {460, 600},
"C3": {600, 760},
"C4": {760, 980},
}},
{630, 710, map[string]clearanceRange{
"C2": {350, 510},
"CN": {510, 670},
"C3": {670, 850},
"C4": {850, 1090},
}},
{710, 800, map[string]clearanceRange{
"C2": {390, 570},
"CN": {570, 750},
"C3": {750, 960},
"C4": {960, 1220},
}},
{800, 900, map[string]clearanceRange{
"C2": {440, 640},
"CN": {640, 840},
"C3": {840, 1070},
"C4": {1070, 1370},
}},
{900, 1000, map[string]clearanceRange{
"C2": {490, 710},
"CN": {710, 930},
"C3": {930, 1190},
"C4": {1190, 1520},
}},
{1000, 1120, map[string]clearanceRange{
"C2": {540, 780},
"CN": {780, 1020},
"C3": {1020, 1300},
"C4": {1300, 1650},
}},
{1120, 1250, map[string]clearanceRange{
"C2": {600, 860},
"CN": {860, 1120},
"C3": {1120, 1420},
"C4": {1420, 1800},
}},
{1250, 1400, map[string]clearanceRange{
"C2": {660, 940},
"CN": {940, 1220},
"C3": {1220, 1550},
"C4": {1550, 1960},
}},
{1400, 1600, map[string]clearanceRange{
"C2": {740, 1060},
"CN": {1060, 1380},
"C3": {1380, 1750},
"C4": {1750, 2200},
}},
},
10: { // радиальный зазор сферических подшипников с бочкообразными роликами с цилиндрическим отверстием
{0, 30, map[string]clearanceRange{
"C2": {2, 9},
"CN": {9, 17},
"C3": {17, 28},
"C4": {28, 40},
}},
{30, 40, map[string]clearanceRange{
"C2": {3, 10},
"CN": {10, 20},
"C3": {20, 30},
"C4": {30, 45},
}},
{40, 50, map[string]clearanceRange{
"C2": {3, 13},
"CN": {13, 23},
"C3": {23, 35},
"C4": {35, 50},
}},
{50, 65, map[string]clearanceRange{
"C2": {4, 15},
"CN": {15, 27},
"C3": {27, 40},
"C4": {40, 55},
}},
{65, 80, map[string]clearanceRange{
"C2": {5, 20},
"CN": {20, 35},
"C3": {35, 55},
"C4": {55, 75},
}},
{80, 100, map[string]clearanceRange{
"C2": {7, 25},
"CN": {25, 45},
"C3": {45, 65},
"C4": {65, 90},
}},
{100, 120, map[string]clearanceRange{
"C2": {10, 30},
"CN": {30, 50},
"C3": {50, 70},
"C4": {70, 95},
}},
{120, 140, map[string]clearanceRange{
"C2": {15, 35},
"CN": {35, 55},
"C3": {55, 80},
"C4": {80, 110},
}},
{140, 160, map[string]clearanceRange{
"C2": {20, 40},
"CN": {40, 65},
"C3": {65, 95},
"C4": {95, 125},
}},
{160, 180, map[string]clearanceRange{
"C2": {25, 45},
"CN": {45, 70},
"C3": {70, 100},
"C4": {100, 130},
}},
{180, 225, map[string]clearanceRange{
"C2": {30, 50},
"CN": {50, 75},
"C3": {75, 105},
"C4": {105, 135},
}},
{225, 250, map[string]clearanceRange{
"C2": {35, 55},
"CN": {55, 80},
"C3": {80, 110},
"C4": {110, 140},
}},
{250, 280, map[string]clearanceRange{
"C2": {40, 60},
"CN": {60, 85},
"C3": {85, 115},
"C4": {115, 145},
}},
{280, 315, map[string]clearanceRange{
"C2": {40, 70},
"CN": {70, 100},
"C3": {100, 135},
"C4": {135, 170},
}},
{315, 355, map[string]clearanceRange{
"C2": {45, 75},
"CN": {75, 105},
"C3": {105, 140},
"C4": {140, 175},
}},
},
11: { // радиальный зазор сферических подшипников с бочкообразными роликами с коническим отверстием
{0, 30, map[string]clearanceRange{
"C2": {9, 17},
"CN": {17, 28},
"C3": {28, 40},
"C4": {40, 55},
}},
{30, 40, map[string]clearanceRange{
"C2": {10, 20},
"CN": {20, 30},
"C3": {30, 45},
"C4": {45, 60},
}},
{40, 50, map[string]clearanceRange{
"C2": {13, 23},
"CN": {23, 35},
"C3": {35, 50},
"C4": {50, 65},
}},
{50, 65, map[string]clearanceRange{
"C2": {15, 27},
"CN": {27, 40},
"C3": {40, 55},
"C4": {55, 75},
}},
{65, 80, map[string]clearanceRange{
"C2": {20, 35},
"CN": {35, 55},
"C3": {55, 75},
"C4": {75, 95},
}},
{80, 100, map[string]clearanceRange{
"C2": {25, 45},
"CN": {45, 65},
"C3": {65, 90},
"C4": {90, 120},
}},
{100, 120, map[string]clearanceRange{
"C2": {30, 50},
"CN": {50, 70},
"C3": {70, 95},
"C4": {95, 125},
}},
{120, 140, map[string]clearanceRange{
"C2": {35, 55},
"CN": {55, 80},
"C3": {80, 110},
"C4": {110, 140},
}},
{140, 160, map[string]clearanceRange{
"C2": {40, 65},
"CN": {65, 95},
"C3": {95, 125},
"C4": {125, 155},
}},
{160, 180, map[string]clearanceRange{
"C2": {45, 70},
"CN": {70, 100},
"C3": {100, 130},
"C4": {130, 160},
}},
{180, 225, map[string]clearanceRange{
"C2": {50, 75},
"CN": {75, 105},
"C3": {105, 135},
"C4": {135, 165},
}},
{225, 250, map[string]clearanceRange{
"C2": {55, 80},
"CN": {80, 110},
"C3": {110, 140},
"C4": {140, 170},
}},
{250, 280, map[string]clearanceRange{
"C2": {60, 85},
"CN": {85, 115},
"C3": {115, 145},
"C4": {145, 175},
}},
{280, 315, map[string]clearanceRange{
"C2": {70, 100},
"CN": {100, 135},
"C3": {135, 170},
"C4": {170, 205},
}},
{315, 355, map[string]clearanceRange{
"C2": {75, 105},
"CN": {105, 140},
"C3": {140, 175},
"C4": {175, 210},
}},
},
}
package classification
import (
"fmt"
"math"
"github.com/eriq-augustine/goml/base"
"github.com/eriq-augustine/goml/features"
"github.com/eriq-augustine/goml/optimize"
"github.com/eriq-augustine/goml/util"
"github.com/gonum/blas/blas64"
)
const (
   // LR_DEFAULT_L2_PENALTY is the l2 regularization strength used when the
   // caller passes a negative penalty to NewLogisticRegression.
   LR_DEFAULT_L2_PENALTY = 1.0
)
// LogisticRegression is a multinomial (softmax) logistic regression classifier
// trained by minimizing the l2-regularized negative log likelihood.
type LogisticRegression struct {
   // reducer optionally transforms tuples before training/classification.
   reducer features.Reducer
   // optimizer minimizes the negative log likelihood over the packed parameters.
   optimizer optimize.Optimizer
   // l2Penalty is the strength of the l2 regularizer on weights and intercepts.
   l2Penalty float64
   // [class][feature] — per-class feature weights learned during training.
   weights [][]float64
   // intercepts holds one learned bias term per class.
   intercepts []float64
   // labels maps surrogate class ids (indexes) back to the original label features.
   labels []base.Feature
}
// NewLogisticRegression constructs a classifier, substituting defaults for
// nil |reducer|/|optimizer|. Note that 0 is a valid value for |l2Penalty|;
// pass -1 to get the default penalty.
func NewLogisticRegression(reducer features.Reducer, optimizer optimize.Optimizer, l2Penalty float64) *LogisticRegression {
   if reducer == nil {
      reducer = features.NoReducer{}
   }

   if optimizer == nil {
      optimizer = optimize.NewSGD(0, 0, 0, 0)
   }

   if l2Penalty < 0 {
      l2Penalty = LR_DEFAULT_L2_PENALTY
   }

   return &LogisticRegression{
      reducer:   reducer,
      optimizer: optimizer,
      l2Penalty: l2Penalty,
   }
}
// Train fits the model to |tuples|, which must all be base.NumericTuple with
// the same number of features. Panics on empty input or inconsistent sizes.
func (lr *LogisticRegression) Train(tuples []base.Tuple) {
   if len(tuples) == 0 {
      panic("Must provide tuples for training.")
   }

   lr.reducer.Init(tuples)
   tuples = lr.reducer.Reduce(tuples)

   numFeatures := -1
   numericData := make([][]float64, len(tuples))
   dataLabels := make([]int, len(tuples))

   // Assign each distinct class label a surrogate id: its index into lr.labels.
   lr.labels = make([]base.Feature, 0)
   labelMap := make(map[base.Feature]int)

   for i, tuple := range tuples {
      numericTuple, ok := tuple.(base.NumericTuple)
      if !ok {
         panic("LogisticRegression only supports classifying NumericTuple")
      }

      class := numericTuple.GetClass()
      if _, seen := labelMap[class]; !seen {
         labelMap[class] = len(lr.labels)
         lr.labels = append(lr.labels, class)
      }

      numericData[i] = numericTuple.ToFloatSlice()
      // Store the surrogate identifier instead of the raw label.
      dataLabels[i] = labelMap[class]

      // Every tuple must agree on the number of features.
      if numFeatures == -1 {
         numFeatures = numericTuple.DataSize()
      } else if numFeatures != numericTuple.DataSize() {
         panic(fmt.Sprintf("Inconsistent number of features. Tuple[0]: %d, Tuple[%d]: %d",
               numFeatures, i, numericTuple.DataSize()))
      }
   }

   lr.train(numericData, dataLabels)
}
// Classify predicts a label and its probability for each tuple.
// All tuples must be base.NumericTuple; panics otherwise.
func (lr LogisticRegression) Classify(tuples []base.Tuple) ([]base.Feature, []float64) {
   tuples = lr.reducer.Reduce(tuples)

   numericData := make([][]float64, len(tuples))
   for i, tuple := range tuples {
      numericTuple, ok := tuple.(base.NumericTuple)
      if !ok {
         panic("LogisticRegression only supports classifying NumericTuple")
      }
      numericData[i] = numericTuple.ToFloatSlice()
   }

   classIndexes, probabilities := lr.classify(numericData)

   // Map the surrogate class ids back to the original label features.
   classes := make([]base.Feature, len(classIndexes))
   for i, classIndex := range classIndexes {
      classes[i] = lr.labels[classIndex]
   }

   return classes, probabilities
}
// train runs the configured optimizer over the packed parameter vector.
// In the internals of Logistic Regression (typically non-exported functions),
// we don't deal with actual base.Tuple's — just raw float slices and ints
// (the mapped class labels).
func (lr *LogisticRegression) train(data [][]float64, dataLabels []int) {
   // Packed params hold |labels| intercepts followed by |labels| x |features| weights.
   initialParams := make([]float64, len(lr.labels)*(1+len(data[0])))

   valueFunc := func(params []float64) float64 {
      return lr.negativeLogLikelihoodOptimize(data, dataLabels, params)
   }

   if lr.optimizer.SupportsBatch() {
      batchGradientFunc := func(params []float64, points []int) []float64 {
         return lr.negativeLogLikelihoodGradientBatchOptimize(data, dataLabels, params, points)
      }
      lr.weights, lr.intercepts = lr.unpackOptimizerParams(lr.optimizer.OptimizeBatch(
            initialParams, util.RangeSlice(len(data)), valueFunc, batchGradientFunc))
   } else {
      gradientFunc := func(params []float64) []float64 {
         return lr.negativeLogLikelihoodGradientOptimize(data, dataLabels, params)
      }
      lr.weights, lr.intercepts = lr.unpackOptimizerParams(lr.optimizer.Optimize(
            initialParams, valueFunc, gradientFunc))
   }
}
// classify picks, for every data point, the class id with the highest
// probability, returning both the ids and the winning probabilities.
func (lr LogisticRegression) classify(data [][]float64) ([]int, []float64) {
   allProbabilities := probabilities(lr.weights, lr.intercepts, data)

   results := make([]int, len(data))
   resultProbabilities := make([]float64, len(data))
   for i, instanceProbabilities := range allProbabilities {
      bestIndex, bestProbability := util.Max(instanceProbabilities)
      results[i] = bestIndex
      resultProbabilities[i] = bestProbability
   }

   return results, resultProbabilities
}
// probabilities calculates the probability of each data point being in each class.
// Returns float64[data point][class].
// The math comes out to prob(point=x,class=k) = exp(Wk dot x - logSumExp(Wj dot x))
// where logSumExp runs over all classes j (this keeps the softmax numerically stable).
func probabilities(weights [][]float64, intercepts []float64, data [][]float64) [][]float64 {
   result := make([][]float64, len(data))
   for i := range result {
      result[i] = make([]float64, len(weights))
   }

   // Scratch space for the per-class activations, reused across data points.
   activations := make([]float64, len(weights))

   for dataPointIndex, dataPoint := range data {
      for classIndex := range weights {
         activations[classIndex] = intercepts[classIndex] + dot(weights[classIndex], dataPoint)
      }

      normalization := util.LogSumExp(activations)
      for classIndex := range weights {
         result[dataPointIndex][classIndex] = math.Exp(activations[classIndex] - normalization)
      }
   }

   return result
}
// negativeLogLikelihood computes the l2-regularized NLL of the data:
// NLL = -[ sum(n over data){ sum(k over classes){ oneHotLabel(n, k) * log(prob(Xn, k)) } } ]
//       + l2Penalty / 2 * (sum of squared intercepts and weights)
func negativeLogLikelihood(
      weights [][]float64, intercepts []float64, l2Penalty float64,
      data [][]float64, dataLabels []int) float64 {
   probs := probabilities(weights, intercepts, data)

   var sum float64
   for dataPointIndex := range data {
      // One-hot: only the true class' log probability contributes.
      sum += math.Log(probs[dataPointIndex][dataLabels[dataPointIndex]])
   }

   // l2 regularizer over intercepts and weights.
   var regularizer float64
   for classIndex := range weights {
      regularizer += math.Pow(intercepts[classIndex], 2)
      for _, weight := range weights[classIndex] {
         regularizer += math.Pow(weight, 2)
      }
   }
   regularizer *= l2Penalty / 2.0

   return -1.0*sum + regularizer
}
// negativeLogLikelihoodGradient computes the NLL gradient with respect to each
// weight ([class][feature]) and each intercept ([class]), including the l2 term.
// Note that len(weights) == len(intercepts) == number of classes.
func negativeLogLikelihoodGradient(
      weights [][]float64, intercepts []float64, l2Penalty float64,
      data [][]float64, dataLabels []int) ([][]float64, []float64) {
   probs := probabilities(weights, intercepts, data)

   numClasses := len(intercepts)
   numFeatures := len(data[0])

   // TODO(eriq): Allocate once and keep in struct?
   weightGradients := make([][]float64, numClasses)
   for classIndex := range weightGradients {
      weightGradients[classIndex] = make([]float64, numFeatures)
   }
   interceptGradients := make([]float64, numClasses)

   for dataPointIndex, dataPoint := range data {
      for classIndex := 0; classIndex < numClasses; classIndex++ {
         // Residual = predicted probability minus the one-hot true label.
         residual := probs[dataPointIndex][classIndex]
         if dataLabels[dataPointIndex] == classIndex {
            residual -= 1.0
         }

         interceptGradients[classIndex] += residual
         for featureIndex := 0; featureIndex < numFeatures; featureIndex++ {
            weightGradients[classIndex][featureIndex] += residual * dataPoint[featureIndex]
         }
      }
   }

   // Add the l2 regularizer's contribution.
   for classIndex := range weights {
      interceptGradients[classIndex] += intercepts[classIndex] * l2Penalty
      for featureIndex := range weights[classIndex] {
         weightGradients[classIndex][featureIndex] += weights[classIndex][featureIndex] * l2Penalty
      }
   }

   return weightGradients, interceptGradients
}
// unpackOptimizerParams splits the optimizer's flat vector into weights and intercepts.
// Params are packed: [
//    intercept[0], intercept[1], ... , intercept[K - 1],
//    weight[0][0], weight[0][1], ..., weight[0][N - 1],
//    ...
//    weight[K - 1][0], weight[K - 1][1], ..., weight[K - 1][N - 1]
// ]
// K - Number of Labels, N - Number of Features.
func (lr LogisticRegression) unpackOptimizerParams(params []float64) ([][]float64, []float64) {
   numLabels := len(lr.labels)
   intercepts := params[:numLabels]
   packedWeights := params[numLabels:]
   numFeatures := len(packedWeights) / numLabels

   // Each weight row is a view into the packed slice (no copy).
   // TODO(eriq): Avoid this allocation?
   weights := make([][]float64, numLabels)
   for i := range weights {
      weights[i] = packedWeights[i*numFeatures : (i+1)*numFeatures]
   }

   return weights, intercepts
}
// negativeLogLikelihoodOptimize adapts negativeLogLikelihood to the optimizer's
// value-function signature; |data| and |dataLabels| are curried by the caller.
func (lr LogisticRegression) negativeLogLikelihoodOptimize(
      data [][]float64,
      dataLabels []int,
      params []float64) float64 {
   weights, intercepts := lr.unpackOptimizerParams(params)
   return negativeLogLikelihood(weights, intercepts, lr.l2Penalty, data, dataLabels)
}
// negativeLogLikelihoodGradientOptimize adapts negativeLogLikelihoodGradient to
// the optimizer's gradient signature, flattening the result into one packed
// vector (intercept gradients first, then weight gradients row by row).
func (lr LogisticRegression) negativeLogLikelihoodGradientOptimize(
      data [][]float64,
      dataLabels []int,
      params []float64) []float64 {
   weights, intercepts := lr.unpackOptimizerParams(params)
   weightGradients, interceptGradients := negativeLogLikelihoodGradient(
         weights, intercepts, lr.l2Penalty, data, dataLabels)

   gradients := make([]float64, 0, len(interceptGradients)+len(weightGradients)*len(weightGradients[0]))
   gradients = append(gradients, interceptGradients...)
   for _, row := range weightGradients {
      gradients = append(gradients, row...)
   }

   return gradients
}
// negativeLogLikelihoodGradientBatchOptimize is the batch variant of the
// gradient wrapper: it restricts the gradient to the given point indexes.
func (lr LogisticRegression) negativeLogLikelihoodGradientBatchOptimize(
      data [][]float64,
      dataLabels []int,
      params []float64,
      points []int) []float64 {
   batchData := util.SelectIndexesFloat2D(data, points)
   batchLabels := util.SelectIndexesInt(dataLabels, points)
   return lr.negativeLogLikelihoodGradientOptimize(batchData, batchLabels, params)
}
func dot(a []float64, b []float64) float64 {
if (len(a) != len(b)) {
panic(fmt.Sprintf("Length of LHS (%d) and length of RHS (%d) must match for a dot.", len(a), len(b)));
}
var aVec blas64.Vector = blas64.Vector{1, a};
var bVec blas64.Vector = blas64.Vector{1, b};
return blas64.Dot(len(a), aVec, bVec);
} | classification/logisticRegression.go | 0.780495 | 0.469095 | logisticRegression.go | starcoder |
package table
import (
"regexp"
"strings"
)
// FieldMatcher is a function type which is consumed by different table Cell
// finder functions. Given the fields of a line, it returns the matched value
// and whether a match was found.
type FieldMatcher func([]string) (string, bool)
// LineContaining returns a predicate reporting whether a line contains every
// one of the given tokens. With no tokens, the predicate accepts every line.
func LineContaining(ss ...string) func(string) bool {
	return func(line string) bool {
		for _, token := range ss {
			if !strings.Contains(line, token) {
				return false
			}
		}
		return true
	}
}
// LineContainingSlices flattens the given token slices (deduplicating) and
// returns a predicate requiring the line to contain every distinct token.
func LineContainingSlices(ls ...[]string) func(string) bool {
	seen := map[string]bool{}
	for _, tokens := range ls {
		for _, token := range tokens {
			seen[token] = true
		}
	}
	return LineContaining(strBoolMapKeys(seen)...)
}
// strBoolMapKeys returns the keys of m in unspecified order.
func strBoolMapKeys(m map[string]bool) []string {
	keys := make([]string, 0, len(m))
	for key := range m {
		keys = append(keys, key)
	}
	return keys
}
// LineContainingAny behaves like LineContaining, but satisfying any one of the
// given token slices is enough for the line to match.
func LineContainingAny(ls ...[]string) func(string) bool {
	predicates := make([]func(string) bool, 0, len(ls))
	for _, tokens := range ls {
		predicates = append(predicates, LineContaining(tokens...))
	}
	return AnyMatched(predicates...)
}
// LineContainingAnySingle behaves like LineContainingAny, except each
// argument is a single token and any one token is enough to match.
func LineContainingAnySingle(ls ...string) func(string) bool {
	pp := make([]func(string) bool, 0, len(ls))
	for _, token := range ls {
		pp = append(pp, LineContaining(token))
	}
	return AnyMatched(pp...)
}
// AllAreMatched returns a stateful predicate that consumes lines and becomes
// (and stays) true once each predicate has been satisfied in order: a line
// satisfying the current head predicate advances to the next one.
func AllAreMatched(pp ...func(string) bool) func(string) bool {
	remaining := pp
	return func(line string) bool {
		if len(remaining) > 0 && remaining[0](line) {
			remaining = remaining[1:]
		}
		return len(remaining) == 0
	}
}
// AnyMatched returns a predicate that accepts a line when any one of the
// given predicates accepts it. With no predicates it accepts every line.
func AnyMatched(pp ...func(string) bool) func(string) bool {
	return func(line string) bool {
		matched := len(pp) == 0
		for i := 0; !matched && i < len(pp); i++ {
			matched = pp[i](line)
		}
		return matched
	}
}
// NonEmptyLine returns a predicate reporting whether a line has any
// non-whitespace content.
func NonEmptyLine() func(string) bool {
	return func(line string) bool {
		return !isWhiteSpace(line)
	}
}
// EmptyLine returns a predicate reporting whether a line is blank
// (whitespace only). isWhiteSpace already has the right signature, so it is
// returned directly.
func EmptyLine() func(string) bool {
	return isWhiteSpace
}
// LineFieldMatcher provides matchers for whole line or specific cell content
type LineFieldMatcher struct {
Re *regexp.Regexp
Sep string
}
// Find returns a matching cell in a given line.
func (fm LineFieldMatcher) Find(line []string) (string, bool) {
for _, s := range line {
if fm.Re.MatchString(s) {
return s, true
}
}
return "", false
}
// FindLine returns a matching line.
func (fm LineFieldMatcher) FindLine(fields []string) (string, bool) {
line := strings.Join(fields, fm.Sep)
return line, fm.Re.MatchString(line)
} | line.go | 0.784732 | 0.450662 | line.go | starcoder |
package schema
import "github.com/google/uuid"
// ValidPayment builds a fully populated example Payment (fresh random IDs
// on every call) for use in tests.
func ValidPayment() *Payment {
	return &Payment{
		ID:             uuid.New().String(),
		Type:           "Payment",
		Version:        0,
		OrganisationID: uuid.New().String(),
		Attributes:     ValidPaymentAttributes(),
	}
}
// ValidPaymentAttributes returns an example of a valid PaymentAttributes
// value, with every field — including the nested parties, foreign-exchange
// and charges structures — populated via the sibling Valid* helpers.
func ValidPaymentAttributes() PaymentAttributes {
	return PaymentAttributes{
		Amount:               "200.10",
		Currency:             "great",
		EndToEndReference:    "here it is",
		NumericReference:     "1245",
		PaymentID:            "343535",
		PaymentPurpose:       "stuff",
		PaymentScheme:        "best",
		PaymentType:          "Credit",
		ProcessingDate:       "now",
		Reference:            "that guy",
		SchemePaymentSubType: "InternetBanking",
		SchemePaymentType:    "ImmediatePayment",
		BeneficiaryParty:     ValidParty(),
		DebtorParty:          ValidParty(),
		SponsorParty:         ValidParty(),
		ForeignExchange:      ValidCurrencyExchange(),
		ChargesInformation:   ValidCharges(),
	}
}
// ValidParty returns an example of a valid Party; the same value is used as
// beneficiary, debtor and sponsor in the payment fixtures above.
func ValidParty() Party {
	return Party{
		AccountName:       "My account",
		AccountNumber:     "12345",
		AccountNumberCode: "93847",
		AccountType:       5,
		Address:           "123 lane",
		BankID:            "best bank",
		BankIDCode:        "12AFR",
		Name:              "<NAME>",
	}
}
// ValidCurrencyExchange returns an example of a valid CurrencyExchange
// (foreign-exchange details) for payment fixtures.
func ValidCurrencyExchange() CurrencyExchange {
	return CurrencyExchange{
		ContractReference: "FX123",
		ExchangeRate:      "2.00000",
		OriginalAmount:    "200.42",
		OriginalCurrency:  "USD",
	}
}
// ValidCharges returns an example of a valid Charges value with two sender
// charge entries. Local variables are needed because ValidMoney returns a
// value and SenderCharges holds pointers.
func ValidCharges() Charges {
	senderA := ValidMoney()
	senderB := ValidMoney()
	return Charges{
		BearerCode:              "SHAR",
		ReceiverChargesAmount:   "1.00",
		ReceiverChargesCurrency: "USD",
		SenderCharges:           []*Money{&senderA, &senderB},
	}
}
// ValidMoney an example of a valid money
func ValidMoney() Money {
return Money{
Amount: "5.00",
Currency: "USD",
}
} | implementation/schema/examples.go | 0.796925 | 0.444444 | examples.go | starcoder |
package model
import (
"strings"
"math"
"k8s.io/klog"
)
// Raw n-gram occurrence counts, accumulated by AddData and reset by ClearModel.
var count map[string]int                                      // unigram: word -> occurrences
var bicount map[string]map[string]int                         // bigram: w1 -> w2 -> occurrences
var tricount map[string]map[string]map[string]int             // trigram: w1 -> w2 -> w3 -> occurrences
var quadcount map[string]map[string]map[string]map[string]int // 4-gram: w1 -> w2 -> w3 -> w4 -> occurrences

// Laplace smoothing pseudo-count added to every numerator in buildModel
// (default 0.001, set in ClearModel).
var laplace_alpha float64

// Key identifies an ordered bigram (first followed by second).
type Key struct {
	first, second string
}

// bigramModel holds the smoothed bigram scores produced by buildModel.
var bigramModel map[Key]float64

// TriKey identifies an ordered trigram.
type TriKey struct {
	first, second, third string
}

// trigramModel holds the smoothed trigram scores produced by buildModel.
var trigramModel map[TriKey]float64

// QuadKey identifies an ordered 4-gram.
type QuadKey struct {
	first, second, third, fourth string
}

// quadgramModel holds the smoothed 4-gram scores produced by buildModel.
var quadgramModel map[QuadKey]float64
// init resets all package-level model state so the package starts with
// empty counts and models.
func init() {
	ClearModel()
}
// ClearModel discards all accumulated counts and built models, and resets
// the Laplace smoothing constant to its default of 0.001.
func ClearModel() {
	count = make(map[string]int)
	bicount = make(map[string]map[string]int)
	tricount = make(map[string]map[string]map[string]int)
	quadcount = make(map[string]map[string]map[string]map[string]int)
	laplace_alpha = 0.001
	bigramModel = make(map[Key]float64)
	trigramModel = make(map[TriKey]float64)
	quadgramModel = make(map[QuadKey]float64)
}
// addBigram increments the occurrence count of the ordered pair
// (first, second), creating the inner map on first use.
func addBigram(first, second string) {
	inner, ok := bicount[first]
	if !ok {
		inner = make(map[string]int)
		bicount[first] = inner
	}
	inner[second]++
}
// addTrigram increments the occurrence count of the ordered triple
// (first, second, third), creating nested maps on first use.
func addTrigram(first, second, third string) {
	level1, ok := tricount[first]
	if !ok {
		level1 = make(map[string]map[string]int)
		tricount[first] = level1
	}
	level2, ok := level1[second]
	if !ok {
		level2 = make(map[string]int)
		level1[second] = level2
	}
	level2[third]++
}
// addQuadgram increments the occurrence count of the ordered quadruple
// (first, second, third, fourth), creating nested maps on first use.
func addQuadgram(first, second, third, fourth string) {
	level1, ok := quadcount[first]
	if !ok {
		level1 = make(map[string]map[string]map[string]int)
		quadcount[first] = level1
	}
	level2, ok := level1[second]
	if !ok {
		level2 = make(map[string]map[string]int)
		level1[second] = level2
	}
	level3, ok := level2[third]
	if !ok {
		level3 = make(map[string]int)
		level2[third] = level3
	}
	level3[fourth]++
}
// AddData tokenizes data on whitespace, accumulates unigram through 4-gram
// counts, rebuilds the smoothed models, and returns the number of tokens
// ingested. The error result is always nil (kept for interface stability).
func AddData(data string) (int, error) {
	words := strings.Fields(data)
	for i, word := range words {
		count[word]++
		if i < len(words)-1 {
			addBigram(word, words[i+1])
		}
		if i < len(words)-2 {
			addTrigram(word, words[i+1], words[i+2])
		}
		if i < len(words)-3 {
			addQuadgram(word, words[i+1], words[i+2], words[i+3])
		}
	}
	// Dumping the full count maps grows with the model and floods the log at
	// the default verbosity; gate the debug dump behind a high V-level.
	klog.V(4).Infof("count: %+v", count)
	klog.V(4).Infof("bicount: %+v", bicount)
	klog.V(4).Infof("tricount: %+v", tricount)
	klog.V(4).Infof("quadcount: %+v", quadcount)
	buildModel()
	return len(words), nil
}
// GetCount returns the number of distinct words (the unigram vocabulary
// size) seen so far — not the total number of tokens ingested.
func GetCount() int {
	return len(count)
}
// GetNext returns, for every word observed directly after `word`, the
// conditional probability estimated from raw bigram counts (no smoothing).
// An unknown word yields an empty map. The error result is always nil.
func GetNext(word string) (map[string]float32, error) {
	successors := map[string]float32{}
	total := float32(count[word])
	for next, occurrences := range bicount[word] {
		successors[next] = float32(occurrences) / total
	}
	return successors, nil
}
// GetTriNext returns the distribution over third words that followed the
// pair (first, second), normalized from raw trigram counts. An unseen pair
// yields an empty map. The error result is always nil.
func GetTriNext(first string, second string) (map[string]float32, error) {
	followers := tricount[first][second]
	total := 0.0
	for _, occurrences := range followers {
		total += float64(occurrences)
	}
	probabilities := make(map[string]float32, len(followers))
	for option, occurrences := range followers {
		probabilities[option] = float32(occurrences) / float32(total)
	}
	return probabilities, nil
}
// GetQuadNext returns the distribution over fourth words that followed the
// triple (first, second, third), normalized from raw 4-gram counts. An
// unseen triple yields an empty map. The error result is always nil.
func GetQuadNext(first string, second string, third string) (map[string]float32, error) {
	followers := quadcount[first][second][third]
	total := 0.0
	for _, occurrences := range followers {
		total += float64(occurrences)
	}
	probabilities := make(map[string]float32, len(followers))
	for option, occurrences := range followers {
		probabilities[option] = float32(occurrences) / float32(total)
	}
	return probabilities, nil
}
// GetEntropy scores data against the bigram model. Despite the name, the
// value returned is a perplexity: 250 raised to the negative average
// per-token score of the sequence (see getProbability/buildModel for what
// is summed). The error result is always nil.
func GetEntropy(data string) (float64, error) {
	tokens := strings.Fields(data)
	n := len(tokens)
	var scoreSum float64
	for i := 0; i+1 < len(tokens); i++ {
		scoreSum += getProbability(tokens[i], tokens[i+1])
	}
	exponent := scoreSum * (float64(-1) / float64(n))
	return math.Pow(250, exponent), nil
}
// getProbability looks up the stored bigram score for (first, second),
// substituting the "<UKN>" token for either word when it has never been
// counted. A pair absent from the model yields the zero value.
func getProbability(first string, second string) float64 {
	w1, w2 := first, second
	if count[w1] == 0 {
		w1 = "<UKN>"
	}
	if count[w2] == 0 {
		w2 = "<UKN>"
	}
	return bigramModel[Key{w1, w2}]
}
func buildModel() {
//use log base of 250
logBase := 1 / math.Log(250)
v := float64(len(count)) + laplace_alpha
for key, value := range count {
denom := float64(value) + v
for key2, val2 := range bicount[key] {
bigramModel[Key{key, key2}] = (float64(val2) + laplace_alpha) / denom
//trigram
for key3, val3 := range tricount[key][key2] {
trigramModel[TriKey{key, key2, key3}] = (float64(val3) + laplace_alpha) / denom
//quadgram
for key4, val4 := range quadcount[key][key2][key3] {
quadgramModel[QuadKey{key, key2, key3, key4}] = (float64(val4) + laplace_alpha) / denom
}
//unknown token
quadgramModel[QuadKey{key, key2, key3, "<UKN>"}] = math.Log(laplace_alpha / denom) * logBase
}
//unknown token
trigramModel[TriKey{key, key2, "<UKN>"}] = math.Log(laplace_alpha / denom) * logBase
}
//add unknown token
bigramModel[Key{key, "<UKN>"}] = math.Log(laplace_alpha / denom) * logBase
trigramModel[TriKey{key, "<UKN>", "<UKN>"}] = math.Log(laplace_alpha / denom) * logBase
}
//handle unknown as first word.
bigramModel[Key{"<UKN>", "<UKN>"}] = math.Log(laplace_alpha / (v + laplace_alpha)) * logBase
trigramModel[TriKey{"<UKN>", "<UKN>", "<UKN>"}] = math.Log(laplace_alpha / (v + laplace_alpha)) * logBase
} | model/model.go | 0.623377 | 0.423816 | model.go | starcoder |
package validator
import (
"github.com/KludgePub/TheMazeRunner/maze"
)
// GetSolvedPath runs a depth-first search over the maze graph and returns
// the sequence of points walked from `from` to `to` (starting with `from`).
// If no route exists — or the start point has no node — the result
// degenerates to just the start point.
func GetSolvedPath(m maze.Map, from, to maze.Point) []maze.Point {
	// stackPath doubles as the DFS stack and the final answer: a point is
	// pushed when its neighbor is explored and popped when it dead-ends.
	stackPath := []maze.Point{from}
	g := maze.DispatchToGraph(&m)
	// Declared before assignment so the closure can call itself recursively.
	var isCanMove func(g *maze.Graph, cNode *maze.Node, endPoint maze.Point) bool
	isCanMove = func(g *maze.Graph, cNode *maze.Node, endPoint maze.Point) bool {
		if cNode.Visited {
			return false
		} else if cNode.Point == endPoint {
			return true
		}
		// Mark before descending so cycles terminate. Nodes are never
		// un-marked, so this graph instance is single-use for the search.
		cNode.Visited = true
		// NOTE(review): the recursive calls below pass the captured outer
		// `to` rather than the endPoint parameter; the values are identical
		// in practice, but confirm before refactoring.
		if cNode.IsTopNeighbor {
			stackPath = append(stackPath, cNode.TopNeighbor.Point)
			if isCanMove(g, cNode.TopNeighbor, to) {
				return true
			}
			stackPath = stackPath[:len(stackPath)-1] // dead end: pop
		}
		if cNode.IsBottomNeighbor {
			stackPath = append(stackPath, cNode.BottomNeighbor.Point)
			if isCanMove(g, cNode.BottomNeighbor, to) {
				return true
			}
			stackPath = stackPath[:len(stackPath)-1] // dead end: pop
		}
		if cNode.IsRightNeighbor {
			stackPath = append(stackPath, cNode.RightNeighbor.Point)
			if isCanMove(g, cNode.RightNeighbor, to) {
				return true
			}
			stackPath = stackPath[:len(stackPath)-1] // dead end: pop
		}
		if cNode.IsLeftNeighbor {
			stackPath = append(stackPath, cNode.LeftNeighbor.Point)
			if isCanMove(g, cNode.LeftNeighbor, to) {
				return true
			}
			stackPath = stackPath[:len(stackPath)-1] // dead end: pop
		}
		return false
	}
	// Locate the start node, then search from it.
	for _, n := range g.Nodes {
		if n.Point == from {
			isCanMove(g, n, to)
			break
		}
	}
	return stackPath
}
// GetPossiblePath returns the longest walkable prefix of givenPath in graph
// g: it stops at (and includes) the first point whose next step is blocked,
// and truncates before any point that has no node in the graph at all.
func GetPossiblePath(givenPath []maze.Point, g *maze.Graph) []maze.Point {
	possiblePath := make([]maze.Point, 0)
	for i := 0; i < len(givenPath); i++ {
		var fromNode *maze.Node
		fromPoint := givenPath[i]
		// For the last element "to" stays equal to "from": a self-move,
		// which isPossibleToPass accepts.
		toPoint := fromPoint
		// Get node fromPoint graph.
		// NOTE(review): this is a linear scan over g.Nodes, while
		// IsPathPossible indexes g.Nodes by Point.GetId(); presumably the
		// direct lookup could be used here too — confirm and unify.
		for _, n := range g.Nodes {
			if n.Point.X == fromPoint.X && n.Point.Y == fromPoint.Y {
				fromNode = n
				break
			}
		}
		// If not found, the given point does not exist in the maze.
		if fromNode == nil {
			return possiblePath
		}
		if len(givenPath) > i+1 {
			toPoint = givenPath[i+1]
		}
		// Check whether the move from this point to the next is possible.
		if !isPossibleToPass(fromNode, toPoint) {
			// The blocked point itself is still reachable, so keep it.
			possiblePath = append(possiblePath, fromPoint)
			return possiblePath
		}
		possiblePath = append(possiblePath, fromPoint)
	}
	return possiblePath
}
// IsPathPossible reports whether the given sequence of points forms a
// walkable path in the maze graph g. Paths shorter than two points are
// rejected.
//
// The original implementation tracked validity in an isPointsValid flag and
// then re-tested it in a condition that was always true at that point; the
// dead branch and flag bookkeeping have been removed with no behavior change.
func IsPathPossible(path []maze.Point, g *maze.Graph) bool {
	if len(path) <= 1 {
		return false
	}
	// Every point must correspond to a node present in the graph.
	for _, p := range path {
		found := false
		for _, n := range g.Nodes {
			if n.Point.X == p.X && n.Point.Y == p.Y {
				found = true
				break
			}
		}
		if !found {
			return false
		}
	}
	// Every consecutive pair must be passable; the final point is checked
	// against itself, which isPossibleToPass accepts as a no-op move.
	for i := 0; i < len(path); i++ {
		to := path[i]
		if i+1 < len(path) {
			to = path[i+1]
		}
		if !isPossibleToPass(g.Nodes[path[i].GetId()], to) {
			return false
		}
	}
	return true
}
// canMove validate movement
func isPossibleToPass(from *maze.Node, to maze.Point) bool {
if from.Point.X == to.X && from.Point.Y == to.Y {
return true
}
// Check if we can move to left
if from.LeftNeighbor != nil {
if from.LeftNeighbor.Point.X == to.X && from.LeftNeighbor.Point.Y == to.Y {
return true
}
}
// Check if we can move to right
if from.RightNeighbor != nil {
if from.RightNeighbor.Point.X == to.X && from.RightNeighbor.Point.Y == to.Y {
return true
}
}
// Check if we can move to top
if from.TopNeighbor != nil {
if from.TopNeighbor.Point.X == to.X && from.TopNeighbor.Point.Y == to.Y {
return true
}
}
// Check if we can move to bottom
if from.BottomNeighbor != nil {
if from.BottomNeighbor.Point.X == to.X && from.BottomNeighbor.Point.Y == to.Y {
return true
}
}
return false
} | validator/path.go | 0.692538 | 0.630372 | path.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.