code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package events
// MiddlewareCallback is the signature of a middleware event handler: it
// receives the channel name followed by the event's variadic payload items.
type MiddlewareCallback func(string, ...interface{})
// MiddlewareBoolify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as a bool. An empty payload forwards the
// zero value; a first item of any other type panics on the type assertion.
func MiddlewareBoolify(callback func(string, bool)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, false)
			return
		}
		callback(channel, data[0].(bool))
	}
}
// MiddlewareByteify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as a byte. An empty payload forwards the
// zero value; a first item of any other type panics on the type assertion.
func MiddlewareByteify(callback func(string, byte)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(byte))
	}
}
// MiddlewareByteSliceify returns a MiddlewareCallback that forwards the
// event's first payload item to callback as a []byte. An empty payload
// forwards nil; a first item of any other type panics on the type assertion.
func MiddlewareByteSliceify(callback func(string, []byte)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, nil)
			return
		}
		callback(channel, data[0].([]byte))
	}
}
// MiddlewareComplex64ify returns a MiddlewareCallback that forwards the
// event's first payload item to callback as a complex64. An empty payload
// forwards zero; a first item of any other type panics on the type assertion.
func MiddlewareComplex64ify(callback func(string, complex64)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(complex64))
	}
}
// MiddlewareComplex128ify returns a MiddlewareCallback that forwards the
// event's first payload item to callback as a complex128. An empty payload
// forwards zero; a first item of any other type panics on the type assertion.
func MiddlewareComplex128ify(callback func(string, complex128)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(complex128))
	}
}
// MiddlewareErrorify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as an error. An empty payload forwards nil;
// a first item of any other type panics on the type assertion.
func MiddlewareErrorify(callback func(string, error)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, nil)
			return
		}
		callback(channel, data[0].(error))
	}
}
// MiddlewareFloat32ify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as a float32. An empty payload forwards
// zero; a first item of any other type panics on the type assertion.
func MiddlewareFloat32ify(callback func(string, float32)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(float32))
	}
}
// MiddlewareFloat64ify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as a float64. An empty payload forwards
// zero; a first item of any other type panics on the type assertion.
func MiddlewareFloat64ify(callback func(string, float64)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(float64))
	}
}
// MiddlewareIntify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as an int. An empty payload forwards zero;
// a first item of any other type panics on the type assertion.
func MiddlewareIntify(callback func(string, int)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(int))
	}
}
// MiddlewareInt8ify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as an int8. An empty payload forwards zero;
// a first item of any other type panics on the type assertion.
func MiddlewareInt8ify(callback func(string, int8)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(int8))
	}
}
// MiddlewareInt16ify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as an int16. An empty payload forwards zero;
// a first item of any other type panics on the type assertion.
func MiddlewareInt16ify(callback func(string, int16)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(int16))
	}
}
// MiddlewareInt32ify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as an int32. An empty payload forwards zero;
// a first item of any other type panics on the type assertion.
func MiddlewareInt32ify(callback func(string, int32)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(int32))
	}
}
// MiddlewareInt64ify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as an int64. An empty payload forwards zero;
// a first item of any other type panics on the type assertion.
func MiddlewareInt64ify(callback func(string, int64)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(int64))
	}
}
// MiddlewareRuneify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as a rune. An empty payload forwards zero;
// a first item of any other type panics on the type assertion.
func MiddlewareRuneify(callback func(string, rune)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(rune))
	}
}
// MiddlewareStringify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as a string. An empty payload forwards the
// empty string; a first item of any other type panics on the type assertion.
func MiddlewareStringify(callback func(string, string)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, "")
			return
		}
		callback(channel, data[0].(string))
	}
}
// MiddlewareUintify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as a uint. An empty payload forwards zero;
// a first item of any other type panics on the type assertion.
func MiddlewareUintify(callback func(string, uint)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(uint))
	}
}
// MiddlewareUint8ify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as a uint8. An empty payload forwards zero;
// a first item of any other type panics on the type assertion.
func MiddlewareUint8ify(callback func(string, uint8)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(uint8))
	}
}
// MiddlewareUint16ify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as a uint16. An empty payload forwards zero;
// a first item of any other type panics on the type assertion.
func MiddlewareUint16ify(callback func(string, uint16)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(uint16))
	}
}
// MiddlewareUint32ify returns a MiddlewareCallback that forwards the event's
// first payload item to callback as a uint32. An empty payload forwards zero;
// a first item of any other type panics on the type assertion.
func MiddlewareUint32ify(callback func(string, uint32)) MiddlewareCallback {
	return func(channel string, data ...interface{}) {
		if len(data) == 0 {
			callback(channel, 0)
			return
		}
		callback(channel, data[0].(uint32))
	}
}
// Create a middleware callback adapter that adapts an event message to a single uint64
func MiddlewareUint64ify(callback func(string, uint64)) MiddlewareCallback {
return func(channel string, data ...interface{}) {
var value uint64
if len(data) > 0 {
value = data[0].(uint64)
}
callback(channel, value)
}
} | middleware.go | 0.794704 | 0.47317 | middleware.go | starcoder |
package filecache implements a simple file cache.
A file cache can be created with either the NewDefaultCache() function to
get a cache with the defaults set, or NewCache() to get a new cache with
0 values for everything; you will not be able to store items in this cache
until the values are changed; specifically, at a minimum, you should change
the MaxItems field to be greater than zero.
Let's start with a basic example:
cache := filecache.NewDefaultCache()
cache.Start()
readme, err := cache.ReadFile("README.md")
if err != nil {
fmt.Println("[!] couldn't read the README:", err.Error())
} else {
fmt.Printf("[+] read %d bytes\n", len(readme))
}
You can transparently read and cache a file using RetrieveFile (and
RetrieveFileString); if the file is not in the cache, it will be read
from the file system and returned - the cache will start a background
thread to cache the file. Similarly, the WriteFile method will write
the file to the specified io.Writer. For example, you could create a
FileServer function along the lines of
func FileServer(w http.ResponseWriter, r *http.Request) {
path := r.URL.Path
if len(path) > 1 {
path = path[1:len(path)]
} else {
path = "."
}
err := cache.WriteFile(w, path)
if err == filecache.ItemIsDirectory {
DirServer(w, r)
} else if err != nil {
ServerError(w, r)
}
}
When cache.Start() is called, a goroutine is launched in the background
that routinely checks the cache for expired items. The delay between
runs is specified as the number of seconds given by cache.Every ("every
cache.Every seconds, check for expired items"). There are three criteria
used to determine whether an item in the cache should be expired; they are:
1. Has the file been modified on disk? (The cache stores the last time
of modification at the time of caching, and compares that to the
file's current last modification time).
2. Has the file been in the cache for longer than the maximum allowed
time? This check can be disabled by setting the cache's ExpireItem
field to 0; in this case, the cache will only expire items that have
been modified since caching or that satisfy the next condition.
3. Is the cache at capacity? When a file is being cached, a check is
made to see if the cache is currently filled. If it is, the item that
was last accessed the longest ago is expired and the new item takes
its place. When loading items asynchronously, this check might miss
the fact that the cache will be at capacity; the background scanner
performs a check after its regular checks to ensure that the cache is
not at capacity.
The background scanner can be disabled by setting cache.Every to 0; if so,
cache expiration is only done when the cache is at capacity.
Once the cache is no longer needed, a call to cache.Stop() will close down
the channels and signal the background scanner that it should stop.
*/
package filecache | doc.go | 0.692538 | 0.499329 | doc.go | starcoder |
package querybuilder
import (
"fmt"
"strconv"
"strings"
)
// QueryBuilder builds an insert query based on the number of arguments, how many values to insert,
// and an initial query defined as `insert into table_name(arg1, ..., argn) values %s;`.
// QueryBuilder maintains a map of queries that have already been built in memory to improve performance.
type QueryBuilder struct {
	queries map[int]string // cache of built queries, keyed by number of value tuples
	query string // base query containing a %s placeholder for the values clause
	numArgs int // number of columns (placeholders) per value tuple
}
// New creates a QueryBuilder and initializes it with the initial query, the
// number of arguments, and an empty (0-size) queries cache.
func New(query string, numArgs int) QueryBuilder {
	return QueryBuilder{
		queries: make(map[int]string),
		query:   query,
		numArgs: numArgs,
	}
}
// Init pre-populates the queries cache with queries for 2 through n+1 value
// tuples (the cache map is shared, so a value receiver is sufficient).
func (q QueryBuilder) Init(n int) {
	const offset = 2
	for size := offset; size < n+offset; size++ {
		q.queries[size] = q.build(size)
	}
}
// Build returns the insert query for the given number of value tuples,
// serving it from the cache when it has been built before.
func (q QueryBuilder) Build(size int) string {
	if query, ok := q.queries[size]; ok {
		return query
	}
	return q.build(size)
}
// build constructs a values clause of |size| tuples of q.numArgs positional
// parameters ($1, $2, ...), substitutes it into the base query via %s, and
// caches the result under |size|.
func (q QueryBuilder) build(size int) string {
	var sb strings.Builder
	// Pre-size the builder with an estimate of the final clause length.
	sb.Grow(q.growLength(size))
	for i := 0; i < size; i++ {
		// n is the zero-based index of this tuple's first parameter.
		n := i * q.numArgs
		sb.WriteString("(")
		for j := 0; j < q.numArgs; j++ {
			sb.WriteString("$")
			sb.WriteString(strconv.Itoa(n + j + 1))
			if j != q.numArgs-1 {
				sb.WriteString(", ")
			}
		}
		// Close and separate every tuple except the last; the final ")" is
		// appended once after the loop.
		if i != size-1 {
			sb.WriteString("),\n")
		}
	}
	sb.WriteString(")")
	query := fmt.Sprintf(q.query, sb.String())
	// Cache for later Build calls (the map header is shared by all copies).
	q.queries[size] = query
	return query
}
// growLength estimates the final length of the generated query so the
// strings.Builder in build can be grown once up front.
// NOTE(review): assumes at most 3 digits per parameter index — confirm this
// bound holds for very large batch sizes (it only affects the growth hint).
func (q QueryBuilder) growLength(size int) int {
	digitsLength := q.numArgs * 3
	numberOfDollars := q.numArgs
	numberOfCommasAndSpaces := (q.numArgs - 1) * 2
	numberOfParenthesis := 2
	// valuesLength is the length of a value to be inserted ($1, ..., $2)
	valuesLength := digitsLength + numberOfDollars + numberOfCommasAndSpaces + numberOfParenthesis
	queryLength := len(q.query) + valuesLength*size + (size - 1) + (size-1)*2 - 2
	return queryLength
}
package colorx
import (
"image/color"
"math"
)
// HSLA is an implementation of the HSL (Hue, Saturation, Lightness) color
// model with an alpha channel. Note: unlike HSV/HSB, the third component
// here is Lightness, not Value/Brightness.
type HSLA struct {
	H float64 // Hue ∈ [0, 360)
	S float64 // Saturation ∈ [0, 1]
	L float64 // Lightness ∈ [0, 1]
	A float64 // Alpha ∈ [0, 1]
}
// HSLAModel is a color.Model that converts any color.Color to the HSLA
// color model defined in this package.
var HSLAModel = color.ModelFunc(hslaModel)
// hslaModel converts an arbitrary color.Color into an HSLA value. A color
// that is already HSLA is returned unchanged.
func hslaModel(c color.Color) color.Color {
	if hsla, ok := c.(HSLA); ok {
		return hsla
	}
	// Reduce the 16-bit premultiplied channels to 8 bits before converting.
	r, g, b, a := c.RGBA()
	h, s, l, alpha := RGBAToHSLA(uint8(r>>8), uint8(g>>8), uint8(b>>8), uint8(a>>8))
	return HSLA{H: h, S: s, L: l, A: alpha}
}
// RGBAToHSLA converts 8-bit RGBA channels to Hue ∈ [0, 360), Saturation,
// Lightness and Alpha (each ∈ [0, 1]).
func RGBAToHSLA(r, g, b, a uint8) (float64, float64, float64, float64) {
	// Normalize each channel to [0, 1].
	fr := float64(r) / math.MaxUint8
	fg := float64(g) / math.MaxUint8
	fb := float64(b) / math.MaxUint8
	fa := float64(a) / math.MaxUint8

	// Most and least dominant channels and their spread.
	hi := math.Max(fr, math.Max(fg, fb))
	lo := math.Min(fr, math.Min(fg, fb))
	delta := hi - lo

	// Lightness is the midpoint of the channel extremes.
	l := (hi + lo) / 2.0

	// Saturation is zero for achromatic colors (delta == 0).
	var s float64
	if delta != 0.0 {
		s = delta / (1.0 - math.Abs(2.0*l-1.0))
	}

	// Hue is derived from whichever channel dominates; the +360 keeps the
	// intermediate value positive before the final modulo.
	var h float64
	switch hi {
	case lo: // achromatic
		h = 0.0
	case fr:
		h = math.FMA(60.0, math.Mod((fg-fb)/delta, 6), 360.0)
	case fg:
		h = math.FMA(60.0, (fb-fr)/delta+2, 360.0)
	case fb:
		h = math.FMA(60.0, (fr-fg)/delta+4, 360.0)
	}
	return math.Mod(h, 360.0), s, l, fa
}
// RGBA returns the alpha-premultiplied red, green, blue and alpha values for the color.
// It implements color.Color using the standard HSL-to-RGB conversion:
// chroma c, intermediate x, and lightness offset m select per-sextant channels.
func (hsla HSLA) RGBA() (r, g, b, a uint32) {
	var rgba color.RGBA
	// Normalize hue into [0, 360).
	hsla.H = math.Mod(hsla.H+360.0, 360.0)
	// Chroma: the color intensity component.
	c := (1 - math.Abs(2*hsla.L-1)) * hsla.S
	// Second-largest channel value within the sextant.
	x := c * (1 - math.Abs(math.Mod(hsla.H/60.0, 2.0)-1))
	// Lightness offset added to every channel.
	m := hsla.L - c/2
	rgba.A = uint8(hsla.A * math.MaxUint8)
	// sextant will be the sextant of the dominant color.
	sextant, _ := math.Modf(hsla.H / 60.0)
	switch int(sextant) {
	case 0:
		rgba.R = uint8(math.Floor((c + m) * math.MaxUint8))
		rgba.G = uint8(math.Floor((x + m) * math.MaxUint8))
		rgba.B = uint8(math.Floor(m * math.MaxUint8))
	case 1:
		rgba.R = uint8(math.Floor((x + m) * math.MaxUint8))
		rgba.G = uint8(math.Floor((c + m) * math.MaxUint8))
		rgba.B = uint8(math.Floor(m * math.MaxUint8))
	case 2:
		rgba.R = uint8(math.Floor(m * math.MaxUint8))
		rgba.G = uint8(math.Floor((c + m) * math.MaxUint8))
		rgba.B = uint8(math.Floor((x + m) * math.MaxUint8))
	case 3:
		rgba.R = uint8(math.Floor(m * math.MaxUint8))
		rgba.G = uint8(math.Floor((x + m) * math.MaxUint8))
		rgba.B = uint8(math.Floor((c + m) * math.MaxUint8))
	case 4:
		rgba.R = uint8(math.Floor((x + m) * math.MaxUint8))
		rgba.G = uint8(math.Floor(m * math.MaxUint8))
		rgba.B = uint8(math.Floor((c + m) * math.MaxUint8))
	default: // case 5
		rgba.R = uint8(math.Floor((c + m) * math.MaxUint8))
		rgba.G = uint8(math.Floor(m * math.MaxUint8))
		rgba.B = uint8(math.Floor((x + m) * math.MaxUint8))
	}
	return rgba.RGBA()
}
package data
import (
"image/color"
"math"
)
// g : Gravity constant (the universal gravitational constant used in Newton's law of gravitation)
const g = 6.67428e-11
// Body : A celestial body, star or planet
type Body struct {
	IsStar bool // IsStar : Is this the star of the system
	Name string // Name : The name of the star/planet
	Radius float64 // Radius : The radius of the star/planet
	Mass float64 // Mass : The mass of the star/planet
	Position Vector2 // Position : The position of the star/planet
	Velocity Vector2 // Velocity : The velocity of the star/planet
	Color string // Color : The color of the star/planet in HEX form
	ColorObj color.Color // ColorObj : The color of the star/planet in RGB form
	DistanceToStar float64 // DistanceToStar : The distance to the star of the system (updated by Attraction)
	Orbit []Vector2 // Orbit : The positions visited so far (appended by UpdatePosition)
}
// Attraction : Calculates the gravitational force vector that other exerts
// on b. As a side effect, when other is the system's star, b.DistanceToStar
// is refreshed with the current separation.
func (b *Body) Attraction(other *Body) Vector2 {
	// Displacement from b toward other, and the scalar separation.
	delta := other.Position.Sub(b.Position)
	dist := delta.Length()
	if other.IsStar {
		b.DistanceToStar = dist
	}
	// Newton's law of gravitation: F = G*m*M / r^2.
	force := g * b.Mass * other.Mass / dist / dist
	// Direction of the force as an angle from b toward other.
	angle := math.Atan2(delta.Y, delta.X)
	return Vector2{X: math.Cos(angle), Y: math.Sin(angle)}.Mul(force)
}
// UpdatePosition : Update the position each cycle
func (b *Body) UpdatePosition(solar *SolarSystem, timestamp float64) {
total := Vector2{}
// For each body, calculate the force to every other body
for i := range solar.Bodies {
body := solar.Bodies[i]
if b == body {
continue
}
total = total.Add(b.Attraction(body))
}
// Calculate the velocity change
b.Velocity = b.Velocity.Add(total.Mul(timestamp / b.Mass)) // F = m*a & a = v*t
// Calculate the position change
b.Position = b.Position.Add(b.Velocity.Mul(timestamp))
// Store the new position in the orbit
b.Orbit = append(b.Orbit, b.Position)
} | internal/data/body.go | 0.836555 | 0.55923 | body.go | starcoder |
package utils
import (
"fmt"
"reflect"
"strconv"
"strings"
"time"
)
// TimeFormats lists the layouts tried, in order, when decoding a string into a time.Time.
var TimeFormats = []string{"1/2/2006", "1/2/2006 15:4:5", "2006-1-2 15:4:5", "2006-1-2 15:4", "2006-1-2", "1-2", "15:4:5", "15:4", "15", "15:4:5 Jan 2, 2006 MST"}
// Decoder is the interface that wraps the basic Decode method.
type Decoder interface {
	// Decode converts the decoder's input into the supplied output value,
	// which must be a pointer.
	Decode(interface{}) error
}
// NewDecoder returns a Decoder that converts val into a caller-supplied
// output value.
func NewDecoder(val interface{}) Decoder {
	return &decoder{input: val}
}
// decoder is the default Decoder implementation; input holds the raw value
// to be converted.
type decoder struct {
	input interface{}
}
// error wraps errInfo with the package's error prefix.
func (d *decoder) error(errInfo string) error {
	return fmt.Errorf("[go-dal:utils:decode]%s", errInfo)
}
// Decode converts the decoder's input into output, which must be an
// addressable destination (i.e. a pointer).
func (d *decoder) Decode(output interface{}) error {
	dst := reflect.Indirect(reflect.ValueOf(output))
	if !dst.CanAddr() {
		return d.error("output must be addressable (a pointer)")
	}
	return d.decode(d.input, dst)
}
// getKind collapses sized int/uint/float kinds into reflect.Int,
// reflect.Uint and reflect.Float32 respectively; other kinds pass through.
func (d *decoder) getKind(val reflect.Value) reflect.Kind {
	switch kind := val.Kind(); {
	case kind >= reflect.Int && kind <= reflect.Int64:
		return reflect.Int
	case kind >= reflect.Uint && kind <= reflect.Uint64:
		return reflect.Uint
	case kind >= reflect.Float32 && kind <= reflect.Float64:
		return reflect.Float32
	default:
		return kind
	}
}
// decode dispatches data into outputValue based on the output's (collapsed)
// kind. A nil or invalid data value resets outputValue to its zero value.
func (d *decoder) decode(data interface{}, outputValue reflect.Value) (err error) {
	dataVal := reflect.Indirect(reflect.ValueOf(data))
	if !dataVal.IsValid() {
		outputValue.Set(reflect.Zero(outputValue.Type()))
		return
	}
	switch outputKind := d.getKind(outputValue); outputKind {
	case reflect.Bool:
		err = d.decodeBool(data, outputValue)
	case reflect.String:
		err = d.decodeString(data, outputValue)
	case reflect.Int:
		err = d.decodeInt(data, outputValue)
	case reflect.Uint:
		err = d.decodeUint(data, outputValue)
	case reflect.Float32:
		err = d.decodeFloat(data, outputValue)
	case reflect.Struct:
		// time.Time gets dedicated parsing; other structs decode field-by-field.
		switch outputType := outputValue.Type().String(); outputType {
		case "time.Time":
			err = d.decodeTime(data, outputValue)
		default:
			err = d.decodeStruct(data, outputValue)
		}
	case reflect.Map:
		err = d.decodeMap(data, outputValue)
	case reflect.Slice:
		err = d.decodeSlice(data, outputValue)
	case reflect.Interface:
		err = d.decodeBasic(data, outputValue)
	default:
		err = fmt.Errorf("Unsupported type: %s", outputKind)
	}
	return
}
// decodeString stores data into the string value val, converting from
// string, bool ("1"/"0"), and the numeric kinds.
func (d *decoder) decodeString(data interface{}, val reflect.Value) error {
	dataVal := reflect.ValueOf(data)
	switch d.getKind(dataVal) {
	case reflect.String:
		val.SetString(dataVal.String())
	case reflect.Bool:
		if dataVal.Bool() {
			val.SetString("1")
		} else {
			val.SetString("0")
		}
	case reflect.Int:
		val.SetString(strconv.FormatInt(dataVal.Int(), 10))
	case reflect.Uint:
		val.SetString(strconv.FormatUint(dataVal.Uint(), 10))
	case reflect.Float32:
		val.SetString(strconv.FormatFloat(dataVal.Float(), 'f', -1, 64))
	default:
		return fmt.Errorf("expected type '%s', got unconvertible type '%s'", val.Type(), dataVal.Type())
	}
	return nil
}
// decodeInt stores data into the signed-integer value val, converting from
// the numeric kinds, bool (1/0), and base-10 strings ("" decodes as 0).
func (d *decoder) decodeInt(data interface{}, val reflect.Value) error {
	dataVal := reflect.ValueOf(data)
	switch d.getKind(dataVal) {
	case reflect.Int:
		val.SetInt(dataVal.Int())
	case reflect.Uint:
		val.SetInt(int64(dataVal.Uint()))
	case reflect.Float32:
		val.SetInt(int64(dataVal.Float()))
	case reflect.Bool:
		var n int64
		if dataVal.Bool() {
			n = 1
		}
		val.SetInt(n)
	case reflect.String:
		s := dataVal.String()
		if s == "" {
			s = "0"
		}
		i, err := strconv.ParseInt(s, 10, 64)
		if err != nil {
			return err
		}
		val.SetInt(i)
	default:
		return fmt.Errorf("expected type '%s', got unconvertible type '%s'", val.Type(), dataVal.Type())
	}
	return nil
}
// decodeUint stores data into the unsigned-integer value val, converting
// from the numeric kinds, bool (1/0), and base-10 strings ("" decodes as 0).
func (d *decoder) decodeUint(data interface{}, val reflect.Value) error {
	dataVal := reflect.ValueOf(data)
	switch d.getKind(dataVal) {
	case reflect.Int:
		val.SetUint(uint64(dataVal.Int()))
	case reflect.Uint:
		val.SetUint(dataVal.Uint())
	case reflect.Float32:
		val.SetUint(uint64(dataVal.Float()))
	case reflect.Bool:
		var n uint64
		if dataVal.Bool() {
			n = 1
		}
		val.SetUint(n)
	case reflect.String:
		s := dataVal.String()
		if s == "" {
			s = "0"
		}
		u, err := strconv.ParseUint(s, 10, 64)
		if err != nil {
			return err
		}
		val.SetUint(u)
	default:
		return fmt.Errorf("expected type '%s', got unconvertible type '%s'", val.Type(), dataVal.Type())
	}
	return nil
}
// decodeBool stores data into the bool value val: numbers are true when
// non-zero, strings are parsed via strconv.ParseBool ("" decodes as false).
func (d *decoder) decodeBool(data interface{}, val reflect.Value) error {
	dataVal := reflect.ValueOf(data)
	switch d.getKind(dataVal) {
	case reflect.Bool:
		val.SetBool(dataVal.Bool())
	case reflect.Int:
		val.SetBool(dataVal.Int() != 0)
	case reflect.Uint:
		val.SetBool(dataVal.Uint() != 0)
	case reflect.Float32:
		val.SetBool(dataVal.Float() != 0)
	case reflect.String:
		b, err := strconv.ParseBool(dataVal.String())
		switch {
		case err == nil:
			val.SetBool(b)
		case dataVal.String() == "":
			val.SetBool(false)
		default:
			return err
		}
	default:
		return fmt.Errorf("expected type '%s', got unconvertible type '%s'", val.Type(), dataVal.Type())
	}
	return nil
}
// decodeFloat stores data into the float value val, converting from the
// numeric kinds, bool (1/0), and decimal strings ("" decodes as 0).
func (d *decoder) decodeFloat(data interface{}, val reflect.Value) error {
	dataVal := reflect.ValueOf(data)
	switch d.getKind(dataVal) {
	case reflect.Int:
		val.SetFloat(float64(dataVal.Int()))
	case reflect.Uint:
		val.SetFloat(float64(dataVal.Uint()))
	case reflect.Float32:
		val.SetFloat(float64(dataVal.Float()))
	case reflect.Bool:
		var n float64
		if dataVal.Bool() {
			n = 1
		}
		val.SetFloat(n)
	case reflect.String:
		s := dataVal.String()
		if s == "" {
			s = "0"
		}
		f, err := strconv.ParseFloat(s, 64)
		if err != nil {
			return err
		}
		val.SetFloat(f)
	default:
		return fmt.Errorf("expected type '%s', got unconvertible type '%s'", val.Type(), dataVal.Type())
	}
	return nil
}
// decodeMap populates the map value val from either a map (decoding each
// key/value pair into the target key/element types) or a struct (using
// exported field names as keys). Zero-valued struct fields are skipped, and
// time.Time fields are rendered as RFC3339Nano strings when the map's
// element type is string.
func (d *decoder) decodeMap(data interface{}, val reflect.Value) error {
	valType := val.Type()
	valKeyType := valType.Key()
	valElemType := valType.Elem()
	// Work against a non-nil map; the result is assigned back to val at the end.
	valMap := val
	if val.IsNil() {
		valMap = reflect.MakeMap(reflect.MapOf(valKeyType, valElemType))
	}
	dataVal := reflect.Indirect(reflect.ValueOf(data))
	switch dataVal.Kind() {
	case reflect.Map:
		for _, dataKey := range dataVal.MapKeys() {
			currentKey := reflect.Indirect(reflect.New(valKeyType))
			if err := d.decode(dataKey.Interface(), currentKey); err != nil {
				return err
			}
			currentValue := reflect.Indirect(reflect.New(valElemType))
			if err := d.decode(dataVal.MapIndex(dataKey).Interface(), currentValue); err != nil {
				return err
			}
			valMap.SetMapIndex(currentKey, currentValue)
		}
	case reflect.Struct:
		// Use the indirected value's type so pointer-to-struct inputs do not
		// panic on NumField (reflect.TypeOf(data) would be the pointer type).
		dataType := dataVal.Type()
		for i, l := 0, dataType.NumField(); i < l; i++ {
			field := dataType.Field(i)
			fieldValue := dataVal.FieldByName(field.Name).Interface()
			// Skip fields still at their zero value.
			if reflect.DeepEqual(fieldValue, reflect.Zero(field.Type).Interface()) {
				continue
			}
			// Special-case time.Time into a string element.
			// Fix: write into valMap (not val) — val may be a nil map, and
			// entries must survive the final val.Set(valMap).
			if field.Type.String() == "time.Time" && valElemType.Kind() == reflect.String {
				valMap.SetMapIndex(reflect.ValueOf(field.Name), reflect.ValueOf(fieldValue.(time.Time).Format(time.RFC3339Nano)))
				continue
			}
			currentValue := reflect.Indirect(reflect.New(valElemType))
			if err := d.decode(fieldValue, currentValue); err != nil {
				return err
			}
			valMap.SetMapIndex(reflect.ValueOf(field.Name), currentValue)
		}
	default:
		return fmt.Errorf("expected type '%s', got unconvertible type '%s'", val.Type(), dataVal.Type())
	}
	val.Set(valMap)
	return nil
}
// decodeSlice converts a slice into val, decoding element by element; a
// slice of the exact target type is assigned wholesale.
func (d *decoder) decodeSlice(data interface{}, val reflect.Value) error {
	dataVal := reflect.Indirect(reflect.ValueOf(data))
	if dataVal.Kind() != reflect.Slice {
		return fmt.Errorf("Expected type slice")
	}
	if dataVal.Type() == val.Type() {
		val.Set(dataVal)
		return nil
	}
	n := dataVal.Len()
	out := reflect.MakeSlice(reflect.SliceOf(val.Type().Elem()), n, n)
	for i := 0; i < n; i++ {
		if err := d.decode(dataVal.Index(i).Interface(), out.Index(i)); err != nil {
			return err
		}
	}
	val.Set(out)
	return nil
}
// decodeStruct populates the struct value val from a map, matching map keys
// to exported field names — first by exact key, then case-insensitively.
// A struct of the exact target type is assigned wholesale.
func (d *decoder) decodeStruct(data interface{}, val reflect.Value) error {
	dataVal := reflect.Indirect(reflect.ValueOf(data))
	valType := val.Type()
	if dataVal.Type() == valType {
		val.Set(dataVal)
		return nil
	}
	if kind := dataVal.Kind(); kind != reflect.Map {
		return fmt.Errorf("Expected a map, got '%s'", kind.String())
	}
	for i, l := 0, valType.NumField(); i < l; i++ {
		fieldName := valType.Field(i).Name
		rawMapKey := reflect.ValueOf(fieldName)
		rawMapValue := dataVal.MapIndex(rawMapKey)
		if !rawMapValue.IsValid() {
			// Fall back to a case-insensitive scan over the map's string keys.
			dataValKeys := dataVal.MapKeys()
			for j, jl := 0, len(dataValKeys); j < jl; j++ {
				rawMapKeyName, ok := dataValKeys[j].Interface().(string)
				if !ok {
					continue
				}
				if strings.EqualFold(fieldName, rawMapKeyName) {
					rawMapKey = dataValKeys[j]
					rawMapValue = dataVal.MapIndex(dataValKeys[j])
					break
				}
			}
			// No matching key: leave this field untouched.
			if !rawMapValue.IsValid() {
				continue
			}
		}
		field := val.Field(i)
		// Unexported fields cannot be set via reflection.
		if !field.CanSet() {
			continue
		}
		if err := d.decode(rawMapValue.Interface(), field); err != nil {
			return err
		}
	}
	return nil
}
// decodeTime parses data into the time.Time value val. Strings are tried
// against each layout in TimeFormats, in order; an unrecognized layout is an
// error. NOTE(review): non-string, non-time inputs (and empty strings)
// silently decode as time.Now() — confirm this fallback is intended.
func (d *decoder) decodeTime(data interface{}, val reflect.Value) error {
	var tVal time.Time
	if v, ok := data.(string); ok && v != "" {
		var exist bool
		for i, l := 0, len(TimeFormats); i < l; i++ {
			t, err := time.Parse(TimeFormats[i], v)
			if err == nil {
				tVal = t
				exist = true
				break
			}
		}
		if !exist {
			return fmt.Errorf("Unknown time format.")
		}
	} else if v, ok := data.(time.Time); ok {
		tVal = v
	} else {
		// Fallback: any other input decodes as the current time.
		tVal = time.Now()
	}
	val.Set(reflect.ValueOf(tVal))
	return nil
}
func (d *decoder) decodeBasic(data interface{}, val reflect.Value) error {
dataVal := reflect.ValueOf(data)
dataValType := dataVal.Type()
if !dataValType.AssignableTo(val.Type()) {
return fmt.Errorf("expected type '%s', got unconvertible type '%s'", val.Type(), dataValType)
}
val.Set(dataVal)
return nil
} | utils/decode.go | 0.61855 | 0.473475 | decode.go | starcoder |
package epoch_processing
import (
. "github.com/protolambda/zrnt/eth2/beacon"
)
// ProcessEpochJustification runs the per-epoch justification and finalization
// update: it tallies attesting balances for the previous and current epoch
// boundaries, rotates the justification bitfield, justifies epochs reaching a
// 2/3 supermajority, and finalizes per the four finality rules.
func ProcessEpochJustification(state *BeaconState) {
	previousEpoch := state.PreviousEpoch()
	currentEpoch := state.Epoch()
	// epoch numbers are trusted, no errors
	previousBoundaryBlockRoot, _ := state.GetBlockRoot(previousEpoch.GetStartSlot())
	currentBoundaryBlockRoot, _ := state.GetBlockRoot(currentEpoch.GetStartSlot())
	// Remember the pre-rotation justified epochs; finalization reads these.
	oldPreviousJustifiedEpoch := state.PreviousJustifiedEpoch
	oldCurrentJustifiedEpoch := state.CurrentJustifiedEpoch
	// Unslashed validators that attested to the respective epoch boundary.
	previousEpochBoundaryAttesterIndices := state.FilterUnslashed(state.GetAttesters(
		state.PreviousEpochAttestations,
		func(att *AttestationData) bool {
			return att.TargetRoot == previousBoundaryBlockRoot
		}))
	currentEpochBoundaryAttesterIndices := state.FilterUnslashed(state.GetAttesters(
		state.CurrentEpochAttestations,
		func(att *AttestationData) bool {
			return att.TargetRoot == currentBoundaryBlockRoot
		}))
	// Rotate current into previous
	state.PreviousJustifiedEpoch = state.CurrentJustifiedEpoch
	state.PreviousJustifiedRoot = state.CurrentJustifiedRoot
	// Rotate the justification bitfield up one epoch to make room for the current epoch
	state.JustificationBitfield <<= 1
	// Get the sum balances of the boundary attesters, and the total balance at the time.
	previousEpochBoundaryAttestingBalance := state.GetTotalBalanceOf(previousEpochBoundaryAttesterIndices)
	previousTotalBalance := state.GetTotalBalanceOf(state.ValidatorRegistry.GetActiveValidatorIndices(currentEpoch - 1))
	currentEpochBoundaryAttestingBalance := state.GetTotalBalanceOf(currentEpochBoundaryAttesterIndices)
	currentTotalBalance := state.GetTotalBalanceOf(state.ValidatorRegistry.GetActiveValidatorIndices(currentEpoch))
	// > Justification
	// If the previous epoch gets justified, fill the second last bit
	// (a 2/3 supermajority: attesting*3 >= total*2 avoids division).
	if previousEpochBoundaryAttestingBalance*3 >= previousTotalBalance*2 {
		state.CurrentJustifiedEpoch = previousEpoch
		state.CurrentJustifiedRoot = previousBoundaryBlockRoot
		state.JustificationBitfield |= 1 << 1
	}
	// If the current epoch gets justified, fill the last bit
	if currentEpochBoundaryAttestingBalance*3 >= currentTotalBalance*2 {
		state.CurrentJustifiedEpoch = currentEpoch
		state.CurrentJustifiedRoot = currentBoundaryBlockRoot
		state.JustificationBitfield |= 1 << 0
	}
	// > Finalization
	// Later rules may overwrite earlier ones; order follows the spec.
	bitf := state.JustificationBitfield
	// The 2nd/3rd/4th most recent epochs are all justified, the 2nd using the 4th as source
	if (bitf>>1)&7 == 7 && state.PreviousJustifiedEpoch == currentEpoch-3 {
		state.FinalizedEpoch = oldPreviousJustifiedEpoch
	}
	// The 2nd/3rd most recent epochs are both justified, the 2nd using the 3rd as source
	if (bitf>>1)&3 == 3 && state.PreviousJustifiedEpoch == currentEpoch-2 {
		state.FinalizedEpoch = oldPreviousJustifiedEpoch
	}
	// The 1st/2nd/3rd most recent epochs are all justified, the 1st using the 3rd as source
	if (bitf>>0)&7 == 7 && state.CurrentJustifiedEpoch == currentEpoch-2 {
		state.FinalizedEpoch = oldCurrentJustifiedEpoch
	}
	// The 1st/2nd most recent epochs are both justified, the 1st using the 2nd as source
	if (bitf>>0)&3 == 3 && state.CurrentJustifiedEpoch == currentEpoch-1 {
		state.FinalizedEpoch = oldCurrentJustifiedEpoch
	}
	// Refresh the finalized root to match the (possibly updated) finalized epoch.
	root, _ := state.GetBlockRoot(state.FinalizedEpoch.GetStartSlot())
	state.FinalizedRoot = root
}
package push_relabel
import (
"container/heap"
"math"
"sort"
)
// Arc defines an edge along which flow may occur in a flow network. Arcs are
// created in pairs: every Arc on a Node with positive Capacity, Flow, and
// Priority, has a reciprocal Arc on the target Node with Capacity of zero, and
// negative Flow and Priority of equivalent absolute values (the residual Arc).
type Arc struct {
	// Capacity of the Arc in the flow network. Positive,
	// however residual Arcs may have Capacity of zero.
	Capacity int32
	// Output Flow of the Arc in the network. Zero or positive in Arcs with Capacity > 0,
	// and zero or negative in their Capacity=0 residuals.
	Flow int32
	// Priority is the (descending) order in which Arcs should be selected for.
	Priority int8
	// Index of the reciprocal of this Arc, in |Target.Arcs|. Used to keep the
	// paired Arc's Flow consistent when this Arc's Flow changes.
	reciprocal uint32
	// Target Node of this Arc.
	Target *Node
}
// Node defines a vertex in a flow network, through which flow occurs.
type Node struct {
	// User-defined ID of this Node. Useful for identifying Nodes reached
	// by walking Arcs.
	ID uint32
	// Height label of this Node. Run-time is reduced if this is initialized
	// to the distance of the Node from the flow network sink.
	Height uint32
	// Ordered Arcs of this Node (both primary and residual).
	Arcs []Arc
	// Excess flow of this Node, which must be reduced to zero before push/relabel completes.
	excess uint32
	// next Arc in |Arcs| to be evaluated. Reset to zero on relabel.
	next uint32
}
// FindMaxFlow determines the maximum flow of the flow network rooted at
// |source|, repeatedly discharging active (excess-carrying) nodes until none
// remain.
func FindMaxFlow(source, sink *Node) {
	// The source starts with effectively unbounded excess so it can saturate
	// its outgoing arcs.
	source.excess = math.MaxUint32
	active := &heightHeap{source}
	for len(*active) != 0 {
		node := heap.Pop(active).(*Node)
		if node.excess == 0 {
			panic("invalid pre-excess")
		}
		discharge(node, sink, active)
		if node != source && node.excess != 0 {
			panic("invalid post-excess")
		}
	}
}
// discharge pushes all excess flow from a Node, relabeling the Node Height as required.
// Newly-activated neighbors (other than the sink) are pushed onto |active|.
func discharge(node, sink *Node, active *heightHeap) {
	for node.excess > 0 {
		// "Relabel" case. We examine Arcs having available capacity in the residual graph
		// to identity the neighbor(s) of minimal height to which we could push flow. Then, we
		// increase the |node|'s height to be one higher. This maintains the invariant that
		// we always push flow "downhill".
		if node.next == uint32(len(node.Arcs)) {
			var minHeight uint32 = math.MaxUint32
			for _, adj := range node.Arcs {
				if adj.Capacity-adj.Flow > 0 {
					minHeight = min(minHeight, adj.Target.Height)
				}
			}
			// No residual capacity anywhere: nothing more can be pushed.
			if minHeight == math.MaxUint32 {
				return
			}
			node.Height = minHeight + 1
			node.next = 0
		}
		// "Push" case: move as much excess as the residual capacity allows
		// along the next admissible (downhill, non-saturated) arc.
		var adj = node.Arcs[node.next]
		var residual = adj.Capacity - adj.Flow
		if residual > 0 && node.Height > adj.Target.Height {
			var delta = min(node.excess, uint32(residual))
			// Update the arc and its reciprocal symmetrically.
			node.Arcs[node.next].Flow += int32(delta)
			adj.Target.Arcs[adj.reciprocal].Flow -= int32(delta)
			node.excess -= delta
			adj.Target.excess += delta
			if adj.Target.excess == delta && adj.Target != sink {
				// Our push caused |adj.target| to become an active node.
				heap.Push(active, adj.Target)
			}
		}
		node.next++
	}
}
// InitNodes returns a slice of |n| Nodes, reusing the backing array of
// |nodes| when its capacity allows, and allocating a fresh backing slice
// (with room to grow to 2n) otherwise. Every returned Node is reset to
// ID = index, Height = |height|, and an emptied — but capacity-preserving —
// Arcs slice, so Arc storage is recycled across calls.
func InitNodes(nodes []Node, n int, height int) []Node {
	if cap(nodes) >= n {
		nodes = nodes[:n]
	} else {
		grown := make([]Node, n, n*2)
		copy(grown, nodes)
		nodes = grown
	}
	for i := range nodes {
		nodes[i] = Node{
			ID:     uint32(i),
			Height: uint32(height),
			Arcs:   nodes[i].Arcs[:0],
		}
	}
	return nodes
}
// AddArc adds an Arc from |from| to |to|, having |capacity| and |priority|.
// It also creates a residual Arc from |to| to |from| with zero Capacity and
// negated Priority. Panics if |capacity| is outside [0, math.MaxInt32] or
// |priority| is outside [0, math.MaxInt8].
func AddArc(from, to *Node, capacity, priority int) {
	// Capture each Arc's future index so the pair can reference one another.
	var fromInd, toInd = len(from.Arcs), len(to.Arcs)
	if capacity < 0 || capacity > math.MaxInt32 {
		panic("invalid capacity")
	}
	if priority < 0 || priority > math.MaxInt8 {
		panic("invalid priority")
	}
	// Primary Arc: carries the capacity and (non-negative) priority.
	from.Arcs = append(from.Arcs, Arc{
		Capacity:   int32(capacity),
		Priority:   int8(priority),
		reciprocal: uint32(toInd),
		Target:     to,
	})
	// Residual Arc: zero capacity. Flow pushed along the primary Arc shows
	// up here as negative Flow, which lets push/relabel "undo" flow later.
	to.Arcs = append(to.Arcs, Arc{
		Capacity:   0,
		Priority:   int8(-priority),
		reciprocal: uint32(fromInd),
		Target:     from,
	})
}
// SortNodeArcs orders the Arcs of one or more Nodes by their respective priorities.
// Arcs are sorted in descending Priority order (sort.Slice — not stable).
// Because sorting moves Arcs, the |reciprocal| index stored on each
// counterpart Arc in the target Node is re-pointed afterwards.
func SortNodeArcs(nodes ...Node) {
	for n := range nodes {
		var arcs = nodes[n].Arcs
		sort.Slice(arcs, func(i, j int) bool {
			return arcs[i].Priority > arcs[j].Priority
		})
		// Fix-up reciprocal indices.
		for i, a := range arcs {
			a.Target.Arcs[a.reciprocal].reciprocal = uint32(i)
		}
	}
}
// min returns the smaller of two uint32 values.
func min(a, b uint32) uint32 {
	if b < a {
		return b
	}
	return a
}
// heightHeap orders Nodes on descending Node Height.
// It implements container/heap.Interface so FindMaxFlow always discharges
// the highest active Node first.
type heightHeap []*Node
func (h heightHeap) Len() int { return len(h) }
func (h heightHeap) Less(i, j int) bool { return h[i].Height > h[j].Height }
func (h heightHeap) Swap(i, j int) { h[i], h[j] = h[j], h[i] }
// Push appends x; only called through heap.Push.
func (h *heightHeap) Push(x interface{}) { *h = append(*h, x.(*Node)) }
func (h *heightHeap) Pop() interface{} {
var old, l = *h, len(*h)
var x = old[l-1]
*h = old[0 : l-1]
return x
} | allocator/push_relabel/push_relabel.go | 0.719778 | 0.584449 | push_relabel.go | starcoder |
package bulletproof
import (
"github.com/gtank/merlin"
"github.com/pkg/errors"
"github.com/coinbase/kryptology/pkg/core/curves"
)
// VerifyBatched verifies a given batched range proof.
// It takes in a list of commitments to the secret values as capV instead of a single commitment to a single point
// when compared to the unbatched single range proof case.
// It returns true when the proof verifies, false with a nil error when a
// well-formed proof fails a check, and a non-nil error when verification
// cannot be carried out at all.
func (verifier *RangeVerifier) VerifyBatched(proof *RangeProof, capV []curves.Point, proofGenerators RangeProofGenerators, n int, transcript *merlin.Transcript) (bool, error) {
	// Define nm as the total bits required for secrets, calculated as number of secrets * n
	m := len(capV)
	nm := n * m
	// nm must be less than the number of generators generated
	if nm > len(verifier.generators.G) {
		return false, errors.New("ipp vector length must be less than maxVectorLength")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := verifier.generators.G[0:nm]
	proofH := verifier.generators.H[0:nm]
	// Calc y,z,x from Fiat Shamir heuristic
	y, z, err := calcyzBatched(capV, proof.capA, proof.capS, transcript, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	x, err := calcx(proof.capT1, proof.capT2, transcript, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	wBytes := transcript.ExtractBytes([]byte("getw"), 64)
	w, err := verifier.curve.NewScalar().SetBytesWide(wBytes)
	if err != nil {
		// Wrap message fixed: this is the verify path (was "rangeproof prove").
		return false, errors.Wrap(err, "rangeproof verify")
	}
	// Calc delta(y,z), redefined for batched case on pg21
	deltayzBatched, err := deltayzBatched(y, z, n, m, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	// Check tHat: L65, pg20
	// See equation 72 on pg21
	tHatIsValid := verifier.checktHatBatched(proof, capV, proofGenerators.g, proofGenerators.h, deltayzBatched, x, z, m)
	if !tHatIsValid {
		return false, errors.New("rangeproof verify tHat is invalid")
	}
	// Verify IPP
	hPrime, err := gethPrime(proofH, y, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	capPhmu := getPhmuBatched(proofG, hPrime, proofGenerators.h, proof.capA, proof.capS, x, y, z, proof.mu, n, m, verifier.curve)
	ippVerified, err := verifier.ippVerifier.VerifyFromRangeProof(proofG, hPrime, capPhmu, proofGenerators.u.Mul(w), proof.tHat, proof.ipp, transcript)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	return ippVerified, nil
}
// L65, pg20.
func (verifier *RangeVerifier) checktHatBatched(proof *RangeProof, capV []curves.Point, g, h curves.Point, deltayz, x, z curves.Scalar, m int) bool {
// g^tHat * h^tau_x
gtHat := g.Mul(proof.tHat)
htaux := h.Mul(proof.taux)
lhs := gtHat.Add(htaux)
// V^z^2 * g^delta(y,z) * Tau_1^x * Tau_2^x^2
// g^delta(y,z) * V^(z^2*z^m) * Tau_1^x * Tau_2^x^2
zm := getknVector(z, m, verifier.curve)
zsquarezm := multiplyScalarToScalarVector(z.Square(), zm)
capVzsquaretwom := verifier.curve.Point.SumOfProducts(capV, zsquarezm)
gdeltayz := g.Mul(deltayz)
capTau1x := proof.capT1.Mul(x)
capTau2xsquare := proof.capT2.Mul(x.Square())
rhs := capVzsquaretwom.Add(gdeltayz).Add(capTau1x).Add(capTau2xsquare)
// Compare lhs =? rhs
return lhs.Equal(rhs)
} | pkg/bulletproof/range_batch_verifier.go | 0.801781 | 0.470068 | range_batch_verifier.go | starcoder |
package main
// ---------------------------------------------------------------------------------
// Representation of elements stored in the ledger
// ---------------------------------------------------------------------------------
// AssetType is use to check the type of an asset
type AssetType uint8
// Const representing the types of asset findable in the ledger
const (
ObjectiveType AssetType = iota
DataManagerType
DataSampleType
AlgoType
TraintupleType
TesttupleType
)
// Objective is the representation of one of the element type stored in the ledger
type Objective struct {
Name string `json:"name"`
AssetType AssetType `json:"assetType"`
DescriptionStorageAddress string `json:"descriptionStorageAddress"`
Metrics *HashDressName `json:"metrics"`
Owner string `json:"owner"`
TestDataset *Dataset `json:"testDataset"`
Permissions Permissions `json:"permissions"`
}
// DataManager is the representation of one of the elements type stored in the ledger
type DataManager struct {
Name string `json:"name"`
AssetType AssetType `json:"assetType"`
OpenerStorageAddress string `json:"openerStorageAddress"`
Type string `json:"type"`
Description *HashDress `json:"description"`
Owner string `json:"owner"`
ObjectiveKey string `json:"objectiveKey"`
Permissions Permissions `json:"permissions"`
}
// DataSample is the representation of one of the element type stored in the ledger
type DataSample struct {
AssetType AssetType `json:"assetType"`
DataManagerKeys []string `json:"dataManagerKeys"`
Owner string `json:"owner"`
TestOnly bool `json:"testOnly"`
}
// Algo is the representation of one of the element type stored in the ledger
type Algo struct {
Name string `json:"name"`
AssetType AssetType `json:"assetType"`
StorageAddress string `json:"storageAddress"`
Description *HashDress `json:"description"`
Owner string `json:"owner"`
Permissions Permissions `json:"permissions"`
}
// Traintuple is the representation of one the element type stored in the ledger. It describes a training task occuring on the platform
type Traintuple struct {
AssetType AssetType `json:"assetType"`
AlgoKey string `json:"algoKey"`
Creator string `json:"creator"`
Dataset *Dataset `json:"dataset"`
ComputePlanID string `json:"computePlanID"`
InModelKeys []string `json:"inModels"`
Log string `json:"log"`
ObjectiveKey string `json:"objectiveKey"`
OutModel *HashDress `json:"outModel"`
Perf float32 `json:"perf"`
Permissions Permissions `json:"permissions"`
Rank int `json:"rank"`
Status string `json:"status"`
Tag string `json:"tag"`
}
// Testtuple is the representation of one the element type stored in the ledger. It describes a training task occuring on the platform
type Testtuple struct {
AssetType AssetType `json:"assetType"`
AlgoKey string `json:"algo"`
Certified bool `json:"certified"`
Creator string `json:"creator"`
Dataset *TtDataset `json:"dataset"`
Log string `json:"log"`
Model *Model `json:"model"`
ObjectiveKey string `json:"objective"`
Permissions Permissions `json:"permissions"`
Status string `json:"status"`
Tag string `json:"tag"`
}
// ---------------------------------------------------------------------------------
// Struct used in the representation of elements stored in the ledger
// ---------------------------------------------------------------------------------
// HashDress stores a hash and a Storage Address
type HashDress struct {
Hash string `json:"hash"`
StorageAddress string `json:"storageAddress"`
}
// HashDressName stores a hash, storage address and a name
type HashDressName struct {
Name string `json:"name"`
Hash string `json:"hash"`
StorageAddress string `json:"storageAddress"`
}
// Model stores the traintupleKey leading to the model, its hash and storage addressl
type Model struct {
TraintupleKey string `json:"traintupleKey"`
Hash string `json:"hash"`
StorageAddress string `json:"storageAddress"`
}
// Dataset stores info about a dataManagerKey and a list of associated dataSample
type Dataset struct {
DataManagerKey string `json:"dataManagerKey"`
DataSampleKeys []string `json:"dataSampleKeys"`
Worker string `json:"worker"`
}
// ---------------------------------------------------------------------------------
// Struct used in the representation of outputs when querying some elements
// ---------------------------------------------------------------------------------
// TtDataset stores info about dataset in a Traintyple (train or test data) and in a PredTuple (later)
type TtDataset struct {
Worker string `json:"worker"`
DataSampleKeys []string `json:"keys"`
OpenerHash string `json:"openerHash"`
Perf float32 `json:"perf"`
}
// TtObjective stores info about a objective in a Traintuple
type TtObjective struct {
Key string `json:"hash"`
Metrics *HashDress `json:"metrics"`
}
// Node stores informations about node registered into the network,
// would be used to list authorized nodes for permissions
type Node struct {
ID string `json:"id"`
} | chaincode/ledger.go | 0.655777 | 0.425546 | ledger.go | starcoder |
package sequtil
import (
"github.com/dmiller/go-seq/iseq"
"reflect"
)
// DefaultCompareFn is a default function to use for comparisons.
// Handles identity, nils, strings, numerics, and things implementing the iseq.Comparer interface.
// It returns -1, 0, or 1; nil sorts below any non-nil value. Panics when
// neither argument is comparable by any supported mechanism.
func DefaultCompareFn(k1 interface{}, k2 interface{}) int {
	if k1 == k2 {
		// Identical interface values (including both nil).
		return 0
	}
	if k1 != nil {
		if k2 == nil {
			return 1
		}
		if c, ok := k1.(iseq.Comparer); ok {
			return c.Compare(k2)
		}
		if c, ok := k2.(iseq.Comparer); ok {
			// c compares k2 against k1, so negate to keep k1-relative order.
			return -c.Compare(k1)
		}
		if s, ok := k1.(string); ok {
			return CompareString(s, k2)
		}
		if IsComparableNumeric(k1) {
			return CompareComparableNumeric(k1, k2)
		}
		panic("Can't compare")
	}
	// k1 is nil and k2 is not: nil sorts first.
	return -1
}
// IsComparableNumeric reports whether |v| is a value the package's numeric
// comparison helpers can handle: bool, int/int8/int32/int64,
// uint/uint8/uint32/uint64, or float32/float64.
// (int16/uint16 are not in the list — presumably an upstream decision.)
func IsComparableNumeric(v interface{}) bool {
	switch v.(type) {
	case bool:
		return true
	case int, int8, int32, int64:
		return true
	case uint, uint8, uint32, uint64:
		return true
	case float32, float64:
		return true
	default:
		return false
	}
}
// CompareString compares string |s| against |x|, which must also hold a
// string — it panics otherwise. Returns -1, 0, or 1.
func CompareString(s string, x interface{}) int {
	other, ok := x.(string)
	if !ok {
		panic("can't compare string to non-string")
	}
	switch {
	case s < other:
		return -1
	case s > other:
		return 1
	default:
		return 0
	}
}
// CompareComparableNumeric compares two values, assumed to comparable numerics,
// returning -1, 0, or 1. Bools are mapped to 0/1 before comparing. Dispatch
// is on the dynamic type of |x1|; |x2| may be any supported numeric.
// Panics when |x1| is not a supported numeric type.
func CompareComparableNumeric(x1 interface{}, x2 interface{}) int {
	// x1 should be numeric
	switch x1 := x1.(type) {
	case bool:
		b1 := bool(x1)
		if b1 {
			return compareNumericInt(int64(1), x2)
		}
		return compareNumericInt(int64(0), x2)
	case int, int8, int32, int64:
		n1 := reflect.ValueOf(x1).Int()
		return compareNumericInt(n1, x2)
	case uint, uint8, uint32, uint64:
		n1 := reflect.ValueOf(x1).Uint()
		return compareNumericUint(n1, x2)
	case float32, float64:
		n1 := reflect.ValueOf(x1).Float()
		return compareNumericFloat(n1, x2)
	}
	panic("Expect first arg to be numeric")
}
// compareNumericInt compares the signed integer |n1| against the numeric
// value |x2|, returning -1, 0, or 1. Bools compare as 0/1; a non-numeric
// |x2| yields -1 (a lenient fallback rather than a panic).
func compareNumericInt(n1 int64, x2 interface{}) int {
	switch x2 := x2.(type) {
	case bool:
		var n2 int64
		if x2 {
			n2 = 1
		}
		if n1 < n2 {
			return -1
		}
		if n1 > n2 {
			return 1
		}
		return 0
	case int, int8, int32, int64:
		n2 := reflect.ValueOf(x2).Int()
		if n1 < n2 {
			return -1
		}
		if n1 > n2 {
			return 1
		}
		return 0
	case uint, uint8, uint32, uint64:
		n2 := reflect.ValueOf(x2).Uint()
		// Any negative signed value sorts below every unsigned value.
		if n1 < 0 {
			return -1
		}
		// n1 >= 0 here, so converting it to uint64 is lossless.
		// Bug fix: this previously computed uint64(n2), so any non-negative
		// n1 compared as equal to any unsigned n2.
		un1 := uint64(n1)
		if un1 < n2 {
			return -1
		}
		if un1 > n2 {
			return 1
		}
		return 0
	case float32, float64:
		n2 := reflect.ValueOf(x2).Float()
		fn1 := float64(n1)
		if fn1 < n2 {
			return -1
		}
		if fn1 > n2 {
			return 1
		}
		return 0
	}
	return -1 // what else, other than panic?
}
// compareNumericUint compares the unsigned integer |n1| against the numeric
// value |x2|, returning -1, 0, or 1. Bools compare as 0/1; a non-numeric
// |x2| yields -1 (a lenient fallback rather than a panic).
func compareNumericUint(n1 uint64, x2 interface{}) int {
	switch x2 := x2.(type) {
	case bool:
		b2 := bool(x2)
		var n2 uint64
		if b2 {
			n2 = 1
		}
		if n1 < n2 {
			return -1
		}
		if n1 > n2 {
			return 1
		}
		return 0
	case int, int8, int32, int64:
		n2 := reflect.ValueOf(x2).Int()
		// Any unsigned value sorts above every negative signed value.
		if n2 < 0 {
			return 1
		}
		// n2 >= 0 here, so converting it to uint64 is lossless.
		un2 := uint64(n2)
		if n1 < un2 {
			return -1
		}
		if n1 > un2 {
			return 1
		}
		return 0
	case uint, uint8, uint32, uint64:
		n2 := reflect.ValueOf(x2).Uint()
		if n1 < n2 {
			return -1
		}
		if n1 > n2 {
			return 1
		}
		return 0
	case float32, float64:
		// Comparison happens in float64 space; precision loss is possible
		// for very large n1 (> 2^53).
		n2 := reflect.ValueOf(x2).Float()
		fn1 := float64(n1)
		if fn1 < n2 {
			return -1
		}
		if fn1 > n2 {
			return 1
		}
		return 0
	}
	return -1 // what else, other than panic?
}
// compareNumericFloat compares the float |n1| against the numeric value
// |x2|, returning -1, 0, or 1. Bools compare as 0/1; a non-numeric |x2|
// yields -1 (a lenient fallback rather than a panic).
func compareNumericFloat(n1 float64, x2 interface{}) int {
	var n2 float64
	switch x2 := x2.(type) {
	case bool:
		// Bug fix: bool was previously grouped with the int cases and fed
		// to reflect.Value.Int(), which panics on Kind Bool.
		if x2 {
			n2 = 1
		}
	case int, int8, int32, int64:
		n2 = float64(reflect.ValueOf(x2).Int())
	case uint, uint8, uint32, uint64:
		n2 = float64(reflect.ValueOf(x2).Uint())
	case float32, float64:
		n2 = reflect.ValueOf(x2).Float()
	default:
		return -1 // what else, other than panic?
	}
	if n1 < n2 {
		return -1
	}
	if n1 > n2 {
		return 1
	}
	return 0
}
package collect
import (
"github.com/sxyazi/go-collection/types"
"golang.org/x/exp/constraints"
"math"
"math/rand"
"reflect"
"sort"
"time"
)
/**
* Any slice
*/
// Each invokes callback once per element of items, passing the element and
// its index, in order.
func Each[T ~[]E, E any](items T, callback func(value E, index int)) {
	for i := range items {
		callback(items[i], i)
	}
}
func Same[T ~[]E, E any](items, target T) bool {
if len(items) != len(target) {
return false
} else if len(items) == 0 {
return true
}
kind := reflect.TypeOf(items).Elem().Kind()
if kind == reflect.Slice {
return reflect.DeepEqual(items, target)
}
for index, item := range items {
if Compare(item, "!=", target[index]) {
return false
}
}
return true
}
// First returns the first element of items; the bool is false when items
// is empty (and the element is the zero value).
func First[T ~[]E, E any](items T) (E, bool) {
	var zero E
	if len(items) == 0 {
		return zero, false
	}
	return items[0], true
}
// Last returns the final element of items; the bool is false when items
// is empty (and the element is the zero value).
func Last[T ~[]E, E any](items T) (E, bool) {
	var zero E
	if len(items) == 0 {
		return zero, false
	}
	return items[len(items)-1], true
}
func Index[T ~[]E, E any](items T, target E) int {
if len(items) == 0 {
return -1
}
for index, item := range items {
if Compare(item, "=", target) {
return index
}
}
return -1
}
func Contains[T ~[]E, E any](items T, item E) bool {
return Index(items, item) != -1
}
func Diff[T ~[]E, E any](items, target T) T {
var different T
for _, item := range items {
if Index(target, item) == -1 {
different = append(different, item)
}
}
return different
}
// Filter returns the elements for which callback reports true, preserving
// their relative order. Returns a nil slice when nothing matches.
func Filter[T ~[]E, E any](items T, callback func(value E, index int) bool) T {
	var kept T
	for i, v := range items {
		if callback(v, i) {
			kept = append(kept, v)
		}
	}
	return kept
}
// Map returns a new slice (same length and capacity as items) whose
// elements are callback applied to each input element.
func Map[T ~[]E, E any](items T, callback func(value E, index int) E) T {
	result := make(T, len(items), cap(items))
	for i := range items {
		result[i] = callback(items[i], i)
	}
	return result
}
func Unique[T ~[]E, E any](items T) T {
if len(items) == 0 {
return items
}
c := NewComparisonSet(true)
return Filter(items, func(value E, _ int) bool {
if !c.Has(value) {
c.Add(value)
return true
}
return false
})
}
func Duplicates[T ~[]E, E any](items T) map[int]E {
m := make(map[int]E)
if len(items) == 0 {
return m
}
c := NewComparisonSet(true)
for index, item := range items {
if c.Has(item) {
m[index] = item
} else {
c.Add(item)
}
}
return m
}
// Merge appends the elements of every target slice, in order, onto items
// and returns the result.
func Merge[T ~[]E, E any](items T, targets ...T) T {
	for _, extra := range targets {
		items = append(items, extra...)
	}
	return items
}
// Random returns a uniformly chosen element of items; the bool is false
// when items is empty.
//
// A private rand.Rand is used instead of re-seeding the global source:
// rand.Seed is deprecated, mutates shared state (a data race when other
// goroutines use package rand concurrently), and wall-clock re-seeding
// makes rapid successive calls repeat the same pick.
func Random[T ~[]E, E any](items T) (E, bool) {
	if len(items) == 0 {
		var zero E
		return zero, false
	}
	rng := rand.New(rand.NewSource(time.Now().UnixNano()))
	return items[rng.Intn(len(items))], true
}
func Reverse[T ~[]E, E any](items T) T {
for i, j := 0, len(items)-1; i < j; i, j = i+1, j-1 {
items[i], items[j] = items[j], items[i]
}
return items
}
// Shuffle randomly permutes items in place and returns it.
//
// A private rand.Rand is used instead of re-seeding the global source:
// rand.Seed is deprecated, mutates shared state (a data race when other
// goroutines use package rand concurrently), and wall-clock re-seeding
// makes rapid successive calls produce the same permutation.
func Shuffle[T ~[]E, E any](items T) T {
	rng := rand.New(rand.NewSource(time.Now().UnixNano()))
	rng.Shuffle(len(items), func(i, j int) { items[i], items[j] = items[j], items[i] })
	return items
}
func Slice[T ~[]E, E any](items T, offset int, args ...int) T {
start, end := OffsetToIndex(len(items), offset, args...)
return items[start:end]
}
func Split[T ~[]E, E any](items T, amount int) []T {
split := make([]T, int(math.Ceil(float64(len(items))/float64(amount))))
for i, item := range items {
split[i/amount] = append(split[i/amount], item)
}
return split
}
func Splice[T ~[]E, E any](items *T, offset int, args ...any) T {
length := len(*items)
if len(args) >= 1 {
length = args[0].(int)
}
start, end := OffsetToIndex(len(*items), offset, length)
slice := make(T, end-start)
copy(slice, (*items)[start:end])
if len(args) < 2 {
*items = append((*items)[:start], (*items)[end:]...)
return slice
}
var reps T
for _, rep := range args[1:] {
switch v := rep.(type) {
case E:
reps = append(reps, v)
case T:
reps = append(reps, v...)
default:
panic("replacement type error")
}
}
reps = append(reps, (*items)[end:]...)
*items = append((*items)[:start], reps...)
return slice
}
func Reduce[T ~[]E, E any](items T, initial E, callback func(carry E, value E, key int) E) E {
for key, value := range items {
initial = callback(initial, value, key)
}
return initial
}
// Pop removes and returns the last element of *items; the bool is false
// (and the element is the zero value) when the slice is empty.
func Pop[T ~[]E, E any](items *T) (E, bool) {
	var zero E
	n := len(*items)
	if n == 0 {
		return zero, false
	}
	last := (*items)[n-1]
	*items = (*items)[:n-1]
	return last, true
}
// Push appends item to *items and returns the updated slice.
func Push[T ~[]E, E any](items *T, item E) T {
	*items = append(*items, item)
	return *items
}
func Where[T ~[]E, E any](items T, args ...any) T {
if len(args) < 1 {
return items
}
// Where(target any)
if len(args) == 1 {
return Filter(items, func(value E, _ int) bool {
return Compare(value, "=", args[0])
})
}
var operator string
var key any = nil
var target any
// Where(key any, operator string, target any)
if len(args) >= 3 {
key = args[0]
operator = args[1].(string)
target = args[2]
} else {
// Where(operator string, target any) | Where(key any, target any)
switch v := args[0].(type) {
case string:
if Contains([]string{"=", "!=", ">", "<", ">=", "<="}, v) {
operator = v
target = args[1]
} else {
key = v
operator = "="
target = args[1]
}
default:
key = args[0]
operator = "="
target = args[1]
}
}
return Filter[T, E](items, func(value E, _ int) bool {
if key == nil {
return Compare(value, operator, target)
} else if c, err := AnyGet[any](value, key); err == nil {
return Compare(c, operator, target)
}
return false
})
}
func whereIn[T ~[]E, E any](operator string, items T, args ...any) T {
if len(items) == 0 || len(args) == 0 {
return items
}
var key any = nil
var targets reflect.Value
if len(args) == 1 {
// WhereIn(targets []any)
targets = reflect.ValueOf(args[0])
} else {
// WhereIn(key any, targets []any)
key = args[0]
targets = reflect.ValueOf(args[1])
}
if (targets.Kind() != reflect.Slice && targets.Kind() != reflect.Array) || targets.Len() == 0 {
if operator == "=" {
return make(T, 0)
} else {
return items
}
}
c := NewComparisonSet(true)
for i := 0; i < targets.Len(); i++ {
c.Add(targets.Index(i).Interface())
}
return Filter(items, func(value E, _ int) bool {
if key == nil {
if c.Has(value) {
return operator == "="
}
} else if v, err := AnyGet[any](value, key); err == nil {
if c.Has(v) {
return operator == "="
}
}
return operator != "="
})
}
func WhereIn[T ~[]E, E any](items T, args ...any) T {
return whereIn[T, E]("=", items, args...)
}
func WhereNotIn[T ~[]E, E any](items T, args ...any) T {
return whereIn[T, E]("!=", items, args...)
}
/**
* Number slice
*/
// Sum returns the arithmetic total of all elements.
func Sum[T ~[]E, E constraints.Integer | constraints.Float](items T) (total E) {
	for _, v := range items {
		total += v
	}
	return
}
// Min returns the smallest element, or zero for an empty slice.
func Min[T ~[]E, E constraints.Integer | constraints.Float](items T) E {
	if len(items) == 0 {
		return 0
	}
	lowest := items[0]
	for _, v := range items[1:] {
		if v < lowest {
			lowest = v
		}
	}
	return lowest
}
// Max returns the largest element, or zero for an empty slice.
func Max[T ~[]E, E constraints.Integer | constraints.Float](items T) E {
	if len(items) == 0 {
		return 0
	}
	highest := items[0]
	for _, v := range items[1:] {
		if v > highest {
			highest = v
		}
	}
	return highest
}
func Sort[T ~[]E, E constraints.Ordered](items T) T {
sort.Sort(&types.SortableSlice[T, E]{items, false})
return items
}
func SortDesc[T ~[]E, E constraints.Ordered](items T) T {
sort.Sort(&types.SortableSlice[T, E]{items, true})
return items
}
func Avg[T ~[]E, E constraints.Integer | constraints.Float](items T) float64 {
if len(items) == 0 {
return 0
}
return float64(Sum[T, E](items)) / float64(len(items))
}
func Median[T ~[]E, E constraints.Integer | constraints.Float](items T) float64 {
if len(items) == 0 {
return 0
}
replica := make(T, len(items))
copy(replica, items)
Sort[T, E](replica)
half := len(replica) / 2
if len(replica)%2 != 0 {
return float64(replica[half])
}
return float64(replica[half-1]+replica[half]) / 2
}
/**
* Map
*/
func Only[T ~map[K]V, K comparable, V any](items T, keys ...K) T {
m := make(T)
for _, key := range keys {
m[key] = items[key]
}
return m
}
func Except[T ~map[K]V, K comparable, V any](items T, keys ...K) T {
keysMap := map[K]struct{}{}
for _, key := range keys {
keysMap[key] = struct{}{}
}
m := make(T)
for key, value := range items {
if _, ok := keysMap[key]; !ok {
m[key] = value
}
}
return m
}
func Keys[T ~map[K]V, K comparable, V any](items T) (keys []K) {
for key := range items {
keys = append(keys, key)
}
return
}
func DiffKeys[T ~map[K]V, K comparable, V any](items T, target T) T {
m := make(T)
for key := range items {
if _, ok := target[key]; !ok {
m[key] = items[key]
}
}
return m
}
func Has[T ~map[K]V, K comparable, V any](items T, key K) bool {
if _, ok := items[key]; ok {
return true
} else {
return false
}
}
func Get[T ~map[K]V, K comparable, V any](items T, key K) (value V, _ bool) {
if !Has[T, K, V](items, key) {
return
}
return items[key], true
}
func Put[T ~map[K]V, K comparable, V any](items T, key K, value V) T {
items[key] = value
return items
}
func Pull[T ~map[K]V, K comparable, V any](items T, key K) (value V, _ bool) {
if v, ok := items[key]; ok {
delete(items, key)
return v, true
}
return
}
func MapSame[T ~map[K]V, K comparable, V any](items, target T) bool {
if len(items) != len(target) {
return false
} else if len(items) == 0 {
return true
}
kind := reflect.TypeOf(items).Elem().Kind()
if kind == reflect.Slice {
return reflect.DeepEqual(items, target)
}
for index, item := range items {
if tv, ok := target[index]; !ok {
return false
} else if Compare(item, "!=", tv) {
return false
}
}
return true
}
// MapMerge copies every key/value pair from each target into items,
// overwriting existing keys, and returns items.
func MapMerge[T ~map[K]V, K comparable, V any](items T, targets ...T) T {
	for _, src := range targets {
		for k, v := range src {
			items[k] = v
		}
	}
	return items
}
// Union copies from target into items only the keys items does not already
// contain (existing entries win), and returns items.
func Union[T ~map[K]V, K comparable, V any](items T, target T) T {
	for k, v := range target {
		if _, exists := items[k]; !exists {
			items[k] = v
		}
	}
	return items
}
/**
* Standalone
*/
// Len returns the length of v for arrays, chans, maps, slices, and
// strings; it returns -1 for nil and for any other kind.
func Len(v any) int {
	if v == nil {
		return -1
	}
	rv := reflect.ValueOf(v)
	switch rv.Kind() {
	case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String:
		return rv.Len()
	}
	return -1
}
// Empty reports whether v has length zero. Values whose length cannot be
// measured (including nil) report false, since their Len is -1.
func Empty(v any) bool {
	return Len(v) == 0
}
// Count tallies how many times each distinct element occurs in items.
func Count[T ~[]E, E comparable](items T) map[E]int {
	occurrences := make(map[E]int)
	for _, v := range items {
		occurrences[v]++
	}
	return occurrences
}
func Times[T []E, E any](number int, callback func(number int) E) *SliceCollection[T, E] {
items := make(T, number)
for i := 0; i < number; i++ {
items[i] = callback(i + 1)
}
return UseSlice[T, E](items)
}
func sortBy[T ~[]E, E any, C func(item E, index int) R, R constraints.Ordered](items T, desc bool, callback C) *SliceCollection[T, E] {
structs := make([]*types.SortableStruct[R], len(items))
for index, item := range items {
structs[index] = &types.SortableStruct[R]{callback(item, index), index}
}
replica := make(T, len(items))
copy(replica, items)
sort.Sort(&types.SortableStructs[[]R, R]{structs, desc})
for index, s := range structs {
items[index] = replica[s.Attached.(int)]
}
return UseSlice[T, E](items)
}
func SortBy[T ~[]E, E any, C func(item E, index int) R, R constraints.Ordered](items T, callback C) *SliceCollection[T, E] {
return sortBy[T, E, C, R](items, false, callback)
}
func SortByDesc[T ~[]E, E any, C func(item E, index int) R, R constraints.Ordered](items T, callback C) *SliceCollection[T, E] {
return sortBy[T, E, C, R](items, true, callback)
} | functional.go | 0.693888 | 0.430387 | functional.go | starcoder |
package statistics
import (
"sort"
"github.com/ShoshinNikita/budget-manager/internal/db"
"github.com/ShoshinNikita/budget-manager/internal/pkg/money"
)
type SpentBySpendTypeDataset []SpentBySpendTypeData
type SpentBySpendTypeData struct {
SpendTypeName string `json:"spend_type_name"`
Spent money.Money `json:"spent"`
}
// CalculateSpentBySpendType aggregates |spends| by Spend Type, rolling
// totals up through the type hierarchy, and returns one dataset per
// hierarchy depth level.
func CalculateSpentBySpendType(spendTypes []db.SpendType, spends []db.Spend) []SpentBySpendTypeDataset {
	types, depth := prepareSpendTypesForDatasets(spendTypes, spends)
	return createSpentBySpendTypeDatasets(types, depth)
}
type spendType struct {
db.SpendType
// Spent is an amount of money spent by this type or its children
Spent money.Money
// childrenIDs is a list with ids of child Spend Types sorted in descending order by field 'Spent'
childrenIDs []uint
}
func prepareSpendTypesForDatasets(spendTypes []db.SpendType,
spends []db.Spend) (types map[uint]spendType, maxChildDepth int) {
// Init Spend Types. Use Spend Type with id 0 for Spends without a type
types = make(map[uint]spendType, len(spendTypes)+1)
types[0] = spendType{
SpendType: db.SpendType{ID: 0, Name: "No Type"},
}
for _, t := range spendTypes {
types[t.ID] = spendType{SpendType: t}
}
// Sum spend costs by Spend Type. If Spend Type has a parent, it also will be updated
for _, spend := range spends {
var typeID uint
if spend.Type != nil {
typeID = spend.Type.ID
}
t := types[typeID]
t.Spent = t.Spent.Add(spend.Cost)
types[typeID] = t
for parentID := t.ParentID; parentID != 0; parentID = types[parentID].ParentID {
parentType := types[parentID]
parentType.Spent = parentType.Spent.Add(spend.Cost)
types[parentID] = parentType
}
}
// Filter types without Spends
for id := range types {
if types[id].Spent == 0 {
delete(types, id)
}
}
// Populate Spend Types with children ids and calculate max child depth
for id := range types {
var (
depth = 1
parentID = types[id].ParentID
childID = id
)
for parentID != 0 {
depth++
parentType := types[parentID]
var found bool
for _, id := range parentType.childrenIDs {
if childID == id {
found = true
break
}
}
if !found {
parentType.childrenIDs = append(parentType.childrenIDs, childID)
}
types[parentID] = parentType
parentID = parentType.ParentID
childID = parentType.ID
}
if maxChildDepth < depth {
maxChildDepth = depth
}
}
for id := range types {
t := types[id]
sort.Slice(t.childrenIDs, func(i, j int) bool {
return types[t.childrenIDs[i]].Spent > types[t.childrenIDs[j]].Spent
})
}
return types, maxChildDepth
}
// createSpentBySpendTypeDatasets converts the prepared Spend Type map into
// one dataset per hierarchy level (|depth| levels). Top-level types are
// emitted in descending order of amount spent; their children are expanded
// recursively into the following levels by addSpendTypeToDatasets.
func createSpentBySpendTypeDatasets(types map[uint]spendType, depth int) []SpentBySpendTypeDataset {
	// Sort types in descending order to start dataset with the greatest values
	sortedTypes := make([]spendType, 0, len(types))
	for _, t := range types {
		sortedTypes = append(sortedTypes, t)
	}
	sort.Slice(sortedTypes, func(i, j int) bool {
		return sortedTypes[i].Spent > sortedTypes[j].Spent
	})
	// TODO: add type 'Other' to avoid small elements in datasets?
	datasets := make([]SpentBySpendTypeDataset, depth)
	for _, t := range sortedTypes {
		if t.ParentID != 0 {
			// Child Spend Types will be filled during processing of their parents
			continue
		}
		addSpendTypeToDatasets(types, datasets, t.ID, 0)
	}
	return datasets
}
func addSpendTypeToDatasets(spendTypes map[uint]spendType, datasets []SpentBySpendTypeDataset,
typeID uint, depth int) {
spendType := spendTypes[typeID]
datasets[depth] = append(datasets[depth], SpentBySpendTypeData{
SpendTypeName: spendType.Name,
Spent: spendType.Spent,
})
// Fill next dataset level with child Spend Types (they are sorted in descending order)
left := spendType.Spent
for _, childID := range spendType.childrenIDs {
child := spendTypes[childID]
left = left.Sub(child.Spent)
addSpendTypeToDatasets(spendTypes, datasets, childID, depth+1)
}
if left != 0 {
// Fill all next dataset levels with left amount
for i := depth + 1; i < len(datasets); i++ {
datasets[i] = append(datasets[i], SpentBySpendTypeData{SpendTypeName: "", Spent: left})
}
}
} | internal/web/pages/statistics/spent_by_spend_type.go | 0.596551 | 0.458955 | spent_by_spend_type.go | starcoder |
package hipathsys
import (
"fmt"
"github.com/shopspring/decimal"
"math"
"math/big"
"strconv"
)
var IntegerTypeSpec = newAnyTypeSpec("Integer")
type integerType struct {
baseAnyType
value int32
decimalValue DecimalAccessor
}
type IntegerAccessor interface {
NumberAccessor
Primitive() int32
}
func NewInteger(value int32) IntegerAccessor {
return NewIntegerWithSource(value, nil)
}
func NewIntegerWithSource(value int32, source interface{}) IntegerAccessor {
return newInteger(value, source)
}
// ParseInteger parses a base-10 string into an IntegerAccessor.
// Parsing is constrained to 32 bits so values outside the int32 range are
// rejected with an error. (The previous Atoi-based implementation silently
// truncated on 64-bit platforms, e.g. "4294967297" became 1.)
func ParseInteger(value string) (IntegerAccessor, error) {
	i, err := strconv.ParseInt(value, 10, 32)
	if err != nil {
		return nil, fmt.Errorf("not an integer: %s", value)
	}
	return NewInteger(int32(i)), nil
}
func newInteger(value int32, source interface{}) IntegerAccessor {
return &integerType{
baseAnyType: baseAnyType{
source: source,
},
value: value,
}
}
func (t *integerType) DataType() DataTypes {
return IntegerDataType
}
func (t *integerType) Int() int32 {
return t.value
}
func (t *integerType) Int64() int64 {
return int64(t.value)
}
func (t *integerType) Float32() float32 {
return float32(t.value)
}
func (t *integerType) Float64() float64 {
return float64(t.value)
}
func (t *integerType) BigFloat() *big.Float {
return t.Decimal().BigFloat()
}
func (t *integerType) Decimal() decimal.Decimal {
return t.Value().Decimal()
}
func (t *integerType) Primitive() int32 {
return t.value
}
func (t *integerType) One() bool {
return t.value == 1
}
func (t *integerType) Positive() bool {
return t.value > 0
}
func (t *integerType) HasFraction() bool {
return false
}
func (t *integerType) TypeSpec() TypeSpecAccessor {
return IntegerTypeSpec
}
func (t *integerType) Value() DecimalAccessor {
if t.decimalValue == nil {
t.decimalValue = NewDecimalInt(t.value)
}
return t.decimalValue
}
func (t *integerType) WithValue(node NumberAccessor) DecimalValueAccessor {
if node == nil || node.DataType() == IntegerDataType {
return node
}
return NewInteger(node.Int())
}
func (t *integerType) ArithmeticOpSupported(ArithmeticOps) bool {
return true
}
func (t *integerType) Negate() AnyAccessor {
return newInteger(-t.value, nil)
}
func (t *integerType) Equal(node interface{}) bool {
if o, ok := node.(IntegerAccessor); ok {
return t.Int() == o.Int()
}
return decimalValueEqual(t, node)
}
func (t *integerType) Equivalent(node interface{}) bool {
if o, ok := node.(IntegerAccessor); ok {
return t.Int() == o.Int()
}
return decimalValueEquivalent(t, node)
}
// Compare orders this integer relative to the comparator. Same-typed
// comparators are compared directly on their int32 values; everything
// else is delegated to the decimal comparison.
func (t *integerType) Compare(comparator Comparator) (int, OperatorStatus) {
	if !TypeEqual(t, comparator) {
		return decimalValueCompare(t, comparator)
	}
	other := comparator.(IntegerAccessor).Int()
	switch {
	case t.value < other:
		return -1, Evaluated
	case t.value > other:
		return 1, Evaluated
	default:
		return 0, Evaluated
	}
}
func (t *integerType) String() string {
return strconv.FormatInt(int64(t.value), 10)
}
func (t *integerType) Ceiling() NumberAccessor {
return t
}
func (t *integerType) Exp() NumberAccessor {
return NewDecimalFloat64(math.Exp(t.Float64()))
}
func (t *integerType) Floor() NumberAccessor {
return t
}
// Ln returns the natural logarithm of the value as a decimal.
// An error is returned for non-positive values, for which the
// logarithm is undefined.
func (t *integerType) Ln() (NumberAccessor, error) {
	if t.value <= 0 {
		// fixed typo in error message ("logarithmus" -> "logarithm")
		return nil, fmt.Errorf("logarithm cannot be applied to non-positive values %d", t.value)
	}
	return NewDecimalFloat64(math.Log(t.Float64())), nil
}
// Log returns the logarithm of the value to the given base as a decimal.
// An error is returned when either the value or the base is not positive,
// since the logarithm is undefined in those cases.
func (t *integerType) Log(base NumberAccessor) (NumberAccessor, error) {
	if t.value <= 0 {
		// fixed typo in error message ("logarithmus" -> "logarithm")
		return nil, fmt.Errorf("logarithm cannot be applied to non-positive values %d", t.value)
	}
	if !base.Positive() {
		return nil, fmt.Errorf("logarithm cannot be applied to non-positive base %f", base.Float64())
	}
	// log_base(x) = ln(x) / ln(base)
	return NewDecimalFloat64(math.Log(t.Float64()) / math.Log(base.Float64())), nil
}
func (t *integerType) Power(exponent NumberAccessor) (NumberAccessor, bool) {
if exponent.One() {
return t, true
}
if exponent.DataType() == IntegerDataType {
return NewInteger(int32(math.Pow(t.Float64(), exponent.Float64()))), true
}
return NewDecimalInt(t.Int()).Power(exponent)
}
func (t *integerType) Round(precision int32) (NumberAccessor, error) {
if precision < 0 {
return nil, fmt.Errorf("precision must not be negative %d", precision)
}
return t, nil
}
func (t *integerType) Sqrt() (NumberAccessor, bool) {
r := math.Sqrt(t.Float64())
if math.IsNaN(r) {
return nil, false
}
return NewDecimalFloat64(r), true
}
func (t *integerType) Truncate(int32) NumberAccessor {
return t
}
// Calc applies the arithmetic operator op to this integer (left operand)
// and the given operand (right operand).
// A nil operand yields (nil, nil), propagating an empty value. When both
// operands are integers the result stays in the integer domain, except for
// true division, which produces a decimal. The division-like operators
// (/, div, mod) return (nil, nil) on a zero divisor rather than an error.
// Mixed-type arithmetic is delegated to the decimal implementation.
func (t *integerType) Calc(operand DecimalValueAccessor, op ArithmeticOps) (DecimalValueAccessor, error) {
	if operand == nil {
		return nil, nil
	}
	if !t.ArithmeticOpSupported(op) || !operand.ArithmeticOpSupported(op) {
		return nil, fmt.Errorf("arithmetic operator not supported: %c", op)
	}
	// Integer fast path: operate on the primitive int32 values directly.
	if ov, ok := operand.(IntegerAccessor); ok {
		pov := ov.Primitive()
		switch op {
		case AdditionOp:
			return NewInteger(t.Int() + pov), nil
		case SubtractionOp:
			return NewInteger(t.Int() - pov), nil
		case MultiplicationOp:
			return NewInteger(t.Int() * pov), nil
		case DivisionOp:
			if pov == 0 {
				// division by zero yields an empty result, not an error
				return nil, nil
			}
			// true division leaves the integer domain
			return NewDecimalFloat64(float64(t.Int()) / float64(pov)), nil
		case DivOp:
			if pov == 0 {
				return nil, nil
			}
			return NewInteger(t.Int() / pov), nil
		case ModOp:
			if pov == 0 {
				return nil, nil
			}
			return NewInteger(t.Int() % pov), nil
		default:
			// unreachable while ArithmeticOpSupported returns true for all ops
			panic(fmt.Sprintf("Unhandled operator: %d", op))
		}
	}
	// Mixed types: compute in the decimal domain and let the operand
	// decide the result representation via WithValue.
	return operand.WithValue(decimalCalc(t, operand.Value(), op)), nil
}
func (t *integerType) Abs() DecimalValueAccessor {
return NewInteger(int32(math.Abs(float64(t.Int()))))
}
func IntegerValue(node interface{}) interface{} {
if v, ok := node.(IntegerAccessor); !ok {
return nil
} else {
return v.Int()
}
} | hipathsys/integer_type.go | 0.681727 | 0.471406 | integer_type.go | starcoder |
package generation
import (
mat "github.com/nlpodyssey/spago/pkg/mat32"
)
// Hypotheses provides hypotheses data for a generation Scorer.
type Hypotheses struct {
config GeneratorConfig
beams []Hypothesis
worstScore mat.Float
}
// Hypothesis represents a single generation hypothesis, which is a sequence of
// Token IDs paired with a score.
type Hypothesis struct {
TokenIDs []int
Score mat.Float
}
const defaultHypothesisWorstScore mat.Float = 1e9
// NewHypotheses returns a new Hypotheses for the given generator
// configuration. The worst score starts at the sentinel
// defaultHypothesisWorstScore so the first hypothesis added always
// replaces it (previously the magic literal 1e9 duplicated the constant).
func NewHypotheses(config GeneratorConfig) *Hypotheses {
	return &Hypotheses{
		config:     config,
		beams:      make([]Hypothesis, 0),
		worstScore: defaultHypothesisWorstScore,
	}
}
// Len returns the number of hypotheses in the list.
func (h *Hypotheses) Len() int {
return len(h.beams)
}
// Add adds a new hypothesis to the list.
// The raw sum of log-probabilities is length-normalized with the configured
// length penalty before being compared against the existing beams. When the
// list is already at NumBeams capacity, the new hypothesis either displaces
// the current worst beam or is discarded.
func (h *Hypotheses) Add(hypVector []int, sumLogProbs mat.Float) {
	score := sumLogProbs / mat.Pow(mat.Float(len(hypVector)), h.config.LengthPenalty)
	// Full and not better than the current worst: discard.
	if h.Len() == h.config.NumBeams && score <= h.worstScore {
		return
	}
	h.beams = append(h.beams, Hypothesis{TokenIDs: hypVector, Score: score})
	// Still within capacity: only the tracked worst score may change.
	if h.Len() <= h.config.NumBeams {
		if score < h.worstScore {
			h.worstScore = score
		}
		return
	}
	// Over capacity: evict the worst beam, then recompute the worst score.
	_, worstIndex, _ := h.findWorst()
	h.beams = append(h.beams[:worstIndex], h.beams[worstIndex+1:]...)
	h.worstScore, _, _ = h.findWorst()
}
// findWorst returns the lowest score among the beams together with its
// index, or (defaultHypothesisWorstScore, -1, false) when there are no
// beams.
func (h *Hypotheses) findWorst() (worstScore mat.Float, worstIndex int, ok bool) {
	if h.Len() == 0 {
		return defaultHypothesisWorstScore, -1, false
	}
	worstIndex = 0
	worstScore = h.beams[0].Score
	for i, hyp := range h.beams[1:] {
		if hyp.Score < worstScore {
			// i indexes beams[1:], so shift by one to address h.beams.
			// The original stored the unshifted i, returning the index of
			// the element *before* the worst one and causing Add to evict
			// the wrong beam.
			worstIndex = i + 1
			worstScore = hyp.Score
		}
	}
	return worstScore, worstIndex, true
}
// IsDone reports whether there are enough hypotheses and none of the hypotheses
// being generated can become better than the worst one in the heap.
// bestSumLogProbs is the best attainable sum of log-probabilities for the
// sequences still being generated; curLen is their current length.
func (h *Hypotheses) IsDone(bestSumLogProbs mat.Float, curLen int) bool {
	// Not enough finished hypotheses yet.
	if h.Len() < h.config.NumBeams {
		return false
	}
	// With early stopping, collecting NumBeams hypotheses is sufficient.
	if h.config.EarlyStopping {
		return true
	}
	// Otherwise stop only when even the best ongoing candidate,
	// length-penalized, cannot beat the worst finished hypothesis.
	curScore := bestSumLogProbs / mat.Pow(mat.Float(curLen), h.config.LengthPenalty)
	return h.worstScore >= curScore
}
// Beams returns the hypothesis beams.
func (h *Hypotheses) Beams() []Hypothesis {
return h.beams
} | pkg/nlp/transformers/generation/hypotheses.go | 0.744935 | 0.618608 | hypotheses.go | starcoder |
package ast
import (
"bytes"
"github.com/global-soft-ba/decisionTable/ast"
"reflect"
"strconv"
"time"
)
// checkDataTypePrecedence determines the common type of two operands.
// Identical types yield that type; an Integer/Float mix (in either order)
// is promoted to Float; every other combination is incompatible and
// yields nil.
func checkDataTypePrecedence(typ1 ast.Node, typ2 ast.Node) reflect.Type {
	t1 := reflect.TypeOf(typ1)
	t2 := reflect.TypeOf(typ2)
	if t1 == t2 {
		return t1
	}
	intType := reflect.TypeOf(Integer{})
	floatType := reflect.TypeOf(Float{})
	if (t1 == intType && t2 == floatType) || (t1 == floatType && t2 == intType) {
		return floatType
	}
	return nil
}
func checkDataTypePrecedences(types ...ast.Node) reflect.Type {
length := len(types)
switch length {
case 0:
return nil
case 1:
return reflect.TypeOf(types[0])
default:
init := types[0]
for i := 1; i < length; i++ {
result := checkDataTypePrecedence(init, types[i])
if result == nil {
return nil
} else if reflect.TypeOf(init) != result {
init = types[i]
}
}
return reflect.TypeOf(init)
}
}
// SFeelParser rules. - just for verifiction to see if the parser works correct
type Rule struct {
Type int
Literal string
}
type EmptyStatement struct {
ParserToken Token
}
func (l EmptyStatement) ParserLiteral() string {
return l.ParserToken.Literal
}
func (l EmptyStatement) String() string {
return ""
}
func (l EmptyStatement) GetOperandDataType() reflect.Type {
return reflect.TypeOf(l)
}
func (l EmptyStatement) GetChildren() []ast.Node {
return nil
}
type QualifiedName struct {
ParserRule Rule
Value []string
}
func (l QualifiedName) ParserLiteral() string { return l.ParserRule.Literal }
func (l QualifiedName) String() string {
var out bytes.Buffer
for i, val := range l.Value {
if i > 0 {
out.WriteString(SFeelSeparatorQualifiedName)
}
out.WriteString(val)
}
return out.String()
}
func (l QualifiedName) GetQualifiedName() string {
return l.String()
}
func (l QualifiedName) GetOperandDataType() reflect.Type {
return reflect.TypeOf(l)
}
func (l QualifiedName) GetChildren() []ast.Node {
return nil
}
type Integer struct {
ParserRule Rule
SignRule Rule
Value int64
}
func (l Integer) ParserLiteral() string {
return l.SignRule.Literal + l.ParserRule.Literal
}
// String renders the integer literal, emitting the sign token first when
// one was parsed (SignRule.Type != -1 marks a present sign rule).
// NOTE(review): if SignRule holds "-" while Value is already stored as a
// negative number, the sign would be emitted twice by FormatInt — confirm
// the parser keeps Value unsigned relative to SignRule.
func (l Integer) String() string {
	var out bytes.Buffer
	if l.SignRule.Type != -1 {
		out.WriteString(l.SignRule.Literal)
	}
	out.WriteString(strconv.FormatInt(l.Value, 10))
	return out.String()
}
func (l Integer) GetOperandDataType() reflect.Type {
return reflect.TypeOf(l)
}
func (l Integer) GetChildren() []ast.Node {
return nil
}
type Float struct {
ParserRule Rule
SignRule Rule
Value float64
}
func (l Float) ParserLiteral() string {
return l.SignRule.Literal + l.ParserRule.Literal
}
func (l Float) String() string {
var out bytes.Buffer
if l.SignRule.Type != -1 {
out.WriteString(l.SignRule.Literal)
}
out.WriteString(strconv.FormatFloat(l.Value, 'E', -1, 64))
return out.String()
}
func (l Float) GetOperandDataType() reflect.Type {
return reflect.TypeOf(l)
}
func (l Float) GetChildren() []ast.Node {
return nil
}
type Boolean struct {
ParserRule Rule
Value bool
}
func (l Boolean) ParserLiteral() string { return l.ParserRule.Literal }
func (l Boolean) String() string { return strconv.FormatBool(l.Value) }
func (l Boolean) GetOperandDataType() reflect.Type {
return reflect.TypeOf(l)
}
func (l Boolean) GetChildren() []ast.Node {
return nil
}
type String struct {
ParserRule Rule
Value string
}
func (l String) ParserLiteral() string { return l.ParserRule.Literal }
func (l String) String() string { return l.Value }
func (l String) GetOperandDataType() reflect.Type {
return reflect.TypeOf(l)
}
func (l String) GetChildren() []ast.Node {
return nil
}
type DateTime struct {
ParserRule Rule
Value time.Time
}
func (l DateTime) ParserLiteral() string { return l.ParserRule.Literal }
func (l DateTime) String() string { return l.Value.String() }
func (l DateTime) GetOperandDataType() reflect.Type {
return reflect.TypeOf(l)
}
func (l DateTime) GetChildren() []ast.Node {
return nil
} | lang/sfeel/ast/DataTypes.go | 0.617282 | 0.414958 | DataTypes.go | starcoder |
package hplot
import (
"image/color"
"gonum.org/v1/plot"
"gonum.org/v1/plot/plotter"
"gonum.org/v1/plot/vg"
"gonum.org/v1/plot/vg/draw"
)
// VertLine draws a vertical line at X and colors the
// left and right portions of the plot with the provided
// colors.
type VertLine struct {
X float64
Line draw.LineStyle
Left color.Color
Right color.Color
}
// VLine creates a vertical line at x with the default line style.
func VLine(x float64, left, right color.Color) *VertLine {
return &VertLine{
X: x,
Line: plotter.DefaultLineStyle,
Left: left,
Right: right,
}
}
func (vline *VertLine) Plot(c draw.Canvas, plt *plot.Plot) {
var (
trX, _ = plt.Transforms(&c)
x = trX(vline.X)
xmin = c.Min.X
xmax = c.Max.X
ymin = c.Min.Y
ymax = c.Max.Y
)
if vline.Left != nil && x > xmin {
c.SetColor(vline.Left)
rect := vg.Rectangle{
Min: vg.Point{X: xmin, Y: ymin},
Max: vg.Point{X: x, Y: ymax},
}
c.Fill(rect.Path())
}
if vline.Right != nil && x < xmax {
c.SetColor(vline.Right)
rect := vg.Rectangle{
Min: vg.Point{X: x, Y: ymin},
Max: vg.Point{X: xmax, Y: ymax},
}
c.Fill(rect.Path())
}
if vline.Line.Width != 0 && xmin <= x && x <= xmax {
c.StrokeLine2(vline.Line, x, ymin, x, ymax)
}
}
// Thumbnail returns the thumbnail for the VertLine,
// implementing the plot.Thumbnailer interface.
func (vline *VertLine) Thumbnail(c *draw.Canvas) {
if vline.Left != nil {
minX := c.Min.X
maxX := c.Center().X
minY := c.Min.Y
maxY := c.Max.Y
points := []vg.Point{
{X: minX, Y: minY},
{X: minX, Y: maxY},
{X: maxX, Y: maxY},
{X: maxX, Y: minY},
}
poly := c.ClipPolygonY(points)
c.FillPolygon(vline.Left, poly)
}
if vline.Right != nil {
minX := c.Center().X
maxX := c.Max.X
minY := c.Min.Y
maxY := c.Max.Y
points := []vg.Point{
{X: minX, Y: minY},
{X: minX, Y: maxY},
{X: maxX, Y: maxY},
{X: maxX, Y: minY},
}
poly := c.ClipPolygonY(points)
c.FillPolygon(vline.Right, poly)
}
if vline.Line.Width != 0 {
x := c.Center().X
c.StrokeLine2(vline.Line, x, c.Min.Y, x, c.Max.Y)
}
}
// HorizLine draws a horizontal line at Y and colors the
// top and bottom portions of the plot with the provided
// colors.
type HorizLine struct {
Y float64
Line draw.LineStyle
Top color.Color
Bottom color.Color
}
// HLine creates a horizontal line at y with the default line style.
func HLine(y float64, top, bottom color.Color) *HorizLine {
return &HorizLine{
Y: y,
Line: plotter.DefaultLineStyle,
Top: top,
Bottom: bottom,
}
}
func (hline *HorizLine) Plot(c draw.Canvas, plt *plot.Plot) {
var (
_, trY = plt.Transforms(&c)
y = trY(hline.Y)
xmin = c.Min.X
xmax = c.Max.X
ymin = c.Min.Y
ymax = c.Max.Y
)
if hline.Top != nil && y < ymax {
c.SetColor(hline.Top)
rect := vg.Rectangle{
Min: vg.Point{X: xmin, Y: y},
Max: vg.Point{X: xmax, Y: ymax},
}
c.Fill(rect.Path())
}
if hline.Bottom != nil && y > ymin {
c.SetColor(hline.Bottom)
rect := vg.Rectangle{
Min: vg.Point{X: xmin, Y: ymin},
Max: vg.Point{X: xmax, Y: y},
}
c.Fill(rect.Path())
}
if hline.Line.Width != 0 && ymin <= y && y <= ymax {
c.StrokeLine2(hline.Line, xmin, y, xmax, y)
}
}
// Thumbnail returns the thumbnail for the HorizLine,
// implementing the plot.Thumbnailer interface.
// (The original comment referred to VertLine — a copy/paste slip.)
func (hline *HorizLine) Thumbnail(c *draw.Canvas) {
	// Fill the upper half of the thumbnail with the Top color.
	if hline.Top != nil {
		minX := c.Min.X
		maxX := c.Max.X
		minY := c.Center().Y
		maxY := c.Max.Y
		points := []vg.Point{
			{X: minX, Y: minY},
			{X: minX, Y: maxY},
			{X: maxX, Y: maxY},
			{X: maxX, Y: minY},
		}
		poly := c.ClipPolygonY(points)
		c.FillPolygon(hline.Top, poly)
	}
	// Fill the lower half with the Bottom color.
	if hline.Bottom != nil {
		minX := c.Min.X
		maxX := c.Max.X
		minY := c.Min.Y
		maxY := c.Center().Y
		points := []vg.Point{
			{X: minX, Y: minY},
			{X: minX, Y: maxY},
			{X: maxX, Y: maxY},
			{X: maxX, Y: minY},
		}
		poly := c.ClipPolygonY(points)
		c.FillPolygon(hline.Bottom, poly)
	}
	// Stroke the line itself across the vertical center.
	if hline.Line.Width != 0 {
		y := c.Center().Y
		c.StrokeLine2(hline.Line, c.Min.X, y, c.Max.X, y)
	}
}
var (
_ plot.Plotter = (*VertLine)(nil)
_ plot.Plotter = (*HorizLine)(nil)
_ plot.Thumbnailer = (*VertLine)(nil)
_ plot.Thumbnailer = (*HorizLine)(nil)
) | hplot/line.go | 0.82308 | 0.477737 | line.go | starcoder |
package cmd
import (
"fmt"
"regexp"
"strings"
"github.com/jaredbancroft/aoc2020/pkg/helpers"
"github.com/jaredbancroft/aoc2020/pkg/passport"
"github.com/spf13/cobra"
)
// day4Cmd represents the day4 command
var day4Cmd = &cobra.Command{
Use: "day4",
Short: "Advent of Code 2020 - Day 4: Passport Processing",
Long: `
Advent of Code 2020
--- Day 4: Passport Processing ---
You arrive at the airport only to realize that you grabbed your North Pole Credentials
instead of your passport. While these documents are extremely similar, North Pole
Credentials aren't issued by a country and therefore aren't actually valid documentation
for travel in most of the world.
It seems like you're not the only one having problems, though; a very long line has formed
for the automatic passport scanners, and the delay could upset your travel itinerary.
Due to some questionable network security, you realize you might be able to solve both
of these problems at the same time.
The automatic passport scanners are slow because they're having trouble detecting which
passports have all required fields. The expected fields are as follows:
byr (Birth Year)
iyr (Issue Year)
eyr (Expiration Year)
hgt (Height)
hcl (Hair Color)
ecl (Eye Color)
pid (Passport ID)
cid (Country ID)
Passport data is validated in batch files (your puzzle input). Each passport is represented
as a sequence of key:value pairs separated by spaces or newlines. Passports are separated by
blank lines.
Here is an example batch file containing four passports:
ecl:gry pid:860033327 eyr:2020 hcl:#fffffd
byr:1937 iyr:2017 cid:147 hgt:183cm
iyr:2013 ecl:amb cid:350 eyr:2023 pid:028048884
hcl:#cfa07d byr:1929
hcl:#ae17e1 iyr:2013
eyr:2024
ecl:brn pid:760753108 byr:1931
hgt:179cm
hcl:#cfa07d eyr:2025 pid:166559648
iyr:2011 ecl:brn hgt:59in
The first passport is valid - all eight fields are present. The second passport is
invalid - it is missing hgt (the Height field).
The third passport is interesting; the only missing field is cid, so it looks like data from
North Pole Credentials, not a passport at all! Surely, nobody would mind if you made the system
temporarily ignore missing cid fields. Treat this "passport" as valid.
The fourth passport is missing two fields, cid and byr. Missing cid is fine, but missing any
other field is not, so this passport is invalid.
According to the above rules, your improved system would report 2 valid passports.
Count the number of valid passports - those that have all required fields. Treat cid as optional.
In your batch file, how many passports are valid?
--- Part Two ---
The line is moving more quickly now, but you overhear airport security talking about how passports
with invalid data are getting through. Better add some data validation, quick!
You can continue to ignore the cid field, but each other field has strict rules about what values
are valid for automatic validation:
byr (Birth Year) - four digits; at least 1920 and at most 2002.
iyr (Issue Year) - four digits; at least 2010 and at most 2020.
eyr (Expiration Year) - four digits; at least 2020 and at most 2030.
hgt (Height) - a number followed by either cm or in:
If cm, the number must be at least 150 and at most 193.
If in, the number must be at least 59 and at most 76.
hcl (Hair Color) - a # followed by exactly six characters 0-9 or a-f.
ecl (Eye Color) - exactly one of: amb blu brn gry grn hzl oth.
pid (Passport ID) - a nine-digit number, including leading zeroes.
cid (Country ID) - ignored, missing or not.
Your job is to count the passports where all required fields are both present and valid according
to the above rules. Here are some example values:
byr valid: 2002
byr invalid: 2003
hgt valid: 60in
hgt valid: 190cm
hgt invalid: 190in
hgt invalid: 190
hcl valid: #123abc
hcl invalid: #123abz
hcl invalid: 123abc
ecl valid: brn
ecl invalid: wat
pid valid: 000000001
pid invalid: 0123456789
Here are some invalid passports:
eyr:1972 cid:100
hcl:#18171d ecl:amb hgt:170 pid:186cm iyr:2018 byr:1926
iyr:2019
hcl:#602927 eyr:1967 hgt:170cm
ecl:grn pid:012533040 byr:1946
hcl:dab227 iyr:2012
ecl:brn hgt:182cm pid:021572410 eyr:2020 byr:1992 cid:277
hgt:59cm ecl:zzz
eyr:2038 hcl:74454a iyr:2023
pid:3556412378 byr:2007
Here are some valid passports:
pid:087499704 hgt:74in ecl:grn iyr:2012 eyr:2030 byr:1980
hcl:#623a2f
eyr:2029 ecl:blu cid:129 byr:1989
iyr:2014 pid:896056539 hcl:#a97842 hgt:165cm
hcl:#888785
hgt:164cm byr:2001 iyr:2015 cid:88
pid:545766238 ecl:hzl
eyr:2022
iyr:2010 hgt:158cm hcl:#b6652a ecl:blu byr:1944 eyr:2021 pid:093154719
Count the number of valid passports - those that have all required fields and valid values.
Continue to treat cid as optional. In your batch file, how many passports are valid?`,
RunE: func(cmd *cobra.Command, args []string) error {
inputs, err := helpers.ReadGroupStringFile(input)
if err != nil {
return err
}
re := regexp.MustCompile(`\w{3}:[^\s]+`)
fields := make(map[string]string)
valid := 0
valid2 := 0
for _, inputGroup := range inputs {
for _, i := range inputGroup {
matches := re.FindAllString(i, -1)
for _, match := range matches {
m := strings.Split(match, ":")
fields[m[0]] = m[1]
}
}
p := passport.NewPassport(fields)
if p.Validate() {
valid++
}
if p.ValidateMore() {
valid2++
}
fields = make(map[string]string)
}
fmt.Println("Valid Passports: ", valid)
fmt.Println("Valid Passports: ", valid2)
return nil
},
}
func init() {
rootCmd.AddCommand(day4Cmd)
} | cmd/day4.go | 0.652906 | 0.402128 | day4.go | starcoder |
package core
// Arguments represents a structured set of arguments passed to a predicate.
// It allows destructive operations to internal properties because it is
// guaranteed by Thunks that arguments objects are never reused as a function
// call creates a Thunk.
type Arguments struct {
positionals []Value
expandedList Value
keywords []KeywordArgument
}
// NewArguments creates a new Arguments.
// Positional arguments up to (but excluding) the first expanded one are
// stored strictly in a slice; from the first expanded argument onward,
// everything is folded into a single lazy list value (expandedList),
// whose length cannot be known without evaluation.
func NewArguments(ps []PositionalArgument, ks []KeywordArgument) Arguments {
	vs := make([]Value, 0, len(ps))
	l := Value(nil)
	for i, p := range ps {
		if p.expanded {
			l = mergePositionalArguments(ps[i:])
			break
		}
		vs = append(vs, p.value)
	}
	return Arguments{vs, l, ks}
}
// NewPositionalArguments creates an Arguments which consists of unexpanded
// positional arguments.
func NewPositionalArguments(vs ...Value) Arguments {
return Arguments{vs, nil, nil}
}
// mergePositionalArguments folds a suffix of positional arguments (starting
// at an expanded one) into a single lazy list value, building from the
// right so plain arguments can use O(1) prepends.
func mergePositionalArguments(ps []PositionalArgument) Value {
	v := Value(EmptyList)
	// Optimization for a common pattern of (func a b c ... ..xs).
	// Note that Merge is O(n) but Prepend is O(1).
	if last := len(ps) - 1; ps[last].expanded {
		v = ps[last].value
		ps = ps[:last]
	}
	for i := len(ps) - 1; i >= 0; i-- {
		p := ps[i]
		if p.expanded {
			// Expanded argument: lazily merge the whole list into the tail.
			v = PApp(Merge, p.value, v)
		} else {
			// Plain argument: constant-time prepend.
			v = cons(p.value, v)
		}
	}
	return v
}
func (args *Arguments) nextPositional() Value {
if len(args.positionals) != 0 {
v := args.positionals[0]
args.positionals = args.positionals[1:]
return v
}
if args.expandedList == nil {
return nil
}
l := args.expandedList
args.expandedList = PApp(Rest, l)
return PApp(First, l)
}
func (args *Arguments) restPositionals() Value {
vs := args.positionals
l := args.expandedList
args.positionals = nil
args.expandedList = nil
if l == nil {
return NewList(vs...)
} else if len(vs) == 0 {
return l
}
return StrictPrepend(vs, l)
}
func (args *Arguments) searchKeyword(s string) Value {
for i := len(args.keywords) - 1; i >= 0; i-- {
k := args.keywords[i]
if k.name == s {
args.keywords = append(args.keywords[:i], args.keywords[i+1:]...)
return k.value
} else if k.name == "" {
d, err := EvalDictionary(k.value)
if err != nil {
return err
}
k := NewString(s)
// Using DictionaryType.{index,delete} methods is safe here
// because the key is always StringType.
if v, err := d.find(k); err == nil {
args.keywords = append(
args.keywords[:i],
append(
[]KeywordArgument{NewKeywordArgument("", d.delete(k))},
args.keywords[i+1:]...)...)
return v
}
}
}
return nil
}
func (args *Arguments) restKeywords() Value {
ks := args.keywords
args.keywords = nil
d := Value(EmptyDictionary)
for _, k := range ks {
// Using DictionaryType.Insert method is safe here
// because the key is always StringType.
if k.name == "" {
d = PApp(Merge, d, k.value)
} else {
d = PApp(Insert, d, NewString(k.name), k.value)
}
}
return d
}
// Merge merges 2 sets of arguments into one.
func (args Arguments) Merge(old Arguments) Arguments {
ks := append(args.keywords, old.keywords...)
if args.expandedList == nil {
return Arguments{append(args.positionals, old.positionals...), old.expandedList, ks}
}
l := Value(EmptyList)
if old.expandedList != nil {
l = old.expandedList
}
return Arguments{
args.positionals,
PApp(Merge, args.expandedList, StrictPrepend(old.positionals, l)),
ks,
}
}
func (args Arguments) checkEmptyness() Value {
if len(args.positionals) > 0 {
return argumentError("%d positional arguments are left", len(args.positionals))
}
// Testing args.expandedList is impossible because we cannot know its length
// without evaluating it.
// Keyword arguments are not checked in the current implementation as
// expanded dictionaries can contain extra arguments.
return nil
} | src/lib/core/arguments.go | 0.764012 | 0.464173 | arguments.go | starcoder |
package fake
import (
"strings"
)
// Character generates random character in the given language
func Character() string {
return f.Character()
}
// CharactersN generates n random characters in the given language
func CharactersN(n int) string {
return f.CharactersN(n)
}
// Characters generates from 1 to 5 characters in the given language
func Characters() string {
return f.Characters()
}
// Word generates random word
func Word() string {
return f.Word()
}
// WordsN generates n random words
func WordsN(n int) string {
return f.WordsN(n)
}
// Words generates from 1 to 5 random words
func Words() string {
return f.Words()
}
// Title generates from 2 to 5 titleized words
func Title() string {
return f.Title()
}
// Sentence generates random sentence
func Sentence() string {
return f.Sentence()
}
// SentencesN generates n random sentences
func SentencesN(n int) string {
return f.SentencesN(n)
}
// Sentences generates from 1 to 5 random sentences
func Sentences() string {
return f.Sentences()
}
// Paragraph generates paragraph
func Paragraph() string {
return f.Paragraph()
}
// ParagraphsN generates n paragraphs
func ParagraphsN(n int) string {
return f.ParagraphsN(n)
}
// Paragraphs generates from 1 to 5 paragraphs
func Paragraphs() string {
return f.Paragraphs()
}
// Character generates random character in the given f.language
func (f *Faker) Character() string {
return f.lookup(f.lang, "characters", true)
}
// CharactersN generates n random characters in the given f.language.
// Fixed to draw each character from the receiver (f.Character) instead of
// the package-level Character(), which uses the global default faker and
// ignored this Faker's language and RNG.
func (f *Faker) CharactersN(n int) string {
	var b strings.Builder
	for i := 0; i < n; i++ {
		b.WriteString(f.Character())
	}
	return b.String()
}
// Characters generates from 1 to 5 characters in the given f.language
func (f *Faker) Characters() string {
return f.CharactersN(f.r.Intn(5) + 1)
}
// Word generates random word
func (f *Faker) Word() string {
return f.lookup(f.lang, "words", true)
}
// WordsN generates n random words separated by single spaces.
// Fixed to call f.Word() instead of the package-level Word(), which uses
// the global default faker and ignored this Faker's language and RNG.
func (f *Faker) WordsN(n int) string {
	words := make([]string, n)
	for i := 0; i < n; i++ {
		words[i] = f.Word()
	}
	return strings.Join(words, " ")
}
// Words generates from 1 to 5 random words
func (f *Faker) Words() string {
return f.WordsN(f.r.Intn(5) + 1)
}
// Title generates from 2 to 5 titleized words.
// Fixed to call f.WordsN instead of the package-level WordsN, which uses
// the global default faker and ignored this Faker's language and RNG.
// NOTE(review): strings.ToTitle maps every rune to its title case
// (effectively upper-casing the whole string), not word-initial
// capitalization; kept as-is to preserve existing output.
func (f *Faker) Title() string {
	return strings.ToTitle(f.WordsN(2 + f.r.Intn(4)))
}
// Sentence generates a random sentence of 3 to 14 words.
// Roughly every fifth word is followed by a comma, and the sentence ends
// with "!" about one time in eight, otherwise with ".".
// Two fixes: the comma-suffixed word is now actually appended (the
// original discarded it and appended a fresh Word() instead), and words
// are drawn via f.Word() rather than the package-level Word(), which used
// the global default faker and ignored this Faker's language and RNG.
func (f *Faker) Sentence() string {
	var words []string
	for i := 0; i < 3+f.r.Intn(12); i++ {
		word := f.Word()
		if f.r.Intn(5) == 0 {
			word += ","
		}
		words = append(words, word)
	}
	sentence := strings.Join(words, " ")
	if f.r.Intn(8) == 0 {
		sentence += "!"
	} else {
		sentence += "."
	}
	return sentence
}
// SentencesN generates n random sentences separated by single spaces.
// Fixed to call f.Sentence() instead of the package-level Sentence(),
// which uses the global default faker and ignored this Faker's language
// and RNG.
func (f *Faker) SentencesN(n int) string {
	sentences := make([]string, n)
	for i := 0; i < n; i++ {
		sentences[i] = f.Sentence()
	}
	return strings.Join(sentences, " ")
}
// Sentences generates from 1 to 5 random sentences
func (f *Faker) Sentences() string {
return f.SentencesN(f.r.Intn(5) + 1)
}
// Paragraph generates paragraph
func (f *Faker) Paragraph() string {
return f.SentencesN(f.r.Intn(10) + 1)
}
// ParagraphsN generates n paragraphs separated by tab characters.
// Fixed to call f.Paragraph() instead of the package-level Paragraph(),
// which uses the global default faker and ignored this Faker's language
// and RNG; the result slice is also pre-sized to avoid repeated growth.
func (f *Faker) ParagraphsN(n int) string {
	paragraphs := make([]string, n)
	for i := 0; i < n; i++ {
		paragraphs[i] = f.Paragraph()
	}
	return strings.Join(paragraphs, "\t")
}
// Paragraphs generates from 1 to 5 paragraphs
func (f *Faker) Paragraphs() string {
return f.ParagraphsN(f.r.Intn(5) + 1)
} | lorem_ipsum.go | 0.714429 | 0.426859 | lorem_ipsum.go | starcoder |
package clust
import (
"fmt"
"math"
"math/rand"
"github.com/emer/etable/etensor"
"github.com/emer/etable/norm"
"github.com/emer/etable/simat"
"github.com/goki/ki/indent"
)
// Node is one node in the cluster
type Node struct {
Idx int `desc:"index into original distance matrix -- only valid for for terminal leaves"`
Dist float64 `desc:"distance for this node -- how far apart were all the kids from each other when this node was created -- is 0 for leaf nodes"`
ParDist float64 `desc:"total aggregate distance from parents -- the X axis offset at which our cluster starts"`
Y float64 `desc:"y-axis value for this node -- if a parent, it is the average of its kids Y's, otherwise it counts down"`
Kids []*Node `desc:"child nodes under this one"`
}
// IsLeaf returns true if node is a leaf of the tree with no kids
func (nn *Node) IsLeaf() bool {
return len(nn.Kids) == 0
}
// Sprint prints to string
func (nn *Node) Sprint(smat *simat.SimMat, depth int) string {
if nn.IsLeaf() {
return smat.Rows[nn.Idx] + " "
}
sv := fmt.Sprintf("\n%v%v: ", indent.Tabs(depth), nn.Dist)
for _, kn := range nn.Kids {
sv += kn.Sprint(smat, depth+1)
}
return sv
}
// Idxs collects all the indexes in this node
func (nn *Node) Idxs(ix []int, ctr *int) {
if nn.IsLeaf() {
ix[*ctr] = nn.Idx
(*ctr)++
} else {
for _, kn := range nn.Kids {
kn.Idxs(ix, ctr)
}
}
}
// NewNode merges two nodes into a new parent node recorded at the given
// merge distance.
func NewNode(na, nb *Node, dst float64) *Node {
	return &Node{
		Dist: dst,
		Kids: []*Node{na, nb},
	}
}
// Glom implements basic agglomerative clustering, based on a raw similarity matrix as given.
// This calls GlomInit to initialize the root node with all of the leaves, and the calls
// GlomClust to do the iterative clustering process. If you want to start with pre-defined
// initial clusters, then call GlomClust with a root node so-initialized.
// The smat.Mat matrix must be an etensor.Float64.
func Glom(smat *simat.SimMat, dfunc DistFunc) *Node {
ntot := smat.Mat.Dim(0) // number of leaves
root := GlomInit(ntot)
return GlomClust(root, smat, dfunc)
}
// GlomStd implements basic agglomerative clustering, based on a raw similarity matrix as given.
// This calls GlomInit to initialize the root node with all of the leaves, and the calls
// GlomClust to do the iterative clustering process. If you want to start with pre-defined
// initial clusters, then call GlomClust with a root node so-initialized.
// The smat.Mat matrix must be an etensor.Float64.
// Std version uses std distance functions
func GlomStd(smat *simat.SimMat, std StdDists) *Node {
return Glom(smat, StdFunc(std))
}
// GlomInit returns a standard root node whose children are the ntot leaf
// nodes, carrying indexes 0..ntot-1 into the similarity matrix.
func GlomInit(ntot int) *Node {
	root := &Node{Kids: make([]*Node, ntot)}
	for i := range root.Kids {
		root.Kids[i] = &Node{Idx: i}
	}
	return root
}
// GlomClust does the iterative agglomerative clustering, based on a raw similarity matrix as given,
// using a root node that has already been initialized with the starting clusters (all of the
// leaves by default, but could be anything if you want to start with predefined clusters).
// The smat.Mat matrix must be an etensor.Float64.
func GlomClust(root *Node, smat *simat.SimMat, dfunc DistFunc) *Node {
ntot := smat.Mat.Dim(0) // number of leaves
smatf := smat.Mat.(*etensor.Float64).Values
maxd := norm.Max64(smatf)
// indexes in each group
aidx := make([]int, ntot)
bidx := make([]int, ntot)
for {
var ma, mb []int
mval := math.MaxFloat64
for ai, ka := range root.Kids {
actr := 0
ka.Idxs(aidx, &actr)
aix := aidx[0:actr]
for bi := 0; bi < ai; bi++ {
kb := root.Kids[bi]
bctr := 0
kb.Idxs(bidx, &bctr)
bix := bidx[0:bctr]
dv := dfunc(aix, bix, ntot, maxd, smatf)
if dv < mval {
mval = dv
ma = []int{ai}
mb = []int{bi}
} else if dv == mval { // do all ties at same time
ma = append(ma, ai)
mb = append(mb, bi)
}
}
}
ni := 0
if len(ma) > 1 {
ni = rand.Intn(len(ma))
}
na := ma[ni]
nb := mb[ni]
// fmt.Printf("merging nodes at dist: %v: %v and %v\nA: %v\nB: %v\n", mval, na, nb, root.Kids[na].Sprint(smat, 0), root.Kids[nb].Sprint(smat, 0))
nn := NewNode(root.Kids[na], root.Kids[nb], mval)
for i := len(root.Kids) - 1; i >= 0; i-- {
if i == na || i == nb {
root.Kids = append(root.Kids[:i], root.Kids[i+1:]...)
}
}
root.Kids = append(root.Kids, nn)
if len(root.Kids) == 1 {
break
}
}
return root
} | clust/clust.go | 0.661048 | 0.472927 | clust.go | starcoder |
// Package flexpolyline contains tools to encode and decode FlexPolylines
// This file defines data structures to store FlexPolylines
package flexpolyline
import (
"fmt"
"math"
)
// FormatVersion is the FlexPolyline specification version implemented here.
const FormatVersion uint = 1

// Precision is the number of decimal digits kept after the comma.
type Precision uint8

// factor returns 10^p, the scaling multiplier implied by the precision.
func (p Precision) factor() float64 {
	return math.Pow10(int(p))
}

// Type3D states whether the third dimension is present and what meaning it has.
type Type3D uint8

const (
	Absent Type3D = iota
	Level
	Altitude
	Elevation
	Reserved1
	Reserved2
	Custom1
	Custom2
)

// Point is a location on the Earth surface with an optional third dimension.
type Point struct {
	Lat      float64
	Lng      float64
	ThirdDim float64
}

// Polyline stores a FlexPolyline together with its encoding parameters.
type Polyline struct {
	coordinates []Point
	precision2D Precision
	precision3D Precision
	type3D      Type3D
}

// Coordinates returns the points making up the polyline.
func (p *Polyline) Coordinates() []Point {
	return p.coordinates
}

// Precision2D returns the precision used for latitude and longitude.
func (p *Polyline) Precision2D() Precision {
	return p.precision2D
}

// Precision3D returns the precision used for the third dimension.
func (p *Polyline) Precision3D() Precision {
	return p.precision3D
}

// Type3D returns the meaning of the third dimension (Absent when there is none).
func (p *Polyline) Type3D() Type3D {
	return p.type3D
}
// CreatePolyline builds a two dimensional FlexPolyline after validating the
// precision against the specification limits.
func CreatePolyline(precision Precision, points []Point) (*Polyline, error) {
	if err := checkArgs(Absent, precision, 0); err != nil {
		return nil, err
	}
	return &Polyline{coordinates: points, precision2D: precision}, nil
}
// MustCreatePolyline is like CreatePolyline but panics when the arguments are
// invalid.
func MustCreatePolyline(precision Precision, points []Point) *Polyline {
	polyline, err := CreatePolyline(precision, points)
	if err != nil {
		panic(err)
	}
	return polyline
}
// CreatePolyline3D builds a three dimensional FlexPolyline after validating
// the 3D type and both precisions.
func CreatePolyline3D(type3D Type3D, precision2D, precision3D Precision, points []Point) (*Polyline, error) {
	if err := checkArgs(type3D, precision2D, precision3D); err != nil {
		return nil, err
	}
	return &Polyline{
		coordinates: points,
		precision2D: precision2D,
		precision3D: precision3D,
		type3D:      type3D,
	}, nil
}
// MustCreatePolyline3D is like CreatePolyline3D but panics when the arguments
// are invalid.
func MustCreatePolyline3D(type3D Type3D, precision2D, precision3D Precision, points []Point) *Polyline {
	polyline, err := CreatePolyline3D(type3D, precision2D, precision3D, points)
	if err != nil {
		panic(err)
	}
	return polyline
}
// Encode serializes the polyline into its FlexPolyline string form by
// delegating to the package-level Encode function.
func (p *Polyline) Encode() (string, error) {
	return Encode(p)
}
// circaCharsPerPoint estimates how many characters one encoded point will
// occupy, assuming third-dimension values stay below 10000.
func circaCharsPerPoint(header *Polyline) int {
	const assumedMaxThirdDimValue = 10000.
	log64 := math.Log(64)
	chars := 2. * math.Log(360.*header.Precision2D().factor()) / log64
	if header.Type3D() != Absent {
		chars += math.Log(assumedMaxThirdDimValue*header.Precision3D().factor()) / log64
	}
	return int(math.Ceil(chars))
}
// checkArgs validates the polyline header parameters: precisions are limited
// to 15 decimal digits and reserved 3D types are rejected.
func checkArgs(type3D Type3D, precision2D, precision3D Precision) error {
	switch {
	case precision2D > 15:
		return fmt.Errorf("Precision2D %d > max Precision2D (15)", precision2D)
	case type3D > Custom2:
		return fmt.Errorf("Type3D %d > max Type3D (7)", type3D)
	case type3D == Reserved1 || type3D == Reserved2:
		return fmt.Errorf("Type3D %d reserved for future use", type3D)
	case precision3D > 15:
		return fmt.Errorf("Precision3D %d > max Precision3D (15)", precision3D)
	default:
		return nil
	}
} | golang/flexpolyline/data.go | 0.842798 | 0.626681 | data.go | starcoder
package jsonmatch
import (
"errors"
"reflect"
)
// Canonicalization of types: For slices and maps the jsonmatch system uses
// []interface{} and map[string]interface{} respectively. The client may
// use any type alias they want for these types, but we need to convert them
// to their canonical types while processing to avoid reflection-fireworks in our code.
var canonicalMapType = reflect.TypeOf(map[string]interface{}{})
var canonicalSliceType = reflect.TypeOf([]interface{}{})

// isCanonicalType reports whether t is one of the two canonical container
// types (map[string]interface{} or []interface{}).
func isCanonicalType(t reflect.Type) bool {
	return t == canonicalMapType || t == canonicalSliceType
}
// toCanonicalType converts maps and slices to their canonical types
// (map[string]interface{} and []interface{}). It returns the possibly
// converted value, whether a conversion took place, and an error when a map
// type cannot be represented canonically.
func toCanonicalType(value interface{}) (interface{}, bool, error) {
	if value == nil {
		// reflect.TypeOf(nil) is nil and calling Kind() on it would panic;
		// a nil value is already canonical.
		return nil, false, nil
	}
	valueType := reflect.TypeOf(value)
	if valueType.Kind() == reflect.Map && valueType != canonicalMapType {
		if !valueType.ConvertibleTo(canonicalMapType) {
			return nil, false, errors.New("Maps used with jsonmatch must be convertible to map[string]interface{}")
		}
		return reflect.ValueOf(value).Convert(canonicalMapType).Interface(), true, nil
	}
	if valueType.Kind() == reflect.Slice && valueType != canonicalSliceType {
		if !valueType.ConvertibleTo(canonicalSliceType) {
			// Slices of concrete element types (e.g. []string) are rebuilt
			// element by element.
			slice, _ := intoInterfaceSlice(value)
			return slice, true, nil
		}
		return reflect.ValueOf(value).Convert(canonicalSliceType).Interface(), true, nil
	}
	return value, false, nil
}
// matchType attempts to convert newValue to the concrete type of oldValue
// when newValue is of a canonical type and the underlying old type is
// compatible; otherwise newValue is returned unchanged.
func matchType(newValue interface{}, oldValue interface{}) interface{} {
	newType := reflect.TypeOf(newValue)
	if !isCanonicalType(newType) {
		return newValue
	}
	oldType := reflect.TypeOf(oldValue)
	switch {
	case newType == oldType:
		return newValue
	case newType.ConvertibleTo(oldType):
		return reflect.ValueOf(newValue).Convert(oldType).Interface()
	default:
		return newValue
	}
}
// assertIsCompatible checks that a value's type is compatible with the refs
// system, returning the canonicalization error or nil when compatible.
func assertIsCompatible(value interface{}) error {
	_, _, err := toCanonicalType(value)
	return err
} | canonical_types.go | 0.616128 | 0.418697 | canonical_types.go | starcoder
package types
import (
"fmt"
"github.com/src-d/go-mysql-server/sql"
dtypes "github.com/liquidata-inc/dolt/go/store/types"
)
// ValueToSql converts a noms Value into its SQL representation.
type ValueToSql func(dtypes.Value) (interface{}, error)

// SqlToValue converts a SQL-side value into a noms Value.
type SqlToValue func(interface{}) (dtypes.Value, error)

// SqlType describes how one NomsKind maps to and from SQL types.
type SqlType interface {
	// NomsKind is the underlying NomsKind that this initialization structure represents.
	NomsKind() dtypes.NomsKind
	// SqlType is the sql.Type that will be returned for Values of the NomsKind returned by NomsKind().
	// In other words, this is the SQL type that will be used as the default type for all Values of this NomsKind.
	SqlType() sql.Type
	// SqlTypes are the SQL types that will be directly processed to represent the underlying NomsKind of Value.
	SqlTypes() []sql.Type
	// GetValueToSql returns a function that accepts a Value (same type as returned by Value()) and returns the SQL representation.
	GetValueToSql() ValueToSql
	// GetSqlToValue returns a function that accepts any variable and returns a Value if applicable.
	GetSqlToValue() SqlToValue
	fmt.Stringer
}
// SqlTypeInitializers lists every supported type mapping; init() registers
// each entry in the package-level lookup maps.
var SqlTypeInitializers = []SqlType{
	boolType{},
	datetimeType{},
	floatType{},
	intType{},
	stringType{},
	uintType{},
	uuidType{},
}
// sqlTypeString maps each supported sql.Type to its SQL type-name string.
var sqlTypeString = map[sql.Type]string{
	sql.Blob:      "LONGBLOB",
	sql.Boolean:   "BOOLEAN",
	sql.Date:      "DATE",
	sql.Datetime:  "DATETIME",
	sql.Float32:   "FLOAT",
	sql.Float64:   "DOUBLE",
	sql.Int8:      "TINYINT",
	sql.Int16:     "SMALLINT",
	sql.Int24:     "MEDIUMINT",
	sql.Int32:     "INT",
	sql.Int64:     "BIGINT",
	sql.JSON:      "JSON",
	sql.Text:      "LONGTEXT",
	sql.Timestamp: "TIMESTAMP",
	sql.Uint8:     "TINYINT UNSIGNED",
	sql.Uint16:    "SMALLINT UNSIGNED",
	sql.Uint24:    "MEDIUMINT UNSIGNED",
	sql.Uint32:    "INT UNSIGNED",
	sql.Uint64:    "BIGINT UNSIGNED",
}
// init populates the package-level lookup maps from SqlTypeInitializers so
// that conversions in both directions become simple map reads. It panics when
// an initializer is misconfigured (duplicate SQL type representation or a SQL
// type without a mapped string), since that is a programming error best
// surfaced at startup.
func init() {
	for _, sqlTypeInit := range SqlTypeInitializers {
		kind := sqlTypeInit.NomsKind()
		nomsKindToSqlType[kind] = sqlTypeInit.SqlType()
		nomsValToSqlValFunc[kind] = sqlTypeInit.GetValueToSql()
		nomsKindToValFunc[kind] = sqlTypeInit.GetSqlToValue()
		if sqlStr, ok := sqlTypeString[sqlTypeInit.SqlType()]; ok {
			nomsKindToSqlTypeStr[kind] = sqlStr
		} else {
			panic(fmt.Errorf("SQL type %v does not have a mapped string", sqlTypeInit.SqlType()))
		}
		for _, st := range sqlTypeInit.SqlTypes() {
			if _, ok := sqlTypeToNomsKind[st]; ok {
				panic(fmt.Errorf("SQL type %v already has a representation", st))
			}
			if _, ok := sqlTypeString[st]; !ok {
				panic(fmt.Errorf("SQL type %v does not have a mapped string", st))
			}
			sqlTypeToNomsKind[st] = kind
		}
	}
}
// Lookup tables built once by init() from SqlTypeInitializers.
var (
	nomsKindToSqlType    = make(map[dtypes.NomsKind]sql.Type)
	nomsKindToSqlTypeStr = make(map[dtypes.NomsKind]string)
	nomsKindToValFunc    = make(map[dtypes.NomsKind]SqlToValue)
	nomsValToSqlValFunc  = make(map[dtypes.NomsKind]ValueToSql)
	sqlTypeToNomsKind    = make(map[sql.Type]dtypes.NomsKind)
)
// NomsKindToSqlType returns the default sql.Type for the given NomsKind.
func NomsKindToSqlType(nomsKind dtypes.NomsKind) (sql.Type, error) {
	st, ok := nomsKindToSqlType[nomsKind]
	if !ok {
		return nil, fmt.Errorf("no corresponding SQL type found for %v", nomsKind)
	}
	return st, nil
}
// NomsKindToSqlTypeString returns the SQL type-name string for the given NomsKind.
func NomsKindToSqlTypeString(nomsKind dtypes.NomsKind) (string, error) {
	str, ok := nomsKindToSqlTypeStr[nomsKind]
	if !ok {
		return "", fmt.Errorf("no corresponding SQL type found for %v", nomsKind)
	}
	return str, nil
}
// NomsValToSqlVal converts a noms Value into its SQL representation; noms
// null values map to SQL NULL (nil).
func NomsValToSqlVal(val dtypes.Value) (interface{}, error) {
	if dtypes.IsNull(val) {
		return nil, nil
	}
	convert, ok := nomsValToSqlValFunc[val.Kind()]
	if !ok {
		return nil, fmt.Errorf("Value of %v is unsupported in SQL", val.Kind())
	}
	return convert(val)
}
// SqlTypeToNomsKind returns the NomsKind that directly represents the given sql.Type.
func SqlTypeToNomsKind(t sql.Type) (dtypes.NomsKind, error) {
	kind, ok := sqlTypeToNomsKind[t]
	if !ok {
		return dtypes.UnknownKind, fmt.Errorf("unknown SQL type %v", t)
	}
	return kind, nil
}
// SqlTypeToString returns the SQL type-name string for the given sql.Type.
func SqlTypeToString(t sql.Type) (string, error) {
	str, ok := sqlTypeString[t]
	if !ok {
		return "", fmt.Errorf("no SQL string for SQL type %v", t.String())
	}
	return str, nil
}
// SqlValToNomsVal converts a SQL value into a noms Value of the given kind; a
// nil input maps to a nil Value with no error.
func SqlValToNomsVal(val interface{}, kind dtypes.NomsKind) (dtypes.Value, error) {
	if val == nil {
		return nil, nil
	}
	if varToVal, ok := nomsKindToValFunc[kind]; ok {
		return varToVal(val)
	}
	return nil, fmt.Errorf("Value of %v is unsupported in SQL", kind)
} | go/libraries/doltcore/sqle/types/types.go | 0.589716 | 0.457985 | types.go | starcoder
package iso20022
// Specifies rates.
type CorporateActionRate2 struct {
// Percentage of a cash distribution that will be withheld by a tax authority.
WithholdingTax *RateFormat1Choice `xml:"WhldgTax,omitempty"`
// Rate at which the income will be withheld by the jurisdiction in which the income was originally paid, for which relief at source and/or reclaim may be possible.
WithholdingOfForeignTax *RateAndAmountFormat1Choice `xml:"WhldgOfFrgnTax,omitempty"`
// Rate at which the income will be withheld by the jurisdiction in which the account owner is located, for which relief at source and/or reclaim may be possible.
WithholdingOfLocalTax *RateAndAmountFormat1Choice `xml:"WhldgOfLclTax,omitempty"`
// Local tax (ZAS Anrechnungsbetrag) subject to interest down payment tax (proportion of interest liable for interest down payment tax/interim profit that is not covered by the tax exempt amount).
GermanLocalTax1 *RateAndAmountFormat1Choice `xml:"GrmnLclTax1,omitempty"`
// Local tax (ZAS Pflichtige Zinsen) interest liable for interest down payment tax (proportion of gross interest per unit/interim profits that is not covered by the credit in the interest pool).
GermanLocalTax2 *RateAndAmountFormat1Choice `xml:"GrmnLclTax2,omitempty"`
// Local tax (Zinstopf) offset interest per unit against tax exempt amount (variation to offset interest per unit in relation to tax exempt amount).
GermanLocalTax3 *RateAndAmountFormat1Choice `xml:"GrmnLclTax3,omitempty"`
// Local tax (Ertrag Besitzanteilig) yield liable for interest down payment tax.
GermanLocalTax4 *RateAndAmountFormat1Choice `xml:"GrmnLclTax4,omitempty"`
// Taxation applied on an amount clearly identified as an income.
TaxOnIncome *RateFormat1Choice `xml:"TaxOnIncm,omitempty"`
// Taxation applied on an amount clearly identified as capital profits, capital gains.
TaxOnProfit *RateFormat1Choice `xml:"TaxOnPrft,omitempty"`
// Percentage of cash that was paid in excess of actual tax obligation and was reclaimed.
TaxReclaim *RateFormat1Choice `xml:"TaxRclm,omitempty"`
// Percentage of fiscal tax to apply.
FiscalStamp *RateFormat1Choice `xml:"FsclStmp,omitempty"`
// Proportionate allocation used for the offer.
Proration *RateFormat1Choice `xml:"Prratn,omitempty"`
// Quantity of new securities for a given quantity of underlying securities, where the underlying securities will be exchanged or debited, eg, 2 for 1: 2 new equities credited for every 1 underlying equity debited = 2 resulting equities.
NewToOld *RatioFormat2Choice `xml:"NewToOd,omitempty"`
// Quantity of new equities that will be derived by the exercise of a given quantity of intermediate securities.
NewSecuritiesToUnderlyingSecurities *RatioFormat2Choice `xml:"NewSctiesToUndrlygScties,omitempty"`
// Quantity of additional securities for a given quantity of underlying securities where underlying securities are not exchanged or debited, eg, 1 for 1: 1 new
// equity credited for every 1 underlying equity = 2 resulting equities.
AdditionalQuantityForExistingSecurities *RatioFormat1Choice `xml:"AddtlQtyForExstgScties,omitempty"`
// Quantity of additional intermediate securities/new equities awarded for a given quantity of securities derived from subscription.
AdditionalQuantityForSubscribedResultantSecurities *RatioFormat1Choice `xml:"AddtlQtyForSbcbdRsltntScties,omitempty"`
// Percentage of the gross dividend rate on which tax must be paid .
RelatedTax *RelatedTaxType1 `xml:"RltdTax,omitempty"`
// Rate per share to which a non-resident is entitled.
NonResidentRate *RateAndAmountFormat1Choice `xml:"NonResdtRate,omitempty"`
// Rate used to calculate the amount of the charges/fees that cannot be categorised.
Charges *RateAndAmountFormat1Choice `xml:"Chrgs,omitempty"`
// The actual interest rate used for the payment of the interest for the specified interest period.
InterestForUsedPayment *RateAndAmountFormat1Choice `xml:"IntrstForUsdPmt,omitempty"`
// Public index rate applied to the amount paid to adjust it to inflation.
IndexFactor *RateAndAmountFormat1Choice `xml:"IndxFctr,omitempty"`
// Rate resulting from a fully franked dividend paid by a company; rate includes tax credit for companies that have made sufficient tax payments during fiscal period.
FullyFranked *RateAndAmountFormat1Choice `xml:"FullyFrnkd,omitempty"`
// Cash dividend amount per equity before deductions or allowances have been made.
GrossDividend *GrossDividendRate1Choice `xml:"GrssDvdd,omitempty"`
// Cash dividend amount per equity after deductions or allowances have been made.
NetDividend *NetDividendRate1Choice `xml:"NetDvdd,omitempty"`
// Dividend is final.
FinalDividend *AmountAndRateFormat2Choice `xml:"FnlDvdd,omitempty"`
// Dividend is provisional.
ProvisionalDividend *AmountAndRateFormat2Choice `xml:"PrvsnlDvdd,omitempty"`
// Rate of the cash premium made available if the securities holder consents or participates to an event, e.g. consent fees.
CashIncentive *RateFormat1Choice `xml:"CshIncntiv,omitempty"`
// Cash rate made available in an offer in order to encourage participation in the offer.
SolicitationFee *RateFormat1Choice `xml:"SlctnFee,omitempty"`
// A maximum percentage of shares available through the over subscription privilege, usually a percentage of the basic subscription shares, eg, an account owner subscribing to 100 shares may over subscribe to a maximum of 50 additional shares when the over subscription maximum is 50%.
MaximumAllowedOversubscription *RateFormat1Choice `xml:"MaxAllwdOvrsbcpt,omitempty"`
// Rate used for additional tax that cannot be categorised.
AdditionalTax *RateAndAmountFormat1Choice `xml:"AddtlTax,omitempty"`
// Amount in its original currency when conversion from/into another currency has occurred.
OriginalAmount *ActiveCurrencyAndAmount `xml:"OrgnlAmt,omitempty"`
// Provides information about a foreign exchange.
ExchangeRate *ForeignExchangeTerms8 `xml:"XchgRate,omitempty"`
// Rate applicable to the event announced, eg, redemption rate for a redemption event.
ApplicableRate *RateFormat1Choice `xml:"AplblRate,omitempty"`
}
// Each AddX method below allocates the corresponding optional field with its
// zero value and returns the new instance so the caller can populate it in
// place. SetOriginalAmount instead constructs the amount directly from a
// value/currency pair.

func (c *CorporateActionRate2) AddWithholdingTax() *RateFormat1Choice {
	c.WithholdingTax = new(RateFormat1Choice)
	return c.WithholdingTax
}

func (c *CorporateActionRate2) AddWithholdingOfForeignTax() *RateAndAmountFormat1Choice {
	c.WithholdingOfForeignTax = new(RateAndAmountFormat1Choice)
	return c.WithholdingOfForeignTax
}

func (c *CorporateActionRate2) AddWithholdingOfLocalTax() *RateAndAmountFormat1Choice {
	c.WithholdingOfLocalTax = new(RateAndAmountFormat1Choice)
	return c.WithholdingOfLocalTax
}

func (c *CorporateActionRate2) AddGermanLocalTax1() *RateAndAmountFormat1Choice {
	c.GermanLocalTax1 = new(RateAndAmountFormat1Choice)
	return c.GermanLocalTax1
}

func (c *CorporateActionRate2) AddGermanLocalTax2() *RateAndAmountFormat1Choice {
	c.GermanLocalTax2 = new(RateAndAmountFormat1Choice)
	return c.GermanLocalTax2
}

func (c *CorporateActionRate2) AddGermanLocalTax3() *RateAndAmountFormat1Choice {
	c.GermanLocalTax3 = new(RateAndAmountFormat1Choice)
	return c.GermanLocalTax3
}

func (c *CorporateActionRate2) AddGermanLocalTax4() *RateAndAmountFormat1Choice {
	c.GermanLocalTax4 = new(RateAndAmountFormat1Choice)
	return c.GermanLocalTax4
}

func (c *CorporateActionRate2) AddTaxOnIncome() *RateFormat1Choice {
	c.TaxOnIncome = new(RateFormat1Choice)
	return c.TaxOnIncome
}

func (c *CorporateActionRate2) AddTaxOnProfit() *RateFormat1Choice {
	c.TaxOnProfit = new(RateFormat1Choice)
	return c.TaxOnProfit
}

func (c *CorporateActionRate2) AddTaxReclaim() *RateFormat1Choice {
	c.TaxReclaim = new(RateFormat1Choice)
	return c.TaxReclaim
}

func (c *CorporateActionRate2) AddFiscalStamp() *RateFormat1Choice {
	c.FiscalStamp = new(RateFormat1Choice)
	return c.FiscalStamp
}

func (c *CorporateActionRate2) AddProration() *RateFormat1Choice {
	c.Proration = new(RateFormat1Choice)
	return c.Proration
}

func (c *CorporateActionRate2) AddNewToOld() *RatioFormat2Choice {
	c.NewToOld = new(RatioFormat2Choice)
	return c.NewToOld
}

func (c *CorporateActionRate2) AddNewSecuritiesToUnderlyingSecurities() *RatioFormat2Choice {
	c.NewSecuritiesToUnderlyingSecurities = new(RatioFormat2Choice)
	return c.NewSecuritiesToUnderlyingSecurities
}

func (c *CorporateActionRate2) AddAdditionalQuantityForExistingSecurities() *RatioFormat1Choice {
	c.AdditionalQuantityForExistingSecurities = new(RatioFormat1Choice)
	return c.AdditionalQuantityForExistingSecurities
}

func (c *CorporateActionRate2) AddAdditionalQuantityForSubscribedResultantSecurities() *RatioFormat1Choice {
	c.AdditionalQuantityForSubscribedResultantSecurities = new(RatioFormat1Choice)
	return c.AdditionalQuantityForSubscribedResultantSecurities
}

func (c *CorporateActionRate2) AddRelatedTax() *RelatedTaxType1 {
	c.RelatedTax = new(RelatedTaxType1)
	return c.RelatedTax
}

func (c *CorporateActionRate2) AddNonResidentRate() *RateAndAmountFormat1Choice {
	c.NonResidentRate = new(RateAndAmountFormat1Choice)
	return c.NonResidentRate
}

func (c *CorporateActionRate2) AddCharges() *RateAndAmountFormat1Choice {
	c.Charges = new(RateAndAmountFormat1Choice)
	return c.Charges
}

func (c *CorporateActionRate2) AddInterestForUsedPayment() *RateAndAmountFormat1Choice {
	c.InterestForUsedPayment = new(RateAndAmountFormat1Choice)
	return c.InterestForUsedPayment
}

func (c *CorporateActionRate2) AddIndexFactor() *RateAndAmountFormat1Choice {
	c.IndexFactor = new(RateAndAmountFormat1Choice)
	return c.IndexFactor
}

func (c *CorporateActionRate2) AddFullyFranked() *RateAndAmountFormat1Choice {
	c.FullyFranked = new(RateAndAmountFormat1Choice)
	return c.FullyFranked
}

func (c *CorporateActionRate2) AddGrossDividend() *GrossDividendRate1Choice {
	c.GrossDividend = new(GrossDividendRate1Choice)
	return c.GrossDividend
}

func (c *CorporateActionRate2) AddNetDividend() *NetDividendRate1Choice {
	c.NetDividend = new(NetDividendRate1Choice)
	return c.NetDividend
}

func (c *CorporateActionRate2) AddFinalDividend() *AmountAndRateFormat2Choice {
	c.FinalDividend = new(AmountAndRateFormat2Choice)
	return c.FinalDividend
}

func (c *CorporateActionRate2) AddProvisionalDividend() *AmountAndRateFormat2Choice {
	c.ProvisionalDividend = new(AmountAndRateFormat2Choice)
	return c.ProvisionalDividend
}

func (c *CorporateActionRate2) AddCashIncentive() *RateFormat1Choice {
	c.CashIncentive = new(RateFormat1Choice)
	return c.CashIncentive
}

func (c *CorporateActionRate2) AddSolicitationFee() *RateFormat1Choice {
	c.SolicitationFee = new(RateFormat1Choice)
	return c.SolicitationFee
}

func (c *CorporateActionRate2) AddMaximumAllowedOversubscription() *RateFormat1Choice {
	c.MaximumAllowedOversubscription = new(RateFormat1Choice)
	return c.MaximumAllowedOversubscription
}

func (c *CorporateActionRate2) AddAdditionalTax() *RateAndAmountFormat1Choice {
	c.AdditionalTax = new(RateAndAmountFormat1Choice)
	return c.AdditionalTax
}

func (c *CorporateActionRate2) SetOriginalAmount(value, currency string) {
	c.OriginalAmount = NewActiveCurrencyAndAmount(value, currency)
}

func (c *CorporateActionRate2) AddExchangeRate() *ForeignExchangeTerms8 {
	c.ExchangeRate = new(ForeignExchangeTerms8)
	return c.ExchangeRate
}

func (c *CorporateActionRate2) AddApplicableRate() *RateFormat1Choice {
	c.ApplicableRate = new(RateFormat1Choice)
	return c.ApplicableRate
} | CorporateActionRate2.go | 0.806662 | 0.562056 | CorporateActionRate2.go | starcoder
package store
import (
"strings"
"sync"
)
// Trigram represents a sequence of 3 strings.
type Trigram [3]string

// TrigramMap is a 3-dimensional map which represents the frequency of each trigram.
type TrigramMap map[string]map[string]map[string]int

// TrigramStore represents the storage of trigrams found until now.
type TrigramStore interface {
	// AddTrigram adds a trigram to the store.
	AddTrigram(trigram Trigram)
	// MakeText generates a random text with the trigrams present in the store.
	MakeText() string
}

// TrigramMapStore is an implementation of storage structure for trigrams, by using hash maps.
// It basically encapsulates a TrigramMap with a mutex, as well as an object capable of choosing what trigram to choose next.
type TrigramMapStore struct {
	trigrams TrigramMap  // Check documentation of TrigramMap above.
	mutex    *sync.Mutex // Mutex to control accesses to the TrigramMap
	chooser  Chooser     // Strategy used to pick the initial trigram and each next word.
}
// NewMapTrigramStore creates a new empty TrigramMapStore that delegates word
// selection to the supplied chooser.
func NewMapTrigramStore(chooser Chooser) *TrigramMapStore {
	return &TrigramMapStore{
		trigrams: make(map[string]map[string]map[string]int),
		mutex:    &sync.Mutex{},
		chooser:  chooser,
	}
}
// AddTrigram records one occurrence of trigram, creating the nested maps on
// first use and incrementing its frequency otherwise. Safe for concurrent
// callers (guarded by the store mutex).
func (store *TrigramMapStore) AddTrigram(trigram Trigram) {
	store.mutex.Lock()
	defer store.mutex.Unlock()

	first, ok := store.trigrams[trigram[0]]
	if !ok {
		first = make(map[string]map[string]int)
		store.trigrams[trigram[0]] = first
	}
	second, ok := first[trigram[1]]
	if !ok {
		second = make(map[string]int)
		first[trigram[1]] = second
	}
	second[trigram[2]]++
}
// MakeText generates a random text with the trigrams present in the store.
// The text starts from a chooser-selected trigram (3 words) and is then
// extended one word at a time, stopping early when the current two-word
// suffix has no recorded continuation. With at most 100 loop iterations the
// result contains at most 102 words. Returns "" when the store is empty.
func (store *TrigramMapStore) MakeText() string {
	store.mutex.Lock()
	defer store.mutex.Unlock()
	var text []string
	// last2Words is the two-word suffix used to look up the next word.
	var last2Words [2]string
	if len(store.trigrams) == 0 {
		return ""
	}
	// Make a text with 100 trigrams maximum:
	for i := 0; i < 100; i++ {
		if len(text) > 0 {
			// Choose the next word, except if we encountered a path with zero possibilities for the next word.
			possibleNextWords := store.trigrams[last2Words[0]][last2Words[1]]
			if len(possibleNextWords) == 0 {
				break
			}
			nextWord := store.chooser.ChooseNextWord(possibleNextWords)
			text = append(text, nextWord)
			// Update the last 2 words:
			last2Words[0] = last2Words[1]
			last2Words[1] = nextWord
		} else {
			// Choose a random trigram to start:
			trigram := store.chooser.ChooseInitialTrigram(store.trigrams)
			text = append(text, trigram[:]...)
			// Update the last 2 words:
			last2Words[0] = trigram[1]
			last2Words[1] = trigram[2]
		}
	}
	return strings.Join(text, " ")
}
// getTrigramFreq returns how many times trigram has been added to the store.
// Missing nested map levels safely yield the zero value 0.
func (store *TrigramMapStore) getTrigramFreq(trigram Trigram) int {
	store.mutex.Lock()
	defer store.mutex.Unlock()
	return store.trigrams[trigram[0]][trigram[1]][trigram[2]]
} | store/store.go | 0.713332 | 0.441613 | store.go | starcoder
package exec
import "math"
// I32DivS performs signed 32-bit integer division, trapping on the single
// overflowing case: MinInt32 / -1, whose true quotient (2^31) does not fit in
// an int32.
// NOTE(review): division by zero is not intercepted here and surfaces as Go's
// native runtime panic — confirm that is the intended trap behaviour.
func I32DivS(i1, i2 int32) int32 {
	if i1 == math.MinInt32 && i2 == -1 {
		panic(TrapIntegerOverflow)
	}
	return i1 / i2
}
// I64DivS performs signed 64-bit integer division, trapping on the single
// overflowing case: MinInt64 / -1, whose true quotient (2^63) does not fit in
// an int64.
// NOTE(review): division by zero is not intercepted here and surfaces as Go's
// native runtime panic — confirm that is the intended trap behaviour.
func I64DivS(i1, i2 int64) int64 {
	if i1 == math.MinInt64 && i2 == -1 {
		panic(TrapIntegerOverflow)
	}
	return i1 / i2
}
// Fmax returns the larger of z1 and z2, propagating NaN: when either operand
// is NaN, that operand is returned unchanged.
func Fmax(z1, z2 float64) float64 {
	switch {
	case math.IsNaN(z1):
		return z1
	case math.IsNaN(z2):
		return z2
	default:
		return math.Max(z1, z2)
	}
}
// Fmin returns the smaller of z1 and z2, propagating NaN: when either operand
// is NaN, that operand is returned unchanged.
func Fmin(z1, z2 float64) float64 {
	switch {
	case math.IsNaN(z1):
		return z1
	case math.IsNaN(z2):
		return z2
	default:
		return math.Min(z1, z2)
	}
}
// I32TruncS converts a float64 to a signed 32-bit integer, truncating toward
// zero. It traps on NaN and on values outside [MinInt32, MaxInt32]; both of
// those bounds are exactly representable as float64, so the strict </>
// comparisons admit the bounds themselves.
func I32TruncS(z float64) int32 {
	z = math.Trunc(z)
	if math.IsNaN(z) {
		panic(TrapInvalidConversionToInteger)
	}
	if z < math.MinInt32 || z > math.MaxInt32 {
		panic(TrapIntegerOverflow)
	}
	return int32(z)
}
// I32TruncU converts a float64 to an unsigned 32-bit integer, truncating
// toward zero. Values in (-1, 0) truncate to zero, hence the <= -1 lower
// bound; MaxUint32 is exactly representable as a float64, so > is the right
// upper test.
func I32TruncU(z float64) uint32 {
	z = math.Trunc(z)
	if math.IsNaN(z) {
		panic(TrapInvalidConversionToInteger)
	}
	if z <= -1 || z > math.MaxUint32 {
		panic(TrapIntegerOverflow)
	}
	return uint32(z)
}
// I64TruncS converts a float64 to a signed 64-bit integer, truncating toward
// zero. MinInt64 (-2^63) is exactly representable as a float64, so < keeps
// it; MaxInt64 (2^63-1) is not — the constant rounds to 2^63 — so >= rejects
// everything at or above that rounded bound.
func I64TruncS(z float64) int64 {
	z = math.Trunc(z)
	if math.IsNaN(z) {
		panic(TrapInvalidConversionToInteger)
	}
	if z < math.MinInt64 || z >= math.MaxInt64 {
		panic(TrapIntegerOverflow)
	}
	return int64(z)
}
// I64TruncU converts a float64 to an unsigned 64-bit integer, truncating
// toward zero. Values in (-1, 0) truncate to zero, hence the <= -1 lower
// bound; MaxUint64 rounds to 2^64 as a float64, so >= rejects everything at
// or above that rounded bound.
func I64TruncU(z float64) uint64 {
	z = math.Trunc(z)
	if math.IsNaN(z) {
		panic(TrapInvalidConversionToInteger)
	}
	if z <= -1 || z >= math.MaxUint64 {
		panic(TrapIntegerOverflow)
	}
	return uint64(z)
}
// I32TruncSatS converts a float64 to int32 with saturating semantics: NaN
// yields 0, and out-of-range values (including infinities) clamp to the
// int32 limits; in-range values truncate toward zero.
func I32TruncSatS(z float64) int32 {
	if math.IsNaN(z) {
		return 0
	}
	if math.IsInf(z, -1) || z <= math.MinInt32 {
		return math.MinInt32
	}
	if math.IsInf(z, 1) || z >= math.MaxInt32 {
		return math.MaxInt32
	}
	return int32(z)
}
// I32TruncSatU converts a float64 to uint32 with saturating semantics: NaN
// and anything below zero yield 0, while +Inf and values at or above
// MaxUint32 clamp to MaxUint32; in-range values truncate toward zero.
func I32TruncSatU(z float64) uint32 {
	if math.IsNaN(z) || math.IsInf(z, -1) || z < 0 {
		return 0
	}
	if math.IsInf(z, 1) || z >= math.MaxUint32 {
		return math.MaxUint32
	}
	return uint32(z)
}
// I64TruncSatS converts a float64 to int64 with saturating semantics: NaN
// yields 0, and out-of-range values (including infinities) clamp to the
// int64 limits; in-range values truncate toward zero.
func I64TruncSatS(z float64) int64 {
	if math.IsNaN(z) {
		return 0
	}
	if math.IsInf(z, -1) || z <= math.MinInt64 {
		return math.MinInt64
	}
	if math.IsInf(z, 1) || z >= math.MaxInt64 {
		return math.MaxInt64
	}
	return int64(z)
}
// I64TruncSatU converts a float64 to uint64 with saturating semantics: NaN
// and anything below zero yield 0, while +Inf and values at or above 2^64
// (the float64 rounding of MaxUint64) clamp to MaxUint64; in-range values
// truncate toward zero.
func I64TruncSatU(z float64) uint64 {
	switch {
	case math.IsNaN(z) || math.IsInf(z, -1) || z < 0:
		return 0
	case math.IsInf(z, 1) || z >= math.MaxUint64:
		return math.MaxUint64
	default:
		return uint64(z)
	}
} | exec/numerics.go | 0.575588 | 0.448728 | numerics.go | starcoder
package metrics
import (
"encoding/json"
"errors"
"fmt"
"net/http"
"regexp"
"sort"
"github.com/Jeffail/benthos/v3/internal/docs"
"github.com/Jeffail/benthos/v3/lib/log"
)
//------------------------------------------------------------------------------
// init registers the deprecated "rename" metric type, together with its
// documentation fields and examples, in the global Constructors table.
func init() {
	Constructors[TypeRename] = TypeSpec{
		constructor: NewRename,
		Status:      docs.StatusDeprecated,
		Summary: `
Rename metric paths as they are registered.`,
		FieldSpecs: docs.FieldSpecs{
			docs.FieldCommon("by_regexp", `
A list of objects, each specifying an RE2 regular expression which will be
tested against each metric path registered. Each occurrence of the expression
will be replaced with the specified value. Inside the value $ signs are
interpreted as submatch expansions, e.g. $1 represents the first submatch.
The field `+"`to_label`"+` may contain any number of key/value pairs to be
added to a metric as labels, where the value may contain submatches from the
provided pattern. This allows you to extract (left-most) matched segments of the
renamed path into the label values.`,
				[]interface{}{
					map[string]interface{}{
						"pattern": "foo\\.([a-z]*)\\.([a-z]*)",
						"value":   "foo.$1",
						"to_label": map[string]interface{}{
							"bar": "$2",
						},
					},
				},
			).WithChildren(
				docs.FieldDeprecated("pattern"),
				docs.FieldDeprecated("value"),
				docs.FieldDeprecated("to_label").Map(),
			).Array(),
			docs.FieldCommon("child", "A child metric type, this is where renamed metrics will be routed.").HasType(docs.FieldMetrics),
		},
		Description: `
Metrics must be matched using dot notation even if the chosen output uses a
different form. For example, the path would be 'foo.bar' rather than 'foo_bar'
even when sending metrics to Prometheus. A full list of metrics paths that
Benthos registers can be found in [this list](/docs/components/metrics/about#paths).`,
		Footnotes: `
## Examples
In order to replace the paths 'foo.bar.0.zap' and 'foo.baz.1.zap' with 'zip.bar'
and 'zip.baz' respectively, and store the respective values '0' and '1' under
the label key 'index' we could use this config:
` + "```yaml" + `
metrics:
  rename:
    by_regexp:
      - pattern: "foo\\.([a-z]*)\\.([a-z]*)\\.zap"
        value: "zip.$1"
        to_label:
          index: $2
    child:
      statsd:
        prefix: foo
        address: localhost:8125
` + "```" + `
These labels will only be injected into metrics registered without pre-existing
labels. Therefore it's currently not possible to combine labels registered from
the ` + "[`metric` processor](/docs/components/processors/metric)" + ` with labels
set via renaming.
## Debugging
In order to see logs breaking down which metrics are registered and whether they
are renamed enable logging at the TRACE level.`,
	}
}
//------------------------------------------------------------------------------
// RenameByRegexpConfig contains config fields for a rename by regular
// expression pattern.
type RenameByRegexpConfig struct {
	Pattern string `json:"pattern" yaml:"pattern"`
	Value   string `json:"value" yaml:"value"`
	// Labels maps label names to values; values may contain submatch
	// expansions resolved against Pattern.
	Labels map[string]string `json:"to_label" yaml:"to_label"`
}

// RenameConfig contains config fields for the Rename metric type.
type RenameConfig struct {
	ByRegexp []RenameByRegexpConfig `json:"by_regexp" yaml:"by_regexp"`
	Child    *Config                `json:"child" yaml:"child"`
}

// NewRenameConfig returns a RenameConfig with default values.
func NewRenameConfig() RenameConfig {
	return RenameConfig{
		ByRegexp: []RenameByRegexpConfig{},
		Child:    nil,
	}
}
//------------------------------------------------------------------------------
// dummyRenameConfig is the serialization shape of RenameConfig: identical
// fields, but with an untyped Child so a nil child can be rendered as an
// empty object rather than null.
type dummyRenameConfig struct {
	ByRegexp []RenameByRegexpConfig `json:"by_regexp" yaml:"by_regexp"`
	Child    interface{}            `json:"child" yaml:"child"`
}

// forMarshalling converts the config into its serialization shape,
// substituting an empty object when no child is configured. Both marshallers
// share this logic instead of duplicating it.
func (w RenameConfig) forMarshalling() dummyRenameConfig {
	dummy := dummyRenameConfig{
		ByRegexp: w.ByRegexp,
		Child:    w.Child,
	}
	if w.Child == nil {
		dummy.Child = struct{}{}
	}
	return dummy
}

// MarshalJSON prints an empty object instead of nil.
func (w RenameConfig) MarshalJSON() ([]byte, error) {
	return json.Marshal(w.forMarshalling())
}

// MarshalYAML prints an empty object instead of nil.
func (w RenameConfig) MarshalYAML() (interface{}, error) {
	return w.forMarshalling(), nil
}
//------------------------------------------------------------------------------
// renameByRegexp is one compiled rename rule: the pattern to test, the
// replacement value (which may contain submatch expansions), and label
// key/value templates to extract from matches.
type renameByRegexp struct {
	expression *regexp.Regexp
	value      string
	labels     map[string]string
}

// Rename is a statistics object that wraps a child statistics object and
// rewrites metric paths (optionally extracting labels) before they are
// registered with the child.
type Rename struct {
	byRegexp []renameByRegexp
	s        Type
	log      log.Modular
}
// NewRename creates and returns a new Rename object wrapping the configured
// child metric type. It returns an error when no child is configured or when
// any rename pattern fails to compile.
func NewRename(config Config, opts ...func(Type)) (Type, error) {
	if config.Rename.Child == nil {
		return nil, errors.New("cannot create a rename metric without a child")
	}
	child, err := New(*config.Rename.Child)
	if err != nil {
		return nil, err
	}
	r := &Rename{
		s:   child,
		log: log.Noop(),
	}
	for _, opt := range opts {
		opt(r)
	}
	for _, p := range config.Rename.ByRegexp {
		re, err := regexp.Compile(p.Pattern)
		if err != nil {
			// Report the offending pattern itself; the original passed the
			// whole config struct to %s, producing an unreadable message.
			return nil, fmt.Errorf("invalid regular expression: '%s': %v", p.Pattern, err)
		}
		r.byRegexp = append(r.byRegexp, renameByRegexp{
			expression: re,
			value:      p.Value,
			labels:     p.Labels,
		})
	}
	return r, nil
}
//------------------------------------------------------------------------------
// renamePath applies each configured regular expression to the metric path in
// order, returning the rewritten path along with any labels extracted from
// the (left-most) matching segment. Later rules see the output of earlier
// ones, and a label key written by several rules keeps the last value.
func (r *Rename) renamePath(path string) (string, map[string]string) {
	renamed := false
	labels := map[string]string{}
	for _, rr := range r.byRegexp {
		newPath := rr.expression.ReplaceAllString(path, rr.value)
		if newPath != path {
			renamed = true
			r.log.Tracef("Renamed metric path '%v' to '%v' as per regexp '%v'\n", path, newPath, rr.expression.String())
		}
		// len of a nil map is 0, so no separate nil check is needed.
		if len(rr.labels) > 0 {
			// Extract only the matching segment of the path (left-most)
			leftPath := rr.expression.FindString(path)
			if len(leftPath) > 0 {
				for k, v := range rr.labels {
					v = rr.expression.ReplaceAllString(leftPath, v)
					labels[k] = v
					r.log.Tracef("Renamed label '%v' to '%v' as per regexp '%v'\n", k, v, rr.expression.String())
				}
			}
		}
		path = newPath
	}
	if !renamed {
		r.log.Tracef("Registered metric path '%v' unchanged\n", path)
	}
	return path, labels
}
//------------------------------------------------------------------------------
// labelsFromMap splits a label map into parallel name and value slices, with
// names in ascending order so output is deterministic.
func labelsFromMap(labels map[string]string) ([]string, []string) {
	names := make([]string, 0, len(labels))
	for name := range labels {
		names = append(names, name)
	}
	sort.Strings(names)
	values := make([]string, len(names))
	for i, name := range names {
		values[i] = labels[name]
	}
	return names, values
}
// GetCounter returns a stat counter object for a path.
func (r *Rename) GetCounter(path string) StatCounter {
rpath, labels := r.renamePath(path)
if len(labels) == 0 {
return r.s.GetCounter(rpath)
}
names, values := labelsFromMap(labels)
return r.s.GetCounterVec(rpath, names).With(values...)
}
// GetCounterVec returns a stat counter vector for a path with the given label
// names, applying configured renames to the path only.
func (r *Rename) GetCounterVec(path string, n []string) StatCounterVec {
	renamedPath, _ := r.renamePath(path)
	return r.s.GetCounterVec(renamedPath, n)
}
// GetTimer returns a stat timer object for a path, applying any configured
// renames first. When the rename extracted labels, a labelled timer vector
// entry is returned instead.
func (r *Rename) GetTimer(path string) StatTimer {
	rpath, labels := r.renamePath(path)
	if len(labels) > 0 {
		names, values := labelsFromMap(labels)
		return r.s.GetTimerVec(rpath, names).With(values...)
	}
	return r.s.GetTimer(rpath)
}
// GetTimerVec returns a stat timer vector for a path with the given label
// names, applying configured renames to the path only.
func (r *Rename) GetTimerVec(path string, n []string) StatTimerVec {
	renamedPath, _ := r.renamePath(path)
	return r.s.GetTimerVec(renamedPath, n)
}
// GetGauge returns a stat gauge object for a path, applying any configured
// renames first. When the rename extracted labels, a labelled gauge vector
// entry is returned instead.
func (r *Rename) GetGauge(path string) StatGauge {
	rpath, labels := r.renamePath(path)
	if len(labels) > 0 {
		names, values := labelsFromMap(labels)
		return r.s.GetGaugeVec(rpath, names).With(values...)
	}
	return r.s.GetGauge(rpath)
}
// GetGaugeVec returns a stat gauge vector for a path with the given label
// names, applying configured renames to the path only. (The previous comment
// incorrectly described this as a timer.)
func (r *Rename) GetGaugeVec(path string, n []string) StatGaugeVec {
	renamedPath, _ := r.renamePath(path)
	return r.s.GetGaugeVec(renamedPath, n)
}
// SetLogger sets the logger used by this rename wrapper (under a ".rename"
// sub-module namespace) and forwards it to the wrapped child metrics type.
func (r *Rename) SetLogger(log log.Modular) {
	r.log = log.NewModule(".rename")
	r.s.SetLogger(log)
}
// Close stops the wrapped child metrics type from aggregating metrics and
// cleans up its resources.
func (r *Rename) Close() error {
	return r.s.Close()
}
//------------------------------------------------------------------------------
// HandlerFunc returns an http.HandlerFunc for accessing metrics when the
// wrapped child type supports HTTP metrics, otherwise a handler responding
// with 501 Not Implemented.
func (r *Rename) HandlerFunc() http.HandlerFunc {
	if wHandlerFunc, ok := r.s.(WithHandlerFunc); ok {
		return wHandlerFunc.HandlerFunc()
	}
	// The request parameter is named req so it no longer shadows the method
	// receiver r.
	return func(w http.ResponseWriter, req *http.Request) {
		w.WriteHeader(http.StatusNotImplemented)
		_, _ = w.Write([]byte("The child of this rename does not support HTTP metrics."))
	}
}
//------------------------------------------------------------------------------ | lib/metrics/rename.go | 0.843283 | 0.636042 | rename.go | starcoder |
package values
import (
	"bytes"
	"fmt"
	"math"
	"strings"

	"github.com/shanzi/gexpr/types"
)
// OperatorInterface defines the complete set of binary and unary operations
// that can be evaluated over expression Values.
type OperatorInterface interface {
	// Arithmetic Operators
	ADD(a, b Value) Value // +
	SUB(a, b Value) Value // -
	MUL(a, b Value) Value // *
	DIV(a, b Value) Value // /
	MOD(a, b Value) Value // %
	// Binary Operators
	AND(a, b Value) Value // &
	OR(a, b Value) Value // |
	XOR(a, b Value) Value // ^
	LEFT_SHIFT(a, b Value) Value // <<
	RIGHT_SHIFT(a, b Value) Value // >>
	AND_NOT(a, b Value) Value // &^
	// Logic Operators
	EQUAL(a, b Value) Value // ==
	NOT_EQUAL(a, b Value) Value // !=
	GREATER(a, b Value) Value // >
	LESS(a, b Value) Value // <
	GEQ(a, b Value) Value // >=
	LEQ(a, b Value) Value // <=
	// Boolean Operators
	BOOL_AND(a, b Value) Value // &&
	BOOL_OR(a, b Value) Value // ||
	// Unary operators
	INC(a Value) Value // ++
	DEC(a Value) Value // --
	INV(a Value) Value // ^ (bitwise complement)
	BOOL_NOT(a Value) Value // !
	NEGATIVE(a Value) Value // - (unary minus; previously mislabelled "!")
	POSITIVE(a Value) Value // + (unary plus; previously mislabelled "!")
}
// _operators is the stateless implementation of OperatorInterface.
type _operators struct{}

// operators_singleton is shared by all callers since _operators holds no state.
var operators_singleton = &_operators{}

// Operators returns the shared OperatorInterface implementation.
func Operators() OperatorInterface {
	return operators_singleton
}
// ADD implements '+': integer and float addition, plus string concatenation.
// Incompatible operand types cause a panic.
func (self *_operators) ADD(a, b Value) Value {
	switch {
	case types.AssertMatch(types.INTEGER, a.Type(), b.Type()):
		return Integer(a.Int64() + b.Int64())
	case types.AssertMatch(types.FLOAT, a.Type(), b.Type()):
		return Float(a.Float64() + b.Float64())
	case types.AssertMatch(types.STRING, a.Type(), b.Type()):
		var joined bytes.Buffer
		joined.WriteString(a.String())
		joined.WriteString(b.String())
		return String(joined.String())
	}
	panic(fmt.Sprintf("Can not add %s and %s", a.Type().Name(), b.Type().Name()))
}
// SUB implements '-' for integer and float operands; other types panic.
func (self *_operators) SUB(a, b Value) Value {
	if types.AssertMatch(types.INTEGER, a.Type(), b.Type()) {
		return Integer(a.Int64() - b.Int64())
	}
	if types.AssertMatch(types.FLOAT, a.Type(), b.Type()) {
		return Float(a.Float64() - b.Float64())
	}
	// Fixed typo in the panic message ("subtruct" -> "subtract").
	panic(fmt.Sprintf("Can not subtract %s and %s", a.Type().Name(), b.Type().Name()))
}
// MUL implements '*' for integer and float operands; other types panic.
func (self *_operators) MUL(a, b Value) Value {
	switch {
	case types.AssertMatch(types.INTEGER, a.Type(), b.Type()):
		return Integer(a.Int64() * b.Int64())
	case types.AssertMatch(types.FLOAT, a.Type(), b.Type()):
		return Float(a.Float64() * b.Float64())
	}
	panic(fmt.Sprintf("Can not multiply %s and %s", a.Type().Name(), b.Type().Name()))
}
// DIV implements '/' for integer and float operands; other types panic.
// Integer division by zero propagates Go's runtime panic.
func (self *_operators) DIV(a, b Value) Value {
	switch {
	case types.AssertMatch(types.INTEGER, a.Type(), b.Type()):
		return Integer(a.Int64() / b.Int64())
	case types.AssertMatch(types.FLOAT, a.Type(), b.Type()):
		return Float(a.Float64() / b.Float64())
	}
	panic(fmt.Sprintf("Can not divide %s with %s", a.Type().Name(), b.Type().Name()))
}
// MOD implements '%'. Integer operands use Go's integer remainder; a float
// dividend with an integer divisor uses math.Mod. Other combinations panic.
//
// Bug fix: both branches previously performed division ('/'), returning the
// quotient instead of the remainder.
func (self *_operators) MOD(a, b Value) Value {
	if types.AssertMatch(types.INTEGER, a.Type(), b.Type()) {
		return Integer(a.Int64() % b.Int64())
	}
	if types.AssertMatch(types.FLOAT, a.Type()) && types.AssertMatch(types.INTEGER, b.Type()) {
		return Float(math.Mod(a.Float64(), b.Float64()))
	}
	panic(fmt.Sprintf("Can not modulo %s with %s", a.Type().Name(), b.Type().Name()))
}
// AND implements bitwise '&' for integer operands; other types panic.
func (self *_operators) AND(a, b Value) Value {
	if !types.AssertMatch(types.INTEGER, a.Type(), b.Type()) {
		panic(fmt.Sprintf("Can not apply binary operators on %s and %s", a.Type().Name(), b.Type().Name()))
	}
	return Integer(a.Int64() & b.Int64())
}
// OR implements bitwise '|' for integer operands; other types panic.
func (self *_operators) OR(a, b Value) Value {
	if !types.AssertMatch(types.INTEGER, a.Type(), b.Type()) {
		panic(fmt.Sprintf("Can not apply binary operators on %s and %s", a.Type().Name(), b.Type().Name()))
	}
	return Integer(a.Int64() | b.Int64())
}
// XOR implements bitwise '^' for integer operands; other types panic.
func (self *_operators) XOR(a, b Value) Value {
	if !types.AssertMatch(types.INTEGER, a.Type(), b.Type()) {
		panic(fmt.Sprintf("Can not apply binary operators on %s and %s", a.Type().Name(), b.Type().Name()))
	}
	return Integer(a.Int64() ^ b.Int64())
}
// LEFT_SHIFT implements '<<' for integer operands; other types panic.
func (self *_operators) LEFT_SHIFT(a, b Value) Value {
	if !types.AssertMatch(types.INTEGER, a.Type(), b.Type()) {
		panic(fmt.Sprintf("Can not apply binary operators on %s and %s", a.Type().Name(), b.Type().Name()))
	}
	return Integer(a.Int64() << uint(b.Int64()))
}
// RIGHT_SHIFT implements '>>' for integer operands; other types panic.
func (self *_operators) RIGHT_SHIFT(a, b Value) Value {
	if !types.AssertMatch(types.INTEGER, a.Type(), b.Type()) {
		panic(fmt.Sprintf("Can not apply binary operators on %s and %s", a.Type().Name(), b.Type().Name()))
	}
	return Integer(a.Int64() >> uint(b.Int64()))
}
// AND_NOT implements '&^' (bit clear) for integer operands; other types panic.
func (self *_operators) AND_NOT(a, b Value) Value {
	if !types.AssertMatch(types.INTEGER, a.Type(), b.Type()) {
		panic(fmt.Sprintf("Can not apply binary operators on %s and %s", a.Type().Name(), b.Type().Name()))
	}
	return Integer(a.Int64() &^ b.Int64())
}
// EQUAL implements '==' for integer, float, boolean and string operands;
// other types panic.
func (self *_operators) EQUAL(a, b Value) Value {
	switch {
	case types.AssertMatch(types.INTEGER, a.Type(), b.Type()):
		return Boolean(a.Int64() == b.Int64())
	case types.AssertMatch(types.FLOAT, a.Type(), b.Type()):
		return Boolean(a.Float64() == b.Float64())
	case types.AssertMatch(types.BOOLEAN, a.Type(), b.Type()):
		return Boolean(a.Bool() == b.Bool())
	case types.AssertMatch(types.STRING, a.Type(), b.Type()):
		return Boolean(strings.Compare(a.String(), b.String()) == 0)
	}
	panic(fmt.Sprintf("Can not apply EQUAL operator on %s and %s", a.Type().Name(), b.Type().Name()))
}
// NOT_EQUAL implements '!=' for integer, float, boolean and string operands;
// other types panic.
func (self *_operators) NOT_EQUAL(a, b Value) Value {
	switch {
	case types.AssertMatch(types.INTEGER, a.Type(), b.Type()):
		return Boolean(a.Int64() != b.Int64())
	case types.AssertMatch(types.FLOAT, a.Type(), b.Type()):
		return Boolean(a.Float64() != b.Float64())
	case types.AssertMatch(types.BOOLEAN, a.Type(), b.Type()):
		return Boolean(a.Bool() != b.Bool())
	case types.AssertMatch(types.STRING, a.Type(), b.Type()):
		return Boolean(strings.Compare(a.String(), b.String()) != 0)
	}
	panic(fmt.Sprintf("Can not apply NOT_EQUAL operators on %s and %s", a.Type().Name(), b.Type().Name()))
}
// GREATER implements '>' for integer, float and string operands (strings
// compare lexicographically); other types panic.
func (self *_operators) GREATER(a, b Value) Value {
	switch {
	case types.AssertMatch(types.INTEGER, a.Type(), b.Type()):
		return Boolean(a.Int64() > b.Int64())
	case types.AssertMatch(types.FLOAT, a.Type(), b.Type()):
		return Boolean(a.Float64() > b.Float64())
	case types.AssertMatch(types.STRING, a.Type(), b.Type()):
		return Boolean(strings.Compare(a.String(), b.String()) > 0)
	}
	panic(fmt.Sprintf("Can not apply GREATER operators on %s and %s", a.Type().Name(), b.Type().Name()))
}
// LESS implements '<' for integer, float and string operands (strings compare
// lexicographically); other types panic.
func (self *_operators) LESS(a, b Value) Value {
	switch {
	case types.AssertMatch(types.INTEGER, a.Type(), b.Type()):
		return Boolean(a.Int64() < b.Int64())
	case types.AssertMatch(types.FLOAT, a.Type(), b.Type()):
		return Boolean(a.Float64() < b.Float64())
	case types.AssertMatch(types.STRING, a.Type(), b.Type()):
		return Boolean(strings.Compare(a.String(), b.String()) < 0)
	}
	panic(fmt.Sprintf("Can not apply LESS operators on %s and %s", a.Type().Name(), b.Type().Name()))
}
// GEQ implements '>=' for integer, float and string operands; other types
// panic.
func (self *_operators) GEQ(a, b Value) Value {
	switch {
	case types.AssertMatch(types.INTEGER, a.Type(), b.Type()):
		return Boolean(a.Int64() >= b.Int64())
	case types.AssertMatch(types.FLOAT, a.Type(), b.Type()):
		return Boolean(a.Float64() >= b.Float64())
	case types.AssertMatch(types.STRING, a.Type(), b.Type()):
		return Boolean(strings.Compare(a.String(), b.String()) >= 0)
	}
	panic(fmt.Sprintf("Can not apply GEQ operators on %s and %s", a.Type().Name(), b.Type().Name()))
}
// LEQ implements '<=' for integer, float and string operands; other types
// panic.
func (self *_operators) LEQ(a, b Value) Value {
	switch {
	case types.AssertMatch(types.INTEGER, a.Type(), b.Type()):
		return Boolean(a.Int64() <= b.Int64())
	case types.AssertMatch(types.FLOAT, a.Type(), b.Type()):
		return Boolean(a.Float64() <= b.Float64())
	case types.AssertMatch(types.STRING, a.Type(), b.Type()):
		return Boolean(strings.Compare(a.String(), b.String()) <= 0)
	}
	panic(fmt.Sprintf("Can not apply LEQ operators on %s and %s", a.Type().Name(), b.Type().Name()))
}
// BOOL_AND implements '&&' for boolean operands; other types panic.
// Note both operands are already evaluated, so there is no short-circuiting.
func (self *_operators) BOOL_AND(a, b Value) Value {
	if !types.AssertMatch(types.BOOLEAN, a.Type(), b.Type()) {
		panic(fmt.Sprintf("Can not apply BOOL_AND operators on %s and %s", a.Type().Name(), b.Type().Name()))
	}
	return Boolean(a.Bool() && b.Bool())
}
// BOOL_OR implements '||' for boolean operands; other types panic.
// Note both operands are already evaluated, so there is no short-circuiting.
func (self *_operators) BOOL_OR(a, b Value) Value {
	if !types.AssertMatch(types.BOOLEAN, a.Type(), b.Type()) {
		panic(fmt.Sprintf("Can not apply BOOL_OR operators on %s and %s", a.Type().Name(), b.Type().Name()))
	}
	return Boolean(a.Bool() || b.Bool())
}
// INC implements '++', returning the operand increased by one for integer and
// float operands; other types panic.
func (self *_operators) INC(a Value) Value {
	switch {
	case types.AssertMatch(types.INTEGER, a.Type()):
		return Integer(a.Int64() + 1)
	case types.AssertMatch(types.FLOAT, a.Type()):
		return Float(a.Float64() + 1)
	}
	panic(fmt.Sprintf("Can not apply INC operators on %s", a.Type().Name()))
}
// DEC implements '--', returning the operand decreased by one for integer and
// float operands; other types panic.
func (self *_operators) DEC(a Value) Value {
	switch {
	case types.AssertMatch(types.INTEGER, a.Type()):
		return Integer(a.Int64() - 1)
	case types.AssertMatch(types.FLOAT, a.Type()):
		return Float(a.Float64() - 1)
	}
	panic(fmt.Sprintf("Can not apply DEC operators on %s", a.Type().Name()))
}
// INV implements unary '^' (bitwise complement) for integer operands; other
// types panic.
func (self *_operators) INV(a Value) Value {
	if !types.AssertMatch(types.INTEGER, a.Type()) {
		panic(fmt.Sprintf("Can not apply INV operators on %s", a.Type().Name()))
	}
	return Integer(^a.Int64())
}
// BOOL_NOT implements '!' for boolean operands; other types panic.
func (self *_operators) BOOL_NOT(a Value) Value {
	if !types.AssertMatch(types.BOOLEAN, a.Type()) {
		panic(fmt.Sprintf("Can not apply BOOL_NOT operators on %s", a.Type().Name()))
	}
	return Boolean(!a.Bool())
}
// NEGATIVE implements unary '-' for integer and float operands; other types
// panic.
func (self *_operators) NEGATIVE(a Value) Value {
	if types.AssertMatch(types.INTEGER, a.Type()) {
		return Integer(-a.Int64())
	}
	if types.AssertMatch(types.FLOAT, a.Type()) {
		return Float(-a.Float64())
	}
	// Fixed copy-paste error: the panic message previously named BOOL_NOT.
	panic(fmt.Sprintf("Can not apply NEGATIVE operators on %s", a.Type().Name()))
}
func (self *_operators) POSITIVE(a Value) Value {
if types.AssertMatch(types.BOOLEAN, a.Type()) {
return Integer(+a.Int64())
}
if types.AssertMatch(types.FLOAT, a.Type()) {
return Float(-a.Float64())
}
panic(fmt.Sprintf("Can not apply BOOL_NOT operators on %s", a.Type().Name()))
} | values/operators.go | 0.653238 | 0.425187 | operators.go | starcoder |
package waktu
import (
"time"
)
// Time wraps time.Time to provide Jakarta-local helpers and chainable
// date/time mutation methods.
type Time struct {
	time.Time
}
// LastDay returns the last day of the current month in the Asia/Jakarta
// timezone; see Time.LastDay for the exact clock-time semantics.
func LastDay() Time {
	return Now().LastDay()
}
// Now returns the current time in the Asia/Jakarta timezone. If the timezone
// database is unavailable it falls back to a fixed UTC+7 zone (WIB) instead
// of calling In with a nil *time.Location, which would panic.
func Now() Time {
	loc, err := time.LoadLocation("Asia/Jakarta")
	if err != nil {
		loc = time.FixedZone("WIB", 7*60*60)
	}
	return Time{time.Now().In(loc)}
}
// Parse parses a formatted string into a Time using the reference layout,
// mirroring time.Parse. The error from time.Parse is returned unchanged.
func Parse(layout, value string) (Time, error) {
	parsed, err := time.Parse(layout, value)
	return Time{parsed}, err
}
// Date constructs a Time from its components, mirroring time.Date. A nil
// location defaults to UTC.
func Date(year int, month Month, day, hour, min, sec, nsec int, loc *time.Location) Time {
	if loc == nil {
		loc = time.UTC
	}
	inner := time.Date(year, time.Month(month), day, hour, min, sec, nsec, loc)
	return Time{inner}
}
// AddDate returns t shifted by the given number of years, months and days,
// mirroring time.Time.AddDate while preserving the Time wrapper.
func (t Time) AddDate(years int, months int, days int) Time {
	return Time{t.Time.AddDate(years, months, days)}
}
// Add returns t shifted by duration d, mirroring time.Time.Add while
// preserving the Time wrapper.
func (t Time) Add(d time.Duration) Time {
	return Time{t.Time.Add(d)}
}
// ResetTime zeroes the clock in UTC and converts back to t's location, so the
// local clock shows the UTC offset, ex: 1993-09-10 07:00:00 +0700 for GMT+7.
func (t Time) ResetTime() Time {
	return Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, time.UTC).In(t.Location())
}
// ResetTimeLocal returns midnight of t's date in t's own location,
// ex: 1993-09-10 00:00:00 +0700 for GMT+7.
func (t Time) ResetTimeLocal() Time {
	return Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, t.Location())
}
// SetDate returns a copy of t with the day-of-month replaced. The argument
// must hold an int; any other dynamic type panics on the type assertion.
func (t Time) SetDate(date interface{}) Time {
	return Date(t.Year(), t.Month(), date.(int), t.Hour(), t.Minute(), t.Second(), t.Nanosecond(), t.Location())
}
// GetDate formats t's date, using the first supplied format or defaulting to
// the YYMMDD layout (ex: "930910").
func (t Time) GetDate(format ...string) string {
	if len(format) > 0 {
		return t.Format(format[0])
	}
	return t.Format(YYMMDD)
}
// GetDateUTC formats t's date after converting to UTC, using the first
// supplied format or defaulting to the YYMMDD layout (ex: "930910").
func (t Time) GetDateUTC(format ...string) string {
	if len(format) > 0 {
		return t.In(time.UTC).Format(format[0])
	}
	return t.In(time.UTC).Format(YYMMDD)
}
// SetHour returns a copy of t with the hour replaced. The argument must hold
// an int; any other dynamic type panics on the type assertion.
func (t Time) SetHour(hour interface{}) Time {
	return Date(t.Year(), t.Month(), t.Day(), hour.(int), t.Minute(), t.Second(), t.Nanosecond(), t.Location())
}
// SetMinute returns a copy of t with the minute replaced. The argument must
// hold an int; any other dynamic type panics on the type assertion.
func (t Time) SetMinute(minute interface{}) Time {
	return Date(t.Year(), t.Month(), t.Day(), t.Hour(), minute.(int), t.Second(), t.Nanosecond(), t.Location())
}
// SetSecond returns a copy of t with the second replaced. The argument must
// hold an int; any other dynamic type panics on the type assertion.
func (t Time) SetSecond(second interface{}) Time {
	return Date(t.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), second.(int), t.Nanosecond(), t.Location())
}
// SetNanosecond returns a copy of t with the nanosecond replaced. The
// argument must hold an int; any other dynamic type panics on the assertion.
func (t Time) SetNanosecond(nanosecond interface{}) Time {
	return Date(t.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second(), nanosecond.(int), t.Location())
}
// In returns t with its location set to loc, mirroring time.Time.In while
// preserving the Time wrapper.
func (t Time) In(loc *time.Location) Time {
	return Time{
		t.Time.In(loc),
	}
}
// GetTime formats t's clock time, using the first supplied format or
// defaulting to the HHMMSS layout (ex: "133700").
func (t Time) GetTime(format ...string) string {
	if len(format) > 0 {
		return t.Format(format[0])
	}
	return t.Format(HHMMSS)
}
// LastDay returns the last day of t's month: the first day of the next month
// minus 24 hours and one nanosecond, so the clock time is t's own minus 1ns,
// ex: 1993-09-30 13:37:11.00000001 +0700 for GMT+7.
func (t Time) LastDay() Time {
	var twentyFour time.Duration = 24
	day := twentyFour * time.Hour
	return t.SetDate(1).AddDate(0, 1, 0).Add(-day - time.Nanosecond)
}
package levenshtein2
import (
"crypto/md5"
"encoding/json"
"log"
"math"
)
// ParametricState identifies a state of the parametric Levenshtein DFA: a
// shape (equivalence class of NFA multi-states) plus an offset into the query.
type ParametricState struct {
	shapeID uint32
	offset  uint32
}
// newParametricState returns the zero state (shapeID 0), which is the single
// dead-end state.
func newParametricState() ParametricState {
	return ParametricState{}
}
// isDeadEnd reports whether ps is the dead state; shape ID 0 is reserved for
// it.
func (ps *ParametricState) isDeadEnd() bool {
	return ps.shapeID == 0
}
// Transition describes a parametric DFA edge: the destination shape and how
// far the query offset advances.
type Transition struct {
	destShapeID uint32
	deltaOffset uint32
}
// apply advances state through this transition, producing the destination
// parametric state.
func (t *Transition) apply(state ParametricState) ParametricState {
	ps := ParametricState{
		shapeID: t.destShapeID}
	// don't need any offset if we are in the dead state,
	// this ensures we have only one dead state.
	if t.destShapeID != 0 {
		ps.offset = state.offset + t.deltaOffset
	}
	return ps
}
// ParametricStateIndex maps (shapeID, offset) pairs to densely numbered state
// IDs while enumerating the states reachable for a particular query.
type ParametricStateIndex struct {
	stateIndex []uint32          // bucket -> dense state ID, MaxUint32 = unallocated
	stateQueue []ParametricState // dense state ID -> parametric state
	numOffsets uint32            // query length + 1
}
// newParametricStateIndex creates an index for a query of the given length.
// A numParamState of zero defaults to one parametric state per offset.
func newParametricStateIndex(queryLen,
	numParamState uint32) ParametricStateIndex {
	numOffsets := queryLen + 1
	if numParamState == 0 {
		numParamState = numOffsets
	}
	maxNumStates := numParamState * numOffsets
	psi := ParametricStateIndex{
		stateIndex: make([]uint32, maxNumStates),
		stateQueue: make([]ParametricState, 0, 150),
		numOffsets: numOffsets,
	}
	// math.MaxUint32 marks a bucket as not yet allocated.
	for i := uint32(0); i < maxNumStates; i++ {
		psi.stateIndex[i] = math.MaxUint32
	}
	return psi
}
// numStates returns how many states have been allocated so far.
func (psi *ParametricStateIndex) numStates() int {
	return len(psi.stateQueue)
}
// maxNumStates returns the upper bound on allocatable states (bucket count).
func (psi *ParametricStateIndex) maxNumStates() int {
	return len(psi.stateIndex)
}
// get returns the parametric state registered under the given dense state ID.
func (psi *ParametricStateIndex) get(stateID uint32) ParametricState {
	return psi.stateQueue[stateID]
}
// getOrAllocate returns the dense state ID for ps, assigning the next free ID
// the first time a given (shapeID, offset) pair is seen. Buckets are indexed
// as shapeID*numOffsets+offset.
func (psi *ParametricStateIndex) getOrAllocate(ps ParametricState) uint32 {
	bucket := ps.shapeID*psi.numOffsets + ps.offset
	if bucket < uint32(len(psi.stateIndex)) &&
		psi.stateIndex[bucket] != math.MaxUint32 {
		return psi.stateIndex[bucket]
	}
	nState := uint32(len(psi.stateQueue))
	psi.stateQueue = append(psi.stateQueue, ps)
	psi.stateIndex[bucket] = nState
	return nState
}
// ParametricDFA is a query-independent Levenshtein automaton; specialise it
// for a concrete query with buildDfa, or run it directly via computeDistance.
type ParametricDFA struct {
	distance         []uint8       // distance table, diameter entries per shape
	transitions      []Transition  // transition table, transitionStride entries per shape
	maxDistance      uint8         // maximum edit distance encoded
	transitionStride uint32        // number of characteristic-vector values per shape
	diameter         uint32        // width of the query window each shape covers
}
// initialState returns the automaton's start state (shape 1, offset 0);
// shape 0 is the dead state.
func (pdfa *ParametricDFA) initialState() ParametricState {
	return ParametricState{shapeID: 1}
}
// isPrefixSink returns true iff whatever characters come afterward, we will
// never reach a shorter distance from this state — i.e. the state can be
// treated as an accepting sink when matching prefixes.
func (pdfa *ParametricDFA) isPrefixSink(state ParametricState, queryLen uint32) bool {
	if state.isDeadEnd() {
		return true
	}
	remOffset := queryLen - state.offset
	if remOffset < pdfa.diameter {
		stateDistances := pdfa.distance[pdfa.diameter*state.shapeID:]
		prefixDistance := stateDistances[remOffset]
		if prefixDistance > pdfa.maxDistance {
			return false
		}
		// A sink requires that no reachable distance improves on the prefix
		// distance.
		for _, d := range stateDistances {
			if d < prefixDistance {
				return false
			}
		}
		return true
	}
	return false
}
// numStates returns the number of shapes encoded in the transition table.
func (pdfa *ParametricDFA) numStates() int {
	return len(pdfa.transitions) / int(pdfa.transitionStride)
}
// min returns the smaller of two uint32 values.
func min(x, y uint32) uint32 {
	if y < x {
		return y
	}
	return x
}
// transition looks up the edge leaving state for characteristic-vector value
// chi.
func (pdfa *ParametricDFA) transition(state ParametricState,
	chi uint32) Transition {
	return pdfa.transitions[pdfa.transitionStride*state.shapeID+chi]
}
// getDistance reports the edit distance encoded by state for a query of
// length qLen: Exact when within maxDistance, otherwise an Atleast lower
// bound.
func (pdfa *ParametricDFA) getDistance(state ParametricState,
	qLen uint32) Distance {
	remainingOffset := qLen - state.offset
	if state.isDeadEnd() || remainingOffset >= pdfa.diameter {
		return Atleast{d: pdfa.maxDistance + 1}
	}
	dist := pdfa.distance[int(pdfa.diameter*state.shapeID)+int(remainingOffset)]
	if dist > pdfa.maxDistance {
		return Atleast{d: dist}
	}
	return Exact{d: dist}
}
// computeDistance runs the parametric DFA directly over two strings,
// returning the Levenshtein distance (or Atleast maxDistance+1 once the
// automaton dead-ends).
func (pdfa *ParametricDFA) computeDistance(left, right string) Distance {
	state := pdfa.initialState()
	leftChars := []rune(left)
	for _, chr := range []rune(right) {
		// Characteristic vector of the query window visible from this state.
		start := state.offset
		stop := min(start+pdfa.diameter, uint32(len(leftChars)))
		chi := characteristicVector(leftChars[start:stop], chr)
		transistion := pdfa.transition(state, uint32(chi))
		state = transistion.apply(state)
		if state.isDeadEnd() {
			return Atleast{d: pdfa.maxDistance + 1}
		}
	}
	// NOTE(review): len(left) is the byte length while the loop above uses the
	// rune length; these differ for non-ASCII input — confirm intended.
	return pdfa.getDistance(state, uint32(len(left)))
}
// buildDfa specialises the parametric DFA for a concrete query string,
// producing an explicit DFA accepting words within the given edit distance.
// When prefix is true, states from which the distance can no longer improve
// become self-looping sink states. Returns nil if state allocation does not
// proceed as expected or a state cannot be added.
func (pdfa *ParametricDFA) buildDfa(query string, distance uint8, prefix bool) *DFA {
	qLen := uint32(len([]rune(query)))
	alphabet := queryChars(query)
	psi := newParametricStateIndex(qLen, uint32(pdfa.numStates()))
	maxNumStates := psi.maxNumStates()
	// The dead state must be allocated first so it receives dense ID 0.
	deadEndStateID := psi.getOrAllocate(newParametricState())
	if deadEndStateID != 0 {
		return nil
	}
	initialStateID := psi.getOrAllocate(pdfa.initialState())
	dfaBuilder := withMaxStates(uint32(maxNumStates))
	mask := uint32((1 << pdfa.diameter) - 1)
	// Breadth-first over states as they are discovered; psi.numStates() grows
	// while we iterate.
	for stateID := 0; stateID < math.MaxUint32; stateID++ {
		if stateID == psi.numStates() {
			break
		}
		state := psi.get(uint32(stateID))
		if prefix && pdfa.isPrefixSink(state, qLen) {
			// Self-loop: distance can no longer improve from here.
			distance := pdfa.getDistance(state, qLen)
			dfaBuilder.addState(uint32(stateID), uint32(stateID), distance)
		} else {
			// Default successor covers every character absent from the query.
			transition := pdfa.transition(state, 0)
			defSuccessor := transition.apply(state)
			defSuccessorID := psi.getOrAllocate(defSuccessor)
			distance := pdfa.getDistance(state, qLen)
			stateBuilder, err := dfaBuilder.addState(uint32(stateID), defSuccessorID, distance)
			if err != nil {
				log.Panicf("parametric_dfa: buildDfa, err: %v", err)
				return nil
			}
			// Explicit transitions for characters that occur in the query.
			alphabet.resetNext()
			chr, cv, err := alphabet.next()
			for err == nil {
				chi := cv.shiftAndMask(state.offset, mask)
				transition := pdfa.transition(state, chi)
				destState := transition.apply(state)
				destStateID := psi.getOrAllocate(destState)
				stateBuilder.addTransition(chr, destStateID)
				chr, cv, err = alphabet.next()
			}
		}
	}
	dfaBuilder.setInitialState(initialStateID)
	return dfaBuilder.build(distance)
}
// fromNfa converts a Levenshtein NFA into its parametric DFA representation
// by enumerating normalized multi-states (shapes) and tabulating, for every
// shape and characteristic-vector value, the destination shape, offset delta
// and per-offset distances.
func fromNfa(nfa *LevenshteinNFA) *ParametricDFA {
	lookUp := newHash()
	// Reserve ID 0 for the empty (dead) multi-state and ID 1 for the initial
	// multi-state.
	lookUp.getOrAllocate(*newMultiState())
	initialState := nfa.initialStates()
	lookUp.getOrAllocate(*initialState)
	maxDistance := nfa.maxDistance()
	msDiameter := nfa.msDiameter()
	numChi := 1 << msDiameter
	chiValues := make([]uint64, numChi)
	for i := 0; i < numChi; i++ {
		chiValues[i] = uint64(i)
	}
	transitions := make([]Transition, 0, numChi*int(msDiameter))
	// Discover shapes breadth-first; lookUp.items grows while we iterate.
	for stateID := 0; stateID < math.MaxUint32; stateID++ {
		if stateID == len(lookUp.items) {
			break
		}
		for _, chi := range chiValues {
			destMs := newMultiState()
			ms := lookUp.getFromID(stateID)
			nfa.transition(ms, destMs, chi)
			// Normalizing shifts the multi-state to a canonical offset and
			// returns the shift, which becomes the transition's offset delta.
			translation := destMs.normalize()
			destID := lookUp.getOrAllocate(*destMs)
			transitions = append(transitions, Transition{
				destShapeID: uint32(destID),
				deltaOffset: translation,
			})
		}
	}
	ns := len(lookUp.items)
	diameter := int(msDiameter)
	distances := make([]uint8, 0, diameter*ns)
	for stateID := 0; stateID < ns; stateID++ {
		ms := lookUp.getFromID(stateID)
		for offset := 0; offset < diameter; offset++ {
			dist := nfa.multistateDistance(ms, uint32(offset))
			distances = append(distances, dist.distance())
		}
	}
	return &ParametricDFA{
		diameter:         uint32(msDiameter),
		transitions:      transitions,
		maxDistance:      maxDistance,
		transitionStride: uint32(numChi),
		distance:         distances,
	}
}
// hash deduplicates MultiStates, assigning each distinct one a dense ID.
type hash struct {
	index map[[16]byte]int // MD5 digest -> dense ID
	items []MultiState     // dense ID -> multi-state
}
// newHash returns an empty hash with capacity pre-sized for typical use.
func newHash() *hash {
	return &hash{
		index: make(map[[16]byte]int, 100),
		items: make([]MultiState, 0, 100),
	}
}
// getOrAllocate returns the dense ID of m, inserting it first when it has not
// been seen before.
func (h *hash) getOrAllocate(m MultiState) int {
	// Renamed the local from 'md5', which shadowed the crypto/md5 package.
	key := getHash(&m)
	if pos, ok := h.index[key]; ok {
		return pos
	}
	pos := len(h.items)
	h.index[key] = pos
	h.items = append(h.items, m)
	return pos
}
// getFromID returns the multi-state registered under the given dense ID.
func (h *hash) getFromID(id int) *MultiState {
	return &h.items[id]
}
// getHash computes an MD5 digest over the concatenated JSON encodings of the
// multi-state's states, for use as a deduplication map key.
func getHash(ms *MultiState) [16]byte {
	msBytes := []byte{}
	for _, state := range ms.states {
		// Marshal errors are discarded; NOTE(review): assumes states always
		// encode cleanly to JSON — confirm MultiState field types.
		jsonBytes, _ := json.Marshal(&state)
		msBytes = append(msBytes, jsonBytes...)
	}
	return md5.Sum(msBytes)
}
package main
import (
"image"
"image/color"
"github.com/disintegration/imaging"
"github.com/gdamore/tcell"
)
// subimage is satisfied by image types that can expose a shared view of a
// sub-rectangle, e.g. *image.RGBA.
type subimage interface {
	SubImage(r image.Rectangle) image.Image
}
// mapper scales a pannable/zoomable window of a source image down to terminal
// cell resolution, packing two vertical pixels per character row.
type mapper struct {
	img    image.Image     // Original image
	width  int             // Width of terminal window
	height int             // Twice the height of terminal window (two pixels per cell)
	window image.Rectangle // Window into the original image
	scaled image.Image     // Scaled copy of the windowed region
	gray   bool            // Want grayscale output
}
// newMapper builds a mapper over img sized for a terminal of width columns
// and height half-rows, optionally grayscale, and performs the initial scale.
func newMapper(img image.Image, width, height int, gray bool) *mapper {
	result := &mapper{
		img:    img,
		width:  width,
		height: height,
		window: img.Bounds(),
		gray:   gray,
	}
	result.Sync()
	return result
}
// ColorModel returns the color model of the original image, satisfying
// image.Image.
func (m mapper) ColorModel() color.Model {
	return m.img.ColorModel()
}
// Bounds returns the viewport rectangle in terminal-pixel coordinates,
// satisfying image.Image.
func (m mapper) Bounds() image.Rectangle {
	return image.Rect(0, 0, m.width, m.height)
}
// At returns the color of the scaled image at (x, y), or the zero color for
// coordinates outside the mapper's bounds, satisfying image.Image.
//
// Bug fix: the previous guard compared the linearised index y*width+x against
// width*height, which fails to reject x >= width (and negative coordinates)
// whenever the linear index still lands inside the total pixel count.
func (m mapper) At(x, y int) color.Color {
	if x < 0 || y < 0 || x >= m.width || y >= m.height {
		return color.RGBA{}
	}
	return m.scaled.At(x, y)
}
// SetSize updates the viewport dimensions. The scaled image is refreshed by
// the next Sync (called from DrawTo) rather than here.
func (m *mapper) SetSize(width, height int) {
	m.width = width
	m.height = height
	// m.Sync()
}
// Sync re-crops the original image to the current view window (when the image
// type supports SubImage) and rescales it to the viewport, applying grayscale
// when requested.
func (m *mapper) Sync() {
	img := m.img
	si, ok := img.(subimage)
	if ok {
		img = si.SubImage(m.window)
	}
	m.scaled = imaging.Fit(img, m.width, m.height, imaging.Box)
	if m.gray {
		m.scaled = imaging.Grayscale(m.scaled)
	}
}
// DrawTo renders the scaled image onto the terminal screen, packing two
// vertical pixels into each character cell: the upper pixel becomes the cell
// background and the lower pixel the foreground of the '▄' half-block rune.
func (m mapper) DrawTo(s tcell.Screen) {
	m.Sync()
	for r := 0; r < m.height; r += 2 {
		for c := 0; c < m.width; c++ {
			red, green, blue, _ := m.scaled.At(c, r).RGBA()
			bg := tcell.NewRGBColor(int32(red), int32(green), int32(blue))
			red, green, blue, _ = m.scaled.At(c, r+1).RGBA()
			fg := tcell.NewRGBColor(int32(red), int32(green), int32(blue))
			rn := '▄'
			if fg == bg {
				// Both halves share a color, so a plain space suffices.
				rn = ' '
			}
			s.SetCell(c, r/2, tcell.StyleDefault.Foreground(fg).Background(bg), rn)
		}
	}
}
// ResetZoom restores the view window to the full image bounds.
func (m *mapper) ResetZoom() {
	m.window = m.img.Bounds()
	// m.Sync()
}
// ZoomIn shrinks the view window by one step on every side, then grows the
// shorter axis so the window keeps the viewport's aspect ratio, clamped to
// the image bounds. The zoom is rejected when the result would be smaller
// than two steps in either dimension.
func (m *mapper) ZoomIn() {
	sz := m.sz()
	r := m.window.Inset(sz)
	viewportAspect := float64(m.width) / float64(m.height)
	aspect := float64(r.Dx()) / float64(r.Dy())
	if aspect < viewportAspect {
		// Window is too narrow: widen symmetrically to restore the aspect.
		d := int(float64(r.Dy())*viewportAspect) - r.Dx()
		r.Min.X -= d / 2
		r.Max.X += d / 2
	} else if aspect > viewportAspect {
		// Window is too wide: grow vertically instead.
		d := int(float64(r.Dx())/viewportAspect) - r.Dy()
		r.Min.Y -= d / 2
		r.Max.Y += d / 2
	}
	r = r.Intersect(m.img.Bounds())
	if r.Dx() >= sz*2 && r.Dy() >= sz*2 {
		m.window = r
		// m.Sync()
	}
}
// ZoomOut grows the view window by one step on every side, corrects the
// aspect ratio to match the viewport, and clamps the result to the image
// bounds. Unlike ZoomIn there is no minimum-size rejection.
func (m *mapper) ZoomOut() {
	sz := m.sz()
	r := m.window.Inset(-sz)
	viewportAspect := float64(m.width) / float64(m.height)
	aspect := float64(r.Dx()) / float64(r.Dy())
	if aspect < viewportAspect {
		// Window is too narrow: widen symmetrically to restore the aspect.
		d := int(float64(r.Dy())*viewportAspect) - r.Dx()
		r.Min.X -= d / 2
		r.Max.X += d / 2
	} else if aspect > viewportAspect {
		// Window is too wide: grow vertically instead.
		d := int(float64(r.Dx())/viewportAspect) - r.Dy()
		r.Min.Y -= d / 2
		r.Max.Y += d / 2
	}
	r = r.Intersect(m.img.Bounds())
	m.window = r
	// m.Sync()
}
// Left pans the view window one step left when the result stays inside the
// source image.
func (m *mapper) Left() {
	shifted := m.window.Sub(image.Point{X: m.sz(), Y: 0})
	if shifted.In(m.img.Bounds()) {
		m.window = shifted
	}
}
// Right pans the view window one step right when the result stays inside the
// source image.
func (m *mapper) Right() {
	shifted := m.window.Add(image.Point{X: m.sz(), Y: 0})
	if shifted.In(m.img.Bounds()) {
		m.window = shifted
	}
}
// Up pans the view window one step up when the result stays inside the source
// image.
func (m *mapper) Up() {
	shifted := m.window.Sub(image.Point{X: 0, Y: m.sz()})
	if shifted.In(m.img.Bounds()) {
		m.window = shifted
	}
}
// Down pans the view window one step down when the result stays inside the
// source image.
func (m *mapper) Down() {
	shifted := m.window.Add(image.Point{X: 0, Y: m.sz()})
	if shifted.In(m.img.Bounds()) {
		m.window = shifted
	}
}
func (m mapper) sz() int {
szx := m.img.Bounds().Dx() / m.width
szy := m.img.Bounds().Dy() / m.height
// the larger value is the edge that was fit
if szx > szy {
return szx
}
return szy
} | mapper.go | 0.620966 | 0.445831 | mapper.go | starcoder |
package ionhash
import (
"math"
"testing"
"github.com/amzn/ion-go/ion"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// compareReaders advances both Ion readers in lockstep, asserting at each
// step that types, field names (when inside a struct), annotations, null-ness
// and values match. Container values are compared recursively; reader2 must
// be a *hashReader.
func compareReaders(t *testing.T, reader1, reader2 ion.Reader) {
	for hasNext(t, reader1, reader2) {
		type1 := reader1.Type()
		type2 := reader2.Type()
		require.Equal(t, type1.String(), type2.String(), "Ion Types did not match")

		if type1 == ion.NoType {
			break
		}

		ionHashReader, ok := reader2.(*hashReader)
		require.True(t, ok, "Expected reader2 to be of type hashReader")

		// Field names only exist while positioned inside a struct.
		if ionHashReader.IsInStruct() {
			compareFieldNames(t, reader1, reader2)
		}

		compareAnnotations(t, reader1, reader2)

		isNull1 := reader1.IsNull()
		isNull2 := reader2.IsNull()
		require.Equal(t, isNull1, isNull2, "Expected readers to have matching IsNull() values")

		if type1 == ion.NullType {
			assert.True(t, isNull1, "Expected reader1.IsNull() to return true")
			assert.True(t, isNull2, "Expected reader2.IsNull() to return true")
		} else if ion.IsScalar(type1) {
			compareScalars(t, type1, reader1, reader2)
		} else if ion.IsContainer(type1) {
			if !isNull1 {
				// Recurse into non-null containers.
				assert.NoError(t, reader1.StepIn(), "Something went wrong executing reader1.StepIn()")
				assert.NoError(t, reader2.StepIn(), "Something went wrong executing reader2.StepIn()")

				compareReaders(t, reader1, reader2)

				assert.NoError(t, reader1.StepOut(), "Something went wrong executing reader1.StepOut()")
				assert.NoError(t, reader2.StepOut(), "Something went wrong executing reader2.StepOut()")
			}
		} else {
			t.Error(&InvalidIonTypeError{type1})
		}
	}

	assert.False(t, hasNext(t, reader1, reader2), "Expected hasNext() to return false")
}
// hasNext advances both readers and checks they agree on whether a next value
// exists; when a reader reports no next value it also asserts that this was
// not caused by a reader error.
func hasNext(t *testing.T, reader1, reader2 ion.Reader) bool {
	next1 := reader1.Next()
	next2 := reader2.Next()

	assert.Equal(t, next1, next2, "next results don't match")

	if !next1 {
		assert.NoError(t, reader1.Err(), "Something went wrong executing reader1.next()")
	}

	if !next2 {
		assert.NoError(t, reader2.Err(), "Something went wrong executing reader2.next()")
	}

	return next1 && next2
}
// compareFieldNames asserts that both readers report equal field-name symbol
// tokens for their current value.
func compareFieldNames(t *testing.T, reader1, reader2 ion.Reader) {
	token1, err := reader1.FieldName()
	require.NoError(t, err, "Something went wrong executing reader1.FieldName()")

	token2, err := reader2.FieldName()
	require.NoError(t, err, "Something went wrong executing reader2.FieldName()")

	require.True(t, token1.Equal(token2), "Expected field names to match")
}
// compareAnnotations asserts that both readers report the same annotation
// sequence (same length, pairwise equal symbol tokens) for the current value.
func compareAnnotations(t *testing.T, reader1, reader2 ion.Reader) {
	an1, err := reader1.Annotations()
	require.NoError(t, err, "Something went wrong executing reader1.Annotations()")

	an2, err := reader2.Annotations()
	require.NoError(t, err, "Something went wrong executing reader2.Annotations()")

	require.Equal(t, len(an1), len(an2), "Expected readers to have same number of annotations")
	for i := 0; i < len(an1); i++ {
		assert.True(t, an1[i].Equal(&an2[i]))
	}
}
// compareScalars asserts that the current scalar value of both readers is
// equal, dispatching on ionType to use the appropriate typed accessor. Both
// readers must be positioned on a value of the given type; matching nulls
// compare equal without reading a value.
func compareScalars(t *testing.T, ionType ion.Type, reader1, reader2 ion.Reader) {
	isNull1 := reader1.IsNull()
	isNull2 := reader2.IsNull()
	require.Equal(t, isNull1, isNull2, "Expected readers to be both null or both non-null")

	if isNull1 {
		return
	}

	switch ionType {
	case ion.BoolType:
		value1, err := reader1.BoolValue()
		assert.NoError(t, err, "Something went wrong executing reader1.BoolValue()")

		value2, err := reader2.BoolValue()
		assert.NoError(t, err, "Something went wrong executing reader2.BoolValue()")

		assert.Equal(t, value1, value2, "Expected bool values to match")
	case ion.IntType:
		// The accessor depends on the encoded integer size; reader1's size is
		// assumed to apply to reader2 as well.
		intSize, err := reader1.IntSize()
		assert.NoError(t, err, "Something went wrong executing reader1.IntSize()")

		switch intSize {
		case ion.Int32:
			int1, err := reader1.IntValue()
			assert.NoError(t, err, "Something went wrong executing reader1.IntValue()")

			int2, err := reader2.IntValue()
			assert.NoError(t, err, "Something went wrong executing reader2.IntValue()")

			assert.Equal(t, int1, int2, "Expected int values to match")
		case ion.Int64:
			int1, err := reader1.Int64Value()
			assert.NoError(t, err, "Something went wrong executing reader1.Int64Value()")

			int2, err := reader2.Int64Value()
			assert.NoError(t, err, "Something went wrong executing reader2.Int64Value()")

			assert.Equal(t, int1, int2, "Expected int values to match")
		case ion.BigInt:
			bigInt1, err := reader1.BigIntValue()
			assert.NoError(t, err, "Something went wrong executing reader1.BigIntValue()")

			bigInt2, err := reader2.BigIntValue()
			assert.NoError(t, err, "Something went wrong executing reader2.BigIntValue()")

			assert.Equal(t, bigInt1, bigInt2, "Expected big int values to match")
		default:
			t.Error("Expected intSize to be one of Int32, Int64, Uint64, or BigInt")
		}
	case ion.FloatType:
		float1, err := reader1.FloatValue()
		assert.NoError(t, err, "Something went wrong executing reader1.FloatValue()")

		float2, err := reader2.FloatValue()
		assert.NoError(t, err, "Something went wrong executing reader2.FloatValue()")

		require.True(t, (float1 == nil) == (float2 == nil),
			"Expected float values to be either both null or both not null")

		if float1 != nil {
			// NaN never compares equal to itself, so NaN-ness is compared
			// separately from the numeric values.
			if !math.IsNaN(*float1) && !math.IsNaN(*float2) {
				assert.Equal(t, float1, float2, "Expected float values to match")
			} else if !math.IsNaN(*float1) || !math.IsNaN(*float2) {
				assert.NotEqual(t, float1, float2, "Expected IsNaN float value to differ from a non-IsNaN float value")
			}
		}
	case ion.DecimalType:
		decimal1, err := reader1.DecimalValue()
		assert.NoError(t, err, "Something went wrong executing reader1.DecimalValue()")

		decimal2, err := reader2.DecimalValue()
		assert.NoError(t, err, "Something went wrong executing reader2.DecimalValue()")

		decimalStrictEquals(t, decimal1, decimal2)
	case ion.TimestampType:
		timestamp1, err := reader1.TimestampValue()
		assert.NoError(t, err, "Something went wrong executing reader1.TimestampValue()")

		timestamp2, err := reader2.TimestampValue()
		assert.NoError(t, err, "Something went wrong executing reader2.TimestampValue()")

		assert.Equal(t, timestamp1, timestamp2, "Expected timestamp values to match")
	case ion.StringType:
		str1, err := reader1.StringValue()
		assert.NoError(t, err, "Something went wrong executing reader1.StringValue()")

		str2, err := reader2.StringValue()
		assert.NoError(t, err, "Something went wrong executing reader2.StringValue()")

		assert.Equal(t, str1, str2, "Expected string values to match")
	case ion.SymbolType:
		token1, err := reader1.SymbolValue()
		require.NoError(t, err, "Something went wrong executing reader1.SymbolValue()")

		token2, err := reader2.SymbolValue()
		require.NoError(t, err, "Something went wrong executing reader2.SymbolValue()")

		if isNull1 {
			assert.Nil(t, token1.Text, "Expected token1 to have null text")
			assert.Nil(t, token2.Text, "Expected token2 to have null text")
		} else {
			require.Equal(t, token1.Text == nil, token2.Text == nil,
				"Expected the text of both tokens to be null or both not null")

			// Symbols with unknown text fall back to comparing local SIDs.
			if token1.Text == nil {
				assert.Equal(t, token1.LocalSID, token2.LocalSID, "Expected token SIDs to match")
			} else {
				assert.Equal(t, token1.Text, token2.Text, "Expected token to have matching text")
			}
		}
	case ion.BlobType, ion.ClobType:
		b1, err := reader1.ByteValue()
		assert.NoError(t, err, "Something went wrong executing reader1.ByteValue()")

		b2, err := reader2.ByteValue()
		assert.NoError(t, err, "Something went wrong executing reader2.ByteValue()")

		assert.True(t, b1 != nil && b2 != nil, "Expected byte arrays to be non-null")
		assert.Equal(t, len(b1), len(b2), "Expected byte arrays to have same length")
		assert.Equal(t, b1, b2, "Expected byte arrays to match")
	default:
		t.Error(InvalidIonTypeError{ionType})
	}
}
// decimalStrictEquals asserts that two Ion Decimal values are equal, that
// Equal() is symmetric between them, and that they agree on being (or not
// being) negative zero.
func decimalStrictEquals(t *testing.T, decimal1, decimal2 *ion.Decimal) {
	assert.Equal(t, decimal1, decimal2, "Expected decimal values to match")

	zero := ion.NewDecimalInt(0)
	isNegativeZero := func(d *ion.Decimal) bool {
		return d.Equal(zero) && d.Sign() < 0
	}
	assert.Equal(t, isNegativeZero(decimal1), isNegativeZero(decimal2),
		"Expected decimal values to be both negative zero or both not negative zero")

	assert.True(t, decimal1.Equal(decimal2), "Expected decimal1.Equal(decimal2) to return true")
	assert.True(t, decimal2.Equal(decimal1), "Expected decimal2.Equal(decimal1) to return true")
}
// writeFromReaderToWriter reads every value from reader and writes an
// equivalent value to writer, recursing into containers (sexp, list, struct).
// When errExpected is true, errors from reader.StepOut() and reader.Err() are
// tolerated instead of failing the test.
func writeFromReaderToWriter(t *testing.T, reader ion.Reader, writer ion.Writer, errExpected bool) {
	for reader.Next() {
		name, err := reader.FieldName()
		// Fixed: this message previously referred to reader.Annotations().
		require.NoError(t, err, "Something went wrong executing reader.FieldName()")
		if name != nil {
			require.NoError(t, writer.FieldName(*name), "Something went wrong executing writer.FieldName(*name)")
		}

		annotations, err := reader.Annotations()
		require.NoError(t, err, "Something went wrong executing reader.Annotations()")
		if len(annotations) > 0 {
			require.NoError(t, writer.Annotations(annotations...), "Something went wrong executing writer.Annotations(annotations...)")
		}

		currentType := reader.Type()
		if reader.IsNull() {
			// A typed null: write it and move straight to the next value.
			require.NoError(t, writer.WriteNullType(currentType),
				"Something went wrong executing writer.WriteNullType(currentType)")
			continue
		}

		switch currentType {
		case ion.NullType:
			assert.NoError(t, writer.WriteNullType(ion.NullType), "Something went wrong while writing a Null value")
		case ion.BoolType:
			val, err := reader.BoolValue()
			assert.NoError(t, err, "Something went wrong when reading Boolean value")
			if val == nil {
				assert.NoError(t, writer.WriteNullType(ion.BoolType))
			} else {
				assert.NoError(t, writer.WriteBool(*val), "Something went wrong while writing a Boolean value")
			}
		case ion.IntType:
			intSize, err := reader.IntSize()
			require.NoError(t, err, "Something went wrong when retrieving the Int size")
			switch intSize {
			case ion.Int32, ion.Int64:
				val, err := reader.Int64Value()
				assert.NoError(t, err, "Something went wrong when reading Int value")
				assert.NoError(t, writer.WriteInt(*val), "Something went wrong when writing Int value")
			case ion.BigInt:
				val, err := reader.BigIntValue()
				assert.NoError(t, err, "Something went wrong when reading Big Int value")
				assert.NoError(t, writer.WriteBigInt(val), "Something went wrong when writing Big Int value")
			default:
				// Fixed: the message previously listed Uint64, which this switch
				// does not handle.
				t.Error("Expected intSize to be one of Int32, Int64, or BigInt")
			}
		case ion.FloatType:
			val, err := reader.FloatValue()
			assert.NoError(t, err, "Something went wrong when reading Float value")
			assert.NoError(t, writer.WriteFloat(*val), "Something went wrong when writing Float value")
		case ion.DecimalType:
			val, err := reader.DecimalValue()
			assert.NoError(t, err, "Something went wrong when reading Decimal value")
			assert.NoError(t, writer.WriteDecimal(val), "Something went wrong when writing Decimal value")
		case ion.TimestampType:
			val, err := reader.TimestampValue()
			assert.NoError(t, err, "Something went wrong when reading Timestamp value")
			assert.NoError(t, writer.WriteTimestamp(*val), "Something went wrong when writing Timestamp value")
		case ion.SymbolType:
			val, err := reader.SymbolValue()
			assert.NoError(t, err, "Something went wrong when reading Symbol value")
			assert.NoError(t, writer.WriteSymbol(*val), "Something went wrong when writing Symbol value")
		case ion.StringType:
			val, err := reader.StringValue()
			assert.NoError(t, err, "Something went wrong when reading String value")
			require.NotNil(t, val)
			assert.NoError(t, writer.WriteString(*val), "Something went wrong when writing String value")
		case ion.ClobType:
			val, err := reader.ByteValue()
			assert.NoError(t, err, "Something went wrong when reading Clob value")
			assert.NoError(t, writer.WriteClob(val), "Something went wrong when writing Clob value")
		case ion.BlobType:
			val, err := reader.ByteValue()
			assert.NoError(t, err, "Something went wrong when reading Blob value")
			assert.NoError(t, writer.WriteBlob(val), "Something went wrong when writing Blob value")
		case ion.SexpType:
			require.NoError(t, reader.StepIn(), "Something went wrong executing reader.StepIn()")
			require.NoError(t, writer.BeginSexp(), "Something went wrong executing writer.BeginSexp()")
			writeFromReaderToWriter(t, reader, writer, errExpected)
			err := reader.StepOut()
			if !errExpected {
				require.NoError(t, err, "Something went wrong executing reader.StepOut()")
			}
			require.NoError(t, writer.EndSexp(), "Something went wrong executing writer.EndSexp()")
		case ion.ListType:
			require.NoError(t, reader.StepIn(), "Something went wrong executing reader.StepIn()")
			require.NoError(t, writer.BeginList(), "Something went wrong executing writer.BeginList()")
			writeFromReaderToWriter(t, reader, writer, errExpected)
			err := reader.StepOut()
			if !errExpected {
				require.NoError(t, err, "Something went wrong executing reader.StepOut()")
			}
			require.NoError(t, writer.EndList(), "Something went wrong executing writer.EndList()")
		case ion.StructType:
			require.NoError(t, reader.StepIn(), "Something went wrong executing reader.StepIn()")
			require.NoError(t, writer.BeginStruct(), "Something went wrong executing writer.BeginStruct()")
			writeFromReaderToWriter(t, reader, writer, errExpected)
			err := reader.StepOut()
			if !errExpected {
				require.NoError(t, err, "Something went wrong executing reader.StepOut()")
			}
			require.NoError(t, writer.EndStruct(), "Something went wrong executing writer.EndStruct()")
		}
	}

	if !errExpected {
		assert.NoError(t, reader.Err(), "Something went wrong writing from reader to writer")
	}
}
// writeToWriters writes the reader's current value to every writer in
// writers, recursing into containers. It handles a single value; the caller
// drives the outer reader.Next() loop.
func writeToWriters(t *testing.T, reader ion.Reader, writers ...ion.Writer) {
	ionType := reader.Type()

	annotations, err := reader.Annotations()
	require.NoError(t, err, "Something went wrong executing reader.Annotations()")
	if len(annotations) > 0 {
		for _, writer := range writers {
			require.NoError(t, writer.Annotations(annotations...),
				"Something went wrong executing writer.Annotations(annotations...)")
		}
	}

	// Propagate the field name unless it is one of the reserved names
	// "ion"/"10n" (symbol-ID-only names are always propagated).
	fieldName, err := reader.FieldName()
	if err == nil && fieldName != nil && (fieldName.Text == nil || (*fieldName.Text != "ion" && *fieldName.Text != "10n")) {
		for _, writer := range writers {
			require.NoError(t, writer.FieldName(*fieldName),
				"Something went wrong executing writer.FieldName(*fieldName)")
		}
	}

	if reader.IsNull() {
		for _, writer := range writers {
			require.NoError(t, writer.WriteNullType(reader.Type()),
				"Something went wrong executing writer.WriteNullType(reader.Type())")
		}
		return
	}

	// writeContainer factors out the recursion shared by sexp, list and
	// struct: step in, copy every child value to all writers, step out.
	writeContainer := func(begin, end func(ion.Writer) error, beginMsg, endMsg string) {
		require.NoError(t, reader.StepIn())
		for _, writer := range writers {
			require.NoError(t, begin(writer), beginMsg)
		}
		for reader.Next() {
			writeToWriters(t, reader, writers...)
		}
		require.NoError(t, reader.Err(), "Something went wrong executing reader.Next()")
		require.NoError(t, reader.StepOut())
		for _, writer := range writers {
			require.NoError(t, end(writer), endMsg)
		}
	}

	switch ionType {
	case ion.NullType:
		for _, writer := range writers {
			require.NoError(t, writer.WriteNull(), "Something went wrong executing writer.WriteNull()")
		}
	case ion.BoolType:
		boolValue, err := reader.BoolValue()
		require.NoError(t, err)
		for _, writer := range writers {
			require.NoError(t, writer.WriteBool(*boolValue),
				"Something went wrong executing writer.WriteBool(*boolValue)")
		}
	case ion.BlobType:
		byteValue, err := reader.ByteValue()
		require.NoError(t, err)
		for _, writer := range writers {
			require.NoError(t, writer.WriteBlob(byteValue),
				"Something went wrong executing writer.WriteBlob(byteValue)")
		}
	case ion.ClobType:
		byteValue, err := reader.ByteValue()
		require.NoError(t, err)
		for _, writer := range writers {
			require.NoError(t, writer.WriteClob(byteValue),
				"Something went wrong executing writer.WriteClob(byteValue)")
		}
	case ion.DecimalType:
		decimalValue, err := reader.DecimalValue()
		require.NoError(t, err)
		for _, writer := range writers {
			require.NoError(t, writer.WriteDecimal(decimalValue),
				"Something went wrong executing writer.WriteDecimal(decimalValue)")
		}
	case ion.FloatType:
		floatValue, err := reader.FloatValue()
		require.NoError(t, err)
		for _, writer := range writers {
			require.NoError(t, writer.WriteFloat(*floatValue),
				"Something went wrong executing writer.WriteFloat(*floatValue)")
		}
	case ion.IntType:
		intSize, err := reader.IntSize()
		require.NoError(t, err)
		switch intSize {
		case ion.Int32, ion.Int64:
			intValue, err := reader.Int64Value()
			require.NoError(t, err)
			for _, writer := range writers {
				require.NoError(t, writer.WriteInt(*intValue),
					"Something went wrong executing writer.WriteInt(*intValue)")
			}
		case ion.BigInt:
			bigIntValue, err := reader.BigIntValue()
			require.NoError(t, err)
			for _, writer := range writers {
				require.NoError(t, writer.WriteBigInt(bigIntValue),
					"Something went wrong executing writer.WriteBigInt(bigIntValue)")
			}
		default:
			// Fixed: the message previously listed Uint64, which this switch
			// does not handle.
			t.Error("Expected intSize to be one of Int32, Int64, or BigInt")
		}
	case ion.StringType:
		stringValue, err := reader.StringValue()
		require.NoError(t, err)
		require.NotNil(t, stringValue)
		for _, writer := range writers {
			// Fixed: message previously said "stringValue" instead of "*stringValue".
			require.NoError(t, writer.WriteString(*stringValue),
				"Something went wrong executing writer.WriteString(*stringValue)")
		}
	case ion.SymbolType:
		symbolValue, err := reader.SymbolValue()
		require.NoError(t, err)
		for _, writer := range writers {
			require.NoError(t, writer.WriteSymbol(*symbolValue),
				"Something went wrong executing writer.WriteSymbol(*symbolValue)")
		}
	case ion.TimestampType:
		timestampValue, err := reader.TimestampValue()
		require.NoError(t, err)
		for _, writer := range writers {
			// Fixed: message previously said "WriterTimestamp".
			require.NoError(t, writer.WriteTimestamp(*timestampValue),
				"Something went wrong executing writer.WriteTimestamp(*timestampValue)")
		}
	case ion.SexpType:
		writeContainer(ion.Writer.BeginSexp, ion.Writer.EndSexp,
			"Something went wrong executing writer.BeginSexp()",
			"Something went wrong executing writer.EndSexp()")
	case ion.ListType:
		writeContainer(ion.Writer.BeginList, ion.Writer.EndList,
			"Something went wrong executing writer.BeginList()",
			"Something went wrong executing writer.EndList()")
	case ion.StructType:
		writeContainer(ion.Writer.BeginStruct, ion.Writer.EndStruct,
			"Something went wrong executing writer.BeginStruct()",
			"Something went wrong executing writer.EndStruct()")
	default:
		t.Fatal(InvalidIonTypeError{ionType})
	}
}
// readSexpAndAppendToList steps into the s-expression under the reader, reads
// every integer element, and returns the elements as a byte slice.
func readSexpAndAppendToList(t *testing.T, reader ion.Reader) []byte {
	require.NoError(t, reader.StepIn())

	bytesRead := []byte{}
	for reader.Next() {
		value, err := reader.Int64Value()
		require.NoError(t, err, "Something went wrong executing reader.Int64Value()")
		bytesRead = append(bytesRead, byte(*value))
	}

	require.NoError(t, reader.Err(), "Something went wrong executing reader.Next()")
	require.NoError(t, reader.StepOut(), "Something went wrong executing reader.StepOut()")
	return bytesRead
}
package geometry
import (
"github.com/dlespiau/dax"
"github.com/dlespiau/dax/math"
)
// Box is a rectangular cuboid centered around (0, 0, 0) with sizes Width,
// Height and Depth on the X, Y and Z axis respectively. Some control over the
// tesselation of each face is given through the number of segments on each
// direction.
type Box struct {
	// Extents of the box along the X, Y and Z axes respectively.
	Width, Height, Depth float32
	// Number of tessellation segments per axis; higher values produce a
	// denser mesh on each face.
	NumWidthSegments, NumHeightSegments, NumDepthSegments int
}
// BoxOptions contains optional parameters for the Box constructors.
type BoxOptions struct {
	// Per-axis segment counts. Values <= 0 are ignored and the default of
	// one segment per axis is kept (see NewBox).
	NumWidthSegments, NumHeightSegments, NumDepthSegments int
}
// defaultBox is the prototype copied by NewBox: a unit cube with a single
// tessellation segment on each axis.
var defaultBox = Box{
	Width:             1.0,
	Height:            1.0,
	Depth:             1.0,
	NumWidthSegments:  1,
	NumHeightSegments: 1,
	NumDepthSegments:  1,
}
// NewBox creates a new box with the given extents. At most one BoxOptions may
// be supplied; positive segment counts in it override the defaults (1 each).
func NewBox(width, height, depth float32, options ...BoxOptions) *Box {
	result := defaultBox
	result.Width, result.Height, result.Depth = width, height, depth

	if len(options) == 0 {
		return &result
	}

	opts := options[0]
	if opts.NumWidthSegments > 0 {
		result.NumWidthSegments = opts.NumWidthSegments
	}
	if opts.NumHeightSegments > 0 {
		result.NumHeightSegments = opts.NumHeightSegments
	}
	if opts.NumDepthSegments > 0 {
		result.NumDepthSegments = opts.NumDepthSegments
	}
	return &result
}
// boxContext accumulates the vertex data produced by successive buildPlane
// calls while tessellating the six faces of a box.
type boxContext struct {
	nVertices int       // total number of vertices appended so far
	positions []float32 // flattened vertex positions (x, y, z per vertex)
	normals   []float32 // flattened vertex normals (x, y, z per vertex)
	uvs       []float32 // flattened texture coordinates (u, v per vertex)
	indices   []uint    // triangle indices, three per face
}
// buildPlane tessellates one face of the box and appends its vertices,
// normals, uvs and triangle indices to ctx.
//
// u, v and w are vec3 component indices (0=x, 1=y, 2=z) selecting which
// component receives the face's horizontal, vertical and normal coordinate
// respectively; udir and vdir flip the direction of the horizontal/vertical
// axes. The sign of depth selects the direction of the face normal.
func buildPlane(ctx *boxContext,
	u, v, w int,
	udir, vdir float32,
	width, height, depth float32,
	gridX, gridY int) {
	segmentWidth := width / float32(gridX)
	segmentHeight := height / float32(gridY)
	widthHalf := width / 2
	heightHalf := height / 2
	depthHalf := depth / 2
	gridX1 := gridX + 1
	gridY1 := gridY + 1
	vertexCounter := 0
	vector := math.Vec3{}
	// generate vertices, normals and uvs
	for iy := 0; iy < gridY1; iy++ {
		y := float32(iy)*segmentHeight - heightHalf
		for ix := 0; ix < gridX1; ix++ {
			x := float32(ix)*segmentWidth - widthHalf
			// position
			vector[u] = x * udir
			vector[v] = y * vdir
			vector[w] = depthHalf
			ctx.positions = append(ctx.positions, vector[0], vector[1], vector[2])
			// normal: unit vector along the w axis, oriented by the sign of depth
			vector[u] = 0
			vector[v] = 0
			if depth > 0 {
				vector[w] = 1
			} else {
				vector[w] = -1
			}
			ctx.normals = append(ctx.normals, vector[0], vector[1], vector[2])
			// uvs
			ctx.uvs = append(ctx.uvs, float32(ix)/float32(gridX), 1-(float32(iy)/float32(gridY)))
			// counters
			vertexCounter++
		}
	}
	// indices
	// 1. you need three indices to draw a single face
	// 2. a single segment consists of two faces
	// 3. so we need to generate six (2*3) indices per segment
	for iy := 0; iy < gridY; iy++ {
		for ix := 0; ix < gridX; ix++ {
			// a, b, c, d are the corners of the current grid cell, offset by
			// the number of vertices emitted by earlier buildPlane calls.
			a := uint(ctx.nVertices + ix + gridX1*iy)
			b := uint(ctx.nVertices + ix + gridX1*(iy+1))
			c := uint(ctx.nVertices + (ix + 1) + gridX1*(iy+1))
			d := uint(ctx.nVertices + (ix + 1) + gridX1*iy)
			// faces
			ctx.indices = append(ctx.indices, a, b, d, b, c, d)
		}
	}
	// update total number of vertices
	ctx.nVertices += vertexCounter
}
// GetMesh is part of the dax.Mesher interface.
func (b *Box) GetMesh() *dax.Mesh {
m := dax.NewMesh()
ctx := &boxContext{}
width := b.Width
height := b.Height
depth := b.Depth
widthSegments := b.NumWidthSegments
heightSegments := b.NumHeightSegments
depthSegments := b.NumDepthSegments
buildPlane(ctx, 2, 1, 0, -1, -1, depth, height, width, depthSegments, heightSegments) // px
buildPlane(ctx, 2, 1, 0, 1, -1, depth, height, -width, depthSegments, heightSegments) // nx
buildPlane(ctx, 0, 2, 1, 1, 1, width, depth, height, widthSegments, depthSegments) // py
buildPlane(ctx, 0, 2, 1, 1, -1, width, depth, -height, widthSegments, depthSegments) // ny
buildPlane(ctx, 0, 1, 2, 1, -1, width, height, depth, widthSegments, heightSegments) // pz
buildPlane(ctx, 0, 1, 2, -1, -1, width, height, -depth, widthSegments, heightSegments) // nz
m.AddAttribute("position", ctx.positions, 3)
m.AddAttribute("normal", ctx.normals, 3)
m.AddAttribute("uv", ctx.uvs, 2)
m.AddIndices(ctx.indices)
return m
} | geometry/box.go | 0.814238 | 0.61607 | box.go | starcoder |
package gofun
import "fmt"
// Either represents one of two values.
type Either struct {
isRight bool
x interface{}
}
// EitherOrElse returns x if x is Either pointer, otherwise y.
func EitherOrElse(x interface{}, y *Either) *Either {
z, isOk := x.(*Either)
if isOk {
return z
} else {
return y
}
}
// Left creates an Either with a left value.
func Left(x interface{}) *Either {
return &Either { isRight: false, x: x }
}
// Right creates an Either with a right value.
func Right(x interface{}) *Either {
return &Either { isRight: true, x: x }
}
// IsLeft returns true if e contains the left value, otherwise false.
func (e *Either) IsLeft() bool {
return !e.isRight
}
// IsRight returns true if e contains the right value, otherwise false.
func (e *Either) IsRight() bool {
return e.isRight
}
// GetLeft returns the left value.
func (e *Either) GetLeft() interface{} {
if e.isRight {
return nil
} else {
return e.x
}
}
// GetRight returns the right value.
func (e *Either) GetRight() interface{} {
if e.isRight {
return e.x
} else {
return nil
}
}
// GetLeftOrElse returns the left value if e contains the left value, otherwise x().
func (e *Either) GetLeftOrElse(x func() interface{}) interface{} {
if e.isRight {
return x()
} else {
return e.x
}
}
// LeftOrElse returns e if e contains the left value, otherwise e2().
func (e *Either) LeftOrElse(e2 func() *Either) interface{} {
if e.isRight {
return e2()
} else {
return e
}
}
// GetRightOrElse returns the right value if e contains the right value, otherwise x().
func (e *Either) GetRightOrElse(x func() interface{}) interface{} {
if e.isRight {
return e.x
} else {
return x()
}
}
// RightOrElse returns e if e contains the right value, otherwise e2().
func (e *Either) RightOrElse(e2 func() *Either) interface{} {
if e.isRight {
return e
} else {
return e2()
}
}
func (e *Either) String() string {
if e.isRight {
return fmt.Sprintf("Right[%v]", e.x)
} else {
return fmt.Sprintf("Left[%v]", e.x)
}
} | either.go | 0.783368 | 0.455683 | either.go | starcoder |
package semantic
import "fmt"
// Visit invokes visitor for all the children of the supplied node.
// It is a read-only wrapper around Replace: each child is handed to visitor
// and then stored back unchanged.
func Visit(node Node, visitor func(Node)) {
	Replace(node, func(child Node) Node {
		visitor(child)
		return child
	})
}
// Replace invokes visitor for all the children of the supplied node, replacing
// the node with the returned value.
//
// The traversal is shallow: visitor is called once per direct child, and its
// return value is type-asserted back to the field's declared type before being
// stored, so the visitor must return a node of a compatible type. Leaf value
// types (BoolValue, Int*Value, StringValue, ...) have no children and are left
// untouched. Unknown node types panic.
func Replace(node Node, visitor func(Node) Node) {
	// Dispatch on the concrete node type and rewrite each child field.
	switch n := node.(type) {
	case *Abort:
	case *API:
		(*Symbols)(&n.members).Visit(func(_ string, n Node) { visitor(n) })
	case *ArrayAssign:
		n.To = visitor(n.To).(*ArrayIndex)
		n.Value = visitor(n.Value).(Expression)
	case *ArrayIndex:
		n.Array = visitor(n.Array).(Expression)
		n.Index = visitor(n.Index).(Expression)
	case *ArrayInitializer:
		n.Array = visitor(n.Array).(Type)
		for i, c := range n.Values {
			n.Values[i] = visitor(c).(Expression)
		}
	case *Slice:
		n.To = visitor(n.To).(Type)
	case *SliceIndex:
		n.Slice = visitor(n.Slice).(Expression)
		n.Index = visitor(n.Index).(Expression)
	case *SliceAssign:
		n.To = visitor(n.To).(*SliceIndex)
		n.Value = visitor(n.Value).(Expression)
	case *Assert:
		n.Condition = visitor(n.Condition).(Expression)
	case *Assign:
		n.LHS = visitor(n.LHS).(Expression)
		n.RHS = visitor(n.RHS).(Expression)
	case *Annotation:
		for i, c := range n.Arguments {
			n.Arguments[i] = visitor(c).(Expression)
		}
	case *Block:
		for i, c := range n.Statements {
			n.Statements[i] = visitor(c).(Statement)
		}
	case BoolValue:
	case *BinaryOp:
		if n.LHS != nil {
			n.LHS = visitor(n.LHS).(Expression)
		}
		if n.RHS != nil {
			n.RHS = visitor(n.RHS).(Expression)
		}
	case *BitTest:
		n.Bitfield = visitor(n.Bitfield).(Expression)
		n.Bits = visitor(n.Bits).(Expression)
	case *UnaryOp:
		n.Expression = visitor(n.Expression).(Expression)
	case *Branch:
		n.Condition = visitor(n.Condition).(Expression)
		n.True = visitor(n.True).(*Block)
		if n.False != nil {
			n.False = visitor(n.False).(*Block)
		}
	case *Builtin:
	case *Reference:
		n.To = visitor(n.To).(Type)
	case *Call:
		n.Type = visitor(n.Type).(Type)
		n.Target = visitor(n.Target).(*Callable)
		for i, a := range n.Arguments {
			n.Arguments[i] = visitor(a).(Expression)
		}
	case *Callable:
		if n.Object != nil {
			n.Object = visitor(n.Object).(Expression)
		}
		n.Function = visitor(n.Function).(*Function)
	case *Case:
		for i, c := range n.Conditions {
			n.Conditions[i] = visitor(c).(Expression)
		}
		n.Block = visitor(n.Block).(*Block)
	case *Cast:
		n.Object = visitor(n.Object).(Expression)
		n.Type = visitor(n.Type).(Type)
	case *Class:
		for i, f := range n.Fields {
			n.Fields[i] = visitor(f).(*Field)
		}
		for i, m := range n.Methods {
			n.Methods[i] = visitor(m).(*Function)
		}
	case *ClassInitializer:
		for i, f := range n.Fields {
			n.Fields[i] = visitor(f).(*FieldInitializer)
		}
	case *Choice:
		for i, c := range n.Conditions {
			n.Conditions[i] = visitor(c).(Expression)
		}
		n.Expression = visitor(n.Expression).(Expression)
	case *Definition:
		n.Expression = visitor(n.Expression).(Expression)
	case *DefinitionUsage:
		n.Expression = visitor(n.Expression).(Expression)
		n.Definition = visitor(n.Definition).(*Definition)
	case *DeclareLocal:
		n.Local = visitor(n.Local).(*Local)
		if n.Local.Value != nil {
			n.Local.Value = visitor(n.Local.Value).(Expression)
		}
	case Documentation:
	case *Enum:
		for i, e := range n.Entries {
			n.Entries[i] = visitor(e).(*EnumEntry)
		}
	case *EnumEntry:
	case *Label:
	case *Pseudonym:
		n.To = visitor(n.To).(Type)
		for i, m := range n.Methods {
			n.Methods[i] = visitor(m).(*Function)
		}
		for i, l := range n.labels {
			n.labels[i] = visitor(l).(*Label)
		}
	case *Fence:
		if n.Statement != nil {
			n.Statement = visitor(n.Statement).(Node)
		}
	case *Field:
		n.Type = visitor(n.Type).(Type)
		if n.Default != nil {
			n.Default = visitor(n.Default).(Expression)
		}
	case *FieldInitializer:
		n.Value = visitor(n.Value).(Expression)
	case Float32Value:
	case Float64Value:
	case *Function:
		if n.Return != nil {
			n.Return = visitor(n.Return).(*Parameter)
		}
		for i, c := range n.FullParameters {
			n.FullParameters[i] = visitor(c).(*Parameter)
		}
		if n.Block != nil {
			n.Block = visitor(n.Block).(*Block)
		}
		n.Signature = visitor(n.Signature).(*Signature)
	case *Parameter:
		for i, c := range n.Annotations {
			n.Annotations[i] = visitor(c).(*Annotation)
		}
		n.Type = visitor(n.Type).(Type)
	case *Global:
	case *StaticArray:
	case *Signature:
	// Integer value leaves: nothing to visit.
	case Int8Value:
	case Int16Value:
	case Int32Value:
	case Int64Value:
	case *Iteration:
		n.Iterator = visitor(n.Iterator).(*Local)
		n.From = visitor(n.From).(Expression)
		n.To = visitor(n.To).(Expression)
		n.Block = visitor(n.Block).(*Block)
	case *MapIteration:
		n.IndexIterator = visitor(n.IndexIterator).(*Local)
		n.KeyIterator = visitor(n.KeyIterator).(*Local)
		n.ValueIterator = visitor(n.ValueIterator).(*Local)
		n.Map = visitor(n.Map).(Expression)
		n.Block = visitor(n.Block).(*Block)
	case Invalid:
	case *Length:
		n.Object = visitor(n.Object).(Expression)
	case *Local:
		n.Type = visitor(n.Type).(Type)
	case *Map:
		n.KeyType = visitor(n.KeyType).(Type)
		n.ValueType = visitor(n.ValueType).(Type)
	case *MapAssign:
		n.To = visitor(n.To).(*MapIndex)
		n.Value = visitor(n.Value).(Expression)
	case *MapContains:
		n.Key = visitor(n.Key).(Expression)
		n.Map = visitor(n.Map).(Expression)
	case *MapIndex:
		n.Map = visitor(n.Map).(Expression)
		n.Index = visitor(n.Index).(Expression)
	case *MapRemove:
		n.Map = visitor(n.Map).(Expression)
		n.Key = visitor(n.Key).(Expression)
	case *Member:
		n.Object = visitor(n.Object).(Expression)
		n.Field = visitor(n.Field).(*Field)
	case *MessageValue:
		for i, a := range n.Arguments {
			n.Arguments[i] = visitor(a).(*FieldInitializer)
		}
	case *New:
		n.Type = visitor(n.Type).(*Reference)
	case *Pointer:
		n.To = visitor(n.To).(Type)
	case *Return:
		if n.Value != nil {
			n.Value = visitor(n.Value).(Expression)
		}
	case *Select:
		n.Value = visitor(n.Value).(Expression)
		for i, c := range n.Choices {
			n.Choices[i] = visitor(c).(*Choice)
		}
		if n.Default != nil {
			n.Default = visitor(n.Default).(Expression)
		}
	case StringValue:
	case *Switch:
		n.Value = visitor(n.Value).(Expression)
		for i, c := range n.Cases {
			n.Cases[i] = visitor(c).(*Case)
		}
		if n.Default != nil {
			n.Default = visitor(n.Default).(*Block)
		}
	// Unsigned integer value leaves: nothing to visit.
	case Uint8Value:
	case Uint16Value:
	case Uint32Value:
	case Uint64Value:
	case *Unknown:
	case *Clone:
		n.Slice = visitor(n.Slice).(Expression)
	case *Copy:
		n.Src = visitor(n.Src).(Expression)
		n.Dst = visitor(n.Dst).(Expression)
	case *Create:
		n.Type = visitor(n.Type).(*Reference)
		n.Initializer = visitor(n.Initializer).(*ClassInitializer)
	case *Ignore:
	case *Make:
		n.Size = visitor(n.Size).(Expression)
	case Null:
	case *PointerRange:
		n.Pointer = visitor(n.Pointer).(Expression)
		n.Range = visitor(n.Range).(*BinaryOp)
	case *Read:
		n.Slice = visitor(n.Slice).(Expression)
	case *SliceContains:
		n.Value = visitor(n.Value).(Expression)
		n.Slice = visitor(n.Slice).(Expression)
	case *SliceRange:
		n.Slice = visitor(n.Slice).(Expression)
		n.Range = visitor(n.Range).(*BinaryOp)
	case *Write:
		n.Slice = visitor(n.Slice).(Expression)
	default:
		// A node type this switch does not know about is a programming error.
		panic(fmt.Errorf("Unsupported semantic node type %T", n))
	}
}
package cl
import (
"math/big"
binaryquadraticform "github.com/getamis/alice/crypto/binaryquadraticform"
bqForm "github.com/getamis/alice/crypto/binaryquadraticform"
"github.com/getamis/alice/crypto/utils"
)
const (
	// distributionConstant is the exponent applied to a when sampling the
	// proof nonce r from [1, 2^distributionConstant * a].
	// d = 90 Fig. 6 in paper. But a = 2^(40)*s. If we want to get 90, then we set it to be 90-40=50
	distributionConstant = 50
)
var (
	// sizeChallengeSpace is the modulus applied to the hash output,
	// restricting Fiat-Shamir challenges to [0, 1023].
	// the size of challenge space = 1024
	sizeChallengeSpace = big.NewInt(1024)
)
/*
Notations:
- private key: x
- a fixed generator: g
- public key: h = g^x
- s : an upper bound of 1/π(ln|ΔK|)|ΔK|^(1/2) i.e. In this implementation, we set it to be Ceil(1/π(ln|ΔK|))*([|ΔK|^(1/2)]+1).
- challenge set c
- the message space: [0, p-1]. In our situation, the value p is the order of an elliptic curve group.
- distributionConstant: d
Alice(i.e. Prover) chooses a secret key: x in [1,s*2^d] and broadcasts the public key: h = g^x
Through the following protocol, Bob(i.e. Verifier) can be convinced that Alice knows x such that h^y = g^z for some z, and y = lcm(1,2,3,...,2^10), but Bob does not
learn x in this protocol. We use Fiat–Shamir heuristic in the original protocol to get the following:
Step 1: The prover
- randomly chooses an integers r in [1, 2^{d}*s].
- computes t=g^{r}.
- computes k:=H(t, g, f, h, p, q, A, C) mod c. Here H is a cryptography hash function.
- computes u:=r+kx in Z. Here Z is the ring of integer. The resulting proof is (u, t, h).
Step 2: The verifier verifies
- u in [0, (2^{d}+2^(50))s]. (Note: x in [0,s*2^(40)]. Then c*x in [0,s*2^50]. (2^{d}+2^(50))s = (2^(50)+2^(10))a).
- g^{u}=t*h^k.
Note: In our setting, d = 90.
*/
// newPubKey assembles a CL public key from its components and the associated
// proof of knowledge, and verifies the proof before returning it. A non-nil
// error means the proof failed verification and the key must not be used.
// (Fixed: the discriminant parameter name was previously misspelled
// "discirminantP"; the function is unexported and called positionally, so the
// rename is safe.)
func newPubKey(proof *ProofMessage, d uint32, discriminantP *big.Int, a, c, p, q *big.Int, g, f, h *binaryquadraticform.BQuadraticForm) (*PublicKey, error) {
	publicKey := &PublicKey{
		p: p,
		q: q,
		a: a,
		// Wrap the generators in caching exponentiators.
		g: bqForm.NewCacheExp(g),
		f: bqForm.NewCacheExp(f),
		h: bqForm.NewCacheExp(h),
		c: c,
		d: d,
		discriminantOrderP: discriminantP,
		proof:              proof,
	}
	if err := publicKey.Verify(); err != nil {
		return nil, err
	}
	return publicKey, nil
}
// newPubKeyProof produces a Schnorr-style proof of knowledge of the secret
// exponent x for the public key h = g^x, made non-interactive with the
// Fiat-Shamir heuristic (see the protocol description above). The returned
// ProofMessage carries (salt, u, t) where t = g^r and u = r + k*x.
func newPubKeyProof(x *big.Int, a, c, p, q *big.Int, g, f, h *binaryquadraticform.BQuadraticForm) (*ProofMessage, error) {
	// Compute 2^{90}s = 2^(50)*a. Note that a = 2^(40)*s
	upperBound1 := new(big.Int).Lsh(a, distributionConstant)
	// r in [1, 2^{90}s] = [1, 2^50*a]
	r, err := utils.RandomPositiveInt(upperBound1)
	if err != nil {
		return nil, err
	}
	// Compute t=g^{r}
	t, err := g.Exp(r)
	if err != nil {
		return nil, err
	}
	// k:=H(t, g, f, h, p, q, A, C) mod c
	// In our application c = 1024. If the field order is 2^32, we will get the uniform distribution D in [0,2^32-1].
	// If we consider the distribution E := { x in D| x mod c } is also the uniform distribution in [0,1023]=[0,c-1].
	k, salt, err := utils.HashProtosRejectSampling(big256bit, &Hash{
		T1: t.ToMessage(),
		T2: nil,
		G:  g.ToMessage(),
		F:  f.ToMessage(),
		H:  h.ToMessage(),
		P:  p.Bytes(),
		Q:  q.Bytes(),
		A:  a.Bytes(),
		C:  c.Bytes(),
	})
	if err != nil {
		return nil, err
	}
	k = k.Mod(k, sizeChallengeSpace)
	// Compute u:=r+kx in Z
	u := new(big.Int).Mul(k, x)
	u = u.Add(r, u)
	proof := &ProofMessage{
		Salt: salt,
		U1:   u.Bytes(),
		U2:   nil,
		T1:   t.ToMessage(),
		T2:   nil,
	}
	return proof, nil
}
// Verify checks the proof of knowledge attached to the public key: it
// re-derives the Fiat-Shamir challenge k from the proof transcript, checks
// that u lies in the expected range, and accepts only if g^u = t * h^k
// (see the protocol description above). A nil return means the proof is valid.
func (pubKey *PublicKey) Verify() error {
	proof := pubKey.GetPubKeyProof()
	t, err := proof.T1.ToBQuadraticForm()
	if err != nil {
		return ErrInvalidMessage
	}
	// Compute (2^(50)+2^(10))a)
	upperBound := new(big.Int).Add(new(big.Int).Lsh(pubKey.a, 50), new(big.Int).Lsh(pubKey.a, 10))
	// u in [0, (2^{d}+2^(50))s] = [0, (2^(50)+2^(10))a)]
	u := new(big.Int).SetBytes(proof.U1)
	err = utils.InRange(u, big0, upperBound)
	if err != nil {
		return err
	}
	// Check g^{u1}=t1*c1^k
	// k:=H(t1, t2, g, f, h, p, q, a, c) mod c
	k, err := utils.HashProtosToInt(proof.Salt, &Hash{
		T1: proof.T1,
		T2: proof.T2,
		G:  pubKey.g.ToMessage(),
		F:  pubKey.f.ToMessage(),
		H:  pubKey.h.ToMessage(),
		P:  pubKey.p.Bytes(),
		Q:  pubKey.q.Bytes(),
		A:  pubKey.a.Bytes(),
		C:  pubKey.c.Bytes(),
	})
	if err != nil {
		return err
	}
	k = k.Mod(k, sizeChallengeSpace)
	// g^{u}=t*h^k
	thk, err := pubKey.h.Exp(k)
	if err != nil {
		return err
	}
	thk, err = thk.Composition(t)
	if err != nil {
		return err
	}
	g := pubKey.g
	gu, err := g.Exp(u)
	if err != nil {
		return err
	}
	if !gu.Equal(thk) {
		return ErrDifferentBQForms
	}
	return nil
}
package astutil
import (
"bytes"
"fmt"
"go/ast"
"go/parser"
"go/printer"
"go/token"
"go/types"
"strconv"
"github.com/pkg/errors"
)
// Expr converts a template expression into an ast.Expr node.
// It panics if the template is not a valid Go expression.
func Expr(template string) ast.Expr {
	parsed, err := parser.ParseExpr(template)
	if err != nil {
		panic(err)
	}
	return parsed
}
// Field builds an ast.Field from the given type and names.
func Field(typ ast.Expr, names ...*ast.Ident) *ast.Field {
	field := new(ast.Field)
	field.Names = names
	field.Type = typ
	return field
}
// SelExpr builds an *ast.SelectorExpr of the form lhs.rhs.
func SelExpr(lhs, rhs string) *ast.SelectorExpr {
	return &ast.SelectorExpr{X: ast.NewIdent(lhs), Sel: ast.NewIdent(rhs)}
}
// ExprTemplateList converts a series of template expressions into a slice of
// ast.Expr, parsing each with Expr (which panics on invalid input).
func ExprTemplateList(examples ...string) []ast.Expr {
	exprs := make([]ast.Expr, len(examples))
	for i, example := range examples {
		exprs[i] = Expr(example)
	}
	return exprs
}
// ExprList returns a shallow copy of the given expressions as a new slice.
func ExprList(in ...ast.Expr) []ast.Expr {
	out := make([]ast.Expr, len(in))
	copy(out, in)
	return out
}
// Return - creates a return statement from the provided expressions.
func Return(expressions ...ast.Expr) ast.Stmt {
	ret := new(ast.ReturnStmt)
	ret.Results = expressions
	return ret
}
// Block - creates a block statement from the provided statements.
// Rbrace is set to the end of the last statement when one exists; an empty
// statement list is tolerated (previously it panicked with an index out of
// range on statements[len(statements)-1]).
func Block(statements ...ast.Stmt) *ast.BlockStmt {
	block := &ast.BlockStmt{List: statements}
	if n := len(statements); n > 0 {
		// Close the block at the end of its final statement.
		block.Rbrace = statements[n-1].End()
	}
	return block
}
// If - creates an if statement with the given initializer, condition, body
// and optional else branch.
func If(init ast.Stmt, condition ast.Expr, body *ast.BlockStmt, els ast.Stmt) *ast.IfStmt {
	stmt := new(ast.IfStmt)
	stmt.Init = init
	stmt.Cond = condition
	stmt.Body = body
	stmt.Else = els
	return stmt
}
// For - creates a for statement with the given initializer, condition, post
// statement and body.
func For(init ast.Stmt, condition ast.Expr, post ast.Stmt, body *ast.BlockStmt) *ast.ForStmt {
	stmt := new(ast.ForStmt)
	stmt.Init = init
	stmt.Cond = condition
	stmt.Post = post
	stmt.Body = body
	return stmt
}
// Range - create a range statement loop: for key, value tok range iterable { body }.
func Range(key, value ast.Expr, tok token.Token, iterable ast.Expr, body *ast.BlockStmt) *ast.RangeStmt {
	stmt := new(ast.RangeStmt)
	stmt.Key = key
	stmt.Value = value
	stmt.Tok = tok
	stmt.X = iterable
	stmt.Body = body
	return stmt
}
// Switch - create a switch statement with the given initializer, tag and body.
func Switch(init ast.Stmt, tag ast.Expr, body *ast.BlockStmt) *ast.SwitchStmt {
	stmt := new(ast.SwitchStmt)
	stmt.Init = init
	stmt.Tag = tag
	stmt.Body = body
	return stmt
}
// CaseClause - create a case clause matching expr with the given body.
func CaseClause(expr []ast.Expr, statements ...ast.Stmt) *ast.CaseClause {
	clause := new(ast.CaseClause)
	clause.List = expr
	clause.Body = statements
	return clause
}
// Assign - creates an assignment statement from the provided
// expressions and token.
func Assign(to []ast.Expr, tok token.Token, from []ast.Expr) *ast.AssignStmt {
	stmt := new(ast.AssignStmt)
	stmt.Lhs = to
	stmt.Tok = tok
	stmt.Rhs = from
	return stmt
}
// ValueSpec creates a value spec. i.e) x,y,z int
func ValueSpec(typ ast.Expr, names ...*ast.Ident) ast.Spec {
	return &ast.ValueSpec{Type: typ, Names: names}
}
// VarList creates a variable list. i.e) var (a int, b bool, c string)
// Lparen/Rparen are set to non-zero positions to force parenthesized output.
func VarList(specs ...ast.Spec) ast.Decl {
	decl := &ast.GenDecl{Tok: token.VAR, Specs: specs}
	decl.Lparen = 1
	decl.Rparen = 1
	return decl
}
// literalDecl builds a single-name GenDecl (const or var, per tok)
// binding name to the expression x.
// NOTE(review): the Obj is always created with kind ast.Con even when
// tok is not token.CONST, and ast.Object is deprecated in modern
// go/ast — the link is unresolved scaffolding only.
func literalDecl(tok token.Token, name string, x ast.Expr) *ast.GenDecl {
	return &ast.GenDecl{
		Tok: tok,
		Specs: []ast.Spec{
			&ast.ValueSpec{
				Names: []*ast.Ident{
					&ast.Ident{
						Name: name,
						Obj: &ast.Object{
							Kind: ast.Con,
							Name: name,
						},
					},
				},
				Values: []ast.Expr{
					x,
				},
			},
		},
	}
}

// Const creates a constant declaration, i.e. "const a = 0".
func Const(name string, x ast.Expr) ast.Decl {
	return literalDecl(token.CONST, name, x)
}

// CallExpr creates a function call expression applying fun to the
// provided argument expressions.
func CallExpr(fun ast.Expr, args ...ast.Expr) *ast.CallExpr {
	return &ast.CallExpr{
		Fun: fun,
		Args: args,
	}
}
// TransformFields ...
func TransformFields(m func(*ast.Field) *ast.Field, fields ...*ast.Field) []*ast.Field {
result := make([]*ast.Field, 0, len(fields))
for _, field := range fields {
result = append(result, m(field))
}
return result
}
// MapFieldsToNameExpr extracts all the name identifiers from the
// provided fields, widened to expressions, in declaration order.
func MapFieldsToNameExpr(args ...*ast.Field) []ast.Expr {
	result := make([]ast.Expr, 0, len(args))
	for _, f := range args {
		result = append(result, MapIdentToExpr(f.Names...)...)
	}
	return result
}

// FlattenFields unnests a field with multiple names: "a, b int" becomes
// two single-name fields "a int" and "b int". Fields without names
// contribute nothing to the result.
func FlattenFields(args ...*ast.Field) []*ast.Field {
	result := make([]*ast.Field, 0, len(args))
	for _, f := range args {
		for _, name := range f.Names {
			result = append(result, Field(f.Type, name))
		}
	}
	return result
}
// MapFieldsToNameIdent maps the set of fields to their names.
func MapFieldsToNameIdent(args ...*ast.Field) []*ast.Ident {
result := make([]*ast.Ident, 0, len(args))
for _, f := range args {
result = append(result, f.Names...)
}
return result
}
// MapFieldsToTypExpr - extracts the type for each name for each of the provided fields.
// i.e.) a,b int, c string, d float is transformed into: int, int, string, float
func MapFieldsToTypExpr(args ...*ast.Field) []ast.Expr {
r := []ast.Expr{}
for idx, f := range args {
if len(f.Names) == 0 {
f.Names = []*ast.Ident{ast.NewIdent(fmt.Sprintf("f%d", idx))}
}
for _ = range f.Names {
r = append(r, f.Type)
}
}
return r
}
// MapIdentToExpr converts all the Ident's to expressions.
func MapIdentToExpr(args ...*ast.Ident) []ast.Expr {
result := make([]ast.Expr, 0, len(args))
for _, ident := range args {
result = append(result, ident)
}
return result
}
// MapExprToString maps all the expressions to the corresponding strings.
func MapExprToString(args ...ast.Expr) []string {
result := make([]string, 0, len(args))
for _, expr := range args {
result = append(result, types.ExprString(expr))
}
return result
}
// TypePattern build a pattern matcher from the provided expressions.
func TypePattern(pattern ...ast.Expr) func(...ast.Expr) bool {
return func(testcase ...ast.Expr) bool {
if len(pattern) != len(testcase) {
return false
}
for idx := range pattern {
if types.ExprString(pattern[idx]) != types.ExprString(testcase[idx]) {
return false
}
}
return true
}
}
// IntegerLiteral builds a integer literal.
func IntegerLiteral(n int) ast.Expr {
return &ast.BasicLit{Kind: token.INT, Value: strconv.Itoa(n)}
}
// StringLiteral builds a raw (backquoted) string literal expression.
// NOTE(review): raw string literals cannot contain a backtick, so a
// backtick in s produces an invalid literal — confirm callers never
// pass one.
func StringLiteral(s string) ast.Expr {
	return &ast.BasicLit{
		Kind: token.STRING,
		Value: fmt.Sprintf("`%s`", s),
	}
}

// Print renders an ast.Node to Go source text using a fresh, empty file
// set; a nil node prints as the empty string with a nil error.
func Print(n ast.Node) (string, error) {
	if n == nil {
		return "", nil
	}
	dst := bytes.NewBuffer([]byte{})
	fset := token.NewFileSet()
	err := printer.Fprint(dst, fset, n)
	// errors.Wrap presumably returns nil when err is nil (pkg/errors
	// semantics), so the success path yields a nil error — TODO confirm.
	return dst.String(), errors.Wrap(err, "failure to print ast")
}
// StructureFieldSelectors returns a selector expression (local.field)
// for every combination of a name in local and a field in fields.
// NOTE(review): MapFieldsToNameIdent(field)[0] panics when a field has
// no names, and zeroing NamePos mutates the ident shared with the
// caller's field — confirm both are intended.
func StructureFieldSelectors(local *ast.Field, fields ...*ast.Field) []ast.Expr {
	selectors := make([]ast.Expr, 0, len(fields))
	for _, n := range local.Names {
		for _, field := range fields {
			sel := MapFieldsToNameIdent(field)[0]
			sel.NamePos = 0
			selectors = append(selectors, &ast.SelectorExpr{
				X: n,
				Sel: sel,
			})
		}
	}
	return selectors
} | directives/interp/astutil/astutil.go | 0.709925 | 0.402451 | astutil.go | starcoder |
package simpdf
import (
"fmt"
"math"
"strings"
"github.com/braddschick/simpdf/internal"
"github.com/braddschick/simpdf/pkg/models"
)
// Tables is a simple description of a table to be rendered into the PDF
// document. Cell contents use the "A**content" format, where A is the
// alignment letter (L, C or R) understood by BreakTableAlignment.
type Tables struct {
	// Headers is a simple string list of the headers in the
	// alignment**content format.
	// Example: "C**Titles" renders "Titles" centered in column 1.
	Headers []string
	// HeaderStyle contains the models.Styles that will depict the Header Row of the table
	HeaderStyle models.Styles
	// Rows contains each row of the table. The alignment**content format is observed.
	// Each string list is a row {"L**Column1", "C**Column2", "R**Column3"}
	Rows [][]string
	// RowStyle contains the models.Styles that will depict each data row of the table.
	RowStyle models.Styles
	// HasAlternating denotes if there was an alternating style for the data rows.
	HasAlternating bool
	// AlternatingRowStyle contains the models.Styles that will depict each even data row.
	AlternatingRowStyle models.Styles
	// MaxColWidth holds the maximum width of each column, computed by
	// TableColumnWidth from the widest cell content plus padding.
	MaxColWidth []float64
}
// BreakTableAlignment splits a "A**content" cell specifier into its
// alignment letter (upper-cased) and the cell content. C, L and R mean
// Center, Left and Right. A specifier without the "**" separator
// defaults to left alignment ("L") with the whole string as content.
func BreakTableAlignment(str string) (string, string) {
	parts := strings.Split(str, "**")
	if len(parts) >= 2 {
		return strings.ToUpper(parts[0]), parts[1]
	}
	return "L", str
}
// TableColumnWidth will determine the width of each column at the max width of the contents
// plus 6 pts of padding.
// This should NOT be used directly but is provided for context. Use AddTable() instead.
// NOTE(review): each row is indexed with the header count, so a row
// shorter than Headers panics — confirm callers guarantee equal widths.
func (s *SimPDF) TableColumnWidth(table Tables) []float64 {
	iCols := make([]float64, len(table.Headers))
	// Seed each column with its header width, measured in the header style.
	s.SetStyle(table.HeaderStyle, true)
	for i := range iCols {
		iCols[i] = math.Round(s.StringWidth(table.Headers[i])) + 6
	}
	// Widen columns to fit the widest data cell, measured in the row style.
	s.SetStyle(table.RowStyle, true)
	for _, str := range table.Rows {
		for ix, j := range iCols {
			vW := math.Round(s.StringWidth(str[ix])) + 6
			if vW > j {
				iCols[ix] = vW
			}
		}
	}
	return iCols
}
// CheckNullHeaders reports whether any header other than the first is
// empty. An empty first header is allowed; any other empty header
// causes the header row to be suppressed by AddTableHeader.
func (t *Tables) CheckNullHeaders() bool {
	for i, header := range t.Headers {
		if i == 0 {
			continue
		}
		if len(header) < 1 {
			return true
		}
	}
	return false
}
// AddTableHeader Adds the table header row to the PDF document. If fixWidth is not 0 then
// all cells will be set to the fixed width of the fixWidth value.
// This should NOT be used directly but is provided for context. Use AddTable() instead.
// The row is skipped entirely when CheckNullHeaders reports an empty header.
func (s *SimPDF) AddTableHeader(table Tables, fixWidth float64) {
	if !table.CheckNullHeaders() {
		s.SetStyle(table.HeaderStyle, false)
		for i, r := range table.Headers {
			// NOTE(review): the border argument is the numeric border
			// width formatted as a string — confirm this is what the
			// underlying CellFormat border parameter expects.
			b := fmt.Sprintf("%g", table.HeaderStyle.Border.Width.Top)
			var w float64
			if fixWidth == 0 {
				w = table.MaxColWidth[i]
			} else {
				w = fixWidth
			}
			align, str := BreakTableAlignment(r)
			s.PDF.CellFormat(w, table.HeaderStyle.LineSize, str, b, 0, align, true, 0, "")
		}
		s.NewLine(table.HeaderStyle.LineSize)
	}
}

// AddTableRows Adds the table rows to the PDF document. If fixWidth is not 0 then
// all cells will be set to the fixed width of the fixWidth value.
// Odd-indexed rows use the alternating style when one is configured.
// This should NOT be used directly but is provided for context. Use AddTable() instead.
func (s *SimPDF) AddTableRows(table Tables, fixWidth float64) {
	for ir, r := range table.Rows {
		b := fmt.Sprintf("%g", table.RowStyle.Border.Width.Top)
		if ir%2 == 1 {
			if table.HasAlternating {
				s.SetStyle(table.AlternatingRowStyle, false)
			}
		} else {
			s.SetStyle(table.RowStyle, false)
		}
		for ix, n := range r {
			var w float64
			if fixWidth == 0 {
				w = table.MaxColWidth[ix]
			} else {
				w = fixWidth
			}
			align, str := BreakTableAlignment(n)
			s.PDF.CellFormat(w, table.RowStyle.LineSize, str, b, 0, align, true, 0, "")
		}
		s.NewLine(table.RowStyle.LineSize)
	}
	// Restore the document's Normal style after the table body.
	sty, err := s.StyleName("Normal")
	internal.IfError("AddTableRows secure", err, false)
	s.SetStyle(sty, false)
}

// AddTable Simply adds the table to the PDF document. This is the main function for adding a
// table to the document. If fixedWidth is not 0 then all cells will be set to the fixed width of
// the fixedWidth value. If it is 0 then the width will be dependent on the cell contents.
// Passing a named style in altRowColor enables alternating row styling.
func (s *SimPDF) AddTable(table Tables, altRowColor models.Styles, fixedWidth float64) {
	if altRowColor.Name != "" {
		table.HasAlternating = true
		table.AlternatingRowStyle = altRowColor
	}
	s.NewLine(0)
	s.NewLine(0)
	table.MaxColWidth = s.TableColumnWidth(table)
	s.AddTableHeader(table, fixedWidth)
	s.AddTableRows(table, fixedWidth)
	style, err := s.StyleName("Normal")
	internal.IfError("AddTable public", err, false)
	s.fontReset(style)
	s.NewLine(0)
}

// DistributeColumnsEvenly returns a fixed width size that would allow the columns to be evenly
// distributed across the printable page width (page width minus margins).
func (s *SimPDF) DistributeColumnsEvenly(numCols float64) float64 {
	return (s.Width() - (s.Margin.Left + s.Margin.Right)) / numCols
} | tables.go | 0.753104 | 0.463444 | tables.go | starcoder |
package statsd
import (
"math"
"strconv"
"strings"
"github.com/atlassian/gostatsd"
)
const (
	// histogramThresholdsTagPrefix marks the tag carrying the bucket list.
	histogramThresholdsTagPrefix = "gsd_histogram:"
	// histogramThresholdsSeparator separates thresholds inside the tag value.
	histogramThresholdsSeparator = "_"
)

// latencyHistogram buckets the timer's values into a cumulative
// histogram: each configured threshold counts every value less than or
// equal to it, and the +Inf bucket always holds the total value count.
func latencyHistogram(timer gostatsd.Timer, bucketLimit uint32) map[gostatsd.HistogramThreshold]int {
	result := emptyHistogram(timer, bucketLimit)
	if len(result) == 0 {
		// Covers both nil (no histogram tag) and empty (bucketLimit == 0).
		return result
	}
	infiniteThreshold := gostatsd.HistogramThreshold(math.Inf(1))
	for _, value := range timer.Values {
		for latencyBucket := range result {
			if value <= float64(latencyBucket) {
				result[latencyBucket] += 1
			}
		}
	}
	// The loop above also incremented the +Inf bucket (every value is
	// <= +Inf); this assignment overwrites it with the same total.
	result[infiniteThreshold] = len(timer.Values)
	return result
}

// emptyHistogram builds the zeroed histogram skeleton: one entry per
// configured threshold plus the +Inf bucket. It returns an empty map
// when bucketLimit is 0 and nil when the timer has no histogram tag.
func emptyHistogram(timer gostatsd.Timer, bucketLimit uint32) map[gostatsd.HistogramThreshold]int {
	result := make(map[gostatsd.HistogramThreshold]int)
	if bucketLimit == 0 {
		return result
	}
	thresholds := retrieveThresholds(timer, bucketLimit)
	if thresholds == nil {
		return nil
	}
	infiniteThreshold := gostatsd.HistogramThreshold(math.Inf(1))
	for _, histogramThreshold := range thresholds {
		result[histogramThreshold] = 0
	}
	result[infiniteThreshold] = 0
	return result
}
// retrieveThresholds parses the timer's gsd_histogram tag into at most
// bucketlimit thresholds. It returns nil when the tag is absent and an
// empty slice when the tag value contains no parseable numbers.
func retrieveThresholds(timer gostatsd.Timer, bucketlimit uint32) []gostatsd.HistogramThreshold {
	tag, found := findTag(timer.Tags, histogramThresholdsTagPrefix)
	if found {
		bucketsTagValue := tag[len(histogramThresholdsTagPrefix):]
		stringThresholds := strings.Split(bucketsTagValue, histogramThresholdsSeparator)
		floatThresholds := mapToThresholds(stringThresholds)
		floatThresholds = floatThresholds[:(min(uint32(len(floatThresholds)), bucketlimit))]
		// A nil slice stays nil through the re-slice above, so this
		// only fires when no entry parsed as a float.
		if floatThresholds == nil {
			return []gostatsd.HistogramThreshold{}
		}
		return floatThresholds
	}
	return nil
}
// mapToThresholds parses each string as a float64 threshold, silently
// skipping entries that do not parse. It returns nil when nothing parses.
func mapToThresholds(vs []string) []gostatsd.HistogramThreshold {
	var thresholds []gostatsd.HistogramThreshold
	for _, raw := range vs {
		if f, err := strconv.ParseFloat(raw, 64); err == nil {
			thresholds = append(thresholds, gostatsd.HistogramThreshold(f))
		}
	}
	return thresholds
}
// hasHistogramTag reports whether the timer carries a gsd_histogram tag.
func hasHistogramTag(timer gostatsd.Timer) bool {
	_, found := findTag(timer.Tags, histogramThresholdsTagPrefix)
	return found
}
// findTag returns the first element of a that starts with prefix, and
// whether such an element was found.
func findTag(a []string, prefix string) (string, bool) {
	for _, candidate := range a {
		if strings.HasPrefix(candidate, prefix) {
			return candidate, true
		}
	}
	return "", false
}
// min returns the smaller of two uint32 values.
func min(a, b uint32) uint32 {
	if a < b {
		return a
	}
	return b
} | pkg/statsd/latency_histogram.go | 0.669096 | 0.45847 | latency_histogram.go | starcoder |
package condition
import (
"fmt"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/types"
jmespath "github.com/jmespath/go-jmespath"
)
//------------------------------------------------------------------------------
// init registers the jmespath condition constructor and its user-facing
// documentation in the global Constructors table.
func init() {
	Constructors[TypeJMESPath] = TypeSpec{
		constructor: NewJMESPath,
		description: `
Parses a message part as a JSON blob and attempts to apply a JMESPath expression
to it, expecting a boolean response. If the response is true the condition
passes, otherwise it does not. Please refer to the
[JMESPath website](http://jmespath.org/) for information and tutorials regarding
the syntax of expressions.
For example, with the following config:
` + "``` yaml" + `
jmespath:
part: 0
query: a == 'foo'
` + "```" + `
If the initial jmespaths of part 0 were:
` + "``` json" + `
{
"a": "foo"
}
` + "```" + `
Then the condition would pass.
JMESPath is traditionally used for mutating JSON, in order to do this please
instead use the ` + "[`jmespath`](../processors/README.md#jmespath)" + `
processor.`,
	}
}
//------------------------------------------------------------------------------
// JMESPathConfig is a configuration struct containing fields for the jmespath
// condition.
type JMESPathConfig struct {
	// Part is the index of the message part to test; negative values
	// count from the end of the message (see Check).
	Part int `json:"part" yaml:"part"`
	// Query is the JMESPath expression; it is expected to evaluate to a
	// boolean when applied to the part's JSON.
	Query string `json:"query" yaml:"query"`
}

// NewJMESPathConfig returns a JMESPathConfig with default values
// (part 0, empty query).
func NewJMESPathConfig() JMESPathConfig {
	return JMESPathConfig{
		Part: 0,
		Query: "",
	}
}
//------------------------------------------------------------------------------
// JMESPath is a condition that checks a message part against a compiled
// JMESPath query.
type JMESPath struct {
	stats metrics.Type
	log log.Modular
	// part is the target message part index; query is the pre-compiled
	// expression.
	part int
	query *jmespath.JMESPath
	// Metric counters: total checks, true/false outcomes, and error
	// breakdowns (JSON parse failures vs JMESPath search failures).
	mCount metrics.StatCounter
	mTrue metrics.StatCounter
	mFalse metrics.StatCounter
	mErrJSONP metrics.StatCounter
	mErrJMES metrics.StatCounter
	mErr metrics.StatCounter
}

// NewJMESPath returns a JMESPath condition, or an error when the
// configured query does not compile.
func NewJMESPath(
	conf Config, mgr types.Manager, log log.Modular, stats metrics.Type,
) (Type, error) {
	query, err := jmespath.Compile(conf.JMESPath.Query)
	if err != nil {
		return nil, fmt.Errorf("failed to compile JMESPath query: %v", err)
	}
	return &JMESPath{
		stats: stats,
		log: log,
		part: conf.JMESPath.Part,
		query: query,
		mCount: stats.GetCounter("count"),
		mTrue: stats.GetCounter("true"),
		mFalse: stats.GetCounter("false"),
		mErrJSONP: stats.GetCounter("error_json_parse"),
		mErrJMES: stats.GetCounter("error_jmespath_search"),
		mErr: stats.GetCounter("error"),
	}, nil
}
//------------------------------------------------------------------------------
// safeSearch applies the compiled query to part, converting any panic
// raised by the jmespath library into an ordinary error.
func safeSearch(part interface{}, j *jmespath.JMESPath) (res interface{}, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("jmespath panic: %v", r)
		}
	}()
	return j.Search(part)
}

// Check attempts to check a message part against the configured
// condition. A negative part index counts back from the end of the
// message. Out-of-range parts, JSON parse failures, search failures and
// non-boolean query results all count as false (with the corresponding
// metrics incremented).
func (c *JMESPath) Check(msg types.Message) bool {
	c.mCount.Incr(1)
	index := c.part
	if index < 0 {
		// Negative indices wrap around from the end of the message.
		index = msg.Len() + index
	}
	if index < 0 || index >= msg.Len() {
		c.mFalse.Incr(1)
		return false
	}
	jsonPart, err := msg.Get(index).JSON()
	if err != nil {
		c.log.Debugf("Failed to parse part into json: %v\n", err)
		c.mErrJSONP.Incr(1)
		c.mErr.Incr(1)
		c.mFalse.Incr(1)
		return false
	}
	var result interface{}
	if result, err = safeSearch(jsonPart, c.query); err != nil {
		c.log.Debugf("Failed to search json: %v\n", err)
		c.mErrJMES.Incr(1)
		c.mErr.Incr(1)
		c.mFalse.Incr(1)
		return false
	}
	// A non-boolean result leaves resultBool at its false zero value.
	resultBool, _ := result.(bool)
	if resultBool {
		c.mTrue.Incr(1)
	} else {
		c.mFalse.Incr(1)
	}
	return resultBool
}
//------------------------------------------------------------------------------ | lib/condition/jmespath.go | 0.670285 | 0.788746 | jmespath.go | starcoder |
package bsegtree
import (
"github.com/templexxx/bsegtree/internal/bitmap"
)
// BSTree is a segment-tree backed implementation of Tree over
// abbreviated (uint64) interval keys.
type BSTree struct {
	count int // Number of intervals
	root *node
	// interval stack
	base []Interval
	// Min value of all intervals
	min uint64
	// Max value of all intervals
	max uint64
	// sum of To - from in intervals.
	totalDeltas uint64
	// (max - min) / totalDeltas
	disjointPoint float64
}

// GetAll returns the raw interval stack (not a copy).
func (t *BSTree) GetAll() []Interval {
	return t.base
}

// Relations of two intervals
const (
	SUBSET = iota
	DISJOINT
	INTERSECT_OR_SUPERSET
)

// New creates a Tree with segment tree implementation, in a cleared
// (empty) state.
func New() Tree {
	t := new(BSTree)
	t.Clear()
	return t
}
// Push appends a new interval [from, To] to the stack; it becomes part
// of the tree only after the next Build.
func (t *BSTree) Push(from, to []byte) {
	fa := AbbreviatedKey(from)
	ta := AbbreviatedKey(to)
	t.base = append(t.base, Interval{t.count, fa, ta})
	t.count++
	if ta > t.max {
		t.max = ta
	}
	// NOTE(review): Clear sets min to 0 and fa is a uint64, so this
	// branch can never fire; Build later recomputes min via Endpoints,
	// so only the pre-Build disjointPoint estimate is affected.
	if fa < t.min {
		t.min = fa
	}
	t.totalDeltas += ta - fa
	if t.totalDeltas != 0 && t.max-t.min != 0 {
		t.disjointPoint = float64(t.max-t.min) / float64(t.totalDeltas)
	}
}

// PushArray pushes the paired intervals [from[i], to[i]] onto the
// stack; they are added to the tree after the next Build.
func (t *BSTree) PushArray(from, to [][]byte) {
	for i := 0; i < len(from); i++ {
		t.Push(from[i], to[i])
	}
}
// Build builds the segment tree out of the interval stack, recomputing
// min/max from the interval endpoints. It panics when no intervals have
// been pushed.
func (t *BSTree) Build() {
	if len(t.base) == 0 {
		panic("No intervals in stack To build tree. Push intervals first")
	}
	var endpoint []uint64
	endpoint, t.min, t.max = Endpoints(t.base)
	leaves := elementaryIntervals(endpoint)
	// Create tree nodes from interval endpoints
	t.root = t.insertNodes(leaves)
	for i := range t.base {
		t.root.insertInterval(t.base[i])
	}
}
// Query returns the ids of all intervals overlapping [from, to].
// It returns nil before Build has been called. The query range is
// clamped to the tree's [min, max]. For small trees or large expected
// result sets a serial scan of the interval stack is used instead of
// walking the tree.
func (t *BSTree) Query(from, to []byte) []int {
	if t.root == nil {
		return nil
	}
	fa, ta := AbbreviatedKey(from), AbbreviatedKey(to)
	if ta > t.max {
		ta = t.max
	}
	if fa < t.min {
		fa = t.min
	}
	cnt := t.estimateIntervals(fa, ta)
	if (cnt >= 48 && t.count <= 1024) || t.count <= 48 { // If true, serial will be faster.
		result := make([]int, 0, cnt)
		for _, i := range t.base {
			if !i.Disjoint(fa, ta) {
				result = append(result, i.ID)
			}
		}
		return result
	}
	result := make([]int, 0, cnt)
	var bm bitmap.Bitmap
	if cnt != 1 { // There is no need to check repeated result when there will be only 1 interval.
		bm = bitmap.New(t.count)
	}
	var bmp *bitmap.Bitmap
	if bm != nil {
		bmp = &bm
	}
	querySingle(t.root, fa, ta, &result, bmp)
	// When the estimate was 1 the tree walk ran without deduplication;
	// repair any duplicates found after the fact.
	if cnt == 1 {
		if len(result) <= 1 {
			return result
		}
		// on small result-set, we check for duplicates without allocation.
		// https://github.com/toberndo/go-stree/pull/5/files
		if (len(result) == 2 && result[0] != result[1]) || (len(result) == 3 && result[0] != result[1] && result[0] != result[2] && result[1] != result[2]) {
			return result
		}
		bm = bitmap.New(t.count)
		for _, id := range result {
			bm.Set(id, true)
		}
		result = result[:0]
		for i := 0; i < t.count; i++ {
			if bm.Get(i) {
				result = append(result, i)
			}
		}
	}
	return result
}

// querySingle traverses the tree collecting the ids of intervals that
// overlap [from, to]; when bm is non-nil it is used to suppress
// duplicate ids.
func querySingle(node *node, from, to uint64, result *[]int, bm *bitmap.Bitmap) {
	if !node.Disjoint(from, to) {
		for _, i := range node.overlap {
			if bm != nil {
				if !bm.Get(i.ID) {
					*result = append(*result, i.ID)
					bm.Set(i.ID, true)
				}
			} else {
				*result = append(*result, i.ID)
			}
		}
		if node.right != nil {
			querySingle(node.right, from, to, result, bm)
		}
		if node.left != nil {
			querySingle(node.left, from, to, result, bm)
		}
	}
}
// QueryPoint returns the ids of all intervals containing the single point p.
func (t *BSTree) QueryPoint(p []byte) []int {
	return t.Query(p, p)
}

// Clear resets the Tree to its empty state, keeping the base slice's
// backing storage for reuse.
// NOTE(review): min is reset to 0, which Push can never lower for
// uint64 keys — see the note on Push.
func (t *BSTree) Clear() {
	t.count = 0
	t.root = nil
	t.base = t.base[:0]
	t.min = 0
	t.max = 0
	t.totalDeltas = 0
	t.disjointPoint = 0
}

// Clone copies the interval stack and scalar state. The built segment
// tree itself is not cloned (root is nil), so the copy must call Build
// before it can answer queries.
func (t *BSTree) Clone() Tree {
	nt := &BSTree{
		count: t.count,
		root: nil,
		base: make([]Interval, 0, 1024),
		min: t.min,
		max: t.max,
		totalDeltas: t.totalDeltas,
		disjointPoint: t.disjointPoint,
	}
	for _, i := range t.base {
		nt.base = append(nt.base, Interval{
			ID: i.ID,
			From: i.From,
			To: i.To,
		})
	}
	return nt
}
// insertNodes recursively builds the balanced tree structure covering
// the given elementary intervals: a single leaf for one interval, or an
// internal node spanning the halves otherwise.
func (t *BSTree) insertNodes(ls [][2]uint64) *node {
	var n *node
	if len(ls) == 1 {
		n = &node{from: ls[0][0], to: ls[0][1]}
		n.left = nil
		n.right = nil
	} else {
		n = &node{from: ls[0][0], to: ls[len(ls)-1][1]}
		center := len(ls) / 2
		n.left = t.insertNodes(ls[:center])
		n.right = t.insertNodes(ls[center:])
	}
	return n
}

// estimateIntervals estimates how many intervals Query/QueryPoint will
// return for [from, to], assuming interval deltas are spread smoothly
// over [min, max]. The result is clamped to [1, count].
func (t *BSTree) estimateIntervals(from, to uint64) int {
	if t.max == t.min {
		return 1
	}
	delta := float64(to - from)
	var cnt int
	if delta == 0 && t.disjointPoint != 0 {
		// Point query: expect roughly 1/disjointPoint overlaps.
		cnt = int(round(1/t.disjointPoint, 0))
	} else {
		cnt = int((delta*float64(t.count))/float64(t.max-t.min)) + 1 // +1 for potential cross intervals and point query.
	}
	if cnt < 1 {
		return 1
	}
	if cnt > t.count {
		return t.count
	}
	return cnt
} | bstree.go | 0.718199 | 0.422088 | bstree.go | starcoder |
package main
import (
// "fmt"
"math"
"strconv"
"strings"
)
// Coord is a pair of integers identifying a grid position.
type Coord struct {
	x, y int
}

// GetManhattanDistance returns the Manhattan distance from the origin
// to the closest intersection of the two wire paths described by
// inputString (two comma-separated direction lists separated by a
// newline). It returns 0 when the wires never cross.
func GetManhattanDistance(inputString string) (lowestDistance int) {
	for _, intersection := range FindIntersections(inputString) {
		distance := int(math.Abs(float64(intersection.x)) + math.Abs(float64(intersection.y)))
		if lowestDistance == 0 || distance < lowestDistance {
			lowestDistance = distance
		}
	}
	return lowestDistance
}

// ShortestWireSum returns the fewest combined steps the two wires take
// to reach a shared intersection.
//
// Bug fix: when a shorter crossing was found the original assigned
// i1+i2, omitting the +2 that converts the zero-based route indices to
// step counts, making the two branches inconsistent. Both branches now
// use the same step count.
func ShortestWireSum(inputString string) (lowestDistance int) {
	wires := strings.Split(inputString, "\n")
	wire1coords := ExpandRoute(wires[0])
	wire2coords := ExpandRoute(wires[1])
	for i1, coord1 := range wire1coords {
		for i2, coord2 := range wire2coords {
			if coord1 == coord2 {
				// Route indices are zero-based, so each wire has taken
				// index+1 steps to reach this point.
				steps := i1 + i2 + 2
				if lowestDistance == 0 || steps < lowestDistance {
					lowestDistance = steps
				}
			}
		}
	}
	return lowestDistance
}

// FindIntersections returns every grid point visited by both wires
// (origin excluded, since routes start after the first move).
func FindIntersections(inputString string) (sharedCoords []Coord) {
	wires := strings.Split(inputString, "\n")
	wire1coords := ExpandRoute(wires[0])
	wire2coords := ExpandRoute(wires[1])
	for _, coord1 := range wire1coords {
		for _, coord2 := range wire2coords {
			if coord1 == coord2 {
				sharedCoords = append(sharedCoords, coord1)
			}
		}
	}
	return sharedCoords
}

// ExpandRoute expands a comma-separated list of moves ("R8,U5,...")
// into every grid point visited, in order, starting from (but not
// including) the origin.
func ExpandRoute(inputString string) (route []Coord) {
	instructions := strings.Split(inputString, ",")
	current := Coord{0, 0}
	for _, inst := range instructions {
		direction := inst[:1]
		// Malformed distances parse as 0 and contribute no points;
		// NOTE(review): the Atoi error is deliberately ignored here.
		distance, _ := strconv.Atoi(inst[1:])
		switch direction {
		case "R":
			for i := current.x + 1; i <= current.x+distance; i++ {
				route = append(route, Coord{i, current.y})
			}
		case "L":
			for i := current.x - 1; i >= current.x-distance; i-- {
				route = append(route, Coord{i, current.y})
			}
		case "U":
			for i := current.y + 1; i <= current.y+distance; i++ {
				route = append(route, Coord{current.x, i})
			}
		case "D":
			for i := current.y - 1; i >= current.y-distance; i-- {
				route = append(route, Coord{current.x, i})
			}
		}
		current = route[len(route)-1]
	}
	return route
}
package curve
import (
"github.com/syahrul12345/secp256k1/fieldelement"
"github.com/syahrul12345/secp256k1/utils"
"github.com/bitherhq/go-bither/common/hexutil"
)
// testpoint is a point on an elliptic curve over a small test field:
// nil X/Y represent the point at infinity; A and B are the curve
// coefficients of y^2 = x^3 + A*x + B.
type testpoint struct {
	X *fieldelement.FieldElement
	Y *fieldelement.FieldElement
	A fieldelement.FieldElement
	B fieldelement.FieldElement
}
// NewTestPoint constructs a point on the curve y^2 = x^3 + a*x + b over
// the field of the given prime, for use in tests.
//
// Passing "nil" for x or y produces the point at infinity. For finite
// points the constructor verifies curve membership and returns an error
// when the point does not lie on the curve.
//
// Bug fixes: the on-curve check previously ignored the supplied a and b
// and hard-coded the secp256k1 constants (0 and 7); and when x was
// "nil" but y was not, the original attempted to parse the literal
// string "nil" as a coordinate.
func NewTestPoint(x string, y string, a uint64, b uint64, prime int64) (*testpoint, error) {
	feA := fieldelement.NewTestingFieldElement(hexutil.EncodeUint64(a), prime)
	feB := fieldelement.NewTestingFieldElement(hexutil.EncodeUint64(b), prime)
	// Either coordinate being "nil" denotes the point at infinity,
	// which trivially belongs to the curve.
	if x == "nil" || y == "nil" {
		return &testpoint{
			X: nil,
			Y: nil,
			A: feA,
			B: feB,
		}, nil
	}
	feXVal := fieldelement.NewTestingFieldElement(x, prime)
	feYVal := fieldelement.NewTestingFieldElement(y, prime)
	// Verify y^2 == x^3 + a*x + b using the caller-supplied curve
	// coefficients.
	left := feYVal.Pow("2")
	right := feXVal.Pow("3").Add(feXVal.Mul(feA)).Add(feB)
	if !left.Equals(right) {
		return nil, &errorMessage{"The point doesnt exist on the curve"}
	}
	return &testpoint{
		X: &feXVal,
		Y: &feYVal,
		A: feA,
		B: feB,
	}, nil
}
// Helper functions.

// Equals reports whether point1 and point2 are the same point on the
// same curve. Points at infinity (nil coordinates) compare equal only
// when both sides hold the same nil pointers.
func (point1 *testpoint) Equals(point2 *testpoint) bool {
	x1 := point1.X
	y1 := point1.Y
	a1 := point1.A
	b1 := point1.B
	x2 := point2.X
	y2 := point2.Y
	a2 := point2.A
	b2 := point2.B
	// Handle the point at infinity: compare coordinate pointers
	// directly instead of dereferencing nil.
	if x1 == nil || x2 == nil || y1 == nil || y2 == nil {
		if x1 == x2 && y1 == y2 {
			return true
		}
		return false
	}
	return x1.Equals(*x2) && y1.Equals(*y2) && a1.Equals(a2) && b1.Equals(b2)
}

// NotEquals reports whether point1 differs from point2 in any
// coordinate or curve coefficient, with the same nil-pointer handling
// for points at infinity as Equals.
func (point1 *testpoint) NotEquals(point2 *testpoint) bool {
	x1 := point1.X
	y1 := point1.Y
	a1 := point1.A
	b1 := point1.B
	x2 := point2.X
	y2 := point2.Y
	a2 := point2.A
	b2 := point2.B
	if x1 == nil || x2 == nil || y1 == nil || y2 == nil {
		if x1 == x2 && y1 == y2 {
			return false
		}
		return true
	}
	return x1.NotEquals(*x2) || y1.NotEquals(*y2) || a1.NotEquals(a2) || b1.NotEquals(b2)
}
// Mul multiplies the point by a scalar coefficient (given as a numeric
// string) using binary double-and-add, and returns the resulting point.
//
// Bug fix: the original loop tested coeff.Int64() and
// (coeff.Int64() & 1), which is undefined for coefficients that do not
// fit in an int64 — the normal case for curve scalars. The loop now
// uses big.Int's Sign and Bit, which are correct at any magnitude.
func (point1 *testpoint) Mul(coefficient string) (*testpoint, error) {
	coeff := utils.ToBigInt(coefficient)
	current := point1
	// Start from the point at infinity (the group identity).
	result := &testpoint{
		nil,
		nil,
		point1.A,
		point1.B,
	}
	for coeff.Sign() > 0 {
		// Fold the running doubling into the result whenever the low
		// bit of the remaining coefficient is set.
		if coeff.Bit(0) == 1 {
			result, _ = result.Add(current)
		}
		current, _ = current.Add(current)
		coeff.Rsh(coeff, 1)
	}
	return result, nil
}
//Adds point1 to point2
func (point1 *testpoint) Add(point2 *testpoint) (*testpoint, error) {
//Check if the points are on the same curve
a1 := point1.A
b1 := point1.B
a2 := point2.A
b2 := point2.B
if a1.NotEquals(a2) || b1.NotEquals(b2) {
return nil, &errorMessage{"They don't exist on the same point"}
}
x1 := point1.X
y1 := point1.Y
x2 := point2.X
y2 := point2.Y
//Case 0:
if x1 == nil {
return point2, nil
}
if x2 == nil {
return point1, nil
}
// Case 1: Point @ Infinity. X is equals, but Y different. Vertical line.
if x1.Equals(*x2) && y1.NotEquals(*y2) {
return &testpoint{
nil,
nil,
a1,
b2,
}, &errorMessage{"Point of infinity"}
}
//Case 2: Point 1 and Point 2 are totally differnet
if x1.NotEquals(*x2) && y1.NotEquals(*y2) {
numerator := y1.Sub(*y2)
denominator := x1.Sub(*x2)
s := numerator.TrueDiv(denominator)
x3 := s.Pow("2").Sub(*x1).Sub(*x2)
y3 := s.Mul(x1.Sub(x3)).Sub(*y1)
return &testpoint{
&x3,
&y3,
a1,
b2,
}, nil
}
//Case 3: The tangent of the point forms avertical line
if point1.Equals(point2) && point1.Y.Equals(point1.X.Mul(fieldelement.NewFieldElement(hexutil.EncodeUint64(0)))) {
return &testpoint{
nil,
nil,
a1,
b2,
}, &errorMessage{"Tagent forms a vertical line"}
}
//Case 4: The two points are exactly the same!
if point1.Equals(point2) {
s := x1.Pow("2").Add(x1.Pow("2").Add(x1.Pow("2"))).Add(a1).TrueDiv(y1.Add(*y1))
x3 := s.Pow("2").Sub(x1.Add(*x1))
y3 := s.Mul(x1.Sub(x3)).Sub(*y1)
return &testpoint{
&x3,
&y3,
a1,
b1,
}, nil
}
return nil, nil
} | curve/curveTestClasses.go | 0.731059 | 0.445107 | curveTestClasses.go | starcoder |
package xnumber
import (
"fmt"
"math"
)
// Accuracy supplies the epsilon used by the float64 comparison helpers
// defined on it.
type Accuracy func() float64

// NewAccuracy wraps the fixed epsilon eps into an Accuracy.
func NewAccuracy(eps float64) Accuracy {
	return func() float64 {
		return eps
	}
}

// Equal reports whether x and y differ by less than the accuracy.
func (eps Accuracy) Equal(x, y float64) bool {
	return math.Abs(x-y) < eps()
}

// NotEqual reports whether x and y differ by at least the accuracy.
func (eps Accuracy) NotEqual(x, y float64) bool {
	return math.Abs(x-y) >= eps()
}

// Greater reports whether x exceeds y by more than the accuracy.
func (eps Accuracy) Greater(x, y float64) bool {
	return math.Max(x, y) == x && math.Abs(x-y) > eps()
}

// Less reports whether x is below y by more than the accuracy.
func (eps Accuracy) Less(x, y float64) bool {
	return math.Max(x, y) == y && math.Abs(x-y) > eps()
}

// GreaterOrEqual reports whether x is greater than y or within the
// accuracy of it.
func (eps Accuracy) GreaterOrEqual(x, y float64) bool {
	return math.Max(x, y) == x || math.Abs(x-y) < eps()
}

// LessOrEqual reports whether x is less than y or within the accuracy
// of it.
func (eps Accuracy) LessOrEqual(x, y float64) bool {
	return math.Max(x, y) == y || math.Abs(x-y) < eps()
}

// _acc is the package-default Accuracy (1e-3) backing the *InAccuracy
// helpers below.
var _acc = NewAccuracy(1e-3)

// EqualInAccuracy checks eq between two float64 in the default Accuracy: 1e-3.
func EqualInAccuracy(a, b float64) bool { return _acc.Equal(a, b) }

// NotEqualInAccuracy checks ne between two float64 in the default Accuracy: 1e-3.
func NotEqualInAccuracy(a, b float64) bool { return _acc.NotEqual(a, b) }

// GreaterInAccuracy checks gt between two float64 in the default Accuracy: 1e-3.
func GreaterInAccuracy(a, b float64) bool { return _acc.Greater(a, b) }

// LessInAccuracy checks lt between two float64 in the default Accuracy: 1e-3.
func LessInAccuracy(a, b float64) bool { return _acc.Less(a, b) }

// GreaterOrEqualInAccuracy checks gte between two float64 in the default Accuracy: 1e-3.
func GreaterOrEqualInAccuracy(a, b float64) bool { return _acc.GreaterOrEqual(a, b) }

// LessOrEqualInAccuracy checks lte between two float64 in the default Accuracy: 1e-3.
func LessOrEqualInAccuracy(a, b float64) bool { return _acc.LessOrEqual(a, b) }

// RenderByte renders a byte size to a string, supporting B, KB, MB, GB
// and TB. Whole bytes are printed as integers; larger units use %.2f.
// Negative sizes keep a leading minus sign.
func RenderByte(bytes float64) string {
	const divider = 1024.0
	sign := ""
	if bytes < 0 {
		sign = "-"
		bytes = -bytes
	} else if bytes == 0 {
		return "0B"
	}
	if LessInAccuracy(bytes, divider) {
		return sign + fmt.Sprintf("%dB", int(bytes))
	}
	kb := bytes / divider
	if LessInAccuracy(kb, divider) {
		return sign + fmt.Sprintf("%.2fKB", kb)
	}
	mb := kb / divider
	if LessInAccuracy(mb, divider) {
		return sign + fmt.Sprintf("%.2fMB", mb)
	}
	gb := mb / divider
	if LessInAccuracy(gb, divider) {
		return sign + fmt.Sprintf("%.2fGB", gb)
	}
	return sign + fmt.Sprintf("%.2fTB", gb/divider)
}
// Bool converts a boolean into 1 (true) or 0 (false).
func Bool(b bool) int {
	var n int
	if b {
		n = 1
	}
	return n
}

// IntSize reports the width in bits (32 or 64) of the platform's int type.
func IntSize() int {
	return 32 << (^uint(0) >> 63)
}
// Numeric limits mirroring the math package constants, pre-converted to
// their concrete sized types, plus the unsigned minimums the stdlib
// does not define.
const (
	MinInt8 = int8(-128) // -1 << 7, see math.MinInt8.
	MinInt16 = int16(-32768) // -1 << 15, see math.MinInt16.
	MinInt32 = int32(-2147483648) // -1 << 31, see math.MinInt32.
	MinInt64 = int64(-9223372036854775808) // -1 << 63, see math.MinInt64.
	MinUint8 = uint8(0) // 0.
	MinUint16 = uint16(0) // 0.
	MinUint32 = uint32(0) // 0.
	MinUint64 = uint64(0) // 0.
	MaxInt8 = int8(127) // 1 << 7 - 1, see math.MaxInt8.
	MaxInt16 = int16(32767) // 1 << 15 - 1, see math.MaxInt16.
	MaxInt32 = int32(2147483647) // 1 << 31 - 1, see math.MaxInt32.
	MaxInt64 = int64(9223372036854775807) // 1 << 63 - 1, see math.MaxInt64.
	MaxUint8 = uint8(255) // 1 << 8 - 1, see math.MaxUint8.
	MaxUint16 = uint16(65535) // 1 << 16 - 1, see math.MaxUint16.
	MaxUint32 = uint32(4294967295) // 1 << 32 - 1, see math.MaxUint32.
	MaxUint64 = uint64(18446744073709551615) // 1 << 64 - 1, see math.MaxUint64.
	MaxFloat32 = float32(math.MaxFloat32) // 2**127 * (2**24 - 1) / 2**23, see math.MaxFloat32.
	SmallestNonzeroFloat32 = float32(math.SmallestNonzeroFloat32) // 1 / 2**(127 - 1 + 23), see math.SmallestNonzeroFloat32.
	MaxFloat64 = float64(math.MaxFloat64) // 2**1023 * (2**53 - 1) / 2**52, see math.MaxFloat64.
	SmallestNonzeroFloat64 = float64(math.SmallestNonzeroFloat64) // 1 / 2**(1023 - 1 + 52), see math.SmallestNonzeroFloat64.
) | xnumber/xnumber.go | 0.890264 | 0.727903 | xnumber.go | starcoder |
package common
import (
"encoding/binary"
"encoding/hex"
"fmt"
"math"
)
// Sha1Hash is a convenient wrapper around the 20 bytes that make up a sha1Hash. Should be passed
// by pointer to avoid making copies.
type Sha1Hash struct {
Data [20]byte // The underlying data in the hash. Should not be manipulated directly
}
// Array returns a pointer to the underlying data
func (h *Sha1Hash) Array() *[20]byte {
return &h.Data
}
// String returns a hex-encoded string representation of the hash.
func (h *Sha1Hash) String() string {
return hex.EncodeToString(h.Data[:])
}
// Slice returns a slice of the underlying hash data. The data is not copied.
func (h *Sha1Hash) Slice() []byte {
return h.Data[:]
}
// SliceCopy copies and returns a slice of the underlying hash data.
func (h *Sha1Hash) SliceCopy() []byte {
result := make([]byte, 20)
copy(result, h.Data[:])
return result
}
// FromString reads the bytes from a hex-encoded string and copies them into
// the underlying hash. It assumes the input is valid and returns itself for
// syntactic convenience; invalid hex input panics. Use FromStringSafe for
// untrusted input.
func (h *Sha1Hash) FromString(str string) *Sha1Hash {
	bytes, err := hex.DecodeString(str)
	if err != nil {
		// Invalid hex is treated as a programmer error here.
		panic(err)
	}
	// NOTE(review): no length check — a decode shorter than 20 bytes leaves
	// trailing bytes of the previous hash value intact; confirm callers
	// always pass exactly 40 hex characters.
	copy(h.Data[:], bytes)
	return h
}
// FromStringSafe reads the bytes from a hex-encoded string and copies them
// into the underlying hash. If the input isn't valid hex, the decode error is
// returned; if it doesn't decode to exactly 20 bytes, a length error is
// returned. On any error the hash is left unmodified.
func (h *Sha1Hash) FromStringSafe(str string) error {
	bytes, err := hex.DecodeString(str)
	if err != nil {
		return err
	}
	if len(bytes) != 20 {
		// Error strings are lowercase and unpunctuated per Go convention.
		return fmt.Errorf("expected 20-byte hash but got %d bytes", len(bytes))
	}
	copy(h.Data[:], bytes)
	return nil
}
// FromSlice assumes the input is a slice of 20 bytes, and copies those bytes into the underlying
// hash. If there's any ambiguity about whether the input is valid, use FromSliceSafe(). It returns
// itself for syntactic convenience.
func (h *Sha1Hash) FromSlice(s []byte) *Sha1Hash {
copy(h.Data[:], s)
return h
}
// FromSliceSafe validates the length of the input and then copies it into the
// underlying hash. If the input is not exactly 20 bytes, an error is returned
// and the hash is left unmodified.
func (h *Sha1Hash) FromSliceSafe(s []byte) error {
	if len(s) != 20 {
		// Fixed: the original message said "string" for a slice input and
		// was capitalized, contrary to Go error-string convention.
		return fmt.Errorf("expected 20-byte slice but got %d bytes", len(s))
	}
	copy(h.Data[:], s)
	return nil
}
// Blank overwrites the underlying hash with 0xFF in every byte; used
// throughout flu as the 'null' hash. Returns itself for chaining.
func (h *Sha1Hash) Blank() *Sha1Hash {
	for i := range h.Data {
		h.Data[i] = 0xFF
	}
	return h
}
// IsBlank returns true if the sha1hash is blank (i.e., FF...FF); i.e., a 'null' hash. Benchmarked
// to be 4x faster than the naive loop=based approach
func (h *Sha1Hash) IsBlank() bool {
c1 := binary.BigEndian.Uint64(h.Data[:8])
c2 := binary.BigEndian.Uint64(h.Data[8:16])
c3 := binary.BigEndian.Uint32(h.Data[16:20])
return c1 == math.MaxUint64 && c1 == c2 && c3 == math.MaxUint32
} | common/sha1Hash.go | 0.812086 | 0.452838 | sha1Hash.go | starcoder |
package arrow
import (
"fmt"
"strconv"
"time"
)
type BooleanType struct{}
func (t *BooleanType) ID() Type { return BOOL }
func (t *BooleanType) Name() string { return "bool" }
func (t *BooleanType) String() string { return "bool" }
// BitWidth returns the number of bits required to store a single element of this data type in memory.
func (t *BooleanType) BitWidth() int { return 1 }
type FixedSizeBinaryType struct {
ByteWidth int
}
func (*FixedSizeBinaryType) ID() Type { return FIXED_SIZE_BINARY }
func (*FixedSizeBinaryType) Name() string { return "fixed_size_binary" }
func (t *FixedSizeBinaryType) BitWidth() int { return 8 * t.ByteWidth }
func (t *FixedSizeBinaryType) String() string {
return "fixed_size_binary[" + strconv.Itoa(t.ByteWidth) + "]"
}
type (
Timestamp int64
Time32 int32
Time64 int64
TimeUnit int
Date32 int32
Date64 int64
Duration int64
)
const (
Nanosecond TimeUnit = iota
Microsecond
Millisecond
Second
)
func (u TimeUnit) Multiplier() time.Duration {
return [...]time.Duration{time.Nanosecond, time.Microsecond, time.Millisecond, time.Second}[uint(u)&3]
}
func (u TimeUnit) String() string { return [...]string{"ns", "us", "ms", "s"}[uint(u)&3] }
// TimestampType is encoded as a 64-bit signed integer since the UNIX epoch (2017-01-01T00:00:00Z).
// The zero-value is a nanosecond and time zone neutral. Time zone neutral can be
// considered UTC without having "UTC" as a time zone.
type TimestampType struct {
Unit TimeUnit
TimeZone string
}
func (*TimestampType) ID() Type { return TIMESTAMP }
func (*TimestampType) Name() string { return "timestamp" }
// String renders the type as "timestamp[unit]" or, when a time zone is
// present, "timestamp[unit, tz=zone]".
func (t *TimestampType) String() string {
	if t.TimeZone == "" {
		return "timestamp[" + t.Unit.String() + "]"
	}
	return "timestamp[" + t.Unit.String() + ", tz=" + t.TimeZone + "]"
}
// BitWidth returns the number of bits required to store a single element of this data type in memory.
func (*TimestampType) BitWidth() int { return 64 }
// Time32Type is encoded as a 32-bit signed integer, representing either seconds or milliseconds since midnight.
type Time32Type struct {
Unit TimeUnit
}
func (*Time32Type) ID() Type { return TIME32 }
func (*Time32Type) Name() string { return "time32" }
func (*Time32Type) BitWidth() int { return 32 }
func (t *Time32Type) String() string { return "time32[" + t.Unit.String() + "]" }
// Time64Type is encoded as a 64-bit signed integer, representing either microseconds or nanoseconds since midnight.
type Time64Type struct {
Unit TimeUnit
}
func (*Time64Type) ID() Type { return TIME64 }
func (*Time64Type) Name() string { return "time64" }
func (*Time64Type) BitWidth() int { return 64 }
func (t *Time64Type) String() string { return "time64[" + t.Unit.String() + "]" }
// DurationType is encoded as a 64-bit signed integer, representing an amount
// of elapsed time without any relation to a calendar artifact.
type DurationType struct {
Unit TimeUnit
}
func (*DurationType) ID() Type { return DURATION }
func (*DurationType) Name() string { return "duration" }
func (*DurationType) BitWidth() int { return 64 }
func (t *DurationType) String() string { return "duration[" + t.Unit.String() + "]" }
// Float16Type represents a floating point value encoded with a 16-bit precision.
type Float16Type struct{}
func (t *Float16Type) ID() Type { return FLOAT16 }
func (t *Float16Type) Name() string { return "float16" }
func (t *Float16Type) String() string { return "float16" }
// BitWidth returns the number of bits required to store a single element of this data type in memory.
func (t *Float16Type) BitWidth() int { return 16 }
// Decimal128Type represents a fixed-size 128-bit decimal type.
type Decimal128Type struct {
Precision int32
Scale int32
}
func (*Decimal128Type) ID() Type { return DECIMAL }
func (*Decimal128Type) Name() string { return "decimal" }
func (*Decimal128Type) BitWidth() int { return 128 }
func (t *Decimal128Type) String() string {
return fmt.Sprintf("%s(%d, %d)", t.Name(), t.Precision, t.Scale)
}
// MonthInterval represents a number of months.
type MonthInterval int32
// MonthIntervalType is encoded as a 32-bit signed integer,
// representing a number of months.
type MonthIntervalType struct{}
func (*MonthIntervalType) ID() Type { return INTERVAL }
func (*MonthIntervalType) Name() string { return "month_interval" }
func (*MonthIntervalType) String() string { return "month_interval" }
// BitWidth returns the number of bits required to store a single element of this data type in memory.
func (t *MonthIntervalType) BitWidth() int { return 32 }
// DayTimeInterval represents a number of days and milliseconds (fraction of day).
type DayTimeInterval struct {
Days int32 `json:"days"`
Milliseconds int32 `json:"milliseconds"`
}
// DayTimeIntervalType is encoded as a pair of 32-bit signed integer,
// representing a number of days and milliseconds (fraction of day).
type DayTimeIntervalType struct{}
func (*DayTimeIntervalType) ID() Type { return INTERVAL }
func (*DayTimeIntervalType) Name() string { return "day_time_interval" }
func (*DayTimeIntervalType) String() string { return "day_time_interval" }
// BitWidth returns the number of bits required to store a single element of this data type in memory.
func (t *DayTimeIntervalType) BitWidth() int { return 64 }
var (
FixedWidthTypes = struct {
Boolean FixedWidthDataType
Date32 FixedWidthDataType
Date64 FixedWidthDataType
DayTimeInterval FixedWidthDataType
Duration_s FixedWidthDataType
Duration_ms FixedWidthDataType
Duration_us FixedWidthDataType
Duration_ns FixedWidthDataType
Float16 FixedWidthDataType
MonthInterval FixedWidthDataType
Time32s FixedWidthDataType
Time32ms FixedWidthDataType
Time64us FixedWidthDataType
Time64ns FixedWidthDataType
Timestamp_s FixedWidthDataType
Timestamp_ms FixedWidthDataType
Timestamp_us FixedWidthDataType
Timestamp_ns FixedWidthDataType
}{
Boolean: &BooleanType{},
Date32: &Date32Type{},
Date64: &Date64Type{},
DayTimeInterval: &DayTimeIntervalType{},
Duration_s: &DurationType{Unit: Second},
Duration_ms: &DurationType{Unit: Millisecond},
Duration_us: &DurationType{Unit: Microsecond},
Duration_ns: &DurationType{Unit: Nanosecond},
Float16: &Float16Type{},
MonthInterval: &MonthIntervalType{},
Time32s: &Time32Type{Unit: Second},
Time32ms: &Time32Type{Unit: Millisecond},
Time64us: &Time64Type{Unit: Microsecond},
Time64ns: &Time64Type{Unit: Nanosecond},
Timestamp_s: &TimestampType{Unit: Second, TimeZone: "UTC"},
Timestamp_ms: &TimestampType{Unit: Millisecond, TimeZone: "UTC"},
Timestamp_us: &TimestampType{Unit: Microsecond, TimeZone: "UTC"},
Timestamp_ns: &TimestampType{Unit: Nanosecond, TimeZone: "UTC"},
}
_ FixedWidthDataType = (*FixedSizeBinaryType)(nil)
) | go/arrow/datatype_fixedwidth.go | 0.912397 | 0.442456 | datatype_fixedwidth.go | starcoder |
package onshape
import (
"encoding/json"
)
// BTPStatementThrow1080 struct for BTPStatementThrow1080
type BTPStatementThrow1080 struct {
BTPStatement269
BtType *string `json:"btType,omitempty"`
Value *BTPExpression9 `json:"value,omitempty"`
}
// NewBTPStatementThrow1080 instantiates a new BTPStatementThrow1080 object.
// Properties with defined defaults are assigned them and required API
// properties are set; the argument list will change if the set of required
// properties changes.
func NewBTPStatementThrow1080() *BTPStatementThrow1080 {
	return &BTPStatementThrow1080{}
}
// NewBTPStatementThrow1080WithDefaults instantiates a new BTPStatementThrow1080 object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewBTPStatementThrow1080WithDefaults() *BTPStatementThrow1080 {
this := BTPStatementThrow1080{}
return &this
}
// GetBtType returns the BtType field value if set, zero value otherwise.
func (o *BTPStatementThrow1080) GetBtType() string {
if o == nil || o.BtType == nil {
var ret string
return ret
}
return *o.BtType
}
// GetBtTypeOk returns a tuple with the BtType field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementThrow1080) GetBtTypeOk() (*string, bool) {
if o == nil || o.BtType == nil {
return nil, false
}
return o.BtType, true
}
// HasBtType returns a boolean if a field has been set.
func (o *BTPStatementThrow1080) HasBtType() bool {
if o != nil && o.BtType != nil {
return true
}
return false
}
// SetBtType gets a reference to the given string and assigns it to the BtType field.
func (o *BTPStatementThrow1080) SetBtType(v string) {
o.BtType = &v
}
// GetValue returns the Value field value if set, zero value otherwise.
func (o *BTPStatementThrow1080) GetValue() BTPExpression9 {
if o == nil || o.Value == nil {
var ret BTPExpression9
return ret
}
return *o.Value
}
// GetValueOk returns a tuple with the Value field value if set, nil otherwise
// and a boolean to check if the value has been set.
func (o *BTPStatementThrow1080) GetValueOk() (*BTPExpression9, bool) {
if o == nil || o.Value == nil {
return nil, false
}
return o.Value, true
}
// HasValue returns a boolean if a field has been set.
func (o *BTPStatementThrow1080) HasValue() bool {
if o != nil && o.Value != nil {
return true
}
return false
}
// SetValue gets a reference to the given BTPExpression9 and assigns it to the Value field.
func (o *BTPStatementThrow1080) SetValue(v BTPExpression9) {
o.Value = &v
}
// MarshalJSON flattens the embedded BTPStatement269 into the same JSON object
// as this type's own fields by round-tripping the parent through a map, then
// overlaying the optional fields that are set.
func (o BTPStatementThrow1080) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	// Serialize the embedded parent first so its keys seed the map.
	serializedBTPStatement269, errBTPStatement269 := json.Marshal(o.BTPStatement269)
	if errBTPStatement269 != nil {
		return []byte{}, errBTPStatement269
	}
	errBTPStatement269 = json.Unmarshal([]byte(serializedBTPStatement269), &toSerialize)
	if errBTPStatement269 != nil {
		return []byte{}, errBTPStatement269
	}
	// Overlay this type's own optional fields (nil means "unset": omitted).
	if o.BtType != nil {
		toSerialize["btType"] = o.BtType
	}
	if o.Value != nil {
		toSerialize["value"] = o.Value
	}
	return json.Marshal(toSerialize)
}
type NullableBTPStatementThrow1080 struct {
value *BTPStatementThrow1080
isSet bool
}
func (v NullableBTPStatementThrow1080) Get() *BTPStatementThrow1080 {
return v.value
}
func (v *NullableBTPStatementThrow1080) Set(val *BTPStatementThrow1080) {
v.value = val
v.isSet = true
}
func (v NullableBTPStatementThrow1080) IsSet() bool {
return v.isSet
}
func (v *NullableBTPStatementThrow1080) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableBTPStatementThrow1080(val *BTPStatementThrow1080) *NullableBTPStatementThrow1080 {
return &NullableBTPStatementThrow1080{value: val, isSet: true}
}
func (v NullableBTPStatementThrow1080) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableBTPStatementThrow1080) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | onshape/model_btp_statement_throw_1080.go | 0.709824 | 0.469763 | model_btp_statement_throw_1080.go | starcoder |
package lzma
// treeCodec encodes or decodes values with a fixed bit size. It is using a
// tree of probability value. The root of the tree is the most-significant bit.
type treeCodec struct {
probTree
}
// makeTreeCodec makes a tree codec. The bits value must be inside the range
// [1,32].
func makeTreeCodec(bits int) treeCodec {
return treeCodec{makeProbTree(bits)}
}
// deepcopy initializes tc as a deep copy of the source.
func (tc *treeCodec) deepcopy(src *treeCodec) {
tc.probTree.deepcopy(&src.probTree)
}
// Encode uses the range encoder to encode a fixed-bit-size value.
func (tc *treeCodec) Encode(e *rangeEncoder, v uint32) (err error) {
m := uint32(1)
for i := int(tc.bits) - 1; i >= 0; i-- {
b := (v >> uint(i)) & 1
if err := e.EncodeBit(b, &tc.probs[m]); err != nil {
return err
}
m = (m << 1) | b
}
return nil
}
// Decode uses the range decoder to decode a fixed-bit-size value. Errors may
// be caused by the range decoder.
func (tc *treeCodec) Decode(d *rangeDecoder) (v uint32, err error) {
	// Walk the probability tree from the root (index 1), most-significant
	// bit first; each decoded bit selects the left (0) or right (1) child.
	m := uint32(1)
	for j := 0; j < int(tc.bits); j++ {
		b, err := d.DecodeBit(&tc.probs[m])
		if err != nil {
			return 0, err
		}
		m = (m << 1) | b
	}
	// After tc.bits iterations m is the leaf index 2^bits + v, so
	// subtracting 2^bits recovers the decoded value.
	return m - (1 << uint(tc.bits)), nil
}
// treeReverseCodec is another tree codec, where the least-significant bit is
// the start of the probability tree.
type treeReverseCodec struct {
probTree
}
// deepcopy initializes the treeReverseCodec as a deep copy of the
// source.
func (tc *treeReverseCodec) deepcopy(src *treeReverseCodec) {
tc.probTree.deepcopy(&src.probTree)
}
// makeTreeReverseCodec creates treeReverseCodec value. The bits argument must
// be in the range [1,32].
func makeTreeReverseCodec(bits int) treeReverseCodec {
return treeReverseCodec{makeProbTree(bits)}
}
// Encode uses range encoder to encode a fixed-bit-size value. The range
// encoder may cause errors.
func (tc *treeReverseCodec) Encode(v uint32, e *rangeEncoder) (err error) {
m := uint32(1)
for i := uint(0); i < uint(tc.bits); i++ {
b := (v >> i) & 1
if err := e.EncodeBit(b, &tc.probs[m]); err != nil {
return err
}
m = (m << 1) | b
}
return nil
}
// Decodes uses the range decoder to decode a fixed-bit-size value. Errors
// returned by the range decoder will be returned.
func (tc *treeReverseCodec) Decode(d *rangeDecoder) (v uint32, err error) {
m := uint32(1)
for j := uint(0); j < uint(tc.bits); j++ {
b, err := d.DecodeBit(&tc.probs[m])
if err != nil {
return 0, err
}
m = (m << 1) | b
v |= b << j
}
return v, nil
}
// probTree stores enough probability values to be used by the treeEncode and
// treeDecode methods of the range coder types.
type probTree struct {
probs []prob
bits byte
}
// deepcopy initializes the probTree value as a deep copy of the source.
func (t *probTree) deepcopy(src *probTree) {
if t == src {
return
}
t.probs = make([]prob, len(src.probs))
copy(t.probs, src.probs)
t.bits = src.bits
}
// makeProbTree builds a probTree with 2^bits probability slots, all set to
// probInit. The bits argument must lie in [1,32]; anything else panics.
func makeProbTree(bits int) probTree {
	if bits < 1 || bits > 32 {
		panic("bits outside of range [1,32]")
	}
	probs := make([]prob, 1<<uint(bits))
	for i := range probs {
		probs[i] = probInit
	}
	return probTree{bits: byte(bits), probs: probs}
}
// Bits provides the number of bits for the values to de- or encode.
func (t *probTree) Bits() int {
return int(t.bits)
} | vendor/github.com/ulikunitz/xz/lzma/treecodecs.go | 0.643105 | 0.463748 | treecodecs.go | starcoder |
package p18
import (
"fmt"
"hash/fnv"
"math"
c "s13g.com/euler/common"
)
// --- Day 18: Settlers of The North Pole ---
// http://adventofcode.com/2018/day/18
func Solve(input string) (string, string) {
lines := c.SplitByNewline(input)
return c.ToString(solveA(parseWorld(lines))), c.ToString(solveB(parseWorld(lines)))
}
func solveA(w world) int {
for i := 0; i < 10; i++ {
w.tick()
}
_, trees, lumberyards := w.countAll()
return trees * lumberyards
}
// solveB advances the automaton toward one billion ticks by detecting the
// first repeated world state (the simulation settles into a cycle) and then
// fast-forwarding: only the remainder after skipping whole cycles needs to
// be simulated.
func solveB(w world) int {
	target, round, states := 1000000000, 0, make(map[uint32]int)
	for round < target {
		w.tick()
		h := w.hash()
		// Loop detected!
		// NOTE(review): states are keyed by a 32-bit FNV hash of the grid,
		// so a hash collision could trigger a false cycle — acceptable risk
		// for puzzle-sized input, but confirm if reused elsewhere.
		if r, ok := states[h]; ok {
			loopLength := round - r
			// The fractional part of the skipped-loop count gives how many
			// extra ticks remain after jumping whole cycles.
			numLoopsSkipF := float64(target-round-1) / float64(loopLength)
			numLoopsSkip := int(numLoopsSkipF)
			numMoreRounds := int(math.Round((numLoopsSkipF - float64(numLoopsSkip)) * float64(loopLength)))
			// Just increment this number of rounds and then exit loop.
			for i := 0; i < numMoreRounds; i++ {
				w.tick()
			}
			break
		}
		states[h] = round
		round++
	}
	_, trees, lumberyards := w.countAll()
	return trees * lumberyards
}
type world struct {
width, height int
//0:ground, 1:tree, 2:lumberyard
grid []byte
}
func (w world) hash() uint32 {
h := fnv.New32a()
h.Write(w.grid)
return h.Sum32()
}
// countAdj counts the tile kinds in the (up to 8) cells adjacent to (x, y),
// clamping the 3x3 neighborhood at the grid edges and skipping the center
// cell itself. Returns (ground, trees, lumberyards) counts.
func (w *world) countAdj(x, y int) (int, int, int) {
	var counts [3]int
	startX, startY := c.Max(0, x-1), c.Max(0, y-1)
	endX, endY := c.Min(w.width, x+2), c.Min(w.height, y+2)
	for yy := startY; yy < endY; yy++ {
		for xx := startX; xx < endX; xx++ {
			if yy == y && xx == x {
				continue
			}
			// Tile values index directly into counts:
			// 0 ground, 1 tree, 2 lumberyard.
			counts[w.grid[yy*w.width+xx]]++
		}
	}
	return counts[0], counts[1], counts[2]
}
// Count all types of tiles in the world.
func (w *world) countAll() (int, int, int) {
var counts [3]int
for y := 0; y < w.height; y++ {
for x := 0; x < w.width; x++ {
counts[w.grid[y*w.width+x]]++
}
}
return counts[0], counts[1], counts[2]
}
// tick advances the cellular automaton one generation into a freshly
// allocated grid, so all cells update simultaneously from the old state:
//   - ground (0) becomes trees (1) when >= 3 adjacent cells are trees
//   - trees (1) become a lumberyard (2) when >= 3 adjacent lumberyards
//   - a lumberyard (2) persists only when adjacent to >= 1 lumberyard and
//     >= 1 tree; otherwise it reverts to ground
func (w *world) tick() {
	newGrid := make([]byte, len(w.grid))
	for y := 0; y < w.height; y++ {
		for x := 0; x < w.width; x++ {
			pos := y*w.width + x
			_, trees, lumbers := w.countAdj(x, y)
			switch w.grid[pos] {
			case 0:
				newGrid[pos] = 0
				if trees >= 3 {
					newGrid[pos] = 1
				}
			case 1:
				newGrid[pos] = 1
				if lumbers >= 3 {
					newGrid[pos] = 2
				}
			case 2:
				newGrid[pos] = 0
				if lumbers >= 1 && trees >= 1 {
					newGrid[pos] = 2
				}
			}
		}
	}
	w.grid = newGrid
}
func (w *world) print() {
for y := 0; y < w.height; y++ {
for x := 0; x < w.width; x++ {
c := "."
pos := y*w.width + x
if w.grid[pos] == 1 {
c = "|"
} else if w.grid[pos] == 2 {
c = "#"
}
fmt.Print(c)
}
fmt.Print("\n")
}
}
func parseWorld(lines []string) world {
width, height := len(lines[0]), len(lines)
grid := make([]byte, width*height)
for y, l := range lines {
for x, c := range l {
var v byte
if c == '|' {
v = 1
} else if c == '#' {
v = 2
}
grid[y*width+x] = v
}
}
return world{width: width, height: height, grid: grid}
} | go/aoc18/p18/p18.go | 0.589007 | 0.434881 | p18.go | starcoder |
package convnet
import (
"encoding/json"
"math"
"math/rand"
)
// Vol is the basic building block of all data in a net.
// it is essentially just a 3D volume of numbers, with a
// width (sx), height (sy), and depth (depth).
// it is used to hold data for all filters, all volumes,
// all weights, and also stores all gradients w.r.t.
// the data. c is optionally a value to initialize the volume
// with. If c is missing, fills the Vol with random numbers.
type Vol struct {
Sx int `json:"sx"`
Sy int `json:"sy"`
Depth int `json:"depth"`
W []float64 `json:"w"`
Dw []float64 `json:"-"`
}
// NewVol1D builds a 1x1xN volume whose depth values are copied from w, so
// the caller's slice is not aliased. Gradients start at zero.
func NewVol1D(w []float64) *Vol {
	n := len(w)
	v := &Vol{
		Sx:    1,
		Sy:    1,
		Depth: n,
		W:     make([]float64, n),
		Dw:    make([]float64, n),
	}
	copy(v.W, w)
	return v
}
func NewVol(sx, sy, depth int, c float64) *Vol {
n := sx * sy * depth
v := &Vol{
Sx: sx,
Sy: sy,
Depth: depth,
W: make([]float64, n),
Dw: make([]float64, n),
}
for i := range v.W {
v.W[i] = c
}
return v
}
func NewVolRand(sx, sy, depth int, r *rand.Rand) *Vol {
n := sx * sy * depth
v := &Vol{
Sx: sx,
Sy: sy,
Depth: depth,
W: make([]float64, n),
Dw: make([]float64, n),
}
// weight normalization is done to equalize the output
// variance of every neuron, otherwise neurons with a lot
// of incoming connections have outputs of larger variance
scale := math.Sqrt(1.0 / float64(sx*sy*depth))
for i := range v.W {
v.W[i] = r.NormFloat64() * scale
}
return v
}
func (v *Vol) index(x, y, d int) int {
return ((v.Sx*y)+x)*v.Depth + d
}
func (v *Vol) Get(x, y, d int) float64 {
return v.W[v.index(x, y, d)]
}
func (v *Vol) Set(x, y, d int, value float64) {
v.W[v.index(x, y, d)] = value
}
func (v *Vol) Add(x, y, d int, value float64) {
v.W[v.index(x, y, d)] += value
}
func (v *Vol) GetGrad(x, y, d int) float64 {
return v.Dw[v.index(x, y, d)]
}
func (v *Vol) SetGrad(x, y, d int, value float64) {
v.Dw[v.index(x, y, d)] = value
}
func (v *Vol) AddGrad(x, y, d int, value float64) {
v.Dw[v.index(x, y, d)] += value
}
func (v *Vol) CloneAndZero() *Vol {
return NewVol(v.Sx, v.Sy, v.Depth, 0.0)
}
// Clone returns a deep copy of the volume's weights. The gradient buffer Dw
// is allocated fresh (all zeros) rather than copied — presumably intentional,
// matching CloneAndZero's treatment of gradients; confirm if gradient state
// must survive cloning.
func (v *Vol) Clone() *Vol {
	v2 := &Vol{
		Sx: v.Sx, Sy: v.Sy,
		Depth: v.Depth,
		W: make([]float64, len(v.W)),
		Dw: make([]float64, len(v.W)),
	}
	copy(v2.W, v.W)
	return v2
}
func (v *Vol) AddFrom(v2 *Vol) {
for k := range v.W {
v.W[k] += v2.W[k]
}
}
func (v *Vol) AddFromScaled(v2 *Vol, a float64) {
for k := range v.W {
v.W[k] += a * v2.W[k]
}
}
func (v *Vol) SetConst(a float64) {
for k := range v.W {
v.W[k] = a
}
}
func (v *Vol) UnmarshalJSON(b []byte) error {
var data struct {
Sx int `json:"sx"`
Sy int `json:"sy"`
Depth int `json:"depth"`
W []float64 `json:"w"`
}
if err := json.Unmarshal(b, &data); err != nil {
return err
}
v.Sx = data.Sx
v.Sy = data.Sy
v.Depth = data.Depth
n := v.Sx * v.Sy * v.Depth
v.W = make([]float64, n)
v.Dw = make([]float64, n)
copy(v.W, data.W)
return nil
} | vol.go | 0.688259 | 0.603815 | vol.go | starcoder |
package treemap
import (
"math"
)
// Position is an X, Y, Z tuple.
type Position struct {
	X float64 `json:"x"`
	Y float64 `json:"y"`
	Z float64 `json:"z,omitempty"`
}

// Add returns the component-wise vector sum p + q.
func (p Position) Add(q Position) Position {
	// p is a value receiver, so mutating the copy is safe and the result
	// is returned directly.
	p.X += q.X
	p.Y += q.Y
	p.Z += q.Z
	return p
}
// Tiler is a quick implementation of a solution for the tiling problem. It
// takes an amount of tiles to place in one plane starting at {0, 0} and
// identifies where a tile should be placed and what is the smaller rectangle
// containing all the passed tiles.
type Tiler struct {
tiles int
margin float64
dimension int
xReference float64
yReference float64
currentIndex int
maxWidth float64
maxHeight float64
bounds Position
}
const (
defaultMargin = 3
)
// NewTiler returns a tiler expecting to place totalTiles with a default margin
func NewTiler(totalTiles int) *Tiler {
return NewTilerWithMargin(totalTiles, defaultMargin)
}
// NewTilerWithMargin returns a tiler expecting to place totalTiles with the injected margin
func NewTilerWithMargin(totalTiles int, margin float64) *Tiler {
tiler := &Tiler{
tiles: totalTiles,
margin: margin,
dimension: int(math.Ceil(math.Sqrt(float64(totalTiles)))),
}
return tiler
}
// GetBounds returns the size of the rectangle containing all the tiles placed so far
func (g *Tiler) GetBounds() Position {
return Position{
X: g.maxWidth + g.margin,
Y: g.maxHeight + g.margin,
}
}
// NextPosition calculates where a tile of the passed dimensions should be placed
func (g *Tiler) NextPosition(width, height float64) Position {
g.currentIndex++
if g.currentIndex > g.dimension && g.yReference+height >= g.maxWidth {
g.currentIndex = 0
g.yReference = 0
g.xReference = g.maxWidth + g.margin
}
position := Position{X: g.xReference + (width+g.margin)/2, Y: g.yReference + (height+g.margin)/2}
if g.xReference+width > g.maxWidth {
g.maxWidth = g.xReference + width
}
if g.yReference+height > g.maxHeight {
g.maxHeight = g.yReference + height
}
g.yReference += height + g.margin
return position
} | position.go | 0.895211 | 0.435721 | position.go | starcoder |
package elastic
import (
"fmt"
"math"
"gopkg.in/olivere/elastic.v3"
"github.com/unchartedsoftware/veldt/binning"
"github.com/unchartedsoftware/veldt/tile"
)
// Bivariate represents an elasticsearch implementation of the bivariate tile.
type Bivariate struct {
tile.Bivariate
}
// GetQuery returns the tiling query: a bool filter restricting XField and
// YField to the tile's bounds, inclusive of the minimum edge (Gte) and
// exclusive of the maximum (Lt), so adjacent tiles do not double-count
// boundary documents.
func (b *Bivariate) GetQuery(coord *binning.TileCoord) elastic.Query {
	// get tile bounds
	bounds := b.TileBounds(coord)
	// create the range queries; bounds are truncated to int64 for the
	// range filters.
	query := elastic.NewBoolQuery()
	query.Must(elastic.NewRangeQuery(b.XField).
		Gte(int64(bounds.MinX())).
		Lt(int64(bounds.MaxX())))
	query.Must(elastic.NewRangeQuery(b.YField).
		Gte(int64(bounds.MinY())).
		Lt(int64(bounds.MaxY())))
	return query
}
// GetAggs returns the tiling aggregation.
func (b *Bivariate) GetAggs(coord *binning.TileCoord) map[string]elastic.Aggregation {
bounds := b.TileBounds(coord)
// compute binning itnernal
intervalX := int64(math.Max(1, b.BinSizeX(coord)))
intervalY := int64(math.Max(1, b.BinSizeY(coord)))
// create the binning aggregations
x := elastic.NewHistogramAggregation().
Field(b.XField).
Offset(int64(bounds.MinX())).
Interval(intervalX).
MinDocCount(1)
y := elastic.NewHistogramAggregation().
Field(b.YField).
Offset(int64(bounds.MinY())).
Interval(intervalY).
MinDocCount(1)
x.SubAggregation("y", y)
return map[string]elastic.Aggregation{
"x": x,
"y": y,
}
}
// GetAggsWithNested returns the tiling aggregation with a nested child agg.
func (b *Bivariate) GetAggsWithNested(coord *binning.TileCoord, id string, nested elastic.Aggregation) map[string]elastic.Aggregation {
bounds := b.TileBounds(coord)
// compute binning itnernal
intervalX := int64(math.Max(1, b.BinSizeX(coord)))
intervalY := int64(math.Max(1, b.BinSizeY(coord)))
// create the binning aggregations
x := elastic.NewHistogramAggregation().
Field(b.XField).
Offset(int64(bounds.MinX())).
Interval(intervalX).
MinDocCount(1)
y := elastic.NewHistogramAggregation().
Field(b.YField).
Offset(int64(bounds.MinY())).
Interval(intervalY).
MinDocCount(1)
x.SubAggregation("y", y)
aggs := map[string]elastic.Aggregation{
"x": x,
"y": y,
}
if nested != nil {
y.SubAggregation(id, nested)
aggs[id] = nested
}
return aggs
}
// GetBins parses the resulting histograms into bins.
func (b *Bivariate) GetBins(coord *binning.TileCoord, aggs *elastic.Aggregations) ([]*elastic.AggregationBucketHistogramItem, error) {
// parse aggregations
xAgg, ok := aggs.Histogram("x")
if !ok {
return nil, fmt.Errorf("histogram aggregation `x` was not found")
}
// allocate bins
bins := make([]*elastic.AggregationBucketHistogramItem, b.Resolution*b.Resolution)
// fill bins
for _, xBucket := range xAgg.Buckets {
x := xBucket.Key
xBin := b.GetXBin(coord, float64(x))
yAgg, ok := xBucket.Histogram("y")
if !ok {
return nil, fmt.Errorf("histogram aggregation `y` was not found")
}
for _, yBucket := range yAgg.Buckets {
y := yBucket.Key
yBin := b.GetYBin(coord, float64(y))
index := xBin + b.Resolution*yBin
bins[index] = yBucket
}
}
return bins, nil
} | generation/elastic/bivariate.go | 0.820037 | 0.458531 | bivariate.go | starcoder |
package interval
import (
"fmt"
"math"
"github.com/influxdata/flux/values"
)
const (
MaxTime = math.MaxInt64
MinTime = math.MinInt64
)
// Bounds is a half-open time interval [start, stop).
type Bounds struct {
	start values.Time // inclusive lower bound
	stop values.Time // exclusive upper bound
	// index keeps track of how many windows have been added or subtracted as additional
	// windows are added to or subtracted from the initial bounds. In essence, it tracks the
	// offset from the original bounds in order to keep operations more straightforward.
	// See the Window struct and the window tests for additional info.
	index int
}
// NewBounds create a new Bounds given start and stop values
func NewBounds(start, stop values.Time) Bounds {
return Bounds{
start: start,
stop: stop,
}
}
func (b Bounds) Start() values.Time {
return b.start
}
func (b Bounds) Stop() values.Time {
return b.stop
}
func (b Bounds) IsEmpty() bool {
return b.start >= b.stop
}
// IsZero returns true if the start and stop values are both zero.
func (b Bounds) IsZero() bool {
return b.start == 0 && b.stop == 0
}
func (b Bounds) String() string {
return fmt.Sprintf("[%v, %v)", b.start, b.stop)
}
func (b Bounds) Contains(t values.Time) bool {
return t >= b.start && t < b.stop
}
// Overlaps reports whether b and o share at least one instant. Both bounds
// are half-open, so merely touching endpoints (o.stop == b.start) does not
// count as overlap — the o.stop > b.start guard excludes that degenerate
// case when b.Contains(o.stop) holds.
func (b Bounds) Overlaps(o Bounds) bool {
	return b.Contains(o.start) || (b.Contains(o.stop) && o.stop > b.start) || o.Contains(b.start)
}
func (b Bounds) Equal(o Bounds) bool {
return b == o
}
func (b Bounds) Length() values.Duration {
if b.IsEmpty() {
return values.ConvertDurationNsecs(0)
}
return b.stop.Sub(b.start)
}
// Intersect returns the intersection of two bounds.
// It returns empty bounds if one of the input bounds are empty.
// TODO: there are several places that implement bounds and related utilities.
// consider a central place for them?
func (b Bounds) Intersect(o Bounds) Bounds {
if b.IsEmpty() || o.IsEmpty() || !b.Overlaps(o) {
return Bounds{
start: b.start,
stop: b.stop,
}
}
i := Bounds{}
i.start = b.start
if o.start > b.start {
i.start = o.start
}
i.stop = b.stop
if o.stop < b.stop {
i.stop = o.stop
}
return i
}
// Union returns the smallest bounds which contain both input bounds.
// It returns empty bounds if one of the input bounds are empty.
func (b Bounds) Union(o Bounds) Bounds {
if b.IsEmpty() || o.IsEmpty() {
return Bounds{
start: values.Time(0),
stop: values.Time(0),
}
}
u := new(Bounds)
u.start = b.start
if o.start < b.start {
u.start = o.start
}
u.stop = b.stop
if o.stop > b.stop {
u.stop = o.stop
}
return *u
} | interval/bounds.go | 0.802865 | 0.558989 | bounds.go | starcoder |
package chart
import (
"math/rand"
"time"
)
var (
// Sequence contains some sequence utilities.
// These utilities can be useful for generating test data.
Sequence = &sequence{}
)
type sequence struct{}

// Float64 produces a slice of floats stepping from start to end (inclusive),
// ascending or descending as needed. An optional step size may be supplied;
// it defaults to 1.0.
func (s sequence) Float64(start, end float64, steps ...float64) []float64 {
	step := 1.0
	if len(steps) > 0 {
		step = steps[0]
	}
	var out []float64
	if start < end {
		for v := start; v <= end; v += step {
			out = append(out, v)
		}
		return out
	}
	for v := start; v >= end; v -= step {
		out = append(out, v)
	}
	return out
}
// Random generates a fixed length sequence of random values between (0, scale).
func (s sequence) Random(samples int, scale float64) []float64 {
rnd := rand.New(rand.NewSource(time.Now().Unix()))
values := make([]float64, samples)
for x := 0; x < samples; x++ {
values[x] = rnd.Float64() * scale
}
return values
}
// Random generates a fixed length sequence of random values with a given average, above and below that average by (-scale, scale)
func (s sequence) RandomWithAverage(samples int, average, scale float64) []float64 {
rnd := rand.New(rand.NewSource(time.Now().Unix()))
values := make([]float64, samples)
for x := 0; x < samples; x++ {
jitter := scale - (rnd.Float64() * (2 * scale))
values[x] = average + jitter
}
return values
}
// Days generates a sequence of timestamps by day, from -days to today.
func (s sequence) Days(days int) []time.Time {
	var values []time.Time
	// Walk offsets from -days up to 0 so timestamps come out oldest first.
	for offset := -days; offset <= 0; offset++ {
		values = append(values, time.Now().AddDate(0, 0, offset))
	}
	return values
}
// MarketHours generates hourly timestamps between from and to, keeping only
// times that fall within a trading session: between marketOpen and
// marketClose on a non-holiday weekday.
func (s sequence) MarketHours(from, to time.Time, marketOpen, marketClose time.Time, isHoliday HolidayProvider) []time.Time {
	var times []time.Time
	cursor := Date.On(marketOpen, from)
	toClose := Date.On(marketClose, to)
	for cursor.Before(toClose) || cursor.Equal(toClose) {
		todayOpen := Date.On(marketOpen, cursor)
		todayClose := Date.On(marketClose, cursor)
		isValidTradingDay := !isHoliday(cursor) && Date.IsWeekDay(cursor.Weekday())
		// Keep the cursor only when it lies inside [todayOpen, todayClose]
		// on a valid trading day.
		if (cursor.Equal(todayOpen) || cursor.After(todayOpen)) && (cursor.Equal(todayClose) || cursor.Before(todayClose)) && isValidTradingDay {
			times = append(times, cursor)
		}
		// Past today's close: jump to the next market open; otherwise
		// advance one hour within the current session.
		if cursor.After(todayClose) {
			cursor = Date.NextMarketOpen(cursor, marketOpen, isHoliday)
		} else {
			cursor = Date.NextHour(cursor)
		}
	}
	return times
}
// MarketHourQuarters generates, for each valid trading day between from and
// to, four timestamps: the market open, noon, 2pm, and the market close.
func (s sequence) MarketHourQuarters(from, to time.Time, marketOpen, marketClose time.Time, isHoliday HolidayProvider) []time.Time {
	var times []time.Time
	cursor := Date.On(marketOpen, from)
	toClose := Date.On(marketClose, to)
	for cursor.Before(toClose) || cursor.Equal(toClose) {
		isValidTradingDay := !isHoliday(cursor) && Date.IsWeekDay(cursor.Weekday())
		if isValidTradingDay {
			todayOpen := Date.On(marketOpen, cursor)
			todayNoon := Date.NoonOn(cursor)
			today2pm := Date.On(Date.Time(14, 0, 0, 0, cursor.Location()), cursor)
			todayClose := Date.On(marketClose, cursor)
			times = append(times, todayOpen, todayNoon, today2pm, todayClose)
		}
		cursor = Date.NextDay(cursor)
	}
	return times
}
// MarketDayCloses generates the market close timestamp for every valid
// trading day (non-holiday weekday) between from and to.
func (s sequence) MarketDayCloses(from, to time.Time, marketOpen, marketClose time.Time, isHoliday HolidayProvider) []time.Time {
	var closes []time.Time
	cursor := Date.On(marketOpen, from)
	end := Date.On(marketClose, to)
	for !cursor.After(end) {
		if !isHoliday(cursor) && Date.IsWeekDay(cursor.Weekday()) {
			closes = append(closes, Date.On(marketClose, cursor))
		}
		cursor = Date.NextDay(cursor)
	}
	return closes
}
// MarketDayAlternateCloses generates market close timestamps for valid
// trading days between from and to, stepping the cursor two days at a time.
func (s sequence) MarketDayAlternateCloses(from, to time.Time, marketOpen, marketClose time.Time, isHoliday HolidayProvider) []time.Time {
	var closes []time.Time
	cursor := Date.On(marketOpen, from)
	end := Date.On(marketClose, to)
	for !cursor.After(end) {
		if !isHoliday(cursor) && Date.IsWeekDay(cursor.Weekday()) {
			closes = append(closes, Date.On(marketClose, cursor))
		}
		cursor = cursor.AddDate(0, 0, 2)
	}
	return closes
}
// MarketDayMondayCloses generates market close timestamps between from and
// to, advancing to the following Monday after each iteration.
// NOTE(review): unlike the sibling helpers, the cursor starts from from's
// close time and the first appended value may fall on any weekday — confirm
// whether only Mondays are intended.
func (s sequence) MarketDayMondayCloses(from, to time.Time, marketOpen, marketClose time.Time, isHoliday HolidayProvider) []time.Time {
	var times []time.Time
	cursor := Date.On(marketClose, from)
	toClose := Date.On(marketClose, to)
	for cursor.Equal(toClose) || cursor.Before(toClose) {
		isValidTradingDay := !isHoliday(cursor) && Date.IsWeekDay(cursor.Weekday())
		if isValidTradingDay {
			times = append(times, cursor)
		}
		cursor = Date.NextDayOfWeek(cursor, time.Monday)
	}
	return times
} | vendor/github.com/nicholasjackson/bench/vendor/github.com/wcharczuk/go-chart/sequence.go | 0.693265 | 0.605537 | sequence.go | starcoder |
package diff
import (
"github.com/antzucaro/matchr"
)
// VanillaLCS computes the differences between two string slices using the
// LCS algorithm from
// https://en.m.wikipedia.org/wiki/Longest_common_subsequence_problem
// without any similarity refiners.
func VanillaLCS(l, r []string) Result {
	return diffLCS(l, r)
}
// diffLCS computes the difference between l and r via an LCS matrix.
// Identical head and tail segments are stripped first so the quadratic
// matrix only covers the differing middle. When refiners are supplied,
// "similar" (not merely equal) tokens are matched and recursively diffed.
func diffLCS(l, r []string, refiners ...Tokenizer) (res Result) {
	var wL, wR []string
	var resHead, resTail Result
	var m [][]int
	lookForSimilar := (len(refiners) > 0)
	wL, wR, resHead = getSameHead(l, r)
	wL, wR, resTail = getSameTail(wL, wR)
	if lookForSimilar {
		m = matrixLSS(wL, wR)
	} else {
		m = matrixLCS(wL, wR)
	}
	// Backtrack from the bottom-right corner of the matrix, emitting diffs
	// in reverse order (hence res.insert rather than res.append).
	var iL, iR int
	for iL, iR = len(wL), len(wR); iL != 0 && iR != 0; {
		if m[iL][iR] == m[iL][iR-1] {
			res.insert(newInsertedDiff(wR[iR-1]))
			iR--
		} else if m[iL][iR] == m[iL-1][iR] {
			res.insert(newDeletedDiff(wL[iL-1]))
			iL--
		} else {
			if lookForSimilar {
				// Matched-but-similar tokens: refine one level deeper.
				res.insert(adaptative(diffLCS, wL[iL-1], wR[iR-1], refiners...))
			} else {
				res.insert(newSameDiff(wL[iL-1]))
			}
			iL--
			iR--
		}
	}
	// Whatever remains on one side is purely deleted (left) or inserted (right).
	for iL != 0 {
		res.insert(newDeletedDiff(wL[iL-1]))
		iL--
	}
	for iR != 0 {
		res.insert(newInsertedDiff(wR[iR-1]))
		iR--
	}
	res.insert(resHead...)
	res.append(resTail...)
	return
}
// matrixLCS builds the (len(a)+1) x (len(b)+1) dynamic-programming table of
// longest-common-subsequence lengths for the prefixes of a and b, using
// strict string equality.
func matrixLCS(a, b []string) [][]int {
	m := make([][]int, len(a)+1)
	for i := range m {
		m[i] = make([]int, len(b)+1)
	}
	for ia, va := range a {
		for ib, vb := range b {
			switch {
			case va == vb:
				m[ia+1][ib+1] = m[ia][ib] + 1
			case m[ia+1][ib] > m[ia][ib+1]:
				m[ia+1][ib+1] = m[ia+1][ib]
			default:
				m[ia+1][ib+1] = m[ia][ib+1]
			}
		}
	}
	return m
}
// matrixLSS builds the same DP table as matrixLCS, but matches cells via the
// similarity matrix from similar() instead of strict equality.
func matrixLSS(a, b []string) [][]int {
	m := make([][]int, len(a)+1)
	for i := range m {
		m[i] = make([]int, len(b)+1)
	}
	sim := similar(a, b)
	for ia := range a {
		for ib := range b {
			switch {
			case sim[ia][ib]:
				m[ia+1][ib+1] = m[ia][ib] + 1
			case m[ia+1][ib] > m[ia][ib+1]: // MAX seq N-1
				m[ia+1][ib+1] = m[ia+1][ib]
			default:
				m[ia+1][ib+1] = m[ia][ib+1]
			}
		}
	}
	return m
}
// sequenceLCS returns the index pairs (into a and b) of a longest common
// subsequence, in ascending order. When lookForSimilar is true, similarity
// (matrixLSS) is used instead of strict equality (matrixLCS).
func sequenceLCS(a, b []string, lookForSimilar bool) (seq [][2]int) {
	var m [][]int
	if lookForSimilar {
		m = matrixLSS(a, b)
	} else {
		m = matrixLCS(a, b)
	}
	// Backtrack from the bottom-right corner, collecting matched pairs in
	// reverse order.
	for ia, ib := len(a), len(b); ia != 0 && ib != 0; {
		if m[ia][ib] == m[ia-1][ib] {
			ia--
		} else if m[ia][ib] == m[ia][ib-1] {
			ib--
		} else {
			seq = append(seq, [2]int{ia - 1, ib - 1})
			ia--
			ib--
		}
	}
	// Reverse in place so the pairs come out in ascending order.
	for i, j := 0, len(seq)-1; i < j; i, j = i+1, j-1 {
		seq[i], seq[j] = seq[j], seq[i]
	}
	return seq
}
// getSameHead strips the common prefix of l and r, returning the remaining
// slices and the prefix recorded as a Result of "same" diffs.
func getSameHead(l, r []string) ([]string, []string, Result) {
	var res Result
	limit := len(l)
	if len(r) < limit {
		limit = len(r)
	}
	i := 0
	for ; i < limit; i++ {
		if l[i] != r[i] {
			break
		}
		res.append(newSameDiff(l[i]))
	}
	return l[i:], r[i:], res
}
// getSameTail strips the common suffix of l and r, returning the remaining
// slices and the suffix recorded (in order) as a Result of "same" diffs.
func getSameTail(l, r []string) ([]string, []string, Result) {
	var res Result
	lenL, lenR := len(l), len(r)
	i := 0
	for ; i < lenL && i < lenR; i++ {
		last := l[lenL-1-i]
		if last != r[lenR-1-i] {
			break
		}
		// insert (not append): we walk the suffix backwards.
		res.insert(newSameDiff(last))
	}
	return l[:lenL-i], r[:lenR-i], res
}
// similar builds a boolean matrix marking, for each element of a, which
// elements of b are its best match by Jaro-Winkler similarity. A row is only
// marked when its best score reaches the 0.75 threshold; all cells tying
// with the row maximum are marked.
func similar(a, b []string) [][]bool {
	dist := make([][]float64, len(a))
	for i := range dist {
		dist[i] = make([]float64, len(b))
	}
	isSame := make([][]bool, len(a))
	for i := range isSame {
		isSame[i] = make([]bool, len(b))
	}
	for ia := range a {
		// Find the best similarity score for this row of a.
		var max float64
		for ib := range b {
			dist[ia][ib] = matchr.JaroWinkler(a[ia], b[ib], true)
			if dist[ia][ib] > max {
				max = dist[ia][ib]
			}
		}
		// Below the threshold nothing in this row counts as similar.
		if max < 0.75 {
			continue
		}
		for ib := range b {
			isSame[ia][ib] = (dist[ia][ib] == max)
		}
	}
	return isSame
} | diff/lcs.go | 0.557123 | 0.406862 | lcs.go | starcoder |
package rendering
import (
. "github.com/balpha/go-unicornify/unicornify/core"
"math"
)
// ------- ShadowCastingTracer -------
// ShadowCastingTracer wraps a source tracer and shades its output according
// to whether each traced point can see the light (re-traced via LightTracer
// in the light's view) or is occluded.
type ShadowCastingTracer struct {
	WorldView, LightView WorldView // camera's view and the light's view (used by Trace to re-project hits)
	SourceTracer, LightTracer Tracer // SourceTracer renders; LightTracer tests occlusion from the light
	LightProjection SphereProjection // projection of the light position into WorldView — usage not visible here; confirm
	Lighten, Darken float64 // maximum lighten/darken amounts applied when shading in Trace
}
// Trace traces (x, y) through the source tracer and shades the resulting
// color: the hit point is re-projected into the light's view and darkened if
// the light tracer finds an occluder before it, otherwise shaded by the
// angle between the hit surface direction and the light ray.
func (t *ShadowCastingTracer) Trace(x, y float64, ray Vector) (bool, float64, Vector, Color) {
	ok, z, dir, col := t.SourceTracer.Trace(x, y, ray)
	if !ok {
		return ok, z, dir, col
	}
	// Re-project the hit point into the light's view and trace toward it.
	origPoint := t.WorldView.UnProject(Vector{x, y, z})
	lp := t.LightView.ProjectSphere(origPoint, 0)
	lx, ly := lp.X(), lp.Y()
	lray := t.LightView.Ray(lx, ly)
	lok, lz, ldir, _ := t.LightTracer.Trace(lx, ly, lray)
	// Lit if nothing blocks the light before (almost) reaching the point;
	// the 0.01 slack tolerates rounding error.
	seeing := !lok || lz >= origPoint.Minus(t.LightView.CameraPosition).Length()-0.01
	if !seeing {
		col = Darken(col, uint8(t.Darken))
	} else {
		// Shade by the angle between surface direction and light ray.
		sp := ldir.Unit().ScalarProd(lray)
		if sp > 0 { // Given a completely realistic world with no rounding errors, this wouldn't happen.
			col = Darken(col, uint8((1-sp)*t.Darken))
		} else if sp < 0 {
			sp = -sp
			if sp < 0.5 {
				col = Darken(col, uint8((0.5-sp)*t.Darken*2))
			} else {
				col = Lighten(col, uint8((sp-0.5)*t.Lighten*2))
			}
		}
	}
	return ok, z, dir, col
}
// TraceDeep returns the full set of trace intervals for (x, y) by adapting
// the single-hit Trace via DeepifyTrace.
func (t *ShadowCastingTracer) TraceDeep(x, y float64, ray Vector) (bool, TraceIntervals) {
	return DeepifyTrace(t, x, y, ray)
}
// GetBounds returns the bounds of the underlying source tracer; the shadow
// pass only recolors hits and does not change where the tracer can hit.
func (t *ShadowCastingTracer) GetBounds() Bounds {
	return t.SourceTracer.GetBounds()
}
// Pruned returns a version of this tracer restricted to the given rendering
// parameters. The source is pruned to rp; the light tracer is "pruned" to an
// unbounded region (shadows can be cast from anywhere). Returns nil when the
// source prunes away entirely, and t itself when nothing changed.
func (t *ShadowCastingTracer) Pruned(rp RenderingParameters) Tracer {
	src := t.SourceTracer.Pruned(rp)
	everything := RenderingParameters{0, math.Inf(-1), math.Inf(+1), math.Inf(-1), math.Inf(+1)}
	light := t.LightTracer.Pruned(everything)
	switch {
	case src == nil:
		return nil
	case src == t.SourceTracer && light == t.LightTracer:
		return t
	}
	clone := *t
	clone.SourceTracer = src
	clone.LightTracer = light
	return &clone
}
// NewShadowCastingTracer builds a ShadowCastingTracer around source. It sets
// up a view looking from lightPos toward lightTarget and uses shadowCaster's
// tracer in that view to decide which points are occluded from the light.
func NewShadowCastingTracer(source Tracer, worldView WorldView, shadowCaster Thing, lightPos, lightTarget Vector, lighten, darken float64) *ShadowCastingTracer {
	lightView := WorldView{
		CameraPosition: lightPos,
		LookAtPoint: lightTarget,
		FocalLength: 1, // doesn't matter: only ray directions are used, not projection scale
	}
	lightView.Init()
	lightTracer := shadowCaster.GetTracer(lightView)
	// Pre-compute the light's position as seen from the main camera.
	lightProjection := worldView.ProjectSphere(lightPos, 0)
	result := &ShadowCastingTracer{
		SourceTracer: source,
		LightTracer: lightTracer,
		WorldView: worldView,
		LightView: lightView,
		LightProjection: lightProjection,
		Lighten: lighten,
		Darken: darken,
	}
	return result
} | unicornify/rendering/shadowcastingtracer.go | 0.81134 | 0.424293 | shadowcastingtracer.go | starcoder |
package dpsort
import "sort"
// insertionSortThreshold is the sub-range length below which sortp switches
// to insertion sort instead of recursing further.
const insertionSortThreshold = 27
// dataequal reports whether the elements at indices i and k compare as equal
// under data's ordering (neither is less than the other).
func dataequal(data sort.Interface, i, k int) bool {
	return !data.Less(i, k) && !data.Less(k, i)
}
// insertionSort sorts data[lo..hi] (both bounds inclusive) in place.
func insertionSort(data sort.Interface, lo, hi int) {
	for i := lo + 1; i <= hi; i++ {
		// Sift the element at i leftward to its sorted position.
		k := i
		for k > lo && data.Less(k, k-1) {
			data.Swap(k, k-1)
			k--
		}
	}
}
// Sort sorts data in place using an adaptive dual-pivot quicksort.
// The initial pivot divisor is 3 (pivots sampled near the tercile positions).
func Sort(data sort.Interface) {
	sortp(data, 0, data.Len()-1, 3)
}
// sortp sorts data[left..right] (inclusive) with a dual-pivot quicksort.
// div controls where the two pivots are sampled (at roughly n/div from each
// end) and adapts between recursive calls based on how balanced the previous
// partition came out.
func sortp(data sort.Interface, left, right int, div int) {
	n := right - left
	if n == 0 {
		return
	} else if n < insertionSortThreshold {
		// Small ranges are cheaper to finish with insertion sort.
		insertionSort(data, left, right)
		return
	}
	third := n / div
	// Pivot candidates ("medians"), clamped strictly inside the range.
	m1 := left + third
	m2 := right - third
	if m1 <= left {
		m1 = left + 1
	}
	if m2 >= right {
		m2 = right - 1
	}
	// Move the smaller pivot to left and the larger one to right.
	if data.Less(m1, m2) {
		data.Swap(m1, left)
		data.Swap(m2, right)
	} else {
		data.Swap(m1, right)
		data.Swap(m2, left)
	}
	// Partition pointers: [left+1, less) < pivot1, (great, right-1] > pivot2.
	less := left + 1
	great := right - 1
	// Three-way partition into: < pivot1 | between pivots | > pivot2.
	for k := less; k <= great; k++ {
		if data.Less(k, left) {
			data.Swap(k, less)
			less++
		} else if data.Less(right, k) {
			for k < great && data.Less(right, great) {
				great--
			}
			data.Swap(k, great)
			great--
			if data.Less(k, left) {
				data.Swap(k, less)
				less++
			}
		}
	}
	// A small middle partition suggests skewed pivots; widen sampling next time.
	dist := great - less
	if dist < 13 {
		div++
	}
	// Move the pivots into their final positions.
	data.Swap(less-1, left)
	data.Swap(great+1, right)
	// Recurse on the outer partitions.
	sortp(data, left, less-2, div)
	sortp(data, great+2, right, div)
	// If the middle partition dominates the range, compact elements equal to
	// either pivot toward the edges so they are excluded from the final sort.
	if dist > n-13 && !dataequal(data, left, right) {
		for k := less; k <= great; k++ {
			if dataequal(data, k, left) {
				data.Swap(k, less)
				less++
			} else if dataequal(data, k, right) {
				data.Swap(k, great)
				great--
				if dataequal(data, k, left) {
					data.Swap(k, less)
					less++
				}
			}
		}
	}
	// Sort the middle partition; skipped when the pivots are equal, since
	// then everything between them equals the pivots already.
	if data.Less(left, right) {
		sortp(data, less, great, div)
	}
} | sorts/dpsort/sort.go | 0.708112 | 0.435301 | sort.go | starcoder |
package ast
import (
"fmt"
"github.com/michaelquigley/pfxlog"
"github.com/pkg/errors"
"strings"
)
// NotExprNode implements logical NOT on a wrapped boolean expression
type NotExprNode struct {
expr BoolNode
}
func (node *NotExprNode) Accept(visitor Visitor) {
visitor.VisitNotExprNodeStart(node)
node.expr.Accept(visitor)
visitor.VisitNotExprNodeEnd(node)
}
func (node *NotExprNode) String() string {
return fmt.Sprintf("not (%v)", node.expr)
}
func (node *NotExprNode) GetType() NodeType {
return NodeTypeBool
}
func (node *NotExprNode) EvalBool(s Symbols) bool {
val := node.expr.EvalBool(s)
return !val
}
func (node *NotExprNode) TypeTransformBool(s SymbolTypes) (BoolNode, error) {
return node, transformBools(s, &node.expr)
}
func (node *NotExprNode) IsConst() bool {
return node.expr.IsConst()
}
func NewAndExprNode(left, right BoolNode) *AndExprNode {
return &AndExprNode{
left: left,
right: right,
}
}
// AndExprNode implements logical AND on two wrapped boolean expressions
type AndExprNode struct {
left BoolNode
right BoolNode
}
func (node *AndExprNode) Accept(visitor Visitor) {
visitor.VisitAndExprNodeStart(node)
node.left.Accept(visitor)
node.right.Accept(visitor)
visitor.VisitAndExprNodeEnd(node)
}
func (node *AndExprNode) GetType() NodeType {
return NodeTypeBool
}
func (node *AndExprNode) TypeTransformBool(s SymbolTypes) (BoolNode, error) {
return node, transformBools(s, &node.left, &node.right)
}
func (node *AndExprNode) EvalBool(s Symbols) bool {
if !node.left.EvalBool(s) {
return false
}
return node.right.EvalBool(s)
}
func (node *AndExprNode) String() string {
return fmt.Sprintf("%v && %v", node.left, node.right)
}
func (node *AndExprNode) IsConst() bool {
return false
}
// OrExprNode implements logical OR on two wrapped boolean expressions
type OrExprNode struct {
left BoolNode
right BoolNode
}
func (node *OrExprNode) Accept(visitor Visitor) {
visitor.VisitOrExprNodeStart(node)
node.left.Accept(visitor)
node.right.Accept(visitor)
visitor.VisitOrExprNodeEnd(node)
}
func (node *OrExprNode) GetType() NodeType {
return NodeTypeBool
}
func (node *OrExprNode) TypeTransformBool(s SymbolTypes) (BoolNode, error) {
return node, transformBools(s, &node.left, &node.right)
}
func (node *OrExprNode) EvalBool(s Symbols) bool {
leftResult := node.left.EvalBool(s)
if leftResult {
return true
}
return node.right.EvalBool(s)
}
func (node *OrExprNode) String() string {
return fmt.Sprintf("%v || %v", node.left, node.right)
}
func (node *OrExprNode) IsConst() bool {
return false
}
type SeekOptimizableBoolNode interface {
IsSeekable() bool
EvalBoolWithSeek(s Symbols, cursor TypeSeekableSetCursor) bool
}
type BinaryBoolExprNode struct {
left BoolNode
right BoolNode
op BinaryOp
}
func (node *BinaryBoolExprNode) Accept(visitor Visitor) {
visitor.VisitBinaryBoolExprNodeStart(node)
node.left.Accept(visitor)
node.right.Accept(visitor)
visitor.VisitBinaryBoolExprNodeEnd(node)
}
func (*BinaryBoolExprNode) GetType() NodeType {
return NodeTypeBool
}
func (node *BinaryBoolExprNode) EvalBool(s Symbols) bool {
leftResult := node.left.EvalBool(s)
rightResult := node.right.EvalBool(s)
switch node.op {
case BinaryOpEQ:
return leftResult == rightResult
case BinaryOpNEQ:
return leftResult != rightResult
}
pfxlog.Logger().Errorf("unhandled boolean binary expression type %v", node.op)
return false
}
func (node *BinaryBoolExprNode) String() string {
return fmt.Sprintf("%v %v %v", node.left, binaryOpNames[node.op], node.right)
}
func (node *BinaryBoolExprNode) IsConst() bool {
return false
}
type BinaryDatetimeExprNode struct {
left DatetimeNode
right DatetimeNode
op BinaryOp
}
func (node *BinaryDatetimeExprNode) Accept(visitor Visitor) {
visitor.VisitBinaryDatetimeExprNodeStart(node)
node.left.Accept(visitor)
node.right.Accept(visitor)
visitor.VisitBinaryDatetimeExprNodeEnd(node)
}
func (*BinaryDatetimeExprNode) GetType() NodeType {
return NodeTypeBool
}
func (node *BinaryDatetimeExprNode) EvalBool(s Symbols) bool {
leftResult := node.left.EvalDatetime(s)
rightResult := node.right.EvalDatetime(s)
if leftResult == nil || rightResult == nil {
return false
}
switch node.op {
case BinaryOpEQ:
return leftResult.Equal(*rightResult)
case BinaryOpNEQ:
return !leftResult.Equal(*rightResult)
case BinaryOpLT:
return leftResult.Before(*rightResult)
case BinaryOpLTE:
return !leftResult.After(*rightResult)
case BinaryOpGT:
return leftResult.After(*rightResult)
case BinaryOpGTE:
return !leftResult.Before(*rightResult)
}
pfxlog.Logger().Errorf("unhandled datetime binary expression type %v", node.op)
return false
}
func (node *BinaryDatetimeExprNode) String() string {
return fmt.Sprintf("%v %v %v", node.left, binaryOpNames[node.op], node.right)
}
func (node *BinaryDatetimeExprNode) IsConst() bool {
return false
}
type BinaryFloat64ExprNode struct {
left Float64Node
right Float64Node
op BinaryOp
}
func (node *BinaryFloat64ExprNode) Accept(visitor Visitor) {
visitor.VisitBinaryFloat64ExprNodeStart(node)
node.left.Accept(visitor)
node.right.Accept(visitor)
visitor.VisitBinaryFloat64ExprNodeEnd(node)
}
func (node *BinaryFloat64ExprNode) GetType() NodeType {
return NodeTypeFloat64
}
func (node *BinaryFloat64ExprNode) EvalBool(s Symbols) bool {
leftResult := node.left.EvalFloat64(s)
rightResult := node.right.EvalFloat64(s)
if leftResult == nil || rightResult == nil {
return false
}
switch node.op {
case BinaryOpEQ:
return *leftResult == *rightResult
case BinaryOpNEQ:
return *leftResult != *rightResult
case BinaryOpLT:
return *leftResult < *rightResult
case BinaryOpLTE:
return *leftResult <= *rightResult
case BinaryOpGT:
return *leftResult > *rightResult
case BinaryOpGTE:
return *leftResult >= *rightResult
}
pfxlog.Logger().Errorf("unhandled float64 binary expression type %v", node.op)
return false
}
func (node *BinaryFloat64ExprNode) String() string {
return fmt.Sprintf("%v %v %v", node.left, binaryOpNames[node.op], node.right)
}
func (node *BinaryFloat64ExprNode) IsConst() bool {
return false
}
type BinaryInt64ExprNode struct {
left Int64Node
right Int64Node
op BinaryOp
}
func (node *BinaryInt64ExprNode) Accept(visitor Visitor) {
visitor.VisitBinaryInt64ExprNodeStart(node)
node.left.Accept(visitor)
node.right.Accept(visitor)
visitor.VisitBinaryInt64ExprNodeEnd(node)
}
func (node *BinaryInt64ExprNode) GetType() NodeType {
return NodeTypeBool
}
func (node *BinaryInt64ExprNode) EvalBool(s Symbols) bool {
leftResult := node.left.EvalInt64(s)
rightResult := node.right.EvalInt64(s)
if leftResult == nil || rightResult == nil {
return false
}
switch node.op {
case BinaryOpEQ:
return *leftResult == *rightResult
case BinaryOpNEQ:
return *leftResult != *rightResult
case BinaryOpLT:
return *leftResult < *rightResult
case BinaryOpLTE:
return *leftResult <= *rightResult
case BinaryOpGT:
return *leftResult > *rightResult
case BinaryOpGTE:
return *leftResult >= *rightResult
}
pfxlog.Logger().Errorf("unhandled int64 binary expression type %v", node.op)
return false
}
func (node *BinaryInt64ExprNode) String() string {
return fmt.Sprintf("%v %v %v", node.left, binaryOpNames[node.op], node.right)
}
func (node *BinaryInt64ExprNode) IsConst() bool {
return false
}
type BinaryStringExprNode struct {
left StringNode
right StringNode
op BinaryOp
}
func (node *BinaryStringExprNode) Accept(visitor Visitor) {
visitor.VisitBinaryStringExprNodeStart(node)
node.left.Accept(visitor)
node.right.Accept(visitor)
visitor.VisitBinaryStringExprNodeEnd(node)
}
func (*BinaryStringExprNode) GetType() NodeType {
return NodeTypeBool
}
func (node *BinaryStringExprNode) IsSeekable() bool {
return (node.op == BinaryOpEQ || node.op == BinaryOpNEQ) &&
(node.left.IsConst() || node.right.IsConst())
}
func (node *BinaryStringExprNode) EvalBoolWithSeek(s Symbols, cursor TypeSeekableSetCursor) bool {
if rightResult := node.right.EvalString(s); rightResult != nil {
cursor.SeekToString(*rightResult)
if cursor.IsValid() {
return node.EvalBool(s)
}
}
return false
}
func (node *BinaryStringExprNode) EvalBool(s Symbols) bool {
leftResult := node.left.EvalString(s)
rightResult := node.right.EvalString(s)
if leftResult == nil || rightResult == nil {
return false
}
switch node.op {
case BinaryOpEQ:
return *leftResult == *rightResult
case BinaryOpNEQ:
return *leftResult != *rightResult
case BinaryOpLT:
return *leftResult < *rightResult
case BinaryOpLTE:
return *leftResult <= *rightResult
case BinaryOpGT:
return *leftResult > *rightResult
case BinaryOpGTE:
return *leftResult >= *rightResult
case BinaryOpContains:
return strings.Contains(*leftResult, *rightResult)
case BinaryOpNotContains:
return !strings.Contains(*leftResult, *rightResult)
}
pfxlog.Logger().Errorf("unhandled string binary expression type %v", node.op)
return false
}
func (node *BinaryStringExprNode) String() string {
return fmt.Sprintf("%v %v %v", node.left, binaryOpNames[node.op], node.right)
}
func (node *BinaryStringExprNode) IsConst() bool {
return false
}
type IsNilExprNode struct {
symbol SymbolNode
op BinaryOp
}
func (node *IsNilExprNode) Accept(visitor Visitor) {
visitor.VisitIsNilExprNodeStart(node)
node.symbol.Accept(visitor)
visitor.VisitIsNilExprNodeEnd(node)
}
func (*IsNilExprNode) GetType() NodeType {
return NodeTypeBool
}
func (node *IsNilExprNode) EvalBool(s Symbols) bool {
isNil := s.IsNil(node.symbol.Symbol())
switch node.op {
case BinaryOpEQ:
return isNil
case BinaryOpNEQ:
return !isNil
}
pfxlog.Logger().Errorf("unhandled binary expression type %v", node)
return true
}
func (node *IsNilExprNode) String() string {
return fmt.Sprintf("%v %v null", node.symbol, binaryOpNames[node.op])
}
func (node *IsNilExprNode) IsConst() bool {
return false
}
func NewInt64BetweenOp(nodes []Int64Node) (*Int64BetweenExprNode, error) {
if len(nodes) != 3 {
return nil, errors.Errorf("incorrect number of values provided to Int64BetweenExprNode: %v", len(nodes))
}
return &Int64BetweenExprNode{
left: nodes[0],
lower: nodes[1],
upper: nodes[2],
}, nil
}
type Int64BetweenExprNode struct {
left Int64Node
lower Int64Node
upper Int64Node
}
func (node *Int64BetweenExprNode) Accept(visitor Visitor) {
visitor.VisitInt64BetweenExprNodeStart(node)
node.left.Accept(visitor)
node.lower.Accept(visitor)
node.upper.Accept(visitor)
visitor.VisitInt64BetweenExprNodeEnd(node)
}
func (*Int64BetweenExprNode) GetType() NodeType {
return NodeTypeBool
}
func (node *Int64BetweenExprNode) EvalBool(s Symbols) bool {
leftResult := node.left.EvalInt64(s)
if leftResult == nil {
return false
}
lowerResult := node.lower.EvalInt64(s)
if lowerResult == nil {
return false
}
upperResult := node.upper.EvalInt64(s)
if upperResult == nil {
return false
}
return *leftResult >= *lowerResult && *leftResult < *upperResult
}
func (node *Int64BetweenExprNode) String() string {
return fmt.Sprintf("%v between %v and %v", node.left, node.lower, node.upper)
}
func (node *Int64BetweenExprNode) IsConst() bool {
return false
}
func NewFloat64BetweenOp(nodes []Float64Node) (*Float64BetweenExprNode, error) {
if len(nodes) != 3 {
return nil, errors.Errorf("incorrect number of values provided to Float64BetweenExprNode: %v", len(nodes))
}
return &Float64BetweenExprNode{
left: nodes[0],
lower: nodes[1],
upper: nodes[2],
}, nil
}
type Float64BetweenExprNode struct {
left Float64Node
lower Float64Node
upper Float64Node
}
func (node *Float64BetweenExprNode) Accept(visitor Visitor) {
visitor.VisitFloat64BetweenExprNodeStart(node)
node.left.Accept(visitor)
node.lower.Accept(visitor)
node.upper.Accept(visitor)
visitor.VisitFloat64BetweenExprNodeEnd(node)
}
func (*Float64BetweenExprNode) GetType() NodeType {
return NodeTypeBool
}
func (node *Float64BetweenExprNode) EvalBool(s Symbols) bool {
leftResult := node.left.EvalFloat64(s)
if leftResult == nil {
return false
}
lowerResult := node.lower.EvalFloat64(s)
if lowerResult == nil {
return false
}
upperResult := node.upper.EvalFloat64(s)
if upperResult == nil {
return false
}
return *leftResult >= *lowerResult && *leftResult < *upperResult
}
func (node *Float64BetweenExprNode) String() string {
return fmt.Sprintf("%v between %v and %v", node.left, node.lower, node.upper)
}
func (node *Float64BetweenExprNode) IsConst() bool {
return false
}
type DatetimeBetweenExprNode struct {
left DatetimeNode
lower DatetimeNode
upper DatetimeNode
}
func (node *DatetimeBetweenExprNode) Accept(visitor Visitor) {
visitor.VisitDatetimeBetweenExprNodeStart(node)
node.left.Accept(visitor)
node.lower.Accept(visitor)
node.upper.Accept(visitor)
visitor.VisitDatetimeBetweenExprNodeEnd(node)
}
func (*DatetimeBetweenExprNode) GetType() NodeType {
return NodeTypeBool
}
func (node *DatetimeBetweenExprNode) EvalBool(s Symbols) bool {
leftResult := node.left.EvalDatetime(s)
if leftResult == nil {
return false
}
lowerResult := node.lower.EvalDatetime(s)
if lowerResult == nil {
return false
}
upperResult := node.upper.EvalDatetime(s)
if upperResult == nil {
return false
}
return (leftResult.Equal(*lowerResult) || leftResult.After(*lowerResult)) && leftResult.Before(*upperResult)
}
func (node *DatetimeBetweenExprNode) String() string {
return fmt.Sprintf("%v between %v and %v", node.left, node.lower, node.upper)
}
func (node *DatetimeBetweenExprNode) IsConst() bool {
return false
} | storage/ast/node_expr.go | 0.767864 | 0.491639 | node_expr.go | starcoder |
package iso20022
// TwoLegTransactionDetails1 specifies the details of the first leg in a two
// leg transaction process. The Add* methods lazily allocate the named
// component and return it for population; the Set* methods assign scalar
// values directly (generated builder style for ISO 20022 messages).
type TwoLegTransactionDetails1 struct {
	// Specifies the date/time on which the trade was executed.
	TradeDate *TradeDate1Choice `xml:"TradDt,omitempty"`
	// Unambiguous identification of the reference assigned in the first leg of the transaction.
	OpeningLegIdentification *Max35Text `xml:"OpngLegId,omitempty"`
	// Unambiguous identification of the second leg of the transaction as known by the account owner (or the instructing party acting on its behalf).
	ClosingLegIdentification *Max35Text `xml:"ClsgLegId,omitempty"`
	// Principal amount of a trade (price multiplied by quantity).
	GrossTradeAmount *AmountAndDirection29 `xml:"GrssTradAmt,omitempty"`
	// Identifies other amounts pertaining to the transaction.
	OtherAmounts []*OtherAmounts16 `xml:"OthrAmts,omitempty"`
	// Provides additional information about the second leg in narrative form.
	SecondLegNarrative *Max140Text `xml:"ScndLegNrrtv,omitempty"`
	// Negotiated fixed price of the security to buy it back.
	EndPrice *Price4 `xml:"EndPric,omitempty"`
	// Closing date/time or maturity date/time of the transaction.
	ClosingDate *ClosingDate1Choice `xml:"ClsgDt,omitempty"`
	// Total amount of money to be paid or received in exchange for the securities. The amount includes the principal with any commissions and fees or accrued interest.
	ClosingSettlementAmount *AmountAndDirection5 `xml:"ClsgSttlmAmt,omitempty"`
	// Processing date of the trading session.
	ProcessingDate *TradeDate4Choice `xml:"PrcgDt,omitempty"`
	// Specifies the type of the second leg transaction.
	TwoLegTransactionType *TwoLegTransactionType1Choice `xml:"TwoLegTxTp,omitempty"`
}

// AddTradeDate allocates and returns the TradeDate component.
func (t *TwoLegTransactionDetails1) AddTradeDate() *TradeDate1Choice {
	t.TradeDate = new(TradeDate1Choice)
	return t.TradeDate
}

// SetOpeningLegIdentification sets the opening leg identification text.
func (t *TwoLegTransactionDetails1) SetOpeningLegIdentification(value string) {
	t.OpeningLegIdentification = (*Max35Text)(&value)
}

// SetClosingLegIdentification sets the closing leg identification text.
func (t *TwoLegTransactionDetails1) SetClosingLegIdentification(value string) {
	t.ClosingLegIdentification = (*Max35Text)(&value)
}

// AddGrossTradeAmount allocates and returns the GrossTradeAmount component.
func (t *TwoLegTransactionDetails1) AddGrossTradeAmount() *AmountAndDirection29 {
	t.GrossTradeAmount = new(AmountAndDirection29)
	return t.GrossTradeAmount
}

// AddOtherAmounts appends a new element to OtherAmounts and returns it.
func (t *TwoLegTransactionDetails1) AddOtherAmounts() *OtherAmounts16 {
	newValue := new(OtherAmounts16)
	t.OtherAmounts = append(t.OtherAmounts, newValue)
	return newValue
}

// SetSecondLegNarrative sets the free-form second leg narrative.
func (t *TwoLegTransactionDetails1) SetSecondLegNarrative(value string) {
	t.SecondLegNarrative = (*Max140Text)(&value)
}

// AddEndPrice allocates and returns the EndPrice component.
func (t *TwoLegTransactionDetails1) AddEndPrice() *Price4 {
	t.EndPrice = new(Price4)
	return t.EndPrice
}

// AddClosingDate allocates and returns the ClosingDate component.
func (t *TwoLegTransactionDetails1) AddClosingDate() *ClosingDate1Choice {
	t.ClosingDate = new(ClosingDate1Choice)
	return t.ClosingDate
}

// AddClosingSettlementAmount allocates and returns the ClosingSettlementAmount component.
func (t *TwoLegTransactionDetails1) AddClosingSettlementAmount() *AmountAndDirection5 {
	t.ClosingSettlementAmount = new(AmountAndDirection5)
	return t.ClosingSettlementAmount
}

// AddProcessingDate allocates and returns the ProcessingDate component.
func (t *TwoLegTransactionDetails1) AddProcessingDate() *TradeDate4Choice {
	t.ProcessingDate = new(TradeDate4Choice)
	return t.ProcessingDate
}

// AddTwoLegTransactionType allocates and returns the TwoLegTransactionType component.
func (t *TwoLegTransactionDetails1) AddTwoLegTransactionType() *TwoLegTransactionType1Choice {
	t.TwoLegTransactionType = new(TwoLegTransactionType1Choice)
	return t.TwoLegTransactionType
} | TwoLegTransactionDetails1.go | 0.848941 | 0.495911 | TwoLegTransactionDetails1.go | starcoder |
package shape
import "github.com/oakmound/oak/v3/alg/intgeom"
// Condense finds a set of rectangles that covers the shape.
// Used to return a minimal set of rectangles in an appropriate time.
// It greedily grows a rectangle outward from each still-uncovered in-shape
// point, then removes all covered points before moving on.
// NOTE(review): iteration over remainingSpaces (a map) is randomly ordered,
// so the exact rectangle set can differ between runs for the same shape.
func Condense(sh Shape, w, h int) []intgeom.Rect2 {
	condensed := []intgeom.Rect2{}
	// Record every point inside the shape as not yet covered.
	remainingSpaces := make(map[intgeom.Point2]struct{})
	for x := 0; x < w; x++ {
		for y := 0; y < h; y++ {
			if sh.In(x, y, w, h) {
				remainingSpaces[intgeom.Point2{x, y}] = struct{}{}
			}
		}
	}
	for k := range remainingSpaces {
		topLeft := k
		// w and h here shadow the outer parameters: current rectangle extent.
		w := 0
		h := 0
		// Whether the rectangle can still grow in each direction.
		right := true
		left := true
		up := true
		down := true
		xIncrement := intgeom.Point2{1, 0}
		yIncrement := intgeom.Point2{0, 1}
		xDecrement := intgeom.Point2{-1, 0}
		yDecrement := intgeom.Point2{0, -1}
		for right || left || up || down {
			var toCheck intgeom.Point2
			// Grow right: every cell of the new column must be uncovered.
			if right {
				toCheck = topLeft.Add(intgeom.Point2{w + 1, 0})
				for i := 0; i <= h; i++ {
					if _, ok := remainingSpaces[toCheck]; !ok {
						right = false
						break
					}
					toCheck = toCheck.Add(yIncrement)
				}
				if right {
					w++
				}
			}
			// Grow left: widening leftward also moves the top-left corner.
			if left {
				toCheck = topLeft.Add(intgeom.Point2{-1, 0})
				for i := 0; i <= h; i++ {
					if _, ok := remainingSpaces[toCheck]; !ok {
						left = false
						break
					}
					toCheck = toCheck.Add(yIncrement)
				}
				if left {
					w++
					topLeft = topLeft.Add(xDecrement)
				}
			}
			// Grow up: every cell of the new row must be uncovered.
			if up {
				toCheck = topLeft.Add(intgeom.Point2{0, -1})
				for i := 0; i <= w; i++ {
					if _, ok := remainingSpaces[toCheck]; !ok {
						up = false
						break
					}
					toCheck = toCheck.Add(xIncrement)
				}
				if up {
					h++
					topLeft = topLeft.Add(yDecrement)
				}
			}
			// Grow down.
			if down {
				toCheck = topLeft.Add(intgeom.Point2{0, h + 1})
				for i := 0; i <= w; i++ {
					if _, ok := remainingSpaces[toCheck]; !ok {
						down = false
						break
					}
					toCheck = toCheck.Add(xIncrement)
				}
				if down {
					h++
				}
			}
		}
		condensed = append(condensed, intgeom.NewRect2WH(topLeft.X(), topLeft.Y(), w, h))
		// Remove every point the new rectangle covers.
		for x := topLeft.X(); x <= topLeft.X()+w; x++ {
			for y := topLeft.Y(); y <= topLeft.Y()+h; y++ {
				delete(remainingSpaces, intgeom.Point2{x, y})
			}
		}
	}
	return condensed
} | shape/condense.go | 0.553988 | 0.4474 | condense.go | starcoder |
package testutil
import (
"testing"
"github.com/stretchr/testify/require"
datatransfer "github.com/filecoin-project/go-data-transfer"
"github.com/filecoin-project/go-data-transfer/encoding"
)
//go:generate cbor-gen-for FakeDTType
// FakeDTType is a simple fake voucher type for use with registries in tests.
type FakeDTType struct {
	Data string
}
// Type satisfies registry.Entry by returning this type's fixed identifier.
func (ft FakeDTType) Type() datatransfer.TypeIdentifier {
	return "FakeDTType"
}
// AssertFakeDTVoucher asserts that a data transfer request contains the expected fake data transfer voucher type
func AssertFakeDTVoucher(t *testing.T, request datatransfer.Request, expected *FakeDTType) {
	require.Equal(t, datatransfer.TypeIdentifier("FakeDTType"), request.VoucherType())
	// Decode the voucher payload and compare it to the expected value.
	fakeDTDecoder, err := encoding.NewDecoder(&FakeDTType{})
	require.NoError(t, err)
	decoded, err := request.Voucher(fakeDTDecoder)
	require.NoError(t, err)
	require.Equal(t, expected, decoded)
}
// AssertEqualFakeDTVoucher asserts that two requests have the same fake data
// transfer voucher.
func AssertEqualFakeDTVoucher(t *testing.T, expectedRequest datatransfer.Request, request datatransfer.Request) {
	require.Equal(t, expectedRequest.VoucherType(), request.VoucherType())
	fakeDTDecoder, err := encoding.NewDecoder(&FakeDTType{})
	require.NoError(t, err)
	// Decode each request's voucher independently. Previously both decodes
	// read from `request`, so the comparison below was trivially true and
	// expectedRequest's voucher was never actually checked.
	expectedDecoded, err := expectedRequest.Voucher(fakeDTDecoder)
	require.NoError(t, err)
	decoded, err := request.Voucher(fakeDTDecoder)
	require.NoError(t, err)
	require.Equal(t, expectedDecoded, decoded)
}
// AssertFakeDTVoucherResult asserts that a data transfer response contains the expected fake data transfer voucher result type
func AssertFakeDTVoucherResult(t *testing.T, response datatransfer.Response, expected *FakeDTType) {
	require.Equal(t, datatransfer.TypeIdentifier("FakeDTType"), response.VoucherResultType())
	// Decode the voucher result payload and compare it to the expected value.
	fakeDTDecoder, err := encoding.NewDecoder(&FakeDTType{})
	require.NoError(t, err)
	decoded, err := response.VoucherResult(fakeDTDecoder)
	require.NoError(t, err)
	require.Equal(t, expected, decoded)
}
// AssertEqualFakeDTVoucherResult asserts that two responses have the same
// fake data transfer voucher result.
func AssertEqualFakeDTVoucherResult(t *testing.T, expectedResponse datatransfer.Response, response datatransfer.Response) {
	require.Equal(t, expectedResponse.VoucherResultType(), response.VoucherResultType())
	fakeDTDecoder, err := encoding.NewDecoder(&FakeDTType{})
	require.NoError(t, err)
	// Decode each response's result independently. Previously both decodes
	// read from `response`, so the comparison below was trivially true and
	// expectedResponse's voucher result was never actually checked.
	expectedDecoded, err := expectedResponse.VoucherResult(fakeDTDecoder)
	require.NoError(t, err)
	decoded, err := response.VoucherResult(fakeDTDecoder)
	require.NoError(t, err)
	require.Equal(t, expectedDecoded, decoded)
}
// NewFakeDTType returns a fake dt type with random data
func NewFakeDTType() *FakeDTType {
	// RandomBytes is a sibling test helper; 100 bytes keeps fixtures small.
	return &FakeDTType{Data: string(RandomBytes(100))}
}

// Compile-time check that FakeDTType satisfies datatransfer.Registerable.
var _ datatransfer.Registerable = &FakeDTType{}
package types
// Uints is a slice of uint.
type Uints []uint

// Reset the slice.
func (s *Uints) Reset() {
	*s = []uint{}
}

// Add new elements to the slice.
func (s *Uints) Add(values ...uint) {
	*s = append(*s, values...)
}

// Contains say if "s" contains "values".
//
// Every element of "values" must be present somewhere in "s". The previous
// implementation counted how many elements of "s" matched *any* value, which
// reported false positives when "s" held duplicates (e.g.
// Uints{1, 1}.Contains(1, 2) was true even though 2 is absent).
func (s Uints) Contains(values ...uint) bool {
	for _, value := range values {
		found := false
		for i := range s {
			if s[i] == value {
				found = true
				break
			}
		}
		if !found {
			return false
		}
	}
	return true
}
// ContainsOneOf says if "s" contains one of the "values".
func (s Uints) ContainsOneOf(values ...uint) bool {
	for _, elem := range s {
		for _, candidate := range values {
			if elem == candidate {
				return true
			}
		}
	}
	return false
}
// Copy create a new copy of the slice.
func (s Uints) Copy() Uints {
	dup := make(Uints, len(s))
	copy(dup, s)
	return dup
}
// Diff return the difference between "s" and "s2".
//
// The result is the symmetric difference: elements present in exactly one of
// the two slices. The previous implementation only collected elements of "s"
// missing from "s2" when len(s) >= len(s2), silently dropping them otherwise.
func (s Uints) Diff(s2 Uints) Uints {
	if s.Empty() {
		return s2.Copy()
	} else if s2.Empty() {
		return s.Copy()
	}
	out := Uints{}
	for _, v := range s {
		if !s2.Contains(v) {
			out = append(out, v)
		}
	}
	for _, v := range s2 {
		if !s.Contains(v) {
			out = append(out, v)
		}
	}
	return out
}
// Empty says if the slice is empty.
// A nil slice is considered empty.
func (s Uints) Empty() bool {
	return len(s) == 0
}
// Equal says if "s" and "s2" are equal.
// Two slices are equal when they have the same length and the same
// elements in the same order.
func (s Uints) Equal(s2 Uints) bool {
	if len(s) != len(s2) {
		return false
	}
	for i := range s {
		if s[i] != s2[i] {
			return false
		}
	}
	return true
}
// Find the first element matching the pattern.
// Returns the element and true, or (0, false) when nothing matches.
func (s Uints) Find(matcher func(v uint) bool) (uint, bool) {
	for i := range s {
		if matcher(s[i]) {
			return s[i], true
		}
	}
	return 0, false
}
// FindAll elements matching the pattern.
// Always returns a non-nil (possibly empty) slice.
func (s Uints) FindAll(matcher func(v uint) bool) Uints {
	matches := Uints{}
	for i := range s {
		if matcher(s[i]) {
			matches = append(matches, s[i])
		}
	}
	return matches
}
// First return the value of the first element.
// The boolean is false when the slice is empty.
func (s Uints) First() (uint, bool) {
	if len(s) == 0 {
		return 0, false
	}
	return s[0], true
}
// Get the element "i" and say if it has been found.
//
// Out-of-range indexes (negative, or >= length) report (0, false) instead of
// panicking. The previous bound check (`i > s.Len()`) let both i == len(s)
// and negative indexes fall through to the index expression, which panicked.
func (s Uints) Get(i int) (uint, bool) {
	if i < 0 || i >= len(s) {
		return 0, false
	}
	return s[i], true
}
// Intersect return the intersection between "s" and "s2".
func (s Uints) Intersect(s2 Uints) Uints {
	common := Uints{}
	for i := range s {
		if s2.Contains(s[i]) {
			common = append(common, s[i])
		}
	}
	return common
}
// Last return the value of the last element.
// The boolean is false when the slice is empty.
func (s Uints) Last() (uint, bool) {
	if len(s) == 0 {
		return 0, false
	}
	return s[len(s)-1], true
}
// Len return the size of the slice.
func (s Uints) Len() int {
	return len(s)
}

// Mean of the slice.
// NOTE(review): an empty slice divides zero by zero and yields NaN — confirm
// callers guard against empty input before relying on the result.
func (s Uints) Mean() (mean float64) {
	return float64(s.Sum()) / float64(s.Len())
}

// Sum of the slice.
// The result wraps around silently if the total overflows uint.
func (s Uints) Sum() (sum uint) {
	for _, v := range s {
		sum += v
	}
	return
}
// Take n element and return a new slice.
// NOTE(review): when n is out of range the receiver itself is returned
// (aliasing the original backing array), while the in-range path returns a
// copy — confirm this asymmetry is intended by callers.
func (s Uints) Take(n int) (out Uints) {
	if n < 0 || n > s.Len() {
		return s
	}
	return s[:n].Copy()
}
// ----------------- CONVERTING METHOD -----------------

// S convert Uints into []interface{}
// An empty receiver yields a nil slice.
func (s Uints) S() (out []interface{}) {
	for _, v := range s {
		out = append(out, v)
	}
	return
}

// Uint64s convert Uints to Uint64s
// An empty receiver yields a nil slice.
func (s Uints) Uint64s() (out Uint64s) {
	for _, v := range s {
		out = append(out, uint64(v))
	}
	return
}
package fsm
import (
"reflect"
"golang.org/x/xerrors"
)
// VerifyStateParameters verifies if the Parameters for an FSM specification are sound
func VerifyStateParameters(parameters Parameters) error {
	environmentType := reflect.TypeOf(parameters.Environment)
	stateType := reflect.TypeOf(parameters.StateType)
	// The state key field must exist on the state struct and be comparable,
	// since state keys are used as map keys and compared against transitions.
	stateFieldType, ok := stateType.FieldByName(string(parameters.StateKeyField))
	if !ok {
		return xerrors.Errorf("state type has no field `%s`", parameters.StateKeyField)
	}
	if !stateFieldType.Type.Comparable() {
		return xerrors.Errorf("state field `%s` is not comparable", parameters.StateKeyField)
	}
	// type check state handlers
	for state, stateEntryFunc := range parameters.StateEntryFuncs {
		// Each StateEntryFuncs key must be a valid value for the state key field.
		if !reflect.TypeOf(state).AssignableTo(stateFieldType.Type) {
			return xerrors.Errorf("state key is not assignable to: %s", stateFieldType.Type.Name())
		}
		err := inspectStateEntryFunc(stateEntryFunc, environmentType, stateType)
		if err != nil {
			return err
		}
	}
	return nil
}
// VerifyEventParameters verifies that a list of event builders is sound for
// the given state type: event names must be unique, action callbacks must
// have the correct signature, and every transition source/destination must
// be assignable to the state key field.
func VerifyEventParameters(state StateType, stateKeyField StateKeyField, events []EventBuilder) error {
	stateType := reflect.TypeOf(state)
	stateFieldType, ok := stateType.FieldByName(string(stateKeyField))
	if !ok {
		return xerrors.Errorf("state type has no field `%s`", stateKeyField)
	}
	if !stateFieldType.Type.Comparable() {
		return xerrors.Errorf("state field `%s` is not comparable", stateKeyField)
	}
	callbacks := map[EventName]struct{}{}
	// Build transition map and store sets of all events and states.
	for _, evtIface := range events {
		evt, ok := evtIface.(eventBuilder)
		if !ok {
			errEvt := evtIface.(errBuilder)
			return errEvt.err
		}
		name := evt.name
		_, exists := callbacks[name]
		if exists {
			return xerrors.Errorf("Duplicate event name `%+v`", name)
		}
		// Record the name so a later duplicate is actually detected; the
		// original code never populated the map, so the check above could
		// never fire.
		callbacks[name] = struct{}{}
		err := inspectActionFunc(name, evt.action, stateType)
		if err != nil {
			return err
		}
		for src, dst := range evt.transitionsSoFar {
			// A recordEvent destination only records the event, so it is
			// exempt from the state-key assignability check.
			_, justRecord := dst.(recordEvent)
			if dst != nil && !justRecord && !reflect.TypeOf(dst).AssignableTo(stateFieldType.Type) {
				return xerrors.Errorf("event `%+v` destination type is not assignable to: %s", name, stateFieldType.Type.Name())
			}
			if src != nil && !reflect.TypeOf(src).AssignableTo(stateFieldType.Type) {
				return xerrors.Errorf("event `%+v` source type is not assignable to: %s", name, stateFieldType.Type.Name())
			}
		}
	}
	return nil
}
// inspectActionFunc type checks an event's action callback: when present it
// must be a function taking a pointer to the state as its first argument and
// returning exactly one value that is an error.
func inspectActionFunc(name EventName, action ActionFunc, stateType reflect.Type) error {
	if action == nil {
		return nil
	}
	atType := reflect.TypeOf(action)
	if atType.Kind() != reflect.Func {
		return xerrors.Errorf("event `%+v` has a callback that is not a function", name)
	}
	if atType.NumIn() < 1 {
		return xerrors.Errorf("event `%+v` has a callback that does not take the state", name)
	}
	if !reflect.PtrTo(stateType).AssignableTo(atType.In(0)) {
		return xerrors.Errorf("event `%+v` has a callback that does not take the state", name)
	}
	// The single return value must be assignable to the error interface.
	// The original check was not negated and compared against *error (no
	// .Elem()), so a callback with a bad return type was never rejected;
	// this mirrors the correct check in inspectStateEntryFunc.
	if atType.NumOut() != 1 || !atType.Out(0).AssignableTo(reflect.TypeOf(new(error)).Elem()) {
		return xerrors.Errorf("event `%+v` callback should return exactly one param that is an error", name)
	}
	return nil
}
// inspectStateEntryFunc type checks a state entry handler: it must have the
// shape func(Context, <environment>, <state>) error.
func inspectStateEntryFunc(stateEntryFunc interface{}, environmentType reflect.Type, stateType reflect.Type) error {
	stateEntryFuncType := reflect.TypeOf(stateEntryFunc)
	if stateEntryFuncType.Kind() != reflect.Func {
		return xerrors.Errorf("handler for state is not a function")
	}
	if stateEntryFuncType.NumIn() != 3 {
		return xerrors.Errorf("handler for state does not take correct number of arguments")
	}
	// First parameter: the fsm Context interface.
	if !reflect.TypeOf((*Context)(nil)).Elem().AssignableTo(stateEntryFuncType.In(0)) {
		return xerrors.Errorf("handler for state does not match context parameter")
	}
	// Second parameter: the environment the FSM was configured with.
	if !environmentType.AssignableTo(stateEntryFuncType.In(1)) {
		return xerrors.Errorf("handler for state does not match environment parameter")
	}
	// Third parameter: the state struct, passed by value.
	if !stateType.AssignableTo(stateEntryFuncType.In(2)) {
		return xerrors.Errorf("handler for state does not match state parameter")
	}
	// Exactly one return value, assignable to the error interface.
	if stateEntryFuncType.NumOut() != 1 || !stateEntryFuncType.Out(0).AssignableTo(reflect.TypeOf(new(error)).Elem()) {
		return xerrors.Errorf("handler for state does not return an error")
	}
	return nil
}
package should
import "github.com/smartystreets/assertions"
// Aliases for the assertion functions in github.com/smartystreets/assertions,
// allowing the more fluent `should.Equal`, `should.BeNil`, etc. spelling in
// GoConvey-style tests.
var (
	Equal          = assertions.ShouldEqual
	NotEqual       = assertions.ShouldNotEqual
	AlmostEqual    = assertions.ShouldAlmostEqual
	NotAlmostEqual = assertions.ShouldNotAlmostEqual
	EqualJSON      = assertions.ShouldEqualJSON
	Resemble       = assertions.ShouldResemble
	NotResemble    = assertions.ShouldNotResemble
	PointTo        = assertions.ShouldPointTo
	NotPointTo     = assertions.ShouldNotPointTo
	BeNil          = assertions.ShouldBeNil
	NotBeNil       = assertions.ShouldNotBeNil
	BeTrue         = assertions.ShouldBeTrue
	BeFalse        = assertions.ShouldBeFalse
	BeZeroValue    = assertions.ShouldBeZeroValue
	NotBeZeroValue = assertions.ShouldNotBeZeroValue

	BeGreaterThan          = assertions.ShouldBeGreaterThan
	BeGreaterThanOrEqualTo = assertions.ShouldBeGreaterThanOrEqualTo
	BeLessThan             = assertions.ShouldBeLessThan
	BeLessThanOrEqualTo    = assertions.ShouldBeLessThanOrEqualTo
	BeBetween              = assertions.ShouldBeBetween
	NotBeBetween           = assertions.ShouldNotBeBetween
	BeBetweenOrEqual       = assertions.ShouldBeBetweenOrEqual
	NotBeBetweenOrEqual    = assertions.ShouldNotBeBetweenOrEqual

	Contain       = assertions.ShouldContain
	NotContain    = assertions.ShouldNotContain
	ContainKey    = assertions.ShouldContainKey
	NotContainKey = assertions.ShouldNotContainKey
	BeIn          = assertions.ShouldBeIn
	NotBeIn       = assertions.ShouldNotBeIn
	BeEmpty       = assertions.ShouldBeEmpty
	NotBeEmpty    = assertions.ShouldNotBeEmpty
	HaveLength    = assertions.ShouldHaveLength

	StartWith           = assertions.ShouldStartWith
	NotStartWith        = assertions.ShouldNotStartWith
	EndWith             = assertions.ShouldEndWith
	NotEndWith          = assertions.ShouldNotEndWith
	BeBlank             = assertions.ShouldBeBlank
	NotBeBlank          = assertions.ShouldNotBeBlank
	ContainSubstring    = assertions.ShouldContainSubstring
	NotContainSubstring = assertions.ShouldNotContainSubstring

	EqualWithout   = assertions.ShouldEqualWithout
	EqualTrimSpace = assertions.ShouldEqualTrimSpace

	Panic        = assertions.ShouldPanic
	NotPanic     = assertions.ShouldNotPanic
	PanicWith    = assertions.ShouldPanicWith
	NotPanicWith = assertions.ShouldNotPanicWith

	HaveSameTypeAs    = assertions.ShouldHaveSameTypeAs
	NotHaveSameTypeAs = assertions.ShouldNotHaveSameTypeAs
	Implement         = assertions.ShouldImplement
	NotImplement      = assertions.ShouldNotImplement

	HappenBefore         = assertions.ShouldHappenBefore
	HappenOnOrBefore     = assertions.ShouldHappenOnOrBefore
	HappenAfter          = assertions.ShouldHappenAfter
	HappenOnOrAfter      = assertions.ShouldHappenOnOrAfter
	HappenBetween        = assertions.ShouldHappenBetween
	HappenOnOrBetween    = assertions.ShouldHappenOnOrBetween
	NotHappenOnOrBetween = assertions.ShouldNotHappenOnOrBetween
	HappenWithin         = assertions.ShouldHappenWithin
	NotHappenWithin      = assertions.ShouldNotHappenWithin
	BeChronological      = assertions.ShouldBeChronological

	BeError = assertions.ShouldBeError
)
package api
import (
"fmt"
"image"
"image/color"
"io/ioutil"
"math"
"math/rand"
"github.com/fogleman/gg"
"golang.org/x/image/font/opentype"
)
// Params defines the parameters used to draw a vizualization
type Params struct {
	// N is the number of chains drawn; Max is the exclusive upper bound for
	// each chain's random starting value (see rand.Int31n in Draw).
	N   int
	Max int32
	// Output is written to images/<Filename>.<Format>.
	Filename string
	Format   ImageFormat
	// Canvas size in pixels.
	Width  int
	Height int
	// Per-step heading changes in DrawLine: even node values add
	// TiltAngle*TiltAngleEvenAdjuster degrees, odd values subtract
	// TiltAngle*TiltAngleOddAdjuster degrees.
	TiltAngle             float64
	TiltAngleEvenAdjuster float64
	TiltAngleOddAdjuster  float64
	// Segment geometry: length scale, stroke width, and the exponent that
	// shortens segments for large node values (see DrawLine).
	LineLengthMultiplier float64
	LineWidth            float64
	LineShorteningPower  float64
	// Colors for the background, chains, signature text, and the optional
	// radial gradient stops (empty slice disables the gradient).
	BackgroundColor color.RGBA
	LineColor       color.RGBA
	TextColor       color.RGBA
	GradientColors  []color.RGBA
	// Optional background grid.
	GridColor     color.RGBA
	GridLineWidth float64
	GridColumns   int
	GridRows      int
	// Starting point of every chain.
	// NOTE(review): StartAngle is declared but Draw seeds DrawLine with a
	// fixed angle of 0 — confirm whether this field should be used instead.
	StartX     float64
	StartY     float64
	StartAngle float64
}
// ImageFormat selects the encoder used when Draw saves the visualization.
type ImageFormat string

const (
	ImageFormatPNG ImageFormat = "png"
	ImageFormatJPG ImageFormat = "jpg"
)
// Draw creates a visualization with the given params
//
// It paints the background (flat color plus optional radial gradient), an
// optional grid, p.N randomly-seeded chains, and a signature, then saves the
// result to images/<Filename>.<Format> and returns the rendered image.
func Draw(p *Params) (image.Image, error) {
	// Init
	dc := gg.NewContext(p.Width, p.Height)
	dc.SetRGBA(rgba(p.BackgroundColor.RGBA()))
	dc.Clear()

	// Gradient: radial, centered on the canvas, one stop per configured color.
	if len(p.GradientColors) > 0 {
		grad := gg.NewRadialGradient(float64(p.Width)/2.0, float64(p.Height)/2.0, 0, float64(p.Width)/2.0, float64(p.Height)/2.0, float64(p.Width)*2)
		for i, c := range p.GradientColors {
			grad.AddColorStop(float64(i), c)
		}
		dc.SetFillStyle(grad)
		dc.DrawRectangle(0, 0, float64(p.Width), float64(p.Height))
		dc.Fill()
	}

	// Grid. Cell sizes are loop-invariant, so compute them once per axis; the
	// > 0 guards preserve the original "no grid when unset" behavior while
	// keeping the hoisted division safe.
	dc.SetColor(p.GridColor)
	dc.SetLineWidth(p.GridLineWidth)
	if columns := p.GridColumns; columns > 0 {
		width := p.Width / columns
		for i := 1; i <= columns; i++ {
			dc.DrawLine(float64(i*width), 0, float64(i*width), float64(p.Height))
			dc.Stroke()
		}
	}
	if rows := p.GridRows; rows > 0 {
		height := p.Height / rows
		for i := 1; i <= rows; i++ {
			dc.DrawLine(0, float64(i*height), float64(p.Width), float64(i*height))
			dc.Stroke()
		}
	}

	// Lines: each iteration builds one chain from a random seed (Calc and
	// UpdateGraph are sibling helpers) and draws it from the start point.
	// NOTE(review): DrawLine is seeded with angle 0 rather than p.StartAngle —
	// confirm whether the field should be wired through.
	for i := 1; i <= p.N; i++ {
		var graph *Node
		chain := []int{}
		memo := make(map[int]*Node)
		Calc(int(rand.Int31n(p.Max)), &chain, memo)
		graph = UpdateGraph(chain, graph, memo)
		dc.SetRGBA(rgba(p.LineColor.RGBA()))
		dc.SetLineWidth(float64(p.LineWidth))
		DrawLine(p.StartX, p.StartY, 0, graph, dc, p)
		dc.Stroke()
	}

	// Signature: the font file is read and parsed on every call, which is
	// acceptable for a batch drawing tool.
	dc.SetRGBA(rgba(p.TextColor.RGBA()))
	fontBytes, err := ioutil.ReadFile("Magis Authentic.ttf")
	if err != nil {
		return nil, err
	}
	f, err := opentype.Parse(fontBytes)
	if err != nil {
		return nil, err
	}
	face, err := opentype.NewFace(f, &opentype.FaceOptions{
		Size: 100,
		DPI:  150,
		// Hinting: font.HintingFull,
	})
	if err != nil {
		return nil, err
	}
	dc.SetFontFace(face)
	dc.DrawString("No -> 3n + 1", float64(p.Width)-1250, float64(p.Height)-500)
	dc.DrawString("Ne -> n/2", float64(p.Width)-1250, float64(p.Height)-270)

	// Save
	filename := fmt.Sprintf("images/%s.%s", p.Filename, string(p.Format))
	switch p.Format {
	case ImageFormatPNG:
		if err := dc.SavePNG(filename); err != nil {
			return nil, err
		}
	case ImageFormatJPG:
		if err := gg.SaveJPG(filename, dc.Image(), 100); err != nil {
			return nil, err
		}
	}
	return dc.Image(), nil
}
// DrawLine recursively draws a line from a given starting point in a graph
//
// Even node values tilt the heading by +TiltAngle*TiltAngleEvenAdjuster
// degrees, odd values by -TiltAngle*TiltAngleOddAdjuster. The segment length
// is value/(1+value^LineShorteningPower) scaled by LineLengthMultiplier, so
// larger node values draw shorter segments. Recursion follows both parent
// links, drawing the whole subtree rooted at node; the caller strokes the
// accumulated path.
func DrawLine(x float64, y float64, angle float64, node *Node, dc *gg.Context, p *Params) {
	if node.Value%2 == 0 {
		angle += p.TiltAngle * p.TiltAngleEvenAdjuster
	} else {
		angle -= p.TiltAngle * p.TiltAngleOddAdjuster
	}
	lineLength := (float64(node.Value) / (1 + math.Pow(float64(node.Value), p.LineShorteningPower))) * p.LineLengthMultiplier
	// Degrees to radians; the -1 on the y component flips the axis because
	// canvas y grows downward, so positive angles point up.
	rad := angle * (math.Pi / 180)
	x2 := x + lineLength*math.Cos(rad)
	y2 := y + lineLength*math.Sin(rad)*-1
	dc.DrawLine(x, y, x2, y2)
	if node.EvenParent != nil {
		DrawLine(x2, y2, angle, node.EvenParent, dc, p)
	}
	if node.OddParent != nil {
		DrawLine(x2, y2, angle, node.OddParent, dc, p)
	}
}
// rgba converts the four channels returned by color.Color.RGBA — which are
// 16-bit values in [0, 0xffff] — into the [0, 1] floats gg's SetRGBA expects.
//
// The previous implementation divided by 255, but Color.RGBA returns 16-bit
// channels, so every channel overshot the [0, 1] range by a factor of ~257
// and any non-zero channel effectively rendered at full intensity.
func rgba(r, g, b, a uint32) (float64, float64, float64, float64) {
	return float64(r) / 65535.0, float64(g) / 65535.0, float64(b) / 65535.0, float64(a) / 65535.0
}
package grid
import (
"math"
"github.com/forestgiant/eff"
"github.com/forestgiant/eff/util"
)
// Grid eff.Shape container that places children in a grid pattern
type Grid struct {
	eff.Shape
	rows    int // number of layout rows
	cols    int // number of layout columns
	padding int // gap, in pixels, around and between cells
	// cellHeight, when > 0, overrides the height derived from the grid rect
	// and lets the grid grow vertically to fit all children (see updateRect).
	cellHeight int
}
// rectForIndex computes the cell rectangle for the child at the given index,
// laying children out row-major with `padding` pixels around every cell.
// Returns the zero rect when rows or cols is unset.
func (g *Grid) rectForIndex(index int) eff.Rect {
	// Round instead of truncating so cells tile the available space evenly.
	roundDivide := func(v1 int, v2 int) int {
		return util.RoundToInt(float64(v1) / float64(v2))
	}
	r := eff.Rect{}
	if g.rows == 0 || g.cols == 0 {
		return r
	}
	row := index / g.cols
	col := index % g.cols
	// Cell sizes: grid extent minus all padding gaps (cols+1 gaps across,
	// rows+1 gaps down), split evenly between the cells.
	cellWidth := roundDivide(g.Rect().W-(g.padding*(g.cols+1)), g.cols)
	cellHeight := roundDivide(g.Rect().H-(g.padding*(g.rows+1)), g.rows)
	// A fixed cell height overrides the derived one.
	if g.cellHeight > 0 {
		cellHeight = g.cellHeight
	}
	// Clamp to at least one pixel so degenerate grids still yield valid rects.
	if cellWidth <= 0 {
		cellWidth = 1
	}
	if cellHeight <= 0 {
		cellHeight = 1
	}
	r.X = col*cellWidth + (g.padding * (col + 1))
	r.Y = row*cellHeight + (g.padding * (row + 1))
	r.W = cellWidth
	r.H = cellHeight
	return r
}
// updateGrid re-lays out every child into its computed cell; it is a no-op
// when the grid has no children or its dimensions are unset.
func (g *Grid) updateGrid() {
	if g.rows == 0 || g.cols == 0 || len(g.Children()) == 0 {
		return
	}
	for idx, child := range g.Children() {
		child.SetRect(g.rectForIndex(idx))
	}
}
// updateRect grows the grid vertically so a fixed cell height can hold all
// children; it is a no-op unless cellHeight is set.
func (g *Grid) updateRect() {
	if g.cellHeight > 0 && g.rows > 0 {
		// NOTE(review): the +1 sizes the grid for one more child than it
		// currently holds — presumably room for the child AddChild is about
		// to append; confirm this is intended when called from RemoveChild.
		// NOTE(review): the guard checks g.rows but divides by g.cols —
		// confirm g.cols cannot be zero here.
		rowCount := int(math.Ceil(float64((len(g.Children()) + 1)) / float64(g.cols)))
		newHeight := rowCount*(g.cellHeight) + ((rowCount + 1) * g.padding)
		g.SetRect(eff.Rect{
			X: g.Rect().X,
			Y: g.Rect().Y,
			W: g.Rect().W,
			H: newHeight,
		})
		g.RedrawChildren()
	}
}
// AddChild adds a child to the grid and places it in the correct spot
func (g *Grid) AddChild(c eff.Drawable) error {
	// Grow the grid first (fixed-cell-height grids only), then place the new
	// child in the next free cell before handing it to the embedded Shape.
	g.updateRect()
	c.SetRect(g.rectForIndex(len(g.Children())))
	return g.Shape.AddChild(c)
}
// RemoveChild removes a child from the grid
// and compacts the remaining children into the freed cells.
func (g *Grid) RemoveChild(c eff.Drawable) error {
	if err := g.Shape.RemoveChild(c); err != nil {
		return err
	}
	g.updateRect()
	g.updateGrid()
	return nil
}
// SetRect sets the rectangle for the grid
//
// When the size changes, children are re-laid out against the new rect.
// The original implementation called updateGrid *before* storing the new
// rect, so cells were computed from the stale dimensions.
func (g *Grid) SetRect(r eff.Rect) {
	sizeChanged := r.W != g.Rect().W || r.H != g.Rect().H
	g.Shape.SetRect(r)
	if sizeChanged {
		g.updateGrid()
	}
}
// Rows returns the row count of the grid
func (g *Grid) Rows() int {
	return g.rows
}

// SetRows sets the row count for the grid, updates all children
// (existing children are re-laid out immediately).
func (g *Grid) SetRows(r int) {
	g.rows = r
	g.updateGrid()
}

// Cols returns the column count of the grid
func (g *Grid) Cols() int {
	return g.cols
}

// SetCols sets the column count for the grid, updates all children
// (existing children are re-laid out immediately).
func (g *Grid) SetCols(c int) {
	g.cols = c
	g.updateGrid()
}
// NewGrid creates a new grid instance, the cellHeight is an optional override of the derived cellHeight (grid.Rect().H/grid.Rows()), this will grow the grid to hold all children
func NewGrid(rows int, cols int, padding int, cellHeight int) *Grid {
g := &Grid{}
g.rows = rows
g.cols = cols
g.padding = padding
g.cellHeight = cellHeight
g.SetBackgroundColor(eff.White())
return g
} | component/grid/grid.go | 0.83363 | 0.556761 | grid.go | starcoder |
// Package images provides template functions for manipulating images.
package images
import (
"github.com/disintegration/gift"
"github.com/spf13/cast"
)
// Increment for re-generation of images using these filters.
const filterAPIVersion = 0

// Filters is the namespace exposing the image filter constructors below.
type Filters struct{}

// Overlay creates a filter that overlays src at position x y.
func (*Filters) Overlay(src ImageSource, x, y interface{}) gift.Filter {
	return filter{
		// The source's key participates in the options so overlays are
		// regenerated when the overlaid image changes.
		Options: newFilterOpts(src.Key(), x, y),
		Filter:  overlayFilter{src: src, x: cast.ToInt(x), y: cast.ToInt(y)},
	}
}
// Brightness creates a filter that changes the brightness of an image.
// The percentage parameter must be in range (-100, 100).
//
// Parameters throughout these constructors are interface{} so templates can
// pass strings or numbers; they are coerced via the cast package.
func (*Filters) Brightness(percentage interface{}) gift.Filter {
	return filter{
		Options: newFilterOpts(percentage),
		Filter:  gift.Brightness(cast.ToFloat32(percentage)),
	}
}

// ColorBalance creates a filter that changes the color balance of an image.
// The percentage parameters for each color channel (red, green, blue) must be in range (-100, 500).
func (*Filters) ColorBalance(percentageRed, percentageGreen, percentageBlue interface{}) gift.Filter {
	return filter{
		Options: newFilterOpts(percentageRed, percentageGreen, percentageBlue),
		Filter:  gift.ColorBalance(cast.ToFloat32(percentageRed), cast.ToFloat32(percentageGreen), cast.ToFloat32(percentageBlue)),
	}
}

// Colorize creates a filter that produces a colorized version of an image.
// The hue parameter is the angle on the color wheel, typically in range (0, 360).
// The saturation parameter must be in range (0, 100).
// The percentage parameter specifies the strength of the effect, it must be in range (0, 100).
func (*Filters) Colorize(hue, saturation, percentage interface{}) gift.Filter {
	return filter{
		Options: newFilterOpts(hue, saturation, percentage),
		Filter:  gift.Colorize(cast.ToFloat32(hue), cast.ToFloat32(saturation), cast.ToFloat32(percentage)),
	}
}

// Contrast creates a filter that changes the contrast of an image.
// The percentage parameter must be in range (-100, 100).
func (*Filters) Contrast(percentage interface{}) gift.Filter {
	return filter{
		Options: newFilterOpts(percentage),
		Filter:  gift.Contrast(cast.ToFloat32(percentage)),
	}
}

// Gamma creates a filter that performs a gamma correction on an image.
// The gamma parameter must be positive. Gamma = 1 gives the original image.
// Gamma less than 1 darkens the image and gamma greater than 1 lightens it.
func (*Filters) Gamma(gamma interface{}) gift.Filter {
	return filter{
		Options: newFilterOpts(gamma),
		Filter:  gift.Gamma(cast.ToFloat32(gamma)),
	}
}

// GaussianBlur creates a filter that applies a gaussian blur to an image.
func (*Filters) GaussianBlur(sigma interface{}) gift.Filter {
	return filter{
		Options: newFilterOpts(sigma),
		Filter:  gift.GaussianBlur(cast.ToFloat32(sigma)),
	}
}
// Grayscale creates a filter that produces a grayscale version of an image.
// Parameter-free filters carry no options, so only filterAPIVersion affects
// their cache key.
func (*Filters) Grayscale() gift.Filter {
	return filter{
		Filter: gift.Grayscale(),
	}
}

// Hue creates a filter that rotates the hue of an image.
// The hue angle shift is typically in range -180 to 180.
func (*Filters) Hue(shift interface{}) gift.Filter {
	return filter{
		Options: newFilterOpts(shift),
		Filter:  gift.Hue(cast.ToFloat32(shift)),
	}
}

// Invert creates a filter that negates the colors of an image.
func (*Filters) Invert() gift.Filter {
	return filter{
		Filter: gift.Invert(),
	}
}

// Pixelate creates a filter that applies a pixelation effect to an image.
func (*Filters) Pixelate(size interface{}) gift.Filter {
	return filter{
		Options: newFilterOpts(size),
		Filter:  gift.Pixelate(cast.ToInt(size)),
	}
}

// Saturation creates a filter that changes the saturation of an image.
func (*Filters) Saturation(percentage interface{}) gift.Filter {
	return filter{
		Options: newFilterOpts(percentage),
		Filter:  gift.Saturation(cast.ToFloat32(percentage)),
	}
}

// Sepia creates a filter that produces a sepia-toned version of an image.
func (*Filters) Sepia(percentage interface{}) gift.Filter {
	return filter{
		Options: newFilterOpts(percentage),
		Filter:  gift.Sepia(cast.ToFloat32(percentage)),
	}
}

// Sigmoid creates a filter that changes the contrast of an image using a sigmoidal function and returns the adjusted image.
// It's a non-linear contrast change useful for photo adjustments as it preserves highlight and shadow detail.
func (*Filters) Sigmoid(midpoint, factor interface{}) gift.Filter {
	return filter{
		Options: newFilterOpts(midpoint, factor),
		Filter:  gift.Sigmoid(cast.ToFloat32(midpoint), cast.ToFloat32(factor)),
	}
}

// UnsharpMask creates a filter that sharpens an image.
// The sigma parameter is used in a gaussian function and affects the radius of effect.
// Sigma must be positive. Sharpen radius roughly equals 3 * sigma.
// The amount parameter controls how much darker and how much lighter the edge borders become. Typically between 0.5 and 1.5.
// The threshold parameter controls the minimum brightness change that will be sharpened. Typically between 0 and 0.05.
func (*Filters) UnsharpMask(sigma, amount, threshold interface{}) gift.Filter {
	return filter{
		Options: newFilterOpts(sigma, amount, threshold),
		Filter:  gift.UnsharpMask(cast.ToFloat32(sigma), cast.ToFloat32(amount), cast.ToFloat32(threshold)),
	}
}
// filter pairs a gift.Filter with the option values it was built from, so
// those values can participate in image cache keys.
type filter struct {
	Options filterOpts
	gift.Filter
}

// For cache-busting.
type filterOpts struct {
	Version int
	Vals    interface{}
}

// newFilterOpts captures the raw option values together with the current
// filterAPIVersion, so bumping the version invalidates previously generated
// images.
func newFilterOpts(vals ...interface{}) filterOpts {
	return filterOpts{
		Version: filterAPIVersion,
		Vals:    vals,
	}
}
package models
import (
"strings"
)
// MapZebedeeDataToSearchDataImport Performs default mapping of zebedee data to a SearchDataImport struct.
// It also optionally takes a limit which truncates the keywords to the desired amount. This value can be -1 for no
// truncation.
func MapZebedeeDataToSearchDataImport(zebedeeData ZebedeeData, keywordsLimit int) SearchDataImport {
searchData := SearchDataImport{
UID: zebedeeData.Description.Title,
DataType: zebedeeData.DataType,
CDID: zebedeeData.Description.CDID,
DatasetID: zebedeeData.Description.DatasetID,
Keywords: RectifyKeywords(zebedeeData.Description.Keywords, keywordsLimit),
MetaDescription: zebedeeData.Description.MetaDescription,
Summary: zebedeeData.Description.Summary,
ReleaseDate: zebedeeData.Description.ReleaseDate,
Title: zebedeeData.Description.Title,
}
return searchData
}
// RectifyKeywords sanitises a slice of keywords, splitting any that contain commas into seperate keywords and trimming
// any whitespace. It also optionally takes a limit which truncates the keywords to the desired amount. This value can
// be -1 for no truncation.
func RectifyKeywords(keywords []string, keywordsLimit int) []string {
var strArray []string
rectifiedKeywords := make([]string, 0)
if keywordsLimit == 0 {
return []string{""}
}
for i := range keywords {
strArray = strings.Split(keywords[i], ",")
for j := range strArray {
keyword := strings.TrimSpace(strArray[j])
rectifiedKeywords = append(rectifiedKeywords, keyword)
}
}
if (len(rectifiedKeywords) < keywordsLimit) || (keywordsLimit == -1) {
return rectifiedKeywords
}
return rectifiedKeywords[:keywordsLimit]
}
// MapDatasetVersionToSearchDataImport performs default mapping of datasetAPI data to a version metadata struct.
func MapVersionMetadataToSearchDataImport(cmdData CMDData) SearchDataImport {
versionMetaData := SearchDataImport{
UID: cmdData.UID,
ReleaseDate: cmdData.VersionDetails.ReleaseDate,
Keywords: cmdData.DatasetDetails.Keywords,
MetaDescription: cmdData.DatasetDetails.Description,
Title: cmdData.DatasetDetails.Title,
}
return versionMetaData
} | models/mapper.go | 0.749087 | 0.435241 | mapper.go | starcoder |
package gostat
import "fmt"
// bucket keep track of segmented data
type bucket struct {
// used later for statistics
sum, sum2 float64
// center and width of bucket. Bucket contains from c - w/2 to c+ w/2, both ends INCLUDED.
// There is always at leaset a data point on each end, unless the bucket is empty.
c, w float64
// nb of data points in bucket
n int
}
func (b bucket) Header() string {
return "[ From\t\tCenter\t\tTO\t ] =>\t n\t mean\t\t sigma\t\t density\t width\tsurface(n*w)\t"
}
func (b *bucket) String() string {
return fmt.Sprintf("[%10.3f\t%10.3f\t%10.3f] => %d\t%10.3f\t%10.3f\t%10.3f\t%10.3f\t%10.1f", b.low(), b.c, b.high(), b.n, b.mean(), b.sigma(), b.density(), b.w, float64(b.n)*b.w)
}
func (b *bucket) add(data float64) {
b.sum += data
b.sum2 += data * data
b.n += 1
}
// low is lower limit
func (b bucket) low() float64 {
return b.c - b.w/2.
}
// high is higher limit
func (b bucket) high() float64 {
return b.c + b.w/2.
}
func (b bucket) density() float64 {
if b.w == 0. {
return 0.
}
return float64(b.n) / b.w
}
func (b bucket) mean() float64 {
return b.sum / float64(b.n)
}
func (b bucket) sigma() float64 {
return b.sum2/float64(b.n) - b.mean()*b.mean()
}
// test if data can fit in this bucket ?
func (b bucket) contains(d float64) bool {
return (d >= b.low() && d <= b.high())
}
// NRepart gives an estimate f the number of data points that are below x (special rounding for x = c), assuming a GAUSSIAN NORMAL law.
func (b bucket) NRepart(x float64) float64 {
if x < b.low() {
return .0
}
if x > b.high() {
return float64(b.n)
}
// special case for dirac distribution
if b.w == 0 && x == b.c {
return float64(b.n) / 2.
}
// Otherwise, assume a normal distribution with given mean and sigma,
// and normalize to account for the fact we are using a truncated normal law.
return float64(b.n) *
PHI2(
(b.low()-b.mean())/b.sigma(),
(x-b.mean())/b.sigma()) /
PHI2(
(b.low()-b.mean())/b.sigma(),
(b.high()-b.mean())/b.sigma())
}
// =======================================
// buckets are sorted based on their center.
type buckets []bucket
func (bb buckets) Len() int { return len(bb) }
func (bb buckets) Swap(i, j int) { bb[i], bb[j] = bb[j], bb[i] }
func (bb buckets) Less(i, j int) bool { return bb[i].c < bb[j].c }
// eval evaluate the cost of merging i with i+1.
// Cost should be minimum.
func (bb buckets) eval(i int) float64 {
w1, w2 := bb[i].w, bb[i+1].w
w := bb[i+1].high() - bb[i].low()
n1, n2 := float64(bb[i].n), float64(bb[i+1].n)
n := n1 + n2
return n*n*w*w - n1*n1*w1*w1 - n2*n2*w2*w2
}
// merge bucket i with i+1.
func (bb buckets) merge(i int) buckets {
w := bb[i+1].high() - bb[i].low()
c := (bb[i+1].high() + bb[i].low()) / 2.
bb[i].c = c
bb[i].w = w
bb[i].sum += bb[i+1].sum
bb[i].sum2 += bb[i+1].sum2
bb[i].n += bb[i+1].n
if i+2 < len(bb) {
return append(bb[:i+1], bb[i+2:]...)
} else {
return bb[:i+1]
}
} | bucket.go | 0.713132 | 0.591635 | bucket.go | starcoder |
package mod256
// The ExpBase type contains lookup tables allowing fast repeated modular exponentiation with the same base value.
type ExpBase struct {
h, l [16]Residue
}
// FromResidue initialises ExpBase from a residue.
// It performs 224 squarings and 22 multiplications.
func (z *ExpBase) FromResidue(x *Residue) *ExpBase {
var r Residue
r.Copy(x)
z.l[0].m = r.m
z.l[0].r = [4]uint64{1,0,0,0}
z.l[1].Copy(&r)
for i:=0; i<32; i++ {
r.Square()
}
z.l[2].Copy(&r)
z.l[3].Copy(&r).Mul(&z.l[1])
for i:=0; i<32; i++ {
r.Square()
}
z.l[4].Copy(&r)
z.l[5].Copy(&r).Mul(&z.l[1])
z.l[6].Copy(&r).Mul(&z.l[2])
z.l[7].Copy(&r).Mul(&z.l[3])
for i:=0; i<32; i++ {
r.Square()
}
z.l[ 8].Copy(&r)
z.l[ 9].Copy(&r).Mul(&z.l[1])
z.l[10].Copy(&r).Mul(&z.l[2])
z.l[11].Copy(&r).Mul(&z.l[3])
z.l[12].Copy(&r).Mul(&z.l[4])
z.l[13].Copy(&r).Mul(&z.l[5])
z.l[14].Copy(&r).Mul(&z.l[6])
z.l[15].Copy(&r).Mul(&z.l[7])
for i:=0; i<32; i++ {
r.Square()
}
z.h[0].Copy(&z.l[0])
z.h[1].Copy(&r)
for i:=0; i<32; i++ {
r.Square()
}
z.h[2].Copy(&r)
z.h[3].Copy(&r).Mul(&z.h[1])
for i:=0; i<32; i++ {
r.Square()
}
z.h[4].Copy(&r)
z.h[5].Copy(&r).Mul(&z.h[1])
z.h[6].Copy(&r).Mul(&z.h[2])
z.h[7].Copy(&r).Mul(&z.h[3])
for i:=0; i<32; i++ {
r.Square()
}
z.h[ 8].Copy(&r)
z.h[ 9].Copy(&r).Mul(&z.h[1])
z.h[10].Copy(&r).Mul(&z.h[2])
z.h[11].Copy(&r).Mul(&z.h[3])
z.h[12].Copy(&r).Mul(&z.h[4])
z.h[13].Copy(&r).Mul(&z.h[5])
z.h[14].Copy(&r).Mul(&z.h[6])
z.h[15].Copy(&r).Mul(&z.h[7])
return z
}
// ExpPrecomp takes an ExpBase computed from the base value, a 256-bit integer as the exponent, and performs modular exponentiation.
// It performs 31 squarings and 63 multiplications.
func (z *Residue) ExpPrecomp(x *ExpBase, y [4]uint64) *Residue {
h := ((y[3] >> 60) & 8) |
((y[3] >> 29) & 4) |
((y[2] >> 62) & 2) |
((y[2] >> 31) & 1)
l := ((y[1] >> 60) & 8) |
((y[1] >> 29) & 4) |
((y[0] >> 62) & 2) |
((y[0] >> 31) & 1)
z.Copy(&x.h[h]).Mul(&x.l[l])
for i := 1; i<32; i++ {
y[3] <<= 1
y[2] <<= 1
y[1] <<= 1
y[0] <<= 1
h = ((y[3] >> 60) & 8) |
((y[3] >> 29) & 4) |
((y[2] >> 62) & 2) |
((y[2] >> 31) & 1)
l = ((y[1] >> 60) & 8) |
((y[1] >> 29) & 4) |
((y[0] >> 62) & 2) |
((y[0] >> 31) & 1)
z.Square().Mul(&x.h[h]).Mul(&x.l[l])
}
return z
}
// Exp performs modular exponentiation without storing precomputed values for later use.
// It performs 255 squarings and 74 multiplications.
func (z *Residue) Exp(x [4]uint64) *Residue {
var (
r Residue
t [16]Residue
)
r.Copy(z)
t[0].m = r.m
t[0].r = [4]uint64{1,0,0,0}
t[1].Copy(z)
for i:=0; i<64; i++ {
r.Square()
}
t[2].Copy(&r)
t[3].Copy(&r).Mul(&t[1])
for i:=0; i<64; i++ {
r.Square()
}
t[4].Copy(&r)
t[5].Copy(&r).Mul(&t[1])
t[6].Copy(&r).Mul(&t[2])
t[7].Copy(&r).Mul(&t[3])
for i:=0; i<64; i++ {
r.Square()
}
t[ 8].Copy(&r)
t[ 9].Copy(&r).Mul(&t[1])
t[10].Copy(&r).Mul(&t[2])
t[11].Copy(&r).Mul(&t[3])
t[12].Copy(&r).Mul(&t[4])
t[13].Copy(&r).Mul(&t[5])
t[14].Copy(&r).Mul(&t[6])
t[15].Copy(&r).Mul(&t[7])
y := x
j := ((y[3] >> 60) & 8) |
((y[2] >> 61) & 4) |
((y[1] >> 62) & 2) |
((y[0] >> 63) & 1)
z.Copy(&t[j])
for i := 1; i<64; i++ {
y[3] <<= 1
y[2] <<= 1
y[1] <<= 1
y[0] <<= 1
j = ((y[3] >> 60) & 8) |
((y[2] >> 61) & 4) |
((y[1] >> 62) & 2) |
((y[0] >> 63) & 1)
z.Square().Mul(&t[j])
}
return z
} | exp.go | 0.581065 | 0.414069 | exp.go | starcoder |
package parser
import (
"fmt"
"strings"
)
type NodeType string
const (
// PathNode is a string segment of a path.
PathNode NodeType = "Path"
// ListNode is an array element of a path.
ListNode NodeType = "List"
// ObjectNode is the final Node in a path, what is being referenced.
ObjectNode NodeType = "Object"
)
// Node is a single element of a parsed path specification.
type Node interface {
	// Type reports which kind of node this is (Path, List or Object).
	Type() NodeType
	// DeepCopyNode returns a deep copy of this Node.
	DeepCopyNode() Node
	// String converts the Node into an equivalent String representation.
	// Calling Parse on the result yields an equivalent Node, but may differ in
	// structure if the Node is a Path containing Path Nodes.
	String() string
}
// Path represents an entire parsed path specification
type Path struct {
	Nodes []Node
}

var _ Node = Path{}

// Type reports that this node is a Path.
func (r Path) Type() NodeType {
	return PathNode
}

// DeepCopyNode returns a deep copy of the Path as a Node.
func (r Path) DeepCopyNode() Node {
	cp := r.DeepCopy()
	return &cp
}

// DeepCopy returns a Path whose Nodes are deep copies of the originals.
func (r Path) DeepCopy() Path {
	cp := Path{Nodes: make([]Node, len(r.Nodes))}
	for i, n := range r.Nodes {
		cp.Nodes[i] = n.DeepCopyNode()
	}
	return cp
}

// String renders the Path in textual form. Object nodes after the first
// element are preceded by a "." separator; other node kinds attach
// directly (no leading separator, and no separators before List Nodes).
func (r Path) String() string {
	var b strings.Builder
	for i, n := range r.Nodes {
		if i > 0 && n.Type() == ObjectNode {
			b.WriteString(".")
		}
		b.WriteString(n.String())
	}
	return b.String()
}
// Object is the final element of a path: the field being referenced.
type Object struct {
	Reference string
}

var _ Node = Object{}

// Type reports that this node is an Object.
func (o Object) Type() NodeType {
	return ObjectNode
}

// DeepCopyNode returns a deep copy of the Object as a Node.
func (o Object) DeepCopyNode() Node {
	cp := o.DeepCopy()
	return &cp
}

// DeepCopy returns a copy of the Object.
func (o Object) DeepCopy() Object {
	return Object{Reference: o.Reference}
}

// String renders the Object as its quoted reference.
func (o Object) String() string {
	return quote(o.Reference)
}
// List is an array element of a path. It selects entries whose KeyField
// matches KeyValue, or every entry when Glob is set.
type List struct {
	KeyField string
	KeyValue *string
	Glob     bool
}

var _ Node = List{}

// Type reports that this node is a List.
func (l List) Type() NodeType {
	return ListNode
}

// DeepCopyNode returns a deep copy of the List as a Node.
func (l List) DeepCopyNode() Node {
	cp := l.DeepCopy()
	return &cp
}

// DeepCopy returns a List carrying its own copy of KeyValue, if one is set.
func (l List) DeepCopy() List {
	cp := List{
		KeyField: l.KeyField,
		Glob:     l.Glob,
	}
	if l.KeyValue != nil {
		v := *l.KeyValue
		cp.KeyValue = &v
	}
	return cp
}

// Value returns the key value and whether one was specified.
func (l List) Value() (string, bool) {
	if l.KeyValue == nil {
		return "", false
	}
	return *l.KeyValue, true
}

// String renders the List as a bracketed key/value selector.
func (l List) String() string {
	key := quote(l.KeyField)
	switch {
	case l.Glob:
		return fmt.Sprintf("[%s: *]", key)
	case l.KeyValue != nil:
		return fmt.Sprintf("[%s: %s]", key, quote(*l.KeyValue))
	default:
		// Represents an improperly specified List node.
		return fmt.Sprintf("[%s: ]", key)
	}
}
// quote adds double quotes around the passed string, backslash-escaping
// any backslashes and double quotes it contains.
// fmt.Sprintf with %q is deliberately avoided because it would also turn
// whitespace into escape sequences, and we don't want that.
func quote(s string) string {
	escaped := strings.NewReplacer(`\`, `\\`, `"`, `\"`).Replace(s)
	return `"` + escaped + `"`
}
package main
import (
"math"
"github.com/gmlewis/pt/examples/utron/enclosure"
"github.com/gmlewis/pt/examples/utron/half-magnet"
"github.com/gmlewis/pt/examples/utron/half-utron"
. "github.com/gmlewis/pt/pt"
)
// All dimensions in mm
const (
	utronEdge    = 50.0 // edge length of the cubic utron core
	magnetHeight = 25.4 // height of one cylindrical magnet (exactly 1 inch)
	innerGap     = 70.0 // gap between the inner magnet faces — TODO confirm exact meaning
	magnetDiam   = 50.8 // magnet diameter (exactly 2 inches)
	metalMargin  = 0.5  // clearance added around metal cutouts
	magnetMargin = 10.0 // clearance added around the magnets
)
var (
	// utronRadius is half the face diagonal of a cube with edge utronEdge,
	// i.e. utronEdge/sqrt(2); used to lift the tilted parts so the core
	// sits at this height above z=0.
	utronRadius = 0.5 * math.Sqrt(2*utronEdge*utronEdge)
)
// top builds the top half of the enclosure with a cylindrical channel and
// a rectangular slot subtracted for the upper magnet assembly. All cutouts
// are rotated -45 degrees about Y and lifted by utronRadius to line up
// with the tilted utron core.
func top() SDF {
	top := enclosure.Top(utronEdge)
	ch := 4 * magnetHeight // total height of the four stacked magnets
	topCutout := Cylinder3D(ch, 0.5*magnetDiam+metalMargin, 1)
	// ssHeight: vertical offset of the magnet stack relative to the core
	// — assumes the stack sinks this far into the enclosure; TODO confirm.
	ssHeight := 0.5*(4*magnetHeight-utronEdge) - magnetMargin
	m := Translate3d(V3{0, 0, 0.5*ch + 2*magnetHeight - ssHeight - metalMargin})
	m = RotateY(-0.25 * math.Pi).Mul(m)
	m = Translate3d(V3{0, 0, utronRadius}).Mul(m)
	topCutout = Transform3D(topCutout, m)
	side := magnetDiam + 2*metalMargin
	big := 10 * utronEdge // long enough to cut clear through the part
	boxCutout := NewCubeSDF(Vector{side, big, side})
	m = Translate3d(V3{0, 0.5 * big, 0.5*side + 2*magnetHeight - ssHeight - metalMargin})
	m = RotateY(-0.25 * math.Pi).Mul(m)
	m = Translate3d(V3{0, 0, utronRadius}).Mul(m)
	boxCutout = Transform3D(boxCutout, m)
	// Subtract the combined cylinder+slot from the enclosure top.
	topCutout = Union3D(topCutout, boxCutout)
	top = Difference3D(top, topCutout)
	return top
}
// base builds the bottom half of the enclosure. It mirrors top(): the same
// cylindrical channel and rectangular slot are subtracted, but offset
// downward (negative z) for the lower magnet assembly.
func base() SDF {
	base := enclosure.Base(utronEdge)
	ch := 4 * magnetHeight // total height of the four stacked magnets
	baseCutout := Cylinder3D(ch, 0.5*magnetDiam+metalMargin, 1)
	// ssHeight: vertical offset of the magnet stack relative to the core
	// — assumes the stack sinks this far into the enclosure; TODO confirm.
	ssHeight := 0.5*(4*magnetHeight-utronEdge) - magnetMargin
	m := Translate3d(V3{0, 0, -0.5*ch - 2*magnetHeight + ssHeight + metalMargin})
	m = RotateY(-0.25 * math.Pi).Mul(m)
	m = Translate3d(V3{0, 0, utronRadius}).Mul(m)
	baseCutout = Transform3D(baseCutout, m)
	side := magnetDiam + 2*metalMargin
	big := 10 * utronEdge // long enough to cut clear through the part
	boxCutout := NewCubeSDF(Vector{side, big, side})
	m = Translate3d(V3{0, 0.5 * big, -0.5*side - 2*magnetHeight + ssHeight + metalMargin})
	m = RotateY(-0.25 * math.Pi).Mul(m)
	m = Translate3d(V3{0, 0, utronRadius}).Mul(m)
	boxCutout = Transform3D(boxCutout, m)
	// Subtract the combined cylinder+slot from the enclosure base.
	baseCutout = Union3D(baseCutout, boxCutout)
	base = Difference3D(base, baseCutout)
	return base
}
// main assembles the complete model — enclosure base and top, the two
// utron halves, the two magnet holders and the four magnets — then writes
// the combined assembly and each individual part to STL files.
func main() {
	top := top()
	base := base()
	halfUtron := half_utron.HalfUtron(utronEdge)
	// Lower utron half: flipped upside down, then lifted so the core
	// center sits at z = utronRadius; upper half is only lifted.
	utronLower := Transform3D(halfUtron, RotateX(math.Pi))
	utronLower = Transform3D(utronLower, Translate3d(V3{0, 0, utronRadius}))
	utronUpper := Transform3D(halfUtron, Translate3d(V3{0, 0, utronRadius}))
	halfMagnet := half_magnet.HalfMagnet(utronEdge, innerGap, magnetDiam, magnetHeight, magnetMargin)
	// Magnet holders: rotate upright, offset to the magnet axis, then
	// apply the same -45 degree tilt and lift used by top()/base().
	m := RotateX(0.5 * math.Pi)
	m = Translate3d(V3{-0.5 * (innerGap + magnetDiam), 0, -2 * magnetHeight}).Mul(m)
	m = RotateY(-0.25 * math.Pi).Mul(m)
	m = Translate3d(V3{0, 0, utronRadius}).Mul(m)
	halfMagnetLower := Transform3D(halfMagnet, m)
	m = RotateX(-0.5 * math.Pi)
	m = Translate3d(V3{-0.5 * (innerGap + magnetDiam), 0, 2 * magnetHeight}).Mul(m)
	m = RotateY(-0.25 * math.Pi).Mul(m)
	m = Translate3d(V3{0, 0, utronRadius}).Mul(m)
	halfMagnetUpper := Transform3D(halfMagnet, m)
	trim := 1.0 // To separate each magnet into its own piece and prevent merging.
	// Four magnets stacked along z, each shortened by trim.
	magnet1 := Cylinder3D(magnetHeight-trim, 0.5*magnetDiam, 1)
	magnet1 = Transform3D(magnet1, Translate3d(V3{0, 0, -1.5 * magnetHeight}))
	magnet2 := Cylinder3D(magnetHeight-trim, 0.5*magnetDiam, 1)
	magnet2 = Transform3D(magnet2, Translate3d(V3{0, 0, -0.5 * magnetHeight}))
	magnet3 := Cylinder3D(magnetHeight-trim, 0.5*magnetDiam, 1)
	magnet3 = Transform3D(magnet3, Translate3d(V3{0, 0, 0.5 * magnetHeight}))
	magnet4 := Cylinder3D(magnetHeight-trim, 0.5*magnetDiam, 1)
	magnet4 = Transform3D(magnet4, Translate3d(V3{0, 0, 1.5 * magnetHeight}))
	magnets := Union3D(magnet1, magnet2, magnet3, magnet4)
	// Move the stack onto the tilted magnet axis.
	m = Translate3d(V3{-innerGap - magnetDiam, 0, 0})
	m = RotateY(-0.25 * math.Pi).Mul(m)
	m = Translate3d(V3{0, 0, utronRadius}).Mul(m)
	magnets = Transform3D(magnets, m)
	s := Union3D(base, utronLower, utronUpper, halfMagnetLower, halfMagnetUpper, magnets, top)
	RenderSTL(s, 800, "utron.stl")
	// Write out separate parts.
	RenderSTL(base, 800, "base.stl")
	RenderSTL(top, 800, "top.stl")
	RenderSTL(utronLower, 800, "utron-lower.stl")
	RenderSTL(utronUpper, 800, "utron-upper.stl")
	RenderSTL(halfMagnetLower, 800, "magnet-lower.stl")
	RenderSTL(halfMagnetUpper, 800, "magnet-upper.stl")
}
package paxi
// Quorum records each acknowledgement and check for different types of quorum satisfied
type Quorum struct {
	size  int         // number of positive acks recorded (see also ADD)
	acks  map[ID]bool // set of node ids that acked
	zones map[int]int // per-zone ack counts, keyed by zone id
	nacks map[ID]bool // set of node ids that nacked
}

// NewQuorum returns a new Quorum
func NewQuorum() *Quorum {
	// All three maps must be allocated here: writing to a nil map panics,
	// and NACK may be called on a fresh Quorum before any Reset.
	q := &Quorum{
		size:  0,
		acks:  make(map[ID]bool),
		zones: make(map[int]int),
		nacks: make(map[ID]bool),
	}
	return q
}

// ACK adds id to quorum ack records; duplicate acks are counted only once.
func (q *Quorum) ACK(id ID) {
	if q.acks[id] {
		return
	}
	q.acks[id] = true
	q.size++
	q.zones[id.Zone()]++
}

// NACK adds id to quorum nack records
func (q *Quorum) NACK(id ID) {
	q.nacks[id] = true
}

// ADD increase ack size by one.
// It bypasses the acks/zones bookkeeping, so it cannot be deduplicated.
func (q *Quorum) ADD() {
	q.size++
}

// Size returns current ack size
func (q *Quorum) Size() int {
	return q.size
}

// Reset resets the quorum to empty
func (q *Quorum) Reset() {
	q.size = 0
	q.acks = make(map[ID]bool)
	q.zones = make(map[int]int)
	q.nacks = make(map[ID]bool)
}

// All returns true if every node in the configuration has acked.
func (q *Quorum) All() bool {
	return q.size == config.n
}

// Majority quorum satisfied
func (q *Quorum) Majority() bool {
	return q.size > config.n/2
}

// FastQuorum from fast paxos.
// NOTE(review): integer division floors, so for n not divisible by 4 this
// accepts a smaller quorum than ceil(3n/4) — confirm this is intended.
func (q *Quorum) FastQuorum() bool {
	return q.size >= config.n*3/4
}

// AllZones returns true if there is at least one ack from each zone
func (q *Quorum) AllZones() bool {
	return len(q.zones) == config.z
}

// ZoneMajority returns true if majority quorum satisfied in any zone
func (q *Quorum) ZoneMajority() bool {
	for z, n := range q.zones {
		if n > config.npz[z]/2 {
			return true
		}
	}
	return false
}

// GridRow == AllZones
func (q *Quorum) GridRow() bool {
	return q.AllZones()
}

// GridColumn == all nodes in one zone
func (q *Quorum) GridColumn() bool {
	for z, n := range q.zones {
		if n == config.npz[z] {
			return true
		}
	}
	return false
}

// FGridQ1 is flexible grid quorum for phase 1:
// zone-majorities in at least config.z-Fz zones.
func (q *Quorum) FGridQ1(Fz int) bool {
	zone := 0
	for z, n := range q.zones {
		if n > config.npz[z]/2 {
			zone++
		}
	}
	return zone >= config.z-Fz
}

// FGridQ2 is flexible grid quorum for phase 2:
// zone-majorities in at least Fz+1 zones.
func (q *Quorum) FGridQ2(Fz int) bool {
	zone := 0
	for z, n := range q.zones {
		if n > config.npz[z]/2 {
			zone++
		}
	}
	return zone >= Fz+1
}
/*
// Q1 returns true if config.Quorum type is satisfied
func (q *Quorum) Q1() bool {
switch config.Quorum {
case "majority":
return q.Majority()
case "grid":
return q.GridRow()
case "fgrid":
return q.FGridQ1()
case "group":
return q.ZoneMajority()
case "count":
return q.size >= config.n-config.F
default:
log.Error("Unknown quorum type")
return false
}
}
// Q2 returns true if config.Quorum type is satisfied
func (q *Quorum) Q2() bool {
switch config.Quorum {
case "majority":
return q.Majority()
case "grid":
return q.GridColumn()
case "fgrid":
return q.FGridQ2()
case "group":
return q.ZoneMajority()
case "count":
return q.size > config.F
default:
log.Error("Unknown quorum type")
return false
}
}
*/ | quorum.go | 0.680879 | 0.401219 | quorum.go | starcoder |
package ln
import "math"
type Mesh struct {
Box Box
Triangles []*Triangle
Tree *Tree
}
func NewMesh(triangles []*Triangle) *Mesh {
box := BoxForTriangles(triangles)
return &Mesh{box, triangles, nil}
}
func (m *Mesh) Compile() {
if m.Tree == nil {
shapes := make([]Shape, len(m.Triangles))
for i, triangle := range m.Triangles {
shapes[i] = triangle
}
m.Tree = NewTree(shapes)
}
}
func (m *Mesh) BoundingBox() Box {
return m.Box
}
func (m *Mesh) Contains(v Vector, f float64) bool {
return false
}
func (m *Mesh) Intersect(r Ray) Hit {
return m.Tree.Intersect(r)
}
func (m *Mesh) Paths() Paths {
var result Paths
for _, t := range m.Triangles {
result = append(result, t.Paths()...)
}
return result
}
func (m *Mesh) UpdateBoundingBox() {
m.Box = BoxForTriangles(m.Triangles)
}
func (m *Mesh) UnitCube() {
m.FitInside(Box{Vector{}, Vector{1, 1, 1}}, Vector{})
m.MoveTo(Vector{}, Vector{0.5, 0.5, 0.5})
}
func (m *Mesh) MoveTo(position, anchor Vector) {
matrix := Translate(position.Sub(m.Box.Anchor(anchor)))
m.Transform(matrix)
}
func (m *Mesh) FitInside(box Box, anchor Vector) {
scale := box.Size().Div(m.BoundingBox().Size()).MinComponent()
extra := box.Size().Sub(m.BoundingBox().Size().MulScalar(scale))
matrix := Identity()
matrix = matrix.Translate(m.BoundingBox().Min.MulScalar(-1))
matrix = matrix.Scale(Vector{scale, scale, scale})
matrix = matrix.Translate(box.Min.Add(extra.Mul(anchor)))
m.Transform(matrix)
}
func (m *Mesh) Transform(matrix Matrix) {
for _, t := range m.Triangles {
t.V1 = matrix.MulPosition(t.V1)
t.V2 = matrix.MulPosition(t.V2)
t.V3 = matrix.MulPosition(t.V3)
t.UpdateBoundingBox()
}
m.UpdateBoundingBox()
m.Tree = nil // dirty
}
func (m *Mesh) SaveBinarySTL(path string) error {
return SaveBinarySTL(path, m)
}
func (m *Mesh) Voxelize(size float64) []*Cube {
z1 := m.Box.Min.Z
z2 := m.Box.Max.Z
set := make(map[Vector]bool)
for z := z1; z <= z2; z += size {
plane := Plane{Vector{0, 0, z}, Vector{0, 0, 1}}
paths := plane.IntersectMesh(m)
for _, path := range paths {
for _, v := range path {
x := math.Floor(v.X/size+0.5) * size
y := math.Floor(v.Y/size+0.5) * size
z := math.Floor(v.Z/size+0.5) * size
set[Vector{x, y, z}] = true
}
}
}
var result []*Cube
for v, _ := range set {
cube := NewCube(v.SubScalar(size/2), v.AddScalar(size/2))
result = append(result, cube)
}
return result
} | ln/mesh.go | 0.704872 | 0.488893 | mesh.go | starcoder |
package vm
import (
"fmt"
"github.com/goby-lang/goby/compiler/bytecode"
)
// callObject bundles the state needed to execute one method call: the
// resolved method, where the receiver and arguments live on the VM stack,
// and the freshly created call frame the method will run in.
type callObject struct {
	method       *MethodObject    // method being invoked
	receiverPtr  int              // stack index of the receiver
	argCount     int              // number of arguments supplied by the caller
	argIndex     int              // cursor into the arguments while assigning
	argSet       *bytecode.ArgSet // caller-side argument names/types; may be nil
	lastArgIndex int              // last argument index consumed (normal/optioned args only)
	callFrame    *normalCallFrame // frame the method will execute in
	sourceLine   int              // source line of the call site
}

// newCallObject creates a callObject for invoking method on receiver,
// wiring up a new call frame whose self is the receiver and whose block
// frame is blockFrame.
func newCallObject(receiver Object, method *MethodObject, receiverPtr, argCount int, argSet *bytecode.ArgSet, blockFrame *normalCallFrame, sourceLine int) *callObject {
	cf := newNormalCallFrame(method.instructionSet, method.instructionSet.filename, sourceLine)
	cf.self = receiver
	cf.blockFrame = blockFrame
	return &callObject{
		method:      method,
		receiverPtr: receiverPtr,
		argCount:    argCount,
		argSet:      argSet,
		// This is only for normal/optioned arguments
		lastArgIndex: -1,
		callFrame:    cf,
		sourceLine:   sourceLine,
	}
}

// instructionSet returns the instruction set of the method being called.
func (co *callObject) instructionSet() *instructionSet {
	return co.method.instructionSet
}

// paramTypes returns the parameter types declared by the method.
func (co *callObject) paramTypes() []int {
	return co.instructionSet().paramTypes.Types()
}

// paramNames returns the parameter names declared by the method.
func (co *callObject) paramNames() []string {
	return co.instructionSet().paramTypes.Names()
}

// methodName returns the name of the method being called.
func (co *callObject) methodName() string {
	return co.method.Name
}

// argTypes returns the caller-side argument types, or an empty slice when
// no ArgSet was provided.
func (co *callObject) argTypes() []int {
	if co.argSet == nil {
		return []int{}
	}
	return co.argSet.Types()
}

// argPtr returns the stack index of the first argument (one past the receiver).
func (co *callObject) argPtr() int {
	return co.receiverPtr + 1
}

// argPosition returns the stack index of the argument currently being consumed.
func (co *callObject) argPosition() int {
	return co.argPtr() + co.argIndex
}

// assignNormalArguments copies the caller's positional arguments, in
// order, into the call-frame locals of the normal (non-keyword,
// non-splat) parameters.
func (co *callObject) assignNormalArguments(stack []*Pointer) {
	for i, paramType := range co.paramTypes() {
		if paramType == bytecode.NormalArg {
			co.callFrame.insertLCL(i, 0, stack[co.argPosition()].Target)
			co.argIndex++
		}
	}
}

// assignNormalAndOptionedArguments assigns the argument for the normal or
// optioned parameter at paramIndex, skipping argument slots that belong
// to other argument kinds (e.g. keyword arguments).
func (co *callObject) assignNormalAndOptionedArguments(paramIndex int, stack []*Pointer) {
	/*
		Find first usable value as normal argument, for example:

		```ruby
		def foo(x, y:); end

		foo(y: 100, 10)
		```

		In the example we can see that 'x' is the first parameter,
		but in the method call it's the second argument.

		This loop is for skipping other types of arguments and get the correct argument index.
	*/
	for argIndex, at := range co.argTypes() {
		if co.lastArgIndex < argIndex && (at == bytecode.NormalArg || at == bytecode.OptionedArg) {
			co.callFrame.insertLCL(paramIndex, 0, stack[co.argPtr()+argIndex].Target)

			// Store latest index value (and compare them to current argument index)
			// This is to make sure we won't get same argument's index twice.
			co.lastArgIndex = argIndex
			break
		}
	}
}

// assignKeywordArguments matches the caller's keyword arguments to the
// method's keyword parameters. A keyword the method does not declare
// produces an error (when several are unknown, the last one wins).
func (co *callObject) assignKeywordArguments(stack []*Pointer) (err error) {
	for argIndex, argType := range co.argTypes() {
		if argType == bytecode.RequiredKeywordArg || argType == bytecode.OptionalKeywordArg {
			argName := co.argSet.Names()[argIndex]
			paramIndex, ok := co.hasKeywordParam(argName)
			if ok {
				co.callFrame.insertLCL(paramIndex, 0, stack[co.argPtr()+argIndex].Target)
			} else {
				err = fmt.Errorf("unknown key %s for method %s", argName, co.methodName())
			}
		}
	}
	return
}

// assignSplatArgument collects all remaining positional arguments into
// arr and stores it in the local slot of the trailing splat parameter.
func (co *callObject) assignSplatArgument(stack []*Pointer, arr *ArrayObject) {
	index := len(co.paramTypes()) - 1

	for co.argIndex < co.argCount {
		arr.Elements = append(arr.Elements, stack[co.argPosition()].Target)
		co.argIndex++
	}

	co.callFrame.insertLCL(index, 0, arr)
}

// hasKeywordParam reports whether the method declares a keyword parameter
// with the given name, returning its parameter index when found.
func (co *callObject) hasKeywordParam(name string) (index int, result bool) {
	for paramIndex, paramType := range co.paramTypes() {
		paramName := co.paramNames()[paramIndex]
		if paramName == name && (paramType == bytecode.RequiredKeywordArg || paramType == bytecode.OptionalKeywordArg) {
			index = paramIndex
			result = true
			return
		}
	}
	return
}

// hasKeywordArgument reports whether the caller supplied a keyword
// argument with the given name, returning its argument index when found.
func (co *callObject) hasKeywordArgument(name string) (index int, result bool) {
	for argIndex, argType := range co.argTypes() {
		argName := co.argSet.Names()[argIndex]
		if argName == name && (argType == bytecode.RequiredKeywordArg || argType == bytecode.OptionalKeywordArg) {
			index = argIndex
			result = true
			return
		}
	}
	return
}

// normalParamsCount counts the method's normal (required positional) parameters.
func (co *callObject) normalParamsCount() (n int) {
	for _, at := range co.paramTypes() {
		if at == bytecode.NormalArg {
			n++
		}
	}
	return
}

// normalArgsCount counts the caller's normal positional arguments.
func (co *callObject) normalArgsCount() (n int) {
	for _, at := range co.argTypes() {
		if at == bytecode.NormalArg {
			n++
		}
	}
	return
}
package entoas
import (
"entgo.io/ent/entc/gen"
"github.com/bionicstork/contrib/entoas/serialization"
)
type (
	// Edge wraps a gen.Edge and denotes an edge to be returned in an operation response. It recursively defines
	// edges to load on the gen.Type the wrapped edge is pointing at.
	Edge struct {
		*gen.Edge
		// Edges to eager-load on the type this edge points at.
		Edges Edges
	}
	// Edges is a list of Edge trees.
	Edges []*Edge
	// A step when traversing the schema graph.
	step struct {
		from *gen.Type // type the step starts at
		over *gen.Edge // edge being traversed
	}
	// walk is a sequence of steps.
	walk []step
)
// EdgeTree returns the Edges to include on a type for the given serialization
// groups, starting a fresh walk at n.
func EdgeTree(n *gen.Type, gs serialization.Groups) (Edges, error) { return edgeTree(n, walk{}, gs) }
// Flatten returns a list of all gen.Edge present in the tree.
// NOTE(review): the result is collected from a map, so its order is not
// deterministic across runs — confirm callers do not rely on ordering.
func (es Edges) Flatten() []*gen.Edge {
	var r []*gen.Edge
	for _, t := range edges(es) {
		r = append(r, t)
	}
	return r
}
// edges recursively collects every gen.Edge reachable from the given
// trees, deduplicated by edge name.
func edges(es Edges) map[string]*gen.Edge {
	found := make(map[string]*gen.Edge)
	for _, e := range es {
		found[e.Name] = e.Edge
		for name, child := range edges(e.Edges) {
			found[name] = child
		}
	}
	return found
}
// edgeTree recursively collects the edges to load on this type for the
// requested groups. w records the steps already taken so that cycles in
// the schema graph terminate.
func edgeTree(n *gen.Type, w walk, gs serialization.Groups) (Edges, error) {
	// Iterate over the edges of the given type.
	// If the type has an edge we need to eager load, do so.
	// Recursively go down the current types edges and, if requested, eager load those too.
	var es Edges
	for _, e := range n.Edges {
		a, err := EdgeAnnotation(e)
		if err != nil {
			return nil, err
		}
		// If the edge has at least one of the groups requested, load the edge.
		if a.Groups.Match(gs) {
			s := step{n, e}
			// If we already visited this edge before don't do it again to prevent an endless cycle.
			if w.visited(s) {
				continue
			}
			w.push(s)
			// Recursively collect the eager loads of edge-types edges.
			es1, err := edgeTree(e.Type, w, gs)
			if err != nil {
				return nil, err
			}
			// Done visiting this edge.
			w.pop()
			es = append(es, &Edge{Edge: e, Edges: es1})
		}
	}
	return es, nil
}
// visited returns if the given step has been done before.
// It scans from the most recent step backwards; the explicit empty-walk
// guard of the previous version was redundant, since an empty walk simply
// never enters the loop.
func (w walk) visited(s step) bool {
	for i := len(w) - 1; i >= 0; i-- {
		if w[i].equal(s) {
			return true
		}
	}
	return false
}
// push adds a new step to the walk.
func (w *walk) push(s step) { *w = append(*w, s) }
// pop removes the last step of the walk.
// Popping an empty walk is a no-op.
func (w *walk) pop() {
	if len(*w) > 0 {
		*w = (*w)[:len(*w)-1]
	}
}
// equal returns if the given step o is equal to the current step s.
// Steps are compared by the names of their endpoints, not by pointer.
func (s step) equal(o step) bool { return s.from.Name == o.from.Name && s.over.Name == o.over.Name }
package core
import (
"fmt"
"math"
)
// Shape is a 2-D hitbox that can test intersection against the two
// concrete geometries (circles and axis-aligned rectangles) and report
// its own position.
type Shape interface {
	IntersectCircle(c Circle) bool
	IntersectRectangle(r Rectangle) bool
	Pos() (x, y float64)
	String() string
}
// NewDefSingleTarget returns an AttackPattern that hits exactly the
// target with index ind, restricted to the given targettable type.
// SelfHarm is always enabled for this pattern.
func NewDefSingleTarget(ind int, typ TargettableType) AttackPattern {
	var arr [TargettableTypeCount]bool
	arr[typ] = true
	return AttackPattern{
		Shape:    &SingleTarget{Target: ind},
		SelfHarm: true,
		Targets:  arr,
	}
}
// NewDefCircHit returns an AttackPattern with a circular hitbox of radius
// r centered at the origin. self enables self-harm; targets lists the
// targettable types the attack can hit (out-of-range values are ignored).
func NewDefCircHit(r float64, self bool, targets ...TargettableType) AttackPattern {
	var arr [TargettableTypeCount]bool
	for _, v := range targets {
		if v < TargettableTypeCount {
			arr[v] = true
		}
	}
	return AttackPattern{
		Shape: &Circle{
			r: r,
		},
		Targets:  arr,
		SelfHarm: self,
	}
}
// NewDefBoxHit returns an AttackPattern with a w-by-h rectangular hitbox
// centered at the origin. self enables self-harm; targets lists the
// targettable types the attack can hit (out-of-range values are ignored).
func NewDefBoxHit(w, h float64, self bool, targets ...TargettableType) AttackPattern {
	var arr [TargettableTypeCount]bool
	for _, v := range targets {
		if v < TargettableTypeCount {
			arr[v] = true
		}
	}
	return AttackPattern{
		Shape: &Rectangle{
			w: w,
			h: h,
		},
		Targets:  arr,
		SelfHarm: self,
	}
}
// NewCircleHit returns an AttackPattern with a circular hitbox of radius
// r centered at (x, y). self enables self-harm; targets lists the
// targettable types the attack can hit (out-of-range values are ignored).
func NewCircleHit(x, y, r float64, self bool, targets ...TargettableType) AttackPattern {
	var arr [TargettableTypeCount]bool
	for _, v := range targets {
		if v < TargettableTypeCount {
			arr[v] = true
		}
	}
	return AttackPattern{
		Shape: &Circle{
			x: x,
			y: y,
			r: r,
		},
		Targets:  arr,
		SelfHarm: self,
	}
}
// NewCircle returns a circular hitbox centered at (x, y) with radius r.
func NewCircle(x, y, r float64) *Circle {
	return &Circle{
		x: x,
		y: y,
		r: r,
	}
}
// SingleTarget is a pseudo-shape that identifies one target by index
// rather than by geometry, so geometric intersection always fails.
type SingleTarget struct {
	Target int
}

func (s *SingleTarget) IntersectCircle(in Circle) bool       { return false }
func (s *SingleTarget) IntersectRectangle(in Rectangle) bool { return false }
func (s *SingleTarget) Pos() (float64, float64)              { return 0, 0 }
func (s *SingleTarget) String() string                       { return fmt.Sprintf("single target: %v", s.Target) }
// this is for attack that only hits self
// NOTE(review): SelfDamage has no Pos or String method, so unlike the
// other shapes it does not satisfy the Shape interface — confirm intended.
type SelfDamage struct{}

func (c *SelfDamage) IntersectCircle(in Circle) bool       { return false }
func (c *SelfDamage) IntersectRectangle(in Rectangle) bool { return false }
// Circle is a circular hitbox centered at (x, y) with radius r.
type Circle struct {
	x, y, r float64
}

// String describes the circle for logging.
func (c *Circle) String() string {
	return fmt.Sprintf("r: %v x: %v y: %v", c.r, c.x, c.y)
}

// IntersectCircle reports whether c and c2 overlap: two circles intersect
// when the squared distance between centers is strictly less than the
// square of the sum of their radii. Plain multiplication replaces
// math.Pow(x, 2) for clarity and speed.
func (c *Circle) IntersectCircle(c2 Circle) bool {
	dx := c.x - c2.x
	dy := c.y - c2.y
	rr := c.r + c2.r
	return dx*dx+dy*dy < rr*rr
}

// IntersectRectangle reports whether the circle overlaps the axis-aligned
// rectangle r (which is centered at r.x, r.y).
// Algorithm per
// https://stackoverflow.com/questions/401847/circle-rectangle-collision-detection-intersection
func (c *Circle) IntersectRectangle(r Rectangle) bool {
	// Per-axis distance between the circle center and rectangle center.
	cdX := math.Abs(c.x - r.x)
	cdY := math.Abs(c.y - r.y)
	// Too far apart on either axis to possibly touch.
	if cdX > (r.w/2 + c.r) {
		return false
	}
	if cdY > (r.h/2 + c.r) {
		return false
	}
	// Circle center lies within the rectangle's horizontal or vertical band.
	if cdX <= (r.w / 2) {
		return true
	}
	if cdY <= (r.h / 2) {
		return true
	}
	// Corner case: squared distance to the nearest corner vs squared radius.
	dx := cdX - r.w/2
	dy := cdY - r.h/2
	return dx*dx+dy*dy <= c.r*c.r
}

// Pos returns the center of the circle.
func (c *Circle) Pos() (float64, float64) {
	return c.x, c.y
}
type Rectangle struct {
x, y, w, h float64
}
func (r *Rectangle) String() string {
return fmt.Sprintf("w: %v h: %v x: %v y: %v", r.w, r.h, r.x, r.y)
}
func (r *Rectangle) IntersectCircle(c Circle) bool {
cdX := math.Abs(c.x - r.x)
cdY := math.Abs(c.y - r.y)
if cdX > (r.w/2 + c.r) {
return false
}
if cdY > (r.h/2 + c.r) {
return false
}
if cdX <= (r.w / 2) {
return true
}
if cdY <= (r.h / 2) {
return true
}
sq := math.Pow(cdX-r.w/2, 2) + math.Pow(cdY-r.h/2, 2)
return sq <= math.Pow(c.r, 2)
}
func (r *Rectangle) IntersectRectangle(r2 Rectangle) bool {
halfr2w := r2.w / 2
halfr2h := r2.h / 2
halfr1w := r.w / 2
halfr1h := r.h / 2
return r.x+halfr1w >= r2.x-halfr2w && //right side >= r2 left side
r.x-halfr1w <= r2.x+halfr2w && //left side <= r2 right side
r.y+halfr1h >= r2.y-halfr2h && //top side >= r2 bot side
r.y-halfr1h <= r2.y+halfr2h //bot side >= r2 topside
}
func (r *Rectangle) Pos() (float64, float64) {
return r.x, r.y
} | pkg/core/hitbox.go | 0.72952 | 0.446012 | hitbox.go | starcoder |
package gocountry
import (
"fmt"
"strings"
)
// Country defines and stores the known identifiers for a given country.
type Country struct {
	full      string // full English name, e.g. "Bolivia"
	codeTwo   string // ISO ALPHA-2 code, e.g. "US"
	codeThree string // ISO ALPHA-3 code, e.g. "BOL"
	num       string // zero-padded numeric code (UN M49), e.g. "068"
}

// Full returns the full name of the country.
func (c *Country) Full() string {
	return c.full
}

// CodeTwo returns the ISO ALPHA-2 Code Example "US"
func (c *Country) CodeTwo() string {
	return c.codeTwo
}

// CodeThree returns the ISO ALPHA-3 Code Example "BOL"
func (c *Country) CodeThree() string {
	return c.codeThree
}

// Number returns the ISO Numeric Code UN M49 Numerical Code
func (c *Country) Number() string {
	return c.num
}

// ExistsFull finds the country full name in the input string.
// The stored name is lowercased before matching, so callers must pass
// lowercased text for a case-insensitive match.
func (c *Country) ExistsFull(text string) bool {
	return strings.Contains(text, strings.ToLower(c.full))
}

// ExistsTwo finds the country ISO ALPHA-2 identifier in the input string
// (case-sensitive substring match).
func (c *Country) ExistsTwo(text string) bool {
	return strings.Contains(text, c.codeTwo)
}

// ExistsThree finds the country ISO ALPHA-3 identifier in the input
// string (case-sensitive substring match).
func (c *Country) ExistsThree(text string) bool {
	return strings.Contains(text, c.codeThree)
}

// ExistsNum will find ISO Numeric Code UN M49 Numerical Code in the input text
func (c *Country) ExistsNum(text string) bool {
	return strings.Contains(text, c.num)
}

// String returns the country's ISO ALPHA-2 code.
func (c *Country) String() string {
	return c.CodeTwo()
}
// Options toggle search functions and is required to pass into the Search function
type Options struct {
	Full      bool // match on full country names (matched against lowercased names)
	CodeTwo   bool // match on ISO ALPHA-2 codes
	CodeThree bool // match on ISO ALPHA-3 codes
	Number    bool // match on zero-padded numeric codes
}
// FindByNumber returns the country whose numeric code matches num
// (zero-padded to three digits), or nil when there is no match.
func FindByNumber(num int) *Country {
	code := fmt.Sprintf("%03d", num)
	for _, country := range countries {
		if country.ExistsNum(code) {
			return country
		}
	}
	return nil
}
// Search searches the input string text for occurrences of known
// countries, using only the identifier kinds enabled in options.
func Search(options *Options, text string) []*Country {
	// Full-name matching compares against lowercased names, so lowercase
	// the input once up front.
	lower := strings.ToLower(text)
	matches := make([]*Country, 0)
	for _, c := range countries {
		found := (options.Full && c.ExistsFull(lower)) ||
			(options.CodeTwo && c.ExistsTwo(text)) ||
			(options.CodeThree && c.ExistsThree(text)) ||
			(options.Number && c.ExistsNum(text))
		if found {
			matches = append(matches, c)
		}
	}
	return matches
}
var countries = [...]*Country{
&Country{full: "Afghanistan", codeTwo: "AF", codeThree: "AFG", num: "004"},
&Country{full: "ALA Aland Islands", codeTwo: "AX", codeThree: "ALA", num: "248"},
&Country{full: "Albania", codeTwo: "AL", codeThree: "ALB", num: "008"},
&Country{full: "Algeria", codeTwo: "DZ", codeThree: "DZA", num: "012"},
&Country{full: "American Samoa", codeTwo: "AS", codeThree: "ASM", num: "016"},
&Country{full: "Andorra", codeTwo: "AD", codeThree: "AND", num: "020"},
&Country{full: "Angola", codeTwo: "AO", codeThree: "AGO", num: "024"},
&Country{full: "Anguilla", codeTwo: "AI", codeThree: "AIA", num: "660"},
&Country{full: "Antarctica", codeTwo: "AQ", codeThree: "ATA", num: "010"},
&Country{full: "Antigua and Barbuda", codeTwo: "AG", codeThree: "ATG", num: "028"},
&Country{full: "Argentina", codeTwo: "AR", codeThree: "ARG", num: "032"},
&Country{full: "Armenia", codeTwo: "AM", codeThree: "ARM", num: "051"},
&Country{full: "Aruba", codeTwo: "AW", codeThree: "ABW", num: "533"},
&Country{full: "Australia", codeTwo: "AU", codeThree: "AUS", num: "036"},
&Country{full: "Austria", codeTwo: "AT", codeThree: "AUT", num: "040"},
&Country{full: "Azerbaijan", codeTwo: "AZ", codeThree: "AZE", num: "031"},
&Country{full: "Bahamas", codeTwo: "BS", codeThree: "BHS", num: "044"},
&Country{full: "Bahrain", codeTwo: "BH", codeThree: "BHR", num: "048"},
&Country{full: "Bangladesh", codeTwo: "BD", codeThree: "BGD", num: "050"},
&Country{full: "Barbados", codeTwo: "BB", codeThree: "BRB", num: "052"},
&Country{full: "Belarus", codeTwo: "BY", codeThree: "BLR", num: "112"},
&Country{full: "Belgium", codeTwo: "BE", codeThree: "BEL", num: "056"},
&Country{full: "Belize", codeTwo: "BZ", codeThree: "BLZ", num: "084"},
&Country{full: "Benin", codeTwo: "BJ", codeThree: "BEN", num: "204"},
&Country{full: "Bermuda", codeTwo: "BM", codeThree: "BMU", num: "060"},
&Country{full: "Bhutan", codeTwo: "BT", codeThree: "BTN", num: "064"},
&Country{full: "Bolivia", codeTwo: "BO", codeThree: "BOL", num: "068"},
&Country{full: "Bosnia and Herzegovina", codeTwo: "BA", codeThree: "BIH", num: "070"},
&Country{full: "Botswana", codeTwo: "BW", codeThree: "BWA", num: "072"},
&Country{full: "Bouvet Island", codeTwo: "BV", codeThree: "BVT", num: "074"},
&Country{full: "Brazil", codeTwo: "BR", codeThree: "BRA", num: "076"},
&Country{full: "British Virgin Islands", codeTwo: "VG", codeThree: "VGB", num: "092"},
&Country{full: "British Indian Ocean Territory", codeTwo: "IO", codeThree: "IOT", num: "086"},
&Country{full: "Brunei Darussalam", codeTwo: "BN", codeThree: "BRN", num: "096"},
&Country{full: "Bulgaria", codeTwo: "BG", codeThree: "BGR", num: "100"},
&Country{full: "Burkina Faso", codeTwo: "BF", codeThree: "BFA", num: "854"},
&Country{full: "Burundi", codeTwo: "BI", codeThree: "BDI", num: "108"},
&Country{full: "Cambodia", codeTwo: "KH", codeThree: "KHM", num: "116"},
&Country{full: "Cameroon", codeTwo: "CM", codeThree: "CMR", num: "120"},
&Country{full: "Canada", codeTwo: "CA", codeThree: "CAN", num: "124"},
&Country{full: "C<NAME>", codeTwo: "CV", codeThree: "CPV", num: "132"},
&Country{full: "Cayman Islands", codeTwo: "KY", codeThree: "CYM", num: "136"},
&Country{full: "Central African Republic", codeTwo: "CF", codeThree: "CAF", num: "140"},
&Country{full: "Chad", codeTwo: "TD", codeThree: "TCD", num: "148"},
&Country{full: "Chile", codeTwo: "CL", codeThree: "CHL", num: "152"},
&Country{full: "China", codeTwo: "CN", codeThree: "CHN", num: "156"},
&Country{full: "Hong Kong, Special Administrative Region of China", codeTwo: "HK", codeThree: "HKG", num: "344"},
&Country{full: "Macao, Special Administrative Region of China", codeTwo: "MO", codeThree: "MAC", num: "446"},
&Country{full: "Christmas Island", codeTwo: "CX", codeThree: "CXR", num: "162"},
&Country{full: "Cocos Islands", codeTwo: "CC", codeThree: "CCK", num: "166"},
&Country{full: "Colombia", codeTwo: "CO", codeThree: "COL", num: "170"},
&Country{full: "Comoros", codeTwo: "KM", codeThree: "COM", num: "174"},
&Country{full: "Congo", codeTwo: "CG", codeThree: "COG", num: "178"},
&Country{full: "Congo, Democratic Republic of the", codeTwo: "CD", codeThree: "COD", num: "180"},
&Country{full: "Cook Islands", codeTwo: "CK", codeThree: "COK", num: "184"},
&Country{full: "Costa Rica", codeTwo: "CR", codeThree: "CRI", num: "188"},
&Country{full: "Côte d'Ivoire", codeTwo: "CI", codeThree: "CIV", num: "384"},
&Country{full: "Croatia", codeTwo: "HR", codeThree: "HRV", num: "191"},
&Country{full: "Cuba", codeTwo: "CU", codeThree: "CUB", num: "192"},
&Country{full: "Cyprus", codeTwo: "CY", codeThree: "CYP", num: "196"},
&Country{full: "Czech Republic", codeTwo: "CZ", codeThree: "CZE", num: "203"},
&Country{full: "Denmark", codeTwo: "DK", codeThree: "DNK", num: "208"},
&Country{full: "Djibouti", codeTwo: "DJ", codeThree: "DJI", num: "262"},
&Country{full: "Dominica", codeTwo: "DM", codeThree: "DMA", num: "212"},
&Country{full: "Dominican Republic", codeTwo: "DO", codeThree: "DOM", num: "214"},
&Country{full: "Ecuador", codeTwo: "EC", codeThree: "ECU", num: "218"},
&Country{full: "Egypt", codeTwo: "EG", codeThree: "EGY", num: "818"},
&Country{full: "El Salvador", codeTwo: "SV", codeThree: "SLV", num: "222"},
&Country{full: "Equatorial Guinea", codeTwo: "GQ", codeThree: "GNQ", num: "226"},
&Country{full: "Eritrea", codeTwo: "ER", codeThree: "ERI", num: "232"},
&Country{full: "Estonia", codeTwo: "EE", codeThree: "EST", num: "233"},
&Country{full: "Ethiopia", codeTwo: "ET", codeThree: "ETH", num: "231"},
&Country{full: "Falkland Islands", codeTwo: "FK", codeThree: "FLK", num: "238"},
&Country{full: "Faroe Islands", codeTwo: "FO", codeThree: "FRO", num: "234"},
&Country{full: "Fiji", codeTwo: "FJ", codeThree: "FJI", num: "242"},
&Country{full: "Finland", codeTwo: "FI", codeThree: "FIN", num: "246"},
&Country{full: "France", codeTwo: "FR", codeThree: "FRA", num: "250"},
&Country{full: "French Guiana", codeTwo: "GF", codeThree: "GUF", num: "254"},
&Country{full: "French Polynesia", codeTwo: "PF", codeThree: "PYF", num: "258"},
&Country{full: "French Southern Territories", codeTwo: "TF", codeThree: "ATF", num: "260"},
&Country{full: "Gabon", codeTwo: "GA", codeThree: "GAB", num: "266"},
&Country{full: "Gambia", codeTwo: "GM", codeThree: "GMB", num: "270"},
&Country{full: "Georgia", codeTwo: "GE", codeThree: "GEO", num: "268"},
&Country{full: "Germany", codeTwo: "DE", codeThree: "DEU", num: "276"},
&Country{full: "Ghana", codeTwo: "GH", codeThree: "GHA", num: "288"},
&Country{full: "Gibraltar", codeTwo: "GI", codeThree: "GIB", num: "292"},
&Country{full: "Greece", codeTwo: "GR", codeThree: "GRC", num: "300"},
&Country{full: "Greenland", codeTwo: "GL", codeThree: "GRL", num: "304"},
&Country{full: "Grenada", codeTwo: "GD", codeThree: "GRD", num: "308"},
&Country{full: "Guadeloupe", codeTwo: "GP", codeThree: "GLP", num: "312"},
&Country{full: "Guam", codeTwo: "GU", codeThree: "GUM", num: "316"},
&Country{full: "Guatemala", codeTwo: "GT", codeThree: "GTM", num: "320"},
&Country{full: "Guernsey", codeTwo: "GG", codeThree: "GGY", num: "831"},
&Country{full: "Guinea", codeTwo: "GN", codeThree: "GIN", num: "324"},
&Country{full: "Guinea-Bissau", codeTwo: "GW", codeThree: "GNB", num: "624"},
&Country{full: "Guyana", codeTwo: "GY", codeThree: "GUY", num: "328"},
&Country{full: "Haiti", codeTwo: "HT", codeThree: "HTI", num: "332"},
&Country{full: "Heard Island and Mcdonald Islands", codeTwo: "HM", codeThree: "HMD", num: "334"},
&Country{full: "Holy See", codeTwo: "VA", codeThree: "VAT", num: "336"},
&Country{full: "Honduras", codeTwo: "HN", codeThree: "HND", num: "340"},
&Country{full: "Hungary", codeTwo: "HU", codeThree: "HUN", num: "348"},
&Country{full: "Iceland", codeTwo: "IS", codeThree: "ISL", num: "352"},
&Country{full: "India", codeTwo: "IN", codeThree: "IND", num: "356"},
&Country{full: "Indonesia", codeTwo: "ID", codeThree: "IDN", num: "360"},
&Country{full: "Iran, Islamic Republic of", codeTwo: "IR", codeThree: "IRN", num: "364"},
&Country{full: "Iraq", codeTwo: "IQ", codeThree: "IRQ", num: "368"},
&Country{full: "Ireland", codeTwo: "IE", codeThree: "IRL", num: "372"},
&Country{full: "Isle of Man", codeTwo: "IM", codeThree: "IMN", num: "833"},
&Country{full: "Israel", codeTwo: "IL", codeThree: "ISR", num: "376"},
&Country{full: "Italy", codeTwo: "IT", codeThree: "ITA", num: "380"},
&Country{full: "Jamaica", codeTwo: "JM", codeThree: "JAM", num: "388"},
&Country{full: "Japan", codeTwo: "JP", codeThree: "JPN", num: "392"},
&Country{full: "Jersey", codeTwo: "JE", codeThree: "JEY", num: "832"},
&Country{full: "Jordan", codeTwo: "JO", codeThree: "JOR", num: "400"},
&Country{full: "Kazakhstan", codeTwo: "KZ", codeThree: "KAZ", num: "398"},
&Country{full: "Kenya", codeTwo: "KE", codeThree: "KEN", num: "404"},
&Country{full: "Kiribati", codeTwo: "KI", codeThree: "KIR", num: "296"},
&Country{full: "Korea, Democratic People's Republic of", codeTwo: "KP", codeThree: "PRK", num: "408"},
&Country{full: "Korea, Republic of", codeTwo: "KR", codeThree: "KOR", num: "410"},
&Country{full: "Kuwait", codeTwo: "KW", codeThree: "KWT", num: "414"},
&Country{full: "Kyrgyzstan", codeTwo: "KG", codeThree: "KGZ", num: "417"},
&Country{full: "L<NAME>", codeTwo: "LA", codeThree: "LAO", num: "418"},
&Country{full: "Latvia", codeTwo: "LV", codeThree: "LVA", num: "428"},
&Country{full: "Lebanon", codeTwo: "LB", codeThree: "LBN", num: "422"},
&Country{full: "Lesotho", codeTwo: "LS", codeThree: "LSO", num: "426"},
&Country{full: "Liberia", codeTwo: "LR", codeThree: "LBR", num: "430"},
&Country{full: "Libya", codeTwo: "LY", codeThree: "LBY", num: "434"},
&Country{full: "Liechtenstein", codeTwo: "LI", codeThree: "LIE", num: "438"},
&Country{full: "Lithuania", codeTwo: "LT", codeThree: "LTU", num: "440"},
&Country{full: "Luxembourg", codeTwo: "LU", codeThree: "LUX", num: "442"},
&Country{full: "Macedonia", codeTwo: "MK", codeThree: "MKD", num: "807"},
&Country{full: "Madagascar", codeTwo: "MG", codeThree: "MDG", num: "450"},
&Country{full: "Malawi", codeTwo: "MW", codeThree: "MWI", num: "454"},
&Country{full: "Malaysia", codeTwo: "MY", codeThree: "MYS", num: "458"},
&Country{full: "Maldives", codeTwo: "MV", codeThree: "MDV", num: "462"},
&Country{full: "Mali", codeTwo: "ML", codeThree: "MLI", num: "466"},
&Country{full: "Malta", codeTwo: "MT", codeThree: "MLT", num: "470"},
&Country{full: "Marshall Islands", codeTwo: "MH", codeThree: "MHL", num: "584"},
&Country{full: "Martinique", codeTwo: "MQ", codeThree: "MTQ", num: "474"},
&Country{full: "Mauritania", codeTwo: "MR", codeThree: "MRT", num: "478"},
&Country{full: "Mauritius", codeTwo: "MU", codeThree: "MUS", num: "480"},
&Country{full: "Mayotte", codeTwo: "YT", codeThree: "MYT", num: "175"},
&Country{full: "Mexico", codeTwo: "MX", codeThree: "MEX", num: "484"},
&Country{full: "Micronesia", codeTwo: "FM", codeThree: "FSM", num: "583"},
&Country{full: "Moldova", codeTwo: "MD", codeThree: "MDA", num: "498"},
&Country{full: "Monaco", codeTwo: "MC", codeThree: "MCO", num: "492"},
&Country{full: "Mongolia", codeTwo: "MN", codeThree: "MNG", num: "496"},
&Country{full: "Montenegro", codeTwo: "ME", codeThree: "MNE", num: "499"},
&Country{full: "Montserrat", codeTwo: "MS", codeThree: "MSR", num: "500"},
&Country{full: "Morocco", codeTwo: "MA", codeThree: "MAR", num: "504"},
&Country{full: "Mozambique", codeTwo: "MZ", codeThree: "MOZ", num: "508"},
&Country{full: "Myanmar", codeTwo: "MM", codeThree: "MMR", num: "104"},
&Country{full: "Namibia", codeTwo: "NA", codeThree: "NAM", num: "516"},
&Country{full: "Nauru", codeTwo: "NR", codeThree: "NRU", num: "520"},
&Country{full: "Nepal", codeTwo: "NP", codeThree: "NPL", num: "524"},
&Country{full: "Netherlands", codeTwo: "NL", codeThree: "NLD", num: "528"},
&Country{full: "Netherlands Antilles", codeTwo: "AN", codeThree: "ANT", num: "530"},
&Country{full: "New Caledonia", codeTwo: "NC", codeThree: "NCL", num: "540"},
&Country{full: "New Zealand", codeTwo: "NZ", codeThree: "NZL", num: "554"},
&Country{full: "Nicaragua", codeTwo: "NI", codeThree: "NIC", num: "558"},
&Country{full: "Niger", codeTwo: "NE", codeThree: "NER", num: "562"},
&Country{full: "Nigeria", codeTwo: "NG", codeThree: "NGA", num: "566"},
&Country{full: "Niue", codeTwo: "NU", codeThree: "NIU", num: "570"},
&Country{full: "Norfolk Island", codeTwo: "NF", codeThree: "NFK", num: "574"},
&Country{full: "Northern Mariana Islands", codeTwo: "MP", codeThree: "MNP", num: "580"},
&Country{full: "Norway", codeTwo: "NO", codeThree: "NOR", num: "578"},
&Country{full: "Oman", codeTwo: "OM", codeThree: "OMN", num: "512"},
&Country{full: "Pakistan", codeTwo: "PK", codeThree: "PAK", num: "586"},
&Country{full: "Palau", codeTwo: "PW", codeThree: "PLW", num: "585"},
&Country{full: "Palestine", codeTwo: "PS", codeThree: "PSE", num: "275"},
&Country{full: "Panama", codeTwo: "PA", codeThree: "PAN", num: "591"},
&Country{full: "Papua New Guinea", codeTwo: "PG", codeThree: "PNG", num: "598"},
&Country{full: "Paraguay", codeTwo: "PY", codeThree: "PRY", num: "600"},
&Country{full: "Peru", codeTwo: "PE", codeThree: "PER", num: "604"},
&Country{full: "Philippines", codeTwo: "PH", codeThree: "PHL", num: "608"},
&Country{full: "Pitcairn", codeTwo: "PN", codeThree: "PCN", num: "612"},
&Country{full: "Poland", codeTwo: "PL", codeThree: "POL", num: "616"},
&Country{full: "Portugal", codeTwo: "PT", codeThree: "PRT", num: "620"},
&Country{full: "Puerto Rico", codeTwo: "PR", codeThree: "PRI", num: "630"},
&Country{full: "Qatar", codeTwo: "QA", codeThree: "QAT", num: "634"},
&Country{full: "Réunion", codeTwo: "RE", codeThree: "REU", num: "638"},
&Country{full: "Romania", codeTwo: "RO", codeThree: "ROU", num: "642"},
&Country{full: "Russian Federation", codeTwo: "RU", codeThree: "RUS", num: "643"},
&Country{full: "Rwanda", codeTwo: "RW", codeThree: "RWA", num: "646"},
&Country{full: "Saint-Barthélemy", codeTwo: "BL", codeThree: "BLM", num: "652"},
&Country{full: "<NAME>", codeTwo: "SH", codeThree: "SHN", num: "654"},
&Country{full: "S<NAME>", codeTwo: "KN", codeThree: "KNA", num: "659"},
&Country{full: "<NAME>", codeTwo: "LC", codeThree: "LCA", num: "662"},
&Country{full: "Saint-Martin", codeTwo: "MF", codeThree: "MAF", num: "663"},
&Country{full: "<NAME>", codeTwo: "PM", codeThree: "SPM", num: "666"},
&Country{full: "<NAME>", codeTwo: "VC", codeThree: "VCT", num: "670"},
&Country{full: "Samoa", codeTwo: "WS", codeThree: "WSM", num: "882"},
&Country{full: "San Marino", codeTwo: "SM", codeThree: "SMR", num: "674"},
&Country{full: "<NAME>", codeTwo: "ST", codeThree: "STP", num: "678"},
&Country{full: "Saudi Arabia", codeTwo: "SA", codeThree: "SAU", num: "682"},
&Country{full: "Senegal", codeTwo: "SN", codeThree: "SEN", num: "686"},
&Country{full: "Serbia", codeTwo: "RS", codeThree: "SRB", num: "688"},
&Country{full: "Seychelles", codeTwo: "SC", codeThree: "SYC", num: "690"},
&Country{full: "Sierra Leone", codeTwo: "SL", codeThree: "SLE", num: "694"},
&Country{full: "Singapore", codeTwo: "SG", codeThree: "SGP", num: "702"},
&Country{full: "Slovakia", codeTwo: "SK", codeThree: "SVK", num: "703"},
&Country{full: "Slovenia", codeTwo: "SI", codeThree: "SVN", num: "705"},
&Country{full: "Solomon Islands", codeTwo: "SB", codeThree: "SLB", num: "090"},
&Country{full: "Somalia", codeTwo: "SO", codeThree: "SOM", num: "706"},
&Country{full: "South Africa", codeTwo: "ZA", codeThree: "ZAF", num: "710"},
&Country{full: "South Georgia and the South Sandwich Islands", codeTwo: "GS", codeThree: "SGS", num: "239"},
&Country{full: "South Sudan", codeTwo: "SS", codeThree: "SSD", num: "728"},
&Country{full: "Spain", codeTwo: "ES", codeThree: "ESP", num: "724"},
&Country{full: "Sri Lanka", codeTwo: "LK", codeThree: "LKA", num: "144"},
&Country{full: "Sudan", codeTwo: "SD", codeThree: "SDN", num: "736"},
&Country{full: "Suriname", codeTwo: "SR", codeThree: "SUR", num: "740"},
&Country{full: "Svalbard and Jan Mayen Islands", codeTwo: "SJ", codeThree: "SJM", num: "744"},
&Country{full: "Swaziland", codeTwo: "SZ", codeThree: "SWZ", num: "748"},
&Country{full: "Sweden", codeTwo: "SE", codeThree: "SWE", num: "752"},
&Country{full: "Switzerland", codeTwo: "CH", codeThree: "CHE", num: "756"},
&Country{full: "Syrian Arab Republic (Syria)", codeTwo: "SY", codeThree: "SYR", num: "760"},
&Country{full: "Taiwan, Republic of China", codeTwo: "TW", codeThree: "TWN", num: "158"},
&Country{full: "Tajikistan", codeTwo: "TJ", codeThree: "TJK", num: "762"},
&Country{full: "Tanzania *, United Republic of", codeTwo: "TZ", codeThree: "TZA", num: "834"},
&Country{full: "Thailand", codeTwo: "TH", codeThree: "THA", num: "764"},
&Country{full: "Timor-Leste", codeTwo: "TL", codeThree: "TLS", num: "626"},
&Country{full: "Togo", codeTwo: "TG", codeThree: "TGO", num: "768"},
&Country{full: "Tokelau", codeTwo: "TK", codeThree: "TKL", num: "772"},
&Country{full: "Tonga", codeTwo: "TO", codeThree: "TON", num: "776"},
&Country{full: "Trinidad and Tobago", codeTwo: "TT", codeThree: "TTO", num: "780"},
&Country{full: "Tunisia", codeTwo: "TN", codeThree: "TUN", num: "788"},
&Country{full: "Turkey", codeTwo: "TR", codeThree: "TUR", num: "792"},
&Country{full: "Turkmenistan", codeTwo: "TM", codeThree: "TKM", num: "795"},
&Country{full: "Turks and Caicos Islands", codeTwo: "TC", codeThree: "TCA", num: "796"},
&Country{full: "Tuvalu", codeTwo: "TV", codeThree: "TUV", num: "798"},
&Country{full: "Uganda", codeTwo: "UG", codeThree: "UGA", num: "800"},
&Country{full: "Ukraine", codeTwo: "UA", codeThree: "UKR", num: "804"},
&Country{full: "United Arab Emirates", codeTwo: "AE", codeThree: "ARE", num: "784"},
&Country{full: "United Kingdom", codeTwo: "GB", codeThree: "GBR", num: "826"},
&Country{full: "United States of America", codeTwo: "US", codeThree: "USA", num: "840"},
&Country{full: "United States Minor Outlying Islands", codeTwo: "UM", codeThree: "UMI", num: "581"},
&Country{full: "Uruguay", codeTwo: "UY", codeThree: "URY", num: "858"},
&Country{full: "Uzbekistan", codeTwo: "UZ", codeThree: "UZB", num: "860"},
&Country{full: "Vanuatu", codeTwo: "VU", codeThree: "VUT", num: "548"},
&Country{full: "Venezuela", codeTwo: "VE", codeThree: "VEN", num: "862"},
&Country{full: "Viet Nam", codeTwo: "VN", codeThree: "VNM", num: "704"},
&Country{full: "Virgin Islands, US", codeTwo: "VI", codeThree: "VIR", num: "850"},
&Country{full: "Wallis and Futuna Islands", codeTwo: "WF", codeThree: "WLF", num: "876"},
&Country{full: "Western Sahara", codeTwo: "EH", codeThree: "ESH", num: "732"},
&Country{full: "Yemen", codeTwo: "YE", codeThree: "YEM", num: "887"},
&Country{full: "Zambia", codeTwo: "ZM", codeThree: "ZMB", num: "894"},
&Country{full: "Zimbabwe", codeTwo: "ZW", codeThree: "ZWE", num: "716"},
} | country.go | 0.710226 | 0.607634 | country.go | starcoder |
package common
import (
"encoding/json"
"fmt"
"strconv"
)
// Label is attached to, and identifies a claim (or a set of claims). A label
// can be either an integer or a string.
type Label struct {
	intValue int    // holds the value when isInt is true
	strValue string // holds the value when isInt is false
	isInt    bool   // discriminates which field above carries the value
}
// Value returns the value of the label as an interface{}. The underlying
// value will be either a string or an int.
func (l Label) Value() interface{} {
	if l.isInt {
		return l.intValue
	}
	return l.strValue
}

// IsInt returns true iff the label value is an integer.
func (l Label) IsInt() bool {
	return l.isInt
}
// AsInt returns the value of the label as an integer, along with an error. If
// the underlying label value is an integer, error will be nil. For string
// labels, the value of -1 is returned, and the error is set.
func (l Label) AsInt() (int, error) {
	if !l.isInt {
		return -1, fmt.Errorf("label value not an integer")
	}
	return l.intValue, nil
}
// String returns the Label value as a string. In case of string values, the
// value itself is returned. For integer Labels, the integer is formatted in
// base 10 (strconv.Itoa avoids fmt's reflection/boxing overhead).
func (l Label) String() string {
	if l.isInt {
		return strconv.Itoa(l.intValue)
	}
	return l.strValue
}
// FromString populates the label value from the specified string, preferring
// to interpret it as a string representation of an int. Note: this is
// different from StringLabel(), which always sets the string value, even if
// provided text is a representation of an int.
func (l *Label) FromString(text string) {
	if intVal, err := strconv.Atoi(text); err == nil {
		l.intValue = intVal
		l.isInt = true
		return
	}
	l.strValue = text
	l.isInt = false
}
// MarshalJSON encodes the Label as a JSON string. Integer values are
// rendered in their decimal string form, so an int Label round-trips back
// to an int via UnmarshalJSON/FromString.
func (l Label) MarshalJSON() ([]byte, error) {
	if l.isInt {
		return json.Marshal(fmt.Sprint(l.intValue))
	}
	return json.Marshal(l.strValue)
}

// UnmarshalJSON decodes a JSON string into the Label; strings that parse as
// integers become int Labels (see FromString).
func (l *Label) UnmarshalJSON(data []byte) error {
	var strVal string
	if err := json.Unmarshal(data, &strVal); err != nil {
		return err
	}
	l.FromString(strVal)
	return nil
}
// MarshalCBOR encodes the Label's underlying value (int or string) using the
// package-level CBOR encoding mode em.
func (l Label) MarshalCBOR() ([]byte, error) {
	if l.isInt {
		return em.Marshal(l.intValue)
	}
	return em.Marshal(l.strValue)
}
// UnmarshalCBOR populates the Label from CBOR data, trying an integer value
// first and falling back to a string value.
func (l *Label) UnmarshalCBOR(data []byte) error {
	var intVal int
	if err := dm.Unmarshal(data, &intVal); err == nil {
		l.intValue = intVal
		l.isInt = true
		return nil
	}

	var strVal string
	if err := dm.Unmarshal(data, &strVal); err == nil {
		l.strValue = strVal
		l.isInt = false
		// BUG FIX: the original fell through here and returned the
		// "could not extract" error even after a successful string decode.
		return nil
	}

	return fmt.Errorf("could not extract Label from CBOR: %v", data)
}
// NewIntLabel creates a new Label from the specified int value.
func NewIntLabel(value int) Label {
return Label{intValue: value, isInt: true}
}
// NewStringLabel creates a new Label from the specified string value.
func NewStringLabel(value string) Label {
return Label{strValue: value, isInt: false}
} | common/label.go | 0.801159 | 0.465691 | label.go | starcoder |
package mockstore
import (
"bytes"
"time"
)
// expectedDelete records one queued expectation for MockStore.Delete.
type expectedDelete struct {
	inputToken string // token Delete must be called with
	returnErr  error  // error to return for that call
}

// expectedFind records one queued expectation for MockStore.Find.
type expectedFind struct {
	inputToken  string
	returnB     []byte
	returnFound bool
	returnErr   error
}

// expectedCommit records one queued expectation for MockStore.Commit.
type expectedCommit struct {
	inputToken  string
	inputB      []byte
	inputExpiry time.Time
	returnErr   error
}

// expectedAll records one queued expectation for MockStore.All.
type expectedAll struct {
	returnMB  map[string][]byte
	returnErr error
}

// MockStore is a test double for a session store. Expectations are queued
// via the Expect* methods and consumed by the corresponding store methods,
// which panic when called without a matching expectation.
type MockStore struct {
	deleteExpectations []expectedDelete
	findExpectations   []expectedFind
	commitExpectations []expectedCommit
	allExpectations    []expectedAll
}
// ExpectDelete queues an expectation that Delete will be called with token,
// and specifies the error it should return.
func (m *MockStore) ExpectDelete(token string, err error) {
	m.deleteExpectations = append(m.deleteExpectations, expectedDelete{
		inputToken: token,
		returnErr:  err,
	})
}
// Delete implements the Store interface. It consumes the first queued
// expectation matching token and returns its configured error; it panics
// when no expectation matches.
func (m *MockStore) Delete(token string) (err error) {
	for i, exp := range m.deleteExpectations {
		if exp.inputToken == token {
			// Consume the matched expectation, preserving the order
			// of the remaining queue entries.
			m.deleteExpectations = append(m.deleteExpectations[:i], m.deleteExpectations[i+1:]...)
			return exp.returnErr
		}
	}
	panic("store.Delete called unexpectedly")
}
// ExpectFind queues an expectation that Find will be called with token, and
// specifies the bytes, found flag, and error it should return.
func (m *MockStore) ExpectFind(token string, b []byte, found bool, err error) {
	m.findExpectations = append(m.findExpectations, expectedFind{
		inputToken:  token,
		returnB:     b,
		returnFound: found,
		returnErr:   err,
	})
}
// Find implements the Store interface. It consumes the first queued
// expectation matching token and returns its configured results; it panics
// when no expectation matches.
func (m *MockStore) Find(token string) (b []byte, found bool, err error) {
	for i, exp := range m.findExpectations {
		if exp.inputToken == token {
			// Consume the matched expectation, keeping queue order.
			m.findExpectations = append(m.findExpectations[:i], m.findExpectations[i+1:]...)
			return exp.returnB, exp.returnFound, exp.returnErr
		}
	}
	panic("store.Find called unexpectedly")
}
// ExpectCommit queues an expectation that Commit will be called with the
// given token, bytes, and expiry, and specifies the error it should return.
func (m *MockStore) ExpectCommit(token string, b []byte, expiry time.Time, err error) {
	m.commitExpectations = append(m.commitExpectations, expectedCommit{
		inputToken:  token,
		inputB:      b,
		inputExpiry: expiry,
		returnErr:   err,
	})
}
// Commit implements the Store interface. It consumes the first queued
// expectation matching token, bytes, and expiry, and returns its configured
// error; it panics when no expectation matches.
func (m *MockStore) Commit(token string, b []byte, expiry time.Time) (err error) {
	for i, exp := range m.commitExpectations {
		if exp.inputToken == token && bytes.Equal(exp.inputB, b) && exp.inputExpiry == expiry {
			// Consume the matched expectation, keeping queue order.
			m.commitExpectations = append(m.commitExpectations[:i], m.commitExpectations[i+1:]...)
			return exp.returnErr
		}
	}
	panic("store.Commit called unexpectedly")
}
// ExpectAll queues an expectation that All will be called, and specifies the
// map and error it should return.
func (m *MockStore) ExpectAll(mb map[string][]byte, err error) {
	m.allExpectations = append(m.allExpectations, expectedAll{
		returnMB:  mb,
		returnErr: err,
	})
}
// All implements the IterableStore interface
func (m *MockStore) All() (map[string][]byte, error) {
var (
indexToRemove int
expectationFound bool
)
for i, expectation := range m.allExpectations {
if len(expectation.returnMB) == 3 {
indexToRemove = i
expectationFound = true
break
}
}
if !expectationFound {
panic("store.All called unexpectedly")
}
valueToReturn := m.allExpectations[indexToRemove]
m.allExpectations = m.allExpectations[:indexToRemove+copy(m.allExpectations[indexToRemove:], m.allExpectations[indexToRemove+1:])]
return valueToReturn.returnMB, valueToReturn.returnErr
} | mockstore/store.go | 0.644896 | 0.438905 | store.go | starcoder |
package stringset
// A StringSet is a map from a string to an empty struct. We choose empty
// structs because they have a size of zero and make it fairly clear that
// this shouldn't be treated as a map.
type StringSet map[string]struct{}
// New creates a new StringSet containing the specified strings.
func New(strings ...string) StringSet {
	set := make(StringSet, len(strings))
	for _, str := range strings {
		set[str] = struct{}{}
	}
	return set
}
// FromList converts a list of strings to a StringSet.
func FromList(strings []string) StringSet {
	return New(strings...)
}

// FromInterfaceList converts a list of interfaces that are known to be
// strings into a StringSet. The type assertion panics if any element is not
// a string.
func FromInterfaceList(strings []interface{}) StringSet {
	set := make(map[string]struct{}, len(strings))
	for _, str := range strings {
		set[str.(string)] = struct{}{}
	}
	return set
}
// ToList converts a StringSet to a list of strings. Order is unspecified
// because map iteration order is random.
func (inputSet StringSet) ToList() []string {
	returnList := make([]string, 0, len(inputSet))
	for key := range inputSet { // S1005: no need for the blank value
		returnList = append(returnList, key)
	}
	return returnList
}
// Clone copies a string set to a new, independent string set.
func (s StringSet) Clone() StringSet {
	returnSet := make(StringSet, len(s))
	for key := range s {
		returnSet[key] = struct{}{}
	}
	return returnSet
}
// Intersect returns a new StringSet with the intersection of all the
// elements in both sets.
func (s1 StringSet) Intersect(s2 StringSet) StringSet {
	return setOperation(s1, s2, true)
}

// Minus returns a new StringSet with the elements of s1 that are not in s2
// (the set difference s1 - s2).
func (s1 StringSet) Minus(s2 StringSet) StringSet {
	return setOperation(s1, s2, false)
}
// setOperation is a helper that computes either the intersection
// (wantElemsInSet2 == true) or the difference s1 - s2
// (wantElemsInSet2 == false) of the two sets.
func setOperation(s1, s2 StringSet, wantElemsInSet2 bool) map[string]struct{} {
	resultSet := make(map[string]struct{})
	for key := range s1 { // S1005: no need for the blank value
		if _, ok := s2[key]; ok == wantElemsInSet2 {
			resultSet[key] = struct{}{}
		}
	}
	return resultSet
}
// AddSet adds all the elements in a string set to the operand set.
func (s StringSet) AddSet(newValues StringSet) {
	for newValue := range newValues { // S1005: no need for the blank value
		s[newValue] = struct{}{}
	}
}
// AddAll adds all the elements in a string slice to the operand set.
func (s StringSet) AddAll(newValues []string) {
	for i := range newValues {
		s[newValues[i]] = struct{}{}
	}
}
// Equals returns true if two string sets have exactly the same elements.
func (s1 StringSet) Equals(s2 StringSet) bool {
	if len(s1) != len(s2) {
		return false
	}
	// Lengths match, so checking one-way containment suffices.
	for key := range s1 { // S1005: no need for the blank value
		if _, ok := s2[key]; !ok {
			return false
		}
	}
	return true
}
// Add adds an element to a string set.
func (s StringSet) Add(str string) {
	s[str] = struct{}{}
}

// Remove removes an element from a string set; it is a no-op when the
// element is absent.
func (s StringSet) Remove(str string) {
	delete(s, str)
}

// Contains returns true if the set contains the specified string.
func (s StringSet) Contains(str string) bool {
	_, ok := s[str]
	return ok
}
// Partition takes in two string slices and returns a tuple with (strings only in the first set,
// strings in both sets, strings only in the second set). It is a utility function that uses the
// set implmentation
func Partition(s1, s2 []string) (only1 []string, both []string, only2 []string) {
set1 := FromList(s1)
set2 := FromList(s2)
return set1.Minus(set2).ToList(), set1.Intersect(set2).ToList(), set2.Minus(set1).ToList()
} | stringset/stringset.go | 0.837753 | 0.44565 | stringset.go | starcoder |
package avl
import (
"fmt"
"github.com/pkg/errors"
"github.com/alexander-yu/stream/quantile/order"
)
// Node represents a node in an AVL tree, augmented with subtree size to
// support order statistics (Select/Rank).
type Node struct {
	left   *Node
	right  *Node
	val    float64
	height int // height of the subtree rooted here (leaf = 0; nil counts as -1)
	size   int // number of nodes in the subtree rooted here
}
// max returns the larger of the two ints.
func max(x int, y int) int {
	if y > x {
		return y
	}
	return x
}
// NewNode instantiates a leaf Node holding the provided value
// (height 0, size 1, no children).
func NewNode(val float64) *Node {
	return &Node{
		val:    val,
		height: 0,
		size:   1,
	}
}
// Left returns the left child of the node, or an error when called on a nil
// node.
// NOTE(review): the child is returned through the order.Node interface; a
// nil *Node child becomes a non-nil interface wrapping a typed nil — confirm
// callers do not rely on `== nil` checks on the result.
func (n *Node) Left() (order.Node, error) {
	if n == nil {
		return nil, errors.New("tried to retrieve child of nil node")
	}
	return n.left, nil
}

// Right returns the right child of the node, or an error when called on a
// nil node (same typed-nil caveat as Left).
func (n *Node) Right() (order.Node, error) {
	if n == nil {
		return nil, errors.New("tried to retrieve child of nil node")
	}
	return n.right, nil
}
// Height returns the height of the subtree rooted at the node. A nil node
// has height -1, so a leaf has height 0.
func (n *Node) Height() int {
	if n == nil {
		return -1
	}
	return n.height
}

// Size returns the number of nodes in the subtree rooted at the node; a nil
// node has size 0.
func (n *Node) Size() int {
	if n == nil {
		return 0
	}
	return n.size
}
// Value returns the value stored at the node. Unlike Height/Size, it is not
// nil-safe: calling it on a nil *Node dereferences nil.
func (n *Node) Value() float64 {
	return n.val
}

// TreeString returns the string representation of the subtree rooted at the
// node, or the empty string for a nil node.
func (n *Node) TreeString() string {
	if n == nil {
		return ""
	}
	return n.treeString("", "", true)
}
// add inserts val into the subtree rooted at n and returns the new subtree
// root after rebalancing. Duplicates (val == n.val) go to the left subtree.
func (n *Node) add(val float64) *Node {
	if n == nil {
		return NewNode(val)
	} else if val <= n.val {
		n.left = n.left.add(val)
	} else {
		n.right = n.right.add(val)
	}
	// Recompute the augmented fields on the way back up, then restore the
	// AVL invariant with rotations as needed.
	n.size = n.left.Size() + n.right.Size() + 1
	n.height = max(n.left.Height(), n.right.Height()) + 1
	return n.balance()
}
// remove deletes one occurrence of val from the subtree rooted at n and
// returns the new subtree root after rebalancing.
func (n *Node) remove(val float64) *Node {
	// this case occurs if we attempt to remove a value
	// that does not exist in the subtree; this will
	// result in remove() being a no-op
	if n == nil {
		return n
	}
	root := n
	if val < root.val {
		root.left = root.left.remove(val)
	} else if val > root.val {
		root.right = root.right.remove(val)
	} else {
		// Found the node to delete. With zero or one child, splice the
		// child in directly; with two children, replace this node with
		// the in-order successor (minimum of the right subtree).
		if root.left == nil {
			return root.right
		} else if root.right == nil {
			return root.left
		}
		root = n.right.min()
		root.right = n.right.removeMin()
		root.left = n.left
	}
	// Recompute augmented fields and rebalance on the way back up.
	root.size = root.left.Size() + root.right.Size() + 1
	root.height = max(root.left.Height(), root.right.Height()) + 1
	return root.balance()
}
// min returns the node holding the smallest value in the subtree rooted at
// n. It assumes n is non-nil.
func (n *Node) min() *Node {
	if n.left == nil {
		return n
	}
	return n.left.min()
}

// removeMin removes the minimum node from the subtree rooted at n and
// returns the new (rebalanced) subtree root. It assumes n is non-nil.
func (n *Node) removeMin() *Node {
	if n.left == nil {
		return n.right
	}
	n.left = n.left.removeMin()
	n.size = n.left.Size() + n.right.Size() + 1
	n.height = max(n.left.Height(), n.right.Height()) + 1
	return n.balance()
}
/*****************
* Rotations
*****************/
// balance restores the AVL invariant (|height(left) - height(right)| <= 1)
// at this node, performing single or double rotations as needed, and
// returns the new subtree root.
func (n *Node) balance() *Node {
	if n.heightDiff() < -1 {
		// Right-heavy. Since we've entered this block, we already
		// know that the right child is not nil
		if n.right.heightDiff() > 0 {
			// Right-left case: rotate the child first.
			n.right = n.right.rotateRight()
		}
		return n.rotateLeft()
	} else if n.heightDiff() > 1 {
		// Left-heavy. Since we've entered this block, we already
		// know that the left child is not nil
		if n.left.heightDiff() < 0 {
			// Left-right case: rotate the child first.
			n.left = n.left.rotateLeft()
		}
		return n.rotateRight()
	}
	return n
}
// heightDiff returns the balance factor of the node: the height of the left
// subtree minus the height of the right subtree.
func (n *Node) heightDiff() int {
	return n.left.Height() - n.right.Height()
}
// rotateLeft performs a left rotation around n, promoting n's right child
// to the subtree root, and returns that new root.
func (n *Node) rotateLeft() *Node {
	m := n.right
	n.right = m.left
	m.left = n
	// No need to call size() here; we already know that n is not nil, since
	// rotations are only called for non-leaf nodes
	m.size = n.size
	n.size = n.left.Size() + n.right.Size() + 1
	// n now sits below m, so recompute n's height before m's.
	n.height = max(n.left.Height(), n.right.Height()) + 1
	m.height = max(m.left.Height(), m.right.Height()) + 1
	return m
}
// rotateRight performs a right rotation around n, promoting n's left child
// to the subtree root, and returns that new root.
func (n *Node) rotateRight() *Node {
	m := n.left
	n.left = m.right
	m.right = n
	// No need to call size() here; we already know that n is not nil, since
	// rotations are only called for non-leaf nodes
	m.size = n.size
	n.size = n.left.Size() + n.right.Size() + 1
	// n now sits below m, so recompute n's height before m's.
	n.height = max(n.left.Height(), n.right.Height()) + 1
	m.height = max(m.left.Height(), m.right.Height()) + 1
	return m
}
/*******************
* Order Statistics
*******************/
// Select returns the node holding the kth smallest value (0-indexed) in
// the subtree rooted at the node, or nil when the subtree is empty or k
// is out of range.
func (n *Node) Select(k int) order.Node {
	if n == nil {
		return nil
	}
	leftCount := n.left.Size()
	switch {
	case k < leftCount:
		return n.left.Select(k)
	case k > leftCount:
		// Skip the left subtree and this node itself.
		return n.right.Select(k - leftCount - 1)
	default:
		return n
	}
}
// Rank returns the number of nodes strictly less than val that are
// contained in the subtree rooted at the node.
func (n *Node) Rank(val float64) int {
	if n == nil {
		return 0
	}
	switch {
	case val < n.val:
		return n.left.Rank(val)
	case val > n.val:
		// Everything in the left subtree plus this node is smaller.
		return n.left.Size() + 1 + n.right.Rank(val)
	default:
		return n.left.Size()
	}
}
/*******************
* Pretty-printing
*******************/
// treeString recursively prints out a subtree rooted at the node in a sideways format, as below:
// │ ┌── 7.000000
// │ ┌── 6.000000
// │ │ └── 5.000000
// └── 4.000000
// │ ┌── 3.000000
// └── 2.000000
// └── 1.000000
// └── 1.000000
// treeString appends a sideways rendering of the subtree rooted at n to
// result and returns it; prefix carries the indentation/guide characters
// accumulated by the ancestors.
func (n *Node) treeString(prefix string, result string, isTail bool) string {
	// isTail indicates whether or not the current node's parent branch needs to be represented
	// as a "tail", i.e. its branch needs to hang in the string representation, rather than branch upwards.
	if isTail {
		// If true, then we need to print the subtree like this:
		//  │ ┌── [n.right.treeString()]
		//  └── [n.val]
		//      └── [n.left.treeString()]
		if n.right != nil {
			result = n.right.treeString(fmt.Sprintf("%s│   ", prefix), result, false)
		}
		result = fmt.Sprintf("%s%s└── %f\n", result, prefix, n.val)
		if n.left != nil {
			result = n.left.treeString(fmt.Sprintf("%s    ", prefix), result, true)
		}
	} else {
		// If false, then we need to print the subtree like this:
		//      ┌── [n.right.treeString()]
		//  ┌── [n.val]
		//  │ └── [n.left.treeString()]
		if n.right != nil {
			result = n.right.treeString(fmt.Sprintf("%s    ", prefix), result, false)
		}
		result = fmt.Sprintf("%s%s┌── %f\n", result, prefix, n.val)
		if n.left != nil {
			result = n.left.treeString(fmt.Sprintf("%s│   ", prefix), result, true)
		}
	}
	return result
}
} | quantile/ost/avl/node.go | 0.847542 | 0.544801 | node.go | starcoder |
package data
import (
"math"
)
// Interface defines the methods required for types wishing to be stored
// in a SpacialTree.
type Interface interface {
	// Location returns the n-dimensional coordinates of the item.
	Location() []float64
}
// SpacialTree defines the interface for use in n-dimension space
// partitioning trees, such as k-d trees and quad/oct-trees.
// NOTE(review): "Spacial" is a misspelling of "Spatial", but renaming the
// exported type would break callers.
type SpacialTree interface {
	// Dimensions returns the number of dimensions used in the tree.
	Dimensions() int
	// Len returns the total number of items in the tree.
	Len() int
	// Items returns a list of items in the tree.
	Items() []Interface
	// Build (re)builds the tree from the given items.
	Build(items []Interface)
	// QueryPoint checks if an item is in the tree, based on its location.
	QueryPoint(item Interface) bool
	// QueryRange returns all items within the region defined by the list
	// of [min, max] pairs, one pair per dimension.
	QueryRange(ranges [][2]float64) []Interface
	// NearestNeighbor returns the single nearest neighbor of point.
	NearestNeighbor(dist DistanceMetric, point ...float64) Interface
	// NearestNeighbors returns the k nearest neighbors of point; it may
	// return fewer than k.
	NearestNeighbors(dist DistanceMetric, k int, point ...float64) []Interface
}
// DistanceMetric is a type of function that calculates the distance
// between 2 n-dimensional points. Both arguments to the function should
// be of equal length; the implementations in this package panic otherwise.
type DistanceMetric func([]float64, []float64) float64
// EuclideanSq is a DistanceMetric func which computes the
// euclidean/cartesian/geometric distance. It actually returns the sum of
// squares, without taking the square root.
func EuclideanSq(a, b []float64) float64 {
if len(a) != len(b) {
panic("a and b are different lengths")
}
sum := 0.0
for i := 0; i < len(a); i++ {
diff := b[i] - a[i]
sum += diff * diff
}
return sum
}
// Euclidean is the same as EuclideanSq() but takes the square root,
// yielding the true euclidean (L2) distance. It panics (via EuclideanSq)
// if a and b are different lengths.
func Euclidean(a, b []float64) float64 {
	return math.Sqrt(EuclideanSq(a, b))
}
// Manhattan is a DistanceMetric which computes the
// manhattan/taxi cab/snake (L1) distance: the sum of absolute component
// differences. It panics if a and b are different lengths.
func Manhattan(a, b []float64) float64 {
	if len(a) != len(b) {
		panic("a and b are different lengths")
	}
	total := 0.0
	for i := range a {
		total += math.Abs(b[i] - a[i])
	}
	return total
}
// Chebyshev is a DistanceMetric which computes the chebyshev (L∞)
// distance: the single largest absolute component difference.
// It panics if a and b are different lengths.
func Chebyshev(a, b []float64) float64 {
	if len(a) != len(b) {
		panic("a and b are different lengths")
	}
	best := 0.0
	for i := range a {
		best = math.Max(best, math.Abs(b[i]-a[i]))
	}
	return best
}
// Canberra is a DistanceMetric func which computes the canberra distance:
// Sum( |b-a| / (|b|+|a|) )
// A term whose components are both zero contributes 0 (the conventional
// Canberra treatment) instead of producing 0/0 = NaN, which the previous
// implementation did. It panics if a and b are different lengths.
func Canberra(a, b []float64) float64 {
	if len(a) != len(b) {
		panic("a and b are different lengths")
	}
	sum := 0.0
	for i := 0; i < len(a); i++ {
		denom := math.Abs(b[i]) + math.Abs(a[i])
		if denom == 0 {
			// both components are zero: skip to avoid 0/0 = NaN
			continue
		}
		sum += math.Abs(b[i]-a[i]) / denom
	}
	return sum
}
package cmd
import (
"testing"
"github.com/gomodule/redigo/redis"
"github.com/stretchr/testify/assert"
)
//ExampleList drives the redis list commands against a live connection.
//mapList mirrors the expected key -> list state used for verification.
type ExampleList struct {
	mapList map[string][]string
	conn    redis.Conn
}
//NewExampleList creates a list-command checker bound to conn, with an
//empty expectation mirror.
func NewExampleList(conn redis.Conn) *ExampleList {
	el := &ExampleList{conn: conn}
	el.mapList = map[string][]string{}
	return el
}
//LsetEqual verify that the return value of the lset key operation is correct
func (el *ExampleList) LsetEqual(t *testing.T, key string, index int, value string) {
	if _, ok := el.mapList[key]; !ok {
		el.mapList[key] = make([]string, 0, 10)
	}
	// NOTE(review): negative indexes are folded to their absolute value,
	// which diverges from redis semantics where -1 addresses the tail —
	// confirm against the callers' expectations.
	if index < 0 {
		index = -index
	}
	// NOTE(review): this indexed write panics when index >= len of the
	// mirror (including the freshly created empty slice above).
	el.mapList[key][index] = value
	reply, err := redis.String(el.conn.Do("lset", key, index, value))
	assert.Equal(t, "OK", reply)
	assert.Nil(t, err)
	// lset must not change the list length.
	llen, err := redis.Int(el.conn.Do("llen", key))
	assert.Equal(t, len(el.mapList[key]), llen)
	assert.Nil(t, err)
}
//LsetEqualErr runs lset with args and asserts it fails with errValue.
func (el *ExampleList) LsetEqualErr(t *testing.T, errValue string, args ...interface{}) {
	_, gotErr := el.conn.Do("lset", args...)
	assert.EqualError(t, gotErr, errValue)
}
//LpushEqual pushes values onto the head of key via lpush and checks that
//the reply matches the tracked list length.
func (el *ExampleList) LpushEqual(t *testing.T, key string, values ...string) {
	if _, ok := el.mapList[key]; !ok {
		el.mapList[key] = []string{}
	}
	args := make([]interface{}, 0, len(values)+1)
	args = append(args, key)
	for _, v := range values {
		args = append(args, v)
	}
	// lpush prepends in argument order, so the mirror receives the values
	// reversed, followed by the previous contents.
	mirror := make([]string, 0, len(values)+len(el.mapList[key]))
	for i := len(values) - 1; i >= 0; i-- {
		mirror = append(mirror, values[i])
	}
	mirror = append(mirror, el.mapList[key]...)
	el.mapList[key] = mirror
	reply, err := redis.Int(el.conn.Do("lpush", args...))
	assert.Equal(t, len(el.mapList[key]), reply)
	assert.Nil(t, err)
}
//LpushEqualErr runs lpush with args and asserts it fails with errValue.
func (el *ExampleList) LpushEqualErr(t *testing.T, errValue string, args ...interface{}) {
	_, gotErr := el.conn.Do("lpush", args...)
	assert.EqualError(t, gotErr, errValue)
}
//LpopEqual verify that the return value of the lpop key operation is correct
func (el *ExampleList) LpopEqual(t *testing.T, key string) {
	if vs, ok := el.mapList[key]; ok {
		// Mirror lpop: take the head element and drop it from the mirror.
		// NOTE(review): panics if the tracked list exists but is empty.
		v := vs[0]
		el.mapList[key] = el.mapList[key][1:]
		reply, err := redis.String(el.conn.Do("lpop", key))
		assert.Equal(t, v, reply)
		assert.Nil(t, err)
	} else {
		// Untracked key: lpop should yield a nil reply.
		reply, err := redis.String(el.conn.Do("lpop", key))
		assert.Equal(t, "", reply)
		assert.EqualError(t, err, "redigo: nil returned")
	}
}
//LpopEqualErr runs lpop with args and asserts it fails with errValue.
func (el *ExampleList) LpopEqualErr(t *testing.T, errValue string, args ...interface{}) {
	_, gotErr := el.conn.Do("lpop", args...)
	assert.EqualError(t, gotErr, errValue)
}
//LindexEqual verify that the return value of the lindex key operation is correct
func (el *ExampleList) LindexEqual(t *testing.T, key string, index int) {
	// NOTE(review): negative indexes are folded to their absolute value
	// before querying, which diverges from redis semantics where -1
	// addresses the tail — confirm against the callers' expectations.
	if index < 0 {
		index = -index
	}
	if vs, ok := el.mapList[key]; ok {
		// NOTE(review): no bounds check — panics if index >= len(vs).
		// (A dead self-assignment of the mirror slice was removed here.)
		v := vs[index]
		reply, err := redis.String(el.conn.Do("lindex", key, index))
		assert.Equal(t, v, reply)
		assert.Nil(t, err)
	} else {
		// Untracked key: lindex should yield a nil reply.
		reply, err := redis.String(el.conn.Do("lindex", key, index))
		assert.Equal(t, "", reply)
		assert.EqualError(t, err, "redigo: nil returned")
	}
}
//LindexEqualErr runs lindex with args and asserts it fails with errValue.
func (el *ExampleList) LindexEqualErr(t *testing.T, errValue string, args ...interface{}) {
	_, gotErr := el.conn.Do("lindex", args...)
	assert.EqualError(t, gotErr, errValue)
}
//LrangeEqual verify that the return value of the Lrange key operation is correct
func (el *ExampleList) LrangeEqual(t *testing.T, key string, start, end int) {
	// A start past the end of the tracked list must produce an empty reply.
	if start > len(el.mapList[key]) {
		reply, err := redis.Strings(el.conn.Do("lrange", key, start, end))
		assert.Equal(t, []string{}, reply)
		assert.Nil(t, err)
		return
	}
	v, ok := el.mapList[key]
	if !ok {
		reply, err := redis.Strings(el.conn.Do("lrange", key, start, end))
		assert.Equal(t, v, reply)
		assert.Nil(t, err)
		return
	}
	// NOTE(review): negative bounds are folded to absolute values, which
	// diverges from redis semantics where -1 addresses the tail.
	if start < 0 {
		start = -start
	}
	if end < 0 {
		end = -end
	}
	// Slice only when end addresses an existing element; the previous
	// `end <= len` bound allowed end == len, slicing one past the logical
	// end of the mirror (garbage data, or a panic when len == cap).
	if end < len(el.mapList[key]) {
		v = v[start : end+1]
	}
	reply, err := redis.Strings(el.conn.Do("lrange", key, start, end))
	assert.Equal(t, v, reply)
	assert.Nil(t, err)
}
//LrangeEqualErr runs lrange with args and asserts it fails with errValue.
func (el *ExampleList) LrangeEqualErr(t *testing.T, errValue string, args ...interface{}) {
	_, gotErr := el.conn.Do("lrange", args...)
	assert.EqualError(t, gotErr, errValue)
}
//RpushEqual appends values to the tail of key via rpush and checks the
//reply against the tracked list length.
func (el *ExampleList) RpushEqual(t *testing.T, key string, values []string) {
	if _, ok := el.mapList[key]; !ok {
		el.mapList[key] = []string{}
	}
	args := make([]interface{}, 0, len(values)+1)
	args = append(args, key)
	// rpush appends in argument order, so the mirror does too.
	for _, v := range values {
		el.mapList[key] = append(el.mapList[key], v)
		args = append(args, v)
	}
	reply, err := redis.Int(el.conn.Do("rpush", args...))
	assert.Equal(t, len(el.mapList[key]), reply)
	assert.Nil(t, err)
}
//RpushEqualErr verify that the return err value of the rpush key operation is correct
func (el *ExampleList) RpushEqualErr(t *testing.T, errValue string, args ...interface{}) {
	// lowercase command name for consistency with the other helpers
	_, err := el.conn.Do("rpush", args...)
	assert.EqualError(t, err, errValue)
}
//RpopEqual verify that the return value of the rpop key operation is correct
func (el *ExampleList) RpopEqual(t *testing.T, key string) {
	// NOTE(review): panics if the key is untracked or its list is empty.
	last := len(el.mapList[key]) - 1
	v := el.mapList[key][last]
	// rpop removes the tail element, so drop it from the mirror too.
	// (The previous code re-sliced to the full length, a no-op that left
	// the popped element in the mirror.)
	el.mapList[key] = el.mapList[key][:last]
	reply, err := redis.String(el.conn.Do("rpop", key))
	assert.Equal(t, v, reply)
	assert.Nil(t, err)
}
//RpopEqualErr verify that the return err value of the rpop key operation is correct
func (el *ExampleList) RpopEqualErr(t *testing.T, errValue string, args ...interface{}) {
	// lowercase command name for consistency with the other helpers
	_, err := el.conn.Do("rpop", args...)
	assert.EqualError(t, err, errValue)
}
//LlenEqual asserts that llen for key reports the tracked mirror length.
func (el *ExampleList) LlenEqual(t *testing.T, key string) {
	got, err := redis.Int(el.conn.Do("llen", key))
	assert.Equal(t, len(el.mapList[key]), got)
	assert.Nil(t, err)
}
//LlenEqualErr verify that the return err value of the llen key operation is correct
func (el *ExampleList) LlenEqualErr(t *testing.T, errValue string, args ...interface{}) {
	// lowercase command name for consistency with the other helpers
	_, err := el.conn.Do("llen", args...)
} | tools/autotest/cmd/list.go | 0.611498 | 0.667757 | list.go | starcoder |
package alphavantage
import (
"fmt"
"io"
"net/http"
"net/url"
"github.com/pkg/errors"
)
// NewTimeSeriesService constructs the time-series API wrapper around s.
// The suite provides realtime and historical global equity data in 4
// temporal resolutions (daily, weekly, monthly, intraday); daily, weekly,
// and monthly series contain 20+ years of historical data.
// See https://www.alphavantage.co/documentation/#time-series-data
func NewTimeSeriesService(s *Service) *TimeSeriesService {
	return &TimeSeriesService{s: s}
}
// TimeSeriesService https://www.alphavantage.co/documentation/#time-series-data
// This suite of APIs provide realtime and historical global equity data in 4 different temporal resolutions:
// (1) daily, (2) weekly, (3) monthly, and (4) intraday.
// Daily, weekly, and monthly time series contain 20+ years of historical data.
type TimeSeriesService struct {
	s *Service // parent service supplying the HTTP client and base path
}
// Intraday returns a call for TIME_SERIES_INTRADAY: intraday time series
// (timestamp, open, high, low, close, volume) of the equity specified, at
// the given interval. The response datatype is fixed to csv.
// See https://www.alphavantage.co/documentation/#intraday
func (r *TimeSeriesService) Intraday(symbol, interval string) *TimeSeriesIntradayCall {
	params := url.Values{}
	params.Set("function", "TIME_SERIES_INTRADAY")
	params.Set("symbol", symbol)
	params.Set("interval", interval)
	params.Set("datatype", "csv")
	return &TimeSeriesIntradayCall{
		DefaultCall: DefaultCall{s: r.s, urlParams: params},
	}
}
// TimeSeriesIntradayCall builds and executes a TIME_SERIES_INTRADAY request.
// https://www.alphavantage.co/documentation/#intraday
// This API returns intraday time series (timestamp, open, high, low, close, volume) of the equity specified.
// datatype fixed to csv
type TimeSeriesIntradayCall struct {
	DefaultCall
}
// Outputsize sets the outputsize query parameter. By default, outputsize=compact.
// Strings compact and full are accepted with the following specifications:
// compact returns only the latest 100 data points;
// full returns the full-length time series of 20+ years of historical data.
// The "compact" option is recommended if you would like to reduce the data size of each API call.
func (c *TimeSeriesIntradayCall) Outputsize(outputsize string) *TimeSeriesIntradayCall {
	c.urlParams.Set("outputsize", outputsize)
	return c
}
// func (c *TimeSeriesIntradayCall) Datatype(datatype string) *TimeSeriesIntradayCall {
// c.urlParams.Set("datatype", datatype)
// return c
// }
// doRequest issues the HTTP GET for this call and returns the raw response.
func (c *TimeSeriesIntradayCall) doRequest() (*http.Response, error) {
	reqHeaders := make(http.Header)
	for k, v := range c.header {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	urls := ResolveRelative(c.s.BasePath) + "?" + c.urlParams.Encode()
	// GET requests carry no body; pass nil directly instead of declaring
	// a separate nil io.Reader.
	req, err := http.NewRequest("GET", urls, nil)
	if err != nil {
		return nil, errors.Wrapf(err, "http.NewRequest")
	}
	req.Header = reqHeaders
	return SendRequest(c.ctx, c.s.client, req)
}
// Do sends the request, checks the response, and decodes the CSV payload
// into a TimeSeriesList.
func (c *TimeSeriesIntradayCall) Do() (*TimeSeriesList, error) {
	res, err := c.doRequest()
	// Handle 304 first: it may arrive with a non-nil response regardless of err.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, errors.Wrapf(err, "doRequest")
	}
	defer res.Body.Close()
	if err := CheckResponse(res); err != nil {
		return nil, errors.Wrapf(err, "CheckResponse")
	}
	// Carry the HTTP metadata alongside the decoded rows.
	ret := &TimeSeriesList{
		ServerResponse: ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := new([]*TimeSeries)
	if err := DecodeResponseCSV(target, res); err != nil {
		return nil, errors.Wrapf(err, "DecodeResponseCSV")
	}
	ret.TimeSeries = *target
	return ret, nil
}
// Daily returns a call for TIME_SERIES_DAILY: daily OHLCV series for the
// given symbol covering 20+ years, with the current trading day updated
// realtime. The response datatype is fixed to csv.
// See https://www.alphavantage.co/documentation/#daily
func (r *TimeSeriesService) Daily(symbol string) *TimeSeriesDailyCall {
	params := url.Values{}
	params.Set("function", "TIME_SERIES_DAILY")
	params.Set("symbol", symbol)
	params.Set("datatype", "csv")
	return &TimeSeriesDailyCall{
		DefaultCall: DefaultCall{s: r.s, urlParams: params},
	}
}
// TimeSeriesDailyCall builds and executes a TIME_SERIES_DAILY request.
// https://www.alphavantage.co/documentation/#daily
// This API returns daily time series (date, daily open, daily high, daily low, daily close, daily volume) of the global equity specified, covering 20+ years of historical data.
// The most recent data point is the prices and volume information of the current trading day, updated realtime.
// datatype fixed to csv
type TimeSeriesDailyCall struct {
	DefaultCall
}
// Outputsize sets the outputsize query parameter. By default, outputsize=compact.
// Strings compact and full are accepted with the following specifications:
// compact returns only the latest 100 data points;
// full returns the full-length time series of 20+ years of historical data.
// The "compact" option is recommended if you would like to reduce the data size of each API call.
func (c *TimeSeriesDailyCall) Outputsize(outputsize string) *TimeSeriesDailyCall {
	c.urlParams.Set("outputsize", outputsize)
	return c
}
// func (c *TimeSeriesDailyCall) Datatype(datatype string) *TimeSeriesDailyCall {
// c.urlParams.Set("datatype", datatype)
// return c
// }
// doRequest issues the HTTP GET for this call and returns the raw response.
func (c *TimeSeriesDailyCall) doRequest() (*http.Response, error) {
	reqHeaders := make(http.Header)
	for k, v := range c.header {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	urls := ResolveRelative(c.s.BasePath) + "?" + c.urlParams.Encode()
	// GET requests carry no body; pass nil directly instead of declaring
	// a separate nil io.Reader.
	req, err := http.NewRequest("GET", urls, nil)
	if err != nil {
		return nil, errors.Wrapf(err, "http.NewRequest")
	}
	req.Header = reqHeaders
	return SendRequest(c.ctx, c.s.client, req)
}
// Do sends the request, checks the response, and decodes the CSV payload
// into a TimeSeriesList.
func (c *TimeSeriesDailyCall) Do() (*TimeSeriesList, error) {
	res, err := c.doRequest()
	// Handle 304 first: it may arrive with a non-nil response regardless of err.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, errors.Wrapf(err, "doRequest")
	}
	defer res.Body.Close()
	if err := CheckResponse(res); err != nil {
		return nil, errors.Wrapf(err, "CheckResponse")
	}
	// Carry the HTTP metadata alongside the decoded rows.
	ret := &TimeSeriesList{
		ServerResponse: ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := new([]*TimeSeries)
	if err := DecodeResponseCSV(target, res); err != nil {
		return nil, errors.Wrapf(err, "DecodeResponseCSV")
	}
	ret.TimeSeries = *target
	return ret, nil
}
// DailyAdj returns a call for TIME_SERIES_DAILY_ADJUSTED: daily OHLCV plus
// adjusted close and split/dividend events for the given symbol, covering
// 20+ years, with the current trading day updated realtime.
// The response datatype is fixed to csv.
// See https://www.alphavantage.co/documentation/#dailyadj
func (r *TimeSeriesService) DailyAdj(symbol string) *TimeSeriesDailyAdjCall {
	params := url.Values{}
	params.Set("function", "TIME_SERIES_DAILY_ADJUSTED")
	params.Set("symbol", symbol)
	params.Set("datatype", "csv")
	return &TimeSeriesDailyAdjCall{
		DefaultCall: DefaultCall{s: r.s, urlParams: params},
	}
}
// TimeSeriesDailyAdjCall builds and executes a TIME_SERIES_DAILY_ADJUSTED request.
// https://www.alphavantage.co/documentation/#dailyadj
// This API returns daily time series (date, daily open, daily high, daily low, daily close, daily volume, daily adjusted close, and split/dividend events) of the global equity specified, covering 20+ years of historical data.
// The most recent data point is the prices and volume information of the current trading day, updated realtime.
// datatype fixed to csv
type TimeSeriesDailyAdjCall struct {
	DefaultCall
}
// Outputsize sets the outputsize query parameter. By default, outputsize=compact.
// Strings compact and full are accepted with the following specifications:
// compact returns only the latest 100 data points;
// full returns the full-length time series of 20+ years of historical data.
// The "compact" option is recommended if you would like to reduce the data size of each API call.
func (c *TimeSeriesDailyAdjCall) Outputsize(outputsize string) *TimeSeriesDailyAdjCall {
	c.urlParams.Set("outputsize", outputsize)
	return c
}
// func (c *TimeSeriesDailyAdjCall) Datatype(datatype string) *TimeSeriesDailyAdjCall {
// c.urlParams.Set("datatype", datatype)
// return c
// }
// doRequest issues the HTTP GET for this call and returns the raw response.
func (c *TimeSeriesDailyAdjCall) doRequest() (*http.Response, error) {
	reqHeaders := make(http.Header)
	for k, v := range c.header {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	urls := ResolveRelative(c.s.BasePath) + "?" + c.urlParams.Encode()
	// GET requests carry no body; pass nil directly instead of declaring
	// a separate nil io.Reader.
	req, err := http.NewRequest("GET", urls, nil)
	if err != nil {
		return nil, errors.Wrapf(err, "http.NewRequest")
	}
	req.Header = reqHeaders
	return SendRequest(c.ctx, c.s.client, req)
}
// Do sends the request, checks the response, and decodes the CSV payload
// into a TimeSeriesList.
func (c *TimeSeriesDailyAdjCall) Do() (*TimeSeriesList, error) {
	res, err := c.doRequest()
	// Handle 304 first: it may arrive with a non-nil response regardless of err.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, errors.Wrapf(err, "doRequest")
	}
	defer res.Body.Close()
	if err := CheckResponse(res); err != nil {
		return nil, errors.Wrapf(err, "CheckResponse")
	}
	// Carry the HTTP metadata alongside the decoded rows.
	ret := &TimeSeriesList{
		ServerResponse: ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := new([]*TimeSeries)
	if err := DecodeResponseCSV(target, res); err != nil {
		return nil, errors.Wrapf(err, "DecodeResponseCSV")
	}
	ret.TimeSeries = *target
	return ret, nil
}
// Weekly returns a call for TIME_SERIES_WEEKLY: weekly OHLCV series (last
// trading day of each week) for the given symbol covering 20+ years, with
// the current partial week updated realtime.
// The response datatype is fixed to csv.
// See https://www.alphavantage.co/documentation/#weekly
func (r *TimeSeriesService) Weekly(symbol string) *TimeSeriesWeeklyCall {
	params := url.Values{}
	params.Set("function", "TIME_SERIES_WEEKLY")
	params.Set("symbol", symbol)
	params.Set("datatype", "csv")
	return &TimeSeriesWeeklyCall{
		DefaultCall: DefaultCall{s: r.s, urlParams: params},
	}
}
// TimeSeriesWeeklyCall builds and executes a TIME_SERIES_WEEKLY request.
// https://www.alphavantage.co/documentation/#weekly
// This API returns weekly time series (last trading day of each week, weekly open, weekly high, weekly low, weekly close, weekly volume) of the global equity specified, covering 20+ years of historical data.
// The latest data point is the prices and volume information for the week (or partial week) that contains the current trading day, updated realtime.
// datatype fixed to csv
type TimeSeriesWeeklyCall struct {
	DefaultCall
}
// func (c *TimeSeriesWeeklyCall) Datatype(datatype string) *TimeSeriesWeeklyCall {
// c.urlParams.Set("datatype", datatype)
// return c
// }
// doRequest issues the HTTP GET for this call and returns the raw response.
func (c *TimeSeriesWeeklyCall) doRequest() (*http.Response, error) {
	reqHeaders := make(http.Header)
	for k, v := range c.header {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	urls := ResolveRelative(c.s.BasePath) + "?" + c.urlParams.Encode()
	// GET requests carry no body; pass nil directly instead of declaring
	// a separate nil io.Reader.
	req, err := http.NewRequest("GET", urls, nil)
	if err != nil {
		return nil, errors.Wrapf(err, "http.NewRequest")
	}
	req.Header = reqHeaders
	return SendRequest(c.ctx, c.s.client, req)
}
// Do sends the request, checks the response, and decodes the CSV payload
// into a TimeSeriesList.
func (c *TimeSeriesWeeklyCall) Do() (*TimeSeriesList, error) {
	res, err := c.doRequest()
	// Handle 304 first: it may arrive with a non-nil response regardless of err.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, errors.Wrapf(err, "doRequest")
	}
	defer res.Body.Close()
	if err := CheckResponse(res); err != nil {
		return nil, errors.Wrapf(err, "CheckResponse")
	}
	// Carry the HTTP metadata alongside the decoded rows.
	ret := &TimeSeriesList{
		ServerResponse: ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := new([]*TimeSeries)
	if err := DecodeResponseCSV(target, res); err != nil {
		return nil, errors.Wrapf(err, "DecodeResponseCSV")
	}
	ret.TimeSeries = *target
	return ret, nil
}
// WeeklyAdj returns a call for TIME_SERIES_WEEKLY_ADJUSTED: weekly OHLCV
// plus adjusted close and dividend data (last trading day of each week)
// for the given symbol covering 20+ years, with the current partial week
// updated realtime. The response datatype is fixed to csv.
// See https://www.alphavantage.co/documentation/#weeklyadj
func (r *TimeSeriesService) WeeklyAdj(symbol string) *TimeSeriesWeeklyAdjCall {
	params := url.Values{}
	params.Set("function", "TIME_SERIES_WEEKLY_ADJUSTED")
	params.Set("symbol", symbol)
	params.Set("datatype", "csv")
	return &TimeSeriesWeeklyAdjCall{
		DefaultCall: DefaultCall{s: r.s, urlParams: params},
	}
}
// TimeSeriesWeeklyAdjCall builds and executes a TIME_SERIES_WEEKLY_ADJUSTED request.
// https://www.alphavantage.co/documentation/#weeklyadj
// This API returns weekly adjusted time series (last trading day of each week, weekly open, weekly high, weekly low, weekly close, weekly adjusted close, weekly volume, weekly dividend) of the global equity specified, covering 20+ years of historical data.
// The latest data point is the prices and volume information for the week (or partial week) that contains the current trading day, updated realtime.
// datatype fixed to csv
type TimeSeriesWeeklyAdjCall struct {
	DefaultCall
}
// func (c *TimeSeriesWeeklyAdjCall) Datatype(datatype string) *TimeSeriesWeeklyAdjCall {
// c.urlParams.Set("datatype", datatype)
// return c
// }
// doRequest issues the HTTP GET for this call and returns the raw response.
func (c *TimeSeriesWeeklyAdjCall) doRequest() (*http.Response, error) {
	reqHeaders := make(http.Header)
	for k, v := range c.header {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	urls := ResolveRelative(c.s.BasePath) + "?" + c.urlParams.Encode()
	// GET requests carry no body; pass nil directly instead of declaring
	// a separate nil io.Reader.
	req, err := http.NewRequest("GET", urls, nil)
	if err != nil {
		return nil, errors.Wrapf(err, "http.NewRequest")
	}
	req.Header = reqHeaders
	return SendRequest(c.ctx, c.s.client, req)
}
// Do sends the request, checks the response, and decodes the CSV payload
// into a TimeSeriesList.
func (c *TimeSeriesWeeklyAdjCall) Do() (*TimeSeriesList, error) {
	res, err := c.doRequest()
	// Handle 304 first: it may arrive with a non-nil response regardless of err.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, errors.Wrapf(err, "doRequest")
	}
	defer res.Body.Close()
	if err := CheckResponse(res); err != nil {
		return nil, errors.Wrapf(err, "CheckResponse")
	}
	// Carry the HTTP metadata alongside the decoded rows.
	ret := &TimeSeriesList{
		ServerResponse: ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := new([]*TimeSeries)
	if err := DecodeResponseCSV(target, res); err != nil {
		return nil, errors.Wrapf(err, "DecodeResponseCSV")
	}
	ret.TimeSeries = *target
	return ret, nil
}
// Monthly returns a call for TIME_SERIES_MONTHLY: monthly OHLCV series
// (last trading day of each month) for the given symbol covering 20+
// years, with the current partial month updated realtime.
// The response datatype is fixed to csv.
// See https://www.alphavantage.co/documentation/#monthly
func (r *TimeSeriesService) Monthly(symbol string) *TimeSeriesMonthlyCall {
	params := url.Values{}
	params.Set("function", "TIME_SERIES_MONTHLY")
	params.Set("symbol", symbol)
	params.Set("datatype", "csv")
	return &TimeSeriesMonthlyCall{
		DefaultCall: DefaultCall{s: r.s, urlParams: params},
	}
}
// TimeSeriesMonthlyCall builds and executes a TIME_SERIES_MONTHLY request.
// https://www.alphavantage.co/documentation/#monthly
// This API returns monthly time series (last trading day of each month, monthly open, monthly high, monthly low, monthly close, monthly volume) of the global equity specified, covering 20+ years of historical data.
// The latest data point is the prices and volume information for the month (or partial month) that contains the current trading day, updated realtime.
// datatype fixed to csv
type TimeSeriesMonthlyCall struct {
	DefaultCall
}
// func (c *TimeSeriesMonthlyCall) Datatype(datatype string) *TimeSeriesMonthlyCall {
// c.urlParams.Set("datatype", datatype)
// return c
// }
// doRequest issues the HTTP GET for this call and returns the raw response.
func (c *TimeSeriesMonthlyCall) doRequest() (*http.Response, error) {
	reqHeaders := make(http.Header)
	for k, v := range c.header {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	urls := ResolveRelative(c.s.BasePath) + "?" + c.urlParams.Encode()
	// GET requests carry no body; pass nil directly instead of declaring
	// a separate nil io.Reader.
	req, err := http.NewRequest("GET", urls, nil)
	if err != nil {
		return nil, errors.Wrapf(err, "http.NewRequest")
	}
	req.Header = reqHeaders
	return SendRequest(c.ctx, c.s.client, req)
}
// Do sends the request, checks the response, and decodes the CSV payload
// into a TimeSeriesList.
func (c *TimeSeriesMonthlyCall) Do() (*TimeSeriesList, error) {
	res, err := c.doRequest()
	// Handle 304 first: it may arrive with a non-nil response regardless of err.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, errors.Wrapf(err, "doRequest")
	}
	defer res.Body.Close()
	if err := CheckResponse(res); err != nil {
		return nil, errors.Wrapf(err, "CheckResponse")
	}
	// Carry the HTTP metadata alongside the decoded rows.
	ret := &TimeSeriesList{
		ServerResponse: ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := new([]*TimeSeries)
	if err := DecodeResponseCSV(target, res); err != nil {
		return nil, errors.Wrapf(err, "DecodeResponseCSV")
	}
	ret.TimeSeries = *target
	return ret, nil
}
// MonthlyAdj returns a call for TIME_SERIES_MONTHLY_ADJUSTED: monthly
// OHLCV plus adjusted close and dividend data (last trading day of each
// month) for the given symbol covering 20+ years, with the current
// partial month updated realtime. The response datatype is fixed to csv.
// See https://www.alphavantage.co/documentation/#monthlyadj
func (r *TimeSeriesService) MonthlyAdj(symbol string) *TimeSeriesMonthlyAdjCall {
	params := url.Values{}
	params.Set("function", "TIME_SERIES_MONTHLY_ADJUSTED")
	params.Set("symbol", symbol)
	params.Set("datatype", "csv")
	return &TimeSeriesMonthlyAdjCall{
		DefaultCall: DefaultCall{s: r.s, urlParams: params},
	}
}
// TimeSeriesMonthlyAdjCall builds and executes a TIME_SERIES_MONTHLY_ADJUSTED request.
// https://www.alphavantage.co/documentation/#monthlyadj
// This API returns monthly adjusted time series (last trading day of each month, monthly open, monthly high, monthly low, monthly close, monthly adjusted close, monthly volume, monthly dividend) of the equity specified, covering 20+ years of historical data.
// The latest data point is the prices and volume information for the month (or partial month) that contains the current trading day, updated realtime.
// datatype fixed to csv
type TimeSeriesMonthlyAdjCall struct {
	DefaultCall
}
// func (c *TimeSeriesMonthlyAdjCall) Datatype(datatype string) *TimeSeriesMonthlyAdjCall {
// c.urlParams.Set("datatype", datatype)
// return c
// }
// doRequest issues the HTTP GET for this call and returns the raw response.
func (c *TimeSeriesMonthlyAdjCall) doRequest() (*http.Response, error) {
	reqHeaders := make(http.Header)
	for k, v := range c.header {
		reqHeaders[k] = v
	}
	reqHeaders.Set("User-Agent", c.s.userAgent())
	urls := ResolveRelative(c.s.BasePath) + "?" + c.urlParams.Encode()
	// GET requests carry no body; pass nil directly instead of declaring
	// a separate nil io.Reader.
	req, err := http.NewRequest("GET", urls, nil)
	if err != nil {
		return nil, errors.Wrapf(err, "http.NewRequest")
	}
	req.Header = reqHeaders
	return SendRequest(c.ctx, c.s.client, req)
}
// Do sends the request, checks the response, and decodes the CSV payload
// into a TimeSeriesList.
func (c *TimeSeriesMonthlyAdjCall) Do() (*TimeSeriesList, error) {
	res, err := c.doRequest()
	// Handle 304 first: it may arrive with a non-nil response regardless of err.
	if res != nil && res.StatusCode == http.StatusNotModified {
		if res.Body != nil {
			res.Body.Close()
		}
		return nil, &Error{
			Code:   res.StatusCode,
			Header: res.Header,
		}
	}
	if err != nil {
		return nil, errors.Wrapf(err, "doRequest")
	}
	defer res.Body.Close()
	if err := CheckResponse(res); err != nil {
		return nil, errors.Wrapf(err, "CheckResponse")
	}
	// Carry the HTTP metadata alongside the decoded rows.
	ret := &TimeSeriesList{
		ServerResponse: ServerResponse{
			Header:         res.Header,
			HTTPStatusCode: res.StatusCode,
		},
	}
	target := new([]*TimeSeries)
	if err := DecodeResponseCSV(target, res); err != nil {
		return nil, errors.Wrapf(err, "DecodeResponseCSV")
	}
	ret.TimeSeries = *target
	return ret, nil
}
// QuoteEndpoint builds a call to the GLOBAL_QUOTE endpoint
// (https://www.alphavantage.co/documentation/#latestprice), a lightweight
// alternative to the time series APIs that returns the latest price and
// volume information for the given symbol. The response datatype is fixed
// to csv.
func (r *TimeSeriesService) QuoteEndpoint(symbol string) *TimeSeriesQuoteEndpointCall {
    params := url.Values{}
    params.Set("function", "GLOBAL_QUOTE")
    params.Set("symbol", symbol)
    params.Set("datatype", "csv")
    return &TimeSeriesQuoteEndpointCall{
        DefaultCall: DefaultCall{s: r.s, urlParams: params},
        symbol:      symbol,
    }
}
// TimeSeriesQuoteEndpointCall https://www.alphavantage.co/documentation/#latestprice
// A lightweight alternative to the time series APIs, this service returns the latest price and volume information for a security of your choice.
// datatype fixed to csv
type TimeSeriesQuoteEndpointCall struct {
    DefaultCall
    // symbol is retained so Do can report which symbol was not found.
    symbol string
}

// Datatype is intentionally disabled: the datatype is pinned to csv so the
// response can be decoded by DecodeResponseCSV in Do.
// func (c *TimeSeriesQuoteEndpointCall) Datatype(datatype string) *TimeSeriesQuoteEndpointCall {
// 	c.urlParams.Set("datatype", datatype)
// 	return c
// }
// doRequest assembles and sends the GET request for this call, returning the
// raw HTTP response.
func (c *TimeSeriesQuoteEndpointCall) doRequest() (*http.Response, error) {
    headers := make(http.Header)
    for key, values := range c.header {
        headers[key] = values
    }
    headers.Set("User-Agent", c.s.userAgent())
    var body io.Reader // GET carries no payload
    target := ResolveRelative(c.s.BasePath) + "?" + c.urlParams.Encode()
    req, err := http.NewRequest("GET", target, body)
    if err != nil {
        return nil, errors.Wrapf(err, "http.NewRequest")
    }
    req.Header = headers
    return SendRequest(c.ctx, c.s.client, req)
}
// Do executes the request and decodes the first csv row into a Quote.
// It returns an error when the symbol yields no rows.
func (c *TimeSeriesQuoteEndpointCall) Do() (*Quote, error) {
    res, err := c.doRequest()
    // A 304 Not Modified is checked before err so a cached response is
    // reported consistently even when the transport also returned an error.
    if res != nil && res.StatusCode == http.StatusNotModified {
        if res.Body != nil {
            res.Body.Close()
        }
        return nil, &Error{
            Code:   res.StatusCode,
            Header: res.Header,
        }
    }
    if err != nil {
        return nil, errors.Wrapf(err, "doRequest")
    }
    defer res.Body.Close()
    if err := CheckResponse(res); err != nil {
        return nil, errors.Wrapf(err, "CheckResponse")
    }
    target := new([]*Quote)
    if err := DecodeResponseCSV(target, res); err != nil {
        return nil, errors.Wrapf(err, "DecodeResponseCSV")
    }
    // An empty result set means the symbol is unknown to the API.
    if len((*target)) == 0 {
        return nil, fmt.Errorf("%s could not be found", c.symbol)
    }
    // Only the first decoded row is meaningful for a single-symbol quote.
    ret := (*target)[0]
    ret.ServerResponse = ServerResponse{
        Header:         res.Header,
        HTTPStatusCode: res.StatusCode,
    }
    return ret, nil
}
// SearchEndpoint builds a call to the SYMBOL_SEARCH endpoint
// (https://www.alphavantage.co/documentation/#symbolsearch). It returns the
// best-matching symbols and market information for the given keywords,
// including match scores for custom filtering. The response datatype is
// fixed to csv.
func (r *TimeSeriesService) SearchEndpoint(keywords string) *TimeSeriesSearchEndpointCall {
    params := url.Values{}
    params.Set("function", "SYMBOL_SEARCH")
    params.Set("keywords", keywords)
    params.Set("datatype", "csv")
    return &TimeSeriesSearchEndpointCall{
        DefaultCall: DefaultCall{s: r.s, urlParams: params},
    }
}
// TimeSeriesSearchEndpointCall https://www.alphavantage.co/documentation/#symbolsearch
// We've got you covered! The Search Endpoint returns the best-matching symbols and market information based on keywords of your choice. The search results also contain match scores that provide you with the full flexibility to develop your own search and filtering logic.
// datatype fixed to csv
type TimeSeriesSearchEndpointCall struct {
    DefaultCall
}

// Datatype is intentionally disabled: the datatype is pinned to csv so the
// response can be decoded by DecodeResponseCSV in Do.
// func (c *TimeSeriesSearchEndpointCall) Datatype(datatype string) *TimeSeriesSearchEndpointCall {
// 	c.urlParams.Set("datatype", datatype)
// 	return c
// }
// doRequest assembles and sends the GET request for this call, returning the
// raw HTTP response.
func (c *TimeSeriesSearchEndpointCall) doRequest() (*http.Response, error) {
    headers := make(http.Header)
    for key, values := range c.header {
        headers[key] = values
    }
    headers.Set("User-Agent", c.s.userAgent())
    var body io.Reader // GET carries no payload
    target := ResolveRelative(c.s.BasePath) + "?" + c.urlParams.Encode()
    req, err := http.NewRequest("GET", target, body)
    if err != nil {
        return nil, errors.Wrapf(err, "http.NewRequest")
    }
    req.Header = headers
    return SendRequest(c.ctx, c.s.client, req)
}
// Do executes the request and decodes the csv response into a
// SearchResultList.
func (c *TimeSeriesSearchEndpointCall) Do() (*SearchResultList, error) {
    res, err := c.doRequest()
    // A 304 Not Modified is checked before err so a cached response is
    // reported consistently even when the transport also returned an error.
    if res != nil && res.StatusCode == http.StatusNotModified {
        if res.Body != nil {
            res.Body.Close()
        }
        return nil, &Error{
            Code:   res.StatusCode,
            Header: res.Header,
        }
    }
    if err != nil {
        return nil, errors.Wrapf(err, "doRequest")
    }
    defer res.Body.Close()
    if err := CheckResponse(res); err != nil {
        return nil, errors.Wrapf(err, "CheckResponse")
    }
    // Capture the response metadata before decoding the body.
    ret := &SearchResultList{
        ServerResponse: ServerResponse{
            Header:         res.Header,
            HTTPStatusCode: res.StatusCode,
        },
    }
    target := new([]*SearchResult)
    if err := DecodeResponseCSV(target, res); err != nil {
        return nil, errors.Wrapf(err, "DecodeResponseCSV")
    }
    ret.SearchResults = *target
    return ret, nil
} | stockTimeSeries.go | 0.751648 | 0.457561 | stockTimeSeries.go | starcoder
package model
import (
"container/list"
"math"
)
// DeduplicateVerticesNoSorting returns a copy of the supplied slice with
// duplicate vertices (per CircuitVertex.Equals) removed, preserving the
// order in which unique vertices are first encountered. The input slice is
// not modified. It is O(n*n), intended for cases where a hash map or sorting
// would mis-handle threshold-based equality:
//   - encounter order matters: if A~B and B~C but A!~C, seeing B first keeps
//     only B, while seeing A or C first keeps both A and C;
//   - sorting 3D vertices by X then Y can interleave a point with a very
//     different Z between two effectively-equal points, blocking their
//     deduplication.
func DeduplicateVerticesNoSorting(vertices []CircuitVertex) []CircuitVertex {
    // A set is unsuitable here because Equals uses a distance threshold.
    unique := make([]CircuitVertex, 0, len(vertices))
    for _, candidate := range vertices {
        isDuplicate := false
        for _, kept := range unique {
            if candidate.Equals(kept) {
                isDuplicate = true
                break
            }
        }
        if !isDuplicate {
            unique = append(unique, candidate)
        }
    }
    return unique
}
// DeleteVertex removes the vertex at the supplied index (clamped to the
// slice bounds) and returns the updated slice. The backing array may be
// modified, so callers must use the returned slice; do not use this variant
// when the slice may be (or back) a clone.
func DeleteVertex(vertices []CircuitVertex, index int) []CircuitVertex {
    last := len(vertices) - 1
    switch {
    case last < 0:
        // Nothing to delete from an empty slice.
        return vertices
    case index <= 0:
        return vertices[1:]
    case index >= last:
        return vertices[:last]
    default:
        return append(vertices[:index], vertices[index+1:]...)
    }
}
// DeleteVertexCopy returns a copy of the supplied array with the specified vertex removed.
// This does not modify the supplied array, so it is safe to use with algorithms that clone the vertex array.
// NOTE(review): if toDelete is absent, the destination fills up and the
// ORIGINAL slice is returned unchanged; if more than one vertex
// Equals(toDelete), all matches are skipped and the tail of the result is
// left as zero values — confirm callers always pass exactly one match.
func DeleteVertexCopy(vertices []CircuitVertex, toDelete CircuitVertex) []CircuitVertex {
    updatedLen := len(vertices) - 1
    if updatedLen < 0 {
        return []CircuitVertex{}
    }
    updated := make([]CircuitVertex, updatedLen)
    i := 0
    for _, v := range vertices {
        if !v.Equals(toDelete) {
            // Destination already full: toDelete was not found, so return
            // the original slice.
            if i >= updatedLen {
                return vertices
            }
            updated[i] = v
            i++
        }
    }
    return updated
}
// FindClosestEdge returns the edge in currentCircuit whose DistanceIncrease
// for the supplied vertex is smallest, or nil for an empty circuit.
func FindClosestEdge(vertex CircuitVertex, currentCircuit []CircuitEdge) CircuitEdge {
    var best CircuitEdge
    bestIncrease := math.MaxFloat64
    for _, edge := range currentCircuit {
        increase := edge.DistanceIncrease(vertex)
        if increase < bestIncrease {
            best, bestIncrease = edge, increase
        }
    }
    return best
}
// FindClosestEdgeList returns the edge in the supplied linked list whose
// DistanceIncrease for the vertex is smallest, skipping edges that already
// start or end at the vertex; nil when no candidate exists.
func FindClosestEdgeList(vertex CircuitVertex, currentCircuit *list.List) CircuitEdge {
    var best CircuitEdge
    bestIncrease := math.MaxFloat64
    for element := currentCircuit.Front(); element != nil; element = element.Next() {
        edge := element.Value.(CircuitEdge)
        // Edges already touching the vertex cannot host it again.
        if edge.GetEnd() == vertex || edge.GetStart() == vertex {
            continue
        }
        if increase := edge.DistanceIncrease(vertex); increase < bestIncrease {
            best, bestIncrease = edge, increase
        }
    }
    return best
}
// FindFarthestPoint returns the vertex in points with the greatest
// DistanceTo the target; nil when points is empty or all distances are zero.
func FindFarthestPoint(target CircuitVertex, points []CircuitVertex) CircuitVertex {
    var farthest CircuitVertex
    best := 0.0
    for _, candidate := range points {
        if d := candidate.DistanceTo(target); d > best {
            best, farthest = d, candidate
        }
    }
    return farthest
}
// IndexOfVertex returns the 0-based index of the first vertex in the slice
// that Equals the supplied vertex, or -1 when no such vertex exists.
func IndexOfVertex(vertices []CircuitVertex, vertex CircuitVertex) int {
    for i := 0; i < len(vertices); i++ {
        if vertices[i].Equals(vertex) {
            return i
        }
    }
    return -1
}
// InsertVertex inserts the supplied vertex at the specified index, 0-based.
// If the index is greater than the last index in the array, the vertex will be appended to the end of the array.
// This may modify the supplied array, so it should be updated with the returned array.
// NOTE(review): a negative index panics on the slice expression below;
// callers are expected to pass index >= 0.
func InsertVertex(vertices []CircuitVertex, index int, vertex CircuitVertex) []CircuitVertex {
    if index >= len(vertices) {
        return append(vertices, vertex)
    } else {
        // copy all elements starting at the index one to the right to create a duplicate record at index and index+1.
        vertices = append(vertices[:index+1], vertices[index:]...)
        // update only the vertex at the index, so that there are no duplicates and the vertex is at the index.
        vertices[index] = vertex
        return vertices
    }
}
// IsEdgeCloser reports whether inserting v into candidateEdge would increase
// the circuit length less than inserting it into currentEdge.
func IsEdgeCloser(v CircuitVertex, candidateEdge CircuitEdge, currentEdge CircuitEdge) bool {
    candidateIncrease := candidateEdge.DistanceIncrease(v)
    currentIncrease := currentEdge.DistanceIncrease(v)
    return candidateIncrease < currentIncrease
}
// Length returns the total length of the circuit, including the closing edge
// from the last vertex back to the first.
func Length(circuit []CircuitVertex) float64 {
    numVertices := len(circuit)
    // Fewer than two vertices form no edges.
    if numVertices < 2 {
        return 0.0
    }
    // Start with the closing edge, then add each consecutive pair.
    length := circuit[numVertices-1].DistanceTo(circuit[0])
    for i, j := 0, 1; j < numVertices; i, j = i+1, j+1 {
        length += circuit[i].DistanceTo(circuit[j])
    }
    return length
} | model/utilsvertex.go | 0.832509 | 0.749729 | utilsvertex.go | starcoder
package av
import (
"fmt"
"math"
"gonum.org/v1/gonum/floats"
"gonum.org/v1/gonum/stat"
"github.com/matrix-profile-foundation/go-matrixprofile/util"
)
// AV names one of the annotation-vector strategies accepted by Create.
type AV string

const (
    Default    AV = "default"    // Default is the default annotation vector of all ones
    Complexity AV = "complexity" // Complexity is the annotation vector that focuses on areas of high "complexity"
    MeanStd    AV = "mean_std"   // MeanStd is the annotation vector focusing on areas where the signal is within a standard deviation of the mean
    Clipping   AV = "clipping"   // Clipping is the annotation vector reducing the importance of areas showing clipping effects on the positive and negative regime
)
// Create returns the annotation vector selected by av for the time series ts
// and window size m, or an error for an unrecognized annotation vector name.
func Create(av AV, ts []float64, m int) ([]float64, error) {
    switch av {
    case Default:
        return makeDefault(ts, m), nil
    case Complexity:
        return makeCompexity(ts, m), nil
    case MeanStd:
        return makeMeanStd(ts, m), nil
    case Clipping:
        return makeClipping(ts, m), nil
    }
    return nil, fmt.Errorf("invalid annotation vector specified with matrix profile, %s", av)
}
// makeDefault builds the all-ones annotation vector of length len(d)-m+1,
// which leaves the matrix profile unchanged when applied.
func makeDefault(d []float64, m int) []float64 {
    n := len(d) - m + 1
    av := make([]float64, n)
    for i := range av {
        av[i] = 1.0
    }
    return av
}
// makeCompexity builds an annotation vector from a complexity estimate of
// each window: the square root of the sum of squared successive differences,
// shifted by the minimum and divided by the maximum (0 when the maximum is
// zero). (The misspelled name is kept for compatibility with Create.)
func makeCompexity(d []float64, m int) []float64 {
    n := len(d) - m + 1
    av := make([]float64, n)
    minAV, maxAV := math.Inf(1), math.Inf(-1)
    for i := 0; i < n; i++ {
        sumSq := 0.0
        for j := 1; j < m; j++ {
            diff := d[i+j] - d[i+j-1]
            sumSq += diff * diff
        }
        av[i] = math.Sqrt(sumSq)
        if av[i] < minAV {
            minAV = av[i]
        }
        if av[i] > maxAV {
            maxAV = av[i]
        }
    }
    // Normalize in place; a zero maximum forces the whole vector to zero.
    for i := 0; i < n; i++ {
        if maxAV == 0 {
            av[i] = 0
        } else {
            av[i] = (av[i] - minAV) / maxAV
        }
    }
    return av
}
// makeMeanStd creates an annotation vector by computing the moving standard
// deviation over windows of length m, marking windows whose std falls below
// the mean of all stds with 1 and leaving the rest at 0.
func makeMeanStd(d []float64, m int) []float64 {
    av := make([]float64, len(d)-m+1)
    // Only the moving std is needed; the moving mean is discarded.
    // NOTE(review): the error from MovMeanStd is also discarded — confirm it
    // cannot fail for the inputs Create passes through.
    _, std, _ := util.MovMeanStd(d, m)
    mu := stat.Mean(std, nil)
    for i := 0; i < len(d)-m+1; i++ {
        if std[i] < mu {
            av[i] = 1
        }
    }
    return av
}
// makeClipping creates an annotation vector that down-weights subsequences
// containing many samples clipped at the global minimum or maximum of the
// signal.
func makeClipping(d []float64, m int) []float64 {
    av := make([]float64, len(d)-m+1)
    maxVal, minVal := floats.Max(d), floats.Min(d)
    var numClip int
    // Count clipped samples (values equal to a global extreme) per window.
    for i := 0; i < len(d)-m+1; i++ {
        numClip = 0
        for j := 0; j < m; j++ {
            if d[i+j] == maxVal || d[i+j] == minVal {
                numClip++
            }
        }
        av[i] = float64(numClip)
    }
    // Shift counts so the minimum becomes zero, then invert and scale.
    minVal = floats.Min(av)
    for i := 0; i < len(av); i++ {
        av[i] -= minVal
    }
    maxVal = floats.Max(av)
    // NOTE(review): if every window has the same clip count, maxVal is 0 here
    // and the division yields NaN; makeCompexity guards the analogous case —
    // confirm whether the same guard is wanted here.
    for i := 0; i < len(av); i++ {
        av[i] = 1 - av[i]/maxVal
    }
    return av
} | av/annotation_vector.go | 0.72662 | 0.476458 | annotation_vector.go | starcoder
package nepcal
import (
"fmt"
"io"
"text/tabwriter"
)
// calendar is a helper that generates a formatted string representation of a
// calendar for any given date.
type calendar struct {
    // iter is the next day number to print; it tracks how far along the
    // rendering of the month has gotten.
    iter int
    // when is the time for which the calendar is being created.
    when Time
    // tw is the tabwriter to write into; it is only set (and safe to use)
    // inside flushInto.
    tw *tabwriter.Writer
}
// newCalendar returns a calendar for t, primed to print day 1 first; the
// tabwriter is left nil until flushInto is called.
func newCalendar(t Time) *calendar {
    return &calendar{iter: 1, when: t}
}
// flushInto renders the calendar through a tabwriter and flushes the result
// to w.
func (c *calendar) flushInto(w io.Writer) {
    c.tw = tabwriter.NewWriter(w, 0, 0, 1, ' ', 0)
    c.renderBSDateHeader()
    c.renderStaticDaysHeader()
    c.renderFirstRow()
    c.renderCalWithoutFirstRow()
    c.tw.Flush()
}
// renderFirstRow renders the first calendar row. The month's starting
// weekday determines an offset: that many cells are left blank before the
// day numbers begin.
func (c *calendar) renderFirstRow() {
    offset := int(c.when.StartWeekday())
    for column := 0; column < 7; column++ {
        if column < offset {
            fmt.Fprintf(c.tw, "\t")
            continue
        }
        fmt.Fprintf(c.tw, "\t%s", c.reprValue(c.iter))
        c.next()
    }
    fmt.Fprint(c.tw, "\n")
}
// renderCalWithoutFirstRow renders every row after the first, printing up to
// seven day numbers per row until the month's last day has been emitted.
func (c *calendar) renderCalWithoutFirstRow() {
    total := c.when.NumDaysInMonth()
    for c.iter <= total {
        for column := 0; column < 7 && c.iter <= total; column++ {
            fmt.Fprintf(c.tw, "\t%s", c.reprValue(c.iter))
            c.next()
        }
        fmt.Fprint(c.tw, "\n")
    }
}
// renderStaticDaysHeader prints the fixed weekday abbreviations row.
func (c *calendar) renderStaticDaysHeader() {
    days := []string{"Su", "Mo", "Tu", "We", "Th", "Fr", "Sa"}
    for _, day := range days {
        fmt.Fprintf(c.tw, "%s\t", day)
    }
    fmt.Fprint(c.tw, "\n")
}
// renderBSDateHeader prints the "<month> <day>, <year>" header line for the
// calendar's date.
func (c *calendar) renderBSDateHeader() {
    year, month, day := c.when.Date()
    fmt.Fprintf(c.tw, "\t\t%s %s, %s\n\t", month.String(), c.reprValue(day), c.reprValue(year))
}
// next advances the day counter to the next day of the month.
func (c *calendar) next() {
    c.iter++
}
// reprValue converts val to its display string via the Numeral type
// (presumably Nepali numerals — confirm against the nepcal Numeral docs).
func (c *calendar) reprValue(val int) string {
    return Numeral(val).String()
} | nepcal/calendar.go | 0.739986 | 0.427337 | calendar.go | starcoder
package yijing
import (
"fmt"
"io"
"github.com/bitfield/qrand"
)
// Tails represents a coin toss resulting in tails.
const Tails = 2

// Heads represents a coin toss resulting in heads.
const Heads = 3

// Coin represents the result of a coin toss (Heads or Tails).
type Coin int

// CoinSet represents the three Coins required to produce a Line.
type CoinSet [3]Coin

// CoinSet6 represents the six CoinSets required to produce a full hexagram.
type CoinSet6 [6]CoinSet

// Line represents a hexagram line.
type Line int

// LineTriple represents the three Lines that form one trigram.
type LineTriple [3]Line

// These constants represent the various kinds of Line. The values are the
// possible coin-toss sums: three coins valued 2 (tails) or 3 (heads) add up
// to a number between 6 and 9.
const (
    OldYin    Line = 6
    YoungYang Line = 7
    YoungYin  Line = 8
    OldYang   Line = 9
)

// Yin and Yang name the two line polarities used by TrigramsByLineTypes.
const (
    Yin  = false
    Yang = true
)

// RandReader is the randomness source used by RandomHexagram. It defaults to
// the qrand package's reader and may be replaced (for example in tests) with
// any io.Reader.
var RandReader io.Reader = qrand.Reader
// LineFromCoins takes a CoinSet and returns the equivalent Line, which is
// the sum of the three coin values (6 through 9).
func LineFromCoins(cs CoinSet) Line {
    var total Coin
    for _, coin := range cs {
        total += coin
    }
    return Line(total)
}
// CoinsFromBytes expands bs bit-by-bit (most significant bit first) into
// coins — a 1 bit is Heads, a 0 bit is Tails — and groups the first eighteen
// coins into six CoinSets. It panics when bs supplies fewer than 18 bits
// (i.e. fewer than 3 bytes).
func CoinsFromBytes(bs []byte) CoinSet6 {
    coins := make([]Coin, 0, 8*len(bs))
    for _, b := range bs {
        for bit := 7; bit >= 0; bit-- {
            coin := Coin(Tails)
            if (b>>uint(bit))&1 == 1 {
                coin = Heads
            }
            coins = append(coins, coin)
        }
    }
    var sets CoinSet6
    for i := 0; i < 6; i++ {
        for j := 0; j < 3; j++ {
            sets[i][j] = coins[3*i+j]
        }
    }
    return sets
}
// LinesFromBytes converts bs into six Lines, returned as the lower trigram
// (first three coin sets) and upper trigram (last three).
func LinesFromBytes(bs []byte) (lower, upper LineTriple) {
    sets := CoinsFromBytes(bs)
    for i := 0; i < 3; i++ {
        lower[i] = LineFromCoins(sets[i])
        upper[i] = LineFromCoins(sets[i+3])
    }
    return lower, upper
}
// Trigram identifies one of the eight trigrams.
type Trigram int

// The eight trigrams, used as keys into TrigramsByLineTypes and
// HexagramByTrigrams.
const (
    Heaven Trigram = iota
    Earth
    Thunder
    Water
    Mountain
    Wind
    Flame
    Lake
)

// TrigramPair is the lower and upper trigram forming a hexagram.
type TrigramPair struct {
    Lower, Upper Trigram
}
// Hexagram is a hexagram number in the King Wen sequence.
type Hexagram int

// String formats the hexagram as
// "<number>. <symbol> - <chinese> (<roman>) - <english>" using the metadata
// in Hexagrams.
// NOTE(review): indexes Hexagrams directly, so it panics for any number
// without an entry in that slice.
func (n Hexagram) String() string {
    h := Hexagrams[n]
    return fmt.Sprintf("%d. %c - %s (%s) - %s", n, h.Symbol, h.Chinese, h.Roman, h.English)
}
// HexagramInfo represents an individual hexagram. The Symbol shows the component
// lines, as a Unicode rune. The name of the hexagram is given in Chinese
// characters, Romanised Chinese and English.
type HexagramInfo struct {
    Symbol                  rune
    Chinese, Roman, English string
}

// Hexagrams holds the I Ching hexagrams in King Wen order, indexed by number
// starting at 1 (index 0 is a zero-value placeholder).
// NOTE(review): only hexagrams 1-3 are populated even though
// HexagramByTrigrams produces numbers up to 64; Hexagram.String panics with
// an index-out-of-range for anything above 3 — confirm whether the full
// table exists elsewhere or needs completing.
var Hexagrams = []HexagramInfo{
    {},
    {'䷀', "乾", "qián", "The Creative"},
    {'䷁', "坤", "kūn", "The Receptive"},
    {'䷂', "屯", "zhūn", "Difficulty at the Beginning"},
}
// HexagramByTrigrams maps a lower trigram, then an upper trigram, to the
// King Wen hexagram number formed by stacking them.
var HexagramByTrigrams = map[Trigram]map[Trigram]Hexagram{
    Heaven: map[Trigram]Hexagram{
        Heaven: 1, Earth: 11, Thunder: 34, Water: 5, Mountain: 26, Wind: 9, Flame: 14, Lake: 43,
    },
    Earth: map[Trigram]Hexagram{
        Heaven: 12, Earth: 2, Thunder: 16, Water: 8, Mountain: 23, Wind: 20, Flame: 35, Lake: 45,
    },
    Thunder: map[Trigram]Hexagram{
        Heaven: 25, Earth: 24, Thunder: 51, Water: 3, Mountain: 27, Wind: 42, Flame: 21, Lake: 17,
    },
    Water: map[Trigram]Hexagram{
        Heaven: 6, Earth: 7, Thunder: 40, Water: 29, Mountain: 4, Wind: 59, Flame: 64, Lake: 47,
    },
    Mountain: map[Trigram]Hexagram{
        Heaven: 33, Earth: 15, Thunder: 62, Water: 39, Mountain: 52, Wind: 53, Flame: 56, Lake: 31,
    },
    Wind: map[Trigram]Hexagram{
        Heaven: 44, Earth: 46, Thunder: 32, Water: 48, Mountain: 18, Wind: 57, Flame: 50, Lake: 28,
    },
    Flame: map[Trigram]Hexagram{
        Heaven: 13, Earth: 36, Thunder: 55, Water: 63, Mountain: 22, Wind: 37, Flame: 30, Lake: 49,
    },
    Lake: map[Trigram]Hexagram{
        Heaven: 10, Earth: 19, Thunder: 54, Water: 60, Mountain: 41, Wind: 61, Flame: 38, Lake: 58,
    },
}
// HexagramFromTrigramPair looks up the King Wen hexagram number for the
// given lower/upper trigram combination.
func HexagramFromTrigramPair(tp TrigramPair) Hexagram {
    byUpper := HexagramByTrigrams[tp.Lower]
    return byUpper[tp.Upper]
}
// IsYang reports whether line is a yang line (old or young).
func IsYang(line Line) bool {
    switch line {
    case OldYang, YoungYang:
        return true
    }
    return false
}
// TrigramsByLineTypes maps each trigram to its yin/yang line pattern, in the
// element order produced by TrigramFromLineTriple (first to third line).
var TrigramsByLineTypes = map[Trigram][]bool{
    Heaven:   {Yang, Yang, Yang},
    Earth:    {Yin, Yin, Yin},
    Thunder:  {Yin, Yin, Yang},
    Water:    {Yin, Yang, Yin},
    Mountain: {Yang, Yin, Yin},
    Wind:     {Yang, Yang, Yin},
    Flame:    {Yang, Yin, Yang},
    Lake:     {Yin, Yang, Yang},
}
// LineTypesEqual reports whether two yin/yang line-type sequences are
// element-wise equal. Sequences of different lengths are never equal.
func LineTypesEqual(a, b []bool) bool {
    // Guard against mismatched lengths: without it, indexing b by a's
    // indices panics when a is longer, and a shorter a wrongly matches any
    // b it is a prefix of.
    if len(a) != len(b) {
        return false
    }
    for i := range a {
        if a[i] != b[i] {
            return false
        }
    }
    return true
}
// TrigramFromLineTriple resolves three Lines to the trigram whose yin/yang
// pattern they match.
func TrigramFromLineTriple(input LineTriple) Trigram {
    var pattern []bool
    for _, line := range input {
        pattern = append(pattern, IsYang(line))
    }
    for trigram, lineTypes := range TrigramsByLineTypes {
        if LineTypesEqual(lineTypes, pattern) {
            return trigram
        }
    }
    return Heaven // unreachable: every yin/yang combination has a trigram
}
// HexagramFromBytes derives a hexagram from bs: the bytes become two line
// triples, each triple resolves to a trigram, and the trigram pair selects
// the King Wen hexagram number.
func HexagramFromBytes(bs []byte) Hexagram {
    lower, upper := LinesFromBytes(bs)
    return HexagramFromTrigramPair(TrigramPair{
        Lower: TrigramFromLineTriple(lower),
        Upper: TrigramFromLineTriple(upper),
    })
}
// RandomHexagram draws three random bytes from RandReader and converts them
// into a hexagram. A non-nil error means the random source failed.
func RandomHexagram() (Hexagram, error) {
    b := make([]byte, 3)
    // io.ReadFull guarantees all three bytes are filled: a bare Read may
    // legally return fewer bytes without an error, which would silently
    // leave zero bits and bias the result.
    if _, err := io.ReadFull(RandReader, b); err != nil {
        return Hexagram(0), err
    }
    return HexagramFromBytes(b), nil
} | yijing.go | 0.776623 | 0.495606 | yijing.go | starcoder
package dynamicstruct
// A FieldType indicates which member of the Field union struct should be used
// and how it should be serialized.
// A FieldType indicates which member of the Field union struct should be used
// and how it should be serialized.
type FieldType uint8

const (
    // UnknownType is the default field type.
    UnknownType FieldType = iota
    // BinaryType indicates that the field carries an opaque binary blob.
    BinaryType
    // BoolType indicates that the field carries a bool.
    BoolType
    // ByteStringType indicates that the field carries UTF-8 encoded bytes.
    ByteStringType
    // Complex128Type indicates that the field carries a complex128.
    Complex128Type
    // Complex64Type indicates that the field carries a complex64.
    Complex64Type
    // DurationType indicates that the field carries a time.Duration.
    DurationType
    // Float64Type indicates that the field carries a float64.
    Float64Type
    // Float32Type indicates that the field carries a float32.
    Float32Type
    // Int64Type indicates that the field carries an int64.
    Int64Type
    // Int32Type indicates that the field carries an int32.
    Int32Type
    // Int16Type indicates that the field carries an int16.
    Int16Type
    // Int8Type indicates that the field carries an int8.
    Int8Type
    // StringType indicates that the field carries a string.
    StringType
    // TimeType indicates that the field carries a time.Time that is
    // representable by a UnixNano() stored as an int64.
    TimeType
    // TimeFullType indicates that the field carries a time.Time stored as-is.
    TimeFullType
    // Uint64Type indicates that the field carries a uint64.
    Uint64Type
    // Uint32Type indicates that the field carries a uint32.
    Uint32Type
    // Uint16Type indicates that the field carries a uint16.
    Uint16Type
    // Uint8Type indicates that the field carries a uint8.
    Uint8Type
    // UintptrType indicates that the field carries a uintptr.
    UintptrType
    // UintType indicates that the field carries a uint.
    UintType
    // ErrorType indicates that the field carries an error.
    ErrorType
    // SkipType indicates that the field is a no-op.
    SkipType
)
// Field describes a single dynamically-built struct member.
type Field struct {
    // Name is the struct field's name.
    Name string
    // Type holds the field's value/type descriptor (presumably interpreted
    // per the associated FieldType — confirm against the package's builder).
    Type interface{}
    // Tag is the struct tag attached to the field.
    Tag string
} | field.go | 0.551815 | 0.506164 | field.go | starcoder
package local
import "sort"
// totalStat accumulates lifetime upload/download totals.
type totalStat struct {
    TotalUpload   uint64
    TotalDownload uint64
}

// highestStat holds the peak transfer rates observed over several windows
// (Bps presumably bytes per second — confirm against the collector).
type highestStat struct {
    HighestLastSecondBps     uint64
    HighestLastMinuteBps     uint64
    HighestLastTenMinutesBps uint64
    HighestLastHourBps       uint64
}

// lastStat holds the most recent transfer rates over several windows.
type lastStat struct {
    LastSecondBps     uint64
    LastMinuteBps     uint64
    LastTenMinutesBps uint64
    LastHourBps       uint64
}

// Stat aggregates the identity, health, and throughput statistics of a
// single entry; it is the element type of Stats.
type Stat struct {
    totalStat
    highestStat
    lastStat
    Id           string
    Address      string
    ProtocolType string
    FailedCount  uint32
    Latency      int64
}
// Sorter orders a Stats slice; the string argument is the sort direction
// ("desc" for descending, anything else ascending).
type Sorter func(string, Stats)

// statsSortMap maps a lower-case sort-key name to the Sorter that orders
// Stats by that field.
var (
    statsSortMap = map[string]Sorter{
        "failedcount":              orderByFailedCount,
        "latency":                  orderByLatency,
        "download":                 orderByTotalDownload,
        "upload":                   orderByTotalUpload,
        "highestlasthourbps":       orderByHighestLastHourBps,
        "highestlasttenminutesbps": orderByHighestLastTenMinutesBps,
        "highestlastminutebps":     orderByHighestLastMinuteBps,
        "highestlastsecondbps":     orderByHighestLastSecondBps,
        "lasthourbps":              orderByLastHourBps,
        "lasttenminutesbps":        orderByLastTenMinutesBps,
        "lastminutebps":            orderByLastMinuteBps,
        "lastsecondbps":            orderByLastSecondBps,
        "protocol":                 orderByProtocolType,
        "address":                  orderByAddress,
    }
)
// applySortOrder runs the sort in the requested direction: "desc" sorts
// descending via sort.Reverse; any other value sorts ascending. It is the
// shared implementation behind every orderBy* Sorter, replacing fourteen
// copies of the same if/else.
func applySortOrder(order string, data sort.Interface) {
    if order == "desc" {
        sort.Sort(sort.Reverse(data))
    } else {
        sort.Sort(data)
    }
}

// orderByFailedCount sorts stats by FailedCount.
func orderByFailedCount(order string, stats Stats) {
    applySortOrder(order, byFailedCount{stats})
}

// orderByLatency sorts stats by Latency.
func orderByLatency(order string, stats Stats) {
    applySortOrder(order, byLatency{stats})
}

// orderByTotalDownload sorts stats by TotalDownload.
func orderByTotalDownload(order string, stats Stats) {
    applySortOrder(order, byTotalDownload{stats})
}

// orderByTotalUpload sorts stats by TotalUpload.
func orderByTotalUpload(order string, stats Stats) {
    applySortOrder(order, byTotalUpload{stats})
}

// orderByHighestLastHourBps sorts stats by HighestLastHourBps.
func orderByHighestLastHourBps(order string, stats Stats) {
    applySortOrder(order, byHighestLastHourBps{stats})
}

// orderByHighestLastTenMinutesBps sorts stats by HighestLastTenMinutesBps.
func orderByHighestLastTenMinutesBps(order string, stats Stats) {
    applySortOrder(order, byHighestLastTenMinutesBps{stats})
}

// orderByHighestLastMinuteBps sorts stats by HighestLastMinuteBps.
func orderByHighestLastMinuteBps(order string, stats Stats) {
    applySortOrder(order, byHighestLastMinuteBps{stats})
}

// orderByHighestLastSecondBps sorts stats by HighestLastSecondBps.
func orderByHighestLastSecondBps(order string, stats Stats) {
    applySortOrder(order, byHighestLastSecondBps{stats})
}

// orderByLastHourBps sorts stats by LastHourBps.
func orderByLastHourBps(order string, stats Stats) {
    applySortOrder(order, byLastHourBps{stats})
}

// orderByLastTenMinutesBps sorts stats by LastTenMinutesBps.
func orderByLastTenMinutesBps(order string, stats Stats) {
    applySortOrder(order, byLastTenMinutesBps{stats})
}

// orderByLastMinuteBps sorts stats by LastMinuteBps.
func orderByLastMinuteBps(order string, stats Stats) {
    applySortOrder(order, byLastMinuteBps{stats})
}

// orderByLastSecondBps sorts stats by LastSecondBps.
func orderByLastSecondBps(order string, stats Stats) {
    applySortOrder(order, byLastSecondBps{stats})
}

// orderByProtocolType sorts stats by ProtocolType.
func orderByProtocolType(order string, stats Stats) {
    applySortOrder(order, byProtocolType{stats})
}

// orderByAddress sorts stats by Address (the natural order of Stats).
func orderByAddress(order string, stats Stats) {
    applySortOrder(order, stats)
}
// Stats is a sortable slice of *Stat; its natural sort.Interface order is
// ascending Address.
type Stats []*Stat

// Len implements sort.Interface.
func (slice Stats) Len() int {
    return len(slice)
}

// Less implements sort.Interface, ordering by Address.
func (slice Stats) Less(i, j int) bool {
    return slice[i].Address < slice[j].Address
}

// Swap implements sort.Interface.
func (slice Stats) Swap(i, j int) {
    slice[i], slice[j] = slice[j], slice[i]
}
// Each by* type below embeds Stats (inheriting Len and Swap) and overrides
// Less to order by one field, for use with sort.Sort / sort.Reverse.

type byLastSecondBps struct{ Stats }

func (slice byLastSecondBps) Less(i, j int) bool {
    return slice.Stats[i].LastSecondBps < slice.Stats[j].LastSecondBps
}

type byLastMinuteBps struct{ Stats }

func (slice byLastMinuteBps) Less(i, j int) bool {
    return slice.Stats[i].LastMinuteBps < slice.Stats[j].LastMinuteBps
}

type byLastTenMinutesBps struct{ Stats }

func (slice byLastTenMinutesBps) Less(i, j int) bool {
    return slice.Stats[i].LastTenMinutesBps < slice.Stats[j].LastTenMinutesBps
}

type byLastHourBps struct{ Stats }

func (slice byLastHourBps) Less(i, j int) bool {
    return slice.Stats[i].LastHourBps < slice.Stats[j].LastHourBps
}

type byHighestLastSecondBps struct{ Stats }

func (slice byHighestLastSecondBps) Less(i, j int) bool {
    return slice.Stats[i].HighestLastSecondBps < slice.Stats[j].HighestLastSecondBps
}

type byHighestLastMinuteBps struct{ Stats }

func (slice byHighestLastMinuteBps) Less(i, j int) bool {
    return slice.Stats[i].HighestLastMinuteBps < slice.Stats[j].HighestLastMinuteBps
}

type byHighestLastTenMinutesBps struct{ Stats }

func (slice byHighestLastTenMinutesBps) Less(i, j int) bool {
    return slice.Stats[i].HighestLastTenMinutesBps < slice.Stats[j].HighestLastTenMinutesBps
}

type byHighestLastHourBps struct{ Stats }

func (slice byHighestLastHourBps) Less(i, j int) bool {
    return slice.Stats[i].HighestLastHourBps < slice.Stats[j].HighestLastHourBps
}

type byLatency struct{ Stats }

func (slice byLatency) Less(i, j int) bool {
    return slice.Stats[i].Latency < slice.Stats[j].Latency
}

type byFailedCount struct{ Stats }

func (slice byFailedCount) Less(i, j int) bool {
    return slice.Stats[i].FailedCount < slice.Stats[j].FailedCount
}

type byTotalUpload struct{ Stats }

func (slice byTotalUpload) Less(i, j int) bool {
    return slice.Stats[i].TotalUpload < slice.Stats[j].TotalUpload
}

type byTotalDownload struct{ Stats }

func (slice byTotalDownload) Less(i, j int) bool {
    return slice.Stats[i].TotalDownload < slice.Stats[j].TotalDownload
}

type byProtocolType struct{ Stats }

func (slice byProtocolType) Less(i, j int) bool {
    return slice.Stats[i].ProtocolType < slice.Stats[j].ProtocolType
} | local/stat.go | 0.516595 | 0.467332 | stat.go | starcoder
// Package f64s provides common operations on float64 slices.
package f64s
import (
"fmt"
)
var (
    // errLength reports a length mismatch between destination and source
    // (or indices); Map and Take panic with it.
    errLength = fmt.Errorf("f64s: length mismatch")
    // errSortedIndices reports Take indices not in ascending order.
    errSortedIndices = fmt.Errorf("f64s: indices not sorted")
    // errDuplicateIndices reports repeated Take indices.
    errDuplicateIndices = fmt.Errorf("f64s: duplicate indices")
)
// Filter creates a slice of the elements x of src for which f(x) is true,
// written into dst starting at index 0 (dst is truncated first). A nil dst
// is replaced with a fresh slice of capacity len(src).
func Filter(dst, src []float64, f func(v float64) bool) []float64 {
    if dst == nil {
        dst = make([]float64, 0, len(src))
    }
    out := dst[:0]
    for _, v := range src {
        if f(v) {
            out = append(out, v)
        }
    }
    return out
}
// Map writes f(x) for each element x of src into the corresponding slot of
// dst and returns dst. A nil dst is replaced with a fresh slice of length
// len(src); otherwise dst must have exactly len(src) elements or Map panics
// with errLength.
func Map(dst, src []float64, f func(v float64) float64) []float64 {
    if dst == nil {
        dst = make([]float64, len(src))
    }
    if len(dst) != len(src) {
        panic(errLength)
    }
    for i := range src {
        dst[i] = f(src[i])
    }
    return dst
}
// Find collects the indices of all elements of src for which f is true,
// written into dst starting at index 0 (dst is truncated first). A nil dst
// is replaced with a fresh slice of capacity len(src).
func Find(dst []int, src []float64, f func(v float64) bool) []int {
    if dst == nil {
        dst = make([]int, 0, len(src))
    }
    out := dst[:0]
    for i, v := range src {
        if f(v) {
            out = append(out, i)
        }
    }
    return out
}
// Take creates a sub-slice of src with all elements indexed by the provided indices.
// Take uses dst as work buffer, storing elements at the start of the slice.
// Take clears dst if a slice is passed, and allocates a new slice if dst is nil.
// Take will panic if indices is not sorted or has duplicates.
// Take will panic if length of indices is larger than length of src.
// Take will panic if length of indices is different from length of dst.
func Take(dst, src []float64, indices []int) []float64 {
	if len(indices) > len(src) {
		panic(errLength)
	}
	if dst == nil {
		dst = make([]float64, len(indices))
	}
	if len(dst) != len(indices) {
		panic(errLength)
	}
	if len(indices) == 0 {
		return dst
	}
	// The first index has no predecessor to validate against.
	dst[0] = src[indices[0]]
	for i := 1; i < len(indices); i++ {
		v0 := indices[i-1]
		v1 := indices[i]
		// Each index must be strictly greater than the previous one.
		switch {
		case v0 == v1:
			panic(errDuplicateIndices)
		case v0 > v1:
			panic(errSortedIndices)
		}
		dst[i] = src[v1]
	}
	return dst
} | sliceop/f64s/f64s.go | 0.756178 | 0.561455 | f64s.go | starcoder |
package draw2d
import (
"math"
)
// Tunable parameters for adaptive bezier subdivision.
var (
	CurveRecursionLimit = 32            // maximum subdivision depth
	CurveCollinearityEpsilon = 1e-30    // cross products below this are treated as collinear
	CurveAngleToleranceEpsilon = 0.01   // angle tolerances below this disable the angle check
)
/*
The function has the following parameters:
approximationScale :
Eventually determines the approximation accuracy. In practice we need to transform points from the World coordinate system to the Screen one.
It always has some scaling coefficient.
The curves are usually processed in the World coordinates, while the approximation accuracy should be eventually in pixels.
Usually it looks as follows:
curved.approximationScale(transform.scale());
where transform is the affine matrix that includes all the transformations, including viewport and zoom.
angleTolerance :
You set it in radians.
The less this value is the more accurate will be the approximation at sharp turns.
But 0 means that we don't consider angle conditions at all.
cuspLimit :
An angle in radians.
If 0, only the real cusps will have bevel cuts.
If more than 0, it will restrict the sharpness.
The more this value is the less sharp turns will be cut.
Typically it should not exceed 10-15 degrees.
*/
// cubicBezier flattens the cubic bezier (x1,y1)..(x4,y4) into line segments,
// emitting approximation vertices through v. See the parameter discussion
// above for approximationScale, angleTolerance and cuspLimit.
func cubicBezier(v VertexConverter, x1, y1, x2, y2, x3, y3, x4, y4, approximationScale, angleTolerance, cuspLimit float64) {
	tolerance := 0.5 / approximationScale
	recursiveCubicBezier(v, x1, y1, x2, y2, x3, y3, x4, y4, 0, tolerance*tolerance, angleTolerance, computeCuspLimit(cuspLimit))
}
// quadraticBezier flattens the quadratic bezier (x1,y1)..(x3,y3) into line
// segments, emitting approximation vertices through v.
// approximationScale and angleTolerance have the same meaning as for
// cubicBezier.
func quadraticBezier(v VertexConverter, x1, y1, x2, y2, x3, y3, approximationScale, angleTolerance float64) {
	tolerance := 0.5 / approximationScale
	recursiveQuadraticBezierBezier(v, x1, y1, x2, y2, x3, y3, 0, tolerance*tolerance, angleTolerance)
}
func computeCuspLimit(v float64) (r float64) {
if v == 0.0 {
r = 0.0
} else {
r = math.Pi - v
}
return
}
/**
 * Adaptive subdivision of a quadratic bezier, after:
 * http://www.antigrain.com/research/adaptive_bezier/index.html
 *
 * Emits approximation vertices through v. distanceToleranceSquare bounds the
 * squared flatness error; angleTolerance (radians) optionally bounds the
 * turn angle at the subdivision point. Recursion depth is capped by
 * CurveRecursionLimit.
 */
func recursiveQuadraticBezierBezier(v VertexConverter, x1, y1, x2, y2, x3, y3 float64, level int, distanceToleranceSquare, angleTolerance float64) {
	if level > CurveRecursionLimit {
		return
	}
	// Calculate all the mid-points of the line segments
	//----------------------
	x12 := (x1 + x2) / 2
	y12 := (y1 + y2) / 2
	x23 := (x2 + x3) / 2
	y23 := (y2 + y3) / 2
	x123 := (x12 + x23) / 2
	y123 := (y12 + y23) / 2
	dx := x3 - x1
	dy := y3 - y1
	// d is twice the area of the triangle (p1, p2, p3): a measure of how far
	// the control point deviates from the p1-p3 chord.
	d := math.Abs(((x2-x3)*dy - (y2-y3)*dx))
	if d > CurveCollinearityEpsilon {
		// Regular case
		//-----------------
		if d*d <= distanceToleranceSquare*(dx*dx+dy*dy) {
			// If the curvature doesn't exceed the distanceTolerance value
			// we tend to finish subdivisions.
			//----------------------
			if angleTolerance < CurveAngleToleranceEpsilon {
				v.Vertex(x123, y123)
				return
			}
			// Angle & Cusp Condition
			//----------------------
			da := math.Abs(math.Atan2(y3-y2, x3-x2) - math.Atan2(y2-y1, x2-x1))
			if da >= math.Pi {
				da = 2*math.Pi - da
			}
			if da < angleTolerance {
				// Finally we can stop the recursion
				//----------------------
				v.Vertex(x123, y123)
				return
			}
		}
	} else {
		// Collinear case: project p2 onto the p1-p3 segment and measure the
		// squared distance to the closest point.
		//------------------
		da := dx*dx + dy*dy
		if da == 0 {
			d = squareDistance(x1, y1, x2, y2)
		} else {
			d = ((x2-x1)*dx + (y2-y1)*dy) / da
			if d > 0 && d < 1 {
				// Simple collinear case, 1---2---3
				// We can leave just two endpoints
				return
			}
			if d <= 0 {
				d = squareDistance(x2, y2, x1, y1)
			} else if d >= 1 {
				d = squareDistance(x2, y2, x3, y3)
			} else {
				d = squareDistance(x2, y2, x1+d*dx, y1+d*dy)
			}
		}
		if d < distanceToleranceSquare {
			v.Vertex(x2, y2)
			return
		}
	}
	// Continue subdivision
	//----------------------
	recursiveQuadraticBezierBezier(v, x1, y1, x12, y12, x123, y123, level+1, distanceToleranceSquare, angleTolerance)
	recursiveQuadraticBezierBezier(v, x123, y123, x23, y23, x3, y3, level+1, distanceToleranceSquare, angleTolerance)
}
/**
 * Adaptive subdivision of a cubic bezier, after:
 * http://www.antigrain.com/research/adaptive_bezier/index.html
 *
 * Emits approximation vertices through v. distanceToleranceSquare bounds the
 * squared flatness error; angleTolerance and cuspLimit (radians) optionally
 * bound the turn angles at the subdivision point. Recursion depth is capped
 * by CurveRecursionLimit. The trailing break statements are redundant in Go
 * (switch cases do not fall through) and kept only for symmetry with the
 * original C++ source.
 */
func recursiveCubicBezier(v VertexConverter, x1, y1, x2, y2, x3, y3, x4, y4 float64, level int, distanceToleranceSquare, angleTolerance, cuspLimit float64) {
	if level > CurveRecursionLimit {
		return
	}
	// Calculate all the mid-points of the line segments
	//----------------------
	x12 := (x1 + x2) / 2
	y12 := (y1 + y2) / 2
	x23 := (x2 + x3) / 2
	y23 := (y2 + y3) / 2
	x34 := (x3 + x4) / 2
	y34 := (y3 + y4) / 2
	x123 := (x12 + x23) / 2
	y123 := (y12 + y23) / 2
	x234 := (x23 + x34) / 2
	y234 := (y23 + y34) / 2
	x1234 := (x123 + x234) / 2
	y1234 := (y123 + y234) / 2
	// Try to approximate the full cubic curve by a single straight line
	//------------------
	dx := x4 - x1
	dy := y4 - y1
	// d2 and d3 measure how far the control points p2 and p3 deviate from
	// the p1-p4 chord (twice the respective triangle areas).
	d2 := math.Abs(((x2-x4)*dy - (y2-y4)*dx))
	d3 := math.Abs(((x3-x4)*dy - (y3-y4)*dx))
	switch {
	case d2 <= CurveCollinearityEpsilon && d3 <= CurveCollinearityEpsilon:
		// All collinear OR p1==p4
		//----------------------
		k := dx*dx + dy*dy
		if k == 0 {
			d2 = squareDistance(x1, y1, x2, y2)
			d3 = squareDistance(x4, y4, x3, y3)
		} else {
			k = 1 / k
			da1 := x2 - x1
			da2 := y2 - y1
			d2 = k * (da1*dx + da2*dy)
			da1 = x3 - x1
			da2 = y3 - y1
			d3 = k * (da1*dx + da2*dy)
			if d2 > 0 && d2 < 1 && d3 > 0 && d3 < 1 {
				// Simple collinear case, 1---2---3---4
				// We can leave just two endpoints
				return
			}
			if d2 <= 0 {
				d2 = squareDistance(x2, y2, x1, y1)
			} else if d2 >= 1 {
				d2 = squareDistance(x2, y2, x4, y4)
			} else {
				d2 = squareDistance(x2, y2, x1+d2*dx, y1+d2*dy)
			}
			if d3 <= 0 {
				d3 = squareDistance(x3, y3, x1, y1)
			} else if d3 >= 1 {
				d3 = squareDistance(x3, y3, x4, y4)
			} else {
				d3 = squareDistance(x3, y3, x1+d3*dx, y1+d3*dy)
			}
		}
		if d2 > d3 {
			if d2 < distanceToleranceSquare {
				v.Vertex(x2, y2)
				return
			}
		} else {
			if d3 < distanceToleranceSquare {
				v.Vertex(x3, y3)
				return
			}
		}
		break
	case d2 <= CurveCollinearityEpsilon && d3 > CurveCollinearityEpsilon:
		// p1,p2,p4 are collinear, p3 is significant
		//----------------------
		if d3*d3 <= distanceToleranceSquare*(dx*dx+dy*dy) {
			if angleTolerance < CurveAngleToleranceEpsilon {
				v.Vertex(x23, y23)
				return
			}
			// Angle Condition
			//----------------------
			da1 := math.Abs(math.Atan2(y4-y3, x4-x3) - math.Atan2(y3-y2, x3-x2))
			if da1 >= math.Pi {
				da1 = 2*math.Pi - da1
			}
			if da1 < angleTolerance {
				v.Vertex(x2, y2)
				v.Vertex(x3, y3)
				return
			}
			if cuspLimit != 0.0 {
				if da1 > cuspLimit {
					v.Vertex(x3, y3)
					return
				}
			}
		}
		break
	case d2 > CurveCollinearityEpsilon && d3 <= CurveCollinearityEpsilon:
		// p1,p3,p4 are collinear, p2 is significant
		//----------------------
		if d2*d2 <= distanceToleranceSquare*(dx*dx+dy*dy) {
			if angleTolerance < CurveAngleToleranceEpsilon {
				v.Vertex(x23, y23)
				return
			}
			// Angle Condition
			//----------------------
			da1 := math.Abs(math.Atan2(y3-y2, x3-x2) - math.Atan2(y2-y1, x2-x1))
			if da1 >= math.Pi {
				da1 = 2*math.Pi - da1
			}
			if da1 < angleTolerance {
				v.Vertex(x2, y2)
				v.Vertex(x3, y3)
				return
			}
			if cuspLimit != 0.0 {
				if da1 > cuspLimit {
					v.Vertex(x2, y2)
					return
				}
			}
		}
		break
	case d2 > CurveCollinearityEpsilon && d3 > CurveCollinearityEpsilon:
		// Regular case
		//-----------------
		if (d2+d3)*(d2+d3) <= distanceToleranceSquare*(dx*dx+dy*dy) {
			// If the curvature doesn't exceed the distanceTolerance value
			// we tend to finish subdivisions.
			//----------------------
			if angleTolerance < CurveAngleToleranceEpsilon {
				v.Vertex(x23, y23)
				return
			}
			// Angle & Cusp Condition
			//----------------------
			k := math.Atan2(y3-y2, x3-x2)
			da1 := math.Abs(k - math.Atan2(y2-y1, x2-x1))
			da2 := math.Abs(math.Atan2(y4-y3, x4-x3) - k)
			if da1 >= math.Pi {
				da1 = 2*math.Pi - da1
			}
			if da2 >= math.Pi {
				da2 = 2*math.Pi - da2
			}
			if da1+da2 < angleTolerance {
				// Finally we can stop the recursion
				//----------------------
				v.Vertex(x23, y23)
				return
			}
			if cuspLimit != 0.0 {
				if da1 > cuspLimit {
					v.Vertex(x2, y2)
					return
				}
				if da2 > cuspLimit {
					v.Vertex(x3, y3)
					return
				}
			}
		}
		break
	}
	// Continue subdivision
	//----------------------
	recursiveCubicBezier(v, x1, y1, x12, y12, x123, y123, x1234, y1234, level+1, distanceToleranceSquare, angleTolerance, cuspLimit)
	recursiveCubicBezier(v, x1234, y1234, x234, y234, x34, y34, x4, y4, level+1, distanceToleranceSquare, angleTolerance, cuspLimit)
} | draw2d/curves.go | 0.760917 | 0.739105 | curves.go | starcoder |
package packet
import (
	"bufio"
	"bytes"
	"encoding/binary"
	"fmt"
	"io"
	"math"
)
// Packet allows the reading of properties out of raw binary formats.
// Each method on this interface when called will scan forwards through the bytes specified.
type Packet interface {
	// Bool read a bool value from the packet
	// Advances a byte with the packet
	// Errors if the value cannot be converted to boolean
	// Errors if there are no bytes remaining to be read
	Bool() (bool, error)
	// Float32 read a float32 value from the packet
	// Advances 4 bytes with the packet
	// Errors if the value cannot be converted to a float32
	// Errors if there are less then 4 bytes remaining to be read
	Float32() (float32, error)
	// Uint64 read an uint64 value from the packet
	// Advances 8 bytes with the packet
	// Errors if the value cannot be converted to an uint64
	// Errors if there are less than 8 bytes remaining to be read
	Uint64() (uint64, error)
	// Uint32 read an uint32 value from the packet
	// Advances 4 bytes with the packet
	// Errors if the value cannot be converted to an uint32
	// Errors if there are less than 4 bytes remaining to be read
	Uint32() (uint32, error)
	// Uint16 read an uint16 value from the packet
	// Advances 2 bytes with the packet
	// Errors if the value cannot be converted to an uint16
	// Errors if there are less than 2 bytes remaining to be read
	Uint16() (uint16, error)
	// Uint8 read an uint8 value from the packet
	// Advances a byte with the packet
	// Errors if the value cannot be converted to an uint8
	// Errors if there are no bytes remaining to be read
	Uint8() (uint8, error)
	// Int64 read an int64 value from the packet
	// Advances 8 bytes with the packet
	// Errors if the value cannot be converted to an int64
	// Errors if there are less than 8 bytes remaining to be read
	Int64() (int64, error)
	// Int32 read an int32 value from the packet
	// Advances 4 bytes with the packet
	// Errors if the value cannot be converted to an int32
	// Errors if there are less then 4 bytes remaining to be read
	Int32() (int32, error)
	// Int16 read an int16 value from the packet
	// Advances 2 bytes with the packet
	// Errors if the value cannot be converted to an int16
	// Errors if there are less then 2 bytes remaining to be read
	Int16() (int16, error)
	// Int8 read an int8 value from the packet
	// Advances a byte with the packet
	// Errors if the value cannot be converted to an int8
	// Errors if there are no bytes remaining to be read
	Int8() (int8, error)
}
// NewPacket returns a Packet that reads sequentially from data.
func NewPacket(data []byte) Packet {
	r := bufio.NewReader(bytes.NewReader(data))
	return &packet{reader: r}
}
// packet is the default Packet implementation; it consumes bytes
// sequentially from an in-memory buffered reader.
type packet struct {
	reader *bufio.Reader
}
// read consumes and returns the next single byte.
func (p *packet) read() (byte, error) {
	return p.reader.ReadByte()
}
// readN consumes and returns the next n bytes.
// It returns an error when fewer than n bytes remain.
func (p *packet) readN(n int) ([]byte, error) {
	out := make([]byte, n)
	// io.ReadFull fills out in bulk instead of the previous byte-by-byte
	// loop, and fails with io.ErrUnexpectedEOF on a short read.
	if _, err := io.ReadFull(p.reader, out); err != nil {
		return nil, fmt.Errorf("unable to read %v bytes: %v", n, err)
	}
	return out, nil
}
// Bool read a bool value from the packet
// Advances a byte with the packet
// Errors if the value cannot be converted to boolean (byte not 0 or 1)
// Errors if there are no bytes remaining to be read
func (p *packet) Bool() (bool, error) {
	b, err := p.read()
	if err != nil {
		return false, fmt.Errorf("failed to read byte: %v", err)
	}
	// b is an unsigned byte, so b < 0 can never hold; only the upper
	// bound needs checking (the original "b < 0 ||" was dead code).
	if b > 1 {
		return false, fmt.Errorf("unexpected byte value %v is <0 or >1", b)
	}
	return b == 1, nil
}
// Float32 reads a little-endian float32 from the packet, advancing 4 bytes.
// It errors when fewer than 4 bytes remain.
func (p *packet) Float32() (float32, error) {
	raw, err := p.readN(4)
	if err != nil {
		return 0, fmt.Errorf("failed to read bytes: %v", err)
	}
	bits := binary.LittleEndian.Uint32(raw)
	return math.Float32frombits(bits), nil
}

// Uint64 reads a little-endian uint64 from the packet, advancing 8 bytes.
// It errors when fewer than 8 bytes remain.
func (p *packet) Uint64() (uint64, error) {
	raw, err := p.readN(8)
	if err != nil {
		return 0, fmt.Errorf("failed to read bytes: %v", err)
	}
	return binary.LittleEndian.Uint64(raw), nil
}

// Uint32 reads a little-endian uint32 from the packet, advancing 4 bytes.
// It errors when fewer than 4 bytes remain.
func (p *packet) Uint32() (uint32, error) {
	raw, err := p.readN(4)
	if err != nil {
		return 0, fmt.Errorf("failed to read bytes: %v", err)
	}
	return binary.LittleEndian.Uint32(raw), nil
}

// Uint16 reads a little-endian uint16 from the packet, advancing 2 bytes.
// It errors when fewer than 2 bytes remain.
func (p *packet) Uint16() (uint16, error) {
	raw, err := p.readN(2)
	if err != nil {
		return 0, fmt.Errorf("failed to read bytes: %v", err)
	}
	return binary.LittleEndian.Uint16(raw), nil
}

// Uint8 reads a single byte from the packet as an uint8.
// It errors when no bytes remain.
func (p *packet) Uint8() (uint8, error) {
	value, err := p.read()
	if err != nil {
		return 0, fmt.Errorf("failed to read bytes: %v", err)
	}
	return value, nil
}

// Int64 reads a little-endian int64 from the packet, advancing 8 bytes.
// It errors when fewer than 8 bytes remain.
func (p *packet) Int64() (int64, error) {
	raw, err := p.readN(8)
	if err != nil {
		return 0, fmt.Errorf("failed to read bytes: %v", err)
	}
	return int64(binary.LittleEndian.Uint64(raw)), nil
}

// Int32 reads a little-endian int32 from the packet, advancing 4 bytes.
// It errors when fewer than 4 bytes remain.
func (p *packet) Int32() (int32, error) {
	raw, err := p.readN(4)
	if err != nil {
		return 0, fmt.Errorf("failed to read bytes: %v", err)
	}
	return int32(binary.LittleEndian.Uint32(raw)), nil
}

// Int16 reads a little-endian int16 from the packet, advancing 2 bytes.
// It errors when fewer than 2 bytes remain.
func (p *packet) Int16() (int16, error) {
	raw, err := p.readN(2)
	if err != nil {
		return 0, fmt.Errorf("failed to read bytes: %v", err)
	}
	return int16(binary.LittleEndian.Uint16(raw)), nil
}
// Int8 read an int8 value from the packet
// Advances a byte with the packet
// Errors if there are no bytes remaining to be read
func (p *packet) Int8() (int8, error) {
	b, err := p.read()
	if err != nil {
		return 0, fmt.Errorf("failed to read bytes: %v", err)
	}
	return int8(b), nil
} | packet.go | 0.797399 | 0.487734 | packet.go | starcoder |
package raph
import (
"math"
)
// Dijkstra holds the mutable state used to run Dijkstra's shortest-path
// algorithm over graph G. A single instance can be reused; Reset
// reinitializes it between queries.
type Dijkstra struct {
	G Graph                      // graph being searched
	Q []string                   // IDs of vertices not yet settled
	Costs map[string]float64     // best known cost to reach each vertex
	PredsV map[string]string     // predecessor vertex on the best path to each vertex
	PredsE map[string]string     // edge crossed from the predecessor to each vertex
}
// NewDijkstra initializes and returns a Dijkstra instance with graph g:
// every vertex is queued and given an initial cost of +Inf.
func NewDijkstra(g Graph) *Dijkstra {
	queue := make([]string, 0, len(g.Vertices))
	costs := make(map[string]float64, len(g.Vertices))
	for _, vertex := range g.Vertices {
		queue = append(queue, vertex.ID)
		costs[vertex.ID] = math.Inf(0)
	}
	return &Dijkstra{g, queue, costs, map[string]string{}, map[string]string{}}
}
// Reset restores the instance to a freshly initialized state (all costs
// +Inf, all vertices queued) so it can be reused for another query.
func (d *Dijkstra) Reset() {
	*d = *NewDijkstra(d.G)
}
// PickVertexFromQ removes and returns the queued vertex with minimal cost.
// Ties are broken in favor of the earliest queue position.
func (d *Dijkstra) PickVertexFromQ() string {
	best := 0
	for i, v := range d.Q {
		if d.Costs[v] < d.Costs[d.Q[best]] {
			best = i
		}
	}
	vertex := d.Q[best]
	d.Q = Remove(d.Q, best)
	return vertex
}
// UpdateDistances relaxes the s1 -> s2 transition: when reaching s2 through
// s1 is cheaper than its current cost, the new cost is stored together with
// the predecessor vertex and the edge crossed.
func (d *Dijkstra) UpdateDistances(s1, s2, edge string, s1s2Weight float64) {
	candidate := d.Costs[s1] + s1s2Weight
	if candidate >= d.Costs[s2] {
		return
	}
	d.Costs[s2] = candidate
	d.PredsV[s2] = s1
	d.PredsE[s2] = edge
}
// ShortestPath runs Dijkstra for the query and returns the detailed path
// from query.From to query.To together with its total cost. The minimized
// value is the sum of the costs named in query.Minimize.
func (d *Dijkstra) ShortestPath(query Query) ([]map[string]interface{}, float64) {
	// start from a clean state with only the source at cost 0
	d.Reset()
	d.Costs[query.From] = 0
	// settle vertices until none remain
	for len(d.Q) > 0 {
		current := d.PickVertexFromQ()
		neighbors, edges := d.G.GetNeighborsWithCostsAndEdges(current, *query.Constraint, query.Minimize...)
		for neighbor, cost := range neighbors {
			d.UpdateDistances(current, neighbor, edges[neighbor], cost)
		}
	}
	// reconstruct and annotate the path
	path := GetPath(query.From, query.To, d.PredsV, d.PredsE)
	return GetDetailedPath(path, d.G), d.Costs[query.To]
}
// ShortestPathInverse returns the shortest path for the inverted query
// (to -> from), crossing edges against their direction by prefixing the
// constraint label with "~". It is used by ShortestPathOption.
// query is received by value, so the caller's Query is not mutated
// (though *query.Constraint is shared and its Label is rewritten).
func (d *Dijkstra) ShortestPathInverse(query Query) ([]map[string]interface{}, float64) {
	// idiomatic tuple swap instead of a temporary variable
	query.From, query.To = query.To, query.From
	query.Constraint.Label = "~" + query.Constraint.Label
	return d.ShortestPath(query)
}
// ShortestPathOption returns a path (slice of nodes) with its cost. One of the vertices of the path includes the option specified in the query.
// It runs Dijkstra in both directions, then picks the vertex carrying the
// option that minimizes (cost from source) + (cost from destination).
func (d *Dijkstra) ShortestPathOption(query Query) ([]map[string]interface{}, float64) {
	// compute bi-directional shortest path
	// (Reset replaces d's maps with fresh ones, so the references saved
	// here keep pointing at the forward run's results)
	d.ShortestPath(query)
	fromCosts, fromPredsV, fromPredsE := d.Costs, d.PredsV, d.PredsE
	d.ShortestPathInverse(query)
	toCosts, toPredsV, toPredsE := d.Costs, d.PredsV, d.PredsE
	// select best vertex
	cost := math.Inf(0)
	minVertex := "none"
	for vertexID, vertex := range d.G.Vertices {
		if vertexCost, ok := vertex.Costs[query.Option]; ok {
			pathCost := fromCosts[vertexID] + toCosts[vertexID]
			// NOTE(review): pathCost (without vertexCost) is compared against
			// cost, which already includes the previous best candidate's
			// vertexCost — candidates are not compared on the same quantity.
			// Confirm whether the comparison should use pathCost+vertexCost.
			if pathCost > 0 && pathCost < cost {
				cost = pathCost + vertexCost
				minVertex = vertexID
			}
		}
	}
	// gather paths from->vertex & vertex->to
	path1 := GetPath(query.From, minVertex, fromPredsV, fromPredsE)
	path2 := GetPath(query.To, minVertex, toPredsV, toPredsE)
	Reverse(path2)
	path := Concat(path1, path2)
	detailedPath := GetDetailedPath(path, d.G)
	// arrange return variables
	for _, vertex := range detailedPath {
		if vertex["id"] == minVertex {
			vertex["option"] = query.Option
		}
	}
	return detailedPath, cost
} | raph/dijkstra.go | 0.795618 | 0.4917 | dijkstra.go | starcoder |
package advent
import (
"math"
"strings"
)
// Compile-time assertion that *extendedPolymerization implements Problem.
var _ Problem = &extendedPolymerization{}
// extendedPolymerization solves the day-14 "Extended Polymerization" puzzle.
type extendedPolymerization struct {
	dailyProblem
}
// NewExtendedPolymerization returns the Problem instance for day 14.
func NewExtendedPolymerization() Problem {
	p := &extendedPolymerization{}
	p.day = 14
	return p
}
// Solve reads the puzzle input and returns both parts' answers as []int64.
func (e *extendedPolymerization) Solve() interface{} {
	input := e.GetInputLines()
	return []int64{
		e.minMax10(input),
		e.minMax40(input),
	}
}
/*
The incredible pressures at this depth are starting to put a strain on your submarine. The submarine has polymerization equipment that would produce suitable materials to reinforce the submarine, and the nearby volcanically-active caves should even have the necessary input elements in sufficient quantities.
The submarine manual contains instructions for finding the optimal polymer formula; specifically, it offers a polymer template and a list of pair insertion rules (your puzzle input). You just need to work out what polymer would result after repeating the pair insertion process a few times.
For example:
NNCB
CH -> B
HH -> N
CB -> H
NH -> C
HB -> C
HC -> B
HN -> C
NN -> C
BH -> H
NC -> B
NB -> B
BN -> B
BB -> N
BC -> B
CC -> N
CN -> C
The first line is the polymer template - this is the starting point of the process.
The following section defines the pair insertion rules. A rule like AB -> C means that when elements A and B are immediately adjacent, element C should be inserted between them. These insertions all happen simultaneously.
So, starting with the polymer template NNCB, the first step simultaneously considers all three pairs:
The first pair (NN) matches the rule NN -> C, so element C is inserted between the first N and the second N.
The second pair (NC) matches the rule NC -> B, so element B is inserted between the N and the C.
The third pair (CB) matches the rule CB -> H, so element H is inserted between the C and the B.
Note that these pairs overlap: the second element of one pair is the first element of the next pair. Also, because all pairs are considered simultaneously, inserted elements are not considered to be part of a pair until the next step.
After the first step of this process, the polymer becomes NCNBCHB.
Here are the results of a few steps using the above rules:
Template: NNCB
After step 1: NCNBCHB
After step 2: NBCCNBBBCBHCB
After step 3: NBBBCNCCNBBNBNBBCHBHHBCHB
After step 4: NBBNBNBBCCNBCNCCNBBNBBNBBBNBBNBBCBHCBHHNHCBBCBHCB
This polymer grows quickly. After step 5, it has length 97; after step 10, it has length 3073. After step 10, B occurs 1749 times, C occurs 298 times, H occurs 161 times, and N occurs 865 times; taking the quantity of the most common element (B, 1749) and subtracting the quantity of the least common element (H, 161) produces 1749 - 161 = 1588.
Apply 10 steps of pair insertion to the polymer template and find the most and least common elements in the result. What do you get if you take the quantity of the most common element and subtract the quantity of the least common element?
Your puzzle answer was 2967.
*/
// minMax10 answers part one: after 10 insertion steps, the difference
// between the most and least common element counts.
func (e *extendedPolymerization) minMax10(input []string) int64 {
	template, rules := e.parseInput(input)
	return e.minMaxDifference(template, rules, 10)
}
/*
The resulting polymer isn't nearly strong enough to reinforce the submarine. You'll need to run more steps of the pair insertion process; a total of 40 steps should do it.
In the above example, the most common element is B (occurring 2192039569602 times) and the least common element is H (occurring 3849876073 times); subtracting these produces 2188189693529.
Apply 40 steps of pair insertion to the polymer template and find the most and least common elements in the result. What do you get if you take the quantity of the most common element and subtract the quantity of the least common element?
*/
// minMax40 answers part two: the same difference after 40 insertion steps.
func (e *extendedPolymerization) minMax40(input []string) int64 {
	template, rules := e.parseInput(input)
	return e.minMaxDifference(template, rules, 40)
}
// minMaxDifference runs the pair-insertion process for the given number of
// iterations and returns max - min over the per-element occurrence counts.
// Pairs are tracked as counts (not an explicit string) so the polymer's
// exponential growth stays tractable.
func (e *extendedPolymerization) minMaxDifference(template string, insertions map[string]string, iterations int) int64 {
	pairs := e.addTemplateCounts(template)
	for step := 0; step < iterations; step++ {
		pairs = e.growPolymer(pairs, insertions)
	}
	counts := e.charCount(pairs, template[len(template)-1])
	lo, hi := e.minMax(counts)
	return hi - lo
}
// addTemplateCounts tallies every adjacent two-character pair in template.
func (e *extendedPolymerization) addTemplateCounts(template string) map[string]int64 {
	counts := make(map[string]int64)
	for i := 1; i < len(template); i++ {
		counts[template[i-1:i+1]]++
	}
	return counts
}
// growPolymer performs one simultaneous insertion step: each pair AB with
// rule AB -> C becomes the two pairs AC and CB, each carrying AB's count.
func (e *extendedPolymerization) growPolymer(pairCount map[string]int64, insertionRules map[string]string) map[string]int64 {
	next := make(map[string]int64, len(pairCount))
	for pair, n := range pairCount {
		inserted := insertionRules[pair]
		next[pair[:1]+inserted] += n
		next[inserted+pair[1:]] += n
	}
	return next
}
// charCount converts pair counts to per-character counts by counting each
// pair's first character, then adds one for the polymer's final character
// (the only character that is never the first of a pair).
func (e *extendedPolymerization) charCount(pairCount map[string]int64, lastChar byte) map[byte]int64 {
	counts := make(map[byte]int64)
	for pair, n := range pairCount {
		counts[pair[0]] += n
	}
	counts[lastChar]++ // correct off by one
	return counts
}
// minMax returns the smallest and largest values stored in count.
// For an empty map it returns (math.MaxInt64, math.MinInt64).
func (e *extendedPolymerization) minMax(count map[byte]int64) (int64, int64) {
	var min int64 = math.MaxInt64
	var max int64 = math.MinInt64
	// Range over values directly instead of re-indexing the map for each
	// comparison (one lookup per entry instead of up to three).
	for _, n := range count {
		if n < min {
			min = n
		}
		if n > max {
			max = n
		}
	}
	return min, max
}
// parseInput splits the puzzle input into the polymer template (first line)
// and the insertion rules ("AB -> C" lines after the blank separator line).
func (e *extendedPolymerization) parseInput(input []string) (template string, insertions map[string]string) {
	insertions = make(map[string]string)
	template = input[0]
	// input[1] is the blank line separating template from rules.
	for _, line := range input[2:] {
		insertionStrings := strings.Split(line, " -> ")
		insertions[insertionStrings[0]] = insertionStrings[1]
	}
	return
} | internal/advent/day14.go | 0.785432 | 0.683053 | day14.go | starcoder |
package hard100
// MedianFinderOfZeroToHundred counts occurrences of integers in [0, 100]
// using a fixed 101-slot histogram.
type MedianFinderOfZeroToHundred struct {
	Arr []int  // Arr[v] is the number of times value v has been added
	Length int // histogram size (set to 101 by the constructor)
	n int      // total number of values added
}
// NewMedianFinderOfZeroToHunder returns an empty finder with a 101-slot
// histogram.
// NOTE(review): the exported name is missing the final "d" of "Hundred";
// renaming would break callers, so it is only flagged here.
func NewMedianFinderOfZeroToHunder() *MedianFinderOfZeroToHundred {
	m := new(MedianFinderOfZeroToHundred)
	m.Arr = make([]int, 101)
	m.Length = 101
	return m
}
// FindKth is intended to return the k-th smallest value added, or -1 when
// it cannot be determined.
// NOTE(review): the loop only scans histogram slots 0..k-1 and compares the
// running occurrence count against k itself ("cnt > k"); slots >= k are
// never examined, so values larger than k can never be returned. Verify the
// intended order-statistic semantics before relying on this.
func (m *MedianFinderOfZeroToHundred) FindKth(k int) int {
	cnt := 0
	for i := 0; i < k; i++ {
		cnt += m.Arr[i]
		if cnt > k {
			return i
		}
	}
	return -1
}
// addNum records one occurrence of num.
// num is expected to be in [0, 100]; out-of-range values panic on the
// histogram index.
func (m *MedianFinderOfZeroToHundred) addNum(num int) {
	m.Arr[num]++
	m.n++
}
// getNum returns how many values have been added so far.
func (m *MedianFinderOfZeroToHundred) getNum() int {
	return m.n
}
// FindMedian is intended to return the median of the added values.
// NOTE(review): Arr holds occurrence counts, yet it is indexed directly
// with n/2 — this returns counts, not values. A median over a histogram
// should walk cumulative counts (e.g. via an order-statistic scan).
// Confirm intended behavior before relying on this.
func (m *MedianFinderOfZeroToHundred) FindMedian() int {
	if m.n%2 == 1 {
		return m.Arr[m.n/2+1]
	}
	return (m.Arr[m.n/2] + m.Arr[m.n/2+1]) / 2
}
// MedianArray stores values (used for out-of-range outliers) in a slice
// intended to be kept in ascending order.
type MedianArray struct {
	Arr []int // stored values
	n int     // NOTE(review): never read; getNum uses len(Arr) instead
}
// NewMedianArray returns an empty MedianArray.
func NewMedianArray() *MedianArray {
	return &MedianArray{
		Arr: make([]int, 0),
		n:   0,
	}
}
// FindKth returns the k-th smallest stored value (1-based), or -1 when
// fewer than k values are stored.
func (m *MedianArray) FindKth(k int) int {
	if len(m.Arr) < k {
		return -1
	}
	return m.Arr[k-1]
}
// addNum inserts num into Arr, keeping Arr sorted in ascending order.
// The original body located the insertion point and then executed
// m.Arr[i]++ — incrementing an existing element instead of inserting num,
// and indexing Arr[-1] (a panic) when num was smaller than every stored
// value. This version performs the intended sorted insertion.
func (m *MedianArray) addNum(num int) {
	i := len(m.Arr) - 1
	for i >= 0 && m.Arr[i] > num {
		i--
	}
	// Insert num at position i+1, shifting the tail right by one.
	m.Arr = append(m.Arr, 0)
	copy(m.Arr[i+2:], m.Arr[i+1:])
	m.Arr[i+1] = num
}
// getNum returns how many values are stored.
// The n field is not consulted; the slice length is authoritative.
func (m *MedianArray) getNum() int {
	return len(m.Arr)
}
// Median99Percent finds medians over a stream where most values fall in
// [0, 100]: in-range values go into a fixed histogram (Middle), outliers
// into sorted arrays (Left for negatives, Right for values above 100).
type Median99Percent struct {
	Left *MedianArray
	Right *MedianArray
	Middle *MedianFinderOfZeroToHundred
}
// NewMedian99Percent returns an empty three-bucket median finder.
func NewMedian99Percent() *Median99Percent {
	return &Median99Percent{
		Left:   NewMedianArray(),
		Right:  NewMedianArray(),
		Middle: NewMedianFinderOfZeroToHunder(),
	}
}
// addNum routes num to the bucket covering its range: negatives to Left,
// [0, 100] to the histogram, everything above 100 to Right.
func (m99 *Median99Percent) addNum(num int) {
	switch {
	case num < 0:
		m99.Left.addNum(num)
	case num <= 100:
		m99.Middle.addNum(num)
	default:
		m99.Right.addNum(num)
	}
}
// findMedian is intended to return the median across all three buckets by
// locating the middle rank(s) in the Left/Middle/Right ordering.
// NOTE(review): the parity test looks inverted — (n & 1) != 1 holds for
// EVEN n, yet that branch returns a single element (odd-count logic) while
// the other branch averages two elements. Also see the note on the final
// else branch, and the unreachable trailing return -1. Verify intended
// behavior; FindKth on the histogram bucket is itself suspect (see above).
func (m99 *Median99Percent) findMedian() int {
	middleSize := m99.Middle.getNum()
	leftSize := m99.Left.getNum()
	rightSize := m99.Right.getNum()
	n := middleSize + leftSize + rightSize
	if (n & 1) != 1 {
		k := n/2 + 1
		if k > leftSize && k < leftSize+middleSize {
			return m99.Middle.FindKth(k - leftSize)
		} else if k <= leftSize {
			return m99.Left.FindKth(k)
		} else {
			return m99.Right.FindKth(k - leftSize - middleSize)
		}
	} else {
		k := n / 2
		if k > leftSize && k+1 < leftSize+middleSize {
			k = k - leftSize
			return (m99.Middle.FindKth(k) + m99.Middle.FindKth(k+1)) / 2.0
		} else if k+1 <= leftSize {
			return (m99.Left.FindKth(k) + m99.Left.FindKth(k+1)) / 2.0
		} else if k == leftSize && middleSize > 0 {
			return (m99.Left.FindKth(k) + m99.Middle.FindKth(1)) / 2.0
		} else if k == leftSize && middleSize == 0 {
			return (m99.Left.FindKth(k) + m99.Right.FindKth(1)) / 2.0
		} else if k == leftSize+middleSize {
			return (m99.Middle.FindKth(k) + m99.Right.FindKth(1)) / 2
		} else {
			// NOTE(review): subtracting rightSize here (rather than
			// middleSize) looks wrong for ranks inside the Right bucket.
			k = k - leftSize - rightSize
			return (m99.Right.FindKth(k) + m99.Right.FindKth(k+1)) / 2
		}
	}
	return -1
} | hard100/find_median_from_data_stream_initnite.go | 0.569733 | 0.516839 | find_median_from_data_stream_initnite.go | starcoder |
package gohotdraw
import (
_ "fmt"
"math"
)
// Dimension is a width/height pair.
type Dimension struct {
	Width int
	Height int
}
// Point is an integer x/y coordinate.
type Point struct {
	X int
	Y int
}
// Rectangle is an axis-aligned rectangle. A negative Width or Height marks
// the rectangle as non-existent for the geometric operations below.
type Rectangle struct {
	X int
	Y int
	Width int
	Height int
}
// NewRectangle returns the zero-size rectangle at the origin.
func NewRectangle() *Rectangle {
	return &Rectangle{}
}

// NewRectangleFromRect returns a copy of r.
func NewRectangleFromRect(r *Rectangle) *Rectangle {
	c := *r
	return &c
}

// NewRectangleFromPoint returns a zero-size rectangle located at p.
func NewRectangleFromPoint(p *Point) *Rectangle {
	return &Rectangle{X: p.X, Y: p.Y}
}

// NewRectangleFromPoints returns the smallest rectangle containing both
// p1 and p2.
func NewRectangleFromPoints(p1, p2 *Point) *Rectangle {
	r := NewRectangleFromPoint(p1)
	r.AddPoint(p2)
	return r
}
// AddPoint enlarges the rectangle so that it also contains p.
func (this *Rectangle) AddPoint(p *Point) {
	this.Add(p.X, p.Y)
}
// Add enlarges the rectangle so that it also contains the point
// (newX, newY). The resulting width/height are clamped to math.MaxInt32.
func (this *Rectangle) Add(newX, newY int) {
	// A negative width or height marks a non-existent rectangle; it
	// collapses to a zero-size rectangle at the new point.
	if (this.Width | this.Height) < 0 {
		this.SetBounds(newX, newY, 0, 0)
		return
	}
	x1 := this.X
	y1 := this.Y
	x2 := this.Width
	y2 := this.Height
	// Convert width/height into far-corner coordinates.
	x2 += x1
	y2 += y1
	if x1 > newX {
		x1 = newX
	}
	if y1 > newY {
		y1 = newY
	}
	if x2 < newX {
		x2 = newX
	}
	if y2 < newY {
		y2 = newY
	}
	// Convert back to width/height and clamp.
	x2 -= x1
	y2 -= y1
	if x2 > math.MaxInt32 {
		x2 = math.MaxInt32
	}
	if y2 > math.MaxInt32 {
		y2 = math.MaxInt32
	}
	this.SetBounds(x1, y1, x2, y2)
}
// SetBounds overwrites the rectangle's position and size.
func (this *Rectangle) SetBounds(x, y, width, height int) {
	this.X = x
	this.Y = y
	this.Width = width
	this.Height = height
}
// Translate moves the rectangle by (dx, dy), saturating coordinates at the
// int32 range limits. When a coordinate is clamped, the size is adjusted so
// that the opposite edge keeps its translated position where possible.
// Overflow is detected by comparing the moved value against the old one:
// adding a negative delta must not increase the value, and vice versa.
func (this *Rectangle) Translate(dx, dy int) {
	oldv := this.X
	newv := oldv + dx
	if dx < 0 {
		// moving leftward
		if newv > oldv {
			// negative overflow
			// Only adjust width if it was valid (>= 0).
			if this.Width >= 0 {
				// The right edge is now conceptually at
				// newv+width, but we may move newv to prevent
				// overflow. But we want the right edge to
				// remain at its new location in spite of the
				// clipping. Think of the following adjustment
				// conceptually the same as:
				// width += newv newv = MIN_VALUE width -= newv
				this.Width += newv - math.MinInt32
				// width may go negative if the right edge went past
				// MIN_VALUE, but it cannot overflow since it cannot
				// have moved more than MIN_VALUE and any non-negative
				// number + MIN_VALUE does not overflow.
			}
			newv = math.MinInt32
		}
	} else {
		// moving rightward (or staying still)
		if newv < oldv {
			// positive overflow
			if this.Width >= 0 {
				// Conceptually the same as:
				// width += newv newv = MAX_VALUE width -= newv
				this.Width += newv - math.MaxInt32
				// With large widths and large displacements
				// we may overflow so we need to check it.
				if this.Width < 0 {
					this.Width = math.MaxInt32
				}
			}
			newv = math.MaxInt32
		}
	}
	this.X = newv
	// Same procedure for the vertical axis.
	oldv = this.Y
	newv = oldv + dy
	if dy < 0 {
		// moving upward
		if newv > oldv {
			// negative overflow
			if this.Height >= 0 {
				this.Height += newv - math.MinInt32
				// See above comment about no overflow in this case
			}
			newv = math.MinInt32
		}
	} else {
		// moving downward (or staying still)
		if newv < oldv {
			// positive overflow
			if this.Height >= 0 {
				this.Height += newv - math.MaxInt32
				if this.Height < 0 {
					this.Height = math.MaxInt32
				}
			}
			newv = math.MaxInt32
		}
	}
	this.Y = newv
}
// Grow expands the rectangle by h on the left and right and by v on the top
// and bottom (negative values shrink it). Corner coordinates and the
// resulting sizes are clamped to the int32 range; a rectangle that shrinks
// past zero keeps a negative dimension, marking it non-existent.
func (this *Rectangle) Grow(h, v int) {
	x0 := this.X
	y0 := this.Y
	x1 := this.Width
	y1 := this.Height
	// Work on corner coordinates rather than width/height.
	x1 += x0
	y1 += y0
	x0 -= h
	y0 -= v
	x1 += h
	y1 += v
	if x1 < x0 {
		// Non-existant in X direction
		// Final width must remain negative so subtract x0 before
		// it is clipped so that we avoid the risk that the clipping
		// of x0 will reverse the ordering of x0 and x1.
		x1 -= x0
		if x1 < math.MinInt32 {
			x1 = math.MinInt32
		}
		if x0 < math.MinInt32 {
			x0 = math.MinInt32
		} else if x0 > math.MaxInt32 {
			x0 = math.MaxInt32
		}
	} else { // (x1 >= x0)
		// Clip x0 before we subtract it from x1 in case the clipping
		// affects the representable area of the rectangle.
		if x0 < math.MinInt32 {
			x0 = math.MinInt32
		} else if x0 > math.MaxInt32 {
			x0 = math.MaxInt32
		}
		x1 -= x0
		// The only way x1 can be negative now is if we clipped
		// x0 against MIN and x1 is less than MIN - in which case
		// we want to leave the width negative since the result
		// did not intersect the representable area.
		if x1 < math.MinInt32 {
			x1 = math.MinInt32
		} else if x1 > math.MaxInt32 {
			x1 = math.MaxInt32
		}
	}
	// Same procedure for the vertical axis.
	if y1 < y0 {
		// Non-existant in Y direction
		y1 -= y0
		if y1 < math.MinInt32 {
			y1 = math.MinInt32
		}
		if y0 < math.MinInt32 {
			y0 = math.MinInt32
		} else if y0 > math.MaxInt32 {
			y0 = math.MaxInt32
		}
	} else { // (y1 >= y0)
		if y0 < math.MinInt32 {
			y0 = math.MinInt32
		} else if y0 > math.MaxInt32 {
			y0 = math.MaxInt32
		}
		y1 -= y0
		if y1 < math.MinInt32 {
			y1 = math.MinInt32
		} else if y1 > math.MaxInt32 {
			y1 = math.MaxInt32
		}
	}
	this.SetBounds(x0, y0, x1, y1)
}
// Union returns a new rectangle that covers both this rectangle and r.
// A rectangle with any negative dimension is treated as non-existent,
// in which case a copy of the other rectangle is the answer (if both
// are non-existent, any non-existent rectangle is a valid result).
func (this *Rectangle) Union(r *Rectangle) *Rectangle {
	w1 := this.Width
	h1 := this.Height
	if (w1 | h1) < 0 {
		// This rectangle is non-existent. If r exists it is the
		// answer; if r is also non-existent, returning r still meets
		// the contract, so either way a copy of r works.
		return NewRectangleFromRect(r)
	}
	w2 := r.Width
	h2 := r.Height
	if (w2 | h2) < 0 {
		// r is non-existent; this rectangle is the answer.
		return NewRectangleFromRect(this)
	}
	left := this.X
	top := this.Y
	right := left + w1
	bottom := top + h1
	if r.X < left {
		left = r.X
	}
	if r.Y < top {
		top = r.Y
	}
	if edge := r.X + w2; edge > right {
		right = edge
	}
	if edge := r.Y + h2; edge > bottom {
		bottom = edge
	}
	// The resulting extents can never underflow, since both source
	// rectangles were already proven non-empty — but they can
	// overflow, so clamp them.
	width := right - left
	height := bottom - top
	if width > math.MaxInt32 {
		width = math.MaxInt32
	}
	if height > math.MaxInt32 {
		height = math.MaxInt32
	}
	return &Rectangle{left, top, width, height}
}
// Contains reports whether the point (X, Y) lies inside this
// rectangle. Edges on the far side are exclusive, and a rectangle
// with a negative or zero dimension contains no points.
func (this *Rectangle) Contains(X, Y int) bool {
	width := this.Width
	height := this.Height
	if (width | height) < 0 {
		// At least one dimension is negative: nothing is contained.
		return false
	}
	x := this.X
	y := this.Y
	// Points left of or above the origin are outside. Zero-sized
	// dimensions are rejected by the edge tests below.
	if X < x || Y < y {
		return false
	}
	right := x + width
	bottom := y + height
	// Each far-edge test accepts the point when the edge wrapped past
	// the integer range (overflow) or lies strictly beyond the point.
	if right >= x && right <= X {
		return false
	}
	return bottom < y || bottom > Y
}
// ContainsRect reports whether rect lies entirely within this
// rectangle. A negative width or height on either rectangle is
// treated as zero. The comparison is done purely in integer
// arithmetic: the previous implementation round-tripped through
// float64 via math.Fmax, which no longer exists in Go 1 (renamed
// math.Max) and loses precision for values beyond 2^53.
func (this *Rectangle) ContainsRect(rect *Rectangle) bool {
	rw := rect.Width
	if rw < 0 {
		rw = 0
	}
	rh := rect.Height
	if rh < 0 {
		rh = 0
	}
	tw := this.Width
	if tw < 0 {
		tw = 0
	}
	th := this.Height
	if th < 0 {
		th = 0
	}
	return rect.X >= this.X && rect.Y >= this.Y &&
		rect.X+rw <= this.X+tw &&
		rect.Y+rh <= this.Y+th
}
// ContainsPoint reports whether the given point lies inside this
// rectangle, delegating to Contains for the actual test.
func (this *Rectangle) ContainsPoint(point *Point) bool {
	px, py := point.X, point.Y
	return this.Contains(px, py)
}
func (this *Rectangle) IsEmpty() bool {
return (this.Width <= 0) || (this.Height <= 0)
} | code/gohotdraw-master/util.go | 0.575946 | 0.58166 | util.go | starcoder |
package accumcolor
import "image/color"
// An AccumNRGBA is a color.Color that supports accumulation of
// non-alpha-premultiplied RGBA color values. An invariant maintained
// by all methods is that either all fields are zero or each of R, G,
// B, and A divided by Tally produces a value in the range [0, 255].
type AccumNRGBA struct {
	R uint64 // sum of accumulated red components
	G uint64 // sum of accumulated green components
	B uint64 // sum of accumulated blue components
	A uint64 // sum of accumulated alpha components
	Tally uint64 // count of accumulated samples (multiplied by any Scale calls)
}
// Valid returns true if and only if an AccumNRGBA is valid: either
// every field is zero, or each channel divided by Tally lies in
// [0, 255].
func (c AccumNRGBA) Valid() bool {
	if c.Tally == 0 {
		// A zero tally is only allowed for the all-zero accumulator.
		return c == (AccumNRGBA{})
	}
	// With a nonzero tally, every averaged channel must fit in a byte.
	for _, channel := range [4]uint64{c.R, c.G, c.B, c.A} {
		if channel/c.Tally > 255 {
			return false
		}
	}
	return true
}
// RGBA converts an AccumNRGBA to alpha-premultiplied colors,
// satisfying the color.Color interface. An empty accumulator
// (Tally == 0) yields fully transparent black.
func (c AccumNRGBA) RGBA() (r, g, b, a uint32) {
	if c.Tally == 0 {
		return 0, 0, 0, 0
	}
	// Average each channel, then let color.NRGBA perform the
	// alpha-premultiplication.
	avg := color.NRGBA{
		R: uint8(c.R / c.Tally),
		G: uint8(c.G / c.Tally),
		B: uint8(c.B / c.Tally),
		A: uint8(c.A / c.Tally),
	}
	return avg.RGBA()
}
// accumNRGBAModel is used to define a color model for AccumNRGBA.
// It passes existing AccumNRGBA values through unchanged and wraps
// any other color as a single-sample accumulator.
func accumNRGBAModel(c color.Color) color.Color {
	switch v := c.(type) {
	case AccumNRGBA:
		// Already in accumulator form: nothing to convert.
		return v
	}
	// Go through color.NRGBA so the channels are
	// non-alpha-premultiplied, then record it with a tally of one.
	n := color.NRGBAModel.Convert(c).(color.NRGBA)
	return AccumNRGBA{
		R:     uint64(n.R),
		G:     uint64(n.G),
		B:     uint64(n.B),
		A:     uint64(n.A),
		Tally: 1,
	}
}
// AccumNRGBAModel converts any color.Color to an AccumNRGBA color.
// A value that is already an AccumNRGBA is returned unchanged; any
// other color becomes a single-sample accumulator (Tally == 1).
var AccumNRGBAModel = color.ModelFunc(accumNRGBAModel)
// Add accumulates color: clr is converted to an AccumNRGBA and its
// channel sums and tally are folded into the receiver.
func (c *AccumNRGBA) Add(clr color.Color) {
	o := AccumNRGBAModel.Convert(clr).(AccumNRGBA)
	// Component-wise summation preserves the averaged-value invariant
	// because both operands satisfy it.
	c.R, c.G, c.B, c.A = c.R+o.R, c.G+o.G, c.B+o.B, c.A+o.A
	c.Tally += o.Tally
}
// Scale multiplies all components of an AccumNRGBA by a given value.
// Because Tally scales along with the channel sums, the effective
// (averaged) color is unchanged; this is useful for performing
// weighted averages.
func (c *AccumNRGBA) Scale(w uint64) {
	for _, field := range []*uint64{&c.R, &c.G, &c.B, &c.A, &c.Tally} {
		*field *= w
	}
}
// NRGBA averages the accumulated color of an AccumNRGBA to produce an ordinary
// color.NRGBA.
func (c AccumNRGBA) NRGBA() color.NRGBA {
if c.Tally == 0 {
return color.NRGBA{}
}
return color.NRGBA{
R: uint8(c.R / c.Tally),
G: uint8(c.G / c.Tally),
B: uint8(c.B / c.Tally),
A: uint8(c.A / c.Tally),
}
} | accumcolor/accumcolor.go | 0.808294 | 0.665781 | accumcolor.go | starcoder |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.