code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
package light
import (
"time"
"github.com/kasworld/h4o/_examples/app"
"github.com/kasworld/h4o/eventtype"
"github.com/kasworld/h4o/geometry"
"github.com/kasworld/h4o/graphic"
"github.com/kasworld/h4o/material"
"github.com/kasworld/h4o/math32"
"github.com/kasworld/h4o/util/helper"
"math"
"github.com/kasworld/h4o/_examples/util"
)
func init() {
app.DemoMap["light.point"] = &PointLight{}
}
// PointLight is a demo showing two point lights animated inside a box.
type PointLight struct {
	vl *util.PointLightMesh // light moved vertically in Update
	hl *util.PointLightMesh // light moved horizontally in Update
	count float64 // animation phase, advanced each frame
}
// Start is called once at the start of the demo. It builds the scene: two
// spheres inside an inverted box, an axes helper, two point lights, and
// (when a control folder exists) checkboxes toggling each light's visibility.
func (t *PointLight) Start(a *app.App) {
	scene := a.Scene()

	// Two spheres sharing one geometry.
	sphereGeom := geometry.NewSphere(0.5, 32, 16)
	blueSphere := graphic.NewMesh(sphereGeom, material.NewStandard(&math32.Color{0, 0, 0.6}))
	blueSphere.SetPositionX(1)
	scene.Add(blueSphere)
	greenSphere := graphic.NewMesh(sphereGeom, material.NewStandard(&math32.Color{0, 0.5, 0.0}))
	greenSphere.SetPositionX(-1)
	scene.Add(greenSphere)

	// Surrounding box rendered from the inside (back faces only).
	boxMat := material.NewStandard(&math32.Color{1, 1, 1})
	boxMat.SetSide(material.SideBack)
	scene.Add(graphic.NewMesh(geometry.NewCube(4), boxMat))

	// Axes helper at the origin.
	scene.Add(helper.NewAxes(1))

	// The two animated point lights.
	t.vl = util.NewPointLightMesh(&math32.Color{1, 1, 1})
	scene.Add(t.vl.Mesh)
	t.hl = util.NewPointLightMesh(&math32.Color{1, 1, 1})
	scene.Add(t.hl.Mesh)

	// Visibility toggles; skipped when no control folder is present.
	if a.ControlFolder() == nil {
		return
	}
	group := a.ControlFolder().AddGroup("Show lights")
	horizontal := group.AddCheckBox("Horizontal").SetValue(t.hl.Mesh.Visible())
	horizontal.Subscribe(eventtype.OnChange, func(evname eventtype.EventType, ev interface{}) {
		t.hl.Mesh.SetVisible(!t.hl.Mesh.Visible())
	})
	vertical := group.AddCheckBox("Vertical").SetValue(t.vl.Mesh.Visible())
	vertical.Subscribe(eventtype.OnChange, func(evname eventtype.EventType, ev interface{}) {
		t.vl.Mesh.SetVisible(!t.vl.Mesh.Visible())
	})
}
// Update is called every frame. It moves the vertical light along Y and the
// horizontal light along X on sine paths, then advances the animation phase.
func (t *PointLight) Update(a *app.App, deltaTime time.Duration) {
	t.vl.SetPosition(0, 1.5*float32(math.Sin(t.count)), 0)
	t.hl.SetPosition(1.5*float32(math.Sin(t.count)), 1, 0)
	// Advance the phase by wall-clock time so the animation speed is
	// frame-rate independent (0.02 rad/frame at 60 FPS ≈ 1.2 rad/s).
	t.count += 1.2 * deltaTime.Seconds()
}
// Cleanup is called once at the end of the demo.
func (t *PointLight) Cleanup(a *app.App) {} | _examples/demos/light/point.go | 0.596903 | 0.458955 | point.go | starcoder |
package heatmap
import (
"fmt"
"image"
"image/color"
"image/png"
"math"
"os"
"entityDetection/detection/centroid"
)
// Heatmap stores a heatmap image with its maximum point count
type Heatmap struct {
	// maxCount is the highest centroid count seen at any single pixel;
	// it normalizes the color gradient in pixelCountToColor.
	maxCount int
	// heatmapImage is the destination image. It may be nil until dimensions
	// are derived from the data in GenerateHeatmap.
	heatmapImage *image.RGBA
}
// NewHeatmap creates a Heatmap backed by an image of the given dimensions.
// The heatmap contents themselves are not computed until GenerateHeatmap.
func NewHeatmap(imageWidth int, imageHeight int) *Heatmap {
	return &Heatmap{
		heatmapImage: image.NewRGBA(image.Rect(0, 0, imageWidth, imageHeight)),
	}
}
// getRect scans the centroid list and returns the largest X and Y
// coordinates seen, in that order.
func getRect(data []centroid.Centroid) (int, int) {
	maxX, maxY := 0, 0
	for _, point := range data {
		if point.X > maxX {
			maxX = point.X
		}
		if point.Y > maxY {
			maxY = point.Y
		}
	}
	return maxX, maxY
}
// GenerateHeatmap computes the heatmap from the centroid data and writes it to
// destination as a PNG. It returns the destination path on success and "" on
// failure. If the heatmap has no preset image dimensions, it derives them from
// the maximum X and Y coordinates in the data.
func (h *Heatmap) GenerateHeatmap(data []centroid.Centroid, destination string) string {
	if h.heatmapImage == nil {
		// getRect returns maximum coordinates, so the image must be one pixel
		// wider/taller for those coordinates to be valid indices.
		x, y := getRect(data)
		h.heatmapImage = image.NewRGBA(image.Rect(0, 0, x+1, y+1))
	}
	h.generateHeatmapImage(h.getPixelCounts(data))

	// Output heatmap to PNG file.
	heatmapFile, err := os.Create(destination)
	if err != nil {
		fmt.Println("Error:", err)
		return ""
	}
	defer heatmapFile.Close()
	// Report encoding failures instead of silently returning a bad path.
	if err := png.Encode(heatmapFile, h.heatmapImage); err != nil {
		fmt.Println("Error:", err)
		return ""
	}
	return destination
}
// getPixelCounts builds a 2D grid matching the image dimensions and counts
// the centroids falling on each pixel, tracking the maximum count seen in
// h.maxCount. Centroids outside the image bounds are skipped rather than
// panicking with an index error.
func (h *Heatmap) getPixelCounts(locations []centroid.Centroid) [][]int {
	width := h.heatmapImage.Rect.Size().X
	height := h.heatmapImage.Rect.Size().Y
	pixelCounts := make([][]int, width)
	for i := range pixelCounts {
		pixelCounts[i] = make([]int, height)
	}
	for _, loc := range locations {
		// Skip out-of-range centroids instead of indexing past the grid.
		if loc.X < 0 || loc.X >= width || loc.Y < 0 || loc.Y >= height {
			continue
		}
		pixelCounts[loc.X][loc.Y]++
		if pixelCounts[loc.X][loc.Y] > h.maxCount {
			h.maxCount = pixelCounts[loc.X][loc.Y]
		}
	}
	return pixelCounts
}
// generateHeatmapImage colors every pixel of the image from its count in the
// pixelCounts grid.
func (h *Heatmap) generateHeatmapImage(pixelCounts [][]int) {
	for x := range pixelCounts {
		col := pixelCounts[x]
		for y := range col {
			h.heatmapImage.Set(x, y, h.pixelCountToColor(col[y]))
		}
	}
}
// pixelCountToColor maps the centroid count at each pixel to a corresponding color and intensity.
func (h *Heatmap) pixelCountToColor(count int) color.RGBA {
r, g, b := 0, 0, 0
if count > 0 {
density := int(math.Round(float64(count) / float64(h.maxCount+1) * 511))
if density < 256 {
r = density
g = 255
} else {
r = 255
g = 255 - (density - 256)
}
}
return color.RGBA{R: uint8(r), G: uint8(g), B: uint8(b), A: 255}
} | detection/heatmap/heatmap.go | 0.855248 | 0.610076 | heatmap.go | starcoder |
package filter
import (
"github.com/sonirico/container/types"
"github.com/sonirico/container/utils"
)
// SliceWithInPlaceMutation filters slice by compacting the matching elements
// into the front of the underlying array, which is mutated. When shouldCopy is
// true the compacted result is copied so no reference to the (possibly much
// larger) original array is retained.
// Time complexity: O(n)
// Space complexity: O(1) | O(n)
func SliceWithInPlaceMutation[T types.Any](slice []T, shouldCopy bool, fn func(T) bool) []T {
	n := 0
	for _, v := range slice {
		if fn(v) {
			slice[n] = v
			n++
		}
	}
	kept := slice[:n]
	if shouldCopy {
		return utils.SliceCopy(kept)
	}
	return kept
}
// SliceWithAppend filters the given array by creating a new one while filling
// it with `append`. Beware of the fact that the longer the filtered result may
// be, the more unused capacity the grown array may carry.
// Time complexity: O(n)
// Space complexity: O(n)
func SliceWithAppend[T types.Any](slice []T, fn func(T) bool) []T {
	// r stays nil until the first match, so an all-rejected input returns nil.
	var r []T
	for _, x := range slice {
		if fn(x) {
			r = append(r, x)
		}
	}
	return r
}
// SliceWithAppendCopy behaves as SliceWithAppend but copies the filtered
// result so the returned slice carries no unused capacity from append growth.
// Time complexity: O(n)
// Space complexity: O(n)
func SliceWithAppendCopy[T types.Any](slice []T, fn func(T) bool) []T {
	var r []T
	for _, x := range slice {
		if fn(x) {
			r = append(r, x)
		}
	}
	return utils.SliceCopy(r)
}
// SliceWithDoubleFor filters the slice without mutating it, allocating exactly
// as much memory as the result needs. It pays for that by walking the slice
// twice: a first pass counts the matches so the result can be sized exactly.
// Time complexity: O(2n)
// Space complexity: O(n)
func SliceWithDoubleFor[T types.Any](slice []T, fn func(T) bool) []T {
	matches := 0
	for _, x := range slice {
		if fn(x) {
			matches++
		}
	}
	r := make([]T, 0, matches)
	for _, x := range slice {
		if fn(x) {
			r = append(r, x)
		}
	}
	return r
}
// Slice defaults to filtering the given array by applying in-place mutation as SliceWithInPlaceMutation.
func Slice[T types.Any](data []T, fn func(T) bool) []T {
return SliceWithInPlaceMutation(data, false, fn)
} | filter/slices.go | 0.765944 | 0.563738 | slices.go | starcoder |
package timeutil
import (
"time"
)
// IsGreaterThan compares two times and returns true if the left
// time is greater than the right time.
func IsGreaterThan(timeLeft time.Time, timeRight time.Time, orEqual bool) bool {
if timeLeft.After(timeRight) {
return true
} else if orEqual && timeLeft.Equal(timeRight) {
return true
}
return false
}
// IsLessThan compares two times and returns true if the left
// time is less than the right time.
func IsLessThan(timeLeft time.Time, timeRight time.Time, orEqual bool) bool {
if timeLeft.Before(timeRight) {
return true
} else if orEqual && timeLeft.Equal(timeRight) {
return true
}
return false
}
func TimeWithin(this, beg, end time.Time, eqBeg, eqEnd bool) bool {
return IsGreaterThan(this, beg, eqBeg) && IsLessThan(this, end, eqEnd)
}
// MinTime returns minTime if time in question is less than min time.
func MinTime(t, min time.Time) time.Time {
if IsLessThan(t, min, false) {
return min
}
return t
}
// MaxTime returns maxTime if time in question is greater than max time.
func MaxTime(t, max time.Time) time.Time {
if IsGreaterThan(t, max, false) {
return max
}
return t
}
// GreaterTime returns the greater of two times.
func GreaterTime(t1, t2 time.Time) time.Time {
if IsGreaterThan(t1, t2, false) {
return t1
}
return t2
}
// LesserTime returns the lesser of two times.
func LesserTime(t1, t2 time.Time) time.Time {
if IsLessThan(t1, t2, false) {
return t1
}
return t2
}
// MinMax takes two times and returns the earlier time first.
func MinMax(min, max time.Time) (time.Time, time.Time) {
if IsGreaterThan(min, max, false) {
return max, min
}
return min, max
}
// SliceMinMax returns the min and max times of a time slice.
func SliceMinMax(times []time.Time) (time.Time, time.Time) {
min := TimeMinRFC3339()
max := TimeMinRFC3339()
for i, t := range times {
if i == 0 {
min = t
max = t
} else if min.After(t) {
min = t
} else if max.Before(t) {
max = t
}
}
return min, max
} | time/timeutil/compare.go | 0.850562 | 0.556882 | compare.go | starcoder |
package msxsdk
import (
"encoding/json"
)
// SiteLocation struct for SiteLocation: the geographic coordinates of a site.
type SiteLocation struct {
	// Latitude is the site's latitude coordinate (required by the API).
	Latitude float64 `json:"latitude"`
	// Longitude is the site's longitude coordinate (required by the API).
	Longitude float64 `json:"longitude"`
	// AdditionalProperties collects undeclared JSON fields captured during unmarshalling.
	AdditionalProperties map[string]interface{}
}
// _SiteLocation mirrors SiteLocation (without its methods) so UnmarshalJSON
// can decode into it without recursing into itself.
type _SiteLocation SiteLocation
// NewSiteLocation instantiates a new SiteLocation object with the properties
// required by the API set; the argument list will change if the set of
// required properties changes.
func NewSiteLocation(latitude float64, longitude float64) *SiteLocation {
	return &SiteLocation{
		Latitude:  latitude,
		Longitude: longitude,
	}
}
// NewSiteLocationWithDefaults instantiates a SiteLocation with only defaulted
// properties assigned; properties required by the API are left at their zero
// values and must be set by the caller.
func NewSiteLocationWithDefaults() *SiteLocation {
	return &SiteLocation{}
}
// GetLatitude returns the Latitude field value.
// A nil receiver yields the float64 zero value.
func (o *SiteLocation) GetLatitude() float64 {
	if o == nil {
		var ret float64
		return ret
	}
	return o.Latitude
}
// GetLatitudeOk returns a tuple with the Latitude field value
// and a boolean to check if the value has been set.
func (o *SiteLocation) GetLatitudeOk() (*float64, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Latitude, true
}
// SetLatitude sets the Latitude field to v.
func (o *SiteLocation) SetLatitude(v float64) {
	o.Latitude = v
}
// GetLongitude returns the Longitude field value.
// A nil receiver yields the float64 zero value.
func (o *SiteLocation) GetLongitude() float64 {
	if o == nil {
		var ret float64
		return ret
	}
	return o.Longitude
}
// GetLongitudeOk returns a tuple with the Longitude field value
// and a boolean to check if the value has been set.
func (o *SiteLocation) GetLongitudeOk() (*float64, bool) {
	if o == nil {
		return nil, false
	}
	return &o.Longitude, true
}
// SetLongitude sets the Longitude field to v.
func (o *SiteLocation) SetLongitude(v float64) {
	o.Longitude = v
}
// MarshalJSON serializes the declared fields plus any AdditionalProperties
// into a single JSON object. An additional property with the same key as a
// declared field overwrites it, matching the map-merge order below.
func (o SiteLocation) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"latitude":  o.Latitude,
		"longitude": o.Longitude,
	}
	for key, value := range o.AdditionalProperties {
		toSerialize[key] = value
	}
	return json.Marshal(toSerialize)
}
// UnmarshalJSON decodes the declared fields into o and captures any extra
// JSON keys in AdditionalProperties. Unlike the generated original, a failure
// to decode the declared fields is returned immediately instead of being
// masked by a successful second unmarshal into the properties map.
func (o *SiteLocation) UnmarshalJSON(bytes []byte) (err error) {
	varSiteLocation := _SiteLocation{}
	if err = json.Unmarshal(bytes, &varSiteLocation); err != nil {
		return err
	}
	*o = SiteLocation(varSiteLocation)

	additionalProperties := make(map[string]interface{})
	if err = json.Unmarshal(bytes, &additionalProperties); err == nil {
		// Remove the declared fields so only truly unknown keys remain.
		delete(additionalProperties, "latitude")
		delete(additionalProperties, "longitude")
		o.AdditionalProperties = additionalProperties
	}
	return err
}
// NullableSiteLocation wraps a SiteLocation pointer together with an explicit
// "was set" flag, distinguishing an unset value from an explicit JSON null.
type NullableSiteLocation struct {
	value *SiteLocation
	isSet bool
}
// Get returns the wrapped value (nil when unset or explicitly null).
func (v NullableSiteLocation) Get() *SiteLocation {
	return v.value
}
// Set stores val and marks the wrapper as set.
func (v *NullableSiteLocation) Set(val *SiteLocation) {
	v.value = val
	v.isSet = true
}
// IsSet reports whether a value (possibly nil) has been assigned.
func (v NullableSiteLocation) IsSet() bool {
	return v.isSet
}
// Unset clears both the value and the set flag.
func (v *NullableSiteLocation) Unset() {
	v.value = nil
	v.isSet = false
}
// NewNullableSiteLocation returns a wrapper around val already marked as set.
func NewNullableSiteLocation(val *SiteLocation) *NullableSiteLocation {
	return &NullableSiteLocation{value: val, isSet: true}
}
// MarshalJSON encodes the wrapped value; a nil value encodes as JSON null.
func (v NullableSiteLocation) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}
func (v *NullableSiteLocation) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | model_site_location.go | 0.797241 | 0.421254 | model_site_location.go | starcoder |
// Package meter storage metering
package meter
import (
"fmt"
"math"
"strconv"
"strings"
)
// Storage unit constants.
const (
	Byte  = 1
	KByte = Byte << 10
	MByte = KByte << 10
	GByte = MByte << 10
	TByte = GByte << 10
	PByte = TByte << 10
	EByte = PByte << 10
)

// ByteSize is a byte count with helpers for unit conversion and human
// readable formatting.
type ByteSize uint64

// toUnit converts b into the given unit, keeping the fractional part.
func (b ByteSize) toUnit(unit ByteSize) float64 {
	return float64(b/unit) + float64(b%unit)/float64(unit)
}

// Bytes returns the raw byte count.
func (b ByteSize) Bytes() uint64 { return uint64(b) }

// KBytes returns the size in kilobytes.
func (b ByteSize) KBytes() float64 { return b.toUnit(KByte) }

// MBytes returns the size in megabytes.
func (b ByteSize) MBytes() float64 { return b.toUnit(MByte) }

// GBytes returns the size in gigabytes.
func (b ByteSize) GBytes() float64 { return b.toUnit(GByte) }

// TBytes returns the size in terabytes.
func (b ByteSize) TBytes() float64 { return b.toUnit(TByte) }

// PBytes returns the size in petabytes.
func (b ByteSize) PBytes() float64 { return b.toUnit(PByte) }

// EBytes returns the size in exabytes.
func (b ByteSize) EBytes() float64 { return b.toUnit(EByte) }

// String formats the size with two decimals and the largest fitting unit,
// e.g. "1.50KB". Values below 10 are rendered as plain bytes like "5B".
func (b ByteSize) String() string {
	if b < 10 {
		return fmt.Sprintf("%dB", b)
	}
	units := []string{"B", "KB", "MB", "GB", "TB", "PB", "EB"}
	exp := math.Floor(math.Log(float64(b)) / math.Log(1024))
	value := float64(b) / math.Pow(1024, exp)
	return fmt.Sprintf("%0.2f%s", value, units[int(exp)])
}

// HumanSize is an alias for String.
func (b ByteSize) HumanSize() string {
	return b.String()
}

// MarshalText encodes the size in its human readable form.
func (b ByteSize) MarshalText() ([]byte, error) {
	return []byte(b.String()), nil
}
// UnmarshalText parses a human readable size (e.g. "10", "1.5KB", "2 mb",
// "3 gigabytes") into b. The numeric prefix may contain a decimal point; the
// remaining text, trimmed and lowercased, selects the unit. On syntax errors
// b is zeroed, on range errors b saturates at math.MaxUint64, and in both
// cases a *strconv.NumError describing the whole input is returned.
func (b *ByteSize) UnmarshalText(t []byte) error {
	var i int
	// backup for error message
	t0 := t
	// Scan the leading run of digits and decimal points.
	for ; i < len(t); i++ {
		if c := t[i]; !((c >= '0' && c <= '9') || c == '.') {
			if i == 0 {
				// No numeric prefix at all: syntax error.
				*b = 0
				return &strconv.NumError{
					Func: "UnmarshalText",
					Num: string(t0),
					Err: strconv.ErrSyntax,
				}
			}
			break
		}
	}
	val, err := strconv.ParseFloat(string(t[:i]), 64)
	if err != nil {
		return &strconv.NumError{
			Func: "UnmarshalText",
			Num: string(t0),
			Err: err,
		}
	}
	// Map the textual suffix onto a unit multiplier.
	unit := uint64(Byte)
	unitStr := strings.ToLower(strings.TrimSpace(string(t[i:])))
	switch unitStr {
	case "", "b", "byte": // do nothing
	case "k", "kb", "kilo", "kilobyte", "kilobytes":
		unit = KByte
	case "m", "mb", "mega", "megabyte", "megabytes":
		unit = MByte
	case "g", "gb", "giga", "gigabyte", "gigabytes":
		unit = GByte
	case "t", "tb", "tera", "terabyte", "terabytes":
		unit = TByte
	case "p", "pb", "peta", "petabyte", "petabytes":
		unit = PByte
	case "e", "eb", "exa", "exabyte", "exabytes":
		unit = EByte
	default:
		// Unrecognized unit suffix: syntax error.
		*b = 0
		return &strconv.NumError{
			Func: "UnmarshalText",
			Num: string(t0),
			Err: strconv.ErrSyntax,
		}
	}
	// Overflow guard. NOTE(review): this compares only the truncated integer
	// part of val, so a fractional part that pushes the product just past
	// MaxUint64 can slip through — confirm that precision loss is acceptable.
	if uint64(val) > math.MaxUint64/unit {
		*b = ByteSize(math.MaxUint64)
		return &strconv.NumError{
			Func: "UnmarshalText",
			Num: string(t0),
			Err: strconv.ErrRange,
		}
	}
	*b = ByteSize(val * float64(unit))
	return nil
}
// ParseBytes parses a human readable size string (e.g. "1.5GB") into a raw
// byte count. On failure it returns the partially-set value and the error
// produced by ByteSize.UnmarshalText.
func ParseBytes(s string) (uint64, error) {
	var v ByteSize
	err := v.UnmarshalText([]byte(s))
	return v.Bytes(), err
}
// HumanSize human readable string
func HumanSize(bytes uint64) (s string) {
return ByteSize(bytes).HumanSize()
} | meter.go | 0.676513 | 0.473718 | meter.go | starcoder |
package nodeset
import (
"fmt"
"github.com/insolar/insolar/network/consensus/gcpv2/api/member"
"github.com/insolar/insolar/network/consensus/gcpv2/phasebundle/stats"
)
// ConsensusStat classifies how a node's state was assessed during consensus.
type ConsensusStat uint8

const (
	ConsensusStatUnknown ConsensusStat = iota
	ConsensusStatTrusted
	ConsensusStatDoubted
	ConsensusStatMissingThere
	ConsensusStatFraudSuspect
	ConsensusStatFraud

	maxConsensusStat
)

// ConsensusStatMissingHere marks entries missing locally; it shares the value
// of ConsensusStatUnknown.
const ConsensusStatMissingHere = ConsensusStatUnknown

// consensusStatLabels holds the single-character label for each known stat,
// indexed by the stat's numeric value (so it must mirror the const order above).
var consensusStatLabels = [...]string{"U", "T", "D", "Ø", "f", "F"}

// FmtConsensusStat renders a stat value as its short label, falling back to
// the plain numeric value for anything outside the known range.
func FmtConsensusStat(v uint8) string {
	if int(v) < len(consensusStatLabels) {
		return consensusStatLabels[v]
	}
	return fmt.Sprintf("%d", v)
}
// innerStatTable embeds the generic stats table so ConsensusStatTable can
// layer typed accessors on top of it.
type innerStatTable struct {
	stats.StatTable
}
// ConsensusStatTable is a table of ConsensusStat values, one row per node.
type ConsensusStatTable struct {
	innerStatTable
}
// NewConsensusStatTable creates a table sized for nodeCount columns and the
// full range of ConsensusStat values.
func NewConsensusStatTable(nodeCount int) ConsensusStatTable {
	return ConsensusStatTable{innerStatTable{stats.NewStatTable(uint8(maxConsensusStat)-1, nodeCount)}}
}
// NewRow creates a detached row sized to this table's column count; it is not
// added to the table.
func (t *ConsensusStatTable) NewRow() *ConsensusStatRow {
	nr := NewConsensusStatRow(t.ColumnCount())
	return &nr
}
// AddRow appends row to the table and returns its index.
func (t *ConsensusStatTable) AddRow(row *ConsensusStatRow) int {
	return t.innerStatTable.AddRow(&row.Row)
}
// PutRow stores row at the given index.
func (t *ConsensusStatTable) PutRow(rowIndex int, row *ConsensusStatRow) {
	t.innerStatTable.PutRow(rowIndex, &row.Row)
}
// GetRow returns the row at rowIndex wrapped as a ConsensusStatRow, with
// ok=false when the underlying table has no such row.
func (t *ConsensusStatTable) GetRow(rowIndex int) (*ConsensusStatRow, bool) {
	row, ok := t.innerStatTable.GetRow(rowIndex)
	if !ok {
		return nil, false
	}
	return &ConsensusStatRow{innerRow{*row}}, true
}
// GetColumn returns the column at colIndex wrapped as a ConsensusStatColumn.
func (t *ConsensusStatTable) GetColumn(colIndex int) *ConsensusStatColumn {
	return &ConsensusStatColumn{innerStatColumn{t.StatTable.GetColumn(colIndex)}}
}
// AsText renders the table with the given header using the single-character
// stat labels.
func (t *ConsensusStatTable) AsText(header string) string {
	return t.TableFmt(header, FmtConsensusStat)
}
// EqualsTyped reports whether the two tables hold identical stats; a nil
// argument compares unequal.
func (t *ConsensusStatTable) EqualsTyped(o *ConsensusStatTable) bool {
	return o != nil && t.StatTable.Equals(&o.StatTable)
}
// innerStatColumn embeds the generic stats column so ConsensusStatColumn can
// layer typed accessors on top of it.
type innerStatColumn struct {
	*stats.Column
}
// ConsensusStatColumn is a single column of a ConsensusStatTable.
type ConsensusStatColumn struct {
	innerStatColumn
}
// GetSummaryByValue returns the summary count for the given stat in this column.
func (c *ConsensusStatColumn) GetSummaryByValue(value ConsensusStat) uint16 {
	return c.innerStatColumn.GetSummaryByValue(uint8(value))
}
// String renders the column using the single-character stat labels.
func (c *ConsensusStatColumn) String() string {
	return c.StringFmt(FmtConsensusStat)
}
// NewConsensusStatRow creates a row sized for columnCount columns and the
// full range of ConsensusStat values.
func NewConsensusStatRow(columnCount int) ConsensusStatRow {
	return ConsensusStatRow{innerRow{stats.NewStatRow(uint8(maxConsensusStat)-1, columnCount)}}
}
// ConsensusStatRow is one row of consensus stats, typed as ConsensusStat.
type ConsensusStatRow struct {
	innerRow
}
// Get returns the stat stored at the given column.
func (r *ConsensusStatRow) Get(column int) ConsensusStat {
	return ConsensusStat(r.innerRow.Get(column))
}
// HasValues reports whether any column holds the given stat.
func (r *ConsensusStatRow) HasValues(value ConsensusStat) bool {
	return r.innerRow.HasValues(uint8(value))
}
// HasAllValues reports whether every column holds the given stat.
func (r *ConsensusStatRow) HasAllValues(value ConsensusStat) bool {
	return r.innerRow.HasAllValues(uint8(value))
}
// HasAllValuesOf reports whether every column holds one of the two given stats.
func (r *ConsensusStatRow) HasAllValuesOf(value0, value1 ConsensusStat) bool {
	return r.innerRow.HasAllValuesOf(uint8(value0), uint8(value1))
}
// GetSummaryByValue returns the summary count for the given stat in this row.
func (r *ConsensusStatRow) GetSummaryByValue(value ConsensusStat) uint16 {
	return r.innerRow.GetSummaryByValue(uint8(value))
}
// Set stores value at the given column and wraps the underlying Set result as
// a ConsensusBitsetEntry (presumably the prior entry — verify in stats.Row).
func (r *ConsensusStatRow) Set(column int, value ConsensusStat) ConsensusBitsetEntry {
	return ConsensusBitsetEntry(r.innerRow.Set(column, uint8(value)))
}
// String renders the row using the single-character stat labels.
func (r ConsensusStatRow) String() string {
	return r.innerRow.Row.StringFmt(FmtConsensusStat, true)
}
func StateToConsensusStatRow(b member.StateBitset) ConsensusStatRow {
nodeStats := NewConsensusStatRow(b.Len())
for i, v := range b {
switch {
case v.IsTimeout():
nodeStats.Set(i, ConsensusStatMissingThere)
case v.IsFraud():
nodeStats.Set(i, ConsensusStatFraud)
case v.IsTrusted():
nodeStats.Set(i, ConsensusStatTrusted)
default:
nodeStats.Set(i, ConsensusStatDoubted)
}
}
return nodeStats
} | network/consensus/gcpv2/phasebundle/nodeset/consensus_stats.go | 0.511717 | 0.439507 | consensus_stats.go | starcoder |
package common
import (
"fmt"
"math/big"
"math/rand"
"reflect"
"strconv"
)
// -------------------------
// package Consts, Vars

// Lengths of hashes and addresses in bytes.
const (
	// HashLength is the byte length of a Hash.
	HashLength = 32
)
// ------------------
// package Functions
// BytesToHash sets b to hash.
// If b is larger than len(h), b will be cropped from the left.
func BytesToHash(b []byte) Hash {
	var h Hash
	h.SetBytes(b)
	return h
}
// BigToHash sets byte representation of b to hash.
// If b is larger than len(h), b will be cropped from the left.
func BigToHash(b *big.Int) Hash {
	return BytesToHash(b.Bytes())
}
// HexToHash sets byte representation of s to hash.
// If b is larger than len(h), b will be cropped from the left.
// NOTE(review): Ghex.MustDecodeString presumably panics on invalid hex
// (Must… convention) — confirm against the Ghex implementation.
func HexToHash(s string) Hash {
	return BytesToHash(Ghex.MustDecodeString(s))
}
// ----------------------
// package Struct Hash

// Hash represents the 32 byte Keccak256 hash of arbitrary data.
// It is a fixed-size value type and may be copied freely.
type Hash [HashLength]byte
// MarshalJSON encodes the hash as a quoted hex string.
func (h Hash) MarshalJSON() ([]byte, error) {
	return []byte(strconv.Quote(Ghex.EncodeToString(h[:]))), nil
}
// UnmarshalJSON parses a hash in quoted hex syntax, cropping the decoded
// bytes from the left when they exceed the hash length.
func (h *Hash) UnmarshalJSON(input []byte) error {
	unquoted, err := strconv.Unquote(string(input))
	if err != nil {
		return err
	}
	src := []byte(unquoted)
	dst := make([]byte, Ghex.DecodeLen(len(src)))
	if _, err := Ghex.Decode(dst, src); err != nil {
		return err
	}
	h.SetBytes(dst)
	return nil
}
// SetBytes sets the hash to the value of b. Longer inputs are cropped from
// the left; shorter inputs are right-aligned with leading zero bytes intact.
func (h *Hash) SetBytes(b []byte) {
	if len(b) > HashLength {
		b = b[len(b)-HashLength:]
	}
	copy(h[HashLength-len(b):], b)
}
// Generate implements testing/quick.Generator. A random split point m is
// chosen and only the bytes after index m are randomized, so the leading
// bytes keep the receiver's existing values (h is a value receiver, so the
// caller's hash is untouched).
func (h Hash) Generate(rand *rand.Rand, size int) reflect.Value {
	m := rand.Intn(len(h))
	for i := len(h) - 1; i > m; i-- {
		h[i] = byte(rand.Uint32())
	}
	return reflect.ValueOf(h)
}
// Bytes gets the byte representation of the underlying hash.
func (h Hash) Bytes() []byte { return h[:] }
// Big converts a hash to a big integer (big-endian interpretation).
func (h Hash) Big() *big.Int { return new(big.Int).SetBytes(h[:]) }
// Hex converts a hash to a hex string.
func (h Hash) Hex() string {
	return Ghex.EncodeToString(h[:])
}
// TerminalString implements log.TerminalStringer, formatting a string for console
// output during logging: the first three and last three bytes joined by an ellipsis.
func (h Hash) TerminalString() string {
	return fmt.Sprintf("%x…%x", h[:3], h[29:])
}
// String implements the stringer interface and is used also by the logger when
// doing full logging into a file.
func (h Hash) String() string {
	return h.Hex()
}
// Format implements fmt.Formatter, forcing the byte slice to be formatted as is,
// without going through the stringer interface used for logging.
func (h Hash) Format(s fmt.State, c rune) {
fmt.Fprintf(s, "%"+string(c), h[:])
} | common/hash.go | 0.733165 | 0.412767 | hash.go | starcoder |
package flowmon
import (
"fmt"
"reflect"
)
// FlowAggregate is a list of flows aggregated by a set of keys
type FlowAggregate struct {
	Keys []string // field names the member flows are grouped by
	Flows []*FlowInfo // flows absorbed into this aggregate so far
	// Running totals over all absorbed flows.
	TotalBytes DecUint64
	TotalPackets DecUint64
	// Observation window boundaries, taken from the flows' timestamps.
	LastTimeReceived DecUint64
	FirstTimeReceived DecUint64
	FirstTimeFlowStart DecUint64
	LastTimeFlowEnd DecUint64
	// Most recent bytes-per-second figure and its change since the previous update.
	LastBps DecUint64
	LastDeltaBps int
	LastForwardingStatus uint32 // forwarding status of the most recently absorbed flow
}
// NewFlowAggregate creates an empty aggregate that will group flows by keys.
func NewFlowAggregate(keys []string) *FlowAggregate {
	return &FlowAggregate{
		Keys:  keys,
		Flows: []*FlowInfo{},
	}
}
// AppendIfMatches absorbs flowInfo into the aggregate when its key matches the
// aggregate's key fields, updating the running byte/packet totals, the
// observation window, and the bytes-per-second figures. It reports whether
// the flow was absorbed.
func (fa *FlowAggregate) AppendIfMatches(flowInfo *FlowInfo) (bool, error) {
	match, err := fa.matches(flowInfo)
	if err != nil {
		return false, err
	}
	if !match {
		return false, nil
	}
	fa.LastForwardingStatus = flowInfo.ForwardingStatus
	fa.TotalBytes += DecUint64(flowInfo.Bytes)
	fa.TotalPackets += DecUint64(flowInfo.Packets)
	// The window start values are only recorded once, on the first absorbed flow.
	if fa.FirstTimeReceived == 0 {
		fa.FirstTimeReceived = DecUint64(flowInfo.TimeReceived)
	}
	if fa.FirstTimeFlowStart == 0 {
		fa.FirstTimeFlowStart = DecUint64(flowInfo.TimeFlowStart)
	}
	fa.LastTimeReceived = DecUint64(flowInfo.TimeReceived)
	fa.LastTimeFlowEnd = DecUint64(flowInfo.TimeFlowEnd)
	// Average rate over the whole window, guarded against a zero-length window.
	// NOTE(review): if LastTimeFlowEnd < FirstTimeFlowStart the unsigned
	// subtraction wraps around, yielding a huge divisor and ~0 bps — confirm
	// the inputs are time-ordered.
	var newBps DecUint64 = 0
	if fa.LastTimeFlowEnd != fa.FirstTimeFlowStart {
		newBps = DecUint64(fa.TotalBytes / (fa.LastTimeFlowEnd - fa.FirstTimeFlowStart))
	}
	fa.LastDeltaBps = int(newBps) - int(fa.LastBps)
	fa.LastBps = newBps
	fa.Flows = append(fa.Flows, flowInfo)
	return true, nil
}
// matches reports whether flowInfo belongs in this aggregate by comparing its
// key against the first member flow over the aggregation keys.
func (fa *FlowAggregate) matches(flowInfo *FlowInfo) (bool, error) {
	if len(fa.Flows) == 0 {
		// Accept new members to the aggregate if empty
		return true, nil
	}
	return fa.Flows[0].Key.Matches(flowInfo.Key, fa.Keys)
}
// GetFieldString returns the string representation of the given fieldName of
// the first flow. Since only the first flow is consulted, it is assumed that
// only fields within the aggregation key list — which all member flows share —
// are requested.
func (fa *FlowAggregate) GetFieldString(fieldName string) (string, error) {
	if len(fa.Flows) == 0 {
		return "", fmt.Errorf("Empty Aggregate")
	}
	return fa.Flows[0].Key.GetFieldString(fieldName)
}
// Less compares two FlowAggregates by the given key field of their first
// flows. Unsigned numeric fields compare numerically via reflection; every
// other type falls back to lexicographic comparison of the formatted values.
// The field must be one of the aggregation keys on both sides.
func (fa *FlowAggregate) Less(fieldName string, other *FlowAggregate) (bool, error) {
	if len(fa.Flows) == 0 || len(other.Flows) == 0 {
		return false, fmt.Errorf("Empty Aggregate")
	}
	// Sorting is only meaningful on fields shared by every flow of an
	// aggregate, i.e. the aggregation keys themselves.
	if !fa.isAggregate(fieldName) || !other.isAggregate(fieldName) {
		return false, fmt.Errorf("Sorting key must be part of aggregate keys")
	}
	thisV, err := fa.Flows[0].Key.GetField(fieldName)
	if err != nil {
		return false, err
	}
	otherV, err := other.Flows[0].Key.GetField(fieldName)
	if err != nil {
		return false, err
	}
	switch thisV.(type) {
	case uint32, uint64, HexUint32, DecUint32, DecUint64:
		// These kinds share an unsigned underlying type; compare via reflection.
		return reflect.ValueOf(thisV).Uint() < reflect.ValueOf(otherV).Uint(), nil
	default:
		return fmt.Sprint(thisV) < fmt.Sprint(otherV), nil
	}
}
func (fa *FlowAggregate) isAggregate(fieldName string) bool {
for _, k := range fa.Keys {
if k == fieldName {
return true
}
}
return false
} | pkg/flowmon/aggregate.go | 0.671686 | 0.484624 | aggregate.go | starcoder |
package v1beta1
/*
For imports, we'll need the controller-runtime
[`conversion`](https://godoc.org/sigs.k8s.io/controller-runtime/pkg/conversion)
package, plus the API version for our hub type (v1beta2), and finally some of the
standard packages.
*/
import (
"sigs.k8s.io/controller-runtime/pkg/conversion"
observabilityv1beta2 "github.com/open-cluster-management/multicluster-observability-operator/api/v1beta2"
)
// +kubebuilder:docs-gen:collapse=Imports
/*
Our "spoke" versions need to implement the
[`Convertible`](https://godoc.org/sigs.k8s.io/controller-runtime/pkg/conversion#Convertible)
interface. Namely, they'll need `ConvertTo` and `ConvertFrom` methods to convert to/from
the hub version.
*/
/*
ConvertTo is expected to modify its argument to contain the converted object.
Most of the conversion is straightforward copying, except for converting our changed field.
*/
// ConvertTo converts this MultiClusterObservability to the Hub version (v1beta2),
// reshaping the flat v1beta1 storage and retention fields into the structured
// v1beta2 configs and copying everything else across unchanged.
func (src *MultiClusterObservability) ConvertTo(dstRaw conversion.Hub) error {
	dst := dstRaw.(*observabilityv1beta2.MultiClusterObservability)
	// TODO(morvencao)?: convert the AvailabilityConfig field
	// availabilityConfig := src.Spec.AvailabilityConfig
	dst.Spec.StorageConfig = &observabilityv1beta2.StorageConfig{
		MetricObjectStorage: src.Spec.StorageConfig.MetricObjectStorage,
		StorageClass: src.Spec.StorageConfig.StatefulSetStorageClass,
		// How to convert the current storage size to new one?
		// v1beta1 has only a single stateful-set size, so it is fanned out to
		// every per-component v1beta2 size.
		AlertmanagerStorageSize: src.Spec.StorageConfig.StatefulSetSize,
		RuleStorageSize: src.Spec.StorageConfig.StatefulSetSize,
		StoreStorageSize: src.Spec.StorageConfig.StatefulSetSize,
		CompactStorageSize: src.Spec.StorageConfig.StatefulSetSize,
		ReceiveStorageSize: src.Spec.StorageConfig.StatefulSetSize,
	}
	// Gather the flat v1beta1 retention fields into the v1beta2 RetentionConfig.
	dst.Spec.RetentionConfig = &observabilityv1beta2.RetentionConfig{
		RetentionResolutionRaw: src.Spec.RetentionResolutionRaw,
		RetentionResolution5m: src.Spec.RetentionResolution5m,
		RetentionResolution1h: src.Spec.RetentionResolution1h,
	}
	dst.Spec.EnableDownsampling = src.Spec.EnableDownSampling
	/*
		The rest of the conversion is pretty rote.
	*/
	// ObjectMeta
	dst.ObjectMeta = src.ObjectMeta
	// Spec fields copied across unchanged.
	dst.Spec.ImagePullPolicy = src.Spec.ImagePullPolicy
	dst.Spec.ImagePullSecret = src.Spec.ImagePullSecret
	dst.Spec.NodeSelector = src.Spec.NodeSelector
	dst.Spec.Tolerations = src.Spec.Tolerations
	dst.Spec.ObservabilityAddonSpec = src.Spec.ObservabilityAddonSpec
	// Status
	dst.Status.Conditions = src.Status.Conditions
	// +kubebuilder:docs-gen:collapse=rote conversion
	return nil
}
/*
ConvertFrom is expected to modify its receiver to contain the converted object.
Most of the conversion is straightforward copying, except for converting our changed field.
*/
// ConvertFrom converts from the Hub version (observabilityv1beta2) to this version.
func (dst *MultiClusterObservability) ConvertFrom(srcRaw conversion.Hub) error {
src := srcRaw.(*observabilityv1beta2.MultiClusterObservability)
// TODO(morvencao): convert the AvailabilityConfig field
// dst.Spec.AvailabilityConfig =
dst.Spec.RetentionResolutionRaw = src.Spec.RetentionConfig.RetentionResolutionRaw
dst.Spec.RetentionResolution5m = src.Spec.RetentionConfig.RetentionResolution5m
dst.Spec.RetentionResolution1h = src.Spec.RetentionConfig.RetentionResolution1h
dst.Spec.StorageConfig = &StorageConfigObject{
MetricObjectStorage: src.Spec.StorageConfig.MetricObjectStorage,
StatefulSetStorageClass: src.Spec.StorageConfig.StorageClass,
// How to convert the new storage size to old one?
// StatefulSetSize =
}
dst.Spec.EnableDownSampling = src.Spec.EnableDownsampling
/*
The rest of the conversion is pretty rote.
*/
// ObjectMeta
dst.ObjectMeta = src.ObjectMeta
// Spec
dst.Spec.ImagePullPolicy = src.Spec.ImagePullPolicy
dst.Spec.ImagePullSecret = src.Spec.ImagePullSecret
dst.Spec.NodeSelector = src.Spec.NodeSelector
dst.Spec.Tolerations = src.Spec.Tolerations
dst.Spec.ObservabilityAddonSpec = src.Spec.ObservabilityAddonSpec
// Status
dst.Status.Conditions = src.Status.Conditions
// +kubebuilder:docs-gen:collapse=rote conversion
return nil
} | api/v1beta1/multiclusterobservability_conversion.go | 0.721547 | 0.429728 | multiclusterobservability_conversion.go | starcoder |
package indexers
import (
"bytes"
"encoding/binary"
"io"
"math"
"github.com/utreexo/utreexod/chaincfg/chainhash"
"github.com/utreexo/utreexod/wire"
)
// proofStatsSize has 19 elements that are each 8 bytes big.
// NOTE(review): keep this in sync with the field count of proofStats below.
const proofStatsSize int = 8 * 19
// proofStats are the relevant proof statistics to check how big each proofs are.
// The *Sum/*Count field pairs accumulate totals so averages can be derived as
// Sum/Count.
type proofStats struct {
	// The height of the chain for the below stats.
	BlockHeight uint64
	// The overhead of the multi interval proof.
	MultiBlockProofOverheadSum float64
	MultiBlockProofCount uint64
	// The overhead of the single interval proof.
	BlockProofOverheadSum float64
	BlockProofCount uint64
	// Total deletions vs the proven deletions by the multi-block proof.
	TotalDels uint64
	TotalProvenDels uint64
	// Size of all the leaf datas.
	LdSize uint64
	LdCount uint64
	// Size of all the targets in the batchproofs.
	TgSize uint64
	TgCount uint64
	// Size of all the proofs in the batchproofs.
	ProofSize uint64
	ProofCount uint64
	// Size of the multi-block targets.
	MbTgSize uint64
	MbTgCount uint64
	// Size of the multi-block proofs.
	MbProofSize uint64
	MbProofCount uint64
	// Size of the leafhashes for the multi-block proofs.
	MbHashSize uint64
	MbHashCount uint64
}
// UpdateTotalDelCount adds delCount to the running total of deletions in the
// proof stats.
func (ps *proofStats) UpdateTotalDelCount(delCount uint64) {
	ps.TotalDels += delCount
}
// UpdateUDStats updates the all the udata statistics for one block's UData.
// When excludeAccProof is true, the accumulator proof's size is left out of
// the proof totals; targets and leaf data are always counted.
func (ps *proofStats) UpdateUDStats(excludeAccProof bool, ud *wire.UData) {
	// Update target size.
	ps.TgSize += uint64(wire.BatchProofSerializeTargetSize(&ud.AccProof))
	ps.TgCount += uint64(len(ud.AccProof.Targets))
	// Update leaf data size.
	ps.LdSize += uint64(ud.SerializeUxtoDataSizeCompact(false))
	ps.LdCount += uint64(len(ud.LeafDatas))
	// Update proof size if the proof is to be included.
	if !excludeAccProof {
		ps.ProofSize += uint64(wire.BatchProofSerializeAccProofSize(&ud.AccProof))
		ps.ProofCount += uint64(len(ud.AccProof.Proof))
	}
	// Calculate the proof overhead and fold it into the running average inputs.
	overhead := calcProofOverhead(ud)
	ps.BlockProofCount++
	ps.BlockProofOverheadSum += overhead
}
// UpdateMultiUDStats updates the multi-block utreexo data statistics.
// delCount is the number of deletions proven by this multi-block proof and
// multiUd carries the multi-block batch proof being measured.
func (ps *proofStats) UpdateMultiUDStats(delCount int, multiUd *wire.UData) {
	// Update target size.
	ps.MbTgSize += uint64(wire.BatchProofSerializeTargetSize(&multiUd.AccProof))
	ps.MbTgCount += uint64(len(multiUd.AccProof.Targets))
	// Update proof size.
	ps.MbProofSize += uint64(wire.BatchProofSerializeAccProofSize(&multiUd.AccProof))
	ps.MbProofCount += uint64(len(multiUd.AccProof.Proof))
	// Update multi-block proof overhead.
	overhead := calcProofOverhead(multiUd)
	ps.MultiBlockProofCount++
	ps.MultiBlockProofOverheadSum += overhead
	// Update the multi-block proof hash size: one 32-byte leaf hash per
	// proven deletion.
	ps.MbHashSize += uint64(delCount * chainhash.HashSize)
	ps.MbHashCount += uint64(delCount)
	// Update proven dels by the multi-block proofs.
	ps.TotalProvenDels += uint64(delCount)
}
// LogProofStats outputs a log of the proof statistics.
//
// NOTE(review): the ratios divide by TotalDels, BlockProofCount and
// MultiBlockProofCount without zero checks, so a freshly-initialized stats
// struct logs NaN/+Inf — confirm that is acceptable for log consumers.
func (ps *proofStats) LogProofStats() {
	log.Infof("height %d: totalProvenPercentage %f, totalDels %d, totalProvenDels %d, ldSize %d, ldCount %d, tgSize %d, tgCount %d, proofSize %d, proofCount %d "+
		"mbTgSize %d, mbTgCount %d, mbProofSize %d, mbProofCount %d, mbHashSize %d, mbHashCount %d",
		ps.BlockHeight, float64(ps.TotalProvenDels)/float64(ps.TotalDels), ps.TotalDels, ps.TotalProvenDels, ps.LdSize, ps.LdCount, ps.TgSize, ps.TgCount,
		ps.ProofSize, ps.ProofCount, ps.MbTgSize, ps.MbTgCount,
		ps.MbProofSize, ps.MbProofCount, ps.MbHashSize, ps.MbHashCount)
	log.Infof("height %d, average-blockoverhead %f, average-multiblockoverhead %f, blockoverhead-sum %f, blockcount %d, mboverhead-sum %f, mbCount %d",
		ps.BlockHeight, ps.BlockProofOverheadSum/float64(ps.BlockProofCount), ps.MultiBlockProofOverheadSum/float64(ps.MultiBlockProofCount),
		ps.BlockProofOverheadSum, ps.BlockProofCount, ps.MultiBlockProofOverheadSum, ps.MultiBlockProofCount)
}
// Serialize serializes the proof statistics into the writer.
//
// Each of the 19 fields is written as a big-endian 8-byte value (float
// fields as their IEEE-754 bit patterns) for a total of proofStatsSize
// bytes. The field order here must stay in lockstep with Deserialize.
func (ps *proofStats) Serialize(w io.Writer) error {
	// On-disk field order; append-only.
	fields := []uint64{
		ps.BlockHeight,
		math.Float64bits(ps.MultiBlockProofOverheadSum),
		ps.MultiBlockProofCount,
		math.Float64bits(ps.BlockProofOverheadSum),
		ps.BlockProofCount,
		ps.TotalDels,
		ps.TotalProvenDels,
		ps.LdSize,
		ps.LdCount,
		ps.TgSize,
		ps.TgCount,
		ps.ProofSize,
		ps.ProofCount,
		ps.MbTgSize,
		ps.MbTgCount,
		ps.MbProofSize,
		ps.MbProofCount,
		ps.MbHashSize,
		ps.MbHashCount,
	}
	var buf [8]byte
	for _, field := range fields {
		binary.BigEndian.PutUint64(buf[:], field)
		if _, err := w.Write(buf[:]); err != nil {
			return err
		}
	}
	return nil
}
// Deserialize deserializes the proof statistics from the reader.
//
// Fields are read as big-endian 8-byte values in exactly the order
// Serialize writes them. io.ReadFull is used because a plain r.Read is
// permitted to return fewer than 8 bytes, which would silently shift and
// corrupt every subsequent field. On failure the first read error is
// returned and any not-yet-read fields are left zeroed; callers should
// discard ps in that case.
func (ps *proofStats) Deserialize(r io.Reader) error {
	var firstErr error
	next := func() uint64 {
		if firstErr != nil {
			return 0
		}
		var buf [8]byte
		if _, err := io.ReadFull(r, buf[:]); err != nil {
			firstErr = err
			return 0
		}
		return binary.BigEndian.Uint64(buf[:])
	}
	ps.BlockHeight = next()
	ps.MultiBlockProofOverheadSum = math.Float64frombits(next())
	ps.MultiBlockProofCount = next()
	ps.BlockProofOverheadSum = math.Float64frombits(next())
	ps.BlockProofCount = next()
	ps.TotalDels = next()
	ps.TotalProvenDels = next()
	ps.LdSize = next()
	ps.LdCount = next()
	ps.TgSize = next()
	ps.TgCount = next()
	ps.ProofSize = next()
	ps.ProofCount = next()
	ps.MbTgSize = next()
	ps.MbTgCount = next()
	ps.MbProofSize = next()
	ps.MbProofCount = next()
	ps.MbHashSize = next()
	ps.MbHashCount = next()
	return firstErr
}
// WritePStats writes the proof statistics into the passed in flatfile. The
// stats always occupy the first proofStatsSize bytes of the file.
func (ps *proofStats) WritePStats(pStatFF *FlatFileState) error {
	buf := bytes.NewBuffer(make([]byte, 0, proofStatsSize))
	if err := ps.Serialize(buf); err != nil {
		return err
	}
	_, err := pStatFF.dataFile.WriteAt(buf.Bytes(), 0)
	return err
}
// InitPStats reads the proof statistics from the passed in flatfile and attempts to
// initializes the proof statistics. If the size read is smaller than the proofStatsSize,
// then nothing is initialized and the function returns.
func (ps *proofStats) InitPStats(pStatFF *FlatFileState) error {
buf := make([]byte, proofStatsSize)
n, err := pStatFF.dataFile.ReadAt(buf, 0)
if n < proofStatsSize {
return nil
}
if err != nil {
return err
}
reader := bytes.NewBuffer(buf)
err = ps.Deserialize(reader)
if err != nil {
return err
}
return nil
} | blockchain/indexers/utreexoproofstats.go | 0.588889 | 0.509093 | utreexoproofstats.go | starcoder |
package main
import (
"fmt"
"math"
)
// symmetric and lower use a packed representation that stores only
// the lower triangle, row by row: element (r,c) with c <= r lives at
// index r*(r+1)/2 + c of ele.
type symmetric struct {
	order int       // matrix dimension (order x order)
	ele   []float64 // packed lower triangle, len == order*(order+1)/2
}
type lower struct {
	order int       // matrix dimension (order x order)
	ele   []float64 // packed lower triangle, len == order*(order+1)/2
}
// symmetric.print prints a square matrix from the packed representation,
// printing the upper triangle as a transpose of the lower.
func (s *symmetric) print() {
	const eleFmt = "%10.5f "
	row, diag := 1, 0 // row: 1-based row being printed; diag: packed index of its diagonal
	for i, e := range s.ele {
		fmt.Printf(eleFmt, e)
		if i == diag {
			// Reached the diagonal, i.e. the end of the stored row. Fill
			// out the printed row with the mirrored upper-triangle values
			// by walking down the column below the diagonal: each step
			// advances j by the current 0-based row length (col).
			for j, col := diag+row, row; col < s.order; j += col {
				fmt.Printf(eleFmt, s.ele[j])
				col++
			}
			fmt.Println()
			row++
			diag += row
		}
	}
}
// lower.print prints a square matrix from the packed representation,
// printing the upper triangle as all zeros.
func (l *lower) print() {
	const eleFmt = "%10.5f "
	row, diag := 1, 0 // row: 1-based row being printed; diag: packed index of its diagonal
	for i, e := range l.ele {
		fmt.Printf(eleFmt, e)
		if i == diag {
			// End of the stored row: pad the remainder of the printed row
			// with zeros, then advance to the next row's diagonal index.
			for j := row; j < l.order; j++ {
				fmt.Printf(eleFmt, 0.)
			}
			fmt.Println()
			row++
			diag += row
		}
	}
}
// choleskyLower returns the cholesky decomposition of a symmetric real
// matrix. The matrix must be positive definite but this is not checked.
//
// The returned factor L satisfies A = L * Lᵀ, in the same packed
// lower-triangle layout as the input. The elements are processed once in
// packed order; l.ele doubles as storage for the finished entries and as
// an accumulator for the partial inner-product sums of entries not yet
// finalized (a forward-accumulating Cholesky).
func (a *symmetric) choleskyLower() *lower {
	l := &lower{a.order, make([]float64, len(a.ele))}
	row, col := 1, 1
	dr := 0 // index of diagonal element at end of row
	dc := 0 // index of diagonal element at top of column
	for i, e := range a.ele {
		if i < dr {
			// Off-diagonal entry: L[r][c] = (A[r][c] - sum) / L[c][c],
			// where the running sum was accumulated into l.ele[i] while
			// processing earlier columns.
			d := (e - l.ele[i]) / l.ele[dc]
			l.ele[i] = d
			// Push this entry's contribution forward into the pending
			// sums for the remaining entries of the current row.
			ci, cx := col, dc
			for j := i + 1; j <= dr; j++ {
				cx += ci
				ci++
				l.ele[j] += d * l.ele[cx]
			}
			col++
			dc += col
		} else {
			// Diagonal entry: L[r][r] = sqrt(A[r][r] - sum).
			l.ele[i] = math.Sqrt(e - l.ele[i])
			row++
			dr += row
			col = 1
			dc = 0
		}
	}
	return l
}
func main() {
demo(&symmetric{3, []float64{
25,
15, 18,
-5, 0, 11}})
demo(&symmetric{4, []float64{
18,
22, 70,
54, 86, 174,
42, 62, 134, 106}})
}
func demo(a *symmetric) {
fmt.Println("A:")
a.print()
fmt.Println("L:")
a.choleskyLower().print()
} | lang/Go/cholesky-decomposition-1.go | 0.729038 | 0.442155 | cholesky-decomposition-1.go | starcoder |
package encoding
import (
"fmt"
"reflect"
"strings"
)
// stringifyType maps a Go type onto the type its stringified value will
// take: maps become map[string]interface{}, slices []interface{}, structs
// a mirrored struct type, pointers are dereferenced, and everything else
// becomes string.
func stringifyType(t reflect.Type) reflect.Type {
	if t.Kind() == reflect.Ptr {
		return stringifyType(t.Elem())
	}
	switch t.Kind() {
	case reflect.Map:
		return reflect.MapOf(stringType, interfaceType)
	case reflect.Slice:
		return reflect.SliceOf(interfaceType)
	case reflect.Struct:
		return stringifyStructType(t)
	}
	return stringType
}
// stringifyStructType builds a struct type that mirrors t field-for-field,
// with each field's type converted via stringifyType; names and tags are
// preserved so json tags keep working on the result.
func stringifyStructType(t reflect.Type) reflect.Type {
	n := t.NumField()
	fields := make([]reflect.StructField, 0, n)
	for i := 0; i < n; i++ {
		src := t.Field(i)
		fields = append(fields, reflect.StructField{
			Name: src.Name,
			Type: stringifyType(src.Type),
			Tag:  src.Tag,
		})
	}
	return reflect.StructOf(fields)
}
// Stringify converts any supported type into a stringified value
func Stringify(v interface{}) (interface{}, error) {
var err error
if v == nil {
return nil, nil
}
val := reflect.ValueOf(v)
switch val.Kind() {
case reflect.String, reflect.Bool, reflect.Int, reflect.Float64:
return fmt.Sprint(v), nil
case reflect.Map:
out := make(map[string]interface{})
for _, key := range val.MapKeys() {
v, err := Stringify(val.MapIndex(key).Interface())
switch {
case err != nil:
return nil, err
case v != nil:
out[key.String()] = v
}
}
return out, nil
case reflect.Slice:
out := make([]interface{}, val.Len())
for i := 0; i < val.Len(); i++ {
v, err = Stringify(val.Index(i).Interface())
switch {
case err != nil:
return nil, err
case v != nil:
out[i] = v
}
}
return out, nil
case reflect.Struct:
t := val.Type()
out := reflect.New(stringifyStructType(t)).Elem()
for i := 0; i < t.NumField(); i++ {
f := t.Field(i)
v := val.FieldByName(f.Name)
if tag, ok := f.Tag.Lookup("json"); ok {
if strings.Contains(tag, ",omitempty") {
if v.IsZero() {
continue
}
}
}
s, err := Stringify(v.Interface())
switch {
case err != nil:
return nil, err
case s != nil:
out.Field(i).Set(reflect.ValueOf(s))
}
}
return out.Interface(), nil
case reflect.Ptr:
if val.IsNil() {
return nil, nil
}
return Stringify(val.Elem().Interface())
}
return nil, fmt.Errorf("Unsupported type: '%v'", val.Kind())
} | cfn/encoding/stringify.go | 0.525612 | 0.430686 | stringify.go | starcoder |
package common
import (
"fmt"
"github.com/mikeyhu/glipso/interfaces"
)
// SYM is a symbol, beginning with a : and normally used as keys within maps
type SYM string
// IsType marks SYM as usable wherever an interfaces.Type is expected.
func (s SYM) IsType() {}
// IsValue marks SYM as usable wherever an interfaces.Value is expected.
func (s SYM) IsValue() {}
// String returns the symbol's name, implementing fmt.Stringer.
func (s SYM) String() string {
	return string(s)
}
// Equals reports, as a B value, whether o is a SYM with the same name;
// any non-SYM comparand yields B(false).
func (s SYM) Equals(o interfaces.Equalable) interfaces.Value {
	other, ok := o.(SYM)
	return B(ok && s == other)
}
// Apply for SYM only works on a single argument of MAP, and looks up a value
// in the MAP keyed to the SYM. A key that is absent from the MAP (and its
// parents) evaluates to NILL rather than an error, so callers can probe maps
// safely.
func (s SYM) Apply(arguments []interfaces.Type, env interfaces.Scope) (interfaces.Value, error) {
	if len(arguments) != 1 {
		// Fixed typo in user-facing message: "recieved" -> "received".
		return NILL, fmt.Errorf("SYM Apply : expected 1 argument, received %d", len(arguments))
	}
	val, err := evaluateToValue(arguments[0], env)
	if err != nil {
		return nil, err
	}
	if mp, ok := val.(*MAP); ok {
		if v, found := mp.lookup(s); found {
			return v, nil
		}
		return NILL, nil
	}
	return NILL, fmt.Errorf("SYM Apply : expected MAP, received %v", arguments[0])
}
// MAP is an immutable hash-map, associating new entries with a MAP returns a new MAP
type MAP struct {
	store  map[interfaces.Equalable]interfaces.Value // entries local to this generation
	parent *MAP // previous generation, consulted by lookup when a key is absent here
}
// IsType marks MAP as usable wherever an interfaces.Type is expected.
func (m *MAP) IsType() {}
// IsValue marks MAP as usable wherever an interfaces.Value is expected.
func (m *MAP) IsValue() {}
// String returns a debug representation of the local store only; parent
// entries are not included.
func (m *MAP) String() string {
	return fmt.Sprintf("%v", m.store)
}
// lookup returns the value stored under k, walking up through parent MAPs
// when the key is absent locally. The boolean reports whether k was found;
// on a miss the value is NILL.
func (m *MAP) lookup(k interfaces.Equalable) (interfaces.Value, bool) {
	for cur := m; cur != nil; cur = cur.parent {
		if v, ok := cur.store[k]; ok {
			return v, true
		}
	}
	return NILL, false
}
// initialiseMAP builds a fresh parentless MAP from alternating key/value
// arguments. An odd argument count is an error since every key needs a
// value. NOTE: the key type assertion will panic if a key argument does not
// implement interfaces.Equalable.
func initialiseMAP(arguments []interfaces.Value) (*MAP, error) {
	count := len(arguments)
	if count%2 > 0 {
		// Fixed typo in user-facing message: "recieved" -> "received".
		return nil, fmt.Errorf("MAP Initialise : expected an even number of arguments, received %v", count)
	}
	mp := &MAP{map[interfaces.Equalable]interfaces.Value{}, nil}
	for i := 0; i < count; i += 2 {
		mp.store[arguments[i].(interfaces.Equalable)] = arguments[i+1]
	}
	return mp, nil
}
// associate returns a new MAP generation layered on top of m that contains
// the given alternating key/value arguments; m itself is not modified. An
// odd argument count is an error.
func (m *MAP) associate(arguments []interfaces.Value) (*MAP, error) {
	count := len(arguments)
	if count%2 > 0 {
		// The message previously said "MAP Initialise" (copy-paste from
		// initialiseMAP) and misspelled "received"; both fixed.
		return nil, fmt.Errorf("MAP Associate : expected an even number of arguments, received %v", count)
	}
	mp := &MAP{map[interfaces.Equalable]interfaces.Value{}, m}
	for i := 0; i < count; i += 2 {
		mp.store[arguments[i].(interfaces.Equalable)] = arguments[i+1]
	}
	return mp, nil
}
func (m *MAP) ToSlice(interfaces.Scope) ([]interfaces.Type, error) {
keys := make([]interfaces.Type, len(m.store)*2)
i := 0
for k, v := range m.store {
keys[i] = k
i++
keys[i] = v
i++
}
return keys, nil
} | common/symbol.go | 0.690246 | 0.466116 | symbol.go | starcoder |
package encryption
import (
"strings"
"strconv"
"crypto/aes"
"crypto/cipher"
"math/rand"
"encoding/hex"
)
// ----- Encrypts a string for usage with node using AES encryption.
// Returns hex(ciphertext) + "$" + hex(iv), or "" on failure.
// NOTE(review): the IV comes from generateIV, which uses math/rand and is
// not cryptographically secure — consider crypto/rand for production use.
// NOTE(review): the key must be 16, 24 or 32 bytes (AES-128/192/256) or
// aes.NewCipher fails and this panics.
func Encrypt(key, text string) (string) {
	// Convert the text to encrypt into a byte array and then pad the array to make sure it is in blocks of 16
	ciphertext := []byte(text)
	ciphertext = pad(ciphertext)
	// Check that the bytes in the text array are divisible by the block size. This is required for the
	// encryption. (pad already guarantees this, so the panic should be unreachable.)
	if len(ciphertext)%aes.BlockSize != 0 {
		panic("Cipher text is " + strconv.Itoa(len(ciphertext)) + " long but should be " + strconv.Itoa(aes.BlockSize))
	} else {
		// Create a new cipher using the key you want to use.
		block, err := aes.NewCipher([]byte(key))
		if err != nil {
			panic("Error creating cipher")
		} else {
			// Next we need to generate the IV. This can be any string but NEEDS to be 16 characters long.
			// The IV doesnt need to be secured and needs to be passed to where it is decrypted.
			iv := []byte(generateIV(16))
			// Set up the encrypter
			mode := cipher.NewCBCEncrypter(block, iv)
			// Encrypt the string in place
			mode.CryptBlocks(ciphertext, ciphertext)
			// We need to convert the encrypted string into something that can be passed. We convert it into a hex string.
			// We also include the IV into the string using "$" as a seperator so we can find it when we come to decrypt
			return hex.EncodeToString(ciphertext) + "$" + hex.EncodeToString(iv)
		}
	}
	// This is just a fall through with returning an empty string if we hit a problem
	// (unreachable in practice: every path above either returns or panics).
	return ""
}
// ----- Decrypts aa encrypted AES string and is compatible with node crypto AES encryption
func Decrypt(key, text string) ([]byte) {
// The byte array we will return with the decrypted string. This could be converted to a string if required
var ciphertext []byte
// Split the string passed in looking for a "$" as this should contain the IV string required for decryption
textParts := strings.Split(text, "$")
if (len(textParts) == 2) {
// Assuming that this was encypted using either the node or the other method in this file it needs to be decoded from a hex string
ciphertext, _ = hex.DecodeString(textParts[0])
// Create the cipher for decrypting the encrypted string.IMPORTANT!!!! this key needs to be 32 characters long
block, _ := aes.NewCipher([]byte(key))
// Check that the cipher array size is devisable by the block size. If not then it cannot decrypt
if len(ciphertext)%aes.BlockSize != 0 {
panic("Cipher text is " + strconv.Itoa(len(ciphertext)) + " long but should be " + strconv.Itoa(aes.BlockSize))
} else {
// Set up the decrypter
mode := cipher.NewCBCDecrypter(block, []byte(textParts[1]))
// Decrypt the text into the bye array we set up
mode.CryptBlocks(ciphertext, ciphertext)
// The passed in text was probably padded when encypted to make up the block size. We need to remove this padding now the decryption is done
ciphertext = unpad(ciphertext)
}
}
// Return the decrypted text (Assuming everything went well)
return ciphertext
}
// ----- generateIV returns a pseudo-random alphanumeric string of length n
// for use as an IV. The IV does not need to be secret, but note this draws
// from math/rand, not a cryptographic source.
func generateIV(n int) string {
	const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890"
	out := make([]rune, n)
	for i := 0; i < n; i++ {
		out[i] = rune(charset[rand.Intn(len(charset))])
	}
	return string(out)
}
// ----- pad applies PKCS#7 padding so the input's length becomes a multiple
// of the 16-byte AES block size. Input that is already block-aligned gets a
// full extra block of padding, as PKCS#7 requires, so unpad can always
// recover the original length unambiguously.
func pad(in []byte) []byte {
	// 16 - (len % 16) is always in [1, 16], so the old
	// "if padding == 0 { padding = 16 }" branch was dead code.
	padding := 16 - len(in)%16
	for i := 0; i < padding; i++ {
		in = append(in, byte(padding))
	}
	return in
}
// unpad strips the PKCS#7 padding added by pad, returning nil when the
// padding byte is zero, exceeds the input length or block size, or when the
// trailing bytes are inconsistent with it.
func unpad(in []byte) []byte {
	n := len(in)
	if n == 0 {
		return nil
	}
	padByte := in[n-1]
	if padByte == 0 || int(padByte) > n || padByte > aes.BlockSize {
		return nil
	}
	// Every one of the last padByte bytes must equal padByte.
	for i := n - int(padByte); i < n; i++ {
		if in[i] != padByte {
			return nil
		}
	}
	return in[:n-int(padByte)]
}
package dprec
import "fmt"
// NewQuat returns the quaternion with the given scalar (W) and vector
// (X, Y, Z) components.
func NewQuat(w, x, y, z float64) Quat {
	return Quat{
		W: w,
		X: x,
		Y: y,
		Z: z,
	}
}
// IdentityQuat returns the identity quaternion (1, 0, 0, 0), which
// represents "no rotation".
func IdentityQuat() Quat {
	return Quat{
		W: 1.0,
		X: 0.0,
		Y: 0.0,
		Z: 0.0,
	}
}
// RotationQuat returns a unit quaternion encoding a rotation of angle
// around direction, using the standard half-angle axis-angle construction
// (cos(a/2), sin(a/2)*axis). The direction is normalized first, so it need
// not be a unit vector.
func RotationQuat(angle Angle, direction Vec3) Quat {
	cs := Cos(angle / 2.0)
	sn := Sin(angle / 2.0)
	normalizedDirection := UnitVec3(direction)
	return Quat{
		W: cs,
		X: sn * normalizedDirection.X,
		Y: sn * normalizedDirection.Y,
		Z: sn * normalizedDirection.Z,
	}
}
// ConjugateQuat returns the conjugate (W, -X, -Y, -Z). For unit
// quaternions the conjugate is also the inverse.
func ConjugateQuat(q Quat) Quat {
	return Quat{
		W: q.W,
		X: -q.X,
		Y: -q.Y,
		Z: -q.Z,
	}
}
// QuatScalarProd returns q with every component multiplied by value.
func QuatScalarProd(q Quat, value float64) Quat {
	return Quat{
		W: q.W * value,
		X: q.X * value,
		Y: q.Y * value,
		Z: q.Z * value,
	}
}
// QuatScalarQuot returns q with every component divided by value.
// Division by zero yields Inf/NaN components, as with plain float64 math.
func QuatScalarQuot(q Quat, value float64) Quat {
	return Quat{
		W: q.W / value,
		X: q.X / value,
		Y: q.Y / value,
		Z: q.Z / value,
	}
}
// QuatProd returns the Hamilton product first*second. Quaternion
// multiplication is non-commutative, so argument order matters.
func QuatProd(first, second Quat) Quat {
	return Quat{
		W: first.W*second.W - first.X*second.X - first.Y*second.Y - first.Z*second.Z,
		X: first.W*second.X + first.X*second.W + first.Y*second.Z - first.Z*second.Y,
		Y: first.W*second.Y - first.X*second.Z + first.Y*second.W + first.Z*second.X,
		Z: first.W*second.Z + first.X*second.Y - first.Y*second.X + first.Z*second.W,
	}
}
// QuatVec3Rotation rotates v by q using the sandwich product
// q * (0, v) * conj(q) and returns the vector part of the result.
// The result is a proper rotation only when q is a unit quaternion.
func QuatVec3Rotation(q Quat, v Vec3) Vec3 {
	// Embed v as a pure quaternion (zero scalar part).
	vectorQuat := Quat{
		W: 0.0,
		X: v.X,
		Y: v.Y,
		Z: v.Z,
	}
	res := QuatProd(QuatProd(q, vectorQuat), ConjugateQuat(q))
	return Vec3{
		X: res.X,
		Y: res.Y,
		Z: res.Z,
	}
}
// UnitQuat returns q scaled to unit norm.
func UnitQuat(q Quat) Quat {
	return QuatScalarQuot(q, q.Norm())
}
// InverseQuat returns the multiplicative inverse conj(q) / |q|².
func InverseQuat(q Quat) Quat {
	return QuatScalarQuot(ConjugateQuat(q), q.SqrNorm())
}
// Quat is a quaternion with scalar part W and vector part (X, Y, Z).
type Quat struct {
	W float64
	X float64
	Y float64
	Z float64
}
// SqrNorm returns the squared norm; prefer it over Norm when comparing
// magnitudes, since it avoids the square root.
func (q Quat) SqrNorm() float64 {
	return q.W*q.W + q.X*q.X + q.Y*q.Y + q.Z*q.Z
}
// Norm returns the Euclidean norm (length) of the quaternion.
func (q Quat) Norm() float64 {
	return Sqrt(q.SqrNorm())
}
// OrientationX returns the image of the X axis under q's rotation — the
// first column of the equivalent rotation matrix. Only meaningful when q
// is a unit quaternion.
func (q Quat) OrientationX() Vec3 {
	return Vec3{
		X: 1.0 - 2.0*(q.Y*q.Y+q.Z*q.Z),
		Y: 2.0 * (q.X*q.Y + q.W*q.Z),
		Z: 2.0 * (q.X*q.Z - q.W*q.Y),
	}
}
// OrientationY returns the image of the Y axis under q's rotation — the
// second column of the equivalent rotation matrix. Only meaningful when q
// is a unit quaternion.
func (q Quat) OrientationY() Vec3 {
	return Vec3{
		X: 2.0 * (q.X*q.Y - q.W*q.Z),
		Y: 1.0 - 2.0*(q.X*q.X+q.Z*q.Z),
		Z: 2.0 * (q.Y*q.Z + q.W*q.X),
	}
}
// OrientationZ returns the image of the Z axis under q's rotation — the
// third column of the equivalent rotation matrix. Only meaningful when q
// is a unit quaternion.
func (q Quat) OrientationZ() Vec3 {
	return Vec3{
		X: 2.0 * (q.X*q.Z + q.W*q.Y),
		Y: 2.0 * (q.Y*q.Z - q.W*q.X),
		Z: 1.0 - 2.0*(q.X*q.X+q.Y*q.Y),
	}
}
func (q Quat) GoString() string {
return fmt.Sprintf("(%f, %f, %f, %f)", q.W, q.X, q.Y, q.Z)
} | dprec/quat.go | 0.827515 | 0.758242 | quat.go | starcoder |
package ent
import (
"fmt"
"strings"
"entgo.io/ent/dialect/sql"
"github.com/Yiling-J/carrier/examples/ent_recipe/ent/ingredient"
)
// Ingredient is the model entity for the Ingredient schema.
// NOTE(review): this file follows ent's generated-entity layout — confirm
// whether it is codegen output before hand-editing.
type Ingredient struct {
	config `json:"-"`
	// ID of the ent.
	ID int `json:"id,omitempty"`
	// Name holds the value of the "name" field.
	Name string `json:"name,omitempty"`
	// Edges holds the relations/edges for other nodes in the graph.
	// The values are being populated by the IngredientQuery when eager-loading is set.
	Edges IngredientEdges `json:"edges"`
}
// IngredientEdges holds the relations/edges for other nodes in the graph.
type IngredientEdges struct {
	// RecipeIngredients holds the value of the recipe_ingredients edge.
	RecipeIngredients []*RecipeIngredient `json:"recipe_ingredients,omitempty"`
	// loadedTypes holds the information for reporting if a
	// type was loaded (or requested) in eager-loading or not.
	loadedTypes [1]bool
}
// RecipeIngredientsOrErr returns the RecipeIngredients value or an error if the edge
// was not loaded in eager-loading. This lets callers distinguish an empty
// edge from one that was simply never requested.
func (e IngredientEdges) RecipeIngredientsOrErr() ([]*RecipeIngredient, error) {
	if e.loadedTypes[0] {
		return e.RecipeIngredients, nil
	}
	return nil, &NotLoadedError{edge: "recipe_ingredients"}
}
// scanValues returns the types for scanning values from sql.Rows.
// The returned slice is positionally aligned with columns so database/sql
// can scan each column into a value of the appropriate nullable type; an
// unknown column name is an error.
func (*Ingredient) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case ingredient.FieldID:
			values[i] = new(sql.NullInt64)
		case ingredient.FieldName:
			values[i] = new(sql.NullString)
		default:
			return nil, fmt.Errorf("unexpected column %q for type Ingredient", columns[i])
		}
	}
	return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the Ingredient fields. values must be the slice previously produced by
// scanValues for the same columns, so positions line up.
func (i *Ingredient) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for j := range columns {
		switch columns[j] {
		case ingredient.FieldID:
			value, ok := values[j].(*sql.NullInt64)
			if !ok {
				return fmt.Errorf("unexpected type %T for field id", value)
			}
			i.ID = int(value.Int64)
		case ingredient.FieldName:
			// NULL names are skipped, leaving the zero value in place.
			if value, ok := values[j].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field name", values[j])
			} else if value.Valid {
				i.Name = value.String
			}
		}
	}
	return nil
}
// QueryRecipeIngredients queries the "recipe_ingredients" edge of the Ingredient entity.
func (i *Ingredient) QueryRecipeIngredients() *RecipeIngredientQuery {
	return (&IngredientClient{config: i.config}).QueryRecipeIngredients(i)
}
// Update returns a builder for updating this Ingredient.
// Note that you need to call Ingredient.Unwrap() before calling this method if this Ingredient
// was returned from a transaction, and the transaction was committed or rolled back.
func (i *Ingredient) Update() *IngredientUpdateOne {
	return (&IngredientClient{config: i.config}).UpdateOne(i)
}
// Unwrap unwraps the Ingredient entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
// It panics if the entity was not obtained from a transaction.
func (i *Ingredient) Unwrap() *Ingredient {
	tx, ok := i.config.driver.(*txDriver)
	if !ok {
		panic("ent: Ingredient is not a transactional entity")
	}
	i.config.driver = tx.drv
	return i
}
// String implements the fmt.Stringer, rendering the entity as
// "Ingredient(id=..., name=...)".
func (i *Ingredient) String() string {
	var builder strings.Builder
	builder.WriteString("Ingredient(")
	builder.WriteString(fmt.Sprintf("id=%v", i.ID))
	builder.WriteString(", name=")
	builder.WriteString(i.Name)
	builder.WriteByte(')')
	return builder.String()
}
// Ingredients is a parsable slice of Ingredient.
type Ingredients []*Ingredient
func (i Ingredients) config(cfg config) {
for _i := range i {
i[_i].config = cfg
}
} | examples/ent_recipe/ent/ingredient.go | 0.632843 | 0.443118 | ingredient.go | starcoder |
package rand
// Code ported to Go from: https://github.com/SRombauts/SimplexNoise
// Copyright (c) 2014-2018 <NAME> (<EMAIL>)
// Distributed under the MIT License (MIT) (See accompanying file licenses/SimplexNoise.txt)
import (
"github.com/maxfish/go-libs/pkg/fmath"
)
/**
 * 1D Perlin simplex noise
 * @param[in] x float coordinate
 * @param[in] seed multiplied by 7 and mixed into the corner hashes, so
 *                 different seeds give different but repeatable noise fields
 * @return Noise value in the range[-1; 1], value of 0 on all integer coordinates.
 *
 * NOTE: the exact sequence of float operations below is part of the
 * reproducible output; do not reorder the arithmetic.
 */
func PerlinNoise1D(x float32, seed int32) float32 {
	var n0, n1 float32 // Noise contributions from the two "corners"
	// Corners coordinates (nearest integer values):
	var i0 = int32(fmath.Floor(x))
	var i1 = i0 + 1
	// Distances to corners (between 0 and 1):
	var x0 = x - float32(i0)
	var x1 = x0 - 1.0
	// Calculate the contribution from the first corner: falloff (1-x0²)⁴
	// times the hashed gradient dotted with the distance.
	var t0 = 1.0 - x0*x0
	t0 *= t0
	n0 = t0 * t0 * grad(hash(i0+seed*7), x0)
	// Calculate the contribution from the second corner
	var t1 = 1.0 - x1*x1
	t1 *= t1
	n1 = t1 * t1 * grad(hash(i1+seed*7), x1)
	// The maximum value of this noise is 8*(3/4)^4 = 2.53125
	// A factor of 0.395 scales to fit exactly within [-1,1]
	return 0.395 * (n0 + n1)
}
/**
 * Permutation table. This is just a random jumble of all numbers 0-255.
 *
 * This produce a repeatable pattern of 256, but <NAME> stated
 * that it is not a problem for graphic texture as the noise features disappear
 * at a distance far enough to be able to see a repeatable pattern of 256.
 *
 * This needs to be exactly the same for all instances on all platforms,
 * so it's easiest to just keep it as static explicit data.
 * This also removes the need for any initialisation of this class.
 *
 * Indexing always goes through hash(), which truncates to a byte, so every
 * access wraps modulo 256 and can never go out of bounds.
 *
 * Note that making this an uint32_t[] instead of a uint8_t[] might make the
 * code run faster on platforms with a high penalty for unaligned single
 * byte addressing. Intel x86 is generally single-byte-friendly, but
 * some other CPUs are faster with 4-aligned reads.
 * However, a char[] is smaller, which avoids cache trashing, and that
 * is probably the most important aspect on most architectures.
 * This array is accessed a *lot* by the noise functions.
 */
var perm = [256]byte{
	151, 160, 137, 91, 90, 15,
	131, 13, 201, 95, 96, 53, 194, 233, 7, 225, 140, 36, 103, 30, 69, 142, 8, 99, 37, 240, 21, 10, 23,
	190, 6, 148, 247, 120, 234, 75, 0, 26, 197, 62, 94, 252, 219, 203, 117, 35, 11, 32, 57, 177, 33,
	88, 237, 149, 56, 87, 174, 20, 125, 136, 171, 168, 68, 175, 74, 165, 71, 134, 139, 48, 27, 166,
	77, 146, 158, 231, 83, 111, 229, 122, 60, 211, 133, 230, 220, 105, 92, 41, 55, 46, 245, 40, 244,
	102, 143, 54, 65, 25, 63, 161, 1, 216, 80, 73, 209, 76, 132, 187, 208, 89, 18, 169, 200, 196,
	135, 130, 116, 188, 159, 86, 164, 100, 109, 198, 173, 186, 3, 64, 52, 217, 226, 250, 124, 123,
	5, 202, 38, 147, 118, 126, 255, 82, 85, 212, 207, 206, 59, 227, 47, 16, 58, 17, 182, 189, 28, 42,
	223, 183, 170, 213, 119, 248, 152, 2, 44, 154, 163, 70, 221, 153, 101, 155, 167, 43, 172, 9,
	129, 22, 39, 253, 19, 98, 108, 110, 79, 113, 224, 232, 178, 185, 112, 104, 218, 246, 97, 228,
	251, 34, 242, 193, 238, 210, 144, 12, 191, 179, 162, 241, 81, 51, 145, 235, 249, 14, 239, 107,
	49, 192, 214, 31, 181, 199, 106, 157, 184, 84, 204, 176, 115, 121, 50, 45, 127, 4, 150, 254,
	138, 236, 205, 93, 222, 114, 67, 29, 24, 72, 243, 141, 128, 195, 78, 66, 215, 61, 156, 180,
}
/**
 * Helper function to hash an integer using the above permutation table
 *
 * This inline function costs around 1ns, and is called N+1 times for a noise of N dimension.
 *
 * Using a real hash function would be better to improve the "repeatability of 256" of the above permutation table,
 * but fast integer Hash functions uses more time and have bad random properties.
 *
 * The byte(i) conversion truncates i modulo 256, so the table lookup is
 * always in range regardless of the input's sign or magnitude.
 *
 * @param[in] i Integer value to hash
 *
 * @return 8-bits hashed value
 */
func hash(i int32) int32 {
	return int32(perm[byte(i)])
}
// grad computes the gradient-dot-residual contribution for 1D noise.
//
// The low four bits of the hash select a gradient: bits 0-2 give a
// magnitude of 1.0 .. 8.0 and bit 3 flips the sign. The result is that
// gradient multiplied by the distance x to the corner. These gradients
// exceed unit length, which is why PerlinNoise1D rescales its sum (by
// 0.395) to fit the classic [-1, 1] range.
func grad(hash int32, x float32) float32 {
	h := hash & 0x0F
	g := 1.0 + float32(h&7) // magnitude 1.0, 2.0, ..., 8.0
	if h&8 != 0 {
		g = -g // bit 3 picks a pseudo-random sign
	}
	return g * x
}
package refconv
import (
"fmt"
"math"
"reflect"
"strconv"
"github.com/cstockton/go-conv/internal/refutil"
)
// convStrToUint64 converts a string to uint64, trying progressively looser
// parses: base-10 unsigned integer first, then a float (negative values
// clamp to 0 and the fraction truncates), then a boolean (true -> 1,
// false -> 0). If all three fail an error is returned.
func (c Conv) convStrToUint64(v string) (uint64, error) {
	if parsed, err := strconv.ParseUint(v, 10, 0); err == nil {
		return parsed, nil
	}
	if parsed, err := strconv.ParseFloat(v, 64); err == nil {
		return uint64(math.Max(0, parsed)), nil
	}
	if parsed, err := c.convStrToBool(v); err == nil {
		if parsed {
			return 1, nil
		}
		return 0, nil
	}
	return 0, fmt.Errorf("cannot convert %#v (type string) to uint64", v)
}
// uintConverter is implemented by values that can convert themselves to
// uint64; Uint64 checks for it before falling back to reflection.
type uintConverter interface {
	Uint64() (uint64, error)
}
// Uint64 attempts to convert the given value to uint64, returns the zero value
// and an error on failure.
//
// Conversion order: string/uint64 fast paths, then the uintConverter
// interface, then reflection on the pointer-indirected value. Signed and
// float inputs clamp at zero; complex values use only the real part; bools
// map to 0/1; and kinds refutil reports as having a length convert to that
// length.
func (c Conv) Uint64(from interface{}) (uint64, error) {
	if T, ok := from.(string); ok {
		return c.convStrToUint64(T)
	} else if T, ok := from.(uint64); ok {
		return T, nil
	}
	// NOTE: this local c shadows the receiver for the rest of the if body.
	if c, ok := from.(uintConverter); ok {
		return c.Uint64()
	}
	value := refutil.IndirectVal(reflect.ValueOf(from))
	kind := value.Kind()
	switch {
	case reflect.String == kind:
		return c.convStrToUint64(value.String())
	case refutil.IsKindUint(kind):
		return value.Uint(), nil
	case refutil.IsKindInt(kind):
		val := value.Int()
		if val < 0 {
			val = 0
		}
		return uint64(val), nil
	case refutil.IsKindFloat(kind):
		return uint64(math.Max(0, value.Float())), nil
	case refutil.IsKindComplex(kind):
		return uint64(math.Max(0, real(value.Complex()))), nil
	case reflect.Bool == kind:
		if value.Bool() {
			return 1, nil
		}
		return 0, nil
	case refutil.IsKindLength(kind):
		return uint64(value.Len()), nil
	}
	return 0, newConvErr(from, "uint64")
}
// Uint attempts to convert the given value to uint, returns the zero value and
// an error on failure. Results above the platform's maximum uint saturate to
// mathMaxUint, which can only happen on 32-bit architectures.
func (c Conv) Uint(from interface{}) (uint, error) {
	if T, ok := from.(uint); ok {
		return T, nil
	}
	to64, err := c.Uint64(from)
	if err != nil {
		return 0, newConvErr(from, "uint")
	}
	if to64 > mathMaxUint {
		to64 = mathMaxUint // only possible on 32bit arch
	}
	return uint(to64), nil
}
// Uint8 attempts to convert the given value to uint8, returns the zero value
// and an error on failure. Results above math.MaxUint8 saturate to 255.
func (c Conv) Uint8(from interface{}) (uint8, error) {
	if u8, ok := from.(uint8); ok {
		return u8, nil
	}
	u64, err := c.Uint64(from)
	if err != nil {
		return 0, newConvErr(from, "uint8")
	}
	if u64 > math.MaxUint8 {
		return math.MaxUint8, nil
	}
	return uint8(u64), nil
}
// Uint16 attempts to convert the given value to uint16, returns the zero value
// and an error on failure. Oversized values saturate at math.MaxUint16.
func (c Conv) Uint16(from interface{}) (uint16, error) {
	if direct, ok := from.(uint16); ok {
		return direct, nil
	}
	wide, err := c.Uint64(from)
	if err != nil {
		return 0, newConvErr(from, "uint16")
	}
	if wide > math.MaxUint16 {
		wide = math.MaxUint16
	}
	return uint16(wide), nil
}
// Uint32 attempts to convert the given value to uint32, returns the zero value
// and an error on failure.
func (c Conv) Uint32(from interface{}) (uint32, error) {
if T, ok := from.(uint32); ok {
return T, nil
}
to64, err := c.Uint64(from)
if err != nil {
return 0, newConvErr(from, "uint32")
}
if to64 > math.MaxUint32 {
to64 = math.MaxUint32
}
return uint32(to64), nil
} | vendor/github.com/cstockton/go-conv/internal/refconv/uint.go | 0.795062 | 0.436682 | uint.go | starcoder |
package idx
import (
"github.com/Fantom-foundation/go-lachesis/common/bigendian"
)
type (
	// Epoch numeration (32-bit).
	Epoch uint32
	// Event numeration (32-bit).
	Event uint32
	// Txn numeration (32-bit).
	Txn uint32
	// Block numeration. Note: 64-bit, unlike the other indexes.
	Block uint64
	// Lamport numeration (32-bit Lamport time).
	Lamport uint32
	// Frame numeration (32-bit).
	Frame uint32
	// Pack numeration (32-bit).
	Pack uint32
)
// Bytes returns the epoch index encoded as big-endian bytes.
func (e Epoch) Bytes() []byte {
	raw := uint32(e)
	return bigendian.Int32ToBytes(raw)
}
// Bytes returns the event index encoded as big-endian bytes.
func (e Event) Bytes() []byte {
	raw := uint32(e)
	return bigendian.Int32ToBytes(raw)
}
// Bytes returns the transaction index encoded as big-endian bytes.
func (t Txn) Bytes() []byte {
	raw := uint32(t)
	return bigendian.Int32ToBytes(raw)
}
// Bytes returns the block index encoded as big-endian bytes (8 bytes,
// since Block is 64-bit).
func (b Block) Bytes() []byte {
	raw := uint64(b)
	return bigendian.Int64ToBytes(raw)
}
// Bytes returns the Lamport time encoded as big-endian bytes.
func (l Lamport) Bytes() []byte {
	raw := uint32(l)
	return bigendian.Int32ToBytes(raw)
}
// Bytes returns the pack index encoded as big-endian bytes.
func (p Pack) Bytes() []byte {
	raw := uint32(p)
	return bigendian.Int32ToBytes(raw)
}
// Bytes returns the frame index encoded as big-endian bytes.
func (f Frame) Bytes() []byte {
	raw := uint32(f)
	return bigendian.Int32ToBytes(raw)
}
// BytesToEpoch decodes big-endian bytes into an epoch index.
func BytesToEpoch(b []byte) Epoch {
	raw := bigendian.BytesToInt32(b)
	return Epoch(raw)
}
// BytesToEvent decodes big-endian bytes into an event index.
func BytesToEvent(b []byte) Event {
	raw := bigendian.BytesToInt32(b)
	return Event(raw)
}
// BytesToTxn decodes big-endian bytes into a transaction index.
func BytesToTxn(b []byte) Txn {
	raw := bigendian.BytesToInt32(b)
	return Txn(raw)
}
// BytesToBlock decodes big-endian bytes into a block index.
func BytesToBlock(b []byte) Block {
	raw := bigendian.BytesToInt64(b)
	return Block(raw)
}
// BytesToLamport converts bytes to a Lamport time index.
func BytesToLamport(b []byte) Lamport {
	return Lamport(bigendian.BytesToInt32(b))
}
// BytesToFrame converts bytes to a frame index.
func BytesToFrame(b []byte) Frame {
	return Frame(bigendian.BytesToInt32(b))
}
// BytesToPack converts bytes to a pack index.
func BytesToPack(b []byte) Pack {
	return Pack(bigendian.BytesToInt32(b))
}
// MaxLamport return max value
func MaxLamport(x, y Lamport) Lamport {
if x > y {
return x
}
return y
} | inter/idx/index.go | 0.724968 | 0.55652 | index.go | starcoder |
package x32
import (
"fmt"
)
// normalisationFunc is a transfer function for converting fx param values.
type normalisationFunc func(float32) float32
// paramInfo represents a VST fx parameter.
type paramInfo struct {
// x32AddrFormat is a format string for the X32 address suffix.
x32AddrFormat string
// normToX32 converts an fx parameter value to its X32 form,
normToX32 normalisationFunc
// plugToNorm converts a VST fx parameter value to its normalised form.
plugToNorm normalisationFunc
// format is a function which can perform the necessary type conversions on
// the fx parameter value.
format func(interface{}) interface{}
}
// plugParams describes the set of VST plugin parameters which correspond to
// the X32 controls available.
type plugParams struct {
// plugName is the VST name.
plugName string
// EQ X32 band -> Param indices.
// VST parameter indices are the parameter number N in Reaper OSC addresses:
// /fx/./fxparam/N/value
// The values in the arrays below correspond to the zero-based X32 eq bands.
// eqTypeBandParam holds the eq band type parameters corresponding to the X32 eq bands
eqTypeBandParam []int32
// eqFreqBandParam holds the eq band frequency parameters corresponding to the X32 eq bands
eqFreqBandParam []int32
// eqGainBandParam holds the eq band gain parameters corresponding to the X32 eq bands
eqGainBandParam []int32
// eqQBandParam holds the eq band Q parameters corresponding to the X32 eq bands
eqQBandParam []int32
// eqEnableBandParam holds the eq band enable parameters corresponding to the X32 eq bands
eqEnableBandParam []int32
// EQ plug Param indices -> X32 band/fx info
eqParamInfo map[int32]paramInfo
// TODO eqTypeMap
// eqFreqToPlug converts from Hz to the VST plugin representation for that frequency.
eqFreqToPlug normalisationFunc
// eqFreqFromPlug converts from the VST plugin representation of frequency to Hz.
eqFreqFromPlug normalisationFunc
// eqGainToPlug converts from dB to the VST plugin representation for that gain.
eqGainToPlug normalisationFunc
// eqGainFromPlug converts from the VST plugin representation of gain to dB.
eqGainFromPlug normalisationFunc
// eqQToPlug converts from octaves to the VST plugin representation for that Q.
eqQToPlug normalisationFunc
// eqQFromPlug converts from the VST plugin representation of Q to octaves.
eqQFromPlug normalisationFunc
// eqTypeToPlug converts from X32 eq type to the VST plugin representation for that type.
eqTypeToPlug normalisationFunc
// eqTypeFromPlug converts from the VST plugin representation of eq type to X32 id.
eqTypeFromPlug normalisationFunc
gateEnableParam int32
gateThresholdParam int32
gateRangeParam int32
gateAttackParam int32
gateHoldParam int32
gateReleaseParam int32
}
type fxInstance struct {
vstIndex int32
params *plugParams
}
// fxMap represents the VST<->X32 mapping of effects.
type fxMap struct {
eq *fxInstance
gate *fxInstance
dyn *fxInstance
}
func (f *fxMap) String() string {
nilOrIdx := func(f *fxInstance) string {
if f == nil {
return "<nil>"
}
return fmt.Sprintf("@%d", f.vstIndex)
}
return fmt.Sprintf("fxMap:{eq:%s gate:%s dyn:%s}", nilOrIdx(f.eq), nilOrIdx(f.gate), nilOrIdx(f.dyn))
} | plugs.go | 0.526343 | 0.571109 | plugs.go | starcoder |
package simulation
import (
"bytes"
"fmt"
"github.com/cosmos/cosmos-sdk/codec"
sdk "github.com/cosmos/cosmos-sdk/types"
"github.com/cosmos/cosmos-sdk/types/kv"
"github.com/daodiseomoney/core/x/treasury/types"
)
// NewDecodeStore returns a decoder function closure that unmarshals the KVPair's
// Value to the corresponding treasury type.
func NewDecodeStore(cdc codec.Codec) func(kvA, kvB kv.Pair) string {
return func(kvA, kvB kv.Pair) string {
switch {
case bytes.Equal(kvA.Key[:1], types.TaxRateKey):
var taxRateA, taxRateB sdk.DecProto
cdc.MustUnmarshal(kvA.Value, &taxRateA)
cdc.MustUnmarshal(kvB.Value, &taxRateB)
return fmt.Sprintf("%v\n%v", taxRateA, taxRateB)
case bytes.Equal(kvA.Key[:1], types.RewardWeightKey):
var rewardWeightA, rewardWeightB sdk.DecProto
cdc.MustUnmarshal(kvA.Value, &rewardWeightA)
cdc.MustUnmarshal(kvB.Value, &rewardWeightB)
return fmt.Sprintf("%v\n%v", rewardWeightA, rewardWeightB)
case bytes.Equal(kvA.Key[:1], types.TaxCapKey):
var taxCapA, taxCapB sdk.IntProto
cdc.MustUnmarshal(kvA.Value, &taxCapA)
cdc.MustUnmarshal(kvB.Value, &taxCapB)
return fmt.Sprintf("%v\n%v", taxCapA, taxCapB)
case bytes.Equal(kvA.Key[:1], types.TaxProceedsKey):
var taxProceedsA, taxProceedsB types.EpochTaxProceeds
cdc.MustUnmarshal(kvA.Value, &taxProceedsA)
cdc.MustUnmarshal(kvB.Value, &taxProceedsB)
return fmt.Sprintf("%v\n%v", taxProceedsA.TaxProceeds, taxProceedsB.TaxProceeds)
case bytes.Equal(kvA.Key[:1], types.EpochInitialIssuanceKey):
var epochInitialIssuanceA, epochInitialIssuanceB types.EpochInitialIssuance
cdc.MustUnmarshal(kvA.Value, &epochInitialIssuanceA)
cdc.MustUnmarshal(kvB.Value, &epochInitialIssuanceB)
return fmt.Sprintf("%v\n%v", epochInitialIssuanceA.Issuance, epochInitialIssuanceB.Issuance)
case bytes.Equal(kvA.Key[:1], types.TRKey):
var TaxRateA, TaxRateB sdk.DecProto
cdc.MustUnmarshal(kvA.Value, &TaxRateA)
cdc.MustUnmarshal(kvB.Value, &TaxRateB)
return fmt.Sprintf("%v\n%v", TaxRateA, TaxRateB)
case bytes.Equal(kvA.Key[:1], types.SRKey):
var SeigniorageRateA, SeigniorageRateB sdk.DecProto
cdc.MustUnmarshal(kvA.Value, &SeigniorageRateA)
cdc.MustUnmarshal(kvB.Value, &SeigniorageRateB)
return fmt.Sprintf("%v\n%v", SeigniorageRateA, SeigniorageRateB)
case bytes.Equal(kvA.Key[:1], types.TSLKey):
var TotalStakedLunaA, TotalStakedLunaB sdk.IntProto
cdc.MustUnmarshal(kvA.Value, &TotalStakedLunaA)
cdc.MustUnmarshal(kvB.Value, &TotalStakedLunaB)
return fmt.Sprintf("%v\n%v", TotalStakedLunaA, TotalStakedLunaB)
default:
panic(fmt.Sprintf("invalid oracle key prefix %X", kvA.Key[:1]))
}
}
} | x/treasury/simulation/decoder.go | 0.632616 | 0.400105 | decoder.go | starcoder |
package app
// These are usage (help) texts shown when the app is started without the required arguments.
// Note: Using an indentation of two spaces as it provides a nice "look" in the console.
var (
InitUsage = `Write an empty template for a configuration file in TOML format.
The empty configuration file is a good starting point, but you will need to fill in the details of the different
sections before autobot will be able to perform any work.`
VersionUsage = `Display the current build version of autobot.`
ServeUsage = `Start autobot as a web server (micro service).
The web service offers these endpoints:
- GET / responds with a service status
- GET /vehiclestore/status responds with a status of the vehicle store
- GET /lookup performs a vehicle lookup. Query params: country, hash, regnr or vin
- PATCH /vehicle enables/disables a vehicle based on the given operation
Example of a vehicle lookup:
- GET /lookup?regnr=BK33877&country=dk
While the server is running, a scheduler will periodically check for new vehicle data from its source(s).
This happens according to the cron-style time expression given in the config file.`
SyncUsage = `Synchronise manually with a specific data source.
The parameter "-p" (or "--provider") should specify the name of a provider to sync with.
The name must match the provider name from the config file; that would be "[Providers.NAME]".
Example:
if the config file contains "[Providers.TEST]", among others, and you want to run a synchronisation with TEST,
just use "-p TEST".
Please be patient as synchronisation may take a long time.`
LookupUsage = `Perform a vehicle lookup based on registration or VIN.
Formatting is currently limited to a human readable format.`
ClearUsage = `Clear the vehicle store of all data.
You need to run the sync command again before any vehicle data will be available.`
StatusUsage = `Show status of the vehicle store.
Shows some useful stats such as number of vehicles, time of last synchronisation etc.`
DisableUsage = `Disable a vehicle.
The disabled vehicle will only appear in a lookup if the option "--disabled" is used.
Disabling vehicles does not affect synchronisation.`
EnableUsage = `Enable a vehicle.
The reenabled vehicle will reappear in vehicle lookups as normal.
Enabling vehicles does not affect synchronisation.`
QueryUsage = `Query for vehicles.
Searches for vehicles using various criteria for text matching and sorting.
For now, only an upper limit of the number of vehicles to return, is supported.
It's the intention to support multiple output formats, but currently, just a fixed CSV format is supported.`
) | app/usage.go | 0.749087 | 0.616099 | usage.go | starcoder |
package filter
import (
"errors"
"fmt"
"image"
"image/color"
"github.com/fairhive-labs/go-pixelart/internal/colorutils"
)
const (
Min int = 3
)
var (
errNilMatrix = errors.New("kernel matrix cannot be nil")
errEmptyMatrix = errors.New("kernel matrix cannot be empty")
errKernelSize = fmt.Errorf("unsupported kernel size, min kernel size = %d", Min)
errMalformatedMatrix = errors.New("kernel size and matrix length are incompatible")
errIncompatibleFactor = errors.New("kernel factor cannot be 0")
Identity_3x3 = kernel{
3,
[]int{0, 0, 0, 0, 1, 0, 0, 0, 0},
1,
}
RidgeDetection_3x3_soft = kernel{
3,
[]int{0, -1, 0, -1, 4, -1, 0, -1, 0},
1,
}
RidgeDetection_3x3_hard = kernel{
3,
[]int{-1, -1, -1, -1, 8, -1, -1, -1, -1},
1,
}
Sharpen_3x3 = kernel{
3,
[]int{0, -1, 0, -1, 5, -1, 0, -1, 0},
1,
}
Gauss_3x3 = kernel{
3,
[]int{1, 1, 1, 1, 1, 1, 1, 1, 1},
9,
}
)
type matrix []int
type kernel struct {
size int
matrix matrix
factor int
}
type convolutionFilter struct {
k *kernel
pre, post TransformColor
}
func NewConvolutionFilter(k *kernel, pre, post TransformColor) *convolutionFilter {
return &convolutionFilter{k, pre, post}
}
func (f *convolutionFilter) Process(src *image.Image) *image.RGBA {
b := (*src).Bounds()
p := image.NewRGBA(image.Rect(0, 0, b.Max.X, b.Max.Y))
for x := 0; x < b.Max.X; x++ {
for y := 0; y < b.Max.Y; y++ {
c := processConvolution(src, x, y, b.Max.X, b.Max.Y, f.k, f.pre, f.post)
p.Set(x, y, c)
}
}
return p
}
func NewKernel(s int, m matrix, f int) (*kernel, error) {
if m == nil {
return nil, errNilMatrix
}
if len(m) == 0 {
return nil, errEmptyMatrix
}
if s < Min {
fmt.Printf("kernel size = %d\n", s)
return nil, errKernelSize
}
if s%2 == 0 {
fmt.Printf("kernel size = %d, kernel size must be an odd number\n", s)
return nil, errKernelSize
}
if s*s != len(m) {
fmt.Printf("kernel matrix contains %d elements and shoud contain %d elements\n", len(m), s*s)
return nil, errMalformatedMatrix
}
if f == 0 {
fmt.Printf("incompatible factor %d", f)
return nil, errIncompatibleFactor
}
return &kernel{s, m, f}, nil
}
func processConvolution(img *image.Image, x, y, xmax, ymax int, k *kernel, preProcessing, postProcessing TransformColor) color.Color {
if postProcessing == nil {
postProcessing = Identity
}
if k == nil {
return postProcessing(getPixel(preProcessing, img, x, y))
}
s := k.size
rs := 0 // red accumulator
gs := 0 // green accumulator
bs := 0 // blue accumulator
for i := 0; i < s; i++ {
for j := 0; j < s; j++ {
var c color.Color
if i+x-s/2 >= 0 && j+y-s/2 >= 0 && i+x-s/2 < xmax && j+y-s/2 < ymax {
c = getPixel(preProcessing, img, i+x-s/2, j+y-s/2)
} else {
c = getPixel(preProcessing, img, x, y)
}
r, g, b, _ := colorutils.RgbaValues(c)
rs += k.matrix[j*s+i] * int(r)
gs += k.matrix[j*s+i] * int(g)
bs += k.matrix[j*s+i] * int(b)
}
}
if k.factor != 1 {
rs /= k.factor
gs /= k.factor
bs /= k.factor
}
return postProcessing(color.RGBA{uint8(correctValue(rs)), uint8(correctValue(gs)), uint8(correctValue(bs)), 0xFF})
}
func getPixel(t TransformColor, img *image.Image, x, y int) color.Color {
if t == nil {
t = Identity
}
return t((*img).At(x, y))
}
func correctValue(x int) int {
if x < 0 {
return 0
}
if x > 0xFF {
return 0xFF
}
return x
}
func Gauss(s int) (*kernel, error) {
if s < Min {
fmt.Printf("kernel size = %d\n", s)
return nil, errKernelSize
}
if s%2 == 0 {
fmt.Printf("kernel size = %d, kernel size must be an odd number\n", s)
return nil, errKernelSize
}
n := s * s
m := make([]int, n)
for i := range m {
m[i] = 1
}
return NewKernel(s, m, n)
} | internal/filter/convolution.go | 0.533154 | 0.401219 | convolution.go | starcoder |
package generic
import (
"net/url"
"reflect"
"strconv"
"strings"
"time"
)
// asBool converts a specified value to boolean value.
func asBool(x interface{}) (result bool, isValid ValidFlag, err error) {
switch t := x.(type) {
case nil:
return result, false, nil
case int, int8, int16, int32, int64:
result = reflect.ValueOf(t).Int() != 0
case uint, uint8, uint16, uint32, uint64:
result = reflect.ValueOf(t).Uint() != 0
case float32:
result = x.(float32) != 0
case float64:
result = x.(float64) != 0
case bool:
result = x.(bool)
case string:
b, err := strconv.ParseBool(x.(string))
if err != nil {
return result, false, ErrInvalidGenericValue{Value: x}
}
result = b
default:
return result, false, ErrInvalidGenericValue{Value: x}
}
return result, true, nil
}
// asBool converts a specified value to float64 value.
func asFloat(x interface{}) (result float64, isValid ValidFlag, err error) {
switch v := x.(type) {
case nil:
return result, false, nil
case int:
result = float64(v)
case int8:
result = float64(v)
case int16:
result = float64(v)
case int32:
result = float64(v)
case int64:
result = float64(v)
case uint:
result = float64(v)
case uint8:
result = float64(v)
case uint16:
result = float64(v)
case uint32:
result = float64(v)
case uint64:
result = float64(v)
case float32:
result = float64(v)
case float64:
result = v
case bool:
if v {
result = 1
} else {
result = 0
}
case string:
f, err := strconv.ParseFloat(v, 64)
if err != nil {
return result, false, ErrInvalidGenericValue{Value: x}
}
result = f
default:
return result, false, ErrInvalidGenericValue{Value: x}
}
return result, true, nil
}
// asBool converts a specified value to int64 value.
func asInt(x interface{}) (result int64, isValid ValidFlag, err error) {
switch t := x.(type) {
case nil:
return result, false, nil
case int, int8, int16, int32, int64:
result = reflect.ValueOf(t).Int()
case uint, uint8, uint16, uint32, uint64:
result = int64(reflect.ValueOf(t).Uint())
case float32:
result = int64(x.(float32))
case float64:
result = int64(x.(float64))
case bool:
b := x.(bool)
if b {
result = 1
} else {
result = 0
}
case string:
result, err = strconv.ParseInt(x.(string), 10, 64)
if err != nil {
return 0, false, ErrInvalidGenericValue{Value: x}
}
default:
return result, false, ErrInvalidGenericValue{Value: x}
}
return result, true, nil
}
// asBool converts a specified value to string value.
func asString(x interface{}) (result string, isValid ValidFlag, err error) {
switch t := x.(type) {
case nil:
return result, false, nil
case int, int8, int16, int32, int64:
result = strconv.FormatInt(reflect.ValueOf(t).Int(), 10)
case uint, uint8, uint16, uint32, uint64:
result = strconv.FormatUint(reflect.ValueOf(t).Uint(), 10)
case float32, float64:
fs := strconv.FormatFloat(reflect.ValueOf(t).Float(), 'f', 10, 64)
result = strings.TrimRight(strings.TrimRight(fs, "0"), ".")
case bool:
result = strconv.FormatBool(x.(bool))
case string:
result = x.(string)
default:
return result, false, ErrInvalidGenericValue{Value: x}
}
return result, true, nil
}
// asBool converts a specified value to time.Time value.
func asTime(x interface{}) (result time.Time, isValid ValidFlag, err error) {
switch v := x.(type) {
case nil:
return result, false, nil
case time.Time:
result = v
if result.IsZero() {
return result, true, nil
}
default:
return result, false, ErrInvalidGenericValue{Value: x}
}
return result, true, nil
}
// asTimestamp converts a specified value to time.Time value.
func asTimestamp(x interface{}) (result time.Time, isValid ValidFlag, err error) {
return asTimestampWithFunc(x, func(i int64) time.Time {
return time.Unix(i, 0)
})
}
// asTimestampNanoseconds converts a specified value to time.Time value.
func asTimestampNanoseconds(x interface{}) (result time.Time, isValid ValidFlag, err error) {
return asTimestampWithFunc(x, func(i int64) time.Time {
return time.Unix(0, i)
})
}
// asTimestampMilliseconds converts a specified value to time.Time value.
func asTimestampMilliseconds(x interface{}) (result time.Time, isValid ValidFlag, err error) {
return asTimestampWithFunc(x, func(i int64) time.Time {
return time.Unix(0, i*1000000)
})
}
// asBool converts a specified value to uint64 value.
func asUint(x interface{}) (result uint64, isValid ValidFlag, err error) {
switch t := x.(type) {
case nil:
return 0, false, nil
case int, int8, int16, int32, int64:
i := reflect.ValueOf(t).Int()
if i < 0 {
return result, false, ErrInvalidGenericValue{Value: x}
}
result = uint64(i)
case uint, uint8, uint16, uint32, uint64:
result = reflect.ValueOf(t).Uint()
case float32:
f32 := x.(float32)
if f32 < 0 {
return result, false, ErrInvalidGenericValue{Value: x}
}
result = uint64(f32)
case float64:
f64 := x.(float64)
if f64 < 0 {
return result, false, ErrInvalidGenericValue{Value: x}
}
result = uint64(f64)
case bool:
if x.(bool) {
result = 1
} else {
result = 0
}
case string:
u64, err := strconv.ParseUint(x.(string), 10, 64)
if err != nil {
return result, false, ErrInvalidGenericValue{Value: x}
}
result = u64
default:
return result, false, ErrInvalidGenericValue{Value: x}
}
return result, true, nil
}
func asTimestampWithFunc(x interface{}, f func(i int64) time.Time) (result time.Time, isValid ValidFlag, err error) {
var i int64
switch t := x.(type) {
case nil:
return result, false, nil
case time.Time:
result = x.(time.Time)
if result.IsZero() {
return result, true, nil
}
return result, true, nil
case string:
result, err = time.Parse(time.RFC3339Nano, x.(string))
return result, err == nil, err
case int, int8, int16, int32, int64:
i = reflect.ValueOf(t).Int()
case uint, uint8, uint16, uint32, uint64:
i = int64(reflect.ValueOf(t).Uint())
case float32:
i = int64(x.(float32))
case float64:
i = int64(x.(float64))
default:
return result, false, ErrInvalidGenericValue{Value: x}
}
if i < 0 {
return result, false, ErrInvalidGenericValue{Value: x}
}
return f(i), true, nil
}
func asURL(x interface{}) (result *url.URL, isValid ValidFlag, err error) {
switch v := x.(type) {
case nil:
return nil, false, nil
case *url.URL:
result = v
case string:
result, err = url.Parse(v)
default:
err = ErrInvalidGenericValue{Value: x}
}
return result, (err == nil), err
} | convert.go | 0.687105 | 0.452354 | convert.go | starcoder |
package function
import (
"errors"
kanzi "github.com/flanglet/kanzi-go"
)
const (
_TRANSFORM_SKIP_MASK = 0xFF
)
// ByteTransformSequence encapsulates a sequence of transforms or functions in a function
type ByteTransformSequence struct {
transforms []kanzi.ByteTransform // transforms or functions
skipFlags byte // skip transforms
}
// NewByteTransformSequence creates a new instance of NewByteTransformSequence
// containing the transforms provided as parameter.
func NewByteTransformSequence(transforms []kanzi.ByteTransform) (*ByteTransformSequence, error) {
if transforms == nil {
return nil, errors.New("Invalid null transforms parameter")
}
if len(transforms) == 0 || len(transforms) > 8 {
return nil, errors.New("Only 1 to 8 transforms allowed")
}
this := new(ByteTransformSequence)
this.transforms = transforms
this.skipFlags = 0
return this, nil
}
// Forward applies the function to the src and writes the result
// to the destination. Runs Forward on each transform in the sequence.
// Returns number of bytes read, number of bytes
// written and possibly an error.
func (this *ByteTransformSequence) Forward(src, dst []byte) (uint, uint, error) {
if len(src) == 0 {
return 0, 0, nil
}
blockSize := len(src)
length := uint(blockSize)
requiredSize := this.MaxEncodedLen(blockSize)
this.skipFlags = 0
sa := [2]*[]byte{&src, &dst}
saIdx := 0
var err error
for i, t := range this.transforms {
in := *sa[saIdx]
out := *sa[saIdx^1]
// Check that the output buffer has enough room. If not, allocate a new one.
if len(out) < requiredSize {
buf := make([]byte, requiredSize)
sa[saIdx^1] = &buf
out = *sa[saIdx^1]
}
var err1 error
var oIdx uint
// Apply forward transform
if _, oIdx, err1 = t.Forward(in[0:length], out); err1 != nil {
// Transform failed. Either it does not apply to this type
// of data or a recoverable error occurred => revert
if &src != &dst {
copy(out[0:length], in[0:length])
}
oIdx = length
this.skipFlags |= (1 << (7 - uint(i)))
if err == nil {
err = err1
}
}
length = oIdx
saIdx ^= 1
}
for i := len(this.transforms); i < 8; i++ {
this.skipFlags |= (1 << (7 - uint(i)))
}
if saIdx != 1 {
in := *sa[0]
out := *sa[1]
copy(out, in[0:length])
}
if this.skipFlags != _TRANSFORM_SKIP_MASK {
err = nil
}
return uint(blockSize), length, err
}
// Inverse applies the reverse function to the src and writes the result
// to the destination. Runs Inverse on each transform in the sequence.
// Returns number of bytes read, number of bytes
// written and possibly an error.
func (this *ByteTransformSequence) Inverse(src, dst []byte) (uint, uint, error) {
if len(src) == 0 {
return 0, 0, nil
}
blockSize := len(src)
length := uint(blockSize)
if this.skipFlags == _TRANSFORM_SKIP_MASK {
if &src[0] != &dst[0] {
copy(dst, src)
}
return length, length, nil
}
sa := [2]*[]byte{&src, &dst}
saIdx := 0
var res error
// Process transforms sequentially in reverse order
for i := len(this.transforms) - 1; i >= 0; i-- {
if this.skipFlags&(1<<(7-uint(i))) != 0 {
continue
}
t := this.transforms[i]
in := *sa[saIdx]
saIdx ^= 1
out := *sa[saIdx]
// Apply inverse transform
_, length, res = t.Inverse(in[0:length], out[0:cap(out)])
if res != nil {
break
}
}
if saIdx != 1 {
in := *sa[0]
out := *sa[1]
copy(out, in[0:length])
}
return uint(blockSize), length, res
}
// MaxEncodedLen returns the max size required for the encoding output buffer
func (this ByteTransformSequence) MaxEncodedLen(srcLen int) int {
requiredSize := srcLen
for _, t := range this.transforms {
if f, isFunction := t.(kanzi.ByteFunction); isFunction == true {
reqSize := f.MaxEncodedLen(requiredSize)
if reqSize > requiredSize {
requiredSize = reqSize
}
}
}
return requiredSize
}
// Len returns the number of functions in the sequence (in [0..8])
func (this *ByteTransformSequence) Len() int {
return len(this.transforms)
}
// SkipFlags returns the flags describing which function to
// skip (bit set to 1)
func (this *ByteTransformSequence) SkipFlags() byte {
return this.skipFlags
}
// SetSkipFlags sets the flags describing which function to skip
func (this *ByteTransformSequence) SetSkipFlags(flags byte) bool {
this.skipFlags = flags
return true
} | function/ByteTransformSequence.go | 0.83152 | 0.677899 | ByteTransformSequence.go | starcoder |
package phy
import (
"fmt"
"math"
"github.com/Tnze/go-mc/bot/path"
"github.com/Tnze/go-mc/bot/world"
"github.com/Tnze/go-mc/bot/world/entity/player"
"github.com/Tnze/go-mc/data/block/shape"
)
const (
	// Player collision box dimensions, in blocks.
	playerWidth = 0.6
	playerHeight = 1.8
	// resetVel is the deadzone below which a velocity component snaps to 0.
	resetVel = 0.003
	// Maximum per-tick change of the look angles (degrees; yaw wraps at 360
	// in modYaw).
	maxYawChange = 11
	maxPitchChange = 7
	// stepHeight is the highest ledge the player can step up without jumping.
	stepHeight = 0.6
	// minJumpTicks is the minimum number of ticks between two jumps.
	minJumpTicks = 14
	// Ladder movement limits: max speed while in a ladder block, and the
	// upward speed applied when pushing into a ladder.
	ladderMaxSpeed = 0.15
	ladderClimbSpeed = 0.2
	// Per-tick physics coefficients (applied in Tick).
	gravity = 0.08
	drag = 0.98
	acceleration = 0.02
	inertia = 0.91
	// slipperiness scales inertia while standing on a solid block.
	slipperiness = 0.6
)
// World represents a provider of information about the surrounding world.
type World interface {
	// GetBlockStatus returns the block state at the given block coordinates.
	GetBlockStatus(x, y, z int) world.BlockStatus
}
// Surrounds represents the blocks surrounding the player (Y, Z, X).
// It holds the collision boxes collected by State.surroundings.
type Surrounds []AABB
// State tracks physics state.
type State struct {
	// player state.
	Pos path.Point // position: X/Z at the bounding-box center, Y at its bottom (see BB)
	Vel path.Point // per-tick velocity
	Yaw, Pitch float64 // look angles in degrees
	lastJump uint32 // tick at which the last jump started (rate-limits jumping)
	// player state flags.
	onGround bool // set by tickPosition when resting on a block
	collision struct {
		vertical bool
		horizontal bool
	}
	tick uint32 // monotonically increasing tick counter
	Run bool // simulation gate; enabled by ServerPositionUpdate
}
// ServerPositionUpdate resets the local physics state to the authoritative
// position and look received from the server, zeroes velocity and collision
// flags, and enables the simulation. It always returns nil.
func (s *State) ServerPositionUpdate(player player.Pos, w World) error {
	// Log the correction delta before overwriting the local state.
	fmt.Printf("TELEPORT (y=%0.2f, velY=%0.3f): %0.2f, %0.2f, %0.2f\n", s.Pos.Y, s.Vel.Y, player.X-s.Pos.X, player.Y-s.Pos.Y, player.Z-s.Pos.Z)
	s.Pos.X, s.Pos.Y, s.Pos.Z = player.X, player.Y, player.Z
	s.Yaw = float64(player.Yaw)
	s.Pitch = float64(player.Pitch)
	s.Vel = path.Point{}
	s.onGround = false
	s.collision.vertical = false
	s.collision.horizontal = false
	s.Run = true
	return nil
}
// abs returns the absolute difference between i1 and i2.
func abs(i1, i2 int) int {
	if i1 > i2 {
		return i1 - i2
	}
	return i2 - i1
}
// surroundings collects the world-space collision boxes of all non-air
// blocks inside the query box. The Y range is expanded by one block below
// and above; X and Z cover the blocks the query box overlaps.
func (s *State) surroundings(query AABB, w World) Surrounds {
	minY, maxY := int(math.Floor(query.Y.Min))-1, int(math.Floor(query.Y.Max))+1
	minZ, maxZ := int(math.Floor(query.Z.Min)), int(math.Floor(query.Z.Max))+1
	minX, maxX := int(math.Floor(query.X.Min)), int(math.Floor(query.X.Max))+1
	// Capacity guess: two collision boxes per block volume.
	out := Surrounds(make([]AABB, 0, abs(maxY, minY)*abs(maxZ, minZ)*abs(maxX, minX)*2))
	for y := minY; y < maxY; y++ {
		for z := minZ; z < maxZ; z++ {
			for x := minX; x < maxX; x++ {
				bStateID := w.GetBlockStatus(x, y, z)
				if !path.AirLikeBlock(bStateID) {
					bbs, err := shape.CollisionBoxes(bStateID)
					if err != nil {
						// NOTE(review): panics on an unknown block state;
						// consider skipping the block or returning an error.
						panic(err)
					}
					// Translate each box from block-local to world coordinates.
					for _, box := range bbs {
						out = append(out, AABB{
							X: MinMax{Min: box.Min.X, Max: box.Max.X},
							Y: MinMax{Min: box.Min.Y, Max: box.Max.Y},
							Z: MinMax{Min: box.Min.Z, Max: box.Max.Z},
							Block: bStateID,
						}.Offset(float64(x), float64(y), float64(z)))
					}
				}
			}
		}
	}
	return out
}
// BB returns the player's axis-aligned bounding box at the current position:
// centered on X/Z, with its bottom at Pos.Y.
func (s *State) BB() AABB {
	half := playerWidth / 2
	box := AABB{
		X: MinMax{Min: -half, Max: half},
		Y: MinMax{Max: playerHeight},
		Z: MinMax{Min: -half, Max: half},
	}
	return box.Offset(s.Pos.X, s.Pos.Y, s.Pos.Z)
}
// Position reports the current physics state in the player.Pos representation.
func (s *State) Position() player.Pos {
	p := player.Pos{
		X: s.Pos.X,
		Y: s.Pos.Y,
		Z: s.Pos.Z,
		Yaw: float32(s.Yaw),
		Pitch: float32(s.Pitch),
		OnGround: s.onGround,
	}
	return p
}
// Tick advances the simulation by one tick: applies inputs, resolves
// collisions, then applies ladder climb, gravity, drag and friction.
// It is a no-op until Run is set (first server position update).
func (s *State) Tick(input path.Inputs, w World) error {
	s.tick++
	if !s.Run {
		return nil
	}
	// Deliberately shadow the package constants so ground friction can
	// adjust the local copies for this tick only.
	var inertia = inertia
	var acceleration = acceleration
	// Standing on a solid block: scale inertia by slipperiness and derive
	// the matching acceleration from it.
	if below := w.GetBlockStatus(int(math.Floor(s.Pos.X)), int(math.Floor(s.Pos.Y))-1, int(math.Floor(s.Pos.Z))); s.onGround && !path.AirLikeBlock(below) {
		inertia *= slipperiness
		acceleration = 0.1 * (0.1627714 / (inertia * inertia * inertia))
	}
	s.tickVelocity(input, inertia, acceleration, w)
	s.tickPosition(w)
	// Climb when pushed horizontally into a ladder block.
	if path.IsLadder(w.GetBlockStatus(int(math.Floor(s.Pos.X)), int(math.Floor(s.Pos.Y)), int(math.Floor(s.Pos.Z)))) && s.collision.horizontal {
		s.Vel.Y = ladderClimbSpeed
	}
	// Gravity
	s.Vel.Y -= gravity
	// Drag & friction.
	s.Vel.Y *= drag
	s.Vel.X *= inertia
	s.Vel.Z *= inertia
	return nil
}
// tickVelocity updates the velocity from this tick's inputs: snaps tiny
// components to zero, applies look and movement inputs, then clamps all
// components while inside a ladder block.
func (s *State) tickVelocity(input path.Inputs, inertia, acceleration float64, w World) {
	// Deadzone velocities when they get too low.
	if math.Abs(s.Vel.X) < resetVel {
		s.Vel.X = 0
	}
	if math.Abs(s.Vel.Y) < resetVel {
		s.Vel.Y = 0
	}
	if math.Abs(s.Vel.Z) < resetVel {
		s.Vel.Z = 0
	}
	s.applyLookInputs(input)
	s.applyPosInputs(input, acceleration, inertia)
	// Clamp every velocity component to the ladder speed limit while the
	// player's block is a ladder.
	lower := w.GetBlockStatus(int(math.Floor(s.Pos.X)), int(math.Floor(s.Pos.Y)), int(math.Floor(s.Pos.Z)))
	if path.IsLadder(lower) {
		s.Vel.X = math.Min(math.Max(-ladderMaxSpeed, s.Vel.X), ladderMaxSpeed)
		s.Vel.Z = math.Min(math.Max(-ladderMaxSpeed, s.Vel.Z), ladderMaxSpeed)
		s.Vel.Y = math.Min(math.Max(-ladderMaxSpeed, s.Vel.Y), ladderMaxSpeed)
	}
}
// applyLookInputs steers the current yaw and pitch towards the requested
// look direction, limited to the per-tick maximum angle changes.
func (s *State) applyLookInputs(input path.Inputs) {
	// A NaN yaw means there is no yaw target this tick.
	if !math.IsNaN(input.Yaw) {
		deltaYaw := modYaw(input.Yaw, s.Yaw)
		deltaYaw = math.Max(-maxYawChange, math.Min(maxYawChange, deltaYaw))
		s.Yaw += deltaYaw
	}
	deltaPitch := input.Pitch - s.Pitch
	deltaPitch = math.Max(-maxPitchChange, math.Min(maxPitchChange, deltaPitch))
	s.Pitch += deltaPitch
}
// applyPosInputs applies jump and horizontal throttle inputs to the velocity.
// The inertia parameter is accepted for signature compatibility but unused.
func (s *State) applyPosInputs(input path.Inputs, acceleration, inertia float64) {
	canJump := input.Jump && s.onGround && s.tick > s.lastJump+minJumpTicks
	if canJump {
		s.lastJump = s.tick
		s.Vel.Y = 0.42
	}
	throttle := math.Sqrt(input.ThrottleX*input.ThrottleX + input.ThrottleZ*input.ThrottleZ)
	// Ignore negligible throttle.
	if throttle < 0.01 {
		return
	}
	// Normalize (for throttle > 1) and scale by acceleration.
	scale := acceleration / math.Max(throttle, 1)
	s.Vel.X += input.ThrottleX * scale
	s.Vel.Z += input.ThrottleZ * scale
}
// tickPosition integrates the velocity into the position, resolving block
// collisions and, while on the ground, attempting to step up ledges of at
// most stepHeight. It then updates the collision/onGround flags and commits
// the possibly reduced velocity.
func (s *State) tickPosition(w World) {
	// First pass: plain collision resolution along Y, then X, then Z.
	player, newVel := s.computeCollisionYXZ(s.BB(), s.BB().Offset(s.Vel.X, s.Vel.Y, s.Vel.Z), s.Vel, w)
	// If on the ground (or just landed), try the step-up variant.
	if s.onGround || (s.Vel.Y != newVel.Y && s.Vel.Y < 0) {
		bb := s.BB()
		// Gather blocks reachable by this tick's horizontal motion plus a
		// step up. The Z offset must be s.Vel.Z (not s.Vel.Y, which would
		// query the wrong region).
		surroundings := s.surroundings(bb.Offset(s.Vel.X, stepHeight, s.Vel.Z), w)
		outVel := s.Vel
		outVel.Y = stepHeight
		for _, b := range surroundings {
			outVel.Y = b.YOffset(bb, outVel.Y)
		}
		bb = bb.Offset(0, outVel.Y, 0)
		for _, b := range surroundings {
			outVel.X = b.XOffset(bb, outVel.X)
		}
		bb = bb.Offset(outVel.X, 0, 0)
		for _, b := range surroundings {
			outVel.Z = b.ZOffset(bb, outVel.Z)
		}
		bb = bb.Offset(0, 0, outVel.Z)
		// Lower the player back down to rest on the stepped surface.
		outVel.Y *= -1
		for _, b := range surroundings {
			outVel.Y = b.YOffset(bb, outVel.Y)
		}
		bb = bb.Offset(0, outVel.Y, 0)
		// Keep the stepped result only if it produced more horizontal
		// progress than the plain pass and actually stepped up.
		oldMove := newVel.X*newVel.X + newVel.Z*newVel.Z
		newMove := outVel.X*outVel.X + outVel.Z*outVel.Z
		if oldMove < newMove && outVel.Y > (0.000002-stepHeight) {
			player = bb
			newVel = outVel
		}
	}
	// Commit the resolved position and update the state flags.
	s.Pos.X = player.X.Min + playerWidth/2
	s.Pos.Y = player.Y.Min
	s.Pos.Z = player.Z.Min + playerWidth/2
	s.collision.horizontal = newVel.X != s.Vel.X || newVel.Z != s.Vel.Z
	s.collision.vertical = newVel.Y != s.Vel.Y
	s.onGround = s.collision.vertical && s.Vel.Y < 0
	s.Vel = newVel
}
// modYaw returns the signed shortest angular difference newYaw-oldYaw in
// degrees, normalized into (-180, 180]. BUG FIX: the upper wraparound
// previously computed 180-delta, which is only correct at delta == 270
// (e.g. modYaw(200, 0) returned -20 instead of -160); the correct
// normalization subtracts a full turn.
func modYaw(newYaw, oldYaw float64) float64 {
	delta := math.Mod(newYaw-oldYaw, 360)
	if delta > 180 {
		delta -= 360
	} else if delta < -180 {
		delta += 360
	}
	return delta
}
// computeCollisionYXZ resolves the desired velocity vel against the blocks
// surrounding the query volume, clipping one axis at a time in Y, X, Z order.
// The order matters: each axis is clipped against the box already moved along
// the previous axes. It returns the moved bounding box and clipped velocity.
func (s *State) computeCollisionYXZ(bb, query AABB, vel path.Point, w World) (outBB AABB, outVel path.Point) {
	surroundings := s.surroundings(query, w)
	outVel = vel
	// Vertical first, so horizontal clipping happens at the post-move height.
	for _, b := range surroundings {
		outVel.Y = b.YOffset(bb, outVel.Y)
	}
	bb = bb.Offset(0, outVel.Y, 0)
	for _, b := range surroundings {
		outVel.X = b.XOffset(bb, outVel.X)
	}
	bb = bb.Offset(outVel.X, 0, 0)
	for _, b := range surroundings {
		outVel.Z = b.ZOffset(bb, outVel.Z)
	}
	bb = bb.Offset(0, 0, outVel.Z)
	return bb, outVel
}
// AtLookTarget returns true if the player look position is actually at the
// given pitch and yaw, within a small per-axis tolerance (0.8 degrees of yaw,
// 1.1 degrees of pitch). Yaw is compared via modYaw so the 0/360 boundary is
// handled.
func (s *State) AtLookTarget(yaw, pitch float64) bool {
	dYaw, dPitch := math.Abs(modYaw(yaw, s.Yaw)), math.Abs(pitch-s.Pitch)
	return dYaw <= 0.8 && dPitch <= 1.1
} | bot/phy/phy.go | 0.63477 | 0.441071 | phy.go | starcoder |
package actionlint
// ExprNode is a node of expression syntax tree. To know the syntax, see
// https://docs.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions
// All concrete node types in this file implement this interface.
type ExprNode interface {
	// Token returns the first token of the node. This method is useful to get position of this node.
	Token() *Token
}
// Variable
// VariableNode is node for variable access.
type VariableNode struct {
	// Name is name of the variable
	Name string
	// tok is the first token of the node, kept for position reporting.
	tok *Token
}
// Token returns the first token of the node. This method is useful to get position of this node.
func (n *VariableNode) Token() *Token {
	return n.tok
}
// Literals
// NullNode is node for null literal.
type NullNode struct {
	// tok is the first token of the node, kept for position reporting.
	tok *Token
}
// Token returns the first token of the node. This method is useful to get position of this node.
func (n *NullNode) Token() *Token {
	return n.tok
}
// BoolNode is node for boolean literal, true or false.
type BoolNode struct {
	// Value is value of the boolean literal.
	Value bool
	tok *Token
}
// Token returns the first token of the node. This method is useful to get position of this node.
func (n *BoolNode) Token() *Token {
	return n.tok
}
// IntNode is node for integer literal.
type IntNode struct {
	// Value is value of the integer literal.
	Value int
	tok *Token
}
// Token returns the first token of the node. This method is useful to get position of this node.
func (n *IntNode) Token() *Token {
	return n.tok
}
// FloatNode is node for float literal.
type FloatNode struct {
	// Value is value of the float literal.
	Value float64
	tok *Token
}
// Token returns the first token of the node. This method is useful to get position of this node.
func (n *FloatNode) Token() *Token {
	return n.tok
}
// StringNode is node for string literal.
type StringNode struct {
	// Value is value of the string literal. Escapes are resolved and quotes at both edges are
	// removed.
	Value string
	tok *Token
}
// Token returns the first token of the node. This method is useful to get position of this node.
func (n *StringNode) Token() *Token {
	return n.tok
}
// Operators
// ObjectDerefNode represents property dereference of object like 'foo.bar'.
// NOTE(review): the Token methods of ObjectDerefNode and ArrayDerefNode use
// value receivers while every other node uses a pointer receiver — confirm
// this inconsistency is intentional.
type ObjectDerefNode struct {
	// Receiver is an expression at receiver of property dereference.
	Receiver ExprNode
	// Property is a name of property to access.
	Property string
}
// Token returns the first token of the node. This method is useful to get position of this node.
func (n ObjectDerefNode) Token() *Token {
	return n.Receiver.Token()
}
// ArrayDerefNode represents elements dereference of arrays like '*' in 'foo.bar.*.piyo'.
type ArrayDerefNode struct {
	// Receiver is an expression at receiver of array element dereference.
	Receiver ExprNode
}
// Token returns the first token of the node. This method is useful to get position of this node.
func (n ArrayDerefNode) Token() *Token {
	return n.Receiver.Token()
}
// IndexAccessNode is node for index access, which represents dynamic object property access or
// array index access.
type IndexAccessNode struct {
	// Operand is an expression at operand of index access, which should be array or object.
	Operand ExprNode
	// Index is an expression at index, which should be integer or string.
	Index ExprNode
}
// Token returns the first token of the node. This method is useful to get position of this node.
func (n *IndexAccessNode) Token() *Token {
	return n.Operand.Token()
}
// Note: Currently only ! is a logical unary operator
// NotOpNode is node for unary ! operator.
type NotOpNode struct {
	// Operand is an expression at operand of ! operator.
	Operand ExprNode
	// tok is the token of the ! operator itself, kept for position reporting.
	tok *Token
}
// Token returns the first token of the node. This method is useful to get position of this node.
func (n *NotOpNode) Token() *Token {
	return n.tok
}
// CompareOpNodeKind is a kind of compare operators; ==, !=, <, <=, >, >=.
type CompareOpNodeKind int
const (
	// CompareOpNodeKindInvalid is invalid and initial value of CompareOpNodeKind values.
	CompareOpNodeKindInvalid CompareOpNodeKind = iota
	// CompareOpNodeKindLess is kind for < operator.
	CompareOpNodeKindLess
	// CompareOpNodeKindLessEq is kind for <= operator.
	CompareOpNodeKindLessEq
	// CompareOpNodeKindGreater is kind for > operator.
	CompareOpNodeKindGreater
	// CompareOpNodeKindGreaterEq is kind for >= operator.
	CompareOpNodeKindGreaterEq
	// CompareOpNodeKindEq is kind for == operator.
	CompareOpNodeKindEq
	// CompareOpNodeKindNotEq is kind for != operator.
	CompareOpNodeKindNotEq
)
// CompareOpNode is node for binary expression to compare values; ==, !=, <, <=, > or >=.
// Its position is that of its left operand.
type CompareOpNode struct {
	// Kind is a kind of this expression to show which operator is used.
	Kind CompareOpNodeKind
	// Left is an expression for left hand side of the binary operator.
	Left ExprNode
	// Right is an expression for right hand side of the binary operator.
	Right ExprNode
}
// Token returns the first token of the node. This method is useful to get position of this node.
func (n *CompareOpNode) Token() *Token {
	return n.Left.Token()
}
// LogicalOpNodeKind is a kind of logical operators; && and ||.
type LogicalOpNodeKind int

const (
	// LogicalOpNodeKindInvalid is an invalid and initial value of LogicalOpNodeKind.
	LogicalOpNodeKindInvalid LogicalOpNodeKind = iota
	// LogicalOpNodeKindAnd is a kind for && operator.
	LogicalOpNodeKindAnd
	// LogicalOpNodeKindOr is a kind for || operator.
	LogicalOpNodeKindOr
)

// String returns the operator token for the kind, or a placeholder string for
// invalid or unknown values.
func (k LogicalOpNodeKind) String() string {
	if k == LogicalOpNodeKindAnd {
		return "&&"
	}
	if k == LogicalOpNodeKindOr {
		return "||"
	}
	return "INVALID LOGICAL OPERATOR"
}
// LogicalOpNode is node for logical binary operators; && or ||.
// Its position is that of its left operand.
type LogicalOpNode struct {
	// Kind is a kind to show which operator is used.
	Kind LogicalOpNodeKind
	// Left is an expression for left hand side of the binary operator.
	Left ExprNode
	// Right is an expression for right hand side of the binary operator.
	Right ExprNode
}
// Token returns the first token of the node. This method is useful to get position of this node.
func (n *LogicalOpNode) Token() *Token {
	return n.Left.Token()
}
// FuncCallNode represents function call in expression.
// Note that currently only calling builtin functions is supported.
type FuncCallNode struct {
	// Callee is a name of called function. This is string value because currently only built-in
	// functions can be called.
	Callee string
	// Args is arguments of the function call.
	Args []ExprNode
	// tok is the token of the callee, kept for position reporting.
	tok *Token
}
// Token returns the first token of the node. This method is useful to get position of this node.
func (n *FuncCallNode) Token() *Token {
	return n.tok
}
// VisitExprNodeFunc is a visitor function for VisitExprNode(). The entering argument is set to
// true when it is called before visiting children. It is set to false when it is called after
// visiting children. It means that this function is called twice for the same node. The parent
// argument is the parent of the node. When the node is root, its parent is nil.
type VisitExprNodeFunc func(node, parent ExprNode, entering bool)
// visitExprNode walks the tree rooted at n depth-first, calling f both before
// (entering=true) and after (entering=false) visiting each node's children.
// Leaf nodes (variables and literals) fall through the switch, so for them
// the two calls happen back to back.
func visitExprNode(n, p ExprNode, f VisitExprNodeFunc) {
	f(n, p, true)
	switch n := n.(type) {
	case *ObjectDerefNode:
		visitExprNode(n.Receiver, n, f)
	case *ArrayDerefNode:
		visitExprNode(n.Receiver, n, f)
	case *IndexAccessNode:
		// Note: the index is visited before the operand.
		visitExprNode(n.Index, n, f)
		visitExprNode(n.Operand, n, f)
	case *NotOpNode:
		visitExprNode(n.Operand, n, f)
	case *CompareOpNode:
		visitExprNode(n.Left, n, f)
		visitExprNode(n.Right, n, f)
	case *LogicalOpNode:
		visitExprNode(n.Left, n, f)
		visitExprNode(n.Right, n, f)
	case *FuncCallNode:
		for _, a := range n.Args {
			visitExprNode(a, n, f)
		}
	}
	f(n, p, false)
}
// VisitExprNode visits the given expression syntax tree with given function f.
// See VisitExprNodeFunc for the enter/leave calling convention.
func VisitExprNode(n ExprNode, f VisitExprNodeFunc) {
	visitExprNode(n, nil, f)
} | expr_ast.go | 0.885074 | 0.63409 | expr_ast.go | starcoder |
package termui
import "strings"
/* Table is like:
┌Awesome Table ────────────────────────────────────────────────┐
│ Col0 | Col1 | Col2 | Col3 | Col4 | Col5 | Col6 |
│──────────────────────────────────────────────────────────────│
│ Some Item #1 | AAA | 123 | CCCCC | EEEEE | GGGGG | IIIII |
│──────────────────────────────────────────────────────────────│
│ Some Item #2 | BBB | 456 | DDDDD | FFFFF | HHHHH | JJJJJ |
└──────────────────────────────────────────────────────────────┘
Datapoints are a two dimensional slice of strings: [][]string
Example:
	data := [][]string{
		{"Col0", "Col1", "Col3", "Col4", "Col5", "Col6"},
		{"Some Item #1", "AAA", "123", "CCCCC", "EEEEE", "GGGGG", "IIIII"},
		{"Some Item #2", "BBB", "456", "DDDDD", "FFFFF", "HHHHH", "JJJJJ"},
	}
	table := termui.NewTable()
	table.Rows = data // type [][]string
	table.FgColor = termui.ColorWhite
	table.BgColor = termui.ColorDefault
	table.Height = 7
	table.Width = 62
	table.Y = 0
	table.X = 0
	table.Border = true
*/
// Table tracks all the attributes of a Table instance
type Table struct {
	Block
	// Rows holds the table content, one []string per row.
	Rows [][]string
	// CellWidth is the computed maximum width per column; filled by Analysis.
	CellWidth []int
	FgColor Attribute
	BgColor Attribute
	// FgColors/BgColors optionally override colors per row; entries left at
	// zero fall back to FgColor/BgColor (see Analysis).
	FgColors []Attribute
	BgColors []Attribute
	// Separator draws a horizontal rule under each row.
	Separator bool
	TextAlign Align
}
// NewTable returns a new Table instance with a white foreground, the default
// background, and row separators enabled.
func NewTable() *Table {
	return &Table{
		Block:     *NewBlock(),
		FgColor:   ColorWhite,
		BgColor:   ColorDefault,
		Separator: true,
	}
}

// cellsWidth returns the total rendered width of the given cells.
func cellsWidth(cells []Cell) int {
	total := 0
	for i := range cells {
		total += cells[i].Width()
	}
	return total
}
// Analysis builds the styled cells for every cell of the table and computes
// the maximum width of each column (stored in table.CellWidth). The returned
// slice contains one []Cell per table cell, in row-major order.
func (table *Table) Analysis() [][]Cell {
	var rowCells [][]Cell
	length := len(table.Rows)
	if length < 1 {
		return rowCells
	}
	// (Re)allocate the per-row color slices when their length does not match
	// the number of rows. BUG FIX: this previously compared with ==, which
	// both wiped correctly-sized user-supplied colors and left empty slices
	// unallocated, panicking with index out of range at FgColors[y] below.
	if len(table.FgColors) != len(table.Rows) {
		table.FgColors = make([]Attribute, len(table.Rows))
	}
	if len(table.BgColors) != len(table.Rows) {
		table.BgColors = make([]Attribute, len(table.Rows))
	}
	cellWidths := make([]int, len(table.Rows[0]))
	for y, row := range table.Rows {
		// Zero-valued row colors fall back to the table-wide defaults.
		if table.FgColors[y] == 0 {
			table.FgColors[y] = table.FgColor
		}
		if table.BgColors[y] == 0 {
			table.BgColors[y] = table.BgColor
		}
		for x, str := range row {
			cells := DefaultTxBuilder.Build(str, table.FgColors[y], table.BgColors[y])
			cw := cellsWidth(cells)
			if cellWidths[x] < cw {
				cellWidths[x] = cw
			}
			rowCells = append(rowCells, cells)
		}
	}
	table.CellWidth = cellWidths
	return rowCells
}
// SetSize computes the outer dimensions of the table from its rows and the
// column widths previously computed by Analysis, and stores them on the
// widget.
func (table *Table) SetSize() {
	rows := len(table.Rows)
	if table.Separator {
		// One line per row plus a separator after each, plus the border line.
		table.Height = 2*rows + 1
	} else {
		table.Height = rows + 2
	}
	table.Width = 2
	if rows > 0 {
		for _, w := range table.CellWidth {
			table.Width += w + 3
		}
	}
}
// CalculatePosition advances the render cursors for the cell at column x,
// row y: coordinateY is the screen row, cellStart the left edge of the cell,
// and coordinateX the x position where the cell text starts, shifted
// according to TextAlign. All three are updated in place.
func (table *Table) CalculatePosition(x int, y int, coordinateX *int, coordinateY *int, cellStart *int) {
	// With separators every row occupies two screen lines.
	if table.Separator {
		*coordinateY = table.innerArea.Min.Y + y*2
	} else {
		*coordinateY = table.innerArea.Min.Y + y
	}
	// Column 0 starts at the inner edge; later columns advance past the
	// previous column's width plus padding/divider space.
	if x == 0 {
		*cellStart = table.innerArea.Min.X
	} else {
		*cellStart += table.CellWidth[x-1] + 3
	}
	// NOTE(review): alignment uses len() (bytes) of the raw cell string, not
	// its rendered width — wide/multibyte runes may misalign; confirm.
	switch table.TextAlign {
	case AlignRight:
		*coordinateX = *cellStart + (table.CellWidth[x] - len(table.Rows[y][x])) + 2
	case AlignCenter:
		*coordinateX = *cellStart + (table.CellWidth[x]-len(table.Rows[y][x]))/2 + 2
	default:
		*coordinateX = *cellStart + 2
	}
}
// Buffer renders the table into a Buffer: for each cell it paints the row
// background, then the styled cell text, then a '|' divider in front of every
// column but the first; finally, when Separator is enabled, a horizontal rule
// is drawn under each row.
func (table *Table) Buffer() Buffer {
	buffer := table.Block.Buffer()
	// Analysis also fills table.CellWidth, which the positioning below uses.
	rowCells := table.Analysis()
	pointerX := table.innerArea.Min.X + 2
	pointerY := table.innerArea.Min.Y
	borderPointerX := table.innerArea.Min.X
	for y, row := range table.Rows {
		for x := range row {
			// Advance the cursors to this cell's text/background positions.
			table.CalculatePosition(x, y, &pointerX, &pointerY, &borderPointerX)
			// Paint the cell background (cell width plus padding/divider room).
			background := DefaultTxBuilder.Build(strings.Repeat(" ", table.CellWidth[x]+3), table.BgColors[y], table.BgColors[y])
			// rowCells is row-major, as produced by Analysis above.
			cells := rowCells[y*len(row)+x]
			for i, back := range background {
				buffer.Set(borderPointerX+i, pointerY, back)
			}
			coordinateX := pointerX
			for _, printer := range cells {
				buffer.Set(coordinateX, pointerY, printer)
				coordinateX += printer.Width()
			}
			// Column dividers go in front of every column except the first.
			if x != 0 {
				dividors := DefaultTxBuilder.Build("|", table.FgColors[y], table.BgColors[y])
				for _, dividor := range dividors {
					buffer.Set(borderPointerX, pointerY, dividor)
				}
			}
		}
		if table.Separator {
			border := DefaultTxBuilder.Build(strings.Repeat("─", table.Width-2), table.FgColor, table.BgColor)
			for i, cell := range border {
				buffer.Set(i+1, pointerY+1, cell)
			}
		}
	}
	return buffer
} | table.go | 0.711331 | 0.488893 | table.go | starcoder |
package validator
import (
"fmt"
"math/big"
"sort"
"strings"
"github.com/hyperledger/burrow/crypto"
)
// NOTE(review): big0 is not referenced anywhere in this part of the file —
// confirm it is used elsewhere before removing.
var big0 = big.NewInt(0)
// A Validator multiset - can be used to capture the global state of validators or as an accumulator each block
type Set struct {
	// powers maps validator addresses to their current voting power.
	powers map[crypto.Address]*big.Int
	// publicKeys retains the addressable identity for each address in powers.
	publicKeys map[crypto.Address]crypto.Addressable
	// totalPower caches the sum of all powers in the set.
	totalPower *big.Int
	// trim, when true, removes entries whose power is set to zero.
	trim bool
}
// newSet allocates an empty set with zero total power.
func newSet() *Set {
	return &Set{
		totalPower: new(big.Int),
		powers:     make(map[crypto.Address]*big.Int),
		publicKeys: make(map[crypto.Address]crypto.Addressable),
	}
}
// Create a new Validators which can act as an accumulator for validator power changes
func NewSet() *Set {
	return newSet()
}
// Like Set but removes entries when power is set to 0 this make Count() == CountNonZero() and prevents a set from leaking
// but does mean that a zero will not be iterated over when performing an update which is necessary in Ring
func NewTrimSet() *Set {
	s := newSet()
	s.trim = true
	return s
}
// AlterPower implements Writer by delegating to ChangePower; the returned
// error is always nil.
func (vs *Set) AlterPower(id crypto.PublicKey, power *big.Int) (flow *big.Int, err error) {
	return vs.ChangePower(id, power), nil
}
// ChangePower sets the power of a validator and returns the flow into that
// validator (positive when power increased, negative when it decreased).
// On trim sets, setting power to zero removes the entry entirely.
func (vs *Set) ChangePower(id crypto.PublicKey, power *big.Int) *big.Int {
	address := id.Address()
	// Calculate flow into this validator (positive means in, negative means out)
	flow := new(big.Int).Sub(power, vs.Power(id.Address()))
	vs.totalPower.Add(vs.totalPower, flow)
	if vs.trim && power.Sign() == 0 {
		delete(vs.publicKeys, address)
		delete(vs.powers, address)
	} else {
		// Store defensive copies so callers cannot mutate internal state.
		vs.publicKeys[address] = crypto.NewAddressable(id)
		vs.powers[address] = new(big.Int).Set(power)
	}
	return flow
}
// TotalPower returns a defensive copy of the sum of all validator powers.
func (vs *Set) TotalPower() *big.Int {
	return new(big.Int).Set(vs.totalPower)
}
// MaybePower returns a copy of the power of id, or nil if id is not in the set.
func (vs *Set) MaybePower(id crypto.Address) *big.Int {
	if vs.powers[id] == nil {
		return nil
	}
	return new(big.Int).Set(vs.powers[id])
}
// Power returns a copy of the power of id, or a zero big.Int if id is not in
// the set.
func (vs *Set) Power(id crypto.Address) *big.Int {
	if vs.powers[id] == nil {
		return new(big.Int)
	}
	return new(big.Int).Set(vs.powers[id])
}
// Equal returns true when both sets hold the same validators with the same
// powers. Counts are compared first, so scanning one set for mismatched
// powers suffices.
func (vs *Set) Equal(vsOther *Set) bool {
	if vs.Count() != vsOther.Count() {
		return false
	}
	// Stop iteration IFF we find a non-matching validator
	return !vs.Iterate(func(id crypto.Addressable, power *big.Int) (stop bool) {
		otherPower := vsOther.Power(id.Address())
		if otherPower.Cmp(power) != 0 {
			return true
		}
		return false
	})
}
// Iterate visits validators sorted by address (for deterministic order),
// passing each a copy of its power. It returns true if iter stopped the
// iteration early. A nil receiver iterates over nothing.
func (vs *Set) Iterate(iter func(id crypto.Addressable, power *big.Int) (stop bool)) (stopped bool) {
	if vs == nil {
		return
	}
	addresses := make(crypto.Addresses, 0, len(vs.powers))
	for address := range vs.powers {
		addresses = append(addresses, address)
	}
	sort.Sort(addresses)
	for _, address := range addresses {
		if iter(vs.publicKeys[address], new(big.Int).Set(vs.powers[address])) {
			return true
		}
	}
	return
}
// CountNonZero returns the number of validators with non-zero power.
// For trim sets this equals Count().
func (vs *Set) CountNonZero() int {
	var count int
	vs.Iterate(func(id crypto.Addressable, power *big.Int) (stop bool) {
		if power.Sign() != 0 {
			count++
		}
		return
	})
	return count
}
// Count returns the total number of entries in the set, including any
// zero-power entries retained by non-trim sets.
func (vs *Set) Count() int {
	return len(vs.publicKeys)
}
// Validators returns the set as a slice of persistable Validator values in
// address order, or nil for a nil receiver.
func (vs *Set) Validators() []*Validator {
	if vs == nil {
		return nil
	}
	pvs := make([]*Validator, 0, vs.Count())
	vs.Iterate(func(id crypto.Addressable, power *big.Int) (stop bool) {
		// NOTE(review): power.Uint64() silently truncates powers that do not
		// fit in a uint64 — confirm powers are bounded upstream.
		pvs = append(pvs, &Validator{PublicKey: id.PublicKey(), Power: power.Uint64()})
		return
	})
	return pvs
}
// UnpersistSet rebuilds a Set from its persisted []*Validator form.
func UnpersistSet(pvs []*Validator) *Set {
	vs := NewSet()
	for _, pv := range pvs {
		vs.ChangePower(pv.PublicKey, new(big.Int).SetUint64(pv.Power))
	}
	return vs
}
// String renders the set with its total power, count and per-validator powers.
func (vs *Set) String() string {
	return fmt.Sprintf("Validators{TotalPower: %v; Count: %v; %v}", vs.TotalPower(), vs.Count(),
		vs.Strings())
}
// Strings returns a comma-separated "address->power" list in address order.
func (vs *Set) Strings() string {
	strs := make([]string, 0, vs.Count())
	vs.Iterate(func(id crypto.Addressable, power *big.Int) (stop bool) {
		strs = append(strs, fmt.Sprintf("%v->%v", id.Address(), power))
		return
	})
	return strings.Join(strs, ", ")
} | acm/validator/set.go | 0.811415 | 0.474875 | set.go | starcoder |
package internal
import (
"math"
)
// reflect maps coordinate x into the valid range [0, size-1] by mirroring
// out-of-bounds values back across the nearest boundary. Note that only a
// single reflection is applied, so callers must keep |x| within 2*size
// (which holds whenever the convolution kernel radius is below the image
// dimension). Cleaned up to idiomatic gofmt style (no parenthesized
// conditions or trailing semicolons).
func reflect(size, x int) int {
	if x < 0 {
		return -x - 1
	}
	if x >= size {
		return 2*size - x - 1
	}
	return x
}
// GaussianDefiniteIntegral returns the integral of the gaussian with midpoint
// mu and standard deviation sigma from minus infinity to x (i.e. the normal
// CDF), computed symbolically via the error function. Uses the standard
// library constant math.Sqrt2 rather than a package-level value.
func GaussianDefiniteIntegral(mu, sigma, x float32) float32 {
	return 0.5 * (1 + float32(math.Erf(float64((x-mu)/(float32(math.Sqrt2)*sigma)))))
}

// GaussianKernel1D generates a normalized, symmetric 1D gaussian kernel for
// the given sigma, based on symbolic integration via the error function.
// The returned kernel has odd length 2*radius+1 and sums to 1.
func GaussianKernel1D(sigma float32) (kernel []float32) {
	mu := float32(0)
	// Find the kernel radius from the tail area left of the kernel.
	// NOTE(review): the decrement below keeps the largest radius whose left
	// tail is still >= acceptOut (one less than the first radius satisfying
	// the bound) — confirm this off-by-one is intentional.
	acceptOut := float32(0.01)
	radius := 0
	for {
		val := GaussianDefiniteIntegral(mu, sigma, float32(-0.5)-float32(radius))
		if val < acceptOut {
			radius--
			break
		}
		radius++
	}
	// BUG FIX: for very small sigmas the first probe already satisfies the
	// bound and radius became -1, producing make([]float32, -1) and a panic.
	// Floor the radius at zero (a single-tap kernel).
	if radius < 0 {
		radius = 0
	}
	width := 2*radius + 1
	kernel = make([]float32, width)
	// Calculate the left half of the kernel via symbolic integration: each tap
	// is the gaussian mass falling into its unit-wide pixel bin.
	sum := float32(0)
	lower := GaussianDefiniteIntegral(mu, sigma, float32(-0.5)-float32(radius))
	for i := 0; i <= radius; i++ {
		upper := GaussianDefiniteIntegral(mu, sigma, float32(-0.5)-float32(radius)+float32(i+1))
		delta := upper - lower
		kernel[i] = delta
		sum += delta
		lower = upper
	}
	// Mirror the right half of the kernel to guarantee exact symmetry.
	for i := 1; i <= radius; i++ {
		value := kernel[radius-i]
		kernel[radius+i] = value
		sum += value
	}
	// Normalize the sum of the kernel to 1, compensating for the truncated
	// part of the distribution.
	factor := 1.0 / sum
	for i := range kernel {
		kernel[i] *= factor
	}
	return kernel
}
// Convolve1DX convolves the 2D image given by data (row-major with the given
// width) with the convolution kernel along the x axis, storing the result in
// res. Out-of-bounds columns are mirrored at the image edges via reflect.
func Convolve1DX(res, data []float32, width int, kernel []float32) {
	height := len(data) / width
	radius := len(kernel) / 2
	for y := 0; y < height; y++ {
		rowStart := y * width
		for x := 0; x < width; x++ {
			acc := float32(0)
			for t, weight := range kernel {
				xs := reflect(width, x+t-radius)
				acc += data[rowStart+xs] * weight
			}
			res[rowStart+x] = acc
		}
	}
}
// Convolve1DY convolves the 2D image given by data (row-major with the given
// width) with the convolution kernel along the y axis, storing the result in
// res. Out-of-bounds rows are mirrored at the image edges via reflect.
func Convolve1DY(res, data []float32, width int, kernel []float32) {
	height := len(data) / width
	radius := len(kernel) / 2
	for y := 0; y < height; y++ {
		for x := 0; x < width; x++ {
			acc := float32(0)
			for t, weight := range kernel {
				ys := reflect(height, y+t-radius)
				acc += data[ys*width+x] * weight
			}
			res[y*width+x] = acc
		}
	}
}
// GaussFilter2D applies a separable 2D gauss filter of the given standard
// deviation to the 2D image in data (row-major with the given width).
// tmp is overwritten with the intermediate x-pass; the result is stored in res.
func GaussFilter2D(res, tmp, data []float32, width int, sigma float32) {
	kernel := GaussianKernel1D(sigma)
	Convolve1DX(tmp, data, width, kernel)
	Convolve1DY(res, tmp, width, kernel)
}
// ApplyUnsharpMask combines an image with its blurred counterpart:
// res[i] = data[i] + (data[i]-blurred[i])*gain, clipped to [min, max].
// Pixels below absThreshold are copied through unchanged.
func ApplyUnsharpMask(res, data, blurred []float32, gain float32, min, max, absThreshold float32) {
	for i := range data {
		value := data[i]
		if value < absThreshold {
			res[i] = value
			continue
		}
		sharpened := value + (value-blurred[i])*gain
		if sharpened < min {
			sharpened = min
		}
		if sharpened > max {
			sharpened = max
		}
		res[i] = sharpened
	}
}
// UnsharpMask sharpens the 2D image in data (row-major with the given width)
// by subtracting a gaussian-blurred copy (radius from sigma) and combining
// with the given gain, clipping results to [min, max]. Pixels below
// absThreshold are left unchanged. The result is returned in a newly
// allocated slice; data itself is not modified.
func UnsharpMask(data []float32, width int, sigma float32, gain float32, min, max, absThreshold float32) []float32 {
	tmp := make([]float32, len(data))
	blurred := make([]float32, len(data))
	GaussFilter2D(blurred, tmp, data, width, sigma)
	// tmp was only scratch for the blur; it is fully overwritten here.
	ApplyUnsharpMask(tmp, data, blurred, gain, min, max, absThreshold)
	return tmp
} | internal/usm.go | 0.755457 | 0.653707 | usm.go | starcoder |
package gl_utils
import (
"github.com/go-gl/mathgl/mgl32"
"math"
)
// Camera2D a Camera based on an orthogonal projection
type Camera2D struct {
	// x, y position the view window: the view center when centered is true,
	// otherwise the window corner (see rebuildMatrix).
	x float32
	y float32
	// Viewport size as passed to NewCamera2D, with cached half extents.
	width float32
	halfWidth float32
	height float32
	halfHeight float32
	// zoom is kept within [minZoom, maxZoom] by SetZoom.
	zoom float32
	minZoom float32
	maxZoom float32
	centered bool
	flipVertical bool
	// near/far clip planes handed to mgl32.Ortho.
	near float32
	far float32
	// Cached projection and its inverse, rebuilt lazily when matrixDirty is set.
	projectionMatrix mgl32.Mat4
	inverseMatrix mgl32.Mat4
	matrixDirty bool
}
// NewCamera2D sets up an orthogonal projection camera for a viewport of the
// given size and initial zoom factor. The zoom range defaults to [0.01, 20];
// note that the initial zoom argument itself is not clamped against it.
func NewCamera2D(width int, height int, zoom float32) *Camera2D {
	c := &Camera2D{
		width: float32(width),
		halfWidth: float32(width) / 2,
		height: float32(height),
		halfHeight: float32(height) / 2,
		zoom: zoom,
		minZoom: 0.01,
		maxZoom: 20,
	}
	// NOTE(review): near/far are set inverted (near=2, far=-2) — confirm this
	// matches the intended clip-space orientation.
	c.far = -2
	c.near = 2
	c.matrixDirty = true
	c.rebuildMatrix()
	return c
}
// Width returns the viewport width as passed to NewCamera2D.
func (c *Camera2D) Width() float32 { return c.width }
// Height returns the viewport height as passed to NewCamera2D.
func (c *Camera2D) Height() float32 { return c.height }
// ProjectionMatrix returns the projection matrix of the camera, rebuilding it
// first if any camera parameter changed since the last build.
func (c *Camera2D) ProjectionMatrix() *mgl32.Mat4 {
	c.rebuildMatrix()
	return &c.projectionMatrix
}
// SetPosition sets the current position of the camera. If the camera is
// centered, this moves the view center; otherwise the window corner.
func (c *Camera2D) SetPosition(x float32, y float32) {
	c.x = x
	c.y = y
	c.matrixDirty = true
}
// Translate moves the camera position by the specified amount. When
// flipVertical is set the y delta is negated, so callers can keep working in
// screen-style coordinates.
func (c *Camera2D) Translate(x float32, y float32) {
	if c.flipVertical {
		y = -y
	}
	c.x += x
	c.y += y
	c.matrixDirty = true
}
// Zoom returns the current zoom level.
func (c *Camera2D) Zoom() float32 { return c.zoom }
// SetZoom sets the zoom factor, clamped to [minZoom, maxZoom].
func (c *Camera2D) SetZoom(zoom float32) {
	zoom = mgl32.Clamp(zoom, c.minZoom, c.maxZoom)
	c.zoom = zoom
	c.matrixDirty = true
}
// MinZoom returns the minimum zoom level allowed.
func (c *Camera2D) MinZoom() float32 { return c.minZoom }
// MaxZoom returns the maximum zoom level allowed.
func (c *Camera2D) MaxZoom() float32 { return c.maxZoom }
// SetZoomRange sets the minimum and maximum zoom factors allowed, re-clamping
// the current zoom via SetZoom if it falls outside the new range.
// NOTE(review): minZoom > maxZoom is not validated here.
func (c *Camera2D) SetZoomRange(minZoom float32, maxZoom float32) {
	c.minZoom = minZoom
	c.maxZoom = maxZoom
	if c.zoom > c.maxZoom || c.zoom < c.minZoom {
		c.SetZoom(c.zoom)
	}
}
// SetCentered, when true, anchors the camera position at the center of the
// view instead of the window corner.
func (c *Camera2D) SetCentered(centered bool) {
	c.centered = centered
	c.matrixDirty = true
}
// SetFlipVertical sets the orientation of the vertical axis. Pass true to
// have a cartesian (Y-up) coordinate system.
func (c *Camera2D) SetFlipVertical(flip bool) {
	c.flipVertical = flip
	c.matrixDirty = true
}
// SetVisibleArea configures the camera to make the axis-aligned rectangle
// spanned by (x1,y1)-(x2,y2) completely visible; position and zoom are
// changed accordingly. The smaller of the two per-axis fits is chosen so the
// whole area fits (the zoom is still clamped by SetZoom's range).
func (c *Camera2D) SetVisibleArea(x1 float32, y1 float32, x2 float32, y2 float32) {
	width := math.Abs(float64(x2 - x1))
	height := math.Abs(float64(y2 - y1))
	zoom := float32(math.Min(float64(c.width)/width, float64(c.height)/height))
	c.SetZoom(zoom)
	x := math.Min(float64(x1), float64(x2))
	y := math.Min(float64(y1), float64(y2))
	if c.centered {
		c.SetPosition(float32(x+width/2), float32(y+height/2))
	} else {
		c.SetPosition(float32(x), float32(y))
	}
}
// rebuildMatrix recomputes the projection matrix and its inverse from the
// current position, zoom and flags. It is a no-op unless matrixDirty is set.
func (c *Camera2D) rebuildMatrix() {
	if !c.matrixDirty {
		return
	}
	var left, right, top, bottom float32
	if c.centered {
		// Window extends half the zoom-scaled viewport in every direction.
		halfWidth := c.halfWidth / c.zoom
		halfHeight := c.halfHeight / c.zoom
		left = -halfWidth
		right = halfWidth
		top = halfHeight
		bottom = -halfHeight
	} else {
		// Window extends from the corner; left and bottom stay zero here.
		right = c.width / c.zoom
		top = c.height / c.zoom
	}
	// Translate the window by the camera position.
	left += c.x
	right += c.x
	top += c.y
	bottom += c.y
	if c.flipVertical {
		bottom, top = top, bottom
	}
	// NOTE(review): mgl32.Ortho's signature is (left, right, bottom, top,
	// near, far); top and bottom are passed swapped here, making Y grow
	// downward unless flipVertical is set. This appears intentional given
	// SetFlipVertical's contract — confirm.
	c.projectionMatrix = mgl32.Ortho(left, right, top, bottom, c.near, c.far)
	c.inverseMatrix = c.projectionMatrix.Inv()
	c.matrixDirty = false
}
// ScreenToWorld converts a point in viewport coordinates to world coordinates
// using the cached inverse projection matrix.
func (c *Camera2D) ScreenToWorld(vec mgl32.Vec2) mgl32.Vec3 {
	if c.flipVertical {
		vec[1] = c.height - vec[1]
	}
	// Map pixels to normalized device coordinates in [-1, 1].
	x := (vec[0] - c.halfWidth) / c.halfWidth
	y := (vec[1] - c.halfHeight) / c.halfHeight
	return mgl32.TransformCoordinate(mgl32.Vec3{x, y, 0}, c.inverseMatrix)
}
// WorldToScreen converts a world-space point to viewport coordinates.
// NOTE(review): both conversion methods use the cached matrices without
// calling rebuildMatrix first, so results can be stale after a setter until
// ProjectionMatrix() is queried — confirm this is acceptable.
func (c *Camera2D) WorldToScreen(vec mgl32.Vec3) mgl32.Vec2 {
	ret := mgl32.TransformCoordinate(vec, c.projectionMatrix)
	// Map normalized device coordinates back to pixels.
	ret[0] = ret[0]*c.halfWidth + c.halfWidth
	ret[1] = ret[1]*c.halfHeight + c.halfHeight
	if c.flipVertical {
		ret[1] = c.height - ret[1]
	}
	return mgl32.Vec2{ret[0], ret[1]}
} | gl_utils/camera_2d.go | 0.888487 | 0.574514 | camera_2d.go | starcoder |
package output
import (
"context"
"os"
"time"
"github.com/Jeffail/benthos/v3/internal/codec"
"github.com/Jeffail/benthos/v3/internal/docs"
"github.com/Jeffail/benthos/v3/internal/shutdown"
"github.com/Jeffail/benthos/v3/lib/log"
"github.com/Jeffail/benthos/v3/lib/metrics"
"github.com/Jeffail/benthos/v3/lib/output/writer"
"github.com/Jeffail/benthos/v3/lib/types"
)
//------------------------------------------------------------------------------
var multipartCodecDoc = (`
## Batches and Multipart Messages
When writing multipart (batched) messages using the ` + "`lines`" + ` codec the last message ends with double delimiters. E.g. the messages "foo", "bar" and "baz" would be written as:
` + "```" + `
foo\n
bar\n
baz\n
` + "```" + `
Whereas a multipart message [ "foo", "bar", "baz" ] would be written as:
` + "```" + `
foo\n
bar\n
baz\n\n
` + "```" + `
This enables consumers of this output feed to reconstruct the original batches. However, if you wish to avoid this behaviour then add a ` + "[`split` processor](/docs/components/processors/split)" + ` before messages reach this output.`)[1:]
func init() {
Constructors[TypeSTDOUT] = TypeSpec{
constructor: fromSimpleConstructor(NewSTDOUT),
Summary: `
Prints messages to stdout as a continuous stream of data, dividing messages according to the specified codec.`,
Description: multipartCodecDoc,
FieldSpecs: docs.FieldSpecs{
codec.WriterDocs.AtVersion("3.46.0"),
docs.FieldDeprecated("delimiter"),
},
Categories: []Category{
CategoryLocal,
},
}
}
//------------------------------------------------------------------------------
// STDOUTConfig contains configuration fields for the stdout based output type.
type STDOUTConfig struct {
Codec string `json:"codec" yaml:"codec"`
Delim string `json:"delimiter" yaml:"delimiter"`
}
// NewSTDOUTConfig creates a new STDOUTConfig with default values.
func NewSTDOUTConfig() STDOUTConfig {
return STDOUTConfig{
Codec: "lines",
Delim: "",
}
}
//------------------------------------------------------------------------------
// NewSTDOUT creates a new STDOUT output type.
func NewSTDOUT(conf Config, mgr types.Manager, log log.Modular, stats metrics.Type) (Type, error) {
if len(conf.STDOUT.Delim) > 0 {
conf.STDOUT.Codec = "delim:" + conf.STDOUT.Delim
}
f, err := newStdoutWriter(conf.STDOUT.Codec, log, stats)
if err != nil {
return nil, err
}
w, err := NewAsyncWriter(TypeSTDOUT, 1, f, log, stats)
if err != nil {
return nil, err
}
if aw, ok := w.(*AsyncWriter); ok {
aw.SetNoCancel()
}
return w, nil
}
type stdoutWriter struct {
handle codec.Writer
shutSig *shutdown.Signaller
}
func newStdoutWriter(codecStr string, log log.Modular, stats metrics.Type) (*stdoutWriter, error) {
codec, _, err := codec.GetWriter(codecStr)
if err != nil {
return nil, err
}
handle, err := codec(os.Stdout)
if err != nil {
return nil, err
}
return &stdoutWriter{
handle: handle,
shutSig: shutdown.NewSignaller(),
}, nil
}
func (w *stdoutWriter) ConnectWithContext(ctx context.Context) error {
return nil
}
func (w *stdoutWriter) WriteWithContext(ctx context.Context, msg types.Message) error {
err := writer.IterateBatchedSend(msg, func(i int, p types.Part) error {
return w.handle.Write(ctx, p)
})
if err != nil {
return err
}
if msg.Len() > 1 {
if w.handle != nil {
w.handle.EndBatch()
}
}
return nil
}
func (w *stdoutWriter) CloseAsync() {
}
func (w *stdoutWriter) WaitForClose(timeout time.Duration) error {
return nil
} | lib/output/stdout.go | 0.700485 | 0.496704 | stdout.go | starcoder |
package helper
import (
"fmt"
"log"
"github.com/wesovilabs/orion/internal/errors"
"github.com/zclconf/go-cty/cty"
)
// IsSlice returns true if value is a slice.
func IsSlice(value cty.Value) bool {
return value.Type().IsListType() || value.Type().IsTupleType() || value.Type().IsCollectionType()
}
// ToStrictString convert the value into a string.
func ToStrictString(value cty.Value) (string, errors.Error) {
if value.Type() == cty.String {
return value.AsString(), nil
}
return "", errors.InvalidArguments("expected a string field but it's not")
}
// IsMap return true if value is a map.
func IsMap(value cty.Value) bool {
return value.Type().IsMapType() || value.Type().IsObjectType()
}
// ToValueMap convert the map into a value.
func ToValueMap(input map[string]interface{}) cty.Value {
output := make(map[string]cty.Value)
for name, value := range input {
output[name] = ToValue(value)
}
return cty.ObjectVal(output)
}
func toValueMapString(input map[string]string) cty.Value {
output := make(map[string]cty.Value)
for name, value := range input {
output[name] = cty.StringVal(value)
}
return cty.MapVal(output)
}
// ToValueList convert the array into a value.
func ToValueList(input []interface{}) cty.Value {
output := make([]cty.Value, len(input))
for index := range input {
value := input[index]
output[index] = ToValue(value)
}
return cty.TupleVal(output)
}
func toValueList(input []string) cty.Value {
output := make([]cty.Value, len(input))
for index := range input {
value := input[index]
output[index] = cty.StringVal(value)
}
return cty.TupleVal(output)
}
// ToValue converts an arbitrary Go value into the equivalent cty value.
// nil maps to cty.NilVal; an unsupported type terminates the process via
// log.Fatal (pre-existing behavior).
func ToValue(value interface{}) cty.Value {
	switch v := value.(type) {
	case string:
		return cty.StringVal(v)
	case int:
		return cty.NumberIntVal(int64(v))
	// Each sized integer needs its own case: the previous combined
	// `case int8, int16, int32, int64` asserted v.(int64), which panics
	// whenever the dynamic type is int8, int16 or int32.
	case int8:
		return cty.NumberIntVal(int64(v))
	case int16:
		return cty.NumberIntVal(int64(v))
	case int32:
		return cty.NumberIntVal(int64(v))
	case int64:
		return cty.NumberIntVal(v)
	case float32:
		// Same fix: v.(float64) panicked for float32 inputs.
		return cty.NumberFloatVal(float64(v))
	case float64:
		return cty.NumberFloatVal(v)
	case bool:
		return cty.BoolVal(v)
	case map[string]string:
		return toValueMapString(v)
	case map[string]interface{}:
		return ToValueMap(v)
	case []string:
		return toValueList(v)
	case []interface{}:
		return ToValueList(v)
	case []map[string]interface{}:
		out := make([]cty.Value, len(v))
		for index := range v {
			item := v[index]
			props := make(map[string]cty.Value, len(item))
			for k, v := range item {
				props[k] = ToValue(v)
			}
			// NOTE(review): cty.MapVal panics on an empty map or on
			// heterogeneous element types, and cty.ListVal panics when the
			// per-item types differ — confirm callers only ever pass
			// uniform, non-empty maps through this case.
			out[index] = cty.MapVal(props)
		}
		return cty.ListVal(out)
	case nil:
		return cty.NilVal
	default:
		// %T reports the Go type, which is what this message means to say;
		// %s printed the value itself for arbitrary types.
		log.Fatal(fmt.Sprintf("unsupported type %T\n", v))
	}
	return cty.NilVal
} | helper/types.go | 0.689096 | 0.432243 | types.go | starcoder |
package csg
import (
"fmt"
"io"
"math"
"math/rand"
)
// F64Epsilon is the epsilon utilized for AlmostEqual
var F64Epsilon float64

func init() {
	// Calculate the machine epsilon: the gap between 1.0 and the next
	// representable float64.
	F64Epsilon = math.Nextafter(1, 2) - 1
}

// Vector representation of a vector point in 3 dimensional space
type Vector struct {
	X float64
	Y float64
	Z float64
}
// Get returns the i'th component of the vector (0 → X, 1 → Y, 2 → Z),
// for iteration purposes. Any other index yields 0.0.
func (v *Vector) Get(i int) float64 {
	if i == 0 {
		return v.X
	}
	if i == 1 {
		return v.Y
	}
	if i == 2 {
		return v.Z
	}
	return 0.0
}
// LengthSquared returns the length of this vector squared (avoids the
// square root needed by Length).
func (v *Vector) LengthSquared() float64 {
	return v.Dot(v)
}

// AlmostEquals returns true if this vector almost equals another vector
// (assuming a max per-component delta of F64Epsilon)
func (v *Vector) AlmostEquals(e *Vector) bool {
	return math.Abs(e.X-v.X) < F64Epsilon && math.Abs(e.Y-v.Y) < F64Epsilon && math.Abs(e.Z-v.Z) < F64Epsilon
}

// Equals returns true if the vectors are exactly equal component-wise
func (v *Vector) Equals(e *Vector) bool {
	return e.X == v.X && e.Y == v.Y && e.Z == v.Z
}

// Normalize returns a new vector which represents the normalized version
// of this vector.
// NOTE(review): identical in behavior to Unit below; a zero-length input
// produces NaN components (division by zero).
func (v *Vector) Normalize() *Vector {
	l := v.Length()
	return &Vector{X: v.X / l, Y: v.Y / l, Z: v.Z / l}
}

// Clone returns a clone of this vector
func (v *Vector) Clone() *Vector {
	return &Vector{X: v.X, Y: v.Y, Z: v.Z}
}

// Negated returns a new vector which is negated
func (v *Vector) Negated() *Vector {
	return &Vector{X: -v.X, Y: -v.Y, Z: -v.Z}
}

// Plus returns a new vector which is the resulting addition of these two vectors
func (v *Vector) Plus(a *Vector) *Vector {
	return &Vector{X: v.X + a.X, Y: v.Y + a.Y, Z: v.Z + a.Z}
}

// Minus returns a new vector which is the resulting subtraction of these two vectors
func (v *Vector) Minus(a *Vector) *Vector {
	return &Vector{X: v.X - a.X, Y: v.Y - a.Y, Z: v.Z - a.Z}
}

// Times returns a new vector which is the resulting multiplication of this vector and a scalar
func (v *Vector) Times(a float64) *Vector {
	return &Vector{X: v.X * a, Y: v.Y * a, Z: v.Z * a}
}

// DividedBy returns a new vector which is the resulting division of this vector with a scalar
func (v *Vector) DividedBy(a float64) *Vector {
	return &Vector{X: v.X / a, Y: v.Y / a, Z: v.Z / a}
}

// Dot returns the dot product of this vector and another
func (v *Vector) Dot(a *Vector) float64 {
	return v.X*a.X + v.Y*a.Y + v.Z*a.Z
}
// Lerp returns the linear interpolation between this vector and a at
// parameter t (t=0 yields v, t=1 yields a) as a new vector
func (v *Vector) Lerp(a *Vector, t float64) *Vector {
	return v.Plus(a.Minus(v).Times(t))
}

// Length returns the Euclidean length of this vector
func (v *Vector) Length() float64 {
	return math.Sqrt(v.Dot(v))
}

// Unit returns a new vector with the unit vector of this vector
// NOTE(review): duplicates Normalize above — consider consolidating.
func (v *Vector) Unit() *Vector {
	return v.DividedBy(v.Length())
}
// Cross returns the cross product of these two vectors as a new vector.
func (v *Vector) Cross(a *Vector) *Vector {
	x := v.Y*a.Z - v.Z*a.Y
	y := v.Z*a.X - v.X*a.Z
	z := v.X*a.Y - v.Y*a.X
	return &Vector{X: x, Y: y, Z: z}
}
// Distance returns the Euclidean distance between this vector and e,
// treating both as points.
func (v *Vector) Distance(e *Vector) float64 {
	d := Vector{X: v.X - e.X, Y: v.Y - e.Y, Z: v.Z - e.Z}
	return math.Sqrt(d.X*d.X + d.Y*d.Y + d.Z*d.Z)
}
// Max modifies this vector in place to hold the component-wise maximum of
// itself and the supplied vector
func (v *Vector) Max(m *Vector) {
	if m.X > v.X {
		v.X = m.X
	}
	if m.Y > v.Y {
		v.Y = m.Y
	}
	if m.Z > v.Z {
		v.Z = m.Z
	}
}

// Min modifies this vector in place to hold the component-wise minimum of
// itself and the supplied vector
func (v *Vector) Min(m *Vector) {
	if m.X < v.X {
		v.X = m.X
	}
	if m.Y < v.Y {
		v.Y = m.Y
	}
	if m.Z < v.Z {
		v.Z = m.Z
	}
}
// CopyFrom copies the component values from the specified vector into this one
func (v *Vector) CopyFrom(e *Vector) {
	v.X = e.X
	v.Y = e.Y
	v.Z = e.Z
}

// Set explicitly sets this vector's components
func (v *Vector) Set(x, y, z float64) {
	v.X = x
	v.Y = y
	v.Z = z
}

// SetRandom sets each component of this vector to a uniform random value in
// [lower, upper). It draws from the shared math/rand global source, which is
// not seeded here.
func (v *Vector) SetRandom(lower, upper float64) {
	r := upper - lower
	v.X = rand.Float64()*r + lower
	v.Y = rand.Float64()*r + lower
	v.Z = rand.Float64()*r + lower
}

// AddTo modifies this vector in place, adding the supplied vector to it
func (v *Vector) AddTo(e *Vector) {
	v.X += e.X
	v.Y += e.Y
	v.Z += e.Z
}

// SetZero modifies this vector in place, setting all components to zero
func (v *Vector) SetZero() {
	v.X = 0
	v.Y = 0
	v.Z = 0
}

// ScaleTo modifies this vector in place, scaling it by the factor s
func (v *Vector) ScaleTo(s float64) {
	v.X = s * v.X
	v.Y = s * v.Y
	v.Z = s * v.Z
}
// MarshalToASCIISTL writes this vector to out as an ASCII STL "vertex" line
// (two leading tabs, as expected inside a facet/loop block).
func (v *Vector) MarshalToASCIISTL(out io.Writer) {
	fmt.Fprintf(out, "\t\tvertex %f %f %f\n", v.X, v.Y, v.Z)
}

// String returns a string representation of this vector as "[x y z]"
func (v *Vector) String() string {
	return fmt.Sprintf("[%f %f %f]", v.X, v.Y, v.Z)
}
} | csg/vector.go | 0.878353 | 0.681406 | vector.go | starcoder |
package unixtime
// (unix)time.Time Borrowed largely from https://github.com/pieterclaerhout/example-json-unixtimestamp
// Differences include:
// - Additional forwarded methods
// Note:
// Duration is not included in/does not come from the above mentioned package
import (
"math"
"strconv"
"time"
)
// Time defines a timestamp encoded as epoch seconds in JSON
type Time time.Time

// Now returns the current moment as a unixtime.Time.
func Now() Time {
	return Time(time.Now())
}

// MarshalJSON is used to convert the timestamp to JSON. It is encoded as the
// integer number of seconds since the Unix epoch; sub-second precision is
// dropped.
func (t Time) MarshalJSON() ([]byte, error) {
	return []byte(strconv.FormatInt(time.Time(t).Unix(), 10)), nil
}
// UnmarshalJSON decodes a JSON integer of epoch seconds into t.
func (t *Time) UnmarshalJSON(s []byte) (err error) {
	secs, err := strconv.ParseInt(string(s), 10, 64)
	if err != nil {
		return err
	}
	*t = Time(time.Unix(secs, 0))
	return nil
}
// Add returns the time t+d. Forwards to time.Time.Add.
func (t Time) Add(d Duration) Time {
	return Time(time.Time(t).Add(time.Duration(d)))
}

// Sub returns the duration t-u. If the result exceeds the maximum (or minimum)
// value that can be stored in a Duration, the maximum (or minimum) duration
// will be returned (behavior inherited from time.Time.Sub).
// To compute t-d for a duration d, use t.Add(-d).
func (t Time) Sub(u Time) Duration {
	return Duration(time.Time(t).Sub(time.Time(u)))
}

// Unix returns t as a Unix time, the number of seconds elapsed
// since January 1, 1970 UTC. The result does not depend on the
// location associated with t.
func (t Time) Unix() int64 {
	return time.Time(t).Unix()
}

// Time returns the JSON time as a time.Time instance in UTC
func (t Time) Time() time.Time {
	return time.Time(t).UTC()
}

// String returns t as a formatted string (UTC, default time.Time layout)
func (t Time) String() string {
	return t.Time().String()
}
// Duration is a span of time encoded in JSON as a whole number of seconds.
type Duration time.Duration

// MarshalJSON is used to convert the duration to JSON. The value is rounded
// up to whole seconds via math.Ceil.
// NOTE(review): marshal/unmarshal is lossy for sub-second durations, and
// Ceil rounds negative durations toward zero — confirm callers accept this.
func (d Duration) MarshalJSON() ([]byte, error) {
	return []byte(strconv.FormatInt(int64(math.Ceil(time.Duration(d).Seconds())), 10)), nil
}

// UnmarshalJSON is used to convert the duration from JSON: it reads an
// integer count of seconds.
func (d *Duration) UnmarshalJSON(s []byte) (err error) {
	r := string(s)
	q, err := strconv.ParseInt(r, 10, 64)
	if err != nil {
		return err
	}
	*(*time.Duration)(d) = time.Second * time.Duration(q)
	return nil
} | unixtime.go | 0.860911 | 0.40642 | unixtime.go | starcoder |
package logic
// Operand is the template for operands: anything that can be evaluated to a
// boolean against an arbitrary context.
type Operand interface {
	Evaluate(ctx interface{}) (bool, error)
}

// Operator is the shared base for operators, holding their operand list.
type Operator struct {
	operands []Operand
}

// AndOperator is the implementation of the boolean AND operators.
type AndOperator struct {
	Operator
}
// Evaluate implements boolean AND: it returns true only when every operand
// evaluates to true, short-circuiting on the first false result or error.
func (o AndOperator) Evaluate(ctx interface{}) (bool, error) {
	for _, operand := range o.operands {
		ok, err := operand.Evaluate(ctx)
		if err != nil {
			return false, err
		}
		if !ok {
			return false, nil
		}
	}
	return true, nil
}
// And generates a new AndOperator with the given set of operands.
func And(operands ...Operand) AndOperator {
	return AndOperator{
		Operator{
			operands,
		},
	}
}

// All generates a new AndOperator with the given set of operands; it is an
// alias for And.
func All(operands ...Operand) AndOperator {
	return And(operands...)
}

// OrOperator is the template for boolean OR operators.
type OrOperator struct {
	Operator
}
// Evaluate implements boolean OR: it returns true as soon as any operand
// evaluates to true, short-circuiting on the first true result or error.
func (o OrOperator) Evaluate(ctx interface{}) (bool, error) {
	for _, operand := range o.operands {
		ok, err := operand.Evaluate(ctx)
		if err != nil {
			return false, err
		}
		if ok {
			return true, nil
		}
	}
	return false, nil
}
// Or generates a new OrOperator with the given set of operands.
func Or(operands ...Operand) OrOperator {
	return OrOperator{
		Operator{
			operands,
		},
	}
}

// Any generates a new OrOperator with the given set of operands; it is an
// alias for Or.
func Any(operands ...Operand) OrOperator {
	return Or(operands...)
}

// NotOperator is the implementation of the boolean NOT operators.
type NotOperator struct {
	operand Operand
}

// Evaluate performs the evaluation of the boolean NOT operator. Note that
// the negated result is returned even when the operand reports an error.
func (o NotOperator) Evaluate(ctx interface{}) (bool, error) {
	result, err := o.operand.Evaluate(ctx)
	return !result, err
}

// Not generates a new NotOperator with the given operand.
func Not(operand Operand) NotOperator {
	return NotOperator{
		operand,
	}
}

// XorOperator is the implementation of the boolean XOR operators.
type XorOperator struct {
	operand1 Operand
	operand2 Operand
}

// Xor generates a new XorOperator with the given operands.
func Xor(operand1, operand2 Operand) XorOperator {
	return XorOperator{
		operand1,
		operand2,
	}
}
// Evaluate implements boolean XOR: true when exactly one operand is true.
// An error from the first operand aborts before the second is evaluated.
func (o XorOperator) Evaluate(ctx interface{}) (bool, error) {
	left, err := o.operand1.Evaluate(ctx)
	if err != nil {
		return left, err
	}
	right, err := o.operand2.Evaluate(ctx)
	return left != right, err
}
// BoolOperand is the Operand version of the boolean native values.
type BoolOperand bool

const (
	// True represents the boolean true value.
	True BoolOperand = true
	// False represents the boolean false value.
	False BoolOperand = false
)

// Evaluate returns the wrapped native boolean; it never fails.
func (o BoolOperand) Evaluate(ctx interface{}) (bool, error) {
	return bool(o), nil
} | logic/logic.go | 0.781997 | 0.532668 | logic.go | starcoder |
package statusmodule
import (
"time"
"github.com/bwmarrin/discordgo"
bot "github.com/erikmcclure/sweetiebot/sweetiebot"
)
// StatusModule manages the bot's rotating Discord status message.
type StatusModule struct {
	// lastchange records when the status was last rotated, used to enforce
	// the configured cooldown in OnTick.
	lastchange time.Time
}

// New creates a StatusModule
func New() *StatusModule {
	return &StatusModule{}
}

// Name of the module
func (w *StatusModule) Name() string {
	return "Status"
}

// Commands in the module
func (w *StatusModule) Commands() []bot.Command {
	return []bot.Command{
		&setStatusCommand{},
		&addStatusCommand{},
		&removeStatusCommand{},
	}
}

// Description of the module
func (w *StatusModule) Description(info *bot.GuildInfo) string { return "Manages the status message." }
// OnTick is the periodic discord hook: on the main guild, once the cooldown
// since the last change has elapsed, it picks a random configured status
// line and applies it.
func (w *StatusModule) OnTick(info *bot.GuildInfo, t time.Time) {
	if !info.Bot.IsMainGuild(info) {
		return
	}
	cooldown := time.Duration(info.Config.Status.Cooldown) * time.Second
	if !w.lastchange.Add(cooldown).Before(t) {
		return
	}
	w.lastchange = t
	if len(info.Config.Status.Lines) == 0 {
		return
	}
	info.Bot.DG.UpdateGameStatus(0, bot.MapGetRandomItem(info.Config.Status.Lines))
}
// setStatusCommand implements the SetStatus chat command.
type setStatusCommand struct {
}

// Info describes the command to the bot framework; it is marked Sensitive
// and MainInstance, restricting it to privileged use on the main instance.
func (c *setStatusCommand) Info() *bot.CommandInfo {
	return &bot.CommandInfo{
		Name:         "SetStatus",
		Usage:        "Sets the status message.",
		Sensitive:    true,
		MainInstance: true,
	}
}

// Process sets the bot's Discord status to the remainder of the message, or
// clears it when no argument is given. Only usable from the main guild.
func (c *setStatusCommand) Process(args []string, msg *discordgo.Message, indices []int, info *bot.GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if !info.Bot.MainGuildID.Equals(info.ID) {
		return "```\nYou can only do this from the main server!```", false, nil
	}
	if len(args) < 1 {
		// No argument: clear the status entirely.
		info.Bot.DG.UpdateGameStatus(0, "")
		return "```\nRemoved status```", false, nil
	}
	// indices[0] is the offset of the first argument, so this captures the
	// raw remainder of the message including any spaces.
	arg := msg.Content[indices[0]:]
	info.Bot.DG.UpdateGameStatus(0, arg)
	return "```\nStatus was set to " + arg + "```", false, nil
}

// Usage returns the help text for SetStatus.
func (c *setStatusCommand) Usage(info *bot.GuildInfo) *bot.CommandUsage {
	return &bot.CommandUsage{
		Desc: "Sets the status message to the given string, at least until it's automatically changed again. Only works from the main guild.",
		Params: []bot.CommandUsageParam{
			{Name: "arbitrary string", Desc: "String to set the status to. Be careful that it's a valid Discord status.", Optional: false},
		},
	}
}
// addStatusCommand implements the AddStatus chat command.
type addStatusCommand struct {
}

// Info describes the command to the bot framework; privileged, main
// instance only.
func (c *addStatusCommand) Info() *bot.CommandInfo {
	return &bot.CommandInfo{
		Name:         "AddStatus",
		Usage:        "Adds a status to the rotation",
		Sensitive:    true,
		MainInstance: true,
	}
}

// Process adds the raw message remainder to the status rotation and persists
// the config. Rejects duplicates and empty input.
// NOTE(review): assumes info.Config.Status.Lines is already a non-nil map —
// writing to a nil map would panic; confirm it is initialized elsewhere.
func (c *addStatusCommand) Process(args []string, msg *discordgo.Message, indices []int, info *bot.GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if !info.Bot.MainGuildID.Equals(info.ID) {
		return "```\nYou can only do this from the main server!```", false, nil
	}
	if len(args) < 1 {
		return "```\nNo status given.```", false, nil
	}
	arg := msg.Content[indices[0]:]
	_, ok := info.Config.Status.Lines[arg]
	if ok {
		return "```\n" + arg + " is already in the status rotation!```", false, nil
	}
	info.Config.Status.Lines[arg] = true
	info.SaveConfig()
	return "```\nAdded " + arg + " to the status rotation.```", false, nil
}

// Usage returns the help text for AddStatus.
func (c *addStatusCommand) Usage(info *bot.GuildInfo) *bot.CommandUsage {
	return &bot.CommandUsage{
		Desc: "Adds a string to the discord status rotation.",
		Params: []bot.CommandUsageParam{
			{Name: "arbitrary string", Desc: "Status string. Be careful that it's a valid Discord status.", Optional: false},
		},
	}
}
// removeStatusCommand implements the RemoveStatus chat command.
type removeStatusCommand struct {
}

// Info describes the command to the bot framework; privileged, main
// instance only.
func (c *removeStatusCommand) Info() *bot.CommandInfo {
	return &bot.CommandInfo{
		Name:         "RemoveStatus",
		Usage:        "Removes a status message from the rotation.",
		Sensitive:    true,
		MainInstance: true,
	}
}

// Process removes an exactly-matching status line from the rotation and
// persists the config. Rejects unknown lines and empty input.
func (c *removeStatusCommand) Process(args []string, msg *discordgo.Message, indices []int, info *bot.GuildInfo) (string, bool, *discordgo.MessageEmbed) {
	if !info.Bot.MainGuildID.Equals(info.ID) {
		return "```\nYou can only do this from the main server!```", false, nil
	}
	if len(args) < 1 {
		return "```\nNo status given.```", false, nil
	}
	arg := msg.Content[indices[0]:]
	_, ok := info.Config.Status.Lines[arg]
	if !ok {
		return "```\n" + arg + " is not in the status rotation!```", false, nil
	}
	delete(info.Config.Status.Lines, arg)
	info.SaveConfig()
	return "```\nRemoved " + arg + " from the status rotation.```", false, nil
}

// Usage returns the help text for RemoveStatus.
func (c *removeStatusCommand) Usage(info *bot.GuildInfo) *bot.CommandUsage {
	return &bot.CommandUsage{
		Desc: "Removes a string to the discord status rotation. Use " + info.Config.Basic.CommandPrefix + "getconfig status.lines to get a list of all strings currently in rotation.",
		Params: []bot.CommandUsageParam{
			{Name: "arbitrary string", Desc: "Status string that must exactly match the one you want to remove.", Optional: false},
		},
	}
} | statusmodule/StatusModule.go | 0.639961 | 0.428951 | StatusModule.go | starcoder |
package murmur2
import "hash"
// murmur2 accumulates written bytes and lazily computes their MurmurHash2
// digest; cached memoizes the digest until the next Write or Reset.
type murmur2 struct {
	data   []byte
	cached *uint32
}

// New32 creates a murmur 2 based hash.Hash32 implementation.
func New32() hash.Hash32 {
	return &murmur2{
		data: make([]byte, 0),
	}
}

// Write a slice of data to the hasher. It never fails; the cached digest is
// invalidated because the input has changed.
func (mur *murmur2) Write(p []byte) (n int, err error) {
	mur.data = append(mur.data, p...)
	mur.cached = nil
	return len(p), nil
}

// Sum appends the current hash (big-endian) to b and returns the resulting
// slice. It does not change the underlying hash state.
func (mur *murmur2) Sum(b []byte) []byte {
	v := mur.Sum32()
	return append(b, byte(v>>24), byte(v>>16), byte(v>>8), byte(v))
}

// Reset resets the Hash to its initial state, keeping the buffer's capacity.
func (mur *murmur2) Reset() {
	mur.data = mur.data[0:0]
	mur.cached = nil
}

// Size returns the number of bytes Sum will return.
func (mur *murmur2) Size() int {
	return 4
}

// BlockSize returns the hash's underlying block size.
// The Write method must be able to accept any amount
// of data, but it may operate more efficiently if all writes
// are a multiple of the block size.
func (mur *murmur2) BlockSize() int {
	return 4
}

// MurmurHash2 constants: seed, multiplier m and rotation r.
const (
	seed uint32 = uint32(0x9747b28c)
	m    int32  = int32(0x5bd1e995)
	r    uint32 = uint32(24)
)
// Sum32 computes the MurmurHash2 digest of all bytes written so far. The
// result is memoized in mur.cached and reused until the next Write/Reset.
func (mur *murmur2) Sum32() uint32 {
	if mur.cached != nil {
		return *mur.cached
	}
	length := int32(len(mur.data))
	h := int32(seed ^ uint32(length))
	// Mix the input four bytes at a time (little-endian word assembly).
	// The &0xff masks are no-ops on byte operands but kept byte-identical.
	length4 := length / 4
	for i := int32(0); i < length4; i++ {
		i4 := i * 4
		k := int32(mur.data[i4+0]&0xff) +
			(int32(mur.data[i4+1]&0xff) << 8) +
			(int32(mur.data[i4+2]&0xff) << 16) +
			(int32(mur.data[i4+3]&0xff) << 24)
		k *= m
		k ^= int32(uint32(k) >> r)
		k *= m
		h *= m
		h ^= k
	}
	// Fold in the 1-3 trailing bytes; fallthrough accumulates each case.
	switch length % 4 {
	case 3:
		h ^= int32(mur.data[(length & ^3)+2]&0xff) << 16
		fallthrough
	case 2:
		h ^= int32(mur.data[(length & ^3)+1]&0xff) << 8
		fallthrough
	case 1:
		h ^= int32(mur.data[length & ^3] & 0xff)
		h *= m
	}
	// Final avalanche.
	h ^= int32(uint32(h) >> 13)
	h *= m
	h ^= int32(uint32(h) >> 15)
	cached := uint32(h)
	mur.cached = &cached
	return cached
} | lib/util/hash/murmur2/murmur2.go | 0.703549 | 0.455865 | murmur2.go | starcoder |
package lo25519
// edBlacklist is a list of elements of the ed25519 curve that have low order.
// The list was copied from https://github.com/jedisct1/libsodium/blob/141288535127c22162944e12fcadb8bc269671cc/src/libsodium/crypto_core/ed25519/ref10/ed25519_ref10.c
var edBlacklist = [7][32]byte{
/* 0 (order 4) */
{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
/* 1 (order 1) */
{0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00},
/* 2707385501144840649318225287225658788936804267575313519463743609750303402022
(order 8) */
{0x26, 0xe8, 0x95, 0x8f, 0xc2, 0xb2, 0x27, 0xb0, 0x45, 0xc3, 0xf4,
0x89, 0xf2, 0xef, 0x98, 0xf0, 0xd5, 0xdf, 0xac, 0x05, 0xd3, 0xc6,
0x33, 0x39, 0xb1, 0x38, 0x02, 0x88, 0x6d, 0x53, 0xfc, 0x05},
/* 55188659117513257062467267217118295137698188065244968500265048394206261417927
(order 8) */
{0xc7, 0x17, 0x6a, 0x70, 0x3d, 0x4d, 0xd8, 0x4f, 0xba, 0x3c, 0x0b,
0x76, 0x0d, 0x10, 0x67, 0x0f, 0x2a, 0x20, 0x53, 0xfa, 0x2c, 0x39,
0xcc, 0xc6, 0x4e, 0xc7, 0xfd, 0x77, 0x92, 0xac, 0x03, 0x7a},
/* p-1 (order 2) */
{0xec, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f},
/* p (=0, order 4) */
{0xed, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f},
/* p+1 (=1, order 1) */
{0xee, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x7f},
}
// IsEdLowOrder checks if the passed group element is of low order, i.e.
// whether it matches one of the blacklisted encodings above (ignoring the
// top bit of the final byte, which encodes the x-coordinate sign).
// Algorithm translated from the same source as the blacklist (see above);
// the accumulate-then-test structure keeps the comparison constant-time.
func IsEdLowOrder(ge []byte) bool {
	var (
		c    [7]byte
		k    int
		i, j int
	)
	// cases j = 0..30: c[i] becomes nonzero iff ge differs from entry i.
	for j = 0; j < 31; j++ {
		for i = 0; i < len(edBlacklist); i++ {
			c[i] |= ge[j] ^ edBlacklist[i][j]
		}
	}
	// case j = 31, ignore highest bit
	for i = 0; i < len(edBlacklist); i++ {
		c[i] |= (ge[j] & 0x7f) ^ edBlacklist[i][j]
	}
	// k's bit 8 stays set only if some c[i] == 0 (a blacklist match).
	k = 0
	for i = 0; i < len(edBlacklist); i++ {
		k |= int(c[i]) - 1
	}
	return ((k >> 8) & 1) == 1
} | internal/lo25519/ed25519.go | 0.547706 | 0.460532 | ed25519.go | starcoder |
package photoshop
import "fmt"
// Point represents a point on the screen
type Point struct {
	// X is the x-coordinate
	X float64
	// Y is the y-coordinate
	Y float64
}

// Size represents a width and height size
type Size struct {
	// Width is the rectangle width
	Width float64
	// Height is the rectangle height
	Height float64
}

// Rect represents an axis-aligned rectangle
type Rect struct {
	// Location of the rectangle
	Location Point
	// Size of the rectangle sides
	Size Size
}

// Drawer draws shapes. This demo implementation just prints what it would
// draw; neither method writes a trailing newline.
type Drawer struct{}

// DrawEllipseInRect draws an ellipse inscribed in the given rectangle
func (d *Drawer) DrawEllipseInRect(r Rect) error {
	fmt.Printf("Drawing ellipse in rect %v", r)
	return nil
}

// DrawRect draws the given rectangle
func (d *Drawer) DrawRect(r Rect) error {
	fmt.Printf("Drawing rect %v", r)
	return nil
}
// VisualElement is anything that can be drawn on the screen via a Drawer.
type VisualElement interface {
	// Draw draws the visual element
	Draw(drawer *Drawer) error
}

// Layer is a composite of visual elements drawn as a unit.
type Layer struct {
	// Elements of visual elements
	Elements []VisualElement
}
// Draw renders every element of the layer in order, emitting a newline
// after each one; it stops at the first element that fails.
func (layer *Layer) Draw(drawer *Drawer) error {
	for _, element := range layer.Elements {
		err := element.Draw(drawer)
		if err != nil {
			return err
		}
		fmt.Println()
	}
	return nil
}
// Square represents a square shape
type Square struct {
	// Location of the square (its origin corner)
	Location Point
	// Side is the length of each side
	Side float64
}
// Draw renders the square as a rectangle whose width and height both equal
// the square's side length.
func (square *Square) Draw(drawer *Drawer) error {
	size := Size{Width: square.Side, Height: square.Side}
	return drawer.DrawRect(Rect{Location: square.Location, Size: size})
}
// Circle represents a circle shape
type Circle struct {
	// Center of the circle
	Center Point
	// Radius of the circle
	Radius float64
}

// Draw renders the circle as an ellipse inscribed in its bounding box: a
// square of side 2*Radius whose origin is the center offset by -Radius in
// both axes.
func (circle *Circle) Draw(drawer *Drawer) error {
	rect := Rect{
		Location: Point{
			X: circle.Center.X - circle.Radius,
			Y: circle.Center.Y - circle.Radius,
		},
		Size: Size{
			Width:  2 * circle.Radius,
			Height: 2 * circle.Radius,
		},
	}
	return drawer.DrawEllipseInRect(rect)
} | structural-patterns/composite/photoshop/photoshop.go | 0.854733 | 0.460895 | photoshop.go | starcoder |
// Package usesgenerics defines an Analyzer that checks for usage of generic
// features added in Go 1.18.
package usesgenerics
import (
"go/ast"
"go/types"
"reflect"
"strings"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
"golang.org/x/tools/internal/typeparams"
)
// Analyzer is the usesgenerics analyzer. It exports a featuresFact per
// package so importers can aggregate transitive generics usage.
var Analyzer = &analysis.Analyzer{
	Name:       "usesgenerics",
	Doc:        Doc,
	Requires:   []*analysis.Analyzer{inspect.Analyzer},
	Run:        run,
	ResultType: reflect.TypeOf((*Result)(nil)),
	FactTypes:  []analysis.Fact{new(featuresFact)},
}

const Doc = `detect whether a package uses generics features

The usesgenerics analysis reports whether a package directly or transitively
uses certain features associated with generic programming in Go.`

// Result is the usesgenerics analyzer result type. The Direct field records
// features used directly by the package being analyzed (i.e. contained in the
// package source code). The Transitive field records any features used by the
// package or any of its transitive imports.
type Result struct {
	Direct, Transitive Features
}

// Features is a set of flags reporting which features of generic Go code a
// package uses, or 0.
type Features int

const (
	// GenericTypeDecls indicates whether the package declares types with type
	// parameters.
	GenericTypeDecls Features = 1 << iota

	// GenericFuncDecls indicates whether the package declares functions with
	// type parameters.
	GenericFuncDecls

	// EmbeddedTypeSets indicates whether the package declares interfaces that
	// contain structural type restrictions, i.e. are not fully described by
	// their method sets.
	EmbeddedTypeSets

	// TypeInstantiation indicates whether the package instantiates any generic
	// types.
	TypeInstantiation

	// FuncInstantiation indicates whether the package instantiates any generic
	// functions.
	FuncInstantiation
)
// String renders the feature set as "features{name1,name2,...}", listing the
// set flags in declaration order.
func (f Features) String() string {
	var feats []string
	for _, entry := range []struct {
		flag Features
		name string
	}{
		{GenericTypeDecls, "typeDecl"},
		{GenericFuncDecls, "funcDecl"},
		{EmbeddedTypeSets, "typeSet"},
		{TypeInstantiation, "typeInstance"},
		{FuncInstantiation, "funcInstance"},
	} {
		if f&entry.flag != 0 {
			feats = append(feats, entry.name)
		}
	}
	return "features{" + strings.Join(feats, ",") + "}"
}
// featuresFact is the package fact exported by this analyzer: the transitive
// feature set of the package it is attached to.
type featuresFact struct {
	Features Features
}

// AFact marks featuresFact as an analysis.Fact.
func (f *featuresFact) AFact()         {}
func (f *featuresFact) String() string { return f.Features.String() }
// run computes the package's direct feature usage, merges in what its
// imports exported, and re-exports the combined set (only when non-empty,
// to keep fact storage small).
func run(pass *analysis.Pass) (interface{}, error) {
	direct := directFeatures(pass)

	transitive := direct | importedTransitiveFeatures(pass)
	if transitive != 0 {
		pass.ExportPackageFact(&featuresFact{transitive})
	}

	return &Result{
		Direct:     direct,
		Transitive: transitive,
	}, nil
}
// directFeatures computes which generic features are used directly by the
// package being analyzed: it walks the AST for generic declarations and
// embedded type sets, then inspects recorded instantiations for generic
// type/function use.
func directFeatures(pass *analysis.Pass) Features {
	inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)

	nodeFilter := []ast.Node{
		(*ast.FuncType)(nil),
		(*ast.InterfaceType)(nil),
		(*ast.ImportSpec)(nil),
		(*ast.TypeSpec)(nil),
	}

	var direct Features
	inspect.Preorder(nodeFilter, func(node ast.Node) {
		switch n := node.(type) {
		case *ast.FuncType:
			if tparams := typeparams.ForFuncType(n); tparams != nil {
				direct |= GenericFuncDecls
			}
		case *ast.InterfaceType:
			// An interface that is not a pure method set embeds structural
			// type restrictions (unions / ~T terms).
			tv := pass.TypesInfo.Types[n]
			if iface, _ := tv.Type.(*types.Interface); iface != nil && !typeparams.IsMethodSet(iface) {
				direct |= EmbeddedTypeSets
			}
		case *ast.TypeSpec:
			if tparams := typeparams.ForTypeSpec(n); tparams != nil {
				direct |= GenericTypeDecls
			}
		}
	})

	// Instantiations of named types vs. function signatures are reported
	// as separate features.
	instances := typeparams.GetInstances(pass.TypesInfo)
	for _, inst := range instances {
		switch inst.Type.(type) {
		case *types.Named:
			direct |= TypeInstantiation
		case *types.Signature:
			direct |= FuncInstantiation
		}
	}
	return direct
}
// importedTransitiveFeatures unions the feature sets that this package's
// direct imports exported as facts (each already transitive for its own
// import graph).
func importedTransitiveFeatures(pass *analysis.Pass) Features {
	var feats Features
	for _, imp := range pass.Pkg.Imports() {
		var importedFact featuresFact
		if pass.ImportPackageFact(imp, &importedFact) {
			feats |= importedFact.Features
		}
	}
	return feats
} | go/analysis/passes/usesgenerics/usesgenerics.go | 0.710226 | 0.469155 | usesgenerics.go | starcoder |
package analysis
// Possibility is an enumerator of possibilites.
type Possibility int
const (
// True represents a logical certanty or true.
True Possibility = iota
// Maybe represents the possibility of true or false.
Maybe
// False represents a logical certanty of false.
False
// Impossible represents a contradiction of certanties (for example
// True ∩ False)
Impossible
)
// MaybeTrue returns true iff a is True or Maybe.
func (a Possibility) MaybeTrue() bool {
return a == True || a == Maybe
}
// MaybeFalse returns true iff a is False or Maybe.
func (a Possibility) MaybeFalse() bool {
return a == False || a == Maybe
}
// Not returns the logical negation of a.
func (a Possibility) Not() Possibility {
switch a {
case False:
return True
case True:
return False
case Impossible:
return Impossible
default:
return Maybe
}
}
// And returns the logical-and of a and b.
func (a Possibility) And(b Possibility) Possibility {
switch {
case a == Impossible || b == Impossible:
return Impossible
case a == False, b == False:
return False
case a == Maybe, b == Maybe:
return Maybe
default:
return True
}
}
// Or returns the logical-or of a and b.
func (a Possibility) Or(b Possibility) Possibility {
switch {
case a == Impossible || b == Impossible:
return Impossible
case a == True, b == True:
return True
case a == Maybe, b == Maybe:
return Maybe
default:
return False
}
}
// Equals returns the possibility of a equaling b.
func (a Possibility) Equals(b Possibility) Possibility {
switch {
case a == Impossible || b == Impossible:
return Impossible
case a == Maybe, b == Maybe:
return Maybe
default:
if a == b {
return True
}
return False
}
}
// Union returns the union of possibile Possibilitys for a and b.
func (a Possibility) Union(b Possibility) Possibility {
if a == Impossible || b == Impossible {
return Impossible
}
if a.Equals(b) == True {
return a
}
return Maybe
}
// Intersect returns the intersection of possibile Possibilitys for a and b.
func (a Possibility) Intersect(b Possibility) Possibility {
if a == Impossible || b == Impossible {
return Impossible
}
if a == Maybe {
if b == Maybe {
return Maybe
}
a, b = b, a
}
// a is True or False
// b is True, False or Maybe
if b == Maybe || a == b {
return a
}
return Impossible
}
// Difference returns the possibile for v that are not found in o.
func (a Possibility) Difference(b Possibility) Possibility {
if a == Impossible || b == Impossible {
return Impossible
}
if a == Maybe {
return b.Not()
}
if a == b {
return Impossible
}
return a
} | gapil/analysis/possibility.go | 0.722331 | 0.458046 | possibility.go | starcoder |
package split
import (
"fmt"
"math"
"strings"
)
// A Complex represents a split-complex number a + bs, where s*s = +1.
type Complex struct {
	re, im float64
}

// Real returns the real part of z, a float64 value.
func (z *Complex) Real() float64 {
	return z.re
}

// Imag returns the imaginary part of z, a float64 value.
func (z *Complex) Imag() float64 {
	return z.im
}

// SetReal sets the real part of z equal to a.
func (z *Complex) SetReal(a float64) {
	z.re = a
}

// SetImag sets the imaginary part of z equal to b.
func (z *Complex) SetImag(b float64) {
	z.im = b
}

// Cartesian returns the two Cartesian components of z.
func (z *Complex) Cartesian() (a, b float64) {
	a, b = z.re, z.im
	return
}
// String returns the string version of a Complex value. If z = a + bs, then
// the string is "(a+bs)", similar to complex128 values. A "+" sign is
// inserted before non-negative imaginary parts (including +Inf).
func (z *Complex) String() string {
	var im string
	switch {
	case math.Signbit(z.Imag()):
		im = fmt.Sprintf("%g", z.Imag())
	case math.IsInf(z.Imag(), +1):
		im = "+Inf"
	default:
		im = fmt.Sprintf("+%g", z.Imag())
	}
	return "(" + fmt.Sprintf("%g", z.Real()) + im + "s)"
}
// Equals returns true if y and z agree in both components (up to the
// tolerance implemented by notEquals).
func (z *Complex) Equals(y *Complex) bool {
	return !notEquals(z.Real(), y.Real()) && !notEquals(z.Imag(), y.Imag())
}
// Copy copies y onto z, and returns z.
func (z *Complex) Copy(y *Complex) *Complex {
	z.SetReal(y.Real())
	z.SetImag(y.Imag())
	return z
}

// New returns a pointer to a Complex value made from two given real float64
// values: the real part a and the imaginary (split) part b.
func New(a, b float64) *Complex {
	z := new(Complex)
	z.SetReal(a)
	z.SetImag(b)
	return z
}
// IsInf returns true if either component of z is infinite (of either sign).
func (z *Complex) IsInf() bool {
	return math.IsInf(z.Real(), 0) || math.IsInf(z.Imag(), 0)
}
// Inf sets z to a split-complex infinity value (signs of the two parts are
// given by a and b, per math.Inf) and returns z.
func (z *Complex) Inf(a, b int) *Complex {
	z.SetReal(math.Inf(a))
	z.SetImag(math.Inf(b))
	return z
}

// IsNaN returns true if any component of z is NaN and neither is an infinity
// (infinity takes precedence over NaN, as with complex128).
func (z *Complex) IsNaN() bool {
	if math.IsInf(z.Real(), 0) || math.IsInf(z.Imag(), 0) {
		return false
	}
	if math.IsNaN(z.Real()) || math.IsNaN(z.Imag()) {
		return true
	}
	return false
}

// NaN sets both components of z to NaN and returns z.
func (z *Complex) NaN() *Complex {
	nan := math.NaN()
	z.SetReal(nan)
	z.SetImag(nan)
	return z
}
// Scal sets z equal to y scaled by a, and returns z.
func (z *Complex) Scal(y *Complex, a float64) *Complex {
	z.SetReal(y.Real() * a)
	z.SetImag(y.Imag() * a)
	return z
}
// Neg sets z equal to the negative of y, and returns z.
func (z *Complex) Neg(y *Complex) *Complex {
	return z.Scal(y, -1)
}
// Conj sets z equal to the conjugate a - bs of y = a + bs, and returns z.
func (z *Complex) Conj(y *Complex) *Complex {
	z.SetReal(y.Real())
	z.SetImag(y.Imag() * -1)
	return z
}
// Add sets z to the componentwise sum of x and y, and returns z.
func (z *Complex) Add(x, y *Complex) *Complex {
	z.SetReal(x.Real() + y.Real())
	z.SetImag(x.Imag() + y.Imag())
	return z
}
// Sub sets z to the componentwise difference x - y, and returns z.
func (z *Complex) Sub(x, y *Complex) *Complex {
	z.SetReal(x.Real() - y.Real())
	z.SetImag(x.Imag() - y.Imag())
	return z
}
// Mul sets z to the split-complex product of x and y, and returns z.
// Both operands are copied first, so z may safely alias x and/or y.
// (a+bs)(c+ds) = (ac+bd) + (ad+bc)s, consistent with Quad's a² - b².
func (z *Complex) Mul(x, y *Complex) *Complex {
	p := new(Complex).Copy(x)
	q := new(Complex).Copy(y)
	z.SetReal((p.Real() * q.Real()) + (p.Imag() * q.Imag()))
	z.SetImag((p.Real() * q.Imag()) + (p.Imag() * q.Real()))
	return z
}
// Quad returns the quadrance a² - b² of z = a + bs. Unlike a complex
// modulus it can be positive, negative, or zero.
func (z *Complex) Quad() float64 {
	a, b := z.Real(), z.Imag()
	return (a * a) - (b * b)
}
// IsZeroDiv returns true if z is a zero divisor, i.e. its quadrance
// vanishes (within the notEquals tolerance).
func (z *Complex) IsZeroDiv() bool {
	return !notEquals(z.Quad(), 0)
}
// Inv sets z equal to the inverse conj(y)/quad(y) of y, and returns z. If y
// is a zero divisor, then Inv panics.
// Safe when z aliases y: z.Conj(y) runs first, but conjugation leaves the
// quadrance unchanged, so the 1/y.Quad() scale is still correct.
func (z *Complex) Inv(y *Complex) *Complex {
	if y.IsZeroDiv() {
		panic("zero divisor inverse")
	}
	return z.Scal(z.Conj(y), 1/y.Quad())
}
// Quo sets z equal to the quotient x/y, and returns z. If y is a zero
// divisor, then Quo panics.
//
// The quadrance of y is captured and the conjugate is built in a temporary
// before z is written, so the result is correct even when z aliases x or y.
// (The original evaluated y.Quad() after z.Mul had overwritten z — wrong
// scale when z == y — and z.Conj(y) clobbered x when z == x.)
func (z *Complex) Quo(x, y *Complex) *Complex {
	if y.IsZeroDiv() {
		panic("zero divisor denominator")
	}
	quad := y.Quad()
	conj := new(Complex).Conj(y)
	return z.Scal(z.Mul(x, conj), 1/quad)
}
// Idempotent sets z to one of the two nontrivial idempotents (z = z*z):
// 0.5+0.5s for sign >= 0, 0.5-0.5s for sign < 0. It returns z.
func (z *Complex) Idempotent(sign int) *Complex {
	im := 0.5
	if sign < 0 {
		im = -0.5
	}
	z.SetReal(0.5)
	z.SetImag(im)
	return z
}
// Rect sets z to the Complex value with curvilinear coordinates (r, ξ) and
// the given quadrance sign, and returns z.
func (z *Complex) Rect(r, ξ float64, sign int) *Complex {
	switch {
	case sign > 0:
		z.SetReal(r * math.Cosh(ξ))
		z.SetImag(r * math.Sinh(ξ))
	case sign < 0:
		z.SetReal(r * math.Sinh(ξ))
		z.SetImag(r * math.Cosh(ξ))
	default:
		// Null cone (sign == 0): both components equal r.
		z.SetReal(r)
		z.SetImag(r)
	}
	return z
}
// Curv returns the curvilinear coordinates of a Complex value, along with the
// sign of the quadrance.
func (z *Complex) Curv() (r, ξ float64, sign int) {
quad := z.Quad()
if quad > 0 {
r = math.Sqrt(quad)
ξ = math.Atanh(z.Imag() / z.Real())
sign = +1
return
}
if quad < 0 {
r = math.Sqrt(-quad)
ξ = math.Atanh(z.Real() / z.Imag())
sign = -1
return
}
r = z.Real()
ξ = math.NaN()
sign = 0
return
} | complex.go | 0.893245 | 0.610221 | complex.go | starcoder |
package xlog
import (
"fmt"
"github.com/globalsign/mgo/bson"
"strings"
"time"
)
var (
	// TimeOfDayLayouts lists the layouts ParseTimeOfDay tries, in order:
	// "HH:MM", "HHMM", "HH:MM:SS", "HHMMSS".
	TimeOfDayLayouts = []string{
		"15:04",
		"1504",
		"15:04:05",
		"150405",
	}
	// StorageSizeSuffixes maps unit suffixes to their size in bytes.
	// Entries are ordered largest-first; FormatStorageSize relies on that
	// ordering to pick the first unit that fits.
	StorageSizeSuffixes = []struct {
		S string
		N int
	}{
		{"TB", 1024 * 1024 * 1024 * 1024},
		{"GB", 1024 * 1024 * 1024},
		{"MB", 1024 * 1024},
		{"KB", 1024},
		{"B", 1},
	}
)
// ParseTimeOfDay parses a wall-clock time, trying each layout in
// TimeOfDayLayouts and returning the first successful parse. An empty or
// unparseable string yields the zero time.Time.
func ParseTimeOfDay(s string) (t time.Time) {
	if s == "" {
		return
	}
	for _, layout := range TimeOfDayLayouts {
		parsed, err := time.Parse(layout, s)
		if err == nil {
			return parsed
		}
	}
	return
}
// ParseTimeRangeOfDay parses a "begin-end" time-of-day range. A missing or
// unparseable end defaults to one minute after begin; an unparseable begin
// yields two zero times.
func ParseTimeRangeOfDay(s string) (begin time.Time, end time.Time) {
	parts := strings.Split(s, "-")
	begin = ParseTimeOfDay(strings.TrimSpace(parts[0]))
	if begin.IsZero() {
		return
	}
	if len(parts) > 1 {
		end = ParseTimeOfDay(strings.TrimSpace(parts[1]))
	}
	if end.IsZero() {
		end = begin.Add(time.Minute)
	}
	return
}
// BeginningOfDay returns the start (00:00:00 UTC) of the current day.
func BeginningOfDay() time.Time {
	return BeginningOfTheDay(time.Now())
}
// EndOfDay returns the exclusive end (start of the next day, UTC) of the
// current day.
func EndOfDay() time.Time {
	return EndOfTheDay(time.Now())
}
// BeginningOfTheDay beginning of the day specified, to UTC
func BeginningOfTheDay(t time.Time) time.Time {
return time.Date(t.Year(), t.Month(), t.Day(), 0, 0, 0, 0, time.UTC)
}
// EndOfTheDay returns the instant 24 hours after the UTC beginning of the
// day containing t, i.e. the exclusive end of that day.
func EndOfTheDay(t time.Time) time.Time {
	return BeginningOfTheDay(t).Add(time.Hour * 24)
}
// SameDay the two time is the same day
func SameDay(t1 time.Time, t2 time.Time) bool {
return t1.Year() == t2.Year() && t1.Month() == t2.Month() && t1.Day() == t2.Day()
}
// FormatStorageSize formats a byte count using the largest unit whose size
// it reaches, e.g. 2048 -> "2.00KB". Non-positive counts format as "0".
func FormatStorageSize(bytes int) string {
	if bytes <= 0 {
		return "0"
	}
	for _, s := range StorageSizeSuffixes {
		// >= (not >) so that a single byte and exact multiples (1KB, 1MB, ...)
		// select their own unit; with > the original returned "" for
		// bytes == 1 and misbucketed exact boundaries (1024 -> "1024.00B").
		if bytes >= s.N {
			return fmt.Sprintf("%.2f%s", float64(bytes)/float64(s.N), s.S)
		}
	}
	return ""
}
// BSONPutMatchField stores a match condition for key in m: a direct match
// for a single value, a $in clause for several, and nothing for none.
func BSONPutMatchField(m bson.M, key string, val []string) {
	switch {
	case len(val) == 0:
		// nothing to match on
	case len(val) == 1:
		m[key] = val[0]
	default:
		m[key] = bson.M{"$in": val}
	}
}
// Quote wraps s in double quotes unless it is already quoted (starts and
// ends with `"` and is longer than one character).
func Quote(s string) string {
	alreadyQuoted := len(s) > 1 && strings.HasPrefix(s, "\"") && strings.HasSuffix(s, "\"")
	if alreadyQuoted {
		return s
	}
	return "\"" + s + "\""
}
// BSONPutTextField stores a $text/$search clause in m built from the given
// values, each individually quoted and space-joined. Empty input is a no-op.
func BSONPutTextField(m bson.M, val []string) {
	if len(val) == 0 {
		return
	}
	quoted := make([]string, len(val))
	for i, c := range val {
		quoted[i] = Quote(c)
	}
	m["$text"] = bson.M{
		"$search": strings.Join(quoted, " "),
	}
}
// CompactField trims a query field and, for comma-separated values, trims
// each individual value as well.
func CompactField(str string) string {
	str = strings.TrimSpace(str)
	if !strings.Contains(str, ",") {
		return str
	}
	parts := strings.Split(str, ",")
	for i, p := range parts {
		parts[i] = strings.TrimSpace(p)
	}
	return strings.Join(parts, ",")
}
package btree
import (
"container/list"
"fmt"
)
// BinaryTree represents a binary search tree.
type BinaryTree struct {
	root *Node // nil when the tree is empty
}
// NewBinaryTree creates a new, empty binary tree.
func NewBinaryTree() *BinaryTree {
	return &BinaryTree{}
}
// Sum returns the sum of all tree node values, using an iterative
// depth-first traversal (traversal order does not affect the total).
func (t *BinaryTree) Sum() int {
	if t.root == nil {
		return 0
	}
	total := 0
	stack := []*Node{t.root}
	for len(stack) > 0 {
		n := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		total += n.Data
		if n.Left != nil {
			stack = append(stack, n.Left)
		}
		if n.Right != nil {
			stack = append(stack, n.Right)
		}
	}
	return total
}
// Add inserts data into the binary search tree. Duplicate values are
// ignored: the tree keeps distinct keys. (The original looped forever on a
// duplicate because neither the < nor the > branch was taken and the loop
// had no exit for equality.)
func (t *BinaryTree) Add(data int) {
	if t.root == nil {
		t.root = NewNode(data)
		return
	}
	current := t.root
	for {
		switch {
		case data < current.Data:
			if current.Left == nil {
				current.Left = NewNode(data)
				return
			}
			current = current.Left
		case data > current.Data:
			if current.Right == nil {
				current.Right = NewNode(data)
				return
			}
			current = current.Right
		default:
			// Duplicate key: nothing to do.
			return
		}
	}
}
// Add inserts data into the subtree rooted at root using binary search tree
// ordering; duplicates are silently ignored.
func Add(root *Node, data int) {
	switch {
	case data < root.Data:
		if root.Left == nil {
			root.Left = NewNode(data)
		} else {
			Add(root.Left, data)
		}
	case data > root.Data:
		if root.Right == nil {
			root.Right = NewNode(data)
		} else {
			Add(root.Right, data)
		}
	}
}
// Sum returns the sum of all node values in the subtree rooted at root;
// an empty subtree sums to zero.
func Sum(root *Node) int {
	if root == nil {
		return 0
	}
	return Sum(root.Left) + root.Data + Sum(root.Right)
}
// Height returns the height of the subtree rooted at root: the number of
// edges on the longest root-to-leaf path, or -1 for an empty subtree.
func Height(root *Node) int {
	if root == nil {
		return -1
	}
	l, r := Height(root.Left), Height(root.Right)
	if l > r {
		return 1 + l
	}
	return 1 + r
}
// PrintInOrder prints the tree in-order: Left, Root, Right. A nil root
// prints nothing (the original dereferenced root unconditionally and
// panicked when called on an empty tree).
func PrintInOrder(root *Node) {
	if root == nil {
		return
	}
	PrintInOrder(root.Left)
	fmt.Printf(" %d ", root.Data)
	PrintInOrder(root.Right)
}
// PrintPreOrder prints the tree in pre-order: Root, Left, Right. A nil root
// prints nothing (the original dereferenced root unconditionally and
// panicked when called on an empty tree).
func PrintPreOrder(root *Node) {
	if root == nil {
		return
	}
	fmt.Printf(" %d ", root.Data)
	PrintPreOrder(root.Left)
	PrintPreOrder(root.Right)
}
// PrintPostOrder prints the tree in post-order: Left, Right, Root.
// Bug fix: the original recursed via PrintPreOrder for both children, so
// every subtree below the root was actually printed pre-order. A nil root
// prints nothing.
func PrintPostOrder(root *Node) {
	if root == nil {
		return
	}
	PrintPostOrder(root.Left)
	PrintPostOrder(root.Right)
	fmt.Printf(" %d ", root.Data)
}
// PrintLevelOrder prints the tree in level order.
func PrintLevelOrder(root *Node) {
if root == nil {
return
}
queue := list.New()
queue.PushBack(root)
for queue.Len() > 0 {
e := queue.Front()
queue.Remove(e)
v := e.Value
root = v.(*Node)
fmt.Printf(" %d ", root.Data)
if root.Left != nil {
queue.PushBack(root.Left)
}
if root.Right != nil {
queue.PushBack(root.Right)
}
}
} | btree/tree.go | 0.754734 | 0.427337 | tree.go | starcoder |
package utils
// NodeColor is the color attached to a red-black tree node.
type NodeColor byte

const (
	Red NodeColor = iota // 0
	Black
	// DoubleBlack is a transient color used only while rebalancing deletions.
	DoubleBlack
)

// Direction identifies which child link of a parent leads to a node.
type Direction byte

const (
	LEFT Direction = iota // 0
	RIGHT
	// NODIR means "no direction": the node is the root or was not found.
	NODIR
)
// nilNodeSingle is the shared NilNode sentinel handed out by GetNilNode;
// every leaf link in the tree points at this single value.
var nilNodeSingle = new(NilNode)
// TreeNodeInterface is implemented by the concrete tree nodes (ValueNode,
// KeyValueNode) and by the NilNode leaf sentinel.
type TreeNodeInterface interface {
	// IsNilNode reports whether the node is the leaf sentinel.
	IsNilNode() bool
	IsNotNilNode() bool
	GetKey() CollectionObject
	GetData() CollectionObject
	GetKeyValue() interface{}
	GetValue() interface{}
	GetColor() NodeColor
	SetColor(color NodeColor)
	// acceptNode copies rv's key/payload into the receiver (used on delete).
	acceptNode(rv TreeNodeInterface)
	isEqualTo(nodeInterface TreeNodeInterface) bool
	isLesserThan(nodeInterface TreeNodeInterface) bool
	isGreaterThan(nodeInterface TreeNodeInterface) bool
	setValueFrom(nodeInterface TreeNodeInterface)
	getLeft() TreeNodeInterface
	getRight() TreeNodeInterface
	getParent() TreeNodeInterface
	setLeft(nodeInterface TreeNodeInterface)
	setRight(nodeInterface TreeNodeInterface)
	setParent(nodeInterface TreeNodeInterface)
}
// NilNode is the shared black leaf sentinel of the red-black tree. It only
// answers identity and color queries; data accessors panic, and comparisons
// against the sentinel always report false.
type NilNode struct {
}
// GetNilNode returns the process-wide leaf sentinel.
func GetNilNode() *NilNode {
	return nilNodeSingle
}
func (*NilNode) IsNilNode() bool {
	return true
}
func (*NilNode) IsNotNilNode() bool {
	return false
}
func (*NilNode) GetValue() interface{} {
	panic("implement me")
}
func (*NilNode) GetKey() CollectionObject {
	panic("implement me")
}
func (*NilNode) GetKeyValue() interface{} {
	panic("implement me")
}
func (*NilNode) GetData() CollectionObject {
	panic("implement me")
}
// GetColor is Black: leaves are always black in a red-black tree.
func (*NilNode) GetColor() NodeColor {
	return Black
}
func (*NilNode) SetColor(color NodeColor) {
	panic("implement me")
}
func (*NilNode) acceptNode(rv TreeNodeInterface) {
	panic("implement me")
}
// Ordering queries on the sentinel are always false.
func (*NilNode) isEqualTo(nodeInterface TreeNodeInterface) bool {
	return false
}
func (*NilNode) isLesserThan(nodeInterface TreeNodeInterface) bool {
	return false
}
func (*NilNode) isGreaterThan(nodeInterface TreeNodeInterface) bool {
	return false
}
func (*NilNode) setValueFrom(nodeInterface TreeNodeInterface) {
	panic("implement me")
}
// NOTE(review): the link getters return a nil interface rather than the
// sentinel itself — callers must not descend past a NilNode.
func (*NilNode) getLeft() TreeNodeInterface {
	return nil
}
func (*NilNode) getRight() TreeNodeInterface {
	return nil
}
func (*NilNode) getParent() TreeNodeInterface {
	return nil
}
func (*NilNode) setLeft(nodeInterface TreeNodeInterface) {
	panic("implement me")
}
func (*NilNode) setRight(nodeInterface TreeNodeInterface) {
	panic("implement me")
}
func (*NilNode) setParent(nodeInterface TreeNodeInterface) {
	panic("implement me")
}
// TreeNode holds the state shared by ValueNode and KeyValueNode: the
// payload, the node color, and the three structural links (which the
// concrete constructors initialise to the NilNode sentinel).
type TreeNode struct {
	Data CollectionObject
	color NodeColor
	left TreeNodeInterface
	right TreeNodeInterface
	parent TreeNodeInterface
}
// A concrete TreeNode is never the leaf sentinel.
func (node *TreeNode) IsNilNode() bool {
	return false
}
func (node *TreeNode) IsNotNilNode() bool {
	return true
}
// Structural link accessors.
func (node *TreeNode) getLeft() TreeNodeInterface {
	return node.left
}
func (node *TreeNode) getRight() TreeNodeInterface {
	return node.right
}
func (node *TreeNode) getParent() TreeNodeInterface {
	return node.parent
}
func (node *TreeNode) setLeft(nodeInterface TreeNodeInterface) {
	node.left = nodeInterface
}
func (node *TreeNode) setRight(nodeInterface TreeNodeInterface) {
	node.right = nodeInterface
}
func (node *TreeNode) setParent(nodeInterface TreeNodeInterface) {
	node.parent = nodeInterface
}
// GetValue unwraps the payload's underlying value.
func (node *TreeNode) GetValue() interface{} {
	return node.Data.GetValue()
}
func (node *TreeNode) GetData() CollectionObject {
	return node.Data
}
func (node *TreeNode) GetColor() NodeColor {
	return node.color
}
func (node *TreeNode) SetColor(color NodeColor) {
	node.color = color
}
// SwapColors exchanges the colors of the two given nodes.
func SwapColors(lv TreeNodeInterface, rv TreeNodeInterface) {
	lc, rc := lv.GetColor(), rv.GetColor()
	lv.SetColor(rc)
	rv.SetColor(lc)
}
// InsertBST inserts node into the subtree rooted at root using plain binary
// search tree ordering and returns the (possibly new) subtree root. If an
// equal node already exists, its value is overwritten in place via
// setValueFrom. Parent links are maintained on the way back up. Red-black
// invariants are NOT restored here; callers follow up with FixAddRBTree.
func InsertBST(root TreeNodeInterface, node TreeNodeInterface) TreeNodeInterface {
	if root.IsNilNode() {
		return node
	}
	if root.isEqualTo(node) {
		root.setValueFrom(node)
		return root
	}
	if node.isLesserThan(root) {
		left := InsertBST(root.getLeft(), node)
		root.setLeft(left)
		root.getLeft().setParent(root)
	} else {
		right := InsertBST(root.getRight(), node)
		root.setRight(right)
		root.getRight().setParent(root)
	}
	return root
}
// DeleteBST locates the BST node that should be physically removed for the
// given node's key and returns it (the nil sentinel when the key is absent).
// A node with two children is reduced to its in-order successor: the
// successor's contents are copied into the found node (acceptNode) and the
// search recurses into the right subtree to return the successor's physical
// node. Nothing is unlinked here — callers pass the result to
// FixDeleteRBTree, which performs the actual removal and rebalancing.
func DeleteBST(root TreeNodeInterface, node TreeNodeInterface) TreeNodeInterface {
	if root.IsNilNode() {
		return root
	}
	if node.isLesserThan(root) {
		return DeleteBST(root.getLeft(), node)
	}
	if node.isGreaterThan(root) {
		return DeleteBST(root.getRight(), node)
	}
	// Found: with at most one child this node itself is the removal target.
	if root.getLeft().IsNilNode() || root.getRight().IsNilNode() {
		return root
	}
	// Two children: overwrite with the in-order successor and remove it instead.
	temp := minValueNode(root.getRight())
	root.acceptNode(temp)
	return DeleteBST(root.getRight(), temp)
}
// FixAddRBTree restores the red-black invariants after node has been
// inserted (red) by InsertBST, and returns the possibly-new tree root.
// Classic insert fixup: while a red node has a red parent, either recolor
// (red uncle) and continue from the grandparent, or rotate and recolor
// (black uncle).
func FixAddRBTree(root TreeNodeInterface, node TreeNodeInterface) TreeNodeInterface {
	var parent TreeNodeInterface
	var grandparent TreeNodeInterface
	for node != root && node.GetColor() == Red && node.getParent().GetColor() == Red {
		parent = node.getParent()
		grandparent = parent.getParent()
		if parent == grandparent.getLeft() {
			uncle := grandparent.getRight()
			if uncle.GetColor() == Red {
				// Case 1: red uncle — recolor and move the violation upward.
				uncle.SetColor(Black)
				parent.SetColor(Black)
				grandparent.SetColor(Red)
				node = grandparent
			} else {
				if node == parent.getRight() {
					// Case 2: inner child — rotate into the outer configuration.
					root = rotateLeft(root, parent)
					node = parent
					parent = node.getParent()
				}
				// Case 3: outer child — rotate the grandparent and swap colors.
				root = rotateRight(root, grandparent)
				SwapColors(parent, grandparent)
				node = parent
			}
		} else {
			// Mirror image of the three cases above.
			uncle := grandparent.getLeft()
			if uncle.GetColor() == Red {
				uncle.SetColor(Black)
				parent.SetColor(Black)
				grandparent.SetColor(Red)
				node = grandparent
			} else {
				if node == parent.getLeft() {
					root = rotateRight(root, parent)
					node = parent
					parent = node.getParent()
				}
				root = rotateLeft(root, grandparent)
				SwapColors(parent, grandparent)
				node = parent
			}
		}
	}
	// The root is always black.
	root.SetColor(Black)
	return root
}
// FixDeleteRBTree physically removes node (as located by DeleteBST) from
// the tree and restores the red-black invariants, returning the new root.
// The easy cases (node or one of its children is red) are handled by
// splicing and recoloring; otherwise the node is marked DoubleBlack and the
// standard delete-fixup loop (red sibling / black sibling with black or red
// children, plus mirror cases) rebalances upward before the node is
// unlinked. Returns nil when node is the sentinel or the sole root.
func FixDeleteRBTree(root TreeNodeInterface, node TreeNodeInterface) TreeNodeInterface {
	if node.IsNilNode() {
		return nil
	}
	if node == root {
		root = GetNilNode()
		return nil
	}
	if node.GetColor() == Red || node.getLeft().GetColor() == Red || node.getRight().GetColor() == Red {
		// Simple case: splice out node, promote its (at most one) real child,
		// and color it black.
		var child TreeNodeInterface
		if !node.getLeft().IsNilNode() {
			child = node.getLeft()
		} else {
			child = node.getRight()
		}
		if node == node.getParent().getLeft() {
			node.getParent().setLeft(child)
			if !child.IsNilNode() {
				child.setParent(node.getParent())
			}
		} else {
			node.getParent().setRight(child)
			if !child.IsNilNode() {
				child.setParent(node.getParent())
			}
		}
		child.SetColor(Black)
		node.setParent(GetNilNode())
		node.setLeft(GetNilNode())
		node.setRight(GetNilNode())
	} else {
		// Hard case: removing a black node with black children creates a
		// "double black" deficit that must be pushed up or absorbed.
		var sibling TreeNodeInterface
		var parent TreeNodeInterface
		ptr := node
		ptr.SetColor(DoubleBlack)
		for ptr != root && ptr.GetColor() == DoubleBlack {
			parent = ptr.getParent()
			if ptr == parent.getLeft() {
				sibling = parent.getRight()
				if sibling.GetColor() == Red {
					// Red sibling: rotate to get a black sibling, then retry.
					sibling.SetColor(Black)
					parent.SetColor(Red)
					root = rotateLeft(root, parent)
				} else {
					if sibling.getLeft().GetColor() == Black && sibling.getRight().GetColor() == Black {
						// Black sibling, black nephews: recolor and move up.
						sibling.SetColor(Red)
						if parent.GetColor() == Red {
							parent.SetColor(Black)
						} else {
							parent.SetColor(DoubleBlack)
						}
						ptr = parent
					} else {
						if sibling.getRight().GetColor() == Black {
							// Near red nephew: rotate it into the far position.
							sibling.getLeft().SetColor(Black)
							sibling.SetColor(Red)
							root = rotateRight(root, sibling)
							sibling = parent.getRight()
						}
						// Far red nephew: final rotation absorbs the deficit.
						sibling.SetColor(parent.GetColor())
						parent.SetColor(Black)
						sibling.getRight().SetColor(Black)
						root = rotateLeft(root, parent)
						break
					}
				}
			} else {
				// Mirror image of the cases above.
				sibling = parent.getLeft()
				if sibling.GetColor() == Red {
					sibling.SetColor(Black)
					parent.SetColor(Red)
					root = rotateRight(root, parent)
				} else {
					if sibling.getLeft().GetColor() == Black && sibling.getRight().GetColor() == Black {
						sibling.SetColor(Red)
						if parent.GetColor() == Red {
							parent.SetColor(Black)
						} else {
							parent.SetColor(DoubleBlack)
						}
						ptr = parent
					} else {
						if sibling.getLeft().GetColor() == Black {
							sibling.getRight().SetColor(Black)
							sibling.SetColor(Red)
							root = rotateLeft(root, sibling)
							sibling = parent.getLeft()
						}
						sibling.SetColor(parent.GetColor())
						parent.SetColor(Black)
						sibling.getLeft().SetColor(Black)
						root = rotateRight(root, parent)
						break
					}
				}
			}
		}
		// Unlink the (now balanced-around) node and reassert a black root.
		if node == node.getParent().getLeft() {
			node.getParent().setLeft(GetNilNode())
		} else {
			node.getParent().setRight(GetNilNode())
		}
		node.setParent(GetNilNode())
		node.setLeft(GetNilNode())
		node.setRight(GetNilNode())
		root.SetColor(Black)
	}
	return root
}
// rotateLeft performs a left rotation about node, promoting node's right
// child into its place, and returns the (possibly new) tree root.
func rotateLeft(root TreeNodeInterface, node TreeNodeInterface) TreeNodeInterface {
	nodeToTurnLeft := node.getRight()
	/* Turn nodeToTurnLeft's left sub-tree into node's right sub-tree */
	node.setRight(nodeToTurnLeft.getLeft())
	if nodeToTurnLeft.getLeft().IsNotNilNode() {
		nodeToTurnLeft.getLeft().setParent(node)
	}
	/* nodeToTurnLeft's new parent was node's parent */
	nodeToTurnLeft.setParent(node.getParent())
	/* Set the parent to point to nodeToTurnLeft instead of node */
	/* First see whether we're at the root */
	if node.getParent().IsNilNode() {
		root = nodeToTurnLeft
	} else if node == node.getParent().getLeft() {
		/* node was on the left of its parent */
		node.getParent().setLeft(nodeToTurnLeft)
	} else {
		/* node must have been on the right */
		node.getParent().setRight(nodeToTurnLeft)
	}
	/* Finally, put node on nodeToTurnLeft's left */
	nodeToTurnLeft.setLeft(node)
	node.setParent(nodeToTurnLeft)
	return root
}
// rotateRight performs a right rotation about node, promoting node's left
// child into its place, and returns the (possibly new) tree root. Mirror of
// rotateLeft.
func rotateRight(root TreeNodeInterface, node TreeNodeInterface) TreeNodeInterface {
	nodeToTurnRight := node.getLeft()
	/* Turn nodeToTurnRight's left sub-tree into node's right sub-tree */
	node.setLeft(nodeToTurnRight.getRight())
	if nodeToTurnRight.getRight().IsNotNilNode() {
		nodeToTurnRight.getRight().setParent(node)
	}
	/* nodeToTurnRight's new parent was node's parent */
	nodeToTurnRight.setParent(node.getParent())
	/* Set the parent to point to nodeToTurnRight instead of node */
	/* First see whether we're at the root */
	if node.getParent().IsNilNode() {
		root = nodeToTurnRight
	} else if node == node.getParent().getRight() {
		/* node was on the right of its parent */
		node.getParent().setRight(nodeToTurnRight)
	} else {
		/* node must have been on the left */
		node.getParent().setLeft(nodeToTurnRight)
	}
	/* Finally, put node on nodeToTurnRight's left */
	nodeToTurnRight.setRight(node)
	node.setParent(nodeToTurnRight)
	return root
}
// minValueNode returns the minimum (leftmost) node of the subtree rooted
// at node.
func minValueNode(node TreeNodeInterface) TreeNodeInterface {
	current := node
	for left := current.getLeft(); !left.IsNilNode(); left = current.getLeft() {
		current = left
	}
	return current
}
// Visitor visits every node of a subtree.
type Visitor interface {
	Visit(node TreeNodeInterface)
}
// DoVisitor applies Action to each node in ascending (in-order) order.
type DoVisitor struct {
	Action func(each TreeNodeInterface)
}
// Visit performs an in-order traversal (left, node, right) of the subtree
// rooted at node, invoking Action on every non-sentinel node.
func (v *DoVisitor) Visit(node TreeNodeInterface) {
	if node.IsNilNode() {
		return
	}
	v.Visit(node.getLeft())
	v.Action(node)
	v.Visit(node.getRight())
}
// ReverseDoVisitor applies Action to each node in descending order.
type ReverseDoVisitor struct {
	Action func(each TreeNodeInterface)
}
// Visit performs a reverse in-order traversal (right, node, left) of the
// subtree rooted at node, invoking Action on every non-sentinel node.
func (v *ReverseDoVisitor) Visit(node TreeNodeInterface) {
	if node.IsNilNode() {
		return
	}
	v.Visit(node.getRight())
	v.Action(node)
	v.Visit(node.getLeft())
}
// ValueNode is a tree node for value-only collections (set-like); the
// payload itself serves as the ordering key.
type ValueNode struct {
	*TreeNode
}
// NewRBNode returns a ValueNode whose links are initialised to the NilNode
// sentinel.
func NewRBNode() *ValueNode {
	node := new(ValueNode)
	node.TreeNode = new(TreeNode)
	node.parent = GetNilNode()
	node.right = GetNilNode()
	node.left = GetNilNode()
	return node
}
// NOTE(review): unlike KeyValueNode, these comparators do not guard against
// the NilNode sentinel — the type assertion would panic if one were passed.
// Confirm callers only compare against real ValueNodes.
func (node *ValueNode) isEqualTo(nodeInterface TreeNodeInterface) bool {
	return node.Data.Equal(nodeInterface.(*ValueNode).Data)
}
func (node *ValueNode) isLesserThan(nodeInterface TreeNodeInterface) bool {
	return node.Data.Less(nodeInterface.(*ValueNode).Data)
}
func (node *ValueNode) isGreaterThan(nodeInterface TreeNodeInterface) bool {
	return node.Data.Greater(nodeInterface.(*ValueNode).Data)
}
// setValueFrom is deliberately empty: presumably inserting a duplicate
// value keeps the existing element unchanged — TODO confirm.
func (node *ValueNode) setValueFrom(nodeInterface TreeNodeInterface) {
}
// GetKey is nil: value nodes have no key separate from their payload.
func (*ValueNode) GetKey() CollectionObject {
	return nil
}
func (*ValueNode) GetKeyValue() interface{} {
	return nil
}
// acceptNode copies rv's payload into node (used by DeleteBST when a node
// is replaced by its in-order successor).
func (node *ValueNode) acceptNode(rv TreeNodeInterface) {
	node.Data = rv.(*ValueNode).Data
}
// KeyValueNode is a tree node for map-like collections: ordering is by Key
// while Data holds the associated value.
type KeyValueNode struct {
	*TreeNode
	Key CollectionObject
}
// NewDictNode returns a KeyValueNode whose links are initialised to the
// NilNode sentinel.
func NewDictNode() *KeyValueNode {
	node := new(KeyValueNode)
	node.TreeNode = new(TreeNode)
	node.parent = GetNilNode()
	node.right = GetNilNode()
	node.left = GetNilNode()
	return node
}
// The comparators report false for the NilNode sentinel so searches can be
// driven safely into leaf links.
func (node *KeyValueNode) isEqualTo(nodeInterface TreeNodeInterface) bool {
	if nodeInterface.IsNilNode() {
		return false
	}
	return node.Key.Equal(nodeInterface.(*KeyValueNode).Key)
}
func (node *KeyValueNode) isLesserThan(nodeInterface TreeNodeInterface) bool {
	if nodeInterface.IsNilNode() {
		return false
	}
	return node.Key.Less(nodeInterface.(*KeyValueNode).Key)
}
func (node *KeyValueNode) isGreaterThan(nodeInterface TreeNodeInterface) bool {
	if nodeInterface.IsNilNode() {
		return false
	}
	return node.Key.Greater(nodeInterface.(*KeyValueNode).Key)
}
// setValueFrom overwrites the stored value (map semantics on duplicate key).
func (node *KeyValueNode) setValueFrom(nodeInterface TreeNodeInterface) {
	node.Data = nodeInterface.(*KeyValueNode).Data
}
// acceptNode copies both key and value from rv (used on delete).
func (node *KeyValueNode) acceptNode(rv TreeNodeInterface) {
	node.Key = rv.(*KeyValueNode).Key
	node.Data = rv.(*KeyValueNode).Data
}
func (node *KeyValueNode) GetKey() CollectionObject {
	return node.Key
}
func (node *KeyValueNode) GetKeyValue() interface{} {
	return node.Key.GetValue()
}
// internalLookup descends from this searching for key, returning the parent
// of the match (or of the insertion point), whether the key was found, and
// which child link of that parent was followed.
//
// Children of real nodes are NilNode sentinels, never nil interfaces, so the
// sentinel must be tested explicitly: the original only checked `this == nil`
// and every miss fell through the comparators (all false against a NilNode)
// to the default branch, losing the direction (always NODIR). Found results
// and GetNode's behavior are unchanged.
func internalLookup(parent TreeNodeInterface, this TreeNodeInterface, key interface{}, dir Direction) (TreeNodeInterface, bool, Direction) {
	tmpNode := new(KeyValueNode)
	tmpNode.Key = &ValueHolder{key}
	switch {
	case this == nil || this.IsNilNode():
		// Missing key: report the node we stopped under and the link taken.
		return parent, false, dir
	case tmpNode.isEqualTo(this):
		return parent, true, dir
	case tmpNode.isLesserThan(this):
		return internalLookup(this, this.getLeft(), key, LEFT)
	case tmpNode.isGreaterThan(this):
		return internalLookup(this, this.getRight(), key, RIGHT)
	default:
		// Defensive: incomparable keys.
		return parent, false, NODIR
	}
}
// getParent finds the parent of the node holding key. It returns the parent
// (nil when the key sits at the root or the tree is empty), whether the key
// was found, and which child link of the parent leads toward it.
func getParent(root TreeNodeInterface, key interface{}) (parent TreeNodeInterface, found bool, dir Direction) {
	if root == nil {
		return nil, false, NODIR
	}
	// The root has no parent, hence the nil starting parent and NODIR.
	return internalLookup(nil, root, key, NODIR)
}
func GetNode(root TreeNodeInterface, key interface{}) (TreeNodeInterface, bool) {
parent, found, dir := getParent(root, key)
if found {
if parent == nil {
return root, true
} else {
var node TreeNodeInterface
switch dir {
case LEFT:
node = parent.getLeft()
case RIGHT:
node = parent.getRight()
}
if node != nil {
return node, true
}
}
}
return nil, false
} | utils/rbtree.go | 0.679923 | 0.425128 | rbtree.go | starcoder |
// Package interpolation implements various algorithms to fill in missing values in a Series or DataFrame.
package interpolation
import (
"context"
"github.com/padchin/dataframe-go"
)
// FillDirection is used to set the direction that nil values are filled.
// It is a bit-flag type: Forward and Backward may be combined.
type FillDirection uint8
// has reports whether any bit of x is set in opt.
func (opt FillDirection) has(x FillDirection) bool {
	return opt&x != 0
}
const (
	// Forward interpolates nil values from left to right.
	Forward FillDirection = 1 << iota
	// Backward interpolates nil values from right to left.
	Backward
)
// FillRegion is used to set the fill region. It is a bit-flag type:
// Interpolation and Extrapolation may be combined.
type FillRegion uint8
// has reports whether any bit of x is set in opt.
func (opt FillRegion) has(x FillRegion) bool {
	return opt&x != 0
}
const (
	// Interpolation estimates values between two known values.
	Interpolation FillRegion = 1 << iota
	// Extrapolation estimates values by extending a known sequence of values beyond
	// what is certainly known.
	Extrapolation
)
// InterpolateOptions is used to configure the Interpolate function.
type InterpolateOptions struct {
	// Method sets the algorithm used to interpolate.
	// Current options are: ForwardFill{} (default), BackwardFill{}, Linear{}, Spline{} and Lagrange{}.
	Method interpolateMethod
	// Limit sets the maximum number of consecutive nil values to fill.
	// The default is unlimited, but if set, it must be greater than 0.
	Limit *int
	// FillDirection sets the direction that nil values are interpolated.
	// The default is Forward. Forward and Backward may be combined.
	FillDirection FillDirection
	// FillRegion sets whether the interpolation function should fill nil values by interpolating and/or extrapolating.
	// The default (nil) is both.
	FillRegion *FillRegion
	// InPlace will perform the interpolation operation on the current SeriesFloat64 or DataFrame.
	// If InPlace is not set, an OrderedMapIntFloat64 will be returned. The original Series or DataFrame will be unmodified.
	InPlace bool
	// DontLock can be set to true if the Series or DataFrame should not be locked.
	DontLock bool
	// R is used to limit the range of the Series for interpolation purposes.
	R *dataframe.Range
	// HorizAxis is used to set the "x-axis" for the purposes of interpolation.
	// If not set, the horizontal axis is deemed to be spaced out with units of 1.
	// It must implement a dataframe.ToSeriesFloat64 or be a SeriesFloat64/SeriesTime.
	// It must not contain nil values in the range R.
	// When used with a DataFrame, it may be an int or string to identify the Series of the DataFrame to be used
	// as the horizontal axis.
	HorizAxis interface{}
}
// Interpolate will accept a DataFrame or SeriesFloat64 and interpolate the missing values.
// If the InPlace option is set, the DataFrame or SeriesFloat64 is modified "in place".
// Alternatively, a map[interface{}]*dataframe.OrderedMapIntFloat64 or *dataframe.OrderedMapIntFloat64 is returned respectively.
// When used with a DataFrame, only SeriesFloat64 columns (that are not set as the HorizAxis) are interpolated.
func Interpolate(ctx context.Context, sdf interface{}, opts InterpolateOptions) (interface{}, error) {
switch typ := sdf.(type) {
case *dataframe.SeriesFloat64:
x, err := interpolateSeriesFloat64(ctx, typ, opts)
if err != nil {
return nil, err
}
return x, err
case *dataframe.DataFrame:
x, err := interpolateDataFrame(ctx, typ, opts)
if err != nil {
return nil, err
}
return x, err
default:
panic("sdf must be a SeriesFloat64 or DataFrame")
}
return nil, nil
} | forecast/interpolation/interpolate.go | 0.849176 | 0.657085 | interpolate.go | starcoder |
package predictor
import (
"encoding/gob"
"fmt"
"log"
"os"
"github.com/pkg/errors"
G "gorgonia.org/gorgonia"
"gorgonia.org/tensor"
)
// convnet is a small convolutional network: three conv+ReLU+maxpool stages
// (see fwd) followed by two fully connected layers with dropout.
type convnet struct {
	g *G.ExprGraph
	w0, w1, w2, w3, w4 *G.Node // weights, the number at the back indicates which layer it's used for
	d0, d1, d2, d3 float64 // dropout probabilities
	out *G.Node // softmax output node of the graph; set by fwd
}
// NewConvnet creates a new instance of the convolution neural network with
// Glorot-initialised weights attached to graph g. Layer widths and kernel
// sizes are fixed; Predict likewise hard-codes a 1x1x19x19 input shape.
func NewConvnet(g *G.ExprGraph) NeuroNet {
	w0 := G.NewTensor(g, dt, 4, G.WithShape(23, 1, 3, 3), G.WithName("w0"), G.WithInit(G.GlorotN(1.0)))
	w1 := G.NewTensor(g, dt, 4, G.WithShape(184, 23, 3, 3), G.WithName("w1"), G.WithInit(G.GlorotN(1.0)))
	w2 := G.NewTensor(g, dt, 4, G.WithShape(1656, 184, 3, 3), G.WithName("w2"), G.WithInit(G.GlorotN(1.0)))
	w3 := G.NewMatrix(g, dt, G.WithShape(6624, 184), G.WithName("w3"), G.WithInit(G.GlorotN(1.0)))
	w4 := G.NewMatrix(g, dt, G.WithShape(184, 10), G.WithName("w4"), G.WithInit(G.GlorotN(1.0)))
	return &convnet{
		g: g,
		w0: w0,
		w1: w1,
		w2: w2,
		w3: w3,
		w4: w4,
		d0: 0.2,
		d1: 0.2,
		d2: 0.2,
		d3: 0.55,
	}
}
// FromBackup restores previously trained weights from the backup file fn,
// binding each decoded tensor to the matching graph node.
func (c *convnet) FromBackup(fn string) error {
	w0, w1, w2, w3, w4, err := readConvnetFromBackup(fn)
	if err != nil {
		return fmt.Errorf("cannot read from backup, %s", err)
	}
	bindings := []struct {
		node *G.Node
		val  tensor.Tensor
	}{
		{c.w0, w0},
		{c.w1, w1},
		{c.w2, w2},
		{c.w3, w3},
		{c.w4, w4},
	}
	for _, b := range bindings {
		if err := G.Let(b.node, b.val); err != nil {
			return err
		}
	}
	return nil
}
// Save creates a backup file containing the gob-encoded values of the five
// weight nodes, in the order FromBackup expects.
//
// Because the file is being written, an error from Close (e.g. a flush
// failure) would silently lose data if ignored; it is now surfaced when no
// earlier error occurred.
func (c *convnet) Save(fn string) (err error) {
	f, err := os.Create(fn)
	if err != nil {
		return err
	}
	defer func() {
		if cerr := f.Close(); cerr != nil && err == nil {
			err = cerr
		}
	}()
	enc := gob.NewEncoder(f)
	for _, node := range []*G.Node{c.w0, c.w1, c.w2, c.w3, c.w4} {
		if err = enc.Encode(node.Value()); err != nil {
			return err
		}
	}
	return nil
}
// Predict binds inputs to a fresh 1x1x19x19 input node, builds the forward
// pass, compiles it to a tape-machine program, runs it, and returns the
// network's softmax output node.
// NOTE(review): compile/run failures call log.Fatalf and abort the whole
// process instead of using the err return; converting them to returned
// errors would be preferable (it would also require dropping the log
// import) — confirm no caller relies on the fatal behavior.
func (c *convnet) Predict(inputs tensor.Tensor) (solution *G.Node, err error) {
	x := G.NewTensor(c.g, dt, 4, G.WithShape(1, 1, 19, 19), G.WithName("x"))
	if err = G.Let(x, inputs); err != nil {
		return
	}
	if err = c.fwd(x); err != nil {
		return
	}
	prog, locMap, err := G.Compile(c.g)
	if err != nil {
		log.Fatalf("cannot compile tape machine program, %s", err)
	}
	vm := G.NewTapeMachine(c.g, G.WithPrecompiled(prog, locMap))
	defer vm.Close()
	if err = vm.RunAll(); err != nil {
		log.Fatalf("cannot run tape machine, %s", err)
	}
	solution = c.out
	return
}
// Graph returns the expression graph backing the network.
func (m *convnet) Graph() *G.ExprGraph {
	return m.g
}
// outNode returns the network's output node; valid only after fwd has run.
func (m *convnet) outNode() *G.Node {
	return m.out
}
// learnables lists the trainable weight nodes (for use by a solver).
func (m *convnet) learnables() G.Nodes {
	return G.Nodes{m.w0, m.w1, m.w2, m.w3, m.w4}
}
// fwd wires the forward pass onto the graph starting from input node x:
// three conv(3x3, pad 1) -> ReLU -> maxpool(2x2) stages (dropout after each
// of the first two and after the flattened third), a fully connected
// ReLU+dropout layer, and a final linear layer whose softmax is stored in
// m.out.
func (m *convnet) fwd(x *G.Node) (err error) {
	var c0, c1, c2, fc *G.Node
	var a0, a1, a2, a3 *G.Node
	var p0, p1, p2 *G.Node
	var l0, l1, l2, l3 *G.Node
	// Stage 0: conv -> ReLU -> maxpool -> dropout.
	if c0, err = G.Conv2d(x, m.w0, tensor.Shape{3, 3}, []int{1, 1}, []int{1, 1}, []int{1, 1}); err != nil {
		return errors.Wrap(err, "Layer 0 Convolution failed")
	}
	if a0, err = G.Rectify(c0); err != nil {
		return errors.Wrap(err, "Layer 0 activation failed")
	}
	if p0, err = G.MaxPool2D(a0, tensor.Shape{2, 2}, []int{0, 0}, []int{2, 2}); err != nil {
		return errors.Wrap(err, "Layer 0 Maxpooling failed")
	}
	if l0, err = G.Dropout(p0, m.d0); err != nil {
		return errors.Wrap(err, "Unable to apply a dropout")
	}
	// Stage 1: conv -> ReLU -> maxpool -> dropout.
	if c1, err = G.Conv2d(l0, m.w1, tensor.Shape{3, 3}, []int{1, 1}, []int{1, 1}, []int{1, 1}); err != nil {
		return errors.Wrap(err, "Layer 1 Convolution failed")
	}
	if a1, err = G.Rectify(c1); err != nil {
		return errors.Wrap(err, "Layer 1 activation failed")
	}
	if p1, err = G.MaxPool2D(a1, tensor.Shape{2, 2}, []int{0, 0}, []int{2, 2}); err != nil {
		return errors.Wrap(err, "Layer 1 Maxpooling failed")
	}
	if l1, err = G.Dropout(p1, m.d1); err != nil {
		return errors.Wrap(err, "Unable to apply a dropout to layer 1")
	}
	// Stage 2: conv -> ReLU -> maxpool, then flatten to (batch, features).
	if c2, err = G.Conv2d(l1, m.w2, tensor.Shape{3, 3}, []int{1, 1}, []int{1, 1}, []int{1, 1}); err != nil {
		return errors.Wrap(err, "Layer 2 Convolution failed")
	}
	if a2, err = G.Rectify(c2); err != nil {
		return errors.Wrap(err, "Layer 2 activation failed")
	}
	if p2, err = G.MaxPool2D(a2, tensor.Shape{2, 2}, []int{0, 0}, []int{2, 2}); err != nil {
		return errors.Wrap(err, "Layer 2 Maxpooling failed")
	}
	var r2 *G.Node
	b, c, h, w := p2.Shape()[0], p2.Shape()[1], p2.Shape()[2], p2.Shape()[3]
	if r2, err = G.Reshape(p2, tensor.Shape{b, c * h * w}); err != nil {
		return errors.Wrap(err, "Unable to reshape layer 2")
	}
	if l2, err = G.Dropout(r2, m.d2); err != nil {
		return errors.Wrap(err, "Unable to apply a dropout on layer 2")
	}
	// Fully connected layer with ReLU and dropout.
	if fc, err = G.Mul(l2, m.w3); err != nil {
		return errors.Wrapf(err, "Unable to multiply l2 and w3")
	}
	if a3, err = G.Rectify(fc); err != nil {
		return errors.Wrapf(err, "Unable to activate fc")
	}
	if l3, err = G.Dropout(a3, m.d3); err != nil {
		return errors.Wrapf(err, "Unable to apply a dropout on layer 3")
	}
	// Output layer: linear projection followed by softmax.
	var out *G.Node
	if out, err = G.Mul(l3, m.w4); err != nil {
		return errors.Wrapf(err, "Unable to multiply l3 and w4")
	}
	m.out, err = G.SoftMax(out)
	return
}
func readConvnetFromBackup(fn string) (w0, w1, w2, w3, w4 tensor.Tensor, err error) {
f, err := os.Open(fn)
if err != nil {
return
}
defer f.Close()
dec := gob.NewDecoder(f)
var x0, x1, x2, x3, x4 *tensor.Dense
err = dec.Decode(&x0)
if err != nil {
return
}
err = dec.Decode(&x1)
if err != nil {
return
}
err = dec.Decode(&x2)
if err != nil {
return
}
err = dec.Decode(&x3)
if err != nil {
return
}
err = dec.Decode(&x4)
if err != nil {
return
}
return x0, x1, x2, x3, x4, nil
} | predictor/convnet.go | 0.651466 | 0.495972 | convnet.go | starcoder |
package fauxgl
// Triangle is a renderable triangle defined by three vertices.
type Triangle struct {
	V1, V2, V3 Vertex
}
// NewTriangle builds a triangle from three vertices, filling in any
// zero-valued vertex normals with the face normal.
func NewTriangle(v1, v2, v3 Vertex) *Triangle {
	t := Triangle{v1, v2, v3}
	t.FixNormals()
	return &t
}
// NewTriangleForPoints builds a triangle from three bare positions; the
// other vertex attributes are left at their zero values (so the vertex
// normals are filled in with the face normal).
func NewTriangleForPoints(p1, p2, p3 Vector) *Triangle {
	return NewTriangle(
		Vertex{Position: p1},
		Vertex{Position: p2},
		Vertex{Position: p3},
	)
}
// IsDegenerate reports whether the triangle has no usable area: two or
// more coincident corners, or any corner position that is itself
// degenerate.
func (t *Triangle) IsDegenerate() bool {
	a, b, c := t.V1.Position, t.V2.Position, t.V3.Position
	switch {
	case a == b, a == c, b == c:
		return true
	default:
		return a.IsDegenerate() || b.IsDegenerate() || c.IsDegenerate()
	}
}
// Normal returns the unit face normal, computed from the cross product of
// the two edges leaving V1.
func (t *Triangle) Normal() Vector {
	u := t.V2.Position.Sub(t.V1.Position)
	v := t.V3.Position.Sub(t.V1.Position)
	return u.Cross(v).Normalize()
}
// Area returns the triangle's surface area: half the magnitude of the
// cross product of the two edges leaving V1.
func (t *Triangle) Area() float64 {
	u := t.V2.Position.Sub(t.V1.Position)
	v := t.V3.Position.Sub(t.V1.Position)
	return u.Cross(v).Length() * 0.5
}
// FixNormals assigns the face normal to every vertex whose normal is still
// the zero value; vertices with explicit normals are left untouched.
func (t *Triangle) FixNormals() {
	n := t.Normal()
	var zero Vector
	for _, v := range []*Vertex{&t.V1, &t.V2, &t.V3} {
		if v.Normal == zero {
			v.Normal = n
		}
	}
}
// BoundingBox returns the axis-aligned box enclosing the three corners.
func (t *Triangle) BoundingBox() Box {
	lo := t.V1.Position.Min(t.V2.Position).Min(t.V3.Position)
	hi := t.V1.Position.Max(t.V2.Position).Max(t.V3.Position)
	return Box{lo, hi}
}
// Transform applies matrix in place: positions are transformed as points
// and normals as directions.
func (t *Triangle) Transform(matrix Matrix) {
	for _, v := range []*Vertex{&t.V1, &t.V2, &t.V3} {
		v.Position = matrix.MulPosition(v.Position)
		v.Normal = matrix.MulDirection(v.Normal)
	}
}
// ReverseWinding flips the triangle: V1 and V3 swap places and every
// vertex normal is negated, so the face points the opposite way.
func (t *Triangle) ReverseWinding() {
	t.V1, t.V3 = t.V3, t.V1
	for _, v := range []*Vertex{&t.V1, &t.V2, &t.V3} {
		v.Normal = v.Normal.Negate()
	}
}
// SetColor assigns the same color to all three vertices.
func (t *Triangle) SetColor(c Color) {
	for _, v := range []*Vertex{&t.V1, &t.V2, &t.V3} {
		v.Color = c
	}
}
// func (t *Triangle) RandomPoint() Vector {
// v1 := t.V1.Position
// v2 := t.V2.Position.Sub(v1)
// v3 := t.V3.Position.Sub(v1)
// for {
// a := rand.Float64()
// b := rand.Float64()
// if a+b <= 1 {
// return v1.Add(v2.MulScalar(a)).Add(v3.MulScalar(b))
// }
// }
// }
// func (t *Triangle) Area() float64 {
// e1 := t.V2.Position.Sub(t.V1.Position)
// e2 := t.V3.Position.Sub(t.V1.Position)
// return e1.Cross(e2).Length() / 2
// }
package internal
import (
	"errors"
	"fmt"
	"math"
	"sync/atomic"
)
// Histogram normalization mode: how a light frame's histogram is matched
// to the reference frame before stacking or channel combination.
type HistoNormMode int
const (
	HNMNone = iota // Do not normalize histogram
	HNMLocScale // Normalize histogram by matching location and scale of the reference frame. Good for stacking lights
	HNMLocBlack // Normalize histogram to match location of the reference frame by shifting black point. Good for RGB
	HNMAuto // Auto mode. Uses LocScale for stacking, and LocBlack for (L)RGB combination.
)
// Replacement mode for out of bounds values when projecting images
type OutOfBoundsMode int
const (
	OOBModeNaN = iota // Replace with NaN. Stackers ignore NaNs, so they just take frames into account which have data for the given pixel
	OOBModeRefLocation // Replace with reference frame location estimate. Good for projecting data for one channel before stacking
	OOBModeOwnLocation // Replace with location estimate for the current frame. Good for projecting RGB, where locations can differ
)
// PostProcessLights post-processes all light frames with the given settings,
// running up to imageLevelParallelism frames concurrently. It returns the
// number of frames that failed post-processing.
func PostProcessLights(alignRef, histoRef *FITSImage, lights []*FITSImage, align int32, alignK int32, alignThreshold float32,
	normalize HistoNormMode, oobMode OutOfBoundsMode, usmSigma, usmGain, usmThresh float32,
	postProcessedPattern string, imageLevelParallelism int32) (numErrors int) {
	var aligner *Aligner
	if align != 0 {
		if alignRef == nil || alignRef.Stars == nil || len(alignRef.Stars) == 0 {
			LogFatal("Unable to align without star detections in reference frame")
		}
		aligner = NewAligner(alignRef.Naxisn, alignRef.Stars, alignK)
	}
	if usmGain > 0 {
		kernel := GaussianKernel1D(usmSigma)
		LogPrintf("Unsharp masking kernel sigma %.2f size %d: %v\n", usmSigma, len(kernel), kernel)
	}
	// The failure count is incremented from multiple worker goroutines, so
	// it must be updated atomically: the original plain numErrors++ was a
	// data race.
	var errCount int32
	sem := make(chan bool, imageLevelParallelism) // counting semaphore bounding concurrency
	for i, lightP := range lights {
		sem <- true
		go func(i int, lightP *FITSImage) {
			defer func() { <-sem }()
			res, err := postProcessLight(aligner, histoRef, lightP, alignThreshold, normalize, oobMode, usmSigma, usmGain, usmThresh)
			if err != nil {
				LogPrintf("%d: Error: %s\n", lightP.ID, err.Error())
				atomic.AddInt32(&errCount, 1)
			} else if postProcessedPattern != "" {
				// Write image to (temporary) file
				err = res.WriteFile(fmt.Sprintf(postProcessedPattern, lightP.ID))
				if err != nil {
					LogFatalf("Error writing file: %s\n", err)
				}
			}
			if res != lightP {
				// Replace the frame with the processed result and release the
				// original pixel data. NOTE(review): on failure res is nil, so
				// a failed frame is replaced by nil here, as in the original --
				// confirm downstream code tolerates nil entries.
				lightP.Data = nil
				lights[i] = res
			}
		}(i, lightP)
	}
	for i := 0; i < cap(sem); i++ { // wait for all goroutines to finish
		sem <- true
	}
	numErrors = int(atomic.LoadInt32(&errCount))
	return numErrors
}
// Postprocess a single light frame with given settings. Processing steps can include:
// normalization, alignment and resampling in reference frame, and unsharp masking.
// Returns the processed frame -- either light itself or a newly projected
// image -- or an error when the alignment residual exceeds alignThreshold.
func postProcessLight(aligner *Aligner, histoRef, light *FITSImage, alignThreshold float32, normalize HistoNormMode,
	oobMode OutOfBoundsMode, usmSigma, usmGain, usmThresh float32) (res *FITSImage, err error) {
	// Match reference frame histogram
	switch normalize {
	case HNMNone:
		// do nothing
	case HNMLocScale:
		light.MatchHistogram(histoRef.Stats)
		LogPrintf("%d: %s\n", light.ID, light.Stats)
	case HNMLocBlack:
		light.ShiftBlackToMove(light.Stats.Location, histoRef.Stats.Location)
		// this err intentionally shadows the named return and is checked
		// immediately below
		var err error
		light.Stats, err=CalcExtendedStats(light.Data, light.Naxisn[0])
		if err!=nil { return nil, err }
		LogPrintf("%d: %s\n", light.ID, light.Stats)
	}
	// Is alignment to the reference frame required?
	if aligner==nil || aligner.RefStars==nil || len(aligner.RefStars)==0 {
		// Generally not required
		light.Trans=IdentityTransform2D()
	} else if (len(aligner.RefStars)==len(light.Stars) && (&aligner.RefStars[0]==&light.Stars[0])) {
		// Not required for reference frame itself (same star slice backing array)
		light.Trans=IdentityTransform2D()
	} else if light.Stars==nil || len(light.Stars)==0 {
		// No stars - skip alignment and warn
		LogPrintf("%d: warning: no stars found, skipping alignment", light.ID)
		light.Trans=IdentityTransform2D()
	} else {
		// Alignment is required
		// determine out of bounds fill value for pixels that fall outside
		// the source frame after projection
		var outOfBounds float32
		switch(oobMode) {
		case OOBModeNaN: outOfBounds=float32(math.NaN())
		case OOBModeRefLocation: outOfBounds=histoRef.Stats.Location
		case OOBModeOwnLocation: outOfBounds=light .Stats.Location
		}
		// Determine alignment of the image to the reference frame
		trans, residual := aligner.Align(light.Naxisn, light.Stars, light.ID)
		if residual>alignThreshold {
			msg:=fmt.Sprintf("%d:Skipping image as residual %g is above limit %g", light.ID, residual, alignThreshold)
			return nil, errors.New(msg)
		}
		light.Trans, light.Residual=trans, residual
		LogPrintf("%d: Transform %v; oob %.3g residual %.3g\n", light.ID, light.Trans, outOfBounds, light.Residual)
		// Project image into reference frame; note this replaces light with
		// the newly projected image for the remaining steps
		light, err= light.Project(aligner.Naxisn, trans, outOfBounds)
		if err!=nil { return nil, err }
	}
	// apply unsharp masking, if requested
	if usmGain>0 {
		light.Stats, err=CalcExtendedStats(light.Data, light.Naxisn[0])
		if err!=nil { return nil, err }
		// threshold is expressed in scale units above the location estimate
		absThresh:=light.Stats.Location + light.Stats.Scale*usmThresh
		LogPrintf("%d: Unsharp masking with sigma %.3g gain %.3g thresh %.3g absThresh %.3g\n", light.ID, usmSigma, usmGain, usmThresh, absThresh)
		light.Data=UnsharpMask(light.Data, int(light.Naxisn[0]), usmSigma, usmGain, light.Stats.Min, light.Stats.Max, absThresh)
		light.Stats=CalcBasicStats(light.Data)
	}
	return light, nil
}
package vector
import (
"errors"
"math"
"strconv"
)
// Error messages used by vector operations. INVALID_DIMENSION and
// DIVISION_BY_ZERO are returned as errors; INDEX_OUT_OF_BOUND is used in
// panic messages.
const (
	INVALID_DIMENSION = "Invalid dimension"
	DIVISION_BY_ZERO = "Division by zero not allowed" // fixed typo: was "Divison"
	INDEX_OUT_OF_BOUND = "Index out of bound"
)
// Vector is an n-dimensional vector of float64 coordinates.
type Vector struct {
	dimension int // number of coordinates; kept equal to len(coordinates) by the constructors
	coordinates []float64
}
// New returns a 1-dimensional vector whose single coordinate is zero.
func New() Vector {
	return Vector{dimension: 1, coordinates: []float64{0}}
}
// Dimension returns the number of coordinates in the vector.
func (av Vector) Dimension() int {
	return av.dimension
}
// NewFromArray builds a vector from the given coordinates; an empty slice
// yields the 1-dimensional zero vector. The coordinates are copied, so
// later changes to values do not affect the vector -- this makes the
// behavior consistent with NewWithValues, which already copied (the
// original aliased the caller's slice).
func NewFromArray(values []float64) Vector {
	if len(values) == 0 {
		return Vector{1, []float64{0}}
	}
	coordinates := make([]float64, len(values))
	copy(coordinates, values)
	return Vector{len(values), coordinates}
}
// NewWithValues builds a vector from the given coordinates; calling it
// without arguments yields the 1-dimensional zero vector. The variadic
// slice is copied. (The original element-by-element loop with redundant
// float64-to-float64 conversions is replaced by the copy builtin.)
func NewWithValues(values ...float64) Vector {
	if len(values) == 0 {
		return New()
	}
	coordinates := make([]float64, len(values))
	copy(coordinates, values)
	return Vector{len(values), coordinates}
}
// Add returns the component-wise sum av + v, or an INVALID_DIMENSION error
// when the dimensions differ. Neither operand is modified.
func (av Vector) Add(v *Vector) (Vector, error) {
	if av.dimension != v.Dimension() {
		return Vector{}, errors.New(INVALID_DIMENSION)
	}
	sum := make([]float64, av.dimension)
	for i := range sum {
		sum[i] = av.Get(i) + v.Get(i)
	}
	return Vector{av.dimension, sum}, nil
}
// max returns the larger of x and y. The original converted both operands
// through float64, which is wasteful and loses precision for integers
// above 2^53; a direct comparison is exact for all ints.
func max(x, y int) int {
	if x > y {
		return x
	}
	return y
}
// Subtract returns the component-wise difference av - v, or an
// INVALID_DIMENSION error when the dimensions differ.
func (av Vector) Subtract(v *Vector) (Vector, error) {
	if av.dimension != v.Dimension() {
		return Vector{}, errors.New(INVALID_DIMENSION)
	}
	diff := make([]float64, av.dimension)
	for i := range diff {
		diff[i] = av.Get(i) - v.Get(i)
	}
	return Vector{av.dimension, diff}, nil
}
// Get returns the coordinate at index i. Indices at or beyond the
// dimension yield 0; negative indices panic with INDEX_OUT_OF_BOUND.
func (av Vector) Get(i int) float64 {
	switch {
	case i >= av.dimension:
		return 0
	case i < 0:
		panic(INDEX_OUT_OF_BOUND + ": " + strconv.Itoa(i))
	}
	return av.coordinates[i]
}
// MultiplyByScalar returns a new vector with every coordinate scaled by c;
// the receiver is unchanged. (The original's no-op float64-to-float64
// conversions obscured the arithmetic and are removed.)
func (av Vector) MultiplyByScalar(c float64) Vector {
	scaled := make([]float64, av.dimension)
	for i := range scaled {
		scaled[i] = av.Get(i) * c
	}
	return Vector{av.dimension, scaled}
}
// DivideByScalar returns a new vector with every coordinate divided by f,
// or a DIVISION_BY_ZERO error when f is zero.
func (av Vector) DivideByScalar(f float64) (Vector, error) {
	if f == 0 {
		return Vector{}, errors.New(DIVISION_BY_ZERO)
	}
	inv := 1 / f
	return av.MultiplyByScalar(inv), nil
}
// Length returns the Euclidean norm of the vector.
func (av Vector) Length() float64 {
	return math.Sqrt(av.SquaredLength())
}
// SquaredLength returns the sum of the squared coordinates, i.e. the
// squared Euclidean norm.
func (av Vector) SquaredLength() float64 {
	var sum float64
	for _, c := range av.coordinates {
		sum += c * c
	}
	return sum
}
// UnitVector returns the normalized copy of the vector. The zero vector
// (length 0) normalizes to an all-zero vector of the same dimension.
func (av Vector) UnitVector() Vector {
	unit := Vector{av.dimension, make([]float64, av.dimension)}
	l := av.Length()
	if l == 0 {
		return unit
	}
	for i, c := range av.coordinates {
		unit.coordinates[i] = c / l
	}
	return unit
}
// Set stores value at index i, mutating the shared coordinate slice.
// Indices outside [0, dimension) panic with INDEX_OUT_OF_BOUND.
func (av Vector) Set(i int, value float64) {
	if i < 0 || i >= av.dimension {
		panic(INDEX_OUT_OF_BOUND + ": " + strconv.Itoa(i))
	}
	av.coordinates[i] = value
}
// Equals reports whether both vectors have the same dimension and exactly
// equal coordinates.
func (av Vector) Equals(v *Vector) bool {
	if av.dimension != v.Dimension() {
		return false
	}
	for i := 0; i < av.dimension; i++ {
		if av.Get(i) != v.Get(i) {
			return false
		}
	}
	return true
}
func (av Vector) Dot(v *Vector) (float64, error) {
if av.dimension != (*v).Dimension() {
return 0, errors.New(INVALID_DIMENSION)
}
var dotProduct float64 = 0
for i, x := range av.coordinates {
dotProduct = dotProduct + x*v.Get(i)
}
return dotProduct, nil
} | vector/vector.go | 0.713831 | 0.510252 | vector.go | starcoder |
package uxid
import (
cryptorand "crypto/rand"
"io"
"time"
)
const CrockfordEncoding string = "0123456789ABCDEFGHJKMNPQRSTVWXYZ"
// Generate returns a new identifier: the Crockford-encoded current UTC
// timestamp followed by a random suffix whose length is selected by size.
// When prefix is non-empty it is prepended with an underscore separator.
// The returned error is always nil; it is kept for API stability.
func Generate(prefix string, size string) (id_str string, err error) {
	id := EncodeTime(time.Now().UTC()) + EncodeRand(size)
	if prefix == "" {
		return id, nil
	}
	return prefix + "_" + id, nil
}
// EncodeTime encodes the millisecond timestamp of t as 10 Crockford
// base32 characters, ULID style: the low 48 bits of the millisecond count
// are laid out big-endian and consumed in 5-bit groups (the first
// character carries only the top 3 bits, hence the &224 mask).
func EncodeTime(t time.Time) string {
	ms := Timestamp(t)
	// Create the time bytes to store timestamp
	tbytes := make([]byte, 6)
	tbytes[0] = byte(ms >> 40)
	tbytes[1] = byte(ms >> 32)
	tbytes[2] = byte(ms >> 24)
	tbytes[3] = byte(ms >> 16)
	tbytes[4] = byte(ms >> 8)
	tbytes[5] = byte(ms)
	// Now encode the bytes into a time string
	tstring := make([]byte, 10)
	tstring[0] = CrockfordEncoding[(tbytes[0]&224)>>5]
	tstring[1] = CrockfordEncoding[tbytes[0]&31]
	tstring[2] = CrockfordEncoding[(tbytes[1]&248)>>3]
	tstring[3] = CrockfordEncoding[((tbytes[1]&7)<<2)|((tbytes[2]&192)>>6)]
	tstring[4] = CrockfordEncoding[(tbytes[2]&62)>>1]
	tstring[5] = CrockfordEncoding[((tbytes[2]&1)<<4)|((tbytes[3]&240)>>4)]
	tstring[6] = CrockfordEncoding[((tbytes[3]&15)<<1)|((tbytes[4]&128)>>7)]
	tstring[7] = CrockfordEncoding[(tbytes[4]&124)>>2]
	tstring[8] = CrockfordEncoding[((tbytes[4]&3)<<3)|((tbytes[5]&224)>>5)]
	tstring[9] = CrockfordEncoding[tbytes[5]&31]
	return string(tstring[:])
}
// EncodeRand returns a random Crockford base32 string whose entropy is
// selected by size: numeric "0".."9" pick that many bytes, the aliases
// xs/xsmall, s/small, m/medium and l/large map to 0, 2, 5 and 7 bytes,
// and anything else falls back to 10 bytes.
func EncodeRand(size string) string {
	// Normalize the named aliases to their numeric equivalents first.
	switch size {
	case "xs", "xsmall":
		size = "0"
	case "s", "small":
		size = "2"
	case "m", "medium":
		size = "5"
	case "l", "large":
		size = "7"
	}
	switch size {
	case "0":
		return ""
	case "1":
		return EncodeRand1()
	case "2":
		return EncodeRand2()
	case "3":
		return EncodeRand3()
	case "4":
		return EncodeRand4()
	case "5":
		return EncodeRand5()
	case "6":
		return EncodeRand6()
	case "7":
		return EncodeRand7()
	case "8":
		return EncodeRand8()
	case "9":
		return EncodeRand9()
	}
	return EncodeRand10()
}
// encodeRandEntropy reads n bytes of cryptographic entropy and encodes the
// resulting bit stream as Crockford base32, most significant bits first.
// The trailing partial group is zero-padded on the right, so the output is
// ceil(8*n/5) characters long. This reproduces bit-for-bit the output of
// the former ten hand-unrolled EncodeRandN bodies, which duplicated the
// same bit-slicing logic per byte count.
func encodeRandEntropy(n int) string {
	// Populate the entropy bytes; read errors are ignored, as in the
	// original code, leaving the remaining bytes zeroed.
	ebytes := make([]byte, n)
	_, _ = io.ReadFull(cryptorand.Reader, ebytes)
	// Convert the bytes to a string using CrockfordEncoding, consuming the
	// bit stream in 5-bit groups. Only the low bits of acc below nbits are
	// live; stale high bits are masked off by &31.
	estring := make([]byte, 0, (n*8+4)/5)
	var acc, nbits uint
	for _, b := range ebytes {
		acc = acc<<8 | uint(b)
		nbits += 8
		for nbits >= 5 {
			nbits -= 5
			estring = append(estring, CrockfordEncoding[(acc>>nbits)&31])
		}
	}
	if nbits > 0 {
		// Left-align the leftover bits in a final 5-bit group.
		estring = append(estring, CrockfordEncoding[(acc<<(5-nbits))&31])
	}
	return string(estring)
}

// EncodeRand1 returns 2 random Crockford base32 characters (1 byte of entropy).
func EncodeRand1() string { return encodeRandEntropy(1) }

// EncodeRand2 returns 4 random Crockford base32 characters (2 bytes of entropy).
func EncodeRand2() string { return encodeRandEntropy(2) }

// EncodeRand3 returns 5 random Crockford base32 characters (3 bytes of entropy).
func EncodeRand3() string { return encodeRandEntropy(3) }

// EncodeRand4 returns 7 random Crockford base32 characters (4 bytes of entropy).
func EncodeRand4() string { return encodeRandEntropy(4) }

// EncodeRand5 returns 8 random Crockford base32 characters (5 bytes of entropy).
func EncodeRand5() string { return encodeRandEntropy(5) }

// EncodeRand6 returns 10 random Crockford base32 characters (6 bytes of entropy).
func EncodeRand6() string { return encodeRandEntropy(6) }

// EncodeRand7 returns 12 random Crockford base32 characters (7 bytes of entropy).
func EncodeRand7() string { return encodeRandEntropy(7) }

// EncodeRand8 returns 13 random Crockford base32 characters (8 bytes of entropy).
func EncodeRand8() string { return encodeRandEntropy(8) }

// EncodeRand9 returns 15 random Crockford base32 characters (9 bytes of entropy).
func EncodeRand9() string { return encodeRandEntropy(9) }

// EncodeRand10 returns 16 random Crockford base32 characters (10 bytes of entropy).
func EncodeRand10() string { return encodeRandEntropy(10) }
func Timestamp(t time.Time) uint64 {
return uint64(t.Unix())*1000 +
uint64(t.Nanosecond()/int(time.Millisecond))
} | uxid.go | 0.671147 | 0.546133 | uxid.go | starcoder |
import "github.com/kaitai-io/kaitai_struct_go_runtime/kaitai"
/**
* BCD (Binary Coded Decimals) is a common way to encode integer
* numbers in a way that makes human-readable output somewhat
* simpler. In this encoding scheme, every decimal digit is encoded as
* either a single byte (8 bits), or a nibble (half of a byte, 4
* bits). This obviously wastes a lot of bits, but it makes translation
* into human-readable string much easier than traditional
* binary-to-decimal conversion process, which includes lots of
* divisions by 10.
*
* For example, encoding integer 31337 in 8-digit, 8 bits per digit,
* big endian order of digits BCD format yields
*
* ```
* 00 00 00 03 01 03 03 07
* ```
*
* Encoding the same integer as 8-digit, 4 bits per digit, little
* endian order BCD format would yield:
*
* ```
* 73 31 30 00
* ```
*
* Using this type of encoding in Kaitai Struct is pretty
* straightforward: one calls for this type, specifying desired
* encoding parameters, and gets result using either `as_int` or
* `as_str` attributes.
*/
// Bcd holds a binary-coded-decimal number parsed from a Kaitai stream.
// The _f_* fields memoize the lazily computed As* values.
type Bcd struct {
	Digits []int // decoded decimal digits, one int per digit
	NumDigits uint8 // number of digits to read
	BitsPerDigit uint8 // width of each digit: 4 (packed nibbles) or 8 (one byte each)
	IsLe bool // true: AsInt treats the digit order as little-endian
	_io *kaitai.Stream
	_root *Bcd
	_parent interface{}
	_f_asInt bool // memoization flag for asInt
	asInt int
	_f_asIntLe bool // memoization flag for asIntLe
	asIntLe int
	_f_lastIdx bool // memoization flag for lastIdx
	lastIdx int
	_f_asIntBe bool // memoization flag for asIntBe
	asIntBe int
}
// NewBcd returns a Bcd configured for numDigits digits of bitsPerDigit
// (4 or 8) bits each; isLe selects little-endian digit order for AsInt.
// Call Read to parse the digits from a stream.
func NewBcd(numDigits uint8, bitsPerDigit uint8, isLe bool) *Bcd {
	return &Bcd{
		NumDigits: numDigits,
		BitsPerDigit: bitsPerDigit,
		IsLe: isLe,
	}
}
// Read parses NumDigits BCD digits from the stream, each either a packed
// 4-bit nibble or a full byte depending on BitsPerDigit. Any other digit
// width leaves the digits at zero. Adds the int conversions the generated
// code was missing: the kaitai runtime returns uint64 from ReadBitsIntBe
// and uint8 from ReadU1, neither of which is assignable to an int slot.
func (this *Bcd) Read(io *kaitai.Stream, parent interface{}, root *Bcd) (err error) {
	this._io = io
	this._parent = parent
	this._root = root
	this.Digits = make([]int, this.NumDigits)
	for i := range this.Digits {
		switch this.BitsPerDigit {
		case 4:
			tmp1, err := this._io.ReadBitsIntBe(4)
			if err != nil {
				return err
			}
			this.Digits[i] = int(tmp1)
		case 8:
			tmp2, err := this._io.ReadU1()
			if err != nil {
				return err
			}
			this.Digits[i] = int(tmp2)
		}
	}
	return err
}
/**
 * Value of this BCD number as integer. Endianness is selected based on the
 * `is_le` parameter given at construction; the result is memoized.
 */
func (this *Bcd) AsInt() (v int, err error) {
	if this._f_asInt {
		return this.asInt, nil
	}
	var val int
	if this.IsLe {
		val, err = this.AsIntLe()
	} else {
		val, err = this.AsIntBe()
	}
	if err != nil {
		return 0, err
	}
	this.asInt = val
	this._f_asInt = true
	return this.asInt, nil
}
/**
 * Value of this BCD number as integer (treating digit order as little-endian).
 * Digits[0] is the least significant digit. As in the original unrolled
 * expression, at most the first 8 digits contribute to the value. The
 * result is memoized. Accumulating in int also fixes the generated code's
 * int8 temporaries, which cannot hold products up to Digits[7]*10000000.
 */
func (this *Bcd) AsIntLe() (v int, err error) {
	if this._f_asIntLe {
		return this.asIntLe, nil
	}
	val, mult := 0, 1
	for i := 0; i < len(this.Digits) && i < 8; i++ {
		val += this.Digits[i] * mult
		mult *= 10
	}
	this.asIntLe = val
	this._f_asIntLe = true
	return this.asIntLe, nil
}
/**
 * Index of last digit (0-based); memoized. Note the original expression is
 * preserved exactly: NumDigits is uint8, so NumDigits == 0 wraps to 255.
 */
func (this *Bcd) LastIdx() (v int, err error) {
	if !this._f_lastIdx {
		this.lastIdx = int((this.NumDigits - 1))
		this._f_lastIdx = true
	}
	return this.lastIdx, nil
}
/**
* Value of this BCD number as integer (treating digit order as big-endian).
*/
func (this *Bcd) AsIntBe() (v int, err error) {
if (this._f_asIntBe) {
return this.asIntBe, nil
}
tmp13, err := this.LastIdx()
if err != nil {
return 0, err
}
var tmp14 int8;
if (this.NumDigits < 2) {
tmp14 = 0
} else {
tmp15, err := this.LastIdx()
if err != nil {
return 0, err
}
var tmp16 int8;
if (this.NumDigits < 3) {
tmp16 = 0
} else {
tmp17, err := this.LastIdx()
if err != nil {
return 0, err
}
var tmp18 int8;
if (this.NumDigits < 4) {
tmp18 = 0
} else {
tmp19, err := this.LastIdx()
if err != nil {
return 0, err
}
var tmp20 int8;
if (this.NumDigits < 5) {
tmp20 = 0
} else {
tmp21, err := this.LastIdx()
if err != nil {
return 0, err
}
var tmp22 int8;
if (this.NumDigits < 6) {
tmp22 = 0
} else {
tmp23, err := this.LastIdx()
if err != nil {
return 0, err
}
var tmp24 int8;
if (this.NumDigits < 7) {
tmp24 = 0
} else {
tmp25, err := this.LastIdx()
if err != nil {
return 0, err
}
var tmp26 int8;
if (this.NumDigits < 8) {
tmp26 = 0
} else {
tmp27, err := this.LastIdx()
if err != nil {
return 0, err
}
tmp26 = (this.Digits[(tmp27 - 7)] * 10000000)
}
tmp24 = ((this.Digits[(tmp25 - 6)] * 1000000) + tmp26)
}
tmp22 = ((this.Digits[(tmp23 - 5)] * 100000) + tmp24)
}
tmp20 = ((this.Digits[(tmp21 - 4)] * 10000) + tmp22)
}
tmp18 = ((this.Digits[(tmp19 - 3)] * 1000) + tmp20)
}
tmp16 = ((this.Digits[(tmp17 - 2)] * 100) + tmp18)
}
tmp14 = ((this.Digits[(tmp15 - 1)] * 10) + tmp16)
}
this.asIntBe = int((this.Digits[tmp13] + tmp14))
this._f_asIntBe = true
return this.asIntBe, nil
} | bcd/src/go/bcd.go | 0.620392 | 0.400984 | bcd.go | starcoder |
package flume
// errorsSchema is the Avro schema for protocol errors: a union holding a string.
const errorsSchema = `
{
    "type": [
        "string"
    ]
}
`

// eventSchema is the Avro schema of a single Flume event: a map of string
// headers plus an opaque byte payload.
const eventSchema = `
{
    "type": "record",
    "name": "AvroFlumeEvent",
    "fields": [
        {
            "name": "headers",
            "type": {
                "type": "map",
                "values": "string"
            }
        },
        {
            "name": "body",
            "type": "bytes"
        }
    ]
}
`

// eventsSchema is the Avro schema for a batch of Flume events.
const eventsSchema = `
{
    "type": "array",
    "items": {
        "type": "record",
        "name": "AvroFlumeEvent",
        "fields": [
            {
                "name": "headers",
                "type": {
                    "type": "map",
                    "values": "string"
                }
            },
            {
                "name": "body",
                "type": "bytes"
            }
        ]
    }
}
`

// statusSchema is the Avro enum returned for append/appendBatch calls.
const statusSchema = `
{
    "type": "enum",
    "name": "Status",
    "symbols": [
        "OK",
        "FAILED",
        "UNKNOWN"
    ]
}
`

// messageProtocol is the complete Avro RPC protocol definition of the
// Flume Avro source: the Status and AvroFlumeEvent types plus the append
// and appendBatch messages.
const messageProtocol = `
{
    "protocol": "AvroSourceProtocol",
    "namespace": "org.apache.flume.source.avro",
    "doc": "* Licensed to the Apache Software Foundation (ASF) under one\n * or more contributor license agreements. See the NOTICE file\n * distributed with this work for additional information\n * regarding copyright ownership. The ASF licenses this file\n * to you under the Apache License, Version 2.0 (the\n * \"License\"); you may not use this file except in compliance\n * with the License. You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing,\n * software distributed under the License is distributed on an\n * \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY\n * KIND, either express or implied. See the License for the\n * specific language governing permissions and limitations\n * under the License.",
    "types": [
        {
            "type": "enum",
            "name": "Status",
            "symbols": [
                "OK",
                "FAILED",
                "UNKNOWN"
            ]
        },
        {
            "type": "record",
            "name": "AvroFlumeEvent",
            "fields": [
                {
                    "name": "headers",
                    "type": {
                        "type": "map",
                        "values": "string"
                    }
                },
                {
                    "name": "body",
                    "type": "bytes"
                }
            ]
        }
    ],
    "messages": {
        "append": {
            "request": [
                {
                    "name": "event",
                    "type": "AvroFlumeEvent"
                }
            ],
            "response": "Status"
        },
        "appendBatch": {
            "request": [
                {
                    "name": "events",
                    "type": {
                        "type": "array",
                        "items": "AvroFlumeEvent"
                    }
                }
            ],
            "response": "Status"
        }
    }
}
`
package dft
import (
"math"
"github.com/emer/etable/etensor"
"gonum.org/v1/gonum/dsp/fourier"
)
// Params holds the parameters for computing a discrete Fourier transform
// power spectrum over audio windows (the doc comment previously referred
// to a "Dft struct"; the type is named Params).
type Params struct {
	CompLogPow bool `def:"true" desc:"compute the log of the power and save that to a separate table -- generaly more useful for visualization of power than raw power values"`
	LogMin float32 `viewif:"CompLogPow" def:"-100" desc:"minimum value a log can produce -- puts a lower limit on log output"`
	LogOffSet float32 `viewif:"CompLogPow" def:"0" desc:"add this amount when taking the log of the dft power -- e.g., 1.0 makes everything positive -- affects the relative contrast of the outputs"`
	PrevSmooth float32 `def:"0" desc:"how much of the previous step's power value to include in this one -- smooths out the power spectrum which can be artificially bumpy due to discrete window samples"`
	CurSmooth float32 `inactive:"+" desc:" how much of current power to include"`
}
// Initialize resets the DFT parameters to their defaults: no smoothing
// across windows and log-power computation enabled.
// NOTE(review): winSamples is accepted but unused here -- confirm whether
// it was meant to pre-size internal buffers.
func (dft *Params) Initialize(winSamples int) {
	dft.PrevSmooth = 0
	dft.CurSmooth = 1.0 - dft.PrevSmooth // all weight on the current window by default
	dft.CompLogPow = true
	dft.LogOffSet = 0
	dft.LogMin = -100
}
// Filter filters the current window_in input data according to current settings -- called by ProcessStep, but can be called separately.
// It copies the window into fftCoefs, runs the FFT, and accumulates power
// (and optionally log power) via Power.
// NOTE(review): both fft and fftCoefs are reassigned locally -- a new
// CmplxFFT is created (discarding the one passed in) and the coefficient
// slice header is replaced -- so neither change is visible to the caller.
// Confirm this is intended.
func (dft *Params) Filter(ch int, step int, windowIn *etensor.Float32, firstStep bool, winSamples int, fftCoefs []complex128, fft *fourier.CmplxFFT, power *etensor.Float32, logPower *etensor.Float32, powerForSegment *etensor.Float32, logPowerForSegment *etensor.Float32) {
	dft.FftReal(fftCoefs, windowIn)
	fft = fourier.NewCmplxFFT(len(fftCoefs))
	fftCoefs = fft.Coefficients(nil, fftCoefs)
	dft.Power(ch, step, firstStep, winSamples, fftCoefs, power, logPower, powerForSegment, logPowerForSegment)
}
// FftReal copies the real-valued samples of in into fftCoefs as complex
// numbers with zero imaginary part, ready for the complex FFT.
func (dft *Params) FftReal(fftCoefs []complex128, in *etensor.Float32) {
	for i := range fftCoefs {
		fftCoefs[i] = complex(in.FloatVal1D(i), 0)
	}
}
// Power computes the power spectrum (squared FFT magnitude) of one window
// for the first winSamples/2+1 bins, exponentially smoothed against the
// previous window's power, and writes it into both the per-window and
// per-segment tensors. When CompLogPow is set, the log power (clamped to
// LogMin at zero) is written likewise.
func (dft *Params) Power(ch, step int, firstStep bool, winSamples int, fftCoefs []complex128, power *etensor.Float32, logPower *etensor.Float32, powerForSegment *etensor.Float32, logPowerForSegment *etensor.Float32) {
	// Mag() is absolute value SqMag is square of it - r*r + i*i
	for k := 0; k < winSamples/2+1; k++ {
		rl := real(fftCoefs[k])
		im := imag(fftCoefs[k])
		powr := float64(rl*rl + im*im) // why is complex converted to float here
		// smooth with the previous window's power, except on the very
		// first step where there is no history yet
		if firstStep == false {
			powr = float64(dft.PrevSmooth)*power.FloatVal1D(k) + float64(dft.CurSmooth)*powr
		}
		power.SetFloat1D(k, powr)
		powerForSegment.SetFloat([]int{step, k, ch}, powr)
		var logp float64
		if dft.CompLogPow {
			// NOTE(review): only powr == 0 is clamped to LogMin; a negative
			// LogOffSet could make powr negative and Log return NaN -- confirm
			// LogOffSet is always non-negative.
			powr += float64(dft.LogOffSet)
			if powr == 0 {
				logp = float64(dft.LogMin)
			} else {
				logp = math.Log(powr)
			}
			logPower.SetFloat1D(k, logp)
			logPowerForSegment.SetFloat([]int{step, k, ch}, logp)
		}
	}
}
package machine
// digitalOceanDescriptions enumerates DigitalOcean instance offerings.
// One entry per (size, region) pair: CPU is the core count, RAM is in GiB,
// Disk is the SSD size in GB (kept as a string), and Price is presumably the
// hourly USD rate — confirm against DigitalOcean's published pricing.
// Sizes prefixed "m-" are memory-optimized droplets, available in a smaller
// set of regions than the standard tiers.
var digitalOceanDescriptions = []Description{
	// standard 512mb tier
	{Size: "512mb", CPU: 1, RAM: .5, Disk: "20", Region: "ams1", Price: 0.00744},
	{Size: "512mb", CPU: 1, RAM: .5, Disk: "20", Region: "ams2", Price: 0.00744},
	{Size: "512mb", CPU: 1, RAM: .5, Disk: "20", Region: "ams3", Price: 0.00744},
	{Size: "512mb", CPU: 1, RAM: .5, Disk: "20", Region: "blr1", Price: 0.00744},
	{Size: "512mb", CPU: 1, RAM: .5, Disk: "20", Region: "fra1", Price: 0.00744},
	{Size: "512mb", CPU: 1, RAM: .5, Disk: "20", Region: "lon1", Price: 0.00744},
	{Size: "512mb", CPU: 1, RAM: .5, Disk: "20", Region: "nyc1", Price: 0.00744},
	{Size: "512mb", CPU: 1, RAM: .5, Disk: "20", Region: "nyc2", Price: 0.00744},
	{Size: "512mb", CPU: 1, RAM: .5, Disk: "20", Region: "nyc3", Price: 0.00744},
	{Size: "512mb", CPU: 1, RAM: .5, Disk: "20", Region: "sfo1", Price: 0.00744},
	{Size: "512mb", CPU: 1, RAM: .5, Disk: "20", Region: "sfo2", Price: 0.00744},
	{Size: "512mb", CPU: 1, RAM: .5, Disk: "20", Region: "sgp1", Price: 0.00744},
	{Size: "512mb", CPU: 1, RAM: .5, Disk: "20", Region: "tor1", Price: 0.00744},
	// standard 1gb tier
	{Size: "1gb", CPU: 1, RAM: 1, Disk: "30", Region: "ams1", Price: 0.01488},
	{Size: "1gb", CPU: 1, RAM: 1, Disk: "30", Region: "ams2", Price: 0.01488},
	{Size: "1gb", CPU: 1, RAM: 1, Disk: "30", Region: "ams3", Price: 0.01488},
	{Size: "1gb", CPU: 1, RAM: 1, Disk: "30", Region: "blr1", Price: 0.01488},
	{Size: "1gb", CPU: 1, RAM: 1, Disk: "30", Region: "fra1", Price: 0.01488},
	{Size: "1gb", CPU: 1, RAM: 1, Disk: "30", Region: "lon1", Price: 0.01488},
	{Size: "1gb", CPU: 1, RAM: 1, Disk: "30", Region: "nyc1", Price: 0.01488},
	{Size: "1gb", CPU: 1, RAM: 1, Disk: "30", Region: "nyc2", Price: 0.01488},
	{Size: "1gb", CPU: 1, RAM: 1, Disk: "30", Region: "nyc3", Price: 0.01488},
	{Size: "1gb", CPU: 1, RAM: 1, Disk: "30", Region: "sfo1", Price: 0.01488},
	{Size: "1gb", CPU: 1, RAM: 1, Disk: "30", Region: "sfo2", Price: 0.01488},
	{Size: "1gb", CPU: 1, RAM: 1, Disk: "30", Region: "sgp1", Price: 0.01488},
	{Size: "1gb", CPU: 1, RAM: 1, Disk: "30", Region: "tor1", Price: 0.01488},
	// standard 2gb tier
	{Size: "2gb", CPU: 2, RAM: 2, Disk: "40", Region: "ams1", Price: 0.02976},
	{Size: "2gb", CPU: 2, RAM: 2, Disk: "40", Region: "ams2", Price: 0.02976},
	{Size: "2gb", CPU: 2, RAM: 2, Disk: "40", Region: "ams3", Price: 0.02976},
	{Size: "2gb", CPU: 2, RAM: 2, Disk: "40", Region: "blr1", Price: 0.02976},
	{Size: "2gb", CPU: 2, RAM: 2, Disk: "40", Region: "fra1", Price: 0.02976},
	{Size: "2gb", CPU: 2, RAM: 2, Disk: "40", Region: "lon1", Price: 0.02976},
	{Size: "2gb", CPU: 2, RAM: 2, Disk: "40", Region: "nyc1", Price: 0.02976},
	{Size: "2gb", CPU: 2, RAM: 2, Disk: "40", Region: "nyc2", Price: 0.02976},
	{Size: "2gb", CPU: 2, RAM: 2, Disk: "40", Region: "nyc3", Price: 0.02976},
	{Size: "2gb", CPU: 2, RAM: 2, Disk: "40", Region: "sfo1", Price: 0.02976},
	{Size: "2gb", CPU: 2, RAM: 2, Disk: "40", Region: "sfo2", Price: 0.02976},
	{Size: "2gb", CPU: 2, RAM: 2, Disk: "40", Region: "sgp1", Price: 0.02976},
	{Size: "2gb", CPU: 2, RAM: 2, Disk: "40", Region: "tor1", Price: 0.02976},
	// standard 4gb tier
	{Size: "4gb", CPU: 2, RAM: 4, Disk: "60", Region: "ams1", Price: 0.05952},
	{Size: "4gb", CPU: 2, RAM: 4, Disk: "60", Region: "ams2", Price: 0.05952},
	{Size: "4gb", CPU: 2, RAM: 4, Disk: "60", Region: "ams3", Price: 0.05952},
	{Size: "4gb", CPU: 2, RAM: 4, Disk: "60", Region: "blr1", Price: 0.05952},
	{Size: "4gb", CPU: 2, RAM: 4, Disk: "60", Region: "fra1", Price: 0.05952},
	{Size: "4gb", CPU: 2, RAM: 4, Disk: "60", Region: "lon1", Price: 0.05952},
	{Size: "4gb", CPU: 2, RAM: 4, Disk: "60", Region: "nyc1", Price: 0.05952},
	{Size: "4gb", CPU: 2, RAM: 4, Disk: "60", Region: "nyc2", Price: 0.05952},
	{Size: "4gb", CPU: 2, RAM: 4, Disk: "60", Region: "nyc3", Price: 0.05952},
	{Size: "4gb", CPU: 2, RAM: 4, Disk: "60", Region: "sfo1", Price: 0.05952},
	{Size: "4gb", CPU: 2, RAM: 4, Disk: "60", Region: "sfo2", Price: 0.05952},
	{Size: "4gb", CPU: 2, RAM: 4, Disk: "60", Region: "sgp1", Price: 0.05952},
	{Size: "4gb", CPU: 2, RAM: 4, Disk: "60", Region: "tor1", Price: 0.05952},
	// standard 8gb tier
	{Size: "8gb", CPU: 4, RAM: 8, Disk: "80", Region: "ams1", Price: 0.11905},
	{Size: "8gb", CPU: 4, RAM: 8, Disk: "80", Region: "ams2", Price: 0.11905},
	{Size: "8gb", CPU: 4, RAM: 8, Disk: "80", Region: "ams3", Price: 0.11905},
	{Size: "8gb", CPU: 4, RAM: 8, Disk: "80", Region: "blr1", Price: 0.11905},
	{Size: "8gb", CPU: 4, RAM: 8, Disk: "80", Region: "fra1", Price: 0.11905},
	{Size: "8gb", CPU: 4, RAM: 8, Disk: "80", Region: "lon1", Price: 0.11905},
	{Size: "8gb", CPU: 4, RAM: 8, Disk: "80", Region: "nyc1", Price: 0.11905},
	{Size: "8gb", CPU: 4, RAM: 8, Disk: "80", Region: "nyc2", Price: 0.11905},
	{Size: "8gb", CPU: 4, RAM: 8, Disk: "80", Region: "nyc3", Price: 0.11905},
	{Size: "8gb", CPU: 4, RAM: 8, Disk: "80", Region: "sfo1", Price: 0.11905},
	{Size: "8gb", CPU: 4, RAM: 8, Disk: "80", Region: "sfo2", Price: 0.11905},
	{Size: "8gb", CPU: 4, RAM: 8, Disk: "80", Region: "sgp1", Price: 0.11905},
	{Size: "8gb", CPU: 4, RAM: 8, Disk: "80", Region: "tor1", Price: 0.11905},
	// standard 16gb tier
	{Size: "16gb", CPU: 8, RAM: 16, Disk: "160", Region: "ams1", Price: 0.2381},
	{Size: "16gb", CPU: 8, RAM: 16, Disk: "160", Region: "ams2", Price: 0.2381},
	{Size: "16gb", CPU: 8, RAM: 16, Disk: "160", Region: "ams3", Price: 0.2381},
	{Size: "16gb", CPU: 8, RAM: 16, Disk: "160", Region: "blr1", Price: 0.2381},
	{Size: "16gb", CPU: 8, RAM: 16, Disk: "160", Region: "fra1", Price: 0.2381},
	{Size: "16gb", CPU: 8, RAM: 16, Disk: "160", Region: "lon1", Price: 0.2381},
	{Size: "16gb", CPU: 8, RAM: 16, Disk: "160", Region: "nyc1", Price: 0.2381},
	{Size: "16gb", CPU: 8, RAM: 16, Disk: "160", Region: "nyc2", Price: 0.2381},
	{Size: "16gb", CPU: 8, RAM: 16, Disk: "160", Region: "nyc3", Price: 0.2381},
	{Size: "16gb", CPU: 8, RAM: 16, Disk: "160", Region: "sfo1", Price: 0.2381},
	{Size: "16gb", CPU: 8, RAM: 16, Disk: "160", Region: "sfo2", Price: 0.2381},
	{Size: "16gb", CPU: 8, RAM: 16, Disk: "160", Region: "sgp1", Price: 0.2381},
	{Size: "16gb", CPU: 8, RAM: 16, Disk: "160", Region: "tor1", Price: 0.2381},
	// memory-optimized m-16gb tier
	{Size: "m-16gb", CPU: 2, RAM: 16, Disk: "30", Region: "blr1", Price: 0.17857},
	{Size: "m-16gb", CPU: 2, RAM: 16, Disk: "30", Region: "fra1", Price: 0.17857},
	{Size: "m-16gb", CPU: 2, RAM: 16, Disk: "30", Region: "lon1", Price: 0.17857},
	{Size: "m-16gb", CPU: 2, RAM: 16, Disk: "30", Region: "nyc1", Price: 0.17857},
	{Size: "m-16gb", CPU: 2, RAM: 16, Disk: "30", Region: "nyc3", Price: 0.17857},
	{Size: "m-16gb", CPU: 2, RAM: 16, Disk: "30", Region: "sfo2", Price: 0.17857},
	{Size: "m-16gb", CPU: 2, RAM: 16, Disk: "30", Region: "tor1", Price: 0.17857},
	// standard 32gb tier
	{Size: "32gb", CPU: 12, RAM: 32, Disk: "320", Region: "ams2", Price: 0.47619},
	{Size: "32gb", CPU: 12, RAM: 32, Disk: "320", Region: "ams3", Price: 0.47619},
	{Size: "32gb", CPU: 12, RAM: 32, Disk: "320", Region: "blr1", Price: 0.47619},
	{Size: "32gb", CPU: 12, RAM: 32, Disk: "320", Region: "fra1", Price: 0.47619},
	{Size: "32gb", CPU: 12, RAM: 32, Disk: "320", Region: "lon1", Price: 0.47619},
	{Size: "32gb", CPU: 12, RAM: 32, Disk: "320", Region: "nyc1", Price: 0.47619},
	{Size: "32gb", CPU: 12, RAM: 32, Disk: "320", Region: "nyc2", Price: 0.47619},
	{Size: "32gb", CPU: 12, RAM: 32, Disk: "320", Region: "nyc3", Price: 0.47619},
	{Size: "32gb", CPU: 12, RAM: 32, Disk: "320", Region: "sfo1", Price: 0.47619},
	{Size: "32gb", CPU: 12, RAM: 32, Disk: "320", Region: "sfo2", Price: 0.47619},
	{Size: "32gb", CPU: 12, RAM: 32, Disk: "320", Region: "sgp1", Price: 0.47619},
	{Size: "32gb", CPU: 12, RAM: 32, Disk: "320", Region: "tor1", Price: 0.47619},
	// memory-optimized m-32gb tier
	{Size: "m-32gb", CPU: 4, RAM: 32, Disk: "90", Region: "blr1", Price: 0.35714},
	{Size: "m-32gb", CPU: 4, RAM: 32, Disk: "90", Region: "fra1", Price: 0.35714},
	{Size: "m-32gb", CPU: 4, RAM: 32, Disk: "90", Region: "lon1", Price: 0.35714},
	{Size: "m-32gb", CPU: 4, RAM: 32, Disk: "90", Region: "nyc1", Price: 0.35714},
	{Size: "m-32gb", CPU: 4, RAM: 32, Disk: "90", Region: "nyc3", Price: 0.35714},
	{Size: "m-32gb", CPU: 4, RAM: 32, Disk: "90", Region: "sfo2", Price: 0.35714},
	{Size: "m-32gb", CPU: 4, RAM: 32, Disk: "90", Region: "tor1", Price: 0.35714},
	// standard 48gb tier
	{Size: "48gb", CPU: 16, RAM: 48, Disk: "480", Region: "ams2", Price: 0.71429},
	{Size: "48gb", CPU: 16, RAM: 48, Disk: "480", Region: "ams3", Price: 0.71429},
	{Size: "48gb", CPU: 16, RAM: 48, Disk: "480", Region: "blr1", Price: 0.71429},
	{Size: "48gb", CPU: 16, RAM: 48, Disk: "480", Region: "fra1", Price: 0.71429},
	{Size: "48gb", CPU: 16, RAM: 48, Disk: "480", Region: "lon1", Price: 0.71429},
	{Size: "48gb", CPU: 16, RAM: 48, Disk: "480", Region: "nyc1", Price: 0.71429},
	{Size: "48gb", CPU: 16, RAM: 48, Disk: "480", Region: "nyc2", Price: 0.71429},
	{Size: "48gb", CPU: 16, RAM: 48, Disk: "480", Region: "nyc3", Price: 0.71429},
	{Size: "48gb", CPU: 16, RAM: 48, Disk: "480", Region: "sfo1", Price: 0.71429},
	{Size: "48gb", CPU: 16, RAM: 48, Disk: "480", Region: "sfo2", Price: 0.71429},
	{Size: "48gb", CPU: 16, RAM: 48, Disk: "480", Region: "sgp1", Price: 0.71429},
	{Size: "48gb", CPU: 16, RAM: 48, Disk: "480", Region: "tor1", Price: 0.71429},
	// memory-optimized m-64gb tier
	{Size: "m-64gb", CPU: 8, RAM: 64, Disk: "200", Region: "blr1", Price: 0.71429},
	{Size: "m-64gb", CPU: 8, RAM: 64, Disk: "200", Region: "fra1", Price: 0.71429},
	{Size: "m-64gb", CPU: 8, RAM: 64, Disk: "200", Region: "lon1", Price: 0.71429},
	{Size: "m-64gb", CPU: 8, RAM: 64, Disk: "200", Region: "nyc1", Price: 0.71429},
	{Size: "m-64gb", CPU: 8, RAM: 64, Disk: "200", Region: "nyc3", Price: 0.71429},
	{Size: "m-64gb", CPU: 8, RAM: 64, Disk: "200", Region: "sfo2", Price: 0.71429},
	{Size: "m-64gb", CPU: 8, RAM: 64, Disk: "200", Region: "tor1", Price: 0.71429},
	// standard 64gb tier
	{Size: "64gb", CPU: 20, RAM: 64, Disk: "640", Region: "ams2", Price: 0.95238},
	{Size: "64gb", CPU: 20, RAM: 64, Disk: "640", Region: "ams3", Price: 0.95238},
	{Size: "64gb", CPU: 20, RAM: 64, Disk: "640", Region: "blr1", Price: 0.95238},
	{Size: "64gb", CPU: 20, RAM: 64, Disk: "640", Region: "fra1", Price: 0.95238},
	{Size: "64gb", CPU: 20, RAM: 64, Disk: "640", Region: "lon1", Price: 0.95238},
	{Size: "64gb", CPU: 20, RAM: 64, Disk: "640", Region: "nyc1", Price: 0.95238},
	{Size: "64gb", CPU: 20, RAM: 64, Disk: "640", Region: "nyc2", Price: 0.95238},
	{Size: "64gb", CPU: 20, RAM: 64, Disk: "640", Region: "nyc3", Price: 0.95238},
	{Size: "64gb", CPU: 20, RAM: 64, Disk: "640", Region: "sfo1", Price: 0.95238},
	{Size: "64gb", CPU: 20, RAM: 64, Disk: "640", Region: "sfo2", Price: 0.95238},
	{Size: "64gb", CPU: 20, RAM: 64, Disk: "640", Region: "sgp1", Price: 0.95238},
	{Size: "64gb", CPU: 20, RAM: 64, Disk: "640", Region: "tor1", Price: 0.95238},
	// memory-optimized m-128gb tier
	{Size: "m-128gb", CPU: 16, RAM: 128, Disk: "340", Region: "blr1", Price: 1.42857},
	{Size: "m-128gb", CPU: 16, RAM: 128, Disk: "340", Region: "fra1", Price: 1.42857},
	{Size: "m-128gb", CPU: 16, RAM: 128, Disk: "340", Region: "lon1", Price: 1.42857},
	{Size: "m-128gb", CPU: 16, RAM: 128, Disk: "340", Region: "nyc1", Price: 1.42857},
	{Size: "m-128gb", CPU: 16, RAM: 128, Disk: "340", Region: "nyc3", Price: 1.42857},
	{Size: "m-128gb", CPU: 16, RAM: 128, Disk: "340", Region: "sfo2", Price: 1.42857},
	{Size: "m-128gb", CPU: 16, RAM: 128, Disk: "340", Region: "tor1", Price: 1.42857},
	// memory-optimized m-224gb tier
	{Size: "m-224gb", CPU: 32, RAM: 224, Disk: "500", Region: "blr1", Price: 2.5},
	{Size: "m-224gb", CPU: 32, RAM: 224, Disk: "500", Region: "fra1", Price: 2.5},
	{Size: "m-224gb", CPU: 32, RAM: 224, Disk: "500", Region: "lon1", Price: 2.5},
	{Size: "m-224gb", CPU: 32, RAM: 224, Disk: "500", Region: "nyc1", Price: 2.5},
	{Size: "m-224gb", CPU: 32, RAM: 224, Disk: "500", Region: "nyc3", Price: 2.5},
	{Size: "m-224gb", CPU: 32, RAM: 224, Disk: "500", Region: "sfo2", Price: 2.5},
	{Size: "m-224gb", CPU: 32, RAM: 224, Disk: "500", Region: "tor1", Price: 2.5},
}
package tplink
import "math"
// Basic Instructions
//
// The small value types below model the innermost JSON payloads shared by
// several TP-Link protocol commands; each embeds errorCode so the device's
// err_code field decodes alongside the value.

// aliasValue carries a device alias string.
type aliasValue struct {
	errorCode
	Value string `json:"alias"`
}

// delayTime carries a delay value (used by the reboot and reset commands).
type delayTime struct {
	errorCode
	Delay int `json:"delay"`
}

// deviceIdValue carries a device ID string.
type deviceIdValue struct {
	errorCode
	Value string `json:"deviceId"`
}

// errorCode is embedded in every payload type to expose err_code from
// device responses.
type errorCode struct {
	ErrorCode int `json:"err_code,omitempty"`
}

// hardwareIdValue carries a hardware ID string.
type hardwareIdValue struct {
	errorCode
	Value string `json:"hwId"`
}

// locationValues carries a latitude/longitude pair.
type locationValues struct {
	errorCode
	Latitude  float64 `json:"latitude"`
	Longitude float64 `json:"longitude"`
}

// networkValues carries Wi-Fi station configuration for set_stainfo.
type networkValues struct {
	errorCode
	NetworkName string `json:"ssid"`
	Password    string `json:"password"`
	KeyType     int    `json:"key_type"`
}

// onOffValue carries an on/off flag; the JSON key is "off" as used by the
// set_led_off command.
type onOffValue struct {
	errorCode
	Value int `json:"off"`
}

// stateValue carries a relay state flag for set_relay_state.
type stateValue struct {
	errorCode
	State int `json:"state"`
}
// EnergyMeter Types

// RealTimeEnergy is the instantaneous reading from the device's energy
// meter. Units are per the TP-Link protocol — presumably amps, volts,
// watts, and cumulative kWh; confirm against device documentation.
type RealTimeEnergy struct {
	errorCode
	Current float32 `json:"current,omitempty"`
	Voltage float32 `json:"voltage,omitempty"`
	Power   float32 `json:"power,omitempty"`
	Total   float32 `json:"total,omitempty"`
}

// GetRealTimeEnergy wraps RealTimeEnergy under the "get_realtime" key.
type GetRealTimeEnergy struct {
	Energy RealTimeEnergy `json:"get_realtime"`
}

// ElectricityMeterInfo is the full "emeter"/"get_realtime" envelope.
type ElectricityMeterInfo struct {
	EMeter GetRealTimeEnergy `json:"emeter"`
}

// Realtime returns a pointer to the decoded real-time energy reading.
func (e *ElectricityMeterInfo) Realtime() *RealTimeEnergy {
	return &e.EMeter.Energy
}
// Daily Usage protocol example (get_daystat):
//   SEND {"emeter": {"get_daystat": {"month": 11, "year": 2021}}}
//   RECV {"emeter":{"get_daystat":{"day_list":[{"year":2021,"month":11,"day":1,"energy":1.172000}, ...],"err_code":0}}}
// The daily average is the mean of the energy values in day_list.

// DayStat is a single per-day energy reading (also reused by MonthlyStats
// for per-month entries, where Day is omitted).
type DayStat struct {
	Day    int     `json:"day,omitempty"`
	Energy float32 `json:"energy,omitempty"`
	Month  int     `json:"month,omitempty"`
	Year   int     `json:"year,omitempty"`
}

// DailyStats is the payload of a get_daystat response: one DayStat per day
// of the requested month/year.
type DailyStats struct {
	DayList []DayStat `json:"day_list,omitempty"`
	errorCode
	Month int `json:"month,omitempty"`
	Year  int `json:"year,omitempty"`
}

// getDailyStat wraps DailyStats under the "get_daystat" key.
type getDailyStat struct {
	Stats DailyStats `json:"get_daystat"`
}

// EMeterDailyStats is the full "emeter"/"get_daystat" envelope.
type EMeterDailyStats struct {
	EMeter getDailyStat `json:"emeter"`
}

// DailyStats returns a pointer to the decoded daily statistics payload.
func (e *EMeterDailyStats) DailyStats() *DailyStats {
	return &e.EMeter.Stats
}
// Monthly Usage protocol example (get_monthstat):
//   SEND {"emeter": {"get_monthstat": {"year": 2021}}}
//   RECV {"emeter":{"get_monthstat":{"month_list":[{"year":2021,"month":1,"energy":23.111000}, ...],"err_code":0}}}

// MonthlyStats is the payload of a get_monthstat response: one DayStat per
// month of the requested year (the Day field is unused here).
type MonthlyStats struct {
	MonthList []DayStat `json:"month_list,omitempty"`
	errorCode
	Year int `json:"year,omitempty"`
}

// getMonthStat wraps MonthlyStats under the "get_monthstat" key.
type getMonthStat struct {
	Stats MonthlyStats `json:"get_monthstat"`
}

// EMeterMonthlyStats is the full "emeter"/"get_monthstat" envelope.
type EMeterMonthlyStats struct {
	EMeter getMonthStat `json:"emeter"`
}

// MonthlyStats returns a pointer to the decoded monthly statistics payload.
func (e *EMeterMonthlyStats) MonthlyStats() *MonthlyStats {
	return &e.EMeter.Stats
}
// Clear Usage Stats protocol example (erase_emeter_stat):
//   SEND {"emeter": {"erase_emeter_stat": {}}}
//   RECV {"emeter":{"erase_emeter_stat":{"err_code":0}}}

// SystemInfo Types

// SystemInfo is the payload of a get_sysinfo response, describing device
// identity, firmware, state, and location.
type SystemInfo struct {
	errorCode
	SoftwareVersion string  `json:"sw_ver,omitempty"`
	HardwareVersion string  `json:"hw_ver,omitempty"`
	Type            string  `json:"type,omitempty"`
	Model           string  `json:"model,omitempty"`
	MacAddress      string  `json:"mac,omitempty"`
	DeviceId        string  `json:"deviceId,omitempty"`
	HardwareId      string  `json:"hwId,omitempty"`
	FirmwareId      string  `json:"fwId,omitempty"`
	ManufacturerId  string  `json:"oemId,omitempty"`
	Alias           string  `json:"alias,omitempty"`
	DeviceName      string  `json:"dev_name,omitempty"`
	IconHash        string  `json:"icon_hash,omitempty"`
	RelayState      int     `json:"relay_state,omitempty"`
	UpTime          int     `json:"on_time,omitempty"` // presumably seconds since power-on — confirm
	ActiveMode      string  `json:"active_mode,omitempty"`
	Features        string  `json:"feature,omitempty"`
	Updating        int     `json:"updating,omitempty"`
	SignalStrength  int     `json:"rssi,omitempty"` // Wi-Fi RSSI, presumably dBm — confirm
	LedStatus       int     `json:"led_off,omitempty"`
	Latitude        float32 `json:"latitude,omitempty"`
	Longitude       float32 `json:"longitude,omitempty"`
}

// GetSystemInfo wraps SystemInfo under the "get_sysinfo" key.
type GetSystemInfo struct {
	Info SystemInfo `json:"get_sysinfo"`
}

// DeviceInfo is the full "system"/"get_sysinfo" envelope.
type DeviceInfo struct {
	System GetSystemInfo `json:"system"`
}

// SystemInfo returns a pointer to the decoded system information payload.
func (d *DeviceInfo) SystemInfo() *SystemInfo {
	return &d.System.Info
}
// Network Configuration

// networkSettings wraps networkValues under the "set_stainfo" key.
type networkSettings struct {
	Settings networkValues `json:"set_stainfo"`
}

// NetworkSettings is the "system-level" envelope for configuring the
// device's Wi-Fi station settings (netif/set_stainfo).
type NetworkSettings struct {
	Interface networkSettings `json:"netif"`
}

// ErrorCode reports the err_code from the device response.
func (n *NetworkSettings) ErrorCode() int {
	return n.Interface.Settings.ErrorCode
}

// GetNetwork returns the configured SSID.
func (n *NetworkSettings) GetNetwork() string {
	return n.Interface.Settings.NetworkName
}

// GetPassword returns the configured Wi-Fi password.
func (n *NetworkSettings) GetPassword() string {
	return n.Interface.Settings.Password
}

// SetSettings stores the SSID and password to send, always using key type
// 3 (WPA2).
func (n *NetworkSettings) SetSettings(network string, password string) {
	n.Interface.Settings.KeyType = 3 // WPA2
	n.Interface.Settings.NetworkName = network
	n.Interface.Settings.Password = password
}
// System Alias

// systemAlias wraps aliasValue under the "set_dev_alias" key.
type systemAlias struct {
	Alias aliasValue `json:"set_dev_alias"`
}

// SystemAlias is the "system"/"set_dev_alias" command envelope used to get
// or change the device's human-readable alias.
type SystemAlias struct {
	System systemAlias `json:"system"`
}

// ErrorCode reports the err_code from the device response.
func (a *SystemAlias) ErrorCode() int {
	return a.System.Alias.ErrorCode
}

// GetAlias returns the alias string.
func (a *SystemAlias) GetAlias() string {
	return a.System.Alias.Value
}

// SetAlias stores the alias string to send.
func (a *SystemAlias) SetAlias(s string) {
	a.System.Alias.Value = s
}
// System Device ID

// deviceId wraps deviceIdValue under the "set_device_id" key.
type deviceId struct {
	DeviceId deviceIdValue `json:"set_device_id"`
}

// SystemDeviceId is the "system"/"set_device_id" command envelope.
type SystemDeviceId struct {
	System deviceId `json:"system"`
}

// ErrorCode reports the err_code from the device response.
func (d *SystemDeviceId) ErrorCode() int {
	return d.System.DeviceId.ErrorCode
}

// GetDeviceId returns the device ID string.
func (d *SystemDeviceId) GetDeviceId() string {
	return d.System.DeviceId.Value
}

// SetDeviceId stores the device ID string to send.
func (d *SystemDeviceId) SetDeviceId(id string) {
	d.System.DeviceId.Value = id
}
// System Hardware ID

// hardwareId wraps hardwareIdValue under the "set_hw_id" key.
type hardwareId struct {
	HardwareId hardwareIdValue `json:"set_hw_id"`
}

// SystemHardwareId is the "system"/"set_hw_id" command envelope.
type SystemHardwareId struct {
	System hardwareId `json:"system"`
}

// ErrorCode reports the err_code from the device response.
func (d *SystemHardwareId) ErrorCode() int {
	return d.System.HardwareId.ErrorCode
}

// GetHardwareId returns the hardware ID string.
func (d *SystemHardwareId) GetHardwareId() string {
	return d.System.HardwareId.Value
}

// SetHardwareId stores the hardware ID string to send.
func (d *SystemHardwareId) SetHardwareId(id string) {
	d.System.HardwareId.Value = id
}
// LED Status

// ledState wraps onOffValue under the "set_led_off" key.
type ledState struct {
	LedState onOffValue `json:"set_led_off"`
}

// SystemLedState is the "system"/"set_led_off" command envelope used to get
// or change the device LED indicator flag.
type SystemLedState struct {
	System ledState `json:"system"`
}

// ErrorCode reports the err_code from the device response.
func (s *SystemLedState) ErrorCode() int {
	return s.System.LedState.ErrorCode
}

// GetState reports whether the stored "off" flag is non-zero.
func (s *SystemLedState) GetState() bool {
	return s.System.LedState.Value != 0
}

// SetState stores 1 when st is true and 0 otherwise.
func (s *SystemLedState) SetState(st bool) {
	v := 0
	if st {
		v = 1
	}
	s.System.LedState.Value = v
}
// System Location

// locationValue wraps locationValues under the "set_dev_location" key.
type locationValue struct {
	Location locationValues `json:"set_dev_location"`
}

// SystemLocation is the "system"/"set_dev_location" command envelope.
type SystemLocation struct {
	System locationValue `json:"system"`
}

// ErrorCode reports the err_code from the device response.
func (loc *SystemLocation) ErrorCode() int {
	return loc.System.Location.ErrorCode
}

// GetLatitude returns the latitude reduced to three decimal places.
// NOTE(review): math.Floor rounds toward -infinity, so negative
// coordinates are pushed away from zero rather than truncated — confirm
// this is the intended behavior for southern/western coordinates.
func (loc *SystemLocation) GetLatitude() float64 {
	lat := loc.System.Location.Latitude
	return math.Floor(lat*1000) / 1000
}

// GetLongitude returns the longitude reduced to three decimal places
// (same math.Floor caveat as GetLatitude).
func (loc *SystemLocation) GetLongitude() float64 {
	lon := loc.System.Location.Longitude
	return math.Floor(lon*1000) / 1000
}

// SetLocation stores the latitude/longitude pair to send.
func (loc *SystemLocation) SetLocation(lat float64, lon float64) {
	loc.System.Location.Latitude = lat
	loc.System.Location.Longitude = lon
}
// Reboot Command

// reboot wraps delayTime under the "reboot" key.
type reboot struct {
	Reboot delayTime `json:"reboot"`
}

// SystemReboot is the "system"/"reboot" command envelope; Delay is the
// number of units to wait before rebooting (presumably seconds — confirm).
type SystemReboot struct {
	System reboot `json:"system"`
}

// ErrorCode reports the err_code from the device response.
func (r *SystemReboot) ErrorCode() int {
	return r.System.Reboot.ErrorCode
}

// GetDelay returns the stored reboot delay.
func (r *SystemReboot) GetDelay() int {
	return r.System.Reboot.Delay
}

// SetDelay stores the reboot delay to send.
func (r *SystemReboot) SetDelay(delay int) {
	r.System.Reboot.Delay = delay
}
// Relay State

// relayState wraps stateValue under the "set_relay_state" key.
type relayState struct {
	RelayState stateValue `json:"set_relay_state"`
}

// SystemRelayState is the "system"/"set_relay_state" command envelope used
// to get or change the plug's relay (on/off) state.
type SystemRelayState struct {
	System relayState `json:"system"`
}

// ErrorCode reports the err_code from the device response.
func (r *SystemRelayState) ErrorCode() int {
	return r.System.RelayState.ErrorCode
}

// GetRelayState reports whether the stored state flag is non-zero.
func (r *SystemRelayState) GetRelayState() bool {
	return r.System.RelayState.State != 0
}

// SetRelayState stores 1 when st is true and 0 otherwise.
func (r *SystemRelayState) SetRelayState(st bool) {
	state := 0
	if st {
		state = 1
	}
	r.System.RelayState.State = state
}
// Reset Command

// reset wraps delayTime under the "reset" key.
type reset struct {
	Reset delayTime `json:"reset"`
}

// SystemReset is the "system"/"reset" command envelope; Delay is the number
// of units to wait before the factory reset (presumably seconds — confirm).
type SystemReset struct {
	System reset `json:"system"`
}

// ErrorCode reports the err_code from the device response.
func (r *SystemReset) ErrorCode() int {
	return r.System.Reset.ErrorCode
}

// GetDelay returns the stored reset delay.
func (r *SystemReset) GetDelay() int {
	return r.System.Reset.Delay
}

// SetDelay stores the reset delay to send.
func (r *SystemReset) SetDelay(d int) {
	r.System.Reset.Delay = d
}
package ace
import (
"github.com/gopherjs/gopherjs/js"
)
// Range is a wrapper for the Ace Range type.
// It embeds the underlying *js.Object so all methods delegate to the
// JavaScript implementation via GopherJS.
type Range struct {
	*js.Object
}

// NewRange returns a newly created Range object.
// It invokes the global JavaScript `Range` constructor, which must be in
// scope (loaded by the Ace editor scripts).
func NewRange(startRow, startColumn, endRow, endColumn int) Range {
	return Range{js.Global.Get("Range").New(startRow, startColumn, endRow, endColumn)}
}
// StartRow returns the starting row of this Range object.
func (r Range) StartRow() int {
	start := r.Get("start")
	return start.Get("row").Int()
}

// StartColumn returns the starting column of this Range object.
func (r Range) StartColumn() int {
	start := r.Get("start")
	return start.Get("column").Int()
}

// EndRow returns the ending row of this Range object.
func (r Range) EndRow() int {
	end := r.Get("end")
	return end.Get("row").Int()
}

// EndColumn returns the ending column of this Range object.
func (r Range) EndColumn() int {
	end := r.Get("end")
	return end.Get("column").Int()
}
// ClipRows returns the part of the current `Range` that occurs within the boundaries of `firstRow`
// and `lastRow` as a new `Range` object.
func (r Range) ClipRows(firstRow, lastRow int) Range {
	return Range{r.Call("clipRows", firstRow, lastRow)}
}

// Clone returns a duplicate of the calling range.
func (r Range) Clone() Range {
	return Range{r.Call("clone")}
}

// CollapseRows returns a range containing the starting and ending rows of the original range,
// but with a column value of `0`.
func (r Range) CollapseRows() Range {
	return Range{r.Call("collapseRows")}
}

// Compare checks the row and column points with the row and column points of the calling range.
// The int result encodes the relative position per Ace's Range.compare —
// consult the Ace API reference for the exact -1/0/1 semantics.
func (r Range) Compare(row, column int) int {
	return r.Call("compare", row, column).Int()
}

// CompareStart checks the row and column points with the row and column points of the calling range.
func (r Range) CompareStart(row, column int) int {
	return r.Call("compareStart", row, column).Int()
}

// CompareEnd checks the row and column points with the row and column points of the calling range.
func (r Range) CompareEnd(row, column int) int {
	return r.Call("compareEnd", row, column).Int()
}

// CompareInside checks the row and column points with the row and column points of the calling range.
func (r Range) CompareInside(row, column int) int {
	return r.Call("compareInside", row, column).Int()
}

// ComparePoint checks the row and column points of `p` with the row and column points of the calling range.
// NOTE(review): Ace's comparePoint takes a {row, column} point object; here
// a Range is passed — confirm the intended argument shape.
func (r Range) ComparePoint(p Range) int {
	return r.Call("comparePoint", p).Int()
}

// CompareRange compares this range with the given range.
func (r Range) CompareRange(r2 Range) int {
	return r.Call("compareRange", r2).Int()
}

// Contains returns `true` if the `row` and `column` provided are within the given range.
// This can better be expressed as returning `true` if:
// range.StartRow() <= row <= range.EndRow() &&
// range.StartColumn() <= column <= range.EndColumn()
func (r Range) Contains(row, column int) bool {
	return r.Call("contains", row, column).Bool()
}

// ContainsRange checks the start and end points of `range` and compares them to the calling range.
// Returns `true` if the `range` is contained within the caller's range.
func (r Range) ContainsRange(r2 Range) bool {
	return r.Call("containsRange", r2).Bool()
}

// Extend changes the row and column points for the calling range for both the starting and ending points.
func (r Range) Extend(row, column int) Range {
	return Range{r.Call("extend", row, column)}
}
package color
import (
"image/color"
"github.com/goplus/interp"
)
// init registers the image/color package with the goplus interpreter so
// interpreted programs can resolve its functions, variables, and types.
func init() {
	interp.RegisterPackage("image/color", extMap, typList)
}
// extMap maps fully-qualified image/color symbol names to their Go values:
// method expressions for methods, function values for functions, and
// addresses for package-level variables (so the interpreter can read them
// by reference).
var extMap = map[string]interface{}{
	"(image/color.Alpha).RGBA":       (color.Alpha).RGBA,
	"(image/color.Alpha16).RGBA":     (color.Alpha16).RGBA,
	"(image/color.CMYK).RGBA":        (color.CMYK).RGBA,
	"(image/color.Color).RGBA":       (color.Color).RGBA,
	"(image/color.Gray).RGBA":        (color.Gray).RGBA,
	"(image/color.Gray16).RGBA":      (color.Gray16).RGBA,
	"(image/color.Model).Convert":    (color.Model).Convert,
	"(image/color.NRGBA).RGBA":       (color.NRGBA).RGBA,
	"(image/color.NRGBA64).RGBA":     (color.NRGBA64).RGBA,
	"(image/color.NYCbCrA).RGBA":     (color.NYCbCrA).RGBA,
	"(image/color.Palette).Convert":  (color.Palette).Convert,
	"(image/color.Palette).Index":    (color.Palette).Index,
	"(image/color.RGBA).RGBA":        (color.RGBA).RGBA,
	"(image/color.RGBA64).RGBA":      (color.RGBA64).RGBA,
	"(image/color.YCbCr).RGBA":       (color.YCbCr).RGBA,
	"image/color.Alpha16Model":       &color.Alpha16Model,
	"image/color.AlphaModel":         &color.AlphaModel,
	"image/color.Black":              &color.Black,
	"image/color.CMYKModel":          &color.CMYKModel,
	"image/color.CMYKToRGB":          color.CMYKToRGB,
	"image/color.Gray16Model":        &color.Gray16Model,
	"image/color.GrayModel":          &color.GrayModel,
	"image/color.ModelFunc":          color.ModelFunc,
	"image/color.NRGBA64Model":       &color.NRGBA64Model,
	"image/color.NRGBAModel":         &color.NRGBAModel,
	"image/color.NYCbCrAModel":       &color.NYCbCrAModel,
	"image/color.Opaque":             &color.Opaque,
	"image/color.RGBA64Model":        &color.RGBA64Model,
	"image/color.RGBAModel":          &color.RGBAModel,
	"image/color.RGBToCMYK":          color.RGBToCMYK,
	"image/color.RGBToYCbCr":         color.RGBToYCbCr,
	"image/color.Transparent":        &color.Transparent,
	"image/color.White":              &color.White,
	"image/color.YCbCrModel":         &color.YCbCrModel,
	"image/color.YCbCrToRGB":         color.YCbCrToRGB,
}
// typList enumerates the image/color types exposed to the interpreter,
// given as typed nil pointers that are used only for type reflection.
var typList = []interface{}{
	(*color.Alpha)(nil),
	(*color.Alpha16)(nil),
	(*color.CMYK)(nil),
	(*color.Color)(nil),
	(*color.Gray)(nil),
	(*color.Gray16)(nil),
	(*color.Model)(nil),
	(*color.NRGBA)(nil),
	(*color.NRGBA64)(nil),
	(*color.NYCbCrA)(nil),
	(*color.Palette)(nil),
	(*color.RGBA)(nil),
	(*color.RGBA64)(nil),
	(*color.YCbCr)(nil),
}
package cluster
// Linkage criteria for agglomerative hierarchical clustering; one of these
// ordinal values is assigned to the Method field of HClusters.
const (
	single_linkage = iota
	complete_linkage
	average_linkage
	mcquitty_linkage
	median_linkage
	centroid_linkage
	ward_linkage
)
// HClusters holds the inputs and results of agglomerative hierarchical
// clustering over the rows of X.
type HClusters struct {
	// Data points [m x n]
	X Matrix
	// Distance metric
	Metric MetricOp
	// number of clusters
	K int
	// linkage method (one of the *_linkage constants)
	Method int
	// Distances between data points [m x m]
	D *Distances
	// Step-wise dendrogram
	Dendrogram Linkages
	// cluster center assignment index: Index[i] is the union-find root
	// labeling element i's cluster after CutTree/CutTreeHeight (labels are
	// root indices, not necessarily 0..K-1)
	Index []int
	// cost
	Cost float64
	// indices of active elements
	actives ActiveSet
}
// CutTree cuts the hierarchical cluster tree to generate K clusters.
// K == 0 means leave each element in its own cluster (K = m); K > m is
// clamped to m. The resulting assignment is written to c.Index, where each
// element is labeled by the union-find root of its cluster (labels are
// root indices, not necessarily 0..K-1). A nil dendrogram is a no-op.
func (c *HClusters) CutTree(K int) {
	if c.Dendrogram == nil {
		return
	}
	m := len(c.X)
	if K == 0 {
		// by default, leave each element in its own cluster
		K = m
	} else if K > m {
		// cannot have more clusters than elements
		K = m
	}
	c.K = K
	uf := NewUnionFind(m)
	// Starting with each element in its own cluster, each merge in the
	// stepwise dendrogram leaves one less cluster; therefore (m - K)
	// merges produce exactly K clusters.
	for i := 0; i < m-K; i++ {
		linkage := c.Dendrogram[i]
		uf.Union(linkage.First, linkage.Second)
	}
	// Now that all the merges have been done, determine the cluster index
	c.Index = make([]int, m)
	for i := 0; i < m; i++ {
		c.Index[i] = uf.Find(i)
	}
}
// CutTreeHeight cuts the hierarchical cluster tree at the specified height:
// all dendrogram merges with linkage distance <= height are applied, and
// c.K is set to the number of remaining clusters. The resulting assignment
// is written to c.Index, where each element is labeled by the union-find
// root of its cluster. A nil dendrogram is a no-op.
func (c *HClusters) CutTreeHeight(height float64) {
	if c.Dendrogram == nil {
		return
	}
	m := len(c.X)
	uf := NewUnionFind(m)
	// Starting with each element in its own cluster, apply merges in
	// dendrogram order until one exceeds the cut height.
	// BUGFIX: previously c.K was only assigned inside the break branch, so
	// it was left stale when every merge fell below the cut height; default
	// to 1 cluster, which is what remains after all m-1 merges.
	c.K = 1
	for i := 0; i < m-1; i++ {
		linkage := c.Dendrogram[i]
		if linkage.Distance > height {
			// i merges have occured: m - i clusters remain
			c.K = m - i
			break
		}
		uf.Union(linkage.First, linkage.Second)
	}
	// Now that all the merges have been done, determine the cluster index
	c.Index = make([]int, m)
	for i := 0; i < m; i++ {
		c.Index[i] = uf.Find(i)
	}
}
// Linkage records a single agglomerative merge between the clusters rooted
// at First and Second, at the given linkage Distance.
type Linkage struct {
	First, Second int
	Distance      float64
}

// Linkages is an ordered list of merges (a stepwise dendrogram) that
// implements sort.Interface, ordering merges by ascending Distance.
type Linkages []Linkage

// Len reports the number of linkages.
func (x Linkages) Len() int { return len(x) }

// Less orders linkages by ascending merge distance.
func (x Linkages) Less(i, j int) bool { return x[i].Distance < x[j].Distance }

// Swap exchanges two linkages in place.
func (x Linkages) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
package inverter
import (
"fmt"
"time"
)
// WattHour is an amount of energy in watt-hours.
type WattHour float64

// KWh is an amount of energy in kilowatt-hours.
type KWh float64

// ErrorCode is a numeric inverter error code.
type ErrorCode int64

// DailyStatistics of an Inverter with error code
type DailyStatistics struct {
	DailyProduction  WattHour // energy produced today
	YearlyProduction WattHour // energy produced this year
	TotalProduction  WattHour // lifetime energy production
	ErrorCode        ErrorCode
	StatusCode       int64
	ErrorString      string
}
}
// Data is the complete inverter snapshot to be saved in the database.
type Data struct {
	Info struct {
		FirmWare string // Firmware Version
		Product string // Product name
		Object string // SolarGo
		Date time.Time // Current date
	}
	AC struct {
		Voltage float64 // Voltage on Inverter AC side
		Current float64 // Current on Inverter AC side
		Frequency float64 // Frequency on Inverter AC side
		Power WattHour // Power on Inverter AC side
	}
	PV struct {
		Voltage float64 // Voltage on Inverter PV side
		Current float64 // Current on Inverter PV side
		Power WattHour // Power on Inverter PV side
		String1 struct {
			Voltage float64 // Voltage of that string
			Current float64 // Current of that string
		}
		String2 struct { // Some inverters support two solar strings
			Voltage float64 // Voltage of that string
			Current float64 // Current of that string
		}
	}
	Service struct {
		DeviceStatus int64 // Status of the inverter
		Temperature float64 // Temperature in °C
		ErrorCode int // Error Code of the inverter
		PVPower WattHour // Photovoltaic production
		MeterLocation string // Is the meter on "load" or "grid" or "unknown"
		Mode string // In what mode the inverter is operated
		Autonomy float64 // Autonomy Degree in %
		SelfConsumption float64 // Selfconsumption of the produced electricity in %
	}
	Statistics struct {
		Date time.Time // Current Time
		Week int // Todays Week
		Month int // Todays Month
		Production WattHour // Todays Production
		WeekDay string // Todays Weekday
	}
	Sums struct {
		ProductionToday WattHour // Daily Production in Wh
		ProductionTotal WattHour // Total Production in Wh
		ProductionYear WattHour // Yearly Production in Wh
		SumProdToday WattHour // Daily Production in Wh
		SumProdTotal WattHour // Total Production in Wh
		SumProdYear WattHour // Yearly Production in Wh
		SumPowerGrid WattHour // negative if we direct power to the grid, positive if we consume power from the grid
		SumPowerLoad WattHour // negative if consuming power, positive if generating
		SumPowerBattery WattHour // negative if charging, positive if discharging
		SumPowerPv WattHour // electricity production
	}
	Meter struct {
		Production float64 // Current Production
		ApparentPower float64 // Apparent Power
		BlindPower float64 // Blind Power
		EnergyProduction float64 // Smart-Meter energy produced
		EnergyUsed float64 // Smart-Meter energy used
		Feed WattHour // Fed into the Grid
		Purchased WattHour // Purchased from Grid
		Used WattHour // Locally used power
	}
}
// GenericInverter provides an abstraction over a specific inverter model.
type GenericInverter interface {
	// GetInverterStatistics returns the inverter's production counters
	// and error state.
	GetInverterStatistics() (DailyStatistics, error)
	// RetrieveData returns a full data snapshot of the inverter.
	RetrieveData() (Data, error)
}
// ToKWh converts the watt-hour value to kilowatt-hours.
func (w *WattHour) ToKWh() KWh {
	const wattHoursPerKWh = 1000.0
	return KWh(float64(*w) / wattHoursPerKWh)
}
//Converts the Daily Statistics into a human readable form
func (s *DailyStatistics) String() string {
return fmt.Sprintf("Daily Production: %.2f kWh\nYearly Production: %.2f kWh\nTotal Production: %.2f kWh", s.DailyProduction.ToKWh(), s.YearlyProduction.ToKWh(), s.TotalProduction.ToKWh())
} | inverter/inverter.go | 0.617859 | 0.467575 | inverter.go | starcoder |
package svg
import (
"fmt"
"strconv"
"strings"
"unicode"
)
// PathCommand is a representation of an SVG path command. It contains the
// operator symbol (e.g. "M", "l", "c") and the command's numeric parameters.
type PathCommand struct {
	Symbol string
	Params []float64
}

// IsAbsolute reports whether the command uses absolute coordinates, which
// the SVG grammar encodes as an upper-case operator symbol.
func (c *PathCommand) IsAbsolute() bool {
	return c.Symbol == strings.ToUpper(c.Symbol)
}

// Equal reports whether both commands have the same operator symbol and
// exactly the same parameter list.
func (c *PathCommand) Equal(o *PathCommand) bool {
	if c.Symbol != o.Symbol {
		return false
	}
	// Fix: compare lengths first. The original indexed o.Params with
	// c.Params' indices, which panicked when o had fewer parameters and
	// wrongly reported equality when o had more.
	if len(c.Params) != len(o.Params) {
		return false
	}
	for i, param := range c.Params {
		if param != o.Params[i] {
			return false
		}
	}
	return true
}
// Path is the ordered collection of commands parsed from a path data
// attribute.
type Path struct {
	Commands []*PathCommand
}

// Equal reports whether both paths contain the same command sequence.
func (p *Path) Equal(o *Path) bool {
	if len(p.Commands) != len(o.Commands) {
		return false
	}
	for i := range p.Commands {
		if !p.Commands[i].Equal(o.Commands[i]) {
			return false
		}
	}
	return true
}
// NewPath parses the value of a path data attribute into a series of
// commands with their parameters.
func NewPath(raw string) (*Path, error) {
	parsed, err := commands(raw)
	if err != nil {
		return nil, err
	}
	return &Path{Commands: parsed}, nil
}
// Subpaths computes all subpaths from a given path. The 'Z' (closepath)
// command is excluded from the resulting paths. A subpath that begins
// after a closepath without its own moveto reuses the most recent moveto
// as its start command.
func (p *Path) Subpaths() []*Path {
	path := &Path{}
	var subpaths []*Path
	var mostRecentStart *PathCommand
	for _, command := range p.Commands {
		switch strings.ToLower(command.Symbol) {
		case startCommand:
			// A new moveto terminates the current subpath, if any.
			if len(path.Commands) > 0 {
				subpaths = append(subpaths, path)
			}
			path = &Path{Commands: []*PathCommand{command}}
			// Keep a copy so later implicit subpaths can reuse it.
			mostRecentStart = &PathCommand{
				Symbol: command.Symbol,
				Params: command.Params,
			}
		case endCommand:
			// Closepath ends the subpath but is not itself recorded.
			subpaths = append(subpaths, path)
			path = &Path{}
		default:
			// NOTE(review): if a drawing command precedes any moveto,
			// mostRecentStart is nil here. Paths built by NewPath always
			// start with a moveto, so this only affects hand-built paths.
			if len(path.Commands) == 0 {
				path = &Path{Commands: []*PathCommand{mostRecentStart}}
			}
			path.Commands = append(path.Commands, command)
		}
	}
	// Flush the trailing subpath (paths need not end with 'Z').
	if len(path.Commands) > 0 {
		subpaths = append(subpaths, path)
	}
	return subpaths
}
const (
	startCommand = "m" // moveto: begins a new subpath
	endCommand = "z" // closepath: ends the current subpath
)

// commandParams maps a (lower-cased) command symbol to the number of
// parameters that command requires.
var commandParams = map[string]int{
	"m": 2, "z": 0, "l": 2, "h": 1, "v": 1,
	"c": 6, "s": 4, "q": 4, "t": 2, "a": 7,
}
// commands makes a slice of path commands from a raw path data attribute.
// Tokens are scanned right-to-left so that each command symbol can claim
// the operands that textually follow it; a symbol followed by several
// parameter groups is expanded into repeated commands.
func commands(raw string) ([]*PathCommand, error) {
	ts, err := tokenize(raw)
	if err != nil {
		return nil, err
	}
	tokens := *ts
	// From specification, a path data attribute is invalid if it does not
	// start with moveto command.
	if len(tokens) > 0 && strings.ToLower(tokens[0].value) != startCommand {
		return nil, fmt.Errorf(
			"Path data does not start with a moveto command: %s", raw)
	}
	operands := []float64{}
	cmds := []*PathCommand{}
	for i := len(tokens) - 1; i >= 0; i-- {
		value := tokens[i].value
		if !tokens[i].operator {
			// Operand: accumulate (in reverse source order) until its
			// command symbol is reached.
			number, err := strconv.ParseFloat(value, 64)
			if err != nil {
				return nil, fmt.Errorf("Invalid parameter syntax")
			}
			operands = append(operands, number)
			continue
		}
		paramCount, ok := commandParams[strings.ToLower(value)]
		if !ok {
			return nil, fmt.Errorf("Invalid command '%s'", value)
		}
		operandCount := len(operands)
		// Zero-parameter command (closepath) with nothing pending.
		if paramCount == 0 && operandCount == 0 {
			command := &PathCommand{Symbol: value}
			cmds = append([]*PathCommand{command}, cmds...)
			continue
		}
		if paramCount == 0 || operandCount%paramCount != 0 {
			return nil, fmt.Errorf("Incorrect number of parameters for %v", value)
		}
		// One symbol may carry several parameter groups; emit one command
		// per group, prepending so the final slice is in source order.
		loopCount := operandCount / paramCount
		for i := 0; i < loopCount; i++ {
			operator := value
			// Per the SVG grammar, extra coordinate pairs after a moveto
			// are implicit linetos; only the group emitted last (first in
			// source order) keeps the moveto operator.
			if operator == "m" && i < loopCount-1 {
				operator = "l"
			}
			if operator == "M" && i < loopCount-1 {
				operator = "L"
			}
			// Operands were collected reversed; restore source order.
			command := &PathCommand{operator, reverse(operands[:paramCount])}
			cmds = append([]*PathCommand{command}, cmds...)
			operands = operands[paramCount:]
		}
	}
	return cmds, nil
}
// token holds either an operator symbol or an operand literal as a string.
type token struct {
	value    string
	operator bool
}

// tokens is an ordered collection of tokens.
type tokens []token

// add appends a token built from value unless value is empty.
// It reports whether a token was actually appended.
func (ts *tokens) add(value []rune, operator bool) bool {
	if len(value) == 0 {
		return false
	}
	*ts = append(*ts, token{value: string(value), operator: operator})
	return true
}
// tokenize takes the value of a path data attribute and transforms it into
// a slice of tokens representing operators (command letters) and operands
// (numeric literals).
func tokenize(raw string) (*tokens, error) {
	ts := &tokens{}
	var operand []rune
	for _, r := range raw {
		switch {
		case r == '.':
			// A leading '.' gets an implicit integer part (".5" -> "0.5").
			if len(operand) == 0 {
				operand = append(operand, '0')
			}
			// A second '.' starts a new number ("1.5.5" -> "1.5", "0.5"),
			// matching SVG's compressed notation.
			if contains(operand, '.') {
				ts.add(operand, false)
				operand = []rune{'0'}
			}
			fallthrough
		case r >= '0' && r <= '9' || r == 'e':
			// Digits and the exponent marker extend the current operand.
			// ('e' is matched here before the letter case below.)
			operand = append(operand, r)
		case r == '-':
			// '-' after 'e' is an exponent sign; otherwise it terminates
			// the previous operand and starts a new negative one.
			if len(operand) > 0 && operand[len(operand)-1] == 'e' {
				operand = append(operand, r)
				continue
			}
			ts.add(operand, false)
			operand = []rune{r}
		case (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z'):
			// A letter is a command operator; flush any pending operand.
			if ok := ts.add(operand, false); ok {
				operand = []rune{}
			}
			ts.add([]rune{r}, true)
			continue
		case unicode.IsSpace(r) || r == ',':
			// Whitespace and commas are separators.
			if ok := ts.add(operand, false); ok {
				operand = []rune{}
			}
		default:
			return nil, fmt.Errorf("Unrecognized symbol '%s'", string(r))
		}
	}
	// Flush the final operand, if any.
	ts.add(operand, false)
	return ts, nil
}
// reverse flips ops in place and returns the same slice for convenience.
func reverse(ops []float64) []float64 {
	n := len(ops)
	for i := 0; i < n/2; i++ {
		ops[i], ops[n-1-i] = ops[n-1-i], ops[i]
	}
	return ops
}
// contains reports whether val occurs anywhere in rs.
func contains(rs []rune, val rune) bool {
	for i := range rs {
		if rs[i] == val {
			return true
		}
	}
	return false
}
package continuous
import (
"github.com/jtejido/linear"
"github.com/jtejido/stats"
"github.com/jtejido/stats/err"
"math"
"math/rand"
)
// Laplace (double exponential) distribution with location μ and scale b.
// https://en.wikipedia.org/wiki/Laplace_distribution
type Laplace struct {
	location, scale float64 // μ, b
	src rand.Source // randomness source; nil means the global source
	natural linear.RealVector // natural parameter cache, set by ToExponential
}
// NewLaplace builds a Laplace distribution with location μ and scale b,
// drawing randomness from the default source.
func NewLaplace(location, scale float64) (*Laplace, error) {
	return NewLaplaceWithSource(location, scale, nil)
}

// NewLaplaceWithSource builds a Laplace distribution with location μ and
// scale b that draws randomness from src (nil selects the global source).
// The scale must be strictly positive.
func NewLaplaceWithSource(location, scale float64, src rand.Source) (*Laplace, error) {
	if scale <= 0 {
		return nil, err.Invalid()
	}
	return &Laplace{location: location, scale: scale, src: src}, nil
}
// Parameters describes the admissible parameter ranges:
// μ ∈ (-∞,∞) and b ∈ (0,∞).
func (l *Laplace) Parameters() stats.Limits {
	return stats.Limits{
		"μ": stats.Interval{math.Inf(-1), math.Inf(1), true, true},
		"b": stats.Interval{0, math.Inf(1), true, true},
	}
}
// Support returns the distribution's support, x ∈ (-∞,∞).
// (The original comment claimed (0,∞), contradicting the returned interval.)
func (l *Laplace) Support() stats.Interval {
	return stats.Interval{math.Inf(-1), math.Inf(1), true, true}
}
// Probability returns the density f(x) = exp(-|x-μ|/b) / (2b) for x inside
// the support, and 0 otherwise.
func (l *Laplace) Probability(x float64) float64 {
	if !l.Support().IsWithinInterval(x) {
		return 0
	}
	return (1 / (2 * l.scale)) * math.Exp(-(math.Abs(x-l.location) / l.scale))
}
// Distribution returns the CDF at x, using the piecewise closed form of
// the Laplace CDF around the location μ.
// NOTE(review): when IsWithinInterval rejects x (presumably only ±Inf/NaN,
// given the open infinite bounds), this returns 0 — yet the CDF at +Inf
// should be 1. Confirm against stats.Interval's semantics.
func (l *Laplace) Distribution(x float64) float64 {
	if l.Support().IsWithinInterval(x) {
		z := (x - l.location) / l.scale
		if z < 0 {
			return (1. / 2) * math.Exp(z)
		}
		return 1. - (1./2)*math.Exp(-z)
	}
	return 0
}
// Entropy returns the differential entropy, 1 + ln(2b).
func (l *Laplace) Entropy() float64 {
	return 1 + math.Log(2*l.scale)
}

// ExKurtosis returns the excess kurtosis, which is 3 for any Laplace.
func (l *Laplace) ExKurtosis() float64 {
	return 3
}

// Skewness returns 0: the distribution is symmetric about μ.
func (l *Laplace) Skewness() float64 {
	return 0
}
// Inverse returns the quantile function (inverse CDF) at probability p.
// p ≤ 0 maps to -Inf and p ≥ 1 maps to +Inf.
func (l *Laplace) Inverse(p float64) float64 {
	switch {
	case p <= 0:
		return math.Inf(-1)
	case p >= 1:
		return math.Inf(1)
	case p < 0.5:
		return l.location + math.Log(2*p)*l.scale
	default:
		return l.location - math.Log(2*(1-p))*l.scale
	}
}
// Mean returns the expected value, which equals the location μ.
func (l *Laplace) Mean() float64 {
	return l.location
}

// Median returns the median, which equals the location μ.
func (l *Laplace) Median() float64 {
	return l.location
}

// Mode returns the mode, which equals the location μ.
func (l *Laplace) Mode() float64 {
	return l.location
}

// Variance returns 2b².
func (l *Laplace) Variance() float64 {
	return 2 * (l.scale * l.scale)
}
// Rand draws one sample via inverse-transform sampling: a uniform variate
// shifted to (-0.5, 0.5) is pushed through the Laplace quantile function.
// NOTE(review): rand.New(l.src) constructs a fresh generator on every call;
// consider caching a *rand.Rand on the struct if sampling is hot.
func (l *Laplace) Rand() float64 {
	var rnd float64
	if l.src == nil {
		rnd = rand.Float64()
	} else {
		rnd = rand.New(l.src).Float64()
	}
	u := rnd - 0.5
	if u < 0 {
		return l.location + l.scale*math.Log(1+2*u)
	}
	return l.location - l.scale*math.Log(1-2*u)
}
// ToExponential caches the natural parameter vector [-1/b] of the
// exponential-family form on the receiver.
// NOTE(review): the error from NewArrayRealVectorFromSlice is discarded —
// presumably it cannot fail for a literal one-element slice; confirm.
func (l *Laplace) ToExponential() {
	vec, _ := linear.NewArrayRealVectorFromSlice([]float64{-1 / l.scale})
	l.natural = vec
}
func (l *Laplace) SufficientStatistics(x float64) linear.RealVector {
vec, _ := linear.NewArrayRealVectorFromSlice([]float64{math.Abs(x - l.location)})
return vec
} | dist/continuous/laplace.go | 0.81593 | 0.460835 | laplace.go | starcoder |
package ligo
// CopySeq returns a structural copy of seq: the seq cells are new, while
// the elements themselves are shared with the original.
func CopySeq(seq Seq) Seq {
	if seq == nil {
		return nil
	}
	// CopySeq(nil) is nil, so the single-cell case needs no special
	// handling: the recursion bottoms out naturally.
	return cons(seq.First(), CopySeq(seq.Rest()))
}
// Append returns a new seq that is the concatenation of copies of the
// supplied seqs; the inputs are left unchanged.
//
//	listboth := lisp.Append(list1, list2)
func Append(seqs ...Seq) Seq {
	nextseq := make([]Seq, 0)
	if len(seqs) == 0 {
		return nil
	} else if len(seqs) == 1 {
		// Single seq: copy it cell by cell via recursion on its rest.
		if seqs[0] != nil {
			if seqs[0].Rest() != nil {
				nextseq = append(nextseq, seqs[0].Rest())
				return cons(seqs[0].First(), Append(nextseq...))
			}
			return cons(seqs[0].First(), nil)
		}
		return nil
	}
	// Multiple seqs: skip leading nils, ...
	if seqs[0] == nil {
		return Append(seqs[1:]...)
	}
	// ... then cons the head of the first seq onto the concatenation of
	// the remainder of the first seq and the rest of the arguments.
	if seqs[0].Rest() != nil {
		nextseq = append(nextseq, Append(seqs[0].Rest()))
		nextseq = append(nextseq, seqs[1:]...)
		return cons(seqs[0].First(), Append(nextseq...))
	}
	nextseq = append(nextseq, seqs[1:]...)
	return cons(seqs[0].First(), Append(nextseq...))
}
// RevAppend returns the elements of seq consed onto tail in reverse order;
// tail itself is shared, not copied.
func RevAppend(seq, tail Seq) Seq {
	out := tail
	for s := seq; s != nil; s = s.Rest() {
		out = cons(s.First(), out)
	}
	return out
}
// Reverse returns a copy of seq with its elements in reverse order.
func Reverse(seq Seq) Seq {
	return RevAppend(seq, nil)
}
// SubSeq returns a copy of the elements of seq within the half-open index
// range [start, end). Indices past the end of seq are ignored.
func SubSeq(seq Seq, start, end uint) Seq {
	if seq == nil || start >= end {
		return nil
	}
	if start > 0 {
		// Skip the head until the window begins.
		// Fix: the original returned the remaining seq whenever only one
		// cell was left, even with start still positive — yielding
		// elements outside the requested range. It also aliased the
		// original tail instead of copying, contradicting its own doc.
		return SubSeq(seq.Rest(), start-1, end-1)
	}
	return cons(seq.First(), SubSeq(seq.Rest(), 0, end-1))
}
// Take returns a copy of the first n elements of seq.
func Take(seq Seq, n uint) Seq {
	return SubSeq(seq, 0, n)
}
// Drop returns seq without its first n elements. The result shares
// structure with the original seq (no copy is made).
func Drop(seq Seq, n uint) Seq {
	s := seq
	for ; n > 0 && s != nil; n-- {
		s = s.Rest()
	}
	return s
}
// Get the length of a given seq
func Length(seq Seq) int {
return len(ToSlice(seq))
} | ligo_simple_fns.go | 0.727589 | 0.578805 | ligo_simple_fns.go | starcoder |
package openapi
import (
"encoding/json"
)
// RecipientName is the name of the recipient to whom the card will be
// shipped.
type RecipientName struct {
	FirstName  string  `json:"first_name"`
	LastName   string  `json:"last_name"`
	MiddleName *string `json:"middle_name,omitempty"`
}

// NewRecipientName constructs a RecipientName with the required first and
// last names set; optional properties are left unset.
func NewRecipientName(firstName string, lastName string) *RecipientName {
	return &RecipientName{FirstName: firstName, LastName: lastName}
}

// NewRecipientNameWithDefaults constructs a RecipientName with only
// defaulted properties set; required properties must still be assigned.
func NewRecipientNameWithDefaults() *RecipientName {
	return &RecipientName{}
}
// GetFirstName returns the FirstName field value.
func (o *RecipientName) GetFirstName() string {
	if o == nil {
		return ""
	}
	return o.FirstName
}

// GetFirstNameOk returns a pointer to the FirstName field value together
// with a flag reporting whether the value is available.
func (o *RecipientName) GetFirstNameOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.FirstName, true
}

// SetFirstName stores v as the FirstName field value.
func (o *RecipientName) SetFirstName(v string) {
	o.FirstName = v
}
// GetLastName returns the LastName field value.
func (o *RecipientName) GetLastName() string {
	if o == nil {
		return ""
	}
	return o.LastName
}

// GetLastNameOk returns a pointer to the LastName field value together
// with a flag reporting whether the value is available.
func (o *RecipientName) GetLastNameOk() (*string, bool) {
	if o == nil {
		return nil, false
	}
	return &o.LastName, true
}

// SetLastName stores v as the LastName field value.
func (o *RecipientName) SetLastName(v string) {
	o.LastName = v
}
// GetMiddleName returns the MiddleName field value if set, and the zero
// value otherwise.
func (o *RecipientName) GetMiddleName() string {
	if o == nil || o.MiddleName == nil {
		return ""
	}
	return *o.MiddleName
}

// GetMiddleNameOk returns the MiddleName pointer if set, nil otherwise,
// together with a flag reporting whether the value has been set.
func (o *RecipientName) GetMiddleNameOk() (*string, bool) {
	if o == nil || o.MiddleName == nil {
		return nil, false
	}
	return o.MiddleName, true
}

// HasMiddleName reports whether the MiddleName field has been set.
func (o *RecipientName) HasMiddleName() bool {
	return o != nil && o.MiddleName != nil
}

// SetMiddleName stores a pointer to the given string in MiddleName.
func (o *RecipientName) SetMiddleName(v string) {
	o.MiddleName = &v
}
// MarshalJSON encodes the recipient name as a JSON object. The required
// fields are always emitted; MiddleName is emitted only when set.
// (Dead `if true` guards left by the code generator were removed.)
func (o RecipientName) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{
		"first_name": o.FirstName,
		"last_name":  o.LastName,
	}
	if o.MiddleName != nil {
		toSerialize["middle_name"] = o.MiddleName
	}
	return json.Marshal(toSerialize)
}
type NullableRecipientName struct {
value *RecipientName
isSet bool
}
func (v NullableRecipientName) Get() *RecipientName {
return v.value
}
func (v *NullableRecipientName) Set(val *RecipientName) {
v.value = val
v.isSet = true
}
func (v NullableRecipientName) IsSet() bool {
return v.isSet
}
func (v *NullableRecipientName) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableRecipientName(val *RecipientName) *NullableRecipientName {
return &NullableRecipientName{value: val, isSet: true}
}
func (v NullableRecipientName) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableRecipientName) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
} | synctera/model_recipient_name.go | 0.684791 | 0.475484 | model_recipient_name.go | starcoder |
package astrewrite
import (
"fmt"
"go/ast"
)
// WalkFunc describes a function to be called for each node during a Walk. The
// returned node can be used to rewrite the AST. Walking stops if the returned
// bool is false.
type WalkFunc func(ast.Node) (ast.Node, bool)
// Walk traverses an AST in depth-first order: It starts by calling
// fn(node); node must not be nil. It returns the rewritten node. If fn returns
// true, Walk invokes fn recursively for each of the non-nil children of node,
// followed by a call of fn(nil). The returned node of fn can be used to
// rewrite the passed node to fn. Panics if the returned type is not the same
// type as the original one.
func Walk(node ast.Node, fn WalkFunc) ast.Node {
rewritten, ok := fn(node)
if !ok {
return rewritten
}
// walk children
// (the order of the cases matches the order
// of the corresponding node types in ast.go)
switch n := node.(type) {
// Comments and fields
case *ast.Comment:
// nothing to do
case *ast.CommentGroup:
for i, c := range n.List {
n.List[i] = Walk(c, fn).(*ast.Comment)
}
case *ast.Field:
if n.Doc != nil {
n.Doc = Walk(n.Doc, fn).(*ast.CommentGroup)
}
walkIdentList(n.Names, fn)
n.Type = Walk(n.Type, fn).(ast.Expr)
if n.Tag != nil {
n.Tag = Walk(n.Tag, fn).(*ast.BasicLit)
}
if n.Comment != nil {
n.Comment = Walk(n.Comment, fn).(*ast.CommentGroup)
}
case *ast.FieldList:
for i, f := range n.List {
n.List[i] = Walk(f, fn).(*ast.Field)
}
// Expressions
case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
// nothing to do
case *ast.Ellipsis:
if n.Elt != nil {
n.Elt = Walk(n.Elt, fn).(ast.Expr)
}
case *ast.FuncLit:
n.Type = Walk(n.Type, fn).(*ast.FuncType)
n.Body = Walk(n.Body, fn).(*ast.BlockStmt)
case *ast.CompositeLit:
if n.Type != nil {
n.Type = Walk(n.Type, fn).(ast.Expr)
}
walkExprList(n.Elts, fn)
case *ast.ParenExpr:
n.X = Walk(n.X, fn).(ast.Expr)
case *ast.SelectorExpr:
n.X = Walk(n.X, fn).(ast.Expr)
n.Sel = Walk(n.Sel, fn).(*ast.Ident)
case *ast.IndexExpr:
n.X = Walk(n.X, fn).(ast.Expr)
n.Index = Walk(n.Index, fn).(ast.Expr)
case *ast.SliceExpr:
n.X = Walk(n.X, fn).(ast.Expr)
if n.Low != nil {
n.Low = Walk(n.Low, fn).(ast.Expr)
}
if n.High != nil {
n.High = Walk(n.High, fn).(ast.Expr)
}
if n.Max != nil {
n.Max = Walk(n.Max, fn).(ast.Expr)
}
case *ast.TypeAssertExpr:
n.X = Walk(n.X, fn).(ast.Expr)
if n.Type != nil {
n.Type = Walk(n.Type, fn).(ast.Expr)
}
case *ast.CallExpr:
n.Fun = Walk(n.Fun, fn).(ast.Expr)
walkExprList(n.Args, fn)
case *ast.StarExpr:
n.X = Walk(n.X, fn).(ast.Expr)
case *ast.UnaryExpr:
n.X = Walk(n.X, fn).(ast.Expr)
case *ast.BinaryExpr:
n.X = Walk(n.X, fn).(ast.Expr)
n.Y = Walk(n.Y, fn).(ast.Expr)
case *ast.KeyValueExpr:
n.Key = Walk(n.Key, fn).(ast.Expr)
n.Value = Walk(n.Value, fn).(ast.Expr)
// Types
case *ast.ArrayType:
if n.Len != nil {
n.Len = Walk(n.Len, fn).(ast.Expr)
}
n.Elt = Walk(n.Elt, fn).(ast.Expr)
case *ast.StructType:
n.Fields = Walk(n.Fields, fn).(*ast.FieldList)
case *ast.FuncType:
if n.Params != nil {
n.Params = Walk(n.Params, fn).(*ast.FieldList)
}
if n.Results != nil {
n.Results = Walk(n.Results, fn).(*ast.FieldList)
}
case *ast.InterfaceType:
n.Methods = Walk(n.Methods, fn).(*ast.FieldList)
case *ast.MapType:
n.Key = Walk(n.Key, fn).(ast.Expr)
n.Value = Walk(n.Value, fn).(ast.Expr)
case *ast.ChanType:
n.Value = Walk(n.Value, fn).(ast.Expr)
// Statements
case *ast.BadStmt:
// nothing to do
case *ast.DeclStmt:
n.Decl = Walk(n.Decl, fn).(ast.Decl)
case *ast.EmptyStmt:
// nothing to do
case *ast.LabeledStmt:
n.Label = Walk(n.Label, fn).(*ast.Ident)
n.Stmt = Walk(n.Stmt, fn).(ast.Stmt)
case *ast.ExprStmt:
n.X = Walk(n.X, fn).(ast.Expr)
case *ast.SendStmt:
n.Chan = Walk(n.Chan, fn).(ast.Expr)
n.Value = Walk(n.Value, fn).(ast.Expr)
case *ast.IncDecStmt:
n.X = Walk(n.X, fn).(ast.Expr)
case *ast.AssignStmt:
walkExprList(n.Lhs, fn)
walkExprList(n.Rhs, fn)
case *ast.GoStmt:
n.Call = Walk(n.Call, fn).(*ast.CallExpr)
case *ast.DeferStmt:
n.Call = Walk(n.Call, fn).(*ast.CallExpr)
case *ast.ReturnStmt:
walkExprList(n.Results, fn)
case *ast.BranchStmt:
if n.Label != nil {
n.Label = Walk(n.Label, fn).(*ast.Ident)
}
case *ast.BlockStmt:
walkStmtList(n.List, fn)
case *ast.IfStmt:
if n.Init != nil {
n.Init = Walk(n.Init, fn).(ast.Stmt)
}
n.Cond = Walk(n.Cond, fn).(ast.Expr)
n.Body = Walk(n.Body, fn).(*ast.BlockStmt)
if n.Else != nil {
n.Else = Walk(n.Else, fn).(ast.Stmt)
}
case *ast.CaseClause:
walkExprList(n.List, fn)
walkStmtList(n.Body, fn)
case *ast.SwitchStmt:
if n.Init != nil {
n.Init = Walk(n.Init, fn).(ast.Stmt)
}
if n.Tag != nil {
n.Tag = Walk(n.Tag, fn).(ast.Expr)
}
n.Body = Walk(n.Body, fn).(*ast.BlockStmt)
case *ast.TypeSwitchStmt:
if n.Init != nil {
n.Init = Walk(n.Init, fn).(ast.Stmt)
}
n.Assign = Walk(n.Assign, fn).(ast.Stmt)
n.Body = Walk(n.Body, fn).(*ast.BlockStmt)
case *ast.CommClause:
if n.Comm != nil {
n.Comm = Walk(n.Comm, fn).(ast.Stmt)
}
walkStmtList(n.Body, fn)
case *ast.SelectStmt:
n.Body = Walk(n.Body, fn).(*ast.BlockStmt)
case *ast.ForStmt:
if n.Init != nil {
n.Init = Walk(n.Init, fn).(ast.Stmt)
}
if n.Cond != nil {
n.Cond = Walk(n.Cond, fn).(ast.Expr)
}
if n.Post != nil {
n.Post = Walk(n.Post, fn).(ast.Stmt)
}
n.Body = Walk(n.Body, fn).(*ast.BlockStmt)
case *ast.RangeStmt:
if n.Key != nil {
n.Key = Walk(n.Key, fn).(ast.Expr)
}
if n.Value != nil {
n.Value = Walk(n.Value, fn).(ast.Expr)
}
n.X = Walk(n.X, fn).(ast.Expr)
n.Body = Walk(n.Body, fn).(*ast.BlockStmt)
// Declarations
case *ast.ImportSpec:
if n.Doc != nil {
n.Doc = Walk(n.Doc, fn).(*ast.CommentGroup)
}
if n.Name != nil {
n.Name = Walk(n.Name, fn).(*ast.Ident)
}
n.Path = Walk(n.Path, fn).(*ast.BasicLit)
if n.Comment != nil {
n.Comment = Walk(n.Comment, fn).(*ast.CommentGroup)
}
case *ast.ValueSpec:
if n.Doc != nil {
n.Doc = Walk(n.Doc, fn).(*ast.CommentGroup)
}
walkIdentList(n.Names, fn)
if n.Type != nil {
n.Type = Walk(n.Type, fn).(ast.Expr)
}
walkExprList(n.Values, fn)
if n.Comment != nil {
n.Comment = Walk(n.Comment, fn).(*ast.CommentGroup)
}
case *ast.TypeSpec:
if n.Doc != nil {
n.Doc = Walk(n.Doc, fn).(*ast.CommentGroup)
}
Walk(n.Name, fn)
Walk(n.Type, fn)
if n.Comment != nil {
n.Comment = Walk(n.Comment, fn).(*ast.CommentGroup)
}
case *ast.BadDecl:
// nothing to do
case *ast.GenDecl:
if n.Doc != nil {
n.Doc = Walk(n.Doc, fn).(*ast.CommentGroup)
}
for i, s := range n.Specs {
n.Specs[i] = Walk(s, fn).(ast.Spec)
}
case *ast.FuncDecl:
if n.Doc != nil {
n.Doc = Walk(n.Doc, fn).(*ast.CommentGroup)
}
if n.Recv != nil {
n.Recv = Walk(n.Recv, fn).(*ast.FieldList)
}
n.Name = Walk(n.Name, fn).(*ast.Ident)
n.Type = Walk(n.Type, fn).(*ast.FuncType)
if n.Body != nil {
n.Body = Walk(n.Body, fn).(*ast.BlockStmt)
}
// Files and packages
case *ast.File:
if n.Doc != nil {
n.Doc = Walk(n.Doc, fn).(*ast.CommentGroup)
}
n.Name = Walk(n.Name, fn).(*ast.Ident)
walkDeclList(n.Decls, fn)
// don't walk n.Comments - they have been
// visited already through the individual
// nodes
case *ast.Package:
for i, f := range n.Files {
n.Files[i] = Walk(f, fn).(*ast.File)
}
default:
panic(fmt.Sprintf("ast.Walk: unexpected node type %T", n))
}
fn(nil)
return rewritten
}
func walkIdentList(list []*ast.Ident, fn WalkFunc) {
for i, x := range list {
list[i] = Walk(x, fn).(*ast.Ident)
}
}
func walkExprList(list []ast.Expr, fn WalkFunc) {
for i, x := range list {
list[i] = Walk(x, fn).(ast.Expr)
}
}
func walkStmtList(list []ast.Stmt, fn WalkFunc) {
for i, x := range list {
list[i] = Walk(x, fn).(ast.Stmt)
}
}
func walkDeclList(list []ast.Decl, fn WalkFunc) {
for i, x := range list {
list[i] = Walk(x, fn).(ast.Decl)
}
} | vendor/github.com/fatih/astrewrite/astrewrite.go | 0.505371 | 0.665057 | astrewrite.go | starcoder |
package go_ehlers_indicators
import (
"fmt"
"math"
)
const (
M = iota
F
)
// MAMA from this paper: https://www.mesasoftware.com/papers/MAMA.pdf
func MAMAFAMA(vals []float64, fastLimit, slowLimit float64, which int) []float64 {
smooth := make([]float64, len(vals))
period := make([]float64, len(vals))
detrender := make([]float64, len(vals))
is := make([]float64, len(vals))
i2s := make([]float64, len(vals))
qs := make([]float64, len(vals))
q2s := make([]float64, len(vals))
res := make([]float64, len(vals))
ims := make([]float64, len(vals))
smoothPeriod := make([]float64, len(vals))
phase := make([]float64, len(vals))
mama := make([]float64, len(vals))
fama := make([]float64, len(vals))
for i := 0; i < len(vals); i++ {
if i <= 5 {
mama[i] = vals[i]
fama[i] = vals[i]
continue
}
smooth[i] = (4*vals[i] + 3*vals[i-1] + 2*vals[i-2] + vals[i-3]) / 10
detrender[i] = (0.0962*smooth[i] + 0.5769*smooth[i-2] - 0.5769*smooth[i-4] - 0.0962*smooth[i-6]) * (0.075*period[i-1] + 0.54)
// compute InPhase and Quadrature components
q1 := (0.0962*detrender[i] + 0.5769*detrender[i-2] - 0.5769*detrender[i-4] - 0.0962*detrender[i-6]) * (0.075*period[i-1] + 0.54)
qs[i] = q1
is[i] = detrender[i-3]
// Advance the phase of detrender and q1 by 90 Degrees
jI := (0.0962*is[i] + 0.05769*is[i-2] - 0.5769*is[i-4] - 0.0962*is[i-6]) * (0.075*period[i-1] + 0.54)
jQ := (0.0962*qs[i] + 0.5769*qs[i-2] - 0.5769*qs[i-4] - 0.0962*qs[i-6]) * (0.075*period[i-1] + 0.54)
// Phasor addition for 3 bar averaging
i2 := detrender[i-3] - jQ
i2s[i] = i2
q2 := q1 + jI
q2s[i] = q2
// smooth the I and Q components befor applying the discriminator
i2 = 0.2*i2 + 0.8*i2s[i-1]
i2s[i] = i2
q2 = 0.2*q2 + 0.8*qs[i-1]
q2s[i] = q2
// Homodyne Discriminator
re := i2*i2s[i-1] + q2*q2s[i-1]
res[i] = re
im := i2*q2s[i-1] - q2*i2s[i-1]
ims[i] = im
re = 0.2*re + 0.8*res[i-1]
im = 0.2*im + 0.8*ims[i-1]
if im != 0 && re != 0 {
period[i] = 360 / math.Atan(im/re)
}
if period[i] > 1.5*period[i-1] {
period[i] = 1.5 * period[i-1]
}
if period[i] < 0.67*period[i-1] {
period[i] = 0.67 * period[i-1]
}
if period[i] < 6 {
period[i] = 6
}
if period[i] > 50 {
period[i] = 50
}
period[i] = 0.2*period[i] + 0.8*period[i-1]
smoothPeriod[i] = 0.33*period[i] + 0.67*smoothPeriod[i-1]
if detrender[i-3] != 0 {
phase[i] = math.Atan(q1 / detrender[i-3])
}
deltaPhase := phase[i-1] - phase[i]
if deltaPhase < 1 {
deltaPhase = 1
}
alpha := fastLimit / deltaPhase
if alpha < slowLimit {
alpha = slowLimit
}
mama[i] = alpha*vals[i] + (1-alpha)*mama[i-1]
fama[i] = 0.5*alpha*mama[i] + (1-0.5*alpha)*fama[i-1]
}
if which == M {
return mama
} else if which == F {
return fama
}
fmt.Printf("Wrong 'which' parameter provided. accepted are M for MAMA or F for MAMA constants")
return nil
}
// MAMADefault provides a wrapper for MAMA with recommended fastsLimit of 0.5 and slowLimit of 0.05
func MAMADefault(vals []float64) []float64 {
fastLimit := 0.5
slowLimit := 0.05
return MAMA(vals, fastLimit, slowLimit)
}
// MAMA (MESA adaptive moving average)
func MAMA(vals []float64, fastLimit, slowLimit float64) []float64 {
return MAMAFAMA(vals, fastLimit, slowLimit, M)
} | mama.go | 0.577495 | 0.562657 | mama.go | starcoder |
package rango
func Cube(object *Object, material Material, side float64) {
var triangles = make([]Triangle, 0)
var halfSide float64 = side * 0.5
min := V(-halfSide, -halfSide, -halfSide)
max := V(halfSide, halfSide, halfSide)
var v0 Vector = Vector{}
var v1 Vector = Vector{}
var v2 Vector = Vector{}
/* front face */
v0 = V(min.X, min.Y, min.Z)
v1 = V(max.X, min.Y, min.Z)
v2 = V(max.X, max.Y, min.Z)
triangles = append(triangles, Triangle{v2, v1, v0})
v0 = V(max.X, max.Y, min.Z)
v1 = V(min.X, max.Y, min.Z)
v2 = V(max.X, min.Y, min.Z)
triangles = append(triangles, Triangle{v2, v1, v0})
/* right */
v0 = V(max.X, min.Y, min.Z)
v1 = V(max.X, min.Y, max.Z)
v2 = V(max.X, max.Y, max.Z)
triangles = append(triangles, Triangle{v2, v1, v0})
v0 = V(max.X, max.Y, max.Z)
v1 = V(max.X, max.Y, min.Z)
v2 = V(max.X, min.Y, min.Z)
triangles = append(triangles, Triangle{v2, v1, v0})
/* back */
v0 = V(max.X, min.Y, max.Z)
v1 = V(min.X, min.Y, max.Z)
v2 = V(min.X, max.Y, max.Z)
triangles = append(triangles, Triangle{v2, v1, v0})
v0 = V(min.X, max.Y, max.Z)
v1 = V(max.X, max.Y, max.Z)
v2 = V(max.X, min.Y, max.Z)
triangles = append(triangles, Triangle{v2, v1, v0})
/* left */
v0 = V(min.X, min.Y, max.Z)
v1 = V(min.X, min.Y, min.Z)
v2 = V(min.X, max.Y, min.Z)
triangles = append(triangles, Triangle{v2, v1, v0})
v0 = V(min.X, max.Y, min.Z)
v1 = V(min.X, max.Y, max.Z)
v2 = V(min.X, min.Y, max.Z)
triangles = append(triangles, Triangle{v2, v1, v0})
/* bottom */
v0 = V(min.X, min.Y, min.Z)
v1 = V(min.X, min.Y, max.Z)
v2 = V(max.X, min.Y, max.Z)
triangles = append(triangles, Triangle{v2, v1, v0})
v0 = V(max.X, min.Y, max.Z)
v1 = V(max.X, min.Y, min.Z)
v2 = V(min.X, min.Y, min.Z)
triangles = append(triangles, Triangle{v2, v1, v0})
/* top */
v0 = V(min.X, max.Y, min.Z)
v1 = V(max.X, max.Y, min.Z)
v2 = V(max.X, max.Y, max.Z)
triangles = append(triangles, Triangle{v2, v1, v0})
v0 = V(max.X, max.Y, max.Z)
v1 = V(min.X, max.Y, max.Z)
v2 = V(min.X, max.Y, min.Z)
triangles = append(triangles, Triangle{v2, v1, v0})
SetObject(object, material, 12, triangles)
} | rango/cube.go | 0.514644 | 0.437643 | cube.go | starcoder |
package imaging
import (
"image"
"image/color"
"github.com/disintegration/imaging"
)
// rawImg is the subset of mutable-image behaviour this package relies on:
// per-pixel writes plus an opacity query.
type rawImg interface {
	Set(x, y int, c color.Color)
	Opaque() bool
}
func isFullyTransparent(c color.Color) bool {
// TODO: This can be optimized by checking the color type and
// only extract the needed alpha value.
_, _, _, a := c.RGBA()
return a == 0
}
// FillImageTransparency fills in-place all the fully transparent pixels of the
// input image with the given color.
//
// Known mutable stdlib image types get a direct pixel-buffer fast path; any
// image type not listed in the switch is left untouched.
func FillImageTransparency(img image.Image, c color.Color) {
	var i rawImg
	bounds := img.Bounds()
	// Generic fallback: read every pixel through image.Image.At and overwrite
	// the fully transparent ones via the rawImg interface. Specific cases
	// below replace this closure with a faster buffer-walking variant.
	fillFunc := func() {
		for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
			for x := bounds.Min.X; x < bounds.Max.X; x++ {
				if isFullyTransparent(img.At(x, y)) {
					i.Set(x, y, c)
				}
			}
		}
	}
	switch raw := img.(type) {
	case *image.Alpha:
		i = raw
	case *image.Alpha16:
		i = raw
	case *image.Gray:
		// Gray images are always opaque, so the fill below is a no-op for them.
		i = raw
	case *image.Gray16:
		i = raw
	case *image.NRGBA:
		i = raw
		col := color.NRGBAModel.Convert(c).(color.NRGBA)
		// Fast path: 4 bytes per pixel (R, G, B, A); alpha at offset +3.
		fillFunc = func() {
			for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
				for x := bounds.Min.X; x < bounds.Max.X; x++ {
					// This i (a pixel offset) deliberately shadows the outer rawImg i.
					i := raw.PixOffset(x, y)
					if raw.Pix[i+3] == 0x00 {
						raw.Pix[i] = col.R
						raw.Pix[i+1] = col.G
						raw.Pix[i+2] = col.B
						raw.Pix[i+3] = col.A
					}
				}
			}
		}
	case *image.NRGBA64:
		i = raw
		col := color.NRGBA64Model.Convert(c).(color.NRGBA64)
		// Fast path: 8 bytes per pixel, big-endian channel pairs; alpha at +6/+7.
		fillFunc = func() {
			for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
				for x := bounds.Min.X; x < bounds.Max.X; x++ {
					i := raw.PixOffset(x, y)
					a := uint16(raw.Pix[i+6])<<8 | uint16(raw.Pix[i+7])
					if a == 0 {
						raw.Pix[i] = uint8(col.R >> 8)
						raw.Pix[i+1] = uint8(col.R)
						raw.Pix[i+2] = uint8(col.G >> 8)
						raw.Pix[i+3] = uint8(col.G)
						raw.Pix[i+4] = uint8(col.B >> 8)
						raw.Pix[i+5] = uint8(col.B)
						raw.Pix[i+6] = uint8(col.A >> 8)
						raw.Pix[i+7] = uint8(col.A)
					}
				}
			}
		}
	case *image.Paletted:
		i = raw
		// For paletted images it is enough to rewrite the transparent palette
		// entries; every pixel referencing them changes at once.
		fillFunc = func() {
			for i := range raw.Palette {
				if isFullyTransparent(raw.Palette[i]) {
					raw.Palette[i] = c
				}
			}
		}
	case *image.RGBA:
		i = raw
		col := color.RGBAModel.Convert(c).(color.RGBA)
		// Same layout as NRGBA (4 bytes/pixel), but alpha-premultiplied values.
		fillFunc = func() {
			for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
				for x := bounds.Min.X; x < bounds.Max.X; x++ {
					i := raw.PixOffset(x, y)
					if raw.Pix[i+3] == 0x00 {
						raw.Pix[i] = col.R
						raw.Pix[i+1] = col.G
						raw.Pix[i+2] = col.B
						raw.Pix[i+3] = col.A
					}
				}
			}
		}
	case *image.RGBA64:
		i = raw
		col := color.RGBA64Model.Convert(c).(color.RGBA64)
		// Same layout as NRGBA64 (8 bytes/pixel), alpha-premultiplied.
		fillFunc = func() {
			for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
				for x := bounds.Min.X; x < bounds.Max.X; x++ {
					i := raw.PixOffset(x, y)
					a := uint16(raw.Pix[i+6])<<8 | uint16(raw.Pix[i+7])
					if a == 0 {
						raw.Pix[i] = uint8(col.R >> 8)
						raw.Pix[i+1] = uint8(col.R)
						raw.Pix[i+2] = uint8(col.G >> 8)
						raw.Pix[i+3] = uint8(col.G)
						raw.Pix[i+4] = uint8(col.B >> 8)
						raw.Pix[i+5] = uint8(col.B)
						raw.Pix[i+6] = uint8(col.A >> 8)
						raw.Pix[i+7] = uint8(col.A)
					}
				}
			}
		}
	default:
		// Unknown image type: nothing we can safely mutate in place.
		return
	}
	// Opaque() is a cheap pre-check: if the image has no transparency at all
	// there is nothing to fill.
	if !i.Opaque() {
		fillFunc()
	}
}
// FillCenter creates an image with the specified dimensions and fills it with
// the centered and scaled source image.
//
// It delegates to github.com/disintegration/imaging, anchoring at the center
// and resampling with the Lanczos filter.
func FillCenter(img image.Image, w, h int) *image.NRGBA {
	return imaging.Fill(img, w, h, imaging.Center, imaging.Lanczos)
}
package stringsx
// NewSet builds a Set containing each of the given values exactly once.
func NewSet(values ...string) Set {
	set := make(Set, len(values))
	for _, v := range values {
		set[v] = struct{}{}
	}
	return set
}
// Set is a unique collection of strings. The internal order is not guaranteed.
// Because a map is a reference type, value-receiver methods below still
// mutate the shared underlying storage.
type Set map[string]struct{}
// Len returns the number of elements in the Set.
func (s Set) Len() int {
	return len(s)
}
// First returns an arbitrary element of a non-empty Set; it panics when the
// Set is empty. Because map iteration order is undefined, the result is only
// deterministic for single-element sets.
func (s Set) First() string {
	for elem := range s {
		return elem
	}
	// Reached only when the range above had nothing to yield.
	panic("calling First on empty set")
}
// Contains reports whether value is a member of the Set.
func (s Set) Contains(value string) bool {
	if _, ok := s[value]; ok {
		return true
	}
	return false
}
// All reports whether every element of the Set satisfies criteria.
// An empty Set vacuously satisfies any criteria.
func (s Set) All(criteria func(element string) bool) bool {
	for elem := range s {
		if !criteria(elem) {
			return false
		}
	}
	return true
}
// Any reports whether at least one element of the Set satisfies criteria.
// An empty Set never satisfies it.
func (s Set) Any(criteria func(element string) bool) bool {
	for elem := range s {
		if criteria(elem) {
			return true
		}
	}
	return false
}
// ContainsAll returns true if this Set is a super set of the given Set,
// i.e. every element of t is also present in s.
func (s Set) ContainsAll(t Set) bool {
	return t.All(s.Contains)
}
// ContainsAny returns true if this Set intersects with the given Set.
// An empty t has no elements to match, so the result is then false.
func (s Set) ContainsAny(t Set) bool {
	return t.Any(s.Contains)
}
// Equals returns true if the two Sets contain exactly the same elements.
// The cheap length comparison short-circuits the subset test.
func (s Set) Equals(t Set) bool {
	return s.Len() == t.Len() && s.ContainsAll(t)
}
// Add adds a single element to the Set. Adding an existing element is a no-op.
func (s Set) Add(elem string) {
	s[elem] = struct{}{}
}
// AddAll adds every given element to the Set, ignoring duplicates.
func (s Set) AddAll(elem ...string) {
	for _, e := range elem {
		s[e] = struct{}{}
	}
}
// Remove removes a single element from the Set. Removing an element that is
// not present is a no-op (built-in delete semantics).
func (s Set) Remove(elem string) {
	delete(s, elem)
}
// Array returns the elements of this Set in a slice. This method is not stable, because the
// internal iteration order is not guaranteed.
func (s Set) Array() []string {
var arr []string
for elem := range s {
arr = append(arr, elem)
}
return arr
} | stringsx/set.go | 0.87849 | 0.504272 | set.go | starcoder |
package go_kd_segment_tree
// TreeNode is implemented by every node kind the tree is built from
// (leaf, binary, hash and conjunction nodes — see NewNode below).
type TreeNode interface {
	// Search returns the payloads of the segments matching point p.
	Search(p Point) []interface{}
	// Insert adds a segment to this subtree.
	Insert(seg *Segment) error
	// SearchRect returns the payloads of the segments matching rect.
	SearchRect(rect Rect) []interface{}
	// Dumps returns a textual representation of the subtree; prefix is
	// prepended to each line (presumably for indentation — confirm).
	Dumps(prefix string) string
}
// NewNode recursively builds the tree node for the given segments.
//
// Construction strategy, in order:
//  1. No segments -> nil.
//  2. Few segments or maximum depth reached -> leaf of merged segments.
//  3. Pick the dimension whose split sheds the most segments; if the gain is
//     below the configured threshold, optionally try a conjunction
//     (inverted-index) node, else fall back to a leaf.
//  4. Otherwise branch: a binary node for real-valued dimensions, a hash
//     node for discrete ones, recursing into each partition at level+1.
func NewNode(segments []*Segment,
	tree *Tree,
	level int,
) TreeNode {
	if len(segments) == 0 {
		return nil
	}
	// Stop splitting when the payload is small enough or the tree is too deep.
	if len(segments) <= tree.options.LeafNodeDataMax || level >= tree.options.TreeLevelMax {
		mergedSegments := MergeSegments(segments)
		return &LeafNode{
			Segments: mergedSegments,
		}
	}
	dimName, decreasePercent := findBestBranchingDim(segments, tree.dimTypes)
	if decreasePercent < tree.options.BranchingDecreasePercentMin {
		// Splitting would not shed enough segments. A conjunction node may
		// still pay off if its inverted-index footprint is small enough
		// relative to dims * segments.
		if tree.options.ConjunctionTargetRateMin > 0 {
			conjunctionNode := NewConjunctionNode(tree, segments, nil, 1.0, level+1)
			conjunctionTargetRate := float64(conjunctionNode.MaxInvertNodeNum()) / float64(len(tree.dimTypes)*len(segments))
			if conjunctionTargetRate < tree.options.ConjunctionTargetRateMin {
				return conjunctionNode
			}
		}
		mergedSegments := MergeSegments(segments)
		return &LeafNode{
			Segments: mergedSegments,
		}
	}
	switch tree.dimTypes[dimName].Type {
	case DimTypeReal.Type:
		// Real-valued dimension: binary split into left/right, with segments
		// spanning the cut kept in a separate "pass" subtree.
		node, pass, left, right := NewBinaryNode(tree, segments, dimName, decreasePercent, level)
		if len(pass) > 0 {
			node.Pass = NewNode(pass, tree, level+1)
		}
		if len(left) > 0 {
			node.Left = NewNode(left, tree, level+1)
		}
		if len(right) > 0 {
			node.Right = NewNode(right, tree, level+1)
		}
		return node
	case DimTypeDiscrete.Type:
		// Discrete dimension: hash split, one child subtree per discrete key,
		// plus a "pass" subtree for segments not keyed on this dimension.
		node, passSegments, children := NewHashNode(tree, segments, dimName, decreasePercent, level)
		for childKey, childSegments := range children {
			node.child[childKey] = NewNode(childSegments, tree, level+1)
		}
		if len(passSegments) > 0 {
			node.pass = NewNode(passSegments, tree, level+1)
		}
		return node
	}
	// Unknown dimension type: no node can be built.
	return nil
}
func findBestBranchingDim(
segments []*Segment,
dimTypes map[interface{}]DimType,
) (interface{}, float64) {
if len(segments) == 0 {
return nil, 0
}
var maxDecreaseDimName interface{}
var maxDecrease int
for dimName, dimType := range dimTypes {
switch dimType.Type {
case DimTypeReal.Type:
decreaseC, _ := getRealDimSegmentsDecrease(segments, dimName)
if decreaseC > maxDecrease {
maxDecrease = decreaseC
maxDecreaseDimName = dimName
}
case DimTypeDiscrete.Type:
decreaseC, _ := getDiscreteDimSegmentsDecrease(segments, dimName)
if decreaseC > maxDecrease {
maxDecrease = decreaseC
maxDecreaseDimName = dimName
}
}
}
p := float64(maxDecrease) * 1.0 / float64(len(segments))
return maxDecreaseDimName, p
} | node.go | 0.655115 | 0.460228 | node.go | starcoder |
package types
import (
"time"
)
//------------------------------------------------------------------------------
// Metadata is an interface representing the metadata of a message part within
// a batch.
type Metadata interface {
	// Get returns a metadata value if a key exists, otherwise an empty string.
	Get(key string) string
	// Set sets the value of a metadata key. The Metadata return value allows
	// call chaining.
	Set(key, value string) Metadata
	// Delete removes the value of a metadata key. The Metadata return value
	// allows call chaining.
	Delete(key string) Metadata
	// Iter iterates each metadata key/value pair, stopping early if f returns
	// an error.
	Iter(f func(k, v string) error) error
	// Copy returns a copy of the metadata object that can be edited without
	// changing the contents of the original.
	Copy() Metadata
}
//------------------------------------------------------------------------------
// Part is an interface representing a message part. It contains a byte array
// of raw data, metadata, and lazily parsed formats of the payload such as JSON.
type Part interface {
	// Get returns a slice of bytes which is the underlying data of the part.
	// It is not safe to edit the contents of this slice directly, to make
	// changes to the contents of a part the data should be copied and set using
	// SetData.
	// NOTE(review): "SetData" appears to refer to the Set method below —
	// confirm and align the naming.
	Get() []byte
	// Metadata returns the metadata of a part.
	Metadata() Metadata
	// JSON attempts to parse the part as a JSON document and either returns the
	// result or an error. The resulting document is also cached such that
	// subsequent calls do not reparse the same data. If changes are made to the
	// document it must be set using SetJSON, otherwise the underlying byte
	// representation will not reflect the changes.
	JSON() (interface{}, error)
	// Set changes the underlying byte slice. The Part return value allows
	// call chaining.
	Set(d []byte) Part
	// SetMetadata changes the underlying metadata to a new object. The Part
	// return value allows call chaining.
	SetMetadata(m Metadata) Part
	// SetJSON attempts to marshal a JSON document into a byte slice and stores
	// the result as the new contents of the part. The document is cached such
	// that subsequent calls to JSON() receive it rather than reparsing the
	// resulting byte slice.
	SetJSON(doc interface{}) error
	// Copy creates a shallow copy of the message, where values and metadata can
	// be edited independently from the original version. However, editing the
	// byte slice contents will alter the contents of the original, and if
	// another process edits the bytes of the original it will also affect the
	// contents of this message.
	Copy() Part
	// DeepCopy creates a deep copy of the message part, where the contents are
	// copied and are therefore safe to edit without altering the original.
	DeepCopy() Part
}
//------------------------------------------------------------------------------
// Message is an interface representing a payload of data that was received from
// an input. Messages contain multiple parts, where each part is a byte array.
// If an input supports only single part messages they will still be read as
// multipart messages with one part. Multiple part messages are synonymous with
// batches, and it is up to each component within Benthos to work with a batch
// appropriately.
type Message interface {
	// Get attempts to access a message part from an index. If the index is
	// negative then the part is found by counting backwards from the last part
	// starting at -1. If the index is out of bounds then an empty part is
	// returned.
	Get(p int) Part
	// SetAll replaces all parts of a message with a new set.
	SetAll(parts []Part)
	// Append appends new message parts to the message and returns the index of
	// the last part to be added.
	Append(part ...Part) int
	// Len returns the number of parts this message contains.
	Len() int
	// Iter will iterate each message part in order, calling the closure
	// argument with the index and contents of the message part.
	Iter(f func(i int, part Part) error) error
	// LazyCondition lazily evaluates conditions on the message by caching the
	// results as per a label to identify the condition. The cache of results is
	// cleared whenever the contents of the message is changed.
	LazyCondition(label string, cond Condition) bool
	// Copy creates a shallow copy of the message, where the list of message
	// parts can be edited independently from the original version. However,
	// editing the byte array contents of a message part will alter the contents
	// of the original, and if another process edits the bytes of the original
	// it will also affect the contents of this message.
	Copy() Message
	// DeepCopy creates a deep copy of the message, where the message part
	// contents are entirely copied and are therefore safe to edit without
	// altering the original.
	DeepCopy() Message
	// CreatedAt returns the time at which the message was created.
	CreatedAt() time.Time
}
//------------------------------------------------------------------------------ | lib/types/message.go | 0.674265 | 0.62561 | message.go | starcoder |
package randomnames
// List of adjectives from https://www.d.umn.edu/~rave0029/research/adjectives1.txt
import (
"math/rand"
"sync"
)
// init caches the list length so the lookup functions do not recompute
// len(Adjectives) on every call.
func init() {
	adjSize = len(Adjectives)
}
// RandomAdjective returns a pseudo-random adjective from the list.
// NOTE(review): math/rand's default source is itself goroutine-safe, so the
// extra mutex in SafeRandomAdjective presumably guards something beyond this
// call — confirm before relying on the distinction.
func RandomAdjective() string {
	return Adjectives[rand.Intn(adjSize)]
}
// SafeRandomAdjective returns a pseudo-random adjective from the list while
// holding the package mutex.
//
// The previous manual Unlock (avoiding "the defer penalty") would leave the
// mutex locked forever if RandomAdjective panicked; defer has been open-coded
// and essentially free since Go 1.14, so it is both safer and just as fast.
func SafeRandomAdjective() string {
	adjSafety.Lock()
	defer adjSafety.Unlock()
	return RandomAdjective()
}
var (
adjSafety sync.Mutex
adjSize int
Adjectives []string = []string{
"Able",
"Above",
"Absent",
"Absolute",
"Abstract",
"Abundant",
"Academic",
"Acceptable",
"Accepted",
"Accessible",
"Accurate",
"Accused",
"Active",
"Actual",
"Acute",
"Added",
"Additional",
"Adequate",
"Adjacent",
"Administrative",
"Adorable",
"Advanced",
"Adverse",
"Advisory",
"Aesthetic",
"Afraid",
"African",
"Aggregate",
"Aggressive",
"Agreeable",
"Agreed",
"Agricultural",
"Alert",
"Alive",
"Alleged",
"Allied",
"Alone",
"Alright",
"Alternative",
"Amateur",
"Amazing",
"Ambitious",
"American",
"Amused",
"Ancient",
"Angry",
"Annoyed",
"Annual",
"Anonymous",
"Anxious",
"Appalling",
"Apparent",
"Applicable",
"Appropriate",
"Arab",
"Arbitrary",
"Architectural",
"Armed",
"Arrogant",
"Artificial",
"Artistic",
"Ashamed",
"Asian",
"Asleep",
"Assistant",
"Associated",
"Atomic",
"Attractive",
"Australian",
"Automatic",
"Autonomous",
"Available",
"Average",
"Awake",
"Aware",
"Awful",
"Awkward",
"Back",
"Bad",
"Balanced",
"Bare",
"Basic",
"Beautiful",
"Beneficial",
"Better",
"Bewildered",
"Big",
"Binding",
"Biological",
"Bitter",
"Bizarre",
"Black",
"Blank",
"Blind",
"Blonde",
"Bloody",
"Blue",
"Blushing",
"Boiling",
"Bold",
"Bored",
"Boring",
"Bottom",
"Brainy",
"Brave",
"Breakable",
"Breezy",
"Brief",
"Bright",
"Brilliant",
"British",
"Broad",
"Broken",
"Brown",
"Bumpy",
"Burning",
"Busy",
"Calm",
"Canadian",
"Capable",
"Capitalist",
"Careful",
"Casual",
"Catholic",
"Causal",
"Cautious",
"Central",
"Certain",
"Changing",
"Characteristic",
"Charming",
"Cheap",
"Cheerful",
"Chemical",
"Chief",
"Chilly",
"Chinese",
"Chosen",
"Christian",
"Chronic",
"Chubby",
"Circular",
"Civic",
"Civil",
"Civilian",
"Classic",
"Classical",
"Clean",
"Clear",
"Clever",
"Clinical",
"Close",
"Closed",
"Cloudy",
"Clumsy",
"Coastal",
"Cognitive",
"Coherent",
"Cold",
"Collective",
"Colonial",
"Colorful",
"Colossal",
"Coloured",
"Colourful",
"Combative",
"Combined",
"Comfortable",
"Coming",
"Commercial",
"Common",
"Communist",
"Compact",
"Comparable",
"Comparative",
"Compatible",
"Competent",
"Competitive",
"Complete",
"Complex",
"Complicated",
"Comprehensive",
"Compulsory",
"Conceptual",
"Concerned",
"Concrete",
"Condemned",
"Confident",
"Confidential",
"Confused",
"Conscious",
"Conservation",
"Conservative",
"Considerable",
"Consistent",
"Constant",
"Constitutional",
"Contemporary",
"Content",
"Continental",
"Continued",
"Continuing",
"Continuous",
"Controlled",
"Controversial",
"Convenient",
"Conventional",
"Convinced",
"Convincing",
"Cooing",
"Cool",
"Cooperative",
"Corporate",
"Correct",
"Corresponding",
"Costly",
"Courageous",
"Crazy",
"Creative",
"Creepy",
"Criminal",
"Critical",
"Crooked",
"Crowded",
"Crucial",
"Crude",
"Cruel",
"Cuddly",
"Cultural",
"Curious",
"Curly",
"Current",
"Curved",
"Cute",
"Daily",
"Damaged",
"Damp",
"Dangerous",
"Dark",
"Dead",
"Deaf",
"Deafening",
"Dear",
"Decent",
"Decisive",
"Deep",
"Defeated",
"Defensive",
"Defiant",
"Definite",
"Deliberate",
"Delicate",
"Delicious",
"Delighted",
"Delightful",
"Democratic",
"Dependent",
"Depressed",
"Desirable",
"Desperate",
"Detailed",
"Determined",
"Developed",
"Developing",
"Devoted",
"Different",
"Difficult",
"Digital",
"Diplomatic",
"Direct",
"Dirty",
"Disabled",
"Disappointed",
"Disastrous",
"Disciplinary",
"Disgusted",
"Distant",
"Distinct",
"Distinctive",
"Distinguished",
"Disturbed",
"Disturbing",
"Diverse",
"Divine",
"Dizzy",
"Domestic",
"Dominant",
"Double",
"Doubtful",
"Drab",
"Dramatic",
"Dreadful",
"Driving",
"Drunk",
"Dry",
"Dual",
"Due",
"Dull",
"Dusty",
"Dutch",
"Dying",
"Dynamic",
"Eager",
"Early",
"Eastern",
"Easy",
"Economic",
"Educational",
"Eerie",
"Effective",
"Efficient",
"Elaborate",
"Elated",
"Elderly",
"Eldest",
"Electoral",
"Electric",
"Electrical",
"Electronic",
"Elegant",
"Eligible",
"Embarrassed",
"Embarrassing",
"Emotional",
"Empirical",
"Empty",
"Enchanting",
"Encouraging",
"Endless",
"Energetic",
"English",
"Enormous",
"Enthusiastic",
"Entire",
"Entitled",
"Envious",
"Environmental",
"Equal",
"Equivalent",
"Essential",
"Established",
"Estimated",
"Ethical",
"Ethnic",
"European",
"Eventual",
"Everyday",
"Evident",
"Evil",
"Evolutionary",
"Exact",
"Excellent",
"Exceptional",
"Excess",
"Excessive",
"Excited",
"Exciting",
"Exclusive",
"Existing",
"Exotic",
"Expected",
"Expensive",
"Experienced",
"Experimental",
"Explicit",
"Extended",
"Extensive",
"External",
"Extra",
"Extraordinary",
"Extreme",
"Exuberant",
"Faint",
"Fair",
"Faithful",
"Familiar",
"Famous",
"Fancy",
"Fantastic",
"Far",
"Fascinating",
"Fashionable",
"Fast",
"Fat",
"Fatal",
"Favourable",
"Favourite",
"Federal",
"Fellow",
"Female",
"Feminist",
"Few",
"Fierce",
"Filthy",
"Final",
"Financial",
"Fine",
"Firm",
"Fiscal",
"Fit",
"Fixed",
"Flaky",
"Flat",
"Flexible",
"Fluffy",
"Fluttering",
"Flying",
"Following",
"Fond",
"Foolish",
"Foreign",
"Formal",
"Formidable",
"Forthcoming",
"Fortunate",
"Forward",
"Fragile",
"Frail",
"Frantic",
"Free",
"French",
"Frequent",
"Fresh",
"Friendly",
"Frightened",
"Front",
"Frozen",
"Full",
"Full-time",
"Fun",
"Functional",
"Fundamental",
"Funny",
"Furious",
"Future",
"Fuzzy",
"Gastric",
"General",
"Generous",
"Genetic",
"Gentle",
"Genuine",
"Geographical",
"German",
"Giant",
"Gigantic",
"Given",
"Glad",
"Glamorous",
"Gleaming",
"Global",
"Glorious",
"Golden",
"Good",
"Gorgeous",
"Gothic",
"Governing",
"Graceful",
"Gradual",
"Grand",
"Grateful",
"Greasy",
"Great",
"Greek",
"Green",
"Grey",
"Grieving",
"Grim",
"Gross",
"Grotesque",
"Growing",
"Grubby",
"Grumpy",
"Guilty",
"Handicapped",
"Handsome",
"Happy",
"Hard",
"Harsh",
"Head",
"Healthy",
"Heavy",
"Helpful",
"Helpless",
"Hidden",
"High",
"High-pitched",
"Hilarious",
"Hissing",
"Historic",
"Historical",
"Hollow",
"Holy",
"Homeless",
"Homely",
"Hon",
"Honest",
"Horizontal",
"Horrible",
"Hostile",
"Hot",
"Huge",
"Human",
"Hungry",
"Hurt",
"Hushed",
"Husky",
"Icy",
"Ideal",
"Identical",
"Ideological",
"Ill",
"Illegal",
"Imaginative",
"Immediate",
"Immense",
"Imperial",
"Implicit",
"Important",
"Impossible",
"Impressed",
"Impressive",
"Improved",
"Inadequate",
"Inappropriate",
"Inc",
"Inclined",
"Increased",
"Increasing",
"Incredible",
"Independent",
"Indian",
"Indirect",
"Individual",
"Industrial",
"Inevitable",
"Influential",
"Informal",
"Inherent",
"Initial",
"Injured",
"Inland",
"Inner",
"Innocent",
"Innovative",
"Inquisitive",
"Instant",
"Institutional",
"Insufficient",
"Intact",
"Integral",
"Integrated",
"Intellectual",
"Intelligent",
"Intense",
"Intensive",
"Interested",
"Interesting",
"Interim",
"Interior",
"Intermediate",
"Internal",
"International",
"Intimate",
"Invisible",
"Involved",
"Iraqi",
"Irish",
"Irrelevant",
"Islamic",
"Isolated",
"Israeli",
"Italian",
"Itchy",
"Japanese",
"Jealous",
"Jewish",
"Jittery",
"Joint",
"Jolly",
"Joyous",
"Judicial",
"Juicy",
"Junior",
"Just",
"Keen",
"Key",
"Kind",
"Known",
"Korean",
"Labour",
"Large",
"Large-scale",
"Late",
"Latin",
"Lazy",
"Leading",
"Left",
"Legal",
"Legislative",
"Legitimate",
"Lengthy",
"Lesser",
"Level",
"Lexical",
"Liable",
"Liberal",
"Light",
"Like",
"Likely",
"Limited",
"Linear",
"Linguistic",
"Liquid",
"Literary",
"Little",
"Live",
"Lively",
"Living",
"Local",
"Logical",
"Lonely",
"Long",
"Long-term",
"Loose",
"Lost",
"Loud",
"Lovely",
"Low",
"Loyal",
"Ltd",
"Lucky",
"Mad",
"Magenta",
"Magic",
"Magnetic",
"Magnificent",
"Magnificent�",
"Main",
"Major",
"Male",
"Mammoth",
"Managerial",
"Managing",
"Manual",
"Many",
"Marginal",
"Marine",
"Marked",
"Married",
"Marvellous",
"Marxist",
"Mass",
"Massive",
"Mathematical",
"Mature",
"Maximum",
"Mean",
"Meaningful",
"Mechanical",
"Medical",
"Medieval",
"Melodic",
"Melted",
"Mental",
"Mere",
"Metropolitan",
"Mid",
"Middle",
"Middle-class",
"Mighty",
"Mild",
"Military",
"Miniature",
"Minimal",
"Minimum",
"Ministerial",
"Minor",
"Miserable",
"Misleading",
"Missing",
"Misty",
"Mixed",
"Moaning",
"Mobile",
"Moderate",
"Modern",
"Modest",
"Molecular",
"Monetary",
"Monthly",
"Moral",
"Motionless",
"Muddy",
"Multiple",
"Mushy",
"Musical",
"Mute",
"Mutual",
"Mysterious",
"Naked",
"Narrow",
"Nasty",
"National",
"Native",
"Natural",
"Naughty",
"Naval",
"Near",
"Nearby",
"Neat",
"Necessary",
"Negative",
"Neighbouring",
"Nervous",
"Net",
"Neutral",
"New",
"Nice",
"Nineteenth-century",
"Noble",
"Noisy",
"Normal",
"Northern",
"Nosy",
"Notable",
"Novel",
"Nuclear",
"Numerous",
"Nursing",
"Nutritious",
"Nutty",
"Obedient",
"Objective",
"Obliged",
"Obnoxious",
"Obvious",
"Occasional",
"Occupational",
"Odd",
"Official",
"Ok",
"Okay",
"Old",
"Old-fashioned",
"Olympic",
"Only",
"Open",
"Operational",
"Opposite",
"Optimistic",
"Oral",
"Orange",
"Ordinary",
"Organic",
"Organisational",
"Original",
"Orthodox",
"Other",
"Outdoor",
"Outer",
"Outrageous",
"Outside",
"Outstanding",
"Overall",
"Overseas",
"Overwhelming",
"Painful",
"Pale",
"Palestinian",
"Panicky",
"Parallel",
"Parental",
"Parliamentary",
"Partial",
"Particular",
"Part-time",
"Passing",
"Passive",
"Past",
"Patient",
"Payable",
"Peaceful",
"Peculiar",
"Perfect",
"Permanent",
"Persistent",
"Personal",
"Petite",
"Philosophical",
"Physical",
"Pink",
"Plain",
"Planned",
"Plastic",
"Pleasant",
"Pleased",
"Poised",
"Polish",
"Polite",
"Political",
"Poor",
"Popular",
"Positive",
"Possible",
"Post-war",
"Potential",
"Powerful",
"Practical",
"Precious",
"Precise",
"Preferred",
"Pregnant",
"Preliminary",
"Premier",
"Prepared",
"Present",
"Presidential",
"Pretty",
"Previous",
"Prickly",
"Primary",
"Prime",
"Primitive",
"Principal",
"Printed",
"Prior",
"Private",
"Probable",
"Productive",
"Professional",
"Profitable",
"Profound",
"Progressive",
"Prominent",
"Promising",
"Proper",
"Proposed",
"Prospective",
"Protective",
"Protestant",
"Proud",
"Provincial",
"Psychiatric",
"Psychological",
"Public",
"Puny",
"Pure",
"Purple",
"Purring",
"Puzzled",
"Quaint",
"Qualified",
"Quick",
"Quickest",
"Quiet",
"Racial",
"Radical",
"Rainy",
"Random",
"Rapid",
"Rare",
"Raspy",
"Rational",
"Ratty",
"Raw",
"Ready",
"Real",
"Realistic",
"Rear",
"Reasonable",
"Recent",
"Red",
"Reduced",
"Redundant",
"Regional",
"Registered",
"Regular",
"Regulatory",
"Related",
"Relative",
"Relaxed",
"Relevant",
"Reliable",
"Relieved",
"Religious",
"Reluctant",
"Remaining",
"Remarkable",
"Remote",
"Renewed",
"Representative",
"Repulsive",
"Required",
"Resident",
"Residential",
"Resonant",
"Respectable",
"Respective",
"Responsible",
"Resulting",
"Retail",
"Retired",
"Revolutionary",
"Rich",
"Ridiculous",
"Right",
"Rigid",
"Ripe",
"Rising",
"Rival",
"Roasted",
"Robust",
"Rolling",
"Roman",
"Romantic",
"Rotten",
"Rough",
"Round",
"Royal",
"Rubber",
"Rude",
"Ruling",
"Running",
"Rural",
"Russian",
"Sacred",
"Sad",
"Safe",
"Salty",
"Satisfactory",
"Satisfied",
"Scared",
"Scary",
"Scattered",
"Scientific",
"Scornful",
"Scottish",
"Scrawny",
"Screeching",
"Secondary",
"Secret",
"Secure",
"Select",
"Selected",
"Selective",
"Selfish",
"Semantic",
"Senior",
"Sensible",
"Sensitive",
"Separate",
"Serious",
"Severe",
"Shaggy",
"Shaky",
"Shallow",
"Shared",
"Sharp",
"Sheer",
"Shiny",
"Shivering",
"Shocked",
"Short",
"Short-term",
"Shrill",
"Shy",
"Sick",
"Significant",
"Silent",
"Silky",
"Silly",
"Similar",
"Simple",
"Single",
"Skilled",
"Skinny",
"Sleepy",
"Slight",
"Slim",
"Slimy",
"Slippery",
"Slow",
"Small",
"Smart",
"Smiling",
"Smoggy",
"Smooth",
"So-called",
"Social",
"Socialist",
"Soft",
"Solar",
"Sole",
"Solid",
"Sophisticated",
"Sore",
"Sorry",
"Sound",
"Sour",
"Southern",
"Soviet",
"Spanish",
"Spare",
"Sparkling",
"Spatial",
"Special",
"Specific",
"Specified",
"Spectacular",
"Spicy",
"Spiritual",
"Splendid",
"Spontaneous",
"Sporting",
"Spotless",
"Spotty",
"Square",
"Squealing",
"Stable",
"Stale",
"Standard",
"Static",
"Statistical",
"Statutory",
"Steady",
"Steep",
"Sticky",
"Stiff",
"Still",
"Stingy",
"Stormy",
"Straight",
"Straightforward",
"Strange",
"Strategic",
"Strict",
"Striking",
"Striped",
"Strong",
"Structural",
"Stuck",
"Stupid",
"Subjective",
"Subsequent",
"Substantial",
"Subtle",
"Successful",
"Successive",
"Sudden",
"Sufficient",
"Suitable",
"Sunny",
"Super",
"Superb",
"Superior",
"Supporting",
"Supposed",
"Supreme",
"Sure",
"Surprised",
"Surprising",
"Surrounding",
"Surviving",
"Suspicious",
"Sweet",
"Swift",
"Swiss",
"Symbolic",
"Sympathetic",
"Systematic",
"Tall",
"Tame",
"Tan",
"Tart",
"Tasteless",
"Tasty",
"Technical",
"Technological",
"Teenage",
"Temporary",
"Tender",
"Tense",
"Terrible",
"Territorial",
"Testy",
"Then",
"Theoretical",
"Thick",
"Thin",
"Thirsty",
"Thorough",
"Thoughtful",
"Thoughtless",
"Thundering",
"Tight",
"Tiny",
"Tired",
"Top",
"Tory",
"Total",
"Tough",
"Toxic",
"Traditional",
"Tragic",
"Tremendous",
"Tricky",
"Tropical",
"Troubled",
"Turkish",
"Typical",
"Ugliest",
"Ugly",
"Ultimate",
"Unable",
"Unacceptable",
"Unaware",
"Uncertain",
"Unchanged",
"Uncomfortable",
"Unconscious",
"Underground",
"Underlying",
"Unemployed",
"Uneven",
"Unexpected",
"Unfair",
"Unfortunate",
"Unhappy",
"Uniform",
"Uninterested",
"Unique",
"United",
"Universal",
"Unknown",
"Unlikely",
"Unnecessary",
"Unpleasant",
"Unsightly",
"Unusual",
"Unwilling",
"Upper",
"Upset",
"Uptight",
"Urban",
"Urgent",
"Used",
"Useful",
"Useless",
"Usual",
"Vague",
"Valid",
"Valuable",
"Variable",
"Varied",
"Various",
"Varying",
"Vast",
"Verbal",
"Vertical",
"Very",
"Victorian",
"Victorious",
"Video-taped",
"Violent",
"Visible",
"Visiting",
"Visual",
"Vital",
"Vivacious",
"Vivid",
"Vocational",
"Voiceless",
"Voluntary",
"Vulnerable",
"Wandering",
"Warm",
"Wasteful",
"Watery",
"Weak",
"Wealthy",
"Weary",
"Wee",
"Weekly",
"Weird",
"Welcome",
"Well",
"Well-known",
"Welsh",
"Western",
"Wet",
"Whispering ",
"White",
"Whole",
"Wicked",
"Wide",
"Wide-eyed",
"Widespread",
"Wild",
"Willing",
"Wise",
"Witty",
"Wonderful",
"Wooden",
"Working",
"Working-class",
"Worldwide",
"Worried",
"Worrying",
"Worthwhile",
"Worthy",
"Written",
"Wrong",
"Yellow",
"Young",
"Yummy",
"Zany",
"Zealous",
}
) | adjectives.go | 0.55435 | 0.55911 | adjectives.go | starcoder |
package proto
const (
BackendSwagger = `{
"swagger": "2.0",
"info": {
"title": "backend.proto",
"version": "version not set"
},
"schemes": [
"http",
"https"
],
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"paths": {
"/v1/schema/{id}": {
"get": {
"summary": "Get Schema",
"operationId": "GetSchema",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/protoSchema"
}
}
},
"parameters": [
{
"name": "id",
"in": "path",
"required": true,
"type": "string"
}
],
"tags": [
"Backend"
]
}
},
"/v1/schema/{projectID}": {
"post": {
"summary": "Create Schema",
"operationId": "CreateSchema",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/protoSchema"
}
}
},
"parameters": [
{
"name": "projectID",
"in": "path",
"required": true,
"type": "string"
},
{
"name": "body",
"in": "body",
"required": true,
"schema": {
"$ref": "#/definitions/protoSchema"
}
}
],
"tags": [
"Backend"
]
}
},
"/v1/version": {
"get": {
"summary": "Version",
"operationId": "Version",
"responses": {
"200": {
"description": "A successful response.",
"schema": {
"$ref": "#/definitions/protoVersionResponse"
}
}
},
"tags": [
"Backend"
]
}
}
},
"definitions": {
"protoSchema": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"properties": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/protobufStruct"
}
},
"required": {
"type": "array",
"items": {
"type": "string"
}
},
"name": {
"type": "string"
},
"projectID": {
"type": "string"
},
"version": {
"type": "string"
}
}
},
"protoVersionResponse": {
"type": "object",
"properties": {
"value": {
"type": "string"
}
}
},
"protobufListValue": {
"type": "object",
"properties": {
"values": {
"type": "array",
"items": {
"$ref": "#/definitions/protobufValue"
},
"description": "Repeated field of dynamically typed values."
}
},
"description": ""ListValue" is a wrapper around a repeated field of values.\n\nThe JSON representation for "ListValue" is JSON array."
},
"protobufNullValue": {
"type": "string",
"enum": [
"NULL_VALUE"
],
"default": "NULL_VALUE",
"description": ""NullValue" is a singleton enumeration to represent the null value for the\n"Value" type union.\n\n The JSON representation for "NullValue" is JSON "null".\n\n - NULL_VALUE: Null value."
},
"protobufStruct": {
"type": "object",
"properties": {
"fields": {
"type": "object",
"additionalProperties": {
"$ref": "#/definitions/protobufValue"
},
"description": "Unordered map of dynamically typed values."
}
},
"description": ""Struct" represents a structured data value, consisting of fields\nwhich map to dynamically typed values. In some languages, "Struct"\nmight be supported by a native representation. For example, in\nscripting languages like JS a struct is represented as an\nobject. The details of that representation are described together\nwith the proto support for the language.\n\nThe JSON representation for "Struct" is JSON object."
},
"protobufValue": {
"type": "object",
"properties": {
"null_value": {
"$ref": "#/definitions/protobufNullValue",
"description": "Represents a null value."
},
"number_value": {
"type": "number",
"format": "double",
"description": "Represents a double value."
},
"string_value": {
"type": "string",
"description": "Represents a string value."
},
"bool_value": {
"type": "boolean",
"format": "boolean",
"description": "Represents a boolean value."
},
"struct_value": {
"$ref": "#/definitions/protobufStruct",
"description": "Represents a structured value."
},
"list_value": {
"$ref": "#/definitions/protobufListValue",
"description": "Represents a repeated "Value"."
}
},
"description": ""Value" represents a dynamically typed value which can be either\nnull, a number, a string, a boolean, a recursive struct value, or a\nlist of values. A producer of value is expected to set one of that\nvariants, absence of any variant indicates an error.\n\nThe JSON representation for "Value" is JSON value."
}
}
}
`
ChemaSwagger = `{
"swagger": "2.0",
"info": {
"title": "schema.proto",
"version": "version not set"
},
"schemes": [
"http",
"https"
],
"consumes": [
"application/json"
],
"produces": [
"application/json"
],
"paths": {},
"definitions": {}
}
`
) | proto/swagger.pb.go | 0.843638 | 0.402157 | swagger.pb.go | starcoder |
package internal
import (
"fmt"
"strconv"
)
// SeniorAge is the minimum age from which a Passenger is considered a senior
// to the BusCompany. Used by Passenger.IsSenior.
const SeniorAge = 65
// Passenger represents a bus passenger, uniquely identified by their SSN.
type Passenger struct {
	// SSN uniquely identifies the passenger; its last two digits double as
	// the passenger's age (see IsSenior).
	SSN string
	SeatNumber uint8
	Destination *BusStop
	// HasValidTicket is set by Charge once the fare has been paid.
	HasValidTicket bool
}
// Passengers represents a set of Passengers, using their SSN as key.
// Notice how the responsibility of handling passengers has shifted from Bus
// to Passengers. Because a map is a reference type, the value-receiver
// methods below still mutate the shared underlying storage.
type Passengers map[string]Passenger
// NewPassengerSet returns an empty, ready-to-use set of Passengers.
func NewPassengerSet() Passengers {
	return Passengers{}
}
// Add adds a Passenger to Passengers. An existing Passenger with the same
// SSN will be overwritten.
func (p Passengers) Add(newP Passenger) {
	p[newP.SSN] = newP
}
// Remove removes a Passenger from Passengers. Removing an unknown Passenger
// is a no-op (built-in delete semantics).
func (p Passengers) Remove(delP Passenger) {
	delete(p, delP.SSN)
}
// Visit calls visitor once for every Passenger in the set, in no particular
// order. Each call receives a copy; mutations do not reach the set.
func (p Passengers) Visit(visitor func(Passenger)) {
	for _, passenger := range p {
		visitor(passenger)
	}
}
// Find returns the Passenger with the given SSN. If none was found, an empty Passenger is returned.
func (p Passengers) Find(ssn string) Passenger {
	// A missing map key already yields the zero-value Passenger, which is
	// exactly the documented "not found" result.
	return p[ssn]
}
// VisitUpdate calls visitor for each Passenger in the set and writes any
// modifications back into the map. Updating their SSN's is not recommended:
// the map key is not re-derived, so a changed SSN leaves the entry stored
// under its old key.
func (p Passengers) VisitUpdate(visitor func(p *Passenger)) {
	for key, passenger := range p {
		visitor(&passenger)
		p[key] = passenger
	}
}
// Manifest returns the SSN's of all Passengers in the set.
func (p Passengers) Manifest() []string {
ssns := make([]string, 0, len(p))
p.Visit(func(p Passenger) { ssns = append(ssns, p.SSN) })
return ssns
}
// Charge prints a message that the Passenger has been charged "amount" money,
// and returns a copy with HasValidTicket = true. A Passenger who already
// holds a valid ticket is returned unchanged and is not charged again.
func (p Passenger) Charge(amount float64) Passenger {
	if p.HasValidTicket {
		// Already ticketed: nothing to charge.
		return p
	}
	fmt.Printf("Passenger with SSN %s: charged %.2f of arbitrary money\n", p.SSN, amount)
	charged := p
	charged.HasValidTicket = true
	return charged
}
// IsSenior returns true if the Passenger is a senior, and false otherwise.
// IsSenior detects age by extracting the last two digits from the SSN and treating them like an age.
func (p Passenger) IsSenior() bool {
age, err := strconv.ParseInt(p.SSN[len(p.SSN)-2:], 10, 8)
if err != nil {
panic("invalid SSN: " + p.SSN)
}
return age >= SeniorAge
} | bus-service/internal/passenger.go | 0.702428 | 0.508361 | passenger.go | starcoder |
package collision
import (
"github.com/teomat/mater/aabb"
"github.com/teomat/mater/transform"
"github.com/teomat/mater/vect"
"log"
"math"
)
type PolygonAxis struct {
// The axis normal.
N vect.Vect
D float64
}
type PolygonShape struct {
// The raw vertices of the polygon. Do not touch!
// Use polygon.SetVerts() to change this.
Verts Vertices
// The transformed vertices. Do not touch!
TVerts Vertices
// The axes of the polygon. Do not touch!
Axes []PolygonAxis
// The transformed axes of the polygon Do not touch!
TAxes []PolygonAxis
// The number of vertices. Do not touch!
NumVerts int
}
// NewPolygon creates a new Shape backed by a PolygonShape built from the
// given vertices offset by offset.
// Returns nil (after logging) if no vertices are passed.
func NewPolygon(verts Vertices, offset vect.Vect) *Shape {
	if verts == nil {
		log.Printf("Error: no vertices passed!")
		return nil
	}
	poly := new(PolygonShape)
	poly.SetVerts(verts, offset)
	return &Shape{ShapeClass: poly}
}
// SetVerts stores the given vertices translated by offset and precomputes one
// PolygonAxis (edge normal plus plane distance) per edge.
// A nil vertex slice is rejected with a log message; an invalid polygon (per
// Vertices.ValidatePolygon) only produces a warning and is still stored.
func (poly *PolygonShape) SetVerts(verts Vertices, offset vect.Vect) {
	if verts == nil {
		log.Printf("Error: no vertices passed!")
		return
	}
	if verts.ValidatePolygon() == false {
		log.Printf("Warning: vertices not valid")
	}
	numVerts := len(verts)
	oldnumVerts := len(poly.Verts)
	poly.NumVerts = numVerts
	// Only reallocate when the new polygon has more vertices than the
	// current backing storage; otherwise reslice to avoid garbage.
	if oldnumVerts < numVerts {
		//create new slices
		poly.Verts = make(Vertices, numVerts)
		poly.TVerts = make(Vertices, numVerts)
		poly.Axes = make([]PolygonAxis, numVerts)
		poly.TAxes = make([]PolygonAxis, numVerts)
	} else {
		//reuse old slices
		poly.Verts = poly.Verts[:numVerts]
		poly.TVerts = poly.TVerts[:numVerts]
		poly.Axes = poly.Axes[:numVerts]
		poly.TAxes = poly.TAxes[:numVerts]
	}
	for i := 0; i < numVerts; i++ {
		// Edge from vertex i to vertex i+1 (wrapping at the end).
		a := vect.Add(offset, verts[i])
		b := vect.Add(offset, verts[(i+1)%numVerts])
		// Axis normal is the normalized perpendicular of the edge vector;
		// D is the signed distance of the edge's plane from the origin.
		n := vect.Normalize(vect.Perp(vect.Sub(b, a)))
		poly.Verts[i] = a
		poly.Axes[i].N = n
		poly.Axes[i].D = vect.Dot(n, a)
	}
}
// Returns ShapeType_Polygon. Needed to implemet the ShapeClass interface.
func (poly *PolygonShape) ShapeType() ShapeType {
return ShapeType_Polygon
}
// update recomputes the transformed vertices and axes under the transform xf
// and returns the axis-aligned bounding box of the transformed vertices.
func (poly *PolygonShape) update(xf transform.Transform) aabb.AABB {
	//transform axes
	{
		src := poly.Axes
		dst := poly.TAxes
		for i := 0; i < poly.NumVerts; i++ {
			// Rotate the normal, then shift D by the projection of the
			// transform position onto the rotated normal.
			n := xf.RotateVect(src[i].N)
			dst[i].N = n
			dst[i].D = vect.Dot(xf.Position, n) + src[i].D
		}
	}
	//transform verts
	{
		// Start from an inverted-infinite box so the first vertex
		// initializes both corners.
		inf := math.Inf(1)
		// NOTE(review): this local deliberately shadows the imported
		// `aabb` package for the rest of the block.
		aabb := aabb.AABB{
			Lower: vect.Vect{inf, inf},
			Upper: vect.Vect{-inf, -inf},
		}
		src := poly.Verts
		dst := poly.TVerts
		for i := 0; i < poly.NumVerts; i++ {
			v := xf.TransformVect(src[i])
			dst[i] = v
			// Grow the box to cover the transformed vertex.
			aabb.Lower.X = math.Min(aabb.Lower.X, v.X)
			aabb.Upper.X = math.Max(aabb.Upper.X, v.X)
			aabb.Lower.Y = math.Min(aabb.Lower.Y, v.Y)
			aabb.Upper.Y = math.Max(aabb.Upper.Y, v.Y)
		}
		return aabb
	}
}
// Returns true if the given point is located inside the box.
func (poly *PolygonShape) TestPoint(point vect.Vect) bool {
return poly.ContainsVert(point)
}
// ContainsVert reports whether v lies on the inner side of every transformed
// axis, i.e. inside the polygon.
func (poly *PolygonShape) ContainsVert(v vect.Vect) bool {
	for i := range poly.TAxes {
		// Outside as soon as the projection exceeds the axis distance.
		if vect.Dot(poly.TAxes[i].N, v) > poly.TAxes[i].D {
			return false
		}
	}
	return true
}
// ContainsVertPartial is like ContainsVert but only tests the axes whose
// normals point in the same general direction as n (non-negative dot product).
func (poly *PolygonShape) ContainsVertPartial(v, n vect.Vect) bool {
	for i := range poly.TAxes {
		axis := poly.TAxes[i]
		// Skip axes facing away from the query direction.
		if vect.Dot(axis.N, n) < 0.0 {
			continue
		}
		if vect.Dot(axis.N, v)-axis.D > 0.0 {
			return false
		}
	}
	return true
}
func (poly *PolygonShape) ValueOnAxis(n vect.Vect, d float64) float64 {
verts := poly.TVerts
min := vect.Dot(n, verts[0])
for i := 1; i < poly.NumVerts; i++ {
min = math.Min(min, vect.Dot(n, verts[i]))
}
return min - d
} | collision/polygonShape.go | 0.857231 | 0.599749 | polygonShape.go | starcoder |
// Package counterpairpair is an example using go-frp and two counterpairs.
package counterpairpair
import (
"math/rand"
"github.com/gmlewis/go-frp/v2/examples/inception/counterpair"
h "github.com/gmlewis/go-frp/v2/html"
)
const max = 100
// MODEL
type Model struct {
first counterpair.Model
last counterpair.Model
}
// Init builds a Model from the four initial counter values, delegating each
// pair to counterpair.Init.
func Init(firstTop, firstBottom, lastTop, lastBottom int) Model {
	var m Model
	m.first = counterpair.Init(firstTop, firstBottom)
	m.last = counterpair.Init(lastTop, lastBottom)
	return m
}
// UPDATE
type Action func(Model) Model
func Updater(model Model) func(action Action) Model {
return func(action Action) Model { return model.Update(action) }
}
func (m Model) Update(action Action) Model { return action(m) }
func ResetAll(model Model) Model { return Init(0, 0, 0, 0) }
func RandomizeAll(model Model) Model {
return Init(rand.Intn(max), rand.Intn(max), rand.Intn(max), rand.Intn(max))
}
type CounterPairAction func(counterpair.Action) Model
func First(model Model) CounterPairAction {
return func(action counterpair.Action) Model {
return Model{
first: model.first.Update(action),
last: model.last,
}
}
}
func Last(model Model) CounterPairAction {
return func(action counterpair.Action) Model {
return Model{
first: model.first,
last: model.last.Update(action),
}
}
}
type WrapFunc func(model Model) interface{}
func identity(model Model) interface{} {
return model
}
func wrapper(model Model, wrapFunc WrapFunc) func(action Action) interface{} {
return func(action Action) interface{} {
newModel := model.Update(action)
return wrapFunc(newModel)
}
}
func firstWrapper(model Model, wrapFunc WrapFunc) counterpair.WrapFunc {
return func(cm counterpair.Model) interface{} {
newModel := Model{
first: cm,
last: model.last,
}
return wrapFunc(newModel)
}
}
func lastWrapper(model Model, wrapFunc WrapFunc) counterpair.WrapFunc {
return func(cm counterpair.Model) interface{} {
newModel := Model{
first: model.first,
last: cm,
}
return wrapFunc(newModel)
}
}
// VIEW
func (m Model) View(rootUpdateFunc, wrapFunc interface{}) h.HTML {
var wf WrapFunc
if wrapFunc == nil {
wf = identity
}
return h.Div(
m.first.View(rootUpdateFunc, firstWrapper(m, wf)),
m.last.View(rootUpdateFunc, lastWrapper(m, wf)),
h.Button(h.Text("Reset All")).OnClick(rootUpdateFunc, wrapper(m, wf), ResetAll),
h.Button(h.Text("Randomize All")).OnClick(rootUpdateFunc, wrapper(m, wf), RandomizeAll),
)
} | examples/inception/counterpairpair/counterpairpair.go | 0.702836 | 0.432782 | counterpairpair.go | starcoder |
package utils
//Represents a version vector
type VersionVector struct {
Val map[DCId]int64
}
// Create a new version vector
func NewVersionVector() *VersionVector {
return NewVersionVectorVV( nil)
}
// NewVersionVectorVV creates a new version vector that is a copy of the given
// version vector. A nil argument yields an empty vector.
func NewVersionVectorVV(otherVv *VersionVector) *VersionVector {
	vv := &VersionVector{Val: make(map[DCId]int64)}
	if otherVv != nil {
		for dc, ts := range otherVv.Val {
			vv.Val[dc] = ts
		}
	}
	return vv
}
// AddTS records the Timestamp in the version vector, keeping the largest
// timestamp seen per data center.
func (vv *VersionVector) AddTS(ts *Timestamp) {
	if cur, ok := vv.Val[ts.Dc]; !ok || cur < ts.Ts {
		vv.Val[ts.Dc] = ts.Ts
	}
}
// IncludesTS returns true if the given Timestamp is reflected in the version
// vector, i.e. the vector's entry for that data center is at least ts.Ts.
func (vv *VersionVector) IncludesTS(ts *Timestamp) bool {
	v, ok := vv.Val[ts.Dc]
	return ok && v >= ts.Ts
}
// SmallerOrEqual returns true if this version vector is pointwise smaller
// than or equal to otherVv. Every entry of this vector must exist in otherVv
// with a value at least as large. A nil otherVv only dominates an empty vector.
func (vv *VersionVector) SmallerOrEqual(otherVv *VersionVector) bool {
	if otherVv == nil {
		return vv.IsEmpty()
	}
	for dc, ts := range vv.Val {
		otherTs, present := otherVv.Val[dc]
		if !present || ts > otherTs {
			return false
		}
	}
	return true
}
// PointwiseMax merges otherVv into this vector, keeping the largest value for
// each entry. A nil otherVv is a no-op.
func (vv *VersionVector) PointwiseMax(otherVv *VersionVector) {
	if otherVv == nil {
		return
	}
	for dc, ts := range otherVv.Val {
		if cur, ok := vv.Val[dc]; !ok || ts > cur {
			vv.Val[dc] = ts
		}
	}
}
// RemoveIfLargerOrEqual removes an entry from this vector whenever otherVv
// carries a value for the same entry that is larger or equal.
// A nil otherVv is a no-op.
func (vv *VersionVector) RemoveIfLargerOrEqual(otherVv *VersionVector) {
	if otherVv == nil {
		return
	}
	for dc, otherTs := range otherVv.Val {
		if cur, ok := vv.Val[dc]; ok && otherTs >= cur {
			delete(vv.Val, dc)
		}
	}
}
// Returns true is this VersionVector has no entry
func (vv *VersionVector)IsEmpty() bool {
return vv.Val == nil || len(vv.Val) == 0
} | src/rockscrdtdb/utils/version_vector.go | 0.73307 | 0.421254 | version_vector.go | starcoder |
package gomultifast
import "sort"
// used as node id number, used to count nodes
var nodeID = 0
// edge indicating the next node
type edge struct {
alpha rune // Edge alpha. An alpha is a text character in the trie
next *node // Target node of the edge
}
// A pattern to be stored in the trie
type pattern struct {
Pstring string // String to add to trie
Ident string // String identifier
}
// A node in the trie structure
type node struct {
id int // Node id
final bool // Is this a "final" node, meaning this node is the endpoint of a search
failureNode *node // The "failure node", i.e. a node where the search can continue in case of a failed search
depth int // Distance between this node and the root
matchedPatterns []pattern // Slice of matched patterns at a node
outgoing []edge // Slice of outgoing edges
}
// Match contains all found matches with some details
type Match struct {
Patterns []pattern // Slice containing matched patterns in the text
position int // The end position of matching patterns in the text
}
// Alphabetical implements sort.Interface for []Edge based on alphabetical position of Edge.alpha
type Alphabetical []edge
func (e Alphabetical) Len() int { return len(e) }
func (e Alphabetical) Swap(i, j int) { e[i], e[j] = e[j], e[i] }
func (e Alphabetical) Less(i, j int) bool { return e[i].alpha < e[j].alpha }
// findNext returns the node reached through the outgoing edge labeled with
// the given alpha, or nil when no such edge exists.
func (nd *node) findNext(alpha rune) *node {
	for i := range nd.outgoing {
		if nd.outgoing[i].alpha == alpha {
			return nd.outgoing[i].next
		}
	}
	return nil
}
// createNext creates a fresh node reachable from nd via a new outgoing edge
// labeled alpha, and returns it.
// If an edge with that alpha already exists, nothing is created and nil is
// returned (callers must treat nil as "already present", not as an error).
func (nd *node) createNext(alpha rune) *node {
	next := nd.findNext(alpha)
	if next != nil {
		// The edge already exists
		return nil
	}
	// Otherwise register new node and edge
	next = nodeCreate()
	nd.registerOutgoingEdge(next, alpha)
	return next
}
// Make a new Edge with an alpha and a pointer to a node
func (nd *node) registerOutgoingEdge(next *node, alpha rune) {
newEdge := edge{alpha: alpha, next: next}
nd.outgoing = append(nd.outgoing, newEdge)
}
// Make a new node, and return a pointer to it
func nodeCreate() *node {
nd := node{id: nodeID}
nodeID++
return &nd
}
// hasPattern reports whether a pattern with the same Pstring is already
// registered at this node (the Ident field is deliberately not compared).
func (nd *node) hasPattern(newPattern *pattern) bool {
	for i := range nd.matchedPatterns {
		if nd.matchedPatterns[i].Pstring == newPattern.Pstring {
			return true
		}
	}
	return false
}
// If pattern doesn't already exist in the node, add it
func (nd *node) registerPattern(newPattern *pattern) {
// Check if the new pattern already exists in the node
if nd.hasPattern(newPattern) {
return
}
nd.matchedPatterns = append(nd.matchedPatterns, *newPattern)
}
// Sort the outgoing edges of a node alphabetically to enable binary search
func (nd *node) sortEdges() {
sort.Sort(Alphabetical(nd.outgoing))
}
// Perform a binary search for a given alpha among the outgoing edges of a node
func (nd *node) binarySearchNext(alpha rune) *node {
i := sort.Search(len(nd.outgoing), func(i int) bool { return nd.outgoing[i].alpha >= alpha })
if i < len(nd.outgoing) && nd.outgoing[i].alpha == alpha {
// alpha was found
return nd.outgoing[i].next
}
return nil
} | ac_node.go | 0.739799 | 0.518485 | ac_node.go | starcoder |
package square
// An item variation (i.e., product) in the Catalog object model. Each item may have a maximum of 250 item variations.
type CatalogItemVariation struct {
// The ID of the `CatalogItem` associated with this item variation.
ItemId string `json:"item_id,omitempty"`
// The item variation's name. This is a searchable attribute for use in applicable query filters, and its value length is of Unicode code points.
Name string `json:"name,omitempty"`
// The item variation's SKU, if any. This is a searchable attribute for use in applicable query filters.
Sku string `json:"sku,omitempty"`
// The universal product code (UPC) of the item variation, if any. This is a searchable attribute for use in applicable query filters. The value of this attribute should be a number of 12-14 digits long. This restriction is enforced on the Square Seller Dashboard, Square Point of Sale or Retail Point of Sale apps, where this attribute shows in the GTIN field. If a non-compliant UPC value is assigned to this attribute using the API, the value is not editable on the Seller Dashboard, Square Point of Sale or Retail Point of Sale apps unless it is updated to fit the expected format.
Upc string `json:"upc,omitempty"`
// The order in which this item variation should be displayed. This value is read-only. On writes, the ordinal for each item variation within a parent `CatalogItem` is set according to the item variations's position. On reads, the value is not guaranteed to be sequential or unique.
Ordinal int32 `json:"ordinal,omitempty"`
// Indicates whether the item variation's price is fixed or determined at the time of sale. See [CatalogPricingType](#type-catalogpricingtype) for possible values
PricingType string `json:"pricing_type,omitempty"`
PriceMoney *Money `json:"price_money,omitempty"`
// Per-location price and inventory overrides.
LocationOverrides []ItemVariationLocationOverrides `json:"location_overrides,omitempty"`
// If `true`, inventory tracking is active for the variation.
TrackInventory bool `json:"track_inventory,omitempty"`
// Indicates whether the item variation displays an alert when its inventory quantity is less than or equal to its `inventory_alert_threshold`. See [InventoryAlertType](#type-inventoryalerttype) for possible values
InventoryAlertType string `json:"inventory_alert_type,omitempty"`
// If the inventory quantity for the variation is less than or equal to this value and `inventory_alert_type` is `LOW_QUANTITY`, the variation displays an alert in the merchant dashboard. This value is always an integer.
InventoryAlertThreshold int64 `json:"inventory_alert_threshold,omitempty"`
// Arbitrary user metadata to associate with the item variation. This attribute value length is of Unicode code points.
UserData string `json:"user_data,omitempty"`
// If the `CatalogItem` that owns this item variation is of type `APPOINTMENTS_SERVICE`, then this is the duration of the service in milliseconds. For example, a 30 minute appointment would have the value `1800000`, which is equal to 30 (minutes) * 60 (seconds per minute) * 1000 (milliseconds per second).
ServiceDuration int64 `json:"service_duration,omitempty"`
// If the `CatalogItem` that owns this item variation is of type `APPOINTMENTS_SERVICE`, a bool representing whether this service is available for booking.
AvailableForBooking bool `json:"available_for_booking,omitempty"`
// List of item option values associated with this item variation. Listed in the same order as the item options of the parent item.
ItemOptionValues []CatalogItemOptionValueForItemVariation `json:"item_option_values,omitempty"`
// ID of the ‘CatalogMeasurementUnit’ that is used to measure the quantity sold of this item variation. If left unset, the item will be sold in whole quantities.
MeasurementUnitId string `json:"measurement_unit_id,omitempty"`
// Tokens of employees that can perform the service represented by this variation. Only valid for variations of type `APPOINTMENTS_SERVICE`.
TeamMemberIds []string `json:"team_member_ids,omitempty"`
} | square/model_catalog_item_variation.go | 0.850065 | 0.488527 | model_catalog_item_variation.go | starcoder |
package neat
import (
"context"
"fmt"
"github.com/pkg/errors"
"github.com/spf13/cast"
"github.com/yaricom/goNEAT/v2/neat/math"
"gopkg.in/yaml.v3"
"io"
"io/ioutil"
"strconv"
"strings"
)
// GenomeCompatibilityMethod defines the method to calculate genomes compatibility
type GenomeCompatibilityMethod string
const (
GenomeCompatibilityMethodLinear GenomeCompatibilityMethod = "linear"
GenomeCompatibilityMethodFast GenomeCompatibilityMethod = "fast"
)
// Validate is to check if this genome compatibility method supported by algorithm
func (g GenomeCompatibilityMethod) Validate() error {
if g != GenomeCompatibilityMethodLinear && g != GenomeCompatibilityMethodFast {
return errors.Errorf("unsupported genome compatibility method: [%s]", g)
}
return nil
}
// EpochExecutorType is to define the type of epoch evaluator
type EpochExecutorType string
const (
EpochExecutorTypeSequential EpochExecutorType = "sequential"
EpochExecutorTypeParallel EpochExecutorType = "parallel"
)
// Validate is to check is this executor type is supported by algorithm
func (e EpochExecutorType) Validate() error {
if e != EpochExecutorTypeSequential && e != EpochExecutorTypeParallel {
return errors.Errorf("unsupported epoch executor type: [%s]", e)
}
return nil
}
// Options The NEAT algorithm options.
type Options struct {
// Probability of mutating a single trait param
TraitParamMutProb float64 `yaml:"trait_param_mut_prob"`
// Power of mutation on a single trait param
TraitMutationPower float64 `yaml:"trait_mutation_power"`
// The power of a link weight mutation
WeightMutPower float64 `yaml:"weight_mut_power"`
// These 3 global coefficients are used to determine the formula for
// computing the compatibility between 2 genomes. The formula is:
// disjoint_coeff * pdg + excess_coeff * peg + mutdiff_coeff * mdmg.
// See the compatibility method in the Genome class for more info
// They can be thought of as the importance of disjoint Genes,
// excess Genes, and parametric difference between Genes of the
// same function, respectively.
DisjointCoeff float64 `yaml:"disjoint_coeff"`
ExcessCoeff float64 `yaml:"excess_coeff"`
MutdiffCoeff float64 `yaml:"mutdiff_coeff"`
// This global tells compatibility threshold under which
// two Genomes are considered the same species
CompatThreshold float64 `yaml:"compat_threshold"`
/* Globals involved in the epoch cycle - mating, reproduction, etc.. */
// How much does age matter? Gives a fitness boost up to some young age (niching).
// If it is 1, then young species get no fitness boost.
AgeSignificance float64 `yaml:"age_significance"`
// Percent of average fitness for survival, how many get to reproduce based on survival_thresh * pop_size
SurvivalThresh float64 `yaml:"survival_thresh"`
// Probabilities of a non-mating reproduction
MutateOnlyProb float64 `yaml:"mutate_only_prob"`
MutateRandomTraitProb float64 `yaml:"mutate_random_trait_prob"`
MutateLinkTraitProb float64 `yaml:"mutate_link_trait_prob"`
MutateNodeTraitProb float64 `yaml:"mutate_node_trait_prob"`
MutateLinkWeightsProb float64 `yaml:"mutate_link_weights_prob"`
MutateToggleEnableProb float64 `yaml:"mutate_toggle_enable_prob"`
MutateGeneReenableProb float64 `yaml:"mutate_gene_reenable_prob"`
MutateAddNodeProb float64 `yaml:"mutate_add_node_prob"`
MutateAddLinkProb float64 `yaml:"mutate_add_link_prob"`
// probability of mutation involving disconnected inputs connection
MutateConnectSensors float64 `yaml:"mutate_connect_sensors"`
// Probabilities of a mate being outside species
InterspeciesMateRate float64 `yaml:"interspecies_mate_rate"`
MateMultipointProb float64 `yaml:"mate_multipoint_prob"`
MateMultipointAvgProb float64 `yaml:"mate_multipoint_avg_prob"`
MateSinglepointProb float64 `yaml:"mate_singlepoint_prob"`
// Prob. of mating without mutation
MateOnlyProb float64 `yaml:"mate_only_prob"`
// Probability of forcing selection of ONLY links that are naturally recurrent
RecurOnlyProb float64 `yaml:"recur_only_prob"`
// Size of population
PopSize int `yaml:"pop_size"`
// Age when Species starts to be penalized
DropOffAge int `yaml:"dropoff_age"`
// Number of tries mutate_add_link will attempt to find an open link
NewLinkTries int `yaml:"newlink_tries"`
// Tells to print population to file every n generations
PrintEvery int `yaml:"print_every"`
// The number of babies to stolen off to the champions
BabiesStolen int `yaml:"babies_stolen"`
// The number of runs to average over in an experiment
NumRuns int `yaml:"num_runs"`
// The number of epochs (generations) to execute training
NumGenerations int `yaml:"num_generations"`
// The epoch's executor type to apply (sequential, parallel)
EpochExecutorType EpochExecutorType `yaml:"epoch_executor"`
// The genome compatibility testing method to use (linear, fast (make sense for large genomes))
GenCompatMethod GenomeCompatibilityMethod `yaml:"genome_compat_method"`
// The neuron nodes activation functions list to choose from
NodeActivators []math.NodeActivationType `yaml:"-"`
// The probabilities of selection of the specific node activator function
NodeActivatorsProb []float64 `yaml:"-"`
// NodeActivatorsWithProbs the list of supported node activation with probability of each one
NodeActivatorsWithProbs []string `yaml:"node_activators"`
// LogLevel the log output details level
LogLevel string `yaml:"log_level"`
}
// RandomNodeActivationType returns the next random node activation type among
// those registered with this context, selected according to the configured
// probabilities via a roulette throw.
func (c *Options) RandomNodeActivationType() (math.NodeActivationType, error) {
	// Fast path: a single registered activator needs no roulette throw.
	if len(c.NodeActivators) == 1 {
		return c.NodeActivators[0], nil
	}
	index := math.SingleRouletteThrow(c.NodeActivatorsProb)
	if index >= 0 && index < len(c.NodeActivators) {
		return c.NodeActivators[index], nil
	}
	return 0, fmt.Errorf("unexpected error when trying to find random node activator, activator index: %d", index)
}
// initNodeActivators populates NodeActivators and NodeActivatorsProb from the
// configured "node_activators" lines, each of the form "<name> <probability>"
// (e.g. "SigmoidSteepenedActivation 0.5").
// When no lines are configured it installs the default steepened sigmoid with
// selection probability 1.0.
// It returns an error for unknown activator names, unparsable probabilities,
// or malformed lines.
func (c *Options) initNodeActivators() (err error) {
	if len(c.NodeActivatorsWithProbs) == 0 {
		c.NodeActivators = []math.NodeActivationType{math.SigmoidSteepenedActivation}
		c.NodeActivatorsProb = []float64{1.0}
		return nil
	}
	// create activators
	actFns := c.NodeActivatorsWithProbs
	c.NodeActivators = make([]math.NodeActivationType, len(actFns))
	c.NodeActivatorsProb = make([]float64, len(actFns))
	for i, line := range actFns {
		fields := strings.Fields(line)
		// Guard against malformed entries: the previous code indexed
		// fields[0] and fields[1] unconditionally and panicked with an
		// index-out-of-range on short lines.
		if len(fields) != 2 {
			return fmt.Errorf("malformed node activator entry %q: want \"<name> <probability>\"", line)
		}
		if c.NodeActivators[i], err = math.NodeActivators.ActivationTypeFromName(fields[0]); err != nil {
			return err
		}
		prob, err := strconv.ParseFloat(fields[1], 64)
		if err != nil {
			return err
		}
		c.NodeActivatorsProb[i] = prob
	}
	return nil
}
// Validate is to validate that this options has valid values
func (c *Options) Validate() error {
if err := c.EpochExecutorType.Validate(); err != nil {
return err
}
if err := c.GenCompatMethod.Validate(); err != nil {
return err
}
return nil
}
// NeatContext is to get Context which carries NEAT options inside to be propagated
func (c *Options) NeatContext() context.Context {
return NewContext(context.Background(), c)
}
// LoadYAMLOptions is to load NEAT options encoded as YAML file
func LoadYAMLOptions(r io.Reader) (*Options, error) {
content, err := ioutil.ReadAll(r)
if err != nil {
return nil, err
}
// read options
var opts Options
if err = yaml.Unmarshal(content, &opts); err != nil {
return nil, errors.Wrap(err, "failed to decode NEAT options from YAML")
}
// initialize logger
if err = InitLogger(opts.LogLevel); err != nil {
return nil, errors.Wrap(err, "failed to initialize logger")
}
// read node activators
if err = opts.initNodeActivators(); err != nil {
return nil, errors.Wrap(err, "failed to read node activators")
}
if err = opts.Validate(); err != nil {
return nil, errors.Wrap(err, "invalid NEAT options")
}
return &opts, nil
}
// LoadNeatOptions Loads NEAT options configuration from provided reader encode in plain text format (.neat)
func LoadNeatOptions(r io.Reader) (*Options, error) {
c := &Options{}
// read configuration
var name string
var param string
for {
_, err := fmt.Fscanf(r, "%s %v\n", &name, ¶m)
if err == io.EOF {
break
} else if err != nil {
return nil, err
}
switch name {
case "trait_param_mut_prob":
c.TraitParamMutProb = cast.ToFloat64(param)
case "trait_mutation_power":
c.TraitMutationPower = cast.ToFloat64(param)
case "weight_mut_power":
c.WeightMutPower = cast.ToFloat64(param)
case "disjoint_coeff":
c.DisjointCoeff = cast.ToFloat64(param)
case "excess_coeff":
c.ExcessCoeff = cast.ToFloat64(param)
case "mutdiff_coeff":
c.MutdiffCoeff = cast.ToFloat64(param)
case "compat_threshold":
c.CompatThreshold = cast.ToFloat64(param)
case "age_significance":
c.AgeSignificance = cast.ToFloat64(param)
case "survival_thresh":
c.SurvivalThresh = cast.ToFloat64(param)
case "mutate_only_prob":
c.MutateOnlyProb = cast.ToFloat64(param)
case "mutate_random_trait_prob":
c.MutateRandomTraitProb = cast.ToFloat64(param)
case "mutate_link_trait_prob":
c.MutateLinkTraitProb = cast.ToFloat64(param)
case "mutate_node_trait_prob":
c.MutateNodeTraitProb = cast.ToFloat64(param)
case "mutate_link_weights_prob":
c.MutateLinkWeightsProb = cast.ToFloat64(param)
case "mutate_toggle_enable_prob":
c.MutateToggleEnableProb = cast.ToFloat64(param)
case "mutate_gene_reenable_prob":
c.MutateGeneReenableProb = cast.ToFloat64(param)
case "mutate_add_node_prob":
c.MutateAddNodeProb = cast.ToFloat64(param)
case "mutate_add_link_prob":
c.MutateAddLinkProb = cast.ToFloat64(param)
case "mutate_connect_sensors":
c.MutateConnectSensors = cast.ToFloat64(param)
case "interspecies_mate_rate":
c.InterspeciesMateRate = cast.ToFloat64(param)
case "mate_multipoint_prob":
c.MateMultipointProb = cast.ToFloat64(param)
case "mate_multipoint_avg_prob":
c.MateMultipointAvgProb = cast.ToFloat64(param)
case "mate_singlepoint_prob":
c.MateSinglepointProb = cast.ToFloat64(param)
case "mate_only_prob":
c.MateOnlyProb = cast.ToFloat64(param)
case "recur_only_prob":
c.RecurOnlyProb = cast.ToFloat64(param)
case "pop_size":
c.PopSize = cast.ToInt(param)
case "dropoff_age":
c.DropOffAge = cast.ToInt(param)
case "newlink_tries":
c.NewLinkTries = cast.ToInt(param)
case "print_every":
c.PrintEvery = cast.ToInt(param)
case "babies_stolen":
c.BabiesStolen = cast.ToInt(param)
case "num_runs":
c.NumRuns = cast.ToInt(param)
case "num_generations":
c.NumGenerations = cast.ToInt(param)
case "epoch_executor":
c.EpochExecutorType = EpochExecutorType(param)
case "genome_compat_method":
c.GenCompatMethod = GenomeCompatibilityMethod(param)
case "log_level":
c.LogLevel = param
default:
return nil, errors.Errorf("unknown configuration parameter found: %s = %s", name, param)
}
}
// initialize logger
if err := InitLogger(c.LogLevel); err != nil {
return nil, errors.Wrap(err, "failed to initialize logger")
}
if err := c.initNodeActivators(); err != nil {
return nil, err
}
if err := c.Validate(); err != nil {
return nil, err
}
return c, nil
} | neat/neat.go | 0.730866 | 0.411761 | neat.go | starcoder |
package graph
import "sort"
type NodeID int
// Node represents a node in a graph with access to its
// incident edges. If its in a directed graph, it also has access
// to its incoming/outgoing edges. It can also store a value.
type Node interface {
// GetID returns the node's unique identifier.
GetID() NodeID
// GetIncomingEdges returns the edges that are pointing to this node in a directed graph.
// The edges are sorted by NodeID on the other side of the incoming edge (ascending).
// In a undirected graph, this returns a "cannot use this method" error.
GetIncomingEdges() ([]Edge, error)
// GetOutgoingEdges returns the edges that are stemming from this node in a directed graph.
// The edges are sorted by NodeID on the other side of the outgoing edge (ascending).
// In a undirected graph, this returns a "cannot use this method" error
GetOutgoingEdges() ([]Edge, error)
// GetIncidentEdges returns all the edges that this node is an endpoint of (directed or undirected).
// If the edges are from an undirected graph, the nodes in each edge will be sorted by id (ascending).
// Then the edges are sorted by the first entry and the second entry in this NodeID slice (ascending).
// If the edges are from a directed graph, the incoming edges are first then the outgoing edges.
GetIncidentEdges() ([]Edge, error)
// GetValue return the value stored in this node.
// If there is no value then this returns a "no value" error.
GetValue() (interface{}, error)
removeRef() Node
}
type rawDirectedNode struct {
ID NodeID
Incoming []NodeID
Outgoing []NodeID
RawGraphRef *rawDirectedGraph
Value wrappedValue
}
func (rn rawDirectedNode) GetID() NodeID {
return rn.ID
}
// GetIncomingEdges returns the edges pointing at this node, resolving each
// one through the owning graph. It fails on the first edge lookup error.
func (rn rawDirectedNode) GetIncomingEdges() ([]Edge, error) {
	incoming := make([]Edge, 0, len(rn.Incoming))
	for _, fromID := range rn.Incoming {
		edge, err := rn.RawGraphRef.GetEdge(fromID, rn.ID)
		if err != nil {
			return nil, err
		}
		incoming = append(incoming, edge)
	}
	return incoming, nil
}
// GetOutgoingEdges returns the edges stemming from this node, resolving each
// one through the owning graph. It fails on the first edge lookup error.
func (rn rawDirectedNode) GetOutgoingEdges() ([]Edge, error) {
	outgoing := make([]Edge, 0, len(rn.Outgoing))
	for _, toID := range rn.Outgoing {
		edge, err := rn.RawGraphRef.GetEdge(rn.ID, toID)
		if err != nil {
			return nil, err
		}
		outgoing = append(outgoing, edge)
	}
	return outgoing, nil
}
// GetIncidentEdges returns the incoming edges followed by the outgoing edges
// of this directed node.
func (rn rawDirectedNode) GetIncidentEdges() ([]Edge, error) {
	incoming, err := rn.GetIncomingEdges()
	if err != nil {
		return nil, err
	}
	outgoing, err := rn.GetOutgoingEdges()
	if err != nil {
		return nil, err
	}
	// incoming is a freshly built slice, so appending in place is safe.
	return append(incoming, outgoing...), nil
}
func (rn rawDirectedNode) GetValue() (interface{}, error) {
if !rn.Value.HasValue {
return nil, noValueFoundInNodeError{rn.ID}
}
return rn.Value.RawValue, nil
}
func (rn rawDirectedNode) removeRef() Node {
rn.RawGraphRef = nil
return rn
}
type rawUndirectedNode struct {
ID NodeID
Neighbors []NodeID
RawGraphRef *rawUndirectedGraph
Value wrappedValue
}
func (rn rawUndirectedNode) GetID() NodeID {
return rn.ID
}
func (rn rawUndirectedNode) GetIncomingEdges() ([]Edge, error) {
return nil, cannotUseForUndirectedGraphError{"Node.GetIncomingEdges"}
}
func (rn rawUndirectedNode) GetOutgoingEdges() ([]Edge, error) {
return nil, cannotUseForUndirectedGraphError{"Node.GetOutgoingEdges"}
}
// GetIncidentEdges returns the edges between this node and each of its
// neighbors, resolved through the owning graph.
func (rn rawUndirectedNode) GetIncidentEdges() ([]Edge, error) {
	edges := make([]Edge, 0, len(rn.Neighbors))
	for _, neighborID := range rn.Neighbors {
		edge, err := rn.RawGraphRef.GetEdge(rn.ID, neighborID)
		if err != nil {
			return nil, err
		}
		edges = append(edges, edge)
	}
	return edges, nil
}
func (rn rawUndirectedNode) GetValue() (interface{}, error) {
if !rn.Value.HasValue {
return nil, noValueFoundInNodeError{rn.ID}
}
return rn.Value.RawValue, nil
}
func (rn rawUndirectedNode) removeRef() Node {
rn.RawGraphRef = nil
return rn
}
// sortNodes orders nodes in place by ID, ascending.
func sortNodes(nodes []Node) {
	sort.Slice(nodes, func(a, b int) bool {
		return nodes[a].GetID() < nodes[b].GetID()
	})
}
func sortNodeIDs(nodeIDs []NodeID) {
sort.Slice(nodeIDs, func(i, j int) bool {
return nodeIDs[i] < nodeIDs[j]
})
} | graph/node.go | 0.741206 | 0.595346 | node.go | starcoder |
package game
import (
"fmt"
"math"
"math/rand"
)
// Ball is the pong ball: a 2D position, a velocity in units per second,
// and a collision radius.
type Ball struct {
	Pos      [2]float64
	Velocity [2]float64
	radius   float64
}

// NewBall creates a ball at the origin launched in a random direction
// within ±45° of either horizontal, at a random initial speed.
func NewBall() Ball {
	b := Ball{Pos: [2]float64{0, 0}}
	// Direction in [-45°, +45°], mirrored to the left half of the field
	// 50% of the time.
	angle := rand.Float64()*90.0 - 45.0
	if rand.Float32() < 0.5 {
		angle += 180
	}
	rad := angle * math.Pi / 180
	speed := rand.Float64()*25 + 75 // initial speed lies in [75, 100) units
	b.Velocity = [2]float64{math.Cos(rad) * speed, math.Sin(rad) * speed}
	b.radius = 0.5 // must match the C renderer's radius to avoid wrong behavior
	return b
}
// Update advances the ball by dt seconds and handles scoring. Called
// every frame. When the ball crosses a side wall, the opposing player's
// score increases and resetFun is invoked with the next serve direction
// (-1 or 1). Reaching a score above 9 ends the game by clearing the
// package-level isRunning flag.
func (b *Ball) Update(dt float64, p []Player, resetFun func(i float64)) {
	// Left wall crossed while still moving left: right player scores.
	if b.Pos[0] < -32 && b.Velocity[0] < 0 {
		p[1].Score++
		resetFun(-1)
		fmt.Printf("%v : %v \n", p[0].Score, p[1].Score)
		if p[1].Score > 9 {
			fmt.Println("Player Right Won !")
			isRunning = false
		}
	}
	// Right wall crossed while still moving right: left player scores.
	if b.Pos[0] > 32 && b.Velocity[0] > 0 {
		p[0].Score++
		resetFun(1)
		fmt.Printf("%v : %v \n", p[0].Score, p[1].Score)
		if p[0].Score > 9 {
			fmt.Println("Player Left Won !")
			isRunning = false
		}
	}
	// Integrate position, then resolve wall/paddle collisions.
	b.Pos[0] += dt * b.Velocity[0]
	b.Pos[1] += dt * b.Velocity[1]
	b.resolveCollisions(p, dt)
}
// resolveCollisions bounces the ball off the floor/ceiling (y = ±18) and
// then checks both paddles.
func (b *Ball) resolveCollisions(p []Player, dt float64) {
	// Ceiling: only reflect while still moving upward, so the ball cannot
	// get stuck oscillating inside the wall.
	if b.Pos[1]+b.radius > 18 && b.Velocity[1] > 0 {
		b.Velocity[1] *= -1
	}
	// Floor: mirror of the ceiling case.
	if b.Pos[1]-b.radius < -18 && b.Velocity[1] < 0 {
		b.Velocity[1] *= -1
	}
	// Paddle collisions; a bounce also scales speed by reflectionGain.
	b.resolvePlayer(p[0], dt)
	b.resolvePlayer(p[1], dt)
}
// resolvePlayer reflects the ball off one paddle. It builds two lines
// (parallel to the ball's velocity, through the paddle's top and bottom
// inner corners) forming a "reflecting area" so a fast-moving ball that
// would tunnel past the paddle in one frame still counts as hit.
func (b *Ball) resolvePlayer(p Player, dt float64) {
	// Point on the ball's surface facing the paddle's side of the field.
	var ballSidePoint [2]float64
	if p.Pos[0] > 0 {
		ballSidePoint[0] = b.Pos[0] + b.radius
	} else {
		ballSidePoint[0] = b.Pos[0] - b.radius
	}
	ballSidePoint[1] = b.Pos[1]
	/*line A and B are determined by the Player's edge point and the Ball's
	Velocity, in order to determine Player reflecting area and
	prevent false scoring when Ball speed is too high*/
	var lineA, lineB line
	// Inner edge of the paddle (the face toward the middle of the field),
	// shifted first to its top corner for lineA.
	playerEdge := p.Pos
	if playerEdge[0] > 0 {
		playerEdge[0] -= p.width
	} else {
		playerEdge[0] += p.width
	}
	playerEdge[1] += p.length
	// lineA: slope follows the velocity; vertical velocity degenerates to
	// a horizontal line through the corner.
	if b.Velocity[0] != 0 {
		lineA.a = b.Velocity[1] / b.Velocity[0]
		lineA.b = -(lineA.a)*playerEdge[0] + playerEdge[1]
	} else {
		lineA.a = 0
		lineA.b = playerEdge[1]
	}
	// Move to the bottom corner for lineB.
	playerEdge[1] -= 2 * p.length
	if b.Velocity[0] != 0 {
		lineB.a = b.Velocity[1] / b.Velocity[0]
		lineB.b = -(lineB.a)*playerEdge[0] + playerEdge[1]
	} else {
		lineB.a = 0
		lineB.b = playerEdge[1]
	}
	ballBottom := [2]float64{b.Pos[0], b.Pos[1] - b.radius}
	ballTop := [2]float64{b.Pos[0], b.Pos[1] + b.radius}
	//checks if Ball is inside the Player reflecting area
	if ballBottom[1] <= lineA.a*ballBottom[0]+lineA.b &&
		ballTop[1] >= lineB.a*ballTop[0]+lineB.b {
		// Only reflect when moving toward the paddle, past its inner edge.
		if playerEdge[0] > 0 && ballSidePoint[0] >= playerEdge[0] && b.Velocity[0] > 0 {
			b.Velocity[0] *= -1 * reflectionGain
			b.Velocity[1] *= reflectionGain
		}
		if playerEdge[0] < 0 && ballSidePoint[0] <= playerEdge[0] && b.Velocity[0] < 0 {
			b.Velocity[0] *= -1 * reflectionGain
			b.Velocity[1] *= reflectionGain
		}
	}
}
package confusionmatrix
import (
"math"
"fmt"
)
type ConfusionMatrix struct {
TruePositives int
FalsePositives int
TrueNegatives int
FalseNegatives int
}
func (cm *ConfusionMatrix) Update(actual, predicted bool) {
if actual {
if predicted {
cm.TruePositives += 1
} else {
cm.FalseNegatives += 1
}
} else {
if predicted {
cm.FalsePositives += 1
} else {
cm. TrueNegatives += 1
}
}
}
func (cm *ConfusionMatrix) Positives() int {
return cm.TruePositives + cm.FalseNegatives
}
func (cm *ConfusionMatrix) Negatives() int {
return cm.TrueNegatives + cm.FalsePositives
}
func (cm *ConfusionMatrix) Trials() int {
return cm.Positives() + cm.Negatives()
}
func (cm *ConfusionMatrix) Accuracy() float64 {
return float64(cm.TruePositives + cm.TrueNegatives) / float64(cm.Trials())
}
func (cm *ConfusionMatrix) Precision() float64 {
return float64(cm.TruePositives) / float64(cm.TruePositives + cm.FalsePositives)
}
func (cm *ConfusionMatrix) Recall() float64 {
return float64(cm.TruePositives) / float64(cm.Positives())
}
func (cm *ConfusionMatrix) F(beta float64) float64 {
betaSqrd, p, r := math.Pow(beta, 2), cm.Precision(), cm.Recall()
return (1.0 + betaSqrd) * ((p * r) / ((betaSqrd * p) + r))
}
func (cm *ConfusionMatrix) FalsePositiveRate() float64 {
return float64(cm.FalsePositives) / float64(cm.Negatives())
}
func (cm *ConfusionMatrix) FalseNegativeRate() float64 {
return float64(cm.FalseNegatives) / float64(cm.Positives())
}
func (cm *ConfusionMatrix) PositivePredictiveValue() float64 {
return cm.Precision()
}
func (cm *ConfusionMatrix) FalseDiscoveryRate() float64 {
return float64(cm.FalsePositives) / float64(cm.TruePositives + cm.FalsePositives)
}
func (cm *ConfusionMatrix) NegativePredictiveValue() float64 {
return float64(cm.TrueNegatives) / float64(cm.TrueNegatives + cm.FalseNegatives)
}
func (cm *ConfusionMatrix) Sensitivity() float64 {
return cm.Recall()
}
func (cm *ConfusionMatrix) Specificity() float64 {
return float64(cm.TrueNegatives) / float64(cm.Negatives())
}
func (cm *ConfusionMatrix) BalancedClassificationRate() float64 {
return cm.Sensitivity() / cm.Specificity()
}
func (cm *ConfusionMatrix) MatthewsCorrelationCoefficient() float64 {
tp, tn, fp, fn := float64(cm.TruePositives), float64(cm.TrueNegatives), float64(cm.FalsePositives), float64(cm.FalseNegatives)
return ((tp * tn) - (fp * fn)) / math.Sqrt((tp + fp) * (tp + fn) * (tn + fp) * (tn + fn))
}
func (cm ConfusionMatrix) String() string {
return fmt.Sprintf("%d trials: TP=%d, FP=%d, FN=%d, TN=%d, acc=%3.2f, P=%3.2f, R=%3.2f, F1=%3.2f",
cm.Trials(),
cm.TruePositives,
cm.FalsePositives,
cm.FalseNegatives,
cm.TrueNegatives,
cm.Accuracy(),
cm.Precision(),
cm.Recall(),
cm.F(1.0))
} | confusionmatrix.go | 0.834946 | 0.461199 | confusionmatrix.go | starcoder |
package ent
import (
"fmt"
"strings"
"time"
"entgo.io/ent/dialect/sql"
"github.com/sundaytycoon/buttons-api/internal/storage/servicedb/ent/user"
)
// User is the model entity for the User schema.
// NOTE: this file is produced by ent codegen; manual comment edits will
// be lost on the next `ent generate` run.
type User struct {
	config `json:"-"`
	// ID of the ent.
	ID string `json:"id,omitempty"`
	// CreatedAt holds the value of the "created_at" field.
	// Time at which this row was first created.
	CreatedAt time.Time `json:"created_at,omitempty"`
	// CreatedBy holds the value of the "created_by" field.
	// Identity of whoever originally created this row.
	CreatedBy string `json:"created_by,omitempty"`
	// UpdatedAt holds the value of the "updated_at" field.
	// Time at which this row was last modified.
	UpdatedAt time.Time `json:"updated_at,omitempty"`
	// UpdatedBy holds the value of the "updated_by" field.
	// Identity of whoever last modified this row.
	UpdatedBy string `json:"updated_by,omitempty"`
	// Status holds the value of the "status" field.
	// Whether this user is currently valid in the service.
	Status user.Status `json:"status,omitempty"`
	// Type holds the value of the "type" field.
	// Whether the user is a regular user, an admin, staff, etc.
	Type user.Type `json:"type,omitempty"`
	// Signup holds the value of the "signup" field.
	// Whether the user has completed the sign-up process.
	Signup bool `json:"signup,omitempty"`
	// Username holds the value of the "username" field.
	// The user's name, unique within the service.
	Username string `json:"username,omitempty"`
	// Edges holds the relations/edges for other nodes in the graph.
	// The values are being populated by the UserQuery when eager-loading is set.
	Edges UserEdges `json:"edges"`
}
// UserEdges holds the relations/edges for other nodes in the graph.
type UserEdges struct {
	// Meta holds the value of the meta edge.
	Meta []*UserMeta `json:"meta,omitempty"`
	// OauthProviders holds the value of the oauth_providers edge.
	OauthProviders []*UserOAuthProvider `json:"oauth_providers,omitempty"`
	// Devices holds the value of the devices edge.
	Devices []*UserDevice `json:"devices,omitempty"`
	// loadedTypes holds the information for reporting if a
	// type was loaded (or requested) in eager-loading or not.
	// Index order: 0 = meta, 1 = oauth_providers, 2 = devices.
	loadedTypes [3]bool
}

// MetaOrErr returns the Meta value or an error if the edge
// was not loaded in eager-loading.
func (e UserEdges) MetaOrErr() ([]*UserMeta, error) {
	if e.loadedTypes[0] {
		return e.Meta, nil
	}
	return nil, &NotLoadedError{edge: "meta"}
}

// OauthProvidersOrErr returns the OauthProviders value or an error if the edge
// was not loaded in eager-loading.
func (e UserEdges) OauthProvidersOrErr() ([]*UserOAuthProvider, error) {
	if e.loadedTypes[1] {
		return e.OauthProviders, nil
	}
	return nil, &NotLoadedError{edge: "oauth_providers"}
}

// DevicesOrErr returns the Devices value or an error if the edge
// was not loaded in eager-loading.
func (e UserEdges) DevicesOrErr() ([]*UserDevice, error) {
	if e.loadedTypes[2] {
		return e.Devices, nil
	}
	return nil, &NotLoadedError{edge: "devices"}
}
// scanValues returns the types for scanning values from sql.Rows.
// Each requested column is paired with a fresh sql.Null* holder of the
// matching type; unknown columns are an error.
func (*User) scanValues(columns []string) ([]interface{}, error) {
	values := make([]interface{}, len(columns))
	for i := range columns {
		switch columns[i] {
		case user.FieldSignup:
			values[i] = new(sql.NullBool)
		case user.FieldID, user.FieldCreatedBy, user.FieldUpdatedBy, user.FieldStatus, user.FieldType, user.FieldUsername:
			values[i] = new(sql.NullString)
		case user.FieldCreatedAt, user.FieldUpdatedAt:
			values[i] = new(sql.NullTime)
		default:
			return nil, fmt.Errorf("unexpected column %q for type User", columns[i])
		}
	}
	return values, nil
}
// assignValues assigns the values that were returned from sql.Rows (after scanning)
// to the User fields. values must be the holders produced by scanValues
// for the same columns slice; invalid (NULL) columns leave the zero value.
func (u *User) assignValues(columns []string, values []interface{}) error {
	if m, n := len(values), len(columns); m < n {
		return fmt.Errorf("mismatch number of scan values: %d != %d", m, n)
	}
	for i := range columns {
		switch columns[i] {
		case user.FieldID:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field id", values[i])
			} else if value.Valid {
				u.ID = value.String
			}
		case user.FieldCreatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field created_at", values[i])
			} else if value.Valid {
				u.CreatedAt = value.Time
			}
		case user.FieldCreatedBy:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field created_by", values[i])
			} else if value.Valid {
				u.CreatedBy = value.String
			}
		case user.FieldUpdatedAt:
			if value, ok := values[i].(*sql.NullTime); !ok {
				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
			} else if value.Valid {
				u.UpdatedAt = value.Time
			}
		case user.FieldUpdatedBy:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field updated_by", values[i])
			} else if value.Valid {
				u.UpdatedBy = value.String
			}
		case user.FieldStatus:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field status", values[i])
			} else if value.Valid {
				u.Status = user.Status(value.String)
			}
		case user.FieldType:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field type", values[i])
			} else if value.Valid {
				u.Type = user.Type(value.String)
			}
		case user.FieldSignup:
			if value, ok := values[i].(*sql.NullBool); !ok {
				return fmt.Errorf("unexpected type %T for field signup", values[i])
			} else if value.Valid {
				u.Signup = value.Bool
			}
		case user.FieldUsername:
			if value, ok := values[i].(*sql.NullString); !ok {
				return fmt.Errorf("unexpected type %T for field username", values[i])
			} else if value.Valid {
				u.Username = value.String
			}
		}
	}
	return nil
}
// QueryMeta queries the "meta" edge of the User entity.
func (u *User) QueryMeta() *UserMetaQuery {
	return (&UserClient{config: u.config}).QueryMeta(u)
}

// QueryOauthProviders queries the "oauth_providers" edge of the User entity.
func (u *User) QueryOauthProviders() *UserOAuthProviderQuery {
	return (&UserClient{config: u.config}).QueryOauthProviders(u)
}

// QueryDevices queries the "devices" edge of the User entity.
func (u *User) QueryDevices() *UserDeviceQuery {
	return (&UserClient{config: u.config}).QueryDevices(u)
}

// Update returns a builder for updating this User.
// Note that you need to call User.Unwrap() before calling this method if this User
// was returned from a transaction, and the transaction was committed or rolled back.
func (u *User) Update() *UserUpdateOne {
	return (&UserClient{config: u.config}).UpdateOne(u)
}

// Unwrap unwraps the User entity that was returned from a transaction after it was closed,
// so that all future queries will be executed through the driver which created the transaction.
// It panics when the entity was not loaded through a transaction.
func (u *User) Unwrap() *User {
	tx, ok := u.config.driver.(*txDriver)
	if !ok {
		panic("ent: User is not a transactional entity")
	}
	u.config.driver = tx.drv
	return u
}
// String implements the fmt.Stringer.
// It renders all scalar fields for debugging/logging.
func (u *User) String() string {
	var builder strings.Builder
	builder.WriteString("User(")
	builder.WriteString(fmt.Sprintf("id=%v", u.ID))
	builder.WriteString(", created_at=")
	builder.WriteString(u.CreatedAt.Format(time.ANSIC))
	builder.WriteString(", created_by=")
	builder.WriteString(u.CreatedBy)
	builder.WriteString(", updated_at=")
	builder.WriteString(u.UpdatedAt.Format(time.ANSIC))
	builder.WriteString(", updated_by=")
	builder.WriteString(u.UpdatedBy)
	builder.WriteString(", status=")
	builder.WriteString(fmt.Sprintf("%v", u.Status))
	builder.WriteString(", type=")
	builder.WriteString(fmt.Sprintf("%v", u.Type))
	builder.WriteString(", signup=")
	builder.WriteString(fmt.Sprintf("%v", u.Signup))
	builder.WriteString(", username=")
	builder.WriteString(u.Username)
	builder.WriteByte(')')
	return builder.String()
}
// Users is a parsable slice of User.
type Users []*User

// config propagates the client configuration cfg to every User in the
// slice so their Query*/Update helpers can reach the driver.
func (u Users) config(cfg config) {
	for _i := range u {
		u[_i].config = cfg
	}
}
package getenv
import (
"fmt"
"os"
"reflect"
"sort"
"strconv"
"strings"
)
// isDigit reports whether b is an ASCII decimal digit.
func isDigit(b byte) bool {
	return b >= '0' && b <= '9'
}

// isUpper reports whether b is an ASCII uppercase letter.
func isUpper(b byte) bool {
	return b >= 'A' && b <= 'Z'
}

// isLower reports whether b is an ASCII lowercase letter.
func isLower(b byte) bool {
	return b >= 'a' && b <= 'z'
}

// isAlpha reports whether b is an ASCII letter of either case.
func isAlpha(b byte) bool {
	return isLower(b) || isUpper(b)
}
// parseInt parses s as a base-10 signed integer sized for the reflect
// kind k. Non-signed-integer kinds are a programming error and panic.
func parseInt(s string, k reflect.Kind) (int64, error) {
	var bits int
	switch k {
	case reflect.Int:
		bits = 0
	case reflect.Int8:
		bits = 8
	case reflect.Int16:
		bits = 16
	case reflect.Int32:
		bits = 32
	case reflect.Int64:
		bits = 64
	default:
		panic(fmt.Errorf("bug: unsupported integer types %v", k))
	}
	return strconv.ParseInt(s, 10, bits)
}

// parseUint parses s as a base-10 unsigned integer sized for the reflect
// kind k. Non-unsigned kinds are a programming error and panic.
func parseUint(s string, k reflect.Kind) (uint64, error) {
	var bits int
	switch k {
	case reflect.Uint:
		bits = 0
	case reflect.Uint8:
		bits = 8
	case reflect.Uint16:
		bits = 16
	case reflect.Uint32:
		bits = 32
	case reflect.Uint64, reflect.Uintptr:
		bits = 64
	default:
		panic(fmt.Errorf("bug: unsupported unsigned integer types %v", k))
	}
	return strconv.ParseUint(s, 10, bits)
}

// parseFloat parses s as a float sized for the reflect kind k.
// Non-float kinds are a programming error and panic.
func parseFloat(s string, k reflect.Kind) (float64, error) {
	var bits int
	switch k {
	case reflect.Float32:
		bits = 32
	case reflect.Float64:
		bits = 64
	default:
		panic(fmt.Errorf("bug: unsupported float types %v", k))
	}
	return strconv.ParseFloat(s, bits)
}
// ParseFunc converts the raw environment string envValue into the value
// pointed to by iv for the variable envName.
type ParseFunc func(iv interface{}, envName, envValue string) error

// defaultParseFunc is the ParseFunc used when the caller supplies none.
// iv must be a non-nil pointer to a string, bool, signed/unsigned
// integer or float; other pointees panic (they are filtered out earlier
// by checkValue, so reaching the panic indicates a bug).
func defaultParseFunc(iv interface{}, envName, envValue string) error {
	rv := reflect.ValueOf(iv)
	if rv.Kind() != reflect.Ptr {
		return ErrValue
	}
	elem := reflect.Indirect(rv)
	kind := elem.Kind()
	switch kind {
	case reflect.String:
		elem.SetString(envValue)
		return nil
	case reflect.Bool:
		v, err := strconv.ParseBool(envValue)
		if err != nil {
			return err
		}
		elem.SetBool(v)
		return nil
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		v, err := parseInt(envValue, kind)
		if err != nil {
			return err
		}
		elem.SetInt(v)
		return nil
	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
		reflect.Uint64, reflect.Uintptr:
		v, err := parseUint(envValue, kind)
		if err != nil {
			return err
		}
		elem.SetUint(v)
		return nil
	case reflect.Float32, reflect.Float64:
		v, err := parseFloat(envValue, kind)
		if err != nil {
			return err
		}
		elem.SetFloat(v)
		return nil
	default:
		panic(fmt.Errorf("bug: unsupported value types %v", kind))
	}
}

// CheckFunc validates the parsed value for envName; a non-nil error
// rejects the environment variable.
type CheckFunc func(iv interface{}, envName string) error

// defaultCheckFunc is the CheckFunc used when the caller supplies none.
func defaultCheckFunc(iv interface{}, envName string) error {
	// allow any value
	return nil
}
// ErrName reports an invalid environment variable name. A valid name
// matches [A-Za-z_][0-9A-Za-z_]*: a non-empty sequence of ASCII letters,
// digits and underscores that does not start with a digit.
//
// Fixed: the previous message claimed any printable ASCII without spaces
// or '=' was accepted, which is much looser than what checkName enforces.
var ErrName = fmt.Errorf("name must be a non-empty string of ascii letters, digits and underscores, not starting with a digit")

// checkName validates an environment variable name against the rule
// documented on ErrName.
func checkName(s string) error {
	// must be non-empty string
	if len(s) == 0 {
		return ErrName
	}
	// first character must be [A-Za-z_]
	if c := s[0]; !isAlpha(c) && c != '_' {
		return ErrName
	}
	// following characters must be [0-9A-Za-z_]
	for i := 1; i < len(s); i++ {
		c := s[i]
		if !isAlpha(c) && !isDigit(c) && c != '_' {
			return ErrName
		}
	}
	return nil
}
// ErrValue reports an unsupported destination for an environment value.
var ErrValue = fmt.Errorf("value must be non-nil pointer of following types: string, bool, uintptr, 8-64 bit int or uint and 32-64 bit float")

// checkValue verifies that v is a pointer to one of the supported kinds
// and returns the current pointed-to value (recorded as the default).
func checkValue(v interface{}) (interface{}, error) {
	rv := reflect.ValueOf(v)
	if rv.Kind() != reflect.Ptr {
		return nil, ErrValue
	}
	elem := reflect.Indirect(rv)
	switch elem.Kind() {
	case reflect.String, reflect.Bool,
		reflect.Int, reflect.Uint, reflect.Uintptr,
		reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
		reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
		reflect.Float32, reflect.Float64:
		return elem.Interface(), nil
	default:
		return nil, ErrValue
	}
}
// Env describes one registered environment variable: its name, help
// text, default value (captured at registration time), the destination
// pointer, whether it must be present, and its parse/check hooks.
type Env struct {
	Name string
	Description string
	DefaultValue interface{}
	Value interface{}
	Required bool
	Parse ParseFunc
	Check CheckFunc
}

// name2envs is the package-level registry populated by Set and consumed
// by Usage and Parse. It is not safe for concurrent use.
var name2envs = map[string]*Env{}

// ErrNameAlready is returned by Set when a name is registered twice.
var ErrNameAlready = fmt.Errorf("environment variable name is already registered")
// Set registers an environment variable to be read by the Parse
// function. name must be identifier-like (see checkName) and value a
// non-nil pointer to a supported type; the value it currently points to
// is recorded as the default. parsefn and checkfn are used as value
// parser and value checker; a nil function falls back to the default
// one. Registering an already-registered name fails with ErrNameAlready.
func Set(name, desc string, value interface{}, required bool, parsefn ParseFunc, checkfn CheckFunc) error {
	// Validate the arguments before touching the registry.
	if err := checkName(name); err != nil {
		return err
	}
	if v, ok := name2envs[name]; ok && v != nil {
		return fmt.Errorf("%w: %q already registered", ErrNameAlready, name)
	}
	defval, err := checkValue(value)
	if err != nil {
		return err
	}
	if parsefn == nil {
		parsefn = defaultParseFunc
	}
	if checkfn == nil {
		checkfn = defaultCheckFunc
	}
	name2envs[name] = &Env{
		Name:         name,
		Description:  desc,
		DefaultValue: defval,
		Value:        value,
		Required:     required,
		Parse:        parsefn,
		Check:        checkfn,
	}
	return nil
}
// UsageFunc receives one registered variable's name, description,
// default value and required flag.
type UsageFunc func(name, desc string, defval interface{}, required bool)

// Usage calls usagefn once per registered variable, in ascending name
// order, so a help listing can be printed deterministically.
func Usage(usagefn UsageFunc) {
	names := make([]string, 0, len(name2envs))
	for name := range name2envs {
		names = append(names, name)
	}
	sort.Strings(names)
	for _, name := range names {
		env := name2envs[name]
		usagefn(name, env.Description, env.DefaultValue, env.Required)
	}
}
// ErrEnvVar wraps parse or check failures for a defined variable.
var ErrEnvVar = fmt.Errorf("invalid environment variable")

// ErrNotDefined reports a required variable missing from the environment.
var ErrNotDefined = fmt.Errorf("required environment variable not defined")

// Parse reads every registered variable from the process environment.
// Values are trimmed of surrounding whitespace; an empty or absent
// variable keeps its default unless it is Required, which is an error.
func Parse() error {
	for name, env := range name2envs {
		if v := strings.TrimSpace(os.Getenv(name)); v != "" {
			if err := env.Parse(env.Value, name, v); err != nil {
				return fmt.Errorf("%w: %q %v", ErrEnvVar, name, err)
			} else if err = env.Check(env.Value, name); err != nil {
				return fmt.Errorf("%w: %q %v", ErrEnvVar, name, err)
			}
			continue
		} else if env.Required {
			return fmt.Errorf("%w: %q", ErrNotDefined, name)
		}
	}
	return nil
}
package day03
import (
"math"
"strconv"
"strings"
)
// Point is an integer 2D grid coordinate.
type Point struct {
	x int
	y int
}

// origin returns the grid origin (0, 0), where both wires start.
func origin() Point {
	return Point{}
}

// GridVal stores, per wire (index 0 and 1), the step count recorded for
// that wire at a cell; 0 means the wire has not visited the cell.
type GridVal [2]int

// Grid maps every visited cell to its per-wire step counts.
type Grid map[Point]GridVal

// grid_key builds the map key for the cell at (x, y).
func grid_key(x int, y int) Point {
	return Point{x: x, y: y}
}

// abs returns the absolute value of a.
func abs(a int) int {
	if a >= 0 {
		return a
	}
	return -a
}
// update records that wire `check` (0 or 1) reached cell (x, y) after
// `steps` total steps. While tracing the second wire it also updates
// *best (minimum Manhattan distance to the origin over intersections)
// and *best_step (minimum combined step count over intersections).
func update(x int, y int, check int, best *int, best_step *int, grid Grid, steps int) {
	key := grid_key(x, y)
	val := grid[key]
	// Intersection: wire 0 has been here and wire 1 has not yet recorded
	// this cell. Evaluated before the write below.
	intersection := val[1] == 0 && val[0] > 0
	val[check] = steps
	// NOTE(review): a wire revisiting its own cell overwrites the earlier
	// step count with the later one; confirm that matches the intended
	// "fewest combined steps" rule, which normally keeps the first visit.
	if check > 0 && key != origin() && intersection {
		dist := abs(x) + abs(y)
		if dist < *best {
			*best = dist
		}
		step_dist := val[0] + val[1]
		if step_dist < *best_step {
			*best_step = step_dist
		}
	}
	grid[key] = val
}
// parse_lines traces one wire's path description (e.g. "R8,U5,L5,D3"),
// marking every covered cell in grid via update. wire selects which slot
// (0 or 1) of each GridVal this wire writes. For the second wire the
// return value is the best intersection metric found so far: combined
// step count when part2 is true, Manhattan distance otherwise. For the
// first wire no intersections exist yet, so MaxInt32 is returned.
func parse_lines(input string, grid Grid, wire int, part2 bool) int {
	segments := strings.Split(input, ",")
	prev := Point{0,0}
	best := math.MaxInt32
	best_steps := math.MaxInt32
	steps := 0
	for _, seg := range segments {
		dir := seg[0]
		// Malformed distances silently parse to 0 (error is discarded).
		inc, _ := strconv.Atoi(seg[1:])
		switch dir {
		case 'U':
			for y := prev.y + 1; y <= prev.y + inc; y++ {
				steps++
				update(prev.x, y, wire, &best, &best_steps, grid, steps)
			}
			prev.y += inc
		case 'D':
			for y := prev.y - 1; y >= prev.y - inc; y-- {
				steps++
				update(prev.x, y, wire, &best, &best_steps, grid, steps)
			}
			prev.y -= inc
		case 'L':
			for x := prev.x - 1; x >= prev.x - inc; x-- {
				steps++
				update(x, prev.y, wire, &best, &best_steps, grid, steps)
			}
			prev.x -= inc
		case 'R':
			for x := prev.x + 1; x <= prev.x + inc; x++ {
				steps++
				update(x, prev.y, wire, &best, &best_steps, grid, steps)
			}
			prev.x += inc
		}
	}
	if part2 {
		return best_steps
	}
	return best
}
// Day03_part1_solve returns the smallest Manhattan distance from the
// origin to any point where the two wires cross.
func Day03_part1_solve(wire_a string, wire_b string) int {
	grid := make(Grid)
	parse_lines(wire_a, grid, 0, false)
	return parse_lines(wire_b, grid, 1, false)
}

// Day03_part2_solve returns the smallest combined number of steps both
// wires take to reach a crossing point.
func Day03_part2_solve(wire_a string, wire_b string) int {
	grid := make(Grid)
	parse_lines(wire_a, grid, 0, true)
	return parse_lines(wire_b, grid, 1, true)
}
package cflow
import (
"fmt"
)
// Event model interface
type Evt interface{
Vars() []Var // Return the list of needed variables.
Weight() float64 // Define the weight (from available variables).
}
// TreeVar groups the branch name and a value
// of the proper type, as needed by rtree.ReadVar.
type Var struct {
Name string // Name of the branch
Value interface{} // Pointer of the same type of the stored branch.
}
// Event yields type with both raw and
// weighted yields, and a name for a cut stage.
type yields struct {
Name string // Name of the cut stage.
Raw float64 // Raw yields.
Wgt float64 // Weighted yields, as defined by Evt.weight()
}
// cutFlow is a slice of Yields, once per cut.
type cutFlow []yields
// Cut contains the needed information
type Cut struct {
Name string // Name of the cut.
Sel func(e Evt) bool // Function defining the cut.
}
// newCutFlow creates a CutFlow object corresponding
// to a given cut sequence.
func newCutFlow(cuts []Cut) cutFlow {
cf := make([]yields, len(cuts))
for i, cut := range cuts {
cf[i].Name = cut.Name
}
return cf
}
// Print writes the cut-flow table to stdout: one row per cut with raw
// and weighted yields plus absolute (vs. first cut) and relative
// (vs. previous cut) efficiencies in percent.
func (cf cutFlow) Print() {
	// Table header
	ul25 := "--------------------------"
	ul30 := "-------------------------------"
	fmt.Printf("\n| %-25s| %-30s| %-30s|\n", "Cut name", "Raw Yields", "Weighted Yields")
	fmt.Printf( "| %-25s| %22s %6s | %22s %6s |\n", "", "Abs", "Rel", "Abs", "Rel")
	fmt.Printf("|%s|%s|%s|\n", ul25, ul30, ul30)
	// Print each cut yields
	for i, y := range cf {
		// The first row is its own reference, so its efficiencies read 100%.
		var yref yields
		switch i {
		case 0 : yref = cf[0]
		default: yref = cf[i-1]
		}
		absEff := efficiency(y, cf[0])
		relEff := efficiency(y, yref )
		fmt.Printf("| %-25s|%16.0f %4.0f%% %4.0f%% |%16.2f %4.0f%% %4.0f%% |\n",
			y.Name,
			y.Raw, absEff.Raw, relEff.Raw,
			y.Wgt, absEff.Wgt, relEff.Wgt,
		)
	}
	fmt.Printf("\n")
}
// efficiency expresses y as a percentage of the reference yields yref,
// field by field (raw and weighted). A zero reference yields ±Inf/NaN.
func efficiency(y, yref yields) yields {
	return yields{
		Name: y.Name,
		Raw: y.Raw / yref.Raw * 100,
		Wgt: y.Wgt / yref.Wgt * 100,
	}
}
package design
import (
"fmt"
"io"
"time"
"github.com/gregoryv/draw"
"github.com/gregoryv/draw/shape"
"github.com/gregoryv/draw/types/date"
)
// NewGanttChart returns a GanttChart spanning days from the given
// date. Panics if date cannot be resolved.
func NewGanttChart(from date.String, days int) *GanttChart {
	return newGanttChart(from.Time(), days)
}

// newGanttChart returns a chart showing days from optional
// start time. If no start is given, time.Now() is used.
func newGanttChart(start time.Time, days int) *GanttChart {
	d := &GanttChart{
		Diagram: NewDiagram(),
		start: start,
		days: days,
		tasks: make([]*Task, 0),
		padLeft: 16, // left margin in pixels
		padTop: 10, // top margin in pixels
		colSpace: 4, // gap between day/week columns
		rowSpace: 0, // extra gap between task rows
		Mark: time.Now(), // marker defaults to "today"
	}
	return d
}
// GanttChart renders tasks as horizontal bars on a day (or ISO-week)
// grid, starting at a given date.
type GanttChart struct {
	*Diagram
	start time.Time // first day shown
	days int // number of days covered
	tasks []*Task
	padLeft, padTop int // outer margins in pixels
	colSpace int // between day or week
	rowSpace int // between tasks
	// Set a marker at this date.
	Mark time.Time
	// Weeks switches columns from one-per-day to one-per-ISO-week.
	Weeks bool
}

// MarkDate moves the chart marker to the given date.
func (g *GanttChart) MarkDate(yyyymmdd date.String) {
	g.Mark = yyyymmdd.Time()
}
// isToday returns true if g.Mark falls on start + ndays (same calendar
// year and day-of-year). The name reflects Mark's default of time.Now().
func (g *GanttChart) isToday(ndays int) bool {
	t := g.start.AddDate(0, 0, ndays)
	sameYear := t.Year() == g.Mark.Year()
	sameDay := t.YearDay() == g.Mark.YearDay()
	return sameYear && sameDay
}

// Add creates a new task with the given text, appends it to the chart
// and returns it. Default color is green; position it with Place.
func (g *GanttChart) Add(txt string) *Task {
	task := NewTask(txt)
	g.tasks = append(g.tasks, task)
	return task
}
// Place returns an adjuster for positioning task on the chart's time
// axis. Note that Place does not add the task to the chart; use Add.
func (g *GanttChart) Place(task *Task) *GanttAdjuster {
	return &GanttAdjuster{
		start: g.start,
		task: task,
	}
}

// GanttAdjuster positions a single task, either at an absolute date or
// relative to another task.
type GanttAdjuster struct {
	start time.Time
	task *Task
}

// At sets the task to start at the given date and span days.
func (a *GanttAdjuster) At(from date.String, days int) {
	a.task.from = from.Time()
	a.task.to = from.Time().AddDate(0, 0, days)
}

// After makes the task start where parent ends and span days.
func (a *GanttAdjuster) After(parent *Task, days int) {
	a.task.from = parent.to
	a.task.to = parent.to.AddDate(0, 0, days)
}

// SetRowSpace sets the extra vertical spacing between task rows.
func (d *GanttChart) SetRowSpace(rowSpace int) {
	d.rowSpace = rowSpace
}
// WriteSVG renders the chart to w: the header (year, months and day or
// week columns), one text label per task, and one bar per task whose
// left edge and width are derived from the task's [from, to] dates.
func (d *GanttChart) WriteSVG(w io.Writer) error {
	columns := d.addHeader()
	bars := make([]*shape.Rect, len(d.tasks))
	start := d.padLeft + d.taskWidth()
	lineHeight := d.Diagram.Font.LineHeight
	headerHeight := d.padTop + lineHeight*3
	// Create one bar per task and place it on its row.
	for i, t := range d.tasks {
		rect := shape.NewRect("")
		rect.SetHeight(d.Diagram.Font.Height)
		rect.SetClass(t.class)
		bars[i] = rect
		d.drawTask(i, t)
		y := i*lineHeight + headerHeight + i*d.rowSpace
		d.Diagram.Place(rect).At(start, y)
	}
	// adjust the bars: align each bar's left edge to its start column and
	// accumulate its width over the columns its date span covers.
	for j, t := range d.tasks {
		var width int
		for i := 0; i < d.days; i++ {
			now := d.start.AddDate(0, 0, i)
			var col *shape.Label
			if d.Weeks {
				col = columns[i/7]
			} else {
				col = columns[i]
			}
			switch {
			case now.Equal(t.from):
				d.VAlignLeft(col, bars[j])
			case now.After(t.from) && now.Before(t.to) || now.Equal(t.to):
				// In week mode only count each week once (on Sundays).
				if !d.Weeks || (d.Weeks && now.Weekday() == time.Sunday) {
					width += col.Width()
					width += d.colSpace
				}
			}
		}
		if !d.Weeks {
			width -= d.colSpace
		}
		bars[j].SetWidth(width)
	}
	return d.Diagram.WriteSVG(w)
}
// addHeader draws the year label, month names, the day (or ISO-week)
// column labels, weekend shading and the Mark indicator. It returns the
// column labels so WriteSVG can align task bars to them.
func (d *GanttChart) addHeader() []*shape.Label {
	now := d.start
	year := shape.NewLabel(fmt.Sprintf("%v", now.Year()))
	d.Diagram.Place(year).At(d.padLeft, d.padTop)
	offset := d.padLeft + d.taskWidth()
	var lastDay *shape.Label
	columns := make([]*shape.Label, 0)
	var col *shape.Label
	for i := 0; i < d.days; i++ {
		day := now.Day()
		colName := day
		if d.Weeks {
			_, colName = now.ISOWeek()
		}
		wday := now.Weekday()
		// New column on every day in day mode; in week mode only on
		// Mondays or on the first of a month.
		if day == 1 || d.Weeks && wday == 1 || !d.Weeks {
			col = newCol(colName)
			columns = append(columns, col)
			if !d.Weeks && now.Weekday() == time.Saturday {
				// Shade Saturday and Sunday with one double-width rect
				// spanning the header and all task rows.
				bg := shape.NewRect("")
				bg.SetClass("weekend")
				bg.SetWidth((col.Width() + d.colSpace) * 2)
				bg.SetHeight(len(d.tasks)*col.Font.LineHeight +
					d.padTop +
					d.Diagram.Font.LineHeight +
					d.colSpace +
					len(d.tasks)*d.rowSpace)
				d.Diagram.Place(bg).RightOf(lastDay, d.colSpace)
				shape.Move(bg, -d.colSpace/2, d.colSpace)
			}
			if i == 0 {
				d.Diagram.Place(col).Below(year, d.colSpace)
				col.SetX(offset)
			} else {
				d.Diagram.Place(col).RightOf(lastDay, d.colSpace)
			}
			if day == 1 {
				// Month label above the first column of the month;
				// abbreviated to three letters in week mode.
				monthName := now.Month().String()
				if d.Weeks {
					monthName = monthName[:3]
				}
				label := shape.NewLabel(monthName)
				d.Diagram.Place(label).Above(col, d.colSpace)
			}
			lastDay = col
		}
		if d.isToday(i) {
			// Red dot marking d.Mark on its column.
			x, y := col.Position()
			mark := shape.NewCircle(4)
			mark.SetX(x + 1)
			mark.SetY(y - 1)
			mark.SetClass("fill-red")
			d.Diagram.Place(mark)
		}
		now = now.AddDate(0, 0, 1)
	}
	return columns
}
// drawTask draws the i:th task's text label in the left-hand column.
func (d *GanttChart) drawTask(i int, t *Task) {
	label := shape.NewLabel(t.txt)
	lineHeight := d.Diagram.Font.LineHeight
	headerHeight := d.padTop + lineHeight*3
	x := d.padLeft
	y := i*lineHeight + headerHeight - lineHeight/3 + i*d.rowSpace
	d.Diagram.Place(label).At(x, y)
}

// newCol creates a zero-padded, small-font column label.
func newCol(day int) *shape.Label {
	col := shape.NewLabel(fmt.Sprintf("%02v", day))
	col.Font.Height = 10
	return col
}

// SaveAs writes the rendered chart, with its style, to filename.
func (d *GanttChart) SaveAs(filename string) error {
	return saveAs(d, d.Diagram.Style, filename)
}

// Inline returns rendered SVG with inlined style
func (d *GanttChart) Inline() string {
	return draw.Inline(d, d.Diagram.Style)
}

// String returns rendered SVG
func (d *GanttChart) String() string { return toString(d) }

// taskWidth returns the width of the widest task label plus the left
// padding, i.e. the horizontal space reserved for the text column.
func (d *GanttChart) taskWidth() int {
	x := 0
	for _, t := range d.tasks {
		w := d.Diagram.Font.TextWidth(t.txt)
		if w > x {
			x = w
		}
	}
	return x + d.padLeft
}
// Task is the colorized span of a gantt chart: a text label, a date
// interval and the CSS class used to color its bar.
type Task struct {
	txt      string
	from, to time.Time
	class    string
}

// NewTask returns a task labelled txt, styled with the default green class.
func NewTask(txt string) *Task {
	t := &Task{txt: txt}
	t.class = "span-green"
	return t
}

// Red sets the task's class to span-red and returns the task.
func (t *Task) Red() *Task {
	t.class = "span-red"
	return t
}

// Blue sets the task's class to span-blue and returns the task.
func (t *Task) Blue() *Task {
	t.class = "span-blue"
	return t
}

// Yellow sets the task's class to span-yellow and returns the task.
func (t *Task) Yellow() *Task {
	t.class = "span-yellow"
	return t
}

// Orange sets the task's class to span-orange and returns the task.
func (t *Task) Orange() *Task {
	t.class = "span-orange"
	return t
}

// GetClass reports the CSS class currently assigned to the task.
func (t *Task) GetClass() string {
	return t.class
}
package main
import (
"fmt"
"log"
"os"
)
// Cypher maps one cell of the 5x5 Polybius square to its character,
// using 1-based row/column coordinates.
type Cypher struct {
	rowIndex int
	colIndex int
	char     rune
}

// cyphers returns the fixed 5x5 Polybius square used by the cipher,
// listed row by row.
func cyphers() []Cypher {
	const square = "BGWKZQPNDSIOAXEFCLUMTHYVR"
	table := make([]Cypher, 0, len(square))
	for i, ch := range square {
		table = append(table, Cypher{rowIndex: i/5 + 1, colIndex: i%5 + 1, char: ch})
	}
	return table
}
// charToCypher finds the square entry whose character equals input.
// It returns an error when input is not part of the square.
func charToCypher(input rune, cyphers []Cypher) (Cypher, error) {
	for _, entry := range cyphers {
		if entry.char == input {
			return entry, nil
		}
	}
	return Cypher{char: ' '}, fmt.Errorf("Input character not part of seeds")
}

// posToCypher finds the square entry at the 1-based position
// (rowNo, colNo). It returns an error when no entry matches.
func posToCypher(rowNo int, colNo int, cyphers []Cypher) (Cypher, error) {
	for _, entry := range cyphers {
		if entry.rowIndex == rowNo && entry.colIndex == colNo {
			return entry, nil
		}
	}
	return Cypher{char: ' '}, fmt.Errorf("Input rows and columns not part of seeds")
}
// planTextToCypherCoordinate looks up the square coordinates of every
// character of the plaintext, in order. The first unknown character
// aborts the conversion with an error.
func planTextToCypherCoordinate(input string, cyphers []Cypher) ([]Cypher, error) {
	coordinates := make([]Cypher, 0, len(input))
	for _, ch := range input {
		coordinate, err := charToCypher(ch, cyphers)
		if err != nil {
			return []Cypher{}, err
		}
		coordinates = append(coordinates, coordinate)
	}
	return coordinates, nil
}
// plainToCypherText encrypts input with a Bifid-style scheme: pairs of
// plaintext coordinates are recombined (rows with rows, columns with
// columns) and mapped back through the square.
//
// NOTE(review): the loop below advances index twice per iteration but
// only checks the bound once, so an odd-length input indexes one past
// the end of inputCoords and panics — confirm callers guarantee
// even-length input or add padding.
func plainToCypherText(input string, cyphers []Cypher) (string, error) {
	inputCoords, err := planTextToCypherCoordinate(input, cyphers)
	if err != nil {
		return "", err
	}
	flattenRow := []Cypher{}
	flattenCol := []Cypher{}
	for index := 0; index < len(inputCoords); index++ {
		curRow := inputCoords[index].rowIndex
		curCol := inputCoords[index].colIndex
		index++
		nextRow := inputCoords[index].rowIndex
		nextCol := inputCoords[index].colIndex
		// Row pair of adjacent characters becomes one cipher character…
		coordinateRow, err := posToCypher(curRow, nextRow, cyphers)
		if err != nil {
			return "", err
		}
		flattenRow = append(flattenRow, coordinateRow)
		// …and likewise for the column pair.
		coordinateCol, err := posToCypher(curCol, nextCol, cyphers)
		if err != nil {
			return "", err
		}
		flattenCol = append(flattenCol, coordinateCol)
	}
	// Ciphertext is all row-derived characters followed by all
	// column-derived ones.
	merged := []Cypher{}
	merged = append(merged, flattenRow...)
	merged = append(merged, flattenCol...)
	cypherText := []rune{}
	for _, item := range merged {
		cypherText = append(cypherText, item.char)
	}
	return string(cypherText), nil
}
// cypherToCypherCoordinate maps every rune of a cypher text to its Cypher
// grid coordinate, failing on the first rune missing from the cypher set.
func cypherToCypherCoordinate(input string, cyphers []Cypher) ([]Cypher, error) {
	result := make([]Cypher, 0, len(input))
	for _, r := range input {
		c, err := charToCypher(r, cyphers)
		if err != nil {
			return []Cypher{}, err
		}
		result = append(result, c)
	}
	return result, nil
}
// cypherToPlainText attempts to invert plainToCypherText: it rebuilds the
// interleaved row/column index streams from the cypher text and resolves
// each (row, col) pair back to a character.
//
// NOTE(review): the even-length branch below appends only rowIndex values
// to rows and never touches cols, so for any non-empty even-length input
// the posToCypher loop indexes cols[index] on an empty slice and panics.
// The intended reconstruction needs to be confirmed against
// plainToCypherText before this can be fixed.
func cypherToPlainText(input string, cyphers []Cypher) (string, error) {
	cypherCoords, err := cypherToCypherCoordinate(input, cyphers)
	if err != nil {
		return "", err
	}
	// Midpoint of the coordinate list, rounded up for odd lengths.
	startIndexForCol := (len(cypherCoords) - len(cypherCoords)%2)/2 + len(cypherCoords)%2
	rows := []int{}
	cols := []int{}
	if len(cypherCoords)%2 != 0 {
		// Odd length: the first half feeds rows (the middle element is
		// split between rows and cols), the second half feeds cols.
		for index := 0; index < startIndexForCol; index++{
			if index == startIndexForCol-1{
				rows = append(rows, cypherCoords[index].rowIndex)
				cols = append(cols, cypherCoords[index].colIndex)
			}else {
				rows = append(rows, cypherCoords[index].rowIndex)
				rows = append(rows, cypherCoords[index].colIndex)
			}
		}
		for index := startIndexForCol;index < len(cypherCoords); index++{
			cols = append(cols, cypherCoords[index].rowIndex)
			cols = append(cols, cypherCoords[index].colIndex)
		}
	}else{
		// NOTE(review): suspected bug — cols is never populated here; see
		// the function comment above.
		for index := 0; index < len(cypherCoords); index++{
			rows = append(rows, cypherCoords[index].rowIndex)
		}
	}
	// Pair rows[i] with cols[i] and resolve each pair to a cypher entry.
	coordinates := []Cypher{}
	for index := 0; index < len(rows); index++{
		coordinate, err := posToCypher(rows[index], cols[index], cyphers)
		if err != nil{
			return "", err
		}
		coordinates = append(coordinates, coordinate)
	}
	plainText := []rune{}
	for _, coordinate := range coordinates{
		plainText = append(plainText, coordinate.char)
	}
	return string(plainText), nil
}
// main validates that at least one command-line argument was supplied and
// aborts otherwise.
func main() {
	if len(os.Args) < 2 {
		log.Fatal("Minimum of two arguments")
	}
}
package vector
import "math"
// Vector represents a mathematical n-dimensional vector of float64
// components.
type Vector []float64
// Equal reports whether a and b have the same dimension and every pair of
// corresponding elements differs by at most eps.
func (a Vector) Equal(b Vector, eps float64) bool {
	if len(a) != len(b) {
		return false
	}
	for i, av := range a {
		if math.Abs(av-b[i]) > eps {
			return false
		}
	}
	return true
}
// Add returns the elementwise sum of a and b.
// Add panics if dimensions are different.
func (a Vector) Add(b Vector) Vector {
	if len(a) != len(b) {
		panic("different dimensions")
	}
	out := make(Vector, len(a))
	for i, av := range a {
		out[i] = av + b[i]
	}
	return out
}
// Negate returns a copy of a with the sign of every element flipped.
func (a Vector) Negate() Vector {
	out := make(Vector, len(a))
	for i, av := range a {
		out[i] = -av
	}
	return out
}
// Sub subtracts b from a elementwise.
// Sub panics if dimensions are different.
func (a Vector) Sub(b Vector) Vector {
	// Compute directly instead of a.Add(b.Negate()): same result and the
	// same panic on mismatched dimensions, but one allocation instead of two.
	if len(a) != len(b) {
		panic("different dimensions")
	}
	c := make(Vector, len(a))
	for i := range a {
		c[i] = a[i] - b[i]
	}
	return c
}
// HadamardProduct returns the elementwise product of a and b.
// HadamardProduct panics if dimensions are different.
func (a Vector) HadamardProduct(b Vector) Vector {
	if len(a) != len(b) {
		panic("different dimensions")
	}
	out := make(Vector, len(a))
	for i, av := range a {
		out[i] = av * b[i]
	}
	return out
}
// Sum returns the sum of all elements of a.
func (a Vector) Sum() float64 {
	total := 0.0
	for _, v := range a {
		total += v
	}
	return total
}
// Dot returns the dot product of a and b.
// Dot panics if dimensions are different.
func (a Vector) Dot(b Vector) float64 {
	// Accumulate directly rather than allocating an intermediate vector via
	// HadamardProduct(b).Sum(); the panic behavior on mismatched dimensions
	// is preserved (same message as HadamardProduct).
	if len(a) != len(b) {
		panic("different dimensions")
	}
	var sum float64
	for i := range a {
		sum += a[i] * b[i]
	}
	return sum
}
// Length returns the Euclidean length (L2 norm) of a.
func (a Vector) Length() float64 {
	return math.Sqrt(a.Dot(a))
}

// Lenght returns the length of a.
//
// Deprecated: the name is misspelled; use Length instead. Kept as an alias
// for backward compatibility with existing callers.
func (a Vector) Lenght() float64 {
	return a.Length()
}
// Angle returns the angle in radians between a and b.
// Angle panics if a or b has zero length.
func (a Vector) Angle(b Vector) float64 {
	la := a.Lenght()
	lb := b.Lenght()
	if la == 0 || lb == 0 {
		panic("vector is zero")
	}
	return math.Acos(a.Dot(b) / (la * lb))
}
// Cross computes the cross product of a and b.
// Cross panics if a and b are not 3D.
func (a Vector) Cross(b Vector) Vector {
if len(a) != 3 || len(b) != 3 {
panic("vectors are not 3D")
}
c := make(Vector, 3)
c[0] = a[1]*b[2] - a[2]*b[1]
c[1] = a[2]*b[0] - a[0]*b[2]
c[2] = a[0]*b[1] - a[1]*b[0]
return c
} | vector.go | 0.900964 | 0.765987 | vector.go | starcoder |
package main
/*
Problem: Decode XORed Array
An unknown integer array arr consists of n non-negative integers.
It was encoded into another integer array encoded of length n - 1, where
encoded[i] = arr[i] XOR arr[i + 1]. For example, arr = [1,0,2,1] encodes
to encoded = [1,2,3].
Given the encoded array and the first element of the original array,
first (arr[0]), decode and return the original array arr. It can be shown
that the answer exists and is unique.
Constraints:
2 <= n <= 10^4
encoded.length == n - 1
0 <= encoded[i] <= 10^5
0 <= first <= 10^5
Source: LeetCode
Link: https://leetcode-cn.com/problems/decode-xored-array
*/
/*
Approach 1: XOR
Time complexity: O(n)
Space complexity: O(1) (excluding the output array)
Runtime: 36 ms; Memory: 6.8 MB
*/
// decode reconstructs the original array from its XOR-encoded form and the
// known first element, using arr[i+1] = arr[i] ^ encoded[i].
func decode(encoded []int, first int) []int {
	arr := make([]int, len(encoded)+1)
	arr[0] = first
	for i := 0; i < len(encoded); i++ {
		arr[i+1] = arr[i] ^ encoded[i]
	}
	return arr
}
// countSubIslands counts the islands of grid2 whose every land cell is also
// land in grid1 (LeetCode 1905). Each grid is merged into a union-find whose
// water cells all join a sentinel component n*m.
//
// NOTE(review): after the fix below, the final comparison still matches
// component *roots* of two independent union-find structures against each
// other and inspects raw parent entries (which are not guaranteed to be
// fully compressed); this logic should be verified against the LeetCode
// test suite.
func countSubIslands(grid1 [][]int, grid2 [][]int) int {
	n, m := len(grid1), len(grid1[0])
	uf := [2]*Unionfind{NewUnionFind(n*m + 1), NewUnionFind(n*m + 1)}
	for i := range grid1 {
		for j := range grid1[0] {
			cur, right, down := i*m+j, i*m+j+1, (i+1)*m+j
			if grid1[i][j] == 0 {
				uf[0].Union(cur, n*m)
			} else {
				if i+1 < n && grid1[i+1][j] == 1 {
					uf[0].Union(cur, down)
				}
				if j+1 < m && grid1[i][j+1] == 1 {
					uf[0].Union(cur, right)
				}
			}
			if grid2[i][j] == 0 {
				uf[1].Union(cur, n*m)
			} else {
				if i+1 < n && grid2[i+1][j] == 1 {
					uf[1].Union(cur, down)
				}
				if j+1 < m && grid2[i][j+1] == 1 {
					uf[1].Union(cur, right)
				}
			}
		}
	}
	// valid[g] maps a parent id to the set of cells pointing at it.
	valid := [2]map[int]map[int]bool{}
	for i := range valid {
		valid[i] = make(map[int]map[int]bool, 0)
	}
	for i := 0; i < 2; i++ {
		// BUG FIX: the original ranged over uf[0].parent on both iterations,
		// so valid[1] was built from grid1's structure instead of grid2's.
		for j, v := range uf[i].parent {
			if v != m*n {
				if _, ok := valid[i][v]; !ok {
					valid[i][v] = make(map[int]bool)
				}
				valid[i][v][j] = true
			}
		}
	}
	ans := 0
flag:
	for k, check := range valid[1] {
		if _, ok := valid[0][k]; ok {
			// Every cell of the grid2 component must appear in the matching
			// grid1 component for it to count as a sub-island.
			for checkK := range check {
				if !valid[0][k][checkK] {
					continue flag
				}
			}
			ans++
		}
	}
	return ans
}
// Unionfind is a disjoint-set structure with union by size and full path
// compression.
type Unionfind struct {
	parent []int
	size   []int
}

// NewUnionFind returns a Unionfind over n singleton elements 0..n-1.
func NewUnionFind(n int) *Unionfind {
	uf := &Unionfind{
		parent: make([]int, n),
		size:   make([]int, n),
	}
	for i := 0; i < n; i++ {
		uf.parent[i] = i
		uf.size[i] = 1
	}
	return uf
}

// Union merges the sets containing x and y, attaching the smaller tree
// under the root of the larger one.
func (uf *Unionfind) Union(x, y int) {
	rx, ry := uf.Find(x), uf.Find(y)
	if rx == ry {
		return
	}
	if uf.size[rx] > uf.size[ry] {
		rx, ry = ry, rx
	}
	uf.parent[rx] = ry
	uf.size[ry] += uf.size[rx]
}

// Find returns the representative of x's set, re-pointing every node on the
// search path directly at the root (full path compression, done iteratively).
func (uf *Unionfind) Find(x int) int {
	root := x
	for uf.parent[root] != root {
		root = uf.parent[root]
	}
	for x != root {
		x, uf.parent[x] = uf.parent[x], root
	}
	return root
}
package rendering
import (
"image/color"
"math"
"github.com/veandco/go-sdl2/sdl"
"github.com/wdevore/RangerGo/api"
"github.com/wdevore/RangerGo/engine/geometry"
"github.com/wdevore/RangerGo/engine/maths"
)
// renderState is one snapshot of the mutable render settings (clear/draw
// colors and the current transform) for the context's Save/Restore stack.
type renderState struct {
	clearColor color.RGBA
	drawColor color.RGBA
	current api.IAffineTransform
}

// newRS returns a renderState initialized to a black clear color, a white
// draw color, and the identity transform.
func newRS() *renderState {
	o := new(renderState)
	o.clearColor = NewPaletteInt64(Black).Color()
	o.drawColor = NewPaletteInt64(White).Color()
	o.current = maths.NewTransform()
	return o
}
// renderContext is the concrete api.IRenderContext built on an SDL renderer.
// It owns the current draw/clear colors, the cumulative affine transform,
// and a fixed-depth save/restore stack.
type renderContext struct {
	world api.IWorld
	stack []*renderState // pre-allocated Save/Restore stack
	stackTop int // index of the next free stack slot
	clearColor color.RGBA
	drawColor color.RGBA
	windowSize api.IPoint
	current api.IAffineTransform // cumulative transform applied to points
	post api.IAffineTransform // Pre allocated cache
}

// stackDepth is the fixed capacity of the Save/Restore stack; Save indexes
// past the end (and panics) if more than stackDepth states are pushed.
const stackDepth = 100

// Scratch working variables
// v1/v2 are shared package-level scratch points used by the render helpers
// below; this avoids per-call allocations but makes them non-reentrant.
var v1 = geometry.NewPoint()
var v2 = geometry.NewPoint()
// NewRenderContext constructs an IRenderContext object bound to the given
// world, with an orange clear color, a white draw color, and identity
// transforms. Initialize must still be called before use.
func NewRenderContext(world api.IWorld) api.IRenderContext {
	o := new(renderContext)
	o.world = world
	o.clearColor = NewPaletteInt64(Orange).Color()
	o.drawColor = NewPaletteInt64(White).Color()
	o.current = maths.NewTransform()
	o.post = maths.NewTransform()
	o.windowSize = world.WindowSize()
	return o
}
// Initialize pre-allocates the full Save/Restore stack and applies the
// world's centered view-space transform as the base transform.
func (rc *renderContext) Initialize() {
	rc.stack = make([]*renderState, stackDepth)
	for i := 0; i < stackDepth; i++ {
		rc.stack[i] = newRS()
	}
	// Apply centered view-space matrix
	rc.Apply(rc.world.ViewSpace())
}

// Apply post-multiplies aft onto the current transform in place. The
// result is NOT pushed; pair with Save/Restore for scoped transforms.
func (rc *renderContext) Apply(aft api.IAffineTransform) {
	// Concat this transform onto the current transform but don't push it.
	// Use post multiply
	maths.Multiply(aft, rc.current, rc.post)
	rc.current.SetByTransform(rc.post)
}
// Pre begins a frame: sets the clear color on the renderer and clears the
// backbuffer.
func (rc *renderContext) Pre() {
	c := rc.clearColor
	renderer := rc.world.Renderer()
	renderer.SetDrawColor(c.R, c.G, c.B, c.A)
	renderer.Clear()
	//Draw checkered board as an clear indicator for debugging
	//NOTE: disable this code for release builds
	//draw_checkerboard(context);
}

// Save pushes the current colors and transform onto the stack.
// No overflow check is performed; pushing more than stackDepth states
// panics with an index-out-of-range.
func (rc *renderContext) Save() {
	top := rc.stack[rc.stackTop]
	top.clearColor = rc.clearColor
	top.drawColor = rc.drawColor
	top.current.SetByTransform(rc.current)
	rc.stackTop++
}

// Restore pops the most recently saved state and re-applies its clear
// color to the SDL renderer.
func (rc *renderContext) Restore() {
	rc.stackTop--
	top := rc.stack[rc.stackTop]
	rc.clearColor = top.clearColor
	rc.drawColor = top.drawColor
	rc.current.SetByTransform(top.current)
	c := rc.clearColor
	renderer := rc.world.Renderer()
	renderer.SetDrawColor(c.R, c.G, c.B, c.A)
}

// Post ends a frame by presenting the backbuffer.
func (rc *renderContext) Post() {
	renderer := rc.world.Renderer()
	renderer.Present()
}
// =_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.
// Transforms
// =_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.

// TransformPoint maps p through the current transform into out.
func (rc *renderContext) TransformPoint(p, out api.IPoint) {
	rc.current.TransformToPoint(p, out)
}

// TransformPoints maps p1 and p2 through the current transform into
// out1 and out2 respectively.
func (rc *renderContext) TransformPoints(p1, p2, out1, out2 api.IPoint) {
	rc.current.TransformToPoint(p1, out1)
	rc.current.TransformToPoint(p2, out2)
}

// TransformArray maps each vertex into the corresponding bucket slot.
// bucket must be at least as long as vertices.
func (rc *renderContext) TransformArray(vertices, bucket []api.IPoint) {
	for i := 0; i < len(vertices); i++ {
		rc.current.TransformToPoint(vertices[i], bucket[i])
	}
}

// TransformMesh maps the mesh's vertices into its own bucket storage.
func (rc *renderContext) TransformMesh(mesh api.IMesh) {
	vertices := mesh.Vertices()
	bucket := mesh.Bucket()
	for i := 0; i < len(vertices); i++ {
		rc.current.TransformToPoint(vertices[i], bucket[i])
	}
}

// TransformPolygon maps the polygon's mesh vertices into its bucket storage.
func (rc *renderContext) TransformPolygon(poly api.IPolygon) {
	vertices := poly.Mesh().Vertices()
	bucket := poly.Mesh().Bucket()
	for i := 0; i < len(vertices); i++ {
		rc.current.TransformToPoint(vertices[i], bucket[i])
	}
}
// =_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.
// Rendering
// =_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.=_.

// SetDrawColor stores color as the active draw color and pushes it to the
// SDL renderer.
func (rc *renderContext) SetDrawColor(color api.IPalette) {
	rc.drawColor = color.Color()
	renderer := rc.world.Renderer()
	renderer.SetDrawColor(rc.drawColor.R, rc.drawColor.G, rc.drawColor.B, rc.drawColor.A)
}

// DrawPoint plots a single pixel at (x, y) in device coordinates.
func (rc *renderContext) DrawPoint(x, y int32) {
	renderer := rc.world.Renderer()
	renderer.DrawPoint(x, y)
}

// DrawBigPoint plots a 3x3 pixel block centered on (x, y).
func (rc *renderContext) DrawBigPoint(x, y int32) {
	renderer := rc.world.Renderer()
	renderer.DrawPoint(x-1, y-1)
	renderer.DrawPoint(x, y-1)
	renderer.DrawPoint(x+1, y-1)
	renderer.DrawPoint(x+1, y)
	renderer.DrawPoint(x+1, y+1)
	renderer.DrawPoint(x, y+1)
	renderer.DrawPoint(x-1, y+1)
	renderer.DrawPoint(x-1, y)
	renderer.DrawPoint(x, y)
}

// DrawLine draws a line segment between two device-space points.
func (rc *renderContext) DrawLine(x1, y1, x2, y2 int32) {
	renderer := rc.world.Renderer()
	renderer.DrawLine(x1, y1, x2, y2)
}

// DrawLineUsing draws a line segment between two IPoints, truncating their
// float coordinates to int32.
func (rc *renderContext) DrawLineUsing(p1, p2 api.IPoint) {
	renderer := rc.world.Renderer()
	renderer.DrawLine(int32(p1.X()), int32(p1.Y()), int32(p2.X()), int32(p2.Y()))
}
// sdlRect is a shared scratch rectangle reused by the draw helpers below to
// avoid per-call allocations; it makes those helpers non-reentrant.
var sdlRect = &sdl.Rect{}

// DrawRectangle outlines rect using the active draw color.
func (rc *renderContext) DrawRectangle(rect api.IRectangle) {
	renderer := rc.world.Renderer()
	sdlRect.X, sdlRect.Y = rect.Min().ComponentsAsInt32()
	sdlRect.W, sdlRect.H = rect.DimesionsAsInt32()
	renderer.DrawRect(sdlRect)
}

// DrawFilledRectangle fills rect using the active draw color.
func (rc *renderContext) DrawFilledRectangle(rect api.IRectangle) {
	renderer := rc.world.Renderer()
	sdlRect.X, sdlRect.Y = rect.Min().ComponentsAsInt32()
	sdlRect.W, sdlRect.H = rect.DimesionsAsInt32()
	renderer.FillRect(sdlRect)
}
// DrawCheckerBoard fills the window with alternating gray squares of the
// given size (in pixels), typically used as a debug clear indicator.
func (rc *renderContext) DrawCheckerBoard(size int) {
	renderer := rc.world.Renderer()
	flip := false
	col := int32(0)
	row := int32(0)
	w, h := rc.windowSize.ComponentsAsInt32()
	s := int32(size)
	for row < h {
		for col < w {
			if flip {
				renderer.SetDrawColor(100, 100, 100, 255)
			} else {
				renderer.SetDrawColor(80, 80, 80, 255)
			}
			sdlRect.X = col
			sdlRect.Y = row
			// BUG FIX: sdl.Rect W/H are a width and height, not absolute
			// coordinates. The previous code set W = col + s and H = row + s,
			// producing ever-growing rectangles across the window instead of
			// size x size cells (compare RenderCheckerBoard, which correctly
			// uses max - min).
			sdlRect.W = s
			sdlRect.H = s
			renderer.FillRect(sdlRect)
			flip = !flip
			col += s
		}
		flip = !flip
		col = 0
		row += s
	}
}
// RenderLine draws a single line segment, truncating float coordinates to
// int32.
func (rc *renderContext) RenderLine(x1, y1, x2, y2 float64) {
	rc.DrawLine(int32(x1), int32(y1), int32(x2), int32(y2))
}

// RenderLines consumes the mesh's bucket points two at a time and draws a
// segment for each consecutive pair. Uses the shared v1/v2 scratch points.
func (rc *renderContext) RenderLines(mesh api.IMesh) {
	first := true
	for _, v := range mesh.Bucket() {
		if first {
			// Latch the first point of the pair and wait for the second.
			v1.SetByPoint(v)
			first = false
			continue
		} else {
			v2.SetByPoint(v)
			first = true
		}
		rc.DrawLine(int32(v1.X()), int32(v1.Y()), int32(v2.X()), int32(v2.Y()))
	}
}

// RenderPolygon draws the polygon's transformed outline; when style is
// api.CLOSED an extra segment connects the last vertex back to the first.
func (rc *renderContext) RenderPolygon(poly api.IPolygon, style int) {
	bucs := poly.Mesh().Bucket()
	for i := 0; i < len(bucs)-1; i++ {
		rc.DrawLine(int32(bucs[i].X()), int32(bucs[i].Y()), int32(bucs[i+1].X()), int32(bucs[i+1].Y()))
	}
	end := len(bucs) - 1
	if style == api.CLOSED {
		rc.DrawLine(int32(bucs[end].X()), int32(bucs[end].Y()), int32(bucs[0].X()), int32(bucs[0].Y()))
	}
}
// irect is a shared scratch rectangle for RenderAARectangle (non-reentrant).
var irect = geometry.NewRectangle()

// RenderAARectangle draws an axis-aligned rectangle between min and max
// (coordinates rounded to the nearest integer). fillStyle selects FILLED,
// OUTLINED, or (any other value) both fill and outline.
func (rc *renderContext) RenderAARectangle(min, max api.IPoint, fillStyle int) {
	irect.Set(math.Round(min.X()), math.Round(min.Y()), math.Round(max.X()), math.Round(max.Y()))
	if fillStyle == api.FILLED {
		rc.DrawFilledRectangle(irect)
	} else if fillStyle == api.OUTLINED {
		rc.DrawRectangle(irect)
	} else {
		rc.DrawFilledRectangle(irect)
		rc.DrawRectangle(irect)
	}
}
// RenderCheckerBoard fills alternating cells described by the mesh's bucket
// points, consumed in (upper-left, bottom-right) pairs. The odd/even colors
// flip per cell, and the flip phase is re-seeded at the start of each row so
// adjacent rows alternate.
func (rc *renderContext) RenderCheckerBoard(mesh api.IMesh, oddColor api.IPalette, evenColor api.IPalette) {
	flip := false
	pFlip := false
	pX := 0.0
	vertices := mesh.Bucket()
	build := true
	renderer := rc.world.Renderer()
	for _, vertex := range vertices {
		if build {
			v1.SetByPoint(vertex)
			build = false
			// We have reached the end of the row when the next
			// X value is suddenly less than the current X value.
			if v1.X() < pX {
				flip = !pFlip
				pFlip = flip
			}
			pX = v1.X()
			continue
		} else {
			v2.SetByPoint(vertex)
			build = true
		}
		if flip {
			renderer.SetDrawColor(oddColor.R(), oddColor.G(), oddColor.B(), oddColor.A())
		} else {
			renderer.SetDrawColor(evenColor.R(), evenColor.G(), evenColor.B(), evenColor.A())
		}
		// upper-left
		minx := int32(math.Round(v1.X()))
		miny := int32(math.Round(v1.Y()))
		// bottom-right
		maxx := int32(math.Round(v2.X()))
		maxy := int32(math.Round(v2.Y()))
		sdlRect.X = minx
		sdlRect.Y = miny
		sdlRect.W = maxx - minx
		sdlRect.H = maxy - miny
		renderer.FillRect(sdlRect)
		flip = !flip
	}
}
// shifts enumerates the 8 bit positions of one glyph row byte.
var shifts = []int{0, 1, 2, 3, 4, 5, 6, 7}

// DrawText renders text at (x, y) using the world's 8-pixel-wide raster
// font. scale magnifies each font pixel to a scale x scale block; fill
// (when < scale) shrinks each block to leave a gutter between pixels;
// invert draws the background bits instead of the glyph bits.
func (rc *renderContext) DrawText(x, y float64, text string, scale int, fill int, invert bool) {
	rasterFont := rc.world.RasterFont()
	cx := int32(x)
	s := int32(scale)
	rowWidth := int32(rasterFont.GlyphWidth())
	// Is the text colored or the space around it (aka inverted)
	bitInvert := uint8(1)
	if invert {
		bitInvert = 0
	}
	renderer := rc.world.Renderer()
	for _, c := range text {
		if c == ' ' {
			cx += rowWidth * s // move to next column/char/glyph
			continue
		}
		gy := int32(y) // move y back to the "top" for each char
		glyph := rasterFont.Glyph(byte(c))
		for _, g := range glyph {
			gx := cx // set to current column
			for _, shift := range shifts {
				bit := (g >> shift) & 1
				if bit == bitInvert {
					if scale == 1 {
						renderer.DrawPoint(gx, gy)
					} else {
						// Disable the gutter when fill would consume the
						// whole block.
						fillet := fill
						if fill >= scale {
							fillet = 0
						}
						for xl := int32(0); xl < int32(scale-fillet); xl++ {
							for yl := int32(0); yl < int32(scale-fillet); yl++ {
								renderer.DrawPoint(gx+xl, gy+yl)
							}
						}
					}
				}
				gx += s
			}
			gy += s // move to next pixel-row in char
		}
		cx += rowWidth * s // move to next column/char/glyph
	}
}
package lib
import (
"errors"
"fmt"
"time"
"github.com/dcaiafa/nitro"
)
// Time wraps a time.Time as an immutable nitro value.
type Time struct {
	time time.Time
}

// Compile-time check that Time supports indexed member access.
var _ /* implements */ nitro.Indexable = Time{}

// NewTime wraps t in a nitro Time value.
func NewTime(t time.Time) Time {
	return Time{time: t}
}

// String returns the default Go textual form of the wrapped time.
func (t Time) String() string { return t.time.String() }

// Type returns the nitro type name for Time values.
func (t Time) Type() string { return "time" }
// EvalOp implements nitro operators for Time: comparisons and subtraction
// require another Time operand (subtraction yields a Duration); addition
// requires a Duration operand and yields a new Time. Any other combination
// is an error.
func (t Time) EvalOp(op nitro.Op, operand nitro.Value) (nitro.Value, error) {
	switch op {
	case nitro.OpEq, nitro.OpSub, nitro.OpLT, nitro.OpLE, nitro.OpGT, nitro.OpGE:
		operandTime, ok := operand.(Time)
		if !ok {
			return nil, fmt.Errorf(
				"invalid operation between time and %v",
				nitro.TypeName(operand))
		}
		switch op {
		case nitro.OpEq:
			return nitro.NewBool(t.time.Equal(operandTime.time)), nil
		case nitro.OpSub:
			return Duration{t.time.Sub(operandTime.time)}, nil
		case nitro.OpLT:
			return nitro.NewBool(t.time.Before(operandTime.time)), nil
		case nitro.OpLE:
			return nitro.NewBool(t.time.Before(operandTime.time) ||
				t.time.Equal(operandTime.time)), nil
		case nitro.OpGT:
			return nitro.NewBool(t.time.After(operandTime.time)), nil
		case nitro.OpGE:
			return nitro.NewBool(t.time.After(operandTime.time) ||
				t.time.Equal(operandTime.time)), nil
		default:
			// The outer case list restricts op to the branches above.
			panic("unreachable")
		}
	case nitro.OpAdd:
		operandDur, ok := operand.(Duration)
		if !ok {
			return nil, fmt.Errorf(
				"invalid operation between time and %v",
				nitro.TypeName(operand))
		}
		return Time{t.time.Add(operandDur.dur)}, nil
	default:
		return nil, fmt.Errorf("operation is not supported by time")
	}
}
// Index resolves member access on a Time value. Method-like members
// (utc, local, format, unix, unixnano) return native functions; calendar
// components (year..nanosecond) return ints directly. Unknown members and
// non-string keys are errors.
func (t Time) Index(key nitro.Value) (nitro.Value, error) {
	keyStr, ok := key.(nitro.String)
	if !ok {
		return nil, fmt.Errorf(
			"time cannot be indexed by %q",
			nitro.TypeName(key))
	}
	switch keyStr.String() {
	case "utc":
		return nitro.NativeFn(t.utc), nil
	case "local":
		return nitro.NativeFn(t.local), nil
	case "format":
		return nitro.NativeFn(t.format), nil
	case "unix":
		return nitro.NativeFn(t.unix), nil
	case "unixnano":
		return nitro.NativeFn(t.unixnano), nil
	case "year":
		return nitro.NewInt(int64(t.time.Year())), nil
	case "month":
		return nitro.NewInt(int64(t.time.Month())), nil
	case "day":
		return nitro.NewInt(int64(t.time.Day())), nil
	case "hour":
		return nitro.NewInt(int64(t.time.Hour())), nil
	case "minute":
		return nitro.NewInt(int64(t.time.Minute())), nil
	case "second":
		return nitro.NewInt(int64(t.time.Second())), nil
	case "nanosecond":
		return nitro.NewInt(int64(t.time.Nanosecond())), nil
	default:
		return nil, fmt.Errorf(
			"time does not have member %q",
			keyStr.String())
	}
}

// IndexRef rejects all writes: Time values are read-only.
func (t Time) IndexRef(key nitro.Value) (nitro.ValueRef, error) {
	return nitro.ValueRef{}, fmt.Errorf("time is read-only")
}
// utc returns a copy of this Time converted to the UTC location.
// Takes no arguments. (t is a value receiver, so the original is untouched.)
func (t Time) utc(m *nitro.VM, args []nitro.Value, nRet int) ([]nitro.Value, error) {
	if len(args) != 0 {
		return nil, fmt.Errorf("utc takes no arguments")
	}
	t.time = t.time.UTC()
	return []nitro.Value{t}, nil
}

// local returns a copy of this Time converted to the local location.
// Takes no arguments.
func (t Time) local(m *nitro.VM, args []nitro.Value, nRet int) ([]nitro.Value, error) {
	if len(args) != 0 {
		return nil, fmt.Errorf("local takes no arguments")
	}
	t.time = t.time.Local()
	return []nitro.Value{t}, nil
}
// errTimeFormatUsage is returned when format receives bad arguments.
var errTimeFormatUsage = errors.New(
	"invalid usage. Expected <time>.format(layout: string?)")

// format renders the time as a string. The optional single argument is a
// layout name understood by convertTimeLayout (or a literal Go layout);
// when omitted, RFC 3339 is used.
func (t Time) format(m *nitro.VM, args []nitro.Value, nRet int) ([]nitro.Value, error) {
	if len(args) != 0 && len(args) != 1 {
		return nil, errTimeFormatUsage
	}
	layout := time.RFC3339
	if len(args) == 1 {
		layoutArg, ok := args[0].(nitro.String)
		if !ok {
			return nil, errTimeFormatUsage
		}
		layout = convertTimeLayout(layoutArg.String())
	}
	res := t.time.Format(layout)
	return []nitro.Value{nitro.NewString(res)}, nil
}
// unix returns the time as whole seconds since the Unix epoch.
func (t Time) unix(m *nitro.VM, args []nitro.Value, nRet int) ([]nitro.Value, error) {
	if len(args) != 0 {
		return nil, errTakesNoArgs
	}
	return []nitro.Value{nitro.NewInt(t.time.Unix())}, nil
}

// unixnano returns the time as nanoseconds since the Unix epoch.
func (t Time) unixnano(m *nitro.VM, args []nitro.Value, nRet int) ([]nitro.Value, error) {
	if len(args) != 0 {
		return nil, errTakesNoArgs
	}
	return []nitro.Value{nitro.NewInt(t.time.UnixNano())}, nil
}

// now returns the current wall-clock time as a nitro Time value.
func now(m *nitro.VM, args []nitro.Value, nRet int) ([]nitro.Value, error) {
	if len(args) != 0 {
		return nil, errTakesNoArgs
	}
	return []nitro.Value{Time{time.Now()}}, nil
}
// parseTime parses a time string. Arg 0 is the string to parse; optional
// arg 1 is a layout name (resolved by convertTimeLayout) or literal Go
// layout. RFC 3339 is the default layout.
func parseTime(m *nitro.VM, args []nitro.Value, nRet int) ([]nitro.Value, error) {
	timeStr, err := getStringArg(args, 0)
	if err != nil {
		return nil, err
	}
	var layout string
	if len(args) >= 2 {
		layout, err = getStringArg(args, 1)
		if err != nil {
			return nil, err
		}
		layout = convertTimeLayout(layout)
	} else {
		layout = time.RFC3339
	}
	t, err := time.Parse(layout, timeStr)
	if err != nil {
		return nil, err
	}
	return []nitro.Value{Time{time: t}}, nil
}

// timeFromUnix builds a Time from Unix epoch seconds (arg 0) plus an
// optional nanosecond component (arg 1, defaults to 0).
func timeFromUnix(m *nitro.VM, args []nitro.Value, nRet int) ([]nitro.Value, error) {
	var err error
	var sec, nano int64
	sec, err = getIntArg(args, 0)
	if err != nil {
		return nil, err
	}
	if len(args) >= 2 {
		nano, err = getIntArg(args, 1)
		if err != nil {
			return nil, err
		}
	}
	t := time.Unix(sec, nano)
	return []nitro.Value{Time{time: t}}, nil
}
// timeLayouts maps the user-facing layout names to the time package's
// reference layout constants.
var timeLayouts = map[string]string{
	"ansic":       time.ANSIC,
	"unixdate":    time.UnixDate,
	"rubydate":    time.RubyDate,
	"rfc822":      time.RFC822,
	"rfc822z":     time.RFC822Z,
	"rfc850":      time.RFC850,
	"rfc1123":     time.RFC1123,
	"rfc1123z":    time.RFC1123Z,
	"rfc3339":     time.RFC3339,
	"rfc3339nano": time.RFC3339Nano,
	"kitchen":     time.Kitchen,
	"stamp":       time.Stamp,
	"stampmilli":  time.StampMilli,
	"stampmicro":  time.StampMicro,
	"stampnano":   time.StampNano,
}

// convertTimeLayout translates a well-known layout name into the
// corresponding time package layout string; any other input is returned
// unchanged and treated as a literal Go layout.
func convertTimeLayout(timeFmt string) string {
	if layout, ok := timeLayouts[timeFmt]; ok {
		return layout
	}
	return timeFmt
}
package utils
import (
"time"
"px.dev/pixie/src/shared/services/jwtpb"
)
// ClaimType represents the type of claims we allow in our system.
type ClaimType int

const (
	// UnknownClaimType is an unknown type.
	UnknownClaimType ClaimType = iota
	// UserClaimType is a claim for a user.
	UserClaimType
	// ServiceClaimType is a claim for a service.
	ServiceClaimType
	// ClusterClaimType is a claim type for a cluster.
	ClusterClaimType
)

// GetClaimsType gets the type of the given claim, derived from which
// oneof variant populates CustomClaims.
func GetClaimsType(c *jwtpb.JWTClaims) ClaimType {
	switch c.CustomClaims.(type) {
	case *jwtpb.JWTClaims_UserClaims:
		return UserClaimType
	case *jwtpb.JWTClaims_ServiceClaims:
		return ServiceClaimType
	case *jwtpb.JWTClaims_ClusterClaims:
		return ClusterClaimType
	default:
		return UnknownClaimType
	}
}
// GenerateJWTForUser creates protobuf claims for the given user, with the
// "user" scope, the userID as subject, and the supplied expiry/audience.
func GenerateJWTForUser(userID string, orgID string, email string, expiresAt time.Time, audience string) *jwtpb.JWTClaims {
	claims := jwtpb.JWTClaims{
		Subject: userID,
		// Standard claims.
		Audience:  audience,
		ExpiresAt: expiresAt.Unix(),
		IssuedAt:  time.Now().Unix(),
		Issuer:    "PL",
		Scopes:    []string{"user"},
	}
	claims.CustomClaims = &jwtpb.JWTClaims_UserClaims{
		UserClaims: &jwtpb.UserJWTClaims{
			Email:  email,
			UserID: userID,
			OrgID:  orgID,
		},
	}
	return &claims
}

// GenerateJWTForAPIUser creates protobuf claims for an API user. The user
// claims carry IsAPIUser=true and no email.
// NOTE(review): unlike GenerateJWTForUser, the JWT Subject here is the
// orgID rather than the userID — confirm this asymmetry is intentional.
func GenerateJWTForAPIUser(userID string, orgID string, expiresAt time.Time, audience string) *jwtpb.JWTClaims {
	claims := jwtpb.JWTClaims{
		Subject: orgID,
		// Standard claims.
		Audience:  audience,
		ExpiresAt: expiresAt.Unix(),
		IssuedAt:  time.Now().Unix(),
		Issuer:    "PL",
		Scopes:    []string{"user"},
	}
	claims.CustomClaims = &jwtpb.JWTClaims_UserClaims{
		UserClaims: &jwtpb.UserJWTClaims{
			OrgID:     orgID,
			UserID:    userID,
			IsAPIUser: true,
		},
	}
	return &claims
}
// GenerateJWTForService creates protobuf claims for the given service with
// the "service" scope and a fixed 10-minute expiry.
func GenerateJWTForService(serviceID string, audience string) *jwtpb.JWTClaims {
	pbClaims := jwtpb.JWTClaims{
		Audience:  audience,
		Subject:   serviceID,
		Issuer:    "PL",
		ExpiresAt: time.Now().Add(time.Minute * 10).Unix(),
		Scopes:    []string{"service"},
		CustomClaims: &jwtpb.JWTClaims_ServiceClaims{
			ServiceClaims: &jwtpb.ServiceJWTClaims{
				ServiceID: serviceID,
			},
		},
	}
	return &pbClaims
}

// GenerateJWTForCluster creates protobuf claims for the given cluster with
// the "cluster" scope and a 1-hour expiry. IssuedAt/NotBefore are
// backdated 2 minutes to tolerate clock skew on user clusters.
func GenerateJWTForCluster(clusterID string, audience string) *jwtpb.JWTClaims {
	pbClaims := jwtpb.JWTClaims{
		Audience:  audience,
		ExpiresAt: time.Now().Add(time.Hour).Unix(),
		// The IssuedAt begins earlier, to give leeway for user's clusters
		// which may have some clock skew.
		IssuedAt:  time.Now().Add(-2 * time.Minute).Unix(),
		NotBefore: time.Now().Add(-2 * time.Minute).Unix(),
		Issuer:    "pixielabs.ai",
		Subject:   "pixielabs.ai/vizier",
		Scopes:    []string{"cluster"},
		CustomClaims: &jwtpb.JWTClaims_ClusterClaims{
			ClusterClaims: &jwtpb.ClusterJWTClaims{
				ClusterID: clusterID,
			},
		},
	}
	return &pbClaims
}
package validator
import (
"fmt"
"net"
"unicode/utf8"
)
// BetweenString reports whether the rune count of v lies within
// [left, right] (delegates to DigitsBetweenInt64).
func BetweenString(v string, left int64, right int64) bool {
	return DigitsBetweenInt64(int64(utf8.RuneCountInString(v)), left, right)
}
// InString reports whether str equals one of the strings in params.
func InString(str string, params []string) bool {
	for _, candidate := range params {
		if candidate == str {
			return true
		}
	}
	return false
}
// compareString reports whether the rune count of first compares to second
// under the given operator (<, >, <=, >=, ==).
// It panics on an unsupported operator.
func compareString(first string, second int64, operator string) bool {
	// Count runes once instead of rescanning the string in every branch.
	length := int64(utf8.RuneCountInString(first))
	switch operator {
	case "<":
		return length < second
	case ">":
		return length > second
	case "<=":
		return length <= second
	case ">=":
		return length >= second
	case "==":
		return length == second
	default:
		panic(fmt.Sprintf("validator: compareString unsupport operator %s", operator))
	}
}
// The Is* checks below all treat the empty string as valid; callers that
// require a value should combine them with a separate "required" check.
// The rx* patterns are package-level compiled regexps defined elsewhere in
// this package.

// IsAlpha check if the string may be only contains letters (a-zA-Z). Empty string is valid.
func IsAlpha(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxAlpha.MatchString(str)
}

// IsAlphaNum check if the string may be only contains letters and numbers. Empty string is valid.
func IsAlphaNum(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxAlphaNum.MatchString(str)
}

// IsAlphaDash check if the string may be only contains letters, numbers, dashes and underscores. Empty string is valid.
func IsAlphaDash(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxAlphaDash.MatchString(str)
}

// IsAlphaUnicode check if the string may be only contains unicode letters. Empty string is valid.
func IsAlphaUnicode(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxAlphaUnicode.MatchString(str)
}

// IsAlphaNumUnicode check if the string may be only contains unicode letters and numbers. Empty string is valid.
func IsAlphaNumUnicode(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxAlphaNumUnicode.MatchString(str)
}

// IsAlphaDashUnicode check if the string may be only contains unicode letters, numbers, dashes and underscores. Empty string is valid.
func IsAlphaDashUnicode(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxAlphaDashUnicode.MatchString(str)
}

// IsNumeric check if the string must be numeric. Empty string is valid.
func IsNumeric(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxNumeric.MatchString(str)
}

// IsInt check if the string must be an integer. Empty string is valid.
func IsInt(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxInt.MatchString(str)
}

// IsFloat check if the string must be an float. Empty string is valid.
func IsFloat(str string) bool {
	if IsNull(str) {
		return true
	}
	return rxFloat.MatchString(str)
}
// IsNull reports whether the string is empty.
func IsNull(str string) bool {
	return str == ""
}
// IsEmail check if the string is an email.
// Validation is delegated to the package-level rxEmail pattern.
func IsEmail(str string) bool {
	// TODO uppercase letters are not supported
	return rxEmail.MatchString(str)
}
// IsIPv4 reports whether v parses as an IPv4 address.
func IsIPv4(v string) bool {
	parsed := net.ParseIP(v)
	if parsed == nil {
		return false
	}
	return parsed.To4() != nil
}

// IsIPv6 reports whether v parses as an IPv6 (non-IPv4) address.
func IsIPv6(v string) bool {
	parsed := net.ParseIP(v)
	if parsed == nil {
		return false
	}
	return parsed.To4() == nil
}

// IsIP reports whether v parses as any IP address (v4 or v6).
func IsIP(v string) bool {
	return net.ParseIP(v) != nil
}
// DistinctString is the validation function for validating that a slice of
// strings contains no duplicate values: it returns true only when every
// element is unique (an empty/nil slice is trivially distinct).
//
// BUG FIX: the previous implementation returned inArrayString(v, v), which
// is true for ANY non-empty slice (every element trivially matches itself),
// so the validator could never fail.
func DistinctString(v []string) bool {
	seen := make(map[string]struct{}, len(v))
	for _, s := range v {
		if _, dup := seen[s]; dup {
			return false
		}
		seen[s] = struct{}{}
	}
	return true
}

// inArrayString reports whether any element of needle appears in haystack.
func inArrayString(needle []string, haystack []string) bool {
	for _, n := range needle {
		for _, s := range haystack {
			if n == s {
				return true
			}
		}
	}
	return false
}
package primitive
import (
"fmt"
"strings"
"math"
"github.com/fogleman/gg"
"github.com/golang/freetype/raster"
)
// Line is a stroked line-segment primitive. (X1,Y1)-(X2,Y2) is the mutated
// control segment; (tX1,tY1)-(tX2,tY2) is that segment extended to the
// canvas border by Extend, and is what actually gets drawn/rasterized.
type Line struct {
	Worker *Worker
	X1, Y1 float64
	X2, Y2 float64
	tX1, tY1 float64
	tX2, tY2 float64
	Width float64
}
// NewRandomLine builds a line with random endpoints inside the canvas,
// normalized so x1 <= x2, with an initial half-pixel stroke width, then
// extends it to the border and applies one mutation pass.
func NewRandomLine(worker *Worker) *Line {
	rnd := worker.Rnd
	x1 := rnd.Float64() * float64(worker.W)
	y1 := rnd.Float64() * float64(worker.H)
	x2 := rnd.Float64() * float64(worker.W)
	y2 := rnd.Float64() * float64(worker.H)
	// Keep the endpoints ordered left-to-right.
	if (x1 > x2) {
		x1, x2 = x2, x1
		y1, y2 = y2, y1
	}
	width := 1.0 / 2
	q := &Line{worker, x1, y1, x2, y2, x1, y1, x2, y2, width}
	q.Extend()
	q.Mutate()
	return q
}
// Extend projects the control segment out to the canvas border, storing the
// result in (tX1,tY1)-(tX2,tY2). Vertical segments are stretched to the full
// height; otherwise the branch taken depends on comparing the canvas aspect
// ratio with the segment slope.
//
// NOTE(review): the comparison of slope = W/H (an aspect ratio) against
// |slope1| = |dy/dx| looks dimensionally inverted (H/W would be the slope of
// the canvas diagonal); confirm the intended geometry before relying on the
// branch selection here.
func (q *Line) Extend() {
	W := float64(q.Worker.W)
	H := float64(q.Worker.H)
	if q.X1 == q.X2 {
		// Vertical line: span the full canvas height.
		q.tY1 = 0
		q.tY2 = H
	} else {
		slope := W/H
		slope1 := (q.Y2 - q.Y1)/(q.X2 - q.X1)
		if slope < math.Abs(slope1) {
			// Steep segment: extend to the top (y=0) and bottom (y=H) edges.
			q.tX1 = q.X1 - q.Y1/slope1
			q.tX2 = q.X2 + (H - q.Y2)/slope1
			q.tY1 = 0
			q.tY2 = H
		} else if slope > math.Abs(slope1) {
			// Shallow segment: extend to the left (x=0) and right (x=W) edges.
			q.tY1 = q.Y1 - q.X1 * slope1
			q.tY2 = q.Y2 + (W - q.X2) * slope1
			q.tX1 = 0
			q.tX2 = W
		} else {
			// Exactly diagonal: snap to opposite canvas corners.
			q.tX1 = 0
			q.tY1 = 0
			q.tX2 = W
			q.tY2 = H
		}
	}
}
// Draw strokes the extended line on the gg context, scaling the stroke
// width by the given factor.
func (q *Line) Draw(dc *gg.Context, scale float64) {
	dc.DrawLine(q.tX1, q.tY1, q.tX2, q.tY2)
	dc.SetLineWidth(q.Width * scale)
	dc.Stroke()
}

// SVG renders the extended line as an SVG <line> element. The shared attrs
// string uses "fill", which is rewritten to "stroke" for line elements.
func (q *Line) SVG(attrs string) string {
	// TODO: this is a little silly
	attrs = strings.Replace(attrs, "fill", "stroke", -1)
	return fmt.Sprintf(
		"<line %s fill=\"none\" x1=\"%f\" y1=\"%f\" x2=\"%f\" y2=\"%f\" stroke-width=\"%f\" />",
		attrs, q.tX1, q.tY1, q.tX2, q.tY2, q.Width)
}
// Copy returns an independent copy of the line. A value copy suffices
// because all fields besides the shared Worker pointer are scalars.
func (q *Line) Copy() Shape {
	a := *q
	return &a
}
// Mutate randomly perturbs one attribute of the line — endpoint 1,
// endpoint 2, or the stroke width — then re-extends it to the canvas
// border, retrying until the result satisfies Valid.
func (q *Line) Mutate() {
	const m = 16
	w := q.Worker.W
	h := q.Worker.H
	rnd := q.Worker.Rnd
	for {
		// BUG FIX: the original used rnd.Intn(2), which only yields 0 or 1,
		// leaving the width-mutation branch (case 2) as unreachable dead
		// code. Intn(3) makes all three mutations possible.
		switch rnd.Intn(3) {
		case 0:
			q.X1 = clamp(q.X1+rnd.NormFloat64()*16, -m, float64(w-1+m))
			q.Y1 = clamp(q.Y1+rnd.NormFloat64()*16, -m, float64(h-1+m))
		case 1:
			q.X2 = clamp(q.X2+rnd.NormFloat64()*16, -m, float64(w-1+m))
			q.Y2 = clamp(q.Y2+rnd.NormFloat64()*16, -m, float64(h-1+m))
		case 2:
			q.Width = clamp(q.Width+rnd.NormFloat64(), 1, 4)
		}
		q.Extend()
		if q.Valid() {
			break
		}
	}
}
// Valid reports whether the extended line is acceptable.
// NOTE(review): this accepts the line only when the extension anchored it
// to the left edge (tX1 == 0) or the top edge (tY1 == 0) — presumably an
// artifact of how Extend sets those branches; confirm this is the intended
// acceptance criterion.
func (q *Line) Valid() bool {
	return (q.tX1 == 0 || q.tY1 == 0)
}
// Rasterize converts the extended line into scanlines by stroking a
// two-point raster path with round caps and joins at the line's width.
func (q *Line) Rasterize() []Scanline {
	var path raster.Path
	p1 := fixp(q.tX1, q.tY1)
	p2 := fixp(q.tX2, q.tY2)
	path.Start(p1)
	path.Add1(p2)
	width := fix(q.Width)
	return strokePath(q.Worker, path, width, raster.RoundCapper, raster.RoundJoiner)
}
package caser
import (
"strings"
)
// Convert re-cases v from case style f to case style t by choosing a
// splitter for the source style, a per-word mapper and joiner for the
// target style, then running split-map-join. If either style is
// unrecognized, v is returned unchanged.
//
// NOTE(review): strings.Title is deprecated in modern Go (it title-cases by
// letter boundaries, not words); consider golang.org/x/text/cases when this
// package is next revised.
func Convert(v string, f, t CaseType) string {
	var splitFn func(string) []string
	var mapFn func(string) string
	var joinFn func([]string) string
	var o string
	// Pick the splitter from the source case style.
	switch f {
	case CaseType_Camel:
		splitFn = splitOnCapitalOrNumber
	case CaseType_Pascal:
		splitFn = splitOnCapitalOrNumber
	case CaseType_Snake, CaseType_Snakeupper:
		splitFn = splitOnUnderscore
	case CaseType_Kebab, CaseType_Kebabupper:
		splitFn = splitOnDash
	case CaseType_Phrase:
		splitFn = splitOnSpace
	default:
		o = v
	}
	// Pick the word mapper and joiner from the target case style.
	switch t {
	case CaseType_Camel:
		mapFn = mapPipeline(strings.ToLower, strings.Title)
		joinFn = joinWithNoSpace
	case CaseType_Phrase:
		mapFn = mapPipeline(strings.ToLower, strings.Title)
		joinFn = joinWithSpace
	case CaseType_Snake:
		mapFn = strings.ToLower
		joinFn = joinWithUnderscore
	case CaseType_Snakeupper:
		mapFn = strings.ToUpper
		joinFn = joinWithUnderscore
	case CaseType_Pascal:
		mapFn = mapPipeline(strings.ToLower, strings.Title)
		joinFn = joinWithNoSpace
	case CaseType_Kebab:
		mapFn = strings.ToLower
		joinFn = joinWithDash
	case CaseType_Kebabupper:
		mapFn = strings.ToUpper
		joinFn = joinWithDash
	default:
		o = v
	}
	if splitFn != nil && mapFn != nil && joinFn != nil {
		o = splitMapJoin(v, splitFn, mapFn, joinFn)
		// Camel is Pascal with a lowercased first letter.
		switch t {
		case CaseType_Camel:
			o = lcFirst(o)
		}
	}
	return o
}
func CamelToPhase(v string) string {
return Convert(v, CaseType_Camel, CaseType_Phrase)
}
func CamelToSnake(v string) string {
return Convert(v, CaseType_Camel, CaseType_Snake)
}
func CamelToPascal(v string) string {
return Convert(v, CaseType_Camel, CaseType_Pascal)
}
func CamelToKebab(v string) string {
return Convert(v, CaseType_Camel, CaseType_Kebab)
}
func CamelToKebabUpper(v string) string {
return Convert(v, CaseType_Camel, CaseType_Kebabupper)
}
// PascalToPhase converts a PascalCase string to a space-separated phrase.
//
// Deprecated: the name misspells "Phrase"; use PascalToPhrase instead.
func PascalToPhase(v string) string {
	return PascalToPhrase(v)
}

// PascalToPhrase converts a PascalCase string to a space-separated phrase.
func PascalToPhrase(v string) string {
	return Convert(v, CaseType_Pascal, CaseType_Phrase)
}

// PascalToCamel converts a PascalCase string to camelCase.
func PascalToCamel(v string) string {
	return Convert(v, CaseType_Pascal, CaseType_Camel)
}

// PascalToSnake converts a PascalCase string to snake_case.
func PascalToSnake(v string) string {
	return Convert(v, CaseType_Pascal, CaseType_Snake)
}

// PascalToSnakeUpper converts a PascalCase string to upper SNAKE_CASE.
func PascalToSnakeUpper(v string) string {
	return Convert(v, CaseType_Pascal, CaseType_Snakeupper)
}

// PascalToKebab converts a PascalCase string to kebab-case.
func PascalToKebab(v string) string {
	return Convert(v, CaseType_Pascal, CaseType_Kebab)
}

// PascalToKebabUpper converts a PascalCase string to upper KEBAB-CASE.
func PascalToKebabUpper(v string) string {
	return Convert(v, CaseType_Pascal, CaseType_Kebabupper)
}
// SnakeToPhase converts a snake_case string to a space-separated phrase.
//
// Deprecated: the name misspells "Phrase"; use SnakeToPhrase instead.
func SnakeToPhase(v string) string {
	return SnakeToPhrase(v)
}

// SnakeToPhrase converts a snake_case string to a space-separated phrase.
func SnakeToPhrase(v string) string {
	return Convert(v, CaseType_Snake, CaseType_Phrase)
}

// SnakeToCamel converts a snake_case string to camelCase.
func SnakeToCamel(v string) string {
	return Convert(v, CaseType_Snake, CaseType_Camel)
}

// SnakeToPascal converts a snake_case string to PascalCase.
func SnakeToPascal(v string) string {
	return Convert(v, CaseType_Snake, CaseType_Pascal)
}

// SnakeToKebab converts a snake_case string to kebab-case.
func SnakeToKebab(v string) string {
	return Convert(v, CaseType_Snake, CaseType_Kebab)
}

// SnakeToKebabUpper converts a snake_case string to upper KEBAB-CASE.
func SnakeToKebabUpper(v string) string {
	return Convert(v, CaseType_Snake, CaseType_Kebabupper)
}
// KebabToPhase converts a kebab-case string to a space-separated phrase.
//
// Deprecated: the name misspells "Phrase"; use KebabToPhrase instead.
func KebabToPhase(v string) string {
	return KebabToPhrase(v)
}

// KebabToPhrase converts a kebab-case string to a space-separated phrase.
func KebabToPhrase(v string) string {
	return Convert(v, CaseType_Kebab, CaseType_Phrase)
}

// KebabToCamel converts a kebab-case string to camelCase.
// Added for parity with the other conversion groups; Convert already
// supports the Kebab source and Camel target cases.
func KebabToCamel(v string) string {
	return Convert(v, CaseType_Kebab, CaseType_Camel)
}

// KebabToPascal converts a kebab-case string to PascalCase.
// Added for parity with the other conversion groups.
func KebabToPascal(v string) string {
	return Convert(v, CaseType_Kebab, CaseType_Pascal)
}

// KebabToSnake converts a kebab-case string to snake_case.
func KebabToSnake(v string) string {
	return Convert(v, CaseType_Kebab, CaseType_Snake)
}

// KebabToSnakeUpper converts a kebab-case string to upper SNAKE_CASE.
func KebabToSnakeUpper(v string) string {
	return Convert(v, CaseType_Kebab, CaseType_Snakeupper)
}
// PhraseToCamel converts a space-separated phrase to camelCase.
func PhraseToCamel(v string) string {
	return Convert(v, CaseType_Phrase, CaseType_Camel)
}

// PhraseToPascal converts a space-separated phrase to PascalCase.
func PhraseToPascal(v string) string {
	return Convert(v, CaseType_Phrase, CaseType_Pascal)
}

// PhraseToSnake converts a space-separated phrase to snake_case.
func PhraseToSnake(v string) string {
	return Convert(v, CaseType_Phrase, CaseType_Snake)
}

// PhraseToSnakeUpper converts a space-separated phrase to upper SNAKE_CASE.
func PhraseToSnakeUpper(v string) string {
	return Convert(v, CaseType_Phrase, CaseType_Snakeupper)
}

// PhraseToKebab converts a space-separated phrase to kebab-case.
func PhraseToKebab(v string) string {
	return Convert(v, CaseType_Phrase, CaseType_Kebab)
}

// PhraseToKebabUpper converts a space-separated phrase to upper KEBAB-CASE.
func PhraseToKebabUpper(v string) string {
	return Convert(v, CaseType_Phrase, CaseType_Kebabupper)
}
package rel
import (
"fmt"
"sort"
"strings"
"github.com/arr-ai/frozen"
)
// Names represents a set of names (strings), backed by frozen.Set.
// Operations such as With and Without return new sets rather than
// mutating the receiver.
type Names frozen.Set

// EmptyNames is the empty set of names.
var EmptyNames = Names(frozen.Set{})
// NewNames returns a new set of names containing each of the given names.
// Duplicates collapse to a single entry.
func NewNames(names ...string) Names {
	set := frozen.Set{}
	for i := range names {
		set = set.With(names[i])
	}
	return Names(set)
}
// Names returns a slice of the names in the set, in enumeration order
// (unspecified).
func (n Names) Names() []string {
	names := make([]string, 0, n.Count())
	for e := n.Enumerator(); e.MoveNext(); {
		names = append(names, e.Current())
	}
	return names
}

// OrderedNames returns a slice of the names in the set, sorted
// lexicographically.
func (n Names) OrderedNames() []string {
	names := n.Names()
	sort.Strings(names)
	return names
}
// IsTrue returns true iff there are names in the set.
func (n Names) IsTrue() bool {
	return n.Count() != 0
}

// Count returns the number of names in a set of names.
func (n Names) Count() int {
	return (frozen.Set(n)).Count()
}

// Hash computes a hash value for the set of names.
// The constant salts the seed — presumably so Names hashes differently
// from other types with the same underlying set; confirm against the
// codebase's hashing conventions.
func (n Names) Hash(seed uint32) uint32 {
	return uint32((frozen.Set(n)).Hash(uintptr(seed) + 0x4e351c91))
}
// Equal reports whether i is a Names set containing exactly the same
// names as n. Any other type compares unequal.
func (n Names) Equal(i interface{}) bool {
	x, ok := i.(Names)
	if !ok {
		return false
	}
	return (frozen.Set(n)).Equal(frozen.Set(x))
}

// String renders the set as "|a, b, c|", with the names in sorted order.
func (n Names) String() string {
	sorted := n.OrderedNames()
	return fmt.Sprintf("|%s|", strings.Join(sorted, ", "))
}
// With returns a set with all the input names and the given name.
func (n Names) With(name string) Names {
	return Names((frozen.Set(n)).With(name))
}

// Without returns a set with all the input names, excluding the given name.
func (n Names) Without(name string) Names {
	return Names((frozen.Set(n)).Without(name))
}

// Has returns true iff the given name is in the set of names.
func (n Names) Has(name string) bool {
	return (frozen.Set(n)).Has(name)
}

// Any returns an arbitrary element from `n`.
// NOTE(review): the type assertion suggests this panics on an empty set —
// confirm frozen.Set.Any's behavior before calling on possibly-empty sets.
func (n Names) Any() string {
	return (frozen.Set(n)).Any().(string)
}
// Enumerator returns an enumerator over a set of names. Call MoveNext
// before the first Current.
func (n Names) Enumerator() *NamesEnumerator {
	return &NamesEnumerator{(frozen.Set(n)).Range()}
}
// TheOne returns the single name in the set; panics if the set does not
// contain exactly one name.
func (n Names) TheOne() string {
	if n.Count() != 1 {
		panic("Names.TheOne expects exactly one name in the set")
	}
	// With exactly one element, the arbitrary element is the one.
	return n.Any()
}
// ToSlice returns a slice of the names in the set, in enumeration order.
// It produces the same result as Names and delegates to it to avoid
// duplicating the enumeration loop.
func (n Names) ToSlice() []string {
	return n.Names()
}
// Intersect returns names in both sets.
func (n Names) Intersect(o Names) Names {
	return Names((frozen.Set(n)).Intersection(frozen.Set(o)))
}

// Minus returns names in `n` not found in `o`.
func (n Names) Minus(o Names) Names {
	return Names((frozen.Set(n)).Difference(frozen.Set(o)))
}

// IsSubsetOf returns true if `n` is a subset of `o`.
func (n Names) IsSubsetOf(o Names) bool {
	return (frozen.Set(n)).IsSubsetOf(frozen.Set(o))
}
// NamesEnumerator represents an enumerator over a set of names.
type NamesEnumerator struct {
	i frozen.Iterator // underlying iterator over the frozen.Set
}

// MoveNext moves the enumerator to the next Value, returning false when
// the set is exhausted.
func (e *NamesEnumerator) MoveNext() bool {
	return e.i.Next()
}

// Current returns the enumerator's current Value. Only valid after a
// MoveNext that returned true.
func (e *NamesEnumerator) Current() string {
	return e.i.Value().(string)
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.